diff --git a/AtlasTest/DatabaseTest/AthenaPoolTest/CMakeLists.txt b/AtlasTest/DatabaseTest/AthenaPoolTest/CMakeLists.txt
index 36c004650d8c34870e15b97a589f2e71a2b577b5..9ae93acd3d74ea1c62d4199ed39c7542fe951559 100644
--- a/AtlasTest/DatabaseTest/AthenaPoolTest/CMakeLists.txt
+++ b/AtlasTest/DatabaseTest/AthenaPoolTest/CMakeLists.txt
@@ -34,7 +34,7 @@ function (athenapooltest_run_test testName jo pattern)
   atlas_add_test( ${testName}
                   SCRIPT athena.py AthenaPoolTest/${jo}.py
                   LOG_SELECT_PATTERN ${pattern}
-                  LOG_IGNORE_PATTERN "DEBUG Try item:|AthenaAttribute.*DB="
+                  LOG_IGNORE_PATTERN "DEBUG Try item:|AthenaAttribute.*DB=|AtlasRelease"
                   PROPERTIES TIMEOUT 900 )
   if( ARG_DEPENDS )
     foreach( _dep ${ARG_DEPENDS} )
diff --git a/Calorimeter/CaloCalibHitRec/src/CaloCalibClusterMomentsMaker.cxx b/Calorimeter/CaloCalibHitRec/src/CaloCalibClusterMomentsMaker.cxx
index 003e78422345174d81c9ca81dda47d04ac97d87e..5cce9d6e1e9f53eea386b865ade5bcf8915f20bd 100644
--- a/Calorimeter/CaloCalibHitRec/src/CaloCalibClusterMomentsMaker.cxx
+++ b/Calorimeter/CaloCalibHitRec/src/CaloCalibClusterMomentsMaker.cxx
@@ -39,7 +39,6 @@
 #include "CaloIdentifier/CaloDM_ID.h"
 
 #include "StoreGate/ReadHandle.h" 
-#include "CLHEP/Units/SystemOfUnits.h"
 
 #include <CLHEP/Geometry/Vector3D.h>
 #include <CLHEP/Units/SystemOfUnits.h>
diff --git a/Calorimeter/CaloClusterCorrection/src/H1WeightToolCSC12Generic.h b/Calorimeter/CaloClusterCorrection/src/H1WeightToolCSC12Generic.h
index 628848d757b5532cb7434b6e4cf8580a75b568ac..1ea88df86ab91786d20557ea54837edd4bcb8082 100755
--- a/Calorimeter/CaloClusterCorrection/src/H1WeightToolCSC12Generic.h
+++ b/Calorimeter/CaloClusterCorrection/src/H1WeightToolCSC12Generic.h
@@ -17,7 +17,6 @@ Created  : Feb 2007
 #include "GaudiKernel/AlgTool.h"
 #include "CaloInterface/IHadronicCalibrationTool.h"
 #include "CaloUtils/ToolWithConstants.h"
-//#include "GaudiKernel/AlgTool.h"
 #include "AthenaBaseComps/AthAlgTool.h"
 #include <string>
 
diff --git a/Calorimeter/CaloCondPhysAlgs/src/CaloCellEnergyCorr2Ntuple.h b/Calorimeter/CaloCondPhysAlgs/src/CaloCellEnergyCorr2Ntuple.h
index d2b0d73472154c63237ad9fdadc17ea0cceaa7a3..3504be156a3390a02a52d8660d7612c8cb476c76 100644
--- a/Calorimeter/CaloCondPhysAlgs/src/CaloCellEnergyCorr2Ntuple.h
+++ b/Calorimeter/CaloCondPhysAlgs/src/CaloCellEnergyCorr2Ntuple.h
@@ -19,7 +19,6 @@
 #include "CaloIdentifier/CaloCell_ID.h"
 #include "CaloInterface/ICaloNoiseTool.h"
 #include "CaloInterface/ICaloMBAverageTool.h"
-#include "GaudiKernel/ToolHandle.h"
 
 #include "GaudiKernel/ITHistSvc.h"
 #include "TTree.h"
diff --git a/Calorimeter/CaloCondPhysAlgs/src/CaloNoise2Ntuple.h b/Calorimeter/CaloCondPhysAlgs/src/CaloNoise2Ntuple.h
index 6e9290cf7534e3125d8d433ce111a7ca01c1795d..fec206282cc7696fd11493950de9427de17995b6 100644
--- a/Calorimeter/CaloCondPhysAlgs/src/CaloNoise2Ntuple.h
+++ b/Calorimeter/CaloCondPhysAlgs/src/CaloNoise2Ntuple.h
@@ -19,7 +19,6 @@
 #include "CaloIdentifier/CaloCell_ID.h"
 #include "CaloInterface/ICaloNoiseTool.h"
 #include "CaloInterface/ICaloMBAverageTool.h"
-#include "GaudiKernel/ToolHandle.h"
 
 #include "GaudiKernel/ITHistSvc.h"
 #include "TTree.h"
diff --git a/Calorimeter/CaloCondPhysAlgs/src/CaloRescaleNoise.h b/Calorimeter/CaloCondPhysAlgs/src/CaloRescaleNoise.h
index 61b23bb36956584712d74ad7d8e770cabb4e17b1..b1e5b03061be6c626d9f5e02622078d96ffd4312 100644
--- a/Calorimeter/CaloCondPhysAlgs/src/CaloRescaleNoise.h
+++ b/Calorimeter/CaloCondPhysAlgs/src/CaloRescaleNoise.h
@@ -19,7 +19,6 @@
 #include "CaloIdentifier/CaloCell_ID.h"
 #include "CaloInterface/ICaloNoiseTool.h"
 #include "CaloInterface/ICaloMBAverageTool.h"
-#include "GaudiKernel/ToolHandle.h"
 #include "LArElecCalib/ILArHVScaleCorr.h"
 #include "StoreGate/ReadCondHandleKey.h"  
 
diff --git a/Calorimeter/CaloConditions/src/CaloLocalHadCoeff.cxx b/Calorimeter/CaloConditions/src/CaloLocalHadCoeff.cxx
index 50628d5d05cfbc61fc006df828b46bd33282b144..b843adfd9f86d2cadb89655e1946c02f7913d75e 100644
--- a/Calorimeter/CaloConditions/src/CaloLocalHadCoeff.cxx
+++ b/Calorimeter/CaloConditions/src/CaloLocalHadCoeff.cxx
@@ -6,7 +6,6 @@
  * @file CaloConditions/src/CaloLocalHadCoeff.cxx
  * @brief Hold binned correction data for local hadronic calibration procedure
  */
-//#include "CaloConditions/CaloLocalHadCoeff.h"
 #include "CaloConditions/CaloLocalHadCoeff.h"
 #include "CaloConditions/CaloLocalHadDefs.h"
 #include <algorithm>
diff --git a/Calorimeter/CaloLocalHadCalib/src/CaloHadDMCoeffData.cxx b/Calorimeter/CaloLocalHadCalib/src/CaloHadDMCoeffData.cxx
index 8ad5d46f65bf6b5c0d3ae5db9017ef288058902c..a44cadc960f20c0085d8c648be11bf0c5ee3d6bd 100644
--- a/Calorimeter/CaloLocalHadCalib/src/CaloHadDMCoeffData.cxx
+++ b/Calorimeter/CaloLocalHadCalib/src/CaloHadDMCoeffData.cxx
@@ -15,7 +15,6 @@
 //      Gennady Pospelov
 //
 //-----------------------------------------------------------------------
-#include "CaloLocalHadCalib/CaloHadDMCoeffData.h"
 #include <cmath>
 #include <fstream>
 #include <iostream>
diff --git a/Calorimeter/CaloLocalHadCalib/src/GetLCSinglePionsPerf.cxx b/Calorimeter/CaloLocalHadCalib/src/GetLCSinglePionsPerf.cxx
index 61a7266a6252c355641a7bdb0a5a90526744b00a..5f576ea4470c0024a09512e0d1ac86f3d415778d 100644
--- a/Calorimeter/CaloLocalHadCalib/src/GetLCSinglePionsPerf.cxx
+++ b/Calorimeter/CaloLocalHadCalib/src/GetLCSinglePionsPerf.cxx
@@ -1355,18 +1355,18 @@ void GetLCSinglePionsPerf::as_in_atlas ( double &eta,  double &phi,
   constexpr double Yrun2 = 70;
 
   //  Get transformation parameters
-  double Beta = 2*atan( exp(-Eta0) );
+  double Beta = 2*std::atan( std::exp(-Eta0) );
   double Zemec = Zcalo[0];
-  double b = Zemec * tan(Beta);
-  double Yrun1 = b*cos(Alpha) - Zemec*sin(Alpha);
-  double z0 = z0emec + Yrun2*tan(Alpha) - b*sin(Alpha) - Zemec*cos(Alpha);
+  double b = Zemec * std::tan(Beta);
+  double Yrun1 = b*std::cos(Alpha) - Zemec*std::sin(Alpha);
+  double z0 = z0emec + Yrun2*std::tan(Alpha) - b*std::sin(Alpha) - Zemec*std::cos(Alpha);
   double y0 = Yrun2 - Yrun1;
-  double z0calo = z0emec + (Zcalo[iCalo]-Zcalo[0]) / cos (Alpha);
+  double z0calo = z0emec + (Zcalo[iCalo]-Zcalo[0]) / std::cos (Alpha);
 
   //  Get interception point: track with calorimeter front face
   double ctga = 1./ tan(Alpha);
   //double tgth = tan(Theta);
-  double tgth = tan(2*atan(exp(-eta)))*sin(phi);
+  double tgth = std::tan(2*std::atan(std::exp(-eta)))*std::sin(phi);
   double zx = (Y0 - Z0*tgth + z0calo*ctga) / (ctga - tgth);
   double yx = (zx - z0calo) * ctga;
   double xx = X0 + xCryo;
diff --git a/Calorimeter/CaloRec/share/t3333.C b/Calorimeter/CaloRec/share/t3333.C
index d6d32ae1e79c354b18ba4d1ae4bc0b19b488a33e..71d8de52c7875473dd894e1010e6d70e2e2060a6 100644
--- a/Calorimeter/CaloRec/share/t3333.C
+++ b/Calorimeter/CaloRec/share/t3333.C
@@ -8,8 +8,6 @@
 #define t3333_cxx
 #include "t3333.h"
 #include <TH2.h>
-#include <TStyle.h>
-#include <TCanvas.h>
 
 #include <TROOT.h>
 #include <TTree.h>
diff --git a/Calorimeter/CaloRec/src/CaloCellContainerCheckerTool.cxx b/Calorimeter/CaloRec/src/CaloCellContainerCheckerTool.cxx
index 7cac96a6b851fbdca9ffbed323ae5e2c68e78bd5..adbe561f33aa2029cc7613dee7da939a1d942c5c 100644
--- a/Calorimeter/CaloRec/src/CaloCellContainerCheckerTool.cxx
+++ b/Calorimeter/CaloRec/src/CaloCellContainerCheckerTool.cxx
@@ -20,7 +20,6 @@ PURPOSE:  check integrity of CaloCellContainer find and iterators
 #include "CaloEvent/CaloConstCellContainer.h"
 #include "CaloIdentifier/CaloCell_ID.h"
 #include "CaloDetDescr/CaloDetDescrManager.h"
-#include "CaloIdentifier/CaloCell_ID.h"
 
 #include "CLHEP/Units/SystemOfUnits.h"
 
diff --git a/Calorimeter/CaloRec/src/CaloClusterCopier.cxx b/Calorimeter/CaloRec/src/CaloClusterCopier.cxx
index 2462c7fb1536feabc7e88de049d945d5ee402ac1..8ca37a305972477faac138815a66ecd55f9f728a 100644
--- a/Calorimeter/CaloRec/src/CaloClusterCopier.cxx
+++ b/Calorimeter/CaloRec/src/CaloClusterCopier.cxx
@@ -13,8 +13,6 @@ CREATED:  Sep 2005
 
 ********************************************************************/
 
-//#include "CaloUtils/CaloClusterStoreHelper.h"
-
 #include "CaloClusterCopier.h"
 #include "xAODCaloEvent/CaloClusterContainer.h"
 #include "xAODCaloEvent/CaloCluster.h"
diff --git a/Calorimeter/CaloRec/src/CaloClusterMomentsMaker_DigiHSTruth.cxx b/Calorimeter/CaloRec/src/CaloClusterMomentsMaker_DigiHSTruth.cxx
index 7562746299ecae26811f66625518a56f2f3a9158..94c8cac1d50147dbabcdb22a0fab2a6171b1263f 100755
--- a/Calorimeter/CaloRec/src/CaloClusterMomentsMaker_DigiHSTruth.cxx
+++ b/Calorimeter/CaloRec/src/CaloClusterMomentsMaker_DigiHSTruth.cxx
@@ -25,7 +25,6 @@
 #include "CaloIdentifier/CaloCell_ID.h"
 #include "GeoModelInterfaces/IGeoModelSvc.h"
 #include "AthAllocators/ArenaPoolSTLAllocator.h"
-//#include "CxxUtils/unordered_set.h"
 #include "CLHEP/Geometry/Point3D.h"
 #include "CLHEP/Geometry/Vector3D.h"
 #include "CxxUtils/prefetch.h"
@@ -37,8 +36,6 @@
 #include <iterator>
 #include <limits>
 #include <sstream>
-//#include "fastjet/PseudoJet.hh"
-//#include <fastjet/PseudoJet.hh>
 
 
 
diff --git a/Calorimeter/CaloTools/CaloTools/CaloMBAverageTool.h b/Calorimeter/CaloTools/CaloTools/CaloMBAverageTool.h
index 13364d7a8e19c7ab7b38b104364ac312de1bf5c6..4c34a65f56e1fed374072d0127434cc033f92237 100644
--- a/Calorimeter/CaloTools/CaloTools/CaloMBAverageTool.h
+++ b/Calorimeter/CaloTools/CaloTools/CaloMBAverageTool.h
@@ -27,7 +27,6 @@ class CaloDetDescrElement;
 #include "StoreGate/DataHandle.h"
 #include "CaloInterface/ICaloMBAverageTool.h"
 #include "AthenaKernel/IOVSvcDefs.h"
-#include "GaudiKernel/ToolHandle.h"
 
 #include "CxxUtils/checker_macros.h"
 
diff --git a/Calorimeter/CaloUtils/src/CaloTopoTowerBuilderTool.cxx b/Calorimeter/CaloUtils/src/CaloTopoTowerBuilderTool.cxx
index 07b6d15f7f09887ea201f1dd83c7b48c51e45c6b..77180f903dcd2b0439811df7ec4dd550811f34b4 100644
--- a/Calorimeter/CaloUtils/src/CaloTopoTowerBuilderTool.cxx
+++ b/Calorimeter/CaloUtils/src/CaloTopoTowerBuilderTool.cxx
@@ -14,8 +14,6 @@
 
 #include "StoreGate/StoreGateSvc.h"
 
-#include "CaloIdentifier/CaloCell_ID.h"
-
 #include "CaloGeoHelpers/CaloPhiRange.h"
 
 #include "CaloEvent/CaloCell.h"
diff --git a/Calorimeter/CaloUtils/test/CaloVertexedCell_test.cxx b/Calorimeter/CaloUtils/test/CaloVertexedCell_test.cxx
index fe7174d60291f009312263707946ce2fd43001c8..8c1dbf88194067b7c0df0c3aa2d22d8b4ddc1cb0 100644
--- a/Calorimeter/CaloUtils/test/CaloVertexedCell_test.cxx
+++ b/Calorimeter/CaloUtils/test/CaloVertexedCell_test.cxx
@@ -19,7 +19,6 @@
 #include "CaloDetDescr/CaloDetDescriptor.h"
 #include "CaloDetDescr/CaloDetDescrElement.h"
 #include "CaloDetDescr/CaloDetectorElements.h"
-#include "CaloDetDescr/CaloDetDescrManager.h"
 #include "CaloIdentifier/CaloCell_ID.h"
 #include "CaloIdentifier/CaloCell_Base_ID.h"
 #include "StoreGate/StoreGateSvc.h"
diff --git a/Calorimeter/CaloUtils/test/CaloVertexedCluster_test.cxx b/Calorimeter/CaloUtils/test/CaloVertexedCluster_test.cxx
index cc55456fc761582eb15020979338c076fc3b8f7d..be42d716b719a3196bff9767582e5b336b35be0a 100644
--- a/Calorimeter/CaloUtils/test/CaloVertexedCluster_test.cxx
+++ b/Calorimeter/CaloUtils/test/CaloVertexedCluster_test.cxx
@@ -16,7 +16,6 @@
 #include "CaloUtils/CaloVertexedTopoCluster.h"
 #include "CaloEvent/CaloTester.h"
 #include "xAODCaloEvent/CaloCluster.h"
-#include "CaloDetDescr/CaloDetDescrManager.h"
 #include "CaloDetDescr/CaloDetDescriptor.h"
 #include "CaloDetDescr/CaloDetDescrElement.h"
 #include "CaloDetDescr/CaloDetectorElements.h"
diff --git a/Control/AthenaKernel/AthenaKernel/IMetaDataSvc.h b/Control/AthenaKernel/AthenaKernel/IMetaDataSvc.h
index 1c84d4cd5cb9afdbf503cac22e0fcde2eb2a6471..e9d41f24322e249d3c128a7abfcef9765315e20f 100644
--- a/Control/AthenaKernel/AthenaKernel/IMetaDataSvc.h
+++ b/Control/AthenaKernel/AthenaKernel/IMetaDataSvc.h
@@ -49,6 +49,10 @@ public: // Non-static members
    template <typename T, typename TKEY> 
    StatusCode remove(const TKEY& key, bool ignoreIfAbsent=false);
 
+   /// Check if object is already in store
+   template <typename T, typename TKEY>
+   bool contains(const TKEY& key);
+
    /// The output MetaData Store
    virtual StoreGateSvc* outputDataStore() const = 0;
 
@@ -142,4 +146,9 @@ StatusCode IMetaDataSvc::remove(const TKEY& key, bool ignoreIfAbsent)
    return ignoreIfAbsent? StatusCode::SUCCESS : StatusCode::FAILURE;
 }
 
+template <typename T, typename TKEY>
+bool IMetaDataSvc::contains(const TKEY& key) {
+  return outputDataStore()->contains< MetaCont<T> >(key);
+}
+
 #endif
diff --git a/Control/AthenaMP/CMakeLists.txt b/Control/AthenaMP/CMakeLists.txt
index 34973fe9d020ddb4aeec74da72e6bb1ad876a6ad..7872826067a75fff878895e4d525ef87b73369c4 100644
--- a/Control/AthenaMP/CMakeLists.txt
+++ b/Control/AthenaMP/CMakeLists.txt
@@ -19,6 +19,5 @@ atlas_add_executable( getSharedMemory
                       src/memory-profiler/getPss.cc )
 
 # Install files from the package:
-atlas_install_python_modules( python/*.py python/tests/*.py )
-atlas_install_joboptions( share/AthenaMP_EventService.py )
+atlas_install_python_modules( python/*.py )
 
diff --git a/Control/AthenaMP/share/AthenaMP_EventService.py b/Control/AthenaMP/python/EventService.py
similarity index 85%
rename from Control/AthenaMP/share/AthenaMP_EventService.py
rename to Control/AthenaMP/python/EventService.py
index cd497963582467655e3f3ce974a76892c32c6eb8..c9628cda6f731ac99605465145f487c71b74efe8 100644
--- a/Control/AthenaMP/share/AthenaMP_EventService.py
+++ b/Control/AthenaMP/python/EventService.py
@@ -1,3 +1,5 @@
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
+
 # Configure AthenaMP Strategy
 from AthenaMP.AthenaMPFlags import jobproperties as jps
 jps.AthenaMPFlags.Strategy="EventService"
diff --git a/Control/AthenaMP/python/IoUtils.py b/Control/AthenaMP/python/IoUtils.py
deleted file mode 100644
index 91714fb0d2732f5b728a4b2dca34142a9fa9fef4..0000000000000000000000000000000000000000
--- a/Control/AthenaMP/python/IoUtils.py
+++ /dev/null
@@ -1,191 +0,0 @@
-# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
-
-# @file AthenaMP.IoUtils
-# @purpose I/O utils 
-# @author Mous Tatarkhanov <tmmous@berkeley.edu>
-
-__doc__     = "I/O related utils for AthenaMP"       
-__version__ = "$Revision: 1.0$"
-__author__  = "Mous Tatarkhanov <tmmous@berkeley.edu>"
-
-from AthenaCommon.Logging import log as msg     #logging handle
-from GaudiMP.IoRegistry import IoRegistry
-import six
-_debug = msg.debug
-_info = msg.info
-
-
-def create_worker_dir(top_wkdir):
-    """create tmp wkdir for forked worker under top_wkdir"""
-    #changing the workdir to tmp location  
-    import os
-    from os.path import abspath as _abspath, join as _join
-    ppid = os.getppid()
-    pid  = os.getpid()
-    curdir = _abspath (os.curdir)
-    wkdir  = _abspath (_join (top_wkdir,str(pid)))
-    _info("curdir=[%s]", curdir)
-    _info("top_wkdir=[%s]", top_wkdir)
-    _info("wkdir=[%s]", wkdir)
-    if os.path.exists(wkdir):
-        import shutil
-        shutil.rmtree (wkdir)
-    os.makedirs (wkdir)
-        
-    return wkdir
-
-def update_io_registry(wkdir, mpid, iocomp_types=None):
-    """helper method to correctly update the IoRegistry instances
-    """
-    import os
-    from os.path import join as _join
-    from os.path import basename as _basename
-    from os.path import isabs as _isabs
-
-    from PyUtils.PoolFile import PoolFileCatalog
-    
-    # ioreg is a dict:
-    # {'iocomp-name' : { 'old-fname' : ['iomode', 'new-fname'] }, ... }
-    ioreg = IoRegistry.instances
-    msg.debug("ioreg::: %s" % ioreg)
-    
-    pfc = PoolFileCatalog()
-
-    ioreg_items = six.iteritems (IoRegistry.instances)
-    for iocomp,iodata in ioreg_items:
-        #print "--iocomp,len(iodata)",iocomp, len(iodata)
-        io_items = six.iteritems(iodata)
-        for ioname,ioval in io_items:
-            # handle logical filenames...
-            #ioname=pfc(ioname)
-            pfc_name = pfc(ioname)
-            if (pfc_name != ioname):
-                ioreg[iocomp][ioname][1]=pfc_name
-        
-            ##print " --iocomp,ioname,ioval",iocomp,ioname,ioval
-            iomode,newname = ioval[0], ioval[1] or ioname
-            if iomode == '<output>':
-                newname = _join (wkdir,
-                                 "mpid_%s__%s"%(str(mpid).zfill(3),
-                                                _basename(ioname)))
-                msg.debug ("update_io_registry:<output>: newname=%s" % newname)
-            elif iomode == '<input>':
-                if not _isabs(ioname) and not ioname.startswith("root:") and not ioname.startswith("rfio"):
-                # FIXME: handle URLs/URIs...
-                    src = os.path.abspath(_join(os.curdir, ioname))
-                    dst = _join(wkdir, ioname)
-                    os.symlink(src, dst)
-                    msg.debug( "update_io_registry:<input> created symlink %s for" % dst)
-            else:
-                raise ValueError ("unexpected iomode value: %r"%iomode)
-            ioreg[iocomp][ioname][1] = newname
-            pass
-        pass
-    msg.debug( "IoRegistry.instances=%s" % IoRegistry.instances )
-    return # update_io_registry
-
-def redirect_log(wkdir):
-    """redirect stdout and stderr of forked worker to tmp wkdir"""
-    import os, sys
-    import multiprocess as mp
-    # define stdout and stderr names
-    
-    stdout = os.path.join(wkdir, 'stdout')
-    stderr = os.path.join(wkdir, 'stderr')
-    _info(" stdout->[%s]" % stdout)
-    _info(" stderr->[%s]" % stderr)
-
-    # synch-ing log
-    map(lambda x: x.flush(), (sys.stdout, sys.stderr))
-
-    flags = os.O_CREAT | os.O_WRONLY
-    fdout = os.open (stdout, flags)
-    assert fdout>=0, \
-        "problem child [%r] opening stdout" % mp.current_process()
-    fileno = sys.stdout.fileno()
-    os.close (fileno)
-    os.dup2  (fdout, fileno)
-
-    fderr = os.open (stderr, flags)
-    assert fderr>=0, \
-        "problem child [%r] opening stderr" % mp.current_process()
-    fileno = sys.stderr.fileno()
-    os.close (fileno)
-    os.dup2  (fderr, fileno)
-
-def reopen_fds(wkdir=""):
-    """redirect workers fds by setting proper flags
-       <INPUT> ->  duplicate fd in RDONLY mode
-       <OUTPUT> -> redirect fd to local output-copy w/ duplicate flags.
-    """
-    _info("reopen_fds: in dir [%s]" % wkdir)
-    import os, fcntl
-    #from IoRegistry import IoRegistry as ioreg
-    _join = os.path.join
-    
-    _fds = IoRegistry.fds_dict
-    _fds.create_symlinks(wkdir)
-
-    for k, v in six.iteritems(_fds):
-        fd = k; 
-        (real_name, iomode, flags) = v
-        if not os.path.isfile (real_name):
-            _debug("reopen_fds: %s is not file" % real_name)
-            continue
-        
-        if (iomode == '<OUTPUT>'):
-            #expect the copy of <OUTPUT> to be in curdir
-            _debug("reopen_fds: <OUTPUT> %s " % real_name)
-            pos = os.lseek(fd, 0, os.SEEK_CUR)
-            #PC HACK remove!
-            try:
-                new_fd = os.open (_join(wkdir, os.path.basename(real_name)), flags)
-                os.lseek(new_fd, pos, os.SEEK_SET)
-            except Exception as err:         
-                msg.warning("Exception caught handling OUTPUT file %s: %s" %  (real_name, err) )
-                msg.warning(" ...ignoring file FIXME!")
-                continue
-        else:
-            _debug("reopen_fds:<INPUT> %s" % real_name)
-            new_fd = os.open (real_name, os.O_RDONLY)
-        
-        os.close (fd)
-        os.dup2  (new_fd, fd)
-        fcntl.fcntl (fd, fcntl.F_SETFL, flags)
-        os.close (new_fd)
-
-def reopen_fds_old():
-    import os, fcntl
-    _realpath = os.path.realpath
-    _join = os.path.join
-    # reopen file descriptors
-    procfd = '/proc/self/fd'
-    fds = os.listdir(procfd)
-    for i in fds:
-        fd = int(i)
-        real_name = _realpath(_join(procfd,i))
-        if not os.path.exists (real_name):
-            continue
-        flags = fcntl.fcntl (fd, fcntl.F_GETFL)
-        new_fd = os.open (real_name, os.O_RDONLY)
-        os.close (fd)
-        os.dup2  (new_fd, fd)
-        fcntl.fcntl (fd, fcntl.F_SETFL, flags)
-        os.close (new_fd)
-
-
-def print_fds(msg):
-    """print all file descriptors of current process"""
-    import os, fcntl
-    _realpath = os.path.realpath
-    _join = os.path.join
-    # print out file descriptors
-    procfd = '/proc/self/fd'
-    fds = os.listdir(procfd)
-    for fd in fds:
-        i = int(fd)
-        realname = _realpath(_join(procfd,fd))
-        msg.info("fd=[%i], realname=[%s] exists=[%s]",
-                      i, realname, os.path.exists(realname))
-    return
-
diff --git a/Control/AthenaMP/python/MpProcessing.py b/Control/AthenaMP/python/MpProcessing.py
deleted file mode 100755
index 5cb7c0bc7d4cb68f271f61d67d2baf177a8679c3..0000000000000000000000000000000000000000
--- a/Control/AthenaMP/python/MpProcessing.py
+++ /dev/null
@@ -1,161 +0,0 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-import multiprocessing, os, sys, types
-dlflags = sys.getdlopenflags()
-sys.setdlopenflags( 0x100 | 0x2 )    # RTLD_GLOBAL | RTLD_NOW
-import _athenamp as amp
-sys.setdlopenflags( dlflags )
-
-__all__ = [ 'cpu_count' ]
-
-
-# cpu_count is pure python (but does call sysconf for Linux)
-cpu_count = multiprocessing.cpu_count
-
-# the following sets are replacements, which are accessed on C++ through the
-# _athenamp extesion module for now
-
-# current_process is used for identification purposes in multiprocessing; it
-# serves no real purpose in AthenaMP (it's only used for printing a message)
-# since there is a single relation of a mother process with multiple children
-
-def current_process():
-    '''
-    Return process object representing the current process
-    '''
-    return amp.Process( os.getpid() )
-
-# active_children does not match exactly, but since AthenaMP only starts
-# readers/workers/writers from the mother, an aggregate of all groups will
-# do; note also that b/c of the AthenaMP model, no cleanup is needed (as is
-# in multiprocessing: Pools could close and re-open there)
-
-_process_groups = []
-def active_children():
-    """
-    Return list of process objects corresponding to live child processes
-    """
-    ac = list()
-
-    global _process_groups
-    for g in _process_groups:
-        ac += g._children()
-    return ac
-
-
-##### class Queue
-Queue = amp.SharedQueue
-
-
-##### class Pool
-class MapResults( object ):
-    def __init__( self, group ):
-        self._group = group
-
-    def get( self, *args, **kw ):
-        status = self._group.wait()
-
-     # there are two parts to the exit code: the reported result from the worker
-     # function and the process' exit code
-
-     # TODO: handle process' exit code in a cleaner way (fix in PyComps?), as this
-     # obviously does not work in general ...
-
-        result = []
-        for proc_result in status:
-        # The result from run_worker_queue is extremely circumspect: it can either
-        # contain a single tuple of 5 entries, 1 tuple of 4 entries, or 2 tuples of
-        # 4 entries. It is checked on being 'OK' on the last entry of each tuple.
-        # By sheer coincidence, that happens to work.
-
-        # Now, the tuple can not be edited, so it's turned into a list, which takes
-        # care of the spurious arrangement of lengths. Then for all possible returns,
-        # the third entry is always theApp._exitstate and may need modification.
-
-        # General exceptions are eaten, b/c this is so PyComps specific. :P
-             r = proc_result[ 2 ]  # the python-posted result
-             try:
-                 import types
-                 if type(r[0]) == types.TupleType:              # early return case
-                     z = [ list(y) for y in r ]
-                     for l2 in z:
-                         if l2[2] == 0: l2[2] = proc_result[1]
-                 else:
-                     z = [ [ list(y) for y in x ] for x in r ]  # late return case
-                     for l1 in z:
-                         for l2 in l1:
-                             if l2[2] == 0: l2[2] = proc_result[1]
-             except Exception:
-             # make-believe? can happen e.g. on early exit() or e.g. SIGQUIT
-                 r = [[(proc_result[0], -1, proc_result[1], 'ERR')]]
-             result.append( r )
-        return result
-
-class Pool( object ):
-    packaged_count = 0
-
-    def __init__( self, processes = None, initializer = None, initargs = (),
-                  maxtasksperchild = None ):
- 
-        if not callable( initializer ):
-            raise TypeError( 'initializer must be a callable' )
-
-      # this workaround is needed b/c initargs can (and do) contain an amp.SharedQueue,
-      # which can not be marshalled, but COW will deal with it properly by binding it
-      # into this local 'packaged_initializer'
-        def packaged_initializer( initializer = initializer, initargs = initargs ):
-            return initializer( *initargs )
-
-        self.packaged_count += 1
-        self._initializer = '_amp_pool_%s_%d' % (initializer.__name__,self.packaged_count)
-
-        import __main__
-        setattr( __main__, self._initializer, packaged_initializer )
-
-        self._group = amp.ProcessGroup( processes )
-        global _process_groups
-        _process_groups.append( self._group )
-
-    def map_async( self, func, iterable, chunksize=1 ):
-     # NOTE: initializer results are ignored (same as in multiprocessing)
-        self._group.map_async( self._initializer )
-
-     # TODO: hand over iterable properly (it just so happens that in AthenaMP, it is
-     # a repeated list of MaxEvent, for use of reading from the queue)
-        self._group.map_async( '%s.%s' % (func.__module__,func.__name__), iterable[0] )
-        return MapResults( self._group )
-
-    def close( self ):
-        self._group.map_async( 'exit' )
-
-        global _process_groups
-        _process_groups.remove( self._group )
-
-    def join( self ):
-        pass  # alternative functionality for now
-
-# other features of multiprocessing are not used by AthenaMP (but might be
-# used by others; the following facade forwards for now
-
-class ModuleFacade( types.ModuleType ):
-    def __init__( self, module, name ):
-        types.ModuleType.__init__( self, name )
-        self.__dict__[ 'module' ] = module
-
-        import multiprocessing
-        self.__dict__[ 'pmp' ] = multiprocessing
-
-    def __getattr__( self, attr ):
-        try:
-            return getattr( self.module, attr )
-        except AttributeError:
-            pass
-
-        import AthenaCommon.Logging, logging
-        log = logging.getLogger( 'AthenaMP.MpProcessing' )
-        log.error( 'missing attribute %s (falling back on multiprocessing)', attr )
-        return getattr( self.pmp, attr )
-   
-
-sys.modules[ __name__ ] = ModuleFacade( sys.modules[ __name__ ], __name__ )
-del ModuleFacade
diff --git a/Control/AthenaMP/python/Utils.py b/Control/AthenaMP/python/Utils.py
deleted file mode 100644
index 954ab638aa2eff6b4c6daff18d3e1c9cb5cca74c..0000000000000000000000000000000000000000
--- a/Control/AthenaMP/python/Utils.py
+++ /dev/null
@@ -1,184 +0,0 @@
-# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
-
-# @file: AthenaMP.Utils
-# @purpose: a set of tools to handle various aspects of AthenaMP
-# @author Sebastien Binet <binet@cern.ch>
-# @coauthor: Mous Tatarkhanov <tmmous@berkeley.edu>
-
-__doc__     = "Misc. utils for AthenaMP"       
-__version__ = "$Revision: 1.1 $"
-__author__  = "Sebastien Binet <binet@cern.ch>"
-
-import os
-
-from future import standard_library
-standard_library.install_aliases()
-import subprocess
-
-#-----Helper tools for AthenaMP--#
-
-def get_mp_root(msg=""):
-    tmp_root=os.getenv("ATHENA_MP_TMPDIR")
-    if msg == "":
-        from AthenaCommon.Logging import log as msg
-    if tmp_root is None:
-        import tempfile
-        if tempfile.tempdir is None:
-            tmp_root = "/tmp"
-        else:
-            tmp_root = tempfile.tempdir
-    else:
-        msg.debug("Using ATHENA_MP_TMPDIR environment variable to set athena-mp dir")
-    username = "MP"    
-    if os.getenv("LOGNAME") != None :
-        username = os.getenv("LOGNAME")
-    elif os.getenv("USER") != None :
-        username = os.getenv("USER")
-    return  os.sep.join([tmp_root,"athena-mp-tmp-%s" % username])
-
-def hack_copy(srcDir, destDir):
-    """ explicitly copy files not captured by IoRegistry"""
-    #couple of sanity checks
-    if srcDir == '': 
-        srcDir = os.curdir
-    if srcDir == destDir:
-        from AthenaCommon.Logging import log as msg
-        msg.warning("hack_copy called with srcDir = destDir = "+srcDir)
-        return
-
-    #A hack for RDO reconstruction after nextEvent(n)
-    filepattern_list = [ 'DigitParams.db', 'SimParams.db',
-                         '*.pmon.stream' ]
-
-    import fnmatch
-    import shutil
-    import os
-    for pattern in filepattern_list:
-        for f in os.listdir(srcDir):
-            if fnmatch.fnmatch(f, pattern):
-                import os.path
-                src_path = os.path.join(srcDir,f)
-                dest_path = os.path.join(destDir,f)
-                if not os.path.isfile(dest_path):
-                    shutil.copyfile(src_path, dest_path)
-
-def slice_it(iterable, cols=2):
-    from itertools import islice
-    start = 0
-    chunksz,extra = divmod (len(iterable), cols)
-    if extra:
-        chunksz += 1
-    for i in range(cols):
-        yield islice (iterable, start, start+chunksz)
-        start += chunksz
-
-#-----memory status tools---#
-import sys
-if 'linux' in sys.platform:
-    def mem_status(msg):
-        """memory usage information: shared/private"""
-        for line in open('/proc/self/status'):
-            if line.startswith('Vm'):
-                msg.debug (line.strip())
-        private,shared=_get_mem_stats()
-        msg.info ("===> private: %s MB | shared: %s MB",
-                  private/1024.,
-                  shared /1024.)
-else:
-    def mem_status(msg):
-        """memory usage information: dummy for non-linux"""
-        return
-    
-PAGESIZE=os.sysconf("SC_PAGE_SIZE")/1024 #KiB
-have_pss=0
-def _get_mem_stats(pid='self'):
-    """parses /proc/self/statm and smaps for memory usage info"""
-    global have_pss
-    private_lines=[]
-    shared_lines=[]
-    pss_lines=[]
-    statm_name = "/proc/%s/statm" % pid
-    smaps_name = "/proc/%s/smaps" % pid
-    rss=int(open(statm_name).readline().split()[1])*PAGESIZE
-    if os.path.exists(smaps_name): #stat
-        for line in open(smaps_name).readlines(): #open
-            if line.startswith("Shared"):
-                shared_lines.append(line)
-            elif line.startswith("Private"):
-                private_lines.append(line)
-            elif line.startswith("Pss"):
-                have_pss=1
-                pss_lines.append(line)
-        shared=sum([int(line.split()[1]) for line in shared_lines])
-        private=sum([int(line.split()[1]) for line in private_lines])
-        #Note shared + private = rss above
-        #The rss in smaps includes video card mem etc.
-        if have_pss:
-            pss_adjust=0.5 #add 0.5KiB as this average error due to trunctation
-            Pss=sum([float(line.split()[1])+pss_adjust for line in pss_lines])
-            shared = Pss - private
-    #elif (2,6,1) <= kv <= (2,6,9):
-    #    shared=0 #lots of overestimation, but what can we do?
-    #    private = rss
-    else:
-        shared=int(open(statm_name).readline().split()[2])
-        shared*=PAGESIZE
-        private = rss - shared
-    return (private, shared)
-    
-
-#---- CPU-Proc affinty setting tools---#
-if 'linux' in sys.platform:
-    def get_cpu(pid):
-        """get core nbr where the proc-pid resides at that moment"""
-        cmd = "ps --pid %i -o psr" % pid
-        #print ">%s" % cmd
-        out = subprocess.getoutput(cmd)
-        cpu = int(out.splitlines()[1].split()[0])
-        #print "pid: [%i] has cpu: [%i]" % (pid, cpu)
-        return cpu
-else:
-    def get_cpu(pid):
-        """dummy on non-linux"""
-        return 0
-    
-def set_proc_affinity(pid, cpu):
-    """set pid to cpu affinity for process"""
-    cmd = "taskset -pc %i %i" % (cpu, pid)
-    #print "> taskset -pc %i %i" % (cpu, pid)                                                                                                       
-    st,out = subprocess.getstatusoutput(cmd)
-    return st
-
-
-#---- AthenaMP profiling related tools---#
-time_list = list()
-time_list2 = list()
-
-def watch(msg=None, message=""):
-    import time
-    """Timer (elap, user, system, child) with time-interval-reports into msg stream"""
-    global time_list, time_list2
-    time_list.append(os.times())
-    time_list2.append(time.time())
-    
-    if msg is not None:
-        (utime, stime, cutime, cstime, etime) = dt();
-        elap_time =     "%s_ELAP_TIME=%.4f seconds" % (message, etime)
-        user_time =     "%s_USER_TIME=%.2f" %   (message, utime)
-        system_time =   "%s_SYSTEM_TIME=%.2f" % (message, stime) 
-        child_utime =   "%s_CHILD_UTIME=%.2f" % (message, cutime)
-        child_stime =   "%s_CHILD_STIME=%.2f" % (message, cstime)
-        msg.info(elap_time)
-        msg.debug("%s %s" % (user_time, system_time) )
-        msg.debug("%s %s" % (child_utime, child_stime) )
-    return len(time_list)
-
-def dt(n=-1):
-    import operator
-    return  tuple( map(operator.sub, time_list[n], time_list[n-1]) )
-
-def dt2(n=-1):
-    return time_list2[n] - time_list2[n-1]
-
-
-
diff --git a/Control/AthenaMP/python/tests/mjMonTools.py b/Control/AthenaMP/python/tests/mjMonTools.py
deleted file mode 100644
index 9db47f643db44bbf35e85e879ec83219ab1c9ef7..0000000000000000000000000000000000000000
--- a/Control/AthenaMP/python/tests/mjMonTools.py
+++ /dev/null
@@ -1,2252 +0,0 @@
-# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
-
-# @file:    mpMonTools.py
-# @purpose: Library for mp performance monitoring of AthenaMP
-# @author:  Mous Tatarkhanov <tmmous@cern.ch>
-# @date:    December 2009
-
-from __future__ import print_function
-
-__version__ = "$Revision: 276791 $"
-__author__  = "Mous Tatarkhanov <tmmous@cern.ch>"
-
-import sys
-import os
-import signal
-import time
-import array
-import copy
-
-from future import standard_library
-standard_library.install_aliases()
-import subprocess
-
-
-TIME_STEP = 10
-KB = (1 << 10)
-MB = (1 << 20)
-GB = (1 << 30)
-
-
-PAGESIZE=os.sysconf("SC_PAGE_SIZE")/1024 #KiB
-have_pss=0
- 
-mp_stat = dict() #dictionary which will hold all statistics: ProcDict objects, io-,mem-,cpu-dicts 
-pid_list = list() #book-keeping of  all spawned and launched  processes for later clean-up
-init_mem = list() #get_memstat()
-init_numa = list() #get_numastat()
-
-T0 = time.time()
-numa_T0 = T0
-
-def init_mp_stat():
-    global mp_stat
-    global pid_list
-    global T0
-    global init_mem
-    global init_numa
-    global numa_T0
-
-    del mp_stat  #delete data from previous np
-    del pid_list #delete data from previous np
-    
-    #brand new mp_stat
-    mp_stat = dict()
-    pid_list = list()
-    mp_stat['io'] = dict()
-    mp_stat['mem'] = dict()
-    mp_stat['cpu'] = dict()
-    #mp_stat['cpid'] = dict()
-    mp_stat['pid'] = dict()
-    mp_stat['numa'] = dict()
-    mp_stat['numa']['Time'] = list()
-    mp_stat['numa']['numa_hit'] = list()
-    mp_stat['numa']['numa_miss'] = list()
-    mp_stat['numa']['numa_foreign'] = list() 
-    mp_stat['numa']['interleave_hit'] = list()
-    mp_stat['numa']['local_node'] = list()
-    mp_stat['numa']['other_node'] = list()
-    
-    T0 = time.time()
-    init_mem = get_memstat()
-    init_numa = list(get_numastat())
-    init_numa[0] = numa_T0
-    
-    print ("initial_mem=%s" % init_mem)
-    print ("initial_numa=%s" % list(init_numa))
-    
-    
-class ProcDict(dict):
-    """this is to store the process information"""
-    key_list = ['state', 'vmem', 'rss', 'sz', 'cpu_time', 'elap_time', 'private', 'shared']
-    pid = -1
-    start_time = -1
-    init_time = -1
-    out=""
-    err=""
-    elap_time = -1
-    cpu_time = -1
-    vmem = -1
-    rss  = -1
-    nbr_rows = -1 # the number of rows
-
-    def __init__(self, pid=-1, start_time=-1, out="", err="", child = False):        
-        self.pid = pid
-        self.start_time = start_time
-        #self['np']=np
-        self.out = out
-        self.err = err
-
-        for key in self.key_list:
-            self[key] = list()
-        self.proc_ps_stat()
-        if child:
-            mp_stat["cpid"][self.pid] = self
-        else:
-            mp_stat["pid"][self.pid] = self
-        
-    def add_ps_line(self, line):
-        ps_str = line.split()
-        if self.start_time is -1:
-            self.start_time = _seconds(ps_str[5])
-        self['state'].append( ps_str[1] )
-        self['vmem'].append( int(ps_str[2])/1024 )
-        self['rss'].append( int(ps_str[3])/1024 )
-        self['sz'].append( int(ps_str[4])/1024 )
-        self['cpu_time'].append( _seconds(ps_str[6]) )
-        self['elap_time'].append( _seconds(ps_str[7]) )
-        
-        if self.init_time==-1:
-            global T0
-            if grepExist(self.out, "'start processing'"):
-                self.init_time = time.time()- T0
-                print ("pid-%i: init_time=%s"% (self.pid, self.init_time))
-
-        private = shared = -1
-        try:
-            pass
-            #private, shared = _get_shared_private_mem(self.pid)
-        except Exception as e:
-            print ("## Caught exception [%s] !!" % str(e.__class__))
-            print ("## What:", e)
-            print (sys.exc_info()[0])
-            print (sys.exc_info()[1])
-        self["private"].append(private)
-        self["shared"].append(shared)  
-    
-    def proc_ps_stat(self):
-        """ ps statistics for this process of pid """
-        out = subprocess.getoutput("ps --pid %i -o pid,state,vsize,rss,sz,start,cputime,etime" % self.pid)
-        lines = out.splitlines()
-        if len(lines) > 1:
-            self.add_ps_line(lines[1])
-        else:
-            print ("there is no process with pid: [%i]", self.pid)
-            return False
-        return True  
-    
-    def children_exist(self):
-        """ figures out weather the np kids were spawned for mother mpid""" 
-        sc, out = subprocess.getstatusoutput("ps --ppid %i -o pid,start" % self.pid)
-        if sc is not 0:
-            #print ("   children_exist: Error, sc=%i" % sc)
-            return False
- 
-        ps_lines = out.splitlines()
-        nc = len(ps_lines)-1
-        print ("  children_exist().nbr of children = %i" % nc)
-        if nc > 0 :
-            print ("%i children workers exist. Creating ProcDicts..." % nc)
-            ps_lines.pop(0)
-            for line in ps_lines:
-                ps_str = line.split()
-                cpid = int(ps_str[0])
-                ProcDict(cpid, start_time = _seconds(ps_str[1]))
-                print ("..... child [%i] added" %  cpid)
-            return nc
-        else:
-            #print ("no children exist for parent: %s " % self.pid)
-            return False
-
-
-    def trim_stat(self): 
-        """trim finished process information at the end of statistics """
-        while True:
-            if (self['state'][-1] in ['S', 'Z', 'T', 'D', 'X']) or (self['vmem'][-1] is 0) or (self['rss'][-1] is 0): #STOPPED, ZOMBIE, STOPPED2, Dddd, Xxxx
-                for key in self.key_list:
-                    self[key].pop()
-            else:
-                break
-
-    def summarize(self):
-        """finalize summary information."""
-        self.elap_time = self['elap_time'][-1]
-        self.cpu_time = self['cpu_time'][-1]
-        self.vmem = self['vmem'][-1]
-        self.rss = self['rss'][-1]
-        self.nbr_rows = len(self['elap_time'])
-
-class SPSummary(dict): 
-    """ Sub Processes Summary - convenient way of presenting to ROOT or other output methods"""
-    cpid_list=list()
-    np = 1
-    total_cpu_time = 0
-    total_cpu_time_x = 0
-    total_elap_time = 0
-    total_elap_time_x = 0
-    
-    def __init__(self, np):
-        #self['np']=list()
-        self.np = np
-        self['pid']=list()
-        #self['elap_time']=list()
-        self['elap_time_x']=list()
-        #self['cpu_time']=list()
-        #self['user_time']=list()
-        #self['system_time']=list()
-        #self['bootstrap_time']=list()
-        self['cpu_time_x']=list()
-        self['vmem']=list()
-        self['rss']=list()
-        self['free_mem_spike']=list()
-        self['init_time_x'] = list()
-
-    def extract_summary(self, dir):
-        self.spid_list = mp_stat["pid"].keys()
-        for pid in  mp_stat["pid"].keys():
-            print ("extract_summary: pid %i" % pid)
-            self['pid'].append(pid)
-            self['init_time_x'].append(mp_stat['pid'][pid].init_time)
-            self['elap_time_x'].append(mp_stat['pid'][pid].elap_time)
-            self['cpu_time_x'].append(mp_stat['pid'][pid].cpu_time)
-            self['vmem'].append(mp_stat['pid'][pid].vmem)
-            self['rss'].append(mp_stat['pid'][pid].rss)
-            self["free_mem_spike"].append( get_spike(sumList(mp_stat['mem']['kbmemfree'],
-                                                            sumList(mp_stat['mem']['kbbuffers'],
-                                                                mp_stat['mem']['kbcached']) )))
-        for pid in self.cpid_list:
-            print ("  %s/%s exists ->" % (dir,pid), os.path.exists(os.path.join(dir,"%s" % pid))) #FIX: add the extraction from cpid's logs.
-            out_path = os.path.join(dir,  'stdout')
-            err_path = os.path.join(dir,  'stderr')
-
-            #e_time = int(grepValue(out_path, "WORKER_ELAP_TIME"));            
-            #u_time = int(grepValue(out_path, "WORKER_USER_TIME"));
-            #s_time = int(grepValue(out_path, "WORKER_SYSTEM_TIME"));
-            #c_time = u_time + s_time
-
-            #self['elap_time'].append(e_time); self.total_elap_time += e_time;
-            #self['user_time'].append(u_time)
-            #self['system_time'].append(s_time)
-            #self['cpu_time'].append(c_time); 
-            #self.total_cpu_time += c_time;    
-            #b_time = int(grepValue(out_path, "BOOTSTRAP_ELAP_TIME"));
-            #self['bootstrap_time'].append(b_time); 
-                                      
-            
-class MPSummary(dict):
-    """ Mother Process Summary - convenient way of presenting data to ROOT..."""
-    mpid = dict()
-    np = 1
-    def __init__(self, np):
-        mpid = mp_stat['pid']
-        self.np = np
-        #self['np'] = list()
-        #self['np'] = [ np, ]
-        self["m_par_time"] = list()
-        self["m_firstevent_time"]= list()
-        self["m_cpu_time"]= list()
-        self["m_elap_time"]= list()
-        self["x_init_time"]= list()#externally observed time
-        self["x_par_time"]= list() #externally observed time
-        self["x_fin_time"]= list()  #externally observed time
-        self["free_mem_spike"]= list()  #externally observed value
-        self["swap_change"] = list()
-        
-    def extract_summary(self, log):
-        self["m_par_time"] = [0,]# int(grepValue(log, "PARALLEL_ELAP_TIME")),]
-        self['m_merging_time'] = [0,]# int(grepValue(log, "MERGING_ELAP_TIME")),]
-        self["m_firstevent_time"] = [0,]# int(grepValue(log, "FIRSTEVENT_ELAP_TIME")),]
-        self["m_cpu_time"] =  [ mp_stat["pid"].cpu_time, ] #mp_stat["mpid"]["cpu_time"][-1]
-        self["m_elap_time"]=  [ mp_stat["pid"].elap_time, ] #mp_stat["mpid"]["elap_time"][-1]
-        self["free_mem_spike"]= [ get_spike(sumList(mp_stat['mem']['kbmemfree'],  
-                                            sumList(mp_stat['mem']['kbbuffers'], mp_stat['mem']['kbcached']))
-                                            ), ]
-        self["swap_change"] = [ get_spike(mp_stat['mem']['kbswpfree']), ]
-
-
-
-
-####### useful auxillary functions ##########################
-def _seconds(time_str): #handles time in "H:M:S" and "M:S" format 
-    time_nums = time_str.split(":")
-    if (len(time_nums)==3):
-        return 3600*int(time_nums[0])+60*int(time_nums[1]) + int(time_nums[2])
-    elif (len(time_nums)==2):
-        return 60*int(time_nums[0]) + int(time_nums[1])
-    print ("ERROR: _seconds() returning - 0")
-    return 0
-
-def show_numactl():
-    sc,out=subprocess.getstatusoutput("numactl --show")
-    if sc==256:
-        print ("mjMonTools.show_numactl: numastat is not working! zeroes will be returned")
-        return False
-    else:
-        print ("mjMonTools.show_numactl: \n %s" % out)
-        return True
-
-def get_numastat():
-    sc,out=subprocess.getstatusoutput("numastat")
-    if sc==256:
-        print ("mjMonTools.get_numastat: numastat is not working! zeroes will be returned")
-        return (0,0,0,0,0,0,0)
-    else:
-        lines = out.splitlines()
-        return (time.time(),
-                int(lines[1].split()[1]),
-                int(lines[2].split()[1]),
-                int(lines[3].split()[1]),
-                int(lines[4].split()[1]),
-                int(lines[5].split()[1]),
-                int(lines[6].split()[1])
-                )
-        
-def save_numastat():
-    current_numa = get_numastat()
-    #print ("current_numa=%s" % list(current_numa))
-
-    _numa_stat = (
-        mp_stat['numa']['Time'],
-        mp_stat['numa']['numa_hit'],
-        mp_stat['numa']['numa_miss'],
-        mp_stat['numa']['numa_foreign'],
-        mp_stat['numa']['interleave_hit'],
-        mp_stat['numa']['local_node'],
-        mp_stat['numa']['other_node']
-        )
-    
-    change_numa = subList(current_numa,init_numa)
-    print ("NUMA_CHANGE=%s" % change_numa)
-    return [_numa_stat[i].append(change_numa[i]) for i in range(len(change_numa))]
-    
-def print_memstat(msg =""):
-    mem = get_memstat()
-    t = time.time() - T0;
-    save_numastat()
-    print (msg + " [T=%i sec]" % t + " USED[%i Mb][change: %i Mb] - FREE[%i Mb][change: %i Mb]" % (
-        mem["USED"], mem["USED"]-init_mem["USED"], mem["FREE"], mem["FREE"]-init_mem["FREE"]))
-
-def get_memstat():
-    out=subprocess.getoutput("free -m")
-    mem = dict()
-    lines = out.splitlines()
-    mem_strs = lines[1].split()
-    mem['used'] = int(mem_strs[2])
-    mem['free'] = int(mem_strs[3])
-    mem['cached'] = int(mem_strs[5])
-    mem['buffers'] = int(mem_strs[6])
-    mem_strs = lines[2].split()
-    mem['USED'] = int(mem_strs[2])
-    mem['FREE'] = int(mem_strs[3])
-    #print ("mem: [%s Mbs]" %  mem)
-    return mem
-
-init_mem = get_memstat()
-
-def meanList(num_list):
-    """finds average value of the number list"""
-    if len(num_list) == 0:
-        print ("meanList: WARNING - empty list, returning 0.0")
-        return 0.0
-    return float(sum(num_list)) / len(num_list)
-    
-def sumList(l1, l2):
-    """sum up values of two lists l1 + l2"""
-    if len(l1) is not len(l2):
-        print ("sumList: WARNING: len(l1) not equals len(l2)")
-        n = len(l1) if len(l2) > len(l1) else len(l2)
-    else:
-        n = len(l1)
-
-    sum = list()
-    for i  in range(n):
-        sum.append(l1[i] + l2[i])
-    return sum
-
-def subList(l1, l2): 
-    """subtract values of two lists: l1 - l2"""
-    if len(l1) is not len(l2):
-        print ("subList: WARNING: len(l1) not equals len(l2)")
-        n = len(l1) if len(l2) > len(l1) else len(l2)
-    else:
-        n = len(l1)
-
-    sub = list()
-    for i  in range(n):
-        sub.append(l1[i] - l2[i])
-    return sub
-        
-def get_spike(l):
-    #print (" get_spike:", end='')
-    #print (" e0 = ",  l[0]/1024, "Mb",  end='')
-    #print (" eN = ",  l[-1]/1024, "Mb", end='')
-    #print (" max = ", max(l)/1024, "Mb",  end='')
-    #print (" min = ", min(l)/1024, "Mb",  end='')
-    #print (" e0 - eN = ",  (l[0] - l[-1])/1024, "Mb", end='')
-    #print (" e0 - min = ", (l[0] - min(l))/1024, "Mb", end='')
-    #print (" eN - min = ", (l[-1] - min(l))/1024, "Mb", end='')
-    #print (" return  max - min =", (max(l) - min(l))/1024, "Mb")
-    return max(l) - min(l)   
-
-def prepare_mp_stat():
-    """ this function construct "cp" dict from "cpid" dict for ROOT formation convenience
-    converting: cpid:pid:vmem,rss,cpu-time,... -> cp:pid,vmem,rss,cputime,...  """
-    mp_stat['sp'] = mp_stat['pid']
-    del mp_stat['pid']
-    mp_stat['pid']=dict()
-    mp_stat['pid']['pid'] = list()
-        
-    for key in ProcDict.key_list:
-        mp_stat['pid'][key]=list()
-            
-    for pid in mp_stat['sp'].keys():
-        mp_stat['pid']['pid'] += [pid,] * mp_stat['sp'][pid].nbr_rows
-        for key in ProcDict.key_list:
-            mp_stat['pid'][key] += mp_stat['sp'][pid][key]
-    del mp_stat['sp']
-
-
-def print_summary():
-    print ("===== SUB PROCESS SUMMARY =====")
-    for (k, v) in mp_stat['sp_summary'].items():
-        print ("sp_summary['%s']=%s " % (k, v))
-        
-################## children tools ######################
-def launched_processes_working(ppid):
-    """ ps statistics for children of ppid. returns False if no children exist """
-    out = subprocess.getoutput("ps --ppid %i -o pid,state,vsize,rss,sz,start,cputime,etime" % ppid)
-    ps_lines = out.splitlines()
-    ps_lines.pop(0)
-    
-    exist = False # switch for existance of launched processes (not any processes)
-    
-    if len(ps_lines) > 0:
-        print ("Subprocesses exist:")
-        for line in ps_lines:
-            ps_str = line.split()
-            pid = int(ps_str[0])
-            #print ("subprocess pid=%i" % pid)
-            if pid in mp_stat["pid"].keys():
-                exist = True
-                mp_stat["pid"][pid].add_ps_line(line)
-                print ("pid-%i: ps-stat appended" % pid)
-            else:
-                print ("pid-%i: secondary proc" % pid)
-        return exist 
-    else:
-        print ("No subprocesses exist for parent: %i" % ppid)
-        return  exist #False
-    return exist #False
-
-
-def summarize_children_stat(): 
-    """trim finished worker information at the end of statistics """
-    for pid in mp_stat["cpid"].keys():
-        mp_stat['cpid'][pid].trim_stat()
-        mp_stat['cpid'][pid].summarize()
-
-def summarize_proc_stat(): 
-    """summarize proc finished worker information at the end of statistics """
-    #mp_stat['mpid'].summarize()
-    for pid in mp_stat['pid'].keys():
-        #mp_stat['pid'][pid].trim_stat()
-        mp_stat['pid'][pid].summarize()
-
-def children_born(log, mpid, np):
-    """ figures out weather the np kids were spawned for mother mpid""" 
-    sc,out = subprocess.getstatusoutput("ps --ppid %i -o pid,start" % mpid)
-    if sc is not 0:
-        print ("   mpMonTools.children_born: no kids yet... Error, sc=%i" % sc)
-        return False
-
-    ps_lines = out.splitlines()
-    #print ("ps_lines=", ps_lines)
-    nc = len(ps_lines)-1
-    
-    print (" children_exist: nbr of children = [%i]" % nc)
-    if grepValue(log, "FIRSTEVENT_ELAP_TIME") is None:
-        return False        
-    
-    if nc==np : #nbr of children is equal to nbr of procs required 
-        print ("%i children workers forked! Registering them (creating ProcDicts) ..." % np)
-        ps_lines.pop(0)
-        for line in ps_lines:
-            ps_str = line.split()
-            pid = int(ps_str[0])
-            ProcDict(pid, start_time = _seconds(ps_str[1]))
-            print ("..... child [%i] added" %  pid)
-        return True
-    else:
-        print ("no children exist for parent: %s " % mpid)
-    return False
-
-
-################ LOG VARIABLES PROCESSING: Grepping ##################
-################ LOG VARIABLES PROCESSING: Grepping ##################
-def grepExist(log, field):
-    """grep check for the existance of the unique field in the log 
-    """
-    print ("grep %s %s" % (field, log),)
-    sc,out = subprocess.getstatusoutput( "grep %s %s" % (field, log))
-    if sc==256:
-        print (" FALSE:  sc=%i" % sc)
-        return False
-    line = out.splitlines()[0]
-    print (" TRUE: sc=%i \n  grepped-line=%s" % (sc,line))
-    return True
-
-def grepExist2(log, field):
-    """grep check for the existance of the unique field in the log 
-    """
-    print ("grep %s %s" % (field, log))
-    sc,out = subprocess.getstatusoutput( "grep %s %s" % (field, log))
-    if sc!=0:
-        print ("grepping %s in %s failed with sc=%i" % (field, log, sc) )
-        return False
-    line = out.splitlines()[0]
-    print ("grepped-line=%s" % line)
-    print ("sc=", sc)
-
-    return True
-                                                
-def grepValue(log, field, sep='='):
-    """grep unique field in the log and find corresponding value by regexp 
-       Example: out = 'Py:EventLoopMgr      INFO EvtMax  =  123456  something'      
-       grepValue(log, "EvtMax", sep="=") = '123456' 
-    """
-    sc,out = subprocess.getstatusoutput( "grep %s %s" % (field, log))
-    if sc!=0:
-        #print ("grepping %s in %s failed" % (field, log))
-        return None
-    line = out.splitlines()[0]
-    import re 
-    vexpr = '\\s*'+ sep+ '\\s*(\\d+)'
-    m = re.search( field + vexpr, line)
-    value = m.group(1)
-    return value
-
-def grepPath(log, field, sep=':'):
-    """grep unique field in the log and find corresponding value by regexp 
-       Example: out = 'Py:EventLoopMgr      INFO master workdir: /tmp/athena-mp-tmp-tmmous/22590-1261097934  smthng'      
-       grepPath(log, "workdir", sep=":") = '/tmp/athena-mp-tmp-tmmous/22590-1261097934' 
-    """
-    sc,out = subprocess.getstatusoutput( "grep %s %s" % (field, log))
-    if sc!=0:
-        print ("grepping %s in %s failed" % (field, log))
-        return None
-    line = out.splitlines()[0]
-    import re 
-    vexpr = '\\s*'+ sep+ '\\s*([^\\s]+)'
-    m = re.search( field + vexpr, line)
-    path = m.group(1)
-    return path
-
-
-############# related to  athena-mp #########################
-def launch_athena(jobo, ne, se, np, output_dir, numa_set=None):
-    """launching cmd: athena.py -c EvtMax=$ne $jobo  1> mp.output/stdout_$jobo.$np.$ne   2> mp.output/stderr_$jobo.$np.$ne""" 
-    
-    if not os.path.isdir(output_dir):
-        os.mkdir(output_dir)
-        
-    numa_args = list()
-    
-    print ("job command and options as template: %s" % jobo)
-    from string import Template
-    arg_template= Template(jobo)
-    arg_str = arg_template.substitute(MAXEVT=ne, SKIPEVT=se)
-    proc_args = arg_str.split();
-    job = proc_args[0]
-
-    stdout_name = os.path.join(output_dir, "stdout.%s.%i.%i"  % (job,np,ne))
-    stderr_name = os.path.join(output_dir, "stderr.%s.%i.%i"  % (job,np,ne))
-    STDOUT_FILE = open(stdout_name , "w")
-    STDERR_FILE = open(stderr_name,  "w")
-    
-    #proc_args = ["athena.py",  "-c", "EvtMax=%i; SkipEvents=%i" % (ne, se) ,  "../%s" % jobo]
-    print ("job command and options after template processing: %s" % proc_args)
-    
-    if numa_set != None:
-        numa_args = [ "numactl"]
-        if (numa_set[0] == 'f' and numa_set[1] == 'f'):
-            numa_args = list()
-        elif ( numa_set[0]=='f' and numa_set[1]!='f'):
-            numa_args.append( "--membind=%i"  % numa_set[1])
-        elif ( numa_set[0]!='f' and numa_set[1]=='f'):                    
-            numa_args.append( "--cpubind=%i"  % numa_set[0])
-        elif (numa_set[0]!='f' and numa_set[1]!='f'):
-            numa_args += ["--membind=%s" % numa_set[0], "--cpubind=%s" % numa_set[1] ]
-        else:
-            print ("SOMETHING WRONG: numa_set=%s" % numa_set)
-
-        
-    #proc_args = [   "numactl",  
-    #                    "--cpubind=%i" % numa_set[0], 
-    #                    "--membind=%i" % numa_set[1], 
-    #                    "athena.py", "-c", "EvtMax=%i; SkipEvents=%i" % (ne, se) ,  "../%s" % jobo]
-    
-    proc_args = numa_args + proc_args
-    
-    print ("<<<LAUNCH>>>: %s" % proc_args)
-    mproc = subprocess.Popen( proc_args, 
-                             stdout=STDOUT_FILE, 
-                             stderr=STDERR_FILE,
-                             cwd = output_dir,
-                             shell=False, 
-                             close_fds = True)
-    pid_list.append(mproc.pid)
-    STDOUT_FILE.close();  STDERR_FILE.close()
-    proc = ProcDict(mproc.pid, out=stdout_name, err = stderr_name,  child=False)
-    
-    return (mproc, stdout_name, stderr_name)
-
-############# TERMINATOR AREA: stopping, killing, terminating processes ###############
-
-def stop_proc(proc):
-    """ terminate/kill a process by either proc_object or pid"""
-    pid = 0
-    try:
-        if type(proc) is int:
-            pid = proc
-            os.kill(pid, signal.SIGKILL); #os.waitpid(pid, 0);
-        else:
-            pid = proc.pid 
-            if proc.poll() is None: os.kill(pid, signal.SIGKILL); 
-            proc.wait();
-        print ("process %s  terminated" % pid )
-    except Exception as e:
-        print ("## Caught exception [%s] !!" % str(e.__class__),"  ## What:",e)
-        print (sys.exc_info()[0], sys.exc_info()[1])
-        return False
-    pid_list.remove(pid)
-    return True
-
-def stop_proc_tree(pid):
-    """ Recursively terminate/kill the process tree rooted at pid. Be cautious when using this!"""
-    out = subprocess.getoutput("ps --ppid %i" % pid)
-    lines = out.splitlines(); lines.pop(0) #remove header
-    try:
-        if len(lines) > 0: 
-            for line in lines:
-                cpid = int(line.split()[0])
-                print ("child [%i:%i] being terminated..." % (pid, cpid))
-                stop_proc_tree(cpid)
-        if  pid in pid_list: pid_list.remove(pid) 
-        os.kill(pid, signal.SIGKILL); #os.waitpid(pid, 0);
-        print ("[%i] - terminated." % pid )
-    except Exception as e:
-        print ("[%i] - dead #while killing caught exception [%s] !!" % (pid, str(e.__class__)),"  ## What:",e)
-        #print (sys.exc_info()[0], sys.exc_info()[1])
-        return False
-    return True
-
-
-def stop_athenaMP(mproc):
-    """ terminate/kill the athena-mp process tree given either the Popen object or the pid"""
-    try:
-        pid = 0
-        if type(mproc) is int:
-            pid = mproc
-            stop_proc_tree(pid) # killing athena-mp mproc-tree
-        else:
-            pid = mproc.pid;
-            if mproc.poll() is None: os.kill(pid, signal.SIGKILL); 
-            mproc.wait();
-        print ("process %s  terminated" % pid )
-        return True
-    except Exception as e:
-        print ("## Caught exception [%s] !!" % str(e.__class__),"  ## What:",e)
-        print (sys.exc_info()[0], sys.exc_info()[1])
-        return False
-
-
-
-
-############# sar related wrappers ###########################
-
-def launch_sar(log, time_step):
-    """
-      launch sar in the background, sampling every time_step seconds; the full option set
-      would be `sar -bBcdqrRuvwWy -I SUM -I XALL -n ALL -P ALL` (= `sar -A`), but only
-      `sar -bBrvwu` is collected here
-    """
-    sar_args = [ "sar", "-bBrvwu", "-o", log, "%i" % time_step, "0" ]
-    print ("launching: %s %s %s %s %s %s" % tuple(sar_args)    )
-    sc,out = subprocess.getstatusoutput('sar -b 1 1')
-    if sc!=0:
-        print ('launching failed - sar does not work on this system - please install it if available!')
-        return None
-    FNULL = open('/dev/null', 'w')
-    proc = subprocess.Popen(sar_args, 
-                            executable="sar",  
-                            stdout = FNULL,
-                            stderr = subprocess.STDOUT,
-                            shell=False, 
-                            close_fds = True)
-    FNULL.close()
-    pid_list.append(proc.pid)
-    return proc
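-
-# For time_step=10 the command assembled above is:
-#   sar -bBrvwu -o <log> 10 0
-# i.e. IO (-b), paging (-B), memory (-r), CPU (-u) and related kernel counters sampled every
-# 10 s into the binary file <log>, which is read back later with `sar <key> -f <log>` by
-# get_sar_stat() below.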
-
-def _num(s):
-    """try converting s to int or float; if that fails, return the string unchanged"""
-    try:
-        if "." in s:
-            out = float(s)
-        else:
-            out = int(s)
-    except ValueError:
-        return s
-    return out
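-
-# _num() behaviour, for reference:
-#   _num("7")    -> 7        (int)
-#   _num("3.14") -> 3.14     (float)
-#   _num("eth0") -> "eth0"   (left as a string)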
-        
-def get_sar_stat(log, key):
-    """ get statistics by issuing the cmd: `sar $key -f $log`"""
-    print ('launching cmd: sar %s -f %s' % (key, log))
-    sc,out = subprocess.getstatusoutput("sar %s -f %s" % (key,log) )
-    if sc!=0:
-        print ("launching failed - either file %s does not exist or sar does not work on this system - please check!" % log)
-        return None
-    sar_dict = dict()
-    #print ("out=\n", out)
-
-    lines = out.splitlines()
-    print ("trim1=", lines.pop(0))#trimming output
-    print ("trim2=", lines.pop(0))#trimming output
-
-    avg_line = lines.pop(); #trimming avg line at the end 
-    print ("avg_line1=", avg_line)
-    
-    hstrs = lines.pop(0).replace('%', 'p').replace('/', 'p').split() #trimming header strings and replacing '%' and '/' to satisfy ROOT 
-    hstrs[0] = "Time"
-    print ("Sar statistics fields found: ", hstrs)
-
-    #print ("lines=\n", lines)
-    
-    for hstr in hstrs:
-        sar_dict[hstr] = list()
-    for line in lines:
-        lstrs = line.split()
-        #print ("lstrs=", lstrs)
-        for i,hstr in enumerate(hstrs):
-            if i!=0:
-                sar_dict[hstr].append( _num(lstrs[i]) )
-            else:
-                sar_dict[hstr].append(_seconds(lstrs[i])) #time conversion from "H:M:S" --> number of seconds
-    return sar_dict 
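-
-# Shape of the dictionary returned above, e.g. for `sar -u -f <log>` (values illustrative;
-# '%' and '/' in the sar headers are replaced by 'p' to keep the names ROOT-friendly):
-#   {'Time': [36000, 36030, ...], 'CPU': ['all', ...], 'puser': [12.3, ...],
-#    'psystem': [3.1, ...], 'piowait': [0.4, ...], 'pidle': [84.2, ...], ...}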
-
-
-### Tools for process statistics #######
-
-def get_full_sar_stat(log):
-    mp_stat["io"] =  get_sar_stat(log, "-b")
-    mp_stat["mem"] = get_sar_stat(log, "-r")
-    mp_stat["cpu"] = get_sar_stat(log, "-u")
-    #return mp_stat
-
-
-############## sysstat and other linux commands wrappers ########
-
-def _meminfo():
-    out=subprocess.getoutput("cat /proc/meminfo")
-    mem=dict()
-    Kb = 1024
-    wanted = {'MemTotal': 'total', 'MemFree': 'free', 'Buffers': 'buffers', 'Cached': 'cached'}
-    for line in out.splitlines():
-        strs = line.split()
-        key = strs[0].rstrip(':')
-        if key in wanted:
-            mem[wanted[key]] = int(strs[1]) // Kb  # kB -> Mb
-    print ("meminfo.real_total: [%i Mb]" % mem['total'] )
-    print ("meminfo.free: [%i Mb]" % mem['free'])
-    print ("meminfo.cached: [%i Mb]" % mem['cached'] )
-    print ("meminfo.buffers: [%i Mb]" % mem['buffers'])
-    return mem
-
-def _get_iostat():
-    out=subprocess.getoutput("iostat")
-    io = dict()
-    lines = out.splitlines()
-    strs = lines[1].split()
-    io['used'] = int(strs[2])
-    mem=dict()
-    mem['free'] = int(strs[3])
-    mem['cached'] = int(strs[5])
-    mem['buffers'] = int(strs[6])
-    mem_strs = lines[2].split()
-    mem['USED'] = int(mem_strs[2])
-    mem['FREE'] = int(mem_strs[3])
-    #print ("mem: [%s Mbs]" %  mem)
-    return io
-def _used_mem():
-    out=subprocess.getoutput("free -m")
-    mem_strs = out.splitlines()[2].split()
-    used_mem = int(mem_strs[2]) 
-    print ("used_mem: [%i Mb]" % used_mem)
-    return used_mem            
-def _free_mem():
-    out=subprocess.getoutput("free -m")
-    mem_strs = out.splitlines()[2].split()
-    free_mem  = int(mem_strs[3]) 
-    print ("free_mem: [%i Mb]" % free_mem)
-    return free_mem
-
-def _launch_iostat(log, time_step):
-    print ('launching cmd: iostat $TIME_STEP -d -x > iostat.$jobo.$np.$ne &')
-    sc,out = subprocess.getstatusoutput( "iostat" )
-    if sc!=0:
-        print ('launching failed - iostat does not work on this system')
-        return None
-    f_iostat = open(log, "w")
-    iostat_proc = subprocess.Popen(
-        [ "iostat",  "%i" % time_step, "-d", "-x"], 
-        executable="iostat", 
-        stdout = f_iostat, 
-        shell=False, 
-        close_fds = True)
-
-    f_iostat.close()
-    return iostat_proc  
-def _launch_vmstat(log, time_step):
-    print ('launching cmd: vmstat $TIME_STEP -n > vmstat.$jobo.$np.$ne &'        )
-    sc,out = subprocess.getstatusoutput( "vmstat -V" )
-    if sc!=0:
-        print ('launching failed - vmstat does not work on this system')
-        return None
-    f_vmstat = open(log, "w")
-    proc = subprocess.Popen([ "vmstat", "%i" % time_step, "-n" ], 
-                            executable="vmstat",  
-                            stdout = f_vmstat, 
-                            shell=False, 
-                            close_fds = True)
-    f_vmstat.close()
-    return proc
-def __create_childProcDicts(ppid):
-    """ creates a ProcDict stats entry for each child process of the given ppid """
-    out = subprocess.getoutput("ps --ppid %i -o pid,start" % ppid)
-    ps_lines = out.splitlines()
-    ps_lines.pop(0)
-    
-    if len(ps_lines) > 0:
-
-        for line in ps_lines:
-            ps_str = line.split()
-            pid = int(ps_str[0])
-            ProcDict(pid, start_time = _seconds(ps_str[1]))
-            print ("ppid: [%i]: child [%i] added" % (ppid, pid))
-    else: 
-        print ("no children exist for parent: %s " % ppid)
-
-
-#######  adopted from AthenaMP/PyComps ###################
-def print_shared_private(pid):
-    print ("CPROC-SHARED_PRIVATE_MEM for pid: [%i]" % pid)
-    for line in open("/proc/%i/status" % pid):
-        if line.startswith('Vm'):
-                print(line.strip())
-    private,shared=_get_shared_private_mem()
-    print ("pid:[%i] ===> private: %s MB | shared: %s MB" % (pid, private/1024., shared /1024.))
-def _get_shared_private_mem(pid='self'):
-    """ Finds the process's shared and private memory size from /proc/<pid>/statm and /proc/<pid>/smaps.
-       Copied from AthenaMP/PyComps.py"""
-    global have_pss
-    private_lines=[]
-    shared_lines=[]
-    pss_lines=[]
-    statm_name = "/proc/%s/statm" % pid
-    smaps_name = "/proc/%s/smaps" % pid
-    rss=int(open(statm_name).readline().split()[1])*PAGESIZE
-    if os.path.exists(smaps_name): #stat
-        for line in open(smaps_name).readlines(): #open
-            if line.startswith("Shared"):
-                shared_lines.append(line)
-            elif line.startswith("Private"):
-                private_lines.append(line)
-            elif line.startswith("Pss"):
-                have_pss=1
-                pss_lines.append(line)
-        shared=sum([int(line.split()[1]) for line in shared_lines])
-        private=sum([int(line.split()[1]) for line in private_lines])
-        #Note shared + private = rss above
-        #The rss in smaps includes video card mem etc.
-        if have_pss:
-            pss_adjust=0.5 #add 0.5KiB as this is the average error due to truncation
-            Pss=sum([float(line.split()[1])+pss_adjust for line in pss_lines])
-            shared = Pss - private
-    #elif (2,6,1) <= kv <= (2,6,9):
-    #    shared=0 #lots of overestimation, but what can we do?
-    #    private = rss
-    else:
-        shared=int(open(statm_name).readline().split()[2])
-        shared*=PAGESIZE
-        private = rss - shared
-    return (private, shared)
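-
-# Worked example of the Pss correction above (numbers illustrative): if a process maps
-# 100 kB privately and shares 300 kB equally with two other processes, smaps gives
-#   Private = 100 kB,  Shared = 300 kB,  sum(Pss) = 100 + 300/3 = 200 kB
-# and the code reports shared = Pss - private = 100 kB, i.e. this process's 1/3 share
-# of the shared pages instead of the full 300 kB.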
-
-
-#################################################
-############# ROOT Output #######################
-
-def _createRootFile(outName):
-    """creating the skeleton of the report ROOT file"""
-    print ("create ROOT file...")
-    from PerfMonAna.PyRootLib import importRoot
-    from ROOT import TTree
-    import array
-    ROOT = importRoot( batch = True )
-    outFile = ROOT.fopen( outName, 'RECREATE' )
-    outFile.cd("/")
-    
-    i = array.array( 'i', [0] )
-    d = array.array( 'd', [0.] )  # double, to match the float/D leaves below
-
-    outFile.cd()
-    tree =  TTree( "io", "IO statistics tree")
-    tree.Branch('Time', i, 'int/I')
-    tree.Branch('tps',d,'float/D') #transfers per second
-    tree.Branch('rtps',d,'float/D') #read transfers per second
-    tree.Branch('wtps',d,'float/D') #write transfers per second
-    tree.Branch('breadps',d,'float/D')  #blocks read per second
-    tree.Branch('bwrtnps',d,'float/D')  #blocks written per second
-    tree.Write();
-    
-    outFile.cd()
-    tree =  TTree( "mem", "Mem statistics tree")
-    tree.Branch('Time', i, 'int/I')
-    tree.Branch('kbmemfree', i, 'int/I') #free in kB
-    tree.Branch('kbmemused', i, 'int/I') #used in kB
-    tree.Branch('pmemused', d, 'float/D') #used in kB
-    tree.Branch('kbbuffers', i, 'int/I') #buffers in kB
-    tree.Branch('kbcached', i, 'int/I') #cached in kB
-    tree.Branch('kbswpfree', i, 'int/I') #swap free in kB
-    tree.Branch('kbswpused', i, 'int/I') #swap used in kB
-    tree.Branch('pswpused', d, 'float/D') 
-    tree.Branch('kbswpcad', i, 'int/I')
-    tree.Write(); 
-    
-    outFile.cd()
-    tree =  TTree("cpu", "CPU statistics tree")
-    tree.Branch('Time', i, 'int/I')
-    tree.Branch('CPU', i,'int/I')    #CPU number or ALL
-    tree.Branch('puser',d,'float/D')  # CPU utilization percentage at user level
-    tree.Branch('pnice',d,'float/D')  # CPU utilization at nice level
-    tree.Branch('psystem',d,'float/D')  # CPU utilization at system level
-    tree.Branch('piowait',d,'float/D')  # CPU idle percentage due to IO-wait
-    tree.Branch('psteal',d,'float/D')  # virtual processes wait percentage
-    tree.Branch('pidle',d,'float/D')  # CPU idling due to non IO reasons
-    tree.Write();
-
-    outFile.cd()
-    tree =  TTree( "cp", "Children processes statistics")
-    tree.Branch('Time', i, 'int/I') # start date/time
-    tree.Branch('pid', i , 'int/I') # pid of worker process
-    tree.Branch('state', i , 'int/I') #state of the process at the moment (important at the end of process)
-    tree.Branch('vmem', i, 'int/I')
-    tree.Branch('rss', i, 'int/I') 
-    tree.Branch('sz', i, 'int/I') # Size in physical pages of the core image of the process. This includes text, data, and stack space.
-    tree.Branch('shared', i, 'int/I')# shared memory as extracted from /proc/pid/smaps
-    tree.Branch('private', i, 'int/I')# private memory as extracted from /proc/pid/smaps
-    tree.Branch('cpu_time', i,'int/I')# cpu_time 
-    tree.Branch('elap_time', i, 'int/I')# elapsed time (Wall Clock time)
-    tree.Write()
-    
-    outFile.cd()
-    tree =  TTree( "cp_summary", "children processes summary tree")
-    tree.Branch('pid', i, 'int/I')
-    tree.Branch('elap_time', i , 'int/I')
-    tree.Branch('cpu_time', i, 'int/I')
-    tree.Write();
-    
-    outFile.cd()
-    tree =  TTree( "mpid", "Mother Process statistics tree")
-    tree.Branch('pid', i , 'int/I')
-    tree.Branch('state', i , 'int/I')
-    tree.Branch('vmem', i, 'int/I')
-    tree.Branch('rss', i, 'int/I')
-    tree.Branch('sz', i, 'int/I')
-    tree.Branch('shared', i, 'int/I')
-    tree.Branch('private', i, 'int/I')
-    tree.Branch('cpu_time', i,'int/I')
-    tree.Branch('elap_time', i, 'int/I')
-    tree.Write()
-
-    outFile.cd()
-    tree =  TTree( "mp_summary", "Mother process summary tree")
-    tree.Branch('pid', i, 'int/I')
-    tree.Branch('m_elap_time', i , 'int/I') #mother process total elapsed time
-    tree.Branch('m_cpu_time', i, 'int/I') #mother process CPU-time (children-worker's time not included)
-    tree.Branch('m_firstevent_time', i, 'int/I') #time spent on firstEvents(after init before forking) - measured inside mother process
-    tree.Branch('m_par_time', i, 'int/I') #parallel time - time from forking till collecting data - measured inside mother process
-    tree.Branch('x_init_time', i, 'int/I') #externally observed MP SERIAL-INIT execution time (from start till forking) 
-    tree.Branch('x_par_time', i, 'int/I') #externally observed MP PARALLEL execution time (from forking till joining)
-    tree.Branch('x_fin_time', i, 'int/I') #externally observed MP FINALIZATION time (from joining till end of job)
-    tree.Branch('free_mem_spike', i, 'int/I') #spike in "free+buffers+cached" at the end of MP-process -> physical used memory released.
-    tree.Branch('swap_change', i, 'int/I') # change in swap, should be zero if not means MP causing a swapping 
-    #tree.Branch('used_mem_spike', i, 'int/I')
-    tree.Branch('event_proc_rate', d, 'float/D') #event processing rate defined as ne*60/(np*m_par_time) = events/proc/min
-    tree.Write()
-
-    outFile.cd()
-    outFile.Write()
-    outFile.Close()
-    print ("create ROOT file... [DONE]" )
-    return
-
-def createRootFile(outName, np):
-    """creating structure of ROOT-report file from mp_stat dictionary """
-    print ("create ROOT file...")
-
-    from PerfMonAna.PyRootLib import importRoot
-    from ROOT import TTree
-    import array
-    ROOT = importRoot( batch = True )
-    outFile = ROOT.fopen( outName, 'RECREATE' )
-    print ("ROOT.fopened")
-
-    outFile.cd("/")
-    
-    i = array.array( 'i', [0] )
-    d = array.array( 'f', [0.] )
-
-    for t in mp_stat.keys():
-        tree =  TTree( t, "%s stat tree" % t)
-        tree.Branch('np', i, 'int/I') # each tree will have 'np' branch
-        for b in mp_stat[t].keys():
-            #print ("tree=%s, branch=%s" % (t,b))
-            if isinstance(mp_stat[t][b][0], int):
-                tree.Branch(b, i, 'int/I')
-            elif isinstance(mp_stat[t][b][0], float):
-                tree.Branch(b, d,'float/F')
-            else:
-                #print ("branch [%s] is not int or float type" % b)
-                tree.Branch(b, i, 'int/I')
-        tree.Write()
-    outFile.Write()
-    outFile.Close()
-    print ("create ROOT file... [DONE]")
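-
-# Sketch of the mp_stat layout consumed above and by fillRootTree/writeRootFile below
-# (keys and values illustrative): each top-level key becomes a TTree, each inner key a
-# branch, and every tree additionally carries the common 'np' branch:
-#   mp_stat = {'io':  {'Time': [...], 'tps': [...], 'breadps': [...]},
-#              'mem': {'Time': [...], 'kbmemfree': [...], 'kbcached': [...]},
-#              'cpu': {'Time': [...], 'puser': [...], 'pidle': [...]}}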
-
-
-def fillRootTree(tree, stat, np):
-    #print ("writing %s statistics Tree:" % tree.GetName(),)
-    branches = list(stat.keys())
-    #print ("    branches=", branches, "...", )
-    nbr  = len(branches)
-    array_list = list()
-
-    np_array = array.array('i', [np])
-    tree.SetBranchAddress('np', np_array) #putting 'np' into each tree.
-    for branch in branches:
-        #print ("fillRT: branch=%s" % branch)
-        if isinstance(stat[branch][0], float):
-            f = stat[branch][0]
-            nums = array.array('f', [0.0])
-            array_list.append(nums)
-        elif isinstance(stat[branch][0], int):
-            i = stat[branch][0]
-            nums =  array.array('i', [0])
-            array_list.append(nums)
-        else:
-            #print ("branch [%s] is not int or float type" % branch)
-            nums = array.array('i', [-1])
-            array_list.append(nums)
-        tree.SetBranchAddress(branch, array_list[-1]);
-        
-    for index in range(len(stat[branches[0]])):
-        for array_index, branch in enumerate(branches):
-            #print ("stat[branch=%s][index=%i] array_index=%i " % (branch, index, array_index))
-            array_list[array_index][0] = stat[branch][index] if array_list[array_index][0] != -1 else -1
-        tree.Fill()
-    #print ("[DONE]")
-
-
-
-def writeRootFile(outName, np):
-    """writes statistics into ROOT file"""
-    print ("write ROOT file %s..." % outName )
-    createRootFile(outName, np)
-    from ROOT import TFile, TTree
-    import array
-    outFile = TFile( outName, 'update' )
-
-    stat_keys = mp_stat.keys()
-    #print ("mp_stat.keys()", stat_keys)
-    for key in stat_keys:
-        #print (" writing [%s]" % key)
-        tree = outFile.Get( "%s" %   key )
-        fillRootTree(tree, mp_stat[key], np)
-        tree.Write()
-
-    outFile.Write()
-    outFile.Close()        
-    print ("write ROOT file... [DONE]" )
-    return
-
-def mergeRootFiles(file, ne):
-    import glob
-    file_list = glob.glob1(os.getcwd(), "%s.*.%i.root" % (file, ne) )
-    cmd = "hadd -f6 mp_stat.%s.ne%i" % (file, ne)
-    for f in file_list:
-        cmd = cmd + ' ' + f 
-    sc, out = subprocess.getstatusoutput(cmd)
-
-def mergeRootOutput(output_file, jobo, np_list, ne):
-    from ROOT import TFile, TTree
-    #output_file = "merged.%s.ne%i.root" % (jobo, ne)
-    cmd = "hadd -f6 %s" % output_file
-    for np in np_list:
-        # here we copy mp_summary and cp_summary trees in each root file from /$np dir into root dir for further merging
-        file = "mj.%s.%i.%i.root" % (jobo, np, ne)
-        print (" ---> processing file = %s" % file)
-        #here we form the command for merging
-        cmd = cmd + " %s" % file
-
-    print ("issuing root files merging command: [%s]" % cmd)
-    sc, out = subprocess.getstatusoutput(cmd)
-    return #output_file
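-
-# Example of the merge command assembled above (file names illustrative):
-#   hadd -f6 merged.myjob.ne100.root mj.myjob.1.100.root mj.myjob.2.100.root mj.myjob.4.100.root
-# i.e. one mj.<jobo>.<np>.<ne>.root file per np value, merged with compression level 6.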
-
-def _createGlobalRootFile(file, ne):
-    from ROOT import TFile, TTree, TList
-    import glob
-    file_list = glob.glob1(os.getcwd(), "%s.*.%i.root" % (file, ne) )
-    outFile = TFile ("%s.%i.root" % (file, ne), 'RECREATE' )    
-    for f in file_list:
-        print ("Copying trees from [%s]" % f)
-        tf = TFile (f, 'READ' )
-        mpt = tf.Get("mp_summary")
-        cpt = tf.Get("cp_summary")
-        outFile.cd('/')
-        dir = "%s" % f.replace(file, "").split(".")[1]
-        print ("   creating dir for np = %s" % dir)
-        outFile.mkdir(dir) # creating dir for np
-        outFile.cd(dir)
-        mpTree = mpt.CloneTree(); mpTree.Write()
-        cpTree = cpt.CloneTree(); cpTree.Write()
-        outFile.Write()
-        tf.Close()
-
-    outFile.cd('/')
-    
-    ikeys = outFile.GetListOfKeys().MakeIterator()
-    key = ikeys.Next()
-    key_list = list()
-
-    while key is not None:
-        key_list.append(key.GetName())
-        key = ikeys.Next()
-    
-    for np in key_list:
-        outFile.Get("%s/mp_summary" % np)
-
-
-def report2(root_file, ne = 0, comments=""):
-    print('  mpMonTools.report(): root_file=', root_file)
-    from ROOT import TFile, TTree, TBranch, TCanvas, TPad, TGraph, TLegend, TMultiGraph, gStyle, TLatex, TPaveLabel, TPaveText, TH2I, TMath
-
-    def getTreeList(tree, column, condition):
-        size = tree.Draw(column, condition,'goff'); 
-        v1 = tree.GetV1(); v1.SetSize(size)
-        return list(v1)
-
-    def makeGraph(tree, name, formula, condition="", color = 1, lineWidth=1):
-        tree.Draw(formula, condition, "goff")
-        graph = TGraph(int(tree.GetSelectedRows()), tree.GetV2(), tree.GetV1())
-        graph.SetLineColor(color);
-        graph.SetLineWidth(lineWidth)
-        graph.SetName(name);
-        return graph
-
-    def MakeMultiGraph(graph_data, mg, l):
-        clr = 1
-        gl = list()
-        
-        if graph_data is None:
-            return []
-        
-        if graph_data['type'] == 'graph':
-            for name, (tree, param) in graph_data['data'].items():
-                clr+=1; formula =''; condition=''
-                if type(param).__name__=='tuple':
-                    formula = param[0]
-                    condition = param[1]
-                else:
-                    formula = param
-                    condition = ""
-                    print ("MakeMG: ", formula, condition)
-
-                print ("name=%s, tree=%s, formula=%s, condition=%s" % (name, tree.GetName(), formula, condition) )
-           
-    
-                #g = makeGraph(tree, name, formula, condition, color=clr)
-                tree.Draw(formula, condition, "goff")
-                g = TGraph(int(tree.GetSelectedRows()), tree.GetV2(), tree.GetV1()); gl.append(g)
-                
-                g.SetName(name); g.SetLineColor(clr); g.SetMarkerColor(clr); g.SetLineWidth(0)
-                if name == "worker_rate":
-                    g.SetLineColor(10)
-                    
-                mg.Add(g); 
-                l.AddEntry(g, name)
-
-        if graph_data['type'] == 'list':
-            for name, (lx,ly) in graph_data['data'].items():
-                print ("name=%s" % name); print (lx); print (ly)
-                clr+=1
-                g = TGraph( len(lx), array.array('f', lx), array.array('f', ly) )
-                g.SetName(name); g.SetLineColor(clr); g.SetLineWidth(1); g.SetMarkerColor(clr); 
-                mg.Add(g)
-                l.AddEntry(g, name)
-                gl.append(g)
-
-        if graph_data['type'] == 'array':
-            clr = 1
-            g_list = list()
-            data = graph_data['data']
-            for name,(x,y) in graph_data['data'].items():
-                print (x); print (y)
-                clr+=1;
-                g = TGraph(len(x), x, y)
-                g.SetName(name); g.SetLineColor(clr); g.SetLineWidth(1); g.SetMarkerColor(clr) 
-                gl.append(g)
-                mg.Add(g); 
-                l.AddEntry(g, name)
-        if graph_data['type'] == 'text':
-            title.DrawPaveLabel(0.1,0.93,0.9,0.99, graph_data['title'], "brNDC")
-            for s in graph_data['data']:
-                print ("graph_data['data']=%s" % s)
-                sp_pt.AddText(s)             
-            sp_pt.SetTextAlign(12);
-            sp_pt.SetTextSize(0.04)
-            sp_pt.Draw()
-            return []
-
-        if graph_data['type'] == 'latex':
-            title.DrawPaveLabel(0.1,0.93,0.9,0.99, graph_data['title'], "brNDC")
-            tl = TLatex(); tl.SetTextSize(0.02); tl.SetTextAlign(12);
-            txtd = graph_data['data']
-            i = 0; x0 = 0.05; y0 = 0.90; dx = 0.08; dy = 0.05
-            x1 = x0
-            tl.DrawLatex(x1, y0, 'np')
-            for s in txtd['np']:
-                x1 = x1 + dx
-                tl.DrawLatex(x1, y0, s)
-            txtd.pop('np')
-
-            for k in txtd.keys():
-                y0 = y0 - dy
-                tl.DrawLatex(x0-0.03, y0, k);
-                x1 = x0
-                for s in txtd[k]:
-                    x1 = x1 + dx
-                    tl.DrawLatex(x1, y0, s)
-            return []
-        if 'goptions' in graph_data.keys():
-            mg.Draw(graph_data['goptions'])
-        else:
-            mg.Draw('ALP')
-            
-        l.Draw()
-        h=mg.GetHistogram(); h.SetXTitle(graph_data['xtitle']); h.SetYTitle(graph_data['ytitle']); h.SetMinimum(0.1);
-        title.DrawPaveLabel(0.1,0.91,0.9,0.99, graph_data['title'], "brNDC")
-        if 'text' in graph_data.keys():
-            title.DrawPaveLabel(0.2,0.88,0.8,0.92, graph_data['text'], "brNDC")
-            #text_box = TPaveText(0.2,0.51,0.8,0.54);  text_box.AddText(graph_data['text']);
-            #text_box.SetFillColor(42); text_box.SetTextAlign(12); text_box.SetTextfONt(40); text_box.Draw();
-        
-        return [] # gl #list of TGraph
-
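-    # Shape of the graph_list entries built below (illustrative): 'graph' entries hold
-    # (TTree, "formula:x") or (TTree, ("formula:x", condition)) pairs that MakeMultiGraph
-    # turns into TGraphs via TTree::Draw, 'list' entries hold plain (x_list, y_list) pairs,
-    # and 'text'/'latex' entries become annotation-only pages, e.g.
-    #   {'type': 'graph', 'data': {'cpu_rate': (spSumTree, "(60*ne)/(cpu_time_x):np")},
-    #    'title': '...', 'xtitle': 'NbrProc', 'ytitle': 'Evts/Proc/Min', 'logY': True}
-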
-    c = TCanvas("mpr", "AthenaMP-mp-scaling-charts", 10, 10, 800, 1024)
-    c.SetFillColor(17);  c.SetBorderSize(1); c.cd()
- 
-    tfile = TFile(root_file, "READ"); print ("   root compression factor = ", tfile.GetCompressionFactor())
-    spSumTree = tfile.Get("sp_summary")
-    #cpSumTree = tfile.Get("cp_summary")
-    ioTree = tfile.Get("io")
-    cpuTree = tfile.Get("cpu")
-    numaTree = tfile.Get("numa")
-    
-    if ne == 0:
-        ne = int(root_file.split('.')[-2].replace('ne', ''))
-        print ("extracted ne=[%i]" % ne)
-
-##### FORMING THE DATA FOR ROOT Graphing-Charting-Histogramming #####    
-    np_list = list(set(getTreeList(spSumTree, 'np', ''))); np_list.sort() #uniquify and sort np_list
-    elap_time_stdev = list()
-    elap_time_avg = list()
-    elap_time_max = list()
-    cpu_time_stdev = list()
-    cpu_time_max = list()
-    cpu_time_avg = list()
-
-    total_rate = list()
-    elap_time_rate = list()
-    cpu_time_rate = list()
-    user_time_rate = list()
-    par_event_rate = list()
-
-    elap_cpu_time = list() # elap - cpu time avg.
-
-    elap_time_stdev_x = list()
-    cpu_time_stdev_x = list()
-    par_elap_time = list() # elap time as seen from mother
-    
-    sp_lb = [b.GetName() for b in list(spSumTree.GetListOfBranches())]
-
-    #mp_txt = "%s" % mp_lb + "\n"
-    sp_txt = "%s" % sp_lb + "\n" 
-    
-    sp_pt = TPaveText(0.1,0.1,0.9,0.9) 
-    sp_pt.SetFillColor(22)
-    
-    sp_latex = TLatex()
-    sp_latex.SetTextAlign(12)
-
-    txt_dict=dict()
-    ltxt_dict = dict()
-    for s in sp_lb:
-        txt_dict[s] = "%20s" % s
-        ltxt_dict[s] = list()
-    np_txt = ""
-    for np in np_list:
-        size = spSumTree.Draw('elap_time_x:cpu_time_x:elap_time_x-cpu_time_x', "np==%i" % int(np), 'goff'); 
-        elapv = spSumTree.GetV1(); 
-        cpuv = spSumTree.GetV2();
-        elap_cpuv = spSumTree.GetV3();
-        #userv = spSumTree.GetV4();
-
-        elap_time_stdev.append(float(TMath.RMS(size, elapv )))
-        elap_time_avg.append(float(TMath.Mean(size, elapv )))
-        elap_time_max.append(float(TMath.MaxElement(size, elapv )))
-
-        cpu_time_stdev.append(float(TMath.RMS(size, cpuv )))
-        cpu_time_avg.append(float(TMath.Mean(size, cpuv )))
-        cpu_time_max.append(float(TMath.MaxElement(size, cpuv )))
-        
-        elap_cpu_time.append(float(TMath.Mean(size, elap_cpuv)))
-        
-        #elap_time_rate.append( float(60*ne)/float(np*elap_time_avg[-1]) )
-        #cpu_time_rate.append( float(60*ne)/float(np*cpu_time_avg[-1]) )
-        #user_time_rate.append( float(60*ne)/float( np * float(TMath.Mean(size, userv))) ) 
-
-        elap_time_rate.append( float(60*ne)/float(elap_time_avg[-1]) )
-        cpu_time_rate.append( float(60*ne)/float(cpu_time_avg[-1]) )
-        total_rate.append(np * elap_time_rate[-1])
-                
-        #user_time_rate.append( float(60*ne)/float(float(TMath.Mean(size, userv))) ) 
-
-        #elap_time_stdev_x.append(float(TMath.RMS(size, cpSumTree.GetV2())))
-        #cpu_time_stdev_x.append(float(TMath.RMS(size,  cpSumTree.GetV4())))
-        
-        #msize = mpSumTree.Draw('m_par_time', "np==%i" % int(np), 'goff')
-        #parv = mpSumTree.GetV1();
-        #par_elap_time.append(float(TMath.Mean(msize, parv)))
-        #par_event_rate.append(float(60.0*ne)/par_elap_time[-1])
-
-        np_txt += "%10s" % np
-        for s in sp_lb:
-            txt_dict[s] += "%10.1f" % getTreeList(spSumTree, s, "np==%i" % int(np) )[0]
-            ltxt_dict[s].append( "%10.1f" % getTreeList(spSumTree, s, "np==%i" % int(np))[0] )
-
-    print ("np_list=%s\n etime_stdev=%s \n cpu_time_stdev=%s" % (np_list, elap_time_stdev, cpu_time_stdev))
-    print ("elap-cpu=%s" % (elap_cpu_time))
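-
-    # Rate bookkeeping above, spelled out (units: events/minute):
-    #   elap_time_rate = 60*ne / <mean worker elapsed time>   (evts/min per worker)
-    #   cpu_time_rate  = 60*ne / <mean worker CPU time>
-    #   total_rate     = np * elap_time_rate                  (aggregate evts/min)
-    # e.g. ne=100 events and a 300 s mean elapsed time give 20 evts/min per worker,
-    # i.e. 80 evts/min in total for np=4.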
-
-    from socket import gethostname
-    import platform
-#########################
-    graph_list = list()
-###########################
-    graph =dict()
-    graph['type'] = 'text'
-    graph['data'] = [
-                "MP Times, Memory, IO, CPU PLOTS for ",
-                " %s " % root_file,
-                " machine: %s" % gethostname(),
-                #"%s, %s, %s, %s, %s, %s" % platform.uname(),
-                "%s" % platform.platform(),
-                "%s" % os.getenv('CMTCONFIG'),
-                " comments: %s" % comments,
-                "np=%s " % [int(s) for s in np_list]
-                ]
-    graph['title'] = 'ATHENA MP MONITOR mpMon REPORT'
-    graph_list.append(graph)
-############################
-    graph  = dict()
-    graph['type'] = 'latex'
-    graph['data'] = ltxt_dict 
-    graph['title']= "sp_summary numbers:"
-    graph_list.append(graph)
-############################
-    graph = dict()
-    graph['type'] = 'list'
-    graph['data'] = {
-                     #'parallel_elap': (np_list, par_elap_time),
-                     'proc_elap_avg': (np_list, elap_time_avg),
-                     'proc_elap_max': (np_list, elap_time_max),
-                     'proc_cpu_avg':  (np_list, cpu_time_avg),
-                     'proc_cpu_max':  (np_list, cpu_time_max),
-                     #'elap_time_stdev_x': (np_list, elap_time_stdev_x),
-                     #'cpu_time_stdev_x':  (np_list, cpu_time_stdev_x)
-                     }
-    graph['title'] = "Processes Elap and CPU Time Variations"
-    graph['xtitle'] = "NbrProc"
-    graph['ytitle'] = "Time, sec" 
-    graph_list.append(graph)
-
-############################
-    graph =dict()
-    graph['type'] = 'text'
-    graph['data'] = [
-                "This plot intentionally left blank"
-                ]
-    graph['title'] = 'BLANK CHART'
-    graph_list.append(graph)
-    
-    """
-    graph  = dict()
-    graph['type'] = 'graph'
-    graph['data'] = {
-            "m_elap_time" : (mpSumTree, "m_elap_time:np"),
-            "m_firstevent_time": (mpSumTree, "m_firstevent_time:np"),
-            "m_par_time" :    (mpSumTree, "m_par_time:np"),
-            "m_merging_time": (mpSumTree, "m_merging_time:np")
-                   }
-    graph['title'] = "PARENT PROCESS TIMES"
-    graph['xtitle']= "Nbr of Processes"
-    graph['ytitle']= "Time, sec"
-    graph_list.append(graph)
-    
-##############################
-    graph =dict()
-    graph['type'] = 'text'
-    graph['data'] = [
-                "This plot intentionally left blank"
-                ]
-    graph['title'] = 'BLANK CHART'
-    graph_list.append(graph)
-    """
-############################
-    graph = dict()
-    graph['type'] = 'list'
-    graph['data'] = {
-                     'elap_time_stdev': (np_list, elap_time_stdev),
-                     'cpu_time_stdev':  (np_list, cpu_time_stdev),
-                     'elap-cpu_avg':  (np_list, elap_cpu_time),
-                     }
-    graph['title'] = "Processes Time Deviations"
-    graph['xtitle'] = "NbrProc"
-    graph['ytitle'] = "Time_StDev, sec" 
-    graph_list.append(graph)
-
-############################
-    """ 
-    graph  = dict()
-    graph['type'] = 'graph'
-    graph['data'] = {
-            #"event_rate": (mpSumTree, "event_rate:np"),
-            #"event_rate_x": (mpSumTree, "(60*%i)/(x_par_time):np" % ne),
-            #"event_rate" :  (mpSumTree, "(60*%i)/(m_par_time):np" % ne),
-            #"per_proc_rate": (mpSumTree, "event_proc_rate:np") 
-            "event_rate" :  (mpSumTree, "(60*%i*np)/(m_par_time):np" % ne),
-            "per_proc_rate": (mpSumTree, "event_proc_rate*np:np")
-            #"per_proc_rate_x": (mpSumTree, "(60*%i)/(x_par_time*np):np" % ne)
-                     }
-    graph['title'] = "ATHENA MP EVENT PROCESSING RATE"
-    graph['xtitle']= "Nbr of Processes"
-    graph['ytitle']= "Evts/min, Evts/proc/min"
-    graph_list.append(graph)
-    """
-
-############################
-    
-    graph = dict()
-    graph['type'] = 'list'
-    graph['data'] = {
-                     'total_rate': (np_list, total_rate),
-                     'elap_avg_rate': (np_list, elap_time_rate),
-                     'cpu_avg_rate':  (np_list, cpu_time_rate)
-                    #'user_avg_rate': (np_list, user_time_rate)
-                     }
-    graph['title'] = "Worker event processing rates: wall-time, user-CPU and total-CPU based, averaged."
-    graph['xtitle'] = "NbrProc"
-    graph['ytitle'] = "Evts/Proc/Min" 
-    graph_list.append(graph)
-
-############################
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['data'] = {
-                     #'parallel_rate': (mpSumTree, "event_rate:np"), #"(event_rate + 207/(30*(np-2)+m_par_time)):np"),
-                     'worker_rate': (spSumTree, "(60*%i)/(elap_time_x):np" % ne),
-                     'cpu_rate':  (spSumTree, "(60*%i)/(cpu_time_x):np" % ne),
-                     #'user_rate': (spSumTree, "(60*%i)/(user_time_x):np" % ne)
-                     }
-    graph['title'] = "Event Throughput per Process, wall-clock time"
-    graph['xtitle'] = "NbrProc"
-    graph['ytitle'] = "Evts/Proc/Min" 
-    graph['goptions'] = "ALP"
-    graph_list.append(graph)
-############################
-    
-    graph = dict()
-    graph['type'] = 'list'
-    graph['data'] = {
-                     #'par_event_rate': (np_list, par_event_rate),
-                     'elap_avg_rate': (np_list, elap_time_rate),
-                     'cpu_avg_rate':  (np_list, cpu_time_rate),
-                    #'user_avg_rate': (np_list, user_time_rate)
-                     }
-    graph['title'] = "Worker event processing rates: wall-time, user-CPU and total-CPU based, averaged."
-    graph['xtitle'] = "NbrProc"
-    graph['ytitle'] = "Evts/Proc/Min" 
-    graph_list.append(graph)
-
-#############################
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['data'] ={
-        "total_mem": (spSumTree, "free_mem_spike/1024:np"),
-        "mem_per_proc": (spSumTree, "free_mem_spike/np/1024:np"),
-        }
-    graph['title'] = "PHYSICAL MEMORY CONSUMPTION BY Athena MJ (Multi Jobs)"
-    graph['xtitle']= "Nbr of Processes"
-    graph['ytitle']= "Memory Consumption, Mb"
-    graph_list.append(graph)
-
-    cond = "vmem!=0"
-#############################
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['data'] ={
-        "mem_per_proc": (spSumTree, "free_mem_spike/np/1024:np"),
-        "vmem": (spSumTree, ("vmem:np", cond) ),
-        "rss":  (spSumTree, ("rss:np",  cond) )
-        }
-    graph['title'] = "VMEM, RSS, RealMemory(from free-spike) per Proc"
-    graph['xtitle']= "Nbr of Processes"
-    graph['ytitle']= "Memory Consumption, Mb"
-    graph_list.append(graph)
-
-############################
-    cond = "np>0"
-############################    
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['logY'] = True
-    graph['data'] ={
-        "bread/sec": (ioTree,  ('breadps:Time', cond) ),
-        "bwrite/sec": (ioTree, ('bwrtnps:Time', cond) )
-        }
-    graph['title'] = "IO Activity for Athena MJ (Multi Jobs)  %s " % cond
-    graph['xtitle']= "Time"
-    graph['ytitle']= "Total Amount of Data R/W in blocks per sec"
-    graph['text']= "np = %s" % np_list
-    graph_list.append(graph)  
-############################    
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['logY'] = True
-    graph['data'] ={
-        "write_reqs/sec": (ioTree, ('wtps:Time', cond) ),
-        "read_reqs/sec":  (ioTree, ('rtps:Time', cond) ),
-        "total_reqs/sec": (ioTree, ('tps:Time',  cond) )
-        }
-    graph['title'] = "IO Activity for Athena MJ (Multi Jobs) %s" % cond
-    graph['xtitle']= "Time, sec since 00:00"
-    graph['ytitle']= "Transfer_requests/sec"
-    graph['text']= "np = %s" % np_list
-    graph_list.append(graph)
-############################    
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['data'] ={
-        '%_user' :   (cpuTree, ('puser:Time',   cond) ),
-        '%_system':  (cpuTree, ('psystem:Time', cond) ),
-        '%_idle':    (cpuTree, ('pidle:Time',   cond) ),
-        '%_io_wait': (cpuTree, ('piowait:Time', cond) )
-        }
-    graph['title'] = "CPU Activity for Athena MJ (Multi Jobs) %s" % cond
-    graph['xtitle']= "Time, sec since 00:00"
-    graph['ytitle']= "Percentage of CPU Utilization"
-    graph['text']= "np = %s" % np_list
-    graph_list.append(graph)
-############################  
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['logY'] = True
-    graph['data'] ={
-        '%_io_wait': (cpuTree, ('piowait:Time', cond) ),
-        }
-    graph['title'] = "CPU Activity for Athena MJ (Multi Jobs) %s" % cond
-    graph['xtitle']= "Time, sec since 00:00"
-    graph['ytitle']= "Percentage of CPU Utilization"
-    graph['text']= "np = %s" % np_list
-    graph_list.append(graph)
-#########################
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['logY'] = True
-    graph['data'] ={
-        'numa_hit': (numaTree, ('numa_hit:Time', cond) ),
-        'numa_miss':(numaTree,('numa_miss:Time', cond) ),
-        'numa_foreign': (numaTree, ('numa_foreign:Time', cond) ),
-        'interleave_hit': (numaTree, ('interleave_hit:Time', cond) ),
-        'local_node': (numaTree, ('local_node:Time', cond) ),
-        'other_node': (numaTree, ('other_node:Time', cond) ),
-        }
-    graph['title'] = "NUMA Activity for athena MJ (Multi Jobs) %s" % cond
-    graph['xtitle']= "Time, sec since 00:00"
-    graph['ytitle']= "Nbr of hits/misses"
-    graph['text']= "np = %s" % np_list
-    graph_list.append(graph)
-
-
-
-    pads = list()
-
-    cpp = 1 #charts per pad 
-    ppc = 2 #pads per canvas
-
-    c.cd()
-    gStyle.SetOptStat(0);
-    gStyle.SetPalette(1);
-    gStyle.SetCanvasColor(33);
-    gStyle.SetFrameFillColor(10);
-    gStyle.SetMarkerStyle(21)
-    gStyle.SetMarkerColor(2)
-    gStyle.SetMarkerSize(0.4)
-    print ("gStyle.Set done")
-
-    title = TPaveLabel(0.1,0.98,0.9,1, "Athena MP Plots");
-    title.SetFillColor(42); title.SetTextFont(40); 
-    #title.Draw();print ("title Drawn")
-
-    mgs =  list()  #List of TMultiGraphs
-    ls =   list()  #List of TLegends
-    gs =   list()  #List of TGraph
-
-    for j in range(ppc):
-        y_factor = 0.99;   x1 = 0.01; x2 = 0.99;  y1 = y_factor - (y_factor-0.01)*(j+1)/float(ppc); y2 = y_factor - (y_factor-0.01)*j/float(ppc)
-        print ("x1,y1,x2,y2",  x1, y1, x2, y2 )
-        pad = TPad("pad%i" % j, "pad%i" % j,   x1, y1, x2, y2,   33); pad.Draw()
-        pads.append(pad);
-    
-    num_cans = len(graph_list) //(cpp*ppc) if len(graph_list) % (cpp*ppc)==0 else len(graph_list)//(cpp*ppc) + 1
-    graph_list += [None,]* (num_cans*cpp*ppc - len(graph_list))
-    print ("number of pages/canvases in report = ", num_cans)
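-
-    # Pad/canvas arithmetic above, for ppc=2 and cpp=1: pad 0 spans y in [0.50, 0.99],
-    # pad 1 spans [0.01, 0.50], so each canvas page holds two charts and graph_list is
-    # padded with None up to a whole number of pages.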
-    
-    pdf_file = root_file
-    for s in ['merged.', '.py', '.root']:
-        pdf_file = pdf_file.replace(s, '')
-    pdf_file ="%s.pdf" % pdf_file
-
-    for i in range(num_cans):
-        for j in range(ppc):
-            graph = graph_list[ppc*i+j]
-            if graph is None:
-                continue
-            
-            if 'logY' in graph.keys():
-                if graph['logY']:
-                    pads[j].SetLogy()
-            else:
-                pads[j].SetLogy(0)
-            
-            pads[j].cd()
-            pads[j].SetRightMargin(0.2)
-            l = TLegend(0.82,0.20,0.99,0.89); ls.append(l) 
-            mg = TMultiGraph(); mgs.append(mg)
-            print ("graph=", graph)
-            gs.append(MakeMultiGraph(graph, mg, l))
-
-        c.Update()
-        if i == 0:
-            print ("pdf.start")
-            c.Print(pdf_file+'(', 'pdf') #start page
-        elif i < num_cans-1:
-            print ("pdf.body")
-            c.Print(pdf_file, 'pdf')    #body pages
-        else:
-            print ("pdf.end")
-            c.Print(pdf_file + ')', 'pdf') #end page
-        c.SaveAs("%s.%i.png" % (pdf_file, i))
-        for pad in pads:
-            pad.Clear()
-
-def report(root_file, ne = 0, comments=""):
-    print('  mpMonTools.report(): root_file=', root_file)
-    from ROOT import TFile, TTree, TBranch, TCanvas, TPad, TGraph, TLegend, TMultiGraph, gStyle, TLatex, TPaveLabel, TPaveText, TH2I, TMath
-
-    def getTreeList(tree, column, condition):
-        size = tree.Draw(column, condition,'goff'); 
-        v1 = tree.GetV1(); v1.SetSize(size)
-        return list(v1)
-
-    def makeGraph(tree, name, formula, condition="", color = 1, lineWidth=1):
-        tree.Draw(formula, condition, "goff")
-        graph = TGraph(int(tree.GetSelectedRows()), tree.GetV2(), tree.GetV1())
-        graph.SetLineColor(color);
-        graph.SetLineWidth(lineWidth)
-        graph.SetName(name);
-        return graph
-
-    def MakeMultiGraph(graph_data, mg, l):
-        clr = 1
-        gl = list()
-        
-        if graph_data is None:
-            return []
-        line_blank = 1
-        if 'noline' in graph_data.keys(): 
-            line_blank=0
-            
-        if graph_data['type'] == 'graph':
-            for name, (tree, param) in graph_data['data'].items():
-                clr+=1; formula =''; condition=''
-                if type(param).__name__=='tuple':
-                    formula = param[0]
-                    condition = param[1]
-                else:
-                    formula = param
-                    condition = ""
-                    print ("MakeMG: ", formula, condition)
-
-                print ("name=%s, tree=%s, formula=%s, condition=%s" % (name, tree.GetName(), formula, condition) )
-           
-                tree.Draw(formula, condition, "goff")
-                
-                selection_size = tree.GetSelectedRows()
-                if selection_size==-1:
-                    print ("-> SKIPPED (DO NOT EXIST): SELECTION_SIZE=%i" % selection_size )
-                    continue
-                else:
-                    print ("-> SELECTION_SIZE=%i" % selection_size )
-                    pass
-
-                g = TGraph(selection_size, tree.GetV2(), tree.GetV1()); gl.append(g)
-                
-                g.SetName(name); g.SetLineColor(clr*line_blank); g.SetMarkerColor(clr); g.SetLineWidth(0)
-                #if "rate" in name:
-                #    g.SetLineColor(0)
-                    
-                mg.Add(g); 
-                l.AddEntry(g, name)
-
-        if graph_data['type'] == 'list':
-            for name, (lx,ly) in graph_data['data'].items():
-                print ("name=%s" % name); print (lx); print (ly)
-                clr+=1
-                g = TGraph( len(lx), array.array('f', lx), array.array('f', ly) )
-                g.SetName(name); g.SetLineColor(clr*line_blank); g.SetLineWidth(1); g.SetMarkerColor(clr); 
-                mg.Add(g)
-                l.AddEntry(g, name)
-                gl.append(g)
-
-        if graph_data['type'] == 'array':
-            clr = 1
-            g_list = list()
-            data = graph_data['data']
-            for name,(x,y) in graph_data['data'].items():
-                print (x); print (y)
-                clr+=1;
-                g = TGraph(len(x), x, y)
-                g.SetName(name); g.SetLineColor(clr*line_blank); g.SetLineWidth(1); g.SetMarkerColor(clr) 
-                gl.append(g)
-                mg.Add(g); 
-                l.AddEntry(g, name)
-        if graph_data['type'] == 'text':
-            title.DrawPaveLabel(0.1,0.93,0.9,0.99, graph_data['title'], "brNDC")
-            for s in graph_data['data']:
-                print ("graph_data['data']=%s" % s)
-                sp_pt.AddText(s)             
-            sp_pt.SetTextAlign(12);
-            sp_pt.SetTextSize(0.04)
-            sp_pt.Draw()
-            return []
-
-        if graph_data['type'] == 'latex':
-            title.DrawPaveLabel(0.1,0.93,0.9,0.99, graph_data['title'], "brNDC")
-            tl = TLatex(); tl.SetTextSize(0.02); tl.SetTextAlign(12);
-            txtd = graph_data['data']
-            i = 0; x0 = 0.05; y0 = 0.90; dx = 0.08; dy = 0.05
-            x1 = x0
-            tl.DrawLatex(x1, y0, 'np')
-            for s in txtd['np']:
-                x1 = x1 + dx
-                tl.DrawLatex(x1, y0, s)
-            txtd.pop('np')
-
-            for k in txtd.keys():
-                y0 = y0 - dy
-                tl.DrawLatex(x0-0.03, y0, k);
-                x1 = x0
-                for s in txtd[k]:
-                    x1 = x1 + dx
-                    tl.DrawLatex(x1, y0, s)
-            return []
-        if 'goptions' in graph_data.keys():
-            mg.Draw(graph_data['goptions'])
-        else:
-            mg.Draw('ALP')
-            
-        l.Draw()
-        h=mg.GetHistogram(); h.SetXTitle(graph_data['xtitle']); h.SetYTitle(graph_data['ytitle']); h.SetMinimum(0.1);
-        title.DrawPaveLabel(0.1,0.91,0.9,0.99, graph_data['title'], "brNDC")
-        if 'text' in graph_data.keys():
-            title.DrawPaveLabel(0.2,0.88,0.8,0.92, graph_data['text'], "brNDC")
-            #text_box = TPaveText(0.2,0.51,0.8,0.54);  text_box.AddText(graph_data['text']);
-            #text_box.SetFillColor(0); text_box.SetTextAlign(12); text_box.SetTextfONt(40); text_box.Draw();
-        
-        return [] # gl #list of TGraph
-
-    c = TCanvas("mpr", "AthenaMJ-mp-scaling-charts", 1, 1, 800, 1024)
-    c.SetFillColor(0);  c.SetBorderSize(1); c.cd()
- 
-    tfile = TFile(root_file, "READ"); print ("   root compression factor = ", tfile.GetCompressionFactor())
-    spSumTree = tfile.Get("sp_summary")
-    #cpSumTree = tfile.Get("cp_summary")
-    ioTree = tfile.Get("io")
-    cpuTree = tfile.Get("cpu")
-    numaTree = tfile.Get("numa")
-    memTree = tfile.Get("mem")
-
-    if ne == 0:
-        ne = int(root_file.split('.')[-2].replace('ne', ''))
-        print ("extracted ne=[%i]" % ne)
-
-##### FORMING THE DATA FOR ROOT Graphing-Charting-Histogramming #####    
-    np_list = list(set(getTreeList(spSumTree, 'np', ''))); np_list.sort() #uniquify and sort np_list
-    elap_time_stdev = list()
-    elap_time_avg = list()
-    elap_time_max = list()
-    cpu_time_stdev = list()
-    cpu_time_max = list()
-    cpu_time_avg = list()
-
-    total_rate = list()
-    elap_time_rate = list()
-    cpu_time_rate = list()
-    user_time_rate = list()
-    par_event_rate = list()
-
-    elap_cpu_time = list() # elap - cpu time avg.
-
-    elap_time_stdev_x = list()
-    cpu_time_stdev_x = list()
-    par_elap_time = list() # elap time as seen from mother
-    
-    sp_lb = [b.GetName() for b in list(spSumTree.GetListOfBranches())]
-
-    #mp_txt = "%s" % mp_lb + "\n"
-    sp_txt = "%s" % sp_lb + "\n" 
-    
-    sp_pt = TPaveText(0.1,0.1,0.9,0.9) 
-    sp_pt.SetFillColor(0)
-    
-    sp_latex = TLatex()
-    sp_latex.SetTextAlign(12)
-
-    txt_dict=dict()
-    ltxt_dict = dict()
-    for s in sp_lb:
-        txt_dict[s] = "%20s" % s
-        ltxt_dict[s] = list()
-    ltxt_dict["total_rate"] = list()
-    ltxt_dict["proc_rate_avg"] = list()
-    
-    np_txt = ""
-    for np in np_list:
-        size = spSumTree.Draw('(elap_time_x-init_time_x):cpu_time_x:elap_time_x-cpu_time_x', "np==%i" % int(np), 'goff'); 
-        elapv = spSumTree.GetV1(); 
-        cpuv = spSumTree.GetV2();
-        elap_cpuv = spSumTree.GetV3();
-        #userv = spSumTree.GetV4();
-
-        elap_time_stdev.append(float(TMath.RMS(size, elapv )))
-        elap_time_avg.append(float(TMath.Mean(size, elapv )))
-        elap_time_max.append(float(TMath.MaxElement(size, elapv )))
-
-        cpu_time_stdev.append(float(TMath.RMS(size, cpuv )))
-        cpu_time_avg.append(float(TMath.Mean(size, cpuv )))
-        cpu_time_max.append(float(TMath.MaxElement(size, cpuv )))
-        
-        elap_cpu_time.append(float(TMath.Mean(size, elap_cpuv)))
-        
-        #elap_time_rate.append( float(60*ne)/float(np*elap_time_avg[-1]) )
-        #cpu_time_rate.append( float(60*ne)/float(np*cpu_time_avg[-1]) )
-        #user_time_rate.append( float(60*ne)/float( np * float(TMath.Mean(size, userv))) ) 
-
-        elap_time_rate.append( float(60*ne)/float(elap_time_avg[-1]) )
-        cpu_time_rate.append( float(60*ne)/float(cpu_time_avg[-1]) )
-        total_rate.append(np * elap_time_rate[-1])
-                
-        #user_time_rate.append( float(60*ne)/float(float(TMath.Mean(size, userv))) ) 
-
-        #elap_time_stdev_x.append(float(TMath.RMS(size, cpSumTree.GetV2())))
-        #cpu_time_stdev_x.append(float(TMath.RMS(size,  cpSumTree.GetV4())))
-        
-        #msize = mpSumTree.Draw('m_par_time', "np==%i" % int(np), 'goff')
-        #parv = mpSumTree.GetV1();
-        #par_elap_time.append(float(TMath.Mean(msize, parv)))
-        #par_event_rate.append(float(60.0*ne)/par_elap_time[-1])
-
-        np_txt += "%10s" % np
-        for s in sp_lb:
-            gtl = getTreeList(spSumTree, s, "np==%i" % int(np) )
-            print ("%s: getTreeList: %s" % (s,gtl), end='')
-            gtl_avg = meanList(gtl)
-            print (" avg=%10.1f" % gtl_avg)
-            txt_dict[s] += "%10.1f" % gtl_avg
-            ltxt_dict[s].append( "%10.1f" % gtl_avg)
-        ltxt_dict["total_rate"].append("%10.1f" % 
-                ( 60.0*float(np)*float(ne)/( float(ltxt_dict["elap_time_x"][-1]) - float(ltxt_dict["init_time_x"][-1]) ) ) )
-        ltxt_dict["proc_rate_avg"].append("%10.1f" % 
-                ( 60.0*float(ne)/( float(ltxt_dict["elap_time_x"][-1]) - float(ltxt_dict["init_time_x"][-1]) ) ) )
-        
-    print ("np_list=%s\n etime_stdev=%s \n cpu_time_stdev=%s" % (np_list, elap_time_stdev, cpu_time_stdev))
-    print ("elap-cpu=%s" % (elap_cpu_time))
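-
-    # Summary rates above, spelled out: the serial initialisation time is excluded, i.e.
-    #   proc_rate_avg = 60*ne    / (elap_time_x - init_time_x)
-    #   total_rate    = 60*ne*np / (elap_time_x - init_time_x)
-    # e.g. ne=100, np=4, elap_time_x=700 s, init_time_x=100 s -> 10 evts/min/proc, 40 evts/min in total.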
-
-    from socket import gethostname
-    import platform
-    graph_list = list()
-###########################
-    graph =dict()
-    graph['type'] = 'text'
-    graph['data'] = [
-                "MJ Times, Memory, IO, CPU PLOTS for ",
-                " %s " % root_file,
-                " machine: %s" % gethostname(),
-                #"%s, %s, %s, %s, %s, %s" % platform.uname(),
-                "%s" % platform.platform(),
-                "%s" % os.getenv('CMTCONFIG'),
-                " comments: %s" % comments,
-                "np=%s " % [int(s) for s in np_list]
-                ]
-    graph['title'] = 'ATHENA MJ MONITOR mjMon REPORT'
-    graph_list.append(graph)
-############################
-    graph  = dict()
-    graph['type'] = 'latex'
-    graph['data'] = ltxt_dict 
-    graph['title']= "mj_summary numbers:"
-    graph_list.append(graph)
-
-#########################
-    
-    graph = dict()
-    graph['type'] = 'list'
-    graph['data'] = {
-                     'total_rate': (np_list, total_rate),
-                    #'elap_avg_rate': (np_list, elap_time_rate),
-                    #'cpu_avg_rate':  (np_list, cpu_time_rate)
-                    #'user_avg_rate': (np_list, user_time_rate)
-                     }
-    graph['title'] = "Total Event processing rate, averaged from Wall-Time rates."
-    graph['xtitle'] = "NbrProc"
-    graph['ytitle'] = "Evts/Proc/Min" 
-    graph_list.append(graph)
-
-############################
-    graph = dict()
-    graph['type'] = 'list'
-    graph['data'] = {
-                     #'parallel_elap': (np_list, par_elap_time),
-                     'proc_elap_avg': (np_list, elap_time_avg),
-                     'proc_elap_max': (np_list, elap_time_max),
-                     'proc_cpu_avg':  (np_list, cpu_time_avg),
-                     'proc_cpu_max':  (np_list, cpu_time_max),
-                     #'elap_time_stdev_x': (np_list, elap_time_stdev_x),
-                     #'cpu_time_stdev_x':  (np_list, cpu_time_stdev_x)
-                     }
-    graph['title'] = "Processes Elap and CPU Time Variations"
-    graph['xtitle'] = "NbrProc"
-    graph['ytitle'] = "Time, sec" 
-    graph_list.append(graph)
-
-############################
-    graph =dict()
-    graph['type'] = 'text'
-    graph['data'] = [
-                "This plot intentionally left blank"
-                ]
-    graph['title'] = 'BLANK CHART'
-    graph_list.append(graph)
-    
-############################
-    graph = dict()
-    graph['type'] = 'list'
-    graph['data'] = {
-                     'elap_time_stdev': (np_list, elap_time_stdev),
-                     'cpu_time_stdev':  (np_list, cpu_time_stdev),
-                     'elap-cpu_avg':  (np_list, elap_cpu_time),
-                     }
-    graph['title'] = "Processes Time Deviations"
-    graph['xtitle'] = "NbrProc"
-    graph['ytitle'] = "Time_StDev, sec" 
-    graph_list.append(graph)
-
-############################
-    
-    graph = dict()
-    graph['type'] = 'list'
-    graph['noline']=0
-    graph['data'] = {
-                     'total_rate': (np_list, total_rate),
-                     'elap_avg_rate': (np_list, elap_time_rate),
-                     'cpu_avg_rate':  (np_list, cpu_time_rate)
-                    #'user_avg_rate': (np_list, user_time_rate)
-                     }
-    graph['title'] = "Worker event processing rates: wall-time, user-CPU and total-CPU based, averaged."
-    graph['xtitle'] = "NbrProc"
-    graph['ytitle'] = "Evts/Proc/Min" 
-    graph_list.append(graph)
-
-############################
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['noline']=0
-    graph['data'] = {
-                     #'parallel_rate': (mpSumTree, "event_rate:np"), #"(event_rate + 207/(30*(np-2)+m_par_time)):np"),
-                     'worker_rate': (spSumTree, "(60*%i)/(elap_time_x):np" % ne),
-                     'worker_rate_': (spSumTree, "(60*%i)/(elap_time_x-200):np" % ne),
-                     'cpu_rate':  (spSumTree, "(60*%i)/(cpu_time_x):np" % ne),
-                     #'user_rate': (spSumTree, "(60*%i)/(user_time_x):np" % ne)
-                     }
-    graph['title'] = "Event Throughput per Process, wall-clock time"
-    graph['xtitle'] = "NbrProc"
-    graph['ytitle'] = "Evts/Proc/Min" 
-    graph['goptions'] = "ALP"
-    graph_list.append(graph)
-############################
-    
-    graph = dict()
-    graph['type'] = 'list'
-    graph['noline']=0
-    graph['data'] = {
-                     #'par_event_rate': (np_list, par_event_rate),
-                     'elap_avg_rate': (np_list, elap_time_rate),
-                     'cpu_avg_rate':  (np_list, cpu_time_rate),
-                    #'user_avg_rate': (np_list, user_time_rate)
-                     }
-    graph['title'] = "Worker event processing rates: wall-time, user-CPU and total-CPU based, averaged."
-    graph['xtitle'] = "NbrProc"
-    graph['ytitle'] = "Evts/Proc/Min" 
-    graph_list.append(graph)
-
-#############################
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['noline']=0
-    graph['data'] ={
-        "total_mem": (spSumTree, "free_mem_spike:np"),
-        "mem_per_proc": (spSumTree, "free_mem_spike/np:np"),
-        }
-    graph['title'] = "PHYSICAL MEMORY CONSUMPTION BY Athena MJ (Multi Jobs)"
-    graph['xtitle']= "Nbr of Processes"
-    graph['ytitle']= "Memory Consumption, Mb"
-    graph_list.append(graph)
-
-    cond = "vmem!=0"
-#############################
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['noline']=0
-    graph['data'] ={
-        "mem_per_proc": (spSumTree, "free_mem_spike/np:np"),
-        "vmem": (spSumTree, ("vmem:np", cond) ),
-        "rss":  (spSumTree, ("rss:np",  cond) )
-        }
-    graph['title'] = "VMEM, RSS, RealMemory(from free-spike) per Proc"
-    graph['xtitle']= "Nbr of Processes"
-    graph['ytitle']= "Memory Consumption, Mb"
-    graph_list.append(graph)
-
-############################  
-    cond="np>0"
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['logY'] = False
-    graph['data'] ={
-        'free_mem': (memTree, ('kbmemfree/1024:Time', cond) ),
-        'used_mem': (memTree, ('kbmemused/1024:Time', cond) ),
-        'cached_mem': (memTree, ('kbcached/1024:Time', cond) ),
-        'buffers_mem': (memTree, ('kbbuffers/1024:Time', cond) ),
-        'kbswpused': (memTree, ('kbswapused/1024:Time', cond) )
-        #'commit_mem': (memTree, ('kbcommit/1024:Time', cond) )
-        }
-    graph['title'] = "Memory Activity for Athena MJ %s" % cond
-    graph['xtitle']= "Time, sec since 00:00"
-    graph['ytitle']= "Memory, Mb "
-    #graph['text']= "np = %s" % np_list
-    graph_list.append(graph)
-############################
-    cond = "np>0"
-############################    
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['logY'] = True
-    graph['data'] ={
-        "bread/sec": (ioTree,  ('breadps:Time', cond) ),
-        "bwrite/sec": (ioTree, ('bwrtnps:Time', cond) )
-        }
-    graph['title'] = "IO Activity for Athena MJ (Multi Jobs)  %s " % cond
-    graph['xtitle']= "Time"
-    graph['ytitle']= "Total Amount of Data R/W in blocks per sec"
-    graph['text']= "np = %s" % np_list
-    graph_list.append(graph)  
-############################    
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['logY'] = True
-    graph['data'] ={
-        "write_reqs/sec": (ioTree, ('wtps:Time', cond) ),
-        "read_reqs/sec":  (ioTree, ('rtps:Time', cond) ),
-        "total_reqs/sec": (ioTree, ('tps:Time',  cond) )
-        }
-    graph['title'] = "IO Activity for Athena MJ (Multi Jobs) %s" % cond
-    graph['xtitle']= "Time, sec since 00:00"
-    graph['ytitle']= "Transfer_requests/sec"
-    graph['text']= "np = %s" % np_list
-    graph_list.append(graph)
-############################    
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['data'] ={
-        '%_user' :   (cpuTree, ('puser:Time',   cond) ),
-        '%_system':  (cpuTree, ('psystem:Time', cond) ),
-        '%_idle':    (cpuTree, ('pidle:Time',   cond) ),
-        '%_io_wait': (cpuTree, ('piowait:Time', cond) )
-        }
-    graph['title'] = "CPU Activity for Athena MJ (Multi Jobs) %s" % cond
-    graph['xtitle']= "Time, sec since 00:00"
-    graph['ytitle']= "Percentage of CPU Utilization"
-    graph['text']= "np = %s" % np_list
-    graph_list.append(graph)
-############################  
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['logY'] = True
-    graph['data'] ={
-        '%_io_wait': (cpuTree, ('piowait:Time', cond) ),
-        }
-    graph['title'] = "CPU Activity for Athena MJ (Multi Jobs) %s" % cond
-    graph['xtitle']= "Time, sec since 00:00"
-    graph['ytitle']= "Percentage of CPU Utilization"
-    graph['text']= "np = %s" % np_list
-    graph_list.append(graph)
-#########################
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['logY'] = True
-    graph['data'] ={
-        'numa_hit': (numaTree, 'numa_hit:Time' ),
-        'numa_miss':(numaTree,'numa_miss:Time' ),
-        'numa_foreign': (numaTree, 'numa_foreign:Time' ),
-        'interleave_hit': (numaTree, 'interleave_hit:Time' ),
-        'local_node': (numaTree, 'local_node:Time' ),
-        'other_node': (numaTree, 'other_node:Time')
-        }
-    graph['title'] = "NUMA Activity for athena MJ (Multi Jobs) %s, Logscale" % cond
-    graph['xtitle']= "Time, sec since 00:00"
-    graph['ytitle']= "Nbr of hits/misses"
-    graph['text']= "np = %s" % np_list
-    graph_list.append(graph)
-
-#########################
-    graph = dict()
-    graph['type'] = 'graph'
-    #graph['logY'] = False
-    graph['data'] ={
-        'numa_hit': (numaTree, 'numa_hit:Time' ),
-        'numa_miss':(numaTree,'numa_miss:Time' ),
-        'numa_foreign': (numaTree, 'numa_foreign:Time' ),
-        'interleave_hit': (numaTree, 'interleave_hit:Time' ),
-        'local_node': (numaTree, 'local_node:Time' ),
-        'other_node': (numaTree, 'other_node:Time' )
-        }
-    graph['title'] = "NUMA Activity for athena MJ (Multi Jobs) %s" % cond
-    graph['xtitle']= "Time, sec since 00:00"
-    graph['ytitle']= "Nbr of hits/misses"
-    graph['text']= "np = %s" % np_list
-    graph_list.append(graph)
-
-
-    pads = list()
-
-    cpp = 1 #charts per pad 
-    ppc = 1 #pads per canvas
-
-    c.cd()
-    gStyle.SetOptStat(0);
-    gStyle.SetPalette(0);
-    gStyle.SetCanvasColor(0);
-    gStyle.SetFrameFillColor(0);
-    gStyle.SetMarkerStyle(21)
-    gStyle.SetMarkerColor(2)
-    gStyle.SetMarkerSize(0.5)
-    print ("gStyle.Set done")
-
-    title = TPaveLabel(0.1,0.98,0.9,1, "Athena MJ Plots");
-    title.SetFillColor(0); title.SetTextFont(40); 
-    #title.Draw();print ("title Drawn")
-
-    mgs =  list()  #List of TMultiGraphs
-    ls =   list()  #List of TLegends
-    gs =   list()  #List of TGraph
-
-    for j in range(ppc):
-        y_factor = 0.99;   x1 = 0.01; x2 = 0.99;  y1 = y_factor - (y_factor-0.01)*(j+1)/float(ppc); y2 = y_factor - (y_factor-0.01)*j/float(ppc)
-        print ("x1,y1,x2,y2",  x1, y1, x2, y2 )
-        pad = TPad("pad%i" % j, "pad%i" % j,   x1, y1, x2, y2,   0); pad.Draw()
-        pads.append(pad);
-    
-    num_cans = len(graph_list) // (cpp*ppc) if len(graph_list) % (cpp*ppc) == 0 else len(graph_list) // (cpp*ppc) + 1
-    graph_list += [None,]* (num_cans*cpp*ppc - len(graph_list))
-    print ("number of pages/canvases in report = ", num_cans)
-    
-    pdf_file = root_file
-    for s in ['merged.', '.py', '.root']:
-        pdf_file = pdf_file.replace(s, '')
-    pdf_file ="%s.pdf" % pdf_file
-
-    for i in range(num_cans):
-        for j in range(ppc):
-            graph = graph_list[ppc*i+j]
-            if graph is None:
-                continue
-            
-            if 'logY' in graph.keys():
-                if graph['logY']:
-                    pads[j].SetLogy()
-            else:
-                pads[j].SetLogy(0)
-            
-            pads[j].cd()
-            pads[j].SetRightMargin(0.2)
-            l = TLegend(0.82,0.20,0.99,0.89); ls.append(l) 
-            mg = TMultiGraph(); mgs.append(mg)
-            print ("graph=", graph)
-            gs.append(MakeMultiGraph(graph, mg, l))
-
-        c.Update()
-        if i == 0:
-            print ("pdf.start")
-            c.Print(pdf_file+'(', 'pdf') #start page
-        elif i < num_cans-1:
-            print ("pdf.body")
-            c.Print(pdf_file, 'pdf')    #body pages
-        else:
-            print ("pdf.end")
-            c.Print(pdf_file + ')', 'pdf') #end page
-        #c.SaveAs("%s.%i.png" % (pdf_file, i))
-        c.SaveAs("%s.%i.C" % (pdf_file, i))
-        for pad in pads:
-            pad.Clear()
-
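The page loop above uses ROOT's multi-page PDF convention: printing a canvas to "file.pdf(" opens the document, plain "file.pdf" appends a page, and "file.pdf)" closes it. A minimal standalone sketch of that idiom, assuming a working PyROOT installation (the function and file names are illustrative only):

from ROOT import TCanvas, TF1

c = TCanvas("c", "multi-page demo")
f1 = TF1("f1", "sin(x)", 0, 10)

f1.Draw()
c.Print("demo.pdf(", "pdf")    # first page: opens demo.pdf
f1.SetTitle("second page")
f1.Draw()
c.Print("demo.pdf", "pdf")     # middle page(s)
c.Print("demo.pdf)", "pdf")    # last page: closes demo.pdf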
-########## THE END ################    
-
diff --git a/Control/AthenaMP/python/tests/mpMonTools.py b/Control/AthenaMP/python/tests/mpMonTools.py
deleted file mode 100644
index 9d59eb9fff18ec136f68f65cc47f168a1753e935..0000000000000000000000000000000000000000
--- a/Control/AthenaMP/python/tests/mpMonTools.py
+++ /dev/null
@@ -1,2411 +0,0 @@
-# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
-
-# @file:    mpMonTools.py
-# @purpose: Library for mp performance monitoring of AthenaMP
-# @author:  Mous Tatarkhanov <tmmous@cern.ch>
-# @date:    December 2009
-
-from __future__ import print_function
-
-__version__ = "$Revision: 329336 $"
-__author__  = "Mous Tatarkhanov <tmmous@cern.ch>"
-
-import sys
-import os
-import signal
-import time
-import array
-import copy
-import six
-
-from future import standard_library
-standard_library.install_aliases()
-import subprocess
-
-
-T0 = time.time()
-numa_T0 = T0
-TIME_STEP = 10
-KB = (1 << 10)
-MB = (1 << 20)
-GB = (1 << 30)
-
-
-PAGESIZE=os.sysconf("SC_PAGE_SIZE")/1024 #KiB
-have_pss=0
- 
-mp_stat = dict() #dictionary which will hold all statistics: ProcDict objects, io-,mem-,cpu-dicts 
-pid_list = list() #book-keeping of  all spawned and launched  processes for later clean-up
-init_mem = list()
-init_numa = list()
-
-def init_mp_stat():
-    global mp_stat
-    global pid_list
-    global T0
-    global numa_T0
-    global init_mem
-    global init_numa
-
-    del mp_stat  #delete data from previous np
-    del pid_list #delete data from previous np
-    
-    #brand new mp_stat
-    mp_stat = dict()
-    pid_list = list()
-    mp_stat['io'] = dict()
-    mp_stat['mem'] = dict()
-    mp_stat['cpu'] = dict()
-    mp_stat['cpid'] = dict()
-    mp_stat['mpid'] = dict()
-    
-    mp_stat['numa'] = dict()
-    mp_stat['numa']['Time'] = list()
-    mp_stat['numa']['numa_hit'] = list()
-    mp_stat['numa']['numa_miss'] = list()
-    mp_stat['numa']['numa_foreign'] = list() 
-    mp_stat['numa']['interleave_hit'] = list()
-    mp_stat['numa']['local_node'] = list()
-    mp_stat['numa']['other_node'] = list()
-    
-    T0 = time.time()
-    init_mem = get_memstat()
-    init_numa = list(get_numastat())
-    init_numa[0] = numa_T0
-    
-    print ("initial_mem=%s" % init_mem)
-    print ("initial_numa=%s" % list(init_numa))
-    
-    init_mem = get_memstat()
-
-class ProcDict(dict):
-    """this is to store the process information"""
-    key_list = ['state', 'vmem', 'rss', 'sz', 'cpu_time', 'elap_time', 'private', 'shared']
-    pid = -1
-    start_time = -1
-    elap_time = -1
-    cpu_time = -1
-    vmem = -1
-    rss  = -1
-    nbr_rows = -1 # the number of rows
-
-    def __init__(self, pid=-1, start_time=-1, child = True):        
-        self.pid = pid
-        self.start_time = start_time
-        #self['np']=np
-
-        for key in self.key_list:
-            self[key] = list()
-        self.proc_ps_stat()
-        if child:
-            mp_stat["cpid"][self.pid] = self
-        else:
-            mp_stat["mpid"] = self
-        
-    def add_ps_line(self, line):
-        ps_str = line.split()
-        if self.start_time == -1:
-            self.start_time = _seconds(ps_str[5])
-        self['state'].append( ps_str[1] )
-        self['vmem'].append( int(ps_str[2])/1024 )
-        self['rss'].append( int(ps_str[3])/1024 )
-        self['sz'].append( int(ps_str[4])/1024 )
-        self['cpu_time'].append( _seconds(ps_str[6]) )
-        self['elap_time'].append( _seconds(ps_str[7]) )
-        
-        private = shared = -1
-        try:
-            pass
-            #private, shared = _get_shared_private_mem(self.pid)
-        except Exception as e:
-            print ("## Caught exception [%s] !!" % str(e.__class__))
-            print ("## What:", e)
-            print (sys.exc_info()[0])
-            print (sys.exc_info()[1])
-        self["private"].append(private)
-        self["shared"].append(shared)  
-    
-    def proc_ps_stat(self):
-        """ ps statistics for this process of pid """
-        out = subprocess.getoutput("ps --pid %i -o pid,state,vsize,rss,sz,start,cputime,etime" % self.pid)
-        lines = out.splitlines()
-        if len(lines) > 1:
-            self.add_ps_line(lines[1])
-        else:
-            print ("there is no process with pid: [%i]", self.pid)
-            return False
-        return True  
-    
-    def children_exist(self):
-        """ figures out weather the np kids were spawned for mother mpid""" 
-        sc, out = subprocess.getstatusoutput("ps --ppid %i -o pid,start" % self.pid)
-        if sc != 0:
-            #print ("   children_exist: Error, sc=%i" % sc)
-            return False
- 
-        ps_lines = out.splitlines()
-        nc = len(ps_lines)-1
-        print ("  children_exist().nbr of children = %i" % nc)
-        if nc > 0 :
-            print ("%i children workers exist. Creating ProcDicts..." % nc)
-            ps_lines.pop(0)
-            for line in ps_lines:
-                ps_str = line.split()
-                cpid = int(ps_str[0])
-                ProcDict(cpid, start_time = _seconds(ps_str[1]))
-                print ("..... child [%i] added" %  cpid)
-            return nc
-        else:
-            #print ("no children exist for parent: %s " % self.pid)
-            return False
-
-
-    def trim_stat(self): 
-        """trim finished process information at the end of statistics """
-        while True:
-            if (self['state'][-1] in ['S', 'Z', 'T', 'D', 'X']) or (self['vmem'][-1] == 0) or (self['rss'][-1] == 0): #sleeping, zombie, stopped, uninterruptible sleep, dead
-                for key in self.key_list:
-                    self[key].pop()
-            else:
-                break
-
-    def summarize(self):
-        """finalize summary information."""
-        self.elap_time = self['elap_time'][-1]
-        self.cpu_time = self['cpu_time'][-1]
-        self.vmem = self['vmem'][-1]
-        self.rss = self['rss'][-1]
-        self.nbr_rows = len(self['elap_time'])
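A minimal usage sketch for ProcDict, assuming the module can be imported (the import path below is a guess based on the package location, Control/AthenaMP/python/tests/mpMonTools.py) and that ps is available:

import os
from AthenaMP.tests import mpMonTools as mt   # hypothetical import path

mt.init_mp_stat()                              # reset the global mp_stat bookkeeping
p = mt.ProcDict(os.getpid(), child=False)      # samples ps once and registers as mp_stat['mpid']
p.proc_ps_stat()                               # take one more ps sample
p.summarize()                                  # freeze vmem/rss/cpu_time/elap_time from the last sample
print(p.vmem, p.rss, p.cpu_time, p.elap_time)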
-
-class CPSummary(dict): 
-    """ Children Processes Summary - convenient way of presenting to ROOT or other output methods"""
-    cpid_list=list()
-    np = 1
-    total_cpu_time = 0
-    total_cpu_time_x = 0
-    total_elap_time = 0
-    total_elap_time_x = 0
-    def __init__(self, np):
-        #self['np']=list()
-        self.np = np
-        self['pid']=list()
-        self['elap_time']=list()
-        self['elap_time_x']=list()
-        self['cpu_time']=list()
-        self['user_time']=list()
-        self['system_time']=list()
-        self['bootstrap_time']=list()
-        self['cpu_time_x']=list()
-        self['vmem']=list()
-        self['rss']=list()
-
-    def extract_summary(self, dir):
-        self.cpid_list = mp_stat["cpid"].keys()
-        for pid in self.cpid_list:
-            self['pid'].append(pid)
-            #self['np'].append(self.np)
-            self['elap_time_x'].append(mp_stat['cpid'][pid].elap_time)
-            self['cpu_time_x'].append(mp_stat['cpid'][pid].cpu_time)
-            self.total_cpu_time_x += mp_stat['cpid'][pid].cpu_time
-            self.total_elap_time_x += mp_stat['cpid'][pid].elap_time
-
-            self['vmem'].append(mp_stat['cpid'][pid].vmem)
-            self['rss'].append(mp_stat['cpid'][pid].rss)
-        for pid in self.cpid_list:
-            print ("  %s/%s exists ->" % (dir,pid), os.path.exists(os.path.join(dir,"%s" % pid))) #FIX: add the extraction from cpid's logs.
-            out_path = os.path.join(dir, "%s" % pid, 'stdout')
-            err_path = os.path.join(dir, "%s" % pid, 'stderr')
-
-            e_time = float(grepValue(out_path, "WORKER_ELAP_TIME"));            
-            u_time = float(grepValue(out_path, "WORKER_USER_TIME"));
-            s_time = float(grepValue(out_path, "WORKER_SYSTEM_TIME"));
-            c_time = u_time + s_time
-
-            self['elap_time'].append(e_time); self.total_elap_time += e_time;
-            self['user_time'].append(u_time)
-            self['system_time'].append(s_time)
-            self['cpu_time'].append(c_time); 
-            self.total_cpu_time += c_time;
-            
-            b_time = float(grepValue(out_path, "BOOTSTRAP_ELAP_TIME"));
-            self['bootstrap_time'].append(b_time); 
-                                      
-            
-        
-            
-
-class MPSummary(dict):
-    """ Mother Process Summary - convenient way of presenting data to ROOT..."""
-    mpid = dict()
-    np = 1
-    def __init__(self, np):
-        mpid = mp_stat['mpid']
-        self.np = np
-        #self['np'] = list()
-        #self['np'] = [ np, ]
-        self["m_par_time"] = list()
-        self["m_firstevent_time"]= list()
-        self["m_cpu_time"]= list()
-        self["m_elap_time"]= list()
-        self["x_init_time"]= list()#externally observed time
-        self["x_par_time"]= list() #externally observed time
-        self["x_fin_time"]= list()  #externally observed time
-        self["free_mem_spike"]= list()  #externally observed value
-        self["swap_change"] = list()
-        
-    def extract_summary(self, log):
-        self["m_par_time"] = [ float(grepValue(log, "PARALLEL_ELAP_TIME")),]
-        self['m_merging_time'] = [ float(grepValue(log, "MERGING_ELAP_TIME")),]
-        self["m_firstevent_time"] = [ float(grepValue(log, "FIRSTEVENT_ELAP_TIME")),]
-        self["m_cpu_time"] =  [ mp_stat["mpid"].cpu_time, ] #mp_stat["mpid"]["cpu_time"][-1]
-        self["m_elap_time"]=  [ mp_stat["mpid"].elap_time, ] #mp_stat["mpid"]["elap_time"][-1]
-        self["free_mem_spike"]= [ get_spike(sumList(mp_stat['mem']['kbmemfree'],  
-                                            sumList(mp_stat['mem']['kbbuffers'], mp_stat['mem']['kbcached']))
-                                            ), ]
-        self["swap_change"] = [ 0,]# get_spike(mp_stat['mem']['kbswpfree']), ]
-
-
-
-
-####### useful auxiliary functions ##########################
-
-def _seconds(time_str): #handles time in "H:M:S" and "M:S" format 
-    time_nums = time_str.split(":")
-    if (len(time_nums)==3):
-        return 3600*int(time_nums[0])+60*int(time_nums[1]) + int(time_nums[2])
-    elif (len(time_nums)==2):
-        return 60*int(time_nums[0]) + int(time_nums[1])
-    print ("ERROR: _seconds() returning - 0")
-    return 0
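For reference, the converter above behaves as follows (a minimal check, assuming it runs alongside the definition):

assert _seconds("1:02:03") == 3723   # 1 h, 2 min, 3 s
assert _seconds("02:03") == 123      # 2 min, 3 s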
-    
-def get_numastat():
-    sc,out=subprocess.getstatusoutput("numastat")
-    if sc != 0:
-        print ("mjMonTools.get_numastat: numastat is not working! zeroes will be returned")
-        return (0,0,0,0,0,0,0)
-    else:
-        lines = out.splitlines()
-        return (time.time(),
-                int(lines[1].split()[1]),
-                int(lines[2].split()[1]),
-                int(lines[3].split()[1]),
-                int(lines[4].split()[1]),
-                int(lines[5].split()[1]),
-                int(lines[6].split()[1])
-                )
-        
-def save_numastat():
-    current_numa = get_numastat()
-    #print ("current_numa=%s" % list(current_numa))
-
-    _numa_stat = (
-        mp_stat['numa']['Time'],
-        mp_stat['numa']['numa_hit'],
-        mp_stat['numa']['numa_miss'],
-        mp_stat['numa']['numa_foreign'],
-        mp_stat['numa']['interleave_hit'],
-        mp_stat['numa']['local_node'],
-        mp_stat['numa']['other_node']
-        )
-    
-    change_numa = subList(current_numa,init_numa)
-    print ("NUMA_CHANGE=%s" % change_numa)
-    return [_numa_stat[i].append(change_numa[i]) for i in range(len(change_numa))]
-    
-def print_memstat(msg =""):
-    mem = get_memstat()
-    t = time.time() - T0;
-    save_numastat()
-    print (msg + " [T=%i sec]" % t + " USED[%i Mb][change: %i Mb] - FREE[%i Mb][change: %i Mb]" % ( 
-        mem["USED"], mem["USED"]-init_mem["USED"], mem["FREE"], mem["FREE"]-init_mem["FREE"]))
-
-def get_memstat():
-    out=subprocess.getoutput("free -m")
-    mem = dict()
-    lines = out.splitlines()
-    mem_strs = lines[1].split()
-    mem['used'] = int(mem_strs[2])
-    mem['free'] = int(mem_strs[3])
-    mem['cached'] = int(mem_strs[5])
-    mem['buffers'] = int(mem_strs[6])
-    mem_strs = lines[2].split()
-    mem['USED'] = int(mem_strs[2])
-    mem['FREE'] = int(mem_strs[3])
-    #print ("mem: [%s Mbs]" %  mem)
-    return mem
-
-init_mem = get_memstat()
-
-def meanList(num_list):
-    """finds average value of the number list"""
-    if len(num_list) == 0:
-        print ("meanList: WARNING - empty list, returning 0.0")
-        return 0.0
-    return float(sum(num_list)) / len(num_list)
-    
-def sumList(l1, l2):
-    """sum up values of two lists l1 + l2"""
-    if len(l1) != len(l2):
-        print ("sumList: WARNING: len(l1) does not equal len(l2)")
-        n = len(l1) if len(l2) > len(l1) else len(l2)
-    else:
-        n = len(l1)
-
-    sum = list()
-    for i  in range(n):
-        sum.append(l1[i] + l2[i])
-    return sum
-
-def subList(l1, l2): 
-    """subtract values of two lists: l1 - l2"""
-    if len(l1) != len(l2):
-        print ("subList: WARNING: len(l1) does not equal len(l2)")
-        n = len(l1) if len(l2) > len(l1) else len(l2)
-    else:
-        n = len(l1)
-
-    sub = list()
-    for i  in range(n):
-        sub.append(l1[i] - l2[i])
-    return sub
-        
-def get_spike(l):
-    #print (" get_spike:",)
-    #print (" e0 = ",  l[0]/1024, "Mb",  end='')
-    #print (" eN = ",  l[-1]/1024, "Mb", end='')
-    #print (" max = ", max(l)/1024, "Mb", end='' )
-    #print (" min = ", min(l)/1024, "Mb",  end='')
-    #print (" e0 - eN = ",  (l[0] - l[-1])/1024, "Mb", end='')
-    #print (" e0 - min = ", (l[0] - min(l))/1024, "Mb", end='')
-    #print (" eN - min = ", (l[-1] - min(l))/1024, "Mb", end='')
-    #print (" return  max - min =", (max(l) - min(l))/1024, "Mb")
-    return max(l) - min(l)   
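A few quick values for the list helpers above (a minimal check, run next to the definitions):

assert meanList([1, 2, 3]) == 2.0
assert sumList([1, 2], [3, 4]) == [4, 6]
assert subList([3, 4], [1, 2]) == [2, 2]
assert get_spike([5, 2, 9, 4]) == 7    # max - min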
-
-def prepare_mp_stat():
-    """ this function construct "cp" dict from "cpid" dict for ROOT formation convenience
-    converting: cpid:pid:vmem,rss,cpu-time,... -> cp:pid,vmem,rss,cputime,...  """
-    mp_stat['cp'] = mp_stat['cpid']
-    del mp_stat['cpid']
-    mp_stat['cpid']=dict()
-    mp_stat['cpid']['pid'] = list()
-        
-    for key in ProcDict.key_list:
-        mp_stat['cpid'][key]=list()
-            
-    for pid in mp_stat['cp'].keys():
-        mp_stat['cpid']['pid'] += [pid,] * mp_stat['cp'][pid].nbr_rows
-        for key in ProcDict.key_list:
-            mp_stat['cpid'][key] += mp_stat['cp'][pid][key]
-    del mp_stat['cp']
-
-
-def print_summary():
-    print ("===== MOTHER PROCESS SUMMARY =====")
-    for (k, v) in mp_stat['mp_summary'].items():
-        print ("mp_summary['%s']=%s " % (k, v))
-
-    print ("===== CHILDREN PROCESS SUMMARY ===")
-    for (k, v) in mp_stat['cp_summary'].items():
-        print ("cp_summary['%s']=%s " % (k, v))
-
-        
-################## children tools ######################
-def children_working(ppid):
-    """ ps statistics for children of ppid. returns False if no children exist """
-    out = subprocess.getoutput("ps --ppid %i -o pid,state,vsize,rss,sz,start,cputime,etime" % ppid)
-    ps_lines = out.splitlines()
-    ps_lines.pop(0)
-        
-    if len(ps_lines) > 0:
-        for line in ps_lines:
-            ps_str = line.split()
-            pid = int(ps_str[0])
-            if pid in mp_stat["cpid"].keys():
-                mp_stat["cpid"][pid].add_ps_line(line)
-            #print ("child_stat.appended for kid: %i" % pid       )
-        return True #ps returns something -> children still exist   
-    else:
-        print (" mpMonTools.children_working: no children exist for parent: %i" % ppid)
-        return False #ps returns nothing -> children either weren't born or died.           
-    return False
-
-def summarize_children_stat(): 
-    """trim finished worker information at the end of statistics """
-    for pid in mp_stat["cpid"].keys():
-        mp_stat['cpid'][pid].trim_stat()
-        mp_stat['cpid'][pid].summarize()
-
-def summarize_proc_stat(): 
-    """trim finished worker information at the end of statistics """
-    mp_stat['mpid'].summarize()
-    for pid in mp_stat['cpid'].keys():
-        mp_stat['cpid'][pid].trim_stat()
-        mp_stat['cpid'][pid].summarize()
-
-def children_born(log, mpid, np):
-    """ figures out weather the np kids were spawned for mother mpid""" 
-    sc,out = subprocess.getstatusoutput("ps --ppid %i -o pid,start" % mpid)
-    if sc != 0:
-        print ("   mpMonTools.children_born: no kids yet... Error, sc=%i" % sc)
-        return False
-
-    ps_lines = out.splitlines()
-    #print ("ps_lines=", ps_lines)
-    nc = len(ps_lines)-1
-    
-    print (" children_exist: nbr of children = [%i]" % nc)
-    if grepValue(log, "FIRSTEVENT_ELAP_TIME") is None:
-        return False
-    else:
-        pass
-
-    if nc>=np : #nbr of children is equal to nbr of procs required 
-        print ("%i children workers forked! Registering them (creating ProcDicts) ..." % np)
-        ps_lines.pop(0)
-        for line in ps_lines:
-            ps_str = line.split()
-            pid = int(ps_str[0])
-            print ("child [%i] born" % pid, )
-            if grepExist(log, "%i-%i" % (mpid, pid)):
-                ProcDict(pid, start_time = _seconds(ps_str[1]))
-                print ("..... child WORKER [%i] added" %  pid)
-        return True
-    else:
-        print ("no children exist for parent: %s " % mpid)
-    return False
-
-
-################ LOG VARIABLES PROCESSING: Grepping ##################
-def grepExist(log, field):
-    """grep check for the existance of the unique field in the log 
-    """
-    #print ("grepping %s in %s" % (field, log))
-    sc,out = subprocess.getstatusoutput( "grep %s %s" % (field, log))
-    if sc != 0:
-        print ("grepExist: FALSE: grep %s %s failed with sc=%i" % (field, log, sc))
-        return False
-    line = out.splitlines()[0]
-    print ("grepExist: TRUE: sc=%i grepped-line=%s" % (sc,line))
-    return True
-    
-def grepValue(log, field, sep='='):
-    """grep unique field in the log and find corresponding value by regexp 
-       Example: out = 'Py:EventLoopMgr      INFO EvtMax  =  123456  something'      
-       grepValue(log, "EvtMax", sep="=") = '123456' 
-    """
-    sc,out = subprocess.getstatusoutput( "grep %s %s" % (field, log))
-    if sc!=0:
-        print ("grepping %s in %s failed" % (field, log))
-        return None
-    line = out.splitlines()[0]
-    print ("grepped-line=%s" % line)
-    import re 
-    vexpr = '\\s*'+ sep+ '\\s*(\\d*\\.?\\d+)' #vexpr = '\\s*'+ sep+ '\\s*(\\d+)'
-    m = re.search( field + vexpr, line)
-    value = m.group(1)
-    print ("grepValue:[%s], line=%s" % (value,line))
-    return value
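A small, self-contained way to exercise grepValue (a sketch, assuming grep is available and that this runs next to the definition above):

import os, tempfile

with tempfile.NamedTemporaryFile('w', suffix='.log', delete=False) as f:
    f.write("Py:EventLoopMgr      INFO EvtMax  =  123456  something\n")
    log = f.name

print(grepValue(log, "EvtMax"))   # -> '123456' (the value is returned as a string)
os.remove(log)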
-
-def grepValueList(log, search_str, field ='', sep='='):
-    """grep unique field in the log and find corresponding value by regexp 
-       Example: out = 'Py:EventLoopMgr      INFO EvtMax  =  123456  something'      
-       grepValue(log, "EvtMax", sep="=") = '123456' 
-    """
-    sc,out = subprocess.getstatusoutput( "grep %s %s" % (search_str, log))
-    if sc!=0:
-        print ("grepping %s in %s failed with sc=%s" % (search_str, log, sc), "out=%s" % out)
-        return []
-
-    if field =='':
-        field = search_str
-
-    value_list = []
-    
-    #print ("grepped lines = %s" % out)
-
-    import re 
-    vexpr = '\\s*'+ sep+ '\\s*(\\d*\\.?\\d+)'
-    for line in out.splitlines():
-        print ("grepped-line=%s" % line)
-        m = re.search( field + vexpr, line)
-        value = m.group(1)
-        print ("grepValue:[%s], line=%s" % (value,line))
-        value_list.append(value)
-    return value_list
-
-def grepPath(log, field, sep=':'):
-    """grep unique field in the log and find corresponding value by regexp 
-       Example: out = 'Py:EventLoopMgr      INFO master workdir: /tmp/athena-mp-tmp-tmmous/22590-1261097934  smthng'      
-       grepPath(log, "workdir", sep=":") = '/tmp/athena-mp-tmp-tmmous/22590-1261097934' 
-    """
-    sc,out = subprocess.getstatusoutput( "grep %s %s" % (field, log))
-    if sc!=0:
-        print ("grepping %s in %s failed" % (field, log))
-        return None
-    line = out.splitlines()[0]
-    import re 
-    vexpr = '\\s*'+ sep+ '\\s*([^\\s]+)'
-    m = re.search( field + vexpr, line)
-    path = m.group(1)
-    return path
-
-
-def grepWorkerPathes(log, field = "WORKER_STDOUT", sep=':'):
-    sc,out = subprocess.getstatusoutput( "grep %s %s" % (field, log))
-    if sc!=0:
-        print ("grepping %s in %s failed" % (field, log))
-        return None
-
-    workers = dict()
-
-    #pids = grepValueList(log, "WORKER_PID")
-    #if len(pids)==0:
-    #    return []
-
-    import re
-    vexpr = '\\s*'+ sep+ '\\s*([^\\s]+)'
-    
-    for  line in out.splitlines():
-        rout = re.search( field + vexpr, line)
-        rpid = re.search( 'WORKER_PID'  +  '\\s*'+ '=' + '\\s*([^\\s]+)', line)
-        path = rout.group(1)
-        pid = rpid.group(1)
-
-        workers[pid] =path
-
-    return workers
-
-def grepWorkerStat(log, search_str = "WORKER_EVENT_STAT", fields=['elap_time',],  sep='='):
-    sc,out = subprocess.getstatusoutput( "grep %s %s" % (search_str, log))
-    if sc!=0:
-        print ("grepping %s in %s failed" % (search_str, log))
-        return None
-
-    worker_stat = dict()
-    for field in fields:
-        worker_stat[field]=list()
-
-    #pids = grepValueList(log, "WORKER_PID")
-    #if len(pids)==0:
-    #    return []
-
-    import re
-    vexpr = '\\s*'+ sep+ '\\s*([^\\s]+)'
-    
-    for  line in out.splitlines():
-        for field in fields:
-            rfield = re.search( field + vexpr, line)
-            value = rfield.group(1)
-            worker_stat[field].append( value ) 
-
-    return worker_stat
-
-def extractWorkersStat(mlog):
-    """extract event based statistics of the WORKER using methods implemented above"""
-    paths_dict = grepWorkerPathes(mlog)
-    worker_stat = dict()
-    for pid,path in six.iteritems(paths_dict):
-        worker_stat[pid] = grepWorkerStat(path, fields=['evt', 'cpu', 'elap_time', 'elap_os_time', 'system_time', 'user_time'])
-    return worker_stat
-
-def writeOutWorkersStat(mlog):
-    ws = extractWorkersStat(mlog)
-    import os
-    ppid = grepValue(mlog, "PARENT_PID")
-    dir = "stat-%s" % ppid
-
-    if os.path.exists(dir):
-        import shutil 
-        shutil.rmtree(dir)
-    
-    os.mkdir("stat-%s" % ppid )
-    os.chdir("stat-%s" % ppid )
-
-    for pid,stat in six.iteritems(ws):
-        rows = list() 
-        for i in range( 1 + len(stat['evt'])):
-            rows.append('');
-
-        for field,values in six.iteritems(stat):
-            rows[0] = "%s\t%s" % (rows[0], field)
-            i=1
-            for value in values:
-                rows[i] = "%s\t%s" % (rows[i], value)
-                i+=1
-        f_name = "WORKER_%s_STAT.txt" % pid
-        f = open(f_name, 'w')
-        for row in rows:
-            f.write(row + "\n")
-        
-        del rows
-        f.close()
-        print ("  worker-stat file  ./%s/%s created." % (dir, f_name) )
-
-    os.chdir("..")
-
-
-############# related to  athena-mp #########################
-def launch_athenaMP2(cmd, job, np, ne):
-    """"launching cmd: athena.py --nprocs=$np -c EvtMax=$ne $jobo  1> mp.output/stdout_$jobo.$np.$ne   2> mp.output/stderr_$jobo.$np.$ne""" 
-    
-    print ("job command and options as template: %s" % cmd)
-    from string import Template
-    arg_template= Template(cmd)
-    arg_str = arg_template.substitute(MAXEVT=np*ne, NPROCS=np, JOBO=job)
-    proc_args = arg_str.split();
-    
-    output_dir = "mp.output"
-    if not os.path.isdir(output_dir):
-        os.mkdir(output_dir)  
-    
-    stdout_name = os.path.join(output_dir, "stdout.mp.%s.%i.%i"  % (job,np,ne))
-    stderr_name = os.path.join(output_dir, "stderr.mp.%s.%i.%i"  % (job,np,ne))
-
-    if not os.path.exists(job):
-        print ("job options file %s doesn't exist" % job)
-        return None
-    import shutil
-    shutil.copy(job, output_dir)
-    
-    
-    STDOUT_FILE = open(stdout_name , "w")
-    STDERR_FILE = open(stderr_name,  "w")
-    
-    print ("<<<LAUNCH>>>: %s" % proc_args)
-    mproc = subprocess.Popen( proc_args, 
-                             stdout=STDOUT_FILE, 
-                             stderr=STDERR_FILE,
-                             cwd = output_dir,
-                             shell=False, 
-                             close_fds = True)
-
-    pid_list.append(mproc.pid)
-    STDOUT_FILE.close();  STDERR_FILE.close()
-    return mproc
-
-def launch_athenaMP(jobo, np, ne):
-    """"launching cmd: athena.py --nprocs=$np -c EvtMax=$ne $jobo  1> mp.output/stdout_$jobo.$np.$ne   2> mp.output/stderr_$jobo.$np.$ne""" 
-    
-    output_dir = "mp.output"
-    stdout_name = os.path.join(output_dir, "stdout.mp.%s.%i.%i"  % (jobo,np,ne))
-    stderr_name = os.path.join(output_dir, "stderr.mp.%s.%i.%i"  % (jobo,np,ne))
-    print ("launching: athena.py --nprocs=%i -c EvtMax=%i %s \
-        1> %s   2> %s" % (np, np*ne, jobo, stdout_name, stderr_name))
-
-    if not os.path.exists(jobo):
-        print ("job options file doesn't exist")
-        return None
-
-    if not os.path.isdir(output_dir):
-        os.mkdir(output_dir)
-        
-    STDOUT_FILE = open(stdout_name , "w")
-    STDERR_FILE = open(stderr_name,  "w")
-    mproc = subprocess.Popen(["athena.py", "--nprocs=%i" % np,  "-c", "EvtMax=%i" % (np*ne) ,  "../%s" % jobo], 
-                             executable='athena.py', 
-                             stdout=STDOUT_FILE, 
-                             stderr=STDERR_FILE,
-                             cwd = output_dir,
-                             shell=False, 
-                             close_fds = True)
-    pid_list.append(mproc.pid)
-    STDOUT_FILE.close();  STDERR_FILE.close()
-    return mproc
-
-############# TERMINATOR AREA: stopping, killing, terminating processes ###############
-
-def stop_proc(proc):
-    """ terminate/kill a process by either proc_object or pid"""
-    pid = 0
-    try:
-        if type(proc) is int:
-            pid = proc
-            os.kill(pid, signal.SIGKILL); #os.waitpid(pid, 0);
-        else:
-            pid = proc.pid 
-            if proc.poll() is None: os.kill(pid, signal.SIGKILL); 
-            proc.wait();
-        print ("process %s  terminated" % pid )
-    except Exception as e:
-        print ("## Caught exception [%s] !!" % str(e.__class__),"  ## What:",e)
-        print (sys.exc_info()[0], sys.exc_info()[1])
-        return False
-    pid_list.remove(pid)
-    return True
-
-def stop_proc_tree(pid):
-    """ Terminate/kill recursively process tree by pid. Be precautious using this!"""
-    out = subprocess.getoutput("ps --ppid %i" % pid)
-    lines = out.splitlines(); lines.pop(0) #remove header
-    try:
-        if len(lines) > 0: 
-            for line in lines:
-                cpid = int(line.split()[0])
-                print ("child [%i:%i] being terminated..." % (pid, cpid))
-                stop_proc_tree(cpid)
-        if  pid in pid_list: pid_list.remove(pid) 
-        os.kill(pid, signal.SIGKILL); #os.waitpid(pid, 0);
-        print ("[%i] - terminated." % pid )
-    except Exception as e:
-        print ("[%i] - dead #while killing caught exception [%s] !!" % (pid, str(e.__class__)),"  ## What:",e)
-        #print (sys.exc_info()[0], sys.exc_info()[1])
-        return False
-    return True
-
-
-def stop_athenaMP(mproc):
-    """ terminate/kill a process by either proc_object or pid"""
-    try:
-        pid = 0
-        if type(mproc) is int:
-            pid = mproc
-            stop_proc_tree(pid) # killing athena-mp mproc-tree
-        else:
-            pid = mproc.pid;
-            if mproc.poll() is None: os.kill(pid, signal.SIGKILL); 
-            mproc.wait();
-        print ("process %s  terminated" % pid )
-        return True
-    except Exception as e:
-        print ("## Caught exception [%s] !!" % str(e.__class__),"  ## What:",e)
-        print (sys.exc_info()[0], sys.exc_info()[1])
-        return False
-    return False
-
-
-
-
-############# sar related wrappers ###########################
-
-def launch_sar(log, time_step):
-    """
-      launch sar with 
-     `sar -bBcdqrRuvwWy -I SUM -I XALL -n ALL -P ALL` = `sar -A`
-    """
-    sar_args = [ "sar", "-bBrvwu", "-o", log, "%i"% time_step, "0" ]
-    print ("launching: %s %s %s %s %s %s" % tuple(sar_args)    )
-    sc,out = subprocess.getstatusoutput('sar -b 1 1')
-    if sc!=0:
-        print ('launching failed - sar does not work on this system - please install it if available!')
-        return None
-    FNULL = open('/dev/null', 'w')
-    proc = subprocess.Popen(sar_args, 
-                            executable="sar",  
-                            stdout = FNULL,
-                            stderr = subprocess.STDOUT,
-                            shell=False, 
-                            close_fds = True
-                            )
-
-    FNULL.close()
-    print ("sc=%i" % sc)
-    print ("out=%s" % out)
-
-    pid_list.append(proc.pid)
-    return proc
-        
-def _num(str):
-    """try converting str into int or float if fails return the same string"""
-    try:
-        if "." in str:
-            out = float(str)
-        else:
-            out = int(str) 
-    except ValueError:
-        return str
-    return out
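The conversion helper above behaves as follows (minimal check):

assert _num("42") == 42          # plain integer string -> int
assert _num("3.14") == 3.14      # string with a dot -> float
assert _num("n/a") == "n/a"      # anything unparsable is returned unchanged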
-        
-def get_sar_stat(log, key):
-    """ get statistics by issueing this cmd: `sar -key $log`"""
-    print ('launching cmd: sar %s -f %s' % (key, log)        )
-    sc,out = subprocess.getstatusoutput("sar %s -f %s" % (key,log) )
-    if sc!=0:
-        print ("launching failed - either file %s does not exist or sar does not work on this system - please check!" % log)
-        return None
-    sar_dict = dict()
-    #print"(##################################"; print "out=\n", out; print "################################################")
-
-    lines = out.splitlines()
-    print ("trim1=", lines.pop(0))#trimming output
-    print ("trim2=", lines.pop(0))#trimming output
-
-    avg_line = lines.pop(); #trimming avg line at the end 
-    print ("avg_line1=", avg_line)
-    
-    hstrs = lines.pop(0).replace('%', 'p').replace('/', 'p').split() #trimming header strings and replacing '%' and '/' to satisfy ROOT 
-    hstrs[0] = "Time"
-    print ("Sar statistics fields found: ", hstrs)
-
-    #print("##################################"); print ("lines=\n", lines; print "################################################)"
-    
-    for hstr in hstrs:
-        sar_dict[hstr] = list()
-    for line in lines:
-        lstrs = line.split()
-        print ("lstrs=", lstrs)
-        for i,hstr in enumerate(hstrs):
-            if i!=0:
-                sar_dict[hstr].append( _num(lstrs[i]) )
-            else:
-                sar_dict[hstr].append(_seconds(lstrs[i])) #time conversion from "H:M:S" --> number of seconds
-    return sar_dict 
-
-
-
-### Tools for process statistics #######
-
-def get_full_sar_stat(log):
-    mp_stat["io"] =  get_sar_stat(log, "-b")
-    mp_stat["mem"] = get_sar_stat(log, "-r")
-    mp_stat["cpu"] = get_sar_stat(log, "-u")
-    #return mp_stat
-  
-  
-  ##############sysstat and other linux commands wrappers########
-
-def _meminfo():
-    out=subprocess.getoutput("cat /proc/meminfo")
-    lines = out.splitlines()
-    mem=dict()
-    Kb = 1024
-    mem['total']= int(lines[0].split()[1]) / Kb
-    mem['free'] = int(lines[1].split()[1]) / Kb
-    mem['buffers']= int(lines[2].split()[1]) / Kb
-    mem['cached'] = int(lines[3].split()[1]) / Kb
-    print ("meminfo.real_total: [%i Mb]", mem['total'] )
-    print ("meminfo.free: [%i Mb]", mem['free'])
-    print ("meminfo.cached: [%i Mb]", mem['cached'] )
-    print ("meminfo.buffers: [%i Mb]", mem['buffers'])
-    return mem
-
-def _get_iostat():
-    out=subprocess.getoutput("iostat")
-    io = dict()
-    lines = out.splitlines()
-    strs = lines[1].split()
-    io['used'] = int(strs[2])
-    mem = dict()
-    mem['free'] = int(strs[3])
-    mem['cached'] = int(strs[5])
-    mem['buffers'] = int(strs[6])
-    mem_strs = lines[2].split()
-    mem['USED'] = int(strs[2])
-    mem['FREE'] = int(strs[3])
-    #print ("mem: [%s Mbs]" %  mem)
-    return io
-def _used_mem():
-    out=subprocess.getoutput("free -m")
-    mem_strs = out.splitlines()[2].split()
-    used_mem = int(mem_strs[2]) 
-    print ("used_mem: [%i Mb]" % used_mem)
-    return used_mem            
-def _free_mem():
-    out=subprocess.getoutput("free -m")
-    mem_strs = out.splitlines()[2].split()
-    free_mem  = int(mem_strs[3]) 
-    print ("free_mem: [%i Mb]" % free_mem)
-    return free_mem
-
-def _launch_iostat(log, time_step):
-    print ('launching cmd: iostat $TIME_STEP -d -x > iostat.$jobo.$np.$ne &')
-    sc,out = subprocess.getstatusoutput( "iostat" )
-    if sc!=0:
-        print ('launching failed - iostat does not work on this system')
-        return None
-    f_iostat = open(log, "w")
-    iostat_proc = subprocess.Popen(
-        [ "iostat",  "%i" % time_step, "-d", "-x"], 
-        executable="iostat", 
-        stdout = f_iostat, 
-        shell=False, 
-        close_fds = True)
-
-    f_iostat.close()
-    return iostat_proc  
-def _launch_vmstat(log, time_step):
-    print ('launching cmd: vmstat $TIME_STEP -n > vmstat.$jobo.$np.$ne &'        )
-    sc,out = subprocess.getstatusoutput( "vmstat -V" )
-    if sc!=0:
-        print ('launching failed - vmstat does not work on this system')
-        return None
-    file = open(log, "w")
-    proc = subprocess.Popen([ "vmstat", "%i" % time_step, "-n" ], 
-                            executable="vmstat",  
-                            stdout = file, 
-                            shell=False, 
-                            close_fds = True)
-    file.close()
-    return proc
-def __create_childProcDicts(ppid):
-    """ creates stats dictionary with """
-    out = subprocess.getoutput("ps --ppid %i -o pid, start" % ppid)
-    ps_lines = out.splitlines()
-    ps_lines.pop(0)
-    
-    if len(ps_lines) > 0:
-
-        for line in ps_lines:
-            ps_str = line.split()
-            pid = int(ps_str[0])
-            ProcDict(pid, start_time = _seconds(ps_str[1]))
-            print ("ppid: [%i]: child [%i] added" % (ppid, pid))
-    else: 
-        print ("no children exist for parent: %s " % ppid)
-
-
-#######  adopted from AthenaMP/PyComps ###################
-def print_shared_private(pid):
-    print ("CPROC-SHARED_PRIVATE_MEM for pid: [%i]" % pid)
-    for line in open("/proc/%i/status" % pid):
-        if line.startswith('Vm'):
-                print(line.strip())
-    private,shared=_get_shared_private_mem(pid)
-    print ("pid:[%i] ===> private: %s MB | shared: %s MB" % (pid, private/1024., shared /1024.))
-def _get_shared_private_mem(pid='self'):
-    """ Finds proc's shared and private memory size from /proc/pid/statm  and /proc/pid/smaps dir
-       Coppied from AthenaMP/PyComps.py"""
-    global have_pss
-    private_lines=[]
-    shared_lines=[]
-    pss_lines=[]
-    statm_name = "/proc/%s/statm" % pid
-    smaps_name = "/proc/%s/smaps" % pid
-    rss=int(open(statm_name).readline().split()[1])*PAGESIZE
-    if os.path.exists(smaps_name): #stat
-        for line in open(smaps_name).readlines(): #open
-            if line.startswith("Shared"):
-                shared_lines.append(line)
-            elif line.startswith("Private"):
-                private_lines.append(line)
-            elif line.startswith("Pss"):
-                have_pss=1
-                pss_lines.append(line)
-        shared=sum([int(line.split()[1]) for line in shared_lines])
-        private=sum([int(line.split()[1]) for line in private_lines])
-        #Note shared + private = rss above
-        #The rss in smaps includes video card mem etc.
-        if have_pss:
-            pss_adjust=0.5 #add 0.5KiB as the average error due to truncation
-            Pss=sum([float(line.split()[1])+pss_adjust for line in pss_lines])
-            shared = Pss - private
-    #elif (2,6,1) <= kv <= (2,6,9):
-    #    shared=0 #lots of overestimation, but what can we do?
-    #    private = rss
-    else:
-        shared=int(open(statm_name).readline().split()[2])
-        shared*=PAGESIZE
-        private = rss - shared
-    return (private, shared)
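A quick usage sketch for the helper above (assuming a Linux /proc filesystem; the function reports KiB, converted to MiB here as in print_shared_private):

private, shared = _get_shared_private_mem()   # defaults to /proc/self
print("private: %.1f MiB  shared: %.1f MiB" % (private / 1024., shared / 1024.))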
-
-
-#################################################
-############# ROOT Output #######################
-
-def _createRootFile(outName):
-    """creating carcasus of report ROOT file"""
-    print ("create ROOT file...")
-    from PerfMonAna.PyRootLib import importRoot
-    from ROOT import TTree
-    import array
-    ROOT = importRoot( batch = True )
-    outFile = ROOT.fopen( outName, 'RECREATE' )
-    outFile.cd("/")
-    
-    i = array.array( 'i', [0] )
-    d = array.array( 'f', [0.] )
-
-    outFile.cd()
-    tree =  TTree( "io", "IO statistics tree")
-    tree.Branch('Time', i, 'int/I')
-    tree.Branch('tps',d,'float/D') #transfers per second
-    tree.Branch('rtps',d,'float/D') #read transfers per second
-    tree.Branch('wtps',d,'float/D') #write transfers per second
-    tree.Branch('breadps',d,'float/D')  #blocks read per second
-    tree.Branch('bwrtnps',d,'float/D')  #blocks written per second
-    tree.Write();
-    
-    outFile.cd()
-    tree =  TTree( "mem", "Mem statistics tree")
-    tree.Branch('Time', i, 'int/I')
-    tree.Branch('kbmemfree', i, 'int/I') #free in kB
-    tree.Branch('kbmemused', i, 'int/I') #used in kB
-    tree.Branch('pmemused', d, 'float/D') #used in kB
-    tree.Branch('kbbuffers', i, 'int/I') #buffers in kB
-    tree.Branch('kbcached', i, 'int/I') #cached in kB
-    tree.Branch('kbswpfree', i, 'int/I') #swap free in kB
-    tree.Branch('kbswpused', i, 'int/I') #swap used in kB
-    tree.Branch('pswpused', d, 'float/D') 
-    tree.Branch('kbswpcad', i, 'int/I')
-    tree.Write(); 
-    
-    outFile.cd()
-    tree =  TTree("cpu", "CPU statistics tree")
-    tree.Branch('Time', i, 'int/I')
-    tree.Branch('CPU', i,'int/I')    #CPU number or ALL
-    tree.Branch('puser',d,'float/D')  # CPU utilization percentage at user level
-    tree.Branch('pnice',d,'float/D')  # CPU utilization at nice level
-    tree.Branch('psystem',d,'float/D')  # CPU utilization at system level
-    tree.Branch('piowait',d,'float/D')  # CPU idle percentage due to IO-wait
-    tree.Branch('psteal',d,'float/D')  # virtual processes wait percentage
-    tree.Branch('pidle',d,'float/D')  # CPU idling due to non IO reasons
-    tree.Write();
-
-    outFile.cd()
-    tree =  TTree( "cp", "Children processes statistics")
-    tree.Branch('Time', i, 'int/I') # start date/time
-    tree.Branch('pid', i , 'int/I') # pid of worker process
-    tree.Branch('state', i , 'int/I') #state of the process at the moment (important at the end of process)
-    tree.Branch('vmem', i, 'int/I')
-    tree.Branch('rss', i, 'int/I') 
-    tree.Branch('sz', i, 'int/I') # Size in physical pages of the core image of the process. This includes text, data, and stack space.
-    tree.Branch('shared', i, 'int/I')# shared memory as extracted from /proc/pid/smaps
-    tree.Branch('private', i, 'int/I')# private memory as extracted from /proc/pid/smaps
-    tree.Branch('cpu_time', i,'int/I')# cpu_time 
-    tree.Branch('elap_time', i, 'int/I')# elapsed time (Wall Clock time)
-    tree.Write()
-    
-    outFile.cd()
-    tree =  TTree( "cp_summary", "children processes summary tree")
-    tree.Branch('pid', i, 'int/I')
-    tree.Branch('elap_time', i , 'int/I')
-    tree.Branch('cpu_time', i, 'int/I')
-    tree.Write();
-    
-    outFile.cd()
-    tree =  TTree( "mpid", "Mother Process statistics tree")
-    tree.Branch('pid', i , 'int/I')
-    tree.Branch('state', i , 'int/I')
-    tree.Branch('vmem', i, 'int/I')
-    tree.Branch('rss', i, 'int/I')
-    tree.Branch('sz', i, 'int/I')
-    tree.Branch('shared', i, 'int/I')
-    tree.Branch('private', i, 'int/I')
-    tree.Branch('cpu_time', i,'int/I')
-    tree.Branch('elap_time', i, 'int/I')
-    tree.Write()
-
-    outFile.cd()
-    tree =  TTree( "mp_summary", "Mother process summary tree")
-    tree.Branch('pid', i, 'int/I')
-    tree.Branch('m_elap_time', i , 'int/I') #mother process total elapsed time
-    tree.Branch('m_cpu_time', i, 'int/I') #mother process CPU-time (children-worker's time not included)
-    tree.Branch('m_firstevent_time', i, 'int/I') #time spent on firstEvents(after init before forking) - measured inside mother process
-    tree.Branch('m_par_time', i, 'int/I') #parallel time - time from forking till collecting data - measured inside mother process
-    tree.Branch('x_init_time', i, 'int/I') #externally  observed MP SERIAL-INIT executin time (from start till forking) 
-    tree.Branch('x_par_time', i, 'int/I') #externally observed MP PARALLEL execution time (from forking till joining)
-    tree.Branch('x_fin_time', i, 'int/I') #externally observed MP FINALIZATION time (from joining till end of job)
-    tree.Branch('free_mem_spike', i, 'int/I') #spike in "free+buffers+cached" at the end of MP-process -> physical used memory released.
-    tree.Branch('swap_change', i, 'int/I') # change in swap, should be zero if not means MP causing a swapping 
-    #tree.Branch('used_mem_spike', i, 'int/I')
-    tree.Branch('event_proc_rate', d, 'float/D') #event processing rate defined as ne*60*m_par_time/np = events/process/min
-    tree.Write()
-
-    outFile.cd()
-    outFile.Write()
-    outFile.Close()
-    print ("create ROOT file... [DONE]" )
-    return
-
-def createRootFile(outName, np):
-    """creating structure of ROOT-report file from mp_stat dictionary """
-    print ("create ROOT file...")
-
-    from PerfMonAna.PyRootLib import importRoot
-    from ROOT import TTree
-    import array
-    ROOT = importRoot( batch = True )
-    outFile = ROOT.fopen( outName, 'RECREATE' )
-    outFile.cd("/")
-    
-    i = array.array( 'i', [0] )
-    d = array.array( 'f', [0.] )
-
-    for t in mp_stat.keys():
-        tree =  TTree( t, "%s stat tree" % t)
-        tree.Branch('np', i, 'int/I') # each tree will have 'np' branch
-        for b in mp_stat[t].keys():
-            if isinstance(mp_stat[t][b][0], int):
-                tree.Branch(b, i, 'int/I')
-            elif isinstance(mp_stat[t][b][0], float):
-                tree.Branch(b, d,'float/F')
-            else:
-                #print ("branch [%s] is not int or float type" % b)
-                tree.Branch(b, i, 'int/I')
-        tree.Write()
-    outFile.Write()
-    outFile.Close()
-    print ("create ROOT file... [DONE]")
-
-
-def fillRootTree(tree, stat, np):
-    #print ("writing %s statistics Tree:" % tree.GetName(), end='')
-    branches = list(stat.keys())
-    #print ("    branches=", branches, "...", end='')
-    nbr  = len(branches)
-    array_list = list()
-
-    np_array = array.array('i', [np])
-    tree.SetBranchAddress('np', np_array) #putting 'np' into each tree.
-    for branch in branches:
-        if isinstance(stat[branch][0], float):
-            f = stat[branch][0]
-            nums = array.array('f', [0.0])
-            array_list.append(nums)
-        elif isinstance(stat[branch][0], int):
-            i = stat[branch][0]
-            nums =  array.array('i', [0])
-            array_list.append(nums)
-        else:
-            #print ("branch [%s] is not int or float type" % branch)
-            nums = array.array('i', [-1])
-            array_list.append(nums)
-        tree.SetBranchAddress(branch, array_list[-1]);
-        
-    for index in range(len(stat[branches[0]])):
-        for array_index, branch in enumerate(branches):
-            array_list[array_index][0] = stat[branch][index] if array_list[array_index][0] != -1 else -1
-        tree.Fill()
-    #print ("[DONE]")
-
-
-
-def writeRootFile(outName, np):
-    """writes statistics into ROOT file"""
-    print ("write ROOT file %s...", outName )
-    createRootFile(outName, np)
-    from ROOT import TFile, TTree
-    import array
-    outFile = TFile( outName, 'update' )
-
-    stat_keys = mp_stat.keys()
-    #print ("mp_stat.keys()", stat_keys)
-    for key in stat_keys:
-        #print (" writing [%s]" % key)
-        tree = outFile.Get( "%s" %   key )
-        fillRootTree(tree, mp_stat[key], np)
-        tree.Write()
-
-    outFile.Write()
-    outFile.Close()        
-    print ("write ROOT file... [DONE]" )
-    return
-
-def mergeRootFiles(file, ne):
-    import glob
-    file_list = glob.glob1(os.getcwd(), "%s.*.%i.root" % (file, ne) )
-    cmd = "hadd -f6 mp_stat.%s.ne%i" % (file, ne)
-    for f in file_list:
-        cmd = cmd + ' ' + f 
-    sc, out = subprocess.getstatusoutput(cmd)
-
-def mergeRootOutput(output_file, jobo, np_list, ne):
-    from ROOT import TFile, TTree
-    #output_file = "merged.%s.ne%i.root" % (jobo, ne)
-    cmd = "hadd -f6 %s" % output_file
-    for np in np_list:
-        # here we copy mp_summary and cp_summary trees in each root file from /$np dir into root dir for further merging
-        file = "mp.%s.%i.%i.root" % (jobo, np, ne)
-        print (" ---> processing file = %s" % file)
-        #here we form the command for merging
-        cmd = cmd + " %s" % file
-
-    print ("issuing root files merging command:[%s]" % cmd)
-    sc, out = subprocess.getstatusoutput(cmd)
-    return #output_file
-
-def _createGlobalRootFile(file, ne):
-    from ROOT import TFile, TTree, TList
-    import glob
-    file_list = glob.glob1(os.getcwd(), "%s.*.%i.root" % (file, ne) )
-    outFile = TFile ("%s.%i.root" % (file, ne), 'RECREATE' )    
-    for f in file_list:
-        print ("Copying trees from [%s]" % f)
-        tf = TFile (f, 'READ' )
-        mpt = tf.Get("mp_summary")
-        cpt = tf.Get("cp_summary")
-        outFile.cd('/')
-        dir = "%s" % f.replace(file, "").split(".")[1]
-        print ("   creating dir for np = %s" % dir)
-        outFile.mkdir(dir) # creating dir for np
-        outFile.cd(dir)
-        mpTree = mpt.CloneTree(); mpTree.Write()
-        cpTree = cpt.CloneTree(); cpTree.Write()
-        outFile.Write()
-        tf.Close()
-
-    outFile.cd('/')
-    
-    ikeys = outFile.GetListOfKeys().MakeIterator()
-    key = ikeys.Next()
-    key_list = list()
-
-    while key is not None:
-        key_list.append(key.GetName())
-        key = ikeys.Next()
-    
-    for np in key_list:
-        outFile.Get("%s/mp_summary" % np)
-
-
-def report2(root_file, ne = 0, comments=""):
-    print('  mpMonTools.report(): root_file=', root_file)
-    from ROOT import TFile, TTree, TBranch, TCanvas, TPad, TGraph, TLegend, TMultiGraph, gStyle, TLatex, TPaveLabel, TPaveText, TH2I, TMath
-
-    def getTreeList(tree, column, condition):
-        size = tree.Draw(column, condition,'goff'); 
-        v1 = tree.GetV1(); v1.SetSize(size)
-        return list(v1)
-
-    def makeGraph(tree, name, formula, condition="", color = 1, lineWidth=1):
-        tree.Draw(formula, condition, "goff")
-        graph = TGraph(int(tree.GetSelectedRows()), tree.GetV2(), tree.GetV1())
-        graph.SetLineColor(color);
-        graph.SetLineWidth(lineWidth)
-        graph.SetName(name);
-        return graph
-
-    def MakeMultiGraph(graph_data, mg, l):
-        clr = 1
-        gl = list()
-        
-        if graph_data is None:
-            return []
-        
-        if graph_data['type'] == 'graph':
-            for name, (tree, param) in graph_data['data'].items():
-                clr+=1; formula =''; condition=''
-                if type(param).__name__=='tuple':
-                    formula = param[0]
-                    condition = param[1]
-                else:
-                    print ("MakeMG: ", formula, condition)
-                    formula = param
-                    condition = ""
-
-                print ("name=%s, tree=%s, formula=%s, condition=%s" % (name, tree.GetName(), formula, condition) )
-           
-                #g = makeGraph(tree, name, formula, condition, color=clr)
-                tree.Draw(formula, condition, "goff")
-                g = TGraph(int(tree.GetSelectedRows()), tree.GetV2(), tree.GetV1()); gl.append(g)
-                
-                g.SetName(name); g.SetLineColor(clr); g.SetMarkerColor(clr); g.SetLineWidth(0)
-                if name == "worker_rate":
-                    g.SetLineColor(10)
-                    
-                mg.Add(g); 
-                l.AddEntry(g, name)
-
-        if graph_data['type'] == 'list':
-            for name, (lx,ly) in graph_data['data'].items():
-                print ("name=%s" % name); print (lx); print (ly)
-                clr+=1
-                g = TGraph( len(lx), array.array('f', lx), array.array('f', ly) )
-                g.SetName(name); g.SetLineColor(clr); g.SetLineWidth(1); g.SetMarkerColor(clr); 
-                mg.Add(g)
-                l.AddEntry(g, name)
-                gl.append(g)
-
-        if graph_data['type'] == 'array':
-            clr = 1
-            g_list = list()
-            data = graph_data['data']
-            for name,(x,y) in graph_data['data'].items():
-                print (x); print (y)
-                clr+=1;
-                g = TGraph(len(x), x, y)
-                g.SetName(name); g.SetLineColor(clr); g.SetLineWidth(1); g.SetMarkerColor(clr) 
-                gl.append(g)
-                mg.Add(g); 
-                l.AddEntry(g, name)
-        if graph_data['type'] == 'text':
-            title.DrawPaveLabel(0.1,0.93,0.9,0.99, graph_data['title'], "brNDC")
-            for s in graph_data['data']:
-                print ("graph_data['data']=%s" % s)
-                mp_pt.AddText(s)             
-            mp_pt.SetTextAlign(12);
-            mp_pt.SetTextSize(0.04)
-            mp_pt.Draw()
-            return []
-
-        if graph_data['type'] == 'latex':
-            title.DrawPaveLabel(0.1,0.93,0.9,0.99, graph_data['title'], "brNDC")
-            tl = TLatex(); tl.SetTextSize(0.02); tl.SetTextAlign(12);
-            txtd = graph_data['data']
-            i = 0; x0 = 0.05; y0 = 0.90; dx = 0.08; dy = 0.05
-            x1 = x0
-            tl.DrawLatex(x1, y0, 'np')
-            for s in txtd['np']:
-                x1 = x1 + dx
-                tl.DrawLatex(x1, y0, s)
-            txtd.pop('np')
-
-            for k in txtd.keys():
-                y0 = y0 - dy
-                tl.DrawLatex(x0-0.03, y0, k);
-                x1 = x0
-                for s in txtd[k]:
-                    x1 = x1 + dx
-                    tl.DrawLatex(x1, y0, s)
-            return []
-        if 'goptions' in graph_data.keys():
-            mg.Draw(graph_data['goptions'])
-        else:
-            mg.Draw('ALP')
-            
-        l.Draw()
-        h=mg.GetHistogram(); h.SetXTitle(graph_data['xtitle']); h.SetYTitle(graph_data['ytitle']); h.SetMinimum(0.1);
-        title.DrawPaveLabel(0.1,0.91,0.9,0.99, graph_data['title'], "brNDC")
-        if 'text' in graph_data.keys():
-            title.DrawPaveLabel(0.2,0.88,0.8,0.92, graph_data['text'], "brNDC")
-            #text_box = TPaveText(0.2,0.51,0.8,0.54);  text_box.AddText(graph_data['text']);
-            #text_box.SetFillColor(42); text_box.SetTextAlign(12); text_box.SetTextFont(40); text_box.Draw();
-        
-        return [] # gl #list of TGraph
-
-    c = TCanvas("mpr", "AthenaMP-mp-scaling-charts", 10, 10, 800, 1024)
-    c.SetFillColor(17);  c.SetBorderSize(1); c.cd()
- 
-    tfile = TFile(root_file, "READ"); print ("   root compression factor = ", tfile.GetCompressionFactor())
-    mpSumTree = tfile.Get("mp_summary")
-    cpSumTree = tfile.Get("cp_summary")
-    ioTree = tfile.Get("io")
-    cpuTree = tfile.Get("cpu") 
-    numaTree = tfile.Get("numa")
-
-    if ne == 0:
-        ne = int(root_file.split('.')[-2].replace('ne', ''))
-        print ("extracted ne=[%i]" % ne)
-
-##### FORMING THE DATA FOR ROOT Graphing-Charting-Histogramming #####    
-    np_list = list(set(getTreeList(cpSumTree, 'np', ''))); np_list.sort() #uniquify and sort np_list
-    elap_time_stdev = list()
-    elap_time_avg = list()
-    elap_time_max = list()
-    cpu_time_stdev = list()
-    cpu_time_max = list()
-    cpu_time_avg = list()
-
-    elap_time_rate = list()
-    cpu_time_rate = list()
-    user_time_rate = list()
-    par_event_rate = list()
-
-    elap_cpu_time = list() # elap - cpu time avg.
-
-    elap_time_stdev_x = list()
-    cpu_time_stdev_x = list()
-    par_elap_time = list() # elap time as seen from mother
-    
-    mp_lb = [b.GetName() for b in list(mpSumTree.GetListOfBranches())]
-    cp_lb = [b.GetName() for b in list(cpSumTree.GetListOfBranches())]
-
-    mp_txt = "%s" % mp_lb + "\n"
-    cp_txt = "%s" % cp_lb + "\n" 
-    
-    mp_pt = TPaveText(0.1,0.1,0.9,0.9) 
-    mp_pt.SetFillColor(22)
-    
-    mp_latex = TLatex()
-    mp_latex.SetTextAlign(12)
-
-    txt_dict=dict()
-    ltxt_dict = dict()
-    for s in mp_lb:
-        txt_dict[s] = "%20s" % s
-        ltxt_dict[s] = list()
-    np_txt = ""
-    for np in np_list:
-        size = cpSumTree.Draw('elap_time:cpu_time:elap_time-cpu_time:user_time', "np==%i" % int(np), 'goff'); 
-        elapv = cpSumTree.GetV1(); 
-        cpuv = cpSumTree.GetV2();
-        elap_cpuv = cpSumTree.GetV3();
-        userv = cpSumTree.GetV4();
-
-        elap_time_stdev.append(float(TMath.RMS(size, elapv )))
-        elap_time_avg.append(float(TMath.Mean(size, elapv )))
-        elap_time_max.append(float(TMath.MaxElement(size, elapv )))
-
-        cpu_time_stdev.append(float(TMath.RMS(size, cpuv )))
-        cpu_time_avg.append(float(TMath.Mean(size, cpuv )))
-        cpu_time_max.append(float(TMath.MaxElement(size, cpuv )))
-        
-        elap_cpu_time.append(float(TMath.Mean(size, elap_cpuv)))
-        
-        #elap_time_rate.append( float(60*ne)/float(np*elap_time_avg[-1]) )
-        #cpu_time_rate.append( float(60*ne)/float(np*cpu_time_avg[-1]) )
-        #user_time_rate.append( float(60*ne)/float( np * float(TMath.Mean(size, userv))) ) 
-
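-        # event rates in events/min: ne events divided by the averaged elapsed/CPU/user time (in seconds)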
-        elap_time_rate.append( float(60*ne)/float(elap_time_avg[-1]) )
-        cpu_time_rate.append( float(60*ne)/float(cpu_time_avg[-1]) )
-        user_time_rate.append( float(60*ne)/float(float(TMath.Mean(size, userv))) ) 
-
-        #elap_time_stdev_x.append(float(TMath.RMS(size, cpSumTree.GetV2())))
-        #cpu_time_stdev_x.append(float(TMath.RMS(size,  cpSumTree.GetV4())))
-        
-        msize = mpSumTree.Draw('m_par_time', "np==%i" % int(np), 'goff')
-        parv = mpSumTree.GetV1();
-        par_elap_time.append(float(TMath.Mean(msize, parv)))
-        par_event_rate.append(float(60.0*ne)/par_elap_time[-1])
-
-        np_txt += "%10s" % np
-        for s in mp_lb:
-            txt_dict[s] += "%10.1f" % getTreeList(mpSumTree, s, "np==%i" % int(np) )[0]
-            ltxt_dict[s].append( "%10.1f" % getTreeList(mpSumTree, s, "np==%i" % int(np))[0] )
-
-    print ("np_list=%s\n etime_stdev=%s \n cpu_time_stdev=%s" % (np_list, elap_time_stdev, cpu_time_stdev))
-    print ("elap-cpu=%s" % (elap_cpu_time))
-    
-    #mn = 7; mt=10000
-    from socket import gethostname
-    import platform
-#########################
-    graph_list = list()
-###########################
-    graph =dict()
-    graph['type'] = 'text'
-    graph['data'] = [
-                "MP Times, Memory, IO, CPU PLOTS for ",
-                " %s " % root_file,
-                " machine: %s" % gethostname(),
-                #"%s, %s, %s, %s, %s, %s" % platform.uname(),
-                "%s" % platform.platform(),
-                "%s" % os.getenv('CMTCONFIG'),
-                " comments: %s" % comments,
-                "np=%s " % [int(s) for s in np_list]
-                ]
-    graph['title'] = 'ATHENA MP MONITOR mpMon REPORT'
-    graph_list.append(graph)
-############################
-    graph  = dict()
-    graph['type'] = 'latex'
-    graph['data'] = ltxt_dict 
-    graph['title']= "mp_summary numbers:"
-    graph_list.append(graph)
-############################
-    graph = dict()
-    graph['type'] = 'list'
-    graph['data'] = {
-                     'parallel_elap': (np_list, par_elap_time),
-                     'worker_elap_avg': (np_list, elap_time_avg),
-                     'worker_elap_max': (np_list, elap_time_max),
-                     'worker_cpu_avg':  (np_list, cpu_time_avg),
-                     'worker_cpu_max':  (np_list, cpu_time_max),
-                     #'elap_time_stdev_x': (np_list, elap_time_stdev_x),
-                     #'cpu_time_stdev_x':  (np_list, cpu_time_stdev_x)
-                     }
-    graph['title'] = "Parallel and Workers Elap and CPU Time Variations"
-    graph['xtitle'] = "NbrProc"
-    graph['ytitle'] = "Time, sec" 
-    graph_list.append(graph)
-
-############################
-    graph  = dict()
-    graph['type'] = 'graph'
-    graph['data'] = {
-            "m_elap_time" : (mpSumTree, "m_elap_time:np"),
-            "m_firstevent_time": (mpSumTree, "m_firstevent_time:np"),
-            "m_par_time" :    (mpSumTree, "m_par_time:np"),
-            "m_merging_time": (mpSumTree, "m_merging_time:np")
-                   }
-    graph['title'] = "PARENT PROCESS TIMES"
-    graph['xtitle']= "Nbr of Processes"
-    graph['ytitle']= "Time, sec"
-    graph_list.append(graph)
-##############################
-    """
-    graph =dict()
-    graph['type'] = 'text'
-    graph['data'] = [
-                "This plot intentially left blank"
-                ]
-    graph['title'] = 'BLANK CHART'
-    graph_list.append(graph)
-    """
-############################
-    graph = dict()
-    graph['type'] = 'list'
-    graph['data'] = {
-                     'elap_time_stdev': (np_list, elap_time_stdev),
-                     'cpu_time_stdev':  (np_list, cpu_time_stdev),
-                     'elap-cpu_avg':  (np_list, elap_cpu_time),
-                     }
-    graph['title'] = "Worker Processes Time Deviations"
-    graph['xtitle'] = "NbrProc"
-    graph['ytitle'] = "Time_StDev, sec" 
-    graph_list.append(graph)
-
-############################
-    graph  = dict()
-    graph['type'] = 'graph'
-    graph['data'] = {
-            #"event_rate": (mpSumTree, "event_rate:np"),
-            #"event_rate_x": (mpSumTree, "(60*%i)/(x_par_time):np" % ne),
-            #"event_rate" :  (mpSumTree, "(60*%i)/(m_par_time):np" % ne),
-            #"per_proc_rate": (mpSumTree, "event_proc_rate:np") 
-            "event_rate" :  (mpSumTree, "(60*%i*np)/(m_par_time):np" % ne),
-            "per_proc_rate": (mpSumTree, "event_proc_rate*np:np")
-            #"per_proc_rate_x": (mpSumTree, "(60*%i)/(x_par_time*np):np" % ne)
-                     }
-    graph['title'] = "ATHENA MP EVENT PROCESSING RATE"
-    graph['xtitle']= "Nbr of Processes"
-    graph['ytitle']= "Evts/min, Evts/proc/min"
-    graph_list.append(graph)
-############################
-    
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['data'] = {
-                     'parallel_rate': (mpSumTree, "event_rate:np"), #"(event_rate + 207/(30*(np-2)+m_par_time)):np"),
-                     'worker_rate': (cpSumTree, "(60*%i)/(elap_time):np" % ne),
-                     #'cpu_rate':  (cpSumTree, "(60*%i)/(cpu_time):np" % ne),
-                     #'user_rate': (cpSumTree, "(60*%i)/(user_time):np" % ne)
-                     }
-    graph['title'] = "Workers and parallel event processing rates, wall-clock time"
-    graph['xtitle'] = "NbrProc"
-    graph['ytitle'] = "Evts/Proc/Min" 
-    graph['goptions'] = "ALP"
-    graph_list.append(graph)
-############################
-    
-    graph = dict()
-    graph['type'] = 'list'
-    graph['data'] = {
-                     'par_event_rate': (np_list, par_event_rate),
-                     'elap_avg_rate': (np_list, elap_time_rate),
-                     'cpu_avg_rate':  (np_list, cpu_time_rate),
-                     'user_avg_rate': (np_list, user_time_rate)
-                     }
-    graph['title'] = "Worker event processing rates Wall-Time, User-CPU, Total-CPU time rates, averaged."
-    graph['xtitle'] = "NbrProc"
-    graph['ytitle'] = "Evts/Proc/Min" 
-    graph_list.append(graph)
-
-#############################
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['data'] ={
-        "total_mem": (mpSumTree, "free_mem_spike/1024:np"),
-        "mem_per_proc": (mpSumTree, "free_mem_spike/np/1024:np"),
-        }
-    graph['title'] = "PHYSICAL MEMORY CONSUMPTION BY AthenaMP"
-    graph['xtitle']= "Nbr of Processes"
-    graph['ytitle']= "Memory Consumption, Kb"
-    graph_list.append(graph)
-
-    cond = "vmem!=0"
-#############################
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['data'] ={
-        "mem_per_proc": (mpSumTree, "free_mem_spike/np/1024:np"),
-        "vmem": (cpSumTree, ("vmem:np", cond) ),
-        "rss":  (cpSumTree, ("rss:np",  cond) )
-        }
-    graph['title'] = "VMEM, RSS, RealMemory(from free-spike) per Worker"
-    graph['xtitle']= "Nbr of Processes"
-    graph['ytitle']= "Memory Consumption, Mb"
-    graph_list.append(graph)
-
-############################
-    cond = "np>0"
-############################    
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['logY'] = True
-    graph['data'] ={
-        "bread/sec": (ioTree,  ('breadps:Time', cond) ),
-        "bwrite/sec": (ioTree, ('bwrtnps:Time', cond) )
-        }
-    graph['title'] = "IO Activity for Athena MP  %s " % cond
-    graph['xtitle']= "Time"
-    graph['ytitle']= "Total Amount of Data R/W in blocks per sec"
-    graph['text']= "np = %s" % np_list
-    graph_list.append(graph)  
-############################    
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['logY'] = True
-    graph['data'] ={
-        "write_reqs/sec": (ioTree, ('wtps:Time', cond) ),
-        "read_reqs/sec":  (ioTree, ('rtps:Time', cond) ),
-        "total_reqs/sec": (ioTree, ('tps:Time',  cond) )
-        }
-    graph['title'] = "IO Activity for Athena MP %s" % cond
-    graph['xtitle']= "Time, sec since 00:00"
-    graph['ytitle']= "Transfer_requests/sec"
-    graph['text']= "np = %s" % np_list
-    graph_list.append(graph)
-############################    
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['data'] ={
-        '%_user' :   (cpuTree, ('puser:Time',   cond) ),
-        '%_system':  (cpuTree, ('psystem:Time', cond) ),
-        '%_idle':    (cpuTree, ('pidle:Time',   cond) ),
-        '%_io_wait': (cpuTree, ('piowait:Time', cond) )
-        }
-    graph['title'] = "CPU Activity for Athena MP %s" % cond
-    graph['xtitle']= "Time, sec since 00:00"
-    graph['ytitle']= "Percentage of CPU Utilization"
-    graph['text']= "np = %s" % np_list
-    graph_list.append(graph)
-############################  
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['logY'] = True
-    graph['data'] ={
-        '%_io_wait': (cpuTree, ('piowait:Time', cond) ),
-        }
-    graph['title'] = "CPU Activity for Athena MP %s" % cond
-    graph['xtitle']= "Time, sec since 00:00"
-    graph['ytitle']= "Percentage of CPU Utilization"
-    graph['text']= "np = %s" % np_list
-    graph_list.append(graph)
-#########################
-
-#########################
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['logY'] = True
-    graph['data'] ={
-        'numa_hit': (numaTree, ('numa_hit:Time', cond) ),
-        'numa_miss':(numaTree,('numa_miss:Time', cond) ),
-        'numa_foreign': (numaTree, ('numa_foreign:Time', cond) ),
-        'interleave_hit': (numaTree, ('interleave_hit:Time', cond) ),
-        'local_node': (numaTree, ('local_node:Time', cond) ),
-        'other_node': (numaTree, ('other_node:Time', cond) ),
-        }
-    graph['title'] = "NUMA Activity for athena MJ (Multi Jobs) %s" % cond
-    graph['xtitle']= "Time, sec since 00:00"
-    graph['ytitle']= "Nbr of hits/misses"
-    graph['text']= "np = %s" % np_list
-    graph_list.append(graph)
-
-
-
-    pads = list()
-
-    cpp = 1 #charts per pad 
-    ppc = 2 #pads per canvas
-
-    c.cd()
-    gStyle.SetOptStat(0);
-    gStyle.SetPalette(1);
-    gStyle.SetCanvasColor(33);
-    gStyle.SetFrameFillColor(10);
-    gStyle.SetMarkerStyle(21)
-    gStyle.SetMarkerColor(2)
-    gStyle.SetMarkerSize(0.4)
-    print ("gStyle.Set done")
-
-    title = TPaveLabel(0.1,0.98,0.9,1, "Athena MP Plots");
-    title.SetFillColor(42); title.SetTextFont(40); 
-    #title.Draw();print ("title Drawn")
-
-    mgs =  list()  #List of TMultiGraphs
-    ls =   list()  #List of TLegends
-    gs =   list()  #List of TGraph
-
-    for j in range(ppc):
-        y_factor = 0.99;   x1 = 0.01; x2 = 0.99;  y1 = y_factor - (y_factor-0.01)*(j+1)/float(ppc); y2 = y_factor - (y_factor-0.01)*j/float(ppc)
-        print ("x1,y1,x2,y2",  x1, y1, x2, y2 )
-        pad = TPad("pad%i" % j, "pad%i" % j,   x1, y1, x2, y2,   33); pad.Draw()
-        pads.append(pad);
-    
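-    # one canvas (= one PDF page) holds cpp*ppc charts: round the page count up and pad graph_list with None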
-    num_cans = len(graph_list) // (cpp*ppc) if len(graph_list) % (cpp*ppc)==0 else len(graph_list)//(cpp*ppc) + 1
-    graph_list += [None,]* (num_cans*cpp*ppc - len(graph_list))
-    print ("number of pages/canvases in report = ", num_cans)
-    
-    pdf_file = root_file
-    for s in ['merged.', '.py', '.root']:
-        pdf_file = pdf_file.replace(s, '')
-    pdf_file ="%s.pdf" % pdf_file
-
-    for i in range(num_cans):
-        for j in range(ppc):
-            graph = graph_list[ppc*i+j]
-            if graph is None:
-                continue
-            
-            if 'logY' in graph.keys():
-                if graph['logY']:
-                    pads[j].SetLogy()
-            else:
-                pads[j].SetLogy(0)
-            
-            pads[j].cd()
-            pads[j].SetRightMargin(0.2)
-            l = TLegend(0.82,0.20,0.99,0.89); ls.append(l) 
-            mg = TMultiGraph(); mgs.append(mg)
-            print ("graph=", graph)
-            gs.append(MakeMultiGraph(graph, mg, l))
-
-        c.Update()
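-        # ROOT multi-page PDF convention: "file.pdf(" opens the file, "file.pdf" appends a page, "file.pdf)" closes it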
-        if i == 0:
-            print ("pdf.start")
-            c.Print(pdf_file+'(', 'pdf') #start page
-        elif i < num_cans-1:
-            print ("pdf.body")
-            c.Print(pdf_file, 'pdf')    #body pages
-        else:
-            print ("pdf.end")
-            c.Print(pdf_file + ')', 'pdf') #end page
-        c.SaveAs("%s.%i.png" % (pdf_file, i))
-        for pad in pads:
-            pad.Clear()
-
-
-def report(root_file, ne = 0, comments=""):
-    print('  mpMonTools.report(): root_file=', root_file)
-    from ROOT import TFile, TTree, TBranch, gPad, TCanvas, TPad,TProfile, TGraph, TLegend, TLegendEntry, TMultiGraph, gStyle, TLatex, TPaveLabel, TPaveText, TH2I, TMath
-
-    def getTreeList(tree, column, condition):
-        size = tree.Draw(column, condition,'goff'); 
-        v1 = tree.GetV1(); v1.SetSize(size)
-        return list(v1)
-
-    def makeGraph(tree, name, formula, condition="", color = 1, lineWidth=1):
-        tree.Draw(formula, condition, "goff")
-        graph = TGraph(int(tree.GetSelectedRows()), tree.GetV2(), tree.GetV1())
-        graph.SetLineColor(color);
-        graph.SetLineWidth(lineWidth)
-        graph.SetName(name);
-        return graph
-
-    def MakeMultiGraph(c, pad, graph_data, mg, l, tpl):
-        clr = 1
-        gl = list()
-        hl = list()
-        l.SetFillColor(0)
-        lel = list()
-
-        draw_option=""
-        if graph_data is None:
-            return []
-        if graph_data['type'] == 'profile':
-            for name, (tree, param) in graph_data['data'].items():
-                clr+=1; formula =''; condition=''
-                if type(param).__name__=='tuple':
-                    formula = param[0]
-                    condition = param[1]
-                else:
-                    print ("MakeMG: ", formula, condition)
-                    formula = param
-                    condition = ""
-
-                print ("PROFILE: name=%s, tree=%s, formula=%s, condition=%s" % (name, tree.GetName(), formula, condition) )
-                
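-                # book a named TProfile and let TTree::Draw fill it in place via the ">>name" redirection and the "prof" option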
-                hprof = TProfile(
-                        "%s" % name, 
-                        "     %s;%s;%s" % (name, graph_data['xtitle'], graph_data['ytitle']),
-                         100,0,18, " " 
-                         )
-                tpl.append(hprof)
-                tree.Draw("%s>>%s"% (formula, name), condition, "prof") 
-                tpl[-1].SetMarkerColor(clr)
-                
-                if draw_option == "": 
-                    tpl[-1].Draw()
-                    draw_option="PSAME"
-                else:
-                    print ("PROFILE: %s 'PSAME' clr=%i " % (name, clr))
-                    tpl[-1].Draw("PSAME")
-                le = l.AddEntry(tpl[-1], name)
-                le.SetFillColor(0)
-                lel.append(le)
-                l.Draw()
-                #gPad.Update()
-                #c.Update()
-            return []
-        
-        if graph_data['type'] == 'graph':
-            for name, (tree, param) in graph_data['data'].items():
-                clr+=1; formula =''; condition=''
-                if type(param).__name__=='tuple':
-                    formula = param[0]
-                    condition = param[1]
-                else:
-                    print ("MakeMG: ", formula, condition)
-                    formula = param
-                    condition = ""
-                
-                if tree is None:
-                    print ("name=%s -> TTree DOESN't EXIST" % name)
-                    continue
-
-                print ("name=%s, tree=%s, formula=%s, condition=%s" % (name, tree.GetName(), formula, condition), end='')
-                tree.Draw(formula, condition,   "goff")
-                
-                selection_size = tree.GetSelectedRows()
-                if selection_size==-1:
-                    print ("-> SKIPPED (DO NOT EXIST): SELECTION_SIZE=%i" % selection_size )
-                    continue
-                else:
-                    print ("-> SELECTION_SIZE=%i" % selection_size )
-                    pass
-
-                g = TGraph(selection_size, tree.GetV2(), tree.GetV1()); gl.append(g)
-                
-                g.SetName(name); g.SetLineColor(clr); g.SetMarkerColor(clr); g.SetLineWidth(0)
-                if name == "worker_rate":
-                    g.SetLineColor(10)
-                    
-                mg.Add(g); 
-                le = l.AddEntry(g, name)
-                le.SetFillColor(0)
-                lel.append(le)
-                
-        if graph_data['type'] == 'list':
-            for name, (lx,ly) in graph_data['data'].items():
-                print ("name=%s" % name); print (lx); print (ly)
-                clr+=1
-                g = TGraph( len(lx), array.array('f', lx), array.array('f', ly) )
-                g.SetName(name); g.SetLineColor(clr); g.SetLineWidth(1); g.SetMarkerColor(clr); 
-                mg.Add(g)
-                l.AddEntry(g, name).SetFillColor(0)
-                gl.append(g)
-
-        if graph_data['type'] == 'array':
-            clr = 1
-            g_list = list()
-            data = graph_data['data']
-            for name,(x,y) in graph_data['data'].items():
-                print (x); print (y)
-                clr+=1;
-                g = TGraph(len(x), x, y)
-                g.SetName(name); g.SetLineColor(clr); g.SetLineWidth(1); g.SetMarkerColor(clr) 
-                gl.append(g)
-                mg.Add(g); 
-                l.AddEntry(g, name).SetFillColor(0)
-        if graph_data['type'] == 'text':
-            title.DrawPaveLabel(0.1,0.93,0.9,0.99, graph_data['title'], "brNDC")
-            for s in graph_data['data']:
-                print ("graph_data['data']=%s" % s)
-                mp_pt.AddText(s)             
-            mp_pt.SetTextAlign(12);
-            mp_pt.SetTextSize(0.04)
-            mp_pt.Draw()
-            return []
-
-        if graph_data['type'] == 'latex':
-            title.DrawPaveLabel(0.1,0.93,0.9,0.99, graph_data['title'], "brNDC")
-            tl = TLatex(); tl.SetTextSize(0.02); tl.SetTextAlign(12);
-            txtd = graph_data['data']
-            i = 0; x0 = 0.05; y0 = 0.90; dx = 0.08; dy = 0.05
-            x1 = x0
-            tl.DrawLatex(x1, y0, 'np')
-            for s in txtd['np']:
-                x1 = x1 + dx
-                tl.DrawLatex(x1, y0, s)
-            txtd.pop('np')
-
-            for k in txtd.keys():
-                y0 = y0 - dy
-                tl.DrawLatex(x0-0.03, y0, k);
-                x1 = x0
-                for s in txtd[k]:
-                    x1 = x1 + dx
-                    tl.DrawLatex(x1, y0, s)
-            return []
-        
-        if mg.GetListOfGraphs() is None:
-            print ("MultiGraph: Empty",)
-            print ("mg=%s" % mg.GetName())
-            return[]
-
-        if 'goptions' in graph_data.keys():
-            mg.Draw(graph_data['goptions'])
-        else:
-            mg.Draw('ALP')
-        
-        l.Draw()
-        h=mg.GetHistogram(); h.SetXTitle(graph_data['xtitle']); h.SetYTitle(graph_data['ytitle']); h.SetMinimum(0.1);
-        title.DrawPaveLabel(0.1,0.91,0.9,0.99, graph_data['title'], "brNDC")
-        if 'text' in graph_data.keys():
-            title.DrawPaveLabel(0.2,0.88,0.8,0.92, graph_data['text'], "brNDC")
-            #text_box = TPaveText(0.2,0.51,0.8,0.54);  text_box.AddText(graph_data['text']);
-            #text_box.SetFillColor(0); text_box.SetTextAlign(12); text_box.SetTextFont(40); text_box.Draw();
-        
-        return [] # gl #list of TGraph
-
-    c = TCanvas("mp_can", "AthenaMP-mp-scaling-charts", 1, 1, 800, 1024)
-    c.SetFillColor(0);  c.SetBorderSize(1); c.cd()
- 
-    tfile = TFile(root_file, "READ"); print ("   root compression factor = ", tfile.GetCompressionFactor())
-    mpSumTree = tfile.Get("mp_summary")
-    cpSumTree = tfile.Get("cp_summary")
-    ioTree = tfile.Get("io")
-    cpuTree = tfile.Get("cpu")
-    memTree = tfile.Get("mem")
-    numaTree = tfile.Get("numa")
-    print ("numaTree=%s" % numaTree)
-
-    if ne == 0:
-        ne = int(root_file.split('.')[-2].replace('ne', ''))
-        print ("extracted ne=[%i]" % ne)
-
-##### FORMING THE DATA FOR ROOT Graphing-Charting-Histogramming #####    
-    np_list = list(set(getTreeList(cpSumTree, 'np', ''))); np_list.sort() #uniquify and sort np_list
-    elap_time_stdev = list()
-    elap_time_avg = list()
-    elap_time_max = list()
-    cpu_time_stdev = list()
-    cpu_time_max = list()
-    cpu_time_avg = list()
-
-    elap_time_rate = list()
-    cpu_time_rate = list()
-    user_time_rate = list()
-    par_event_rate = list()
-
-    elap_cpu_time = list() # elap - cpu time avg.
-
-    elap_time_stdev_x = list()
-    cpu_time_stdev_x = list()
-    par_elap_time = list() # elap time as seen from mother
-    
-    mp_lb = [b.GetName() for b in list(mpSumTree.GetListOfBranches())]
-    cp_lb = [b.GetName() for b in list(cpSumTree.GetListOfBranches())]
-
-    mp_txt = "%s" % mp_lb + "\n"
-    cp_txt = "%s" % cp_lb + "\n" 
-    
-    mp_pt = TPaveText(0.1,0.1,0.9,0.9) 
-    mp_pt.SetFillColor(0) #22
-    
-    mp_latex = TLatex()
-    mp_latex.SetTextAlign(12)
-
-    txt_dict=dict()
-    ltxt_dict = dict()
-    for s in mp_lb:
-        txt_dict[s] = "%20s" % s
-        ltxt_dict[s] = list()
-    ltxt_dict["total_rate"]=list()
-    
-    np_txt = ""
-    for np in np_list:
-        size = cpSumTree.Draw('elap_time:cpu_time:elap_time-cpu_time:user_time', "np==%i" % int(np), 'goff'); 
-        elapv = cpSumTree.GetV1(); 
-        cpuv = cpSumTree.GetV2();
-        elap_cpuv = cpSumTree.GetV3();
-        userv = cpSumTree.GetV4();
-
-        elap_time_stdev.append(float(TMath.RMS(size, elapv )))
-        elap_time_avg.append(float(TMath.Mean(size, elapv )))
-        elap_time_max.append(float(TMath.MaxElement(size, elapv )))
-
-        cpu_time_stdev.append(float(TMath.RMS(size, cpuv )))
-        cpu_time_avg.append(float(TMath.Mean(size, cpuv )))
-        cpu_time_max.append(float(TMath.MaxElement(size, cpuv )))
-        
-        elap_cpu_time.append(float(TMath.Mean(size, elap_cpuv)))
-        
-        #elap_time_rate.append( float(60*ne)/float(np*elap_time_avg[-1]) )
-        #cpu_time_rate.append( float(60*ne)/float(np*cpu_time_avg[-1]) )
-        #user_time_rate.append( float(60*ne)/float( np * float(TMath.Mean(size, userv))) ) 
-
-        elap_time_rate.append( float(60*ne)/float(elap_time_avg[-1]) )
-        cpu_time_rate.append( float(60*ne*np)/float(cpu_time_avg[-1]) )
-        user_time_rate.append( float(60*ne)/float(float(TMath.Mean(size, userv))) ) 
-
-        #elap_time_stdev_x.append(float(TMath.RMS(size, cpSumTree.GetV2())))
-        #cpu_time_stdev_x.append(float(TMath.RMS(size,  cpSumTree.GetV4())))
-        
-        msize = mpSumTree.Draw('m_par_time', "np==%i" % int(np), 'goff')
-        parv = mpSumTree.GetV1();
-        par_elap_time.append(float(TMath.Mean(msize, parv)))
-        par_event_rate.append(float(60.0*ne)/par_elap_time[-1])
-
-        np_txt += "%10s" % np
-        for s in mp_lb:
-            gtl = getTreeList(mpSumTree, s, "np==%i" % int(np) )
-            print ("%s: getTreeList: %s" % (s,gtl), end='')
-            gtl_avg = meanList(gtl)
-            print (" avg=%10.1f" % gtl_avg)
-            txt_dict[s] += "%10.1f" % gtl_avg
-            ltxt_dict[s].append( "%10.1f" % gtl_avg)
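-        # total throughput in events/min, computed as 60*np*ne / m_par_time (parallel elapsed time in seconds)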
-        ltxt_dict["total_rate"].append("%10.1f" % 
-                    ( 60.0*float(np)*float(ne)/ float(ltxt_dict["m_par_time"][-1]) )
-                    ) 
-
-    print ("np_list=%s\n etime_stdev=%s \n cpu_time_stdev=%s" % (np_list, elap_time_stdev, cpu_time_stdev))
-    print ("elap-cpu=%s" % (elap_cpu_time))
-    
-    #mn = 7; mt=10000
-    from socket import gethostname
-    import platform
-#########################
-    graph_list = list()
-
-###########################
-    graph =dict()
-    graph['type'] = 'text'
-    graph['data'] = [
-                "MP Times, Memory, IO, CPU PLOTS for ",
-                " %s " % root_file,
-                " machine: %s" % gethostname(),
-                #"%s, %s, %s, %s, %s, %s" % platform.uname(),
-                "%s" % platform.platform(),
-                "%s" % os.getenv('CMTCONFIG'),
-                " comments: %s" % comments,
-                "np=%s " % [int(s) for s in np_list]
-                ]
-    graph['title'] = 'ATHENA MP MONITOR mpMon REPORT'
-    graph_list.append(graph)
-############################
-    graph  = dict()
-    graph['type'] = 'latex'
-    graph['data'] = ltxt_dict 
-    graph['title']= "mp_summary numbers:"
-    graph_list.append(graph)
-
-############################
-    graph  = dict()
-    graph['type'] = 'graph'
-    graph['data'] = {
-            "event_rate" :  (mpSumTree, "(60*%i*np)/(m_par_time):np" % ne),
-            #"per_proc_rate": (mpSumTree, "event_proc_rate*np:np")
-                     }
-    graph['title'] = "ATHENA MP EVENT PROCESSING RATE"
-    graph['xtitle']= "Nbr of Processes"
-    graph['ytitle']= "Evts/min     "
-    graph_list.append(graph)
-
-############################
-    
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['data'] = {
-                     'parallel_rate': (mpSumTree, "event_rate:np"), 
-                     'worker_rate': (cpSumTree, "(60*%i)/(elap_time):np" % ne),
-                     }
-    graph['title'] = "Workers and parallel event processing rates, wall-clock time"
-    graph['xtitle'] = "NbrProc"
-    graph['ytitle'] = "Evts/Proc/Min" 
-    graph['goptions'] = "ALP"
-    graph_list.append(graph)
-############################  
-    cond="np>0"
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['logY'] = False
-    graph['data'] ={
-        'free_mem': (memTree, ('kbmemfree/1024:Time', cond) ),
-        'used_mem': (memTree, ('kbmemused/1024:Time', cond) ),
-        'cached_mem': (memTree, ('kbcached/1024:Time', cond) ),
-        'buffers_mem': (memTree, ('kbbuffers/1024:Time', cond) ),
-        'kbswpused': (memTree, ('kbswapused/1024:Time', cond) )
-        #'commit_mem': (memTree, ('kbcommit/1024:Time', cond) )
-        }
-    graph['title'] = "Memory Activity for Athena MP %s" % cond
-    graph['xtitle']= "Time, sec since 00:00"
-    graph['ytitle']= "Memory, Mb "
-    #graph['text']= "np = %s" % np_list
-    graph_list.append(graph)
-############################
-    graph = dict()
-    graph['type'] = 'list'
-    graph['data'] = {
-                     'parallel_elap': (np_list, par_elap_time),
-                     'worker_elap_avg': (np_list, elap_time_avg),
-                     'worker_elap_max': (np_list, elap_time_max),
-                     'worker_cpu_avg':  (np_list, cpu_time_avg),
-                     'worker_cpu_max':  (np_list, cpu_time_max),
-                     #'elap_time_stdev_x': (np_list, elap_time_stdev_x),
-                     #'cpu_time_stdev_x':  (np_list, cpu_time_stdev_x)
-                     }
-    graph['title'] = "Parallel and Workers Elap and CPU Time Variations"
-    graph['xtitle'] = "NbrProc"
-    graph['ytitle'] = "Time, sec" 
-    graph_list.append(graph)
-
-############################
-    graph  = dict()
-    graph['type'] = 'graph'
-    graph['data'] = {
-            "m_elap_time" : (mpSumTree, "m_elap_time:np"),
-            "m_firstevent_time": (mpSumTree, "m_firstevent_time:np"),
-            "m_par_time" :    (mpSumTree, "m_par_time:np"),
-            "m_merging_time": (mpSumTree, "m_merging_time:np")
-                   }
-    graph['title'] = "PARENT PROCESS TIMES"
-    graph['xtitle']= "Nbr of Processes"
-    graph['ytitle']= "Time, sec"
-    graph_list.append(graph)
-##############################
-    """
-    graph =dict()
-    graph['type'] = 'text'
-    graph['data'] = [
-                "This plot intentially left blank"
-                ]
-    graph['title'] = 'BLANK CHART'
-    graph_list.append(graph)
-    """
-############################
-    graph = dict()
-    graph['type'] = 'list'
-    graph['data'] = {
-                     'elap_time_stdev': (np_list, elap_time_stdev),
-                     'cpu_time_stdev':  (np_list, cpu_time_stdev),
-                     'elap-cpu_avg':  (np_list, elap_cpu_time),
-                     }
-    graph['title'] = "Worker Processes Time Deviations"
-    graph['xtitle'] = "NbrProc"
-    graph['ytitle'] = "Time_StDev, sec" 
-    graph_list.append(graph)
-
-############################
-    graph  = dict()
-    graph['type'] = 'graph'
-    graph['data'] = {
-            #"event_rate": (mpSumTree, "event_rate:np"),
-            #"event_rate_x": (mpSumTree, "(60*%i)/(x_par_time):np" % ne),
-            #"event_rate" :  (mpSumTree, "(60*%i)/(m_par_time):np" % ne),
-            #"per_proc_rate": (mpSumTree, "event_proc_rate:np") 
-            "event_rate" :  (mpSumTree, "(60*%i*np)/(m_par_time):np" % ne),
-            "per_proc_rate": (mpSumTree, "event_proc_rate*np:np")
-            #"per_proc_rate_x": (mpSumTree, "(60*%i)/(x_par_time*np):np" % ne)
-                     }
-    graph['title'] = "ATHENA MP EVENT PROCESSING RATE"
-    graph['xtitle']= "Nbr of Processes"
-    graph['ytitle']= "Evts/min, Evts/proc/min"
-    graph_list.append(graph)
-############################
-    
-    graph = dict()
-    graph['type'] = 'list'
-    graph['data'] = {
-                     'par_event_rate': (np_list, par_event_rate),
-                     'elap_avg_rate': (np_list, elap_time_rate),
-                     'cpu_avg_rate':  (np_list, cpu_time_rate),
-                     'user_avg_rate': (np_list, user_time_rate)
-                     }
-    graph['title'] = "Worker event processing rates Wall-Time, User-CPU, Total-CPU time rates, averaged."
-    graph['xtitle'] = "NbrProc"
-    graph['ytitle'] = "Evts/Proc/Min" 
-    graph_list.append(graph)
-
-#############################
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['data'] ={
-        "total_mem": (mpSumTree, "free_mem_spike/1024:np"),
-        "mem_per_proc": (mpSumTree, "free_mem_spike/np/1024:np"),
-        }
-    graph['title'] = "PHYSICAL MEMORY CONSUMPTION BY AthenaMP"
-    graph['xtitle']= "Nbr of Processes"
-    graph['ytitle']= "Memory Consumption, Kb"
-    graph_list.append(graph)
-
-    cond = "vmem!=0"
-############################# 
-    graph = dict()
-    graph['type'] = 'profile'
-    graph['data'] ={
-        "VMem": (cpSumTree, "vmem:np" ),
-        "Memory_per_proc": (mpSumTree, "free_mem_spike/np/1024:np"),
-        #"VMem1": (cpSumTree, ("vmem:np", cond) ),
-        #"VMem2": (cpSumTree, ("vmem:np", cond) ),
-        #"VMem3": (cpSumTree, ("vmem:np", cond) )
-        }
-    graph['title'] = "VMem,  RealMemory per Worker"
-    graph['xtitle']= "Nbr of Processes"
-    graph['ytitle']= "Memory Consumption, Mb"
-    graph_list.append(graph)
-
-############################
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['data'] ={
-        "mem_per_proc": (mpSumTree, "free_mem_spike/np/1024:np"),
-        "vmem": (cpSumTree, ("vmem:np", cond) ),
-        "rss":  (cpSumTree, ("rss:np",  cond) )
-        }
-    graph['title'] = "VMEM, RSS, RealMemory(from free-spike) per Worker"
-    graph['xtitle']= "Nbr of Processes"
-    graph['ytitle']= "Memory Consumption, Mb"
-    graph_list.append(graph)
-
-############################
-    cond = "np>0"
-############################    
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['logY'] = True
-    graph['data'] ={
-        "bread/sec": (ioTree,  ('breadps:Time', cond) ),
-        "bwrite/sec": (ioTree, ('bwrtnps:Time', cond) )
-        }
-    graph['title'] = "IO Activity for Athena MP  %s " % cond
-    graph['xtitle']= "Time"
-    graph['ytitle']= "Total Amount of Data R/W in blocks per sec"
-    graph['text']= "np = %s" % np_list
-    graph_list.append(graph)  
-############################    
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['logY'] = True
-    graph['data'] ={
-        "write_reqs/sec": (ioTree, ('wtps:Time', cond) ),
-        "read_reqs/sec":  (ioTree, ('rtps:Time', cond) ),
-        "total_reqs/sec": (ioTree, ('tps:Time',  cond) )
-        }
-    graph['title'] = "IO Activity for Athena MP %s" % cond
-    graph['xtitle']= "Time, sec since 00:00"
-    graph['ytitle']= "Transfer_requests/sec"
-    graph['text']= "np = %s" % np_list
-    graph_list.append(graph)
-############################    
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['data'] ={
-        '%_user' :   (cpuTree, ('puser:Time',   cond) ),
-        '%_system':  (cpuTree, ('psystem:Time', cond) ),
-        '%_idle':    (cpuTree, ('pidle:Time',   cond) ),
-        '%_io_wait': (cpuTree, ('piowait:Time', cond) )
-        }
-    graph['title'] = "CPU Activity for Athena MP %s" % cond
-    graph['xtitle']= "Time, sec since 00:00"
-    graph['ytitle']= "Percentage of CPU Utilization"
-    graph['text']= "np = %s" % np_list
-    graph_list.append(graph)
-############################  
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['logY'] = True
-    graph['data'] ={
-        '%_io_wait': (cpuTree, ('piowait:Time', cond) ),
-        }
-    graph['title'] = "CPU Activity for Athena MP %s" % cond
-    graph['xtitle']= "Time, sec since 00:00"
-    graph['ytitle']= "Percentage of CPU Utilization"
-    graph['text']= "np = %s" % np_list
-    graph_list.append(graph)
-#########################
-
-#########################
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['logY'] = True
-    graph['data'] ={
-        'numa_hit': (numaTree, ('numa_hit:Time', cond) ),
-        'numa_miss':(numaTree,('numa_miss:Time', cond) ),
-        'numa_foreign': (numaTree, ('numa_foreign:Time', cond) ),
-        'interleave_hit': (numaTree, ('interleave_hit:Time', cond) ),
-        'local_node': (numaTree, ('local_node:Time', cond) ),
-        'other_node': (numaTree, ('other_node:Time', cond) ),
-        }
-    graph['title'] = "NUMA Activity for athena MJ (Multi Jobs) %s" % cond
-    graph['xtitle']= "Time, sec since 00:00"
-    graph['ytitle']= "Nbr of hits/misses"
-    graph['text']= "np = %s" % np_list
-    graph_list.append(graph)
-
-############################  
-    cond="np==2"
-    graph = dict()
-    graph['type'] = 'graph'
-    graph['data'] ={
-        'free_cached_buffers': (memTree, ('(kbmemfree+kbcached+kbbuffers)/1048576:(Time-60476)', cond) ),
-        }
-    graph['title'] = "Memory Consumption for Athena MP %s" % cond
-    graph['xtitle']= "Time, Sec"
-    graph['ytitle']= "Memory, Gb "
-    graph_list.append(graph)
-############################
-
-    pads = list()
-
-    cpp = 1 #charts per pad 
-    ppc = 1 #pads per canvas
-
-    c.cd()
-    gStyle.SetOptStat(0);
-    gStyle.SetPalette(1);
-    gStyle.SetCanvasColor(33);
-    gStyle.SetFrameFillColor(0);
-    gStyle.SetMarkerStyle(21)
-    gStyle.SetMarkerColor(2)
-    gStyle.SetMarkerSize(0.3)
-    print ("gStyle.Set done")
-
-    title = TPaveLabel(0.1,0.98,0.9,1, "Athena MP Plots");
-    title.SetFillColor(0); title.SetTextFont(40); 
-    #title.Draw();print ("title Drawn")
-
-    mgs =  list()  #List of TMultiGraphs
-    ls =   list()  #List of TLegends
-    gs =   list()  #List of TGraph
-
-    for j in range(ppc):
-        y_factor = 0.99;   x1 = 0.01; x2 = 0.99;  y1 = y_factor - (y_factor-0.01)*(j+1)/float(ppc); y2 = y_factor - (y_factor-0.01)*j/float(ppc)
-        print ("x1,y1,x2,y2",  x1, y1, x2, y2 )
-        pad = TPad("pad%i" % j, "pad%i" % j,   x1, y1, x2, y2,   10); pad.Draw()
-        pads.append(pad);
-    
-    num_cans = len(graph_list) // (cpp*ppc) if len(graph_list) % (cpp*ppc)==0 else len(graph_list)//(cpp*ppc) + 1
-    graph_list += [None,]* (num_cans*cpp*ppc - len(graph_list))
-    print ("number of pages/canvases in report = ", num_cans)
-    
-    pdf_file = root_file
-    for s in ['merged.', '.py', '.root']:
-        pdf_file = pdf_file.replace(s, '')
-    pdf_file ="%s.pdf" % pdf_file
-    tpl = list()
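-    # tpl accumulates the TProfiles booked inside MakeMultiGraph so they can be overlaid once more after the page loop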
-    for i in range(num_cans):
-        for j in range(ppc):
-            graph = graph_list[ppc*i+j]
-            if graph is None:
-                continue
-            
-            if 'logY' in graph.keys():
-                if graph['logY']:
-                    pads[j].SetLogy()
-            else:
-                pads[j].SetLogy(0)
-            
-            pads[j].cd()
-            pads[j].SetRightMargin(0.2)
-            l = TLegend(0.82,0.20,0.99,0.89); ls.append(l) 
-            #tpl = list()#TProfile list    
-            mg = TMultiGraph(); mgs.append(mg)
-            print ("graph=", graph)
-            gs.append(MakeMultiGraph(c, pads[j],graph, mg, l, tpl))
-
-        c.Update()
-        if i == 0:
-            print ("pdf.start")
-            c.Print(pdf_file+'(', 'pdf') #start page
-        elif i < num_cans-1:
-            print ("pdf.body")
-            c.Print(pdf_file, 'pdf')    #body pages
-        else:
-            print ("pdf.end")
-            c.Print(pdf_file + ')', 'pdf') #end page
-        #c.SaveAs("%s.%i.png" % (pdf_file, i))
-        #c.SaveAs("%s.%i.C" % (pdf_file, i))
-        for pad in pads:
-            pad.Clear()
-         
-    #c.Update()
-    #c.cd()
-    pads[0].cd()
-    pads[0].SetRightMargin(0.2)
-    draw_option=""
-    for tp in tpl:
-        print (" TProfile: %s" % tp.GetName())
-        if draw_option=="":
-            tp.Draw()
-            draw_option="PSAME"
-        else:
-            tp.Draw("PSAME")
-    c.Update()
-    #c.Print("%s.pdf" % "coors.memory_fig")
-    #c.SaveAs("%s.png" % "coors.memory_fig")
-    #c.SaveAs("%s.C" % "coors.memory_fig")
-
-        
-########## THE END ################    
-
-
diff --git a/Control/AthenaMP/share/AthenaMP_jobProperties.py b/Control/AthenaMP/share/AthenaMP_jobProperties.py
deleted file mode 100755
index 31d32ee8291aee806db6d37f05cd00b22b83cc62..0000000000000000000000000000000000000000
--- a/Control/AthenaMP/share/AthenaMP_jobProperties.py
+++ /dev/null
@@ -1,117 +0,0 @@
-# @file: Control/AthenaMP/share/AthenaMP_jobProperties
-# @author: Mous Tatarkhanov <tmmous@cern.ch>
-# $Id: AthenaMP_jobProperties.py, v 1.1 2010-12-02 tmmous Exp $
-
-#==============================================================
-# List of all available AthenaMP job properties explained
-#==============================================================
-
-#-------------------IMPORTANT NOTE-----------------------------
-# All mp jobs get default job property values
-# NO NEED to include this file!
-# 
-# USE this file and change settings in your job options 
-# ONLY when you want to change/play with properties!
-#--------------------------------------------------------------
-
-#==============================================================
-# HOW TO RUN ATHENAMP:
-# 1. Job option runs:   
-#   CLI '--nprocs=$n' to be used in command line run of athena.py 
-# 2. Job transform runs:
-#   CLI  '--athenaopts="--nprocs=$n"' to be used on the command line of job transforms
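-#
-# -EXAMPLE- (illustrative only; the job options / transform names below are placeholders):
-#   athena.py --nprocs=4 MyJobOptions.py
-#   MyTransform_trf.py ... --athenaopts="--nprocs=4"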
-#==============================================================
-
-#--------------------------------------------------------------
-# AthenaMP properties
-#--------------------------------------------------------------
-from AthenaMP.AthenaMPFlags import jobproperties as jps
-
-#--------------------TmpDir-----------------------------------
-#TmpDir controls where the workers create their temporary output directory
-
-jps.AthenaMPFlags.TmpDir= "" #default -> "/tmp/$USER/athena-mp-tmp-$USER/$PID"
-#jps.AthenaMPFlags.TmpDir = "$HOME" #set to your home folder
-#-EXAMPLE-
-#for testing different IO settings for MP runs (workers' output):
-#jps.AthenaMPFlags.TmpDir = "$HOME/scratch0" # on the lxplus network drive
-#jps.AthenaMPFlags.TmpDir = "/dev/shm" # standard in-memory filesystem
-#--------------------------------------------------------------
-
-
-#-------------------AffinityCPUList-----------------------------
-# AffinityCPUList controls the pinning (affinity) of the workers to CPU-cores on
-# multicore machines. Default value = [] -> workers float (not pinned).
-
-jps.AthenaMPFlags.AffinityCPUList=[] #default -> workers are floating (not pinned)
-#jps.AthenaMPFlags.AffinityCPUList=[0,1,2,3,4,5,6,7,8] 
-
-#-NOTES-
-#if you have only 4 workers they are pinned
-#to the CPU-cores 0,1,2,3 in that order;
-#if you want the order to be 8,7,6,5 instead,
-# the CPUList should be [8,7,6,5,...]
-
-##-EXAMPLE-
-#If you are forking only two workers, and you want to pin them 
-# to the first and last CPU-cores available on your machine do:
-#import multiprocessing
-#jps.AthenaMPFlags.AffinityCPUList=[0, multiprocessing.cpu_count()-1] 
-#--------------------------------------------------------------
-
-
-#----------------- EventsBeforeFork----------------------------
-# EventsBeforeFork controls when the Workers are forked from the mother process:
-# n=0 -> right after initialization, n>0 -> after the n-th event. Default value: 0.
-
-jps.AthenaMPFlags.EventsBeforeFork=0    #default
-#jps.AthenaMPFlags.EventsBeforeFork=1   #fork workers after processing 1-evt in mother 
-
-#-NOTES-
-#Forking after processing events in the mother has the advantage of
-#sharing more memory between workers and parent via the COW mechanism (>0.5Gb vs. 0.2Gb).
-#But it triggers a vetoing algorithm on the first events processed in the mother,
-#on each output stream, to avoid writing a pool_file in the mother.
-#This is done so that the pool files created by the workers can be merged consistently.
-# /vetoing algorithm ref: Charles Leggett/
-#--------------------------------------------------------------
-
-
-#--------------------doFastMerge-----------------------------
-#doFastMerge is the switch between pool_fast_merge and pool_slow_merge
-
-jps.AthenaMPFlags.doFastMerge = False #default -> use more reliable slow merge
-#jps.AthenaMPFlags.doFastMerge = True 
-
-#-NOTES-
-#slow merge creates a separate Athena merging job /ref: Sebastien Binet/
-#fast merge should be an order of magnitude faster than slow merge;
-#it uses the PoolMerge tool /ref: Peter Van Gemmeren/
-#--------------------------------------------------------------
-
-
-#--------------------doRoundRobin-----------------------------
-#doRoundRobin is the switch between the RoundRobin and Queue mechanisms
-#for feeding events to the Workers
-
-jps.AthenaMPFlags.doRoundRobin = False #default -> use MP.Queue 
-#--------------------useSingleReader-----------------------------
-#useSingleReader is the switch to use a single reader that feeds events
-#into the shared memory queue
-
-jps.AthenaMPFlags.useSingleReader = False #default -> workers read events from the file themselves
-#jps.AthenaMPFlags.useSingleReader = True  #a single reader feeds the shared queue
-
-#-COMMENTS-
-#the default MP.Queue is the more efficient way of distributing events to the Workers,
-#but when EvtMax=-1 (undefined nbr. of evts.) the RoundRobin scheme is used
-#--------------------------------------------------------------
-
-
-#--------------------------------------------------------------
-#--------- more options will be created as needed or requested-
-#--------------------------------------------------------------
-
-#==============================================================
-#---------------------- THE END -------------------------------
-#==============================================================
diff --git a/Control/AthenaMP/share/bs2esd.py b/Control/AthenaMP/share/bs2esd.py
deleted file mode 100644
index 8d4c593ae5cc60eaad439adc285284a7be6086e3..0000000000000000000000000000000000000000
--- a/Control/AthenaMP/share/bs2esd.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# steering file for BS->ESD step
-
-
-from RecExConfig.RecFlags import rec
-rec.doTrigger  .set_Value_and_Lock( False )
-rec.doESD      .set_Value_and_Lock( False )
-rec.doAOD      .set_Value_and_Lock( False )
-rec.doWriteAOD .set_Value_and_Lock( False )
-rec.doWriteTAG .set_Value_and_Lock( False )
-rec.doCBNT     .set_Value_and_Lock( False )
-rec.doTruth    .set_Value_and_Lock( False )
-rec.doPerfMon  .set_Value_and_Lock( False )
-
-rec.readRDO    .set_Value_and_Lock( True )
-rec.doWriteESD .set_Value_and_Lock( True )
-
-BSRDOInput=["/afs/cern.ch/atlas/project/rig/data/data10_7TeV.00152166.physics_MinBias.merge.RAW._lb0206._0001.1"]
-
-from AthenaCommon.AthenaCommonFlags import athenaCommonFlags as acf
-acf.FilesInput = BSRDOInput
-acf.EvtMax = 10
-
-
-# main jobOption
-include ("RecExCommon/RecExCommon_topOptions.py")
-
-
diff --git a/Control/AthenaMP/share/mp_rdotoesd.py b/Control/AthenaMP/share/mp_rdotoesd.py
deleted file mode 100755
index f50dc0717051f5cad8adeccb0586ecfc8efa3287..0000000000000000000000000000000000000000
--- a/Control/AthenaMP/share/mp_rdotoesd.py
+++ /dev/null
@@ -1,48 +0,0 @@
-
-#==============================================================
-# Job Options for fast-reco with AthenaMP
-#==============================================================
-
-# assumptions:
-# 1. RecExCommon_links.sh to be sourced in curdir
-# 2. CLI option --nprocs to be used in command line run of athena.py
-
-
-#----------------------------------------------------------------------
-# AthenaMP properties
-#----------------------------------------------------------------------
-# expect "--nprocs" to be used in command line options of athena.py
-from AthenaMP.AthenaMPFlags import jobproperties as jps
-jps.AthenaMPFlags.EventsBeforeFork=0 
-
-import multiprocessing
-cpu_list = list(range( multiprocessing.cpu_count() )) # [0,1,2,..,ncpus-1] for a many-core machine
-cpu_list.reverse() #reverse the cpu-proc pinning order
-jps.AthenaMPFlags.AffinityCPUList=cpu_list
-
-
-#----------------------------------------------------------------------
-# setting mp_rdotoesd reco
-#----------------------------------------------------------------------
-# expect RecExCommon_links.sh to be sourced in curdir
-
-
-
-# main jobOption
-include ("RecExCommon/rdotoesd.py")
-
-
-#----------------------------------------------------------------------
-# FOR DEBUGGING PURPOSES
-#----------------------------------------------------------------------
-#from AthenaCommon.AppMgr import theApp
-#theApp.ReflexPluginDebugLevel = 10000
-
-#from AthenaCommon.Logging import log as msg
-#msg.info ( "svcMgr=%s" % svcMgr)
-#msg.info ( "appMgr=%s" % theApp)
-
-
-#----------------------------------------------------------------------
-# user modifier should come here
-#----------------------------------------------------------------------
diff --git a/Control/AthenaMP/share/mp_rdotoesd_id.py b/Control/AthenaMP/share/mp_rdotoesd_id.py
deleted file mode 100755
index 41446d9630c61eb4fa073fce185f3779e5850ead..0000000000000000000000000000000000000000
--- a/Control/AthenaMP/share/mp_rdotoesd_id.py
+++ /dev/null
@@ -1,48 +0,0 @@
-
-#==============================================================
-# Job Options for fast-reco with AthenaMP
-#==============================================================
-
-# assumptions:
-# 1. RecExCommon_links.sh to be sourced in curdir
-# 2. CLI option --nprocs to be used in command line run of athena.py
-
-
-#----------------------------------------------------------------------
-# AthenaMP properties
-#----------------------------------------------------------------------
-# expect "--nprocs" to be used in command line options of athena.py
-from AthenaMP.AthenaMPFlags import jobproperties as jps
-jps.AthenaMPFlags.EventsBeforeFork=0 
-
-import multiprocessing
-cpu_list = list(range( multiprocessing.cpu_count() )) # [0,1,2,..,ncpus-1] for a many-core machine
-cpu_list.reverse() #reverse the cpu-proc pinning order
-jps.AthenaMPFlags.AffinityCPUList=cpu_list
-
-
-#----------------------------------------------------------------------
-# setting mp_rdotoesd reco
-#----------------------------------------------------------------------
-# expect RecExCommon_links.sh to be sourced in curdir
-
-
-
-# main jobOption
-include ("RecExRecoTest/RecExRecoTest_RTT_id.py")
-
-
-#----------------------------------------------------------------------
-# FOR DEBUGGING PURPOSES
-#----------------------------------------------------------------------
-#from AthenaCommon.AppMgr import theApp
-#theApp.ReflexPluginDebugLevel = 10000
-
-#from AthenaCommon.Logging import log as msg
-#msg.info ( "svcMgr=%s" % svcMgr)
-#msg.info ( "appMgr=%s" % theApp)
-
-
-#----------------------------------------------------------------------
-# user modifier should come here
-#----------------------------------------------------------------------
diff --git a/Control/AthenaMP/share/mp_reco_fast.py b/Control/AthenaMP/share/mp_reco_fast.py
deleted file mode 100755
index f10ff1b2a674c67356674fed181064c1139af296..0000000000000000000000000000000000000000
--- a/Control/AthenaMP/share/mp_reco_fast.py
+++ /dev/null
@@ -1,75 +0,0 @@
-
-#==============================================================
-# Job Options for fast-reco with AthenaMP
-#==============================================================
-
-# assumptions:
-# 1. RecExCommon_links.sh to be sourced in curdir
-# 2. CLI option --nprocs to be used in command line run of athena.py
-
-
-#----------------------------------------------------------------------
-# AthenaMP properties
-#----------------------------------------------------------------------
-# expect "--nprocs" to be used in command line options of athena.py
-from AthenaMP.AthenaMPFlags import jobproperties as jps
-jps.AthenaMPFlags.EventsBeforeFork=1  
-
-import multiprocessing
-cpu_list = list(range( multiprocessing.cpu_count() )) # [0,1,2,..,ncpus-1] for a many-core machine
-cpu_list.reverse() #reverse the cpu-proc pinning order
-jps.AthenaMPFlags.AffinityCPUList=cpu_list
-
-
-#----------------------------------------------------------------------
-# Setting fast-reco w/o Calo, Muon or Trigger.
-#----------------------------------------------------------------------
-
-# expect RecExCommon_links.sh to be sourced in curdir
-
-from AthenaCommon.AthenaCommonFlags import athenaCommonFlags as acFlags
-acFlags.EvtMax=10
-acFlags.PoolESDOutput="ESD.pool.root"
-
-from RecExConfig.RecFlags import rec
-rec.doPerfMon=True
-
-rec.doCalo=False
-rec.doMuon=False
-#rec.doID=True
-rec.doTrigger=False
-
-rec.doESD=True
-rec.doAOD=False
-rec.doHist=False
-rec.doWriteESD=True
-rec.doWriteAOD=False
-rec.doWriteTAG=False
-
-# if needed to configure trigger
-# see https://twiki.cern.ch/twiki/bin/view/Atlas/TriggerFlags
-# include ( "TriggerJobOpts/TriggerFlags.py" )
-
-# if needed to configure AOD building
-# see https://twiki.cern.ch/twiki/bin/view/Atlas/UserAnalysisTest#The_AOD_Production_Flags
-# from ParticleBuilderOptions.AODFlags import AODFlags
-
-# main jobOption
-include ("RecExCommon/RecExCommon_topOptions.py")
-
-
-#----------------------------------------------------------------------
-# FOR DEBUGGING PURPOSES
-#----------------------------------------------------------------------
-#from AthenaCommon.AppMgr import theApp
-#theApp.ReflexPluginDebugLevel = 10000
-
-#from AthenaCommon.Logging import log as msg
-#msg.info ( "svcMgr=%s" % svcMgr)
-#msg.info ( "appMgr=%s" % theApp)
-
-
-
-#----------------------------------------------------------------------
-# user modifier should come here
-#----------------------------------------------------------------------
diff --git a/Control/AthenaMP/share/tests/AMP_basictests.py b/Control/AthenaMP/share/tests/AMP_basictests.py
deleted file mode 100755
index 24e27c5bd8d6ad336d3d33425f3c0aaa9235581b..0000000000000000000000000000000000000000
--- a/Control/AthenaMP/share/tests/AMP_basictests.py
+++ /dev/null
@@ -1,217 +0,0 @@
-import sys, os, unittest
-sys.path.append( os.path.join( os.getcwd(), os.pardir ) )
-
-from common import *
-
-__all__ = [
-   'Basic011ModuleTestCase',
-   'Basic02ExecutionTestCase',
-   'Basic03GroupTestCase',
-   'Basic04SharedQueueTestCase',
-]
-
-if '--build' in sys.argv:
-   res = os.system( "cd ../../cmt; make QUICK=1" )
-   if res:
-      sys.exit( res )
-
-
-### basic module test cases ==================================================
-class Basic01ModuleTestCase( MyTestCase ):
-   def test01API( self ):
-      """Test module loading and API existence"""
-
-      import _athenamp
-
-      self.assert_( hasattr( _athenamp, 'launch' ) )
-      self.assert_( hasattr( _athenamp, 'ProcessGroup' ) )
-      self.assert_( hasattr( _athenamp, 'Process' ) )
-      self.assert_( hasattr( _athenamp, 'SharedQueue' ) )
-
-   def test02ArgumentsAndErrors( self ):
-      """Test basic faulty argument error handling"""
-
-      import _athenamp
-
-      self.assertRaises( TypeError, _athenamp.launch, 1 )
-
-   def test03Instantiations( self ):
-      """Test class instantiations"""
-
-      import _athenamp
-
-      proc = _athenamp.Process( -1 )
-      self.assertEqual( proc.pid, -1 )
-
-      proc = _athenamp.Process( pid = -1 )
-      self.assertEqual( proc.pid, -1 )
-
-      group = _athenamp.ProcessGroup()
-      group = _athenamp.ProcessGroup( 4 )
-      group = _athenamp.ProcessGroup( nprocs = 4 )
-
-      queue = _athenamp.SharedQueue()
-      queue = _athenamp.SharedQueue( 100 )
-
-
-### basic execution test cases ===============================================
-class Basic02ExecutionTestCase( MyTestCase ):
-   def _checkChildren( self ):
-    # the following tests that there are no children running
-      self.assertRaises( OSError, os.wait )
-
-   def setUp( self ):
-      self._checkChildren()
-
-   def tearDown( self ):
-      self._checkChildren()
-
-   def test01RunChild( self ): 
-      """Test running and destruction of a child"""
-
-      import _athenamp
-
-      proc = _athenamp.launch()
-      self.assert_( 0 <= proc.pid )
-
-      if proc.pid == 0:
-         import signal
-         signal.pause()
-      else:
-         import time, signal
-         time.sleep(1)
-         sigtosend = signal.SIGKILL
-         os.kill( proc.pid, sigtosend )
-         result = os.waitpid( proc.pid, 0 )
-         self.assertEqual( result[0], proc.pid )
-         self.assertEqual( result[1], sigtosend )
-
-   def test02RunChildren( self ):
-      """Test running and destruction of a group of children"""
-
-      import _athenamp
-
-      pids = []
-
-      leader = _athenamp.launch()
-      if leader.pid == 0:     # make child wait
-         import signal
-         signal.pause()
-      else:
-         os.setpgid( leader.pid, 0 )
-         pids.append( leader.pid )
-
-      for i in range( 2 ):
-         proc = _athenamp.launch();
-         self.assert_( 0 <= proc.pid )
-
-         if proc.pid == 0:    # make all children wait
-            import signal
-            signal.pause()
-         else:
-            assert leader.pid
-            os.setpgid( proc.pid, os.getpgid( leader.pid ) )
-            pids.append( proc.pid )
-
-      import time, signal
-      time.sleep( 1 )
-      sigtosend = signal.SIGKILL
-      pgid = os.getpgid( leader.pid )
-      os.killpg( pgid, sigtosend )
-      while pids:
-         result = os.waitpid( -pgid, 0) 
-         self.assert_( result[0] in pids )
-         self.assertEqual( result[1], sigtosend )
-         pids.remove( result[0] )
-
-
-### basic group usage test cases =============================================
-class Basic03GroupTestCase( MyTestCase ):
-   def test01GroupLifetime( self ):
-      """Test creation and life time of a group"""
-
-      import _athenamp
-
-      group = _athenamp.ProcessGroup( 4 )
-
-    # nothing started yet, so waiting should simply return
-      self.assertEqual( group.wait(), () )
-      self.assertEqual( group.wait( 0 ), () )
-      self.assertEqual( group.wait( options = 0 ), () )
-
-   def test02RunMapAsync( self ):
-      """Test no-op running of map_async on a worker group"""
-      
-      import _athenamp
-
-      group = _athenamp.ProcessGroup( 4 )
-      group.map_async( "exit" )
-      self.assertEqual( len(group._children()), 4 )   # now instantiated
-
-      status = group.wait()
-      self.assertEqual( [ x[1] for x in status ], 4*[0,] )
-
-   def test03PythonTaskMapAsync( self ):
-      """Test running a python task via map_async on a worker group"""
-      
-      import _athenamp, __main__
-
-      def myfunc():
-      #  print 'called myfunc'
-          return 1
-      __main__.myfunc = myfunc
-
-    # existing function with return value
-      group = _athenamp.ProcessGroup( 4 )
-      group.map_async( "myfunc" )
-      status = group.wait()
-
-      self.assertEqual( [ x[1] for x in status ], 4*[0,] )
-      self.assertEqual( [ x[2] for x in status ], 4*[1,] )
-
-    # non-existing function, leading to failure
-      group = _athenamp.ProcessGroup( 4 )
-      group.map_async( "no_such_func" )
-      status = group.wait()
-
-      self.assertEqual( [ x[1] for x in status ], 4*[0x0B,] )
-
-
-### basic group usage test cases =============================================
-class Basic04SharedQueueTestCase( MyTestCase ):
-   def test01SharedQueueSending( self ):
-      """Test put functionality of shared queue"""
-
-      import _athenamp, random
-
-      q = _athenamp.SharedQueue( 5 )
-
-      r = random.Random( 1 )
-      largebuf = ''.join( [ str(r.random()) for i in range(4096) ] )
-      self.assertRaises( OverflowError, q.put_nowait, largebuf ) # too large for buffer
-
-      for i in range(5):
-         q.put_nowait( i )
-      self.assertRaises( OverflowError, q.put_nowait, 5 )        # too many elements
-
-      for i in range(5):
-         self.assertEqual( q.get_nowait(), i )
-      self.assertRaises( EOFError, q.get_nowait )
-
-      sdata = [ "text", "text\0with\0null", "morenull\0\0" ]
-      for t in sdata:
-         q.put_nowait( t )
-         self.assertEqual( q.get_nowait(), t )
-
-
-## actual test run
-if __name__ == '__main__':
-   from MyTextTestRunner import MyTextTestRunner
-
-   loader = unittest.TestLoader()
-   testSuite = loader.loadTestsFromModule( sys.modules[ __name__ ] )
-
-   runner = MyTextTestRunner( verbosity = 2 )
-   result = not runner.run( testSuite ).wasSuccessful()
-
-   sys.exit( result )
diff --git a/Control/AthenaMP/share/tests/MyTextTestRunner.py b/Control/AthenaMP/share/tests/MyTextTestRunner.py
deleted file mode 100755
index be8438bd2b125548d4221441080d0166a8c48e69..0000000000000000000000000000000000000000
--- a/Control/AthenaMP/share/tests/MyTextTestRunner.py
+++ /dev/null
@@ -1,37 +0,0 @@
-import unittest
-
-if hasattr( unittest, 'TextTestResult' ):
-   class MyTextTestResult( unittest.TextTestResult ):
-      def getDescription(self, test):
-         return test.shortDescription()
-else:
-   class MyTextTestResult( object ):
-      pass
-
-
-class MyTextTestRunner( unittest.TextTestRunner ):
-   resultclass = MyTextTestResult
-
-   def run( self, test ):
-      """Run the given test case or test suite."""
-
-      result = self._makeResult()
-      test( result )
-      result.printErrors()
-      self.stream.writeln( result.separator2 )
-      run = result.testsRun
-      self.stream.writeln()
-
-      if not result.wasSuccessful():
-         self.stream.write( "FAILED (" )
-         failed, errored = map( len, ( result.failures, result.errors ) )
-         if failed:
-            self.stream.write( "failures=%d" % failed )
-         if errored:
-            if failed: self.stream.write( ", " )
-            self.stream.write( "errors=%d" % errored )
-         self.stream.writeln( ")" )
-      else:
-         self.stream.writeln( "OK" )
-   
-      return result
diff --git a/Control/AthenaMP/share/tests/common.py b/Control/AthenaMP/share/tests/common.py
deleted file mode 100755
index 4a7d1da005ba2d59f0fcb912bf23b375a5fa041d..0000000000000000000000000000000000000000
--- a/Control/AthenaMP/share/tests/common.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# File: roottest/python/common.py
-# Author: Wim Lavrijsen (LBNL, WLavrijsen@lbl.gov)
-# Created: 09/24/10
-# Last: 09/30/10
-
-__all__ = [ 'pylong', 'maxvalue', 'MyTestCase' ]
-
-import os, sys, unittest
-
-if sys.hexversion >= 0x3000000:
-   pylong = int
-   maxvalue = sys.maxsize
-
-   class MyTestCase( unittest.TestCase ):
-      def shortDescription( self ):
-         desc = str(self)
-         doc_first_line = None
-
-         if self._testMethodDoc:
-            doc_first_line = self._testMethodDoc.split("\n")[0].strip()
-         if doc_first_line:
-            desc = doc_first_line
-         return desc
-else:
-   pylong = long
-   maxvalue = sys.maxint
-
-   class MyTestCase( unittest.TestCase ):
-      pass
diff --git a/Control/AthenaMP/share/tests/extract_mp_stat.py b/Control/AthenaMP/share/tests/extract_mp_stat.py
deleted file mode 100755
index 8d77ccc29283e38a3e16c136ea9eaa00f7217528..0000000000000000000000000000000000000000
--- a/Control/AthenaMP/share/tests/extract_mp_stat.py
+++ /dev/null
@@ -1,52 +0,0 @@
-#!/usr/bin/env python
-
-# @file:    mpMon.py
-# @purpose: This script extracts the WORKER information from the parent process's log and then from the WORKERs' logs in AthenaMP
-# @author:  Mous Tatarkhanov <tmmous@cern.ch>
-# @date:    April 15, 2009
-# @example:
-# @code
-# extract_mp_stat.py    log.parent_process
-# @endcode
-#
-
-from __future__ import print_function
-
-__version__ = "$Revision: 285809 $"
-__author__  = "Mous Tatarkhanov <tmmous@cern.ch>"
-
-from optparse import OptionParser
-#import AthenaMP.mpMonTools as mpt
-
-import sys
-
-if __name__ == "__main__":
-
-    parser = OptionParser(usage="usage: %prog [options] -f jobo")
-    p = parser.add_option
-    p( "-f",
-        "--file",
-        dest = "log_file",
-        default = None,
-        help ="athenaMP parent process stdout log" 
-    )
-    
-    (options, args) = parser.parse_args()
-    
-    import sys, os
-    if  options.log_file == None:
-        str(parser.print_help() or "")
-        sys.exit(1)
-        
-    log_file = options.log_file
-
-    print ("log_file = %s" % log_file)
-    if not os.path.exists(log_file):
-        print ("log_file doesn't exist. Please give valid parent process log file")
-        str(parser.print_help() or "")
-        sys.exit(1)
-    
-    
-    from AthenaMP import mpMonTools as mpt
-    mpt.writeOutWorkersStat(log_file)
-    print ("DONE...")
diff --git a/Control/AthenaMP/share/tests/flush_cache.py b/Control/AthenaMP/share/tests/flush_cache.py
deleted file mode 100755
index efeab70276392bef71eb70d6951fd87e6b854572..0000000000000000000000000000000000000000
--- a/Control/AthenaMP/share/tests/flush_cache.py
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/usr/bin/env python
-
-from __future__ import print_function
-
-import os, sys, time
-import multiprocessing
-from multiprocessing import Pool
-
-from future import standard_library
-standard_library.install_aliases()
-import subprocess
-
-def flush_cache(n):
-    l = list()
-    for i in range(n):
-        l.append("a" * (1 << 30))
-    print ("%i gb of memory eaten" % n)
-    time.sleep(10)
-    return n
-
-if __name__ == '__main__':
-    out = subprocess.getoutput('free -m')
-    print (">free -m \n", out)
-
-    total_mem_mb =int( out.splitlines()[1].split()[1])
-    cached_mem_mb = int( out.splitlines()[1].split()[6])
-    print ("CACHED [%i Mb] - before flush" % cached_mem_mb)
-    
-    if cached_mem_mb < 200:
-        print ("no need to flush the cache... bye!")
-        sys.exit(0)
-
-    gb = 1 + (total_mem_mb >> 10) 
-    
-    ncpus = multiprocessing.cpu_count() 
-    print ("ncpus= [%i]" % ncpus)
-    print ("total available memory [%i Mb] [%i Gb]" % (total_mem_mb, gb))
-
-    nprocs = 2*ncpus
-    ngb = 1 + gb / nprocs
-
-    
-    print ("Nbr of Procs to bite on memory [%i] " % nprocs)
-    print ("Nbr of Gb to flush per process  [%i Gb]" % ngb)
-
-
-    pool = Pool(processes = nprocs)
-    result = pool.map(flush_cache, [ngb,]*nprocs)
-    print ("Total memory eaten: [%i Gb]" % sum(result))
-
-    out = subprocess.getoutput('free -m')        
-    print (">free -m \n", out)
-    cached_mem_mb = int( out.splitlines()[1].split()[6])    
-    print ("CACHED [%i Mb] - after flush" % cached_mem_mb)
-    print ("Your machine's memory cache is  flushed" )
-    
-    time.sleep(5)
diff --git a/Control/AthenaMP/share/tests/mjMon.py b/Control/AthenaMP/share/tests/mjMon.py
deleted file mode 100755
index 28697e436aa873261770450111e5072aec3453d5..0000000000000000000000000000000000000000
--- a/Control/AthenaMP/share/tests/mjMon.py
+++ /dev/null
@@ -1,287 +0,0 @@
-#!/usr/bin/env python
-
-# @file:    mjMon.py
-# @purpose: This script monitors the mp performance of AthenaMP
-#           Rewriting the shell script - athenaMP_monitor.sh
-#           Inspired by monitoring tools from Sebastien: PerfMon; 
-# @author:  Mous Tatarkhanov <tmmous@cern.ch>
-# @date:    December 2009
-#
-# @example:
-# @code
-# monitor_athenaMP 2 rdotoesd.py 60
-# @endcode
-#
-
-from __future__ import print_function
-
-__version__ = "$Revision: 276792 $"
-__author__  = "Mous Tatarkhanov <tmmous@cern.ch>"
-
-from optparse import OptionParser
-import AthenaMP.mjMonTools as mpt
-
-import sys
-
-class Writer:
-    def __init__(self, stdout, filename):
-        self.stdout = stdout
-        self.logfile = open(filename, 'a')
-
-    def write(self, text):
-        self.stdout.write(text)
-        self.logfile.write(text)
-
-    def close(self):
-        self.stdout.close()
-        self.logfile.close()
-
-    def flush(self):
-        self.stdout.flush()
-        self.logfile.flush()
-
-if __name__ == "__main__":
-
-    parser = OptionParser(usage="usage: %prog [options] -f jobo")
-    p = parser.add_option
-    p( "-j",
-       "--jobo",
-       dest = "jobo",
-       help = "The path to the job options file to run in parallel" )
-    p( "-p",
-       "--np",
-       dest    = "nbrProcs",
-       default = [1,2],
-       help = "Nbr of parallel processes to fork" )
-    p( "-e",
-       "--ne",
-       dest    = "nbrEvts",
-       default = "-1",
-       help = "Number of events to process, EvtMax" )
-    p( "-o",
-       "--output",
-       dest = "outFileName",
-       default = 'mplog',
-       help = "Name of the output file which will contain the informations gathered for  monitoring." )
-    p( "-f",
-        "--flush_cache",
-        dest = "doFlushCache",
-        action = "store_true",
-        default = False,
-        help ="switch to activate flushing of the machine cache of the machine before launching athenaMP" )
-    p( "-c",
-       "--comments",
-       dest = "commentsStr",
-       default = "",
-       help = "comments to add to the name and report" )
-    p( "-d",
-       "--doPlots",  #FIX
-       dest = "doPlots",
-       action="store_true",
-       default = False,
-       help = "switch to activate plotting of report charts at the end"
-       ) 
-    p( "-n",
-       "--numa_set",  #FIX
-       dest = "numaSet",
-       #action="store_true",
-       default = None,
-       help = "this option activates numa settings. Format: [(0,0),(1,1),...]"
-       )
-    (options, args) = parser.parse_args()
-    
-    import sys
-    if options.jobo == None or options.nbrProcs ==None or options.nbrEvts == None:
-        str(parser.print_help() or "")
-        sys.exit(1)
-        
-    if len(args) > 0:
-        fileNames = [ arg for arg in args if arg[0] != "-" ]
-        pass
-    
-    #output redirection to file and display
-    writer = Writer(sys.stdout, options.outFileName )
-    sys.stdout = writer 
-    np_list = eval(options.nbrProcs)
-    if isinstance(np_list, int):
-        np_list = [np_list,]
-
-    if options.numaSet is not None:
-        numa_list = eval(options.numaSet)
-    else:
-        numa_list = None
-
-    if isinstance(numa_list, list):
-        print ("numa_list=%s" % numa_list)
-    elif numa_list is not None:
-        print ("Please input correct numa_list")
-        str(parser.print_help() or "")
-        sys.exit(1)
-
-    ne = int(options.nbrEvts)
-    jobo = options.jobo
-    print ("np_list = ", np_list)
-    print ("ne = ", ne)
-    print ("jobo = ", jobo)
-    job = jobo.split()[0] 
-    print ("mpMon.log =", options.outFileName)
-    print ("doFluchCache=", options.doFlushCache, type(options.doFlushCache))
-    if options.doFlushCache:
-        options.commentsStr += ".doFlushCache"
-    print ("numa_list=%s" % numa_list)
-
-    def cleanup():
-        print (' Cleaning...Goodbye!')
-        for pid in mpt.pid_list:
-            mpt.stop_proc_tree(pid)
-    
-    def _print_mem():
-        mpt.print_memstat("<np%i.ne%i>:" % (np, ne))
-
-    import atexit
-    atexit.register(cleanup)
-    
-    TIME_STEP = mpt.TIME_STEP
-    
-    import os
-    import subprocess
-    import signal
-    import time
-    
-    
-    for np in np_list:
-        writer.flush()
-        
-        mpt.show_numactl()
-        
-        suffix = "mj.%s.%i.%i" % (jobo.split()[0], np, ne) 
-        sar_log = "sar.%s" % suffix
-        if os.path.exists(sar_log):
-            os.remove(sar_log)
-               
-        if options.doFlushCache:
-            print (subprocess.call(['flush_cache.py',]))
-            
-        #time.sleep(TIME_STEP) 
-        
-        mpt.init_mp_stat()
-        _mp_stat = mpt.mp_stat
-        #_print_mem()
-        
-        sar_proc = mpt.launch_sar(sar_log, TIME_STEP) #launching sar for io,mem,cpu monitoring
-        
-        #time.sleep(3*TIME_STEP)
-        
-        _print_mem();
-        t0=t1=t2=t3=0   
-        t0=time.time()
-        
-        #LAUNCH:
-        proc_list = list()
-        proc_dict = dict()
-        #cpubind = [0,0,0,0,  1,1,1,1]
-        #membind=   [0,0,0,0,  1,1,1,1]
-        
-        if numa_list is not None:
-            if len(numa_list) < np:
-                print ("len(numa_list) < np:  need to append [('f','f'),]")
-                numa_list += [('f','f'),] * (np - len(numa_list))
-            else:
-                print ("len(numa_list)==len(range(np)): there are enough numa settings defined")
-            
-            iterator = zip(range(np), numa_list)
-        else:
-            iterator = zip(range(np), range(np))
-
-        print ("numa_list=%s" % numa_list)
-
-        se = 0 # skip events
-        numa_set = None
-        numa_str = ""
-        for item in iterator:
-            i = item[0]
-            if numa_list is not None:
-                numa_set = item[1]
-                numa_str = "numactl --membind=%i --cpubind=%i " % (numa_set[0],numa_set[1])
-            
-            (proc, proc_out, proc_err) = mpt.launch_athena(jobo, ne, se, np, "ne%i.ni%i" % (ne,i), numa_set ); #launching athena
-            proc_list.append(proc)
-            proc_dict[proc.pid] = (proc_out, proc_err)
-            #print ("%s athena %i.%i.%i proc launched ...[pid %i]  out:%s err:%s" % (numa_str, ne, np, i, proc.pid, proc_out, proc_err ))
-            se +=ne
-        time.sleep(TIME_STEP); 
-
-        #EXECUTION: 
-        sc = list()# status code 
-        ppid=os.getpid()
-        while mpt.launched_processes_working(ppid):
-            _print_mem()
-            time.sleep(TIME_STEP)
-            for proc in proc_list:
-                rc = proc.poll() # return code if subprocess finished
-                if sc is not None:
-                    sc.append(rc)
-            
-        t1=time.time()
-        
-        print ("athena processes finished:")
-        
-        #SUMMARY
-        mpt.summarize_proc_stat()
-        for i in range(2):
-            _print_mem()
-            time.sleep(TIME_STEP)
-        
-        print ("FINISHED MONITORING:")
-        mpt.stop_proc(sar_proc)
-        
-        print ("COLLECTING STATISTICS...")
-        mpt.get_full_sar_stat(sar_log)
-        print ("FINISHED COLLECTING STATISTICS")
-        
-        print ("START ANALYSIS...")
-        
-        print (" ELAPSED TIMES: \n Time: dt1=[%i sec]" % (t1-t0))
-        
-    
-        _mp_stat['sp_summary']=mpt.SPSummary(np)
-        _mp_summary = _mp_stat['sp_summary'] 
-        _mp_summary.extract_summary("dir") 
-        
-        #_mp_summary['x_init_time']= [t1-t0, ] #externally observed time
-        #_mp_summary['x_par_time'] = [t2-t1, ] #externally observed time
-        #_mp_summary['x_fin_time'] = [t3-t2, ] #externally observed time
-        #_mp_summary['x_elap_time']= [t1-t0, ]
-        #_mp_summary['event_rate']= [ float(ne)*60.0/float(_mp_summary['m_par_time'][0]), ]
-        #_mp_summary['event_rate_x'] = [ float(ne)*60.0/float(t1-t0), ]
-        #_mp_summary['event_proc_rate']= [ _mp_summary['event_rate'][0] / float(np), ]
-        #_mp_summary['event_proc_rate_x'] = [_mp_summary['event_rate_x'][0] / float(np), ]
-
-        
-        mpt.print_summary()
-        
-        print ("FINISHED ANALYSIS")
-        
-        print ("START REPORT...")
-        mpt.prepare_mp_stat() # preparing mp_stat dictionary for ROOT
-        import pickle
-        pickle.dump(_mp_stat, open("pickle.%s.f" % suffix,  "wb"))
-
-        mpt.writeRootFile("%s.root" % suffix, np)
-        print ("FINISHED REPORT.")
-        
-        cleanup()
-
-    import platform
-    from socket import gethostname
-    host_name =  gethostname().split('.')[0]
-    merged_root_file = "%s.mj.%s.ne%i.%s.root" % (host_name, job, ne, options.commentsStr)
-    mpt.mergeRootOutput(merged_root_file, job, np_list, ne)    
-    
-    if options.doPlots:
-        mpt.report(merged_root_file, ne, comments = options.commentsStr)
-    
-    cleanup()
-    print ("The End")
-    sys.exit(0)
-
diff --git a/Control/AthenaMP/share/tests/mpMon.py b/Control/AthenaMP/share/tests/mpMon.py
deleted file mode 100755
index 92400de6aaf920c6a2645634857e51ae44a65b75..0000000000000000000000000000000000000000
--- a/Control/AthenaMP/share/tests/mpMon.py
+++ /dev/null
@@ -1,276 +0,0 @@
-#!/usr/bin/env python
-
-# @file:    mpMon.py
-# @purpose: This script monitors the mp performance of AthenaMP
-#           Rewriting the shell script - athenaMP_monitor.sh
-#           Inspired by monitoring tools from Sebastien: PerfMon; 
-# @author:  Mous Tatarkhanov <tmmous@cern.ch>
-# @date:    December 2009
-#
-# @example:
-# @code
-# monitor_athenaMP 2 rdotoesd.py 60
-# @endcode
-#
-
-from __future__ import print_function
-
-
-"""
-PROCESS STATE CODES
-Here are the different values that the s, stat and state output specifiers (header "STAT" or "S") will
-display to describe the state of a process.
-D    Uninterruptible sleep (usually IO)
-R    Running or runnable (on run queue)
-S    Interruptible sleep (waiting for an event to complete)
-T    Stopped, either by a job control signal or because it is being traced.
-W    paging (not valid since the 2.6.xx kernel)
-X    dead (should never be seen)
-Z    Defunct ("zombie") process, terminated but not reaped by its parent.
-"""
-
-__version__ = "$Revision: 285809 $"
-__author__  = "Mous Tatarkhanov <tmmous@cern.ch>"
-
-from optparse import OptionParser
-import AthenaMP.mpMonTools as mpt
-
-import sys
-
-class Writer:
-    def __init__(self, stdout, filename):
-        self.stdout = stdout
-        self.logfile = open(filename, 'a')
-
-    def write(self, text):
-        self.stdout.write(text)
-        self.logfile.write(text)
-
-    def close(self):
-        self.stdout.close()
-        self.logfile.close()
-
-    def flush(self):
-        self.stdout.flush()
-        self.logfile.flush()
-
-if __name__ == "__main__":
-
-    parser = OptionParser(usage="usage: %prog [options] -f jobo")
-    p = parser.add_option
-    p( "-j",
-       "--jobo",
-       dest = "jobo",
-       help = "The path to the job options file to run in parallel" )
-    p( "-p",
-       "--np",
-       dest    = "nbrProcs",
-       default = [1,2],
-       help = "Nbr of parallel processes to fork" )
-    p( "-e",
-       "--ne",
-       dest    = "nbrEvts",
-       default = "-1",
-       help = "Number of events to process, EvtMax" )
-    p( "-o",
-       "--output",
-       dest = "outFileName",
-       default = 'mplog',
-       help = "Name of the output file which will contain the informations gathered for  monitoring." )
-    p( "-f",
-        "--flush_cache",
-        dest = "doFlushCache",
-        action = "store_true",
-        default = False,
-        help ="switch to activate flushing of the machine cache of the machine before launching athenaMP" )
-    p( "-c",
-       "--comments",
-       dest = "commentsStr",
-       default = "",
-       help = "comments to add to the name and report" )
-    p( "-d",
-       "--doPlots",  #FIX
-       dest = "doPlots",
-       action="store_true",
-       default = False,
-       help = "switch to activate plotting of report charts at the end"
-       )
-    
-    (options, args) = parser.parse_args()
-    
-    import sys
-    if options.jobo == None or options.nbrProcs ==None or options.nbrEvts == None:
-        str(parser.print_help() or "")
-        sys.exit(1)
-        
-    if len(args) > 0:
-        fileNames = [ arg for arg in args if arg[0] != "-" ]
-        pass
-    
-    #output redirection to file and display
-    writer = Writer(sys.stdout, options.outFileName )
-    sys.stdout = writer 
-    np_list = eval(options.nbrProcs)
-    if isinstance(np_list, int):
-        np_list = [np_list,]
-
-    ne = int(options.nbrEvts)
-    jobo = options.jobo
-    print ("np_list = ", np_list)
-    print ("ne = ", ne)
-    print ("jobo = ", jobo)
-    print ("mpMon.log =", options.outFileName)
-    print ("doFluchCache=", options.doFlushCache, type(options.doFlushCache))
-    if options.doFlushCache:
-        options.commentsStr += ".doFlushCache"
-    
-    
-    def cleanup():
-        print (' Cleaning...Goodbye!')
-        for pid in mpt.pid_list:
-            mpt.stop_proc_tree(pid)
-
-    import atexit
-    atexit.register(cleanup)
-    
-    TIME_STEP = mpt.TIME_STEP
-    
-    import os
-    import subprocess
-    import signal
-    import time
-    
-    for np in np_list:
-        writer.flush()
-        
-        suffix = "mp.%s.%i.%i" % (jobo, np, ne) 
-        sar_log = "sar.%s" % suffix
-        if os.path.exists(sar_log):
-            os.remove(sar_log)
-            
-        def _print_mem():
-            mpt.print_memstat("<np%i.ne%i>:" % (np, ne))
-       
-        if options.doFlushCache:
-            print (subprocess.call(['flush_cache.py',]))
-            time.sleep(TIME_STEP) 
-        
-        mpt.init_mp_stat()
-        _mp_stat = mpt.mp_stat
-        #_print_mem()
-        
-        sar_proc = mpt.launch_sar(sar_log, TIME_STEP) #launching sar for io,mem,cpu monitoring
-        
-        time.sleep(3*TIME_STEP)
-        
-        _print_mem();
-        t0=t1=t2=t3=0   
-        t0=time.time()
-        mproc = mpt.launch_athenaMP(jobo, np, ne); #launching athena-MP
-        mpid = mproc.pid #mother process pid
-        print ("parent launched ...[ %i]" % mpid       )
-        
-        mp_log = os.path.join("mp.output", "stdout.%s" % suffix)
-        #print ("mpid_log = ", mp_log)
-        
-        _mproc = mpt.ProcDict(mpid, child=False)        
-       
-        time.sleep(TIME_STEP); 
-
-        #SERIAL: Mother Init Stage
-        while not mpt.children_born(mp_log, mpid,np) and _mproc.proc_ps_stat():
-            if np==0: break
-            time.sleep(TIME_STEP)
-        t1=time.time()
-        
-        
-        #PARALLEL Stage
-        while mpt.children_working(mpid) and _mproc.proc_ps_stat():
-            if np==0: break
-            _print_mem()
-            time.sleep(TIME_STEP)
-        t2 = time.time()
-        
-        _print_mem()
-        
-        print ("children processes finished:")
-        
-        #SERIAL: Mother-Finalize stage
-        while mproc.poll() is None:
-            _mproc.proc_ps_stat()
-            _print_mem()        
-            time.sleep(TIME_STEP)
-        t3 = time.time()
-        
-        mpt.summarize_proc_stat()
-       
-        #print ("EXIT, thus have to terminate all created processes:")
-        try:
-            mproc.wait(); print ("mproc joined-finished")
-        except Exception as e:
-            print ("## while waiting mother process caught exception [%s] !!" % str(e.__class__), "## What:",e,)
-            print (sys.exc_info()[0], sys.exc_info()[1])
-            sc = 1
-            pass
-        
-        for i in range(3):
-            _print_mem()
-            time.sleep(TIME_STEP)
-        
-        print ("FINISHED MONITORING:")
-        mpt.stop_proc(sar_proc)
-        
-        print ("COLLECTING STATISTICS...")
-        mpt.get_full_sar_stat(sar_log)
-        print ("FINISHED COLLECTING STATISTICS")
-        
-        print ("START ANALYSIS...")
-        
-        cp_dir = mpt.grepPath(mp_log, "workdir", sep=':')
-        #print ("worker master cpid_dir = ", cp_dir)
-        print (" ELAPSED TIMES: \n MotherInit: dt1=[%i sec] \n Parallel dt2=[%i sec] \n MotherFinalize dt3=[%i sec]" % (t1-t0, t2-t1, t3-t2))
-        
-        _mp_stat['cp_summary']=mpt.CPSummary(np)
-        _mp_stat['mp_summary']=mpt.MPSummary(np)
-        _mp_summary = _mp_stat['mp_summary']
-        _cp_summary = _mp_stat['cp_summary']
-        
-        _mp_summary.extract_summary(mp_log)
-        _cp_summary.extract_summary(cp_dir)
-        _mp_summary['x_init_time']= [t1-t0, ] #externally observed time
-        _mp_summary['x_par_time'] = [t2-t1, ] #externally observed time
-        _mp_summary['x_fin_time'] = [t3-t2, ] #externally observed time
-
-        _mp_summary['event_rate']= [ float(ne)*60.0/float(_mp_summary['m_par_time'][0]), ]
-        _mp_summary['event_rate_x'] = [ float(ne)*60.0/float(t2-t1), ]
-        _mp_summary['event_proc_rate']= [ _mp_summary['event_rate'][0] / float(np), ]
-        _mp_summary['event_proc_rate_x'] = [_mp_summary['event_rate_x'][0] / float(np), ]
-
-        
-        mpt.print_summary()
-        
-        print ("FINISHED ANALYSIS")
-        
-        print ("START REPORT...")
-        mpt.prepare_mp_stat() # preparing mp_stat dictionary for ROOT
-        import pickle
-        pickle.dump(_mp_stat, open("pickle.%s.f" % suffix,  "wb"))
-
-        mpt.writeRootFile("%s.root" % suffix, np)
-        print ("FINISHED REPORT.")
-        
-        cleanup()
-
-    import platform
-    from socket import gethostname
-    host_name =  gethostname().split('.')[0]
-    merged_root_file = "%s.mp.%s.ne%i.%s.root" % (host_name, jobo, ne, options.commentsStr)
-    mpt.mergeRootOutput(merged_root_file, jobo, np_list, ne)    
-    
-    if options.doPlots:
-        mpt.report(merged_root_file, ne, comments = options.commentsStr)
-    
-    cleanup()
-    print ("The End")
-    sys.exit(0)
-
diff --git a/Control/AthenaMP/share/tests/mp_basic_test.py b/Control/AthenaMP/share/tests/mp_basic_test.py
deleted file mode 100644
index 91ac6f92eff939379e4e9dde81377924dc26c62f..0000000000000000000000000000000000000000
--- a/Control/AthenaMP/share/tests/mp_basic_test.py
+++ /dev/null
@@ -1,65 +0,0 @@
-#!/usr/bin/env python
-
-# @file mp_basic_test.py
-# @purpose: simple file to create a few elephantino events with athena-mp
-
-from __future__ import print_function
-
-input_file_name = 'my.data.pool'
-output_file_name= 'reaccessed.my.data.pool'
-
-import PyUtils.AthFile as af
-af.server.flush_cache()
-
-import os
-import AthenaCommon.ChapPy as accp
-app = accp.AthenaApp()
-app << """
-EVTMAX=1000
-OUTPUT='%(input_file_name)s'
-""" % globals()
-app.include('AthExThinning/AthExThinning_makeData.py')
-
-print ("=== create an elephantino file...")
-rc = app.run(stdout=os.devnull)
-if rc:
-    raise RuntimeError(rc)
-print ("=== create an elephantino file... [ok]")
-
-
-app = accp.AthenaApp(cmdlineargs=['--nprocs=-1'])
-app << """
-EVTMAX=1000 #-1
-INPUT=['%(input_file_name)s']
-OUTPUT='%(output_file_name)s'
-""" % globals()
-
-app.include('AthExThinning/ReadNonThinnedData_jobOptions.py')
-
-mp_logfile = open('mp.elephantino.readback.logfile.txt', 'w+')
-print ("=== read the elephantino file back (with athena-mp)... (logfile=%s)" % (mp_logfile.name,))
-rc = app.run(stdout=mp_logfile)
-if rc:
-    raise RuntimeError(rc)
-print ("=== read the elephantino file back (with athena-mp)... [ok]")
-
-input_file  = af.fopen(input_file_name).infos
-output_file = af.fopen(output_file_name).infos
-
-print (":"*80)
-print ("::: results:")
-
-print ("""\
-input_file: [%s]
-  nentries: %s""" % (
-  input_file['file_name'],
-  input_file['nentries']))
-
-print ("""\
-output_file: [%s]
-   nentries: %s""" % (
-   output_file['file_name'],
-   output_file['nentries']))
-
-print ("::: bye.")
-print (":"*80)
diff --git a/Control/AthenaMP/share/tests/mp_genevt_test.py b/Control/AthenaMP/share/tests/mp_genevt_test.py
deleted file mode 100644
index 05a7e125c3ccd5ea2825a178f30f6590bcdc9ccc..0000000000000000000000000000000000000000
--- a/Control/AthenaMP/share/tests/mp_genevt_test.py
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env python
-
-# @file mp_genevt_test.py
-# @purpose: simple file to create a few ttbar events and read them back
-#           with athena-mp
-
-from __future__ import print_function
-
-input_file_name = 'mc.event.pool'
-output_file_name= 'reaccessed.mc.event.pool'
-
-import PyUtils.AthFile as af
-af.server.flush_cache()
-
-import os
-import AthenaCommon.ChapPy as accp
-app = accp.AthenaApp()
-app << """
-EVTMAX=1000
-OUTPUT='%(input_file_name)s'
-""" % globals()
-app.include('McParticleTests/iotest_WriteGenEvent_jobOptions.py')
-
-evt_logfile = open('mp.evgen.logfile.txt', 'w+')
-print ("=== create an EVGEN file...")
-rc = app.run(stdout=evt_logfile)
-if rc:
-    raise RuntimeError(rc)
-print ("=== create an EVGEN file... [ok]")
-
-
-app = accp.AthenaApp(cmdlineargs=['--nprocs=-1'])
-app << """
-EVTMAX=1000
-INPUT=['%(input_file_name)s']
-OUTPUT='%(output_file_name)s'
-""" % globals()
-
-app.include('McParticleTests/iotest_ReadGenEvent_jobOptions.py')
-
-mp_logfile = open('mp.readback.logfile.txt', 'w+')
-print ("=== read the EVGEN file back (with athena-mp)... (logfile=%s)" % (mp_logfile.name,))
-rc = app.run(stdout=mp_logfile)
-if rc:
-    raise RuntimeError(rc)
-print ("=== read the EVGEN file back (with athena-mp)... [ok]")
-
-print (":"*80)
-print ("::: results:")
-input_file = af.fopen(input_file_name).infos
-print ("input_file: [%s]\n nentries: %s" % (input_file['file_name'],
-                                           input_file['nentries'],))
-
-output_file = af.fopen('reaccessed.mc.event.pool').infos
-print ("output_file: [%s]\n nentries: %s" % (output_file['file_name'],
-                                             output_file['nentries'],))
-print ("::: bye.")
-print (":"*80)
diff --git a/Control/AthenaMP/share/tests/smem_mon.py b/Control/AthenaMP/share/tests/smem_mon.py
deleted file mode 100755
index 79d95923b325eb383bd3c35fbc07ad9d3b769771..0000000000000000000000000000000000000000
--- a/Control/AthenaMP/share/tests/smem_mon.py
+++ /dev/null
@@ -1,160 +0,0 @@
-#!/usr/bin/env python
-
-# @author:  Mous Tatarkhanov <tmmous@cern.ch>
-# @date:    August 23, 2010
-# @example:
-# @code
-# @endcode
-#
-
-from __future__ import print_function
-
-
-__version__ = "$Revision: 000001 $"
-__author__  = "Mous Tatarkhanov <tmmous@cern.ch>"
-
-from optparse import OptionParser
-
-import sys, os
-import time, operator
-
-from future import standard_library
-standard_library.install_aliases()
-import subprocess
-
-smem_exe = "/afs/cern.ch/user/t/tmmous/smem-0.9/smem" 
-smem_log = "smem_log"
-smem_ppid  = None
-smem_time_step = 0.5
-
-### helpers -------------------------------------------------------------------
-def smem(ppid = None, message = None):
-
-    if ppid is None:
-        ppid = smem_ppid
-
-    if message is not None:
-        cmd = "echo %s >> %s" % (message, smem_log)
-        out = subprocess.getoutput(cmd)
-
-    cmd = "%s -P athena.py -s pid >> %s" % (smem_exe, smem_log) 
-    out += subprocess.getoutput(cmd)
-    
-    print ("smem: %s" % out)
-    
-    if ps_line_nbr(ppid) > 0:
-        return True
-    else: 
-        return False
-
-def ps_line_nbr(ppid):
-    cmd = "ps --ppid %s -o pid,state,vsize,rss,sz,start,cputime,etime " % ppid
-    (sc, out) = subprocess.getstatusoutput(cmd)
-    
-    if (sc != 0):
-        print ("%s\n" % cmd)
-        print (" PS> ERRROR... sc=%i" % sc)
-        print (" out=%s" % out )
-        return 0
-    
-    print (">PS sc=%i" % sc)
-    print ("%s" % out)
-
-    
-    ln = len(out.splitlines()) - 1
-    print ("line_nbr=", ln)
-    return ln
-
-def get_cpu(pid):
-    cmd = "ps --pid %i -o psr" % pid
-    #print (">%s" % cmd)
-    out = subprocess.getoutput(cmd)
-    cpu = int(out.splitlines()[1].split()[0])
-    #print ("pid: [%i] has cpu: [%i]" % (pid, cpu))
-    return cpu
-
-def set_proc_affinity(pid, cpu):
-    cmd = "taskset -pc %i %i" % (cpu, pid)
-    #print ("> taskset -pc %i %i" % (cpu, pid)                                                                                                       )
-    st,out = subprocess.getstatusoutput(cmd)
-    return st
-
-time_list = list()
-
-def watch( message=None):
-    time_list.append(time.time())
-    
-    if message is not None:
-        return "[%i] %s " % (dt(), message)
-    else:
-        return len(time_list)
-
-def dt(n=-1):
-    return time_list[n] - time_list[n-1]
-
-if __name__ == "__main__":
-
-    parser = OptionParser(usage="usage: %prog [options] -f jobo")
-    p = parser.add_option
-    p( "-l",
-        "--log",
-        dest = "log_file",
-        default = None,
-        help ="smem log file " 
-    )
-    
-    p( "-e",
-        "--exe",
-        dest="exe_file",
-        default = "/afs/cern.ch/user/t/tmmous/smem-0.9/smem",
-        help="location of smem executable"
-    )
-    p(  "-p",
-        "--ppid",
-        dest="ppid",
-        default = None,
-        help = "parent process pid"
-    )
-    p ( "-t",
-        "--time_step",
-        dest = "time_step",
-        default = 0.5,
-        help = "smem measurement time step"
-    )
-    (options, args) = parser.parse_args()
-    
-    if  options.log_file == None:
-        str(parser.print_help() or "")
-        sys.exit(1)
-    
-    if  options.ppid == None:
-        str("invalid ppid given")
-        str(parser.print_help() or "")
-        sys.exit(1)
-
-    if (options.exe_file == None) or  not os.path.exists(options.exe_file):
-        str("invalid exe_file, please provide exe file location")
-        str(parser.print_help() or "")
-        sys.exit(1)
-        
-    smem_log = options.log_file
-    smem_exe = options.exe_file
-    smem_ppid = options.ppid
-    smem_time_step = float(options.time_step)
-
-    print ("smem log_file = [%s]" % smem_log)
-    print ("smem exe_file = [%s]" % smem_exe)
-    print ("smem ppid = [%s]" % smem_ppid)
-    print ("smem time_step = [%.1f]" % smem_time_step)
-
-    if os.path.exists(smem_log):
-        print ("  given smem_log name %s exists.. renaming it to old.%s" % (smem_log, smem_log))
-        os.rename(smem_log, "OLD.%s" % smem_log)
-    
-    t0 = time.time()
-
-    while( smem( message = "time=%.2f" % (time.time()-t0)) ):
-        time.sleep(smem_time_step);
-        pass
-
-    print ("DONE...")
diff --git a/Control/AthenaMP/share/tests/test_VetoFirstEvent.py b/Control/AthenaMP/share/tests/test_VetoFirstEvent.py
deleted file mode 100755
index a14d30516fc242434e99e6b3039c0831f356f05e..0000000000000000000000000000000000000000
--- a/Control/AthenaMP/share/tests/test_VetoFirstEvent.py
+++ /dev/null
@@ -1,89 +0,0 @@
-
-#==============================================================
-# Job Options for fast-reco with AthenaMP
-#==============================================================
-
-# assumptions:
-# 1. RecExCommon_links.sh to be sourced in curdir
-# 2. CLI option --nprocs to be used in command line run of athena.py
-
-
-#----------------------------------------------------------------------
-# AthenaMP properties
-#----------------------------------------------------------------------
-# expect "--nprocs" to be used in command line options of athena.py
-from AthenaMP.AthenaMPFlags import jobproperties as jps
-jps.AthenaMPFlags.EventsBeforeFork=3  
-
-import multiprocessing
-cpu_list = range( multiprocessing.cpu_count() ) # [0,1,2,..,ncpus] for many-core machine 
-cpu_list.reverse() #reverse the cpu-proc pinning order
-jps.AthenaMPFlags.AffinityCPUList=cpu_list
-
-
-
-from AthenaCommon.AlgSequence import AlgSequence
-job = AlgSequence()
-# schedule our analysis algorithm
-# from file(.py) import classname
-from AthenaMP.VetoFirstEvent import VetoFirstEvent
-job += VetoFirstEvent(name='EventOutputVeto')
-job.EventOutputVeto.OutputLevel = INFO
-job.EventOutputVeto.EventsBeforeFork=jps.AthenaMPFlags.EventsBeforeFork
-
-#----------------------------------------------------------------------
-# Setting fast-reco w/o Calo, Muon or Trigger.
-#----------------------------------------------------------------------
-
-# expect RecExCommon_links.sh to be sourced in curdir
-
-from AthenaCommon.AthenaCommonFlags import athenaCommonFlags as acFlags
-acFlags.EvtMax=10
-acFlags.PoolESDOutput="ESD.pool.root"
-
-from RecExConfig.RecFlags import rec
-rec.doPerfMon=True
-
-rec.doCalo=False
-rec.doMuon=False
-#rec.doID=True
-rec.doTrigger=False
-
-rec.doESD=True
-rec.doAOD=False
-rec.doHist=False
-rec.doWriteESD=True
-rec.doWriteAOD=False
-rec.doWriteTAG=False
-
-# if needed to configure trigger
-# see https://twiki.cern.ch/twiki/bin/view/Atlas/TriggerFlags
-# include ( "TriggerJobOpts/TriggerFlags.py" )
-
-# if needed to configure AOD building
-# see https://twiki.cern.ch/twiki/bin/view/Atlas/UserAnalysisTest#The_AOD_Production_Flags
-# from ParticleBuilderOptions.AODFlags import AODFlags
-
-# main jobOption
-include ("RecExCommon/RecExCommon_topOptions.py")
-
-
-#----------------------------------------------------------------------
-# FOR DEBUGGING PURPOSES
-#----------------------------------------------------------------------
-#from AthenaCommon.AppMgr import theApp
-#theApp.ReflexPluginDebugLevel = 10000
-
-#from AthenaCommon.Logging import log as msg
-#msg.info ( "svcMgr=%s" % svcMgr)
-#msg.info ( "appMgr=%s" % theApp)
-
-
-
-#----------------------------------------------------------------------
-# user modifier should come here
-#----------------------------------------------------------------------
-
-StreamESD.VetoAlgs    += ["EventOutputVeto"]
-#StreamAOD.VetoAlgs    += ["EventOutputVeto"]
-AANTupleStream.ExistDataHeader = False
diff --git a/Control/AthenaMP/test/AthenaMP.xml b/Control/AthenaMP/test/AthenaMP.xml
deleted file mode 100644
index 3a8eb5f2d8d7966c88bb5cc155ab3a767757cf09..0000000000000000000000000000000000000000
--- a/Control/AthenaMP/test/AthenaMP.xml
+++ /dev/null
@@ -1,27 +0,0 @@
-<?xml version="1.0"?>
-<atn>
-   <TEST name="athenamp.basic" type="script" suite="athenamp">
-      <package_atn>Control/AthenaMP</package_atn>
-      <!-- <options_atn>chappy.py AthenaMP/tests/mp_basic_test.py</options_atn> -->
-      <options_atn>python -c 'print "OK"'</options_atn>
-      <timelimit>30</timelimit>
-      <author> Sebastien Binet </author>
-      <mailto> binet@cern.ch </mailto>
-      <expectations>
-         <returnValue>0</returnValue>
-      </expectations>
-   </TEST>
-
-   <TEST name="athenamp.genevt" type="script" suite="athenamp">
-      <package_atn>Control/AthenaMP</package_atn>
-      <!-- <options_atn>chappy.py AthenaMP/tests/mp_genevt_test.py</options_atn> -->
-      <options_atn>python -c 'print "OK"'</options_atn>
-      <timelimit>30</timelimit>
-      <author> Sebastien Binet </author>
-      <mailto> binet@cern.ch </mailto>
-      <expectations>
-         <returnValue>0</returnValue>
-      </expectations>
-   </TEST>
-
-</atn>
diff --git a/Control/StoreGate/StoreGate/ReadDecorHandle.h b/Control/StoreGate/StoreGate/ReadDecorHandle.h
index 3628361739c8be11666c64bc8e08eb8a01935ad6..f9e0f9beead966bab1545ab2f9b8f7485a228ab5 100644
--- a/Control/StoreGate/StoreGate/ReadDecorHandle.h
+++ b/Control/StoreGate/StoreGate/ReadDecorHandle.h
@@ -84,6 +84,9 @@ namespace SG {
  * For a container C with decoration d, the @c WriteDecorHandle will make
  * an alias C.d for C.  The @c ReadDecorHandle will then retrieve C.d from
  * StoreGate.  The alias C.d is also what enters into scheduling decisions.
+ *
+ * The key() method will return the key of the container.  Use decorKey()
+ * to get the name used for the decoration alias.
  */
 template <class T, class D>
 class ReadDecorHandle
@@ -202,6 +205,12 @@ public:
    */
   SG::auxid_t auxid() const;
 
+
+  /**
+   * @brief Return the name of the decoration alias (CONT.DECOR).
+   */
+  std::string decorKey() const;
+
   
 private:
   /** 
@@ -230,6 +239,10 @@ private:
   const SG::AuxVectorData* vectorData();
 
 
+  /// Name of the decoration alias.
+  std::string m_decorKey;
+
+
   /// Accessor for the aux data item.
   accessor_t m_acc;
 };
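
A minimal usage sketch may help make the key()/decorKey() distinction documented above concrete. Everything named below (MyAlg, MyContainer, the m_myDecorKey member configured as "MyContainer.myDecor") is a hypothetical assumption, not part of this patch; only the handle accessors touched here are exercised, and the usual Athena component environment is assumed.

```cpp
// Hypothetical algorithm reading a decoration via ReadDecorHandle.
StatusCode MyAlg::execute (const EventContext& ctx) const
{
  SG::ReadDecorHandle<MyContainer, float> decor (m_myDecorKey, ctx);

  // After this change, key() names the container while decorKey()
  // names the decoration alias.
  ATH_MSG_DEBUG ("container: " << decor.key()          // "MyContainer"
                 << ", alias: " << decor.decorKey());  // "MyContainer.myDecor"

  if (!decor.isPresent()) {
    return StatusCode::SUCCESS;   // container not in the event store
  }
  for (const auto* obj : *decor) {
    ATH_MSG_VERBOSE ("value: " << decor (*obj));  // read the decoration
  }
  return StatusCode::SUCCESS;
}
```
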
diff --git a/Control/StoreGate/StoreGate/ReadDecorHandle.icc b/Control/StoreGate/StoreGate/ReadDecorHandle.icc
index 2660bba7aa07067a0932f360ce8af834eddbf7a5..213eb5694a1026c930123c6be8fd973316c76977 100644
--- a/Control/StoreGate/StoreGate/ReadDecorHandle.icc
+++ b/Control/StoreGate/StoreGate/ReadDecorHandle.icc
@@ -21,7 +21,8 @@ namespace SG {
  */
 template <class T, class D>
 ReadDecorHandle<T, D>::ReadDecorHandle (const ReadDecorHandleKey<T>& key)
-  : Base (key),
+  : Base (key.contHandleKey()),
+    m_decorKey (key.key()),
     m_acc (SG::decorKeyFromKey (key.key()))
 {
 }
@@ -41,7 +42,8 @@ ReadDecorHandle<T, D>::ReadDecorHandle (const ReadDecorHandleKey<T>& key)
 template <class T, class D>
 ReadDecorHandle<T, D>::ReadDecorHandle (const ReadDecorHandleKey<T>& key,
                                         const EventContext& ctx)
-  : Base (key, ctx),
+  : Base (key.contHandleKey(), ctx),
+    m_decorKey (key.key()),
     m_acc (SG::decorKeyFromKey (key.key()))
 {
 }
@@ -53,6 +55,7 @@ ReadDecorHandle<T, D>::ReadDecorHandle (const ReadDecorHandleKey<T>& key,
 template <class T, class D>
 ReadDecorHandle<T, D>::ReadDecorHandle (const ReadDecorHandle& rhs)
   : Base (rhs),
+    m_decorKey (rhs.m_decorKey),
     m_acc (rhs.m_acc)
 {
 }
@@ -64,6 +67,7 @@ ReadDecorHandle<T, D>::ReadDecorHandle (const ReadDecorHandle& rhs)
 template <class T, class D>
 ReadDecorHandle<T, D>::ReadDecorHandle (ReadDecorHandle&& rhs)
   : Base (std::move (rhs)),
+    m_decorKey (std::move (rhs.m_decorKey)),
     m_acc (std::move (rhs.m_acc))
 {
 }
@@ -77,6 +81,7 @@ ReadDecorHandle<T, D>& ReadDecorHandle<T, D>::operator= (const ReadDecorHandle&
 {
   if (this != &rhs) {
     *static_cast<Base*>(this) = rhs;
+    m_decorKey = rhs.m_decorKey;
     m_acc = rhs.m_acc;
   }
   return *this;
@@ -91,6 +96,7 @@ ReadDecorHandle<T, D>& ReadDecorHandle<T, D>::operator= (ReadDecorHandle&& rhs)
 {
   if (this != &rhs) {
     *static_cast<Base*>(this) = std::move (rhs);
+    m_decorKey = std::move (rhs.m_decorKey);
     m_acc = std::move (rhs.m_acc);
   }
   return *this;
@@ -106,9 +112,10 @@ ReadDecorHandle<T, D>& ReadDecorHandle<T, D>::operator= (ReadDecorHandle&& rhs)
  * Const method; the handle does not change as a result of this.
  */
 template <class T, class D>
+inline
 bool ReadDecorHandle<T, D>::isPresent() const
 {
-  return this->isPresent_impl (contKeyFromKey (this->key()));
+  return Base::isPresent();
 }
 
 
@@ -178,6 +185,17 @@ SG::auxid_t ReadDecorHandle<T, D>::auxid() const
 }
 
 
+/**
+ * @brief Return the name of the decoration alias (CONT.DECOR).
+ */
+template <class T, class D>
+inline
+std::string ReadDecorHandle<T, D>::decorKey() const
+{
+  return m_decorKey;
+}
+
+
 /** 
  * @brief Return the referenced object as a @c SG::AuxVectorData.
  *        Specialization for the case of a standalone object
diff --git a/Control/StoreGate/StoreGate/ReadDecorHandleKey.h b/Control/StoreGate/StoreGate/ReadDecorHandleKey.h
index 322a4fb2698bc11d4e9d562fb5221e651560cdc6..b12ab3bd5a3ca1471bd1ae169754d6878efe9a1b 100644
--- a/Control/StoreGate/StoreGate/ReadDecorHandleKey.h
+++ b/Control/StoreGate/StoreGate/ReadDecorHandleKey.h
@@ -1,8 +1,7 @@
 // This file's extension implies that it's C, but it's really -*- C++ -*-.
 /*
- * Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration.
+ * Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration.
  */
-// $Id$
 /**
  * @file StoreGate/ReadDecorHandleKey.h
  * @author scott snyder <snyder@bnl.gov>
@@ -117,16 +116,58 @@ public:
                       const std::string& doc = "");
 
   
-  /// Can get this from the base class.
-  using Base::operator=;
+  /**
+   * @brief Change the key of the object to which we're referring.
+   * @param sgkey The StoreGate key for the object.
+   * 
+   * The provided key may actually start with the name of the store,
+   * separated by a "+":  "MyStore+Obj".  If no "+" is present,
+   * the store is not changed.
+   */
+  ReadDecorHandleKey& operator= (const std::string& sgkey);
 
 
+  /**
+   * @brief Change the key of the object to which we're referring.
+   * @param sgkey The StoreGate key for the object.
+   * 
+   * The provided key may actually start with the name of the store,
+   * separated by a "+":  "MyStore+Obj".  If no "+" is present
+   * the store is not changed.  A key name that starts with a slash
+   * is interpreted as a hierarchical key name, not an empty store name.
+   *
+   * Returns failure if the key string format is bad.
+   */
+  virtual StatusCode assign (const std::string& sgkey) override;
+
+  
   /**
    * @brief Return the class ID for the referenced object.
    *
    * Overridden here to return the CLID for @c T instead of @c topbase_t.
    */
   CLID clid() const;
+
+
+  /**
+   * @brief If this object is used as a property, then this should be called
+   *        during the initialize phase.  It will fail if the requested
+   *        StoreGate service cannot be found or if the key is blank.
+   * @param used If false, then this handle is not to be used.
+   *             Instead of normal initialization, the key will be cleared.
+   */
+  StatusCode initialize (bool used = true);
+
+
+  /**
+   * @brief Return the handle key for the container.
+   */
+  const ReadHandleKey<T>& contHandleKey() const;
+
+
+private:
+  /// The container handle.
+  ReadHandleKey<T> m_contHandleKey;
 };
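
For reference, a sketch of how such a key is typically declared as a property and initialized; the algorithm, container type, and property name are hypothetical, but the constructor, initialize(), and contHandleKey() calls are the ones added or touched in this header.

```cpp
// Hypothetical owner; only the ReadDecorHandleKey API shown above is used.
class MyAlg : public AthReentrantAlgorithm {
  // The key string is of the form "CONT.DECOR"; it may optionally be
  // prefixed with a store name, e.g. "StoreGateSvc+CONT.DECOR".
  SG::ReadDecorHandleKey<MyContainer> m_myDecorKey
    { this, "MyDecorKey", "MyContainer.myDecor", "decoration to read" };
};

StatusCode MyAlg::initialize()
{
  ATH_CHECK( m_myDecorKey.initialize() );  // also initializes the container key
  // m_myDecorKey.key()                 == "MyContainer.myDecor"
  // m_myDecorKey.contHandleKey().key() == "MyContainer"
  return StatusCode::SUCCESS;
}
```
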
 
 
diff --git a/Control/StoreGate/StoreGate/ReadDecorHandleKey.icc b/Control/StoreGate/StoreGate/ReadDecorHandleKey.icc
index 7af27b195e3d4df02d21118bb616d4d4707f6845..0ea35d60f289f39de6621a6e333785c48810464d 100644
--- a/Control/StoreGate/StoreGate/ReadDecorHandleKey.icc
+++ b/Control/StoreGate/StoreGate/ReadDecorHandleKey.icc
@@ -1,9 +1,6 @@
 /*
-  Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
-/*
- */
-// $Id$
 /**
  * @file StoreGate/ReadDecorHandleKey.icc
  * @author scott snyder <snyder@bnl.gov>
@@ -13,6 +10,9 @@
  */
 
 
+#include "StoreGate/tools/DecorKeyHelpers.h"
+
+
 namespace SG {
 
 
@@ -29,7 +29,8 @@ template <class T>
 inline
 ReadDecorHandleKey<T>::ReadDecorHandleKey (const std::string& key /*= ""*/,
                                            const std::string& storeName /*= StoreID::storeName(StoreID::EVENT_STORE)*/)
-  : Base (ClassID_traits<topbase_t>::ID(), key, storeName)
+  : Base (ClassID_traits<topbase_t>::ID(), key, storeName),
+    m_contHandleKey (contKeyFromKey (key), storeName)
 {
 }
 
@@ -55,11 +56,50 @@ ReadDecorHandleKey<T>::ReadDecorHandleKey( OWNER* owner,
                                            const std::string& name,
                                            const K& key /*={}*/,
                                            const std::string& doc /*=""*/)
-  : Base (ClassID_traits<topbase_t>::ID(), owner, name, key, doc)
+  : Base (ClassID_traits<topbase_t>::ID(), owner, name, key, doc),
+    m_contHandleKey (contKeyFromKey (key), StoreID::storeName(StoreID::EVENT_STORE) )
+{
+}
+
+
+/**
+ * @brief Change the key of the object to which we're referring.
+ * @param sgkey The StoreGate key for the object.
+ * 
+ * The provided key may actually start with the name of the store,
+ * separated by a "+":  "MyStore+Obj".  If no "+" is present,
+ * the store is not changed.
+ */
+template <class T>
+ReadDecorHandleKey<T>&
+ReadDecorHandleKey<T>::operator= (const std::string& sgkey)
 {
+  m_contHandleKey = contKeyFromKey (sgkey);
+  Base::operator= (sgkey);
+  return *this;
 }
 
 
+/**
+ * @brief Change the key of the object to which we're referring.
+ * @param sgkey The StoreGate key for the object.
+ * 
+ * The provided key may actually start with the name of the store,
+ * separated by a "+":  "MyStore+Obj".  If no "+" is present
+ * the store is not changed.  A key name that starts with a slash
+ * is interpreted as a hierarchical key name, not an empty store name.
+ *
+ * Returns failure if the key string format is bad.
+ */
+template <class T>
+StatusCode ReadDecorHandleKey<T>::assign (const std::string& sgkey)
+{
+  if (m_contHandleKey.assign (contKeyFromKey (sgkey)).isFailure())
+    return StatusCode::FAILURE;
+  return Base::assign (sgkey);
+}
+
+  
 /**
  * @brief Return the class ID for the referenced object.
  *
@@ -73,4 +113,31 @@ CLID ReadDecorHandleKey<T>::clid() const
 }
 
 
+/**
+ * @brief If this object is used as a property, then this should be called
+ *        during the initialize phase.  It will fail if the requested
+ *        StoreGate service cannot be found or if the key is blank.
+ *
+ * @param used If false, then this handle is not to be used.
+ *             Instead of normal initialization, the key will be cleared.
+ */
+template <class T>
+StatusCode ReadDecorHandleKey<T>::initialize (bool used /*= true*/)
+{
+  if (m_contHandleKey.initialize (used).isFailure())
+    return StatusCode::FAILURE;
+  return Base::initialize (used);
+}
+
+
+/**
+ * @brief Return the handle key for the container.
+ */
+template <class T>
+const ReadHandleKey<T>& ReadDecorHandleKey<T>::contHandleKey() const
+{
+  return m_contHandleKey;
+}
+
+
 } // namespace SG
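
The container key is derived from the full key with the helpers from StoreGate/tools/DecorKeyHelpers.h, which split a "CONT.DECOR" string into its container and decoration parts. A small illustration follows; the behaviour is inferred from how the helpers are called in this patch, not newly defined here.

```cpp
#include "StoreGate/tools/DecorKeyHelpers.h"
#include <string>

// Illustration of the key-splitting helpers as used in this patch.
void splitExample()
{
  const std::string full  = "ccc.fee";
  const std::string cont  = SG::contKeyFromKey (full);   // "ccc"
  const std::string decor = SG::decorKeyFromKey (full);  // "fee"
  (void)cont; (void)decor;
}
```
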
diff --git a/Control/StoreGate/StoreGate/WriteDecorHandle.h b/Control/StoreGate/StoreGate/WriteDecorHandle.h
index 3d447c95f9861d4c08a3a30e5a79f08f891c5f8d..6f426c8c696ffca48f59a843be906e8333240d86 100644
--- a/Control/StoreGate/StoreGate/WriteDecorHandle.h
+++ b/Control/StoreGate/StoreGate/WriteDecorHandle.h
@@ -89,6 +89,9 @@ namespace SG {
  * For a container C with decoration d, the @c WriteDecorHandle will make
  * an alias C.d for C.  The @c ReadDecorHandle will then retrieve C.d from
  * StoreGate.  The alias C.d is also what enters into scheduling decisions.
+ *
+ * The key() method will return the key of the container.  Use decorKey()
+ * to get the name used for the decoration alias.
  */
 template <class T, class D>
 class WriteDecorHandle
@@ -223,6 +226,18 @@ public:
    */
   SG::auxid_t auxid() const;
 
+
+  /**
+   * @brief Return the mode (read/write/update) for this handle.
+   */
+  Gaudi::DataHandle::Mode mode() const;
+
+
+  /**
+   * @brief Return the name of the decoration alias (CONT.DECOR).
+   */
+  std::string decorKey() const;
+
   
 private:
   /**
@@ -279,10 +294,8 @@ private:
   const SG::AuxVectorData* vectorData();
 
 
-  /// Handle for reading the referenced object using its original name
-  /// (not the alias).
-  SG::ReadHandle<T> m_contHandle;
-
+  /// Name of the decoration alias: CONT.DECOR.
+  std::string m_decorKey;
 
   /// Accessor for the aux data item.
   accessor_t m_acc;
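
As with the read side, a short sketch of the write side may be useful; MyAlg, MyContainer, and m_myDecorKey are assumptions. It illustrates that retrieving the container through the handle also establishes the C.d alias described above, and that the new decorKey() accessor reports the alias name.

```cpp
// Hypothetical algorithm decorating a container with a WriteDecorHandle.
StatusCode MyAlg::execute (const EventContext& ctx) const
{
  SG::WriteDecorHandle<MyContainer, float> decor (m_myDecorKey, ctx);

  // Retrieving the container through the handle also records the
  // "MyContainer.myDecor" alias in StoreGate.
  for (const auto* obj : *decor) {
    decor (*obj) = 42.0f;   // write the decoration on each element
  }

  ATH_MSG_DEBUG ("decorated " << decor.decorKey());  // "MyContainer.myDecor"
  return StatusCode::SUCCESS;
}
```
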
diff --git a/Control/StoreGate/StoreGate/WriteDecorHandle.icc b/Control/StoreGate/StoreGate/WriteDecorHandle.icc
index 61c68ffd4fdc06f68a830f9b3aa1501aa671eb82..d839d0d2c0240bb0206552d5b3236e78220d2acf 100644
--- a/Control/StoreGate/StoreGate/WriteDecorHandle.icc
+++ b/Control/StoreGate/StoreGate/WriteDecorHandle.icc
@@ -24,8 +24,8 @@ namespace SG {
  */
 template <class T, class D>
 WriteDecorHandle<T, D>::WriteDecorHandle (const WriteDecorHandleKey<T>& key)
-  : Base (key, nullptr),
-    m_contHandle (key.contHandleKey()),
+  : Base (key.contHandleKey(), nullptr),
+    m_decorKey (key.key()),
     m_acc (SG::decorKeyFromKey (key.key())),
     m_madeAlias (false)
 {
@@ -46,8 +46,8 @@ WriteDecorHandle<T, D>::WriteDecorHandle (const WriteDecorHandleKey<T>& key)
 template <class T, class D>
 WriteDecorHandle<T, D>::WriteDecorHandle (const WriteDecorHandleKey<T>& key,
                                           const EventContext& ctx)
-  : Base (key, &ctx),
-    m_contHandle (key.contHandleKey(), ctx),
+  : Base (key.contHandleKey(), &ctx),
+    m_decorKey (key.key()),
     m_acc (SG::decorKeyFromKey (key.key())),
     m_madeAlias (false)
 {
@@ -60,7 +60,7 @@ WriteDecorHandle<T, D>::WriteDecorHandle (const WriteDecorHandleKey<T>& key,
 template <class T, class D>
 WriteDecorHandle<T, D>::WriteDecorHandle (const WriteDecorHandle& rhs)
   : Base (rhs),
-    m_contHandle (rhs.m_contHandle),
+    m_decorKey (rhs.m_decorKey),
     m_acc (rhs.m_acc),
     m_madeAlias (rhs.m_madeAlias)
 {
@@ -73,7 +73,7 @@ WriteDecorHandle<T, D>::WriteDecorHandle (const WriteDecorHandle& rhs)
 template <class T, class D>
 WriteDecorHandle<T, D>::WriteDecorHandle (WriteDecorHandle&& rhs)
   : Base (std::move (rhs)),
-    m_contHandle (std::move (rhs.m_contHandle)),
+    m_decorKey (std::move (rhs.m_decorKey)),
     m_acc (std::move (rhs.m_acc)),
     m_madeAlias (rhs.m_madeAlias)
 {
@@ -107,7 +107,7 @@ WriteDecorHandle<T, D>& WriteDecorHandle<T, D>::operator= (const WriteDecorHandl
   if (this != &rhs) {
     *static_cast<Base*>(this) = rhs;
     m_acc = rhs.m_acc;
-    m_contHandle = rhs.m_contHandle;
+    m_decorKey = rhs.m_decorKey;
     m_madeAlias = rhs.m_madeAlias;
   }
   return *this;
@@ -123,7 +123,7 @@ WriteDecorHandle<T, D>& WriteDecorHandle<T, D>::operator= (WriteDecorHandle&& rh
   if (this != &rhs) {
     *static_cast<Base*>(this) = std::move (rhs);
     m_acc = std::move (rhs.m_acc);
-    m_contHandle = std::move (rhs.m_contHandle);
+    m_decorKey = std::move (rhs.m_decorKey);
     m_madeAlias = rhs.m_madeAlias;
     rhs.m_madeAlias = false;
   }
@@ -142,7 +142,7 @@ WriteDecorHandle<T, D>& WriteDecorHandle<T, D>::operator= (WriteDecorHandle&& rh
 template <class T, class D>
 bool WriteDecorHandle<T, D>::isPresent() const
 {
-  return m_contHandle.isPresent();
+  return Base::isPresent();
 }
 
 
@@ -159,8 +159,6 @@ template <class T, class D>
 StatusCode WriteDecorHandle<T, D>::setProxyDict (IProxyDict* store)
 {
   m_madeAlias = false;
-  if (m_contHandle.setProxyDict (store).isFailure())
-    return StatusCode::FAILURE;
   return Base::setProxyDict (store);
 }
 
@@ -217,8 +215,9 @@ template <class T, class D>
 inline
 bool WriteDecorHandle<T, D>::isAvailable (std::true_type)
 {
-  if (this->m_ptr) {
-    const SG::AuxVectorData* obj = static_cast<const T*>(this->m_ptr)->container();
+  const T* ptr = this->ptr();
+  if (ptr) {
+    const SG::AuxVectorData* obj = ptr->container();
     if (obj) {
       return obj->isAvailable (m_acc.auxid());
     }
@@ -238,8 +237,9 @@ template <class T, class D>
 inline
 bool WriteDecorHandle<T, D>::isAvailable (std::false_type)
 {
-  if (this->m_ptr) {
-    return static_cast<const T*>(this->m_ptr)->isAvailable (m_acc.auxid());
+  const T* ptr = this->ptr();
+  if (ptr) {
+    return ptr->isAvailable (m_acc.auxid());
   }
 
   return false;
@@ -273,6 +273,28 @@ SG::auxid_t WriteDecorHandle<T, D>::auxid() const
 }
 
 
+/**
+ * @brief Return the mode (read/write/update) for this handle.
+ */
+template <class T, class D>
+inline
+Gaudi::DataHandle::Mode WriteDecorHandle<T, D>::mode() const
+{
+  return Gaudi::DataHandle::Writer;
+}
+
+
+/**
+ * @brief Return the name of the decoration alias (CONT.DECOR).
+ */
+template <class T, class D>
+inline
+std::string WriteDecorHandle<T, D>::decorKey() const
+{
+  return m_decorKey;
+}
+
+
 /**
  * @brief Retrieve an object from StoreGate.
  * @param quiet If true, suppress failure messages.
@@ -285,11 +307,14 @@ void* WriteDecorHandle<T, D>::typeless_dataPointer_impl (bool quiet)
 {
   if (this->m_ptr && this->m_madeAlias)
     return this->m_ptr;
-  if (m_contHandle.alias (WriteHandleKey<T> (this->key())).isFailure())
-    return nullptr;
   if (!this->m_ptr) {
     ReadHandle<T>::typeless_dataPointer_impl (quiet);
   }
+  if (!this->m_ptr) {
+    return nullptr;
+  }
+  if (this->alias (WriteHandleKey<T> (this->m_decorKey)).isFailure())
+    return nullptr;
   // Important to call the base class method above before calling vectorData;
   // otherwise, we'll get an infinite recursion.
   // Also don't call getDecorationArray if the container is empty.
diff --git a/Control/StoreGate/test/ReadDecorHandleKey_test.cxx b/Control/StoreGate/test/ReadDecorHandleKey_test.cxx
index 30c9ee4d3cdc14d80d79efa285f97551613a803e..b0dab8aa7057da78668eeee1726e6a702c4ddd3e 100644
--- a/Control/StoreGate/test/ReadDecorHandleKey_test.cxx
+++ b/Control/StoreGate/test/ReadDecorHandleKey_test.cxx
@@ -34,11 +34,19 @@ void test1()
   assert (k1.initialize().isSuccess());
   assert (k1.storeHandle().isSet());
 
+  assert (k1.contHandleKey().clid() == 293847295);
+  assert (k1.contHandleKey().key() == "aaa");
+  assert (k1.contHandleKey().mode() == Gaudi::DataHandle::Reader);
+  assert (k1.contHandleKey().storeHandle().name() == "StoreGateSvc");
+  assert (k1.contHandleKey().storeHandle().isSet());
+
   k1 = "bbb.foo";
   assert (k1.key() == "bbb.foo");
+  assert (k1.contHandleKey().key() == "bbb");
 
   assert (k1.assign ("ccc.fee").isSuccess());
   assert (k1.key() == "ccc.fee");
+  assert (k1.contHandleKey().key() == "ccc");
 
 
   TestOwner owner;
@@ -46,6 +54,9 @@ void test1()
   assert (k3.clid() == 293847295);
   assert (k3.key() == "ccc.dec");
   assert (k3.mode() == Gaudi::DataHandle::Reader);
+  assert (k3.contHandleKey().clid() == 293847295);
+  assert (k3.contHandleKey().key() == "ccc");
+  assert (k3.contHandleKey().mode() == Gaudi::DataHandle::Reader);
   assert (owner.getProperty ("CCCKey").name() == "CCCKey");
   assert (owner.getProperty ("CCCKey").documentation() == "doc string");
   assert (owner.getProperty ("CCCKey").type_info() == &typeid(SG::ReadHandleKey<MyObj>));
diff --git a/Control/StoreGate/test/ReadDecorHandle_test.cxx b/Control/StoreGate/test/ReadDecorHandle_test.cxx
index 30e781ec901a233ae544a269258285cfcee2e5f0..a39f19c9b7a2509f8c7000f16a34e6edf9174126 100644
--- a/Control/StoreGate/test/ReadDecorHandle_test.cxx
+++ b/Control/StoreGate/test/ReadDecorHandle_test.cxx
@@ -53,7 +53,8 @@ void test1()
 
   SG::ReadDecorHandle<MyObj, int> h1 (k3);
   assert (h1.clid() == MyCLID);
-  assert (h1.key() == "asd.aaa");
+  assert (h1.key() == "asd");
+  assert (h1.decorKey() == "asd.aaa");
   assert (h1.storeHandle().name() == "StoreGateSvc");
   assert (h1.mode() == Gaudi::DataHandle::Reader);
   assert (h1.auxid() == ityp);
@@ -64,7 +65,8 @@ void test1()
   ctx5.setExtension( Atlas::ExtendedEventContext(&dumstore) );
   SG::ReadDecorHandle<MyObj, int> h5 (k3, ctx5);
   assert (h5.clid() == MyCLID);
-  assert (h5.key() == "asd.aaa");
+  assert (h5.key() == "asd");
+  assert (h5.decorKey() == "asd.aaa");
   assert (h5.storeHandle().name() == "StoreGateSvc");
   assert (h5.mode() == Gaudi::DataHandle::Reader);
   assert (h5.store() == "TestStore");
@@ -74,7 +76,8 @@ void test1()
   assert (k6.initialize().isSuccess());
   SG::ReadDecorHandle<MyObj, int> h6 (k6, ctx5);
   assert (h6.clid() == MyCLID);
-  assert (h6.key() == "asd.aaa");
+  assert (h6.key() == "asd");
+  assert (h6.decorKey() == "asd.aaa");
   assert (h6.storeHandle().name() == "OtherStore");
   assert (h6.mode() == Gaudi::DataHandle::Reader);
   assert (h6.store() == "OtherStore" || h6.store() == "OtherStore_Impl");
@@ -122,7 +125,8 @@ void test2()
   assert (h1.auxid() == ityp);
 
   SG::ReadDecorHandle<MyObj, int> h2 (h1);
-  assert (h2.key() == "foo.aaa");
+  assert (h2.key() == "foo");
+  assert (h2.decorKey() == "foo.aaa");
   assert (h2.store() == "TestStore");
   assert (h2.isInitialized());
   assert (h2.cptr() == fooptr);
@@ -130,7 +134,8 @@ void test2()
   assert (h2.auxid() == ityp);
 
   SG::ReadDecorHandle<MyObj, int> h3 (std::move(h2));
-  assert (h3.key() == "foo.aaa");
+  assert (h3.key() == "foo");
+  assert (h3.decorKey() == "foo.aaa");
   assert (h3.store() == "TestStore");
   assert (h3.isInitialized());
   assert (h3.cptr() == fooptr);
@@ -151,11 +156,13 @@ void test2()
   assert (h4.auxid() == ityp2);
 
   h3 = h4;
-  assert (h3.key() == "bar.bbb");
+  assert (h3.key() == "bar");
+  assert (h3.decorKey() == "bar.bbb");
   assert (h3.store() == "TestStore");
   assert (h3.isInitialized());
   assert (h3.cptr() == barptr);
-  assert (h4.key() == "bar.bbb");
+  assert (h4.key() == "bar");
+  assert (h4.decorKey() == "bar.bbb");
   assert (h4.store() == "TestStore");
   assert (h4.isInitialized());
   assert (h4.cptr() == barptr);
@@ -168,7 +175,8 @@ void test2()
   // h1: foo, h2: unint, h3: bar, h4: bar
 
   h2 = std::move(h3);
-  assert (h2.key() == "bar.bbb");
+  assert (h2.key() == "bar");
+  assert (h2.decorKey() == "bar.bbb");
   assert (h2.store() == "TestStore");
   assert (h2.isInitialized());
   assert (h2.cptr() == barptr);
@@ -220,6 +228,25 @@ void test3()
   assert (h1 (*(*pcont)[1]) == 11);
   assert (h1 (2) == 12);
   assert (h1.getDataArray()[0] == 10);
+
+  // Test case of no alias.
+  SG::ReadDecorHandleKey<MyObjCont> k2 ("foo.bbb");
+  assert (k2.initialize().isSuccess());
+  SG::ReadDecorHandle<MyObjCont, int> h2 (k2);
+  assert (h2.setProxyDict (&testStore).isSuccess());
+  assert (h2.auxid() == r.getAuxID<int> ("bbb"));
+  assert (h2.isPresent());
+  assert (!h2.isAvailable());
+
+  MyObj::Decorator<int> bdec ("bbb");
+  bdec (*(*pcont)[0]) = 110;
+  bdec (*(*pcont)[1]) = 111;
+  bdec (*(*pcont)[2]) = 112;
+
+  assert (h2.isAvailable());
+  assert (h2 (*(*pcont)[1]) == 111);
+  assert (h2 (2) == 112);
+  assert (h2.getDataArray()[0] == 110);
 }
 
 
@@ -270,7 +297,8 @@ void test5()
   assert (k1.initialize().isSuccess());
   auto h1 = SG::makeHandle<int> (k1);
   assert (h1.clid() == MyCLID);
-  assert (h1.key() == "asd.aaa");
+  assert (h1.key() == "asd");
+  assert (h1.decorKey() == "asd.aaa");
   assert (h1.storeHandle().name() == "StoreGateSvc");
   assert (h1.mode() == Gaudi::DataHandle::Reader);
   assert (h1.auxid() == ityp);
@@ -284,7 +312,8 @@ void test5()
   ctx.setExtension( Atlas::ExtendedEventContext(&dumstore) );
   auto h2 = SG::makeHandle<int> (k1, ctx);
   assert (h2.clid() == MyCLID);
-  assert (h2.key() == "asd.aaa");
+  assert (h2.key() == "asd");
+  assert (h2.decorKey() == "asd.aaa");
   assert (h2.storeHandle().name() == "StoreGateSvc");
   assert (h2.mode() == Gaudi::DataHandle::Reader);
   assert (h2.store() == "TestStore");
@@ -294,7 +323,8 @@ void test5()
   assert (k3.initialize().isSuccess());
   auto h3 = SG::makeHandle<int> (k3, ctx);
   assert (h3.clid() == MyCLID);
-  assert (h3.key() == "asd.aaa");
+  assert (h3.key() == "asd");
+  assert (h3.decorKey() == "asd.aaa");
   assert (h3.storeHandle().name() == "OtherStore");
   assert (h3.mode() == Gaudi::DataHandle::Reader);
   assert (h3.store() == "OtherStore" || h3.store() == "OtherStore_Impl");
diff --git a/Control/StoreGate/test/WriteDecorHandle_test.cxx b/Control/StoreGate/test/WriteDecorHandle_test.cxx
index c625497cb16d701ec23301a95878e756f0a0d7af..bc0b9192a42fe0c4083c17efb1ba210f16c0ec2f 100644
--- a/Control/StoreGate/test/WriteDecorHandle_test.cxx
+++ b/Control/StoreGate/test/WriteDecorHandle_test.cxx
@@ -52,7 +52,8 @@ void test1()
 
   SG::WriteDecorHandle<MyObj, int> h1 (k3);
   assert (h1.clid() == MyCLID);
-  assert (h1.key() == "asd.aaa");
+  assert (h1.key() == "asd");
+  assert (h1.decorKey() == "asd.aaa");
   assert (h1.storeHandle().name() == "StoreGateSvc");
   assert (h1.mode() == Gaudi::DataHandle::Writer);
   assert (h1.auxid() == ityp);
@@ -64,7 +65,8 @@ void test1()
 
   SG::WriteDecorHandle<MyObj, int> h5 (k3, ctx5);
   assert (h5.clid() == MyCLID);
-  assert (h5.key() == "asd.aaa");
+  assert (h5.key() == "asd");
+  assert (h5.decorKey() == "asd.aaa");
   assert (h5.storeHandle().name() == "StoreGateSvc");
   assert (h5.mode() == Gaudi::DataHandle::Writer);
   assert (h5.store() == "TestStore");
@@ -74,7 +76,8 @@ void test1()
   assert (k6.initialize().isSuccess());
   SG::WriteDecorHandle<MyObj, int> h6 (k6, ctx5);
   assert (h6.clid() == MyCLID);
-  assert (h6.key() == "asd.aaa");
+  assert (h6.key() == "asd");
+  assert (h6.decorKey() == "asd.aaa");
   assert (h6.storeHandle().name() == "OtherStore");
   assert (h6.mode() == Gaudi::DataHandle::Writer);
   assert (h6.store() == "OtherStore" || h6.store() == "OtherStore_Impl");
@@ -125,7 +128,8 @@ void test2()
   assert (foo_proxy->refCount() == 2);
 
   SG::WriteDecorHandle<MyObj, int> h2 (h1);
-  assert (h2.key() == "foo.aaa");
+  assert (h2.key() == "foo");
+  assert (h2.decorKey() == "foo.aaa");
   assert (h2.store() == "TestStore");
   assert (h2.isInitialized());
   assert (h2.auxid() == ityp);
@@ -133,7 +137,8 @@ void test2()
   assert (foo_proxy->refCount() == 2);
 
   SG::WriteDecorHandle <MyObj, int> h3 (std::move(h2));
-  assert (h3.key() == "foo.aaa");
+  assert (h3.key() == "foo");
+  assert (h3.decorKey() == "foo.aaa");
   assert (h3.store() == "TestStore");
   assert (h3.isInitialized());
   assert (h3.cptr() == fooptr);
@@ -153,11 +158,13 @@ void test2()
   assert (h4.auxid() == ityp2);
 
   h3 = h4;
-  assert (h3.key() == "bar.bbb");
+  assert (h3.key() == "bar");
+  assert (h3.decorKey() == "bar.bbb");
   assert (h3.store() == "TestStore");
   assert (h3.isInitialized());
   assert (h3.cptr() == barptr);
-  assert (h4.key() == "bar.bbb");
+  assert (h4.key() == "bar");
+  assert (h4.decorKey() == "bar.bbb");
   assert (h4.store() == "TestStore");
   assert (h4.isInitialized());
   assert (h4.cptr() == barptr);
@@ -170,7 +177,8 @@ void test2()
   // h1: foo, h2: unint, h3: bar, h4: bar
 
   h2 = std::move(h3);
-  assert (h2.key() == "bar.bbb");
+  assert (h2.key() == "bar");
+  assert (h2.decorKey() == "bar.bbb");
   assert (h2.store() == "TestStore");
   assert (h2.isInitialized());
   assert (h2.cptr() == barptr);
@@ -220,6 +228,19 @@ void test3()
   assert (adec (*(*pcont)[0]) == 10);
   assert (adec (*(*pcont)[1]) == 11);
   assert (adec (*(*pcont)[2]) == 12);
+
+  MyObj::Decorator<int> bdec ("bbb");
+  bdec (*(*pcont)[0]) = 110;
+  bdec (*(*pcont)[1]) = 111;
+  bdec (*(*pcont)[2]) = 112;
+
+  SG::WriteDecorHandleKey<MyObjCont> k2 ("foo.bbb");
+  assert (k2.initialize().isSuccess());
+  SG::WriteDecorHandle<MyObjCont, int> h2 (k2);
+  assert (h2.setProxyDict (&testStore).isSuccess());
+  assert (h2.auxid() == r.getAuxID<int> ("bbb"));
+  assert (h2.isAvailable());
+  assert (h2.isPresent());
 }
 
 
@@ -266,7 +287,8 @@ void test5()
   assert (k1.initialize().isSuccess());
   auto h1 = SG::makeHandle<int> (k1);
   assert (h1.clid() == MyCLID);
-  assert (h1.key() == "asd.aaa");
+  assert (h1.key() == "asd");
+  assert (h1.decorKey() == "asd.aaa");
   assert (h1.storeHandle().name() == "StoreGateSvc");
   assert (h1.mode() == Gaudi::DataHandle::Writer);
   assert (h1.auxid() == ityp);
@@ -280,7 +302,8 @@ void test5()
   ctx.setExtension( Atlas::ExtendedEventContext(&dumstore) );
   auto h2 = SG::makeHandle<int> (k1, ctx);
   assert (h2.clid() == MyCLID);
-  assert (h2.key() == "asd.aaa");
+  assert (h2.key() == "asd");
+  assert (h2.decorKey() == "asd.aaa");
   assert (h2.storeHandle().name() == "StoreGateSvc");
   assert (h2.mode() == Gaudi::DataHandle::Writer);
   assert (h2.store() == "TestStore");
@@ -290,7 +313,8 @@ void test5()
   assert (k3.initialize().isSuccess());
   auto h3 = SG::makeHandle<int> (k3, ctx);
   assert (h3.clid() == MyCLID);
-  assert (h3.key() == "asd.aaa");
+  assert (h3.key() == "asd");
+  assert (h3.decorKey() == "asd.aaa");
   assert (h3.storeHandle().name() == "OtherStore");
   assert (h3.mode() == Gaudi::DataHandle::Writer);
   assert (h3.store() == "OtherStore" || h3.store() == "OtherStore_Impl");
diff --git a/Control/xAODRootAccess/Root/TAuxStore.cxx b/Control/xAODRootAccess/Root/TAuxStore.cxx
index 51ee8ecc62ef64404af9f8100d56b7f98c9bfbd8..e1760d722fb7002d1b0719bde6380db1bc76e1ba 100644
--- a/Control/xAODRootAccess/Root/TAuxStore.cxx
+++ b/Control/xAODRootAccess/Root/TAuxStore.cxx
@@ -61,7 +61,7 @@ namespace xAOD {
         m_splitLevel( splitLevel ), m_entry( 0 ), m_inTree( 0 ), m_outTree( 0 ),
         m_inputScanned( kFALSE ), m_selection(), m_transientStore( 0 ),
         m_auxIDs(), m_vecs(), m_size( 0 ), m_locked( kFALSE ), m_isDecoration(),
-        m_mutex1(), m_mutex2(), 
+        m_mutex1(), m_mutex2(),
         m_branches(), m_branchesWritten(), m_missingBranches() {
 
    }
@@ -290,7 +290,7 @@ namespace xAOD {
       guard_t guard( m_mutex1 );
 
       // Check if the transient store already handles this variable:
-      if( m_transientStore && 
+      if( m_transientStore &&
           ( m_transientStore->getAuxIDs().test( auxid ) ) ) {
          return m_transientStore->getData( auxid );
       }
@@ -453,7 +453,7 @@ namespace xAOD {
 
    /// Lock a decoration.
    void TAuxStore::lockDecoration (SG::auxid_t auxid)
-   { 
+   {
      if( m_transientStore ) {
        m_transientStore->lockDecoration (auxid);
      }
@@ -1213,7 +1213,7 @@ namespace xAOD {
          bool primitiveBranch = (strlen( brType->name() ) == 1);
          m_branches[ auxid ] =
             new TBranchHandle( kFALSE, ( strlen( brType->name() ) == 1 ),
-                               (primitiveBranch ? 
+                               (primitiveBranch ?
                                 brType :
                                 m_vecs[ auxid ]->objType() ),
                                ( m_structMode == kObjectStore ?
@@ -1777,6 +1777,25 @@ namespace xAOD {
         }
       }
 
+      // If we got this far, the branch may have undergone schema evolution. If
+      // it's one that ROOT can deal with itself, then we should still be able
+      // to read the branch with this code.
+      //
+      // Note that it is not entirely clear, even from the ROOT sources,
+      // whether the objects returned by TClass::GetConversionStreamerInfo(...)
+      // would need to be deleted here; based on general experience with the
+      // ROOT code, we assume that they do not.
+      TClass* aux_vec_cl =
+         TClass::GetClass( Utils::getTypeName( *aux_vec_ti ).c_str() );
+      if( aux_vec_cl && aux_vec_cl->GetConversionStreamerInfo( cl, 0 ) ) {
+         return kTRUE;
+      }
+      TClass* aux_obj_cl =
+         TClass::GetClass( Utils::getTypeName( *aux_obj_ti ).c_str() );
+      if( aux_obj_cl && aux_obj_cl->GetConversionStreamerInfo( cl, 0 ) ) {
+         return kFALSE;
+      }
+
      // If neither, then something went wrong...
       ::Error( "xAOD::TAuxStore::isContainerBranch",
                XAOD_MESSAGE( "Couldn't determine if branch describes a single "
diff --git a/DataQuality/DataQualityConfigurations/config/Pixel/collisions_run.config b/DataQuality/DataQualityConfigurations/config/Pixel/collisions_run.config
index 2e0136b3927b069cd946553c33134f2cb962dc73..6b48ce1db823ca01c0fe18ea242d6059679dd1c0 100644
--- a/DataQuality/DataQualityConfigurations/config/Pixel/collisions_run.config
+++ b/DataQuality/DataQualityConfigurations/config/Pixel/collisions_run.config
@@ -250,106 +250,106 @@ output top_level {
 dir Pixel {
   dir Hits {
 
-    hist Occupancy_per_pixel_event_B0 {
+    hist OccupancyPerPixelEvent_B0 {
       algorithm = Pix_Occupancy_B0B1
       output = InnerDetector/Pixel/PIX0/Hits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Occupancy_per_pixel_event_B1 {
+    hist OccupancyPerPixelEvent_B1 {
       algorithm = Pix_Occupancy_B0B1
       output = InnerDetector/Pixel/PIX1/Hits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Occupancy_per_pixel_event_B2 {
+    hist OccupancyPerPixelEvent_B2 {
       algorithm = Pix_Occupancy_B2
       output = InnerDetector/Pixel/PIX2/Hits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Occupancy_per_pixel_event_ECA {
+    hist OccupancyPerPixelEvent_ECA {
       algorithm = Pix_Occupancy_Endcap
       output = InnerDetector/Pixel/PIXECA/Hits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Occupancy_per_pixel_event_ECC {
+    hist OccupancyPerPixelEvent_ECC {
       algorithm = Pix_Occupancy_Endcap
       output = InnerDetector/Pixel/PIXECC/Hits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Occupancy_per_pixel_event_IBL {
+    hist OccupancyPerPixelEvent_IBL {
       algorithm = Pix_Occupancy_B0B1
       output = InnerDetector/Pixel/PIXIBL/Hits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
 
-    hist AvgOcc_active_per_lumi_B0 {
+    hist AvgOccActivePerLumi_B0 {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIX0/Hits
       display = StatBox
     }
-    hist AvgOcc_active_per_lumi_B1 {
+    hist AvgOccActivePerLumi_B1 {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIX1/Hits
       display = StatBox
     }
-    hist AvgOcc_active_per_lumi_B2 {
+    hist AvgOccActivePerLumi_B2 {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIX2/Hits
       display = StatBox
     }
-    hist AvgOcc_active_per_lumi_ECA {
+    hist AvgOccActivePerLumi_ECA {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIXECA/Hits
       display = StatBox
     }
-    hist AvgOcc_active_per_lumi_ECC {
+    hist AvgOccActivePerLumi_ECC {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIXECC/Hits
       display = StatBox
     }
-    hist AvgOcc_active_per_lumi_IBL {
+    hist AvgOccActivePerLumi_IBL {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIXIBL/Hits
       display = StatBox
     }
 
-    hist AvgOcc_per_BCID_B0 {
+    hist AvgOccPerBCID_B0 {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIX0/_Experts/HitsLB
       display = StatBox
     }
-    hist AvgOcc_per_BCID_B1 {
+    hist AvgOccPerBCID_B1 {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIX1/_Experts/HitsLB
       display = StatBox
     }
-    hist AvgOcc_per_BCID_B2 {
+    hist AvgOccPerBCID_B2 {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIX2/_Experts/HitsLB
       display = StatBox
     }
-    hist AvgOcc_per_BCID_ECA {
+    hist AvgOccPerBCID_ECA {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIXECA/_Experts/HitsLB
       display = StatBox
     }
-    hist AvgOcc_per_BCID_ECC {
+    hist AvgOccPerBCID_ECC {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIXECC/_Experts/HitsLB
       display = StatBox
     }
-    hist AvgOcc_per_BCID_IBL {
+    hist AvgOccPerBCID_IBL {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIXIBL/_Experts/HitsLB
       display = StatBox
     }
 
-    hist Hits_per_lumi {
+    hist HitsPerLumi {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PixelExpert/GeneralAllHits
       output = InnerDetector/Pixel/DQShift/LBDependence
@@ -357,37 +357,37 @@ dir Pixel {
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
 
-    hist Hits_per_lumi_ECA {
+    hist HitsPerLumi_ECA {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIXECA/_Experts/HitsLB
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist Hits_per_lumi_ECC {
+    hist HitsPerLumi_ECC {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIXECC/_Experts/HitsLB
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist Hits_per_lumi_IBL {
+    hist HitsPerLumi_IBL {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIXIBL/_Experts/HitsLB
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist Hits_per_lumi_B0 {
+    hist HitsPerLumi_B0 {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIX0/_Experts/HitsLB
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist Hits_per_lumi_B1 {
+    hist HitsPerLumi_B1 {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIX1/_Experts/HitsLB
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist Hits_per_lumi_B2 {
+    hist HitsPerLumi_B2 {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIX2/_Experts/HitsLB
       display = StatBox
@@ -398,105 +398,105 @@ dir Pixel {
 
   dir Timing {
 
-    hist Cluster_LVL1A_SizeCut_IBL {
+    hist ClusterLVL1ASizeCut_IBL {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXIBL/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_SizeCut_B0 {
+    hist ClusterLVL1ASizeCut_B0 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX0/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_SizeCut_B1 {
+    hist ClusterLVL1ASizeCut_B1 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX1/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_SizeCut_B2 {
+    hist ClusterLVL1ASizeCut_B2 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX2/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_SizeCut_ECA {
+    hist ClusterLVL1ASizeCut_ECA {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXECA/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_SizeCut_ECC {
+    hist ClusterLVL1ASizeCut_ECC {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXECC/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_ToTCut_IBL {
+    hist ClusterLVL1AToTCut_IBL {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXIBL/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_ToTCut_B0 {
+    hist ClusterLVL1AToTCut_B0 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX0/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_ToTCut_B1 {
+    hist ClusterLVL1AToTCut_B1 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX1/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_ToTCut_B2 {
+    hist ClusterLVL1AToTCut_B2 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX2/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_ToTCut_ECA {
+    hist ClusterLVL1AToTCut_ECA {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXECA/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_ToTCut_ECC {
+    hist ClusterLVL1AToTCut_ECC {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXECC/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
 
-    hist Hit_LVL1A_IBL {
+    hist HitLVL1A_IBL {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXIBL/Hits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Hit_LVL1A_B0 {
+    hist HitLVL1A_B0 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX0/Hits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Hit_LVL1A_B1 {
+    hist HitLVL1A_B1 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX1/Hits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Hit_LVL1A_B2 {
+    hist HitLVL1A_B2 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX2/Hits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Hit_LVL1A_ECA {
+    hist HitLVL1A_ECA {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXECA/Hits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Hit_LVL1A_ECC {
+    hist HitLVL1A_ECC {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXECC/Hits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
 
-    hist Cluster_LVL1A {
+    hist ClusterLVL1A {
       algorithm = Pix_Timing_CheckHisto_Mean&BinPrint
       output = InnerDetector/Pixel/PixelExpert/TimingAllHits
       display = StatBox
@@ -507,118 +507,118 @@ dir Pixel {
 
   dir TimingOnTrack {
 
-    hist Cluster_LVL1A_Mod_OnTrack_IBL {
+    hist ClusterLVL1AModOnTrack_IBL {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXIBL/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_Mod_OnTrack_B0 {
+    hist ClusterLVL1AModOnTrack_B0 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX0/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_Mod_OnTrack_B1 {
+    hist ClusterLVL1AModOnTrack_B1 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX1/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_Mod_OnTrack_B2 {
+    hist ClusterLVL1AModOnTrack_B2 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX2/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_Mod_OnTrack_ECA {
+    hist ClusterLVL1AModOnTrack_ECA {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXECA/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_Mod_OnTrack_ECC {
+    hist ClusterLVL1AModOnTrack_ECC {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXECC/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
  
-    hist Cluster_LVL1A_SizeCut_OnTrack_IBL {
+    hist ClusterLVL1ASizeCutOnTrack_IBL {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXIBL/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_SizeCut_OnTrack_B0 {
+    hist ClusterLVL1ASizeCutOnTrack_B0 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX0/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_SizeCut_OnTrack_B1 {
+    hist ClusterLVL1ASizeCutOnTrack_B1 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX1/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_SizeCut_OnTrack_B2 {
+    hist ClusterLVL1ASizeCutOnTrack_B2 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX2/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_SizeCut_OnTrack_ECA {
+    hist ClusterLVL1ASizeCutOnTrack_ECA {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXECA/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_SizeCut_OnTrack_ECC {
+    hist ClusterLVL1ASizeCutOnTrack_ECC {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXECC/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
 
-    hist Cluster_LVL1A_ToTCut_OnTrack_IBL {
+    hist ClusterLVL1AToTCutOnTrack_IBL {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXIBL/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     } 
-    hist Cluster_LVL1A_ToTCut_OnTrack_B0 {
+    hist ClusterLVL1AToTCutOnTrack_B0 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX0/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_ToTCut_OnTrack_B1 {
+    hist ClusterLVL1AToTCutOnTrack_B1 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX1/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_ToTCut_OnTrack_B2 {
+    hist ClusterLVL1AToTCutOnTrack_B2 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX2/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_ToTCut_OnTrack_ECA {
+    hist ClusterLVL1AToTCutOnTrack_ECA {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXECA/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_ToTCut_OnTrack_ECC {
+    hist ClusterLVL1AToTCutOnTrack_ECC {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXECC/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
 
-    hist Cluster_LVL1A_OnTrack {
+    hist ClusterLVL1AOnTrack {
       algorithm = Pix_Timing_CheckHisto_Mean&BinPrint
       output = InnerDetector/Pixel/PixelExpert/TimingAllHits
       display = StatBox
@@ -630,106 +630,106 @@ dir Pixel {
 
   dir Clusters {
 
-    hist Clus_Occ_SizeCut_IBL {
+    hist ClusOccSizeCut_IBL {
       algorithm = Pix_Occupancy_B0B1
       output = InnerDetector/Pixel/PIXIBL/_Experts/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Clus_Occ_SizeCut_B0 {
+    hist ClusOccSizeCut_B0 {
       algorithm = Pix_Occupancy_B0B1
       output = InnerDetector/Pixel/PIX0/_Experts/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Clus_Occ_SizeCut_B1 {
+    hist ClusOccSizeCut_B1 {
       algorithm = Pix_Occupancy_B0B1
       output = InnerDetector/Pixel/PIX1/_Experts/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Clus_Occ_SizeCut_B2 {
+    hist ClusOccSizeCut_B2 {
       algorithm = Pix_Occupancy_B2
       output = InnerDetector/Pixel/PIX2/_Experts/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Clus_Occ_SizeCut_ECA {
+    hist ClusOccSizeCut_ECA {
       algorithm = Pix_Occupancy_Endcap
       output = InnerDetector/Pixel/PIXECA/_Experts/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Clus_Occ_SizeCut_ECC {
+    hist ClusOccSizeCut_ECC {
       algorithm = Pix_Occupancy_Endcap
       output = InnerDetector/Pixel/PIXECC/_Experts/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
    
-    hist Cluster_Occupancy_IBL {
+    hist ClusterOccupancy_IBL {
       algorithm = Pix_Occupancy_B0B1
       output = InnerDetector/Pixel/PIXIBL/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Cluster_Occupancy_B0 {
+    hist ClusterOccupancy_B0 {
       algorithm = Pix_Occupancy_B0B1
       output = InnerDetector/Pixel/PIX0/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Cluster_Occupancy_B1 {
+    hist ClusterOccupancy_B1 {
       algorithm = Pix_Occupancy_B0B1
       output = InnerDetector/Pixel/PIX1/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Cluster_Occupancy_B2 {
+    hist ClusterOccupancy_B2 {
       algorithm = Pix_Occupancy_B2
       output = InnerDetector/Pixel/PIX2/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Cluster_Occupancy_ECA {
+    hist ClusterOccupancy_ECA {
       algorithm = Pix_Occupancy_Endcap
       output = InnerDetector/Pixel/PIXECA/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Cluster_Occupancy_ECC {
+    hist ClusterOccupancy_ECC {
       algorithm = Pix_Occupancy_Endcap
       output = InnerDetector/Pixel/PIXECC/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
 
-    hist Clusters_per_lumi {
+    hist ClustersPerLumi {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PixelExpert/GeneralAllHits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
 
-    hist Clusters_per_lumi_ECA {
+    hist ClustersPerLumi_ECA {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIXECA/ClusterHits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist Clusters_per_lumi_ECC {
+    hist ClustersPerLumi_ECC {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIXECC/ClusterHits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist Clusters_per_lumi_IBL {
+    hist ClustersPerLumi_IBL {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIXIBL/ClusterHits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist Clusters_per_lumi_B0 {
+    hist ClustersPerLumi_B0 {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIX0/ClusterHits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist Clusters_per_lumi_B1 {
+    hist ClustersPerLumi_B1 {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIX1/ClusterHits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist Clusters_per_lumi_B2 {
+    hist ClustersPerLumi_B2 {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIX2/ClusterHits
       display = StatBox
@@ -740,68 +740,68 @@ dir Pixel {
 
   dir ClustersOnTrack {
  
-    hist Cluster_Occupancy_OnTrack_IBL {
+    hist ClusterOccupancyOnTrack_IBL {
       algorithm = Pix_Occupancy_B0B1
       output = InnerDetector/Pixel/PIXIBL/ClusterHitsOnTrack
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Cluster_Occupancy_OnTrack_B0 {
+    hist ClusterOccupancyOnTrack_B0 {
       algorithm = Pix_Occupancy_B0B1
       output = InnerDetector/Pixel/PIX0/ClusterHitsOnTrack
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Cluster_Occupancy_OnTrack_B1 {
+    hist ClusterOccupancyOnTrack_B1 {
       algorithm = Pix_Occupancy_B0B1
       output = InnerDetector/Pixel/PIX1/ClusterHitsOnTrack
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Cluster_Occupancy_OnTrack_B2 {
+    hist ClusterOccupancyOnTrack_B2 {
       algorithm = Pix_Occupancy_B2
       output = InnerDetector/Pixel/PIX2/ClusterHitsOnTrack
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Cluster_Occupancy_OnTrack_ECA {
+    hist ClusterOccupancyOnTrack_ECA {
       algorithm = Pix_Occupancy_Endcap
       output = InnerDetector/Pixel/PIXECA/ClusterHitsOnTrack
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Cluster_Occupancy_OnTrack_ECC {
+    hist ClusterOccupancyOnTrack_ECC {
       algorithm = Pix_Occupancy_Endcap
       output = InnerDetector/Pixel/PIXECC/ClusterHitsOnTrack
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
 
-    hist Cluster_QxCosAlpha_OnTrack_IBL {
+    hist ClusterQxCosAlphaOnTrack_IBL {
       algorithm = Pix_Charge_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIXIBL/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Charge_and_ToT
     }
-    hist Cluster_QxCosAlpha_OnTrack_B0 {
+    hist ClusterQxCosAlphaOnTrack_B0 {
       algorithm = Pix_Charge_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIX0/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Charge_and_ToT
     }
-    hist Cluster_QxCosAlpha_OnTrack_B1 {
+    hist ClusterQxCosAlphaOnTrack_B1 {
       algorithm = Pix_Charge_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIX1/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Charge_and_ToT
     }
-    hist Cluster_QxCosAlpha_OnTrack_B2 {
+    hist ClusterQxCosAlphaOnTrack_B2 {
       algorithm = Pix_Charge_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIX2/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Charge_and_ToT
     }
-    hist Cluster_QxCosAlpha_OnTrack_ECA {
+    hist ClusterQxCosAlphaOnTrack_ECA {
       algorithm = Pix_Charge_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIXECA/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Charge_and_ToT
     }
-    hist Cluster_QxCosAlpha_OnTrack_ECC {
+    hist ClusterQxCosAlphaOnTrack_ECC {
       algorithm = Pix_Charge_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIXECC/ClusterHitsOnTrack
       display = StatBox
@@ -809,97 +809,87 @@ dir Pixel {
     }    
 
 
-    hist Cluster_ToTxCosAlpha_OnTrack_IBL {
+    hist ClusterToTxCosAlphaOnTrack_IBL {
       algorithm = Pix_ToT_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIXIBL/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Charge_and_ToT
     }
-    hist Cluster_ToTxCosAlpha_OnTrack_ECA {
+    hist ClusterToTxCosAlphaOnTrack_ECA {
       algorithm = Pix_ToT_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIXECA/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Charge_and_ToT
     }
-    hist Cluster_ToTxCosAlpha_OnTrack_ECC {
+    hist ClusterToTxCosAlphaOnTrack_ECC {
       algorithm = Pix_ToT_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIXECC/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Charge_and_ToT
     }
-    hist Cluster_ToTxCosAlpha_OnTrack_B0 {
+    hist ClusterToTxCosAlphaOnTrack_B0 {
       algorithm = Pix_ToT_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIX0/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Charge_and_ToT
     }
-    hist Cluster_ToTxCosAlpha_OnTrack_B1 {
+    hist ClusterToTxCosAlphaOnTrack_B1 {
       algorithm = Pix_ToT_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIX1/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Charge_and_ToT
     }
-    hist Cluster_ToTxCosAlpha_OnTrack_B2 {
+    hist ClusterToTxCosAlphaOnTrack_B2 {
       algorithm = Pix_ToT_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIX2/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Charge_and_ToT
     }
     
-    hist num_clusters {
-      algorithm = PixTrack_Noise_CheckHisto_Mean&GatherData
-      output = InnerDetector/Pixel/PixelExpert/GeneralOnTrack
-      display = AxisRange(0.0,50.0,"X"),StatBox
-      description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#General_Hits
-    }
+    #hist num_clusters {
+    #  algorithm = PixTrack_Noise_CheckHisto_Mean&GatherData
+    #  output = InnerDetector/Pixel/PixelExpert/GeneralOnTrack
+    #  display = AxisRange(0.0,50.0,"X"),StatBox
+    #  description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#General_Hits
+    #}
 
-    hist Cluster_groupsize_OnTrack {
-      algorithm = Histogram_Not_Empty
-      output = InnerDetector/Pixel/PixelExpert/GeneralOnTrack
-      display = StatBox
-      description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#General_Hits
-    }
+    #hist Cluster_groupsize_OnTrack {
+    #  algorithm = Histogram_Not_Empty
+    #  output = InnerDetector/Pixel/PixelExpert/GeneralOnTrack
+    #  display = StatBox
+    #  description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#General_Hits
+    #}
 
-    hist Cluster_Size_Map_OnTrack_B0 {
+    hist ClusterSizeMapOnTrack_B0 {
       algorithm = Pix_Histogram_Not_Empty
       output = InnerDetector/Pixel/PIX0/ClusterHitsOnTrack
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Cluster_Size_Map_OnTrack_B1 {
+    hist ClusterSizeMapOnTrack_B1 {
       algorithm = Pix_Histogram_Not_Empty
       output = InnerDetector/Pixel/PIX1/ClusterHitsOnTrack
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Cluster_Size_Map_OnTrack_B2 {
+    hist ClusterSizeMapOnTrack_B2 {
       algorithm = Pix_Histogram_Not_Empty
       output = InnerDetector/Pixel/PIX2/ClusterHitsOnTrack
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Cluster_Size_Map_OnTrack_ECA {
+    hist ClusterSizeMapOnTrack_ECA {
       algorithm = Pix_Histogram_Not_Empty
       output = InnerDetector/Pixel/PIXECA/ClusterHitsOnTrack
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Cluster_Size_Map_OnTrack_ECC {
+    hist ClusterSizeMapOnTrack_ECC {
       algorithm = Pix_Histogram_Not_Empty
       output = InnerDetector/Pixel/PIXECC/ClusterHitsOnTrack
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Cluster_Size_Map_OnTrack_IBL {
+    hist ClusterSizeMapOnTrack_IBL {
       algorithm = Pix_Histogram_Not_Empty
       output = InnerDetector/Pixel/PIXIBL/ClusterHitsOnTrack
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-   # hist Cluster_Size_Map_IBL2D {
-   #   algorithm =  Pix_Histogram_Not_Empty
-   #   output = InnerDetector/Pixel/PIXIBL/ClusterHitsOnTrack
-   #   description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
-   # }
-   # hist Cluster_Size_Map_IBL3D {
-   #   algorithm = Pix_Histogram_Not_Empty
-   #   output = InnerDetector/Pixel/PIXIBL/ClusterHitsOnTrack
-   #   description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
-   # }
 
     #hist Cluster_groupsize@1 {
     #  algorithm = Pix_Histogram_Not_Empty
@@ -938,239 +928,239 @@ dir Pixel {
     #  description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#General_Hits
     #}
 
-    hist TotalClusters_per_lumi {
-      algorithm = Pix_LB_Bins_Diff_FromAvg
-      output = InnerDetector/Pixel/PixelExpert/GeneralOnTrack
-      display = StatBox
-      description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
-    }
+    #hist TotalClusters_per_lumi {
+    #  algorithm = Pix_LB_Bins_Diff_FromAvg
+    #  output = InnerDetector/Pixel/PixelExpert/GeneralOnTrack
+    #  display = StatBox
+    #  description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
+    #}
 
-    hist Clusters_per_lumi {
+    hist ClustersPerLumiOnTrack {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PixelExpert/GeneralOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
 
-    hist Clusters_per_lumi_OnTrack_ECA {
+    hist ClustersPerLumiOnTrack_ECA {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIXECA/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist Clusters_per_lumi_OnTrack_ECC {
+    hist ClustersPerLumiOnTrack_ECC {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIXECC/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist Clusters_per_lumi_OnTrack_IBL {
+    hist ClustersPerLumiOnTrack_IBL {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIXIBL/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist Clusters_per_lumi_OnTrack_B0 {
+    hist ClustersPerLumiOnTrack_B0 {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIX0/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist Clusters_per_lumi_OnTrack_B1 {
+    hist ClustersPerLumiOnTrack_B1 {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIX1/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist Clusters_per_lumi_OnTrack_B2 {
+    hist ClustersPerLumiOnTrack_B2 {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIX2/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
 
-    hist num_clusters_per_track_per_lumi_IBL {
+    hist NumClustersPerTrackPerLumi_IBL {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXIBL/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#General_Hits
     }
-    hist num_clusters_per_track_per_lumi_B0 {
+    hist NumClustersPerTrackPerLumi_B0 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX0/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#General_Hits
     }
-    hist num_clusters_per_track_per_lumi_B1 {
+    hist NumClustersPerTrackPerLumi_B1 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX1/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#General_Hits
     } 
-    hist num_clusters_per_track_per_lumi_B2 {
+    hist NumClustersPerTrackPerLumi_B2 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX2/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#General_Hits
     } 
-    hist num_clusters_per_track_per_lumi_ECA {
+    hist NumClustersPerTrackPerLumi_ECA {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXECA/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#General_Hits
     }
-    hist num_clusters_per_track_per_lumi_ECC {
+    hist NumClustersPerTrackPerLumi_ECC {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXECC/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#General_Hits
     }
 
-    hist LargeClusters_per_lumi_OnTrack {
-      algorithm = Plain_GatherData
-      output = InnerDetector/Pixel/PixelExpert/GeneralOnTrack
-      display = StatBox
-      description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
-    }
+    #hist LargeClusters_per_lumi_OnTrack {
+    #  algorithm = Plain_GatherData
+    #  output = InnerDetector/Pixel/PixelExpert/GeneralOnTrack
+    #  display = StatBox
+    #  description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
+    #}
 
-    hist VeryLargeClusters_per_lumi_OnTrack {
-      algorithm = Plain_GatherData
-      output = InnerDetector/Pixel/PixelExpert/GeneralOnTrack
-      display = StatBox
-      description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
-    }
+    #hist VeryLargeClusters_per_lumi_OnTrack {
+    #  algorithm = Plain_GatherData
+    #  output = InnerDetector/Pixel/PixelExpert/GeneralOnTrack
+    #  display = StatBox
+    #  description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
+    #}
 
-    hist HighNClusters_per_lumi_OnTrack {
-      algorithm = Plain_GatherData
-      output = InnerDetector/Pixel/PixelExpert/GeneralOnTrack
-      display = StatBox
-      description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
-    }
+    #hist HighNClusters_per_lumi_OnTrack {
+    #  algorithm = Plain_GatherData
+    #  output = InnerDetector/Pixel/PixelExpert/GeneralOnTrack
+    #  display = StatBox
+    #  description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
+    #}
   }
 
 
   dir Status {
-    hist Map_Of_Modules_Status_IBL {
+    hist MapOfModulesStatus_IBL {
       algorithm = Pix_DisabledMod_B0B1
       output = InnerDetector/Pixel/PIXIBL/DisableAndErrors
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Disabled_Modules
     }
 
-    hist Map_Of_Modules_Status_B0 {
+    hist MapOfModulesStatus_B0 {
       algorithm = Pix_DisabledMod_B0B1
       output = InnerDetector/Pixel/PIX0/DisableAndErrors
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Disabled_Modules
     }
-    hist Map_Of_Modules_Status_B1 {
+    hist MapOfModulesStatus_B1 {
       algorithm = Pix_DisabledMod_B0B1
       output = InnerDetector/Pixel/PIX1/DisableAndErrors
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Disabled_Modules
     }
-    hist Map_Of_Modules_Status_B2 {
+    hist MapOfModulesStatus_B2 {
       algorithm = Pix_DisabledMod_B2
       output = InnerDetector/Pixel/PIX2/DisableAndErrors
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Disabled_Modules
     }
-    hist Map_Of_Modules_Status_ECA {
+    hist MapOfModulesStatus_ECA {
       algorithm = Pix_DisabledMod_Endcap
       output = InnerDetector/Pixel/PIXECA/DisableAndErrors
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Disabled_Modules
     }
-    hist Map_Of_Modules_Status_ECC {
+    hist MapOfModulesStatus_ECC {
       algorithm = Pix_DisabledMod_Endcap
       output = InnerDetector/Pixel/PIXECC/DisableAndErrors
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Disabled_Modules
     }
 
-    hist DisabledModules_per_lumi {
-      algorithm = Pix_DisabledLB_CheckHisto_Mean&GatherData
-      output = InnerDetector/Pixel/PixelExpert/Errors/ALL
-      description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
-    }
+    #hist DisabledModules_per_lumi {
+    #  algorithm = Pix_DisabledLB_CheckHisto_Mean&GatherData
+    #  output = InnerDetector/Pixel/PixelExpert/Errors/ALL
+    #  description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
+    #}
 
-    hist DisabledModules_per_lumi_ECA {
+    hist DisabledModulesPerLumi_ECA {
       algorithm = Pix_DisabledLB_CheckHisto_Mean_Endcap&GatherData
       output = InnerDetector/Pixel/PIXECA/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist DisabledModules_per_lumi_ECC {
+    hist DisabledModulesPerLumi_ECC {
       algorithm = Pix_DisabledLB_CheckHisto_Mean_Endcap&GatherData
       output = InnerDetector/Pixel/PIXECC/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist DisabledModules_per_lumi_IBL {
+    hist DisabledModulesPerLumi_IBL {
       algorithm = Pix_DisabledLB_CheckHisto_Mean_B0B1&GatherData
       output = InnerDetector/Pixel/PIXIBL/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist DisabledModules_per_lumi_IBL2D {
-      algorithm = Pix_DisabledLB_CheckHisto_Mean_B0B1&GatherData
-      output = InnerDetector/Pixel/PIXIBL/_Experts/DisableAndErrorsLB
-      description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
-    }
-    hist DisabledModules_per_lumi_IBL3D {
-      algorithm = Pix_DisabledLB_CheckHisto_Mean_B0B1&GatherData
-      output = InnerDetector/Pixel/PIXIBL/_Experts/DisableAndErrorsLB
-      description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
-    }
-    hist DisabledModules_per_lumi_B0 {
+    #hist DisabledModules_per_lumi_IBL2D {
+    #  algorithm = Pix_DisabledLB_CheckHisto_Mean_B0B1&GatherData
+    #  output = InnerDetector/Pixel/PIXIBL/_Experts/DisableAndErrorsLB
+    #  description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
+    #}
+    #hist DisabledModules_per_lumi_IBL3D {
+    #  algorithm = Pix_DisabledLB_CheckHisto_Mean_B0B1&GatherData
+    #  output = InnerDetector/Pixel/PIXIBL/_Experts/DisableAndErrorsLB
+    #  description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
+    #}
+    hist DisabledModulesPerLumi_B0 {
       algorithm = Pix_DisabledLB_CheckHisto_Mean_B0B1&GatherData
       output = InnerDetector/Pixel/PIX0/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist DisabledModules_per_lumi_B1 {
+    hist DisabledModulesPerLumi_B1 {
       algorithm = Pix_DisabledLB_CheckHisto_Mean_B0B1&GatherData
       output = InnerDetector/Pixel/PIX1/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist DisabledModules_per_lumi_B2 {
+    hist DisabledModulesPerLumi_B2 {
       algorithm = Pix_DisabledLB_CheckHisto_Mean_B2&GatherData
       output = InnerDetector/Pixel/PIX2/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
 
-    hist BadModules_per_lumi {
-      algorithm = Plain_GatherData
-      output = InnerDetector/Pixel/PixelExpert/Errors/ALL
-      description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
-    }
+    #hist BadModules_per_lumi {
+    #  algorithm = Plain_GatherData
+    #  output = InnerDetector/Pixel/PixelExpert/Errors/ALL
+    #  description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
+    #}
 
-    hist BadModules_per_lumi_ECA {
+    hist BadModulesPerLumi_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PIXECA/_Experts/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist BadModules_per_lumi_ECC {
+    hist BadModulesPerLumi_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PIXECC/_Experts/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist BadModules_per_lumi_IBL {
-      algorithm = Plain_GatherData
-      output = InnerDetector/Pixel/PIXIBL/_Experts/DisableAndErrorsLB
-      description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
-    }
-    hist BadModules_per_lumi_IBL2D {
+    hist BadModulesPerLumi_IBL {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PIXIBL/_Experts/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist BadModules_per_lumi_IBL3D {
-      algorithm = Plain_GatherData
-      output = InnerDetector/Pixel/PIXIBL/_Experts/DisableAndErrorsLB
-      description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
-    }
-    hist BadModules_per_lumi_B0 {
+    #hist BadModules_per_lumi_IBL2D {
+    #  algorithm = Plain_GatherData
+    #  output = InnerDetector/Pixel/PIXIBL/_Experts/DisableAndErrorsLB
+    #  description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
+    #}
+    #hist BadModules_per_lumi_IBL3D {
+    #  algorithm = Plain_GatherData
+    #  output = InnerDetector/Pixel/PIXIBL/_Experts/DisableAndErrorsLB
+    #  description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
+    #}
+    hist BadModulesPerLumi_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PIX0/_Experts/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist BadModules_per_lumi_B1 {
+    hist BadModulesPerLumi_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PIX1/_Experts/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist BadModules_per_lumi_B2 {
+    hist BadModulesPerLumi_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PIX2/_Experts/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
@@ -1180,274 +1170,242 @@ dir Pixel {
 
   dir Errors {
 
-    hist SyncErrors_Mod_Frac_per_event_IBL {
+    hist SyncErrorsFracPerEvent_IBL {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIXIBL/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
 
-    hist SyncErrors_Mod_Frac_per_event_B0 {
+    hist SyncErrorsFracPerEvent_B0 {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIX0/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SyncErrors_Mod_Frac_per_event_B1 {
+    hist SyncErrorsFracPerEvent_B1 {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIX1/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SyncErrors_Mod_Frac_per_event_B2 {
+    hist SyncErrorsFracPerEvent_B2 {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIX2/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SyncErrors_Mod_Frac_per_event_ECA {
+    hist SyncErrorsFracPerEvent_ECA {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIXECA/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SyncErrors_Mod_Frac_per_event_ECC {
+    hist SyncErrorsFracPerEvent_ECC {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIXECC/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
 
-    hist SyncErrors_ROD_Frac_per_event_IBL {
+    hist SyncErrorsRODFracPerEvent_IBL {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIXIBL/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-
-    hist SyncErrors_ROD_Frac_per_event_B0 {
+    hist SyncErrorsRODFracPerEvent_B0 {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIX0/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SyncErrors_ROD_Frac_per_event_B1 {
+    hist SyncErrorsRODFracPerEvent_B1 {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIX1/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SyncErrors_ROD_Frac_per_event_B2 {
+    hist SyncErrorsRODFracPerEvent_B2 {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIX2/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SyncErrors_ROD_Frac_per_event_ECA {
+    hist SyncErrorsRODFracPerEvent_ECA {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIXECA/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SyncErrors_ROD_Frac_per_event_ECC {
+    hist SyncErrorsRODFracPerEvent_ECC {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIXECC/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
 
-    hist ErrorBit_per_lumi_IBL {
+    hist ErrorStatePerLumi_IBL {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIXIBL/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist ErrorBit_per_lumi_B0 {
+    hist ErrorStatePerLumi_B0 {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIX0/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist ErrorBit_per_lumi_B1 {
+    hist ErrorStatePerLumi_B1 {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIX1/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist ErrorBit_per_lumi_B2 {
+    hist ErrorStatePerLumi_B2 {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIX2/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist ErrorBit_per_lumi_ECA {
+    hist ErrorStatePerLumi_ECA {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIXECA/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist ErrorBit_per_lumi_ECC {
+    hist ErrorStatePerLumi_ECC {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIXECC/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
 
-    hist ErrorBit_per_lumi_IBL {
-      algorithm = Pix_NonZero_Errors
-      output = InnerDetector/Pixel/PIXIBL/_Experts/DisableAndErrorsLB
-      description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
-    }
-    hist ErrorBit_per_lumi_B0 {
-      algorithm = Pix_NonZero_Errors
-      output = InnerDetector/Pixel/PIX0/_Experts/DisableAndErrorsLB
-      description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
-    }
-    hist ErrorBit_per_lumi_B1 {
-      algorithm = Pix_NonZero_Errors
-      output = InnerDetector/Pixel/PIX1/_Experts/DisableAndErrorsLB
-      description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
-    }
-    hist ErrorBit_per_lumi_B2 {
-      algorithm = Pix_NonZero_Errors
-      output = InnerDetector/Pixel/PIX2/_Experts/DisableAndErrorsLB
-      description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
-    }
-    hist ErrorBit_per_lumi_ECA {
-      algorithm = Pix_NonZero_Errors
-      output = InnerDetector/Pixel/PIXECA/_Experts/DisableAndErrorsLB
-      description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
-    }
-    hist ErrorBit_per_lumi_ECC {
-      algorithm = Pix_NonZero_Errors
-      output = InnerDetector/Pixel/PIXECC/_Experts/DisableAndErrorsLB
-      description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
-    }
-
-    hist SEUErrorsFrac_per_event_IBL {
+    hist SEUErrorsFracPerEvent_IBL {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIXIBL/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SEUErrorsFrac_per_event_B0 {
+    hist SEUErrorsFracPerEvent_B0 {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIX0/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SEUErrorsFrac_per_event_B1 {
+    hist SEUErrorsFracPerEvent_B1 {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIX1/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SEUErrorsFrac_per_event_B2 {
+    hist SEUErrorsFracPerEvent_B2 {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIX2/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SEUErrorsFrac_per_event_ECA {
+    hist SEUErrorsFracPerEvent_ECA {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIXECA/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SEUErrorsFrac_per_event_ECC {
+    hist SEUErrorsFracPerEvent_ECC {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIXECC/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
 
-    hist TimeoutErrorsFrac_per_event_IBL {
+    hist TimeoutErrorsFracPerEvent_IBL {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIXIBL/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist TimeoutErrorsFrac_per_event_B0 {
+    hist TimeoutErrorsFracPerEvent_B0 {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIX0/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist TimeoutErrorsFrac_per_event_B1 {
+    hist TimeoutErrorsFracPerEvent_B1 {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIX1/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist TimeoutErrorsFrac_per_event_B2 {
+    hist TimeoutErrorsFracPerEvent_B2 {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIX2/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist TimeoutErrorsFrac_per_event_ECA {
+    hist TimeoutErrorsFracPerEvent_ECA {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIXECA/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist TimeoutErrorsFrac_per_event_ECC {
+    hist TimeoutErrorsFracPerEvent_ECC {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIXECC/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
 
-    hist OpticalErrorsFrac_per_event_IBL {
+    hist OpticalErrorsFracPerEvent_IBL {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIXIBL/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist OpticalErrorsFrac_per_event_B0 {
+    hist OpticalErrorsFracPerEvent_B0 {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIX0/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist OpticalErrorsFrac_per_event_B1 {
+    hist OpticalErrorsFracPerEvent_B1 {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIX1/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist OpticalErrorsFrac_per_event_B2 {
+    hist OpticalErrorsFracPerEvent_B2 {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIX2/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist OpticalErrorsFrac_per_event_ECA {
+    hist OpticalErrorsFracPerEvent_ECA {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIXECA/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist OpticalErrorsFrac_per_event_ECC {
+    hist OpticalErrorsFracPerEvent_ECC {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIXECC/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
 
-    hist TruncationErrorsFrac_per_event_IBL {
+    hist TruncationErrorsFracPerEvent_IBL {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIXIBL/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist TruncationErrorsFrac_per_event_B0 {
+    hist TruncationErrorsFracPerEvent_B0 {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIX0/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist TruncationErrorsFrac_B1 {
+    hist TruncationErrorsFracPerEvent_B1 {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIX1/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist TruncationErrorsFrac_per_event_B2 {
+    hist TruncationErrorsFracPerEvent_B2 {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIX2/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist TruncationErrorsFrac_per_event_ECA {
+    hist TruncationErrorsFracPerEvent_ECA {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIXECA/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist TruncationErrorsFrac_per_event_ECC {
+    hist TruncationErrorsFracPerEvent_ECC {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIXECC/DisableAndErrorsLB
@@ -1455,69 +1413,69 @@ dir Pixel {
     }
 
     ######################################
-    hist SyncErrorsFrac_per_event_IBL {
+    hist SyncErrorsFracPerEvent_IBL {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIXIBL/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SyncErrorsFrac_per_event_B0 {
+    hist SyncErrorsFracPerEvent_B0 {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIX0/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SyncErrorsFrac_per_event_B1 {
+    hist SyncErrorsFracPerEvent_B1 {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIX1/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SyncErrorsFrac_per_event_B2 {
+    hist SyncErrorsFracPerEvent_B2 {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIX2/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SyncErrorsFrac_per_event_ECA {
+    hist SyncErrorsFracPerEvent_ECA {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIXECA/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SyncErrorsFrac_per_event_ECC {
+    hist SyncErrorsFracPerEvent_ECC {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIXECC/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
 
-    hist errors_per_lumi_ECA {
+    hist ErrorsPerLumi_ECA {
       algorithm = Pix_ErrorLB_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIXECA/_Experts/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist errors_per_lumi_ECC {
+    hist ErrorsPerLumi_ECC {
       algorithm = Pix_ErrorLB_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIXECC/_Experts/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist errors_per_lumi_IBL {
+    hist ErrorsPerLumi_IBL {
       algorithm = Pix_ErrorLB_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIXIBL/_Experts/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist errors_per_lumi_B0 {
+    hist ErrorsPerLumi_B0 {
       algorithm = Pix_ErrorLB_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIX0/_Experts/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist errors_per_lumi_B1 {
+    hist ErrorsPerLumi_B1 {
       algorithm = Pix_ErrorLB_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIX1/_Experts/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist errors_per_lumi_B2 {
+    hist ErrorsPerLumi_B2 {
       algorithm = Pix_ErrorLB_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIX2/_Experts/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
@@ -1526,342 +1484,343 @@ dir Pixel {
 
   dir ErrorsExpert {
 
-    hist FE_Warning_per_lumi_PIX {
-      algorithm = Plain_GatherData
-      output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/FE_Warning
-    }
-    hist FE_Warning_per_lumi_ECA {
+    #hist FE_Warning_per_lumi_PIX {
+    #  algorithm = Plain_GatherData
+    #  output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/FE_Warning
+    #}
+    hist FEWarningPerLumi_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/FE_Warning
     }
-    hist FE_Warning_per_lumi_ECC {
+    hist FEWarningPerLumi_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/FE_Warning
     }
-    hist FE_Warning_per_lumi_B0 {
+    hist FEWarningPerLumi_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/FE_Warning
     }
-    hist FE_Warning_per_lumi_B1 {
+    hist FEWarningPerLumi_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/FE_Warning
     }
-    hist FE_Warning_per_lumi_B2 {
+    hist FEWarningPerLumi_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/FE_Warning
     }
 
-    hist Mod_Sync_BCID1_errors_per_lumi_ECA {
+    hist ModSyncBCID1ErrorsPerLumi_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_Module_BCID1
     }
-    hist Mod_Sync_BCID1_errors_per_lumi_ECC {
+    hist ModSyncBCID1ErrorsPerLumi_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_Module_BCID1
     }
-    hist Mod_Sync_BCID1_errors_per_lumi_B0 {
+    hist ModSyncBCID1ErrorsPerLumi_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_Module_BCID1
     }
-    hist Mod_Sync_BCID1_errors_per_lumi_B1 {
+    hist ModSyncBCID1ErrorsPerLumi_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_Module_BCID1
     }
-    hist Mod_Sync_BCID1_errors_per_lumi_B2 {
+    hist ModSyncBCID1ErrorsPerLumi_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_Module_BCID1
     }
 
-    hist Mod_Sync_BCID2_errors_per_lumi_B0 {
+    hist ModSyncBCID2ErrorsPerLumi_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_Module_BCID2
     }
-    hist Mod_Sync_BCID2_errors_per_lumi_B1 {
+    hist ModSyncBCID2ErrorsPerLumi_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_Module_BCID2
     }
-    hist Mod_Sync_BCID2_errors_per_lumi_B2 {
+    hist ModSyncBCID2ErrorsPerLumi_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_Module_BCID2
     }
-    hist Mod_Sync_BCID2_errors_per_lumi_ECA {
+    hist ModSyncBCID2ErrorsPerLumi_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_Module_BCID2
     }
-    hist Mod_Sync_BCID2_errors_per_lumi_ECC {
+    hist ModSyncBCID2ErrorsPerLumi_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_Module_BCID2
     }
 
-    hist Mod_Sync_LVL1ID_errors_per_lumi_B0 {
+    hist ModSyncLVL1IDErrorsPerLumi_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_Module_LVL1ID
     }
-    hist Mod_Sync_LVL1ID_errors_per_lumi_B1 {
+    hist ModSyncLVL1IDErrorsPerLumi_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_Module_LVL1ID
     }
-    hist Mod_Sync_LVL1ID_errors_per_lumi_B2 {
+    hist ModSyncLVL1IDErrorsPerLumi_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_Module_LVL1ID
     }
-    hist Mod_Sync_LVL1ID_errors_per_lumi_ECA {
+    hist ModSyncLVL1IDErrorsPerLumi_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_Module_LVL1ID
     }
-    hist Mod_Sync_LVL1ID_errors_per_lumi_ECC {
+    hist ModSyncLVL1IDErrorsPerLumi_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_Module_LVL1ID
     }
 
-    hist Mod_Trunc_EOC_errors_per_lumi_ECA {
+    hist ModTruncEOCErrorsPerLumi_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_Module_EoC
     }
-    hist Mod_Trunc_EOC_errors_per_lumi_ECC {
+    hist ModTruncEOCErrorsPerLumi_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_Module_EoC
     }
-    hist Mod_Trunc_EOC_errors_per_lumi_B0 {
+    hist ModTruncEOCErrorsPerLumi_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_Module_EoC
     }
-    hist Mod_Trunc_EOC_errors_per_lumi_B1 {
+    hist ModTruncEOCErrorsPerLumi_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_Module_EoC
     }
-    hist Mod_Trunc_EOC_errors_per_lumi_B2 {
+    hist ModTruncEOCErrorsPerLumi_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_Module_EoC
     }
 
-    hist Mod_Trunc_EoE_Overflow_errors_per_lumi_B0 {
+    hist ModTruncEoEOverflowErrorsPerLumi_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_Module_EoE_Overflow
     }
-    hist Mod_Trunc_EoE_Overflow_errors_per_lumi_B1 {
+    hist ModTruncEoEOverflowErrorsPerLumi_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_Module_EoE_Overflow
     }
-    hist Mod_Trunc_EoE_Overflow_errors_per_lumi_B2 {
+    hist ModTruncEoEOverflowErrorsPerLumi_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_Module_EoE_Overflow
     }
-    hist Mod_Trunc_EoE_Overflow_errors_per_lumi_ECA {
+    hist ModTruncEoEOverflowErrorsPerLumi_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_Module_EoE_Overflow
     }
-    hist Mod_Trunc_EoE_Overflow_errors_per_lumi_ECC {
+    hist ModTruncEoEOverflowErrorsPerLumi_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_Module_EoE_Overflow
     }
-    hist Mod_Trunc_Hit_Overflow_errors_per_lumi_B0 {
+
+    hist ModTruncHitOverflowErrorsPerLumi_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_Module_Hit_Overflow
     }
-    hist Mod_Trunc_Hit_Overflow_errors_per_lumi_B1 {
+    hist ModTruncHitOverflowErrorsPerLumi_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_Module_Hit_Overflow
     }
-    hist Mod_Trunc_Hit_Overflow_errors_per_lumi_B2 {
+    hist ModTruncHitOverflowErrorsPerLumi_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_Module_Hit_Overflow
     }
-    hist Mod_Trunc_Hit_Overflow_errors_per_lumi_ECA {
+    hist ModTruncHitOverflowErrorsPerLumi_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_Module_Hit_Overflow
     }
-    hist Mod_Trunc_Hit_Overflow_errors_per_lumi_ECC {
+    hist ModTruncHitOverflowErrorsPerLumi_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_Module_Hit_Overflow
     }
 
-    hist Optical_Errors_per_lumi_B0 {
+    hist OpticalErrorsPerLumi_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Optical_Errors
     }
-    hist Optical_Errors_per_lumi_B1 {
+    hist OpticalErrorsPerLumi_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Optical_Errors
     }
-    hist Optical_Errors_per_lumi_B2 {
+    hist OpticalErrorsPerLumi_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Optical_Errors
     }
-    hist Optical_Errors_per_lumi_ECA {
+    hist OpticalErrorsPerLumi_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Optical_Errors
     }
-    hist Optical_Errors_per_lumi_ECC {
+    hist OpticalErrorsPerLumi_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Optical_Errors
     }
 
-    hist ROD_Sync_BCID_errors_per_lumi_B0 {
+    hist RODSyncBCIDErrorsPerLumi_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_ROD_BCID
     }
-    hist ROD_Sync_BCID_errors_per_lumi_B1 {
+    hist RODSyncBCIDErrorsPerLumi_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_ROD_BCID
     }
-    hist ROD_Sync_BCID_errors_per_lumi_B2 {
+    hist RODSyncBCIDErrorsPerLumi_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_ROD_BCID
     }
-    hist ROD_Sync_BCID_errors_per_lumi_ECA {
+    hist RODSyncBCIDErrorsPerLumi_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_ROD_BCID
     }
-    hist ROD_Sync_BCID_errors_per_lumi_ECC {
+    hist RODSyncBCIDErrorsPerLumi_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_ROD_BCID
     }
 
-    hist ROD_Sync_LVL1ID_errors_per_lumi_B0 {
+    hist RODSyncLVL1IDErrorsPerLumi_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_ROD_LVL1ID
     }
-    hist ROD_Sync_LVL1ID_errors_per_lumi_B1 {
+    hist RODSyncLVL1IDErrorsPerLumi_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_ROD_LVL1ID
     }
-    hist ROD_Sync_LVL1ID_errors_per_lumi_B2 {
+    hist RODSyncLVL1IDErrorsPerLumi_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_ROD_LVL1ID
     }
-    hist ROD_Sync_LVL1ID_errors_per_lumi_ECA {
+    hist RODSyncLVL1IDErrorsPerLumi_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_ROD_LVL1ID
     }
-    hist ROD_Sync_LVL1ID_errors_per_lumi_ECC {
+    hist RODSyncLVL1IDErrorsPerLumi_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_ROD_LVL1ID
     }
 
-    hist ROD_Timeout_per_lumi_B0 {
+    hist RODTimeoutPerLumi_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/ROD_Timeout
     }
-    hist ROD_Timeout_per_lumi_B1 {
+    hist RODTimeoutPerLumi_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/ROD_Timeout
     }
-    hist ROD_Timeout_per_lumi_B2 {
+    hist RODTimeoutPerLumi_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/ROD_Timeout
     }
-    hist ROD_Timeout_per_lumi_ECA {
+    hist RODTimeoutPerLumi_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/ROD_Timeout
     }
-    hist ROD_Timeout_per_lumi_ECC {
+    hist RODTimeoutPerLumi_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/ROD_Timeout
     }
 
-    hist ROD_Trunc_HT_Limit_errors_per_lumi_B0 {
+    hist RODTruncHTLimitErrorsPerLumi_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_ROD_HT_Limit
     }
-    hist ROD_Trunc_HT_Limit_errors_per_lumi_B1 {
+    hist RODTruncHTLimitErrorsPerLumi_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_ROD_HT_Limit
     }
-    hist ROD_Trunc_HT_Limit_errors_per_lumi_B2 {
+    hist RODTruncHTLimitErrorsPerLumi_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_ROD_HT_Limit
     }
-    hist ROD_Trunc_HT_Limit_errors_per_lumi_ECA {
+    hist RODTruncHTLimitErrorsPerLumi_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_ROD_HT_Limit
     }
-    hist ROD_Trunc_HT_Limit_errors_per_lumi_ECC {
+    hist RODTruncHTLimitErrorsPerLumi_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_ROD_HT_Limit
     }
 
-    hist ROD_Trunc_ROD_OF_errors_per_lumi_B0 {
+    hist RODTruncRODOFErrorsPerLumi_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_ROD_FIFO_Overflow
     }
-    hist ROD_Trunc_ROD_OF_errors_per_lumi_B1 {
+    hist RODTruncRODOFErrorsPerLumi_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_ROD_FIFO_Overflow
     }
-    hist ROD_Trunc_ROD_OF_errors_per_lumi_B2 {
+    hist RODTruncRODOFErrorsPerLumi_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_ROD_FIFO_Overflow
     }
-    hist ROD_Trunc_ROD_OF_errors_per_lumi_ECA {
+    hist RODTruncRODOFErrorsPerLumi_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_ROD_FIFO_Overflow
     }
-    hist ROD_Trunc_ROD_OF_errors_per_lumi_ECC {
+    hist RODTruncRODOFErrorsPerLumi_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_ROD_FIFO_Overflow
     }
 
 
-    hist SEU_Hamming_per_lumi_ECA {
+    hist SEUHammingPerLumi_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/SEU_Hamming
     }
-    hist SEU_Hamming_per_lumi_ECC {
+    hist SEUHammingPerLumi_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/SEU_Hamming
     }
-    hist SEU_Hamming_per_lumi_B0 {
+    hist SEUHammingPerLumi_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/SEU_Hamming
     }
-    hist SEU_Hamming_per_lumi_B1 {
+    hist SEUHammingPerLumi_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/SEU_Hamming
     }
-    hist SEU_Hamming_per_lumi_B2 {
+    hist SEUHammingPerLumi_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/SEU_Hamming
     }
 
-    hist SEU_Hit_Parity_per_lumi_ECA {
+    hist SEUHitParityPerLumi_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/SEU_Hit_Parity
     }
-    hist SEU_Hit_Parity_per_lumi_ECC {
+    hist SEUHitParityPerLumi_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/SEU_Hit_Parity
     }
-    hist SEU_Hit_Parity_per_lumi_B0 {
+    hist SEUHitParityPerLumi_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/SEU_Hit_Parity
     }
-    hist SEU_Hit_Parity_per_lumi_B1 {
+    hist SEUHitParityPerLumi_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/SEU_Hit_Parity
     }
-    hist SEU_Hit_Parity_per_lumi_B2 {
+    hist SEUHitParityPerLumi_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/SEU_Hit_Parity
     }
 
-    hist SEU_Register_Parity_per_lumi_ECA {
+    hist SEURegisterParityPerLumi_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/SEU_Register_Parity
     }
-    hist SEU_Register_Parity_per_lumi_ECC {
+    hist SEURegisterParityPerLumi_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/SEU_Register_Parity
     }
-    hist SEU_Register_Parity_per_lumi_B0 {
+    hist SEURegisterParityPerLumi_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/SEU_Register_Parity
     }
-    hist SEU_Register_Parity_per_lumi_B1 {
+    hist SEURegisterParityPerLumi_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/SEU_Register_Parity
     }
-    hist SEU_Register_Parity_per_lumi_B2 {
+    hist SEURegisterParityPerLumi_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/SEU_Register_Parity
     }
@@ -1873,7 +1832,7 @@ dir Pixel {
   #Below plot Combined Tracks
   dir TrackOnTrack {
 
-    hist tracksPerEvt_per_lumi {
+    hist tracksPerEvtPerLumi {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PixelExpert/Tracks
       display = StatBox
@@ -1881,82 +1840,83 @@ dir Pixel {
     }
     
     ### Track State On Surface: Hole
-    hist TSOS_Hole_IBL {
+    hist TSOSHole_IBL {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
-    hist TSOS_Hole_B0 {
+    hist TSOSHole_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
-    hist TSOS_Hole_B1 {
+    hist TSOSHole_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
-    hist TSOS_Hole_B2 {
+    hist TSOSHole_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
-    hist TSOS_Hole_ECA {
+    hist TSOSHole_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
-    hist TSOS_Hole_ECC {
+    hist TSOSHole_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
     ### Track State On Surface: Measurement
-    hist TSOS_Measurement_IBL {
+    hist TSOSMeasurement_IBL {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
-    hist TSOS_Measurement_B0 {
+    hist TSOSMeasurement_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
-    hist TSOS_Measurement_B1 {
+    hist TSOSMeasurement_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
-    hist TSOS_Measurement_B2 {
+    hist TSOSMeasurement_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
-    hist TSOS_Measurement_ECA {
+    hist TSOSMeasurement_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
-    hist TSOS_Measurement_ECC {
+    hist TSOSMeasurement_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
     ### Track State On Surface: Outlier
-    hist TSOS_Outlier_IBL {
+    hist TSOSOutlier_IBL {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
-    hist TSOS_Outlier_B0 {
+    hist TSOSOutlier_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
-    hist TSOS_Outlier_B1 {
+    hist TSOSOutlier_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
-    hist TSOS_Outlier_B2 {
+    hist TSOSOutlier_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
-    hist TSOS_Outlier_ECA {
+    hist TSOSOutlier_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
-    hist TSOS_Outlier_ECC {
+    hist TSOSOutlier_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
   }
 }
+
 # now get the pixel relevant plots from InDetGlobal
 dir InDetGlobal {
   dir Pixel {
diff --git a/DataQuality/DataQualityConfigurations/config/Pixel/cosmics_run.config b/DataQuality/DataQualityConfigurations/config/Pixel/cosmics_run.config
index 2e0136b3927b069cd946553c33134f2cb962dc73..6b48ce1db823ca01c0fe18ea242d6059679dd1c0 100644
--- a/DataQuality/DataQualityConfigurations/config/Pixel/cosmics_run.config
+++ b/DataQuality/DataQualityConfigurations/config/Pixel/cosmics_run.config
@@ -250,106 +250,106 @@ output top_level {
 dir Pixel {
   dir Hits {
 
-    hist Occupancy_per_pixel_event_B0 {
+    hist OccupancyPerPixelEvent_B0 {
       algorithm = Pix_Occupancy_B0B1
       output = InnerDetector/Pixel/PIX0/Hits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Occupancy_per_pixel_event_B1 {
+    hist OccupancyPerPixelEvent_B1 {
       algorithm = Pix_Occupancy_B0B1
       output = InnerDetector/Pixel/PIX1/Hits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Occupancy_per_pixel_event_B2 {
+    hist OccupancyPerPixelEvent_B2 {
       algorithm = Pix_Occupancy_B2
       output = InnerDetector/Pixel/PIX2/Hits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Occupancy_per_pixel_event_ECA {
+    hist OccupancyPerPixelEvent_ECA {
       algorithm = Pix_Occupancy_Endcap
       output = InnerDetector/Pixel/PIXECA/Hits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Occupancy_per_pixel_event_ECC {
+    hist OccupancyPerPixelEvent_ECC {
       algorithm = Pix_Occupancy_Endcap
       output = InnerDetector/Pixel/PIXECC/Hits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Occupancy_per_pixel_event_IBL {
+    hist OccupancyPerPixelEvent_IBL {
       algorithm = Pix_Occupancy_B0B1
       output = InnerDetector/Pixel/PIXIBL/Hits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
 
-    hist AvgOcc_active_per_lumi_B0 {
+    hist AvgOccActivePerLumi_B0 {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIX0/Hits
       display = StatBox
     }
-    hist AvgOcc_active_per_lumi_B1 {
+    hist AvgOccActivePerLumi_B1 {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIX1/Hits
       display = StatBox
     }
-    hist AvgOcc_active_per_lumi_B2 {
+    hist AvgOccActivePerLumi_B2 {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIX2/Hits
       display = StatBox
     }
-    hist AvgOcc_active_per_lumi_ECA {
+    hist AvgOccActivePerLumi_ECA {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIXECA/Hits
       display = StatBox
     }
-    hist AvgOcc_active_per_lumi_ECC {
+    hist AvgOccActivePerLumi_ECC {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIXECC/Hits
       display = StatBox
     }
-    hist AvgOcc_active_per_lumi_IBL {
+    hist AvgOccActivePerLumi_IBL {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIXIBL/Hits
       display = StatBox
     }
 
-    hist AvgOcc_per_BCID_B0 {
+    hist AvgOccPerBCID_B0 {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIX0/_Experts/HitsLB
       display = StatBox
     }
-    hist AvgOcc_per_BCID_B1 {
+    hist AvgOccPerBCID_B1 {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIX1/_Experts/HitsLB
       display = StatBox
     }
-    hist AvgOcc_per_BCID_B2 {
+    hist AvgOccPerBCID_B2 {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIX2/_Experts/HitsLB
       display = StatBox
     }
-    hist AvgOcc_per_BCID_ECA {
+    hist AvgOccPerBCID_ECA {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIXECA/_Experts/HitsLB
       display = StatBox
     }
-    hist AvgOcc_per_BCID_ECC {
+    hist AvgOccPerBCID_ECC {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIXECC/_Experts/HitsLB
       display = StatBox
     }
-    hist AvgOcc_per_BCID_IBL {
+    hist AvgOccPerBCID_IBL {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIXIBL/_Experts/HitsLB
       display = StatBox
     }
 
-    hist Hits_per_lumi {
+    hist HitsPerLumi {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PixelExpert/GeneralAllHits
       output = InnerDetector/Pixel/DQShift/LBDependence
@@ -357,37 +357,37 @@ dir Pixel {
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
 
-    hist Hits_per_lumi_ECA {
+    hist HitsPerLumi_ECA {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIXECA/_Experts/HitsLB
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist Hits_per_lumi_ECC {
+    hist HitsPerLumi_ECC {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIXECC/_Experts/HitsLB
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist Hits_per_lumi_IBL {
+    hist HitsPerLumi_IBL {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIXIBL/_Experts/HitsLB
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist Hits_per_lumi_B0 {
+    hist HitsPerLumi_B0 {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIX0/_Experts/HitsLB
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist Hits_per_lumi_B1 {
+    hist HitsPerLumi_B1 {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIX1/_Experts/HitsLB
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist Hits_per_lumi_B2 {
+    hist HitsPerLumi_B2 {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIX2/_Experts/HitsLB
       display = StatBox
@@ -398,105 +398,105 @@ dir Pixel {
 
   dir Timing {
 
-    hist Cluster_LVL1A_SizeCut_IBL {
+    hist ClusterLVL1ASizeCut_IBL {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXIBL/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_SizeCut_B0 {
+    hist ClusterLVL1ASizeCut_B0 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX0/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_SizeCut_B1 {
+    hist ClusterLVL1ASizeCut_B1 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX1/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_SizeCut_B2 {
+    hist ClusterLVL1ASizeCut_B2 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX2/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_SizeCut_ECA {
+    hist ClusterLVL1ASizeCut_ECA {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXECA/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_SizeCut_ECC {
+    hist ClusterLVL1ASizeCut_ECC {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXECC/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_ToTCut_IBL {
+    hist ClusterLVL1AToTCut_IBL {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXIBL/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_ToTCut_B0 {
+    hist ClusterLVL1AToTCut_B0 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX0/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_ToTCut_B1 {
+    hist ClusterLVL1AToTCut_B1 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX1/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_ToTCut_B2 {
+    hist ClusterLVL1AToTCut_B2 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX2/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_ToTCut_ECA {
+    hist ClusterLVL1AToTCut_ECA {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXECA/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_ToTCut_ECC {
+    hist ClusterLVL1AToTCut_ECC {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXECC/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
 
-    hist Hit_LVL1A_IBL {
+    hist HitLVL1A_IBL {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXIBL/Hits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Hit_LVL1A_B0 {
+    hist HitLVL1A_B0 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX0/Hits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Hit_LVL1A_B1 {
+    hist HitLVL1A_B1 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX1/Hits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Hit_LVL1A_B2 {
+    hist HitLVL1A_B2 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX2/Hits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Hit_LVL1A_ECA {
+    hist HitLVL1A_ECA {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXECA/Hits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Hit_LVL1A_ECC {
+    hist HitLVL1A_ECC {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXECC/Hits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
 
-    hist Cluster_LVL1A {
+    hist ClusterLVL1A {
       algorithm = Pix_Timing_CheckHisto_Mean&BinPrint
       output = InnerDetector/Pixel/PixelExpert/TimingAllHits
       display = StatBox
@@ -507,118 +507,118 @@ dir Pixel {
 
   dir TimingOnTrack {
 
-    hist Cluster_LVL1A_Mod_OnTrack_IBL {
+    hist ClusterLVL1AModOnTrack_IBL {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXIBL/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_Mod_OnTrack_B0 {
+    hist ClusterLVL1AModOnTrack_B0 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX0/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_Mod_OnTrack_B1 {
+    hist ClusterLVL1AModOnTrack_B1 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX1/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_Mod_OnTrack_B2 {
+    hist ClusterLVL1AModOnTrack_B2 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX2/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_Mod_OnTrack_ECA {
+    hist ClusterLVL1AModOnTrack_ECA {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXECA/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_Mod_OnTrack_ECC {
+    hist ClusterLVL1AModOnTrack_ECC {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXECC/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
  
-    hist Cluster_LVL1A_SizeCut_OnTrack_IBL {
+    hist ClusterLVL1ASizeCutOnTrack_IBL {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXIBL/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_SizeCut_OnTrack_B0 {
+    hist ClusterLVL1ASizeCutOnTrack_B0 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX0/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_SizeCut_OnTrack_B1 {
+    hist ClusterLVL1ASizeCutOnTrack_B1 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX1/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_SizeCut_OnTrack_B2 {
+    hist ClusterLVL1ASizeCutOnTrack_B2 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX2/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_SizeCut_OnTrack_ECA {
+    hist ClusterLVL1ASizeCutOnTrack_ECA {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXECA/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_SizeCut_OnTrack_ECC {
+    hist ClusterLVL1ASizeCutOnTrack_ECC {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXECC/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
 
-    hist Cluster_LVL1A_ToTCut_OnTrack_IBL {
+    hist ClusterLVL1AToTCutOnTrack_IBL {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXIBL/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     } 
-    hist Cluster_LVL1A_ToTCut_OnTrack_B0 {
+    hist ClusterLVL1AToTCutOnTrack_B0 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX0/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_ToTCut_OnTrack_B1 {
+    hist ClusterLVL1AToTCutOnTrack_B1 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX1/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_ToTCut_OnTrack_B2 {
+    hist ClusterLVL1AToTCutOnTrack_B2 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX2/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_ToTCut_OnTrack_ECA {
+    hist ClusterLVL1AToTCutOnTrack_ECA {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXECA/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
-    hist Cluster_LVL1A_ToTCut_OnTrack_ECC {
+    hist ClusterLVL1AToTCutOnTrack_ECC {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXECC/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Timing
     }
 
-    hist Cluster_LVL1A_OnTrack {
+    hist ClusterLVL1AOnTrack {
       algorithm = Pix_Timing_CheckHisto_Mean&BinPrint
       output = InnerDetector/Pixel/PixelExpert/TimingAllHits
       display = StatBox
@@ -630,106 +630,106 @@ dir Pixel {
 
   dir Clusters {
 
-    hist Clus_Occ_SizeCut_IBL {
+    hist ClusOccSizeCut_IBL {
       algorithm = Pix_Occupancy_B0B1
       output = InnerDetector/Pixel/PIXIBL/_Experts/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Clus_Occ_SizeCut_B0 {
+    hist ClusOccSizeCut_B0 {
       algorithm = Pix_Occupancy_B0B1
       output = InnerDetector/Pixel/PIX0/_Experts/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Clus_Occ_SizeCut_B1 {
+    hist ClusOccSizeCut_B1 {
       algorithm = Pix_Occupancy_B0B1
       output = InnerDetector/Pixel/PIX1/_Experts/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Clus_Occ_SizeCut_B2 {
+    hist ClusOccSizeCut_B2 {
       algorithm = Pix_Occupancy_B2
       output = InnerDetector/Pixel/PIX2/_Experts/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Clus_Occ_SizeCut_ECA {
+    hist ClusOccSizeCut_ECA {
       algorithm = Pix_Occupancy_Endcap
       output = InnerDetector/Pixel/PIXECA/_Experts/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Clus_Occ_SizeCut_ECC {
+    hist ClusOccSizeCut_ECC {
       algorithm = Pix_Occupancy_Endcap
       output = InnerDetector/Pixel/PIXECC/_Experts/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
    
-    hist Cluster_Occupancy_IBL {
+    hist ClusterOccupancy_IBL {
       algorithm = Pix_Occupancy_B0B1
       output = InnerDetector/Pixel/PIXIBL/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Cluster_Occupancy_B0 {
+    hist ClusterOccupancy_B0 {
       algorithm = Pix_Occupancy_B0B1
       output = InnerDetector/Pixel/PIX0/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Cluster_Occupancy_B1 {
+    hist ClusterOccupancy_B1 {
       algorithm = Pix_Occupancy_B0B1
       output = InnerDetector/Pixel/PIX1/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Cluster_Occupancy_B2 {
+    hist ClusterOccupancy_B2 {
       algorithm = Pix_Occupancy_B2
       output = InnerDetector/Pixel/PIX2/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Cluster_Occupancy_ECA {
+    hist ClusterOccupancy_ECA {
       algorithm = Pix_Occupancy_Endcap
       output = InnerDetector/Pixel/PIXECA/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Cluster_Occupancy_ECC {
+    hist ClusterOccupancy_ECC {
       algorithm = Pix_Occupancy_Endcap
       output = InnerDetector/Pixel/PIXECC/ClusterHits
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
 
-    hist Clusters_per_lumi {
+    hist ClustersPerLumi {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PixelExpert/GeneralAllHits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
 
-    hist Clusters_per_lumi_ECA {
+    hist ClustersPerLumi_ECA {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIXECA/ClusterHits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist Clusters_per_lumi_ECC {
+    hist ClustersPerLumi_ECC {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIXECC/ClusterHits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist Clusters_per_lumi_IBL {
+    hist ClustersPerLumi_IBL {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIXIBL/ClusterHits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist Clusters_per_lumi_B0 {
+    hist ClustersPerLumi_B0 {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIX0/ClusterHits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist Clusters_per_lumi_B1 {
+    hist ClustersPerLumi_B1 {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIX1/ClusterHits
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist Clusters_per_lumi_B2 {
+    hist ClustersPerLumi_B2 {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIX2/ClusterHits
       display = StatBox
@@ -740,68 +740,68 @@ dir Pixel {
 
   dir ClustersOnTrack {
  
-    hist Cluster_Occupancy_OnTrack_IBL {
+    hist ClusterOccupancyOnTrack_IBL {
       algorithm = Pix_Occupancy_B0B1
       output = InnerDetector/Pixel/PIXIBL/ClusterHitsOnTrack
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Cluster_Occupancy_OnTrack_B0 {
+    hist ClusterOccupancyOnTrack_B0 {
       algorithm = Pix_Occupancy_B0B1
       output = InnerDetector/Pixel/PIX0/ClusterHitsOnTrack
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Cluster_Occupancy_OnTrack_B1 {
+    hist ClusterOccupancyOnTrack_B1 {
       algorithm = Pix_Occupancy_B0B1
       output = InnerDetector/Pixel/PIX1/ClusterHitsOnTrack
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Cluster_Occupancy_OnTrack_B2 {
+    hist ClusterOccupancyOnTrack_B2 {
       algorithm = Pix_Occupancy_B2
       output = InnerDetector/Pixel/PIX2/ClusterHitsOnTrack
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Cluster_Occupancy_OnTrack_ECA {
+    hist ClusterOccupancyOnTrack_ECA {
       algorithm = Pix_Occupancy_Endcap
       output = InnerDetector/Pixel/PIXECA/ClusterHitsOnTrack
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Cluster_Occupancy_OnTrack_ECC {
+    hist ClusterOccupancyOnTrack_ECC {
       algorithm = Pix_Occupancy_Endcap
       output = InnerDetector/Pixel/PIXECC/ClusterHitsOnTrack
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
 
-    hist Cluster_QxCosAlpha_OnTrack_IBL {
+    hist ClusterQxCosAlphaOnTrack_IBL {
       algorithm = Pix_Charge_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIXIBL/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Charge_and_ToT
     }
-    hist Cluster_QxCosAlpha_OnTrack_B0 {
+    hist ClusterQxCosAlphaOnTrack_B0 {
       algorithm = Pix_Charge_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIX0/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Charge_and_ToT
     }
-    hist Cluster_QxCosAlpha_OnTrack_B1 {
+    hist ClusterQxCosAlphaOnTrack_B1 {
       algorithm = Pix_Charge_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIX1/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Charge_and_ToT
     }
-    hist Cluster_QxCosAlpha_OnTrack_B2 {
+    hist ClusterQxCosAlphaOnTrack_B2 {
       algorithm = Pix_Charge_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIX2/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Charge_and_ToT
     }
-    hist Cluster_QxCosAlpha_OnTrack_ECA {
+    hist ClusterQxCosAlphaOnTrack_ECA {
       algorithm = Pix_Charge_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIXECA/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Charge_and_ToT
     }
-    hist Cluster_QxCosAlpha_OnTrack_ECC {
+    hist ClusterQxCosAlphaOnTrack_ECC {
       algorithm = Pix_Charge_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIXECC/ClusterHitsOnTrack
       display = StatBox
@@ -809,97 +809,87 @@ dir Pixel {
     }    
 
 
-    hist Cluster_ToTxCosAlpha_OnTrack_IBL {
+    hist ClusterToTxCosAlphaOnTrack_IBL {
       algorithm = Pix_ToT_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIXIBL/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Charge_and_ToT
     }
-    hist Cluster_ToTxCosAlpha_OnTrack_ECA {
+    hist ClusterToTxCosAlphaOnTrack_ECA {
       algorithm = Pix_ToT_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIXECA/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Charge_and_ToT
     }
-    hist Cluster_ToTxCosAlpha_OnTrack_ECC {
+    hist ClusterToTxCosAlphaOnTrack_ECC {
       algorithm = Pix_ToT_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIXECC/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Charge_and_ToT
     }
-    hist Cluster_ToTxCosAlpha_OnTrack_B0 {
+    hist ClusterToTxCosAlphaOnTrack_B0 {
       algorithm = Pix_ToT_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIX0/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Charge_and_ToT
     }
-    hist Cluster_ToTxCosAlpha_OnTrack_B1 {
+    hist ClusterToTxCosAlphaOnTrack_B1 {
       algorithm = Pix_ToT_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIX1/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Charge_and_ToT
     }
-    hist Cluster_ToTxCosAlpha_OnTrack_B2 {
+    hist ClusterToTxCosAlphaOnTrack_B2 {
       algorithm = Pix_ToT_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIX2/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Charge_and_ToT
     }
     
-    hist num_clusters {
-      algorithm = PixTrack_Noise_CheckHisto_Mean&GatherData
-      output = InnerDetector/Pixel/PixelExpert/GeneralOnTrack
-      display = AxisRange(0.0,50.0,"X"),StatBox
-      description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#General_Hits
-    }
+    #hist num_clusters {
+    #  algorithm = PixTrack_Noise_CheckHisto_Mean&GatherData
+    #  output = InnerDetector/Pixel/PixelExpert/GeneralOnTrack
+    #  display = AxisRange(0.0,50.0,"X"),StatBox
+    #  description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#General_Hits
+    #}
 
-    hist Cluster_groupsize_OnTrack {
-      algorithm = Histogram_Not_Empty
-      output = InnerDetector/Pixel/PixelExpert/GeneralOnTrack
-      display = StatBox
-      description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#General_Hits
-    }
+    #hist Cluster_groupsize_OnTrack {
+    #  algorithm = Histogram_Not_Empty
+    #  output = InnerDetector/Pixel/PixelExpert/GeneralOnTrack
+    #  display = StatBox
+    #  description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#General_Hits
+    #}
 
-    hist Cluster_Size_Map_OnTrack_B0 {
+    hist ClusterSizeMapOnTrack_B0 {
       algorithm = Pix_Histogram_Not_Empty
       output = InnerDetector/Pixel/PIX0/ClusterHitsOnTrack
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Cluster_Size_Map_OnTrack_B1 {
+    hist ClusterSizeMapOnTrack_B1 {
       algorithm = Pix_Histogram_Not_Empty
       output = InnerDetector/Pixel/PIX1/ClusterHitsOnTrack
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Cluster_Size_Map_OnTrack_B2 {
+    hist ClusterSizeMapOnTrack_B2 {
       algorithm = Pix_Histogram_Not_Empty
       output = InnerDetector/Pixel/PIX2/ClusterHitsOnTrack
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Cluster_Size_Map_OnTrack_ECA {
+    hist ClusterSizeMapOnTrack_ECA {
       algorithm = Pix_Histogram_Not_Empty
       output = InnerDetector/Pixel/PIXECA/ClusterHitsOnTrack
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Cluster_Size_Map_OnTrack_ECC {
+    hist ClusterSizeMapOnTrack_ECC {
       algorithm = Pix_Histogram_Not_Empty
       output = InnerDetector/Pixel/PIXECC/ClusterHitsOnTrack
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-    hist Cluster_Size_Map_OnTrack_IBL {
+    hist ClusterSizeMapOnTrack_IBL {
       algorithm = Pix_Histogram_Not_Empty
       output = InnerDetector/Pixel/PIXIBL/ClusterHitsOnTrack
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
     }
-   # hist Cluster_Size_Map_IBL2D {
-   #   algorithm =  Pix_Histogram_Not_Empty
-   #   output = InnerDetector/Pixel/PIXIBL/ClusterHitsOnTrack
-   #   description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
-   # }
-   # hist Cluster_Size_Map_IBL3D {
-   #   algorithm = Pix_Histogram_Not_Empty
-   #   output = InnerDetector/Pixel/PIXIBL/ClusterHitsOnTrack
-   #   description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Occupancies
-   # }
 
     #hist Cluster_groupsize@1 {
     #  algorithm = Pix_Histogram_Not_Empty
@@ -938,239 +928,239 @@ dir Pixel {
     #  description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#General_Hits
     #}
 
-    hist TotalClusters_per_lumi {
-      algorithm = Pix_LB_Bins_Diff_FromAvg
-      output = InnerDetector/Pixel/PixelExpert/GeneralOnTrack
-      display = StatBox
-      description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
-    }
+    #hist TotalClusters_per_lumi {
+    #  algorithm = Pix_LB_Bins_Diff_FromAvg
+    #  output = InnerDetector/Pixel/PixelExpert/GeneralOnTrack
+    #  display = StatBox
+    #  description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
+    #}
 
-    hist Clusters_per_lumi {
+    hist ClustersPerLumiOnTrack {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PixelExpert/GeneralOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
 
-    hist Clusters_per_lumi_OnTrack_ECA {
+    hist ClustersPerLumiOnTrack_ECA {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIXECA/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist Clusters_per_lumi_OnTrack_ECC {
+    hist ClustersPerLumiOnTrack_ECC {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIXECC/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist Clusters_per_lumi_OnTrack_IBL {
+    hist ClustersPerLumiOnTrack_IBL {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIXIBL/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist Clusters_per_lumi_OnTrack_B0 {
+    hist ClustersPerLumiOnTrack_B0 {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIX0/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist Clusters_per_lumi_OnTrack_B1 {
+    hist ClustersPerLumiOnTrack_B1 {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIX1/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist Clusters_per_lumi_OnTrack_B2 {
+    hist ClustersPerLumiOnTrack_B2 {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PIX2/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
 
-    hist num_clusters_per_track_per_lumi_IBL {
+    hist NumClustersPerTrackPerLumi_IBL {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXIBL/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#General_Hits
     }
-    hist num_clusters_per_track_per_lumi_B0 {
+    hist NumClustersPerTrackPerLumi_B0 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX0/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#General_Hits
     }
-    hist num_clusters_per_track_per_lumi_B1 {
+    hist NumClustersPerTrackPerLumi_B1 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX1/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#General_Hits
     } 
-    hist num_clusters_per_track_per_lumi_B2 {
+    hist NumClustersPerTrackPerLumi_B2 {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIX2/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#General_Hits
     } 
-    hist num_clusters_per_track_per_lumi_ECA {
+    hist NumClustersPerTrackPerLumi_ECA {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXECA/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#General_Hits
     }
-    hist num_clusters_per_track_per_lumi_ECC {
+    hist NumClustersPerTrackPerLumi_ECC {
       algorithm = Pix_GatherData
       output = InnerDetector/Pixel/PIXECC/ClusterHitsOnTrack
       display = StatBox
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#General_Hits
     }
 
-    hist LargeClusters_per_lumi_OnTrack {
-      algorithm = Plain_GatherData
-      output = InnerDetector/Pixel/PixelExpert/GeneralOnTrack
-      display = StatBox
-      description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
-    }
+    #hist LargeClusters_per_lumi_OnTrack {
+    #  algorithm = Plain_GatherData
+    #  output = InnerDetector/Pixel/PixelExpert/GeneralOnTrack
+    #  display = StatBox
+    #  description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
+    #}
 
-    hist VeryLargeClusters_per_lumi_OnTrack {
-      algorithm = Plain_GatherData
-      output = InnerDetector/Pixel/PixelExpert/GeneralOnTrack
-      display = StatBox
-      description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
-    }
+    #hist VeryLargeClusters_per_lumi_OnTrack {
+    #  algorithm = Plain_GatherData
+    #  output = InnerDetector/Pixel/PixelExpert/GeneralOnTrack
+    #  display = StatBox
+    #  description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
+    #}
 
-    hist HighNClusters_per_lumi_OnTrack {
-      algorithm = Plain_GatherData
-      output = InnerDetector/Pixel/PixelExpert/GeneralOnTrack
-      display = StatBox
-      description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
-    }
+    #hist HighNClusters_per_lumi_OnTrack {
+    #  algorithm = Plain_GatherData
+    #  output = InnerDetector/Pixel/PixelExpert/GeneralOnTrack
+    #  display = StatBox
+    #  description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
+    #}
   }
 
 
   dir Status {
-    hist Map_Of_Modules_Status_IBL {
+    hist MapOfModulesStatus_IBL {
       algorithm = Pix_DisabledMod_B0B1
       output = InnerDetector/Pixel/PIXIBL/DisableAndErrors
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Disabled_Modules
     }
 
-    hist Map_Of_Modules_Status_B0 {
+    hist MapOfModulesStatus_B0 {
       algorithm = Pix_DisabledMod_B0B1
       output = InnerDetector/Pixel/PIX0/DisableAndErrors
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Disabled_Modules
     }
-    hist Map_Of_Modules_Status_B1 {
+    hist MapOfModulesStatus_B1 {
       algorithm = Pix_DisabledMod_B0B1
       output = InnerDetector/Pixel/PIX1/DisableAndErrors
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Disabled_Modules
     }
-    hist Map_Of_Modules_Status_B2 {
+    hist MapOfModulesStatus_B2 {
       algorithm = Pix_DisabledMod_B2
       output = InnerDetector/Pixel/PIX2/DisableAndErrors
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Disabled_Modules
     }
-    hist Map_Of_Modules_Status_ECA {
+    hist MapOfModulesStatus_ECA {
       algorithm = Pix_DisabledMod_Endcap
       output = InnerDetector/Pixel/PIXECA/DisableAndErrors
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Disabled_Modules
     }
-    hist Map_Of_Modules_Status_ECC {
+    hist MapOfModulesStatus_ECC {
       algorithm = Pix_DisabledMod_Endcap
       output = InnerDetector/Pixel/PIXECC/DisableAndErrors
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#Disabled_Modules
     }
 
-    hist DisabledModules_per_lumi {
-      algorithm = Pix_DisabledLB_CheckHisto_Mean&GatherData
-      output = InnerDetector/Pixel/PixelExpert/Errors/ALL
-      description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
-    }
+    #hist DisabledModules_per_lumi {
+    #  algorithm = Pix_DisabledLB_CheckHisto_Mean&GatherData
+    #  output = InnerDetector/Pixel/PixelExpert/Errors/ALL
+    #  description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
+    #}
 
-    hist DisabledModules_per_lumi_ECA {
+    hist DisabledModulesPerLumi_ECA {
       algorithm = Pix_DisabledLB_CheckHisto_Mean_Endcap&GatherData
       output = InnerDetector/Pixel/PIXECA/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist DisabledModules_per_lumi_ECC {
+    hist DisabledModulesPerLumi_ECC {
       algorithm = Pix_DisabledLB_CheckHisto_Mean_Endcap&GatherData
       output = InnerDetector/Pixel/PIXECC/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist DisabledModules_per_lumi_IBL {
+    hist DisabledModulesPerLumi_IBL {
       algorithm = Pix_DisabledLB_CheckHisto_Mean_B0B1&GatherData
       output = InnerDetector/Pixel/PIXIBL/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist DisabledModules_per_lumi_IBL2D {
-      algorithm = Pix_DisabledLB_CheckHisto_Mean_B0B1&GatherData
-      output = InnerDetector/Pixel/PIXIBL/_Experts/DisableAndErrorsLB
-      description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
-    }
-    hist DisabledModules_per_lumi_IBL3D {
-      algorithm = Pix_DisabledLB_CheckHisto_Mean_B0B1&GatherData
-      output = InnerDetector/Pixel/PIXIBL/_Experts/DisableAndErrorsLB
-      description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
-    }
-    hist DisabledModules_per_lumi_B0 {
+    #hist DisabledModules_per_lumi_IBL2D {
+    #  algorithm = Pix_DisabledLB_CheckHisto_Mean_B0B1&GatherData
+    #  output = InnerDetector/Pixel/PIXIBL/_Experts/DisableAndErrorsLB
+    #  description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
+    #}
+    #hist DisabledModules_per_lumi_IBL3D {
+    #  algorithm = Pix_DisabledLB_CheckHisto_Mean_B0B1&GatherData
+    #  output = InnerDetector/Pixel/PIXIBL/_Experts/DisableAndErrorsLB
+    #  description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
+    #}
+    hist DisabledModulesPerLumi_B0 {
       algorithm = Pix_DisabledLB_CheckHisto_Mean_B0B1&GatherData
       output = InnerDetector/Pixel/PIX0/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist DisabledModules_per_lumi_B1 {
+    hist DisabledModulesPerLumi_B1 {
       algorithm = Pix_DisabledLB_CheckHisto_Mean_B0B1&GatherData
       output = InnerDetector/Pixel/PIX1/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist DisabledModules_per_lumi_B2 {
+    hist DisabledModulesPerLumi_B2 {
       algorithm = Pix_DisabledLB_CheckHisto_Mean_B2&GatherData
       output = InnerDetector/Pixel/PIX2/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
 
-    hist BadModules_per_lumi {
-      algorithm = Plain_GatherData
-      output = InnerDetector/Pixel/PixelExpert/Errors/ALL
-      description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
-    }
+    #hist BadModules_per_lumi {
+    #  algorithm = Plain_GatherData
+    #  output = InnerDetector/Pixel/PixelExpert/Errors/ALL
+    #  description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
+    #}
 
-    hist BadModules_per_lumi_ECA {
+    hist BadModulesPerLumi_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PIXECA/_Experts/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist BadModules_per_lumi_ECC {
+    hist BadModulesPerLumi_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PIXECC/_Experts/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist BadModules_per_lumi_IBL {
-      algorithm = Plain_GatherData
-      output = InnerDetector/Pixel/PIXIBL/_Experts/DisableAndErrorsLB
-      description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
-    }
-    hist BadModules_per_lumi_IBL2D {
+    hist BadModulesPerLumi_IBL {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PIXIBL/_Experts/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist BadModules_per_lumi_IBL3D {
-      algorithm = Plain_GatherData
-      output = InnerDetector/Pixel/PIXIBL/_Experts/DisableAndErrorsLB
-      description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
-    }
-    hist BadModules_per_lumi_B0 {
+    #hist BadModules_per_lumi_IBL2D {
+    #  algorithm = Plain_GatherData
+    #  output = InnerDetector/Pixel/PIXIBL/_Experts/DisableAndErrorsLB
+    #  description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
+    #}
+    #hist BadModules_per_lumi_IBL3D {
+    #  algorithm = Plain_GatherData
+    #  output = InnerDetector/Pixel/PIXIBL/_Experts/DisableAndErrorsLB
+    #  description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
+    #}
+    hist BadModulesPerLumi_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PIX0/_Experts/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist BadModules_per_lumi_B1 {
+    hist BadModulesPerLumi_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PIX1/_Experts/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist BadModules_per_lumi_B2 {
+    hist BadModulesPerLumi_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PIX2/_Experts/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
@@ -1180,274 +1170,242 @@ dir Pixel {
 
   dir Errors {
 
-    hist SyncErrors_Mod_Frac_per_event_IBL {
+    hist SyncErrorsModFracPerEvent_IBL {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIXIBL/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
 
-    hist SyncErrors_Mod_Frac_per_event_B0 {
+    hist SyncErrorsModFracPerEvent_B0 {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIX0/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SyncErrors_Mod_Frac_per_event_B1 {
+    hist SyncErrorsModFracPerEvent_B1 {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIX1/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SyncErrors_Mod_Frac_per_event_B2 {
+    hist SyncErrorsModFracPerEvent_B2 {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIX2/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SyncErrors_Mod_Frac_per_event_ECA {
+    hist SyncErrorsModFracPerEvent_ECA {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIXECA/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SyncErrors_Mod_Frac_per_event_ECC {
+    hist SyncErrorsModFracPerEvent_ECC {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIXECC/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
 
-    hist SyncErrors_ROD_Frac_per_event_IBL {
+    hist SyncErrorsRODFracPerEvent_IBL {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIXIBL/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-
-    hist SyncErrors_ROD_Frac_per_event_B0 {
+    hist SyncErrorsRODFracPerEvent_B0 {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIX0/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SyncErrors_ROD_Frac_per_event_B1 {
+    hist SyncErrorsRODFracPerEvent_B1 {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIX1/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SyncErrors_ROD_Frac_per_event_B2 {
+    hist SyncErrorsRODFracPerEvent_B2 {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIX2/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SyncErrors_ROD_Frac_per_event_ECA {
+    hist SyncErrorsRODFracPerEvent_ECA {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIXECA/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SyncErrors_ROD_Frac_per_event_ECC {
+    hist SyncErrorsRODFracPerEvent_ECC {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIXECC/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
 
-    hist ErrorBit_per_lumi_IBL {
+    hist ErrorStatePerLumi_IBL {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIXIBL/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist ErrorBit_per_lumi_B0 {
+    hist ErrorStatePerLumi_B0 {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIX0/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist ErrorBit_per_lumi_B1 {
+    hist ErrorStatePerLumi_B1 {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIX1/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist ErrorBit_per_lumi_B2 {
+    hist ErrorStatePerLumi_B2 {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIX2/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist ErrorBit_per_lumi_ECA {
+    hist ErrorStatePerLumi_ECA {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIXECA/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist ErrorBit_per_lumi_ECC {
+    hist ErrorStatePerLumi_ECC {
       algorithm = Pix_NonZero_Errors
       output = InnerDetector/Pixel/PIXECC/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
 
-    hist ErrorBit_per_lumi_IBL {
-      algorithm = Pix_NonZero_Errors
-      output = InnerDetector/Pixel/PIXIBL/_Experts/DisableAndErrorsLB
-      description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
-    }
-    hist ErrorBit_per_lumi_B0 {
-      algorithm = Pix_NonZero_Errors
-      output = InnerDetector/Pixel/PIX0/_Experts/DisableAndErrorsLB
-      description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
-    }
-    hist ErrorBit_per_lumi_B1 {
-      algorithm = Pix_NonZero_Errors
-      output = InnerDetector/Pixel/PIX1/_Experts/DisableAndErrorsLB
-      description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
-    }
-    hist ErrorBit_per_lumi_B2 {
-      algorithm = Pix_NonZero_Errors
-      output = InnerDetector/Pixel/PIX2/_Experts/DisableAndErrorsLB
-      description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
-    }
-    hist ErrorBit_per_lumi_ECA {
-      algorithm = Pix_NonZero_Errors
-      output = InnerDetector/Pixel/PIXECA/_Experts/DisableAndErrorsLB
-      description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
-    }
-    hist ErrorBit_per_lumi_ECC {
-      algorithm = Pix_NonZero_Errors
-      output = InnerDetector/Pixel/PIXECC/_Experts/DisableAndErrorsLB
-      description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
-    }
-
-    hist SEUErrorsFrac_per_event_IBL {
+    hist SEUErrorsFracPerEvent_IBL {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIXIBL/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SEUErrorsFrac_per_event_B0 {
+    hist SEUErrorsFracPerEvent_B0 {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIX0/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SEUErrorsFrac_per_event_B1 {
+    hist SEUErrorsFracPerEvent_B1 {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIX1/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SEUErrorsFrac_per_event_B2 {
+    hist SEUErrorsFracPerEvent_B2 {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIX2/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SEUErrorsFrac_per_event_ECA {
+    hist SEUErrorsFracPerEvent_ECA {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIXECA/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SEUErrorsFrac_per_event_ECC {
+    hist SEUErrorsFracPerEvent_ECC {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIXECC/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
 
-    hist TimeoutErrorsFrac_per_event_IBL {
+    hist TimeoutErrorsFracPerEvent_IBL {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIXIBL/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist TimeoutErrorsFrac_per_event_B0 {
+    hist TimeoutErrorsFracPerEvent_B0 {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIX0/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist TimeoutErrorsFrac_per_event_B1 {
+    hist TimeoutErrorsFracPerEvent_B1 {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIX1/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist TimeoutErrorsFrac_per_event_B2 {
+    hist TimeoutErrorsFracPerEvent_B2 {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIX2/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist TimeoutErrorsFrac_per_event_ECA {
+    hist TimeoutErrorsFracPerEvent_ECA {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIXECA/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist TimeoutErrorsFrac_per_event_ECC {
+    hist TimeoutErrorsFracPerEvent_ECC {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIXECC/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
 
-    hist OpticalErrorsFrac_per_event_IBL {
+    hist OpticalErrorsFracPerEvent_IBL {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIXIBL/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist OpticalErrorsFrac_per_event_B0 {
+    hist OpticalErrorsFracPerEvent_B0 {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIX0/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist OpticalErrorsFrac_per_event_B1 {
+    hist OpticalErrorsFracPerEvent_B1 {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIX1/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist OpticalErrorsFrac_per_event_B2 {
+    hist OpticalErrorsFracPerEvent_B2 {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIX2/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist OpticalErrorsFrac_per_event_ECA {
+    hist OpticalErrorsFracPerEvent_ECA {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIXECA/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist OpticalErrorsFrac_per_event_ECC {
+    hist OpticalErrorsFracPerEvent_ECC {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIXECC/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
 
-    hist TruncationErrorsFrac_per_event_IBL {
+    hist TruncationErrorsFracPerEvent_IBL {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIXIBL/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist TruncationErrorsFrac_per_event_B0 {
+    hist TruncationErrorsFracPerEvent_B0 {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIX0/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist TruncationErrorsFrac_B1 {
+    hist TruncationErrorsFracPerEvent_B1 {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIX1/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist TruncationErrorsFrac_per_event_B2 {
+    hist TruncationErrorsFracPerEvent_B2 {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIX2/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist TruncationErrorsFrac_per_event_ECA {
+    hist TruncationErrorsFracPerEvent_ECA {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIXECA/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist TruncationErrorsFrac_per_event_ECC {
+    hist TruncationErrorsFracPerEvent_ECC {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIXECC/DisableAndErrorsLB
@@ -1455,69 +1413,69 @@ dir Pixel {
     }
 
     ######################################
-    hist SyncErrorsFrac_per_event_IBL {
+    hist SyncErrorsFracPerEvent_IBL {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIXIBL/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SyncErrorsFrac_per_event_B0 {
+    hist SyncErrorsFracPerEvent_B0 {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIX0/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SyncErrorsFrac_per_event_B1 {
+    hist SyncErrorsFracPerEvent_B1 {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIX1/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SyncErrorsFrac_per_event_B2 {
+    hist SyncErrorsFracPerEvent_B2 {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIX2/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SyncErrorsFrac_per_event_ECA {
+    hist SyncErrorsFracPerEvent_ECA {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIXECA/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
-    hist SyncErrorsFrac_per_event_ECC {
+    hist SyncErrorsFracPerEvent_ECC {
       #algorithm = Pix_NonZero_Errors
       algorithm = Pix_Frac_Errors
       output = InnerDetector/Pixel/PIXECC/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelOfflineShiftInstructionsDQRun2#Errors
     }
 
-    hist errors_per_lumi_ECA {
+    hist ErrorsPerLumi_ECA {
       algorithm = Pix_ErrorLB_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIXECA/_Experts/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist errors_per_lumi_ECC {
+    hist ErrorsPerLumi_ECC {
       algorithm = Pix_ErrorLB_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIXECC/_Experts/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist errors_per_lumi_IBL {
+    hist ErrorsPerLumi_IBL {
       algorithm = Pix_ErrorLB_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIXIBL/_Experts/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist errors_per_lumi_B0 {
+    hist ErrorsPerLumi_B0 {
       algorithm = Pix_ErrorLB_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIX0/_Experts/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist errors_per_lumi_B1 {
+    hist ErrorsPerLumi_B1 {
       algorithm = Pix_ErrorLB_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIX1/_Experts/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
     }
-    hist errors_per_lumi_B2 {
+    hist ErrorsPerLumi_B2 {
       algorithm = Pix_ErrorLB_CheckHisto_Mean&GatherData
       output = InnerDetector/Pixel/PIX2/_Experts/DisableAndErrorsLB
       description = https://twiki.cern.ch/twiki/bin/view/Atlas/PixelOfflineShiftInstructionsDQ#LB_Dependence
@@ -1526,342 +1484,343 @@ dir Pixel {
 
   dir ErrorsExpert {
 
-    hist FE_Warning_per_lumi_PIX {
-      algorithm = Plain_GatherData
-      output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/FE_Warning
-    }
-    hist FE_Warning_per_lumi_ECA {
+    #hist FE_Warning_per_lumi_PIX {
+    #  algorithm = Plain_GatherData
+    #  output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/FE_Warning
+    #}
+    hist FEWarningPerLumi_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/FE_Warning
     }
-    hist FE_Warning_per_lumi_ECC {
+    hist FEWarningPerLumi_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/FE_Warning
     }
-    hist FE_Warning_per_lumi_B0 {
+    hist FEWarningPerLumi_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/FE_Warning
     }
-    hist FE_Warning_per_lumi_B1 {
+    hist FEWarningPerLumi_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/FE_Warning
     }
-    hist FE_Warning_per_lumi_B2 {
+    hist FEWarningPerLumi_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/FE_Warning
     }
 
-    hist Mod_Sync_BCID1_errors_per_lumi_ECA {
+    hist ModSyncBCID1ErrorsPerLumi_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_Module_BCID1
     }
-    hist Mod_Sync_BCID1_errors_per_lumi_ECC {
+    hist ModSyncBCID1ErrorsPerLumi_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_Module_BCID1
     }
-    hist Mod_Sync_BCID1_errors_per_lumi_B0 {
+    hist ModSyncBCID1ErrorsPerLumi_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_Module_BCID1
     }
-    hist Mod_Sync_BCID1_errors_per_lumi_B1 {
+    hist ModSyncBCID1ErrorsPerLumi_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_Module_BCID1
     }
-    hist Mod_Sync_BCID1_errors_per_lumi_B2 {
+    hist ModSyncBCID1ErrorsPerLumi_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_Module_BCID1
     }
 
-    hist Mod_Sync_BCID2_errors_per_lumi_B0 {
+    hist ModSyncBCID2ErrorsPerLumi_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_Module_BCID2
     }
-    hist Mod_Sync_BCID2_errors_per_lumi_B1 {
+    hist ModSyncBCID2ErrorsPerLumi_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_Module_BCID2
     }
-    hist Mod_Sync_BCID2_errors_per_lumi_B2 {
+    hist ModSyncBCID2ErrorsPerLumi_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_Module_BCID2
     }
-    hist Mod_Sync_BCID2_errors_per_lumi_ECA {
+    hist ModSyncBCID2ErrorsPerLumi_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_Module_BCID2
     }
-    hist Mod_Sync_BCID2_errors_per_lumi_ECC {
+    hist ModSyncBCID2ErrorsPerLumi_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_Module_BCID2
     }
 
-    hist Mod_Sync_LVL1ID_errors_per_lumi_B0 {
+    hist ModSyncLVL1IDErrorsPerLumi_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_Module_LVL1ID
     }
-    hist Mod_Sync_LVL1ID_errors_per_lumi_B1 {
+    hist ModSyncLVL1IDErrorsPerLumi_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_Module_LVL1ID
     }
-    hist Mod_Sync_LVL1ID_errors_per_lumi_B2 {
+    hist ModSyncLVL1IDErrorsPerLumi_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_Module_LVL1ID
     }
-    hist Mod_Sync_LVL1ID_errors_per_lumi_ECA {
+    hist ModSyncLVL1IDErrorsPerLumi_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_Module_LVL1ID
     }
-    hist Mod_Sync_LVL1ID_errors_per_lumi_ECC {
+    hist ModSyncLVL1IDErrorsPerLumi_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_Module_LVL1ID
     }
 
-    hist Mod_Trunc_EOC_errors_per_lumi_ECA {
+    hist ModTruncEOCErrorsPerLumi_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_Module_EoC
     }
-    hist Mod_Trunc_EOC_errors_per_lumi_ECC {
+    hist ModTruncEOCErrorsPerLumi_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_Module_EoC
     }
-    hist Mod_Trunc_EOC_errors_per_lumi_B0 {
+    hist ModTruncEOCErrorsPerLumi_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_Module_EoC
     }
-    hist Mod_Trunc_EOC_errors_per_lumi_B1 {
+    hist ModTruncEOCErrorsPerLumi_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_Module_EoC
     }
-    hist Mod_Trunc_EOC_errors_per_lumi_B2 {
+    hist ModTruncEOCErrorsPerLumi_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_Module_EoC
     }
 
-    hist Mod_Trunc_EoE_Overflow_errors_per_lumi_B0 {
+    hist ModTruncEoEOverflowErrorsPerLumi_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_Module_EoE_Overflow
     }
-    hist Mod_Trunc_EoE_Overflow_errors_per_lumi_B1 {
+    hist ModTruncEoEOverflowErrorsPerLumi_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_Module_EoE_Overflow
     }
-    hist Mod_Trunc_EoE_Overflow_errors_per_lumi_B2 {
+    hist ModTruncEoEOverflowErrorsPerLumi_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_Module_EoE_Overflow
     }
-    hist Mod_Trunc_EoE_Overflow_errors_per_lumi_ECA {
+    hist ModTruncEoEOverflowErrorsPerLumi_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_Module_EoE_Overflow
     }
-    hist Mod_Trunc_EoE_Overflow_errors_per_lumi_ECC {
+    hist ModTruncEoEOverflowErrorsPerLumi_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_Module_EoE_Overflow
     }
-    hist Mod_Trunc_Hit_Overflow_errors_per_lumi_B0 {
+
+    hist ModTruncHitOverflowErrorsPerLumi_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_Module_Hit_Overflow
     }
-    hist Mod_Trunc_Hit_Overflow_errors_per_lumi_B1 {
+    hist ModTruncHitOverflowErrorsPerLumi_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_Module_Hit_Overflow
     }
-    hist Mod_Trunc_Hit_Overflow_errors_per_lumi_B2 {
+    hist ModTruncHitOverflowErrorsPerLumi_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_Module_Hit_Overflow
     }
-    hist Mod_Trunc_Hit_Overflow_errors_per_lumi_ECA {
+    hist ModTruncHitOverflowErrorsPerLumi_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_Module_Hit_Overflow
     }
-    hist Mod_Trunc_Hit_Overflow_errors_per_lumi_ECC {
+    hist ModTruncHitOverflowErrorsPerLumi_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_Module_Hit_Overflow
     }
 
-    hist Optical_Errors_per_lumi_B0 {
+    hist OpticalErrorsPerLumi_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Optical_Errors
     }
-    hist Optical_Errors_per_lumi_B1 {
+    hist OpticalErrorsPerLumi_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Optical_Errors
     }
-    hist Optical_Errors_per_lumi_B2 {
+    hist OpticalErrorsPerLumi_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Optical_Errors
     }
-    hist Optical_Errors_per_lumi_ECA {
+    hist OpticalErrorsPerLumi_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Optical_Errors
     }
-    hist Optical_Errors_per_lumi_ECC {
+    hist OpticalErrorsPerLumi_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Optical_Errors
     }
 
-    hist ROD_Sync_BCID_errors_per_lumi_B0 {
+    hist RODSyncBCIDErrorsPerLumi_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_ROD_BCID
     }
-    hist ROD_Sync_BCID_errors_per_lumi_B1 {
+    hist RODSyncBCIDErrorsPerLumi_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_ROD_BCID
     }
-    hist ROD_Sync_BCID_errors_per_lumi_B2 {
+    hist RODSyncBCIDErrorsPerLumi_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_ROD_BCID
     }
-    hist ROD_Sync_BCID_errors_per_lumi_ECA {
+    hist RODSyncBCIDErrorsPerLumi_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_ROD_BCID
     }
-    hist ROD_Sync_BCID_errors_per_lumi_ECC {
+    hist RODSyncBCIDErrorsPerLumi_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_ROD_BCID
     }
 
-    hist ROD_Sync_LVL1ID_errors_per_lumi_B0 {
+    hist RODSyncLVL1IDErrorsPerLumi_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_ROD_LVL1ID
     }
-    hist ROD_Sync_LVL1ID_errors_per_lumi_B1 {
+    hist RODSyncLVL1IDErrorsPerLumi_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_ROD_LVL1ID
     }
-    hist ROD_Sync_LVL1ID_errors_per_lumi_B2 {
+    hist RODSyncLVL1IDErrorsPerLumi_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_ROD_LVL1ID
     }
-    hist ROD_Sync_LVL1ID_errors_per_lumi_ECA {
+    hist RODSyncLVL1IDErrorsPerLumi_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_ROD_LVL1ID
     }
-    hist ROD_Sync_LVL1ID_errors_per_lumi_ECC {
+    hist RODSyncLVL1IDErrorsPerLumi_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Synchronization_ROD_LVL1ID
     }
 
-    hist ROD_Timeout_per_lumi_B0 {
+    hist RODTimeoutPerLumi_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/ROD_Timeout
     }
-    hist ROD_Timeout_per_lumi_B1 {
+    hist RODTimeoutPerLumi_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/ROD_Timeout
     }
-    hist ROD_Timeout_per_lumi_B2 {
+    hist RODTimeoutPerLumi_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/ROD_Timeout
     }
-    hist ROD_Timeout_per_lumi_ECA {
+    hist RODTimeoutPerLumi_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/ROD_Timeout
     }
-    hist ROD_Timeout_per_lumi_ECC {
+    hist RODTimeoutPerLumi_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/ROD_Timeout
     }
 
-    hist ROD_Trunc_HT_Limit_errors_per_lumi_B0 {
+    hist RODTruncHTLimitErrorsPerLumi_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_ROD_HT_Limit
     }
-    hist ROD_Trunc_HT_Limit_errors_per_lumi_B1 {
+    hist RODTruncHTLimitErrorsPerLumi_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_ROD_HT_Limit
     }
-    hist ROD_Trunc_HT_Limit_errors_per_lumi_B2 {
+    hist RODTruncHTLimitErrorsPerLumi_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_ROD_HT_Limit
     }
-    hist ROD_Trunc_HT_Limit_errors_per_lumi_ECA {
+    hist RODTruncHTLimitErrorsPerLumi_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_ROD_HT_Limit
     }
-    hist ROD_Trunc_HT_Limit_errors_per_lumi_ECC {
+    hist RODTruncHTLimitErrorsPerLumi_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_ROD_HT_Limit
     }
 
-    hist ROD_Trunc_ROD_OF_errors_per_lumi_B0 {
+    hist RODTruncRODOFErrorsPerLumi_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_ROD_FIFO_Overflow
     }
-    hist ROD_Trunc_ROD_OF_errors_per_lumi_B1 {
+    hist RODTruncRODOFErrorsPerLumi_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_ROD_FIFO_Overflow
     }
-    hist ROD_Trunc_ROD_OF_errors_per_lumi_B2 {
+    hist RODTruncRODOFErrorsPerLumi_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_ROD_FIFO_Overflow
     }
-    hist ROD_Trunc_ROD_OF_errors_per_lumi_ECA {
+    hist RODTruncRODOFErrorsPerLumi_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_ROD_FIFO_Overflow
     }
-    hist ROD_Trunc_ROD_OF_errors_per_lumi_ECC {
+    hist RODTruncRODOFErrorsPerLumi_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/Truncation_ROD_FIFO_Overflow
     }
 
 
-    hist SEU_Hamming_per_lumi_ECA {
+    hist SEUHammingPerLumi_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/SEU_Hamming
     }
-    hist SEU_Hamming_per_lumi_ECC {
+    hist SEUHammingPerLumi_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/SEU_Hamming
     }
-    hist SEU_Hamming_per_lumi_B0 {
+    hist SEUHammingPerLumi_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/SEU_Hamming
     }
-    hist SEU_Hamming_per_lumi_B1 {
+    hist SEUHammingPerLumi_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/SEU_Hamming
     }
-    hist SEU_Hamming_per_lumi_B2 {
+    hist SEUHammingPerLumi_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/SEU_Hamming
     }
 
-    hist SEU_Hit_Parity_per_lumi_ECA {
+    hist SEUHitParityPerLumi_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/SEU_Hit_Parity
     }
-    hist SEU_Hit_Parity_per_lumi_ECC {
+    hist SEUHitParityPerLumi_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/SEU_Hit_Parity
     }
-    hist SEU_Hit_Parity_per_lumi_B0 {
+    hist SEUHitParityPerLumi_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/SEU_Hit_Parity
     }
-    hist SEU_Hit_Parity_per_lumi_B1 {
+    hist SEUHitParityPerLumi_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/SEU_Hit_Parity
     }
-    hist SEU_Hit_Parity_per_lumi_B2 {
+    hist SEUHitParityPerLumi_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/SEU_Hit_Parity
     }
 
-    hist SEU_Register_Parity_per_lumi_ECA {
+    hist SEURegisterParityPerLumi_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/SEU_Register_Parity
     }
-    hist SEU_Register_Parity_per_lumi_ECC {
+    hist SEURegisterParityPerLumi_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/SEU_Register_Parity
     }
-    hist SEU_Register_Parity_per_lumi_B0 {
+    hist SEURegisterParityPerLumi_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/SEU_Register_Parity
     }
-    hist SEU_Register_Parity_per_lumi_B1 {
+    hist SEURegisterParityPerLumi_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/SEU_Register_Parity
     }
-    hist SEU_Register_Parity_per_lumi_B2 {
+    hist SEURegisterParityPerLumi_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Errors/Bad_Module_Errors/SEU_Register_Parity
     }
@@ -1873,7 +1832,7 @@ dir Pixel {
   #Below plot Combined Tracks
   dir TrackOnTrack {
 
-    hist tracksPerEvt_per_lumi {
+    hist tracksPerEvtPerLumi {
       algorithm = Pix_LB_Bins_Diff_FromAvg
       output = InnerDetector/Pixel/PixelExpert/Tracks
       display = StatBox
@@ -1881,82 +1840,83 @@ dir Pixel {
     }
     
     ### Track State On Surface: Hole
-    hist TSOS_Hole_IBL {
+    hist TSOSHole_IBL {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
-    hist TSOS_Hole_B0 {
+    hist TSOSHole_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
-    hist TSOS_Hole_B1 {
+    hist TSOSHole_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
-    hist TSOS_Hole_B2 {
+    hist TSOSHole_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
-    hist TSOS_Hole_ECA {
+    hist TSOSHole_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
-    hist TSOS_Hole_ECC {
+    hist TSOSHole_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
     ### Track State On Surface: Measurement
-    hist TSOS_Measurement_IBL {
+    hist TSOSMeasurement_IBL {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
-    hist TSOS_Measurement_B0 {
+    hist TSOSMeasurement_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
-    hist TSOS_Measurement_B1 {
+    hist TSOSMeasurement_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
-    hist TSOS_Measurement_B2 {
+    hist TSOSMeasurement_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
-    hist TSOS_Measurement_ECA {
+    hist TSOSMeasurement_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
-    hist TSOS_Measurement_ECC {
+    hist TSOSMeasurement_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
     ### Track State On Surface: Outlier
-    hist TSOS_Outlier_IBL {
+    hist TSOSOutlier_IBL {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
-    hist TSOS_Outlier_B0 {
+    hist TSOSOutlier_B0 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
-    hist TSOS_Outlier_B1 {
+    hist TSOSOutlier_B1 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
-    hist TSOS_Outlier_B2 {
+    hist TSOSOutlier_B2 {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
-    hist TSOS_Outlier_ECA {
+    hist TSOSOutlier_ECA {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
-    hist TSOS_Outlier_ECC {
+    hist TSOSOutlier_ECC {
       algorithm = Plain_GatherData
       output = InnerDetector/Pixel/PixelExpert/Tracks
     }
   }
 }
+
 # now get the pixel relevant plots from InDetGlobal
 dir InDetGlobal {
   dir Pixel {
diff --git a/Database/AthenaPOOL/OutputStreamAthenaPool/python/CreateOutputStreams.py b/Database/AthenaPOOL/OutputStreamAthenaPool/python/CreateOutputStreams.py
index cb3d49ae862f6ac4548e4457bc520172fc6ffc66..261097117ea62a2345cdf30de86d41ec0ca0ecc5 100644
--- a/Database/AthenaPOOL/OutputStreamAthenaPool/python/CreateOutputStreams.py
+++ b/Database/AthenaPOOL/OutputStreamAthenaPool/python/CreateOutputStreams.py
@@ -81,7 +81,24 @@ def createOutputStream( streamName, fileName = "", asAlg = False, noTag = False,
          Key=event_format_key,
       )
       outputStream.MetadataItemList += ["xAOD::EventFormat#{}".format(event_format_key)]
-      outputStream.HelperTools = [ streamInfoTool, event_format_tool]
+
+      # Create a new xAOD::FileMetaData object
+      file_metadata_key = "FileMetaData"
+      file_metadata_creator_tool = CfgMgr.xAODMaker__FileMetaDataCreatorTool(
+          "FileMetaDataCreatorTool",
+          OutputKey=file_metadata_key,
+          StreamName=streamName,
+      )
+      outputStream.MetadataItemList += [
+          "xAOD::FileMetaData#{}".format(file_metadata_key),
+          "xAOD::FileMetaDataAuxInfo#{}Aux.".format(file_metadata_key),
+      ]
+
+      outputStream.HelperTools = [
+          streamInfoTool,
+          event_format_tool,
+          file_metadata_creator_tool,
+      ]
 
 
    # Support for MT thinning.
diff --git a/Database/AthenaPOOL/OutputStreamAthenaPool/python/MultipleStreamManager.py b/Database/AthenaPOOL/OutputStreamAthenaPool/python/MultipleStreamManager.py
index a0bb9d68b64a73855df2c7df7f04c438e9a8efa8..f9e6d51227f4a6e1175e58ba7f2719175f89c522 100644
--- a/Database/AthenaPOOL/OutputStreamAthenaPool/python/MultipleStreamManager.py
+++ b/Database/AthenaPOOL/OutputStreamAthenaPool/python/MultipleStreamManager.py
@@ -559,10 +559,6 @@ class MultipleStreamManager:
         from AthenaCommon.AppMgr import theApp
         svcMgr = theApp.serviceMgr()
 
-        from AthenaCommon import CfgMgr
-        streamMarkUpTool = CfgMgr.xAODMaker__FileMetaDataMarkUpTool( StreamName + "_FileMetaDataMarkUpTool" )
-        streamMarkUpTool.Key = StreamName
-        theStream.Stream.HelperTools += [ streamMarkUpTool ]
         theStream.Stream.WritingTool.SubLevelBranchName = "<key>"
         svcMgr.AthenaPoolCnvSvc.PoolAttributes += [ "DatabaseName = '" + FileName + "'; COMPRESSION_LEVEL = '5'" ]
         svcMgr.AthenaPoolCnvSvc.PoolAttributes += [ "DatabaseName = '" + FileName + "'; ContainerName = 'TTree=CollectionTree'; TREE_AUTO_FLUSH = '-20000000'" ]
diff --git a/Database/AthenaPOOL/OutputStreamAthenaPool/python/OutputStreamConfig.py b/Database/AthenaPOOL/OutputStreamAthenaPool/python/OutputStreamConfig.py
index 3e8818964214617f336b078dd2b4db0234142885..631954562ba642ceadf461f0872589a15c602636 100644
--- a/Database/AthenaPOOL/OutputStreamAthenaPool/python/OutputStreamConfig.py
+++ b/Database/AthenaPOOL/OutputStreamAthenaPool/python/OutputStreamConfig.py
@@ -11,12 +11,12 @@ def OutputStreamCfg(configFlags, streamName, ItemList=[], MetadataItemList=[],
    AthenaOutputStreamTool=CompFactory.AthenaOutputStreamTool
    StoreGateSvc=CompFactory.StoreGateSvc
 
+   msg = logging.getLogger('OutputStreamCfg')
    flagName="Output.%sFileName" % streamName
    if configFlags.hasFlag(flagName):
       fileName=configFlags._get(flagName)
    else:
       fileName="my%s.pool.root" % streamName
-      msg = logging.getLogger('OutputStreamCfg')
       msg.info("No file name predefined for stream %s. Using %s", streamName, fileName)
 
    if fileName in configFlags.Input.Files:
@@ -54,16 +54,6 @@ def OutputStreamCfg(configFlags, streamName, ItemList=[], MetadataItemList=[],
         "IOVMetaDataContainer#*",
    ]
 
-   # Event Tag
-   if not disableEventTag:
-      key = "SimpleTag"
-      outputStream.WritingTool.AttributeListKey=key
-      # build eventinfo attribute list
-      EventInfoAttListTool, EventInfoTagBuilder=CompFactory.getComps("EventInfoAttListTool","EventInfoTagBuilder",)
-      tagBuilder = EventInfoTagBuilder(AttributeList=key,
-                                       Tool=EventInfoAttListTool())
-      result.addEventAlgo(tagBuilder)
-
    # Make EventFormat object
    event_format_key = 'EventFormat{}'.format(streamName)
    event_format_tool = CompFactory.xAODMaker.EventFormatStreamHelperTool(
@@ -71,17 +61,39 @@ def OutputStreamCfg(configFlags, streamName, ItemList=[], MetadataItemList=[],
       Key=event_format_key,
    )
    outputStream.HelperTools.append(event_format_tool)
+   msg.info("Creating event format for this stream")
 
    # Simplifies naming 
    outputStream.MetadataItemList.append(
       "xAOD::EventFormat#{}".format(event_format_key)
    )
+
+   # Set up FileMetaData
+   file_metadata_key = "FileMetaData"
+   outputStream.HelperTools.append(
+        CompFactory.xAODMaker.FileMetaDataCreatorTool(
+            name='{}_FileMetaDataCreatorTool'.format(streamName),
+            OutputKey=file_metadata_key,
+            StreamName=streamName,
+        )
+   )
+   outputStream.MetadataItemList += [
+        "xAOD::FileMetaData#{}".format(file_metadata_key),
+        "xAOD::FileMetaDataAuxInfo#{}Aux.".format(file_metadata_key),
+   ]
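+   # The creator tool fills the FileMetaData object from event and non-event
+   # information while the job runs; both the interface and the Aux item must
+   # be listed for the payload to be written to the output file.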
+
+   # Event Tag
+   if not disableEventTag:
+      key = "SimpleTag"
+      outputStream.WritingTool.AttributeListKey=key
+      # build eventinfo attribute list
+      EventInfoAttListTool, EventInfoTagBuilder=CompFactory.getComps("EventInfoAttListTool","EventInfoTagBuilder",)
+      tagBuilder = EventInfoTagBuilder(AttributeList=key,
+                                       Tool=EventInfoAttListTool())
+      result.addEventAlgo(tagBuilder)
+
    # For xAOD output
    if "xAOD" in streamName:
-      xAODMaker__FileMetaDataMarkUpTool=CompFactory.xAODMaker.FileMetaDataMarkUpTool
-      streamMarkUpTool = xAODMaker__FileMetaDataMarkUpTool( streamName + "_FileMetaDataMarkUpTool" )
-      streamMarkUpTool.Key = streamName
-      outputStream.HelperTools += [ streamMarkUpTool ]
       outputStream.WritingTool.SubLevelBranchName = "<key>"
 
       AthenaPoolCnvSvc=CompFactory.AthenaPoolCnvSvc
diff --git a/Event/xAOD/xAODBTaggingAthenaPool/src/xAODBTaggingAuxVariableInit.cxx b/Event/xAOD/xAODBTagging/Root/xAODBTaggingAuxVariableInit.cxx
similarity index 100%
rename from Event/xAOD/xAODBTaggingAthenaPool/src/xAODBTaggingAuxVariableInit.cxx
rename to Event/xAOD/xAODBTagging/Root/xAODBTaggingAuxVariableInit.cxx
diff --git a/Event/xAOD/xAODMetaData/Root/FileMetaData_v1.cxx b/Event/xAOD/xAODMetaData/Root/FileMetaData_v1.cxx
index bd0ec69a931825f4f28f502c2546ea5f992a2ad3..efc4dd3e0b424bcd0429511ca9fdf1f6e12f065d 100644
--- a/Event/xAOD/xAODMetaData/Root/FileMetaData_v1.cxx
+++ b/Event/xAOD/xAODMetaData/Root/FileMetaData_v1.cxx
@@ -25,7 +25,7 @@ namespace xAOD {
 
    }
 
-   bool FileMetaData_v1::operator==( const FileMetaData_v1& rhs ) {
+   bool FileMetaData_v1::operator==( const FileMetaData_v1& rhs ) const {
 
       // Get the variable types from both objects:
       const SG::auxid_set_t& auxids1 = this->getAuxIDs();
@@ -161,7 +161,7 @@ namespace xAOD {
       return true;
    }
 
-   bool FileMetaData_v1::operator!=( const FileMetaData_v1& rhs ) {
+   bool FileMetaData_v1::operator!=( const FileMetaData_v1& rhs ) const {
 
       return !( this->operator==( rhs ) );
    }
diff --git a/Event/xAOD/xAODMetaData/xAODMetaData/FileMetaDataAuxInfo.h b/Event/xAOD/xAODMetaData/xAODMetaData/FileMetaDataAuxInfo.h
index 0df7b0038cc48fca609848a5ae3ed890797493df..49d1b0e871f8bd71d3e864663a9399b289c62bac 100644
--- a/Event/xAOD/xAODMetaData/xAODMetaData/FileMetaDataAuxInfo.h
+++ b/Event/xAOD/xAODMetaData/xAODMetaData/FileMetaDataAuxInfo.h
@@ -19,5 +19,11 @@ namespace xAOD {
 // Declare a CLID for the type:
 #include "xAODCore/CLASS_DEF.h"
 CLASS_DEF( xAOD::FileMetaDataAuxInfo, 73252552, 1 )
+#ifndef XAOD_STANDALONE
+#include "AthenaKernel/MetaCont.h"
+CLASS_DEF( MetaCont<xAOD::FileMetaDataAuxInfo> , 233059795, 1 )
+#include "xAODCore/BaseInfo.h"
+SG_BASE( MetaCont<xAOD::FileMetaDataAuxInfo>, MetaContBase );
+#endif // not XAOD_STANDALONE
 
 #endif // XAODMETADATA_FILEMETADATAAUXINFO_H
diff --git a/Event/xAOD/xAODMetaData/xAODMetaData/versions/FileMetaData_v1.h b/Event/xAOD/xAODMetaData/xAODMetaData/versions/FileMetaData_v1.h
index 00e2bbfa1a85eaf310a32049fd4c1e1bd094b807..19f0fb56072a9e2368635fd9308bd5f2863bd079 100644
--- a/Event/xAOD/xAODMetaData/xAODMetaData/versions/FileMetaData_v1.h
+++ b/Event/xAOD/xAODMetaData/xAODMetaData/versions/FileMetaData_v1.h
@@ -38,9 +38,9 @@ namespace xAOD {
       /// @{
 
       /// Operator testing the equality of two objects
-      bool operator==( const FileMetaData_v1& rhs );
+      bool operator==( const FileMetaData_v1& rhs ) const;
       /// Operator testing the inequality of two objects
-      bool operator!=( const FileMetaData_v1& rhs );
+      bool operator!=( const FileMetaData_v1& rhs ) const;
 
       /// @}
 
diff --git a/Event/xAOD/xAODMetaDataCnv/ATLAS_CHECK_THREAD_SAFETY b/Event/xAOD/xAODMetaDataCnv/ATLAS_CHECK_THREAD_SAFETY
new file mode 100644
index 0000000000000000000000000000000000000000..0159809cfdda797ae3ffd40a1049af663fbf86ce
--- /dev/null
+++ b/Event/xAOD/xAODMetaDataCnv/ATLAS_CHECK_THREAD_SAFETY
@@ -0,0 +1 @@
+Event/xAOD/xAODMetaDataCnv
diff --git a/Event/xAOD/xAODMetaDataCnv/CMakeLists.txt b/Event/xAOD/xAODMetaDataCnv/CMakeLists.txt
index 101270ff9e4d0529a4e38a003a1cb2f1257a9251..8e83703a2281abb800e1b1fe390f024a50927357 100644
--- a/Event/xAOD/xAODMetaDataCnv/CMakeLists.txt
+++ b/Event/xAOD/xAODMetaDataCnv/CMakeLists.txt
@@ -9,24 +9,39 @@ if( XAOD_STANDALONE )
   # The main library is an installed one in this case:
   atlas_add_library( xAODMetaDataCnv
     xAODMetaDataCnv/*.h Root/*.cxx
-    PUBLIC_HEADERS xAODMetaDataCnv
-    LINK_LIBRARIES AsgTools xAODMetaData )
+    PUBLIC_HEADERS
+      xAODMetaDataCnv
+    LINK_LIBRARIES
+      AsgTools
+      xAODMetaData )
 
   # Plus we also build a dictionary:
   atlas_add_dictionary( xAODMetaDataCnvDict
     xAODMetaDataCnv/xAODMetaDataCnvDict.h
     xAODMetaDataCnv/selection.xml
-    LINK_LIBRARIES xAODMetaDataCnv )
+    LINK_LIBRARIES
+      xAODMetaDataCnv )
 
 else()
 
   # The main library is a component one in this case:
   atlas_add_component( xAODMetaDataCnv
     xAODMetaDataCnv/*.h src/*.cxx Root/*.cxx src/components/*.cxx
-    LINK_LIBRARIES AsgTools xAODMetaData GaudiKernel AthenaKernel
-    AthenaPoolUtilities EventInfo )
+    LINK_LIBRARIES
+      AsgTools
+      xAODMetaData
+      AthenaKernel
+      AthenaPoolUtilities
+      xAODEventInfo
+      PersistentDataModel
+      IOVDbDataModel )
+
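+  # Test job writing a file that contains xAOD::FileMetaData metadata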
+  atlas_add_test( WriteFileMetaData
+    SCRIPT python -m "xAODMetaDataCnv.TestWriteFileMetaData" )
+
 
 endif()
 
 # Install files from the package:
 atlas_install_joboptions( share/*.py )
+atlas_install_python_modules( python/*.py POST_BUILD_CMD ${ATLAS_FLAKE8} )
diff --git a/Event/xAOD/xAODMetaDataCnv/Root/FileMetaDataTool.cxx b/Event/xAOD/xAODMetaDataCnv/Root/FileMetaDataTool.cxx
index 352fa3ff7aa019e4ac958ef54ef541ed1a60af4c..6dc6055204f3c90251a2aec82004fe3974ef86eb 100644
--- a/Event/xAOD/xAODMetaDataCnv/Root/FileMetaDataTool.cxx
+++ b/Event/xAOD/xAODMetaDataCnv/Root/FileMetaDataTool.cxx
@@ -5,16 +5,24 @@
 // Local include(s):
 #include "xAODMetaDataCnv/FileMetaDataTool.h"
 
+// standard library includes
+#include <memory>
+#include <utility>
+
+// EDM include(s):
+#include "xAODMetaData/FileMetaData.h"
+#include "xAODMetaData/FileMetaDataAuxInfo.h"
+
+
 namespace xAODMaker {
 
 FileMetaDataTool::FileMetaDataTool(const std::string& name)
-    : asg::AsgMetadataTool(name)
-    , m_md()
-    , m_mdAux()
-    , m_beginFileIncidentSeen(false) {
-      declareProperty("InputKey", m_inputKey = "FileMetaData");
-      declareProperty("OutputKey", m_outputKey = "FileMetaData");
+    : asg::AsgMetadataTool(name) {
+  declareProperty("InputKey", m_inputKey = "FileMetaData",
+                  "Key of xAOD::FileMetaData object in input");
 
+  declareProperty("OutputKey", m_outputKey = "FileMetaData",
+                  "Key of xAOD::FileMetaData in MetaDataStore");
 #ifndef XAOD_STANDALONE
       declareInterface< ::IMetaDataTool >(this);
 #endif  // XAOD_STANDALONE
@@ -22,20 +30,27 @@ FileMetaDataTool::FileMetaDataTool(const std::string& name)
 
 StatusCode
     FileMetaDataTool::initialize() {
-      // Greet the user:
-      ATH_MSG_DEBUG("Initialising xAODMaker::FileMetaDataTool");
-      ATH_MSG_DEBUG("  InputKey  = " << m_inputKey);
-      ATH_MSG_DEBUG("  OutputKey = " << m_outputKey);
+#ifndef XAOD_STANDALONE
+      ASG_CHECK(m_metaDataSvc.retrieve());
+#endif  // XAOD_STANDALONE
 
-      // Reset the member variable(s):
-      m_md.reset();
-      m_mdAux.reset();
-      m_beginFileIncidentSeen = false;
+      // Return gracefully:
+      return StatusCode::SUCCESS;
+    }
 
+#ifndef XAOD_STANDALONE
+StatusCode
+    FileMetaDataTool::endInputFile(const SG::SourceID&) {
       // Return gracefully:
       return StatusCode::SUCCESS;
     }
 
+StatusCode
+    FileMetaDataTool::beginInputFile(const SG::SourceID&) {
+      return beginInputFile();
+    }
+#endif  // XAOD_STANDALONE
+
 StatusCode
     FileMetaDataTool::endInputFile() {
       // Return gracefully:
@@ -44,49 +59,67 @@ StatusCode
 
 StatusCode
     FileMetaDataTool::beginInputFile() {
-      // Whatever happens, we've seen the incident:
-      m_beginFileIncidentSeen = true;
+      // Lock the tool while handling the new input file
+      std::lock_guard lock(m_toolMutex);
 
-      // If the input file doesn't have any file-level metadata, then
-      // finish right away:
+      // Quit gracefully if there is nothing to do
       if (!inputMetaStore()->contains< xAOD::FileMetaData >(m_inputKey)) {
+        ATH_MSG_INFO("No xAOD::FileMetaData in the input file");
         return StatusCode::SUCCESS;
       }
 
-      // Retrieve the input object:
-      const xAOD::FileMetaData* input = 0;
-      ATH_CHECK(inputMetaStore()->retrieve(input, m_inputKey));
+      // Get the FileMetaData object from the input file
+      const xAOD::FileMetaData * input = nullptr;
+      ASG_CHECK(inputMetaStore()->retrieve(input, m_inputKey));
 
-      // Create the output objects if they don't exist yet:
-      if ((!m_md.get()) && (!m_mdAux.get())) {
-        ATH_MSG_DEBUG("Creating output objects");
-        m_md.reset(new xAOD::FileMetaData());
-        m_mdAux.reset(new xAOD::FileMetaDataAuxInfo());
-        m_md->setStore(m_mdAux.get());
+      // Emit a warning if the FileMetaData from previous files does not
+      // match that of the new input file
+#ifdef XAOD_STANDALONE
+      if (outputMetaStore()->contains< xAOD::FileMetaData >(m_outputKey)) {
+        xAOD::FileMetaData * output = nullptr;
+        ASG_CHECK(
+            outputMetaStore()->retrieve(output, m_outputKey));
+#else
+      if (m_metaDataSvc->contains< xAOD::FileMetaData >(m_outputKey)) {
+        auto output = m_metaDataSvc->tryConstRetrieve< xAOD::FileMetaData >(m_outputKey);
+        if (!output) return StatusCode::FAILURE;
+#endif  // XAOD_STANDALONE
 
-        // Copy the payload of the input object:
-        *(m_md.get()) = *input;
-      }
+        if (*input != *output)
+          ATH_MSG_WARNING("Inconsistent input file MetaData");
+
+        return StatusCode::SUCCESS;
 
-      // Make sure that the objects are compatible:
-      if (*(m_md.get()) != *input) {
-        ATH_MSG_ERROR("Processing input files with differing conditions");
-        ATH_MSG_ERROR("Consistent xAOD::FileMetaData can't be provided for "
-                      "the output");
-        return StatusCode::FAILURE;
       }
 
-      // Return gracefully:
-      return StatusCode::SUCCESS;
-    }
+      ATH_MSG_DEBUG("Creating output objects");
+      auto output = std::make_unique< xAOD::FileMetaData >();
+      auto outputAux = std::make_unique< xAOD::FileMetaDataAuxInfo >();
+      output->setStore(outputAux.get());
 
-StatusCode
-    FileMetaDataTool::beginEvent() {
-      // In case we missed the BeginInputFile incident for the first input file,
-      // make sure that we still run the appropriate function.
-      if (!m_beginFileIncidentSeen) {
-        ATH_CHECK(beginInputFile());
-      }
+      // Copy input object
+      *output = *input;
+
+
+#ifdef XAOD_STANDALONE
+      ASG_CHECK(
+          outputMetaStore()->record< xAOD::FileMetaData >(
+              std::move(output), m_outputKey));
+
+      ASG_CHECK(
+          outputMetaStore()->record< xAOD::FileMetaDataAuxInfo >(
+              std::move(outputAux), m_outputKey + "Aux."));
+#else
+      ASG_CHECK(
+          m_metaDataSvc->record< xAOD::FileMetaData >(
+              std::move(output), m_outputKey));
+
+      ASG_CHECK(
+          m_metaDataSvc->record< xAOD::FileMetaDataAuxInfo >(
+              std::move(outputAux), m_outputKey + "Aux."));
+#endif  // XAOD_STANDALONE
+
+      ATH_MSG_INFO("Copied xAOD::FileMetaData to MetaDataStore");
 
       // Return gracefully:
       return StatusCode::SUCCESS;
@@ -94,22 +127,7 @@ StatusCode
 
 StatusCode
     FileMetaDataTool::metaDataStop() {
-      // Don't be offended if the metadata already exists in the output:
-      if (outputMetaStore()->contains< xAOD::FileMetaData >(m_outputKey)) {
-        ATH_MSG_DEBUG("xAOD::FileMetaData already in the output");
-        return StatusCode::SUCCESS;
-      }
-
-      // Record the metadata, if any was found on the input:
-      if (m_md.get() && m_mdAux.get()) {
-        ATH_MSG_DEBUG("Recoding file level metadata");
-        ATH_CHECK(outputMetaStore()->record(m_md.release(), m_outputKey));
-        ATH_CHECK(outputMetaStore()->record(m_mdAux.release(),
-                                            m_outputKey + "Aux."));
-      }
-
       // Return gracefully:
       return StatusCode::SUCCESS;
     }
-
 }  // namespace xAODMaker
diff --git a/Event/xAOD/xAODMetaDataCnv/python/FileMetaDataConfig.py b/Event/xAOD/xAODMetaDataCnv/python/FileMetaDataConfig.py
new file mode 100644
index 0000000000000000000000000000000000000000..89aba61c3560f478c1b38d3d59ba6feb3e1c13da
--- /dev/null
+++ b/Event/xAOD/xAODMetaDataCnv/python/FileMetaDataConfig.py
@@ -0,0 +1,62 @@
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
+"""Configure jobs to propogate and/or create xAOD::FileMetaData
+
+Configure the FileMetaDataTool to propagate FileMetaData into the MetaDataStore
+if it exists in the input. In addition, configure the FileMetaDataCreatorTool to
+create xAOD::FileMetaData from the output stream. The object created for the
+output stream is written to the output file.
+
+    Typical usage example
+
+        from xAODMetaDataCnv import FileMetaDataConfig
+        ca = ComponentAccumulator()
+        ca.merge(
+            FileMetaDataConfig.FileMetaDataCfg(
+                flags=ConfigFlags,
+                stream=outputStreamObject,
+            )
+        )
+"""
+from AthenaConfiguration import ComponentFactory, ComponentAccumulator
+from AthenaCommon import Logging
+from AthenaServices import MetaDataSvcConfig
+
+
+def FileMetaDataCfg(flags, stream, streamName=None, key="FileMetaData"):
+    """Add tools creating and propogating xAOD::FileMetaData"""
+    if not streamName:
+        streamName = stream.name
+    result = ComponentAccumulator.ComponentAccumulator()
+    components = ComponentFactory.CompFactory
+
+    # attach the FileMetaData creator to the output stream
+    creator = components.xAODMaker.FileMetaDataCreatorTool(
+        '{}_FileMetaDataCreatorTool'.format(streamName),
+        OutputKey=key,
+        StreamName=streamName,
+    )
+    stream.HelperTools.append(creator)
+    stream.MetadataItemList += [
+        "xAOD::FileMetaData#{}".format(key),
+        "xAOD::FileMetaDataAuxInfo#{}Aux.".format(key),
+    ]
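+    # Both the interface object and its auxiliary store have to appear in the
+    # metadata item list for the FileMetaData payload to be persistified.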
+
+    # Set up the tool propagating the FileMetaData
+    result.merge(
+        MetaDataSvcConfig.MetaDataSvcCfg(
+            flags,
+            ["xAODMaker::FileMetaDataTool"]
+        )
+    )
+
+    return result
+
+
+def main():
+    """Run a job writing a file with FileMetaData"""
+    msg = Logging.logging.getLogger("FileMetaDataConfig")
+    msg.info("FileMetaData config OK")
+
+
+if __name__ == "__main__":
+    main()
diff --git a/Event/xAOD/xAODMetaDataCnv/python/TestWriteFileMetaData.py b/Event/xAOD/xAODMetaDataCnv/python/TestWriteFileMetaData.py
new file mode 100644
index 0000000000000000000000000000000000000000..a2e4c548764c45c2981394020ce9f146499dbeb0
--- /dev/null
+++ b/Event/xAOD/xAODMetaDataCnv/python/TestWriteFileMetaData.py
@@ -0,0 +1,73 @@
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
+"""Test multi-threaded xAOD::FileMetaData writing
+
+Read the default test AOD and configure an output stream that writes an
+xAOD::FileMetaData object. Set up to process 4 concurrent events. Inspect the
+produced file for xAOD::FileMetaData metadata items.
+
+    Typical usage example
+        python -m xAODMetaDataCnv.TestWriteFileMetaData |tee log
+"""
+from AthenaConfiguration import AllConfigFlags, TestDefaults, MainServicesConfig
+from AthenaCommon import Configurable, Logging
+from OutputStreamAthenaPool import OutputStreamConfig
+from PyUtils import MetaReader
+from xAODEventInfoCnv import xAODEventInfoCnvConfig
+
+
+def writeFileMetaData(flags):
+    """set up an output stream and xAOD::EventInfo"""
+    accumulator = OutputStreamConfig.OutputStreamCfg(
+        configFlags=flags, streamName="AOD"
+    )
+
+    accumulator.merge(
+        xAODEventInfoCnvConfig.EventInfoCnvAlgCfg(
+            flags=flags, inputKey="",
+        )
+    )
+
+    return accumulator
+
+
+def testMetaData(file_name):
+    """Check that file metadata is in output"""
+    meta = MetaReader.read_metadata([file_name])[file_name]["metadata_items"]
+    file_info_items = [
+        v for _, v in meta.items() if "FileMetaData" in v
+    ]
+    print("found file metadata objects:", file_info_items)
+    return bool(file_info_items)
+
+
+def main():
+    """Run a job writing a file with FileMetaData"""
+    Configurable.Configurable.configurableRun3Behavior = True
+    msg = Logging.logging.getLogger("TestFileMetaData")
+
+    config_flags = AllConfigFlags.ConfigFlags
+    config_flags.Input.Files = TestDefaults.defaultTestFiles.AOD
+    config_flags.Output.AODFileName = "test.pool.root"
+    config_flags.Output.doWriteAOD = True
+    config_flags.Concurrency.NumThreads = 4
+    config_flags.Concurrency.NumConcurrentEvents = 4
+    config_flags.lock()
+
+    write = MainServicesConfig.MainServicesCfg(config_flags)
+    write.merge(writeFileMetaData(config_flags))
+    write.run(100)
+
+    try:
+        if testMetaData(config_flags.Output.AODFileName):
+            msg.info("File contains xAOD::FileMetaData")
+            return 0
+        msg.error("File does not contain xAOD::FileMetaData")
+    except ReferenceError:
+        msg.error("Failed to produce output file")
+    except KeyError:
+        msg.error("Failed to get metadata item list from file")
+    return 1
+
+
+if __name__ == "__main__":
+    main()
diff --git a/Event/xAOD/xAODMetaDataCnv/share/FileMetaDataCreatorTool_jobOptions.py b/Event/xAOD/xAODMetaDataCnv/share/FileMetaDataCreatorTool_jobOptions.py
index 21d0dcfefac729a314e212520430aeb79ad47b42..402c6f76a82ee51217aaf8d53844bf38fc5098bb 100644
--- a/Event/xAOD/xAODMetaDataCnv/share/FileMetaDataCreatorTool_jobOptions.py
+++ b/Event/xAOD/xAODMetaDataCnv/share/FileMetaDataCreatorTool_jobOptions.py
@@ -1,50 +1,72 @@
 # Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
-#
-# Test jobO for exercising xAODMaker::FileMetaDataCreatorTool to create
-# the file-level metadata.
-#
+"""Test writing xAOD::EventFormat in job reading from input
+
+Read in POOL files created by the write test job options. Uses the metadata
+tool to transcribe the EventFormat metadata to the MetaDataStore. Then an
+algorithm reads the EventFormat from the MetaDataStore and prints the content.
+
+    Typical usage example
+        1. run: athena xAODEventFormatCnv/EventFormatWriteTestJobOptions.py
+        2. run: athena xAODEventFormatCnv/EventFormatReadWriteTestJobOptions.py
+"""
 from AthenaCommon import CfgMgr
+from AthenaCommon.AlgSequence import AlgSequence
 from AthenaCommon.AppMgr import ServiceMgr as svcMgr
+from AthenaCommon.AppMgr import theApp
 from AthenaCommon.AppMgr import ToolSvc
-from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
-from RecExConfig.RecFlags import rec
+import AthenaPoolCnvSvc.ReadAthenaPool
+from xAODEventInfoCnv.xAODEventInfoCreator import xAODMaker__EventInfoCnvAlg
 from OutputStreamAthenaPool.MultipleStreamManager import MSMgr
 
-# Set up the reading of an input file:
-INFILE = (
-    "/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/CommonInputs/"
-    "data16_13TeV.00311321.physics_Main.recon.AOD.r9264/"
-    "AOD.11038520._000001.pool.root.1"
-)
+VERBOSE = 1
+DEBUG = 2
+INFO = 3
+WARNING = 4
 
-athenaCommonFlags.FilesInput = [INFILE]
-athenaCommonFlags.EvtMax = 10
 
-# Turn off most of what RecExCommon does:
+def makeStream(name='TestStream'):
+    """create and return an output stream"""
+    test_stream = MSMgr.NewStream(
+        StreamName=name,
+        FileName="{}.pool.root".format(name),
+    )
+    return test_stream
 
-rec.doCBNT.set_Value_and_Lock(False)
-rec.doWriteAOD.set_Value_and_Lock(False)
-rec.doWriteTAG.set_Value_and_Lock(False)
-rec.AutoConfiguration = ["everything"]
 
-# Read the file using RecExCommon:
-include("RecExCommon/RecExCommon_topOptions.py")
+# grab input files
+svcMgr.EventSelector.InputCollections = ["/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/CommonInputs/data16_13TeV.00311321.physics_Main.recon.AOD.r9264/AOD.11038520._000001.pool.root.1"]
 
-# Set up the metadata tool:
-ToolSvc += CfgMgr.xAODMaker__FileMetaDataCreatorTool(
-    "FileMetaDataCreatorTool", OutputLevel=2
+# propagate xAOD::FileMetaData from the InputMetaDataStore to the MetaDataStore
+ToolSvc += CfgMgr.xAODMaker__FileMetaDataTool(
+    "FileMetaDataTool",
+    InputKey="FileMetaData",
+    OutputKey="FileMetaData",
+    OutputLevel=DEBUG,
 )
-svcMgr.MetaDataSvc.MetaDataTools += [ToolSvc.FileMetaDataCreatorTool]
+svcMgr.MetaDataSvc.MetaDataTools += [ToolSvc.FileMetaDataTool]
 
-# Set up the writing of an output file:
 
-stream = MSMgr.NewPoolRootStream("StreamDAOD", "DxAOD.pool.root")
-stream.AddItem(
-    ["xAOD::EventInfo#EventInfo", "xAOD::EventAuxInfo#EventInfoAux."]
-)
-stream.AddMetaDataItem(
-    [
-        "xAOD::FileMetaData#FileMetaData",
-        "xAOD::FileMetaDataAuxInfo#FileMetaDataAux.",
-    ]
-)
+# Get hold of the main algorithm sequence
+algSeq = AlgSequence()
+
+# Make sure event info is available
+algSeq += xAODMaker__EventInfoCnvAlg()
+
+# Set up the output stream(s) written to file.
+streams = [
+    makeStream('xAOD')
+]
+
+svcMgr.MessageSvc.OutputLevel = INFO
+# svcMgr.MetaDataSvc.OutputLevel = INFO
+
+
+# Set the event printout interval.
+if not hasattr(svcMgr, theApp.EventLoop):
+    svcMgr += getattr(CfgMgr, theApp.EventLoop)()
+evtLoop = getattr(svcMgr, theApp.EventLoop)
+evtLoop.EventPrintoutInterval = 1000
+
+# process the first 100 events of the input file
+theApp.EvtMax = 100
diff --git a/Event/xAOD/xAODMetaDataCnv/src/FileMetaDataCreatorTool.cxx b/Event/xAOD/xAODMetaDataCnv/src/FileMetaDataCreatorTool.cxx
index f7cecdc30cc0fdb257d479b6c990cf0da681ccff..ad383449e65bef1c2abd157fe4fdd132d479befe 100644
--- a/Event/xAOD/xAODMetaDataCnv/src/FileMetaDataCreatorTool.cxx
+++ b/Event/xAOD/xAODMetaDataCnv/src/FileMetaDataCreatorTool.cxx
@@ -2,274 +2,362 @@
   Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
+// Local include(s):
+#include "FileMetaDataCreatorTool.h"
+
 // System include(s):
 #include <algorithm>
+#include <functional>
+#include <memory>
+#include <stdexcept>
 #include <sstream>
+#include <utility>
 
 // Athena metadata EDM:
 #include "AthenaPoolUtilities/CondAttrListCollection.h"
 #include "AthenaPoolUtilities/AthenaAttributeList.h"
-#include "EventInfo/EventStreamInfo.h"
+#include "StoreGate/ReadHandle.h"
+#include "StoreGate/WriteHandle.h"
+#include "xAODMetaData/FileMetaData.h"
+#include "xAODMetaData/FileMetaDataAuxInfo.h"
 
-// Local include(s):
-#include "FileMetaDataCreatorTool.h"
 
-namespace xAODMaker {
+inline StatusCode toStatusCode(bool b) {
+  if (b)
+    return StatusCode::SUCCESS;
+  else
+    return StatusCode::FAILURE;
+}
 
-/// The folder in DetectorStore that we get the information from
-static const char FOLDER_NAME[] = "/TagInfo";
-static const char SIMFOLDER_NAME[] = "/Simulation/Parameters";
-
-/// Helper macro to execute the setter functions correctly
-#define CHECK_BOOL(EXP)                                      \
-do {                                                         \
-  try {                                                      \
-    if (!EXP) {                                              \
-      ATH_MSG_ERROR("Failed to execute: " << #EXP);          \
-      return StatusCode::FAILURE;                            \
-    }                                                        \
-  } catch(const std::exception& ex) {                        \
-    ATH_MSG_INFO("Couldn't extract attribute with command: " \
-                 << #EXP);                                   \
-  }                                                          \
-} while (0)
-
-FileMetaDataCreatorTool::FileMetaDataCreatorTool(const std::string& name)
-  : asg::AsgMetadataTool(name)
-  , m_md()
-  , m_mdAux() {
-    declareProperty("OutputKey", m_outputKey = "FileMetaData");
-  }
+namespace xAODMaker {
 
 StatusCode
-    FileMetaDataCreatorTool::start() {
-      const DataHandle< CondAttrListCollection > dummy;
-      StatusCode sc = detStore()->regFcn(
-          &xAODMaker::FileMetaDataCreatorTool::update,
-          this,
-          dummy,
-          FOLDER_NAME,
-          true);
-
-      if (sc.isFailure()) {
-        ATH_MSG_WARNING("Unable to register callback for " << FOLDER_NAME);
-        // return StatusCode::FAILURE;
+    FileMetaDataCreatorTool::initialize() {
+      // Greet the user:
+      ATH_MSG_DEBUG("Initialising - Package version: " << PACKAGE_VERSION);
+
+      ATH_CHECK(m_eventStore.retrieve());
+      ATH_CHECK(m_metaDataSvc.retrieve());
+
+      ATH_CHECK(m_tagInfoKey.initialize());
+      ATH_CHECK(m_simInfoKey.initialize());
+
+      // If DataHeader key not specified, try determining it
+      if (m_dataHeaderKey.empty()) {
+        auto parentAlg = dynamic_cast< const INamedInterface* >(parent());
+        if (parentAlg)
+          m_dataHeaderKey = parentAlg->name();
       }
 
-      if (detStore()->contains< AthenaAttributeList >(SIMFOLDER_NAME)) {
-        const DataHandle< AthenaAttributeList > dummy2;
-        sc = detStore()->regFcn(
-            &xAODMaker::FileMetaDataCreatorTool::update,
-            this,
-            dummy2,
-            SIMFOLDER_NAME,
-            true);
-
-        if (sc.isFailure()) {
-          ATH_MSG_WARNING("Unable to register callback for " << SIMFOLDER_NAME);
-          // return StatusCode::FAILURE;
+      // Listen for the beginning of an input file. Act after the MetaDataSvc,
+      // which has priority 80, so that the FileMetaDataTool is called first.
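+      // Listeners with a higher priority are called earlier by the IncidentSvc.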
+      ServiceHandle< IIncidentSvc > incidentSvc("IncidentSvc", name());
+      ATH_CHECK(incidentSvc.retrieve());
+      incidentSvc->addListener(this, "BeginInputFile", 70);
+
+      // Create a fresh object to fill
+      ATH_MSG_DEBUG("Creating new xAOD::FileMetaData object to output");
+      m_info = std::make_unique< xAOD::FileMetaData >();
+      m_aux  = std::make_unique< xAOD::FileMetaDataAuxInfo >();
+      m_info->setStore(m_aux.get());
+
+      // FileMetaData has no content
+      m_filledNonEvent = false;
+      m_filledEvent = false;
+
+
+      // Return gracefully:
+      return StatusCode::SUCCESS;
+    }
+
+void
+    FileMetaDataCreatorTool::handle(const Incident& inc) {
+      // gracefully ignore unexpected incident types
+      if (inc.type() == "BeginInputFile") {
+        if (!updateFromNonEvent().isSuccess()) {
+          ATH_MSG_INFO("Failed to fill FileMetaData with non-event info");
         }
-      } else {
-        ATH_MSG_INFO("Processing file with no " << SIMFOLDER_NAME);
       }
-      return StatusCode::SUCCESS;
     }
 
 StatusCode
-    FileMetaDataCreatorTool::initialize() {
-      // Greet the user:
-      ATH_MSG_DEBUG("Initialising - Package version: " << PACKAGE_VERSION);
-      ATH_MSG_DEBUG("  OutputKey = " << m_outputKey);
-
-      // Return gracefully:
+    FileMetaDataCreatorTool::finalize() {
+      // Nothing to do here
       return StatusCode::SUCCESS;
     }
 
 StatusCode
-    FileMetaDataCreatorTool::beginInputFile() {
-      // Create the output objects if they don't exist yet:
-      if ((!m_md.get()) && (!m_mdAux.get())) {
-        ATH_MSG_DEBUG("Creating output objects");
-        m_md.reset(new xAOD::FileMetaData());
-        m_mdAux.reset(new xAOD::FileMetaDataAuxInfo());
-        m_md->setStore(m_mdAux.get());
-      }
-
-      ATH_MSG_DEBUG("DetectorStore contents:\n\n" << detStore()->dump());
+    FileMetaDataCreatorTool::postInitialize() {
+      return StatusCode::SUCCESS;
+    }
 
-      // Return gracefully:
+StatusCode
+    FileMetaDataCreatorTool::preExecute() {
       return StatusCode::SUCCESS;
     }
 
 StatusCode
-    FileMetaDataCreatorTool::endInputFile() {
-      // Return gracefully:
+    FileMetaDataCreatorTool::preStream() {
       return StatusCode::SUCCESS;
     }
 
 StatusCode
-    FileMetaDataCreatorTool::metaDataStop() {
-      // Don't be offended if the metadata already exists in the output:
-      if (outputMetaStore()->contains< xAOD::FileMetaData >(m_outputKey)) {
-        ATH_MSG_DEBUG("xAOD::FileMetaData already in the output");
-        return StatusCode::SUCCESS;
+    FileMetaDataCreatorTool::postExecute() {
+      // Lock the tool while working with FileMetaData
+      std::lock_guard lock(m_toolMutex);
+
+      // Return if object has already been filled
+      if (m_filledEvent) return StatusCode::SUCCESS;
+
+      // Sanity check
+      if (!(m_info && m_aux)) {
+        ATH_MSG_ERROR("No xAOD::FileMetaData object to fill");
+        return StatusCode::FAILURE;
       }
 
-      SG::ConstIterator<EventStreamInfo> esi_h1;
-      SG::ConstIterator<EventStreamInfo> esi_h2;
-      StatusCode sc = outputMetaStore()->retrieve(esi_h1, esi_h2);
-      if (sc.isSuccess() && esi_h1 != esi_h2) {
-        if (esi_h1->getProcessingTags().size() == 1) {
-          CHECK_BOOL(m_md->setValue(
-                  xAOD::FileMetaData::dataType,
-                  *(esi_h1->getProcessingTags().begin())));
-        } else {
-          ATH_MSG_WARNING("Found multiple output process tags");
-          CHECK_BOOL(m_md->setValue(xAOD::FileMetaData::dataType, "multiple"));
+      // Read xAOD event info
+      auto eventInfo =
+          m_eventStore->tryConstRetrieve< xAOD::EventInfo >(m_eventInfoKey);
+      if (eventInfo) {
+        try {
+          ATH_MSG_DEBUG("Valid " << m_eventInfoKey << " handle");
+
+          xAOD::FileMetaData::MetaDataType type = xAOD::FileMetaData::mcProcID;
+          const float id = static_cast< float >(eventInfo->mcChannelNumber());
+
+          if (m_info->setValue(type, id))
+            ATH_MSG_DEBUG("setting " << type << " to "<< id);
+          else
+            ATH_MSG_WARNING("error setting " << type << " to "<< id);
+        } catch (std::exception&) {
+          // Probably processing data
+          ATH_MSG_INFO("No xAOD::FileMetaData:mcProcID update from "
+                       << m_eventInfoKey);
         }
-        if (esi_h1->getEventTypes().size() == 1) {
-          float id = esi_h1->getEventTypes().begin()->mc_channel_number();
-          CHECK_BOOL(m_md->setValue(xAOD::FileMetaData::mcProcID, id));
-        } else {
-          ATH_MSG_WARNING("Found multiple eventtypes");
-          CHECK_BOOL(m_md->setValue(xAOD::FileMetaData::mcProcID, 0.0f));
+      } else {
+        ATH_MSG_WARNING("No " << m_eventInfoKey);
+      }
+
+      // Read data header
+      auto dataHeader =
+          m_eventStore->tryConstRetrieve< DataHeader >(m_dataHeaderKey);
+      if (dataHeader) {
+        try {
+          ATH_MSG_DEBUG("valid " << m_dataHeaderKey << " handle");
+
+          xAOD::FileMetaData::MetaDataType type = xAOD::FileMetaData::dataType;
+          const std::string tag = dataHeader->getProcessTag();
+
+          if (m_info->setValue(type, tag))
+            ATH_MSG_DEBUG("set " << type << " to "<< tag);
+          else
+            ATH_MSG_WARNING("error setting " << type << " to "<< tag);
+        } catch (std::exception&) {
+          ATH_MSG_INFO("No xAOD::FileMetaData::dataType update from "
+                       << m_dataHeaderKey);
         }
+      } else {
+        ATH_MSG_WARNING("No " << m_dataHeaderKey);
+      }
+
+      m_filledEvent = true;
+
+      return StatusCode::SUCCESS;
+    }
+
+StatusCode
+    FileMetaDataCreatorTool::preFinalize() {
+      std::lock_guard lock(m_toolMutex);
+
+      // Create an object if there isn't one already present
+      if (!m_metaDataSvc->contains< xAOD::FileMetaData >(m_key)) {
+        ATH_MSG_DEBUG("Creating new xAOD::FileMetaData object to output");
+        auto info = std::make_unique< xAOD::FileMetaData >();
+        auto aux  = std::make_unique< xAOD::FileMetaDataAuxInfo >();
+        info->setStore(aux.get());
+        ATH_CHECK(m_metaDataSvc->record(std::move(info), m_key));
+        ATH_CHECK(m_metaDataSvc->record(std::move(aux), m_key + "Aux."));
       }
 
-      // Record the metadata, if any was found on the input:
-      if (m_md.get() && m_mdAux.get()) {
-        ATH_MSG_DEBUG("Recoding file level metadata");
-        ATH_CHECK(outputMetaStore()->record(m_md.release(), m_outputKey));
-        ATH_CHECK(outputMetaStore()->record(m_mdAux.release(),
-                                            m_outputKey + "Aux."));
+      // Replace content in store with content created for this stream
+      auto output = m_metaDataSvc->tryRetrieve< xAOD::FileMetaData >(m_key);
+      if (!output) {
+        ATH_MSG_DEBUG("Cannot fill xAOD::FileMetaData output object");
+        return StatusCode::FAILURE;
       }
+      *output = *m_info;
 
-      // Return gracefully:
       return StatusCode::SUCCESS;
     }
 
 StatusCode
-    FileMetaDataCreatorTool::update(IOVSVC_CALLBACK_ARGS_K(keys)) {
-      ATH_MSG_DEBUG("update() invoked for key(s): " << keys);
+    FileMetaDataCreatorTool::updateFromNonEvent() {
+      // Lock the tool while we work on the FileMetaData
+      std::lock_guard lock(m_toolMutex);
 
+      // Have we already done this?
+      if (m_filledNonEvent) return StatusCode::SUCCESS;
+
+      // Sanity check
+      if (!(m_info && m_aux)) {
+        ATH_MSG_ERROR("No xAOD::FileMetaData object to fill");
+        return StatusCode::FAILURE;
+      }
+
+      // Read TagInfo
+      auto tagInfo = SG::makeHandle(m_tagInfoKey);
+      if (!tagInfo.isValid()) {
+        ATH_MSG_INFO("Invalid \"" << m_tagInfoKey.key() << "\" handle");
+        return StatusCode::SUCCESS;
+      }
+
+      for (const auto* payload : *tagInfo->payloadContainer()) {
+        for (const auto& itr : *payload) {
+          const coral::AttributeList& attributeList = itr.second;
+          ATH_CHECK(
+              setString(
+                  attributeList,
+                  "AtlasRelease",
+                  xAOD::FileMetaData::productionRelease));
+
+          ATH_CHECK(
+              setString(
+                  attributeList,
+                  "AMITag",
+                  xAOD::FileMetaData::amiTag));
+
+          ATH_CHECK(
+              setString(
+                  attributeList,
+                  "GeoAtlas",
+                  xAOD::FileMetaData::geometryVersion));
+
+          ATH_CHECK(
+              setString(
+                  attributeList,
+                  "IOVDbGlobalTag",
+                  xAOD::FileMetaData::conditionsTag));
+
+          ATH_CHECK(
+              setFloat(
+                  attributeList,
+                  "beam_energy",
+                  xAOD::FileMetaData::beamEnergy));
+
+          ATH_CHECK(
+              setString(
+                  attributeList,
+                  "beam_type",
+                  xAOD::FileMetaData::beamType));
+
+          // only investigate the first IOV
+          break;
+        }
+        // only investigate the first payload in the container
+        break;
+      }
+
+      // Read simulation parameters
+      SG::ReadHandle< IOVMetaDataContainer > simInfo(m_simInfoKey);
-      // Bail if the requested folder is not amongst the keys:
+      // Bail if the simulation parameters are not available:
-      if (std::find(keys.begin(), keys.end(), FOLDER_NAME) == keys.end()
-          && std::find(keys.begin(), keys.end(), SIMFOLDER_NAME) == keys.end()) {
-        ATH_MSG_WARNING("Update callback received without the \""
-                        << FOLDER_NAME << "\" key");
+      if (!simInfo.isValid()) {
+        ATH_MSG_INFO("Invalid \"" << m_simInfoKey.key() << "\" handle");
         return StatusCode::SUCCESS;
       }
 
-      // Retrieve the needed metadata:
-      if (std::find(keys.begin(), keys.end(), FOLDER_NAME) != keys.end()) {
-        const CondAttrListCollection* tagInfo = 0;
-        ATH_CHECK(detStore()->retrieve(tagInfo, FOLDER_NAME));
+      for (const CondAttrListCollection* payload : *simInfo->payloadContainer()) {
+        for (const auto& itr : *payload) {
+          const coral::AttributeList& attributeList = itr.second;
 
-        ATH_MSG_DEBUG("Retrieved object: " << FOLDER_NAME);
+          ATH_CHECK(
+              setString(
+                  attributeList,
+                  "SimulationFlavour",
+                  xAOD::FileMetaData::simFlavour));
 
-        // Make sure that it has some the expected size:
-        if (tagInfo->size() > 1) {
-          ATH_MSG_WARNING("Collection with >1 size received for \""
-                          << FOLDER_NAME << "\"");
-        }
-        if (!tagInfo->size()) {
-          ATH_MSG_ERROR("Empty collection received for \""
-                        << FOLDER_NAME << "\"");
-          return StatusCode::FAILURE;
-        }
-        // Access the first, and only channel of the object:
-        const CondAttrListCollection::AttributeList& al =
-            tagInfo->attributeList(0);
-
-        if (al.exists("AtlasRelease")) {
-          CHECK_BOOL(m_md->setValue(
-                  xAOD::FileMetaData::productionRelease,
-                  al[ "AtlasRelease" ].data< std::string >()));
-        } else {
-          ATH_MSG_WARNING("Did not find AtlasRelease in TagInfo setting to none");
-          CHECK_BOOL(m_md->setValue(xAOD::FileMetaData::productionRelease, "none"));
-        }
+          ATH_CHECK(
+              setBool(
+                  attributeList,
+                  "IsEventOverlayInputSim",
+                  xAOD::FileMetaData::isDataOverlay));
 
-        if (al.exists("AMITag")) {
-          CHECK_BOOL(m_md->setValue(
-                  xAOD::FileMetaData::amiTag,
-                  al["AMITag"].data< std::string >()));
-        } else {
-          ATH_MSG_WARNING("Did not find AMITag in TagInfo setting to none");
-          CHECK_BOOL(m_md->setValue(xAOD::FileMetaData::amiTag, "none"));
+          // only investigate first IOV
+          break;
         }
+        // only investigate first payload
+        break;
+      }
 
-        if (al.exists("GeoAtlas")) {
-          CHECK_BOOL(m_md->setValue(xAOD::FileMetaData::geometryVersion,
-                                    al["GeoAtlas"].data< std::string >()));
-        } else {
-          ATH_MSG_WARNING("Did not find GeoAtlas in TagInfo setting to none");
-          CHECK_BOOL(m_md->setValue(xAOD::FileMetaData::geometryVersion, "none"));
-        }
+      // FileMetaData object has been filled with non-event info
+      m_filledNonEvent = true;
+
+      return StatusCode::SUCCESS;
+    }
 
-        if (al.exists("IOVDbGlobalTag")) {
-          CHECK_BOOL(m_md->setValue(
-                  xAOD::FileMetaData::conditionsTag,
-                  al["IOVDbGlobalTag"].data< std::string >()));
-        } else {
-          ATH_MSG_WARNING("Did not find IOVDbGlobalTag in TagInfo setting to none");
-          CHECK_BOOL(m_md->setValue(xAOD::FileMetaData::conditionsTag, "none"));
+StatusCode
+    FileMetaDataCreatorTool::setString(
+        const coral::AttributeList& attrList,
+        const std::string& tag,
+        const xAOD::FileMetaData::MetaDataType type) {
+      try {
+        std::string attr = "none";
+        if (attrList.exists(tag)) {
+          attr = attrList[tag].data< std::string >();
+          // remap simulation flavor "default" to "FullSim"
+          if (type == xAOD::FileMetaData::simFlavour && attr == "default")
+            attr = "FullSim";
         }
+        ATH_MSG_DEBUG("Setting " << type << " to \"" << attr << "\"");
+        return toStatusCode(m_info->setValue(type, attr));
+      } catch (std::exception&) {
+        ATH_MSG_ERROR("unexpected error building FileMetaData");
+        return StatusCode::FAILURE;
+      }
+    }
 
-        // Convert the beam energy to float. (If it exists.)
-        if (al.exists("beam_energy")) {
+StatusCode
+    FileMetaDataCreatorTool::setFloat(
+        const coral::AttributeList& attrList,
+        const std::string& tag,
+        const xAOD::FileMetaData::MetaDataType type) {
+      try {
+        float number = -1.0f;
+        if (attrList.exists(tag)) {
           try {
-            const std::string beamEnergyStr = al["beam_energy"].data< std::string >();
-            char* endptr = 0;
-            float beamEnergy = strtof(beamEnergyStr.c_str(), &endptr);
-            if (endptr) {
-              if (!m_md->setValue(xAOD::FileMetaData::beamEnergy, beamEnergy)) {
-                ATH_MSG_ERROR("Failed to set the beam energy");
-                return StatusCode::FAILURE;
-              }
-            }
-          } catch(const std::exception& ex) {
-            ATH_MSG_INFO("Couldn't set the beam energy");
+            const std::string attr = attrList[tag].data< std::string >();
+            number = std::stof(attr);
+          } catch (std::invalid_argument& e) {
+            ATH_MSG_INFO("beam energy tag could not be converted to float");
+          } catch (std::out_of_range& e) {
+            ATH_MSG_INFO("converted beam energy value outside float range");
           }
-        } else {
-          ATH_MSG_WARNING("Did not find beam_energy in TagInfo setting to -1");
-          CHECK_BOOL(m_md->setValue(xAOD::FileMetaData::beamEnergy, -1.0f));
-        }
-
-        if (al.exists("beam_type")) {
-          CHECK_BOOL(m_md->setValue(
-                  xAOD::FileMetaData::beamType,
-                  al["beam_type"].data< std::string >()));
-        } else {
-          ATH_MSG_WARNING("Did not find beam_type in TagInfo setting to none");
-          CHECK_BOOL(m_md->setValue(xAOD::FileMetaData::beamType, "none"));
         }
+        ATH_MSG_DEBUG("Setting " << type << " to \"" << number << "\"");
+        return toStatusCode(m_info->setValue(type, number));
+      } catch (std::exception&) {
+        ATH_MSG_ERROR("unexpected error building FileMetaData");
+        return StatusCode::FAILURE;
       }
+    }
 
-      if (std::find(keys.begin(), keys.end(), SIMFOLDER_NAME) != keys.end()) {
-        const AthenaAttributeList* simInfo = 0;
-        ATH_CHECK(detStore()->retrieve(simInfo, SIMFOLDER_NAME));
-
-        if (simInfo->exists("SimulationFlavour")) {
-          std::string flavor = (*simInfo)["SimulationFlavour"].data< std::string >();
-          if (flavor == "default") flavor = "FullSim";
-          CHECK_BOOL(m_md->setValue(xAOD::FileMetaData::simFlavour, flavor));
-        } else {
-          ATH_MSG_ERROR("Unable to retrieve SimulationFlavour from " << SIMFOLDER_NAME);
+StatusCode
+    FileMetaDataCreatorTool::setBool(
+        const coral::AttributeList& attrList,
+        const std::string& tag,
+        const xAOD::FileMetaData::MetaDataType type) {
+      try {
+        bool yesNo = false;
+        if (attrList.exists(tag)) {
+          yesNo = attrList[tag].data< std::string >() == "True";
         }
 
-        if (simInfo->exists("IsEventOverlayInputSim")) {
-          bool isDataOverlay = (*simInfo)["IsEventOverlayInputSim"].data< std::string >() == "True";
-          CHECK_BOOL(m_md->setValue(xAOD::FileMetaData::isDataOverlay, isDataOverlay));
-        } else {
-          ATH_MSG_INFO("Unable to retrieve IsEventOverlayInputSim from "
-                       << SIMFOLDER_NAME << " - assuming not data overlay");
-          CHECK_BOOL(m_md->setValue(xAOD::FileMetaData::isDataOverlay, false));
-        }
+        ATH_MSG_DEBUG("Setting " << type << " to " << std::boolalpha << yesNo
+                      << std::noboolalpha);
+        return toStatusCode(m_info->setValue(type, yesNo));
+      } catch (std::exception&) {
+        ATH_MSG_ERROR("unexpected error building FileMetaData");
+        return StatusCode::FAILURE;
       }
-
-      // Return gracefully:
-      return StatusCode::SUCCESS;
     }
 
 }  // namespace xAODMaker
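
The attribute values collected above are stored on a plain xAOD::FileMetaData object, so downstream code can read them back with the same value()/setValue() accessor pair this patch uses. A minimal read-back sketch, assuming only the public xAODMetaData interface; the function name printFileMetaData is hypothetical and not part of this change:

// Illustrative only: dump a few of the fields filled by FileMetaDataCreatorTool.
#include <iostream>
#include <string>
#include "xAODMetaData/FileMetaData.h"

void printFileMetaData(const xAOD::FileMetaData& fmd) {
  std::string release;
  std::string dataType;
  float beamEnergy = -1.0f;
  // value() returns false if the requested variable was never set
  if (fmd.value(xAOD::FileMetaData::productionRelease, release))
    std::cout << "production release: " << release << '\n';
  if (fmd.value(xAOD::FileMetaData::dataType, dataType))
    std::cout << "data type:          " << dataType << '\n';
  if (fmd.value(xAOD::FileMetaData::beamEnergy, beamEnergy))
    std::cout << "beam energy:        " << beamEnergy << '\n';
}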
diff --git a/Event/xAOD/xAODMetaDataCnv/src/FileMetaDataCreatorTool.h b/Event/xAOD/xAODMetaDataCnv/src/FileMetaDataCreatorTool.h
index 144431f39f522a614fbafe3e9f004f3327d1c2bc..dfcd28f0658c336e9a945f7d08482be2dd707bd1 100644
--- a/Event/xAOD/xAODMetaDataCnv/src/FileMetaDataCreatorTool.h
+++ b/Event/xAOD/xAODMetaDataCnv/src/FileMetaDataCreatorTool.h
@@ -10,82 +10,157 @@
 #include <memory>
 
 // Infrastructure include(s):
+#include "Gaudi/Property.h"
+#include "GaudiKernel/extends.h"
+#include "GaudiKernel/IIncidentListener.h"
 #include "GaudiKernel/ServiceHandle.h"
-#include "AthenaKernel/IIOVSvc.h"
-#include "AsgTools/AsgMetadataTool.h"
-#include "AthenaKernel/IMetaDataTool.h"
+#include "AthenaBaseComps/AthAlgTool.h"
+#include "AthenaKernel/IAthenaOutputTool.h"
+#include "AthenaKernel/IMetaDataSvc.h"
+#include "IOVDbDataModel/IOVMetaDataContainer.h"
+#include "CoralBase/AttributeList.h"
+#include "StoreGate/ReadHandleKey.h"
 
 // EDM include(s):
+#include "PersistentDataModel/DataHeader.h"
+#include "xAODEventInfo/EventInfo.h"
 #include "xAODMetaData/FileMetaData.h"
 #include "xAODMetaData/FileMetaDataAuxInfo.h"
 
 namespace xAODMaker {
 
-/// Tool creating or propagating xAOD::FileMetaData information
+/// Tool creating xAOD::FileMetaData information
 ///
-/// This Athena-only tool can be used to create xAOD::FileMetaData
-/// information out of the non-ROOT-readable metadata available in
-/// the input.
-///
-/// Or, if the input file already has xAOD::FileMetaData payload,
-/// it is taken as is, and copied to the output.
-///
-/// The class uses asg::AsgMetadataTool as a base class for convenience,
-/// but it's not a dual-use tool. (Hence the header is hidden from the
-/// outside world.)
+/// This Athena-only tool can be used to create xAOD::FileMetaData information
+/// out of the non-ROOT-readable metadata available in the input. The
+/// FileMetaDataTool may have copied an xAOD::FileMetaData object to the
+/// MetaDataStore. If such an xAOD::FileMetaData object is found, it is
+/// updated to reflect the current data type and MC channel number.
 ///
 /// @author Attila Krasznahorkay <Attila.Krasznahorkay@cern.ch>
-///
-/// $Revision: 676522 $
-/// $Date: 2015-06-19 00:17:03 +0200 (Fri, 19 Jun 2015) $
+/// @author Frank Berghaus <fberghaus@anl.gov>
 ///
 class FileMetaDataCreatorTool
-    : public asg::AsgMetadataTool
-    , public virtual ::IMetaDataTool {
-  /// Declare the correct constructor for Athena
-  ASG_TOOL_CLASS(FileMetaDataCreatorTool, IMetaDataTool)
-
+    : public extends< AthAlgTool, IAthenaOutputTool, IIncidentListener > {
  public:
-      /// Regular AsgTool constructor
-      FileMetaDataCreatorTool(const std::string& name =
-                              "FileMetaDataCreatorTool");
+  using extends::extends;
 
-      /// Function initialising the tool
-      virtual StatusCode initialize();
-      virtual StatusCode start();
+  /// @name AlgTool Methods
+  /// @{
+  /// Called by AthenaOutputStream::initialize() (via ToolSvc retrieve()).
+  StatusCode initialize() override;
 
- protected:
-      /// @name Functions called by the AsgMetadataTool base class
-      /// @{
+  /// Called at the end of AthenaOutputStream::finalize() (via release()).
+  StatusCode finalize() override;
+  /// @}
 
-      /// Function collecting the metadata from a new input file
-      virtual StatusCode beginInputFile();
+  /// @name Methods inherited by IAthenaOutputTool
+  /// @{
+  /// Called at the end of AthenaOutputStream::initialize().
+  StatusCode postInitialize() override;
 
-      /// Function collecting the metadata from a new input file
-      virtual StatusCode endInputFile();
+  /// Called at the beginning of AthenaOutputStream::execute().
+  StatusCode preExecute() override;
 
-      /// Function writing the collected metadata to the output
-      virtual StatusCode metaDataStop();
+  /// Called before actually streaming objects.
+  StatusCode preStream() override;
 
-      /// Function collecting the metadata from a new input file
-      virtual StatusCode beginInputFile(const SG::SourceID&) {return beginInputFile();}
+  /// Called at the end of AthenaOutputStream::execute().
+  StatusCode postExecute() override;
 
-      /// Function collecting the metadata from a new input file
-      virtual StatusCode endInputFile(const SG::SourceID&) {return endInputFile();}
+  /// Called at the beginning of AthenaOutputStream::finalize().
+  StatusCode preFinalize() override;
+  /// @}
 
-      /// @}
+  /// @name IIncidentListener methods
+  /// @{
+  /// Handle BeginInputFile incident after MetaDataSvc
+  void handle(const Incident&) override;
+  /// @}
 
  private:
-      /// Function called by the DetectorStore when the metadata is updated
-      StatusCode update(IOVSVC_CALLBACK_ARGS_P(I, keys));
-
-      /// Key of the metadata object for the output file
-      std::string m_outputKey;
-
-      /// The output interface object
-      std::unique_ptr< xAOD::FileMetaData > m_md;
-      /// The output auxiliary object
-      std::unique_ptr< xAOD::FileMetaDataAuxInfo > m_mdAux;
+  /// output key for produced xAOD::FileMetaData in MetaDataStore
+  Gaudi::Property< std::string > m_key{
+      this,
+      "OutputKey",
+      "FileMetaData",
+      "Key to use for FileMetaData in MetaDataStore"
+  };
+
+  /// Read tag information
+  SG::ReadHandleKey< IOVMetaDataContainer > m_tagInfoKey {
+      this,
+      "TagInfoKey",
+      "InputMetaDataStore+/TagInfo",
+      "Store and Key to use to look up tags"
+  };
+
+  /// Read simulation parameters
+  SG::ReadHandleKey< IOVMetaDataContainer > m_simInfoKey {
+      this,
+      "SimInfoKey",
+      "InputMetaDataStore+/Simulation/Parameters",
+      "Store and Key to use to look up simulation parameters"
+  };
+
+  /// DataHeader is produced by another OutputTool, so it is read from StoreGateSvc
+  ServiceHandle< StoreGateSvc > m_eventStore{"StoreGateSvc", name()};
+
+
+  /// Key for xAOD::EventInfo to update MC channel number
+  Gaudi::Property< std::string > m_eventInfoKey {
+      this,
+      "EventInfoKey",
+      "EventInfo",
+      "StoreGate key to read xAOD::EventInfo"
+  };
+
+  /// Key for DataHeader in StoreGateSvc
+  Gaudi::Property< std::string > m_dataHeaderKey {
+      this,
+      "StreamName",
+      "",
+      "key of data header in event store"
+  };
+
+  /// Use MetaDataSvc store interface to support output in EventService
+  ServiceHandle< IMetaDataSvc > m_metaDataSvc{"MetaDataSvc", name()};
+
+  /// Update from Simulation Parameters and TagInfo
+  StatusCode updateFromNonEvent();
+
+  /// Helper to update file metadata with IOV string content
+  StatusCode setString(
+      const coral::AttributeList& attributeList,
+      const std::string& tag,
+      const xAOD::FileMetaData::MetaDataType type);
+
+  /// Helper to update file metadata with IOV float content
+  StatusCode setFloat(
+      const coral::AttributeList& attrList,
+      const std::string& tag,
+      const xAOD::FileMetaData::MetaDataType);
+
+  /// Helper to update file metadata with IOV boolean content
+  StatusCode setBool(
+      const coral::AttributeList& attrList,
+      const std::string& tag,
+      const xAOD::FileMetaData::MetaDataType);
+
+  /// The object created for this output stream
+  std::unique_ptr< xAOD::FileMetaData > m_info;
+
+  /// The auxiliary containing the created object
+  std::unique_ptr< xAOD::FileMetaDataAuxInfo > m_aux;
+
+  /// FileMetaData has been filled with non-event info
+  bool m_filledNonEvent{false};
+
+  /// FileMetaData has been filled with event information
+  bool m_filledEvent{false};
+
+  /// Creation of FileMetaData should happen on a single thread
+  std::mutex m_toolMutex;
 };  // class FileMetaDataCreatorTool
 
 }  // namespace xAODMaker
diff --git a/Event/xAOD/xAODMetaDataCnv/src/FileMetaDataMarkUpTool.cxx b/Event/xAOD/xAODMetaDataCnv/src/FileMetaDataMarkUpTool.cxx
deleted file mode 100644
index 17c31e3c70f6362b9186728c0b06c5e2fa57ecca..0000000000000000000000000000000000000000
--- a/Event/xAOD/xAODMetaDataCnv/src/FileMetaDataMarkUpTool.cxx
+++ /dev/null
@@ -1,122 +0,0 @@
-/*
-  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
-*/
-
-// System include(s):
-#include <algorithm>
-#include <sstream>
-#include <vector>
-
-// Athena metadata EDM:
-#include "EventInfo/EventStreamInfo.h"
-
-// Local include(s):
-#include "FileMetaDataMarkUpTool.h"
-
-namespace xAODMaker {
-
-/// Helper macro to execute the setter functions correctly
-#define CHECK_BOOL(EXP)                                        \
-  do {                                                         \
-    try {                                                      \
-      if (!EXP) {                                              \
-        ATH_MSG_ERROR("Failed to execute: " << #EXP);          \
-        return StatusCode::FAILURE;                            \
-      }                                                        \
-    } catch(const std::exception& ex) {                        \
-      ATH_MSG_INFO("Couldn't extract attribute with command: " \
-                   << #EXP);                                   \
-    }                                                          \
-  } while (0)
-
-  FileMetaDataMarkUpTool::FileMetaDataMarkUpTool(const std::string& name)
-      : asg::AsgMetadataTool(name) {
-        declareProperty("Key", m_outputKey = "");
-      }
-
-  StatusCode
-      FileMetaDataMarkUpTool::initialize() {
-    return StatusCode::SUCCESS;
-  }
-
-  StatusCode
-      FileMetaDataMarkUpTool::postInitialize() {
-    return StatusCode::SUCCESS;
-  }
-
-  StatusCode
-      FileMetaDataMarkUpTool::preExecute() {
-    return StatusCode::SUCCESS;
-  }
-
-  StatusCode
-      FileMetaDataMarkUpTool::preStream() {
-    return StatusCode::SUCCESS;
-  }
-
-  StatusCode
-      FileMetaDataMarkUpTool::postExecute() {
-    return StatusCode::SUCCESS;
-  }
-
-  StatusCode
-      FileMetaDataMarkUpTool::preFinalize() {
-        ATH_MSG_DEBUG("in preFinalize()");
-
-        std::vector<std::string> fmd_keys;
-        outputMetaStore()->keys<xAOD::FileMetaData>(fmd_keys);
-        if (fmd_keys.size() != 1) {
-          return StatusCode::SUCCESS;
-        }
-
-        if (!outputMetaStore()->transientContains< xAOD::FileMetaData >(
-                *(fmd_keys.begin()))) {
-          return StatusCode::SUCCESS;
-        }
-
-        xAOD::FileMetaData* fmd = nullptr;
-        StatusCode sc = outputMetaStore()->retrieve(fmd, *(fmd_keys.begin()));
-        if (!sc.isSuccess()) {
-          return StatusCode::SUCCESS;
-        }
-
-        const EventStreamInfo* esi = nullptr;
-        sc = outputMetaStore()->retrieve(esi, m_outputKey);
-        if (sc.isSuccess()) {
-          ATH_MSG_DEBUG("Setting xAOD::FileMetaData from output "
-                        << "EventStreamInfo object " << m_outputKey);
-          if (esi->getProcessingTags().size() == 1) {
-            CHECK_BOOL(fmd->setValue(xAOD::FileMetaData::dataType,
-                                     *(esi->getProcessingTags().begin())));
-          } else {
-            CHECK_BOOL(fmd->setValue(xAOD::FileMetaData::dataType, m_outputKey));
-          }
-          if (esi->getEventTypes().size() == 1) {
-            float id = esi->getEventTypes().begin()->mc_channel_number();
-            CHECK_BOOL(fmd->setValue(xAOD::FileMetaData::mcProcID, id));
-          }
-        }
-        return StatusCode::SUCCESS;
-      }
-
-  StatusCode
-      FileMetaDataMarkUpTool::finalize() {
-    return StatusCode::SUCCESS;
-  }
-
-  StatusCode
-      FileMetaDataMarkUpTool::start() {
-    return StatusCode::SUCCESS;
-  }
-
-  StatusCode
-      FileMetaDataMarkUpTool::beginInputFile() {
-    return StatusCode::SUCCESS;
-  }
-
-  StatusCode
-      FileMetaDataMarkUpTool::metaDataStop() {
-    return StatusCode::SUCCESS;
-  }
-
-}  // namespace xAODMaker
diff --git a/Event/xAOD/xAODMetaDataCnv/src/FileMetaDataMarkUpTool.h b/Event/xAOD/xAODMetaDataCnv/src/FileMetaDataMarkUpTool.h
deleted file mode 100644
index 50ce08e07e99ef6ff7e05702888926d8fadc9eba..0000000000000000000000000000000000000000
--- a/Event/xAOD/xAODMetaDataCnv/src/FileMetaDataMarkUpTool.h
+++ /dev/null
@@ -1,88 +0,0 @@
-// Dear emacs, this is -*- c++ -*-
-/*
-  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
-*/
-#ifndef XAODMETADATACNV_FILEMETADATAMARKUPTOOL_H
-#define XAODMETADATACNV_FILEMETADATAMARKUPTOOL_H
-
-// System include(s):
-#include <string>
-#include <memory>
-
-// Infrastructure include(s):
-#include "GaudiKernel/ServiceHandle.h"
-#include "AsgTools/AsgMetadataTool.h"
-#include "AthenaKernel/IAthenaOutputTool.h"
-
-// EDM include(s):
-#include "xAODMetaData/FileMetaData.h"
-#include "xAODMetaData/FileMetaDataAuxInfo.h"
-
-namespace xAODMaker {
-
-/// Tool creating or propagating xAOD::FileMetaData information
-///
-/// This Athena-only tool can be used to create xAOD::FileMetaData
-/// information out of the non-ROOT-readable metadata available in
-/// the input.
-///
-/// Or, if the input file already has xAOD::FileMetaData payload,
-/// it is taken as is, and copied to the output.
-///
-/// The class uses asg::AsgMetadataTool as a base class for convenience,
-/// but it's not a dual-use tool. (Hence the header is hidden from the
-/// outside world.)
-///
-/// @author Attila Krasznahorkay <Attila.Krasznahorkay@cern.ch>
-///
-/// $Revision: 676522 $
-/// $Date: 2015-06-19 00:17:03 +0200 (Fri, 19 Jun 2015) $
-///
-class FileMetaDataMarkUpTool
-    : public asg::AsgMetadataTool
-    , public virtual ::IAthenaOutputTool {
-  /// Declare the correct constructor for Athena
-  ASG_TOOL_CLASS(FileMetaDataMarkUpTool, IAthenaOutputTool)
-
- public:
-      /// Regular AsgTool constructor
-      explicit FileMetaDataMarkUpTool(
-          const std::string& name = "FileMetaDataMarkUpTool");
-
-      /// Required of all IAthenaOutputTools:
-      /// Called by AthenaOutputStream::initialize() (via ToolSvc retrieve()).
-      StatusCode initialize() override;
-      /// Called at the end of AthenaOutputStream::initialize().
-      StatusCode postInitialize() override;
-      /// Called at the beginning of AthenaOutputStream::execute().
-      StatusCode preExecute() override;
-      /// Called before actually streaming objects.
-      StatusCode preStream() override;
-      /// Called at the end of AthenaOutputStream::execute().
-      StatusCode postExecute() override;
-      /// Called at the beginning of AthenaOutputStream::finalize().
-      StatusCode preFinalize() override;
-      /// Called at the end of AthenaOutputStream::finalize() (via release()).
-      StatusCode finalize() override;
-      StatusCode start() override;
-
- protected:
-      /// @name Functions called by the AsgMetadataTool base class
-      /// @{
-
-      /// Function collecting the metadata from a new input file
-      StatusCode beginInputFile() override;
-
-      /// Function writing the collected metadata to the output
-      StatusCode metaDataStop() override;
-
-      /// @}
-
- private:
-      /// Key of the metadata object for the output file
-      std::string m_outputKey;
-};  // class FileMetaDataMarkUpTool
-
-}  // namespace xAODMaker
-
-#endif  // XAODMETADATACNV_FILEMETADATAMARKUPTOOL_H
diff --git a/Event/xAOD/xAODMetaDataCnv/src/components/xAODMetaDataCnv_entries.cxx b/Event/xAOD/xAODMetaDataCnv/src/components/xAODMetaDataCnv_entries.cxx
index 7e3a95a0dedfab332111c5e52a306e6fbc9e5d77..1a7c70cad4d056f8e14a145aff422edac4054915 100644
--- a/Event/xAOD/xAODMetaDataCnv/src/components/xAODMetaDataCnv_entries.cxx
+++ b/Event/xAOD/xAODMetaDataCnv/src/components/xAODMetaDataCnv_entries.cxx
@@ -1,8 +1,9 @@
+// Dear emacs, this is -*- c++ -*-
+/*
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
+*/
 #include "xAODMetaDataCnv/FileMetaDataTool.h"
 #include "../FileMetaDataCreatorTool.h"
-#include "../FileMetaDataMarkUpTool.h"
 
 DECLARE_COMPONENT( xAODMaker::FileMetaDataTool )
 DECLARE_COMPONENT( xAODMaker::FileMetaDataCreatorTool )
-DECLARE_COMPONENT( xAODMaker::FileMetaDataMarkUpTool )
-
diff --git a/Event/xAOD/xAODMetaDataCnv/xAODMetaDataCnv/FileMetaDataTool.h b/Event/xAOD/xAODMetaDataCnv/xAODMetaDataCnv/FileMetaDataTool.h
index 3c3c718a93e53bfea98b4908fcf274a70d68f914..c45092a35af3219849e3b8497cf5b0ef330fc097 100644
--- a/Event/xAOD/xAODMetaDataCnv/xAODMetaDataCnv/FileMetaDataTool.h
+++ b/Event/xAOD/xAODMetaDataCnv/xAODMetaDataCnv/FileMetaDataTool.h
@@ -7,89 +7,79 @@
 
 // System include(s):
 #include <string>
-#include <memory>
 
 // Infrastructure include(s):
 #include "AsgTools/AsgMetadataTool.h"
 #ifndef XAOD_STANDALONE
-#   include "AthenaKernel/IMetaDataTool.h"
-#endif  // XAOD_STANDALONE
-
-// EDM include(s):
-#include "xAODMetaData/FileMetaData.h"
-#include "xAODMetaData/FileMetaDataAuxInfo.h"
+# include "GaudiKernel/ServiceHandle.h"
+# include "AthenaKernel/IMetaDataSvc.h"
+# include "AthenaKernel/IMetaDataTool.h"
+#endif
 
 namespace xAODMaker {
 
-/// Tool taking care of propagating xAOD::FileMetaData information
-///
-/// This dual-use tool can be used both in Athena and in AnalysisBase
-/// to propagate the generic file-level metadata from the processed
-/// input files to an output file.
+/// Tool propagating xAOD::FileMetaData from input to output
 ///
-/// It relies on the input already containing the information in an
-/// xAOD format.
+/// This tool propagates the xAOD::FileMetaData object from the input files to
+/// the MetaDataStore in Athena. It requires the input to contain the
+/// information in xAOD format. The tool emits a warning if the file metadata
+/// does not match across inputs.
 ///
 /// @author Attila Krasznahorkay <Attila.Krasznahorkay@cern.ch>
-///
-/// $Revision: 683697 $
-/// $Date: 2015-07-17 11:12:14 +0200 (Fri, 17 Jul 2015) $
+/// @author Frank Berghaus <fberghaus@anl.gov>
 ///
 class FileMetaDataTool
-    : public asg::AsgMetadataTool
-#ifndef XAOD_STANDALONE
-    , public virtual ::IMetaDataTool
-#endif  // XAOD_STANDALONE
-{
-      /// Declare the correct constructor for Athena
-      ASG_TOOL_CLASS0(FileMetaDataTool)
-
+#ifdef XAOD_STANDALONE
+    : public asg::AsgMetadataTool {
+#else
+    : public asg::AsgMetadataTool, virtual public IMetaDataTool {
+#endif
  public:
-      /// Regular AsgTool constructor
-      explicit FileMetaDataTool(const std::string& name = "FileMetaDataTool");
-
-      /// Function initialising the tool
-      virtual StatusCode initialize();
+  ASG_TOOL_CLASS0(FileMetaDataTool)
 
- protected:
-      /// @name Functions called by the AsgMetadataTool base class
-      /// @{
+  /// Regular AsgTool constructor
+  explicit FileMetaDataTool(const std::string& name = "FileMetaDataTool");
 
-      /// Function collecting the metadata from a new input file
-      virtual StatusCode beginInputFile();
+  /// Function initialising the tool
+  StatusCode initialize() override;
 
-      /// Function collecting the metadata from a new input file
-      virtual StatusCode endInputFile();
+  /// @name Functions called by the IMetaDataTool base class
+  /// @{
 
-      /// Function making sure that BeginInputFile incidents are not missed
-      virtual StatusCode beginEvent();
+  /// Collect file metadata from the input and write it to the output
+  StatusCode beginInputFile() override;
 
-      /// Function writing the collected metadata to the output
-      virtual StatusCode metaDataStop();
+  /// Does nothing
+  StatusCode endInputFile() override;
 
 #ifndef XAOD_STANDALONE
-      /// Function collecting the metadata from a new input file
-      virtual StatusCode beginInputFile(const SG::SourceID&) {return beginInputFile();}
+  /// Collect file metadata from the input and write it to the output
+  StatusCode beginInputFile(const SG::SourceID&) override;
 
-      /// Function collecting the metadata from a new input file
-      virtual StatusCode endInputFile(const SG::SourceID&) {return endInputFile();}
-#endif  // XAOD_STANDALONE
-      /// @}
+  /// Does nothing
+  StatusCode endInputFile(const SG::SourceID&) override;
+#endif
+
+  /// Does nothing
+  StatusCode metaDataStop() override;
+
+  /// @}
 
  private:
-      /// Key of the metadata object in the input file
-      std::string m_inputKey;
-      /// Key of the metadata object for the output file
-      std::string m_outputKey;
-
-      /// The output interface object
-      std::unique_ptr< xAOD::FileMetaData > m_md;
-      /// The output auxiliary object
-      std::unique_ptr< xAOD::FileMetaDataAuxInfo > m_mdAux;
-
-      /// Internal flag for keeping track of whether a BeginInputFile incident
-      /// was seen already
-      bool m_beginFileIncidentSeen;
+  /// Key of the metadata object in the input file
+  std::string m_inputKey;
+
+  /// Key of the metadata object for the output file
+  std::string m_outputKey;
+
+#ifndef XAOD_STANDALONE
+  /// Get a handle on the metadata store for the job
+  ServiceHandle< IMetaDataSvc > m_metaDataSvc{"MetaDataSvc", name()};
+#endif
+
+  // Mutex serializing access to the tool
+  std::mutex m_toolMutex;
 };  // class FileMetaDataTool
 
 }  // namespace xAODMaker
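
Once FileMetaDataTool has copied the input object into the MetaDataStore, other Athena components can look it up through the same IMetaDataSvc::tryRetrieve call that FileMetaDataCreatorTool uses above. A hypothetical sketch; the free function dumpFileMetaData is illustrative only, and "FileMetaData" is assumed to match the tools' default OutputKey:

// Illustrative only: look up the propagated metadata from the MetaDataStore.
#include <iostream>
#include <string>
#include "GaudiKernel/ServiceHandle.h"
#include "GaudiKernel/StatusCode.h"
#include "AthenaKernel/IMetaDataSvc.h"
#include "xAODMetaData/FileMetaData.h"

StatusCode dumpFileMetaData() {
  ServiceHandle<IMetaDataSvc> metaDataSvc("MetaDataSvc", "dumpFileMetaData");
  if (metaDataSvc.retrieve().isFailure()) return StatusCode::FAILURE;

  // A null pointer simply means no FileMetaData was propagated or created
  const xAOD::FileMetaData* fmd =
      metaDataSvc->tryRetrieve<xAOD::FileMetaData>("FileMetaData");
  if (!fmd) return StatusCode::SUCCESS;

  std::string amiTag;
  if (fmd->value(xAOD::FileMetaData::amiTag, amiTag))
    std::cout << "AMI tag: " << amiTag << '\n';
  return StatusCode::SUCCESS;
}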
diff --git a/Event/xAOD/xAODMetaDataCnv/xAODMetaDataCnv/xAODMetaDataCnvAthena.h b/Event/xAOD/xAODMetaDataCnv/xAODMetaDataCnv/xAODMetaDataCnvAthena.h
deleted file mode 100644
index aa05a376f510c6da2959f819ab92bf3d64e62077..0000000000000000000000000000000000000000
--- a/Event/xAOD/xAODMetaDataCnv/xAODMetaDataCnv/xAODMetaDataCnvAthena.h
+++ /dev/null
@@ -1,16 +0,0 @@
-// Dear emacs, this is -*- c++ -*-
-/*
-  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
-*/
-
-// $Id: xAODMetaDataCnvAthena.h 670279 2015-05-27 15:18:27Z krasznaa $
-#ifndef XAODMETADATACNV_XAODMETADATACNVATHENA_H
-#define XAODMETADATACNV_XAODMETADATACNVATHENA_H
-
-// This file is used to convince checkreq.py that the correct dependencies
-// were declared for the package. It should never be explicitly included
-// by any client code.
-
-#include "AthenaKernel/IMetaDataTool.h"
-
-#endif // XAODMETADATACNV_XAODMETADATACNVATHENA_H
diff --git a/Event/xAOD/xAODMuon/Root/Muon_v1.cxx b/Event/xAOD/xAODMuon/Root/Muon_v1.cxx
index 0554974727591e6e8b4d46f422bfa773ce90e996..41c8f231a87116a47b228ae441fdd59a65dad6b0 100644
--- a/Event/xAOD/xAODMuon/Root/Muon_v1.cxx
+++ b/Event/xAOD/xAODMuon/Root/Muon_v1.cxx
@@ -7,10 +7,8 @@
 
 // EDM include(s):
 #include "xAODCore/AuxStoreAccessorMacros.h"
-#include "xAODPrimitives/tools/getIsolationAccessor.h"
 #include "xAODTracking/TrackParticle.h"
 #include "xAODTracking/TrackSummaryAccessors_v1.h"
-
 #include "xAODPrimitives/tools/getIsolationAccessor.h"
 #include "xAODPrimitives/tools/getIsolationCorrectionAccessor.h"
 
diff --git a/ForwardDetectors/ALFA/ALFA_Ntuple/src/ALFA_Ntuple.cxx b/ForwardDetectors/ALFA/ALFA_Ntuple/src/ALFA_Ntuple.cxx
index 9651ec70080cc2caadd714e10cf75c806463c6bf..f092ac8b5de910cb3cb623d9206ded3d322163aa 100644
--- a/ForwardDetectors/ALFA/ALFA_Ntuple/src/ALFA_Ntuple.cxx
+++ b/ForwardDetectors/ALFA/ALFA_Ntuple/src/ALFA_Ntuple.cxx
@@ -925,7 +925,7 @@ StatusCode ALFA_Ntuple::TruthInfo()
 					m  = (*child)->momentum().m();
 
 					ATH_MSG_DEBUG(std::setprecision(12));
-					ATH_MSG_DEBUG("particle barcode = " << (*child)->barcode());
+					ATH_MSG_DEBUG("particle barcode = " << HepMC::barcode(*child));
 					ATH_MSG_DEBUG("particle pdg = " << (*child)->pdg_id());
 					ATH_MSG_DEBUG("particle status = " << (*child)->status());
 					ATH_MSG_DEBUG(" *  px = " << px << ", py = " << py << ", pz = " << pz <<  ", E = " << E << ", m = " << m);
diff --git a/Generators/AtlasHepMC/AtlasHepMC/GenEvent.h b/Generators/AtlasHepMC/AtlasHepMC/GenEvent.h
index 55e3e7b4a3b7231127714999897eb42c4c4348ab..20259b2fd1d4dee5d7b2f7558956b452697f97dd 100644
--- a/Generators/AtlasHepMC/AtlasHepMC/GenEvent.h
+++ b/Generators/AtlasHepMC/AtlasHepMC/GenEvent.h
@@ -63,6 +63,14 @@ if (id>0&&id<=(int)particles.size()) return particles[id-1];
 return  HepMC3::GenParticlePtr(); 
 }
 
+inline int mpi(const GenEvent evt) {
+std::shared_ptr<HepMC3::IntAttribute> A_mpi=evt.attribute<HepMC3::IntAttribute>("mpi");
+ return A_mpi?(A_mpi->value()):0;
+}
+inline int mpi(const GenEvent* evt) {
+std::shared_ptr<HepMC3::IntAttribute> A_mpi=evt->attribute<HepMC3::IntAttribute>("mpi");
+ return A_mpi?(A_mpi->value()):0;
+}
 
 inline int signal_process_id(const GenEvent evt) {
 std::shared_ptr<HepMC3::IntAttribute> A_signal_process_id=evt.attribute<HepMC3::IntAttribute>("signal_process_id");
@@ -74,6 +82,8 @@ std::shared_ptr<HepMC3::IntAttribute> A_signal_process_id=evt->attribute<HepMC3:
 }
 inline void set_signal_process_id(GenEvent* e, const int i=0) {     std::shared_ptr<HepMC3::IntAttribute> signal_process_id = std::make_shared<HepMC3::IntAttribute>(i);
                                                                     e->add_attribute("signal_process_id",signal_process_id);  }
+inline void set_mpi(GenEvent* e, const int i=0) {     std::shared_ptr<HepMC3::IntAttribute> mpi = std::make_shared<HepMC3::IntAttribute>(i);
+                                                                    e->add_attribute("mpi",mpi);  }
 inline void set_random_states(GenEvent* e, std::vector<long int>& a)  { 
  e->add_attribute("random_states",std::make_shared<HepMC3::VectorLongIntAttribute>(a));
 }
@@ -99,6 +109,12 @@ inline GenEvent* newGenEvent(const int a, const int b ){ return new GenEvent(a,b
 inline GenVertex* signal_process_vertex(const GenEvent* e) { return e->signal_process_vertex(); }
 inline GenVertex* barcode_to_vertex(const GenEvent* e, int id ){return  e->barcode_to_vertex(id);}
 inline GenParticle* barcode_to_particle(const GenEvent* e, int id ){return  e->barcode_to_particle(id);}
+inline int mpi(const GenEvent e) {
+    return e.mpi();
+}
+inline int mpi(const GenEvent* e) {
+    return e->mpi();
+}
 inline int signal_process_id(const GenEvent e) {
     return e.signal_process_id();
 }
@@ -108,6 +124,9 @@ inline int signal_process_id(const GenEvent* e) {
 inline void set_signal_process_id(GenEvent* e, const int i) {
     e->set_signal_process_id(i);
 }
+inline void set_mpi(GenEvent* e, const int i) {
+    e->set_mpi(i);
+}
 template <class T> void set_random_states(GenEvent* e, std::vector<T> a) {
     e->set_random_states(a);
 }
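
The new mpi()/set_mpi() wrappers follow the pattern of the existing signal_process_id() helpers: in HepMC3 builds they go through an IntAttribute named "mpi", while in HepMC2 builds they forward to GenEvent::mpi()/set_mpi(). A short usage sketch, assuming (as for the other AtlasHepMC wrappers) that the helpers live in the HepMC namespace; the function tagAndPrintMpi is hypothetical:

// Illustrative only: client code is the same for HepMC2 and HepMC3 builds.
#include <iostream>
#include "AtlasHepMC/GenEvent.h"

void tagAndPrintMpi(HepMC::GenEvent* evt) {
  HepMC::set_mpi(evt, 3);                            // record the MPI count
  std::cout << "mpi = " << HepMC::mpi(evt) << '\n';  // prints "mpi = 3"
}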
diff --git a/Generators/EvgenProdTools/CMakeLists.txt b/Generators/EvgenProdTools/CMakeLists.txt
index c2a3600b0573e8b6fc5a2b5916e2866cd4f06c78..a15920cd613ca9791369f4c68a76fc5824308252 100644
--- a/Generators/EvgenProdTools/CMakeLists.txt
+++ b/Generators/EvgenProdTools/CMakeLists.txt
@@ -11,7 +11,7 @@ atlas_add_library( EvgenProdToolsLib
                    src/*.cxx
                    PUBLIC_HEADERS EvgenProdTools
                    INCLUDE_DIRS ${ROOT_INCLUDE_DIRS} 
-                   LINK_LIBRARIES ${ROOT_LIBRARIES} AtlasHepMCLib AthenaBaseComps TruthHelper GeneratorModulesLib GenInterfacesLib
+                   LINK_LIBRARIES ${ROOT_LIBRARIES} AtlasHepMCLib AtlasHepMCsearchLib AthenaBaseComps TruthHelper GeneratorModulesLib GenInterfacesLib
                    PRIVATE_LINK_LIBRARIES AthenaKernel EventInfo GaudiKernel TruthUtils )
 
 atlas_add_component( EvgenProdTools
diff --git a/Generators/FlowAfterburner/CMakeLists.txt b/Generators/FlowAfterburner/CMakeLists.txt
index bbeab93b32a051f8b7cd388b4b5ceb7c784a1774..1b181b1dea646b9b813d035de14cb789cb6a032c 100644
--- a/Generators/FlowAfterburner/CMakeLists.txt
+++ b/Generators/FlowAfterburner/CMakeLists.txt
@@ -15,7 +15,7 @@ atlas_add_library( FlowAfterburnerLib
                    INCLUDE_DIRS ${GSL_INCLUDE_DIRS} ${ROOT_INCLUDE_DIRS} ${CLHEP_INCLUDE_DIRS}
                    PRIVATE_INCLUDE_DIRS 
                    DEFINITIONS ${CLHEP_DEFINITIONS}
-                   LINK_LIBRARIES ${GSL_LIBRARIES} ${ROOT_LIBRARIES} ${CLHEP_LIBRARIES} AthenaBaseComps AthenaKernel AtlasHepMCLib GaudiKernel TruthHelper GeneratorObjects
+                   LINK_LIBRARIES ${GSL_LIBRARIES} ${ROOT_LIBRARIES} ${CLHEP_LIBRARIES} AthenaBaseComps AthenaKernel AtlasHepMCLib AtlasHepMCsearchLib GaudiKernel TruthHelper GeneratorObjects
                    PRIVATE_LINK_LIBRARIES StoreGateLib TruthUtils )
 
 atlas_add_component( FlowAfterburner
diff --git a/Generators/HforTool/CMakeLists.txt b/Generators/HforTool/CMakeLists.txt
index 56ff1587cb23572fa3f3567b55feff74afccafa0..0df6bfadfa3da7aa69a6ef5dabfeea888eb0b496 100644
--- a/Generators/HforTool/CMakeLists.txt
+++ b/Generators/HforTool/CMakeLists.txt
@@ -7,4 +7,4 @@ atlas_subdir( HforTool )
 atlas_add_component( HforTool
                      src/*.cxx
                      src/components/*.cxx
-                     LINK_LIBRARIES AtlasHepMCLib AthenaBaseComps EventInfo JetEvent GeneratorObjects TruthUtils GenInterfacesLib )
+                     LINK_LIBRARIES AtlasHepMCLib AtlasHepMCsearchLib AthenaBaseComps EventInfo JetEvent GeneratorObjects TruthUtils GenInterfacesLib )
diff --git a/InnerDetector/InDetConditions/SCT_ConditionsAlgorithms/python/SCT_ByteStreamErrorsTestAlgConfig.py b/InnerDetector/InDetConditions/SCT_ConditionsAlgorithms/python/SCT_ByteStreamErrorsTestAlgConfig.py
new file mode 100644
index 0000000000000000000000000000000000000000..931ba6945007d2605bcdd3b697931e92fc108440
--- /dev/null
+++ b/InnerDetector/InDetConditions/SCT_ConditionsAlgorithms/python/SCT_ByteStreamErrorsTestAlgConfig.py
@@ -0,0 +1,44 @@
+"""Define method to configure and test SCT_ByteStreamErrorsTestAlg
+
+Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
+"""
+from AthenaConfiguration.ComponentAccumulator import ComponentAccumulator
+from AthenaConfiguration.ComponentFactory import CompFactory
+
+def SCT_ByteStreamErrorsTestAlgCfg(flags, name="SCT_ByteStreamErrorsTestAlg", **kwargs):
+    """Return a configured SCT_ByteStreamErrorsTestAlg"""
+    acc = ComponentAccumulator()
+    from InDetConfig.InDetRecToolConfig import SCT_ByteStreamErrorsToolCfg
+    kwargs.setdefault("ByteStreamErrorsTool", acc.popToolsAndMerge(SCT_ByteStreamErrorsToolCfg(flags)))
+    acc.addEventAlgo(CompFactory.SCT_ByteStreamErrorsTestAlg(**kwargs))
+    return acc
+
+if __name__=="__main__":
+    from AthenaCommon.Logging import log
+    from AthenaCommon.Constants import INFO
+    log.setLevel(INFO)
+
+    from AthenaCommon.Configurable import Configurable
+    Configurable.configurableRun3Behavior=1
+    
+    from AthenaConfiguration.AllConfigFlags import ConfigFlags
+    ConfigFlags.Input.isMC = False
+    ConfigFlags.Input.Files = ["./myESD.pool.root"]
+    ConfigFlags.Input.ProjectName = "data17_13TeV" # q431 input
+    ConfigFlags.Input.RunNumber = 330470 # q431 input
+    ConfigFlags.IOVDb.GlobalTag = "CONDBR2-BLKPA-2018-03" # q431 setup
+    ConfigFlags.GeoModel.AtlasVersion = "ATLAS-R2-2016-01-00-01" # q431 setup
+    ConfigFlags.Detector.GeometrySCT = True
+    ConfigFlags.lock()
+
+    from AthenaConfiguration.MainServicesConfig import MainServicesCfg
+    cfg = MainServicesCfg(ConfigFlags)
+
+    from AthenaPoolCnvSvc.PoolReadConfig import PoolReadCfg
+    cfg.merge(PoolReadCfg(ConfigFlags))
+
+    algkwargs = {}
+    algkwargs["OutputLevel"] = INFO
+    cfg.merge(SCT_ByteStreamErrorsTestAlgCfg(ConfigFlags, **algkwargs))
+
+    cfg.run(maxEvents=20)
diff --git a/InnerDetector/InDetDetDescr/SCT_GeoModel/src/SCT_DetectorFactory.cxx b/InnerDetector/InDetDetDescr/SCT_GeoModel/src/SCT_DetectorFactory.cxx
index 610527dc10bde8633ab4b47b81a7e344545cdede..cc2bafadb17cca1da83fab904fe835cc28e065fe 100644
--- a/InnerDetector/InDetDetDescr/SCT_GeoModel/src/SCT_DetectorFactory.cxx
+++ b/InnerDetector/InDetDetDescr/SCT_GeoModel/src/SCT_DetectorFactory.cxx
@@ -7,57 +7,58 @@
 //
 
 
-#include "SCT_GeoModel/SCT_DetectorFactory.h" 
+#include "SCT_GeoModel/SCT_DetectorFactory.h"
 
-#include "SCT_GeoModel/SCT_DataBase.h"
-#include "SCT_GeoModel/SCT_Identifier.h"
-#include "SCT_GeoModel/SCT_GeometryManager.h" 
-#include "SCT_GeoModel/SCT_MaterialManager.h"
-#include "SCT_GeoModel/SCT_GeneralParameters.h"
-#include "InDetReadoutGeometry/Version.h" 
-#include "InDetReadoutGeometry/SiCommonItems.h" 
-#include "InDetReadoutGeometry/InDetDD_Defs.h"
-#include "SCT_ReadoutGeometry/SCT_ModuleSideDesign.h" 
+#include "AthenaPoolUtilities/CondAttrListCollection.h"
 
-#include "SCT_GeoModel/SCT_Barrel.h"
-#include "SCT_GeoModel/SCT_Forward.h"
-#include "SCT_GeoModel/SCT_DataBase.h"
-#include "SCT_GeoModel/SCT_GeoModelAthenaComps.h"
+#include "DetDescrConditions/AlignableTransformContainer.h"
 
 //
 // GeoModel include files:
 //
-#include "GeoModelKernel/GeoMaterial.h"  
-#include "GeoModelKernel/GeoTube.h"  
-#include "GeoModelKernel/GeoLogVol.h"  
-#include "GeoModelKernel/GeoNameTag.h"  
-#include "GeoModelKernel/GeoIdentifierTag.h"  
-#include "GeoModelKernel/GeoPhysVol.h"  
-#include "GeoModelKernel/GeoVPhysVol.h"  
-#include "GeoModelKernel/GeoTransform.h"  
-#include "GeoModelKernel/GeoAlignableTransform.h"  
+#include "GeoModelInterfaces/IGeoDbTagSvc.h"
+#include "GeoModelInterfaces/StoredMaterialManager.h"
+#include "GeoModelKernel/GeoAlignableTransform.h"
+#include "GeoModelKernel/GeoDefinitions.h"
+#include "GeoModelKernel/GeoIdentifierTag.h"
+#include "GeoModelKernel/GeoLogVol.h"
+#include "GeoModelKernel/GeoMaterial.h"
+#include "GeoModelKernel/GeoNameTag.h"
+#include "GeoModelKernel/GeoPhysVol.h"
 #include "GeoModelKernel/GeoShape.h"
-#include "GeoModelKernel/GeoShapeUnion.h"
 #include "GeoModelKernel/GeoShapeShift.h"
-#include "GeoModelInterfaces/StoredMaterialManager.h"
-#include "GeoModelInterfaces/IGeoDbTagSvc.h"
+#include "GeoModelKernel/GeoShapeUnion.h"
+#include "GeoModelKernel/GeoTransform.h"
+#include "GeoModelKernel/GeoTube.h"
+#include "GeoModelKernel/GeoVPhysVol.h"
 #include "GeoModelUtilities/DecodeVersionKey.h"
+
+#include "InDetReadoutGeometry/InDetDD_Defs.h"
+#include "InDetReadoutGeometry/SiCommonItems.h"
+#include "InDetReadoutGeometry/Version.h"
+
 #include "RDBAccessSvc/IRDBAccessSvc.h"
-#include "RDBAccessSvc/IRDBRecordset.h"
 #include "RDBAccessSvc/IRDBRecord.h"
-#include "AthenaPoolUtilities/CondAttrListCollection.h"
-#include "DetDescrConditions/AlignableTransformContainer.h"
-#
-#include "StoreGate/StoreGateSvc.h"
-#include "GaudiKernel/ISvcLocator.h"
+#include "RDBAccessSvc/IRDBRecordset.h"
 
-#include "GeoModelKernel/GeoDefinitions.h"
-#include "GaudiKernel/SystemOfUnits.h"
+#include "SCT_GeoModel/SCT_Barrel.h"
+#include "SCT_GeoModel/SCT_DataBase.h"
+#include "SCT_GeoModel/SCT_Forward.h"
+#include "SCT_GeoModel/SCT_GeneralParameters.h"
+#include "SCT_GeoModel/SCT_GeometryManager.h"
+#include "SCT_GeoModel/SCT_GeoModelAthenaComps.h"
+#include "SCT_GeoModel/SCT_Identifier.h"
+#include "SCT_GeoModel/SCT_MaterialManager.h"
+
+#include "SCT_ReadoutGeometry/SCT_ModuleSideDesign.h"
 
+#include "StoreGate/StoreGateSvc.h"
 
+#include "GaudiKernel/ISvcLocator.h"
+#include "GaudiKernel/SystemOfUnits.h"
 
-#include <iostream> 
-#include <iomanip> 
+#include <iomanip>
+#include <iostream>
 #include <string>
  
 using InDetDD::SCT_DetectorManager; 
diff --git a/InnerDetector/InDetDetDescr/TRT_ReadoutGeometry/TRT_ReadoutGeometry/TRT_BarrelElement.h b/InnerDetector/InDetDetDescr/TRT_ReadoutGeometry/TRT_ReadoutGeometry/TRT_BarrelElement.h
index 8c1fe0401a517e53b6e3dc5b4c37198b01080936..ad60f03a11682725b40ff418a0987ca489a87fbd 100755
--- a/InnerDetector/InDetDetDescr/TRT_ReadoutGeometry/TRT_ReadoutGeometry/TRT_BarrelElement.h
+++ b/InnerDetector/InDetDetDescr/TRT_ReadoutGeometry/TRT_ReadoutGeometry/TRT_BarrelElement.h
@@ -46,22 +46,22 @@ namespace InDetDD {
   public:
     
     /** Constructor: */
-    TRT_BarrelElement(const GeoVFullPhysVol *volume, 
-		      const TRT_BarrelDescriptor *descriptor, 
-		      bool isPositive, 
-		      unsigned int modIndex, 
-		      unsigned int phiIndex, 
-		      unsigned int strawLayIndex,
-		      const TRT_ID * idHelper,
-		      const TRT_Conditions * conditions,
-                      const GeoAlignmentStore* geoAlignStore=nullptr);
+    TRT_BarrelElement(const GeoVFullPhysVol* volume,
+                      const TRT_BarrelDescriptor* descriptor,
+                      bool isPositive,
+                      unsigned int modIndex,
+                      unsigned int phiIndex,
+                      unsigned int strawLayIndex,
+                      const TRT_ID* idHelper,
+                      const TRT_Conditions* conditions,
+                      const GeoAlignmentStore* geoAlignStore = nullptr);
+
+    TRT_BarrelElement(const TRT_BarrelElement& right,
+                      const GeoAlignmentStore* geoAlignStore);
 
-
-    TRT_BarrelElement(const TRT_BarrelElement &right, const GeoAlignmentStore* geoAlignStore);
-    
     /** Destructor: */
-    virtual ~TRT_BarrelElement();
-    
+    virtual ~TRT_BarrelElement() = default;
+
     /** Type information */
     virtual TRT_BaseElement::Type type() const {return TRT_BaseElement::BARREL;} 
 
@@ -88,9 +88,6 @@ namespace InDetDD {
     
     /** Default Local -> global transform of the straw (ie before alignment corrections) */
     virtual HepGeom::Transform3D defStrawTransform(int straw) const;
- 
-    /** Get the number of straws: */
-    virtual unsigned int nStraws() const;
     
     /** Get the length of the straws (active length): */
     virtual const double & strawLength() const;
diff --git a/InnerDetector/InDetDetDescr/TRT_ReadoutGeometry/TRT_ReadoutGeometry/TRT_BarrelElement.icc b/InnerDetector/InDetDetDescr/TRT_ReadoutGeometry/TRT_ReadoutGeometry/TRT_BarrelElement.icc
index 95f743e8d0c152500b89e95a490c681ed6381da4..181f1c73aee7baec404ddedee5627178f4e612cf 100755
--- a/InnerDetector/InDetDetDescr/TRT_ReadoutGeometry/TRT_ReadoutGeometry/TRT_BarrelElement.icc
+++ b/InnerDetector/InDetDetDescr/TRT_ReadoutGeometry/TRT_ReadoutGeometry/TRT_BarrelElement.icc
@@ -33,27 +33,22 @@ inline const TRT_BarrelCode & TRT_BarrelElement::getCode() const
 
 
 // Get X Position - should not be used anymore
-inline double TRT_BarrelElement::strawXPos (unsigned int straw) const {   
+inline double TRT_BarrelElement::strawXPos (unsigned int straw) const {
   return strawCenter(straw).x();
 }
-  
+
 // Get Y Position - should not be used anymore
 inline double TRT_BarrelElement::strawYPos (unsigned int straw) const {
     return strawCenter(straw).y();
 }
-  
+
 // Get Z Position - should not be used anymore
 inline double TRT_BarrelElement::strawZPos (unsigned int straw) const {
     return strawCenter(straw).z();
 }
 
-// Get the number of straws:
-inline unsigned int  TRT_BarrelElement::nStraws() const {
-  return m_descriptor->nStraws();	
-}
-
 // Get the length of the straws:
 inline const double & TRT_BarrelElement::strawLength() const {
-  return m_descriptor->strawLength();	
+  return m_descriptor->strawLength();
 }
 }
diff --git a/InnerDetector/InDetDetDescr/TRT_ReadoutGeometry/TRT_ReadoutGeometry/TRT_BaseElement.h b/InnerDetector/InDetDetDescr/TRT_ReadoutGeometry/TRT_ReadoutGeometry/TRT_BaseElement.h
index 27f9092858f5f93c19c1054180173723c6525710..328f2cb92dcaa00baa54549e284c6bff40d4f55f 100755
--- a/InnerDetector/InDetDetDescr/TRT_ReadoutGeometry/TRT_ReadoutGeometry/TRT_BaseElement.h
+++ b/InnerDetector/InDetDetDescr/TRT_ReadoutGeometry/TRT_ReadoutGeometry/TRT_BaseElement.h
@@ -63,12 +63,17 @@ namespace InDetDD {
     enum Type {BARREL, ENDCAP};
 
     /** Constructor: */
-    TRT_BaseElement(const GeoVFullPhysVol *volume, const Identifier& id, const TRT_ID* idHelper, const TRT_Conditions* conditions, const GeoAlignmentStore* geoAlignStore=nullptr);
+    TRT_BaseElement(const GeoVFullPhysVol* volume,
+                    const Identifier& id,
+                    const TRT_ID* idHelper,
+                    const TRT_Conditions* conditions,
+                    const GeoAlignmentStore* geoAlignStore = nullptr);
+
+    TRT_BaseElement(const TRT_BaseElement& right,
+                    const GeoAlignmentStore* geoAlignStore);
 
-    TRT_BaseElement(const TRT_BaseElement&right, const GeoAlignmentStore* geoAlignStore);
-    
     /** Destructor: */
-    virtual ~TRT_BaseElement();
+    virtual ~TRT_BaseElement() = default;
 
     /** Type information: returns BARREL or ENDCAP */
     virtual TRT_BaseElement::Type type() const = 0; 
@@ -156,7 +161,7 @@ namespace InDetDD {
     Amg::Vector3D strawAxis(int straw) const;
       
     /** Number of straws in the element. */
-    virtual unsigned int nStraws() const = 0;
+    unsigned int nStraws() const;
 
     /** Active straw length */
     virtual const double& strawLength() const = 0;
@@ -221,27 +226,25 @@ namespace InDetDD {
     /** Illegal operations: */
     TRT_BaseElement(const TRT_BaseElement&right);
     const TRT_BaseElement& operator=(const TRT_BaseElement&right);
-
     /** Helper method for cache dealing */
     void deleteCache();
-
     void createStrawSurfaces() const;
     void createStrawSurfacesCache() const;
 
   protected:
-
     Identifier                                          m_id;
     IdentifierHash                                      m_idHash;
-    const TRT_ID*                                       m_idHelper;
-    const TRT_Conditions*                               m_conditions;
-
+    const TRT_ID*                                       m_idHelper=nullptr;
+    const TRT_Conditions*                               m_conditions=nullptr;
+    /*
+     * The number of straws and the vector below need to be
+     * initialised in the derived constructors for now.
+     * This should be fine as this is a pure virtual class.
+     */
+    unsigned int                                        m_nstraws=0;
     // Amg cache for the straw surfaces
-    CxxUtils::CachedUniquePtrT<
-      std::vector<std::unique_ptr<Trk::StraightLineSurface>>>
-      m_strawSurfaces{};
-
-    CxxUtils::CachedUniquePtrT<std::vector<std::unique_ptr<SurfaceCache>>>
-      m_strawSurfacesCache{};
+    std::vector<CxxUtils::CachedUniquePtr<Trk::StraightLineSurface>> m_strawSurfaces{};
+    std::vector<CxxUtils::CachedUniquePtr<SurfaceCache>> m_strawSurfacesCache{};
 
     //!< helper element surface for the cache   
     CxxUtils::CachedUniquePtr<SurfaceCache> m_surfaceCache;
@@ -249,13 +252,12 @@ namespace InDetDD {
     
     mutable std::vector<const Trk::Surface*> m_surfaces ATLAS_THREAD_SAFE; // Guarded by m_mutex
     mutable std::mutex m_mutex;
-
-    const GeoAlignmentStore* m_geoAlignStore{};
+    const GeoAlignmentStore* m_geoAlignStore=nullptr;
 
   };
     
 }
-
+#include "TRT_ReadoutGeometry/TRT_BaseElement.icc"
 #endif
 
 
diff --git a/InnerDetector/InDetDetDescr/TRT_ReadoutGeometry/TRT_ReadoutGeometry/TRT_BaseElement.icc b/InnerDetector/InDetDetDescr/TRT_ReadoutGeometry/TRT_ReadoutGeometry/TRT_BaseElement.icc
index 828631ffe639d43c00fb32dde2a0a5adf0a6d0bc..e48aed5da52160c20e0411e45a5f079cc584ac05 100755
--- a/InnerDetector/InDetDetDescr/TRT_ReadoutGeometry/TRT_ReadoutGeometry/TRT_BaseElement.icc
+++ b/InnerDetector/InDetDetDescr/TRT_ReadoutGeometry/TRT_ReadoutGeometry/TRT_BaseElement.icc
@@ -3,10 +3,12 @@
 */
 
 namespace InDetDD {
-	
-inline const HepGeom::Transform3D & TRT_BaseElement::defTransform() const
+
+// Get the number of straws:
+inline unsigned int
+TRT_BaseElement::nStraws() const
 {
-  return getMaterialGeom()->getDefAbsoluteTransform();
+  return m_nstraws;
 }
 
 }
diff --git a/InnerDetector/InDetDetDescr/TRT_ReadoutGeometry/TRT_ReadoutGeometry/TRT_EndcapElement.h b/InnerDetector/InDetDetDescr/TRT_ReadoutGeometry/TRT_ReadoutGeometry/TRT_EndcapElement.h
index 19e612b061c152dc0ab1c11ea0aa8415b2325bb8..644e5b47e171f11329cd023780e2c4779b4a711a 100755
--- a/InnerDetector/InDetDetDescr/TRT_ReadoutGeometry/TRT_ReadoutGeometry/TRT_EndcapElement.h
+++ b/InnerDetector/InDetDetDescr/TRT_ReadoutGeometry/TRT_ReadoutGeometry/TRT_EndcapElement.h
@@ -45,15 +45,15 @@ namespace InDetDD {
     
     public:
       /** Constructor */
-      TRT_EndcapElement(const GeoVFullPhysVol *volume, 
-      	                const TRT_EndcapDescriptor *descriptor, 
-      	                bool isPositive, 
-      	                unsigned int wheelIndex, 
-      	                unsigned int strawLayIndex,
-      	                unsigned int phiIndex,
-      	                const TRT_ID * idHelper,
-      		        const TRT_Conditions * conditions,
-                        const GeoAlignmentStore* geoAlignStore=nullptr);
+      TRT_EndcapElement(const GeoVFullPhysVol* volume,
+                        const TRT_EndcapDescriptor* descriptor,
+                        bool isPositive,
+                        unsigned int wheelIndex,
+                        unsigned int strawLayIndex,
+                        unsigned int phiIndex,
+                        const TRT_ID* idHelper,
+                        const TRT_Conditions* conditions,
+                        const GeoAlignmentStore* geoAlignStore = nullptr);
 
       TRT_EndcapElement(const TRT_EndcapElement &right, const GeoAlignmentStore* geoAlignStore);
     
@@ -66,9 +66,6 @@ namespace InDetDD {
       /** Default Local -> global transform of the straw (ie before alignment corrections) */
       virtual HepGeom::Transform3D defStrawTransform(int straw) const;
     
-      /** Number of straws in the element */
-      virtual unsigned int nStraws() const;
-    
       /** Active straw length */
       virtual const double & strawLength() const;
     
diff --git a/InnerDetector/InDetDetDescr/TRT_ReadoutGeometry/src/TRT_BarrelElement.cxx b/InnerDetector/InDetDetDescr/TRT_ReadoutGeometry/src/TRT_BarrelElement.cxx
index 0b930f6c744f8604faf5073fce6bf92b4c7fda69..6cd61074c81321ee05ef3042f39072fc7e6ed2ba 100755
--- a/InnerDetector/InDetDetDescr/TRT_ReadoutGeometry/src/TRT_BarrelElement.cxx
+++ b/InnerDetector/InDetDetDescr/TRT_ReadoutGeometry/src/TRT_BarrelElement.cxx
@@ -22,49 +22,54 @@
 
 namespace InDetDD {
 
-TRT_BarrelElement::TRT_BarrelElement(const GeoVFullPhysVol *volume, 
-				     const TRT_BarrelDescriptor *descriptor,
-				     bool isPositive, 
-				     unsigned int modIndex, 
-				     unsigned int phiIndex, 
-				     unsigned int strawLayIndex, 
-				     const TRT_ID * idHelper,
-				     const TRT_Conditions * conditions,
+TRT_BarrelElement::TRT_BarrelElement(const GeoVFullPhysVol* volume,
+                                     const TRT_BarrelDescriptor* descriptor,
+                                     bool isPositive,
+                                     unsigned int modIndex,
+                                     unsigned int phiIndex,
+                                     unsigned int strawLayIndex,
+                                     const TRT_ID* idHelper,
+                                     const TRT_Conditions* conditions,
                                      const GeoAlignmentStore* geoAlignStore)
-  :
-  TRT_BaseElement(volume, 
-		  idHelper->layer_id((isPositive ? 1:-1), phiIndex, modIndex, strawLayIndex),
-		  idHelper, conditions, geoAlignStore),
-  m_code(isPositive,modIndex,phiIndex,strawLayIndex),
-  m_descriptor(descriptor),
-  m_nextInPhi(NULL),
-  m_previousInPhi(NULL),
-  m_nextInR(NULL),
-  m_previousInR(NULL)
+  : TRT_BaseElement(volume,
+                    idHelper->layer_id((isPositive ? 1 : -1),
+                                       phiIndex,
+                                       modIndex,
+                                       strawLayIndex),
+                    idHelper,
+                    conditions,
+                    geoAlignStore)
+  , m_code(isPositive, modIndex, phiIndex, strawLayIndex)
+  , m_descriptor(descriptor)
+  , m_nextInPhi(nullptr)
+  , m_previousInPhi(nullptr)
+  , m_nextInR(nullptr)
+  , m_previousInR(nullptr)
 
 {
+  m_nstraws = m_descriptor->nStraws();
+  m_strawSurfaces.resize(m_nstraws);
+  m_strawSurfacesCache.resize(m_nstraws);
 }
 
-
-  TRT_BarrelElement::TRT_BarrelElement(const TRT_BarrelElement &right, const GeoAlignmentStore* geoAlignStore) :
-    TRT_BaseElement(right,geoAlignStore),
-    m_code (right.m_code),
-    m_descriptor (right.m_descriptor),
-    m_nextInPhi (right.m_nextInPhi),
-    m_previousInPhi (right.m_previousInPhi),
-    m_nextInR (right.m_nextInR),
-    m_previousInR (right.m_previousInR)
-  {   
-  }
-
-
-TRT_BarrelElement::~TRT_BarrelElement()
+TRT_BarrelElement::TRT_BarrelElement(const TRT_BarrelElement& right,
+                                     const GeoAlignmentStore* geoAlignStore)
+  : TRT_BaseElement(right, geoAlignStore)
+  , m_code(right.m_code)
+  , m_descriptor(right.m_descriptor)
+  , m_nextInPhi(right.m_nextInPhi)
+  , m_previousInPhi(right.m_previousInPhi)
+  , m_nextInR(right.m_nextInR)
+  , m_previousInR(right.m_previousInR)
 {
+  m_nstraws = right.m_nstraws;
+  m_strawSurfaces.resize(m_nstraws);
+  m_strawSurfacesCache.resize(m_nstraws);
 }
 
 const TRT_BarrelConditions * TRT_BarrelElement::getConditionsData() const
 {
-  return NULL;
+  return nullptr;
 }
 
 const TRT_BarrelDescriptor * TRT_BarrelElement::getDescriptor() const
@@ -94,10 +99,10 @@ void  TRT_BarrelElement::setPreviousInR(const TRT_BarrelElement *element)
 
 
 
-HepGeom::Transform3D TRT_BarrelElement::calculateStrawTransform(int straw) const 
+HepGeom::Transform3D TRT_BarrelElement::calculateStrawTransform(int straw) const
 {
   // NB The tranformation to a straw is reconstructed here precisely as
-  // it was ... hopefully... in the factory.  One could eliminate this 
+  // it was ... hopefully... in the factory.  One could eliminate this
   // requirement and make the code a little more robust in this regard but
   // at the cost of doubling the descriptors.  (One descriptor now suffices
   // for both positive and negative endcaps).
@@ -111,7 +116,7 @@ HepGeom::Transform3D TRT_BarrelElement::calculateStrawTransform(int straw) const
     return  Amg::EigenTransformToCLHEP(getMaterialGeom()->getAbsoluteTransform()*((*f)(straw+offsetInto)))
       * HepGeom::RotateY3D(zAng)*HepGeom::TranslateZ3D(zPos)
       * calculateLocalStrawTransform(straw);
-    ////return  conditions()->solenoidFrame() 
+    ////return  conditions()->solenoidFrame()
     ////  * getMaterialGeom()->getAbsoluteTransform()*((*f)(straw+offsetInto))
     ////  * HepGeom::RotateY3D(zAng)*HepGeom::TranslateZ3D(zPos)
     ////  * calculateLocalStrawTransform(straw);
@@ -149,13 +154,13 @@ HepGeom::Transform3D TRT_BarrelElement::calculateLocalStrawTransform(int straw)
 }
 
 
-HepGeom::Transform3D TRT_BarrelElement::defStrawTransform(int straw) const 
+HepGeom::Transform3D TRT_BarrelElement::defStrawTransform(int straw) const
 {
   // Same as calculateStrawTransform, except we use getDefAbsoluteTransform()
   // rather than  getAbsoluteTransform()
 
   // NB The tranformation to a straw is reconstructed here precisely as
-  // it was ... hopefully... in the factory.  One could eliminate this 
+  // it was ... hopefully... in the factory.  One could eliminate this
   // requirement and make the code a little more robust in this regard but
   // at the cost of doubling the descriptors.  (One descriptor now suffices
   // for both positive and negative endcaps).
@@ -165,27 +170,27 @@ HepGeom::Transform3D TRT_BarrelElement::defStrawTransform(int straw) const
     size_t offsetInto = m_descriptor->getStrawTransformOffset();
     double zPos = -m_descriptor->strawZPos();
     double zAng =  m_code.isPosZ() ? M_PI : 0;
-    return Amg::EigenTransformToCLHEP(getMaterialGeom()->getDefAbsoluteTransform()*((*f)(straw+offsetInto))) 
+    return Amg::EigenTransformToCLHEP(getMaterialGeom()->getDefAbsoluteTransform()*((*f)(straw+offsetInto)))
       * HepGeom::RotateY3D(zAng)*HepGeom::TranslateZ3D(zPos);
   } else {
     std::cout << "calculateStrawTransform:  f is 0 !!!!" << std::endl;
     return HepGeom::Transform3D();
   }
-  
+
 }
 
- 
-const Trk::SurfaceBounds& TRT_BarrelElement::strawBounds() const 
+
+const Trk::SurfaceBounds& TRT_BarrelElement::strawBounds() const
 {
   return m_descriptor->strawBounds();
 }
 
-const Trk::Surface& TRT_BarrelElement::elementSurface() const 
+const Trk::Surface& TRT_BarrelElement::elementSurface() const
 {
   if (not m_surface) m_surface.set(std::make_unique<Trk::PlaneSurface>(*this));
   return *m_surface;
 }
-   
+
 void TRT_BarrelElement::createSurfaceCache() const
 {
   // Calculate the surface from the two end straws.
@@ -197,13 +202,13 @@ void TRT_BarrelElement::createSurfaceCache() const
 
   // Calculate center as the average position of the end straws.
   Amg::Vector3D* center = new Amg::Vector3D(0.5*(centerFirstStraw+centerLastStraw));
-  
+
   Amg::Vector3D  phiAxis = centerLastStraw - centerFirstStraw;
   double width = phiAxis.mag();
   phiAxis = phiAxis.normalized();
   double elementWidth = width + 2 * m_descriptor->innerTubeRadius(); // Add the straw tube radius
-  
-  // Get local z-axis. This is roughly in +ve global z direction  (exactly if no misalignment) 
+
+  // Get local z-axis. This is roughly in +ve global z direction  (exactly if no misalignment)
   // We could probably use any straw for this but we average the first and last straw and renormalize
   // to a unit vector.
   Amg::Vector3D etaAxis = 0.5*(strawAxis(firstStraw) + strawAxis(lastStraw));
@@ -214,7 +219,7 @@ void TRT_BarrelElement::createSurfaceCache() const
   // of increasing phi and the straw axis is in +ve z direction.
   Amg::Vector3D* normal = new Amg::Vector3D(phiAxis.cross( etaAxis ));  // phi cross z
 
-  // Transform from local to global. 
+  // Transform from local to global.
   // local x axis -> phiAxis
   // local y axis -> etaAxis
   // local z axis -> cross product of local x and local y
@@ -222,21 +227,23 @@ void TRT_BarrelElement::createSurfaceCache() const
 
   // This constructor takes three points in the two coordinate systems.
   Amg::Transform3D* transform = new Amg::Transform3D();
-  
+
   Amg::RotationMatrix3D rotation;
   rotation.col(0) = phiAxis;
   rotation.col(1) = etaAxis;
   rotation.col(2) = (*normal);
 
   (*transform)  = Amg::Translation3D(*center) * rotation;
-    
+
   // create the element bounds
   Trk::RectangleBounds * elementBounds = new Trk::RectangleBounds(0.5*elementWidth, 0.5*strawLength());
   // create the surface cache
   m_surfaceCache.set(std::make_unique<SurfaceCache>(transform, center, normal, elementBounds));
 
   // creaete the surface (only if needed, links are still ok even if cache update)
-  if (not m_surface) elementSurface();
+  if (not m_surface) {
+    elementSurface();
+  }
 }
 
 
@@ -244,9 +251,9 @@ int TRT_BarrelElement::strawDirection() const
 {
   // Return +1 if the straw local axis is in the same direction as the z axis, -1 otherwise.
   // The straw axis by convention goes in the direction away from the readout.
-  // So for -ve endcap this is the positive z direction (we return +1) and in the 
+  // So for -ve endcap this is the positive z direction (we return +1) and in the
   // +ve endcap its in the -ve z direction (we return -1).
-  // 
+  //
   return !(m_code.isPosZ()) ? +1 : -1;
 }
 
diff --git a/InnerDetector/InDetDetDescr/TRT_ReadoutGeometry/src/TRT_BaseElement.cxx b/InnerDetector/InDetDetDescr/TRT_ReadoutGeometry/src/TRT_BaseElement.cxx
index 1ec682cc6d645d6b448cd1321ffc7edfa12357d7..b1d19eb1a7f63a9db2a18ec3f878fd0ed7b5cd61 100755
--- a/InnerDetector/InDetDetDescr/TRT_ReadoutGeometry/src/TRT_BaseElement.cxx
+++ b/InnerDetector/InDetDetDescr/TRT_ReadoutGeometry/src/TRT_BaseElement.cxx
@@ -5,351 +5,334 @@
 #include "TRT_ReadoutGeometry/TRT_BaseElement.h"
 #include "TRT_ReadoutGeometry/TRT_Conditions.h"
 
-#include "CLHEP/Geometry/Transform3D.h"
 #include "CLHEP/Geometry/Point3D.h"
+#include "CLHEP/Geometry/Transform3D.h"
 #include "CLHEP/Geometry/Vector3D.h"
 #include "GeoModelUtilities/GeoAlignmentStore.h"
 #include "GeoPrimitives/CLHEPtoEigenConverter.h"
 
-#include "InDetIdentifier/TRT_ID.h"
 #include "GeoModelUtilities/GeoAlignmentStore.h"
-
+#include "InDetIdentifier/TRT_ID.h"
 
 #include <vector>
 
 namespace InDetDD {
 
+TRT_BaseElement::TRT_BaseElement(const GeoVFullPhysVol* volume,
+                                 const Identifier& id,
+                                 const TRT_ID* idHelper,
+                                 const TRT_Conditions* conditions,
+                                 const GeoAlignmentStore* geoAlignStore)
+  : Trk::TrkDetElementBase(volume)
+  , m_id(id)
+  , m_idHelper(idHelper)
+  , m_conditions(conditions)
+  , m_surfaceCache{}
+  , m_surface{}
+  , m_surfaces{}
+  , m_mutex{}
+  , m_geoAlignStore(geoAlignStore)
+{
+  m_idHash = m_idHelper->straw_layer_hash(id);
+}
+
+TRT_BaseElement::TRT_BaseElement(const TRT_BaseElement& right,
+                                 const GeoAlignmentStore* geoAlignmentStore)
+  : Trk::TrkDetElementBase(right.getMaterialGeom())
+  , m_id(right.m_id)
+  , m_idHash(right.m_idHash)
+  , m_idHelper(right.m_idHelper)
+  , m_conditions(right.m_conditions)
+  , m_geoAlignStore(geoAlignmentStore)
+{}
+
+Identifier
+TRT_BaseElement::identify() const
+{
+  return m_id;
+}
+
+IdentifierHash
+TRT_BaseElement::identifyHash() const
+{
+  return m_idHash;
+}
+
+// [0] GeoModel / CLHEP Access
+const HepGeom::Transform3D
+TRT_BaseElement::getAbsoluteTransform(int straw) const
+{
+  return Amg::EigenTransformToCLHEP(strawTransform(straw));
+}
+
+// [A] description of the current detector element : Amg first, then CLHEP
+// converted methods
+const Trk::Surface&
+TRT_BaseElement::surface() const
+{
+  return elementSurface();
+}
+
+const Trk::SurfaceBounds&
+TRT_BaseElement::bounds() const
+{
+  if (not m_surfaceCache) {
+    createSurfaceCache();
+  }
+  return *(m_surfaceCache->bounds());
+}
+
+const Amg::Transform3D&
+TRT_BaseElement::transform() const
+{
+  if (not m_surfaceCache) {
+    createSurfaceCache();
+  }
+  return *(m_surfaceCache->transform());
+}
+
+const Amg::Vector3D&
+TRT_BaseElement::center() const
+{
+  if (not m_surfaceCache) {
+    createSurfaceCache();
+  }
+  return *(m_surfaceCache->center());
+}
+
+const Amg::Vector3D&
+TRT_BaseElement::normal() const
+{
+  if (not m_surfaceCache) {
+    createSurfaceCache();
+  }
+  return *(m_surfaceCache->normal());
+}
+
+// [B] Description of the individual straws
+const Trk::Surface&
+TRT_BaseElement::surface(const Identifier& id) const
+{
+  int straw = m_idHelper->straw(id);
+  if (!m_strawSurfaces[straw]) {
+    createSurfaceCache(id);
+  }
+  return *(m_strawSurfaces[straw].get());
+}
+
+const std::vector<const Trk::Surface*>&
+TRT_BaseElement::surfaces() const
+{
+  std::lock_guard<std::mutex> lock{ m_mutex };
+  if (m_surfaces.empty()) {
+    m_surfaces.reserve(nStraws());
+    for (unsigned is = 0; is < nStraws(); ++is)
+      m_surfaces.push_back(&strawSurface(is));
+  }
+  return m_surfaces;
+}
+
+const Trk::SurfaceBounds&
+TRT_BaseElement::bounds(const Identifier&) const
+{
+  return strawBounds();
+}
+
+const Amg::Transform3D&
+TRT_BaseElement::transform(const Identifier& id) const
+{
+  int straw = m_idHelper->straw(id);
+  if (!m_strawSurfacesCache[straw]) {
+    createSurfaceCache(id);
+  }
+  // forward the transform of the cache
+  return *(m_strawSurfacesCache[straw]->transform());
+}
+
+const Amg::Transform3D&
+TRT_BaseElement::strawTransform(unsigned int straw) const
+{
+  if (!m_strawSurfacesCache[straw]) {
+    Identifier id = m_idHelper->straw_id(identify(), straw);
+    createSurfaceCache(id);
+  }
+  // forward the transform of the cache
+  return *(m_strawSurfacesCache[straw]->transform());
+}
+
+const Amg::Vector3D&
+TRT_BaseElement::normal(const Identifier&) const
+{
+  // Not sure if the normal of the straw is ever used.
+  // nor is there a well-defined normal.
+  // This won't be corrected for alignments.
+  // Just return the element normal
+  return normal();
+}
+
+const Amg::Vector3D&
+TRT_BaseElement::center(const Identifier& id) const
+{
+  int straw = m_idHelper->straw(id);
+  if (!m_strawSurfacesCache[straw]) {
+    createSurfaceCache(id);
+  }
+  // forward the transform of the cache
+  return *(m_strawSurfacesCache[straw]->center());
+}
+
+const Trk::StraightLineSurface&
+TRT_BaseElement::strawSurface(int straw) const
+{
+  if (!m_strawSurfaces[straw]) {
+    // get the straw identifier for the given straw number and element identifier
+    Identifier id = m_idHelper->straw_id(identify(), straw);
+    createSurfaceCache(id);
+  }
+  return *(m_strawSurfaces[straw].get());
+}
+
+const Amg::Transform3D&
+TRT_BaseElement::strawTransform(int straw) const
+{
+  if (!m_strawSurfacesCache[straw]) {
+    Identifier id = m_idHelper->straw_id(identify(), straw);
+    createSurfaceCache(id);
+  }
+  // forward the transform of the cache
+  return *(m_strawSurfacesCache[straw]->transform());
+}
+
+const Amg::Vector3D&
+TRT_BaseElement::strawCenter(int straw) const
+{
+  if (!m_strawSurfacesCache[straw]) {
+    Identifier id = m_idHelper->straw_id(identify(), straw);
+    createSurfaceCache(id);
+  }
+  // forward the transform of the cache
+  return *(m_strawSurfacesCache[straw]->center());
+}
+
+Amg::Vector3D
+TRT_BaseElement::strawAxis(int straw) const
+{
+  return (strawTransform(straw).linear() * Amg::Vector3D::UnitZ() *
+          strawDirection());
+}
+
+/// ----- can be removed after full Amg migration -----------------------
+/// (start)
+// [A] in CLHEP
+const HepGeom::Transform3D
+TRT_BaseElement::transformCLHEP() const
+{
+  return Amg::EigenTransformToCLHEP(*(m_surfaceCache->transform()));
+}
+
+const HepGeom::Point3D<double>
+TRT_BaseElement::centerCLHEP() const
+{
+
+  const Amg::Vector3D& cCenter = center();
+  return HepGeom::Point3D<double>(cCenter.x(), cCenter.y(), cCenter.z());
+}
+
+const HepGeom::Vector3D<double>
+TRT_BaseElement::normalCLHEP() const
+{
+  const Amg::Vector3D& cNormal = normal();
+  return HepGeom::Vector3D<double>(cNormal.x(), cNormal.y(), cNormal.z());
+}
+
+// [B] in CLHEP
+const HepGeom::Transform3D
+TRT_BaseElement::transformCLHEP(const Identifier& id) const
+{
+  return Amg::EigenTransformToCLHEP(transform(id));
+}
+
+const HepGeom::Point3D<double>
+TRT_BaseElement::centerCLHEP(const Identifier& id) const
+{
+  const Amg::Vector3D cCenter = center(id);
+  return HepGeom::Point3D<double>(cCenter.x(), cCenter.y(), cCenter.z());
+}
+
+const HepGeom::Vector3D<double>
+TRT_BaseElement::normalCLHEP(const Identifier&) const
+{
+  // Not sure if the normal of the straw is ever used.
+  // nor is there a well-defined normal.
+  // This won't be corrected for alignments.
+  // Just return the element normal
+  return normalCLHEP();
+}
+//(end)
+// ----- can be removed after full Amg migration -----------------------
+
+void
+TRT_BaseElement::createSurfaceCache(Identifier id) const
+{
+  int straw = m_idHelper->straw(id);
+  // get the StrawTransform from GeoModel
+  HepGeom::Transform3D cStrawTransform = calculateStrawTransform(straw);
+
+  // convert necessary parts to Amg
+  if (!m_strawSurfacesCache[straw]) {
+    Amg::Transform3D* sTransform =
+      new Amg::Transform3D(Amg::CLHEPTransformToEigen(cStrawTransform));
+    Amg::Vector3D* sCenter = new Amg::Vector3D(sTransform->translation());
+    // create the surface cache & fill it
+    m_strawSurfacesCache[straw].set(
+      std::make_unique<SurfaceCache>(sTransform, sCenter, nullptr, nullptr));
+  }
+  // create the surface only if needed (the links are still intact)
+  if (!m_strawSurfaces[straw]) {
+    m_strawSurfaces[straw].set(
+      std::make_unique<Trk::StraightLineSurface>(*this, id));
+  }
+}
+
+void
+TRT_BaseElement::invalidate()
+{
+  // Invalidate the caches
+  // Call any barrel- or endcap-specific invalidation
+  invalidateOther();
+  // It's enough to delete and zero the caches.
+  deleteCache();
+}
+
+void
+TRT_BaseElement::deleteCache()
+{
+  // for all straws
+  for (size_t i = 0; i < m_strawSurfacesCache.size(); i++) {
+    m_strawSurfacesCache[i].store(nullptr);
+  }
+}
+
+void
+TRT_BaseElement::updateAllCaches()
+{
+  // delete the caches first
+  deleteCache();
+  // Strawlayer caches
+  if (not m_surfaceCache){
+    createSurfaceCache();
+  }
+  // Loop over all straws and request items that get cached.
+  for (unsigned int iStraw = 0; iStraw < nStraws(); iStraw++) {
+    Identifier strawId = m_idHelper->straw_id(identify(), iStraw);
+    createSurfaceCache(strawId);
+  }
+}
+
+const TRT_Conditions*
+TRT_BaseElement::conditions() const
+{
+  return m_conditions;
+}
 
-  TRT_BaseElement::TRT_BaseElement(const GeoVFullPhysVol *volume, const Identifier & id, const TRT_ID * idHelper, const TRT_Conditions * conditions, const GeoAlignmentStore* geoAlignStore) :
-        Trk::TrkDetElementBase(volume),
-        m_id(id),
-        m_idHelper(idHelper),
-        m_conditions(conditions),
-        m_strawSurfaces(nullptr),
-        m_strawSurfacesCache(nullptr),
-        m_surfaceCache{},
-        m_surface{},
-        m_surfaces{},
-        m_mutex{},
-        m_geoAlignStore(geoAlignStore)
-    {
-        m_idHash = m_idHelper->straw_layer_hash(id);
-    }
-
-    TRT_BaseElement::TRT_BaseElement(const TRT_BaseElement&right, const GeoAlignmentStore* geoAlignmentStore):
-    Trk::TrkDetElementBase(right.getMaterialGeom()),
-    m_id (right.m_id),
-    m_idHash (right.m_idHash),
-    m_idHelper (right.m_idHelper),
-    m_conditions (right.m_conditions),
-    m_geoAlignStore(geoAlignmentStore)
-    {
-    }
-
-    Identifier TRT_BaseElement::identify() const
-    {
-        return m_id;
-    }
-
-    IdentifierHash TRT_BaseElement::identifyHash() const
-    {
-        return m_idHash;
-    }
-
-    // [0] GeoModel / CLHEP Access
-    const HepGeom::Transform3D TRT_BaseElement::getAbsoluteTransform(int straw) const
-    {
-        return Amg::EigenTransformToCLHEP(strawTransform(straw));
-    }
-
-    // [A] description of the current detector element : Amg first, then CLHEP converted methods
-    const Trk::Surface& TRT_BaseElement::surface() const
-    {
-        return elementSurface();
-    }
-
-    const Trk::SurfaceBounds& TRT_BaseElement::bounds() const
-    {
-        if (not m_surfaceCache) createSurfaceCache();
-        return *(m_surfaceCache->bounds());
-    }
-
-    const Amg::Transform3D& TRT_BaseElement::transform() const
-    {
-        if (not m_surfaceCache) createSurfaceCache();
-        return *(m_surfaceCache->transform());
-    }
-
-
-    const Amg::Vector3D& TRT_BaseElement::center() const
-    {
-        if (not m_surfaceCache) createSurfaceCache();
-        return *(m_surfaceCache->center());
-    }
-
-    const Amg::Vector3D& TRT_BaseElement::normal() const
-    {
-        if (not m_surfaceCache) createSurfaceCache();
-        return *(m_surfaceCache->normal());
-    }
-
-    // [B] Description of the individual straws
-    const Trk::Surface&  TRT_BaseElement::surface(const Identifier& id) const
-    {
-        int straw = m_idHelper->straw(id);
-        // Create vector of all straws.
-        if (!m_strawSurfaces) {createStrawSurfaces();}
-        Trk::Surface * surfacePtr = (*m_strawSurfaces)[straw].get();
-        if (!surfacePtr) {
-            createSurfaceCache(id);
-            surfacePtr = (*m_strawSurfaces)[straw].get();
-        }
-        return *surfacePtr;
-    }
-
-
-    const std::vector<const Trk::Surface*>& TRT_BaseElement::surfaces() const
-    {
-        std::lock_guard<std::mutex> lock{m_mutex};
-        if (!m_surfaces.size()){
-            m_surfaces.reserve(nStraws());
-            for (unsigned is = 0; is<nStraws(); ++is)
-                m_surfaces.push_back(&strawSurface(is));
-        }
-        return m_surfaces;
-    }
-
-
-    const Trk::SurfaceBounds& TRT_BaseElement::bounds(const Identifier&) const
-    {
-        return strawBounds();
-    }
-
-    const Amg::Transform3D& TRT_BaseElement::transform(const Identifier & id) const
-    {
-        int straw = m_idHelper->straw(id);
-        if (!m_strawSurfacesCache) {createStrawSurfacesCache();}
-        SurfaceCache* sCachePtr = (*m_strawSurfacesCache)[straw].get();
-        if (!sCachePtr) {
-            createSurfaceCache(id);
-            sCachePtr = (*m_strawSurfacesCache)[straw].get();
-        }
-        // forward the transform of the cache
-        return *(sCachePtr->transform());
-    }
-
-    const Amg::Transform3D& TRT_BaseElement::strawTransform(unsigned int straw) const
-    {
-      if (!m_strawSurfacesCache) {createStrawSurfacesCache();}
-        SurfaceCache* sCachePtr = (*m_strawSurfacesCache)[straw].get();
-        if (!sCachePtr) {
-            Identifier id =  m_idHelper->straw_id(identify(), straw);
-            createSurfaceCache(id);
-            sCachePtr = (*m_strawSurfacesCache)[straw].get();
-        }
-        // forward the transform of the cache
-        return *(sCachePtr->transform());
-    }
-
-
-    const Amg::Vector3D& TRT_BaseElement::normal(const Identifier&) const
-    {
-       // Not sure if the normal of the straw is ever used.
-       // nor is there a well defined normal.
-       // This wont be corrected for alignments.
-       // Just return the element normal
-       return normal();
-    }
-
-    const Amg::Vector3D& TRT_BaseElement::center(const Identifier & id) const
-    {
-        int straw = m_idHelper->straw(id);
-        if (!m_strawSurfacesCache) {createStrawSurfacesCache();}
-        SurfaceCache* sCachePtr = (*m_strawSurfacesCache)[straw].get();
-        if (!sCachePtr) {
-            createSurfaceCache(id);
-            sCachePtr = (*m_strawSurfacesCache)[straw].get();
-        }
-        // forward the transform of the cache
-        return *(sCachePtr->center());
-    }
-
-    const Trk::StraightLineSurface& TRT_BaseElement::strawSurface(int straw) const
-    {
-        // Create vector of all straws.
-        if (!m_strawSurfaces) {createStrawSurfaces();}
-        Trk::StraightLineSurface* surfacePtr = (*m_strawSurfaces)[straw].get();
-        if (!surfacePtr) {
-            // get the straw identifier to the given straw number and element identifier
-            Identifier id =  m_idHelper->straw_id(identify(), straw);
-            createSurfaceCache(id);
-            surfacePtr = (*m_strawSurfaces)[straw].get();
-        }
-        return *surfacePtr;
-    }
-
-    const Amg::Transform3D& TRT_BaseElement::strawTransform(int straw) const
-    {
-       if (!m_strawSurfacesCache) {createStrawSurfacesCache();}
-       SurfaceCache* sCachePtr = (*m_strawSurfacesCache)[straw].get();
-       if (!sCachePtr) {
-           Identifier id =  m_idHelper->straw_id(identify(), straw);
-           createSurfaceCache(id);
-           sCachePtr = (*m_strawSurfacesCache)[straw].get();
-       }
-       // forward the transform of the cache
-       return *(sCachePtr->transform());
-    }
-
-    const Amg::Vector3D& TRT_BaseElement::strawCenter(int straw) const
-    {
-        if (!m_strawSurfacesCache) {createStrawSurfacesCache();}
-        SurfaceCache* sCachePtr = (*m_strawSurfacesCache)[straw].get();
-        if (!sCachePtr) {
-            Identifier id =  m_idHelper->straw_id(identify(), straw);
-            createSurfaceCache(id);
-            sCachePtr = (*m_strawSurfacesCache)[straw].get();
-        }
-        // forward the transform of the cache
-        return *(sCachePtr->center());
-    }
-
-    Amg::Vector3D TRT_BaseElement::strawAxis(int straw) const
-    {
-        return ( strawTransform(straw).linear()*Amg::Vector3D::UnitZ()*strawDirection() );
-    }
-
-    /// ----- can be removed after full Amg migration ----------------------- (start)
-    // [A] in CLHEP
-
-    const HepGeom::Transform3D TRT_BaseElement::transformCLHEP() const
-    {
-        return Amg::EigenTransformToCLHEP(*(m_surfaceCache->transform()));
-    }
-
-    const HepGeom::Point3D<double> TRT_BaseElement::centerCLHEP() const
-    {
-
-        const Amg::Vector3D& cCenter = center();
-        return HepGeom::Point3D<double>(cCenter.x(),cCenter.y(),cCenter.z());
-    }
-
-    const HepGeom::Vector3D<double> TRT_BaseElement::normalCLHEP() const
-    {
-        const Amg::Vector3D& cNormal = normal();
-        return HepGeom::Vector3D<double>(cNormal.x(),cNormal.y(),cNormal.z());
-    }
-
-    // [B] in CLHEP
-
-    const HepGeom::Transform3D TRT_BaseElement::transformCLHEP(const Identifier & id) const
-    {
-        return Amg::EigenTransformToCLHEP(transform(id));
-    }
-
-    const HepGeom::Point3D<double> TRT_BaseElement::centerCLHEP(const Identifier & id) const
-    {
-        const Amg::Vector3D cCenter = center(id);
-        return HepGeom::Point3D<double>(cCenter.x(),cCenter.y(),cCenter.z());
-    }
-
-    const HepGeom::Vector3D<double> TRT_BaseElement::normalCLHEP(const Identifier&) const
-    {
-        // Not sure if the normal of the straw is ever used.
-        // nor is there a well defined normal.
-        // This wont be corrected for alignments.
-        // Just return the element normal
-        return normalCLHEP();
-    }
-
-    /// ----- can be removed after full Amg migration -----------------------  (end)
-
-
-    void TRT_BaseElement::createSurfaceCache(Identifier id) const
-    {
-      // should not happen, but worth the protection
-      if (!m_strawSurfacesCache) {
-        createStrawSurfacesCache();
-      }
-      if (!m_strawSurfaces) {
-        createStrawSurfaces();
-      }
-
-      int straw = m_idHelper->straw(id);
-
-      // get the StrawTransform from GeoModel
-      HepGeom::Transform3D cStrawTransform = calculateStrawTransform(straw);
-
-      // convert neccessary parts to Amg
-      if (!(*m_strawSurfacesCache)[straw]) {
-        Amg::Transform3D* sTransform = new Amg::Transform3D(Amg::CLHEPTransformToEigen(cStrawTransform));
-        Amg::Vector3D* sCenter = new Amg::Vector3D(sTransform->translation());
-        // create the surface cache & fill it
-        (*m_strawSurfacesCache)[straw] = std::make_unique<SurfaceCache>(sTransform, sCenter, nullptr, nullptr);
-      }
-      // creaete the surface only if needed (the links are still intact)
-      if (!(*m_strawSurfaces)[straw]) {
-        (*m_strawSurfaces)[straw] = std::make_unique<Trk::StraightLineSurface>(*this, id);
-      }
-    }
-
-    void TRT_BaseElement::invalidate()
-    {
-        // Invalidate the caches
-        // Call and barrel or endcap specific invalidation
-        invalidateOther();
-        // Its enough to delete and zero the caches.
-        deleteCache();
-    }
-
-    void TRT_BaseElement::deleteCache()
-    {
-        // invalidates the cache, surface object can still live
-
-        // for all straws
-        if (m_strawSurfacesCache) {
-          for (size_t i = 0; i < m_strawSurfacesCache->size(); i++) {
-            (*m_strawSurfacesCache)[i].reset();
-          }
-        }
-        m_strawSurfacesCache.store(nullptr);
-    }
-
-    void TRT_BaseElement::updateAllCaches()
-    {
-        // delete the caches first
-        deleteCache();
-        // Strawlayer caches
-        if (not m_surfaceCache) createSurfaceCache();
-        // Loop over all straws and request items that get cached.
-        for (unsigned int iStraw=0; iStraw < nStraws(); iStraw++) {
-            Identifier strawId = m_idHelper->straw_id(identify(),iStraw);
-            createSurfaceCache(strawId);
-        }
-    }
-
-    const TRT_Conditions* TRT_BaseElement::conditions() const
-    {
-        return m_conditions;
-    }
-
-    TRT_BaseElement::~TRT_BaseElement()
-    {
-    }
-
-    void
-    TRT_BaseElement::createStrawSurfaces() const
-    {
-      auto strawSurfaces = std::make_unique<
-        std::vector<std::unique_ptr<Trk::StraightLineSurface>>>(nStraws());
-      m_strawSurfaces.set(std::move(strawSurfaces));
-    }
-
-    void
-    TRT_BaseElement::createStrawSurfacesCache() const
-    {
-      auto strawSurfacesCache =
-        std::make_unique<std::vector<std::unique_ptr<SurfaceCache>>>(nStraws());
-      m_strawSurfacesCache.set(std::move(strawSurfacesCache));
-    }
 }
diff --git a/InnerDetector/InDetDetDescr/TRT_ReadoutGeometry/src/TRT_EndcapElement.cxx b/InnerDetector/InDetDetDescr/TRT_ReadoutGeometry/src/TRT_EndcapElement.cxx
index 05b17b2b919d03540f413ff61be5a35e9c3c7ffd..d583e97d4ede562a9734e50bb31d2c17903b3f55 100755
--- a/InnerDetector/InDetDetDescr/TRT_ReadoutGeometry/src/TRT_EndcapElement.cxx
+++ b/InnerDetector/InDetDetDescr/TRT_ReadoutGeometry/src/TRT_EndcapElement.cxx
@@ -6,14 +6,14 @@
 #include "InDetReadoutGeometry/SurfaceCache.h"
 #include "TRT_ReadoutGeometry/TRT_Conditions.h"
 
-#include "InDetIdentifier/TRT_ID.h"
 #include "Identifier/Identifier.h"
+#include "InDetIdentifier/TRT_ID.h"
 
-#include "GeoPrimitives/GeoPrimitives.h"
 #include "GeoPrimitives/CLHEPtoEigenConverter.h"
+#include "GeoPrimitives/GeoPrimitives.h"
 
-#include "GeoModelKernel/GeoDefinitions.h"
 #include "CLHEP/Geometry/Transform3D.h"
+#include "GeoModelKernel/GeoDefinitions.h"
 #include "GeoModelUtilities/GeoAlignmentStore.h"
 
 #include "TrkSurfaces/DiscBounds.h"
@@ -24,248 +24,284 @@
 
 #include "GeoModelUtilities/GeoAlignmentStore.h"
 
-
 namespace InDetDD {
 
-    TRT_EndcapElement::TRT_EndcapElement(const GeoVFullPhysVol *volume, 
-                                         const TRT_EndcapDescriptor *descriptor, 
-                                         bool isPositive, 
-                                         unsigned int wheelIndex, 
-                                         unsigned int strawLayIndex, 
-                                         unsigned int phiIndex,
-                                         const TRT_ID * idHelper,
-                                         const TRT_Conditions * conditions,
-                                         const GeoAlignmentStore* geoAlignStore) :
-        TRT_BaseElement(volume,
-        idHelper->layer_id((isPositive ? 2:-2), phiIndex, wheelIndex, strawLayIndex),
-			idHelper, conditions, geoAlignStore),
-        m_code(isPositive,wheelIndex,strawLayIndex,phiIndex),
-        m_descriptor(descriptor),
-        m_nextInZ(NULL),
-        m_previousInZ(NULL)
-    {
-    }
-
-  TRT_EndcapElement::TRT_EndcapElement(const TRT_EndcapElement &right, const GeoAlignmentStore* geoAlignStore) :
-    TRT_BaseElement(right,geoAlignStore),
-    m_code (right.m_code),
-    m_descriptor (right.m_descriptor),
-    m_nextInZ (right.m_nextInZ),
-    m_previousInZ (right.m_previousInZ)
-  {   
-  }   
-
-    TRT_EndcapElement::~TRT_EndcapElement()
-    {}
-
-    unsigned int TRT_EndcapElement::nStraws() const
-    {
-        return m_descriptor->nStraws();
-    }
-
-    const double & TRT_EndcapElement::strawLength() const
-    {
-        return m_descriptor->strawLength();
-    }  
-
-
-    const TRT_EndcapConditions * TRT_EndcapElement::getConditionsData() const
-    {
-        return NULL;
-    }
-
-    const TRT_EndcapDescriptor * TRT_EndcapElement::getDescriptor() const
-    {
-        return m_descriptor;
-    }
-
-    void TRT_EndcapElement::setNextInZ(const TRT_EndcapElement *element)
-    {
-        m_nextInZ=element;
-    }
-
-    void TRT_EndcapElement::setPreviousInZ(const TRT_EndcapElement *element)
-    {
-        m_previousInZ=element;
-    }
-
-
-    HepGeom::Transform3D TRT_EndcapElement::calculateStrawTransform(int straw) const 
-    {
-        // NB The tranformation to a straw is reconstructed here precisely as
-        // it was ... hopefully... in the factory.  One could eliminate this 
-        // requirement and make the code a little more robust in this regard but
-        // at the cost of doubling the descriptors.  (One descriptor now suffices
-        // for both positive and negative endcaps).
-        const GeoXF::Function *f= m_descriptor->getStrawTransform();
-
-        if (f) {
-            int istraw = m_code.isPosZ() ? straw : m_descriptor->nStraws()-1 - straw;
-
-            size_t offsetInto = m_descriptor->getStrawTransformOffset();
-
-            return Amg::EigenTransformToCLHEP(getMaterialGeom()->getAbsoluteTransform()*((*f)(istraw+offsetInto))) 
-	      * calculateLocalStrawTransform(straw);
-            ////return conditions()->solenoidFrame() 
-            ////  * getMaterialGeom()->getAbsoluteTransform()*((*f)(istraw+offsetInto));
-
-        } else {
-
-            // Will not work properly with alignments.
-            std::cout << "ALTERNATIVE METHOD" << std::endl;
-
-            double phi = m_descriptor->startPhi() +  m_descriptor->strawPitch() * straw;
-            double r = m_descriptor->innerRadius() + 0.5 * m_descriptor->strawLength() ;
-            CLHEP::Hep3Vector pos(r*cos(phi)
-				  , r*sin(phi)
-				  , (Amg::EigenTransformToCLHEP(getMaterialGeom()->getAbsoluteTransform())*HepGeom::Point3D<double>()).z());
-            CLHEP::HepRotation rot;
-            // Axis (in local (0,0,1)) points towards beam axis.
-            rot.rotateY(-0.5*M_PI); // Make it point along -ve X.
-            rot.rotateZ(phi);
-            return HepGeom::Transform3D(rot, pos);
-        }
-
-    }
-
-    // The L3 Alignment 
-    HepGeom::Transform3D TRT_EndcapElement::calculateLocalStrawTransform(int straw) const
-    {
-        const TRTCond::StrawDxContainer* container = conditions()->dxContainer();
-        HepGeom::Transform3D rc ;
-        if (container) {
-
-            // important note: dx1 moves the 'negative' wire endpoint end dx2
-            // the 'positive' wire endpoint in the local straw frame. 
-            // In the global frame, 'dx1' corresponds to the readout side and 'dx2'
-            // to the side closest the beampipe.
-
-            int bec = getCode().isPosZ() ? +2 : -2 ;
-            int wheel = getCode().getWheelIndex();
-            int phimodule = getCode().getPhiIndex();
-            int strawlayer = getCode().getStrawLayerIndex();
-            TRTCond::ExpandedIdentifier id = TRTCond::ExpandedIdentifier(bec
-                ,wheel
-                ,phimodule
-                ,strawlayer
-                ,straw
-                ,TRTCond::ExpandedIdentifier::STRAW);
-
-            double dx1 = container->getDx1(id);
-            double dx2 = container->getDx2(id);
-            double ang = (dx2-dx1)/strawLength();
-            double dy = -1*(dx2+dx1)/2.;
-
-            // In the local straw frame:
-            //   - the z-axis is along the straw and points toward the beampipe
-            //   - the x-axis is along global-z and away from the interaction point 
-            //          (locX = globZ A-side / locX = -1 *gobZ C-side) 
-            //   - the y-axis is along global phi_hat direction determined by the other 2. 
-            //          (clockwise C-side, counter clockwise A-Side)
-            rc = HepGeom::TranslateY3D(dy)*HepGeom::RotateX3D(ang) ;
-        }
-        return rc ;
-    }
-
-    HepGeom::Transform3D TRT_EndcapElement::defStrawTransform(int straw) const 
-    {
-        // Same as calculateStrawTransform, except we use getDefAbsoluteTransform()
-        // rather than  getAbsoluteTransform()
-        
-        // NB The tranformation to a straw is reconstructed here precisely as
-        // it was ... hopefully... in the factory.  One could eliminate this 
-        // requirement and make the code a little more robust in this regard but
-        // at the cost of doubling the descriptors.  (One descriptor now suffices
-        // for both positive and negative endcaps).
-
-        const GeoXF::Function *f= m_descriptor->getStrawTransform();
-
-        if (f) {
-
-            int istraw = m_code.isPosZ() ? straw : m_descriptor->nStraws()-1 - straw;
-
-            size_t offsetInto = m_descriptor->getStrawTransformOffset();
-            return Amg::EigenTransformToCLHEP(getMaterialGeom()->getDefAbsoluteTransform()*((*f)(istraw+offsetInto)));
-
-        } else {
-
-            double phi = m_descriptor->startPhi() +  m_descriptor->strawPitch() * straw;
-            double r = m_descriptor->innerRadius() + 0.5 * m_descriptor->strawLength() ;
-            CLHEP::Hep3Vector pos(r*cos(phi)
-				  , r*sin(phi)
-				  , (Amg::EigenTransformToCLHEP(getMaterialGeom()->getDefAbsoluteTransform())*HepGeom::Point3D<double>()).z());
-            CLHEP::HepRotation rot;
-            rot.rotateY(-0.5*M_PI); // Make it point along -ve X.
-            rot.rotateZ(phi);
-            return HepGeom::Transform3D(rot, pos);
-        }
-
-    }
-
-    const Trk::SurfaceBounds& TRT_EndcapElement::strawBounds() const
-    {
-        return m_descriptor->strawBounds();
-    }
-
-    const Trk::Surface& TRT_EndcapElement::elementSurface() const 
-    {
-        if (not m_surface) m_surface.set(std::make_unique<Trk::DiscSurface>(*this));
-        return *m_surface;
-    }
-
-    void TRT_EndcapElement::createSurfaceCache() const
-    {
-        // Calculate the surface 
-        double phiCenter = m_descriptor->startPhi() +  m_descriptor->strawPitch() * 0.5*(nStraws()-1);
-        double phiHalfWidth = 0.5 * m_descriptor->strawPitch() * nStraws();
-        double rMin = m_descriptor->innerRadius();
-        double rMax = rMin +m_descriptor->strawLength();
-
-        // The transform of the endcap is a translation in z for no
-        // misalignement. For the -ve endcap there is also a 180deg rotation
-        // around the y axis. getAbsoluteTransform() will also include the
-        // misalignment.  
-        //
-        // To get the transform of the element we have to first rotate
-        // around z to the phi center of the element.  We want the local z
-        // to point in the same direction for both endcaps (approximately
-        // global z axis). For the negative endcap we therefore have to
-        // rotate 180 CLHEP::deg around the Y axis.
-        
-        // We need to rotate to phi center before we apply the
-        // misalignment. However, in the negative endcap the the phi
-        // location is inverted (due to 180 rotation around y axis). This is
-        // taken care of by the extra 180 CLHEP::deg rotation around Y that we do
-        // to get the z axis pointing in the correct direction.
-
-        Amg::Transform3D * transform = 0;
-        if  (m_code.isPosZ())
-	  transform = new Amg::Transform3D((getMaterialGeom()->getAbsoluteTransform() * GeoTrf::RotateZ3D(phiCenter)));
-        else
-	  transform = new Amg::Transform3D((getMaterialGeom()->getAbsoluteTransform() * GeoTrf::RotateY3D(180*CLHEP::deg) * GeoTrf::RotateZ3D(phiCenter)));
-
-        // create the igredients and the cache
-        Trk::DiscBounds* bounds = new Trk::DiscBounds(rMin, rMax, phiHalfWidth);
-        Amg::Vector3D*  center = new Amg::Vector3D(transform->translation());
-        Amg::Vector3D*  normal = new Amg::Vector3D(transform->rotation().col(2));
-        m_surfaceCache.set(std::make_unique<SurfaceCache>(transform, center, normal, bounds));
-        // create the surface if needed 
-        if (not m_surface) elementSurface();
-    }
-
-
-
-    int TRT_EndcapElement::strawDirection() const
-    {
-  // Return +1 if the straw local axis is in the same direction as increasing eta direction,
+TRT_EndcapElement::TRT_EndcapElement(const GeoVFullPhysVol* volume,
+                                     const TRT_EndcapDescriptor* descriptor,
+                                     bool isPositive,
+                                     unsigned int wheelIndex,
+                                     unsigned int strawLayIndex,
+                                     unsigned int phiIndex,
+                                     const TRT_ID* idHelper,
+                                     const TRT_Conditions* conditions,
+                                     const GeoAlignmentStore* geoAlignStore)
+  :
+
+  TRT_BaseElement(volume,
+                  idHelper->layer_id((isPositive ? 2 : -2),
+                                     phiIndex,
+                                     wheelIndex,
+                                     strawLayIndex),
+                  idHelper,
+                  conditions,
+                  geoAlignStore)
+  , m_code(isPositive, wheelIndex, strawLayIndex, phiIndex)
+  , m_descriptor(descriptor)
+  , m_nextInZ(nullptr)
+  , m_previousInZ(nullptr)
+{
+  m_nstraws = m_descriptor->nStraws();
+  m_strawSurfaces.resize(m_nstraws);
+  m_strawSurfacesCache.resize(m_nstraws);
+}
+
+TRT_EndcapElement::TRT_EndcapElement(const TRT_EndcapElement& right,
+                                     const GeoAlignmentStore* geoAlignStore)
+  : TRT_BaseElement(right, geoAlignStore)
+  , m_code(right.m_code)
+  , m_descriptor(right.m_descriptor)
+  , m_nextInZ(right.m_nextInZ)
+  , m_previousInZ(right.m_previousInZ)
+{
+  m_nstraws = right.m_nstraws;
+  m_strawSurfaces.resize(m_nstraws);
+  m_strawSurfacesCache.resize(m_nstraws);
+}
+
+TRT_EndcapElement::~TRT_EndcapElement() = default;
+
+
+const double&
+TRT_EndcapElement::strawLength() const
+{
+  return m_descriptor->strawLength();
+}
+
+const TRT_EndcapConditions*
+TRT_EndcapElement::getConditionsData() const
+{
+  return nullptr;
+}
+
+const TRT_EndcapDescriptor*
+TRT_EndcapElement::getDescriptor() const
+{
+  return m_descriptor;
+}
+
+void
+TRT_EndcapElement::setNextInZ(const TRT_EndcapElement* element)
+{
+  m_nextInZ = element;
+}
+
+void
+TRT_EndcapElement::setPreviousInZ(const TRT_EndcapElement* element)
+{
+  m_previousInZ = element;
+}
+
+HepGeom::Transform3D
+TRT_EndcapElement::calculateStrawTransform(int straw) const
+{
+  // NB The transformation to a straw is reconstructed here precisely as
+  // it was ... hopefully... in the factory.  One could eliminate this
+  // requirement and make the code a little more robust in this regard but
+  // at the cost of doubling the descriptors.  (One descriptor now suffices
+  // for both positive and negative endcaps).
+  const GeoXF::Function* f = m_descriptor->getStrawTransform();
+
+  if (f) {
+    int istraw = m_code.isPosZ() ? straw : m_descriptor->nStraws() - 1 - straw;
+
+    size_t offsetInto = m_descriptor->getStrawTransformOffset();
+
+    return Amg::EigenTransformToCLHEP(
+             getMaterialGeom()->getAbsoluteTransform() *
+             ((*f)(istraw + offsetInto))) *
+           calculateLocalStrawTransform(straw);
+    ////return conditions()->solenoidFrame()
+    ////  * getMaterialGeom()->getAbsoluteTransform()*((*f)(istraw+offsetInto));
+
+  } else {
+
+    // Will not work properly with alignments.
+    std::cout << "ALTERNATIVE METHOD" << std::endl;
+
+    double phi = m_descriptor->startPhi() + m_descriptor->strawPitch() * straw;
+    double r = m_descriptor->innerRadius() + 0.5 * m_descriptor->strawLength();
+    CLHEP::Hep3Vector pos(
+      r * cos(phi),
+      r * sin(phi),
+      (Amg::EigenTransformToCLHEP(getMaterialGeom()->getAbsoluteTransform()) *
+       HepGeom::Point3D<double>())
+        .z());
+    CLHEP::HepRotation rot;
+    // Axis (in local (0,0,1)) points towards beam axis.
+    rot.rotateY(-0.5 * M_PI); // Make it point along -ve X.
+    rot.rotateZ(phi);
+    return HepGeom::Transform3D(rot, pos);
+  }
+}
+
+// The L3 Alignment
+HepGeom::Transform3D
+TRT_EndcapElement::calculateLocalStrawTransform(int straw) const
+{
+  const TRTCond::StrawDxContainer* container = conditions()->dxContainer();
+  HepGeom::Transform3D rc;
+  if (container) {
+
+    // important note: dx1 moves the 'negative' wire endpoint and dx2
+    // the 'positive' wire endpoint in the local straw frame.
+    // In the global frame, 'dx1' corresponds to the readout side and 'dx2'
+    // to the side closest the beampipe.
+
+    int bec = getCode().isPosZ() ? +2 : -2;
+    int wheel = getCode().getWheelIndex();
+    int phimodule = getCode().getPhiIndex();
+    int strawlayer = getCode().getStrawLayerIndex();
+    TRTCond::ExpandedIdentifier id =
+      TRTCond::ExpandedIdentifier(bec,
+                                  wheel,
+                                  phimodule,
+                                  strawlayer,
+                                  straw,
+                                  TRTCond::ExpandedIdentifier::STRAW);
+
+    double dx1 = container->getDx1(id);
+    double dx2 = container->getDx2(id);
+    double ang = (dx2 - dx1) / strawLength();
+    double dy = -1 * (dx2 + dx1) / 2.;
+
+    // In the local straw frame:
+    //   - the z-axis is along the straw and points toward the beampipe
+    //   - the x-axis is along global-z and away from the interaction point
+    //          (locX = globZ A-side / locX = -1 *gobZ C-side)
+    //   - the y-axis is along global phi_hat direction determined by the
+    //   other 2.
+    //          (clockwise C-side, counter clockwise A-Side)
+    rc = HepGeom::TranslateY3D(dy) * HepGeom::RotateX3D(ang);
+  }
+  return rc;
+}
+
+HepGeom::Transform3D
+TRT_EndcapElement::defStrawTransform(int straw) const
+{
+  // Same as calculateStrawTransform, except we use getDefAbsoluteTransform()
+  // rather than  getAbsoluteTransform()
+
+  // NB The transformation to a straw is reconstructed here precisely as
+  // it was ... hopefully... in the factory.  One could eliminate this
+  // requirement and make the code a little more robust in this regard but
+  // at the cost of doubling the descriptors.  (One descriptor now suffices
+  // for both positive and negative endcaps).
+
+  const GeoXF::Function* f = m_descriptor->getStrawTransform();
+
+  if (f) {
+
+    int istraw = m_code.isPosZ() ? straw : m_descriptor->nStraws() - 1 - straw;
+
+    size_t offsetInto = m_descriptor->getStrawTransformOffset();
+    return Amg::EigenTransformToCLHEP(
+      getMaterialGeom()->getDefAbsoluteTransform() *
+      ((*f)(istraw + offsetInto)));
+
+  } else {
+
+    double phi = m_descriptor->startPhi() + m_descriptor->strawPitch() * straw;
+    double r = m_descriptor->innerRadius() + 0.5 * m_descriptor->strawLength();
+    CLHEP::Hep3Vector pos(r * cos(phi),
+                          r * sin(phi),
+                          (Amg::EigenTransformToCLHEP(
+                             getMaterialGeom()->getDefAbsoluteTransform()) *
+                           HepGeom::Point3D<double>())
+                            .z());
+    CLHEP::HepRotation rot;
+    rot.rotateY(-0.5 * M_PI); // Make it point along -ve X.
+    rot.rotateZ(phi);
+    return HepGeom::Transform3D(rot, pos);
+  }
+}
+
+const Trk::SurfaceBounds&
+TRT_EndcapElement::strawBounds() const
+{
+  return m_descriptor->strawBounds();
+}
+
+const Trk::Surface&
+TRT_EndcapElement::elementSurface() const
+{
+  if (not m_surface)
+    m_surface.set(std::make_unique<Trk::DiscSurface>(*this));
+  return *m_surface;
+}
+
+void
+TRT_EndcapElement::createSurfaceCache() const
+{
+  // Calculate the surface
+  double phiCenter = m_descriptor->startPhi() +
+                     m_descriptor->strawPitch() * 0.5 * (nStraws() - 1);
+  double phiHalfWidth = 0.5 * m_descriptor->strawPitch() * nStraws();
+  double rMin = m_descriptor->innerRadius();
+  double rMax = rMin + m_descriptor->strawLength();
+
+  // The transform of the endcap is a translation in z for no
+  // misalignment. For the -ve endcap there is also a 180deg rotation
+  // around the y axis. getAbsoluteTransform() will also include the
+  // misalignment.
+  //
+  // To get the transform of the element we have to first rotate
+  // around z to the phi center of the element.  We want the local z
+  // to point in the same direction for both endcaps (approximately
+  // global z axis). For the negative endcap we therefore have to
+  // rotate 180 CLHEP::deg around the Y axis.
+
+  // We need to rotate to phi center before we apply the
+  // misalignment. However, in the negative endcap the phi
+  // location is inverted (due to 180 rotation around y axis). This is
+  // taken care of by the extra 180 CLHEP::deg rotation around Y that we do
+  // to get the z axis pointing in the correct direction.
+
+  Amg::Transform3D* transform = nullptr;
+  if (m_code.isPosZ())
+    transform =
+      new Amg::Transform3D((getMaterialGeom()->getAbsoluteTransform() *
+                            GeoTrf::RotateZ3D(phiCenter)));
+  else
+    transform = new Amg::Transform3D(
+      (getMaterialGeom()->getAbsoluteTransform() *
+       GeoTrf::RotateY3D(180 * CLHEP::deg) * GeoTrf::RotateZ3D(phiCenter)));
+
+  // create the ingredients and the cache
+  Trk::DiscBounds* bounds = new Trk::DiscBounds(rMin, rMax, phiHalfWidth);
+  Amg::Vector3D* center = new Amg::Vector3D(transform->translation());
+  Amg::Vector3D* normal = new Amg::Vector3D(transform->rotation().col(2));
+  m_surfaceCache.set(
+    std::make_unique<SurfaceCache>(transform, center, normal, bounds));
+  // create the surface if needed
+  if (not m_surface){
+    elementSurface();
+  }
+}
+
+int
+TRT_EndcapElement::strawDirection() const
+{
+  // Return +1 if the straw local axis is in the same direction as increasing
+  // eta direction,
   //  -1 otherwise.
-  // The straw axis by convention goes in the direction away from the readout. This is
-  // towards the beam pipe. For +ve endcap it is what we want. For -ve endcap it is oppposite.
-  // 
-  // 
-        return (m_code.isPosZ()) ? +1 : -1;
-    }
+  // The straw axis by convention goes in the direction away from the readout.
+  // This is towards the beam pipe. For +ve endcap it is what we want. For -ve
+  // endcap it is opposite.
+  //
+  //
+  return (m_code.isPosZ()) ? +1 : -1;
+}
 
 } // end namespace
 
-
diff --git a/InnerDetector/InDetEventCnv/InDetPrepRawDataToxAOD/src/PixelPrepDataToxAOD.cxx b/InnerDetector/InDetEventCnv/InDetPrepRawDataToxAOD/src/PixelPrepDataToxAOD.cxx
index ddbd2c9bd8b364247ea0d305a4cf184892140f73..93cc1a051d1674e4770654dc5b8a8200e0a975fa 100644
--- a/InnerDetector/InDetEventCnv/InDetPrepRawDataToxAOD/src/PixelPrepDataToxAOD.cxx
+++ b/InnerDetector/InDetEventCnv/InDetPrepRawDataToxAOD/src/PixelPrepDataToxAOD.cxx
@@ -1024,7 +1024,7 @@ void  PixelPrepDataToxAOD::addNNTruthInfo(  xAOD::TrackMeasurementValidation* xp
     positions_indexY[hitNumber] = truthIndexY - cellIdWeightedPosition.etaIndex();
 
     HepGeom::Point3D<double> diffPositions = (siHit.localEndPosition() - siHit.localStartPosition());
-    double bowphi = atan2( diffPositions.y(), diffPositions.x() );
+    double bowphi = std::atan2( diffPositions.y(), diffPositions.x() );
    
 
     //Truth Track incident angle theta
diff --git a/InnerDetector/InDetExample/InDetAlignExample/CMakeLists.txt b/InnerDetector/InDetExample/InDetAlignExample/CMakeLists.txt
index 0ea8406d7061d981a356d229ab5691b2f2381844..c133a1a53a84e1023619ade11ecce5fad602d775 100644
--- a/InnerDetector/InDetExample/InDetAlignExample/CMakeLists.txt
+++ b/InnerDetector/InDetExample/InDetAlignExample/CMakeLists.txt
@@ -1,13 +1,11 @@
-################################################################################
-# Package: InDetAlignExample
-################################################################################
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 # Declare the package name:
 atlas_subdir( InDetAlignExample )
 
 # Install files from the package:
-atlas_install_python_modules( python/*.py )
+atlas_install_python_modules( python/*.py POST_BUILD_CMD ${ATLAS_FLAKE8} )
 atlas_install_joboptions( share/*.py share/InDetAlignExampleFlags/*.py share/Data/*.txt )
-atlas_install_runtime( test/InDetAlignExample_TestConfiguration.xml macros/*.cc html/*.html )
+atlas_install_runtime( macros/*.cc html/*.html )
 atlas_install_scripts( share/RunIterator_Run2Rel19.py )
 
diff --git a/InnerDetector/InDetExample/InDetAlignExample/python/IDAlignGridDatasets.py b/InnerDetector/InDetExample/InDetAlignExample/python/IDAlignGridDatasets.py
index 0fb0488194beab57db1550508b21d71a2fb724ae..3615f9183d49890b699cb3ef30bdb7b945bda104 100644
--- a/InnerDetector/InDetExample/InDetAlignExample/python/IDAlignGridDatasets.py
+++ b/InnerDetector/InDetExample/InDetAlignExample/python/IDAlignGridDatasets.py
@@ -6,13 +6,8 @@
 #    Authors: Jike Wang      (jike.wang@cern.ch)
 #################################################################
 
-from __future__ import print_function
-
-import os, types
+import os
 import sys
-
-from future import standard_library
-standard_library.install_aliases()
 import subprocess
 
 
@@ -105,14 +100,14 @@ class ConfiguredIDAlignDatasets:
 		if ("mc09" == self.__datasetType or "MC09" == self.__datasetType) :
 			oneDatasetName = "mc09_valid.107271.Multimuons_pt9.recon.ESD.e436_s561_r731"
 			return oneDatasetName
-	 
+
 		elif self.containType("Customed") and "Customed" == topology :
 			oneDatasetName = self.__DatasetsOptions["CustomedDatasetsNameList"][0]
 
 		elif ("Collision" == topology) :  
 			if self.stream() == "MinBias" and self.containType("900GeV") :
 				oneDatasetName = "data09_900GeV.%08d.physics_MinBias.recon.ESD.%s" % ( int(self.__DatasetsOptions["CollisionRunList"][0]), self.__DatasetsOptions["CollisionRecoTag"][0] )
-									                                  
+
 			if self.stream() == "MinBias" and self.containType("7TeV"):
 				oneDatasetName = "data10_7TeV.%08d.physics_MinBias.recon.ESD.%s"   % ( int(self.__DatasetsOptions["CollisionRunList"][0]), self.__DatasetsOptions["CollisionRecoTag"][0] )
 
@@ -244,8 +239,6 @@ class ConfiguredIDAlignDatasets:
 				namesList.append(datasetName)
 			str = ",".join(namesList)
 			return str
-                #elif("CosmicBon" == topology)
-
 
 
 	def recoScript(self, topology = "", i = 0) :
@@ -257,8 +250,8 @@ class ConfiguredIDAlignDatasets:
 
 			#elif "mc" in self.__DatasetsOptions["CustomedDatasetsNameList"][i] :
 			#	recoScript = "InDetAlignExample/loadInDetRec.py"                               
-	     	         
-			else:                           
+
+			else:
 				recoScript = "InDetAlignExample/loadInDetRec_new.py"
 
 			return recoScript 
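
ConfiguredIDAlignDatasets assembles the collision dataset names above by zero-padding the run number to eight digits and appending the reconstruction tag from the options dictionary. A tiny sketch with a made-up run number and tag (the real values come from CollisionRunList and CollisionRecoTag) shows the formatting:

    # Hypothetical run number and reco tag, only to illustrate the pattern.
    run_number = 142383
    reco_tag   = "r988_p62"

    name_900gev = "data09_900GeV.%08d.physics_MinBias.recon.ESD.%s" % (run_number, reco_tag)
    name_7tev   = "data10_7TeV.%08d.physics_MinBias.recon.ESD.%s"   % (run_number, reco_tag)

    print(name_900gev)   # data09_900GeV.00142383.physics_MinBias.recon.ESD.r988_p62
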
diff --git a/InnerDetector/InDetExample/InDetAlignExample/python/InDetAlignExample_IteratorClasses.py b/InnerDetector/InDetExample/InDetAlignExample/python/InDetAlignExample_IteratorClasses.py
index 92705175ff3deb49c4dd3e171a859d69ad4db2eb..71d109bfb4642fde8c01fca8e5119274b46c37cc 100644
--- a/InnerDetector/InDetExample/InDetAlignExample/python/InDetAlignExample_IteratorClasses.py
+++ b/InnerDetector/InDetExample/InDetAlignExample/python/InDetAlignExample_IteratorClasses.py
@@ -5,7 +5,6 @@
 #  Class for local processing
 #
 # =====================================================================
-from __future__ import print_function
 
 from threading import Thread
 import os
@@ -43,87 +42,85 @@ class runProcess(Thread):
                 self.status=1
                 break
             
-import os
 class SortCpus:
-	def __init__(self, TOTALCPUS, LOCALDIR, FILELIST, OutputLevel):
-		def sort_by_value(d):
-			""" Returns the keys of dictionary d sorted by their values """
-			items=d.items()
-			backitems=[ [v[1],v[0]] for v in items]
-			backitems.sort()
-			backitems.reverse()	
-			return [ backitems[i][1] for i in range(0,len(backitems))]		
-		self.OutputLevel = OutputLevel
-		inputfiles = open(FILELIST, "r")
-		filelist = inputfiles.read().split()
-		inputfiles.close()
-		if not LOCALDIR:
-			print ("Reading Custom File")
-			FinalListSorted = []
-			for line in filelist:
-				if line and line[0] != '#':
-					FinalListSorted.append(line)
-			print (FinalListSorted)
-			
-		elif "castor" in LOCALDIR:
-			print ("Reading castor directory. Please wait...")
-			extendedFileList = os.popen("rfdir "+ LOCALDIR[7:]).read().splitlines()
-		else:
-			print ("Reading directory. Please wait...")
-			extendedFileList = os.popen("ls -l "+ LOCALDIR).read().splitlines()
+        def __init__(self, TOTALCPUS, LOCALDIR, FILELIST, OutputLevel):
+                def sort_by_value(d):
+                        """ Returns the keys of dictionary d sorted by their values """
+                        items=d.items()
+                        backitems=[ [v[1],v[0]] for v in items]
+                        backitems.sort()
+                        backitems.reverse()
+                        return [ backitems[i][1] for i in range(0,len(backitems))]
+                self.OutputLevel = OutputLevel
+                inputfiles = open(FILELIST, "r")
+                filelist = inputfiles.read().split()
+                inputfiles.close()
+                if not LOCALDIR:
+                        print ("Reading Custom File")
+                        FinalListSorted = []
+                        for line in filelist:
+                                if line and line[0] != '#':
+                                        FinalListSorted.append(line)
+                        print (FinalListSorted)
+
+                elif "castor" in LOCALDIR:
+                        print ("Reading castor directory. Please wait...")
+                        extendedFileList = os.popen("rfdir "+ LOCALDIR[7:]).read().splitlines()
+                else:
+                        print ("Reading directory. Please wait...")
+                        extendedFileList = os.popen("ls -l "+ LOCALDIR).read().splitlines()
 
-		if LOCALDIR:
-			i = 0
-			SizeList = {}
-			for line in extendedFileList:
-				curr = line.split()
-				SizeList[i] = {}
-				SizeList[i][0] = curr[8]
-				SizeList[i][1] = curr[4]
-				i = i+1
-			FinalList = {}
-			count = 0
-			for i in range(0,len(SizeList)):
-				if SizeList[i][0] in filelist:
-					#print (SizeList[i][0], " size:", SizeList[i][1])
-					FinalList[SizeList[i][0]] = int(SizeList[i][1])
+                if LOCALDIR:
+                        i = 0
+                        SizeList = {}
+                        for line in extendedFileList:
+                                curr = line.split()
+                                SizeList[i] = {}
+                                SizeList[i][0] = curr[8]
+                                SizeList[i][1] = curr[4]
+                                i = i+1
+                        FinalList = {}
+                        for i in range(0,len(SizeList)):
+                                if SizeList[i][0] in filelist:
+                                        #print (SizeList[i][0], " size:", SizeList[i][1])
+                                        FinalList[SizeList[i][0]] = int(SizeList[i][1])
 
-			#SizeListSorted = [ (k,SizeList[k]) for k in sorted(SizeList.values())] 
-			FinalListSorted = sort_by_value(FinalList)
-			#print ("Sorted list" )
-			#for i in range(0,len(FinalListSorted)):
-			#	print (FinalListSorted[i], "\tsize:\t", FinalList[FinalListSorted[i]])
-		currCPU = 0
-		reverse = False
-		self.CPUsFiles = {}
-		for i in range(0,len(FinalListSorted)):
-			#print (FinalListSorted[i], "CPU: ", currCPU)
-			if currCPU in self.CPUsFiles:
-				self.CPUsFiles[currCPU].append(LOCALDIR+FinalListSorted[i])
-			else:
-				self.CPUsFiles[currCPU] = [LOCALDIR+FinalListSorted[i]]
-			if(not reverse):
-				currCPU = currCPU + 1
-				if(currCPU == TOTALCPUS):
-					#currCPU = currCPU - 1
-					reverse = not reverse
-			if(reverse):
-				currCPU = currCPU - 1
-				if(currCPU == -1):
-					currCPU = currCPU + 1
-					reverse = not reverse
+                        #SizeListSorted = [ (k,SizeList[k]) for k in sorted(SizeList.values())]
+                        FinalListSorted = sort_by_value(FinalList)
+                        #print ("Sorted list" )
+                        #for i in range(0,len(FinalListSorted)):
+                        #       print (FinalListSorted[i], "\tsize:\t", FinalList[FinalListSorted[i]])
+                currCPU = 0
+                reverse = False
+                self.CPUsFiles = {}
+                for i in range(0,len(FinalListSorted)):
+                        #print (FinalListSorted[i], "CPU: ", currCPU)
+                        if currCPU in self.CPUsFiles:
+                                self.CPUsFiles[currCPU].append(LOCALDIR+FinalListSorted[i])
+                        else:
+                                self.CPUsFiles[currCPU] = [LOCALDIR+FinalListSorted[i]]
+                        if(not reverse):
+                                currCPU = currCPU + 1
+                                if(currCPU == TOTALCPUS):
+                                        #currCPU = currCPU - 1
+                                        reverse = not reverse
+                        if(reverse):
+                                currCPU = currCPU - 1
+                                if(currCPU == -1):
+                                        currCPU = currCPU + 1
+                                        reverse = not reverse
                     
                                         
-	def getCPU(self,CURRENTCPU):
-		if self.OutputLevel=='DEBUG':
-			print ("|",40*"-"," CPU #: ", CURRENTCPU, 40*"-", "|")
-			for line in self.CPUsFiles[CURRENTCPU]:
-				print ("|  - ",line)
-			print ("|",93*"-","|")
-		return self.CPUsFiles[CURRENTCPU]
+        def getCPU(self,CURRENTCPU):
+                if self.OutputLevel=='DEBUG':
+                        print ("|",40*"-"," CPU #: ", CURRENTCPU, 40*"-", "|")
+                        for line in self.CPUsFiles[CURRENTCPU]:
+                                print ("|  - ",line)
+                        print ("|",93*"-","|")
+                return self.CPUsFiles[CURRENTCPU]
            
                 
-		
+
 class writeJob:
     def __init__(self,
                  OutputPath,
@@ -196,7 +193,7 @@ class writeJob:
         topOptions=open(topOptionFileName,'r')
             
         #job=open(TempPath+"/"+self.JOBNAME,'w')
-        job=open(self.JOBNAME,'w')	
+        job=open(self.JOBNAME,'w')
         job.write(topOptions.readline())
         job.write(topOptions.readline())
         job.write("\n")
@@ -315,7 +312,7 @@ class writeScript:
         script.write("source %s/../InnerDetector/InDetExample/InDetAlignExample/cmt/setup.sh \n" % self.CMTDIR)
 
 #        script.write("source %s/../%s/InnerDetector/InDetExample/InDetAlignExample/cmt/setup.sh \n" % (self.CMTDIR,self.ATHENAREL))
-		
+
 #        script.write("cd %s \n" % temppath)
         script.write("cd %s \n" % self.RUNPATH)
 
@@ -375,7 +372,6 @@ class collectRAmodules:
         print ("------------------------------------------")
         print ("  Collecting Iter%d RA module files" % self.i)
         print ("------------------------------------------")
-        HOME = os.environ['HOME']
         os.chdir("%s/Iter%d" % (self.OutputPath,self.i))
         os.mkdir("moduleRA")
         
@@ -452,7 +448,6 @@ class mergeMatrix:
         print ("------------------------------------------")
         print ("  Merging Iter%d GX2 Matrices" % self.i)
         print ("------------------------------------------")
-        HOME = os.environ['HOME']
         if os.environ['HOSTNAME'] != 'tst01.ific.uv.es':
             os.chdir(self.addbigPath)
             print (self.addbigPath)
@@ -598,7 +593,6 @@ class COG:
                     ATHENAREL,
                     TAGS,
                     RUNPATH):
-        TempPath="%s/Iter%d/" % (self.OutputPath, self.iter)
         script=open(self.SCRIPTNAME,'w')
         script.write("#BSUB -J %s_Iter%dCog \n" % (self.preName,self.iter))         
         script.write("#BSUB -o %s/Iter%d/logs/Iter%dCog.log \n" % (self.OutputPath,self.iter,self.iter))
diff --git a/InnerDetector/InDetExample/InDetAlignExample/python/InDetAlignJobRunner.py b/InnerDetector/InDetExample/InDetAlignExample/python/InDetAlignJobRunner.py
index a8355f37075d720267adf5667a177581e4d5a011..1fadf029526ffc9139835386e1a2ca16575c35fb 100644
--- a/InnerDetector/InDetExample/InDetAlignExample/python/InDetAlignJobRunner.py
+++ b/InnerDetector/InDetExample/InDetAlignExample/python/InDetAlignJobRunner.py
@@ -2,15 +2,10 @@
 
 # Written by Juerg Beringer in April 2008.
 
-from __future__ import print_function
-
 import math
 import os
 import socket
 import time
-
-from future import standard_library
-standard_library.install_aliases()
 import subprocess
 
 
@@ -161,7 +156,7 @@ class InDetAlignJobRunner:
 
     def submitBackground(self,jobnr):
         """Execute a configured job in the background"""
-        if not jobnr in self.jobs: raise InDetAlignJobRunnerError ('Job number %s is not yet configured' % jobnr)
+        if jobnr not in self.jobs: raise InDetAlignJobRunnerError ('Job number %s is not yet configured' % jobnr)
         scriptfile = self.jobs[jobnr]['scriptfile']
         logfile = self.jobs[jobnr]['logfile']
         os.system(scriptfile+' >& '+logfile+' &')
@@ -169,7 +164,7 @@ class InDetAlignJobRunner:
 
     def submitLSF(self,jobnr):
         """Execute a configured job as a LSF batch job"""
-        if not jobnr in self.jobs: raise InDetAlignJobRunnerError ('Job number %s is not yet configured' % jobnr)
+        if jobnr not in self.jobs: raise InDetAlignJobRunnerError ('Job number %s is not yet configured' % jobnr)
         batchCmd = 'bsub -q %(batchQueue)s -J %(jobname)s -o %(logfile)s %(scriptfile)s' % self.jobs[jobnr]
         print (batchCmd)
         os.system(batchCmd)
@@ -178,7 +173,7 @@ class InDetAlignJobRunner:
     def run(self):
         """Run all jobs either in batch or in the background, as specified by option batchType."""
         batchType = self.options['batchType']    # Currently the same for all jobs
-        if not batchType in ('LSF','background','configureOnly'): raise InDetAlignJobRunnerError ('Cannot run job type %s' % type)
+        if batchType not in ('LSF','background','configureOnly'): raise InDetAlignJobRunnerError ('Cannot run job type %s' % batchType)
         filesPerJob = self.options['filesPerJob']
         njobs = int(math.ceil(float(len(self.inputfiles))/filesPerJob))
         self.options['njobs'] = njobs
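
submitLSF builds the bsub command line by filling %(name)s placeholders from the per-job options dictionary. A short sketch with hypothetical job settings (the real entries are filled in when the job is configured) shows the idiom:

    # Hypothetical per-job settings, mimicking the self.jobs[jobnr] dictionaries.
    job = {
        "batchQueue": "atlasb1",
        "jobname":    "align_job_003",
        "logfile":    "/tmp/align_job_003.log",
        "scriptfile": "/tmp/align_job_003.sh",
    }

    batch_cmd = "bsub -q %(batchQueue)s -J %(jobname)s -o %(logfile)s %(scriptfile)s" % job
    print(batch_cmd)
    # bsub -q atlasb1 -J align_job_003 -o /tmp/align_job_003.log /tmp/align_job_003.sh
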
diff --git a/InnerDetector/InDetExample/InDetAlignExample/python/IteratorGridClasses.py b/InnerDetector/InDetExample/InDetAlignExample/python/IteratorGridClasses.py
index 3971c62b2ddc233c4d059b5e1a44bebddd24f71d..51ed2f0cf71ceecd88c44d5bf70e845509654b3a 100644
--- a/InnerDetector/InDetExample/InDetAlignExample/python/IteratorGridClasses.py
+++ b/InnerDetector/InDetExample/InDetAlignExample/python/IteratorGridClasses.py
@@ -6,13 +6,9 @@
 #             Song-Ming Wang (smwang@phys.sinica.edu.tw)
 # =====================================================================
 
-from __future__ import print_function
-
-
 from threading import Thread
 import os
-import time,datetime
-import string
+import time
 import sys
 
 from future import standard_library
@@ -21,157 +17,156 @@ import subprocess
 
 
 class runProcess(Thread):
-	def __init__ (self,
-		      jobOptions,
-		      iter,
-		      part,
-		      OutputPath,
-		      OutputLevel):
-		Thread.__init__(self)
-		self.jobOptions = jobOptions
-		self.i = iter
-		self.j = part
-		self.OutputPath = OutputPath
-		self.OutputLevel = OutputLevel
-		self.status = -1
-
-	def run(self):
-		if self.j == -1:
-			print ("----------------------------------------------")
-			print ("  Running Iter%d - Solve in local machine" % (self.i))
-			print ("----------------------------------------------")
-			process=os.popen("athena.py %s | tee %s/Iter%02d/logs/Iter%02dSolve.log" % (self.jobOptions, self.OutputPath, self.i, self.i))
-		else:
-			print ("----------------------------------------------")
-			print ("  Running Iter%d - Part%02d in local machine" % (self.i, self.j))
-			print ("----------------------------------------------")
-			print (" - output path: %s/Iter%02d/%02d" % (self.OutputPath, self.i, self.j))
-			process=os.popen("athena.py %s | tee %s/Iter%02d/logs/Iter%02dPart%02d.log" % (self.jobOptions, self.OutputPath, self.i, self.i ,self.j))
-
-		print ("Running...")
-		while 1:
-			line = process.readline()
-			if self.OutputLevel == 'DEBUG':
-				print (line, end='')
-			if not line:
-				self.status=1
-				break
-			
+        def __init__ (self,
+                      jobOptions,
+                      iter,
+                      part,
+                      OutputPath,
+                      OutputLevel):
+                Thread.__init__(self)
+                self.jobOptions = jobOptions
+                self.i = iter
+                self.j = part
+                self.OutputPath = OutputPath
+                self.OutputLevel = OutputLevel
+                self.status = -1
+
+        def run(self):
+                if self.j == -1:
+                        print ("----------------------------------------------")
+                        print ("  Running Iter%d - Solve in local machine" % (self.i))
+                        print ("----------------------------------------------")
+                        process=os.popen("athena.py %s | tee %s/Iter%02d/logs/Iter%02dSolve.log" % (self.jobOptions, self.OutputPath, self.i, self.i))
+                else:
+                        print ("----------------------------------------------")
+                        print ("  Running Iter%d - Part%02d in local machine" % (self.i, self.j))
+                        print ("----------------------------------------------")
+                        print (" - output path: %s/Iter%02d/%02d" % (self.OutputPath, self.i, self.j))
+                        process=os.popen("athena.py %s | tee %s/Iter%02d/logs/Iter%02dPart%02d.log" % (self.jobOptions, self.OutputPath, self.i, self.i ,self.j))
+
+                print ("Running...")
+                while 1:
+                        line = process.readline()
+                        if self.OutputLevel == 'DEBUG':
+                                print (line, end='')
+                        if not line:
+                                self.status=1
+                                break
+
 
 
 
 def prepareForThisIter(iteration, GridFileOptions):
 
-	GridFileOptions["GridAccSubJobID"]  = 'GridAccSubJobID_%02d.txt'  % (iteration)
-	GridFileOptions["GridAccSubInfo"]   = 'GridAccSubInfo_%02d.txt'   % (iteration)
-	GridFileOptions["GridAccOutDS"]     = 'GridAccOutDS_%02d.txt'     % (iteration)
-	GridFileOptions["GridSolvingOutDS"] = 'GridSolvingOutDS_%02d.txt' % (iteration)
+        GridFileOptions["GridAccSubJobID"]  = 'GridAccSubJobID_%02d.txt'  % (iteration)
+        GridFileOptions["GridAccSubInfo"]   = 'GridAccSubInfo_%02d.txt'   % (iteration)
+        GridFileOptions["GridAccOutDS"]     = 'GridAccOutDS_%02d.txt'     % (iteration)
+        GridFileOptions["GridSolvingOutDS"] = 'GridSolvingOutDS_%02d.txt' % (iteration)
+
 
 
-			
 class writeJobGrid:
-	def __init__(self,
-		     OutputPath,
-		     CosmicsBoff,
-		     CosmicsBon,
-		     iter,
-		     part,
-		     JOBNAME,
-		     RecoOptions      = {},
-		     extraOptions     = {},
-		     AlignmentOptions = {},
-		     RecoScript       = "InDetAlignExample/NewTopOptions.py",
-		     #AlignmentScript  = "InDetAlignExample/NewInDetAlignAlgSetup.py",
-		     AlignmentScript  = "NewInDetAlignAlgSetup.py",
-		     AlignmentLevels  = "InDetAlignExample/NewInDetAlignLevels.py",
-		     MonitoringScript = "InDetRecExample/InDetMonitoringAlignment.py"
-		     ):
-		self.OutputPath       = OutputPath
-		self.CosmicsBoff      = CosmicsBoff
-		self.CosmicsBon       = CosmicsBon
-		self.i                = iter
-		self.part             = part
-		self.JOBNAME          = JOBNAME
-		self.RecoOptions      = RecoOptions
-		self.extraOptions     = extraOptions
-		self.RecoScript       = RecoScript
-		self.AlignmentOptions = AlignmentOptions
-		self.AlignmentScript  = AlignmentScript
-		self.AlignmentLevels  = AlignmentLevels
-		self.MonitoringScript = MonitoringScript
-		
-	def write(self):
-		PrefixName="Iter%02d_" % self.i
-		if self.part == "Solve":
-			TempPath         = "%s/Iter%02d/"                 % (self.OutputPath, self.i)
-		else:
-			if self.CosmicsBoff:
-				TempPath = "%s/Iter%02d/CosmicsBoff/" % (self.OutputPath, self.i)
-			elif self.CosmicsBon:
-				TempPath = "%s/Iter%02d/CosmicsBon/"   % (self.OutputPath, self.i)
-			else:
-				TempPath = "%s/Iter%02d/Collision/"       % (self.OutputPath, self.i)
-
-		os.system('mkdir -p %s' % TempPath)
-		os.system('mkdir -p %s' % TempPath+"/InDetRecExample")
-		if self.MonitoringScript != "":
-			os.system('ln -s %s %s' % (self.MonitoringScript,TempPath+"/InDetRecExample/InDetMonitoringAlignment.py") )
-
-		# Make Monitoring script dir and file here
-
-		#job=open(TempPath+self.JOBNAME,'w')
-		job=open(self.JOBNAME,'w')
-		job.write('##-------- Alignment Configuration --------------------\n')
-
-		for option in self.AlignmentOptions:
-			if type(self.AlignmentOptions[option]) is str:
-				customoption = option + "\t=\t\"" +  self.AlignmentOptions[option]+'\"\n'
-				job.write(customoption)
-			else:
-				customoption = option + "\t=\t"   +  str(self.AlignmentOptions[option])+'\n'
-				job.write(customoption)
-			
-		job.write("\n")
-		job.write('##-------- Loading the Alignment Levels --------------------\n')
-		job.write('include("'+str(self.AlignmentLevels)+'") \n')
-		job.write("\n")
-		job.write('##-------- Reconstruction Configuration --------------------\n')
-		
-		for option in self.RecoOptions:
-			if type(self.RecoOptions[option]) is str:
-				customoption = option + "\t=\t\"" +  self.RecoOptions[option]+'\"\n'
-				job.write(customoption)
-			else:
-				customoption = option + "\t=\t"   +  str(self.RecoOptions[option])+'\n'
-				job.write(customoption)
-
-		if len(self.extraOptions):
-			job.write("\n")
-			job.write('##-------- Extra Configuration --------------------\n')
-		
-		for option in self.extraOptions:
-			if type(self.extraOptions[option]) is str:
-				customoption = option + "\t=\t\"" +  self.extraOptions[option]+'\"\n'
-				job.write(customoption)
-			else:
-				customoption = option + "\t=\t"   +  str(self.extraOptions[option])+'\n'
-				job.write(customoption)
-
-		job.write("\n")
-		job.write('##-------- End of custom options -------------\n')
-
-		# Need to write the InDetRec Options Here:
-		job.write("\n")
-		job.write('##-------- Load Reconstruction or purely build geometry ---------------\n')
-		job.write('include("'+str(self.RecoScript)+'") \n')
-
-		job.write("\n")
-		job.write('##-------- Load Alignment --------------------\n')
-		job.write('include("'+str(self.AlignmentScript)+'") \n')
-		job.write("\n")
-		job.close()
-
-	
+        def __init__(self,
+                     OutputPath,
+                     CosmicsBoff,
+                     CosmicsBon,
+                     iter,
+                     part,
+                     JOBNAME,
+                     RecoOptions      = {},
+                     extraOptions     = {},
+                     AlignmentOptions = {},
+                     RecoScript       = "InDetAlignExample/NewTopOptions.py",
+                     #AlignmentScript  = "InDetAlignExample/NewInDetAlignAlgSetup.py",
+                     AlignmentScript  = "NewInDetAlignAlgSetup.py",
+                     AlignmentLevels  = "InDetAlignExample/NewInDetAlignLevels.py",
+                     MonitoringScript = "InDetRecExample/InDetMonitoringAlignment.py"
+                     ):
+                self.OutputPath       = OutputPath
+                self.CosmicsBoff      = CosmicsBoff
+                self.CosmicsBon       = CosmicsBon
+                self.i                = iter
+                self.part             = part
+                self.JOBNAME          = JOBNAME
+                self.RecoOptions      = RecoOptions
+                self.extraOptions     = extraOptions
+                self.RecoScript       = RecoScript
+                self.AlignmentOptions = AlignmentOptions
+                self.AlignmentScript  = AlignmentScript
+                self.AlignmentLevels  = AlignmentLevels
+                self.MonitoringScript = MonitoringScript
+
+        def write(self):
+                if self.part == "Solve":
+                        TempPath         = "%s/Iter%02d/"                 % (self.OutputPath, self.i)
+                else:
+                        if self.CosmicsBoff:
+                                TempPath = "%s/Iter%02d/CosmicsBoff/" % (self.OutputPath, self.i)
+                        elif self.CosmicsBon:
+                                TempPath = "%s/Iter%02d/CosmicsBon/"   % (self.OutputPath, self.i)
+                        else:
+                                TempPath = "%s/Iter%02d/Collision/"       % (self.OutputPath, self.i)
+
+                os.system('mkdir -p %s' % TempPath)
+                os.system('mkdir -p %s' % TempPath+"/InDetRecExample")
+                if self.MonitoringScript != "":
+                        os.system('ln -s %s %s' % (self.MonitoringScript,TempPath+"/InDetRecExample/InDetMonitoringAlignment.py") )
+
+                # Make Monitoring script dir and file here
+
+                #job=open(TempPath+self.JOBNAME,'w')
+                job=open(self.JOBNAME,'w')
+                job.write('##-------- Alignment Configuration --------------------\n')
+
+                for option in self.AlignmentOptions:
+                        if type(self.AlignmentOptions[option]) is str:
+                                customoption = option + "\t=\t\"" +  self.AlignmentOptions[option]+'\"\n'
+                                job.write(customoption)
+                        else:
+                                customoption = option + "\t=\t"   +  str(self.AlignmentOptions[option])+'\n'
+                                job.write(customoption)
+
+                job.write("\n")
+                job.write('##-------- Loading the Alignment Levels --------------------\n')
+                job.write('include("'+str(self.AlignmentLevels)+'") \n')
+                job.write("\n")
+                job.write('##-------- Reconstruction Configuration --------------------\n')
+
+                for option in self.RecoOptions:
+                        if type(self.RecoOptions[option]) is str:
+                                customoption = option + "\t=\t\"" +  self.RecoOptions[option]+'\"\n'
+                                job.write(customoption)
+                        else:
+                                customoption = option + "\t=\t"   +  str(self.RecoOptions[option])+'\n'
+                                job.write(customoption)
+
+                if len(self.extraOptions):
+                        job.write("\n")
+                        job.write('##-------- Extra Configuration --------------------\n')
+
+                for option in self.extraOptions:
+                        if type(self.extraOptions[option]) is str:
+                                customoption = option + "\t=\t\"" +  self.extraOptions[option]+'\"\n'
+                                job.write(customoption)
+                        else:
+                                customoption = option + "\t=\t"   +  str(self.extraOptions[option])+'\n'
+                                job.write(customoption)
+
+                job.write("\n")
+                job.write('##-------- End of custom options -------------\n')
+
+                # Need to write the InDetRec Options Here:
+                job.write("\n")
+                job.write('##-------- Load Reconstruction or purely build geometry ---------------\n')
+                job.write('include("'+str(self.RecoScript)+'") \n')
+
+                job.write("\n")
+                job.write('##-------- Load Alignment --------------------\n')
+                job.write('include("'+str(self.AlignmentScript)+'") \n')
+                job.write("\n")
+                job.close()
+
+
 
 
 
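
writeJobGrid.write() above serialises each option dictionary into "name = value" lines, quoting only the string values, and then include()s the reconstruction and alignment scripts. A compact sketch of that serialisation step, with invented option names and a hypothetical output file, captures the pattern:

    # Invented options standing in for RecoOptions / extraOptions /
    # AlignmentOptions in writeJobGrid above.
    options = {"inputFiles": "myESD.pool.root",
               "numberOfEvents": 500,
               "doMonitoring": True}

    with open("jobOptionsFragment.py", "w") as job:   # hypothetical file name
        for name, value in options.items():
            if isinstance(value, str):
                job.write('%s\t=\t"%s"\n' % (name, value))   # strings are quoted
            else:
                job.write('%s\t=\t%s\n' % (name, value))     # numbers/bools as-is
        job.write('include("InDetAlignExample/NewTopOptions.py")\n')
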
@@ -179,1373 +174,1373 @@ class writeJobGrid:
 
 class writeScriptGridForTFile :
 
-	def __init__(self,
-		     OutputPath,
-		     preName,
-		     iter,
-		     part,
-		     ATHENAREL,
-		     SCRIPTNAME,
-		     Datasets,
-		     outDS,
-		     AccSubJobID,
-		     AccSubInfo,
-		     JOBNAMES,
-		     thisJobDir,
-		     jobId,
-		     retryNo,
-		     GridOptions = {},
-		     AlignmentOptions = {}
-       		     ):
-
-			self.OutputPath       = OutputPath
-			self.preName          = preName
-			self.i                = iter
-			self.part             = part
-			self.ATHENAREL	      = ATHENAREL
-			self.SCRIPTNAME       = SCRIPTNAME
-			self.Datasets         =  Datasets
-			self.outDS            = outDS
-			self.AccSubJobID      = AccSubJobID
-			self.AccSubInfo       = AccSubInfo     
-			self.JOBNAMES         = JOBNAMES
-			self.thisJobDir       = thisJobDir
-			self.jobId            = -99
-			self.retryNo	      = 0
-			self.GridOptions      = GridOptions
-			self.AlignmentOptions = AlignmentOptions
-
-
-
-	def write(self) :
-
-
-		print (" start to create the job submit command line ...\n "   )
-		extOutFileStr    = "AlignmentTFile.root"
-		extOutFileStrAcc = extOutFileStr
-
-		if self.GridOptions["doMonitoring"] :
-			extOutFileStrAcc = extOutFileStr + ",monitoring.root"
-		if self.GridOptions["doTrkNtuple"]  :
-			extOutFileStrAcc += ",TrkValidation.root"
-
-		if self.part == "Accumulate":
-			self.SCRIPTNAME
-			script = open(self.SCRIPTNAME,'w')
-
-			if self.Datasets.containType("Customed") :
-				if self.Datasets.doDetailedTagsConfig() :
-					for index in range(len(self.Datasets.namesList("Customed"))) :
-						if ( "group" in self.GridOptions["userIDnum"] ) :
-							scriptStr   = "pathena --official --voms=atlas:/atlas/det-indet/Role=production  %s  " % (self.JOBNAMES["Customed"][index])
-						else :
-							scriptStr   = "pathena  %s  " % (self.JOBNAMES["Customed"][index])
-
-						if ("accumulateLibDS" in self.GridOptions) and (self.GridOptions["accumulateLibDS"] != "") :
-							scriptStr += " --libDS %s "          % self.GridOptions["accumulateLibDS"]
-							scriptStr += " --excludeFile %s "    % "*.cxx,*.h,*.o"
-
-						if ("removeFileList" in self.GridOptions) and (self.GridOptions["removeFileList"]   != "") :
-							scriptStr += " --removeFileList %s " % self.GridOptions["removeFileList"]
-
-						if ("fileList"       in self.GridOptions) and (self.GridOptions["fileList"]         != "") :
-							scriptStr += " --fileList %s "       % self.GridOptions["fileList"]
-
-						if ("siteName"	     in self.GridOptions) and (self.GridOptions["siteName"]         != "") :
-							scriptStr += " --site %s "           % self.GridOptions["siteName"]
-
-						if ("excludedSite"   in self.GridOptions) and (self.GridOptions["excludedSite"]     != "") :
-							scriptStr += " --excludedSite %s "   % self.GridOptions["excludedSite"]
-
-						if ("SkipFiles"      in self.GridOptions) and (self.GridOptions["SkipFiles"]        != "") :
-							scriptStr += " --nSkipFiles %s "     % self.GridOptions["SkipFiles"][self.i]
-
-
-						if self.GridOptions["dbRelease"]    == "":
-							scriptStr += " --dbRelease LATEST "
-						else :
-							scriptStr += " --dbRelease %s "    % self.GridOptions["dbRelease"]
-
-
-
-						scriptStr += " --extFile "
-
-						if (0 == self.i) : 
-							if os.path.isfile("initial_AlignmentConstants.root") :
-								scriptStr += "   initial_AlignmentConstants.root,Scaling.root  "   
-							else : 
-								scriptStr += " Scaling.root " 
-						else :
-							lastAlignConstant = "Iter%02d_AlignmentConstants.root"  %  (self.i-1)
-							if os.path.isfile("%s"  % lastAlignConstant ) : 
-								scriptStr += "  %s,Scaling.root "      % lastAlignConstant 
-								
-						
-		
-						if self.GridOptions["debugLevel"] == 4 :
-							self.GridOptions["doNFilesPerJobSplit"] = False
-							scriptStr += " --nEventsPerJob %s" % self.GridOptions["nEventsPerJob"]
-  	
-						if self.GridOptions["doNFilesPerJobSplit" ] :
-							if (-1 != self.GridOptions["CustomedNFiles"][index]) :
-								scriptStr += " --inDS %s --outDS %s --extOutFile %s --nFiles %d --nFilesPerJob %d --tmpDir %s  >& tmpSubJobInfo_Customed_%02d.txt ; \n" % ( self.Datasets.namesList("Customed")[index], self.outDS["Customed"][index], extOutFileStrAcc, self.GridOptions["CustomedNFiles"][index], self.GridOptions["CustomedNFPJ"][index], self.GridOptions["TmpWorkDir"], int(index) )
-							else : 
-								scriptStr += " --inDS %s --outDS %s --extOutFile %s  --nFilesPerJob %d --tmpDir %s  >& tmpSubJobInfo_Customed_%02d.txt ; \n" % ( self.Datasets.namesList("Customed")[index], self.outDS["Customed"][index], extOutFileStrAcc, self.GridOptions["CustomedNFPJ"][index], self.GridOptions["TmpWorkDir"], int(index) ) 
-						else : 
-							if (-1 != self.GridOptions["CustomedNFiles"][index]) :
-								scriptStr += " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d        --tmpDir %s  >& tmpSubJobInfo_Customed_%02d.txt ; \n" % ( self.Datasets.namesList("Customed")[index], self.outDS["Customed"][index], extOutFileStrAcc, self.GridOptions["CustomedNFiles"][index], self.GridOptions["CustomedCPUs"][index], self.GridOptions["TmpWorkDir"], int(index) )
-							
-							else :   
-								scriptStr += " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d        --tmpDir %s  >& tmpSubJobInfo_Customed_%02d.txt ; \n" % ( self.Datasets.namesList("Customed")[index], self.outDS["Customed"][index], extOutFileStrAcc, self.Datasets.nFiles("Customed")[index], self.GridOptions["CustomedCPUs"][index], self.GridOptions["TmpWorkDir"], int(index) )
-
-							
-						script.write(scriptStr)
-							
-					print (" You are running alignment accumulation and using TFile for bookkeeping \n" )
-					print (" You are running on customed dataset and doing detailed tags configuration \n")
-					print (" the Grid job submission command line is : \n")
-					print (scriptStr)
-
-
-			if (self.Datasets.containType("900GeV") or self.Datasets.containType("7TeV")) :
+        def __init__(self,
+                     OutputPath,
+                     preName,
+                     iter,
+                     part,
+                     ATHENAREL,
+                     SCRIPTNAME,
+                     Datasets,
+                     outDS,
+                     AccSubJobID,
+                     AccSubInfo,
+                     JOBNAMES,
+                     thisJobDir,
+                     jobId,
+                     retryNo,
+                     GridOptions = {},
+                     AlignmentOptions = {}
+                     ):
+
+                        self.OutputPath       = OutputPath
+                        self.preName          = preName
+                        self.i                = iter
+                        self.part             = part
+                        self.ATHENAREL        = ATHENAREL
+                        self.SCRIPTNAME       = SCRIPTNAME
+                        self.Datasets         =  Datasets
+                        self.outDS            = outDS
+                        self.AccSubJobID      = AccSubJobID
+                        self.AccSubInfo       = AccSubInfo
+                        self.JOBNAMES         = JOBNAMES
+                        self.thisJobDir       = thisJobDir
+                        self.jobId            = -99
+                        self.retryNo          = 0
+                        self.GridOptions      = GridOptions
+                        self.AlignmentOptions = AlignmentOptions
+
+
+
+        def write(self) :
+
+
+                print (" start to create the job submit command line ...\n "   )
+                extOutFileStr    = "AlignmentTFile.root"
+                extOutFileStrAcc = extOutFileStr
+
+                if self.GridOptions["doMonitoring"] :
+                        extOutFileStrAcc = extOutFileStr + ",monitoring.root"
+                if self.GridOptions["doTrkNtuple"]  :
+                        extOutFileStrAcc += ",TrkValidation.root"
+
+                if self.part == "Accumulate":
+                        self.SCRIPTNAME
+                        script = open(self.SCRIPTNAME,'w')
+
+                        if self.Datasets.containType("Customed") :
+                                if self.Datasets.doDetailedTagsConfig() :
+                                        for index in range(len(self.Datasets.namesList("Customed"))) :
+                                                if ( "group" in self.GridOptions["userIDnum"] ) :
+                                                        scriptStr   = "pathena --official --voms=atlas:/atlas/det-indet/Role=production  %s  " % (self.JOBNAMES["Customed"][index])
+                                                else :
+                                                        scriptStr   = "pathena  %s  " % (self.JOBNAMES["Customed"][index])
+
+                                                if ("accumulateLibDS" in self.GridOptions) and (self.GridOptions["accumulateLibDS"] != "") :
+                                                        scriptStr += " --libDS %s "          % self.GridOptions["accumulateLibDS"]
+                                                        scriptStr += " --excludeFile %s "    % "*.cxx,*.h,*.o"
+
+                                                if ("removeFileList" in self.GridOptions) and (self.GridOptions["removeFileList"]   != "") :
+                                                        scriptStr += " --removeFileList %s " % self.GridOptions["removeFileList"]
+
+                                                if ("fileList"       in self.GridOptions) and (self.GridOptions["fileList"]         != "") :
+                                                        scriptStr += " --fileList %s "       % self.GridOptions["fileList"]
+
+                                                if ("siteName"       in self.GridOptions) and (self.GridOptions["siteName"]         != "") :
+                                                        scriptStr += " --site %s "           % self.GridOptions["siteName"]
+
+                                                if ("excludedSite"   in self.GridOptions) and (self.GridOptions["excludedSite"]     != "") :
+                                                        scriptStr += " --excludedSite %s "   % self.GridOptions["excludedSite"]
+
+                                                if ("SkipFiles"      in self.GridOptions) and (self.GridOptions["SkipFiles"]        != "") :
+                                                        scriptStr += " --nSkipFiles %s "     % self.GridOptions["SkipFiles"][self.i]
+
+
+                                                if self.GridOptions["dbRelease"]    == "":
+                                                        scriptStr += " --dbRelease LATEST "
+                                                else :
+                                                        scriptStr += " --dbRelease %s "    % self.GridOptions["dbRelease"]
+
+
+
+                                                scriptStr += " --extFile "
+
+                                                if (0 == self.i) :
+                                                        if os.path.isfile("initial_AlignmentConstants.root") :
+                                                                scriptStr += "   initial_AlignmentConstants.root,Scaling.root  "
+                                                        else :
+                                                                scriptStr += " Scaling.root "
+                                                else :
+                                                        lastAlignConstant = "Iter%02d_AlignmentConstants.root"  %  (self.i-1)
+                                                        if os.path.isfile("%s"  % lastAlignConstant ) :
+                                                                scriptStr += "  %s,Scaling.root "      % lastAlignConstant
+
+                                                
+
+                                                if self.GridOptions["debugLevel"] == 4 :
+                                                        self.GridOptions["doNFilesPerJobSplit"] = False
+                                                        scriptStr += " --nEventsPerJob %s" % self.GridOptions["nEventsPerJob"]
+
+                                                if self.GridOptions["doNFilesPerJobSplit" ] :
+                                                        if (-1 != self.GridOptions["CustomedNFiles"][index]) :
+                                                                scriptStr += " --inDS %s --outDS %s --extOutFile %s --nFiles %d --nFilesPerJob %d --tmpDir %s  >& tmpSubJobInfo_Customed_%02d.txt ; \n" % ( self.Datasets.namesList("Customed")[index], self.outDS["Customed"][index], extOutFileStrAcc, self.GridOptions["CustomedNFiles"][index], self.GridOptions["CustomedNFPJ"][index], self.GridOptions["TmpWorkDir"], int(index) )
+                                                        else :
+                                                                scriptStr += " --inDS %s --outDS %s --extOutFile %s  --nFilesPerJob %d --tmpDir %s  >& tmpSubJobInfo_Customed_%02d.txt ; \n" % ( self.Datasets.namesList("Customed")[index], self.outDS["Customed"][index], extOutFileStrAcc, self.GridOptions["CustomedNFPJ"][index], self.GridOptions["TmpWorkDir"], int(index) )
+                                                else :
+                                                        if (-1 != self.GridOptions["CustomedNFiles"][index]) :
+                                                                scriptStr += " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d        --tmpDir %s  >& tmpSubJobInfo_Customed_%02d.txt ; \n" % ( self.Datasets.namesList("Customed")[index], self.outDS["Customed"][index], extOutFileStrAcc, self.GridOptions["CustomedNFiles"][index], self.GridOptions["CustomedCPUs"][index], self.GridOptions["TmpWorkDir"], int(index) )
+
+                                                        else :
+                                                                scriptStr += " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d        --tmpDir %s  >& tmpSubJobInfo_Customed_%02d.txt ; \n" % ( self.Datasets.namesList("Customed")[index], self.outDS["Customed"][index], extOutFileStrAcc, self.Datasets.nFiles("Customed")[index], self.GridOptions["CustomedCPUs"][index], self.GridOptions["TmpWorkDir"], int(index) )
+
+
+                                                script.write(scriptStr)
+
+                                        print (" You are running alignment accumulation and using TFile for bookkeeping \n" )
+                                        print (" You are running on customed dataset and doing detailed tags configuration \n")
+                                        print (" the Grid job submission command line is : \n")
+                                        print (scriptStr)
+
+
+                        if (self.Datasets.containType("900GeV") or self.Datasets.containType("7TeV")) :
+
+                                if self.GridOptions["ColCPUs"][self.i]:
+                                        scriptStr   = "pathena  %s  " % (self.JOBNAMES["Collision"])
+                                if self.GridOptions["CosBonCPUs"][self.i]:
+                                        scriptStr   = "pathena  %s  " % (self.JOBNAMES["CosBon"])
+                                if self.GridOptions["CosBoffCPUs"][self.i]:
+                                        scriptStr   = "pathena  %s  " % (self.JOBNAMES["CosBoff"])
+
+
+                                if ("fileList" in self.GridOptions) and (self.GridOptions["fileList"]     != "") :
+                                        scriptStr += "--fileList %s "     % self.GridOptions["fileList"]
+
+                                if ("siteName" in self.GridOptions ) and (self.GridOptions["siteName"]     != "") :
+                                        scriptStr += "--site %s "         % self.GridOptions["siteName"]
+
+                                if ("excludedSite" in self.GridOptions) and (self.GridOptions["excludedSite"] != "") :
+                                        scriptStr += "--excludedSite %s " % self.GridOptions["excludedSite"]
+
+                                if self.GridOptions["dbRelease"]    == "":
+                                        scriptStr += "--dbRelease LATEST "
+                                else:
+                                        scriptStr += "--dbRelease %s "    % self.GridOptions["dbRelease"]
+
+
+
+
+                                scriptStr += " --extFile "
+
+                                if (0 == self.i) :
+                                        if os.path.isfile("initial_AlignmentConstants.root") :
+                                                scriptStr += "   initial_AlignmentConstants.root,Scaling.root  "
+                                        else :
+                                                scriptStr += " Scaling.root "
+                                else :
+                                        lastAlignConstant = "Iter%02d_AlignmentConstants.root"  %  (self.i-1)
+                                        if os.path.isfile("%s"  % lastAlignConstant ) :
+                                                scriptStr += "  %s,Scaling.root "      % lastAlignConstant
+
+
+
+
+                                if self.GridOptions["ColCPUs"][self.i] :
+                                        if self.GridOptions["doNFilesPerJobSplit" ] :
+                                                scriptStr_forCollision = scriptStr + " --inDS %s --outDS %s --extOutFile %s --nFiles %d --nFilesPerJob %d --tmpDir %s  >& tmpSubJobInfo_collision.txt ; \n" % ( self.Datasets.namesList("Collision"), self.outDS["Collision"], extOutFileStrAcc, self.Datasets.nFiles("Collision"), self.GridOptions["ColNFPJ"][self.i], self.GridOptions["TmpWorkDir"] )
+                                        else :
+                                                scriptStr_forCollision = scriptStr + " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d        --tmpDir %s  >& tmpSubJobInfo_collision.txt ; \n" % ( self.Datasets.namesList("Collision"), self.outDS["Collision"], extOutFileStrAcc, self.Datasets.nFiles("Collision"), self.GridOptions["ColCPUs"][self.i], self.GridOptions["TmpWorkDir"] )
+                                        script.write(scriptStr_forCollision)
+
+
+                                if self.GridOptions["CosBonCPUs"][self.i]:
+                                        scriptStr_forBon       = scriptStr + " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d --tmpDir %s  >& tmpSubJobInfo_Bon.txt ;       \n" % ( self.Datasets.namesList("CosBon"),    self.outDS["CosBon"], extOutFileStrAcc, self.Datasets.nFiles("Bon"),       self.GridOptions["CosBonCPUs"][self.i],   self.GridOptions["TmpWorkDir"] )
+                                        script.write(scriptStr_forBon)
+
+                                if self.GridOptions["CosBoffCPUs"][self.i]:
+                                        scriptStr_forBoff      = scriptStr + " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d --tmpDir %s  >& tmpSubJobInfo_Boff.txt ;      \n" % (self.Datasets.namesList("CosBoff"), self.outDS["CosBoff"],   extOutFileStrAcc, self.Datasets.nFiles("Boff"),      self.GridOptions["CosBoffCPUs"][self.i], self.GridOptions["TmpWorkDir"] )
+                                        script.write(scriptStr_forBoff)
+
+                        script.close()
+
+
+                elif self.part == "Prun" :
+                        print (self.SCRIPTNAME)
+                        script     = open(self.SCRIPTNAME,'w')
+
+                        prunSolve = open("prunSolve.py",'w')
+                        prunSolve.write('import os\nimport string\nimport sys\nfrom future import standard_library\nstandard_library.install_aliases()\nimport subprocess\n\n\n')
+                        prunSolve.write('inputTFiles = []\ninFiles = []\n\n\n')
+                        prunSolve.write('ret, out = subprocess.getstatusoutput(\"cat input1.txt\")\n')
+                        prunSolve.write('print (\" the content of file input1.txt:  \", out) \n')
+                        prunSolve.write('lines = out.split(\",\")\n')
+                        prunSolve.write('for line in lines:\n')
+                        prunSolve.write('\tif \"AlignmentTFile.root\" in str(line):\n\t\tinputTFiles.append(line)\n')
+
+                        tmpStrJO = "newSolveJO_Iter%02d.py" % self.i
+                        prunSolve.write('jofile = open( \'%s\' , \'w+\')\n' % tmpStrJO )
+
+                        tmpStr = ""
+
+                        if (0 == self.i) :
+                                if os.path.isfile("initial_AlignmentConstants.root") :
+                                        tmpStr = "pool_insertFileToCatalog  initial_AlignmentConstants.root "
+
+
+                        else :
+
+                                alignConstants = "Iter%02d_AlignmentConstants.root" % (self.i - 1)
+                                if os.path.isfile(alignConstants) :
+                                        tmpStr = "pool_insertFileToCatalog  %s "  % alignConstants
+                                else :
+                                        print ("ALIGNMENT CONSTANTS %s NOT EXIST, WILL EXIT ANYHOW !!!  "  % alignConstants)
+                                        sys.exit()
+
+
+                        prunSolve.write('jofile.write(\'os.system(\\\"%s\\\")\\n\')\n'  % tmpStr)
+                        prunSolve.write('jofile.write(\"inputTFiles = \" + str(inputTFiles) + \'\\n\')\n')
+
+                        prunSolve.write('ff = open(\'%s\', \'r\')\n' % self.JOBNAMES["Solve"] )
+                        prunSolve.write('jofile.write(ff.read())\nff.close()\njofile.close()\n')
+
+                        prunSolve.write('os.system(\"athena.py %s \")\n'  % tmpStrJO )
+
+                        # debugging ...
+                        print ("prunSolve.py: ")
+                        os.system(" cat prunSolve.py ")
+                        print (" newSolveJO_Iter%02d.py: " % self.i)
+                        os.system(" cat %s " % tmpStrJO)
+
+
+                        extOutFileStrSol = ""
+
+                        if (    ("sctAlignmentLevel"          in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevel"]          == 3 ) or
+                                ("sctAlignmentLevelBarrel"    in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevelBarrel"]    == 3)  or
+                                ("sctAlignmentLevelEndcaps"   in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevelEndcaps"]   == 3)  or
+                                ("pixelAlignmentLevel"        in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevel"]        == 3)  or
+                                ("pixelAlignmentLevelBarrel"  in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevelBarrel"]  == 3)  or
+                                ("pixelAlignmentLevelEndcaps" in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevelEndcaps"] == 3) ) :
+                                print ("hmn, you are going to run L3 alignment, Eigen is being used, so no eigen value information!!! ")
+
+                        else :
 
-				if self.GridOptions["ColCPUs"][self.i]:
-					scriptStr   = "pathena  %s  " % (self.JOBNAMES["Collision"])
-				if self.GridOptions["CosBonCPUs"][self.i]:
-					scriptStr   = "pathena  %s  " % (self.JOBNAMES["CosBon"])
-				if self.GridOptions["CosBoffCPUs"][self.i]:
-					scriptStr   = "pathena  %s  " % (self.JOBNAMES["CosBoff"])
-
-
-				if ("fileList" in self.GridOptions) and (self.GridOptions["fileList"]     != "") :
-					scriptStr += "--fileList %s "     % self.GridOptions["fileList"]
-
-				if ("siteName" in self.GridOptions ) and (self.GridOptions["siteName"]     != "") :
-					scriptStr += "--site %s "         % self.GridOptions["siteName"]
-						
-				if ("excludedSite" in self.GridOptions) and (self.GridOptions["excludedSite"] != "") :
-					scriptStr += "--excludedSite %s " % self.GridOptions["excludedSite"]
-						                       
-				if self.GridOptions["dbRelease"]    == "":
-					scriptStr += "--dbRelease LATEST "
-				else:
-					scriptStr += "--dbRelease %s "    % self.GridOptions["dbRelease"]
-
-
-
-
-				scriptStr += " --extFile "
-
-				if (0 == self.i) : 
-					if os.path.isfile("initial_AlignmentConstants.root") :
-						scriptStr += "   initial_AlignmentConstants.root,Scaling.root  "   
-					else : 
-						scriptStr += " Scaling.root " 
-				else :
-					lastAlignConstant = "Iter%02d_AlignmentConstants.root"  %  (self.i-1)
-					if os.path.isfile("%s"  % lastAlignConstant ) : 
-						scriptStr += "  %s,Scaling.root "      % lastAlignConstant 
+                                if ( "writeEigenMat"      in self.GridOptions  and self.GridOptions["writeEigenMat"]    is True  ) and ( self.AlignmentOptions["runLocal"] is False ):
+                                        extOutFileStrSol += "eigenvectors.bin,"
+                                        extOutFileStrSol += "eigenvalues.bin,"
 
+                                if ( "writeEigenMatTxt"   in self.GridOptions  and self.GridOptions["writeEigenMatTxt"] is True  ) and ( self.AlignmentOptions["runLocal"] is False ):
+                                        extOutFileStrSol += "eigenvectors.txt,"
+                                        extOutFileStrSol += "eigenvalues.txt,"
 
 
+                        extOutFileStrSol = extOutFileStrSol + extOutFileStr + ",Iter%02d_AlignmentConstants.root,OldSiAlignment.txt,OutputSiAlignment.txt,alignlogfile.txt" % (self.i)
 
-				if self.GridOptions["ColCPUs"][self.i] :
-					if self.GridOptions["doNFilesPerJobSplit" ] :
-						scriptStr_forCollision = scriptStr + " --inDS %s --outDS %s --extOutFile %s --nFiles %d --nFilesPerJob %d --tmpDir %s  >& tmpSubJobInfo_collision.txt ; \n" % ( self.Datasets.namesList("Collision"), self.outDS["Collision"], extOutFileStrAcc, self.Datasets.nFiles("Collision"), self.GridOptions["ColNFPJ"][self.i], self.GridOptions["TmpWorkDir"] )   
-					else :
-						scriptStr_forCollision = scriptStr + " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d        --tmpDir %s  >& tmpSubJobInfo_collision.txt ; \n" % ( self.Datasets.namesList("Collision"), self.outDS["Collision"], extOutFileStrAcc, self.Datasets.nFiles("Collision"), self.GridOptions["ColCPUs"][self.i], self.GridOptions["TmpWorkDir"] )
-					script.write(scriptStr_forCollision)
-
-
-				if self.GridOptions["CosBonCPUs"][self.i]:
-					scriptStr_forBon       = scriptStr + " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d --tmpDir %s  >& tmpSubJobInfo_Bon.txt ;       \n" % ( self.Datasets.namesList("CosBon"),    self.outDS["CosBon"], extOutFileStrAcc, self.Datasets.nFiles("Bon"),       self.GridOptions["CosBonCPUs"][self.i],   self.GridOptions["TmpWorkDir"] )
-					script.write(scriptStr_forBon)
-
-				if self.GridOptions["CosBoffCPUs"][self.i]:
-					scriptStr_forBoff      = scriptStr + " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d --tmpDir %s  >& tmpSubJobInfo_Boff.txt ;      \n" % (self.Datasets.namesList("CosBoff"), self.outDS["CosBoff"],   extOutFileStrAcc, self.Datasets.nFiles("Boff"),      self.GridOptions["CosBoffCPUs"][self.i], self.GridOptions["TmpWorkDir"] )
-					script.write(scriptStr_forBoff)
-	
-			script.close()
-
-
-		elif self.part == "Prun" :
-			print (self.SCRIPTNAME)
-			script     = open(self.SCRIPTNAME,'w')
-
-			prunSolve = open("prunSolve.py",'w')
-			prunSolve.write('import os\nimport string\nimport sys\nfrom future import standard_library\nstandard_library.install_aliases()\nimport subprocess\n\n\n')
-			prunSolve.write('inputTFiles = []\ninFiles = []\n\n\n')
-			prunSolve.write('ret, out = subprocess.getstatusoutput(\"cat input1.txt\")\n')
-			prunSolve.write('print (\" the content of file input1.txt:  \", out) \n')
-			prunSolve.write('lines = out.split(\",\")\n')
-			prunSolve.write('for line in lines:\n')
-			prunSolve.write('\tif \"AlignmentTFile.root\" in str(line):\n\t\tinputTFiles.append(line)\n')
-	
-			tmpStrJO = "newSolveJO_Iter%02d.py" % self.i
-			prunSolve.write('jofile = open( \'%s\' , \'w+\')\n' % tmpStrJO )
-
-			tmpStr = ""
 
-			if (0 == self.i) :
-				if os.path.isfile("initial_AlignmentConstants.root") :
-					tmpStr = "pool_insertFileToCatalog  initial_AlignmentConstants.root "
+                        extFileStr = ""
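+                        # --extFile ships the starting constants for this iteration (the initial set, or the previous Iter*_AlignmentConstants.root) together with Scaling.root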
 
+                        if (0 == self.i) :
+                                if os.path.isfile("initial_AlignmentConstants.root") :
+                                        extFileStr = " initial_AlignmentConstants.root,Scaling.root "
+                                else :
+                                        extFileStr = " Scaling.root "
 
-			else : 
+                        else :
+                                lastAlignConstant = "Iter%02d_AlignmentConstants.root"  %  (self.i-1)
+                                if os.path.isfile("%s"  % lastAlignConstant ) :
+                                        extFileStr = lastAlignConstant + ",Scaling.root "
 
-				alignConstants = "Iter%02d_AlignmentConstants.root" % (self.i - 1)
-				if os.path.isfile(alignConstants) :
-					tmpStr = "pool_insertFileToCatalog  %s "  % alignConstants
-				else :
-					print ("ALIGNMENT CONSTANTS %s NOT EXIST, WILL EXIT ANYHOW !!!  "  % alignConstants)
-					sys.exit()
 
+                        TNFiles = 0
+                        tmpInDS = []
+                        print (" self.outDS : " , self.outDS)
 
-			prunSolve.write('jofile.write(\'os.system(\\\"%s\\\")\\n\')\n'  % tmpStr)
-			prunSolve.write('jofile.write(\"inputTFiles = \" + str(inputTFiles) + \'\\n\')\n')
-	
-			prunSolve.write('ff = open(\'%s\', \'r\')\n' % self.JOBNAMES["Solve"] )                               
-			prunSolve.write('jofile.write(ff.read())\nff.close()\njofile.close()\n')
-	
-			prunSolve.write('os.system(\"athena.py %s \")\n'  % tmpStrJO )
+                        print (" self.outDS[Customed]: " , self.outDS["Customed"])
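+                        # Total up the AlignmentTFile.root outputs of every accumulation dataset with dq2-ls;
+                        # the single prun solve job reads them all, so --nFilesPerJob is set to that total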
+                        for item in self.outDS["Customed"] :
+                                print ("self.outDS[Customed] item:  " , item)
+                                tmpInDS.append(item+"/")
+                                rec, nFiles = subprocess.getstatusoutput("dq2-ls -f %s | grep -i files | grep -i total | cut -c 13-" %  (item+"/") )
+                                print (" nFiles of this outDS[Customed] :" , nFiles)
+                                TNFiles += int(nFiles)
+                        solveInDS = ",".join(tmpInDS)
 
-			# debugging ...
-			print ("prunSolve.py: ")
-			os.system(" cat prunSolve.py ")
-			print (" newSolveJO_Iter%02d.py: " % self.i)
-			os.system(" cat %s " % tmpStrJO)
+                        #scriptStr = " prun --exec \" python prunSolve.py \" --writeInputToTxt IN:input1.txt --express --athenaTag %s,AtlasProduction --inDS %s --outputs %s --outDS %s --extFile %s --nJobs 1 --nFilesPerJob %d -v --maxNFilesPerJob 100000 " % (self.ATHENAREL, solveInDS , extOutFileStrSol, self.outDS["Solve"], extFileStr,  int(TNFiles) )
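+                        # Group production accounts must submit with --official and the det-indet production VOMS role; plain user accounts use a normal prun call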
+                        if ( "group" in self.GridOptions["userIDnum"] ) :
+                                scriptStr = " prun --exec \" python prunSolve.py \" --writeInputToTxt IN:input1.txt --express  --useAthenaPackages --match \"*AlignmentTFile.root*\"  --official --voms=atlas:/atlas/det-indet/Role=production --inDS %s --outputs %s --outDS %s --extFile %s --nJobs 1 --nFilesPerJob %d -v --maxNFilesPerJob 100000 " % ( solveInDS , extOutFileStrSol, self.outDS["Solve"], extFileStr,  int(TNFiles) )
+                        else :
+                                scriptStr = " prun --exec \" python prunSolve.py \" --writeInputToTxt IN:input1.txt --express  --useAthenaPackages --match \"*AlignmentTFile.root*\"  --inDS %s --outputs %s --outDS %s --extFile %s --nJobs 1 --nFilesPerJob %d -v --maxNFilesPerJob 100000 " % ( solveInDS , extOutFileStrSol, self.outDS["Solve"], extFileStr,  int(TNFiles) )
 
 
-			extOutFileStrSol = ""
 
-			if (    ("sctAlignmentLevel"          in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevel"]          == 3 ) or 
-				("sctAlignmentLevelBarrel"    in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevelBarrel"]    == 3)  or 
-				("sctAlignmentLevelEndcaps"   in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevelEndcaps"]   == 3)  or 
-				("pixelAlignmentLevel"        in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevel"]        == 3)  or 
-				("pixelAlignmentLevelBarrel"  in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevelBarrel"]  == 3)  or 
-				("pixelAlignmentLevelEndcaps" in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevelEndcaps"] == 3) ) :
-				print ("hmn, you are going to run L3 alignment, Eigen is being used, so no eigen value information!!! ")
-			 
-			else : 
+                        if  (True is self.GridOptions["reUseSolveLibDS"]) and  self.GridOptions["solveLibDS"]   != "":
+                                scriptStr += " --libDS %s "        % self.GridOptions["solveLibDS"]
 
-				if ( "writeEigenMat"      in self.GridOptions  and self.GridOptions["writeEigenMat"]    == True  ) and ( self.AlignmentOptions["runLocal"] == False ):
-					extOutFileStrSol += "eigenvectors.bin,";
-					extOutFileStrSol += "eigenvalues.bin," ;
-			   
-				if ( "writeEigenMatTxt"   in self.GridOptions  and self.GridOptions["writeEigenMatTxt"] == True  ) and ( self.AlignmentOptions["runLocal"] == False ):
-					extOutFileStrSol += "eigenvectors.txt,";
-					extOutFileStrSol += "eigenvalues.txt," ;
 
+                        scriptStr += " --tmpDir %s >& tmpSubJobInfo_prunSolve.txt; \n" % self.GridOptions["TmpWorkDir"]
 
-			extOutFileStrSol = extOutFileStrSol + extOutFileStr + ",Iter%02d_AlignmentConstants.root,OldSiAlignment.txt,OutputSiAlignment.txt,alignlogfile.txt" % (self.i)
-
-
-			extFileStr = ""
-
-			if (0 == self.i) :
-				if os.path.isfile("initial_AlignmentConstants.root") :
-					extFileStr = " initial_AlignmentConstants.root,Scaling.root "
-				else : 
-					extFileStr = " Scaling.root "
+                        script.write(scriptStr)
+                        script.close()
 
-			else :
-				lastAlignConstant = "Iter%02d_AlignmentConstants.root"  %  (self.i-1)
-				if os.path.isfile("%s"  % lastAlignConstant ) :
-					extFileStr = lastAlignConstant + ",Scaling.root "
 
+                else :
+                        print ("Hi, except accumulate and solve, where do you want to go?")
 
-			TNFiles = 0
-			tmpInDS = []
-			print (" self.outDS : " , self.outDS)
-
-			print (" self.outDS[Customed]: " , self.outDS["Customed"])
-			for item in self.outDS["Customed"] :
-				print ("self.outDS[Customed] item:  " , item)
-				tmpInDS.append(item+"/")
-				rec, nFiles = subprocess.getstatusoutput("dq2-ls -f %s | grep -i files | grep -i total | cut -c 13-" %  (item+"/") )
-				print (" nFiles of this outDS[Customed] :" , nFiles)
-				TNFiles += int(nFiles)
-			solveInDS = ",".join(tmpInDS)
 
-			#scriptStr = " prun --exec \" python prunSolve.py \" --writeInputToTxt IN:input1.txt --express --athenaTag %s,AtlasProduction --inDS %s --outputs %s --outDS %s --extFile %s --nJobs 1 --nFilesPerJob %d -v --maxNFilesPerJob 100000 " % (self.ATHENAREL, solveInDS , extOutFileStrSol, self.outDS["Solve"], extFileStr,  int(TNFiles) )
-			if ( "group" in self.GridOptions["userIDnum"] ) : 
-				scriptStr = " prun --exec \" python prunSolve.py \" --writeInputToTxt IN:input1.txt --express  --useAthenaPackages --match \"*AlignmentTFile.root*\"  --official --voms=atlas:/atlas/det-indet/Role=production --inDS %s --outputs %s --outDS %s --extFile %s --nJobs 1 --nFilesPerJob %d -v --maxNFilesPerJob 100000 " % ( solveInDS , extOutFileStrSol, self.outDS["Solve"], extFileStr,  int(TNFiles) )
-			else : 
-				scriptStr = " prun --exec \" python prunSolve.py \" --writeInputToTxt IN:input1.txt --express  --useAthenaPackages --match \"*AlignmentTFile.root*\"  --inDS %s --outputs %s --outDS %s --extFile %s --nJobs 1 --nFilesPerJob %d -v --maxNFilesPerJob 100000 " % ( solveInDS , extOutFileStrSol, self.outDS["Solve"], extFileStr,  int(TNFiles) )
-
-
-
-			if  (True == self.GridOptions["reUseSolveLibDS"]) and  self.GridOptions["solveLibDS"]   != "":
-				scriptStr += " --libDS %s "        % self.GridOptions["solveLibDS"]
 
+        def send(self) :
 
-			scriptStr += " --tmpDir %s >& tmpSubJobInfo_prunSolve.txt; \n" % self.GridOptions["TmpWorkDir"]
+                os.system("pwd")
+                os.system("chmod 777 %s" % self.SCRIPTNAME)
 
-			script.write(scriptStr)
-			script.close()
+                print ("----------------------------------------------")
+                if self.part == "Accumulate":
+                        print ("Sending the %dth iteration accumulation job to grid at site ... %s" % (self.i, self.GridOptions["siteName"]))
 
+                        if self.Datasets.containType("Customed") :
+                                for i in range(len(self.Datasets.namesList("Customed"))) :
+                                        os.system(" rm -rf tmpSubJobInfo_Customed_%02d.txt" % i )
 
-		else : 
-			print ("Hi, except accumulate and solve, where do you want to go?")
+                        os.system(" rm -rf  tmpSubJobInfo_collision.txt tmpSubJobInfo_Bon.txt tmpSubJobInfo_Boff.txt ")
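+                        # Sourcing the generated script performs the actual submissions and fills the tmpSubJobInfo_*.txt logs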
+                        os.system("source %s" % self.SCRIPTNAME)
 
 
+                        if self.Datasets.containType("Customed") :
+                                if self.Datasets.doDetailedTagsConfig() :
+                                        for i in range(len(self.Datasets.namesList("Customed"))) :
+                                                os.system("cat tmpSubJobInfo_Customed_%02d.txt                 >> %s\n" % (i, self.AccSubInfo))
+                                                os.system("cat tmpSubJobInfo_Customed_%02d.txt | grep -i JobID >> %s\n" % (i, self.AccSubJobID))
+                                else :
+                                        print (" you don't plan to do detailed tags configuration for every dataset, so will submit only one job with long inDS: ")
 
-	def send(self) :
 
-		os.system("pwd")
-		os.system("chmod 777 %s" % self.SCRIPTNAME)
-
-		print ("----------------------------------------------")
-		if self.part == "Accumulate":           
-			print ("Sending the %dth iteration accumulation job to grid at site ... %s" % (self.i, self.GridOptions["siteName"]))
-		 
-			if self.Datasets.containType("Customed") :
-				for i in range(len(self.Datasets.namesList("Customed"))) :   
-					os.system(" rm -rf tmpSubJobInfo_Customed_%02d.txt" % i )
-		     
-			os.system(" rm -rf  tmpSubJobInfo_collision.txt tmpSubJobInfo_Bon.txt tmpSubJobInfo_Boff.txt ")
-			os.system("source %s" % self.SCRIPTNAME)
-		     
+                        if (self.Datasets.containType("900GeV") or self.Datasets.containType("7TeV")) :
+                                if self.GridOptions["ColCPUs"][self.i]:
+                                        os.system("cat tmpSubJobInfo_collision.txt                 >> %s\n" % (self.AccSubInfo))
+                                        os.system("cat tmpSubJobInfo_collision.txt | grep -i JobID >> %s\n" % (self.AccSubJobID))
 
-			if self.Datasets.containType("Customed") :
-				if self.Datasets.doDetailedTagsConfig() :
-					for i in range(len(self.Datasets.namesList("Customed"))) :
-						os.system("cat tmpSubJobInfo_Customed_%02d.txt                 >> %s\n" % (i, self.AccSubInfo))
-						os.system("cat tmpSubJobInfo_Customed_%02d.txt | grep -i JobID >> %s\n" % (i, self.AccSubJobID))
-				else :
-					print (" you don't plan to do detailed tags configuration for every dataset, so will submit only one job with long inDS: ")
-			     	                                       
-			     	
-			if (self.Datasets.containType("900GeV") or self.Datasets.containType("7TeV")) :
-				if self.GridOptions["ColCPUs"][self.i]:
-					os.system("cat tmpSubJobInfo_collision.txt                 >> %s\n" % (self.AccSubInfo))
-					os.system("cat tmpSubJobInfo_collision.txt | grep -i JobID >> %s\n" % (self.AccSubJobID))
-				 	
-				if self.GridOptions["CosBonCPUs"][self.i]:
-					os.system("cat tmpSubJobInfo_Bon.txt                       >> %s\n" % (self.AccSubInfo))
-					os.system("cat tmpSubJobInfo_Bon.txt       | grep -i JobID >> %s\n" % (self.AccSubJobID))
-				   	
-				if self.GridOptions["CosBoffCPUs"][self.i]:
-					os.system("cat tmpSubJobInfo_Boff.txt                      >> %s\n" % (self.AccSubInfo))
-					os.system("cat tmpSubJobInfo_Boff.txt      | grep -i JobID >> %s\n" % (self.AccSubJobID))
+                                if self.GridOptions["CosBonCPUs"][self.i]:
+                                        os.system("cat tmpSubJobInfo_Bon.txt                       >> %s\n" % (self.AccSubInfo))
+                                        os.system("cat tmpSubJobInfo_Bon.txt       | grep -i JobID >> %s\n" % (self.AccSubJobID))
 
+                                if self.GridOptions["CosBoffCPUs"][self.i]:
+                                        os.system("cat tmpSubJobInfo_Boff.txt                      >> %s\n" % (self.AccSubInfo))
+                                        os.system("cat tmpSubJobInfo_Boff.txt      | grep -i JobID >> %s\n" % (self.AccSubJobID))
 
-		elif self.part == "Prun" :
 
-			print ("  Sending the %dth iteration prun matrix solving job to grid site ... %s" % (self.i, self.GridOptions["siteName"]))
-			os.system("rm -f tmpSubJobInfo_prunSolve.txt")
-			os.system("source %s" % self.SCRIPTNAME)
-			os.system("cat tmpSubJobInfo_prunSolve.txt                 >> %s\n" % (self.AccSubInfo))
-			os.system("cat tmpSubJobInfo_prunSolve.txt | grep -i JobID >> %s\n" % (self.AccSubJobID))
-	
-			print ("----------------------------------------------")
-		
-		else : 
+                elif self.part == "Prun" :
 
-			print ("Hi, where do you want to go?")
+                        print ("  Sending the %dth iteration prun matrix solving job to grid site ... %s" % (self.i, self.GridOptions["siteName"]))
+                        os.system("rm -f tmpSubJobInfo_prunSolve.txt")
+                        os.system("source %s" % self.SCRIPTNAME)
+                        os.system("cat tmpSubJobInfo_prunSolve.txt                 >> %s\n" % (self.AccSubInfo))
+                        os.system("cat tmpSubJobInfo_prunSolve.txt | grep -i JobID >> %s\n" % (self.AccSubJobID))
 
+                        print ("----------------------------------------------")
 
+                else :
 
+                        print ("Hi, where do you want to go?")
 
-	def wait(self,logfilename):
-		print ("Pathena wait()" )
-	
-		if self.jobId == -99:
-			print ("logiflename: ",logfilename)
-			ret, out = subprocess.getstatusoutput("cat "+logfilename)
-			lines = out.split('\n')
+
+
+
+        def wait(self,logfilename):
+                print ("Pathena wait()" )
+
+                if self.jobId == -99:
+                        print ("logfilename: ",logfilename)
+                        ret, out = subprocess.getstatusoutput("cat "+logfilename)
+                        lines = out.split('\n')
              
-			# looping over all the job IDs
-			for line in lines:
-				items = line.split()
-				if len(items)>0 and items[0]=="JobID" :
-					self.jobId = int(items[2])
-					print ("jobId = ",self.jobId)
-
-				# check status of each job ID
-				# while self.bjobs() == 0:
-				while self.bjobs() != 1:       
-					print (" waiting for jobID ",self.jobId,"...")
-					time.sleep(300)
-
-
-
-
-
-	def bjobs(self) :
-		if self.jobId == -99:
-			print ("need jobId")
-			sys.exit(3)
-	
-		print ("Pathena bjobs(), jobId: ",self.jobId)
-		jobId = self.jobId
-		bjobstring = "pbook -c 'show(" + str(jobId) + ")'"
-		print (bjobstring)
-		ret, out = subprocess.getstatusoutput(bjobstring)
-		print ("statusoutput: ",out)
-		for line in out.split("\n") :
-			items_1 = line.split()
-			if len(items_1)>0 and items_1[0] != "jobStatus" :
-				continue
-	
-			if len(items_1)>2 :
-				if items_1[2] == "frozen" :
-					print ("jobStatus: " , items_1[2])
-	
-					### search the libDS #######   
-					for line2 in out.split("\n") :
-						print (" line2: " , line2)
-						items_2 = line2.split()
-						if items_2[0] == "libDS" :
-							break 
-	
-					if self.part == "Accumulate" and self.GridOptions["accumulateLibDS"] == "" :   
-						self.GridOptions["accumulateLibDS"] = items_2[2]
-						print (" self.GridOptions accumulateLibDS: " , self.GridOptions["accumulateLibDS"])
-	
-					if (self.part == "Grid" or self.part == "Prun") and self.GridOptions["solveLibDS"] == "":
-						self.GridOptions["solveLibDS"     ] = items_2[2]                               
-						print (" self.GridOptions solveLibDS: "      , self.GridOptions["solveLibDS"])
-	                                  
-					return 1
-				else :
-					print ("jobStatus: ",items_1[2])
-					return 0
-	
-
-
-	def whetherRetry(self) :               
-		nfailed   = 0
-		nfinished = 0
-		if self.jobId == -99 :
-			print ("need jobId, JOB SUBMISSION FAILED!!!, check the log files")
-			sys.exit(3)
-	
-		print ("Pathena bjobs(), jobId: ",self.jobId)
-		jobId = self.jobId
-		bjobstring = "pbook -c 'show(" + str(jobId) + ")'"
-		print (bjobstring)
-		ret, out = subprocess.getstatusoutput(bjobstring)
+                        # looping over all the job IDs
+                        for line in lines:
+                                items = line.split()
+                                if len(items)>0 and items[0]=="JobID" :
+                                        self.jobId = int(items[2])
+                                        print ("jobId = ",self.jobId)
+
+                                # check status of each job ID
+                                # while self.bjobs() == 0:
+                                while self.bjobs() != 1:
+                                        print (" waiting for jobID ",self.jobId,"...")
+                                        time.sleep(300)
+
+
+
+
+
+        def bjobs(self) :
+                if self.jobId == -99:
+                        print ("need jobId")
+                        sys.exit(3)
+
+                print ("Pathena bjobs(), jobId: ",self.jobId)
+                jobId = self.jobId
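+                # Query the job with pbook; a 'frozen' status means all subjobs are done, and the libDS is then harvested for reuse in later submissions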
+                bjobstring = "pbook -c 'show(" + str(jobId) + ")'"
+                print (bjobstring)
+                ret, out = subprocess.getstatusoutput(bjobstring)
+                print ("statusoutput: ",out)
+                for line in out.split("\n") :
+                        items_1 = line.split()
+                        if len(items_1)>0 and items_1[0] != "jobStatus" :
+                                continue
+
+                        if len(items_1)>2 :
+                                if items_1[2] == "frozen" :
+                                        print ("jobStatus: " , items_1[2])
+
+                                        ### search the libDS #######
+                                        for line2 in out.split("\n") :
+                                                print (" line2: " , line2)
+                                                items_2 = line2.split()
+                                                if len(items_2) > 0 and items_2[0] == "libDS" :
+                                                        break
+
+                                        if self.part == "Accumulate" and self.GridOptions["accumulateLibDS"] == "" :
+                                                self.GridOptions["accumulateLibDS"] = items_2[2]
+                                                print (" self.GridOptions accumulateLibDS: " , self.GridOptions["accumulateLibDS"])
+
+                                        if (self.part == "Grid" or self.part == "Prun") and self.GridOptions["solveLibDS"] == "":
+                                                self.GridOptions["solveLibDS"     ] = items_2[2]
+                                                print (" self.GridOptions solveLibDS: "      , self.GridOptions["solveLibDS"])
+
+                                        return 1
+                                else :
+                                        print ("jobStatus: ",items_1[2])
+                                        return 0
+
+
+
+        def whetherRetry(self) :
+                nfailed   = 0
+                nfinished = 0
+                if self.jobId == -99 :
+                        print ("need jobId, JOB SUBMISSION FAILED!!!, check the log files")
+                        sys.exit(3)
+
+                print ("Pathena bjobs(), jobId: ",self.jobId)
+                jobId = self.jobId
+                bjobstring = "pbook -c 'show(" + str(jobId) + ")'"
+                print (bjobstring)
+                ret, out = subprocess.getstatusoutput(bjobstring)
                
-		lines  = out.split("\n")
-		nlines = len(lines)
-		print (" nlines: " , nlines)
-	
-		for i in range(0, nlines) :
-			items = lines[i].split()
-			print (" items: " , items)
-			if "failed"   in items :
-				nfailed   = int(items[-1])
-			if "finished" in items :
-				nfinished = int(items[-1])
-				# Hi, just stop, next line will cause the loop crash ...
-				break
-
-
-		if (self.retryNo == 0) and (self.i == 0) : 
-			if ( 0 == (nfailed + nfinished - 1) ) : 
-				successRatio = -1 
-			else : 
-				successRatio = float(nfinished - 1)/(nfailed + nfinished - 1) 
-		else : 
-			successRatio = float(nfinished)/(nfailed + nfinished) 
-
-
-		print ("the success ratio: ", successRatio)
-		if successRatio >= self.GridOptions["successRatioCut"] :
-			print ("The success ratio is higher than the cut, will not retry ---")
-			return False
-		else :
-			print ("The success ratio is lower than the cut, will retry ---")
-			return True
-	
-
-
-
-
-	def retry(self) :
-		jobId = self.jobId
-		retrystring = "pbook -c 'retry(" + str(jobId) + ")'"   
-		ret, out    = subprocess.getstatusoutput(retrystring)
-		print (" out1: " , out)
-		#self.jobId =  self.jobId + 2
-
-		## get the new JobID ## 
-		for line in out.split("\n") : 
-			items = line.split() 
-			nitems = len(items) 
-			for i in range(0, nitems) : 
-				if items[i] == "New" : 
-					jobstring = items[i+1].split("=") 
-					self.jobId = int(jobstring[-1]) 
-					print ("new JobID: " , self.jobId )
-					break 
-		self.retryNo = self.retryNo + 1 
-
-
-
-		while self.bjobs() != 1 :
-			print (" waiting for the first retry jobID " , self.jobId , "...")
-			time.sleep(300)
-	
-		if self.whetherRetry() :
-			jobId = self.jobId
-			retrystring = "pbook -c 'retry(" + str(jobId) + ")'"
-			ret, out    = subprocess.getstatusoutput(retrystring)
-			print (" out2: " , out)
-	
-			#self.jobId = self.jobId + 2
-
-			## get the new JobID ## 
-			for line in out.split("\n") : 
-				items = line.split() 
-				nitems = len(items) 
-				for i in range(0, nitems) : 
-					if items[i] == "New" : 
-						jobstring = items[i+1].split("=") 
-						self.jobId = int(jobstring[-1]) 
-						print ("new JobID: " , self.jobId )
-						break 
-			self.retryNo = self.retryNo + 1
-
-
-
-			while self.bjobs() != 1 :
-				print (" waiting for the second retry jobID " , self.jobId,"...")
-				time.sleep(300)
-	
-			
-	
-	def getGridOptions(self, option = "") :
-		return self.GridOptions[option]
-	
-	def setGridOptions(self, option1 = "", option2 = "") :
-		self.GridOptions[option1] = option2
-
-
-
-
-
-
-
-
-
-class writeScriptGrid :		
-	def __init__(self,
-		     OutputPath,
-		     preName,
-		     iter,
-		     part,
-		     CMTDIR,
-		     ATHENAREL,
-		     TAGS,
-		     SCRIPTNAME,
-		     Datasets,
-		     outDS,
-		     AccSubJobID,
-		     AccSubInfo,
-		     JOBNAMES,
-		     thisJobDir,
-		     jobId,
-		     GridOptions = {},
-		     AlignmentOptions = {}
-		     ):
-		self.OutputPath       = OutputPath
-		self.preName          = preName
-		self.i                = iter
-		self.part             = part
-		self.CMTDIR           = CMTDIR
-		self.ATHENAREL        = ATHENAREL
-		self.TAGS             = TAGS
-		self.SCRIPTNAME       = SCRIPTNAME
-		self.Datasets         =  Datasets
-		self.outDS            = outDS
-		self.AccSubJobID      = AccSubJobID 
-		self.AccSubInfo       = AccSubInfo	
-		self.JOBNAMES         = JOBNAMES
-		self.thisJobDir       = thisJobDir
-		self.jobId            = -99
-		self.GridOptions      = GridOptions
-		self.AlignmentOptions = AlignmentOptions
-
-
-	def write(self):
-
-
-		extOutFile = []
-
-		extOutFile.append("hitmap.bin")
-		extOutFile.append("matrix.bin")
-		extOutFile.append("vector.bin")
-			
-
-		if (    ("sctAlignmentLevel"          in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevel"]          == 3 ) or 
-			("sctAlignmentLevelBarrel"    in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevelBarrel"]    == 3)  or 
-			("sctAlignmentLevelEndcaps"   in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevelEndcaps"]   == 3)  or 
-			("pixelAlignmentLevel"        in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevel"]        == 3)  or 
-			("pixelAlignmentLevelBarrel"  in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevelBarrel"]  == 3)  or 
-			("pixelAlignmentLevelEndcaps" in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevelEndcaps"] == 3) ) : 
-			#extOutFile.append("vector.txt")
-			print ("hmmn, you plan to do Level 3 alignment ... ")
-
-			### put the three flags writeMatrixFileTxt,writeHitmapTxt,writeEigenMatTxt in the GridOptions, not in the AlignmentOptions
-			if ( "writeMatrixFileTxt" in self.GridOptions  and self.GridOptions["writeMatrixFileTxt"] == True  ) : 
-				extOutFile.append("matrix.txt")
-				extOutFile.append("vector.txt")
-			if ( "writeHitmapTxt"     in self.GridOptions  and self.GridOptions["writeHitmapTxt"]     == True  ) :        
-				extOutFile.append("hitmap.txt")
-
-		else :
-			extOutFile.append("matrix.txt")
-			extOutFile.append("hitmap.txt")				
-			if ( "writeMatrixFileTxt" in self.GridOptions  and self.GridOptions["writeMatrixFileTxt"] == False  ) :        
-				extOutFile.remove("matrix.txt")
-			if ( "writeHitmapTxt"     in self.GridOptions  and self.GridOptions["writeHitmapTxt"    ] == False  ) :        
-				extOutFile.remove("hitmap.txt")
-
-			
-		extOutFileStr = ",".join(extOutFile)
-
-
-		if self.GridOptions["doMonitoring"] :
-			extOutFileStrAcc = extOutFileStr + ",monitoring.root"
-		if self.GridOptions["doTrkNtuple"]  :
-			extOutFileStrAcc += ",TrkValidation.root"
-
-
-		if self.part == "Accumulate":
-			print (self.SCRIPTNAME)
-			script = open(self.SCRIPTNAME,'w')
-
-			if self.Datasets.containType("Customed") : 
-				if self.Datasets.doDetailedTagsConfig() : 
-					for index in range(len(self.Datasets.namesList("Customed"))) :
-
-						if ("group" in self.GridOptions["userIDnum"] ) :
-							scriptStr   = "pathena  --official --voms=atlas:/atlas/det-indet/Role=production %s  " % (self.JOBNAMES["Customed"][index])
-						else : 
-							scriptStr   = "pathena  %s  " % (self.JOBNAMES["Customed"][index])
-						
-						if self.GridOptions["accumulateLibDS"]   != "":
-							scriptStr += " --libDS %s "        % self.GridOptions["accumulateLibDS"]
-							scriptStr += " --excludeFile %s "  % "*.cxx,*.h,*.o"
-
-						if  "removeFileList" in self.GridOptions : 
-							if self.GridOptions["removeFileList"] != "" :
-								scriptStr += " --removeFileList %s " % self.GridOptions["removeFileList"] 
-
-						if self.GridOptions["fileList"]     != "":
-							scriptStr += " --fileList %s "     % self.GridOptions["fileList"]
-						if self.GridOptions["siteName"]     != "":
-							scriptStr += " --site %s "         % self.GridOptions["siteName"]
-						if self.GridOptions["excludedSite"] != "":
-							scriptStr += " --excludedSite %s " % self.GridOptions["excludedSite"]
-						if self.GridOptions["dbRelease"]    == "":
-							scriptStr += " --dbRelease LATEST "
-						else:
-							scriptStr += " --dbRelease %s "    % self.GridOptions["dbRelease"]
-
-
-						#if self.GridOptions["reUseAccumulateLibDS"] :
-						if (0 == self.i) and os.path.isfile("initial_AlignmentConstants.root") : 
-							scriptStr += " --extFile  initial_AlignmentConstants.root,Scaling.root  "    
-						else :
-							lastAlignConstant = "Iter%02d_AlignmentConstants.root"  %  (self.i-1)
-							if os.path.isfile("%s"  % lastAlignConstant ) :  
-								scriptStr += " --extFile %s,Scaling.root  "      % lastAlignConstant    
-						
-						#print (index)
-						#print (self.Datasets.namesList("Customed")[index])
-						#print (self.outDS["Customed"][index])
-						#print (self.Datasets.nFiles("Customed")[index])
-						#print (self.GridOptions["CustomedCPUs"][index])
-
-
-						if self.GridOptions["debugLevel"] == 4 :
-							self.GridOptions["doNFilesPerJobSplit"] = False 
-							scriptStr += " --nEventsPerJob %s" % self.GridOptions["nEventsPerJob"]
-
-						if self.GridOptions["doNFilesPerJobSplit" ] :
-							if (-1 != self.GridOptions["CustomedNFiles"][index]) : 
-								scriptStr += " --inDS %s --outDS %s --extOutFile %s --nFiles %d --nFilesPerJob %d --tmpDir %s  >& tmpSubJobInfo_Customed_%02d.txt ; \n" % ( self.Datasets.namesList("Customed")[index], self.outDS["Customed"][index], extOutFileStrAcc, self.GridOptions["CustomedNFiles"][index], self.GridOptions["CustomedNFPJ"][index], self.GridOptions["TmpWorkDir"], int(index) )
-
-							else : 
-								scriptStr += " --inDS %s --outDS %s --extOutFile %s  --nFilesPerJob %d --tmpDir %s  >& tmpSubJobInfo_Customed_%02d.txt ; \n" % ( self.Datasets.namesList("Customed")[index], self.outDS["Customed"][index], extOutFileStrAcc, self.GridOptions["CustomedNFPJ"][index], self.GridOptions["TmpWorkDir"], int(index) )
-
-						else : 
-							if (-1 != self.GridOptions["CustomedNFiles"][index]) :
-								scriptStr += " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d        --tmpDir %s  >& tmpSubJobInfo_Customed_%02d.txt ; \n" % ( self.Datasets.namesList("Customed")[index], self.outDS["Customed"][index], extOutFileStrAcc, self.GridOptions["CustomedNFiles"][index], self.GridOptions["CustomedCPUs"][index], self.GridOptions["TmpWorkDir"], int(index) )
-
-							else :   
-								scriptStr += " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d        --tmpDir %s  >& tmpSubJobInfo_Customed_%02d.txt ; \n" % ( self.Datasets.namesList("Customed")[index], self.outDS["Customed"][index], extOutFileStrAcc, self.Datasets.nFiles("Customed")[index], self.GridOptions["CustomedCPUs"][index], self.GridOptions["TmpWorkDir"], int(index) )
-						script.write(scriptStr)
-
-				else : 
-					print (" you don't plan to do detailed tags configuration for every dataset, so will submit only one job with a LONG inDS: ")
-
-					if ( "group" in self.GridOptions["userIDnum"] ) :
-						scriptStr   = "pathena  --official --voms=atlas:/atlas/det-indet/Role=production  %s  " % (self.JOBNAMES["Customed"][0])
-					else :
-						scriptStr   = "pathena  %s  " % (self.JOBNAMES["Customed"][0])
-
-					#if self.GridOptions["accumulateLibDS"]   != "":
-					#	scriptStr += " --libDS %s "        % self.GridOptions["accumulateLibDS"]
-					if self.GridOptions["fileList"]     != "":
-						scriptStr += " --fileList %s "     % self.GridOptions["fileList"]
-					if self.GridOptions["siteName"]     != "":
-						scriptStr += " --site %s "         % self.GridOptions["siteName"]
-					if self.GridOptions["excludedSite"] != "":
-						scriptStr += " --excludedSite %s " % self.GridOptions["excludedSite"]
-					if self.GridOptions["dbRelease"]    == "":
-						scriptStr += " --dbRelease LATEST "
-					else:
-						scriptStr += " --dbRelease %s "    % self.GridOptions["dbRelease"]
-
-
-					#if self.GridOptions["reUseAccumulateLibDS"] :
-					if (0 == self.i) and os.path.isfile("initial_AlignmentConstants.root") :
-						scriptStr += " --extFile  initial_AlignmentConstants.root "      
-					else :
-						lastAlignConstant = "Iter%02d_AlignmentConstants.root"  %  (self.i-1)
-						if os.path.isfile("%s"  % lastAlignConstant ) :
-							scriptStr += " --extFile %s "      % lastAlignConstant
-
-
-					if self.GridOptions["doNFilesPerJobSplit" ] :
-						scriptStr += " --inDS %s --outDS %s --extOutFile %s --nFiles %d --nFilesPerJob %d --tmpDir %s  >& tmpSubJobInfo_Customed.txt ; \n" % ( self.Datasets.namesList("Customed"), self.outDS["Customed"][0], extOutFileStrAcc, self.Datasets.nFiles("Customed"), self.GridOptions["CustomedNFPJ"][0], self.GridOptions["TmpWorkDir"] )
-					else : 
-						scriptStr += " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d        --tmpDir %s  >& tmpSubJobInfo_Customed.txt ; \n" % ( self.Datasets.namesList("Customed"), self.outDS["Customed"][0], extOutFileStrAcc, self.Datasets.nFiles("Customed"), self.GridOptions["CustomedCPUs"][0], self.GridOptions["TmpWorkDir"] )
-					script.write(scriptStr)
-
-
-			######################################################################################################################################		
-			if (self.Datasets.containType("900GeV") or self.Datasets.containType("7TeV")) :
-
-				if self.GridOptions["ColCPUs"][self.i]:
-					scriptStr   = "pathena  %s  " % (self.JOBNAMES["Collision"])
-				if self.GridOptions["CosBonCPUs"][self.i]:
-					scriptStr   = "pathena  %s  " % (self.JOBNAMES["CosBon"])
-				if self.GridOptions["CosBoffCPUs"][self.i]:
-					scriptStr   = "pathena  %s  " % (self.JOBNAMES["CosBoff"])
-
-				#if self.GridOptions["accumulateLibDS"]   != "":
-				#	scriptStr += "--libDS %s "        % self.GridOptions["accumulateLibDS"]
-
-				if self.GridOptions["fileList"]     != "":
-					scriptStr += "--fileList %s "     % self.GridOptions["fileList"]
-
-				if self.GridOptions["siteName"]     != "":
-					scriptStr += "--site %s "         % self.GridOptions["siteName"]
-
-				if self.GridOptions["excludedSite"] != "":
-					scriptStr += "--excludedSite %s " % self.GridOptions["excludedSite"]
-			
-				if self.GridOptions["dbRelease"]    == "":
-					scriptStr += "--dbRelease LATEST "
-				else:
-					scriptStr += "--dbRelease %s "    % self.GridOptions["dbRelease"] 
-		
-
-				#if self.GridOptions["reUseAccumulateLibDS"] :
-
-				if (0 == self.i) and os.path.isfile("initial_AlignmentConstants.root") :
-					scriptStr += " --extFile  initial_AlignmentConstants.root "
-				else :                                         
-					lastAlignConstant = "Iter%02d_AlignmentConstants.root"  %  (self.i-1)
-					if os.path.isfile("%s"  % lastAlignConstant ) :    
-						scriptStr += " --extFile %s "      % lastAlignConstant
-
-
-        	                ########################## for submitting different topology jobs ########################################################################
-				if self.GridOptions["ColCPUs"][self.i] :
-					if self.GridOptions["doNFilesPerJobSplit" ] :
-						scriptStr_forCollision = scriptStr + " --inDS %s --outDS %s --extOutFile %s --nFiles %d --nFilesPerJob %d --tmpDir %s  >& tmpSubJobInfo_collision.txt ; \n" % ( self.Datasets.namesList("Collision"), self.outDS["Collision"], extOutFileStrAcc, self.Datasets.nFiles("Collision"), self.GridOptions["ColNFPJ"][self.i], self.GridOptions["TmpWorkDir"] )   
-					else : 
-						scriptStr_forCollision = scriptStr + " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d        --tmpDir %s  >& tmpSubJobInfo_collision.txt ; \n" % ( self.Datasets.namesList("Collision"), self.outDS["Collision"], extOutFileStrAcc, self.Datasets.nFiles("Collision"), self.GridOptions["ColCPUs"][self.i], self.GridOptions["TmpWorkDir"] )
-					script.write(scriptStr_forCollision)
-
-				if self.GridOptions["CosBonCPUs"][self.i]:
-					scriptStr_forBon       = scriptStr + " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d --tmpDir %s  >& tmpSubJobInfo_Bon.txt ;       \n" % ( self.Datasets.namesList("CosBon"),    self.outDS["CosBon"], extOutFileStrAcc, self.Datasets.nFiles("Bon"),       self.GridOptions["CosBonCPUs"][self.i],   self.GridOptions["TmpWorkDir"] )
-					script.write(scriptStr_forBon)
-
-				if self.GridOptions["CosBoffCPUs"][self.i]:
-					scriptStr_forBoff      = scriptStr + " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d --tmpDir %s  >& tmpSubJobInfo_Boff.txt ;      \n" % (self.Datasets.namesList("CosBoff"), self.outDS["CosBoff"],   extOutFileStrAcc, self.Datasets.nFiles("Boff"),      self.GridOptions["CosBoffCPUs"][self.i], self.GridOptions["TmpWorkDir"] )
-					script.write(scriptStr_forBoff)
-			script.close()
-
-
-
-		elif self.part == "Grid" :
-			print (self.SCRIPTNAME)
-			script     = open(self.SCRIPTNAME,'w')
-		
-			extOutFileStrSol = extOutFileStr + ",Iter%02d_AlignmentConstants.root,OldSiAlignment.txt,OutputSiAlignment.txt,mycool.db,alignlogfile.txt" % (self.i)
- 			#  should consider more details about how to get one files and the corresponding geometry/condition tags, below is just temporary solution 
-			'''
-			if self.Datasets.containType("Customed") :
-				scriptStr  = "pathena %s --inDS %s --fileList %s --outDS %s --extOutFile %s --nEventsPerFile 1 --nEventsPerJob 1 --nFilesPerJob 1  " % (self.JOBNAMES["Solve"], self.Datasets.oneDatasetName("Customed"), self.Datasets.oneFileName("Customed"),   self.outDS["Solve"], extOutFileStrSol)
-
-			if (self.Datasets.containType("900GeV") or self.Datasets.containType("7TeV")) :
-				scriptStr  = "pathena %s --inDS %s --fileList %s --outDS %s --extOutFile %s --nEventsPerFile 1 --nEventsPerJob 1 --nFilesPerJob 1  " % (self.JOBNAMES["Solve"], self.Datasets.oneDatasetName("Collision"), self.Datasets.oneFileName("Collision"), self.outDS["Solve"], extOutFileStrSol)
-			'''
+                lines  = out.split("\n")
+                nlines = len(lines)
+                print (" nlines: " , nlines)
+
+                for i in range(0, nlines) :
+                        items = lines[i].split()
+                        print (" items: " , items)
+                        if "failed"   in items :
+                                nfailed   = int(items[-1])
+                        if "finished" in items :
+                                nfinished = int(items[-1])
+                                # Hi, just stop, next line will cause the loop crash ...
+                                break
+
+
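+                # On the very first pass one counted job is not a real accumulation subjob
+                # (presumably the build job), so it is excluded from the success ratio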
+                if (self.retryNo == 0) and (self.i == 0) :
+                        if ( 0 == (nfailed + nfinished - 1) ) :
+                                successRatio = -1
+                        else :
+                                successRatio = float(nfinished - 1)/(nfailed + nfinished - 1)
+                else :
+                        successRatio = float(nfinished)/(nfailed + nfinished)
+
+
+                print ("the success ratio: ", successRatio)
+                if successRatio >= self.GridOptions["successRatioCut"] :
+                        print ("The success ratio is higher than the cut, will not retry ---")
+                        return False
+                else :
+                        print ("The success ratio is lower than the cut, will retry ---")
+                        return True
+
+
+
+
+
+        def retry(self) :
+                jobId = self.jobId
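+                # Retry the failed subjobs through pbook and parse the new JobID from its output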
+                retrystring = "pbook -c 'retry(" + str(jobId) + ")'"
+                ret, out    = subprocess.getstatusoutput(retrystring)
+                print (" out1: " , out)
+                #self.jobId =  self.jobId + 2
+
+                ## get the new JobID ##
+                for line in out.split("\n") :
+                        items = line.split()
+                        nitems = len(items)
+                        for i in range(0, nitems) :
+                                if items[i] == "New" :
+                                        jobstring = items[i+1].split("=")
+                                        self.jobId = int(jobstring[-1])
+                                        print ("new JobID: " , self.jobId )
+                                        break
+                self.retryNo = self.retryNo + 1
+
+
+
+                while self.bjobs() != 1 :
+                        print (" waiting for the first retry jobID " , self.jobId , "...")
+                        time.sleep(300)
+
+                if self.whetherRetry() :
+                        jobId = self.jobId
+                        retrystring = "pbook -c 'retry(" + str(jobId) + ")'"
+                        ret, out    = subprocess.getstatusoutput(retrystring)
+                        print (" out2: " , out)
+
+                        #self.jobId = self.jobId + 2
+
+                        ## get the new JobID ##
+                        for line in out.split("\n") :
+                                items = line.split()
+                                nitems = len(items)
+                                for i in range(0, nitems) :
+                                        if items[i] == "New" :
+                                                jobstring = items[i+1].split("=")
+                                                self.jobId = int(jobstring[-1])
+                                                print ("new JobID: " , self.jobId )
+                                                break
+                        self.retryNo = self.retryNo + 1
+
+
+
+                        while self.bjobs() != 1 :
+                                print (" waiting for the second retry jobID " , self.jobId,"...")
+                                time.sleep(300)
+
+
+        
+        def getGridOptions(self, option = "") :
+                return self.GridOptions[option]
+
+        def setGridOptions(self, option1 = "", option2 = "") :
+                self.GridOptions[option1] = option2
+
+
+
+
+
+
+
+
+
+class writeScriptGrid :
+        def __init__(self,
+                     OutputPath,
+                     preName,
+                     iter,
+                     part,
+                     CMTDIR,
+                     ATHENAREL,
+                     TAGS,
+                     SCRIPTNAME,
+                     Datasets,
+                     outDS,
+                     AccSubJobID,
+                     AccSubInfo,
+                     JOBNAMES,
+                     thisJobDir,
+                     jobId,
+                     GridOptions = {},
+                     AlignmentOptions = {}
+                     ):
+                self.OutputPath       = OutputPath
+                self.preName          = preName
+                self.i                = iter
+                self.part             = part
+                self.CMTDIR           = CMTDIR
+                self.ATHENAREL        = ATHENAREL
+                self.TAGS             = TAGS
+                self.SCRIPTNAME       = SCRIPTNAME
+                self.Datasets         =  Datasets
+                self.outDS            = outDS
+                self.AccSubJobID      = AccSubJobID
+                self.AccSubInfo       = AccSubInfo
+                self.JOBNAMES         = JOBNAMES
+                self.thisJobDir       = thisJobDir
+                self.jobId            = -99
+                self.GridOptions      = GridOptions
+                self.AlignmentOptions = AlignmentOptions
+
+
+        def write(self):
+
+
+                extOutFile = []
+
+                extOutFile.append("hitmap.bin")
+                extOutFile.append("matrix.bin")
+                extOutFile.append("vector.bin")
+
+
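+                # Level-3 alignment uses the Eigen-based solver: matrix/hitmap text dumps are opt-in there, while lower levels write them unless explicitly disabled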
+                if (    ("sctAlignmentLevel"          in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevel"]          == 3 ) or 
+                        ("sctAlignmentLevelBarrel"    in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevelBarrel"]    == 3)  or
+                        ("sctAlignmentLevelEndcaps"   in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevelEndcaps"]   == 3)  or
+                        ("pixelAlignmentLevel"        in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevel"]        == 3)  or
+                        ("pixelAlignmentLevelBarrel"  in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevelBarrel"]  == 3)  or
+                        ("pixelAlignmentLevelEndcaps" in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevelEndcaps"] == 3) ) :
+                        #extOutFile.append("vector.txt")
+                        print ("hmmn, you plan to do Level 3 alignment ... ")
+
+                        ### put the three flags writeMatrixFileTxt,writeHitmapTxt,writeEigenMatTxt in the GridOptions, not in the AlignmentOptions
+                        if ( "writeMatrixFileTxt" in self.GridOptions  and self.GridOptions["writeMatrixFileTxt"] is True  ) :
+                                extOutFile.append("matrix.txt")
+                                extOutFile.append("vector.txt")
+                        if ( "writeHitmapTxt"     in self.GridOptions  and self.GridOptions["writeHitmapTxt"]     is True  ) :
+                                extOutFile.append("hitmap.txt")
+
+                else :
+                        extOutFile.append("matrix.txt")
+                        extOutFile.append("hitmap.txt")
+                        if ( "writeMatrixFileTxt" in self.GridOptions  and self.GridOptions["writeMatrixFileTxt"] is False  ) :
+                                extOutFile.remove("matrix.txt")
+                        if ( "writeHitmapTxt"     in self.GridOptions  and self.GridOptions["writeHitmapTxt"    ] is False  ) :
+                                extOutFile.remove("hitmap.txt")
+
+
+                extOutFileStr = ",".join(extOutFile)
+
+
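+                # Optional monitoring and track-validation ntuples are appended to the standard accumulation outputs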
+                extOutFileStrAcc = extOutFileStr
+                if self.GridOptions["doMonitoring"] :
+                        extOutFileStrAcc += ",monitoring.root"
+                if self.GridOptions["doTrkNtuple"]  :
+                        extOutFileStrAcc += ",TrkValidation.root"
+
+
+                if self.part == "Accumulate":
+                        print (self.SCRIPTNAME)
+                        script = open(self.SCRIPTNAME,'w')
+
+                        if self.Datasets.containType("Customed") :
+                                if self.Datasets.doDetailedTagsConfig() :
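+                                        # With detailed tags configuration every Customed dataset gets its own pathena submission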
+                                        for index in range(len(self.Datasets.namesList("Customed"))) :
+
+                                                if ("group" in self.GridOptions["userIDnum"] ) :
+                                                        scriptStr   = "pathena  --official --voms=atlas:/atlas/det-indet/Role=production %s  " % (self.JOBNAMES["Customed"][index])
+                                                else :
+                                                        scriptStr   = "pathena  %s  " % (self.JOBNAMES["Customed"][index])
+
+                                                if self.GridOptions["accumulateLibDS"]   != "":
+                                                        scriptStr += " --libDS %s "        % self.GridOptions["accumulateLibDS"]
+                                                        scriptStr += " --excludeFile %s "  % "*.cxx,*.h,*.o"
+
+                                                if  "removeFileList" in self.GridOptions :
+                                                        if self.GridOptions["removeFileList"] != "" :
+                                                                scriptStr += " --removeFileList %s " % self.GridOptions["removeFileList"]
+
+                                                if self.GridOptions["fileList"]     != "":
+                                                        scriptStr += " --fileList %s "     % self.GridOptions["fileList"]
+                                                if self.GridOptions["siteName"]     != "":
+                                                        scriptStr += " --site %s "         % self.GridOptions["siteName"]
+                                                if self.GridOptions["excludedSite"] != "":
+                                                        scriptStr += " --excludedSite %s " % self.GridOptions["excludedSite"]
+                                                if self.GridOptions["dbRelease"]    == "":
+                                                        scriptStr += " --dbRelease LATEST "
+                                                else:
+                                                        scriptStr += " --dbRelease %s "    % self.GridOptions["dbRelease"]
+
+
+                                                #if self.GridOptions["reUseAccumulateLibDS"] :
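+                                                # Ship the constants from the previous iteration (or the initial set) plus Scaling.root via --extFile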
+                                                if (0 == self.i) and os.path.isfile("initial_AlignmentConstants.root") :
+                                                        scriptStr += " --extFile  initial_AlignmentConstants.root,Scaling.root  "
+                                                else :
+                                                        lastAlignConstant = "Iter%02d_AlignmentConstants.root"  %  (self.i-1)
+                                                        if os.path.isfile("%s"  % lastAlignConstant ) :
+                                                                scriptStr += " --extFile %s,Scaling.root  "      % lastAlignConstant
+
+                                                #print (index)
+                                                #print (self.Datasets.namesList("Customed")[index])
+                                                #print (self.outDS["Customed"][index])
+                                                #print (self.Datasets.nFiles("Customed")[index])
+                                                #print (self.GridOptions["CustomedCPUs"][index])
+
+
+                                                if self.GridOptions["debugLevel"] == 4 :
+                                                        self.GridOptions["doNFilesPerJobSplit"] = False
+                                                        scriptStr += " --nEventsPerJob %s" % self.GridOptions["nEventsPerJob"]
+
+                                                if self.GridOptions["doNFilesPerJobSplit" ] :
+                                                        if (-1 != self.GridOptions["CustomedNFiles"][index]) :
+                                                                scriptStr += " --inDS %s --outDS %s --extOutFile %s --nFiles %d --nFilesPerJob %d --tmpDir %s  >& tmpSubJobInfo_Customed_%02d.txt ; \n" % ( self.Datasets.namesList("Customed")[index], self.outDS["Customed"][index], extOutFileStrAcc, self.GridOptions["CustomedNFiles"][index], self.GridOptions["CustomedNFPJ"][index], self.GridOptions["TmpWorkDir"], int(index) )
+
+                                                        else :
+                                                                scriptStr += " --inDS %s --outDS %s --extOutFile %s  --nFilesPerJob %d --tmpDir %s  >& tmpSubJobInfo_Customed_%02d.txt ; \n" % ( self.Datasets.namesList("Customed")[index], self.outDS["Customed"][index], extOutFileStrAcc, self.GridOptions["CustomedNFPJ"][index], self.GridOptions["TmpWorkDir"], int(index) )
+
+                                                else :
+                                                        if (-1 != self.GridOptions["CustomedNFiles"][index]) :
+                                                                scriptStr += " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d        --tmpDir %s  >& tmpSubJobInfo_Customed_%02d.txt ; \n" % ( self.Datasets.namesList("Customed")[index], self.outDS["Customed"][index], extOutFileStrAcc, self.GridOptions["CustomedNFiles"][index], self.GridOptions["CustomedCPUs"][index], self.GridOptions["TmpWorkDir"], int(index) )
+
+                                                        else :
+                                                                scriptStr += " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d        --tmpDir %s  >& tmpSubJobInfo_Customed_%02d.txt ; \n" % ( self.Datasets.namesList("Customed")[index], self.outDS["Customed"][index], extOutFileStrAcc, self.Datasets.nFiles("Customed")[index], self.GridOptions["CustomedCPUs"][index], self.GridOptions["TmpWorkDir"], int(index) )
+                                                script.write(scriptStr)
+
+                                else :
+                                        print (" you don't plan to do detailed tags configuration for every dataset, so will submit only one job with a LONG inDS: ")
+
+                                        if ( "group" in self.GridOptions["userIDnum"] ) :
+                                                scriptStr   = "pathena  --official --voms=atlas:/atlas/det-indet/Role=production  %s  " % (self.JOBNAMES["Customed"][0])
+                                        else :
+                                                scriptStr   = "pathena  %s  " % (self.JOBNAMES["Customed"][0])
+
+                                        #if self.GridOptions["accumulateLibDS"]   != "":
+                                        #       scriptStr += " --libDS %s "        % self.GridOptions["accumulateLibDS"]
+                                        if self.GridOptions["fileList"]     != "":
+                                                scriptStr += " --fileList %s "     % self.GridOptions["fileList"]
+                                        if self.GridOptions["siteName"]     != "":
+                                                scriptStr += " --site %s "         % self.GridOptions["siteName"]
+                                        if self.GridOptions["excludedSite"] != "":
+                                                scriptStr += " --excludedSite %s " % self.GridOptions["excludedSite"]
+                                        if self.GridOptions["dbRelease"]    == "":
+                                                scriptStr += " --dbRelease LATEST "
+                                        else:
+                                                scriptStr += " --dbRelease %s "    % self.GridOptions["dbRelease"]
+
+
+                                        #if self.GridOptions["reUseAccumulateLibDS"] :
+                                        if (0 == self.i) and os.path.isfile("initial_AlignmentConstants.root") :
+                                                scriptStr += " --extFile  initial_AlignmentConstants.root "
+                                        else :
+                                                lastAlignConstant = "Iter%02d_AlignmentConstants.root"  %  (self.i-1)
+                                                if os.path.isfile("%s"  % lastAlignConstant ) :
+                                                        scriptStr += " --extFile %s "      % lastAlignConstant
+
+
+                                        if self.GridOptions["doNFilesPerJobSplit" ] :
+                                                scriptStr += " --inDS %s --outDS %s --extOutFile %s --nFiles %d --nFilesPerJob %d --tmpDir %s  >& tmpSubJobInfo_Customed.txt ; \n" % ( self.Datasets.namesList("Customed"), self.outDS["Customed"][0], extOutFileStrAcc, self.Datasets.nFiles("Customed"), self.GridOptions["CustomedNFPJ"][0], self.GridOptions["TmpWorkDir"] )
+                                        else :
+                                                scriptStr += " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d        --tmpDir %s  >& tmpSubJobInfo_Customed.txt ; \n" % ( self.Datasets.namesList("Customed"), self.outDS["Customed"][0], extOutFileStrAcc, self.Datasets.nFiles("Customed"), self.GridOptions["CustomedCPUs"][0], self.GridOptions["TmpWorkDir"] )
+                                        script.write(scriptStr)
+
+
+                        ######################################################################################################################################
+                        if (self.Datasets.containType("900GeV") or self.Datasets.containType("7TeV")) :
+
+                                if self.GridOptions["ColCPUs"][self.i]:
+                                        scriptStr   = "pathena  %s  " % (self.JOBNAMES["Collision"])
+                                if self.GridOptions["CosBonCPUs"][self.i]:
+                                        scriptStr   = "pathena  %s  " % (self.JOBNAMES["CosBon"])
+                                if self.GridOptions["CosBoffCPUs"][self.i]:
+                                        scriptStr   = "pathena  %s  " % (self.JOBNAMES["CosBoff"])
+
+                                #if self.GridOptions["accumulateLibDS"]   != "":
+                                #       scriptStr += "--libDS %s "        % self.GridOptions["accumulateLibDS"]
+
+                                if self.GridOptions["fileList"]     != "":
+                                        scriptStr += "--fileList %s "     % self.GridOptions["fileList"]
+
+                                if self.GridOptions["siteName"]     != "":
+                                        scriptStr += "--site %s "         % self.GridOptions["siteName"]
+
+                                if self.GridOptions["excludedSite"] != "":
+                                        scriptStr += "--excludedSite %s " % self.GridOptions["excludedSite"]
+
+                                if self.GridOptions["dbRelease"]    == "":
+                                        scriptStr += "--dbRelease LATEST "
+                                else:
+                                        scriptStr += "--dbRelease %s "    % self.GridOptions["dbRelease"]
+
+
+                                #if self.GridOptions["reUseAccumulateLibDS"] :
+
+                                if (0 == self.i) and os.path.isfile("initial_AlignmentConstants.root") :
+                                        scriptStr += " --extFile  initial_AlignmentConstants.root "
+                                else :
+                                        lastAlignConstant = "Iter%02d_AlignmentConstants.root"  %  (self.i-1)
+                                        if os.path.isfile("%s"  % lastAlignConstant ) :
+                                                scriptStr += " --extFile %s "      % lastAlignConstant
+
+
+                                ########################## for submitting different topology jobs ########################################################################
+                                if self.GridOptions["ColCPUs"][self.i] :
+                                        if self.GridOptions["doNFilesPerJobSplit" ] :
+                                                scriptStr_forCollision = scriptStr + " --inDS %s --outDS %s --extOutFile %s --nFiles %d --nFilesPerJob %d --tmpDir %s  >& tmpSubJobInfo_collision.txt ; \n" % ( self.Datasets.namesList("Collision"), self.outDS["Collision"], extOutFileStrAcc, self.Datasets.nFiles("Collision"), self.GridOptions["ColNFPJ"][self.i], self.GridOptions["TmpWorkDir"] )
+                                        else :
+                                                scriptStr_forCollision = scriptStr + " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d        --tmpDir %s  >& tmpSubJobInfo_collision.txt ; \n" % ( self.Datasets.namesList("Collision"), self.outDS["Collision"], extOutFileStrAcc, self.Datasets.nFiles("Collision"), self.GridOptions["ColCPUs"][self.i], self.GridOptions["TmpWorkDir"] )
+                                        script.write(scriptStr_forCollision)
+
+                                if self.GridOptions["CosBonCPUs"][self.i]:
+                                        scriptStr_forBon       = scriptStr + " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d --tmpDir %s  >& tmpSubJobInfo_Bon.txt ;       \n" % ( self.Datasets.namesList("CosBon"),    self.outDS["CosBon"], extOutFileStrAcc, self.Datasets.nFiles("Bon"),       self.GridOptions["CosBonCPUs"][self.i],   self.GridOptions["TmpWorkDir"] )
+                                        script.write(scriptStr_forBon)
+
+                                if self.GridOptions["CosBoffCPUs"][self.i]:
+                                        scriptStr_forBoff      = scriptStr + " --inDS %s --outDS %s --extOutFile %s --nFiles %d --split %d --tmpDir %s  >& tmpSubJobInfo_Boff.txt ;      \n" % (self.Datasets.namesList("CosBoff"), self.outDS["CosBoff"],   extOutFileStrAcc, self.Datasets.nFiles("Boff"),      self.GridOptions["CosBoffCPUs"][self.i], self.GridOptions["TmpWorkDir"] )
+                                        script.write(scriptStr_forBoff)
+                        script.close()
+
+
+
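+                # Grid solving part: build the pathena command that runs the solving job options in a single grid job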
+                elif self.part == "Grid" :
+                        print (self.SCRIPTNAME)
+                        script     = open(self.SCRIPTNAME,'w')
+
+                        extOutFileStrSol = extOutFileStr + ",Iter%02d_AlignmentConstants.root,OldSiAlignment.txt,OutputSiAlignment.txt,mycool.db,alignlogfile.txt" % (self.i)
+                        #  should consider in more detail how to pick one file and the corresponding geometry/condition tags; the block below is just a temporary solution
+                        '''
+                        if self.Datasets.containType("Customed") :
+                                scriptStr  = "pathena %s --inDS %s --fileList %s --outDS %s --extOutFile %s --nEventsPerFile 1 --nEventsPerJob 1 --nFilesPerJob 1  " % (self.JOBNAMES["Solve"], self.Datasets.oneDatasetName("Customed"), self.Datasets.oneFileName("Customed"),   self.outDS["Solve"], extOutFileStrSol)
+
+                        if (self.Datasets.containType("900GeV") or self.Datasets.containType("7TeV")) :
+                                scriptStr  = "pathena %s --inDS %s --fileList %s --outDS %s --extOutFile %s --nEventsPerFile 1 --nEventsPerJob 1 --nFilesPerJob 1  " % (self.JOBNAMES["Solve"], self.Datasets.oneDatasetName("Collision"), self.Datasets.oneFileName("Collision"), self.outDS["Solve"], extOutFileStrSol)
+                        '''
  
-			if ( "group" in self.GridOptions["userIDnum"] ) :
-				scriptStr  = "pathena  --official --voms=atlas:/atlas/det-indet/Role=production  %s  --outDS %s --extOutFile %s --nEventsPerFile 1 --nEventsPerJob 1 --nFilesPerJob 1  " % (self.JOBNAMES["Solve"], self.outDS["Solve"], extOutFileStrSol)
-			      
-			else :
-				scriptStr  = "pathena %s  --outDS %s --extOutFile %s --nEventsPerFile 1 --nEventsPerJob 1 --nFilesPerJob 1  " % (self.JOBNAMES["Solve"], self.outDS["Solve"], extOutFileStrSol)
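+                        # group production accounts submit with the official det-indet production role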
+                        if ( "group" in self.GridOptions["userIDnum"] ) :
+                                scriptStr  = "pathena  --official --voms=atlas:/atlas/det-indet/Role=production  %s  --outDS %s --extOutFile %s --nEventsPerFile 1 --nEventsPerJob 1 --nFilesPerJob 1  " % (self.JOBNAMES["Solve"], self.outDS["Solve"], extOutFileStrSol)
+
+                        else :
+                                scriptStr  = "pathena %s  --outDS %s --extOutFile %s --nEventsPerFile 1 --nEventsPerJob 1 --nFilesPerJob 1  " % (self.JOBNAMES["Solve"], self.outDS["Solve"], extOutFileStrSol)
+
+
+                        if ( True is self.GridOptions["reUseSolveLibDS"] ) and self.GridOptions["solveLibDS"]   != "":
+                                scriptStr += "--libDS %s "        % self.GridOptions["solveLibDS"]
+
+                        if self.GridOptions["siteName"]     != "":
+                                scriptStr += "--site %s "         % self.GridOptions["siteName"]
+
+                        if self.GridOptions["excludedSite"] != "":
+                                scriptStr += "--excludedSite %s " % self.GridOptions["excludedSite"]
+
+                        if self.GridOptions["dbRelease"]    == "":
+                                scriptStr += "--dbRelease LATEST "
+                        else:
+                                scriptStr += "--dbRelease %s "    % self.GridOptions["dbRelease"]
+
 
 
-			if ( True == self.GridOptions["reUseSolveLibDS"] ) and self.GridOptions["solveLibDS"]   != "":
-				scriptStr += "--libDS %s "        % self.GridOptions["solveLibDS"]
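+                        # attach the most recent alignment constants (the initial set or those from the previous iteration)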
+                        if (0 == self.i) and os.path.isfile("initial_AlignmentConstants.root") :
+                                scriptStr += " --extFile  initial_AlignmentConstants.root,"
+                        else :
+                                lastAlignConstant = "Iter%02d_AlignmentConstants.root"  %  (self.i-1)
+                                if os.path.isfile("%s"  % lastAlignConstant ) :
+                                        scriptStr += " --extFile %s,"      % lastAlignConstant
 
-			if self.GridOptions["siteName"]     != "":
-				scriptStr += "--site %s "         % self.GridOptions["siteName"]
+                        scriptStr += "*.bin "
 
-			if self.GridOptions["excludedSite"] != "":
-				scriptStr += "--excludedSite %s " % self.GridOptions["excludedSite"]
+                        if self.GridOptions["reUseSolveLibDS"] :
+                                #ret, out = subprocess.getstatusoutput("cat %s" % GridAccOutDS)
+                                ret, out = subprocess.getstatusoutput("cat GridAccOutDS.txt")
+                                print ("out: ",out)
+                                #lines = out.split('\n')
+                                #extFiles = []
 
-			if self.GridOptions["dbRelease"]    == "":
-				scriptStr += "--dbRelease LATEST "
-			else:
-				scriptStr += "--dbRelease %s "    % self.GridOptions["dbRelease"]
 
+                                #fileName = open('GridAccOutDS.txt', 'r+')
+                                #ret, out = subprocess.getstatusoutput('fileName.read()')
+                                #lines = out.split('\n')
+                                #extFiles = []
 
+                                #for line in lines :
+                                #       print ("line: ", line)
+                                #       thisStr = "%s/\*.bin" % line
+                                #       extFiles.append(thisStr)
+                                #print (" thisStr: " , thisStr)
+                                #extFileStr = ",".join(extFiles)
+                                #print (" extFileStr: " , extFileStr)
+                                #scriptStr += " --extFile %s "     %  extFileStr
 
-			if (0 == self.i) and os.path.isfile("initial_AlignmentConstants.root") :
-				scriptStr += " --extFile  initial_AlignmentConstants.root,"
-			else :
-				lastAlignConstant = "Iter%02d_AlignmentConstants.root"  %  (self.i-1)
-				if os.path.isfile("%s"  % lastAlignConstant ) :
-					scriptStr += " --extFile %s,"      % lastAlignConstant
 
-			scriptStr += "*.bin "
 
-			if self.GridOptions["reUseSolveLibDS"] : 
-				#ret, out = subprocess.getstatusoutput("cat %s" % GridAccOutDS)
-				ret, out = subprocess.getstatusoutput("cat GridAccOutDS.txt")
-				print ("out: ",out)
-				lines = out.split('\n')
-				extFiles = []
-				 
+                        scriptStr += " --tmpDir %s >& tmpSubJobInfo_solve.txt; \n" % self.GridOptions["TmpWorkDir"]
 
-				#fileName = open('GridAccOutDS.txt', 'r+')				
- 				#ret, out = subprocess.getstatusoutput('fileName.read()')
-				#lines = out.split('\n')
-				#extFiles = []
+                        script.write(scriptStr)
+                        script.close()
 
-				#for line in lines :
-				#	print ("line: ", line)
-				#	thisStr = "%s/\*.bin" % line
-				#	extFiles.append(thisStr)
-				#print (" thisStr: " , thisStr)
-				#extFileStr = ",".join(extFiles)
-				#print (" extFileStr: " , extFileStr)
-				#scriptStr += " --extFile %s "     %  extFileStr
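+                # Prun solving part: generate prunSolve.py, which stages the accumulated .bin files listed in input1.txt and runs athena on the worker node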
+                elif self.part == "Prun" :
+                        print (self.SCRIPTNAME)
+                        script     = open(self.SCRIPTNAME,'w')
 
 
+                        prunSolve = open("prunSolve.py",'w')
+                        prunSolve.write('import os\nimport string\nimport sys\nfrom future import standard_library\nstandard_library.install_aliases()\nimport subprocess\n\n')
+                        prunSolve.write('inputHitmapFiles = []\ninputMatrixFiles = []\ninputVectorFiles = []\ninFiles = []\n\n\n')
+                        prunSolve.write('ret, out = subprocess.getstatusoutput(\"cat input1.txt\")\n')
+                        prunSolve.write('print (\" the content of file input1.txt:  \", out) \n')
+                        prunSolve.write('lines = out.split(\",\")\n')
+                        prunSolve.write('for line in lines:\n')
+                        prunSolve.write('\tif \"hitmap.bin\" in str(line):\n\t\tif \"dcap://\" in str(line) :\n\t\t\tos.system(\" dccp %s ./\" % str(line) )\n\t\t\tlineHitmap = line.split(\'/\')[-1]\n\t\t\tprint (\'file name retrieved after splitting dcap address is %s \' % lineHitmap)\n\t\t\tinputHitmapFiles.append(lineHitmap)\n\t\telse : \n\t\t\tinputHitmapFiles.append(line)\n')
+                        prunSolve.write('\tif \"vector.bin\" in str(line):\n\t\tif \"dcap://\" in str(line) :\n\t\t\tos.system(\" dccp %s ./\" % str(line) )\n\t\t\tlineVector = line.split(\'/\')[-1]\n\t\t\tprint (\'file name retrieved after splitting dcap address is %s \' % lineVector)\n\t\t\tinputVectorFiles.append(lineVector)\n\t\telse : \n\t\t\tinputVectorFiles.append(line)\n')
+                        prunSolve.write('\tif \"matrix.bin\" in str(line):\n\t\tif \"dcap://\" in str(line) :\n\t\t\tos.system(\" dccp %s ./\" % str(line) )\n\t\t\tlineMatrix = line.split(\'/\')[-1]\n\t\t\tprint (\'file name retrieved after splitting dcap address is %s \' % lineMatrix)\n\t\t\tinputMatrixFiles.append(lineMatrix)\n\t\telse : \n\t\t\tinputMatrixFiles.append(line)\n')
 
-			scriptStr += " --tmpDir %s >& tmpSubJobInfo_solve.txt; \n" % self.GridOptions["TmpWorkDir"]
+                        tmpStrJO = "newSolveJO_Iter%02d.py" % self.i
+                        prunSolve.write('jofile = open( \'%s\' , \'w+\')\n' % tmpStrJO )
 
-			script.write(scriptStr)
-			script.close()
 
-		elif self.part == "Prun" :
-			print (self.SCRIPTNAME)
-			script     = open(self.SCRIPTNAME,'w')
+                        tmpStr = ""
+                        if (0 == self.i) :
+                                if os.path.isfile("initial_AlignmentConstants.root") :
+                                        tmpStr = "pool_insertFileToCatalog  initial_AlignmentConstants.root "
+                        else :
+                                alignConstants = "Iter%02d_AlignmentConstants.root" % (self.i - 1)
+                                if os.path.isfile(alignConstants) :
+                                        tmpStr = "pool_insertFileToCatalog  %s "  % alignConstants
+                                else :
+                                        print ("ALIGNMENT CONSTANTS %s DO NOT EXIST, EXITING !!!"  % alignConstants)
+                                        sys.exit()
 
 
-			prunSolve = open("prunSolve.py",'w')
-			prunSolve.write('import os\nimport string\nimport sys\nfrom future import standard_library\nstandard_library.install_aliases()\nimport subprocess\n\n')
-			prunSolve.write('inputHitmapFiles = []\ninputMatrixFiles = []\ninputVectorFiles = []\ninFiles = []\n\n\n')
-			prunSolve.write('ret, out = subprocess.getstatusoutput(\"cat input1.txt\")\n')
-			prunSolve.write('print (\" the content of file input1.txt:  \", out) \n')
-			prunSolve.write('lines = out.split(\",\")\n')
-			prunSolve.write('for line in lines:\n')
-			prunSolve.write('\tif \"hitmap.bin\" in str(line):\n\t\tif \"dcap://\" in str(line) :\n\t\t\tos.system(\" dccp %s ./\" % str(line) )\n\t\t\tlineHitmap = line.split(\'/\')[-1]\n\t\t\tprint (\'file name retrieved after splitting dcap address is %s \' % lineHitmap)\n\t\t\tinputHitmapFiles.append(lineHitmap)\n\t\telse : \n\t\t\tinputHitmapFiles.append(line)\n')
-			prunSolve.write('\tif \"vector.bin\" in str(line):\n\t\tif \"dcap://\" in str(line) :\n\t\t\tos.system(\" dccp %s ./\" % str(line) )\n\t\t\tlineVector = line.split(\'/\')[-1]\n\t\t\tprint (\'file name retrieved after splitting dcap address is %s \' % lineVector)\n\t\t\tinputVectorFiles.append(lineVector)\n\t\telse : \n\t\t\tinputVectorFiles.append(line)\n')
-			prunSolve.write('\tif \"matrix.bin\" in str(line):\n\t\tif \"dcap://\" in str(line) :\n\t\t\tos.system(\" dccp %s ./\" % str(line) )\n\t\t\tlineMatrix = line.split(\'/\')[-1]\n\t\t\tprint (\'file name retrieved after splitting dcap address is %s \' % lineMatrix)\n\t\t\tinputMatrixFiles.append(lineMatrix)\n\t\telse : \n\t\t\tinputMatrixFiles.append(line)\n')
+                        prunSolve.write('jofile.write(\'os.system(\\\"%s\\\")\\n\')\n'  % tmpStr)
+                        prunSolve.write('jofile.write(\"inputHitmapFiles = \" + str(inputHitmapFiles) + \'\\n\')\n')
+                        prunSolve.write('jofile.write(\"inputVectorFiles = \" + str(inputVectorFiles) + \'\\n\')\n')
+                        prunSolve.write('jofile.write(\"inputMatrixFiles = \" + str(inputMatrixFiles) + \'\\n\')\n')
 
-			tmpStrJO = "newSolveJO_Iter%02d.py" % self.i
-			prunSolve.write('jofile = open( \'%s\' , \'w+\')\n' % tmpStrJO )
+                        prunSolve.write('ff = open(\'%s\', \'r\')\n' % self.JOBNAMES["Solve"] )
+                        prunSolve.write('jofile.write(ff.read())\nff.close()\njofile.close()\n')
 
+                        prunSolve.write('os.system(\"athena.py %s \")\n'  % tmpStrJO )
 
-			tmpStr = ""
-			if (0 == self.i) : 
-				if os.path.isfile("initial_AlignmentConstants.root") :
-					tmpStr = "pool_insertFileToCatalog  initial_AlignmentConstants.root "
-			else :
-				alignConstants = "Iter%02d_AlignmentConstants.root" % (self.i - 1)
-				if os.path.isfile(alignConstants) :
-					tmpStr = "pool_insertFileToCatalog  %s "  % alignConstants
-				else : 
-					print ("ALIGNMENT CONSTANTS %s NOT EXIST, WILL EXIT ANYHOW !!!  "  % alignConstants)
-					sys.exit()
+                        # debugging ...
+                        print ("prunSolve.py: ")
+                        os.system(" cat prunSolve.py ")
+                        print (" newSolveJO_Iter%02d.py: " % self.i)
+                        os.system(" cat %s " % tmpStrJO)
 
 
-			prunSolve.write('jofile.write(\'os.system(\\\"%s\\\")\\n\')\n'  % tmpStr)
-			prunSolve.write('jofile.write(\"inputHitmapFiles = \" + str(inputHitmapFiles) + \'\\n\')\n')
-			prunSolve.write('jofile.write(\"inputVectorFiles = \" + str(inputVectorFiles) + \'\\n\')\n')
-			prunSolve.write('jofile.write(\"inputMatrixFiles = \" + str(inputMatrixFiles) + \'\\n\')\n')
+                        #extOutFileStrSol = "prunSolve.py,newSolveJO_Iter%02d.py,"  % (self.i)
+                        extOutFileStrSol = ""
 
-			prunSolve.write('ff = open(\'%s\', \'r\')\n' % self.JOBNAMES["Solve"] )                               
-			prunSolve.write('jofile.write(ff.read())\nff.close()\njofile.close()\n')
+                        if (    ("sctAlignmentLevel"          in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevel"]          == 3 ) or
+                                ("sctAlignmentLevelBarrel"    in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevelBarrel"]    == 3)  or
+                                ("sctAlignmentLevelEndcaps"   in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevelEndcaps"]   == 3)  or
+                                ("pixelAlignmentLevel"        in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevel"]        == 3)  or
+                                ("pixelAlignmentLevelBarrel"  in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevelBarrel"]  == 3)  or
+                                ("pixelAlignmentLevelEndcaps" in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevelEndcaps"] == 3) ) :
+                                print ("You are going to run L3 alignment; Eigen will be used for the solving, so no eigenvalue information will be available!")
 
-			prunSolve.write('os.system(\"athena.py %s \")\n'  % tmpStrJO )
+                        else :
+                                if ( "writeEigenMat"      in self.GridOptions  and self.GridOptions["writeEigenMat"]    is True  ) and ( self.AlignmentOptions["runLocal"] is False ):
+                                        extOutFileStrSol += "eigenvectors.bin,"
+                                        extOutFileStrSol += "eigenvalues.bin,"
 
-			# debugging ...
-			print ("prunSolve.py: ")
-			os.system(" cat prunSolve.py ")
-			print (" newSolveJO_Iter%02d.py: " % self.i)
-			os.system(" cat %s " % tmpStrJO)
+                                if ( "writeEigenMatTxt"   in self.GridOptions  and self.GridOptions["writeEigenMatTxt"] is True  ) and ( self.AlignmentOptions["runLocal"] is False ):
+                                        extOutFileStrSol += "eigenvectors.txt,"
+                                        extOutFileStrSol += "eigenvalues.txt,"
 
+                        extOutFileStrSol = extOutFileStrSol + extOutFileStr + ",Iter%02d_AlignmentConstants.root,OldSiAlignment.txt,OutputSiAlignment.txt,mycool.db,alignlogfile.txt" % (self.i)
 
-			#extOutFileStrSol = "prunSolve.py,newSolveJO_Iter%02d.py,"  % (self.i)	
-			extOutFileStrSol = ""
+                        extFileStr = ""
+                        if (0 == self.i) and os.path.isfile("initial_AlignmentConstants.root") :
+                                extFileStr = " initial_AlignmentConstants.root "
+                        else :
+                                lastAlignConstant = "Iter%02d_AlignmentConstants.root"  %  (self.i-1)
 
-			if (    ("sctAlignmentLevel"          in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevel"]          == 3 ) or 
-				("sctAlignmentLevelBarrel"    in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevelBarrel"]    == 3)  or 
-				("sctAlignmentLevelEndcaps"   in self.AlignmentOptions and self.AlignmentOptions["sctAlignmentLevelEndcaps"]   == 3)  or 
-				("pixelAlignmentLevel"        in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevel"]        == 3)  or 
-				("pixelAlignmentLevelBarrel"  in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevelBarrel"]  == 3)  or 
-				("pixelAlignmentLevelEndcaps" in self.AlignmentOptions and self.AlignmentOptions["pixelAlignmentLevelEndcaps"] == 3) ) : 
-				print ("hmn, you are going to run L3 alignment, Eigen is going to be used, so no eigen value information!!! ")
+                                if os.path.isfile("%s"  % lastAlignConstant ) :
+                                        extFileStr = lastAlignConstant
 
-			else : 
-				if ( "writeEigenMat"      in self.GridOptions  and self.GridOptions["writeEigenMat"]    == True  ) and ( self.AlignmentOptions["runLocal"] == False ):
-					extOutFileStrSol += "eigenvectors.bin,";
-					extOutFileStrSol += "eigenvalues.bin," ;
 
-				if ( "writeEigenMatTxt"   in self.GridOptions  and self.GridOptions["writeEigenMatTxt"] == True  ) and ( self.AlignmentOptions["runLocal"] == False ):
-					extOutFileStrSol += "eigenvectors.txt,";
-					extOutFileStrSol += "eigenvalues.txt," ;
 
-			extOutFileStrSol = extOutFileStrSol + extOutFileStr + ",Iter%02d_AlignmentConstants.root,OldSiAlignment.txt,OutputSiAlignment.txt,mycool.db,alignlogfile.txt" % (self.i)
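+                        # count the files in the accumulation output datasets; the single solve subjob reads all of them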
+                        TNFiles = 0
+                        tmpInDS = []
+                        print (" self.outDS : " , self.outDS)
 
-			extFileStr = ""
-			if (0 == self.i) and os.path.isfile("initial_AlignmentConstants.root") :
-				extFileStr = " initial_AlignmentConstants.root "
-			else :
-				lastAlignConstant = "Iter%02d_AlignmentConstants.root"  %  (self.i-1)
+                        print (" self.outDS[Customed]: " , self.outDS["Customed"])
+                        for item in self.outDS["Customed"] :
+                                print ("self.outDS[Customed] item:  " , item)
+                                tmpInDS.append(item+"/")
+                                rec, nFiles = subprocess.getstatusoutput("dq2-ls -f %s | grep -i files | grep -i total | cut -c 13-" %  (item+"/") )
+                                print (" nFiles of this outDS[Customed] :" , nFiles)
+                                TNFiles += int(nFiles)
 
-				if os.path.isfile("%s"  % lastAlignConstant ) :
-					extFileStr = lastAlignConstant
+                        solveInDS = ",".join(tmpInDS)
+                        #scriptStr = " prun --exec \" python prunSolve.py \" --writeInputToTxt IN:input1.txt --athenaTag 15.8.0,AtlasProduction --inDS %s --outputs %s --outDS %s --extFile %s --nJobs 1 --nFilesPerJob %d -v --maxNFilesPerJob 100000 " % ( (self.outDS["Customed"][0]+"/") , extOutFileStrSol, self.outDS["Solve"], extFileStr,  int(float(nFiles)) )
 
+                        if ( "group" in self.GridOptions["userIDnum"] ) :
+                                scriptStr = " prun --exec \" python prunSolve.py \" --writeInputToTxt IN:input1.txt --official --voms=atlas:/atlas/det-indet/Role=production --express --athenaTag %s,AtlasProduction --inDS %s --outputs %s --outDS %s --extFile %s --nJobs 1 --nFilesPerJob %d -v --maxNFilesPerJob 100000 " % (self.ATHENAREL, solveInDS , extOutFileStrSol, self.outDS["Solve"], extFileStr,  int(TNFiles) )
 
+                        else :
+                                scriptStr = " prun --exec \" python prunSolve.py \" --writeInputToTxt IN:input1.txt --express --athenaTag %s,AtlasProduction --inDS %s --outputs %s --outDS %s --extFile %s --nJobs 1 --nFilesPerJob %d -v --maxNFilesPerJob 100000 " % (self.ATHENAREL, solveInDS , extOutFileStrSol, self.outDS["Solve"], extFileStr,  int(TNFiles) )
 
-			TNFiles = 0
-			tmpInDS = []
-			print (" self.outDS : " , self.outDS)
+                        if  (True is self.GridOptions["reUseSolveLibDS"]) and  self.GridOptions["solveLibDS"]   != "":
+                                scriptStr += " --libDS %s "        % self.GridOptions["solveLibDS"]
 
-			print (" self.outDS[Customed]: " , self.outDS["Customed"])
-			for item in self.outDS["Customed"] : 
-				print ("self.outDS[Customed] item:  " , item)
-				tmpInDS.append(item+"/")
-				rec, nFiles = subprocess.getstatusoutput("dq2-ls -f %s | grep -i files | grep -i total | cut -c 13-" %  (item+"/") )
-				print (" nFiles of this outDS[Customed] :" , nFiles)
-				TNFiles += int(nFiles)
+                        scriptStr += " --tmpDir %s >& tmpSubJobInfo_prunSolve.txt; \n" % self.GridOptions["TmpWorkDir"]
 
-			solveInDS = ",".join(tmpInDS) 
-			#scriptStr = " prun --exec \" python prunSolve.py \" --writeInputToTxt IN:input1.txt --athenaTag 15.8.0,AtlasProduction --inDS %s --outputs %s --outDS %s --extFile %s --nJobs 1 --nFilesPerJob %d -v --maxNFilesPerJob 100000 " % ( (self.outDS["Customed"][0]+"/") , extOutFileStrSol, self.outDS["Solve"], extFileStr,  int(float(nFiles)) )
+                        script.write(scriptStr)
+                        script.close()
 
-			if ( "group" in self.GridOptions["userIDnum"] ) :
-				scriptStr = " prun --exec \" python prunSolve.py \" --writeInputToTxt IN:input1.txt --official --voms=atlas:/atlas/det-indet/Role=production --express --athenaTag %s,AtlasProduction --inDS %s --outputs %s --outDS %s --extFile %s --nJobs 1 --nFilesPerJob %d -v --maxNFilesPerJob 100000 " % (self.ATHENAREL, solveInDS , extOutFileStrSol, self.outDS["Solve"], extFileStr,  int(TNFiles) )
 
-			else : 
-				scriptStr = " prun --exec \" python prunSolve.py \" --writeInputToTxt IN:input1.txt --express --athenaTag %s,AtlasProduction --inDS %s --outputs %s --outDS %s --extFile %s --nJobs 1 --nFilesPerJob %d -v --maxNFilesPerJob 100000 " % (self.ATHENAREL, solveInDS , extOutFileStrSol, self.outDS["Solve"], extFileStr,  int(TNFiles) )
+                else:
+                        print ("Unknown job part '%s': expected Accumulate, Grid or Prun." % self.part)
 
-			if  (True == self.GridOptions["reUseSolveLibDS"]) and  self.GridOptions["solveLibDS"]   != "":
-				scriptStr += " --libDS %s "        % self.GridOptions["solveLibDS"]
 
-			scriptStr += " --tmpDir %s >& tmpSubJobInfo_prunSolve.txt; \n" % self.GridOptions["TmpWorkDir"]
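+        # source the generated submission script and collect the job info and JobIDs it returns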
+        def send(self):
+                os.system("pwd")
+                os.system("chmod 777 %s" % self.SCRIPTNAME)
 
-			script.write(scriptStr)
-			script.close()
+                print ("----------------------------------------------")
+                if self.part == "Accumulate":
+                        print ("Sending %s_Iter%02d accumulation job to grid at site ... %s" % (self.preName, self.i, self.GridOptions["siteName"]))
 
+                        if self.Datasets.containType("Customed") :
+                                for i in range(len(self.Datasets.namesList("Customed"))) :
+                                        os.system(" rm -rf tmpSubJobInfo_Customed_%02d.txt" % i )
 
-		else:
-		   print ("Hi, except accumulate and solve, where do you want to go?")
-			
-			
-	def send(self):
-		os.system("pwd")
-		os.system("chmod 777 %s" % self.SCRIPTNAME)
+                        os.system(" rm -rf  tmpSubJobInfo_collision.txt tmpSubJobInfo_Bon.txt tmpSubJobInfo_Boff.txt ")
+                        os.system("source %s" % self.SCRIPTNAME)
 
-		print ("----------------------------------------------")
-		if self.part == "Accumulate":		
-			print ("Sending %s_Iter%02d accumulation job to grid at site ... %s" % (self.preName, self.i, self.GridOptions["siteName"]))
+                        if self.Datasets.containType("Customed") :
+                                if self.Datasets.doDetailedTagsConfig() :
+                                        for i in range(len(self.Datasets.namesList("Customed"))) :
+                                                os.system("cat tmpSubJobInfo_Customed_%02d.txt                 >> %s\n" % (i, self.AccSubInfo))
+                                                os.system("cat tmpSubJobInfo_Customed_%02d.txt | grep -i JobID >> %s\n" % (i, self.AccSubJobID))
+                                else :
+                                        print ("Detailed tags configuration per dataset is not requested, so only one job with the long combined inDS will be submitted.")
 
-			if self.Datasets.containType("Customed") :
-				for i in range(len(self.Datasets.namesList("Customed"))) :   
-					os.system(" rm -rf tmpSubJobInfo_Customed_%02d.txt" % i )
 
-			os.system(" rm -rf  tmpSubJobInfo_collision.txt tmpSubJobInfo_Bon.txt tmpSubJobInfo_Boff.txt ")
-			os.system("source %s" % self.SCRIPTNAME)
+                        if (self.Datasets.containType("900GeV") or self.Datasets.containType("7TeV")) :
+                                if self.GridOptions["ColCPUs"][self.i]:
+                                        os.system("cat tmpSubJobInfo_collision.txt                 >> %s\n" % (self.AccSubInfo))
+                                        os.system("cat tmpSubJobInfo_collision.txt | grep -i JobID >> %s\n" % (self.AccSubJobID))
 
-			if self.Datasets.containType("Customed") :
-				if self.Datasets.doDetailedTagsConfig() :
-					for i in range(len(self.Datasets.namesList("Customed"))) :
-						os.system("cat tmpSubJobInfo_Customed_%02d.txt                 >> %s\n" % (i, self.AccSubInfo))
-						os.system("cat tmpSubJobInfo_Customed_%02d.txt | grep -i JobID >> %s\n" % (i, self.AccSubJobID))
-				else : 
-					print (" you don't plan to do detailed tags configuration for every dataset, so will submit only one job with long inDS: ")
-					
+                                if self.GridOptions["CosBonCPUs"][self.i]:
+                                        os.system("cat tmpSubJobInfo_Bon.txt                       >> %s\n" % (self.AccSubInfo))
+                                        os.system("cat tmpSubJobInfo_Bon.txt       | grep -i JobID >> %s\n" % (self.AccSubJobID))
 
-			if (self.Datasets.containType("900GeV") or self.Datasets.containType("7TeV")) :
-				if self.GridOptions["ColCPUs"][self.i]:
-					os.system("cat tmpSubJobInfo_collision.txt                 >> %s\n" % (self.AccSubInfo))
-					os.system("cat tmpSubJobInfo_collision.txt | grep -i JobID >> %s\n" % (self.AccSubJobID))
+                                if self.GridOptions["CosBoffCPUs"][self.i]:
+                                        os.system("cat tmpSubJobInfo_Boff.txt                      >> %s\n" % (self.AccSubInfo))
+                                        os.system("cat tmpSubJobInfo_Boff.txt      | grep -i JobID >> %s\n" % (self.AccSubJobID))
 
-				if self.GridOptions["CosBonCPUs"][self.i]:
-					os.system("cat tmpSubJobInfo_Bon.txt                       >> %s\n" % (self.AccSubInfo))
-					os.system("cat tmpSubJobInfo_Bon.txt       | grep -i JobID >> %s\n" % (self.AccSubJobID))
 
-				if self.GridOptions["CosBoffCPUs"][self.i]:
-					os.system("cat tmpSubJobInfo_Boff.txt                      >> %s\n" % (self.AccSubInfo))
-					os.system("cat tmpSubJobInfo_Boff.txt      | grep -i JobID >> %s\n" % (self.AccSubJobID))
+                elif self.part == "Grid" :
 
+                        print ("  Sending %s_Iter%02d matrix solving job to grid site %s" % (self.preName, self.i, self.GridOptions["siteName"]))
+                        os.system("rm -f tmpSubJobInfo_solve.txt")
+                        os.system("source %s" % self.SCRIPTNAME)
+                        os.system("cat tmpSubJobInfo_solve.txt                 >> %s\n" % (self.AccSubInfo))
+                        os.system("cat tmpSubJobInfo_solve.txt | grep -i JobID >> %s\n" % (self.AccSubJobID))
 
-		elif self.part == "Grid" :
+                        print ("----------------------------------------------")
 
-			print ("  Sending %s_Iter%02d matrix solving job to grid site %s" % (self.preName, self.i, self.GridOptions["siteName"]))
-			os.system("rm -f tmpSubJobInfo_solve.txt")
-			os.system("source %s" % self.SCRIPTNAME)
-			os.system("cat tmpSubJobInfo_solve.txt                 >> %s\n" % (self.AccSubInfo))
-			os.system("cat tmpSubJobInfo_solve.txt | grep -i JobID >> %s\n" % (self.AccSubJobID))
+                elif self.part == "Prun" :
 
-			print ("----------------------------------------------")
+                        print ("  Sending %s_Iter%02d prun matrix solving job to grid site %s" % (self.preName, self.i, self.GridOptions["siteName"]))
+                        os.system("rm -f tmpSubJobInfo_prunSolve.txt")
+                        os.system("source %s" % self.SCRIPTNAME)
+                        os.system("cat tmpSubJobInfo_prunSolve.txt                 >> %s\n" % (self.AccSubInfo))
+                        os.system("cat tmpSubJobInfo_prunSolve.txt | grep -i JobID >> %s\n" % (self.AccSubJobID))
 
-		elif self.part == "Prun" :
+                        print ("----------------------------------------------")
 
-			print ("  Sending %s_Iter%02d prun matrix solving job to grid site %s" % (self.preName, self.i, self.GridOptions["siteName"]))
-			os.system("rm -f tmpSubJobInfo_prunSolve.txt")
-			os.system("source %s" % self.SCRIPTNAME)
-			os.system("cat tmpSubJobInfo_prunSolve.txt                 >> %s\n" % (self.AccSubInfo))
-			os.system("cat tmpSubJobInfo_prunSolve.txt | grep -i JobID >> %s\n" % (self.AccSubJobID))
 
-			print ("----------------------------------------------")
+                else:
+                        print ("Unknown job part '%s', nothing to submit." % self.part)
 
 
-		else:
-			print ("Hi, where do you want to go?")
 
 
+        # take this part from Muon alignment in Jobs.py
+        def wait(self,logfilename):
+                print ("Pathena wait()" )
 
+                if self.jobId == -99:
+                        print ("logfilename: ",logfilename)
+                        ret, out = subprocess.getstatusoutput("cat "+logfilename)
+                        lines = out.split('\n')
 
-	# take this part from Muon alignment in Jobs.py
-	def wait(self,logfilename):
-		print ("Pathena wait()" )
-
-		if self.jobId == -99:
-			print ("logiflename: ",logfilename)
-			ret, out = subprocess.getstatusoutput("cat "+logfilename)
-			lines = out.split('\n')
-			
-			# looping over all the job IDs
-			for line in lines:
-				items = line.split()
-				if len(items)>0 and items[0]=="JobID" :
-					self.jobId = int(items[2])
-					print ("jobId = ",self.jobId)
-
-				# check status of each job ID
-				# while self.bjobs() == 0:
-				while self.bjobs() != 1:	
-					print (" waiting for jobID ",self.jobId,"...")
-					time.sleep(300)
-
-
-
-	def bjobs(self) :
-		if self.jobId == -99:
-			print ("need jobId")
-			sys.exit(3)
-
-		print ("Pathena bjobs(), jobId: ",self.jobId)
-		jobId = self.jobId
-		bjobstring = "pbook -c 'show(" + str(jobId) + ")'"
-		print (bjobstring)
-		ret, out = subprocess.getstatusoutput(bjobstring)
-		print ("statusoutput: ",out)
-		for line in out.split("\n") :
-			items_1 = line.split()
-			if len(items_1)>0 and items_1[0] != "jobStatus" :
-				continue
-
-			if len(items_1)>2 :
-				if items_1[2] == "frozen" :
-					print ("jobStatus: " , items_1[2])
-
-					### search the libDS #######	
-					for line2 in out.split("\n") : 
-						print (" line2: " , line2 )
-						items_2 = line2.split()
-						if items_2[0] == "libDS" :
-							break 
-
-					if self.part == "Accumulate" and self.GridOptions["accumulateLibDS"] == "" : 	
-						self.GridOptions["accumulateLibDS"] = items_2[2] 
-						print (" self.GridOptions accumulateLibDS: " , self.GridOptions["accumulateLibDS"])
-
-					if (self.part == "Grid" or self.part == "Prun") and self.GridOptions["solveLibDS"] == "":
-						self.GridOptions["solveLibDS"     ] = items_2[2]				
-						print (" self.GridOptions solveLibDS: "      , self.GridOptions["solveLibDS"])
-
-					
-					return 1
-				else :
-					print ("jobStatus: ",items_1[2])
-					return 0
-
+                        # looping over all the job IDs
+                        for line in lines:
+                                items = line.split()
+                                if len(items)>0 and items[0]=="JobID" :
+                                        self.jobId = int(items[2])
+                                        print ("jobId = ",self.jobId)
 
+                                # check status of each job ID
+                                # while self.bjobs() == 0:
+                                while self.bjobs() != 1:
+                                        print (" waiting for jobID ",self.jobId,"...")
+                                        time.sleep(300)
 
 
-	def whetherRetry(self) :	  	
-		nfailed   = 0
-		nfinished = 0
-		if self.jobId == -99 :
-			print ("need jobId")
-			sys.exit(3)
 
-		print ("Pathena bjobs(), jobId: ",self.jobId)
-		jobId = self.jobId
-		bjobstring = "pbook -c 'show(" + str(jobId) + ")'"
-		print (bjobstring)
-		ret, out = subprocess.getstatusoutput(bjobstring)
-		
-		lines  = out.split("\n")
-		nlines = len(lines)
-		print (" nlines: " , nlines)
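+        # poll the job status with pbook; returns 1 once the job is frozen (also caching its libDS), otherwise 0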
+        def bjobs(self) :
+                if self.jobId == -99:
+                        print ("need jobId")
+                        sys.exit(3)
 
-		for i in range(0, nlines) : 
-			items = lines[i].split()
-			print (" items: " , items)
-			if "failed"   in items :
-				nfailed   = int(items[-1])
-			if "finished" in items :
-				nfinished = int(items[-1])
-				# Hi, just stop, next line will cause the loop crash ...
-				break
+                print ("Pathena bjobs(), jobId: ",self.jobId)
+                jobId = self.jobId
+                bjobstring = "pbook -c 'show(" + str(jobId) + ")'"
+                print (bjobstring)
+                ret, out = subprocess.getstatusoutput(bjobstring)
+                print ("statusoutput: ",out)
+                for line in out.split("\n") :
+                        items_1 = line.split()
+                        if len(items_1)>0 and items_1[0] != "jobStatus" :
+                                continue
 
-		if (self.retryNo == 0) and (self.i == 0) : 
-			if ( 0 == (nfailed + nfinished - 1) ) : 
-				successRatio = -1 
-			else : 
-				successRatio = float(nfinished - 1)/(nfailed + nfinished - 1) 
-		else : 
-			successRatio = float(nfinished)/(nfailed + nfinished) 
+                        if len(items_1)>2 :
+                                if items_1[2] == "frozen" :
+                                        print ("jobStatus: " , items_1[2])
 
+                                        ### search the libDS #######
+                                        for line2 in out.split("\n") :
+                                                print (" line2: " , line2 )
+                                                items_2 = line2.split()
+                                                if items_2[0] == "libDS" :
+                                                        break
 
+                                        if self.part == "Accumulate" and self.GridOptions["accumulateLibDS"] == "" :
+                                                self.GridOptions["accumulateLibDS"] = items_2[2]
+                                                print (" self.GridOptions accumulateLibDS: " , self.GridOptions["accumulateLibDS"])
 
-		print ("the success ratio: ", successRatio)
-		if successRatio >= self.GridOptions["successRatioCut"] :
-			print ("The success ratio is higher than the cut, will not retry ---")
-			return False
-		else :
-			print ("The success ratio is lower than the cut, will retry ---")
-			return True
+                                        if (self.part == "Grid" or self.part == "Prun") and self.GridOptions["solveLibDS"] == "":
+                                                self.GridOptions["solveLibDS"     ] = items_2[2]
+                                                print (" self.GridOptions solveLibDS: "      , self.GridOptions["solveLibDS"])
 
 
+                                        return 1
+                                else :
+                                        print ("jobStatus: ",items_1[2])
+                                        return 0
 
-	def retry(self) :
-		jobId = self.jobId
-		retrystring = "pbook -c 'retry(" + str(jobId) + ")'"	
-		ret, out    = subprocess.getstatusoutput(retrystring)
-		print (" out1: " , out )
-		#self.jobId =  self.jobId + 2
 
-		## get the new JobID ## 
-		for line in out.split("\n") : 
-			items = line.split() 
-			nitems = len(items) 
-			for i in range(0, nitems) : 
-				if items[i] == "New" : 
-					jobstring = items[i+1].split("=") 
-					self.jobId = int(jobstring[-1]) 
-					print ("new JobID: " , self.jobId )
-					break 
-		self.retryNo = self.retryNo + 1
 
 
+        def whetherRetry(self) :
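+                # decide whether to retry, based on the finished/failed counts from pbook and the successRatioCut option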
+                nfailed   = 0
+                nfinished = 0
+                if self.jobId == -99 :
+                        print ("need jobId")
+                        sys.exit(3)
 
-		while self.bjobs() != 1 :
-			print (" waiting for the first retry jobID " , self.jobId , "...")
-			time.sleep(300)
+                print ("Pathena bjobs(), jobId: ",self.jobId)
+                jobId = self.jobId
+                bjobstring = "pbook -c 'show(" + str(jobId) + ")'"
+                print (bjobstring)
+                ret, out = subprocess.getstatusoutput(bjobstring)
 
-		if self.whetherRetry() :
-			jobId = self.jobId
-			retrystring = "pbook -c 'retry(" + str(jobId) + ")'"
-			ret, out    = subprocess.getstatusoutput(retrystring)
-			print (" out2: " , out)
+                lines  = out.split("\n")
+                nlines = len(lines)
+                print (" nlines: " , nlines)
 
-			#self.jobId =self.jobId + 2
+                for i in range(0, nlines) :
+                        items = lines[i].split()
+                        print (" items: " , items)
+                        if "failed"   in items :
+                                nfailed   = int(items[-1])
+                        if "finished" in items :
+                                nfinished = int(items[-1])
+                                # stop parsing here; the remaining pbook output lines would break this loop
+                                break
 
+                if (self.retryNo == 0) and (self.i == 0) :
+                        if ( 0 == (nfailed + nfinished - 1) ) :
+                                successRatio = -1
+                        else :
+                                successRatio = float(nfinished - 1)/(nfailed + nfinished - 1)
+                else :
+                        successRatio = float(nfinished)/(nfailed + nfinished)
 
-			## get the new JobID ## 
-			for line in out.split("\n") : 
-				items = line.split() 
-				nitems = len(items) 
-				for i in range(0, nitems) : 
-					if items[i] == "New" : 
-						jobstring = items[i+1].split("=") 
-						self.jobId = int(jobstring[-1]) 
-						print ("new JobID: " , self.jobId )
-						break 
-			self.retryNo = self.retryNo + 1 
 
 
-			while self.bjobs() != 1 :
-				print (" waiting for the second retry jobID " , self.jobId,"...")
-				time.sleep(300)
+                print ("the success ratio: ", successRatio)
+                if successRatio >= self.GridOptions["successRatioCut"] :
+                        print ("The success ratio is not below the cut, will not retry ---")
+                        return False
+                else :
+                        print ("The success ratio is lower than the cut, will retry ---")
+                        return True
 
 
 
+        def retry(self) :
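+                # resubmit the frozen job with pbook retry, pick up the new JobID and wait for it; retried at most twice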
+                jobId = self.jobId
+                retrystring = "pbook -c 'retry(" + str(jobId) + ")'"
+                ret, out    = subprocess.getstatusoutput(retrystring)
+                print (" out1: " , out )
+                #self.jobId =  self.jobId + 2
 
+                ## get the new JobID ##
+                for line in out.split("\n") :
+                        items = line.split()
+                        nitems = len(items)
+                        for i in range(0, nitems) :
+                                if items[i] == "New" :
+                                        jobstring = items[i+1].split("=")
+                                        self.jobId = int(jobstring[-1])
+                                        print ("new JobID: " , self.jobId )
+                                        break
+                self.retryNo = self.retryNo + 1
 
-	def getGridOptions(self, option = "") : 
-		return self.GridOptions[option]
 
-	def setGridOptions(self, option1 = "", option2 = "") : 
-		self.GridOptions[option1] = option2
+
+                while self.bjobs() != 1 :
+                        print (" waiting for the first retry jobID " , self.jobId , "...")
+                        time.sleep(300)
+
+                if self.whetherRetry() :
+                        jobId = self.jobId
+                        retrystring = "pbook -c 'retry(" + str(jobId) + ")'"
+                        ret, out    = subprocess.getstatusoutput(retrystring)
+                        print (" out2: " , out)
+
+                        #self.jobId =self.jobId + 2
+
+
+                        ## get the new JobID ##
+                        for line in out.split("\n") :
+                                items = line.split()
+                                nitems = len(items)
+                                for i in range(0, nitems) :
+                                        if items[i] == "New" :
+                                                jobstring = items[i+1].split("=")
+                                                self.jobId = int(jobstring[-1])
+                                                print ("new JobID: " , self.jobId )
+                                                break
+                        self.retryNo = self.retryNo + 1
+
+
+                        while self.bjobs() != 1 :
+                                print (" waiting for the second retry jobID " , self.jobId,"...")
+                                time.sleep(300)
+
+
+
+
+
+        def getGridOptions(self, option = "") :
+                return self.GridOptions[option]
+
+        def setGridOptions(self, option1 = "", option2 = "") :
+                self.GridOptions[option1] = option2
 
 
 
 
 
 class writeScriptAFS : 
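+        # writes a solving script for AFS/lxbatch: sets up the release, registers the input POOL files and runs athena, either locally or via bsub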
-	def __init__(self,
-		     iter,
-		     JOBNAME,
-		     SCRIPTNAME,
-		     preName,
-		     QUEUE          = "8nh",
-		     CMTDIR         = "",
-		     ATHENAREL      = "",
-		     TAGS           = "",
-		     inputPoolFiles = ""
-		     ):
-
-		self.i              = iter
-		self.JOBNAME        = JOBNAME
-		self.SCRIPTNAME     = SCRIPTNAME
-		self.preName        = preName
-		self.QUEUE          = QUEUE
-		self.CMTDIR         = CMTDIR
-		self.ATHENAREL      = ATHENAREL
-		self.TAGS           = TAGS
-		self.inputPoolFiles = inputPoolFiles
-
-	def write(self) : 
-		script = open(self.SCRIPTNAME,'w')
-
-		script.write("\n")
-		script.write("#   setup the environment \n")
-		script.write("source %s/setup.sh -tag=%s,%s \n" % (self.CMTDIR, self.ATHENAREL, self.TAGS))
-		for file in self.inputPoolFiles:
-			if "atlasdatadisk" in file: 
-				script.write("export STAGE_SVCCLASS=atldata\n")
-				break
-		for file in self.inputPoolFiles:
-			if "DAQ" in file:
-				script.write("export STAGE_SVCCLASS=atlcal\n")
-				break
-
-		for file in self.inputPoolFiles:
-			if 'ESD' in file or 'AlignmentConstants' in file:
-				script.write("pool_insertFileToCatalog "+ file + " \n")
-
-		nowPath = os.getcwd()
-		print ("current path: ", nowPath)
-
-		script.write("athena %s \n" % (nowPath + "/" + self.JOBNAME) )
-		script.close()
-
-
-	def send(self, runmode) :
-		os.system("chmod +x %s" % self.SCRIPTNAME)
-		if "Local" == runmode : 
-			print ("Running Iter%02dSolve job locally ..." % (self.i))
-			os.system("sh %s | tee Iter%02dSolveLocally.log \n" % (self.SCRIPTNAME, self.i))
-
-		if "Batch" == runmode :
-			print ("Submitting Iter%02dSolve job to queue %s ..." % (self.i, self.QUEUE))
-			os.system("bsub -q %s  %s" % (self.QUEUE, self.SCRIPTNAME) )
-
-
-	def wait(self) : 
-		print ("Processing in lxbatch...")
-		time.sleep(60)
-		while os.popen('bjobs -w').read().find(self.preName) != -1:
-			time.sleep(30)
+        def __init__(self,
+                     iter,
+                     JOBNAME,
+                     SCRIPTNAME,
+                     preName,
+                     QUEUE          = "8nh",
+                     CMTDIR         = "",
+                     ATHENAREL      = "",
+                     TAGS           = "",
+                     inputPoolFiles = ""
+                     ):
+
+                self.i              = iter
+                self.JOBNAME        = JOBNAME
+                self.SCRIPTNAME     = SCRIPTNAME
+                self.preName        = preName
+                self.QUEUE          = QUEUE
+                self.CMTDIR         = CMTDIR
+                self.ATHENAREL      = ATHENAREL
+                self.TAGS           = TAGS
+                self.inputPoolFiles = inputPoolFiles
+
+        def write(self) :
+                script = open(self.SCRIPTNAME,'w')
+
+                script.write("\n")
+                script.write("#   setup the environment \n")
+                script.write("source %s/setup.sh -tag=%s,%s \n" % (self.CMTDIR, self.ATHENAREL, self.TAGS))
+                for file in self.inputPoolFiles:
+                        if "atlasdatadisk" in file:
+                                script.write("export STAGE_SVCCLASS=atldata\n")
+                                break
+                for file in self.inputPoolFiles:
+                        if "DAQ" in file:
+                                script.write("export STAGE_SVCCLASS=atlcal\n")
+                                break
+
+                for file in self.inputPoolFiles:
+                        if 'ESD' in file or 'AlignmentConstants' in file:
+                                script.write("pool_insertFileToCatalog "+ file + " \n")
+
+                nowPath = os.getcwd()
+                print ("current path: ", nowPath)
+
+                script.write("athena %s \n" % (nowPath + "/" + self.JOBNAME) )
+                script.close()
+
+
+        def send(self, runmode) :
+                os.system("chmod +x %s" % self.SCRIPTNAME)
+                if "Local" == runmode :
+                        print ("Running Iter%02dSolve job locally ..." % (self.i))
+                        os.system("sh %s | tee Iter%02dSolveLocally.log \n" % (self.SCRIPTNAME, self.i))
+
+                if "Batch" == runmode :
+                        print ("Submitting Iter%02dSolve job to queue %s ..." % (self.i, self.QUEUE))
+                        os.system("bsub -q %s  %s" % (self.QUEUE, self.SCRIPTNAME) )
+
+
+        def wait(self) :
+                print ("Processing in lxbatch...")
+                time.sleep(60)
+                while os.popen('bjobs -w').read().find(self.preName) != -1:
+                        time.sleep(30)
 
 
 
 def prepareForNextIter(OutputPath, iteration, GridSolvingOutDS, runSolveMode):
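+        # retrieve the solving outputs: dq2-get the new alignment constants from the grid outDS, or move the local outputs into the iteration directory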
 
-	if ( "Grid"  == runSolveMode or "Prun" == runSolveMode ) :
-		print ("GridSolveOutDS = ", (GridSolvingOutDS))
-		ret, outDS = subprocess.getstatusoutput("cat %s" % GridSolvingOutDS)
-		print ("solve outDS: ",outDS)
-
-		ret, out = subprocess.getstatusoutput("dq2-ls -f %s/ " % outDS)
-		rootlist = []
-
-		print ("out: ",out)
-		lines = out.split('\n')
-		for line in lines :
-			items = line.split()
-			for item in items :
-				print (" item : " , item)
-				if item.find("AlignmentConstants.root") != -1 :
-					rootlist.append(item)
-		rootstr = ",".join(rootlist)
-		print ("rootstr : " , rootstr)
-		os.system("dq2-get -f %s -H %s/ -V %s/ "  % ( rootstr, outDS, outDS ) )
-	
-
-		jobstring2  = "mv %s/*.Iter%02d_AlignmentConstants.root  Iter%02d_AlignmentConstants.root\n\n" % \
-			(outDS, iteration, iteration)
-	 
-		'''
-		jobstring2 += "mv %s/*.OldSiAlignment.txt     %s/Iter%02d/OldSiAlignment.txt\n\n" % \
-			(outDS, OutputPath, iteration)
-		jobstring2 += "mv %s/*.OutputSiAlignment.txt  %s/Iter%02d/OutputSiAlignment.txt\n\n" %  \
-			(outDS, OutputPath, iteration)
-		jobstring2 += "mv %s/*.mycool.db   %s/Iter%02d/mycool.db\n\n" %  \
-			(outDS, OutputPath, iteration)
-		jobstring2 += "mv %s/*.vector.txt  %s/Iter%02d/vector.txt\n\n" %  \
-			(outDS, OutputPath, iteration)
-		jobstring2 += "mv %s/*.vector.bin  %s/Iter%02d/vector.bin\n\n" %  \
-			(outDS, OutputPath, iteration)
-		jobstring2 += "mv %s/*.matrix.txt  %s/Iter%02d/matrix.txt\n\n" %  \
-			(outDS, OutputPath, iteration)
-		jobstring2 += "mv %s/*.matrix.bin  %s/Iter%02d/matrix.bin\n\n" %  \
-			(outDS, OutputPath, iteration)
-		jobstring2 += "mv %s/*.hitmap.txt  %s/Iter%02d/hitmap.txt\n\n" %  \
-			(outDS, OutputPath, iteration)
-		jobstring2 += "mv %s/*.hitmap.bin  %s/Iter%02d/hitmap.bin\n\n" %  \
-			(outDS, OutputPath, iteration)
-		jobstring2 += "mv %s/*.alignlogfile.txt  %s/Iter%02d/alignlogfile.txt\n\n" %  \
-			(outDS, OutputPath, iteration)
-		jobstring2 += "mv %s/*.log.tgz  %s/Iter%02d/\n\n" % (outDS, OutputPath, iteration)     
-		# move the merged total monitoring file into every iteration directory
-		jobstring2 += "mv TotalMonitoring.root %s/Iter%02d/\n\n" % (OutputPath, iteration)
-		'''
-		os.system(jobstring2)
-
-	else : 	
-		jobstring3 = ""
-		jobstring3 += "mv ./OldSiAlignment.txt     %s/Iter%02d/OldSiAlignment.txt\n" %     ( OutputPath, iteration)
-		jobstring3 += "mv ./OutputSiAlignment.txt  %s/Iter%02d/OutputSiAlignment.txt\n" %  ( OutputPath, iteration)
-		jobstring3 += "mv ./mycool.db              %s/Iter%02d/mycool.db\n" %              ( OutputPath, iteration)
-		jobstring3 += "mv ./vector.txt             %s/Iter%02d/vector.txt\n" %             ( OutputPath, iteration)
-		jobstring3 += "mv ./vector.bin             %s/Iter%02d/vector.bin\n" %             ( OutputPath, iteration)
-		jobstring3 += "mv ./matrix.txt             %s/Iter%02d/matrix.txt\n" %             ( OutputPath, iteration)
-		jobstring3 += "mv ./matrix.bin             %s/Iter%02d/matrix.bin\n" %             ( OutputPath, iteration)
-		jobstring3 += "mv ./hitmap.txt             %s/Iter%02d/hitmap.txt\n" %             ( OutputPath, iteration)
-		jobstring3 += "mv ./hitmap.bin             %s/Iter%02d/hitmap.bin\n" %             ( OutputPath, iteration)
-		jobstring3 += "mv ./alignlogfile.txt       %s/Iter%02d/alignlogfile.txt\n" %       ( OutputPath, iteration)
-		os.system(jobstring3)
+        if ( "Grid"  == runSolveMode or "Prun" == runSolveMode ) :
+                print ("GridSolveOutDS = ", (GridSolvingOutDS))
+                ret, outDS = subprocess.getstatusoutput("cat %s" % GridSolvingOutDS)
+                print ("solve outDS: ",outDS)
+
+                ret, out = subprocess.getstatusoutput("dq2-ls -f %s/ " % outDS)
+                rootlist = []
+
+                print ("out: ",out)
+                lines = out.split('\n')
+                for line in lines :
+                        items = line.split()
+                        for item in items :
+                                print (" item : " , item)
+                                if item.find("AlignmentConstants.root") != -1 :
+                                        rootlist.append(item)
+                rootstr = ",".join(rootlist)
+                print ("rootstr : " , rootstr)
+                os.system("dq2-get -f %s -H %s/ -V %s/ "  % ( rootstr, outDS, outDS ) )
+
+
+                jobstring2  = "mv %s/*.Iter%02d_AlignmentConstants.root  Iter%02d_AlignmentConstants.root\n\n" % \
+                        (outDS, iteration, iteration)
+
+                '''
+                jobstring2 += "mv %s/*.OldSiAlignment.txt     %s/Iter%02d/OldSiAlignment.txt\n\n" % \
+                        (outDS, OutputPath, iteration)
+                jobstring2 += "mv %s/*.OutputSiAlignment.txt  %s/Iter%02d/OutputSiAlignment.txt\n\n" %  \
+                        (outDS, OutputPath, iteration)
+                jobstring2 += "mv %s/*.mycool.db   %s/Iter%02d/mycool.db\n\n" %  \
+                        (outDS, OutputPath, iteration)
+                jobstring2 += "mv %s/*.vector.txt  %s/Iter%02d/vector.txt\n\n" %  \
+                        (outDS, OutputPath, iteration)
+                jobstring2 += "mv %s/*.vector.bin  %s/Iter%02d/vector.bin\n\n" %  \
+                        (outDS, OutputPath, iteration)
+                jobstring2 += "mv %s/*.matrix.txt  %s/Iter%02d/matrix.txt\n\n" %  \
+                        (outDS, OutputPath, iteration)
+                jobstring2 += "mv %s/*.matrix.bin  %s/Iter%02d/matrix.bin\n\n" %  \
+                        (outDS, OutputPath, iteration)
+                jobstring2 += "mv %s/*.hitmap.txt  %s/Iter%02d/hitmap.txt\n\n" %  \
+                        (outDS, OutputPath, iteration)
+                jobstring2 += "mv %s/*.hitmap.bin  %s/Iter%02d/hitmap.bin\n\n" %  \
+                        (outDS, OutputPath, iteration)
+                jobstring2 += "mv %s/*.alignlogfile.txt  %s/Iter%02d/alignlogfile.txt\n\n" %  \
+                        (outDS, OutputPath, iteration)
+                jobstring2 += "mv %s/*.log.tgz  %s/Iter%02d/\n\n" % (outDS, OutputPath, iteration)
+                # move the merged total monitoring file into every iteration directory
+                jobstring2 += "mv TotalMonitoring.root %s/Iter%02d/\n\n" % (OutputPath, iteration)
+                '''
+                os.system(jobstring2)
+
+        else :
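+                # non-grid solving: the solve outputs are already local, just move them into this iteration's directory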
+                jobstring3 = ""
+                jobstring3 += "mv ./OldSiAlignment.txt     %s/Iter%02d/OldSiAlignment.txt\n" %     ( OutputPath, iteration)
+                jobstring3 += "mv ./OutputSiAlignment.txt  %s/Iter%02d/OutputSiAlignment.txt\n" %  ( OutputPath, iteration)
+                jobstring3 += "mv ./mycool.db              %s/Iter%02d/mycool.db\n" %              ( OutputPath, iteration)
+                jobstring3 += "mv ./vector.txt             %s/Iter%02d/vector.txt\n" %             ( OutputPath, iteration)
+                jobstring3 += "mv ./vector.bin             %s/Iter%02d/vector.bin\n" %             ( OutputPath, iteration)
+                jobstring3 += "mv ./matrix.txt             %s/Iter%02d/matrix.txt\n" %             ( OutputPath, iteration)
+                jobstring3 += "mv ./matrix.bin             %s/Iter%02d/matrix.bin\n" %             ( OutputPath, iteration)
+                jobstring3 += "mv ./hitmap.txt             %s/Iter%02d/hitmap.txt\n" %             ( OutputPath, iteration)
+                jobstring3 += "mv ./hitmap.bin             %s/Iter%02d/hitmap.bin\n" %             ( OutputPath, iteration)
+                jobstring3 += "mv ./alignlogfile.txt       %s/Iter%02d/alignlogfile.txt\n" %       ( OutputPath, iteration)
+                os.system(jobstring3)
 
 
 
@@ -1555,225 +1550,222 @@ def prepareForNextIter(OutputPath, iteration, GridSolvingOutDS, runSolveMode):
 
 def mergeMatrix(OutputPath, iteration, GridAccOutDS, GridOptions):
 
-	matrixlist = []
-	vectorlist = []
-	hitmaplist = []
-
-	print ("GridAccOutDS = ", (GridAccOutDS))
-
-	ret, out = subprocess.getstatusoutput("cat %s" % GridAccOutDS)
-	print ("out: ",out)
-	lines=out.split('\n')
-
-	nowDir = os.getcwd()
-	os.chdir(GridOptions["TmpWorkDir"])
-
-	# looping over all output dataset names
-	for line in lines:
-		items = line.split()
-		if len(items)>0 :
-			outDS = items[0]
-			print ("when merging matrixes and vectors, this outDS name : ", outDS)
-			binlist = []
-			ret, out = subprocess.getstatusoutput("dq2-ls -f %s/ " % outDS)
-			print (" dq2-ls -f, out : ", out)
-			lines = out.split('\n')
-			for line in lines :
-				items = line.split()
-				for item in items :
-					print (" item : " , item)
-					if item.find(".bin") != -1 :
-						binlist.append(item)
-			binstr = ",".join(binlist)
-			print ("binary files string : " , binstr)
-			os.system("dq2-get -f %s -H %s/ -V %s/ "  % ( binstr, outDS, outDS ))
-
-
-			jobstr2 = "ls %s/*.matrix.bin" % (outDS)
-			job2 = open("job2.sh",'w')
-			os.system("chmod 777 job2.sh")
-			job2.write(jobstr2)
-			job2.close()
-			ret, out = subprocess.getstatusoutput('sh job2.sh')
-			for line in out.split("\n"):
-				MatrixPath =  line
-				print ("MatrixPath: ",MatrixPath)
-				#if os.path.isfile(MatrixPath):
-				matrixlist.append(MatrixPath)
-
-
-			jobstr3 = "ls %s/*.vector.bin" % (outDS)
-			job3 = open("job3.sh",'w')
-			os.system("chmod 777 job3.sh")
-			job3.write(jobstr3)
-			job3.close()
-			print ("job3: ",job3)
-			ret, out = subprocess.getstatusoutput('sh job3.sh')
-			for line in out.split("\n"):
-				VectorPath = line
-				#		if os.path.isfile(VectorPath):
-				vectorlist.append(VectorPath)
-
-			print ("vectorlist: ",vectorlist)
-
-			jobstr4 = "ls %s/*.hitmap.bin" % (outDS)
-			job4 = open("job4.sh",'w')
-			os.system("chmod 777 job4.sh")
-			job4.write(jobstr4)
-			job4.close()
-			print ("job4: ",job4)
-			ret, out = subprocess.getstatusoutput('sh job4.sh')
-			for line in out.split("\n"):
-				HitmapPath =  line
-				#		if os.path.isfile(HitmapPath):
-				hitmaplist.append(HitmapPath)
-			print ("hitmaplist: ",hitmaplist)
-		
-		else:
-			print ("Problem getting the outDS files")
-			
+        matrixlist = []
+        vectorlist = []
+        hitmaplist = []
+
+        print ("GridAccOutDS = ", (GridAccOutDS))
+
+        ret, out = subprocess.getstatusoutput("cat %s" % GridAccOutDS)
+        print ("out: ",out)
+        lines=out.split('\n')
+
+        nowDir = os.getcwd()
+        os.chdir(GridOptions["TmpWorkDir"])
+
+        # looping over all output dataset names
+        for line in lines:
+                items = line.split()
+                if len(items)>0 :
+                        outDS = items[0]
+                        print ("when merging matrixes and vectors, this outDS name : ", outDS)
+                        binlist = []
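+                        # collect every .bin file (matrix, vector, hitmap) reported by dq2-ls and download them with dq2-get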
+                        ret, out = subprocess.getstatusoutput("dq2-ls -f %s/ " % outDS)
+                        print (" dq2-ls -f, out : ", out)
+                        lines = out.split('\n')
+                        for line in lines :
+                                items = line.split()
+                                for item in items :
+                                        print (" item : " , item)
+                                        if item.find(".bin") != -1 :
+                                                binlist.append(item)
+                        binstr = ",".join(binlist)
+                        print ("binary files string : " , binstr)
+                        os.system("dq2-get -f %s -H %s/ -V %s/ "  % ( binstr, outDS, outDS ))
+
+
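+                        # ls the downloaded binaries through small helper scripts and store their paths
+                        # in the matrix/vector/hitmap lists that are returned to the caller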
+                        jobstr2 = "ls %s/*.matrix.bin" % (outDS)
+                        job2 = open("job2.sh",'w')
+                        os.system("chmod 777 job2.sh")
+                        job2.write(jobstr2)
+                        job2.close()
+                        ret, out = subprocess.getstatusoutput('sh job2.sh')
+                        for line in out.split("\n"):
+                                MatrixPath =  line
+                                print ("MatrixPath: ",MatrixPath)
+                                #if os.path.isfile(MatrixPath):
+                                matrixlist.append(MatrixPath)
+
+
+                        jobstr3 = "ls %s/*.vector.bin" % (outDS)
+                        job3 = open("job3.sh",'w')
+                        os.system("chmod 777 job3.sh")
+                        job3.write(jobstr3)
+                        job3.close()
+                        print ("job3: ",job3)
+                        ret, out = subprocess.getstatusoutput('sh job3.sh')
+                        for line in out.split("\n"):
+                                VectorPath = line
+                                #               if os.path.isfile(VectorPath):
+                                vectorlist.append(VectorPath)
+
+                        print ("vectorlist: ",vectorlist)
+
+                        jobstr4 = "ls %s/*.hitmap.bin" % (outDS)
+                        job4 = open("job4.sh",'w')
+                        os.system("chmod 777 job4.sh")
+                        job4.write(jobstr4)
+                        job4.close()
+                        print ("job4: ",job4)
+                        ret, out = subprocess.getstatusoutput('sh job4.sh')
+                        for line in out.split("\n"):
+                                HitmapPath =  line
+                                #               if os.path.isfile(HitmapPath):
+                                hitmaplist.append(HitmapPath)
+                        print ("hitmaplist: ",hitmaplist)
+
+                else:
+                        print ("Problem getting the outDS files")
+
   
-	print ("------------------------------------------")
-	print ("  Setting Matrices list" )
-	print ("------------------------------------------")
+        print ("------------------------------------------")
+        print ("  Setting Matrices list" )
+        print ("------------------------------------------")
+
+        os.system("rm *.sh")
+        os.chdir(nowDir)
 
-	os.system("rm *.sh")
-	os.chdir(nowDir)
+        return matrixlist,vectorlist,hitmaplist
+
+
 
-	return matrixlist,vectorlist,hitmaplist
-				
-		
-			 
 # For the merging of the monitoring Files
 class mergeMonitoringScript:
-	def __init__(self,
-		     OutputPath,
-		     preName,
-		     iter,
-		     CosmicsBoff,
-		     CosmicsBon,
-		     Collision,
-		     CMTDIR,
-		     ATHENAREL,
-		     TAGS,
-		     SCRIPTNAME,
-		     JOBNAME, 
-		     GridAccOutDS
-		     ):
-		self.OutputPath      = OutputPath
-		self.preName         = preName
-		self.i               = iter
-		self.CosmicsBoff     = CosmicsBoff
-		self.CosmicsBon      = CosmicsBon
-		self.Collision       = Collision
-		self.CMTDIR          = CMTDIR
-		self.ATHENAREL       = ATHENAREL
-		self.TAGS            = TAGS
-		self.SCRIPTNAME      = SCRIPTNAME
-		self.JOBNAME         = JOBNAME
-		self.GridAccOutDS    = GridAccOutDS
-
-
-	def write(self):
-		TempPath="%s/Iter%02d" % (self.OutputPath, self.i)
-		#self.SCRIPTNAME = TempPath + '/' + self.SCRIPTNAME
-		# list of Files to be merged
-		mergeMonitoringFilesName = 'mergeMonitoringFiles.txt'
-
-		script=open(self.SCRIPTNAME,'w')
-		script.write("#BSUB -J %s_Iter%02dMerge \n" % (self.preName, self.i))
-		script.write("#BSUB -o %s/Iter%02d/logs/Iter%02dMerge.log \n" % (self.OutputPath,self.i,self.i))
-		script.write("\n")
-		script.write("#   setup the environment \n")
-		script.write("source %s/setup.sh -tag=%s,%s \n" % (self.CMTDIR, self.ATHENAREL, self.TAGS))
-		#script.write("cd %s/Iter%d/ \n" % (self.OutputPath,self.i))
-		script.write("DQHistogramMerge.py %s TotalMonitoring.root True\n" % mergeMonitoringFilesName)
-		script.close()
-
-		mergeMonitoringFile = open(mergeMonitoringFilesName,"w")
-		ret, out = subprocess.getstatusoutput("cat %s" % self.GridAccOutDS)
-		print ("out: ",out)
-		lines = out.split('\n')
-		# looping over all output dataset names
-		for line in lines:
-			items = line.split()
-			if len(items)>0 :
-				outDS = items[0]
-				print ("outDS = ",outDS)
-				#print ("hmn", glob.glob(("%s/*.root") % outDS))
-				os.system("find %s/*.root >> %s \n " % (outDS, mergeMonitoringFilesName))
-
-		mergeMonitoringFile.close()
-
-
-	def send(self):
-		os.system("chmod +x %s" % self.SCRIPTNAME)
-		print ("in doing merge----------------------------------------------")
-		os.system("sh %s  \n"   % self.SCRIPTNAME)
-		print ("after doing merge----------------------------------------------")
+        def __init__(self,
+                     OutputPath,
+                     preName,
+                     iter,
+                     CosmicsBoff,
+                     CosmicsBon,
+                     Collision,
+                     CMTDIR,
+                     ATHENAREL,
+                     TAGS,
+                     SCRIPTNAME,
+                     JOBNAME,
+                     GridAccOutDS
+                     ):
+                self.OutputPath      = OutputPath
+                self.preName         = preName
+                self.i               = iter
+                self.CosmicsBoff     = CosmicsBoff
+                self.CosmicsBon      = CosmicsBon
+                self.Collision       = Collision
+                self.CMTDIR          = CMTDIR
+                self.ATHENAREL       = ATHENAREL
+                self.TAGS            = TAGS
+                self.SCRIPTNAME      = SCRIPTNAME
+                self.JOBNAME         = JOBNAME
+                self.GridAccOutDS    = GridAccOutDS
+
+
+        def write(self):
+                #TempPath="%s/Iter%02d" % (self.OutputPath, self.i)
+                #self.SCRIPTNAME = TempPath + '/' + self.SCRIPTNAME
+                # list of Files to be merged
+                mergeMonitoringFilesName = 'mergeMonitoringFiles.txt'
+
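+                # write the merge batch script: BSUB directives, release setup and the DQHistogramMerge.py call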
+                script=open(self.SCRIPTNAME,'w')
+                script.write("#BSUB -J %s_Iter%02dMerge \n" % (self.preName, self.i))
+                script.write("#BSUB -o %s/Iter%02d/logs/Iter%02dMerge.log \n" % (self.OutputPath,self.i,self.i))
+                script.write("\n")
+                script.write("#   setup the environment \n")
+                script.write("source %s/setup.sh -tag=%s,%s \n" % (self.CMTDIR, self.ATHENAREL, self.TAGS))
+                #script.write("cd %s/Iter%d/ \n" % (self.OutputPath,self.i))
+                script.write("DQHistogramMerge.py %s TotalMonitoring.root True\n" % mergeMonitoringFilesName)
+                script.close()
+
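+                # list every monitoring ROOT file found in the accumulation output datasets, one file per line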
+                mergeMonitoringFile = open(mergeMonitoringFilesName,"w")
+                ret, out = subprocess.getstatusoutput("cat %s" % self.GridAccOutDS)
+                print ("out: ",out)
+                lines = out.split('\n')
+                # looping over all output dataset names
+                for line in lines:
+                        items = line.split()
+                        if len(items)>0 :
+                                outDS = items[0]
+                                print ("outDS = ",outDS)
+                                #print ("hmn", glob.glob(("%s/*.root") % outDS))
+                                os.system("find %s/*.root >> %s \n " % (outDS, mergeMonitoringFilesName))
+
+                mergeMonitoringFile.close()
+
+
+        def send(self):
+                os.system("chmod +x %s" % self.SCRIPTNAME)
+                print ("in doing merge----------------------------------------------")
+                os.system("sh %s  \n"   % self.SCRIPTNAME)
+                print ("after doing merge----------------------------------------------")
 
 
 
 
 # For Comparing the before and after monitoring files 
 class compareMonitoringScript:
-	def __init__(self,
-		     OutputPath,
-		     numIter,
-		     CMTDIR,
-		     ATHENAREL,
-		     TAGS
-		     ):
-		self.OutputPath = OutputPath
-		self.numIter    = numIter
-		self.CMTDIR     = CMTDIR
-		self.ATHENAREL  = ATHENAREL
-		self.TAGS       = TAGS
-
-		
-	def write(self):
-		# Names of the Job and the Script
-		self.SCRIPTNAME = self.OutputPath + '/MonitoringComparison.lsf'
-		self.JOBNAME = 'MonitoringComparison.py'
-		
-		# Write the Script
-		script=open(self.SCRIPTNAME,'w')
-		script.write("#BSUB -J MonitoringComparision \n")
-		script.write("#BSUB -o "+self.OutputPath+"/MonitoringComparision.log \n")
-		script.write("\n")
-		script.write("#   setup the environment \n")
-		script.write("source %s/setup.sh -tag=%s,%s \n" % (self.CMTDIR, self.ATHENAREL, self.TAGS))
-		script.write("cd "+self.OutputPath+" \n")
-		script.write("athena.py "+self.JOBNAME+" \n")
-		script.write("cd -")
-		script.close()
-		
-		# Write the Job
-		job=open(self.OutputPath+"/"+self.JOBNAME,"w")
-		job.write(" \n")
-		job.write("# ==================================== \n")
-		job.write("# The Reference File (Black)           \n")
-		job.write('afterAlignmentFile = "'+self.OutputPath+'/Iter'+str(self.numIter-1)+'/TotalMonitoring.root" \n')
-		job.write(" \n")
-		job.write("# ==================================== \n")
-		job.write("# The Monitored File (Red)             \n")
-		job.write('beforeAlignmentFile = "'+self.OutputPath+'/Iter0/TotalMonitoring.root"\n')
-		job.write("\n")
-		job.write("# ==================================== \n")
-		job.write("# The Output File                      \n")
-		job.write('outputFile = "AlignmentOutput.root"    \n')
-		job.write("\n")
-		job.write("# ==================================== \n")
-		job.write("include('InDetAlignmentMonitoring/makeComparision.py') \n")
-		job.write(" \n")
-		job.close()
-		
-	def send(self):
-		os.system("chmod +x %s" % self.SCRIPTNAME)
-		print ("----------------------------------------------")
-		print ("  Running MonitoringComparision.lsf job")
-		os.system("sh "+self.SCRIPTNAME+" | tee "+self.OutputPath+"/MonitoringComparison.log \n")
-		print ("----------------------------------------------")
-			
-         
-
+        def __init__(self,
+                     OutputPath,
+                     numIter,
+                     CMTDIR,
+                     ATHENAREL,
+                     TAGS
+                     ):
+                self.OutputPath = OutputPath
+                self.numIter    = numIter
+                self.CMTDIR     = CMTDIR
+                self.ATHENAREL  = ATHENAREL
+                self.TAGS       = TAGS
+
+
+        def write(self):
+                # Names of the Job and the Script
+                self.SCRIPTNAME = self.OutputPath + '/MonitoringComparison.lsf'
+                self.JOBNAME = 'MonitoringComparison.py'
+
+                # Write the Script
+                script=open(self.SCRIPTNAME,'w')
+                script.write("#BSUB -J MonitoringComparision \n")
+                script.write("#BSUB -o "+self.OutputPath+"/MonitoringComparision.log \n")
+                script.write("\n")
+                script.write("#   setup the environment \n")
+                script.write("source %s/setup.sh -tag=%s,%s \n" % (self.CMTDIR, self.ATHENAREL, self.TAGS))
+                script.write("cd "+self.OutputPath+" \n")
+                script.write("athena.py "+self.JOBNAME+" \n")
+                script.write("cd -")
+                script.close()
+
+                # Write the Job
+                job=open(self.OutputPath+"/"+self.JOBNAME,"w")
+                job.write(" \n")
+                job.write("# ==================================== \n")
+                job.write("# The Reference File (Black)           \n")
+                job.write('afterAlignmentFile = "'+self.OutputPath+'/Iter'+str(self.numIter-1)+'/TotalMonitoring.root" \n')
+                job.write(" \n")
+                job.write("# ==================================== \n")
+                job.write("# The Monitored File (Red)             \n")
+                job.write('beforeAlignmentFile = "'+self.OutputPath+'/Iter0/TotalMonitoring.root"\n')
+                job.write("\n")
+                job.write("# ==================================== \n")
+                job.write("# The Output File                      \n")
+                job.write('outputFile = "AlignmentOutput.root"    \n')
+                job.write("\n")
+                job.write("# ==================================== \n")
+                job.write("include('InDetAlignmentMonitoring/makeComparision.py') \n")
+                job.write(" \n")
+                job.close()
+
+        def send(self):
+                os.system("chmod +x %s" % self.SCRIPTNAME)
+                print ("----------------------------------------------")
+                print ("  Running MonitoringComparision.lsf job")
+                os.system("sh "+self.SCRIPTNAME+" | tee "+self.OutputPath+"/MonitoringComparison.log \n")
+                print ("----------------------------------------------")
diff --git a/InnerDetector/InDetExample/InDetAlignExample/python/NewInDet_Challenges.py b/InnerDetector/InDetExample/InDetAlignExample/python/NewInDet_Challenges.py
index c147e3dacf98655c7f0e484626a2fcb64bb75383..ba1e1bb493995d930d1950ec57be3fcbf5062f4e 100644
--- a/InnerDetector/InDetExample/InDetAlignExample/python/NewInDet_Challenges.py
+++ b/InnerDetector/InDetExample/InDetAlignExample/python/NewInDet_Challenges.py
@@ -1,202 +1,199 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 import os
 
 class setupChallenge:
-	def __init__ (self
+        def __init__ (self
                       ,thisChallenge
                       ,geometrySetting):
-		self.thisChallenge = thisChallenge
-		self.GlobalTag = ""
-		self.DetDescrVersion = ""
-		self.DetDescrVersionNoField = ""
-		
-		if self.thisChallenge == "CSC":
-			if os.environ['HOSTNAME'] == 'tst01.ific.uv.es':
-				collisionDir = "/data1/cescobar/data/csc/multimuons/"
-			else:
-				collisionDir = "castor:/castor/cern.ch/user/a/atlidali/data/csc/multimuons/"
-				
-			self.setup(CollisionDirectory = collisionDir
-			      ,CollisionFileList = "inputData_CSC_multimuons_digits.txt"
-			      ,CosmicBFieldDirectory = "castor:/castor/cern.ch/user/l/lytken/cosmic_13010/digitization/TRTBarrel/misaligned/"
-			      ,CosmicBFieldFileList  = "inputData_CSC_CosmicsRel13_BFOn.txt"
-			      ,CosmicNoBFieldDirectory = "castor:/castor/cern.ch/user/l/lytken/cosmic_13010/digitization/NoField/TRTBarrel/misaligned/"  
-			      ,CosmicNoBFieldFileList  = "inputData_CSC_CosmicsRel13_BFOff.txt")
-
-			if geometrySetting == 'Nominal':
-			    self.DetDescrVersion = "ATLAS-CSC-01-00-00"
-			    self.GlobalTag = 'OFLCOND-CSC-00-00-00'
-			if geometrySetting == 'Perfect':
-			    self.DetDescrVersion = "ATLAS-CSC-01-02-00"
-			    self.GlobalTag = 'OFLCOND-CSC-00-01-00'
-			if geometrySetting == 'Aligned':
-			    self.DetDescrVersion = "ATLAS-CSC-01-00-00"
-			    self.GlobalTag = 'OFLCOND-CSC-00-01-05'
-
-		if self.thisChallenge == 'FDR1':
-			self.setup(CollisionDirectory = "castor:/castor/cern.ch/user/h/hawkings/calibstream/fdr1/"
-			      ,CollisionFileList  = "inputData_FDR1_idcalibstream.txt")
-			
-			self.DetDescrVersion = "ATLAS-GEO-08-00-00"
-			self.DetDescrVersionNoField = "ATLAS-GEONF-08-00-00"
-			if geometrySetting == 'Nominal':
-				self.GlobalTag = 'COMCOND-REPC-003-00'
-			if geometrySetting == 'Aligned':
-				self.GlobalTag = 'COMCOND-ES1C-000-00'
-
-		if self.thisChallenge == 'FDR2':
-			self.setup(CollisionDirectory = "castor:/castor/cern.ch/user/b/bvendapi/FDR2_Pi/BS_files/CalibStream/"
-			      ,CollisionFileList = "inputData_FDR2_idcalibstream.txt"
-			      ,CosmicBFieldDirectory = "castor:/castor/cern.ch/user/b/bvendapi/cosmics_fdr2/"
-			      ,CosmicBFieldFileLiast = "inputData_FDR2_cosmicstream.txt")
-
-
-		if self.thisChallenge == 'M8plus':
-			self.setup(CosmicBFieldDirectory = "castor:/castor/cern.ch/grid/atlas/caf/atlcal/perm/id/cosmics/ESDs/"
-				   ,CosmicBFieldFileList  = "inputData_M8plus_Cosmic_91800.txt"
-				   ,CosmicNoBFieldDirectory = "castor:/castor/cern.ch/grid/atlas/caf/atlcal/perm/id/cosmics/ESDs/"
-				   ,CosmicNoBFieldFileList  = "inputData_M8plus_Cosmic_NoBField.txt")
-			
-			self.DetDescrVersion = "ATLAS-GEO-03-00-00"
-			self.DetDescrVersionNoField = "ATLAS-GEONF-04-00-00"
-
-		if self.thisChallenge == 'Cosmic09':
-			self.setup(CosmicBFieldDirectory = ""
-			      #,CosmicBFieldDirectory = "castor:/castor/cern.ch/grid/atlas/atlasdatadisk/data09_cos/ESD/"
-			      ,CosmicBFieldFileList  = "inputData_Cosmic09_BFOn.txt"
-			      ,CosmicNoBFieldDirectory = ""
-			      #,CosmicNoBFieldDirectory = "castor:/castor/cern.ch/grid/atlas/atlasdatadisk/data09_cos/ESD/"
-			      ,CosmicNoBFieldFileList  = "inputData_Cosmic09_BFOff.txt")
-			
-			self.DetDescrVersion = "ATLAS-GEO-08-00-00"
-			self.DetDescrVersionNoField = "ATLAS-GEONF-08-00-00"
-			
-			if geometrySetting == 'Nominal':
-				self.GlobalTag = 'COMCOND-REPC-003-00'
-			if geometrySetting == 'Aligned':
-				self.GlobalTag = 'COMCOND-ES1C-000-00'
-
-
-		if self.thisChallenge == 'CosmicStream':
-			self.setup(CosmicBFieldFileList =  "inputData_CosmicStream.txt"
-			      ,CosmicBFieldDirectory = "rfio:/castor/cern.ch/user/s/sfyrla/91338_PEB/")
-
-		if self.thisChallenge == 'CosmicsRel14':
-			self.setup(CosmicBFieldDirectory = "castor:/castor/cern.ch/grid/atlas/atlasgroupdisk/proj-simcos/rel14/ATLAS-GEO-03-00-00/dig/"
-			      ,CosmicBFieldFileList  = "inputData_CosmicsRel14_BFOn.txt"
-			      ,CosmicNoBFieldDirectory = "castor:/castor/cern.ch/grid/atlas/atlasgroupdisk/proj-simcos/rel14/ATLAS-GEONF-04-00-00/"
-			      ,CosmicNoBFieldFileList  = "inputData_CosmicsRel14_BFOff.txt")
-
-		if self.thisChallenge == 'CalibrationStream':
-			self.setup(CollisionDirectory = ""
-			      ,CollisionFileList = "inputData_CalibrationStream.txt")
-
-		if self.thisChallenge == 'CosmicSim09':
-			self.setup(CosmicBFieldFileList = "inputData_CosmicSim2009_BOn.txt"
-			      ,CosmicNoBFieldFileList = "inputData_CosmicSim2009_BOff.txt")
-
-
-		if self.thisChallenge == "FDR09":
-			self.setup(CollisionDirectory = "castor:/castor/cern.ch/grid/atlas/caf/atlcal/perm/id/FDR2_IDCalibStream/ESD/"
-			      ,CollisionFileList = "inputData_FDR09.txt")
-			
-			if geometrySetting == 'Nominal':
-				self.DetDescrVersion = "ATLAS-CSC-02-00-00"
-				self.GlobalTag = 'OFLCOND-FDR-02-08-00'
-			if geometrySetting == 'Perfect':
-				self.DetDescrVersion = "ATLAS-CSC-02-00-00"
-				self.GlobalTag = 'OFLCOND-CSC-01-00-00'
-
-		if self.thisChallenge == "MC09":
-			self.setup(CollisionDirectory = "castor:/castor/cern.ch/grid/atlas/atlasgroupdisk/perf-idtracking/dq2/mc09_valid/ESD/e436_s561_r731/mc09_valid.107271.Multimuons_pt9.recon.ESD.e436_s561_r731_tid076491/"
-			      ,CollisionFileList = "inputData_multimuons09.txt")
-			
-			if geometrySetting == 'Nominal':
-				self.DetDescrVersion = "ATLAS-GEO-08-00-00"
-				self.GlobalTag = 'OFLCOND-SIM-00-00-07'
-			if geometrySetting == 'Perfect':
-				self.DetDescrVersion = "ATLAS-GEO-08-00-00"
-				self.GlobalTag = 'OFLCOND-CSC-01-00-00'
-
-		if self.thisChallenge == "MinBias":
-			self.setup(CollisionDirectory = "/afs/cern.ch/user/a/atlidali/w0/data/minbias/"
-			      ,CollisionFileList = "inputData_minbias.txt")
-
-			if geometrySetting == 'Nominal':
-				self.DetDescrVersion = "ATLAS-GEO-08-00-00"
-				self.GlobalTag = 'OFLCOND-SIM-00-00-00'
-			if geometrySetting == 'Perfect':
-				self.DetDescrVersion = "ATLAS-GEO-08-00-00"
-				self.GlobalTag = 'OFLCOND-SIM-01-00-00'
-			if geometrySetting == 'Aligned':
-				self.DetDescrVersion = "ATLAS-GEO-08-00-00"
-				self.GlobalTag = 'OFLCOND-CSC-00-01-05'
-
-		if self.thisChallenge == "Halo":
-			# Beam halo events
-			self.setup(BeamHaloDirectory = "castor:/castor/cern.ch/user/t/tcorneli/"
-			      ,BeamHaloFileList = "inputData_halo_digits.txt")
-
-		if self.thisChallenge == "BeamGas":
-			# Beam Gas O events
-			self.setup(BeamGasDirectory ="castor:/castor/cern.ch/user/s/stradlin/BeamGas_1.1/digit/hijing.O.digit/"
-			      ,BeamGasFileList  = "inputData_beamgas_digits.txt")
-
-		if self.thisChallenge == "900GeV":
-			self.setup(CollisionDirectory = "",
-				   CollisionFileList  = "CustomRun_900GeV.txt",
-				   CosmicNoBFieldFileList = "GoodRunList_Cosmic_NoBF_DPD_atlasdatadisk.txt")
-
-			if 'Nominal':
-				self.DetDescrVersion = "ATLAS-GEO-08-00-02"
-				# self.GlobalTag = "COMCOND-ES1PST-001-00"
-				self.GlobalTag = "COMCOND-ES1PS-001-00"
-#			self.DetDescrVersion = "ATLAS-GEO-08-00-02"
-			self.DetDescrVersionNoField = "ATLAS-GEONF-08-00-00"
-		
-		if self.thisChallenge == "7TeV":
-			self.setup(CollisionDirectory = "",
-				   CollisionFileList  = "CustomRun_7TeV.txt",
-				   CosmicNoBFieldFileList = "CustomRun_CosmicsNoBF_7TeV.txt",
-				   CosmicBFieldFileList = "CustomRun_CosmicsBF_7TeV.txt")
-			#self.GlobalTag = "COMCOND-ES1PST-002-00"
-			self.GlobalTag = ""  #2010 data
-			#self.DetDescrVersion = "ATLAS-GEO-10-00-00"
-			self.DetDescrVersion = ""
-			self.DetDescrVersionNoField = "ATLAS-GEONF-08-00-00"
-		
-		
-		if self.thisChallenge == "SingleBeam":
-			self.setup(CollisionDirectory = ""
-				   ,CollisionFileList  = "CustomRun_SingleBeam.txt")
-			if 'Nominal':
-				self.DetDescrVersion = "ATLAS-GEO-03-00-00"
-				self.GlobalTag = "COMCOND-ES1C-000-00"
-		
-	
-	def setup(self
+                self.thisChallenge = thisChallenge
+                self.GlobalTag = ""
+                self.DetDescrVersion = ""
+                self.DetDescrVersionNoField = ""
+
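+                # each challenge selects its input directories/file lists and, where appropriate, the geometry and conditions tags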
+                if self.thisChallenge == "CSC":
+                        if os.environ['HOSTNAME'] == 'tst01.ific.uv.es':
+                                collisionDir = "/data1/cescobar/data/csc/multimuons/"
+                        else:
+                                collisionDir = "castor:/castor/cern.ch/user/a/atlidali/data/csc/multimuons/"
+
+                        self.setup(CollisionDirectory = collisionDir
+                              ,CollisionFileList = "inputData_CSC_multimuons_digits.txt"
+                              ,CosmicBFieldDirectory = "castor:/castor/cern.ch/user/l/lytken/cosmic_13010/digitization/TRTBarrel/misaligned/"
+                              ,CosmicBFieldFileList  = "inputData_CSC_CosmicsRel13_BFOn.txt"
+                              ,CosmicNoBFieldDirectory = "castor:/castor/cern.ch/user/l/lytken/cosmic_13010/digitization/NoField/TRTBarrel/misaligned/"
+                              ,CosmicNoBFieldFileList  = "inputData_CSC_CosmicsRel13_BFOff.txt")
+
+                        if geometrySetting == 'Nominal':
+                            self.DetDescrVersion = "ATLAS-CSC-01-00-00"
+                            self.GlobalTag = 'OFLCOND-CSC-00-00-00'
+                        if geometrySetting == 'Perfect':
+                            self.DetDescrVersion = "ATLAS-CSC-01-02-00"
+                            self.GlobalTag = 'OFLCOND-CSC-00-01-00'
+                        if geometrySetting == 'Aligned':
+                            self.DetDescrVersion = "ATLAS-CSC-01-00-00"
+                            self.GlobalTag = 'OFLCOND-CSC-00-01-05'
+
+                if self.thisChallenge == 'FDR1':
+                        self.setup(CollisionDirectory = "castor:/castor/cern.ch/user/h/hawkings/calibstream/fdr1/"
+                              ,CollisionFileList  = "inputData_FDR1_idcalibstream.txt")
+
+                        self.DetDescrVersion = "ATLAS-GEO-08-00-00"
+                        self.DetDescrVersionNoField = "ATLAS-GEONF-08-00-00"
+                        if geometrySetting == 'Nominal':
+                                self.GlobalTag = 'COMCOND-REPC-003-00'
+                        if geometrySetting == 'Aligned':
+                                self.GlobalTag = 'COMCOND-ES1C-000-00'
+
+                if self.thisChallenge == 'FDR2':
+                        self.setup(CollisionDirectory = "castor:/castor/cern.ch/user/b/bvendapi/FDR2_Pi/BS_files/CalibStream/"
+                              ,CollisionFileList = "inputData_FDR2_idcalibstream.txt"
+                              ,CosmicBFieldDirectory = "castor:/castor/cern.ch/user/b/bvendapi/cosmics_fdr2/"
+                              ,CosmicBFieldFileList = "inputData_FDR2_cosmicstream.txt")
+
+
+                if self.thisChallenge == 'M8plus':
+                        self.setup(CosmicBFieldDirectory = "castor:/castor/cern.ch/grid/atlas/caf/atlcal/perm/id/cosmics/ESDs/"
+                                   ,CosmicBFieldFileList  = "inputData_M8plus_Cosmic_91800.txt"
+                                   ,CosmicNoBFieldDirectory = "castor:/castor/cern.ch/grid/atlas/caf/atlcal/perm/id/cosmics/ESDs/"
+                                   ,CosmicNoBFieldFileList  = "inputData_M8plus_Cosmic_NoBField.txt")
+
+                        self.DetDescrVersion = "ATLAS-GEO-03-00-00"
+                        self.DetDescrVersionNoField = "ATLAS-GEONF-04-00-00"
+
+                if self.thisChallenge == 'Cosmic09':
+                        self.setup(CosmicBFieldDirectory = ""
+                              #,CosmicBFieldDirectory = "castor:/castor/cern.ch/grid/atlas/atlasdatadisk/data09_cos/ESD/"
+                              ,CosmicBFieldFileList  = "inputData_Cosmic09_BFOn.txt"
+                              ,CosmicNoBFieldDirectory = ""
+                              #,CosmicNoBFieldDirectory = "castor:/castor/cern.ch/grid/atlas/atlasdatadisk/data09_cos/ESD/"
+                              ,CosmicNoBFieldFileList  = "inputData_Cosmic09_BFOff.txt")
+
+                        self.DetDescrVersion = "ATLAS-GEO-08-00-00"
+                        self.DetDescrVersionNoField = "ATLAS-GEONF-08-00-00"
+
+                        if geometrySetting == 'Nominal':
+                                self.GlobalTag = 'COMCOND-REPC-003-00'
+                        if geometrySetting == 'Aligned':
+                                self.GlobalTag = 'COMCOND-ES1C-000-00'
+
+
+                if self.thisChallenge == 'CosmicStream':
+                        self.setup(CosmicBFieldFileList =  "inputData_CosmicStream.txt"
+                              ,CosmicBFieldDirectory = "rfio:/castor/cern.ch/user/s/sfyrla/91338_PEB/")
+
+                if self.thisChallenge == 'CosmicsRel14':
+                        self.setup(CosmicBFieldDirectory = "castor:/castor/cern.ch/grid/atlas/atlasgroupdisk/proj-simcos/rel14/ATLAS-GEO-03-00-00/dig/"
+                              ,CosmicBFieldFileList  = "inputData_CosmicsRel14_BFOn.txt"
+                              ,CosmicNoBFieldDirectory = "castor:/castor/cern.ch/grid/atlas/atlasgroupdisk/proj-simcos/rel14/ATLAS-GEONF-04-00-00/"
+                              ,CosmicNoBFieldFileList  = "inputData_CosmicsRel14_BFOff.txt")
+
+                if self.thisChallenge == 'CalibrationStream':
+                        self.setup(CollisionDirectory = ""
+                              ,CollisionFileList = "inputData_CalibrationStream.txt")
+
+                if self.thisChallenge == 'CosmicSim09':
+                        self.setup(CosmicBFieldFileList = "inputData_CosmicSim2009_BOn.txt"
+                              ,CosmicNoBFieldFileList = "inputData_CosmicSim2009_BOff.txt")
+
+
+                if self.thisChallenge == "FDR09":
+                        self.setup(CollisionDirectory = "castor:/castor/cern.ch/grid/atlas/caf/atlcal/perm/id/FDR2_IDCalibStream/ESD/"
+                              ,CollisionFileList = "inputData_FDR09.txt")
+
+                        if geometrySetting == 'Nominal':
+                                self.DetDescrVersion = "ATLAS-CSC-02-00-00"
+                                self.GlobalTag = 'OFLCOND-FDR-02-08-00'
+                        if geometrySetting == 'Perfect':
+                                self.DetDescrVersion = "ATLAS-CSC-02-00-00"
+                                self.GlobalTag = 'OFLCOND-CSC-01-00-00'
+
+                if self.thisChallenge == "MC09":
+                        self.setup(CollisionDirectory = "castor:/castor/cern.ch/grid/atlas/atlasgroupdisk/perf-idtracking/dq2/mc09_valid/ESD/e436_s561_r731/mc09_valid.107271.Multimuons_pt9.recon.ESD.e436_s561_r731_tid076491/"
+                              ,CollisionFileList = "inputData_multimuons09.txt")
+
+                        if geometrySetting == 'Nominal':
+                                self.DetDescrVersion = "ATLAS-GEO-08-00-00"
+                                self.GlobalTag = 'OFLCOND-SIM-00-00-07'
+                        if geometrySetting == 'Perfect':
+                                self.DetDescrVersion = "ATLAS-GEO-08-00-00"
+                                self.GlobalTag = 'OFLCOND-CSC-01-00-00'
+
+                if self.thisChallenge == "MinBias":
+                        self.setup(CollisionDirectory = "/afs/cern.ch/user/a/atlidali/w0/data/minbias/"
+                              ,CollisionFileList = "inputData_minbias.txt")
+
+                        if geometrySetting == 'Nominal':
+                                self.DetDescrVersion = "ATLAS-GEO-08-00-00"
+                                self.GlobalTag = 'OFLCOND-SIM-00-00-00'
+                        if geometrySetting == 'Perfect':
+                                self.DetDescrVersion = "ATLAS-GEO-08-00-00"
+                                self.GlobalTag = 'OFLCOND-SIM-01-00-00'
+                        if geometrySetting == 'Aligned':
+                                self.DetDescrVersion = "ATLAS-GEO-08-00-00"
+                                self.GlobalTag = 'OFLCOND-CSC-00-01-05'
+
+                if self.thisChallenge == "Halo":
+                        # Beam halo events
+                        self.setup(BeamHaloDirectory = "castor:/castor/cern.ch/user/t/tcorneli/"
+                              ,BeamHaloFileList = "inputData_halo_digits.txt")
+
+                if self.thisChallenge == "BeamGas":
+                        # Beam Gas O events
+                        self.setup(BeamGasDirectory ="castor:/castor/cern.ch/user/s/stradlin/BeamGas_1.1/digit/hijing.O.digit/"
+                              ,BeamGasFileList  = "inputData_beamgas_digits.txt")
+
+                if self.thisChallenge == "900GeV":
+                        self.setup(CollisionDirectory = "",
+                                   CollisionFileList  = "CustomRun_900GeV.txt",
+                                   CosmicNoBFieldFileList = "GoodRunList_Cosmic_NoBF_DPD_atlasdatadisk.txt")
+
+                        if geometrySetting == 'Nominal':
+                                self.DetDescrVersion = "ATLAS-GEO-08-00-02"
+                                # self.GlobalTag = "COMCOND-ES1PST-001-00"
+                                self.GlobalTag = "COMCOND-ES1PS-001-00"
+#                       self.DetDescrVersion = "ATLAS-GEO-08-00-02"
+                        self.DetDescrVersionNoField = "ATLAS-GEONF-08-00-00"
+
+                if self.thisChallenge == "7TeV":
+                        self.setup(CollisionDirectory = "",
+                                   CollisionFileList  = "CustomRun_7TeV.txt",
+                                   CosmicNoBFieldFileList = "CustomRun_CosmicsNoBF_7TeV.txt",
+                                   CosmicBFieldFileList = "CustomRun_CosmicsBF_7TeV.txt")
+                        #self.GlobalTag = "COMCOND-ES1PST-002-00"
+                        self.GlobalTag = ""  #2010 data
+                        #self.DetDescrVersion = "ATLAS-GEO-10-00-00"
+                        self.DetDescrVersion = ""
+                        self.DetDescrVersionNoField = "ATLAS-GEONF-08-00-00"
+
+
+                if self.thisChallenge == "SingleBeam":
+                        self.setup(CollisionDirectory = ""
+                                   ,CollisionFileList  = "CustomRun_SingleBeam.txt")
+                        if geometrySetting == 'Nominal':
+                                self.DetDescrVersion = "ATLAS-GEO-03-00-00"
+                                self.GlobalTag = "COMCOND-ES1C-000-00"
+
+
+        def setup(self
                   ,CollisionDirectory = ""
                   ,CollisionFileList = ""
                   ,CosmicBFieldDirectory = ""
                   ,CosmicBFieldFileList  = ""
                   ,CosmicNoBFieldDirectory = ""
                   ,CosmicNoBFieldFileList  = ""):
-		
-		self.CollisionDirectory = CollisionDirectory
-		self.CollisionFileList = CollisionFileList
-		if self.CollisionFileList:
-			os.system("get_files -jo %s >/dev/null" % self.CollisionFileList)
 
-		self.CosmicBFieldDirectory = CosmicBFieldDirectory
-		self.CosmicBFieldFileList = CosmicBFieldFileList
-		if self.CosmicBFieldFileList:
-			os.system("get_files -jo %s >/dev/null" % self.CosmicBFieldFileList)
+                self.CollisionDirectory = CollisionDirectory
+                self.CollisionFileList = CollisionFileList
+                if self.CollisionFileList:
+                        os.system("get_files -jo %s >/dev/null" % self.CollisionFileList)
 
-		self.CosmicNoBFieldDirectory = CosmicNoBFieldDirectory
-		self.CosmicNoBFieldFileList = CosmicNoBFieldFileList
-		if self.CosmicNoBFieldDirectory:
-			os.system("get_files -jo %s >/dev/null" % self.CosmicNoBFieldFileList)
-
-            
+                self.CosmicBFieldDirectory = CosmicBFieldDirectory
+                self.CosmicBFieldFileList = CosmicBFieldFileList
+                if self.CosmicBFieldFileList:
+                        os.system("get_files -jo %s >/dev/null" % self.CosmicBFieldFileList)
 
+                self.CosmicNoBFieldDirectory = CosmicNoBFieldDirectory
+                self.CosmicNoBFieldFileList = CosmicNoBFieldFileList
+                if self.CosmicNoBFieldFileList:
+                        os.system("get_files -jo %s >/dev/null" % self.CosmicNoBFieldFileList)
diff --git a/InnerDetector/InDetExample/InDetAlignExample/python/NewInDet_IteratorClasses.py b/InnerDetector/InDetExample/InDetAlignExample/python/NewInDet_IteratorClasses.py
index b091ed52a1a77a5a9c84dc0b80074f0d9e1b498c..64e4885f8470a50738bbfae233579d187be5c8cb 100644
--- a/InnerDetector/InDetExample/InDetAlignExample/python/NewInDet_IteratorClasses.py
+++ b/InnerDetector/InDetExample/InDetAlignExample/python/NewInDet_IteratorClasses.py
@@ -6,9 +6,6 @@
 #
 # =====================================================================
 
-from __future__ import print_function
-
-from threading import Thread
 import os
 import time
 
@@ -26,11 +23,11 @@ class getAthenaConfig:
         
         try:
             self.athenaTestArea = os.environ['TestArea']
-        except:
+        except Exception:
             self.athenaTestArea = ""
         try:
             self.athenaTags = os.environ['AtlasProject'] + ',' + self.athenaConf.replace("-",",")
-        except:
+        except Exception:
             self.athenaTags = ""
         if self.atlasSet == "AtlasSetup":
             self.atlasSetupPath = os.environ['AtlasSetup']
@@ -119,8 +116,7 @@ class manageJob:
         #   os.system('ln -s %s %s' % (self.MonitoringScript,self.RunPath+"/InDetRecExample/InDetMonitoringAlignment.py") )
 
     def writeJO(self):
-        PrefixName="Iter%d%s_" % (self.i, self.folderSuffix)
-        
+
         job=open(self.RunPath + self.JOBNAME,'w')
         job.write('##-------- Alignment Configuration --------------------\n')
 
@@ -267,7 +263,6 @@ class manageJob:
         while (os.popen('bjobs -w').read().find(self.preName)!=-1 and os.popen('bjobs -w').read().find(self.folderSuffix)!=-1):
             time.sleep(30)
             
-import os
 class SortCpus:
     def __init__(self, TOTALCPUS, LOCALDIR, FILELIST, OutputLevel,doDetailedSplitting = False, nEventsPerFile=-1):
             def sort_by_value(d):
@@ -323,7 +318,6 @@ class SortCpus:
                             SizeList[i][1] = curr[4].rstrip()
                             i = i+1
                     FinalList = {}
-                    count = 0
                     
                     for i in range(0,len(SizeList)):
                         #print (SizeList[i][0])
@@ -786,8 +780,6 @@ def HandleRunOptions():
     User_ColCPUs = 0
     Col_CPUs = Def_ColCPUs
 
-    argNum = 1
-    argMax = len(sys.argv)-1
     argCurr = -1 # jut to make that first time it points to 0
     
     #print (' >>> user input has %s arguments' % argMax)
diff --git a/InnerDetector/InDetExample/InDetAlignExample/python/NewInDet_IteratorClasses_EoverPMaps.py b/InnerDetector/InDetExample/InDetAlignExample/python/NewInDet_IteratorClasses_EoverPMaps.py
index 7332c5397065314c2342852cac5834fc23d3d99b..2d0027b91ca84188effda5dffb56018022292ce3 100644
--- a/InnerDetector/InDetExample/InDetAlignExample/python/NewInDet_IteratorClasses_EoverPMaps.py
+++ b/InnerDetector/InDetExample/InDetAlignExample/python/NewInDet_IteratorClasses_EoverPMaps.py
@@ -1,210 +1,208 @@
 # Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
-from __future__ import print_function
-
 import os, time
 
 class manageJobEoverPMaps:
-	def __init__(self,
-		     OutputPath,
-		     dataName,
-		     iter,
-		     part,
-		     JOBNAME,
-		     preName,
-     	     ATHENACFG,
-		     RecoOptions={},
-		     extraOptions={},
-		     AlignmentOptions={},
-		     RecoScript="InDetAlignExample/NewTopOptions.py",
-		     AlignmentScript="InDetAlignExample/NewInDetAlignAlgSetup.py",
-		     AlignmentLevels = "InDetAlignExample/NewInDetAlignLevels.py",
-		     #MonitoringScript = "InDetRecExample/InDetMonitoringAlignment.py", 
-		     QUEUE = "1nh",
-		     CMTDIR = "",
-		     inputPoolFiles = "",
-		     SCRIPTNAME = "",
-		     RunPath = ""):
-		self.OutputPath = OutputPath
-		self.preName = preName
-		self.i=iter
-		self.j=part
-		self.dataName = dataName
-		self.QUEUE=QUEUE
-		self.CMTDIR=CMTDIR
-		self.ATHENACFG=ATHENACFG
-		self.inputPoolFiles = inputPoolFiles
-		self.JOBNAME=JOBNAME
-		self.RecoOptions = RecoOptions
-		self.extraOptions = extraOptions
-		self.RecoScript = RecoScript
-		self.AlignmentOptions = AlignmentOptions
-		self.AlignmentScript = AlignmentScript
-		self.AlignmentLevels = AlignmentLevels
-		#self.MonitoringScript = MonitoringScript
-		self.SCRIPTNAME = SCRIPTNAME
-		self.RunPath = RunPath
-
-		
-	def createDirectories(self):
-		os.system('mkdir -p %s' % self.RunPath)
-
-	def writeJO(self):		
-		job=open(self.RunPath + self.JOBNAME,'w')
-		
-
-		job.write('##-------- Reconstruction Configuration --------------------\n')
-		
-		for option in self.RecoOptions:
-			if type(self.RecoOptions[option]) is str:
-				customoption = option + "\t=\t\"" +  self.RecoOptions[option]+'\"\n'
-				job.write(customoption)
-			else:
-				if option == "inputFiles":
-					customoption = option + "\t=\t" +  str(self.RecoOptions[option])+'\n'
-				else:
-					customoption = option + "\t=\t" +  str(self.RecoOptions[option])+'\n'
-				job.write(customoption)
-
-		if len(self.extraOptions):
-			job.write("\n")
-			job.write('##-------- Extra Configuration --------------------\n')
-		
-		for option in self.extraOptions:
-			if type(self.extraOptions[option]) is str:
-				customoption = option + "\t=\t\"" +  self.extraOptions[option]+'\"\n'
-				job.write(customoption)
-			else:
-				customoption = option + "\t=\t" +  str(self.extraOptions[option])+'\n'
-				job.write(customoption)
-
-		job.write("\n")
-		job.write('##-------- End of custom options -------------\n')
-
-		# Need to write the InDetRec Options Here:
-		job.write("\n")
-		job.write('##-------- Load Reconstruction --------------------\n')
-		job.write('include("'+str(self.RecoScript)+'") \n')
-
-		job.close()
-
-	def writeScript(self):
-		self.SCRIPTNAME = self.RunPath + '/' + self.SCRIPTNAME
-		script=open(self.SCRIPTNAME,'w')
-		script.write("#BSUB -J %s_%s_Part%02d \n" % (self.preName,self.dataName, self.j))
-		script.write("#BSUB -o %s/logs/%s_Part%02d.log \n" % (self.OutputPath,self.dataName,self.j))
-		script.write("#BSUB -e %s/logs/%s_Part%02d.err \n" % (self.OutputPath,self.dataName,self.j))
-					
-		script.write("#BSUB -q %s \n" % self.QUEUE)
-		script.write("\n")
-		script.write("#   setup the environment \n")
-		if self.ATHENACFG.atlasSetup() == "CMTHOME":
-			script.write("source %s/setup.sh -tag=%s,%s \n" % (self.CMTDIR, self.ATHENACFG.Release(), self.ATHENACFG.Tags()))
-		elif "single" in self.ATHENACFG.AtlasSetupOptions():
-			script.write("source %s/scripts/asetup.sh %s --testarea=%s --tags=%s --single \n" % (self.ATHENACFG.AtlasSetupPath(),self.ATHENACFG.Release(),self.ATHENACFG.TestArea(), self.ATHENACFG.Tags()))
-		else:
-			script.write("source %s/scripts/asetup.sh %s --testarea=%s --tags=%s \n" % (self.ATHENACFG.AtlasSetupPath(),self.ATHENACFG.Release(),self.ATHENACFG.TestArea(), self.ATHENACFG.Tags()))
-		script.write("export STAGE_SVCCLASS=atldata\n")
-		
-
-		script.write("cd %s \n" % self.RunPath)
-
-		for file in self.inputPoolFiles:
-			#if 'ESD' in file or 'AlignmentConstants' in file:
-			if 'AlignmentConstants' in file or 'ESD' in file:
-				script.write("pool_insertFileToCatalog "+ file + " \n")
-		
-		script.write("athena %s \n" % self.JOBNAME)
-		script.write("cd -")
-		script.close()
-		
-	def send(self,runmode):
-		os.system("chmod +x %s" % self.SCRIPTNAME)
-		print ("----------------------------------------------")
-		if runmode == "batch":
-			os.system("bsub <%s" % self.SCRIPTNAME)
-		   
-			print ("  Sending %s_%s_Part%02d job to LxBatch" % (self.preName,self.dataName, self.j))
-				  
-		elif runmode == "local":
-			print ("  Running %s_%s_Part%02d job" % (self.preName,self.dataName,self.j))
-			os.system("sh %s | tee %s/logs/%s_Part%02d.log \n" % (self.SCRIPTNAME, self.OutputPath,self.dataName,self.j))
-
-			
-		print ("----------------------------------------------")
-
-	def wait(self):
-		print ("Processing in lxbatch...")
-		# Wait for signal
-		time.sleep(30)
-		while os.popen('bjobs -w').read().find(self.preName)!=-1:
-			time.sleep(30)
-			
+        def __init__(self,
+                     OutputPath,
+                     dataName,
+                     iter,
+                     part,
+                     JOBNAME,
+                     preName,
+                     ATHENACFG,
+                     RecoOptions={},
+                     extraOptions={},
+                     AlignmentOptions={},
+                     RecoScript="InDetAlignExample/NewTopOptions.py",
+                     AlignmentScript="InDetAlignExample/NewInDetAlignAlgSetup.py",
+                     AlignmentLevels = "InDetAlignExample/NewInDetAlignLevels.py",
+                     #MonitoringScript = "InDetRecExample/InDetMonitoringAlignment.py",
+                     QUEUE = "1nh",
+                     CMTDIR = "",
+                     inputPoolFiles = "",
+                     SCRIPTNAME = "",
+                     RunPath = ""):
+                self.OutputPath = OutputPath
+                self.preName = preName
+                self.i=iter
+                self.j=part
+                self.dataName = dataName
+                self.QUEUE=QUEUE
+                self.CMTDIR=CMTDIR
+                self.ATHENACFG=ATHENACFG
+                self.inputPoolFiles = inputPoolFiles
+                self.JOBNAME=JOBNAME
+                self.RecoOptions = RecoOptions
+                self.extraOptions = extraOptions
+                self.RecoScript = RecoScript
+                self.AlignmentOptions = AlignmentOptions
+                self.AlignmentScript = AlignmentScript
+                self.AlignmentLevels = AlignmentLevels
+                #self.MonitoringScript = MonitoringScript
+                self.SCRIPTNAME = SCRIPTNAME
+                self.RunPath = RunPath
+
+
+        def createDirectories(self):
+                os.system('mkdir -p %s' % self.RunPath)
+
+        def writeJO(self):
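+                # write the job options: reconstruction options first, then any extra options, then include the reconstruction script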
+                job=open(self.RunPath + self.JOBNAME,'w')
+
+
+                job.write('##-------- Reconstruction Configuration --------------------\n')
+
+                for option in self.RecoOptions:
+                        if type(self.RecoOptions[option]) is str:
+                                customoption = option + "\t=\t\"" +  self.RecoOptions[option]+'\"\n'
+                                job.write(customoption)
+                        else:
+                                if option == "inputFiles":
+                                        customoption = option + "\t=\t" +  str(self.RecoOptions[option])+'\n'
+                                else:
+                                        customoption = option + "\t=\t" +  str(self.RecoOptions[option])+'\n'
+                                job.write(customoption)
+
+                if len(self.extraOptions):
+                        job.write("\n")
+                        job.write('##-------- Extra Configuration --------------------\n')
+
+                for option in self.extraOptions:
+                        if type(self.extraOptions[option]) is str:
+                                customoption = option + "\t=\t\"" +  self.extraOptions[option]+'\"\n'
+                                job.write(customoption)
+                        else:
+                                customoption = option + "\t=\t" +  str(self.extraOptions[option])+'\n'
+                                job.write(customoption)
+
+                job.write("\n")
+                job.write('##-------- End of custom options -------------\n')
+
+                # Need to write the InDetRec Options Here:
+                job.write("\n")
+                job.write('##-------- Load Reconstruction --------------------\n')
+                job.write('include("'+str(self.RecoScript)+'") \n')
+
+                job.close()
+
+        def writeScript(self):
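+                # write the LSF script: BSUB directives, release setup, POOL catalogue registration of the ESD/constants inputs and the athena call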
+                self.SCRIPTNAME = self.RunPath + '/' + self.SCRIPTNAME
+                script=open(self.SCRIPTNAME,'w')
+                script.write("#BSUB -J %s_%s_Part%02d \n" % (self.preName,self.dataName, self.j))
+                script.write("#BSUB -o %s/logs/%s_Part%02d.log \n" % (self.OutputPath,self.dataName,self.j))
+                script.write("#BSUB -e %s/logs/%s_Part%02d.err \n" % (self.OutputPath,self.dataName,self.j))
+
+                script.write("#BSUB -q %s \n" % self.QUEUE)
+                script.write("\n")
+                script.write("#   setup the environment \n")
+                if self.ATHENACFG.atlasSetup() == "CMTHOME":
+                        script.write("source %s/setup.sh -tag=%s,%s \n" % (self.CMTDIR, self.ATHENACFG.Release(), self.ATHENACFG.Tags()))
+                elif "single" in self.ATHENACFG.AtlasSetupOptions():
+                        script.write("source %s/scripts/asetup.sh %s --testarea=%s --tags=%s --single \n" % (self.ATHENACFG.AtlasSetupPath(),self.ATHENACFG.Release(),self.ATHENACFG.TestArea(), self.ATHENACFG.Tags()))
+                else:
+                        script.write("source %s/scripts/asetup.sh %s --testarea=%s --tags=%s \n" % (self.ATHENACFG.AtlasSetupPath(),self.ATHENACFG.Release(),self.ATHENACFG.TestArea(), self.ATHENACFG.Tags()))
+                script.write("export STAGE_SVCCLASS=atldata\n")
+
+
+                script.write("cd %s \n" % self.RunPath)
+
+                for file in self.inputPoolFiles:
+                        #if 'ESD' in file or 'AlignmentConstants' in file:
+                        if 'AlignmentConstants' in file or 'ESD' in file:
+                                script.write("pool_insertFileToCatalog "+ file + " \n")
+
+                script.write("athena %s \n" % self.JOBNAME)
+                script.write("cd -")
+                script.close()
+
+        def send(self,runmode):
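+                # submit the script to LxBatch with bsub, or run it locally and tee the output to the log file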
+                os.system("chmod +x %s" % self.SCRIPTNAME)
+                print ("----------------------------------------------")
+                if runmode == "batch":
+                        os.system("bsub <%s" % self.SCRIPTNAME)
+
+                        print ("  Sending %s_%s_Part%02d job to LxBatch" % (self.preName,self.dataName, self.j))
+
+                elif runmode == "local":
+                        print ("  Running %s_%s_Part%02d job" % (self.preName,self.dataName,self.j))
+                        os.system("sh %s | tee %s/logs/%s_Part%02d.log \n" % (self.SCRIPTNAME, self.OutputPath,self.dataName,self.j))
+
+
+                print ("----------------------------------------------")
+
+        def wait(self):
+                print ("Processing in lxbatch...")
+                # Wait for signal
+                time.sleep(30)
+                while os.popen('bjobs -w').read().find(self.preName)!=-1:
+                        time.sleep(30)
+
 class mergeScriptEoverPMaps:
-	def __init__(self,
-		     OutputPath,
-		     preName,
-		     QUEUE,
-		     CMTDIR,
-		     ATHENACFG,
-		     SCRIPTNAME,
-		     JOBNAME
-		     ):
-		self.OutputPath = OutputPath
-		self.preName = preName
-		self.QUEUE=QUEUE
-		self.CMTDIR=CMTDIR
-		self.ATHENACFG=ATHENACFG
-		self.SCRIPTNAME=SCRIPTNAME
-		self.JOBNAME=JOBNAME
-
-	def write(self):
-		TempPath = self.OutputPath
-		self.SCRIPTNAME = TempPath + '/' + self.SCRIPTNAME
-		
-		# list of Files to be merged
-		mergeFilesName = TempPath + '/merge_Files.txt'
-		script=open(self.SCRIPTNAME,'w')
-		
-		script.write("#BSUB -J %s_Merge \n" % (self.preName))
-		script.write("#BSUB -o %s/logs/%s_Merge.log \n" % (self.OutputPath,self.preName))
-		script.write("#BSUB -e %s/logs/%s_Merge.err \n" % (self.OutputPath,self.preName))
-		
-		script.write("#BSUB -q %s \n" % self.QUEUE)
-		script.write("\n")
-		script.write("#   setup the environment \n")
-		if self.ATHENACFG.atlasSetup() == "CMTHOME":
-			script.write("source %s/setup.sh -tag=%s,%s \n" % (self.CMTDIR, self.ATHENACFG.Release(), self.ATHENACFG.Tags()))
-
-		elif "single" in self.ATHENACFG.AtlasSetupOptions():
-			script.write("source %s/scripts/asetup.sh %s --testarea=%s --tags=%s --single \n" % (self.ATHENACFG.AtlasSetupPath(),self.ATHENACFG.Release(),self.ATHENACFG.TestArea(), self.ATHENACFG.Tags()))
-
-		else:
-			script.write("source %s/scripts/asetup.sh %s --testarea=%s --tags=%s \n" % (self.ATHENACFG.AtlasSetupPath(),self.ATHENACFG.Release(),self.ATHENACFG.TestArea(), self.ATHENACFG.Tags()))
-		script.write("cd %s \n" % (self.OutputPath))
-		script.write("DQHistogramMerge.py %s ./eoverpValidationMerge.root True\n" %(mergeFilesName))
-		script.write("cd -")
-		script.close()
-		
-		mergeFiles=open(mergeFilesName,"w")
-		os.chdir(self.OutputPath)
-		files = os.popen("find -name 'eoverpValidationOut.root'").readlines()
-		for line in files:
-			mergeFiles.write("%s" % line)
-		mergeFiles.close()
-				
-
-	def send(self,runmode):
-		os.system("chmod +x %s" % self.SCRIPTNAME)
-		print ("----------------------------------------------")
-		if runmode == "batch":
-			os.system("bsub <%s" % self.SCRIPTNAME)
-			
-			print ("  Sending %s_Merge job " % self.preName)
-			
-		elif runmode == "local":
-			print ("  Running %s_Merge job" % self.preName)
-			os.system("sh %s | tee %s/logs/Merge.log \n" % (self.SCRIPTNAME, self.OutputPath))
-			
-		print ("----------------------------------------------")
+        def __init__(self,
+                     OutputPath,
+                     preName,
+                     QUEUE,
+                     CMTDIR,
+                     ATHENACFG,
+                     SCRIPTNAME,
+                     JOBNAME
+                     ):
+                self.OutputPath = OutputPath
+                self.preName = preName
+                self.QUEUE=QUEUE
+                self.CMTDIR=CMTDIR
+                self.ATHENACFG=ATHENACFG
+                self.SCRIPTNAME=SCRIPTNAME
+                self.JOBNAME=JOBNAME
+
+        def write(self):
+                TempPath = self.OutputPath
+                self.SCRIPTNAME = TempPath + '/' + self.SCRIPTNAME
+
+                # list of Files to be merged
+                mergeFilesName = TempPath + '/merge_Files.txt'
+                script=open(self.SCRIPTNAME,'w')
+
+                script.write("#BSUB -J %s_Merge \n" % (self.preName))
+                script.write("#BSUB -o %s/logs/%s_Merge.log \n" % (self.OutputPath,self.preName))
+                script.write("#BSUB -e %s/logs/%s_Merge.err \n" % (self.OutputPath,self.preName))
+
+                script.write("#BSUB -q %s \n" % self.QUEUE)
+                script.write("\n")
+                script.write("#   setup the environment \n")
+                if self.ATHENACFG.atlasSetup() == "CMTHOME":
+                        script.write("source %s/setup.sh -tag=%s,%s \n" % (self.CMTDIR, self.ATHENACFG.Release(), self.ATHENACFG.Tags()))
+
+                elif "single" in self.ATHENACFG.AtlasSetupOptions():
+                        script.write("source %s/scripts/asetup.sh %s --testarea=%s --tags=%s --single \n" % (self.ATHENACFG.AtlasSetupPath(),self.ATHENACFG.Release(),self.ATHENACFG.TestArea(), self.ATHENACFG.Tags()))
+
+                else:
+                        script.write("source %s/scripts/asetup.sh %s --testarea=%s --tags=%s \n" % (self.ATHENACFG.AtlasSetupPath(),self.ATHENACFG.Release(),self.ATHENACFG.TestArea(), self.ATHENACFG.Tags()))
+                script.write("cd %s \n" % (self.OutputPath))
+                script.write("DQHistogramMerge.py %s ./eoverpValidationMerge.root True\n" %(mergeFilesName))
+                script.write("cd -")
+                script.close()
+
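+                # Collect every per-job eoverpValidationOut.root below OutputPath into the list consumed by DQHistogramMerge.py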
+                mergeFiles=open(mergeFilesName,"w")
+                os.chdir(self.OutputPath)
+                files = os.popen("find -name 'eoverpValidationOut.root'").readlines()
+                for line in files:
+                        mergeFiles.write("%s" % line)
+                mergeFiles.close()
+
+
+        def send(self,runmode):
+                os.system("chmod +x %s" % self.SCRIPTNAME)
+                print ("----------------------------------------------")
+                if runmode == "batch":
+                        os.system("bsub <%s" % self.SCRIPTNAME)
+
+                        print ("  Sending %s_Merge job " % self.preName)
+
+                elif runmode == "local":
+                        print ("  Running %s_Merge job" % self.preName)
+                        os.system("sh %s | tee %s/logs/Merge.log \n" % (self.SCRIPTNAME, self.OutputPath))
+
+                print ("----------------------------------------------")
                        
diff --git a/InnerDetector/InDetExample/InDetAlignExample/test/InDetAlignExample_TestConfiguration.xml b/InnerDetector/InDetExample/InDetAlignExample/test/InDetAlignExample_TestConfiguration.xml
deleted file mode 100644
index 2a7210c3414b6bb385731640ad9b3925ec73da85..0000000000000000000000000000000000000000
--- a/InnerDetector/InDetExample/InDetAlignExample/test/InDetAlignExample_TestConfiguration.xml
+++ /dev/null
@@ -1,78 +0,0 @@
-<?xml version="1.0"?>
-<!DOCTYPE unifiedTestConfiguration SYSTEM "http://www.hep.ucl.ac.uk/atlas/AtlasTesting/DTD/unifiedTestConfiguration.dtd">
-
-<unifiedTestConfiguration>
- <atn>
-   <!-- Test has been failing for a long time and no one is willing to fix it.
-        See ATLIDALIGN-7.
-   <TEST name="InDetAlignExample_ATNSimple" type="athena" suite="Examples">
-     <options_atn>InDetAlignExample/AlignmentATNSimple.py</options_atn>
-     <author> Atlas Developer </author>
-     <mailto>lacuesta@ific.uv.es, john.alison@cern.ch, anthony.morley@cern.ch, salvador.marti@ific.uv.es, jike.wang@cern.ch, regina.moles.valls@cern.ch</mailto>
-     <expectations>
-       <returnValue>0</returnValue>
-     </expectations>
-   </TEST>
-   -->
-   
- </atn>
-
- <rtt xmlns="http://www.hep.ucl.ac.uk/atlas/AtlasTesting/rtt">
-   <rttContactPerson>InDetAlignment Monitoring Group</rttContactPerson>
-   <mailto>Salvador.Marti@ific.uv.es, moles@ific.uv.es, lacuesta@ific.uv.es</mailto>
-   <refRelease>15.0.0</refRelease>
-   <jobList>
-      <classification>
-         <displayClass>OfflineValidation</displayClass>
-         <displayProcess>Reco</displayProcess>
-         <displayComponent>Det-InDet</displayComponent>
-      </classification>
-      <jobTransform userJobId="NewInDetSiLevel1Alignment"> 
-	<doc>Script to run 2 iterations of level 1 silicon only alignment.</doc> 
-	<jobTransformJobName>NewInDetSiLevel1Alignment</jobTransformJobName>
-        <jobTransformCmd>RunIterator.py</jobTransformCmd>
-        <group>NewInDetSiLevel1Alignment</group>
-        <queue>long</queue>
-	<castorSvcClass>atlasgroupdisk</castorSvcClass>
-	<castorStageHost>castoratlas</castorStageHost>
-      </jobTransform> 
-   
-   </jobList>
-   
-   <jobGroups>
-     <jobGroup name="NewInDetSiLevel1Alignment" parent="Transform">
-       <keepFilePattern>Iter0/*</keepFilePattern>
-       <keepFilePattern>Iter0/logs/*</keepFilePattern>
-       <keepFilePattern>Iter0/Collision/*</keepFilePattern>
-       <keepFilePattern>Iter1/*</keepFilePattern>
-       <keepFilePattern>Iter1/logs/*</keepFilePattern>
-       <keepFilePattern>Iter1/Collision/*</keepFilePattern>
-       <keepFilePattern>*.html</keepFilePattern>
-       <keepFilePattern>*.eps</keepFilePattern>
-       <keepFilePattern>*.png</keepFilePattern>
-       <keepFilePattern>*.html</keepFilePattern>
-       <auxFilePattern>NewSiL1AlignRTT.cc</auxFilePattern>
-       <auxFilePattern>Constants_L1.cc</auxFilePattern>
-       <auxFilePattern>InDetAlignExample_NewInDetSiLevel1Alignment.html</auxFilePattern>
-       <action>
-	 <modulename>RttLibraryTools</modulename>
-	 <testname>ROOTMacroRunner</testname>
-	 <arg>
-           <argname>macro</argname>
-           <argvalue>NewSiL1AlignRTT.cc</argvalue>
-	 </arg>
-	 <arg>
-           <argname>macro</argname>
-           <argvalue>Constants_L1.cc</argvalue>
-	 </arg>
-       </action>
-       <testToRemove>
-         <jobGroupName>Top</jobGroupName> 
-	 <testidentifier>CheckFileRunner0</testidentifier> 
-       </testToRemove>
-     </jobGroup>
-   </jobGroups>
-      
- </rtt>
-</unifiedTestConfiguration>
-
diff --git a/InnerDetector/InDetExample/InDetRecExample/python/InDetJobProperties.py b/InnerDetector/InDetExample/InDetRecExample/python/InDetJobProperties.py
index 4cf5588f43f9ff567303df8ec38fef8d4be14065..e4dfda89918f719b9c16879fd29653c953063212 100644
--- a/InnerDetector/InDetExample/InDetRecExample/python/InDetJobProperties.py
+++ b/InnerDetector/InDetExample/InDetRecExample/python/InDetJobProperties.py
@@ -1662,6 +1662,10 @@ class InDetJobProperties(JobPropertyContainer):
     if rec.doExpressProcessing() :
        self.checkThenSet(self.useBeamConstraint,False)
 
+    # --- Setup correct mu for TRT mu-scaling
+    if not rec.doRDOTrigger():
+       self.checkThenSet(self.useMuForTRTErrorScaling, True) 
+
   def init(self):
     #Method to do the final setup of the flags according to user input before.
     #This method MUST ONLY BE CALLED once in InDetRecExample/InDetRec_jobOptions.py!!
diff --git a/InnerDetector/InDetExample/InDetTrigRecExample/python/ConfiguredNewTrackingTrigCuts.py b/InnerDetector/InDetExample/InDetTrigRecExample/python/ConfiguredNewTrackingTrigCuts.py
index 2c6918e0958b53963da18b392be8def5f9211dc1..a5392b9879eb73ae7c005c0f30ffee783207b7cf 100644
--- a/InnerDetector/InDetExample/InDetTrigRecExample/python/ConfiguredNewTrackingTrigCuts.py
+++ b/InnerDetector/InDetExample/InDetTrigRecExample/python/ConfiguredNewTrackingTrigCuts.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 """ Derive from the offline class and override InDetFlags
 """
@@ -7,7 +7,6 @@ __author__ = "J. Masik"
 __version__= "$Revision: 1.2 $"
 __doc__    = "ConfiguredNewTrackingTrigCuts"
 
-from AthenaCommon.Include import include
 
 _sharedcuts = False
 if _sharedcuts:
@@ -22,11 +21,11 @@ class ConfiguredNewTrackingTrigCuts(InDetTrigTrackingCuts):
     self.__indetflags = InDetTrigFlags
 
 
-
-EFIDTrackingCuts = ConfiguredNewTrackingTrigCuts("Offline")
-EFIDTrackingCutsCosmics = ConfiguredNewTrackingTrigCuts("Cosmics")
-EFIDTrackingCutsBeamGas = ConfiguredNewTrackingTrigCuts("BeamGas")
-EFIDTrackingCutsLowPt = ConfiguredNewTrackingTrigCuts("LowPt")
-EFIDTrackingCutsTRT = ConfiguredNewTrackingTrigCuts("TRT")
-EFIDTrackingCutsHeavyIon = ConfiguredNewTrackingTrigCuts("HeavyIon")
-L2IDTrackingCuts = EFIDTrackingCuts
+if 'EFIDTrackingCuts' not in globals():
+  EFIDTrackingCuts = ConfiguredNewTrackingTrigCuts("Offline")
+  EFIDTrackingCutsCosmics = ConfiguredNewTrackingTrigCuts("Cosmics")
+  EFIDTrackingCutsBeamGas = ConfiguredNewTrackingTrigCuts("BeamGas")
+  EFIDTrackingCutsLowPt = ConfiguredNewTrackingTrigCuts("LowPt")
+  EFIDTrackingCutsTRT = ConfiguredNewTrackingTrigCuts("TRT")
+  EFIDTrackingCutsHeavyIon = ConfiguredNewTrackingTrigCuts("HeavyIon")
+  L2IDTrackingCuts = EFIDTrackingCuts
diff --git a/InnerDetector/InDetExample/InDetTrigRecExample/python/InDetTrigTrackingCuts.py b/InnerDetector/InDetExample/InDetTrigRecExample/python/InDetTrigTrackingCuts.py
index b7d2fa99f2155ccb72330973e891ad20f459a367..7e2476b027b9ae7519ad5a230db0f22958c255d7 100755
--- a/InnerDetector/InDetExample/InDetTrigRecExample/python/InDetTrigTrackingCuts.py
+++ b/InnerDetector/InDetExample/InDetTrigRecExample/python/InDetTrigTrackingCuts.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 from __future__ import print_function
 
@@ -6,12 +6,10 @@ class InDetTrigTrackingCuts :
   """
   A copy of ConfiguredNewtrackingCuts from InDetRecExample used by the trigger.
   
-  think twice before jumping to wrong conclusions
-
   """
 
 
-  def __init__ (self, mode = "offline"):
+  def __init__ (self, mode = "Offline"):
 
     import AthenaCommon.SystemOfUnits as Units
 
@@ -131,7 +129,6 @@ class InDetTrigTrackingCuts :
       self.__useTRTonlyParamCuts       = False
       self.__useTRTonlyOldLogic        = True
 
-
     #
     # --------------------------------------
     # --- now start tighening cuts level by level
@@ -183,13 +180,60 @@ class InDetTrigTrackingCuts :
       # --- slightly tighen NewTracking cuts
       self.__maxHoles                  = 2                # was 3
       self.__maxPixelHoles             = 1                # was 2
-    
+
     if self.__indetflags.cutLevel() >= 9:
       self.__maxZImpact              = 200. * Units.mm     #this should come from RS/Roi
 
     if self.__indetflags.cutLevel() >= 10:
       self.__doZBoundary              = True
 
+    if self.__indetflags.cutLevel() >= 12:
+      # --- Tighten track reconstruction criteria
+      self.__Xi2max                  = 9.0  # was 15
+      self.__Xi2maxNoAdd             = 25.0 # was 35
+      self.__nHolesMax               = 2 # was 3
+      self.__nHolesGapMax            = 2 # was 3
+
+    """
+    if self.__indetflags.cutLevel() >= 13 and DetFlags.detdescr.Calo_allOn():
+      # --- turn on RoI seeded for Back Tracking and TRT only
+      self.__RoISeededBackTracking   = True
+      self.__minRoIClusterEt         = 4500. * Units.MeV #Default cut to mimic rel21-ish
+
+    if self.__indetflags.cutLevel() >= 14 :
+      self.__minPT                   = 0.5 * Units.GeV
+
+    if self.__indetflags.cutLevel() >= 15 :
+      self.__minClusters             = 8 #based on studies by R.Jansky
+
+    if self.__indetflags.cutLevel() >= 16 :
+      self.__maxPrimaryImpact        = 5.0 * Units.mm #based on studies by T.Strebler
+
+    if self.__indetflags.cutLevel() >= 17:
+      # Tuning of the search road and strip seed IP in the track finder.
+      # Designed to speed up reconstruction at minimal performance impact. 
+      self.__roadWidth              = 12
+      self.__maxdImpactSSSSeeds     = 5.0 * Units.mm
+      self.__maxZImpact              = 200
+
+    if self.__indetflags.cutLevel() >= 18:
+      # Further tuning of the pattern recognition designed to 
+      # speed up reconstruction compared to 17 with minimal additional 
+      # impact. Kept as separate level pending cross-check of 
+      # seed confirmation robustness with end-of-run-3 radiation
+      # damage. 
+      self.__keepAllConfirmedPixelSeeds  = True
+      self.__maxSeedsPerSP_Pixels          = 1
+      self.__maxSeedsPerSP_Strips          = 5
+    
+    if self.__indetflags.cutLevel() >= 19:
+      # Calo cluster Et for RoI seeded backtracking for TRT segment finding
+      # and for TRT-si extensions
+      self.__minRoIClusterEt         = 6000. * Units.MeV
+      self.__minSecondaryPt          = 3.0 * Units.GeV  # Increase pT cut used for back-tracking to match calo-RoI
+
+    """
+
     if self.__indetflags.cutLevel() >= mxlevel:
       print ('InDetTrigTrackingCuts INFO using cutLevel %d/%d' % (mxlevel,self.__indetflags.cutLevel()))
 
diff --git a/InnerDetector/InDetExample/InDetTrigRecExample/share/InDetTrigRec_jobOptions.py b/InnerDetector/InDetExample/InDetTrigRecExample/share/InDetTrigRec_jobOptions.py
index 5f120409dab5ade8930a8133165421e4ccb28435..6262b5f47feddd2542a51e8eeaf9aef2bf759cfc 100755
--- a/InnerDetector/InDetExample/InDetTrigRecExample/share/InDetTrigRec_jobOptions.py
+++ b/InnerDetector/InDetExample/InDetTrigRecExample/share/InDetTrigRec_jobOptions.py
@@ -6,26 +6,25 @@
 # Jiri.Masik@cern.ch
 # -----------------------------------
 
-#retrieve common trigger settings (truth)
-from TriggerJobOpts.TriggerFlags import TriggerFlags
+include.block("InDetTrigRecExample/InDetTrigRec_jobOptions.py")
 
 from AthenaCommon.Logging import logging 
 log = logging.getLogger("InDetTrigRec_jobOptions.py")
 
-if not 'InDetTrigFlags' in dir():
-   # --- setup flags with default values
-   log.info("InDetTrigRec_jobOptions: InDetTrigFlags not set - setting to defaults")
-   from InDetTrigRecExample.InDetTrigFlags import InDetTrigFlags
-   InDetTrigFlags.doNewTracking.set_Value_and_Lock(True)
-   #InDetTrigFlags.InDet25nsec = True      #autoconfig and runHLT_standalone setting for BS 
-   InDetTrigFlags.primaryVertexSetup = "IterativeFinding"
-   #InDetTrigFlags.primaryVertexSetup = "DefaultFastFinding"
-   InDetTrigFlags.doRefit = True    # switched on for ATR-12226 (z0 uncertainties in bjets)
-   InDetTrigFlags.doPixelClusterSplitting = False
-   InDetTrigFlags.doPrintConfigurables = False    #
-   #InDetTrigFlags.doPrintConfigurables = True    #
+# --- setup flags with default values
+log.info("InDetTrigRec_jobOptions: InDetTrigFlags - setting to defaults")
+from InDetTrigRecExample.InDetTrigFlags import InDetTrigFlags
+InDetTrigFlags.doNewTracking.set_Value_and_Lock(True)
+InDetTrigFlags.cutLevel.set_Value_and_Lock(12)      
+#InDetTrigFlags.InDet25nsec = True      #autoconfig and runHLT_standalone setting for BS 
+InDetTrigFlags.primaryVertexSetup = "IterativeFinding"
+InDetTrigFlags.doRefit = True    # switched on for ATR-12226 (z0 uncertainties in bjets)
+InDetTrigFlags.doPixelClusterSplitting = False
+InDetTrigFlags.doPrintConfigurables = False
+from InDetTrigRecExample.ConfiguredNewTrackingTrigCuts import EFIDTrackingCuts  #noqa instantiate objects early on
 
 #moved the truth setting (can be overriden with set&lock)
+from TriggerJobOpts.TriggerFlags import TriggerFlags
 InDetTrigFlags.doTruth = TriggerFlags.doTruth()
 
 InDetTrigFlags.init()
@@ -81,11 +80,4 @@ include ("InDetRecExample/InDetRecCabling.py")
 
 
 
-# ------------------------------------------------------------
-#
-# ----------- Loading the Tracking Tools and Services
-#
-# ------------------------------------------------------------
-#moved to python
-include ("InDetTrigRecExample/InDetTrigRecLoadTools.py")
 
diff --git a/InnerDetector/InDetRecEvent/SiSpacePoint/SiSpacePoint/SCT_SpacePoint.h b/InnerDetector/InDetRecEvent/SiSpacePoint/SiSpacePoint/SCT_SpacePoint.h
index 1744a8bba3d22e12fb5e71cf5e7f7b4638977e15..71253ef70c2d28815a802a621b5f3759ce0f2b95 100755
--- a/InnerDetector/InDetRecEvent/SiSpacePoint/SiSpacePoint/SCT_SpacePoint.h
+++ b/InnerDetector/InDetRecEvent/SiSpacePoint/SiSpacePoint/SCT_SpacePoint.h
@@ -14,8 +14,8 @@
 #define TRKSPACEPOINT_SCTSPACEPOINT_H
 
 #include "TrkSpacePoint/SpacePoint.h"
+
 #include "Identifier/IdentifierHash.h"
-#include "TrkSpacePoint/SpacePoint.h"
 
 namespace Trk
 {
diff --git a/LArCalorimeter/LArCafJobs/LArCafJobs/LArNoiseBursts.h b/LArCalorimeter/LArCafJobs/LArCafJobs/LArNoiseBursts.h
index d8c568a7f6dd092552a298bc280fa7571b9b82dd..471f149605cba1d45490e8d779ab121c64c22990 100644
--- a/LArCalorimeter/LArCafJobs/LArCafJobs/LArNoiseBursts.h
+++ b/LArCalorimeter/LArCafJobs/LArCafJobs/LArNoiseBursts.h
@@ -19,9 +19,6 @@
 //LAr services:
 #include "Identifier/Range.h" 
 #include "Identifier/IdentifierHash.h"
-//#include "Identifier/HWIdentifier.h"
-//#include "LArElecCalib/ILArPedestal.h"
-//#include "LArRecConditions/ILArBadChannelMasker.h"
 #include "LArRecConditions/LArBadChannelCont.h"
 #include "CaloInterface/ICaloNoiseTool.h"
 #include "CaloInterface/ICalorimeterNoiseTool.h"
@@ -38,10 +35,6 @@
 // Electrons
 #include "egammaEvent/ElectronContainer.h"
 
-//CBNT_UTILS
-//#include "CBNT_Utils/CBNT_AthenaAwareBase.h"
-//#include "AthenaBaseComps/AthAlgorithm.h"
-
 //STL:
 #include <string>
 #include <bitset>
diff --git a/LArCalorimeter/LArCafJobs/src/LArShapeDumper.cxx b/LArCalorimeter/LArCafJobs/src/LArShapeDumper.cxx
index 7005f0edd9c926975513104815c4bb9fed24813d..da5a16fac1e50e8b18d334069b324d19bf554b14 100755
--- a/LArCalorimeter/LArCafJobs/src/LArShapeDumper.cxx
+++ b/LArCalorimeter/LArCafJobs/src/LArShapeDumper.cxx
@@ -30,7 +30,6 @@
 #include "LArCafJobs/CellInfo.h"
 #include "LArCafJobs/RunData.h"
 #include "LArCafJobs/EventData.h"
-#include "LArRawEvent/LArOFIterResultsContainer.h"
 #include "TrigT1Result/RoIBResult.h"
 #include "TrigSteeringEvent/TrigRoiDescriptor.h"
 #include "TFile.h"
@@ -43,9 +42,7 @@
 #include "TrigConfHLTData/HLTSequence.h"
 #include "TrigConfHLTData/HLTSequenceList.h"
 
-// #include "LArCafJobs/DataContainer.h"
 // #include "LArCafJobs/HistoryContainer.h"
-// #include "LArCafJobs/CellInfo.h"
 // #include "LArCafJobs/TreeAccessor.h"
 
 
diff --git a/LArCalorimeter/LArCalibTools/LArCalibTools/LArParamsProperties.h b/LArCalorimeter/LArCalibTools/LArCalibTools/LArParamsProperties.h
index 9a58c4a1f6399e537673f187fdb5d2bf51e13211..8c7dab81937f1d76e49c15bef947554d1db1bfd9 100644
--- a/LArCalorimeter/LArCalibTools/LArCalibTools/LArParamsProperties.h
+++ b/LArCalorimeter/LArCalibTools/LArCalibTools/LArParamsProperties.h
@@ -28,7 +28,6 @@
 #include "LArRawConditions/LArCableLengthComplete.h"
 #include "LArRawConditions/LArCableAttenuationComplete.h"
 #include "LArRawConditions/LArOFCBinComplete.h"
-#include "LArRawConditions/LArPhysCaliTdiffComplete.h"
 
 namespace LArParamsProperties {
 
diff --git a/LArCalorimeter/LArCalibTools/LArCalibTools/LArPulseShape.h b/LArCalorimeter/LArCalibTools/LArCalibTools/LArPulseShape.h
index f1aae4f4547092e4548a456b61bf9cbc4427b0e1..79ac746efa088ec111373463b16f0ec64486bd28 100644
--- a/LArCalorimeter/LArCalibTools/LArCalibTools/LArPulseShape.h
+++ b/LArCalorimeter/LArCalibTools/LArCalibTools/LArPulseShape.h
@@ -22,7 +22,6 @@
 #include "LArRawEvent/LArDigitContainer.h"
 #include "LArCabling/LArOnOffIdMapping.h"
 #include "LArRecConditions/LArBadChannelCont.h"
-#include "LumiBlockData/LuminosityCondData.h"
 #include "LArElecCalib/ILArPedestal.h"
 #include "StoreGate/ReadCondHandleKey.h"
 
diff --git a/LArCalorimeter/LArCalibTools/share/LArDelayFromDB2NTuple_jobOptions.py b/LArCalorimeter/LArCalibTools/share/LArDelayFromDB2NTuple_jobOptions.py
index 1a6a3b0855d879da9cedc7d455c9681ef60cbc50..6ee8650612badce38ecbf17bbd34457ec07dd121 100644
--- a/LArCalorimeter/LArCalibTools/share/LArDelayFromDB2NTuple_jobOptions.py
+++ b/LArCalorimeter/LArCalibTools/share/LArDelayFromDB2NTuple_jobOptions.py
@@ -11,7 +11,7 @@ import commands
 ###########################################################################
 
 include("LArCalibProcessing/LArCalib_Flags.py")
-include("RecExCommission/GetInputFiles.py")
+include("LArCalibProcessing/GetInputFiles.py")
 
 #######################################################
 #       Run properties
diff --git a/LArCalorimeter/LArCalibTools/share/LArDigits2Ntuple_jobOptions.py b/LArCalorimeter/LArCalibTools/share/LArDigits2Ntuple_jobOptions.py
index 0df7787c0572bc99204eb354c6748b748f3d1fad..717dac3411a2fa02d92cb8a69d6612926c08b97c 100755
--- a/LArCalorimeter/LArCalibTools/share/LArDigits2Ntuple_jobOptions.py
+++ b/LArCalorimeter/LArCalibTools/share/LArDigits2Ntuple_jobOptions.py
@@ -72,7 +72,7 @@ if not 'SuperCells' in dir():
 
 if not SuperCells: include("LArCalibProcessing/LArCalib_Flags.py")
 else: include("LArCalibProcessing/LArCalib_FlagsSC.py")
-include("RecExCommission/GetInputFiles.py")
+include("LArCalibProcessing/GetInputFiles.py")
 
 if SuperCells:
    from AthenaCommon.GlobalFlags import globalflags
diff --git a/LArCalorimeter/LArCalibTools/share/LArFEBTemp2Ntuple_jobOptions.py b/LArCalorimeter/LArCalibTools/share/LArFEBTemp2Ntuple_jobOptions.py
index 0e72cfe73b6bbadaf71700f04ef1a4a08917716b..6a846543e23e912fc5d7b8f5af03e669da447b1f 100755
--- a/LArCalorimeter/LArCalibTools/share/LArFEBTemp2Ntuple_jobOptions.py
+++ b/LArCalorimeter/LArCalibTools/share/LArFEBTemp2Ntuple_jobOptions.py
@@ -11,7 +11,7 @@ DBConnectionCOOL_ONL = "oracle://ATLAS_COOLPROD;schema=ATLAS_COOLONL_LAR;dbname=
 
 include("LArCalibProcessing/LArCalib_Flags.py")
 include("LArCalibProcessing/LArCalib_MinimalSetup.py")
-include("RecExCommission/GetInputFiles.py")
+include("LArCalibProcessing/GetInputFiles.py")
 include("AthenaCommon/Atlas_Gen.UnixStandardJob.py")
 
 from AthenaCommon.AlgSequence import AlgSequence
diff --git a/LArCalorimeter/LArCalibTools/share/LArParamsFromDB2NTuple_jobOptions.py b/LArCalorimeter/LArCalibTools/share/LArParamsFromDB2NTuple_jobOptions.py
index 1d284860d5404053f33c50bea290348b8f272acc..bba67009f41ecee6dc71e40032cf1c58d1396aa8 100644
--- a/LArCalorimeter/LArCalibTools/share/LArParamsFromDB2NTuple_jobOptions.py
+++ b/LArCalorimeter/LArCalibTools/share/LArParamsFromDB2NTuple_jobOptions.py
@@ -2,7 +2,7 @@ import commands
 ###########################################################################
 
 include("LArCalibProcessing/LArCalib_Flags.py")
-include("RecExCommission/GetInputFiles.py")
+include("LArCalibProcessing/GetInputFiles.py")
 
 #######################################################
 #       Run properties
diff --git a/LArCalorimeter/LArCalibTools/share/LArPhysWaveFromDB2NTuple_jobOptions.py b/LArCalorimeter/LArCalibTools/share/LArPhysWaveFromDB2NTuple_jobOptions.py
index 834ec145fbb059a622ca08a51d6457af466d21f6..61353c2f345d83dea3b3d4f64ff7e09476e4743b 100644
--- a/LArCalorimeter/LArCalibTools/share/LArPhysWaveFromDB2NTuple_jobOptions.py
+++ b/LArCalorimeter/LArCalibTools/share/LArPhysWaveFromDB2NTuple_jobOptions.py
@@ -2,7 +2,7 @@ import commands
 ###########################################################################
 
 include("LArCalibProcessing/LArCalib_Flags.py")
-include("RecExCommission/GetInputFiles.py")
+include("LArCalibProcessing/GetInputFiles.py")
 
 #######################################################
 #       Run properties
diff --git a/LArCalorimeter/LArCalibTools/src/LArPulseShape.cxx b/LArCalorimeter/LArCalibTools/src/LArPulseShape.cxx
index 11a3631a0738f0d1d9fd7c75cff57867847f2c5f..c45f29b2cd34f6249994bf4853e4d3c920897724 100644
--- a/LArCalorimeter/LArCalibTools/src/LArPulseShape.cxx
+++ b/LArCalorimeter/LArCalibTools/src/LArPulseShape.cxx
@@ -16,7 +16,6 @@
 #include "TKey.h"
 #include "TTree.h"
 #include "TSystem.h"
-#include "TSystem.h"
 
 LArPulseShape::LArPulseShape(const std::string& name, ISvcLocator* pSvcLocator): 
   AthAlgorithm(name, pSvcLocator), m_initialized(false),  m_calo_id(0),  
diff --git a/LArCalorimeter/LArCalibUtils/LArCalibUtils/LArOFCAlg.h b/LArCalorimeter/LArCalibUtils/LArCalibUtils/LArOFCAlg.h
index ba6c9a93e11252da9bc344db3da472e44bab38f6..3173a587a7416c085d5d328b6318ed86531e12c2 100644
--- a/LArCalorimeter/LArCalibUtils/LArCalibUtils/LArOFCAlg.h
+++ b/LArCalorimeter/LArCalibUtils/LArCalibUtils/LArOFCAlg.h
@@ -37,8 +37,6 @@ class LArOnlineID_Base;
 class CaloDetDescrManager_Base; 
 class LArDSPConfig;
 
-#include "GaudiKernel/ToolHandle.h"
-
 class LArOFCAlg:public AthAlgorithm {
  
 public:
diff --git a/LArCalorimeter/LArCalibUtils/src/LArOFCAlg.cxx b/LArCalorimeter/LArCalibUtils/src/LArOFCAlg.cxx
index fe1bf11077a5e23a35398a32eea8d1bddf7a2f13..71ebbacc6552acf6c52ac054c2f4ea6de37687c9 100644
--- a/LArCalorimeter/LArCalibUtils/src/LArOFCAlg.cxx
+++ b/LArCalorimeter/LArCalibUtils/src/LArOFCAlg.cxx
@@ -9,7 +9,6 @@
 #include "LArCalibUtils/LArOFCAlg.h"
 
 #include "LArRawConditions/LArWaveHelper.h"
-#include "LArRawConditions/LArOFCComplete.h"
 #include "LArRawConditions/LArShapeComplete.h"
 #include "LArRawConditions/LArOFCComplete.h"
 #include "LArRawConditions/LArOFCBinComplete.h"
diff --git a/LArCalorimeter/LArCalibUtils/src/LArPhysWaveHECTool.cxx b/LArCalorimeter/LArCalibUtils/src/LArPhysWaveHECTool.cxx
index f29e139409f6a1dfc04ef0c9bf87a8377e9e07fa..d907e616110c10796e2f422711d2d4b6876a1218 100755
--- a/LArCalorimeter/LArCalibUtils/src/LArPhysWaveHECTool.cxx
+++ b/LArCalorimeter/LArCalibUtils/src/LArPhysWaveHECTool.cxx
@@ -4,7 +4,6 @@
 
 #include "LArCalibUtils/LArPhysWaveHECTool.h" 
 
-#include "gsl/gsl_integration.h"
 #include <gsl/gsl_integration.h>
 #include <gsl/gsl_errno.h>
 
diff --git a/LArCalorimeter/LArCalibUtils/src/LArPhysWavePredictor.cxx b/LArCalorimeter/LArCalibUtils/src/LArPhysWavePredictor.cxx
index be2f460e8db04f871a829152ce7fdce549b2f542..7365b689b1794602e9aaa166e5d93a9a52982bab 100644
--- a/LArCalorimeter/LArCalibUtils/src/LArPhysWavePredictor.cxx
+++ b/LArCalorimeter/LArCalibUtils/src/LArPhysWavePredictor.cxx
@@ -34,8 +34,6 @@
 #include "LArCalibUtils/LArPhysWaveTool.h"     // added by FT
 #include "LArCalibUtils/LArPhysWaveHECTool.h"  // added by FT
 
-#include "LArCalibUtils/LArPhysWaveTool.h"
-
 #include <stdio.h>
 #include <iostream>
 #include <fstream>
diff --git a/LArCalorimeter/LArCalibUtils/src/LArRampFCalCorr.cxx b/LArCalorimeter/LArCalibUtils/src/LArRampFCalCorr.cxx
index 635c9e0077398b0f464de759b54b5272addbfe92..99538a10fbe4f24bf0d66e64705764f536d5389c 100755
--- a/LArCalorimeter/LArCalibUtils/src/LArRampFCalCorr.cxx
+++ b/LArCalorimeter/LArCalibUtils/src/LArRampFCalCorr.cxx
@@ -27,8 +27,6 @@
 #include <cmath>
 using std::pow;
 
-#include <cmath>
-
 LArRampFCalCorr::LArRampFCalCorr(const std::string& name,ISvcLocator* pSvcLocator)
   :AthAlgorithm(name, pSvcLocator),
    m_onlineHelper(0)
diff --git a/LArCalorimeter/LArCellRec/src/LArNoisyROTool.cxx b/LArCalorimeter/LArCellRec/src/LArNoisyROTool.cxx
index 5662980119defc86eb75144e8748e62fe13e8c75..67839ad1d330f7936f07a73b2bebd755f27e85e8 100644
--- a/LArCalorimeter/LArCellRec/src/LArNoisyROTool.cxx
+++ b/LArCalorimeter/LArCellRec/src/LArNoisyROTool.cxx
@@ -14,7 +14,6 @@
 #include "CaloIdentifier/CaloCell_ID.h"
 #include "LArIdentifier/LArOnlineID.h" 
 #include "LArCabling/LArOnOffIdMapping.h"
-#include "LArRecEvent/LArNoisyROSummary.h"
 #include "StoreGate/ReadCondHandle.h"
 
 LArNoisyROTool::LArNoisyROTool( const std::string& type, 
diff --git a/LArCalorimeter/LArCondUtils/src/LArFecLvTempDcsTool.h b/LArCalorimeter/LArCondUtils/src/LArFecLvTempDcsTool.h
index 6d920f821e15a7b6184ff8dcdab07935cffe4f08..cf73f555d93011f1adf5dbe7fc41623764bb7f82 100755
--- a/LArCalorimeter/LArCondUtils/src/LArFecLvTempDcsTool.h
+++ b/LArCalorimeter/LArCondUtils/src/LArFecLvTempDcsTool.h
@@ -23,7 +23,6 @@
 #include "StoreGate/DataHandle.h" 
 #include "Identifier/Identifier.h" 
 #include "AthenaPoolUtilities/AthenaAttributeList.h"
-#include "StoreGate/DataHandle.h" 
 
 #include <string>
 
diff --git a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_DelayXtalk_jobOptions.py b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_DelayXtalk_jobOptions.py
index 0810c2f37f22283d0dcdbf5a593f4208f0ab0540..e29e28d5d70510e56dc492af44e23f3e71ebbbbe 100644
--- a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_DelayXtalk_jobOptions.py
+++ b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_DelayXtalk_jobOptions.py
@@ -24,7 +24,7 @@ import subprocess
 ###########################################################################
 
 include("LArCalibProcessing/LArCalib_Flags.py")
-include("RecExCommission/GetInputFiles.py")
+include("LArCalibProcessing/GetInputFiles.py")
 
 #######################################################
 #       Crosstalk parameters
diff --git a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_Delay_OFC_Cali_jobOptions.py b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_Delay_OFC_Cali_jobOptions.py
index 46b460d5d60da649c097cc48d78452bb6338d38e..790d8c81a63c5d5f6de6ec66ec38645d15f1758e 100644
--- a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_Delay_OFC_Cali_jobOptions.py
+++ b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_Delay_OFC_Cali_jobOptions.py
@@ -25,7 +25,7 @@ if not "SuperCells" in dir():
    
 if not SuperCells: include("LArCalibProcessing/LArCalib_Flags.py")
 if SuperCells:     include("LArCalibProcessing/LArCalib_FlagsSC.py")
-#include("RecExCommission/GetInputFiles.py")
+#include("LArCalibProcessing/GetInputFiles.py")
 include("LArCalibProcessing/GetInputFiles.py")
 
 #######################################################
diff --git a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_Delay_OFC_splitter_jobOptions.py b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_Delay_OFC_splitter_jobOptions.py
index 974f7af0800ad475b58f2ad5d5d169fbbcef24ff..7c0ab0ef7b519ec69439b3edb39870877f2801ba 100644
--- a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_Delay_OFC_splitter_jobOptions.py
+++ b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_Delay_OFC_splitter_jobOptions.py
@@ -11,7 +11,7 @@ import subprocess
 ###########################################################################
 
 include("LArCalibProcessing/LArCalib_Flags.py")
-include("RecExCommission/GetInputFiles.py")
+include("LArCalibProcessing/GetInputFiles.py")
 
 #######################################################
 #       Run properties
diff --git a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_Delay_jobOptions.py b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_Delay_jobOptions.py
index 394e43803e155d54154ad27ab18398454cb2da12..6f1963283ff634d97afbe6e8c5dbaa0677fc7870 100644
--- a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_Delay_jobOptions.py
+++ b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_Delay_jobOptions.py
@@ -13,7 +13,7 @@ import subprocess
 ###########################################################################
 
 include("LArCalibProcessing/LArCalib_Flags.py")
-include("RecExCommission/GetInputFiles.py")
+include("LArCalibProcessing/GetInputFiles.py")
 
 #######################################################
 #       Run properties
diff --git a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_Delay_splitter_jobOptions.py b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_Delay_splitter_jobOptions.py
index 6a56f053460154c336e48e16e7b10ef51ee9c35c..2fa7ee7aaddd4467f2915c044a16afabbb65a330 100644
--- a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_Delay_splitter_jobOptions.py
+++ b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_Delay_splitter_jobOptions.py
@@ -11,7 +11,7 @@ import subprocess
 ###########################################################################
 
 include("LArCalibProcessing/LArCalib_Flags.py")
-include("RecExCommission/GetInputFiles.py")
+include("LArCalibProcessing/GetInputFiles.py")
 
 #######################################################
 #       Run properties
diff --git a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_DigitAccumulator_jobOptions.py b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_DigitAccumulator_jobOptions.py
index 707ef4518de6653649bac79aaabacec1479d3283..9a34211b959db6aa45118e3968412de54f60aff2 100644
--- a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_DigitAccumulator_jobOptions.py
+++ b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_DigitAccumulator_jobOptions.py
@@ -19,7 +19,7 @@ import subprocess
 ###############################################################################
 
 include("LArCalibProcessing/LArCalib_Flags.py")
-include("RecExCommission/GetInputFiles.py")
+include("LArCalibProcessing/GetInputFiles.py")
 
 #######################################################
 #                Run properties
diff --git a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_PedestalAutoCorr_jobOptions.py b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_PedestalAutoCorr_jobOptions.py
index a09a8f0525a1e70108c9a114771e9b1a7aa3f7f3..80df73f49b42bcee46d5fef2f48cde8715c6b31f 100644
--- a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_PedestalAutoCorr_jobOptions.py
+++ b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_PedestalAutoCorr_jobOptions.py
@@ -18,7 +18,6 @@ if not "SuperCells" in dir():
 
 if not SuperCells: include("LArCalibProcessing/LArCalib_Flags.py")
 if SuperCells:     include("LArCalibProcessing/LArCalib_FlagsSC.py")
-#include("RecExCommission/GetInputFiles.py")
 include("LArCalibProcessing/GetInputFiles.py")
 
 #######################################################
diff --git a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_Ramp_jobOptions.py b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_Ramp_jobOptions.py
index 94d2ea6624b3e5381dc5df03fda30cdafb3c25c8..02231e9776b25290d494cad3304a2834b88a4213 100644
--- a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_Ramp_jobOptions.py
+++ b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_Ramp_jobOptions.py
@@ -17,7 +17,6 @@ if not "SuperCells" in dir():
    
 if not SuperCells: include("LArCalibProcessing/LArCalib_Flags.py")
 if SuperCells:     include("LArCalibProcessing/LArCalib_FlagsSC.py")
-#include("RecExCommission/GetInputFiles.py")
 include("LArCalibProcessing/GetInputFiles.py")
 
 #######################################################
diff --git a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_Ramp_splitter_jobOptions.py b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_Ramp_splitter_jobOptions.py
index 935026b83565f5d9fd1d31830dbe56fa05763798..943c56204b8f996cf0f0bb6b5693d37eadda5511 100644
--- a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_Ramp_splitter_jobOptions.py
+++ b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_Ramp_splitter_jobOptions.py
@@ -11,7 +11,7 @@ import subprocess
 ###########################################################################
 
 include("LArCalibProcessing/LArCalib_Flags.py")
-include("RecExCommission/GetInputFiles.py")
+include("LArCalibProcessing/GetInputFiles.py")
 
 #######################################################
 #       Run properties
diff --git a/LArCalorimeter/LArExample/TestLArDetDescr/src/TestCaloDDE.cxx b/LArCalorimeter/LArExample/TestLArDetDescr/src/TestCaloDDE.cxx
index ec51253979a9c0d5613b10d9817b499fadca52f3..d6457911456c23a8f2fcd9c7556ca5960f4a06c7 100755
--- a/LArCalorimeter/LArExample/TestLArDetDescr/src/TestCaloDDE.cxx
+++ b/LArCalorimeter/LArExample/TestLArDetDescr/src/TestCaloDDE.cxx
@@ -23,7 +23,6 @@
 #include "CaloDetDescr/CaloDetDescriptor.h"
 #include "CaloDetDescr/ICaloRecoMaterialTool.h"
 #include "CaloDetDescr/ICaloRecoSimpleGeomTool.h"
-#include "CaloDetDescr/CaloDetDescriptor.h"
 #include "CaloDetDescr/CaloDetDescrElement.h"
 #include "CaloGeoHelpers/CaloPhiRange.h"
 #include <cmath>
diff --git a/LArCalorimeter/LArG4/LArG4EC/src/CryostatCalibrationMixedCalculator.cc b/LArCalorimeter/LArG4/LArG4EC/src/CryostatCalibrationMixedCalculator.cc
index 3f5a3ef46c8cd8a06c3bedc367c604d18b943935..0de45dc3597b610c80fe992adac1a02db4dd1f17 100644
--- a/LArCalorimeter/LArG4/LArG4EC/src/CryostatCalibrationMixedCalculator.cc
+++ b/LArCalorimeter/LArG4/LArG4EC/src/CryostatCalibrationMixedCalculator.cc
@@ -26,7 +26,6 @@
 #include "GaudiKernel/ISvcLocator.h"
 #include "GaudiKernel/Bootstrap.h"
 #include "StoreGate/StoreGateSvc.h"
-#include "globals.hh"
 
 #include "G4Step.hh"
 #include "G4StepPoint.hh"
diff --git a/LArCalorimeter/LArG4/LArG4EC/src/EMECSupportCalibrationCalculator.cc b/LArCalorimeter/LArG4/LArG4EC/src/EMECSupportCalibrationCalculator.cc
index aa8220f0418930bbc956343d9445c88c2ea4584d..9834c5c8594925953a881e1559801a24b55b7e08 100644
--- a/LArCalorimeter/LArG4/LArG4EC/src/EMECSupportCalibrationCalculator.cc
+++ b/LArCalorimeter/LArG4/LArG4EC/src/EMECSupportCalibrationCalculator.cc
@@ -27,7 +27,6 @@
 #include "GaudiKernel/Bootstrap.h"
 #include "StoreGate/StoreGateSvc.h"
 #include "AthenaKernel/Units.h"
-#include "globals.hh"
 
 #include "G4Step.hh"
 #include "G4StepPoint.hh"
diff --git a/LArCalorimeter/LArG4/LArG4FCAL/CMakeLists.txt b/LArCalorimeter/LArG4/LArG4FCAL/CMakeLists.txt
index 0a12952cf7a9502139c7c79c9fb42ab86767a99c..a98afbb40b3aba24851d79ac3b9e91bb57b34aef 100644
--- a/LArCalorimeter/LArG4/LArG4FCAL/CMakeLists.txt
+++ b/LArCalorimeter/LArG4/LArG4FCAL/CMakeLists.txt
@@ -1,25 +1,16 @@
-################################################################################
-# Package: LArG4FCAL
-################################################################################
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 # Declare the package name:
 atlas_subdir( LArG4FCAL )
 
 # External dependencies:
-find_package( Boost COMPONENTS filesystem thread system )
-find_package( CLHEP )
-find_package( CORAL COMPONENTS CoralBase CoralKernel RelationalAccess )
 find_package( Geant4 )
-find_package( XercesC )
 
 # Component(s) in the package:
 atlas_add_component( LArG4FCAL
                    src/*.cc
                    src/components/*.cxx
-                   PRIVATE_INCLUDE_DIRS ${Boost_INCLUDE_DIRS} ${CORAL_INCLUDE_DIRS} ${XERCESC_INCLUDE_DIRS} ${GEANT4_INCLUDE_DIRS} ${CLHEP_INCLUDE_DIRS}
-                   PRIVATE_DEFINITIONS ${CLHEP_DEFINITIONS}
-                   LINK_LIBRARIES LArG4Code LArReadoutGeometry CaloG4SimLib StoreGateLib SGtests  CxxUtils
-                   PRIVATE_LINK_LIBRARIES ${Boost_LIBRARIES} ${CORAL_LIBRARIES} ${XERCESC_LIBRARIES} ${GEANT4_LIBRARIES} ${CLHEP_LIBRARIES} GaudiKernel LArG4RunControl PathResolver GeoModelInterfaces RDBAccessSvcLib LArHV )
-
-atlas_install_python_modules( python/*.py )
+                   INCLUDE_DIRS ${GEANT4_INCLUDE_DIRS}
+                   LINK_LIBRARIES ${GEANT4_LIBRARIES} AthenaKernel CaloG4SimLib CxxUtils GaudiKernel GeoModelInterfaces LArG4Code LArHV LArReadoutGeometry PathResolver RDBAccessSvcLib StoreGateLib )
 
+atlas_install_python_modules( python/*.py POST_BUILD_CMD ${ATLAS_FLAKE8} )
diff --git a/LArCalorimeter/LArG4/LArG4FCAL/python/LArG4FCALConfig.py b/LArCalorimeter/LArG4/LArG4FCAL/python/LArG4FCALConfig.py
index 110707247fd667f1943cf6191386e4ba139d5aad..243f79cf850458e0eed5d0d307d1b925631d302b 100644
--- a/LArCalorimeter/LArG4/LArG4FCAL/python/LArG4FCALConfig.py
+++ b/LArCalorimeter/LArG4/LArG4FCAL/python/LArG4FCALConfig.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 from AthenaCommon import CfgMgr
 from AthenaCommon.SystemOfUnits import mm,deg,ns
@@ -10,37 +10,37 @@ def getFCALCalculatorBase(name="FCALCalculatorBase", **kwargs):
 
 
 def getFCAL1Calculator(name="FCAL1Calculator", **kwargs):
-    kwargs.setdefault("FCALSampling",1);
+    kwargs.setdefault("FCALSampling",1)
     return getFCALCalculatorBase(name, **kwargs)
 
 
 def getFCAL2Calculator(name="FCAL2Calculator", **kwargs):
-    kwargs.setdefault("FCALSampling",2);
+    kwargs.setdefault("FCALSampling",2)
     return getFCALCalculatorBase(name, **kwargs)
 
 
 def getFCAL3Calculator(name="FCAL3Calculator", **kwargs):
-    kwargs.setdefault("FCALSampling",3);
+    kwargs.setdefault("FCALSampling",3)
     return getFCALCalculatorBase(name, **kwargs)
 
 
 def getFCAL1CalibCalculator(name="FCAL1CalibCalculator", **kwargs):
-    kwargs.setdefault("FCALdeltaX",7.5*mm);
-    kwargs.setdefault("FCALdeltaY",7.5*mm*math.sin(60*deg));
-    kwargs.setdefault("FCALSampling",1);
+    kwargs.setdefault("FCALdeltaX",7.5*mm)
+    kwargs.setdefault("FCALdeltaY",7.5*mm*math.sin(60*deg))
+    kwargs.setdefault("FCALSampling",1)
     return CfgMgr.LArG4__FCAL__LArFCALCalibCalculatorBase(name, **kwargs)
 
 
 def getFCAL2CalibCalculator(name="FCAL2CalibCalculator", **kwargs):
-    kwargs.setdefault("FCALdeltaX",8.179*mm);
-    kwargs.setdefault("FCALdeltaY",8.179*mm*math.sin(60*deg));
-    kwargs.setdefault("FCALSampling",2);
+    kwargs.setdefault("FCALdeltaX",8.179*mm)
+    kwargs.setdefault("FCALdeltaY",8.179*mm*math.sin(60*deg))
+    kwargs.setdefault("FCALSampling",2)
     return CfgMgr.LArG4__FCAL__LArFCALCalibCalculatorBase(name, **kwargs)
 
 
 def getFCAL3CalibCalculator(name="FCAL3CalibCalculator", **kwargs):
-    kwargs.setdefault("FCALdeltaX",9.0*mm);
-    kwargs.setdefault("FCALdeltaY",9.0*mm*math.sin(60*deg));
-    kwargs.setdefault("FCALSampling",3);
+    kwargs.setdefault("FCALdeltaX",9.0*mm)
+    kwargs.setdefault("FCALdeltaY",9.0*mm*math.sin(60*deg))
+    kwargs.setdefault("FCALSampling",3)
     return CfgMgr.LArG4__FCAL__LArFCALCalibCalculatorBase(name, **kwargs)
 
diff --git a/LArCalorimeter/LArG4/LArG4FastSimSvc/CMakeLists.txt b/LArCalorimeter/LArG4/LArG4FastSimSvc/CMakeLists.txt
index 0b8cc9f1688762da0463e17b7a589884cd126ce7..48055b764b9d453e769f98d56b5ff9d640e12433 100644
--- a/LArCalorimeter/LArG4/LArG4FastSimSvc/CMakeLists.txt
+++ b/LArCalorimeter/LArG4/LArG4FastSimSvc/CMakeLists.txt
@@ -1,6 +1,4 @@
-################################################################################
-# Package: LArG4FastSimSvc
-################################################################################
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 # Declare the package name:
 atlas_subdir( LArG4FastSimSvc )
@@ -8,17 +6,15 @@ atlas_subdir( LArG4FastSimSvc )
 # External dependencies:
 find_package( CLHEP )
 find_package( Geant4 )
-find_package( XercesC )
 
 # Component(s) in the package:
 atlas_add_component( LArG4FastSimSvc
                      src/*.cxx
                      src/components/*.cxx
-                     INCLUDE_DIRS ${GEANT4_INCLUDE_DIRS} ${XERCESC_INCLUDE_DIRS} ${CLHEP_INCLUDE_DIRS} 
-                     LINK_LIBRARIES ${GEANT4_LIBRARIES} ${XERCESC_LIBRARIES} ${CLHEP_LIBRARIES} AtlasHepMCLib AthenaBaseComps )
+                     INCLUDE_DIRS ${GEANT4_INCLUDE_DIRS} ${CLHEP_INCLUDE_DIRS}
+                     LINK_LIBRARIES ${GEANT4_LIBRARIES} ${CLHEP_LIBRARIES} AtlasHepMCLib AthenaBaseComps GaudiKernel )
 
 # Install files from the package:
-atlas_install_headers( LArG4FastSimSvc )
-atlas_install_python_modules( python/*.py )
+atlas_install_python_modules( python/*.py POST_BUILD_CMD ${ATLAS_FLAKE8} )
 atlas_install_joboptions( share/*.py )
 
diff --git a/LArCalorimeter/LArG4/LArG4FastSimSvc/python/LArG4FastSimSvcConfig.py b/LArCalorimeter/LArG4/LArG4FastSimSvc/python/LArG4FastSimSvcConfig.py
index 4c6fc8565eef50f6680a8c38f4804d8aef0e94db..31454aac515ba927cdf97c202b6a50b886e0bc54 100644
--- a/LArCalorimeter/LArG4/LArG4FastSimSvc/python/LArG4FastSimSvcConfig.py
+++ b/LArCalorimeter/LArG4/LArG4FastSimSvc/python/LArG4FastSimSvcConfig.py
@@ -1,25 +1,25 @@
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 from LArG4FastSimSvc.LArG4FastSimSvcConf import LArG4FastSimSvc
 from AthenaCommon.SystemOfUnits import GeV
+from AthenaCommon.Logging import logging
 
 class DefaultLArG4FastSimSvc(LArG4FastSimSvc):
     __slots__ = []
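+    # class-level logger, used as self.log in __init__ below; derived services define their own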
+    log = logging.getLogger( 'LArG4FastSimSvc' )
 
     def __init__(self, name = "DefaultLArG4FastSimSvc"):
         super( DefaultLArG4FastSimSvc, self ).__init__( name )
 
-        from AthenaCommon.Logging import logging
-        log = logging.getLogger( 'LArG4FastSimSvc' )
-
         # get parametrisation level
         from G4AtlasApps.SimFlags import simFlags
         
         if not hasattr(simFlags, 'LArParameterization'):
-            log.warning("JobProperty LArParameterization is not defined! Can not set up fast simulation!");
+            self.log.warning("JobProperty LArParameterization is not defined! Can not set up fast simulation!")
             return
 
         if simFlags.LArParameterization() == 0:
-            log.warning("JobProperty LArParameterization is zero! No fast simulation requested!");
+            self.log.warning("JobProperty LArParameterization is zero! No fast simulation requested!")
             return
 
     def setDefaults(cls, handle):
@@ -32,7 +32,7 @@ class LArG4EMBFastSimSvc(DefaultLArG4FastSimSvc):
         super( DefaultLArG4FastSimSvc, self ).__init__( name )
 
         from AthenaCommon.Logging import logging
-        log = logging.getLogger( 'LArG4EMBFastSimSvc' )
+        self.log = logging.getLogger( 'LArG4EMBFastSimSvc' )
 
         # set defaults
         self.EFlagToShowerLib=True
@@ -64,7 +64,7 @@ class LArG4EMBFastSimSvc(DefaultLArG4FastSimSvc):
         from AthenaCommon.AppMgr import ServiceMgr
 
         if not hasattr( ServiceMgr, 'LArG4ShowerLibSvc' ):
-            log.warning("ShowerLibSvc not setup")
+            cls.log.warning("ShowerLibSvc not setup")
 
         if handle.EFlagToShowerLib:
             ServiceMgr.LArG4ShowerLibSvc.FileNameList+=["LArG4ShowerLib.EMB.11.root"]
@@ -77,13 +77,11 @@ class LArG4EMBFastSimSvc(DefaultLArG4FastSimSvc):
 
 class LArG4EMECFastSimSvc(DefaultLArG4FastSimSvc):
     __slots__ = []
+    log = logging.getLogger( 'LArG4EMECFastSimSvc' )
 
     def __init__(self, name = "LArG4EMECFastSimSvc"):
         super( DefaultLArG4FastSimSvc, self ).__init__( name )
 
-        from AthenaCommon.Logging import logging
-        log = logging.getLogger( 'LArG4EMECFastSimSvc' )
-
         # set defaults
         self.EFlagToShowerLib=True
         self.GFlagToShowerLib=True
@@ -113,7 +111,7 @@ class LArG4EMECFastSimSvc(DefaultLArG4FastSimSvc):
         from AthenaCommon.AppMgr import ServiceMgr
 
         if not hasattr( ServiceMgr, 'LArG4ShowerLibSvc' ):
-            log.warning("ShowerLibSvc not setup")
+            cls.log.warning("ShowerLibSvc not setup")
 
         if handle.EFlagToShowerLib:
             ServiceMgr.LArG4ShowerLibSvc.FileNameList+=["LArG4ShowerLib.EMEC.11.root"]
@@ -127,13 +125,11 @@ class LArG4EMECFastSimSvc(DefaultLArG4FastSimSvc):
 
 class LArG4FCALFastSimSvc(DefaultLArG4FastSimSvc):
     __slots__ = []
+    log = logging.getLogger( 'LArG4FCALFastSimSvc' )
 
     def __init__(self, name = "LArG4FCALFastSimSvc"):
         super( DefaultLArG4FastSimSvc, self ).__init__( name )
 
-        from AthenaCommon.Logging import logging
-        log = logging.getLogger( 'LArG4FCALFastSimSvc' )
-
         # set defaults
         self.EFlagToShowerLib=True
         self.GFlagToShowerLib=True
@@ -158,7 +154,7 @@ class LArG4FCALFastSimSvc(DefaultLArG4FastSimSvc):
         from AthenaCommon.AppMgr import ServiceMgr
 
         if not hasattr( ServiceMgr, 'LArG4ShowerLibSvc' ):
-            log.warning("ShowerLibSvc not setup")
+            cls.log.warning("ShowerLibSvc not setup")
 
         if handle.EFlagToShowerLib:
             ServiceMgr.LArG4ShowerLibSvc.FileNameList+=["LArG4ShowerLib.FCAL1.11.root"]
@@ -172,13 +168,11 @@ class LArG4FCALFastSimSvc(DefaultLArG4FastSimSvc):
 
 class LArG4FCAL2FastSimSvc(DefaultLArG4FastSimSvc):
     __slots__ = []
+    log = logging.getLogger( 'LArG4FCAL2FastSimSvc' )
 
     def __init__(self, name = "LArG4FCAL2FastSimSvc"):
         super( DefaultLArG4FastSimSvc, self ).__init__( name )
 
-        from AthenaCommon.Logging import logging
-        log = logging.getLogger( 'LArG4FCAL2FastSimSvc' )
-
         # set defaults
         self.EFlagToShowerLib=True
         self.GFlagToShowerLib=True
@@ -203,7 +197,7 @@ class LArG4FCAL2FastSimSvc(DefaultLArG4FastSimSvc):
         from AthenaCommon.AppMgr import ServiceMgr
 
         if not hasattr( ServiceMgr, 'LArG4ShowerLibSvc' ):
-            log.warning("ShowerLibSvc not setup")
+            cls.log.warning("ShowerLibSvc not setup")
 
         if handle.EFlagToShowerLib:
             ServiceMgr.LArG4ShowerLibSvc.FileNameList+=["LArG4ShowerLib.FCAL2.11.root"]
@@ -217,13 +211,11 @@ class LArG4FCAL2FastSimSvc(DefaultLArG4FastSimSvc):
 
 class DeadMaterialFastSimSvc(DefaultLArG4FastSimSvc):
     __slots__ = []
+    log = logging.getLogger( 'DeadMaterialFastSimSvc' )
 
     def __init__(self, name = "DeadMaterialFastSimSvc"):
         super( DefaultLArG4FastSimSvc, self ).__init__( name )
 
-        from AthenaCommon.Logging import logging
-        log = logging.getLogger( 'DeadMaterialFastSimSvc' )
-
         self.EMinEneShowerLib = 0.*GeV # electrons with this or lower energy will be killed on sight
         self.EMaxEneShowerLib = 1000.*GeV # particles with energy higher than this will be left alive
         self.GMaxEneShowerLib = 5698. # particles with Z coord this or more will be killed
diff --git a/LArCalorimeter/LArG4/LArG4FastSimSvc/python/LArG4FastSimSvcInit.py b/LArCalorimeter/LArG4/LArG4FastSimSvc/python/LArG4FastSimSvcInit.py
index fba6f92432c80249bd1ec0f7ed968ea13eca7985..37131512d4cc9643244652987ac319890562e183 100644
--- a/LArCalorimeter/LArG4/LArG4FastSimSvc/python/LArG4FastSimSvcInit.py
+++ b/LArCalorimeter/LArG4/LArG4FastSimSvc/python/LArG4FastSimSvcInit.py
@@ -1,3 +1,5 @@
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
+
 def LArG4FastSimSvcInit():
 
     from AthenaCommon.Logging import logging
@@ -7,11 +9,11 @@ def LArG4FastSimSvcInit():
     from G4AtlasApps.SimFlags import simFlags
 
     if not hasattr(simFlags, 'LArParameterization'):
-        log.warning("JobProperty LArParameterization is not defined! Can not set up fast simulation!");
+        log.warning("JobProperty LArParameterization is not defined! Can not set up fast simulation!")
         return
 
     if simFlags.LArParameterization() == 0:
-        log.warning("JobProperty LArParameterization is zero! No fast simulation requested!");
+        log.warning("JobProperty LArParameterization is zero! No fast simulation requested!")
         return
 
     # get service manager
diff --git a/LArCalorimeter/LArG4/LArG4FastSimSvc/python/__init__.py b/LArCalorimeter/LArG4/LArG4FastSimSvc/python/__init__.py
index c33da32420c22665493391b81eb3e1e73ba20bd5..2cd30841d022fba5d68a828da128a25e698f9b29 100644
--- a/LArCalorimeter/LArG4/LArG4FastSimSvc/python/__init__.py
+++ b/LArCalorimeter/LArG4/LArG4FastSimSvc/python/__init__.py
@@ -1,4 +1,4 @@
-# File: $Id: __init__.py 448398 2011-07-12 18:38:02Z gsedov $
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 __author__  = 'Zach Marshall (zmarshal@caltech.edu), Wolfgang Ehrenfeld (wolfgang.ehrenfeld@desy.de)'
 __version__ = '$Revision: 448398 $'
diff --git a/LArCalorimeter/LArG4/LArG4FastSimulation/src/EndcapFastSimDedicatedSD.cxx b/LArCalorimeter/LArG4/LArG4FastSimulation/src/EndcapFastSimDedicatedSD.cxx
index f8d45b3582280a68bebcd8127cabdb94d2a581d3..c6803c0e2bab5d6964fa92485e3ec4f86a8c3045 100644
--- a/LArCalorimeter/LArG4/LArG4FastSimulation/src/EndcapFastSimDedicatedSD.cxx
+++ b/LArCalorimeter/LArG4/LArG4FastSimulation/src/EndcapFastSimDedicatedSD.cxx
@@ -8,7 +8,6 @@
 #include "LArReadoutGeometry/EMECDetectorManager.h"
 #include "LArReadoutGeometry/EMECDetectorRegion.h"
 #include "LArG4Code/EnergySpot.h"
-#include "LArSimEvent/LArHitContainer.h"
 #include "GeoSpecialShapes/LArWheelCalculator.h"
 #include "StoreGate/StoreGateSvc.h"
 #include "CLHEP/Geometry/Point3D.h"
diff --git a/LArCalorimeter/LArG4/LArG4FastSimulation/src/FCALFastSimDedicatedSD.cxx b/LArCalorimeter/LArG4/LArG4FastSimulation/src/FCALFastSimDedicatedSD.cxx
index 8bf38caf18f002fbb7dca94b497d0a1b4cc0e374..0eabcc57a88f1495743e475048f07ccd669608b7 100644
--- a/LArCalorimeter/LArG4/LArG4FastSimulation/src/FCALFastSimDedicatedSD.cxx
+++ b/LArCalorimeter/LArG4/LArG4FastSimulation/src/FCALFastSimDedicatedSD.cxx
@@ -8,7 +8,6 @@
 #include "LArReadoutGeometry/FCALDetectorManager.h"
 #include "LArReadoutGeometry/FCALModule.h"
 #include "LArG4Code/EnergySpot.h"
-#include "LArSimEvent/LArHitContainer.h"
 #include "GeoModelKernel/GeoTubs.h"
 #include "StoreGate/StoreGateSvc.h"
 #include "CLHEP/Geometry/Transform3D.h"
diff --git a/LArCalorimeter/LArG4/LArG4H6SD/src/LArGeoH62004SteppingAction.cc b/LArCalorimeter/LArG4/LArG4H6SD/src/LArGeoH62004SteppingAction.cc
index e043bfb8cfb9de8e5bc122b3f294652c765bec3a..f71e68cd9677fca0e211b8d09638f64b4e3eef58 100644
--- a/LArCalorimeter/LArG4/LArG4H6SD/src/LArGeoH62004SteppingAction.cc
+++ b/LArCalorimeter/LArG4/LArG4H6SD/src/LArGeoH62004SteppingAction.cc
@@ -13,7 +13,6 @@
 #include "G4VPhysicalVolume.hh"
 #include "G4SDManager.hh"
 #include "G4TrackStatus.hh"
-#include "G4StepPoint.hh"
 
 #include "boost/io/ios_state.hpp"
 #include <iomanip>
diff --git a/LArCalorimeter/LArG4/LArG4Validation/src/SingleTrackValidation.cxx b/LArCalorimeter/LArG4/LArG4Validation/src/SingleTrackValidation.cxx
index f1a14f0091b6134f03d635860170032bed7b7b85..08f8be0c04d73a7f99da21ecb032be645378fb61 100755
--- a/LArCalorimeter/LArG4/LArG4Validation/src/SingleTrackValidation.cxx
+++ b/LArCalorimeter/LArG4/LArG4Validation/src/SingleTrackValidation.cxx
@@ -392,10 +392,10 @@ StatusCode SingleTrackValidation::execute() {
 
     // You have an x,y, and z position.  Now go and get the Element corresponding to
     // that hit position. There are four, one for each sampling layer:
-    double radImpact   = sqrt(x*x+y*y+z*z);
-    double phiImpact   = atan2(y,x);
-    double thetaImpact = acos(z/radImpact);
-    double etaImpact   = -log(tan(thetaImpact/2));
+    double radImpact   = std::sqrt(x*x+y*y+z*z);
+    double phiImpact   = std::atan2(y,x);
+    double thetaImpact = std::acos(z/radImpact);
+    double etaImpact   = -std::log(std::tan(thetaImpact/2));
       
     const CaloDetDescrElement *element[15]={NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL};
 
@@ -536,18 +536,18 @@ StatusCode SingleTrackValidation::execute() {
       m_c->s_t00[z]=t00[z];
       m_c->s_hits[z]=hit_count[z];
       if (z<12){
-        m_c->s_deltaPhi[z]=radImpact*sin(thetaImpact)*(ePhi[z]);
-        m_c->s_sigmaPhi[z]=radImpact*sin(thetaImpact)*sqrt(ePhiPhi[z]- ePhi[z]*ePhi[z]);
+        m_c->s_deltaPhi[z]=radImpact*std::sin(thetaImpact)*(ePhi[z]);
+        m_c->s_sigmaPhi[z]=radImpact*std::sin(thetaImpact)*std::sqrt(ePhiPhi[z]- ePhi[z]*ePhi[z]);
         m_c->s_deltaEta[z]=radImpact*dThetaDEta*(eEta[z]-etaImpact);
-        m_c->s_sigmaEta[z]=radImpact*fabs(dThetaDEta)*sqrt(eEtaEta[z]- eEta[z]*eEta[z]);
+        m_c->s_sigmaEta[z]=radImpact*std::fabs(dThetaDEta)*std::sqrt(eEtaEta[z]- eEta[z]*eEta[z]);
       } else {
         m_c->s_deltaPhi[z]=(eX[z]-x);
-        m_c->s_sigmaPhi[z]=sqrt(eXX[z]- eX[z]*eX[z]);
+        m_c->s_sigmaPhi[z]=std::sqrt(eXX[z]- eX[z]*eX[z]);
         m_c->s_deltaEta[z]=(eY[z]-y);
-        m_c->s_sigmaEta[z]=sqrt(eYY[z]-eY[z]*eY[z]);
+        m_c->s_sigmaEta[z]=std::sqrt(eYY[z]-eY[z]*eY[z]);
       }
-      m_c->s_widthX[z]=sqrt(eXX[z]-eX[z]*eX[z]);
-      m_c->s_widthY[z]=sqrt(eYY[z]-eY[z]*eY[z]);
+      m_c->s_widthX[z]=std::sqrt(eXX[z]-eX[z]*eX[z]);
+      m_c->s_widthY[z]=std::sqrt(eYY[z]-eY[z]*eY[z]);
     }
 	
     m_histos[161]->Fill(e_dep/Units::GeV);
@@ -559,15 +559,15 @@ StatusCode SingleTrackValidation::execute() {
       m_histos[126+i]->Fill( sqrt(eXX[i]-eX[i]*eX[i]) );
       m_histos[141+i]->Fill( sqrt(eYY[i]-eY[i]*eY[i]) );
       if (i<8){
-        m_histos[51+i]->Fill( radImpact*sin(thetaImpact)*ePhi[i] );
-        m_histos[66+i]->Fill( radImpact*sin(thetaImpact)*sqrt(ePhiPhi[i]-ePhi[i]*ePhi[i]) );
+        m_histos[51+i]->Fill( radImpact*std::sin(thetaImpact)*ePhi[i] );
+        m_histos[66+i]->Fill( radImpact*std::sin(thetaImpact)*std::sqrt(ePhiPhi[i]-ePhi[i]*ePhi[i]) );
         m_histos[81+i]->Fill( radImpact*dThetaDEta*(eEta[i]-etaImpact) );
-        m_histos[96+i]->Fill( radImpact*fabs(dThetaDEta)*sqrt(eEtaEta[i]-eEta[i]*eEta[i]) );
+        m_histos[96+i]->Fill( radImpact*std::fabs(dThetaDEta)*std::sqrt(eEtaEta[i]-eEta[i]*eEta[i]) );
       } else {
         m_histos[51+i]->Fill( eX[i]-x );
-        m_histos[66+i]->Fill( sqrt(eXX[i]-eX[i]*eX[i]) );
+        m_histos[66+i]->Fill( std::sqrt(eXX[i]-eX[i]*eX[i]) );
         m_histos[81+i]->Fill( eY[i]-y );
-        m_histos[96+i]->Fill( sqrt(eYY[i]-eY[i]*eY[i]) );
+        m_histos[96+i]->Fill( std::sqrt(eYY[i]-eY[i]*eY[i]) );
       }
     }
 
diff --git a/LArCalorimeter/LArGeoModel/LArGeoEndcap/src/EndcapCryostatConstruction.cxx b/LArCalorimeter/LArGeoModel/LArGeoEndcap/src/EndcapCryostatConstruction.cxx
index 4d82b7f0bf0b3c264a14153991a52f246db2b021..9a40b0eb08bf7ead9545a6d01268c16c2c29d67d 100755
--- a/LArCalorimeter/LArGeoModel/LArGeoEndcap/src/EndcapCryostatConstruction.cxx
+++ b/LArCalorimeter/LArGeoModel/LArGeoEndcap/src/EndcapCryostatConstruction.cxx
@@ -7,7 +7,6 @@
 // 04-Jan-2002 WGS: Revised to create cryostat mother volume, and to
 // place all endcap components (EMEC, HEC, FCAL) within this volume.
 
-#include "LArGeoEndcap/EndcapCryostatConstruction.h"
 #include "LArGeoEndcap/EndcapCryostatConstruction.h"
 #include "LArGeoEndcap/EndcapPresamplerConstruction.h"
 #include "EndcapDMConstruction.h"
@@ -56,7 +55,6 @@
 
 
 #include "GeoModelInterfaces/IGeoModelSvc.h"
-#include "GeoModelUtilities/GeoDBUtils.h"
 
 #include "GaudiKernel/MsgStream.h"
 #include "GaudiKernel/Bootstrap.h"
diff --git a/LArCalorimeter/LArGeoModel/LArGeoEndcap/src/EndcapPresamplerConstruction.cxx b/LArCalorimeter/LArGeoModel/LArGeoEndcap/src/EndcapPresamplerConstruction.cxx
index 20efcce207efab2f85998bf6b20b443a1249650b..feda70f550554b2957ffe0220a9a59fd7f4d34c7 100755
--- a/LArCalorimeter/LArGeoModel/LArGeoEndcap/src/EndcapPresamplerConstruction.cxx
+++ b/LArCalorimeter/LArGeoModel/LArGeoEndcap/src/EndcapPresamplerConstruction.cxx
@@ -13,7 +13,6 @@
 #include "GeoModelKernel/GeoNameTag.h"  
 #include "GeoModelKernel/GeoIdentifierTag.h"  
 #include "GeoModelKernel/GeoDefinitions.h"
-#include "StoreGate/StoreGateSvc.h"
 #include "GeoModelInterfaces/StoredMaterialManager.h"
 #include "GeoModelKernel/GeoShapeUnion.h"
 #include "GeoModelKernel/GeoShapeShift.h"
diff --git a/LArCalorimeter/LArGeoModel/LArGeoH62002Algs/src/FrontBeamConstructionH62002.cxx b/LArCalorimeter/LArGeoModel/LArGeoH62002Algs/src/FrontBeamConstructionH62002.cxx
index 8208804a06736914088f57d0d11e8dc53385f67f..34d476abfe33027ed6cb38828176abc06da0f6a5 100755
--- a/LArCalorimeter/LArGeoModel/LArGeoH62002Algs/src/FrontBeamConstructionH62002.cxx
+++ b/LArCalorimeter/LArGeoModel/LArGeoH62002Algs/src/FrontBeamConstructionH62002.cxx
@@ -22,7 +22,6 @@
 #include "GeoModelKernel/GeoSerialTransformer.h"
 #include "GeoModelKernel/GeoAlignableTransform.h"  
 #include "GeoModelKernel/GeoIdentifierTag.h"  
-#include "GeoModelKernel/GeoSerialDenominator.h"
 #include "GeoModelKernel/GeoDefinitions.h"
 #include "StoreGate/StoreGateSvc.h"
 #include "GeoModelInterfaces/StoredMaterialManager.h"
diff --git a/LArCalorimeter/LArGeoModel/LArGeoH62002Algs/src/TableConstructionH62002.cxx b/LArCalorimeter/LArGeoModel/LArGeoH62002Algs/src/TableConstructionH62002.cxx
index 36e89b222a1e02ebca7f3802862b4cc2df6602c6..a7ab6158c8ad6db622b0d4ffdf494297377e6ecd 100755
--- a/LArCalorimeter/LArGeoModel/LArGeoH62002Algs/src/TableConstructionH62002.cxx
+++ b/LArCalorimeter/LArGeoModel/LArGeoH62002Algs/src/TableConstructionH62002.cxx
@@ -22,7 +22,6 @@
 #include "GeoModelKernel/GeoSerialTransformer.h"
 #include "GeoModelKernel/GeoAlignableTransform.h"  
 #include "GeoModelKernel/GeoIdentifierTag.h"  
-#include "GeoModelKernel/GeoSerialDenominator.h"
 #include "GeoModelKernel/GeoDefinitions.h"
 #include "StoreGate/StoreGateSvc.h"
 #include "GeoModelInterfaces/StoredMaterialManager.h"
diff --git a/LArCalorimeter/LArGeoModel/LArGeoH62004Algs/src/ExcluderConstructionH62004.cxx b/LArCalorimeter/LArGeoModel/LArGeoH62004Algs/src/ExcluderConstructionH62004.cxx
index 6e87ce6855a39df68877c61ed3a2a13e64a0ac78..6423df2fad3d10420b9faf6e54dde83441eeea6e 100755
--- a/LArCalorimeter/LArGeoModel/LArGeoH62004Algs/src/ExcluderConstructionH62004.cxx
+++ b/LArCalorimeter/LArGeoModel/LArGeoH62004Algs/src/ExcluderConstructionH62004.cxx
@@ -12,7 +12,6 @@
 #include "GeoModelKernel/GeoTubs.h"
 #include "GeoModelKernel/GeoLogVol.h"
 #include "GeoModelKernel/GeoShapeShift.h"
-#include "GeoModelKernel/GeoShapeUnion.h"
 #include "GeoModelKernel/GeoShapeIntersection.h"
 #include "GeoModelKernel/GeoShapeSubtraction.h"
 #include "GeoModelKernel/GeoNameTag.h"
diff --git a/LArCalorimeter/LArGeoModel/LArGeoH62004Algs/src/FCALConstructionH62004.cxx b/LArCalorimeter/LArGeoModel/LArGeoH62004Algs/src/FCALConstructionH62004.cxx
index 066462edba78593681179ea83ab297875a66dd4a..ebbf687e9a4334832e66a735b8895b2f9d0a8f9e 100755
--- a/LArCalorimeter/LArGeoModel/LArGeoH62004Algs/src/FCALConstructionH62004.cxx
+++ b/LArCalorimeter/LArGeoModel/LArGeoH62004Algs/src/FCALConstructionH62004.cxx
@@ -63,7 +63,6 @@
 #include <stdexcept>
 
 #include "GaudiKernel/ISvcLocator.h"
-#include "GaudiKernel/Bootstrap.h" 
 
 //===================constructor
 
diff --git a/LArCalorimeter/LArGeoModel/LArGeoH62004Algs/src/MiddleBeamConstructionH62004.cxx b/LArCalorimeter/LArGeoModel/LArGeoH62004Algs/src/MiddleBeamConstructionH62004.cxx
index c972caa449c6ebc2956f13e8c09c485162864a3c..0014e1032f9691e0706a6dd9c2da816719c975a4 100755
--- a/LArCalorimeter/LArGeoModel/LArGeoH62004Algs/src/MiddleBeamConstructionH62004.cxx
+++ b/LArCalorimeter/LArGeoModel/LArGeoH62004Algs/src/MiddleBeamConstructionH62004.cxx
@@ -17,7 +17,6 @@
 #include "GeoModelKernel/GeoTube.h"  
 #include "GeoModelKernel/GeoNameTag.h"  
 #include "GeoModelKernel/GeoTransform.h"  
-#include "GeoModelKernel/GeoSerialDenominator.h"  
 #include "GeoModelKernel/GeoSerialIdentifier.h"
 #include "GeoModelKernel/GeoSerialTransformer.h"
 #include "GeoModelKernel/GeoAlignableTransform.h"  
diff --git a/LArCalorimeter/LArGeoModel/LArGeoH62004Algs/src/ModulesConstructionH62004.cxx b/LArCalorimeter/LArGeoModel/LArGeoH62004Algs/src/ModulesConstructionH62004.cxx
index 178fb693b08a84ed41a17ca1af55a068c0fcc9d6..dd61de61389fa6d20b5efbdfa111049566ed8ccc 100755
--- a/LArCalorimeter/LArGeoModel/LArGeoH62004Algs/src/ModulesConstructionH62004.cxx
+++ b/LArCalorimeter/LArGeoModel/LArGeoH62004Algs/src/ModulesConstructionH62004.cxx
@@ -20,7 +20,6 @@
 #include "GeoModelKernel/GeoCons.h"
 #include "GeoModelKernel/GeoLogVol.h"
 #include "GeoModelKernel/GeoShapeShift.h"
-#include "GeoModelKernel/GeoShapeUnion.h"
 #include "GeoModelKernel/GeoShapeIntersection.h"
 #include "GeoModelKernel/GeoShapeSubtraction.h"
 #include "GeoModelKernel/GeoNameTag.h"
diff --git a/LArCalorimeter/LArGeoModel/LArGeoH62004Algs/src/MovableTableConstructionH62004.cxx b/LArCalorimeter/LArGeoModel/LArGeoH62004Algs/src/MovableTableConstructionH62004.cxx
index c7c64cadeee3c1147e3e66da75284b7989787ae8..e0ebffe3bbe686881ed2ae7122e3824bf2aff636 100755
--- a/LArCalorimeter/LArGeoModel/LArGeoH62004Algs/src/MovableTableConstructionH62004.cxx
+++ b/LArCalorimeter/LArGeoModel/LArGeoH62004Algs/src/MovableTableConstructionH62004.cxx
@@ -18,7 +18,6 @@
 #include "GeoModelKernel/GeoTube.h"  
 #include "GeoModelKernel/GeoNameTag.h"  
 #include "GeoModelKernel/GeoTransform.h"  
-#include "GeoModelKernel/GeoSerialDenominator.h"  
 #include "GeoModelKernel/GeoSerialIdentifier.h"
 #include "GeoModelKernel/GeoSerialTransformer.h"
 #include "GeoModelKernel/GeoAlignableTransform.h"  
diff --git a/LArCalorimeter/LArGeoModel/LArGeoH62004Algs/src/Other/LArGeoH62004SteppingAction.cc b/LArCalorimeter/LArGeoModel/LArGeoH62004Algs/src/Other/LArGeoH62004SteppingAction.cc
index a8d13dd93ab3115de174b4d339eaff3f8be241d6..1e6114b2db197d86ff80d6f5ad1a578db8828262 100755
--- a/LArCalorimeter/LArGeoModel/LArGeoH62004Algs/src/Other/LArGeoH62004SteppingAction.cc
+++ b/LArCalorimeter/LArGeoModel/LArGeoH62004Algs/src/Other/LArGeoH62004SteppingAction.cc
@@ -17,7 +17,6 @@
 #include "LArG4SD/LArG4SD.h"
 #include "LArG4Code/LArG4Identifier.h"
 #include "G4Track.hh"
-#include "G4StepPoint.hh"
 #include "G4TouchableHistory.hh"
 #include "G4TransportationManager.hh"
 #include "G4VPhysicalVolume.hh"
diff --git a/LArCalorimeter/LArGeoModel/LArGeoH62004Algs/src/WarmTCConstructionH62004.cxx b/LArCalorimeter/LArGeoModel/LArGeoH62004Algs/src/WarmTCConstructionH62004.cxx
index 4f583c3d1a88a6a3d7fc20781f04f35794c31edf..fde9d204d9505e8ae7c3c6e0793ad663aefa72b5 100755
--- a/LArCalorimeter/LArGeoModel/LArGeoH62004Algs/src/WarmTCConstructionH62004.cxx
+++ b/LArCalorimeter/LArGeoModel/LArGeoH62004Algs/src/WarmTCConstructionH62004.cxx
@@ -13,7 +13,6 @@
 #include "GeoModelKernel/GeoCons.h"
 #include "GeoModelKernel/GeoLogVol.h"
 #include "GeoModelKernel/GeoShapeShift.h"
-#include "GeoModelKernel/GeoShapeUnion.h"
 #include "GeoModelKernel/GeoShapeIntersection.h"
 #include "GeoModelKernel/GeoShapeSubtraction.h"
 #include "GeoModelKernel/GeoNameTag.h"
diff --git a/LArCalorimeter/LArGeoModel/LArGeoH6Cryostats/src/BPCConstruction.cxx b/LArCalorimeter/LArGeoModel/LArGeoH6Cryostats/src/BPCConstruction.cxx
index 47b9c16c98908747e4e4ae97e4dbedb5a05e190e..b119bac86f564bdd31be740b025566dfcc13eca7 100755
--- a/LArCalorimeter/LArGeoModel/LArGeoH6Cryostats/src/BPCConstruction.cxx
+++ b/LArCalorimeter/LArGeoModel/LArGeoH6Cryostats/src/BPCConstruction.cxx
@@ -23,10 +23,8 @@
 #include "GeoModelKernel/GeoSerialTransformer.h"
 #include "GeoModelKernel/GeoAlignableTransform.h"  
 #include "GeoModelKernel/GeoIdentifierTag.h"  
-#include "GeoModelKernel/GeoSerialDenominator.h"
 #include "GeoModelKernel/GeoDefinitions.h"
 #include "GeoModelKernel/Units.h"
-#include "StoreGate/StoreGateSvc.h"
 #include "GeoModelInterfaces/StoredMaterialManager.h"
 #include "GeoModelKernel/GeoShapeUnion.h"
 #include "GeoModelKernel/GeoShapeShift.h"
diff --git a/LArCalorimeter/LArGeoModel/LArGeoH6Cryostats/src/H6CryostatConstruction.cxx b/LArCalorimeter/LArGeoModel/LArGeoH6Cryostats/src/H6CryostatConstruction.cxx
index 6b3b210c43e59aeafb59dcd8b1bf800f665bc2fe..caf65d8c7b7b65a5e9ea34457155b603766e59d9 100755
--- a/LArCalorimeter/LArGeoModel/LArGeoH6Cryostats/src/H6CryostatConstruction.cxx
+++ b/LArCalorimeter/LArGeoModel/LArGeoH6Cryostats/src/H6CryostatConstruction.cxx
@@ -23,7 +23,6 @@
 #include "GeoModelKernel/GeoAlignableTransform.h"  
 #include "GeoModelKernel/GeoIdentifierTag.h"  
 #include "GeoModelKernel/GeoDefinitions.h"
-#include "StoreGate/StoreGateSvc.h"
 #include "GeoModelInterfaces/StoredMaterialManager.h"
 #include "GeoModelKernel/GeoShapeUnion.h"
 #include "GeoModelKernel/GeoShapeShift.h"
diff --git a/LArCalorimeter/LArGeoModel/LArGeoH6Cryostats/src/MWPCConstruction.cxx b/LArCalorimeter/LArGeoModel/LArGeoH6Cryostats/src/MWPCConstruction.cxx
index 53f04814e7533c8eb5722b45c0bc9dfe13bc1ce7..ffe55d17d448d8417676500fc1d27cd647236d21 100755
--- a/LArCalorimeter/LArGeoModel/LArGeoH6Cryostats/src/MWPCConstruction.cxx
+++ b/LArCalorimeter/LArGeoModel/LArGeoH6Cryostats/src/MWPCConstruction.cxx
@@ -26,9 +26,7 @@
 #include "GeoModelKernel/GeoSerialTransformer.h"
 #include "GeoModelKernel/GeoAlignableTransform.h"  
 #include "GeoModelKernel/GeoIdentifierTag.h"  
-#include "GeoModelKernel/GeoSerialDenominator.h"
 #include "GeoModelKernel/GeoDefinitions.h"
-#include "StoreGate/StoreGateSvc.h"
 #include "GeoModelInterfaces/StoredMaterialManager.h"
 #include "GeoModelKernel/GeoShapeUnion.h"
 #include "GeoModelKernel/GeoShapeShift.h"
diff --git a/LArCalorimeter/LArGeoModel/LArGeoH6Cryostats/src/WallsConstruction.cxx b/LArCalorimeter/LArGeoModel/LArGeoH6Cryostats/src/WallsConstruction.cxx
index c05eff61fb7b9ad4c195a99706c8ac709f412f4a..77ccc0eb9cf457f144fe00a2c442f21c4abbb088 100755
--- a/LArCalorimeter/LArGeoModel/LArGeoH6Cryostats/src/WallsConstruction.cxx
+++ b/LArCalorimeter/LArGeoModel/LArGeoH6Cryostats/src/WallsConstruction.cxx
@@ -21,9 +21,7 @@
 #include "GeoModelKernel/GeoSerialTransformer.h"
 #include "GeoModelKernel/GeoAlignableTransform.h"  
 #include "GeoModelKernel/GeoIdentifierTag.h"  
-#include "GeoModelKernel/GeoSerialDenominator.h"
 #include "GeoModelKernel/GeoDefinitions.h"
-#include "StoreGate/StoreGateSvc.h"
 #include "GeoModelInterfaces/StoredMaterialManager.h"
 #include "GeoModelKernel/GeoShapeUnion.h"
 #include "GeoModelKernel/GeoShapeShift.h"
diff --git a/LArCalorimeter/LArGeoModel/LArGeoTBEC/src/CryostatConstructionTBEC.cxx b/LArCalorimeter/LArGeoModel/LArGeoTBEC/src/CryostatConstructionTBEC.cxx
index 303628b82f321f01768e6ca464b929f9ce7fe35c..1cdc7fd50a03415850a83ecb56f9aa0accb7aeb6 100755
--- a/LArCalorimeter/LArGeoModel/LArGeoTBEC/src/CryostatConstructionTBEC.cxx
+++ b/LArCalorimeter/LArGeoModel/LArGeoTBEC/src/CryostatConstructionTBEC.cxx
@@ -23,7 +23,6 @@
 #include "GeoModelKernel/GeoAlignableTransform.h"  
 #include "GeoModelKernel/GeoIdentifierTag.h"  
 #include "GeoModelKernel/GeoDefinitions.h"
-#include "StoreGate/StoreGateSvc.h"
 #include "GeoModelInterfaces/StoredMaterialManager.h"
 #include "GeoModelKernel/GeoShapeUnion.h"
 #include "GeoModelKernel/GeoShapeShift.h"
diff --git a/LArCalorimeter/LArGeoModel/LArGeoTBEC/src/LArDetectorConstructionTBEC.cxx b/LArCalorimeter/LArGeoModel/LArGeoTBEC/src/LArDetectorConstructionTBEC.cxx
index 997ec1276aa9b88faf88a2286902880136b25251..ec19e9de59765b796e79628a7885492c40893e75 100755
--- a/LArCalorimeter/LArGeoModel/LArGeoTBEC/src/LArDetectorConstructionTBEC.cxx
+++ b/LArCalorimeter/LArGeoModel/LArGeoTBEC/src/LArDetectorConstructionTBEC.cxx
@@ -22,7 +22,6 @@
 #include "GeoModelKernel/GeoAlignableTransform.h"  
 #include "GeoModelKernel/GeoIdentifierTag.h"  
 #include "GeoModelKernel/GeoDefinitions.h"
-#include "StoreGate/StoreGateSvc.h"
 #include "GeoModelInterfaces/StoredMaterialManager.h"
 #include "GeoModelUtilities/StoredPhysVol.h"
 #include "GeoModelUtilities/DecodeVersionKey.h"
diff --git a/LArCalorimeter/LArGeoModel/LArGeoTBEC/src/LArDetectorFactoryTBEC.cxx b/LArCalorimeter/LArGeoModel/LArGeoTBEC/src/LArDetectorFactoryTBEC.cxx
index 3000815bdbca4d0ff34c13dba14d974113a70c33..073ac8b943e79c26a4f6afe794bd8b001b5f4318 100755
--- a/LArCalorimeter/LArGeoModel/LArGeoTBEC/src/LArDetectorFactoryTBEC.cxx
+++ b/LArCalorimeter/LArGeoModel/LArGeoTBEC/src/LArDetectorFactoryTBEC.cxx
@@ -24,7 +24,6 @@
 #include "GaudiKernel/Bootstrap.h"
 #include "GeoModelUtilities/StoredPhysVol.h"
 
-#include "LArReadoutGeometry/EMECDetectorManager.h"
 #include "LArReadoutGeometry/EMECDetectorManager.h"
 #include "LArReadoutGeometry/EMECDetectorRegion.h"
 #include "LArReadoutGeometry/EMECDetDescr.h"
diff --git a/LArCalorimeter/LArGeoModel/LArHV/src/LArHVManager.cxx b/LArCalorimeter/LArGeoModel/LArHV/src/LArHVManager.cxx
index 94e3a9afec476074de7a3acf3d9d75c752940972..9a129001b46bb9c795cd773bd5fe0b82110a4f5b 100644
--- a/LArCalorimeter/LArGeoModel/LArHV/src/LArHVManager.cxx
+++ b/LArCalorimeter/LArGeoModel/LArHV/src/LArHVManager.cxx
@@ -8,7 +8,6 @@
 #include "LArHV/FCALHVManager.h"
 #include "LArHV/EMBPresamplerHVManager.h"
 #include "LArHV/EMECPresamplerHVManager.h"
-#include "LArHV/LArHVManager.h"
 
 LArHVManager::LArHVManager()
   : m_embHV()
diff --git a/LArCalorimeter/LArGeoModel/LArReadoutGeometry/src/EMECDetectorRegion.cxx b/LArCalorimeter/LArGeoModel/LArReadoutGeometry/src/EMECDetectorRegion.cxx
index ca45e94ccb2c1fc06ecf86355214150723ac3dfc..107494cb4b804b4a7c6b38d04619889c7565cd5f 100755
--- a/LArCalorimeter/LArGeoModel/LArReadoutGeometry/src/EMECDetectorRegion.cxx
+++ b/LArCalorimeter/LArGeoModel/LArReadoutGeometry/src/EMECDetectorRegion.cxx
@@ -5,7 +5,6 @@
 #include "GeoModelKernel/GeoVFullPhysVol.h"
 
 #include "LArReadoutGeometry/EMECDetectorRegion.h"
-#include "GeoModelKernel/GeoVFullPhysVol.h"
 #include "GeoModelKernel/GeoPcon.h"
 #include "GeoPrimitives/CLHEPtoEigenConverter.h"
 
diff --git a/LArCalorimeter/LArMonTools/share/LArCollisionTimeMonTool_jobOptions.py b/LArCalorimeter/LArMonTools/share/LArCollisionTimeMonTool_jobOptions.py
index 1f4183b57fda21fa7393371fd26d1e016ba608f9..474d45e18396732b867c00f9352d80b8d02c0dfa 100755
--- a/LArCalorimeter/LArMonTools/share/LArCollisionTimeMonTool_jobOptions.py
+++ b/LArCalorimeter/LArMonTools/share/LArCollisionTimeMonTool_jobOptions.py
@@ -37,7 +37,7 @@ LArCollisionTimeMon = LArCollisionTimeMonTool(
 
 
 LArCluCollTimeMonTool=LArCollisionTimeMonTool(Key="ClusterCollTime",
-                                              histPath="LArClusterCollTime",
+                                              histPath="LArClusterCollTimeOldTool",
                                               nCells=0,
                                               m_lumi_blocks=3000,
                                               BunchCrossingTool = theBunchCrossingTool,
diff --git a/LArCalorimeter/LArMonTools/share/LArNoiseMonTools_jobOptions.py b/LArCalorimeter/LArMonTools/share/LArNoiseMonTools_jobOptions.py
index da016a6b05fb7f835bfe52c6b97cea054eca130f..d212701429d7869d0d6554f7b7656d37096e3a17 100755
--- a/LArCalorimeter/LArMonTools/share/LArNoiseMonTools_jobOptions.py
+++ b/LArCalorimeter/LArMonTools/share/LArNoiseMonTools_jobOptions.py
@@ -6,7 +6,7 @@ ToolSvc.LArDigitNoiseMonTool.regionIDs = [0] # [0] to turn on all regions, [] to
 from LArMonTools.LArMonToolsConf import LArFebNoiseMonTool
 theLArFebNoiseMonTool = LArFebNoiseMonTool(name="LArFebNoiseMonTool",
                                            LArDigitContainerKey = LArMonFlags.LArDigitKey(),
-                                           histoPathBase = "LAr/FebNoise",
+                                           histoPathBase = "LAr/FebNoiseOldTool",
                                            febIDs  = [ 0 ],
                                            TriggerSign = +1,
                                            AlternateSum = False,
@@ -24,7 +24,7 @@ LArMon.AthenaMonTools+=[ theLArFebNoiseMonTool ]
 from LArMonTools.LArMonToolsConf import LArFebNoiseMonTool
 theLArFebNoiseMonToolAlt = LArFebNoiseMonTool(name="LArFebNoiseMonToolAlt",
                                               LArDigitContainerKey = LArMonFlags.LArDigitKey(),
-                                              histoPathBase = "LAr/FebNoiseAlt",
+                                              histoPathBase = "LAr/FebNoiseAltOldTool",
                                               febIDs  = [ 0 ],
                                               TriggerSign = +1,
                                               AlternateSum = True,
diff --git a/LArCalorimeter/LArMonTools/share/LArRawChannelMonTool_jobOptions.py b/LArCalorimeter/LArMonTools/share/LArRawChannelMonTool_jobOptions.py
index 214f9c2cd6c24df164ef477d83c01c050f6553b4..b840058010b2bc398dfc8293fa86107c34e3df49 100755
--- a/LArCalorimeter/LArMonTools/share/LArRawChannelMonTool_jobOptions.py
+++ b/LArCalorimeter/LArMonTools/share/LArRawChannelMonTool_jobOptions.py
@@ -19,7 +19,7 @@ LArRawChannelMon = LArRawChannelMonTool(
     # --- set up mon tool ---
     name                      = "LArRawChannelMon",
     LArRawChannelContainerKey = "LArRawChannels",
-    histoPathBase             = "LAr/RawChannel",
+    histoPathBase             = "LAr/RawChannelOldTool",
     OutputLevel               = 3,
     # ATLAS Ready Filter Tool
 
diff --git a/LArCalorimeter/LArMonTools/src/LArAffectedRegions.cxx b/LArCalorimeter/LArMonTools/src/LArAffectedRegions.cxx
index 09880c6308e9a41367f724cd76e0f0f5142b7664..6b89ea80ed5beb75a29d411d2915dd211bbdae24 100644
--- a/LArCalorimeter/LArMonTools/src/LArAffectedRegions.cxx
+++ b/LArCalorimeter/LArMonTools/src/LArAffectedRegions.cxx
@@ -108,7 +108,7 @@ LArAffectedRegions::bookHistograms()
   //  if(isNewRun ){ // Commented by B.Trocme to comply with new ManagedMonitorToolBase
   
     // Create top folder for histos
-  MonGroup generalGroup( this, "/LAr/AffectedRegions", run, ATTRIB_MANAGED,"", "weightedAverage");
+  MonGroup generalGroup( this, "/LAr/AffectedRegionsOldTool", run, ATTRIB_MANAGED,"", "weightedAverage");
     
     // EM Calorimeter - Barrel - PS 
     m_hLArAffectedRegionsEMBPS[0] = TH2I_LW::create("LArAffectedRegionsEMBAPS",
diff --git a/LArCalorimeter/LArMonTools/src/LArCollisionTimeMonTool.cxx b/LArCalorimeter/LArMonTools/src/LArCollisionTimeMonTool.cxx
index db4ff004b799d58df2d76b5fa5f1de80126c307c..37e576c33e6a78840aa81e8982261bc3f2236b37 100755
--- a/LArCalorimeter/LArMonTools/src/LArCollisionTimeMonTool.cxx
+++ b/LArCalorimeter/LArMonTools/src/LArCollisionTimeMonTool.cxx
@@ -58,7 +58,7 @@ LArCollisionTimeMonTool::LArCollisionTimeMonTool(const std::string& type,
   declareProperty( "timeDiffCut"	,      m_timeCut = 5.0 );
   declareProperty( "nCells"		,      m_minCells = 2 );
   declareProperty( "eWeighted"		,      m_eWeighted = true );
-  declareProperty( "histPath"           ,      m_histPath="LArCollisionTime"); 
+  declareProperty( "histPath"           ,      m_histPath="LArCollisionTimeOldTool"); 
   declareProperty( "BunchCrossingTool"  ,      m_bunchGroupTool); 
   declareProperty( "TrainFrontDistance" ,      m_distance = 30); 
   declareProperty( "IsOnline"           ,      m_IsOnline=false);
diff --git a/LArCalorimeter/LArMonTools/src/LArCosmicsMonTool.cxx b/LArCalorimeter/LArMonTools/src/LArCosmicsMonTool.cxx
index 490e4ee771cab162b78ec2b9da9387066f533d7d..3c6eee2097a8cac93668bdf26d0caa8382f9889a 100644
--- a/LArCalorimeter/LArMonTools/src/LArCosmicsMonTool.cxx
+++ b/LArCalorimeter/LArMonTools/src/LArCosmicsMonTool.cxx
@@ -130,7 +130,7 @@ LArCosmicsMonTool::bookHistograms() {
     // Create top folder for histos
     //
     
-    MonGroup generalGroupShift( this, "/LAr/Cosmics/", run, ATTRIB_MANAGED );
+    MonGroup generalGroupShift( this, "/LAr/CosmicsOldTool/", run, ATTRIB_MANAGED );
     
     // Store cells granularity and plots boundaries for each sampling in ECAL
     float etaminECAL = -3.2 ; float etamaxECAL = 3.2;
diff --git a/LArCalorimeter/LArMonTools/src/LArCoverage.cxx b/LArCalorimeter/LArMonTools/src/LArCoverage.cxx
index 698da2bed0039ebe6c146c7e0ae2d042e677a0cb..fa06b3c6a834ac55336ecd55490cc439046340cc 100644
--- a/LArCalorimeter/LArMonTools/src/LArCoverage.cxx
+++ b/LArCalorimeter/LArMonTools/src/LArCoverage.cxx
@@ -136,12 +136,12 @@ LArCoverage::bookHistograms()
     }
   
     // Create top folder for histos
-    MonGroup generalGroup( this, "/LAr/Coverage", run, ATTRIB_MANAGED );
+    MonGroup generalGroup( this, "/LAr/CoverageOldTool", run, ATTRIB_MANAGED );
     
     //
     // Book Maps vs eta/phi - One map per sampling
     //
-    MonGroup CovGroupShift( this, "/LAr/Coverage/perPartition", run, ATTRIB_MANAGED, "", "lowerLB" );
+    MonGroup CovGroupShift( this, "/LAr/CoverageOldTool/perPartition", run, ATTRIB_MANAGED, "", "lowerLB" );
 
     //
     // Coverage EM Barrel - Store Granularity
@@ -471,7 +471,7 @@ LArCoverage::bookHistograms()
     // CaloNoiseTool histograms - per FT/Slot
     //
 
-    MonGroup CaloNoiseGroupShift( this, "/LAr/Coverage/CaloNoiseTool", run, ATTRIB_MANAGED, "", "lowerLB");
+    MonGroup CaloNoiseGroupShift( this, "/LAr/CoverageOldTool/CaloNoiseTool", run, ATTRIB_MANAGED, "", "lowerLB");
 
     // EM histos
     for (int i=0; i<4;i++){
@@ -507,7 +507,7 @@ LArCoverage::bookHistograms()
     // Bad Channels DataBase content
     //
      
-    MonGroup BadChannelsGroupShift( this, "/LAr/Coverage/BadChannels/", run, ATTRIB_MANAGED, "", "lowerLB");
+    MonGroup BadChannelsGroupShift( this, "/LAr/CoverageOldTool/BadChannels/", run, ATTRIB_MANAGED, "", "lowerLB");
  
     m_hBadChannelsBarrelA = TH2I_LW::create("DBBadChannelsBarrelA",Form("Known Bad Channels - Barrel A - LB %4d",lb1),
 					    448,0.,32.,128,-0.5,127.5);
diff --git a/LArCalorimeter/LArMonTools/src/LArDigitMon.cxx b/LArCalorimeter/LArMonTools/src/LArDigitMon.cxx
index 459e3751f3b8254f62dcad45e1f5825998f911df..c7695a6c1a188b96eb39a4579b22fff57c492276 100755
--- a/LArCalorimeter/LArMonTools/src/LArDigitMon.cxx
+++ b/LArCalorimeter/LArMonTools/src/LArDigitMon.cxx
@@ -203,40 +203,40 @@ LArDigitMon::bookHistograms()
     m_eventsCounter=0;
     
     /**Book Histograms of Barrel.*/
-    MonGroup GroupBarrelShift( this, "/LAr/Digits/Barrel", run, ATTRIB_MANAGED );
-    MonGroup GroupBarrelExpert( this, "/LAr/Digits/Barrel", run, ATTRIB_MANAGED );
-    MonGroup GroupBarrelExpertEff( this, "/LAr/Digits/Barrel", run, ATTRIB_MANAGED ,"","weightedEff");
+    MonGroup GroupBarrelShift( this, "/LAr/DigitsOldTool/Barrel", run, ATTRIB_MANAGED );
+    MonGroup GroupBarrelExpert( this, "/LAr/DigitsOldTool/Barrel", run, ATTRIB_MANAGED );
+    MonGroup GroupBarrelExpertEff( this, "/LAr/DigitsOldTool/Barrel", run, ATTRIB_MANAGED ,"","weightedEff");
     
     LArDigitMon::BookPartitions(m_BarrelA,"BarrelA",GroupBarrelShift,GroupBarrelExpert,GroupBarrelExpertEff);
     LArDigitMon::BookPartitions(m_BarrelC,"BarrelC",GroupBarrelShift,GroupBarrelExpert,GroupBarrelExpertEff);
     
     /**Book Histogram of EMEC*/
-    MonGroup GroupEMECShift( this, "/LAr/Digits/EMEC", run, ATTRIB_MANAGED );
-    MonGroup GroupEMECExpert( this, "/LAr/Digits/EMEC", run, ATTRIB_MANAGED );
-    MonGroup GroupEMECExpertEff( this, "/LAr/Digits/EMEC", run, ATTRIB_MANAGED,"","weightedEff");
+    MonGroup GroupEMECShift( this, "/LAr/DigitsOldTool/EMEC", run, ATTRIB_MANAGED );
+    MonGroup GroupEMECExpert( this, "/LAr/DigitsOldTool/EMEC", run, ATTRIB_MANAGED );
+    MonGroup GroupEMECExpertEff( this, "/LAr/DigitsOldTool/EMEC", run, ATTRIB_MANAGED,"","weightedEff");
     
     LArDigitMon::BookPartitions(m_EmecA,"EmecA",GroupEMECShift,GroupEMECExpert,GroupEMECExpertEff);
     LArDigitMon::BookPartitions(m_EmecC,"EmecC",GroupEMECShift,GroupEMECExpert,GroupEMECExpertEff);
     
     /**Book Histogram of HEC*/
-    MonGroup GroupHECShift( this, "/LAr/Digits/HEC", run, ATTRIB_MANAGED );
-    MonGroup GroupHECExpert( this, "/LAr/Digits/HEC", run, ATTRIB_MANAGED );
-    MonGroup GroupHECExpertEff( this, "/LAr/Digits/HEC", run, ATTRIB_MANAGED,"","weightedEff");
+    MonGroup GroupHECShift( this, "/LAr/DigitsOldTool/HEC", run, ATTRIB_MANAGED );
+    MonGroup GroupHECExpert( this, "/LAr/DigitsOldTool/HEC", run, ATTRIB_MANAGED );
+    MonGroup GroupHECExpertEff( this, "/LAr/DigitsOldTool/HEC", run, ATTRIB_MANAGED,"","weightedEff");
     
     LArDigitMon::BookPartitions(m_HecA,"HecA",GroupHECShift,GroupHECExpert,GroupHECExpertEff);
     LArDigitMon::BookPartitions(m_HecC,"HecC",GroupHECShift,GroupHECExpert,GroupHECExpertEff);
     
     /**Book Histogram of FCAL.*/
-    MonGroup GroupFCALShift( this, "/LAr/Digits/FCAL", run, ATTRIB_MANAGED );
-    MonGroup GroupFCALExpert( this, "/LAr/Digits/FCAL", run, ATTRIB_MANAGED );
-    MonGroup GroupFCALExpertEff( this, "/LAr/Digits/FCAL", run, ATTRIB_MANAGED,"","weightedEff");
+    MonGroup GroupFCALShift( this, "/LAr/DigitsOldTool/FCAL", run, ATTRIB_MANAGED );
+    MonGroup GroupFCALExpert( this, "/LAr/DigitsOldTool/FCAL", run, ATTRIB_MANAGED );
+    MonGroup GroupFCALExpertEff( this, "/LAr/DigitsOldTool/FCAL", run, ATTRIB_MANAGED,"","weightedEff");
     
     LArDigitMon::BookPartitions(m_FcalA,"FcalA",GroupFCALShift,GroupFCALExpert,GroupFCALExpertEff);
     LArDigitMon::BookPartitions(m_FcalC,"FcalC",GroupFCALShift,GroupFCALExpert,GroupFCALExpertEff);
     
     
     /**Book summary histo*/
-    MonGroup generalGroup( this, "/LAr/Digits", run, ATTRIB_MANAGED );
+    MonGroup generalGroup( this, "/LAr/DigitsOldTool", run, ATTRIB_MANAGED );
     const char *  hName = "summary";
     const char * hTitle = "LArDigit Summary";
     
diff --git a/LArCalorimeter/LArMonTools/src/LArFEBMon.cxx b/LArCalorimeter/LArMonTools/src/LArFEBMon.cxx
index 0aa4592d192facbe6d00605fa81d775f4316a71c..43aa03f8aa6259b8c6e0f15bd19cce40b7b4f420 100755
--- a/LArCalorimeter/LArMonTools/src/LArFEBMon.cxx
+++ b/LArCalorimeter/LArMonTools/src/LArFEBMon.cxx
@@ -159,10 +159,10 @@ StatusCode LArFEBMon::bookHistograms() {
   StatusCode sc = StatusCode::SUCCESS;
   
   //  if(isNewRun){
-    MonGroup summaryGroupW( this, "/LAr/FEBMon/Summary", run, ATTRIB_MANAGED, "", "weightedEff" );
-    MonGroup summaryGroup( this, "/LAr/FEBMon/Summary", run, ATTRIB_MANAGED );
-    MonGroup perPartitionDataGroup( this, "/LAr/FEBMon/perPartitionData", run, ATTRIB_MANAGED );
-    MonGroup perPartitionDataGroupLowerLB( this, "/LAr/FEBMon/perPartitionData", run, ATTRIB_MANAGED, "", "lowerLB" );
+    MonGroup summaryGroupW( this, "/LAr/FEBMonOldTool/Summary", run, ATTRIB_MANAGED, "", "weightedEff" );
+    MonGroup summaryGroup( this, "/LAr/FEBMonOldTool/Summary", run, ATTRIB_MANAGED );
+    MonGroup perPartitionDataGroup( this, "/LAr/FEBMonOldTool/perPartitionData", run, ATTRIB_MANAGED );
+    MonGroup perPartitionDataGroupLowerLB( this, "/LAr/FEBMonOldTool/perPartitionData", run, ATTRIB_MANAGED, "", "lowerLB" );
     // General summary histos
     m_rejectedHisto = TH1F_LW::create("EventsRejected","Nb of events rejected (at least one error)",3,0.5,3.5);
     (m_rejectedHisto->GetXaxis())->SetBinLabel(1,"Whole event corrupted");
@@ -170,7 +170,7 @@ StatusCode LArFEBMon::bookHistograms() {
     (m_rejectedHisto->GetXaxis())->SetBinLabel(3,"Accepted");
     //sc &= summaryGroup.regHist(m_rejectedHisto);
     
-    sc = regHist(m_rejectedHisto,  "/LAr/FEBMon/Summary", run);
+    sc = regHist(m_rejectedHisto,  "/LAr/FEBMonOldTool/Summary", run);
     
     m_rejectedYield = TH1F_LW::create("EventsRejectedYield","Data corruption yield",3,0.5,3.5);
     (m_rejectedYield->GetXaxis())->SetBinLabel(1,"Whole event corrupted");
@@ -904,10 +904,10 @@ StatusCode LArFEBMon::bookNewPartitionSumm(summaryPartition& summ,std::string su
 {
   ATH_MSG_DEBUG( "In bookNewPartitionSumm ->" << summName );
   
-  MonGroup perPartitionGroup( this, "/LAr/FEBMon/perPartition", run, ATTRIB_MANAGED );
-  MonGroup perPartitionYieldGroup( this, "/LAr/FEBMon/perPartition", run, ATTRIB_MANAGED, "" , "weightedEff" );
-  MonGroup perPartitionDataGroup( this, "/LAr/FEBMon/perPartitionData", run, ATTRIB_MANAGED );
-  MonGroup perPartitionMiscGroup( this, "/LAr/FEBMon/perPartitionMisc", run, ATTRIB_MANAGED );
+  MonGroup perPartitionGroup( this, "/LAr/FEBMonOldTool/perPartition", run, ATTRIB_MANAGED );
+  MonGroup perPartitionYieldGroup( this, "/LAr/FEBMonOldTool/perPartition", run, ATTRIB_MANAGED, "" , "weightedEff" );
+  MonGroup perPartitionDataGroup( this, "/LAr/FEBMonOldTool/perPartitionData", run, ATTRIB_MANAGED );
+  MonGroup perPartitionMiscGroup( this, "/LAr/FEBMonOldTool/perPartitionMisc", run, ATTRIB_MANAGED );
   
   int nbOfFT = 25;
   int nbOfSlot = 15;
@@ -1188,7 +1188,7 @@ LArFEBMon::fillFebInError(const summaryPartition& summ,int errorType,int barrel_
   
   //  TH2I* tempHisto = TH2I_LW::create(*summ.parity);
   
-  std::string hName = "/LAr/FEBMon/perPartition/FebInErrors/" + summName;
+  std::string hName = "/LAr/FEBMonOldTool/perPartition/FebInErrors/" + summName;
   
   MonGroup generalGroup( this, hName.c_str(), run, ATTRIB_MANAGED);
   
diff --git a/LArCalorimeter/LArMonTools/src/LArHVCorrectionMonTool.cxx b/LArCalorimeter/LArMonTools/src/LArHVCorrectionMonTool.cxx
index 05ccf97fb31a9bd590ae6bc43a9695ff8bc0d4da..4e53e302c875eaf745059b2285bd679c7ec2a5c7 100644
--- a/LArCalorimeter/LArMonTools/src/LArHVCorrectionMonTool.cxx
+++ b/LArCalorimeter/LArMonTools/src/LArHVCorrectionMonTool.cxx
@@ -113,7 +113,7 @@ LArHVCorrectionMonTool::bookHistograms()
   //  if(isNewRun){
     
     // Create top folder for histos
-    MonGroup generalGroup( this, "/LAr/HVCorrection", run, ATTRIB_MANAGED, "", "weightedAverage" );
+    MonGroup generalGroup( this, "/LAr/HVCorrectionOldTool", run, ATTRIB_MANAGED, "", "weightedAverage" );
     
     // EM Calorimeter - Barrel
     m_hLArHVCorrectionEMB[0] = TH2F_LW::create("LArHVCorrectionEMBA",
diff --git a/LArCalorimeter/LArMonTools/src/LArNoiseCorrelationMon.cxx b/LArCalorimeter/LArMonTools/src/LArNoiseCorrelationMon.cxx
index 4a563e57e10ae32929a0c59aad4a628fa2a2f102..84f403901fa83e513ad091f6d3af541338268555 100644
--- a/LArCalorimeter/LArMonTools/src/LArNoiseCorrelationMon.cxx
+++ b/LArCalorimeter/LArMonTools/src/LArNoiseCorrelationMon.cxx
@@ -189,14 +189,14 @@ LArNoiseCorrelationMon::bookHistograms()
     m_chan_low=-0.5;
     m_chan_up=127.5;
 
-    MonGroup GroupEMBA( this, "/LAr/NoiseCorrelation/EMBA", run, ATTRIB_MANAGED );
-    MonGroup GroupEMBC( this, "/LAr/NoiseCorrelation/EMBC", run, ATTRIB_MANAGED );
-    MonGroup GroupEMECA( this, "/LAr/NoiseCorrelation/EMECA", run, ATTRIB_MANAGED );
-    MonGroup GroupEMECC( this, "/LAr/NoiseCorrelation/EMECC", run, ATTRIB_MANAGED );
-    MonGroup GroupHECA( this, "/LAr/NoiseCorrelation/HECA", run, ATTRIB_MANAGED );
-    MonGroup GroupHECC( this, "/LAr/NoiseCorrelation/HECC", run, ATTRIB_MANAGED );
-    MonGroup GroupFCALA( this, "/LAr/NoiseCorrelation/FCALA", run, ATTRIB_MANAGED );
-    MonGroup GroupFCALC( this, "/LAr/NoiseCorrelation/FCALC", run, ATTRIB_MANAGED );
+    MonGroup GroupEMBA( this, "/LAr/NoiseCorrelationOldTool/EMBA", run, ATTRIB_MANAGED );
+    MonGroup GroupEMBC( this, "/LAr/NoiseCorrelationOldTool/EMBC", run, ATTRIB_MANAGED );
+    MonGroup GroupEMECA( this, "/LAr/NoiseCorrelationOldTool/EMECA", run, ATTRIB_MANAGED );
+    MonGroup GroupEMECC( this, "/LAr/NoiseCorrelationOldTool/EMECC", run, ATTRIB_MANAGED );
+    MonGroup GroupHECA( this, "/LAr/NoiseCorrelationOldTool/HECA", run, ATTRIB_MANAGED );
+    MonGroup GroupHECC( this, "/LAr/NoiseCorrelationOldTool/HECC", run, ATTRIB_MANAGED );
+    MonGroup GroupFCALA( this, "/LAr/NoiseCorrelationOldTool/FCALA", run, ATTRIB_MANAGED );
+    MonGroup GroupFCALC( this, "/LAr/NoiseCorrelationOldTool/FCALC", run, ATTRIB_MANAGED );
 
     /**declare strings for histograms title*/
     m_hist_name = "NoiseCorr_"; 
diff --git a/LArCalorimeter/LArMonTools/src/LArNoisyROMon.cxx b/LArCalorimeter/LArMonTools/src/LArNoisyROMon.cxx
index beccdac2b127a6446f03581db2a8193bb63f3488..b0382448a9c9a02066d6d9c8c6d857b46741a099 100644
--- a/LArCalorimeter/LArMonTools/src/LArNoisyROMon.cxx
+++ b/LArCalorimeter/LArMonTools/src/LArNoisyROMon.cxx
@@ -101,19 +101,19 @@ StatusCode LArNoisyROMon::bookHistograms()
     std::string hTitle;
     std::stringstream tit;
 
-    MonGroup overall(this, "/LAr/NoisyRO", run, ATTRIB_MANAGED );
+    MonGroup overall(this, "/LAr/NoisyROOldTool", run, ATTRIB_MANAGED );
     if(m_doHisto) {
     // Book histograms per partitions    
-    MonGroup GroupBarrel(this, "/LAr/NoisyRO/Barrel", run, ATTRIB_MANAGED );
-    MonGroup GroupBarrelFrac(this, "/LAr/NoisyRO/Barrel", run, ATTRIB_MANAGED ,"", "weightedEff");
-    MonGroup GroupBarrelFracBin(this, "/LAr/NoisyRO/Barrel", run, ATTRIB_MANAGED ,"", "perBinEffPerCent");
+    MonGroup GroupBarrel(this, "/LAr/NoisyROiOldTool/Barrel", run, ATTRIB_MANAGED );
+    MonGroup GroupBarrelFrac(this, "/LAr/NoisyROOldTool/Barrel", run, ATTRIB_MANAGED ,"", "weightedEff");
+    MonGroup GroupBarrelFracBin(this, "/LAr/NoisyROOldTool/Barrel", run, ATTRIB_MANAGED ,"", "perBinEffPerCent");
     m_partHistos.resize(4);
     bookPartitionHistos(m_partHistos[1],"EMBA",GroupBarrel,GroupBarrelFrac,GroupBarrelFracBin);
     bookPartitionHistos(m_partHistos[2],"EMBC",GroupBarrel,GroupBarrelFrac,GroupBarrelFracBin);
     
-    MonGroup GroupEMEC(this, "/LAr/NoisyRO/EMEC", run, ATTRIB_MANAGED );
-    MonGroup GroupEMECFrac(this, "/LAr/NoisyRO/EMEC", run, ATTRIB_MANAGED,"", "weightedEff" );
-    MonGroup GroupEMECFracBin(this, "/LAr/NoisyRO/EMEC", run, ATTRIB_MANAGED,"", "perBinEffPerCent" );
+    MonGroup GroupEMEC(this, "/LAr/NoisyROOldTool/EMEC", run, ATTRIB_MANAGED );
+    MonGroup GroupEMECFrac(this, "/LAr/NoisyROOldTool/EMEC", run, ATTRIB_MANAGED,"", "weightedEff" );
+    MonGroup GroupEMECFracBin(this, "/LAr/NoisyROOldTool/EMEC", run, ATTRIB_MANAGED,"", "perBinEffPerCent" );
     bookPartitionHistos(m_partHistos[0],"EMECA",GroupEMEC,GroupEMECFrac,GroupEMECFracBin);
     bookPartitionHistos(m_partHistos[3],"EMECC",GroupEMEC,GroupEMECFrac,GroupEMECFracBin);
 
diff --git a/LArCalorimeter/LArMonTools/src/LArRODMonTool.cxx b/LArCalorimeter/LArMonTools/src/LArRODMonTool.cxx
index 1147d2b7e21d55cf59930f62455f9b061e3682a0..9f6d578670410ee2b3b74c603f60137e4a028834 100755
--- a/LArCalorimeter/LArMonTools/src/LArRODMonTool.cxx
+++ b/LArCalorimeter/LArMonTools/src/LArRODMonTool.cxx
@@ -268,10 +268,10 @@ LArRODMonTool::bookHistograms()
     strHelper.setDefaultNameType(LArOnlineIDStrHelper::LARONLINEID);
 
     // EMB A and C
-    MonGroup generalGroupBarrel( this, "/LAr/DSPMonitoring/EMB", run, ATTRIB_MANAGED );
-    MonGroup generalGroupEndcap( this, "/LAr/DSPMonitoring/Emec", run, ATTRIB_MANAGED );
-    MonGroup generalGroupHec( this, "/LAr/DSPMonitoring/Hec", run, ATTRIB_MANAGED );
-    MonGroup generalGroupFcal( this, "/LAr/DSPMonitoring/Fcal", run, ATTRIB_MANAGED );
+    MonGroup generalGroupBarrel( this, "/LAr/DSPMonitoringOldTool/EMB", run, ATTRIB_MANAGED );
+    MonGroup generalGroupEndcap( this, "/LAr/DSPMonitoringOldTool/Emec", run, ATTRIB_MANAGED );
+    MonGroup generalGroupHec( this, "/LAr/DSPMonitoringOldTool/Hec", run, ATTRIB_MANAGED );
+    MonGroup generalGroupFcal( this, "/LAr/DSPMonitoringOldTool/Fcal", run, ATTRIB_MANAGED );
 
     m_histos[EMBA].m_partName="EMBA";
     m_histos[EMBA].m_monGroup=&generalGroupBarrel;
@@ -399,7 +399,7 @@ LArRODMonTool::bookHistograms()
     }//end loop over partitions
 
     // Summary histogram
-    MonGroup generalGroup( this, "/LAr/DSPMonitoring/Summary", run, ATTRIB_MANAGED );   
+    MonGroup generalGroup( this, "/LAr/DSPMonitoringOldTool/Summary", run, ATTRIB_MANAGED );   
     
     hName = "Summary_E";
     hTitle = "Summary of errors on Energy per partition and per gain";
@@ -473,7 +473,7 @@ LArRODMonTool::bookHistograms()
     m_hQ_all->GetXaxis()->SetTitle("Q_{offline} - Q_{online} / #sqrt{Q_{offline}}");
     CHECK(generalGroup.regHist(m_hQ_all));
 
-    MonGroup debugGroup( this, "/LAr/DSPMonitoring/DQMD", run, ATTRIB_MANAGED ); // "shift" SHOULD BE CHANGED do "debug" ?
+    MonGroup debugGroup( this, "/LAr/DSPMonitoringOldTool/DQMD", run, ATTRIB_MANAGED ); // "shift" SHOULD BE CHANGED do "debug" ?
 
     for (unsigned p=0;p<N_PARTITIONS;++p) {
       HistGroup& hg=m_histos[p];
@@ -503,7 +503,7 @@ LArRODMonTool::bookHistograms()
     CHECK(debugGroup.regHist(m_hE_ranges_all));
 
 
-    MonGroup infosGroup( this, "/LAr/DSPMonitoring/Infos", run, ATTRIB_MANAGED );
+    MonGroup infosGroup( this, "/LAr/DSPMonitoringOldTool/Infos", run, ATTRIB_MANAGED );
     
     hName = "EErrorsPerLB";
     std::string cut = "#delta ADC>"+std::to_string(m_adc_th)+" and |t_{offline}| < "+std::to_string(int(m_peakTime_cut))+"ns";
diff --git a/LArCalorimeter/LArRawConditions/LArRawConditions/LArRawConditionsDict1.h b/LArCalorimeter/LArRawConditions/LArRawConditions/LArRawConditionsDict1.h
index 4ef479599477ccf9b918c5bbf6bc04e25223b01d..eb92c4c141ff427d7aa323fc4b0a5720c2c268b3 100644
--- a/LArCalorimeter/LArRawConditions/LArRawConditions/LArRawConditionsDict1.h
+++ b/LArCalorimeter/LArRawConditions/LArRawConditions/LArRawConditionsDict1.h
@@ -13,7 +13,6 @@
 //=== LArAutoCorrP
 #include "LArRawConditions/LArAutoCorrMC.h"
 #include "LArRawConditions/LArAutoCorrP.h"
-#include "LArRawConditions/LArAutoCorrP.h"
 #include "LArRawConditions/LArAutoCorrP1.h"
 INSTAN_CC(LArAutoCorrP1);
 INSTAN_CS(LArAutoCorrP);
diff --git a/LArCalorimeter/LArRecEvent/LArRecEvent/LArFebEnergyCollection.h b/LArCalorimeter/LArRecEvent/LArRecEvent/LArFebEnergyCollection.h
index af487cbcb5fbc83b4b2970d022ee8c9af329866e..72409bbfcf1cd3d6bb816e713574de3394460ec6 100755
--- a/LArCalorimeter/LArRecEvent/LArRecEvent/LArFebEnergyCollection.h
+++ b/LArCalorimeter/LArRecEvent/LArRecEvent/LArFebEnergyCollection.h
@@ -18,9 +18,6 @@
 #include "AthContainers/DataVector.h"
 #include "AthenaKernel/CLASS_DEF.h"
 
-#include "LArRecEvent/LArFebEnergy.h"
-
-
 class LArFebEnergyCollection : public DataVector<LArFebEnergy> {
 
  public:
diff --git a/LArCalorimeter/LArRecUtils/CMakeLists.txt b/LArCalorimeter/LArRecUtils/CMakeLists.txt
index 1645d736d9d6368aded27a11b599cee93700b803..f982a64c9a9966ad07de32481ed6e45ce7a912f3 100644
--- a/LArCalorimeter/LArRecUtils/CMakeLists.txt
+++ b/LArCalorimeter/LArRecUtils/CMakeLists.txt
@@ -8,12 +8,13 @@ find_package( Boost )
 find_package( CLHEP )
 find_package( Eigen )
 find_package( CORAL COMPONENTS CoralBase )
+find_package( ROOT )
 
 # Component(s) in the package:
 atlas_add_library( LArRecUtilsLib
                    src/*.cxx
                    PUBLIC_HEADERS LArRecUtils
-                   PRIVATE_INCLUDE_DIRS ${Boost_INCLUDE_DIRS} ${CLHEP_INCLUDE_DIRS} ${CORAL_INCLUDE_DIRS} ${EIGEN_INCLUDE_DIRS}
+                   PRIVATE_INCLUDE_DIRS ${Boost_INCLUDE_DIRS} ${CLHEP_INCLUDE_DIRS} ${CORAL_INCLUDE_DIRS} ${EIGEN_INCLUDE_DIRS} ${ROOT_INCLUDE_DIRS}
                    PRIVATE_DEFINITIONS ${CLHEP_DEFINITIONS}
                    LINK_LIBRARIES AthAllocators AthenaBaseComps AthenaKernel CaloIdentifier CaloUtilsLib LArCOOLConditions LArElecCalib LArIdentifier LArRawEvent LArRecConditions LArRecEvent StoreGateLib
                    PRIVATE_LINK_LIBRARIES ${Boost_LIBRARIES} ${CLHEP_LIBRARIES} ${CORAL_LIBRARIES} ${EIGEN_LIBRARIES} ${ROOT_LIBRARIES} AthenaPoolUtilities CaloConditions CaloDetDescrLib CaloEvent CaloGeoHelpers CaloInterfaceLib GaudiKernel Identifier LArCablingLib LArHV LArRawConditions LArRawUtilsLib LArReadoutGeometry PathResolver SGTools )
diff --git a/LArCalorimeter/LArRecUtils/src/LArFlatConditionsAlg.h b/LArCalorimeter/LArRecUtils/src/LArFlatConditionsAlg.h
index dc017b2cd7e718b64e39c1da686c95846236fac9..b710f7c1a723af7f7bb2c735e7ab4a3cc36ef68f 100644
--- a/LArCalorimeter/LArRecUtils/src/LArFlatConditionsAlg.h
+++ b/LArCalorimeter/LArRecUtils/src/LArFlatConditionsAlg.h
@@ -35,13 +35,9 @@ class LArFlatConditionsAlg: public AthAlgorithm {
 
 
 
-//#include "LArCOOLConditions/LArHVScaleCorrFlat.h"
-//typedef LArFlatConditionsAlg<LArHVScaleCorrFlat> LArFlatCondAlgHVScale;
-
 #include "LArCOOLConditions/LArPedestalFlat.h"
 typedef LArFlatConditionsAlg<LArPedestalFlat> LArCondAlgPedestalFlat;
 
-
 #include "LArCOOLConditions/LArAutoCorrSC.h"
 typedef LArFlatConditionsAlg<LArAutoCorrSC> LArCondAlgAutoCorrSC;
 
@@ -72,9 +68,6 @@ typedef LArFlatConditionsAlg<LArNoiseSC> LArCondAlgNoiseSC;
 #include "LArCOOLConditions/LArOFCFlat.h"
 typedef LArFlatConditionsAlg<LArOFCFlat> LArCondAlgOFCFlat;
 
-#include "LArCOOLConditions/LArPedestalFlat.h"
-typedef LArFlatConditionsAlg<LArPedestalFlat> LArCondAlgPedestalFlat;
-
 #include "LArCOOLConditions/LArPedestalSC.h"
 typedef LArFlatConditionsAlg<LArPedestalSC> LArCondAlgPedestalSC;
 
diff --git a/LArCalorimeter/LArRecUtils/src/LArHVCondAlg.cxx b/LArCalorimeter/LArRecUtils/src/LArHVCondAlg.cxx
index ef8deea8b39518426e947fba90f3bd26c463c4a9..c41808e350a296a4f4a248fc40a8654ce1d6477e 100755
--- a/LArCalorimeter/LArRecUtils/src/LArHVCondAlg.cxx
+++ b/LArCalorimeter/LArRecUtils/src/LArHVCondAlg.cxx
@@ -31,8 +31,6 @@
 #include "LArHV/FCALHVManager.h"
 #include "LArHV/FCALHVLine.h"
 
-#include "LArHV/LArHVManager.h"
-
 #include "LArIdentifier/LArElectrodeID.h"
 #include "LArIdentifier/LArHVLineID.h"
 #include "LArIdentifier/LArOnlineID.h"
diff --git a/LArCalorimeter/LArRecUtils/src/components/LArRecUtils_entries.cxx b/LArCalorimeter/LArRecUtils/src/components/LArRecUtils_entries.cxx
index ea0407b85a31b6af63f199f5dedc60608fccd5f5..7a4b5c10ff546234a5e14fa4a039617da9a998ae 100644
--- a/LArCalorimeter/LArRecUtils/src/components/LArRecUtils_entries.cxx
+++ b/LArCalorimeter/LArRecUtils/src/components/LArRecUtils_entries.cxx
@@ -15,7 +15,6 @@
 #include "../LArOnOffMappingAlg.h"
 #include "../LArCalibLineMappingAlg.h"
 #include "../LArFebRodMappingAlg.h"
-#include "../LArHVIdMappingAlg.h"
 #include "../LArSymConditionsAlg.h"
 #include "../LArMCSymCondAlg.h"
 #include "../LArADC2MeVCondAlg.h"
diff --git a/LArCalorimeter/LArTest/LArCalibTest/src/FixLArElecCalib.cxx b/LArCalorimeter/LArTest/LArCalibTest/src/FixLArElecCalib.cxx
index 6e11e6c8c778fb435801944379383e6c11207ea4..82ec4a159ad05964cb3ec8f5eb58b08ff0223ed1 100644
--- a/LArCalorimeter/LArTest/LArCalibTest/src/FixLArElecCalib.cxx
+++ b/LArCalorimeter/LArTest/LArCalibTest/src/FixLArElecCalib.cxx
@@ -66,7 +66,6 @@
 #include "LArIdentifier/LArOnlineID.h"
 #include "CaloIdentifier/LArEM_ID.h"
 #include "LArIdentifier/LArOnline_SuperCellID.h"
-#include "CaloIdentifier/LArEM_ID.h"
 #include "CaloIdentifier/CaloCell_SuperCell_ID.h"
 
 #include "LArRawConditions/LArDAC2uAMC.h"
diff --git a/LArCalorimeter/LArTest/LArConditionsTest/LArConditionsTest/LArConditionsTestAlg.h b/LArCalorimeter/LArTest/LArConditionsTest/LArConditionsTest/LArConditionsTestAlg.h
index 6fdec0918f490480c22b65303c2484d4d8995b63..768c35aec589a8d99f5fb8d079158e43fe36d36f 100644
--- a/LArCalorimeter/LArTest/LArConditionsTest/LArConditionsTest/LArConditionsTestAlg.h
+++ b/LArCalorimeter/LArTest/LArConditionsTest/LArConditionsTest/LArConditionsTestAlg.h
@@ -26,11 +26,7 @@
 #include "GaudiKernel/IIncidentListener.h" 
 #include "AthenaKernel/IOVSvcDefs.h"
 
-//  #include "LArCondCnv/ExampleData.h" 
-//  #include "LArCondCnv/ExampleDataRamp.h" 
-#include "LArElecCalib/ILArRamp.h" 
 #include "StoreGate/DataHandle.h"
-//#include "AthenaKernel/IOVSvcDefs.h" 
 #include "StoreGate/ReadCondHandleKey.h"
 #include "LArCabling/LArOnOffIdMapping.h"
 #include "LArRecConditions/LArCalibLineMapping.h"
diff --git a/LArCalorimeter/LArTest/LArEventTest/src/TriggerPatternCount.cxx b/LArCalorimeter/LArTest/LArEventTest/src/TriggerPatternCount.cxx
index e287ebd8cb87a3b043c061548394598100c8e248..e4539bab510ad8c765077070933186bc5ca95689 100755
--- a/LArCalorimeter/LArTest/LArEventTest/src/TriggerPatternCount.cxx
+++ b/LArCalorimeter/LArTest/LArEventTest/src/TriggerPatternCount.cxx
@@ -55,8 +55,6 @@ StatusCode TriggerPatternCount::execute()
   return StatusCode::SUCCESS;
 }
 
-#include <stdlib.h>
-
 StatusCode TriggerPatternCount::finalize()
 {
   ATH_MSG_INFO ( "TriggerPatternCount has finished." );
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcAth/AmdcAth/AmdcAthMisc.h b/MuonSpectrometer/Amdcsimrec/AmdcAth/AmdcAth/AmdcAthMisc.h
deleted file mode 100755
index 390da5735a8b333db67e50d81261a831d9f1fc65..0000000000000000000000000000000000000000
--- a/MuonSpectrometer/Amdcsimrec/AmdcAth/AmdcAth/AmdcAthMisc.h
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-//
-//  Author :  Saclay Muon Software Group SaMuSoG
-//
-// The AmdcAthMisc header contains all this Bxxxxy stuff
-//
-#ifndef AmdcAthMisc_H
-#define AmdcAthMisc_H
-
-#include <iostream>
-#include <iomanip>
-#include <fstream>
-#include <cstdio>
-#include <cmath>
-#include <algorithm>
-#include <iterator>
-#include <list>
-#include <vector>
-#include <map>
-#include <string>
-#include <utility>
-
-#endif
-
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcAth/AmdcAth/AmdcsimrecAthenaSvc.h b/MuonSpectrometer/Amdcsimrec/AmdcAth/AmdcAth/AmdcsimrecAthenaSvc.h
index a4aff8e08a5a17c0a15d11ad046145cb0ff95b45..b20b0c241d0b4d6ed4ca85cdcf97028955f3a123 100755
--- a/MuonSpectrometer/Amdcsimrec/AmdcAth/AmdcAth/AmdcsimrecAthenaSvc.h
+++ b/MuonSpectrometer/Amdcsimrec/AmdcAth/AmdcAth/AmdcsimrecAthenaSvc.h
@@ -1,5 +1,5 @@
 /*
-  Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
 #ifndef AMDCATH_AMDCSIMRECATHENASVC_H
@@ -22,8 +22,8 @@ class AtlasDetectorID ;
 
 #include "AthenaKernel/IOVSvcDefs.h"
 
-/////////////////////////////////////////////////////////////////////////////
-#include "AmdcAth/AmdcAthMisc.h"
+#include <vector>
+#include <string>
 
 class AmdcAlineStore ;
 class AmdcBlineStore ;
@@ -55,10 +55,6 @@ public:
 
    virtual StatusCode queryInterface( const InterfaceID& riid, void** ppvInterface );
 
-   bool       InitializedSvc();
-   bool       UsableSvc();
-   StatusCode UpdatedSvc(IOVSVC_CALLBACK_ARGS);
-
    /**Get Amdcsimrec interface */
    Amdcsimrec* GetAmdcsimrec();
 
@@ -101,10 +97,6 @@ private:
 ///////////////////////////////////
    void getAbsMax(double& currentMax, const double toCheck) const;
 
-   bool m_IsUsable ; //!< Tell usuability state 
-
-   bool m_IsInitialized ; //!< Tell initialisation state 
-
    /**Set A and B lines collections*/
    StatusCode SetAmdcABlineFromCool();
 
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcAth/src/AmdcsimrecAthenaSvc.cxx b/MuonSpectrometer/Amdcsimrec/AmdcAth/src/AmdcsimrecAthenaSvc.cxx
index f378dd2add50ecd75b36292643cfe7157bdf8097..dc4189317622daa17fe2c2d4ce9ae8f18c243c31 100755
--- a/MuonSpectrometer/Amdcsimrec/AmdcAth/src/AmdcsimrecAthenaSvc.cxx
+++ b/MuonSpectrometer/Amdcsimrec/AmdcAth/src/AmdcsimrecAthenaSvc.cxx
@@ -36,6 +36,8 @@
 #include "AmdcStand/bigamdcdump.h"
 #include "AmdcStand/loadamdcagddfromstring.h"
 
+#include <fstream>
+
 /// Standard Constructor
 AmdcsimrecAthenaSvc::AmdcsimrecAthenaSvc(const std::string& name,ISvcLocator* svc):
   AthService(name,svc),
@@ -60,9 +62,6 @@ AmdcsimrecAthenaSvc::AmdcsimrecAthenaSvc(const std::string& name,ISvcLocator* sv
    m_AmdcString = "" ;
    m_AgddString = "" ;
 
-   m_IsInitialized = false;
-   m_IsUsable      = false;
-
    m_AmdcABlinesStamp = 1;
    
    m_AGDD2GeoSwitchesStamp = -1 ;
@@ -208,9 +207,6 @@ StatusCode AmdcsimrecAthenaSvc::initialize() {
       ATH_MSG_DEBUG( "=>Strings come from Ascii file and A/B line stores from cool<=" ) ;
     }
 
-    m_IsInitialized = true ;
-    m_IsUsable      = true ;
-
     ATH_CHECK(initializeAscii());
     ATH_MSG_DEBUG( "Done: initializeAscii " ) ;
     
@@ -225,9 +221,6 @@ StatusCode AmdcsimrecAthenaSvc::initialize() {
   if ( (m_NameOfTheSource=="POOL" || m_NameOfTheSource=="GEOMODEL" ) && m_AlignmentSource == 3 ){
     ATH_MSG_DEBUG( "=>Strings come from Geomodel and A/B line stores as well<=" ) ;
     
-    m_IsInitialized = true ;
-    m_IsUsable      = true ;
-    
     ATH_CHECK(initializeFromGeomodel());
     ATH_MSG_DEBUG( "Done: initializeFromGeomodel " ) ;
   }
@@ -235,8 +228,6 @@ StatusCode AmdcsimrecAthenaSvc::initialize() {
 //Strings come from Geomodel and A/B line stores from cool
    if ( (m_NameOfTheSource=="POOL" || m_NameOfTheSource=="GEOMODEL" ) && m_AlignmentSource == 2 ){
     ATH_MSG_DEBUG( "=>Strings come from Geomodel and A/B line stores from cool<=" ) ;
-    m_IsInitialized = true ;
-    m_IsUsable      = true ;
 
     ATH_CHECK(initializeFromGeomodel());
     ATH_MSG_DEBUG( "Done: initializeFromGeomodel " ) ;
@@ -257,9 +248,6 @@ StatusCode AmdcsimrecAthenaSvc::initialize() {
       ATH_MSG_DEBUG( "=>Strings come from Oracle and A/B line stores from cool<=" ) ;
     }
 
-    m_IsInitialized = true ;
-    m_IsUsable      = true ;
-
     ATH_CHECK(initializeFromOracleNode());
     ATH_MSG_DEBUG( "Done: initializeFromOracleNode " ) ;
     
@@ -291,14 +279,6 @@ StatusCode AmdcsimrecAthenaSvc::queryInterface( const InterfaceID& riid, void**
   return StatusCode::SUCCESS;
 }
 
-bool AmdcsimrecAthenaSvc::UsableSvc()      {return m_IsUsable     ;}
-bool AmdcsimrecAthenaSvc::InitializedSvc() {return m_IsInitialized;}
-StatusCode AmdcsimrecAthenaSvc::UpdatedSvc(IOVSVC_CALLBACK_ARGS)
-{
- ATH_MSG_DEBUG("----> UpdatedSvc is called" ) ; 
- return StatusCode::SUCCESS;
-}
- 
 Amdcsimrec* AmdcsimrecAthenaSvc::GetAmdcsimrec(){return p_Amdcsimrec;}
 
 std::string AmdcsimrecAthenaSvc::GetNameOfTheSource(){return m_NameOfTheSource;}
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/AmdcAline.h b/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/AmdcAline.h
index 864a9d7b165605cd570c4d73d8c36e2c10171707..385968379307a143633192cd50b41cc046152878 100755
--- a/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/AmdcAline.h
+++ b/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/AmdcAline.h
@@ -1,11 +1,12 @@
 /*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
-#ifndef AmdcAline_H
-#define AmdcAline_H
+#ifndef AMDCCORE_AMDCALINE_H
+#define AMDCCORE_AMDCALINE_H
 
-#include "AmdcCore/AmdcsimrecMisc.h"
+#include <iostream>
+#include <string>
 
   /**
    @class AmdcAline
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/AmdcAlineStore.h b/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/AmdcAlineStore.h
index e91deda45d7fb67ce7e6748b298e1c4eef70a205..7034e3ad3f65a429e7899a5c488666c57dff1828 100755
--- a/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/AmdcAlineStore.h
+++ b/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/AmdcAlineStore.h
@@ -1,13 +1,13 @@
 /*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
-#ifndef AmdcAlineStore_H
-#define AmdcAlineStore_H
-
-#include "AmdcCore/AmdcsimrecMisc.h"
+#ifndef AMDCCORE_AMDCALINESTORE_H
+#define AMDCCORE_AMDCALINESTORE_H
 
 #include "AmdcCore/AmdcAline.h"
+#include <vector>
+#include <iostream>
 
   /**
    @class AmdcAlineStore
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/AmdcBline.h b/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/AmdcBline.h
index 2591affc1c3fdab44deaa14a2696f5de62555adc..6df438a603a628e79528c841e2d470b9a608463c 100755
--- a/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/AmdcBline.h
+++ b/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/AmdcBline.h
@@ -1,11 +1,12 @@
 /*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
-#ifndef AmdcBline_H
-#define AmdcBline_H
+#ifndef AMDCCORE_AMDCBLINE_H
+#define AMDCCORE_AMDCBLINE_H
 
-#include "AmdcCore/AmdcsimrecMisc.h"
+#include <string>
+#include <iostream>
 
   /**
    @class AmdcBline
@@ -19,7 +20,7 @@
 class AmdcBline{
 public:
    AmdcBline();
-   virtual ~AmdcBline();
+   ~AmdcBline();
 
 public:
 ///////////////////////////////////
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/AmdcBlineStore.h b/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/AmdcBlineStore.h
index 062edb47bb85c3b2cd3b3c5f63f3b8269791d9c4..061fee79953a414e57ae2416a6acdd13f37e5e78 100755
--- a/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/AmdcBlineStore.h
+++ b/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/AmdcBlineStore.h
@@ -1,13 +1,13 @@
 /*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
-#ifndef AmdcBlineStore_H
-#define AmdcBlineStore_H
-
-#include "AmdcCore/AmdcsimrecMisc.h"
+#ifndef AMDCCORE_AMDCBLINESTORE_H
+#define AMDCCORE_AMDCBLINESTORE_H
 
 #include "AmdcCore/AmdcBline.h"
+#include <vector>
+#include <iostream>
 
   /**
    @class AmdcBlineStore
@@ -21,7 +21,7 @@
 class AmdcBlineStore{
 public:
     AmdcBlineStore();
-    virtual ~AmdcBlineStore();
+    ~AmdcBlineStore();
 
 public:
 ///////////////////////////////////
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/AmdcIline.h b/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/AmdcIline.h
index 02d77d65c74d4e3aba123ab476da7aee3c3a8f2a..7f0047d941838233adc1a2a1d0bf20e22d49c1a0 100755
--- a/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/AmdcIline.h
+++ b/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/AmdcIline.h
@@ -1,11 +1,12 @@
 /*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
-#ifndef AmdcIline_H
-#define AmdcIline_H
+#ifndef AMDCCORE_AMDCILINE_H
+#define AMDCCORE_AMDCILINE_H
 
-#include "AmdcCore/AmdcsimrecMisc.h"
+#include <string>
+#include <iostream>
 
   /**
    @class AmdcIline
@@ -19,7 +20,7 @@
 class AmdcIline{
 public:
    AmdcIline();
-   virtual ~AmdcIline();
+   ~AmdcIline();
 
 public:
 ///////////////////////////////////
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/AmdcIlineStore.h b/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/AmdcIlineStore.h
index 06114ae34bf253f5a986da07816df2bd15f14299..844faa0dca22aa8ef3a8a9cc0a52c05cdd410527 100755
--- a/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/AmdcIlineStore.h
+++ b/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/AmdcIlineStore.h
@@ -1,11 +1,12 @@
 /*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
-#ifndef AmdcIlineStore_H
-#define AmdcIlineStore_H
+#ifndef AMDCCORE_AMDCILINESTORE_H
+#define AMDCCORE_AMDCILINESTORE_H
 
-#include "AmdcCore/AmdcsimrecMisc.h"
+#include <vector>
+#include <iostream>
 
 #include "AmdcCore/AmdcIline.h"
 
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/Amdcsimrec.h b/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/Amdcsimrec.h
index 472c201b4a5a8e4ded3f64a61d6e0d606a286cd0..10337e6c79f92c32f6ed64c929864f6ad10da21c 100755
--- a/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/Amdcsimrec.h
+++ b/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/Amdcsimrec.h
@@ -1,11 +1,11 @@
 /*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
-#ifndef Amdcsimrec_H
-#define Amdcsimrec_H
+#ifndef AMDCCORE_AMDCSIMREC_H
+#define AMDCCORE_AMDCSIMREC_H
 
-#include "AmdcCore/AmdcsimrecMisc.h"
+#include <string>
 
   /**
    @class Amdcsimrec
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/AmdcsimrecAccess.h b/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/AmdcsimrecAccess.h
index b79688f2fa77edb0e1579ca2bd508e17f001a5e9..adb743ea9cfc3672c4f7f5c0e79171fc31c62406 100755
--- a/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/AmdcsimrecAccess.h
+++ b/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/AmdcsimrecAccess.h
@@ -1,11 +1,9 @@
 /*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
-#ifndef AmdcsimrecAccess_H
-#define AmdcsimrecAccess_H
-
-#include "AmdcCore/AmdcsimrecMisc.h"
+#ifndef AMDCCORE_AMDCSIMRECACCESS_H
+#define AMDCCORE_AMDCSIMRECACCESS_H
 
 #include "AmdcCore/Amdcsimrec.h"
 
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/AmdcsimrecMisc.h b/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/AmdcsimrecMisc.h
deleted file mode 100755
index 6224a8c67fa600a7ff8ed43bac0bba4ff61933f9..0000000000000000000000000000000000000000
--- a/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/AmdcsimrecMisc.h
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-//
-//  Author :  Saclay Muon Software Group SaMuSoG
-//
-// The AmdcsimrecMisc header contains all this Bxxxxy stuff
-//
-#ifndef AmdcsimrecMisc_H
-#define AmdcsimrecMisc_H
-
-#include <iostream>
-#include <iomanip>
-#include <fstream>
-#include <cstdio>
-#include <cmath>
-#include <algorithm>
-#include <iterator>
-#include <list>
-#include <vector>
-#include <map>
-#include <string>
-#include <utility>
-
-#endif
-
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/cppbigdump.h b/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/cppbigdump.h
index e7aa52dd4a1cb7224d82b79d9ced428cb0c307bc..aebd44997c6363e7e51b71fd406be74bb5c6e6fd 100755
--- a/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/cppbigdump.h
+++ b/MuonSpectrometer/Amdcsimrec/AmdcCore/AmdcCore/cppbigdump.h
@@ -1,12 +1,11 @@
 /*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
-#ifndef cppbigdump_H
-#define cppbigdump_H
+#ifndef AMDCCORE_CPPBIGDUMP_H
+#define AMDCCORE_CPPBIGDUMP_H
 
-#include "AmdcCore/Amdcsimrec.h"
-#include "AmdcCore/AmdcsimrecMisc.h"
+class Amdcsimrec;
 
 void cppbigdump(Amdcsimrec* pAmdcsimrec) ;
 
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcCore/src/AmdcAline.cxx b/MuonSpectrometer/Amdcsimrec/AmdcCore/src/AmdcAline.cxx
index 6698005e2c1829c7c61b142e39eeade4fcf7c0be..45ec2a2e4fec60884e2e0057dd9fdde327d6d52e 100755
--- a/MuonSpectrometer/Amdcsimrec/AmdcCore/src/AmdcAline.cxx
+++ b/MuonSpectrometer/Amdcsimrec/AmdcCore/src/AmdcAline.cxx
@@ -1,8 +1,9 @@
 /*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
 #include "AmdcCore/AmdcAline.h"
+#include <iomanip>
 
 using std::setiosflags;
 
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcCore/src/AmdcBline.cxx b/MuonSpectrometer/Amdcsimrec/AmdcCore/src/AmdcBline.cxx
index 722af20677500bd93ab719c2c3fd4060ae179f94..856419392e466d2a44e6d128d0dbdd87f44994d6 100755
--- a/MuonSpectrometer/Amdcsimrec/AmdcCore/src/AmdcBline.cxx
+++ b/MuonSpectrometer/Amdcsimrec/AmdcCore/src/AmdcBline.cxx
@@ -1,8 +1,9 @@
 /*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
 #include "AmdcCore/AmdcBline.h"
+#include <iomanip>
 
 using std::setiosflags;
 
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcCore/src/AmdcIline.cxx b/MuonSpectrometer/Amdcsimrec/AmdcCore/src/AmdcIline.cxx
index 2d5fad625bc89da705fa064ba89a6452360bb2da..4718e1ac280a6ed4aa2944cc3c67254570ecb69f 100755
--- a/MuonSpectrometer/Amdcsimrec/AmdcCore/src/AmdcIline.cxx
+++ b/MuonSpectrometer/Amdcsimrec/AmdcCore/src/AmdcIline.cxx
@@ -1,8 +1,9 @@
 /*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
 #include "AmdcCore/AmdcIline.h"
+#include <iomanip>
 
 using std::setiosflags;
 
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcCore/src/cppbigdump.cxx b/MuonSpectrometer/Amdcsimrec/AmdcCore/src/cppbigdump.cxx
index 9b6c68556097c49b8fc3ef482b5800259e233167..b5da9f97e6a2a4e21b33858bad98c8a710423600 100755
--- a/MuonSpectrometer/Amdcsimrec/AmdcCore/src/cppbigdump.cxx
+++ b/MuonSpectrometer/Amdcsimrec/AmdcCore/src/cppbigdump.cxx
@@ -1,9 +1,10 @@
 /*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
 #include "AmdcCore/Amdcsimrec.h"
-#include "AmdcCore/AmdcsimrecMisc.h"
+#include <fstream>
+#include <iomanip>
 
 using std::setiosflags;
 
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcDb/AmdcDb/AmdcDb.h b/MuonSpectrometer/Amdcsimrec/AmdcDb/AmdcDb/AmdcDb.h
index 14b6f95fd171a2c0ed4f18c74bde3940d44d18b5..e78595ec08fb2cf6602b86cc3006168ed36f764f 100755
--- a/MuonSpectrometer/Amdcsimrec/AmdcDb/AmdcDb/AmdcDb.h
+++ b/MuonSpectrometer/Amdcsimrec/AmdcDb/AmdcDb/AmdcDb.h
@@ -2,15 +2,11 @@
   Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
-#ifndef AmdcDb_H
-#define AmdcDb_H
+#ifndef AMDCDB_AMDCDB_H
+#define AMDCDB_AMDCDB_H
 
 #include "AthenaBaseComps/AthService.h"
-#include "GaudiKernel/ToolHandle.h"
 #include "GaudiKernel/ServiceHandle.h" 
-#include "AthenaKernel/IOVSvcDefs.h"
-#include "AmdcDb/AmdcDbMisc.h"
-#include "AmdcDb/IRDBAccessSvcWithUpdate.h"
 #include "RDBAccessSvc/IRDBAccessSvc.h"
 #include "RDBAccessSvc/IRDBQuery.h"
 #include <memory>
@@ -30,7 +26,7 @@ template <class TYPE> class SvcFactory;
  *
  */
 
-class AmdcDb final : public AthService, virtual public IRDBAccessSvcWithUpdate {
+class AmdcDb final : public AthService, virtual public IRDBAccessSvc {
  public:
   static const InterfaceID& interfaceID() { return IID_IRDBAccessSvc; }
 
@@ -39,12 +35,6 @@ class AmdcDb final : public AthService, virtual public IRDBAccessSvcWithUpdate {
 
   virtual StatusCode queryInterface( const InterfaceID& riid, void** ppvInterface ) override;
 
-  virtual bool       InitializedSvc() override;
-  virtual bool       UsableSvc() override;
-  virtual StatusCode UpdatedSvc(IOVSVC_CALLBACK_ARGS) override;
-
-  StatusCode AmdcsimrecAthenaSvcUpdatedSvc(IOVSVC_CALLBACK_ARGS);
-  
   virtual IRDBRecordset_ptr getRecordsetPtr(const std::string& node,
                                             const std::string& tag,
                                             const std::string& tag2node="",
@@ -74,14 +64,10 @@ class AmdcDb final : public AthService, virtual public IRDBAccessSvcWithUpdate {
   virtual ~AmdcDb();
 
  private:
-   bool m_IsUsable ; //!< Tell usuability state 
-   
-   bool m_IsInitialized ; //!< Tell initialisation state 
-   
-   StoreGateSvc* p_detStore     ; //!< Pointer On detector store
+   StoreGateSvc* m_detStore     ; //!< Pointer On detector store
   
-   AmdcDbSvc* p_AmdcDbSvcFromAmdc ; //!< Pointer on AmdcDbSvc
-   AmdcDbSvc* p_AmdcDbSvcFromRDB  ; //!< Pointer on AmdcDbSvc
+   AmdcDbSvc* m_AmdcDbSvcFromAmdc ; //!< Pointer on AmdcDbSvc
+   AmdcDbSvc* m_AmdcDbSvcFromRDB  ; //!< Pointer on AmdcDbSvc
 
    std::string m_AMDBtag; //!< name of Oracle node
 
@@ -110,15 +96,13 @@ class AmdcDb final : public AthService, virtual public IRDBAccessSvcWithUpdate {
    
    IRDBRecordset_ptr m_emptyRecordset ; //!< Pointer on an empty Recordset
 
-   ServiceHandle<AmdcsimrecAthenaSvc> p_AmdcsimrecAthenaSvc;  //!< Pointer On AmdcsimrecAthenaSvc
+   ServiceHandle<AmdcsimrecAthenaSvc> m_AmdcsimrecAthenaSvc;  //!< Pointer On AmdcsimrecAthenaSvc
 
    //Db Keys
    std::string m_detectorKey  ;
    std::string m_detectorNode ;
 
-   StatusCode regFcnAmdcsimrecAthenaSvcUpdatedSvc();
    StatusCode DoUpdatedSvc();
-   int m_AmdcsimrecAthenaSvcUpdatedSvcDONE ;
 };
 
 #endif
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcDb/AmdcDb/AmdcDb2Sql.h b/MuonSpectrometer/Amdcsimrec/AmdcDb/AmdcDb/AmdcDb2Sql.h
index 92798548e7b18e39e30b0fdd08c3a3baadbdbb2d..592dc485a46eccb60e36b402143bb470c8a8747f 100755
--- a/MuonSpectrometer/Amdcsimrec/AmdcDb/AmdcDb/AmdcDb2Sql.h
+++ b/MuonSpectrometer/Amdcsimrec/AmdcDb/AmdcDb/AmdcDb2Sql.h
@@ -1,11 +1,10 @@
 /*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
-#ifndef AmdcDb2Sql_H
-#define AmdcDb2Sql_H
+#ifndef AMDCDB_AMDCDB2SQL_H
+#define AMDCDB_AMDCDB2SQL_H
 
-#include "AmdcDb/AmdcDbMisc.h"
 class IRDBAccessSvc;
 
   /**
@@ -20,7 +19,7 @@ class IRDBAccessSvc;
 class AmdcDb2Sql{
 public:
     AmdcDb2Sql();
-    virtual ~AmdcDb2Sql();
+    ~AmdcDb2Sql();
 
 public:
 ///////////////////////////////////
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcDb/AmdcDb/AmdcDbMisc.h b/MuonSpectrometer/Amdcsimrec/AmdcDb/AmdcDb/AmdcDbMisc.h
deleted file mode 100755
index 9c426bc08c1e68ebbf508b1c05592ea84bd2c89f..0000000000000000000000000000000000000000
--- a/MuonSpectrometer/Amdcsimrec/AmdcDb/AmdcDb/AmdcDbMisc.h
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-//
-#ifndef AmdcOracleMisc_H
-#define AmdcOracleMisc_H
-
-#include <sstream>
-#include <iostream>
-#include <iomanip>
-#include <fstream>
-#include <cstdio>
-#include <cmath>
-#include <algorithm>
-#include <iterator>
-#include <list>
-#include <vector>
-#include <map>
-#include <string>
-#include <utility>
-#include <cctype>
-
-#endif
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcDb/AmdcDb/AmdcDbRecord.h b/MuonSpectrometer/Amdcsimrec/AmdcDb/AmdcDb/AmdcDbRecord.h
index a8bbc8160f5f53c6cfd9deb7b3b508b6a59e388f..8830b0b3e256af582ca35d33395e65e1e2ca3a49 100755
--- a/MuonSpectrometer/Amdcsimrec/AmdcDb/AmdcDb/AmdcDbRecord.h
+++ b/MuonSpectrometer/Amdcsimrec/AmdcDb/AmdcDb/AmdcDbRecord.h
@@ -1,14 +1,15 @@
 /*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
-#ifndef AmdcDbRecord_H
-#define AmdcDbRecord_H
+#ifndef AMDCDB_AMDCDBRECORD_H
+#define AMDCDB_AMDCDBRECORD_H
 
 #include "RDBAccessSvc/IRDBRecord.h"
 
-/////////////////////////////////////////////////////////
-#include "AmdcDb/AmdcDbMisc.h"
+#include <string>
+#include <vector>
+#include <map>
 
   /**
    @class AmdcDbRecord
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcDb/AmdcDb/AmdcDbRecordset.h b/MuonSpectrometer/Amdcsimrec/AmdcDb/AmdcDb/AmdcDbRecordset.h
index 355a49cfa4c4b6aa9bf8cbe3dc525bbdb6a5c081..e6273a5f344c5b53a3cd56f6bd38c3f3d0d9639b 100755
--- a/MuonSpectrometer/Amdcsimrec/AmdcDb/AmdcDb/AmdcDbRecordset.h
+++ b/MuonSpectrometer/Amdcsimrec/AmdcDb/AmdcDb/AmdcDbRecordset.h
@@ -1,12 +1,12 @@
 /*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
-#ifndef AmdcDbRecordset_H
-#define AmdcDbRecordset_H
+#ifndef AMDCDB_AMDCDBRECORDSET_H
+#define AMDCDB_AMDCDBRECORDSET_H
 
 #include "RDBAccessSvc/IRDBRecordset.h"
-#include "AmdcDb/AmdcDbMisc.h"
+#include <string>
 
   /**
    @class AmdcDbRecordset
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcDb/AmdcDb/AmdcDbSvc.h b/MuonSpectrometer/Amdcsimrec/AmdcDb/AmdcDb/AmdcDbSvc.h
index 99067a046ba0b59e3e7b4a80040acc05e0f4781f..a3c0a36d73d7da37cee53b2073f13adc76058fd5 100755
--- a/MuonSpectrometer/Amdcsimrec/AmdcDb/AmdcDb/AmdcDbSvc.h
+++ b/MuonSpectrometer/Amdcsimrec/AmdcDb/AmdcDb/AmdcDbSvc.h
@@ -5,8 +5,9 @@
 #ifndef AMDCDB_AMDCDBSVC_H
 #define AMDCDB_AMDCDBSVC_H
 
-#include "AmdcDb/AmdcDbMisc.h"
 #include "RDBAccessSvc/IRDBAccessSvc.h"
+#include <string>
+#include <map>
 
 class pIRDBRecordset;
 
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcDb/AmdcDb/AmdcDbSvcMakerFromAmdc.h b/MuonSpectrometer/Amdcsimrec/AmdcDb/AmdcDb/AmdcDbSvcMakerFromAmdc.h
index e7200df3d4a62f587c86dab283a5f57c39237ac7..c5c18325687f76a55cf2e404c790169e33634cbe 100755
--- a/MuonSpectrometer/Amdcsimrec/AmdcDb/AmdcDb/AmdcDbSvcMakerFromAmdc.h
+++ b/MuonSpectrometer/Amdcsimrec/AmdcDb/AmdcDb/AmdcDbSvcMakerFromAmdc.h
@@ -2,10 +2,12 @@
   Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
-#ifndef AmdcDbSvcMakerFromAmdc_H
-#define AmdcDbSvcMakerFromAmdc_H
+#ifndef AMDCDB_AMDCDBSVCMAKERFROMAMDC_H
+#define AMDCDB_AMDCDBSVCMAKERFROMAMDC_H
+
+#include <string>
+#include <map>
 
-#include "AmdcDb/AmdcDbMisc.h"
 class Amdcsimrec;
 class AmdcDbSvc;
 class AmdcDbRecordset;
@@ -23,7 +25,7 @@ class AmdcDbRecord;
 class AmdcDbSvcMakerFromAmdc{
 public:
     AmdcDbSvcMakerFromAmdc();
-    virtual ~AmdcDbSvcMakerFromAmdc()=default;
+    ~AmdcDbSvcMakerFromAmdc()=default;
 
 public:
 ///////////////////////////////////
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcDb/AmdcDb/AmdcDbSvcMakerFromRDB.h b/MuonSpectrometer/Amdcsimrec/AmdcDb/AmdcDb/AmdcDbSvcMakerFromRDB.h
index 5290740e77d42ef7d43e321d7b10afce058eb7a1..322fb11b52178d70f15d24003c66681bd240484e 100755
--- a/MuonSpectrometer/Amdcsimrec/AmdcDb/AmdcDb/AmdcDbSvcMakerFromRDB.h
+++ b/MuonSpectrometer/Amdcsimrec/AmdcDb/AmdcDb/AmdcDbSvcMakerFromRDB.h
@@ -2,10 +2,11 @@
   Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
-#ifndef AmdcDbSvcMakerFromRDB_H
-#define AmdcDbSvcMakerFromRDB_H
+#ifndef AMDCDB_AMDCDBSVCMAKERFROMRDB_H
+#define AMDCDB_AMDCDBSVCMAKERFROMRDB_H
 
-#include "AmdcDb/AmdcDbMisc.h"
+#include <string>
+#include <map>
 
 class IRDBAccessSvc;
 class AmdcDbSvc;
@@ -27,7 +28,7 @@ namespace AmdcDb_detail {
 class AmdcDbSvcMakerFromRDB{
 public:
     AmdcDbSvcMakerFromRDB();
-    virtual ~AmdcDbSvcMakerFromRDB()=default;
+    ~AmdcDbSvcMakerFromRDB()=default;
 
 public:
 ///////////////////////////////////
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcDb/AmdcDb/IRDBAccessSvcWithUpdate.h b/MuonSpectrometer/Amdcsimrec/AmdcDb/AmdcDb/IRDBAccessSvcWithUpdate.h
deleted file mode 100755
index 9867fe5c22eb7c62a3c05359d3084333d8afd54c..0000000000000000000000000000000000000000
--- a/MuonSpectrometer/Amdcsimrec/AmdcDb/AmdcDb/IRDBAccessSvcWithUpdate.h
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-#ifndef IRDBAccessSvcWithUpdate_H
-#define IRDBAccessSvcWithUpdate_H
-
-#include "RDBAccessSvc/IRDBAccessSvc.h"
-
-#include "AthenaKernel/IOVSvcDefs.h"
-
-  /**
-   @class AmdcDb
-
-   This class dreived from IRDBAccessSvc adds the control state method
-   
-  @author samusog@cern.ch
-  
-  */
-
-class IRDBAccessSvcWithUpdate : virtual public IRDBAccessSvc {
-public:
-
-  virtual bool       InitializedSvc() = 0 ;
-  virtual bool       UsableSvc() = 0 ;
-  virtual StatusCode UpdatedSvc(IOVSVC_CALLBACK_ARGS) = 0 ;
-  
-};
-
-#endif
-
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcDb/src/AmdcDb.cxx b/MuonSpectrometer/Amdcsimrec/AmdcDb/src/AmdcDb.cxx
index 98baf256284f9383a2bdd3b3b60e6c3cf9eb9671..ea84217fc16b42caa9e73770efbe1eb8b3c79ac0 100755
--- a/MuonSpectrometer/Amdcsimrec/AmdcDb/src/AmdcDb.cxx
+++ b/MuonSpectrometer/Amdcsimrec/AmdcDb/src/AmdcDb.cxx
@@ -22,13 +22,10 @@
 AmdcDb::AmdcDb(const std::string& name,ISvcLocator* svc)
   : AthService(name,svc) 
   , m_emptyRecordset(new AmdcDbRecordset())
-  , p_AmdcsimrecAthenaSvc ( "AmdcsimrecAthenaSvc",name )
+  , m_AmdcsimrecAthenaSvc ( "AmdcsimrecAthenaSvc",name )
 {
-  p_AmdcDbSvcFromAmdc = 0 ;
-  p_AmdcDbSvcFromRDB  = 0 ;
-
-  m_IsInitialized = false;
-  m_IsUsable      = false;
+  m_AmdcDbSvcFromAmdc = 0 ;
+  m_AmdcDbSvcFromRDB  = 0 ;
 
   m_AMDBtag = "" ;
   m_UglyCodeOn = 0 ;
@@ -40,8 +37,6 @@ AmdcDb::AmdcDb(const std::string& name,ISvcLocator* svc)
   m_FromRDBEpsLengthCM = 5 ;
   m_FromRDBEpsAngle    = 6 ;
 
-  m_AmdcsimrecAthenaSvcUpdatedSvcDONE = false ;
-
   std::string DummyString = "AAAA" ;
   m_TabFromAmdcEpsLengthMM.push_back(DummyString) ; m_TabFromAmdcEpsLengthMM.clear() ;
   m_TabFromAmdcEpsLengthCM.push_back(DummyString) ; m_TabFromAmdcEpsLengthCM.clear() ;
@@ -84,15 +79,15 @@ AmdcDb::AmdcDb(const std::string& name,ISvcLocator* svc)
   declareProperty( "ValFromRDBEpsAngle"      , m_ValFromRDBEpsAngle     ) ;
    
 
-  declareProperty("AmdcsimrecAthenaSvc", p_AmdcsimrecAthenaSvc);
+  declareProperty("AmdcsimrecAthenaSvc", m_AmdcsimrecAthenaSvc);
   
-  p_detStore = 0 ;
+  m_detStore = 0 ;
 }
  
 /// Standard Destructor
 AmdcDb::~AmdcDb()  {
-  delete p_AmdcDbSvcFromAmdc ;
-  delete p_AmdcDbSvcFromRDB  ;
+  delete m_AmdcDbSvcFromAmdc ;
+  delete m_AmdcDbSvcFromRDB  ;
 }
  
 /// Service initialisation
@@ -135,106 +130,28 @@ StatusCode AmdcDb::initialize() {
   ATH_MSG_INFO( "Keys are  (key) "  << m_detectorKey << " (node) " << m_detectorNode ) ;
 
 //Set pointer on DetectorStore 
-  sc = service("DetectorStore",p_detStore);
+  sc = service("DetectorStore",m_detStore);
   if ( sc.isFailure() ) {
     ATH_MSG_FATAL( "DetectorStore service not found !" ) ;
     return StatusCode::FAILURE;
   }
 
-//Retrieve p_AmdcsimrecAthenaSvc and set up call back
-  if ( p_AmdcsimrecAthenaSvc.retrieve().isFailure() ) {
-    ATH_MSG_FATAL( "Failed to retrieve service " << p_AmdcsimrecAthenaSvc ) ;
+//Retrieve m_AmdcsimrecAthenaSvc and set up call back
+  if ( m_AmdcsimrecAthenaSvc.retrieve().isFailure() ) {
+    ATH_MSG_FATAL( "Failed to retrieve service " << m_AmdcsimrecAthenaSvc ) ;
     return StatusCode::FAILURE;
   } 
-  ATH_MSG_INFO( "Retrieved service " << p_AmdcsimrecAthenaSvc ) ;
-
-  if (p_AmdcsimrecAthenaSvc->InitializedSvc()) {
-    ATH_MSG_INFO( "p_AmdcsimrecAthenaSvc->InitializedSvc() is true " ) ;
-    m_AmdcsimrecAthenaSvcUpdatedSvcDONE = true ; 
-
-  }else{
-    ATH_MSG_INFO( "p_AmdcsimrecAthenaSvc->InitializedSvc() is false " ) ;
-
-    sc=regFcnAmdcsimrecAthenaSvcUpdatedSvc();
-    if ( sc.isFailure() ) {
-      ATH_MSG_FATAL("regFcnAmdcsimrecAthenaSvcUpdatedSvc failed" ) ;
-      return StatusCode::FAILURE;
-    }
-    ATH_MSG_INFO( "Done: regFcnAmdcsimrecAthenaSvcUpdatedSvc " ) ;
+  ATH_MSG_INFO( "Retrieved service " << m_AmdcsimrecAthenaSvc ) ;
 
-  }
-
-//Do something now if possible
-  if ( m_AmdcsimrecAthenaSvcUpdatedSvcDONE ){
-    ATH_MSG_INFO( "m_AmdcsimrecAthenaSvcUpdatedSvcDONE found true in initialize " ) ;
-      StatusCode sc = DoUpdatedSvc() ;
-      if ( sc.isFailure() ) {
-        ATH_MSG_FATAL( "DoUpdatedSvc failed" ) ; 
-        return StatusCode::FAILURE;
-      }
-      m_IsInitialized = true;
-  }
+  ATH_CHECK(DoUpdatedSvc());
   
   ATH_MSG_INFO( "Initialisation ended     " ) ;
   return StatusCode::SUCCESS;
 
 }
- 
-StatusCode AmdcDb::regFcnAmdcsimrecAthenaSvcUpdatedSvc()
-{
-
-  StatusCode sc = p_detStore->regFcn(
-                         &AmdcsimrecAthenaSvc::UpdatedSvc,(&*p_AmdcsimrecAthenaSvc),
-                         &AmdcDb::AmdcsimrecAthenaSvcUpdatedSvc,this,true
-                         );
-  if (sc.isFailure()) {
-    ATH_MSG_FATAL( "Unable to register callback on AmdcDb::AmdcsimrecAthenaSvcUpdatedSvc from AmdcsimrecAthenaSvc::UpdatedSvc " ) ;
-    return StatusCode::FAILURE;
-  }
-  ATH_MSG_INFO( "Done: Register callback on AmdcDb::AmdcsimrecAthenaSvcUpdatedSvc from AmdcsimrecAthenaSvc::UpdatedSvc" ) ;
-  sc = p_detStore->regFcn(
-              &AmdcDb::AmdcsimrecAthenaSvcUpdatedSvc,this,
-              &IRDBAccessSvcWithUpdate::UpdatedSvc,dynamic_cast<IRDBAccessSvcWithUpdate*>(this),true
-              );
-  if (sc.isFailure()) {
-    ATH_MSG_FATAL( "Unable to register callback on AmdcDb::UpdatedSvc from AmdcDb::AmdcsimrecAthenaSvcUpdatedSvc " ) ;
-    return StatusCode::FAILURE;
-  }
-  ATH_MSG_INFO( "Done: Register callback on AmdcDb::UpdatedSvc from AmdcDb::AmdcsimrecAthenaSvcUpdatedSvc " ) ;
-
-  return StatusCode::SUCCESS;
-  
-}
-
-StatusCode AmdcDb::AmdcsimrecAthenaSvcUpdatedSvc(IOVSVC_CALLBACK_ARGS)
-{
-  ATH_MSG_INFO( "AmdcsimrecAthenaSvcUpdatedSvc called     " ) ;  
-
-  if ( !(p_AmdcsimrecAthenaSvc->UsableSvc()) ) {
-    ATH_MSG_INFO( "BUT p_AmdcsimrecAthenaSvc found NOT usable yet  " ) ;  
-    return StatusCode::SUCCESS;
-  }else{
-    m_AmdcsimrecAthenaSvcUpdatedSvcDONE = true ;
-    ATH_MSG_INFO( "AND p_AmdcsimrecAthenaSvc found usable   " ) ;  
-  }
-
-
-  StatusCode sc = DoUpdatedSvc() ;
-  if ( sc.isFailure() ) {
-    ATH_MSG_FATAL( "DoUpdatedSvc failed" ) ; 
-    return StatusCode::FAILURE;
-  }
-
-  return StatusCode::SUCCESS;
-
-}
 
 StatusCode AmdcDb::DoUpdatedSvc()
 {
-  if ( !m_AmdcsimrecAthenaSvcUpdatedSvcDONE ){
-    ATH_MSG_INFO( "DoUpdatedSvc() called BUT m_AmdcsimrecAthenaSvcUpdatedSvcDONE is false    " ) ;
-    return StatusCode::SUCCESS;
-  }
 
 //Set Db Keys
   IRDBAccessSvc* pIRDBAccessSvc;
@@ -245,11 +162,11 @@ StatusCode AmdcDb::DoUpdatedSvc()
   }
   
 //Get Amdc geometry
-  Amdcsimrec* pAmdcsimrec = p_AmdcsimrecAthenaSvc->GetAmdcsimrec();
+  Amdcsimrec* pAmdcsimrec = m_AmdcsimrecAthenaSvc->GetAmdcsimrec();
    
-//Set p_AmdcDbSvcFromAmdc
-  delete p_AmdcDbSvcFromAmdc ;
-  p_AmdcDbSvcFromAmdc = new AmdcDbSvc();
+//Set m_AmdcDbSvcFromAmdc
+  delete m_AmdcDbSvcFromAmdc ;
+  m_AmdcDbSvcFromAmdc = new AmdcDbSvc();
   AmdcDbSvcMakerFromAmdc aAmdcDbSvcMakerFromAmdc;
   aAmdcDbSvcMakerFromAmdc.SetUglyCodeOn(m_UglyCodeOn);
   aAmdcDbSvcMakerFromAmdc.SetEpsLengthMM(m_FromAmdcEpsLengthMM);
@@ -261,33 +178,23 @@ StatusCode AmdcDb::DoUpdatedSvc()
   ItemMax =  m_TabFromAmdcEpsAngle.size()    ; for (int Item=0; Item<ItemMax ; Item++) aAmdcDbSvcMakerFromAmdc.SetEpsAngle   ( m_TabFromAmdcEpsAngle[Item]    , m_ValFromAmdcEpsAngle[Item]    ) ; 
   
   
-  aAmdcDbSvcMakerFromAmdc.Set(pAmdcsimrec,p_AmdcDbSvcFromAmdc);
+  aAmdcDbSvcMakerFromAmdc.Set(pAmdcsimrec,m_AmdcDbSvcFromAmdc);
  
-//Set p_AmdcDbSvcFromRDB
-  delete p_AmdcDbSvcFromRDB  ;
-  p_AmdcDbSvcFromRDB = new AmdcDbSvc();
+//Set m_AmdcDbSvcFromRDB
+  delete m_AmdcDbSvcFromRDB  ;
+  m_AmdcDbSvcFromRDB = new AmdcDbSvc();
   AmdcDbSvcMakerFromRDB aAmdcDbSvcMakerFromRDB;
   aAmdcDbSvcMakerFromRDB.SetEpsLengthMM(m_FromRDBEpsLengthMM);
   aAmdcDbSvcMakerFromRDB.SetEpsLengthCM(m_FromRDBEpsLengthCM);
   aAmdcDbSvcMakerFromRDB.SetEpsAngle(m_FromRDBEpsAngle);
-  aAmdcDbSvcMakerFromRDB.Set(m_detectorKey,m_detectorNode,pIRDBAccessSvc,p_AmdcDbSvcFromRDB);
+  aAmdcDbSvcMakerFromRDB.Set(m_detectorKey,m_detectorNode,pIRDBAccessSvc,m_AmdcDbSvcFromRDB);
   ItemMax =  m_TabFromRDBEpsLengthMM.size() ; for (int Item=0; Item<ItemMax ; Item++) aAmdcDbSvcMakerFromRDB.SetEpsLengthMM( m_TabFromRDBEpsLengthMM[Item] , m_ValFromRDBEpsLengthMM[Item] ) ;
   ItemMax =  m_TabFromRDBEpsLengthCM.size() ; for (int Item=0; Item<ItemMax ; Item++) aAmdcDbSvcMakerFromRDB.SetEpsLengthCM( m_TabFromRDBEpsLengthCM[Item] , m_ValFromRDBEpsLengthCM[Item] ) ;
   ItemMax =  m_TabFromRDBEpsAngle.size()    ; for (int Item=0; Item<ItemMax ; Item++) aAmdcDbSvcMakerFromRDB.SetEpsAngle   ( m_TabFromRDBEpsAngle[Item]    , m_ValFromRDBEpsAngle[Item]    ) ; 
 
-  m_IsUsable      = true ;
-  
   return StatusCode::SUCCESS;
 }
 
-bool AmdcDb::UsableSvc()      {return m_IsUsable     ;}
-bool AmdcDb::InitializedSvc() {return m_IsInitialized;}
-StatusCode AmdcDb::UpdatedSvc(IOVSVC_CALLBACK_ARGS)
-{
-  ATH_MSG_INFO( "UpdatedSvc done     " ) ;
-  return StatusCode::SUCCESS;
-}
- 
 /// Service finalisation
 StatusCode AmdcDb::finalize(){return StatusCode::SUCCESS;}
  
@@ -308,10 +215,10 @@ IRDBRecordset_ptr AmdcDb::getRecordsetPtr(const std::string& node,
                                           const std::string& /*connName*/)
 {
   if(tag=="RDB") {
-    return p_AmdcDbSvcFromRDB->getRecordset(node);
+    return m_AmdcDbSvcFromRDB->getRecordset(node);
   }
   else if(tag=="Amdc") {
-    return p_AmdcDbSvcFromAmdc->getRecordset(node);
+    return m_AmdcDbSvcFromAmdc->getRecordset(node);
   }
   else {
     return m_emptyRecordset;
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcDb/src/AmdcDb2Sql.cxx b/MuonSpectrometer/Amdcsimrec/AmdcDb/src/AmdcDb2Sql.cxx
index 1fcbe908bd82434f3ceeb273ed9d4e3666e39d5c..5389b2f202a8ff7c2ed83ba8c93b2938602a2524 100755
--- a/MuonSpectrometer/Amdcsimrec/AmdcDb/src/AmdcDb2Sql.cxx
+++ b/MuonSpectrometer/Amdcsimrec/AmdcDb/src/AmdcDb2Sql.cxx
@@ -10,6 +10,9 @@
 #include "AmdcDb/AmdcDb2Sql.h"
 
 #include "AmdcDb/AmdcDbRecord.h"
+#include <iostream>
+#include <fstream>
+#include <iomanip>
 
 AmdcDb2Sql::AmdcDb2Sql(){ 
 
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcDb/src/AmdcDbRecord.cxx b/MuonSpectrometer/Amdcsimrec/AmdcDb/src/AmdcDbRecord.cxx
index 71af92deefa761e0d3d9f55346bcc48c791a035d..0e5b658bf2531d9ab80ad8068a60a6ee4f4e6f19 100755
--- a/MuonSpectrometer/Amdcsimrec/AmdcDb/src/AmdcDbRecord.cxx
+++ b/MuonSpectrometer/Amdcsimrec/AmdcDb/src/AmdcDbRecord.cxx
@@ -4,6 +4,10 @@
 
 #include "AmdcDb/AmdcDbRecord.h"
 #include <cmath>
+#include <iostream>
+#include <fstream>
+#include <iomanip>
+
 AmdcDbRecord::AmdcDbRecord(long UniversalId,std::string NameOfTheRecord){
 
   m_MapOfint["Dummy"]    =  0  ; m_MapOfint.clear();
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcDb/src/AmdcDbRecordset.cxx b/MuonSpectrometer/Amdcsimrec/AmdcDb/src/AmdcDbRecordset.cxx
index c478be0c72c96cf51d251bcde0bfdf2729db4224..5339df63b7da9a93f5768cc5e8aa5b6b6674bc0e 100755
--- a/MuonSpectrometer/Amdcsimrec/AmdcDb/src/AmdcDbRecordset.cxx
+++ b/MuonSpectrometer/Amdcsimrec/AmdcDb/src/AmdcDbRecordset.cxx
@@ -1,9 +1,10 @@
 /*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
 #include "AmdcDb/AmdcDbRecordset.h"
 #include "AmdcDb/AmdcDbRecord.h"
+#include <iostream>
 
 AmdcDbRecordset::AmdcDbRecordset(){}
 
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcDb/src/AmdcDbSvcMakerFromRDB.cxx b/MuonSpectrometer/Amdcsimrec/AmdcDb/src/AmdcDbSvcMakerFromRDB.cxx
index da7eb91953fa29b4bbed3e61010e0c09b6b90117..a571d091bf4fe4228b7e91dacc7031101abb9df3 100755
--- a/MuonSpectrometer/Amdcsimrec/AmdcDb/src/AmdcDbSvcMakerFromRDB.cxx
+++ b/MuonSpectrometer/Amdcsimrec/AmdcDb/src/AmdcDbSvcMakerFromRDB.cxx
@@ -10,6 +10,8 @@
 #include "AmdcDb/AmdcDbRecordset.h"
 #include "AmdcDb/AmdcDbRecord.h"
 #include "boost/range/iterator_range.hpp"
+#include <iostream>
+#include <sstream>
 
 namespace AmdcDb_detail {
 
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcMGM/AmdcMGM/AmdcDumpGeoModel.h b/MuonSpectrometer/Amdcsimrec/AmdcMGM/AmdcMGM/AmdcDumpGeoModel.h
index 5450ca47cf6c062e2a38c0b7426854cbd686ad35..2fbcfef9d71eba2e548247c294d4473523b4f8c1 100644
--- a/MuonSpectrometer/Amdcsimrec/AmdcMGM/AmdcMGM/AmdcDumpGeoModel.h
+++ b/MuonSpectrometer/Amdcsimrec/AmdcMGM/AmdcMGM/AmdcDumpGeoModel.h
@@ -2,8 +2,8 @@
   Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
-#ifndef AmdcDumpGeoModel_H
-#define AmdcDumpGeoModel_H
+#ifndef AMDCMGM_AMDCDUMPGEOMODEL_H
+#define AMDCMGM_AMDCDUMPGEOMODEL_H
 
 #include "AthenaBaseComps/AthAlgorithm.h"
 #include "GaudiKernel/ServiceHandle.h"
@@ -13,9 +13,12 @@
 #include "GeoPrimitives/GeoPrimitives.h"
 
 /////////////////////////////////////////////////////////////////////////////
-#include "AmdcMGM/AmdcMGMMisc.h"
 #include "AmdcAth/AmdcsimrecAthenaSvc.h"
 
+#include <string>
+#include <vector>
+#include <fstream>
+
 namespace MuonGM {
     class MdtReadoutElement;
     class RpcReadoutElement;
@@ -40,8 +43,6 @@ public:
    StatusCode initialize();
    StatusCode execute(){return StatusCode::SUCCESS;}
 
-   StatusCode DoItCallback(IOVSVC_CALLBACK_ARGS);
-
 private:
 ///////////////////////////////////
 //Functions
@@ -210,16 +211,12 @@ private:
    double m_Csc_MaxDiffzz  ; //!< Max deviation
    double m_Csc_MaxDifftt  ; //!< Max deviation
 
-   bool m_AmdcsimrecAthenaSvcUpdatedSvcDONE     ; //!< status of p_AmdcsimrecAthenaSvc
-
    int m_KountCallsDoIt     ; //!< Kount calls to DoIt
 
    ServiceHandle<AmdcsimrecAthenaSvc> p_AmdcsimrecAthenaSvc;  //!< Pointer On AmdcsimrecAthenaSvc
    ServiceHandle<Muon::IMuonIdHelperSvc> m_idHelperSvc {this, "MuonIdHelperSvc", "Muon::MuonIdHelperSvc/MuonIdHelperSvc"};
 
-   StatusCode regFcnDoIt();
    StatusCode DoIt();
-
 };
 
 #endif
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcMGM/AmdcMGM/AmdcMGMMisc.h b/MuonSpectrometer/Amdcsimrec/AmdcMGM/AmdcMGM/AmdcMGMMisc.h
deleted file mode 100644
index eceffe86cfba58d06ef01d9c4bae8b9e29fa3580..0000000000000000000000000000000000000000
--- a/MuonSpectrometer/Amdcsimrec/AmdcMGM/AmdcMGM/AmdcMGMMisc.h
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-//
-#ifndef AmdcMGMMISC_H
-#define AmdcMGMMISC_H
-
-#include <sstream>
-#include <iostream>
-#include <iomanip>
-#include <fstream>
-#include <cstdio>
-#include <cmath>
-#include <algorithm>
-#include <iterator>
-#include <list>
-#include <vector>
-#include <map>
-#include <string>
-#include <utility>
-
-#endif
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcMGM/src/AmdcDumpGeoModel.cxx b/MuonSpectrometer/Amdcsimrec/AmdcMGM/src/AmdcDumpGeoModel.cxx
index 3a98b992d71064fa55873d5d5b7ce6155cf9263f..a5e18705b87a3c58d003362d009e312264de9d33 100644
--- a/MuonSpectrometer/Amdcsimrec/AmdcMGM/src/AmdcDumpGeoModel.cxx
+++ b/MuonSpectrometer/Amdcsimrec/AmdcMGM/src/AmdcDumpGeoModel.cxx
@@ -16,15 +16,12 @@
 #include "AmdcMGM/AmdcDumpGeoModel.h"
 
 
-AmdcDumpGeoModel::AmdcDumpGeoModel(const std::string& name, ISvcLocator* pSvcLocator) :
-  AthAlgorithm(name, pSvcLocator),
-p_AmdcsimrecAthenaSvc ( "AmdcsimrecAthenaSvc",name ) 
+AmdcDumpGeoModel::AmdcDumpGeoModel(const std::string& name, ISvcLocator* pSvcLocator) 
+  : AthAlgorithm(name, pSvcLocator)
+  , p_AmdcsimrecAthenaSvc ( "AmdcsimrecAthenaSvc",name ) 
 {
-
-  m_AmdcsimrecAthenaSvcUpdatedSvcDONE = false ; 
   m_KountCallsDoIt   = 0 ;
-
-   p_MuonDetectorManager = 0 ; 
+  p_MuonDetectorManager = 0 ; 
 
 // CheckTEC if 1 perform the comparison for TEC 
 // EpsLoTEC Min value on the position difference to output warning for TEC
@@ -64,16 +61,16 @@ p_AmdcsimrecAthenaSvc ( "AmdcsimrecAthenaSvc",name )
    declareProperty("CenterCscMean"             , m_CenterCscMean            = 0     ) ;
    declareProperty("EmergencyOut"              , m_EmergencyOut             = 1     ) ;
 
-   declareProperty("AntiStationSelection"          , m_AntiStationSelection         = 0     ) ;
-   declareProperty("AntiStationSelected"           , m_AntiStationSelected           ) ;
+   declareProperty("AntiStationSelection"      , m_AntiStationSelection     = 0     ) ;
+   declareProperty("AntiStationSelected"       , m_AntiStationSelected              ) ;
 
    declareProperty("StationSelection"          , m_StationSelection         = 0     ) ;
-   declareProperty("StationSelected"           , m_StationSelected           ) ;
+   declareProperty("StationSelected"           , m_StationSelected                  ) ;
 
    declareProperty("ChamberSelection"          , m_ChamberSelection         = 0     ) ;
-   declareProperty("StationNameSelected"       , m_StationNameSelected       ) ;
-   declareProperty("StationAbsAmdcJzzSelected" , m_StationAbsAmdcJzzSelected ) ;
-   declareProperty("StationAmdcJffSelected"    , m_StationAmdcJffSelected    ) ;
+   declareProperty("StationNameSelected"       , m_StationNameSelected              ) ;
+   declareProperty("StationAbsAmdcJzzSelected" , m_StationAbsAmdcJzzSelected        ) ;
+   declareProperty("StationAmdcJffSelected"    , m_StationAmdcJffSelected           ) ;
 
 
    declareProperty("AmdcsimrecAthenaSvc", p_AmdcsimrecAthenaSvc);
@@ -183,21 +180,7 @@ StatusCode AmdcDumpGeoModel::initialize(){
     ATH_CHECK(p_AmdcsimrecAthenaSvc.retrieve());
     ATH_MSG_INFO( "Retrieved service " << p_AmdcsimrecAthenaSvc ) ;
 
-    if (p_AmdcsimrecAthenaSvc->InitializedSvc()) {
-      ATH_MSG_INFO( "p_AmdcsimrecAthenaSvc->InitializedSvc() is true " ) ;
-      m_AmdcsimrecAthenaSvcUpdatedSvcDONE = true ; 
-    }else{
-      ATH_MSG_INFO( "p_AmdcsimrecAthenaSvc->InitializedSvc() is false " ) ;
-      ATH_CHECK(regFcnDoIt());
-      ATH_MSG_INFO( "Done: regFcnDoIt " ) ;
-    }
-
-//  Do something now if possible
-    if ( m_AmdcsimrecAthenaSvcUpdatedSvcDONE ){
-      ATH_MSG_INFO( "m_AmdcsimrecAthenaSvcUpdatedSvcDONE found true in initialize " ) ;
-      ATH_CHECK(DoIt());
-    }
-
+    ATH_CHECK(DoIt());
   }
   
   ATH_MSG_INFO( "Initialisation ended     " ) ;
@@ -207,53 +190,10 @@ StatusCode AmdcDumpGeoModel::initialize(){
 }
 
 // Do it
-StatusCode AmdcDumpGeoModel::DoItCallback(IOVSVC_CALLBACK_ARGS)
-{
-  ATH_MSG_INFO( "DoItCallback called     " ) ;  
-
-  if ( !(p_AmdcsimrecAthenaSvc->UsableSvc()) ) {
-    ATH_MSG_INFO( "BUT p_AmdcsimrecAthenaSvc found NOT usable yet  " ) ;  
-    return StatusCode::SUCCESS;
-  }else{
-    m_AmdcsimrecAthenaSvcUpdatedSvcDONE = true ;
-    ATH_MSG_INFO( "AND p_AmdcsimrecAthenaSvc found usable   " ) ;  
-  }
-
-  StatusCode sc = DoIt() ;
-  if ( sc.isFailure() ) {
-    ATH_MSG_FATAL( "DoIt failed" ) ; 
-    return StatusCode::FAILURE;
-  }
-  
-  return StatusCode::SUCCESS;
-
-}
-
-StatusCode AmdcDumpGeoModel::regFcnDoIt()
-{
-
-  StatusCode sc = detStore()->regFcn(
-                         &AmdcsimrecAthenaSvc::UpdatedSvc,(&*p_AmdcsimrecAthenaSvc),
-                         &AmdcDumpGeoModel::DoItCallback,this,true
-                        );
-  if (sc.isFailure()) {
-    ATH_MSG_FATAL( "Unable to register callback on AmdcDumpGeoModel::DoItCallback from AmdcsimrecAthenaSvc::UpdatedSvc " ) ;
-    return StatusCode::FAILURE;
-  }
-  ATH_MSG_INFO( "Done: Register callback on AmdcDumpGeoModel::DoItCallback from AmdcsimrecAthenaSvc::UpdatedSvc " ) ;
-
-  return StatusCode::SUCCESS;
-  
-}
 StatusCode AmdcDumpGeoModel::DoIt() 
 {
   ATH_MSG_INFO( "DoIt called     " ) ;  
 
-  if ( !m_AmdcsimrecAthenaSvcUpdatedSvcDONE ){
-    ATH_MSG_INFO( "DoIt() called BUT m_AmdcsimrecAthenaSvcUpdatedSvcDONE is false    " ) ;
-    return StatusCode::SUCCESS;
-  }
-
   if (m_SwitchOff == 0) {
 
 //  Count calls
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcOracle/AmdcOracle/AmdcDumpOracle.h b/MuonSpectrometer/Amdcsimrec/AmdcOracle/AmdcOracle/AmdcDumpOracle.h
index da52d8ed242aecdbd904ce4be1ade22cb8a675ba..bae89b19c50f5acc9df9edd517f5c7c1fe5c5a56 100755
--- a/MuonSpectrometer/Amdcsimrec/AmdcOracle/AmdcOracle/AmdcDumpOracle.h
+++ b/MuonSpectrometer/Amdcsimrec/AmdcOracle/AmdcOracle/AmdcDumpOracle.h
@@ -1,17 +1,14 @@
 /*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
-#ifndef AmdcDumpOracle_H
-#define AmdcDumpOracle_H
+#ifndef AMDCORACLE_AMDCDUMPORACLE_H
+#define AMDCORACLE_AMDCDUMPORACLE_H
 
 #include "AthenaBaseComps/AthAlgorithm.h"
 #include "GaudiKernel/ToolHandle.h"
 #include "GaudiKernel/ServiceHandle.h" 
-#include "AmdcDb/IRDBAccessSvcWithUpdate.h"
-
-/////////////////////////////////////////////////////////////////////////////
-#include "AmdcOracle/AmdcOracleMisc.h"
+#include "AmdcDb/AmdcDb.h"
 
 class IRDBAccessSvc;
 
@@ -35,8 +32,6 @@ public:
    StatusCode execute();
    StatusCode finalize();
 
-   StatusCode DoItCallback(IOVSVC_CALLBACK_ARGS);
-
 private:
 ///////////////////////////////////
 
@@ -70,9 +65,6 @@ private:
    void DumpXtomoData( IRDBAccessSvc* pIRDBAccessSvc , std::string TagAdd ,int& Kwarn, int& Kchck );
 
 
-   bool m_IRDBAccessSvcWithUpdateUpdatedSvcDONE     ; //!< status of p_IRDBAccessSvcWithUpdate
-   
-   
    int m_SwitchOff     ; //!< Control execution
  
    int m_WarningLevel  ; //!< Warning Level
@@ -84,9 +76,8 @@ private:
   
    int m_KountCallsDoIt     ; //!< Kount calls to DoIt
   
-   ServiceHandle<IRDBAccessSvcWithUpdate> p_IRDBAccessSvcWithUpdate;  //!< Pointer On IRDBAccessSvcWithUpdate
+   ServiceHandle<IRDBAccessSvc> m_amdcDb;  //!< Pointer to AmdcDb
 
-   StatusCode regFcnDoIt();
    StatusCode DoIt();
   
 };
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcOracle/AmdcOracle/AmdcOracleMisc.h b/MuonSpectrometer/Amdcsimrec/AmdcOracle/AmdcOracle/AmdcOracleMisc.h
deleted file mode 100755
index eb06285bc7e83c50989312ea2554b311310e2dc7..0000000000000000000000000000000000000000
--- a/MuonSpectrometer/Amdcsimrec/AmdcOracle/AmdcOracle/AmdcOracleMisc.h
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-//
-#ifndef AmdcOracleMisc_H
-#define AmdcOracleMisc_H
-
-#include <sstream>
-#include <iostream>
-#include <iomanip>
-#include <fstream>
-#include <cstdio>
-#include <cmath>
-#include <algorithm>
-#include <iterator>
-#include <list>
-#include <vector>
-#include <map>
-#include <string>
-#include <utility>
-
-#endif
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcOracle/src/AmdcDumpOracle.cxx b/MuonSpectrometer/Amdcsimrec/AmdcOracle/src/AmdcDumpOracle.cxx
index 61f445ee19bd95698dbc6752700615e6723885fc..7df7f6d4a0adb8f1f9582390dc1461c38f8530df 100755
--- a/MuonSpectrometer/Amdcsimrec/AmdcOracle/src/AmdcDumpOracle.cxx
+++ b/MuonSpectrometer/Amdcsimrec/AmdcOracle/src/AmdcDumpOracle.cxx
@@ -11,21 +11,18 @@
 #include "RDBAccessSvc/IRDBRecord.h"
 #include "RDBAccessSvc/IRDBRecordset.h"
 
-/////////////////////////////////////////////////////////
-#include "AmdcDb/IRDBAccessSvcWithUpdate.h"
-
 #include "AmdcOracle/AmdcDumpOracle.h"
 
 #include "AmdcDb/AmdcDbRecord.h"
 
 #include "AmdcDb/AmdcDb2Sql.h"
 
-AmdcDumpOracle::AmdcDumpOracle(const std::string& name, ISvcLocator* pSvcLocator) :
-  AthAlgorithm(name, pSvcLocator),
-p_IRDBAccessSvcWithUpdate ( "AmdcDb",name )
-{
+#include <fstream>
 
-  m_IRDBAccessSvcWithUpdateUpdatedSvcDONE = false ; 
+AmdcDumpOracle::AmdcDumpOracle(const std::string& name, ISvcLocator* pSvcLocator) 
+  : AthAlgorithm(name, pSvcLocator)
+  , m_amdcDb ( "AmdcDb",name )
+{
 
 //Set Default values
    m_KountCallsDoIt   = 0 ;
@@ -56,42 +53,9 @@ StatusCode AmdcDumpOracle::initialize(){
   ATH_MSG_INFO( "= SwitchOff          " << m_SwitchOff          ) ;
   ATH_MSG_INFO( "================================" ) ;
 
-
   if (m_SwitchOff == 0) {
-    StatusCode sc ;
-
-//  Retrieve p_IRDBAccessSvcWithUpdate and set up call back
-    if ( p_IRDBAccessSvcWithUpdate.retrieve().isFailure() ) {
-      ATH_MSG_FATAL( "Failed to retrieve service " << p_IRDBAccessSvcWithUpdate ) ;
-      return StatusCode::FAILURE;
-    } 
-    ATH_MSG_INFO( "Retrieved service " << p_IRDBAccessSvcWithUpdate ) ;
-
-    if (p_IRDBAccessSvcWithUpdate->InitializedSvc()) {
-      ATH_MSG_INFO( "p_IRDBAccessSvcWithUpdate->InitializedSvc() is true " ) ;
-      m_IRDBAccessSvcWithUpdateUpdatedSvcDONE = true ; 
-    }else{
-      ATH_MSG_INFO( "p_IRDBAccessSvcWithUpdate->InitializedSvc() is false " ) ;
-
-      sc=regFcnDoIt();
-      if ( sc.isFailure() ) {
-        ATH_MSG_FATAL("regFcnDoIt failed" ) ;
-        return StatusCode::FAILURE;
-      }
-      ATH_MSG_INFO( "Done: regFcnDoIt " ) ;
-     
-    }
-
-//  Do something now if possible
-    if ( m_IRDBAccessSvcWithUpdateUpdatedSvcDONE ){
-      ATH_MSG_INFO( "m_IRDBAccessSvcWithUpdateUpdatedSvcDONE found true in initialize " ) ;
-      sc = DoIt() ;
-      if ( sc.isFailure() ) {
-        ATH_MSG_FATAL( "DoIt failed" ) ; 
-        return StatusCode::FAILURE;
-      }
-    }
-  
+    ATH_CHECK(m_amdcDb.retrieve());
+    ATH_CHECK(DoIt());
   }
 
   ATH_MSG_INFO( "Initialisation ended     " ) ;
@@ -100,54 +64,10 @@ StatusCode AmdcDumpOracle::initialize(){
 
 }
 
-// Do it
-StatusCode AmdcDumpOracle::DoItCallback(IOVSVC_CALLBACK_ARGS)
-{
-  ATH_MSG_INFO( "DoItCallback called     " ) ;  
-
-  if ( !(p_IRDBAccessSvcWithUpdate->UsableSvc()) ) {
-    ATH_MSG_INFO( "BUT p_IRDBAccessSvcWithUpdate found NOT usable yet  " ) ;  
-    return StatusCode::SUCCESS;
-  }else{
-    m_IRDBAccessSvcWithUpdateUpdatedSvcDONE = true ;
-    ATH_MSG_INFO( "AND p_IRDBAccessSvcWithUpdate found usable   " ) ;  
-  }
-
-  StatusCode sc = DoIt() ;
-  if ( sc.isFailure() ) {
-    ATH_MSG_FATAL( "DoIt failed" ) ; 
-    return StatusCode::FAILURE;
-  }
-  
-  return StatusCode::SUCCESS;
-
-}
-
-StatusCode AmdcDumpOracle::regFcnDoIt()
-{
-
-  StatusCode sc = detStore()->regFcn(
-                         &IRDBAccessSvcWithUpdate::UpdatedSvc,dynamic_cast<IRDBAccessSvcWithUpdate*>(&*p_IRDBAccessSvcWithUpdate),
-                         &AmdcDumpOracle::DoItCallback,this,true
-                        );
-  if (sc.isFailure()) {
-    ATH_MSG_FATAL( "Unable to register callback on AmdcDumpOracle::DoItCallback from IRDBAccessSvcWithUpdate::UpdatedSvc " ) ;
-    return StatusCode::FAILURE;
-  }
-  ATH_MSG_INFO( "Done: Register callback on AmdcDumpOracle::DoItCallback from IRDBAccessSvcWithUpdate::UpdatedSvc " ) ;
-
-  return StatusCode::SUCCESS;
-  
-}
 StatusCode AmdcDumpOracle::DoIt()
 {
   ATH_MSG_INFO( "DoIt called     " ) ;  
 
-  if ( !m_IRDBAccessSvcWithUpdateUpdatedSvcDONE ){
-    ATH_MSG_INFO( "DoIt() called BUT m_IRDBAccessSvcWithUpdateUpdatedSvcDONE is false    " ) ;
-    return StatusCode::SUCCESS;
-  }
-
   if (m_SwitchOff == 0) {
   
 //  Count calls
@@ -166,10 +86,10 @@ StatusCode AmdcDumpOracle::DoIt()
     aAmdcDb2Sql.SetUseKeysOn(m_UseKeysOn);
     
     StringBidon = "Out.AmdcOracle.DB" + KountCallsDoItASstring ;
-    aAmdcDb2Sql.DoIt(StringBidon,"RDB",(&*p_IRDBAccessSvcWithUpdate));
+    aAmdcDb2Sql.DoIt(StringBidon,"RDB",m_amdcDb.get());
     
     StringBidon = "Out.AmdcOracle.AM" + KountCallsDoItASstring ;
-    aAmdcDb2Sql.DoIt(StringBidon,"Amdc",(&*p_IRDBAccessSvcWithUpdate));
+    aAmdcDb2Sql.DoIt(StringBidon,"Amdc",m_amdcDb.get());
 
 //  Check contents
     StringBidon = "Out.AmdcOracle" + KountCallsDoItASstring ;
@@ -185,7 +105,7 @@ StatusCode AmdcDumpOracle::DoIt()
 //  Check ASZT
     Kwarn = 0 ;
     Kchck = 0 ;
-    DumpASZT((&*p_IRDBAccessSvcWithUpdate),KountCallsDoItASstring,Kwarn,Kchck);
+    DumpASZT(m_amdcDb.get(),KountCallsDoItASstring,Kwarn,Kchck);
     KwarnTot = KwarnTot + Kwarn ;
     KchckTot = KchckTot + Kchck ;
     OutFile 
@@ -195,7 +115,7 @@ StatusCode AmdcDumpOracle::DoIt()
 //  Check ISZT
     Kwarn = 0 ;
     Kchck = 0 ;
-    DumpISZT((&*p_IRDBAccessSvcWithUpdate),KountCallsDoItASstring,Kwarn,Kchck);
+    DumpISZT(m_amdcDb.get(),KountCallsDoItASstring,Kwarn,Kchck);
     KwarnTot = KwarnTot + Kwarn ;
     KchckTot = KchckTot + Kchck ;
     OutFile 
@@ -205,7 +125,7 @@ StatusCode AmdcDumpOracle::DoIt()
 //  Check APTP
     Kwarn = 0 ;
     Kchck = 0 ;
-    DumpAPTP((&*p_IRDBAccessSvcWithUpdate),KountCallsDoItASstring,Kwarn,Kchck);
+    DumpAPTP(m_amdcDb.get(),KountCallsDoItASstring,Kwarn,Kchck);
     KwarnTot = KwarnTot + Kwarn ;
     KchckTot = KchckTot + Kchck ;
     OutFile 
@@ -215,7 +135,7 @@ StatusCode AmdcDumpOracle::DoIt()
 //  Check ALMN
     Kwarn = 0 ;
     Kchck = 0 ;
-    DumpALMN((&*p_IRDBAccessSvcWithUpdate),KountCallsDoItASstring,Kwarn,Kchck);
+    DumpALMN(m_amdcDb.get(),KountCallsDoItASstring,Kwarn,Kchck);
     KwarnTot = KwarnTot + Kwarn ;
     KchckTot = KchckTot + Kchck ;
     OutFile 
@@ -225,7 +145,7 @@ StatusCode AmdcDumpOracle::DoIt()
 //  Check ALIN
     Kwarn = 0 ;
     Kchck = 0 ;
-    DumpALIN((&*p_IRDBAccessSvcWithUpdate),KountCallsDoItASstring,Kwarn,Kchck);
+    DumpALIN(m_amdcDb.get(),KountCallsDoItASstring,Kwarn,Kchck);
     KwarnTot = KwarnTot + Kwarn ;
     KchckTot = KchckTot + Kchck ;
     OutFile 
@@ -235,7 +155,7 @@ StatusCode AmdcDumpOracle::DoIt()
 //  Check WMDT
     Kwarn = 0 ;
     Kchck = 0 ;
-    DumpWMDT((&*p_IRDBAccessSvcWithUpdate),KountCallsDoItASstring,Kwarn,Kchck);
+    DumpWMDT(m_amdcDb.get(),KountCallsDoItASstring,Kwarn,Kchck);
     KwarnTot = KwarnTot + Kwarn ;
     KchckTot = KchckTot + Kchck ;
     OutFile 
@@ -245,7 +165,7 @@ StatusCode AmdcDumpOracle::DoIt()
 //  Check WSPA
     Kwarn = 0 ;
     Kchck = 0 ;
-    DumpWSPA((&*p_IRDBAccessSvcWithUpdate),KountCallsDoItASstring,Kwarn,Kchck);
+    DumpWSPA(m_amdcDb.get(),KountCallsDoItASstring,Kwarn,Kchck);
     KwarnTot = KwarnTot + Kwarn ;
     KchckTot = KchckTot + Kchck ;
     OutFile 
@@ -255,7 +175,7 @@ StatusCode AmdcDumpOracle::DoIt()
 //  Check WSUP
     Kwarn = 0 ;
     Kchck = 0 ;
-    DumpWSUP((&*p_IRDBAccessSvcWithUpdate),KountCallsDoItASstring,Kwarn,Kchck);
+    DumpWSUP(m_amdcDb.get(),KountCallsDoItASstring,Kwarn,Kchck);
     KwarnTot = KwarnTot + Kwarn ;
     KchckTot = KchckTot + Kchck ;
     OutFile 
@@ -265,7 +185,7 @@ StatusCode AmdcDumpOracle::DoIt()
 //  Check WCHV
     Kwarn = 0 ;
     Kchck = 0 ;
-    DumpWCHV((&*p_IRDBAccessSvcWithUpdate),KountCallsDoItASstring,Kwarn,Kchck);
+    DumpWCHV(m_amdcDb.get(),KountCallsDoItASstring,Kwarn,Kchck);
     KwarnTot = KwarnTot + Kwarn ;
     KchckTot = KchckTot + Kchck ;
     OutFile 
@@ -275,7 +195,7 @@ StatusCode AmdcDumpOracle::DoIt()
 //  Check WCMI
     Kwarn = 0 ;
     Kchck = 0 ;
-    DumpWCMI((&*p_IRDBAccessSvcWithUpdate),KountCallsDoItASstring,Kwarn,Kchck);
+    DumpWCMI(m_amdcDb.get(),KountCallsDoItASstring,Kwarn,Kchck);
     KwarnTot = KwarnTot + Kwarn ;
     KchckTot = KchckTot + Kchck ;
     OutFile 
@@ -285,7 +205,7 @@ StatusCode AmdcDumpOracle::DoIt()
 //  Check WCRO
     Kwarn = 0 ;
     Kchck = 0 ;
-    DumpWCRO((&*p_IRDBAccessSvcWithUpdate),KountCallsDoItASstring,Kwarn,Kchck);
+    DumpWCRO(m_amdcDb.get(),KountCallsDoItASstring,Kwarn,Kchck);
     KwarnTot = KwarnTot + Kwarn ;
     KchckTot = KchckTot + Kchck ;
     OutFile 
@@ -295,7 +215,7 @@ StatusCode AmdcDumpOracle::DoIt()
 //  Check WLBI
     Kwarn = 0 ;
     Kchck = 0 ;
-    DumpWLBI((&*p_IRDBAccessSvcWithUpdate),KountCallsDoItASstring,Kwarn,Kchck);
+    DumpWLBI(m_amdcDb.get(),KountCallsDoItASstring,Kwarn,Kchck);
     KwarnTot = KwarnTot + Kwarn ;
     KchckTot = KchckTot + Kchck ;
     OutFile 
@@ -305,7 +225,7 @@ StatusCode AmdcDumpOracle::DoIt()
 //  Check WDED
     Kwarn = 0 ;
     Kchck = 0 ;
-    DumpWDED((&*p_IRDBAccessSvcWithUpdate),KountCallsDoItASstring,Kwarn,Kchck);
+    DumpWDED(m_amdcDb.get(),KountCallsDoItASstring,Kwarn,Kchck);
     KwarnTot = KwarnTot + Kwarn ;
     KchckTot = KchckTot + Kchck ;
     OutFile 
@@ -315,7 +235,7 @@ StatusCode AmdcDumpOracle::DoIt()
 //  Check ASMP
     Kwarn = 0 ;
     Kchck = 0 ;
-    DumpASMP((&*p_IRDBAccessSvcWithUpdate),KountCallsDoItASstring,Kwarn,Kchck);
+    DumpASMP(m_amdcDb.get(),KountCallsDoItASstring,Kwarn,Kchck);
     KwarnTot = KwarnTot + Kwarn ;
     KchckTot = KchckTot + Kchck ;
     OutFile 
@@ -325,7 +245,7 @@ StatusCode AmdcDumpOracle::DoIt()
 //  Check DBAM
     Kwarn = 0 ;
     Kchck = 0 ;
-    DumpDBAM((&*p_IRDBAccessSvcWithUpdate),KountCallsDoItASstring,Kwarn,Kchck);
+    DumpDBAM(m_amdcDb.get(),KountCallsDoItASstring,Kwarn,Kchck);
     KwarnTot = KwarnTot + Kwarn ;
     KchckTot = KchckTot + Kchck ;
     OutFile 
@@ -335,7 +255,7 @@ StatusCode AmdcDumpOracle::DoIt()
 //  Check WCSC
     Kwarn = 0 ;
     Kchck = 0 ;
-    DumpWCSC((&*p_IRDBAccessSvcWithUpdate),KountCallsDoItASstring,Kwarn,Kchck);
+    DumpWCSC(m_amdcDb.get(),KountCallsDoItASstring,Kwarn,Kchck);
     KwarnTot = KwarnTot + Kwarn ;
     KchckTot = KchckTot + Kchck ;
     OutFile 
@@ -345,7 +265,7 @@ StatusCode AmdcDumpOracle::DoIt()
 //  Check ATLN
     Kwarn = 0 ;
     Kchck = 0 ;
-    DumpATLN((&*p_IRDBAccessSvcWithUpdate),KountCallsDoItASstring,Kwarn,Kchck);
+    DumpATLN(m_amdcDb.get(),KountCallsDoItASstring,Kwarn,Kchck);
     KwarnTot = KwarnTot + Kwarn ;
     KchckTot = KchckTot + Kchck ;
     OutFile 
@@ -355,7 +275,7 @@ StatusCode AmdcDumpOracle::DoIt()
 //  Check GGLN
     Kwarn = 0 ;
     Kchck = 0 ;
-    DumpGGLN((&*p_IRDBAccessSvcWithUpdate),KountCallsDoItASstring,Kwarn,Kchck);
+    DumpGGLN(m_amdcDb.get(),KountCallsDoItASstring,Kwarn,Kchck);
     KwarnTot = KwarnTot + Kwarn ;
     KchckTot = KchckTot + Kchck ;
     OutFile 
@@ -365,7 +285,7 @@ StatusCode AmdcDumpOracle::DoIt()
 //  Check WTGC
     Kwarn = 0 ;
     Kchck = 0 ;
-    DumpWTGC((&*p_IRDBAccessSvcWithUpdate),KountCallsDoItASstring,Kwarn,Kchck);
+    DumpWTGC(m_amdcDb.get(),KountCallsDoItASstring,Kwarn,Kchck);
     KwarnTot = KwarnTot + Kwarn ;
     KchckTot = KchckTot + Kchck ;
     OutFile 
@@ -375,7 +295,7 @@ StatusCode AmdcDumpOracle::DoIt()
 //  Check AWLN
     Kwarn = 0 ;
     Kchck = 0 ;
-    DumpAWLN((&*p_IRDBAccessSvcWithUpdate),KountCallsDoItASstring,Kwarn,Kchck);
+    DumpAWLN(m_amdcDb.get(),KountCallsDoItASstring,Kwarn,Kchck);
     KwarnTot = KwarnTot + Kwarn ;
     KchckTot = KchckTot + Kchck ;
     OutFile 
@@ -385,7 +305,7 @@ StatusCode AmdcDumpOracle::DoIt()
 //  Check WRPC
     Kwarn = 0 ;
     Kchck = 0 ;
-    DumpWRPC((&*p_IRDBAccessSvcWithUpdate),KountCallsDoItASstring,Kwarn,Kchck);
+    DumpWRPC(m_amdcDb.get(),KountCallsDoItASstring,Kwarn,Kchck);
     KwarnTot = KwarnTot + Kwarn ;
     KchckTot = KchckTot + Kchck ;
     OutFile 
@@ -395,7 +315,7 @@ StatusCode AmdcDumpOracle::DoIt()
 //  Check ACUT
     Kwarn = 0 ;
     Kchck = 0 ;
-    DumpACUT((&*p_IRDBAccessSvcWithUpdate),KountCallsDoItASstring,Kwarn,Kchck);
+    DumpACUT(m_amdcDb.get(),KountCallsDoItASstring,Kwarn,Kchck);
     KwarnTot = KwarnTot + Kwarn ;
     KchckTot = KchckTot + Kchck ;
     OutFile 
@@ -405,7 +325,7 @@ StatusCode AmdcDumpOracle::DoIt()
 //  Check ATYP
     Kwarn = 0 ;
     Kchck = 0 ;
-    DumpATYP((&*p_IRDBAccessSvcWithUpdate),KountCallsDoItASstring,Kwarn,Kchck);
+    DumpATYP(m_amdcDb.get(),KountCallsDoItASstring,Kwarn,Kchck);
     KwarnTot = KwarnTot + Kwarn ;
     KchckTot = KchckTot + Kchck ;
     OutFile 
@@ -415,7 +335,7 @@ StatusCode AmdcDumpOracle::DoIt()
 //  Check AMDC
     Kwarn = 0 ;
     Kchck = 0 ;
-    DumpAMDC((&*p_IRDBAccessSvcWithUpdate),KountCallsDoItASstring,Kwarn,Kchck);
+    DumpAMDC(m_amdcDb.get(),KountCallsDoItASstring,Kwarn,Kchck);
     KwarnTot = KwarnTot + Kwarn ;
     KchckTot = KchckTot + Kchck ;
     OutFile 
@@ -425,7 +345,7 @@ StatusCode AmdcDumpOracle::DoIt()
 //  Check HwSwIdMapping
     Kwarn = 0 ;
     Kchck = 0 ;
-    DumpHwSwIdMapping((&*p_IRDBAccessSvcWithUpdate),KountCallsDoItASstring,Kwarn,Kchck);
+    DumpHwSwIdMapping(m_amdcDb.get(),KountCallsDoItASstring,Kwarn,Kchck);
     KwarnTot = KwarnTot + Kwarn ;
     KchckTot = KchckTot + Kchck ;
     OutFile 
@@ -435,7 +355,7 @@ StatusCode AmdcDumpOracle::DoIt()
 //  Check XtomoData
     Kwarn = 0 ;
     Kchck = 0 ;
-    DumpXtomoData((&*p_IRDBAccessSvcWithUpdate),KountCallsDoItASstring,Kwarn,Kchck);
+    DumpXtomoData(m_amdcDb.get(),KountCallsDoItASstring,Kwarn,Kchck);
     KwarnTot = KwarnTot + Kwarn ;
     KchckTot = KchckTot + Kchck ;
     OutFile 
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcStand/AmdcStand/AmdcStandMisc.h b/MuonSpectrometer/Amdcsimrec/AmdcStand/AmdcStand/AmdcStandMisc.h
deleted file mode 100755
index 25ee4b06d5ab983ce22503cc277bf1e6dfc2b24d..0000000000000000000000000000000000000000
--- a/MuonSpectrometer/Amdcsimrec/AmdcStand/AmdcStand/AmdcStandMisc.h
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-//
-//  Author :  Saclay Muon Software Group SaMuSoG
-//
-// The AmdcStandMisc header contains all this Bxxxxy stuff
-//
-#ifndef AmdcStandMisc_H
-#define AmdcStandMisc_H
-
-#include <iostream>
-#include <iomanip>
-#include <fstream>
-#include <cstdio>
-#include <cmath>
-#include <algorithm>
-#include <iterator>
-#include <list>
-#include <vector>
-#include <map>
-#include <string>
-#include <utility>
-
-#endif
-
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcStand/AmdcStand/AmdcsimrecStand.h b/MuonSpectrometer/Amdcsimrec/AmdcStand/AmdcStand/AmdcsimrecStand.h
index 1a9e8e313a4219e7010da83608710fbacf08508d..7624fb77bb05f0d5a732b95515cc474fc902723a 100755
--- a/MuonSpectrometer/Amdcsimrec/AmdcStand/AmdcStand/AmdcsimrecStand.h
+++ b/MuonSpectrometer/Amdcsimrec/AmdcStand/AmdcStand/AmdcsimrecStand.h
@@ -1,11 +1,11 @@
 /*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
-#ifndef AmdcsimrecStand_H
-#define AmdcsimrecStand_H
+#ifndef AMDCSTAND_AMDCSIMRECSTAND_H
+#define AMDCSTAND_AMDCSIMRECSTAND_H
 
-#include "AmdcStand/AmdcStandMisc.h"
+#include <string>
 
 #include "AmdcCore/Amdcsimrec.h"
 
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcStand/AmdcStand/GetAmdcsimrecVariable.h b/MuonSpectrometer/Amdcsimrec/AmdcStand/AmdcStand/GetAmdcsimrecVariable.h
index e5a8889a63a630eec3ed421c4d90bec4a435d910..f5b5c2912c8da75b75498626bf65d9cfb27dea42 100755
--- a/MuonSpectrometer/Amdcsimrec/AmdcStand/AmdcStand/GetAmdcsimrecVariable.h
+++ b/MuonSpectrometer/Amdcsimrec/AmdcStand/AmdcStand/GetAmdcsimrecVariable.h
@@ -1,11 +1,11 @@
 /*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
-#ifndef GetAmdcsimrecVariable_H
-#define GetAmdcsimrecVariable_H
+#ifndef AMDCSTAND_GETAMDCSIMRECVARIABLE_H
+#define AMDCSTAND_GETAMDCSIMRECVARIABLE_H
 
-#include "AmdcStand/AmdcStandMisc.h"
+#include <string>
 
 double       GetAmdcRealVar(std::string VarName, int I1, int I2, int I3);
 int          GetAmdcIntVar (std::string VarName, int I1, int I2, int I3);
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcStand/src/AmdcsimrecStand.cxx b/MuonSpectrometer/Amdcsimrec/AmdcStand/src/AmdcsimrecStand.cxx
index 18b574d6148e3df9454e5993fcbb4f451f64f36e..ec66ff326508ade1e572a4951bd3d4c21f8f1c0f 100755
--- a/MuonSpectrometer/Amdcsimrec/AmdcStand/src/AmdcsimrecStand.cxx
+++ b/MuonSpectrometer/Amdcsimrec/AmdcStand/src/AmdcsimrecStand.cxx
@@ -11,6 +11,9 @@
 #include "TomoTubeDisplacement.h"
 #include "TomoTubeDisplacementContinuous.h"
 
+#include <iomanip>
+#include <cmath>
+#include <fstream>
 #include "f1get.h"
 #include "getindfromamdc.h"
 #include "getamdcindfrom.h"
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcStand/src/GetAmdcsimrecVariable.cxx b/MuonSpectrometer/Amdcsimrec/AmdcStand/src/GetAmdcsimrecVariable.cxx
index 87b5ac4b9f6a9d12ac85abc3c1402372dc6881a4..21df48407e274d2ef7ef0fa6723620a6aa4d1e1d 100755
--- a/MuonSpectrometer/Amdcsimrec/AmdcStand/src/GetAmdcsimrecVariable.cxx
+++ b/MuonSpectrometer/Amdcsimrec/AmdcStand/src/GetAmdcsimrecVariable.cxx
@@ -2,8 +2,7 @@
   Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
-#include "AmdcStand/AmdcStandMisc.h"
-
+#include <string>
 
 // Fortran routines -------------------------------------------------------------
 #include "f1get.h"
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcStand/src/TomoTubeDisplacement.h b/MuonSpectrometer/Amdcsimrec/AmdcStand/src/TomoTubeDisplacement.h
index 42376c9ba9e0254e80c4abeb5a970617b4a62da2..a1adce705b0111a490c083d87107b55704536563 100644
--- a/MuonSpectrometer/Amdcsimrec/AmdcStand/src/TomoTubeDisplacement.h
+++ b/MuonSpectrometer/Amdcsimrec/AmdcStand/src/TomoTubeDisplacement.h
@@ -1,11 +1,11 @@
 /*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
-#ifndef TomoTubeDisplacement_H
-#define TomoTubeDisplacement_H
+#ifndef AMDCSTAND_TOMOTUBEDISPLACEMENT_H
+#define AMDCSTAND_TOMOTUBEDISPLACEMENT_H
 
-#include "AmdcStand/AmdcStandMisc.h"
+#include <string>
 
 void tomotubedisplacement_(
 		         int& optionflag,
diff --git a/MuonSpectrometer/Amdcsimrec/AmdcStand/src/TomoTubeDisplacementContinuous.h b/MuonSpectrometer/Amdcsimrec/AmdcStand/src/TomoTubeDisplacementContinuous.h
index dea1cacad551bcf6998d2f28147ebf906b431486..8917f0e04579a39c80ac7d4959d637b14956e1af 100644
--- a/MuonSpectrometer/Amdcsimrec/AmdcStand/src/TomoTubeDisplacementContinuous.h
+++ b/MuonSpectrometer/Amdcsimrec/AmdcStand/src/TomoTubeDisplacementContinuous.h
@@ -1,11 +1,11 @@
 /*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
-#ifndef TomoTubeDisplacementContinuous_H
-#define TomoTubeDisplacementContinuous_H
+#ifndef AMDCSTAND_TOMOTUBEDISPLACEMENTCONTINUOUS_H
+#define AMDCSTAND_TOMOTUBEDISPLACEMENTCONTINUOUS_H
 
-#include "AmdcStand/AmdcStandMisc.h"
+#include <string>
 
 void tomotubedisplacementcontinuous_(
                          int Iside_In,
diff --git a/MuonSpectrometer/MuonAlignment/MuonAlignExample/scripts/chi2validation_x.cpp b/MuonSpectrometer/MuonAlignment/MuonAlignExample/scripts/chi2validation_x.cpp
index 894f7d730501001f20e61a2dc042d2126a2f896b..60e4294fe46e96f1f22a905e84b56c72b9c8cb93 100644
--- a/MuonSpectrometer/MuonAlignment/MuonAlignExample/scripts/chi2validation_x.cpp
+++ b/MuonSpectrometer/MuonAlignment/MuonAlignExample/scripts/chi2validation_x.cpp
@@ -21,10 +21,8 @@
 #include <TAxis.h>
 #include "TFile.h"
 #include "TH1F.h"
-#include "TStyle.h"
 #include "TLine.h"
 #include "TSystem.h"
-#include "TEventList.h"
 #include "TMatrixD.h"
 
 #include <iostream>
diff --git a/MuonSpectrometer/MuonAlignment/MuonAlignExample/scripts/combineUnbiasedResPlots_x.cpp b/MuonSpectrometer/MuonAlignment/MuonAlignExample/scripts/combineUnbiasedResPlots_x.cpp
index 1485dd765662bb7606ce932422280170282541a7..e1a0699d9acce105afa1c94f2454217e67a747a3 100644
--- a/MuonSpectrometer/MuonAlignment/MuonAlignExample/scripts/combineUnbiasedResPlots_x.cpp
+++ b/MuonSpectrometer/MuonAlignment/MuonAlignExample/scripts/combineUnbiasedResPlots_x.cpp
@@ -10,7 +10,6 @@
 #include "TGraphErrors.h"
 #include "TText.h"
 #include "TLegend.h"
-#include "TF1.h"
 #include "TChain.h"
 #include "TPaveStats.h"
 
diff --git a/MuonSpectrometer/MuonAlignment/MuonAlignExample/scripts/muontrackplots_x.cpp b/MuonSpectrometer/MuonAlignment/MuonAlignExample/scripts/muontrackplots_x.cpp
index 63d21254cc72d4238c2c99e4cf8fced7f0aa6140..ef8b41cf98d07373d51c72b442b5c9d51da8b112 100644
--- a/MuonSpectrometer/MuonAlignment/MuonAlignExample/scripts/muontrackplots_x.cpp
+++ b/MuonSpectrometer/MuonAlignment/MuonAlignExample/scripts/muontrackplots_x.cpp
@@ -21,7 +21,6 @@
 #include <TAxis.h>
 #include "TFile.h"
 #include "TH1F.h"
-#include "TStyle.h"
 #include "TLine.h"
 #include "TSystem.h"
 #include "TH2F.h"
diff --git a/MuonSpectrometer/MuonAlignment/MuonAlignExample/scripts/plotCorr.C b/MuonSpectrometer/MuonAlignment/MuonAlignExample/scripts/plotCorr.C
index 0d210ff4fa0f287c0182a4244a28bc31c2f215fa..58474f7bf1f1f1b636270b7236967371ba2f567a 100644
--- a/MuonSpectrometer/MuonAlignment/MuonAlignExample/scripts/plotCorr.C
+++ b/MuonSpectrometer/MuonAlignment/MuonAlignExample/scripts/plotCorr.C
@@ -10,7 +10,6 @@
 #include "TGraphErrors.h"
 #include "TText.h"
 #include "TLegend.h"
-#include "TF1.h"
 #include "TChain.h"
 #include "TH2F.h"
 #include "TProfile.h"
diff --git a/MuonSpectrometer/MuonAlignment/MuonAlignExample/scripts/unbiasedResPlots_x.cpp b/MuonSpectrometer/MuonAlignment/MuonAlignExample/scripts/unbiasedResPlots_x.cpp
index 1a3aa6855781611f0c0f44b8ad7e89e9a79786ef..dddb507a461fd605de789acc98bd369877856eb2 100644
--- a/MuonSpectrometer/MuonAlignment/MuonAlignExample/scripts/unbiasedResPlots_x.cpp
+++ b/MuonSpectrometer/MuonAlignment/MuonAlignExample/scripts/unbiasedResPlots_x.cpp
@@ -11,7 +11,6 @@
 #include "TGraphErrors.h"
 #include "TText.h"
 #include "TLegend.h"
-#include "TF1.h"
 #include "TChain.h"
 #include "TPaveStats.h"
 #include "TProfile.h"
diff --git a/MuonSpectrometer/MuonCablings/MuonCablingServers/src/TGCcablingServerSvc.cxx b/MuonSpectrometer/MuonCablings/MuonCablingServers/src/TGCcablingServerSvc.cxx
index 53ed1e7fb7b7f4ac5d35edf87618842ec551a858..c61391da535885dd3e09bdf2aa0384a442708efd 100644
--- a/MuonSpectrometer/MuonCablings/MuonCablingServers/src/TGCcablingServerSvc.cxx
+++ b/MuonSpectrometer/MuonCablings/MuonCablingServers/src/TGCcablingServerSvc.cxx
@@ -5,14 +5,8 @@
 #include "GaudiKernel/MsgStream.h"
 #include "GaudiKernel/ISvcLocator.h"
 #include "GaudiKernel/StatusCode.h"
-
-#include "GaudiKernel/ISvcLocator.h"
-#include "GaudiKernel/StatusCode.h"
-
 #include "StoreGate/StoreGateSvc.h"
-
 #include "MuonCablingServers/TGCcablingServerSvc.h"
-#include "GaudiKernel/MsgStream.h"
 
 TGCcablingServerSvc::TGCcablingServerSvc(const std::string& name, ISvcLocator* sl) : 
 AthService( name, sl )
diff --git a/MuonSpectrometer/MuonCablings/MuonMDT_Cabling/MuonMDT_Cabling/MuonMDT_CablingSvc.h b/MuonSpectrometer/MuonCablings/MuonMDT_Cabling/MuonMDT_Cabling/MuonMDT_CablingSvc.h
index 771c38d822f3fd25dfb4d04ef4485cfda1e9d203..83770fdca77d7eb08603fd61bd15f53f1fc02bae 100644
--- a/MuonSpectrometer/MuonCablings/MuonMDT_Cabling/MuonMDT_Cabling/MuonMDT_CablingSvc.h
+++ b/MuonSpectrometer/MuonCablings/MuonMDT_Cabling/MuonMDT_Cabling/MuonMDT_CablingSvc.h
@@ -1,8 +1,7 @@
 /*
-  Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
-
 #ifndef MUONMDT_CABLING_MUONMDT_CABLINGSVC_H
 #define MUONMDT_CABLING_MUONMDT_CABLINGSVC_H
 
@@ -11,15 +10,12 @@
 #include "GaudiKernel/IInterface.h"
 #include "GaudiKernel/ToolHandle.h"
 #include "GaudiKernel/ServiceHandle.h"
-#include "AthenaKernel/IOVSvcDefs.h"
 
 class MuonMDT_CablingMap;
 class MdtMezzanineType;
 class StoreGateSvc;
-//class IMDTcablingSvc;
 class IMDTCablingDbTool;
 class ITagInfoMgr;
-
 class IdentifierHash;
 
 #include <list>
@@ -27,12 +23,7 @@ class IdentifierHash;
 
 static const InterfaceID IID_IMuonMDT_CablingSvc("MuonMDT_CablingSvc", 1, 0);
 
-
-
-class MuonMDT_CablingSvc : public AthService 
-			   //			   virtual public IInterface, 
-
-{
+class MuonMDT_CablingSvc : public AthService {
 
  public:
 
diff --git a/MuonSpectrometer/MuonCalib/MdtCalib/MdtCalibData/src/MdtRtRelation.cxx b/MuonSpectrometer/MuonCalib/MdtCalib/MdtCalibData/src/MdtRtRelation.cxx
index d5d532d063a3108b704642c05fa7487e48652204..1f270f71030c1a8cc8a40d0c274fb2015ab9cfd9 100644
--- a/MuonSpectrometer/MuonCalib/MdtCalib/MdtCalibData/src/MdtRtRelation.cxx
+++ b/MuonSpectrometer/MuonCalib/MdtCalib/MdtCalibData/src/MdtRtRelation.cxx
@@ -1,12 +1,11 @@
 /*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
 #include "MdtCalibData/IRtRelation.h"
 #include "MdtCalibData/IRtResolution.h"
 #include "MdtCalibData/TrRelation.h"
 #include "MdtCalibData/MdtRtRelation.h"
-#include "MdtCalibData/TrRelation.h"
 
 namespace MuonCalib {
 
diff --git a/MuonSpectrometer/MuonCalib/MdtCalib/MdtCalibRt/src/RtCalibrationAnalytic.cxx b/MuonSpectrometer/MuonCalib/MdtCalib/MdtCalibRt/src/RtCalibrationAnalytic.cxx
index 1a3c8c07f5622e056c4df7015e0a90e6276a21a1..c8bde808be2254d220c7952e92210b202461bf3e 100644
--- a/MuonSpectrometer/MuonCalib/MdtCalib/MdtCalibRt/src/RtCalibrationAnalytic.cxx
+++ b/MuonSpectrometer/MuonCalib/MdtCalib/MdtCalibRt/src/RtCalibrationAnalytic.cxx
@@ -37,8 +37,6 @@
 #include "MdtCalibRt/RtParabolicExtrapolation.h"
 #include "MdtCalibData/RtFromPoints.h"
 #include "MdtCalibData/IRtRelation.h"
-#include "MdtCalibData/RtRelationLookUp.h"
-#include "MdtCalibRt/RtCalibrationOutput.h"
 #include "MdtCalibInterfaces/IMdtCalibrationOutput.h"
 #include "MuonCalibEventBase/MuonCalibSegment.h"
 #include "MuonCalibMath/BaseFunction.h"
diff --git a/MuonSpectrometer/MuonCalib/MdtCalib/MdtCalibRt/src/RtCalibrationCurved.cxx b/MuonSpectrometer/MuonCalib/MdtCalib/MdtCalibRt/src/RtCalibrationCurved.cxx
index 009f4ca9c93334e59d2faf3ce666d8d6d4c4589a..58794f6b268b1242120ec771160d2a594db24f24 100644
--- a/MuonSpectrometer/MuonCalib/MdtCalib/MdtCalibRt/src/RtCalibrationCurved.cxx
+++ b/MuonSpectrometer/MuonCalib/MdtCalib/MdtCalibRt/src/RtCalibrationCurved.cxx
@@ -37,9 +37,7 @@
 #include "MdtCalibRt/AdaptiveResidualSmoothing.h"
 #include "MdtCalibInterfaces/IMdtCalibrationOutput.h"
 #include "MdtCalibData/IRtRelation.h"
-#include "MdtCalibData/RtRelationLookUp.h"
 #include "MdtCalibData/RtScaleFunction.h"
-#include "MdtCalibRt/RtCalibrationOutput.h"
 #include "MdtCalibFitters/CurvedPatRec.h"
 #include "MuonCalibEventBase/MuonCalibSegment.h"
 #include "MuonCalibMath/BaseFunction.h"
diff --git a/MuonSpectrometer/MuonCalib/MdtCalib/MdtCalibT0/MdtCalibT0/T0CalibrationMT.h b/MuonSpectrometer/MuonCalib/MdtCalib/MdtCalibT0/MdtCalibT0/T0CalibrationMT.h
index 709c49b5453b35780fed51942ec58cf1148be3fd..2f0b52808fac4f3bc151eeff5429558ba84555ca 100644
--- a/MuonSpectrometer/MuonCalib/MdtCalib/MdtCalibT0/MdtCalibT0/T0CalibrationMT.h
+++ b/MuonSpectrometer/MuonCalib/MdtCalib/MdtCalibT0/MdtCalibT0/T0CalibrationMT.h
@@ -1,17 +1,10 @@
 /*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
-//   T0CalibrationMT.h
-//   Header file for class T0CalibrationMT
-///////////////////////////////////////////////////////////////////
-// (c) ATLAS Detector software
-///////////////////////////////////////////////////////////////////
-// 
-///////////////////////////////////////////////////////////////////
-
 #ifndef T0CALIBRATIONMT_H
 #define T0CALIBRATIONMT_H
+
 #include <iostream>
 #include <string>
 #include <vector>
@@ -20,12 +13,7 @@
 
 #include "MdtCalibInterfaces/IMdtCalibration.h"
 #include "MdtCalibData/MdtTubeFitContainer.h"
-#include "MdtCalibData/MdtTubeFitContainer.h"
 #include "MuonCalibStandAloneBase/NtupleStationId.h"
-//#include "T0MTSettings.h"
-//#include "T0MTHistos.h"
-//#include "ADCMTHistos.h"
-//#include "HistogramId.h"
 
 class TH1;
 class TFile;
@@ -42,9 +30,6 @@ class T0ADCHistos;
 class ADCMTHistos;
 class HistogramId;
 class MdtRelativeTubeT0;
-//class MdtTubeFitContainer;
-
-
 
 /**@class T0CalibrationMT
    Implementation of a T0 calibration using the MT approach.
diff --git a/MuonSpectrometer/MuonCalib/MdtCalib/MdtCalibUtils/MdtCalibUtils/RtData_t_r_reso.h b/MuonSpectrometer/MuonCalib/MdtCalib/MdtCalibUtils/MdtCalibUtils/RtData_t_r_reso.h
index db61b0b4813eca6b8e1b2047264ec821783e1e00..0fa9c32b9759f56bb26e47da42ebb1e3f6c746aa 100644
--- a/MuonSpectrometer/MuonCalib/MdtCalib/MdtCalibUtils/MdtCalibUtils/RtData_t_r_reso.h
+++ b/MuonSpectrometer/MuonCalib/MdtCalib/MdtCalibUtils/MdtCalibUtils/RtData_t_r_reso.h
@@ -7,7 +7,6 @@
 
 #include <iostream>
 #include <vector>
-#include <iostream>
 
 namespace MuonCalib {
 
diff --git a/MuonSpectrometer/MuonCalib/MuonCalibExtraTree/MuonCalibExtraTreeAlg/src/ExtraTreeTrackFillerTool.cxx b/MuonSpectrometer/MuonCalib/MuonCalibExtraTree/MuonCalibExtraTreeAlg/src/ExtraTreeTrackFillerTool.cxx
index 2574566541d8f170b1b65310d19c068833e2cc64..5fe2aeca0769f8b56cd884c25f474b287eec1d7d 100644
--- a/MuonSpectrometer/MuonCalib/MuonCalibExtraTree/MuonCalibExtraTreeAlg/src/ExtraTreeTrackFillerTool.cxx
+++ b/MuonSpectrometer/MuonCalib/MuonCalibExtraTree/MuonCalibExtraTreeAlg/src/ExtraTreeTrackFillerTool.cxx
@@ -22,7 +22,6 @@
 #include "TrkDetElementBase/TrkDetElementBase.h"
 #include "TrkPseudoMeasurementOnTrack/PseudoMeasurementOnTrack.h"
 #include "TrkMeasurementBase/MeasurementBase.h"
-#include "TrkTrack/Track.h"
 #include "TrkParameters/TrackParameters.h"
 #include "TrkRIO_OnTrack/RIO_OnTrack.h"
 #include "TrkEventPrimitives/ParamDefs.h"
diff --git a/MuonSpectrometer/MuonCalib/MuonCalibExtraTree/MuonCalibExtraTreeEvent/MuonCalibExtraTreeEvent/MuonCalibExtendedSegment.h b/MuonSpectrometer/MuonCalib/MuonCalibExtraTree/MuonCalibExtraTreeEvent/MuonCalibExtraTreeEvent/MuonCalibExtendedSegment.h
index c2958702910b1c1ed493f704f0ea7426313c43ba..bfdcdbb06515052e7900ce37d0756e3fefa2231d 100644
--- a/MuonSpectrometer/MuonCalib/MuonCalibExtraTree/MuonCalibExtraTreeEvent/MuonCalibExtraTreeEvent/MuonCalibExtendedSegment.h
+++ b/MuonSpectrometer/MuonCalib/MuonCalibExtraTree/MuonCalibExtraTreeEvent/MuonCalibExtraTreeEvent/MuonCalibExtendedSegment.h
@@ -9,7 +9,6 @@
 
 #include "MuonCalibExtraTreeEvent/MuonCalibTrackSummary.h"
 #include "MuonCalibExtraUtils/MuonCalibLayerMapping.h"
-#include "MuonCalibEventBase/MuonCalibSegment.h"
 #include "MuonCalibExtraTreeEvent/MuonCalibRawHitAssociationMap.h"
 #include "CxxUtils/checker_macros.h"
 
diff --git a/MuonSpectrometer/MuonCalib/MuonCalibExtraTree/MuonCalibExtraTreeEvent/src/EventHandler.cxx b/MuonSpectrometer/MuonCalib/MuonCalibExtraTree/MuonCalibExtraTreeEvent/src/EventHandler.cxx
index 57932cef85252787a32d2fdd62670cfddf1ba2f0..63602131f4312a8d6fb56a78b1b8dbb406346b1a 100644
--- a/MuonSpectrometer/MuonCalib/MuonCalibExtraTree/MuonCalibExtraTreeEvent/src/EventHandler.cxx
+++ b/MuonSpectrometer/MuonCalib/MuonCalibExtraTree/MuonCalibExtraTreeEvent/src/EventHandler.cxx
@@ -23,7 +23,6 @@
 #include "MuonCalibEventBase/MuonCalibPattern.h"
 #include "MuonCalibEventBase/MuonCalibSegment.h"
 #include "MuonCalibEventBase/MuonCalibRawHitCollection.h"
-#include "MuonCalibEventBase/MuonCalibTruthCollection.h"
 #include "MuonCalibStl/DeleteObject.h"
 #include "GaudiKernel/MsgStream.h"
 #include "AthenaKernel/getMessageSvc.h"
diff --git a/MuonSpectrometer/MuonCalib/MuonCalibExtraTree/MuonCalibExtraTreeEvent/src/MuonCalibExtendedRawHits.cxx b/MuonSpectrometer/MuonCalib/MuonCalibExtraTree/MuonCalibExtraTreeEvent/src/MuonCalibExtendedRawHits.cxx
index 34bb0434b3b6677c79965b2b1f7c26b73131b5c0..c4a41fcff3d09356f85a787905ec2ebd3afe8811 100644
--- a/MuonSpectrometer/MuonCalib/MuonCalibExtraTree/MuonCalibExtraTreeEvent/src/MuonCalibExtendedRawHits.cxx
+++ b/MuonSpectrometer/MuonCalib/MuonCalibExtraTree/MuonCalibExtraTreeEvent/src/MuonCalibExtendedRawHits.cxx
@@ -1,5 +1,5 @@
 /*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
 #include "MuonCalibExtraTreeEvent/MuonCalibExtendedRawHits.h"
@@ -17,7 +17,6 @@
 
 #include "MuonCalibExtraUtils/MuonFixedIdPrinter.h"
 #include "MuonCalibExtraUtils/MuonFixedIdManipulator.h"
-#include "MuonCalibExtraUtils/MuonFixedIdPrinter.h"
 
 #include <sstream>
 
diff --git a/MuonSpectrometer/MuonCalib/MuonCalibNtuple/src/MuonEventNtupleBranch.cxx b/MuonSpectrometer/MuonCalib/MuonCalibNtuple/src/MuonEventNtupleBranch.cxx
index ab5786a9cd01fdd5b8049b522e5ea3c4f0d9466b..609bfd4c253119632c73cc0f9c838a8a39b67635 100644
--- a/MuonSpectrometer/MuonCalib/MuonCalibNtuple/src/MuonEventNtupleBranch.cxx
+++ b/MuonSpectrometer/MuonCalib/MuonCalibNtuple/src/MuonEventNtupleBranch.cxx
@@ -10,7 +10,6 @@
 //other packages
 #include "CLHEP/Vector/ThreeVector.h"
 #include "TTree.h"
-#include "TString.h"
 
 // std packages
 #include <iostream>
diff --git a/MuonSpectrometer/MuonCalib/MuonCalibStandAlone/MdtCalibIOSvc/MdtCalibIOSvc/MdtCalibOutputDbSvc.h b/MuonSpectrometer/MuonCalib/MuonCalibStandAlone/MdtCalibIOSvc/MdtCalibIOSvc/MdtCalibOutputDbSvc.h
index 75e7b98166926a958c691bc4951997c9cfbf2c22..bd860ed1156c70f0adf4734c935450efc90d906f 100644
--- a/MuonSpectrometer/MuonCalib/MuonCalibStandAlone/MdtCalibIOSvc/MdtCalibIOSvc/MdtCalibOutputDbSvc.h
+++ b/MuonSpectrometer/MuonCalib/MuonCalibStandAlone/MdtCalibIOSvc/MdtCalibIOSvc/MdtCalibOutputDbSvc.h
@@ -43,8 +43,6 @@ namespace MuonCalib {
   class IRtResolution;
   class MdtTubeFitContainer;
 }
-//class CalibDBCoral;
-#include "MdtCalibUtils/RtDataFromFile.h"
 
 // interface to enable retrieving of a pointer to the singleton //
 const InterfaceID IID_IMdtCalibOutputDbSvc("MdtCalibOutputDbSvc", 1, 0);
diff --git a/MuonSpectrometer/MuonCalib/MuonCalibStandAlone/MuonCalibStandAloneBase/src/RegionSelectionSvc.cxx b/MuonSpectrometer/MuonCalib/MuonCalibStandAlone/MuonCalibStandAloneBase/src/RegionSelectionSvc.cxx
index d7f012df6269f5d8fec8fce9c3059f336955a6b4..a8b7aa1c476da63bb57fa6476222a60a61f37211 100644
--- a/MuonSpectrometer/MuonCalib/MuonCalibStandAlone/MuonCalibStandAloneBase/src/RegionSelectionSvc.cxx
+++ b/MuonSpectrometer/MuonCalib/MuonCalibStandAlone/MuonCalibStandAloneBase/src/RegionSelectionSvc.cxx
@@ -23,7 +23,6 @@
 #include "TTree.h"
 #include "TFile.h"
 #include "TDirectory.h"
-#include "TTree.h"
 #include "TKey.h"
 
 using namespace MuonCalib;
diff --git a/MuonSpectrometer/MuonCnv/MuonCSC_CnvTools/src/CscRDO_Decoder.h b/MuonSpectrometer/MuonCnv/MuonCSC_CnvTools/src/CscRDO_Decoder.h
index 405c511f94ac79ea9a193b3f4f4a0e7437021991..896ae449d7a106201ca04bc729327ea339c1a829 100644
--- a/MuonSpectrometer/MuonCnv/MuonCSC_CnvTools/src/CscRDO_Decoder.h
+++ b/MuonSpectrometer/MuonCnv/MuonCSC_CnvTools/src/CscRDO_Decoder.h
@@ -11,12 +11,7 @@
 #include "AthenaBaseComps/AthAlgTool.h"
 #include "GaudiKernel/ServiceHandle.h"
 #include "GaudiKernel/ToolHandle.h"
-
 #include "CSCcabling/CSCcablingSvc.h"
-
-#include "GaudiKernel/ServiceHandle.h"
-#include "CSCcabling/CSCcablingSvc.h"
-
 #include "MuonIdHelpers/IMuonIdHelperSvc.h"
 
 #include <inttypes.h>
diff --git a/MuonSpectrometer/MuonCnv/MuonEventAthenaPool/src/MdtPrepDataContainerCnv.cxx b/MuonSpectrometer/MuonCnv/MuonEventAthenaPool/src/MdtPrepDataContainerCnv.cxx
index 67caf1c1dbdc5ce1d23c35e7488e8004807d039c..a2525cc73c01d3b715b20b918ab827892ab2cee5 100644
--- a/MuonSpectrometer/MuonCnv/MuonEventAthenaPool/src/MdtPrepDataContainerCnv.cxx
+++ b/MuonSpectrometer/MuonCnv/MuonEventAthenaPool/src/MdtPrepDataContainerCnv.cxx
@@ -1,11 +1,9 @@
 /*
-  Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
 #include "MdtPrepDataContainerCnv.h"
 
-#include "MdtPrepDataContainerCnv.h"
-
 // Gaudi
 #include "GaudiKernel/StatusCode.h"
 #include "GaudiKernel/MsgStream.h"
diff --git a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/test/MuonSegmentCnv_p2_test.cxx b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/test/MuonSegmentCnv_p2_test.cxx
index ed5291264dd341b024e429194b6ec26debfa2ac6..0e6dbaf06c022f689626ada8c2aab1a14e926afa 100644
--- a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/test/MuonSegmentCnv_p2_test.cxx
+++ b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/test/MuonSegmentCnv_p2_test.cxx
@@ -1,5 +1,5 @@
 /*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
 // $Id$
@@ -18,7 +18,6 @@
 #include "TrkEventPrimitives/FitQuality.h"
 #include "TrkEventPrimitives/CurvilinearUVT.h"
 #include "TrkPseudoMeasurementOnTrack/PseudoMeasurementOnTrack.h"
-#include "TrkEventTPCnv/SegmentCollectionCnv_tlp3.h"
 #include "TestTools/leakcheck.h"
 #include "TestTools/FLOATassert.h"
 #include "GaudiKernel/MsgStream.h"
diff --git a/MuonSpectrometer/MuonCnv/MuonJiveXML/MuonJiveXML/TrigRpcDataRetriever.h b/MuonSpectrometer/MuonCnv/MuonJiveXML/MuonJiveXML/TrigRpcDataRetriever.h
index b9e8f7dd1f29814f4afe6f2b5bf1adf0607c8387..f7e27ea3647827eeab9d38dac53ab5575aaf0175 100644
--- a/MuonSpectrometer/MuonCnv/MuonJiveXML/MuonJiveXML/TrigRpcDataRetriever.h
+++ b/MuonSpectrometer/MuonCnv/MuonJiveXML/MuonJiveXML/TrigRpcDataRetriever.h
@@ -8,18 +8,13 @@
 #include <string>
 
 #include "JiveXML/IDataRetriever.h"
-
 #include "AthenaBaseComps/AthAlgTool.h"
-
 #include "MuonReadoutGeometry/MuonDetectorManager.h"
 #include "MuonReadoutGeometry/RpcReadoutElement.h"
 #include "MuonRPC_CnvTools/IRPC_RDO_Decoder.h"
 #include "GaudiKernel/ServiceHandle.h"
 #include "GaudiKernel/ToolHandle.h"
-
-#include "GaudiKernel/ServiceHandle.h"
 #include "MuonIdHelpers/IMuonIdHelperSvc.h"
-
 #include "RPC_CondCabling/RpcCablingCondData.h"
 #include "StoreGate/ReadCondHandleKey.h"
 
diff --git a/MuonSpectrometer/MuonCnv/MuonRPC_CnvTools/src/RpcROD_Decoder.cxx b/MuonSpectrometer/MuonCnv/MuonRPC_CnvTools/src/RpcROD_Decoder.cxx
index 55336736a6b72003bc97b262af13a6cefdc355db..2befd4acdc5f0a27f0aebb0d21129d63a58642a2 100755
--- a/MuonSpectrometer/MuonCnv/MuonRPC_CnvTools/src/RpcROD_Decoder.cxx
+++ b/MuonSpectrometer/MuonCnv/MuonRPC_CnvTools/src/RpcROD_Decoder.cxx
@@ -4,8 +4,6 @@
 
 #include "RpcROD_Decoder.h"
 
-#include <algorithm> 
-
 #include <deque>
 #include <vector>
 #include <utility>
diff --git a/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondAlg/src/MdtCalibDbAlg.cxx b/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondAlg/src/MdtCalibDbAlg.cxx
index 8365ca781ac42ca938b49a9e41b16f28dff03c39..7f9b8e53613f5f7fe6908d45e10d88e4735ecfba 100644
--- a/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondAlg/src/MdtCalibDbAlg.cxx
+++ b/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondAlg/src/MdtCalibDbAlg.cxx
@@ -35,7 +35,6 @@
 #include "MdtCalibData/BFieldCorFunc.h"
 #include "MdtCalibData/WireSagCorFunc.h"
 #include "MdtCalibData/MdtSlewCorFuncHardcoded.h"
-#include "MdtCalibData/CalibFunc.h"
 #include "GaudiKernel/PhysicalConstants.h"
 
 //TODO: use smart pointers
diff --git a/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondData/MuonCondData/CscCondDataCollectionBase.h b/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondData/MuonCondData/CscCondDataCollectionBase.h
index 8b46e52202a710a5552679b2b55f60b29586934a..41fe3500f0f1168c2f2f1f7e77fc603aa1541c53 100755
--- a/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondData/MuonCondData/CscCondDataCollectionBase.h
+++ b/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondData/MuonCondData/CscCondDataCollectionBase.h
@@ -1,5 +1,5 @@
 /*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
 #ifndef CSCCONDDATACOLLECTIONBASE_H 
@@ -13,20 +13,13 @@
 
         Exists so that CscCondDataCollection objects of different types can be stored in the same vector, map, etc. 
 **/
-#include <sstream>
+
 #include "AthContainers/DataVector.h"
 #include "StoreGate/DataHandle.h"
-//#include "AthenaKernel/CLASS_DEF.h"
 #include "AthenaPoolUtilities/CondAttrListCollection.h"
 #include "MuonCondData/CscCondData.h"
-#include <sstream>
 
-class CondAttrListCollection;
-/*
-namespace std {
-typedef istringstream;
-}
- */
+#include <sstream>
 
 namespace MuonCalib {
   class CscCondDataCollectionBase 
diff --git a/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondData/MuonCondData/CscCondDbData.h b/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondData/MuonCondData/CscCondDbData.h
index 733d0af094afd76d76eb8d18ba77555a74d970a8..40d27267e79cfc50a22be32f6fe268ca1ac740e3 100644
--- a/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondData/MuonCondData/CscCondDbData.h
+++ b/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondData/MuonCondData/CscCondDbData.h
@@ -140,7 +140,6 @@ public:
 };
 
 CLASS_DEF( CscCondDbData, 10742890, 1)
-#include "AthenaKernel/CondCont.h"
 CLASS_DEF( CondCont<CscCondDbData>, 43297256, 0)
 
 #endif
diff --git a/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondData/MuonCondData/MdtCondDbData.h b/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondData/MuonCondData/MdtCondDbData.h
index 8f8b09372ec8b4680db1b188227ea4a4e414e847..5c7fbf920d948632c7babfdce8eb472c60c9292b 100644
--- a/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondData/MuonCondData/MdtCondDbData.h
+++ b/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondData/MuonCondData/MdtCondDbData.h
@@ -105,7 +105,6 @@ private:
 };
 
 CLASS_DEF( MdtCondDbData, 58088442, 1)
-#include "AthenaKernel/CondCont.h"
 CLASS_DEF( CondCont<MdtCondDbData>, 62077248, 0)
 
 #endif
diff --git a/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondData/MuonCondData/RpcCondDbData.h b/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondData/MuonCondData/RpcCondDbData.h
index 8bb33673eae669e772181b24a9ce83536067dd0a..521d934614ca8bdbf474a5635a5550081b5f74d6 100644
--- a/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondData/MuonCondData/RpcCondDbData.h
+++ b/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondData/MuonCondData/RpcCondDbData.h
@@ -1,5 +1,5 @@
 /*
-  Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
 #ifndef MUONCONDDATA_RPCCONDDBDATA_H
@@ -120,7 +120,6 @@ public:
 };
 
 CLASS_DEF( RpcCondDbData, 25128902, 1)
-#include "AthenaKernel/CondCont.h"
 CLASS_DEF( CondCont<RpcCondDbData>, 178635428, 0)
 
 #endif
diff --git a/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondData/MuonCondData/TgcCondDbData.h b/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondData/MuonCondData/TgcCondDbData.h
index 8660cc8d8066c527a7b363d7d3723d6aa6ec20f8..7c767e5b1c920ce63071e781af9070f24367d7e7 100644
--- a/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondData/MuonCondData/TgcCondDbData.h
+++ b/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondData/MuonCondData/TgcCondDbData.h
@@ -1,5 +1,5 @@
 /*
-  Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
 #ifndef MUONCONDDATA_TGCCONDDBDATA_H
@@ -49,7 +49,6 @@ private:
 };
 
 CLASS_DEF( TgcCondDbData, 130737053, 1)
-#include "AthenaKernel/CondCont.h"
 CLASS_DEF( CondCont<TgcCondDbData>, 178252645, 0)
 
 #endif
diff --git a/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondSvc/MuonCondSvc/CSC_DCSConditionsSvc.h b/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondSvc/MuonCondSvc/CSC_DCSConditionsSvc.h
index c53cda4c93b6d493899bf7a52728cb02d0f59753..2f99c415b62762723a7adba2f6482c1e1b0797d7 100644
--- a/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondSvc/MuonCondSvc/CSC_DCSConditionsSvc.h
+++ b/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondSvc/MuonCondSvc/CSC_DCSConditionsSvc.h
@@ -14,10 +14,8 @@
 #include "MuonCondInterface/ICSC_DCSConditionsTool.h"
 #include "GaudiKernel/ToolHandle.h"
 #include "AthenaKernel/IOVSvcDefs.h"
-#include "AthenaBaseComps/AthService.h"
 #include "GaudiKernel/ServiceHandle.h"
 #include "MuonCondSvc/MuonHierarchy.h"
-#include "StoreGate/StoreGateSvc.h"
 #include "AthenaKernel/IIOVDbSvc.h" 
 
 #include <vector>
diff --git a/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondSvc/MuonCondSvc/MDT_DCSConditionsRun2Svc.h b/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondSvc/MuonCondSvc/MDT_DCSConditionsRun2Svc.h
index 74e287b815142975529f9c0af652d5b1af620176..fd39b9800ecd030ab50383f8c8142ee7010c2ed2 100644
--- a/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondSvc/MuonCondSvc/MDT_DCSConditionsRun2Svc.h
+++ b/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondSvc/MuonCondSvc/MDT_DCSConditionsRun2Svc.h
@@ -14,10 +14,8 @@
 #include "MuonCondInterface/IMDT_DCSConditionsRun2Tool.h"
 #include "GaudiKernel/ToolHandle.h"
 #include "AthenaKernel/IOVSvcDefs.h"
-#include "AthenaBaseComps/AthService.h"
 #include "GaudiKernel/ServiceHandle.h"
 #include "MuonCondSvc/MuonHierarchy.h"
-#include "StoreGate/StoreGateSvc.h"
 #include "AthenaKernel/IIOVDbSvc.h" 
 
 #include <vector>
diff --git a/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondSvc/MuonCondSvc/MDT_DCSConditionsSvc.h b/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondSvc/MuonCondSvc/MDT_DCSConditionsSvc.h
index 1ea00c5723ff57e54daa7365bfff888029a240f6..af72a04a36c83d33a67dc6e730b86373b45cfe4e 100644
--- a/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondSvc/MuonCondSvc/MDT_DCSConditionsSvc.h
+++ b/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondSvc/MuonCondSvc/MDT_DCSConditionsSvc.h
@@ -14,10 +14,8 @@
 #include "MuonCondInterface/IMDT_DCSConditionsTool.h"
 #include "GaudiKernel/ToolHandle.h"
 #include "AthenaKernel/IOVSvcDefs.h"
-#include "AthenaBaseComps/AthService.h"
 #include "GaudiKernel/ServiceHandle.h"
 #include "MuonCondSvc/MuonHierarchy.h"
-#include "StoreGate/StoreGateSvc.h"
 #include "AthenaKernel/IIOVDbSvc.h" 
 
 #include <vector>
diff --git a/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondSvc/MuonCondSvc/MdtCoolStrSvc.h b/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondSvc/MuonCondSvc/MdtCoolStrSvc.h
index 0f025daa0f6618f5ed099272c805c3534c5c1d07..d9aceb686444b17c0fdb7a2fb9e237fe51a05c28 100755
--- a/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondSvc/MuonCondSvc/MdtCoolStrSvc.h
+++ b/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondSvc/MuonCondSvc/MdtCoolStrSvc.h
@@ -11,7 +11,7 @@
 #include "AthenaBaseComps/AthService.h"
 
 #include "MuonCondInterface/MdtICoolStrSvc.h"
-#include "GaudiKernel/Service.h"
+
 namespace MuonCalib {
 class MdtCoolStrSvc : public AthService, virtual MdtICoolStrSvc
 {
diff --git a/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondSvc/MuonCondSvc/RPC_DCSConditionsSvc.h b/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondSvc/MuonCondSvc/RPC_DCSConditionsSvc.h
index ed944a6c07e41f4e666da437e8dbb992b0be74a0..4878ac3d39ff7d4c6ad6bffe348eb10b94e673bf 100644
--- a/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondSvc/MuonCondSvc/RPC_DCSConditionsSvc.h
+++ b/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondSvc/MuonCondSvc/RPC_DCSConditionsSvc.h
@@ -14,10 +14,8 @@
 #include "MuonCondInterface/IRPC_DCSConditionsTool.h"
 #include "GaudiKernel/ToolHandle.h"
 #include "AthenaKernel/IOVSvcDefs.h"
-#include "AthenaBaseComps/AthService.h"
 #include "GaudiKernel/ServiceHandle.h"
 #include "MuonCondSvc/MuonHierarchy.h"
-#include "StoreGate/StoreGateSvc.h"
 #include "AthenaKernel/IIOVDbSvc.h" 
 
 #include <vector>
diff --git a/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondSvc/MuonCondSvc/RPC_STATUSConditionsSvc.h b/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondSvc/MuonCondSvc/RPC_STATUSConditionsSvc.h
index c6455825d4de5d5b1fdbf4e58aa6ce7f1e25607a..8f7e0dcb400d955f98dbedb54d2406ac432e2195 100644
--- a/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondSvc/MuonCondSvc/RPC_STATUSConditionsSvc.h
+++ b/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondSvc/MuonCondSvc/RPC_STATUSConditionsSvc.h
@@ -14,10 +14,8 @@
 #include "MuonCondInterface/IRpcDetectorStatusDbTool.h"
 #include "GaudiKernel/ToolHandle.h"
 #include "AthenaKernel/IOVSvcDefs.h"
-#include "AthenaBaseComps/AthService.h"
 #include "GaudiKernel/ServiceHandle.h"
 #include "MuonCondSvc/MuonHierarchy.h"
-#include "StoreGate/StoreGateSvc.h"
 #include "AthenaKernel/IIOVDbSvc.h" 
 
 #include <vector>
diff --git a/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondSvc/MuonCondSvc/RPC_TriggerSvc_test.h b/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondSvc/MuonCondSvc/RPC_TriggerSvc_test.h
index e582c4e3f3a5b0d8ab8101c47562f8195a0ed1fe..dac0a12776f209a2dc6a70a6a193d6a381dfe5fd 100644
--- a/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondSvc/MuonCondSvc/RPC_TriggerSvc_test.h
+++ b/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondSvc/MuonCondSvc/RPC_TriggerSvc_test.h
@@ -14,10 +14,8 @@
 #include "MuonCondInterface/IRPC_TriggerSvc_test.h"
 #include "GaudiKernel/ToolHandle.h"
 #include "AthenaKernel/IOVSvcDefs.h"
-#include "AthenaBaseComps/AthService.h"
 #include "GaudiKernel/ServiceHandle.h"
 #include "MuonCondSvc/MuonHierarchy.h"
-#include "StoreGate/StoreGateSvc.h"
 #include "AthenaKernel/IIOVDbSvc.h" 
 
 #include <vector>
diff --git a/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondSvc/MuonCondSvc/RpcStatusDbSvc.h b/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondSvc/MuonCondSvc/RpcStatusDbSvc.h
index fe0006a6f6e5b0839fd987c7864f742824034e77..9187420bf0fb20f93925d207a70a88b016c7433b 100644
--- a/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondSvc/MuonCondSvc/RpcStatusDbSvc.h
+++ b/MuonSpectrometer/MuonConditions/MuonCondGeneral/MuonCondSvc/MuonCondSvc/RpcStatusDbSvc.h
@@ -1,5 +1,5 @@
 /*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
 #ifndef MUONCONDSVC_RPCSTATUSDBSVC_H
@@ -12,9 +12,6 @@
 #include "AthenaBaseComps/AthService.h"
 
 #include "MuonCondInterface/IRpcDetectorStatusDbTool.h"
-//#include "MuonCondData/RpcCalibDataContainer.h"
-//#include "MuonCondData/RpcStripStatus.h"
-
 
 class IIOVSvc;
 
diff --git a/MuonSpectrometer/MuonDetDescr/MuonAGDD/src/MuonAGDDToolHelper.cxx b/MuonSpectrometer/MuonDetDescr/MuonAGDD/src/MuonAGDDToolHelper.cxx
index 0606e2b108e80435581e7002e78df66ad4a1380b..4b205e39d39ac166670364b0d51aff33cd98290c 100644
--- a/MuonSpectrometer/MuonDetDescr/MuonAGDD/src/MuonAGDDToolHelper.cxx
+++ b/MuonSpectrometer/MuonDetDescr/MuonAGDD/src/MuonAGDDToolHelper.cxx
@@ -20,7 +20,6 @@
 #include "AGDDKernel/AliasStore.h"
 #include "AGDDKernel/AGDDDetector.h"
 #include "AGDDKernel/AGDDDetectorStore.h"
-#include "AGDD2GeoSvc/IAGDD2GeoSvc.h"
 
 #include "RDBAccessSvc/IRDBAccessSvc.h"
 #include "RDBAccessSvc/IRDBRecord.h"
diff --git a/MuonSpectrometer/MuonDetDescr/MuonReadoutGeometry/src/CscReadoutElement.cxx b/MuonSpectrometer/MuonDetDescr/MuonReadoutGeometry/src/CscReadoutElement.cxx
index 1d59c4e37153dead431d1ac8fb9e7d5f91511a9d..7d84ed60da0e55ae779101e77aa64e57e7b8c501 100644
--- a/MuonSpectrometer/MuonDetDescr/MuonReadoutGeometry/src/CscReadoutElement.cxx
+++ b/MuonSpectrometer/MuonDetDescr/MuonReadoutGeometry/src/CscReadoutElement.cxx
@@ -7,9 +7,6 @@
 #include "MuonIdHelpers/CscIdHelper.h"
 #include "GeoModelKernel/GeoPhysVol.h"
 #include "GeoModelKernel/GeoFullPhysVol.h"
-
-#include "MuonAlignmentData/CscInternalAlignmentPar.h"
-
 #include "TrkSurfaces/PlaneSurface.h"
 #include "TrkSurfaces/TrapezoidBounds.h"
 #include "TrkSurfaces/RotatedTrapezoidBounds.h"
diff --git a/MuonSpectrometer/MuonDetDescr/MuonReadoutGeometry/src/RpcReadoutElement.cxx b/MuonSpectrometer/MuonDetDescr/MuonReadoutGeometry/src/RpcReadoutElement.cxx
index a5587a3c1582c9c07595f91f3ed1f420351b241f..eca19e25dda0bfba1097e23ed21f6573265836ea 100644
--- a/MuonSpectrometer/MuonDetDescr/MuonReadoutGeometry/src/RpcReadoutElement.cxx
+++ b/MuonSpectrometer/MuonDetDescr/MuonReadoutGeometry/src/RpcReadoutElement.cxx
@@ -15,7 +15,6 @@
 #include "GaudiKernel/MsgStream.h"
 #include "TrkSurfaces/PlaneSurface.h"
 #include "TrkSurfaces/RectangleBounds.h"
-#include "GaudiKernel/MsgStream.h"
 #include "AthenaKernel/getMessageSvc.h"
 #include <TString.h> // for Form
 
diff --git a/MuonSpectrometer/MuonDetDescr/MuonRegionSelector/src/CSC_RegionSelectorTable.cxx b/MuonSpectrometer/MuonDetDescr/MuonRegionSelector/src/CSC_RegionSelectorTable.cxx
index 9019b39bc1908b2138bd9cf6b9fca00b342c88e3..faee8bc20a011b8eef49ec860ff81e0aa65409b8 100644
--- a/MuonSpectrometer/MuonDetDescr/MuonRegionSelector/src/CSC_RegionSelectorTable.cxx
+++ b/MuonSpectrometer/MuonDetDescr/MuonRegionSelector/src/CSC_RegionSelectorTable.cxx
@@ -17,7 +17,6 @@
 #include "MuonReadoutGeometry/RpcReadoutSet.h"
 #include "MuonReadoutGeometry/MuonStation.h"
 #include "CSCcabling/CSCcablingSvc.h"
-#include "RegSelLUT/RegSelSiLUT.h"
 
 #include <iostream>
 #include <vector>
diff --git a/MuonSpectrometer/MuonDetDescr/MuonRegionSelector/src/MDT_RegionSelectorTable.cxx b/MuonSpectrometer/MuonDetDescr/MuonRegionSelector/src/MDT_RegionSelectorTable.cxx
index f1586dae8b31365675ae38a41ccd8a5f262db923..c4ae68c060cb7c5be8a0c7d1ec48ded155d1b12f 100644
--- a/MuonSpectrometer/MuonDetDescr/MuonRegionSelector/src/MDT_RegionSelectorTable.cxx
+++ b/MuonSpectrometer/MuonDetDescr/MuonRegionSelector/src/MDT_RegionSelectorTable.cxx
@@ -16,7 +16,6 @@
 #include "MuonReadoutGeometry/TgcReadoutElement.h"
 #include "MuonReadoutGeometry/RpcReadoutSet.h"
 #include "MuonReadoutGeometry/MuonStation.h"
-#include "RegSelLUT/RegSelSiLUT.h"
 #include "MuonMDT_Cabling/MuonMDT_CablingSvc.h"
 
 #include <iostream>
diff --git a/MuonSpectrometer/MuonDetDescr/MuonRegionSelector/src/MM_RegionSelectorTable.cxx b/MuonSpectrometer/MuonDetDescr/MuonRegionSelector/src/MM_RegionSelectorTable.cxx
index 3e5567f828f1838d8c099b9f728a9974f8803128..f8a07484ce9c8d7c6b80c16b2b9d38f44d8f8f22 100644
--- a/MuonSpectrometer/MuonDetDescr/MuonRegionSelector/src/MM_RegionSelectorTable.cxx
+++ b/MuonSpectrometer/MuonDetDescr/MuonRegionSelector/src/MM_RegionSelectorTable.cxx
@@ -14,7 +14,6 @@ Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 #include "MuonAGDDDescription/MMDetectorHelper.h"
 #include "MuonAGDDDescription/MMDetectorDescription.h"
 #include "MuonReadoutGeometry/MuonStation.h"
-#include "RegSelLUT/RegSelSiLUT.h"
 
 #include <iostream>
 #include <vector>
diff --git a/MuonSpectrometer/MuonDetDescr/MuonRegionSelector/src/TGC_RegionSelectorTable.cxx b/MuonSpectrometer/MuonDetDescr/MuonRegionSelector/src/TGC_RegionSelectorTable.cxx
index 0bdbfe59bb1d924a970df774925323db8d980bbf..8edafe06a46ede6536364ed7c5d8bc5eb5d3862d 100644
--- a/MuonSpectrometer/MuonDetDescr/MuonRegionSelector/src/TGC_RegionSelectorTable.cxx
+++ b/MuonSpectrometer/MuonDetDescr/MuonRegionSelector/src/TGC_RegionSelectorTable.cxx
@@ -5,11 +5,6 @@
 #include "MuonRegionSelector/TGC_RegionSelectorTable.h"
 #include "CLHEP/Units/SystemOfUnits.h"
 #include "Identifier/IdentifierHash.h"
-
-#include <iostream>
-#include <fstream>
-#include <string>
-
 #include "RegSelLUT/StoreGateIDRS_ClassDEF.h" 
 #include "RegSelLUT/RegSelModule.h" 
 #include "RegSelLUT/RegSelSiLUT.h" 
@@ -17,12 +12,12 @@
 #include "MuonReadoutGeometry/MuonReadoutElement.h" 
 #include "MuonReadoutGeometry/TgcReadoutElement.h"
 #include "MuonReadoutGeometry/MuonStation.h"
-#include "RegSelLUT/RegSelSiLUT.h"
 #include "MuonTGC_Cabling/MuonTGC_CablingSvc.h"
 
 #include <iostream>
 #include <vector>
 #include <fstream>
+#include <string>
 
 TGC_RegionSelectorTable::TGC_RegionSelectorTable(const std::string& type, 
 						 const std::string& name,
diff --git a/MuonSpectrometer/MuonDetDescr/MuonRegionSelector/src/sTGC_RegSelCondAlg.cxx b/MuonSpectrometer/MuonDetDescr/MuonRegionSelector/src/sTGC_RegSelCondAlg.cxx
index 5a93655c60df951282b5f29985bed77c8788d517..65195f93172871d511de98e888a30cac28c1b766 100644
--- a/MuonSpectrometer/MuonDetDescr/MuonRegionSelector/src/sTGC_RegSelCondAlg.cxx
+++ b/MuonSpectrometer/MuonDetDescr/MuonRegionSelector/src/sTGC_RegSelCondAlg.cxx
@@ -32,12 +32,6 @@
 
 #include "MuonReadoutGeometry/MuonStation.h"
 
-#include "RegSelLUT/RegSelSiLUT.h"
-
-
-/// not implemented yet
-/// #include "Muon_sTGC_Cabling/MuonMM_CablingSvc.h"
-
 #include "sTGC_RegSelCondAlg.h"
 
 
diff --git a/MuonSpectrometer/MuonDetDescr/MuonRegionSelector/src/sTGC_RegionSelectorTable.cxx b/MuonSpectrometer/MuonDetDescr/MuonRegionSelector/src/sTGC_RegionSelectorTable.cxx
index 7c6f9604d8dc966c687753ad74b16764bad1fda5..2688f15610dfc4abf04e3b47cd8cc49eb86c1a17 100644
--- a/MuonSpectrometer/MuonDetDescr/MuonRegionSelector/src/sTGC_RegionSelectorTable.cxx
+++ b/MuonSpectrometer/MuonDetDescr/MuonRegionSelector/src/sTGC_RegionSelectorTable.cxx
@@ -19,7 +19,6 @@ Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 #include "MuonAGDDDescription/sTGCDetectorHelper.h"
 #include "MuonAGDDDescription/sTGCDetectorDescription.h"
 #include "MuonReadoutGeometry/MuonStation.h"
-#include "RegSelLUT/RegSelSiLUT.h"
 
 #include <iostream>
 #include <vector>
diff --git a/MuonSpectrometer/MuonDetDescr/MuonTrackingGeometry/src/MuonStationBuilder.cxx b/MuonSpectrometer/MuonDetDescr/MuonTrackingGeometry/src/MuonStationBuilder.cxx
index 3102d20636f672449a7cefc1f9dd0790f0cc43ec..030cf625672d0cd7ca87d3e61ae683c5a6d4bb5d 100644
--- a/MuonSpectrometer/MuonDetDescr/MuonTrackingGeometry/src/MuonStationBuilder.cxx
+++ b/MuonSpectrometer/MuonDetDescr/MuonTrackingGeometry/src/MuonStationBuilder.cxx
@@ -51,12 +51,6 @@
 #include "GeoModelKernel/GeoPgon.h"
 #include "GeoModelKernel/GeoPara.h"
 #include "GeoModelKernel/GeoVolumeCursor.h"
-#include "GeoModelKernel/GeoShape.h"
-#include "GeoModelKernel/GeoShapeShift.h"
-#include "GeoModelKernel/GeoShapeSubtraction.h"
-#include "GeoModelKernel/GeoBox.h"
-#include "GeoModelKernel/GeoTrd.h"
-#include "GeoModelKernel/GeoVolumeCursor.h"
 
 #include <map>
 #include <fstream>
diff --git a/MuonSpectrometer/MuonDigitization/MM_Digitization/MM_Digitization/MM_ElectronicsResponseSimulation.h b/MuonSpectrometer/MuonDigitization/MM_Digitization/MM_Digitization/MM_ElectronicsResponseSimulation.h
index 299bb496dd72c7f6c9049062a53add7218578116..d3ba9e0f0c8180d9cf955dc1b4c6c0c42217be93 100644
--- a/MuonSpectrometer/MuonDigitization/MM_Digitization/MM_Digitization/MM_ElectronicsResponseSimulation.h
+++ b/MuonSpectrometer/MuonDigitization/MM_Digitization/MM_Digitization/MM_ElectronicsResponseSimulation.h
@@ -33,7 +33,6 @@
 #include <iomanip>
 #include <utility>
 #include <string>
-#include <sstream>
 #include <sys/stat.h>
 #include <memory>
 
diff --git a/MuonSpectrometer/MuonDigitization/MM_Digitization/MM_Digitization/MM_StripsResponseSimulation.h b/MuonSpectrometer/MuonDigitization/MM_Digitization/MM_Digitization/MM_StripsResponseSimulation.h
index 5ffebecd268bf1ce52439f639c11ba99d5325dc9..4907d9584be1475a27b87cb1d732682730ebb242 100644
--- a/MuonSpectrometer/MuonDigitization/MM_Digitization/MM_Digitization/MM_StripsResponseSimulation.h
+++ b/MuonSpectrometer/MuonDigitization/MM_Digitization/MM_Digitization/MM_StripsResponseSimulation.h
@@ -49,7 +49,6 @@ Comments to be added here...
 #include <iomanip>
 #include <utility>
 #include <string>
-#include <sstream>
 #include <sys/stat.h>
 
 class MM_DigitToolInput;
diff --git a/MuonSpectrometer/MuonDigitization/MM_Digitization/src/MM_DigitizationTool.cxx b/MuonSpectrometer/MuonDigitization/MM_Digitization/src/MM_DigitizationTool.cxx
index 381bdf51888d06beea80eb5c5c315816e63ca304..b950d26cebf1133abc349c2e8c795728cee0d8b8 100644
--- a/MuonSpectrometer/MuonDigitization/MM_Digitization/src/MM_DigitizationTool.cxx
+++ b/MuonSpectrometer/MuonDigitization/MM_Digitization/src/MM_DigitizationTool.cxx
@@ -36,15 +36,8 @@
 #include "MuonSimEvent/MicromegasHitIdHelper.h"
 #include "TrkDetDescrUtils/GeometryStatics.h"
 #include "TrkEventPrimitives/LocalDirection.h"
-#include "TrkSurfaces/Surface.h"
 #include "PathResolver/PathResolver.h"
 #include "AIDA/IHistogram1D.h"
-#include "MuonReadoutGeometry/MuonDetectorManager.h"
-#include "MuonReadoutGeometry/MMReadoutElement.h"
-#include "MuonReadoutGeometry/MuonChannelDesign.h"
-#include "MuonSimEvent/MicromegasHitIdHelper.h"
-#include "TrkDetDescrUtils/GeometryStatics.h"
-#include "TrkEventPrimitives/LocalDirection.h"
 #include "TrkSurfaces/Surface.h"
 
 //Truth
diff --git a/MuonSpectrometer/MuonDigitization/RPC_Digitization/RPC_Digitization/RpcDigitizationTool.h b/MuonSpectrometer/MuonDigitization/RPC_Digitization/RPC_Digitization/RpcDigitizationTool.h
index fa8b740b62fc623c71c00b66fe38d3bb10d47a82..96cf97e60510de7758b58d3ac46e7495288e8c85 100644
--- a/MuonSpectrometer/MuonDigitization/RPC_Digitization/RPC_Digitization/RpcDigitizationTool.h
+++ b/MuonSpectrometer/MuonDigitization/RPC_Digitization/RPC_Digitization/RpcDigitizationTool.h
@@ -1,5 +1,3 @@
-/* -*- C++ -*- */
-
 /*
   Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
@@ -47,7 +45,6 @@
 #include "MuonSimEvent/RPCSimHit.h"
 #include "MuonSimEvent/RPCSimHitCollection.h"
 #include "HitManagement/TimedHitCollection.h"
-#include "MuonSimData/MuonSimDataCollection.h"
 #include "MuonCondInterface/IRPCConditionsSvc.h"
 #include "MuonCondData/RpcCondDbData.h"
 
diff --git a/MuonSpectrometer/MuonGeoModel/src/MuonDetectorTool.cxx b/MuonSpectrometer/MuonGeoModel/src/MuonDetectorTool.cxx
index 1e410e8a7ff1c0561fe3815abfa05806350e1584..29675909ddfa9bad94759cbcdafaf435cc9f5169 100755
--- a/MuonSpectrometer/MuonGeoModel/src/MuonDetectorTool.cxx
+++ b/MuonSpectrometer/MuonGeoModel/src/MuonDetectorTool.cxx
@@ -322,15 +322,8 @@ MuonDetectorTool::createFactory(MuonDetectorFactory001& theFactory) const
     bool isAmdcDb = false;
     if( dynamic_cast<AmdcDb*>(access) && m_amdcDb) {
       ATH_MSG_INFO("AmdcDb is used instead of RDBAccessSvc");
-      AmdcDb* p_access = dynamic_cast<AmdcDb*>(access);
       isAmdcDb = true;
-      if (p_access->InitializedSvc()) {
-        ATH_MSG_INFO("AmdcDb->InitializedSvc() is true") ;
-      }else{
-        ATH_MSG_INFO("AmdcDb->InitializedSvc() is false");
-        if(p_access->initialize()) ATH_MSG_INFO("Now it's initialized. Go ahead and use it!");
-        ATH_MSG_INFO("\t\t BUT PAY ATTENTION THE HARD WIRED ENVELOPE IS USED (see MuonDetectorFactory001.cxx)!!");
-      }
+      ATH_MSG_INFO("AmdcDb->InitializedSvc() is true");
     }
 
     theFactory.setDBAtlasVersion(AtlasVersion);
diff --git a/MuonSpectrometer/MuonGeoModelTest/test/CheckDifferences.cxx b/MuonSpectrometer/MuonGeoModelTest/test/CheckDifferences.cxx
index a16c23eac12c0f09fe11f4fbf4a454c1da878a59..5d867fd77717540768a5bf9aca12eb2c5060f306 100644
--- a/MuonSpectrometer/MuonGeoModelTest/test/CheckDifferences.cxx
+++ b/MuonSpectrometer/MuonGeoModelTest/test/CheckDifferences.cxx
@@ -1,24 +1,12 @@
 /*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
 #include <fstream>
 #include <iostream>
 #include <string>
-//use the method defined at:  http://annwm.lbl.gov/~leggett/Atlas/gcc-3.2.html
-// #ifdef HAVE_NEW_IOSTREAMS
-//   #include <sstream>
-// typedef std::istringstream my_isstream;
-
-// #else
-// #include <strstream>
-// typedef strstream my_isstream;
-// #endif
-
 #include <sstream>
 typedef std::istringstream my_isstream;
-
-
 #include <cmath>
 #include <cassert>
 
@@ -27,8 +15,6 @@ typedef std::istringstream my_isstream;
 //#define print_level 2 //info
 #define print_level 3 //summary
 
-//
-
 typedef enum iost {READ, WRITE} IOMaterialMap;
 
 class Record
diff --git a/MuonSpectrometer/MuonReconstruction/MuonDataPrep/CscClusterization/src/CscPeakThresholdClusterBuilderTool.cxx b/MuonSpectrometer/MuonReconstruction/MuonDataPrep/CscClusterization/src/CscPeakThresholdClusterBuilderTool.cxx
index 28befc2018edfa6f78acb5a2f822c2f969b0425d..c0bb9978b6ea31c0b71b18bf92fff6c332d506ac 100644
--- a/MuonSpectrometer/MuonReconstruction/MuonDataPrep/CscClusterization/src/CscPeakThresholdClusterBuilderTool.cxx
+++ b/MuonSpectrometer/MuonReconstruction/MuonDataPrep/CscClusterization/src/CscPeakThresholdClusterBuilderTool.cxx
@@ -2,7 +2,6 @@
   Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
-// CscPeakThresholdClusterBuilderTool.cxx
 #include "CscPeakThresholdClusterBuilderTool.h"
 
 #include <sstream>
diff --git a/MuonSpectrometer/MuonReconstruction/MuonDataPrep/CscClusterization/src/CscThresholdClusterBuilderTool.h b/MuonSpectrometer/MuonReconstruction/MuonDataPrep/CscClusterization/src/CscThresholdClusterBuilderTool.h
index 19f0b7b312d8411bbf26d24c6e0a81b196fae7b5..6e44b57442cafde86a9ade443f3bfe8f98fe2458 100644
--- a/MuonSpectrometer/MuonReconstruction/MuonDataPrep/CscClusterization/src/CscThresholdClusterBuilderTool.h
+++ b/MuonSpectrometer/MuonReconstruction/MuonDataPrep/CscClusterization/src/CscThresholdClusterBuilderTool.h
@@ -2,8 +2,6 @@
   Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
-// CscThresholdClusterBuilderTool.h
-
 #ifndef CscThresholdClusterBuilderTool_H
 #define CscThresholdClusterBuilderTool_H
 
@@ -62,7 +60,6 @@
 #include "MuonPrepRawData/MuonPrepDataContainer.h"
 #include "MuonReadoutGeometry/MuonDetectorManager.h"
 #include "CscClusterization/ICscClusterBuilder.h"
-#include "MuonPrepRawData/CscPrepDataContainer.h"
 
 class ICscCalibTool;
 class ICscStripFitter;
diff --git a/MuonSpectrometer/MuonReconstruction/MuonRecTools/MuonTGRecTools/src/MuonTGMeasurementTool.cxx b/MuonSpectrometer/MuonReconstruction/MuonRecTools/MuonTGRecTools/src/MuonTGMeasurementTool.cxx
index 79bf4a1412884d0493a8152e7cc8675b86d7c787..21389582bf0900d54ab4280486b0ed5bba9d209f 100644
--- a/MuonSpectrometer/MuonReconstruction/MuonRecTools/MuonTGRecTools/src/MuonTGMeasurementTool.cxx
+++ b/MuonSpectrometer/MuonReconstruction/MuonRecTools/MuonTGRecTools/src/MuonTGMeasurementTool.cxx
@@ -8,7 +8,6 @@
 #include "TrkEventPrimitives/ParamDefs.h"
 #include "TrkEventPrimitives/ParticleHypothesis.h"
 #include "TrkEventPrimitives/FitQualityOnSurface.h"
-#include "TrkParameters/TrackParameters.h"
 #include "TrkMeasurementBase/MeasurementBase.h"
 #include "TrkSurfaces/DistanceSolution.h"
 #include "TrkGeometry/Layer.h"
diff --git a/MuonSpectrometer/MuonReconstruction/MuonRecUtils/MuonLayerHough/src/LayerAnalysis.cxx b/MuonSpectrometer/MuonReconstruction/MuonRecUtils/MuonLayerHough/src/LayerAnalysis.cxx
index 832a3480b4a97e2ecdca24587327d3c9618b3b4f..e7d857f7f072ad975e4c28c2423b28cd94fbf706 100644
--- a/MuonSpectrometer/MuonReconstruction/MuonRecUtils/MuonLayerHough/src/LayerAnalysis.cxx
+++ b/MuonSpectrometer/MuonReconstruction/MuonRecUtils/MuonLayerHough/src/LayerAnalysis.cxx
@@ -18,7 +18,6 @@
 #include "TH1F.h"
 #include "TH2F.h"
 #include "TFile.h"
-#include "TTree.h"
 #include "TStyle.h"
 #include "TVector3.h"
 #include <set>
diff --git a/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackFinderTools/src/MuonSegmentRegionRecoveryTool.h b/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackFinderTools/src/MuonSegmentRegionRecoveryTool.h
index 08a3ba9463769678113f2cf71a57ed4a9be4587d..cc99d1a403c46cc1fac5df970a819ae71108decd 100644
--- a/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackFinderTools/src/MuonSegmentRegionRecoveryTool.h
+++ b/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackFinderTools/src/MuonSegmentRegionRecoveryTool.h
@@ -16,7 +16,6 @@
 #include "MuonRecHelperTools/IMuonEDMHelperSvc.h"
 #include "MuonRecToolInterfaces/IMuonHoleRecoveryTool.h"
 #include "MuonRecToolInterfaces/IMuonTrackSegmentMatchingTool.h"
-#include "MuonRecHelperTools/IMuonEDMHelperSvc.h"
 #include "MuonRecHelperTools/MuonEDMPrinterTool.h"
 #include "MuonRecToolInterfaces/IMdtDriftCircleOnTrackCreator.h"
 #include "MuonRecToolInterfaces/IMuonClusterOnTrackCreator.h"
@@ -30,7 +29,6 @@
 #include "MuidInterfaces/ICombinedMuonTrackBuilder.h"
 #include "MuonChamberHoleRecoveryTool.h"
 #include "MuonRecToolInterfaces/IMuonSeededSegmentFinder.h"
-#include "MuonChamberHoleRecoveryTool.h"
 #include "IRegionSelector/IRegSelTool.h"
 
 #include "AthenaBaseComps/AthAlgTool.h"
diff --git a/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackFinderTools/src/MuonTrackCleaner.h b/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackFinderTools/src/MuonTrackCleaner.h
index 2c9ca258f273e3f80e4f9272b1ff4b3eb8cabdb7..f6e86b22a97745d70c6eafcd6ba088b2abbcfdbe 100644
--- a/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackFinderTools/src/MuonTrackCleaner.h
+++ b/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackFinderTools/src/MuonTrackCleaner.h
@@ -17,7 +17,6 @@
 #include "MuonRecToolInterfaces/IMuonCompetingClustersOnTrackCreator.h"
 #include "MuonIdHelpers/IMuonIdHelperSvc.h"
 #include "MuonRecHelperTools/MuonEDMPrinterTool.h"
-#include "MuonRecHelperTools/IMuonEDMHelperSvc.h"
 #include "TrkFitterInterfaces/ITrackFitter.h"
 #include "TrkExInterfaces/IExtrapolator.h"
 #include "TrkToolInterfaces/IResidualPullCalculator.h"
diff --git a/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackFinderTools/src/MuonTrackExtrapolationTool.cxx b/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackFinderTools/src/MuonTrackExtrapolationTool.cxx
index 37f5ba4f9d193830a8ffbd9a73dc55527101cc3f..3fb1331cb3d3cfef22f2ea6c09cc4ecee347762e 100644
--- a/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackFinderTools/src/MuonTrackExtrapolationTool.cxx
+++ b/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackFinderTools/src/MuonTrackExtrapolationTool.cxx
@@ -5,18 +5,12 @@
 #include "MuonTrackExtrapolationTool.h"
 
 #include "MuonTrackMakerUtils/MuonTSOSHelper.h"
-
 #include "TrkMeasurementBase/MeasurementBase.h"
 #include "TrkPseudoMeasurementOnTrack/PseudoMeasurementOnTrack.h"
-
 #include "TrkSurfaces/PerigeeSurface.h"
 #include "TrkSurfaces/DistanceSolution.h"
-
 #include "TrkTrack/TrackStateOnSurface.h"
 #include "TrkTrack/Track.h"
-
-#include "TrkSurfaces/PerigeeSurface.h"
-
 #include "TrkGeometry/TrackingVolume.h"
 #include "TrkGeometry/TrackingGeometry.h"
 #include "TrkGeometry/MagneticFieldProperties.h"
diff --git a/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackSteeringTools/src/MooCandidateMatchingTool.h b/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackSteeringTools/src/MooCandidateMatchingTool.h
index 0126b69d8ec008fa222c31e063e61f10442ddc55..63a85c9d75ab362cdb1cc6a090f78a14cab118ce 100644
--- a/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackSteeringTools/src/MooCandidateMatchingTool.h
+++ b/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackSteeringTools/src/MooCandidateMatchingTool.h
@@ -15,7 +15,6 @@
 #include "CxxUtils/checker_macros.h"
 #include "EventPrimitives/EventPrimitives.h"
 #include "GeoPrimitives/GeoPrimitives.h"
-// For magneticfield
 #include "MagFieldConditions/AtlasFieldCacheCondObj.h"
 #include "MuonRecToolInterfaces/IMuonTrackSegmentMatchingTool.h"
 #include "MuonIdHelpers/IMuonIdHelperSvc.h"
@@ -23,7 +22,6 @@
 #include "MuonRecHelperTools/MuonEDMPrinterTool.h"
 #include "MuonTrackMakerUtils/SortMeasurementsByPosition.h"
 #include "MuonSegmentMakerToolInterfaces/IMuonSegmentMatchingTool.h"
-#include "MuonRecToolInterfaces/IMuonTrackSegmentMatchingTool.h"
 #include "TrkExInterfaces/IExtrapolator.h"
 #include "MuPatCandidateTool.h"
 
diff --git a/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackSteeringTools/src/MuPatCandidateTool.h b/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackSteeringTools/src/MuPatCandidateTool.h
index cd3457a486d952bb000f11f7722477f7a58c7134..d4943d12c82b8d46b7fed6a05bb36bb60b735beb 100644
--- a/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackSteeringTools/src/MuPatCandidateTool.h
+++ b/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackSteeringTools/src/MuPatCandidateTool.h
@@ -8,7 +8,6 @@
 #include "AthenaBaseComps/AthAlgTool.h"
 
 #include "AthenaKernel/SlotSpecificObj.h"
-#include "MuonRecHelperTools/IMuonEDMHelperSvc.h"
 #include "MuonTrackMakerUtils/MuonTrackMakerStlTools.h"
 #include "TrkParameters/TrackParameters.h"
 
diff --git a/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackSteeringTools/src/MuonTrackSteering.cxx b/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackSteeringTools/src/MuonTrackSteering.cxx
index c94eb116f07426ee1924f0c79c70427bd3d71c07..b44dc2b6880a498062fd74561c9c15126246f810 100644
--- a/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackSteeringTools/src/MuonTrackSteering.cxx
+++ b/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackSteeringTools/src/MuonTrackSteering.cxx
@@ -2,19 +2,15 @@
   Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
-
 #include "MuonTrackSteering.h"
-#include "MuonTrackSteeringStrategy.h"
 
+#include "MuonTrackSteeringStrategy.h"
 #include "MuonSegment/MuonSegment.h"
 #include "MuonSegment/MuonSegmentCombination.h"
-
 #include "MuPatSegment.h"
 #include "MuPatCandidateBase.h"
 #include "MuPatTrack.h"
 #include "MuonTrackMakerUtils/MuonTrackMakerStlTools.h"
-
-#include "MuonSegment/MuonSegment.h"
 #include "TrkSegment/SegmentCollection.h"
 #include "TrkTrack/TrackCollection.h"
 #include "TrkParameters/TrackParameters.h"
diff --git a/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackSteeringTools/src/MuonTrackSteering.h b/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackSteeringTools/src/MuonTrackSteering.h
index 2c3fd2efb20ccb007e6f50a62a73e7441e7e8334..1907efe70c64d95b8239049f19bbdccfb2685d3f 100644
--- a/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackSteeringTools/src/MuonTrackSteering.h
+++ b/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackSteeringTools/src/MuonTrackSteering.h
@@ -18,8 +18,6 @@
 
 #include "TrkParameters/TrackParameters.h"
 #include "TrkTrack/TrackCollection.h"
-#include "MuonRecHelperTools/IMuonEDMHelperSvc.h"
-
 #include "MuonRecHelperTools/IMuonEDMHelperSvc.h"
 #include "MuonRecHelperTools/MuonEDMPrinterTool.h"
 #include "MuonSegmentMakerToolInterfaces/IMuonSegmentInOverlapResolvingTool.h"
diff --git a/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerUtils/MuonTrackMakerUtils/TrackStateOnSurfaceComparisonFunction.h b/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerUtils/MuonTrackMakerUtils/TrackStateOnSurfaceComparisonFunction.h
index afcd86070c29b3f17ab7b18f3e7ed34d4b5b6ba4..30da57d158d87c1e4873d2322bd41d1297c96662 100644
--- a/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerUtils/MuonTrackMakerUtils/TrackStateOnSurfaceComparisonFunction.h
+++ b/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerUtils/MuonTrackMakerUtils/TrackStateOnSurfaceComparisonFunction.h
@@ -1,5 +1,5 @@
 /*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
 ///////////////////////////////////////////////////////////////////
@@ -23,7 +23,6 @@
 #include "TrkSurfaces/DiscSurface.h"
 // extra-maths for cylinder intersections
 #include "TrkSurfaces/CylinderSurface.h"
-#include "TrkSurfaces/CylinderSurface.h"
 #include "TrkSurfaces/SurfaceBounds.h"
 #include "TrkSurfaces/RectangleBounds.h"
 #include "TrkSurfaces/TrapezoidBounds.h"
diff --git a/MuonSpectrometer/MuonTruthAlgs/src/DetailedMuonPatternTruthBuilder.cxx b/MuonSpectrometer/MuonTruthAlgs/src/DetailedMuonPatternTruthBuilder.cxx
index 5fea6adda781d26e674832d40cbd1a2d1b5abc55..1d8ec4f7641c603df190c4dee24eaf624161b34a 100755
--- a/MuonSpectrometer/MuonTruthAlgs/src/DetailedMuonPatternTruthBuilder.cxx
+++ b/MuonSpectrometer/MuonTruthAlgs/src/DetailedMuonPatternTruthBuilder.cxx
@@ -29,25 +29,17 @@
 #include "MuonPrepRawData/MMPrepData.h"
 #include "MuonPrepRawData/sTgcPrepData.h"
 #include "TrkParameters/TrackParameters.h" 
-#include "TrkTrack/Track.h"
 #include "TrkFitterInterfaces/ITrackFitter.h"
 #include "TrkExInterfaces/IExtrapolator.h"
 #include "TrkSurfaces/PerigeeSurface.h"
 #include "TrkDetElementBase/TrkDetElementBase.h"
-#include "TrkPrepRawData/PrepRawData.h"
-#include "TrkMeasurementBase/MeasurementBase.h"
-#include "TrkRIO_OnTrack/RIO_OnTrack.h"
 #include "TrkPseudoMeasurementOnTrack/PseudoMeasurementOnTrack.h"
 #include "TrackRecord/TrackRecordCollection.h"
 #include "MuonRIO_OnTrack/MdtDriftCircleOnTrack.h"
 #include "MuonRIO_OnTrack/MuonClusterOnTrack.h"
 #include "MuonRIO_OnTrack/MMClusterOnTrack.h"
-#include "MuonPrepRawData/MdtPrepData.h"
-#include "MuonPrepRawData/MMPrepData.h"
-#include "MuonPrepRawData/sTgcPrepData.h"
 #include "MuonPrepRawData/MuonCluster.h"
 #include "MuonSegment/MuonSegment.h"
-#include "AtlasHepMC/GenParticle.h"
 #include "MuonRecHelperTools/MuonEDMPrinterTool.h"
 #include "MuonRecHelperTools/IMuonEDMHelperSvc.h"
 #include "MuonRecToolInterfaces/IMdtDriftCircleOnTrackCreator.h"
@@ -415,7 +407,7 @@ void DetailedMuonPatternTruthBuilder::addTrack(DetailedMuonPatternTruthCollectio
     const HepMC::GenParticle *current = link.cptr();
     
     do {
-      HepMcParticleLink curlink(current->barcode(), eventIndex);
+      HepMcParticleLink curlink(HepMC::barcode(current), eventIndex);
 
       // remove the current particle from the list of particles to consider (if it is still there)
       seeds.erase(curlink);
@@ -477,7 +469,7 @@ void DetailedMuonPatternTruthBuilder::addTrack(DetailedMuonPatternTruthCollectio
     TruthTrajectory traj;
     traj.reserve(2); // The average size is about 1.05.  Hardcode that instead of using slow list::size().
     for(Sprout::const_iterator ppart=s->second.begin(); ppart!=s->second.end(); ppart++) {
-      traj.push_back(HepMcParticleLink((*ppart)->barcode(), s->first.eventIndex()));
+      traj.push_back(HepMcParticleLink(HepMC::barcode(*ppart), s->first.eventIndex()));
     }
 
     // Count PRDs on the TruthTrajectory
@@ -966,7 +958,7 @@ void DetailedMuonPatternTruthBuilder::addDetailedTrackTruth(std::vector<Detailed
     TruthTrajectory traj;
     traj.reserve(2); // The average size is about 1.05.  Hardcode that instead of using slow list::size().
     for(Sprout::const_iterator ppart=s->second.begin(); ppart!=s->second.end(); ppart++) {
-      traj.push_back(HepMcParticleLink((*ppart)->barcode(), s->first.eventIndex()));
+      traj.push_back(HepMcParticleLink(HepMC::barcode(*ppart), s->first.eventIndex()));
     }
 
     // Count PRDs on the TruthTrajectory
@@ -1214,7 +1206,7 @@ void DetailedMuonPatternTruthBuilder::addDetailedTrackTruthFromSegment(std::vect
     TruthTrajectory traj;
     traj.reserve(2); // The average size is about 1.05.  Hardcode that instead of using slow list::size().
     for(Sprout::const_iterator ppart=s->second.begin(); ppart!=s->second.end(); ppart++) {
-      traj.push_back(HepMcParticleLink((*ppart)->barcode(), s->first.eventIndex()));
+      traj.push_back(HepMcParticleLink(HepMC::barcode(*ppart), s->first.eventIndex()));
     }
 
     // Count PRDs on the TruthTrajectory
diff --git a/MuonSpectrometer/MuonTruthAlgs/src/DetailedMuonPatternTruthBuilder.h b/MuonSpectrometer/MuonTruthAlgs/src/DetailedMuonPatternTruthBuilder.h
index ed21b33141d71982c59ec873ab6ce087a4fc2bfe..104b9073ccc7eba661092581757e9e3fdf51514a 100755
--- a/MuonSpectrometer/MuonTruthAlgs/src/DetailedMuonPatternTruthBuilder.h
+++ b/MuonSpectrometer/MuonTruthAlgs/src/DetailedMuonPatternTruthBuilder.h
@@ -19,7 +19,6 @@
 #include "MuonPattern/MuonPatternCombinationCollection.h"
 #include "MuonPattern/DetailedMuonPatternTruthCollection.h"
 #include "TrkToolInterfaces/ITruthTrajectoryBuilder.h"
-#include "TrkTruthData/DetailedTrackTruth.h"
 #include "TrkTruthData/DetailedSegmentTruth.h"
 #include "TrkTruthData/SubDetHitStatistics.h"
 #include "TrkTruthData/DetailedTrackTruthCollection.h"
diff --git a/MuonSpectrometer/MuonTruthAlgs/src/MuonDecayTruthTrajectoryBuilder.cxx b/MuonSpectrometer/MuonTruthAlgs/src/MuonDecayTruthTrajectoryBuilder.cxx
index cdbc6aa847c60ddd8b5d54d884d084172d4e4994..ce4a24ad9692a14c605a6d3fe4285350d7874d12 100644
--- a/MuonSpectrometer/MuonTruthAlgs/src/MuonDecayTruthTrajectoryBuilder.cxx
+++ b/MuonSpectrometer/MuonTruthAlgs/src/MuonDecayTruthTrajectoryBuilder.cxx
@@ -42,7 +42,7 @@ namespace Muon {
     result->clear();
     if(input) {
       const HepMC::GenParticle *next(0), *current = input;
-      ATH_MSG_DEBUG( " New TruthTrajectory: input: barcode " << input->barcode() << " PDG id " << input->pdg_id());
+      ATH_MSG_DEBUG( " New TruthTrajectory: input: barcode " << HepMC::barcode(input) << " PDG id " << input->pdg_id());
 
       // Extend trajectory outwards.  The last particle should go at [0]
       // in the TruthTrajectory, so we need to use a tmp storage while
@@ -74,7 +74,7 @@ namespace Muon {
       for( ;pit!=pit_end;++pit ){
 	const HepMC::GenParticle& par = *pit->cptr(); 
 	if(msgLvl(MSG::DEBUG))  {
-	  msg(MSG::DEBUG) << " PDG ID " << par.pdg_id() << " barcode: " << par.barcode() << " pt: " << par.momentum().perp();
+	  msg(MSG::DEBUG) << " PDG ID " << par.pdg_id() << " barcode: " << HepMC::barcode(par) << " pt: " << par.momentum().perp();
 	  if( par.production_vertex() ) msg(MSG::DEBUG) << " vertices prod: r " << par.production_vertex()->position().perp() 
 							<< " z " << par.production_vertex()->position().z();
 	  if( par.end_vertex() ) msg(MSG::DEBUG) << " end: r " << par.end_vertex()->position().perp() << " z " << par.end_vertex()->position().z();
diff --git a/MuonSpectrometer/MuonTruthAlgs/src/MuonTrackTruthTool.cxx b/MuonSpectrometer/MuonTruthAlgs/src/MuonTrackTruthTool.cxx
index b256dcb13bb2ded4b988355698aad67bdda24cfe..c343b108ef4b51159869e2aea340876b05d83bca 100644
--- a/MuonSpectrometer/MuonTruthAlgs/src/MuonTrackTruthTool.cxx
+++ b/MuonSpectrometer/MuonTruthAlgs/src/MuonTrackTruthTool.cxx
@@ -710,9 +710,9 @@ namespace Muon {
     bool foundBC = false;
     for( auto pit: traj){
       if (!pit) continue;
-      if (pit->barcode()==barcodeIn || foundBC){
+      if (HepMC::barcode(pit)==barcodeIn || foundBC){
         foundBC = true;
-        ATH_MSG_DEBUG( "getMother() : pdg = " << pit->pdg_id() << " barcode = " << pit->barcode () );
+        ATH_MSG_DEBUG( "getMother() : pdg = " << pit->pdg_id() << " barcode = " << HepMC::barcode (pit) );
         if( pit->pdg_id() != pdgFinal ) { // the first case a track had a different flavour
           if (pit->pdg_id()==pdgFinal) ATH_MSG_ERROR( "Wrong pdgId association in getMother() " );
           return pit;
@@ -727,7 +727,7 @@ namespace Muon {
     bool foundBC = false;
     for(auto pit: traj){
       if (!pit) continue;
-      if (pit->barcode()==barcodeIn || foundBC){
+      if (HepMC::barcode(pit)==barcodeIn || foundBC){
         foundBC = true;
         if( pit->status() > 1 ) {//first non final state particle
           return pit;
@@ -747,7 +747,7 @@ namespace Muon {
     double ePrev = 0.;
     const HepMC::GenParticle* theFirst=nullptr;
     for(auto pit=traj.begin();pit!=traj.end();++pit){
-      if ((*pit)->barcode()==barcodeIn || foundBC){
+      if (HepMC::barcode(*pit)==barcodeIn || foundBC){
         if (!foundBC){
           foundBC = true;
           theFirst = *pit;
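
The hunks in DetailedMuonPatternTruthBuilder.cxx, MuonDecayTruthTrajectoryBuilder.cxx and MuonTrackTruthTool.cxx above replace direct barcode() member calls on HepMC::GenParticle with the HepMC::barcode(...) helper, which accepts either a pointer or a reference, so the truth-matching code no longer depends on how the installed HepMC version exposes barcodes. A minimal sketch of the pattern, assuming only types that already appear in these files; the helper name makeTruthLink, the GeneratorObjects/HepMcParticleLink.h header path and the eventIndex type are illustrative assumptions, not code from this patch:

    // Sketch only: header paths and the eventIndex type are assumptions.
    #include "AtlasHepMC/GenParticle.h"
    #include "GeneratorObjects/HepMcParticleLink.h"

    HepMcParticleLink makeTruthLink(const HepMC::GenParticle* particle,
                                    unsigned int eventIndex)
    {
      // HepMC::barcode(...) replaces particle->barcode() from the pre-patch code.
      const int barcode = HepMC::barcode(particle);
      return HepMcParticleLink(barcode, eventIndex);
    }

The two-argument HepMcParticleLink construction itself appears verbatim in the hunks above; only the surrounding helper is invented for illustration.
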
diff --git a/MuonSpectrometer/MuonValidation/MuonDQA/MuonRawDataMonitoring/MMRawDataMonitoring/MMRawDataMonitoring/MMRawDataMonAlg.h b/MuonSpectrometer/MuonValidation/MuonDQA/MuonRawDataMonitoring/MMRawDataMonitoring/MMRawDataMonitoring/MMRawDataMonAlg.h
index d63ec4b50663df5cb625041bed78081925517d40..03eaa12834ec80122c5d1739eaf6c0c21783e9ff 100755
--- a/MuonSpectrometer/MuonValidation/MuonDQA/MuonRawDataMonitoring/MMRawDataMonitoring/MMRawDataMonitoring/MMRawDataMonAlg.h
+++ b/MuonSpectrometer/MuonValidation/MuonDQA/MuonRawDataMonitoring/MMRawDataMonitoring/MMRawDataMonitoring/MMRawDataMonAlg.h
@@ -28,7 +28,6 @@
 #include "AthenaMonitoring/DQAtlasReadyFilterTool.h"
 #include "xAODTrigger/MuonRoIContainer.h"
 #include "xAODMuon/MuonContainer.h"
-#include "MuonPrepRawData/MuonPrepDataContainer.h"
 #include "MuonPrepRawData/MMPrepDataContainer.h"
 #include "MuonPrepRawData/MMPrepData.h"
 #include "StoreGate/ReadHandleKey.h"
diff --git a/MuonSpectrometer/MuonValidation/MuonDQA/MuonRawDataMonitoring/MdtVsTgcRawDataMonitoring/MdtVsTgcRawDataMonitoring/MdtVsTgcRawDataValAlg.h b/MuonSpectrometer/MuonValidation/MuonDQA/MuonRawDataMonitoring/MdtVsTgcRawDataMonitoring/MdtVsTgcRawDataMonitoring/MdtVsTgcRawDataValAlg.h
index b8486bf76531d505f63c424a7a7ec82e792f0145..41a4450e7fcb4eb5c83ce1340b09932b2c6cd9b3 100644
--- a/MuonSpectrometer/MuonValidation/MuonDQA/MuonRawDataMonitoring/MdtVsTgcRawDataMonitoring/MdtVsTgcRawDataMonitoring/MdtVsTgcRawDataValAlg.h
+++ b/MuonSpectrometer/MuonValidation/MuonDQA/MuonRawDataMonitoring/MdtVsTgcRawDataMonitoring/MdtVsTgcRawDataMonitoring/MdtVsTgcRawDataValAlg.h
@@ -24,7 +24,6 @@
 #include "MuonReadoutGeometry/TgcReadoutElement.h"
 #include "MuonTrigCoinData/TgcCoinData.h"
 #include "MuonTrigCoinData/TgcCoinDataContainer.h"
-#include "MuonReadoutGeometry/MuonDetectorManager.h"
 #include "xAODMuon/MuonSegmentContainer.h"
 #include "StoreGate/ReadHandleKey.h"
 #include "MuonPrepRawData/MdtPrepDataContainer.h"
diff --git a/MuonSpectrometer/MuonValidation/MuonPRDTest/macros/NSWHitsTree.h b/MuonSpectrometer/MuonValidation/MuonPRDTest/macros/NSWHitsTree.h
index 6059b7113b175bd73c9633f2e6aa5dafd21e74a3..148919f1b098923de2a7ab7a99d6d2c92a6eacf2 100644
--- a/MuonSpectrometer/MuonValidation/MuonPRDTest/macros/NSWHitsTree.h
+++ b/MuonSpectrometer/MuonValidation/MuonPRDTest/macros/NSWHitsTree.h
@@ -1,5 +1,5 @@
 /*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
 //////////////////////////////////////////////////////////
@@ -18,8 +18,6 @@
 
 // Header file for the classes stored in the TTree if any.
 #include <vector>
-#include <vector>
-#include <vector>
 
 // Fixed size dimensions of array or collections stored in the TTree if any.
 
diff --git a/MuonSpectrometer/MuonValidation/MuonPRDTest/macros/NSWMatching_offline/NSWstudies.h b/MuonSpectrometer/MuonValidation/MuonPRDTest/macros/NSWMatching_offline/NSWstudies.h
index cf996ee2678a309bf6f4afd5daa04cf4ffeb1af8..a76d356c14190a77b11d14247bec7bd57b5b90f4 100644
--- a/MuonSpectrometer/MuonValidation/MuonPRDTest/macros/NSWMatching_offline/NSWstudies.h
+++ b/MuonSpectrometer/MuonValidation/MuonPRDTest/macros/NSWMatching_offline/NSWstudies.h
@@ -1,5 +1,5 @@
 /*
-  Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
 //////////////////////////////////////////////////////////
@@ -17,12 +17,7 @@
 #include <TFile.h>
 
 // Header file for the classes stored in the TTree if any.
-#include "vector"
-#include "vector"
-#include "vector"
-#include "vector"
-#include "vector"
-#include "vector"
+#include <vector>
 
 class NSWstudies {
 public :
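
The NSWstudies.h hunk above collapses six copies of #include "vector" into a single #include <vector>. Besides removing the duplication, the angle-bracket form is the conventional spelling for standard-library headers: a quoted include typically makes the preprocessor search the including file's directory before the system paths, which is only wanted for project-local headers. A minimal illustration (the variable name is hypothetical):

    #include <vector>   // standard headers: angle brackets, included once

    // Hypothetical branch buffer, just to show the single include suffices.
    std::vector<float> stripCharges;
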
diff --git a/MuonSpectrometer/MuonValidation/MuonRecValidation/MuonTrackPerformance/MuonTrackPerformance/MuonTrackPerformanceAlg.h b/MuonSpectrometer/MuonValidation/MuonRecValidation/MuonTrackPerformance/MuonTrackPerformance/MuonTrackPerformanceAlg.h
index 29f9ff37fd9ce1d5f6614ad5ec20b36feb987436..8a51e1f90a7cdb4687af9871733bbce7a111b1cf 100644
--- a/MuonSpectrometer/MuonValidation/MuonRecValidation/MuonTrackPerformance/MuonTrackPerformance/MuonTrackPerformanceAlg.h
+++ b/MuonSpectrometer/MuonValidation/MuonRecValidation/MuonTrackPerformance/MuonTrackPerformance/MuonTrackPerformanceAlg.h
@@ -22,7 +22,6 @@
 #include "MuonSimData/MuonSimDataCollection.h"
 #include "MuonSimData/CscSimDataCollection.h"
 #include "TrackRecord/TrackRecordCollection.h"
-#include "TrkTrack/TrackCollection.h"
 #include "StoreGate/ReadHandleKey.h"
 #include "GeneratorObjects/McEventCollection.h"
 #include "MuonRecHelperTools/IMuonEDMHelperSvc.h"
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/CMakeLists.txt b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/CMakeLists.txt
deleted file mode 100644
index c9ddae25d64c4d798a9267e994de1d78df12cd96..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/CMakeLists.txt
+++ /dev/null
@@ -1,23 +0,0 @@
-################################################################################
-# Package: AnalysisExamples
-################################################################################
-
-# Declare the package name:
-atlas_subdir( AnalysisExamples )
-
-# External dependencies:
-find_package( CLHEP )
-find_package( HepPDT )
-find_package( ROOT COMPONENTS Core Tree MathCore Hist RIO pthread MathMore Minuit Minuit2 Matrix Physics HistPainter Rint )
-
-# Component(s) in the package:
-atlas_add_component( AnalysisExamples
-                     src/*.cxx
-                     src/components/*.cxx
-                     INCLUDE_DIRS ${ROOT_INCLUDE_DIRS} ${HEPPDT_INCLUDE_DIRS} ${CLHEP_INCLUDE_DIRS} 
-                     LINK_LIBRARIES ${ROOT_LIBRARIES} ${HEPPDT_LIBRARIES} ${CLHEP_LIBRARIES} AtlasHepMCLib CaloEvent AthenaBaseComps AthenaKernel Navigation SGTools StoreGateLib SGtests NavFourMom xAODEventInfo xAODTracking GaudiKernel TruthHelper GeneratorObjects AthAnalysisToolsLib AnalysisUtilsLib ParticleEvent AnalysisTriggerEvent JetTagEvent JetTagInfo MuonIDEvent McParticleEvent McParticleKernel JetEvent muonEvent Particle ParticleTruth RecoToolInterfaces egammaEvent VxVertex TrkVKalVrtFitterLib TrigDecisionToolLib TrigInDetEvent TrigParticle TrigSteeringEvent CompositeParticleEvent )
-
-# Install files from the package:
-atlas_install_python_modules( python/*.py )
-atlas_install_joboptions( share/*.py )
-
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/README b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/README
deleted file mode 100644
index d917b4ebe4066b92291121182c17596521744cb7..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/README
+++ /dev/null
@@ -1 +0,0 @@
-this package is an example of using the new CP Tools for analysis 
\ No newline at end of file
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/doc/packagedoc.h b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/doc/packagedoc.h
deleted file mode 100644
index 364a28c8a8aed6b2a5b8eb38480b2e19a6d89111..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/doc/packagedoc.h
+++ /dev/null
@@ -1,216 +0,0 @@
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-/**
-
-@page AnalysisExamples_page Analysis Examples
-
- - In this package, we have a few complete analysis codes to show the user how
-to do analyses on ESD and AOD.
-
- - The user analysis code should be developed in the package PhysicsAnalysis/AnalysisCommon/UserAnalysis
-The user should check out the package UserAnalysis and implement his/her analysis code there. In the
-package UserAnalysis, we have a very simple analysis code: AnalysisSkeleton. But it does not scale.
-The user may follow the structure of code development in this package AnalysisExamples, or not. In any
-case however, the user should make his analysis code modular and scalable.
-
- - DoBJet: BJet Analysis code 
-The user is free to modify this as he/she sees fit. This algorithm fills the container of user pre selected 
-B-jets in store gate and fills an ntuple or histograms. The user must override the following 
-methods, 
-otherwise the base class methods will be called - they do nothing:
-
-         *   virtual StatusCode doPreSelection();
-         *   virtual StatusCode initializeParticle();
-         *   virtual StatusCode finalizeParticle();
-
- - Job options: DoBJet_jobOptions.py. The cuts for b jet  selection are defined here, should be 
-modified for the particular analysis
-
- - DoElectron: Electron Analysis code
-The user is free to modify this as he/she sees fit. This algorithm fills the container of user pre 
-selected electrons in store gate and fills an ntuple or histograms. The user must override the 
-following methods, otherwise the base class methods will be called - they do nothing:
-
-         *   virtual StatusCode doPreSelection();
-         *   virtual StatusCode initializeParticle();
-         *   virtual StatusCode finalizeParticle();
-
- - Job options: DoElectron_jobOptions.py. The cuts for electron selection are defined here, should be 
-modified for the particular analysis
-
- - DoMuon: Muon analysis code
-The user is free to modify this as he/she sees fit. This algorithm fills the container of user pre 
-selected muons in store gate and fills an ntuple or histograms. The user must override the 
-following methods, otherwise the base class methods will be called - they do nothing:
-
-         *   virtual StatusCode doPreSelection();
-         *   virtual StatusCode initializeParticle();
-         *   virtual StatusCode finalizeParticle();
-
- - Job options: DoMuon_jobOptions.py. The cuts for muon selection are defined here, should be modified 
-for the particular analysis
-
- - DoParticle: Particle Analysis base class, should never be instantiated!
-
- - DoParticleJet: ParticleJet analysis code
-The user should modify this as he/she sees fit. This algorithm fills the container of user pre selected 
-jets in store gate and fills an ntuple or histograms. The user must override the following 
-methods, otherwise the base class methods will be called - they do nothing:
-
-         *   virtual StatusCode doPreSelection();
-         *   virtual StatusCode initializeParticle();
-         *   virtual StatusCode finalizeParticle();
-
- - Job options: DoParticleJet_jobOptions.py. The cuts for q, g jet selection are defined here, should 
-be modified for the particular analysis
-
- - DoPhoton: Photon Analysis code
-The user is free to modify this as he/she sees fit. This algorithm fills the container of user pre 
-selected photons in store gate and fills an ntuple or histograms. The user must override the 
-following methods, otherwise the base class methods will be called - they do nothing:
-
-         *   virtual StatusCode doPreSelection();
-         *   virtual StatusCode initializeParticle();
-         *   virtual StatusCode finalizeParticle();
-
- - Job options: DoPhoton_jobOptions.py. The cuts for photon selection are defined here, should be 
-modified for the particular analysis
-
- - DoTauJet:  TauJet analysis code
-The user should modify this as he/she sees fit. This algorithm fills the container of user pre selected 
-tau-jets in store gate and fills an ntuple or histograms. The user must override the following 
-methods, 
-otherwise the base class methods will be called - they do nothing:
-
-         *   virtual StatusCode doPreSelection();
-         *   virtual StatusCode initializeParticle();
-         *   virtual StatusCode finalizeParticle();
-
- - Job options: DoTauJet_jobOptions.py. The cuts for tau jet selection are defined here, should be 
-modified for the particular analysis
-
- - Zll analysis Example (l = electron, muon, tau)
-An analysis algorithm to demonstrate Z to leptons analysis. It simply reads the containers of the pre 
-selected particles from StoreGate and does Z to lepton reconstruction. The pre selected particles 
-are recorded in the transient event store (StoreGate) by the DoParticle algorithms described above. It 
-therefore runs the DoParticle algorithms described above so that the pre selected (or selected) 
-particles are available in the event store beforehand.
-
- - Job Options: ZeeZmmOnAODExample_jobOptions.py, should be modified for cuts, etc. One ROOT output 
-file is produced for histograms.
-
- - Job Options: VFitZmmOnAODExample_jobOptions.py, should be modified for cuts, etc. One ROOT output 
-file is produced for histograms and an ntuple.
-
- - Job Options: ZeeOnESDExample_jobOptions.py, should be modified for cuts, etc. For analysis on ESD.
-One ROOT output file is produced for histograms.
-
- - Job Options: ZtautauExample_jobOptions.py, should be modified for cuts, etc. For Z -> tautau 
-reconstruction on AOD. One ROOT output file is produced for histograms.
-
- - H to 4 lepton analysis Example (l = electron, muon)
-An analysis algorithm to demonstrate Higgs to leptons analysis. It simply reads the containers of the 
-pre selected particles from StoreGate and does H to 4-lepton reconstruction. The pre selected particles 
-are recorded in the transient event store (StoreGate) by the DoParticle algorithms described above. It 
-therefore runs the DoParticle algorithms described above so that the pre selected (or selected) 
-particles are available the event store beforehand.
-
- - Job options: H4lExample_jobOptions.py, should be modified for cuts, etc. Two ROOT output 
-files are produced: one for ntuples and the other for histograms.
-
- - gg to ttbar to jjb lnu b analysis example
-An analysis algorithm to demonstrate the reconstruction of ttbar to jjb lnu b. It reads the containers of
-electrons, muons, jets and b-jets, and also the missing Et object from StoreGate. It uses the SpecialUtils
-for the solution to the neutrino pz problem, and also its uses analysis combinations and permutations. As
-described above, the DoParticle algorithms run first, recording the pre-selected particles in StoreGate.
-This ttbar algorithm does not have an ntuple, only histograms.
-
- - Job options: ttbarExample_jobOptions.py, should be modified for cuts, etc. One ROOT output file is 
-produced for the histograms.
-
- - Job options: PIDinAOD_jobOptions.py, for single particle identification studies. It runs only the 
-DoParticle algorithms. Note that the DoParticle_jobOptions.py cannot run independently by themselves,
-they must be included as fragments in other job options such as PIDinAOD_jobOptions.py. Two 
-ROOT output files are produced: one for histograms the other for ntuples.  
-
- - Job options: AnalysisCommon_jobOptions.py, for common options to all algorithms such as common DLL. 
-Cannot run independently, must be included as a fragment in other job options.
-
- - Job options: produceGenEvents_jobOptions.py, an example job options for event generation. The 
-generated events can be passed to atlfast for fast simulation or to full simulation. Consult the 
-Monte Carlo Generator Group for details.
-
- - Analysis flags: these are flags which control which DoParticle_jobOptions the user wants to run. For 
-examples, in the H to 4 leptons analysis algorithms, only the electron and the muon selection algorithm 
-are ON (all the other particle selection algorithms are switched OFF). All the flags are ON by default. 
-Look at the job options ZllExamples_jobOptions.py or H4lExample_jobOptions.py to see how to switch OFF 
-particular algorithms.
-
- - Examples of job transformations for large scale production of ESD and AOD: look in the 
-sub-directory "scripts".
-
- - For details on how to produce ESD, AOD, how to set your input ESD or AOD in the analysis job 
-options, see the following "Customizing ESD and AOD production" on this web page:
-@htmlinclude https://uimon.cern.ch/twiki/bin/view/Atlas/PhysicsAnalysisTools
-
- - A python script to merge many AOD files into a single file: look in the directory mergeAOD. There 
-you will find 2 files: globalMerge.py which allows you to merge many AOD files into one. The reason you
-may want to do this is that, for large statistics, running your analysis code over many AOD POOL 
-files may be slow because of I/O operations. So you may be better off merging the AOD files, so your 
-analysis code runs on a smaller number of AOD files (same statistics) with much less processing time. 
-The script globalMerge.py should be executed in python. It produces a shell script, merge.sh, and a 
-number of job options fragments for the files to merge. You can then execute merge.sh, interactively 
-or in batch. The script merge.sh runs athena jobs which use the top job options Merge.py to do the 
-merging. The output file names of the merged AOD POOL data files are defined in merge.sh when it is 
-produced by globalMerge.py. 
-
- - globalMergeCLArgs.py. Purpose: set variables at commandline 
-    Usage: python globalMergeCLArgs.py <dir> <fileprefix> <datasetid> 
-    Example: python globalMerge2.py rfio:/castor/...  rome.004100.recov10.T1_McAtNLO_top 41 
-
- - globalMergeConfigFile.py. Purpose: set variables in config file mergeconfig.py 
-    Usage: python globalMergeConfigFile.py 
-    Example of Config file: 
-      prefix="rome.004100.recov10.T1_McAtNLO_top" 
-      dir="rfio:/castor/cern.ch/grid/atlas/datafiles/rome/recov10" 
-      dsid=4100 
-      catalog="file:PoolFileCatalog.xml" 
-      nfiles=20 
-
- - MiscellaneousExamples: an example algorithm to show how to access TrackParticle (Trk::Track) 
-information from the Electron, the Muon for example, how to do back navigation from the AOD to the ESD 
-(from ParticleJet to calorimeter cells for examples), how to do symLink, how to use the CompositeParticle 
-class and how to do combinations with a selection.
-
- - AthenaAwareNTuple: an example of how to create an ATHENA aware NTuple. What is meant by ATHENA 
-aware is that it can be used as input to other ATHENA jobs, for example to make a pre-selection.
-
- - JetTagAna: To simply access the b-tagging weight for each jet, have a look at the section 
-SECTION::WEIGHTS in the JetTagAna.cxx file. To access the Monte-Carlo label (b,c or light) of a jet, 
-have a look at the section SECTION::LABEL. To access detailed information about each tag, see  
-SECTION::DETAILS in the JetTagAna.cxx
-
- - TrackExample: an example algorithm to show how to access TrackParticle collections, useful for analyses
-that require access to TrackParticle data such as track summary, perigee parameters, vertices, etc.
-
- - FilterExample: an example algorithm to show how to filter various collections of AOD classes using the so-called Filter Tools of AnalysisUtils package.
-It makes use of a collection of IParticleFilter classes for each of the collections one wants to filter.
-The Algorithm first retrieves the containers from StoreGate and copies the elements into a new container only if they satisfy the criteria of the IParticleFilter.
-Eventually it stores these filtered containers.
-Notice the use of the 
-@verbatim AnalysisUtils::copy_if( inCont->begin(), inCont->end(), std::back_inserter(*outCont), filter );@endverbatim
-method which does the copy and the filtering in one go.
-
- - ReadTruthParticles: an example algorithm which exercises the TruthParticle interface. It shows how to navigate back to the HepMC::GenParticles and retrieve the children (in a HepMC format) as well as how to retrieve the (TruthParticle) children of a TruthParticle.
- The Algorithm first retrieves a container of TruthParticle, loops over its elements and for each of them asks for its HepMC::GenParticle creator (and uses the GenParticle methods).
- Then, for each TruthParticle, it loops over its children in a TruthParticle format.
-
- - An example of LCG Minuit Minimization: see LCGMinuitMinimization.cxx 
-
- - For questions or comments: Ketevi A. Assamagan
-   ketevi@bnl.gov
-
-
-*/
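
The documentation removed above repeatedly describes one pattern: each DoXxx algorithm in the deleted package derives from the DoParticle base class and overrides doPreSelection(), initializeParticle() and finalizeParticle() to record user pre-selected particles in StoreGate and to book and fill an ntuple or histograms. A minimal sketch of that pattern, using only the StatusCode-returning signatures quoted in the removed text; the class name DoMyJet, the DoParticle.h header path and the inherited constructor are assumptions, and AnalysisUtils::copy_if is quoted from the removed FilterExample description:

    // Sketch under assumptions; this is not the removed implementation.
    #include "GaudiKernel/StatusCode.h"
    #include "DoParticle.h"   // assumed header for the removed base class

    class DoMyJet : public DoParticle {
    public:
      using DoParticle::DoParticle;   // inherit the framework constructor (assumed)

      // Book histograms / ntuple branches once at the start of the job.
      virtual StatusCode initializeParticle() override { return StatusCode::SUCCESS; }

      // Per event: apply the user cuts and record the surviving objects in
      // StoreGate, e.g. with
      //   AnalysisUtils::copy_if(in->begin(), in->end(),
      //                          std::back_inserter(*out), filter);
      // as quoted in the removed FilterExample section.
      virtual StatusCode doPreSelection() override { return StatusCode::SUCCESS; }

      // Final scaling / write-out at the end of the job.
      virtual StatusCode finalizeParticle() override { return StatusCode::SUCCESS; }
    };

If the methods are not overridden, the removed documentation notes that the base-class defaults are called and do nothing.
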
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/mergingAOD/Merge.py b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/mergingAOD/Merge.py
deleted file mode 100644
index f621802bfc40649c8c8a8c19671e6fa69acecfbd..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/mergingAOD/Merge.py
+++ /dev/null
@@ -1,66 +0,0 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-# This is the job options that does the merging
-
-###############################################################
-#
-# Job options file
-#
-#==============================================================
-#--------------------------------------------------------------
-# Load POOL support
-#--------------------------------------------------------------
-include( "AthenaPoolCnvSvc/ReadAthenaPool_jobOptions.py" )
-
-#--------------------------------------------------------------
-# Event related parameters
-#--------------------------------------------------------------
-theApp.EvtMax = 1000
-
-#--------------------------------------------------------------
-# Private Application Configuration options
-#--------------------------------------------------------------
-# Load "user algorithm"
-#top algorithms to be run, and the libraries that house them
-
-DetDescrVersion="ATLAS-CSC-01-02-00"
-include ("RecExCommon/AllDet_detDescr.py")
-
-# Pool Converters for AOD
-include( "ParticleBuilderOptions/ESD_PoolCnv_jobOptions.py" )
-include( "ParticleBuilderOptions/AOD_PoolCnv_jobOptions.py" )
-include( "ParticleBuilderOptions/McAOD_PoolCnv_jobOptions.py")
-include( "EventAthenaPool/EventAthenaPool_joboptions.py" )
-
-doTrigger=True
-doWriteAOD = True
-doTruth = True
-
-#
-#--------------------------------------------------------------
-# Set output level threshold (2=DEBUG, 3=INFO, 4=WARNING, 5=ERROR, 6=FATAL )
-#--------------------------------------------------------------
-MessageSvc = Service( "MessageSvc" )
-MessageSvc.OutputLevel = 3
-EventSelector = Service( "EventSelector" )
-include(mergeFile)
-#==============================================================
-#
-# End of job options file
-#
-###############################################################
-#--------------------------------------------------------------
-#---   Secondary Write portion  ----- Don't change it !!!
-#--------------------------------------------------------------
-theApp.OutStream     =["Stream1"]
-theApp.OutStreamType ="AthenaOutputStream"
-Stream1 = Algorithm( "Stream1" )
-Stream1.Output = OUTPUT
-Stream1.EvtConversionSvc     ="AthenaPoolCnvSvc"
-PoolSvc = Service( "PoolSvc" )
-
-# list of output objects 
-include( "ParticleBuilderOptions/AOD_OutputList_jobOptions.py")
-Stream1.ItemList = AOD_ItemList
-
-
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/mergingAOD/globalMerge.py b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/mergingAOD/globalMerge.py
deleted file mode 100644
index 7cfe5ba175d706c20d2040e2a460376d94271074..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/mergingAOD/globalMerge.py
+++ /dev/null
@@ -1,79 +0,0 @@
-#!/usr/local/pkg/Python-2.2.2/bin/python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-# by Davide Costanzo with a few modifications from Ketevi A. Assamagan
-
-# You have to execute this job in python:
-#lxplus> python
-#python>>> import globalMerge
-#python>>> Ctrl D
-#lxplus>
-
-# as output:
-# merge.sh - the script to do the merging
-# a number of .py which contains the lists of AOD files to merge into one
-
-import string
-import os
-import sys
-
-# sub-string to select the AOD to be merged from your PoolFileCatalog.xml
-myAODfiles = "BsDsPi.AOD_Trig900GeV.slim.100evts"
-
-#number of AOD files to be merged into a single file
-NFileToMerge = 20
-
-# get the AODs to be merged:
-AodList = []
-import xml.dom.minidom
-root  = xml.dom.minidom.parse("PoolFileCatalog.xml")
-files = root.getElementsByTagName('File')
-for file in files:
-   # get PFN
-   physical = file.getElementsByTagName('physical')[0]
-   pfnNode  = physical.getElementsByTagName('pfn')[0]
-   # convert UTF8 to Raw
-   pfn = str(pfnNode.getAttribute('name'))
-   # check basename
-   if pfn.split('/')[-1].startswith(myAODfiles):
-      # append
-      AodList.append(pfn)
-
-NMergedFiles=0
-fileNumber=0
-AodList.sort()
-
-# Ketevi - find out the total number of AOD files to merge 
-size = 0
-for name in AodList :
-   size = size+1
-
-# The script merge.sh is produced as output
-# you can then submit that script to LSF batch or run interactively to merge the AOD files
-script=open('merge.sh','w')
-
-i = 0
-for filename in AodList :
-    AodFile = string.strip(filename)
-    i = i+1
-    if NMergedFiles == NFileToMerge:
-        NMergedFiles = 0    
-        fileNumber = fileNumber+1
-        f.close()
-    if NMergedFiles == 0 :
-        fileToOpen=myAODfiles+'Merge_'+str(fileNumber)+'.py'
-        f=open(fileToOpen,'w')
-        outfile = myAODfiles+'_Merge'+str(fileNumber)
-        script.write("athena.py Merge.py -c 'mergeFile="+'"'+fileToOpen+'"'+'; OUTPUT="'+outfile+'.root"'+"'\n")
-        print "open file ", fileToOpen
-        f.write("EventSelector.InputCollections = [ \n")
-    f.write("'"+AodFile+"'")
-    if NMergedFiles < NFileToMerge-1 and i < size : 
-       f.write(", \n")
-    else :
-       f.write(']')
-
-    NMergedFiles = NMergedFiles+1
-
-
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/mergingAOD/globalMergeCLArgs.py b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/mergingAOD/globalMergeCLArgs.py
deleted file mode 100644
index f8d5fcdc6b81e6101b3e15e5e4efc99e8774d820..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/mergingAOD/globalMergeCLArgs.py
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/usr/local/pkg/Python-2.2.2/bin/python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-# Original by Davide Costanzo with a few modifications from Ketevi A. Assamagan
-# Modified by Jack Cranshaw for rome tags 26.04.05
-
-# You have to execute this job in python:
-#lxplus> python
-#python>>> import globalMergeWithTags
-#python>>> Ctrl D
-#lxplus>
-
-# as output:
-# mergeTags.sh - the script to do the merging
-# a number of .py which contains the lists of AOD files to merge into one
-
-import string
-import os
-import sys
-
-# translate command line arguments
-if len(sys.argv)<4: 
-  print "**>> Need at least 3 arguments: dir, prefix, dsid"
-  sys.exit(1)
-myAODfiles  = sys.argv[1]+sys.argv[2]
-pithyprefix = sys.argv[2]
-dataset     = sys.argv[3]
-
-#number of AOD files to be merged into a single file
-NFileToMerge = 20 
-
-# get the AODs to be merged:
-#
-# if FClistPFN is not found then you should source the TestRelease setup
-get = os.popen('FClistPFN | grep '+myAODfiles)
-AodList = get.readlines()
-NMergedFiles=0
-fileNumber=0
-AodList.sort()
-
-# Ketevi - find out the total number of AOD files to merge 
-size = 0
-for name in AodList :
-   size = size+1
-
-print "Found ", size, "files to merge"
-
-if (size>0):
-    # The script mergeTags.sh is produced as output
-    # you can then submit that script to LSF batch or run interactively to merge the AOD files
-    script=open('mergeTags.sh','w')
-
-    i = 0
-    for filename in AodList :
-        AodFile = string.strip(filename)
-        i = i+1
-        if NMergedFiles == NFileToMerge:
-            NMergedFiles = 0    
-            fileNumber = fileNumber+1
-            f.close()
-        if NMergedFiles == 0 :
-            fileToOpen=pithyprefix+'Merge_'+str(fileNumber)+'.py'
-            f=open(fileToOpen,'w')
-            outfile = myAODfiles+"_Merge"+str(fileNumber)+".AOD"
-            script.write('athena.py MergeWithTags.py -c "DSID='+str(dataset)+'; dbseed='+str(dataset+fileNumber)+'; mergeFile='+"'"+fileToOpen+"'"+"; OUTPUT='"+outfile+"'"+'"'+"\n")
-            print "open file ", fileToOpen
-            f.write("EventSelector.InputCollections = [ \n")
-        f.write("'"+AodFile+"'")
-        if NMergedFiles < NFileToMerge-1 and i < size : 
-           f.write(", \n")
-        else :
-           f.write(']')
-
-        NMergedFiles = NMergedFiles+1
-else:
-    print " ** No files for search string \n",myAODfiles, "\nfound in catalog"
-
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/mergingAOD/globalMergeConfigFile.py b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/mergingAOD/globalMergeConfigFile.py
deleted file mode 100644
index 49c5590ab1a5cc7aa54e9baf0538e9a1a7692444..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/mergingAOD/globalMergeConfigFile.py
+++ /dev/null
@@ -1,110 +0,0 @@
-#!/usr/local/pkg/Python-2.2.2/bin/python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-# Original by Davide Costanzo with a few modifications from Ketevi A. Assamagan
-# Modified by Jack Cranshaw for rome tags 26.04.05
-
-# You have to execute this job in python:
-#lxplus> python
-#python>>> import globalMergeWithTags
-#python>>> Ctrl D
-#lxplus>
-
-# as output:
-# mergeTags.sh - the script to do the merging
-# a number of .py which contains the lists of AOD files to merge into one
-
-import string
-import os
-import sys
-
-import mergeconfig
-
-# get parameters
-try:
-  myAODfiles  = mergeconfig.dir+'/'+mergeconfig.prefix+'/'+mergeconfig.prefix
-  pithyprefix = mergeconfig.prefix
-  postfix = mergeconfig.postfix
-except:
-  print "MUST define dir and prefix and postfix"
-  sys.exit(1)
-
-try:
-  dataset     = mergeconfig.dsid
-except:
-  dataset     = 0
-
-#number of AOD files to be merged into a single file
-try:
-   NFileToMerge = mergeconfig.nfiles
-except:
-   NFileToMerge = 20
-
-# is it a different catalog
-try:
-  source = mergeconfig.catalog
-except:
-  source = "file:PoolFileCatalog.xml"
-
-try:
-  seed = mergeconfig.dbseed
-  print "seed in conf file"
-except:
-  seed = dataset
-  print "NO SEED in conf file"
-
-print "**"
-print "myAODfiles = ",myAODfiles
-print "pithyprefix = ",pithyprefix
-print "dataset = ",dataset
-print " N = ",NFileToMerge
-print "source = ",source
-
-# get the AODs to be merged:
-#
-# if FClistPFN is not found then you should source the TestRelease setup
-get = os.popen('FClistPFN -u '+source+' | grep '+myAODfiles)
-AodList = get.readlines()
-NMergedFiles=0
-fileNumber=0
-AodList.sort()
-
-# Ketevi - find out the total number of AOD files to merge 
-size = 0
-for name in AodList :
-   size = size+1
-
-print "Found ", size, "files to merge"
-
-if (size>0):
-    # The script mergeTags.sh is produced as output
-    # you can then submit that script to LSF batch or run interactively to merge the AOD files
-    script=open('mergeTags.sh','w')
-
-    i = 0
-    for filename in AodList :
-        AodFile = string.strip(filename)
-        i = i+1
-        if NMergedFiles == NFileToMerge:
-            NMergedFiles = 0    
-            fileNumber = fileNumber+1
-            seed = seed+1
-            f.close()
-        if NMergedFiles == 0 :
-            fileToOpen=postfix+'._'+str(fileNumber)+'.py'
-            f=open(fileToOpen,'w')
-            outfile=mergeconfig.dir+'/'+mergeconfig.prefix+'/'+postfix+"._"+str(fileNumber)+".AOD"
-            script.write('athena.py MergeWithTags.py -c "DSID='+str(dataset)+'; dbseed='+str(seed)+'; mergeFile='+"'"+fileToOpen+"'"+"; OUTPUT='"+outfile+"'"+'"'+"\n")
-            print "open file ", fileToOpen
-            f.write("EventSelector.InputCollections = [ \n")
-        f.write("'"+AodFile+"'")
-        if NMergedFiles < NFileToMerge-1 and i < size : 
-           f.write(", \n")
-        else :
-           f.write(']')
-
-        NMergedFiles = NMergedFiles+1
-else:
-    print " ** No files for search string \n",myAODfiles, "\nfound in catalog"
-
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/mergingAOD/mergeconfig.py b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/mergingAOD/mergeconfig.py
deleted file mode 100644
index 24718afcd0d51e66a1e7b3469c0fd7fab9d27343..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/mergingAOD/mergeconfig.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-dsname      = "T1_McAtNLO_top"
-dsid        = 4100
-inputstage  = "recov10"
-outputstage = "merge"
-process     = "rome.00"
-dir         = "rfio:/castor/cern.ch/grid/atlas/datafiles/rome/recov10"
-catalog     = "file:PoolFileCatalog.xml"
-nfiles      = 20
-dbseed      = 3124
-prefix      = process + str(dsid) + '.' + inputstage  + '.' + dsname
-postfix     = process + str(dsid) + '.' + outputstage + '.' + dsname
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/python/AnalysisFlags.py b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/python/AnalysisFlags.py
deleted file mode 100644
index 7a3ee2aa954362a5a2dccdf3f40ab3a3410fd453..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/python/AnalysisFlags.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-class _AnalysisFlags:
-
-    # Particle Analysis Flags Default is ON=True
-    _partAna = [
-        'DoPhoton',
-        'DoElectron',
-        'DoMuon',
-        'DoTauJet',
-        'DoParticleJet',
-        'DoBJet',
-        'DoNavigation',
-        'DoInteractive'        
-        ]
-    
-    def __init__ (self):
-        # Particle Analysis Attributes
-        for attr in self._partAna:
-            setattr(self, attr, True)
-        self.DoNavigation = False
-        self.DoInteractive = False
-    def Print (self):
-        format = "%17s : %s"
-        for attr in self._partAna:
-            if getattr(self, attr):
-                print(format % (attr, "ON"))
-            else:
-                print(format % (attr, "--"))
-
-AnalysisFlags = _AnalysisFlags()
-
-
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/python/Lib.py b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/python/Lib.py
deleted file mode 100644
index b2428b4dbf006a1a0aab5b9cd90014ee57ddf060..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/python/Lib.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-from AthenaPython import PyAthena
-StatusCode = PyAthena.StatusCode
-
-class PyReadD3pdNtuple( PyAthena.Alg ):
-
-    def __init__(self, name='PyReadD3pdNtuple', **kw):
-        kw['name'] = name
-        super(PyReadD3pdNtuple, self).__init__(**kw)
-
-        self.treename = kw.get('TreeName',
-                               '/temp/TTreeStream/egamma')
-        return
-
-    def initialize(self):
-
-        hsvc = PyAthena.py_svc('THistSvc', iface='ITHistSvc')
-        if not hsvc:
-            self.msg.error("Could not retrieve THistSvc !")
-            return StatusCode.Failure
-
-        self.tree = hsvc.get(self.treename, klass='TTree')
-        if not self.tree:
-            self.msg.error('could not retrieve tree from THistSvc')
-            return StatusCode.Failure
-        return StatusCode.Success
-
-    def execute(self):
-        info = self.msg.info
-        info('running execute...')
-        t = self.tree
-        info('run-nbr: %s', t.RunNumber)
-        info('evt-nbr: %s', t.EventNumber)
-        info('el-nbr:  %s', t.el_n)
-        if t.el_n >0:
-            info('el.eta[0]: %s', t.el_eta[0])
-            nmax = len(t.el_jetcone_dr[0])
-            info('el_jetcone_dr[0].size: %s', nmax)
-            for i in range(nmax):
-                info(' jet-cone-dr[0][%i]: %s', i, t.el_jetcone_dr[0][i])
-                
-        return StatusCode.Success
-
-    def finalize(self):
-        return StatusCode.Success
-
-    pass # PyReadD3pdNtuple
-
-class PyReadD3pdEvtStore( PyAthena.Alg ):
-
-    def __init__(self, name='PyReadD3pdEvtStore', **kw):
-        kw['name'] = name
-        super(PyReadD3pdEvtStore, self).__init__(**kw)
-
-        return
-
-    def initialize(self):
-
-        self.evtstore = PyAthena.py_svc('StoreGateSvc')
-        if not self.evtstore:
-            self.msg.error('could not retrieve EvtStore')
-            return StatusCode.Failure
-        return StatusCode.Success
-
-    def execute(self):
-        info = self.msg.info
-        info('running execute...')
-        sg = self.evtstore
-        info('run-nbr: %s', sg["RunNumber"])
-        info('evt-nbr: %s', sg["EventNumber"])
-        info('el-nbr:  %s', sg["el_n"])
-        if sg["el_n"] >0:
-            el_eta = sg["el_eta"]
-            el_jetcone_dr = sg["el_jetcone_dr"]
-            info('el.eta[0]: %s', el_eta[0])
-            nmax = len(el_jetcone_dr[0])
-            info('el_jetcone_dr[0].size: %s', nmax)
-            for i in range(nmax):
-                info(' jet-cone-dr[0][%i]: %s', i, el_jetcone_dr[0][i])
-                
-        return StatusCode.Success
-
-    def finalize(self):
-        return StatusCode.Success
-
-    pass # PyReadD3pdEvtStore
-
-
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/python/__init__.py b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/python/__init__.py
deleted file mode 100644
index 74583d364ec2ca794156596c7254d9b234a940c6..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/python/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/AnalysisCommon_jobOptions.py b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/AnalysisCommon_jobOptions.py
deleted file mode 100644
index 2d3aa4cdddf8d3059420ce2980070c16342020ab..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/AnalysisCommon_jobOptions.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# import the data types 
-import EventKernel.ParticleDataType
-
-# get a handle on the ServiceManager which holds all the services
-from AthenaCommon.AppMgr import ServiceMgr
-
-# load additional libraries for back navigation
-from AnalysisExamples.AnalysisFlags import AnalysisFlags
-
-from AthenaCommon.AlgSequence import AlgSequence 
-topSequence = AlgSequence()
-
-import AthenaPoolCnvSvc.ReadAthenaPool
-
-if AnalysisFlags.DoNavigation:
-   include( "RecExCommon/AllDet_detDescr.py" ) 
-
-# the Dlls
-# Don't need them
-#
-#include ( "ParticleBuilderOptions/ESD_PoolCnv_jobOptions.py" )
-#include ( "ParticleBuilderOptions/AOD_PoolCnv_jobOptions.py" )
-#include ( "ParticleBuilderOptions/McAOD_PoolCnv_jobOptions.py" )
-#include ( "EventAthenaPool/EventAthenaPool_joboptions.py" )
-
-# write out a summary of the time spent
-#theAuditorSvc = AuditorSvc()
-#theAuditorSvc.Auditors  += [ "NameAuditor"]
-#theAuditorSvc.Auditors  += [ "ChronoAuditor", "MemStatAuditor"]
-
-# Athena-Aware NTuple making Tools
-
-include ("CBNT_Athena/CBNT_AthenaAware_jobOptions.py")
-include ("CBNT_Athena/CBNT_EventInfo_jobOptions.py")
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/AnalysisMaster.py b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/AnalysisMaster.py
deleted file mode 100644
index a32558447ff993eb06274253a2fbb9d093e13487..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/AnalysisMaster.py
+++ /dev/null
@@ -1,55 +0,0 @@
-############################################################
-# AnalysisMaster.py
-# These are the master job options for running your analysis
-# Include your own job options as shown below ensuring you
-############################################################
-
-# We use "jp" to pass the input file to the userAlgs jO file defined below
-# if you want to run over multiple files locally, use glob as shown below
-
-from AthenaCommon.AthenaCommonFlags import jobproperties as jp
-jp.AthenaCommonFlags.FilesInput = ['/afs/cern.ch/atlas/maxidisk/d49/AOD.191045._001937.pool.root.1']
-
-# to run over multiple input files
-##################
-#DATAPATH = '/afs/cern.ch/atlas/maxidisk/d49/'
-#from glob import glob
-#INPUT = glob(DATAPATH + 'AOD*.root*')
-#print INPUT
-#jp.AthenaCommonFlags.FilesInput = INPUT
-
-##################
-#
-from RecExConfig.RecFlags import rec
-
-jp.AthenaCommonFlags.EvtMax=-1 # number of event to process
-
-# include your algorithm job options here
-
-#rec.UserAlgs=[ "ZeeZmmOnAODExample_jobOptions_NEW.py" ] 
-#rec.UserAlgs=[ "ZtautauExample_jobOptions_NEW.py" ] 
-#rec.UserAlgs=[ "ttbarExample_jobOptions_NEW.py" ] 
-rec.UserAlgs=[ "VFitZmmOnAOD_jobOptions_NEW.py" ]
-
-# Output log setting; this is for the framework in general
-# You may over-ride this in your job options for your algorithm
-rec.OutputLevel = INFO
-
-# Control the writing of your own n-tuple in the alg's job options
-# The following line only turns off the standard CBNT made by RecExCommon.
-# and has no bearing on the ntuple that you make in your user job Options file.
-#
-rec.doCBNT = False
-
-# for analysis you don't need to write out anything
-rec.doWriteESD.set_Value_and_Lock(False)
-rec.doWriteAOD.set_Value_and_Lock(False)
-rec.doWriteTAG.set_Value_and_Lock(False)
-
-# to turn off perfmon - avoids writing out big ntuples
-
-rec.doPerfMon=False
-
-# main jobOption - must always be included
-include ("RecExCommon/RecExCommon_topOptions.py")
-
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/FilterBeforeAlgorithmExample_topOptions.py b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/FilterBeforeAlgorithmExample_topOptions.py
deleted file mode 100644
index 706dc245b7002286abab107da6d229d8e3463529..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/FilterBeforeAlgorithmExample_topOptions.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# Author: Ketevi A. Assamagan
-# CERN, February 13, 2005
-
-#get a handle on the ServiceManager which holds all the services
-from AthenaCommon.AppMgr import ServiceMgr
-
-# Detector Description if needed
-DetDescrVersion="ATLAS-CSC-01-02-00" # default - uncomment to set the geometry
-include ("RecExCommon/RecExCommon_flags.py")
-#DetFlags.ID_setOff()
-#DetFlags.Calo_setOff()
-#DetFlags.Muon_setOff()
-include( "RecExCommon/AllDet_detDescr.py" )
-
-# The ESD or AOD input data
-# note that here, you can use the ESD or the AOD as input
-# since the collections of TrackParticles are both in the ESD and in the AOD
-import AthenaPoolCnvSvc.ReadAthenaPool
-ServiceMgr.EventSelector.InputCollections = [ "AOD.pool.root" ] 
-
-include( "TrackIsolationTools/TrackIsolationTool_jobOptions.py" )
-
-# use sequencer for filtering
-from AthenaCommon.AlgSequence import AlgSequence
-topSequence = AlgSequence()
-
-from AthenaCommon.AlgSequence import AthSequencer
-Sequencer = AthSequencer( "MySequencer" )
-Sequencer.StopOverride = False
-topSequence += Sequencer
-
-##### Define the filter algorithm
-from AnalysisExamples.AnalysisExamplesConf import FilterExample
-FilterExample = FilterExample(
-                 Jets      = "Cone7H1TopoParticleJets",
-                 Electrons = "ElectronAODCollection",
-                 Muons     = "StacoMuonCollection",
-                 Photons   = "PhotonAODCollection",
-                 TauJets   = "TauRecContainer",
-                 MissingET = "MET_Final",
-                 OutputLevel = DEBUG
-                )
-print FilterExample
-Sequencer += FilterExample
-######## End Filter algorithm
-
-######## Now define the algorithms to run after the filter passed
-from AnalysisExamples.AnalysisExamplesConf import TrackExample
-TrackExample = TrackExample()
-TrackExample.TrackIsolationTool = TrackIsolationTool
-TrackExample.TrackParticleContainerName = "TrackParticleCandidate"
-TrackExample.TrackIsolationCone = 0.4
-TrackExample.EventWeight = 1.0
-TrackExample.TrackMomentumCut = 0.0*GeV
-TrackExample.OutputLevel = INFO
-print TrackExample
-Sequencer += TrackExample
-
-############# End of the algorithms to run
-
-# Output level and Number of Events to process
-ServiceMgr.MessageSvc.OutputLevel = INFO 
-theApp.EvtMax = 500
-
-#########################################
-#Setup TTree registration Service
-# save ROOT histograms and NTuple
-from GaudiSvc.GaudiSvcConf import THistSvc
-ServiceMgr += THistSvc()
-#Root Ntuple output file and name
-ServiceMgr.THistSvc.Output = ["AANT DATAFILE='TrackExample.hist.root' OPT='RECREATE'"] 
-
-# For interactive analysis in athena
-#import PyAnalysisCore.InitPyAnalysisCore
-
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/JetTagAna_RTT.py b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/JetTagAna_RTT.py
deleted file mode 100644
index 2cebfa03a2e597c2a54788242a3999a892f9bea5..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/JetTagAna_RTT.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# Python job to run b-tagging+JetTagAna in RTT framework
-# Laurent Vacavant 2005/10/12
-
-from AthenaCommon.AthenaCommonFlags  import athenaCommonFlags
-from AthenaCommon.AppMgr import theApp
-from AthenaCommon.AppMgr import ServiceMgr
-from AthenaCommon.GlobalFlags  import globalflags
-
-DetDescrVersion = "ATLAS-GEO-02-01-00"
-include( "RecExCommon/AllDet_detDescr.py")
-
-import AthenaPoolCnvSvc.ReadAthenaPool
-ServiceMgr.EventSelector.InputCollections = [ "AOD.pool.root" ]
-theApp.EvtMax = -1
-
-include( "PartPropSvc/PartPropSvc.py" )
-include( "AthenaPoolCnvSvc/ReadAthenaPool_jobOptions.py" )
-include( "EventAthenaPool/EventAthenaPool_joboptions.py" )
-theApp.Dlls += ["CaloTools" ]
-theApp.Dlls += ["TileRecAlgs"]
-theApp.Dlls += ["LArClusterRec"]
-theApp.Dlls += ["CaloRec"]
-theApp.Dlls += ["TileRec"]
-include( "ParticleBuilderOptions/ESD_PoolCnv_jobOptions.py")
-include( "ParticleBuilderOptions/AOD_PoolCnv_jobOptions.py")
-# non-existent file: include( "InDetEventCnvTools/InDetEventCnvTools_jobOptions.py" )
-include( "ParticleBuilderOptions/McAOD_PoolCnv_jobOptions.py")
-
-# ------ BTagging configuration:
-redoBtag = True
-if not 'BTaggingFlags' in dir():
-  from BTagging.BTaggingFlags import BTaggingFlags
-if redoBtag:
-  BTaggingFlags.Runmodus      = "analysis" 
-  BTaggingFlags.PoolInputType = "AOD"
-  from ParticleBuilderOptions.AODFlags import AODFlags
-  AODFlags.TruthParticleJet = False
-  BTaggingFlags.OutputLevel = INFO
-  BTaggingFlags.Jets = ['Cone4H1Tower']
-  BTaggingFlags.JetsWithInfoPlus = ['Cone4H1Tower']
-  include( "BTagging/BTagging_jobOptions.py" )
-  from JetTagTools.JetTagToolsConf import Analysis__JetFitterTag
-  JetFitterTagTool = Analysis__JetFitterTag(name = "JetFitterTag")
-  JetFitterTagTool.jetCollectionList = ['Cone4H1Tower'] 
-
-# ------ JetTagAna part:
-include("AnalysisExamples/JetTagAna_jobOptions.py")
-if redoBtag:
-  MyJetTagAna.JetContainer = 'Cone4H1TowerJetsAOD'
-else:
-  MyJetTagAna.JetContainer = 'Cone4H1TowerJets'
-MyJetTagAna.PrimaryVertexContainer = BTaggingFlags.PrimaryVertexCollectionName
-MyJetTagAna.TrackParticleContainer = BTaggingFlags.TrackParticleCollectionName
-print MyJetTagAna
-
-# ------ MsgSvc:
-ServiceMgr.MessageSvc.OutputLevel = ERROR
-ServiceMgr.MessageSvc.defaultLimit = 9999999  # all messages
-ServiceMgr.MessageSvc.Format = "% F%50W%S%7W%R%T %0W%M"
-
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/JetTagAna_RTT_postProcessing.C b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/JetTagAna_RTT_postProcessing.C
deleted file mode 100644
index 555fc541547f7f6214c148b3ac51a1a52b0b04bf..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/JetTagAna_RTT_postProcessing.C
+++ /dev/null
@@ -1,631 +0,0 @@
-
-#include "TFile.h"
-#include "TH1F.h"
-#include "TCanvas.h"
-#include "TLegend.h"
-
-// Always leave N_HISTO_TESTS as last entry
-enum histo_tests {HAS_ENTRIES, IS_EMPTY, ARE_NOT_IDENTICAL, ARE_IDENTICAL, MEAN_NEAR_ZERO, KS_TEST, ARE_ALMOST_IDENTICAL, N_HISTO_TESTS}; 
-
-int DrawHistograms(TCanvas *, TH1F* ref, TH1F* cur, char * refstr, char *curstr);
-bool CheckHistogram(TH1F *hist, TH1F* hist2, char * tests, char *hname, char *extension, char *dir);
-bool PerformHistogramTest(TH1F* h1, TH1F *h2, char *test, char *extension, char *test_description, char *result_str);
-int SaveHistograms(TCanvas * cx, char *filename, char *extension, char * dir);
-int PrintHtmlTableEntry(char *html, char *hist_name, bool, bool ,bool );
-int PrintHtmlTableEntry2(char *html, char *test, char *result, bool pass);
-int PrintHtmlHead(char *, char *, char *, char *);
-int PrintHtmlHead2(char *, char *);
-int PrintHtmlFoot(char *);
-int PrintHtmlFoot2(char *html, char *image_file);
-void FormatFileName(char * h, char * h2);
-
-
-// file:     root_to_html.cc
-// author:   Sven Vahsen, sevahsen@lbl.gov
-// version:  09/02/05
-// synopsis: builds web pages (.html and .gif files) for Inner Detector Tracking
-// inputs:   two .root files with histograms, a testlist, and some strings
-// outputs:  main web page, sub-pages for each histogram, images (.GIFs) linked from these pages
-//
-// revisions:
-//
-// 09/02/05: o error message on web page when input .root files not find
-//           o new function argument for name of main html file
-//             (previously hardcoded as 'test.html')
-//
-// to-do list:
-//
-//           o error message on web page in case of wrong format for testlist file --> also list testdate
-//           o implement more histograms comparisons --> KS test. What else?
-//           o allow appending string to all filenames ?
-
-#include <iostream>
-#include <time.h>
-#include "TFile.h"
-#include "TH1F.h"
-#include "TCanvas.h"
-#include "TLegend.h"
-#include "TStyle.h"
-#include "TPaveStats.h" 
-
-char *test_keyword[N_HISTO_TESTS]={"has_entries","is_empty","are_not_identical","are_identical", "mean_near_zero","ks_test","are_almost_identical"};
-char *test_description[N_HISTO_TESTS]={"Does the histogram have entries?","Is the histogram empty?","Do the histograms differ?","Are the histograms identical?", "Is the histogram mean consistent with zero?","KS Test","Are the histograms almost identical?"};
-
-int root_to_html(char *ref_file, char *cur_file, char *ref_version, char *cur_version, char *particle_type, char *testlist, char *tmpdir, char *pagetmp)
-{
-  char aline[512], hname[512], tests[512]; 
-  char refstr[512], curstr [512], hname2[512], page[512], dir[512];
-
-  sprintf(dir,"%s%s",tmpdir, (strlen(tmpdir)>0) ? "/" : "");  
-  sprintf(page,"%s%s",dir,pagetmp);
-  sprintf(refstr,"%s (reference)",ref_version);
-  sprintf(curstr,"%s (current)",cur_version);
-
-  // Open Root Files, initialize canvas
-  //gROOT->SetStyle("Plain");
-  gStyle->SetOptStat(111111);
-  TCanvas *c0 = (TCanvas *) gROOT->FindObject("c0"); 
-  if (c0) c0->Delete();
-  c0 = new TCanvas("c0");
-
-  // add check later: if (!OpenRootFiles()) return -1;
-  TFile reference(ref_file);
-  TFile current(cur_file);
-
-  if (! reference.IsOpen()) cerr << "ERROR: Unable to open file \"" << ref_file << "\"!\n";
-  if (! current.IsOpen())   cerr << "ERROR: Unable to open file \"" << cur_file << "\"!\n";
-
-  // Open file which lists the checks to be performed
-  FILE * infile = fopen(testlist,"r");
-  if(!infile)
-  {
-    cerr << "Unable to open input file " << testlist << "\n";
-    return -1;
-  }
-
-  PrintHtmlHead(page,ref_version,cur_version,particle_type, current, reference);
-
-  while (fgets(aline,256,infile)) 
-  {   
-    int nvars = sscanf(aline,"Histogram %s Test %[^\n]\n", hname, tests); 
-
-    if (nvars < 2)
-    { 
-      cerr << "Formatting error in txt file: Line \"" << aline << "\" will be ignored.\n";
-      continue;
-    }
-
-    TH1F *ref_hist = 0;
-    TH1F *cur_hist = 0;
-
-    if (reference.IsOpen()) ref_hist = (TH1F*)reference.Get(hname);
-    if (current.IsOpen())   cur_hist = (TH1F*)current.Get(hname);
-
-    char* result = strstr(cur_hist->GetName(),"rej");
-    if(result == NULL) {
-
-      ref_hist->Scale(1./ref_hist->GetEntries());
-      cur_hist->Scale(1./cur_hist->GetEntries());
-
-    }
-
-    FormatFileName(hname,hname2);
-
-    DrawHistograms(c0, ref_hist, NULL, refstr, curstr );
-    SaveHistograms(c0, hname2, "ref", dir);
-    DrawHistograms(c0, NULL, cur_hist, refstr, curstr );
-    SaveHistograms(c0, hname2, "cur", dir);
-    DrawHistograms(c0, ref_hist, cur_hist, refstr, curstr );
-    SaveHistograms(c0, hname2, "cmp", dir);
-    
-    bool pass_ref = CheckHistogram(ref_hist, NULL, tests, hname2, "ref", dir);
-    bool pass_cur = CheckHistogram(cur_hist, NULL, tests, hname2, "cur", dir);
-    bool pass_cmp = CheckHistogram(ref_hist, cur_hist, tests, hname2, "cmp", dir);
-
-    PrintHtmlTableEntry(page, hname2, pass_ref, pass_cur, pass_cmp);
-  }
-  
-  PrintHtmlFoot(page);
-  return 1;
-}
-
-
-// substitute '/' or '\' characters in filename with '_'
-void FormatFileName(char * h, char * h2)
-{
-  int len = strlen(h)+1;
-
-  for (int i=0; i<len && i<511; i++)
-  {
-    *h2=*h;
-    if (*h2=='/' || *h2=='\\') *h2='_';
-    h++; h2++;
-  }
-}
-
-// Check whether histogram exists
-// Perform specified tests on histogram
-// create html output file with gif of 
-// histogram and table of tests results
-bool CheckHistogram(TH1F *hist, TH1F *hist2, char * tests, char *hname, char *extension, char *dir)
-{
-  char image_file[512], html_file[512], one_test[512], result_str[512], tmp[512], test_descr[512]; 
-  sprintf(image_file,"%s_%s.gif", hname,extension);
-  sprintf(html_file, "%s%s_%s.html",dir, hname,extension);
-  
-  PrintHtmlHead2(html_file, hname);
-  
-  bool pass = true;
-  if (!strcmp(extension,"cmp") && (!hist || !hist2))
-  { 
-    // fail if comparison test, and one of the histogram doesn't exist
-    pass = false;
-    if (!hist)  PrintHtmlTableEntry2 (html_file,"Reference histogram not found in root file!","",false);
-    if (!hist2) PrintHtmlTableEntry2 (html_file,"Current histogram not found in root file!","",false);
-  }  
-  else if (!hist)
-  {
-    // always fail if histogram 1 doesn't exist
-    pass = false;
-    PrintHtmlTableEntry2(html_file,"Histogram not found in root file!","",false);
-  }
-  else
-  {
-    int nvars = 1;
-    char *ptr = tests;
-    int ntests=0;
-
-    // get single words (test instructions) from string
-    while (nvars > 0 && nvars != EOF && (ptr < (tests+strlen(tests))))
-    {
-      nvars = sscanf(ptr,"%s", one_test);
-      ptr += strlen(one_test) + 1;
-      if (nvars>0 && nvars !=EOF) 
-      {
-        strcpy(tmp,one_test); tmp[3]=0;
-        //cout << "position=" << ptr-tests << ", tmp = \"" << tmp << "\" ,extension=\"" << extension << "\"\n";
-
-        if (!strcmp(extension,tmp))
-        {
-          bool tmp_pass = PerformHistogramTest(hist, hist2, one_test, extension, test_descr, result_str);
-          PrintHtmlTableEntry2(html_file, test_descr, result_str ,tmp_pass);
-          if (!tmp_pass) pass = false;
-          ntests++;
-        }
-      }
-    }
-    if (!ntests) PrintHtmlTableEntry2(html_file, "No tests defined for this histogram", "-" , 1); 
-  }
-  PrintHtmlFoot2(html_file, image_file);  
-  return pass;
-}
-
-
-bool PerformHistogramTest(TH1F* h1, TH1F* h2, char *test, char *extension, char *test_descr, char *result)
-{
-
-  bool match=false;
-  bool pass=false;
-  int entries=0;
-  double mean=0;
-  double sigma=0;
-  double signific=0;
-  int entries_2=0;
-  double mean_2=0;
-  double sigma_2=0;
-  char current[512];
-  float cut = 0;
-
-  // which histogram are we testing?
-
-  sprintf(test_descr,test);
-
-  cout << "PerformHistogramTest: tests=" << test << ", extension=" << extension << "\n";
-  if (strlen(test)> 4)
-  {
-    cout << "Comparing " << test << endl;
-
-    for (int i=0; i<N_HISTO_TESTS; i++)
-    {
-
-      // Truncate length of test, name it current
-      int j;
-      for (j=0;(j<strlen(test))&&(j<strlen(test_keyword[i])+4); j++) {
-        current[j] = test[j];
-      }
-      current[j] = '\0';
-
-      cout << "  To " << test_keyword[i] << ": " << current << endl;
-
-      // check if test name matches any of the keywords
-      if (!strcmp(current+4,test_keyword[i]))
-      {
-        match = true;
-        strcpy(test_descr,test_description[i]);
-        switch(i)
-        {
-          case HAS_ENTRIES:
-            entries = (int) h1->GetEntries();
-            sprintf(result,"Entries=%d",entries);
-            if (entries > 0) pass=true;    
-            break;
-          case IS_EMPTY:
-            entries = (int) h1->GetEntries();
-            sprintf(result,"Entries=%d",entries);
-            if (entries != 0) pass=false;          
-            break;
-          case MEAN_NEAR_ZERO:
-            sscanf(test+4,"mean_near_zero(%f)",&cut);
-
-            if (cut == 0) cut = 4;
-
-            entries  = (int) h1->GetEntries();
-            mean     = h1->GetMean(1);
-            sigma    = h1->GetMeanError(1);
-            if (sigma !=0)
-            { 
-              signific = fabs(mean/sigma);
-              sprintf(result,"fabs(mean/sigma_of_mean)=%f (cut was %f)",signific,cut);
-              if (signific < cut ) pass =true;
-              else pass = false;
-            }
-            else
-            {
-               signific = 999999.0;
-               sprintf(result,"sigma_mean=0,can't calculate significance!");
-               pass = false;
-            }
-            break;
-        case ARE_IDENTICAL:
-        case ARE_NOT_IDENTICAL:
-            entries    = (int) h1->GetEntries();
-            mean       = h1->GetMean(1);
-            sigma      = h1->GetMeanError(1);
-            entries_2  = (int) h2->GetEntries();
-            mean_2     = h2->GetMean(1);
-            sigma_2    = h2->GetMeanError(1);
-
-            if (entries != entries_2 || mean!=mean_2 || sigma !=sigma_2)
-            { 
-              sprintf(result,"Histogram number of entries, mean, or sigma differ");
-              pass = false;
-            }
-            else
-            {
-              sprintf(result,"Histogram number of entries, mean, and sigma are identical");
-              pass = true;
-            }
-            if (i==ARE_NOT_IDENTICAL) pass = !pass;
-          break;
-        case KS_TEST:
-          sscanf(test+4,"ks_test(%f)",&cut);
-
-          double ks =  h1->KolmogorovTest(h2);
-          sprintf(result,"Output from KS test is %f (cut is %f)",ks,cut);
-          pass = (ks > cut);
-          break;
-
-        case ARE_ALMOST_IDENTICAL:
-          sscanf(test+4,"are_almost_identical(%f)",&cut);
-
-
-
-          double dA = 0;
-          double A = 0;
-
-          for (int k=0; k <= h1->GetNbinsX()+1; k++) {
-            dA += fabs(h1->GetBinContent(k) - h2->GetBinContent(k));
-            A += h1->GetBinContent(k);
-          }
-
-          if (A > 0) {
-            dA = dA / A;
-          }
-
-          sprintf(result,"&Sigma;(|&Delta;A|)/&Sigma;A is %f (cut is %f)",dA,cut);
-
-          pass = (dA < cut);
-          break;
-
-          default:
-            sprintf(test_descr,test+4);
-            sprintf(result,"Test name recognized, but no action defined in root_to_html.cc!");
-            pass=false;
-        }
-        break;
-      }
-    }
-  }   
-  if (!match)
-  {
-    sprintf(result,"Unknown test!");
-    pass=false;
-  }
-  return pass;
-}
-
-
-int DrawHistograms(TCanvas *c0, TH1F* ref_hist, TH1F* cur_hist, char *refstr, char *curstr)
-{
-  if (!ref_hist && !cur_hist)
-  {
-    c0->Clear();
-    return 0;
-  }
-
-  double posStatX = 0.89;
-  double posLeg1X = 0.50;
-  double posLeg2X = 0.89;
-  gStyle->SetStatY(0.67);
-  gStyle->SetOptStat(111111);
-  // Modif LV: decide to put stat box on left or right side:
-  TH1F* h = 0;
-  if(cur_hist) h = cur_hist;
-  if(ref_hist) h = ref_hist;
-  if(h) {
-    double activL = h->Integral(0,h->GetNbinsX()/3);
-    double activR = h->Integral(h->GetNbinsX()*2/3,h->GetNbinsX());
-    if(activL*1.2<activR) {
-      posStatX = 0.33;
-    } else {
-      posStatX = 0.89;
-    }
-  }
-  gStyle->SetStatX(posStatX);
- 
-  if (ref_hist)
-  {
-    ref_hist->Draw();
-  }
-  if (cur_hist)
-  {
-    cur_hist->SetMarkerStyle(20);
-    if (ref_hist) // make sure stat boxes don't overlap
-    {
-      TPaveStats *st = (TPaveStats*)cur_hist->GetListOfFunctions()->FindObject("stats"); 
-      //st->SetLabel(curstr); 
-      st->SetY2NDC(0.40);     
-      st->SetY1NDC(0.16);     
-    }
-    cur_hist->Draw((ref_hist) ? "samesp" : "p");
-  }
-  TLegend *legend=new TLegend(posLeg1X,0.75,posLeg2X,0.88);
-  legend->SetTextFont(72);
-  legend->SetTextSize(0.03);
-  if (ref_hist) legend->AddEntry(ref_hist,refstr,"l");
-  if (cur_hist) legend->AddEntry(cur_hist,curstr,"p");
-  legend->Draw();
-
-  c0->Modified();
-  c0->Update(); 
-
-  return 1;
-}
-
-
-// print out one line of RTT results table for histogram details page, in html format
-int PrintHtmlTableEntry2(char *html, char *test, char*result, bool pass)
-{
-  FILE * f = fopen(html,"a");
-  if (!f) return 0;
-
-  // cell 1
-  fputs("<tr>\n",f);
-  fprintf(f,"<td align=\"center\">%s</td>\n",test);
-  fprintf(f,"<td align=\"center\">%s</td>\n",result);
-  fprintf(f,"<td align=\"center\"><span style=\"color: rgb(%s);\">%s</span></td>\n",
-          (pass ? "51, 204, 0":"255, 0, 0"),  (pass ? "PASS" : "FAIL"));
-  fputs("</tr>\n",f);
-  fclose(f);
-
-  return 1;
-}
-
-// print out one line of RTT results table in html format
-int PrintHtmlTableEntry(char *html, char *hist_name, bool pass_ref, bool pass_cur, bool pass_cmp)
-{
-  FILE * f = fopen(html,"a");
-  if (!f) return 0;
-
-  fputs("<tr>\n",f);
-
-  // cell 1
-  fprintf(f,"<td align=\"center\"><a href=\"%s_expl.txt\">%s</a></td>\n",hist_name,hist_name);
-
-  // cell 2
-  fprintf(f,"<td align=\"center\"><span style=\"color: rgb(%s);\">%s <a target=\"_blank\" href=\"%s_ref.html\">(view)</a></span></td>\n",
-          (pass_ref ? "51, 204, 0":"255, 0, 0"),  (pass_ref ? "PASS" : "FAIL"), hist_name);
-
-  // cell 3
-  fprintf(f,"<td align=\"center\"><span style=\"color: rgb(%s);\">%s <a target=\"_blank\" href=\"%s_cur.html\">(view)</a></span></td>\n",
-          (pass_cur ? "51, 204, 0":"255, 0, 0"),  (pass_cur ? "PASS" : "FAIL"), hist_name);
-
-  // cell 4
-  fprintf(f,"<td align=\"center\"><span style=\"color: rgb(%s);\">%s <a target=\"_blank\" href=\"%s_cmp.html\">(view)</a></span></td>\n",
-          (pass_cmp ? "51, 204, 0":"255, 0, 0"),  (pass_cmp ? "PASS" : "FAIL"), hist_name);
-
-  fputs("</tr>\n",f);
-
-  if (!f) return 0;
-  fclose(f);
-  return 1;
-}
-
-
-// used to build html pages that show histograms
-int PrintHtmlHead2(char *html, char *hname)
-{
-  FILE * f = fopen(html,"w");
-  if (!f) return 0;
-
-  fputs("<!DOCTYPE html PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\">\n",f);
-  fputs("<html>\n",f);
-  fputs("<head>\n",f);
-  fputs("<meta content=\"text/html; charset=ISO-8859-1\"\n",f);
-  fputs("http-equiv=\"content-type\">\n",f);
-  fputs("<title>ATLAS B-tagging RTT</title>\n",f);
-  fputs("</head>\n",f);
-  fputs("<body>\n",f);
-  fputs("<span style=\"font-weight: bold;\">B-tagging Histogram\n",f);
-  fprintf(f,"\"%s\"<br>\n",hname);
-  fputs("<br>\n",f);
-  fputs("</span>\n",f);
-  fputs("<table style=\"text-align: left; width: 70%;\" border=\"1\" cellpadding=\"2\"\n",f);
-  fputs("cellspacing=\"2\">\n",f);
-  fputs("<tbody>\n",f);
-  fputs("<tr>\n",f);
-  fputs("<tr>\n",f);
-  fputs("<td style=\"font-weight: bold;\" align=\"center\">Test performed</td>\n",f);
-  fputs("<td style=\"font-weight: bold;\" align=\"center\">Result</td>\n",f);
-  fputs("<td style=\"font-weight: bold;\" align=\"center\">Pass/Fail</td>\n",f);
-  fputs("</tr>\n",f);
-  fclose(f);
-  return 1;
-}
-
-// used to build html pages that show histograms
-int PrintHtmlFoot2(char *html, char *image_file)
-{
-  FILE * f = fopen(html,"a");
-  if (!f) return 0;
-
-  time_t rawtime;
-  struct tm * timeinfo;
-  time ( &rawtime );
-  timeinfo = localtime ( &rawtime );
-
-  fputs("</tr>\n",f);
-  fputs("</tbody>\n",f);
-  fputs("</table>\n",f);
-  fputs("<br>\n",f);
-
-  fputs("<img style=\"border: 1px solid;\" alt=\"IMAGE\"\n",f);
-  fprintf(f,"src=\"%s\"><br>\n",image_file);
-  fputs("<br>\n",f);
-  fputs("<br>\n",f);
-  fputs("<address>This page was automatically generated using a modified version of root_to_html (S.Vahsen/LBNL) on ",f);
-  fputs(asctime(timeinfo),f);
-  fputs(".</address>\n",f);
-  fputs("</body>\n",f);
-  fputs("</html>\n",f);
-  fclose(f);
-  return 1;
-}
-
-
-int PrintHtmlHead(char *html, char *ref_version, char *cur_version, char *particle_type, TFile &cfile, TFile &rfile)
-{
-  FILE * f = fopen(html,"w");
-  if (!f) return 0;
-
-  fputs("<!DOCTYPE html PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\">\n",f);
-  fputs("<html>\n",f);
-  fputs("<head>\n",f);
-  fputs("<meta content=\"text/html; charset=ISO-8859-1\"\n",f);
-  fputs("http-equiv=\"content-type\">\n",f);
-  fputs("<title>ATLAS B-tagging RTT</title>\n",f);
-  fputs("</head>\n",f);
-  fputs("<body>\n",f);
-  fputs("<h1>ATLAS B-tagging RTT</h1>\n",f);
-
-  fputs("<table style=\"text-align: left;\" border=\"1\"\n",f);
-  fputs("cellpadding=\"2\" cellspacing=\"2\">\n",f);
-  fputs("<tbody>\n",f);
-  fputs("<tr>\n",f);
-
-  fputs("<td align=\"center\">Current Athena release:</td>\n",f);
-  fprintf(f,"<td align=\"center\">%s</td>\n",cur_version);
-  fputs("<td align=\"center\">Histogram file:</td>\n",f);  
-          fprintf(f,"<td align=\"center\"><span style=\"color: rgb(%s);\">%s%s</span></td>\n",
-          (cfile.IsOpen() ? "51, 204, 0":"255, 0, 0"),  cfile.GetName(), (cfile.IsOpen() ? "" : ": UNABLE TO OPEN FILE!"));
-  fputs("</tr>\n",f);
-
-  fputs("<tr>\n",f);
-  fputs("<td align=\"center\">Reference Athena release:</td>\n",f);
-  fprintf(f,"<td align=\"center\">%s</td>\n",ref_version);
-  fputs("<td align=\"center\">Histogram file:</td>\n",f);  
-          fprintf(f,"<td align=\"center\"><span style=\"color: rgb(%s);\">%s%s</span></td>\n",
-          (rfile.IsOpen() ? "51, 204, 0":"255, 0, 0"),  rfile.GetName(), (rfile.IsOpen() ? "" : ": UNABLE TO OPEN FILE!"));
-  fputs("</tr>\n",f);
-
-  //fputs("<tr>\n",f);
-  //fputs("<td align=\"center\">Particle Species:</td>\n",f);
-  //fprintf(f,"<td align=\"center\">%s</td>\n",particle_type);
-  //fprintf(f,"<td align=\"center\" colspan=2>All tracks from iPatRec<br>via CBNT::TrackParticle (for now)</td>\n"); 
-  //fputs("</tr>\n",f);
-
-  fputs("</tbody>\n",f);
-  fputs("</table>\n",f);
-  fputs("<p>Click on the link in the histogram column to see the a brief explanation of the histogram.  <br>Click on \"view\" for each column to see what tests were run and a plot of the quantity.\n",f);
-  fputs("&nbsp;<br>\n",f);
-  fputs("<h3>Test Results: <br>\n",f);
-  fputs("</h3>\n",f);
-  //fputs("<h3>Click on histogram names for brief explanations. <br>\n",f);
-  //fputs("</h3>\n",f);
-  //fputs("<h3>Click on Pass/Fail to view actual histogram and tests performed.</h3>\n",f);
-  fputs("<table style=\"text-align: left; width: 70%;\" border=\"1\" cellpadding=\"2\"\n",f);
-  fputs("cellspacing=\"2\">\n",f);
-  fputs("<tbody>\n",f);
-  fputs("<tr>\n",f);
-  fputs("<td align=\"center\"><span style=\"font-weight: bold;\">Histogram</span></td>\n",f);
-  fputs("<td align=\"center\"><span style=\"font-weight: bold;\">Reference\n",f);
-  fputs("Release<br>\n",f);
-  fputs("</span></td>\n",f);
-  fputs("<td align=\"center\"><span style=\"font-weight: bold;\">Current\n",f);
-  fputs("Release</span></td>\n",f);
-  fputs("<td align=\"center\"><span style=\"font-weight: bold;\">Comparison</span></td>\n",f);
-  fputs("</tr>\n",f);
-  fclose(f);
-  return 1;
-}
-
-int PrintHtmlFoot(char *html)
-{
-  time_t rawtime;
-  struct tm * timeinfo;
-  time ( &rawtime );
-  timeinfo = localtime ( &rawtime );
-
-  FILE * f = fopen(html,"a");
-  if (!f) return 0;
-  fputs("</tr>\n",f);
-  fputs("</tbody>\n",f);
-  fputs("</table>\n",f);
-  fputs("<br>\n",f);
-  fputs("<address>This page was automatically generated using a modified version of root_to_html (S.Vahsen/LBNL) on ",f);
-  fputs(asctime(timeinfo),f);
-  fputs(".</address>\n",f);
-  fputs("</body>\n",f);
-  fputs("</html>\n",f);
-  fclose(f);
-  return 1;
-}
-
-int SaveHistograms(TCanvas * cx, char *hname, char *ext, char *dir)
-{
-  char epsname[512];
-  char gifname[512];
-  char comm1[512];
-  char comm2[512];
-  char comm3[512];
-
-  sprintf(epsname,"%s%s_%s.eps",dir, hname,ext);
-  sprintf(gifname,"%s%s_%s.gif",dir, hname,ext);
-  sprintf(comm1,"%s %s","pstopnm -ppm -xborder 0 -yborder 0 -portrait",epsname);
-  sprintf(comm2,"%s %s%s %s","ppmtogif",epsname,"001.ppm >",gifname);
-  sprintf(comm3,"%s%s%s","rm -f ",epsname,"001.ppm");
-
-  //  cx->SaveAs(gifname);
-
-  cx->SaveAs(epsname);
-  gSystem->Exec(comm1);
-  gSystem->Exec(comm2);
-  gSystem->Exec(comm3);
-
-  return 1;
-}
-
-void JetTagAna_RTT_postProcessing() {
-  cout << "Starting JetTagAna_RTT_postProcessing..." << endl;
-  root_to_html("JetTagAna_RTT_refsHist.root","tagana.root","12.0.6.5","current","","JetTagAna_RTT_testList.txt","","all_tests.html");
-}
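The macro above drives its comparisons from a plain-text test list whose lines read "Histogram <name> Test <token> [<token> ...]", each token being a column prefix (ref_, cur_, cmp_) glued to a test keyword with an optional numeric cut. As a minimal Python re-expression of that sscanf/strcmp parsing (hypothetical helper names, not part of the Athena code):

    import re

    TOKEN = re.compile(r"^(ref|cur|cmp)_([a-z_]+?)(?:\(([\d.]+)\))?$")

    def parse_testlist_line(line):
        # "Histogram <name> Test <tokens...>"; mirrors the macro's sscanf format
        m = re.match(r"^Histogram\s+(\S+)\s+Test\s+(.+)$", line.strip())
        if not m:
            raise ValueError("formatting error, line would be ignored: " + line)
        hname, tokens = m.group(1), m.group(2).split()
        tests = []
        for tok in tokens:
            t = TOKEN.match(tok)
            if not t:
                raise ValueError("unknown test token: " + tok)
            # column is one of ref/cur/cmp; keyword e.g. are_almost_identical
            tests.append((t.group(1), t.group(2),
                          float(t.group(3)) if t.group(3) else None))
        return hname, tests

    # parse_testlist_line("Histogram global_primvtxresx Test ref_mean_near_zero "
    #                     "cmp_are_almost_identical(0.03)")
    # -> ("global_primvtxresx",
    #     [("ref", "mean_near_zero", None), ("cmp", "are_almost_identical", 0.03)])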
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/JetTagAna_RTT_testList.txt b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/JetTagAna_RTT_testList.txt
deleted file mode 100644
index 4f19dc54cd580e9c9b9d7434d92a31902ea57a71..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/JetTagAna_RTT_testList.txt
+++ /dev/null
@@ -1,131 +0,0 @@
-Histogram global_counters Test cmp_are_almost_identical(0.03)
-Histogram global_nprimvtx Test cmp_are_almost_identical(0.03)
-Histogram global_xprimvtx Test cmp_are_almost_identical(0.03)
-Histogram global_yprimvtx Test cmp_are_almost_identical(0.03)
-Histogram global_zprimvtx Test cmp_are_almost_identical(0.03)
-Histogram global_primvtxresx Test ref_mean_near_zero cur_mean_near_zero cmp_are_almost_identical(0.03)
-Histogram global_primvtxresy Test ref_mean_near_zero cur_mean_near_zero cmp_are_almost_identical(0.03)
-Histogram global_primvtxresz Test ref_mean_near_zero cur_mean_near_zero cmp_are_almost_identical(0.03)
-Histogram global_nmcpart Test cmp_are_almost_identical(0.03)
-Histogram global_ntrkpart Test cmp_are_almost_identical(0.03)
-Histogram global_BLayerHits Test cmp_are_almost_identical(0.03)
-Histogram global_BLayerSharedHits Test cmp_are_almost_identical(0.03)
-Histogram global_PixelHits Test cmp_are_almost_identical(0.03)
-Histogram global_PixelLayers Test cmp_are_almost_identical(0.03)
-Histogram global_SiHits Test cmp_are_almost_identical(0.03)
-Histogram global_TRTHits Test cmp_are_almost_identical(0.03)
-Histogram global_nmuon Test cmp_are_almost_identical(0.03)
-Histogram global_nelectron Test cmp_are_almost_identical(0.03)
-Histogram global_njettag Test cmp_are_almost_identical(0.03)
-Histogram truth_bquark_nb Test cmp_are_almost_identical(0.03)
-Histogram truth_bquark_pt Test cmp_are_almost_identical(0.03)
-Histogram truth_bquark_eta Test cmp_are_almost_identical(0.03)
-Histogram truth_cquark_nb Test cmp_are_almost_identical(0.03)
-Histogram truth_cquark_pt Test cmp_are_almost_identical(0.03)
-Histogram truth_cquark_eta Test cmp_are_almost_identical(0.03)
-Histogram truth_bhadr_nb Test cmp_are_almost_identical(0.03)
-Histogram truth_bhadr_pt Test cmp_are_almost_identical(0.03)
-Histogram truth_bhadr_eta Test cmp_are_almost_identical(0.03)
-Histogram truth_chadr_nb Test cmp_are_almost_identical(0.03)
-Histogram truth_chadr_pt Test cmp_are_almost_identical(0.03)
-Histogram truth_chadr_eta Test cmp_are_almost_identical(0.03)
-Histogram jet_ntotal Test cmp_are_almost_identical(0.03)
-Histogram jet_label Test cmp_are_almost_identical(0.03)
-Histogram jet_nlabelb Test cmp_are_almost_identical(0.03)
-Histogram jet_nlabelc Test cmp_are_almost_identical(0.03)
-Histogram jet_nlabelt Test cmp_are_almost_identical(0.03)
-Histogram jet_ntag Test cmp_are_almost_identical(0.03)
-Histogram jet_eta Test cmp_are_almost_identical(0.03)
-Histogram jet_phi Test cmp_are_almost_identical(0.03)
-Histogram jet_et Test cmp_are_almost_identical(0.03)
-Histogram jet_ntracks Test cmp_are_almost_identical(0.03)
-Histogram jet_nmuons Test cmp_are_almost_identical(0.03)
-Histogram jet_muons_pt Test cmp_are_almost_identical(0.03)
-Histogram jet_nelectrons Test cmp_are_almost_identical(0.03)
-Histogram jet_electrons_pt Test cmp_are_almost_identical(0.03)
-Histogram tag_jetprob_w Test cmp_are_almost_identical(0.03)
-Histogram tag_i2d_w Test cmp_are_almost_identical(0.03)
-Histogram tag_i2d_n Test cmp_are_almost_identical(0.03)
-Histogram tag_i2d_b Test cmp_are_almost_identical(0.03)
-Histogram tag_i2d_u Test cmp_are_almost_identical(0.03)
-Histogram tag_i2d_sig Test cmp_are_almost_identical(0.03)
-Histogram tag_i3d_w Test cmp_are_almost_identical(0.03)
-Histogram tag_i3d_n Test cmp_are_almost_identical(0.03)
-Histogram tag_i3d_b Test cmp_are_almost_identical(0.03)
-Histogram tag_i3d_u Test cmp_are_almost_identical(0.03)
-Histogram tag_i3d_sig Test cmp_are_almost_identical(0.03)
-Histogram tag_sv1_w Test cmp_are_almost_identical(0.03)
-Histogram tag_sv1_b Test cmp_are_almost_identical(0.03)
-Histogram tag_sv1_u Test cmp_are_almost_identical(0.03)
-Histogram tag_sv2_w Test cmp_are_almost_identical(0.03)
-Histogram tag_sv2_b Test cmp_are_almost_identical(0.03)
-Histogram tag_sv2_u Test cmp_are_almost_identical(0.03)
-Histogram tag_sv_n Test cmp_are_almost_identical(0.03)
-Histogram tag_sv_n2t Test cmp_are_almost_identical(0.03)
-Histogram tag_sv_frc Test cmp_are_almost_identical(0.03)
-Histogram tag_sv_m Test cmp_are_almost_identical(0.03)
-Histogram tag_cmb_w Test cmp_are_almost_identical(0.03)
-Histogram tag_lf2d_w Test cmp_are_almost_identical(0.03)
-Histogram tag_svbu_w Test cmp_are_almost_identical(0.03)
-Histogram tag_lhsig_w Test cmp_are_almost_identical(0.03)
-Histogram tag_softm_w Test cmp_are_almost_identical(0.03)
-Histogram tag_softe_w Test cmp_are_almost_identical(0.03)
-Histogram tag_jetfitter_w Test cmp_are_almost_identical(0.03)
-Histogram tag_jetfitcomb_w Test cmp_are_almost_identical(0.03)
-Histogram tag_jetfitternn_w Test cmp_are_almost_identical(0.03)
-Histogram tag_jetfitcombnn_w Test cmp_are_almost_identical(0.03)
-Histogram perf_ip2d_b Test cmp_are_almost_identical(0.03)
-Histogram perf_ip2d_u Test cmp_are_almost_identical(0.03)
-Histogram perf_ip2d_upur Test cmp_are_almost_identical(0.03)
-Histogram perf_ip3d_b Test cmp_are_almost_identical(0.03)
-Histogram perf_ip3d_u Test cmp_are_almost_identical(0.03)
-Histogram perf_ip3d_upur Test cmp_are_almost_identical(0.03)
-Histogram perf_sv1_b Test cmp_are_almost_identical(0.03)
-Histogram perf_sv1_u Test cmp_are_almost_identical(0.03)
-Histogram perf_sv1_upur Test cmp_are_almost_identical(0.03)
-Histogram perf_sv2_b Test cmp_are_almost_identical(0.03)
-Histogram perf_sv2_u Test cmp_are_almost_identical(0.03)
-Histogram perf_sv2_upur Test cmp_are_almost_identical(0.03)
-Histogram perf_ip3dsv1_b Test cmp_are_almost_identical(0.03)
-Histogram perf_ip3dsv1_u Test cmp_are_almost_identical(0.03)
-Histogram perf_ip3dsv1_upur Test cmp_are_almost_identical(0.03)
-Histogram perf_lifetime2d_b Test cmp_are_almost_identical(0.03)
-Histogram perf_lifetime2d_u Test cmp_are_almost_identical(0.03)
-Histogram perf_lifetime2d_upur Test cmp_are_almost_identical(0.03)
-Histogram perf_secvtxtagbu_b Test cmp_are_almost_identical(0.03)
-Histogram perf_secvtxtagbu_u Test cmp_are_almost_identical(0.03)
-Histogram perf_secvtxtagbu_upur Test cmp_are_almost_identical(0.03)
-Histogram perf_lhsig_b Test cmp_are_almost_identical(0.03)
-Histogram perf_lhsig_u Test cmp_are_almost_identical(0.03)
-Histogram perf_lhsig_upur Test cmp_are_almost_identical(0.03)
-Histogram perf_softmuontag_b Test cmp_are_almost_identical(0.03)
-Histogram perf_softmuontag_u Test cmp_are_almost_identical(0.03)
-Histogram perf_softmuontag_upur Test cmp_are_almost_identical(0.03)
-Histogram perf_softelectrontag_b Test cmp_are_almost_identical(0.03)
-Histogram perf_softelectrontag_u Test cmp_are_almost_identical(0.03)
-Histogram perf_softelectrontag_upur Test cmp_are_almost_identical(0.03)
-Histogram perf_jetfitter_b Test cmp_are_almost_identical(0.03)
-Histogram perf_jetfitter_u Test cmp_are_almost_identical(0.03)
-Histogram perf_jetfitter_upur Test cmp_are_almost_identical(0.03)
-Histogram perf_jetfitcomb_b Test cmp_are_almost_identical(0.03)
-Histogram perf_jetfitcomb_u Test cmp_are_almost_identical(0.03)
-Histogram perf_jetfitcomb_upur Test cmp_are_almost_identical(0.03)
-Histogram perf_jetfitternn_b Test cmp_are_almost_identical(0.03)
-Histogram perf_jetfitternn_u Test cmp_are_almost_identical(0.03)
-Histogram perf_jetfitternn_upur Test cmp_are_almost_identical(0.03)
-Histogram perf_jetfitcombnn_b Test cmp_are_almost_identical(0.03)
-Histogram perf_jetfitcombnn_u Test cmp_are_almost_identical(0.03)
-Histogram perf_jetfitcombnn_upur Test cmp_are_almost_identical(0.03)
-Histogram perf_jetprob_b Test cmp_are_almost_identical(0.03)
-Histogram perf_jetprob_u Test cmp_are_almost_identical(0.03)
-Histogram perf_jetprob_upur Test cmp_are_almost_identical(0.03)
-Histogram perf_vxeff_den_b Test cmp_are_almost_identical(0.03)
-Histogram perf_vxeff_num_b Test cmp_are_almost_identical(0.03)
-Histogram perf_vxeff_den_u Test cmp_are_almost_identical(0.03)
-Histogram perf_vxeff_num_u Test cmp_are_almost_identical(0.03)
-Histogram perf_vxeff_den_upur Test cmp_are_almost_identical(0.03)
-Histogram perf_vxeff_num_upur Test cmp_are_almost_identical(0.03)
-Histogram perf_rej50 Test cmp_are_almost_identical(0.03)
-Histogram perf_rej60 Test cmp_are_almost_identical(0.03)
-Histogram perf_rejpur50 Test cmp_are_almost_identical(0.03)
-Histogram perf_rejpur60 Test cmp_are_almost_identical(0.03)
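Each cmp_are_almost_identical(0.03) entry above maps to the ARE_ALMOST_IDENTICAL branch of the macro: the summed absolute bin-by-bin difference, divided by the reference histogram's summed content (under- and overflow included), must stay below 0.03. A short Python sketch of that criterion, with plain lists standing in for the TH1F bin contents (an assumption, not ROOT API):

    def are_almost_identical(ref_bins, cur_bins, cut=0.03):
        # sum over all bins, including under/overflow, as the macro does
        dA = sum(abs(r - c) for r, c in zip(ref_bins, cur_bins))
        A = sum(ref_bins)
        if A > 0:
            dA = dA / A
        return dA < cut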
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/JetTagAna_jobOptions.py b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/JetTagAna_jobOptions.py
deleted file mode 100644
index 82c37edff88b96a582989a82b7224bdeedb30269..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/JetTagAna_jobOptions.py
+++ /dev/null
@@ -1,21 +0,0 @@
-
-# Python job to configure JetTagAna
-# Laurent Vacavant 2005/10/12
-
-if not "AnalysisExamples" in theApp.Dlls:
-	theApp.Dlls += [ "AnalysisExamples" ]
-
-from AnalysisExamples.AnalysisExamplesConf import JetTagAna
-MyJetTagAna = JetTagAna(
-	name="MyJetTagAna",
-        #SelectBFromRun = run_number # use only u-jets from this run
-        #SelectUFromRun = run_number # use only u-jets from this run
-	OutputLevel=INFO)
-
-from AthenaCommon.AlgSequence import AlgSequence
-topSequence = AlgSequence()
-topSequence += MyJetTagAna
-		
-from GaudiSvc.GaudiSvcConf import THistSvc
-ServiceMgr += THistSvc()
-ServiceMgr.THistSvc.Output = ["fileJetTagAna DATAFILE='tagana.root' OPT='RECREATE'"]
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/JetTagAna_postProcessing.C b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/JetTagAna_postProcessing.C
deleted file mode 100644
index 8166ab03004a864a876e4d4f9bcc1e8395128a19..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/JetTagAna_postProcessing.C
+++ /dev/null
@@ -1,144 +0,0 @@
-#include <iostream>
-#include <iomanip>
-#include <sstream>
-#include <cmath>
-
-#include "TROOT.h"
-#include "TFile.h"
-#include "TGraph.h"
-#include "TH1.h"
-#include "TStyle.h"
-#include "TCanvas.h"
-#include "TPad.h"
-#include "TLegend.h"
-
-void plotPerf(std::string fileIn)
-{
-  
-  TFile* file = new TFile(fileIn.c_str(),"read");
-
-  std::string tagger[10];
-  tagger[0] = "IP2D";
-  tagger[1] = "IP3D";
-  tagger[2] = "SV1";
-  tagger[3] = "SV2";
-  tagger[4] = "IP3DSV1";
-  tagger[5] = "Lifetime2D";
-  tagger[6] = "SecVtxTagBU";
-  tagger[7] = "lhSig";
-  tagger[8] = "SoftMuonTag";
-  tagger[9] = "SoftElectronTag";
-  for(int i=0;i<10;i++)
-    {
-      std::cout << "Starting with tagger " << tagger[i] << std::endl;
-
-      TString tit("perf_"); tit+=tagger[i]; tit.ToLower();
-
-      std::cout << "Histo " << tit << std::endl;
-
-      // b-jets:
-      TH1F* bjet =  (TH1F*)file->Get(tit+"_b");
-      if(bjet==0) continue;
-      double nbjets = bjet->GetEntries();
-
-      // light jets:
-      TH1F* ujet =  (TH1F*)file->Get(tit+"_u");
-      if(ujet==0)continue;
-      double nujets = ujet->GetEntries();
-
-      // purified light jets:
-      TH1F* upurjet =  (TH1F*)file->Get(tit+"_upur");
-      if(upurjet==0)continue;
-      double nupurjets = upurjet->GetEntries();
-
-      std::cout << "Histograms " << bjet << " " << ujet << " " << upurjet << std::endl;
-
-      int nbins = ujet->GetNbinsX();
-      const int nmax = 200;
-      if(nbins>nmax)std::cout << "JetTagAna_postProcessing ERROR: nbins too large" << std::endl;
-      double beff[nmax] = {0};
-      double ueff[nmax] = {0};
-      double upureff[nmax] = {0};
-      int ngdbins(nbins);
-      int ngdbinspur(nbins);
-      for(int k=0;k<nbins;k++)
-	{
-	  if( ngdbins   ==nbins && (bjet->Integral(k+1,nbins) == 0 || ujet   ->Integral(k+1,nbins) == 0) ) ngdbins = k;
-	  if( ngdbinspur==nbins && (bjet->Integral(k+1,nbins) == 0 || upurjet->Integral(k+1,nbins) == 0) ) ngdbinspur = k;
-	  if(nbjets)beff[k]    = bjet   ->Integral(k+1,nbins)/nbjets;
-	  if(nujets)ueff[k]    = ujet   ->Integral(k+1,nbins)/nujets;
-	  if(nupurjets)upureff[k] = upurjet->Integral(k+1,nbins)/nupurjets;
-	}
-
-      std::cout << "Done computing efficiencies..." << std::endl;
-
-      if(ngdbins==0||ngdbinspur==0)continue;
-
-      double urej   [nmax] = {0};
-      double upurrej[nmax] = {0};
-      for(int k=0;k<nbins;k++)
-	{
-	  if(ueff[k])urej[k] = 1./ueff[k];
-	  if(upureff[k])upurrej[k] = 1./upureff[k];
-	}  
-
-      std::cout << "Done computing rejections..." << std::endl;
-
-      TString cn = tit+"_canvas";
-      TCanvas* c = new TCanvas(cn,cn,800,600);
-      c->Draw();
-      c->cd();
-
-      TGraph* bVSupur(0);      
-      if(ngdbinspur)
-	{
-	  bVSupur = new TGraph(ngdbinspur,beff,upurrej);
-	  std::cout << "TGraph " << bVSupur << std::endl;      
-	  bVSupur->SetName(tit+"_graph_pur");
-	  bVSupur->SetTitle(tagger[i].c_str());
-	  bVSupur->SetLineColor(2);
-	  bVSupur->SetMarkerSize(1.3);
-	  bVSupur->SetLineWidth(4);
-	  bVSupur->SetMarkerColor(2);
-	  bVSupur->SetMarkerStyle(21);
-	  bVSupur->GetXaxis()->SetTitle("#epsilon_{B}");
-	  bVSupur->GetYaxis()->SetTitle("Rejection");
-	  bVSupur->Draw("ACP");
-	}
-
-      TGraph* bVSu(0);      
-      if(ngdbins)
-	{
-	  bVSu = new TGraph(ngdbins,beff,urej);
-	  std::cout << "TGraph " << bVSu << std::endl;      
-	  bVSu->SetName(tit+"_graph");
-	  bVSu->SetTitle(tagger[i].c_str());
-	  bVSu->SetLineColor(4);
-	  bVSu->SetMarkerSize(1.3);
-	  bVSu->SetLineWidth(4);
-	  bVSu->SetMarkerColor(4);
-	  bVSu->SetMarkerStyle(20);
-	  bVSu->GetXaxis()->SetTitle("#epsilon_{B}");
-	  bVSu->GetYaxis()->SetTitle("Rejection");
-	  if(ngdbinspur)bVSu->Draw("CPsame");
-	  else bVSu->Draw("ACP");
-	}
-
-      TLegend* leg1    = new TLegend(0.55,0.7,0.9,.87);
-      leg1->SetName(tit+"_leg");
-      leg1->AddEntry(bVSu->GetName(),"All light jets","pl");
-      leg1->AddEntry(bVSupur->GetName(),"Purified light jets","pl");
-      leg1->SetFillColor(0);
-      leg1->Draw();
-
-      c->Print(tit+".eps");
-      c->Print(tit+".gif");
-
-      std::cout << "Done with tagger " << tagger[i] << std::endl;
-    }
-  
-  file->Close();
-  file->Delete();
-
-}
-
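plotPerf above turns each pair of tag-weight distributions into an efficiency-versus-rejection curve: for every cut bin it takes the fraction of b-jets above the cut as the efficiency and the inverse fraction of (purified) light jets above the cut as the rejection. A hedged sketch of that bookkeeping with plain bin-content lists (assumed inputs, not ROOT histograms):

    def roc_points(b_bins, u_bins):
        nb, nu = sum(b_bins), sum(u_bins)
        if nb == 0 or nu == 0:
            return []
        points = []
        for k in range(len(b_bins)):
            eff_b = sum(b_bins[k:]) / nb   # b-jet efficiency above cut bin k
            eff_u = sum(u_bins[k:]) / nu   # light-jet efficiency (mistag rate)
            if eff_b > 0 and eff_u > 0:
                points.append((eff_b, 1.0 / eff_u))  # rejection = 1 / mistag rate
        return points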
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/JetTagAna_postProcessingHLT.C b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/JetTagAna_postProcessingHLT.C
deleted file mode 100644
index b0902dd44622b32cc84570664dec371d2beb26d2..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/JetTagAna_postProcessingHLT.C
+++ /dev/null
@@ -1,328 +0,0 @@
-#include <fstream>
-
-
-void JetTagAna_postProcessingHLT(TString sample){
-
-  ofstream fout("out.txt");
-
-  gROOT->Reset();
-  gStyle->SetOptStat(0);
-  gROOT->ForceStyle();
-
-  Int_t first=1;
-  Int_t last=1;
-
-  TString path_bb = "./";
-  TString path_uu = "./";
-  TString path = "./";
-
-  TString fileName_bb;
-  TString fileName_uu;
-  TString fileName;
-
-  TH1F* hbbL2   = 0;
-  TH1F* hbbEF   = 0;
-  TH1F* huuL2   = 0;
-  TH1F* huuEF   = 0;
-  TH1F* hbbOf1  = 0;
-  TH1F* huuOf1  = 0;
-  TH1F* hbbOf2  = 0;
-  TH1F* huuOf2  = 0;
-
-  if (sample == "5850" || sample == "5851") {
-
-    fileName_bb = "tagana.bb.0";
-    fileName_uu = "tagana.uu.0";
-
-  } else if (sample == "5200") {
-
-    fileName = "tagana.0";
-    
-  }
-
-  if (sample == "5850" || sample == "5851") {
-
-    bool doFirst_u = true;
-    bool doFirst_b = true;
-    
-    for(Int_t i = first; i <= last; i++) {
-      
-      TString realFileName_bb;
-      TString realFileName_uu;
-      
-      TString insideFileName_bb = fileName_bb;
-      TString insideFileName_uu = fileName_uu;
-      
-      TString number = ""; number+=i;
-      
-      insideFileName_bb.Resize(insideFileName_bb.Length()-(number.Length()-1));
-      insideFileName_uu.Resize(insideFileName_uu.Length()-(number.Length()-1));
-      
-      realFileName_bb = insideFileName_bb+number+".root";
-      realFileName_uu = insideFileName_uu+number+".root";
-      
-      fout << path_bb << realFileName_bb << endl;
-      fout << path_uu << realFileName_uu << endl;    
-      
-      ifstream fb(path_bb + realFileName_bb);
-      ifstream fu(path_uu + realFileName_uu);
-      
-      TFile* file;
-      
-      if(fb) {
-	
-	file = new TFile(path_bb + realFileName_bb);
-	
-	TH1F* h_bbL2  = (TH1F*)file->Get("perf_l2ip3d_b");
-	TH1F* h_bbEF  = (TH1F*)file->Get("perf_ef_afterl2_ip3d_b");
-	TH1F* h_bbOf1 = (TH1F*)file->Get("perf_ofip3d_b");
-	TH1F* h_bbOf2 = (TH1F*)file->Get("perf_ip3d_b");
-      }
-      
-      if(fu) {
-	
-	file = new TFile(path_uu + realFileName_uu);
-	
-	TH1F* h_uuL2  = (TH1F*)file->Get("perf_l2ip3d_u");
-	TH1F* h_uuEF  = (TH1F*)file->Get("perf_ef_afterl2_ip3d_u");
-	TH1F* h_uuOf1 = (TH1F*)file->Get("perf_ofip3d_u");
-	TH1F* h_uuOf2 = (TH1F*)file->Get("perf_ip3d_u");
-      }
-      
-      if (fu && doFirst_u){
-
-	huuL2  = new TH1F(*h_uuL2);
-	huuEF  = new TH1F(*h_uuEF);
-	huuOf1 = new TH1F(*h_uuOf1);
-	huuOf2 = new TH1F(*h_uuOf2);
-
-	doFirst_u = false;
-      }
-      
-      if (fb && doFirst_b){
-
-	hbbL2  = new TH1F(*h_bbL2);
-	hbbEF  = new TH1F(*h_bbEF);
-	hbbOf1 = new TH1F(*h_bbOf1);
-	hbbOf2 = new TH1F(*h_bbOf2);
-
-	doFirst_b = false;
-      }
-
-      if(fu) {
-	huuL2->Add(h_uuL2);   huuEF->Add(h_uuEF); 
-	huuOf1->Add(h_uuOf1); huuOf2->Add(h_uuOf2); 
-      }
-      
-      if(fb) {
-	hbbL2->Add(h_bbL2);   hbbEF->Add(h_bbEF); 
-	hbbOf1->Add(h_bbOf1); hbbOf2->Add(h_bbOf2); 
-      }
-    }
-  }
-
-  if (sample == "5200") {
-
-    bool doFirst = true;
-    
-    for(Int_t i = first; i <= last; i++) {
-      
-      TString realFileName;
-      
-      TString insideFileName = fileName;
-      
-      TString number = ""; number+=i;
-      
-      insideFileName.Resize(insideFileName.Length()-(number.Length()-1));
-      
-      realFileName = insideFileName+number+".root";
-      
-      fout << path << realFileName << endl;
-      
-      ifstream f(path + realFileName);
-      
-      TFile* file;
-      
-      if(f) {
-	
-	file = new TFile(path + realFileName);
-	
-	TH1F* h_bbL2  = (TH1F*)file->Get("perf_l2_ip3d_b");
-	TH1F* h_bbEF  = (TH1F*)file->Get("perf_ef_afterl2_ip3d_b");
-	//TH1F* h_bbEF  = (TH1F*)file->Get("perf_ef_ip3d_b");
-	TH1F* h_bbOf1 = (TH1F*)file->Get("perf_offline_afterhlt_ip3d_b");
-	TH1F* h_bbOf2 = (TH1F*)file->Get("perf_ip3d_b");
-
-	TH1F* h_uuL2  = (TH1F*)file->Get("perf_l2_ip3d_u");
-	TH1F* h_uuEF  = (TH1F*)file->Get("perf_ef_afterl2_ip3d_u");
-	//TH1F* h_uuEF  = (TH1F*)file->Get("perf_ef_ip3d_u");
-	TH1F* h_uuOf1 = (TH1F*)file->Get("perf_offline_afterhlt_ip3d_u");
-	TH1F* h_uuOf2 = (TH1F*)file->Get("perf_ip3d_u");
-
-	TH1F* h_bbL2JetProb = (TH1F*)file->Get("perf_l2_jetprob_b");
-	TH1F* h_bbEFJetProb = (TH1F*)file->Get("perf_ef_jetprob_b");
-
-	TH1F* h_uuL2JetProb = (TH1F*)file->Get("perf_l2_jetprob_u");
-	TH1F* h_uuEFJetProb = (TH1F*)file->Get("perf_ef_jetprob_u");
-      }
-
-      if (f && doFirst){
-
-	hbbL2  = new TH1F(*h_bbL2);
-	hbbEF  = new TH1F(*h_bbEF);
-	hbbOf1 = new TH1F(*h_bbOf1);
-	hbbOf2 = new TH1F(*h_bbOf2);
-
-	huuL2  = new TH1F(*h_uuL2);
-	huuEF  = new TH1F(*h_uuEF);
-	huuOf1 = new TH1F(*h_uuOf1);
-	huuOf2 = new TH1F(*h_uuOf2);
-
-	hbbL2JetProb = new TH1F(*h_bbL2JetProb);
-	hbbEFJetProb = new TH1F(*h_bbEFJetProb);
-
-	huuL2JetProb = new TH1F(*h_uuL2JetProb);
-	huuEFJetProb = new TH1F(*h_uuEFJetProb);
-
-	doFirst = false;
-
-      }
-
-      if(f) {
-
-	hbbL2->Add(h_bbL2);   hbbEF->Add(h_bbEF); 
-	hbbOf1->Add(h_bbOf1); hbbOf2->Add(h_bbOf2); 
-
-	huuL2->Add(h_uuL2);   huuEF->Add(h_uuEF); 
-	huuOf1->Add(h_uuOf1); huuOf2->Add(h_uuOf2); 
-
-	hbbL2JetProb->Add(h_bbL2JetProb);   hbbEFJetProb->Add(h_bbEFJetProb);
-	huuL2JetProb->Add(h_uuL2JetProb);   huuEFJetProb->Add(h_uuEFJetProb);
-      }
-    }
-  }
-
-  TCanvas *c = new TCanvas("c","",10,10,500,500);
-  TGraphErrors *grx1 = new TGraphErrors();
-  TGraphErrors *grx2 = new TGraphErrors();
-  TGraphErrors *grx3 = new TGraphErrors();
-  TGraphErrors *grx4 = new TGraphErrors();
-
-  fout << "L2 " << endl;
-  plot(fout,hbbL2,huuL2,grx1);
-  fout << "EF " << endl;
-  plot(fout,hbbEF,huuEF,grx2,1.,1.,"SAME");
-  fout << "Offline corr." << endl;
-  plot(fout,hbbOf1,huuOf1,grx3,1.,1.,"SAME");
-  fout << "Offline " << endl;
-  plot(fout,hbbOf2,huuOf2,grx4,1.,1.,"SAME");
-
-  grx1->Draw("AP");
-  grx1->GetXaxis()->SetLimits(0.38,1.02);
-  grx1->SetMinimum(1);
-  grx1->SetMaximum(800);
-  grx1->SetMarkerStyle(22);
-  grx2->Draw("P");
-  grx2->SetMarkerStyle(26);
-  grx3->Draw("P");
-  grx3->SetMarkerStyle(20);
-  grx4->Draw("P");
-  grx4->SetMarkerStyle(18);
-
-  gPad->SetLogy();
-  gPad->SetGrid();
-  gPad->SetLeftMargin(0.15);
-  gPad->SetBottomMargin(0.15);
-  gPad->SetFillColor(0);
-  gPad->SetHighLightColor(0);
-  gPad->SetBorderMode(0);
-
-  grx1->GetYaxis()->SetTitle("Light-jet rejection");
-  grx1->GetXaxis()->SetTitle("b-jet efficiency");
-  grx1->GetYaxis()->SetTitleOffset(1.3);
-
-  TLegend *leg = new TLegend(0.5,0.65,0.88,0.85);
-  leg->SetTextSize(0.04);
-  leg->AddEntry(grx1,"L2","PL");
-  leg->AddEntry(grx2,"EF after L2 ","PL");
-  leg->AddEntry(grx3,"Offline after HLT","PL");
-  leg->AddEntry(grx4,"Offline","PL");
-  leg->Draw();
-
-  TCanvas *c2 = new TCanvas("c2","",10,10,500,500);
-  TGraphErrors *grx5 = new TGraphErrors();
-  TGraphErrors *grx6 = new TGraphErrors();
-
-  fout << "L2 JetProb" << endl;
-  plot(fout,hbbL2JetProb,huuL2JetProb,grx5);
-  fout << "EF JetProb" << endl;
-  plot(fout,hbbEFJetProb,huuEFJetProb,grx6,1.,1.,"SAME");
-
-  grx5->Draw("AP");
-  grx5->GetXaxis()->SetLimits(0.38,1.02);
-  grx5->SetMinimum(1);
-  grx5->SetMaximum(110);
-  grx5->SetMarkerStyle(22);
-  grx6->Draw("P");
-  grx6->SetMarkerStyle(26);
-
-  gPad->SetLogy();
-  gPad->SetGrid();
-  gPad->SetLeftMargin(0.15);
-  gPad->SetBottomMargin(0.15);
-  gPad->SetFillColor(0);
-  gPad->SetHighLightColor(0);
-  gPad->SetBorderMode(0);
-
-  grx5->GetYaxis()->SetTitle("Light-jet rejection");
-  grx5->GetXaxis()->SetTitle("b-jet efficiency");
-  grx5->GetYaxis()->SetTitleOffset(1.3);
-
-  TLegend *leg = new TLegend(0.5,0.65,0.88,0.85);
-  leg->SetTextSize(0.04);
-  leg->AddEntry(grx5,"L2 JetProb","PL");
-  leg->AddEntry(grx6,"EF JetProb","PL");
-  leg->Draw();
-}
-
-void plot(ofstream &fout, TH1F* hb, TH1F* hu,TGraphErrors *gr,double off_eps=1, double off_R=1, TString opt="AP"){
-  
-  double totb = 0;
-  double totu = 0;
-  for (int i=0;i<hb->GetNbinsX();i++){
-    totb += hb->GetBinContent(i+1);
-    totu += hu->GetBinContent(i+1);
-  }
-  
-  fout << "totb = " << totb << "; totu = " << totu << endl;
-
-  double sumu=totu,sumb=totb;
-  double effu,effb,seffu,seffb;
-  int ind=0;
-
-  for (int i=0;i<hb->GetNbinsX()-1;i++){
-    if (i>=0){
-      sumb -= hb->GetBinContent(i+1);
-      sumu -= hu->GetBinContent(i+1);
-    }
-
-    //fout << "GetBinContent " << i+1 << " = " << hb->GetBinContent(i+1) << " " << totb << endl;
-    //fout << "GetBinCenter  " << i   << " = " << hu->GetBinCenter(i) << endl;
-    //fout << "sumb = " << sumb << "; sumu = " << sumu << endl;
-
-    effb  = sumb/totb*off_eps;
-    effu  = sumu/totu*(1/off_R);
-    seffb = sqrt((effb)*(1-effb)/totb);
-    seffu = sqrt((effu)*(1-effu)/totu);
-
-    fout << "cut " << hb->GetBinCenter(i+1)+hb->GetBinWidth(i+1) << "  effb = " << effb << "; effu = " << effu << endl;
-    
-    if (effu!=0 && effb!=0){ 
-      seffu = seffu/pow(effu,2);
-      gr->SetPoint(ind,effb,1./effu);
-      gr->SetPointError(ind,seffb,seffu);
-      ind++;
-    }
-  }
-}
-
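The plot() helper above assigns binomial uncertainties to both efficiencies and propagates the light-jet one onto the rejection through R = 1/eff_u, i.e. dR = d(eff_u)/eff_u^2. A Python sketch for a single cut value (assuming eff_u > 0, as the macro's own guard enforces, and ignoring the off_eps/off_R scale factors):

    from math import sqrt

    def rejection_point(n_b_pass, n_b_tot, n_u_pass, n_u_tot):
        eff_b = n_b_pass / n_b_tot
        eff_u = n_u_pass / n_u_tot
        err_b = sqrt(eff_b * (1.0 - eff_b) / n_b_tot)   # binomial error on eff_b
        err_u = sqrt(eff_u * (1.0 - eff_u) / n_u_tot)   # binomial error on eff_u
        # rejection R = 1/eff_u; its uncertainty is err_u / eff_u**2
        return (eff_b, 1.0 / eff_u), (err_b, err_u / eff_u ** 2)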
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/MiscellaneousExamples_jobOptions.py b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/MiscellaneousExamples_jobOptions.py
deleted file mode 100644
index a75b15ca7dbe531e28552090ae218e64810d4dce..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/MiscellaneousExamples_jobOptions.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# Author: Ketevi A. Assamagan
-# BNL, November 20, 2004
-
-# Read in the AOD from POOL - AOD.pool.root
-# this file is produced by running the AOD builders of the RDO raw data
-
-# import the analysis flags
-from AnalysisExamples.AnalysisFlags import AnalysisFlags
-
-# back navigation and interactive session flags
-AnalysisFlags.DoNavigation      = False
-AnalysisFlags.DoInteractive     = True
-
-include( "AnalysisExamples/AnalysisCommon_jobOptions.py")
-
-AnalysisFlags.Print()
-
-# get a handle on the ServiceManager which holds all the services
-from AthenaCommon.AppMgr import ServiceMgr
-# The AOD input file
-import AthenaPoolCnvSvc.ReadAthenaPool
-ServiceMgr.EventSelector.InputCollections = ["AOD.pool.root"]
-
-# The AOD input file for large statistics
-#include ( "aodinput.py" )
-
-###########################################
-# setup TTree registration Service
-# save ROOT histograms and NTuple
-from GaudiSvc.GaudiSvcConf import THistSvc
-ServiceMgr += THistSvc()
-ServiceMgr.THistSvc.Output = ["AANT DATAFILE='MiscellaneousExamples.root' OPT='RECREATE'"]
-from AnalysisTools.AnalysisToolsConf import AANTupleStream
-topSequence += AANTupleStream()
-AANTupleStream = AANTupleStream()
-
-AANTupleStream.ExtraRefNames = [ "StreamESD","Stream1" ]
-AANTupleStream.OutputName = 'MisellaneousExamples.root'
-AANTupleStream.WriteInputDataHeader = True
-AANTupleStream.OutputLevel = WARNING
-
-# The user analysis algorithm to be executed
-from AnalysisExamples.AnalysisExamplesConf import MiscellaneousExamples 
-topSequence += MiscellaneousExamples()
-
-MiscellaneousExamples = MiscellaneousExamples()
-# The user analysis cuts - default values to changed by the user as needed
-
-# set the names of the particle containers to be retrieved from StoreGate
-# must be the same name used to build the AOD container
-# do not touch these if you do know what you are doing
-
-MiscellaneousExamples.JetContainerName = "Cone4H1TowerJets"
-MiscellaneousExamples.ElectronContainerName = "ElectronAODCollection"
-MiscellaneousExamples.MuonContainerName = "StacoMuonCollection"
-MiscellaneousExamples.EventWeight = 1.0
-MiscellaneousExamples.JetPtCut = 20.0*GeV
-MiscellaneousExamples.DeltaMjj = 25.0*GeV
-MiscellaneousExamples.OutputLevel = INFO
-
-# Set output level threshold (2=DEBUG, 3=INFO, 4=WARNING, 5=ERROR, 6=FATAL )
-ServiceMgr.MessageSvc.OutputLevel = ERROR
-
-# Number of Events to process
-theApp.EvtMax = 200
-
-# For interactive analysis in athena
-if AnalysisFlags.DoInteractive:
-   include( "PyAnalysisCore/InitPyAnalysisCore.py" )
-
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/PIDinAOD_jobOptions.py b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/PIDinAOD_jobOptions.py
deleted file mode 100644
index 51b746b8aa5b533a946113ea468c49083f38f855..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/PIDinAOD_jobOptions.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# Author: Ketevi A. Assamagan
-# BNL, July 16, 2004
-
-# job options for particle identification analysis
-# Read in the AOD from POOL 
-# this file is produced by running the AOD builders of the RDO raw data
-# The user is supposed to modify the input data andset the flags as
-# appropriate for his/her analysis
-
-#get a handle on the ServiceManager which holds all the services
-from AthenaCommon.AppMgr import ServiceMgr
-
-# import the analysis flags
-from AnalysisExamples.AnalysisFlags import AnalysisFlags
-
-# load common options and common DLL
-include( "AnalysisExamples/AnalysisCommon_jobOptions.py" )
-
-# Print the flags before running the analysis code
-AnalysisFlags.Print()
-
-# particle pre selection job options
-if AnalysisFlags.DoElectron:
-   include( "AnalysisExamples/DoElectron_jobOptions.py" )
-if AnalysisFlags.DoPhoton:	
-   include( "AnalysisExamples/DoPhoton_jobOptions.py" )
-if AnalysisFlags.DoMuon:
-   include( "AnalysisExamples/DoMuon_jobOptions.py" )
-if AnalysisFlags.DoTauJet:
-   include( "AnalysisExamples/DoTauJet_jobOptions.py" )
-if AnalysisFlags.DoParticleJet:
-   include( "AnalysisExamples/DoParticleJet_jobOptions.py" )
-if AnalysisFlags.DoBJet:
-   include( "AnalysisExamples/DoBJet_jobOptions.py" )
-
-# The AOD input file
-import AthenaPoolCnvSvc.ReadAthenaPool
-ServiceMgr.EventSelector.InputCollections = [ "AOD.pool.root" ]
-
-# Set output level threshold (2=DEBUG, 3=INFO, 4=WARNING, 5=ERROR, 6=FATAL )
-ServiceMgr.MessageSvc.OutputLevel = DEBUG
-
-# Number of Events to process
-theApp.EvtMax = 10000
-
-#########################################
-#Setup TTree registration Service
-# save ROOT histograms and NTuple
-from GaudiSvc.GaudiSvcConf import THistSvc
-ServiceMgr += THistSvc()
-# Root Ntuple output
-ServiceMgr.THistSvc.Output = [ "FILE1 DATAFILE='PIDinAOD.ntuple.root' OPT='NEW'" ]
-ServiceMgr.THistSvc.Output += [ "FILE2 DATAFILE='PIDinAOD.hist.root' OPT='NEW'" ]
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/PileUpTruthExample_topOptions.py b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/PileUpTruthExample_topOptions.py
deleted file mode 100644
index 768d5b07329802d29cb10d2fbb1ad63468317c3e..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/PileUpTruthExample_topOptions.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# Author: Ketevi A. Assamagan
-# BNL, february 2008
-
-# get a handle on the service manager
-from AthenaCommon.AppMgr import ServiceMgr as svcMgr
-
-# get a handle on the sequence of top-level algorithms
-from AthenaCommon.AlgSequence import AlgSequence
-topSequence = AlgSequence()
-
-# Event selector
-import AthenaPoolCnvSvc.ReadAthenaPool
-svcMgr.EventSelector.InputCollections = [ "ESD.pool.root" ]
-
-from AnalysisExamples.AnalysisExamplesConf import PileUpTruthExample
-topSequence += PileUpTruthExample(
-    "PileUpTruthExample",
-    McEventKey = "TruthEvent",
-    OutputLevel = INFO
-    )
-
-from AthenaCommon.AppMgr import theApp
-theApp.EvtMax = 100
-
-
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/README.TXT b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/README.TXT
deleted file mode 100644
index 0319ebb7014e568b5c7a9db377e503554311bed3..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/README.TXT
+++ /dev/null
@@ -1,8 +0,0 @@
-Some of the examples have three jobOptions files, e.g.,
-
-ttbarExample_jobOptions.py
-ttbarExample_jobOptions_AutoConfig.py
-ttbarExample_jobOptions_New.py
-
-Please look at https://twiki.cern.ch/twiki/bin/view/AtlasProtected/PhysicsAnalysisWorkBookAODAnalysisRel15#What_s_NEW_on_this_page for an explanation
-
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/ReadTruthParticles_jobOptions.py b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/ReadTruthParticles_jobOptions.py
deleted file mode 100644
index f36389993cb7c7fc4221a8b1b2c86dfb04dfeca9..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/ReadTruthParticles_jobOptions.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# Fragment to run the ReadTruthParticles Algorithm
-# Author : S.Binet <binet.At_cern.ch>
-
-import AthenaCommon.Constants as Lvl
-from AthenaCommon.AlgSequence import AlgSequence
-topSequence = AlgSequence()
-
-from McParticleTools.McParticleToolsConf import TruthParticleCnvTool
-from AnalysisExamples.AnalysisExamplesConf import ReadTruthParticles
-
-topSequence += ReadTruthParticles(
-    "ReadTruthParticles",
-    ## input location of the TruthParticles
-    TruthParticles = "SpclMC",
-    
-    ## message level for this algorithm
-    OutputLevel = Lvl.INFO
-    )
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/ReadTruthParticles_topOptions.py b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/ReadTruthParticles_topOptions.py
deleted file mode 100644
index 6ebf9614e2386669e9fe71228d3c00a950f598fc..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/ReadTruthParticles_topOptions.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# Author: Ketevi A. Assamagan
-# BNL, June 12, 2004
-
-# get a handle on the service manager
-from AthenaCommon.AppMgr import ServiceMgr as svcMgr
-
-# get a handle on the sequence of top-level algorithms
-from AthenaCommon.AlgSequence import AlgSequence
-topSequence = AlgSequence()
-
-# Event selector
-import AthenaPoolCnvSvc.ReadAthenaPool
-
-# the POOL converters: loaded on demand from now on (release 13)
-## include( "ParticleBuilderOptions/ESD_PoolCnv_jobOptions.py" )
-## include( "ParticleBuilderOptions/AOD_PoolCnv_jobOptions.py")
-## include( "ParticleBuilderOptions/McAOD_PoolCnv_jobOptions.py")
-## include( "EventAthenaPool/EventAthenaPool_joboptions.py" )
-
-# Read in the AOD from POOL - AOD.pool.root
-# this file is produced by running the AOD builders of the RDO raw data
-
-#-------- to convert "GEN_EVENT" to TruthParticles on the fly - Ketevi A. Assamagan
-#from ParticleBuilderOptions.AODFlags import AODFlags
-#AODFlags.McEventKey = "GEN_EVENT"
-#from McParticleAlgs.JobOptCfg import createTruthParticlesBuilder
-#topSequence += createTruthParticlesBuilder()
-#from JetRec.JetGetters import *
-#from JetRec.JetRecFlags import jetFlags
-#jetFlags.inputFileType = 'GEN'
-#make_StandardJetGetter('Cone',0.4,'Truth').jetAlgorithmHandle()
-
-#------------
-
-include ( "AnalysisExamples/ReadTruthParticles_jobOptions.py" )
-ReadTruthParticles = topSequence.ReadTruthParticles
-ReadTruthParticles.ConvertFromESD = True
-
-if ReadTruthParticles.ConvertFromESD:
-    #ESD case
-    svcMgr.EventSelector.InputCollections = [ "ESD.pool.root" ]
-    ReadTruthParticles.CnvTool.McEvents = "TruthEvent"
-    ReadTruthParticles.CnvTool.TruthParticlesOutput = "SpclMC_ESD"
-    ReadTruthParticles.TruthParticles = "SpclMC_ESD"
-else:
-    # The AOD case
-    svcMgr.EventSelector.InputCollections = [ "AOD.pool.root" ]
-
-# Number of Events to process
-from AthenaCommon.AppMgr import theApp
-theApp.EvtMax = 500
-
-
-
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/TrackExample_jobOptions.py b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/TrackExample_jobOptions.py
deleted file mode 100644
index 4a422d62fb4aad98db5bce74954463aaa807c7f4..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/TrackExample_jobOptions.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# Author: Ketevi A. Assamagan
-# CERN, February 13, 2005
-
-# Set up the reading of an xAOD file using RecExCommon:
-FNAME = "/afs/cern.ch/atlas/project/PAT/data/xAOD/" \
-    "valid2.147807.PowhegPythia8_AU2CT10_Zmumu.digit." \
-    "AOD.e2657_s1933_s1964_r5493.pool.root"
-import AthenaPoolCnvSvc.ReadAthenaPool
-ServiceMgr.EventSelector.InputCollections = [ FNAME ]
-
-# Access the algorithm sequence:
-from AthenaCommon.AlgSequence import AlgSequence
-topSequence = AlgSequence()
-
-# Create and configure the example algorithm:
-from AnalysisExamples.AnalysisExamplesConf import TrackExample
-trackExample = TrackExample()
-
-# The stream/file to write to
-trackExample.RootStreamName = "/AANT"
-trackExample.RootDirName    = "TrackParticle"
-
-# Track isolation cone
-trackExample.TrackIsolationCone = 16
-
-# Event weight
-trackExample.EventWeight = 1.0
-
-# Track Momentum cut
-trackExample.TrackMomentumCut = 1.0 * GeV
-
-# Message output level for this algorithm
-trackExample.OutputLevel = DEBUG
-
-# Print the algorithm's properties:
-print trackExample
-
-# Add the algorithm to the sequence:
-topSequence += trackExample
-
-# Number of Events to process
-theApp.EvtMax = 10
-
-#########################################
-#Setup TTree registration Service
-# save ROOT histograms and NTuple
-from GaudiSvc.GaudiSvcConf import THistSvc
-ServiceMgr += THistSvc()
-#Root Ntuple output file and name
-ServiceMgr.THistSvc.Output += [
-    "AANT DATAFILE='TrackExample.hist.root' OPT='RECREATE'"
-    ] 
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/TrigAnalysisExample_jobOptions.py b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/TrigAnalysisExample_jobOptions.py
deleted file mode 100644
index 78e7101d233ea5d8e262d97d71263884961d3ca4..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/TrigAnalysisExample_jobOptions.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# TriggerExample_jobOptions.py
-# Based on UserAnalysis/AnalysisSkeleton_topOptions.py
-
-# get a handle on the ServiceManager which holds all the services
-from AthenaCommon.AppMgr import ServiceMgr
-# Event selector
-import AthenaPoolCnvSvc.ReadAthenaPool
-
-# Particle Properties
-from PartPropSvc.PartPropSvcConf import PartPropSvc
-
-# the POOL converters
-include( "ParticleBuilderOptions/ESD_PoolCnv_jobOptions.py" )
-include( "ParticleBuilderOptions/AOD_PoolCnv_jobOptions.py")
-include( "ParticleBuilderOptions/McAOD_PoolCnv_jobOptions.py")
-include( "EventAthenaPool/EventAthenaPool_joboptions.py" )
-
-# The AOD input file
-ServiceMgr.EventSelector.InputCollections = [ "AOD.pool.root" ]
-
-# Create the algorithm sequence:
-from AthenaCommon.AlgSequence import AlgSequence
-topSequence = AlgSequence()
-
-# Add the analysis algorithm:
-from AnalysisExamples.AnalysisExamplesConf import TrigAnalysisExample
-topSequence += TrigAnalysisExample()
-topSequence.TrigAnalysisExample.OutputLevel = INFO
-
-##
-##########################################
-## Set up the trigger configuration service and the metadata service
-## it relies on, for an analysis job without RecExCommon
-
-from AthenaCommon.GlobalFlags import GlobalFlags
-GlobalFlags.DetGeo.set_atlas()
-
-# set up trigger configuration service
-from TriggerJobOpts.TriggerConfigGetter import TriggerConfigGetter
-cfg =  TriggerConfigGetter("ReadPool")
-
-## set up trigger decision tool
-from TrigDecisionTool.TrigDecisionToolConf import Trig__TrigDecisionTool
-ToolSvc += Trig__TrigDecisionTool( "TrigDecisionTool" )
-topSequence.TrigAnalysisExample.TrigDecisionTool = ToolSvc.TrigDecisionTool
-
-## END of trigger setup
-##
-
-# Set output level threshold (2=DEBUG, 3=INFO, 4=WARNING, 5=ERROR, 6=FATAL )
-ServiceMgr.MessageSvc.OutputLevel = WARNING
-
-# Number of Events to process
-theApp.EvtMax = 10
-
-from GaudiCommonSvc.GaudiCommonSvcConf import AuditorSvc
-ServiceMgr.AuditorSvc.Auditors  += [ "ChronoAuditor"]
-
-AthenaPoolCnvSvc = Service("AthenaPoolCnvSvc")
-AthenaPoolCnvSvc.UseDetailChronoStat = TRUE
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/WriteTruthParticles_jobOptions.py b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/WriteTruthParticles_jobOptions.py
deleted file mode 100644
index 57fd84457728d15bcf6106c6eb5d8fae26fc4afb..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/WriteTruthParticles_jobOptions.py
+++ /dev/null
@@ -1,105 +0,0 @@
-###############################################################
-#
-# Job options file
-#
-#==============================================================
-
-# You can use this jobO like this:
-# athena -c 'MCEVENTKEY="GEN_EVENT"; POOL_OUTPUT="my.pool.root"' AnalysisExamples/WriteTruthParticles_jobOptions.py
-
-#--------------------------------------------------------------
-# General Application Configuration options
-#--------------------------------------------------------------
-import AthenaCommon.AtlasUnixGeneratorJob
-
-## get a handle on the sequence of top-level algorithms
-from AthenaCommon.AlgSequence import AlgSequence
-topSequence = AlgSequence()
-
-## get a handle on the service manager
-from AthenaCommon.AppMgr import ServiceMgr as svcMgr
-
-## get a handle on the application manager
-from AthenaCommon.AppMgr import theApp
-
-#--------------------------------------------------------------
-# Event related parameters
-#--------------------------------------------------------------
-if not 'EVTMAX' in dir():
-    EVTMAX = 5
-    pass
-theApp.EvtMax = EVTMAX
-
-from AthenaServices.AthenaServicesConf import AtRndmGenSvc
-svcMgr += AtRndmGenSvc(
-    "AtRndmGenSvc",
-    Seeds = [
-       "PYTHIA 4789899 989240512", "PYTHIA_INIT 820021 2347532",
-       "JIMMY 390020611 821000366", "JIMMY_INIT 820021 2347532",
-       "HERWIG 390020611 821000366", "HERWIG_INIT 820021 2347532",
-       ]
-    )
-
-####################
-# Generate the event
-####################
-from Pythia_i.Pythia_iConf import Pythia
-
-topSequence += Pythia(
-    "Pythia",
-    PythiaCommand = [
-        # generate ttbar events
-        "pysubs msel 6",
-
-        #selection of the subprocess gg -> ttbar
-        "pysubs msub 81 1",
-        "pysubs msub 82 1",
-        ]
-    )
-
-###############################
-# Dump the event from Generator
-###############################
-if not 'MCEVENTKEY' in dir():
-    MCEVENTKEY = "GEN_EVENT"
-    pass
-# uncomment this if you want to dump the McEventCollection on-screen
-## from TruthExamples.TruthExamplesConf import DumpMC
-## topSequence += DumpMC("GenMcEvent",
-##                       McEventKey = MCEVENTKEY)
-
-###############################
-# Convert a McEventCollection
-# into a TruthParticleContainer
-###############################
-from McParticleTools.McParticleToolsConf import TruthParticleCnvTool
-from AnalysisExamples.AnalysisExamplesConf import WriteTruthParticles
-topSequence += WriteTruthParticles(
-    "WriteTruthParticles",
-    CnvTool = TruthParticleCnvTool( "CnvTool",
-                                    McEvents = MCEVENTKEY,
-                                    TruthParticlesOutput = "My"+MCEVENTKEY )
-    )
-
-#---------------------------------------------------------------
-# Pool Persistency
-#---------------------------------------------------------------
-from AthenaPoolCnvSvc.WriteAthenaPool import AthenaPoolOutputStream
-
-if not 'POOL_OUTPUT' in dir():
-    POOL_OUTPUT = "mc.aod.pool"
-    pass
-
-outStream = AthenaPoolOutputStream("OutStream")
-outStream.ItemList  = [
-    "EventInfo#*",
-    "McEventCollection#*",
-    "TruthParticleContainer#*",
-    ]
-outStream.OutputFile = POOL_OUTPUT
-
-#==============================================================
-#
-# End of job options file
-#
-###############################################################
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/ZeeOnESDExample_jobOptions_AutoConfig.py b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/ZeeOnESDExample_jobOptions_AutoConfig.py
deleted file mode 100644
index 07a487fbf6c878f016b2c675af7b1ec6b5df271b..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/ZeeOnESDExample_jobOptions_AutoConfig.py
+++ /dev/null
@@ -1,85 +0,0 @@
-# Author: Ketevi A. Assamagan
-# CERN, February 2005
-# Modified by Vivek Jain, Mar. 2012
-
-# This file uses AutoConfig to peek at the input file, and set Geometry tag, etc., correctly
-# you don't need to set Geometry tags anymore. They are read from the file itself.
-
-# here give the full path of a file that you run over locally
-# if you are running on the grid, it will ignore what you set here, and do the right thing
-# if you want to run over multiple files locally, use glob as shown below
-
-from AthenaCommon.AthenaCommonFlags import jobproperties as jp
-#jp.AthenaCommonFlags.FilesInput = ['/afs/cern.ch/atlas/maxidisk/d49/AOD.191045._001937.pool.root.1']
-jp.AthenaCommonFlags.FilesInput = ['root://eosatlas.cern.ch//eos/atlas/atlascerngroupdisk/proj-sit/rtt/prod/tct/rel_2/17.2.X/i686-slc5-gcc43-opt/offline/Tier0ChainTests/Run00183021_JetTauEtmiss0_Collisions/myESD_JetTauEtmiss_0.pool.root']
-#jp.AthenaCommonFlags.FilesInput = ['/usatlas/u/vj/vj_bnl_local/datafile/mc11_7TeV.105014.J5_pythia_jetjet.recon.ESD.e815_s1273_s1274_r3060_tid709350_00/ESD.709350._002530.pool.root.1']
-
-#DATAPATH = '/afs/cern.ch/atlas/maxidisk/d49/'
-#from glob import glob
-#INPUT = glob(DATAPATH + 'AOD*.root*')
-#print INPUT
-#jp.AthenaCommonFlags.FilesInput = INPUT
-
-#
-from RecExConfig.RecFlags import rec
-
-# import the data types 
-import EventKernel.ParticleDataType
-
-# get a handle on the ServiceManager which holds all the services 
-from AthenaCommon.AppMgr import ServiceMgr
-
-include ("RecExCond/RecExCommon_flags.py")
-include( "RecExCond/AllDet_detDescr.py" )
-include( "AthenaPoolCnvSvc/ReadAthenaPool_jobOptions.py" )
-
-
-# The input file already specified near the top of this file
-ServiceMgr.EventSelector.InputCollections = jp.AthenaCommonFlags.FilesInput()
-
-from AthenaCommon.AlgSequence import AlgSequence
-topSequence = AlgSequence()
-
-from AnalysisExamples.AnalysisExamplesConf import ZeeOnESD
-topSequence += ZeeOnESD()
-
-# The user analysis cuts - default values to be changed by the user as needed
-
-# set the names of the particle containers to be retrieved from StoreGate
-# must be the same name used to build the AOD container
-# do not touch these if you do not know what you are doing
-
-# Electron Parameters
-topSequence.ZeeOnESD.egammaContainer = "ElectronCollection"
-topSequence.ZeeOnESD.TrackMatchContainer="egDetailContainer"
-topSequence.ZeeOnESD.egammaEtCut  = 20.0*GeV
-topSequence.ZeeOnESD.egammaEtaCut  = 2.5
-
-
-# message level for ZeeOnESD
-topSequence.ZeeOnESD.OutputLevel = INFO
-
-# Set output level threshold (2=DEBUG, 3=INFO, 4=WARNING, 5=ERROR, 6=FATAL )
-ServiceMgr.MessageSvc.OutputLevel = INFO
-# print out all messages. may need to increase it
-ServiceMgr.MessageSvc.defaultLimit = 9999999
-
-# Number of Events to process
-theApp.EvtMax = -1
-
-# setup TTree registration Service
-# save ROOT histograms and Tuple
-from GaudiSvc.GaudiSvcConf import THistSvc
-ServiceMgr += THistSvc()
-ServiceMgr.THistSvc.Output = ["AANT DATAFILE='ZeeOnESD.root' OPT='RECREATE'"]
-from AnalysisTools.AthAnalysisToolsConf import AANTupleStream
-topSequence += AANTupleStream()
-topSequence.AANTupleStream.ExtraRefNames = [ "StreamESD","Stream1" ]
-topSequence.AANTupleStream.OutputName = 'ZeeOnESD.root'
-topSequence.AANTupleStream.WriteInputDataHeader = True
-topSequence.AANTupleStream.OutputLevel = WARNING
-# Root Ntuple output file and name
-#theApp.HistogramPersistency = "ROOT"
-#theApp.HistogramPersistencySvc.OutputFile = "VFitZmmOnAOD.root"
-
-
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/produceGenEvents_jobOptions.py b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/produceGenEvents_jobOptions.py
deleted file mode 100644
index c544b7f49e0f685087039184854fca33fea0f3df..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/produceGenEvents_jobOptions.py
+++ /dev/null
@@ -1,65 +0,0 @@
-import AthenaCommon.AtlasUnixGeneratorJob
-from AthenaCommon.AppMgr import theApp
-
-include( "PartPropSvc/PartPropSvc.py" )
-
-# get a handle on the ServiceManager which holds all the services
-from AthenaCommon.AppMgr import ServiceMgr
-
-#* load relevant libraries
-ServiceMgr.AuditorSvc.Auditors  = [ "ChronoAuditor" ]
-from AthenaServices.AthenaServicesConf import AtRndmGenSvc
-ServiceMgr += AtRndmGenSvc()
-
-ServiceMgr.AtRndmGenSvc.Seeds = ["PYTHIA 85909879 9707499", "PYTHIA_INIT 87928165 11160529"]
-
-#--------------------------------------------------------------
-# Persistency services
-#--------------------------------------------------------------
-# you shouldn't need to change this
-ServiceMgr.EventPersistencySvc.CnvServices     = [ "McCnvSvc" ];  
-#--------------------------------------------------------------
-# Event related parameters
-#--------------------------------------------------------------
-# Number of events to be processed (default is 10)
-theApp.EvtMax = 10
-#--------------------------------------------------------------
-# Private Application Configuration options
-#--------------------------------------------------------------
-# OUTPUT PRINTOUT LEVEL
-# Set output level threshold (2=DEBUG, 3=INFO, 4=WARNING, 5=ERROR, 6=FATAL )
-# you can override this for individual modules if necessary
-ServiceMgr.MessageSvc.OutputLevel               = 3
-#--------------------------------------------------------------
-#GENERATORS SETUP (select by uncommenting/commenting)
-#--------------------------------------------------------------
-from AthenaCommon.AlgSequence import AlgSequence
-topSequence = AlgSequence()
-
-from Pythia_i.Pythia_iConf import Pythia
-topSequence += Pythia()
-
-#-------------------------------------------------------------
-# define your physics process here
-# Declare the Pythia to be the Generator to be used
-topSequence.Pythia.PythiaCommand += ["pysubs msel 6"]
-#selection of the subprocess gg -> ttbar
-topSequence.Pythia.PythiaCommand += ["pysubs msub 81 1"]
-topSequence.Pythia.PythiaCommand += ["pysubs msub 82 1"]
-
-#-------------------------------------------------------------
-
-#-------------------------------------------------------------
-# Pool Persistency
-#-------------------------------------------------------------
-from AthenaPoolCnvSvc.WriteAthenaPool import AthenaPoolOutputStream
-
-# EventInfo Converters
-include( "EventAthenaPool/EventAthenaPool_joboptions.py")
-PoolSvc = Service( "PoolSvc" )
-Stream1 = AthenaPoolOutputStream( "Stream1" )
-Stream1.ItemList+=["EventInfo#*"];        	 
-Stream1.ItemList+=[ "McEventCollection#*" ]
-Stream1.OutputFile = "McEvent.root"
-#PoolSvc.Output    = "McEvent.root"
-#==============================================================
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/py_read_egamma_d3pd.py b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/py_read_egamma_d3pd.py
deleted file mode 100644
index 907e7e064ad9e1d40df1b39dba23154c2a5346bf..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/py_read_egamma_d3pd.py
+++ /dev/null
@@ -1,26 +0,0 @@
-_d3pd_files = [
-    'root://castoratlas//castor/cern.ch/user/b/binet/utests/utests/filter-d3pd/ntuple.0.root',
-    #'ntuple.0.root',
-    ]
-import AthenaRootComps.ReadAthenaRoot
-svcMgr.EventSelector.InputCollections = _d3pd_files
-svcMgr.EventSelector.TupleName = "egamma"
-
-#activeBranches = [ 'nMC' ] # only 1 branch
-#activeBranches = [ '*' ]   # all branches
-activeBranches = []        # no branch
-svcMgr.EventSelector.ActiveBranches = activeBranches
-
-from AthenaCommon.AlgSequence import AlgSequence
-job = AlgSequence()
-
-import AnalysisExamples.Lib as AEL
-job += AEL.PyReadD3pdNtuple(
-    TreeName = '/temp/TTreeStream/egamma',
-    OutputLevel = Lvl.DEBUG
-    )
-
-if not 'EVTMAX' in dir():
-    EVTMAX=10
-theApp.EvtMax = EVTMAX
-
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/py_read_egamma_d3pd_evtstore.py b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/py_read_egamma_d3pd_evtstore.py
deleted file mode 100644
index c58add868b2419ff982d95df5118df252ff05c35..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/py_read_egamma_d3pd_evtstore.py
+++ /dev/null
@@ -1,25 +0,0 @@
-_d3pd_files = [
-    'root://castoratlas//castor/cern.ch/user/b/binet/utests/utests/filter-d3pd/ntuple.0.root',
-    'root://castoratlas//castor/cern.ch/user/b/binet/utests/utests/filter-d3pd/ntuple.1.root',
-    #'ntuple.0.root',
-    #'ntuple.1.root',
-    ]
-import AthenaRootComps.ReadAthenaRoot
-svcMgr.EventSelector.InputCollections = _d3pd_files
-svcMgr.EventSelector.TupleName = "egamma"
-
-# disable all branches (will be activated on a need-to-use basis)
-svcMgr.EventSelector.ActiveBranches = []
-
-from AthenaCommon.AlgSequence import AlgSequence
-job = AlgSequence()
-
-import AnalysisExamples.Lib as AEL
-job += AEL.PyReadD3pdEvtStore(
-    OutputLevel = Lvl.DEBUG
-    )
-
-if not 'EVTMAX' in dir():
-    EVTMAX=10
-theApp.EvtMax = EVTMAX
-
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/read_egamma_d3pd.py b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/read_egamma_d3pd.py
deleted file mode 100644
index cf8ecffa50b86ea14647c9acb7920a3624b4f9a6..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/read_egamma_d3pd.py
+++ /dev/null
@@ -1,24 +0,0 @@
-_d3pd_files = [
-    'root://castoratlas//castor/cern.ch/user/b/binet/utests/utests/filter-d3pd/ntuple.0.root',
-    #'ntuple.0.root',
-    ]
-import AthenaRootComps.ReadAthenaRoot
-svcMgr.EventSelector.InputCollections = _d3pd_files
-svcMgr.EventSelector.TupleName = "egamma"
-
-#activeBranches = [ 'nMC' ] # only 1 branch
-#activeBranches = [ '*' ]   # all branches
-activeBranches = []        # no branch
-svcMgr.EventSelector.ActiveBranches = activeBranches
-
-from AthenaCommon.AlgSequence import AlgSequence
-job = AlgSequence()
-job += CfgMgr.AthEx__ReadD3pdNtuple(
-    TreeName = '/temp/TTreeStream/egamma',
-    OutputLevel = Lvl.DEBUG
-    )
-
-if not 'EVTMAX' in dir():
-    EVTMAX=10
-theApp.EvtMax = EVTMAX
-
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/read_egamma_d3pd_evtstore.py b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/read_egamma_d3pd_evtstore.py
deleted file mode 100644
index 22d5c1181e2f8c926a40e3bd90855bfe4f46d7e2..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/share/read_egamma_d3pd_evtstore.py
+++ /dev/null
@@ -1,30 +0,0 @@
-_d3pd_files = [
-    'root://castoratlas//castor/cern.ch/user/b/binet/utests/utests/filter-d3pd/ntuple.0.root',
-    'root://castoratlas//castor/cern.ch/user/b/binet/utests/utests/filter-d3pd/ntuple.1.root',
-    #'ntuple.0.root',
-    #'ntuple.1.root',
-    ]
-import AthenaRootComps.ReadAthenaRoot
-svcMgr.EventSelector.InputCollections = _d3pd_files
-svcMgr.EventSelector.TupleName = "egamma"
-
-# disable all branches (will be activated on a need-to-use basis)
-svcMgr.EventSelector.ActiveBranches = []
-
-from AthenaCommon.AlgSequence import AlgSequence
-job = AlgSequence()
-
-job += CfgMgr.AthEx__ReadD3pdEvtStore(
-    'alg1',
-    OutputLevel = Lvl.DEBUG
-    )
-
-job += CfgMgr.AthEx__ReadD3pdEvtStore(
-    'alg2',
-    OutputLevel = Lvl.DEBUG
-    )
-
-if not 'EVTMAX' in dir():
-    EVTMAX=10
-theApp.EvtMax = EVTMAX
-
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/AthExReadD3pdEvtStore.cxx b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/AthExReadD3pdEvtStore.cxx
deleted file mode 100644
index fc1eaf0006d745d7ecdf0bfef80522d990fa3ea6..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/AthExReadD3pdEvtStore.cxx
+++ /dev/null
@@ -1,151 +0,0 @@
-///////////////////////// -*- C++ -*- /////////////////////////////
-
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-// AthExReadD3pdEvtStore.cxx 
-// Implementation file for class AthEx::ReadD3pdEvtStore
-// Author: S.Binet<binet@cern.ch>
-/////////////////////////////////////////////////////////////////// 
-
-// AnalysisExamples includes
-#include "AthExReadD3pdEvtStore.h"
-
-// STL includes
-
-// FrameWork includes
-#include "Gaudi/Property.h"
-#include "AthenaKernel/IDictLoaderSvc.h"
-#include "GaudiKernel/ServiceHandle.h"
-#include "GaudiKernel/ITHistSvc.h"
-
-// SGTools
-#include "SGTools/BuiltinsClids.h"  // to put ints,... in evtstore
-#include "SGTools/StlVectorClids.h" // to put std::vectors... in evtstore
-
-
-namespace AthEx {
-
-/////////////////////////////////////////////////////////////////// 
-// Public methods: 
-/////////////////////////////////////////////////////////////////// 
-
-// Constructors
-////////////////
-ReadD3pdEvtStore::ReadD3pdEvtStore( const std::string& name, 
-                                ISvcLocator* pSvcLocator ) : 
-  ::AthAlgorithm( name, pSvcLocator )
-{
-  //
-  // Property declaration
-  // 
-  //declareProperty( "Property", m_nProperty );
-
-  m_runnbr = "RunNumber";
-  m_evtnbr = "EventNumber"; 
-  m_el_n   = "el_n";
-  m_el_eta = "el_eta";
-  m_el_jetcone_dr = "el_jetcone_dr";
-  
-}
-
-// Destructor
-///////////////
-ReadD3pdEvtStore::~ReadD3pdEvtStore()
-{}
-
-// Athena Algorithm's Hooks
-////////////////////////////
-StatusCode ReadD3pdEvtStore::initialize()
-{
-  ATH_MSG_INFO ("Initializing " << name() << "...");
-
-  return StatusCode::SUCCESS;
-}
-
-StatusCode ReadD3pdEvtStore::finalize()
-{
-  ATH_MSG_INFO ("Finalizing " << name() << "...");
-
-  return StatusCode::SUCCESS;
-}
-
-StatusCode ReadD3pdEvtStore::execute()
-{  
-  ATH_MSG_DEBUG ("Executing " << name() << "...");
-
-  const uint32_t *runnbr = 0;
-  const uint32_t *evtnbr = 0;
-  const int32_t *el_n = 0;
-  const std::vector<float> *el_eta = 0;
-  const std::vector<std::vector<float> > *el_jetcone_dr = 0;
-
-  if (!evtStore()->retrieve(runnbr, m_runnbr).isSuccess()) {
-    ATH_MSG_INFO("could not retrieve [RunNumber]");
-    return StatusCode::SUCCESS;
-  }
-
-  if (!evtStore()->retrieve(evtnbr, m_evtnbr).isSuccess()) {
-    ATH_MSG_INFO("could not retrieve [EventNumber]");
-    return StatusCode::SUCCESS;
-  }
-
-  if (!evtStore()->retrieve(el_n, m_el_n).isSuccess()) {
-    ATH_MSG_INFO("could not retrieve [el_n]");
-    return StatusCode::SUCCESS;
-  }
-
-  if (!evtStore()->retrieve(el_eta, m_el_eta).isSuccess()) {
-    ATH_MSG_INFO("could not retrieve [el_eta]");
-    return StatusCode::SUCCESS;
-  }
-
-  if (!evtStore()->retrieve(el_jetcone_dr, m_el_jetcone_dr).isSuccess()) {
-    ATH_MSG_INFO("could not retrieve [el_jetcone_dr]");
-    return StatusCode::SUCCESS;
-  }
-
-  msg(MSG::INFO) 
-    << "run-nbr: " << *runnbr << endmsg
-    << "evt-nbr: " << *evtnbr << endmsg
-    << "el-nbr:  " << *el_n << endmsg;
-  if (*el_n > 0) {
-    msg(MSG::INFO) << "el.eta[0]: " << el_eta->at(0) << endmsg
-                   << "el_jetcone_dr[0].size: "
-                   << el_jetcone_dr->at(0).size()
-                   << endmsg;
-    for (std::size_t i = 0, imax = el_jetcone_dr->at(0).size();
-         i != imax;
-         ++i) {
-      msg(MSG::INFO) << " jet-cone-dr[0][" << i << "]: "
-                     << el_jetcone_dr->at(0)[i]
-                     << endmsg;
-    }
-  }
-
-  return StatusCode::SUCCESS;
-}
-
-/////////////////////////////////////////////////////////////////// 
-// Const methods: 
-///////////////////////////////////////////////////////////////////
-
-/////////////////////////////////////////////////////////////////// 
-// Non-const methods: 
-/////////////////////////////////////////////////////////////////// 
-
-/////////////////////////////////////////////////////////////////// 
-// Protected methods: 
-/////////////////////////////////////////////////////////////////// 
-
-/////////////////////////////////////////////////////////////////// 
-// Const methods: 
-///////////////////////////////////////////////////////////////////
-
-/////////////////////////////////////////////////////////////////// 
-// Non-const methods: 
-/////////////////////////////////////////////////////////////////// 
-
-
-} //> end namespace AthEx
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/AthExReadD3pdEvtStore.h b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/AthExReadD3pdEvtStore.h
deleted file mode 100644
index ca4b64451bca0fcf382b29a490d3649824d3ce52..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/AthExReadD3pdEvtStore.h
+++ /dev/null
@@ -1,85 +0,0 @@
-///////////////////////// -*- C++ -*- /////////////////////////////
-
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-// ReadD3pdEvtStore.h 
-// Header file for class ReadD3pdEvtStore
-// Author: S.Binet<binet@cern.ch>
-/////////////////////////////////////////////////////////////////// 
-#ifndef ANALYSISEXAMPLES_ATHEXREADD3PDEVTSTORE_H
-#define ANALYSISEXAMPLES_ATHEXREADD3PDEVTSTORE_H 1
-
-// STL includes
-#include <string>
-#include <vector>
-#include <stdint.h>
-
-// FrameWork includes
-#include "AthenaBaseComps/AthAlgorithm.h"
-
-// fwd declares
-
-namespace AthEx {
-
-class ReadD3pdEvtStore
-  : public ::AthAlgorithm
-{ 
-
-  /////////////////////////////////////////////////////////////////// 
-  // Public methods: 
-  /////////////////////////////////////////////////////////////////// 
- public: 
-
-  // Copy constructor: 
-
-  /// Constructor with parameters: 
-  ReadD3pdEvtStore( const std::string& name, ISvcLocator* pSvcLocator );
-
-  /// Destructor: 
-  virtual ~ReadD3pdEvtStore(); 
-
-  // Assignment operator: 
-  //ReadD3pdEvtStore &operator=(const ReadD3pdEvtStore &alg); 
-
-  // Athena algorithm's Hooks
-  virtual StatusCode  initialize();
-  virtual StatusCode  execute();
-  virtual StatusCode  finalize();
-
-  /////////////////////////////////////////////////////////////////// 
-  // Const methods: 
-  ///////////////////////////////////////////////////////////////////
-
-  /////////////////////////////////////////////////////////////////// 
-  // Non-const methods: 
-  /////////////////////////////////////////////////////////////////// 
-
-  /////////////////////////////////////////////////////////////////// 
-  // Private data: 
-  /////////////////////////////////////////////////////////////////// 
- private: 
-
-  /// Default constructor: 
-  ReadD3pdEvtStore();
-
-  // containers
-  std::string m_runnbr;
-  std::string m_evtnbr;
-  std::string m_el_n;
-  std::string m_el_eta;
-  std::string m_el_jetcone_dr;
-
-}; 
-
-// I/O operators
-//////////////////////
-
-/////////////////////////////////////////////////////////////////// 
-// Inline methods: 
-/////////////////////////////////////////////////////////////////// 
-
-} //> end namespace AthEx
-
-#endif //> !ANALYSISEXAMPLES_ATHEXREADD3PDEVTSTORE_H
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/AthExReadD3pdNtuple.cxx b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/AthExReadD3pdNtuple.cxx
deleted file mode 100644
index 7f016b01dd78fe354714e369dacbee2cd152b461..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/AthExReadD3pdNtuple.cxx
+++ /dev/null
@@ -1,199 +0,0 @@
-///////////////////////// -*- C++ -*- /////////////////////////////
-
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-// AthExReadD3pdNtuple.cxx 
-// Implementation file for class AthEx::ReadD3pdNtuple
-// Author: S.Binet<binet@cern.ch>
-/////////////////////////////////////////////////////////////////// 
-
-// AnalysisExamples includes
-#include "AthExReadD3pdNtuple.h"
-
-// STL includes
-
-// FrameWork includes
-#include "Gaudi/Property.h"
-#include "AthenaKernel/IDictLoaderSvc.h"
-#include "GaudiKernel/ServiceHandle.h"
-#include "GaudiKernel/ITHistSvc.h"
-
-// ROOT includes
-#include "TTree.h"
-
-namespace AthEx {
-
-/////////////////////////////////////////////////////////////////// 
-// Public methods: 
-/////////////////////////////////////////////////////////////////// 
-
-// Constructors
-////////////////
-ReadD3pdNtuple::ReadD3pdNtuple( const std::string& name, 
-                                ISvcLocator* pSvcLocator ) : 
-  ::AthAlgorithm( name, pSvcLocator )
-{
-  //
-  // Property declaration
-  // 
-  //declareProperty( "Property", m_nProperty );
-
-  declareProperty("TreeName", 
-                  m_treeName = "/temp/TTreeStream/egamma", 
-                  "Name of the TTree to access in THistSvc");
-  m_runnbr = 0;
-  m_evtnbr = 0;
-  m_el_n   = 0;
-  m_el_eta = 0;
-  m_el_jetcone_dr = 0;
-}
-
-// Destructor
-///////////////
-ReadD3pdNtuple::~ReadD3pdNtuple()
-{}
-
-// Athena Algorithm's Hooks
-////////////////////////////
-StatusCode ReadD3pdNtuple::initialize()
-{
-  ATH_MSG_INFO ("Initializing " << name() << "...");
-
-  ServiceHandle<ITHistSvc> hsvc("THistSvc/THistSvc", this->name());
-  if (!hsvc.retrieve().isSuccess()) {
-    ATH_MSG_ERROR("could not retrieve THistSvc!");
-    return StatusCode::FAILURE;
-  }
-  TTree *tree = 0;
-  if (!hsvc->getTree(m_treeName, tree).isSuccess() || 
-      0 == tree) {
-    ATH_MSG_ERROR("Could not register with our ttree!");
-    return StatusCode::FAILURE;
-  }
-
-  // connect our variables to the tree
-  if (!initBranches(tree).isSuccess()) {
-    ATH_MSG_ERROR("could not configure branches !");
-    return StatusCode::FAILURE;
-  }
-  return StatusCode::SUCCESS;
-}
-
-StatusCode ReadD3pdNtuple::finalize()
-{
-  ATH_MSG_INFO ("Finalizing " << name() << "...");
-
-  return StatusCode::SUCCESS;
-}
-
-StatusCode ReadD3pdNtuple::execute()
-{  
-  ATH_MSG_DEBUG ("Executing " << name() << "...");
-
-  msg(MSG::INFO) 
-    << "run-nbr: " << m_runnbr << endmsg
-    << "evt-nbr: " << m_evtnbr << endmsg
-    << "el-nbr:  " << m_el_n << endmsg;
-  if (m_el_n > 0) {
-    msg(MSG::INFO) << "el.eta[0]: " << m_el_eta->at(0) << endmsg
-                   << "el_jetcone_dr[0].size: "
-                   << m_el_jetcone_dr->at(0).size()
-                   << endmsg;
-    for (std::size_t i = 0, imax = m_el_jetcone_dr->at(0).size();
-         i != imax;
-         ++i) {
-      msg(MSG::INFO) << " jet-cone-dr[0][" << i << "]: "
-                     << m_el_jetcone_dr->at(0)[i]
-                     << endmsg;
-    }
-  }
-
-  return StatusCode::SUCCESS;
-}
-
-/////////////////////////////////////////////////////////////////// 
-// Const methods: 
-///////////////////////////////////////////////////////////////////
-
-/////////////////////////////////////////////////////////////////// 
-// Non-const methods: 
-/////////////////////////////////////////////////////////////////// 
-
-/////////////////////////////////////////////////////////////////// 
-// Protected methods: 
-/////////////////////////////////////////////////////////////////// 
-
-/////////////////////////////////////////////////////////////////// 
-// Const methods: 
-///////////////////////////////////////////////////////////////////
-
-/////////////////////////////////////////////////////////////////// 
-// Non-const methods: 
-/////////////////////////////////////////////////////////////////// 
-
-StatusCode
-ReadD3pdNtuple::initBranches(TTree *tree)
-{
-  {
-    TBranch *br = tree->GetBranch("RunNumber");
-    if (0==br) {
-      ATH_MSG_ERROR("could not set branch address for [RunNumber]");
-      return StatusCode::FAILURE;
-    }
-    br->SetStatus(1);
-    br->SetAddress(&m_runnbr);
-  }
-  {
-    TBranch *br = tree->GetBranch("EventNumber");
-    if (0==br) {
-      ATH_MSG_ERROR("could not set branch address for [EventNumber]");
-      return StatusCode::FAILURE;
-    }
-    br->SetStatus(1);
-    br->SetAddress(&m_evtnbr);
-  }
-  {
-    TBranch *br = tree->GetBranch("el_n");
-    if (0==br) {
-      ATH_MSG_ERROR("could not set branch address for [el_n]");
-      return StatusCode::FAILURE;
-    }
-    br->SetStatus(1);
-    br->SetAddress(&m_el_n);
-  }
-  ServiceHandle<IDictLoaderSvc> dictsvc("AthDictLoaderSvc", this->name());
-  {
-    // load the reflex dict for this type:
-    if (dictsvc->load_type("std::vector<float>").Name() == "") {
-      ATH_MSG_ERROR("could not load reflex type for [std::vector<float>]!");
-      return StatusCode::FAILURE;
-    }
-    TBranch *br = tree->GetBranch("el_eta");
-    if (0==br) {
-      ATH_MSG_ERROR("could not set branch address for [el_eta]");
-      return StatusCode::FAILURE;
-    }
-    br->SetStatus(1);
-    br->SetAddress(&m_el_eta);
-  }
-  {
-    // load the reflex dict for this type:
-    if (dictsvc->load_type("std::vector<std::vector<float> >").Name() == "") {
-      ATH_MSG_ERROR("could not load reflex type for [std::vector<std::vector<int> >]!");
-      return StatusCode::FAILURE;
-    }
-    TBranch *br = tree->GetBranch("el_jetcone_dr");
-    if (0==br) {
-      ATH_MSG_ERROR("could not set branch address for [el_jetcone_dr]");
-      return StatusCode::FAILURE;
-    }
-    br->SetStatus(1);
-    br->SetAddress(&m_el_jetcone_dr);
-  }
-
-  return StatusCode::SUCCESS;
-}
-
-} //> end namespace AthEx
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/AthExReadD3pdNtuple.h b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/AthExReadD3pdNtuple.h
deleted file mode 100644
index 5d2010ccc766bacf3f73ccb3aa31de2786ec6c20..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/AthExReadD3pdNtuple.h
+++ /dev/null
@@ -1,93 +0,0 @@
-///////////////////////// -*- C++ -*- /////////////////////////////
-
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-// ReadD3pdNtuple.h 
-// Header file for class ReadD3pdNtuple
-// Author: S.Binet<binet@cern.ch>
-/////////////////////////////////////////////////////////////////// 
-#ifndef ANALYSISEXAMPLES_ATHEXREADD3PDNTUPLE_H
-#define ANALYSISEXAMPLES_ATHEXREADD3PDNTUPLE_H 1
-
-// STL includes
-#include <string>
-#include <vector>
-#include <stdint.h>
-
-// FrameWork includes
-#include "AthenaBaseComps/AthAlgorithm.h"
-
-// fwd declares
-class TTree;
-
-namespace AthEx {
-
-class ReadD3pdNtuple
-  : public ::AthAlgorithm
-{ 
-
-  /////////////////////////////////////////////////////////////////// 
-  // Public methods: 
-  /////////////////////////////////////////////////////////////////// 
- public: 
-
-  // Copy constructor: 
-
-  /// Constructor with parameters: 
-  ReadD3pdNtuple( const std::string& name, ISvcLocator* pSvcLocator );
-
-  /// Destructor: 
-  virtual ~ReadD3pdNtuple(); 
-
-  // Assignment operator: 
-  //ReadD3pdNtuple &operator=(const ReadD3pdNtuple &alg); 
-
-  // Athena algorithm's Hooks
-  virtual StatusCode  initialize();
-  virtual StatusCode  execute();
-  virtual StatusCode  finalize();
-
-  /////////////////////////////////////////////////////////////////// 
-  // Const methods: 
-  ///////////////////////////////////////////////////////////////////
-
-  /////////////////////////////////////////////////////////////////// 
-  // Non-const methods: 
-  /////////////////////////////////////////////////////////////////// 
-
-  /////////////////////////////////////////////////////////////////// 
-  // Private data: 
-  /////////////////////////////////////////////////////////////////// 
- private: 
-
-  /// Default constructor: 
-  ReadD3pdNtuple();
-
-  /// initialize branches
-  StatusCode initBranches(TTree *tree);
-
-  // name of the TTree to read from THistSvc
-  std::string m_treeName;
-
-  // data to read from ttree
-  uint32_t m_runnbr;
-  uint32_t m_evtnbr;
-
-  int32_t m_el_n;
-  std::vector<float> *m_el_eta;
-  std::vector<std::vector<float> > *m_el_jetcone_dr;
-
-}; 
-
-// I/O operators
-//////////////////////
-
-/////////////////////////////////////////////////////////////////// 
-// Inline methods: 
-/////////////////////////////////////////////////////////////////// 
-
-} //> end namespace AthEx
-
-#endif //> !ANALYSISEXAMPLES_ATHEXREADD3PDNTUPLE_H
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/JetTagAna.cxx b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/JetTagAna.cxx
deleted file mode 100644
index ba11f5df82b214c14592a4fcb54e2f5996d9ff13..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/JetTagAna.cxx
+++ /dev/null
@@ -1,2376 +0,0 @@
-/*
-  Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
-*/
-
-// JetTagAna is an example algorithm which shows how to access the
-// b-tagging information from JetTag objects.
-//
-// To simply access the b-tagging weight for each jet, have a look 
-// at the section SECTION::WEIGHTS
-//
-// To access the Monte-Carlo label (b,c or light) of a jet, have a 
-// look at the section SECTION::LABEL
-//
-// To access detailed information about each tag: SECTION::DETAILS
-//
-// Author: Laurent Vacavant <vacavant@in2p3.fr>
-
-#include "JetTagAna.h"
-
-#include "GaudiKernel/MsgStream.h"
-
-#include "StoreGate/DataHandle.h"
-
-#include "GaudiKernel/ITHistSvc.h"
-#include "TH1F.h"
-#include "TH2F.h"
-#include "TTree.h"
-#include "TMath.h"
-
-#include "McParticleEvent/TruthParticleContainer.h"
-#include "JetEvent/Jet.h"
-#include "JetEvent/JetCollection.h"
-#include "JetEvent/JetTagInfoBase.h"
-#include "JetTagEvent/TrackAssociation.h"
-#include "Particle/TrackParticleContainer.h"
-
-#include "Navigation/NavigationToken.h"
-#include "MuonIDEvent/MuonAssociation.h"
-#include "egammaEvent/ElectronAssociation.h"
-
-
-#include "egammaEvent/ElectronContainer.h"
-#include "muonEvent/MuonContainer.h"
-
-/// the Electron
-#include "egammaEvent/egamma.h"
-#include "egammaEvent/Electron.h"
-#include "egammaEvent/EMShower.h"
-#include "egammaEvent/EMTrackMatch.h"
-
-
-/// The Muon
-#include "muonEvent/Muon.h"
-
-#include "JetTagInfo/ITagInfo.h"
-#include "JetTagInfo/IPInfoBase.h"
-#include "JetTagInfo/IPInfoPlus.h"
-#include "JetTagInfo/IPTrackInfo.h"
-#include "JetTagInfo/SVInfoBase.h"
-#include "JetTagInfo/SVInfoPlus.h"
-#include "JetTagInfo/TruthInfo.h"
-#include "JetTagInfo/SoftMuonInfo.h"
-#include "JetTagInfo/SMTrackInfo.h"
-#include "JetTagInfo/SoftElectronInfo.h"
-#include "JetTagInfo/SETrackInfo.h"
-#include "JetTagInfo/SoftLeptonTruthInfo.h"
-#include "JetTagInfo/SLTrueInfo.h"
-
-//*
-#include "AnalysisTriggerEvent/LVL1_ROI.h"
-#include "AnalysisTriggerEvent/Jet_ROI.h"
-#include "TrigParticle/TrigL2BjetContainer.h"
-#include "TrigParticle/TrigEFBjetContainer.h"
-
-#include "CLHEP/Vector/LorentzVector.h"
-#include "VxVertex/VxContainer.h"
-#include "VxVertex/RecVertex.h"
-
-#include "GeneratorObjects/McEventCollection.h"
-#include "AtlasHepMC/GenEvent.h"
-#include "AtlasHepMC/GenVertex.h"
-#include "AtlasHepMC/GenParticle.h"
-
-#include "xAODEventInfo/EventInfo.h"
-#include "AthenaKernel/Units.h"
-
-#include <map>
-#include <algorithm>
-#include <cmath>
-
-using Athena::Units::GeV;
-
-JetTagAna::JetTagAna(const std::string& name,
-		     ISvcLocator* pSvcLocator)
-  : AthAlgorithm(name, pSvcLocator),
-    m_irun(0),
-    m_ievt(0),
-    m_trackTES(nullptr),
-    m_mcpartTES(nullptr),
-    m_histos(nullptr),
-    m_h_global_counters(nullptr),
-    m_h_global_nprimvtx(nullptr),
-    m_h_global_xprimvtx(nullptr),
-    m_h_global_yprimvtx(nullptr),
-    m_h_global_zprimvtx(nullptr),
-    m_h_global_primvtxresx(nullptr),
-    m_h_global_primvtxresy(nullptr),
-    m_h_global_primvtxresz(nullptr),
-    m_h_global_nmcpart(nullptr),
-    m_h_global_ntrkpart(nullptr),
-    m_h_global_BLayerHits(nullptr),
-    m_h_global_BLayerSharedHits(nullptr),
-    m_h_global_PixelHits(nullptr),
-    m_h_global_PixelLayers(nullptr),
-    m_h_global_SiHits(nullptr),
-    m_h_global_TRTHits(nullptr),
-    m_h_global_nmuon(nullptr),
-    m_h_global_nelectron(nullptr),
-    m_h_global_njettag(nullptr),
-    m_h_truth_bquark_nb(nullptr),
-    m_h_truth_bquark_pt(nullptr),
-    m_h_truth_bquark_eta(nullptr),
-    m_h_truth_cquark_nb(nullptr),
-    m_h_truth_cquark_pt(nullptr),
-    m_h_truth_cquark_eta(nullptr),
-    m_h_truth_bhadr_nb(nullptr),
-    m_h_truth_bhadr_pt(nullptr),
-    m_h_truth_bhadr_eta(nullptr),
-    m_h_truth_chadr_nb(nullptr),
-    m_h_truth_chadr_pt(nullptr),
-    m_h_truth_chadr_eta(nullptr),
-    m_h_jet_ntotal(nullptr),
-    m_h_jet_label(nullptr),
-    m_h_jet_nlabelb(nullptr),
-    m_h_jet_nlabelc(nullptr),
-    m_h_jet_nlabelt(nullptr),
-    m_h_jet_ntag(nullptr),
-    m_h_jet_eta(nullptr),
-    m_h_jet_phi(nullptr),
-    m_h_jet_et(nullptr),
-    m_h_jet_ntracks(nullptr),
-    m_h_jet_tracks_pt(nullptr),
-    m_h_jet_tracks_BLayerHits(nullptr),
-    m_h_jet_tracks_PixelHits(nullptr),
-    m_h_jet_tracks_SCTHits(nullptr),
-    m_h_jet_nmuons(nullptr),
-    m_h_jet_muons_pt(nullptr),
-    m_h_jet_nelectrons(nullptr),
-    m_h_jet_electrons_pt(nullptr),
-    m_h_tag_jetprob_w(nullptr),
-    m_h_tag_i2d_w(nullptr),
-    m_h_tag_i2d_n(nullptr),
-    m_h_tag_i2d_b(nullptr),
-    m_h_tag_i2d_u(nullptr),
-    m_h_tag_i2d_sig(nullptr),
-    m_h_tag_i3d_w(nullptr),
-    m_h_tag_i3d_n(nullptr),
-    m_h_tag_i3d_b(nullptr),
-    m_h_tag_i3d_u(nullptr),
-    m_h_tag_i3d_sig(nullptr),
-    m_h_tag_sv1_w(nullptr),
-    m_h_tag_sv1_b(nullptr),
-    m_h_tag_sv1_u(nullptr),
-    m_h_tag_sv2_w(nullptr),
-    m_h_tag_sv2_b(nullptr),
-    m_h_tag_sv2_u(nullptr),
-    m_h_tag_sv_n(nullptr),
-    m_h_tag_sv_n2t(nullptr),
-    m_h_tag_sv_frc(nullptr),
-    m_h_tag_sv_m(nullptr),
-    m_h_tag_cmb_w(nullptr),
-    m_h_tag_lf2d_w(nullptr),
-    m_h_tag_svbu_w(nullptr),
-    m_h_tag_lhsig_w(nullptr),
-    m_h_tag_softm_w(nullptr),
-    m_h_tag_softe_w(nullptr),
-    m_h_tag_jetfitter_w(nullptr),
-    m_h_tag_jetfitcomb_w(nullptr),
-    m_h_tag_jetfitternn_w(nullptr),
-    m_h_tag_jetfitcombnn_w(nullptr),
-    m_h_tag_i2d_l2_w(nullptr),
-    m_h_tag_i3d_l2_w(nullptr),
-    m_h_tag_i2d_ef_w(nullptr),
-    m_h_tag_i3d_ef_w(nullptr),
-    m_h_tag_i2d_of_w(nullptr),
-    m_h_tag_i3d_of_w(nullptr),
-    m_h_tag_IPinfo_ntrk(),
-    m_h_tag_IPinfo_trkPt(),
-    m_h_tag_IPinfo_d0val(),
-    m_h_tag_IPinfo_z0val(),
-    m_h_tag_IPinfo_d0sig(),
-    m_h_tag_IPinfo_z0sig(),
-    m_h_tag_IPinfo_weight2D(),
-    m_h_tag_IPinfo_weight3D(),
-    m_h_tag_IPinfo_piJP(),
-    m_h_tag_IPinfo_fromV0(),
-    m_h_tag_IPinfo_grade(),
-    m_h_perf_b(),
-    m_h_perf_u(),
-    m_h_perf_upur(),
-    m_h_perf_vxeff_den_b(nullptr),
-    m_h_perf_vxeff_num_b(nullptr),
-    m_h_perf_vxeff_den_u(nullptr),
-    m_h_perf_vxeff_num_u(nullptr),
-    m_h_perf_vxeff_den_upur(nullptr),
-    m_h_perf_vxeff_num_upur(nullptr),
-    m_h_perf_rej10(nullptr),
-    m_h_perf_rej50(nullptr),
-    m_h_perf_rej60(nullptr),
-    m_h_perf_rejpur10(nullptr),
-    m_h_perf_rejpur50(nullptr),
-    m_h_perf_rejpur60(nullptr),
-    m_h_perfWt_10(nullptr),
-    m_h_perfWt_50(nullptr),
-    m_h_perfWt_60(nullptr),
-    m_h_perfWt_pur10(nullptr),
-    m_h_perfWt_pur50(nullptr),
-    m_h_perfWt_pur60(nullptr),
-    m_h_perf_b_ET(),
-    m_h_perf_b_eta(),
-    m_h_perf_b_phi(),
-    m_h_perf_u_ET(),
-    m_h_perf_u_eta(),
-    m_h_perf_u_phi(),
-    m_h_perf_upur_ET(),
-    m_h_perf_upur_eta(),
-    m_h_perf_upur_phi(),
-    m_h_eff_b_ET(),
-    m_h_eff_b_eta(),
-    m_h_eff_b_phi(),
-    m_h_rej_u_ET(),
-    m_h_rej_u_eta(),
-    m_h_rej_u_phi(),
-    m_h_rej_upur_ET(),
-    m_h_rej_upur_eta(),
-    m_h_rej_upur_phi(),
-    m_njet(0),
-    m_jet_phi(),
-    m_jet_eta(),
-    m_jet_et(),
-    m_jet_flav(),
-    m_jet_isol(),
-    m_jet_ne(),
-    m_jet_eTrueEle(),
-    m_jet_eAnyEle(),
-    m_jet_eptr(),
-    m_jet_ept(),
-    m_jet_ea0(),
-    m_jet_ew(),
-    m_jet_elh(),
-    m_jet_epb(),
-    m_jet_epu(),
-    m_jet_tag_pid(),
-    m_jet_tag_mothpid(),
-    m_jet_NtrackEle(),
-    m_jet_trackElept(),
-    m_jet_trackEle(),
-    m_jet_Ntrack(),
-    m_jet_sumptTrack(),
-    m_nelej(),
-    m_ntrackj(),
-    m_nmuonj(),
-    p_nt1(nullptr),
-    m_UseTupleSET(false)
-{
-  declareProperty("JetContainer", m_particleJetContainerName = "Cone4H1TowerJets");
-  declareProperty("MCParticleContainer", m_truthParticleContainerName = "SpclMC");
-  declareProperty("TrackParticleContainer", m_trackParticleContainerName = "TrackParticleCandidate");
-  declareProperty("MuonContainer",m_muonContainerName = "StacoMuonCollection");
-  declareProperty("ElectronContainer", m_electronContainerName = "ElectronAODCollection");
-  declareProperty("PrimaryVertexContainer", m_primaryVertexContainerName = "VxPrimaryCandidate");
-  declareProperty("MCEventContainer", m_mcEventContainerName = "GEN_AOD");
-  declareProperty("SelectBFromRun", m_selectBFromRun = 0);
-  declareProperty("SelectUFromRun", m_selectUFromRun = 0);
-  declareProperty("PurificationDeltaR", m_purificationDeltaR = 0.8);
-  declareProperty("LeptonPurificationDeltaR", m_leptonPurificationDeltaR = 0.7);
-  declareProperty("UseTupleSET", m_UseTupleSET = false);
-  m_tagger[0] = "IP2D";
-  m_tagger[1] = "IP3D";
-  m_tagger[2] = "SV1";
-  m_tagger[3] = "SV2";
-  m_tagger[4] = "IP3DSV1";
-  m_tagger[5] = "Lifetime2D";
-  m_tagger[6] = "SecVtxTagBU";
-  m_tagger[7] = "lhSig";
-  m_tagger[8] = "SoftMuonTag";
-  m_tagger[9] = "SoftElectronTag";
-  m_tagger[10] = "JetFitter";
-  m_tagger[11] = "JetFitComb";
-  m_tagger[12] = "JetFitterNN";
-  m_tagger[13] = "JetFitCombNN";
-  m_tagger[14] = "JetProb";
-  //*
-  m_tagger[15] = "Offline_afterHLT_IP2D";
-  m_tagger[16] = "Offline_afterHLT_IP3D";
-  m_tagger[17] = "L2_IP2D";
-  m_tagger[18] = "L2_IP3D";
-  m_tagger[19] = "L2_JetProb";
-  m_tagger[20] = "EF_afterL2_IP2D";
-  m_tagger[21] = "EF_afterL2_IP3D";
-  m_tagger[22] = "EF_IP2D";
-  m_tagger[23] = "EF_IP3D";
-  m_tagger[24] = "EF_JetProb";
-  m_tagger[25] = "EF_MVTX";
-  m_tagger[26] = "EF_EVTX";
-  m_tagger[27] = "EF_NVTX";
-
-  // for histograms as a function of jet ET
-  //
-  m_jetET[0] = "Et_0_100";
-  m_jetET[1] = "Et_100_200";
-  m_jetET[2] = "Et_200_300";
-  m_jetET[3] = "Et_300_400";
-  m_jetET[4] = "Et_400_500";
-  m_jetET[5] = "Et_500";
-
-}
-
-JetTagAna::~JetTagAna() {}
-
-StatusCode JetTagAna::initialize() {
-  ATH_CHECK( service("THistSvc", m_histos, true) );
-
-  this->bookHistograms();
-  return StatusCode::SUCCESS;
-}		 
-
-double phiCorr(double phi){
-  double newphi = phi;
-  if (phi<-M_PI) newphi += 2*M_PI;
-  if (phi> M_PI) newphi -= 2*M_PI;
-  return newphi;
-}
-
-StatusCode JetTagAna::finalize() {
-  int nj  = (int)(m_h_jet_ntotal->GetMean()*m_h_jet_ntotal->Integral());
-  int njb = (int)(m_h_jet_nlabelb->GetMean()*m_h_jet_nlabelb->Integral());
-  int njc = (int)(m_h_jet_nlabelc->GetMean()*m_h_jet_nlabelc->Integral());
-  int njt = (int)(m_h_jet_nlabelt->GetMean()*m_h_jet_nlabelt->Integral());
-  ATH_MSG_INFO ( "##### APPROX. NB JETS LABELLED AS B: " << njb << " C: " << njc
-                 << " TAU: " << njt << " TOTAL: " << nj );
-  this->computeRejections();
-  return StatusCode::SUCCESS;
-}
-
-
-StatusCode JetTagAna::execute() {
-
-  ATH_MSG_DEBUG ( "JetTagAna::execute()" );
-  StatusCode sc = StatusCode::SUCCESS;
-  m_h_global_counters->Fill(1.);
-
-  /** Retrieve Event header: */
-  const xAOD::EventInfo* eventInfo = nullptr;
-  sc = evtStore()->retrieve(eventInfo);
-  m_irun = 0;
-  m_ievt = 0;
-  if (sc.isFailure()) {
-    ATH_MSG_ERROR ( "Could not retrieve event info" );
-  } else {
-    m_irun = eventInfo->runNumber();
-    m_ievt = eventInfo->eventNumber();
-  }
-  ATH_MSG_DEBUG ( "--- Run " << m_irun << " Event " << m_ievt );
-
-  /** Primary vertex: */
-  double pvx = 0.;
-  double pvy = 0.;
-  double pvz = 0.;
-  const VxContainer* vxContainer(0);
-  int npvx = 0;
-  sc = evtStore()->retrieve(vxContainer, m_primaryVertexContainerName);
-  if (sc.isFailure()) {
-    ATH_MSG_ERROR ( "Could not retrieve primary vertex info: " << m_primaryVertexContainerName );
-    return StatusCode::SUCCESS;
-  } else {
-    ATH_MSG_VERBOSE ( "Found primary vertex info: " << m_primaryVertexContainerName );
-    m_h_global_counters->Fill(2.);
-    if(vxContainer) {
-      npvx = vxContainer->size();
-      VxContainer::const_iterator fz = vxContainer->begin();
-      const Trk::RecVertex& primaryVertex = (*fz)->recVertex();
-      pvx = primaryVertex.position().x();
-      pvy = primaryVertex.position().y();
-      pvz = primaryVertex.position().z();
-    }
-  }
-  m_h_global_nprimvtx->Fill((float)npvx);
-  m_h_global_xprimvtx->Fill(pvx);
-  m_h_global_yprimvtx->Fill(pvy);
-  m_h_global_zprimvtx->Fill(pvz);
-  ATH_MSG_VERBOSE ( "--- Primary vertex: " << pvx << " " << pvy << " " << pvz );
-  // Check for undefined PV (when no PV is reconstructed, a dummy one is stored):
-  if( pvx == 0 && pvy == 0 && pvz == 0 ) {
-    ATH_MSG_WARNING ( "Primary Vertex is (0,0,0): skipping event." );
-    return StatusCode::SUCCESS;
-  } else {
-    m_h_global_counters->Fill(3.);
-  }
-  /** True primary vertex: */
-  double xpvxt = 0.;
-  double ypvxt = 0.;
-  double zpvxt = 0.;
-  const McEventCollection* gen(0);
-  sc = evtStore()->retrieve(gen, m_mcEventContainerName);
-  if( sc.isFailure() ) {
-    ATH_MSG_WARNING ( "MC Event " << m_mcEventContainerName << " not found." );
-  } else {
-    ATH_MSG_VERBOSE ( "MC Event " << m_mcEventContainerName << " found with " << gen->size() << " entries." );
-    const HepMC::GenEvent* genEvent = *(gen->begin());
-    if(genEvent) {
-      HepMC::GenEvent::vertex_const_iterator vitr = genEvent->vertices_begin();
-      xpvxt = (*vitr)->position().x();
-      ypvxt = (*vitr)->position().y();
-      zpvxt = (*vitr)->position().z(); 
-      m_h_global_primvtxresx->Fill(pvx-xpvxt);
-      m_h_global_primvtxresy->Fill(pvy-ypvxt);
-      m_h_global_primvtxresz->Fill(pvz-zpvxt);
-    }
-  }
-
-  /** MC Truth container: */
-  sc=evtStore()->retrieve( m_mcpartTES, m_truthParticleContainerName);
-  int nmcp = 0;
-  if( sc.isFailure() ) {
-    ATH_MSG_WARNING ( "TruthParticleContainer " << m_truthParticleContainerName 
-                      << " not found." );
-  } else {
-    nmcp = (*m_mcpartTES).size();
-    ATH_MSG_VERBOSE ( "TruthParticleContainer " << m_truthParticleContainerName 
-                      << " found with " << nmcp << " entries." );
-    m_h_global_counters->Fill(5.);
-  }
-  m_h_global_nmcpart->Fill((float)nmcp);
-
-  // fill some information about b and c particles:
-  if(m_mcpartTES) {
-   TruthParticleContainer::const_iterator mcItr  = (*m_mcpartTES).begin();
-   TruthParticleContainer::const_iterator mcEnd = (*m_mcpartTES).end();
-   int nbbq = 0;
-   int nbcq = 0;
-   int nbbh = 0;
-   int nbch = 0;
-   for (; mcItr != mcEnd; ++mcItr) {
-     int mpdg = (*mcItr)->pdgId();
-     double pt = (*mcItr)->pt();
-     double eta = (*mcItr)->eta();
-     if(5==abs(mpdg)) {
-       nbbq++;
-       m_h_truth_bquark_pt->Fill(pt);
-       m_h_truth_bquark_eta->Fill(eta);
-     }
-     if(4==abs(mpdg)) {
-       nbcq++;
-       m_h_truth_cquark_pt->Fill(pt);
-       m_h_truth_cquark_eta->Fill(eta);
-     }
-     if(this->isBHadron(mpdg)) {
-       nbbh++;
-       m_h_truth_bhadr_pt->Fill(pt);
-       m_h_truth_bhadr_eta->Fill(eta);
-     }
-     if(this->isDHadron(mpdg)) {
-       nbch++;
-       m_h_truth_chadr_pt->Fill(pt);
-       m_h_truth_chadr_eta->Fill(eta);
-     }
-   }
-   m_h_truth_bquark_nb->Fill((float)nbbq);
-   m_h_truth_cquark_nb->Fill((float)nbcq);
-   m_h_truth_bhadr_nb->Fill((float)nbbh);
-   m_h_truth_chadr_nb->Fill((float)nbch);
-  }
-
-  /** TrackParticleTruth container: */
-  const TrackParticleTruthCollection* tpTruthColl(0);
-  sc = evtStore()->retrieve(tpTruthColl,m_TPTruthContainer);
-  if (!sc.isFailure() ) {
-    ATH_MSG_VERBOSE ( "Number of TruthTrackParticles in event: " 
-                      << tpTruthColl->size() );
-  } else {
-    ATH_MSG_DEBUG ( "Unable to retrieve TrackParticleTruthCollection" );
-  }
-
-  /** TrackParticle container: */
-  int ntrkp = 0;
-  sc=evtStore()->retrieve( m_trackTES, m_trackParticleContainerName);
-  if( sc.isFailure() ) {
-    ATH_MSG_WARNING ( "TrackParticleContainer " << m_trackParticleContainerName 
-                      << " not found." );
-  } else {
-    m_h_global_counters->Fill(6.);
-    ntrkp = (*m_trackTES).size();
-    ATH_MSG_VERBOSE ( "TrackParticleContainer " << m_trackParticleContainerName 
-                      << " found with " << ntrkp << " entries." );
-
-    /// iterators over the container 
-    Rec::TrackParticleContainer::const_iterator trackItr  = m_trackTES->begin();
-    Rec::TrackParticleContainer::const_iterator trackItrE = m_trackTES->end();
-    for (; trackItr != trackItrE; ++trackItr) {
-      const Rec::TrackParticle * trackParticle = (*trackItr);
-      if(trackParticle) {
-	if(trackParticle->pt()<1000.) continue; 
-        const Trk::TrackSummary* summary = trackParticle->trackSummary();
-        if (summary) {
-	  m_h_global_BLayerHits->Fill((float) summary->get(Trk::numberOfInnermostPixelLayerHits));
-	  m_h_global_BLayerSharedHits->Fill((float) summary->get(Trk::numberOfInnermostPixelLayerSharedHits));
-	  m_h_global_PixelHits->Fill((float) summary->get(Trk::numberOfPixelHits));
-	  m_h_global_PixelLayers->Fill((float) summary->get(Trk::numberOfContribPixelLayers));
-	  m_h_global_TRTHits->Fill((float) summary->get(Trk::numberOfTRTHits));
-          int nSiHits = summary->get(Trk::numberOfPixelHits) + summary->get(Trk::numberOfSCTHits);
-          m_h_global_SiHits->Fill((float) nSiHits);
-	}	
-      }
-    }
-
-  }
-  m_h_global_ntrkpart->Fill((float)ntrkp);
-
-  /** Muon Container: */
-  const Analysis::MuonContainer* muonTDS=0;
-  int nmuon=0;
-  sc=evtStore()->retrieve( muonTDS, m_muonContainerName);
-  if( sc.isFailure()  ||  !muonTDS ) {
-    ATH_MSG_WARNING ( "No AOD muon container of muons found in TDS");
-  }  
-  else {
-    nmuon = (*muonTDS).size();
-    ATH_MSG_DEBUG ( "MuonContainer successfully retrieved, size = " << nmuon );
-  } 
-  m_h_global_nmuon->Fill((float) nmuon);  
-
-  /** Electron Container: */
-
-  const ElectronContainer* electronTDS=0;
-  int nelectron=0;
-  sc=evtStore()->retrieve( electronTDS, m_electronContainerName);
-  if( sc.isFailure()  ||  !electronTDS ) {
-    ATH_MSG_WARNING ( "No AOD electron container of electrons found in TDS" );
-  }  
-  else {
-    nelectron = (*electronTDS).size();
-    ATH_MSG_DEBUG ( "ElectronContainer successfully retrieved, size = " << nelectron );
-  } 
-  m_h_global_nelectron->Fill((float) nelectron);  
-
-  /** Jet container: */
-  int njtag = 0;
-  const JetCollection* jetTES;
-  sc=evtStore()->retrieve( jetTES, m_particleJetContainerName);
-  if( sc.isFailure() ) {
-    ATH_MSG_ERROR ( "JetContainer " << m_particleJetContainerName 
-                    << " not found." );
-    return StatusCode::SUCCESS;
-  } else {  
-    njtag = (*jetTES).size();
-    ATH_MSG_VERBOSE ( "JetContainer " << m_particleJetContainerName 
-                      << " found with " << njtag << " entries." );
-    m_h_global_counters->Fill(7.);
-  }
-  m_h_global_njettag->Fill((float)njtag);
-
-  // --- iterate over the Jet container 
-  JetCollection::const_iterator jetItr = (*jetTES).begin();
-  JetCollection::const_iterator jetEnd = (*jetTES).end();
-  int ntotal = 0;
-  int nlabelb = 0;
-  int nlabelc = 0;
-  int nlabelt = 0;
-  int icount =0;
-  for (; jetItr != jetEnd; ++jetItr) {
-    CLHEP::HepLorentzVector p4((*jetItr)->px(),
-			(*jetItr)->py(),
-			(*jetItr)->pz(),
-			(*jetItr)->e());
-    int ntag = (*jetItr)->jetTagInfoVector().size();
-    m_h_jet_ntag->Fill((float)ntag);
-    ntotal++;
-
-    // --- get the true label of the jet from MC Truth: SECTION::LABEL
-    std::string label("N/A");
-    const Analysis::TruthInfo* mcinfo = (*jetItr)->tagInfo<Analysis::TruthInfo>("TruthInfo");
-    if(mcinfo) {
-      label = mcinfo->jetTruthLabel();
-    } else {
-      ATH_MSG_VERBOSE ( "could not find TruthInfo for matching jet" );
-    }
-    int iflav(0);
-    if(label=="B") {
-      iflav = 5;
-      nlabelb++;
-    }
-    if(label=="C") {
-      iflav = 4;
-      nlabelc++;
-    }
-    if(label=="T") {
-      iflav = 15;
-      nlabelt++;
-    }
-    m_h_jet_label->Fill((float)iflav);
-    // for isolation/purification:
-    double dRminToB = mcinfo ? mcinfo->deltaRMinTo("B") : 9999.; // guard against missing TruthInfo
-    double dRminToC = mcinfo ? mcinfo->deltaRMinTo("C") : 9999.;
-    double dRminToT = mcinfo ? mcinfo->deltaRMinTo("T") : 9999.;
-
-    // --- get jet basic kinematics:
-    ATH_MSG_VERBOSE ( "BJet # " << icount << " Eta= " << p4.pseudoRapidity()
-                      << " Phi= " << p4.phi() << " pT= " << p4.perp()
-                      << "  #Tags= " << ntag << " MCLabel= " << label  );
-    icount++;
-    m_h_jet_eta->Fill(p4.pseudoRapidity());
-    m_h_jet_phi->Fill(p4.phi());
-    m_h_jet_et->Fill(p4.et());
-
-    // --- get some information about Track/Muon/Electron Constituents
-
-    int ntrk_j=0, nmu_j=0, nel_j=0;
-    const Analysis::TrackAssociation* ta = (*jetItr)->getAssociation<Analysis::TrackAssociation>("Tracks");
-    if(ta) ntrk_j = ta->size();
-    const Analysis::MuonAssociation* ma = (*jetItr)->getAssociation<Analysis::MuonAssociation>("Muons");
-    if(ma) nmu_j = ma->size();
-    const Analysis::ElectronAssociation* ea = (*jetItr)->getAssociation<Analysis::ElectronAssociation>("Electrons");
-    if(ea) nel_j = ea->size();
-    m_h_jet_ntracks->Fill((float) ntrk_j);
-    m_h_jet_nmuons->Fill((float) nmu_j);
-    m_h_jet_nelectrons->Fill((float) nel_j);
-
-    // loop over tracks in a jet
-
-    if(ta) {
-
-      std::vector<const Rec::TrackParticle*>* trackVector = ta->tracks();
-      for(std::vector<const Rec::TrackParticle*>::iterator trkItr = trackVector->begin(); trkItr !=trackVector->end(); ++trkItr) {
-
-
-	const Rec::TrackParticle* aTemp = *trkItr;
-
-	m_h_jet_tracks_pt->Fill(aTemp->pt()/GeV);
-
-       const Trk::TrackSummary* summary = aTemp->trackSummary();
-       if (summary) {
-         m_h_jet_tracks_BLayerHits->Fill((float) summary->get(Trk::numberOfInnermostPixelLayerHits));
-         m_h_jet_tracks_PixelHits->Fill((float) summary->get(Trk::numberOfPixelHits));
-         m_h_jet_tracks_SCTHits->Fill((float) summary->get(Trk::numberOfSCTHits));
-       }
-      }     
-
-    }
-
-    // loop over muons in a jet
-    if(ma) {
-      for(Navigable<Analysis::MuonContainer,double>::object_iter it=ma->begin(); it !=ma->end(); ++it) {
-        const Analysis::Muon *m = (*it);
-        m_h_jet_muons_pt->Fill(m->pt()/GeV);
-      }
-    }
-    // loop over electrons in jet
-    if(ea) {
-      for(Navigable<ElectronContainer,double>::object_iter it=ea->begin(); it !=ea->end(); ++it) {
-        const Analysis::Electron *e = (*it);
-        m_h_jet_electrons_pt->Fill(e->pt()/GeV);
-      }
-    }
-
-    // --- get btagging weights: SECTION::WEIGHTS
-    double w_ip2d = (*jetItr)->getFlavourTagWeight("IP2D");
-    double w_ip3d = (*jetItr)->getFlavourTagWeight("IP3D");
-    double w_sv1  = (*jetItr)->getFlavourTagWeight("SV1");
-    double w_sv2  = (*jetItr)->getFlavourTagWeight("SV2");
-    double w_cmb  = (*jetItr)->getFlavourTagWeight(); // combination of IP3D and SV1
-    double w_lf2d = (*jetItr)->getFlavourTagWeight("LifetimeTag2D");
-    double w_svbu = (*jetItr)->getFlavourTagWeight("SecVtxTagBU");
-    double w_lhsig= (*jetItr)->getFlavourTagWeight("lhSig");
-    double w_softm= (*jetItr)->getFlavourTagWeight("SoftMuonTag");
-    double w_softe= (*jetItr)->getFlavourTagWeight("SoftElectronTag");
-    double w_jetfit = (*jetItr)->getFlavourTagWeight("JetFitterTag");
-    double w_jetfitcomb = (*jetItr)->getFlavourTagWeight("JetFitterCOMB");
-    double w_jetfitnn = (*jetItr)->getFlavourTagWeight("JetFitterTagNN");
-    double w_jetfitcombnn = (*jetItr)->getFlavourTagWeight("JetFitterCOMBNN");
-    double w_jetprob = (*jetItr)->getFlavourTagWeight("JetProb");
-    ATH_MSG_VERBOSE ( "-> weights: "
-                      << " JetProb: " << w_jetprob
-                      << " IP2D: " << w_ip2d
-                      << " IP3D: " << w_ip3d
-                      << " SV1 : " << w_sv1
-                      << " SV2 : " << w_sv2
-                      << " SV1+IP3D : " << w_cmb
-                      << " | " 
-                      << " Lifetime2D : " << w_lf2d
-                      << " SecVtxTagBU : " << w_svbu
-                      << " lhSig : " << w_lhsig
-                      << " | " 
-                      << " SoftMuonTag : " << w_softm
-                      << " SoftElectronTag : " << w_softe
-                      << " | " 
-                      << " JetFitterTag : "<< w_jetfit
-                      << " JetFitterCOMB : "<< w_jetfitcomb
-                      << " JetFitterTagNN : "<< w_jetfitnn
-                      << " JetFitterCOMBNN : "<< w_jetfitcombnn
-                      );
-    m_h_tag_jetprob_w->Fill(w_jetprob);
-    m_h_tag_i2d_w->Fill(w_ip2d);
-    m_h_tag_i3d_w->Fill(w_ip3d);
-    m_h_tag_sv1_w->Fill(w_sv1);
-    m_h_tag_sv2_w->Fill(w_sv2);
-    m_h_tag_cmb_w->Fill(w_cmb);
-    m_h_tag_lf2d_w->Fill(w_lf2d);
-    m_h_tag_svbu_w->Fill(w_svbu);
-    m_h_tag_lhsig_w->Fill(w_lhsig);
-    m_h_tag_softm_w->Fill(w_softm);
-    m_h_tag_softe_w->Fill(w_softe);
-    m_h_tag_jetfitter_w->Fill(w_jetfit);
-    m_h_tag_jetfitcomb_w->Fill(w_jetfitcomb);
-    m_h_tag_jetfitternn_w->Fill(w_jetfitnn);
-    m_h_tag_jetfitcombnn_w->Fill(w_jetfitcombnn);
-    double w[MAX_numTaggers];
-    w[0] = w_ip2d;
-    w[1] = w_ip3d;
-    w[2] = w_sv1;
-    w[3] = w_sv2;
-    w[4] = w_cmb;
-    w[5] = w_lf2d;
-    w[6] = w_svbu;
-    w[7] = w_lhsig;
-    w[8] = w_softm;
-    w[9] = w_softe;
-    w[10] = w_jetfit;
-    w[11] = w_jetfitcomb;
-    w[12] = w_jetfitnn;
-    w[13] = w_jetfitcombnn;
-    w[14] = -TMath::Log10(w_jetprob);
-
-    bool svok = false;
-    // --- loop on tags & get detailed information for each tag: SECTION::DETAILS
-    std::vector<const JetTagInfoBase*> infoVector = (*jetItr)->jetTagInfoVector();
-    for(uint iInfo = 0; iInfo < infoVector.size(); iInfo++) {
-      if(0==infoVector[iInfo]) continue;
-      bool is2D = (infoVector[iInfo]->infoType()=="IP2D");
-      bool is3D = (infoVector[iInfo]->infoType()=="IP3D");
-      bool isIPlus = (infoVector[iInfo]->infoType()=="IPInfoPlus");
-      bool isS1 = (infoVector[iInfo]->infoType()=="SV1");
-      bool isS2 = (infoVector[iInfo]->infoType()=="SV2");
-      bool isSPlus = (infoVector[iInfo]->infoType()=="SVInfoPlus");
-      bool isSoftElectron = (infoVector[iInfo]->infoType()=="SoftElectronTag");
-
-      // ...... detailed information for impact parameter-based info objects ......
-      if(is2D) { // impact parameter 2D
-	// basic additional information:
-	const Analysis::IPInfoBase* infob = dynamic_cast<const Analysis::IPInfoBase*>(infoVector[iInfo]);
-	if(infob) {
-	  int ntrk = infob->nbTracks();            // number of tracks used for tagging in the jet
-	  double pb = infob->tagLikelihood()[0];   // b likelihood
-	  double pu = infob->tagLikelihood()[1];   // u likelihood
-	  m_h_tag_i2d_n->Fill((float)ntrk);
-	  m_h_tag_i2d_b->Fill(pb);
-	  m_h_tag_i2d_u->Fill(pu);
-          ATH_MSG_VERBOSE ( "-> InfoBase for IP2D:  #tracks= " << ntrk
-                            << " Pb= " << pb << " Pu= " << pu );
-	}
-      }
-      if(is3D) { // impact parameter 3D
-	// basic additional information:
-	const Analysis::IPInfoBase* infob = dynamic_cast<const Analysis::IPInfoBase*>(infoVector[iInfo]);
-	if(infob) {
-	  int ntrk = infob->nbTracks();            // number of tracks used for tagging in the jet
-	  double pb = infob->tagLikelihood()[0];   // b likelihood
-	  double pu = infob->tagLikelihood()[1];   // u likelihood
-	  m_h_tag_i3d_n->Fill((float)ntrk);
-	  m_h_tag_i3d_b->Fill(pb);
-	  m_h_tag_i3d_u->Fill(pu);
-          ATH_MSG_VERBOSE ( "-> InfoBase for IP3D:  #tracks= " << ntrk
-                            << " Pb= " << pb << " Pu= " << pu );
-	}
-      }
-      if(isIPlus) {
-	// more detailed information (not always present in every AOD):
-	const Analysis::IPInfoPlus* infop = dynamic_cast<const Analysis::IPInfoPlus*>(infoVector[iInfo]);
-	if(infop) {
-	  int ntrk = infop->numTrackInfo();
-          ATH_MSG_VERBOSE ( "-> InfoPlus for each track in IP2D/IP3D/JetProb:  #tracks= " 
-                            << ntrk );
-          int ibin;
-          ibin = (int) ((p4.et())/GeV)/100;
-          if(ibin>5) ibin=5;
-          m_h_tag_IPinfo_ntrk[ibin]->Fill((float) ntrk);
-	  for(int itinf = 0; itinf < ntrk; itinf++) {
-	    Analysis::IPTrackInfo trackInfo = infop->getTrackInfo(itinf);
-	    // impact parameters w.r.t. primary vertex: this information is stored 
-	    // to avoid calling again the extrapolator which requires the full geometry initialization
-	    double d0val = trackInfo.d0Value();
-	    double d0sig = trackInfo.d0Significance();
-	    double z0val = trackInfo.z0Value();
-	    double z0sig = trackInfo.z0Significance();
-	    m_h_tag_i2d_sig->Fill(d0sig);
-	    m_h_tag_i3d_sig->Fill(z0sig);
-	    // individual contribution of this track to the b-tagging weights:
-	    double wi2D = trackInfo.trackWeight2D(); // IP2D
-	    double wi3D = trackInfo.trackWeight3D(); // IP3D
-	    double piJP = trackInfo.trackProbJP();   // JetProb
-	    // track quality information:
-	    Analysis::TrackGrade grd = trackInfo.trackGrade(); // Good, Shared, ...
-	    bool vzero = trackInfo.isFromV0(); // track from V0, interaction in identified material, ...
-	    // pointer to actual TrackParticle:
-	    const Rec::TrackParticle* trk = trackInfo.track();
-            //
-            // fill some histograms as a func of jet ET
-            //
-            m_h_tag_IPinfo_d0val[ibin]->Fill(d0val);
-            m_h_tag_IPinfo_z0val[ibin]->Fill(z0val);
-            m_h_tag_IPinfo_d0sig[ibin]->Fill(d0sig);
-            m_h_tag_IPinfo_z0sig[ibin]->Fill(z0sig);
-            m_h_tag_IPinfo_weight2D[ibin]->Fill(wi2D);
-            m_h_tag_IPinfo_weight3D[ibin]->Fill(wi3D);
-            m_h_tag_IPinfo_piJP[ibin]->Fill(piJP);
-            m_h_tag_IPinfo_fromV0[ibin]->Fill((float) vzero);
-            m_h_tag_IPinfo_grade[ibin]->Fill((float) grd);
-
-	    // dump:
-	    ATH_MSG_VERBOSE ( "    --> track " << itinf
-	         << " d0= " << d0val << " Signif(d0)= " << d0sig
-	         << " z0= " << z0val << " Signif(z0)= " << z0sig
-		 << " weight2D= " << wi2D
-		 << " weight3D= " << wi3D
-		 << " probJetProb= " << piJP
-                              << " grade= " << grd.gradeString()
-                              << (vzero ? " fromV0 " : "") );
-	    if(trk) {
-	      ATH_MSG_VERBOSE ( " track pT= " << trk->pt()
-                                << " eta= " << trk->eta()
-                                << " phi= " << trk->phi() );
-	    }
-	  }
-	}
-      }
-      // ...... detailed information for secondary vertex-based info objects ......
-      if(isS1) {
-	// basic additional information:
-	const Analysis::SVInfoBase* info = dynamic_cast<const Analysis::SVInfoBase*>(infoVector[iInfo]);
-	if(info) {
-	  double pb = info->tagLikelihood()[0];
-	  double pu = info->tagLikelihood()[1];
-	  m_h_tag_sv1_b->Fill(pb);
-	  m_h_tag_sv1_u->Fill(pu);
-          ATH_MSG_VERBOSE ( "-> InfoBase for SV1: " 
-                            << " Pb= " << pb << " Pu= " << pu );
-	}
-      }
-      if(isS2) {
-	// basic additional information:
-	const Analysis::SVInfoBase* info = dynamic_cast<const Analysis::SVInfoBase*>(infoVector[iInfo]);
-	if(info) {
-	  double pb = info->tagLikelihood()[0];
-	  double pu = info->tagLikelihood()[1];
-	  m_h_tag_sv2_b->Fill(pb);
-	  m_h_tag_sv2_u->Fill(pu);
-          ATH_MSG_VERBOSE ( "-> InfoBase for SV2: " 
-                            << " Pb= " << pb << " Pu= " << pu );
-	}
-      }
-      if(isSPlus) {
-	// more detailed information (not always present in every AOD):
-	const Analysis::SVInfoPlus* info = dynamic_cast<const Analysis::SVInfoPlus*>(infoVector[iInfo]);
-	if(info) {
-	  int ntrk = info->getNGTrackInSvx();       // number of tracks in vertex
-	  double mass = info->getMass();            // mass of secondary vertex
-	  int n2t = info->getN2T();                 // number of two-track vertices
-	  svok = (mass>0. && n2t>0 );
-	  double efrc = info->getEnergyFraction();  // energy fraction svx/jet
-	  m_h_tag_sv_n->Fill((float)ntrk);
-	  m_h_tag_sv_n2t->Fill((float)n2t);
-	  m_h_tag_sv_frc->Fill(efrc);
-	  m_h_tag_sv_m->Fill(mass);
-          ATH_MSG_VERBOSE ( "-> InfoPlus for SV1/SV2: " 
-                            << " #tracks= " << ntrk
-                            << " mass= " << mass
-                            << " #n2t= " << n2t
-                            << " efrac= " << efrc
-                            << " tagOK= " << svok );
-	}
-      }
-     // soft electron b-tagging
-      if (isSoftElectron) {
-	// basic additional information:
-	const Analysis::SoftElectronInfo* info = dynamic_cast<const Analysis::SoftElectronInfo*>(infoVector[iInfo]);
-	if(info) {
-	  checkSoftElectron(infoVector,iInfo,ntotal,p4,iflav,ea,
-			    ntrk_j,nmu_j,electronTDS,tpTruthColl);
-
-	  double pb = info->tagLikelihood()[0];
-	  double pu = info->tagLikelihood()[1];
-	  //m_h_tag_softe_b->Fill(pb);
-	  //m_h_tag_softe_u->Fill(pu);
-          ATH_MSG_VERBOSE ( "-> InfoBase for SET: " 
-                            << " Pb= " << pb << " Pu= " << pu );
-	}
-      }
-    } // loop on tags
-
-    /////////////////////
-    //* HLT b-tagging *//
-    /////////////////////
-
-    double deltaR = 9999, phiRoI = -9999, etaRoI = -9999, etRoI = -9999;
- 
-    double w_ip2d_l2 = -1,  w_ip3d_l2 = -1;
-    double w_ip2d_ef = -1,  w_ip3d_ef = -1;
-    double w_ip2d_of = -21, w_ip3d_of = -21;
-
-    double w_jetProb_l2 = -1, w_jetProb_ef = -1;
-    double w_mVtx_ef = -1, w_eVtx_ef = -1, w_nVtx_ef = -1;
-
-    bool isFoundL2 = true;
-
-    //* Retrieve from StoreGate TrigL2BjetContainer *//
-    const DataHandle<TrigL2BjetContainer> trigL2Bjet;
-    const DataHandle<TrigL2BjetContainer> lastTrigL2Bjet;
-  
-    sc = evtStore()->retrieve(trigL2Bjet,lastTrigL2Bjet);
-    if (sc.isSuccess()) {
-
-      //* Loop on TrigL2BjetContainer *//
-      for (int j=1; trigL2Bjet != lastTrigL2Bjet; ++trigL2Bjet, ++j) {
-      
-	ATH_MSG_VERBOSE ( "Looking at TrigL2BjetContainer " << j );
-    
-	TrigL2BjetContainer::const_iterator pL2BjetItr    = trigL2Bjet->begin();
-	TrigL2BjetContainer::const_iterator lastL2BjetItr = trigL2Bjet->end();
-      
-	//* Loop on TrigL2Bjet (expected one per collection) *//
-	for (int k=1; pL2BjetItr != lastL2BjetItr; ++pL2BjetItr, ++k ) {
-	
-	  ATH_MSG_VERBOSE ( "Looking at TrigL2Bjet " << k << "/" << trigL2Bjet->size() );
-	
-	  ATH_MSG_VERBOSE
-            ( "TrigL2Bjet->xIP1D() = " << (*pL2BjetItr)->xIP1D()
-	       << "; TrigL2Bjet->xIP2D() = " << (*pL2BjetItr)->xIP2D()
-	       << "; TrigL2Bjet->xIP3D() = " << (*pL2BjetItr)->xIP3D() 
-	       << "; TrigL2Bjet->xCHI2() = " << (*pL2BjetItr)->xCHI2() 
-              );
-	
-	  //* eta/phi matching w.r.t. offline b-jet *//
-	  double dR = sqrt(pow((*jetItr)->eta() - (*pL2BjetItr)->eta(),2) + pow(phiCorr(phiCorr((*jetItr)->phi()) - phiCorr((*pL2BjetItr)->phi())),2));
-	
-	  if (dR < deltaR) {
-	    deltaR = dR;
-	    
-	    w_ip2d_l2  = (double)(*pL2BjetItr)->xIP2D();
-	    w_ip3d_l2  = (double)(*pL2BjetItr)->xIP3D();
-	    
-	    w_jetProb_l2 = (double)(*pL2BjetItr)->xCHI2();
-
-	    phiRoI = (*pL2BjetItr)->phi();
-	    etaRoI = (*pL2BjetItr)->eta();
-	  }
-	}
-      }
-    
-      ATH_MSG_DEBUG ( "deltaR L2 = " << deltaR );
-
-    } else {
-
-      isFoundL2 = false;
-      ATH_MSG_DEBUG ( "No TrigL2BjetContainer found" );
-
-    }
-
-    if (deltaR > 0.1) {
-      isFoundL2 = false;
-      w_ip2d_l2 = -1; w_ip3d_l2 = -1;
-      ATH_MSG_DEBUG ( "TrigL2BjetContainer not matched with offline" );
-    }
-  
-    if (isFoundL2) {
-
-      //If no EF found the chain stopped at L2..
-      w_ip2d_ef = 0, w_ip3d_ef = 0; 
-      w_jetProb_ef = 0;
-      w_mVtx_ef = 0, w_eVtx_ef = 0, w_nVtx_ef = 0;
-      deltaR = 9999;
-    
-      //* Retrieve from StoreGate TrigEFBjetContainer *//
-      const DataHandle<TrigEFBjetContainer> trigEFBjet;
-      const DataHandle<TrigEFBjetContainer> lastTrigEFBjet;
-    
-      sc = evtStore()->retrieve(trigEFBjet,lastTrigEFBjet);
-      if (sc.isSuccess()) {
-
-	//* Loop on TrigEFBjetContainer *//
-	for (int i=0; trigEFBjet != lastTrigEFBjet; ++trigEFBjet, ++i) {
-	  
-	  ATH_MSG_VERBOSE ( "Looking at TrigEFBjetContainer " << i );
-	  
-	  TrigEFBjetContainer::const_iterator pEFBjetItr    = trigEFBjet->begin();
-	  TrigEFBjetContainer::const_iterator lastEFBjetItr = trigEFBjet->end();
-
-	  //* Loop on TrigEFBjet (expected one per collection) *//    	
-	  for (int j=0; pEFBjetItr != lastEFBjetItr; ++pEFBjetItr, ++j ) {
-	    
-	    ATH_MSG_VERBOSE ( "Looking at TrigEFBjet " << j );
-	    
-	    ATH_MSG_VERBOSE
-              ( "TrigEFBjet->xIP1D() = " << (*pEFBjetItr)->xIP1D()
-		 << "; TrigEFBjet->xIP2D() = " << (*pEFBjetItr)->xIP2D()
-		 << "; TrigEFBjet->xIP3D() = " << (*pEFBjetItr)->xIP3D() 
-		 << "; TrigEFBjet->xCHI2() = " << (*pEFBjetItr)->xCHI2() 
-		 << "; TrigEFBjet->xMVtx() = " << (*pEFBjetItr)->xMVtx()
-		 << "; TrigEFBjet->xEVtx() = " << (*pEFBjetItr)->xEVtx()
-		 << "; TrigEFBjet->xNVtx() = " << (*pEFBjetItr)->xNVtx() 
-                );
-	    
-	    //* eta/phi matching w.r.t. LVL2 b-jet *//	  
-	    double dR = sqrt(pow(etaRoI - (*pEFBjetItr)->eta(),2) + pow(phiCorr(phiCorr(phiRoI) - phiCorr((*pEFBjetItr)->phi())),2));
-	    
-	    if (dR < deltaR){
-	      deltaR = dR;
-	      
-	      w_ip2d_ef = (*pEFBjetItr)->xIP2D();
-	      w_ip3d_ef = (*pEFBjetItr)->xIP3D();
-
-	      w_jetProb_ef = (double)(*pEFBjetItr)->xCHI2();
-
-	      w_mVtx_ef = (double)(*pEFBjetItr)->xMVtx();
-	      w_eVtx_ef = (double)(*pEFBjetItr)->xEVtx();
-	      w_nVtx_ef = (double)(*pEFBjetItr)->xNVtx();
-	    }
-	  }
-	}
-  
-	ATH_MSG_DEBUG ( "deltaR EF = " << deltaR << " " << w_ip3d_l2 );
-      
-	if (deltaR > 0.001) {
-	  w_ip2d_ef = -1; w_ip3d_ef = -1;
-	  w_jetProb_ef = -1;
-	  w_mVtx_ef = -1, w_eVtx_ef = -1, w_nVtx_ef = -1;
-	  ATH_MSG_DEBUG ( "TrigEFBjetContainer found but not matched with offline (should never happen)" );
-	}
-      } else {
-	ATH_MSG_INFO ( "TrigEFBjetContainer not found" );
-      }
-   
-      deltaR = 9999;
-
-      //* Retrieve from StoreGate LVL1 RoIs *//
-      const LVL1_ROI* lvl1RoI = 0; 
-      sc = evtStore()->retrieve(lvl1RoI);
-      
-      if (sc.isFailure() || !lvl1RoI) {
-	ATH_MSG_WARNING ( "No L1RoI found in TDS" );
-	return StatusCode::SUCCESS;
-      } else {
-	ATH_MSG_DEBUG ( "L1RoI successfully retrieved in TDS" );
-	
-	LVL1_ROI::jets_type::const_iterator pL1Jet    = (lvl1RoI->getJetROIs()).begin();
-	LVL1_ROI::jets_type::const_iterator lastL1Jet = (lvl1RoI->getJetROIs()).end();
-	
-	ATH_MSG_VERBOSE ( "get L1 Jet size : " << (lvl1RoI->getJetROIs()).size() );
-	
-	for (int i=1; pL1Jet != lastL1Jet; ++pL1Jet, ++i) {
-	  
-	  ATH_MSG_VERBOSE ( "Looking at LVL1 RoI " << i << "/" << (lvl1RoI->getJetROIs()).size() );
-	  ATH_MSG_VERBOSE ( "L1 ET8x8 : " << (*pL1Jet).getET8x8() );
-	  
-	  if ((*pL1Jet).getET8x8() <= 18000) continue;
-	  
-	  double dR = sqrt(pow((*pL1Jet).eta() - etaRoI,2) + pow(phiCorr(phiCorr((*pL1Jet).phi()) - phiCorr(phiRoI)),2));
-	
-	  if (dR < deltaR) {
-	    deltaR = dR;  
-	    etRoI = (*pL1Jet).getET8x8();
-	  }
-	}
-	
-	ATH_MSG_DEBUG ( "deltaR L1 = " << deltaR );
-      }
-    }
-
-    bool jetRoImatched=false;
-    //* IP2D offline weight if b-jet has been selected online *// 
-    if (w_ip2d_l2 != -1 || w_ip2d_ef != -1) {
-      if (w_ip2d_l2 >= 0.57 && w_ip2d_ef >= 0.33)
-	w_ip2d_of = (*jetItr)->getFlavourTagWeight("IP2D");
-      else 
-	w_ip2d_of = -10;
-      jetRoImatched=true;
-    }
-
-    //* IP3D offline weight if b-jet has been selected online *// 
-    if (w_ip3d_l2 != -1 || w_ip3d_ef != -1) {
-      if (w_ip3d_l2 >= 0.57 && w_ip3d_ef >= 0.33)
-	w_ip3d_of = (*jetItr)->getFlavourTagWeight("IP3D");
-      else 
-	w_ip3d_of = -10;
-      jetRoImatched=true;
-    }
-
-    double w_ip2d_ef_afterl2=w_ip2d_ef;
-    double w_ip3d_ef_afterl2=w_ip3d_ef;
-    if (jetRoImatched){
-      if (w_ip2d_l2 <= 0.57)
-	  w_ip2d_ef_afterl2 = 0;
-      if (w_ip3d_l2 <= 0.57)
-	  w_ip3d_ef_afterl2 = 0;
-    }
-
-    if (jetRoImatched){
-      w[15] = w_ip2d_of;
-      w[16] = w_ip3d_of;
-      w[17] = w_ip2d_l2;
-      w[18] = w_ip3d_l2;
-      w[19] = w_jetProb_l2;
-      w[20] = w_ip2d_ef_afterl2;
-      w[21] = w_ip3d_ef_afterl2;
-      w[22] = w_ip2d_ef;
-      w[23] = w_ip3d_ef;
-      w[24] = w_jetProb_ef;
-      w[25] = w_mVtx_ef;
-      w[26] = w_eVtx_ef;
-      w[27] = w_nVtx_ef;
-    }
-
-    ////////////////////////////
-    //* end of HLT b-tagging *//
-    ////////////////////////////
-
-    // --- fill histos for computing efficiency & rejections:
-    // use only jets within ID acceptance and with pT>15 GeV and away from high-pt leptons
-    bool leptonPur = isJetFarFromHPTLepton(*jetItr);
-    if(fabs(p4.pseudoRapidity())<2.5&&p4.et()>15000.&&leptonPur) {
-
-      // b-jets:
-      if(5==iflav && ( m_selectBFromRun==0 || m_selectBFromRun==m_irun)) {
-	for(int i=0;i<MAX_numTaggers;i++) {
-          if ( i<MAX_numTaggers-MAX_numHLTTaggers || jetRoImatched ){
-	    m_h_perf_b[i]->Fill(w[i]);
-	  }
-	}
-	m_h_perf_vxeff_den_b->Fill(0.5);
-	if(svok) m_h_perf_vxeff_num_b->Fill(0.5);
-
-	// fill histograms for weights vs. ET, eta and phi
-	// will use them to calculate effs and rejs. as a function of these variables
-	// VJ Apr 13'2007
-	for(int i=0;i<(MAX_numTaggers-MAX_numHLTTaggers)+2;i++) {
-	  m_h_perf_b_ET[i]->Fill(p4.et()/GeV,w[i]);
-	  m_h_perf_b_eta[i]->Fill(p4.pseudoRapidity(),w[i]);
-	  m_h_perf_b_phi[i]->Fill(p4.phi(),w[i]);
-	}
-	//*
-	for(int i=(MAX_numTaggers-MAX_numHLTTaggers)+2;i<MAX_numTaggers;i++) {
-          if (jetRoImatched){
-	    m_h_perf_b_ET[i]->Fill(etRoI/GeV,w[i]);
-	    m_h_perf_b_eta[i]->Fill(etaRoI,w[i]);
-	    m_h_perf_b_phi[i]->Fill(phiRoI,w[i]);
-	  }
-	}
-      }
-
-      // light jets:
-      if(0==iflav && ( m_selectUFromRun==0 || m_selectUFromRun==m_irun)) {
-	for(int i=0;i<MAX_numTaggers;i++) {
-          if ( i<MAX_numTaggers-MAX_numHLTTaggers || jetRoImatched ){
-	    m_h_perf_u[i]->Fill(w[i]);
-	  }
-	}
-	m_h_perf_vxeff_den_u->Fill(0.5);
-	if(svok) m_h_perf_vxeff_num_u->Fill(0.5);
-
-	// fill histograms for weights vs. ET, eta and phi
-	// will use them to calculate effs and rejs. as a function of these variables
-	for(int i=0;i<(MAX_numTaggers-MAX_numHLTTaggers)+2;i++) {
-	  m_h_perf_u_ET[i]->Fill(p4.et()/GeV,w[i]);
-	  m_h_perf_u_eta[i]->Fill(p4.pseudoRapidity(),w[i]);
-	  m_h_perf_u_phi[i]->Fill(p4.phi(),w[i]);
-	}
-	//*
-	for(int i=(MAX_numTaggers-MAX_numHLTTaggers)+2;i<MAX_numTaggers;i++) {
-          if (jetRoImatched){
-	    m_h_perf_u_ET[i]->Fill(etRoI/GeV,w[i]);
-	    m_h_perf_u_eta[i]->Fill(etaRoI,w[i]);
-	    m_h_perf_u_phi[i]->Fill(phiRoI,w[i]);
-	  }
-	}
-
-	// light jets away from lifetime particles (purification)
-	if( this->isLightJetIsolated(p4) || // recomputed on truth particles
-            (                               // from pre-filled distances
-              dRminToB > m_purificationDeltaR &&
-              dRminToC > m_purificationDeltaR &&
-              dRminToT > m_purificationDeltaR 
-            )
-          ) {
-	  for(int i=0;i<MAX_numTaggers;i++) {
-	    if ( i<MAX_numTaggers-MAX_numHLTTaggers || jetRoImatched ){
-	      m_h_perf_upur[i]->Fill(w[i]);
-	    }
-	  }
-	  m_h_perf_vxeff_den_upur->Fill(0.5);
-	  if(svok) m_h_perf_vxeff_num_upur->Fill(0.5);
-
-	  // fill histograms for weights vs. ET, eta and phi
-	  // will use them to calculate effs and rejs. as a function of these variables
-	  for(int i=0;i<(MAX_numTaggers-MAX_numHLTTaggers)+2;i++) {
-	    m_h_perf_upur_ET[i]->Fill(p4.et()/GeV,w[i]);
-	    m_h_perf_upur_eta[i]->Fill(p4.pseudoRapidity(),w[i]);
-	    m_h_perf_upur_phi[i]->Fill(p4.phi(),w[i]);
-	  }
-	  //*
-	  for(int i=(MAX_numTaggers-MAX_numHLTTaggers)+2;i<MAX_numTaggers;i++) {
-            if (jetRoImatched){
-	      m_h_perf_upur_ET[i]->Fill(etRoI/GeV,w[i]);
-	      m_h_perf_upur_eta[i]->Fill(etaRoI,w[i]);
-	      m_h_perf_upur_phi[i]->Fill(phiRoI,w[i]);
-	    }
-	  }
-	}
-      }
-    }
-  } // end loop jets
-
-  m_h_jet_ntotal->Fill((float)ntotal);
-  m_h_jet_nlabelb->Fill((float)nlabelb);
-  m_h_jet_nlabelc->Fill((float)nlabelc);
-  m_h_jet_nlabelt->Fill((float)nlabelt);
-
-  // for ntuple
-  m_njet = ntotal;
-  // Fill ntuple //
-  if (m_UseTupleSET) p_nt1->Fill();
-
-  return StatusCode::SUCCESS;
-}
-
-// ============================================================
-void
-JetTagAna::checkSoftElectron(const std::vector<const JetTagInfoBase*> infoVector, uint iInfo, int ntotal, 
-                             CLHEP::HepLorentzVector p4, int iflav, 
-                             const Analysis::ElectronAssociation *ea, int ntrk_j, int nmu_j, 
-                             const ElectronContainer* electronTDS, 
-                             const TrackParticleTruthCollection* tpTruthColl) 
-{
-  //
-  // check information for soft electron b-tagging
-  // in particular look for jets which indeed contain 
-  // an electron with good quality track in order 
-  // to estimate performance only on this subset
-  // add information in a tuple
-  //
-  
-  const Analysis::SoftElectronInfo* info = dynamic_cast<const Analysis::SoftElectronInfo*>(infoVector[iInfo]);
-  
-  if (!info) return;
-  if (!electronTDS) return;
-
-  ATH_MSG_DEBUG ( name() << " SoftElectronTag info ! " 
-                  << iInfo << " "  << infoVector[iInfo] 
-                  << " " << infoVector[iInfo]->infoType() << " " << ntotal );
-  
-  // ntuple variables initialisation 
-  m_jet_eptr[ntotal-1]        = 0;
-  m_jet_ea0[ntotal-1]         = 0;
-  m_jet_tag_pid[ntotal-1]     = 0;
-  m_jet_tag_mothpid[ntotal-1] = 0;
-  
-  m_jet_eTrueEle[ntotal-1]     = 0 ;
-  m_jet_eAnyEle[ntotal-1]      = 0 ;
-  m_jet_ne[ntotal-1]           = 0 ;
-  m_jet_epb[ntotal-1]          = 0.;
-  m_jet_epu[ntotal-1]          = 0.;
-  m_jet_elh[ntotal-1]          = -40.;
-  m_jet_ew[ntotal-1]           = -40.;
-  m_jet_Ntrack[ntotal-1]       = 0 ;
-  m_jet_sumptTrack[ntotal-1]   = 0 ;
-  m_jet_NtrackEle[ntotal-1]    = 0 ; 
-  
-  for (int i=0; i<10;i++) { //fix me! hardcoded number
-    m_jet_trackEle[ntotal-1][i]   = 0;
-    m_jet_trackElept[ntotal-1][i] = 0;
-  }
-  
-  int jetele    = 0;
-  int jetanyele = 0;
-  
-  for (int ii=0; ii<  m_maxjet   ;ii++) {
-    m_jet_eTrueEle[ii]= 0;
-    m_jet_eAnyEle[ii] = 0;
-  }
-  
-  m_jet_phi[ntotal-1]  = (float)p4.phi();
-  m_jet_eta[ntotal-1]  = p4.pseudoRapidity();
-  m_jet_et[ntotal-1]   = p4.et();
-  m_jet_flav[ntotal-1] = iflav;
-  if( this->isLightJetIsolated(p4)) m_jet_isol[ntotal-1]=1;
-  else                              m_jet_isol[ntotal-1]=0;
-  
-  if(ea) m_nelej[ntotal-1]  = ea->size();
-  else   m_nelej[ntotal-1]  = 0;
-  
-  m_ntrackj[ntotal-1]       = ntrk_j;
-  m_nmuonj[ntotal-1]        = nmu_j;
-
-  unsigned int ntrkp = info->numTrackInfo();
-  //mlog << MSG::INFO << "N good ele " << ntrkp << endmsg;
-  
-  m_jet_ne[ntotal-1] = ntrkp; 
-  m_jet_ew[ntotal-1] = info->weight(); // softe standard weight
-  
-  // get total LH
-  if (info->tagLikelihood().size() >= 2) {
-    m_jet_epb[ntotal-1] = info->tagLikelihood()[0];//sig part of J-B LH
-    m_jet_epu[ntotal-1] = info->tagLikelihood()[1];//bkg part of J-B LH
-    m_jet_elh[ntotal-1]=getTotWeight(m_jet_epb[ntotal-1],m_jet_epu[ntotal-1]);
-  }
-  ATH_MSG_VERBOSE ( "Elec xcheck " << m_jet_ew[ntotal-1] 
-                    << " " << m_jet_elh[ntotal-1]);
-
-  // Flag the jet as containing ELECTRON if DR <= 0.4
-  // and e mother pid is between 400 and 600 or 4000 and 6000 	      
-  ATH_MSG_VERBOSE ( " in container you have electrons : " 
-                    << electronTDS->size() );
-  // loop on electron container
-  ElectronContainer::const_iterator elecItr  = electronTDS->begin();
-  ElectronContainer::const_iterator elecItrE = electronTDS->end();
-  for (; elecItr != elecItrE; ++elecItr) { 
-    // only soft e
-    if ( !(*elecItr)->author(egammaParameters::AuthorSofte)) continue; 
-    
-    CLHEP::HepLorentzVector p4ele = (*elecItr)->hlv();
-    double drie = p4ele.deltaR(p4);
-    //double dptr = (p4ele.et()-p4.et())/p4.et();
-    
-    // check the parent id
-    int parentID =0;
-    //int grandparentID=0;
-    int ID = 0;
-    
-    if( !(*elecItr)->trackParticle() ) continue; 
-    
-    const Rec::TrackParticle *myTrackParticle = (*elecItr)->trackParticle();
-    // take truth info for that particle
-    const HepMC::GenParticle* thePart1 = getTruth((myTrackParticle),tpTruthColl,m_trackTES);
-    if (thePart1!=0) {
-      //std::cout << "PDG of the track is " << thePart1->pdg_id()<<std::endl;
-      ID= thePart1->pdg_id();
-      HepMC::GenVertex::particle_iterator firstParent, lastParent, thisParent;
-      
-      firstParent =thePart1->production_vertex()->particles_begin(HepMC::parents);
-      lastParent  =thePart1->production_vertex()->particles_end(HepMC::parents);
-      for(thisParent = firstParent; thisParent != lastParent; ++thisParent)  {
-        parentID = (*thisParent)->pdg_id();
-      }
-    }
-    
-    // REMOVE ME!      
-    //std::cout<< " MW drie parentID "<<drie<<" "<<parentID<<std::endl;
-    if (std::abs(drie) <= 0.4  && std::abs(ID)==11 && 
-        (isBHadron(parentID) || isDHadron(parentID))) jetele = 1;
-    //if (fabs(drie)    <= 0.4  && fabs(ID)==11        && 
-    //((fabs(parentID)>= 400  && fabs(parentID)<600) ||
-    // (fabs(parentID)>= 4000 && fabs(parentID)<6000))) jetele = 1;  
-    
-    if (std::abs(drie) <= 0.4 && std::abs(ID)==11) jetanyele = 1;
-  }
-  
-  // REMOVE ME!
-  //std::cout<< " is electron in jet "<<jetanyele<<" "<<jetele<<std::endl;
-  m_jet_eTrueEle[ntotal-1] = jetele;
-  m_jet_eAnyEle[ntotal-1]  = jetanyele;
-  //----------------------------------------------
-  // Flag the jet as containing ELECTRON TRACK    
-  //-----------------------------------------------
-  int itrack      = 0;
-  double ptsum    = 0.;
-  //int jettrackele[10]; // this is never used in the code
-  //for (int i=0; i< 10;i++) jettrackele[i]= 0;
-  
-  int nele       = 0;
-  double trackpt = 0.;
-  
-  // iterators over the trackparticle container
-  Rec::TrackParticleContainer::const_iterator trackItr  = m_trackTES->begin();
-  Rec::TrackParticleContainer::const_iterator trackItrE = m_trackTES->end();
-  for (; trackItr != trackItrE; ++trackItr) { 
-    // check track quality
-    if (!checkTrackqualforSET(trackItr,&trackpt).isSuccess()) continue;
-    //int goodtrack = 0;  
-    CLHEP::HepLorentzVector p4trk = (*trackItr)->hlv();
-    double drie = p4trk.deltaR(p4);
-    //double dptr = (p4trk.et()-p4.et())/p4.et();
-    //std::cout << " HLV = " << p4trk << " " << drie << " " << dptr << std::endl;
-    
-    // check the parent id
-    int parentID      = 0;              
-    //int grandparentID = 0;
-    int ID            = 0;
-    
-    const Rec::TrackParticle *myTrackParticle = (*trackItr);
-    //take truth info for that particle
-    const HepMC::GenParticle* thePart1 =getTruth(myTrackParticle,tpTruthColl,m_trackTES);
-    if (thePart1!=0) {
-      //std::cout << "PDG of the trackparticle is " << thePart1->pdg_id()<<std::endl;
-      ID= thePart1->pdg_id();
-      HepMC::GenVertex::particle_iterator firstParent, lastParent, thisParent;
-      
-      firstParent =thePart1->production_vertex()->particles_begin(HepMC::parents);
-      lastParent  =thePart1->production_vertex()->particles_end(HepMC::parents);
-      for(thisParent = firstParent; thisParent != lastParent; ++thisParent) {
-        parentID = (*thisParent)->pdg_id();
-      }
-    }
-    
-    //if GQ track inside jet 
-    int codeEle = 0;
-    if (fabs(drie) <= 0.4) {
-      if (std::abs(ID)==11) { 
-        codeEle = 1;
-	
-        if (isDHadron(parentID)) codeEle = 4;
-        //if ((fabs(parentID)>=400 && fabs(parentID)<500) ||
-        //  (fabs(parentID)>=4000 && fabs(parentID)<5000)) codeEle = 4;
-	
-        if (isBHadron(parentID)) codeEle = 5;
-        //if ((fabs(parentID)>=500 && fabs(parentID)<600) ||
-        //  (fabs(parentID)>=5000 && fabs(parentID)<6000)) codeEle = 5;
-	
-        if (std::abs(parentID)==22 || std::abs(parentID)==111) codeEle = 2;  
-	
-        //jettrackele[nele]            = codeEle;                  
-        m_jet_trackEle[ntotal-1][nele]   = codeEle;
-        m_jet_trackElept[ntotal-1][nele] = fabs(trackpt);
-	
-        nele = nele + 1;
-      }
-      ptsum = ptsum + fabs(trackpt);
-      itrack++; 
-    }
-  }
-
-  
-  //if (m_UseTupleSET) {
-  m_jet_NtrackEle[ntotal-1]  = nele;
-  m_jet_sumptTrack[ntotal-1] = ptsum;
-  m_jet_Ntrack[ntotal-1]     = itrack;
-  //}
-  //std::cout << " number of found ele tracks in jet  "
-  //    << nele<<std::endl;
-  
-  int beid     = 0;
-  int mothbeid = 0; 
-  
-  for(unsigned int i=0;i<ntrkp;i++) {
-    const Analysis::SETrackInfo ipteinfo = info->getTrackInfo(i);
-    //mlog << MSG::INFO << "Elec " << i << endmsg;
-    //mlog << MSG::INFO << ipteinfo << endmsg;
-    
-    beid     = 0;
-    mothbeid = 0; 
-    
-    if (i == 0) {
-      m_jet_eptr[ntotal-1]   = ipteinfo.pTrel();
-      //std::cout << " pT = " << ipteinfo.pTrel() << std::endl;
-      m_jet_ea0[ntotal-1]    = ipteinfo.d0Value();
-
-      // Does it correspond to a true electron ?
-      const egamma* bestSoftE = ipteinfo.electron();
-      //      const Analysis::Electron* bestSoftE = ipteinfo.electron();
-      if (bestSoftE) {
-        //if (m_UseTupleSET) {
-        m_jet_ept[ntotal-1]    = bestSoftE->pt();
-        //}
-        //info about pid and moth pid of tagging particle
-        if( bestSoftE->trackParticle() ) {
-          const Rec::TrackParticle *myTrackParticle = bestSoftE->trackParticle();
-          //take truth info for that particle
-          const HepMC::GenParticle* thePartBE =getTruth(myTrackParticle,tpTruthColl,m_trackTES);
-          if (thePartBE!=0) {
-            beid= thePartBE->pdg_id();
-            //std::cout << "PDG of the best ele is " << beid<<std::endl;
-	    
-            HepMC::GenVertex::particle_iterator firstParent, lastParent, thisParent;
-	    
-            firstParent =thePartBE->production_vertex()->particles_begin(HepMC::parents);
-            lastParent  =thePartBE->production_vertex()->particles_end(HepMC::parents);
-            for(thisParent = firstParent; thisParent != lastParent; ++thisParent) {
-              mothbeid = (*thisParent)->pdg_id();
-              //std::cout<< "best ele parent id " << mothbeid <<std::endl;
-            }
-          }
-        }
-	
-        //if (m_UseTupleSET) {
-        m_jet_tag_pid[ntotal-1]     = beid     ;
-        m_jet_tag_mothpid[ntotal-1] = mothbeid ;
-        //}
-        // Flag the jet as containing electron if DR <= 0.4
-        // and e mother pid is between 400 and 600 or 4000 and 6000 	
-        jetele    = 0;
-        jetanyele = 0;
-	
-      }
-    }
-  }
-}
-
-// ============================================================
-StatusCode JetTagAna::checkTrackqualforSET(Rec::TrackParticleContainer::const_iterator trackItr, double *trackpt) {
-  //
-  // check track quality for soft electrons
-  // Beware that requirements could be slightly different 
-  // than for other taggers (in particular for TRT)
-  //
-
-  // good quality track cuts
-  double cutA0       = 1;
-  int cutBL          = 1;
-  int cutPi          = 2;
-  int cutSi          = 7;
-  int cutNTRHits     = 1;
-  int cutNTRTHits    = 20;
-  double cutPt       = 2000.;
-
-  int nBL   = 0;
-  int nPix  = 0;
-  int nSi   = 0;
-  int nTR   = 0;
-  int nTRT  = 0; 
-    
-  const Trk::TrackSummary* summary = (*trackItr)->trackSummary();
-  if (summary) {
-    // number of b-layer hits
-    nBL = summary->get(Trk::numberOfInnermostPixelLayerHits);
-    // number of pixel hits
-    nPix = summary->get(Trk::numberOfPixelHits);
-    // number of Si hits
-    nSi = summary->get(Trk::numberOfPixelHits) + 
-      summary->get(Trk::numberOfSCTHits);
-    // number of high threshold hits in TRT (with outliers)
-    nTR = summary->get(Trk::numberOfTRTHighThresholdHits)+
-      summary->get(Trk::numberOfTRTHighThresholdOutliers);
-    // number of TRT hits (with outliers)
-    nTRT = summary->get(Trk::numberOfTRTHits)+
-    summary->get(Trk::numberOfTRTOutliers);
-  }
-    
-  // access to the perigee parameters
-  const Trk::Perigee* perigee = (*trackItr)->measuredPerigee();
-    
-  double A0Vert     = 0.;
-  double PTInvVert  = 0.;
-    
-  if (perigee) {
-    const auto & parameters = perigee->parameters();
-    // transverse impact parameter
-    A0Vert  = parameters[Trk::d0];
-    // pT of the track
-    PTInvVert = parameters[Trk::qOverP];
-    if ( PTInvVert != 0. ) {
-      *trackpt = 1./PTInvVert;
-    } else { *trackpt = 0.;}
-  }
-  
-  // basic cuts on good quality tracks
-  if (   A0Vert   <= cutA0       &&
-	 nBL      >= cutBL       &&
-	 nPix     >= cutPi       &&
-	 nSi      >= cutSi       &&
-	 nTR      >= cutNTRHits  &&
-	 nTRT     >= cutNTRTHits &&
-	 fabs(*trackpt) > cutPt      ) 
-    return StatusCode::SUCCESS; 
-
-  return StatusCode::FAILURE;  
-}
-
-// ====================================================================
-void JetTagAna::computeRejections() {
-  MsgStream mlog( msgSvc(), name() );
-  double r10, e10, w10, r50, e50, w50, r60, e60, w60;
-  int nbj = (int)m_h_perf_b[0]->Integral();
-  int nbu = (int)m_h_perf_u[0]->Integral();
-  ATH_MSG_INFO ( "##### STANDARD REJECTIONS #####  #B-JETS: " << nbj << " #U-JETS: " << nbu );
-  for(int i=0;i<MAX_numTaggers;i++) {
-    if(i>4&&i<8) continue;
-    this->getRej(m_h_perf_u[i], m_h_perf_b[i], r10, e10, w10, r50, e50, w50, r60, e60, w60);
-    ATH_MSG_INFO ( "- Tag " << m_tagger[i] << ": light-jet rejection" );
-    if(i<8 || i>9) { 
-        ATH_MSG_INFO ( "   for a 50% b-tag efficiency (w>" << w50 << "): " << r50 << "+-" << e50 );
-        ATH_MSG_INFO ( "   for a 60% b-tag efficiency (w>" << w60 << "): " << r60 << "+-" << e60 );
-
-	// fill some plots here - VJ Apr. 2007
-	//
-	m_h_perfWt_50->SetBinContent(i+1,w50); // histogram gives the weight (for each tagger) for the 50% eff point
-	if(r50<100000) {
-	  m_h_perf_rej50->SetBinContent(i+1,r50); m_h_perf_rej50->SetBinError(i+1,e50);
-	}
-	m_h_perfWt_60->SetBinContent(i+1,w60); // histogram gives the weight (for each tagger) for the 60% eff point
-	if(r60<100000) {
-	  m_h_perf_rej60->SetBinContent(i+1,r60); m_h_perf_rej60->SetBinError(i+1,e60);
-	}
-	// now get efficiencies as a function of et/eta/phi
-	// w50 was determined by averaging over all et/eta/phi. see how well it does in bins of et/eta/phi
-
-	int nYbins = m_h_perf_b_ET[i]->GetNbinsY();
-	int y50bin = m_h_perf_b[i]->FindBin(w50);
-	//    int y60bin = m_h_perf_b[i]->FindBin(w60);
-	//
-	// first do for ET
-	for(int iet=1;iet<51;iet++) {
-
-	  //eff
-	  double integ = m_h_perf_b_ET[i]->Integral(iet,iet,1,nYbins+1);
-	  double integ50 = m_h_perf_b_ET[i]->Integral(iet,iet,y50bin,nYbins+1);
-	  if(integ>0) {
-	    double eff= integ50/integ;
-	    m_h_eff_b_ET[i]->SetBinContent(iet,eff);
-	    m_h_eff_b_ET[i]->SetBinError(iet,sqrt(eff*(1-eff)/integ));
-	  }
-
-	  // std. rejection
-	  integ = m_h_perf_u_ET[i]->Integral(iet,iet,1,nYbins+1);
-	  integ50 = m_h_perf_u_ET[i]->Integral(iet,iet,y50bin,nYbins+1);
-	  if(integ50>0) {
-	    m_h_rej_u_ET[i]->SetBinContent(iet,integ/integ50);
-	    double invrej = integ50/integ;
-	    double err_invrej = sqrt(invrej*(1-invrej)/integ);
-	    double err_rej = err_invrej/invrej/invrej;
-	    m_h_rej_u_ET[i]->SetBinError(iet,err_rej);
-	  }
-	  // purified rejection
-	  integ = m_h_perf_upur_ET[i]->Integral(iet,iet,1,nYbins+1);
-	  integ50 = m_h_perf_upur_ET[i]->Integral(iet,iet,y50bin,nYbins+1);
-	  if(integ50>0) {
-	    m_h_rej_upur_ET[i]->SetBinContent(iet,integ/integ50);
-	    double invrej = integ50/integ;
-	    double err_invrej = sqrt(invrej*(1-invrej)/integ);
-	    double err_rej = err_invrej/invrej/invrej;
-	    m_h_rej_upur_ET[i]->SetBinError(iet,err_rej);
-	  }
-
-	} // end of ET
-
-	// now do for eta
-	for(int ieta=1;ieta<51;ieta++) {
-
-	  //eff
-	  double integ = m_h_perf_b_eta[i]->Integral(ieta,ieta,1,nYbins+1);
-	  double integ50 = m_h_perf_b_eta[i]->Integral(ieta,ieta,y50bin,nYbins+1);
-	  if(integ>0) {
-	    double eff= integ50/integ;
-	    m_h_eff_b_eta[i]->SetBinContent(ieta,eff);
-	    m_h_eff_b_eta[i]->SetBinError(ieta,sqrt(eff*(1-eff)/integ));
-	  }
-
-	  // std. rejection
-	  integ = m_h_perf_u_eta[i]->Integral(ieta,ieta,1,nYbins+1);
-	  integ50 = m_h_perf_u_eta[i]->Integral(ieta,ieta,y50bin,nYbins+1);
-	  if(integ50>0) {
-	    m_h_rej_u_eta[i]->SetBinContent(ieta,integ/integ50);
-	    double invrej = integ50/integ;
-	    double err_invrej = sqrt(invrej*(1-invrej)/integ);
-	    double err_rej = err_invrej/invrej/invrej;
-	    m_h_rej_u_eta[i]->SetBinError(ieta,err_rej);
-	  }
-	  // purified rejection
-	  integ = m_h_perf_upur_eta[i]->Integral(ieta,ieta,1,nYbins+1);
-	  integ50 = m_h_perf_upur_eta[i]->Integral(ieta,ieta,y50bin,nYbins+1);
-	  if(integ50>0) {
-	    m_h_rej_upur_eta[i]->SetBinContent(ieta,integ/integ50);
-	    double invrej = integ50/integ;
-	    double err_invrej = sqrt(invrej*(1-invrej)/integ);
-	    double err_rej = err_invrej/invrej/invrej;
-	    m_h_rej_upur_eta[i]->SetBinError(ieta,err_rej);
-	  }
-
-	} // end of eta
-	// now do for phi
-	for(int iphi=1;iphi<61;iphi++) {
-
-	  //eff
-	  double integ = m_h_perf_b_phi[i]->Integral(iphi,iphi,1,nYbins+1);
-	  double integ50 = m_h_perf_b_phi[i]->Integral(iphi,iphi,y50bin,nYbins+1);
-	  if(integ>0) {
-	    double eff= integ50/integ;
-	    m_h_eff_b_phi[i]->SetBinContent(iphi,eff);
-	    m_h_eff_b_phi[i]->SetBinError(iphi,sqrt(eff*(1-eff)/integ));
-	  }
-
-	  // std. rejection
-	  integ = m_h_perf_u_phi[i]->Integral(iphi,iphi,1,nYbins+1);
-	  integ50 = m_h_perf_u_phi[i]->Integral(iphi,iphi,y50bin,nYbins+1);
-	  if(integ50>0) {
-	    m_h_rej_u_phi[i]->SetBinContent(iphi,integ/integ50);
-	    double invrej = integ50/integ;
-	    double err_invrej = sqrt(invrej*(1-invrej)/integ);
-	    double err_rej = err_invrej/invrej/invrej;
-	    m_h_rej_u_phi[i]->SetBinError(iphi,err_rej);
-	  }
-	  // purified rejection
-	  integ = m_h_perf_upur_phi[i]->Integral(iphi,iphi,1,nYbins+1);
-	  integ50 = m_h_perf_upur_phi[i]->Integral(iphi,iphi,y50bin,nYbins+1);
-	  if(integ50>0) {
-	    m_h_rej_upur_phi[i]->SetBinContent(iphi,integ/integ50);
-	    double invrej = integ50/integ;
-	    double err_invrej = sqrt(invrej*(1-invrej)/integ);
-	    double err_rej = err_invrej/invrej/invrej;
-	    m_h_rej_upur_phi[i]->SetBinError(iphi,err_rej);
-	  }
-
-	} // end of phi      
-
-      }
-    else
-      {
-	ATH_MSG_INFO ( "   for a 10% b-tag efficiency (w>" << w10 << "): " << r10 << "+-" << e10 );
-
-        m_h_perfWt_10->SetBinContent(i-7,w10);// start with bin 1
-        if(r10<100000) {
-	  m_h_perf_rej10->SetBinContent(i-7,r10); // start with bin 1
-	  m_h_perf_rej10->SetBinError(i-7,e10); // start with bin 1
-	}
-
-      }
-    //
-  }
-  int nbupur = (int)m_h_perf_upur[0]->Integral();
-  ATH_MSG_INFO ( "##### REJECTIONS AFTER PURIFICATION #####  #B-JETS: " << nbj << " #U-JETS: " << nbupur );
-  for(int i=0;i<MAX_numTaggers;i++) {
-    if(i>4&&i<8) continue;
-    this->getRej(m_h_perf_upur[i], m_h_perf_b[i], r10, e10, w10, r50, e50, w50, r60, e60, w60);
-    ATH_MSG_INFO ( "- Tag " << m_tagger[i] << ": light-jet rejection" );
-    if(i<8 || i>9) { 
-        ATH_MSG_INFO ( "   for a 50% b-tag efficiency (w>" << w50 << "): " << r50 << "+-" << e50 );
-        ATH_MSG_INFO ( "   for a 60% b-tag efficiency (w>" << w60 << "): " << r60 << "+-" << e60 );
-	// fill some plots - VJ Apr. 2007
-	m_h_perfWt_pur50->SetBinContent(i+1,w50);
-	if(r50<100000) {
-	  m_h_perf_rejpur50->SetBinContent(i+1,r50); m_h_perf_rejpur50->SetBinError(i+1,e50);
-	}
-	m_h_perfWt_pur60->SetBinContent(i+1,w60);
-	if(r60<100000) {
-	  m_h_perf_rejpur60->SetBinContent(i+1,r60); m_h_perf_rejpur60->SetBinError(i+1,e60);
-	}
-
-
-      }
-    else
-      {
-	ATH_MSG_INFO ( "   for a 10% b-tag efficiency (w>" << w10 << "): " << r10 << "+-" << e10 );
-        m_h_perfWt_pur10->SetBinContent(i-7,w10);// start with bin 1
-        if(r10<100000) {
-	  m_h_perf_rejpur10->SetBinContent(i-7,r10); // start with bin 1
-	  m_h_perf_rejpur10->SetBinError(i-7,e10); // start with bin 1
-	}
-      }
-  }
-}
-
-void JetTagAna::getRej(TH1F* uw, TH1F* bw,
-		       double& ru10, double& eru10, double& w10, 
-		       double& ru50, double& eru50, double& w50, 
-		       double& ru60, double& eru60, double& w60
-		       ) {
-  if(uw==0||bw==0) return;
-  const int n_max_bins = 1000;
-  int    nb   = uw->GetNbinsX()+1;
-  if(nb>n_max_bins) return;
-  int    bi10 = 0;
-  int    bi50 = 0;
-  int    bi60 = 0;
-  double ru[n_max_bins],efb[n_max_bins],eru[n_max_bins]; //,eefb[n_max_bins];
-  const double xbi  = (double) bw->Integral(0,nb); // also includes overflows
-  const double xui  = (double) uw->Integral(0,nb);
-  const double inv_xbi = xbi != 0 ? 1. / xbi : 1;
-  const double inv_xui = xui != 0 ? 1. / xui : 1;
-  for (int ib = 1;ib<nb;ib++) {
-    efb[ib-1] = ((double) bw->Integral(ib,nb)) * inv_xbi;
-    if(ib>1) {
-      if (efb[ib-1] <= 0.1 && efb[ib-2] >= 0.1) {bi10 = ib-1;}
-      if (efb[ib-1] <= 0.5 && efb[ib-2] >= 0.5) {bi50 = ib-1;}
-      if (efb[ib-1] <= 0.6 && efb[ib-2] >= 0.6) {bi60 = ib-1;}
-    }
-    //eefb[ib-1] = sqrt(efb[ib-1]*(1.-efb[ib-1])/xbi); // nothing done with that 
-    if(uw->Integral(ib,nb)>0.) {
-	ru[ib-1]   = xui/((double) uw->Integral(ib,nb));
-	eru[ib-1]  = ru[ib-1]*sqrt((ru[ib-1]-1.) * inv_xui);
-    } else {
-	ru[ib-1]   = 0.;
-	eru[ib-1]  = 0.;
-    }
-  }
-
-  if (bi10 > 0) {
-    w10 = uw->GetBinCenter(bi10);
-    ru10  = (ru[bi10]*(efb[bi10-1]-0.1)+ru[bi10-1]*(0.1-efb[bi10]))/(efb[bi10-1]-efb[bi10]);
-    eru10 = (eru[bi10]*(efb[bi10-1]-0.1)+eru[bi10-1]*(0.1-efb[bi10]))/(efb[bi10-1]-efb[bi10]);
-  }
-  else {
-    w10 = 0;
-    ru10 = 0;
-    eru10 = 0;
-  }
-
-  if (bi50 > 0) {
-    w50 = uw->GetBinCenter(bi50);
-    ru50  = (ru[bi50]*(efb[bi50-1]-0.5)+ru[bi50-1]*(0.5-efb[bi50]))/(efb[bi50-1]-efb[bi50]);
-    eru50 = (eru[bi50]*(efb[bi50-1]-0.5)+eru[bi50-1]*(0.5-efb[bi50]))/(efb[bi50-1]-efb[bi50]);
-  }
-  else {
-    w50 = 0;
-    ru50 = 0;
-    eru50 = 0;
-  }
-
-  if (bi60 > 0) {
-    w60 = uw->GetBinCenter(bi60);
-    ru60  = (ru[bi60]*(efb[bi60-1]-0.6)+ru[bi60-1]*(0.6-efb[bi60]))/(efb[bi60-1]-efb[bi60]);
-    eru60 = (eru[bi60]*(efb[bi60-1]-0.6)+eru[bi60-1]*(0.6-efb[bi60]))/(efb[bi60-1]-efb[bi60]);
-  }
-  else {
-    w60 = 0;
-    ru60 = 0;
-    eru60 = 0;
-  }
-}
-
-void JetTagAna::bookHistograms() {
-  if (m_h_global_counters) return;
-  m_h_global_counters = new TH1F("global_counters","Counters",100,0.,100.);
-  m_h_global_nprimvtx = new TH1F("global_nprimvtx","N primary vertex",10,0.,10.);
-  m_h_global_xprimvtx = new TH1F("global_xprimvtx","X primary vertex",100,-0.1,0.1);
-  m_h_global_yprimvtx = new TH1F("global_yprimvtx","Y primary vertex",100,-0.1,0.1);
-  m_h_global_zprimvtx = new TH1F("global_zprimvtx","Z primary vertex",100,-250.,250.);
-  m_h_global_primvtxresx = new TH1F("global_primvtxresx","reso X primary vertex",100,-0.1,0.1);
-  m_h_global_primvtxresy = new TH1F("global_primvtxresy","reso Y primary vertex",100,-0.1,0.1);
-  m_h_global_primvtxresz = new TH1F("global_primvtxresz","reso Z primary vertex",100,-0.5,0.5);
-  m_h_global_nmcpart  = new TH1F("global_nmcpart","N MC particles",100,0.,1500.);
-  m_h_global_ntrkpart = new TH1F("global_ntrkpart","N TrackParticles",100,0.,200.);
-  m_h_global_BLayerHits = new TH1F("global_BLayerHits","No. of BLayer Hits on TrackP",5,0.,5.);
-  m_h_global_BLayerSharedHits = new TH1F("global_BLayerSharedHits","No. of BLayer shared hits on TrackP",5,0.,5.);
-  m_h_global_PixelHits = new TH1F("global_PixelHits","No. of Pixel Hits on TrackP",10,0.,10.);
-  m_h_global_PixelLayers = new TH1F("global_PixelLayers","No. of Pixel Layers on TrackP",10,0.,10.);
-  m_h_global_SiHits = new TH1F("global_SiHits","No. of Pixel+SCT Hits on TrackP",25,0.,25.);
-  m_h_global_TRTHits = new TH1F("global_TRTHits","No. of TRT Hits on TrackP",100,0.,100.);
-  m_h_global_nmuon = new TH1F("global_nmuon","N Muons",10,0.,10.);
-  m_h_global_nelectron = new TH1F("global_nelectron","N Electron",10,0.,10.);
-  m_h_global_njettag  = new TH1F("global_njettag","N JetTag",30,0.,30.);
-  m_h_truth_bquark_nb = new TH1F("truth_bquark_nb","N quark b",10,0.,10.);
-  m_h_truth_bquark_pt = new TH1F("truth_bquark_pt","b quark pt",100,0.,100000.);
-  m_h_truth_bquark_eta= new TH1F("truth_bquark_eta","b quark eta",100,-5.,5.);
-  m_h_truth_cquark_nb = new TH1F("truth_cquark_nb","N quark c",10,0.,10.);
-  m_h_truth_cquark_pt = new TH1F("truth_cquark_pt","c quark pt",100,0.,100000.);
-  m_h_truth_cquark_eta= new TH1F("truth_cquark_eta","c quark eta",100,-5.,5.);
-  m_h_truth_bhadr_nb  = new TH1F("truth_bhadr_nb","N hadr b",10,0.,10.);
-  m_h_truth_bhadr_pt  = new TH1F("truth_bhadr_pt","b hadr pt",100,0.,100000.);
-  m_h_truth_bhadr_eta = new TH1F("truth_bhadr_eta","b hadr eta",100,-5.,5.);
-  m_h_truth_chadr_nb  = new TH1F("truth_chadr_nb","N hadr c",10,0.,10.);
-  m_h_truth_chadr_pt  = new TH1F("truth_chadr_pt","c hadr pt",100,0.,100000.);
-  m_h_truth_chadr_eta = new TH1F("truth_chadr_eta","c hadr eta",100,-5.,5.);
-  m_h_jet_ntotal      = new TH1F("jet_ntotal","N jets",30,0.,30.);
-  m_h_jet_label       = new TH1F("jet_label","MC jet label",20,0.,20.);
-  m_h_jet_nlabelb     = new TH1F("jet_nlabelb","N jets label b",10,0.,10.);
-  m_h_jet_nlabelc     = new TH1F("jet_nlabelc","N jets label c",10,0.,10.);
-  m_h_jet_nlabelt     = new TH1F("jet_nlabelt","N jets label tau",10,0.,10.);
-  m_h_jet_ntag        = new TH1F("jet_ntag","N tags",20,0.,20.);
-  m_h_jet_eta         = new TH1F("jet_eta","Jet eta",100,-5.,5.);
-  m_h_jet_phi         = new TH1F("jet_phi","Jet phi",100,-3.15,3.15);
-  m_h_jet_et          = new TH1F("jet_et","Jet et",100,0.,500000.);
-  m_h_jet_ntracks     = new TH1F("jet_ntracks","# tracks in a Jet",50,0,50.);
-  m_h_jet_tracks_pt   = new TH1F("jet_tracks_pt","pT of tracks in a Jet",100,0,50.);
-  m_h_jet_nmuons      = new TH1F("jet_nmuons","# muons in a Jet",10,0,10.);
-  m_h_jet_muons_pt    = new TH1F("jet_muons_pt","pT of muons in a Jet",100,0,100.);
-  m_h_jet_nelectrons  = new TH1F("jet_nelectrons","# electrons in a Jet",10,0,10.);
-  m_h_jet_electrons_pt= new TH1F("jet_electrons_pt","pT of electrons in a Jet",100,0,100.);
-  m_h_jet_tracks_BLayerHits = new TH1F("jet_tracks_BLayerHits","BLay hits",10,0.,10.);
-  m_h_jet_tracks_PixelHits = new TH1F("jet_tracks_PixelHits","Pixel hits",10,0.,10.);
-  m_h_jet_tracks_SCTHits = new TH1F("jet_tracks_SCTHits","SCT hits",20,0.,20.);
-  m_h_tag_jetprob_w   = new TH1F("tag_jetprob_w","JetProb: weight",100,0.,1.);
-  m_h_tag_i2d_w       = new TH1F("tag_i2d_w","IP2D: weight",120,-20.,40.);
-  m_h_tag_i2d_n       = new TH1F("tag_i2d_n","IP2D: Ntrack",20,0.,20.);
-  m_h_tag_i2d_b       = new TH1F("tag_i2d_b","IP2D: Pb",10,0.,1.);
-  m_h_tag_i2d_u       = new TH1F("tag_i2d_u","IP2D: Pu",10,0.,1.);
-  m_h_tag_i2d_sig     = new TH1F("tag_i2d_sig","IP2D: signed IP",100,-5.,5.);
-  m_h_tag_i3d_w       = new TH1F("tag_i3d_w","IP3D: weight",120,-20.,40.);
-  m_h_tag_i3d_n       = new TH1F("tag_i3d_n","IP3D: Ntrack",20,0.,20.);
-  m_h_tag_i3d_b       = new TH1F("tag_i3d_b","IP3D: Pb",10,0.,1.);
-  m_h_tag_i3d_u       = new TH1F("tag_i3d_u","IP3D: Pu",10,0.,1.);
-  m_h_tag_i3d_sig     = new TH1F("tag_i3d_sig","IP3D: signed IP",100,-5.,5.);
-  m_h_tag_sv1_w       = new TH1F("tag_sv1_w","SV1: weight",120,-20.,40.);
-  m_h_tag_sv1_b       = new TH1F("tag_sv1_b","SV1: Pb",10,0.,1.);
-  m_h_tag_sv1_u       = new TH1F("tag_sv1_u","SV1: Pu",10,0.,1.);
-  m_h_tag_sv_n       = new TH1F("tag_sv_n","SV1: Ntrack",10,-1.,9.);
-  m_h_tag_sv_n2t     = new TH1F("tag_sv_n2t","SV1: N2T",20,0.,20.);
-  m_h_tag_sv_frc     = new TH1F("tag_sv_frc","SV1: Frc",100,0.,1.);
-  m_h_tag_sv_m       = new TH1F("tag_sv_m","SV1: mass",100,0.,10.);
-  m_h_tag_sv2_w       = new TH1F("tag_sv2_w","SV2: weight",120,-20.,40.);
-  m_h_tag_sv2_b       = new TH1F("tag_sv2_b","SV2: Pb",10,0.,1.);
-  m_h_tag_sv2_u       = new TH1F("tag_sv2_u","SV2: Pu",10,0.,1.);
-  m_h_tag_cmb_w       = new TH1F("tag_cmb_w","Weight combined",120,-20.,40.);
-  m_h_tag_lf2d_w      = new TH1F("tag_lf2d_w","Lifetime2D: weight",120,-20.,40.);
-  m_h_tag_svbu_w      = new TH1F("tag_svbu_w","SecVtxTagBU: weight",120,-20.,40.);
-  m_h_tag_lhsig_w     = new TH1F("tag_lhsig_w","lhSig: weight",120,-20.,40.);
-  m_h_tag_softm_w     = new TH1F("tag_softm_w","SoftMuonTag: weight",120,-20.,40.);
-  m_h_tag_softe_w     = new TH1F("tag_softe_w","SoftElectronTag: weight",120,-20.,40.);
-  m_h_tag_jetfitter_w     = new TH1F("tag_jetfitter_w","JetFitterTag: weight",120,-20.,40.);
-  m_h_tag_jetfitcomb_w     = new TH1F("tag_jetfitcomb_w","JetFitterCombTag: weight",120,-20.,40.);
-  m_h_tag_jetfitternn_w     = new TH1F("tag_jetfitternn_w","JetFitterTagNN: weight",120,-20.,40.);
-  m_h_tag_jetfitcombnn_w     = new TH1F("tag_jetfitcombnn_w","JetFitterCombTagNN: weight",120,-20.,40.);
-
-  //*
-  m_h_tag_i2d_of_w = new TH1F("tag_i2d_of_w","IP2D offline weight if selected by trigger",120,-20.,40.);
-  m_h_tag_i3d_of_w = new TH1F("tag_i3d_of_w","IP3D offline weight if selected by trigger",120,-20.,40.);
-  m_h_tag_i2d_l2_w = new TH1F("tag_i2d_l2_w","IP2D LVL2 weight",50,0.,1.);
-  m_h_tag_i3d_l2_w = new TH1F("tag_i3d_l2_w","IP3D LVL2 weight",50,0.,1.);
-  m_h_tag_i2d_ef_w = new TH1F("tag_i2d_ef_w","IP2D EF weight",50,0.,1.);
-  m_h_tag_i3d_ef_w = new TH1F("tag_i3d_ef_w","IP3D EF weight",50,0.,1.);
-
-  // plots as a function of jet ET
-  for(int i=0; i<6; ++i) {
-    TString htit(m_jetET[i]);
-    m_h_tag_IPinfo_ntrk[i] = new TH1F(htit+"_tag_IPinfo_ntrk",htit+" ntrk in jet",50,0.,50.);
-    m_h_tag_IPinfo_trkPt[i] = new TH1F(htit+"_tag_IPinfo_trkPt",htit+" trk Pt in jet",100,0.,25.);
-    m_h_tag_IPinfo_d0val[i] = new TH1F(htit+"_tag_IPinfo_d0val",htit+" IP info - d0val",120,-0.2,0.4);
-    m_h_tag_IPinfo_z0val[i] = new TH1F(htit+"_tag_IPinfo_z0val",htit+" IP info - z0val",120,-0.5,1.);
-    m_h_tag_IPinfo_d0sig[i] = new TH1F(htit+"_tag_IPinfo_d0sig",htit+" IP info - d0sig",100,-5.,5.);
-    m_h_tag_IPinfo_z0sig[i] = new TH1F(htit+"_tag_IPinfo_z0sig",htit+" IP info - z0sig",100,-5.,5.);
-    m_h_tag_IPinfo_weight2D[i] = new TH1F(htit+"_tag_IPinfo_weight2D",htit+" weight2D for each track",100,-2.,6.);
-    m_h_tag_IPinfo_weight3D[i] = new TH1F(htit+"_tag_IPinfo_weight3D",htit+" weight3D for each track",100,-2.,6.);
-    m_h_tag_IPinfo_piJP[i] = new TH1F(htit+"_tag_IPinfo_piJP",htit+" piJP for each track",100,-1.,1.);
-    m_h_tag_IPinfo_fromV0[i] = new TH1F(htit+"_tag_IPinfo_fromV0",htit+" fromV0: for each track",3,-1,2.);
-    m_h_tag_IPinfo_grade[i] = new TH1F(htit+"_tag_IPinfo_grade",htit+" grade: for each track",5,-1,4.);
-  }
-
-  int hnb[MAX_numTaggers];
-  float hmin[MAX_numTaggers], hmax[MAX_numTaggers];
-
-  for(int i=0;i<(MAX_numTaggers-MAX_numHLTTaggers)+2;i++) {
-    hnb[i] = 560;
-    hmin[i] = -20.;
-    hmax[i] = +50.;
-    if(m_tagger[i]=="JetProb") {
-      hnb[i] = 100;
-      hmin[i] = 0.;
-      hmax[i] = 10.;
-    }
-  }
-  for(int i=(MAX_numTaggers-MAX_numHLTTaggers)+2;i<MAX_numTaggers;i++) {
-    hnb[i] = 50;
-    hmin[i] = 0.;
-    hmax[i] = 1.;
-  }
-  for(int i=0;i<MAX_numTaggers;i++) {
-    TString tit("perf_"); tit+=m_tagger[i]; tit.ToLower();
-    TString htit(m_tagger[i]);
-    m_h_perf_b[i] = new TH1F(tit+"_b",htit+": weight for selected b jets",hnb[i],hmin[i],hmax[i]);
-    m_h_perf_u[i] = new TH1F(tit+"_u",htit+": weight for selected u jets",hnb[i],hmin[i],hmax[i]);
-    m_h_perf_upur[i] = new TH1F(tit+"_upur",htit+": weight for selected purified u jets",hnb[i],hmin[i],hmax[i]);
-    //
-    // hists for eff/rej as a function of ET/eta/phi
-    //
-    m_h_perf_b_ET[i] = new TH2F(tit+"_b_ET",htit+":weight for selected b jets v jet ET",50,0.,1000.,hnb[i],hmin[i],hmax[i]);
-    m_h_perf_b_eta[i] = new TH2F(tit+"_b_eta",htit+":weight for selected b jets v jet eta",50,-5.,5,hnb[i],hmin[i],hmax[i]);
-    m_h_perf_b_phi[i] = new TH2F(tit+"_b_phi",htit+":weight for selected b jets v jet phi",60,-3.15,3.15,hnb[i],hmin[i],hmax[i]);
-    m_h_perf_u_ET[i] = new TH2F(tit+"_u_ET",htit+":weight for selected u jets v jet ET",50,0.,1000.,hnb[i],hmin[i],hmax[i]);
-    m_h_perf_u_eta[i] = new TH2F(tit+"_u_eta",htit+":weight for selected u jets v jet eta",50,-5.,5.,hnb[i],hmin[i],hmax[i]);
-    m_h_perf_u_phi[i] = new TH2F(tit+"_u_phi",htit+":weight for selected u jets v jet phi",60,-3.15,3.15,hnb[i],hmin[i],hmax[i]);
-    m_h_perf_upur_ET[i] = new TH2F(tit+"_upur_ET",htit+":weight for selected u purified jets v jet ET",50,0.,1000.,hnb[i],hmin[i],hmax[i]);
-    m_h_perf_upur_eta[i] = new TH2F(tit+"_upur_eta",htit+":weight for selected u purified jets v jet eta",50,-5.,5.,hnb[i],hmin[i],hmax[i]);
-    m_h_perf_upur_phi[i] = new TH2F(tit+"_upur_phi",htit+":weight for selected u purified jets v jet phi",60,-3.15,3.15,hnb[i],hmin[i],hmax[i]);
-    //
-    TString tit1("eff_"); tit1+=m_tagger[i]; tit1.ToLower();
-    m_h_eff_b_ET[i] = new TH1F(tit1+"_b_ET",htit+":efficiency of 50% point for selected b jets v jet ET",50,0.,1000.);
-    m_h_eff_b_ET[i]->Sumw2();
-    m_h_eff_b_eta[i] = new TH1F(tit1+"_b_eta",htit+":efficiency of 50% point for selected b jets v jet eta",50,-5.,5.);
-    m_h_eff_b_eta[i]->Sumw2();
-    m_h_eff_b_phi[i] = new TH1F(tit1+"_b_phi",htit+":efficiency of 50% point for selected b jets v jet phi",60,-3.15,3.15);
-    m_h_eff_b_phi[i]->Sumw2();
-    TString tit2("rej_"); tit2+=m_tagger[i]; tit2.ToLower();
-    m_h_rej_u_ET[i] = new TH1F(tit2+"_u_ET",htit+":rejection of 50% point for selected u jets v jet ET",50,0.,1000.);
-    m_h_rej_u_ET[i]->Sumw2();
-    m_h_rej_u_eta[i] = new TH1F(tit2+"_u_eta",htit+":rejection of 50% point for selected u jets v jet eta",50,-5.,5.);
-    m_h_rej_u_eta[i]->Sumw2();
-    m_h_rej_u_phi[i] = new TH1F(tit2+"_u_phi",htit+":rejection of 50% point for selected u jets v jet phi",60,-3.15,3.15);
-    m_h_rej_u_phi[i]->Sumw2();
-    //
-    m_h_rej_upur_ET[i] = new TH1F(tit2+"_upur_ET",htit+":rejection of 50% point for selected u purified jets v jet ET",50,0.,1000.);
-    m_h_rej_upur_ET[i]->Sumw2();
-    m_h_rej_upur_eta[i] = new TH1F(tit2+"_upur_eta",htit+":rejection of 50% point for selected u purified jets v jet eta",50,-5.,5.);
-    m_h_rej_upur_eta[i]->Sumw2();
-    m_h_rej_upur_phi[i] = new TH1F(tit2+"_upur_phi",htit+":rejection of 50% point for selected u purified jets v jet phi",60,-3.15,3.15);
-    m_h_rej_upur_phi[i]->Sumw2();
-
-  }
-  m_h_perf_vxeff_den_b   = new TH1F("perf_vxeff_den_b","SV efficiency (den) b",1,0.,1.);
-  m_h_perf_vxeff_num_b  = new TH1F("perf_vxeff_num_b","SV efficiency (num) b",1,0.,1.);
-  m_h_perf_vxeff_den_u   = new TH1F("perf_vxeff_den_u","SV efficiency (den) u",1,0.,1.);
-  m_h_perf_vxeff_num_u  = new TH1F("perf_vxeff_num_u","SV efficiency (num) u",1,0.,1.);
-  m_h_perf_vxeff_den_upur   = new TH1F("perf_vxeff_den_upur","SV efficiency (den) u",1,0.,1.);
-  m_h_perf_vxeff_num_upur  = new TH1F("perf_vxeff_num_upur","SV efficiency (num) u",1,0.,1.);
-  m_h_perf_rej10      = new TH1F("perf_rej10","Rejections for soft muon/electron (eff=0.1)",15,0.,15.);
-  m_h_perf_rej10->Sumw2();
-  m_h_perf_rej50      = new TH1F("perf_rej50","Rejections (eff=0.5)",15,0.,15.);
-  m_h_perf_rej50->Sumw2();
-  m_h_perf_rej60      = new TH1F("perf_rej60","Rejections (eff=0.6)",15,0.,15.);
-  m_h_perf_rej60->Sumw2();
-  m_h_perf_rejpur10      = new TH1F("perf_rejpur10","Rejections for soft muon/electron (eff=0.1 purification)",15,0.,15.);
-  m_h_perf_rejpur10->Sumw2();
-  m_h_perf_rejpur50   = new TH1F("perf_rejpur50","Rejections (eff=0.5, purification)",15,0.,15.);
-  m_h_perf_rejpur50->Sumw2();
-  m_h_perf_rejpur60   = new TH1F("perf_rejpur60","Rejections (eff=0.6, purification)",15,0.,15.);
-  m_h_perf_rejpur60->Sumw2();
-  m_h_perfWt_10      = new TH1F("perfWt_10","Weight for which (eff=0.1) soft muon/electron",15,0.,15.);
-  m_h_perfWt_50      = new TH1F("perfWt_50","Weight for which (eff=0.5)",15,0.,15.);
-  m_h_perfWt_60      = new TH1F("perfWt_60","Weight for which (eff=0.6)",15,0.,15.);
-  m_h_perfWt_pur10   = new TH1F("perfWt_pur10","Weight for which (eff=0.1, purification) soft muon/electron",15,0.,15.);
-  m_h_perfWt_pur50   = new TH1F("perfWt_pur50","Weight for which (eff=0.5, purification)",15,0.,15.);
-  m_h_perfWt_pur60   = new TH1F("perfWt_pur60","Weight for which (eff=0.6, purification)",15,0.,15.);
-  std::string histDir("/fileJetTagAna/");
-  StatusCode sc;
-  sc = m_histos->regHist(histDir+"global_counters",m_h_global_counters);
-  sc = m_histos->regHist(histDir+"global_nprimvtx",m_h_global_nprimvtx);
-  sc = m_histos->regHist(histDir+"global_xprimvtx",m_h_global_xprimvtx);
-  sc = m_histos->regHist(histDir+"global_yprimvtx",m_h_global_yprimvtx);
-  sc = m_histos->regHist(histDir+"global_zprimvtx",m_h_global_zprimvtx);
-  sc = m_histos->regHist(histDir+"global_primvtxresx",m_h_global_primvtxresx);
-  sc = m_histos->regHist(histDir+"global_primvtxresy",m_h_global_primvtxresy);
-  sc = m_histos->regHist(histDir+"global_primvtxresz",m_h_global_primvtxresz);
-  sc = m_histos->regHist(histDir+"global_nmcpart", m_h_global_nmcpart);
-  sc = m_histos->regHist(histDir+"global_ntrkpart",m_h_global_ntrkpart);
-  sc = m_histos->regHist(histDir+"global_BLayerHits",m_h_global_BLayerHits);
-  sc = m_histos->regHist(histDir+"global_BLayerSharedHits",m_h_global_BLayerSharedHits);
-  sc = m_histos->regHist(histDir+"global_PixelHits",m_h_global_PixelHits);
-  sc = m_histos->regHist(histDir+"global_PixelLayers",m_h_global_PixelLayers);
-  sc = m_histos->regHist(histDir+"global_SiHits",m_h_global_SiHits);
-  sc = m_histos->regHist(histDir+"global_TRTHits",m_h_global_TRTHits);
-  sc = m_histos->regHist(histDir+"global_muon",m_h_global_nmuon);
-  sc = m_histos->regHist(histDir+"global_electron",m_h_global_nelectron);
-  sc = m_histos->regHist(histDir+"global_njettag", m_h_global_njettag);
-  sc = m_histos->regHist(histDir+"truth_bquark_nb",m_h_truth_bquark_nb);
-  sc = m_histos->regHist(histDir+"truth_bquark_pt",m_h_truth_bquark_pt);
-  sc = m_histos->regHist(histDir+"truth_bquark_eta",m_h_truth_bquark_eta);
-  sc = m_histos->regHist(histDir+"truth_cquark_nb",m_h_truth_cquark_nb);
-  sc = m_histos->regHist(histDir+"truth_cquark_pt",m_h_truth_cquark_pt);
-  sc = m_histos->regHist(histDir+"truth_cquark_eta",m_h_truth_cquark_eta);
-  sc = m_histos->regHist(histDir+"truth_bhadr_nb",m_h_truth_bhadr_nb);
-  sc = m_histos->regHist(histDir+"truth_bhadr_pt",m_h_truth_bhadr_pt);
-  sc = m_histos->regHist(histDir+"truth_bhadr_eta",m_h_truth_bhadr_eta);
-  sc = m_histos->regHist(histDir+"truth_chadr_nb",m_h_truth_chadr_nb);
-  sc = m_histos->regHist(histDir+"truth_chadr_pt",m_h_truth_chadr_pt);
-  sc = m_histos->regHist(histDir+"truth_chadr_eta",m_h_truth_chadr_eta);
-  sc = m_histos->regHist(histDir+"jet_ntotal", m_h_jet_ntotal);
-  sc = m_histos->regHist(histDir+"jet_label", m_h_jet_label);
-  sc = m_histos->regHist(histDir+"jet_nlabelb",m_h_jet_nlabelb);
-  sc = m_histos->regHist(histDir+"jet_nlabelc",m_h_jet_nlabelc);
-  sc = m_histos->regHist(histDir+"jet_nlabelt",m_h_jet_nlabelt);
-  sc = m_histos->regHist(histDir+"jet_ntag",   m_h_jet_ntag);
-  sc = m_histos->regHist(histDir+"jet_eta",    m_h_jet_eta);
-  sc = m_histos->regHist(histDir+"jet_ntracks",    m_h_jet_ntracks);
-  sc = m_histos->regHist(histDir+"jet_tracks_pt",    m_h_jet_tracks_pt);
-  sc = m_histos->regHist(histDir+"jet_nmuons",    m_h_jet_nmuons);
-  sc = m_histos->regHist(histDir+"jet_muons_pt",    m_h_jet_muons_pt);
-  sc = m_histos->regHist(histDir+"jet_nelectrons",    m_h_jet_nelectrons);
-  sc = m_histos->regHist(histDir+"jet_electrons_pt",    m_h_jet_electrons_pt);
-  sc = m_histos->regHist(histDir+"jet_phi",    m_h_jet_phi);
-  sc = m_histos->regHist(histDir+"jet_et",     m_h_jet_et);
-  sc = m_histos->regHist(histDir+"tag_jetprob_w",m_h_tag_jetprob_w);
-  sc = m_histos->regHist(histDir+"tag_i2d_w",m_h_tag_i2d_w);
-  sc = m_histos->regHist(histDir+"tag_i2d_n",m_h_tag_i2d_n);
-  sc = m_histos->regHist(histDir+"tag_i2d_b",m_h_tag_i2d_b);
-  sc = m_histos->regHist(histDir+"tag_i2d_u",m_h_tag_i2d_u);
-  sc = m_histos->regHist(histDir+"tag_i2d_sig",m_h_tag_i2d_sig);
-  sc = m_histos->regHist(histDir+"tag_i3d_w",m_h_tag_i3d_w);
-  sc = m_histos->regHist(histDir+"tag_i3d_n",m_h_tag_i3d_n);
-  sc = m_histos->regHist(histDir+"tag_i3d_b",m_h_tag_i3d_b);
-  sc = m_histos->regHist(histDir+"tag_i3d_u",m_h_tag_i3d_u);
-  sc = m_histos->regHist(histDir+"tag_i3d_sig",m_h_tag_i3d_sig);
-  sc = m_histos->regHist(histDir+"tag_sv1_w",m_h_tag_sv1_w);
-  sc = m_histos->regHist(histDir+"tag_sv1_b",m_h_tag_sv1_b);
-  sc = m_histos->regHist(histDir+"tag_sv1_u",m_h_tag_sv1_u);
-  sc = m_histos->regHist(histDir+"tag_sv_n",m_h_tag_sv_n);
-  sc = m_histos->regHist(histDir+"tag_sv_n2t",m_h_tag_sv_n2t);
-  sc = m_histos->regHist(histDir+"tag_sv_frc",m_h_tag_sv_frc);
-  sc = m_histos->regHist(histDir+"tag_sv_m",m_h_tag_sv_m);
-  sc = m_histos->regHist(histDir+"tag_sv2_w",m_h_tag_sv2_w);
-  sc = m_histos->regHist(histDir+"tag_sv2_b",m_h_tag_sv2_b);
-  sc = m_histos->regHist(histDir+"tag_sv2_u",m_h_tag_sv2_u);
-  sc = m_histos->regHist(histDir+"tag_cmb_w",m_h_tag_cmb_w);
-  sc = m_histos->regHist(histDir+"tag_lf2d_w",m_h_tag_lf2d_w);
-  sc = m_histos->regHist(histDir+"tag_svbu_w",m_h_tag_svbu_w);
-  sc = m_histos->regHist(histDir+"tag_lhsig_w",m_h_tag_lhsig_w);
-  sc = m_histos->regHist(histDir+"tag_softm_w",m_h_tag_softm_w);
-  sc = m_histos->regHist(histDir+"tag_jetfitter_w",m_h_tag_jetfitter_w);
-  sc = m_histos->regHist(histDir+"tag_jetfitcomb_w",m_h_tag_jetfitcomb_w);
-  sc = m_histos->regHist(histDir+"tag_jetfitternn_w",m_h_tag_jetfitternn_w);
-  sc = m_histos->regHist(histDir+"tag_jetfitcombnn_w",m_h_tag_jetfitcombnn_w);
-  //*
-  sc = m_histos->regHist(histDir+"tag_i2d_of_w",m_h_tag_i2d_of_w);
-  sc = m_histos->regHist(histDir+"tag_i3d_of_w",m_h_tag_i3d_of_w);
-  sc = m_histos->regHist(histDir+"tag_i2d_l2_w",m_h_tag_i2d_l2_w);
-  sc = m_histos->regHist(histDir+"tag_i3d_l2_w",m_h_tag_i3d_l2_w);
-  sc = m_histos->regHist(histDir+"tag_i2d_ef_w",m_h_tag_i2d_ef_w);
-  sc = m_histos->regHist(histDir+"tag_i3d_ef_w",m_h_tag_i3d_ef_w);
-  //
-  for(int i=0; i<6; ++i) {
-    TString htit(m_jetET[i]);
-    htit.ToLower();
-
-    std::string htit2 = htit.Data(); htit2+="_tag_IPinfo_ntrk";
-    sc = m_histos->regHist(histDir+htit2,m_h_tag_IPinfo_ntrk[i]);
-
-    htit2 = htit.Data(); htit2+="_tag_IPinfo_trkPt";
-    sc = m_histos->regHist(histDir+htit2,m_h_tag_IPinfo_trkPt[i]);
-
-    htit2 = htit.Data(); htit2+="_tag_IPinfo_d0val";
-    sc = m_histos->regHist(histDir+htit2,m_h_tag_IPinfo_d0val[i]);
-
-    htit2 = htit.Data(); htit2+="_tag_IPinfo_z0val";
-    sc = m_histos->regHist(histDir+htit2,m_h_tag_IPinfo_z0val[i]);
-
-    htit2 = htit.Data(); htit2+="_tag_IPinfo_d0sig";
-    sc = m_histos->regHist(histDir+htit2,m_h_tag_IPinfo_d0sig[i]);
-
-    htit2 = htit.Data(); htit2+="_tag_IPinfo_z0sig";
-    sc = m_histos->regHist(histDir+htit2,m_h_tag_IPinfo_z0sig[i]);
-
-    htit2 = htit.Data(); htit2+="_tag_IPinfo_weight2D";
-    sc = m_histos->regHist(histDir+htit2,m_h_tag_IPinfo_weight2D[i]);
-
-    htit2 = htit.Data(); htit2+="_tag_IPinfo_weight3D";
-    sc = m_histos->regHist(histDir+htit2,m_h_tag_IPinfo_weight3D[i]);
-
-    htit2 = htit.Data(); htit2+="_tag_IPinfo_piJP";
-    sc = m_histos->regHist(histDir+htit2,m_h_tag_IPinfo_piJP[i]);
-
-    htit2 = htit.Data(); htit2+="_tag_IPinfo_fromV0";
-    sc = m_histos->regHist(histDir+htit2,m_h_tag_IPinfo_fromV0[i]);
-
-    htit2 = htit.Data(); htit2+="_tag_IPinfo_grade";
-    sc = m_histos->regHist(histDir+htit2,m_h_tag_IPinfo_grade[i]);
-  }
-  //
-  for(int i=0;i<MAX_numTaggers;i++) {
-    TString tit("perf_"); tit+=m_tagger[i]; tit.ToLower();
-    std::string tit2 = tit.Data(); tit2+="_b";
-    sc = m_histos->regHist(histDir+tit2,m_h_perf_b[i]);
-    tit2 = tit.Data(); tit2+="_u";
-    sc = m_histos->regHist(histDir+tit2,m_h_perf_u[i]);
-    tit2 = tit.Data(); tit2+="_upur";
-    sc = m_histos->regHist(histDir+tit2,m_h_perf_upur[i]);
-    //
-    tit2=tit.Data(); tit2+="_b_ET";
-    sc = m_histos->regHist(histDir+tit2,m_h_perf_b_ET[i]);    
-    tit2=tit.Data(); tit2+="_b_eta";
-    sc = m_histos->regHist(histDir+tit2,m_h_perf_b_eta[i]);    
-    tit2=tit.Data(); tit2+="_b_phi";
-    sc = m_histos->regHist(histDir+tit2,m_h_perf_b_phi[i]);    
-    tit2=tit.Data(); tit2+="_u_ET";
-    sc = m_histos->regHist(histDir+tit2,m_h_perf_u_ET[i]);    
-    tit2=tit.Data(); tit2+="_u_eta";
-    sc = m_histos->regHist(histDir+tit2,m_h_perf_u_eta[i]);    
-    tit2=tit.Data(); tit2+="_u_phi";
-    sc = m_histos->regHist(histDir+tit2,m_h_perf_u_phi[i]);    
-    tit2=tit.Data(); tit2+="_upur_ET";
-    sc = m_histos->regHist(histDir+tit2,m_h_perf_upur_ET[i]);    
-    tit2=tit.Data(); tit2+="_upur_eta";
-    sc = m_histos->regHist(histDir+tit2,m_h_perf_upur_eta[i]);    
-    tit2=tit.Data(); tit2+="_upur_phi";
-    sc = m_histos->regHist(histDir+tit2,m_h_perf_upur_phi[i]);    
-    //
-    TString titA("eff_"); titA+=m_tagger[i]; titA.ToLower();
-    tit2=titA.Data(); tit2+="_b_ET";
-    sc = m_histos->regHist(histDir+tit2,m_h_eff_b_ET[i]);    
-    tit2=titA.Data(); tit2+="_b_eta";
-    sc = m_histos->regHist(histDir+tit2,m_h_eff_b_eta[i]);    
-    tit2=titA.Data(); tit2+="_b_phi";
-    sc = m_histos->regHist(histDir+tit2,m_h_eff_b_phi[i]);    
-    //
-    TString titB("rej_"); titB+=m_tagger[i]; titB.ToLower();
-    tit2=titB.Data(); tit2+="_u_ET";
-    sc = m_histos->regHist(histDir+tit2,m_h_rej_u_ET[i]);    
-    tit2=titB.Data(); tit2+="_u_eta";
-    sc = m_histos->regHist(histDir+tit2,m_h_rej_u_eta[i]);    
-    tit2=titB.Data(); tit2+="_u_phi";
-    sc = m_histos->regHist(histDir+tit2,m_h_rej_u_phi[i]);    
-    //
-    tit2=titB.Data(); tit2+="_upur_ET";
-    sc = m_histos->regHist(histDir+tit2,m_h_rej_upur_ET[i]);    
-    tit2=titB.Data(); tit2+="_upur_eta";
-    sc = m_histos->regHist(histDir+tit2,m_h_rej_upur_eta[i]);    
-    tit2=titB.Data(); tit2+="_upur_phi";
-    sc = m_histos->regHist(histDir+tit2,m_h_rej_upur_phi[i]);    
-
-  }
-  sc = m_histos->regHist(histDir+"perf_vxeff_den_b",m_h_perf_vxeff_den_b);
-  sc = m_histos->regHist(histDir+"perf_vxeff_num_b",m_h_perf_vxeff_num_b);
-  sc = m_histos->regHist(histDir+"perf_vxeff_den_u",m_h_perf_vxeff_den_u);
-  sc = m_histos->regHist(histDir+"perf_vxeff_num_u",m_h_perf_vxeff_num_u);
-  sc = m_histos->regHist(histDir+"perf_vxeff_den_upur",m_h_perf_vxeff_den_upur);
-  sc = m_histos->regHist(histDir+"perf_vxeff_num_upur",m_h_perf_vxeff_num_upur);
-  sc = m_histos->regHist(histDir+"perf_rej10",m_h_perf_rej10);
-  sc = m_histos->regHist(histDir+"perf_rej50",m_h_perf_rej50);
-  sc = m_histos->regHist(histDir+"perf_rej60",m_h_perf_rej60);
-  sc = m_histos->regHist(histDir+"perf_rejpur10",m_h_perf_rejpur10);
-  sc = m_histos->regHist(histDir+"perf_rejpur50",m_h_perf_rejpur50);
-  sc = m_histos->regHist(histDir+"perf_rejpur60",m_h_perf_rejpur60);
-  sc = m_histos->regHist(histDir+"perfWt_10",m_h_perfWt_10);
-  sc = m_histos->regHist(histDir+"perfWt_50",m_h_perfWt_50);
-  sc = m_histos->regHist(histDir+"perfWt_60",m_h_perfWt_60);
-  sc = m_histos->regHist(histDir+"perfWt_pur10",m_h_perfWt_pur10);
-  sc = m_histos->regHist(histDir+"perfWt_pur50",m_h_perfWt_pur50);
-  sc = m_histos->regHist(histDir+"perfWt_pur60",m_h_perfWt_pur60);
-  // in case a tuple is written
-  if (m_UseTupleSET) {
-    p_nt1 = new TTree("JetTagAna","JetTagAna");
-    std::string TreeDir("/fileJetTagAna/JetTagAna");
-    sc = m_histos->regTree(TreeDir,p_nt1);
-    if (sc.isSuccess()) {
-      p_nt1->Branch("event",        &m_ievt,         "event/I");
-      p_nt1->Branch("run",          &m_irun,         "run/I");
-      p_nt1->Branch("njetse",       &m_njet,         "njetse/I");
-      p_nt1->Branch("jetse_phi",    &m_jet_phi,      "jetse_phi[njetse]/F");
-      p_nt1->Branch("jetse_eta",    &m_jet_eta,      "jetse_eta[njetse]/F");
-      p_nt1->Branch("jetse_et",     &m_jet_et,       "jetse_et[njetse]/F");
-      p_nt1->Branch("jetse_flav",   &m_jet_flav,     "jetse_flav[njetse]/I");
-      p_nt1->Branch("jetse_isol",   &m_jet_isol,     "jetse_isol[njetse]/I");  
-      p_nt1->Branch("jetse_nelej",  &m_nelej,        "jetse_nelej[njetse]/I"); 
-      p_nt1->Branch("jetse_ntrackj",&m_ntrackj,      "jetse_ntrackj[njetse]/I");
-      p_nt1->Branch("jetse_nmuonj", &m_nmuonj,       "jetse_nmuonj[njetse]/I");
-      //
-      p_nt1->Branch("jetse_ne",   &m_jet_ne,   "jetse_ne[njetse]/I");
-      p_nt1->Branch("jetse_eptr", &m_jet_eptr, "jetse_eptr[njetse]/F");
-      p_nt1->Branch("jetse_ea0",  &m_jet_ea0,  "jetse_ea0[njetse]/F");
-      p_nt1->Branch("jetse_ept",  &m_jet_ept,  "jetse_ept[njetse]/F");
-      
-      p_nt1->Branch("jetse_ew",       &m_jet_ew,       "jetse_ew[njetse]/F");
-      p_nt1->Branch("jetse_elh",      &m_jet_elh,      "jetse_elh[njetse]/F");
-      p_nt1->Branch("jetse_eTrueEle", &m_jet_eTrueEle, "jetse_eTrueEle[njetse]/I");     
-      p_nt1->Branch("jetse_eAnyEle",  &m_jet_eAnyEle,  "jetse_eAnyEle[njetse]/I"); 
-      
-      p_nt1->Branch("jetse_epb", &m_jet_epb, "jetse_epb[njetse]/F");
-      p_nt1->Branch("jetse_epu", &m_jet_epu, "jetse_epu[njetse]/F");
-      
-      p_nt1->Branch("jetse_tag_pid",     &m_jet_tag_pid,     "jetse_tag_pid[njetse]/I");
-      p_nt1->Branch("jetse_tag_mothpid", &m_jet_tag_mothpid, "jetse_tag_mothpid[njetse]/I");
-      p_nt1->Branch("jetse_trackEle",    &m_jet_trackEle,    "jetse_trackEle[njetse][10]/I");
-      p_nt1->Branch("jetse_NtrackEle",   &m_jet_NtrackEle,   "jetse_NtrackEle[njetse]/I");
-      p_nt1->Branch("jetse_trackElept",  &m_jet_trackElept,  "jetse_trackElept[njetse][10]/F");
-      p_nt1->Branch("jetse_Ntrack",      &m_jet_Ntrack,      "jetse_Ntrack[njetse]/I");
-      p_nt1->Branch("jetse_sumptTrack",  &m_jet_sumptTrack,  "jetse_sumptTrack[njetse]/F");
-     
-    } 
-  }
-
-
-  if(sc.isFailure()) {}; 
-}
-
-bool JetTagAna::isBHadron(int pdg) {
-  int mpdg = abs(pdg);
-  return (   ( 500 < mpdg && mpdg < 599 )   ||
-           ( 10500 < mpdg && mpdg < 10599 ) ||
-           (  5000 < mpdg && mpdg < 5999  ) ||
-           ( 20500 < mpdg && mpdg < 20599 ) );
-}
-
-bool JetTagAna::isDHadron(int pdg) {
-  int mpdg = abs(pdg);
-  return (   ( 400 < mpdg && mpdg < 499 )   || 
-           ( 10400 < mpdg && mpdg < 10499 ) ||
-           (  4000 < mpdg && mpdg < 4999  ) ||
-           ( 20400 < mpdg && mpdg < 20499 ) );
-}
-
-bool JetTagAna::isLightJetIsolated(const CLHEP::HepLorentzVector& p) {
-  if(0==m_mcpartTES) return false;
-  TruthParticleContainer::const_iterator mcItr  = (*m_mcpartTES).begin();
-  TruthParticleContainer::const_iterator mcEnd = (*m_mcpartTES).end();
-  bool isol = true;
-  for (; mcItr != mcEnd; ++mcItr) {
-    if( p.deltaR( (*mcItr)->hlv() ) < m_purificationDeltaR ) {
-      int mpdg = abs( (*mcItr)->pdgId() );
-      if( this->isBHadron(mpdg) || 
-	  this->isDHadron(mpdg) ||
-	  5==mpdg ||
-	  4==mpdg ||
-	  15==mpdg ) {
-	isol = false;
-	break;
-      }
-    }
-  }
-  return isol;
-}
-
-bool JetTagAna::isJetFarFromHPTLepton(const Jet* jet) {
-  const Analysis::SoftLeptonTruthInfo* sltinfo = jet->tagInfo<Analysis::SoftLeptonTruthInfo>("SoftLeptonTruthInfo");
-  if (sltinfo) {
-    int nslt = sltinfo->numSLTrueInfo();
-    ATH_MSG_VERBOSE ( "SoftLeptonTruthInfo exists. Found " << nslt << " true leptons in jet" );
-    for (int islt = 0; islt < nslt; islt++) {
-      const Analysis::SLTrueInfo slt = sltinfo->getSLTrueInfo(islt);
-      ATH_MSG_VERBOSE ( "SLT info " << slt.pdgId() 
-                        << " " << slt.momentum().perp() 
-                        << " " << slt.FromB() << " " << slt.FromD() << " " << slt.FromGH()
-                        );
-      if ( (abs(slt.pdgId()) == 13 || abs(slt.pdgId()) == 11 || abs(slt.pdgId()) == 15 ) && // electron, muon or tau from direct decay of W/Z/H
-           !(slt.FromB()) &&
-           !(slt.FromD()) &&
-           (abs(slt.pdgIdMother())<100) && // not from light hadron decay-in-flight
-           slt.FromGH()
-           ) 
-        {
-          auto eigenv = AmgVector(3) { jet->hlv().x(), jet->hlv().y(), jet->hlv().z() };
-
-          if( slt.momentum().deltaR( eigenv ) < m_leptonPurificationDeltaR ) {
-            return false;
-          } 
-      }
-    }
-  }
-  else {
-    ATH_MSG_DEBUG("Could not find SoftLeptonTruthInfo: no lepton purification can be done." );
-  } 
-  return true;
-}
-
-// =========================================================
-double JetTagAna::getTotWeight(double epro,double ppro)
-{
- double totalpro;
-
- if ( ppro > 0. )
-   totalpro = epro/ppro;
- else if (epro > 0. ) // epro > 0 ppro == 0
-   totalpro = 1.0e20;
- else // both == 0
-   totalpro = 0.;
-
- // calculate log  (safely)
- if ( totalpro <= 1.0e-18 ) {
-    totalpro = -39.99;
- } else {
-    totalpro = log(totalpro);
- }
-
- return  totalpro;
-}
-
-// ===============================================
-const HepMC::GenParticle* JetTagAna::getTruth(const Rec::TrackParticle* myTrackParticle,
-					      const TrackParticleTruthCollection* mcpartTES,
-					      const Rec::TrackParticleContainer* trackTES)
-{
-  // this method returns a pointer to the truth particle linked to the given track
-
- const HepMC::GenParticle* GenPart =0;
-
- ElementLink<Rec::TrackParticleContainer> trackPrtlink;
- trackPrtlink.setElement(const_cast<Rec::TrackParticle*>(myTrackParticle));
- trackPrtlink.setStorableObject(*trackTES);
- //typedef std::map<ElementLink<Rec::TrackParticleContainer>,TrackParticleTruth> truthMap;
-
- std::map<Rec::TrackParticleTruthKey,TrackParticleTruth>::const_iterator tempTrackPrtTruthItr = mcpartTES->find(trackPrtlink);
-
- //truthMap::const_iterator tempTrackPrtTruthItr;
- //tempTrackPrtTruthItr = mcpartTES->find(trackPrtlink);
- if (tempTrackPrtTruthItr != mcpartTES->end())
- {
-   const HepMcParticleLink & temHepMcLink =
-                           (*tempTrackPrtTruthItr).second.particleLink();
-   long theBarcode=temHepMcLink.barcode();
-   if (theBarcode!=0) GenPart = temHepMcLink.cptr();
- }
-
-
-  return GenPart;
-}
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/JetTagAna.h b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/JetTagAna.h
deleted file mode 100644
index d0b20006ddc788ddbd194099f2b02229053e59e7..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/JetTagAna.h
+++ /dev/null
@@ -1,292 +0,0 @@
-/*
-  Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
-*/
-
-#ifndef ANALYSISEXAMPLES_JETTAGANA_H
-#define ANALYSISEXAMPLES_JETTAGANA_H
-
-/** @class JetTagAna is an example algorithm which shows how to access the
-   b-tagging information from JetTag objects.
-   @author Laurent Vacavant <vacavant@in2p3.fr>
-   Created: 2005/10/11
- */
-
-#include "AthenaBaseComps/AthAlgorithm.h"
-#include "CLHEP/Vector/LorentzVector.h"
-#include <string>
-#include <map>
-#include <vector>
-#include <sys/types.h>
-#include <TH1.h>
-#include <TH2.h>
-#include "GaudiKernel/ITHistSvc.h"
-
-#include "JetEvent/JetTagInfoBase.h"
-#include "ParticleTruth/TrackParticleTruth.h"
-#include "ParticleTruth/TrackParticleTruthCollection.h"
-#include "AtlasHepMC/GenParticle.h"
-#include "CLHEP/Vector/ThreeVector.h"
-#include "HepPDT/ParticleDataTable.hh"
-#include "egammaEvent/ElectronContainer.h"
-#include "egammaEvent/Electron.h"
-#include "egammaEvent/ElectronAssociation.h"
-
-//namespace Analysis { class JetTagInfoBase; }
-namespace Rec { class TrackParticleContainer; }
-namespace Analysis { class MuonContainer; }
-class TruthParticleContainer;
-class ITHistSvc;
-class TH1F;
-class Jet;
-
-static const int MAX_numTaggers = 28;
-static const int MAX_numHLTTaggers = 13;
-
-class JetTagAna : public AthAlgorithm {
-
- public:
-
-  JetTagAna(const std::string& name, ISvcLocator* pSvcLocator);
-  ~JetTagAna();
-
-  StatusCode initialize();
-  StatusCode finalize();
-  StatusCode execute();
-
-  // helper methods:
-  void bookHistograms();
-  void computeRejections();
-  /** @brief check soft electron **/
-  void checkSoftElectron(const std::vector<const JetTagInfoBase*> infoVector, uint iInfo, int ntotal, CLHEP::HepLorentzVector p4, int iflav, const Analysis::ElectronAssociation *ea, int ntrk_j, int nmu_j, const ElectronContainer* electronTDS, const TrackParticleTruthCollection* tpTruthColl);
-  /** @brief track quality for soft electrons **/
-  StatusCode checkTrackqualforSET(Rec::TrackParticleContainer::const_iterator trackItr,double *pT);
-  void getRej(TH1F* u, TH1F* b, 
-              double& r8 , double& e8 , double& w8, 
-              double& r50, double& e50, double& w50, 
-	      double& r60, double& e60, double& w60
-	      );
-  bool isBHadron(int pdg);
-  bool isDHadron(int pdg);
-  bool isLightJetIsolated(const CLHEP::HepLorentzVector& jet);
-  bool isJetFarFromHPTLepton(const Jet* jetItr); // to suppress influence of W/Z/H decay products
-
- private:
-
-  std::string m_tagger[MAX_numTaggers]; // tagger names; many histograms (below) have the same dimension
-  std::string m_jetET[6];
-
-  std::string m_particleJetContainerName;
-  std::string m_truthParticleContainerName;
-  std::string m_trackParticleContainerName;
-  std::string m_primaryVertexContainerName;
-  std::string m_mcEventContainerName;
-  /// name of the AOD electron container to retrieve from StoreGate
-  std::string m_electronContainerName;
- 
-  /// the AOD muon container to retrieve
-  std::string m_muonContainerName;
-
-  int m_selectBFromRun; // use only b-jets from this run number
-  int m_selectUFromRun; // ditto for light jets
-  double m_purificationDeltaR;       // light jets closer than this dR to b,c,tau are ignored
-  double m_leptonPurificationDeltaR; // jets closer than this dR to "high-pt" leptons are ignored
-
-  int m_irun;
-  int m_ievt;
-
-  const Rec::TrackParticleContainer*  m_trackTES;
-  const TruthParticleContainer*  m_mcpartTES;
-  std::vector<double> m_usedSignif;
-
-  ITHistSvc* m_histos; /// histo service
-  TH1F* m_h_global_counters;
-  TH1F* m_h_global_nprimvtx;
-  TH1F* m_h_global_xprimvtx;
-  TH1F* m_h_global_yprimvtx;
-  TH1F* m_h_global_zprimvtx;
-  TH1F* m_h_global_primvtxresx;
-  TH1F* m_h_global_primvtxresy;
-  TH1F* m_h_global_primvtxresz;
-  TH1F* m_h_global_nmcpart;
-  TH1F* m_h_global_ntrkpart;
-  TH1F* m_h_global_BLayerHits;
-  TH1F* m_h_global_BLayerSharedHits;
-  TH1F* m_h_global_PixelHits;
-  TH1F* m_h_global_PixelLayers;
-  TH1F* m_h_global_SiHits;
-  TH1F* m_h_global_TRTHits;
-  TH1F* m_h_global_nmuon;
-  TH1F* m_h_global_nelectron;
-  TH1F* m_h_global_njettag;
-  TH1F* m_h_truth_bquark_nb;
-  TH1F* m_h_truth_bquark_pt;
-  TH1F* m_h_truth_bquark_eta;
-  TH1F* m_h_truth_cquark_nb;
-  TH1F* m_h_truth_cquark_pt;
-  TH1F* m_h_truth_cquark_eta;
-  TH1F* m_h_truth_bhadr_nb;
-  TH1F* m_h_truth_bhadr_pt;
-  TH1F* m_h_truth_bhadr_eta;
-  TH1F* m_h_truth_chadr_nb;
-  TH1F* m_h_truth_chadr_pt;
-  TH1F* m_h_truth_chadr_eta;
-  TH1F* m_h_jet_ntotal;
-  TH1F* m_h_jet_label;
-  TH1F* m_h_jet_nlabelb;
-  TH1F* m_h_jet_nlabelc;
-  TH1F* m_h_jet_nlabelt;
-  TH1F* m_h_jet_ntag;
-  TH1F* m_h_jet_eta;
-  TH1F* m_h_jet_phi;
-  TH1F* m_h_jet_et;
-  TH1F* m_h_jet_ntracks;
-  TH1F* m_h_jet_tracks_pt;
-  TH1F* m_h_jet_tracks_BLayerHits;
-  TH1F* m_h_jet_tracks_PixelHits;
-  TH1F* m_h_jet_tracks_SCTHits; 
-  TH1F* m_h_jet_nmuons;
-  TH1F* m_h_jet_muons_pt;
-  TH1F* m_h_jet_nelectrons;
-  TH1F* m_h_jet_electrons_pt;
-  TH1F* m_h_tag_jetprob_w;
-  TH1F* m_h_tag_i2d_w;
-  TH1F* m_h_tag_i2d_n;
-  TH1F* m_h_tag_i2d_b;
-  TH1F* m_h_tag_i2d_u;
-  TH1F* m_h_tag_i2d_sig;
-  TH1F* m_h_tag_i3d_w;
-  TH1F* m_h_tag_i3d_n;
-  TH1F* m_h_tag_i3d_b;
-  TH1F* m_h_tag_i3d_u;
-  TH1F* m_h_tag_i3d_sig;
-  TH1F* m_h_tag_sv1_w;
-  TH1F* m_h_tag_sv1_b;
-  TH1F* m_h_tag_sv1_u;
-  TH1F* m_h_tag_sv2_w;
-  TH1F* m_h_tag_sv2_b;
-  TH1F* m_h_tag_sv2_u;
-  TH1F* m_h_tag_sv_n;
-  TH1F* m_h_tag_sv_n2t;
-  TH1F* m_h_tag_sv_frc;
-  TH1F* m_h_tag_sv_m;
-  TH1F* m_h_tag_cmb_w;
-  TH1F* m_h_tag_lf2d_w;
-  TH1F* m_h_tag_svbu_w;
-  TH1F* m_h_tag_lhsig_w;
-  TH1F* m_h_tag_softm_w;
-  TH1F* m_h_tag_softe_w;
-  TH1F* m_h_tag_jetfitter_w;
-  TH1F* m_h_tag_jetfitcomb_w;
-  TH1F* m_h_tag_jetfitternn_w;
-  TH1F* m_h_tag_jetfitcombnn_w;
-  //*
-  TH1F* m_h_tag_i2d_l2_w;
-  TH1F* m_h_tag_i3d_l2_w;
-  TH1F* m_h_tag_i2d_ef_w;
-  TH1F* m_h_tag_i3d_ef_w;
-  TH1F* m_h_tag_i2d_of_w;
-  TH1F* m_h_tag_i3d_of_w;
-
-  TH1F* m_h_tag_IPinfo_ntrk[6];
-  TH1F* m_h_tag_IPinfo_trkPt[6];
-  TH1F* m_h_tag_IPinfo_d0val[6];
-  TH1F* m_h_tag_IPinfo_z0val[6];
-  TH1F* m_h_tag_IPinfo_d0sig[6];
-  TH1F* m_h_tag_IPinfo_z0sig[6];
-  TH1F* m_h_tag_IPinfo_weight2D[6];
-  TH1F* m_h_tag_IPinfo_weight3D[6];
-  TH1F* m_h_tag_IPinfo_piJP[6]; 
-  TH1F* m_h_tag_IPinfo_fromV0[6];
-  TH1F* m_h_tag_IPinfo_grade[6];
-
-  TH1F* m_h_perf_b[MAX_numTaggers];
-  TH1F* m_h_perf_u[MAX_numTaggers];
-  TH1F* m_h_perf_upur[MAX_numTaggers];
-  TH1F* m_h_perf_vxeff_den_b;
-  TH1F* m_h_perf_vxeff_num_b;
-  TH1F* m_h_perf_vxeff_den_u;
-  TH1F* m_h_perf_vxeff_num_u;
-  TH1F* m_h_perf_vxeff_den_upur;
-  TH1F* m_h_perf_vxeff_num_upur;
-  TH1F* m_h_perf_rej10;
-  TH1F* m_h_perf_rej50;
-  TH1F* m_h_perf_rej60;
-  TH1F* m_h_perf_rejpur10;
-  TH1F* m_h_perf_rejpur50;
-  TH1F* m_h_perf_rejpur60;
-
-  // histograms containing the weights (for the different taggers) at the 10%, 50% and 60% eff. points
-  TH1F* m_h_perfWt_10;
-  TH1F* m_h_perfWt_50;
-  TH1F* m_h_perfWt_60;
-  TH1F* m_h_perfWt_pur10;
-  TH1F* m_h_perfWt_pur50;
-  TH1F* m_h_perfWt_pur60;
-
-  // histograms used to calculate eff/rej as a function of ET/eta/phi 
-  // for the various taggers. Same dimension as m_tagger[]
-  //
-  TH2F* m_h_perf_b_ET[MAX_numTaggers];
-  TH2F* m_h_perf_b_eta[MAX_numTaggers];
-  TH2F* m_h_perf_b_phi[MAX_numTaggers];
-
-  TH2F* m_h_perf_u_ET[MAX_numTaggers];
-  TH2F* m_h_perf_u_eta[MAX_numTaggers];
-  TH2F* m_h_perf_u_phi[MAX_numTaggers];
-
-  TH2F* m_h_perf_upur_ET[MAX_numTaggers];
-  TH2F* m_h_perf_upur_eta[MAX_numTaggers];
-  TH2F* m_h_perf_upur_phi[MAX_numTaggers];
-
-  TH1F* m_h_eff_b_ET[MAX_numTaggers];
-  TH1F* m_h_eff_b_eta[MAX_numTaggers];
-  TH1F* m_h_eff_b_phi[MAX_numTaggers];
-  TH1F* m_h_rej_u_ET[MAX_numTaggers];
-  TH1F* m_h_rej_u_eta[MAX_numTaggers];
-  TH1F* m_h_rej_u_phi[MAX_numTaggers];
-  TH1F* m_h_rej_upur_ET[MAX_numTaggers];
-  TH1F* m_h_rej_upur_eta[MAX_numTaggers];
-  TH1F* m_h_rej_upur_phi[MAX_numTaggers];
- 
-  std::string m_TPTruthContainer;
-  // tag ntuple variables:
-  static const int m_maxjet = 80;
- 
-  //int m_eventNumber;
-  //int m_runNumber;
-
-  //
-  int   m_njet;
-  float m_jet_phi[m_maxjet];
-  float m_jet_eta[m_maxjet];
-  float m_jet_et[m_maxjet]; 
-  int   m_jet_flav[m_maxjet];
-  int   m_jet_isol[m_maxjet];
-
-  int   m_jet_ne[m_maxjet];
-  int   m_jet_eTrueEle[m_maxjet],m_jet_eAnyEle[m_maxjet];
-  float m_jet_eptr[m_maxjet], m_jet_ept[m_maxjet], m_jet_ea0[m_maxjet];
-  float m_jet_ew[m_maxjet], m_jet_elh[m_maxjet],m_jet_epb[m_maxjet], m_jet_epu[m_maxjet];
-  
-  int m_jet_tag_pid[m_maxjet];
-  int m_jet_tag_mothpid[m_maxjet];
-  int m_jet_NtrackEle[m_maxjet];
-  float m_jet_trackElept[m_maxjet][10],m_jet_trackEle[m_maxjet][10];
-  int   m_jet_Ntrack[m_maxjet];
-  float m_jet_sumptTrack[m_maxjet];
-  int m_nelej[m_maxjet];
-  int m_ntrackj[m_maxjet];
-  int m_nmuonj[m_maxjet];
-  //
-  TTree* p_nt1;
-  bool m_UseTupleSET;
-
-  //
-  double getTotWeight(double,double);
-  const HepMC::GenParticle* getTruth(const Rec::TrackParticle* myTrackParticle,const TrackParticleTruthCollection* mcpartTES,const Rec::TrackParticleContainer* trackTES);
-  //const HepPDT::ParticleDataTable* m_particleTable;
-
-};
-
-#endif // ANALYSISEXAMPLES_JETTAGANA_H
-
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/MiscellaneousExamples.cxx b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/MiscellaneousExamples.cxx
deleted file mode 100644
index d227593bdfd3d2b2cbde1a3670890819c0060b83..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/MiscellaneousExamples.cxx
+++ /dev/null
@@ -1,435 +0,0 @@
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-/// head file
-#include "MiscellaneousExamples.h"
-
-#include "GaudiKernel/MsgStream.h"
-#include "GaudiKernel/IToolSvc.h"
-
-#include "StoreGate/StoreGateSvc.h"
-
-#include "Navigation/NavigationToken.h"
-
-/// the track object
-#include "Particle/TrackParticleContainer.h"
-
-/// ESD objects
-#include "CaloEvent/CaloClusterContainer.h"
-#include "egammaEvent/egammaContainer.h"
-#include "egammaEvent/ElectronContainer.h"
-#include "muonEvent/MuonContainer.h"
-
-/// composite particles
-#include "CompositeParticleEvent/CompositeParticle.h"
-
-/// analysis tools
-#include "AnalysisUtils/AnalysisCombination.h"
-
-#include <algorithm>
-#include <math.h>
-#include <functional>
-
-/// declare some helper functions
-bool selectJet(MiscellaneousExamples *self, const JetVect &jj);
-bool selectParticle(MiscellaneousExamples *self, const ParticleVect &jj);
-
-/// some user defined constants
-static const double mW         = 80.4*CLHEP::GeV;
-//static const double topMass    = 175.0*CLHEP::GeV;
-
-using namespace Analysis;
-
-//////////////////////////////////////////////////////////////////////////////////////
-/// Constructor
-
-MiscellaneousExamples::MiscellaneousExamples(const std::string& name,
-  ISvcLocator* pSvcLocator)
-  : AthAlgorithm(name, pSvcLocator),
-    m_analysisTools( "AnalysisTools", this ),
-    m_thistSvc(nullptr),
-    m_dataType(),
-    m_WjjContainer(nullptr),
-    m_histNumberOfJets(nullptr),
-    m_histJetPt(nullptr),
-    m_histSymLinkPt(nullptr),
-    m_histSymLinkMjj(nullptr),
-    m_histJetEta(nullptr),
-    m_histMjj(nullptr),
-    m_histDataType(nullptr),
-    m_histSumEtCell(nullptr),
-    m_histElecTrackP(nullptr),
-    m_histElecClusterE(nullptr),
-    m_histCombinedMuonTrackP(nullptr),
-    m_histExtrapolatedMuonTrackP(nullptr),
-    m_histMuonSpectrometerTrackP(nullptr),
-    m_histInDetTrackP(nullptr),
-    m_histMuonEnergyLoss(nullptr)
-{
-
-  /// switches to control the analysis through job options
-  declareProperty( "AnalysisTools", m_analysisTools );
-  declareProperty("JetContainerName", m_jetContainerName = "JetCollection");
-  declareProperty("ElectronContainerName", m_electronContainerName = "ElectronCollection");
-  declareProperty("MuonContainerName", m_muonContainerName = "MuonCollection");
-  declareProperty("EventWeight", m_eventWeight = 1.0);
-
-  /// the cuts - default values - to be modified in job options
-  declareProperty("JetPtCut", m_jetPtCut = 20.0*CLHEP::GeV);
-
-  /// selection for mjj
-  declareProperty("DeltaMjj", m_deltaMjj = 25.0*CLHEP::GeV);
-
-}
-
-/////////////////////////////////////////////////////////////////////////////////////
-/// Destructor - clean up memory allocation
-/// delete any memory allocated on the heap
-
-MiscellaneousExamples::~MiscellaneousExamples() {}
-
-////////////////////////////////////////////////////////////////////////////////////
-/// Initialize
-/// initialize StoreGate
-/// get a handle on the analysis tools
-/// book histograms
-
-StatusCode MiscellaneousExamples::initialize() {
-
-  ATH_MSG_INFO ( "Initializing MiscellaneousExamples" );
-
-  ATH_CHECK(  m_analysisTools.retrieve() );
-  ATH_CHECK( service("THistSvc", m_thistSvc) );
-
-  /// reconstructed jet histograms
-  m_histNumberOfJets   = new TH1F("NumberOfJets","N jets",20,0,20);
-  ATH_CHECK( m_thistSvc->regHist("/AANT/jets/NumberOfJets", m_histNumberOfJets) );
-  m_histJetPt          = new TH1F("jet_pt","pt of jet",100,0,500.*CLHEP::GeV);
-  ATH_CHECK( m_thistSvc->regHist("/AANT/jets/jet_pt", m_histJetPt) );
-  m_histJetEta         = new TH1F("jet_eta","eta of jet",100,-5.0,5.0);
-  ATH_CHECK( m_thistSvc->regHist("/AANT/jets", m_histJetEta) );
-  m_histMjj            = new TH1F("jj mass","mjj",80,0,160*CLHEP::GeV);
-  ATH_CHECK( m_thistSvc->regHist("/AANT/jets/jj mass", m_histMjj) );
-  m_histSumEtCell      = new TH1F("jet_et","Sum Et Cells",100,0,500.*CLHEP::GeV);
-  ATH_CHECK( m_thistSvc->regHist("/AANT/jets/jet_et", m_histSumEtCell) );
-  m_histSymLinkPt      = new TH1F("jetSimLink_pt","pt of symLinkjet",100,0,500.*CLHEP::GeV);
-  ATH_CHECK( m_thistSvc->regHist("/AANT/jets/jetSimLink_pt", m_histSymLinkPt) );
-  m_histSymLinkMjj    =  new TH1F("SimLink_Mjj","Mjj of symLinkjets",80,0,160.*CLHEP::GeV);
-  ATH_CHECK( m_thistSvc->regHist("/AANT/jets/SimLink_Mjj", m_histSymLinkMjj) );
-
-  m_histDataType     = new TH1F("data type","dataType",5,0,5);
-  ATH_CHECK( m_thistSvc->regHist("/AANT/general/data type", m_histDataType) );
-  m_histElecTrackP   = new TH1F("elecTrackP","e Track Momentum",100,0,500.*CLHEP::GeV);
-  ATH_CHECK( m_thistSvc->regHist("/AANT/ectrons/elecTrackP", m_histElecTrackP) );
-  m_histElecClusterE = new TH1F("elecClusterE","e Cluster Energy",100,0,500.*CLHEP::GeV);
-  ATH_CHECK( m_thistSvc->regHist("/AANT/electrons/elecClusterE", m_histElecClusterE) );
-
-  m_histCombinedMuonTrackP     = new TH1F("CombinedTrackP","CombinedTrackP",100,0,500.*CLHEP::GeV);
-  ATH_CHECK( m_thistSvc->regHist("/AANT/muons/CombinedTrackP", m_histCombinedMuonTrackP) );
-  m_histExtrapolatedMuonTrackP = new TH1F("ExtrapTrackP","ExtrapTrackP",100,0,500.*CLHEP::GeV);
-  ATH_CHECK( m_thistSvc->regHist("/AANT/muons/ExtrapTrackP", m_histExtrapolatedMuonTrackP) );
-  m_histMuonSpectrometerTrackP = new TH1F("MuonSpectroTrackP","MuonSpectroTrackP",100,0,500.*CLHEP::GeV);
-  ATH_CHECK( m_thistSvc->regHist("/AANT/muons/MuonSpectroTrackP", m_histMuonSpectrometerTrackP) );
-  m_histInDetTrackP            = new TH1F("InDetTrackP","InDetTrackP",100,0,500.*CLHEP::GeV);
-
-  ATH_CHECK( m_thistSvc->regHist("/AANT/muons/InDetTrackP", m_histInDetTrackP) );
-  m_histMuonEnergyLoss         = new TH1F("eLossCalo","eLossCalo",100,0,500.*CLHEP::GeV);
-  ATH_CHECK( m_thistSvc->regHist("/AANT/muons/eLossCalo", m_histMuonEnergyLoss) );
-  /// create the container of W->jj
-  m_WjjContainer = new IParticleContainer();
-
-  return StatusCode::SUCCESS;
-}		 
-
-///////////////////////////////////////////////////////////////////////////////////
-/// Finalize - delete any memory allocation from the heap
-
-StatusCode MiscellaneousExamples::finalize() {
-  return StatusCode::SUCCESS;
-}
-
-//////////////////////////////////////////////////////////////////////////////////
-/// Execute - on event by event
-
-StatusCode MiscellaneousExamples::execute() {
-
-  ATH_MSG_DEBUG ( "in execute()" );
-
-  ATH_CHECK( wjj() );
-  ATH_CHECK( trackParticleAndNavigation() );
-  ATH_CHECK( symLinkExample() );
-
-  return StatusCode::SUCCESS;
-}
-
-//////////////////////////////////////////////////////////////////////////////////
-/// wjj() for the reconstruction of W->jj
-/// called by execute() on event by event basis
-
-StatusCode MiscellaneousExamples::wjj() {
-
-  ATH_MSG_DEBUG ( "in wjj()" );
-
-  const JetCollection* jetTES=0;
-  StatusCode sc=evtStore()->retrieve( jetTES, m_jetContainerName);
-  if( sc.isFailure()  ||  !jetTES ) {
-    ATH_MSG_WARNING ( "No AOD jet container found in TDS "<<m_jetContainerName);
-    return StatusCode::SUCCESS;
-  }  
-  ATH_MSG_DEBUG ( "JetCollection successfully retrieved " << m_jetContainerName);
-  
-  /// number of jets in the jets AOD container
-  m_histNumberOfJets->Fill( 1.0*jetTES->size(), m_eventWeight );
-
-  /// the data type - the AOD knows how it was created
-  /// this should be executed only once during the entire run
-  if (jetTES->size() > 0) {
-    m_dataType = (*jetTES)[0]->dataType();
-    m_histDataType->Fill( 1.0*m_dataType, m_eventWeight );
-  }
-  
-  /// iterators over the container 
-  JetCollection::const_iterator jetItr  = jetTES->begin();
-  JetCollection::const_iterator jetItrE = jetTES->end();
- 
-  for (; jetItr != jetItrE; ++jetItr) {
-      m_histJetPt->Fill( (*jetItr)->pt(), m_eventWeight);
-      m_histJetEta->Fill( (*jetItr)->eta(), m_eventWeight);
-
-       /// constituent navigation
-       NavigationToken<CaloCell,double> cellToken;
-       (*jetItr)->fillToken(cellToken,double(1.));
-       NavigationToken<CaloCell, double>::const_iterator c =  cellToken.begin();
-       NavigationToken<CaloCell,double>::const_iterator cend = cellToken.end();
-       ATH_MSG_DEBUG ( "# cells found " << cellToken.size() );
-       double etSum = 0;
-       for(; c != cend; ++c) {
-         const CaloCell* thisCell = *c;
-         double weight = cellToken.getParameter(thisCell);
-         double et    = weight * thisCell->et();
-         etSum += et;
-       }
-       m_histSumEtCell->Fill(etSum, m_eventWeight);
-
-  }
-
-  /// m-jj variant mass with a selection on the jet transverse momenta
-  AnalysisUtils::Combination<const JetCollection> combination(jetTES,2);
-  JetVect jetPair;
-  while (combination.goodOnes(this, jetPair, selectJet)) {
-     CLHEP::HepLorentzVector jet1_fourMomentum = jetPair[0]->hlv();
-     CLHEP::HepLorentzVector jet2_fourMomentum = jetPair[1]->hlv();
-     double mjj = (jet1_fourMomentum+jet2_fourMomentum).m();
-     m_histMjj->Fill(mjj, m_eventWeight);
-  }
-
-  ATH_MSG_DEBUG ( "wjj() succeeded" );
-  		
-  return StatusCode::SUCCESS;
-}
-
-//////////////////////////////////////////////////////////////////////////////////
-/// TrackParticle and Navigation
-/// called by execute() on event by event basis
-
-StatusCode MiscellaneousExamples::trackParticleAndNavigation() {
-
-  ATH_MSG_DEBUG ( "in trackParticleAndNavigation()" );
-
-  const ElectronContainer* elecTES=0;
-  StatusCode sc=evtStore()->retrieve( elecTES, m_electronContainerName);
-  if( sc.isFailure()  ||  !elecTES ) {
-     ATH_MSG_WARNING ( "No AOD electron container found in TDS" );
-     return StatusCode::SUCCESS;
-  }  
-  ATH_MSG_DEBUG ( "ElectronContainer successfully retrieved" );
-
-  /// iterators over the container 
-  ElectronContainer::const_iterator elecItr  = elecTES->begin();
-  ElectronContainer::const_iterator elecItrE = elecTES->end();
- 
-  for (; elecItr != elecItrE; ++elecItr) {
-     const Rec::TrackParticle *trackParticle = (*elecItr)->trackParticle();
-     if (trackParticle != 0) { 
-         double trackP = trackParticle->p();
-        m_histElecTrackP->Fill(trackP, m_eventWeight);
-     }
-
-     /// back navigation to ESD to access Electron's egamma object
-     const CaloCluster *cluster = (*elecItr)->cluster();
-     if(cluster) { 
-        double et = (cluster)->et();
-        double eta = (cluster)->eta();
-        double clusterE = et*cosh(eta);
-        m_histElecClusterE->Fill(clusterE, m_eventWeight);
-     }
-  }
-
-  /// access to MuonTracks
-  const MuonContainer* muonTES=0;
-  sc=evtStore()->retrieve( muonTES, m_muonContainerName);
-  if( sc.isFailure()  ||  !muonTES ) {
-     ATH_MSG_WARNING ( "No AOD muon container found in TDS" );
-     return StatusCode::SUCCESS;
-  }  
-  ATH_MSG_DEBUG ( "MuonContainer successfully retrieved" );
-
-  /// iterators over the container 
-  MuonContainer::const_iterator muonItr  = muonTES->begin();
-  MuonContainer::const_iterator muonItrE = muonTES->end();
- 
-  for (; muonItr != muonItrE; ++muonItr) {
-
-     /// the combined muon spectrometer/InnerDet track
-     if ( (*muonItr)->hasCombinedMuonTrackParticle() ) {
-       const Rec::TrackParticle * combinedTrack = (*muonItr)->combinedMuonTrackParticle();
-       double trackP = combinedTrack->p();
-       m_histCombinedMuonTrackP->Fill(trackP, m_eventWeight);
-       ATH_MSG_DEBUG ( "CombinedMuon TrackParticle accessed" );
-     }
-
-     /// the muon extrapolated track to the vertex
-     if ( (*muonItr)->hasMuonExtrapolatedTrackParticle() ) {
-       const Rec::TrackParticle * extrapTrack = (*muonItr)->muonExtrapolatedTrackParticle();
-       double trackP = extrapTrack->p();
-       m_histExtrapolatedMuonTrackP->Fill(trackP, m_eventWeight);
-       ATH_MSG_DEBUG ( "Extrapolated Muon TrackParticle accessed" );
-       /** get the TrackParameters at the Muon Spectrometer */
-       const auto& trackParametersAtSpectrometer = extrapTrack->trackParameters().front();
-       //Trk::GlobalMomentum momentum = trackParametersAtSpectrometer->momentum();
-       trackP = trackParametersAtSpectrometer->momentum().mag();
-       m_histMuonSpectrometerTrackP->Fill(trackP, m_eventWeight);
-     }
-
-     /// the InnerDet track
-     if ( (*muonItr)->hasInDetTrackParticle() ) {
-       const Rec::TrackParticle * inDetTrack = (*muonItr)->inDetTrackParticle();
-       double trackP = inDetTrack->p();
-       m_histInDetTrackP->Fill(trackP, m_eventWeight);
-       ATH_MSG_DEBUG ( "Inner Detector TrackParticle accessed" );
-     }
-
-     std::pair<double, double> eloss_error = (*muonItr)->energyLoss();
-     double eLoss = eloss_error.first;
-     ///double error = eLoss_error.second;
-     m_histMuonEnergyLoss->Fill(eLoss, m_eventWeight);
-     ATH_MSG_DEBUG ( "Muon energy loss in Calo access" );
-  }
-
-  ATH_MSG_DEBUG ( "trackParticleAndNavigation() succeeded" );
-
-  return sc;
-}
-
-template<typename InContainer, typename SymContainer>
-StatusCode create_symlink(MsgStream& msg, 
-			  StoreGateSvc* sg, 
-			  const std::string& sgkey)
-{
-  const InContainer * in  = 0;
-  if ( !sg->retrieve (in, sgkey).isSuccess() ||
-       0 == in ) {
-    msg << MSG::ERROR<< "Could not retrieve " << ClassName<InContainer>::name() 
-	<< " at [" << sgkey << "] !!" << endmsg;
-    return StatusCode::FAILURE;
-  }
-
-  const SymContainer* sym = 0;
-  if ( sg->retrieve (sym, sgkey).isSuccess() ) {
-    // already symlinked...
-    return StatusCode::SUCCESS;
-  }
-
-  if ( !sg->symLink (in, sym).isSuccess() ) {
-    msg <<  "Could not create symlink to " << ClassName<SymContainer>::name()
-        << endmsg;
-    return StatusCode::FAILURE;
-  }
-
-  return StatusCode::SUCCESS;
-}
-
-
-//////////////////////////////////////////////////////////////////////////////////
-/// symLink and CompositeParticle example
-/// called by execute() on event by event basis
-
-StatusCode MiscellaneousExamples::symLinkExample() {
-
-  ATH_MSG_DEBUG ( "in symLinkExample()" );
-
-  /// make sure the W ->jj container is empty
-  m_WjjContainer->clear();
-
-  // FIXME: remove when proper JetCollection impl. available.
-  if ( !create_symlink<JetCollection,IParticleContainer>
-       (msg(), &*evtStore(), m_jetContainerName).isSuccess() ) {
-    ATH_MSG_WARNING ( "Could not create symLink for JetCollection ");
-    return StatusCode::FAILURE;
-  }
-
-  const IParticleContainer* iparticles = 0;
-  StatusCode sc=evtStore()->retrieve( iparticles, m_jetContainerName);
-  if( sc.isFailure()  ||  !iparticles ) {
-    ATH_MSG_WARNING
-      ( "No AOD jet container found in TDS as symLink "<<m_jetContainerName );
-     return StatusCode::SUCCESS;
-  }  
-  ATH_MSG_DEBUG ( "IParticleContainer successfully retrieved as symLink " << m_jetContainerName);
-
-  /// ease handwriting:
-  //typedef ElementLink<IParticleContainer>  IParticleLink_t;
-
-  /// iterators over the container 
-  IParticleContainer::const_iterator partItr  = iparticles->begin();
-  IParticleContainer::const_iterator partItrE = iparticles->end();
-
-  for (; partItr != partItrE; ++partItr) {
-    m_histSymLinkPt->Fill( (*partItr)->pt(), m_eventWeight);
-  }
-
-  /// m-jj variant mass with a selection on the jet transverse momenta
-  AnalysisUtils::Combination<const IParticleContainer> combination(iparticles,2);
-  ParticleVect jetPair;
-  while (combination.goodOnes(this, jetPair, selectParticle)) {
-
-     /// the composite particle - this snippet of code should be modified for 9.0.0 and above
-     /// due to a modification of the CompositeParticle class
-     CompositeParticle * Wjj = new CompositeParticle;
-     Wjj->add( INav4MomLink(&*jetPair[0], *iparticles),
-	       INav4MomLink(&*jetPair[1], *iparticles) );
-
-     Wjj->set_charge(1);            /// this is arbitrary :: I will assume W->lnu has charge=-1
-     Wjj->set_pdgId(PDG::W_plus);   /// this is arbitrary :: I will assume W_minus -> lnu
-     Wjj->set_dataType(m_dataType);
-     double massW = Wjj->m();
-     m_histSymLinkMjj->Fill(massW, m_eventWeight);
-     using std::abs;
-     if ( abs(massW-mW) < m_deltaMjj) m_WjjContainer->push_back(Wjj);	
-  }   
-
-  ATH_MSG_DEBUG ( "symLinkExample() succeeded" );
-
-  return sc;
-}
-
-
-/// jet selection function in jet-jet combination
-/// this function is a friend of MiscellaneousExamples
-
-bool selectJet(MiscellaneousExamples *self, const JetVect &jj) {
-  bool test = (jj[0]->pt() > self->m_jetPtCut) &&
-              (jj[1]->pt() > self->m_jetPtCut);
-  return (test);
-}
-
-/// jet selection function in jet-jet combination
-/// this function is a friend of MiscellaneousExamples
-
-bool selectParticle(MiscellaneousExamples *self, const ParticleVect &jj) {
-  bool test = (jj[0]->pt() > self->m_jetPtCut) &&
-              (jj[1]->pt() > self->m_jetPtCut);
-  return test;
-}
-
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/MiscellaneousExamples.h b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/MiscellaneousExamples.h
deleted file mode 100644
index e42b801be62ed28787d07b341705b446d06d0d8d..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/MiscellaneousExamples.h
+++ /dev/null
@@ -1,127 +0,0 @@
-/*
-  Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
-*/
-
-#ifndef MISCELLANEOUS_EXAMPLES_H
-#define MISCELLANEOUS_EXAMPLES_H
-/////////////////////////////////////////////////////////////////////////////////////////////////////
-/// Name    : MiscellaneousExamples.h
-/// Package : offline/PhysicsAnalysis/AnalysisCommon/AnalysisExamples
-/// Author  : Ketevi A. Assamagan
-/// Created : November 2004
-///
-/// DESCRIPTION:
-///
-/// This class contains a series of examples to show people how to:
-///  - access TrackParticle, thus track information
-///  - do constituent navigation
-///  - do back navigation to access information from the ESD
-///  - do symLink
-///  - do combinatorial loops+selections
-///  - to load the libraries for interactive analysis (MiscellaneousExamples_jobOptions.py)
-/// 
-/// Ketevi A. Assamagan on November 19, 2004
-///
-///////////////////////////////////////////////////////////////////////////////////////////////////////
-
-#include "GaudiKernel/ToolHandle.h"
-#include "NavFourMom/IParticleContainer.h"
-#include "ParticleEvent/ParticleBaseContainer.h"
-#include "JetEvent/JetCollection.h"
-
-#include "AthenaBaseComps/AthAlgorithm.h"
-#include "GaudiKernel/ObjectVector.h"
-#include "GaudiKernel/ITHistSvc.h"
-#include "CLHEP/Units/SystemOfUnits.h"
-
-#include "AnalysisTools/AnalysisTools.h"
-
-#include <string>
-#include "TH1.h"
-#include "TH2.h"
-
-typedef std::vector<const JetCollection::base_value_type*> JetVect;
-typedef std::vector<const IParticleContainer::base_value_type*> ParticleVect;
-
-class MiscellaneousExamples : public AthAlgorithm {
-
- public:
-
-   MiscellaneousExamples(const std::string& name, ISvcLocator* pSvcLocator);
-   ~MiscellaneousExamples();
-
-   StatusCode initialize();
-   StatusCode finalize();
-   StatusCode execute();
-
- private:
-
-   /// reconstruction of W->jj as composite objects
-   StatusCode wjj();
-
-   /// TrackParticle and Navigation
-   StatusCode trackParticleAndNavigation();
-
-   /// symLink example and CompositeParticle
-   StatusCode symLinkExample();
-
-   /// the selection functions are friends of the MiscellaneousExamples class
-   friend bool selectJet(MiscellaneousExamples *self, const JetVect &jj);
-   friend bool selectParticle(MiscellaneousExamples *self, const ParticleVect &jj);
-
-   /// get a handle to the tool helper
-   ToolHandle<AnalysisTools> m_analysisTools;
-
-   /// a handle on the Hist/TTree registration service
-   ITHistSvc * m_thistSvc;
-
-   /// the AOD jet container to retrieve
-   std::string m_jetContainerName;
-
-   /// the AOD electron container to retrieve
-   std::string m_electronContainerName;
-
-   /// the AOD muon container to retrieve
-   std::string m_muonContainerName;
-
-   /// Event weight
-   double m_eventWeight;
-
-   /// m_jj mass window cut
-   double m_deltaMjj;                   
-
-   /// the data type, fast or full simulation, test beam or real data
-   /// to be obtained from the AOD
-   ParticleDataType::DataType m_dataType;
-
-   /// container of W->jj
-   IParticleContainer * m_WjjContainer;
-
-   /// create the histograms
-   /// for jets
-   /// reconstructed quantities
-   TH1F* m_histNumberOfJets;
-   TH1F* m_histJetPt;
-   TH1F* m_histSymLinkPt;
-   TH1F* m_histSymLinkMjj;
-   TH1F* m_histJetEta;
-   TH1F* m_histMjj;
-   TH1F* m_histDataType;
-   TH1F* m_histSumEtCell;
-   TH1F* m_histElecTrackP;
-   TH1F* m_histElecClusterE;
-
-   TH1F* m_histCombinedMuonTrackP;
-   TH1F* m_histExtrapolatedMuonTrackP;
-   TH1F* m_histMuonSpectrometerTrackP;
-   TH1F* m_histInDetTrackP;
-   TH1F* m_histMuonEnergyLoss;
-   
-   /// the cuts - to be adjusted in the job options
-   /// jet Pt cut
-   double m_jetPtCut;
-
-};
-
-#endif // MISCELLANEOUS_EXAMPLES_H
-
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/PileUpTruthExample.cxx b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/PileUpTruthExample.cxx
deleted file mode 100644
index 7984e7646c94cca5476e6cbef5f3de4552fe0621..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/PileUpTruthExample.cxx
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
-  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
-*/
-
-// Author : Ketevi A. Assamagan
-// Date :   February 2008
-
-#include "PileUpTruthExample.h"
-
-#include "GeneratorObjects/McEventCollection.h"
-
-#include "GaudiKernel/DataSvc.h"
-#include "GaudiKernel/ISvcLocator.h"
-
-#include "TruthHelper/IsGenStable.h"
-#include "TruthHelper/PileUpType.h"
-
-using namespace TruthHelper;
-
-PileUpTruthExample::PileUpTruthExample(const std::string& name, ISvcLocator* pSvcLocator) :
-  AthAlgorithm(name, pSvcLocator)
-{
-  declareProperty("McEventKey",     m_key="TruthEvent");
-}
-
-PileUpTruthExample::~PileUpTruthExample() {}
-
-StatusCode PileUpTruthExample::initialize()
-{
-  return StatusCode::SUCCESS;
-}
-
-StatusCode PileUpTruthExample::finalize()
-{
-  return StatusCode::SUCCESS;
-}
-
-StatusCode PileUpTruthExample::execute() {
-
-  const McEventCollection* mcCollptr = 0;
-  ATH_CHECK (evtStore()->retrieve(mcCollptr, m_key));
-
-  /** initialize a pileup type helper object */ 
-  PileUpType pileupType( mcCollptr );
-
-  // signal
-  const HepMC::GenEvent * signal = pileupType.signal_mc_event();
-  auto prodVtx = HepMC::signal_process_vertex(signal);
-  if ( prodVtx ) {
-     double xi = (prodVtx->position()).x();
-     double yi = (prodVtx->position()).y();
-     double zi = (prodVtx->position()).z();
-
-     ATH_MSG_INFO ("signal vertex is " << xi << " " << yi << " " << zi
-                   << " process ID = " << HepMC::signal_process_id(signal)
-                   << " McEvent index = " << signal->event_number());
-  }
-
-  // in-time minbias
-  McEventCollection::const_iterator ibeg = pileupType.in_time_minimum_bias_event_begin();
-  McEventCollection::const_iterator iend = pileupType.in_time_minimum_bias_event_end();
-
-  for (; ibeg!=iend; ++ibeg ) {
-    auto prodVtx = HepMC::signal_process_vertex(*ibeg);
-    if ( prodVtx ) {
-       double xi = (prodVtx->position()).x();
-       double yi = (prodVtx->position()).y();
-       double zi = (prodVtx->position()).z();
-       ATH_MSG_INFO ("pileup vertex is " << xi << " " << yi << " " << zi
-                     << " process ID = " << (*ibeg)->signal_process_id()
-                     << " McEvent index = " << (*ibeg)->event_number());
-    }
-  }
-
-  /** retrieve the list of in-time generator-stable particles - signal and in-time minimum bias
-      particles in bunch crossing = 0 */
-  IsGenStable isStable;
-  std::vector<HepMC::ConstGenParticlePtr> particleList;
-  pileupType.in_time_particles( particleList, isStable );
-
-  /** now do something with the list */
-  ATH_MSG_INFO ("Number of stable in-time pileup particles = " << particleList.size());
-
-  /** As another example, retrieve the in-time McEvents and print their contents 
-      note that no selection is made in this case */
-  McEventCollection::const_iterator ibegin = pileupType.in_time_event_begin();
-  McEventCollection::const_iterator itend  = pileupType.in_time_event_end();
-  for (; ibegin != itend; ++ibegin ) {
-    (*ibegin)->print();
-  }
-  return StatusCode::SUCCESS;
-}
-
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/PileUpTruthExample.h b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/PileUpTruthExample.h
deleted file mode 100644
index 1d4b6597e2ce5bea6bfec8afeeea0583f31809ab..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/PileUpTruthExample.h
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
-  Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
-*/
-
-
-//*************************************************************************
-//*                                                                       *
-//*  class  PileUpTruthExample                                            *
-//*                                                                       *
-//*  Uses PileUpType to return iterators over the pileup-type particles:
-//*  - the  signal particles
-//*  - the  in-time Minimum bias particles
-//*  - the signal+in-time Minimum bias particles 
-//*  - the out-of-time Minimum bias particles
-//*  - the cavern background particles          
-//*  - the Selector function MUST have the operator() implemented - for example: isGenStable
-//*  
-// Author: Ketevi A. Assamagan <ketevi@bnl.gov>
-// Date: February 2008                                                    *
-//*************************************************************************
-
-#include "AthenaBaseComps/AthAlgorithm.h"
-
-class PileUpTruthExample : public AthAlgorithm {
-public:
-        PileUpTruthExample(const std::string& name, ISvcLocator* pSvcLocator);
-        ~PileUpTruthExample(); 
-        StatusCode initialize();
-        StatusCode execute();
-        StatusCode finalize();
-
-private:
-  // McEventCollection key 
-  std::string m_key; 
-
-};
-
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/ReadTruthParticles.cxx b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/ReadTruthParticles.cxx
deleted file mode 100644
index 75696dd6e152b6d90368311d95645a294da1013f..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/ReadTruthParticles.cxx
+++ /dev/null
@@ -1,161 +0,0 @@
-/*
-  Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
-*/
-
-/////////////////////////////////////////////////////////////////// 
-// ReadTruthParticles.cxx
-// Implementation file for class ReadTruthParticles
-// Author: S.Binet<binet@cern.ch>
-// Purpose : Show how one can read the TruthParticleContainer
-//           and use the mother/daughter links
-/////////////////////////////////////////////////////////////////// 
-
-// STL includes
-#include <algorithm>
-#include <math.h>
-#include <functional>
-
-// Framework includes
-#include "GaudiKernel/MsgStream.h"
-#include "GaudiKernel/IToolSvc.h"
-
-#include "StoreGate/DataHandle.h"
-
-#include "AtlasHepMC/GenEvent.h"
-#include "AtlasHepMC/GenVertex.h"
-#include "AtlasHepMC/GenParticle.h"
-
-#include "McParticleKernel/ITruthParticleCnvTool.h"
-
-#include "McParticleEvent/TruthParticle.h"
-#include "McParticleEvent/TruthParticleContainer.h"
-
-#include "ReadTruthParticles.h"
-
-
-//////////////////////////////////////////////////////////////////////////////////////
-/// Constructor
-
-ReadTruthParticles::ReadTruthParticles(const std::string& name,
-                                       ISvcLocator* pSvcLocator) : 
-  AthAlgorithm(name, pSvcLocator),
-  m_cnvTool( )
-{
-  /// switches to control the analysis through job options
-
-  declareProperty("TruthParticles", m_truthParticlesName = "SpclMC");
-  declareProperty("ConvertFromESD", m_fromESD = false );
-
-  declareProperty( "CnvTool",
-		   m_cnvTool = CnvTool_t( "TruthParticleCnvTool/CnvTool", 
-					  this ),
-		   "Handle to the tool converting a McEventCollection into a "
-		   "TruthParticleContainer" );
-}
-
-
-
-/////////////////////////////////////////////////////////////////////////////////////
-/// Destructor - clean up memory allocation
-/// delete any memory allocated on the heap
-
-ReadTruthParticles::~ReadTruthParticles() {}
-
-////////////////////////////////////////////////////////////////////////////////////
-/// Initialize
-/// initialize StoreGate
-
-StatusCode ReadTruthParticles::initialize() 
-{
-
-  msg() << MSG::INFO
-        << "Initializing ReadTruthParticles"
-        << endmsg;
-
-  // retrieve the storegate service
-  if ( !evtStore().retrieve().isSuccess() ) {
-    msg() << MSG::ERROR
-          << "Unable to retrieve pointer to StoreGateSvc"
-          << endmsg;
-     return StatusCode::FAILURE;
-  }
-
-  // retrieve the truthparticle converter tool
-  if ( !m_cnvTool.retrieve().isSuccess() ) {
-    msg() << MSG::ERROR
-          << "Could not retrieve the truth particle converter tool !!"
-          << endmsg;
-    return StatusCode::FAILURE;
-  }
- 
-  return StatusCode::SUCCESS;
-}		 
-
-///////////////////////////////////////////////////////////////////////////////////
-/// Finalize - delete any memory allocation from the heap
-
-StatusCode ReadTruthParticles::finalize() 
-{
-  return StatusCode::SUCCESS;
-
-}
-
-//////////////////////////////////////////////////////////////////////////////////
-/// Execute - on event by event
-
-StatusCode ReadTruthParticles::execute() 
-{
-  ATH_MSG_DEBUG("execute()");
-
-  /** convert on the fly TruthEvent from the ESD to TruthParticleContainer - Ketevi */
-  if ( m_fromESD ) { 
-     if ( m_cnvTool->execute().isFailure() ) {
-       ATH_MSG_INFO("Could not convert the McEventCollection into "
-                    << "a TruthParticleContainer !");
-        return StatusCode::SUCCESS;
-     }
-  }
-
-  /** Read the TruthParticleContainer. Either the one converted from ESD TruthEvent
-     as indicated above or the default one provided by the AOD where the conversion
-     is done from GEN_AOD. -Ketevi */
-  const TruthParticleContainer * mcParts = 0;
-  if (!evtStore()->retrieve( mcParts, m_truthParticlesName ).isSuccess() ||
-      0 == mcParts ) {
-    ATH_MSG_INFO("Could not retrieve TruthParticleContainer at : "
-                 << m_truthParticlesName);
-    return StatusCode::SUCCESS;
-  }
-
-  unsigned int iPart = 0;
-  for ( TruthParticleContainer::const_iterator itr = mcParts->begin();
-	itr != mcParts->end();
-	++itr, ++iPart ) {
-    ATH_MSG_INFO("Part " << iPart
-                 << " PDG-ID: " << (*itr)->pdgId()
-                 << " nChildren: " << (*itr)->nDecay()
-                 << " status: " << (*itr)->genParticle()->status()
-                 << " bc: " << HepMC::barcode((*itr)->genParticle()));
-    for ( unsigned int iChild = 0; iChild != (*itr)->nDecay(); ++iChild ){
-      const TruthParticle * child = (*itr)->child( iChild );
-      if ( 0 != child ) {
-        if (msgLvl(MSG::INFO)) {
-          msg(MSG::INFO)
-            << "\tchild: " << iChild
-            << "\tPDGID: " << child->pdgId()
-            << " status: " << child->genParticle()->status()
-            << " bc: "     << HepMC::barcode(child->genParticle())
-            << " bc Parents: " << child->nParents() << " [ ";
-          for ( unsigned int iMoth = 0; iMoth != child->nParents(); ++iMoth ) {
-            msg(MSG::INFO) << HepMC::barcode(child->genMother(iMoth)) << " ";
-          }
-          msg(MSG::INFO) << "]" << endmsg;
-        }
-      } else {
-        msg(MSG::WARNING) << "Wrong pointer to child !!" << endmsg;
-      }
-    }//> loop over children
-  }//> end loop over TruthParticles
-
-  return StatusCode::SUCCESS;
-}
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/ReadTruthParticles.h b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/ReadTruthParticles.h
deleted file mode 100644
index ea1ecff5738a6095db4d9e652ddd0d1b73fef5e0..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/ReadTruthParticles.h
+++ /dev/null
@@ -1,99 +0,0 @@
-///////////////////////// -*- C++ -*- /////////////////////////////
-
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-// ReadTruthParticles.h 
-// Header file for class ReadTruthParticles
-// Author: S.Binet<binet@cern.ch>
-/////////////////////////////////////////////////////////////////// 
-#ifndef ANALYSISEXAMPLES_READTRUTHPARTICLES_H 
-#define ANALYSISEXAMPLES_READTRUTHPARTICLES_H 
-
-/** @class ReadTruthParticles is an example algorithm which exercises the 
- *  TruthParticle interface. It shows how to navigate back to the 
- *  HepMC::GenParticles and retrieve the children (in a HepMC format) as well 
- *  as how to retrieve the (TruthParticle) children of a TruthParticle.
- *  The Algorithm first retrieves a container of TruthParticle, loop over its 
- *  elements and for each of them asks for its HepMC::GenParticle creator (and 
- *  use the GenParticle methods).
- *  Then, for each TruthParticle, it loops over its children in a TruthParticle
- *  format.
- */
-
-// STL includes
-#include <string>
-
-// FrameWork includes
-#include "AthenaBaseComps/AthAlgorithm.h"
-#include "GaudiKernel/ToolHandle.h"
-#include "GaudiKernel/ServiceHandle.h"
-
-// Forward declaration
-class StoreGateSvc;
-class ITruthParticleCnvTool;
-
-class ReadTruthParticles : public AthAlgorithm
-{ 
-
-  /////////////////////////////////////////////////////////////////// 
-  // Public methods: 
-  /////////////////////////////////////////////////////////////////// 
- public: 
-
-  // Copy constructor: 
-
-  /// Constructor with parameters: 
-  ReadTruthParticles( const std::string& name, ISvcLocator* pSvcLocator );
-
-  /// Destructor: 
-  virtual ~ReadTruthParticles(); 
-
-  // Assignment operator: 
-  //ReadTruthParticles &operator=(const ReadTruthParticles &alg); 
-
-  // Athena algorithm's Hooks
-  virtual StatusCode  initialize();
-  virtual StatusCode  execute();
-  virtual StatusCode  finalize();
-
-  /////////////////////////////////////////////////////////////////// 
-  // Const methods: 
-  ///////////////////////////////////////////////////////////////////
-
-  /////////////////////////////////////////////////////////////////// 
-  // Non-const methods: 
-  /////////////////////////////////////////////////////////////////// 
-
-  /////////////////////////////////////////////////////////////////// 
-  // Protected data: 
-  /////////////////////////////////////////////////////////////////// 
- protected: 
-
-  /// Default constructor: 
-  ReadTruthParticles();
-
-  // Containers
-
-  /// name of the AOD truth particle container to retrieve from Store Gate
-  std::string m_truthParticlesName;
-
-  typedef ToolHandle<ITruthParticleCnvTool> CnvTool_t;
-  /** converter tool McEventCollection -> TruthParticle */
-  CnvTool_t m_cnvTool;
-  
-  /** switch to build TruthParticles from ESD TruthEvent */
-  bool m_fromESD;
-
-}; 
-
-/// I/O operators
-//////////////////////
-
-/////////////////////////////////////////////////////////////////// 
-// Inline methods: 
-/////////////////////////////////////////////////////////////////// 
-
-#endif //> ANALYSISEXAMPLES_READTRUTHPARTICLES_H
-
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/TrackExample.cxx b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/TrackExample.cxx
deleted file mode 100644
index 9ee711a23f246862d0cc53133274865017d12b88..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/TrackExample.cxx
+++ /dev/null
@@ -1,208 +0,0 @@
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-// $Id: TrackExample.cxx 637956 2015-01-07 05:11:27Z ssnyder $
-
-// ROOT include(s):
-#include <TH1.h>
-
-// EDM include(s):
-#include "xAODTracking/TrackParticleContainer.h"
-
-/// Local include(s):
-#include "TrackExample.h"
-
-TrackExample::TrackExample( const std::string& name,
-                            ISvcLocator* pSvcLocator)
-   : AthHistogramAlgorithm( name, pSvcLocator ), 
-     m_analysisTools( "AnalysisTools", this ),  
-     m_isolationTool( "xAOD::TrackIsolationTool/TrackIsolationTool", this ),
-     m_histNumberOfTrackParticles(0),
-     m_histQoverP(0),
-     m_histP(0),
-     m_histQ(0),
-     m_histEta(0),
-     m_histPhi(0),
-     m_histIsolationP(0),
-     m_histOriginType(0),
-     m_histVertexX(0),
-     m_histVertexY(0),
-     m_histVertexZ(0),
-     m_histd0wrtPrimVtx(0),
-     m_histz0wrtPrimVtx(0)
-{
-
-   /// switches to control the analysis through job options
-   declareProperty( "AnalysisTools", m_analysisTools );
-   declareProperty( "TrackIsolationTool", m_isolationTool );
-   declareProperty( "TrackParticleContainerName",
-                    m_trackParticleContainerName = "InDetTrackParticles" );
-   declareProperty( "EventWeight", m_eventWeight = 1.0 );
-   declareProperty( "TrackMomentumCut", m_trackPCut = 500.0 );
-   declareProperty( "TrackIsolationCone",
-                    m_isolationCone = xAOD::Iso::ptcone40 );
-}
-
-StatusCode TrackExample::initialize() {
-
-   ATH_MSG_INFO( "Initialising - Package version: " << PACKAGE_VERSION );
-
-   // Retrieve the needed services:
-   ATH_CHECK( m_analysisTools.retrieve() );
-   ATH_CHECK( m_isolationTool.retrieve() );
-
-   //
-   // Book the histograms:
-   //
-   m_histNumberOfTrackParticles =
-      bookGetPointer( TH1F( "nTrackParticles", "Number of TrackParticles",
-                            100, 0.0, 100.0 ), "TrackParticles" );
-   m_histQoverP =
-      bookGetPointer( TH1F( "QoverP", "QoverP", 100, -1.0e-2, 1.0e-2 ),
-                      "TrackParticles" );
-   m_histP =
-      bookGetPointer( TH1F( "TrackP", "TrackP", 100, 0.0, 500000.0 ),
-                      "TrackParticles" );
-   m_histQ =
-      bookGetPointer( TH1F( "TrackQ", "TrackQ", 100, -2.0, 2.0 ),
-                      "TrackParticles" );
-   m_histEta =
-      bookGetPointer( TH1F( "TrackEta", "TrackEta", 100, -3.2, 3.2 ),
-                      "TrackParticles" );
-   m_histPhi =
-      bookGetPointer( TH1F( "TrackPhi", "TrackPhi", 100, -3.2, 3.2 ),
-                      "TrackParticles" );
-
-   m_histVertexX =
-      bookGetPointer( TH1F( "VertexX", "VertexX", 100, -0.5, 0.5 ),
-                      "TrackParticles" );
-   m_histVertexY =
-      bookGetPointer( TH1F( "VertexY", "VertexY", 100, -0.5, 0.5 ),
-                      "TrackParticles" );
-   m_histVertexZ =
-      bookGetPointer( TH1F( "VertexZ","VertexZ", 100, -50.0, 50.0 ),
-                      "TrackParticles" );
-
-   m_histd0wrtPrimVtx =
-      bookGetPointer( TH1F( "d0wrtPrimVtx", "d0wrtPrimVtx", 100,
-                            -20.0, 20.0 ), "TrackParticles" );
-   m_histz0wrtPrimVtx =
-      bookGetPointer( TH1F( "z0wrtPrimVtx", "z0wrtPrimVtx", 100,
-                            -20.0, 20.0 ), "TrackParticles" );
-
-   m_histOriginType =
-      bookGetPointer( TH1F( "OriginType", "OriginType", 10, 0, 10 ),
-                      "TrackParticles" );
-   m_histIsolationP =
-      bookGetPointer( TH1F( "IsolationP", "IsolationP", 100, 0, 100000.0 ),
-                      "TrackParticles" );
-
-   // Return gracefully:
-   return StatusCode::SUCCESS;
-}		 
-
-StatusCode TrackExample::execute() {
-
-   ATH_MSG_DEBUG( "Executing..." );
-
-   // Retrieve the track particles:
-   const xAOD::TrackParticleContainer* trackTES = 0;
-   if( evtStore()->retrieve( trackTES,
-                             m_trackParticleContainerName ).isFailure() ) {
-      ATH_MSG_WARNING( "No TrackParticle container found in TDS" );
-      return StatusCode::SUCCESS;
-   }
-   ATH_MSG_DEBUG( "TrackParticleContainer successfully retrieved" );
-
-   /// number of TrackParticles in the container
-   m_histNumberOfTrackParticles->Fill( 1.0 * trackTES->size(), m_eventWeight );
-
-   /// iterators over the container 
-   xAOD::TrackParticleContainer::const_iterator trackItr  = trackTES->begin();
-   xAOD::TrackParticleContainer::const_iterator trackItrE = trackTES->end();
-
-   // Loop over the container:
-   for( ; trackItr != trackItrE; ++trackItr ) {
-
-      /// this TrackParticle
-      const xAOD::TrackParticle* trackParticle = ( *trackItr );
-
-      /// track momentum and charge sign
-      m_histP->Fill( trackParticle->p4().P(), m_eventWeight );
-      m_histEta->Fill( trackParticle->eta(), m_eventWeight );
-      m_histPhi->Fill( trackParticle->phi(), m_eventWeight );
-      m_histQ->Fill( trackParticle->charge(), m_eventWeight );
-      m_histQoverP->Fill( trackParticle->qOverP(), m_eventWeight);
-
-      /// track vertex position
-      m_histVertexX->Fill( trackParticle->vx(), m_eventWeight );
-      m_histVertexY->Fill( trackParticle->vy(), m_eventWeight );
-      m_histVertexZ->Fill( trackParticle->vz(), m_eventWeight );
-
-      m_histOriginType->Fill( 1.0 * trackParticle->particleHypothesis(),
-                              m_eventWeight );
-
-      /// do track isolation - use the track isolation tools
-      const std::vector< xAOD::Iso::IsolationType > isoType = {
-         static_cast< xAOD::Iso::IsolationType >( m_isolationCone )
-      };
-      xAOD::TrackCorrection corrlist;
-      corrlist.trackbitset.set(static_cast<unsigned int>(xAOD::Iso::IsolationTrackCorrection::coreTrackPtr));
-      xAOD::TrackIsolation isol;
-      if( ! m_isolationTool->trackIsolation( isol, *trackParticle, isoType, corrlist ) ) {
-         ATH_MSG_WARNING( "Unable to execute track isolation calculation" );
-         isol.ptcones.push_back( 0.0 );
-      }
-
-      m_histIsolationP->Fill( isol.ptcones[ 0 ], m_eventWeight );
-      ATH_MSG_DEBUG( "Track Isolation Momentum = " << isol.ptcones[ 0 ]
-                     << " MeV" );
-
-      /// access to the perigee parameters
-      const Trk::Perigee& perigee = trackParticle->perigeeParameters();
-      const auto& parameters = perigee.parameters();
-      ATH_MSG_DEBUG( "Trk::Perigee parameters:" );
-      ATH_MSG_DEBUG( " * d_0   : "<< parameters[ Trk::d0 ] );
-      ATH_MSG_DEBUG( " * z_0   : "<< parameters[ Trk::z0 ] );
-      ATH_MSG_DEBUG( " * phi   : "<< parameters[ Trk::phi ] );
-      ATH_MSG_DEBUG( " * Theta : "<< parameters[ Trk::theta ] );
-      ATH_MSG_DEBUG( " * q/p   : "<< parameters[ Trk::qOverP ] );
-
-      /// access to TrackSummary information
-      ATH_MSG_DEBUG( "Track summary information:" );
-      uint8_t ival = 0;
-      if( trackParticle->summaryValue( ival, xAOD::numberOfInnermostPixelLayerHits ) ) {
-         ATH_MSG_DEBUG( " * Number of B layer hits : " << ( int ) ival );
-      }
-      if( trackParticle->summaryValue( ival, xAOD::numberOfPixelHits ) ) {
-         ATH_MSG_DEBUG( " * Number of pixel hits : " << ( int ) ival );
-      }
-      if( trackParticle->summaryValue( ival, xAOD::numberOfPixelHoles ) ) {
-         ATH_MSG_DEBUG( " * Number of pixel holes : " << ( int ) ival );
-      }
-      if( trackParticle->summaryValue( ival, xAOD::numberOfSCTHits ) ) {
-         ATH_MSG_DEBUG( " * Number of SCT hits : " << ( int ) ival );
-      }
-      if( trackParticle->summaryValue( ival, xAOD::numberOfSCTHoles ) ) {
-         ATH_MSG_DEBUG( " * Number of SCT holes : " << ( int ) ival );
-      }
-      if( trackParticle->summaryValue( ival, xAOD::numberOfTRTHits ) ) {
-         ATH_MSG_DEBUG( " * Number of TRT hits : " << ( int ) ival );
-      }
-      if( trackParticle->summaryValue( ival,
-                                       xAOD::numberOfTRTHighThresholdHits ) ) {
-         ATH_MSG_DEBUG( " * Number of TRT high threshold hits : "
-                        << ( int ) ival );
-      }
-      if( trackParticle->summaryValue( ival, xAOD::numberOfTRTHoles ) ) {
-         ATH_MSG_DEBUG( " * Number of TRT holes : " << ( int ) ival );
-      }
-      if( trackParticle->summaryValue( ival, xAOD::numberOfPrecisionLayers ) ) {
-         ATH_MSG_DEBUG( " * Number of MDT hits : " << ( int ) ival );
-      }
-   }
-
-   // Return gracefully:
-   return StatusCode::SUCCESS;
-}
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/TrackExample.h b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/TrackExample.h
deleted file mode 100644
index 0f87a80518968ae45e917d70370bfd9de7021d41..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/TrackExample.h
+++ /dev/null
@@ -1,91 +0,0 @@
-// Dear emacs, this is -*- c++ -*-
-
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-// $Id: TrackExample.h 626373 2014-11-05 17:13:41Z neldik $
-#ifndef ANALYSISEXAMPLES_TRACKEXAMPLE_H
-#define ANALYSISEXAMPLES_TRACKEXAMPLE_H
-
-// System include(s):
-#include <string>
-
-// Gaudi/Athena include(s):
-#include "AthenaBaseComps/AthHistogramAlgorithm.h"
-#include "GaudiKernel/ToolHandle.h"
-
-// Tool include(s):
-#include "AnalysisTools/AnalysisTools.h"
-#include "RecoToolInterfaces/ITrackIsolationTool.h"
-
-// Forward declaration(s):
-class TH1;
-
-/**
- *  @short xAOD::TrackParticle handling example
- *
- *         This class contains a series of examples to show people how to:
- *            - access a TrackParticle container from the ESD or the AOD 
- *            - access track parameters from the TrackParticle
- *            - use the track isolation tools
- *
- * @author Ketevi A. Assamagan: February, 2005
- * @author Attila Krasznahorkay <Attila.Krasznahorkay@cern.ch>
- *
- * $Revision: 626373 $
- * $Date: 2014-11-05 18:13:41 +0100 (Wed, 05 Nov 2014) $
- */
-class TrackExample : public AthHistogramAlgorithm {
-
-public:
-   /// Regular Algorithm constructor
-   TrackExample( const std::string& name, ISvcLocator* pSvcLocator );
-
-   /// Function initialising the algorithm
-   virtual StatusCode initialize();
-   /// Function executing the algorithm
-   virtual StatusCode execute();
-
-private:
-   /// get a handle to the tool helper
-   ToolHandle< AnalysisTools > m_analysisTools;
-
-   /// the TrackParticle container to retrieve
-   std::string m_trackParticleContainerName;
-
-   /// Event weight
-   double m_eventWeight;
-
-   /// TrackIsolationTool
-   ToolHandle< xAOD::ITrackIsolationTool > m_isolationTool;
-
-   /// @name Histograms of reconstructed quantities
-   /// @{
-   TH1* m_histNumberOfTrackParticles;
-   TH1* m_histQoverP;
-   TH1* m_histP;
-   TH1* m_histQ;
-   TH1* m_histEta;
-   TH1* m_histPhi;
-   TH1* m_histIsolationP;
-   TH1* m_histOriginType;
-
-   TH1* m_histVertexX;
-   TH1* m_histVertexY;
-   TH1* m_histVertexZ;
-   TH1* m_histd0wrtPrimVtx;
-   TH1* m_histz0wrtPrimVtx;
-   /// @}
-
-   /// the cuts - to be adjusted in the job options
-
-   /// cut Pt cut
-   double m_trackPCut;
-
-   /// track isolation cone
-   int m_isolationCone;
-
-}; // class TrackExample
-
-#endif // ANALYSISEXAMPLES_TRACKEXAMPLE_H
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/TrigAnalysisExample.cxx b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/TrigAnalysisExample.cxx
deleted file mode 100644
index c008ae3804640767854415bf003fde1ee6c6c8f4..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/TrigAnalysisExample.cxx
+++ /dev/null
@@ -1,216 +0,0 @@
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-/** This example is based on TrigDecisionTest by Till, Nicolas, Ricardo.
- Example by Simon George.
- Instructions:
- 1) Set up 13.0.20
- 2) Check out PhysicsAnalysis/AnalysisCommon/UserAnalysis 
-    https://twiki.cern.ch/twiki/bin/view/Atlas/UserAnalysis
-    and check you can make it and run an example so it is working
- 2a) workaround for bugs: check out and build latest TrigDecision (compilation error in GetPassedFeatures and changes to configuration methods), TrigNavigation (reset problem), TrigDecisionMaker (compatibility with TrigDecision), TrigSteering (to get TrigRoiDescriptor::roiWord set)
- 3) Copy requirements, TrigAnalysisExample (.h and .cxx); make
- 4) find an AOD file & register it:
-      e.g. RTT: http://atlas-project-rtt-results.web.cern.ch/atlas-project-rtt-results/rel_6/val/build/i686-slc4-gcc34-opt/offline/TrigAnalysisTest/AthenaTrigAnalysisTestIOTests/testAthenaRDOtoESDAOD/291/AOD.pool.root
-      or one made by hand - may be necessary, see below.
-      pool_insertFileToCatalog /afs/cern.ch/user/s/sgeorge/scratch1/analysis/run/AOD.pool.root
-      FCregisterLFN -p /afs/cern.ch/user/s/sgeorge/scratch1/analysis/run/AOD.pool.root -l myAOD.pool.root
-    Tip: run checkFile.py to check the contents of the AOD file. You need to know what you are looking for, especially if it is a bare class, std::vector or another container. 
-    Need to create an AOD file with some fixes (pkgs above) w.r.t. 13.0.20:
-    athena -c "EvtMax=10;OutputLevel=WARNING;" TrigAnalysisTest/testAthenaRDOtoESDAOD.py 
-
- 5) Modify AnalysisSkeleton_topOptions.py for input AOD file and:
-# Trigger example algorithm
-TrigAnalysisExample = Algorithm( "TrigAnalysisExample" )
-TrigAnalysisExample.OutputLevel = INFO
-theApp.TopAlg += [ "TrigAnalysisExample" ]
-
-Reference for TrigDecisionTool:
-http://atlas-computing.web.cern.ch/atlas-computing/links/nightlyDevDirectory/AtlasOffline/latest_doxygen/InstallArea/doc//TrigDecision/html/classTrigDec_1_1TrigDecisionTool.html
-
-Coding notes:
-templated code => you will get lots of DEBUG messages about unpacking and navigating which you did not write, but appear to be from your algorithm.
-
-KNOWN PROBLEMS
-1. LVL2 data not accessible through TrigDecision without HEAD of TrigNavigation.
-
-*/
-
-// Gaudi/Athena include(s):
-#include "AthenaKernel/errorcheck.h"
-
-// Trigger include(s):
-#include "TrigSteeringEvent/TrigRoiDescriptor.h"
-#include "TrigSteeringEvent/TrigRoiDescriptorCollection.h"
-#include "JetEvent/JetCollection.h"
-#include "AnalysisTriggerEvent/LVL1_ROI.h"
-#include "AnalysisTriggerEvent/Jet_ROI.h"
-
-// Local include(s):
-#include "TrigAnalysisExample.h"
-
-TrigAnalysisExample::TrigAnalysisExample( const std::string& name,
-                                          ISvcLocator* pSvcLocator )
-   : AthAlgorithm( name, pSvcLocator ),
-     m_trigDec( "Trig::TrigDecisionTool/TrigDecisionTool" ) {
-
-   declareProperty( "TrigDecisionTool", m_trigDec );
-
-}
-
-StatusCode TrigAnalysisExample::initialize(){
-
-   ATH_MSG_INFO( "in intialize()" );
-
-   // Retrieve the TDT:
-   CHECK( m_trigDec.retrieve() );
-
-   return StatusCode::SUCCESS;
-}
-
-
-StatusCode TrigAnalysisExample::execute() {
-
-   ATH_MSG_INFO( "in execute()" );
-
-   // Example 1: Obtain overall trigger status for this event.
-   // Beware that there may be special low-threshold triggers that make this
-   // meaningless. Best to look for a specific signature.
-   ATH_MSG_INFO( "Pass state    = " << m_trigDec->isPassed( "EF_.*" ) );
-   ATH_MSG_INFO( "Pass state L1 = " << m_trigDec->isPassed( "L1_.*" ) );
-   ATH_MSG_INFO( "Pass state L2 = " << m_trigDec->isPassed( "L2_.*" ) );
-   ATH_MSG_INFO( "Pass state EF = " << m_trigDec->isPassed( "EF_.*" ) );
-
-   // Example 2: check if a specific chain passed.
-   std::string mychain( "EF_j40" );
-   if( m_trigDec->getListOfTriggers( mychain ).empty() ){
-      ATH_MSG_INFO( "Chain " << mychain << " is not defined" );
-   } else {
-      ATH_MSG_INFO( "Chain " << mychain << " passed: "
-                    << ( m_trigDec->isPassed( mychain ) ? "yes" : "no" ) );
-   }
-
-   /* Example 3: Advanced navigation/matching example.
-      - pick a chain (e.g. EF_j40)
-      - get TriggerElements for chain
-      - select active TriggerElements
-      - for each RoI (=TE)
-      - get JetCollections (EF)
-      - print properties of jets in collections
-      - get initial TrigRoiDescriptor for RoI (eta, phi, roiWord)
-      - loop over LVL1_ROI::std::vector<Jet_ROI>
-      - find Jet_ROI with matching roiWord and print properties
-   */
-
-   Trig::FeatureContainer fc = m_trigDec->features( mychain );
-
-   // get JetCollection.
-   std::vector< Trig::Feature< JetCollection > > jets = fc.get< JetCollection >();
-
-   if( jets.empty() ) {
-      ATH_MSG_INFO( "Chain " << mychain  << " produced no JetCollection." );
-   } else {
-      ATH_MSG_INFO( "Chain " << mychain  << " produced " << jets.size()
-                    << " JetCollection(s)." );
-   }
-
-   std::vector< Trig::Feature< JetCollection > >::const_iterator jet_itr = jets.begin();
-   std::vector< Trig::Feature< JetCollection > >::const_iterator jet_end = jets.end();
-   for( ; jet_itr != jet_end; ++jet_itr ) {
-
-      if( jet_itr->te()->getActiveState() ) {
-         ATH_MSG_INFO( "For chain " << mychain << ", feature did activate the TE "
-                       << jet_itr->label() );
-      }
-      const JetCollection* jetcoll = *jet_itr;
-      if( ! jetcoll ) {
-         ATH_MSG_WARNING( "problem getting JetCollection" );
-         continue;
-      }
-
-      if( ! jetcoll->size() ) {
-         ATH_MSG_INFO( "JetCollection found but empty." );
-         continue;
-      }
-
-      ATH_MSG_INFO( "JetCollection found. Get jets." );
-      JetCollection::const_iterator jet_obj_itr = jetcoll->begin();
-      JetCollection::const_iterator jet_obj_end = jetcoll->end();
-      for( ; jet_obj_itr != jet_obj_end; ++jet_obj_itr ) {
-         ATH_MSG_INFO( "  jet: eta " << ( *jet_obj_itr )->eta()
-                       << " phi " << ( *jet_obj_itr )->phi()
-                       << " et " << ( *jet_obj_itr )->et() );
-      }
-   }
-
-
-   // now get initial RoIDescriptor for the same TE as the jets
-   std::vector< Trig::Feature< TrigRoiDescriptor > > rois =
-      fc.get< TrigRoiDescriptor >( "initialRoI" );
-
-   if( ! rois.empty() ) {
-
-      const TrigRoiDescriptor* roi = rois.front();
-      ATH_MSG_INFO( "Initial TrigRoiDescriptor:"
-                    << " eta " << roi->eta()
-                    << " phi " << roi->phi()
-                    << " roiWord " << roi->roiWord()
-                    << " l1id " << roi->l1Id()
-                    << " roiId " << roi->roiId() << " " << *roi );
-
-      // get LVL1 jet RoIs for comparison and to identify the one with the same roiWord.
-      // cache map of roiWord to LVL1_ROI in TrigDecisionTool?
-      const LVL1_ROI *lvl1ROI;
-      CHECK( evtStore()->retrieve( lvl1ROI ) );
-
-      LVL1_ROI::jets_type::const_iterator jet_itr = lvl1ROI->getJetROIs().begin();
-      LVL1_ROI::jets_type::const_iterator jet_end = lvl1ROI->getJetROIs().end();
-      for( ; jet_itr != jet_end; ++jet_itr ) {
-
-         ATH_MSG_INFO( "Jet RoI Word: " << jet_itr->getROIWord() );
-         if( roi->roiWord() == jet_itr->getROIWord() ) {
-
-            ATH_MSG_INFO( "matching LVL1 jet: " 
-                          << " eta " << jet_itr->getEta() 
-                          << " phi " << jet_itr->getPhi()
-                          << " pattern " << jet_itr->getThrPattern()
-                          << " ET 4x4 " << jet_itr->getET4x4()
-                          << " ET 6x6 " << jet_itr->getET6x6()
-                          << " ET 8x8 " << jet_itr->getET8x8() );
-         }
-      }
-   }
-
-
-   // for comparison and debug, get the JetCollections directly from SG
-   ATH_MSG_INFO( "get the JetCollections directly from SG for comparison" );
-   const DataHandle <JetCollection> jetcoll, jetcollLast;
-   CHECK( evtStore()->retrieve( jetcoll, jetcollLast ) );
-
-   ATH_MSG_INFO( "found  JetCollections" );
-   for( ; jetcoll != jetcollLast; ++jetcoll ) {
-
-      ATH_MSG_INFO( " Container " << jetcoll.key() );
-
-      JetCollection::const_iterator jet_itr = jetcoll->begin();
-      JetCollection::const_iterator jet_end = jetcoll->end();
-      for( ; jet_itr != jet_end; ++jet_itr ) {
-
-         ATH_MSG_INFO( "  jet: eta " << ( *jet_itr )->eta() << " phi "
-                       << ( *jet_itr )->phi() << " et "
-                       << ( *jet_itr )->et() );
-      }
-      
-   }
-
-   return StatusCode::SUCCESS;
-}
-
-
-StatusCode TrigAnalysisExample::finalize() {
-
-   ATH_MSG_INFO( "in finalize()" );
-
-   return StatusCode::SUCCESS;
-}
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/TrigAnalysisExample.h b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/TrigAnalysisExample.h
deleted file mode 100644
index 74771641697331a7e3f530e902d26036315ab79d..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/TrigAnalysisExample.h
+++ /dev/null
@@ -1,32 +0,0 @@
-// Dear emacs, this is -*- c++ -*-
-
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-// $Id: TrigAnalysisExample.h 604985 2014-07-04 15:20:52Z krasznaa $
-#ifndef TRIGANALYSISEXAMPLE_H
-#define TRIGANALYSISEXAMPLE_H
-
-// Gaudi/Athena include(s):
-#include "AthenaBaseComps/AthAlgorithm.h"
-#include "GaudiKernel/ToolHandle.h"
-
-// Trigger include(s):
-#include "TrigDecisionTool/TrigDecisionTool.h"
-
-class TrigAnalysisExample : public AthAlgorithm {
-
-public:
-   TrigAnalysisExample( const std::string& name, ISvcLocator* pSvcLocator );
-
-   StatusCode initialize();
-   StatusCode execute();
-   StatusCode finalize();
-
-private:
-   ToolHandle< Trig::TrigDecisionTool > m_trigDec;
-
-}; // class TrigAnalysisExample
-
-#endif // TRIGANALYSISEXAMPLE_H
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/WriteTruthParticles.cxx b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/WriteTruthParticles.cxx
deleted file mode 100644
index ca654849c29eb8faeec1b6a0e886ff64d793af2b..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/WriteTruthParticles.cxx
+++ /dev/null
@@ -1,111 +0,0 @@
-///////////////////////// -*- C++ -*- /////////////////////////////
-
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-// WriteTruthParticles.cxx 
-// Implementation file for class WriteTruthParticles
-// Author: S.Binet<binet@cern.ch>
-/////////////////////////////////////////////////////////////////// 
-
-
-// STL includes
-
-// FrameWork includes
-#include "GaudiKernel/MsgStream.h"
-#include "Gaudi/Property.h"
-
-// McParticleKernel includes
-#include "McParticleKernel/ITruthParticleCnvTool.h"
-
-// AnalysisExamples includes
-#include "WriteTruthParticles.h"
-
-/////////////////////////////////////////////////////////////////// 
-// Public methods: 
-/////////////////////////////////////////////////////////////////// 
-
-// Constructors
-////////////////
-WriteTruthParticles::WriteTruthParticles( const std::string& name, 
-					  ISvcLocator* pSvcLocator ) : 
-  AthAlgorithm( name, pSvcLocator ),
-  m_cnvTool( "TruthParticleCnvTool/CnvTool", this )
-{
-  //
-  // Property declaration
-  // 
-  //declareProperty( "Property", m_nProperty );
-
-  declareProperty( "CnvTool",
-		   m_cnvTool,
-		   "Handle to the tool which converts a McEventCollection into"
-		   " a TruthParticleContainer" );
-
-}
-
-// Destructor
-///////////////
-WriteTruthParticles::~WriteTruthParticles()
-{ 
-  ATH_MSG_DEBUG ( "Calling destructor" );
-}
-
-// Athena Algorithm's Hooks
-////////////////////////////
-StatusCode WriteTruthParticles::initialize()
-{
-  ATH_MSG_INFO ( "Initializing " << name() << "..."  );
-
-  /// retrieve the converter tool
-  if ( !m_cnvTool.retrieve().isSuccess() ) {
-    ATH_MSG_ERROR ( "Could not retrieve the truth particle converter tool !!" );
-    return StatusCode::FAILURE;
-  }
-
-  return StatusCode::SUCCESS;
-}
-
-StatusCode WriteTruthParticles::finalize()
-{
-  ATH_MSG_INFO ( "Finalizing " << name() << "..." );
-  return StatusCode::SUCCESS;
-}
-
-StatusCode WriteTruthParticles::execute()
-{  
-  ATH_MSG_DEBUG ( "Executing " << name() << "..."  );
-
-  // create a TruthParticleContainer from a McEventCollection
-  if ( 0 == m_cnvTool || 
-       !m_cnvTool->execute().isSuccess() ) {
-    ATH_MSG_WARNING 
-      ( "Could not convert a McEventCollection into "
-	<< "a TruthParticleContainer !"	);
-    return StatusCode::RECOVERABLE;
-  }
-
-  return StatusCode::SUCCESS;
-}
-
-/////////////////////////////////////////////////////////////////// 
-// Const methods: 
-///////////////////////////////////////////////////////////////////
-
-/////////////////////////////////////////////////////////////////// 
-// Non-const methods: 
-/////////////////////////////////////////////////////////////////// 
-
-/////////////////////////////////////////////////////////////////// 
-// Protected methods: 
-/////////////////////////////////////////////////////////////////// 
-
-/////////////////////////////////////////////////////////////////// 
-// Const methods: 
-///////////////////////////////////////////////////////////////////
-
-/////////////////////////////////////////////////////////////////// 
-// Non-const methods: 
-/////////////////////////////////////////////////////////////////// 
-
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/WriteTruthParticles.h b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/WriteTruthParticles.h
deleted file mode 100644
index d672f4a24804213422c59e86ccc3536106f9acaf..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/WriteTruthParticles.h
+++ /dev/null
@@ -1,79 +0,0 @@
-///////////////////////// -*- C++ -*- /////////////////////////////
-
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-// WriteTruthParticles.h 
-// Header file for class WriteTruthParticles
-// Author: S.Binet<binet@cern.ch>
-/////////////////////////////////////////////////////////////////// 
-#ifndef ANALYSISEXAMPLES_WRITETRUTHPARTICLES_H 
-#define ANALYSISEXAMPLES_WRITETRUTHPARTICLES_H 
-
-// STL includes
-#include <string>
-
-// FrameWork includes
-#include "AthenaBaseComps/AthAlgorithm.h"
-#include "GaudiKernel/ToolHandle.h"
-
-// Forward declaration
-class ITruthParticleCnvTool;
-
-class WriteTruthParticles : public AthAlgorithm
-{ 
-
-  /////////////////////////////////////////////////////////////////// 
-  // Public methods: 
-  /////////////////////////////////////////////////////////////////// 
- public: 
-
-  // Copy constructor: 
-
-  /// Constructor with parameters: 
-  WriteTruthParticles( const std::string& name, ISvcLocator* pSvcLocator );
-
-  /// Destructor: 
-  virtual ~WriteTruthParticles(); 
-
-  // Assignment operator: 
-  //WriteTruthParticles &operator=(const WriteTruthParticles &alg); 
-
-  // Athena algorithm's Hooks
-  virtual StatusCode  initialize();
-  virtual StatusCode  execute();
-  virtual StatusCode  finalize();
-
-  /////////////////////////////////////////////////////////////////// 
-  // Const methods: 
-  ///////////////////////////////////////////////////////////////////
-
-  /////////////////////////////////////////////////////////////////// 
-  // Non-const methods: 
-  /////////////////////////////////////////////////////////////////// 
-
-  /////////////////////////////////////////////////////////////////// 
-  // Protected data: 
-  /////////////////////////////////////////////////////////////////// 
- protected: 
-
-  /// Default constructor: 
-  WriteTruthParticles();
-
-  typedef ToolHandle<ITruthParticleCnvTool> CnvTool_t;
-  /** Pointer to the AlgTool which will perform the conversion
-   *  McEventCollection -> TruthParticleContainer
-   */
-  CnvTool_t m_cnvTool;
-
-}; 
-
-/// I/O operators
-//////////////////////
-
-/////////////////////////////////////////////////////////////////// 
-/// Inline methods: 
-/////////////////////////////////////////////////////////////////// 
-
-#endif //> ANALYSISEXAMPLES_WRITETRUTHPARTICLES_H
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/ZeeOnESD.cxx b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/ZeeOnESD.cxx
deleted file mode 100644
index 1892499c1eaaa100f8258895b8333bb74f00aaa8..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/ZeeOnESD.cxx
+++ /dev/null
@@ -1,264 +0,0 @@
-/*
-  Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
-*/
-
-#include "GaudiKernel/MsgStream.h"
-#include "GaudiKernel/IToolSvc.h"
-
-#include "StoreGate/DataHandle.h"
-
-#include "egammaEvent/Electron.h"
-#include "egammaEvent/EMShower.h"
-#include "egammaEvent/EMTrackMatch.h"
-
-#include "Particle/TrackParticle.h"
-
-#include "GaudiKernel/ITHistSvc.h"
-#include "TTree.h"
-
-/// analysis tools
-#include "AnalysisUtils/AnalysisCombination.h"
-
-/// the header file
-#include "ZeeOnESD.h"
-
-#include <stdint.h>
-#include <algorithm>
-#include <math.h>
-#include <functional>
-
-/// Declare a local helper function
-bool selectEgamma(ZeeOnESD * self, const egammaPair &ll);
-
-using namespace Analysis;
-
-//////////////////////////////////////////////////////////////////////////////////////
-/// Constructor
-
-ZeeOnESD::ZeeOnESD(const std::string& name,
-  ISvcLocator* pSvcLocator)
-  : AthAlgorithm(name, pSvcLocator),
-    m_thistSvc(nullptr),
-    m_esd_egamma_pt(nullptr),
-    m_esd_egamma_eta(nullptr),
-    m_esd_egamma_overp(nullptr),
-    m_esd_egamma_isEM(nullptr),
-    m_esd_zee_mass_hist(nullptr),
-    m_histEgammaTrackP(nullptr),
-    m_histEgammaClusterE(nullptr)
-{
-  /// switches to control the analysis through job options :: these are the default
-  /// to be changed in the job options
-
-  /// The egamma ESD container name & selection cuts
-  declareProperty("egammaContainer", m_egammaContainerName = "egammaCollection");
-  declareProperty("TrackMatchContainer", m_trkMatchContainerName="EMTrackMatchContainer");
-  declareProperty("egammaEtCut", m_etEgammaCut = 20.0*CLHEP::GeV);
-  declareProperty("egammaEtaCut", m_etaEgammaCut = 2.5);
-
-  }
-
-
-/////////////////////////////////////////////////////////////////////////////////////
-/// Destructor - check up memory allocation
-/// delete any memory allocation on the heap
-
-ZeeOnESD::~ZeeOnESD() {}
-
-////////////////////////////////////////////////////////////////////////////////////
-/// Initialize
-/// initialize StoreGate
-/// get a handle on the analysis tools
-/// book histograms
-
-StatusCode ZeeOnESD::initialize() {
-
-  ATH_MSG_INFO("Initializing ZeeOnESD");
-
-  StatusCode sc = service("THistSvc", m_thistSvc);
-  if (sc.isFailure()) {
-     ATH_MSG_ERROR("Unable to retrieve pointer to THistSvc");
-     return sc;
-  }
-      
-  /// the histograms
-
-  /// Electron histogram booking
-  m_esd_egamma_pt      = new TH1F("esd_egamma_pt","esd pt eg",50,0,250.*CLHEP::GeV);
-  sc =  m_thistSvc->regHist("/AANT/egamma/esd_egamma_pt",m_esd_egamma_pt);
-  if (sc.isFailure()) {
-     ATH_MSG_ERROR("ROOT Hist m_esd_egamma_pt registration failed");
-     return sc;
-  }
-
-  m_esd_egamma_eta     = new TH1F("esd_egamma_eta","esd eta eg",70,-3.5,3.5);
-  sc =  m_thistSvc->regHist("/AANT/egamma/esd_egamma_eta",m_esd_egamma_eta);
-  if (sc.isFailure()) {
-     ATH_MSG_ERROR("ROOT Hist m_esd_egamma_eta registration failed");
-     return sc;
-  }
-
-  m_esd_egamma_overp   = new TH1F("esd_egamma_eoverp","ead E/p eg",50,0,2.);
-  sc =  m_thistSvc->regHist("/AANT/egamma/esd_egamma_eoverp",m_esd_egamma_overp);
-  if (sc.isFailure()) {
-     ATH_MSG_ERROR("ROOT Hist m_esd_egamma_eoverp registration failed");
-     return sc;
-  }
-
-  m_esd_egamma_isEM    = new TH1F("esd_egamma_isEM_bits","egamma isEM bit Pattern",20,-1.5,18.5);
-  sc =  m_thistSvc->regHist("/AANT/egamma/esd_egamma_isEM_bits",m_esd_egamma_isEM);
-  if (sc.isFailure()) {
-     ATH_MSG_ERROR("ROOT Hist m_esd_egamma_isEM registration failed");
-     return sc;
-  }
-
-  m_esd_zee_mass_hist  = new TH1F("Mee_ESD","Mee_ESD",50,0,250.*CLHEP::GeV);
-  sc =  m_thistSvc->regHist("/AANT/egamma/Mee_ESD",m_esd_zee_mass_hist);
-  if (sc.isFailure()) {
-     ATH_MSG_ERROR("ROOT Hist m_esd_egamma_Mee registration failed");
-     return sc;
-  }
-
-  m_histEgammaTrackP   = new TH1F("egammaTrackP","eg Track Momentum",100,0,500.*CLHEP::GeV);
-  sc =  m_thistSvc->regHist("/AANT/egamma/esd_eta_eg",m_histEgammaTrackP);
-  if (sc.isFailure()) {
-     ATH_MSG_ERROR("ROOT Hist m_esd_egamma_TrackP registration failed");
-     return sc;
-  }
-
-  m_histEgammaClusterE = new TH1F("egammaClusterE","eg Cluster Energy",100,0,500.*CLHEP::GeV);
-  sc =  m_thistSvc->regHist("/AANT/egamma/egammaCkusterE",m_histEgammaClusterE);
-  if (sc.isFailure()) {
-     ATH_MSG_ERROR("ROOT Hist m_esd_egamma_ClusterE registration failed");
-     return sc;
-  }
-  
-  return StatusCode::SUCCESS;
-}		 
-
-///////////////////////////////////////////////////////////////////////////////////
-/// Finalize - delete any memory allocation from the heap
-
-StatusCode ZeeOnESD::finalize() {
-  MsgStream mLog( msgSvc(), name() );
-  
-  return StatusCode::SUCCESS;
-
-}
-
-//////////////////////////////////////////////////////////////////////////////////
-/// Execute - called by the event loop on event by event
-
-StatusCode ZeeOnESD::execute() {
-
-  MsgStream mLog( msgSvc(), name() );
-
-  mLog << MSG::DEBUG << "execute()" << endmsg;
-
-  StatusCode sc = StatusCode::SUCCESS;
-
-  /// do the Z->ee reconstruction on ESD
-  sc = zee_on_esd();
-  if ( sc.isFailure() ) {
-    mLog << MSG::FATAL << "Z->ee reconstruction on ESD failed" << endmsg;
-    return StatusCode::FAILURE;
-  }
-
-  return sc;
-
-}
-
-//////////////////////////////////////////////////////////////////////////////////
-/// zee on esd: called by execute()
-
-StatusCode ZeeOnESD::zee_on_esd() {
-
-
-  ATH_MSG_DEBUG("zee_on_esd");
-
-  StatusCode sc = StatusCode::SUCCESS;
-
-  /// read the ESD egamma container from StoreGate
-  const ElectronContainer* egammaTES = 0;
-  sc=evtStore()->retrieve( egammaTES, m_egammaContainerName);
-  if( sc.isFailure()  ||  !egammaTES ) {
-     ATH_MSG_FATAL("No ESD egamma container found in StoreGate");
-     return StatusCode::FAILURE;
-  }  
-  ATH_MSG_DEBUG("egammaContainer successfully retrieved. Size = " << egammaTES->size());
-  
-  /// iterators over the container 
-  ElectronContainer::const_iterator egammaItr  = egammaTES->begin();
-  ElectronContainer::const_iterator egammaItrE = egammaTES->end();
-
-  /// loop over the ESD electron container
-  /// and fill the egamma pt, eta and E/P histograms
-  for (; egammaItr != egammaItrE; ++egammaItr) {
-    int bitPosition = 1;
-    
-    int isEM = (*egammaItr)->isem();
-    if ( isEM == 0 ) m_esd_egamma_isEM->Fill( (isEM-1.0), 1.0);
-    if (isEM > 0) {
-       for (int i=0; i<16; ++i) {
-         if (isEM & bitPosition) m_esd_egamma_isEM->Fill(i, 1.0);
-         bitPosition *= 2;
-       }
-    }
-    const CaloCluster* cluster = (*egammaItr)->cluster();
-    const EMTrackMatch* trkMatch = (*egammaItr)->detail<EMTrackMatch>(m_trkMatchContainerName);
-    ATH_MSG_DEBUG("egamma isEM/pt/trkMatch " << (*egammaItr)->isem()<<","<<cluster->pt()<<","<<trkMatch);
-    if((*egammaItr)->isem()%16 == 0) ATH_MSG_DEBUG("2: egamma isEM/pt/trkMatch " << (*egammaItr)->isem()<<","<<cluster->pt()<<","<<trkMatch);
-
-    if( trkMatch && (*egammaItr)->isem()%16 == 0 && cluster){
-      m_esd_egamma_pt->Fill( cluster->pt(), 1.);
-      m_esd_egamma_eta->Fill( cluster->eta(), 1.);
-
-      // we need to calculate E/P ourselves
-      double eOp=0;
-      if((*egammaItr)->trackParticle() ) {
-        double p = ((*egammaItr)->trackParticle()->pt())*(cosh((*egammaItr)->trackParticle()->eta()));
-        double e = (*egammaItr)->cluster()->et()*(cosh((*egammaItr)->cluster()->eta()));
-        eOp = p>0. ? e/p:0.;
-      }
-
-      m_esd_egamma_overp->Fill( eOp, 1.);
-    }
-  }
-
-  /// ee invariant mass reconstruction
-  /// use the Analysis Utility to get combinations of 2 electrons from the AOD container
-  /// and the selection
-  /// retain the best combination
-  AnalysisUtils::Combination<const ElectronContainer> comb(egammaTES,2);
-  egammaPair egPair; 
-  double mee = -1.0;
-  while (comb.goodOnes(this, egPair, selectEgamma)) {
-    mee = (egPair[0]->hlv()+egPair[1]->hlv()).m();
-    m_esd_zee_mass_hist->Fill(mee);
-  }
-  		
-  return StatusCode::SUCCESS;
-}
-
-
-/// this function is a friend of ZeeOnESD
-/// electron selection - test for the charges - cut on pt - cut on eta
-/// more sophisticated cuts such as shower shape cut should be considered
-/// cut on the isEM 
-bool selectEgamma(ZeeOnESD * self, const egammaPair &ll) {
-
-  bool test1 = false;
-  if (ll[0]->trackParticle() && ll[1]->trackParticle())
-     test1 = ll[0]->trackParticle()->charge() == -(ll[1]->trackParticle()->charge());
-
-  bool test2 = false;
-  bool test3 = false;
-  if (ll[0]->cluster() && ll[1]->cluster()) {
-     test2 = (ll[0]->cluster()->et() > self->m_etEgammaCut) &&
-                  (ll[1]->cluster()->et() > self->m_etEgammaCut);
-     test3 = (fabs(ll[0]->cluster()->eta()) < self->m_etaEgammaCut ) &&
-                  (fabs(ll[1]->cluster()->eta()) < self->m_etaEgammaCut );
-  }
-  bool test4 = ( (ll[0]->isem()%16)==0 && (ll[1]->isem()%16) == 0);
-  return (test1 && test2 && test3 && test4);
-}
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/ZeeOnESD.h b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/ZeeOnESD.h
deleted file mode 100644
index 6b8fe86da0f380b271abc727ff8d17dfb85e36f6..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/ZeeOnESD.h
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
-  Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
-*/
-
-#ifndef ZEEONESD_H
-#define ZEEONESD_H
-/////////////////////////////////////////////////////////////////////////////////////////////////////
-/// Name    : ZeeOnESD.h
-/// Package : offline/PhysicsAnalysis/AnalysisCommon/AnalysisExamples
-/// Author  : Ketevi A. Assamagan
-/// Created : February 2005
-///
-/// DESCRIPTION:
-///
-/// Example of Z->ee reconstruction on ESD
-/// Ketevi A. Assamagan on February 20, 2005
-///
-///////////////////////////////////////////////////////////////////////////////////////////////////////
-
-#include "GaudiKernel/Algorithm.h"
-//#include "GaudiKernel/ObjectVector.h"
-#include "CLHEP/Units/SystemOfUnits.h"
-#include "GaudiKernel/ITHistSvc.h"
-#include "AthenaBaseComps/AthAlgorithm.h"
-
-#include "egammaEvent/ElectronContainer.h"
-
-#include <string>
-#include "TH1.h"
-
-/// type definition
-typedef std::vector<const ElectronContainer::base_value_type*> egammaPair;
-
-class ZeeOnESD : public AthAlgorithm {
-
- public:
-
-   ZeeOnESD(const std::string& name, ISvcLocator* pSvcLocator);
-   ~ZeeOnESD();
-
-   StatusCode initialize();
-   StatusCode finalize();
-   StatusCode execute();
-
- private:
-
-   /// the selection function for electrons
-   friend bool selectEgamma(ZeeOnESD *self, const egammaPair &ll);
-
-   /// the z -> ee reconstruction method
-   StatusCode zee_on_esd();
-
- private:
-
-   /** a handle on the Hist/TTree registration service */
-   ITHistSvc * m_thistSvc;
-
-   /// name of the AOD electron container to retrieve from StoreGate
-   std::string m_egammaContainerName;
-   std::string m_trkMatchContainerName;
- 
-   /// use selection cuts - for electrons 
-   /// to be modified thru job options
-   double m_etEgammaCut;
-   double m_etaEgammaCut;
-
-   /// create the histograms
-   /// for electrons
-   /// reconstructed quantities
-   TH1F* m_esd_egamma_pt;
-   TH1F* m_esd_egamma_eta;
-   TH1F* m_esd_egamma_overp;
-   TH1F* m_esd_egamma_isEM;
-   TH1F* m_esd_zee_mass_hist;
-   TH1F* m_histEgammaTrackP;
-   TH1F* m_histEgammaClusterE;
-
-};
-
-#endif // ZEEONESD_H
-
diff --git a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/components/AnalysisExamples_entries.cxx b/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/components/AnalysisExamples_entries.cxx
deleted file mode 100644
index cca89bfb1e563e23a795bef1030fc3f31fa51e43..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/AnalysisExamples/src/components/AnalysisExamples_entries.cxx
+++ /dev/null
@@ -1,23 +0,0 @@
-#include "../WriteTruthParticles.h"
-#include "../ReadTruthParticles.h"
-#include "../TrackExample.h"
-#include "../ZeeOnESD.h"
-#include "../JetTagAna.h"
-#include "../MiscellaneousExamples.h"
-#include "../PileUpTruthExample.h"
-#include "../TrigAnalysisExample.h"
-#include "../AthExReadD3pdEvtStore.h"
-#include "../AthExReadD3pdNtuple.h"
-
-
-DECLARE_COMPONENT( MiscellaneousExamples )
-DECLARE_COMPONENT( WriteTruthParticles )
-DECLARE_COMPONENT( ReadTruthParticles )
-DECLARE_COMPONENT( TrackExample )
-DECLARE_COMPONENT( ZeeOnESD )
-DECLARE_COMPONENT( JetTagAna )
-DECLARE_COMPONENT( PileUpTruthExample )
-DECLARE_COMPONENT( TrigAnalysisExample )
-DECLARE_COMPONENT( AthEx::ReadD3pdNtuple )
-DECLARE_COMPONENT( AthEx::ReadD3pdEvtStore )
-
diff --git a/PhysicsAnalysis/AnalysisCommon/SpecialUtils/CMakeLists.txt b/PhysicsAnalysis/AnalysisCommon/SpecialUtils/CMakeLists.txt
deleted file mode 100644
index 586cc7f5d367f31eda0a6afc59639c710335670d..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/SpecialUtils/CMakeLists.txt
+++ /dev/null
@@ -1,21 +0,0 @@
-################################################################################
-# Package: SpecialUtils
-################################################################################
-
-# Declare the package name:
-atlas_subdir( SpecialUtils )
-
-# Declare the package's dependencies:
-atlas_depends_on_subdirs( PUBLIC
-                          PhysicsAnalysis/AnalysisCommon/ParticleEvent
-                          Reconstruction/Jet/JetEvent
-                          Reconstruction/MuonIdentification/muonEvent
-                          Reconstruction/egamma/egammaEvent
-                          Reconstruction/tauEvent )
-
-# External dependencies:
-find_package( CLHEP )
-
-# Install files from the package:
-atlas_install_headers( SpecialUtils )
-
diff --git a/PhysicsAnalysis/AnalysisCommon/SpecialUtils/SpecialUtils/ElectronTypeDefs.h b/PhysicsAnalysis/AnalysisCommon/SpecialUtils/SpecialUtils/ElectronTypeDefs.h
deleted file mode 100755
index 9484c2b1236ee265980757bf8338d91b66c07fcc..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/SpecialUtils/SpecialUtils/ElectronTypeDefs.h
+++ /dev/null
@@ -1,15 +0,0 @@
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-#ifndef ELECTRON_TYPE_DEFS_H
-#define ELECTRON_TYPE_DEFS_H
-
-#include "egammaEvent/ElectronContainer.h"
-
-typedef std::vector<const ElectronContainer::base_value_type*> ElectronVect;
-
-typedef std::vector<ElectronContainer::base_value_type*> ListOfElectrons;
-
-#endif /// ELECTRON_TYPE_DEFS_H
-
diff --git a/PhysicsAnalysis/AnalysisCommon/SpecialUtils/SpecialUtils/JetTypeDefs.h b/PhysicsAnalysis/AnalysisCommon/SpecialUtils/SpecialUtils/JetTypeDefs.h
deleted file mode 100755
index e399b6e4e7c971a6b729a123cf81e284e375b4cc..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/SpecialUtils/SpecialUtils/JetTypeDefs.h
+++ /dev/null
@@ -1,15 +0,0 @@
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-#ifndef PARTICLEJET_TYPE_DEFS_H
-#define PARTICLEJET_TYPE_DEFS_H
-
-#include "JetEvent/JetCollection.h"
-
-typedef std::vector<const JetCollection::base_value_type*> JetVect;
-
-typedef std::vector<JetCollection::base_value_type*> ListOfJets;
-
-#endif /// PARTICLEJET_TYPE_DEFS_H
-
diff --git a/PhysicsAnalysis/AnalysisCommon/SpecialUtils/SpecialUtils/MuonTypeDefs.h b/PhysicsAnalysis/AnalysisCommon/SpecialUtils/SpecialUtils/MuonTypeDefs.h
deleted file mode 100755
index daf3a1a349fc0ef99c0ef94707e74b9aea16e2e6..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/SpecialUtils/SpecialUtils/MuonTypeDefs.h
+++ /dev/null
@@ -1,15 +0,0 @@
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-#ifndef MUON_TYPE_DEFS_H
-#define MUON_TYPE_DEFS_H
-
-#include "muonEvent/MuonContainer.h"
-
-typedef std::vector<const Analysis::MuonContainer::base_value_type*> MuonVect;
-
-typedef std::vector<Analysis::MuonContainer::base_value_type*> ListOfMuons;
-
-#endif /// MUON_TYPE_DEFS_H
-
diff --git a/PhysicsAnalysis/AnalysisCommon/SpecialUtils/SpecialUtils/NeutrinoTypeDefs.h b/PhysicsAnalysis/AnalysisCommon/SpecialUtils/SpecialUtils/NeutrinoTypeDefs.h
deleted file mode 100755
index 113f4cd86a7347c5dc94b76b31f652ae9d70c136..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/SpecialUtils/SpecialUtils/NeutrinoTypeDefs.h
+++ /dev/null
@@ -1,15 +0,0 @@
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-#ifndef NEUTRINO_TYPE_DEFS_H
-#define NEUTRINO_TYPE_DEFS_H
-
-#include "ParticleEvent/NeutrinoContainer.h"
-
-typedef std::vector<const NeutrinoContainer::base_value_type*> NeutrinoVect;
-
-typedef std::vector<NeutrinoContainer::base_value_type*> ListOfNeutrinos;
-
-#endif /// NEUTRINO_TYPE_DEFS_H
-
diff --git a/PhysicsAnalysis/AnalysisCommon/SpecialUtils/SpecialUtils/NeutrinoUtils.h b/PhysicsAnalysis/AnalysisCommon/SpecialUtils/SpecialUtils/NeutrinoUtils.h
deleted file mode 100755
index ced88ad6296f1f0aec37cbd2ac5230f9eaed9241..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/SpecialUtils/SpecialUtils/NeutrinoUtils.h
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-/**
-   Utilities for Neutrino
-
-   @author Tadashi Maeno
- */
-
-#ifndef SPECIALUTILS_NEUTRINOUTILS_H
-#define SPECIALUTILS_NEUTRINOUTILS_H
-
-namespace NeutrinoUtils {
-  
-  /**
-     construct neutrino 4-momentum from lepton, missing pt and the W-mass.
-     The neutrino mass is zero. This method returns the container of neutrino candidates.
-   */
-  template <class LEPTON, class COLL>
-  bool candidatesFromWMass(LEPTON* lepton, const double pxMiss, const double pyMiss,
-			   COLL & neutrinoContainer, bool DealWithNegative = false);
-
-  /**
-     tau tau -> a+b+ptmiss utility. This method returns two neutrinos
-     given two particles and ptmiss.  Here the constraint is that a and b are
-     collinear with the taus (either leptons or a TauJet).  Thus 'a' carries a
-     fraction x_a of one tau's momentum and that tau's neutrinos carry (1-x_a)
-     of that tau's momentum.
-   */
-  template <class LEPTON1, class LEPTON2, class COLL>  
-  void neutrinosFromColinearApproximation(LEPTON1* particleA, LEPTON2* particleB,
-					  const double pxMiss, const double pyMiss, 
-					  COLL & neutrinoContainer,
-					  double &x_a, double &x_b);
-}
-
-#include "SpecialUtils/NeutrinoUtils.icc"
-
-#endif
diff --git a/PhysicsAnalysis/AnalysisCommon/SpecialUtils/SpecialUtils/NeutrinoUtils.icc b/PhysicsAnalysis/AnalysisCommon/SpecialUtils/SpecialUtils/NeutrinoUtils.icc
deleted file mode 100755
index 2194271d557012d3a5abe94939853ce5e471e1fb..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/SpecialUtils/SpecialUtils/NeutrinoUtils.icc
+++ /dev/null
@@ -1,103 +0,0 @@
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-#include "ParticleEvent/Neutrino.h"
-#include "CLHEP/Units/SystemOfUnits.h"
-
-#include <cmath>
-
-template <class LEPTON, class COLL> inline
-bool NeutrinoUtils::candidatesFromWMass(LEPTON* lepton, const double pxMiss, const double pyMiss,
-					COLL & neutrinoContainer, bool DealWithNegative)
-{
-  // W mass
-  const double WMass = 80.4*CLHEP::GeV;
-
-  // clear output
-  neutrinoContainer.clear();
-
-  // solve the quadratic equation
-  
-  double ptMiss = sqrt (pxMiss*pxMiss + pyMiss*pyMiss);
-
-  double alpha = pow(WMass,2)+pow((pxMiss+lepton->px()),2)+pow((pyMiss+lepton->py()),2)
-    -pow(lepton->e(),2);
-  
-  double beta = 0.5 * ( alpha-pow(ptMiss,2)+pow(lepton->pz(),2) );
-  
-  double gamma = -( beta*beta - ( pow(lepton->e(),2)*pow(ptMiss,2) ) )
-                 / ( pow(lepton->e(),2)-pow(lepton->pz(),2) );
-
-  double lambda = 2*beta*lepton->pz() / (pow(lepton->e(),2)-pow(lepton->pz(),2));
-
-  double delta = pow(lambda,2)-4*gamma;
-
-  // if no solution
-  if ( delta < 0 ){
-    if ( DealWithNegative ){
-      delta = 0;
-    }
-    else{
-      return false;
-    }
-  }
-
-  delta = sqrt(delta);
-  
-  // instantiate Neutrino
-
-  double pz = (lambda-delta)/2.0;
-  double e  = sqrt(pxMiss*pxMiss+pyMiss*pyMiss+pz*pz);
-
-  Neutrino *nu1 = new Neutrino;
-  nu1->set4Mom(CLHEP::HepLorentzVector(pxMiss,pyMiss,pz,e));
-
-  neutrinoContainer.push_back(nu1);
-  if ( delta == 0 ) return true;
-
-  pz = (lambda+delta)/2.0;
-  e  = sqrt(pxMiss*pxMiss+pyMiss*pyMiss+pz*pz);
-
-  Neutrino *nu2 = new Neutrino;
-  nu2->set4Mom(CLHEP::HepLorentzVector(pxMiss,pyMiss,pz,e));
-
-  neutrinoContainer.push_back(nu2);
-
-  return true;
-}
-
-
-template <class LEPTON1, class LEPTON2, class COLL> inline 
-void NeutrinoUtils::neutrinosFromColinearApproximation(LEPTON1* particleA, LEPTON2* particleB,
-						       const double pxMiss, const double pyMiss, 
-						       COLL & neutrinoContainer,
-						       double &x_a, double &x_b)
-{
-  // numerator
-  double numerator = particleA->px()*particleB->py() - particleA->py()*particleB->px();
-
-  // fraction
-  x_a = numerator/(particleB->py()*(particleA->px()+pxMiss)-particleB->px()*(particleA->py()+pyMiss));
-  x_b = numerator/(particleA->px()*(particleB->py()+pyMiss)-particleA->py()*(particleB->px()+pxMiss));
-
-  // instantiate Neutrino
-  double px = (1./x_a-1.)*particleA->px();
-  double py = (1./x_a-1.)*particleA->py();
-  double pz = (1./x_a-1.)*particleA->pz();
-  double e  = sqrt(px*px+py*py+pz*pz);
-
-  Neutrino *nu1 = new Neutrino;
-  nu1->set4Mom(CLHEP::HepLorentzVector(px,py,pz,e));
-
-  px = (1./x_b-1.)*particleB->px();
-  py = (1./x_b-1.)*particleB->py();
-  pz = (1./x_b-1.)*particleB->pz();
-  e  = sqrt(px*px+py*py+pz*pz);
-
-  Neutrino *nu2 = new Neutrino;
-  nu2->set4Mom(CLHEP::HepLorentzVector(px,py,pz,e));
-
-  neutrinoContainer.push_back(nu1);
-  neutrinoContainer.push_back(nu2);
-}
diff --git a/PhysicsAnalysis/AnalysisCommon/SpecialUtils/SpecialUtils/ParticleBaseTypeDefs.h b/PhysicsAnalysis/AnalysisCommon/SpecialUtils/SpecialUtils/ParticleBaseTypeDefs.h
deleted file mode 100755
index d0794f48799eee4337bc37028a936efc5e27e8ee..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/SpecialUtils/SpecialUtils/ParticleBaseTypeDefs.h
+++ /dev/null
@@ -1,15 +0,0 @@
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-#ifndef PARTICLEBASE_TYPE_DEFS_H
-#define PARTICLEBASE_TYPE_DEFS_H
-
-#include "ParticleEvent/ParticleBaseContainer.h"
-
-typedef std::vector<const ParticleBaseContainer::base_value_type*> ParticleVect;
-
-typedef std::vector<ParticleBaseContainer::base_value_type*> ListOfParticles;
-
-#endif /// PARTICLEBASE_TYPE_DEFS_H
-
diff --git a/PhysicsAnalysis/AnalysisCommon/SpecialUtils/SpecialUtils/PhotonTypeDefs.h b/PhysicsAnalysis/AnalysisCommon/SpecialUtils/SpecialUtils/PhotonTypeDefs.h
deleted file mode 100755
index cb63eaef043023ff3e26aa11b7f73b3967af5181..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/SpecialUtils/SpecialUtils/PhotonTypeDefs.h
+++ /dev/null
@@ -1,15 +0,0 @@
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-#ifndef PHOTON_TYPE_DEFS_H
-#define PHOTON_TYPE_DEFS_H
-
-#include "egammaEvent/PhotonContainer.h"
-
-typedef std::vector<const PhotonContainer::base_value_type*> PhotonVect;
-
-typedef std::vector<PhotonContainer::base_value_type*> ListOfPhotons;
-
-#endif /// PHOTON_TYPE_DEFS_H
-
diff --git a/PhysicsAnalysis/AnalysisCommon/SpecialUtils/SpecialUtils/TauJetTypeDefs.h b/PhysicsAnalysis/AnalysisCommon/SpecialUtils/SpecialUtils/TauJetTypeDefs.h
deleted file mode 100755
index 4a8c9ae33f708f9abcab315fbca073a33fdc24ce..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/SpecialUtils/SpecialUtils/TauJetTypeDefs.h
+++ /dev/null
@@ -1,15 +0,0 @@
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-#ifndef TAUJET_TYPE_DEFS_H
-#define TAUJET_TYPE_DEFS_H
-
-#include "tauEvent/TauJetContainer.h"
-
-typedef std::vector<const Analysis::TauJetContainer::base_value_type*> TauJetVect;
-
-typedef std::vector<Analysis::TauJetContainer::base_value_type*> ListOfTauJets;
-
-#endif /// TAUJET_TYPE_DEFS_H
-
diff --git a/PhysicsAnalysis/AnalysisCommon/SpecialUtils/doc/packagedoc.h b/PhysicsAnalysis/AnalysisCommon/SpecialUtils/doc/packagedoc.h
deleted file mode 100644
index 5d76aced634b27142849df2f86d572563667932b..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/SpecialUtils/doc/packagedoc.h
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-/**
-@page SpecialUtils_page SpecialUtils
-
-@section SpecialUtils_introductionSpecialUtils Introduction
-This package contains non-general tools, i.e., tools for Neutrino. General tools should be put in AnalysisTools/Utils.
-
-@section SpecialUtils_packagecontentSpecialUtils Package Contents
-SpecialUtils contains the following classes:
-
-- NeutrinoUtils : tools for Neutrino. There are 2 utilities here. One where the W mass constraint is used to find
-the longitudinal component of the neutrino momentum, for example in W --> e nu, assuming that the transverse
-missing energy is carried away by the neutrino. Up to 2 solutions can be obtained. The other is where one uses
-the collinear approximation to solve for the neutrinos in the decay X --> a + b + Missing Energy, for example Z
--> tautau -> tauJet + tauJet + Missing Energy. The constraint is that a and b are collinear with the taus
-(either leptons or TauJets). Thus, 'a' carries a fraction x_a of one tau's momentum and that tau's neutrinos
-carry (1-x_a) of that tau's momentum. Both utilities return a container of Neutrino objects. There is
-a class for the neutrino, Neutrino.h, in the package PhysicsAnalysis/AnalysisCommon/ParticleEvent.
-
-
-
-*/
diff --git a/PhysicsAnalysis/AnalysisCommon/UserAnalysis/CMakeLists.txt b/PhysicsAnalysis/AnalysisCommon/UserAnalysis/CMakeLists.txt
deleted file mode 100644
index 46bccb789c96dbdee3422fcbf71399537525f791..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/UserAnalysis/CMakeLists.txt
+++ /dev/null
@@ -1,55 +0,0 @@
-################################################################################
-# Package: UserAnalysis
-################################################################################
-
-# Declare the package name:
-atlas_subdir( UserAnalysis )
-
-# Declare the package's dependencies:
-atlas_depends_on_subdirs( PUBLIC
-                          Control/AthenaBaseComps
-                          Control/StoreGate
-                          GaudiKernel
-                          PhysicsAnalysis/AnalysisCommon/AnalysisTools
-                          PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils
-                          Trigger/TrigAnalysis/TrigDecisionTool
-                          Trigger/TrigEvent/TrigCaloEvent
-                          Trigger/TrigEvent/TrigInDetEvent
-                          Trigger/TrigEvent/TrigMuonEvent
-                          Trigger/TrigEvent/TrigParticle
-                          PRIVATE
-                          Control/AthenaKernel
-                          Calorimeter/CaloEvent
-                          Event/NavFourMom
-                          Event/xAOD/xAODEventInfo
-                          PhysicsAnalysis/AnalysisTrigger/AnalysisTriggerEvent
-                          PhysicsAnalysis/JetTagging/JetTagInfo
-                          PhysicsAnalysis/TruthParticleID/McParticleEvent
-                          Reconstruction/Jet/JetEvent
-                          Reconstruction/MissingETEvent
-                          Reconstruction/MuonIdentification/muonEvent
-                          Reconstruction/Particle
-                          Reconstruction/egamma/egammaEvent
-                          Reconstruction/tauEvent
-                          Tracking/TrkEvent/VxVertex
-                          Trigger/TrigEvent/TrigSteeringEvent )
-
-# External dependencies:
-find_package( CLHEP )
-find_package( ROOT COMPONENTS Core Tree MathCore Hist RIO pthread MathMore Minuit Minuit2 Matrix Physics HistPainter Rint Graf Graf3d Gpad Html Postscript Gui GX11TTF GX11 )
-
-# tag ROOTBasicLibs was not recognized in automatic conversion in cmt2cmake
-
-# tag ROOTSTLDictLibs was not recognized in automatic conversion in cmt2cmake
-
-# Component(s) in the package:
-atlas_add_component( UserAnalysis
-                     src/*.cxx
-                     src/components/*.cxx
-                     INCLUDE_DIRS ${ROOT_INCLUDE_DIRS} ${CLHEP_INCLUDE_DIRS}
-                     LINK_LIBRARIES ${ROOT_LIBRARIES} ${CLHEP_LIBRARIES} AthenaBaseComps StoreGateLib SGtests GaudiKernel AthAnalysisToolsLib UserAnalysisUtilsLib TrigDecisionToolLib TrigCaloEvent TrigInDetEvent TrigMuonEvent TrigParticle AthenaKernel CaloEvent NavFourMom xAODEventInfo AnalysisTriggerEvent JetTagInfo McParticleEvent JetEvent MissingETEvent muonEvent Particle egammaEvent tauEvent VxVertex TrigSteeringEvent )
-
-# Install files from the package:
-atlas_install_headers( UserAnalysis )
-atlas_install_joboptions( share/*.py )
-
diff --git a/PhysicsAnalysis/AnalysisCommon/UserAnalysis/UserAnalysis/AnalysisSkeleton.h b/PhysicsAnalysis/AnalysisCommon/UserAnalysis/UserAnalysis/AnalysisSkeleton.h
deleted file mode 100755
index 01922689f44b7d0452d5533cbca56df56b91351a..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/UserAnalysis/UserAnalysis/AnalysisSkeleton.h
+++ /dev/null
@@ -1,288 +0,0 @@
-/*
-  Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
-*/
-
-#ifndef ANALYSIS_SKELETON_H
-#define ANALYSIS_SKELETON_H
-/////////////////////////////////////////////////////////////////////////////////////////////////////
-/// Name    : AnalysisSkeleton.h
-/// Package : offline/PhysicsAnalysis/AnalysisCommon/UserAnalysis
-/// Author  : Ketevi A. Assamagan
-/// Created : July 2004
-///
-/// DESCRIPTION:
-///
-/// This class is an analysis skeleton - the user can implement their analysis here
-/// This class is also used for the demonstration of the distributed analysis
-/// Ketevi A. Assamagan on June 9, 2004
-///
-/// Vivek Jain - March 11, 2010
-/// This file has been extensively modified since creation. It now shows you how to access
-/// trigger information. Preparation of objects, e.g., electrons, muons, jets, etc., 
-/// has been pushed off into the UserAnalysisUtils package
-///
-/// Vivek Jain - July 13, 2011
-/// Modified to use AthAlgorithm instead of CBNT_AthenaAwareBase.
-
-/// The code here also does overlap removal
-///
-/// Please see https://twiki.cern.ch/twiki/bin/view/AtlasProtected/PhysicsAnalysisWorkBookAODAnalysisRel17#Writing_your_own_analysis_code for details relating to release 17
-/// 
-///
-///////////////////////////////////////////////////////////////////////////////////////////////////////
-
-#include "GaudiKernel/ToolHandle.h"
-//#include "GaudiKernel/Algorithm.h"
-#include "GaudiKernel/ObjectVector.h"
-#include "CLHEP/Units/SystemOfUnits.h"
-#include "GaudiKernel/ITHistSvc.h"
-#include "AthenaBaseComps/AthAlgorithm.h"
-
-#include "AnalysisTools/AnalysisTools.h"
-
-#include "UserAnalysisUtils/UserAnalysisSelectionTool.h"
-#include "UserAnalysisUtils/UserAnalysisPreparationTool.h"
-#include "UserAnalysisUtils/UserAnalysisOverlapCheckingTool.h"
-#include "UserAnalysisUtils/UserAnalysisOverlapRemovalTool.h"
-
-#include "TrigDecisionTool/TrigDecisionTool.h"
-
-//#include "TLorentzVector.h"
-//#include "CLHEP/Vector/LorentzVector.h"
-
-
-#include <string>
-
-#include "TH1.h"
-
-class JetCollection;
-
-//using namespace Trig;
-namespace Trig {
-  class ChainGroup;
-}
-
-// upgrade to inherit from AthAlgorithm
- 
-class AnalysisSkeleton : public AthAlgorithm  {
-
- public:
-
-   AnalysisSkeleton(const std::string& name, ISvcLocator* pSvcLocator);
-   ~AnalysisSkeleton();
-
-   virtual StatusCode start() override;
-   virtual StatusCode initialize() override;
-   virtual StatusCode finalize() override;
-   virtual StatusCode execute() override;
-   virtual StatusCode initEvent();
-
- private:
-
-
-   /** methods called by execute() */
-
-   // to add event info to new ntuple (used to go by default in CollectionTree)
-   StatusCode addEventInfo(); 
-   //
-   StatusCode electronSkeleton();
-   StatusCode triggerSkeleton();
-
-   /** an example of pre-selection, overlap-checking and overlap removal */
-   StatusCode analysisPreparation();
-
-   /** look at b-jet tagging information */
-   StatusCode bjetInfo();
-
-   /** get quark flavour of jets */
-   int getQuarkJetFlavour(JetCollection::const_iterator jetItr);
-
-   /** get missing ET information */
-   StatusCode getMissingET();
-
-   /** make plots for SUSY studies */
-   StatusCode SusyStudies();
-
-   /** get pT of top quarks */
-   StatusCode getTopQpT(int &, double&, double&);
-
- private:
-
-   /** get a handle to the tool helper */
-   ToolHandle<AnalysisTools> m_analysisTools;
-
-   /** get a handle on the user tool for pre-selection and overlap removal */
-   ToolHandle<UserAnalysisSelectionTool>       m_analysisSelectionTool;
-   ToolHandle<UserAnalysisPreparationTool>     m_analysisPreparationTool;
-   ToolHandle<UserAnalysisOverlapCheckingTool> m_analysisOverlapCheckingTool;
-   ToolHandle<UserAnalysisOverlapRemovalTool>  m_analysisOverlapRemovalTool;
-
-   /** tool to access the trigger decision */
-   ToolHandle<Trig::TrigDecisionTool> m_trigDec;
-
-   /** a handle on the Hist/TTree registration service */
-   ITHistSvc * m_thistSvc;
-
-   /** the key of the Electron Container to retrieve from the AOD */
-   std::string m_electronContainerName; 
-
-   /** name of the AOD truth particle container to retrieve from StoreGate */
-   std::string m_truthParticleContainerName;
-
-   /** key to get missing ET information */
-   std::string m_missingETObjectName;
-
-
-   /// The missing ET object
-   const MissingET * m_pMissing;
-   double m_pxMiss;
-   double m_pyMiss;
-   double m_ptMiss;   
-
-   /** additional user cuts after pre-selections */ 
- 
-   double m_deltaRMatchCut;
-   double m_maxDeltaR;
-
-   /** electron specific cuts */
-   double m_etElecCut;
-   double m_elecCone;
-   double m_etaElecCut;
-
-   /** bjet specific cuts */
-   double m_bjetWt_ip3dsv1Cut;
-   double m_bjet_etaCut;
-   double m_bjet_etCut;
-
-   /** missing ET cuts */
-   double m_missingETCut;
-
-   /** Atlfast data? */
-   bool m_isAtlFastData;
-
-   /** truth input? */
-   bool m_doTruth;
-
-   /** min Jet ET cut for SUSY studies */
-   double m_SusyJetMinEt;
-   
-  /** Histograms */
-  TH1F* m_h_elecpt;
-  TH1F* m_h_eleceta;
-  TH1F* m_h_elec_deltaRMatch;
-
-  TH1F* m_h_jet_eta_beforeOR;
-  TH1F* m_h_jet_et_beforeOR;
-  TH1F* m_h_jet_ip3dsv1Wt_beforeOR;
-  TH1F* m_h_jet_label_beforeOR;
-  TH1F* m_h_jet_ip3dsv1Wt_bjet_beforeOR;
-  TH1F* m_h_jet_ip3dsv1Wt_ujet_beforeOR;
-
-  TH1F* m_h_jet_eta_afterOR;
-  TH1F* m_h_jet_et_afterOR;
-  TH1F* m_h_jet_ip3dsv1Wt_afterOR;
-  TH1F* m_h_jet_label_afterOR;
-  TH1F* m_h_jet_ip3dsv1Wt_bjet_afterOR;
-  TH1F* m_h_jet_ip3dsv1Wt_ujet_afterOR;
-
-  TH1F* m_pxMis;
-  TH1F* m_pyMis;
-  TH1F* m_ptMis;
-
-  TH1F* m_triggerAccepts;
-
-  /** Athena-Aware Ntuple (AAN) variables - branches of the AAN TTree */
-
-  // stuff for new ntuple
-  // The standard AANT, CollectionTree, is bare bones
-  TTree* m_tree_AS; 
-
-  /** Simple variables by Ketevi */
-  int m_aan_size;
-  std::vector<double> * m_aan_eta;
-  std::vector<double> * m_aan_pt;
-  std::vector<double> * m_aan_elecetres;
-
-  /** Variables by VJ */
-  double m_aan_ptMiss;
-  int    m_aan_njets;
-  int    m_aan_njets_etaLT25;
-  int    m_aan_njets_SusyETCut;
-  double m_aan_effmass;
-  double m_aan_ht;
-  double m_aan_maxJetET;
-  int    m_aan_nbjets;
-
-  std::vector<double>* m_aan_JetEta;
-  std::vector<double>* m_aan_JetEt;
-  std::vector<double>* m_aan_JetBTagWt;
-
-  /** Look at final electrons/muons */
-  int m_aan_NFinalEl;
-  int m_aan_NFinalMu;
-
-  std::vector<double>* m_aan_FinalElEta;
-  std::vector<double>* m_aan_FinalElPt;
-  std::vector<double>* m_aan_FinalElEtCone20;
-  //std::vector<double>* m_aan_FinalElPtrat;
-
-  std::vector<double>* m_aan_FinalMuEta;
-  std::vector<double>* m_aan_FinalMuPt;
-  std::vector<double>* m_aan_FinalMuEtCone20;
-  std::vector<int>*    m_aan_FinalMuBestMat;
-  std::vector<double>* m_aan_FinalMuMatChi2;
-
-  double m_aan_FinalLepEtSum;
-  double m_aan_FinalElEtSum;
-  double m_aan_FinalMuEtSum;
-
-  /** number of top quarks */
-
-  int m_aan_NumTopQ;
-  double m_aan_pTtop1;
-  double m_aan_pTtop2;
-
-  /** trigger branches */
-  float m_aan_Trig_efJet_et  ;
-  float m_aan_Trig_efJet_eta ;
-  float m_aan_Trig_efJet_phi ;
-  float m_aan_Trig_l2Jet_et  ;
-  float m_aan_Trig_l2Jet_eta ;
-  float m_aan_Trig_l2Jet_phi ;
-  float m_aan_Trig_l1Jet_et88;
-  float m_aan_Trig_l1Jet_eta ;
-  float m_aan_Trig_l1Jet_phi ;
-
-  bool m_doTrigger;
-  std::string m_investigateChain;
-  std::vector<std::string> m_triggerChains;
-  std::map<std::string,int> m_triggersPassed;
-
-  unsigned int m_eventNr;
-  const Trig::ChainGroup* m_all;
-  const Trig::ChainGroup* m_allL1;
-  const Trig::ChainGroup* m_allL2;
-  const Trig::ChainGroup* m_allEF;
-  std::vector<std::string> m_chain_names;
-
-  unsigned int    m_runNumber;
-  unsigned int    m_eventNumber;
-  unsigned int    m_eventTime;
-  unsigned int    m_lumiBlock;
-  unsigned int    m_bCID;
-  double  m_eventWeight;
-  //unsigned int    m_statusElement;
-  /*
-  unsigned int    m_lVL1ID;
-  unsigned int    m_lvl1TriggerType;
-  std::vector<unsigned int>* m_lvl1TriggerInfo;
-  std::vector<unsigned int>* m_lvl2TriggerInfo;
-  std::vector<unsigned int>* m_evtFilterInfo;
-  std::vector<std::string>* m_streamTagName;
-  std::vector<std::string>* m_streamTagType;
-  */
-
-};
-
-#endif // ANALYSIS_SKELETON_H
-
diff --git a/PhysicsAnalysis/AnalysisCommon/UserAnalysis/doc/packagedoc.h b/PhysicsAnalysis/AnalysisCommon/UserAnalysis/doc/packagedoc.h
deleted file mode 100644
index 5e17d49fa869bfa4043558eac27e1d85fe963333..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/UserAnalysis/doc/packagedoc.h
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-/**
-
-@page UserAnalysis_page The User Analysis Package
-
- - This class is an analysis skeleton - the user can implement their analysis here
-
- - This class is also used for the demonstration of the distributed analysis
- 
- - Some electron histograms are used for the distributed case. The user may
-remove the histograms and the electron-specific code if not needed.
-
- - Note: a single-algorithm structure for analysis code does not scale. For detailed 
-analysis examples, look in CVS: PhysicsAnalysis/AnalysisCommon/AnalysisExamples/
-
- - pathena: a glue script to submit user-defined jobs to distributed analysis systems
-
- - FakeAppMgr.py: a fake application manager to collect job configuration without DLL loading
-
- - ConfigExtractor.py: This jobO extracts job configuration
-
- - buildJob: a generic transformation to compile user-defined sources on a remote site
-
- - runAthena: a generic transformation to run athena on a remote site
-
- - getPFN: this is used to get a physical file name (PFN) for a logical file name (LFN)
-
- - Client.py,FileSpec.py,JobSpec.py: they are used by pathena. Users don't have to modify them
- 
- - For questions or comments: Ketevi A. Assamagan
-   ketevi@bnl.gov
-
-*/
diff --git a/PhysicsAnalysis/AnalysisCommon/UserAnalysis/share/AANSelection_AODanalysis_topOptions.py b/PhysicsAnalysis/AnalysisCommon/UserAnalysis/share/AANSelection_AODanalysis_topOptions.py
deleted file mode 100755
index 1cc9380b2b7ee731127f7aff78fc7369971f8c3b..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/UserAnalysis/share/AANSelection_AODanalysis_topOptions.py
+++ /dev/null
@@ -1,81 +0,0 @@
-# Author: Ketevi A. Assamagan
-# BNL, June 12, 2004
-
-# get a handle on the ServiceManager which holds all the services
-from AthenaCommon.AppMgr import ServiceMgr
-
-# Event selector
-import AthenaPoolCnvSvc.ReadAthenaPool
-
-# Particle Properties
-from PartPropSvc.PartPropSvcConf import PartPropSvc
-
-# the POOL converters
-include( "ParticleBuilderOptions/ESD_PoolCnv_jobOptions.py" )
-include( "ParticleBuilderOptions/AOD_PoolCnv_jobOptions.py")
-include( "ParticleBuilderOptions/McAOD_PoolCnv_jobOptions.py")
-include( "EventAthenaPool/EventAthenaPool_joboptions.py" )
-
-# The AAN input file and AAN event selection
-ServiceMgr.EventSelector.InputCollections = [ "AnalysisSkeleton.aan.root" ]
-ServiceMgr.EventSelector.Query="NElectrons>0 && NElectrons<3 && abs(ElectronEta[0])<2.5 && abs(ElectronEta[1])<2.5 && ElectronPt[0]>10000 && ElectronPt[1]>10000"
-ServiceMgr.EventSelector.CollectionType = "ExplicitROOT"
-
-# Athena-Aware NTuple making Tools
-CBNTAthenaAware = True
-include ("CBNT_Athena/CBNT_AthenaAware_jobOptions.py")
-include ("CBNT_Athena/CBNT_EventInfo_jobOptions.py")
-
-from AthenaCommon.AlgSequence import AlgSequence
-topSequence = AlgSequence() 
-# list of the algorithms to be executed at run time
-
-############# The properties of the AnalysisSkeleton Algorithm
-from UserAnalysis.UserAnalysisConf import AnalysisSkeleton
-CBNT_AthenaAware += AnalysisSkeleton()
-AnalysisSkeleton = AnalysisSkeleton()
-AnalysisSkeleton.ElectronContainer = "ElectronAODCollection"
-AnalysisSkeleton.MCParticleContainer = "SpclMC"
-AnalysisSkeleton.DeltaRMatchCut = 0.2
-AnalysisSkeleton.MaxDeltaR = 0.9999
-AnalysisSkeleton.ElectronEtCut  = 10.0*GeV
-AnalysisSkeleton.ElectronEtaCut = 2.5
-AnalysisSkeleton.ElectronCone   = 0.9
-AnalysisSkeleton.OutputLevel = INFO
-
-##########################################
-# setup TTree registration Service
-# save ROOT histograms and Tuple
-from GaudiSvc.GaudiSvcConf import THistSvc
-ServiceMgr += THistSvc()
-ServiceMgr.THistSvc.Output = [ "AANT DATAFILE='AnalysisSkeleton.AANSel.aan.root' OPT='RECREATE'" ]
-from AnalysisTools.AnalysisToolsConf import AANTupleStream
-topSequence += AANTupleStream()
-AANTupleStream = AANTupleStream()
-AANTupleStream.ExtraRefNames = [ "StreamESD","Stream1" ]
-AANTupleStream.OutputName = 'AnalysisSkeleton.AANSel.aan.root'
-AANTupleStream.WriteInputDataHeader = True
-AANTupleStream.OutputLevel = WARNING
-
-# Set output level threshold (2=DEBUG, 3=INFO, 4=WARNING, 5=ERROR, 6=FATAL )
-ServiceMgr.MessageSvc.OutputLevel = DEBUG
-
-# Number of Events to process
-theApp.EvtMax = 500
-
-###################### For interactive analysis
-#include ("PyAnalysisCore/InitPyAnalysisCore.py")
-
-###################### Detail time measurement and auditors
-# Use auditors
-#theApp.AuditAlgorithms=True
-
-ServiceMgr.theAuditorSvc = AuditorSvc()
-ServiceMgr.theAuditorSvc.Auditors  += [ "ChronoAuditor"]
-
-ServiceMgr.AthenaPoolCnvSvc.UseDetailChronoStat = TRUE
-
-#StoreGateSvc = Service( "StoreGateSvc" )
-#StoreGateSvc.Dump = True 
-#MessageSvc.OutputLevel = DEBUG
-
diff --git a/PhysicsAnalysis/AnalysisCommon/UserAnalysis/share/AODtoAOD_viaTagSelection_topOptions.py b/PhysicsAnalysis/AnalysisCommon/UserAnalysis/share/AODtoAOD_viaTagSelection_topOptions.py
deleted file mode 100755
index b3543280f77f873f21333e21e8f9a55b30342391..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/UserAnalysis/share/AODtoAOD_viaTagSelection_topOptions.py
+++ /dev/null
@@ -1,33 +0,0 @@
-####################################################
-#Author - Ketevi A. Assamagan
-
-#DetDescrVersion="ATLAS-XXX-..."
-
-doHist = False
-doCBNT = False
-doWriteTAG = False
-doWriteESD = False
-doESD = False
-readAOD = True
-doAOD = False
-doWriteAOD = True
-# read the TAG as input to the job
-readTAG = True
-
-# Number of Events
-EvtMax = 2000
-
-# Use the TAG that you just produced to select the events
-# define also the selection criteria
-PoolTAGInput = ["TAG.pool.root"]
-PoolInputQuery="NLooseMuon>0 || NLooseElectron>0"
-
-#Create a new RDO, ESD and AOD that contain only selected events
-PoolAODOutput = "AOD.TagSel.root"
-
-# main reconstruction job jobOptions
-include ("RecExCommon/RecExCommon_topOptions.py")
-#######################################################
-
-
-
diff --git a/PhysicsAnalysis/AnalysisCommon/UserAnalysis/share/AODtoTAG_topOptions.py b/PhysicsAnalysis/AnalysisCommon/UserAnalysis/share/AODtoTAG_topOptions.py
deleted file mode 100755
index 111534d7867de2ea4a97bff6c0edafdba61bcc28..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/UserAnalysis/share/AODtoTAG_topOptions.py
+++ /dev/null
@@ -1,24 +0,0 @@
-####################
-#DetDescrVersion="ATLAS-XXX-..."
-
-PoolAODInput=["AOD.pool.root"]
-PoolTAGOutput="TAG.pool.root"
-
-if not 'EvtMax' in dir():
-    EvtMax=10
-
-
-doHist=False
-doCBNT=False
-readAOD=True
-doWriteESD=False
-doWriteAOD=False
-doAOD=False
-doESD=False
-doWriteTAG=True
-
-
-######################## main jobOption
-include ("RecExCommon/RecExCommon_topOptions.py")
-##############################
-
diff --git a/PhysicsAnalysis/AnalysisCommon/UserAnalysis/share/AnalysisMaster.py b/PhysicsAnalysis/AnalysisCommon/UserAnalysis/share/AnalysisMaster.py
deleted file mode 100755
index c6de904544eacdb6d53131925cc9c50d87323e00..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/UserAnalysis/share/AnalysisMaster.py
+++ /dev/null
@@ -1,62 +0,0 @@
-############################################################
-# AnalysisMaster.py
-# These are the master job options for running your analysis
-# Include your own job options as shown below (rec.UserAlgs)
-# This is essentially the same file as in AnalysisExamples
-############################################################
-
-# We use "jp" to pass the input file to the userAlgs jO file defined below
-# if you want to run over multiple files locally, use glob as shown below
-
-from AthenaCommon.AthenaCommonFlags import jobproperties as jp
-#jp.AthenaCommonFlags.FilesInput = ['/afs/cern.ch/atlas/maxidisk/d49/AOD.191045._001937.pool.root.1']
-jp.AthenaCommonFlags.FilesInput = ['/afs/cern.ch/atlas/maxidisk/d49/data11_7TeV.00178109.physics_Muons.merge.AOD.f351_m765._lb0846-lb0851._0001.1']
-
-
-# to run over multiple input files
-##################
-#DATAPATH = '/afs/cern.ch/atlas/maxidisk/d49/'
-#from glob import glob
-#INPUT = glob(DATAPATH + 'AOD*.root*')
-#print INPUT
-#jp.AthenaCommonFlags.FilesInput = INPUT
-
-##################
-#
-from RecExConfig.RecFlags import rec
-
-jp.AthenaCommonFlags.EvtMax=-1 # number of events to process
-
-# include your algorithm job options here
-
-rec.UserAlgs=[ "AnalysisSkeleton_topOptions_NEW.py" ]
-#rec.UserAlgs=[ "ZeeZmmOnAODExample_jobOptions_NEW.py" ] 
-#rec.UserAlgs=[ "ZtautauExample_jobOptions_NEW.py" ] 
-#rec.UserAlgs=[ "ttbarExample_jobOptions_NEW.py" ] 
-#rec.UserAlgs=[ "VFitZmmOnAOD_jobOptions_NEW.py" ]
-
-# Output log setting; this is for the framework in general
-# You may over-ride this in your job options for your algorithm
-rec.OutputLevel = INFO
-
-# Control the writing of your own n-tuple in the alg's job options
-# The following line only turns off the standard CBNT made by RecExCommon
-# and has no bearing on the ntuple that you make in your user jobOptions file.
-#
-rec.doCBNT = False
-
-# for analysis you don't need to write out anything
-rec.doWriteESD.set_Value_and_Lock(False)
-rec.doWriteAOD.set_Value_and_Lock(False)
-rec.doWriteTAG.set_Value_and_Lock(False)
-
-# to turn off perfmon - avoids writing out big ntuples
-rec.doPerfMon=False
-
-## need these two flags to turn on the LumiBlockMetaDataTool and the output MetaDataStore
-rec.doDPD=True
-rec.doFileMetaData=True
-
-# main jobOption - must always be included
-include ("RecExCommon/RecExCommon_topOptions.py")
-
diff --git a/PhysicsAnalysis/AnalysisCommon/UserAnalysis/share/AnalysisSkeleton_topOptions.py b/PhysicsAnalysis/AnalysisCommon/UserAnalysis/share/AnalysisSkeleton_topOptions.py
deleted file mode 100755
index 0a96b4462f08a7e562120ff47a8b4362fe3be5f1..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/UserAnalysis/share/AnalysisSkeleton_topOptions.py
+++ /dev/null
@@ -1,217 +0,0 @@
-# Author: Ketevi A. Assamagan
-# BNL, June 12, 2004
-
-# get a handle on the ServiceManager which holds all the services
-from AthenaCommon.AppMgr import ToolSvc
-
-from AthenaCommon.AppMgr import ServiceMgr
-# Event selector
-import AthenaPoolCnvSvc.ReadAthenaPool
-
-# Particle Properties
-from PartPropSvc.PartPropSvcConf import PartPropSvc
-
-# the POOL converters
-include( "ParticleBuilderOptions/ESD_PoolCnv_jobOptions.py" )
-include( "ParticleBuilderOptions/AOD_PoolCnv_jobOptions.py")
-include( "ParticleBuilderOptions/McAOD_PoolCnv_jobOptions.py")
-include( "EventAthenaPool/EventAthenaPool_joboptions.py" )
-
-# this line is needed for reading AOD files made with pre-14.1.X releases
-# If you leave it uncommented, you can still read post-14.1.X releases
-
-include ("JetRec/ParticleJetCompatibility.py")
-
-
-
-# The AOD input file
-#ServiceMgr.EventSelector.InputCollections = [ "dcache:AOD.pool.root.1" ]
-ServiceMgr.EventSelector.InputCollections = [ "AOD.pool.root" ]
-#ServiceMgr.EventSelector.InputCollections = [ "/tmp/jgoncalo/AOD.065738._00001.pool.root.1"]
-
-# Get the selection, overlap checking and overlap removal tools  
-include ( "UserAnalysisUtils/UserAnalysisSelectionTool_jobOptions.py" )
-include ( "UserAnalysisUtils/UserAnalysisPreparationTool_jobOptions.py" )
-include ( "UserAnalysisUtils/UserAnalysisOverlapCheckingTool_jobOptions.py" )
-include ( "UserAnalysisUtils/UserAnalysisOverlapRemovalTool_jobOptions.py" )
-
-# Athena-Aware NTuple making Tools
-CBNTAthenaAware = True
-include ("CBNT_Athena/CBNT_AthenaAware_jobOptions.py")
-include ("CBNT_Athena/CBNT_EventInfo_jobOptions.py")
-
-# list of the algorithms to be executed at run time
-from UserAnalysis.UserAnalysisConf import AnalysisSkeleton
-topSequence.CBNT_AthenaAware += AnalysisSkeleton() 
-AnalysisSkeleton = AnalysisSkeleton()
-
-############# The properties of the AnalysisSkeleton Algorithm
-AnalysisSkeleton.AnalysisSelectionTool       = ToolSvc.UserAnalysisSelectionTool
-AnalysisSkeleton.AnalysisPreparationTool     = ToolSvc.UserAnalysisPreparationTool
-AnalysisSkeleton.AnalysisOverlapCheckingTool = ToolSvc.UserAnalysisOverlapCheckingTool
-AnalysisSkeleton.AnalysisOverlapRemovalTool  = ToolSvc.UserAnalysisOverlapRemovalTool
-
-IsAtlfast = False
-
-AnalysisSkeleton.McParticleContainer = "SpclMC"
-AnalysisSkeleton.ElectronContainer = "ElectronAODCollection"
-AnalysisSkeleton.MissingETObject = "MET_RefFinal"
-AnalysisSkeleton.DeltaRMatchCut = 0.2
-AnalysisSkeleton.MaxDeltaR = 0.9999
-AnalysisSkeleton.ElectronEtCut  = 10.0*GeV
-AnalysisSkeleton.ElectronEtaCut = 2.5
-AnalysisSkeleton.ElectronCone   = 0.9
-AnalysisSkeleton.bjetWt_IP3DSV1Cut = 6
-AnalysisSkeleton.bjet_etaCut = 2.5
-AnalysisSkeleton.bjet_etCut = 15.0*GeV
-AnalysisSkeleton.MissingETCut = 20.0*GeV
-AnalysisSkeleton.OutputLevel = INFO
-AnalysisSkeleton.IsAtlFastData = IsAtlfast
-AnalysisSkeleton.SusyJetMinEt      = 50*GeV
-
-# Change the selections if necessary
-# Please read jO files in UserAnalysisUtils/ for other options
-#
-ToolSvc.UserAnalysisSelectionTool.IsAtlfastData = IsAtlfast
-#AnalysisSkeleton.AnalysisSelectionTool.ElectronIsEMFlag="Loose"
-ToolSvc.UserAnalysisSelectionTool.MuonPt=6.0*GeV
-ToolSvc.UserAnalysisSelectionTool.JetPt=20.0*GeV
-ToolSvc.UserAnalysisSelectionTool.ElectronEtaWindowCut=False
-ToolSvc.UserAnalysisSelectionTool.ElectronEtaWindowCenter=1.445
-ToolSvc.UserAnalysisSelectionTool.ElectronEtaWindow=0.075
-ToolSvc.UserAnalysisSelectionTool.OutputLevel = INFO
-ToolSvc.UserAnalysisSelectionTool.egDetailContainerName="egDetailAOD"
-
-# configure the overlap checking tool
-ToolSvc.UserAnalysisOverlapCheckingTool.OverlapDeltaR=0.2
-ToolSvc.UserAnalysisOverlapCheckingTool.OverlapDeltaRWithJets=0.3
-
-# Building the containers of selected objects
-ToolSvc.UserAnalysisPreparationTool.IsAtlfastData = IsAtlfast
-ToolSvc.UserAnalysisPreparationTool.OutputLevel = INFO
-
-# input container keys to the pre-selection tool
-ToolSvc.UserAnalysisPreparationTool.InputContainerKeys = [
-    "ElectronAODCollection",
-    "StacoMuonCollection",
-    "TauRecContainer",
-    "AntiKt4TowerJets",
-    "PhotonAODCollection",
-    "CaloCalTopoCluster",
-    "TrackParticleCandidate"
-   ]
-# Output container keys after the pre-selections
-ToolSvc.UserAnalysisPreparationTool.OutputContainerKeys=[ 
-    "SelectedElectronCollection",
-    "SelectedStacoMuonCollection",
-    "SelectedTauRecContainer",
-    "SelectedAntiKt4TowerJets",
-    "SelectedPhotonAODCollection",
-    "SelectedCaloCalTopoCluster",
-    "SelectedTrackParticleCandidate"
-   ]
-
-# Use the output containers from the selection tool as input to the overlap removal tool
-# the order matters 
-# in this setting, first electrons will be chosen,
-# then if a muon overlaps an electron, the muon is rejected, and so on
-
-ToolSvc.UserAnalysisOverlapRemovalTool.InputContainerKeys=[  
-    "SelectedElectronCollection",
-    "SelectedStacoMuonCollection",
-    "SelectedTauRecContainer",
-    "SelectedAntiKt4TowerJets",
-    "SelectedPhotonAODCollection",
-    "SelectedCaloCalTopoCluster",
-    "SelectedTrackParticleCandidate"
-   ]
-
-# The output container keys after the overlap-removal
-# Note that a container of all leptons is provided on output
-# as well as a container of all final state particles
-ToolSvc.UserAnalysisOverlapRemovalTool.IsAtlfastData          = IsAtlfast
-ToolSvc.UserAnalysisOverlapRemovalTool.OuputObjectKey         = "FinalStateObjectCollection"
-ToolSvc.UserAnalysisOverlapRemovalTool.OutputLeptonKey        = "FinalStateLeptonCollection"
-ToolSvc.UserAnalysisOverlapRemovalTool.OutputPhotonKey        = "FinalStatePhotonCollection"
-ToolSvc.UserAnalysisOverlapRemovalTool.OutputElectronKey      = "FinalStateElectronCollection"
-ToolSvc.UserAnalysisOverlapRemovalTool.OutputMuonKey          = "FinalStateMuonCollection"
-ToolSvc.UserAnalysisOverlapRemovalTool.OutputTauJetKey        = "FinalStateTauJetCollection"
-ToolSvc.UserAnalysisOverlapRemovalTool.OutputCalloClusterKey  = "FinalStateCaloClusterCollection"
-ToolSvc.UserAnalysisOverlapRemovalTool.OutputTrackParticleKey = "FinalStateTrackParticleCollection"
-ToolSvc.UserAnalysisOverlapRemovalTool.OutputJetKey           = "FinalStateJetCollection"
-ToolSvc.UserAnalysisOverlapRemovalTool.OutputBJetKey          = "FinalStateBJetCollection"
-ToolSvc.UserAnalysisOverlapRemovalTool.OutputLightJetKey      = "FinalStateLightJetCollection"
-
-##########################################
-##
-## Set up the trigger part
-##
-##########################################
-
-AnalysisSkeleton.DoTrigger = True
-if AnalysisSkeleton.DoTrigger:
-   # needed for TriggerConfigGetter (will change with 15.3.0)
-   from RecExConfig.RecFlags  import rec
-   rec.readRDO=False
-   rec.readAOD=True
-   rec.doWriteAOD=False
-   rec.doWriteESD=False
-
-   # To read files with trigger config stored as in-file meta-data,
-   from TriggerJobOpts.TriggerFlags import TriggerFlags
-   TriggerFlags.configurationSourceList = ['ds']
-
-   # set up trigger config service
-   from TriggerJobOpts.TriggerConfigGetter import TriggerConfigGetter
-   cfg =  TriggerConfigGetter()
-
-   ## chains and groups for which to print trigger statistics
-   photons = ["L1_2EM13", "L2_2g10_mu6", "EF_2g10"]
-   singletaus = ["EF_tau12_loose", "EF_tau16_loose", "EF_tau16i_loose", "EF_tau20_loose", "EF_tau20i_loose",
-                 "EF_tau29_loose", "EF_tau29i_loose", "EF_tau38_loose", "EF_tau50_loose", "EF_tau84_loose"]
-   twotaus = ["EF_2tau20i_loose", "EF_2tau29i_loose", "EF_2tau29i_medium"]
-   combinedtaus = ["EF_tau12_loose_e10_loose", "L2_tau16i_loose_2j23", "EF_tau16i_loose_2j23", "EF_tau16i_loose_EFxe40"]
-   AnalysisSkeleton.StatTriggerChains = photons + singletaus + twotaus + combinedtaus;
-
-   #AnalysisSkeleton.InvestigateChain = 'L2_tau16i_loose_2j23'
-   
-
-## Done with trigger setup
-
-##########################################
-# setup TTree registration Service
-# save ROOT histograms and Tuple
-from GaudiSvc.GaudiSvcConf import THistSvc
-ServiceMgr += THistSvc()
-ServiceMgr.THistSvc.Output = [ "AANT DATAFILE='AnalysisSkeleton.aan.root' OPT='RECREATE'" ]
-from AnalysisTools.AnalysisToolsConf import AANTupleStream
-topSequence += AANTupleStream()
-AANTupleStream = AANTupleStream()
-AANTupleStream.ExtraRefNames = [ "StreamESD","Stream1" ]
-AANTupleStream.OutputName = 'AnalysisSkeleton.aan.root'
-AANTupleStream.WriteInputDataHeader = True
-AANTupleStream.OutputLevel = WARNING
-
-# Set output level threshold (2=DEBUG, 3=INFO, 4=WARNING, 5=ERROR, 6=FATAL )
-ServiceMgr.MessageSvc.OutputLevel = INFO
-ServiceMgr.MessageSvc.defaultLimit = 9999999
-
-# Number of Events to process
-theApp.EvtMax = -1
-#theApp.EvtMax = 5
-
-###################### For interactive analysis
-#include ("PyAnalysisCore/InitPyAnalysisCore.py")
-
-from GaudiCommonSvc.GaudiCommonSvcConf import AuditorSvc
-ServiceMgr.AuditorSvc.Auditors  += [ "ChronoAuditor"]
-
-AthenaPoolCnvSvc = Service("AthenaPoolCnvSvc")
-AthenaPoolCnvSvc.UseDetailChronoStat = TRUE
-
-#### test MC dump ###
-
-#from TruthExamples.TruthExamplesConf import DumpMC
-#topSequence += DumpMC()
-#DumpMC.McEventKey = "GEN_AOD"
-       
diff --git a/PhysicsAnalysis/AnalysisCommon/UserAnalysis/share/AnalysisSkeleton_topOptions_AutoConfig.py b/PhysicsAnalysis/AnalysisCommon/UserAnalysis/share/AnalysisSkeleton_topOptions_AutoConfig.py
deleted file mode 100644
index 6658798ccf1e7877afac08d56e83fb4549cf91f4..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/UserAnalysis/share/AnalysisSkeleton_topOptions_AutoConfig.py
+++ /dev/null
@@ -1,237 +0,0 @@
-# Author: Ketevi A. Assamagan
-# BNL, June 12, 2004
-
-# Author: Vivek Jain
-# CERN, January 2010
-
-# This file uses AutoConfig to peek at the input file, and set Geometry tag, etc., correctly
-#
-###########################################################################
-# here give the full path of a file that you run over locally
-# if you are running on the grid, it will ignore what you set here, and do the right thing
-# if you want to run over multiple files locally, use glob as shown below
-
-from AthenaCommon.AthenaCommonFlags import jobproperties as jp
-jp.AthenaCommonFlags.FilesInput = ['/afs/cern.ch/atlas/maxidisk/d49/AOD.191045._001937.pool.root.1']
-
-#DATAPATH = '/afs/cern.ch/atlas/maxidisk/d49/'
-#from glob import glob
-#INPUT = glob(DATAPATH + 'AOD*.root*')
-#print INPUT
-#jp.AthenaCommonFlags.FilesInput = INPUT
-################################################################
-
-# you don't need to set Geometry tags anymore. They are read from the file itself.
-from RecExConfig.RecFlags import rec
-
-# get MetaReader - will use it to extract info for MC/DATA
-from PyUtils.MetaReaderPeeker import metadata
-
-# import the data types 
-import EventKernel.ParticleDataType
-
-# get a handle on the ServiceManager which holds all the services 
-from AthenaCommon.AppMgr import ServiceMgr
-
-include ("RecExCond/RecExCommon_flags.py")
-include( "RecExCond/AllDet_detDescr.py" )
-include( "AthenaPoolCnvSvc/ReadAthenaPool_jobOptions.py" )
-
-# 
-from AthenaCommon.AppMgr import ToolSvc
-
-# this line is needed for reading AOD files made with pre-14.1.X releases
-# If you leave it uncommented, you can still read post-14.1.X releases
-
-include ("JetRec/ParticleJetCompatibility.py")
-
-# The input file is already specified near the top of this file
-ServiceMgr.EventSelector.InputCollections = jp.AthenaCommonFlags.FilesInput()
-
-# Get the selection, overlap checking and overlap removal tools  
-include ( "UserAnalysisUtils/UserAnalysisSelectionTool_jobOptions.py" )
-include ( "UserAnalysisUtils/UserAnalysisPreparationTool_jobOptions.py" )
-include ( "UserAnalysisUtils/UserAnalysisOverlapCheckingTool_jobOptions.py" )
-include ( "UserAnalysisUtils/UserAnalysisOverlapRemovalTool_jobOptions.py" )
-
-from AthenaCommon.AlgSequence import AlgSequence
-topSequence = AlgSequence()
-
-from UserAnalysis.UserAnalysisConf import AnalysisSkeleton
-topSequence += AnalysisSkeleton()
-
-
-############# The properties of the AnalysisSkeleton Algorithm
-topSequence.AnalysisSkeleton.AnalysisSelectionTool       = ToolSvc.UserAnalysisSelectionTool
-topSequence.AnalysisSkeleton.AnalysisPreparationTool     = ToolSvc.UserAnalysisPreparationTool
-topSequence.AnalysisSkeleton.AnalysisOverlapCheckingTool = ToolSvc.UserAnalysisOverlapCheckingTool
-topSequence.AnalysisSkeleton.AnalysisOverlapRemovalTool  = ToolSvc.UserAnalysisOverlapRemovalTool
-
-IsAtlfast = False
-
-topSequence.AnalysisSkeleton.McParticleContainer = "SpclMC"
-topSequence.AnalysisSkeleton.ElectronContainer = "ElectronAODCollection"
-topSequence.AnalysisSkeleton.MissingETObject = "MET_RefFinal"
-topSequence.AnalysisSkeleton.DeltaRMatchCut = 0.2
-topSequence.AnalysisSkeleton.MaxDeltaR = 0.9999
-topSequence.AnalysisSkeleton.ElectronEtCut  = 10.0*GeV
-topSequence.AnalysisSkeleton.ElectronEtaCut = 2.5
-topSequence.AnalysisSkeleton.ElectronCone   = 0.9
-topSequence.AnalysisSkeleton.bjetWt_IP3DSV1Cut = 6
-topSequence.AnalysisSkeleton.bjet_etaCut = 2.5
-topSequence.AnalysisSkeleton.bjet_etCut = 15.0*GeV
-topSequence.AnalysisSkeleton.MissingETCut = 20.0*GeV
-topSequence.AnalysisSkeleton.OutputLevel = INFO
-topSequence.AnalysisSkeleton.IsAtlFastData = IsAtlfast
-topSequence.AnalysisSkeleton.SusyJetMinEt      = 50*GeV
-topSequence.AnalysisSkeleton.DoTruth = False
-
-if 'IS_SIMULATION' in metadata['eventTypes']:
-   topSequence.AnalysisSkeleton.DoTruth = True
-
-print AnalysisSkeleton
-
-# Change the selections if necessary
-# Please read jO files in UserAnalysisUtils/ for other options
-#
-ToolSvc.UserAnalysisSelectionTool.IsAtlfastData = IsAtlfast
-#ToolSvc.UserAnalysisSelectionTool.ElectronIsEMFlag="Loose"
-ToolSvc.UserAnalysisSelectionTool.MuonPt=6.0*GeV
-ToolSvc.UserAnalysisSelectionTool.JetPt=20.0*GeV
-ToolSvc.UserAnalysisSelectionTool.ElectronEtaWindowCut=False
-ToolSvc.UserAnalysisSelectionTool.ElectronEtaWindowCenter=1.445
-ToolSvc.UserAnalysisSelectionTool.ElectronEtaWindow=0.075
-
-# configure the overlap checking tool
-ToolSvc.UserAnalysisOverlapCheckingTool.OverlapDeltaR=0.2
-ToolSvc.UserAnalysisOverlapCheckingTool.OverlapDeltaRWithJets=0.3
-
-# Building the containers of selected objects
-ToolSvc.UserAnalysisPreparationTool.IsAtlfastData = IsAtlfast
-
-# input container keys to the pre-selection tool
-ToolSvc.UserAnalysisPreparationTool.InputContainerKeys = [
-    "ElectronAODCollection",
-    "StacoMuonCollection",
-    "TauRecContainer",
-    "AntiKt4TowerJets",
-    "PhotonAODCollection",
-    "CaloCalTopoCluster",
-    "TrackParticleCandidate"
-   ]
-# Output container keys after the pre-selections
-ToolSvc.UserAnalysisPreparationTool.OutputContainerKeys=[ 
-    "SelectedElectronCollection",
-    "SelectedStacoMuonCollection",
-    "SelectedTauRecContainer",
-    "SelectedAntiKt4TowerJets",
-    "SelectedPhotonAODCollection",
-    "SelectedCaloCalTopoCluster",
-    "SelectedTrackParticleCandidate"
-   ]
-
-# Use the output containers from the selection tool as input to the overlap removal tool
-# the order matters 
-# in this setting, first electrons will be chosen,
-# then if a muon overlaps an electron, the muon is rejected, and so on
-
-ToolSvc.UserAnalysisOverlapRemovalTool.InputContainerKeys=[  
-    "SelectedElectronCollection",
-    "SelectedStacoMuonCollection",
-    "SelectedTauRecContainer",
-    "SelectedAntiKt4TowerJets",
-    "SelectedPhotonAODCollection",
-    "SelectedCaloCalTopoCluster",
-    "SelectedTrackParticleCandidate"
-   ]
-
-# The output container keys after the overlap-removal
-# Note that a container of all leptons is provided on output
-# as well as a container of all final state particles
-ToolSvc.UserAnalysisOverlapRemovalTool.IsAtlfastData          = IsAtlfast
-ToolSvc.UserAnalysisOverlapRemovalTool.OuputObjectKey         = "FinalStateObjectCollection"
-ToolSvc.UserAnalysisOverlapRemovalTool.OutputLeptonKey        = "FinalStateLeptonCollection"
-ToolSvc.UserAnalysisOverlapRemovalTool.OutputPhotonKey        = "FinalStatePhotonCollection"
-ToolSvc.UserAnalysisOverlapRemovalTool.OutputElectronKey      = "FinalStateElectronCollection"
-ToolSvc.UserAnalysisOverlapRemovalTool.OutputMuonKey          = "FinalStateMuonCollection"
-ToolSvc.UserAnalysisOverlapRemovalTool.OutputTauJetKey        = "FinalStateTauJetCollection"
-ToolSvc.UserAnalysisOverlapRemovalTool.OutputCalloClusterKey  = "FinalStateCaloClusterCollection"
-ToolSvc.UserAnalysisOverlapRemovalTool.OutputTrackParticleKey = "FinalStateTrackParticleCollection"
-ToolSvc.UserAnalysisOverlapRemovalTool.OutputJetKey           = "FinalStateJetCollection"
-ToolSvc.UserAnalysisOverlapRemovalTool.OutputBJetKey          = "FinalStateBJetCollection"
-ToolSvc.UserAnalysisOverlapRemovalTool.OutputLightJetKey      = "FinalStateLightJetCollection"
-
-##########################################
-##
-## Set up the trigger part
-##
-##########################################
-
-topSequence.AnalysisSkeleton.DoTrigger = True
-if topSequence.AnalysisSkeleton.DoTrigger:
-   # needed for TriggerConfigGetter (will change with 15.3.0)
-   from RecExConfig.RecFlags  import rec
-   rec.readRDO=False
-   rec.readAOD=True
-   rec.doWriteAOD=False
-   rec.doWriteESD=False
-
-   # To read files with trigger config stored as in-file meta-data,
-   from TriggerJobOpts.TriggerFlags import TriggerFlags
-   TriggerFlags.configurationSourceList = ['ds']
-
-   # set up trigger config service
-   from TriggerJobOpts.TriggerConfigGetter import TriggerConfigGetter
-   cfg =  TriggerConfigGetter()
-
-   ## chains and groups for which to print trigger statistics
-   photons = ["L1_2EM13", "L2_2g10_mu6", "EF_2g10"]
-   singletaus = ["EF_tau12_loose", "EF_tau16_loose", "EF_tau16i_loose", "EF_tau20_loose", "EF_tau20i_loose",
-                 "EF_tau29_loose", "EF_tau29i_loose", "EF_tau38_loose", "EF_tau50_loose", "EF_tau84_loose"]
-   twotaus = ["EF_2tau20i_loose", "EF_2tau29i_loose", "EF_2tau29i_medium"]
-   combinedtaus = ["EF_tau12_loose_e10_loose", "L2_tau16i_loose_2j23", "EF_tau16i_loose_2j23", "EF_tau16i_loose_EFxe40"]
-   topSequence.AnalysisSkeleton.StatTriggerChains = photons + singletaus + twotaus + combinedtaus;
-
-   #topSequence.AnalysisSkeleton.InvestigateChain = 'L2_tau16i_loose_2j23'
-   
-
-## Done with trigger setup
-
-##########################################
-# setup TTree registration Service
-# save ROOT histograms and Tuple
-from GaudiSvc.GaudiSvcConf import THistSvc
-ServiceMgr += THistSvc()
-ServiceMgr.THistSvc.Output = [ "AANT DATAFILE='AnalysisSkeleton.aan.root' OPT='RECREATE'" ]
-from AnalysisTools.AthAnalysisToolsConf import AANTupleStream
-topSequence += AANTupleStream()
-AANTupleStream = AANTupleStream()
-AANTupleStream.ExtraRefNames = [ "StreamESD","Stream1" ]
-AANTupleStream.OutputName = 'AnalysisSkeleton.aan.root'
-AANTupleStream.WriteInputDataHeader = True
-AANTupleStream.OutputLevel = WARNING
-
-# Set output level threshold (2=DEBUG, 3=INFO, 4=WARNING, 5=ERROR, 6=FATAL )
-ServiceMgr.MessageSvc.OutputLevel = INFO
-# print out all messages; may need to increase the limit
-ServiceMgr.MessageSvc.defaultLimit = 9999999
-
-# Number of Events to process
-theApp.EvtMax = -1
-#theApp.EvtMax = 5
-
-###################### For interactive analysis
-#include ("PyAnalysisCore/InitPyAnalysisCore.py")
-
-from GaudiCommonSvc.GaudiCommonSvcConf import AuditorSvc
-ServiceMgr.AuditorSvc.Auditors  += [ "ChronoAuditor"]
-
-AthenaPoolCnvSvc = Service("AthenaPoolCnvSvc")
-AthenaPoolCnvSvc.UseDetailChronoStat = TRUE
-
-#### test MC dump ###
-
-#from TruthExamples.TruthExamplesConf import DumpMC
-#topSequence += DumpMC()
-#DumpMC.McEventKey = "GEN_AOD"
-       
diff --git a/PhysicsAnalysis/AnalysisCommon/UserAnalysis/share/AnalysisSkeleton_topOptions_NEW.py b/PhysicsAnalysis/AnalysisCommon/UserAnalysis/share/AnalysisSkeleton_topOptions_NEW.py
deleted file mode 100644
index b7a323d715c3d2dd3508e3152f66704e7d73a693..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/UserAnalysis/share/AnalysisSkeleton_topOptions_NEW.py
+++ /dev/null
@@ -1,228 +0,0 @@
-# Author: Ketevi A. Assamagan
-# BNL, June 12, 2004
-# Author: Vivek Jain
-# Oct 30, 2009
-# ------------------------------------------------------------
-
-# THIS SCRIPT MUST BE RUN FROM THE ====ANALYSISMASTER.PY=====
-
-# i.e., put the name of this file in AnalysisMaster.py, and do,
-# athena AnalysisMaster.py
-# ------------------------------------------------------------
-
-# get a handle on the ServiceManager which holds all the services
-from AthenaCommon.AppMgr import ToolSvc
-
-from AthenaCommon.AppMgr import ServiceMgr
-# Event selector
-import AthenaPoolCnvSvc.ReadAthenaPool
-
-# this line is needed for reading AOD files made with pre-14.1.X releases
-# If you leave it uncommented, you can still read post-14.1.X releases
-
-include ("JetRec/ParticleJetCompatibility.py")
-
-# Get the selection, overlap checking and overlap removal tools  
-include ( "UserAnalysisUtils/UserAnalysisSelectionTool_jobOptions.py" )
-include ( "UserAnalysisUtils/UserAnalysisPreparationTool_jobOptions.py" )
-include ( "UserAnalysisUtils/UserAnalysisOverlapCheckingTool_jobOptions.py" )
-include ( "UserAnalysisUtils/UserAnalysisOverlapRemovalTool_jobOptions.py" )
-
-# The input file is already specified in AnalysisMaster.py
-ServiceMgr.EventSelector.InputCollections = jp.AthenaCommonFlags.FilesInput()
-
-
-from AthenaCommon.AlgSequence import AlgSequence, AthSequencer
-topSequence = AlgSequence()
-
-
-#======================================================================================
-# L u m i B l o c k  j o b  o p t i o n s 
-#=========================================
-# add LumiBlockMetaDataTool to ToolSvc and configure
-from LumiBlockComps.LumiBlockCompsConf import LumiBlockMetaDataTool
-ToolSvc += LumiBlockMetaDataTool( "LumiBlockMetaDataTool" )
-LumiBlockMetaDataTool.OutputLevel = INFO
-
-# add ToolSvc.LumiBlockMetaDataTool to MetaDataSvc
-from AthenaServices.AthenaServicesConf import MetaDataSvc
-svcMgr += MetaDataSvc( "MetaDataSvc" )
-svcMgr.MetaDataSvc.MetaDataTools += [ ToolSvc.LumiBlockMetaDataTool ]
-
-# Configure the goodrunslist selector tool
-from GoodRunsLists.GoodRunsListsConf import *
-ToolSvc += GoodRunsListSelectorTool() 
-GoodRunsListSelectorTool.OutputLevel = INFO
-GoodRunsListSelectorTool.GoodRunsListVec = [ 'data11_7TeV.periodAllYear_DetStatus-v18-pro08-05_CoolRunQuery-00-03-98_Muon.xml' ]  # <<<<--- Edit this line!
-GoodRunsListSelectorTool.PassThrough = False
-
-## This Athena job consists of algorithms that loop over events;
-## here, the (default) top sequence is used:
-#job = AlgSequence()
-seq = AthSequencer("AthMasterSeq")
-topSequence += seq
-
-from GoodRunsListsUser.GoodRunsListsUserConf import *
-seq += GRLTriggerSelectorAlg('GRLTriggerAlg1')
-## In the next line, pick up correct name from inside xml file!
-seq.GRLTriggerAlg1.GoodRunsListArray = ['Muon']        
-
-#======================================================================================
-
-
-from UserAnalysis.UserAnalysisConf import AnalysisSkeleton
-topSequence += AnalysisSkeleton()
-
-
-############# The properties of the AnalysisSkeleton Algorithm
-topSequence.AnalysisSkeleton.AnalysisSelectionTool       = ToolSvc.UserAnalysisSelectionTool
-topSequence.AnalysisSkeleton.AnalysisPreparationTool     = ToolSvc.UserAnalysisPreparationTool
-topSequence.AnalysisSkeleton.AnalysisOverlapCheckingTool = ToolSvc.UserAnalysisOverlapCheckingTool
-topSequence.AnalysisSkeleton.AnalysisOverlapRemovalTool  = ToolSvc.UserAnalysisOverlapRemovalTool
-
-IsAtlfast = False
-
-topSequence.AnalysisSkeleton.McParticleContainer = "SpclMC"
-topSequence.AnalysisSkeleton.ElectronContainer = "ElectronAODCollection"
-topSequence.AnalysisSkeleton.MissingETObject = "MET_RefFinal"
-topSequence.AnalysisSkeleton.DeltaRMatchCut = 0.2
-topSequence.AnalysisSkeleton.MaxDeltaR = 0.9999
-topSequence.AnalysisSkeleton.ElectronEtCut  = 10.0*GeV
-topSequence.AnalysisSkeleton.ElectronEtaCut = 2.5
-topSequence.AnalysisSkeleton.ElectronCone   = 0.9
-topSequence.AnalysisSkeleton.bjetWt_IP3DSV1Cut = 6
-topSequence.AnalysisSkeleton.bjet_etaCut = 2.5
-topSequence.AnalysisSkeleton.bjet_etCut = 15.0*GeV
-topSequence.AnalysisSkeleton.MissingETCut = 20.0*GeV
-topSequence.AnalysisSkeleton.OutputLevel = INFO
-topSequence.AnalysisSkeleton.IsAtlFastData = IsAtlfast
-topSequence.AnalysisSkeleton.SusyJetMinEt      = 50*GeV
-topSequence.AnalysisSkeleton.DoTruth = False
-
-print AnalysisSkeleton
-
-# Change the selections if necessary
-# Please read jO files in UserAnalysisUtils/ for other options
-#
-ToolSvc.UserAnalysisSelectionTool.IsAtlfastData = IsAtlfast
-#topSequence.AnalysisSkeleton.AnalysisSelectionTool.ElectronIsEMFlag="Loose"
-ToolSvc.UserAnalysisSelectionTool.MuonPt=6.0*GeV
-ToolSvc.UserAnalysisSelectionTool.JetPt=20.0*GeV
-ToolSvc.UserAnalysisSelectionTool.ElectronEtaWindowCut=False
-ToolSvc.UserAnalysisSelectionTool.ElectronEtaWindowCenter=1.445
-ToolSvc.UserAnalysisSelectionTool.ElectronEtaWindow=0.075
-
-# configure the overlap checking tool
-ToolSvc.UserAnalysisOverlapCheckingTool.OverlapDeltaR=0.2
-ToolSvc.UserAnalysisOverlapCheckingTool.OverlapDeltaRWithJets=0.3
-
-# Building the containers of selected objects
-ToolSvc.UserAnalysisPreparationTool.IsAtlfastData = IsAtlfast
-
-# input container keys to the pre-selection tool
-ToolSvc.UserAnalysisPreparationTool.InputContainerKeys = [
-    "ElectronAODCollection",
-    "StacoMuonCollection",
-    "TauRecContainer",
-    "AntiKt4TowerJets",
-    "PhotonAODCollection",
-    "CaloCalTopoCluster",
-    "TrackParticleCandidate"
-   ]
-# Output container keys after the pre-selections
-ToolSvc.UserAnalysisPreparationTool.OutputContainerKeys=[ 
-    "SelectedElectronCollection",
-    "SelectedStacoMuonCollection",
-    "SelectedTauRecContainer",
-    "SelectedAntiKt4TowerJets",
-    "SelectedPhotonAODCollection",
-    "SelectedCaloCalTopoCluster",
-    "SelectedTrackParticleCandidate"
-   ]
-
-# Use the output containers from the selection tool as input to the overlap removal tool
-# the order matters 
-# in this setting, first electrons will be chosen,
-# then if a muon overlaps an electron, the muon is rejected, and so on
-
-ToolSvc.UserAnalysisOverlapRemovalTool.InputContainerKeys=[  
-    "SelectedElectronCollection",
-    "SelectedStacoMuonCollection",
-    "SelectedTauRecContainer",
-    "SelectedAntiKt4TowerJets",
-    "SelectedPhotonAODCollection",
-    "SelectedCaloCalTopoCluster",
-    "SelectedTrackParticleCandidate"
-   ]
-
-# The output container keys after the overlap-removal
-# Note that a container of all leptons is provided on output
-# as well as a container of all final state particles
-ToolSvc.UserAnalysisOverlapRemovalTool.IsAtlfastData          = IsAtlfast
-ToolSvc.UserAnalysisOverlapRemovalTool.OuputObjectKey         = "FinalStateObjectCollection"
-ToolSvc.UserAnalysisOverlapRemovalTool.OutputLeptonKey        = "FinalStateLeptonCollection"
-ToolSvc.UserAnalysisOverlapRemovalTool.OutputPhotonKey        = "FinalStatePhotonCollection"
-ToolSvc.UserAnalysisOverlapRemovalTool.OutputElectronKey      = "FinalStateElectronCollection"
-ToolSvc.UserAnalysisOverlapRemovalTool.OutputMuonKey          = "FinalStateMuonCollection"
-ToolSvc.UserAnalysisOverlapRemovalTool.OutputTauJetKey        = "FinalStateTauJetCollection"
-ToolSvc.UserAnalysisOverlapRemovalTool.OutputCalloClusterKey  = "FinalStateCaloClusterCollection"
-ToolSvc.UserAnalysisOverlapRemovalTool.OutputTrackParticleKey = "FinalStateTrackParticleCollection"
-ToolSvc.UserAnalysisOverlapRemovalTool.OutputJetKey           = "FinalStateJetCollection"
-ToolSvc.UserAnalysisOverlapRemovalTool.OutputBJetKey          = "FinalStateBJetCollection"
-ToolSvc.UserAnalysisOverlapRemovalTool.OutputLightJetKey      = "FinalStateLightJetCollection"
-
-##########################################
-##
-## Set up the trigger part
-##
-##########################################
-
-topSequence.AnalysisSkeleton.DoTrigger = True
-if topSequence.AnalysisSkeleton.DoTrigger:
-
-   # set up trigger config service
-   from TriggerJobOpts.TriggerConfigGetter import TriggerConfigGetter
-   cfg =  TriggerConfigGetter()
-
-   ## chains and groups for which to print trigger statistics
-   photons = ["L1_2EM13", "L2_2g10_mu6", "EF_2g10"]
-   singletaus = ["EF_tau12_loose", "EF_tau16_loose", "EF_tau16i_loose", "EF_tau20_loose", "EF_tau20i_loose",
-                 "EF_tau29_loose", "EF_tau29i_loose", "EF_tau38_loose", "EF_tau50_loose", "EF_tau84_loose"]
-   twotaus = ["EF_2tau20i_loose", "EF_2tau29i_loose", "EF_2tau29i_medium"]
-   combinedtaus = ["EF_tau12_loose_e10_loose", "L2_tau16i_loose_2j23", "EF_tau16i_loose_2j23", "EF_tau16i_loose_EFxe40"]
-   topSequence.AnalysisSkeleton.StatTriggerChains = photons + singletaus + twotaus + combinedtaus;
-
-   #topSequence.AnalysisSkeleton.InvestigateChain = 'L2_tau16i_loose_2j23'
-   
-
-## Done with trigger setup
-
-##########################################
-# setup TTree registration Service
-# save ROOT histograms and Tuple
-from GaudiSvc.GaudiSvcConf import THistSvc
-ServiceMgr += THistSvc()
-ServiceMgr.THistSvc.Output = [ "AANT DATAFILE='AnalysisSkeleton.aan.root' OPT='RECREATE'" ]
-from AnalysisTools.AthAnalysisToolsConf import AANTupleStream
-topSequence += AANTupleStream()
-AANTupleStream = AANTupleStream()
-AANTupleStream.ExtraRefNames = [ "StreamESD","Stream1" ]
-AANTupleStream.OutputName = 'AnalysisSkeleton.aan.root'
-AANTupleStream.WriteInputDataHeader = True
-AANTupleStream.OutputLevel = WARNING
-
-
-###################### For interactive analysis
-#include ("PyAnalysisCore/InitPyAnalysisCore.py")
-
-from GaudiCommonSvc.GaudiCommonSvcConf import AuditorSvc
-ServiceMgr.AuditorSvc.Auditors  += [ "ChronoAuditor"]
-
-AthenaPoolCnvSvc = Service("AthenaPoolCnvSvc")
-AthenaPoolCnvSvc.UseDetailChronoStat = TRUE
-
-#### test MC dump ###
-
-#from TruthExamples.TruthExamplesConf import DumpMC
-#topSequence += DumpMC()
-#DumpMC.McEventKey = "GEN_AOD"
diff --git a/PhysicsAnalysis/AnalysisCommon/UserAnalysis/share/README.TXT b/PhysicsAnalysis/AnalysisCommon/UserAnalysis/share/README.TXT
deleted file mode 100644
index d71dd7a5e00d5a37ffbcaf577ad243f61a91f4f9..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/UserAnalysis/share/README.TXT
+++ /dev/null
@@ -1,9 +0,0 @@
-AnalysisSkeleton has three jobOptions files:
-
-_topOptions.py
-_topOptions_AutoConfig.py
-_topOptions_NEW.py
-
-Please look at https://twiki.cern.ch/twiki/bin/view/AtlasProtected/PhysicsAnalysisWorkBookAODAnalysisRel15#What_s_NEW_on_this_page for an explanation
-
-Also see https://twiki.cern.ch/twiki/bin/view/AtlasProtected/PhysicsAnalysisWorkBookFullAODAnalysisRel15 to learn how the file with _NEW suffix is used.
diff --git a/PhysicsAnalysis/AnalysisCommon/UserAnalysis/share/TagSelection_AODanalysis_topOptions.py b/PhysicsAnalysis/AnalysisCommon/UserAnalysis/share/TagSelection_AODanalysis_topOptions.py
deleted file mode 100755
index 02eed4c9035bd2d9875bde51f86c6d917e713a3a..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/UserAnalysis/share/TagSelection_AODanalysis_topOptions.py
+++ /dev/null
@@ -1,83 +0,0 @@
-# Author: Ketevi A. Assamagan
-# BNL, June 12, 2004
-
-#get a handle on the ServiceManager which holds all the services
-from AthenaCommon.AppMgr import ServiceMgr
-# Event selector
-import AthenaPoolCnvSvc.ReadAthenaPool
-
-# Particle Properties
-from PartPropSvc.PartPropSvcConf import PartPropSvc
-
-# the POOL converters
-include( "ParticleBuilderOptions/ESD_PoolCnv_jobOptions.py" )
-include( "ParticleBuilderOptions/AOD_PoolCnv_jobOptions.py")
-include( "ParticleBuilderOptions/McAOD_PoolCnv_jobOptions.py")
-include( "EventAthenaPool/EventAthenaPool_joboptions.py" )
-
-# The TAG input file and TAG event selection
-ServiceMgr.EventSelector.InputCollections = [ "TAG.pool.root" ]
-ServiceMgr.EventSelector.Query="NLooseMuon>0 || NLooseElectron>0"
-ServiceMgr.EventSelector.CollectionType = "ExplicitROOT"
-
-# Athena-Aware NTuple making Tools
-CBNTAthenaAware = True
-include ("CBNT_Athena/CBNT_AthenaAware_jobOptions.py")
-include ("CBNT_Athena/CBNT_EventInfo_jobOptions.py")
-
-from AthenaCommon.AlgSequence import AlgSequence
-topSequence = AlgSequence()
-
-############# The properties of the AnalysisSkeleton Algorithm
-from UserAnalysis.UserAnalysisConf import AnalysisSkeleton
-topSequence.CBNT_AthenaAware += AnalysisSkeleton()
-AnalysisSkeleton = AnalysisSkeleton()
-
-AnalysisSkeleton.ElectronContainer = "ElectronAODCollection"
-AnalysisSkeleton.MCParticleContainer = "SpclMC"
-AnalysisSkeleton.DeltaRMatchCut = 0.2
-AnalysisSkeleton.MaxDeltaR = 0.9999
-AnalysisSkeleton.ElectronEtCut  = 10.0*GeV
-AnalysisSkeleton.ElectronEtaCut = 2.5
-AnalysisSkeleton.ElectronCone   = 0.9
-AnalysisSkeleton.OutputLevel = INFO
-
-##########################################
-# setup TTree registration Service
-# save ROOT histograms and Tuple
-from GaudiSvc.GaudiSvcConf import THistSvc
-ServiceMgr += THistSvc()
-ServiceMgr.THistSvc.Output = ["AANT DATAFILE='AnalysisSkeleton.TagSel.aan.root' OPT='RECREATE'"]
-from AnalysisTools.AnalysisToolsConf import AANTupleStream
-topSequence += AANTupleStream()
-AANTupleStream = AANTupleStream()
-AANTupleStream.ExtraRefNames = [ "StreamESD","Stream1" ]
-AANTupleStream.OutputName = 'AnalysisSkeleton.TagSel.aan.root'
-AANTupleStream.WriteInputDataHeader = True
-AANTupleStream.OutputLevel = WARNING
-
-# Set output level threshold (2=DEBUG, 3=INFO, 4=WARNING, 5=ERROR, 6=FATAL )
-ServiceMgr.MessageSvc.OutputLevel = ERROR
-
-# Number of Events to process
-theApp.EvtMax = 500
-
-###################### For interactive analysis
-#include ("PyAnalysisCore/InitPyAnalysisCore.py")
-
-###################### Detail time measurement and auditors
-# Use auditors
-#theApp.AuditAlgorithms=True
-#theApp.Dlls += [ "GaudiAud" ]
-
-theAuditorSvc = AuditorSvc()
-theAuditorSvc.Auditors  += [ "ChronoAuditor"]
-
-AthenaPoolCnvSvc = Service("AthenaPoolCnvSvc")
-#topSequence += AthenaPoolCnvSvc()
-ServiceMgr.AthenaPoolCnvSvc.UseDetailChronoStat = TRUE
-
-#StoreGateSvc = Service( "StoreGateSvc" )
-#StoreGateSvc.Dump = True 
-#MessageSvc.OutputLevel = DEBUG
-
diff --git a/PhysicsAnalysis/AnalysisCommon/UserAnalysis/src/AnalysisSkeleton.cxx b/PhysicsAnalysis/AnalysisCommon/UserAnalysis/src/AnalysisSkeleton.cxx
deleted file mode 100755
index 14a731e8e8cb3a83c7a78b4498b148f809580ad1..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/UserAnalysis/src/AnalysisSkeleton.cxx
+++ /dev/null
@@ -1,1414 +0,0 @@
-/*
-  Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
-*/
-
-////////////////////////////////////////////////////////////////////////////////// 
-/// Analysis skeleton
-/// also for distributed analysis examples
-/// Author: Ketevi A. Assamagan
-/// BNL, July 22, 2004
-///
-/// DESCRIPTION:
-///
-/// This class is an analysis skeleton - The user can implement her analysis here
-/// Ketevi A. Assamagan on June 22, 2004
-///
-/// Vivek Jain - March 11, 2010
-/// This file has been extensively modified since creation. It now shows you how to access
-/// trigger information. Preparation of objects, e.g., electrons, muons, jets, etc., 
-/// has been pushed off into the UserAnalysisUtils package
-///
-/// The code here also does overlap removal
-///
-/// Please see https://twiki.cern.ch/twiki/bin/view/AtlasProtected/PhysicsAnalysisWorkBookAODAnalysisRel15#Writing_your_own_analysis_code for details relating to release 15
-///
-///////////////////////////////////////////////////////////////////////////////////////////////////////
-
-
-//////////////////////////////////////////////////////////////////////////////////
-
-// the first two come for free when using AthAlgorithm
-//#include "GaudiKernel/MsgStream.h"
-#include "GaudiKernel/IToolSvc.h"
-
-
-#include "xAODEventInfo/EventInfo.h"
-
-#include "egammaEvent/ElectronContainer.h"
-#include "egammaEvent/EMShower.h"
-#include "egammaEvent/egammaParamDefs.h"
-
-#include "McParticleEvent/TruthParticleContainer.h"
-
-#include "VxVertex/VxContainer.h"
-#include "Particle/TrackParticleContainer.h"
-#include "CaloEvent/CaloClusterContainer.h"
-
-#include "muonEvent/MuonContainer.h"
-#include "egammaEvent/PhotonContainer.h"
-#include "tauEvent/TauJetContainer.h"
-#include "JetEvent/JetCollection.h"
-#include "MissingETEvent/MissingET.h"
-
-#include "NavFourMom/IParticleContainer.h"
-#include "NavFourMom/INavigable4MomentumCollection.h"
-
-#include "GaudiKernel/ITHistSvc.h"
-#include "TTree.h"
-#include "CLHEP/Vector/LorentzVector.h"
-
-#include "JetTagInfo/TruthInfo.h"
-
-#include "TrigDecisionTool/ChainGroup.h"
-
-#include "UserAnalysis/AnalysisSkeleton.h"
-
-#include "TrigParticle/TrigTau.h"
-#include "tauEvent/TauJet.h"
-#include "tauEvent/TauJetContainer.h"
-#include "tauEvent/TauDetailsContainer.h"
-#include "tauEvent/Tau1P3PDetails.h"
-#include "tauEvent/TauRecDetails.h"
-#include "AnalysisTriggerEvent/Jet_ROI.h"
-#include "AnalysisTriggerEvent/EmTau_ROI.h"
-#include "TrigSteeringEvent/TrigRoiDescriptor.h"
-#include "TrigSteeringEvent/TrigRoiDescriptorCollection.h"
-#include "TrigParticle/TrigTau.h"
-#include "TrigParticle/TrigTauContainer.h"
-#include "TrigCaloEvent/TrigT2Jet.h"
-#include "TrigCaloEvent/TrigTauCluster.h"
-#include "TrigInDetEvent/TrigInDetTrackCollection.h"
-#include "AthenaKernel/Units.h"
-
-#include <algorithm>
-#include <math.h>
-#include <functional>
-#include <iostream>
-
-//static const double mZ = 91.19*CLHEP::GeV;
-//static const int  MAX_PARTICLES = 20;
-
-using namespace Analysis;
-using namespace Rec;
-using namespace Trig;
-using Athena::Units::GeV;
-
-//////////////////////////////////////////////////////////////////////////////////////
-/// Constructor
-
-AnalysisSkeleton::AnalysisSkeleton(const std::string& name, ISvcLocator* pSvcLocator) :
-  AthAlgorithm(name, pSvcLocator),
-  m_analysisTools( "AnalysisTools" ),
-  m_analysisSelectionTool( "UserAnalysisSelectionTool" ),
-  m_analysisPreparationTool( "UserAnalysisPreparationTool" ),
-  m_analysisOverlapCheckingTool( "UserAnalysisOverlapCheckingTool" ),
-  m_analysisOverlapRemovalTool( "UserAnalysisOverlapRemovalTool" ),
-  m_trigDec( "Trig::TrigDecisionTool/TrigDecisionTool" ),
-  m_aan_FinalLepEtSum(0),
-  m_aan_FinalElEtSum(0),
-  m_aan_FinalMuEtSum(0),
-  m_aan_NumTopQ(0),
-  m_aan_pTtop1(0),
-  m_aan_pTtop2(0),
-  m_aan_Trig_efJet_et(0),
-  m_aan_Trig_efJet_eta(0),
-  m_aan_Trig_efJet_phi(0),
-  m_aan_Trig_l2Jet_et(0),
-  m_aan_Trig_l2Jet_eta(0),
-  m_aan_Trig_l2Jet_phi(0),
-  m_aan_Trig_l1Jet_et88(0),
-  m_aan_Trig_l1Jet_eta(0),
-  m_aan_Trig_l1Jet_phi(0),
-  m_doTrigger(true),
-  m_investigateChain("EF_tau16i_loose_2j23"),
-  m_eventNr(0),
-  m_all(nullptr),
-  m_allL1(nullptr),
-  m_allL2(nullptr),
-  m_allEF(nullptr),
-  m_runNumber(0),
-  m_eventNumber(0),
-  m_eventTime(0),
-  m_lumiBlock(0),
-  m_bCID(0),
-  m_eventWeight(0)
-{
-
-  /** switches to control the analysis through job options */
-
-  declareProperty( "AnalysisTools",               m_analysisTools );
-  declareProperty( "AnalysisSelectionTool",       m_analysisSelectionTool);
-  declareProperty( "AnalysisPreparationTool",     m_analysisPreparationTool);
-  declareProperty( "AnalysisOverlapCheckingTool", m_analysisOverlapCheckingTool);
-  declareProperty( "AnalysisOverlapRemovalTool",  m_analysisOverlapRemovalTool);
-  declareProperty( "TrigDecisionTool",            m_trigDec, "The tool to access TrigDecision");
-
-  declareProperty("ElectronContainer", m_electronContainerName="ElectronAODCollection"); 
-  declareProperty("McParticleContainer", m_truthParticleContainerName = "SpclMC");
-  declareProperty("MissingETObject",m_missingETObjectName="MET_RefFinal");
-
-  /** the cuts - default values - to be modified in job options */
-
-  declareProperty("DeltaRMatchCut", m_deltaRMatchCut = 0.2);
-  declareProperty("MaxDeltaR", m_maxDeltaR = 0.9999);
-
-  /** additional cuts for electrons */
-  declareProperty("ElectronEtCut", m_etElecCut = 10.0*CLHEP::GeV);
-  declareProperty("ElectronEtaCut", m_etaElecCut = 2.5);
-  declareProperty("ElectronCone", m_elecCone = 0.9);
-
-  /** additional cuts for bjet tagging */
-  declareProperty("bjetWt_IP3DSV1Cut", m_bjetWt_ip3dsv1Cut = 6);
-  declareProperty("bjet_etaCut", m_bjet_etaCut = 2.5);
-  declareProperty("bjet_etCut", m_bjet_etCut = 15.0*CLHEP::GeV);
-
-  /** missing ET options */
-  declareProperty("MissingETCut",m_missingETCut=20.0*CLHEP::GeV);
-
-  /** is this AtlFast */
-  declareProperty("IsAtlFastData",m_isAtlFastData=false);
-
-  // is this MC or not?
-  declareProperty("DoTruth",m_doTruth=false);
-
-  /** count number of jets with ET > min value - for SUSY studies */
-  declareProperty("SusyJetMinEt", m_SusyJetMinEt = 50*CLHEP::GeV);
-
-  /** trigger properties */
-  declareProperty("DoTrigger", m_doTrigger, "enable trigger example");
-  declareProperty("StatTriggerChains", m_triggerChains, "list of triggers for which to print statistics");
-  declareProperty("InvestigateChain", m_investigateChain, "chain to investigate");
-
-}
-
-/////////////////////////////////////////////////////////////////////////////////////
-/// Destructor - check up memory allocation
-/// delete any memory allocation on the heap
-
-AnalysisSkeleton::~AnalysisSkeleton() {}
-
-////////////////////////////////////////////////////////////////////////////////////
-/// Initialize
-/// initialize StoreGate
-/// get a handle on the analysis tools
-/// book histograms
-
-StatusCode AnalysisSkeleton::start() {
-
-  ATH_MSG_ERROR("Initializing AnalysisSkeleton (before eventloop)");
-
-  // retrieve trigger decision tool
-  // needs to be done before the first run/event since a number of
-  // BeginRun/BeginEvents are registered by dependent services
-  StatusCode sc = StatusCode::SUCCESS;
-
-  if ( m_doTrigger ) {
-     sc = m_trigDec.retrieve();
-     if ( sc.isFailure() )ATH_MSG_DEBUG("Can't get handle on TrigDecisionTool");
-     else ATH_MSG_DEBUG("Got handle on TrigDecisionTool");
-     
-  }
-
-  // Initialize the trigger passed counters
-  // this can not be done before initialize, since the properties need to be set from job-options first
-  std::vector<std::string>::const_iterator it;
-  for(it = m_triggerChains.begin();it != m_triggerChains.end(); it++)
-     m_triggersPassed[*it] = 0;
-
-  return sc;
-} 
-
-StatusCode AnalysisSkeleton::initialize() {
-
-  ATH_MSG_INFO("Initializing AnalysisSkeleton");
-
-  /// get a handle on the analysis tools
-  StatusCode sc = m_analysisTools.retrieve();
-  if ( sc.isFailure() ) {
-    ATH_MSG_ERROR("Can't get handle on analysis tools");
-    return sc;
-  }
-
-  /// get a handle on the preparation analysis tools
-  sc = m_analysisSelectionTool.retrieve();
-  if ( sc.isFailure() ) {
-    ATH_MSG_ERROR("Can't get handle on analysis selection tool");
-    return sc;
-  }
-
-  sc = m_analysisPreparationTool.retrieve();
-  if ( sc.isFailure() ) {
-    ATH_MSG_ERROR("Can't get handle on analysis preparation tool");
-    return sc;
-  }
-
-  sc = m_analysisOverlapCheckingTool.retrieve();
-  if ( sc.isFailure() ) {
-    ATH_MSG_ERROR("Can't get handle on analysis overlap checking tool");
-    return sc;
-  }
-
-  sc = m_analysisOverlapRemovalTool.retrieve();
-  if ( sc.isFailure() ) {
-    ATH_MSG_ERROR("Can't get handle on analysis overlap removal tool");
-    return sc;
-  }
-
-  /** get a handle on the NTuple and histogramming service */
-  sc = service("THistSvc", m_thistSvc);
-  if (sc.isFailure()) {
-     ATH_MSG_ERROR("Unable to retrieve pointer to THistSvc");
-     return sc;
-  }
-
-  // at this point declare new vectors to put in ntuple
-  /*
-  // don't really need this (as per Attila K.) May 23, 2012
-  m_lvl1TriggerInfo = new std::vector<unsigned int>;
-  m_lvl2TriggerInfo = new std::vector<unsigned int>;
-  m_evtFilterInfo = new std::vector<unsigned int>;
-  m_streamTagName = new std::vector<std::string>;
-  m_streamTagType = new std::vector<std::string>;
-  */
-
-  m_aan_eta = new std::vector<double>;
-  m_aan_pt = new std::vector<double>;
-  m_aan_elecetres = new std::vector<double>;
-
-  m_aan_JetEta = new std::vector<double>;
-  m_aan_JetEt = new std::vector<double>;
-  m_aan_JetBTagWt = new std::vector<double>;
-
-  m_aan_FinalElEta = new std::vector<double>;
-  m_aan_FinalElPt = new std::vector<double>;
-  m_aan_FinalElEtCone20 = new std::vector<double>;
-
-  m_aan_FinalMuEta = new std::vector<double>;
-  m_aan_FinalMuPt = new std::vector<double>;
-  m_aan_FinalMuEtCone20 = new std::vector<double>;
-  m_aan_FinalMuBestMat = new std::vector<int>;
-  m_aan_FinalMuMatChi2 = new std::vector<double>;
-
-  
-
-  /** now add branches and leaves to the AAN tree */
-  // the TTree
-  m_tree_AS = new TTree("tree_AS","TTree of AnalysisSkeleton");
-  sc = m_thistSvc->regTree("/AANT/tree_AS", m_tree_AS);
-
-  // first add Event info stuff
-  m_tree_AS->Branch("Run",  &m_runNumber,   "Run/I");    // run number
-  m_tree_AS->Branch("Event",&m_eventNumber, "Event/I");  // event number
-  m_tree_AS->Branch("Time", &m_eventTime,   "Time/I");   // time stamp
-  m_tree_AS->Branch("LumiBlock", &m_lumiBlock,"LumiBlock/I"); // lum block num 
-  m_tree_AS->Branch("BCID", &m_bCID,"BCID/I"); // bunch crossing ID
-  m_tree_AS->Branch("Weight", &m_eventWeight, "Weight/D"); // weight
-  /*
-  // don't really need this (as per Attila K.) May 23, 2012
-
-  m_tree_AS->Branch("LVL1ID", &m_lVL1ID,"LVL1ID/I"); // trigger LVL1 ID
-  m_tree_AS->Branch("StatusElement",  &m_statusElement, "StatusElement/I");
-  m_tree_AS->Branch("LVL1TriggerType",  &m_lvl1TriggerType, "LVL1TriggerType/I");
-  m_tree_AS->Branch("LVL1TriggerInfo",&m_lvl1TriggerInfo);
-  m_tree_AS->Branch("LVL2TriggerInfo",&m_lvl2TriggerInfo);
-  m_tree_AS->Branch("EventFilterInfo",&m_evtFilterInfo);
-  m_tree_AS->Branch("StreamTagName",&m_streamTagName);
-  m_tree_AS->Branch("StreamTagType",&m_streamTagType);
-  */
-
-  m_tree_AS->Branch("NElectrons",    &m_aan_size, "NElectrons/i");
-  m_tree_AS->Branch("ElectronEta",   &m_aan_eta);
-  m_tree_AS->Branch("ElectronPt",    &m_aan_pt);
-  m_tree_AS->Branch("ElecPtRatio",   &m_aan_elecetres);
-
-  m_tree_AS->Branch("NJets",          &m_aan_njets, "NJets/i");
-  m_tree_AS->Branch("NJets_etaLT25",  &m_aan_njets_etaLT25, "NJets_etaLT25/i");
-  m_tree_AS->Branch("NJets_SusyETCut",&m_aan_njets_SusyETCut, "NJets_SusyETCut/i");
-  m_tree_AS->Branch("JetsEta"        ,&m_aan_JetEta);
-  m_tree_AS->Branch("JetsEt"         ,&m_aan_JetEt);
-  m_tree_AS->Branch("JetsBTagWt"     ,&m_aan_JetBTagWt);
-  m_tree_AS->Branch("MissingET",      &m_aan_ptMiss, "MissingET/d");
-  m_tree_AS->Branch("EffMass",        &m_aan_effmass, "EffMass/d");
-  m_tree_AS->Branch("HT",             &m_aan_ht,"HT/d");
-  m_tree_AS->Branch("NBJets",         &m_aan_nbjets, "NBJets/i");
-  m_tree_AS->Branch("MaxJetET",       &m_aan_maxJetET, "MaxJetET/d");
-
-  m_tree_AS->Branch("NFinalEl",       &m_aan_NFinalEl, "NFinalEl/i");
-  m_tree_AS->Branch("FinalElEta",     &m_aan_FinalElEta);
-  m_tree_AS->Branch("FinalElPt",      &m_aan_FinalElPt);
-  m_tree_AS->Branch("FinalElEtCone20",&m_aan_FinalElEtCone20);
-
-  m_tree_AS->Branch("NFinalMu",       &m_aan_NFinalMu, "NFinalMu/i");
-  m_tree_AS->Branch("FinalMuEta",     &m_aan_FinalMuEta);
-  m_tree_AS->Branch("FinalMuPt",      &m_aan_FinalMuPt);
-  m_tree_AS->Branch("FinalMuEtCone20",&m_aan_FinalMuEtCone20);
-  m_tree_AS->Branch("FinalMuBestMat", &m_aan_FinalMuBestMat);
-  m_tree_AS->Branch("FinalMuMatChi2", &m_aan_FinalMuMatChi2);
-
-  m_tree_AS->Branch("FinalLepEtSum",  &m_aan_FinalLepEtSum, "FinalLepEtSum/d");
-  m_tree_AS->Branch("FinalElEtSum",   &m_aan_FinalElEtSum, "FinalElEtSum/d");
-  m_tree_AS->Branch("FinalMuEtSum",   &m_aan_FinalMuEtSum, "FinalMuEtSum/d");
-
-  m_tree_AS->Branch("NumberTopQ",     &m_aan_NumTopQ, "NumberTopQ/i");
-  m_tree_AS->Branch("pTtop1",         &m_aan_pTtop1,  "pTtop1/d");
-  m_tree_AS->Branch("pTtop2",         &m_aan_pTtop2,  "pTtop2/d");
-
-  m_tree_AS->Branch("Trig_efJet_et",       &m_aan_Trig_efJet_et  ,             "Trig_efJet_et/f");
-  m_tree_AS->Branch("Trig_efJet_eta",      &m_aan_Trig_efJet_eta ,             "Trig_efJet_eta/f");
-  m_tree_AS->Branch("Trig_efJet_phi",      &m_aan_Trig_efJet_phi ,             "Trig_efJet_phi/f");
-  m_tree_AS->Branch("Trig_l2Jet_et",       &m_aan_Trig_l2Jet_et  ,             "Trig_l2Jet_et/f");
-  m_tree_AS->Branch("Trig_l2Jet_eta",      &m_aan_Trig_l2Jet_eta ,             "Trig_l2Jet_eta/f");
-  m_tree_AS->Branch("Trig_l2Jet_phi",      &m_aan_Trig_l2Jet_phi ,             "Trig_l2Jet_phi/f");
-  m_tree_AS->Branch("Trig_l1Jet_et88",     &m_aan_Trig_l1Jet_et88,             "Trig_l1Jet_et88/f");
-  m_tree_AS->Branch("Trig_l1Jet_eta",      &m_aan_Trig_l1Jet_eta ,             "Trig_l1Jet_eta/f");
-  m_tree_AS->Branch("Trig_l1Jet_phi",      &m_aan_Trig_l1Jet_phi ,             "Trig_l1Jet_phi/f");
-
-  /// ROOT histograms ---------------------------------------
-
-  /// electrons
-  m_h_elecpt     = new TH1F("elec_pt","pt el",50,0,250.*CLHEP::GeV);
-  sc = m_thistSvc->regHist("/AANT/Electron/elec_pt",m_h_elecpt);
-
-  m_h_eleceta    = new TH1F("elec_eta","eta el",70,-3.5,3.5);
-  sc = m_thistSvc->regHist("/AANT/Electron/elec_eta",m_h_eleceta);
-
-  m_h_elec_deltaRMatch    = new TH1F("elec_deltaRMatch","elec reco/MC deltaR",50,0.,1.);
-  sc = m_thistSvc->regHist("/AANT/Electron/elec_deltaRMatch",m_h_elec_deltaRMatch);
-
-  /// jets - before OverlapRemoval
-  m_h_jet_eta_beforeOR = new TH1F("jet_eta_beforeOR","jet_eta before OR",50,-5.,5.);
-  sc = m_thistSvc->regHist("/AANT/Jet/jet_eta_beforeOR",m_h_jet_eta_beforeOR);
-
-  m_h_jet_et_beforeOR = new TH1F("jet_et_beforeOR","jet_et before OR",100,0.,500.);
-  sc = m_thistSvc->regHist("/AANT/Jet/jet_et_beforeOR",m_h_jet_et_beforeOR);
-
-  m_h_jet_ip3dsv1Wt_beforeOR = new TH1F("jet_ip3dsv1Wt_beforeOR","jet_ip3dsv1Wt before OR",120,-20.,40.);
-  sc = m_thistSvc->regHist("/AANT/Jet/jet_ip3dsv1Wt_beforeOR",m_h_jet_ip3dsv1Wt_beforeOR);
-
-  m_h_jet_label_beforeOR = new TH1F("jet_label_beforeOR","jet_label before OR",20,0.,20.);
-  sc = m_thistSvc->regHist("/AANT/Jet/jet_label_beforeOR",m_h_jet_label_beforeOR);
-
-  m_h_jet_ip3dsv1Wt_bjet_beforeOR = new TH1F("jet_ip3dsv1Wt_bjet_beforeOR","b jet_ip3dsv1Wt before OR",120,-20.,40.);
-  sc = m_thistSvc->regHist("/AANT/Jet/jet_ip3dsv1Wt_bjet_beforeOR",m_h_jet_ip3dsv1Wt_bjet_beforeOR);
-
-  m_h_jet_ip3dsv1Wt_ujet_beforeOR = new TH1F("jet_ip3dsv1Wt_ujet_beforeOR","u jet_ip3dsv1Wt before OR",120,-20.,40.);
-  sc = m_thistSvc->regHist("/AANT/Jet/jet_ip3dsv1Wt_ujet_beforeOR",m_h_jet_ip3dsv1Wt_ujet_beforeOR);
-
-  /// jets - after OverlapRemoval
-  m_h_jet_eta_afterOR = new TH1F("jet_eta_afterOR","jet_eta after OR",50,-5.,5.);
-  sc = m_thistSvc->regHist("/AANT/Jet/jet_eta_afterOR",m_h_jet_eta_afterOR);
-
-  m_h_jet_et_afterOR = new TH1F("jet_et_afterOR","jet_et after OR",100,0.,500.);
-  sc = m_thistSvc->regHist("/AANT/Jet/jet_et_afterOR",m_h_jet_et_afterOR);
-
-  m_h_jet_ip3dsv1Wt_afterOR = new TH1F("jet_ip3dsv1Wt_afterOR","jet_ip3dsv1Wt after OR",120,-20.,40.);
-  sc = m_thistSvc->regHist("/AANT/Jet/jet_ip3dsv1Wt_afterOR",m_h_jet_ip3dsv1Wt_afterOR);
-
-  m_h_jet_label_afterOR = new TH1F("jet_label_afterOR","jet_label after OR",20,0.,20.);
-  sc = m_thistSvc->regHist("/AANT/Jet/jet_label_afterOR",m_h_jet_label_afterOR);
-
-  m_h_jet_ip3dsv1Wt_bjet_afterOR = new TH1F("jet_ip3dsv1Wt_bjet_afterOR","b jet_ip3dsv1Wt after OR",120,-20.,40.);
-  sc = m_thistSvc->regHist("/AANT/Jet/jet_ip3dsv1Wt_bjet_afterOR",m_h_jet_ip3dsv1Wt_bjet_afterOR);
-
-  m_h_jet_ip3dsv1Wt_ujet_afterOR = new TH1F("jet_ip3dsv1Wt_ujet_afterOR","u jet_ip3dsv1Wt after OR",120,-20.,40.);
-  sc = m_thistSvc->regHist("/AANT/Jet/jet_ip3dsv1Wt_ujet_afterOR",m_h_jet_ip3dsv1Wt_ujet_afterOR);
-
-  /// missing ET
-
-  m_pxMis   = new TH1F("MissingPx", "MissingPx",200,-500.0*CLHEP::GeV,500.*CLHEP::GeV);
-  sc = m_thistSvc->regHist("/AANT/MissingET/MissingPx", m_pxMis);
-  m_pyMis   = new TH1F("MissingPy","MissingPy",200,-500.0*CLHEP::GeV,500.*CLHEP::GeV);
-  sc = m_thistSvc->regHist("/AANT/MissingET/MissingPy", m_pyMis);
-  m_ptMis   = new TH1F("MissingPt","MissingPt",100,0.0,500.*CLHEP::GeV);
-  sc = m_thistSvc->regHist("/AANT/MissingET/MissingPt", m_ptMis);
-
-  // trigger
-
-  m_triggerAccepts = new TH1F("TriggerAccepts", "TriggerAccepts",3,0,3);
-  sc = m_thistSvc->regHist("/AANT/Trigger/TriggerAccepts", m_triggerAccepts);
-
-  if (sc.isFailure()) { 
-    ATH_MSG_ERROR("ROOT Hist registration failed"); 
-    return sc; 
-  }
-  /// end ROOT Histograms ------------------------------------------
-
-  // define chain groups using regular expressions and relying on the
-  // trigger chain name convention: all L1 item names start with
-  // L1_, etc.; in fact, the TrigDecisionTool already defines these
-  // groups by default, but let's do it again as an example
-  m_all   = m_trigDec->getChainGroup(".*");
-  m_allL1 = m_trigDec->getChainGroup("L1_.*"); 
-  m_allL2 = m_trigDec->getChainGroup("L2_.*");
-  m_allEF = m_trigDec->getChainGroup("EF_.*");
-
-  m_eventNr=0;
-
-  /** Print out bjet cut values */
-  ATH_MSG_DEBUG(":b jet cuts: Et/eta/IP3DSV1 wt. "<<m_bjet_etCut<< ","<<m_bjet_etaCut <<","<<m_bjetWt_ip3dsv1Cut);
-
-
-  return StatusCode::SUCCESS;
-}		 
-
-///////////////////////////////////////////////////////////////////////////////////
-/// Finalize - delete any memory allocation from the heap
-
-StatusCode AnalysisSkeleton::finalize() {
-   
-  //
-  if(m_doTrigger) {
-     // print trigger statistics
-    ATH_MSG_INFO("STAT Trigger Statistics on " << m_eventNr << " processed events");
-     for(  std::vector<std::string>::const_iterator it = m_triggerChains.begin();it != m_triggerChains.end(); it++)
-       ATH_MSG_INFO("STAT Passed events for chain " << *it << "  " << m_triggersPassed[*it] << " ("<< 100.*m_triggersPassed[*it]/m_eventNr <<"%)");
-  }
-  return StatusCode::SUCCESS;
-
-}
-
-///////////////////////////////////////////////////////////////////////////////////
-/// Clear - clear CBNT members
-StatusCode AnalysisSkeleton::initEvent() {
-  /// For Athena-Aware NTuple
-
-  m_aan_size = 0;
-  m_aan_eta->clear();
-  m_aan_pt->clear();
-  m_aan_elecetres->clear();
-
-  m_aan_njets=0;
-  m_aan_maxJetET = -1000.;
-  m_aan_JetEta->clear();
-  m_aan_JetEt->clear();
-  m_aan_JetBTagWt->clear();  
-  //
-  m_aan_njets_etaLT25=0;
-  m_aan_njets_SusyETCut = 0;
-
-  m_aan_ptMiss = -1.;
-  m_aan_effmass = 0.;
-  m_aan_ht = 0;
-  m_aan_nbjets = 0;
-
-  //
-  m_aan_NFinalEl = 0;
-  m_aan_FinalElPt->clear();
-  m_aan_FinalElEta->clear();
-  m_aan_FinalElEtCone20->clear();
-
-
-  m_aan_NFinalMu = 0;
-  m_aan_FinalMuPt->clear();
-  m_aan_FinalMuBestMat->clear();
-  m_aan_FinalMuEta->clear();
-  m_aan_FinalMuEtCone20->clear();
-  m_aan_FinalMuMatChi2->clear();
-
-  //
-  m_aan_FinalLepEtSum = 0.;
-  m_aan_FinalElEtSum  = 0.;
-  m_aan_FinalMuEtSum  = 0.;
-
-  // 
-  m_aan_NumTopQ=0;
-  m_aan_pTtop1=-1;
-  m_aan_pTtop2=-1;
-
-  return StatusCode::SUCCESS;
-}
-
-//////////////////////////////////////////////////////////////////////////////////
-/// Execute - on event by event
-
-StatusCode AnalysisSkeleton::execute() {
-  //
-  m_eventNr++;
-  ATH_MSG_DEBUG(" in execute()");
-
-  StatusCode sc;
-
-  // initialize first before processing each event
-  sc = initEvent();
-  if ( sc.isFailure() ) {
-    ATH_MSG_WARNING("initEvent failed. Continue");
-  }
-
-  // initialize variables in ntuple
-
-
-
-  /** it shows how to get the Electron and the TruthParticle
-      containers and shows matching between reconstructed and MC electrons
-
-      Can be commented out if you want to do so. If you do so, some of
-      the electron histograms/ntuple variables will be unfilled */
-
-  // this method is discussed in the Computing workbook - uncomment
-  // VJ Oct. 29'08
-  // protect with m_doTruth - VJ, Feb 16, 2010
-
-  if(m_doTruth) {
-    sc = electronSkeleton();
-    if (sc.isFailure()) {
-      ATH_MSG_WARNING("The method electronSkeleton() failed");
-      return StatusCode::SUCCESS;
-    }
-  }
-  /** a minimal example using the TrigDecisionTool */
-  if ( m_doTrigger ) {
-    sc = triggerSkeleton();
-    if (sc.isFailure()) {
-      ATH_MSG_WARNING("The method triggerSkeleton() failed");
-      return StatusCode::SUCCESS;
-    }
-    ATH_MSG_INFO("Pass state L1_MBTS_1 = " << m_trigDec->isPassed("L1_MBTS_1"));
-  }
- 
-  /** an example of analysis preparation, including:
-      - pre-selections based on the recommendations of performance groups
-      - overlap checking
-      - overlap removal */
-
-  /** Do not comment the next method. This is where we do all the
-      selection/overlap removal Those results are then used in the
-      methods later on */
-
-  sc = analysisPreparation();
-  if ( sc.isFailure() ) {
-    ATH_MSG_WARNING("Analysis Preparation Failed ");
-    return StatusCode::SUCCESS;
-  }
-
-  /** The following methods were added by Vivek Jain They just show
-      you how to access different variables and store them in
-      histograms and/or ntuples */
-
-  /** get basic event info. These variables are already in the output ntuple;
-      here we show you how to access them yourself */
-
-  sc = addEventInfo();
-  if (sc.isFailure() ) {
-    ATH_MSG_WARNING("Failure in getEventInfo() ");
-    return StatusCode::SUCCESS;
-  }
-
-  /** look at bjet tagging information in the jets after overlap
-      removal */
-
-  sc = bjetInfo();
-  if ( sc.isFailure() ) {
-    ATH_MSG_WARNING("Failure in bjetInfo ");
-    return StatusCode::SUCCESS;
-  } 
-
-  /** get missing Et information */
-
-  sc = getMissingET();
-  if( sc.isFailure() ) {
-    ATH_MSG_WARNING("Failed to retrieve Et object found in TDS");
-    return StatusCode::SUCCESS;
-  }  
-
-  /** do SUSY studies */
-
-  sc = SusyStudies();
-  if( sc.isFailure() ) {
-    ATH_MSG_WARNING("Failed to do SUSY studies");
-    return StatusCode::SUCCESS;
-  }  
-
-  //
-  m_tree_AS->Fill();
-  return StatusCode::SUCCESS;
-}
-
-//////////////////////////////////////////////////////////////////////////////////
-/// Trigger method - called by execute() on event by event
-/// to be removed if not needed
-
-StatusCode AnalysisSkeleton::triggerSkeleton() {
-
-  ATH_MSG_INFO("Event Number " << m_eventNr);
-
-  // print out list of chains in each level for the first event:
-  if (m_eventNr==1) {
-    ATH_MSG_INFO("L1 Items : " << m_allL1->getListOfTriggers());
-    ATH_MSG_INFO("L2 Chains: " << m_allL2->getListOfTriggers());
-    ATH_MSG_INFO("EF Chains: " << m_allEF->getListOfTriggers());
-  }
-
-  // simple example of isPassed():
-  // isPassed([chain], [condition]) is called with the default argument condition = Physics
-  // a ChainGroup is defined implicitly by the regular expression given by "EF.*" in the call to isPassed()
-  ATH_MSG_INFO("Pass state L1 = " << m_trigDec->isPassed("L1_.*")) ;
-  ATH_MSG_INFO("Pass state L2 = " << m_trigDec->isPassed("L2_.*")) ;
-  ATH_MSG_INFO("Pass state EF = " << m_trigDec->isPassed("EF_.*")) ;
-
-  ATH_MSG_INFO("Pass state L2_tau16i_loose_3j23 = " << m_trigDec->isPassed("L2_tau16i_loose_3j23"));
-  ATH_MSG_INFO("Pass state EF_mu10              = " << m_trigDec->isPassed("EF_mu10"));
-  ATH_MSG_INFO("Pass state EF_mu20              = " << m_trigDec->isPassed("EF_mu20"));
-  ATH_MSG_INFO("Pass state EF_e15_medium        = " << m_trigDec->isPassed("EF_e15_medium"));
-  ATH_MSG_INFO("Pass state EF_e20_loose         = " << m_trigDec->isPassed("EF_e20_loose"));
-
-  // on the first event we are printing out prescale factors for all
-  // EF chains
-  // note that the prescales
-
-  std::vector<std::string>::const_iterator it;
-  if (m_eventNr==1) {
-     const std::vector<std::string> allEF = m_allEF->getListOfTriggers();
-     for(it = allEF.begin(); it != allEF.end(); it++) {
-       ATH_MSG_INFO("Prescale info: chain " << std::left << *it << " has prescale " << m_trigDec->getPrescale(*it));
-     }
-     ATH_MSG_INFO("Stream info: " << m_trigDec->getListOfStreams());
-
-     for(it = m_triggerChains.begin();it != m_triggerChains.end(); it++) {
-        std::vector<std::string> chgrcnt = m_trigDec->getChainGroup(*it)->getListOfTriggers();
-        for(std::vector<std::string>::iterator chgrit = chgrcnt.begin(); chgrit != chgrcnt.end(); chgrit++) {
-	  ATH_MSG_INFO("Chain belonging to " << *it << ": " << *chgrit);
-        }
-     }
-  }
-  
-  
-  // Now we'd like to collect some trigger statistics for the chains specified in m_triggerChains
-  for(it = m_triggerChains.begin();it != m_triggerChains.end(); it++)
-     if( m_trigDec->isPassed(*it) ) {
-        m_triggersPassed[*it]++;
-        m_triggerAccepts->Fill(it->c_str(),1);
-     }
-
-
-  std::string chain(m_investigateChain);
-
-  ATH_MSG_INFO("FLAT Pass state " << chain << " = " << m_trigDec->isPassed(chain));
-
-  // first declare a FeatureContainer; fill it using the features(std::string chain_name) method 
-  FeatureContainer f = m_trigDec->features(chain /*, broken in 15.2.0: TrigDefs::alsoDeactivateTEs*/ );
-  
-  std::vector< Feature<JetCollection> > jetColls = f.get<JetCollection>();
-  ATH_MSG_INFO("FLAT Number of JetCollections in " << chain << ": " << jetColls.size());
-  if(jetColls.size()>0) {
-
-     m_aan_Trig_efJet_et   = 0;
-     m_aan_Trig_efJet_eta  = 0;
-     m_aan_Trig_efJet_phi  = 0;
-     m_aan_Trig_l2Jet_et   = 0;
-     m_aan_Trig_l2Jet_eta  = 0;
-     m_aan_Trig_l2Jet_phi  = 0;
-     m_aan_Trig_l1Jet_et88 = 0;
-     m_aan_Trig_l1Jet_eta  = 0;
-     m_aan_Trig_l1Jet_phi  = 0;
-     
-     const Feature<JetCollection>& jcf = jetColls[0];
-     ATH_MSG_INFO("FLAT TE Label: " << jcf.label());
-     const JetCollection* jc = jcf.cptr();
-     ATH_MSG_INFO("FLAT Number of Jets in JetCollection: " << jc->size());
-     JetCollection::const_iterator jIt = jc->begin();
-     for (; jIt != jc->end(); ++jIt ) {
-        Jet* jet = const_cast<Jet*> (*jIt);
-	//        Jet* jet = *jIt;
-        ATH_MSG_INFO("FLAT Jet e   : " << jet->e())   ;
-        ATH_MSG_INFO("FLAT     eta : " << jet->eta()) ;
-        ATH_MSG_INFO("FLAT     phi : " << jet->phi()) ;
-        ATH_MSG_INFO("FLAT     pt  : " << jet->pt())  ;
-        ATH_MSG_INFO("FLAT     et  : " << jet->et())  ;
-        m_aan_Trig_efJet_et   = jet->et();
-        m_aan_Trig_efJet_eta  = jet->eta();
-        m_aan_Trig_efJet_phi  = jet->phi();
-     }
-
-     // let us find the corresponding jets in Lvl2
-     Feature<TrigT2Jet> l2jetF = m_trigDec->ancestor<TrigT2Jet>(jcf);
-     ATH_MSG_INFO("FLAT Found " << (l2jetF.empty()?"no ":"") << "corresponding L2 Jet.");
-     if ( !l2jetF.empty() ) {
-        const TrigT2Jet* t2jet = l2jetF.cptr();
-        ATH_MSG_INFO("FLAT    e    : " << t2jet->e()); 
-        ATH_MSG_INFO("FLAT    eta  : " << t2jet->eta()); 
-        ATH_MSG_INFO("FLAT    phi  : " << t2jet->phi()); 
-        ATH_MSG_INFO("FLAT    ehad : " << t2jet->ehad0()); 
-        ATH_MSG_INFO("FLAT    eem  : " << t2jet->eem0()); 
-        m_aan_Trig_l2Jet_et   = t2jet->e()/cosh(t2jet->eta());
-        m_aan_Trig_l2Jet_eta  = t2jet->eta();
-        m_aan_Trig_l2Jet_phi  = t2jet->phi();
-     }
-     
-     // we can also access the L1 Jet_ROI using the ancestor method of the TrigDecisionTool
-     Feature<Jet_ROI> jRoIF =  m_trigDec->ancestor<Jet_ROI>(jcf);
-     ATH_MSG_INFO("FLAT Found " << (jRoIF.empty()?"no ":"") << "corresponding Jet_ROI"); 
-     if ( !jRoIF.empty() ) {
-        const Jet_ROI* jroi = jRoIF.cptr();
-        ATH_MSG_INFO("FLAT Passed thresholds" << jroi->getThresholdNames()); 
-        ATH_MSG_INFO("FLAT    ET4x4 : " << jroi->getET4x4()); 
-        ATH_MSG_INFO("FLAT    ET6x6 : " << jroi->getET6x6()); 
-        ATH_MSG_INFO("FLAT    ET8x8 : " << jroi->getET8x8());
-        ATH_MSG_INFO("FLAT    eta   : " << jroi->eta()); 
-        ATH_MSG_INFO("FLAT    phi   : " << jroi->phi()); 
-        m_aan_Trig_l1Jet_et88 = jroi->getET8x8();
-        m_aan_Trig_l1Jet_eta  = jroi->eta();
-        m_aan_Trig_l1Jet_phi  = jroi->phi();
-     }
-  }
-
-
-  // now we'd like to look at the Combinations of jets and taus that make up the chain decision
-  const std::vector<Trig::Combination>& tauJetCombinations = f.getCombinations();
-     ATH_MSG_INFO("COMB Pass state " << chain << " = " << m_trigDec->isPassed(chain));
-     ATH_MSG_INFO("COMB Number of TauJetCombinations in " << chain << ": " << tauJetCombinations.size()); 
-  std::vector<Trig::Combination>::const_iterator cIt;
-  for ( cIt = tauJetCombinations.begin(); cIt != tauJetCombinations.end(); ++cIt ) {
-
-     const Trig::Combination& comb = *cIt;
-     
-     std::vector< Feature<TauJetContainer> > tauC = comb.get<TauJetContainer>();
-     std::vector< Feature<JetCollection> >   jetC = comb.get<JetCollection>();
-
-     ATH_MSG_INFO("COMB Combination was " << (comb.active()?"":"not ") << "active.");
-
-     if(tauC.size()>0 || jetC.size()>0) {
-        ATH_MSG_INFO("COMB Combination has " << tauC.size() << " TauJetContainer Fs and " 
-		     << jetC.size() << " JetCollection Fs");
-
-        const TauJetContainer* taus = tauC[0];
-        const JetCollection* jets = jetC[0];
-        
-        ATH_MSG_INFO("COMB In the TauJetContainer are " << taus->size() << " taus and in the JetCollection are "
-		     << jets->size() << " jets."); 
-     } else {
-       ATH_MSG_INFO("COMB TauJetContainer or JetCollection missing."); 
-     }
-
-     std::vector< Feature<TrigTau> >   tauFV = comb.get<TrigTau>();
-     std::vector< Feature<TrigT2Jet> > jetFV = comb.get<TrigT2Jet>();
-
-     ATH_MSG_INFO("COMB Combination has " << tauFV.size() << " TrigTau Fs and " << jetFV.size() << " TrigT2Jet Fs."); 
-    
-  }
-    
-   
-  return StatusCode::SUCCESS;
-}
-
-
-
-
-
-
-//////////////////////////////////////////////////////////////////////////////////
-/// Electron method - called by execute() on event by event
-/// to be removed if not needed
-
-StatusCode AnalysisSkeleton::electronSkeleton() {
-  
-  ATH_MSG_DEBUG("in electronSkeleton()");
-
-  /** get the MC truth particle AOD container from StoreGate */
-  const TruthParticleContainer*  mcpartTES = 0;
-  StatusCode sc=evtStore()->retrieve( mcpartTES, m_truthParticleContainerName);
-  if( sc.isFailure()  ||  !mcpartTES ) {
-     ATH_MSG_WARNING("No AOD MC truth particle container found in TDS");
-     return StatusCode::SUCCESS;
-  }
-  ATH_MSG_DEBUG("MC Truth Container Successfully Retrieved");
-  
-  /** get the container of the original AOD electron - without any selection */
-  /** get the AOD electron container for TES */
-  const ElectronContainer* elecTES = 0;
-  sc=evtStore()->retrieve( elecTES, m_electronContainerName);
-  if( sc.isFailure()  ||  !elecTES ) {
-     ATH_MSG_WARNING("No AOD electron container found in TDS");
-     return StatusCode::FAILURE;
-  }  
-  ATH_MSG_DEBUG("ElectronContainer successfully retrieved - size is " << elecTES->size() << " electrons ");
-
-
-  /** iterators over the electron container - the pre-selected ones or the original ones */ 
-  ElectronContainer::const_iterator elecItr  = elecTES->begin();
-  ElectronContainer::const_iterator elecItrE = elecTES->end();
-
-  for (; elecItr != elecItrE; ++elecItr) {
-
-    /** apply further selections if necessary */
-    /** check for the author of the electron */
-    bool author = (*elecItr)->author(egammaParameters::AuthorElectron);
-    if ( !author || (*elecItr)->pt() < m_etElecCut ) continue;
-
-    m_aan_size++;
-
-
-    /** fill histograms */
-    m_h_elecpt->Fill( (*elecItr)->pt(), 1.);
-    m_h_eleceta->Fill( (*elecItr)->eta(), 1.);
-
-    /** fill Athena-Aware NTuple */
-    m_aan_eta->push_back((*elecItr)->eta());
-    m_aan_pt->push_back((*elecItr)->pt());
-
-    /** find a match to this electron in the MC truth container
-        the index and deltaR are returned */
-    int index = -1;
-    double deltaRMatch;
-    if( (*elecItr)->trackParticle() && (*elecItr)->pt()> m_etElecCut ) {
-       const TruthParticleContainer * truthContainer = mcpartTES;
-       bool findAMatch = m_analysisTools->matchR((*elecItr), truthContainer, 
-		       index, deltaRMatch, (*elecItr)->pdgId());
-       if (findAMatch) {
-          deltaRMatch = (deltaRMatch > m_maxDeltaR) ? m_maxDeltaR : deltaRMatch;
-
-          m_h_elec_deltaRMatch->Fill(deltaRMatch);
-          ATH_MSG_DEBUG("Electron: MC/Reco DeltaR " << deltaRMatch);
-          /** check for good match */
-          if ( deltaRMatch < m_deltaRMatchCut) {
-             const TruthParticle*  electronMCMatch = (*mcpartTES)[index]; 
-             double res = (*elecItr)->pt() / electronMCMatch->pt();
-             m_aan_elecetres->push_back(res);
-          }
-       }
-    }    
-  }
-
-  ATH_MSG_DEBUG("electronSkeleton() succeeded");
-  		
-  return StatusCode::SUCCESS;
-}
-
-//////////////////////////////////////////////////////////////////////////////////
-/// Analysis Preparation method - called by execute() on event by event
-/// A lot of the AOD container are read in
-/// pre-selection is done using the UserAnalysisSelectionTool
-/// An example of overlap checking is demonstrated
-/// An example of overlap removal is demonstrated
-StatusCode AnalysisSkeleton::analysisPreparation() {
-
-  ATH_MSG_DEBUG("in analysisPreparation()");
-
-  /** loop over Electrons from the AOD and see which pass the recommended electron selection 
-      These selections are defined in m_analysisSelectionTool - to be changed if necessary */
-  const ElectronContainer* elecTES = 0;
-  ATH_CHECK( evtStore()->retrieve( elecTES, m_electronContainerName) );
-  ElectronContainer::const_iterator elecItr  = elecTES->begin();
-  ElectronContainer::const_iterator elecItrE = elecTES->end();
-  for (; elecItr != elecItrE; ++elecItr) {
-    bool passedSelection = m_analysisSelectionTool->isSelected( *elecItr );
-    if ( passedSelection ) ATH_MSG_DEBUG("Found a potential good Electron ");
-  }
-
-  /** do analysis preparation using the AnalysisPreparationTool
-      selections based on recommended selections from performance groups
-      The tool outputs various containers of pre-selected objects */
-  StatusCode sc = m_analysisPreparationTool->execute();
-  if ( sc.isFailure() ) {
-    ATH_MSG_WARNING("AnalysisPreparation Failed - selection ");
-    return StatusCode::SUCCESS;
-  }
-
-  /** get the pre-selected Electrons - given by the AnalysisPreparationTool */
-  const ElectronContainer* preselectedElecTES = m_analysisPreparationTool->selectedElectrons();
-  if ( !preselectedElecTES ) {
-    ATH_MSG_ERROR("Selected Electrons Not Found");
-    return StatusCode::FAILURE;
-  }
-  ATH_MSG_DEBUG("Pre-selected Electrons successfully retrieved - size is " << preselectedElecTES->size() << " electrons ");
-
-  /** get the pre-selected Muons - given by the AnalysisPreparationTool */
-  const MuonContainer* preselectedMuonTES = m_analysisPreparationTool->selectedMuons();
-  if ( !preselectedMuonTES ) {
-    ATH_MSG_ERROR( "Selected Muons Not Found ");
-    return StatusCode::FAILURE;
-  }
-  ATH_MSG_DEBUG( "Pre-selected Muons successfully retrieved - size is " << preselectedMuonTES->size() << " muons ");
-
-  /** Check if the leading Electron and the Leading Muon overlap or not */
-  double deltaR = -1.0;
-  if ( preselectedElecTES->size() > 0 && preselectedMuonTES->size() > 0) {
-     const Electron * leadingElectron = preselectedElecTES->at(0);
-     const Analysis::Muon     * leadingMuon     = preselectedMuonTES->at(0);
-     bool areOverlapping = m_analysisOverlapCheckingTool->overlap( leadingElectron, leadingMuon, deltaR ); 
-     if ( areOverlapping ) ATH_MSG_INFO("Leading Electron and Leading Muon overlap - deltaR = " << deltaR);
-  }
-
-  /** now remove the overlaps using this tool 
-      The input to the tool is the collection of pre-selected objects obtained from the AnalysisPreparationTool 
-      The output is various collections of final state non-overlapping particles 
-      You can change the order of the overlap removal by changing the input to the tool in job options */
-  sc = m_analysisOverlapRemovalTool->execute();
-  if ( sc.isFailure() ) {
-    ATH_MSG_WARNING( "AnalysisPreparation Failed - overlap removal ");
-    return StatusCode::SUCCESS;
-  }
-
-  /** get the final state Electrons - given by the AnalysisOverlapRemovalTool */
-  const ElectronContainer* finalStateElecTES = m_analysisOverlapRemovalTool->finalStateElectrons();
-  if ( !finalStateElecTES ) ATH_MSG_WARNING( "Final State Electrons Not Found ");
-  ATH_MSG_DEBUG( "Final State Electrons successfully retrieved - size is " << finalStateElecTES->size() << " electrons ");
-
-  ATH_MSG_DEBUG( "AnalysisPreparation() succeeded");
- 
-  return StatusCode::SUCCESS;
-
-}
-//////////////////////////////////////////////////////////////////////////////////
-/// Method to look at bjetInfo - called by execute() on event by event
-/// Use jets after overlap removal and look at the b-tagging weights within
-///
-//////////////////////////////////////////////////////////////////////////////////
-StatusCode AnalysisSkeleton::bjetInfo() {
-
-  ATH_MSG_DEBUG( "in bjetInfo()");
-
-  /** loop over jet container after overlap removal 
-      As a check first get the container after selection cuts, but BEFORE Overlap Removal */
-
-  const JetCollection* selectedJetTES = m_analysisPreparationTool->selectedJets();
-  if ( !selectedJetTES ) {
-    ATH_MSG_ERROR( "Selected Particle Jets Not Found ");
-    return StatusCode::FAILURE;
-  }
-  else ATH_MSG_DEBUG( "Selected Jets successfully retrieved - size is " << selectedJetTES->size() << " jets ");
-  // 
-  const JetCollection* finalStateJetTES = m_analysisOverlapRemovalTool->finalStateJets();
-  if ( !finalStateJetTES ) {
-    ATH_MSG_WARNING( "Final State Particle Jets Not Found ");
-    return StatusCode::SUCCESS;
-  }
-  ATH_MSG_DEBUG( "Final State Jets successfully retrieved - size is " << finalStateJetTES->size() << " jets ");
-
-  /** now look at some variables before and after overlap removal */
-
-  int iflav;
-  JetCollection::const_iterator jetItr_sel  = selectedJetTES->begin();
-  JetCollection::const_iterator jetItrE_sel = selectedJetTES->end();
-  for (; jetItr_sel != jetItrE_sel; ++jetItr_sel) {
-
-    CLHEP::HepLorentzVector p4((*jetItr_sel)->px(),
-			(*jetItr_sel)->py(),
-			(*jetItr_sel)->pz(),
-			(*jetItr_sel)->e());
-
-    m_h_jet_eta_beforeOR->Fill(p4.pseudoRapidity());
-    m_h_jet_et_beforeOR->Fill(p4.et()/GeV);
-
-    /** get b-tagging info */
-
-    double w_cmb = (*jetItr_sel)->getFlavourTagWeight(); // weight for IP3DSV1
-    m_h_jet_ip3dsv1Wt_beforeOR->Fill(w_cmb);
-
-    /** get quark flavour that originates this jet */
-    // if we are looking at data, set it to -10
-    if(m_doTruth) iflav = getQuarkJetFlavour(jetItr_sel);
-    else iflav=-10;
-    m_h_jet_label_beforeOR->Fill((float) iflav);
-
-    if(p4.et() > m_bjet_etCut && fabs(p4.pseudoRapidity()) < m_bjet_etaCut) {
-      if(iflav==5) m_h_jet_ip3dsv1Wt_bjet_beforeOR->Fill(w_cmb);
-      if(iflav==0) m_h_jet_ip3dsv1Wt_ujet_beforeOR->Fill(w_cmb);
-    }
-
-  }
-  /** after overlapRemoval */
-  JetCollection::const_iterator jetItr_fin  = finalStateJetTES->begin();
-  JetCollection::const_iterator jetItrE_fin = finalStateJetTES->end();
-  for (; jetItr_fin != jetItrE_fin; ++jetItr_fin) {
-
-    CLHEP::HepLorentzVector p4((*jetItr_fin)->px(),
-			(*jetItr_fin)->py(),
-			(*jetItr_fin)->pz(),
-			(*jetItr_fin)->e());
-
-    m_h_jet_eta_afterOR->Fill(p4.pseudoRapidity());
-    m_h_jet_et_afterOR->Fill(p4.et()/GeV);
-
-    /** get b-tagging info */
-
-
-
-    if(p4.et() > m_bjet_etCut && fabs(p4.pseudoRapidity()) < m_bjet_etaCut) {
-
-      double w_cmb = (*jetItr_fin)->getFlavourTagWeight(); // weight for IP3DSV1
-      m_h_jet_ip3dsv1Wt_afterOR->Fill(w_cmb);
-
-      /** get quark flavour that originates this jet */
-      if(m_doTruth) iflav = getQuarkJetFlavour(jetItr_fin);
-      else iflav=-10;
-      m_h_jet_label_afterOR->Fill((float) iflav);
-      //
-
-      if(w_cmb > m_bjetWt_ip3dsv1Cut) m_aan_nbjets++; // count # of bjets in event
-
-      if(iflav==5) m_h_jet_ip3dsv1Wt_bjet_afterOR->Fill(w_cmb);
-      if(iflav==0) m_h_jet_ip3dsv1Wt_ujet_afterOR->Fill(w_cmb);
-    }
-
-  }
-
-  return StatusCode::SUCCESS;
-
-}
-////////
-
-int AnalysisSkeleton::getQuarkJetFlavour(JetCollection::const_iterator jetItr) {
-
-
-    /** flavour of quark that originated this jet */
-    // --- get the true label of the jet from MC Truth
-
-    std::string label("N/A");
-    
-    const Analysis::TruthInfo* mcinfo = (*jetItr)->tagInfo<Analysis::TruthInfo>("TruthInfo");
-    if(mcinfo) {
-      label = mcinfo->jetTruthLabel();
-    } else {
-      ATH_MSG_VERBOSE("could not find TruthInfo for matching jet");
-    }
-    int iflav(0);
-    if(label=="B") {
-      return iflav = 5;
-    }
-    if(label=="C") {
-      return iflav = 4;
-    }
-    if(label=="T") {
-      return iflav = 15;
-    }
-
-    return iflav;
-}
-////////////////////////////////////////////////////////////////////////////////////////////////
-/// missing Et object
-
-StatusCode AnalysisSkeleton::getMissingET() {
-
-  ATH_MSG_DEBUG( "in getMissingEt()");
-
-  StatusCode sc = StatusCode::SUCCESS;
-
-  if (!m_isAtlFastData) {
-    /// retrieve the missing Et object from TDS
-    sc = evtStore()->retrieve(m_pMissing,m_missingETObjectName);
-    if (sc.isFailure()) {
-      ATH_MSG_ERROR("Could not retrieve MissingET Object");
-      return StatusCode::SUCCESS;
-    }
-    else ATH_MSG_DEBUG(" retreived missing ET from AOD");
-
-    m_pxMiss = m_pMissing->etx();
-    m_pyMiss = m_pMissing->ety();
-    m_ptMiss = m_pMissing->et();
-  } else {
-    /// retrieve the missing Et object from TDS
-    sc=evtStore()->retrieve(m_pMissing, "AtlfastMissingEt");
-    if( sc.isFailure()  ||  !m_pMissing ) {
-      ATH_MSG_WARNING("No Atlfast missing Et object found in TDS");
-      return StatusCode::SUCCESS;
-    }  
-    m_pxMiss = m_pMissing->etx();
-    m_pyMiss = m_pMissing->ety();
-    m_ptMiss = m_pMissing->et();
-  }
-
-  /// fill missing energy histograms
-  m_pxMis->Fill(m_pxMiss);
-  m_pyMis->Fill(m_pyMiss);
-  m_ptMis->Fill(m_ptMiss);
-
-  m_aan_ptMiss = m_ptMiss;
-
-  return sc;
-
-}
-////////////////////////////////////////////////////////
-StatusCode AnalysisSkeleton::SusyStudies() {
-
-  /** Make some introductory plots */
-
-  ATH_MSG_DEBUG( "in SusyStudies()");
-
-  /** loop over truth container and get pT of the earliest top quarks */
-
-  double pTtop1;
-  double pTtop2;
-  int numTops;
-
-  StatusCode sc = StatusCode::SUCCESS; 
-  if(m_doTruth) {
-    sc = getTopQpT(numTops, pTtop1, pTtop2);  
-    if(!sc) ATH_MSG_DEBUG( "Something wrong with finding top quark pT");
-    else{
-      
-      ATH_MSG_DEBUG(" found the top quarks ");
-      m_aan_NumTopQ = 2;
-      m_aan_pTtop1 = pTtop1;
-      m_aan_pTtop2 = pTtop2;
-    }
-  }
-  else {m_aan_NumTopQ=0; m_aan_pTtop1 = -10; m_aan_pTtop2 = -10;}
-
-  /** loop over jet container after overlap removal */
-
-  const JetCollection* finalStateJetTES = m_analysisOverlapRemovalTool->finalStateJets();
-  if ( !finalStateJetTES ) {
-    ATH_MSG_WARNING( "SusyStudies: Final State Particle Jets Not Found ");
-    return StatusCode::SUCCESS;
-  }
-  ATH_MSG_DEBUG( "SusyStudies: Final State Jets successfully retrieved - size is " << finalStateJetTES->size() << " jets ");
-
-  double w_cmb=-100;
-  JetCollection::const_iterator jetItr_fin  = finalStateJetTES->begin();
-  JetCollection::const_iterator jetItrE_fin = finalStateJetTES->end();
-  for (; jetItr_fin != jetItrE_fin; ++jetItr_fin) {
-
-    CLHEP::HepLorentzVector p4((*jetItr_fin)->px(),
-			(*jetItr_fin)->py(),
-			(*jetItr_fin)->pz(),
-			(*jetItr_fin)->e());
-
-    /** variables for the ntuple  */
-    m_aan_njets++;
-    if(fabs(p4.pseudoRapidity()) < m_bjet_etaCut ) m_aan_njets_etaLT25++;
-    if(p4.et()> m_SusyJetMinEt ) m_aan_njets_SusyETCut++;
-
-    m_aan_JetEta->push_back((*jetItr_fin)->eta());
-    m_aan_JetEt->push_back(p4.et());
-
-    w_cmb = -100;
-    if(p4.et() > m_bjet_etCut && fabs(p4.pseudoRapidity()) < m_bjet_etaCut) {
-      w_cmb = (*jetItr_fin)->getFlavourTagWeight();} // weight for IP3DSV1
-    m_aan_JetBTagWt->push_back(w_cmb);
-
-    m_aan_ht += p4.et(); // scalar sum of jet ET
-    if(p4.et()>m_aan_maxJetET) m_aan_maxJetET = p4.et(); // Jet with max ET
-
-  } // loop over jets
-
-  /** Get final state leptons. We need these in the determination of effective mass
-      We should also store the leptons in the ntuple, so that we can re-do the calculation later */
-  
-  /** First do electrons */
-
-  const ElectronContainer* finalStateElecTES = m_analysisOverlapRemovalTool->finalStateElectrons();
-  if ( !finalStateElecTES ) {
-
-    ATH_MSG_WARNING( "SusyStudies: Final State Electrons Not Found ");
-    return StatusCode::SUCCESS;
-  }
-  ATH_MSG_DEBUG( "SusyStudies: Final State Electrons successfully retrieved - size is " << finalStateElecTES->size() << " electrons ");
-
-
-  /** iterators over the electron container - final state */ 
-  ElectronContainer::const_iterator felecItr  = finalStateElecTES->begin();
-  ElectronContainer::const_iterator felecItrE = finalStateElecTES->end();
-
-  double sum_elET=0;
-
-  for (; felecItr != felecItrE; ++felecItr) {
-
-    /** apply further selections if necessary */
-    /** check for the author of the electron */
-    bool author = (*felecItr)->author(egammaParameters::AuthorElectron);
-    if ( !author || (*felecItr)->pt() < m_etElecCut ) continue;
-
-
-    const EMShower* eshow = (*felecItr)->detail<EMShower>("egDetailAOD");
-    double etisol = -1;
-    if( eshow ) etisol = eshow->parameter(egammaParameters::etcone20);
-
-    ATH_MSG_DEBUG( "SusyStudies:isEM/etisol "<< (*felecItr)->isem()<<","<<etisol);
-
-    m_aan_NFinalEl++;
-    m_aan_FinalElPt->push_back((*felecItr)->pt());
-    m_aan_FinalElEta->push_back((*felecItr)->eta());
-    m_aan_FinalElEtCone20->push_back(etisol);
-    
-    /** isEM cut already picks out isolated electrons, so this is just a sanity check */
-    if(etisol<10*GeV) sum_elET += (*felecItr)->pt();
-  }
-
-
-  /** Now look at muons */
-
-  const MuonContainer* finalStateMuonTES = m_analysisOverlapRemovalTool->finalStateMuons();
-  if ( !finalStateMuonTES ) {
-
-    ATH_MSG_WARNING( "SusyStudies: Final State Muons Not Found ");
-    return StatusCode::SUCCESS;
-  }
-  ATH_MSG_DEBUG( "SusyStudies: Final State Muons successfully retrieved - size is " << finalStateMuonTES->size() << " muons ");
-
-
-  /** iterators over the muon container - final state  */ 
-  MuonContainer::const_iterator fmuonItr  = finalStateMuonTES->begin();
-  MuonContainer::const_iterator fmuonItrE = finalStateMuonTES->end();
-
-  double sum_muET = 0;
-
-  for (; fmuonItr != fmuonItrE; ++fmuonItr) {
-
-    /** apply further selections if necessary */
-    /** check for the author of the muon */
-
-    double etIsol = (*fmuonItr)->parameter( static_cast<MuonParameters::ParamDef>(1) ); // dR of 0.2
-
-    ATH_MSG_DEBUG( "SusyStudies:Muon etisol/best match "<< etIsol<<","<<(*fmuonItr)->bestMatch());
-   
-    m_aan_NFinalMu++;
-    m_aan_FinalMuPt->push_back( (*fmuonItr)->pt());
-    m_aan_FinalMuEta->push_back( (*fmuonItr)->eta());
-    m_aan_FinalMuEtCone20->push_back( etIsol);
-    m_aan_FinalMuBestMat->push_back( (*fmuonItr)->bestMatch());
-    m_aan_FinalMuMatChi2->push_back((*fmuonItr)->matchChi2());
-
-    /** require bestMatch, chi2 and isolation cuts */
-    if((*fmuonItr)->bestMatch()==1 && (*fmuonItr)->matchChi2() <100. && etIsol < 10*GeV) sum_muET +=(*fmuonItr)->pt();
-
-  }
-
-  ATH_MSG_DEBUG( "SusyStudies: here ");
-
-  /** now calculate effmass */
-  m_aan_FinalLepEtSum = sum_elET + sum_muET; // keep leptons separate for now.
-  m_aan_FinalElEtSum  = sum_elET;
-  m_aan_FinalMuEtSum  = sum_muET;
-
-  m_aan_effmass = m_aan_ptMiss + m_aan_ht; // scalar sum of jet ET + missing ET 
-
-  ATH_MSG_DEBUG( "SusyStudies: here now ");
-
-  return sc;
-
-}
-StatusCode AnalysisSkeleton::getTopQpT(int& numTops, double& top1, double& top2) {
-
-  ATH_MSG_DEBUG( "in getTopQpT()");
-
-  //
-  top1 = -1; top2 = -1;
-  //
-  double topPt[2];
-  topPt[0]=-1;
-  topPt[1]=-1;
-  //
-  /** get the MC truth particle AOD container from StoreGate */
-
-  const TruthParticleContainer*  mcpartTES = 0;
-  StatusCode sc=evtStore()->retrieve( mcpartTES, m_truthParticleContainerName);
-  if( sc.isFailure()  ||  !mcpartTES ) {
-    ATH_MSG_WARNING("No AOD MC truth particle container found in TDS");
-    return StatusCode::SUCCESS;
-  }
-  ATH_MSG_DEBUG("MC Truth Container Successfully Retrieved");
-
-  /** loop over particles and get the top quarks produced at the hard scatter */
-
-  TruthParticleContainer::const_iterator mcpItr  = mcpartTES->begin();
-  TruthParticleContainer::const_iterator mcpItrE = mcpartTES->end();
-
-  numTops=0;
-  for (; mcpItr != mcpItrE; ++mcpItr) {
-
-    const HepMC::GenParticle* part =(*mcpItr)->genParticle();
-    int pdgid = part->pdg_id();
-
-    if(numTops==2) break; // quit if we have two already
-
-    if(abs(pdgid)==6) { // it is a top
-
-      HepMC::GenVertex* prod_vtx = part->production_vertex();
-      int vtx_barcode = 1;
-      if(prod_vtx) vtx_barcode = HepMC::barcode(prod_vtx);
-
-      if(vtx_barcode == -1) {
-
-        topPt[numTops] = (part->momentum()).perp();
-        numTops++;
-
-      }
-    
-
-    }
-  }
-
-  top1 = topPt[0];
-  top2 = topPt[1];
-  return StatusCode::SUCCESS;
-}
-////////////////////////////////////////////////////////////
-// here we get event Information and store it in our new ntuple
-// In the old days, when we inherited from CBNT_AthenaAwareBase, this information was put in the
-// ntuple by default
-////////////////////////////////////////////////////////
-StatusCode AnalysisSkeleton::addEventInfo() {
-
-
-  ATH_MSG_DEBUG( "in addEventInfo");
-
-  // this code has been taken from AnalysisExamples/VFitZmmOnAOD
-  // I have the actual EventNumber, but skipped the sequential count of event #
-  // 
-
-  //get EventInfo for run and event number
-
-  const xAOD::EventInfo* eventInfo;
-  StatusCode sc = evtStore()->retrieve(eventInfo);
-  
-  if (sc.isFailure())
-    {
-      ATH_MSG_WARNING("Could not retrieve event info");
-      return sc;
-    }
-  
-  //
-  m_runNumber=eventInfo->runNumber();
-  m_eventNumber=eventInfo->eventNumber();
-  ATH_MSG_DEBUG( "event "<<m_eventNumber);
-
-  m_eventTime=eventInfo->timeStamp() ; 
-  m_lumiBlock=eventInfo->lumiBlock() ;
-  m_bCID=eventInfo->bcid();
-  m_eventWeight=eventInfo->mcEventWeight();
-
-  /*
-  // see code in triggerSkeleton()
-  //
-  const TriggerInfo* myTriggerInfo=eventInfo->trigger_info();
-  if (myTriggerInfo!=0) {
-    m_lVL1ID=myTriggerInfo->extendedLevel1ID();
-    m_statusElement=myTriggerInfo->statusElement();
-    m_lvl1TriggerType=myTriggerInfo->level1TriggerType();
-
-    std::vector<TriggerInfo::number_type>::const_iterator lvl1TrigIt=myTriggerInfo->level1TriggerInfo().begin();
-    std::vector<TriggerInfo::number_type>::const_iterator lvl1TrigIt_e=myTriggerInfo->level1TriggerInfo().end();
-    for (;lvl1TrigIt!=lvl1TrigIt_e;lvl1TrigIt++)
-      m_lvl1TriggerInfo->push_back(*lvl1TrigIt);
-
-
-    std::vector<TriggerInfo::number_type>::const_iterator lvl2TrigIt=myTriggerInfo->level2TriggerInfo().begin();
-    std::vector<TriggerInfo::number_type>::const_iterator lvl2TrigIt_e=myTriggerInfo->level2TriggerInfo().end();
-    for (;lvl2TrigIt!=lvl2TrigIt_e;lvl2TrigIt++)
-      m_lvl2TriggerInfo->push_back(*lvl2TrigIt);
-
-    std::vector<TriggerInfo::number_type>::const_iterator evtFilterIt=myTriggerInfo->eventFilterInfo().begin();
-    std::vector<TriggerInfo::number_type>::const_iterator evtFilterIt_e=myTriggerInfo->eventFilterInfo().end();
-    for (;evtFilterIt!=evtFilterIt_e;evtFilterIt++)
-      m_evtFilterInfo->push_back(*evtFilterIt);
-
-
-    std::vector<TriggerInfo::StreamTag>::const_iterator streamInfoIt=myTriggerInfo->streamTags().begin();
-    std::vector<TriggerInfo::StreamTag>::const_iterator streamInfoIt_e=myTriggerInfo->streamTags().end();
-    for (;streamInfoIt!=streamInfoIt_e;streamInfoIt++) { 
-      m_streamTagName->push_back(streamInfoIt->name());
-      m_streamTagType->push_back(streamInfoIt->type());
-    }
-
-  }else
-    {
-      m_lVL1ID=0;
-      m_statusElement=0;
-      m_lvl1TriggerType=0;
-    }
-  */
-    
-  return StatusCode::SUCCESS;
-
-}
-
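For reference, the getTopQpT() method deleted above scans the truth record for the first two top quarks (|pdg_id| == 6) whose production vertex carries the hard-scatter barcode -1 and stores their transverse momenta. A minimal standalone sketch of that selection, using a hypothetical TruthPart struct in place of the TruthParticle/HepMC classes:

#include <cstdlib>
#include <utility>
#include <vector>

// Hypothetical flattened view of a truth particle: PDG id, barcode of the
// production vertex (0 if none), and transverse momentum.
struct TruthPart {
  int    pdgId;
  int    prodVtxBarcode;
  double pt;
};

// Return the pT of the first two top quarks produced at the hard-scatter
// vertex (barcode -1), or -1 when fewer than two are found.
std::pair<double, double> topQuarkPt(const std::vector<TruthPart>& parts) {
  double topPt[2] = {-1.0, -1.0};
  int    nTops    = 0;
  for (const TruthPart& p : parts) {
    if (nTops == 2) break;                 // two tops are enough
    if (std::abs(p.pdgId) != 6) continue;  // not a top quark
    if (p.prodVtxBarcode != -1) continue;  // not from the hard scatter
    topPt[nTops++] = p.pt;
  }
  return {topPt[0], topPt[1]};
}
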
diff --git a/PhysicsAnalysis/AnalysisCommon/UserAnalysis/src/components/UserAnalysis_entries.cxx b/PhysicsAnalysis/AnalysisCommon/UserAnalysis/src/components/UserAnalysis_entries.cxx
deleted file mode 100644
index cd03c0c28cbc9e4aafd74c51ee91982a07c7b962..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/UserAnalysis/src/components/UserAnalysis_entries.cxx
+++ /dev/null
@@ -1,5 +0,0 @@
-#include "UserAnalysis/AnalysisSkeleton.h"
-
-
-DECLARE_COMPONENT( AnalysisSkeleton )
-
diff --git a/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/CMakeLists.txt b/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/CMakeLists.txt
deleted file mode 100644
index 6b19dffd7c5e0443319c0d005bcd5f203c209923..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/CMakeLists.txt
+++ /dev/null
@@ -1,46 +0,0 @@
-# $Id: CMakeLists.txt 773457 2016-09-15 13:17:29Z krasznaa $
-################################################################################
-# Package: UserAnalysisUtils
-################################################################################
-
-# Declare the package name:
-atlas_subdir( UserAnalysisUtils )
-
-# Declare the package's dependencies:
-atlas_depends_on_subdirs(
-   PUBLIC
-   Calorimeter/CaloEvent
-   Control/AthenaBaseComps
-   Control/AthContainers
-   Event/NavFourMom
-   GaudiKernel
-   PhysicsAnalysis/AnalysisCommon/AnalysisTools
-   Reconstruction/Jet/JetEvent
-   Reconstruction/MissingETEvent
-   Reconstruction/MuonIdentification/muonEvent
-   Reconstruction/Particle
-   Reconstruction/egamma/egammaEvent
-   Reconstruction/tauEvent
-   Tracking/TrkEvent/TrkSegment
-   Tracking/TrkEvent/VxVertex )
-
-# External dependencies:
-find_package( CLHEP )
-
-# Component(s) in the package:
-atlas_add_library( UserAnalysisUtilsLib
-   UserAnalysisUtils/*.h src/*.cxx
-   PUBLIC_HEADERS UserAnalysisUtils
-   PRIVATE_INCLUDE_DIRS ${CLHEP_INCLUDE_DIRS}
-   PRIVATE_DEFINITIONS ${CLHEP_DEFINITIONS}
-   LINK_LIBRARIES CaloEvent AthenaBaseComps NavFourMom GaudiKernel JetEvent
-   MissingETEvent muonEvent Particle egammaEvent tauEvent TrkSegment VxVertex
-   AthAnalysisToolsLib
-   PRIVATE_LINK_LIBRARIES ${CLHEP_LIBRARIES} )
-
-atlas_add_component( UserAnalysisUtils
-   src/components/*.cxx
-   LINK_LIBRARIES UserAnalysisUtilsLib )
-
-# Install files from the package:
-atlas_install_joboptions( share/*.py )
diff --git a/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/UserAnalysisUtils/UserAnalysisOverlapCheckingTool.h b/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/UserAnalysisUtils/UserAnalysisOverlapCheckingTool.h
deleted file mode 100644
index 487477ea76407bd53b624e286b9a33a66eb6d3af..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/UserAnalysisUtils/UserAnalysisOverlapCheckingTool.h
+++ /dev/null
@@ -1,114 +0,0 @@
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-#ifndef USERANALYSISUTILS_USERANALYSISOVERLAPCHECKINGTOOL_H  
-#define USERANALYSISUTILS_USERANALYSISOVERLAPCHECKINGTOOL_H 
-
-/*****************************************************************************
-Name    : UserAnalysisOverlapCheckingTool.h
-Package : offline/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils
-Author  : Ketevi A. Assamagan
-Created : November 2007
-Purpose : User tools for checking overlaps at deltaR, TrackParticle/Cluster and Hit/Cell levels
-*****************************************************************************/
-
-//#include "GaudiKernel/AlgTool.h"
-//#include "GaudiKernel/MsgStream.h"
-//#include "StoreGate/StoreGateSvc.h"
-#include "GaudiKernel/ToolHandle.h"
-#include "AthenaBaseComps/AthAlgTool.h"
-
-#include "VxVertex/VxContainer.h"
-#include "Particle/TrackParticleContainer.h"
-#include "CaloEvent/CaloClusterContainer.h"
-#include "TrkSegment/SegmentCollection.h"
-
-#include "muonEvent/MuonContainer.h"
-#include "egammaEvent/ElectronContainer.h"
-#include "egammaEvent/PhotonContainer.h"
-#include "tauEvent/TauJetContainer.h"
-#include "JetEvent/JetCollection.h"
-#include "MissingETEvent/MissingET.h"
-
-#include "NavFourMom/IParticleContainer.h"
-#include "NavFourMom/INavigable4MomentumCollection.h"
-
-#include "AnalysisTools/AnalysisTools.h"
-
-#include <string>
-#include <map>
-#include <vector>
-
-class MsgStream;
-
-/** Interface ID */  
-static const InterfaceID IID_UserAnalysisOverlapCheckingTool("UserAnalysisOverlapCheckingTool", 1, 0);
-
-class UserAnalysisOverlapCheckingTool : public AthAlgTool {
-
-public:
-
-  /** Standard Constructor */
-  UserAnalysisOverlapCheckingTool(const std::string& type, const std::string& name,
-	                          const IInterface* parent);
-
-  /** AlgTool and IAlgTool interface methods */
-  static const InterfaceID& interfaceID() { return IID_UserAnalysisOverlapCheckingTool; };
-
-  /** Overriding initialize, finalize, and execute */
-  virtual StatusCode initialize();
-  virtual StatusCode finalize();
-
-  /** overlaps */
-  template<class Object1, class Object2>
-  bool overlap(const Object1* object1, const Object2* object2) const ;
-
-  template<class Object1, class Object2>
-  bool overlap(const Object1* object1, const Object2* object2, double& deltaR) const ;
-
-private:
-
-
-  /** get a handle to the tool helper */
-  ToolHandle<AnalysisTools> m_analysisTools;
-
-  /** deltaR overlap */
-  double m_deltaR;
-  double m_deltaRWithJets;
-
-};
-
-/** check for overlap in deltaR and also return the deltaR value */
-template<class Object1, class Object2>
-inline bool UserAnalysisOverlapCheckingTool::overlap(const Object1* object1,
-                                                     const Object2* object2) const {
-  double dr=-1.0;
-  bool areOverlapping = false;
-  areOverlapping = this->overlap( object1, object2, dr);
-
-  return areOverlapping;
-}
-
-template<class Object1, class Object2>
-inline bool UserAnalysisOverlapCheckingTool::overlap(const Object1* object1,
-                                                     const Object2* object2,
-                                                     double& deltaR) const
-{
-   deltaR=1000.0;
-   bool areOverlapping = false;
-   if ( object1 && object2) {
-      deltaR = m_analysisTools->deltaR( object1, object2 );
-      areOverlapping = deltaR < m_deltaR;
-      const Jet * jet1 = dynamic_cast<const Jet*> (object1);
-      const Jet * jet2 = dynamic_cast<const Jet*> (object2);
-      if ( jet1 || jet2 ) areOverlapping = deltaR < m_deltaRWithJets;
-   }
-   return areOverlapping;
-}
-
-#endif // USERANALYSISUTILS_USERANALYSISOVERLAPCHECKINGTOOL_H 
-
-
-
-
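The overlap test declared in the header removed above reduces to a deltaR comparison, with a wider cone whenever either object is a jet; the deltaR computation itself is delegated to the AnalysisTools helper. A minimal standalone sketch of that logic, using a hypothetical FourMom struct and a hand-rolled deltaR with phi wrapping; the 0.2 and 0.3 cone sizes mirror the job-option defaults that appear further below:

#include <cmath>

// Hypothetical stand-in for an Athena four-momentum; only eta/phi and a jet flag are needed here.
struct FourMom {
  double eta;
  double phi;
  bool   isJet;
};

// deltaR with the phi difference wrapped into [-pi, pi].
double deltaR(const FourMom& a, const FourMom& b) {
  const double kTwoPi = 6.283185307179586;
  const double dEta = a.eta - b.eta;
  const double dPhi = std::remainder(a.phi - b.phi, kTwoPi);
  return std::sqrt(dEta * dEta + dPhi * dPhi);
}

// Overlap decision mirroring the removed tool: default cone of 0.2,
// widened to 0.3 when either object is a jet.
bool overlaps(const FourMom& a, const FourMom& b,
              double cone = 0.2, double coneWithJets = 0.3) {
  const double dr = deltaR(a, b);
  return dr < ((a.isJet || b.isJet) ? coneWithJets : cone);
}
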
diff --git a/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/UserAnalysisUtils/UserAnalysisOverlapRemovalTool.h b/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/UserAnalysisUtils/UserAnalysisOverlapRemovalTool.h
deleted file mode 100644
index 2288cf2ecc670a858f915035604d695fb616bb7d..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/UserAnalysisUtils/UserAnalysisOverlapRemovalTool.h
+++ /dev/null
@@ -1,233 +0,0 @@
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-#ifndef USERANALYSISUTILS_USERANALYSISOVERLAPREMOVALTOOL_H  
-#define USERANALYSISUTILS_USERANALYSISOVERLAPREMOVALTOOL_H 
-
-/*****************************************************************************
-Name    : UserAnalysisOverlapRemovalTool.h
-Package : offline/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils
-Author  : Ketevi A. Assamagan
-Created : November 2007
-Purpose : User tools for analysis overlap removal on ESD/AOD/DPD in Athena
-          - do overlap removal given a set of containers
-          - Return lists of non-overlapping Particles, leptons, etc
-          - Call overlap checking tools down to cell and hit level
-*****************************************************************************/
-
-//#include "GaudiKernel/AlgTool.h"
-//#include "GaudiKernel/MsgStream.h"
-//#include "StoreGate/StoreGateSvc.h"
-#include "AthenaBaseComps/AthAlgTool.h"
-#include "GaudiKernel/ToolHandle.h"
-
-#include "UserAnalysisUtils/UserAnalysisSelectionTool.h"
-#include "UserAnalysisUtils/UserAnalysisOverlapCheckingTool.h"
-
-#include "VxVertex/VxContainer.h"
-#include "Particle/TrackParticleContainer.h"
-#include "CaloEvent/CaloClusterContainer.h"
-#include "TrkSegment/SegmentCollection.h"
-
-#include "muonEvent/MuonContainer.h"
-#include "egammaEvent/ElectronContainer.h"
-#include "egammaEvent/PhotonContainer.h"
-#include "tauEvent/TauJetContainer.h"
-#include "JetEvent/JetCollection.h"
-#include "MissingETEvent/MissingET.h"
-
-#include "NavFourMom/IParticleContainer.h"
-#include "NavFourMom/INavigable4MomentumCollection.h"
-#include "AthContainers/ConstDataVector.h"
-
-#include <string>
-#include <map>
-#include <vector>
-
-class MsgStream;
-
-/** Interface ID */  
-static const InterfaceID IID_UserAnalysisOverlapRemovalTool("UserAnalysisOverlapRemovalTool", 1, 0);
-
-class UserAnalysisOverlapRemovalTool : public AthAlgTool {
-
-public:
-
-  /** Standard Constructor */
-  UserAnalysisOverlapRemovalTool(const std::string& type, const std::string& name,
-	                         const IInterface* parent);
-
-  /** AlgTool and IAlgTool interface methods */
-  static const InterfaceID& interfaceID() { return IID_UserAnalysisOverlapRemovalTool; };
-
-  /** Overriding initialize, finalize, and execute */
-  virtual StatusCode initialize();
-  virtual StatusCode execute();
-  virtual StatusCode finalize();
-
-  /** access to containers after preparation */
-  const INavigable4MomentumCollection * finalStateObjects();
-  const PhotonContainer               * finalStatePhotons();  // including converted photons
-  const ElectronContainer             * finalStateElectrons();
-  const Analysis::MuonContainer       * finalStateMuons();
-  const INavigable4MomentumCollection * finalStateLeptons();  // Electrons or Muons
-  const Analysis::TauJetContainer     * finalStateTauJets();
-  const JetCollection          * finalStateJets();
-  const JetCollection          * finalStateBJets();
-  const JetCollection          * finalStateLightJets();
-  const Rec::TrackParticleContainer   * finalStateTrackParticles();
-  const CaloClusterContainer          * finalStateCaloClusters();
-
-  /** summary of pre-selections and overlap removal - will be called at the end of the job
-      in the finalize of this tool - the first number is the reconstructed count and the second the pre-selected count */
-  void summarize();
-  const std::pair<unsigned int, unsigned int>& electronSummary() const;
-  const std::pair<unsigned int, unsigned int>& photonSummary() const;
-  const std::pair<unsigned int, unsigned int>& muonSummary() const;
-  const std::pair<unsigned int, unsigned int>& tauJetSummary() const;
-  const std::pair<unsigned int, unsigned int>& jetSummary() const;
-  const std::pair<unsigned int, unsigned int>& bJetSummary() const;
-  const std::pair<unsigned int, unsigned int>& lightJetSummary() const;
-  const std::pair<unsigned int, unsigned int>& trackParticleSummary() const;
-  const std::pair<unsigned int, unsigned int>& caloClusterSummary() const;
-
-  /** check if execute() is already called for this tool in this job for this event */
-  bool isExecuted();
-
-private:
-  struct Vectors {
-    ConstDataVector<INavigable4MomentumCollection>* m_outputParticles;
-    ConstDataVector<INavigable4MomentumCollection>* m_outputLeptons;
-    ConstDataVector<ElectronContainer>* m_outputElectrons;
-    ConstDataVector<PhotonContainer>* m_outputPhotons;
-    ConstDataVector<Analysis::MuonContainer>* m_outputMuons;
-    ConstDataVector<Analysis::TauJetContainer>* m_outputTauJets;
-    ConstDataVector<JetCollection>* m_outputJets;
-    ConstDataVector<JetCollection>* m_outputBJets;
-    ConstDataVector<JetCollection>* m_outputLightJets;
-    ConstDataVector<Rec::TrackParticleContainer>* m_outputTrackParticles;
-    ConstDataVector<CaloClusterContainer>* m_outputCaloClusters;
-  };
-
-  /** container preparation */
-  StatusCode prepareContainers(Vectors& v);
-  StatusCode electronPreparation( Vectors& v, std::string key );
-  StatusCode photonPreparation( Vectors& v, std::string key );
-  StatusCode muonPreparation( Vectors& v, std::string key );
-  StatusCode tauJetPreparation( Vectors& v, std::string key );
-  StatusCode jetPreparation( Vectors& v, std::string key );
-  StatusCode trackParticlePreparation( Vectors& v, std::string key );
-  StatusCode caloClusterPreparation( Vectors& v, std::string key );
-  StatusCode lockContainers();
-
-  /** for debugging purposes - called if MSG_Level = DEBUG */
-  void print();
-
-private:
-
-  INavigable4MomentumCollection * allParticles();
-  INavigable4MomentumCollection * allLeptons();
-  PhotonContainer               * allPhotons();  // including converted photons
-  ElectronContainer             * allElectrons();
-  Analysis::MuonContainer       * allMuons();
-  Analysis::TauJetContainer     * allTauJets();
-  JetCollection          * allJets();
-  JetCollection          * allBJets();
-  JetCollection          * allLightJets();
-  Rec::TrackParticleContainer   * allTrackParticles();
-  CaloClusterContainer          * allCaloClusters();
-
-private:
-
-  /** a handle on selection  and on overlap checking */
-  ToolHandle <UserAnalysisSelectionTool> m_userSelectionTool;
-  ToolHandle <UserAnalysisOverlapCheckingTool> m_userOverlapCheckingTool;
-
-  /** should contain the StoreGate keys to be passed in job options */ 
-  std::vector<std::string> m_inputContainerKeys;
- 
-  /** number of various particles <before selection, after selection>
-      used in the summarize() method to print summary information */
-  std::pair<unsigned int, unsigned int> m_numElectrons;
-  std::pair<unsigned int, unsigned int> m_numPhotons;
-  std::pair<unsigned int, unsigned int> m_numMuons;
-  std::pair<unsigned int, unsigned int> m_numTauJets;
-  std::pair<unsigned int, unsigned int> m_numJets;
-  std::pair<unsigned int, unsigned int> m_numBJets;
-  std::pair<unsigned int, unsigned int> m_numLightJets;
-  std::pair<unsigned int, unsigned int> m_numTrackParticles;
-  std::pair<unsigned int, unsigned int> m_numCaloClusters; 
-
-  /** output collection prefix and keys
-      the output collection keys are built from the inputCollectionKeys with the prefix appended
-      the user can set the prefix in the job options */
-  std::string m_outputObjectKey;
-  std::string m_outputLeptonKey;
-  std::string m_outputElectronKey;
-  std::string m_outputPhotonKey;
-  std::string m_outputMuonKey;
-  std::string m_outputTauJetKey;
-  std::string m_outputJetKey;
-  std::string m_outputBJetKey;
-  std::string m_outputLightJetKey;
-  std::string m_outputTrackParticleKey;
-  std::string m_outputCaloClusterKey;
-
-  /** is ATLFAST data */
-  bool m_isAtlfast;
-
-  /** remove overlap in same container */
-  bool m_removeOverlapInSameContainer;
-};
-
-inline const std::pair<unsigned int, unsigned int>& UserAnalysisOverlapRemovalTool::electronSummary() const
-{
-  return m_numElectrons;
-}
-
-inline const std::pair<unsigned int, unsigned int>& UserAnalysisOverlapRemovalTool::photonSummary() const
-{
-  return m_numPhotons;
-}
-
-inline const std::pair<unsigned int, unsigned int>& UserAnalysisOverlapRemovalTool::muonSummary() const
-{
-  return m_numMuons;
-}
-
-inline const std::pair<unsigned int, unsigned int>& UserAnalysisOverlapRemovalTool::tauJetSummary() const
-{
-  return m_numTauJets;
-}
-
-inline const std::pair<unsigned int, unsigned int>& UserAnalysisOverlapRemovalTool::jetSummary() const
-{
-  return m_numJets;
-}
-
-inline const std::pair<unsigned int, unsigned int>& UserAnalysisOverlapRemovalTool::bJetSummary() const
-{
-  return m_numBJets;
-}
-
-inline const std::pair<unsigned int, unsigned int>& UserAnalysisOverlapRemovalTool::lightJetSummary() const
-{
-  return m_numLightJets;
-}
-
-inline const std::pair<unsigned int, unsigned int>& UserAnalysisOverlapRemovalTool::trackParticleSummary() const
-{
-  return m_numTrackParticles;
-}
-
-inline const std::pair<unsigned int, unsigned int>& UserAnalysisOverlapRemovalTool::caloClusterSummary() const
-{
-  return m_numCaloClusters;
-}
-
-#endif // USERANALYSISUTILS_USERANALYSISOVERLAPREMOVALTOOL_H 
-
-
-
-
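The removal strategy behind this header (implemented in the .cxx deleted later in this changeset) is a greedy pass over the configured input containers: the first candidate is always accepted, and each later candidate is kept only if it overlaps with none of the objects accepted so far. A minimal standalone sketch of that pattern, with a hypothetical Candidate type and a caller-supplied overlap predicate:

#include <functional>
#include <vector>

// Hypothetical analysis object; only eta/phi are carried for illustration.
struct Candidate {
  double eta;
  double phi;
};

// Greedy overlap removal: keep a candidate only if it does not overlap
// with any candidate that has already been accepted.
std::vector<Candidate> removeOverlaps(
    const std::vector<Candidate>& input,
    const std::function<bool(const Candidate&, const Candidate&)>& overlaps) {
  std::vector<Candidate> accepted;
  for (const Candidate& c : input) {
    bool hasOverlap = false;
    for (const Candidate& a : accepted) {
      if (overlaps(c, a)) { hasOverlap = true; break; }  // stop at first overlap
    }
    if (!hasOverlap) accepted.push_back(c);
  }
  return accepted;
}
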
diff --git a/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/UserAnalysisUtils/UserAnalysisPreparationTool.h b/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/UserAnalysisUtils/UserAnalysisPreparationTool.h
deleted file mode 100644
index e3102555ec7e4eba17ffd1b825180cef568cefb7..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/UserAnalysisUtils/UserAnalysisPreparationTool.h
+++ /dev/null
@@ -1,176 +0,0 @@
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-#ifndef USERANALYSISUTILS_USERANALYSISPREPARATIONTOOL_H  
-#define USERANALYSISUTILS_USERANALYSISPREPARATIONTOOL_H 
-
-/*****************************************************************************
-Name    : UserAnalysisPreparationTool.h
-Package : offline/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils
-Author  : Ketevi A. Assamagan
-Created : November 2007
-Purpose : User tools for analysis preparation on ESD/AOD/DPD in Athena
-          - selections
-          - write out container of selected particles to StoreGate
-*****************************************************************************/
-
-//#include "GaudiKernel/AlgTool.h"
-//#include "GaudiKernel/MsgStream.h"
-//#include "StoreGate/StoreGateSvc.h"
-#include "GaudiKernel/ToolHandle.h"
-#include "AthenaBaseComps/AthAlgTool.h"
-#include "UserAnalysisUtils/UserAnalysisSelectionTool.h"
-
-#include "VxVertex/VxContainer.h"
-#include "Particle/TrackParticleContainer.h"
-#include "CaloEvent/CaloClusterContainer.h"
-#include "TrkSegment/SegmentCollection.h"
-
-#include "muonEvent/MuonContainer.h"
-#include "egammaEvent/ElectronContainer.h"
-#include "egammaEvent/PhotonContainer.h"
-#include "tauEvent/TauJetContainer.h"
-#include "JetEvent/JetCollection.h"
-#include "MissingETEvent/MissingET.h"
-
-#include "NavFourMom/IParticleContainer.h"
-#include "NavFourMom/INavigable4MomentumCollection.h"
-
-#include <string>
-#include <map>
-#include <vector>
-
-class MsgStream;
-
-/** Interface ID */  
-static const InterfaceID IID_UserAnalysisPreparationTool("UserAnalysisPreparationTool", 1, 0);
-
-class UserAnalysisPreparationTool : public AthAlgTool {
-
-public:
-
-  /** Standard Constructor */
-  UserAnalysisPreparationTool(const std::string& type, const std::string& name,
-	                      const IInterface* parent);
-
-  /** AlgTool and IAlgTool interface methods */
-  static const InterfaceID& interfaceID() { return IID_UserAnalysisPreparationTool; };
-
-  /** Overriding initialize, finalize, and execute */
-  virtual StatusCode initialize();
-  virtual StatusCode execute();
-  virtual StatusCode finalize();
-
-  /** access to containers after preparation */
-  const ElectronContainer             * selectedElectrons();
-  const PhotonContainer               * selectedPhotons();  
-  const Analysis::MuonContainer       * selectedMuons();
-  const Analysis::TauJetContainer     * selectedTauJets();
-  const JetCollection                 * selectedJets();
-  const Rec::TrackParticleContainer   * selectedTrackParticles();
-  const CaloClusterContainer          * selectedCaloClusters();
-
-  /** summary of pre-selections and overlap removal - will be called at the end of the job
-      in the finalize of this tool - the first number is the reconstructed count and the second the pre-selected count */
-  void summarize();
-  const std::pair<unsigned int, unsigned int>& electronSummary() const;
-  const std::pair<unsigned int, unsigned int>& photonSummary() const;
-  const std::pair<unsigned int, unsigned int>& muonSummary() const;
-  const std::pair<unsigned int, unsigned int>& tauJetSummary() const;
-  const std::pair<unsigned int, unsigned int>& jetSummary() const;
-  const std::pair<unsigned int, unsigned int>& trackParticleSummary() const;
-  const std::pair<unsigned int, unsigned int>& caloClusterSummary() const;
-
-private:
-
-  /** container preparation */
-  StatusCode electronPreparation( std::string key );
-  StatusCode photonPreparation( std::string key );
-  StatusCode muonPreparation( std::string key );
-  StatusCode tauJetPreparation( std::string key );
-  StatusCode jetPreparation( std::string key );
-  StatusCode trackParticlePreparation( std::string key );
-  StatusCode caloClusterPreparation( std::string key );
-
-  /** for debugging purposes - called if MSG_Level = DEBUG */
-  void print();
-
-private:
-
-  /** a handle on selection */
-  ToolHandle <UserAnalysisSelectionTool> m_userSelectionTool;
-
-  /** should contain the StoreGate keys to be passed in job options */ 
-  std::vector<std::string> m_inputContainerKeys;
- 
-  /** number of various particles <before selection, after selection>
-      used in the summarize() method to print summary information */
-  std::pair<unsigned int, unsigned int> m_numElectrons;
-  std::pair<unsigned int, unsigned int> m_numPhotons;
-  std::pair<unsigned int, unsigned int> m_numMuons;
-  std::pair<unsigned int, unsigned int> m_numTauJets;
-  std::pair<unsigned int, unsigned int> m_numJets;
-  std::pair<unsigned int, unsigned int> m_numTrackParticles;
-  std::pair<unsigned int, unsigned int> m_numCaloClusters; 
-
-  /** output collection prefix and keys
-      the output collection keys are built from the inputCollectionKeys with the prefix appended
-      the user can set the prefix in the job options */
-  std::vector<std::string> m_outputContainerKeys;
-  std::string m_outputElectronKey;
-  std::string m_outputPhotonKey;
-  std::string m_outputMuonKey;
-  std::string m_outputTauJetKey;
-  std::string m_outputJetKey;
-  std::string m_outputTrackParticleKey;
-  std::string m_outputCaloClusterKey;
-
-  /** is ATLFAST data */
-  bool m_isAtlfast;
-
-  /** on first event */
-  bool m_first;
-
-};
-
-inline const std::pair<unsigned int, unsigned int>& UserAnalysisPreparationTool::electronSummary() const
-{
-  return m_numElectrons;
-}
-
-inline const std::pair<unsigned int, unsigned int>& UserAnalysisPreparationTool::photonSummary() const
-{
-  return m_numPhotons;
-}
-
-inline const std::pair<unsigned int, unsigned int>& UserAnalysisPreparationTool::muonSummary() const
-{
-  return m_numMuons;
-}
-
-inline const std::pair<unsigned int, unsigned int>& UserAnalysisPreparationTool::tauJetSummary() const
-{
-  return m_numTauJets;
-}
-
-inline const std::pair<unsigned int, unsigned int>& UserAnalysisPreparationTool::jetSummary() const
-{
-  return m_numJets;
-}
-
-inline const std::pair<unsigned int, unsigned int>& UserAnalysisPreparationTool::trackParticleSummary() const
-{
-  return m_numTrackParticles;
-}
-
-inline const std::pair<unsigned int, unsigned int>& UserAnalysisPreparationTool::caloClusterSummary() const
-{
-  return m_numCaloClusters;
-}
-
-#endif // USERANALYSISUTILS_USERANALYSISPREPARATIONTOOL_H 
-
-
-
-
diff --git a/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/UserAnalysisUtils/UserAnalysisSelectionTool.h b/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/UserAnalysisUtils/UserAnalysisSelectionTool.h
deleted file mode 100644
index be68d1828d152078e80f2797f48177898de6d1c1..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/UserAnalysisUtils/UserAnalysisSelectionTool.h
+++ /dev/null
@@ -1,128 +0,0 @@
-// Dear emacs, this is -*- c++ -*-
-
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-// $Id: UserAnalysisSelectionTool.h 773457 2016-09-15 13:17:29Z krasznaa $
-#ifndef USERANALYSISUTILS_USERANALYSISSELECTIONTOOL_H  
-#define USERANALYSISUTILS_USERANALYSISSELECTIONTOOL_H 
-
-/*****************************************************************************
-Name    : UserAnalysisSelectionTool.h
-Package : offline/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils
-Author  : Ketevi A. Assamagan
-Created : November 2007
-Purpose : User tools for analysis preparation on ESD/AOD/DPD in Athena - selections
-          - Take a list of input containers
-          - Call the selection tools to see if the containee passed selection
-          - if it passed selection, put the containee in a new container
-*****************************************************************************/
-
-// System include(s):
-#include <string>
-
-// Gaudi/Athena include(s):
-#include "AthenaBaseComps/AthAlgTool.h"
-
-// Forward declaration(s):
-namespace Analysis {
-   class Electron;
-   class Photon;
-   class Muon;
-   class TauJet;
-}
-namespace Rec {
-   class TrackParticle;
-}
-class Jet;
-class CaloCluster;
-
-/** Interface ID */  
-static const InterfaceID
-IID_UserAnalysisSelectionTool( "UserAnalysisSelectionTool", 1, 0 );
-
-class UserAnalysisSelectionTool : public AthAlgTool {
-
-public:
-   /** Standard Constructor */
-   UserAnalysisSelectionTool( const std::string& type, const std::string& name,
-                              const IInterface* parent );
-
-   /** AlgTool and IAlgTool interface methods */
-   static const InterfaceID& interfaceID() {
-      return IID_UserAnalysisSelectionTool;
-   }
-
-   /** Overriding initialize */
-   virtual StatusCode initialize();
-
-   /// @name Pre-selections
-   /// @{
-
-   bool isSelected( const Analysis::Electron* electron ) const;
-   bool isSelected( const Analysis::Photon* photon ) const;
-   bool isSelected( const Analysis::Muon* muon ) const;
-   bool isSelected( const Analysis::TauJet* tauJet ) const;
-   bool isSelected( const Jet* jet ) const;
-   bool isSelected( const Rec::TrackParticle* trackParticle ) const;
-   bool isSelected( const CaloCluster* caloCluster ) const;
-
-   bool isBJet( const Jet* jet ) const;
-
-   /// @}
-
-private:
-  /** this is Atlfast data */
-  bool m_isAtlfast;
-
-  /** Electron selection */
-  double m_electronPt;
-  double m_electronEta;
-  std::string m_egDetailContainerName;
-  std::string m_electronIsEMFlag;
-  int    m_electronIsEM;
-  int    m_electronIsolationConeIndex;
-  bool   m_doElectronIsolation;
-  bool   m_authorEgammaOnly;
-  bool   m_electronEtaWindCut;/// apply (or not) eta cut in bad region window  
-  double m_electronEtaWind;     /// width of window
-  double m_electronEtaWindCent; /// center of window
-  double m_electronIsolationEt;
-  double m_normElectronIsolEt;  // normalised electron isolation ET: EtCone/Pt
-
-  /** Photon selection */
-  double m_photonPt;
-  double m_photonEta;
-  double m_photonIsEM;
-
-  /** Muon selection */
-  double m_muonPt;
-  double m_muonEta;
-  bool   m_doMuonIsolation;
-  int    m_muonIsolationConeIndex;
-  double m_muonIsolationEt;
-  bool   m_useMatchChi2;
-  double m_muonMatchChi2;
-  double m_normMuonIsolEt;  // normalised muon isolation ET: EtCone/Pt
-
-  /** TauJet selection */
-  double m_tauJetPt;
-  double m_tauJetEta;
-  double m_tauJetLikelihood;
-  double m_tauEleBDTCut;
-
-  /** Jet selection */
-  double m_jetPt;
-  double m_jetEta;
-  double m_bJetLikelihood;
-
-  /** caloCluster selection */
-  double m_caloClusterE;
-
-  /** TrackParticle selection */
-  double m_trackParticlePt;
-
-}; // class UserAnalysisSelectionTool
-
-#endif // USERANALYSISUTILS_USERANALYSISSELECTIONTOOL_H 
diff --git a/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/share/UserAnalysisOverlapCheckingTool_jobOptions.py b/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/share/UserAnalysisOverlapCheckingTool_jobOptions.py
deleted file mode 100644
index 9fb0c7f942d892c2617226773c4a3f097dec1410..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/share/UserAnalysisOverlapCheckingTool_jobOptions.py
+++ /dev/null
@@ -1,13 +0,0 @@
-include.block( "UserAnalysisUtils/UserAnalysisOverlapCheckingTool_jobOptions.py" )
-
-from UserAnalysisUtils.UserAnalysisUtilsConf import \
-     UserAnalysisOverlapCheckingTool as ConfiguredUserAnalysisOverlapCheckingTool
-UserAnalysisOverlapCheckingTool = ConfiguredUserAnalysisOverlapCheckingTool(
-
-  OverlapDeltaR=0.2,
-  OverlapDeltaRWithJets=0.3
-
-    )
-
-ToolSvc += UserAnalysisOverlapCheckingTool
-print      UserAnalysisOverlapCheckingTool
diff --git a/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/share/UserAnalysisOverlapRemovalTool_jobOptions.py b/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/share/UserAnalysisOverlapRemovalTool_jobOptions.py
deleted file mode 100644
index 46a66b6c6c114624ce81552e0982d7c01b42fbd4..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/share/UserAnalysisOverlapRemovalTool_jobOptions.py
+++ /dev/null
@@ -1,57 +0,0 @@
-include.block( "UserAnalysisUtils/UserAnalysisOverlapRemovalTool_jobOptions.py" )
-
-# the user selection tools to use
-# they are only used here to check if a jet is a b-jet or not  
-include ( "UserAnalysisUtils/UserAnalysisSelectionTool_jobOptions.py" )
-
-# the user Overlap Checking tools to use
-include ( "UserAnalysisUtils/UserAnalysisOverlapCheckingTool_jobOptions.py" )
-
-from UserAnalysisUtils.UserAnalysisUtilsConf import \
-     UserAnalysisOverlapRemovalTool as ConfiguredUserAnalysisOverlapRemovalTool
-UserAnalysisOverlapRemovalTool = ConfiguredUserAnalysisOverlapRemovalTool(
-
-  # define the pre-selection tools - used here only to check if a jet is a b-jet
-  UserSelectionTool = UserAnalysisSelectionTool,
-
-  # Whether to check overlap in same container or not. 
-  # For example, muon overlapping with muon?
-  # Currently, when set to False, it applies to all containers.
-  RemoveOverlapInSameContainer = True,
-
-  # define the overlap checking tools
-  UserOverlapCheckingTool = UserAnalysisOverlapCheckingTool,
-
-  # the list of the input container keys - the order is important: overlap removal will be done in that order
-  
-  InputContainerKeys=[  "SelectedElectronCollection",
-                        "SelectedStacoMuonCollection",
-                        "SelectedTauRecContainer",
-                        "SelectedAntiKt4TowerJets",
-                        "SelectedPhotonAODCollection",
-                        "SelectedCaloCalTopoCluster",
-                        "SelectedTrackParticleCandidate"
-                       ],
-
-
-  IsAtlfastData=False, # set this to true if running on Atlfast AOD
-                       # Only deltaR overlap removal is done for Atlfast; cluster/TrackParticle
-                       # and Cell/Hit overlap checks are not done in the Atlfast case
-
-  # the list of the output container keys - 
-  OuputObjectKey         = "FinalStateObjectCollection",
-  OutputLeptonKey        = "FinalStateLeptonCollection",
-  OutputPhotonKey        = "FinalStatePhotonCollection",
-  OutputElectronKey      = "FinalStateElectronCollection",
-  OutputMuonKey          = "FinalStateMuonCollection",
-  OutputTauJetKey        = "FinalStateTauJetCollection",
-  OutputCalloClusterKey  = "FinalStateCaloClusterCollection",
-  OutputTrackParticleKey = "FinalStateTrackParticleCollection",
-  OutputJetKey           = "FinalStateJetCollection",
-  OutputBJetKey          = "FinalStateBJetCollection",
-  OutputLightJetKey      = "FinalStateLightJetCollection"
-
-    )
-
-ToolSvc += UserAnalysisOverlapRemovalTool
-print      UserAnalysisOverlapRemovalTool
diff --git a/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/share/UserAnalysisPreparationTool_jobOptions.py b/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/share/UserAnalysisPreparationTool_jobOptions.py
deleted file mode 100644
index 6e6f523648411f0542946aa2d665ca24bf38f700..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/share/UserAnalysisPreparationTool_jobOptions.py
+++ /dev/null
@@ -1,40 +0,0 @@
-include.block( "UserAnalysisUtils/UserAnalysisPreparationTool_jobOptions.py" )
-
-# the user selection tools to use
-include ( "UserAnalysisUtils/UserAnalysisSelectionTool_jobOptions.py" )
-
-from UserAnalysisUtils.UserAnalysisUtilsConf import \
-     UserAnalysisPreparationTool as ConfiguredUserAnalysisPreparationTool
-UserAnalysisPreparationTool = ConfiguredUserAnalysisPreparationTool(
-
-  # define the pre-selection tools
-  UserSelectionTool = UserAnalysisSelectionTool,
-
-  # the list of the input container keys - the order does not matter
-  InputContainerKeys=[ "ElectronAODCollection",
-                       "StacoMuonCollection",
-                       "TauRecContainer",
-                       "AntiKt4TowerJets",
-                       "PhotonAODCollection",
-                       "CaloCalTopoCluster",
-                       "TrackParticleCandidate"
-                       ],
-
-
-  IsAtlfastData=False, # set this to true if running on Atlfast AOD - some of the cuts are not used in this case
-
-  # the list of the output container keys - these containers contain the selected objects
-  # The order matters: it should follow the same order as the input container keys above
-  OutputContainerKeys=[ "SelectedElectronCollection",
-                        "SelectedStacoMuonCollection",
-                        "SelectedTauRecContainer",
-                        "SelectedAntiKt4TowerJets",
-                        "SelectedPhotonAODCollection",
-                        "SelectedCaloCalTopoCluster",
-                        "SelectedTrackParticleCandidate"
-                        ]
-
-    )
-
-ToolSvc += UserAnalysisPreparationTool
-print      UserAnalysisPreparationTool
diff --git a/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/share/UserAnalysisSelectionTool_jobOptions.py b/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/share/UserAnalysisSelectionTool_jobOptions.py
deleted file mode 100644
index cddf3fd2c714f96e7f09fda526fd4593d38683cb..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/share/UserAnalysisSelectionTool_jobOptions.py
+++ /dev/null
@@ -1,52 +0,0 @@
-include.block( "UserAnalysisUtils/UserAnalysisSelectionTool_jobOptions.py" )
-
-from UserAnalysisUtils.UserAnalysisUtilsConf import \
-     UserAnalysisSelectionTool as ConfiguredUserSelectionTool
-UserAnalysisSelectionTool = ConfiguredUserSelectionTool(
-
-
-  IsAtlfastData=False, # set this to true if running on Atlfast AOD - some of the cuts are not used in this case
-
-  CaloClusterE=5.0*GeV,
-
-  TrackParticlePt=1.0*GeV,
-
-  ElectronPt=7.0*GeV,
-  ElectronEta=2.5,
-  ElectronIsEMFlag="Medium", #Loose/Medium/Tight
-  ElectronIsEM=0xffffff,           #or set the IsEM value yourself
-  AuthorEgammaOnly=True,
-  ElectronEtaWindowCut=False,
-  ElectronEtaWindowCenter=1.445,
-  ElectronEtaWindow=0.075,
-  DoElectronIsolation=True,
-  ElectronIsolationConeIndex=3, #0-4 for dR=0.1, 0.2, 0.3, and 0.4
-  ElectronIsolationEt=10.0*GeV,
-  NormalizedElectronIsolationEt=0.2,
-
-  PhotonPt=10.0*GeV,
-  PhotonEta=3.2,
-  PhotonIsEM=0,
-
-  MuonPt=6.0*GeV,
-  MuonEta=2.7,
-  DoMuonIsolation=True,
-  MuonIsolationConeIndex=3, # 0-4 for dR=0.1, 0.2, 0.3, and 0.4
-  MuonIsolationEt=10*GeV,
-  NormalizedMuonIsolationEt=0.2,
-  UseMatchChi2=True, 
-  MuonMatchChi2=100,
-
-  TauJetPt=20*GeV,
-  TauJetEta=2.5,
-  TauJetLikelihood=-6.0,
-  TauEleBDTCut=0.5, #
-
-  JetPt=20*GeV,
-  JetEta=5.0,
-  BJetLikelihood=6.0
-
-    )
-
-ToolSvc += UserAnalysisSelectionTool
-print      UserAnalysisSelectionTool
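The cuts configured above are plain threshold comparisons: a pT floor, an |eta| ceiling, and, for leptons, an isolation requirement given either as an absolute cone ET or normalised to the candidate pT. A standalone sketch of the electron case using the same illustrative numbers (7 GeV, |eta| < 2.5, EtCone/pT < 0.2); the struct and function names are placeholders rather than part of the configured tool:

#include <cmath>

// Hypothetical flattened electron candidate; Athena energies are in MeV.
struct ElectronCandidate {
  double pt;      // transverse momentum [MeV]
  double eta;
  double etCone;  // isolation ET in a fixed cone [MeV]
};

constexpr double GeV = 1000.0;  // Gaudi unit convention: 1 GeV = 1000 MeV

bool passesElectronPreselection(const ElectronCandidate& el,
                                double ptCut      = 7.0 * GeV,
                                double etaCut     = 2.5,
                                double normIsoCut = 0.2) {
  if (el.pt < ptCut)             return false;  // pT floor
  if (std::abs(el.eta) > etaCut) return false;  // central acceptance
  if (el.pt > 0.0 && el.etCone / el.pt > normIsoCut) return false;  // normalised isolation
  return true;
}
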
diff --git a/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/src/UserAnalysisOverlapCheckingTool.cxx b/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/src/UserAnalysisOverlapCheckingTool.cxx
deleted file mode 100644
index 4d79e4ad6147293a72957820263bf7d42aefb3c4..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/src/UserAnalysisOverlapCheckingTool.cxx
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-/*****************************************************************************
-Name    : UserAnalysisOverlapCheckingTool.cxx
-Package : offline/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils
-Author  : Ketevi A. Assamagan
-Created : November 2007
-Purpose : User Analysis Overlap Checking - see UserAnalysisOverlapCheckingTool.h for details
-*****************************************************************************/
-
-#include "GaudiKernel/GaudiException.h"
-#include "Gaudi/Property.h"
-
-// Accessing data:
-#include "CLHEP/Units/PhysicalConstants.h"
-
-// User Tools
-#include "UserAnalysisUtils/UserAnalysisOverlapCheckingTool.h"
-
-#include <sstream>
-#include <iomanip>
-#include <iostream>
-
-using namespace Analysis;
-using namespace Rec;
-using namespace std;
-
-//------------------------------------------------------------------------------
-UserAnalysisOverlapCheckingTool::UserAnalysisOverlapCheckingTool( const std::string& type,
-                                                                  const std::string& name, 
-                                                                  const IInterface* parent )
-  : AthAlgTool( type, name, parent ),
-    m_analysisTools( "AnalysisTools", this ) {
-
-  declareInterface<UserAnalysisOverlapCheckingTool>( this );
-
-  declareProperty( "AnalysisTools", m_analysisTools );
-  declareProperty("OverlapDeltaR",          m_deltaR=0.2);
-  declareProperty("OverlapDeltaRWithJets",  m_deltaRWithJets=0.3);
-
-}
-
-//------------------------------------------------------------------------------
-StatusCode UserAnalysisOverlapCheckingTool::initialize() {
-
-  /// get a handle on the analysis tools
-  StatusCode sc = m_analysisTools.retrieve();
-  if ( sc.isFailure() ) {
-    ATH_MSG_ERROR("Can't get handle on analysis tools");
-    return sc;
-  }
-
-  return StatusCode::SUCCESS;
-}
-
-//------------------------------------------------------------------------------
-StatusCode UserAnalysisOverlapCheckingTool::finalize() {
-
-  ATH_MSG_DEBUG("in finalize()");
- 
-
-  return StatusCode::SUCCESS;
-}
-
-
-
-
diff --git a/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/src/UserAnalysisOverlapRemovalTool.cxx b/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/src/UserAnalysisOverlapRemovalTool.cxx
deleted file mode 100644
index b79c89d14f46bf19b5a8ac3edb683a94e40ef733..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/src/UserAnalysisOverlapRemovalTool.cxx
+++ /dev/null
@@ -1,987 +0,0 @@
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-/*****************************************************************************
-Name    : UserAnalysisOverlapRemovalTool.cxx
-Package : offline/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils
-Author  : Ketevi A. Assamagan
-Created : November 2007
-Purpose : User Analysis Overlap Removal - see UserAnalysisOverlapRemovalTool.h for details
-*****************************************************************************/
-
-#include "GaudiKernel/GaudiException.h"
-#include "Gaudi/Property.h"
-
-// Accessing data:
-#include "CLHEP/Units/PhysicalConstants.h"
-
-// User Tools
-#include "UserAnalysisUtils/UserAnalysisOverlapRemovalTool.h"
-
-#include "muonEvent/Muon.h"
-#include "egammaEvent/Electron.h"
-#include "egammaEvent/Photon.h"
-#include "CaloEvent/CaloCluster.h"
-#include "Particle/TrackParticle.h"
-#include "tauEvent/TauJet.h"
-#include "JetEvent/Jet.h"
-
-#include <sstream>
-#include <iomanip>
-#include <iostream>
-
-using namespace Analysis;
-using namespace Rec;
-using namespace std;
-
-//------------------------------------------------------------------------------
-UserAnalysisOverlapRemovalTool::UserAnalysisOverlapRemovalTool( const std::string& type,
-                                                                const std::string& name, 
-                                                                const IInterface* parent )
-  : AthAlgTool( type, name, parent ),
-    m_userSelectionTool ( "UserAnalysisSelectionTool"),
-    m_userOverlapCheckingTool ( "UserAnalysisOverlapCheckingTool") {
-
-  declareInterface<UserAnalysisOverlapRemovalTool>( this );
-
-  declareProperty("UserSelectionTool",       m_userSelectionTool);
-  declareProperty("UserOverlapCheckingTool", m_userOverlapCheckingTool);
-  declareProperty("InputContainerKeys",      m_inputContainerKeys);
-  declareProperty("IsAtlfastData",           m_isAtlfast=false);
-
-  declareProperty("OuputObjectKey",         m_outputObjectKey        = "FinalStateObjects");
-  declareProperty("OutputLeptonKey",        m_outputLeptonKey        = "FinalStateLeptons");
-  declareProperty("OutputPhotonKey",        m_outputPhotonKey        = "FinalStatePhotons");
-  declareProperty("OutputElectronKey",      m_outputElectronKey      = "FinalStateElectrons");
-  declareProperty("OutputMuonKey",          m_outputMuonKey          = "FinalStateMuons");
-  declareProperty("OutputTauJetKey",        m_outputTauJetKey        = "FinalStateTauJets");
-  declareProperty("OutputCalloClusterKey",  m_outputCaloClusterKey   = "FinalStateCaloClusters");
-  declareProperty("OutputTrackParticleKey", m_outputTrackParticleKey = "FinalStateTrackParticles");
-  declareProperty("OutputJetKey",           m_outputJetKey           = "FinalStateJets");
-  declareProperty("OutputBJetKey",          m_outputBJetKey          = "FinalStateBJets");
-  declareProperty("OutputLightJetKey",      m_outputLightJetKey      = "FinalStateLightJets");
-  declareProperty("RemoveOverlapInSameContainer", m_removeOverlapInSameContainer=true);
-
-  /** initialize counters */
-  m_numElectrons      = std::make_pair(0,0);
-  m_numPhotons        = std::make_pair(0,0);
-  m_numMuons          = std::make_pair(0,0);
-  m_numTauJets        = std::make_pair(0,0);
-  m_numJets           = std::make_pair(0,0);
-  m_numBJets          = std::make_pair(0,0);
-  m_numLightJets      = std::make_pair(0,0);
-  m_numTrackParticles = std::make_pair(0,0);
-  m_numCaloClusters   = std::make_pair(0,0);
-}
-
-//------------------------------------------------------------------------------
-StatusCode UserAnalysisOverlapRemovalTool::initialize() {
-
-
-  /// get a handle on the selection tools
-  StatusCode sc = m_userSelectionTool.retrieve();
-  if ( sc.isFailure() ) {
-      ATH_MSG_ERROR("Can't get handle on analysis selection tool" );
-      return sc;
-  }
-
-  sc = m_userOverlapCheckingTool.retrieve();
-  if ( sc.isFailure() ) {
-      ATH_MSG_ERROR("Can't get handle on analysis overlap checking tool" );
-      return sc;
-  }
-
-  return StatusCode::SUCCESS;
-}
-
-//------------------------------------------------------------------------------
-StatusCode UserAnalysisOverlapRemovalTool::finalize() {
-
-  ATH_MSG_DEBUG("in finalize()" );
- 
-  this->summarize();
-
-  return StatusCode::SUCCESS;
-}
-
-//-------------------------------------------------------------------------------
-StatusCode UserAnalysisOverlapRemovalTool::execute() {
-  ATH_MSG_DEBUG("in execute()" );
-
-  /** check whether execute has already been called for this event;
-      in one job, execute should be called once for each event */
-  if ( this->isExecuted() ) {
-    ATH_MSG_WARNING("overlapRemovalTool->execute() already called for the event in this job" );
-    return StatusCode::SUCCESS; 
-  }
-
-  /** prepare the container for selection and overlap removal */
-  Vectors v;
-  StatusCode sc = this->prepareContainers(v);
-  if ( sc.isFailure() ) return sc;
- 
-  /** now object preparation with overlap removal */
-  for ( unsigned int i=0; i<m_inputContainerKeys.size(); ++i ) {
-
-    string::size_type loc = m_inputContainerKeys[i].find( "Electron", 0);
-    if( loc != string::npos ) sc = this->electronPreparation( v, m_inputContainerKeys[i] );
-
-    loc = m_inputContainerKeys[i].find( "Photon", 0);
-    if( loc != string::npos ) sc = this->photonPreparation( v, m_inputContainerKeys[i] );
-
-    loc = m_inputContainerKeys[i].find( "Muon", 0);
-    if( loc != string::npos ) sc = this->muonPreparation( v, m_inputContainerKeys[i] );
-
-    std::string tau = "Tau";
-    if ( m_isAtlfast ) tau = "TauJet";
-    loc = m_inputContainerKeys[i].find( tau, 0);
-    if( loc != string::npos ) sc = this->tauJetPreparation( v, m_inputContainerKeys[i] );
-
-    std::string jet = "Jets";
-    if ( m_isAtlfast ) jet = "Jet";
-    loc = m_inputContainerKeys[i].find( jet, 0);
-    if( loc != string::npos ) sc = this->jetPreparation( v, m_inputContainerKeys[i] );
-
-    loc = m_inputContainerKeys[i].find( "Track", 0);
-    if( loc != string::npos ) sc = this->trackParticlePreparation( v, m_inputContainerKeys[i] );
-
-    loc = m_inputContainerKeys[i].find( "Cluster", 0);
-    if( loc != string::npos ) sc = this->caloClusterPreparation( v, m_inputContainerKeys[i] );
-
-    if ( sc.isFailure() ) return sc;
-
-  }
-
-  /** lock the containers so that they are no longer modified */
-  sc = this->lockContainers();
-  if ( sc.isFailure() ) return sc;
-
-  this->print();
-
-  return StatusCode::SUCCESS;
-}
-
-//-------------------------------------------------------------------------------
-const INavigable4MomentumCollection * UserAnalysisOverlapRemovalTool::finalStateObjects() {
-  ATH_MSG_DEBUG("in finalStateObjects()" );
-  const INavigable4MomentumCollection * container = 0;
-  StatusCode sc = evtStore()->retrieve(container, m_outputObjectKey);
-  if ( sc.isFailure() || container ==0 )
-     ATH_MSG_ERROR("Final State objects not found" );
-  return container;
-}
-
-const PhotonContainer * UserAnalysisOverlapRemovalTool::finalStatePhotons() {
-  ATH_MSG_DEBUG("in finalStatePhotons()" );
-  const PhotonContainer * container = 0;
-  StatusCode sc = evtStore()->retrieve(container, m_outputPhotonKey);
-  if ( sc.isFailure() || container ==0 )
-     ATH_MSG_ERROR("Final State Photons not found" );
-  return container;
-}
-
-const ElectronContainer * UserAnalysisOverlapRemovalTool::finalStateElectrons() {
-  ATH_MSG_DEBUG("in finalStateElectrons()" );
-  const ElectronContainer * container = 0;
-  StatusCode sc = evtStore()->retrieve(container, m_outputElectronKey);
-  if ( sc.isFailure() || container ==0 )
-     ATH_MSG_ERROR("Final State Electrons not found" );
-  return container;
-}
-
-const MuonContainer * UserAnalysisOverlapRemovalTool::finalStateMuons() {
-  ATH_MSG_DEBUG("in finalStateMuons()" );
-  const MuonContainer * container = 0;
-  StatusCode sc = evtStore()->retrieve(container, m_outputMuonKey);
-  if ( sc.isFailure() || container ==0 )
-     ATH_MSG_ERROR("Final State Muons not found" );
-  return container;
-}
-
-const INavigable4MomentumCollection * UserAnalysisOverlapRemovalTool::finalStateLeptons() {
-  ATH_MSG_DEBUG("in finalStateLeptons()" );
-  const INavigable4MomentumCollection * container = 0;
-  StatusCode sc = evtStore()->retrieve(container, m_outputLeptonKey);
-  if ( sc.isFailure() || container ==0 )
-     ATH_MSG_ERROR("Final State Leptons not found" );
-  return container;
-}
-
-const TauJetContainer * UserAnalysisOverlapRemovalTool::finalStateTauJets() {
-  ATH_MSG_DEBUG("in finalStateTauJets()" );
-  const TauJetContainer * container = 0;
-  StatusCode sc = evtStore()->retrieve(container, m_outputTauJetKey);
-  if ( sc.isFailure() || container ==0 )
-     ATH_MSG_ERROR("Final State TauJets not found" );
-  return container;
-}
-
-const JetCollection * UserAnalysisOverlapRemovalTool::finalStateJets() {
-  ATH_MSG_DEBUG("in finalStateJets()" );
-  const JetCollection * container = 0;
-  StatusCode sc = evtStore()->retrieve(container, m_outputJetKey);
-  if ( sc.isFailure() || container ==0 )
-     ATH_MSG_ERROR("Final State Jets not found" );
-  return container;
-}
-
-const JetCollection * UserAnalysisOverlapRemovalTool::finalStateBJets() {
-  ATH_MSG_DEBUG("in finalStateBJets()" );
-  const JetCollection * container = 0;
-  StatusCode sc = evtStore()->retrieve(container, m_outputBJetKey);
-  if ( sc.isFailure() || container ==0 )
-     ATH_MSG_ERROR("Final State BJets not found" );
-  return container;
-}
-
-const JetCollection * UserAnalysisOverlapRemovalTool::finalStateLightJets() {
-  ATH_MSG_DEBUG("in finalStateLightJets()" );
-  const JetCollection * container = 0;
-  StatusCode sc = evtStore()->retrieve(container, m_outputLightJetKey);
-  if ( sc.isFailure() || container ==0 )
-     ATH_MSG_ERROR("Final State Light Jets not found" );
-  return container;
-}
-
-const TrackParticleContainer * UserAnalysisOverlapRemovalTool::finalStateTrackParticles() {
-  ATH_MSG_DEBUG("in finalStateTrackParticles()" );
-  const TrackParticleContainer * container = 0;
-  StatusCode sc = evtStore()->retrieve(container, m_outputTrackParticleKey);
-  if ( sc.isFailure() || container ==0 )
-     ATH_MSG_ERROR("Final State TrackParticles not found" );
-  return container;
-}
-
-const CaloClusterContainer * UserAnalysisOverlapRemovalTool::finalStateCaloClusters() {
-  ATH_MSG_DEBUG("in finalStateCaloClusters()" );
-  const CaloClusterContainer * container = 0;
-  StatusCode sc = evtStore()->retrieve(container, m_outputCaloClusterKey);
-  if ( sc.isFailure() || container ==0 )
-     ATH_MSG_ERROR("Final State CaloClusters not found" );
-  return container;
-}
-
-  /** container preparation */
-StatusCode UserAnalysisOverlapRemovalTool::electronPreparation( Vectors& v, std::string key ) {
-  ATH_MSG_DEBUG("in electronPreparation() " );
-  StatusCode sc = StatusCode::SUCCESS;
-
-  const ElectronContainer * aod_electrons = 0;
-  sc = evtStore()->retrieve( aod_electrons, key );
-  if ( sc.isFailure() ) {
-    ATH_MSG_WARNING("No Existing ESD/AOD/DPD electron container found: key = " << key );
-    return sc;
-  }
-  ATH_MSG_DEBUG("Initial ElectronContainer size is " << aod_electrons->size() );
-  m_numElectrons.first += aod_electrons->size();
-
-  /// iterators over the container 
-  ElectronContainer::const_iterator elecItr  = aod_electrons->begin();
-  ElectronContainer::const_iterator elecItrE = aod_electrons->end();
-
-  for (; elecItr != elecItrE; ++elecItr) {
-
-    /** if this is the first particle, just put it in */ 
-    if ( v.m_outputParticles->size() == 0 ) {
-      v.m_outputParticles->push_back( *elecItr );
-      v.m_outputLeptons->push_back( *elecItr );
-      v.m_outputElectrons->push_back( *elecItr );
- 
-    }   
-    /** check for the overlap and save non overlapping ones */
-    else {
-      ConstDataVector<INavigable4MomentumCollection>::const_iterator nav4MomItr  = v.m_outputParticles->begin();
-      ConstDataVector<INavigable4MomentumCollection>::const_iterator nav4MomItrE = v.m_outputParticles->end();
-      bool overlap = false;
-      for (; nav4MomItr != nav4MomItrE; ++nav4MomItr) {
-          /** overlap checking */
-          const Electron * electron = dynamic_cast<const Electron*>(*nav4MomItr);
-          if ( !electron || ( electron && m_removeOverlapInSameContainer ) )  
-             overlap = m_userOverlapCheckingTool->overlap(*elecItr, *nav4MomItr);
-
-          /** get out of the loop as soon as an overlap is found */
-          if ( overlap ) break;
-      }
-
-      /** if no overlap then save */  
-      if ( !overlap ) { 
-        v.m_outputParticles->push_back( *elecItr ); 
-        v.m_outputLeptons->push_back( *elecItr ); 
-        v.m_outputElectrons->push_back( *elecItr );
- 
-      }
-    }
-  }
-
-  m_numElectrons.second += v.m_outputElectrons->size();
-
-  return sc;
-}
-
-StatusCode UserAnalysisOverlapRemovalTool::photonPreparation( Vectors& v, std::string key ) {
-  ATH_MSG_DEBUG("in photonPreparation() " );
-  StatusCode sc = StatusCode::SUCCESS;
-
-  const PhotonContainer * aod_photons = 0;
-  sc = evtStore()->retrieve( aod_photons, key );
-  if ( sc.isFailure() ) {
-    ATH_MSG_WARNING("No Existing ESD/AOD/DPD photon container found: key = " << key );
-    return sc;
-  }
-  ATH_MSG_DEBUG("Initial PhotonContainer size is " << aod_photons->size() );
-  m_numPhotons.first += aod_photons->size();
-
-  /// iterators over the container 
-  PhotonContainer::const_iterator photItr  = aod_photons->begin();
-  PhotonContainer::const_iterator photItrE = aod_photons->end();
-
-  for (; photItr != photItrE; ++photItr) {
-
-    /** if this is the first particle, just put it in */ 
-    if ( v.m_outputParticles->size() == 0 ) {
-      v.m_outputParticles->push_back( *photItr );
-      v.m_outputPhotons->push_back( *photItr );
-     }   
-    /** check for the overlap and save non overlapping ones */
-    else {
-      INavigable4MomentumCollection::const_iterator nav4MomItr  = v.m_outputParticles->begin();
-      INavigable4MomentumCollection::const_iterator nav4MomItrE = v.m_outputParticles->end();
-      bool overlap = false;
-      for (; nav4MomItr != nav4MomItrE; ++nav4MomItr) {
-          /** overlap checking */
-          const Photon * photon = dynamic_cast<const Photon*>(*nav4MomItr);
-          if ( !photon || ( photon && m_removeOverlapInSameContainer ) )  
-             overlap = m_userOverlapCheckingTool->overlap(*photItr, *nav4MomItr);
-
-          /** get out of the loop as soon as an overlap is found */
-          if ( overlap ) break;
-      }
-
-      /** if no overlap then save */  
-      if ( !overlap ) { 
-        v.m_outputParticles->push_back( *photItr );
-        v.m_outputPhotons->push_back( *photItr );
-      }
-    }
-  }
-
-  m_numPhotons.second += v.m_outputPhotons->size();
-
-  return sc;
-}
-
-StatusCode UserAnalysisOverlapRemovalTool::muonPreparation( Vectors& v, std:: string key ) {
-  ATH_MSG_DEBUG("in muonPreparation() " );
-  StatusCode sc = StatusCode::SUCCESS;
-
-  const MuonContainer * aod_muons = 0;
-  sc = evtStore()->retrieve( aod_muons, key );
-  if ( sc.isFailure() ) {
-    ATH_MSG_WARNING("No Existing ESD/AOD/DPD muon container found: key = " << key );
-    return sc; 
-  }
-  ATH_MSG_DEBUG("Initial MuonContainer size is " << aod_muons->size() );
-  m_numMuons.first += aod_muons->size();
-
-  /// iterators over the container 
-  MuonContainer::const_iterator muonItr  = aod_muons->begin();
-  MuonContainer::const_iterator muonItrE = aod_muons->end();
-
-  for (; muonItr != muonItrE; ++muonItr) {
-
-    /** if this is the first particle, just put it in */ 
-    if ( v.m_outputParticles->size() == 0 ) {
-      v.m_outputParticles->push_back( *muonItr );
-      v.m_outputMuons->push_back( *muonItr );
- 
-    }   
-    /** check for the overlap and save non overlapping ones */
-    else {
-      INavigable4MomentumCollection::const_iterator nav4MomItr  = v.m_outputParticles->begin();
-      INavigable4MomentumCollection::const_iterator nav4MomItrE = v.m_outputParticles->end();
-      bool overlap = false;
-      for (; nav4MomItr != nav4MomItrE; ++nav4MomItr) {
-          /** overlap checking */
-          const Analysis::Muon * muon = dynamic_cast<const Analysis::Muon*>(*nav4MomItr);
-          if ( !muon || ( muon && m_removeOverlapInSameContainer ) )  
-             overlap = m_userOverlapCheckingTool->overlap(*muonItr, *nav4MomItr);
-
-          /** get out of the loop as soon as an overlap is found */
-          if ( overlap ) break;
-      }
-
-      /** if no overlap then save */  
-      if ( !overlap ) { 
-        v.m_outputParticles->push_back( *muonItr );
-        v.m_outputMuons->push_back( *muonItr );
-      }
-    }
-  }
-
-  m_numMuons.second += v.m_outputMuons->size();
-
-  return sc;
-}
-
-StatusCode UserAnalysisOverlapRemovalTool::tauJetPreparation( Vectors& v, std::string key ) {
-  ATH_MSG_DEBUG("in tauJetPreparation() " );
-  StatusCode sc = StatusCode::SUCCESS;
-
-  TauJetContainer * tauJets = this->allTauJets();
-  if ( !tauJets ) return sc;
-
-  const TauJetContainer * aod_tauJets = 0;
-  sc = evtStore()->retrieve( aod_tauJets, key );
-  if ( sc.isFailure() ) {
-    ATH_MSG_WARNING("No Existing ESD/AOD/DPD tauJet container found: key = " << key );
-    return sc;
-  }
-  ATH_MSG_DEBUG("initial TauJetContainer size is " << aod_tauJets->size() );
-  m_numTauJets.first += aod_tauJets->size();
-
-  /// iterators over the container 
-  TauJetContainer::const_iterator tauJetItr  = aod_tauJets->begin();
-  TauJetContainer::const_iterator tauJetItrE = aod_tauJets->end();
-
-  for (; tauJetItr != tauJetItrE; ++tauJetItr) {
-
-    /** if this is the first particle, just put it in */ 
-    if ( v.m_outputParticles->size() == 0 ) {
-      v.m_outputParticles->push_back( *tauJetItr );
-      v.m_outputTauJets->push_back( *tauJetItr );
-     }   
-    /** check for the overlap and save non overlapping ones */
-    else {
-      INavigable4MomentumCollection::const_iterator nav4MomItr  = v.m_outputParticles->begin();
-      INavigable4MomentumCollection::const_iterator nav4MomItrE = v.m_outputParticles->end();
-      bool overlap = false;
-      for (; nav4MomItr != nav4MomItrE; ++nav4MomItr) {
-          /** overlap checking */
-          const TauJet * taujet = dynamic_cast<const TauJet*>(*nav4MomItr);
-          if ( !taujet || ( taujet && m_removeOverlapInSameContainer ) )  
-             overlap = m_userOverlapCheckingTool->overlap(*tauJetItr, *nav4MomItr);
-
-          /** get out of the loop as soon as an overlap is found */
-          if ( overlap ) break;
-      }
-
-      /** if no overlap then save */  
-      if ( !overlap ) { 
-        v.m_outputParticles->push_back( *tauJetItr );
-        v.m_outputTauJets->push_back( *tauJetItr );
-      }
-    }
-  }
-
-  m_numTauJets.second += v.m_outputTauJets->size();
-
-  return sc;
-}
-
-StatusCode UserAnalysisOverlapRemovalTool::jetPreparation( Vectors& v, std::string key ) {
-  ATH_MSG_DEBUG("in jetPreparation() " );
-  StatusCode sc = StatusCode::SUCCESS;
-
-  const JetCollection * aod_jets = 0;
-  sc = evtStore()->retrieve( aod_jets, key );
-  if ( sc.isFailure() ) {
-    ATH_MSG_WARNING("No Existing ESD/AOD/DPD jet container found: key = " << key );
-    return sc;
-  }
-  ATH_MSG_DEBUG("Initial JetCollection size is " << aod_jets->size() );
-  m_numJets.first      += aod_jets->size();
-  m_numBJets.first     += aod_jets->size();
-  m_numLightJets.first += aod_jets->size();
-
-  /// iterators over the container 
-  JetCollection::const_iterator jetItr  = aod_jets->begin();
-  JetCollection::const_iterator jetItrE = aod_jets->end();
-
-  for (; jetItr != jetItrE; ++jetItr) {
-    /** check if this jet passes pre-selection */
-    /** if this is the first particle, just put it in */ 
-    if ( v.m_outputParticles->size() == 0 ) {
-      v.m_outputParticles->push_back( *jetItr );
-      v.m_outputJets->push_back( *jetItr );
-      if ( m_userSelectionTool->isBJet( *jetItr ) ) v.m_outputBJets->push_back( *jetItr);
-      else v.m_outputLightJets->push_back( *jetItr );
- 
-    }   
-    /** check for the overlap and save non overlapping ones */
-    else {
-      INavigable4MomentumCollection::const_iterator nav4MomItr  = v.m_outputParticles->begin();
-      INavigable4MomentumCollection::const_iterator nav4MomItrE = v.m_outputParticles->end();
-      bool overlap = false;
-      for (; nav4MomItr != nav4MomItrE; ++nav4MomItr) {
-          /** overlap checking */
-          const Jet * jet = dynamic_cast<const Jet*>(*nav4MomItr);
-          if ( !jet || ( jet && m_removeOverlapInSameContainer ) )  
-             overlap = m_userOverlapCheckingTool->overlap(*jetItr, *nav4MomItr);
-          /** get out of the loop as soon as an overlap is found */
-          if ( overlap ) break;
-      }
-
-      /** if no overlap then save */  
-      if ( !overlap ) { 
-	v.m_outputParticles->push_back( *jetItr );
-	v.m_outputJets->push_back( *jetItr );
-	if ( m_userSelectionTool->isBJet( *jetItr ) ) v.m_outputBJets->push_back( *jetItr);
-	else v.m_outputLightJets->push_back( *jetItr );
- 
-      }
-    }
-  }
-
-  m_numJets.second      += v.m_outputJets->size();
-  m_numBJets.second     += v.m_outputBJets->size();
-  m_numLightJets.second += v.m_outputLightJets->size();
-
-  return sc;
-}
-
-StatusCode UserAnalysisOverlapRemovalTool::trackParticlePreparation( Vectors& v, std::string key ) {
-  ATH_MSG_DEBUG("in trackParticlePreparation() " );
-  StatusCode sc = StatusCode::SUCCESS;
-
-  const TrackParticleContainer * aod_trackParticles = 0;
-  sc = evtStore()->retrieve( aod_trackParticles, key );
-  if ( sc.isFailure() ) {
-    ATH_MSG_WARNING("No ESD/AOD/DPD TrackParticle container found: key = " << key );
-    return sc;
-  }
-  ATH_MSG_DEBUG("Initial TrackParticleContainer size is " << aod_trackParticles->size() );
-  m_numTrackParticles.first += aod_trackParticles->size();
-
-  /// iterators over the container 
-  TrackParticleContainer::const_iterator trackParticleItr  = aod_trackParticles->begin();
-  TrackParticleContainer::const_iterator trackParticleItrE = aod_trackParticles->end();
-
-  for (; trackParticleItr != trackParticleItrE; ++trackParticleItr) {
-    /** if this is the first particle, just put it in */ 
-    if ( v.m_outputParticles->size() == 0 ) {
-      v.m_outputParticles->push_back( *trackParticleItr );
-      v.m_outputTrackParticles->push_back( *trackParticleItr );
- 
-    }   
-    /** check for the overlap and save non overlapping ones */
-    else {
-      INavigable4MomentumCollection::const_iterator nav4MomItr  = v.m_outputParticles->begin();
-      INavigable4MomentumCollection::const_iterator nav4MomItrE = v.m_outputParticles->end();
-      bool overlap = false; 
-      for (; nav4MomItr != nav4MomItrE; ++nav4MomItr) {
-          /** overlap checking */
-          const TrackParticle * trackparticle = dynamic_cast<const TrackParticle*>(*nav4MomItr);
-          if ( !trackparticle || ( trackparticle && m_removeOverlapInSameContainer ) )  
-             overlap = m_userOverlapCheckingTool->overlap(*trackParticleItr, *nav4MomItr);
-          /** get out of the loop as soon as an overlap is found */
-          if ( overlap ) break;
-      }
-
-      /** if no overlap then save */  
-      if ( !overlap ) { 
-        v.m_outputParticles->push_back( *trackParticleItr ); 
-        v.m_outputTrackParticles->push_back( *trackParticleItr );
-       }
-    }
-  }
-  
-  m_numTrackParticles.second += v.m_outputTrackParticles->size();
-
-  return sc;
-}
-
-StatusCode UserAnalysisOverlapRemovalTool::caloClusterPreparation( Vectors& v, std::string key ) {
-  ATH_MSG_DEBUG("in caloClusterPreparation() " );
-  StatusCode sc = StatusCode::SUCCESS;
-
-  const CaloClusterContainer * aod_caloClusters = 0;
-  sc = evtStore()->retrieve( aod_caloClusters, key );
-  if ( sc.isFailure() ) {
-    ATH_MSG_WARNING("No Existing ESD/AOD/DPD CaloCluster container found: key = " << key );
-    return sc;
-  }
-  ATH_MSG_DEBUG("Initial CaloClusterContainer size is " << aod_caloClusters->size() );
-  m_numCaloClusters.first += aod_caloClusters->size();
-
-  /// iterators over the container 
-  CaloClusterContainer::const_iterator caloClusterItr  = aod_caloClusters->begin();
-  CaloClusterContainer::const_iterator caloClusterItrE = aod_caloClusters->end();
-
-  for (; caloClusterItr != caloClusterItrE; ++caloClusterItr) {
-    /** check if this caloCluster passes pre-selection */
-    if ( !m_userSelectionTool->isSelected( *caloClusterItr ) ) continue;
-
-    /** if this is the first particle, just put it in */ 
-    if ( v.m_outputParticles->size() == 0 ) {
-      v.m_outputParticles->push_back( *caloClusterItr ); 
-      v.m_outputCaloClusters->push_back( *caloClusterItr );
- 
-    }   
-    /** check for the overlap and save non overlapping ones */
-    else {
-      INavigable4MomentumCollection::const_iterator nav4MomItr  = v.m_outputParticles->begin();
-      INavigable4MomentumCollection::const_iterator nav4MomItrE = v.m_outputParticles->end();
-      bool overlap = false;
-      for (; nav4MomItr != nav4MomItrE; ++nav4MomItr) {
-          /** overlap checking */
-          const CaloCluster * cluster = dynamic_cast<const CaloCluster*>(*nav4MomItr);
-          if ( !cluster || ( cluster && m_removeOverlapInSameContainer ) )  
-             overlap = m_userOverlapCheckingTool->overlap(*caloClusterItr, *nav4MomItr);
-          /** get out of the loop as soon as an overlap is found */
-          if ( overlap ) break;
-      }
-
-      /** if no overlap then save */  
-      if ( !overlap ) { 
-        v.m_outputParticles->push_back( *caloClusterItr );
-        v.m_outputCaloClusters->push_back( *caloClusterItr );
- 
-      } 
-    }
-  }
-
-  m_numCaloClusters.second += v.m_outputCaloClusters->size();
-
-  return sc;
-}
-
-INavigable4MomentumCollection * UserAnalysisOverlapRemovalTool::allParticles() {
-  ATH_MSG_DEBUG("in allParticles()" );
-  INavigable4MomentumCollection * container = 0;
-  StatusCode sc = evtStore()->retrieve(container, m_outputObjectKey);
-  if ( sc.isFailure() || container ==0 )
-     ATH_MSG_ERROR("Final State objects not found" );
-  return container;
-}
-
-INavigable4MomentumCollection * UserAnalysisOverlapRemovalTool::allLeptons() {
-  ATH_MSG_DEBUG("in allLeptons()" );
-  INavigable4MomentumCollection * container = 0;
-  StatusCode sc = evtStore()->retrieve(container, m_outputLeptonKey);
-  if ( sc.isFailure() || container ==0 )
-     ATH_MSG_ERROR("Final State leptons not found" );
-  return container;
-
-}
-
-PhotonContainer * UserAnalysisOverlapRemovalTool::allPhotons() {
-  ATH_MSG_DEBUG("in allPhotons()" );
-  PhotonContainer * container = 0;
-  StatusCode sc = evtStore()->retrieve(container, m_outputPhotonKey);
-  if ( sc.isFailure() || container ==0 )
-     ATH_MSG_ERROR("Final State Photons not found" );
-  return container;
-}
-
-MuonContainer * UserAnalysisOverlapRemovalTool::allMuons() {
-  ATH_MSG_DEBUG("in allMuons()" );
-  MuonContainer * container = 0;
-  StatusCode sc = evtStore()->retrieve(container, m_outputMuonKey);
-  if ( sc.isFailure() || container ==0 )
-     ATH_MSG_ERROR("Final State Muons not found" );
-  return container;
-}
-
-ElectronContainer * UserAnalysisOverlapRemovalTool::allElectrons() {
-  ATH_MSG_DEBUG("in allElectrons()" );
-  ElectronContainer * container = 0;
-  StatusCode sc = evtStore()->retrieve(container, m_outputElectronKey);
-  if ( sc.isFailure() || container ==0 )
-     ATH_MSG_ERROR("Final State Electrons not found" );
-  return container;
-}
-
-TauJetContainer * UserAnalysisOverlapRemovalTool::allTauJets() {
-  ATH_MSG_DEBUG("in allTauJets()" );
-  TauJetContainer * container = 0;
-  StatusCode sc = evtStore()->retrieve(container, m_outputTauJetKey);
-  if ( sc.isFailure() || container ==0 )
-     ATH_MSG_ERROR("Final State TauJets not found" );
-  return container;
-}
-
-JetCollection * UserAnalysisOverlapRemovalTool::allJets() {
-  ATH_MSG_DEBUG("in allJets()" );
-  JetCollection * container = 0;
-  StatusCode sc = evtStore()->retrieve(container, m_outputJetKey);
-  if ( sc.isFailure() || container ==0 )
-     ATH_MSG_ERROR("Final State Jets not found" );
-  return container;
-}
-
-JetCollection * UserAnalysisOverlapRemovalTool::allBJets() {
-  ATH_MSG_DEBUG("in allBJets()" );
-  JetCollection * container = 0;
-  StatusCode sc = evtStore()->retrieve(container, m_outputBJetKey);
-  if ( sc.isFailure() || container ==0 )
-     ATH_MSG_ERROR("Final State BJets not found" );
-  return container;
-}
-
-JetCollection * UserAnalysisOverlapRemovalTool::allLightJets() {
-  ATH_MSG_DEBUG("in allLightJets()" );
-  JetCollection * container = 0;
-  StatusCode sc = evtStore()->retrieve(container, m_outputLightJetKey);
-  if ( sc.isFailure() || container ==0 )
-     ATH_MSG_ERROR("Final State Light Jets not found" );
-  return container;
-}
-
-TrackParticleContainer * UserAnalysisOverlapRemovalTool::allTrackParticles() {
-  ATH_MSG_DEBUG("in allTrackParticles()" );
-  TrackParticleContainer * container = 0;
-  StatusCode sc = evtStore()->retrieve(container, m_outputTrackParticleKey);
-  if ( sc.isFailure() || container ==0 )
-     ATH_MSG_ERROR("Final State TrackParticles not found" );
-  return container;
-}
-
-CaloClusterContainer * UserAnalysisOverlapRemovalTool::allCaloClusters() {
-  ATH_MSG_DEBUG("in allCaloClusters()" );
-  CaloClusterContainer * container = 0;
-  StatusCode sc = evtStore()->retrieve(container, m_outputCaloClusterKey);
-  if ( sc.isFailure() || container ==0 )
-     ATH_MSG_ERROR("Final State CaloClusters not found" );
-  return container;
-}
-
-//-------------------------------------------------------------------------------
-StatusCode UserAnalysisOverlapRemovalTool::prepareContainers(Vectors& v) {
-  ATH_MSG_DEBUG("in prepareContainers()" );
-
-  /** create an empty container of all particles and record it */
-  ConstDataVector<CaloClusterContainer> * caloClusters =
-    new ConstDataVector<CaloClusterContainer>( SG::VIEW_ELEMENTS );
-  StatusCode sc = evtStore()->record ( caloClusters, m_outputCaloClusterKey);
-  if ( sc.isFailure() ) {
-    ATH_MSG_WARNING("Not able to create a collection of CaloClusters in StoreGate: key= " << m_outputCaloClusterKey );
-     return sc;
-  }
-  v.m_outputCaloClusters = caloClusters;
-
-  /** create an empty container of TrackParticles and record it */
-  ConstDataVector<TrackParticleContainer> * trackParticles =
-    new ConstDataVector<TrackParticleContainer>( SG::VIEW_ELEMENTS );
-  sc = evtStore()->record ( trackParticles, m_outputTrackParticleKey);
-  if ( sc.isFailure() ) {
-    ATH_MSG_WARNING("Not able to create a collection of TrackParticles in StoreGate: key= " << m_outputTrackParticleKey );
-     return sc;
-  }
-  v.m_outputTrackParticles = trackParticles;
-
-  /** create an empty container of all particles and record it */
-  ConstDataVector<INavigable4MomentumCollection> * particles =
-    new ConstDataVector<INavigable4MomentumCollection>( SG::VIEW_ELEMENTS );
-  sc = evtStore()->record ( particles, m_outputObjectKey);
-  if ( sc.isFailure() ) {
-    ATH_MSG_WARNING("Not able to create a collection of particles in StoreGate: key=  " << m_outputObjectKey );
-     return sc; 
-  }
-  v.m_outputParticles = particles;
-  
-  /** create an empty container of all leptons and record it */
-  ConstDataVector<INavigable4MomentumCollection> * leptons =
-    new ConstDataVector<INavigable4MomentumCollection>( SG::VIEW_ELEMENTS );
-  sc = evtStore()->record ( leptons, m_outputLeptonKey);
-  if ( sc.isFailure() ) {
-    ATH_MSG_WARNING("Not able to create a collection of leptons in StoreGate: key= " << m_outputLeptonKey );
-     return sc;
-  }
-  v.m_outputLeptons = leptons;
-
-  /** create an empty container of all electrons and record it */
-  ConstDataVector<ElectronContainer> * electrons =
-    new ConstDataVector<ElectronContainer>( SG::VIEW_ELEMENTS );
-  sc = evtStore()->record ( electrons, m_outputElectronKey);
-  if ( sc.isFailure() ) {
-    ATH_MSG_WARNING("Not able to create a collection of electrons in StoreGate: key= " << m_outputElectronKey );
-     return sc;
-  }
-  v.m_outputElectrons = electrons;
-
-  /** create an empty container of all photons and record it */
-  ConstDataVector<PhotonContainer> * photons =
-    new ConstDataVector<PhotonContainer>( SG::VIEW_ELEMENTS );
-  sc = evtStore()->record ( photons, m_outputPhotonKey);
-  if ( sc.isFailure() ) {
-    ATH_MSG_WARNING("Not able to create a collection of photons in StoreGate: key= " << m_outputPhotonKey );
-     return sc;
-  }
-  v.m_outputPhotons = photons;
-
-  /** create an empty container of all muons and record it */
-  ConstDataVector<MuonContainer> * muons =
-    new ConstDataVector<MuonContainer>( SG::VIEW_ELEMENTS );
-  sc = evtStore()->record ( muons, m_outputMuonKey);
-  if ( sc.isFailure() ) {
-    ATH_MSG_WARNING("Not able to create a collection of muons in StoreGate: key= " << m_outputMuonKey );
-     return sc;
-  }
-  v.m_outputMuons = muons;
-
-  /** create an empty container of all tauJets and record it */
-  ConstDataVector<TauJetContainer> * tauJets =
-    new ConstDataVector<TauJetContainer>( SG::VIEW_ELEMENTS );
-  sc = evtStore()->record ( tauJets, m_outputTauJetKey);
-  if ( sc.isFailure() ) {
-    ATH_MSG_WARNING("Not able to create a collection of tau jets in StoreGate: key= " << m_outputTauJetKey );
-     return sc;
-  }
-  v.m_outputTauJets = tauJets;
-
-  /** create an empty container of all jets and record it */
-  ConstDataVector<JetCollection> * jets =
-    new ConstDataVector<JetCollection>( SG::VIEW_ELEMENTS );
-  sc = evtStore()->record ( jets, m_outputJetKey);
-  if ( sc.isFailure() ) {
-    ATH_MSG_WARNING("Not able to create a collection of jets in StoreGate: key= " << m_outputJetKey );
-     return sc;
-  }
-  v.m_outputJets = jets;
-
-  /** create an empty container of b-jets and record it */
-  ConstDataVector<JetCollection> * bjets =
-    new ConstDataVector<JetCollection>( SG::VIEW_ELEMENTS );
-  sc = evtStore()->record ( bjets, m_outputBJetKey);
-  if ( sc.isFailure() ) {
-    ATH_MSG_WARNING("Not able to create a collection of b-jets in StoreGate: key= " << m_outputBJetKey );
-     return sc;
-  }
-  v.m_outputBJets = bjets;
-
-  /** create an empty container of light (non b-jet) jets and record it */
-  ConstDataVector<JetCollection> * lightJets =
-    new ConstDataVector<JetCollection>( SG::VIEW_ELEMENTS );
-  sc = evtStore()->record ( lightJets, m_outputLightJetKey);
-  if ( sc.isFailure() ) {
-    ATH_MSG_WARNING("Not able to create a collection of lightJets in StoreGate: key= " << m_outputLightJetKey );
-     return sc;
-  }
-  v.m_outputLightJets = lightJets;
-
-  return StatusCode::SUCCESS;
-}
-
-//-------------------------------------------------------------------------------
-StatusCode UserAnalysisOverlapRemovalTool::lockContainers() {
-  ATH_MSG_DEBUG("in lockContainers()" );
-
-  /** lock the container so it is not modified downstream by anyone else */
-  StatusCode sc = evtStore()->setConst( this->allParticles() );
-  if ( sc.isFailure() ) ATH_MSG_WARNING("Not able to lock the container of particles " );
-
-  sc = evtStore()->setConst( this->allLeptons() );
-  if ( sc.isFailure() ) ATH_MSG_WARNING("Not able to lock the container of leptons " );
-
-  sc = evtStore()->setConst( this->allElectrons() );
-  if ( sc.isFailure() ) ATH_MSG_WARNING("Not able to lock the container of electrons " );
-
-  sc = evtStore()->setConst( this->allPhotons() );
-  if ( sc.isFailure() ) ATH_MSG_WARNING("Not able to lock the container of photons " );
-
-  sc = evtStore()->setConst( this->allMuons() );
-  if ( sc.isFailure() ) ATH_MSG_WARNING("Not able to lock the container of muons " );
-
-  sc = evtStore()->setConst( this->allTauJets() );
-  if ( sc.isFailure() ) ATH_MSG_WARNING("Not able to lock the container of tauJets " );
-
-  sc = evtStore()->setConst( this->allJets() );
-  if ( sc.isFailure() ) ATH_MSG_WARNING("Not able to lock the container of jets " );
-
-  sc = evtStore()->setConst( this->allBJets() );
-  if ( sc.isFailure() ) ATH_MSG_WARNING("Not able to lock the container of b-jets " );
-
-  sc = evtStore()->setConst( this->allLightJets() );
-  if ( sc.isFailure() ) ATH_MSG_WARNING("Not able to lock the container of light Jets " );
-
-  sc = evtStore()->setConst( this->allTrackParticles() );
-  if ( sc.isFailure() ) ATH_MSG_WARNING("Not able to lock the container of trackParticles " );
-
-  sc = evtStore()->setConst( this->allCaloClusters() );
-  if ( sc.isFailure() ) ATH_MSG_WARNING("Not able to lock the container of calo clusters " );
-
-  return sc;
-}
-
-//-----------------------------------------------------------------------------------------------
-void UserAnalysisOverlapRemovalTool::print() {
-  ATH_MSG_DEBUG("in print() " );
-
-  /** Get the container of pre-selected Electrons */
-  const ElectronContainer * electrons = this->finalStateElectrons();
-  ATH_MSG_DEBUG("Number of Pre-selected Electrons is " << electrons->size() );
-
-  /** Get the container of pre-selected Photons */
-  const PhotonContainer * photons = this->finalStatePhotons();
-  ATH_MSG_DEBUG("Number of Pre-selected Photons is " << photons->size() );
-
-  /** Get the container of pre-selected Muons */
-  const MuonContainer * muons = this->finalStateMuons();
-  ATH_MSG_DEBUG("Number of Pre-selected Muons is " << muons->size() );
-
-  /** Get the container of pre-selected TauJets */
-  const TauJetContainer * tauJets = this->finalStateTauJets();
-  ATH_MSG_DEBUG("Number of Pre-selected TauJets is " << tauJets->size() );
-
-  /** Get the container of pre-selected Jets */
-  const JetCollection * jets = this->finalStateJets();
-  ATH_MSG_DEBUG("Number of Pre-selected Jets is " << jets->size() );
-
-  /** Get the container of pre-selected B-tagged Jets */
-  const JetCollection * bjets = this->finalStateBJets();
-  ATH_MSG_DEBUG("Number of Pre-selected b-Jets is " << bjets->size() );
-
-  /** Get the container of pre-selected non b-jets */
-  const JetCollection * lightJets = this->finalStateLightJets();
-  ATH_MSG_DEBUG("Number of Pre-selected LightJets is " << lightJets->size() );
-
-  /** Get the container of pre-selected TrackParticles */
-  const TrackParticleContainer * trackParticles = this->finalStateTrackParticles();
-  ATH_MSG_DEBUG("Number of Pre-selected TrackParticles is " << trackParticles->size() );
-
-  /** Get the container of pre-selected CaloClusters */
-  const CaloClusterContainer * caloClusters = this->finalStateCaloClusters();
-  ATH_MSG_DEBUG("Number of Pre-selected CaloClusters is " << caloClusters->size() );
-
-  /** Get the container of pre-selected leptons (electrons, muons) */
-  const INavigable4MomentumCollection * leptons = this->finalStateLeptons();
-  ATH_MSG_DEBUG("Number of Pre-selected Leptons is " << leptons->size() );
-
-  /** Get the container of ALL pre-selected objects */
-  const INavigable4MomentumCollection * allObjects = this->finalStateObjects();
-  ATH_MSG_DEBUG("Number of Pre-selected final State Objects is " << allObjects->size() );
-
-}
-
-//---------------------------------------------------------------------------------------------------------
-void UserAnalysisOverlapRemovalTool::summarize() {
-  ATH_MSG_INFO("in summarize() " );
-
-  ATH_MSG_INFO("Summary Pre-selected/Overlap Removed Events ###################" );
-  ATH_MSG_INFO("---------------------------------------------------------------" );
-  ATH_MSG_INFO("Pre-selected Electrons             = " << std::setw(10) << m_numElectrons.first 
-                      << "   Overlap-removed Electrons       = " << std::setw(10) << m_numElectrons.second );
-  ATH_MSG_INFO("Pre-selected Photons               = " << std::setw(10) << m_numPhotons.first 
-                      << "   Overlap-removed Photons         = " << std::setw(10) << m_numPhotons.second );
-  ATH_MSG_INFO("Pre-selected Muons                 = " << std::setw(10) << m_numMuons.first 
-                      << "    Overlap-removed Muons          = " << std::setw(10) << m_numMuons.second );
-  ATH_MSG_INFO("Pre-selected TauJets               = " << std::setw(10) << m_numTauJets.first  
-                      << "    Overlap-removed TauJets        = " << std::setw(10) << m_numTauJets.second );
-  ATH_MSG_INFO("Pre-selected Jets                  = " << std::setw(10) << m_numJets.first 
-                      << "    Overlap-removed Jets           = " << std::setw(10) << m_numJets.second );
-  ATH_MSG_INFO("Pre-selected BJets                 = " << std::setw(10) << m_numBJets.first  
-                      << "    Overlap-removed BJets          = " << std::setw(10) << m_numBJets.second );
-  ATH_MSG_INFO("Pre-selected LightJets             = " << std::setw(10) << m_numLightJets.first
-                      << "    Overlap-removed LightJets      = " << std::setw(10) << m_numLightJets.second );
-  ATH_MSG_INFO("Pre-selected TrackParticles        = " << std::setw(10) << m_numTrackParticles.first
-                      << "    Overlap-removed TrackParticles = " << std::setw(10) << m_numTrackParticles.second );
-  ATH_MSG_INFO("Pre-selected CaloClusters          = " << std::setw(10) << m_numCaloClusters.first
-                      << "   Overlap-removed CaloClusters    = " << std::setw(10) << m_numCaloClusters.second );
-}
-
-//-----------------------------------------------------------------------------------------------------------
-bool UserAnalysisOverlapRemovalTool::isExecuted() {
-  ATH_MSG_DEBUG("in isExecuted() " );
-  return evtStore()->contains<INavigable4MomentumCollection>( m_outputObjectKey );
-}
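The removed preparation methods above all repeat the same accept-if-no-overlap loop: the first object is kept unconditionally, and every later object is compared against everything already accepted via UserAnalysisOverlapCheckingTool::overlap(), with same-type comparisons skipped unless RemoveOverlapInSameContainer is set. A condensed, self-contained sketch of that pattern (not part of the patch; the helper and its overlaps callback are illustrative only):

    #include <functional>
    #include <vector>

    // Keep a candidate only if it does not overlap with anything accepted so far.
    // 'overlaps' stands in for UserAnalysisOverlapCheckingTool::overlap().
    template <class T>
    std::vector<const T*> acceptNonOverlapping(
        const std::vector<const T*>& input,
        const std::function<bool(const T*, const T*)>& overlaps)
    {
      std::vector<const T*> accepted;
      for (const T* candidate : input) {
        bool overlap = false;
        for (const T* kept : accepted) {
          if (overlaps(candidate, kept)) { overlap = true; break; }  // stop at the first overlap
        }
        if (!overlap) accepted.push_back(candidate);
      }
      return accepted;
    }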
diff --git a/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/src/UserAnalysisPreparationTool.cxx b/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/src/UserAnalysisPreparationTool.cxx
deleted file mode 100644
index 61348d89f95be15d99dd283b86615aa91844b09c..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/src/UserAnalysisPreparationTool.cxx
+++ /dev/null
@@ -1,514 +0,0 @@
-/*
-  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
-*/
-
-/*****************************************************************************
-Name    : UserAnalysisPreparationTool.cxx
-Package : offline/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils
-Author  : Ketevi A. Assamagan
-Created : November 2007
-Purpose : User Analysis Preparation - see UserAnalysisPreparationTool.h for details
-*****************************************************************************/
-
-#include "GaudiKernel/GaudiException.h"
-#include "Gaudi/Property.h"
-
-// Accessing data:
-#include "CLHEP/Units/PhysicalConstants.h"
-
-// User Tools
-#include "UserAnalysisUtils/UserAnalysisPreparationTool.h"
-#include "AthContainers/ConstDataVector.h"
-
-#include <sstream>
-#include <iomanip>
-#include <iostream>
-
-using namespace Analysis;
-using namespace Rec;
-using namespace std;
-
-//------------------------------------------------------------------------------
-UserAnalysisPreparationTool::UserAnalysisPreparationTool( const std::string& type,
-                                                          const std::string& name, 
-                                                          const IInterface* parent )
-  : AthAlgTool( type, name, parent ),
-    m_userSelectionTool ( "UserAnalysisSelectionTool" ) {
-
-  declareInterface<UserAnalysisPreparationTool>( this );
-
-  declareProperty("UserSelectionTool",      m_userSelectionTool);
-  declareProperty("InputContainerKeys",     m_inputContainerKeys);
-  declareProperty("OutputContainerKeys",    m_outputContainerKeys);
-  declareProperty("IsAtlfastData",          m_isAtlfast=false);
-
-  /** initialize counters */
-  m_numElectrons      = std::make_pair(0,0);
-  m_numPhotons        = std::make_pair(0,0);
-  m_numMuons          = std::make_pair(0,0);
-  m_numTauJets        = std::make_pair(0,0);
-  m_numJets           = std::make_pair(0,0);
-  m_numTrackParticles = std::make_pair(0,0);
-  m_numCaloClusters   = std::make_pair(0,0);
-  m_first             = true;
-}
-
-//------------------------------------------------------------------------------
-StatusCode UserAnalysisPreparationTool::initialize() {
-
-  /// get a handle on the selection tools
-  StatusCode sc = m_userSelectionTool.retrieve();
-  if ( sc.isFailure() ) {
-    ATH_MSG_ERROR("Can't get handle on analysis selection tool");
-    return sc;
-  }
-
-  return StatusCode::SUCCESS;
-}
-
-//------------------------------------------------------------------------------
-StatusCode UserAnalysisPreparationTool::finalize() {
-
-  ATH_MSG_DEBUG("in finalize()");
- 
-  this->summarize();
-
-
-  return StatusCode::SUCCESS;
-}
-
-//-------------------------------------------------------------------------------
-StatusCode UserAnalysisPreparationTool::execute() {
-  ATH_MSG_DEBUG("in execute()");
-
-  /** check that the input and the output containers are defined */
-  StatusCode sc = StatusCode::SUCCESS;
-
-  if ( m_first ) {
-     if ( m_outputContainerKeys.size() != m_inputContainerKeys.size() ) {
-       ATH_MSG_FATAL("Input/Output container mis-match: please fix job options");
-        return StatusCode::FAILURE;
-     }
-     if ( m_outputContainerKeys.size() == 0 ) {
-      ATH_MSG_ERROR("You should input at least one container : please fix jobOptions");
-      return StatusCode::FAILURE;
-     }
-  }
-
-  /** now object preparation with selection */
-  for ( unsigned int i=0; i<m_inputContainerKeys.size(); ++i ) {
-
-    string::size_type loc = m_inputContainerKeys[i].find( "Electron", 0);
-    if ( loc != string::npos ) { 
-       if ( m_first ) m_outputElectronKey = m_outputContainerKeys[i]; 
-       sc = this->electronPreparation( m_inputContainerKeys[i] );
-    }
-
-    loc = m_inputContainerKeys[i].find( "Photon", 0);
-    if ( loc != string::npos ) {
-       if ( m_first ) m_outputPhotonKey = m_outputContainerKeys[i];       
-       sc = this->photonPreparation( m_inputContainerKeys[i] );
-    }
-
-    loc = m_inputContainerKeys[i].find( "Muon", 0);
-    if ( loc != string::npos ) {
-       if ( m_first ) m_outputMuonKey = m_outputContainerKeys[i];       
-       sc = this->muonPreparation( m_inputContainerKeys[i] );
-    }
-
-    std::string tau = "Tau";
-    if ( m_isAtlfast ) tau = "TauJet";
-    loc = m_inputContainerKeys[i].find( tau, 0);
-    if ( loc != string::npos ) { 
-      if ( m_first ) m_outputTauJetKey = m_outputContainerKeys[i];
-      sc = this->tauJetPreparation( m_inputContainerKeys[i] );
-    }
-
-    std::string jet = "Jets";
-    if ( m_isAtlfast ) jet = "Jet";
-    loc = m_inputContainerKeys[i].find( jet, 0);
-    if ( loc != string::npos ) { 
-       if ( m_first ) m_outputJetKey = m_outputContainerKeys[i];
-       sc = this->jetPreparation( m_inputContainerKeys[i] );
-    }
-
-    loc = m_inputContainerKeys[i].find( "Track", 0);
-    if ( loc != string::npos ) { 
-       if ( m_first ) m_outputTrackParticleKey = m_outputContainerKeys[i]; 
-       sc = this->trackParticlePreparation( m_inputContainerKeys[i] );
-    }
-
-    loc = m_inputContainerKeys[i].find( "Cluster", 0);
-    if ( loc != string::npos ) { 
-       if ( m_first ) m_outputCaloClusterKey = m_outputContainerKeys[i];
-       sc = this->caloClusterPreparation( m_inputContainerKeys[i] );
-    }
-    if ( sc.isFailure() ) return sc;
-  }
-
-  this->print();
-  m_first = false;
-
-  return StatusCode::SUCCESS;
-}
-
-//-------------------------------------------------------------------------------
-const PhotonContainer * UserAnalysisPreparationTool::selectedPhotons() {
-  ATH_MSG_DEBUG("in selectedPhotons()");
-  const PhotonContainer * container = 0;
-  StatusCode sc = evtStore()->retrieve(container, m_outputPhotonKey);
-  if ( sc.isFailure() || container ==0 ) ATH_MSG_ERROR("Final State Photons not found");
-  return container;
-}
-
-const ElectronContainer * UserAnalysisPreparationTool::selectedElectrons() {
-  ATH_MSG_DEBUG("in selectedElectrons()");
-  const ElectronContainer * container = 0;
-  StatusCode sc = evtStore()->retrieve(container, m_outputElectronKey);
-  if ( sc.isFailure() || container ==0 ) ATH_MSG_ERROR("Final State Electrons not found");
-  return container;
-}
-
-const MuonContainer * UserAnalysisPreparationTool::selectedMuons() {
-  ATH_MSG_DEBUG("in selectedMuons()");
-  const MuonContainer * container = 0;
-  StatusCode sc = evtStore()->retrieve(container, m_outputMuonKey);
-  if ( sc.isFailure() || container ==0 ) ATH_MSG_DEBUG("Final State Muons not found");
-  return container;
-}
-
-const TauJetContainer * UserAnalysisPreparationTool::selectedTauJets() {
-  ATH_MSG_DEBUG("in selectedTauJets()");
-  const TauJetContainer * container = 0;
-  StatusCode sc = evtStore()->retrieve(container, m_outputTauJetKey);
-  if ( sc.isFailure() || container ==0 ) ATH_MSG_ERROR("Final State TauJets not found");
-  return container;
-}
-
-const JetCollection * UserAnalysisPreparationTool::selectedJets() {
-  ATH_MSG_DEBUG("in selectedJets()");
-  const JetCollection * container = 0;
-  StatusCode sc = evtStore()->retrieve(container, m_outputJetKey);
-  if ( sc.isFailure() || container ==0 ) ATH_MSG_ERROR("Final State Jets not found");
-  return container;
-}
-
-const TrackParticleContainer * UserAnalysisPreparationTool::selectedTrackParticles() {
-  ATH_MSG_DEBUG("in selectedTrackParticles()");
-  const TrackParticleContainer * container = 0;
-  StatusCode sc = evtStore()->retrieve(container, m_outputTrackParticleKey);
-  if ( sc.isFailure() || container ==0 ) ATH_MSG_ERROR("Final State TrackParticles not found");
-  return container;
-}
-
-const CaloClusterContainer * UserAnalysisPreparationTool::selectedCaloClusters() {
-  ATH_MSG_DEBUG("in selectedCaloClusters()");
-  const CaloClusterContainer * container = 0;
-  StatusCode sc = evtStore()->retrieve(container, m_outputCaloClusterKey);
-  if ( sc.isFailure() || container ==0 )ATH_MSG_ERROR("Final State CaloClusters not found");
-  return container;
-}
-
-  /** container preparation */
-StatusCode UserAnalysisPreparationTool::electronPreparation( std::string key ) {
-  ATH_MSG_DEBUG("in electronPreparation() ");
-
-  /** create an empty container of all electrons and record it */
-  auto electrons = std::make_unique<ConstDataVector<ElectronContainer> >( SG::VIEW_ELEMENTS );
-
-  const ElectronContainer * aod_electrons = 0;
-  ATH_CHECK( evtStore()->retrieve( aod_electrons, key ) );
-  ATH_MSG_DEBUG("AOD ElectronContainer size is " << aod_electrons->size());
-  m_numElectrons.first += aod_electrons->size();
-
-  /// iterators over the container 
-  ElectronContainer::const_iterator elecItr  = aod_electrons->begin();
-  ElectronContainer::const_iterator elecItrE = aod_electrons->end();
-
-  for (; elecItr != elecItrE; ++elecItr) {
-    if ( m_userSelectionTool->isSelected( *elecItr ) ) electrons->push_back( *elecItr );
-  }
-  m_numElectrons.second += electrons->size();
-
-  ATH_CHECK( evtStore()->record (std::move (electrons), m_outputElectronKey) );
-  
-  return StatusCode::SUCCESS;
-}
-
-StatusCode UserAnalysisPreparationTool::photonPreparation( std::string key ) {
-  ATH_MSG_DEBUG("in photonPreparation() ");
-
-  /** create an empty container of all photons and record it */
-  auto photons = std::make_unique<ConstDataVector<PhotonContainer> >( SG::VIEW_ELEMENTS );
-
-  const PhotonContainer * aod_photons = 0;
-  ATH_CHECK( evtStore()->retrieve( aod_photons, key ) );
-  ATH_MSG_DEBUG("AOD PhotonContainer size is " << aod_photons->size());
-  m_numPhotons.first += aod_photons->size();
-
-  /// iterators over the container 
-  PhotonContainer::const_iterator photItr  = aod_photons->begin();
-  PhotonContainer::const_iterator photItrE = aod_photons->end();
-
-  /** check if this photon passes pre-selection */
-  for (; photItr != photItrE; ++photItr) {
-    if ( m_userSelectionTool->isSelected( *photItr ) ) photons->push_back( *photItr );
-  }
-  m_numPhotons.second += photons->size();
-
-  ATH_CHECK( evtStore()->record ( std::move(photons), m_outputPhotonKey ) );
-
-  return StatusCode::SUCCESS;
-}
-
-StatusCode UserAnalysisPreparationTool::muonPreparation( std::string key ) {
-  ATH_MSG_DEBUG("in muonPreparation() ");
-  StatusCode sc = StatusCode::SUCCESS;
-
-  /** create an empty container of all muons and record it */
-  ConstDataVector<MuonContainer> * muons =
-    new ConstDataVector<MuonContainer>( SG::VIEW_ELEMENTS );
-  sc = evtStore()->record ( muons, m_outputMuonKey );
-  if ( sc.isFailure() ) {
-    ATH_MSG_WARNING("Not able to create a collection of muons in StoreGate: key= " << m_outputMuonKey);
-     return sc;
-  }
-
-  const MuonContainer * aod_muons = 0;
-  sc = evtStore()->retrieve( aod_muons, key );
-  if ( sc.isFailure() ) {
-    ATH_MSG_WARNING("No ESD/AOD/DPD muon container found: key = " << key);
-    return sc; 
-  }
-  ATH_MSG_DEBUG("AOD MuonContainer size is " << aod_muons->size());
-  m_numMuons.first += aod_muons->size();
-
-  /// iterators over the container 
-  MuonContainer::const_iterator muonItr  = aod_muons->begin();
-  MuonContainer::const_iterator muonItrE = aod_muons->end();
-
-  /** check if this muon passes pre-selection */
-  for (; muonItr != muonItrE; ++muonItr) {
-    //    if ( m_userSelectionTool->isSelected( *muonItr ) ) muons->push_back( *muonItr );
-    if ( m_userSelectionTool->isSelected( *muonItr ) ) {
-      muons->push_back( *muonItr );
-    }
- 
-  }  
-  m_numMuons.second += muons->size();
-
-  sc = evtStore()->setConst( muons );
-  if ( sc.isFailure()) ATH_MSG_WARNING("Not able to lock the container of muons ");
-
-  return sc;
-}
-
-StatusCode UserAnalysisPreparationTool::tauJetPreparation( std::string key ) {
-  ATH_MSG_DEBUG("in tauJetPreparation() ");
-  StatusCode sc = StatusCode::SUCCESS;
-
-  /** create an empty container of all tauJets and record it */
-  ConstDataVector<TauJetContainer> * tauJets =
-    new ConstDataVector<TauJetContainer>( SG::VIEW_ELEMENTS );
-  sc = evtStore()->record ( tauJets, m_outputTauJetKey );
-  if ( sc.isFailure() ) {
-    ATH_MSG_WARNING("Not able to create a collection of tau jets in StoreGate: key= " << m_outputTauJetKey);
-    return sc;
-  }
-
-  const TauJetContainer * aod_tauJets = 0;
-  sc = evtStore()->retrieve( aod_tauJets, key );
-  if ( sc.isFailure() ) {
-    ATH_MSG_WARNING("No ESD/AOD/DPD tauJet container found: key = " << key);
-    return sc;
-  }
-  ATH_MSG_DEBUG("AOD TauJetContainer size is " << aod_tauJets->size());
-  m_numTauJets.first += aod_tauJets->size();
-
-  /// iterators over the container 
-  TauJetContainer::const_iterator tauJetItr  = aod_tauJets->begin();
-  TauJetContainer::const_iterator tauJetItrE = aod_tauJets->end();
-
-  /** check if this tauJet passes pre-selection */
-  for (; tauJetItr != tauJetItrE; ++tauJetItr) {
-    //    if ( m_userSelectionTool->isSelected( *tauJetItr ) ) tauJets->push_back( *tauJetItr );
-    if ( m_userSelectionTool->isSelected( *tauJetItr ) ) {
-      tauJets->push_back( *tauJetItr );
-    }
- 
-  }
-  m_numTauJets.second += tauJets->size();
-
-  sc = evtStore()->setConst( tauJets );
-  if ( sc.isFailure() ) ATH_MSG_WARNING("Not able to lock the container of tauJets ");
-
-  return sc;
-}
-
-StatusCode UserAnalysisPreparationTool::jetPreparation( std::string key ) {
-  ATH_MSG_DEBUG("in jetPreparation() ");
-  StatusCode sc = StatusCode::SUCCESS;
-
-  /** create an empty container of all jets and record it */
-  ConstDataVector<JetCollection> * jets =
-    new ConstDataVector<JetCollection>( SG::VIEW_ELEMENTS );
-  sc = evtStore()->record ( jets, m_outputJetKey );
-  if ( sc.isFailure() ) {
-    ATH_MSG_WARNING("Not able to create a collection of jets in StoreGate: key= " << m_outputJetKey);
-     return sc;
-  }
-
-  const JetCollection * aod_jets = 0;
-  sc = evtStore()->retrieve( aod_jets, key );
-  if ( sc.isFailure() ) {
-    ATH_MSG_WARNING("No ESD/AOD/DPD jet container found: key = " << key);
-    return sc;
-  }
-  ATH_MSG_DEBUG("AOD JetCollection size is " << aod_jets->size());
-  m_numJets.first      += aod_jets->size();
-
-  /// iterators over the container 
-  JetCollection::const_iterator jetItr  = aod_jets->begin();
-  JetCollection::const_iterator jetItrE = aod_jets->end();
-
-  /** check if this jet passes pre-selection */
-  for (; jetItr != jetItrE; ++jetItr) {
-    //    if ( m_userSelectionTool->isSelected( *jetItr ) ) jets->push_back( *jetItr );
-    if ( m_userSelectionTool->isSelected( *jetItr ) ) {
-      jets->push_back( *jetItr );
-    }
-
-  }
-  m_numJets.second      += jets->size();
-
-  sc = evtStore()->setConst( jets );
-  if ( sc.isFailure() ) ATH_MSG_WARNING("Not able to lock the container of jets ");
-
-  return sc;
-}
-
-StatusCode UserAnalysisPreparationTool::trackParticlePreparation( std::string key ) {
-  ATH_MSG_DEBUG("in trackParticlePreparation() ");
-  StatusCode sc = StatusCode::SUCCESS;
-
-  /** create an empty container of TrackParticles and record it */
-  ConstDataVector<TrackParticleContainer> * trackParticles =
-    new ConstDataVector<TrackParticleContainer>( SG::VIEW_ELEMENTS );
-  sc = evtStore()->record ( trackParticles, m_outputTrackParticleKey );
-  if ( sc.isFailure() ) {
-    ATH_MSG_WARNING("Not able to create a collection of TrackParticles in StoreGate: key= " << m_outputTrackParticleKey);
-     return sc;
-  }
-
-  const TrackParticleContainer * aod_trackParticles = 0;
-  sc = evtStore()->retrieve( aod_trackParticles, key );
-  if ( sc.isFailure() ) {
-    ATH_MSG_WARNING("No ESD/AOD/DPD trackParticle container found: key = " << key);
-    return sc;
-  }
-  ATH_MSG_DEBUG("AOD TrackParticleContainer size is " << aod_trackParticles->size());
-  m_numTrackParticles.first += aod_trackParticles->size();
-
-  /// iterators over the container 
-  TrackParticleContainer::const_iterator trackParticleItr  = aod_trackParticles->begin();
-  TrackParticleContainer::const_iterator trackParticleItrE = aod_trackParticles->end();
-
-  /** check if this trackParticle passes pre-selection */
-  for (; trackParticleItr != trackParticleItrE; ++trackParticleItr) {
-    //    if ( m_userSelectionTool->isSelected( *trackParticleItr ) ) trackParticles->push_back( *trackParticleItr );
-    if ( m_userSelectionTool->isSelected( *trackParticleItr ) ) {
-      trackParticles->push_back( *trackParticleItr );
-    }
-
-  }
-  m_numTrackParticles.second += trackParticles->size();
-
-  sc = evtStore()->setConst( trackParticles );
-  if ( sc.isFailure() ) ATH_MSG_WARNING("Not able to lock the container of trackParticles ");
-
-  return sc;
-}
-
-StatusCode UserAnalysisPreparationTool::caloClusterPreparation( std::string key ) {
-  ATH_MSG_DEBUG("in caloClusterPreparation() ");
-
-  /** create an empty container of all particles and record it */
-  auto caloClusters = std::make_unique<ConstDataVector<CaloClusterContainer> >( SG::VIEW_ELEMENTS );
-
-  const CaloClusterContainer * aod_caloClusters = 0;
-  ATH_CHECK( evtStore()->retrieve( aod_caloClusters, key ) );
-  ATH_MSG_DEBUG("AOD CaloClusterContainer size is " << aod_caloClusters->size());
-  m_numCaloClusters.first += aod_caloClusters->size();
-
-  /// iterators over the container 
-  CaloClusterContainer::const_iterator caloClusterItr  = aod_caloClusters->begin();
-  CaloClusterContainer::const_iterator caloClusterItrE = aod_caloClusters->end();
-
-  /** check if this caloCluster passes pre-selection */
-  for (; caloClusterItr != caloClusterItrE; ++caloClusterItr) {
-    if ( m_userSelectionTool->isSelected( *caloClusterItr ) ) caloClusters->push_back( *caloClusterItr );
-  }
-  m_numCaloClusters.second += caloClusters->size();
-
-  ATH_CHECK( evtStore()->record ( std::move (caloClusters), m_outputCaloClusterKey, false ) );
-
-  return StatusCode::SUCCESS;
-}
-
-//-----------------------------------------------------------------------------------------------
-void UserAnalysisPreparationTool::print() {
-  ATH_MSG_DEBUG("in print() ");
-
-  /** Get the container of pre-selected Electrons */
-  const ElectronContainer * electrons = this->selectedElectrons();
-  if(electrons) ATH_MSG_DEBUG("Number of Pre-selected Electrons is " << electrons->size());
-
-  /** Get the container of pre-selected Photons */
-  const PhotonContainer * photons = this->selectedPhotons();
-  if(photons) ATH_MSG_DEBUG("Number of Pre-selected Photons is " << photons->size());
-
-  /** Get the container of pre-selected Muons */
-  const MuonContainer * muons = this->selectedMuons();
-  if(muons)ATH_MSG_DEBUG("Number of Pre-selected Muons is " << muons->size());
-
-  /** Get the container of pre-selected TauJets */
-  const TauJetContainer * tauJets = this->selectedTauJets();
-  if(tauJets) ATH_MSG_DEBUG("Number of Pre-selected TauJets is " << tauJets->size());
-
-  /** Get the container of pre-selected Jets */
-  const JetCollection * jets = this->selectedJets();
-  if(jets) ATH_MSG_DEBUG("Number of Pre-selected Jets is " << jets->size());
-
-  /** Get the container of pre-selected TrackParticles */
-  const TrackParticleContainer * trackParticles = this->selectedTrackParticles();
-  if(trackParticles) ATH_MSG_DEBUG("Number of Pre-selected TrackParticles is " << trackParticles->size());
-
-  /** Get the container of pre-selected CaloClusters */
-  const CaloClusterContainer * caloClusters = this->selectedCaloClusters();
-  if(caloClusters) ATH_MSG_DEBUG("Number of Pre-selected CaloClusters is " << caloClusters->size());
-
-}
-
-//---------------------------------------------------------------------------------------------------------
-void UserAnalysisPreparationTool::summarize() {
-  ATH_MSG_INFO("in summarize() ");
-
-  ATH_MSG_INFO("Summary of Reconstructed Events/pre-selected events ############");
-  ATH_MSG_INFO("---------------------------------------------------------------");
-  ATH_MSG_INFO("Reconstructed Electrons        = " << std::setw(10) << m_numElectrons.first 
-                      << "   Pre-selected Electrons      = " << std::setw(10) << m_numElectrons.second);
-  ATH_MSG_INFO("Reconstructed Photons          = " << std::setw(10) << m_numPhotons.first 
-                      << "   Pre-selected Photons        = " << std::setw(10) << m_numPhotons.second);
-  ATH_MSG_INFO("Reconstructed Muons            = " << std::setw(10) << m_numMuons.first 
-                      << "   Pre-selected Muons          = " << std::setw(10) << m_numMuons.second);
-  ATH_MSG_INFO("Reconstructed TauJets          = " << std::setw(10) << m_numTauJets.first  
-                      << "   Pre-selected TauJets        = " << std::setw(10) << m_numTauJets.second);
-  ATH_MSG_INFO("Reconstructed Jets             = " << std::setw(10) << m_numJets.first 
-                      << "   Pre-selected Jets           = " << std::setw(10) << m_numJets.second);
-  ATH_MSG_INFO("Reconstructed TrackParticles   = " << std::setw(10) << m_numTrackParticles.first
-                      << "   Pre-selected TrackParticles = " << std::setw(10) << m_numTrackParticles.second);
-  ATH_MSG_INFO("Reconstructed CaloClusters     = " << std::setw(10) << m_numCaloClusters.first
-                      << "   Pre-selected CaloClusters   = " << std::setw(10) << m_numCaloClusters.second);
-}
-
-
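The deleted electron, photon and cluster preparation methods above share the ConstDataVector view pattern: the recorded output only references objects still owned by the input AOD container, so nothing is copied or double-deleted. A minimal sketch of that pattern, assuming the legacy JetEvent/AthContainers headers used elsewhere in this package; the selection predicate and the caller-supplied StoreGate key are placeholders:

    #include <memory>
    #include "AthContainers/ConstDataVector.h"
    #include "AthContainers/OwnershipPolicy.h"
    #include "JetEvent/JetCollection.h"

    // Build a VIEW_ELEMENTS container of the selected jets; the caller records it,
    // e.g. evtStore()->record(std::move(view), outputKey).
    template <class SelectionFunc>
    std::unique_ptr<ConstDataVector<JetCollection>>
    makeSelectedView(const JetCollection& input, SelectionFunc isSelected)
    {
      auto view = std::make_unique<ConstDataVector<JetCollection>>(SG::VIEW_ELEMENTS);
      for (const Jet* jet : input) {
        if (isSelected(jet)) view->push_back(jet);  // stores the pointer only, no copy
      }
      return view;
    }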
diff --git a/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/src/UserAnalysisSelectionTool.cxx b/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/src/UserAnalysisSelectionTool.cxx
deleted file mode 100644
index 7e52ff70fc2cb6d3ee92bfe745fc270df65fa309..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/src/UserAnalysisSelectionTool.cxx
+++ /dev/null
@@ -1,278 +0,0 @@
-/*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-*/
-
-/*****************************************************************************
-Name    : UserAnalysisSelectionTool.cxx
-Package : offline/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils
-Author  : Ketevi A. Assamagan
-Created : November 2007
-Purpose : User Analysis Selections - see UserAnalysisSelectionTool.h for details
-*****************************************************************************/
-
-// EDM include(s):
-#include "egammaEvent/Electron.h"
-#include "egammaEvent/Photon.h"
-#include "egammaEvent/EMShower.h"
-#include "muonEvent/Muon.h"
-#include "tauEvent/TauJet.h"
-#include "JetEvent/Jet.h"
-#include "MissingETEvent/MissingET.h"
-#include "Particle/TrackParticleContainer.h"
-#include "CaloEvent/CaloClusterContainer.h"
-
-// Local include(s):
-#include "UserAnalysisUtils/UserAnalysisSelectionTool.h"
-
-/// Convenience GeV definition
-static const double GeV = 1000.0;
-
-//------------------------------------------------------------------------------
-UserAnalysisSelectionTool::UserAnalysisSelectionTool( const std::string& type,
-                                                      const std::string& name,
-                                                      const IInterface* parent )
-   : AthAlgTool( type, name, parent ) {
-   declareInterface<UserAnalysisSelectionTool>( this );
-
-   declareProperty("IsAtlfastData",          m_isAtlfast=false);
-
-   /** caloCluster selection */
-   declareProperty("CaloClusterE", m_caloClusterE=1.0*GeV);
-
-   /** TrackParticle Pt */
-   declareProperty("TrackParticlePt", m_trackParticlePt=1.0*GeV);
-
-   /** Electron selection */
-   declareProperty("ElectronPt",       m_electronPt=10*GeV);
-   declareProperty("ElectronEta",      m_electronEta=2.5);
-   declareProperty("ElectronIsEMFlag", m_electronIsEMFlag="Loose");
-   declareProperty("ElectronIsEM",     m_electronIsEM=0);
-   declareProperty("AuthorEgammaOnly", m_authorEgammaOnly=false);
-   declareProperty("ElectronEtaWindowCut", m_electronEtaWindCut = false);
-   declareProperty("ElectronEtaWindow", m_electronEtaWind = 0.075);
-   declareProperty("ElectronEtaWindowCenter", m_electronEtaWindCent=1.445);
-   declareProperty("DoElectronIsolation", m_doElectronIsolation=true);
-   declareProperty("ElectronIsolationConeIndex",
-                   m_electronIsolationConeIndex=1);
-   declareProperty("ElectronIsolationEt", m_electronIsolationEt=10*GeV);
-   declareProperty("NormalizedElectronIsolationEt",m_normElectronIsolEt=0.2);
-
-   /** Photon selection */
-   declareProperty("PhotonPt",   m_photonPt=10*GeV);
-   declareProperty("PhotonEta",  m_photonEta=3.2);
-   declareProperty("PhotonIsEM", m_photonIsEM=0);
-
-   /** Muon selection */
-   declareProperty("MuonPt",                 m_muonPt=3.0*GeV);
-   declareProperty("MuonEta",                m_muonEta=2.7);
-   declareProperty("DoMuonIsolation",        m_doMuonIsolation=true);
-   declareProperty("MuonIsolationConeIndex", m_muonIsolationConeIndex=1);
-   declareProperty("MuonIsolationEt",        m_muonIsolationEt=10*GeV);
-   declareProperty("UseMatchChi2",           m_useMatchChi2=false);
-   declareProperty("MuonMatchChi2",          m_muonMatchChi2=100);
-   declareProperty("NormalizedMuonIsolationEt",m_normMuonIsolEt=0.2);
-   declareProperty("egDetailContainerName",
-                   m_egDetailContainerName="egDetailAOD");
-
-   /** TauJet selection */
-   declareProperty("TauJetPt",           m_tauJetPt=20*GeV);
-   declareProperty("TauJetEta",          m_tauJetEta=2.5);
-   declareProperty("TauJetLikelihood",   m_tauJetLikelihood=-6.0);
-   declareProperty("TauEleBDTCut", m_tauEleBDTCut=0.5); // not yet set - No 23 1007
-
-   /** Jet selection */
-   declareProperty("JetPt",          m_jetPt=20*GeV);
-   declareProperty("JetEta",         m_jetEta=5.0);
-   declareProperty("BJetLikelihood", m_bJetLikelihood=6.0);
-
-}
-
-StatusCode UserAnalysisSelectionTool::initialize() {
-
-   ATH_MSG_DEBUG( "in initialize()" );
-   ATH_MSG_DEBUG( " electron Flag/isEM val " << m_electronIsEMFlag << ","
-                  << m_electronIsEM );
-
-   return StatusCode::SUCCESS;
-}
-
-bool UserAnalysisSelectionTool::
-isSelected( const Analysis::Electron* electron ) const {
-
-   if( ! electron ) {
-      return false;
-   }
-
-   ATH_MSG_VERBOSE( "in electron isSelected()" );
-
-   bool select = ( ( electron->pt() > m_electronPt ) &&
-                   ( std::abs( electron->eta() ) < m_electronEta ) );
-
-   if( m_isAtlfast ) {
-      return select;
-   }
-
-   if( m_electronIsEMFlag == "Loose" ) {
-      select = select && ( ! electron->isem( egammaPIDObs::ElectronLoose ) );
-   } else if( m_electronIsEMFlag == "Medium" ) {
-      select = select && ( ! electron->isem( egammaPIDObs::ElectronMedium ) );
-   } else if( m_electronIsEMFlag == "Tight" ) {
-      select = select && ( ! electron->isem( egammaPIDObs::ElectronTight ) );
-   } else {
-      select = select && ( ( electron->isem() & m_electronIsEM ) ==0 );
-   }
-
-   ATH_MSG_VERBOSE( "in electron isSelected() -1 " );
-
-   if( m_authorEgammaOnly ) {
-      select = select && electron->author( egammaParameters::AuthorElectron );
-   }
-
-   ATH_MSG_VERBOSE( "in electron isSelected() - 2 " );
-   if( m_electronEtaWindCut ) {
-      const double diff = std::abs( electron->eta() ) - m_electronEtaWindCent;
-      select = select && ( std::abs( diff ) > m_electronEtaWind );
-   }
-
-   ATH_MSG_VERBOSE( "in electron isSelected() " << m_egDetailContainerName );
-
-   if( m_doElectronIsolation ) {
-      const EMShower* egdetail =
-            electron->detail< EMShower >( m_egDetailContainerName );
-      ATH_MSG_VERBOSE( "in electron isSelected() " << egdetail );
-      double etIsol = 0.;
-      if( egdetail ) {
-         const auto helper1 = m_electronIsolationConeIndex;
-         const egammaParameters::ParamDef helper2 =
-               static_cast< egammaParameters::ParamDef >( helper1 );
-         etIsol = egdetail->parameter( helper2 );
-      }
-      select = select && ( etIsol < m_electronIsolationEt );
-      if( electron->pt() ) {
-         select = select && ( ( etIsol / electron->pt() ) <
-                              m_normElectronIsolEt );
-      }
-   }
-   ATH_MSG_VERBOSE("in electron isSelected() - 4 ");
-
-   return select;
-}
-
-bool UserAnalysisSelectionTool::
-isSelected( const Analysis::Photon* photon ) const  {
-
-   if( ! photon ) {
-      return false;
-   }
-
-   bool select = ( ( photon->pt() > m_photonPt ) &&
-                   ( std::abs( photon->eta() ) < m_photonEta ) );
-
-   if( m_isAtlfast ) {
-      return select;
-   }
-
-   select = select && ( photon->isem() == m_photonIsEM );
-   return select;
-}
-
-bool UserAnalysisSelectionTool::
-isSelected( const Analysis::Muon* muon ) const {
-
-   if( ! muon ) {
-      return false;
-   }
-
-   bool select = ( ( muon->pt() > m_muonPt ) &&
-                   ( std::abs( muon->eta() ) < m_muonEta ) );
-
-   if( m_isAtlfast ) {
-      return select;
-   }
-   if( m_useMatchChi2 && muon->isCombinedMuon() ) {
-      select = select && ( muon->matchChi2() < m_muonMatchChi2 );
-   }
-   if( m_doMuonIsolation ) {
-      const MuonParameters::ParamDef helper =
-            static_cast< MuonParameters::ParamDef >( m_muonIsolationConeIndex );
-      const double etIsol = muon->parameter( helper );
-      select = select && ( etIsol < m_muonIsolationEt );
-      if ( muon->pt() ) {
-         select = select && ( ( etIsol / muon->pt() ) < m_normMuonIsolEt );
-      }
-   }
-
-   return select;
-}
-
-bool UserAnalysisSelectionTool::isSelected( const Jet* jet ) const {
-
-   if( ! jet ) {
-      return false;
-   }
-
-   bool select = ( ( jet->pt() > m_jetPt ) &&
-                   ( std::abs( jet->eta() ) < m_jetEta ) );
-
-   return select;
-}
-
-bool UserAnalysisSelectionTool::
-isSelected( const Rec::TrackParticle* trackParticle ) const {
-
-   if( ! trackParticle ) {
-      return false;
-   }
-
-   bool select = trackParticle->pt() > m_trackParticlePt;
-
-   return select;
-}
-
-bool UserAnalysisSelectionTool::
-isSelected( const CaloCluster* caloCluster ) const {
-
-   if( ! caloCluster ) {
-      return false;
-   }
-
-   bool select = caloCluster->e() > m_caloClusterE;
-
-   return select;
-}
-
-bool UserAnalysisSelectionTool::
-isSelected( const Analysis::TauJet* tauJet ) const {
-
-   if( ! tauJet ) {
-      return false;
-   }
-
-   const int numTrack = tauJet->numTrack();
-   bool select = ( ( tauJet->pt() > m_tauJetPt ) &&
-                   ( std::abs( tauJet->eta() ) < m_tauJetEta ) &&
-                   ( ( std::abs( tauJet->charge() ) - 1.0 ) < 0.001 ) &&
-                   ( ( numTrack == 1 ) || ( numTrack == 3 ) ) );
-
-   const Analysis::TauPID* tauId = tauJet->tauID();
-   if ( tauId ) {
-      const auto likelihood =
-            tauId->discriminant( TauJetParameters::Likelihood );
-      const auto bdtelescore =
-            tauId->discriminant( TauJetParameters::BDTEleScore );
-      select = select && ( ( likelihood > m_tauJetLikelihood ) &&
-                           ( bdtelescore > m_tauEleBDTCut ) );
-   }
-
-   return select;
-}
-
-bool UserAnalysisSelectionTool::isBJet( const Jet* jet ) const {
-
-   if( ! jet ) {
-      return false;
-   }
-
-   bool select = this->isSelected( jet );
-   return ( select && ( jet->getFlavourTagWeight() > m_bJetLikelihood ) );
-}
diff --git a/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/src/components/UserAnalysisUtils_entries.cxx b/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/src/components/UserAnalysisUtils_entries.cxx
deleted file mode 100644
index 4bff409d3ad5392a28918a78ffc079b0e4a1c498..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/AnalysisCommon/UserAnalysisUtils/src/components/UserAnalysisUtils_entries.cxx
+++ /dev/null
@@ -1,15 +0,0 @@
-//#include "UserAnalysisUtils/UserMuonTool.h"
-
-#include "UserAnalysisUtils/UserAnalysisPreparationTool.h"
-#include "UserAnalysisUtils/UserAnalysisSelectionTool.h"
-#include "UserAnalysisUtils/UserAnalysisOverlapCheckingTool.h"
-#include "UserAnalysisUtils/UserAnalysisOverlapRemovalTool.h"
-
- 
-//DECLARE_COMPONENT( UserMuonTool )
-
-DECLARE_COMPONENT( UserAnalysisPreparationTool )
-DECLARE_COMPONENT( UserAnalysisSelectionTool )
-DECLARE_COMPONENT( UserAnalysisOverlapCheckingTool )
-DECLARE_COMPONENT( UserAnalysisOverlapRemovalTool )
-
diff --git a/PhysicsAnalysis/D3PDMaker/TruthD3PDAnalysis/src/GenObjectsFilterTool.cxx b/PhysicsAnalysis/D3PDMaker/TruthD3PDAnalysis/src/GenObjectsFilterTool.cxx
index 17ce7b2194d2f66192a538d4a2ef5a22c8dfd8a4..25fb2193fbeefa0dfe86973d09da13a7dcaf50bc 100644
--- a/PhysicsAnalysis/D3PDMaker/TruthD3PDAnalysis/src/GenObjectsFilterTool.cxx
+++ b/PhysicsAnalysis/D3PDMaker/TruthD3PDAnalysis/src/GenObjectsFilterTool.cxx
@@ -95,7 +95,7 @@ bool GenObjectsFilterTool::pass( const HepMC::GenEvent* evt,
 
    bool isEmpty = ( evt->particles_size() == 0 );
    bool isDummy = ( ( evt->event_number() == -1 ) &&
-                    ( evt->signal_process_id() == 0 ) );
+                    ( HepMC::signal_process_id(evt) == 0 ) );
    if( isDummy ) isEmpty = false;
 
    if( m_removeEmptyEvents && isEmpty ) return false;
@@ -130,7 +130,7 @@ bool GenObjectsFilterTool::pass( const HepMC::GenEvent* evt,
 
 bool GenObjectsFilterTool::isBCHadron(const HepMC::GenParticle* part) const{
 
-  if(part->barcode() >= 200000) return false;
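+  // Barcodes of 200000 and above belong to particles created in simulation (Geant4)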
+  if(HepMC::barcode(part) >= 200000) return false;
   int type = HadronClass::type(part->pdg_id()).second;
   if(type == 5 || type == 4)  return true;
 
@@ -185,7 +185,7 @@ bool GenObjectsFilterTool::isRequested( const HepMC::GenParticle* part) const{
    double pt = p4.perp();
    double eta = p4.eta();
 
-   int barcode = part->barcode();
+   int barcode = HepMC::barcode(part);
    int pdg = part->pdg_id();
    int status = part->status();
 
@@ -384,7 +384,7 @@ bool GenObjectsFilterTool::pass( const HepMC::GenParticle* part,
    // If we don't want to specifically select charged truth tracks, then this
    // is already good enough:
    if( ! m_selectTruthTracks ) return true;
-   if (part->barcode() < 200000) {
+   if (HepMC::barcode(part) < 200000) {
      if( ! TruthHelper::IsGenStable()( part ) ) return false;
      if( ! TruthHelper::IsGenInteracting()( part ) ) return false;
    }
@@ -396,8 +396,8 @@ bool GenObjectsFilterTool::pass( const HepMC::GenParticle* part,
    const HepPDT::ParticleData* pd = m_partPropSvc->PDT()->particle( abs( pdg ) );
    if( ! pd ) {
      ATH_MSG_DEBUG( "Could not get particle data for pdg = " << pdg 
-		      << " status " << part->status() << " barcode " <<part->barcode()
-		      << " process id " <<part->parent_event()->signal_process_id());
+		      << " status " << part->status() << " barcode " <<HepMC::barcode(part)
+		      << " process id " <<HepMC::signal_process_id(part->parent_event()));
       return false;
    }
    float charge = pd->charge();
diff --git a/PhysicsAnalysis/D3PDMaker/TruthD3PDAnalysis/src/SimpleTruthParticleFilterTool.cxx b/PhysicsAnalysis/D3PDMaker/TruthD3PDAnalysis/src/SimpleTruthParticleFilterTool.cxx
index 9ec416c451ea033b23465b5a6f2739c31d2d4fdd..7ff4a1f18ba5e8b0171e76fbee66a0809654d5d7 100644
--- a/PhysicsAnalysis/D3PDMaker/TruthD3PDAnalysis/src/SimpleTruthParticleFilterTool.cxx
+++ b/PhysicsAnalysis/D3PDMaker/TruthD3PDAnalysis/src/SimpleTruthParticleFilterTool.cxx
@@ -40,10 +40,10 @@ SimpleTruthParticleFilterTool::isAccepted (const HepMC::GenParticle* p)
   bool ok = false;
 
   // First, the super simple thing : does the particle pass the simple cuts?
-  if ( abs(p->pdg_id())==m_filterID &&
+  if ( std::abs(p->pdg_id())==m_filterID &&
        p->momentum().perp()>m_minPt ) ok = true;
 
-  bool last = abs(p->pdg_id())==15;
+  bool last = std::abs(p->pdg_id())==15;
   if ( abs(p->pdg_id())==15 && p->status()!=1 && p->end_vertex() ){
     // Special handling for taus - take the ones that are last in the tau chain
     for (HepMC::GenVertex::particles_out_const_iterator pit=p->end_vertex()->particles_out_const_begin(); pit!=p->end_vertex()->particles_out_const_end();++pit){
@@ -58,7 +58,7 @@ SimpleTruthParticleFilterTool::isAccepted (const HepMC::GenParticle* p)
   if ( !last && // is it the last tau? (not a tau or not last -> last=false )
        p->status()%1000 != 1 &&
        !(p->status()%1000 == 2 && p->status()>1000) &&
-       !(p->status()==2 && (!p->end_vertex() || p->end_vertex()->barcode()<-200000) ) ) {
+       !(p->status()==2 && (!p->end_vertex() || HepMC::barcode(p->end_vertex())<-200000) ) ) {
     return false;
   }
 
diff --git a/PhysicsAnalysis/D3PDMaker/TruthD3PDAnalysis/src/TruthParticleFilterTool.cxx b/PhysicsAnalysis/D3PDMaker/TruthD3PDAnalysis/src/TruthParticleFilterTool.cxx
index 899d2473b55d02c50da11782cab8ae682cd84e83..fe4b9a19ad3bb31e3c59715c5c057db911d7422b 100644
--- a/PhysicsAnalysis/D3PDMaker/TruthD3PDAnalysis/src/TruthParticleFilterTool.cxx
+++ b/PhysicsAnalysis/D3PDMaker/TruthD3PDAnalysis/src/TruthParticleFilterTool.cxx
@@ -347,14 +347,14 @@ bool TruthParticleFilterTool::isBSM(const HepMC::GenParticle* part) const{
 
   int pdg = part->pdg_id();
 
-  if ( (31<abs(pdg) && abs(pdg)<38) || // BSM Higgs / W' / Z' / etc
-       abs(pdg)==39 ||
-       abs(pdg)==41 ||
-       abs(pdg)==42 ||
-       (1000000<abs(pdg) && abs(pdg)<1000040) || // left-handed SUSY
-       (2000000<abs(pdg) && abs(pdg)<2000040) || // right-handed SUSY
-       abs(pdg)==7 || abs(pdg)==8 || // 4th Generation
-       (abs(pdg)>=9000001 && abs(pdg)<=9000006) ) // Monotop from MadGraph
+  if ( (31<std::abs(pdg) && std::abs(pdg)<38) || // BSM Higgs / W' / Z' / etc
+       std::abs(pdg)==39 ||
+       std::abs(pdg)==41 ||
+       std::abs(pdg)==42 ||
+       (1000000<std::abs(pdg) && std::abs(pdg)<1000040) || // left-handed SUSY
+       (2000000<std::abs(pdg) && std::abs(pdg)<2000040) || // right-handed SUSY
+       std::abs(pdg)==7 || std::abs(pdg)==8 || // 4th Generation
+       (std::abs(pdg)>=9000001 && std::abs(pdg)<=9000006) ) // Monotop from MadGraph
     return true;
 
   return false;
@@ -364,20 +364,20 @@ bool TruthParticleFilterTool::isBoson(const HepMC::GenParticle* part) const{
 
   int pdg = part->pdg_id();
 
-  if(abs(pdg) != 22  &&
-     abs(pdg) != 23 &&
-     abs(pdg) != 24 &&
-     abs(pdg) != 25 ) return false;
+  if(std::abs(pdg) != 22  &&
+     std::abs(pdg) != 23 &&
+     std::abs(pdg) != 24 &&
+     std::abs(pdg) != 25 ) return false;
 
-  if(abs(pdg)==22 && part->momentum().perp()<3.*GeV) return false;
+  if(std::abs(pdg)==22 && part->momentum().perp()<3.*GeV) return false;
 
   return true;
 }
 
 bool TruthParticleFilterTool::isFsrFromLepton(const HepMC::GenParticle* part) const {
   int pdg = part->pdg_id();
-  if(abs(pdg) != 22) return false; // photon
-  if(part->barcode() >=  200000) return false; // Geant photon
+  if(std::abs(pdg) != 22) return false; // photon
+  if(HepMC::barcode(part) >=  200000) return false; // Geant photon
 
   HepMC::GenVertex* prod = part->production_vertex();
   if(!prod) return false; // no parent.
diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkEGamma/python/EGammaCommon.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkEGamma/python/EGammaCommon.py
index 711fa4faa8a6ba2be1e7d88f0bd24ed1b92676ba..cf21e8eff73426c1496bbd404daabd01d3d95561 100644
--- a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkEGamma/python/EGammaCommon.py
+++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkEGamma/python/EGammaCommon.py
@@ -91,13 +91,13 @@ ToolSvc += ElectronLHSelectorLooseBL
 #
 # Disabled as is missing in R22
 #
-'''
+
 from ElectronPhotonSelectorTools.ElectronPhotonSelectorToolsConf import AsgElectronChargeIDSelectorTool
 ElectronChargeIDSelector = AsgElectronChargeIDSelectorTool("ElectronChargeIDSelectorLoose")
 ElectronChargeIDSelector.primaryVertexContainer = "PrimaryVertices"
 ElectronChargeIDSelector.TrainingFile = "ElectronPhotonSelectorTools/ChargeID/ECIDS_20180731rel21Summer2018.root"
 ToolSvc += ElectronChargeIDSelector
-'''
+
 
 #====================================================================
 # FWD ELECTRON LH SELECTORS
@@ -240,7 +240,7 @@ print(ElectronPassLHTight)
 #
 # Disabled as is missing in R22
 #
-'''
+
 # decorate electrons with the output of ECIDS ----------------------------------------------------------------------
 ElectronPassECIDS = DerivationFramework__EGElectronLikelihoodToolWrapper( name = "ElectronPassECIDS",
                                                                           EGammaElectronLikelihoodTool = ElectronChargeIDSelector,
@@ -251,7 +251,7 @@ ElectronPassECIDS = DerivationFramework__EGElectronLikelihoodToolWrapper( name =
                                                                           StoreTResult = True)
 ToolSvc += ElectronPassECIDS
 print (ElectronPassECIDS)
-
+'''
 # decorate forward electrons with the output of LH loose
 ForwardElectronPassLHLoose = DerivationFramework__EGSelectionToolWrapper( name = "ForwardElectronPassLHLoose",
                                                                           EGammaSelectionTool = ForwardElectronLHSelectorLoose,
@@ -366,13 +366,13 @@ ElectronAmbiguity = DF_EGEAT(name               = "ElectronAdditionnalAmbiguity"
 ToolSvc += ElectronAmbiguity
 
 #
-# Commented ForwardElectronPassLHLoose, ForwardElectronPassLHMedium, ForwardElectronPassLHTight, ElectronPassECIDS tools due to they are not available in R22 yet
+# Commented out the ForwardElectronPassLHLoose, ForwardElectronPassLHMedium and ForwardElectronPassLHTight tools because they are not yet available in R22
 #
 # list of all the decorators so far
 EGAugmentationTools = [DFCommonPhotonsDirection,
                        ElectronPassLHVeryLoose, ElectronPassLHLoose, ElectronPassLHLooseBL, ElectronPassLHMedium, ElectronPassLHTight,
                        #ForwardElectronPassLHLoose, ForwardElectronPassLHMedium, ForwardElectronPassLHTight,
-                       #ElectronPassECIDS,
+                       ElectronPassECIDS,
                        PhotonPassIsEMLoose, PhotonPassIsEMTight, 
                        PhotonPassIsEMTightPtIncl, 
                        PhotonPassCleaning,
diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkEGamma/src/EGElectronLikelihoodToolWrapper.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkEGamma/src/EGElectronLikelihoodToolWrapper.cxx
index 32afa26be9d8ef46af346ee2f6be700febe3bdfe..c72ee01732b4e9311775a049f4f6fe9f626d42bf 100644
--- a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkEGamma/src/EGElectronLikelihoodToolWrapper.cxx
+++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkEGamma/src/EGElectronLikelihoodToolWrapper.cxx
@@ -120,10 +120,9 @@ namespace DerivationFramework {
       unsigned int isEM = (unsigned int) theAccept.getCutResultInvertedBitSet().to_ulong(); // this should work for both the cut-based and the LH selectors
       double result(0.); // initialise explicitly to avoid compilation warning. It will be overridden in the following block (result is used only if m_storeTResult is true)
 
-      // Lukas Heinrich: interface in master not yet available.
-      //      if (m_storeTResult) {
-      //	result = double(m_tool->calculate(pCopy));
-      //      }
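+      // The EventContext-aware calculate() interface is now available, so the TResult can be computed again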
+      if (m_storeTResult) {
+        result = double(m_tool->calculate(Gaudi::Hive::currentContext(), pCopy));
+      }
       
       // decorate the original object
       if(m_cut.empty()){
diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkJetEtMiss/python/ExtendedJetCommon.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkJetEtMiss/python/ExtendedJetCommon.py
index 082f7414d39394a1a6ee8b05fb6729cf565f33f8..b255d5cc77defe3f2b4dd7041b880f6fb4bed7ff 100644
--- a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkJetEtMiss/python/ExtendedJetCommon.py
+++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkJetEtMiss/python/ExtendedJetCommon.py
@@ -529,15 +529,21 @@ def addQGTaggerTool(jetalg, sequence, algname, truthjetalg=None ):
 ################################################################## 
 
 def applyOverlapRemoval(sequence=DerivationFrameworkJob):
+
+    from  DerivationFrameworkTau.TauCommon import AddTauAugmentation
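+    # Decorate taus with the loose working point so the overlap removal below can pick them up via TauLabel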
+    AddTauAugmentation(sequence,doLoose=True)
+
     from AssociationUtils.config import recommended_tools
     from AssociationUtils.AssociationUtilsConf import OverlapRemovalGenUseAlg
     outputLabel = 'DFCommonJets_passOR'
     bJetLabel = '' #default
+    tauLabel = 'DFTauLoose'
     orTool = recommended_tools(outputLabel=outputLabel,bJetLabel=bJetLabel)
     algOR = OverlapRemovalGenUseAlg('OverlapRemovalGenUseAlg',
-                OverlapLabel=outputLabel,
-                            OverlapRemovalTool=orTool,
-                            BJetLabel=bJetLabel)
+                                    OverlapLabel=outputLabel,
+                                    OverlapRemovalTool=orTool,
+                                    TauLabel=tauLabel,
+                                    BJetLabel=bJetLabel)
     sequence += algOR
 
     from DerivationFrameworkMuons.DerivationFrameworkMuonsConf import DerivationFramework__MuonJetDrTool
@@ -576,9 +582,9 @@ def eventCleanLoose_xAODColl(jetalg='AntiKt4EMTopo',sequence=DerivationFramework
     ecToolLoose.JetCleanPrefix = prefix
     ecToolLoose.JetCleaningTool = getJetCleaningTool("LooseBad")
     algCleanLoose = EventCleaningTestAlg('EventCleaningTestAlg_Loose',
-                            EventCleaningTool=ecToolLoose,
-                            JetCollectionName="AntiKt4EMTopoJets",
-                            EventCleanPrefix=prefix)
+                                         EventCleaningTool=ecToolLoose,
+                                         JetCollectionName="AntiKt4EMTopoJets",
+                                         EventCleanPrefix=prefix)
     sequence += algCleanLoose
 
 ##################################################################  
@@ -591,11 +597,11 @@ def eventCleanTight_xAODColl(jetalg='AntiKt4EMTopo',sequence=DerivationFramework
     ecToolTight.JetCleanPrefix = prefix
     ecToolTight.JetCleaningTool = getJetCleaningTool("TightBad")
     algCleanTight = EventCleaningTestAlg('EventCleaningTestAlg_Tight',
-                            EventCleaningTool=ecToolTight,
-                            JetCollectionName="AntiKt4EMTopoJets",
-                            EventCleanPrefix=prefix,
-                            CleaningLevel="TightBad",
-                            doEvent=False)
+                                         EventCleaningTool=ecToolTight,
+                                         JetCollectionName="AntiKt4EMTopoJets",
+                                         EventCleanPrefix=prefix,
+                                         CleaningLevel="TightBad",
+                                         doEvent=False)
     sequence += algCleanTight
 
 ##################################################################  
@@ -782,10 +788,9 @@ def addCHSPFlowObjects():
 applyJetCalibration_xAODColl("AntiKt4EMTopo")
 updateJVT_xAODColl("AntiKt4EMTopo")
 
-# Need DFCommonElectronsLHLoose for these to work
-#applyOverlapRemoval()
-#eventCleanLoose_xAODColl("AntiKt4EMTopo")
-#eventCleanTight_xAODColl("AntiKt4EMTopo")
-#eventCleanLooseLLP_xAODColl("AntiKt4EMTopo")
+applyOverlapRemoval()
+eventCleanLoose_xAODColl("AntiKt4EMTopo")
+eventCleanTight_xAODColl("AntiKt4EMTopo")
+eventCleanLooseLLP_xAODColl("AntiKt4EMTopo")
 #eventCleanSuperLooseLLP_xAODColl("AntiKt4EMTopo")
 #eventCleanVeryLooseLLP_xAODColl("AntiKt4EMTopo")
diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkMCTruth/python/MCTruthCommon.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkMCTruth/python/MCTruthCommon.py
index db96f6c2c1ce4ae91c7c1857258c1809844f3426..0d512dd82aeb147ec97c22b7364f8d195ddaef28 100644
--- a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkMCTruth/python/MCTruthCommon.py
+++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkMCTruth/python/MCTruthCommon.py
@@ -59,165 +59,122 @@ def simplePJGetter(Label, InputContainer):
 
 # Helper for adding truth jet collections
 def addTruthJets(kernel=None, decorationDressing=None):
-    if not dfInputIsEVNT:
-        return
-    # Ensure that we are adding it to something, and that we haven't run it already
     if kernel is None:
         from DerivationFrameworkCore.DerivationFrameworkMaster import DerivationFrameworkJob
         kernel = DerivationFrameworkJob
-
-    if not hasattr(kernel,'MCTruthCommonJetTruthCopyAlg'):
-        from AthenaCommon.AppMgr import ToolSvc
-
-        # Check where we get the barcode offset from
-        barCodeFromMetadata=2
-        if objKeyStore.isInInput( "McEventCollection", "GEN_EVENT" ):
-              barCodeFromMetadata=0
-
-        # Set up the copy truth jet particle algorithms
+    # Make sure that we don't try to check the simulation metadata when running on EVNT input
+    barCodeFromMetadata=2
+    if objKeyStore.isInInput( "McEventCollection", "GEN_EVENT" ):
+        barCodeFromMetadata=0
+    from JetRec.JetRecStandardToolManager import jtm
+    if decorationDressing is not None and not hasattr(jtm,'truthpartdressedwz'):
         from ParticleJetTools.ParticleJetToolsConf import CopyTruthJetParticles
-        ToolSvc += CopyTruthJetParticles("TruthPartCopy",
-                                         OutputName="JetInputTruthParticles",
-                                         MCTruthClassifier=ToolSvc.DFCommonTruthClassifier,BarCodeFromMetadata=barCodeFromMetadata)
-        ToolSvc += CopyTruthJetParticles("TruthPartCopyWZ",
-                                         OutputName="JetInputTruthParticlesNoWZ",
-                                         MCTruthClassifier=ToolSvc.DFCommonTruthClassifier,BarCodeFromMetadata=barCodeFromMetadata,
-                                         IncludePromptLeptons=False)
-        ToolSvc += CopyTruthJetParticles("TruthPartCopyDressedWZ",
-                                         OutputName="JetInputTruthParticlesDressedWZ",
-                                         MCTruthClassifier=ToolSvc.DFCommonTruthClassifier,
-                                         IncludePromptLeptons=False,IncludePromptPhotons=False,
-                                         IncludeMuons=True,IncludeNeutrinos=True,BarCodeFromMetadata=barCodeFromMetadata,
-                                         FSRPhotonCone=-1., DressingDecorationName=decorationDressing)
-        ToolSvc += CopyTruthJetParticles("TruthPartCopyCharged", OutputName="JetInputTruthParticlesCharged",
-                                         MCTruthClassifier=ToolSvc.DFCommonTruthClassifier,
+        if 'truthpartdressedwz' not in jtm.tools:
+            jtm += CopyTruthJetParticles("truthpartdressedwz", OutputName="JetInputTruthParticlesDressedWZ",
+                                          MCTruthClassifier=jtm.JetMCTruthClassifier,
+                                          IncludePromptLeptons=False,IncludePromptPhotons=False,
+                                          IncludeMuons=True,IncludeNeutrinos=True,BarCodeFromMetadata=barCodeFromMetadata,
+                                          FSRPhotonCone=-1., DressingDecorationName=decorationDressing
+                                         )
+        # Add a jet tool runner for this thing
+        from JetRec.JetRecConf import JetToolRunner,JetAlgorithm,PseudoJetAlgorithm
+        from JetRec.JetRecFlags import jetFlags
+        jtm += JetToolRunner("jetdressedwzrun", EventShapeTools=[], Tools=[jtm.truthpartdressedwz], Timer=jetFlags.timeJetToolRunner() )
+        # And an algorithm to run in
+        kernel += JetAlgorithm("jetdressedwzalg")
+        jetdressedwzalg = kernel.jetdressedwzalg
+        jetdressedwzalg.Tools = [ jtm.jetdressedwzrun ]
+        if 'truthdressedwzget' not in jtm.tools:
+            jtm += PseudoJetAlgorithm("truthdressedwzget",
+                                      Label = "TruthDressedWZ",
+                                      InputContainer = jtm.truthpartdressedwz.OutputName,
+                                      OutputContainer = "PseudoJetTruthDressedWZ",
+                                      SkipNegativeEnergy = True
+                                     )
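+        # Start from the standard truth getters and swap in the dressed-WZ getter as the primary input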
+        jtm.gettersMap['truthdressedwz'] = list(jtm.gettersMap['truth'])
+        jtm.gettersMap['truthdressedwz'][0] = jtm.truthdressedwzget
+    if not hasattr(jtm,'truthpartcharged'):
+        from ParticleJetTools.ParticleJetToolsConf import CopyTruthJetParticles
+        if 'truthpartcharged' not in jtm.tools:
+            jtm += CopyTruthJetParticles("truthpartcharged", OutputName="JetInputTruthParticlesCharged",
+                                         MCTruthClassifier=jtm.JetMCTruthClassifier,
                                          ChargedParticlesOnly=True,
-                                         BarCodeFromMetadata=barCodeFromMetadata)
-        from JetRec import JetRecConf
-        kernel += JetRecConf.JetAlgorithm("MCTruthCommonJetTruthCopyAlg",
-                                          Tools=[ToolSvc.TruthPartCopy,ToolSvc.TruthPartCopyWZ,
-                                                 ToolSvc.TruthPartCopyDressedWZ,ToolSvc.TruthPartCopyCharged])
-
-        # Set up pseudo-jet getters
-        from JetRec import JetRecConf
-        kernel += simplePJGetter( Label = "Truth", InputContainer = ToolSvc.TruthPartCopy.OutputName )
-        kernel += simplePJGetter( Label = "TruthWZ", InputContainer = ToolSvc.TruthPartCopyWZ.OutputName )
-        kernel += simplePJGetter( Label = "TruthDressedWZ", InputContainer = ToolSvc.TruthPartCopyDressedWZ.OutputName )
-        kernel += simplePJGetter( Label = "TruthCharged", InputContainer = ToolSvc.TruthPartCopyCharged.OutputName )
-
-        # Set up the jet builder (no area moments)
-        from AthenaCommon import CfgMgr
-        DFCommon_jbld = CfgMgr.JetFromPseudojet("DFCommon_jbld")
-
-        # Tool for parton labeling
-        from ParticleJetTools import ParticleJetToolsConf
-        ToolSvc += ParticleJetToolsConf.Analysis__JetPartonTruthLabel("partontruthlabel")
-
-        # Set up the jet finder, tool using the finder, and algorithm using the tool
-        threshold = 15000. if dfInputIsEVNT else 5000.
-        if not objKeyStore.isInInput( "xAOD::JetContainer","AntiKt4TruthJets") and not hasattr(kernel,'AntiKt4TruthJetsAlg'):
-            AntiKt4TruthJetsFinder = CfgMgr.JetFinder("AntiKt4TruthJetsFinder",
-                                            JetAlgorithm = "AntiKt",
-                                            JetRadius = 0.4,
-                                            JetBuilder = DFCommon_jbld,
-                                            GhostArea = 0.01,
-                                            PtMin = threshold
-                                            )
-            #Now we setup a JetRecTool which will use the above JetFinder
-            AntiKt4TruthJetsRec = CfgMgr.JetRecTool("AntiKt4TruthJetsRec",
-                                             JetFinder = AntiKt4TruthJetsFinder,
-                                             InputPseudoJets = [kernel.TruthGet.OutputContainer],
-                                             OutputContainer = "AntiKt4TruthJets",
-                                             JetModifiers = [ToolSvc.partontruthlabel]
-                                            )
-            kernel += CfgMgr.JetAlgorithm("AntiKt4TruthJetsAlg", Tools=[AntiKt4TruthJetsRec])
-        if not objKeyStore.isInInput( "xAOD::JetContainer","AntiKt4TruthWZJets") and not hasattr(kernel,'jetalgAntiKt4TruthWZ'):
-            AntiKt4TruthWZJetsFinder = CfgMgr.JetFinder("AntiKt4TruthWZJetsFinder",
-                                            JetAlgorithm = "AntiKt",
-                                            JetRadius = 0.4,
-                                            JetBuilder = DFCommon_jbld,
-                                            GhostArea = 0.01,
-                                            PtMin = threshold
-                                            )
-            AntiKt4TruthWZJetsRec = CfgMgr.JetRecTool("AntiKt4TruthWZJetsRec",
-                                             JetFinder = AntiKt4TruthWZJetsFinder,
-                                             InputPseudoJets = [kernel.TruthWZGet.OutputContainer],
-                                             OutputContainer = "AntiKt4TruthWZJets",
-                                             JetModifiers = [ToolSvc.partontruthlabel]
-                                            )
-            kernel += CfgMgr.JetAlgorithm("AntiKt4TruthWZJetsAlg", Tools=[AntiKt4TruthWZJetsRec])
-        if not objKeyStore.isInInput( "xAOD::JetContainer","AntiKt4TruthDressedWZJets") and decorationDressing is not None:
-            AntiKt4TruthDressedWZJetsFinder = CfgMgr.JetFinder("AntiKt4TruthDressedWZJetsFinder",
-                                            JetAlgorithm = "AntiKt",
-                                            JetRadius = 0.4,
-                                            JetBuilder = DFCommon_jbld,
-                                            GhostArea = 0.01,
-                                            PtMin = threshold
-                                            )
-            AntiKt4TruthDressedWZJetsRec = CfgMgr.JetRecTool("AntiKt4TruthDressedWZJetsRec",
-                                             JetFinder = AntiKt4TruthDressedWZJetsFinder,
-                                             InputPseudoJets = [kernel.TruthDressedWZGet.OutputContainer],
-                                             OutputContainer = "AntiKt4TruthDressedWZJets",
-                                             JetModifiers = [ToolSvc.partontruthlabel]
-                                            )
-            kernel += CfgMgr.JetAlgorithm("AntiKt4TruthDressedWZJetsAlg", Tools=[AntiKt4TruthDressedWZJetsRec])
-        if not objKeyStore.isInInput( "xAOD::JetContainer","AntiKt2TruthChargedJets"):
-            AntiKt2TruthChargedJetsFinder = CfgMgr.JetFinder("AntiKt2TruthChargedJetsFinder",
-                                            JetAlgorithm = "AntiKt",
-                                            JetRadius = 0.2,
-                                            JetBuilder = DFCommon_jbld,
-                                            GhostArea = 0.01,
-                                            PtMin = 5000.
-                                            )
-            AntiKt2TruthChargedJetsRec = CfgMgr.JetRecTool("AntiKt2TruthChargedJetsRec",
-                                             JetFinder = AntiKt2TruthChargedJetsFinder,
-                                             InputPseudoJets = [kernel.TruthChargedGet.OutputContainer],
-                                             OutputContainer = "AntiKt2TruthChargedJets",
-                                             JetModifiers = [ToolSvc.partontruthlabel]
-                                            )
-            kernel += CfgMgr.JetAlgorithm("AntiKt2TruthChargedJetsAlg", Tools=[AntiKt2TruthChargedJetsRec])
-        if not objKeyStore.isInInput( "xAOD::JetContainer","AntiKt10TruthJets") and not hasattr(kernel,'AntiKt10TruthJetsAlg'):
-            AntiKt10TruthJetsFinder = CfgMgr.JetFinder("AntiKt10TruthJetsFinder",
-                                             JetAlgorithm = "AntiKt",
-                                             JetRadius = 1.0,
-                                             JetBuilder = DFCommon_jbld,
-                                             GhostArea = 0.01,
-                                             PtMin = 50000.
-                                             )
-            #Now we setup a JetRecTool which will use the above JetFinder
-            AntiKt10TruthJetsRec = CfgMgr.JetRecTool("AntiKt10TruthJetsRec",
-                                             JetFinder = AntiKt10TruthJetsFinder,
-                                             InputPseudoJets = [kernel.TruthGet.OutputContainer],
-                                             OutputContainer = "AntiKt10TruthJets",
-                                             JetModifiers = [ToolSvc.partontruthlabel]
-                                            )
-            kernel += CfgMgr.JetAlgorithm("AntiKt10TruthJetsAlg", Tools=[AntiKt10TruthJetsRec])
-        if not objKeyStore.isInInput( "xAOD::JetContainer","AntiKt10TruthSoftDropBeta100Zcut10Jets") and not hasattr(kernel,'AntiKt10TruthSoftDropBeta100Zcut10JetsAlg'):
-            from JetRec.JetRecConf import JetSoftDrop
-            groomer = JetSoftDrop("AntiKt10TruthSoftDropBeta100Zcut10JetsGroomer",
-                                  ZCut = 0.1,
-                                  Beta = 1.0,
-                                  R0   = 1.0,
-                                  JetBuilder = DFCommon_jbld)
-
-            AntiKt10TruthSoftDropBeta100Zcut10JetsFinder = CfgMgr.JetFinder("AntiKt10TruthJetsFinder",
-                                                                            JetAlgorithm = "AntiKt",
-                                                                            JetRadius = 1.0,
-                                                                            JetBuilder = DFCommon_jbld,
-                                                                            GhostArea = 0.01,
-                                                                            PtMin = 50000.
-                                                                            )
-            from JetSubStructureMomentTools.JetSubStructureMomentToolsConf import EnergyCorrelatorTool
-            DFCommon_EnCorr = EnergyCorrelatorTool("DFCommon_EnCorr", Beta = 1.0)
-            from JetSubStructureMomentTools.JetSubStructureMomentToolsConf import NSubjettinessTool
-            DFCommon_NSubjettiness = NSubjettinessTool("DFCommon_NSubjettiness",Alpha = 1.0)
-            AntiKt10TruthSoftDropBeta100Zcut10JetsRec = CfgMgr.JetRecTool("AntiKt10TruthSoftDropBeta100Zcut10JetsRec",
-                                                                          JetGroomer = groomer,
-                                                                          InputPseudoJets = [kernel.TruthGet.OutputContainer],
-                                                                          OutputContainer = "AntiKt10TruthSoftDropBeta100Zcut10Jets",
-                                                                          JetModifiers = [ToolSvc.partontruthlabel,DFCommon_EnCorr,DFCommon_NSubjettiness],
-                                                                          JetFinder = AntiKt10TruthSoftDropBeta100Zcut10JetsFinder)
-            kernel += CfgMgr.JetAlgorithm("AntiKt10TruthSoftDropBeta100Zcut10JetsAlg",Tools=[AntiKt10TruthSoftDropBeta100Zcut10JetsRec])
+                                         BarCodeFromMetadata=barCodeFromMetadata
+                                        )
+        # Add a jet tool runner for this thing
+        from JetRec.JetRecConf import JetToolRunner,JetAlgorithm,PseudoJetAlgorithm
+        from JetRec.JetRecFlags import jetFlags  # needed if the dressed-WZ block above was skipped
+        jtm += JetToolRunner("jetchargedrun", EventShapeTools=[], Tools=[jtm.truthpartcharged], Timer=jetFlags.timeJetToolRunner() )
+        # And an algorithm to run in
+        kernel += JetAlgorithm("jetchargedalg")
+        jetchargedalg = kernel.jetchargedalg
+        jetchargedalg.Tools = [ jtm.jetchargedrun ]
+        if 'truthchargedget' not in jtm.tools:
+            jtm += PseudoJetAlgorithm("truthchargedget",
+                                      Label = "TruthCharged",
+                                      InputContainer = jtm.truthpartcharged.OutputName,
+                                      OutputContainer = "PseudoJetTruthCharged",
+                                      SkipNegativeEnergy = True
+                                     )
+        jtm.gettersMap['truthcharged'] = [jtm.truthchargedget]
+
+    # Add jet algorithms if they aren't there
+    from JetRec.JetRecStandard import jtm
+    from JetRec.JetRecConf import JetAlgorithm
+    from JetRec.JetRecFlags import jetFlags  # used by the flavour-ghost setup below
+    truth_modifiers = [jtm.truthpartondr, jtm.partontruthlabel, jtm.jetdrlabeler, jtm.trackjetdrlabeler]
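+    # Jet pT thresholds are in MeV: 15 GeV when running from EVNT input, 5 GeV otherwise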
+    threshold = 15000. if dfInputIsEVNT else 5000.
+    if not objKeyStore.isInInput( "xAOD::JetContainer","AntiKt4TruthJets") and not hasattr(kernel,'jetalgAntiKt4Truth'):
+        # Standard truth jets
+        # To remove jet constituents add the modifier jtm.removeconstit
+        from DerivationFrameworkJetEtMiss.JetCommon import addStandardJets
+        addStandardJets("AntiKt", 0.4, "Truth", threshold, mods=truth_modifiers, algseq=kernel, outputGroup="DFCommonMCTruthJets")
+    if not objKeyStore.isInInput( "xAOD::JetContainer","AntiKt4TruthWZJets") and not hasattr(kernel,'jetalgAntiKt4TruthWZ'):
+        # WZ Truth Jets - handle non-dressed case
+        from DerivationFrameworkJetEtMiss.JetCommon import addStandardJets
+        addStandardJets("AntiKt", 0.4, "TruthWZ", threshold, mods=truth_modifiers, algseq=kernel, outputGroup="DFCommonMCTruthJets")
+    if not objKeyStore.isInInput( "xAOD::JetContainer","AntiKt4TruthDressedWZJets") and decorationDressing is not None:
+        # WZ Dressed Truth Jets - handle dressed case
+        from DerivationFrameworkJetEtMiss.JetCommon import addStandardJets
+        addStandardJets("AntiKt", 0.4, "TruthDressedWZ", ptmin=threshold, mods="truth_ungroomed", algseq=kernel, outputGroup="DFCommonMCTruthJets")
+    if not objKeyStore.isInInput( "xAOD::JetContainer","AntiKt2TruthChargedJets"):
+        # R=0.2 truth charged jets
+        from DerivationFrameworkJetEtMiss.JetCommon import addStandardJets
+        addStandardJets("AntiKt", 0.2, "TruthCharged", 5000, mods=truth_modifiers, algseq=kernel, outputGroup="DFCommonMCTruthJets")
+    if not objKeyStore.isInInput( "xAOD::JetContainer","AntiKt10TruthJets") and not hasattr(kernel,'jetalgAntiKt10Truth'):
+        # AntiKt2 truth charged jets ghost association
+        from JetRec.JetRecConf import PseudoJetAlgorithm
+        if 'gakt2truthchargedget' not in jtm.tools:
+            jtm += PseudoJetAlgorithm("gakt2truthchargedget", # give a unique name
+                                      InputContainer = "AntiKt2TruthChargedJets", # SG key
+                                      Label = "GhostAntiKt2TruthChargedJets",   # this is the name you'll use to retrieve associated ghosts
+                                      OutputContainer = "PseudoJetGhostAntiKt2TruthChargedJet",
+                                      SkipNegativeEnergy = True,
+                                     )
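+        # Extend the standard truth getters with the charged-jet ghost getter and the flavour ghost getters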
+        trackjetgetters = []
+        trackjetgetters += [jtm.gakt2truthchargedget]
+        truthgetters = [jtm.truthget]
+        truthgetters += trackjetgetters
+        flavorgetters = []
+        for ptype in jetFlags.truthFlavorTags():
+            flavorgetters += [getattr(jtm, "gtruthget_" + ptype)]
+        truthgetters   += flavorgetters
+        jtm.gettersMap["truth"]   = list(truthgetters)
+
+        # NB! This line works together with the next block. Some care is required here!
+        # If we build groomed jets, the jet code will automatically build ungroomed jets, so no need to add them separately
+        # Large-R ungroomed jets
+        if objKeyStore.isInInput( "xAOD::JetContainer","AntiKt10TruthTrimmedPtFrac5SmallR20Jets") or objKeyStore.isInInput( "xAOD::JetContainer","AntiKt10TruthSoftDropBeta100Zcut10Jets"):
+            from DerivationFrameworkJetEtMiss.JetCommon import addStandardJets
+            addStandardJets('AntiKt', 1.0, 'Truth', ptmin=50000, mods=truth_modifiers, algseq=kernel, outputGroup="DFCommonMCTruthJets")
+    if not objKeyStore.isInInput( "xAOD::JetContainer","AntiKt10TruthTrimmedPtFrac5SmallR20Jets") and not hasattr(kernel,'jetalgAntiKt10TruthTrimmedPtFrac5SmallR20'):
+        #Large R jets
+        from DerivationFrameworkJetEtMiss.JetCommon import addTrimmedJets
+        addTrimmedJets('AntiKt', 1.0, 'Truth', rclus=0.2, ptfrac=0.05, mods="truth_groomed",
+                       algseq=kernel, outputGroup="Trimmed", writeUngroomed=False)
+    if not objKeyStore.isInInput( "xAOD::JetContainer","AntiKt10TruthSoftDropBeta100Zcut10Jets") and not hasattr(kernel,'jetalgAntiKt10TruthSoftDropBeta100Zcut10'):
+        from DerivationFrameworkJetEtMiss.JetCommon import addSoftDropJets
+        addSoftDropJets('AntiKt', 1.0, 'Truth', beta=1.0, zcut=0.1, mods="truth_groomed",
+                        algseq=kernel, outputGroup="SoftDrop", writeUngroomed=False)
+
 
 
 # Helper for scheduling the truth MET collection
@@ -576,28 +533,29 @@ def addTruthEnergyDensity(kernel=None):
     # Truth energy density tools
     from EventShapeTools.EventDensityConfig import configEventDensityTool,EventDensityAthAlg
     from AthenaCommon.AppMgr import ToolSvc
+    from JetRec.JetRecStandard import jtm
     # Algorithms for the energy density - needed only if e/gamma hasn't set things up already
     if not hasattr(ToolSvc,'EDTruthCentralTool'):
         DFCommonTruthCentralEDTool = configEventDensityTool("DFCommonTruthCentralEDTool",
-                                                            kernel.TruthGet.OutputContainer.replace('PseudoJet',''),
+                                                            jtm.truthget.Label,
                                                             0.5,
                                                             AbsRapidityMax      = 1.5,
                                                             OutputContainer     = "TruthIsoCentralEventShape",
                                                            )
         # Note the helper function mangles the naming in a specific way that is not sufficiently general
-        DFCommonTruthCentralEDTool.InputContainer = kernel.TruthGet.OutputContainer
+        DFCommonTruthCentralEDTool.InputContainer = jtm.truthget.OutputContainer
         ToolSvc += DFCommonTruthCentralEDTool
         kernel += EventDensityAthAlg("DFCommonTruthCentralEDAlg", EventDensityTool = DFCommonTruthCentralEDTool )
     if not hasattr(ToolSvc,'EDTruthForwardTool'):
         DFCommonTruthForwardEDTool = configEventDensityTool("DFCommonTruthForwardEDTool",
-                                                            kernel.TruthGet.OutputContainer.replace('PseudoJet',''),
+                                                            jtm.truthget.Label,
                                                             0.5,
                                                             AbsRapidityMin      = 1.5,
                                                             AbsRapidityMax      = 3.0,
                                                             OutputContainer     = "TruthIsoForwardEventShape",
                                                            )
         # Note the helper function mangles the naming in a specific way that is not sufficiently general
-        DFCommonTruthForwardEDTool.InputContainer = kernel.TruthGet.OutputContainer
+        DFCommonTruthForwardEDTool.InputContainer = jtm.truthget.OutputContainer
         ToolSvc += DFCommonTruthForwardEDTool
         kernel += EventDensityAthAlg("DFCommonTruthForwardEDAlg", EventDensityTool = DFCommonTruthForwardEDTool )
 
diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkMCTruth/src/CompactHardTruth.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkMCTruth/src/CompactHardTruth.cxx
index fa6cd77fc8e7eabf5a9785909811993d86741385..ed7bc5a0ea29b04c81316d2cc9fded4569c08755 100644
--- a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkMCTruth/src/CompactHardTruth.cxx
+++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkMCTruth/src/CompactHardTruth.cxx
@@ -4,10 +4,10 @@
   Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
 */
 
-// CompactHardTruth.cxx 
+// CompactHardTruth.cxx
 // Implementation file for class CompactHardTruth
 // Author: Frank Paige <paige@bnl.gov>
-/////////////////////////////////////////////////////////////////// 
+///////////////////////////////////////////////////////////////////
 
 // McParticleTests includes
 #include "DerivationFrameworkMCTruth/CompactHardTruth.h"
@@ -25,10 +25,10 @@
 //#include "McParticleKernel/ITruthParticleCnvTool.h"
 //#include "McParticleEvent/TruthParticle.h"
 //#include "McParticleEvent/TruthParticleContainer.h"
-#include "GeneratorObjects/McEventCollection.h"
 #include "AtlasHepMC/GenEvent.h"
 #include "AtlasHepMC/GenParticle.h"
 #include "AtlasHepMC/GenVertex.h"
+#include "GeneratorObjects/McEventCollection.h"
 // Needed for FourVector
 #include "AtlasHepMC/SimpleVector.h"
 
@@ -37,56 +37,40 @@
 
 namespace DerivationFramework {
 
-/////////////////////////////////////////////////////////////////// 
-// Public methods: 
-/////////////////////////////////////////////////////////////////// 
+///////////////////////////////////////////////////////////////////
+// Public methods:
+///////////////////////////////////////////////////////////////////
 
 // Constructors
 ////////////////
-CompactHardTruth::CompactHardTruth( const std::string& name, 
-                    ISvcLocator* pSvcLocator ) : 
-  ::AthAlgorithm( name, pSvcLocator ),
-  m_mcEventsName ("GEN_AOD"),
-  m_thinnedMcEventsName ("GEN_AODTHIN"),
-  m_partonCut(10000.),
-  m_hardCut(10000.),
-  m_danglePtCut(0.),
-  m_maxCount(0)
-{
+CompactHardTruth::CompactHardTruth(const std::string& name, ISvcLocator* pSvcLocator)
+    : ::AthAlgorithm(name, pSvcLocator)
+    , m_mcEventsName("GEN_AOD")
+    , m_thinnedMcEventsName("GEN_AODTHIN")
+    , m_partonCut(10000.)
+    , m_hardCut(10000.)
+    , m_danglePtCut(0.)
+    , m_maxCount(0) {
   //
   // Property declaration
-  // 
-  declareProperty("McEvent",
-                  m_mcEventsName,
-                  "input McEventCollection container name");
-  declareProperty("McEventOut",
-                  m_thinnedMcEventsName,
-                  "output McEventCollection container name");
-  declareProperty("ShowerMassCut",
-                  m_partonCut,
-                  "mass cut for thinning parton shower");
-  declareProperty("SoftMtCut",
-                  m_hardCut,
-                  "mt cut for underlying event showers");
-  declareProperty("DanglePtCut",
-                   m_danglePtCut,
-                   "maximum pt for dangling partons");
-
-  declareProperty("MaxCount",
-                  m_maxCount,
-                  "maximum number of events to print");
+  //
+  declareProperty("McEvent", m_mcEventsName, "input McEventCollection container name");
+  declareProperty("McEventOut", m_thinnedMcEventsName, "output McEventCollection container name");
+  declareProperty("ShowerMassCut", m_partonCut, "mass cut for thinning parton shower");
+  declareProperty("SoftMtCut", m_hardCut, "mt cut for underlying event showers");
+  declareProperty("DanglePtCut", m_danglePtCut, "maximum pt for dangling partons");
+
+  declareProperty("MaxCount", m_maxCount, "maximum number of events to print");
 }
 
 // Destructor
 ///////////////
-CompactHardTruth::~CompactHardTruth()
-{}
+CompactHardTruth::~CompactHardTruth() {}
 
 // Athena Algorithm's Hooks
 ////////////////////////////
-StatusCode CompactHardTruth::initialize()
-{
-  ATH_MSG_INFO ("Initializing " << name() << "...");
+StatusCode CompactHardTruth::initialize() {
+  ATH_MSG_INFO("Initializing " << name() << "...");
 
   m_evtCount = -1;
   m_missCount = 0;
@@ -98,96 +82,83 @@ StatusCode CompactHardTruth::initialize()
   m_thinVertices = 0;
 
   // Print jobOption inputs
-  ATH_MSG_INFO ("-------------------------------------------------" );
-  ATH_MSG_INFO ("jobOption McEvent            " <<m_mcEventsName );
-  ATH_MSG_INFO ("jobOption McEventOut         " <<m_thinnedMcEventsName );
-  ATH_MSG_INFO ("jobOption ShowerMassCut      " <<m_partonCut );
-  ATH_MSG_INFO ("jobOption SoftMtCut          " <<m_hardCut );
-  ATH_MSG_INFO ("jobOption MaxCount           " <<m_maxCount );
-  ATH_MSG_INFO ("-------------------------------------------------" );
+  ATH_MSG_INFO("-------------------------------------------------");
+  ATH_MSG_INFO("jobOption McEvent            " << m_mcEventsName);
+  ATH_MSG_INFO("jobOption McEventOut         " << m_thinnedMcEventsName);
+  ATH_MSG_INFO("jobOption ShowerMassCut      " << m_partonCut);
+  ATH_MSG_INFO("jobOption SoftMtCut          " << m_hardCut);
+  ATH_MSG_INFO("jobOption MaxCount           " << m_maxCount);
+  ATH_MSG_INFO("-------------------------------------------------");
 
   return StatusCode::SUCCESS;
 }
 
-
-StatusCode CompactHardTruth::finalize()
-{
+StatusCode CompactHardTruth::finalize() {
 
   ATH_MSG_INFO("Finalizing DerivationFramework::CompactHardTruth ");
-  ATH_MSG_INFO("Missing items limiting reclustering " <<m_missCount);
+  ATH_MSG_INFO("Missing items limiting reclustering " << m_missCount);
 
-  ATH_MSG_INFO("Dangling partons pt cut:  " <<m_danglePtCut);
-  ATH_MSG_INFO("Dangling partons found:   " <<m_dangleFound);
-  ATH_MSG_INFO("Dangling partons removed: " <<m_dangleRemoved);
-  ATH_MSG_INFO("Dangling partons max pt:  " <<m_danglePtMax);
+  ATH_MSG_INFO("Dangling partons pt cut:  " << m_danglePtCut);
+  ATH_MSG_INFO("Dangling partons found:   " << m_dangleFound);
+  ATH_MSG_INFO("Dangling partons removed: " << m_dangleRemoved);
+  ATH_MSG_INFO("Dangling partons max pt:  " << m_danglePtMax);
 
-  ATH_MSG_INFO("CompactHardTruth total particles:  " <<m_thinParticles);
-  ATH_MSG_INFO("CompactHardTruth total vertices:   " <<m_thinVertices);
+  ATH_MSG_INFO("CompactHardTruth total particles:  " << m_thinParticles);
+  ATH_MSG_INFO("CompactHardTruth total vertices:   " << m_thinVertices);
 
   return StatusCode::SUCCESS;
 }
 
-
-StatusCode CompactHardTruth::execute() {  
+StatusCode CompactHardTruth::execute() {
 
   ++m_evtCount;
-  //if( m_evtCount%100 == 0 ){
-    ATH_MSG_INFO("Executing " <<name() <<" " <<m_evtCount);
+  // if( m_evtCount%100 == 0 ){
+  ATH_MSG_INFO("Executing " << name() << " " << m_evtCount);
   //}
 
   // Normally doPrint is used to print the first m_maxCount events
-  // before and after thinning. 
+  // before and after thinning.
   // doExtra adds extra intermediate event printouts.
   // doDebug allows debug printout for a range of events.
   bool doPrint = m_evtCount < m_maxCount;
   bool doDebug = false;
   bool doExtra = false;
-  //doDebug = doPrint;
-  //doExtra = doPrint;
+  // doDebug = doPrint;
+  // doExtra = doPrint;
 
   // Retrieve input data
   const McEventCollection* mcEvts = 0;
-  if (!evtStore()->retrieve(mcEvts, m_mcEventsName).isSuccess() ||
-      0 == mcEvts) {
-    ATH_MSG_WARNING("could not retrieve mc collection at ["
-                 << m_mcEventsName << "]!");
+  if (!evtStore()->retrieve(mcEvts, m_mcEventsName).isSuccess() || 0 == mcEvts) {
+    ATH_MSG_WARNING("could not retrieve mc collection at [" << m_mcEventsName << "]!");
     return StatusCode::FAILURE;
   }
 
   if (mcEvts->empty()) {
-    ATH_MSG_WARNING("empty McEventCollection at [" 
-                    << m_mcEventsName << "]");
+    ATH_MSG_WARNING("empty McEventCollection at [" << m_mcEventsName << "]");
     return StatusCode::SUCCESS;
   }
 
   // Create output collection
   McEventCollection* thinnedMcEvts = new McEventCollection;
-  if (!evtStore()->record(thinnedMcEvts, m_thinnedMcEventsName).isSuccess()){
-    ATH_MSG_WARNING("Could not record thinned mc collection at ["
-                 << m_thinnedMcEventsName << "]!");
-    delete thinnedMcEvts; 
+  if (!evtStore()->record(thinnedMcEvts, m_thinnedMcEventsName).isSuccess()) {
+    ATH_MSG_WARNING("Could not record thinned mc collection at [" << m_thinnedMcEventsName << "]!");
+    delete thinnedMcEvts;
     thinnedMcEvts = 0;
     return StatusCode::FAILURE;
   }
-  if (evtStore()->setConst(thinnedMcEvts).isFailure() ) {
-    ATH_MSG_WARNING("Could not lock the McEventCollection at ["
-                    << m_thinnedMcEventsName << "] !!");
-  }
+  if (evtStore()->setConst(thinnedMcEvts).isFailure()) { ATH_MSG_WARNING("Could not lock the McEventCollection at [" << m_thinnedMcEventsName << "] !!"); }
 
   // Signal event is first (only?) event; front() is from DataVector
   const HepMC::GenEvent* mcEvt = mcEvts->front();
   auto wtCont = mcEvt->weights();
-  //double wt = 1;
-  if( wtCont.size() != 0 ){
-    //wt = wtCont[0];
+  // double wt = 1;
+  if (wtCont.size() != 0) {
+    // wt = wtCont[0];
   } else {
-    ATH_MSG_WARNING("Weights not found for mc collection [" 
-                    <<m_mcEventsName <<"]");
+    ATH_MSG_WARNING("Weights not found for mc collection [" << m_mcEventsName << "]");
   }
   int inEvent = mcEvt->event_number();
-  if( doDebug ) ATH_MSG_DEBUG("FETCHED count/event " <<m_evtCount <<" " 
-                              <<inEvent);
-
+  if (doDebug) ATH_MSG_DEBUG("FETCHED count/event " << m_evtCount << " " << inEvent);
 
   ///////////////////////////////
   // New event - copy of original
@@ -195,15 +166,14 @@ StatusCode CompactHardTruth::execute() {
 
   HepMC::GenEvent* thinEvt = new HepMC::GenEvent(*mcEvt);
   int nEvent = thinEvt->event_number();
-  if( doPrint ) ATH_MSG_DEBUG("New event number = " <<nEvent);
+  if (doPrint) ATH_MSG_DEBUG("New event number = " << nEvent);
 
-  if( doPrint ){
-    std::cout <<"========== BEGIN EVENT BEFORE THINNING ==========" <<std::endl;
-    HepMC::Print::line(std::cout,thinEvt);
-    std::cout <<"========== END EVENT BEFORE THINNING ==========" <<std::endl;
+  if (doPrint) {
+    std::cout << "========== BEGIN EVENT BEFORE THINNING ==========" << std::endl;
+    HepMC::Print::line(std::cout, thinEvt);
+    std::cout << "========== END EVENT BEFORE THINNING ==========" << std::endl;
   }
 
-
   /////////////////////////////////////////////////
   // Containers for manipulating particles/vertices
   /////////////////////////////////////////////////
@@ -221,7 +191,7 @@ StatusCode CompactHardTruth::execute() {
   // deleteV:   delete vertex after all passes
   // HepMC ~GenVertex deletes particles, so remove them from ALL vertices
 
-  typedef std::pair<HepMC::GenVertex*,HepMC::GenParticle*> vpPair;
+  typedef std::pair<HepMC::GenVertex*, HepMC::GenParticle*> vpPair;
   std::vector<vpPair> removePV;
   std::vector<vpPair> addinPV;
   std::vector<vpPair> addoutPV;
@@ -235,12 +205,11 @@ StatusCode CompactHardTruth::execute() {
   std::list<HepMC::GenVertex*>::iterator dvItr;
   std::list<HepMC::GenVertex*>::iterator dvItrE;
 
-
   //////////////////////////////
   // Find hadronization vertices
   //////////////////////////////
 
-  if( doDebug ) ATH_MSG_DEBUG("Find hadronization vertices");
+  if (doDebug) ATH_MSG_DEBUG("Find hadronization vertices");
 
   std::vector<HepMC::GenVertex*> hadVertices;
 
@@ -248,76 +217,65 @@ StatusCode CompactHardTruth::execute() {
   HepMC::GenEvent::vertex_iterator hadvB = thinEvt->vertices_begin();
   HepMC::GenEvent::vertex_iterator hadvE = thinEvt->vertices_end();
 
-  for(; hadv!=hadvE; ++hadv){
-    if( !(*hadv) ) continue;
-    if( (*hadv)->particles_in_size() < 2 ) continue;
-    if( (*hadv)->particles_out_size() < 1 ) continue;
+  for (; hadv != hadvE; ++hadv) {
+    if (!(*hadv)) continue;
+    if ((*hadv)->particles_in_size() < 2) continue;
+    if ((*hadv)->particles_out_size() < 1) continue;
 
     // Check hadronization vertex
     // isHad is true if at least one hadron
     // q qbar -> pi is allowed, but q qbar -> W... is not
     bool isHadVtx = true;
     bool isHadOut = false;
-    HepMC::GenVertex::particles_in_const_iterator inp = 
-      (*hadv)->particles_in_const_begin();
-    HepMC::GenVertex::particles_in_const_iterator inpE = 
-      (*hadv)->particles_in_const_end();
-    for(; inp!=inpE; ++inp){
-      if( !isParton(*inp) ) isHadVtx = false;
+    HepMC::GenVertex::particles_in_const_iterator inp = (*hadv)->particles_in_const_begin();
+    HepMC::GenVertex::particles_in_const_iterator inpE = (*hadv)->particles_in_const_end();
+    for (; inp != inpE; ++inp) {
+      if (!isParton(*inp)) isHadVtx = false;
     }
-        HepMC::GenVertex::particles_out_const_iterator vp = 
-      (*hadv)->particles_out_const_begin();
-    HepMC::GenVertex::particles_out_const_iterator vpE = 
-      (*hadv)->particles_out_const_end();
-    for(; vp!=vpE; ++vp){
-      if( isParton(*vp) ) isHadVtx = false;      
-      if( isHadron(*vp) ) isHadOut = true;
+    HepMC::GenVertex::particles_out_const_iterator vp = (*hadv)->particles_out_const_begin();
+    HepMC::GenVertex::particles_out_const_iterator vpE = (*hadv)->particles_out_const_end();
+    for (; vp != vpE; ++vp) {
+      if (isParton(*vp)) isHadVtx = false;
+      if (isHadron(*vp)) isHadOut = true;
     }
     isHadVtx = isHadVtx && isHadOut;
-    if( isHadVtx ) hadVertices.push_back(*hadv);
-    if( doDebug && isHadVtx ) ATH_MSG_VERBOSE("Hadronization vertex "
-                                              <<HepMC::barcode(*hadv));
+    if (isHadVtx) hadVertices.push_back(*hadv);
+    if (doDebug && isHadVtx) ATH_MSG_VERBOSE("Hadronization vertex " << HepMC::barcode(*hadv));
   }
 
-  if( hadVertices.size() < 1 ){
-    ATH_MSG_WARNING("No hadronization vertices for event " <<nEvent);
+  if (hadVertices.size() < 1) {
+    ATH_MSG_WARNING("No hadronization vertices for event " << nEvent);
     ATH_MSG_WARNING("Exiting without changing event.");
     thinnedMcEvts->push_back(thinEvt);
     return StatusCode::SUCCESS;
   }
 
-
   //////////////////////////////////////////////////////////
   // Remove all incoming partons from hadronization vertices
   // Remove and delete all descendants
   //////////////////////////////////////////////////////////
 
-  for(unsigned int iv=0; iv<hadVertices.size(); ++iv){
+  for (unsigned int iv = 0; iv < hadVertices.size(); ++iv) {
     HepMC::GenVertex* ivtx = hadVertices[iv];
-    if( doDebug ) ATH_MSG_DEBUG("Removing partons from hadVertex "
-                                <<HepMC::barcode(ivtx));
-    HepMC::GenVertex::particles_in_const_iterator pin =
-     ivtx->particles_in_const_begin();
-    HepMC::GenVertex::particles_in_const_iterator pinE =
-     ivtx->particles_in_const_end();
-    for(; pin!=pinE; ++pin){
-      removePV.push_back(vpPair(ivtx,*pin));
+    if (doDebug) ATH_MSG_DEBUG("Removing partons from hadVertex " << HepMC::barcode(ivtx));
+    HepMC::GenVertex::particles_in_const_iterator pin = ivtx->particles_in_const_begin();
+    HepMC::GenVertex::particles_in_const_iterator pinE = ivtx->particles_in_const_end();
+    for (; pin != pinE; ++pin) {
+      removePV.push_back(vpPair(ivtx, *pin));
     }
   }
 
   // Remove all descendant particles. Will remove empty vertices later.
   // Might have parton decays of hadrons - hence delete sort/unique
-  for(unsigned int iv=0; iv<hadVertices.size(); ++iv){
+  for (unsigned int iv = 0; iv < hadVertices.size(); ++iv) {
     HepMC::GenVertex* ivtx = hadVertices[iv];
-    HepMC::GenVertex::particle_iterator pout =
-     ivtx->particles_begin(HepMC::descendants);
-    HepMC::GenVertex::particle_iterator poutE =
-     ivtx->particles_end(HepMC::descendants);
-    for(; pout!=poutE; ++pout){
+    HepMC::GenVertex::particle_iterator pout = ivtx->particles_begin(HepMC::descendants);
+    HepMC::GenVertex::particle_iterator poutE = ivtx->particles_end(HepMC::descendants);
+    for (; pout != poutE; ++pout) {
       HepMC::GenVertex* vpar = (*pout)->production_vertex();
-      if( vpar ) removePV.push_back(vpPair(vpar,*pout));
+      if (vpar) removePV.push_back(vpPair(vpar, *pout));
       HepMC::GenVertex* vend = (*pout)->end_vertex();
-      if( vend ) removePV.push_back(vpPair(vend,*pout));
+      if (vend) removePV.push_back(vpPair(vend, *pout));
       deleteP.push_back(*pout);
     }
   }
@@ -329,35 +287,27 @@ StatusCode CompactHardTruth::execute() {
 
   static const int cutG4 = 200000;
 
-  for(hadv=hadvB; hadv!=hadvE; ++hadv){
+  for (hadv = hadvB; hadv != hadvE; ++hadv) {
 
     // Empth vertices
-    if( (*hadv)->particles_in_size()==0 && (*hadv)->particles_out_size()==0 ){
+    if ((*hadv)->particles_in_size() == 0 && (*hadv)->particles_out_size() == 0) {
       removeV.push_back(*hadv);
       deleteV.push_back(*hadv);
     }
 
     // Geant vertices/particles
-    if( HepMC::barcode(*hadv) > -cutG4 ) continue;
-    HepMC::GenVertex::particles_in_const_iterator pin =
-     (*hadv)->particles_in_const_begin();
-    HepMC::GenVertex::particles_in_const_iterator pinE =
-     (*hadv)->particles_in_const_end();
-    for(; pin!=pinE; ++pin){
-      removePV.push_back(vpPair(*hadv,*pin));
-      if( (*pin)->barcode() > cutG4 ){
-        deleteP.push_back(*pin);
-      }
+    if (HepMC::barcode(*hadv) > -cutG4) continue;
+    HepMC::GenVertex::particles_in_const_iterator pin = (*hadv)->particles_in_const_begin();
+    HepMC::GenVertex::particles_in_const_iterator pinE = (*hadv)->particles_in_const_end();
+    for (; pin != pinE; ++pin) {
+      removePV.push_back(vpPair(*hadv, *pin));
+      if ((*pin)->barcode() > cutG4) { deleteP.push_back(*pin); }
     }
-    HepMC::GenVertex::particles_out_const_iterator pout =
-     (*hadv)->particles_out_const_begin();
-    HepMC::GenVertex::particles_out_const_iterator poutE =
-     (*hadv)->particles_out_const_end();
-    for(; pout!=poutE; ++pout){
-      removePV.push_back(vpPair(*hadv,*pout));
-      if( (*pout)->barcode() > cutG4 ){
-        deleteP.push_back(*pout);
-      }
+    HepMC::GenVertex::particles_out_const_iterator pout = (*hadv)->particles_out_const_begin();
+    HepMC::GenVertex::particles_out_const_iterator poutE = (*hadv)->particles_out_const_end();
+    for (; pout != poutE; ++pout) {
+      removePV.push_back(vpPair(*hadv, *pout));
+      if ((*pout)->barcode() > cutG4) { deleteP.push_back(*pout); }
     }
     removeV.push_back(*hadv);
     deleteV.push_back(*hadv);
@@ -365,58 +315,51 @@ StatusCode CompactHardTruth::execute() {
 
   // Actually implement changes
 
-  for(unsigned int i=0; i<removePV.size(); ++i){
+  for (unsigned int i = 0; i < removePV.size(); ++i) {
     HepMC::GenVertex* v = removePV[i].first;
     HepMC::GenParticle* p = removePV[i].second;
     v->remove_particle(p);
   }
 
-  for(unsigned int i=0; i<addoutPV.size(); ++i){
+  for (unsigned int i = 0; i < addoutPV.size(); ++i) {
     HepMC::GenVertex* v = addoutPV[i].first;
     HepMC::GenParticle* p = addoutPV[i].second;
     v->add_particle_out(p);
   }
 
-  for(unsigned int iv=1; iv<hadVertices.size(); ++iv){
+  for (unsigned int iv = 1; iv < hadVertices.size(); ++iv) {
     HepMC::GenVertex* v = hadVertices[iv];
-    if( v->particles_in_size()!=0 || v->particles_out_size()!=0 ){
-      ATH_MSG_WARNING("Removing vertex " <<v->barcode() 
-      <<" for event " <<nEvent <<" with in/out particles " 
-      <<v->particles_in_size() <<" " <<v->particles_out_size() );
-    }
-    if( !thinEvt->remove_vertex(hadVertices[iv]) ){
-      ATH_MSG_WARNING("Error removing vertex " <<v->barcode()
-      <<" for event " <<nEvent);
+    if (v->particles_in_size() != 0 || v->particles_out_size() != 0) {
+      ATH_MSG_WARNING("Removing vertex " << v->barcode() << " for event " << nEvent << " with in/out particles " << v->particles_in_size() << " " << v->particles_out_size());
     }
+    if (!thinEvt->remove_vertex(hadVertices[iv])) { ATH_MSG_WARNING("Error removing vertex " << v->barcode() << " for event " << nEvent); }
   }
 
   // Delete removed particles/vertices
 
-  if( doDebug ) ATH_MSG_DEBUG("Deleting hadronization vertices " 
-                              <<deleteV.size());
+  if (doDebug) ATH_MSG_DEBUG("Deleting hadronization vertices " << deleteV.size());
   deleteV.sort();
   deleteV.unique();
-  for(dvItr=deleteV.begin(); dvItr!=deleteV.end(); ++dvItr){
-    if( doDebug ) ATH_MSG_VERBOSE("Deleting vertex " <<(*dvItr)->barcode());
-    if( *dvItr) delete (*dvItr);
+  for (dvItr = deleteV.begin(); dvItr != deleteV.end(); ++dvItr) {
+    if (doDebug) ATH_MSG_VERBOSE("Deleting vertex " << (*dvItr)->barcode());
+    if (*dvItr) delete (*dvItr);
   }
 
   deleteP.sort();
   deleteP.unique();
-  for(dpItr=deleteP.begin(); dpItr!=deleteP.end(); ++dpItr){
-    if( doDebug ) ATH_MSG_VERBOSE("Deleting particle " <<(*dpItr)->barcode());
-    if( *dpItr ) delete (*dpItr);
+  for (dpItr = deleteP.begin(); dpItr != deleteP.end(); ++dpItr) {
+    if (doDebug) ATH_MSG_VERBOSE("Deleting particle " << (*dpItr)->barcode());
+    if (*dpItr) delete (*dpItr);
   }
 
-
   ////////////////////////
   // Cluster final partons
   ////////////////////////
 
-  if( doDebug && doExtra ){
-    std::cout <<"========== BEGIN EVENT BEFORE CLUSTER ==========" <<std::endl;
-    HepMC::Print::line(std::cout,thinEvt);
-    std::cout <<"========== END EVENT BEFORE CLUSTER ==========" <<std::endl;
+  if (doDebug && doExtra) {
+    std::cout << "========== BEGIN EVENT BEFORE CLUSTER ==========" << std::endl;
+    HepMC::Print::line(std::cout, thinEvt);
+    std::cout << "========== END EVENT BEFORE CLUSTER ==========" << std::endl;
   }
 
   // Possible cases:
@@ -431,13 +374,13 @@ StatusCode CompactHardTruth::execute() {
   // longer needed.
 
   bool moreP = true;
-  typedef std::pair<HepMC::GenVertex*,HepMC::GenParticle*> vpPair;
-  //std::vector<vpPair> removePV;
-  //std::vector<vpPair> addinPV;
-  //std::vector<vpPair> addoutPV;
-  //std::vector<HepMC::GenVertex*> removeV;
-  //std::list<HepMC::GenParticle*> deleteP;
-  //std::list<HepMC::GenVertex*> deleteV;
+  typedef std::pair<HepMC::GenVertex*, HepMC::GenParticle*> vpPair;
+  // std::vector<vpPair> removePV;
+  // std::vector<vpPair> addinPV;
+  // std::vector<vpPair> addoutPV;
+  // std::vector<HepMC::GenVertex*> removeV;
+  // std::list<HepMC::GenParticle*> deleteP;
+  // std::list<HepMC::GenVertex*> deleteV;
   removePV.clear();
   addinPV.clear();
   addoutPV.clear();
@@ -445,15 +388,15 @@ StatusCode CompactHardTruth::execute() {
   deleteP.clear();
   deleteV.clear();
 
-  typedef std::pair<HepMC::GenParticle*,HepMC::FourVector> pkPair;
+  typedef std::pair<HepMC::GenParticle*, HepMC::FourVector> pkPair;
   std::vector<pkPair> changePK;
 
-  if( doDebug ) ATH_MSG_DEBUG("Start parton thinning");
-  while( moreP ){
-#ifdef HEPMC3 
-    if( doDebug ) ATH_MSG_DEBUG("New parton pass " <<inEvent <<" "<<thinEvt->particles().size() <<" " <<thinEvt->vertices().size());
+  if (doDebug) ATH_MSG_DEBUG("Start parton thinning");
+  while (moreP) {
+#ifdef HEPMC3
+    if (doDebug) ATH_MSG_DEBUG("New parton pass " << inEvent << " " << thinEvt->particles().size() << " " << thinEvt->vertices().size());
 #else
-    if( doDebug ) ATH_MSG_DEBUG("New parton pass " <<inEvent <<" "<<thinEvt->particles_size() <<" " <<thinEvt->vertices_size());
+    if (doDebug) ATH_MSG_DEBUG("New parton pass " << inEvent << " " << thinEvt->particles_size() << " " << thinEvt->vertices_size());
 #endif
 
     moreP = false;
@@ -468,22 +411,20 @@ StatusCode CompactHardTruth::execute() {
     HepMC::GenEvent::particle_iterator finpE = thinEvt->particles_end();
 
     // Find final partons
-    for(finp=finpB; finp!=finpE; ++finp){
+    for (finp = finpB; finp != finpE; ++finp) {
       int iCase = 0;
 
       HepMC::GenParticle* fp = *finp;
-      if( !isFinalParton(fp) ) continue;
-      if( doDebug ) ATH_MSG_DEBUG("Starting final parton " <<fp->barcode());
+      if (!isFinalParton(fp)) continue;
+      if (doDebug) ATH_MSG_DEBUG("Starting final parton " << fp->barcode());
 
       // Production/end vertices
       HepMC::GenVertex* pvtx = fp->production_vertex();
-      if( !pvtx ) {
-        ATH_MSG_WARNING("Missing production for final parton "
-                        <<fp->barcode());
+      if (!pvtx) {
+        ATH_MSG_WARNING("Missing production for final parton " << fp->barcode());
         continue;
       }
-      if( doDebug ) ATH_MSG_DEBUG("Final parton " <<pvtx->barcode() <<" "
-                                  <<fp->barcode());
+      if (doDebug) ATH_MSG_DEBUG("Final parton " << pvtx->barcode() << " " << fp->barcode());
 
       ////////////
       // Case 1->1
@@ -492,37 +433,32 @@ StatusCode CompactHardTruth::execute() {
       // One-particle decay; use final particle
       // ppvtx -> pp -> pvtx -> fp
 
-      if( pvtx->particles_in_size()==1 && pvtx->particles_out_size()==1 ){
+      if (pvtx->particles_in_size() == 1 && pvtx->particles_out_size() == 1) {
         // Incoming particle to parent vertex
-        HepMC::GenVertex::particles_in_const_iterator pitr =
-          pvtx->particles_in_const_begin();
+        HepMC::GenVertex::particles_in_const_iterator pitr = pvtx->particles_in_const_begin();
         HepMC::GenParticle* pp = *pitr;
-        if( !pp || pp->barcode()==0 ){
-          ATH_MSG_DEBUG("1->1: missing pp for fp " <<fp->barcode());
+        if (!pp || pp->barcode() == 0) {
+          ATH_MSG_DEBUG("1->1: missing pp for fp " << fp->barcode());
           ++m_missCount;
           continue;
         }
         // Its parent vertex
-        HepMC::GenVertex* ppvtx = pp->production_vertex();        
-        if( !ppvtx || ppvtx->barcode()==0 ){
-          ATH_MSG_DEBUG("1->1: missing ppvtx for fp " <<fp->barcode());
+        HepMC::GenVertex* ppvtx = pp->production_vertex();
+        if (!ppvtx || ppvtx->barcode() == 0) {
+          ATH_MSG_DEBUG("1->1: missing ppvtx for fp " << fp->barcode());
           ++m_missCount;
-           continue;
+          continue;
         }
         moreP = true;
         iCase = 1;
 
-        removePV.push_back(vpPair(ppvtx,pp));
-        removePV.push_back(vpPair(pvtx,pp));
+        removePV.push_back(vpPair(ppvtx, pp));
+        removePV.push_back(vpPair(pvtx, pp));
         deleteP.push_back(pp);
         removeV.push_back(pvtx);
         deleteV.push_back(pvtx);
-        addoutPV.push_back(vpPair(ppvtx,fp));
-        if( doDebug ){
-          ATH_MSG_DEBUG("1->1: ppvtx,pp,pvtx,fp,evtx "
-                        <<ppvtx->barcode() <<" " <<pp->barcode() <<" "
-                        <<pvtx->barcode() <<" " <<fp->barcode() );
-        }
+        addoutPV.push_back(vpPair(ppvtx, fp));
+        if (doDebug) { ATH_MSG_DEBUG("1->1: ppvtx,pp,pvtx,fp,evtx " << ppvtx->barcode() << " " << pp->barcode() << " " << pvtx->barcode() << " " << fp->barcode()); }
       }
 
       ////////////
@@ -530,51 +466,48 @@ StatusCode CompactHardTruth::execute() {
       ////////////
 
       // Color recombination. Momentum is conserved so just keep 2.
-      // Drop 1 and vertex. 
+      // Drop 1 and vertex.
       // ppvtx1,ppvtx2 -> pp1,pp2 -> pvtx -> fp
       // Recombination should not affect hard physics!
 
-      if( pvtx->particles_in_size()==2 && pvtx->particles_out_size()==1 ){
+      if (pvtx->particles_in_size() == 2 && pvtx->particles_out_size() == 1) {
         // Incoming particles to parent vertex
-        HepMC::GenVertex::particles_in_const_iterator pitr =
-          pvtx->particles_in_const_begin();
+        HepMC::GenVertex::particles_in_const_iterator pitr = pvtx->particles_in_const_begin();
         HepMC::GenParticle* pp1 = *pitr;
         ++pitr;
         HepMC::GenParticle* pp2 = *pitr;
 
         // Check for 2->1->2 initial state interactions in Herwig++
         // Initial partons have pt=0, use pt<0.001MeV
-        if( fabs(pp1->momentum().perp())<1.e-3 ) continue;
-        if( fabs(pp2->momentum().perp())<1.e-3 ) continue;
+        if (fabs(pp1->momentum().perp()) < 1.e-3) continue;
+        if (fabs(pp2->momentum().perp()) < 1.e-3) continue;
         // Their parent vertices
-        HepMC::GenVertex* ppvtx1 = pp1->production_vertex();        
-        HepMC::GenVertex* ppvtx2 = pp2->production_vertex();        
-        if( !ppvtx1 || ppvtx1->barcode()==0 ){
-          ATH_MSG_DEBUG("2->1: missing ppvtx1 for fp " <<fp->barcode());
+        HepMC::GenVertex* ppvtx1 = pp1->production_vertex();
+        HepMC::GenVertex* ppvtx2 = pp2->production_vertex();
+        if (!ppvtx1 || ppvtx1->barcode() == 0) {
+          ATH_MSG_DEBUG("2->1: missing ppvtx1 for fp " << fp->barcode());
           ++m_missCount;
           continue;
         }
-       if( !ppvtx2 || ppvtx2->barcode()==0 ){
-          ATH_MSG_DEBUG("2->1: missing ppvtx2 for fp " <<fp->barcode());
+        if (!ppvtx2 || ppvtx2->barcode() == 0) {
+          ATH_MSG_DEBUG("2->1: missing ppvtx2 for fp " << fp->barcode());
           ++m_missCount;
           continue;
         }
- 
+
         moreP = true;
         iCase = 2;
 
-        removePV.push_back(vpPair(pvtx,fp));
-        removePV.push_back(vpPair(pvtx,pp1));
-        removePV.push_back(vpPair(pvtx,pp2));
+        removePV.push_back(vpPair(pvtx, fp));
+        removePV.push_back(vpPair(pvtx, pp1));
+        removePV.push_back(vpPair(pvtx, pp2));
         deleteP.push_back(fp);
         removeV.push_back(pvtx);
         deleteV.push_back(pvtx);
 
-        if( doDebug ){
-          ATH_MSG_DEBUG("2->1: ppvtx1,pp1,ppvtx2,pp2,pvtx,fp "
-                       <<ppvtx1->barcode() <<" " <<pp1->barcode() <<" "
-                       <<ppvtx2->barcode() <<" " <<pp2->barcode() <<" "
-                       <<pvtx->barcode() <<" " <<fp->barcode());
+        if (doDebug) {
+          ATH_MSG_DEBUG("2->1: ppvtx1,pp1,ppvtx2,pp2,pvtx,fp " << ppvtx1->barcode() << " " << pp1->barcode() << " " << ppvtx2->barcode() << " " << pp2->barcode() << " " << pvtx->barcode() << " "
+                                                               << fp->barcode());
         }
       }
 
@@ -586,63 +519,56 @@ StatusCode CompactHardTruth::execute() {
       // Drop only if mass is below cut
       // ppvtx -> pp -> pvtx -> pout1,pout2/fp
 
-      if( pvtx->particles_in_size()==1 && pvtx->particles_out_size()==2 ){
-        HepMC::GenVertex::particles_out_const_iterator poutitr =
-          pvtx->particles_out_const_begin();
+      if (pvtx->particles_in_size() == 1 && pvtx->particles_out_size() == 2) {
+        HepMC::GenVertex::particles_out_const_iterator poutitr = pvtx->particles_out_const_begin();
         HepMC::GenParticle* pout1 = *poutitr;
         ++poutitr;
         HepMC::GenParticle* pout2 = *poutitr;
 
         // Require two final partons and avoid duplication
-        if( fp == pout1 ) {
-          if( !isFinalParton(pout2) ){
-            if( doDebug ) ATH_MSG_DEBUG("1->2: not final " 
-                                        <<pout2->barcode());
+        if (fp == pout1) {
+          if (!isFinalParton(pout2)) {
+            if (doDebug) ATH_MSG_DEBUG("1->2: not final " << pout2->barcode());
             continue;
           }
-        } else if( fp == pout2 ) {
-          if( !isFinalParton(pout1) ){
-            if( doDebug ) ATH_MSG_DEBUG("1->2: not final " <<pout1->barcode());
+        } else if (fp == pout2) {
+          if (!isFinalParton(pout1)) {
+            if (doDebug) ATH_MSG_DEBUG("1->2: not final " << pout1->barcode());
             continue;
           }
         } else {
-          ATH_MSG_WARNING("1->2: No match found for branching "
-                          <<fp->barcode() <<" " <<pvtx->barcode() <<" "
-                          <<pout1->barcode() <<" " <<pout2->barcode());
+          ATH_MSG_WARNING("1->2: No match found for branching " << fp->barcode() << " " << pvtx->barcode() << " " << pout1->barcode() << " " << pout2->barcode());
           continue;
         }
-        if( fp != pout1 ) continue;
-        // Incoming particle 
-        HepMC::GenVertex::particles_in_const_iterator pitr =
-          pvtx->particles_in_const_begin();
+        if (fp != pout1) continue;
+        // Incoming particle
+        HepMC::GenVertex::particles_in_const_iterator pitr = pvtx->particles_in_const_begin();
         HepMC::GenParticle* pp = *pitr;
 
         // Do not merge initial partons (pt<1MeV or m<-1MeV)
-        if(pout1->momentum().m()<-1.0 || pout1->momentum().perp()<1.0) continue;
-        if(pout2->momentum().m()<-1.0 || pout2->momentum().perp()<1.0) continue;
+        if (pout1->momentum().m() < -1.0 || pout1->momentum().perp() < 1.0) continue;
+        if (pout2->momentum().m() < -1.0 || pout2->momentum().perp() < 1.0) continue;
 
         // Parton pair mass cut
         HepMC::FourVector p12 = vtxOutMom(pvtx);
         double m12 = p12.m();
-        if( m12<0 ){
-          if( fabs(m12) > 10. + 1.0e-5*p12.e() ){
-            ATH_MSG_WARNING("Spacelike mass^2 for parton sum " <<m12 <<" "
-                            <<pp->barcode() <<" " <<pvtx->barcode() <<" "
-                            <<pout1->barcode() <<" " <<pout2->barcode());
+        if (m12 < 0) {
+          if (fabs(m12) > 10. + 1.0e-5 * p12.e()) {
+            ATH_MSG_WARNING("Spacelike mass^2 for parton sum " << m12 << " " << pp->barcode() << " " << pvtx->barcode() << " " << pout1->barcode() << " " << pout2->barcode());
           }
           m12 = 0;
         }
-        if( doDebug ) ATH_MSG_DEBUG("1->2: parton pair mass " <<m12);
+        if (doDebug) ATH_MSG_DEBUG("1->2: parton pair mass " << m12);
         // If mass > cut, keep pair
-        if( m12 > m_partonCut ) {
-          if( doDebug ) ATH_MSG_DEBUG("Keeping 1->2: parton mass " <<m12);
+        if (m12 > m_partonCut) {
+          if (doDebug) ATH_MSG_DEBUG("Keeping 1->2: parton mass " << m12);
           continue;
         }
 
         // Associated vertices
         HepMC::GenVertex* ppvtx = pp->production_vertex();
-        if( !ppvtx || ppvtx->barcode() == 0 ){
-          ATH_MSG_DEBUG("1->2: missing ppvtx for fp " <<fp->barcode());
+        if (!ppvtx || ppvtx->barcode() == 0) {
+          ATH_MSG_DEBUG("1->2: missing ppvtx for fp " << fp->barcode());
           ++m_missCount;
           continue;
         }
@@ -650,133 +576,117 @@ StatusCode CompactHardTruth::execute() {
         // Merge branching
         moreP = true;
         iCase = 3;
-        if( doDebug ) ATH_MSG_DEBUG("Merging 1->2: mass " <<p12.m());
+        if (doDebug) ATH_MSG_DEBUG("Merging 1->2: mass " << p12.m());
 
-        changePK.push_back(pkPair(pp,p12));
-        removePV.push_back(vpPair(pvtx,pp));
-        removePV.push_back(vpPair(pvtx,pout1));
-        removePV.push_back(vpPair(pvtx,pout2));
+        changePK.push_back(pkPair(pp, p12));
+        removePV.push_back(vpPair(pvtx, pp));
+        removePV.push_back(vpPair(pvtx, pout1));
+        removePV.push_back(vpPair(pvtx, pout2));
 
         deleteP.push_back(pout1);
         deleteP.push_back(pout2);
         removeV.push_back(pvtx);
         deleteV.push_back(pvtx);
 
-        if( doDebug ){
-          ATH_MSG_DEBUG("Merge 1->2: ppvtx,pp,pvtx,pout1,pout2,evtx "
-                       <<HepMC::barcode(ppvtx) <<" " <<HepMC::barcode(pp) <<" "
-                       <<HepMC::barcode(pvtx) <<" " <<HepMC::barcode(pout1) <<" "
-                       <<HepMC::barcode(pout2));
-          ATH_MSG_DEBUG("Merge 1->2: id " <<pp->pdg_id() <<" "
-                       <<pout1->pdg_id() <<" " <<pout2->pdg_id());
+        if (doDebug) {
+          ATH_MSG_DEBUG("Merge 1->2: ppvtx,pp,pvtx,pout1,pout2,evtx " << HepMC::barcode(ppvtx) << " " << HepMC::barcode(pp) << " " << HepMC::barcode(pvtx) << " " << HepMC::barcode(pout1) << " "
+                                                                      << HepMC::barcode(pout2));
+          ATH_MSG_DEBUG("Merge 1->2: id " << pp->pdg_id() << " " << pout1->pdg_id() << " " << pout2->pdg_id());
         }
-      } //end 1->2 case
-
+      } // end 1->2 case
 
       /////////////////////////
       // Incoming proton vertex
       /////////////////////////
 
       // Do nothing
-      if( pvtx->particles_in_size()==1 ){
+      if (pvtx->particles_in_size() == 1) {
         // Incoming particle to parent vertex
-        HepMC::GenVertex::particles_in_const_iterator pitr =
-          pvtx->particles_in_const_begin();
+        HepMC::GenVertex::particles_in_const_iterator pitr = pvtx->particles_in_const_begin();
         HepMC::GenParticle* pp = *pitr;
-        if( abs(pp->pdg_id()) == 2212 ) iCase = -1;
+        if (abs(pp->pdg_id()) == 2212) iCase = -1;
       }
 
       // Case not found
       // Need test for 2->2 in underlying event
-      if( iCase==0 ){
-        if( doDebug ) ATH_MSG_DEBUG("Case not found " <<pvtx->barcode() <<" " 
-                                   <<fp->barcode() <<" " 
-                                   <<pvtx->particles_in_size() <<" " 
-                                   <<pvtx->particles_out_size() );
+      if (iCase == 0) {
+        if (doDebug) ATH_MSG_DEBUG("Case not found " << pvtx->barcode() << " " << fp->barcode() << " " << pvtx->particles_in_size() << " " << pvtx->particles_out_size());
       }
 
-    } //end final parton loop
+    } // end final parton loop
 
     // Actually implement changes -- remove particles from vertices
     // Parton ends free, so no addinPV
-    if( doDebug ) ATH_MSG_DEBUG("Actually removing particles " 
-                                <<removePV.size());
+    if (doDebug) ATH_MSG_DEBUG("Actually removing particles " << removePV.size());
 
-    for(unsigned int i=0; i<removePV.size(); ++i){
+    for (unsigned int i = 0; i < removePV.size(); ++i) {
       HepMC::GenVertex* v = removePV[i].first;
       HepMC::GenParticle* p = removePV[i].second;
-      if( doDebug ) ATH_MSG_VERBOSE("Removing v,p " <<v->barcode() <<" " 
-                                    <<p->barcode());
+      if (doDebug) ATH_MSG_VERBOSE("Removing v,p " << v->barcode() << " " << p->barcode());
       v->remove_particle(p);
     }
 
     // Actually implement changes -- add particles to vertices
-    if( doDebug ) ATH_MSG_DEBUG("Actually add particles in/out " 
-                  <<addinPV.size() <<" " <<addoutPV.size() );
-    for(unsigned int i=0; i<addoutPV.size(); ++i){
+    if (doDebug) ATH_MSG_DEBUG("Actually add particles in/out " << addinPV.size() << " " << addoutPV.size());
+    for (unsigned int i = 0; i < addoutPV.size(); ++i) {
       HepMC::GenVertex* v = addoutPV[i].first;
       HepMC::GenParticle* p = addoutPV[i].second;
-      if( doDebug ) ATH_MSG_VERBOSE("Adding v,p " <<v->barcode() <<" " 
-                                    <<p->barcode());
+      if (doDebug) ATH_MSG_VERBOSE("Adding v,p " << v->barcode() << " " << p->barcode());
       v->add_particle_out(p);
     }
 
     // Actually implement changes -- change momenta
-    for(unsigned int i=0; i<changePK.size(); ++i){
+    for (unsigned int i = 0; i < changePK.size(); ++i) {
       HepMC::GenParticle* pp = changePK[i].first;
-      //!float eold = pp->momentum().e();
+      //! float eold = pp->momentum().e();
       pp->set_momentum(changePK[i].second);
     }
 
     // Actually implement changes -- remove vertices
-    if( doDebug ) ATH_MSG_DEBUG("Actually remove vertices " <<removeV.size());
-    for(unsigned int i=0; i<removeV.size(); ++i){
+    if (doDebug) ATH_MSG_DEBUG("Actually remove vertices " << removeV.size());
+    for (unsigned int i = 0; i < removeV.size(); ++i) {
       int nv = thinEvt->vertices_size();
-      if( thinEvt->remove_vertex(removeV[i]) ){
-        if( doDebug ){
-          ATH_MSG_VERBOSE("Removed vertex " <<removeV[i]->barcode() <<" "
-                          <<nv <<" " <<thinEvt->vertices_size());
-        }
+      if (thinEvt->remove_vertex(removeV[i])) {
+        if (doDebug) { ATH_MSG_VERBOSE("Removed vertex " << removeV[i]->barcode() << " " << nv << " " << thinEvt->vertices_size()); }
       } else {
-        ATH_MSG_WARNING("Failed to remove vertex " <<removeV[i]->barcode());
+        ATH_MSG_WARNING("Failed to remove vertex " << removeV[i]->barcode());
       }
     }
-    if( doDebug ) ATH_MSG_DEBUG("End while(moreP) pass " <<moreP);
+    if (doDebug) ATH_MSG_DEBUG("End while(moreP) pass " << moreP);
 
-  } //end moreP
+  } // end moreP
 
   // Delete removed particles/vertices
-  if( doDebug ) ATH_MSG_DEBUG("Deleting vertices " <<deleteV.size());
+  if (doDebug) ATH_MSG_DEBUG("Deleting vertices " << deleteV.size());
   deleteV.sort();
   deleteV.unique();
-  for(dvItr=deleteV.begin(); dvItr!=deleteV.end(); ++dvItr){
-    if( doDebug ) ATH_MSG_VERBOSE("Deleting vertex " <<(*dvItr)->barcode());
-    if( *dvItr) delete (*dvItr);
+  for (dvItr = deleteV.begin(); dvItr != deleteV.end(); ++dvItr) {
+    if (doDebug) ATH_MSG_VERBOSE("Deleting vertex " << (*dvItr)->barcode());
+    if (*dvItr) delete (*dvItr);
   }
 
-  if( doDebug ) ATH_MSG_DEBUG("Deleting particles " <<deleteP.size());
+  if (doDebug) ATH_MSG_DEBUG("Deleting particles " << deleteP.size());
   deleteP.sort();
   deleteP.unique();
-  for(dpItr=deleteP.begin(); dpItr!=deleteP.end(); ++dpItr){
-    if( doDebug ) ATH_MSG_VERBOSE("Deleting particle " <<(*dpItr)->barcode());
-    if( *dpItr ) delete (*dpItr);
+  for (dpItr = deleteP.begin(); dpItr != deleteP.end(); ++dpItr) {
+    if (doDebug) ATH_MSG_VERBOSE("Deleting particle " << (*dpItr)->barcode());
+    if (*dpItr) delete (*dpItr);
   }
 
-
   //////////////////////////////
   // Strip soft underlying stuff
   //////////////////////////////
 
-  if( doDebug && doExtra ){
-    std::cout <<"========== BEGIN EVENT BEFORE SOFT ==========" <<std::endl;
-    HepMC::Print::line(std::cout,thinEvt);
-    std::cout <<"========== END EVENT BEFORE SOFT ==========" <<std::endl;
+  if (doDebug && doExtra) {
+    std::cout << "========== BEGIN EVENT BEFORE SOFT ==========" << std::endl;
+    HepMC::Print::line(std::cout, thinEvt);
+    std::cout << "========== END EVENT BEFORE SOFT ==========" << std::endl;
   }
 
   HepMC::GenParticle* beams[2];
   beams[0] = thinEvt->beam_particles().first;
   beams[1] = thinEvt->beam_particles().second;
-  
+
   // Have deleted all hadronization particles
   // Find all particles connected to hard process(es) with m_T>10GeV
   std::list<HepMC::GenParticle*> pNotHad;
@@ -785,48 +695,45 @@ StatusCode CompactHardTruth::execute() {
   HepMC::GenEvent::particle_iterator finp = thinEvt->particles_begin();
   HepMC::GenEvent::particle_iterator finpE = thinEvt->particles_end();
 
-  for(; finp!=finpE; ++finp){ 
-    HepMC::GenParticle* fp = *finp; 
-    HepMC::GenVertex* pvtx = fp->production_vertex(); 
-    if( !pvtx ) continue;
+  for (; finp != finpE; ++finp) {
+    HepMC::GenParticle* fp = *finp;
+    HepMC::GenVertex* pvtx = fp->production_vertex();
+    if (!pvtx) continue;
 
     double ep = fp->momentum().e();
     double pzp = fp->momentum().pz();
-    double mtmax = (ep+pzp)*(ep-pzp);
-    HepMC::GenVertex::particle_iterator gpar =
-     fp->production_vertex()->particles_begin(HepMC::ancestors);
+    double mtmax = (ep + pzp) * (ep - pzp);
+    HepMC::GenVertex::particle_iterator gpar = fp->production_vertex()->particles_begin(HepMC::ancestors);
     HepMC::GenVertex::particle_iterator gparB = gpar;
-    HepMC::GenVertex::particle_iterator gparE =
-     fp->production_vertex()->particles_end(HepMC::ancestors);
+    HepMC::GenVertex::particle_iterator gparE = fp->production_vertex()->particles_end(HepMC::ancestors);
 
-    for(; gpar!=gparE; ++gpar){
+    for (; gpar != gparE; ++gpar) {
       double e = (*gpar)->momentum().e();
       double pz = (*gpar)->momentum().pz();
-      double mt = (e+pz)*(e-pz);
-      if( mt > mtmax ) mtmax = mt;
+      double mt = (e + pz) * (e - pz);
+      if (mt > mtmax) mtmax = mt;
     }
 
     // Keep hard particles and all ancestors
     pNotHad.push_back(fp);
     int ida = abs(fp->pdg_id());
-    bool keepid = (ida>10 && ida<20) || (ida>1000000 && ida<9000000);
-    if( mtmax > m_hardCut*m_hardCut || keepid ){
+    bool keepid = (ida > 10 && ida < 20) || (ida > 1000000 && ida < 9000000);
+    if (mtmax > m_hardCut * m_hardCut || keepid) {
       pHard.push_back(fp);
-      for(gpar=gparB; gpar!=gparE; ++gpar) pHard.push_back(*gpar);
+      for (gpar = gparB; gpar != gparE; ++gpar)
+        pHard.push_back(*gpar);
     }
 
     // Also keep all descendants of interesting particles
     // Include leptons to get photons in Sherpa with no Z parent
     // All hard descendants would include soft initial radiation
     // Will remove duplicates with list sort/unique
-    bool keepid2 = ida==6 || (ida>=11 && ida<=16) || (ida>=23 && ida<=37) ||
-                   (ida>1000000 && ida<9000000);
-    if( keepid2 && fp->end_vertex() ){
-      HepMC::GenVertex::particle_iterator des =
-       fp->end_vertex()->particles_begin(HepMC::descendants);
-       HepMC::GenVertex::particle_iterator desE =
-        fp->end_vertex()->particles_end(HepMC::descendants);
-       for(; des!=desE; ++des) pHard.push_back(*des);
+    bool keepid2 = ida == 6 || (ida >= 11 && ida <= 16) || (ida >= 23 && ida <= 37) || (ida > 1000000 && ida < 9000000);
+    if (keepid2 && fp->end_vertex()) {
+      HepMC::GenVertex::particle_iterator des = fp->end_vertex()->particles_begin(HepMC::descendants);
+      HepMC::GenVertex::particle_iterator desE = fp->end_vertex()->particles_end(HepMC::descendants);
+      for (; des != desE; ++des)
+        pHard.push_back(*des);
     }
   }
 
@@ -837,18 +744,15 @@ StatusCode CompactHardTruth::execute() {
   pHard.unique();
 
   // Dump information
-  if( doDebug ){
+  if (doDebug) {
     std::list<HepMC::GenParticle*>::iterator hItr2 = pHard.begin();
     std::list<HepMC::GenParticle*>::iterator hItr2E = pHard.end();
     int nhard = 0;
-    for(; hItr2!=hItr2E; ++hItr2){
+    for (; hItr2 != hItr2E; ++hItr2) {
       ++nhard;
-      ATH_MSG_DEBUG("Hard GenParticles " <<(*hItr2)->barcode() <<" " 
-                    <<(*hItr2)->pdg_id() <<" "
-                    <<(*hItr2)->momentum().perp()/1000. <<" "
-                    <<(*hItr2)->momentum().pz()/1000. );
+      ATH_MSG_DEBUG("Hard GenParticles " << (*hItr2)->barcode() << " " << (*hItr2)->pdg_id() << " " << (*hItr2)->momentum().perp() / 1000. << " " << (*hItr2)->momentum().pz() / 1000.);
     }
-    if( doDebug ) ATH_MSG_DEBUG("Hard GenParticles total " <<nhard);
+    if (doDebug) ATH_MSG_DEBUG("Hard GenParticles total " << nhard);
   }
 
   // Remove non-hadronization, non-hard GenParticles from vertices
@@ -862,28 +766,26 @@ StatusCode CompactHardTruth::execute() {
   std::list<HepMC::GenParticle*>::iterator hItrB = pHard.begin();
   std::list<HepMC::GenParticle*>::iterator hItrE = pHard.end();
 
-  for(; pItr!=pItrE; ++pItr){
+  for (; pItr != pItrE; ++pItr) {
     HepMC::GenParticle* p = *pItr;
 
     // Skip hard ones
     bool isHard = false;
-    for(hItr=hItrB; hItr!=hItrE; ++hItr){
-      if( p == (*hItr) ){
+    for (hItr = hItrB; hItr != hItrE; ++hItr) {
+      if (p == (*hItr)) {
         isHard = true;
         break;
       }
     }
-    if( doDebug ) ATH_MSG_DEBUG("Particle bc/isHard " <<p->barcode() <<" "
-                                <<isHard);
-    if( isHard ) continue;
+    if (doDebug) ATH_MSG_DEBUG("Particle bc/isHard " << p->barcode() << " " << isHard);
+    if (isHard) continue;
     HepMC::GenVertex* pvtx = p->production_vertex();
-    if( pvtx ) pvtx->remove_particle(p);
+    if (pvtx) pvtx->remove_particle(p);
     HepMC::GenVertex* evtx = p->end_vertex();
-    if( evtx ) evtx->remove_particle(p);
+    if (evtx) evtx->remove_particle(p);
     delete p;
   }
 
-
   /////////////////////////////////////////////////////////
   // Remove and delete vertices with no remaining particles
   /////////////////////////////////////////////////////////
@@ -893,39 +795,37 @@ StatusCode CompactHardTruth::execute() {
 
   HepMC::GenEvent::vertex_iterator vtx = thinEvt->vertices_begin();
   HepMC::GenEvent::vertex_iterator vtxE = thinEvt->vertices_end();
-  for(; vtx!=vtxE; ++vtx){
-    if( (*vtx)->particles_in_size() != 0 ) continue;
-    if( (*vtx)->particles_out_size() != 0 ) continue;
+  for (; vtx != vtxE; ++vtx) {
+    if ((*vtx)->particles_in_size() != 0) continue;
+    if ((*vtx)->particles_out_size() != 0) continue;
     removeV.push_back(*vtx);
     deleteV.push_back(*vtx);
   }
 
-  if( doDebug ) ATH_MSG_DEBUG("Removing/deleting 0-particle vertices " 
-                              <<removeV.size()  <<" " <<deleteV.size() );
-  for(unsigned int i=0; i<removeV.size(); ++i){
-    if( thinEvt->remove_vertex(removeV[i]) ){
-      if( doDebug ) ATH_MSG_VERBOSE("Removed vertex " <<removeV[i]->barcode());
+  if (doDebug) ATH_MSG_DEBUG("Removing/deleting 0-particle vertices " << removeV.size() << " " << deleteV.size());
+  for (unsigned int i = 0; i < removeV.size(); ++i) {
+    if (thinEvt->remove_vertex(removeV[i])) {
+      if (doDebug) ATH_MSG_VERBOSE("Removed vertex " << removeV[i]->barcode());
     } else {
-      ATH_MSG_WARNING("Failed to remove vertex " <<removeV[i]->barcode());
+      ATH_MSG_WARNING("Failed to remove vertex " << removeV[i]->barcode());
     }
   }
 
   deleteV.sort();
   deleteV.unique();
-  for(dvItr=deleteV.begin(); dvItr!=deleteV.end(); ++dvItr){
-    if( doDebug ) ATH_MSG_VERBOSE("Deleting vertex " <<(*dvItr)->barcode());
-    if( *dvItr) delete (*dvItr);
+  for (dvItr = deleteV.begin(); dvItr != deleteV.end(); ++dvItr) {
+    if (doDebug) ATH_MSG_VERBOSE("Deleting vertex " << (*dvItr)->barcode());
+    if (*dvItr) delete (*dvItr);
   }
 
-
   ////////////////////////////////
   // Remove remaining 1-1 vertices
   ////////////////////////////////
 
-  if( doDebug && doExtra ){
-    std::cout <<"========== BEGIN EVENT BEFORE 1-BODY ==========" <<std::endl;
-    HepMC::Print::line(std::cout,thinEvt);
-    std::cout <<"========== END EVENT BEFORE 1-BODY ==========" <<std::endl;
+  if (doDebug && doExtra) {
+    std::cout << "========== BEGIN EVENT BEFORE 1-BODY ==========" << std::endl;
+    HepMC::Print::line(std::cout, thinEvt);
+    std::cout << "========== END EVENT BEFORE 1-BODY ==========" << std::endl;
   }
 
   // Not clear how to order sweep, so do it one at a time.
@@ -937,36 +837,34 @@ StatusCode CompactHardTruth::execute() {
   HepMC::GenParticle* pin;
   HepMC::GenParticle* pout;
 
-  while( moreV1 ){
+  while (moreV1) {
     moreV1 = false;
-    
+
     HepMC::GenEvent::vertex_iterator v = thinEvt->vertices_begin();
     HepMC::GenEvent::vertex_iterator vE = thinEvt->vertices_end();
 
     // Find next 1->1 vertex
-    for(; v!=vE; ++v){
-      if( (*v)->particles_in_size() != 1 ) continue;
-      if( (*v)->particles_out_size() != 1 ) continue;
+    for (; v != vE; ++v) {
+      if ((*v)->particles_in_size() != 1) continue;
+      if ((*v)->particles_out_size() != 1) continue;
       pin = *((*v)->particles_in_const_begin());
       pout = *((*v)->particles_out_const_begin());
-      if( pin->pdg_id() != pout->pdg_id() ) continue;
+      if (pin->pdg_id() != pout->pdg_id()) continue;
       // Sherpa does 1-body decay of incoming protons :-(
-      if( pin==beams[0] || pin==beams[1] ) continue;
+      if (pin == beams[0] || pin == beams[1]) continue;
       HepMC::GenVertex* pvtx = pin->production_vertex();
-      if( !pvtx || pvtx->barcode()==0 ){
-        ATH_MSG_DEBUG("1->1: missing pvtx for vertex " <<(*v)->barcode());
+      if (!pvtx || pvtx->barcode() == 0) {
+        ATH_MSG_DEBUG("1->1: missing pvtx for vertex " << (*v)->barcode());
         ++m_missCount;
         continue;
       }
 
       moreV1 = true;
       vtx11 = (*v);
-      if( doDebug ) ATH_MSG_DEBUG("One-body " <<pin->barcode() <<" " 
-                                  <<vtx11->barcode()
-                                  <<" " <<pout->barcode());
+      if (doDebug) ATH_MSG_DEBUG("One-body " << pin->barcode() << " " << vtx11->barcode() << " " << pout->barcode());
       break;
     }
-    if( moreV1 ){
+    if (moreV1) {
       HepMC::GenVertex* pvtx = pin->production_vertex();
       pvtx->remove_particle(pin);
       pvtx->add_particle_out(pout);
@@ -975,13 +873,10 @@ StatusCode CompactHardTruth::execute() {
       thinEvt->remove_vertex(vtx11);
       delete pin;
       delete vtx11;
-      if( doDebug ) ATH_MSG_DEBUG("One-body new pvtx " <<pvtx->barcode() <<" "
-                                  <<pvtx->particles_in_size() <<" "
-                                  <<pvtx->particles_out_size() );
+      if (doDebug) ATH_MSG_DEBUG("One-body new pvtx " << pvtx->barcode() << " " << pvtx->particles_in_size() << " " << pvtx->particles_out_size());
     }
   }
 
-
   /////////////////////////////////////
   // Remove dangling particles/vertices
   /////////////////////////////////////
@@ -990,7 +885,7 @@ StatusCode CompactHardTruth::execute() {
   // tree structure. Result is "dangling" partons with 1 -> 0 vertices.
   // FIXME!! Meanwhile discard these if pt < m_danglePtCut.
 
-  if( m_danglePtCut > 0 ){
+  if (m_danglePtCut > 0) {
 
     removePV.clear();
     removeV.clear();
@@ -1000,22 +895,18 @@ StatusCode CompactHardTruth::execute() {
     HepMC::GenEvent::vertex_iterator badv = thinEvt->vertices_begin();
     HepMC::GenEvent::vertex_iterator badvE = thinEvt->vertices_end();
 
-    for(; badv!=badvE; ++badv){
-      if( !(*badv) ) continue;
-      if( (*badv)->particles_in_size() != 1 ||
-          (*badv)->particles_out_size() != 0 ) continue;
-      HepMC::GenVertex::particles_in_const_iterator pitr =
-        (*badv)->particles_in_const_begin();
+    for (; badv != badvE; ++badv) {
+      if (!(*badv)) continue;
+      if ((*badv)->particles_in_size() != 1 || (*badv)->particles_out_size() != 0) continue;
+      HepMC::GenVertex::particles_in_const_iterator pitr = (*badv)->particles_in_const_begin();
       HepMC::GenParticle* pp = *pitr;
-      if( pp->production_vertex() ) continue;
+      if (pp->production_vertex()) continue;
       double pt = pp->momentum().perp();
-      if( pt > m_danglePtMax ) m_danglePtMax = pt;
+      if (pt > m_danglePtMax) m_danglePtMax = pt;
       ++m_dangleFound;
-      if( pt > m_danglePtCut ) continue;
-      if( doDebug ) ATH_MSG_DEBUG("1->0: removing pp,badv,pt " 
-                    <<HepMC::barcode(pp) <<" "
-                    <<HepMC::barcode(*badv) <<" " <<pt);
-      removePV.push_back(vpPair(*badv,pp));
+      if (pt > m_danglePtCut) continue;
+      if (doDebug) ATH_MSG_DEBUG("1->0: removing pp,badv,pt " << HepMC::barcode(pp) << " " << HepMC::barcode(*badv) << " " << pt);
+      removePV.push_back(vpPair(*badv, pp));
       deleteP.push_back(pp);
       removeV.push_back(*badv);
       deleteV.push_back(*badv);
@@ -1023,47 +914,43 @@ StatusCode CompactHardTruth::execute() {
     }
 
     // Actually implement changes -- remove particles from vertices
-    for(unsigned int i=0; i<removePV.size(); ++i){
+    for (unsigned int i = 0; i < removePV.size(); ++i) {
       HepMC::GenVertex* v = removePV[i].first;
       HepMC::GenParticle* p = removePV[i].second;
       v->remove_particle(p);
     }
 
     // Actually implement changes -- remove vertices
-    for(unsigned int i=0; i<removeV.size(); ++i){
-      if( !thinEvt->remove_vertex(removeV[i]) ){
-        ATH_MSG_WARNING("1->0: Failed to remove vertex "<<HepMC::barcode(removeV[i]));
-      }
+    for (unsigned int i = 0; i < removeV.size(); ++i) {
+      if (!thinEvt->remove_vertex(removeV[i])) { ATH_MSG_WARNING("1->0: Failed to remove vertex " << HepMC::barcode(removeV[i])); }
     }
 
     // Delete removed particles/vertices
     deleteV.sort();
     deleteV.unique();
-    for(dvItr=deleteV.begin(); dvItr!=deleteV.end(); ++dvItr){
-      if( *dvItr) delete (*dvItr);
+    for (dvItr = deleteV.begin(); dvItr != deleteV.end(); ++dvItr) {
+      if (*dvItr) delete (*dvItr);
     }
 
     deleteP.sort();
     deleteP.unique();
-    for(dpItr=deleteP.begin(); dpItr!=deleteP.end(); ++dpItr){
-      if( *dpItr ) delete (*dpItr);
+    for (dpItr = deleteP.begin(); dpItr != deleteP.end(); ++dpItr) {
+      if (*dpItr) delete (*dpItr);
     }
-  }// end m_danglePtCut
-
-
+  } // end m_danglePtCut
 
   /////////////////////////
   // Done - examine results
   /////////////////////////
 
-  if( doPrint ){
-    std::cout <<"========== BEGIN EVENT AFTER THINNING ==========" <<std::endl;
-    HepMC::Print::line(std::cout,thinEvt);
-    std::cout <<"========== END EVENT AFTER THINNING ==========" <<std::endl;
+  if (doPrint) {
+    std::cout << "========== BEGIN EVENT AFTER THINNING ==========" << std::endl;
+    HepMC::Print::line(std::cout, thinEvt);
+    std::cout << "========== END EVENT AFTER THINNING ==========" << std::endl;
   }
 
   m_thinParticles += thinEvt->particles_size();
-  m_thinVertices +=  thinEvt->vertices_size();
+  m_thinVertices += thinEvt->vertices_size();
 
   /////////////////////////////////////////
   // Save thinned event in output container
@@ -1071,77 +958,70 @@ StatusCode CompactHardTruth::execute() {
 
   thinnedMcEvts->push_back(thinEvt);
   return StatusCode::SUCCESS;
-
 }
 
-/////////////////////////////////////////////////////////////////// 
-// Const methods: 
+///////////////////////////////////////////////////////////////////
+// Const methods:
 ///////////////////////////////////////////////////////////////////
 
 // Parton is quark or gluon
-bool CompactHardTruth::isParton( const HepMC::GenParticle* p ) {
+bool CompactHardTruth::isParton(const HepMC::GenParticle* p) {
   int ida = abs(p->pdg_id());
-  if( ida==21 || (ida>0 && ida<10) ) return true;
+  if (ida == 21 || (ida > 0 && ida < 10)) return true;
   // Diquarks too -- xx0x
-  if( ida>1000 && ida<10000 && (ida/10)%10==0 ) return true;
+  if (ida > 1000 && ida < 10000 && (ida / 10) % 10 == 0) return true;
   return false;
 }
 
 // Final parton is quark or gluon with no decay vertex.
 // It should have been detached from hadronization vertex.
-bool CompactHardTruth::isFinalParton( const HepMC::GenParticle* p ) {
-  if( !isParton(p) ) return false;
+bool CompactHardTruth::isFinalParton(const HepMC::GenParticle* p) {
+  if (!isParton(p)) return false;
   HepMC::GenVertex* endp = p->end_vertex();
-  if( endp ) return false;
+  if (endp) return false;
   return true;
 }
-  
+
 // Hadron excludes leptons and BSM particles
-// Includes clusters to find, e.g., partons->cluster vertices 
-bool CompactHardTruth::isHadron( const HepMC::GenParticle* p ) {
+// Includes clusters to find, e.g., partons->cluster vertices
+bool CompactHardTruth::isHadron(const HepMC::GenParticle* p) {
   int ida = abs(p->pdg_id());
-  if( (ida>=80 && ida<1000000) || ida>9000000 ) return true;
+  if ((ida >= 80 && ida < 1000000) || ida > 9000000) return true;
   return false;
-}  
+}
 
 // Total cluster FourVectors
 
-HepMC::FourVector CompactHardTruth::vtxInMom(HepMC::GenVertex* v){
+HepMC::FourVector CompactHardTruth::vtxInMom(HepMC::GenVertex* v) {
   double px = 0;
   double py = 0;
   double pz = 0;
   double e = 0;
-  HepMC::GenVertex::particles_in_const_iterator it = 
-    v->particles_in_const_begin();
-  HepMC::GenVertex::particles_in_const_iterator itE = 
-    v->particles_in_const_end();
-  for(; it!=itE; ++it){
+  HepMC::GenVertex::particles_in_const_iterator it = v->particles_in_const_begin();
+  HepMC::GenVertex::particles_in_const_iterator itE = v->particles_in_const_end();
+  for (; it != itE; ++it) {
     px += (*it)->momentum().px();
     py += (*it)->momentum().py();
     pz += (*it)->momentum().pz();
-    e  += (*it)->momentum().e();
+    e += (*it)->momentum().e();
   }
-  return HepMC::FourVector(px,py,pz,e);
+  return HepMC::FourVector(px, py, pz, e);
 }
 
-HepMC::FourVector CompactHardTruth::vtxOutMom(HepMC::GenVertex* v){
+HepMC::FourVector CompactHardTruth::vtxOutMom(HepMC::GenVertex* v) {
   double px = 0;
   double py = 0;
   double pz = 0;
   double e = 0;
-  HepMC::GenVertex::particles_out_const_iterator it = 
-    v->particles_out_const_begin();
-  HepMC::GenVertex::particles_out_const_iterator itE = 
-    v->particles_out_const_end();
-  for(; it!=itE; ++it){
+  HepMC::GenVertex::particles_out_const_iterator it = v->particles_out_const_begin();
+  HepMC::GenVertex::particles_out_const_iterator itE = v->particles_out_const_end();
+  for (; it != itE; ++it) {
     px += (*it)->momentum().px();
     py += (*it)->momentum().py();
     pz += (*it)->momentum().pz();
-    e  += (*it)->momentum().e();
+    e += (*it)->momentum().e();
   }
-  return HepMC::FourVector(px,py,pz,e);
+  return HepMC::FourVector(px, py, pz, e);
 }
 
-
-} //> end namespace DerivationFramework
-
+} // namespace DerivationFramework
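For orientation while reading the CompactHardTruth hunks above: the thinning code always records vertex/particle edits first (removePV, removeV, deleteP, deleteV) and applies them afterwards, and it sorts and uniques the deletion lists so that an object reached through several vertices is deleted exactly once. Below is a minimal, self-contained sketch of that deferred remove-then-delete idiom; the Vertex/Particle structs and the vpPair name are stand-ins for illustration only, not the HepMC2 API used in the file.

// Illustrative stand-in types only; this is not the HepMC2 GenVertex/GenParticle API.
#include <algorithm>
#include <iostream>
#include <list>
#include <utility>
#include <vector>

struct Particle { int barcode; };

struct Vertex {
  int barcode;
  std::vector<Particle*> particles;
  void remove_particle(Particle* p) {
    particles.erase(std::remove(particles.begin(), particles.end(), p), particles.end());
  }
};

int main() {
  Particle* p = new Particle{101};
  Vertex v1{1, {p}};
  Vertex v2{2, {p}};                       // the same particle hangs off two vertices

  // Pass 1: record the intended edits instead of applying them while iterating.
  typedef std::pair<Vertex*, Particle*> vpPair;
  std::vector<vpPair> removePV = { vpPair(&v1, p), vpPair(&v2, p) };
  std::list<Particle*> deleteP = { p, p }; // collected twice, once per vertex

  // Pass 2: apply the recorded removals.
  for (unsigned int i = 0; i < removePV.size(); ++i)
    removePV[i].first->remove_particle(removePV[i].second);

  // sort/unique guarantees each pointer is deleted exactly once.
  deleteP.sort();
  deleteP.unique();
  for (std::list<Particle*>::iterator it = deleteP.begin(); it != deleteP.end(); ++it)
    delete *it;

  std::cout << "particles left in v1/v2: " << v1.particles.size() << " " << v2.particles.size() << std::endl;
  return 0;
}

Compiled as-is with any C++11 compiler, this prints "particles left in v1/v2: 0 0", showing the single deletion despite the duplicate entry in the delete list.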
diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkMuons/src/components/DerivationFrameworkMuons_entries.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkMuons/src/components/DerivationFrameworkMuons_entries.cxx
index ba7662a3cb55073f89bb088f0c2bd4f458f974a7..adb267e4f8d30f9bbf5f05b9e662323e01bcebde 100644
--- a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkMuons/src/components/DerivationFrameworkMuons_entries.cxx
+++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkMuons/src/components/DerivationFrameworkMuons_entries.cxx
@@ -1,3 +1,7 @@
+/*
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
+*/
+
 #include "DerivationFrameworkMuons/dimuonTaggingTool.h"
 #include "DerivationFrameworkMuons/isolationDecorator.h"
 #include "DerivationFrameworkMuons/mcpDecorator.h"
@@ -7,7 +11,6 @@
 #include "DerivationFrameworkMuons/MuonJetDrTool.h"
 #include "DerivationFrameworkMuons/MuonTruthClassifierFallback.h"
 #include "DerivationFrameworkMuons/MuonTruthIsolationTool.h"
-#include "DerivationFrameworkMuons/MuonJetDrTool.h"
 #include "DerivationFrameworkMuons/MuonTPJpsiVertexFittingAlg.h"
 #include "DerivationFrameworkMuons/VertexDecoratorAlg.h"
 
diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkPhys/share/PHYS.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkPhys/share/PHYS.py
index a2a1d00220dce3654326ab37abbaf7a7c30b3854..e9e483b64cb86705e744435bbcf275589041544c 100644
--- a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkPhys/share/PHYS.py
+++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkPhys/share/PHYS.py
@@ -10,12 +10,12 @@
 
 from DerivationFrameworkCore.DerivationFrameworkMaster import buildFileName, DerivationFrameworkIsMonteCarlo, DerivationFrameworkJob
 from DerivationFrameworkInDet import InDetCommon
-from DerivationFrameworkJetEtMiss.JetCommon import OutputJets
-from DerivationFrameworkJetEtMiss.ExtendedJetCommon import replaceAODReducedJets, addDefaultTrimmedJets, addJetTruthLabel, addQGTaggerTool
-from DerivationFrameworkJetEtMiss import METCommon
 from DerivationFrameworkEGamma import EGammaCommon
 from DerivationFrameworkEGamma import ElectronsCPDetailedContent
 from DerivationFrameworkMuons import MuonsCommon
+from DerivationFrameworkJetEtMiss.JetCommon import OutputJets
+from DerivationFrameworkJetEtMiss.ExtendedJetCommon import replaceAODReducedJets, addDefaultTrimmedJets, addJetTruthLabel, addQGTaggerTool
+from DerivationFrameworkJetEtMiss import METCommon
 from TriggerMenu.api.TriggerAPI import TriggerAPI
 from TriggerMenu.api.TriggerEnums import TriggerPeriod, TriggerType
 from DerivationFrameworkTrigger.TriggerMatchingHelper import TriggerMatchingHelper
diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkPhys/share/PHYSLITE.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkPhys/share/PHYSLITE.py
index 1ca9b696259a0644cd7b2e5807e97f5292620f56..e587d5f38b9f03b1b406509d458d18270733111e 100644
--- a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkPhys/share/PHYSLITE.py
+++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkPhys/share/PHYSLITE.py
@@ -30,16 +30,10 @@ fileName   = buildFileName( derivationFlags.WriteDAOD_PHYSLITEStream )
 PHYSLITEStream = MSMgr.NewPoolRootStream( streamName, fileName )
 PHYSLITEStream.AcceptAlgs(["PHYSLITEKernel"])
 
-### Thinning and augmentation tools lists
-from DerivationFrameworkCore.ThinningHelper import ThinningHelper
-PHYSLITEThinningHelper = ThinningHelper( "PHYSLITEThinningHelper" )
-PHYSLITEThinningHelper.AppendToStream( PHYSLITEStream )
 thinningTools       = []
 AugmentationTools   = []
-
 # Special sequence 
 SeqPHYSLITE = CfgMgr.AthSequencer("SeqPHYSLITE")
-
 #====================================================================
 # TRUTH CONTENT
 #====================================================================
@@ -47,6 +41,7 @@ if DerivationFrameworkIsMonteCarlo:
   from DerivationFrameworkMCTruth.MCTruthCommon import addStandardTruthContents,addPVCollection
   addStandardTruthContents(SeqPHYSLITE)
   addPVCollection(SeqPHYSLITE)
+'''
   from DerivationFrameworkMCTruth.HFHadronsCommon import *
   # Extra classifiers for the Higgs group
   import DerivationFrameworkHiggs.TruthCategories
@@ -200,7 +195,7 @@ PHYSLITEVertexThinningTool = DerivationFramework__VertexThinning(name = "PHYSLIT
                                                                  VertexKey = "PrimaryVertices")
 ToolSvc += PHYSLITEVertexThinningTool
 thinningTools.append(PHYSLITEVertexThinningTool)
-
+'''
 #==============================================================================
 # Jet building
 #==============================================================================
@@ -221,9 +216,17 @@ if (DerivationFrameworkIsMonteCarlo):
 # q/g discrimination
 addQGTaggerTool(jetalg="AntiKt4EMPFlow",sequence=SeqPHYSLITE,algname="QGTaggerToolPFAlg")
 
-# fJVT
-getPFlowfJVT(jetalg='AntiKt4EMPFlow',sequence=SeqPHYSLITE, algname='PHYSLITEJetForwardPFlowJvtToolAlg')
+if DerivationFrameworkIsMonteCarlo:
+   # Schedule the two energy density tools for running after the pseudojets are created.
+   for alg in ['EDTruthCentralAlg', 'EDTruthForwardAlg']:
+      if hasattr(topSequence, alg):
+         edtalg = getattr(topSequence, alg)
+         delattr(topSequence, alg)
+         SeqPHYSLITE += edtalg
 
+# fJVT
+# getPFlowfJVT(jetalg='AntiKt4EMPFlow',sequence=SeqPHYSLITE, algname='PHYSLITEJetForwardPFlowJvtToolAlg')
+'''
 #====================================================================
 # Flavour tagging   
 #====================================================================
@@ -249,11 +252,11 @@ sysLoader = CfgMgr.CP__SysListLoaderAlg( 'SysLoaderAlg' )
 sysLoader.systematicsList= ['']
 SeqPHYSLITE += sysLoader
 
+'''
 dataType = "data"
 
 if DerivationFrameworkIsMonteCarlo:
   dataType = "mc"
-
 #in your c++ code, create a ToolHandle<IPileupReweightingTool>
 #the ToolHandle constructor should be given "CP::PileupReweightingTool/myTool" as its string argument
 from PileupReweighting.AutoconfigurePRW import getLumiCalcFiles
@@ -262,6 +265,7 @@ ToolSvc += CfgMgr.CP__PileupReweightingTool("PHYSLITE_PRWTool",
                                             UnrepresentedDataAction=2,
                                             LumiCalcFiles=getLumiCalcFiles())
 SeqPHYSLITE += CfgMgr.CP__PileupReweightingProvider(Tool=ToolSvc.PHYSLITE_PRWTool,RunSystematics=False)
+'''
 
 # Include, and then set up the electron analysis sequence:
 from EgammaAnalysisAlgorithms.ElectronAnalysisSequence import \
@@ -285,13 +289,11 @@ print( photonSequence ) # For debugging
 SeqPHYSLITE += photonSequence
 
 # Include, and then set up the muon analysis algorithm sequence:
- 
 from MuonAnalysisAlgorithms.MuonAnalysisSequence import makeMuonAnalysisSequence
 muonSequence = makeMuonAnalysisSequence( dataType, shallowViewOutput = False, deepCopyOutput = True, workingPoint = 'Loose.NonIso' )
 muonSequence.configure( inputName = 'Muons',
                         outputName = 'AnalysisMuons' )
 print( muonSequence ) # For debugging
-
 # Add the sequence to the job:
  
 SeqPHYSLITE += muonSequence
@@ -332,7 +334,7 @@ PHYSLITE_cfg = METAssocConfig('AnalysisMET',
                               doPFlow=True)
 METCommon.customMETConfigs.setdefault('AnalysisMET',{})[PHYSLITE_cfg.suffix] = PHYSLITE_cfg
 scheduleMETAssocAlg(sequence=SeqPHYSLITE,configlist="AnalysisMET")
-
+'''
 #====================================================================
 # TRIGGER CONTENT
 #====================================================================
@@ -364,7 +366,7 @@ for trig_item in inputFileSummary['metadata']['/TRIGGER/HLT/Menu']:
     if not 'ChainName' in trig_item: continue
     if trig_item['ChainName'] in trigger_names_full_notau: trigger_names_notau += [ trig_item['ChainName'] ]
     if trig_item['ChainName'] in trigger_names_full_tau:   trigger_names_tau   += [ trig_item['ChainName'] ]
-
+'''
 # Create trigger matching decorations
 trigmatching_helper_notau = TriggerMatchingHelper(name='PHSYLITETriggerMatchingToolNoTau',
         OutputContainerPrefix = "Analysis",
@@ -378,7 +380,7 @@ trigmatching_helper_tau = TriggerMatchingHelper(name='PHSYLITETriggerMatchingToo
         InputMuons="AnalysisMuons",InputTaus="AnalysisTauJets")
 SeqPHYSLITE += trigmatching_helper_notau.alg
 SeqPHYSLITE += trigmatching_helper_tau.alg
-
+'''
 #====================================================================
 # MAIN KERNEL
 #====================================================================
@@ -391,7 +393,6 @@ SeqPHYSLITE += CfgMgr.DerivationFramework__DerivationKernel(
    AugmentationTools = AugmentationTools,
    ThinningTools = thinningTools,
    )
-
 #====================================================================
 # CONTENT LIST  
 #====================================================================
@@ -472,7 +473,7 @@ if DerivationFrameworkIsMonteCarlo:
     addTruth3ContentToSlimmerTool(PHYSLITESlimmingHelper)
 
 # Extra trigger collections
-trigmatching_helper_notau.add_to_slimming(PHYSLITESlimmingHelper)
-trigmatching_helper_tau.add_to_slimming(PHYSLITESlimmingHelper)
+# trigmatching_helper_notau.add_to_slimming(PHYSLITESlimmingHelper)
+# trigmatching_helper_tau.add_to_slimming(PHYSLITESlimmingHelper)
 
 PHYSLITESlimmingHelper.AppendContentToStream(PHYSLITEStream)
diff --git a/PhysicsAnalysis/ElectronPhotonID/ElectronPhotonSelectorTools/CMakeLists.txt b/PhysicsAnalysis/ElectronPhotonID/ElectronPhotonSelectorTools/CMakeLists.txt
index 9d050bda770e934f98de5ab3624f2a0b963aed51..b5b19c5195c508c1e2bd95f04b99b8dabea4c1cd 100644
--- a/PhysicsAnalysis/ElectronPhotonID/ElectronPhotonSelectorTools/CMakeLists.txt
+++ b/PhysicsAnalysis/ElectronPhotonID/ElectronPhotonSelectorTools/CMakeLists.txt
@@ -11,7 +11,7 @@ atlas_add_library( ElectronPhotonSelectorToolsLib
   ElectronPhotonSelectorTools/*.h Root/*.cxx
   PUBLIC_HEADERS ElectronPhotonSelectorTools
   PRIVATE_INCLUDE_DIRS ${ROOT_INCLUDE_DIRS}
-  LINK_LIBRARIES EgammaAnalysisInterfacesLib AsgTools xAODEgamma  xAODTracking
+  LINK_LIBRARIES EgammaAnalysisInterfacesLib AsgTools xAODEgamma  xAODTracking MVAUtils
   xAODHIEvent PATCoreAcceptLib AsgDataHandlesLib
   PRIVATE_LINK_LIBRARIES ${ROOT_LIBRARIES} AsgMessagingLib FourMomUtils xAODCaloEvent
   xAODEventInfo PathResolver )
diff --git a/PhysicsAnalysis/ElectronPhotonID/ElectronPhotonSelectorTools/ElectronPhotonSelectorTools/AsgElectronChargeIDSelectorTool.h b/PhysicsAnalysis/ElectronPhotonID/ElectronPhotonSelectorTools/ElectronPhotonSelectorTools/AsgElectronChargeIDSelectorTool.h
new file mode 100644
index 0000000000000000000000000000000000000000..f63f32b9b51488bb43a7bdc89e9ed74f86df59c0
--- /dev/null
+++ b/PhysicsAnalysis/ElectronPhotonID/ElectronPhotonSelectorTools/ElectronPhotonSelectorTools/AsgElectronChargeIDSelectorTool.h
@@ -0,0 +1,154 @@
+/*
+  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+*/
+
+// Dear emacs, this is -*-c++-*-
+#ifndef __ASGELECTRONCHARGEIDSELECTORTOOL__
+#define __ASGELECTRONCHARGEIDSELECTORTOOL__
+
+
+// Atlas includes
+#include "AsgTools/AsgTool.h"
+#include "MVAUtils/BDT.h"
+#include "EgammaAnalysisInterfaces/IAsgElectronLikelihoodTool.h"
+#include "ElectronPhotonSelectorTools/AsgElectronLikelihoodTool.h"
+#include "PATCore/AcceptData.h"
+#include "AsgDataHandles/ReadHandleKey.h"
+#include <unordered_map>
+
+class AsgElectronChargeIDSelectorTool : public asg::AsgTool, 
+					virtual public IAsgElectronLikelihoodTool
+{
+  ASG_TOOL_CLASS2(AsgElectronChargeIDSelectorTool, IAsgElectronLikelihoodTool, IAsgSelectionTool)
+
+public:
+  /** Standard constructor */
+  AsgElectronChargeIDSelectorTool( const std::string& myname);
+
+
+  /** Standard destructor */
+  virtual ~AsgElectronChargeIDSelectorTool();
+public:
+  /** Gaudi Service Interface method implementations */
+  virtual StatusCode initialize() override;
+
+  // Main methods for IAsgSelectorTool interface
+
+  /** Method to get the plain AcceptInfo.
+      This is needed so that one can already get the AcceptInfo 
+      and query what cuts are defined before the first object 
+      is passed to the tool. */
+  // virtual const asg::AcceptInfo& getAcceptInfo() const override;  // KM: inlined below
+
+  /** The main accept method: using the generic interface */
+  asg::AcceptData accept( const xAOD::IParticle* part ) const override;
+  asg::AcceptData accept( const EventContext& ctx, const xAOD::IParticle* part ) const override;
+
+  /** The main accept method: the actual cuts are applied here */
+  asg::AcceptData accept( const xAOD::Electron* eg ) const {
+    return accept (eg, -99); // mu = -99 as input will force accept to grab the pileup variable from the xAOD object
+  }
+  asg::AcceptData accept( const EventContext& ctx, const xAOD::Electron* eg ) const override {
+    return accept (ctx, eg, -99); // mu = -99 as input will force accept to grab the pileup variable from the xAOD object
+  }
+
+  /** The main accept method: the actual cuts are applied here */
+  asg::AcceptData accept( const xAOD::Egamma* eg ) const {
+    return accept (eg, -99); // mu = -99 as input will force accept to grab the pileup variable from the xAOD object
+  }
+  asg::AcceptData accept(const EventContext& ctx, const xAOD::Egamma* eg ) const override{
+    return accept (ctx, eg, -99); // mu = -99 as input will force accept to grab the pileup variable from the xAOD object
+  }
+
+  /** The main accept method: in case mu not in EventInfo online */
+  asg::AcceptData accept( const xAOD::Electron* eg, double mu ) const;
+  asg::AcceptData accept( const EventContext& ctx, const xAOD::Electron* eg, double mu ) const override;
+
+  /** The main accept method: in case mu not in EventInfo online */
+  asg::AcceptData accept( const xAOD::Egamma* eg, double mu ) const;
+  asg::AcceptData accept( const EventContext& ctx, const xAOD::Egamma* eg, double mu ) const override;
+  
+  // Main methods for IAsgCalculatorTool interface
+ public:
+  /** The main result method: the actual likelihood is calculated here */
+  double calculate( const xAOD::IParticle* part ) const;
+  double calculate( const EventContext& ctx, const xAOD::IParticle* part ) const override;
+
+  /** The main result method: the actual likelihood is calculated here */
+  double calculate( const xAOD::Electron* eg ) const {
+    return calculate (eg, -99); // mu = -99 as input will force accept to grab the pileup variable from the xAOD object
+  }
+  double calculate( const EventContext& ctx, const xAOD::Electron* eg ) const override {
+    return calculate (ctx, eg, -99); // mu = -99 as input will force accept to grab the pileup variable from the xAOD object
+  }
+
+  /** The main result method: the actual likelihood is calculated here */
+  double calculate( const xAOD::Egamma* eg ) const {
+    return calculate (eg, -99); // mu = -99 as input will force calculate to grab the pileup variable from the xAOD object
+  }
+  double calculate( const EventContext &ctx, const xAOD::Egamma* eg ) const override {
+    return calculate (ctx, eg, -99); // mu = -99 as input will force calculate to grab the pileup variable from the xAOD object
+  }
+
+  /** The main result method: the actual likelihood is calculated here */
+  double calculate( const xAOD::Electron* eg, double mu ) const;
+  double calculate( const EventContext &ctx, const xAOD::Electron* eg, double mu ) const override;
+
+  /** The main result method: the actual likelihood is calculated here */
+  double calculate( const xAOD::Egamma* eg, double mu ) const; 
+  double calculate( const EventContext &ctx, const xAOD::Egamma* eg, double mu ) const override; 
+
+  //=========================================================================================== interface identical to AsgElectronLikelihoodTool up to this point
+  inline virtual std::string getOperatingPointName( ) const override
+  { return m_WorkingPoint; }
+
+  inline virtual const asg::AcceptInfo& getAcceptInfo() const override
+  { return m_acceptInfo; }
+
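+  /// Convenience helper returning an empty AcceptData bound to this tool's AcceptInfo (no cuts evaluated)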
+  asg::AcceptData accept() const { return asg::AcceptData(&m_acceptInfo); }
+  
+
+  // Private methods
+private:
+  /// Get the number of primary vertices
+  unsigned int getNPrimVertices(const EventContext& ctx) const;
+
+  // BDT instances: one entry per fold of the n-fold training (a single training is currently shared by the Tight/Medium/Loose operating points, which differ only in the BDT cut)
+  std::vector<MVAUtils::BDT*> m_v_bdts;
+
+  TString m_pid_name;
+  float m_cutOnBDT;
+
+  int m_cutPosition_bdt;
+  asg::AcceptInfo m_acceptInfo;
+ 
+  // Private member variables
+private: 
+  /** Working Point */
+  std::string m_WorkingPoint;
+
+  /// Whether to use the PV (not available for trigger)
+  bool m_usePVCont;
+
+  /// default nPV (when not using PVCont)
+  unsigned int m_nPVdefault;
+
+  /// The primary vertex container name
+  SG::ReadHandleKey<xAOD::VertexContainer> m_primVtxContKey {
+    this, "primaryVertexContainer", "PrimaryVertices",
+    "The primary vertex container name"};
+
+  /// The input ROOT file name that holds the PDFs
+  std::string m_trainingFile;
+
+  // BDT input variables
+  std::vector<std::string> m_inputVars;
+
+}; // End: class definition
+
+
+
+
+
+#endif
+
diff --git a/PhysicsAnalysis/ElectronPhotonID/ElectronPhotonSelectorTools/ElectronPhotonSelectorTools/ElectronPhotonSelectorToolsPythonDict.h b/PhysicsAnalysis/ElectronPhotonID/ElectronPhotonSelectorTools/ElectronPhotonSelectorTools/ElectronPhotonSelectorToolsPythonDict.h
index 54ac5d791c014c9ebc10760615c1c8a8448dd19a..5ec5d03a65b5526d1e3e6a994e7962b5bee4d3c0 100644
--- a/PhysicsAnalysis/ElectronPhotonID/ElectronPhotonSelectorTools/ElectronPhotonSelectorTools/ElectronPhotonSelectorToolsPythonDict.h
+++ b/PhysicsAnalysis/ElectronPhotonID/ElectronPhotonSelectorTools/ElectronPhotonSelectorTools/ElectronPhotonSelectorToolsPythonDict.h
@@ -10,6 +10,7 @@
 #include "ElectronPhotonSelectorTools/AsgPhotonIsEMSelector.h"
 #include "ElectronPhotonSelectorTools/AsgForwardElectronIsEMSelector.h"
 #include "ElectronPhotonSelectorTools/AsgElectronLikelihoodTool.h"
+#include "ElectronPhotonSelectorTools/AsgElectronChargeIDSelectorTool.h"
 #include "ElectronPhotonSelectorTools/EGammaAmbiguityTool.h"
 #include "ElectronPhotonSelectorTools/AsgDeadHVCellRemovalTool.h"
 #endif
diff --git a/PhysicsAnalysis/ElectronPhotonID/ElectronPhotonSelectorTools/Root/AsgElectronChargeIDSelectorTool.cxx b/PhysicsAnalysis/ElectronPhotonID/ElectronPhotonSelectorTools/Root/AsgElectronChargeIDSelectorTool.cxx
new file mode 100644
index 0000000000000000000000000000000000000000..20f547235a3ba4d1434098270b9701a46f518d23
--- /dev/null
+++ b/PhysicsAnalysis/ElectronPhotonID/ElectronPhotonSelectorTools/Root/AsgElectronChargeIDSelectorTool.cxx
@@ -0,0 +1,504 @@
+/*
+  Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
+*/
+
+/**
+   @class AsgElectronChargeIDSelectorTool
+   @brief Electron selector tool to select objects in Athena using an underlying pure ROOT tool.
+ 
+   @author Karsten Koeneke
+   @date   October 2012
+   09-APR-2014, convert to ASGTool (Jovan Mitrevski)
+   22-AUG-2016, copied from AsgElectronLikelihoodTool (Kazuya Mochizuki) 
+*/
+
+// Include this class's header
+#include "ElectronPhotonSelectorTools/AsgElectronChargeIDSelectorTool.h"
+
+
+
+
+// STL includes
+#include <string>
+#include <cstdint>
+#include <cmath>
+
+//EDM includes
+#include "xAODEgamma/Electron.h"
+#include "xAODTracking/Vertex.h"
+#include "xAODTracking/VertexContainer.h"
+#include "xAODCaloEvent/CaloCluster.h"
+#include "xAODEventInfo/EventInfo.h"
+#include "TROOT.h"
+#include "TKey.h"
+#include "TClass.h"
+#include "TEnv.h"
+#include "TFile.h"
+#include "TObjString.h"
+#include "TObjArray.h"
+
+#include "AsgDataHandles/ReadHandle.h"
+#include "AsgTools/CurrentContext.h"
+#include "PathResolver/PathResolver.h"
+
+
+//=============================================================================
+// Standard constructor
+//=============================================================================
+AsgElectronChargeIDSelectorTool::AsgElectronChargeIDSelectorTool(const std::string& myname) :
+  AsgTool(myname) //,m_cutOnBDT(0)//,m_configFile("")//,m_rootTool(0)
+{
+  // Declare the needed properties
+  declareProperty("WorkingPoint",m_WorkingPoint="","The Working Point");
+  //declareProperty("ConfigFile",m_configFile="","The config file to use");
+  declareProperty("usePVContainer", m_usePVCont=true, "Whether to use the PV container");
+  declareProperty("nPVdefault", m_nPVdefault = 0, "The default number of PVs if not counted");
+  //declareProperty("primaryVertexContainer", m_primVtxContKey="PrimaryVertices", "The primary vertex container name" );
+    
+  declareProperty("TrainingFile",  m_trainingFile="", "The input ROOT file name holding training" );
+  declareProperty("CutOnBDT",m_cutOnBDT=0,"Cut on BDT discriminant");
+  m_pid_name=myname.data();
+}
+
+
+//=============================================================================
+// Standard destructor
+//=============================================================================
+AsgElectronChargeIDSelectorTool::~AsgElectronChargeIDSelectorTool()
+{
+  for (auto bdt: m_v_bdts) if (bdt) delete bdt;
+}
+
+
+//=============================================================================
+// Athena initialize method
+//=============================================================================
+StatusCode AsgElectronChargeIDSelectorTool::initialize()
+{
+  m_pid_name.ToLower(); //KM: List of 97% OPs with different PIDs below
+  std::string op_name="loose";
+  bool op_isUserSpecified=false;
+  if (m_cutOnBDT==0) {  //when cutOnBDT is unmodified, adjust it to the 97% OP in each PID menu
+    if      (m_pid_name.Contains("tight") ) op_name="tight" , m_cutOnBDT=-0.109249;//Tight  (with data): -0.109249
+    else if (m_pid_name.Contains("medium")) op_name="medium", m_cutOnBDT=-0.257081;//Medium (with data): -0.257081
+    else                                                      m_cutOnBDT=-0.337671;//Loose  (with data): -0.337671
+  }
+  else op_isUserSpecified=true;
+  m_pid_name="loose";//Now only one training is provided, using loose PID but OP varies for differnt PID
+    
+  std::string display= op_isUserSpecified ? "user specified":"97% signal-eff";
+  ATH_MSG_INFO("OP to use: " << op_name <<", with cut on BDT: "<<m_cutOnBDT<<", which corresponds to "<<display<<" working point.");
+    
+  std::string TrainingFile;
+  if (!m_trainingFile.empty()) {  //If the property was set by the user, take that.
+        
+    TrainingFile= PathResolverFindCalibFile( m_trainingFile );
+    if(TrainingFile==""){//Error if it cant find the conf
+      ATH_MSG_ERROR("Could not locate " << m_trainingFile );
+      return StatusCode::FAILURE;
+    }
+    else ATH_MSG_INFO("trainingfile loaded from: " << TrainingFile );
+
+  }
+  else {
+    ATH_MSG_ERROR("No training file specified via the TrainingFile property");
+    return StatusCode::FAILURE;
+  }
+    
+  unsigned nfold=1;
+  TFile* bdtfile = TFile::Open(TrainingFile.data());
+  if (!bdtfile) {
+    ATH_MSG_ERROR("Input file found to be empty!! "<< TrainingFile);
+    return StatusCode::FAILURE;
+  }
+  else {
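+    // Scan the top-level directories of the training file: their names encode the fold configuration as ECIDS_<pid>_<fold>o<nfold>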
+    TIter next(bdtfile->GetListOfKeys());
+    TKey *key;
+    while ((key = (TKey*)next())) {
+      TClass *clas = gROOT->GetClass(key->GetClassName());
+      if (!clas->InheritsFrom("TDirectoryFile")) continue;
+      TDirectory *td = (TDirectoryFile*)key->ReadObj();
+      std::string dirName =td->GetName();
+      if (dirName.find(m_pid_name)!=std::string::npos) {
+        std::string foldconf=dirName.substr(dirName.rfind("_")+1,-1);
+        // std::string f_index=foldconf.substr(0,foldconf.find("o"));
+        std::string s_nfold=foldconf.substr(foldconf.find("o")+1,-1);
+        nfold=atoi(s_nfold.data());
+        break;
+      }
+    }
+  }
+    
+  ATH_MSG_INFO("ECIDS nfold configuration: "<<nfold);
+    
+  TObjArray* toa= (TObjArray*) bdtfile->Get("/ECIDS_"+m_pid_name+TString::Format("_0o%d",nfold)+"/variables");
+  std::string commaSepVars="";
+  if (toa && toa->GetEntries()>0) {
+    TObjString *tos= (TObjString*) toa->At(0);
+    commaSepVars=tos->GetString().Data();
+    ATH_MSG_INFO("Variables for ECIDS= "<<commaSepVars);
+  }
+  else ATH_MSG_FATAL("Cannot access the list of input variables @"<<bdtfile->GetName()<<":/ECIDS_"+m_pid_name+TString::Format("_0o%d",nfold)+"/variables");
+    
+  //prepare m_inputVars
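+  // Split the comma-separated string into individual variable names, preserving the order used at training time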
+  m_inputVars.clear();
+  while (commaSepVars.find(",")!=std::string::npos) {
+    m_inputVars.push_back(commaSepVars.substr(0,commaSepVars.find(",")));
+    commaSepVars.erase(0,commaSepVars.find(",")+1);
+  }
+  m_inputVars.push_back(commaSepVars.substr(0,-1));//push back the last element
+
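+  // Load one BDT per fold; at evaluation time the fold is selected from the event number (see calculate)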
+  for (unsigned i_fold=0; i_fold<nfold; i_fold++) {
+    TString treename="/ECIDS_"+m_pid_name+TString::Format("_%do%d",i_fold,nfold)+"/BDT";
+    //std::cout<<"Trying to access a ttree with name: "<<treename<<std::endl;
+    TTree* tree = (TTree*)bdtfile->Get(treename);
+    m_v_bdts.push_back(new MVAUtils::BDT(tree));
+  }
+    
+  ///-----------End of text config----------------------------
+
+  // Setup primary vertex key handle
+  ATH_CHECK( m_primVtxContKey.initialize(m_usePVCont) );
+    
+  m_cutPosition_bdt = m_acceptInfo.addCut( "bdt", "pass bdt" );
+    
+  return StatusCode::SUCCESS ;
+}
+
+//=============================================================================
+// The main accept method: the actual cuts are applied here
+//=============================================================================
+asg::AcceptData AsgElectronChargeIDSelectorTool::accept(const xAOD::Electron* el, double mu ) const
+{
+  //Backwards compatibility
+  return accept(Gaudi::Hive::currentContext(), el, mu );
+}
+asg::AcceptData AsgElectronChargeIDSelectorTool::accept(const EventContext& ctx,  const xAOD::Electron* eg, double mu ) const
+{
+    
+  double bdt=calculate(ctx,eg,mu);
+
+  ATH_MSG_VERBOSE("\t accept( ctx, el, mu ), bdt="<<bdt);
+
+  asg::AcceptData acceptBDT(&m_acceptInfo);
+  acceptBDT.clear();
+
+  acceptBDT.setCutResult(m_cutPosition_bdt,bdt>m_cutOnBDT);
+   
+  return acceptBDT;
+}
+
+//=============================================================================
+// The accept method for a generic xAOD::Egamma object
+//=============================================================================
+asg::AcceptData AsgElectronChargeIDSelectorTool::accept(const xAOD::Egamma* eg, double mu ) const
+{
+  //Backwards compatibility
+  return accept(Gaudi::Hive::currentContext(), eg, mu );
+}
+asg::AcceptData AsgElectronChargeIDSelectorTool::accept(const EventContext& ctx,  const xAOD::Egamma* eg, double mu) const
+{
+  double bdt=calculate(ctx,eg,mu);
+    
+  ATH_MSG_VERBOSE("\t accept( ctx, eg, mu ), bdt="<<bdt);
+
+  asg::AcceptData acceptBDT(&m_acceptInfo);    
+  acceptBDT.clear();
+    
+  acceptBDT.setCutResult(m_cutPosition_bdt,bdt>m_cutOnBDT);
+    
+  return acceptBDT;
+}
+
+//=============================================================================
+// The main result method: the actual likelihood is calculated here
+//=============================================================================
+double AsgElectronChargeIDSelectorTool::calculate( const EventContext& ctx, const xAOD::Electron* eg, double mu ) const
+{
+    
+  ATH_MSG_VERBOSE("\t AsgElectronChargeIDSelectorTool::calculate( &ctx, *eg, mu= "<<(&ctx)<<", "<<eg<<", "<<mu<<" )");
+    
+  if ( !eg ) {
+    ATH_MSG_ERROR ("Failed, no egamma object.");
+    return -1;
+  }
+    
+  const xAOD::CaloCluster* cluster = eg->caloCluster();
+  if ( !cluster ) {
+    ATH_MSG_ERROR ("Failed, no cluster.");
+    return -1;
+  }
+    
+  const double energy =  cluster->e();
+  const float eta = cluster->etaBE(2);
+  if ( fabs(eta) > 300.0 ) {
+    ATH_MSG_ERROR ("Failed, eta range.");
+    return -1;
+  }
+    
+  double et = 0.;// transverse energy of the electron (using the track eta)
+  if (eg->trackParticle() )
+    et     = ( cosh(eg->trackParticle()->eta()) != 0.) ? energy/cosh(eg->trackParticle()->eta()) : 0.;
+  else et  = ( cosh(eta) != 0.) ? energy/cosh(eta) : 0.;
+    
+    
+  // number of track hits and other track quantities
+  uint8_t nSCT(0);
+  float trackqoverp(0.0);
+  float trackqoverpsig(0.0);
+  int   charge(0);
+  int   lifeSign(0);
+  float trackchi2(0.0);
+  float avgCharge_SCTw(0.0);
+  float d0(0.0);
+  float z0(0.0);
+  float phi0(0.0);
+  float theta(0.0);
+  float EoverP(0.0);
+  float d0sigma(0.0);
+  double dpOverp(0.0);
+  float TRT_PID(0.0);
+  //double trans_TRT_PID(0.0);
+  float deltaPhi1=0, deltaPhi2=0;
+  float deltaPhiFromLM=0;
+  float deltaPhiRescaled2=0;//deltaEta=0,
+  //double rTRT(0.0);
+    
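+  // Transverse-energy vector of the cluster (layer 2), used below to define the sign of d0 relative to the cluster direction (lifeSign)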
+  TVector2 el_cluster; el_cluster.SetMagPhi(cluster->energyBE(2)/cosh(eta),cluster->phiBE(2));
+    
+  bool allFound = true;
+  // retrieve associated TrackParticle
+  const xAOD::TrackParticle* t = eg->trackParticle();
+  if (t) {
+    trackqoverp = t->qOverP();
+    charge= t->charge();
+    d0 = t->d0();
+
+    if(std::find(m_inputVars.begin(),m_inputVars.end(), "z0sinTheta" )!= m_inputVars.end()) {
+      z0 = t->z0();
+      theta = t->theta();
+    }
+
+    if(std::find(m_inputVars.begin(),m_inputVars.end(), "chi2oftrackfit" )!= m_inputVars.end())
+      trackchi2 = t->chiSquared();
+        
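+    // Azimuthal direction of the transverse impact parameter: perpendicular to the track phi, with the side chosen by the sign of d0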
+    phi0 = t->phi() + (d0>=0? M_PI/2 : -M_PI/2);
+    TVector2 d0_direction;    d0_direction.SetMagPhi(fabs(d0),phi0);
+    float inner_product = el_cluster.X()*d0_direction.X() + el_cluster.Y()*d0_direction.Y();
+    lifeSign = inner_product>=0? 1 : -1;
+        
+    EoverP   = energy * fabs(t->qOverP());
+    if(std::find(m_inputVars.begin(),m_inputVars.end(), "d0Err" )!= m_inputVars.end() or
+       std::find(m_inputVars.begin(),m_inputVars.end(), "d0Sig" )!= m_inputVars.end()) {
+      float vard0 = t->definingParametersCovMatrix()(0,0);
+      if (vard0 > 0) {
+        d0sigma=sqrtf(vard0);
+      }
+    }
+        
+    //KM: calculation of SCT-weighted charge (the sums below are local and distinct from the track charge above)
+    float sumCharge = 0, SCT = 0;
+    for (unsigned TPit = 0; TPit < eg->nTrackParticles(); TPit++) {
+      uint8_t temp_NSCTHits;
+      if(eg->trackParticle(TPit)) {
+        eg->trackParticle(TPit)->summaryValue(temp_NSCTHits, xAOD::numberOfSCTHits);
+
+        SCT += temp_NSCTHits;
+        sumCharge += temp_NSCTHits*eg->trackParticle(TPit)->charge();
+      }
+      else ATH_MSG_WARNING("This electron has no track particle associated; assigning #SCT-hits = 0" );
+    }
+    avgCharge_SCTw= SCT!=0 ? eg->charge()*sumCharge/SCT : 0;
+        
+    const std::vector<float>&cov= t->definingParametersCovMatrixVec();
+    trackqoverpsig= cov[14];
+        
+    if(std::find(m_inputVars.begin(),m_inputVars.end(), "nSctHits" )!= m_inputVars.end() )
+      allFound = allFound && t->summaryValue(nSCT, xAOD::numberOfSCTHits);
+        
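+    // Note: TRT_PID is never filled above, so the transformed value below is effectively unused (trans_TRT_PID is commented out)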
+    //Transform the TRT PID output for use in the LH tool.
+    double fEpsilon = 1.0e-30;  // to avoid zero division
+    double pid_tmp = TRT_PID;
+    if (pid_tmp >= 1.0) pid_tmp = 1.0 - 1.0e-15;  //this number comes from TMVA
+    else if (pid_tmp <= fEpsilon) pid_tmp = fEpsilon;
+        
+    if(std::find(m_inputVars.begin(),m_inputVars.end(), "deltaPoverP" )!= m_inputVars.end() ) {
+      unsigned int index;
+      if( t->indexOfParameterAtPosition(index, xAOD::LastMeasurement) ) {
+                
+        double refittedTrack_LMqoverp  =
+          t->charge() / sqrt(std::pow(t->parameterPX(index), 2) +
+                             std::pow(t->parameterPY(index), 2) +
+                             std::pow(t->parameterPZ(index), 2));
+                
+        dpOverp = 1 - trackqoverp/(refittedTrack_LMqoverp);
+      }
+    }
+        
+  }
+  else {
+    allFound=false;
+    ATH_MSG_WARNING ( "Failed, no track particle: et= " << et << "eta= " << eta );
+  }
+    
+  float Rphi(0);//float Reta(0), Rphi(0),  Rhad1(0), Rhad(0), ws3(0), w2(0), f1(0), Eratio(0), f3(0);
+  allFound = allFound && eg->showerShapeValue(Rphi, xAOD::EgammaParameters::Rphi);// rphi e233/e237
+  // allFound = allFound && eg->trackCaloMatchValue(deltaEta, xAOD::EgammaParameters::deltaEta1);
+    
+  // difference between the cluster phi (sampling 2) and the phi of the track extrapolated from the last measurement point.
+  allFound = allFound && eg->trackCaloMatchValue(deltaPhiRescaled2, xAOD::EgammaParameters::deltaPhiRescaled2);
+    
+  //if(m_map_inputs.find("deltaphi1"              )!= m_map_inputs.end())
+  if(std::find(m_inputVars.begin(),m_inputVars.end(), "deltaphi1" )!= m_inputVars.end() )
+    allFound = allFound && eg->trackCaloMatchValue(deltaPhi1, xAOD::EgammaParameters::deltaPhi1);
+  // if(m_map_inputs.find("deltaphi2"              )!= m_map_inputs.end() or
+  //    m_map_inputs.find("deltaDeltaPhiFirstAndLM")!= m_map_inputs.end())
+  if(std::find(m_inputVars.begin(),m_inputVars.end(), "deltaphi2"              )!= m_inputVars.end() or
+     std::find(m_inputVars.begin(),m_inputVars.end(), "deltaDeltaPhiFirstAndLM")!= m_inputVars.end() )
+    allFound = allFound && eg->trackCaloMatchValue(deltaPhi2, xAOD::EgammaParameters::deltaPhi2);
+  //if(m_map_inputs.find("deltaDeltaPhiFirstAndLM")!= m_map_inputs.end())
+  if(std::find(m_inputVars.begin(),m_inputVars.end(), "deltaDeltaPhiFirstAndLM" )!= m_inputVars.end() )
+    allFound = allFound && eg->trackCaloMatchValue(deltaPhiFromLM, xAOD::EgammaParameters::deltaPhiFromLastMeasurement);
+    
+  // Get the number of primary vertices in this event
+  // double ip = static_cast<double>(m_nPVdefault);
+  // if(mu < 0) // use npv if mu is negative (not given)
+  //   ip = static_cast<double>(m_usePVCont ? this->getNPrimVertices() : m_nPVdefault);
+  // else ip = mu;
+    
+  if (!allFound) ATH_MSG_FATAL("Missing input variable for ECIDS BDT calculation");
+    
+  const xAOD::EventInfo* eventInfo = nullptr;
+  if (evtStore()->retrieve(eventInfo,"EventInfo").isFailure() || !eventInfo) {
+    ATH_MSG_WARNING ( "Cannot access event info; returning -1" );
+    return -1;
+  }
+  unsigned bdt_index=eventInfo->eventNumber()%unsigned(m_v_bdts.size());
+    
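+  // Build the BDT input vector in the same order as m_inputVars (the training order); the fold index chosen above picks which BDT evaluates this event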
+  std::vector<float> v_inputs;
+  for (auto var: m_inputVars) {
+    if (var == "pt"                     ) v_inputs.push_back(et                	     );
+    if (var == "eta"                    ) v_inputs.push_back(eta               	     );
+    if (var == "abs_eta"                ) v_inputs.push_back(fabs(eta)         	     );
+    if (var == "avgCharge_SCTw"         ) v_inputs.push_back(avgCharge_SCTw    	     );
+    if (var == "d0"                     ) v_inputs.push_back(d0                	     );
+    if (var == "ld0"                    ) v_inputs.push_back(lifeSign*d0       	     );
+    if (var == "cd0"                    ) v_inputs.push_back(charge*d0         	     );
+    if (var == "EoverP"                 ) v_inputs.push_back(EoverP            	     );
+    if (var == "deltaphi1"              ) v_inputs.push_back(deltaPhi1         	     );
+    if (var == "deltaphiRes"            ) v_inputs.push_back(deltaPhiRescaled2 	     );
+    if (var == "Rphi"                   ) v_inputs.push_back(Rphi              	     );
+    if (var == "qoverpSig"              ) v_inputs.push_back(trackqoverpsig    	     );
+    if (var == "nSctHits"               ) v_inputs.push_back(nSCT              	     );
+    if (var == "z0sinTheta"             ) v_inputs.push_back(z0*sin(theta)     	     );
+    if (var == "d0Err"                  ) v_inputs.push_back(d0sigma           	     );
+    if (var == "d0Sig"                  ) v_inputs.push_back(d0/d0sigma        	     );
+    if (var == "deltaphi2"              ) v_inputs.push_back(deltaPhi2         	     );
+    if (var == "chi2oftrackfit"         ) v_inputs.push_back(trackchi2         	     );
+    if (var == "deltaPoverP"            ) v_inputs.push_back(dpOverp           	     );
+    if (var == "deltaDeltaPhiFirstAndLM") v_inputs.push_back(deltaPhi2-deltaPhiFromLM);
+  }
+
+  ATH_MSG_VERBOSE("\t\t event# "                                   <<eventInfo->eventNumber()                                                                                                                         <<std::endl<<
+		  "xAOD variables: pt                           = "<< et                       <<",\t isRequested= "<<(std::find(m_inputVars.begin(),m_inputVars.end(),"pt"                     )!=m_inputVars.end() )<<std::endl<<
+                  "\t\t xAOD variables: eta                     = "<< eta                      <<",\t isRequested= "<<(std::find(m_inputVars.begin(),m_inputVars.end(),"eta"                    )!=m_inputVars.end() )<<std::endl<<
+                  "\t\t xAOD variables: abs_eta                 = "<< fabs(eta)                <<",\t isRequested= "<<(std::find(m_inputVars.begin(),m_inputVars.end(),"abs_eta"                )!=m_inputVars.end() )<<std::endl<<
+                  "\t\t xAOD variables: avgCharge_SCTw          = "<< avgCharge_SCTw           <<",\t isRequested= "<<(std::find(m_inputVars.begin(),m_inputVars.end(),"avgCharge_SCTw"         )!=m_inputVars.end() )<<std::endl<<
+                  "\t\t xAOD variables: d0                      = "<< d0                       <<",\t isRequested= "<<(std::find(m_inputVars.begin(),m_inputVars.end(),"d0"                     )!=m_inputVars.end() )<<std::endl<<
+                  "\t\t xAOD variables: ld0                     = "<< lifeSign*d0              <<",\t isRequested= "<<(std::find(m_inputVars.begin(),m_inputVars.end(),"ld0"                    )!=m_inputVars.end() )<<std::endl<<
+                  "\t\t xAOD variables: cd0                     = "<< charge*d0                <<",\t isRequested= "<<(std::find(m_inputVars.begin(),m_inputVars.end(),"cd0"                    )!=m_inputVars.end() )<<std::endl<<
+                  "\t\t xAOD variables: EoverP                  = "<< EoverP                   <<",\t isRequested= "<<(std::find(m_inputVars.begin(),m_inputVars.end(),"EoverP"                 )!=m_inputVars.end() )<<std::endl<<
+                  "\t\t xAOD variables: deltaphi1               = "<< deltaPhi1                <<",\t isRequested= "<<(std::find(m_inputVars.begin(),m_inputVars.end(),"deltaphi1"              )!=m_inputVars.end() )<<std::endl<<
+                  "\t\t xAOD variables: deltaphiRes             = "<< deltaPhiRescaled2        <<",\t isRequested= "<<(std::find(m_inputVars.begin(),m_inputVars.end(),"deltaphiRes"            )!=m_inputVars.end() )<<std::endl<<
+                  "\t\t xAOD variables: Rphi                    = "<< Rphi                     <<",\t isRequested= "<<(std::find(m_inputVars.begin(),m_inputVars.end(),"Rphi"                   )!=m_inputVars.end() )<<std::endl<<
+                  "\t\t xAOD variables: qoverpSig               = "<< trackqoverpsig           <<",\t isRequested= "<<(std::find(m_inputVars.begin(),m_inputVars.end(),"qoverpSig"              )!=m_inputVars.end() )<<std::endl<<
+                  "\t\t xAOD variables: nSctHits                = "<< unsigned(nSCT)           <<",\t isRequested= "<<(std::find(m_inputVars.begin(),m_inputVars.end(),"nSctHits"               )!=m_inputVars.end() )<<std::endl<<
+                  "\t\t xAOD variables: z0sinTheta              = "<< z0*sin(theta)            <<",\t isRequested= "<<(std::find(m_inputVars.begin(),m_inputVars.end(),"z0sinTheta"             )!=m_inputVars.end() )<<std::endl<<
+                  "\t\t xAOD variables: d0Err                   = "<< d0sigma                  <<",\t isRequested= "<<(std::find(m_inputVars.begin(),m_inputVars.end(),"d0Err"                  )!=m_inputVars.end() )<<std::endl<<
+                  "\t\t xAOD variables: d0Sig                   = "<< d0/d0sigma               <<",\t isRequested= "<<(std::find(m_inputVars.begin(),m_inputVars.end(),"d0Sig"                  )!=m_inputVars.end() )<<std::endl<<
+                  "\t\t xAOD variables: deltaphi2               = "<< deltaPhi2                <<",\t isRequested= "<<(std::find(m_inputVars.begin(),m_inputVars.end(),"deltaphi2"              )!=m_inputVars.end() )<<std::endl<<
+                  "\t\t xAOD variables: chi2oftrackfit          = "<< trackchi2                <<",\t isRequested= "<<(std::find(m_inputVars.begin(),m_inputVars.end(),"chi2oftrackfit"         )!=m_inputVars.end() )<<std::endl<<
+                  "\t\t xAOD variables: deltaPoverP             = "<< dpOverp                  <<",\t isRequested= "<<(std::find(m_inputVars.begin(),m_inputVars.end(),"deltaPoverP"            )!=m_inputVars.end() )<<std::endl<<
+                  "\t\t xAOD variables: deltaDeltaPhiFirstandLM = "<< deltaPhi2-deltaPhiFromLM <<",\t isRequested= "<<(std::find(m_inputVars.begin(),m_inputVars.end(),"deltaDeltaPhiFirstAndLM")!=m_inputVars.end() )<<std::endl<<
+                  "\t\t xAOD variables: AllFound                = "<<allFound            );
+  
+  ////KM: dumping variables, only variables used by BDT
+  // std::cout<<"\t\t event# "<<eventInfo->eventNumber()<<std::endl;
+  // unsigned i=0;
+  // for (auto inputVar: m_inputVars) {
+  //   std::cout<<"\t kmdebug: "<<inputVar<<"\t = "<<v_inputs[i]<<std::endl; i++;
+  // }
+  
+  //double bdt_output = m_v_bdts.at(bdt_index)->GetGradBoostMVA(m_v_bdts.at(bdt_index)->GetPointers());
+  double bdt_output = m_v_bdts.at(bdt_index)->GetGradBoostMVA(v_inputs);
+  ATH_MSG_DEBUG("ECIDS-BDT= "<<bdt_output);
+  //std::cout<<"\t kmdebug: \t ECIDS-BDT= "<<bdt_output<<std::endl;
+
+  return bdt_output;
+}
+
+//=============================================================================
+// The calculate method for a generic xAOD::Egamma object (not implemented)
+//=============================================================================
+double AsgElectronChargeIDSelectorTool::calculate( const xAOD::Egamma* eg, double mu ) const
+{
+  //Backward compatibility
+  return calculate(Gaudi::Hive::currentContext(), eg, mu);
+}
+
+double AsgElectronChargeIDSelectorTool::calculate( const EventContext& ctx, const xAOD::Egamma* eg, double mu ) const
+{
+  ATH_MSG_VERBOSE("AsgElectronChargeIDSelectorTool::calculate( &ctx ="<<(&ctx)<<", *eg "<<eg<<", mu= "<<mu<< " )");
+  ATH_MSG_WARNING("Method not implemented for egamma object! Reurning -1!!");
+    
+  return -9;
+}
+
+//=============================================================================
+asg::AcceptData AsgElectronChargeIDSelectorTool::accept(const xAOD::IParticle* part) const
+{
+  //Backward compatibility
+  return accept(Gaudi::Hive::currentContext(), part);
+}
+asg::AcceptData AsgElectronChargeIDSelectorTool::accept(const EventContext& ctx, const xAOD::IParticle* part) const
+{
+  if(part->type() == xAOD::Type::Electron){
+    const xAOD::Electron* el = static_cast<const xAOD::Electron*>(part);
+    return accept(ctx, el);
+  }
+  
+  ATH_MSG_ERROR("Input is not an electron");
+  return asg::AcceptData(&m_acceptInfo);
+}
+
+double AsgElectronChargeIDSelectorTool::calculate(const xAOD::IParticle* part) const
+{
+  //Backward compatibility
+  return calculate(Gaudi::Hive::currentContext(), part);
+}
+
+double AsgElectronChargeIDSelectorTool::calculate(const EventContext& ctx, const xAOD::IParticle* part) const
+{
+  if(part->type() == xAOD::Type::Electron){
+    const xAOD::Electron* el = static_cast<const xAOD::Electron*>(part);
+    return calculate(ctx, el);
+  }
+  
+  ATH_MSG_ERROR ( "Input is not an electron!!" );
+  return -19;
+}
+
+//=============================================================================
+// Helper method to get the number of primary vertices
+// Note: this loops over all vertices in the event, so avoid calling it once per electron where possible.
+//=============================================================================
+unsigned int AsgElectronChargeIDSelectorTool::getNPrimVertices(const EventContext& ctx) const
+{
+  unsigned int nVtx(0);
+  SG::ReadHandle<xAOD::VertexContainer> vtxCont (m_primVtxContKey, ctx); 
+  for ( unsigned int i = 0; i < vtxCont->size(); i++ ) {
+      const xAOD::Vertex* vxcand = vtxCont->at(i);
+      if ( vxcand->nTrackParticles() >= 2 ) nVtx++;
+  }
+  return nVtx;
+}
diff --git a/PhysicsAnalysis/ElectronPhotonID/ElectronPhotonSelectorTools/src/components/ElectronPhotonSelectorTools_entries.cxx b/PhysicsAnalysis/ElectronPhotonID/ElectronPhotonSelectorTools/src/components/ElectronPhotonSelectorTools_entries.cxx
index 1cfb85d02dd6965e132c50c633575d024deb5b49..d87fb21cf0e65b877f087a3cab5e75b30920205f 100644
--- a/PhysicsAnalysis/ElectronPhotonID/ElectronPhotonSelectorTools/src/components/ElectronPhotonSelectorTools_entries.cxx
+++ b/PhysicsAnalysis/ElectronPhotonID/ElectronPhotonSelectorTools/src/components/ElectronPhotonSelectorTools_entries.cxx
@@ -3,6 +3,7 @@
 #include "ElectronPhotonSelectorTools/AsgPhotonIsEMSelector.h"
 #include "ElectronPhotonSelectorTools/AsgForwardElectronIsEMSelector.h"
 #include "ElectronPhotonSelectorTools/EGammaAmbiguityTool.h"
+#include "ElectronPhotonSelectorTools/AsgElectronChargeIDSelectorTool.h"
 #include "ElectronPhotonSelectorTools/AsgDeadHVCellRemovalTool.h"
 
 DECLARE_COMPONENT( AsgElectronIsEMSelector )
@@ -10,5 +11,6 @@ DECLARE_COMPONENT( AsgElectronLikelihoodTool )
 DECLARE_COMPONENT( AsgPhotonIsEMSelector )
 DECLARE_COMPONENT( AsgForwardElectronIsEMSelector )
 DECLARE_COMPONENT( EGammaAmbiguityTool )
+DECLARE_COMPONENT( AsgElectronChargeIDSelectorTool )
 DECLARE_COMPONENT( AsgDeadHVCellRemovalTool )
 
diff --git a/PhysicsAnalysis/Interfaces/MuonAnalysisInterfaces/MuonAnalysisInterfaces/IMuonTriggerScaleFactors.h b/PhysicsAnalysis/Interfaces/MuonAnalysisInterfaces/MuonAnalysisInterfaces/IMuonTriggerScaleFactors.h
index 92ce4ec3a354635a6321873c74eadffdacdd067b..2610b5565a1766911ad34550592313ca37ef6787 100644
--- a/PhysicsAnalysis/Interfaces/MuonAnalysisInterfaces/MuonAnalysisInterfaces/IMuonTriggerScaleFactors.h
+++ b/PhysicsAnalysis/Interfaces/MuonAnalysisInterfaces/MuonAnalysisInterfaces/IMuonTriggerScaleFactors.h
@@ -11,7 +11,6 @@
 #include "xAODMuon/Muon.h"
 #include "xAODMuon/MuonContainer.h"
 
-#include "PATInterfaces/CorrectionCode.h"
 #include <string>
 #include <vector>
 #include <map>
diff --git a/PhysicsAnalysis/JetTagging/JetTagAlgs/BTagging/src/JetBTaggingAlg.cxx b/PhysicsAnalysis/JetTagging/JetTagAlgs/BTagging/src/JetBTaggingAlg.cxx
index b9f5474645678b13f0a481a267e88732ca46487a..812430d70025be064d41821391d5ed88089b4c0a 100644
--- a/PhysicsAnalysis/JetTagging/JetTagAlgs/BTagging/src/JetBTaggingAlg.cxx
+++ b/PhysicsAnalysis/JetTagging/JetTagAlgs/BTagging/src/JetBTaggingAlg.cxx
@@ -167,8 +167,8 @@ namespace Analysis {
             ATH_MSG_ERROR( " cannot retrieve jet container particle EL decoration with key " << elTP.key()  );
             return StatusCode::FAILURE;
           }
-          std::string::size_type iofs=h_jetParticleLinkName.key().rfind(".");
-          std::string assocN = h_jetParticleLinkName.key().substr(iofs+1);
+          std::string::size_type iofs=h_jetParticleLinkName.decorKey().rfind(".");
+          std::string assocN = h_jetParticleLinkName.decorKey().substr(iofs+1);
           const std::vector< ElementLink< xAOD::TrackParticleContainer > > associationLinks = h_jetParticleLinkName(*jet);
           newBTagMT->auxdata<std::vector<ElementLink<xAOD::TrackParticleContainer> > >(assocN) = associationLinks;
         }
diff --git a/PhysicsAnalysis/JpsiUpsilonTools/JpsiUpsilonTools/JpsiExample.h b/PhysicsAnalysis/JpsiUpsilonTools/JpsiUpsilonTools/JpsiExample.h
index f9132125f92e083734166d0482f376b404f6a909..11770857f4f5f42bc9d49bb7989e59c03755c38e 100644
--- a/PhysicsAnalysis/JpsiUpsilonTools/JpsiUpsilonTools/JpsiExample.h
+++ b/PhysicsAnalysis/JpsiUpsilonTools/JpsiUpsilonTools/JpsiExample.h
@@ -1,5 +1,5 @@
 /*
-  Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
 #include <string>
@@ -84,9 +84,9 @@ private:
   TTree* m_auxTree; // Tree for auxilliary n-tuple
 
   SG::ReadHandleKey<xAOD::VertexContainer> m_JpsiCandidatesKey; //!< Name of J/psi container
-  SG::ReadDecorHandleKey<xAOD::Vertex> m_refPX;
-  SG::ReadDecorHandleKey<xAOD::Vertex> m_refPY;
-  SG::ReadDecorHandleKey<xAOD::Vertex> m_refPZ;
+  SG::ReadDecorHandleKey<xAOD::VertexContainer> m_refPX;
+  SG::ReadDecorHandleKey<xAOD::VertexContainer> m_refPY;
+  SG::ReadDecorHandleKey<xAOD::VertexContainer> m_refPZ;
 
 };
  
diff --git a/PhysicsAnalysis/JpsiUpsilonTools/src/JpsiExample.cxx b/PhysicsAnalysis/JpsiUpsilonTools/src/JpsiExample.cxx
index dba1b90ab1964814d3905e6c53d5ba353debbade..da873073ea49588594cdef5e6defe6d9a2bb4a2e 100644
--- a/PhysicsAnalysis/JpsiUpsilonTools/src/JpsiExample.cxx
+++ b/PhysicsAnalysis/JpsiUpsilonTools/src/JpsiExample.cxx
@@ -1,5 +1,5 @@
 /*
-  Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
 // JpsiExample.cxx
@@ -322,9 +322,9 @@ TVector3 JpsiExample::trackMomentum(const xAOD::Vertex * vxCandidate, uint trkIn
 //    pz = aPerigee->momentum()[Trk::pz];
 //  }
   
-  SG::ReadDecorHandle<xAOD::Vertex, std::vector<float>> hx (m_refPX);
-  SG::ReadDecorHandle<xAOD::Vertex, std::vector<float>> hy (m_refPY);
-  SG::ReadDecorHandle<xAOD::Vertex, std::vector<float>> hz (m_refPZ);
+  SG::ReadDecorHandle<xAOD::VertexContainer, std::vector<float>> hx (m_refPX);
+  SG::ReadDecorHandle<xAOD::VertexContainer, std::vector<float>> hy (m_refPY);
+  SG::ReadDecorHandle<xAOD::VertexContainer, std::vector<float>> hz (m_refPZ);
 
   const std::vector<float>& refTrackPx = hx(*vxCandidate);
   const std::vector<float>& refTrackPy = hy(*vxCandidate);
diff --git a/PhysicsAnalysis/MuonID/MuonIDAnalysis/MuonMomentumCorrections/MuonMomentumCorrections/Regions.h b/PhysicsAnalysis/MuonID/MuonIDAnalysis/MuonMomentumCorrections/MuonMomentumCorrections/Regions.h
index bde501057bfada2f49eebb92694b4cef585ee6dc..deba93525ca8aba61d6c9e4c57ddce9bc4781296 100644
--- a/PhysicsAnalysis/MuonID/MuonIDAnalysis/MuonMomentumCorrections/MuonMomentumCorrections/Regions.h
+++ b/PhysicsAnalysis/MuonID/MuonIDAnalysis/MuonMomentumCorrections/MuonMomentumCorrections/Regions.h
@@ -11,9 +11,6 @@
 #include "AsgMessaging/MessageCheck.h"
 
 ANA_MSG_HEADER(msgMMC)
-
-#include <AsgMessaging/MessageCheck.h>
-
 ANA_MSG_HEADER (msgMuonMomentumCorrections)
 
 class Regions{
diff --git a/PhysicsAnalysis/MuonID/MuonPerformanceAnalysis/MuonResonanceTools/Root/MuonResonancePlots.cxx b/PhysicsAnalysis/MuonID/MuonPerformanceAnalysis/MuonResonanceTools/Root/MuonResonancePlots.cxx
index 65367477477dc033943d0b38d80f905d9250c2ca..d642245b8d98882804fa1d492ac9550048298212 100644
--- a/PhysicsAnalysis/MuonID/MuonPerformanceAnalysis/MuonResonanceTools/Root/MuonResonancePlots.cxx
+++ b/PhysicsAnalysis/MuonID/MuonPerformanceAnalysis/MuonResonanceTools/Root/MuonResonancePlots.cxx
@@ -4,7 +4,6 @@
 
 #include "MuonResonanceTools/MuonResonancePlots.h"
 #include "TLorentzVector.h"
-#include <iostream>
 #ifndef ROOTCORE
 #include "GaudiKernel/IToolSvc.h"
 #include "AthenaBaseComps/AthCheckMacros.h"
diff --git a/PhysicsAnalysis/TruthParticleID/McParticleEventTPCnv/src/RootTruthParticleCnvTool.cxx b/PhysicsAnalysis/TruthParticleID/McParticleEventTPCnv/src/RootTruthParticleCnvTool.cxx
index 6fdb260a495462c372ed96863d78dea55e5cd8d3..c9e8e9812c65f480758e21b156bd2d48ab3444b5 100644
--- a/PhysicsAnalysis/TruthParticleID/McParticleEventTPCnv/src/RootTruthParticleCnvTool.cxx
+++ b/PhysicsAnalysis/TruthParticleID/McParticleEventTPCnv/src/RootTruthParticleCnvTool.cxx
@@ -95,9 +95,9 @@ RootTruthParticleCnvTool::convert(const McEventCollection *mcCollection,
     if ( hepMcPart != mcPart->genParticle() ) {
       ::Error ("RootTruthParticleCnvTool",
                "TruthParticle is not wrapping the GenParticle : %d !!",
-               hepMcPart->barcode());
+               HepMC::barcode(hepMcPart));
     }
-    HepMcParticleLink mcLink( hepMcPart->barcode(), genEventIndex, EBC_MAINEVCOLL, HepMcParticleLink::IS_POSITION );
+    HepMcParticleLink mcLink( HepMC::barcode(hepMcPart), genEventIndex, EBC_MAINEVCOLL, HepMcParticleLink::IS_POSITION );
     bcToMcPart[ mcLink.compress() ] = mcPart;
 
   }//> end loop over particles
diff --git a/PhysicsAnalysis/TruthParticleID/McParticleTests/src/McAodMcTopAna_solution.cxx b/PhysicsAnalysis/TruthParticleID/McParticleTests/src/McAodMcTopAna_solution.cxx
index 6edfa84919980f71e3e2a593bfcc17d6c6853a80..b2c94b6e12c4311dac4b81029d4864e3875fad57 100644
--- a/PhysicsAnalysis/TruthParticleID/McParticleTests/src/McAodMcTopAna_solution.cxx
+++ b/PhysicsAnalysis/TruthParticleID/McParticleTests/src/McAodMcTopAna_solution.cxx
@@ -40,10 +40,6 @@
 
 namespace McAod {
 
-/////////////////////////////////////////////////////////////////// 
-// Public methods: 
-/////////////////////////////////////////////////////////////////// 
-
 // Constructors
 ////////////////
 McTopAnaSolution::McTopAnaSolution( const std::string& name, 
@@ -289,24 +285,4 @@ StatusCode McTopAnaSolution::doMcTopWb()
   return StatusCode::SUCCESS;
 }
 
-/////////////////////////////////////////////////////////////////// 
-// Const methods: 
-///////////////////////////////////////////////////////////////////
-
-/////////////////////////////////////////////////////////////////// 
-// Non-const methods: 
-/////////////////////////////////////////////////////////////////// 
-
-/////////////////////////////////////////////////////////////////// 
-// Protected methods: 
-/////////////////////////////////////////////////////////////////// 
-
-/////////////////////////////////////////////////////////////////// 
-// Const methods: 
-///////////////////////////////////////////////////////////////////
-
-/////////////////////////////////////////////////////////////////// 
-// Non-const methods: 
-/////////////////////////////////////////////////////////////////// 
-
 } //> end namespace McAod
diff --git a/PhysicsAnalysis/TruthParticleID/McParticleTools/src/GenAodValidationTool.cxx b/PhysicsAnalysis/TruthParticleID/McParticleTools/src/GenAodValidationTool.cxx
index 1a72387d8dfd242fe54bc2637377a099e217e1e2..03c37d15f2b291873984ae33f74926321dd9dd6e 100755
--- a/PhysicsAnalysis/TruthParticleID/McParticleTools/src/GenAodValidationTool.cxx
+++ b/PhysicsAnalysis/TruthParticleID/McParticleTools/src/GenAodValidationTool.cxx
@@ -389,7 +389,7 @@ GenAodValidationTool::compareParts( const HepMC::GenParticle* p1,
     return false;
   }
 
-  if ( p1->barcode() != p2->barcode() ) {
+  if ( HepMC::barcode(p1) != HepMC::barcode(p2) ) {
     return false;
   }
 
diff --git a/PhysicsAnalysis/TruthParticleID/McParticleTools/src/McAodWriterTool.cxx b/PhysicsAnalysis/TruthParticleID/McParticleTools/src/McAodWriterTool.cxx
index fabe82b6e369923439486af96908948862687acc..d23e1569d1f09b8214559a1efd41cda30dd3429e 100755
--- a/PhysicsAnalysis/TruthParticleID/McParticleTools/src/McAodWriterTool.cxx
+++ b/PhysicsAnalysis/TruthParticleID/McParticleTools/src/McAodWriterTool.cxx
@@ -143,7 +143,7 @@ StatusCode McAodWriterTool::write( const TruthParticleContainer* mcParts )
 	<< " "  << std::setprecision(3) << std::setw(10) << mc->e()
 	<< " "  << std::setprecision(3) << std::setw(10) << mc->pdgId()
 	<< " "  << std::setprecision(3) << std::setw(10) << mc->charge()
-	<< " "  << std::setprecision(3) << std::setw(10) << mc->genParticle()->barcode()
+	<< " "  << std::setprecision(3) << std::setw(10) << HepMC::barcode(mc->genParticle())
 	<< std::endl;
     out << "EtIsol " 
 	<< std::setw(3) << TruthParticleParameters::NbrOfCones
diff --git a/PhysicsAnalysis/TruthParticleID/McParticleTools/src/McVtxFilterTool.cxx b/PhysicsAnalysis/TruthParticleID/McParticleTools/src/McVtxFilterTool.cxx
index b3a7447b747b29677552c73a7b8ea04bd6869015..e2bf7c526d14682187c04a06ffbef05695e64eca 100755
--- a/PhysicsAnalysis/TruthParticleID/McParticleTools/src/McVtxFilterTool.cxx
+++ b/PhysicsAnalysis/TruthParticleID/McParticleTools/src/McVtxFilterTool.cxx
@@ -439,7 +439,7 @@ bool McVtxFilterTool::keepParticle( const VtxType::Flag vtxType,
       << "In keepParticle: Don't know anything about this VtxType ["
       << vtxType << "] !!"
       << endmsg
-      << "We'll keep this particle [bc= " << part->barcode() 
+      << "We'll keep this particle [bc= " << HepMC::barcode(part) 
       << "] but : Check your jobOption !!"
       << endmsg;
     return true;
diff --git a/PhysicsAnalysis/TruthParticleID/McParticleTools/src/OldSpclMcFilterTool.cxx b/PhysicsAnalysis/TruthParticleID/McParticleTools/src/OldSpclMcFilterTool.cxx
index 888edd4999a8043b765e23cafe81b883d91a82ef..b8777ad4d314e68c26e80686883b8abcc668d7ff 100755
--- a/PhysicsAnalysis/TruthParticleID/McParticleTools/src/OldSpclMcFilterTool.cxx
+++ b/PhysicsAnalysis/TruthParticleID/McParticleTools/src/OldSpclMcFilterTool.cxx
@@ -188,7 +188,7 @@ StatusCode OldSpclMcFilterTool::selectSpclMcBarcodes()
     const double pt   = hlv.perp();
     const double eta  = hlv.pseudoRapidity();
     const double mass = hlv.m();
-    const int barcode = part->barcode();
+    const int barcode = HepMC::barcode(part);
 
     const HepMC::GenVertex * decayVtx = part->end_vertex();
     const HepMC::GenVertex * prodVtx  = part->production_vertex();
diff --git a/PhysicsAnalysis/TruthParticleID/McParticleTools/src/TruthParticleCnvTool.cxx b/PhysicsAnalysis/TruthParticleID/McParticleTools/src/TruthParticleCnvTool.cxx
index b012d013efffecb9381c2017a6e02b0176444065..b4211ce14cbac77e95c486619a0d794613a30a1c 100755
--- a/PhysicsAnalysis/TruthParticleID/McParticleTools/src/TruthParticleCnvTool.cxx
+++ b/PhysicsAnalysis/TruthParticleID/McParticleTools/src/TruthParticleCnvTool.cxx
@@ -201,9 +201,9 @@ StatusCode TruthParticleCnvTool::execute (const EventContext& ctx) const
       continue;
     }
     genEventIndex = (it - mcEventsReadHandle->begin());
-    ATH_MSG_DEBUG(" adding event id="<< evt->signal_process_id()<<"  genEventIndex="<< genEventIndex );
+    ATH_MSG_DEBUG(" adding event id="<< HepMC::signal_process_id(evt)<<"  genEventIndex="<< genEventIndex );
 
-    if( evt->signal_process_id() == 0 ) continue;
+    if( HepMC::signal_process_id(evt) == 0 ) continue;
     if (!this->convert( mcEventsReadHandle.ptr(), genEventIndex, mcPartsOutputWriteHandle.ptr(), dummyVisitor ).isSuccess()) {
       ATH_MSG_DEBUG("Failed to convert an event...");
       all_good = false;
@@ -283,10 +283,10 @@ TruthParticleCnvTool::convert( const McEventCollection * mcCollection,
 
     if ( hepMcPart != mcPart->genParticle() ) {
       ATH_MSG_ERROR("TruthParticle is not wrapping the GenParticle : " 
-		    << hepMcPart->barcode() << " !!");
+		    << HepMC::barcode(hepMcPart) << " !!");
     }
     //bcToMcPart[ hepMcPart->barcoade() ] = mcPart;
-    HepMcParticleLink mcLink( hepMcPart->barcode(), genEventIndex, EBC_MAINEVCOLL, HepMcParticleLink::IS_POSITION, sg ); // FIXME assuming that we are using the hard-scatter McEventCollection - would need to pass this info as an argument to the convert function.
+    HepMcParticleLink mcLink( HepMC::barcode(hepMcPart), genEventIndex, EBC_MAINEVCOLL, HepMcParticleLink::IS_POSITION, sg ); // FIXME assuming that we are using the hard-scatter McEventCollection - would need to pass this info as an argument to the convert function.
     bcToMcPart[ mcLink.compress() ] = mcPart;
 
   }//> end loop over particles
diff --git a/PhysicsAnalysis/TruthParticleID/McParticleTools/src/VtxBasedFilterTool.cxx b/PhysicsAnalysis/TruthParticleID/McParticleTools/src/VtxBasedFilterTool.cxx
index 5b76c283da1c208790cdc4eea23e3b7287caf348..3ba9430a9030bf03d244a9c8dd18c62722e6a148 100755
--- a/PhysicsAnalysis/TruthParticleID/McParticleTools/src/VtxBasedFilterTool.cxx
+++ b/PhysicsAnalysis/TruthParticleID/McParticleTools/src/VtxBasedFilterTool.cxx
@@ -192,7 +192,7 @@ StatusCode VtxBasedFilterTool::addVertex( const HepMC::GenVertex* srcVtx,
     vtx = HepMC::newGenVertexPtr();
     vtx->set_position( srcVtx->position() );
     vtx->set_id( srcVtx->id() );
-    vtx->suggest_barcode( srcVtx->barcode() );
+    vtx->suggest_barcode( HepMC::barcode(srcVtx) );
     vtx->weights() = srcVtx->weights();
     evt->add_vertex(vtx);
   }
@@ -249,7 +249,7 @@ StatusCode VtxBasedFilterTool::addVertex( const HepMC::GenVertex* srcVtx,
 bool 
 VtxBasedFilterTool::isFromHardScattering( const HepMC::GenVertex* vtx ) const
 {
-  if ( std::abs(vtx->barcode()) <= m_maxHardScatteringVtxBarcode.value() &&
+  if ( std::abs(HepMC::barcode(vtx)) <= m_maxHardScatteringVtxBarcode.value() &&
        m_ppFilter.isAccepted(vtx) &&
        ! m_showerFilter.isAccepted(vtx) ) {
 
diff --git a/PhysicsAnalysis/TruthParticleID/McParticleUtils/src/McVtxFilter.cxx b/PhysicsAnalysis/TruthParticleID/McParticleUtils/src/McVtxFilter.cxx
index ffa2f639869aa8e4418f0c9990eb99e362433ba8..d889a933d07f142d74ef9a9dfb4becebef1a63a1 100755
--- a/PhysicsAnalysis/TruthParticleID/McParticleUtils/src/McVtxFilter.cxx
+++ b/PhysicsAnalysis/TruthParticleID/McParticleUtils/src/McVtxFilter.cxx
@@ -366,7 +366,7 @@ bool McVtxFilter::checkParentBranch( const HepMC::GenVertex * vtx ) const
 
   m_msg << MSG::VERBOSE << ">>> CheckParentBranch is DONE : " 
 	<< ( accepted ? "accept" : "reject" )
-	<< " vtx= " << vtx->barcode()
+	<< " vtx= " << HepMC::barcode(vtx)
 	<< endmsg;
   return accepted;
 }
@@ -420,7 +420,7 @@ bool McVtxFilter::checkChildBranch( const HepMC::GenVertex * vtx ) const
 
   m_msg << MSG::VERBOSE << ">>> CheckChildBranch is DONE : " 
 	<< ( accepted ? "accept" : "reject" )
-	<< " vtx= " << vtx->barcode()
+	<< " vtx= " << HepMC::barcode(vtx)
 	<< endmsg;
   return accepted;
 }
diff --git a/Projects/AthGeneration/package_filters.txt b/Projects/AthGeneration/package_filters.txt
index e738f729837a19a013e1fe91c6136a72343463fe..bf3abe287df13a90b486fafae8b561d834c242f5 100644
--- a/Projects/AthGeneration/package_filters.txt
+++ b/Projects/AthGeneration/package_filters.txt
@@ -140,6 +140,9 @@
 + Event/xAOD/xAODEventShape
 + Event/xAOD/xAODJet
 + Event/xAOD/xAODJetAthenaPool
++ Event/xAOD/xAODMetaData
++ Event/xAOD/xAODMetaDataAthenaPool
++ Event/xAOD/xAODMetaDataCnv
 + Event/xAOD/xAODMuon
 + Event/xAOD/xAODPrimitives
 + Event/xAOD/xAODTracking
diff --git a/Projects/AthSimulation/package_filters.txt b/Projects/AthSimulation/package_filters.txt
index 717087ee48431922fed2224e5f36c3f75da10df4..e94f7928a6e7d7e304cd3245045653f3056c8851 100644
--- a/Projects/AthSimulation/package_filters.txt
+++ b/Projects/AthSimulation/package_filters.txt
@@ -151,6 +151,9 @@
 + Event/xAOD/xAODEventInfoCnv
 + Event/xAOD/xAODJet
 + Event/xAOD/xAODJetAthenaPool
++ Event/xAOD/xAODMetaData
++ Event/xAOD/xAODMetaDataAthenaPool
++ Event/xAOD/xAODMetaDataCnv
 + Event/xAOD/xAODPrimitives
 + Event/xAOD/xAODTruth
 + Event/xAOD/xAODTruthAthenaPool
@@ -308,11 +311,11 @@
 + Simulation/ISF/ISF_HepMC/ISF_HepMC_Tools
 + Simulation/ISF/ISF_SimulationSelectors
 + Simulation/ISF/ISF_Tracking/ISF_TrackingInterfaces
-+ Simulation/ISF/ISF_Validation
 + Simulation/Interfaces/HepMC_Interfaces
 + Simulation/RunDependentSim/RunDependentSimComps
 + Simulation/SimuJobTransforms
 + Simulation/SimulationJobOptions
++ Simulation/Tests/ISF_Validation
 + Simulation/Tools/AtlasCLHEP_RandomGenerators
 + Simulation/Tools/McEventCollectionFilter
 + TileCalorimeter/TileCalib/TileCalibBlobObjs
diff --git a/Reconstruction/Jet/JetCalibTools/Root/GlobalSequentialCorrection.cxx b/Reconstruction/Jet/JetCalibTools/Root/GlobalSequentialCorrection.cxx
index 56291bce0387ee1d2f6b58132b9eba40f9c13a10..1a5de34f61f303ef514028d4046f182442bcc8f3 100644
--- a/Reconstruction/Jet/JetCalibTools/Root/GlobalSequentialCorrection.cxx
+++ b/Reconstruction/Jet/JetCalibTools/Root/GlobalSequentialCorrection.cxx
@@ -53,7 +53,14 @@ GlobalSequentialCorrection::GlobalSequentialCorrection(const std::string& name,
 { }
 
 GlobalSequentialCorrection::~GlobalSequentialCorrection() {
-
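+  // Delete all response-factor histograms owned by the tool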
+  for(TH2F* hist : m_respFactorsEM3            ){if(hist) delete hist;}
+  for(TH2F* hist : m_respFactorsnTrk           ){if(hist) delete hist;}
+  for(TH2F* hist : m_respFactorstrackWIDTH     ){if(hist) delete hist;}
+  for(TH2F* hist : m_respFactorsTile0          ){if(hist) delete hist;}
+  for(TH2F* hist : m_respFactorsPunchThrough   ){if(hist) delete hist;}
+  for(TH2F* hist : m_respFactorsChargedFraction){if(hist) delete hist;}
+  for(TH2F* hist : m_respFactorsN90Constituents){if(hist) delete hist;}
+  for(TH2F* hist : m_respFactorscaloWIDTH      ){if(hist) delete hist;}
 }
 
 StatusCode GlobalSequentialCorrection::initializeTool(const std::string&) {
diff --git a/Reconstruction/Jet/JetCalibTools/Root/InsituDataCorrection.cxx b/Reconstruction/Jet/JetCalibTools/Root/InsituDataCorrection.cxx
index 254edd4b424a15919e85fdf277871d6888ca95e7..67cb11ddf3f7b7fb4aca666752fb8ef1935c4ee3 100644
--- a/Reconstruction/Jet/JetCalibTools/Root/InsituDataCorrection.cxx
+++ b/Reconstruction/Jet/JetCalibTools/Root/InsituDataCorrection.cxx
@@ -71,23 +71,21 @@ StatusCode InsituDataCorrection::initializeTool(const std::string&) {
 
   rel_histoname.ReplaceAll("JETALGO",m_jetAlgo); abs_histoname.ReplaceAll("JETALGO",m_jetAlgo);
   if(m_applyRelativeandAbsoluteInsitu){
-    TH2D * rel_histo = (TH2D*)JetCalibUtils::GetHisto2(insitu_file,rel_histoname);
-    TH1D * abs_histo = (TH1D*)JetCalibUtils::GetHisto(insitu_file,abs_histoname);
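+    // Hold the temporary histograms in unique_ptr so they are released automatically; combineCalibration still receives raw pointers via get()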
+    std::unique_ptr<TH2D> rel_histo(dynamic_cast<TH2D*>(JetCalibUtils::GetHisto2(insitu_file,rel_histoname)));
+    std::unique_ptr<TH1D> abs_histo(dynamic_cast<TH1D*>(JetCalibUtils::GetHisto(insitu_file,abs_histoname)));
     if ( !rel_histo || !abs_histo ) {
       ATH_MSG_FATAL( "\n  Tool configured for data, but no residual in-situ histograms could be retrieved. Aborting..." );
       return StatusCode::FAILURE;
     }
-    else {
-      gROOT->cd();
-      // save pTmax of the relative and absolute in situ calibrations
-      m_relhistoPtMax = rel_histo->GetXaxis()->GetBinLowEdge(rel_histo->GetNbinsX()+1);
-      m_abshistoPtMax = abs_histo->GetBinLowEdge(abs_histo->GetNbinsX()+1);
-      // combine in situ calibrations
-      m_insituCorr = combineCalibration(rel_histo,abs_histo);
-      m_insituEtaMax = m_insituCorr->GetYaxis()->GetBinLowEdge(m_insituCorr->GetNbinsY()+1);
-      m_insituPtMin = m_insituCorr->GetXaxis()->GetBinLowEdge(1);
-      m_insituPtMax = m_insituCorr->GetXaxis()->GetBinLowEdge(m_insituCorr->GetNbinsX()+1);
-    }
+    gROOT->cd();
+    // save pTmax of the relative and absolute in situ calibrations
+    m_relhistoPtMax = rel_histo->GetXaxis()->GetBinLowEdge(rel_histo->GetNbinsX()+1);
+    m_abshistoPtMax = abs_histo->GetBinLowEdge(abs_histo->GetNbinsX()+1);
+    // combine in situ calibrations
+    m_insituCorr = combineCalibration(rel_histo.get(),abs_histo.get());
+    m_insituEtaMax = m_insituCorr->GetYaxis()->GetBinLowEdge(m_insituCorr->GetNbinsY()+1);
+    m_insituPtMin = m_insituCorr->GetXaxis()->GetBinLowEdge(1);
+    m_insituPtMax = m_insituCorr->GetXaxis()->GetBinLowEdge(m_insituCorr->GetNbinsX()+1);
     if(m_applyEtaRestrictionRelativeandAbsolute) m_insituEtaMax = insitu_etarestriction_relativeandabsolute;
   }
   if(m_applyResidualMCbasedInsitu){
diff --git a/Reconstruction/Jet/JetCalibTools/Root/JMSCorrection.cxx b/Reconstruction/Jet/JetCalibTools/Root/JMSCorrection.cxx
index 5ec72f9664b5e8ce4d95c5983cdb1cb75f313737..98144e15cbaac030b44f929efcff33d01279517e 100644
--- a/Reconstruction/Jet/JetCalibTools/Root/JMSCorrection.cxx
+++ b/Reconstruction/Jet/JetCalibTools/Root/JMSCorrection.cxx
@@ -45,58 +45,19 @@ JMSCorrection::JMSCorrection(const std::string& name, TEnv * config, TString jet
 
 JMSCorrection::~JMSCorrection() {
 
-    if (m_use3Dhisto)
-    {
-        // Free 3D histograms
-        if (m_respFactorMass3D)
-        {
-            delete m_respFactorMass3D;
-            m_respFactorMass3D = NULL;
-        }
-        if (m_respFactorTrackAssistedMass3D)
-        {
-            delete m_respFactorTrackAssistedMass3D;
-            m_respFactorTrackAssistedMass3D = NULL;
-        }
-        if (m_caloResolutionMassCombination3D)
-        {
-            delete m_caloResolutionMassCombination3D;
-            m_caloResolutionMassCombination3D = NULL;
-        }
-        if (m_taResolutionMassCombination3D)
-        {
-            delete m_taResolutionMassCombination3D;
-            m_taResolutionMassCombination3D = NULL;
-        }
-        if (m_correlationMapMassCombination3D)
-        {
-            delete m_correlationMapMassCombination3D;
-            m_correlationMapMassCombination3D = NULL;
-        }
-    }
-    {
-        // Free 2D histograms
-        for (TH2F* histo : m_respFactorsMass)
-            delete histo;
-        m_respFactorsMass.clear();
-        
-        for (TH2F* histo : m_respFactorsTrackAssistedMass)
-            delete histo;
-        m_respFactorsTrackAssistedMass.clear();
-
-        for (TH2D* histo : m_caloResolutionMassCombination)
-            delete histo;
-        m_caloResolutionMassCombination.clear();
-
-        for (TH2D* histo : m_taResolutionMassCombination)
-            delete histo;
-        m_taResolutionMassCombination.clear();
-
-        for (TH2D* histo : m_correlationMapMassCombination)
-            delete histo;
-        m_correlationMapMassCombination.clear();
-    }
-
+  // Free 3D histograms
+  if (m_respFactorMass3D               ) delete m_respFactorMass3D;
+  if (m_respFactorTrackAssistedMass3D  ) delete m_respFactorTrackAssistedMass3D;
+  if (m_caloResolutionMassCombination3D) delete m_caloResolutionMassCombination3D;
+  if (m_taResolutionMassCombination3D  ) delete m_taResolutionMassCombination3D;
+  if (m_correlationMapMassCombination3D) delete m_correlationMapMassCombination3D;
+
+  // Free 2D histograms
+  for (TH2F* histo : m_respFactorsMass              ){if(histo) delete histo;}
+  for (TH2F* histo : m_respFactorsTrackAssistedMass ){if(histo) delete histo;}
+  for (TH2D* histo : m_caloResolutionMassCombination){if(histo) delete histo;}
+  for (TH2D* histo : m_taResolutionMassCombination  ){if(histo) delete histo;}
+  for (TH2D* histo : m_correlationMapMassCombination){if(histo) delete histo;}
 }
 
 StatusCode JMSCorrection::initializeTool(const std::string&) {
diff --git a/Reconstruction/Jet/JetMomentTools/JetMomentTools/JetForwardJvtTool.h b/Reconstruction/Jet/JetMomentTools/JetMomentTools/JetForwardJvtTool.h
index 6982167a9245db7028d73461bc9e1b4f99478380..cf9407b73835d7e8daccd47590afaf71b20317cf 100644
--- a/Reconstruction/Jet/JetMomentTools/JetMomentTools/JetForwardJvtTool.h
+++ b/Reconstruction/Jet/JetMomentTools/JetMomentTools/JetForwardJvtTool.h
@@ -97,7 +97,7 @@
     SG::WriteDecorHandleKey<xAOD::JetContainer> m_fjvtDecKey{this, "FJVTName", "fJvt", "Decoration key for fJvt"};
     
     SG::ReadHandleKey<xAOD::VertexContainer> m_vertexContainerName{this, "VertexContainerName", "PrimaryVertices", "SG key for vertex container"};
-    SG::ReadHandleKey<xAOD::MissingETContainer> m_trkMETName{this, "Met_TrackName", "Met_Track", "SG key for MET track container"};
+    SG::ReadHandleKey<xAOD::MissingETContainer> m_trkMETName{this, "Met_TrackName", "MET_Track", "SG key for MET track container"};
 
     SG::ReadDecorHandleKey<xAOD::JetContainer> m_widthKey{this, "WidthName", "Width", "SG key for jet width"};
     SG::ReadDecorHandleKey<xAOD::JetContainer> m_jvtMomentKey{this, "JvtMomentName", "Jvt", "JVT moment name"};
diff --git a/Reconstruction/Jet/JetMomentTools/JetMomentTools/JetVertexTaggerTool.h b/Reconstruction/Jet/JetMomentTools/JetMomentTools/JetVertexTaggerTool.h
index 30cfdd8c3435e24f337f6da37848be9050b97c58..aa52d5990dc300d3fd2ccf95f5b2b74374d7ccb4 100644
--- a/Reconstruction/Jet/JetMomentTools/JetMomentTools/JetVertexTaggerTool.h
+++ b/Reconstruction/Jet/JetMomentTools/JetMomentTools/JetVertexTaggerTool.h
@@ -84,6 +84,9 @@ public:
   // Constructor from tool name
   JetVertexTaggerTool(const std::string& name);
 
+  // Destructor
+  virtual ~JetVertexTaggerTool();
+
   // Initialization.
   StatusCode initialize() override;
 
diff --git a/Reconstruction/Jet/JetMomentTools/Root/JetVertexTaggerTool.cxx b/Reconstruction/Jet/JetMomentTools/Root/JetVertexTaggerTool.cxx
index 41b61bd304831f0492b55ac0270dbc28a7f35747..953753a8c6e03784fbd2a5d6836bcdc5d1a01a16 100644
--- a/Reconstruction/Jet/JetMomentTools/Root/JetVertexTaggerTool.cxx
+++ b/Reconstruction/Jet/JetMomentTools/Root/JetVertexTaggerTool.cxx
@@ -20,12 +20,19 @@ using xAOD::JetFourMom_t;
 //**********************************************************************
 
 JetVertexTaggerTool::JetVertexTaggerTool(const std::string& name)
-: asg::AsgTool(name)
+: asg::AsgTool(name),
+  m_jvthisto(nullptr)
 {
 }
 
 //**********************************************************************
 
+JetVertexTaggerTool::~JetVertexTaggerTool(){
+  if(m_jvthisto) delete m_jvthisto;
+}
+
+//**********************************************************************
+
 StatusCode JetVertexTaggerTool::initialize() {
   ATH_MSG_INFO("Initializing JetVertexTaggerTool " << name());
 
diff --git a/Reconstruction/Jet/JetRec/JetRec/PseudoJetContainer.h b/Reconstruction/Jet/JetRec/JetRec/PseudoJetContainer.h
index 0e66b2dec42640a79506c11f97c0db3b6e229d73..94eacde5975589ab50bb2cf4d540ff5f21c78642 100644
--- a/Reconstruction/Jet/JetRec/JetRec/PseudoJetContainer.h
+++ b/Reconstruction/Jet/JetRec/JetRec/PseudoJetContainer.h
@@ -130,12 +130,7 @@ private:
     } 
     
     ExtractorRange bump(int step) const {
-      ExtractorRange result = *this;
-      result.m_lo += step;
-      result.m_hi += step;
-      IConstituentExtractor* ce = m_e->clone();
-      result.m_e = ce;
-      return result;
+      return ExtractorRange(m_lo + step, m_hi + step, m_e->clone());
     }
 
     int m_lo;
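
Returning the shifted range by constructing it directly avoids first copying `*this` and then swapping in a fresh clone of the extractor, which is easy to get wrong when an owning pointer is involved. A minimal standalone sketch of the same idea; it uses `std::unique_ptr` and a hypothetical `Extractor` type, not the actual `IConstituentExtractor` interface:

```cpp
#include <memory>

// Hypothetical extractor with a deep-copy clone(), standing in for
// IConstituentExtractor.
struct Extractor {
  virtual ~Extractor() = default;
  virtual std::unique_ptr<Extractor> clone() const {
    return std::make_unique<Extractor>(*this);
  }
};

class Range {
public:
  Range(int lo, int hi, std::unique_ptr<Extractor> e)
    : m_lo(lo), m_hi(hi), m_e(std::move(e)) {}

  // Build the shifted range in one step; it owns a fresh clone.
  Range bump(int step) const {
    return Range(m_lo + step, m_hi + step, m_e->clone());
  }

private:
  int m_lo;
  int m_hi;
  std::unique_ptr<Extractor> m_e;
};

int main() {
  Range r(0, 10, std::make_unique<Extractor>());
  Range shifted = r.bump(5);   // independent copy of the range, offset by 5
  (void)shifted;
  return 0;
}
```
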
diff --git a/Reconstruction/Jet/JetSubStructureMomentTools/Root/EnergyCorrelatorGeneralizedTool.cxx b/Reconstruction/Jet/JetSubStructureMomentTools/Root/EnergyCorrelatorGeneralizedTool.cxx
index 66fc6a97deee56437d5e0971bf57da896fdcfb3b..76ccaaed4cb296d90a02bbfaa42d92f9ff24fc5b 100644
--- a/Reconstruction/Jet/JetSubStructureMomentTools/Root/EnergyCorrelatorGeneralizedTool.cxx
+++ b/Reconstruction/Jet/JetSubStructureMomentTools/Root/EnergyCorrelatorGeneralizedTool.cxx
@@ -107,6 +107,9 @@ int EnergyCorrelatorGeneralizedTool::modifyJet(xAOD::Jet &injet) const {
 
     float beta = moment.first;
 
+    /// Note that the indexing for these follows the 
+    /// convention of ECFG_angles_n
+
     /// These are used for M2 and N2
     float ECFG_2_1_value = -999.0;
     float ECFG_3_2_value = -999.0;
@@ -124,41 +127,31 @@ int EnergyCorrelatorGeneralizedTool::modifyJet(xAOD::Jet &injet) const {
     if( calculate ) {
 
       /// These are used for N2 and M2
-      JetSubStructureUtils::EnergyCorrelatorGeneralized ECFG_3_2(2, 3, beta, JetSubStructureUtils::EnergyCorrelator::pt_R);
       JetSubStructureUtils::EnergyCorrelatorGeneralized ECFG_2_1(1, 2, beta, JetSubStructureUtils::EnergyCorrelator::pt_R);
+      JetSubStructureUtils::EnergyCorrelatorGeneralized ECFG_3_1(1, 3, beta, JetSubStructureUtils::EnergyCorrelator::pt_R);
+      JetSubStructureUtils::EnergyCorrelatorGeneralized ECFG_3_2(2, 3, beta, JetSubStructureUtils::EnergyCorrelator::pt_R);
 
       ECFG_2_1_value = ECFG_2_1.result(jet);
+      ECFG_3_1_value = ECFG_3_1.result(jet);
       ECFG_3_2_value = ECFG_3_2.result(jet);
 
       /// These are used for dichroic N2 and M2
       if( calculate_ungroomed ) {
         ECFG_2_1_ungroomed_value = ECFG_2_1.result(jet_ungroomed);
+        ECFG_3_1_ungroomed_value = ECFG_3_1.result(jet_ungroomed);
         ECFG_3_2_ungroomed_value = ECFG_3_2.result(jet_ungroomed);
       }
 
-      /// These are used for M3 and N3
-      if( m_doM3 || m_doN3 ) {
-
-        JetSubStructureUtils::EnergyCorrelatorGeneralized ECFG_3_1(1, 3, beta, JetSubStructureUtils::EnergyCorrelator::pt_R);
-
-        ECFG_3_1_value = ECFG_3_1.result(jet);
-
-        if( calculate_ungroomed ) {
-          ECFG_3_1_ungroomed_value = ECFG_3_1.result(jet_ungroomed);
-        }
-
-        /// This is used for M3
-        if( m_doM3 ) {
-          JetSubStructureUtils::EnergyCorrelatorGeneralized ECFG_4_1(1, 4, beta, JetSubStructureUtils::EnergyCorrelator::pt_R);
-          ECFG_4_1_value = ECFG_4_1.result(jet);
-        }
-
-        /// This is used for N3
-        if( m_doN3 ) {
-          JetSubStructureUtils::EnergyCorrelatorGeneralized ECFG_4_2(2, 4, beta, JetSubStructureUtils::EnergyCorrelator::pt_R);
-          ECFG_4_2_value = ECFG_4_2.result(jet);
-        }
+      /// This is used for M3
+      if( m_doM3 ) {
+        JetSubStructureUtils::EnergyCorrelatorGeneralized ECFG_4_1(1, 4, beta, JetSubStructureUtils::EnergyCorrelator::pt_R);
+        ECFG_4_1_value = ECFG_4_1.result(jet);
+      }
 
+      /// This is used for N3
+      if( m_doN3 ) {
+        JetSubStructureUtils::EnergyCorrelatorGeneralized ECFG_4_2(2, 4, beta, JetSubStructureUtils::EnergyCorrelator::pt_R);
+        ECFG_4_2_value = ECFG_4_2.result(jet);
       }
 
     }
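
The reordering above computes `ECFG_2_1`, `ECFG_3_1` and `ECFG_3_2` together whenever the correlators are requested, and leaves only the more expensive four-point functions behind the `m_doM3`/`m_doN3` flags. A minimal sketch of that control-flow pattern with a dummy stand-in for the correlator evaluation (not the JetSubStructureUtils API):

```cpp
#include <iostream>

// Hypothetical placeholder for an energy-correlator evaluation.
double ecfg(int angles, int n, double beta) {
  return angles + 10.0 * n + beta;   // dummy value, for illustration only
}

int main() {
  const double beta = 1.0;
  const bool doM3 = true;    // stands in for the m_doM3 tool flag
  const bool doN3 = false;   // stands in for the m_doN3 tool flag

  // Always computed: shared by several ratio observables.
  const double ecfg_2_1 = ecfg(1, 2, beta);
  const double ecfg_3_1 = ecfg(1, 3, beta);
  const double ecfg_3_2 = ecfg(2, 3, beta);

  // Only the four-point functions remain guarded by their flags.
  double ecfg_4_1 = -999.0;
  double ecfg_4_2 = -999.0;
  if (doM3) ecfg_4_1 = ecfg(1, 4, beta);
  if (doN3) ecfg_4_2 = ecfg(2, 4, beta);

  std::cout << ecfg_2_1 << " " << ecfg_3_1 << " " << ecfg_3_2 << " "
            << ecfg_4_1 << " " << ecfg_4_2 << "\n";
  return 0;
}
```
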
diff --git a/Reconstruction/MuonIdentification/CaloTrkMuIdTools/CMakeLists.txt b/Reconstruction/MuonIdentification/CaloTrkMuIdTools/CMakeLists.txt
index 67e7941127e10e2d56782f66e3c202c5a3337dea..44fcf8a1fcdda75f105e95df9c1a6cf148059ff2 100644
--- a/Reconstruction/MuonIdentification/CaloTrkMuIdTools/CMakeLists.txt
+++ b/Reconstruction/MuonIdentification/CaloTrkMuIdTools/CMakeLists.txt
@@ -8,14 +8,13 @@ atlas_subdir( CaloTrkMuIdTools )
 # External dependencies:
 find_package( CLHEP )
 find_package( ROOT COMPONENTS Core Tree MathCore Hist RIO pthread )
-find_package( onnxruntime )
 
 # Component(s) in the package:
 atlas_add_component( CaloTrkMuIdTools
                      src/*.cxx
                      src/components/*.cxx
-                     INCLUDE_DIRS ${ROOT_INCLUDE_DIRS} ${CLHEP_INCLUDE_DIRS} ${ONNXRUNTIME_INCLUDE_DIRS}
-                     LINK_LIBRARIES ${ROOT_LIBRARIES} ${CLHEP_LIBRARIES} ${ONNXRUNTIME_LIBRARIES} CaloEvent AthenaBaseComps StoreGateLib SGtests xAODTracking GaudiKernel ICaloTrkMuIdTools RecoToolInterfaces TrkExInterfaces CaloDetDescrLib CaloGeoHelpers CaloIdentifier CaloUtilsLib xAODCaloEvent ParticleCaloExtension TileDetDescr PathResolver TrkSurfaces TrkCaloExtension TrkEventPrimitives CaloTrackingGeometryLib )
+                     INCLUDE_DIRS ${ROOT_INCLUDE_DIRS} ${CLHEP_INCLUDE_DIRS}
+                     LINK_LIBRARIES ${ROOT_LIBRARIES} ${CLHEP_LIBRARIES} CaloEvent AthenaBaseComps StoreGateLib SGtests xAODTracking GaudiKernel ICaloTrkMuIdTools RecoToolInterfaces TrkExInterfaces CaloDetDescrLib CaloGeoHelpers CaloIdentifier CaloUtilsLib xAODCaloEvent ParticleCaloExtension TileDetDescr PathResolver TrkSurfaces TrkCaloExtension TrkEventPrimitives CaloTrackingGeometryLib AthOnnxruntimeServiceLib)
 
 # Install files from the package:
 atlas_install_headers( CaloTrkMuIdTools )
diff --git a/Reconstruction/MuonIdentification/CaloTrkMuIdTools/CaloTrkMuIdTools/CaloMuonScoreONNXRuntimeSvc.h b/Reconstruction/MuonIdentification/CaloTrkMuIdTools/CaloTrkMuIdTools/CaloMuonScoreONNXRuntimeSvc.h
deleted file mode 100644
index 502ad1ef9c45786c6c5b9f6217d1460a7ca75ff9..0000000000000000000000000000000000000000
--- a/Reconstruction/MuonIdentification/CaloTrkMuIdTools/CaloTrkMuIdTools/CaloMuonScoreONNXRuntimeSvc.h
+++ /dev/null
@@ -1,56 +0,0 @@
-// Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
-#ifndef CALOTRKMUIDTOOLS_CALOMUONSCOREONNXRUNTIMESVC_H
-#define CALOTRKMUIDTOOLS_CALOMUONSCOREONNXRUNTIMESVC_H
-
-// Local include(s).
-#include "ICaloTrkMuIdTools/ICaloMuonScoreONNXRuntimeSvc.h"
-
-// Framework include(s).
-#include "AthenaBaseComps/AthService.h"
-
-// ONNX include(s).
-#include <core/session/onnxruntime_cxx_api.h>
-
-// System include(s).
-#include <memory>
-
-/// Service implementing @c ICaloMuonScoreONNXRuntimeSvc
-///
-/// This is a very simple implementation, just managing the lifetime
-/// of some ONNX Runtime C++ objects.
-///
-/// Ported from  Control/AthenaExamples/AthExOnnxRuntime (Ricardo Woelker <ricardo.woelker@cern.ch>)
-///
-/// @author Attila Krasznahorkay <Attila.Krasznahorkay@cern.ch>
-///
-class CaloMuonScoreONNXRuntimeSvc : public extends< AthService, ICaloMuonScoreONNXRuntimeSvc > {
-  
- public:
-  /// Inherit the base class's constructor
-  using extends::extends;
-  
-  /// @name Function(s) inherited from @c Service
-  /// @{
-  
-  /// Function initialising the service
-  virtual StatusCode initialize() override;
-  /// Function finalising the service
-  virtual StatusCode finalize() override;
-  
-  /// @}
-  
-  /// @name Function(s) inherited from @c ICaloMuonScoreONNXRuntimeSvc
-  /// @{
-  
-  /// Return the ONNX Runtime environment object
-  virtual Ort::Env& env() const override;
-  
-  /// @}
-  
- private:
-  /// Global runtime environment for ONNX Runtime
-  std::unique_ptr< Ort::Env > m_env;
-  
-}; // class CaloMuonScoreONNXRuntimeSvc
-
-#endif // CALOTRKMUIDTOOLS_CALOMUONSCOREONNXRUNTIMESVC_H
diff --git a/Reconstruction/MuonIdentification/CaloTrkMuIdTools/CaloTrkMuIdTools/CaloMuonScoreTool.h b/Reconstruction/MuonIdentification/CaloTrkMuIdTools/CaloTrkMuIdTools/CaloMuonScoreTool.h
index 113ea4e53220c6dc83fb6041300c2c52bb1e806f..8302f9456611e7f387e8cb3c2f5ffcc68194789f 100644
--- a/Reconstruction/MuonIdentification/CaloTrkMuIdTools/CaloTrkMuIdTools/CaloMuonScoreTool.h
+++ b/Reconstruction/MuonIdentification/CaloTrkMuIdTools/CaloTrkMuIdTools/CaloMuonScoreTool.h
@@ -6,7 +6,7 @@
 #define CALOTRKMUIDTOOLS_CALOMUONSCORETOOL_H
 
 #include "ICaloTrkMuIdTools/ICaloMuonScoreTool.h"
-#include "ICaloTrkMuIdTools/ICaloMuonScoreONNXRuntimeSvc.h"
+#include "AthOnnxruntimeService/IONNXRuntimeSvc.h"
 #include "AthenaBaseComps/AthAlgTool.h"
 #include "GaudiKernel/ToolHandle.h"
 #include "GaudiKernel/ServiceHandle.h"
@@ -67,27 +67,20 @@ public:
   std::vector<float> getInputTensor(std::vector<float> &eta, std::vector<float> &phi, std::vector<float> &energy, std::vector<int> &sampling) const;
 
 private:
-  // Number of bins in eta
-  int m_etaBins = 30;
 
-  // Number of bins in phi
-  int m_phiBins = 30;
-
-  // window in terms of abs(eta) to consider around the median eta value
-  float m_etaCut = 0.25;
-
-  // window in terms of abs(phi) to consider around the median phi value
-  float m_phiCut = 0.25;
-
-  // Number of colour channels to consider in the convolutional neural network
-  int m_nChannels = 7;
+  Gaudi::Property<float> m_CaloCellAssociationConeSize {this, "CaloCellAssociationConeSize", 0.2, "Size of the cone within which calo cells are associated with a track particle"};
+  Gaudi::Property<int> m_etaBins {this, "etaBins", 30, "Number of bins in eta"};
+  Gaudi::Property<int> m_phiBins {this, "phiBins", 30, "Number of bins in phi"};
+  Gaudi::Property<float> m_etaCut {this, "etaCut", 0.25, "Eta cut on the calorimeter cells associated with the track particle after centering of the calorimeter image"};
+  Gaudi::Property<float> m_phiCut {this, "phiCut", 0.25, "Phi cut on the calorimeter cells associated with the track particle after centering of the calorimeter image"};
+  Gaudi::Property<int> m_nChannels {this, "nChannels", 7, "Number of colour channels in the convolutional neural network"};
 
   ToolHandle <Rec::IParticleCaloCellAssociationTool> m_caloCellAssociationTool{this, "ParticleCaloCellAssociationTool", ""}; 
 
-  /// Handle to @c IONNXRuntimeSvc
-  ServiceHandle< ICaloMuonScoreONNXRuntimeSvc > m_svc{ this, "CaloMuonScoreONNXRuntimeSvc",
-      "CaloMuonScoreONNXRuntimeSvc",
-      "Name of the service to use" };
+  /// Handle to @c AthONNX::IONNXRuntimeSvc
+  ServiceHandle< AthONNX::IONNXRuntimeSvc > m_svc{ this, "ONNXRuntimeSvc",
+      "AthONNX::ONNXRuntimeSvc",
+      "CaloMuonScoreTool ONNXRuntimeSvc" };
 
   std::unique_ptr< Ort::Session > m_session;
 
diff --git a/Reconstruction/MuonIdentification/CaloTrkMuIdTools/doc/packagedoc.h b/Reconstruction/MuonIdentification/CaloTrkMuIdTools/doc/packagedoc.h
index 5f7fb90785f3287f627451f32b363621f1973752..2acc2e3524f065a811d31e24929d19834b309997 100644
--- a/Reconstruction/MuonIdentification/CaloTrkMuIdTools/doc/packagedoc.h
+++ b/Reconstruction/MuonIdentification/CaloTrkMuIdTools/doc/packagedoc.h
@@ -21,10 +21,6 @@
 	calorimeter cell energy deposits using a convolutional 
 	neural network.
 
-@section CaloTrkMuIdTools_CaloMuonScoreONNXRuntimeSvcIntroduction CaloMuonScoreONNXRuntimeSvc
-	Service that maintains a ONNX session which holds 
-	a tensorflow model and can perform inference on it.
-
 @section CaloTrkMuIdTools_CaloMuonTagIntroduction CaloMuonTag
 	Muon tagger using calorimeter deposits.
 	A track is tagged when deposits above the noise treshold are found in the 
diff --git a/Reconstruction/MuonIdentification/CaloTrkMuIdTools/src/CaloMuonScoreONNXRuntimeSvc.cxx b/Reconstruction/MuonIdentification/CaloTrkMuIdTools/src/CaloMuonScoreONNXRuntimeSvc.cxx
deleted file mode 100644
index b2845368fede7e51913d10a22e123b4d096db7f1..0000000000000000000000000000000000000000
--- a/Reconstruction/MuonIdentification/CaloTrkMuIdTools/src/CaloMuonScoreONNXRuntimeSvc.cxx
+++ /dev/null
@@ -1,30 +0,0 @@
-// Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
-
-// Local include(s).
-#include "CaloTrkMuIdTools/CaloMuonScoreONNXRuntimeSvc.h"
-
-StatusCode CaloMuonScoreONNXRuntimeSvc::initialize() {
-  
-  // Create the environment object.
-  m_env = std::make_unique< Ort::Env >( ORT_LOGGING_LEVEL_WARNING,
-					name().c_str() );
-  ATH_MSG_DEBUG( "Ort::Env object created" );
-  
-  // Return gracefully.
-  return StatusCode::SUCCESS;
-}
-
-StatusCode CaloMuonScoreONNXRuntimeSvc::finalize() {
-  
-  // Dekete the environment object.
-  m_env.reset();
-  ATH_MSG_DEBUG( "Ort::Env object deleted" );
-  
-  // Return gracefully.
-  return StatusCode::SUCCESS;
-}
-
-Ort::Env& CaloMuonScoreONNXRuntimeSvc::env() const {
-  
-  return *m_env;
-}
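
The deleted service existed only to own a single process-wide `Ort::Env`, a role now played by the shared `AthONNX::ONNXRuntimeSvc`. A minimal standalone sketch of the same idea using the ONNX Runtime C++ API directly; `"model.onnx"` is a placeholder path:

```cpp
#include <core/session/onnxruntime_cxx_api.h>

// One environment for the whole process, shared by every client.
Ort::Env& sharedEnv() {
  static Ort::Env env(ORT_LOGGING_LEVEL_WARNING, "shared-onnx-env");
  return env;
}

int main() {
  Ort::SessionOptions options;
  // Each client creates its own session, but all of them reuse sharedEnv().
  // Note: the constructor throws if the placeholder model file does not exist.
  Ort::Session session(sharedEnv(), "model.onnx", options);
  return 0;
}
```
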
diff --git a/Reconstruction/MuonIdentification/CaloTrkMuIdTools/src/CaloMuonScoreTool.cxx b/Reconstruction/MuonIdentification/CaloTrkMuIdTools/src/CaloMuonScoreTool.cxx
index adbbbbdd91143e5b0db695b4e81a4d82adbfec33..34c6830e76fc62ae30a4532faf44a9c820ee6e6b 100644
--- a/Reconstruction/MuonIdentification/CaloTrkMuIdTools/src/CaloMuonScoreTool.cxx
+++ b/Reconstruction/MuonIdentification/CaloTrkMuIdTools/src/CaloMuonScoreTool.cxx
@@ -146,8 +146,10 @@ float CaloMuonScoreTool::getMuonScore( const xAOD::TrackParticle* trk ) const {
 
   ATH_MSG_DEBUG("Calculating muon score for track particle with eta="<<track_eta);
 
-  // - associate calocells to trackparticle, cone size 0.2, use cache
-  std::unique_ptr<const Rec::ParticleCellAssociation> association = m_caloCellAssociationTool->particleCellAssociation(*trk,0.2,nullptr);
+  ATH_MSG_DEBUG("Finding calo cell association for track particle within cone of delta R="<<m_CaloCellAssociationConeSize);
+
+  // - associate calocells to trackparticle
+  std::unique_ptr<const Rec::ParticleCellAssociation> association = m_caloCellAssociationTool->particleCellAssociation(*trk,m_CaloCellAssociationConeSize,nullptr);
   if(!association){
     ATH_MSG_VERBOSE("Could not get particleCellAssociation");
     return -1.;
diff --git a/Reconstruction/MuonIdentification/CaloTrkMuIdTools/src/components/CaloTrkMuIdTools_entries.cxx b/Reconstruction/MuonIdentification/CaloTrkMuIdTools/src/components/CaloTrkMuIdTools_entries.cxx
index 644c6809704001e352b356efe885f5f29e4581c7..641b962659babbdc110f1f4618902bb3150565ca 100644
--- a/Reconstruction/MuonIdentification/CaloTrkMuIdTools/src/components/CaloTrkMuIdTools_entries.cxx
+++ b/Reconstruction/MuonIdentification/CaloTrkMuIdTools/src/components/CaloTrkMuIdTools_entries.cxx
@@ -3,12 +3,9 @@
 #include "CaloTrkMuIdTools/TrackDepositInCaloTool.h"
 #include "CaloTrkMuIdTools/CaloMuonLikelihoodTool.h"
 #include "CaloTrkMuIdTools/CaloMuonScoreTool.h"
-#include "CaloTrkMuIdTools/CaloMuonScoreONNXRuntimeSvc.h"
 
 DECLARE_COMPONENT( CaloMuonTag )
 DECLARE_COMPONENT( TrackEnergyInCaloTool )
 DECLARE_COMPONENT( TrackDepositInCaloTool )
 DECLARE_COMPONENT( CaloMuonLikelihoodTool )
 DECLARE_COMPONENT( CaloMuonScoreTool )
-DECLARE_COMPONENT( CaloMuonScoreONNXRuntimeSvc )
-
diff --git a/Reconstruction/MuonIdentification/ICaloTrkMuIdTools/CMakeLists.txt b/Reconstruction/MuonIdentification/ICaloTrkMuIdTools/CMakeLists.txt
index 60ce13f7678a47d0c51a6daaa96952e98ccbbcd6..3e5abe061ba251a45a03f4394d77615ce468de51 100644
--- a/Reconstruction/MuonIdentification/ICaloTrkMuIdTools/CMakeLists.txt
+++ b/Reconstruction/MuonIdentification/ICaloTrkMuIdTools/CMakeLists.txt
@@ -5,10 +5,7 @@
 # Declare the package name:
 atlas_subdir( ICaloTrkMuIdTools )
 
-find_package( onnxruntime )
-
 # Component(s) in the package:
 atlas_add_library( ICaloTrkMuIdTools
                    PUBLIC_HEADERS ICaloTrkMuIdTools
-		   INCLUDE_DIRS ${ONNXRUNTIME_INCLUDE_DIRS}
-		   LINK_LIBRARIES ${ONNXRUNTIME_LIBRARIES} CaloEvent CaloIdentifier xAODCaloEvent xAODTracking GaudiKernel muonEvent TrkSurfaces TrkEventPrimitives TrkParameters TrkTrack CaloDetDescrLib )
+		   LINK_LIBRARIES CaloEvent CaloIdentifier xAODCaloEvent xAODTracking GaudiKernel muonEvent TrkSurfaces TrkEventPrimitives TrkParameters TrkTrack CaloDetDescrLib )
diff --git a/Reconstruction/MuonIdentification/ICaloTrkMuIdTools/ICaloTrkMuIdTools/ICaloMuonScoreONNXRuntimeSvc.h b/Reconstruction/MuonIdentification/ICaloTrkMuIdTools/ICaloTrkMuIdTools/ICaloMuonScoreONNXRuntimeSvc.h
deleted file mode 100644
index 5390caf4b42e8945e04fa089394b55416fcc22b7..0000000000000000000000000000000000000000
--- a/Reconstruction/MuonIdentification/ICaloTrkMuIdTools/ICaloTrkMuIdTools/ICaloMuonScoreONNXRuntimeSvc.h
+++ /dev/null
@@ -1,36 +0,0 @@
-#ifndef CALOTRKMUIDTOOLS_ICALOMUONSCOREONNXRUNTIMESVC_H
-#define CALOTRKMUIDTOOLS_ICALOMUONSCOREONNXRUNTIMESVC_H
-
-// Gaudi include(s).
-#include "GaudiKernel/IService.h"
-
-// ONNX include(s).
-#include <core/session/onnxruntime_cxx_api.h>
-
-/// Namespace holding all of the ONNX Runtime example code
-
-/// Service used for managing global objects used by ONNX Runtime
-///
-/// In order to allow multiple clients to use ONNX Runtime at the same
-/// time, this service is used to manage the objects that must only
-/// be created once in the Athena process.
-///
-/// Ported from  Control/AthenaExamples/AthExOnnxRuntime (Ricardo Woelker <ricardo.woelker@cern.ch>)
-///
-/// @author Attila Krasznahorkay <Attila.Krasznahorkay@cern.ch>
-///
-class ICaloMuonScoreONNXRuntimeSvc : public virtual IService {
-  
- public:
-  /// Virtual destructor, to make vtable happy
-  virtual ~ICaloMuonScoreONNXRuntimeSvc() = default;
-  
-  /// Declare an ID for this interface
-  DeclareInterfaceID( ICaloMuonScoreONNXRuntimeSvc, 1, 0 );
-  
-  /// Return the ONNX Runtime environment object
-  virtual Ort::Env& env() const = 0;
-  
-}; // class ICaloMuonScoreONNXRuntimeSvc
-
-#endif // CALOTRKMUIDTOOLS_ICALOMUONSCOREONNXRUNTIMESVC_H
diff --git a/Reconstruction/MuonIdentification/MuonCombinedBaseTools/src/MuonCaloTagTool.h b/Reconstruction/MuonIdentification/MuonCombinedBaseTools/src/MuonCaloTagTool.h
index 133653479403171de34b382d135ff818a2821d22..a210e4dd12d8dfa29cb55d562ec1853a5121da9c 100644
--- a/Reconstruction/MuonIdentification/MuonCombinedBaseTools/src/MuonCaloTagTool.h
+++ b/Reconstruction/MuonIdentification/MuonCombinedBaseTools/src/MuonCaloTagTool.h
@@ -24,7 +24,6 @@
 #include "ICaloTrkMuIdTools/ICaloMuonScoreTool.h"
 #include "ICaloTrkMuIdTools/ICaloMuonTag.h"
 #include "ICaloTrkMuIdTools/ITrackDepositInCaloTool.h"
-#include "ICaloTrkMuIdTools/ICaloMuonScoreONNXRuntimeSvc.h"
 #include "TrkToolInterfaces/ITrackSelectorTool.h"
 #include "StoreGate/ReadHandleKey.h"
 
@@ -103,7 +102,6 @@ namespace MuonCombined {
     // --- CaloTrkMuIdTools ---
     ToolHandle<ICaloMuonLikelihoodTool>  m_caloMuonLikelihood{this,"CaloMuonLikelihoodTool","CaloMuonLikelihoodTool/CaloMuonLikelihoodTool"};
     ToolHandle<ICaloMuonScoreTool>  m_caloMuonScoreTool{this, "CaloMuonScoreTool", "CaloMuonScoreTool/CaloMuonScoreTool"};
-    ServiceHandle<ICaloMuonScoreONNXRuntimeSvc>  m_caloMuonScoreONNXRuntimeSvc{this, "CaloMuonScoreONNXRuntimeSvc", "CaloMuonScoreTool/CaloMuonScoreONNXRuntimeSvc"};
 
     ToolHandle<ICaloMuonTag>             m_caloMuonTagLoose{this,"CaloMuonTagLoose","CaloMuonTag/CaloMuonTagLoose","CaloTrkMuIdTools::CaloMuonTag for loose tagging"}; 
     ToolHandle<ICaloMuonTag>             m_caloMuonTagTight{this,"CaloMuonTagTight","CaloMuonTag/CaloMuonTag","CaloTrkMuIdTools::CaloMuonTag for tight tagging"}; 
diff --git a/Reconstruction/MuonIdentification/MuonCombinedBaseTools/src/MuonSegmentTagTool.cxx b/Reconstruction/MuonIdentification/MuonCombinedBaseTools/src/MuonSegmentTagTool.cxx
index cac554b54d098a3b3e01535d0f968309eab16ab9..23f42e6603d65a511d890c519bb58e7f8e4f583e 100644
--- a/Reconstruction/MuonIdentification/MuonCombinedBaseTools/src/MuonSegmentTagTool.cxx
+++ b/Reconstruction/MuonIdentification/MuonCombinedBaseTools/src/MuonSegmentTagTool.cxx
@@ -23,12 +23,9 @@
 #include "MuonSegment/MuonSegment.h"
 #include "MSSurfaces.h"
 #include "MuonCombinedEvent/MuonSegmentInfo.h"
-#include "MuonCombinedEvent/SegmentTag.h"
 #include "xAODMuon/MuonSegmentContainer.h"
 #include "xAODMuon/MuonSegment.h"
 #include "AthLinks/ElementLink.h"
-#include "xAODMuon/MuonSegmentContainer.h"
-#include "xAODMuon/MuonSegment.h"
 #include "TrkCaloExtension/CaloExtension.h" 
 
 #include <iomanip>
diff --git a/Reconstruction/MuonIdentification/MuonCombinedConfig/python/MuonCombinedRecToolsConfig.py b/Reconstruction/MuonIdentification/MuonCombinedConfig/python/MuonCombinedRecToolsConfig.py
index 715ded49b3b09d492aeab24003b78e3456aa634e..7e9f9a75cfbdd6c5da6e207dbdba1e1ad9ab3464 100644
--- a/Reconstruction/MuonIdentification/MuonCombinedConfig/python/MuonCombinedRecToolsConfig.py
+++ b/Reconstruction/MuonIdentification/MuonCombinedConfig/python/MuonCombinedRecToolsConfig.py
@@ -873,8 +873,6 @@ def CaloMuonScoreToolCfg(flags, name='CaloMuonScoreTool', **kwargs ):
     from TrackToCalo.TrackToCaloConfig import ParticleCaloCellAssociationToolCfg
     result = ParticleCaloCellAssociationToolCfg(flags)
     kwargs.setdefault("ParticleCaloCellAssociationTool", result.popPrivateTools())
-    caloMuonScoreSvc = CompFactory.CaloMuonScoreONNXRuntimeSvc(name="CaloMuonScoreONNXRuntimeSvc")
-    result.addService(caloMuonScoreSvc)
     tool = CompFactory.CaloMuonScoreTool(name, **kwargs )
     result.setPrivateTools(tool)
     return result
diff --git a/Reconstruction/MuonIdentification/MuonCombinedRecExample/python/MuonCaloTagTool.py b/Reconstruction/MuonIdentification/MuonCombinedRecExample/python/MuonCaloTagTool.py
index 074e85438f605b992d7316d50b88d3d801c4b467..9300cd9534b7f4a77c411ba915943300ea8a50b7 100644
--- a/Reconstruction/MuonIdentification/MuonCombinedRecExample/python/MuonCaloTagTool.py
+++ b/Reconstruction/MuonIdentification/MuonCombinedRecExample/python/MuonCaloTagTool.py
@@ -39,9 +39,6 @@ def TrackDepositInCaloTool( name ='TrackDepositInCaloTool', **kwargs ):
     kwargs.setdefault("ParticleCaloCellAssociationTool",       caloCellAssociationTool )
     return CfgMgr.TrackDepositInCaloTool(name,**kwargs)
 
-def CaloMuonScoreONNXRuntimeSvc(name='CaloMuonScoreONNXRuntimeSvc', **kwargs):
-    return CfgMgr.CaloMuonScoreONNXRuntimeSvc(name, **kwargs)
-
 def CaloMuonLikelihoodTool(name='CaloMuonLikelihoodTool', **kwargs ):
     kwargs.setdefault("ParticleCaloExtensionTool",       getPublicTool("MuonParticleCaloExtensionTool") )
     return CfgMgr.CaloMuonLikelihoodTool(name,**kwargs)
@@ -50,7 +47,11 @@ def CaloMuonScoreTool(name='CaloMuonScoreTool', **kwargs ):
     from TrackToCalo.TrackToCaloConf import Rec__ParticleCaloCellAssociationTool
     caloCellAssociationTool = Rec__ParticleCaloCellAssociationTool(ParticleCaloExtensionTool = getPublicTool("MuonParticleCaloExtensionTool"))
     kwargs.setdefault("ParticleCaloCellAssociationTool",       caloCellAssociationTool )
-    kwargs.setdefault("CaloMuonScoreONNXRuntimeSvc", getService("CaloMuonScoreONNXRuntimeSvc") )
+    
+    from AthOnnxruntimeService.AthOnnxruntimeServiceConf import AthONNX__ONNXRuntimeSvc
+    onnxRuntimeSvc = AthONNX__ONNXRuntimeSvc( )
+    kwargs.setdefault("ONNXRuntimeSvc", onnxRuntimeSvc)
+
     return CfgMgr.CaloMuonScoreTool(name,**kwargs)
 
 def MuonCaloTagTool( name='MuonCaloTagTool', **kwargs ):  
diff --git a/Reconstruction/MuonIdentification/MuonCombinedRecExample/python/MuonCombinedRecExampleConfigDb.py b/Reconstruction/MuonIdentification/MuonCombinedRecExample/python/MuonCombinedRecExampleConfigDb.py
index 3424802d13253e4950f6d2f47591809f033e68c7..523dc66a2aa88f8dfa763c9dad4fa6bfd86c14e4 100644
--- a/Reconstruction/MuonIdentification/MuonCombinedRecExample/python/MuonCombinedRecExampleConfigDb.py
+++ b/Reconstruction/MuonIdentification/MuonCombinedRecExample/python/MuonCombinedRecExampleConfigDb.py
@@ -99,7 +99,6 @@ addTool("MuonCombinedRecExample.MuonCaloTagTool.CaloMuonTagLoose","CaloMuonTagLo
 addTool("MuonCombinedRecExample.MuonCaloTagTool.CaloMuonTag","CaloMuonTag")
 addTool("MuonCombinedRecExample.MuonCaloTagTool.CaloMuonLikelihoodTool","CaloMuonLikelihoodTool")
 addTool("MuonCombinedRecExample.MuonCaloTagTool.CaloMuonScoreTool","CaloMuonScoreTool")
-addService("MuonCombinedRecExample.MuonCaloTagTool.CaloMuonScoreONNXRuntimeSvc","CaloMuonScoreONNXRuntimeSvc")
 
 ####### muid tools
 addTool("MuonCombinedRecExample.MuonCombinedFitTools.MuonAlignmentUncertToolTheta","MuonAlignmentUncertToolTheta")
diff --git a/Reconstruction/MuonIdentification/MuonCombinedTrackFindingTools/src/MuonStauRecoTool.cxx b/Reconstruction/MuonIdentification/MuonCombinedTrackFindingTools/src/MuonStauRecoTool.cxx
index c2663c47ffb76497f02965e1bc41ee7ddb64d86e..78fe252974eb9cd7df3b7c0a13d21441b70d89a0 100644
--- a/Reconstruction/MuonIdentification/MuonCombinedTrackFindingTools/src/MuonStauRecoTool.cxx
+++ b/Reconstruction/MuonIdentification/MuonCombinedTrackFindingTools/src/MuonStauRecoTool.cxx
@@ -26,7 +26,6 @@
 #include "MdtCalibData/TrRelation.h"
 #include "MdtCalibData/IRtRelation.h"
 #include "MdtCalibData/IRtResolution.h"
-#include "EventPrimitives/EventPrimitivesHelpers.h"
 
 namespace MuonCombined {
 
diff --git a/Reconstruction/RecExample/RecExCommon/share/RecExCommon_topOptions.py b/Reconstruction/RecExample/RecExCommon/share/RecExCommon_topOptions.py
index 09d2e1316570c0675ea4d171d800bc0f51363ec3..ac97aa0382bcea27f38fa9dcf62b43dc75441de7 100644
--- a/Reconstruction/RecExample/RecExCommon/share/RecExCommon_topOptions.py
+++ b/Reconstruction/RecExample/RecExCommon/share/RecExCommon_topOptions.py
@@ -1393,6 +1393,11 @@ if rec.doWriteAOD():
         ToolSvc += CfgMgr.xAODMaker__EventFormatMetaDataTool( "EventFormatMetaDataTool")
 
         svcMgr.MetaDataSvc.MetaDataTools += [ ToolSvc.EventFormatMetaDataTool ]
+
+        # FileMetaData tool
+        ToolSvc += CfgMgr.xAODMaker__FileMetaDataTool("FileMetaDataTool")
+        svcMgr.MetaDataSvc.MetaDataTools += [ToolSvc.FileMetaDataTool]
+
         # Put MetaData in AOD stream via AugmentedPoolStream_
         # Write all meta data containers
         StreamAOD_Augmented.AddMetaDataItem(dfMetadataItemList())
diff --git a/Reconstruction/RecExample/RecExCond/CMakeLists.txt b/Reconstruction/RecExample/RecExCond/CMakeLists.txt
index 9aac2cd2afc111e526a7e343aa49521c8a4e87ad..bd638a964e76d9d8c9be5de1ef3171ad6f118c41 100644
--- a/Reconstruction/RecExample/RecExCond/CMakeLists.txt
+++ b/Reconstruction/RecExample/RecExCond/CMakeLists.txt
@@ -1,11 +1,7 @@
-################################################################################
-# Package: RecExCond
-################################################################################
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 # Declare the package name:
 atlas_subdir( RecExCond )
 
 # Install files from the package:
-atlas_install_python_modules( python/*.py )
 atlas_install_joboptions( share/*.py )
-
diff --git a/Reconstruction/RecExample/RecExCond/python/RecFlagFunctions.py b/Reconstruction/RecExample/RecExCond/python/RecFlagFunctions.py
deleted file mode 100644
index f2bd65a5aa08b87036c455a7bc940280ccf1fc46..0000000000000000000000000000000000000000
--- a/Reconstruction/RecExample/RecExCond/python/RecFlagFunctions.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-##=============================================================================
-## Name:        RecFlagFunctions.py
-## Author:      David Cote (DESY)
-## Created:     July 2008
-## Description: This is a collection of utility functions related to rec flags
-##=============================================================================
-
-def inputFileNames():
-    from AthenaCommon.AthenaCommonFlags  import athenaCommonFlags
-
-    if athenaCommonFlags.FilesInput()!=[]:
-        return athenaCommonFlags.FilesInput()
-    from RecExConfig.RecFlags import rec
-    inFiles=None
-
-    # highest flag have precedence
-    if rec.readTAG():
-        inFiles=athenaCommonFlags.PoolTAGInput()
-    elif rec.readAOD():
-        inFiles=athenaCommonFlags.PoolAODInput()
-    elif rec.readESD():
-        inFiles=athenaCommonFlags.PoolESDInput()
-    elif rec.readRDO():
-        from AthenaCommon.GlobalFlags  import globalflags
-        if globalflags.InputFormat=='bytestream':
-            inFiles=athenaCommonFlags.BSRDOInput()
-        else:
-            inFiles=athenaCommonFlags.PoolRDOInput()
-    else:
-        raise RutimeError("Unable to determine input file")
-
-    return inFiles
-    
-
diff --git a/Reconstruction/RecExample/RecExCond/python/__init__.py b/Reconstruction/RecExample/RecExCond/python/__init__.py
deleted file mode 100644
index 19522fc8b9c03de31cc7abd5422ad53e4182e7c1..0000000000000000000000000000000000000000
--- a/Reconstruction/RecExample/RecExCond/python/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-# File: RecExCond/python/__init__.py
-# Author: Wim Lavrijsen (WLavrijsen@lbl.gov)
-
-__version__ = '1.0.0'
-__author__  = 'David Rousseau (rousseau@lal.in2p3.fr) '
-
-
diff --git a/Reconstruction/RecExample/RecExCond/share/RecExCommon_flags.py b/Reconstruction/RecExample/RecExCond/share/RecExCommon_flags.py
index 223094fbca94590af06eabbeedd67b14c98b967a..f92301fd72ad413dc6e411c7808bb20a2033a93c 100755
--- a/Reconstruction/RecExample/RecExCond/share/RecExCommon_flags.py
+++ b/Reconstruction/RecExample/RecExCond/share/RecExCommon_flags.py
@@ -229,8 +229,9 @@ if len(athenaCommonFlags.FilesInput())>0:
 if len(rec.AutoConfiguration())>0:
     from RecExConfig.AutoConfiguration import ConfigureFromListOfKeys
     ConfigureFromListOfKeys(rec.AutoConfiguration())
-    include ("RecExConfig/AutoConfigConsistencyCheck.py")
-
+    from RecExConfig.PyComps import AutoConfigConsistencyCheckSvc
+    from AthenaCommon.AppMgr import ServiceMgr as svcMgr
+    svcMgr += AutoConfigConsistencyCheckSvc("AutoConfigConsistencyCheckSvc")
 
 
 #special commisioning job options
diff --git a/Reconstruction/RecExample/RecExCond/share/testRecExCommonFlags.py b/Reconstruction/RecExample/RecExCond/share/testRecExCommonFlags.py
deleted file mode 100755
index 8e88a36a599a9aec201e6131c8f223a09e78f729..0000000000000000000000000000000000000000
--- a/Reconstruction/RecExample/RecExCond/share/testRecExCommonFlags.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# standalone python to test RecExCommon_flags.py
-# copy ine one's directory
-# python testRecExCommonFlags.py
-
-PoolRDOInput=["LFN:streamInclEle_DC3-02_RDO.pool"]
-DetDescrVersion="ATLAS-DC3-02"
-
-
-doTrigger = True # for example do not run trigger simulation
-
-
-include ("RecExCommon/RecExCommon_flags.py")
-
-# test: 
-from AthenaCommon.GlobalFlags  import globalflags
-#if globalflags.InputFormat()=='bytestream': # jobproperties.GlobalFlags.InputFormat() is equivalent
-#   dosomething
-print(globalflags.DetDescrVersion())
-
-if globalflags.DetDescrVersion()[0:3]=="DC2":
-    print(" this is DC2 ")
-else:
-    print(" this is not DC2 ")
-
-from RecExConfig.RecFlags import recAlgs
-if recAlgs.doTrigger() ^ doTrigger:
-    print("recAlgs.doTrigger() and doTrigger inconsistent")
-else:
-    print("recAlgs.doTrigger() and doTrigger consistent")
-
-
-# stop the hard way
-stop
diff --git a/Reconstruction/RecExample/RecExConfig/CMakeLists.txt b/Reconstruction/RecExample/RecExConfig/CMakeLists.txt
index 21c0870e74269b137e5e047c351a34597a977ba9..bd1b599e984de583cece0df6b7a71f94af9e2898 100644
--- a/Reconstruction/RecExample/RecExConfig/CMakeLists.txt
+++ b/Reconstruction/RecExample/RecExConfig/CMakeLists.txt
@@ -1,15 +1,13 @@
-################################################################################
-# Package: RecExConfig
-################################################################################
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 # Declare the package name:
 atlas_subdir( RecExConfig )
 
 # Install files from the package:
-atlas_install_python_modules( python/*.py )
-atlas_install_joboptions( share/*.py share/tests/*.py )
-atlas_install_scripts( share/RecExConfig_links.sh )
-
-atlas_add_test( testObjKeyStore SCRIPT python -m RecExConfig.testObjKeyStore
-    POST_EXEC_SCRIPT nopost.sh ) 
+atlas_install_python_modules( python/*.py POST_BUILD_CMD ${ATLAS_FLAKE8} )
+atlas_install_joboptions( share/tests/*.py )
 
+# Test(s) in the package
+atlas_add_test( testObjKeyStore
+   SCRIPT python -m RecExConfig.testObjKeyStore
+   POST_EXEC_SCRIPT nopost.sh )
diff --git a/Reconstruction/RecExample/RecExConfig/python/AutoConfiguration.py b/Reconstruction/RecExample/RecExConfig/python/AutoConfiguration.py
index cf1b225412233a1171135050d32f496f8e846026..565f340d9de50802ae13c911d564982b6c32cdc6 100644
--- a/Reconstruction/RecExample/RecExConfig/python/AutoConfiguration.py
+++ b/Reconstruction/RecExample/RecExConfig/python/AutoConfiguration.py
@@ -1,7 +1,4 @@
-# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
-
-from __future__ import print_function
-
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 ##=============================================================================
 ## Name:        AutoConfiguration.py
@@ -11,7 +8,6 @@ from __future__ import print_function
 ##=============================================================================
 from AthenaCommon.GlobalFlags import globalflags
 from RecExConfig.RecFlags import rec
-from RecExConfig.RecoFunctions import ItemInListStartsWith
 
 from AthenaCommon.Logging import logging
 
@@ -101,7 +97,7 @@ def GetFieldFromCool():
     try:
         from RecExConfig.GetCool import cool
         return cool.solenoidCurrent(),cool.toroidCurrent()
-    except:
+    except Exception:
         return None,None
     
 
@@ -123,7 +119,7 @@ def GetFieldFromInputFile():
         try:
             solenoidCurrent = metadata['/EXT/DCS/MAGNETS/SENSORDATA']['value'][0]  # CentralSol_Current
             toroidCurrent = metadata['/EXT/DCS/MAGNETS/SENSORDATA']['value'][2]  # Toroids_Current
-        except:
+        except Exception:
             logAutoConfiguration.warning("Unable to find solenoid and toroid currents in /EXT/DCS/MAGNETS/SENSORDATA")
 
 
@@ -163,7 +159,7 @@ def GetApproximateFieldFromGeo():
         solenoidCurrent=fullSolenoidCurrent
         toroidCurrent=fullToroidCurrent
     else:
-        logAutoConfiguration.warning("Don't know how to interpret magnetic field status from geometry '%s'."%geo)
+        logAutoConfiguration.warning("Don't know how to interpret magnetic field status from geometry '%s'.",geo)
     return solenoidCurrent,toroidCurrent
 
 def GetApproximateFieldFromConditions():
@@ -189,7 +185,7 @@ def GetApproximateFieldFromConditions():
         solenoidCurrent=0.0
         toroidCurrent=0.0
     else:
-        logAutoConfiguration.warning("Don't know how to interpret magnetic field status from conditionsTag '%s'."%cond)
+        logAutoConfiguration.warning("Don't know how to interpret magnetic field status from conditionsTag '%s'.",cond)
     return solenoidCurrent,toroidCurrent
 
 
@@ -218,23 +214,23 @@ def ConfigureField():
 
     elif metadata['file_type'] == 'BS' and metadata['eventTypes'][0] == 'IS_SIMULATION':
         logAutoConfiguration.info("Field info is not stored in MC BS values are set via conditions tag:")
-        if solenoidCurrent==None or toroidCurrent==None:
+        if solenoidCurrent is None or toroidCurrent is None:
             solenoidCurrent,toroidCurrent=GetApproximateFieldFromConditions()
-        if solenoidCurrent==None or toroidCurrent==None:
+        if solenoidCurrent is None or toroidCurrent is None:
             logAutoConfiguration.warning("BField of MC BS cannot be autoconfigured!! BField is turned ON")
             solenoidCurrent=fullSolenoidCurrent
             toroidCurrent=fullToroidCurrent
 
     elif metadata['file_type'] == 'POOL':
         solenoidCurrent,toroidCurrent=GetFieldFromInputFile()
-        if solenoidCurrent==None or toroidCurrent==None:
+        if solenoidCurrent is None or toroidCurrent is None:
             solenoidCurrent,toroidCurrent=GetApproximateFieldFromConditions()             
-        if solenoidCurrent==None or toroidCurrent==None:
+        if solenoidCurrent is None or toroidCurrent is None:
             solenoidCurrent,toroidCurrent=GetApproximateFieldFromGeo() 
     else:
         raise RuntimeError("Don't know how to interpret file_type '%s'"%metadata['file_type'])
 
-    if solenoidCurrent==None or toroidCurrent==None:
+    if solenoidCurrent is None or toroidCurrent is None:
         raise RuntimeError("Unable to determine field status for this file.")
 
     if toroidCurrent>1.:
@@ -274,10 +270,10 @@ def ConfigureGeo():
             geo="ATLAS-R1-2010-02-00-00" #geo='ATLAS-GEO-16-00-01'
         if metadata['eventTypes'][0] == 'IS_SIMULATION':
             try: geo = metadata['GeoAtlas']
-            except: logAutoConfiguration.warning("Input simulated bs file does not contain bs_metadata with geometry. Probably an old file.")
-            pass
+            except Exception:
+                logAutoConfiguration.warning("Input simulated bs file does not contain bs_metadata with geometry. Probably an old file.")
         globalflags.DetDescrVersion.set_Value_and_Lock(geo)
-        logAutoConfiguration.info("Set GeometryVersion to '%s'"%geo)
+        logAutoConfiguration.info("Set GeometryVersion to '%s'",geo)
 
     elif metadata['file_type'] == 'POOL':
         # configure Geometry from input file
@@ -313,23 +309,23 @@ def GetProjectName():
         whatIsIt=None
         try:
             whatIsIt = metadata['eventTypes'][0]
-        except:
+        except Exception:
             pass
         if whatIsIt=='IS_SIMULATION':
             project='IS_SIMULATION'
         else:
             try:
                 project = metadata['project_name']
-            except:
+            except Exception:
                 from RecExConfig.GetCool import cool
                 project = cool.fileNameTag()
             pass
         #rec.projectName.set_Value_and_Lock(project)
-        logAutoConfiguration.info("Success! GetProjectName() found a project named %s"%project)
+        logAutoConfiguration.info("Success! GetProjectName() found a project named %s",project)
     else:
         project=rec.projectName()
-    if not project in KnownProjects:
-        logAutoConfiguration.warning("Project '%s' is not part of the KnownProjects list."%project)
+    if project not in KnownProjects:
+        logAutoConfiguration.warning("Project '%s' is not part of the KnownProjects list.",project)
         #print(KnownProjects)
 
     return project
@@ -338,7 +334,7 @@ def ConfigureBeamType():
     logAutoConfiguration.debug("Configuring beamType...")
     from AthenaCommon.BeamFlags import jobproperties
     if jobproperties.Beam.beamType.is_locked():      
-        logAutoConfiguration.info("beamType is locked to '%s'. We won't change change it."%jobproperties.Beam.beamType())
+        logAutoConfiguration.info("beamType is locked to '%s'. We won't change it.",jobproperties.Beam.beamType())
         return
 
     BeamType=None
@@ -351,10 +347,10 @@ def ConfigureBeamType():
         listOfKnownBeamTypes=['cosmics' ,'singlebeam','collisions']
         if metadata['beam_type'] in listOfKnownBeamTypes:
             BeamType = metadata['beam_type']
-    except:
+    except Exception:
         logAutoConfiguration.info("beam type not stored in input file, set beam type according to project name...")
         
-    if BeamType==None:
+    if BeamType is None:
         project=GetProjectName()
         if project in KnownCosmicsProjects: BeamType='cosmics'
         elif project in KnownTestProjects: BeamType='cosmics'
@@ -363,13 +359,13 @@ def ConfigureBeamType():
         elif project in KnownHeavyIonProjects: BeamType='collisions'
         elif project in KnownHeavyIonProtonProjects: BeamType='collisions'
     # special treatment for online, set to collisions if undefined
-    if BeamType==None:
+    if BeamType is None:
         from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
         if athenaCommonFlags.isOnline():
             BeamType='collisions' # best guess if we have encountered an unknown project name
-            logAutoConfiguration.warning("Unable to determine BeamType from project name, guessing: %s" % BeamType)
+            logAutoConfiguration.warning("Unable to determine BeamType from project name, guessing: %s", BeamType)
             
-    if BeamType==None:
+    if BeamType is None:
         raise RuntimeError("Unable to determine beamType from projectName '%s'"%project)
     else:
         from AthenaCommon.BeamFlags import jobproperties
@@ -384,11 +380,11 @@ def ConfigureBeamType():
         if project in KnownHeavyIonProjects:
             rec.doHeavyIon.set_Value_and_Lock(True)
             rec.doHIP.set_Value_and_Lock(False)
-            logAutoConfiguration.info("Set rec.doHeavyIon=True based on project tag '%s'" % project)
+            logAutoConfiguration.info("Set rec.doHeavyIon=True based on project tag '%s'", project)
         elif project in KnownHeavyIonProtonProjects:
             rec.doHeavyIon.set_Value_and_Lock(False)
             rec.doHIP.set_Value_and_Lock(True)
-            logAutoConfiguration.info("Set rec.doHIP=True based on project tag '%s'" % project)
+            logAutoConfiguration.info("Set rec.doHIP=True based on project tag '%s'", project)
         else:
             logAutoConfiguration.info("Not a heavy ion project")
             pass
@@ -415,7 +411,7 @@ def ConfigureNumberOfCollisions():
         # auto configure number of collissions
         try:
             jobproperties.Beam.numberOfCollisions.set_Value_and_Lock(metadata['numberOfCollisions'])
-        except:
+        except Exception:
             logAutoConfiguration.warning("numberOfCollisions could not be auto configured, no info available using default value: %s "
                                          ,jobproperties.Beam.numberOfCollisions() )
             return
@@ -491,7 +487,7 @@ def ConfigureBeamEnergy():
                 else:
                     logAutoConfiguration.warning("Could not auto-configure beam energy based on project name: %s" , projectName)
                     return
-                if beamEnergy!=None:
+                if beamEnergy is not None:
                     jobproperties.Beam.energy.set_Value_and_Lock(beamEnergy)
         elif metadata['eventTypes'][0] == 'IS_SIMULATION':
             if metadata['beam_energy'] != 'N/A' and metadata['beam_energy'] != '':
@@ -515,7 +511,7 @@ def ConfigureBeamBunchSpacing():
         try:
             jobproperties.Beam.bunchSpacing.set_Value_and_Lock(int(metadata['intraTrainBunchSpacing']))
             logAutoConfiguration.info("Auto configured bunchSpacing: %s ", jobproperties.Beam.bunchSpacing())
-        except:
+        except Exception:
             logAutoConfiguration.warning("bunchSpacing could not be auto configured, no info available using default value: %s ",jobproperties.Beam.bunchSpacing() )
 
     else:
@@ -551,7 +547,7 @@ def ConfigureBeamBunchSpacing():
                 jobproperties.Beam.bunchSpacing.set_Value_and_Lock(25)
                 logAutoConfiguration.info("Autoconfigure bunch-spacing to 25 ns")
             else:
-                logAutoConfiguration.info("larger bunch spacing, leave default value of %i ns" % jobproperties.Beam.bunchSpacing())
+                logAutoConfiguration.info("larger bunch spacing, leave default value of %i ns", jobproperties.Beam.bunchSpacing())
                 pass
             pass
         else:
@@ -639,20 +635,20 @@ def ConfigureInputType():
         rec.doAOD=False
         rec.doESD=True
         rec.doWriteAOD=False
-        logAutoConfiguration.info ("BS format: setting globalflags.InputFormat=%s "%globalflags.InputFormat() )
-        logAutoConfiguration.info ("BS format: setting rec.readRDO=%s "%rec.readRDO() )
-        logAutoConfiguration.info ("BS format: setting rec.doAOD=%s "%rec.doAOD() )
-        logAutoConfiguration.info ("BS format: setting rec.doESD=%s "%rec.doESD() )
-        logAutoConfiguration.info ("BS format: setting rec.doWriteAOD=%s "%rec.doWriteAOD() )
+        logAutoConfiguration.info ("BS format: setting globalflags.InputFormat=%s ",globalflags.InputFormat() )
+        logAutoConfiguration.info ("BS format: setting rec.readRDO=%s ",rec.readRDO() )
+        logAutoConfiguration.info ("BS format: setting rec.doAOD=%s ",rec.doAOD() )
+        logAutoConfiguration.info ("BS format: setting rec.doESD=%s ",rec.doESD() )
+        logAutoConfiguration.info ("BS format: setting rec.doWriteAOD=%s ",rec.doWriteAOD() )
     else:        
         globalflags.InputFormat='pool'
         #Get streamsName
         streamsName=[]
         if 'processingTags' in metadata:
             streamsName = metadata['processingTags']
-        if streamsName==None:
+        if streamsName is None:
             streamsName=[]
-        logAutoConfiguration.info("Extracted streams %s from input file " % streamsName )   
+        logAutoConfiguration.info("Extracted streams %s from input file ", streamsName )
 
         if len(streamsName)==0:
            logAutoConfiguration.warning("ConfigureInputType: no stream in input file.")   
@@ -664,7 +660,7 @@ def ConfigureInputType():
     from RecExConfig.RecoFunctions import OverlapLists
     try:
         from PrimaryDPDMaker.PrimaryDPDFlags import listRAWtoDPD,listESDtoDPD,listAODtoDPD
-    except:
+    except Exception:
         logAutoConfiguration.warning("Unable to import PrimaryDPDFlags. OK for ATN tests below AtlasAnalysis, otherwise suspicious.")
         listRAWtoDPD=[]
         listESDtoDPD=[]
@@ -675,15 +671,15 @@ def ConfigureInputType():
         from PrimaryDPDMaker.PrimaryDPDFlags import listBackwardCompatibleAODtoDPD, listBackwardCompatibleESDtoDPD
         listESDtoDPD.extend(listBackwardCompatibleESDtoDPD)
         listAODtoDPD.extend(listBackwardCompatibleAODtoDPD)
-    except:
+    except Exception:
         logAutoConfiguration.warning("Primary DPDMake does not support the old naming convention!!")   
 
-    if 'TagStreamsRef' in metadata and metadata['TagStreamsRef']!=None:
+    if 'TagStreamsRef' in metadata and metadata['TagStreamsRef'] is not None:
         logAutoConfiguration.info("Input TAG detected")
         rec.readTAG=True
-        logAutoConfiguration.info ("Auto configured rec.readTAG=%s "%rec.readTAG() )
+        logAutoConfiguration.info ("Auto configured rec.readTAG=%s ",rec.readTAG() )
 
-    from RecExConfig.RecoFunctions import ItemInListStartsWith    
+    from RecExConfig.RecoFunctions import ItemInListStartsWith
     if ItemInListStartsWith ("StreamAOD", streamsName) or ItemInListStartsWith('StreamDAOD',streamsName) or ItemInListStartsWith('StreamD2AOD',streamsName) or OverlapLists(streamsName,listAODtoDPD) or ItemInListStartsWith('DAOD',streamsName) or ItemInListStartsWith('D2AOD',streamsName):
         logAutoConfiguration.info("Input AOD detected")   
         rec.readRDO=False
@@ -691,7 +687,7 @@ def ConfigureInputType():
         rec.readAOD=True
         rec.doAOD=False
         rec.doESD=False
-        logAutoConfiguration.info ("setting rec.readAOD=%s "%rec.readAOD() )
+        logAutoConfiguration.info ("setting rec.readAOD=%s ",rec.readAOD() )
     elif ItemInListStartsWith ("StreamESD", streamsName) or ItemInListStartsWith('StreamDESD',streamsName) or ItemInListStartsWith('StreamD2ESD',streamsName) or OverlapLists(streamsName,listESDtoDPD) or ItemInListStartsWith('DESD',streamsName) or ItemInListStartsWith('D2ESD',streamsName):
         logAutoConfiguration.info("Input ESD detected")   
         rec.readRDO=False
@@ -699,8 +695,8 @@ def ConfigureInputType():
         rec.readAOD=False
         rec.doAOD=True
         rec.doESD=False
-        logAutoConfiguration.info ("setting rec.readESD=%s "%rec.readESD() )
-        logAutoConfiguration.info ("setting rec.doAOD=%s "%rec.doAOD() )
+        logAutoConfiguration.info ("setting rec.readESD=%s ",rec.readESD() )
+        logAutoConfiguration.info ("setting rec.doAOD=%s ",rec.doAOD() )
     elif ItemInListStartsWith ("Stream1", streamsName) or ItemInListStartsWith ("StreamRDO", streamsName) or ItemInListStartsWith ("OutputStreamRDO", streamsName) or OverlapLists(streamsName,listRAWtoDPD):
         logAutoConfiguration.info("Input RDO detected")   
         rec.readRDO=True
@@ -708,8 +704,8 @@ def ConfigureInputType():
         rec.readAOD=False
         rec.doAOD=False
         rec.doESD=True
-        logAutoConfiguration.info ("setting rec.readRDO=%s "%rec.readRDO() )
-        logAutoConfiguration.info ("setting rec.doESD=%s "% rec.doESD() )
+        logAutoConfiguration.info ("setting rec.readRDO=%s ",rec.readRDO() )
+        logAutoConfiguration.info ("setting rec.doESD=%s ",rec.doESD() )
     elif ItemInListStartsWith ("StreamEVGEN", streamsName):
         logAutoConfiguration.info("Input EVGEN detected")   
         rec.readRDO=False
@@ -732,35 +728,35 @@ def ConfigureTriggerStream():
     if metadata['file_type'] == 'BS':
         try:
             streamName = metadata['stream'].split('_')[1]
-        except:
+        except Exception:
             logAutoConfiguration.warning("Input file does not contain bs_metadata! Trying to specify otherwise!")
     elif metadata['file_type'] == 'POOL':
         try:
             streamName = metadata['triggerStreamOfFile']
-        except:
+        except Exception:
             logAutoConfiguration.warning("Input file does not contain triggerStreamOfFile! Trying to specify otherwise!")
 
     if streamName=='':
         try:
             if len(metadata['processingTags'])==1:
                 streamName = metadata['processingTags'][0]
-        except:
+        except Exception:
             logAutoConfiguration.warning("No trigger stream found in input file!!! ") 
 
 
     if streamName=='':
         logAutoConfiguration.info("Failed to find triggerStream from MetaReaderPeeker. OK for MC but can be problematic for data.")
-        logAutoConfiguration.info("Keeping input value untouched: rec.triggerStream='%s'"%rec.triggerStream())
+        logAutoConfiguration.info("Keeping input value untouched: rec.triggerStream='%s'",rec.triggerStream())
         return
     
-    logAutoConfiguration.info("Set rec.triggerStream='%s' from MetaReaderPeeker."%streamName)
+    logAutoConfiguration.info("Set rec.triggerStream='%s' from MetaReaderPeeker.",streamName)
     rec.triggerStream=streamName
     return
 
 
 def ConfigureConditionsTag():
     if globalflags.ConditionsTag.is_locked():
-        logAutoConfiguration.info("conditionsTag is locked to value: '%s'."%globalflags.ConditionsTag())
+        logAutoConfiguration.info("conditionsTag is locked to value: '%s'.",globalflags.ConditionsTag())
         return
 
     from PyUtils.MetaReaderPeeker import metadata
@@ -768,7 +764,7 @@ def ConfigureConditionsTag():
         try:
             globalflags.ConditionsTag.set_Value_and_Lock(metadata['IOVDbGlobalTag'])
             logAutoConfiguration.info("Auto-configured ConditionsTag '%s' from MetaReaderPeeker ",globalflags.ConditionsTag())
-        except:
+        except Exception:
             logAutoConfiguration.error("ConditionsTag could not be auto-configured no info stored in MetaReaderPeeker!!!")
             #logAutoConfiguration.warning("Input simulated bs file does not contain bs_metadata with conditions_tag !")
 
@@ -778,15 +774,15 @@ def ConfigureConditionsTag():
     else: #Regular data files 
         try:
             year=int(rec.projectName()[4:6])
-        except:
-            logAutoConfiguration.warning("Failed to extract year from project tag "+ rec.projectName() +". Guessing 2015")
+        except Exception:
+            logAutoConfiguration.warning("Failed to extract year from project tag %s. Guessing 2015", rec.projectName())
             year=15
         if (year<14): #Run1
             globalflags.ConditionsTag.set_Value_and_Lock("COMCOND-BLKPA-RUN1-09")
-            logAutoConfiguration.info("Found run 1 input bytestream file, autoconfigure conditions tag to '%s'"%globalflags.ConditionsTag())
+            logAutoConfiguration.info("Found run 1 input bytestream file, autoconfigure conditions tag to '%s'",globalflags.ConditionsTag())
         else:
             globalflags.ConditionsTag.set_Value_and_Lock("CONDBR2-BLKPA-2015-17")
-            logAutoConfiguration.info("Found run 2 input bytestream file, autoconfigure conditions tag to '%s'"%globalflags.ConditionsTag())
+            logAutoConfiguration.info("Found run 2 input bytestream file, autoconfigure conditions tag to '%s'",globalflags.ConditionsTag())
             pass
         pass
     return
@@ -818,7 +814,7 @@ def ConfigureFieldAndGeoESDtoESD():
 
     logAutoConfiguration.info("Auto configured Geometry: %s ",globalflags.DetDescrVersion() )
     from AthenaCommon.BFieldFlags import jobproperties
-    logAutoConfiguration.info("Auto configured B Field: ");
+    logAutoConfiguration.info("Auto configured B Field: ")
     logAutoConfiguration.info("BField: barrelToroidOn = %s ",jobproperties.BField.barrelToroidOn() )
     logAutoConfiguration.info("BField: BField.endcapToroidOn = %s ",jobproperties.BField.endcapToroidOn() )
     logAutoConfiguration.info("BField: BField.solenoidOn = %s ",jobproperties.BField.solenoidOn() )
@@ -826,13 +822,13 @@ def ConfigureFieldAndGeoESDtoESD():
 
 def ConfigureDoTruth():
     if rec.doTruth.is_locked():
-        logAutoConfiguration.info ("rec.doTruth=%s is locked. Auto-config will not attempt to change it."%rec.doTruth())
+        logAutoConfiguration.info ("rec.doTruth=%s is locked. Auto-config will not attempt to change it.",rec.doTruth())
         return
 
     from PyUtils.MetaReaderPeeker import metadata, convert_itemList
     if metadata['file_type'] == 'BS' and metadata['eventTypes'][0] != 'IS_SIMULATION':
         rec.doTruth.set_Value_and_Lock(False)
-        logAutoConfiguration.info("Input is bytestream. Auto-configuring doTruth=%s"%rec.doTruth())
+        logAutoConfiguration.info("Input is bytestream. Auto-configuring doTruth=%s",rec.doTruth())
 
     if metadata['file_type'] == 'POOL':
         itemsList = convert_itemList(layout='#join')
@@ -843,9 +839,9 @@ def ConfigureDoTruth():
 
         rec.doTruth.set_Value_and_Lock(itemsHaveTruth)
         if itemsHaveTruth:
-            logAutoConfiguration.info("Input has McEventCollection. Auto-configuring doTruth=%s"%rec.doTruth())
+            logAutoConfiguration.info("Input has McEventCollection. Auto-configuring doTruth=%s",rec.doTruth())
         else:
-            logAutoConfiguration.info("Input has no McEventCollection. Auto-configuring doTruth=%s"%rec.doTruth())
+            logAutoConfiguration.info("Input has no McEventCollection. Auto-configuring doTruth=%s",rec.doTruth())
         
     return
 
@@ -857,16 +853,16 @@ def IsInInputFile(collectionname,key=None):
             try:
                 ItemDic = convert_itemList(layout='dict')
                 if collectionname in ItemDic:
-                    logAutoConfiguration.info("found collection with name %s in input file." % collectionname)
+                    logAutoConfiguration.info("found collection with name %s in input file.", collectionname)
                     print(ItemDic[collectionname])
                     if key is None:
                         logAutoConfiguration.info("no explicit storegate key given. Returning True")
                         return True
                     if key in ItemDic[collectionname]:
-                        logAutoConfiguration.info("collection with key %s is in input file. Returning True" % key)
+                        logAutoConfiguration.info("collection with key %s is in input file. Returning True", key)
                         return True
                     else:
-                        logAutoConfiguration.info("collection with key %s is NOT in input file. Returning False" % key)
+                        logAutoConfiguration.info("collection with key %s is NOT in input file. Returning False", key)
                         return False
                     logAutoConfiguration.info("Shouldn't be here !")
                     return False
@@ -877,7 +873,7 @@ def IsInInputFile(collectionname,key=None):
     except Exception:
         logAutoConfiguration.warning("Could not run IsInInputFile. input file maybe not specified at this point")#
 
-    logAutoConfiguration.info("looks like object of name %s is NOT in input file. Returning False" % key)
+    logAutoConfiguration.info("looks like object of name %s is NOT in input file. Returning False", key)
     return False
 
 def ConfigureSimulationOrRealData():
@@ -885,7 +881,7 @@ def ConfigureSimulationOrRealData():
     whatIsIt="N/A"
     try:
         whatIsIt = metadata['eventTypes'][0]
-    except:
+    except Exception:
         if metadata['nentries'] == 0:
             logAutoConfiguration.error("Input file has no events: unable to configure SimulationOrRealData.")
             return
@@ -905,11 +901,13 @@ def ConfigureSimulationOrRealData():
     return
 
 def ConfigureFromListOfKeys(l):
+    from RecExConfig.RecoFunctions import ItemInList
+
     keys=set(l)
     allDefaultKeys=frozenset(('ProjectName','RealOrSim','FieldAndGeo','BeamType','ConditionsTag','DoTruth','InputType','BeamEnergy','LumiFlags','TriggerStream'))
     if 'everything' in keys:
         keys.remove('everything')
-        keys |= allDefaultKeys;
+        keys |= allDefaultKeys
         logAutoConfiguration.info("Auto-configuration key 'everything' requested. All default keys will be used.")
 
     logAutoConfiguration.info("Auto-configuration will procede according to the following keys:")
@@ -959,7 +957,7 @@ def ConfigureFromListOfKeys(l):
 
     #Final sanity check...
     for key in keys:
-        if not key in allDefaultKeys:
+        if key not in allDefaultKeys:
             #these special keys are allowed to not be allDefaultKeys, however check for conflicts
             if key=="FieldAndGeoESDToESD":
                 if ItemInList("FieldAndGeo",l):
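Note on the AutoConfiguration.py hunk above: it consistently switches the logger calls from eager "%"-interpolation to passing the values as arguments, relying on the Athena logger accepting the same (format, *args) call signature as Python's standard logging module, so the string is only built when the record is actually emitted. A minimal, self-contained sketch of the idiom, using plain logging and an invented logger name (not part of the patch):

    import logging

    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger("AutoConfiguration-demo")  # hypothetical name, illustration only

    tag = "CONDBR2-BLKPA-2015-17"
    # Eager: the message string is built even when INFO is filtered out.
    log.info("autoconfigured conditions tag to '%s'" % tag)
    # Deferred: the logging framework interpolates only if the record is emitted.
    log.info("autoconfigured conditions tag to '%s'", tag)

The same hunk also drops stray trailing semicolons and prefers "key not in allDefaultKeys" over "not key in ..."; both changes are stylistic and behave identically.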
diff --git a/Reconstruction/RecExample/RecExConfig/python/Configured.py b/Reconstruction/RecExample/RecExConfig/python/Configured.py
index 360eb4be56fe33b9383c09ad0f9881ffd22b2675..d841c8697894c7a83ff8cf26a78ee6d5e2f55eb4 100755
--- a/Reconstruction/RecExample/RecExConfig/python/Configured.py
+++ b/Reconstruction/RecExample/RecExConfig/python/Configured.py
@@ -1,7 +1,4 @@
-# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
-
-# File: 
-# Author: 
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 ## for messaging
 from AthenaCommon.Logging import logging
@@ -33,7 +30,7 @@ class Configured( object ):
          mlog.debug("new : First instance" )
          cls.__newobject = object.__new__( cls )
       else:
-         mlog.debug("new : nNewed : %s => simple access." % cls._nNewed )            
+         mlog.debug("new : nNewed : %s => simple access.", cls._nNewed )
       
     
 
@@ -68,7 +65,6 @@ class Configured( object ):
 
       # initialisation
       self._configured = self.preconfigure() 
-      doConfigure=False
       if not self._configured:
          mlog.debug("preconfiguration indicates this alg should not run")    
       else:   
@@ -78,13 +74,13 @@ class Configured( object ):
  
       self._configured= self._configured and configuredOK
 
-      mlog.info ("%s/%s" % (["Not configured","Configured"][self.configured()],
-                            ["Enabled", "DISABLED"][self.disabled()]))
+      mlog.info ("%s/%s", ["Not configured","Configured"][self.configured()],
+                 ["Enabled", "DISABLED"][self.disabled()])
 
 
    def preconfigure (self):
       mlog = logging.getLogger( 'Configured::preconfigure:%s:' % self.__class__.__name__.replace( ".", '_' )  )
-      mlog.debug("Output= %s" % self.output() )
+      mlog.debug("Output= %s", self.output() )
 
       if self.checkExistingOutput ():
          return False
@@ -105,7 +101,7 @@ class Configured( object ):
 
    def usable (self):
       mlog = logging.getLogger( 'Configured::usable:%s:' % self.__class__.__name__.replace( ".", '_' )  )      
-      mlog.debug ("configured:%s disabled:%s" % (self.configured(),self.disabled()))
+      mlog.debug ("configured:%s disabled:%s", self.configured(), self.disabled())
       return self.existingOutput() or ( self.configured() and not self.disabled() )
 
 
@@ -134,12 +130,12 @@ class Configured( object ):
       oneUnmatch=False
       for atype in self.output().keys():
          allkeys=self.output()[atype]
-         if type(allkeys) != type([]): allkeys    = [allkeys]
+         if not isinstance(allkeys,list): allkeys    = [allkeys]
          for akey in allkeys:
             if objKeyStore.isInInput(atype,akey):
                oneMatch=True
             else:
-               mlog.info('one object not in output %s:%s' % (atype,akey))
+               mlog.info('one object not in output %s:%s', atype, akey)
                oneUnmatch=True            
 
 
@@ -177,8 +173,8 @@ class Configured( object ):
             (ig_modname, ig_name) = igpath.rsplit ('.', 1)
             ig_mod = __import__ (ig_modname, globals(), locals(), [ig_name])
             ig_cls = getattr (ig_mod, ig_name)
-        except:
-            mlog.error("could not get handle to %s Quit" % igpath)
+        except Exception:
+            mlog.error("could not get handle to %s Quit", igpath)
             traceback.print_exc()
             return None
         return ig_cls
@@ -190,7 +186,7 @@ class Configured( object ):
    #
    # If the input is not a string, it is just returned directly.
    def getInputGetter (self, igpath):
-        if type(igpath) != type(''):
+        if not isinstance(igpath,str):
            return igpath
         
         # Make a logger.
@@ -200,20 +196,20 @@ class Configured( object ):
         # Find the input getter.
         try:
             ig_cls = self.getInputGetterClass(igpath)
-            if ig_cls == None: return None
+            if ig_cls is None: return None
             input_getter = ig_cls()
-        except:
-            mlog.error("could not get handle to %s Quit" % igpath)
+        except Exception:
+            mlog.error("could not get handle to %s Quit", igpath)
             traceback.print_exc()
             return None
 
         if not input_getter.usable():
             ig_name = ig_cls.__name__
             if not self.ignoreConfigError():
-                mlog.error("%s unusable. Quit." % ig_name)
+                mlog.error("%s unusable. Quit.", ig_name)
                 return None
             else:
-                mlog.error("%s unusable. Continue nevertheless" % ig_name)
+                mlog.error("%s unusable. Continue nevertheless", ig_name)
 
         return input_getter
      
diff --git a/Reconstruction/RecExample/RecExConfig/python/GetCool.py b/Reconstruction/RecExample/RecExConfig/python/GetCool.py
index aa4568dde3c189b304ffe9af540ac1a1c28bd816..7aa6972b205ab56056175ed376dadb69b6e81b0a 100644
--- a/Reconstruction/RecExample/RecExConfig/python/GetCool.py
+++ b/Reconstruction/RecExample/RecExConfig/python/GetCool.py
@@ -1,12 +1,10 @@
 #!/usr/bin/env python
 
-# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 ## David Cote, 17 July 2009
 ## Put the GetCool method in a python import, to ensure it is queried only once.
 
-from __future__ import print_function
-
 __all__ = [
     "cool",
     ]
@@ -24,7 +22,7 @@ def _setup():
         
     from CoolConvUtilities.MagFieldUtils import getFieldForRun
 
-    if lb==None:
+    if lb is None:
         logGetCool.info(rn)  
         cool=getFieldForRun(rn)
     else:
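Two further idioms recur in the Configured.py and GetCool.py hunks above: bare "except:" becomes "except Exception:", and type comparisons become isinstance() or "is None" checks. A short generic sketch of both, with invented names not taken from the patch:

    def load_class(path):
        """Resolve 'package.module.ClassName' to a class object, or None on failure."""
        try:
            modname, clsname = path.rsplit(".", 1)
            module = __import__(modname, globals(), locals(), [clsname])
            return getattr(module, clsname)
        except Exception:  # a bare 'except:' would also swallow SystemExit and KeyboardInterrupt
            return None

    def as_list(keys):
        # isinstance() also accepts subclasses, unlike comparing type(keys) to type([])
        return keys if isinstance(keys, list) else [keys]

    print(load_class("collections.OrderedDict"))
    print(as_list("StreamESD"))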
diff --git a/Reconstruction/RecExample/RecExConfig/python/InputFilePeeker.py b/Reconstruction/RecExample/RecExConfig/python/InputFilePeeker.py
index 48334a059aff48735a4b289ee41f66062e0198e7..e651c2e99122d9ceaf1c35c81767f2d4cffaf8fb 100644
--- a/Reconstruction/RecExample/RecExConfig/python/InputFilePeeker.py
+++ b/Reconstruction/RecExample/RecExConfig/python/InputFilePeeker.py
@@ -19,10 +19,7 @@ inputFileSummary = {}
 def _setup():
 
     global inputFileSummary
-    import os
-    from RecExConfig.RecFlags import rec
     import AthenaCommon.Logging as L
-    from AthenaCommon.Resilience import treatException
 
     #define a logger
     msg = L.logging.getLogger('inputFilePeeker' )
@@ -51,7 +48,7 @@ def _setup():
             fi = athFile.fopen(inFile)
             inputFileSummary = fi.fileinfos
         except Exception as err:
-            msg.warning("Unable to open file [%s]"%inFile)
+            msg.warning("Unable to open file [%s]",inFile)
             msg.warning('caught:\n%s',err)
             import traceback
             traceback.print_exc()
@@ -66,11 +63,11 @@ def _setup():
         if inputFileSummary['stream_names'] == []:
             try:
                 inputFileSummary['stream_names'] = [fi.infos['metadata_items'][0][1]]
-            except Exception as err:
+            except Exception:
                 msg.info("Unable to find stream names in file metadata.")
 
         #If stream_names still not found, check for bytestream case or give default value
-        if inputFileSummary['stream_names']==None or inputFileSummary['stream_names']==[]:
+        if inputFileSummary['stream_names'] is None or len(inputFileSummary['stream_names'])==0:
             if inputFileSummary['file_type']=='bs':
                 msg.info("stream_names not present in input bytestream file. Giving default name 'StreamRAW'")
                 inputFileSummary['stream_names']=['StreamRAW']
@@ -82,10 +79,10 @@ def _setup():
 
         #DR TAG do not have run number        
         if len(inputFileSummary['run_number']) >0 or 'TAG' in inputFileSummary['stream_names'] :
-            msg.info("Successfully filled inputFileSummary from file %s"%inFile)
+            msg.info("Successfully filled inputFileSummary from file %s",inFile)
             break
         else:
-            msg.warning("Unable to fill inputFileSummary from file %s. File is probably empty. Will try again with next (if any)."%inFile)
+            msg.warning("Unable to fill inputFileSummary from file %s. File is probably empty. Will try again with next (if any).",inFile)
 
         ## everything failed...
             failed_trials += 1
@@ -94,7 +91,7 @@ def _setup():
         ## with file summaries which are irrelevant.
         ## FIXME: should the trigger be jobo-settable ?
         if failed_trials > 10:
-            msg.warning("Unable to fill inputFileSummary [%d] times. flushing athfile cache..." % failed_trials)
+            msg.warning("Unable to fill inputFileSummary [%d] times. flushing athfile cache...", failed_trials)
             athFile.flush_cache()
         pass
 
@@ -104,7 +101,7 @@ def _setup():
         return
 
     #Exception: if input is TAG, you need to follow the link to fill inputFileSummary
-    msg.info("Extracted streams %s from input file " % inputFileSummary['stream_names'] )   
+    msg.info("Extracted streams %s from input file ", inputFileSummary['stream_names'] )
 
 
     inputFileSummary['TagStreamsRef']=None
@@ -116,7 +113,7 @@ def _setup():
 
         from RecExConfig.AutoConfiguration import GetDefaultTagRefStream
         streamTarget=GetDefaultTagRefStream(tagStreamsRef)
-        msg.info ( "will redirect to target %s " % streamTarget )
+        msg.info ( "will redirect to target %s ", streamTarget )
 
         # now get the file on which the TAG is pointing
         from PyUtils.PoolFile import PoolFileCatalog as pfc
@@ -141,14 +138,14 @@ def _setup():
             #get guid of file to be navigated to, then get corresponding physics file name
             aTagStreamsRef=tagStreamsRef[streamTarget][0]
             newInFile=pfc(catalog=catalog_name).pfn(aTagStreamsRef)
-            msg.info ( "reading TAG redirected to file fid: %s pfn:%s " % (aTagStreamsRef,newInFile))
+            msg.info ( "reading TAG redirected to file fid: %s pfn:%s ", aTagStreamsRef, newInFile)
             try:
                 fi = athFile.fopen(newInFile)
             except Exception:    
-                msg.warning ( "AthFile.fopen failed ! Could not redirect input TAG to first target file %s. Probably not available. Now trying them all." % newInfile )
+                msg.warning ( "AthFile.fopen failed ! Could not redirect input TAG to first target file %s. Probably not available. Now trying them all.", newInFile )
                 newInFile=None
         except Exception:
-            msg.warning ( "could not redirect input TAG to first target file %s. Probably not in catalog. Now trying them all." % aTagStreamsRef )
+            msg.warning ( "could not redirect input TAG to first target file %s. Probably not in catalog. Now trying them all.", aTagStreamsRef )
             newInFile=None
 
         if newInFile is None:    
@@ -158,7 +155,7 @@ def _setup():
                 try:
                     newInFile=pfc(catalog=catalog_name).pfn(aTagStreamsRef)
                     fi = athFile.fopen(newInFile)
-                    msg.info ( "finally redirected input TAG to file fid: %s pfn:%s " % (aTagStreamsRef,newInFile))
+                    msg.info ( "finally redirected input TAG to file fid: %s pfn:%s ", aTagStreamsRef, newInFile)
                     break
                 except Exception:
 
@@ -204,7 +201,7 @@ def _setup():
     inputFileSummary['metadata_itemsList']=fullList
 
     #Catch common problems
-    if inputFileSummary['conditions_tag']==None:
+    if inputFileSummary['conditions_tag'] is None:
         inputFileSummary['conditions_tag']=""
 
     if inputFileSummary['evt_type']==[] and inputFileSummary['file_type']=='bs':
@@ -218,12 +215,12 @@ def _setup():
         else:
             inputFileSummary['evt_type']=('IS_DATA', 'Unknown', 'Unknown')
             pass
-        msg.warning("Input file has zero events and hence no EventInfo object. Guessed that evt_type=%s, but this is not certain. Using auto-configuration is not safe if this info is wrong."%(inputFileSummary['evt_type'][0]))
+        msg.warning("Input file has zero events and hence no EventInfo object. Guessed that evt_type=%s, but this is not certain. Using auto-configuration is not safe if this info is wrong.", inputFileSummary['evt_type'][0])
         pass
 
     #Final print out (DEBUG)
     msg.debug("inputFileSummary is:")
-    msg.debug(str(inputFileSummary))
+    msg.debug("%s",inputFileSummary)
     return
 
 # execute function at module import
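Beyond the logging rewrites, the InputFilePeeker.py hunks above switch "== None" to "is None", spell out the empty-list test explicitly, and hand the summary object to the logger instead of calling str() up front. A tiny generic sketch with an invented dictionary, purely for illustration:

    import logging

    summary = {"conditions_tag": None, "stream_names": []}

    # 'is None' tests identity and cannot be defeated by a custom __eq__, unlike '== None'.
    if summary["conditions_tag"] is None:
        summary["conditions_tag"] = ""

    if summary["stream_names"] is None or len(summary["stream_names"]) == 0:
        summary["stream_names"] = ["StreamRAW"]

    # str() is applied by the handler only if the DEBUG record is actually emitted.
    logging.getLogger("peeker-demo").debug("%s", summary)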
diff --git a/Reconstruction/RecExample/RecExConfig/python/ObjKeyStore.py b/Reconstruction/RecExample/RecExConfig/python/ObjKeyStore.py
index 6f607978d15ea7c54d1f1abb2296b8a0cfa5f37c..c906644562890f622e39209f1f39d5784fcdbebf 100755
--- a/Reconstruction/RecExample/RecExConfig/python/ObjKeyStore.py
+++ b/Reconstruction/RecExample/RecExConfig/python/ObjKeyStore.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 # Author Sebastien Binet
 
@@ -10,7 +10,7 @@ __all__ = [ 'ObjKeyStore', 'objKeyStore', 'cfgKeyStore' ]
 from AthenaCommon.Include import excludeTracePattern
 excludeTracePattern.append("*/RecExConfig/ObjKeyStore*")
 
-from AthenaCommon.KeyStore import CfgItemList, CfgKeyStore
+from AthenaCommon.KeyStore import CfgKeyStore
 class _ObjKeyStore( object ):
 
     def __init__(self, name="KeyStore"):
@@ -52,7 +52,7 @@ class _ObjKeyStore( object ):
     def __isInDict(self, label):
         def isIn(d, k, v):
             return d.has_item( "%s#%s" % (k,v) )
-        fct = lambda key, value : isIn(self._store[label], key, value)
+        fct = lambda key, value : isIn(self._store[label], key, value)  # noqa: E731
         class __helper:
             def __call__(self, key, value):
                 return fct(key, value)
@@ -62,7 +62,7 @@ class _ObjKeyStore( object ):
         def fill(d, k, v):
             d.add( { k : v } )
             return            
-        fct = lambda key, value: fill(self._store[label], key, value)
+        fct = lambda key, value: fill(self._store[label], key, value)  # noqa: E731
         class __helper:
             def __call__(self, key, value):
                 return fct(key, value)
@@ -73,21 +73,21 @@ class _ObjKeyStore( object ):
         def fill(d, thedict):
             d.add(thedict)
             return
-        fct = lambda thedict : fill(self._store[label], thedict)
+        fct = lambda thedict : fill(self._store[label], thedict)  # noqa: E731
         class __helper:
             def __call__(self, thedict):
                 return fct(thedict)
         return __helper()
 
     def __writePy(self, label):
-        fct = lambda fileName: self._store.write( fileName, label )
+        fct = lambda fileName: self._store.write( fileName, label )  # noqa: E731
         class __helper:
             def __call__(self, fileName):
                 return fct(fileName)
         return __helper()
         
     def __readPy(self, label):
-        fct = lambda fileName: self._store.read( fileName, label )
+        fct = lambda fileName: self._store.read( fileName, label )  # noqa: E731
         class __helper:
             def __call__(self, fileName):
                 return fct(fileName)
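The "# noqa: E731" markers in ObjKeyStore.py keep the existing lambda assignments while silencing flake8's E731 rule ("do not assign a lambda expression, use a def"). Purely to illustrate what the rule asks for, an equivalent closure built with def could look like the following; the store here is a hypothetical stand-in, not the CfgItemList used by the real code:

    store = {"inputFile": set(), "transient": set()}

    def make_filler(label):
        # Roughly equivalent to: fct = lambda key, value: fill(store[label], key, value)  # noqa: E731
        def fill(key, value):
            store[label].add("%s#%s" % (key, value))
        return fill

    add_input = make_filler("inputFile")
    add_input("CaloCellContainer", "AllCalo")
    print(store["inputFile"])  # {'CaloCellContainer#AllCalo'}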
diff --git a/Reconstruction/RecExample/RecExConfig/python/PyComps.py b/Reconstruction/RecExample/RecExConfig/python/PyComps.py
index 3dfb72e15a41d96287345f1953d7d8959fc43154..3289eca973f17652aa439ae8e044f2ea347fa472 100644
--- a/Reconstruction/RecExample/RecExConfig/python/PyComps.py
+++ b/Reconstruction/RecExample/RecExConfig/python/PyComps.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 import AthenaPython.PyAthena as PyAthena
 
@@ -17,7 +17,6 @@ class AutoConfigConsistencyCheckSvc(PyAthena.Svc):
        setattr(self, '_autocfg_check_already_executed', True)
     
        from AthenaCommon.AppMgr import ServiceMgr
-       from AthenaCommon.JobProperties import jobproperties as jp
        from RecExConfig.RecFlags import jobproperties as jp
        if jp.Rec.AutoConfiguration()==[]:
            self.msg.info("no AutoConfiguration requested. Leave.")
@@ -66,12 +65,12 @@ class AutoConfigConsistencyCheckSvc(PyAthena.Svc):
               self.msg.info("AutoConfiguration requested, athena input file list and AthenaCommonFlags.FilesInput  are identical. All Ok!")
               return
           else:
-              self.msg.warning("AutoConfiguration requested,  athena input file list  and AthenaCommonFlags.FilesInput() are different, %s vs %s " % (ic,fi))       
+              self.msg.warning("AutoConfiguration requested,  athena input file list  and AthenaCommonFlags.FilesInput() are different, %s vs %s ", ic, fi)
 
           if samefirstfile :
               self.msg.warning ("At least the first files are the same, but still this is dangerous.! Proceed at your own risk.")           
           else:    
-              self.msg.warning("Even the first files are different ( %s vs %s ) This could lead to wrong configuration. Proceed at your own risk" % (ic[0],fi[0]))
+              self.msg.warning("Even the first files are different ( %s vs %s ) This could lead to wrong configuration. Proceed at your own risk", ic[0], fi[0])
        except Exception:
 
           treatException("Could not check autoconfiguration. Could be very wrong! Please report.")
diff --git a/Reconstruction/RecExample/RecExConfig/python/RecFlags.py b/Reconstruction/RecExample/RecExConfig/python/RecFlags.py
index 82642baaf7ae49a8dc5290f103d2cfca972e90f1..d2e9924ff008fe3e8d7ae153e7b88ccac3aa2452 100755
--- a/Reconstruction/RecExample/RecExConfig/python/RecFlags.py
+++ b/Reconstruction/RecExample/RecExConfig/python/RecFlags.py
@@ -12,7 +12,7 @@ __author__  = 'D. Rousseau, M. Gallas'
 __version__="$Revision: 1.49 $"
 __doc__="Reconstruction specific flags . "
 
-from AthenaCommon.Constants import *
+from AthenaCommon.Constants import ALL, VERBOSE, DEBUG, INFO, WARNING, ERROR, FATAL
 
 #=======================================================================
 #
@@ -89,13 +89,13 @@ class DPDMakerScripts(JobProperty):
     StoredValue=[]
     def append(self,item):
         if self.is_locked():
-            self._log.info('The JobProperty %s is blocked' % self.__name__)
+            self._log.info('The JobProperty %s is blocked', self.__name__)
         else:
             AddValidItemToList(item,self.StoredValue)
         return
     def remove(self,item):
         if self.is_locked():
-            self._log.info('The JobProperty %s is blocked' % self.__name__)
+            self._log.info('The JobProperty %s is blocked', self.__name__)
         else:
             RemoveValidItemFromList(item,self.StoredValue)
         return
@@ -108,13 +108,13 @@ class AutoConfiguration(JobProperty):
     StoredValue=[]
     def append(self,item):
         if self.is_locked():
-            self._log.info('The JobProperty %s is blocked' % self.__name__)
+            self._log.info('The JobProperty %s is blocked', self.__name__)
         else:
             AddValidItemToList(item,self.StoredValue)
         return
     def remove(self,item):
         if self.is_locked():
-            self._log.info('The JobProperty %s is blocked' % self.__name__)
+            self._log.info('The JobProperty %s is blocked', self.__name__)
         else:
             RemoveValidItemFromList(item,self.StoredValue)
         return
diff --git a/Reconstruction/RecExample/RecExConfig/python/RecoFunctions.py b/Reconstruction/RecExample/RecExConfig/python/RecoFunctions.py
index ae222bcb048b1afdf66e9953463d2c261ad7a40c..bc92bc546506d12a493445fd0864e80e52687277 100644
--- a/Reconstruction/RecExample/RecExConfig/python/RecoFunctions.py
+++ b/Reconstruction/RecExample/RecExConfig/python/RecoFunctions.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 
-# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 ##=============================================================================
 ## Name:        RecoFunctions.py
 ## Author:      David Cote (DESY)
@@ -8,10 +8,7 @@
 ## Description: This is a collection of utility functions for RecExCommon
 ##=============================================================================
 
-from __future__ import print_function
-
 from AthenaCommon.GlobalFlags  import globalflags
-from AthenaCommon.AthenaCommonFlags  import athenaCommonFlags
 
 def RunInnerDetectorOnly():
     from RecExConfig.RecFlags import rec
diff --git a/Reconstruction/RecExample/RecExConfig/python/testObjKeyStore.py b/Reconstruction/RecExample/RecExConfig/python/testObjKeyStore.py
index 0c9c9d28cec075d39e8251edc260c8180f4cb800..86ddab240bc5a272db1c84811bc6ee2d57571cea 100755
--- a/Reconstruction/RecExample/RecExConfig/python/testObjKeyStore.py
+++ b/Reconstruction/RecExample/RecExConfig/python/testObjKeyStore.py
@@ -1,6 +1,5 @@
-# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
-from __future__ import print_function
 from RecExConfig.ObjKeyStore import ObjKeyStore, objKeyStore
 
 # simple dictionary test
@@ -17,7 +16,7 @@ print("anotherd:",anotherd)
 for k in anotherd.keys():
     if k in thed:
         for v in anotherd[k]:
-            if not v in thed[k]: thed[k]+=[v]
+            if v not in thed[k]: thed[k]+=[v]
             pass
         pass
     else:
diff --git a/Reconstruction/RecExample/RecExConfig/share/AutoConfigConsistencyCheck.py b/Reconstruction/RecExample/RecExConfig/share/AutoConfigConsistencyCheck.py
deleted file mode 100644
index c459b68d408d841ad895a656e831fca9b4ca9d6f..0000000000000000000000000000000000000000
--- a/Reconstruction/RecExample/RecExConfig/share/AutoConfigConsistencyCheck.py
+++ /dev/null
@@ -1,10 +0,0 @@
-# check consistency between AthenaCommonFlags.FilesInput and
-# ServiceMgr.EventSelector.FilesInput
-#TODO to be included at the end of RecExCommon_flags.py
-#     treat BS case
-#     protect for online
-
-import RecExConfig.PyComps as recpc
-#svcMgr += MySvc("DRMySvc")
-svcMgr += recpc.AutoConfigConsistencyCheckSvc("AutoConfigConsistencyCheckSvc")
-
diff --git a/Reconstruction/RecExample/RecExConfig/share/RecExConfig_links.sh b/Reconstruction/RecExample/RecExConfig/share/RecExConfig_links.sh
deleted file mode 100755
index ad40296ae2caa079b941db142fc6f506e4890e72..0000000000000000000000000000000000000000
--- a/Reconstruction/RecExample/RecExConfig/share/RecExConfig_links.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#configuration files
-echo "RecExConfig topOptions.py : all in one"
-get_files -jo RecExConfig/topOptions.py
-
-
-#make a soft link to default file name
-ln -fs topOptions.py jobOptions.py
-
-echo "Now build PoolFileCatalog.xml file"
-
-# ATLAS-CSC-01-02-00 top file 
-pool_insertFileToCatalog $ATLASTESTDATA/calib1_csc11.005200.T1_McAtNlo_Jimmy.digit.RDO.v12000301_tid003138._00016_extract_10evt.pool.root
-FCregisterLFN -p $ATLASTESTDATA/calib1_csc11.005200.T1_McAtNlo_Jimmy.digit.RDO.v12000301_tid003138._00016_extract_10evt.pool.root -l top_CSC-01-02-00_RDO_extract.pool
-
diff --git a/Reconstruction/RecExample/RecExConfig/share/topOptions.py b/Reconstruction/RecExample/RecExConfig/share/topOptions.py
deleted file mode 100755
index f264e1ce4998d731332cbf12d6285d27c89be59d..0000000000000000000000000000000000000000
--- a/Reconstruction/RecExample/RecExConfig/share/topOptions.py
+++ /dev/null
@@ -1,83 +0,0 @@
-#example of personal topOptions
-#
-# to use it  
-# athena >! athena.log
-#  ( myTopOptions.py is defaulted through jobOptions.py soft link)
-# 
-# see RecExCommon/share/RecExCommon_flags.py for more available flags
-# and https://uimon.cern.ch/twiki/bin/view/Atlas/RecExCommonFlags
-# for more complete documentation.
-#
-# AllAlgs, DetDescrVersion needs be set before the include, since several
-# secondary flags are configured according to that one
-#
-# DetDescrVersion="DC2" # mandatory for DC2 data
-# AllAlgs = False # if false, all algorithms are switched off by defaults 
-
-doCBNT=False
-doNameAuditor=True
-PoolESDInput=["/afs/cern.ch/user/d/droussea/public/data/top-1130.ESD.pool.root"]
-# doJetRec = False # example , do not run jet reconstruction
-# doTruth=False
-# number of event to process
-#EvtMax=2
-#doTauRec=False
-# include my own algorithm(s)
-# include my own algorithm
-# UserAlgs=[ "MyPackage/MyAlgorithm_jobOptions.py" ] 
-
-# By default write ESD, AOD and TAG simultenaously, but note that in production
-#      these should be done in separate steps (results might not be bit-by-bit identical).
-readESD=True
-doWriteESD=False # uncomment if do not write ESD
-doWriteAOD=False # uncomment if do not write AOD
-doWriteTAG=False # uncomment if do not write TAG
-
-doDumpTES=True
-
-# trigger abort after loading
-#PoolESDInput="bidon"
-
-# DetFlags modifications are best set here (uncomment RecExCommon_flags first)
-include ("RecExCommon/RecExCommon_flags.py")
-# switch off ID, calo, or muons
-DetFlags.ID_setOff()
-DetFlags.Calo_setOff()
-DetFlags.Muon_setOff()
-
-# main jobOption
-include ("RecExCommon/RecExCommon_topOptions.py")
-
-from AthenaCommon.AlgSequence import AlgSequence
-topSequence = AlgSequence()
-from __main__ import topSequence
-
-
-#ALLOWDATADRIVEN=True
-from RecExConfig.PhotonMaker import PhotonMaker
-aPhotonMaker=PhotonMaker(driving=True)
-#exercise singleton
-bPhotonMaker=PhotonMaker()
-thebPMAlg=bPhotonMaker.PhotonBuilderHandle()
-
-print("Py:top output ",bPhotonMaker.outputTypeKey())
-# can modify the al property as follow
-# bPhotonMaker.PhotonBuilderHandle().egammaContainer = "egammaContainerTruc"
-
-
-print("Py: top instance property", thebPMAlg.egammaContainer)
-
-
-
-# add topsequence alg
-# if I do this alg are copied into topAlg and are run twice
-#topSequence.setup()
-#exec 'topSequence.__self= []'
-#algorithms with property mods
-print(topSequence)
-# only list algorithms
-print(topSequence.sequence())
-
-
-print("Py:top print again list of top algorithms")
-print(theApp.TopAlg)
diff --git a/Reconstruction/RecJobTransforms/share/CommonRecoSkeletonJobOptions.py b/Reconstruction/RecJobTransforms/share/CommonRecoSkeletonJobOptions.py
index 7325a901c361a0792c8f23431d5a7c31b84f2f27..122b14c211bc113786dde2d1d2852ae95facf268 100644
--- a/Reconstruction/RecJobTransforms/share/CommonRecoSkeletonJobOptions.py
+++ b/Reconstruction/RecJobTransforms/share/CommonRecoSkeletonJobOptions.py
@@ -62,4 +62,4 @@ if hasattr(runArgs, "valid") and runArgs.valid is True:
 
 # Avoid command line preInclude for event service
 if hasattr(runArgs, "eventService") and runArgs.eventService:
-    include('AthenaMP/AthenaMP_EventService.py')
+    import AthenaMP.EventService
diff --git a/Reconstruction/eflowRec/python/eflowRecFlags.py b/Reconstruction/eflowRec/python/eflowRecFlags.py
index 33a0bb32caccfe2edce967bc2931ba6af1802680..4cff0566d9b91ec4c0496bcad7ea6856d0b7895e 100644
--- a/Reconstruction/eflowRec/python/eflowRecFlags.py
+++ b/Reconstruction/eflowRec/python/eflowRecFlags.py
@@ -139,7 +139,7 @@ class useFlowElements(JobProperty):
     """ Flag to toggle whether to create the new FlowElement EDM objects in addition to the PFO EDM objects. """
     statusOn = True
     allowedTypes = ['bool']
-    StoredValue = False
+    StoredValue = True
 
 class doFlowElementValidation(JobProperty):
     """Flag to turn on Validation plots for Flow Elements (only works if useFlowElements also set ON)"""
diff --git a/Reconstruction/egamma/egammaAlgs/src/topoEgammaBuilder.h b/Reconstruction/egamma/egammaAlgs/src/topoEgammaBuilder.h
index d3208e296c61b60acbea28e439df3858980a16c0..658b1ae97e1b2ad736ff4b70badb30d4caa87b59 100644
--- a/Reconstruction/egamma/egammaAlgs/src/topoEgammaBuilder.h
+++ b/Reconstruction/egamma/egammaAlgs/src/topoEgammaBuilder.h
@@ -101,7 +101,7 @@ private:
 
     /** @brief Tool to resolve electron/photon ambiguity */
     ToolHandle<IEGammaAmbiguityTool> m_ambiguityTool {this, 
-        "AmbiguityTool", "ElectronPhotonSelectorTools/EGammaAmbiguityTool", 
+        "AmbiguityTool", "EGammaAmbiguityTool/egammaambiguitytool",
         "Tool that does electron/photon ambiguity resolution"};
 
     /** @brief Tool to resolve electron/photon ambiguity */
diff --git a/Reconstruction/egamma/egammaInterfaces/egammaInterfaces/IegammaTrkRefitterTool.h b/Reconstruction/egamma/egammaInterfaces/egammaInterfaces/IegammaTrkRefitterTool.h
index 745e570d07b242b1428e8c4ab53fe80e25417987..f8490eb93783c57e91b1743963fa55bd57f128bd 100755
--- a/Reconstruction/egamma/egammaInterfaces/egammaInterfaces/IegammaTrkRefitterTool.h
+++ b/Reconstruction/egamma/egammaInterfaces/egammaInterfaces/IegammaTrkRefitterTool.h
@@ -23,7 +23,6 @@ CREATED:  Jul 2008
 #include "xAODEgamma/ElectronFwd.h"
 #include "xAODTracking/TrackParticleFwd.h"
 #include "TrkTrack/Track.h"
-#include "TrkParameters/TrackParameters.h"
 #include <memory>
 
 static const InterfaceID IID_egammaTrkRefitterTool("IegammaTrkRefitterTool", 1, 0);
diff --git a/Reconstruction/egamma/egammaLayerRecalibTool/egammaLayerRecalibTool/egammaLayerRecalibTool.h b/Reconstruction/egamma/egammaLayerRecalibTool/egammaLayerRecalibTool/egammaLayerRecalibTool.h
index 5f8b1338290510388fb5ed3d3a07f6689c39adb3..b8c35845fc62d0086f7ca46560af37f5ab0bec9c 100644
--- a/Reconstruction/egamma/egammaLayerRecalibTool/egammaLayerRecalibTool/egammaLayerRecalibTool.h
+++ b/Reconstruction/egamma/egammaLayerRecalibTool/egammaLayerRecalibTool/egammaLayerRecalibTool.h
@@ -35,10 +35,7 @@
 #include <TH1.h>
 #include <TH2.h>
 #include <TFormula.h>
-
 #include "egammaLayerRecalibTool/corr_HV_EMBPS.h"
-#include "xAODEgamma/Egamma.h"
-#include "xAODCaloEvent/CaloCluster.h"
 
 
 struct StdCalibrationInputs
diff --git a/Reconstruction/egamma/egammaTools/src/EMFourMomBuilder.cxx b/Reconstruction/egamma/egammaTools/src/EMFourMomBuilder.cxx
index 886097c5264f44e38ab7d607619d7db342143540..c81815b90c0faa64ee49e7760e58e0596fc33f08 100644
--- a/Reconstruction/egamma/egammaTools/src/EMFourMomBuilder.cxx
+++ b/Reconstruction/egamma/egammaTools/src/EMFourMomBuilder.cxx
@@ -23,7 +23,6 @@ PACKAGE:  offline/Reconstruction/egamma/egammaTools
 #include "EventPrimitives/EventPrimitives.h"
 #include "FourMom/EigenP5Jacobiand0z0PhiThetaqOverP2d0z0PhiEtaP.h"
 #include "GeoPrimitives/GeoPrimitives.h"
-#include "xAODEgamma/EgammaxAODHelpers.h"
 
 //  END OF HEADER FILES INCLUDE
 
diff --git a/Reconstruction/egamma/egammaTrackTools/src/egammaTrkRefitterTool.cxx b/Reconstruction/egamma/egammaTrackTools/src/egammaTrkRefitterTool.cxx
index d0b00200a8fa811ac092b68e06d1183861b632c6..b065a59b50875edc561a24379ca328a8c78c0be6 100644
--- a/Reconstruction/egamma/egammaTrackTools/src/egammaTrkRefitterTool.cxx
+++ b/Reconstruction/egamma/egammaTrackTools/src/egammaTrkRefitterTool.cxx
@@ -15,7 +15,6 @@
 #include "TrkEventPrimitives/LocalParameters.h"
 #include "TrkEventPrimitives/ParticleHypothesis.h"
 #include "TrkMaterialOnTrack/MaterialEffectsBase.h"
-#include "TrkSurfaces/PerigeeSurface.h"
 #include "TrkCaloCluster_OnTrack/CaloCluster_OnTrack.h"
 
 #include "AtlasDetDescr/AtlasDetectorID.h"
diff --git a/Reconstruction/iPat/iPatTrackFitter/src/TrackFitter.cxx b/Reconstruction/iPat/iPatTrackFitter/src/TrackFitter.cxx
index 31453ef87a24c071d1ccca20fd208ee6fb8cf6af..f3ac978a7f87d7d302c01adeeffdb7f5a72ef967 100755
--- a/Reconstruction/iPat/iPatTrackFitter/src/TrackFitter.cxx
+++ b/Reconstruction/iPat/iPatTrackFitter/src/TrackFitter.cxx
@@ -1,5 +1,5 @@
 /*
-  Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
 /***************************************************************************
@@ -13,7 +13,6 @@
 #include <cmath>
 #include <iomanip>
 #include "EventPrimitives/EventPrimitives.h"
-#include "EventPrimitives/EventPrimitives.h"
 #include "GaudiKernel/MsgStream.h"
 #include "GeoPrimitives/GeoPrimitives.h"
 #include "TrkEventPrimitives/ParticleHypothesis.h"
diff --git a/Reconstruction/tauRecTools/CMakeLists.txt b/Reconstruction/tauRecTools/CMakeLists.txt
index 7a97d9b240ae0a68d07060ffc889fc2d01f0f367..589a385fb53452093998cc32ae7ad6842e8ad733 100644
--- a/Reconstruction/tauRecTools/CMakeLists.txt
+++ b/Reconstruction/tauRecTools/CMakeLists.txt
@@ -41,8 +41,8 @@ if( XAOD_STANDALONE OR XAOD_ANALYSIS )
       ${FASTJET_INCLUDE_DIRS}
       PRIVATE_INCLUDE_DIRS ${FASTJETCONTRIB_INCLUDE_DIRS} ${LWTNN_INCLUDE_DIRS}
       LINK_LIBRARIES ${ROOT_LIBRARIES} ${FASTJET_LIBRARIES} ${Boost_LIBRARIES}
-      AthLinks AsgTools CxxUtils xAODCaloEvent xAODEventInfo xAODPFlow xAODEgamma xAODTau
-      xAODTracking xAODParticleEvent AsgDataHandlesLib  MVAUtils
+      AthLinks AsgTools CxxUtils xAODCaloEvent xAODEventInfo xAODPFlow xAODTau
+      xAODTracking xAODParticleEvent AsgDataHandlesLib MVAUtils
       PRIVATE_LINK_LIBRARIES ${FASTJETCONTRIB_LIBRARIES} ${LWTNN_LIBRARIES} FourMomUtils xAODJet
       PathResolver )
 else()
@@ -53,10 +53,10 @@ else()
       ${FASTJET_INCLUDE_DIRS}
       PRIVATE_INCLUDE_DIRS ${FASTJETCONTRIB_INCLUDE_DIRS} ${LWTNN_INCLUDE_DIRS}
       LINK_LIBRARIES ${ROOT_LIBRARIES} ${FASTJET_LIBRARIES} ${Boost_LIBRARIES}
-      AthLinks AsgTools CxxUtils xAODCaloEvent xAODEventInfo xAODPFlow xAODEgamma xAODTau
+      AthLinks AsgTools CxxUtils xAODCaloEvent xAODEventInfo xAODPFlow xAODTau
       xAODTracking xAODParticleEvent CaloUtilsLib Particle AsgDataHandlesLib MVAUtils
       PRIVATE_LINK_LIBRARIES ${FASTJETCONTRIB_LIBRARIES} ${LWTNN_LIBRARIES} FourMomUtils xAODJet BeamSpotConditionsData 
-      PathResolver ElectronPhotonSelectorToolsLib )
+      PathResolver )
 endif()
 
 if( NOT XAOD_STANDALONE )
@@ -65,15 +65,15 @@ if( NOT XAOD_STANDALONE )
          src/*.h src/*.cxx src/components/*.cxx
          INCLUDE_DIRS ${ROOT_INCLUDE_DIRS} ${Boost_INCLUDE_DIRS} ${LWTNN_INCLUDE_DIRS}
          LINK_LIBRARIES ${ROOT_LIBRARIES} ${Boost_LIBRARIES} ${LWTNN_LIBRARIES} xAODTau
-         xAODTracking xAODEgamma AthContainers FourMomUtils xAODCaloEvent xAODJet 
-         xAODPFlow xAODParticleEvent MVAUtils ElectronPhotonSelectorToolsLib BeamSpotConditionsData GaudiKernel tauRecToolsLib )
+         xAODTracking AthContainers FourMomUtils xAODCaloEvent xAODJet 
+         xAODPFlow xAODParticleEvent MVAUtils BeamSpotConditionsData GaudiKernel tauRecToolsLib )
    else()
       atlas_add_component( tauRecTools
          src/*.h src/*.cxx src/components/*.cxx
          INCLUDE_DIRS ${ROOT_INCLUDE_DIRS} ${Boost_INCLUDE_DIRS} ${LWTNN_INCLUDE_DIRS}
          LINK_LIBRARIES ${ROOT_LIBRARIES} ${Boost_LIBRARIES} ${LWTNN_LIBRARIES} CaloUtilsLib
-         xAODTau xAODTracking xAODEgamma AthContainers FourMomUtils NavFourMom
-         xAODCaloEvent xAODJet xAODPFlow xAODParticleEvent MVAUtils ElectronPhotonSelectorToolsLib GaudiKernel
+         xAODTau xAODTracking AthContainers FourMomUtils NavFourMom
+         xAODCaloEvent xAODJet xAODPFlow xAODParticleEvent MVAUtils GaudiKernel
          InDetRecToolInterfaces JetEDM Particle ITrackToVertex
          RecoToolInterfaces TrkLinks TrkParametersIdentificationHelpers
          TrkTrackSummary VxVertex TrkToolInterfaces TrkVertexFitterInterfaces
diff --git a/Simulation/G4Atlas/G4AtlasApps/CMakeLists.txt b/Simulation/G4Atlas/G4AtlasApps/CMakeLists.txt
index c4b7a21c5e2a2597e591ccba020e683f55af4014..b4e27489070569895e1282c2ad53ac813f681308 100644
--- a/Simulation/G4Atlas/G4AtlasApps/CMakeLists.txt
+++ b/Simulation/G4Atlas/G4AtlasApps/CMakeLists.txt
@@ -1,14 +1,9 @@
-################################################################################
-# Package: G4AtlasApps
-################################################################################
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 # Declare the package name:
 atlas_subdir( G4AtlasApps )
 
-# External dependencies:
-find_package( COOL COMPONENTS CoolKernel )
-find_package( ROOT COMPONENTS Core Tree MathCore Hist RIO pthread )
-
+# Tests in the package:
 atlas_add_test(test_AtlasG4_tf_configuration
                SCRIPT test/test_AtlasG4_tf_configuration.py
                PROPERTIES TIMEOUT 300
@@ -27,6 +22,6 @@ if( NOT SIMULATIONBASE )
 endif()
 
 # Install files from the package:
-atlas_install_python_modules( python/*.py )
+atlas_install_python_modules( python/*.py POST_BUILD_CMD ${ATLAS_FLAKE8} )
 atlas_install_joboptions( share/*.py )
 
diff --git a/Simulation/G4Atlas/G4AtlasApps/python/AtlasCosmicTrackRecordJob.py b/Simulation/G4Atlas/G4AtlasApps/python/AtlasCosmicTrackRecordJob.py
index 90cf182ca2e4c61a225c85d7053214b9bd689b7f..bc33031e5bac176148a8a5c727d95ea6247e638c 100644
--- a/Simulation/G4Atlas/G4AtlasApps/python/AtlasCosmicTrackRecordJob.py
+++ b/Simulation/G4Atlas/G4AtlasApps/python/AtlasCosmicTrackRecordJob.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 ## @file AtlasCosmicTrackRecordJob.py
 ## @brief py-module to configure the Athena AppMgr for cosmic track record reading jobs
@@ -6,7 +6,7 @@
 ###############################################################
 
 def _setupAtlasCosmicTrackRecordJob():
-    import AthenaCommon.AtlasUnixStandardJob
+    import AthenaCommon.AtlasUnixStandardJob  # noqa: F401
     from AthenaCommon.AppMgr import theApp
     from AthenaCommon.AppMgr import ServiceMgr as svcMgr
 
diff --git a/Simulation/G4Atlas/G4AtlasApps/python/AtlasG4Eng.py b/Simulation/G4Atlas/G4AtlasApps/python/AtlasG4Eng.py
index c0e4dac47bd5e6b4442097111d589c89ce3962c9..74d811afe91098094dbcd45bd2f0126169a95add 100644
--- a/Simulation/G4Atlas/G4AtlasApps/python/AtlasG4Eng.py
+++ b/Simulation/G4Atlas/G4AtlasApps/python/AtlasG4Eng.py
@@ -10,11 +10,8 @@ The basic elements and materials are also defined here. If custom materials are
 needed, they can be produced using the template atlas_materials.py.
 """
 
-from __future__ import print_function
-
 __author__ = "A. Dell`Acqua, M. Gallas"
 
-from AthenaCommon.SystemOfUnits import *
 from AthenaCommon import Logging
 from time import time
 import os, os.path, string, sys
@@ -76,7 +73,7 @@ class G4AtlasEngine:
         """
         if G4AtlasEngine._NumInstances > 0:
             msg = 'There is already an instance of the G4AtlasEngine class -- try to use it!'
-            G4AtlasEngine.log.error('G4AtlasEngine: ' + msg)
+            G4AtlasEngine.log.error('G4AtlasEngine: %s', msg)
             raise RuntimeError('PyG4Atlas: G4AtlasEngine: __init__(): Attempted multiple instantiation')
 
         ## Init dictionaries and lists
@@ -123,7 +120,7 @@ class G4AtlasEngine:
 
 
     def setUseISF(self, useISF) :
-        G4AtlasEngine.log.info('setting useISF to %s' % useISF)
+        G4AtlasEngine.log.info('setting useISF to %s', useISF)
         self.useISF = useISF
 
     def _init_G4(self):
@@ -173,26 +170,26 @@ class G4AtlasEngine:
         def _run_init_stage(name):
             # TODO: Set self.init_status at each sub-step and write that out in _run_init_callbacks
             self.init_status = "preInit" + name
-            G4AtlasEngine.log.debug("G4AtlasEngine:init stage " + self.init_status)
+            G4AtlasEngine.log.debug("G4AtlasEngine:init stage %s", self.init_status)
             _run_init_callbacks(self.init_status)
             self.init_status = "init" + name
-            G4AtlasEngine.log.debug("G4AtlasEngine:init stage " + self.init_status)
+            G4AtlasEngine.log.debug("G4AtlasEngine:init stage %s", self.init_status)
             getattr(self, "_init_" + name).__call__()
             self.init_status = "postInit" + name
-            G4AtlasEngine.log.debug("G4AtlasEngine:init stage " + self.init_status)
+            G4AtlasEngine.log.debug("G4AtlasEngine:init stage %s", self.init_status)
             _run_init_callbacks(self.init_status)
 
         _run_init_stage("G4")
 
         self.init_status = "postInit"
-        G4AtlasEngine.log.debug("G4AtlasEngine:init stage " + self.init_status)
+        G4AtlasEngine.log.debug("G4AtlasEngine:init stage %s", self.init_status)
         _run_init_callbacks(self.init_status)
 
         ## Check that all the callbacks were indeed called, and warn otherwise
         num_reg_callbacks = sum(len(cblist) for cblist in simFlags.InitFunctions.get_Value().values())
         if G4AtlasEngine._callback_counter != num_reg_callbacks:
-            G4AtlasEngine.log.warning("G4AtlasEngine: mismatch in num of callbacks regd/called = %d/%d" %
-                                      (num_reg_callbacks, G4AtlasEngine._callback_counter))
+            G4AtlasEngine.log.warning("G4AtlasEngine: mismatch in num of callbacks regd/called = %d/%d",
+                                      num_reg_callbacks, G4AtlasEngine._callback_counter)
 
         G4AtlasEngine._app_profiler('_init_Simulation')
 
@@ -206,13 +203,13 @@ class G4AtlasEngine:
         if dict_name and dict_name not in G4AtlasEngine.List_LoadedDict:
             try:
                 cppyy.loadDict(dict_name)
-            except:
+            except Exception:
                 print ("Unexpected error:", sys.exc_info(),'\n')
                 print ('ROOT5 migration problem: ', dict_name)
             try:
                 G4AtlasEngine.List_LoadedDict.append(dict_name)
-                G4AtlasEngine.log.debug(' G4AtlasEngine:load_Dict: %s loaded' % dict_name)
-            except:
+                G4AtlasEngine.log.debug(' G4AtlasEngine:load_Dict: %s loaded', dict_name)
+            except Exception:
                 raise RuntimeError('Dict %s can not be found' % dict_name)
 
 
@@ -301,7 +298,7 @@ class G4AtlasEngine:
             See the flag 'simFlags.EventFilter'
             """
             if self._Built:
-                G4AtlasEngine.log.debug('menu_EventFilter._build: init EventFilter manipulators already done\n%s' % self.getFilterStatus())
+                G4AtlasEngine.log.debug('menu_EventFilter._build: init EventFilter manipulators already done\n%s', self.getFilterStatus())
                 return
             self._Built = True
 
@@ -318,7 +315,7 @@ class G4AtlasEngine:
                     fobj.switchOff()
 
             G4AtlasEngine.Dict['EventFilters'] = self
-            G4AtlasEngine.log.debug('menu_EventFilter._build: init EventFilter manipulators\n%s' % self.getFilterStatus())
+            G4AtlasEngine.log.debug('menu_EventFilter._build: init EventFilter manipulators\n%s', self.getFilterStatus())
 
 
 
diff --git a/Simulation/G4Atlas/G4AtlasApps/python/DetConfigurator.py b/Simulation/G4Atlas/G4AtlasApps/python/DetConfigurator.py
index e123948d50dc3049f03b26bd56c7e6b0a82b2258..8d596c7e529cee35d0f1bde832f731edfb8ff95c 100644
--- a/Simulation/G4Atlas/G4AtlasApps/python/DetConfigurator.py
+++ b/Simulation/G4Atlas/G4AtlasApps/python/DetConfigurator.py
@@ -1,3 +1,5 @@
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
+
 from G4AtlasApps import AtlasG4Eng
 class DetConfigurator:
     """ DetConfigurator is a hook for the specific sub-detector configuration.
@@ -40,7 +42,7 @@ class DetConfigurator:
             self.build(self.Mode)
             self._Built = True
         else:
-            AtlasG4Eng.G4Eng.log.info(' DetConfigurator: with name '+self.Name+ ' is already in the Dict_DetConfig: please use it!')
+            AtlasG4Eng.G4Eng.log.info(' DetConfigurator: with name %s is already in the Dict_DetConfig: please use it!', self.Name)
 
 
     def build(self,mode):
diff --git a/Simulation/G4Atlas/G4AtlasApps/python/G4Atlas_Metadata.py b/Simulation/G4Atlas/G4AtlasApps/python/G4Atlas_Metadata.py
index a1007db4de7c1c67a6f7814c9c2b190da19f1393..083431c48e41b84cee16b0b3ba7e9480590563ce 100644
--- a/Simulation/G4Atlas/G4AtlasApps/python/G4Atlas_Metadata.py
+++ b/Simulation/G4Atlas/G4AtlasApps/python/G4Atlas_Metadata.py
@@ -1,9 +1,9 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 ### This module contains functions which may need to peek at the input file metadata
 
 ## Get the logger
-from AthenaCommon.Logging import *
+from AthenaCommon.Logging import logging
 simMDlog = logging.getLogger('Sim_Metadata')
 
 
@@ -20,7 +20,7 @@ def get_metadata(mode='lite'):
                 metadata = read_metadata(input_file, mode = mode)
                 metadata = metadata[input_file]  # promote all keys one level up
                 return metadata
-            except:
+            except Exception:
                 simMDlog.warning("MetaReader failed to open %s", athenaCommonFlags.PoolEvgenInput()[0])
     else:
         simMDlog.info("G4ATLAS_SKIPFILEPEEK environment variable present, so skipping all input file peeking.")
@@ -60,7 +60,7 @@ def patch_mc_channel_numberMetadata(addToFile=True):
             if addToFile:
                 simMDlog.info('Adding mc_channel_number to /TagInfo: %s', str(mc_channel_number))
                 # Initialize tag info management
-                import EventInfoMgt.EventInfoMgtInit
+                import EventInfoMgt.EventInfoMgtInit  # noqa: F401
                 from AthenaCommon.AppMgr import ServiceMgr
                 ServiceMgr.TagInfoMgr.ExtraTagValuePairs.update({"mc_channel_number": str(mc_channel_number)})
         return mc_channel_number
@@ -186,7 +186,6 @@ def createSimulationParametersMetadata():
     ## Set run numbers
     minrunnum = 0
     maxrunnum = 2147483647  # MAX
-    from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
     from G4AtlasApps.SimFlags import simFlags
     if hasattr(simFlags, 'RunNumber') and simFlags.RunNumber.statusOn:
        minrunnum = simFlags.RunNumber()
@@ -200,7 +199,7 @@ def createSimulationParametersMetadata():
             raise Exception('IllegalRunNumber')
     else:
         simMDlog.info('Skipping runNumbers setting - would need to set simFlags.RunNumber for this.')
-    simMDlog.info("Using the following runNumbers range for MetaData IOV: ("+str(minrunnum)+","+str(maxrunnum)+").")
+    simMDlog.info("Using the following runNumbers range for MetaData IOV: (%d,%d).", minrunnum, maxrunnum)
     dbFiller.setBeginRun(minrunnum)
     dbFiller.setEndRun(maxrunnum)
 
@@ -212,7 +211,7 @@ def createSimulationParametersMetadata():
     dbFiller.genSimDb()
     folder = "/Simulation/Parameters"
     dbConnection = "sqlite://;schema=SimParams.db;dbname=SIMPARAM"
-    import IOVDbSvc.IOVDb
+    import IOVDbSvc.IOVDb  # noqa: F401
     from AthenaCommon.AppMgr import ServiceMgr
     ServiceMgr.IOVDbSvc.Folders += [ folder + "<dbConnection>" + dbConnection + "</dbConnection>" ]
     ServiceMgr.IOVDbSvc.FoldersToMetaData += [folder]
@@ -224,7 +223,6 @@ def createTBSimulationParametersMetadata():
     # Set run numbers
     minrunnum = 0
     maxrunnum = 2147483647 # MAX
-    from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
     from G4AtlasApps.SimFlags import simFlags
     if hasattr(simFlags, 'RunNumber') and simFlags.RunNumber.statusOn:
        minrunnum = simFlags.RunNumber()
@@ -238,7 +236,7 @@ def createTBSimulationParametersMetadata():
             raise Exception('IllegalRunNumber')
     else:
         simMDlog.info('Skipping runNumbers setting - would need to set simFlags.RunNumber for this.')
-    simMDlog.info("Using the following runNumbers range for MetaData IOV: ("+str(minrunnum)+","+str(maxrunnum)+").")
+    simMDlog.info("Using the following runNumbers range for MetaData IOV: (%d,%d).", minrunnum, maxrunnum)
     dbFiller.setBeginRun(minrunnum)
     dbFiller.setEndRun(maxrunnum)
 
@@ -250,7 +248,7 @@ def createTBSimulationParametersMetadata():
     dbFiller.genSimDb()
     folder = "/Simulation/Parameters"
     dbConnection = "sqlite://;schema=SimParams.db;dbname=SIMPARAM"
-    import IOVDbSvc.IOVDb
+    import IOVDbSvc.IOVDb  # noqa: F401
     from AthenaCommon.AppMgr import ServiceMgr
     ServiceMgr.IOVDbSvc.Folders += [ folder + "<dbConnection>" + dbConnection + "</dbConnection>" ]
     ServiceMgr.IOVDbSvc.FoldersToMetaData += [folder]
@@ -265,7 +263,7 @@ def configureRunNumberOverrides():
     from G4AtlasApps.SimFlags import simFlags
     if hasattr(simFlags, "RunNumber") and simFlags.RunNumber.statusOn:
         myRunNumber = simFlags.RunNumber.get_Value()
-        simMDlog.info('Found run number %d in sim flags.' % myRunNumber)
+        simMDlog.info('Found run number %d in sim flags.', myRunNumber)
         ## Set event selector details based on evgen metadata
 
         ######update the run/event info for each event
@@ -292,7 +290,7 @@ def configureRunNumberOverrides():
           from RunDependentSimComps.RunDMCFlags import runDMCFlags
           myInitialTimeStamp = runDMCFlags.RunToTimestampDict.getTimestampForRun(myRunNumber)
           #print "FOUND TIMESTAMP ", str(myInitialTimeStamp)
-        except:
+        except Exception:
           myInitialTimeStamp = 1
         ServiceMgr.EvtIdModifierSvc.add_modifier(run_nbr=myRunNumber, lbk_nbr=myFirstLB, time_stamp=myInitialTimeStamp, nevts=totalNumber)
         if hasattr(ServiceMgr.EventSelector,'OverrideRunNumberFromInput'): ServiceMgr.EventSelector.OverrideRunNumberFromInput = True
@@ -300,7 +298,7 @@ def configureRunNumberOverrides():
         # Get evgen run number and lumi block
         if len(metadata_lite['runNumbers']) > 0:
             myRunNumber = metadata_lite['runNumbers'][0]
-            simMDlog.info('Found runNumbers %d in hits file metadata.'% myRunNumber)
+            simMDlog.info('Found runNumbers %d in hits file metadata.', myRunNumber)
         else:
             simMDlog.warning('Failed to find runNumbers in hits file metadata.')
         if metadata_lite['lumiBlockNumbers']:
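Several of the G4AtlasApps changes tag imports with "# noqa: F401", flake8's "imported but unused" code: those modules are imported only for their side effects (configuring a service or the job on import), so the unused name is intentional and the marker documents that. A self-contained illustration with a standard-library module whose import has a visible side effect:

    # Importing 'this' prints the Zen of Python; the bound name is never used afterwards.
    import this  # noqa: F401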
diff --git a/Simulation/G4Atlas/G4AtlasApps/python/G4Atlas_MetadataNew.py b/Simulation/G4Atlas/G4AtlasApps/python/G4Atlas_MetadataNew.py
index 71167e0004198ee5758f7067cd07bf7d165bfa37..f075b62cf8440aee82b94af16c38f39da67b2ae6 100644
--- a/Simulation/G4Atlas/G4AtlasApps/python/G4Atlas_MetadataNew.py
+++ b/Simulation/G4Atlas/G4AtlasApps/python/G4Atlas_MetadataNew.py
@@ -1,9 +1,9 @@
-# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 ### This module contains functions which may need to peek at the input file metadata
 
 ## Get the logger
-from AthenaCommon.Logging import *
+from AthenaCommon.Logging import logging
 simMDlog = logging.getLogger('Sim_Metadata')
 
 def fillAtlasMetadata(ConfigFlags, dbFiller):
@@ -26,7 +26,7 @@ def fillAtlasMetadata(ConfigFlags, dbFiller):
 
     #---------  
     ## Simulated detector flags: add each enabled detector to the simulatedDetectors list
-    from AthenaCommon.DetFlags import DetFlags
+    from AthenaCommon.DetFlags import DetFlags  # noqa: F401
     simDets = []
     for det in ['Pixel','SCT','TRT','BCM','Lucid','ZDC','ALFA','AFP','FwdRegion','LAr','HGTD','Tile','MDT','CSC','TGC','RPC','MM','sTGC','Truth','LVL1']:
         attrname = "Detector.Geometry"+det
@@ -37,7 +37,7 @@ def fillAtlasMetadata(ConfigFlags, dbFiller):
         else:
             simMDlog.info("No flag called '%s' found in ConfigFlags", attrname)
 
-    simMDlog.info("Setting 'SimulatedDetectors' = %s" % repr(simDets))
+    simMDlog.info("Setting 'SimulatedDetectors' = %r", simDets)
     dbFiller.addSimParam('SimulatedDetectors', repr(simDets))
 
     ## Hard-coded simulation hit file magic number (for major changes)
diff --git a/Simulation/G4Atlas/G4AtlasApps/python/PyG4Atlas.py b/Simulation/G4Atlas/G4AtlasApps/python/PyG4Atlas.py
index 019c1dc84f375804fdb53ddc90e66530ba05f3d3..9838c52bbbfb8954ee8862ce74996daf792c9625 100644
--- a/Simulation/G4Atlas/G4AtlasApps/python/PyG4Atlas.py
+++ b/Simulation/G4Atlas/G4AtlasApps/python/PyG4Atlas.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 """ Geant4 simulation Python facilities for the ATLAS experiment.
 
@@ -10,15 +10,7 @@
 
 __author__ = 'A. Dell`Acqua, M. Gallas, A. Buckley'
 
-import os, os.path, string, sys
-from time import time
-from GaudiPython import PyAlgorithm
-from AthenaCommon import Constants, Logging, SystemOfUnits
 from AthenaCommon.AppMgr import theApp
-from AthenaCommon.Include import include
-import cppyy
-
-from G4AtlasApps import AtlasG4Eng
 
 from AthenaCommon.ConcurrencyFlags import jobproperties as concurrencyProps
 if concurrencyProps.ConcurrencyFlags.NumThreads() > 0:
@@ -78,16 +70,16 @@ class _PyG4AtlasComp(PyG4Atlas_base):
             AtlasG4Eng.G4Eng.log.info('Configuring CTB H8 (2004) test beam')
             dummy = CtbSim()
         elif "simu_skeleton" not in AtlasG4Eng.G4Eng.Dict:
-            AtlasG4Eng.G4Eng.log.error('No sim skeleton registered by time of %s construction: STOP!!' % self.name())
+            AtlasG4Eng.G4Eng.log.error('No sim skeleton registered by time of %s construction: STOP!!', self.name())
             raise ValueError('Unknown sim setup: STOP')
 
         ## Import sim module if requested
         # TODO: is this ever used?
         if self.sim_module:
-            AtlasG4Eng.G4Eng.log.info("The kernel simulation Python module which describes the simulation is: %s" % self.sim_module)
+            AtlasG4Eng.G4Eng.log.info("The kernel simulation Python module which describes the simulation is: %s", self.sim_module)
             try:
                 __import__(self.sim_module, globals(), locals())
-            except:
+            except Exception:
                 AtlasG4Eng.G4Eng.log.fatal("The kernel simulation Python module '%s' was not found!" % self.sim_module)
                 raise RuntimeError('PyG4Atlas: %s: initialize()' % self.name())
 
@@ -102,12 +94,11 @@ class _PyG4AtlasComp(PyG4Atlas_base):
         import AtlasG4Eng
         from time import gmtime, strftime
         timestr = strftime("%a, %d %b %Y %H:%M:%S +0000", gmtime())
-        AtlasG4Eng.G4Eng.log.info('%s starting at (UTC): %s' % (self.name(), timestr))
+        AtlasG4Eng.G4Eng.log.info('%s starting at (UTC): %s', self.name(), timestr)
         AtlasG4Eng.G4Eng._app_profiler('%s begin of initialize' % self.name())
         AtlasG4Eng.G4Eng.Dict['simu_skeleton']._do_All()
         AtlasG4Eng.G4Eng._init_Simulation()
 
-        from G4AtlasApps.SimFlags import simFlags
         AtlasG4Eng.G4Eng._app_profiler('%s end of initialize' % self.name())
 
         from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
@@ -120,7 +111,7 @@ class _PyG4AtlasComp(PyG4Atlas_base):
         AtlasG4Eng.G4Eng._app_profiler('%s at finalize  ' % self.name())
         from time import gmtime, strftime
         timestr = strftime("%a, %d %b %Y %H:%M:%S +0000", gmtime())
-        AtlasG4Eng.G4Eng.log.info('%s ending at (UTC): %s'  % (self.name(), timestr))
+        AtlasG4Eng.G4Eng.log.info('%s ending at (UTC): %s', self.name(), timestr)
         return True
 
 if is_hive:
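Most of the logging changes in this file (and in SimFlags.py further below) replace eager '%' interpolation and string concatenation with argument-style calls, so formatting is deferred until the logger actually emits the record. A minimal sketch of the two styles, using the standard logging module as a stand-in for the Gaudi/Athena message service (the module name is a hypothetical value):

    import logging

    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger("PyG4Atlas")       # stand-in for AtlasG4Eng.G4Eng.log

    sim_module = "G4AtlasApps.SimAtlasKernel"  # hypothetical value

    # Eager: the message string is built even when INFO is disabled.
    log.info("The kernel simulation Python module is: %s" % sim_module)

    # Deferred: the logger interpolates the arguments only if the record is emitted.
    log.info("The kernel simulation Python module is: %s", sim_module)
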
diff --git a/Simulation/G4Atlas/G4AtlasApps/python/SimAtlasKernel.py b/Simulation/G4Atlas/G4AtlasApps/python/SimAtlasKernel.py
index 273b7aec934f54b5abd8253d4f803c0d37b68d0d..1e49626547f8ecf29a3ccbd6e1d4ba258db89397 100644
--- a/Simulation/G4Atlas/G4AtlasApps/python/SimAtlasKernel.py
+++ b/Simulation/G4Atlas/G4AtlasApps/python/SimAtlasKernel.py
@@ -1,18 +1,15 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 """
 Main configuration module for the full ATLAS simulations
 """
 
-import math
-from AthenaCommon import PhysicalConstants
 from AthenaCommon.AppMgr import theApp
 from AthenaCommon.Include import include
 from AthenaCommon.JobProperties import jobproperties
 from AthenaCommon.DetFlags import DetFlags
-from AthenaCommon.BeamFlags import jobproperties
 
-import PyG4Atlas, AtlasG4Eng
+import AtlasG4Eng
 from SimSkeleton import SimSkeleton
 
 
@@ -27,7 +24,7 @@ class AtlasSimSkeleton(SimSkeleton):
             AtlasG4Eng.G4Eng.Dict['simu_skeleton'] = self
             AtlasG4Eng.G4Eng.Name = "ATLAS_G4Sim"
         else:
-            AtlasG4Eng.G4Eng.log.warning('AtlasSimSkeleton.__init__ :: the simulation already has a skeleton.' +
+            AtlasG4Eng.G4Eng.log.warning('AtlasSimSkeleton.__init__ :: the simulation already has a skeleton. '
                                                'You can find it in the G4AtlasEng.G4Eng.Dict()')
 
 
@@ -126,7 +123,7 @@ class AtlasSimSkeleton(SimSkeleton):
            frozen showers, etc
         """
         AtlasG4Eng.G4Eng.log.verbose('AtlasSimSkeleton._do_external :: starting')
-        from AthenaCommon.AppMgr import ToolSvc,ServiceMgr
+        from AthenaCommon.AppMgr import ServiceMgr
         from Geo2G4.Geo2G4Conf import Geo2G4Svc
         geo2G4Svc = Geo2G4Svc()
         theApp.CreateSvc += ["Geo2G4Svc"]
@@ -145,7 +142,7 @@ class AtlasSimSkeleton(SimSkeleton):
         ## TODO: Tidy imports etc.
         from GeoModelSvc.GeoModelSvcConf import GeoModelSvc
         from AthenaCommon.GlobalFlags import jobproperties
-        from AtlasGeoModel import SetGeometryVersion
+        from AtlasGeoModel import SetGeometryVersion  # noqa: F401
 
         ## Forward Region Twiss files - needed before geometry setup!
         if simFlags.ForwardDetectors.statusOn:
@@ -154,13 +151,12 @@ class AtlasSimSkeleton(SimSkeleton):
                 from AthenaCommon.AppMgr import ToolSvc
                 ToolSvc += getPublicTool("ForwardRegionProperties")
 
-        from AtlasGeoModel import GeoModelInit
-        from AtlasGeoModel import SimEnvelopes
-        from GeoModelSvc.GeoModelSvcConf import GeoModelSvc
+        from AtlasGeoModel import GeoModelInit  # noqa: F401
+        from AtlasGeoModel import SimEnvelopes  # noqa: F401
         gms = GeoModelSvc()
         ## Cosmics GeoModel tweaks
         if jobproperties.Beam.beamType() == 'cosmics' or \
-           (simFlags.CavernBG.statusOn and not 'Signal' in simFlags.CavernBG.get_Value() ):
+           (simFlags.CavernBG.statusOn and 'Signal' not in simFlags.CavernBG.get_Value() ):
             from CavernInfraGeoModel.CavernInfraGeoModelConf import CavernInfraDetectorTool
             gms.DetectorTools += [ CavernInfraDetectorTool() ]
         ## Protects GeoModelSvc in the simulation from the AlignCallbacks
@@ -176,12 +172,12 @@ class AtlasSimSkeleton(SimSkeleton):
             from AGDD2GeoSvc.AGDD2GeoSvcConf import AGDDtoGeoSvc
             AGDD2Geo = AGDDtoGeoSvc()
             from AthenaCommon import CfgGetter
-            if not "MuonAGDDTool/MuonSpectrometer" in AGDD2Geo.Builders:
+            if "MuonAGDDTool/MuonSpectrometer" not in AGDD2Geo.Builders:
                 ToolSvc += CfgGetter.getPublicTool("MuonSpectrometer", checkType=True)
                 AGDD2Geo.Builders += ["MuonAGDDTool/MuonSpectrometer"]
             from AtlasGeoModel.MuonGMJobProperties import MuonGeometryFlags
             if (MuonGeometryFlags.hasSTGC() and MuonGeometryFlags.hasMM()):
-                if not "NSWAGDDTool/NewSmallWheel" in AGDD2Geo.Builders:
+                if "NSWAGDDTool/NewSmallWheel" not in AGDD2Geo.Builders:
                     ToolSvc += CfgGetter.getPublicTool("NewSmallWheel", checkType=True)
                     AGDD2Geo.Builders += ["NSWAGDDTool/NewSmallWheel"]
             theApp.CreateSvc += ["AGDDtoGeoSvc"]
diff --git a/Simulation/G4Atlas/G4AtlasApps/python/SimCtbKernel.py b/Simulation/G4Atlas/G4AtlasApps/python/SimCtbKernel.py
index 8b002ebe37e92a58c65be0f23acd4995df61975a..35be62627fed5ef8a646ec3d986adb096553a9f7 100644
--- a/Simulation/G4Atlas/G4AtlasApps/python/SimCtbKernel.py
+++ b/Simulation/G4Atlas/G4AtlasApps/python/SimCtbKernel.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 """
 Main configuration module for the ATLAS test-beams
@@ -11,15 +11,12 @@ LAr  standalone test beams at H6
 __author__ = 'M. Gallas'
 
 
-import math
-from AthenaCommon               import PhysicalConstants
 from AthenaCommon.AppMgr        import theApp
 from AthenaCommon.Include       import include
 from AthenaCommon.GlobalFlags   import globalflags
 from AthenaCommon.DetFlags      import DetFlags
 from AthenaCommon.JobProperties import jobproperties
-from AthenaCommon.BeamFlags     import jobproperties
-import PyG4Atlas, AtlasG4Eng
+import AtlasG4Eng
 from SimSkeleton import SimSkeleton
 
 
@@ -58,12 +55,12 @@ class CtbSim(TBSimSkeleton):
         ATLAS Combined Test Beam (2004)
     """
     def __init__(self):
-        if not(AtlasG4Eng.G4Eng.Dict.has_key('simu_skeleton')):
+        if 'simu_skeleton' not in AtlasG4Eng.G4Eng.Dict:
             AtlasG4Eng.G4Eng.Dict['simu_skeleton']=self
             AtlasG4Eng.G4Eng.Name="CTB_G4Sim"         # assigns a name
         else:
-            AtlasG4Eng.G4Eng.log.warning(' SimSkeleton: the simulation '+\
-            'has already a skeleton, you can find it in the '+\
+            AtlasG4Eng.G4Eng.log.warning(' SimSkeleton: the simulation '
+            'has already a skeleton, you can find it in the '
             'G4AtlasEng.G4Eng.Dict()')
 
     @classmethod
@@ -95,16 +92,16 @@ class CtbSim(TBSimSkeleton):
         if (simFlags.BeamConditions.statusOn and
             simFlags.BeamConditions.get_Value()):
                 try:
-                    AtlasG4Eng.G4Eng.log.info(' SimCtbKernel: '+\
-                           ' loading CTB beam-conditions from the'+\
+                    AtlasG4Eng.G4Eng.log.info(' SimCtbKernel: '
+                           ' loading CTB beam-conditions from the'
                            ' CTB run-condition file !!')
-                    BeamCond=__import__(modulepath,globals(),locals(),prepath).BeamCond
+                    BeamCond=__import__(modulepath,globals(),locals(),prepath).BeamCond  # noqa: F821 (broken?)
                     beam_cond_obj=BeamCond.retrieve_Run(simFlags.RunNumber.get_Value())
                     beam_cond_obj._map()
-                except:
-                    AtlasG4Eng.G4Eng.log.warning(' SimCtbKernel: '+
-                    'No particular beam conditions found for the run '+\
-                    str(simFlags.RunNumber.get_Value()))
+                except Exception:
+                    AtlasG4Eng.G4Eng.log.warning(' SimCtbKernel: '
+                    'No particular beam conditions found for the run %s',
+                    simFlags.RunNumber.get_Value())
         # - switch off non-existing detectors
         DetFlags.FCal_setOff()
         DetFlags.HEC_setOff()
@@ -173,8 +170,7 @@ class CtbSim(TBSimSkeleton):
     def _do_external(self):
         """ Place to handle the external services: GeoModel, CondDB, etc.
         """
-        AtlasG4Eng.G4Eng.log.info('SimSkeleton :: _do_external '+\
-                                       'starting')
+        AtlasG4Eng.G4Eng.log.info('SimSkeleton :: _do_external starting')
         from AthenaCommon.AppMgr import ServiceMgr
         from Geo2G4.Geo2G4Conf import Geo2G4Svc
         Geo2G4Svc=Geo2G4Svc()
@@ -184,7 +180,7 @@ class CtbSim(TBSimSkeleton):
         Geo2G4Svc.GetTopTransform = False
 
         #--- GeoModel stuff ----------------------------------------------------
-        from AtlasGeoModel import SetGeometryVersion
+        from AtlasGeoModel import SetGeometryVersion  # noqa: F401
 
         from AthenaCommon.Configurable import Configurable
         if Configurable.allConfigurables.get('GeoModelSvc'):
@@ -201,7 +197,7 @@ class CtbSim(TBSimSkeleton):
         elif(simFlags.GeoModelTileVersion.get_Value()=='TileTB-3B3EB-00'):
             GeoModelSvc.TileVersionOverride='TileTB-3B3EB-00'
 
-        from AtlasGeoModel import GeoModelInit
+        from AtlasGeoModel import GeoModelInit  # noqa: F401
         if(DetFlags.Calo_on()):
             # Common for the Calo
             include( "CaloDetMgrDetDescrCnv/CaloDetMgrDetDescrCnv_joboptions.py" )
@@ -230,8 +226,8 @@ class CtbSim(TBSimSkeleton):
         from AthenaCommon.AppMgr import ServiceMgr
         ServiceMgr += getService('DetectorGeometrySvc')
         ServiceMgr += getService('PhysicsListSvc')
-        AtlasG4Eng.G4Eng.log.info('SimSkeleton :: _do_external '+\
-                                       'done')
+        AtlasG4Eng.G4Eng.log.info('SimSkeleton :: _do_external done')
+
 
 #--- Tile TB 2000-2003  ------------------------------------------------
 class Tile2000_2003(TBSimSkeleton):
@@ -242,12 +238,12 @@ class Tile2000_2003(TBSimSkeleton):
 
     """
     def __init__(self):
-        if not(AtlasG4Eng.G4Eng.Dict.has_key('simu_skeleton')):
+        if 'simu_skeleton' not in AtlasG4Eng.G4Eng.Dict:
             AtlasG4Eng.G4Eng.Dict['simu_skeleton']=self
             AtlasG4Eng.G4Eng.Name="Tile2000_2003"  # assigns a name
         else:
-            G4AtlasEngine.log.warning(' SimSkeleton: the simulation '+\
-            'has already a skeleton, you can find it in the '+\
+            AtlasG4Eng.G4Eng.log.warning(' SimSkeleton: the simulation '
+            'has already a skeleton, you can find it in the '
             'G4AtlasEng.G4Eng.Dict()')
 
     @classmethod
@@ -320,8 +316,7 @@ class Tile2000_2003(TBSimSkeleton):
     def _do_external(self):
         """ Place to handle the external services: GeoModel, CondDB, etc.
         """
-        AtlasG4Eng.G4Eng.log.info('SimSkeleton :: _do_external '+\
-                                       'starting')
+        AtlasG4Eng.G4Eng.log.info('SimSkeleton :: _do_external starting')
         from AthenaCommon.AppMgr import ServiceMgr
         from Geo2G4.Geo2G4Conf import Geo2G4Svc
         Geo2G4Svc=Geo2G4Svc()
@@ -329,7 +324,7 @@ class Tile2000_2003(TBSimSkeleton):
         ServiceMgr +=Geo2G4Svc
         Geo2G4Svc.GetTopTransform = False
 
-        from AtlasGeoModel import SetGeometryVersion
+        from AtlasGeoModel import SetGeometryVersion  # noqa: F401
         from AthenaCommon.Configurable import Configurable
         if Configurable.allConfigurables.get('GeoModelSvc'):
             GeoModelSvc=Configurable.allConfigurables.get('GeoModelSvc')
@@ -351,7 +346,7 @@ class Tile2000_2003(TBSimSkeleton):
             # 5 Barrels
             GeoModelSvc.TileVersionOverride='TileTB-5B-00'
 
-        from AtlasGeoModel import GeoModelInit
+        from AtlasGeoModel import GeoModelInit  # noqa: F401
         if(DetFlags.Calo_on()):
             # Common for the Calo
             include( "CaloDetMgrDetDescrCnv/CaloDetMgrDetDescrCnv_joboptions.py" )
@@ -378,8 +373,8 @@ class Tile2000_2003(TBSimSkeleton):
         from AthenaCommon.AppMgr import ServiceMgr
         ServiceMgr += getService('DetectorGeometrySvc')
         ServiceMgr += getService('PhysicsListSvc')
-        AtlasG4Eng.G4Eng.log.info('SimSkeleton :: _do_external '+\
-                                       'done')
+        AtlasG4Eng.G4Eng.log.info('SimSkeleton :: _do_external done')
+
 
 #--- LAr TB 2002-2004  ------------------------------------------------
 class LArH6_TB(TBSimSkeleton):
@@ -388,12 +383,12 @@ class LArH6_TB(TBSimSkeleton):
 
     """
     def __init__(self):
-        if not(AtlasG4Eng.G4Eng.Dict.has_key('simu_skeleton')):
+        if 'simu_skeleton' not in AtlasG4Eng.G4Eng.Dict:
             AtlasG4Eng.G4Eng.Dict['simu_skeleton']=self
             AtlasG4Eng.G4Eng.Name="LArH6"         # assigns a name
         else:
-            G4AtlasEngine.log.warning(' SimSkeleton: the simulation '+\
-            'has already a skeleton, you can find it in the '+\
+            AtlasG4Eng.G4Eng.log.warning(' SimSkeleton: the simulation '
+            'has already a skeleton, you can find it in the '
             'G4AtlasEng.G4Eng.Dict()')
 
     @classmethod
@@ -473,11 +468,7 @@ class LArH6_TB(TBSimSkeleton):
     def _do_external(self):
         """ Place to handle the external services: GeoModel, CondDB, etc.
         """
-        AtlasG4Eng.G4Eng.log.info('SimSkeleton :: _do_external '+\
-                                       'starting')
-
-       #from LArH6Detectors import Det_Layouts
-        from tbLArH6_calo import Det_Layouts
+        AtlasG4Eng.G4Eng.log.info('SimSkeleton :: _do_external starting')
 
         from AthenaCommon.AppMgr import ServiceMgr
         from Geo2G4.Geo2G4Conf import Geo2G4Svc
@@ -488,8 +479,8 @@ class LArH6_TB(TBSimSkeleton):
         Geo2G4Svc.GetTopTransform = False
 
         #--- GeoModel stuff ----------------------------------------------------
-        from AtlasGeoModel import SetGeometryVersion
-        from AtlasGeoModel import GeoModelInit
+        from AtlasGeoModel import SetGeometryVersion  # noqa: F401
+        from AtlasGeoModel import GeoModelInit  # noqa: F401
 
         from AthenaCommon.Configurable import Configurable
         if Configurable.allConfigurables.get('GeoModelSvc'):
@@ -508,8 +499,7 @@ class LArH6_TB(TBSimSkeleton):
         from AthenaCommon.AppMgr import ServiceMgr
         ServiceMgr += getService('DetectorGeometrySvc')
         ServiceMgr += getService('PhysicsListSvc')
-        AtlasG4Eng.G4Eng.log.info('SimSkeleton :: _do_external '+\
-                                       'done')
+        AtlasG4Eng.G4Eng.log.info('SimSkeleton :: _do_external done')
 
         # mgallas do more here: this is not yet supported
 
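The test-beam skeletons above also drop the Python 2-only dict.has_key() in favour of the in / not in operators. A minimal sketch of the same registration guard, with a plain dict standing in for AtlasG4Eng.G4Eng.Dict (names are illustrative):

    engine_dict = {}                    # stand-in for AtlasG4Eng.G4Eng.Dict

    class TBSimSkeletonSketch(object):
        def __init__(self):
            # Python 2 only: if not engine_dict.has_key('simu_skeleton'): ...
            if 'simu_skeleton' not in engine_dict:
                engine_dict['simu_skeleton'] = self
            else:
                print('the simulation already has a skeleton: %r'
                      % engine_dict['simu_skeleton'])

    TBSimSkeletonSketch()   # registers itself
    TBSimSkeletonSketch()   # second instance takes the warning branch
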
diff --git a/Simulation/G4Atlas/G4AtlasApps/python/SimFlags.py b/Simulation/G4Atlas/G4AtlasApps/python/SimFlags.py
index af5b6415ac832a16df2da55f54680b91059c87a7..594a087623519a4dbfa91803785a1e39aac37aa7 100644
--- a/Simulation/G4Atlas/G4AtlasApps/python/SimFlags.py
+++ b/Simulation/G4Atlas/G4AtlasApps/python/SimFlags.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 """
 Simulation-specific flags.
@@ -14,7 +14,7 @@ application specific ones in e.g. atlas_flags.py.
 __author__  = 'A. Dell`Acqua, M. Gallas, Z. Marshall, A. Buckley'
 
 
-import re, os, sys
+import os
 from AthenaCommon.JobProperties import JobProperty, JobPropertyContainer, jobproperties
 
 # TODO: Needed? If so, edit this comment to explain why :)
@@ -221,12 +221,12 @@ class PhysicsList(JobProperty):
                 if n_value not in self.allowedValues:
                     self.allowedValues.append(n_value)
         if n_value == 'FTFP_BERT_G4Precompound':
-            _sflog.warning('Setting G4CASCADE_USE_PRECOMPOUND for use of ' +
+            _sflog.warning('Setting G4CASCADE_USE_PRECOMPOUND for use of '
                            'precompound model' )
             os.environ['G4CASCADE_USE_PRECOMPOUND'] = '1'
             n_value = 'FTFP_BERT'
         elif 'G4CASCADE_USE_PRECOMPOUND' in os.environ:
-            _sflog.warning('Setting G4CASCADE_USE_PRECOMPOUND back to 0 ' +
+            _sflog.warning('Setting G4CASCADE_USE_PRECOMPOUND back to 0 '
                            '(why was it set?)')
             os.environ['G4CASCADE_USE_PRECOMPOUND'] = '0'
         JobProperty.__setattr__(self, name, n_value)
@@ -371,13 +371,12 @@ class RandomSeedList(JobProperty):
         """Add seeds to internal seedlist. Seeds will be incremented by offset values."""
         newseed = name + " OFFSET " + str(jobproperties.SimFlags.RandomSeedOffset.get_Value()) + " " + str(seed1) + " " + str(seed2) #option 1b
 
-        _sflog.info("Adding Simulation random number seed '" + newseed + "'")
+        _sflog.info("Adding Simulation random number seed %s", newseed)
 
         ## Ensure that each stream is only initialized once
         found = self.checkForExistingSeed(name)
         if found:
-            _sflog.error("Initialization values for random number stream " +
-                         name + " already exist!")
+            _sflog.error("Initialization values for random number stream %s already exist!", name)
         else:
             seedlist = self.get_Value()
             seedlist += [newseed]
@@ -387,10 +386,9 @@ class RandomSeedList(JobProperty):
         """print random seeds """
         from AthenaCommon.ConfigurableDb import getConfigurable
         rndmSvc = getConfigurable(jobproperties.SimFlags.RandomSvc.get_Value())()
-        _sflog.info("Random Number Seeds stored in simFlag: " +
-                    str(self.get_Value()))
-        _sflog.info("Random Number Seeds attached to Service '" +
-                    rndmSvc.name() + "': " + str(rndmSvc.Seeds))
+        _sflog.info("Random Number Seeds stored in simFlag: %s", self.get_Value())
+        _sflog.info("Random Number Seeds attached to Service '%s': %s",
+                    rndmSvc.name(), rndmSvc.Seeds)
 
     def checkRndmSvc(self):
         """Check if the random number service has already been defined"""
@@ -399,23 +397,22 @@ class RandomSeedList(JobProperty):
             from AthenaCommon.ConfigurableDb import getConfigurable
             rndmSvc = getConfigurable(jobproperties.SimFlags.RandomSvc.get_Value())()
             if len(rndmSvc.Seeds)!=0:
-                _sflog.warn(rndmSvc.name() + ".Seeds is not empty!")
-                _sflog.warn("Random Number Seeds already attached to Service '" +
-                            rndmSvc.name() + "': " + str(rndmSvc.Seeds))
+                _sflog.warn("%s.Seeds is not empty!", rndmSvc.name())
+                _sflog.warn("Random Number Seeds already attached to Service '%s': ",
+                            rndmSvc.name(), rndmSvc.Seeds)
                 _sflog.warn("Please use simFlags.RandomSeedList.addSeed() instead!")
                 for seedstring in rndmSvc.Seeds:
                     if 'OFFSET' not in seedstring:
-                        _sflog.warn("Existing Seed: '" + seedstring +
-                                    "' incorrectly defined - missing OFFSET! " +
-                                    "Removing...")
+                        _sflog.warn("Existing Seed: '%s' "
+                                    "incorrectly defined - missing OFFSET! "
+                                    "Removing...", seedstring)
                     else:
                         # If seed is correctly formatted add seed properly after
                         # checking it hasn't already been defined in the stream list.
                         splitseedstring = seedstring.split()
                         if self.checkForExistingSeed(splitseedstring[0]):
-                            _sflog.error("Initialization values for random " +
-                                         "number stream " + splitseedstring[0] +
-                                         " already exist!")
+                            _sflog.error("Initialization values for random "
+                                         "number stream %s already exist!", splitseedstring[0])
                         else:
                             self.addSeed( splitseedstring[0], splitseedstring[3],
                                           splitseedstring[4] )
@@ -426,8 +423,8 @@ class RandomSeedList(JobProperty):
         """
         from AthenaCommon.ConfigurableDb import getConfigurable
         rndmSvc = getConfigurable(jobproperties.SimFlags.RandomSvc.get_Value())()
-        _sflog.info("Adding Simulation random number seed stored in jobProperties " +
-                    "to Random Number Service '" + rndmSvc.name() + "'")
+        _sflog.info("Adding Simulation random number seed stored in jobProperties "
+                    "to Random Number Service '%s'", rndmSvc.name())
         self.checkRndmSvc()
         rndmSvc.Seeds += self.get_Value()
         from GaudiKernel.Configurable import WARNING
@@ -623,7 +620,7 @@ class RunDict(JobProperty):
     statusOn = True
     allowedTypes = ['dict']
     StoredValue = { 197451 : 1 , 201445 : 1 }
-    def GetRunNumber( a_job ):
+    def GetRunNumber(self, a_job ):
         """
         Get a run number based on the runs in the dictionary.  Returns
         as though we process a linear sequence for the moment
@@ -638,8 +635,8 @@ class RunDict(JobProperty):
             for a in self.get_Value():
                 if baseJN<=self.get_Value()[a]: return a
                 else: baseJN-= self.get_Value()[a]
-        _sflog.warning('Something went wrong with job ' + str(a_job) +
-                       '. Returning run number -1.' )
+        _sflog.warning('Something went wrong with job %s. '
+                       'Returning run number -1.', a_job )
         return -1
 
 class DoLArBirk(JobProperty):
@@ -743,8 +740,8 @@ class OptionalUserActionList(JobProperty):
             try:
                 self.StoredValue[role] += [actionTool]
             except KeyError:
-                _sflog.warn('Attempt to assign action %s to role %s not allowed' %
-                            (actionTool, role))
+                _sflog.warn('Attempt to assign action %s to role %s not allowed',
+                            actionTool, role)
 
     def removeAction(self, actionTool, roles=['General']):
         # Remove the action from the list of actions - no error if role isn't in the list.
@@ -752,11 +749,11 @@ class OptionalUserActionList(JobProperty):
             try:
                 self.StoredValue[role].remove(actionTool)
             except KeyError:
-                _sflog.warn('Attempt to remove action %s from role %s not allowed' %
-                            (actionTool, role))
+                _sflog.warn('Attempt to remove action %s from role %s not allowed',
+                            actionTool, role)
             except ValueError:
-                _sflog.warn('Attempt to remove unknown action %s from role %s' %
-                            (actionTool, role))
+                _sflog.warn('Attempt to remove unknown action %s from role %s',
+                            actionTool, role)
 
 class G4Commands(JobProperty):
     """
@@ -858,9 +855,9 @@ class SimFlags(JobPropertyContainer):
         """
         Load extra config flags specific to ATLAS layouts.
         """
-        if not "atlas_flags" in self.extra_flags:
+        if "atlas_flags" not in self.extra_flags:
             self._log.info("SimFlags:: Loading ATLAS flags")
-            if not "ATLAS-" in self.SimLayout.get_Value():
+            if "ATLAS-" not in self.SimLayout.get_Value():
                 self._log.warning("Loading ATLAS flags, but SimLayout tag is not an ATLAS geometry")
             self.extra_flags.append("atlas_flags")
             self.import_JobProperties('G4AtlasApps.atlas_flags')
@@ -870,7 +867,7 @@ class SimFlags(JobPropertyContainer):
         """
         Load extra config flags specific to cosmics simulation.
         """
-        if not "cosmics_flags" in self.extra_flags:
+        if "cosmics_flags" not in self.extra_flags:
             self._log.info("SimFlags:: Loading cosmics flags")
             self.extra_flags.append("cosmics_flags")
             self.import_JobProperties('CosmicGenerator.cosmics_flags')
@@ -880,9 +877,9 @@ class SimFlags(JobPropertyContainer):
         """
         Load extra config flags specific to CTB layouts.
         """
-        if not "ctb_flags" in self.extra_flags:
+        if "ctb_flags" not in self.extra_flags:
             self._log.info("SimFlags:: Loading CTB flags")
-            if not "ctbh8" in self.SimLayout.get_Value():
+            if "ctbh8" not in self.SimLayout.get_Value():
                 self._log.warning("Loading CTB flags, but SimLayout tag is not a CTB geometry")
             self.extra_flags.append("ctb_flags")
             self.import_JobProperties('G4AtlasApps.ctb_flags')
@@ -892,9 +889,9 @@ class SimFlags(JobPropertyContainer):
         """
         Load extra config flags specific to TB LAr H6 layouts.
         """
-        if not "tbLArH6_flags" in self.extra_flags:
+        if "tbLArH6_flags" not in self.extra_flags:
             self._log.info("SimFlags:: Loading TB LAr H6 flags")
-            if not "tb_LArH6" in self.SimLayout.get_Value():
+            if "tb_LArH6" not in self.SimLayout.get_Value():
                 self._log.warning("Loading TB LAr H6 flags, but SimLayout tag is not a TB LAr H6 geometry")
             self.extra_flags.append("tbLArH6_flags")
             self.import_JobProperties('G4AtlasApps.tbLArH6_flags')
@@ -904,9 +901,9 @@ class SimFlags(JobPropertyContainer):
         """
         Load extra config flags specific to TB Tile layouts.
         """
-        if not "tbtile_flags" in self.extra_flags:
+        if "tbtile_flags" not in self.extra_flags:
             self._log.info("SimFlags:: Loading TB Tile flags")
-            if not "tb_Tile2000_2003" in self.SimLayout.get_Value():
+            if "tb_Tile2000_2003" not in self.SimLayout.get_Value():
                 self._log.warning("Loading TB Tile flags, but SimLayout tag is not a TB Tile geometry")
             self.extra_flags.append("tbtile_flags")
             self.import_JobProperties('G4AtlasApps.tbtile_flags')
@@ -920,7 +917,7 @@ for jpname in dir():
     import inspect
     if inspect.isclass(jp):
         if issubclass(jp, JobProperty) and jp is not JobProperty:
-            _sflog.debug("Adding SimFlag '%s' to SimFlags container" % jpname)
+            _sflog.debug("Adding SimFlag '%s' to SimFlags container", jpname)
             jobproperties.SimFlags.add_JobProperty(jp)
 
 
diff --git a/Simulation/G4Atlas/G4AtlasApps/python/SimSkeleton.py b/Simulation/G4Atlas/G4AtlasApps/python/SimSkeleton.py
index 93fc74e1b0dce7a2eee4ac4ba4233839080c4430..b9a978b7a5cbd81aea476bb811e0bf4ae8c83f25 100644
--- a/Simulation/G4Atlas/G4AtlasApps/python/SimSkeleton.py
+++ b/Simulation/G4Atlas/G4AtlasApps/python/SimSkeleton.py
@@ -1,4 +1,4 @@
-from __future__ import print_function
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 from G4AtlasApps import AtlasG4Eng
 
@@ -218,9 +218,7 @@ class SimSkeleton(object):
             AtlasG4Eng.G4Eng.log.verbose('SimSkeleton._do_persistency :: starting')
 
             ## The following used to be in G4AtlasApps/HitAthenaPoolWriteOptions
-            from AthenaCommon.DetFlags import DetFlags
-            from AthenaCommon.Configurable import Configurable
-            from AthenaPoolCnvSvc.WriteAthenaPool import AthenaPoolOutputStream
+            import AthenaPoolCnvSvc.WriteAthenaPool  # noqa: F401
 
             ## Default setting for one output stream
             from AthenaCommon.AppMgr import ServiceMgr as svcMgr
@@ -229,7 +227,7 @@ class SimSkeleton(object):
             svcMgr.AthenaPoolCnvSvc.PoolAttributes += [ "DatabaseName = '" + athenaCommonFlags.PoolHitsOutput() + "'; ContainerName = 'TTree=CollectionTree'; TREE_AUTO_FLUSH = '1'" ]
 
             ## Write geometry tag info
-            import EventInfoMgt.EventInfoMgtInit
+            import EventInfoMgt.EventInfoMgtInit  # noqa: F401
 
             ## Instantiate StreamHITS
             if athenaCommonFlags.PoolHitsOutput.statusOn:
@@ -260,12 +258,13 @@ class SimSkeleton(object):
         from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
         ## ReadTR is only present in simFlags for ATLAS geometries with cosmics switched on
         if (not simFlags.ISFRun) and hasattr(simFlags, "ReadTR") and simFlags.ReadTR.statusOn:
+            from AthenaCommon.Include import include
             include("CosmicGenerator/SetCosmicGenerator.py")
 
         if athenaCommonFlags.PoolEvgenInput.statusOn:
             ## Tell the event selector about the evgen input files and event skipping
             if not hasattr(svcMgr, 'EventSelector'):
-                import AthenaPoolCnvSvc.ReadAthenaPool
+                import AthenaPoolCnvSvc.ReadAthenaPool  # noqa: F401
             svcMgr.EventSelector.InputCollections = athenaCommonFlags.PoolEvgenInput()
             if athenaCommonFlags.SkipEvents.statusOn:
                 svcMgr.EventSelector.SkipEvents = athenaCommonFlags.SkipEvents()
@@ -274,7 +273,7 @@ class SimSkeleton(object):
         else:
             ## No input file so assume that we are running a Generator in the same job
             if not hasattr(svcMgr, 'EventSelector'):
-                import AthenaCommon.AtlasUnixGeneratorJob
+                import AthenaCommon.AtlasUnixGeneratorJob  # noqa: F401
             # TODO: Check that there is at least one algorithm already in the AlgSequence?
             ## Warn if attempting to skip events in a generator job
             if athenaCommonFlags.SkipEvents.statusOn and athenaCommonFlags.SkipEvents()!=0:
@@ -314,16 +313,16 @@ class SimSkeleton(object):
         ## Execute the known methods from the known_methods in pre_init
         for k in known_methods:
             try:
-                AtlasG4Eng.G4Eng.log.debug('SimSkeleton._do_PreInit :: evaluating method ' + k)
+                AtlasG4Eng.G4Eng.log.debug('SimSkeleton._do_PreInit :: evaluating method %s', k)
                 getattr(cls, k).__call__()
             except Exception as err:
                 print ("Error: %s" % str(err))
                 import traceback,sys
                 traceback.print_exc(file=sys.stdout)
-                raise RuntimeError('SimSkeleton._do_PreInit :: found problems with the method  %s' % k)
+                raise RuntimeError('SimSkeleton._do_PreInit :: found problems with the method %s' % k)
 
          ## Run pre-init callbacks
-        AtlasG4Eng.G4Eng.log.debug("AtlasG4Eng.G4Eng:init stage " + "preInit")
+        AtlasG4Eng.G4Eng.log.debug("AtlasG4Eng.G4Eng:init stage preInit")
         if simFlags.InitFunctions.statusOn and "preInit" in simFlags.InitFunctions.get_Value():
             for callback_fn in simFlags.InitFunctions.get_Value()["preInit"]:
                     callback_fn.__call__()
@@ -343,7 +342,7 @@ class SimSkeleton(object):
         ## Execute the known methods from the known_methods list
         for k in known_methods:
             try:
-                AtlasG4Eng.G4Eng.log.debug('SimSkeleton :: evaluating method ' +k)
+                AtlasG4Eng.G4Eng.log.debug('SimSkeleton :: evaluating method %s', k)
                 getattr(cls, k).__call__()
             except Exception as err:
                 print ("Error: %s" % str(err))
@@ -354,7 +353,7 @@ class SimSkeleton(object):
         for i in dir(cls):
             if i.find('do_') == 0 and i not in known_methods:
                try:
-                   AtlasG4Eng.G4Eng.log.debug('SimSkeleton :: evaluating method %s' % i)
+                   AtlasG4Eng.G4Eng.log.debug('SimSkeleton :: evaluating method %s', i)
                    getattr(cls, i).__call__()
                except Exception as err:
                    print ("Error: %s" % str(err))
diff --git a/Simulation/G4Atlas/G4AtlasApps/python/atlas_utilhisto.py b/Simulation/G4Atlas/G4AtlasApps/python/atlas_utilhisto.py
index c4707b9458ebe56fe5e880f9314cef3bc84077e7..22f757e66280182e643459208cceb0d1b94589d6 100644
--- a/Simulation/G4Atlas/G4AtlasApps/python/atlas_utilhisto.py
+++ b/Simulation/G4Atlas/G4AtlasApps/python/atlas_utilhisto.py
@@ -9,8 +9,6 @@ ROOT file .
 
 # TODO: Remove? Totally unused, I think.  (AB, 07/2011)
 
-from __future__ import print_function
-
 __author__ = 'M. Gallas'
 
 class HistoAtHistoSvc(object):
@@ -61,7 +59,7 @@ class HistoAtHistoSvc(object):
     def retrieve_historoot(self,root_file_name,root_histo_name):
         """ Retrieves a TH1F ROOT histogram.
         """
-        from ROOT import TFile, TH1F
+        from ROOT import TFile
         self.RootHistoName=root_histo_name
         self.RootFileName=(root_file_name)
         self.RootFile=TFile(self.RootFileName)
diff --git a/Simulation/G4Atlas/G4AtlasApps/python/atlas_utilities.py b/Simulation/G4Atlas/G4AtlasApps/python/atlas_utilities.py
index 6756f78439d8f232f3c1a0ca97a3653c9dcb2da1..2caad39623ab837b8d061c5160f029da4e9e3c2a 100644
--- a/Simulation/G4Atlas/G4AtlasApps/python/atlas_utilities.py
+++ b/Simulation/G4Atlas/G4AtlasApps/python/atlas_utilities.py
@@ -7,8 +7,6 @@ Provide different utilities like:
   _frozen = a base class / metaclass for making objects immutable
 """
 
-from __future__ import print_function
-
 __author__  = 'A. Dell`Acqua, M. Gallas, A. Di Simone'
 
 ## ATLAS generic scintillator SD
@@ -29,7 +27,7 @@ class ScintillatorSD(object):
         import AtlasG4Eng
         AtlasG4Eng.G4Eng.load_Lib('G4AncillarySD')
         AtlasG4Eng.G4Eng.load_Dict('G4AncillarySDDict')
-        SD_HChandler = AtlasG4Eng.G4Eng.gbl.ScintillatorSD_PyHandler()
+        #SD_HChandler = AtlasG4Eng.G4Eng.gbl.ScintillatorSD_PyHandler()
         #import PyG4Atlas
         ## SD = PyG4Atlas.SenDetector('G4AncillarySD', 'ScintillatorSD', 'ScintillatorSD') #FIXME SD to be migrated ATLASSIM-1752
         ## # Make a ScintillatorSD hit collection and each associated volume has a
@@ -67,7 +65,7 @@ class MemorySnooper(PyAthena.Alg):
         """
         Checks the memory at any time and print it to a file.
         """
-        import os, string, time
+        import os, time
         now = time.time()
         elapsed_time = now - self.now
         self.now = now
@@ -84,7 +82,7 @@ class MemorySnooper(PyAthena.Alg):
         only if is different from the previous value obtained.
         This method is for the use in the execute method (event by event).
         """
-        import os, string, time
+        import os, time
         now = time.time()
         elapsed_time = now - self.now
         self.now = now
diff --git a/Simulation/G4Atlas/G4AtlasApps/python/ctb_common.py b/Simulation/G4Atlas/G4AtlasApps/python/ctb_common.py
index 8453cec076774ecb8253e9ac3b892f2397174d20..57dd4e58f550d8a33ff50055b60bf313df89e409 100644
--- a/Simulation/G4Atlas/G4AtlasApps/python/ctb_common.py
+++ b/Simulation/G4Atlas/G4AtlasApps/python/ctb_common.py
@@ -1,7 +1,5 @@
 # Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
-from __future__ import print_function
-
 """
 - This module contains the common volumes, sub-detector envelopes,
   magnets, beam pipes, volumes for MCTruth and other common stuff
@@ -21,9 +19,6 @@ __author__ = 'M. Gallas'
 import math
 import PyG4Atlas, AtlasG4Eng
 from PyG4Atlas import DetFacilityT
-from atlas_materials import material_concrete
-from atlas_materials import material_mylar
-from atlas_materials import material_scintillator
 
 #=======================================================================
 # - CTB top volumes  ---------------------------------------------------
@@ -169,15 +164,15 @@ class LArFarUpstreamMaterial(object):
             self.material.df.MoveTo(self.material.position)
             AtlasG4Eng.G4Eng.add_DetFacility(self.material,\
                      AtlasG4Eng.G4Eng.Dict_DetFacility.get('CTB'))
-            AtlasG4Eng.G4Eng.log.info(' LArFarUpstreamMaterial is placed inside CTB '+\
-                   'at -20000 mm. ')
+            AtlasG4Eng.G4Eng.log.info(' LArFarUpstreamMaterial is placed inside CTB '
+                                      'at -20000 mm. ')
         elif(option==1):
             self.material.position=AtlasG4Eng.G4Eng.gbl.Hep3Vector(-1020.,0.,0.)
             self.material.df.MoveTo(self.material.position)
             AtlasG4Eng.G4Eng.add_DetFacility(self.material,\
                      AtlasG4Eng.G4Eng.Dict_DetFacility.get('IDET'))
-            AtlasG4Eng.G4Eng.log.info(' LArFarUpstreamMaterial is placed inside IDET '+\
-                   'at -1020 mm in order to use BeamConditions ')
+            AtlasG4Eng.G4Eng.log.info(' LArFarUpstreamMaterial is placed inside IDET '
+                                      'at -1020 mm in order to use BeamConditions ')
 
 # - Magnets  -----------------------------------------------------------
 __doc__+="""\n\n         -- MAGNETS -- \n"""
@@ -351,7 +346,7 @@ class ctb_beampipes_and_magnetsupstream:
             AtlasG4Eng.G4Eng.add_DetFacility(beampipe2,mbpl12)
             AtlasG4Eng.G4Eng.add_DetFacility(mylarequiv,ctb)
         if(mode==2):
-            equivupstreammaterial=EquivUpstreamMaterial()
+            equivupstreammaterial=EquivUpstreamMaterial()  # noqa: F841 (needed?)
 
 # - Muon-DUMP  ---------------------------------------------------------
 __doc__+="""\n\n         -- MUON DUMP -- \n"""
@@ -669,7 +664,7 @@ class CombinedScintillator:
        #self.ctb_combinedscintillator_position=AtlasG4Eng.G4Eng.gbl.CLHEP.Hep3Vector(-2.0,0.0,2200.)
         if (eta==0):
           self.ctb_combinedscintillator_position=AtlasG4Eng.G4Eng.gbl.CLHEP.Hep3Vector(-2.0,0.0,0.)
-        elif(eta>0,eta<=0.95):
+        elif(eta>0 and eta<=0.95):
           self.ctb_combinedscintillator_position=AtlasG4Eng.G4Eng.gbl.CLHEP.Hep3Vector(-2.0,0.0,\
                      2274.0/math.tan(2*math.atan(math.exp(-eta))) )
         elif(eta>0.95):
diff --git a/Simulation/G4Atlas/G4AtlasApps/python/ctb_field.py b/Simulation/G4Atlas/G4AtlasApps/python/ctb_field.py
index ca7965b9a1920aa6e94c23e88fa587c2e6b761b5..8e8ee9ebb669e70466cdb892ad098655fb08c059 100644
--- a/Simulation/G4Atlas/G4AtlasApps/python/ctb_field.py
+++ b/Simulation/G4Atlas/G4AtlasApps/python/ctb_field.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 """
 This module adds the magnetic fields to the CTB simulation
@@ -72,7 +72,7 @@ class CTBFieldMap(object):
         for i in dataPathList:
             if 'CTB' in i and 'magfield' in i:
                 self.mag_data_path = i
-                AtlasG4Eng.G4Eng.log.debug('ctb_field::CTBFieldMap found the datapath for field maps at %s' % self.mag_data_path)
+                AtlasG4Eng.G4Eng.log.debug('ctb_field::CTBFieldMap found the datapath for field maps at %s', self.mag_data_path)
         ## Restrict the field to the IDET volume
         if mode == 0:
             mapfield_CTB = PyG4Atlas.MagneticField('G4Field', 'G4AtlasFieldSvc', typefield='MapField')
@@ -94,7 +94,7 @@ class CTBFieldMap(object):
             try:
                 os.symlink(os.path.join(self.mag_data_path,fieldmap_name),
                            os.path.join(os.getcwd(),'magempty'))
-            except:
+            except Exception:
                 AtlasG4Eng.G4Eng.log.debug('ctb_field::CTBFieldMap DATAPATH not found --> try local link')
                 os.symlink(os.path.join(os.getcwd(),fieldmap_name),
                            os.path.join(os.getcwd(),'magempty'))
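Several hunks (PyG4Atlas.py, SimCtbKernel.py and ctb_field.py above) narrow bare 'except:' clauses to 'except Exception:', so that BaseException subclasses like KeyboardInterrupt and SystemExit keep propagating. A minimal sketch of the pattern around an optional module import (module names here are illustrative):

    def import_optional_module(name):
        """Import an optional configuration module, returning None if unavailable."""
        try:
            return __import__(name, globals(), locals())
        except Exception:
            # ImportError and friends end up here; Ctrl-C (KeyboardInterrupt) and
            # SystemExit derive from BaseException and still propagate.
            print("optional module '%s' was not found" % name)
            return None

    import_optional_module("math")                # found
    import_optional_module("no_such_sim_module")  # takes the fallback branch
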
diff --git a/Simulation/G4Atlas/G4AtlasApps/python/tbLArH6_calo.py b/Simulation/G4Atlas/G4AtlasApps/python/tbLArH6_calo.py
index fbeaab689e79e4b76e8de467ccd9d79b35418e0a..66fa4228f1f9373745e0e529cb9ea61f06bd6de8 100644
--- a/Simulation/G4Atlas/G4AtlasApps/python/tbLArH6_calo.py
+++ b/Simulation/G4Atlas/G4AtlasApps/python/tbLArH6_calo.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 """
  LAr H6 Calo detector description.
@@ -8,10 +8,7 @@
 __author__ = 'Joe Boudreau, Mohsen Kakhzad, Margret Fincke-Keeler, \
               J.P. Archambault, M. Gallas, A. Soukharev'
 
-import math
 from G4AtlasApps import PyG4Atlas, AtlasG4Eng
-from G4AtlasApps.PyG4Atlas import  DetFacility, PhysicsReg
-
 
 # -- World volume --
 class LArWorld(object):
@@ -47,7 +44,6 @@ class Det_Layouts:
 
         AtlasG4Eng.G4Eng.load_Dict('LArG4RunControlDict')
         import ROOT
-        import __main__
         from G4AtlasApps.SimFlags import simFlags
         if (simFlags.SimLayout.get_Value()=="tb_LArH6_2002"):
             pd=AtlasG4Eng.G4Eng.gbl.LArGeoTBH1GeoOptions()
diff --git a/Simulation/G4Atlas/G4AtlasApps/python/tbLArH6_flags.py b/Simulation/G4Atlas/G4AtlasApps/python/tbLArH6_flags.py
index 565d026931c5629050d73a1303df23e43a7878d2..7636f7f552344da60efc53f5bf42db5ca44fcbbd 100644
--- a/Simulation/G4Atlas/G4AtlasApps/python/tbLArH6_flags.py
+++ b/Simulation/G4Atlas/G4AtlasApps/python/tbLArH6_flags.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 """ Simulation specific flags.
 
@@ -20,7 +20,7 @@ from AthenaCommon.JobProperties import JobProperty
 
 # We re-use already defined flags that maybe we will customize here
 from G4AtlasApps.SimFlags import jobproperties
-from ctb_flags import *
+from ctb_flags import *  # noqa: F401 F403
 # 25/03/2008  A.M.  added  by analogy with atlas_flags.py
 
 jobproperties.SimFlags.CalibrationRun.set_Off()
diff --git a/Simulation/G4Atlas/G4AtlasApps/python/tbtile_flags.py b/Simulation/G4Atlas/G4AtlasApps/python/tbtile_flags.py
index b1819bc8e5a524b5d1a9064ea064978b97a90573..b64ea8e49f2ecfe2f1f7bc44569820f334f35f65 100644
--- a/Simulation/G4Atlas/G4AtlasApps/python/tbtile_flags.py
+++ b/Simulation/G4Atlas/G4AtlasApps/python/tbtile_flags.py
@@ -58,7 +58,6 @@ class Eta(JobProperty):
         #print (name, n_value)
         if name == "StoredValue":
             if type(n_value) == int or type (n_value) == float:
-                import math
                 if abs(n_value) > 1.1:
                     raise ValueError ('THE ETA VALUE MUST BE IN [-1.1,1.1]!!! The selected value %s is not in the range.' %n_value)
         JobProperty.__setattr__(self, name, n_value)
@@ -83,7 +82,6 @@ class Theta(JobProperty):
         #print (name, n_value)
         if name == "StoredValue":
             if type(n_value) == int or type (n_value) == float:
-                import math
                 if abs(n_value) > 60. and not (abs(abs(n_value)-90.0) < 0.01) :
                     raise ValueError ('THETA MUST BE IN [-60,60] or +/-90 !!! The selected value %s is not in the range.' %n_value)
         JobProperty.__setattr__(self, name, n_value)
diff --git a/Simulation/G4Utilities/MCTruthSimAlgs/src/MergeMcEventCollTool.cxx b/Simulation/G4Utilities/MCTruthSimAlgs/src/MergeMcEventCollTool.cxx
index e91a39b74fe6ea5ccedf7a29e5f5001a3f6796c7..869d1f28f8610b0886f36b765bac89d29b75d4d1 100644
--- a/Simulation/G4Utilities/MCTruthSimAlgs/src/MergeMcEventCollTool.cxx
+++ b/Simulation/G4Utilities/MCTruthSimAlgs/src/MergeMcEventCollTool.cxx
@@ -93,8 +93,8 @@ namespace {
     typedef std::map<IndexKey, int> PileUpBackgroundMap;
     GenEventSorter(const PileUpBackgroundMap& backgroundClassificationMap) : m_backgroundClassificationMap(backgroundClassificationMap) {}
     bool operator() (const HepMC::GenEvent *pGenEvent1, const HepMC::GenEvent *pGenEvent2) {
-      const int signal_process_id1(pGenEvent1->signal_process_id()), event_number1(pGenEvent1->event_number()), separator_hack1(pGenEvent1->mpi());
-      const int signal_process_id2(pGenEvent2->signal_process_id()), event_number2(pGenEvent2->event_number()), separator_hack2(pGenEvent2->mpi());
+      const int signal_process_id1(HepMC::signal_process_id(pGenEvent1)), event_number1(pGenEvent1->event_number()), separator_hack1(HepMC::mpi(pGenEvent1));
+      const int signal_process_id2(HepMC::signal_process_id(pGenEvent2)), event_number2(pGenEvent2->event_number()), separator_hack2(HepMC::mpi(pGenEvent2));
       const IndexKey key1(makekey(signal_process_id1, event_number1,separator_hack1));
       const IndexKey key2(makekey(signal_process_id2, event_number2,separator_hack2));
       const PileUpBackgroundMap::const_iterator event1=m_backgroundClassificationMap.find(key1);
@@ -309,7 +309,7 @@ void MergeMcEventCollTool::printDetailsOfMergedMcEventCollection() const {
     ATH_MSG_INFO ( "INTIME("<<int(INTIME)<<"), OUTOFTIME("<<int(OUTOFTIME)<<"), RESTOFMB("<<int(RESTOFMB)<<"), CAVERN("<<int(CAVERN)<<"), NOPUTYPE("<<int(NOPUTYPE)<<")" );
     ATH_MSG_INFO ( "Current OUTPUT GenEvent: " );
     while(outputEventItr!=endOfEvents) {
-      const int signal_process_id((*outputEventItr)->signal_process_id()), event_number((*outputEventItr)->event_number()), separator_hack((*outputEventItr)->mpi());
+      const int signal_process_id(HepMC::signal_process_id((*outputEventItr))), event_number((*outputEventItr)->event_number()), separator_hack(HepMC::mpi((*outputEventItr)));
       const IndexKey key(makekey(signal_process_id,event_number,separator_hack));
       const PileUpBackgroundMap::const_iterator event(m_backgroundClassificationMap.find(key));
       ATH_MSG_INFO ( "GenEvent #"<<event_number<<", signal_process_id="<<signal_process_id<<", category="<<event->second<<", number of Vertices="<<(*outputEventItr)->vertices_size() );
@@ -327,7 +327,7 @@ StatusCode MergeMcEventCollTool::processFirstSubEvent(const McEventCollection *p
   m_signal_event_number = m_pOvrlMcEvColl->at(0)->event_number();
   m_pOvrlMcEvColl->at(0)->set_event_number(-2); //Set this to zero for the purposes of sorting. (restore after sorting).
 
-  updateClassificationMap(m_pOvrlMcEvColl->at(0)->signal_process_id(), m_pOvrlMcEvColl->at(0)->event_number(), 0,- 1, true);
+  updateClassificationMap(HepMC::signal_process_id(m_pOvrlMcEvColl->at(0)), m_pOvrlMcEvColl->at(0)->event_number(), 0,- 1, true);
   m_newevent=false; //Now the McEventCollection and classification map are not empty this should be set to false.
   ATH_MSG_DEBUG( "execute: copied original event McEventCollection" );
   const unsigned int nBackgroundMcEventCollections(m_nInputMcEventColls-1); // -1 for original event
@@ -356,7 +356,7 @@ StatusCode MergeMcEventCollTool::processFirstSubEvent(const McEventCollection *p
     //if a type is enabled leave room to insert the events of that type, otherwise place separator immediately after
     currentMcEventCollectionIndex += 1;
     m_pOvrlMcEvColl->at(currentMcEventCollectionIndex-1) = new HepMC::GenEvent(0, -1); //pid 0 & event_number -1 flags this GenEvent as SEPARATOR
-    m_pOvrlMcEvColl->at(currentMcEventCollectionIndex-1)->set_mpi(type);
+    HepMC::set_mpi(m_pOvrlMcEvColl->at(currentMcEventCollectionIndex-1),type);
     updateClassificationMap(0, -1, type, type, true);
     ATH_MSG_DEBUG ( "Placing Separator for Type: "<<type<<" at Posistion: " << currentMcEventCollectionIndex-1 );
   }
@@ -437,7 +437,7 @@ StatusCode MergeMcEventCollTool::processTruthFilteredEvent(const McEventCollecti
   if ( fabs(currentEventTime)<51.0 ) {
     currentGenEventClassification = ( fabs(currentEventTime)<1.0 ) ? INTIME : OUTOFTIME;
   }
-  updateClassificationMap(currentBackgroundEvent.signal_process_id(),
+  updateClassificationMap(HepMC::signal_process_id(currentBackgroundEvent),
                           currentBackgroundEvent.event_number(),
                           0, currentGenEventClassification, true);
   return StatusCode::SUCCESS;
@@ -598,7 +598,7 @@ StatusCode MergeMcEventCollTool::compressOutputMcEventCollection() {
   if (! m_pOvrlMcEvColl->empty()) {
     DataVector<HepMC::GenEvent>::iterator outputEventItr(m_pOvrlMcEvColl->begin());
     while(outputEventItr!=m_pOvrlMcEvColl->end()) { //as end may change
-      const int signal_process_id((*outputEventItr)->signal_process_id()),event_number((*outputEventItr)->event_number());
+      const int signal_process_id(HepMC::signal_process_id((*outputEventItr))),event_number((*outputEventItr)->event_number());
       //Check for separators
       if(signal_process_id==0 && event_number==-1) {
         ++outputEventItr;
diff --git a/Simulation/G4Utilities/MCTruthSimAlgs/src/NewMergeMcEventCollTool.cxx b/Simulation/G4Utilities/MCTruthSimAlgs/src/NewMergeMcEventCollTool.cxx
index ea06f0aa03c0238c5300f4c25be1e31aeb0aa73e..3c3871c34a6c15ae2a7d484add40fe28644982ff 100644
--- a/Simulation/G4Utilities/MCTruthSimAlgs/src/NewMergeMcEventCollTool.cxx
+++ b/Simulation/G4Utilities/MCTruthSimAlgs/src/NewMergeMcEventCollTool.cxx
@@ -147,7 +147,7 @@ void NewMergeMcEventCollTool::printDetailsOfMergedMcEventCollection(McEventColle
   ATH_MSG_INFO ( "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$" );
   ATH_MSG_INFO ( "Current OUTPUT GenEvent: " );
   while(outputEventItr!=endOfEvents) {
-    const int signal_process_id((*outputEventItr)->signal_process_id());
+    const int signal_process_id(HepMC::signal_process_id((*outputEventItr)));
     const int event_number((*outputEventItr)->event_number());
     ATH_MSG_INFO ( "GenEvent #"<<event_number<<", signal_process_id="<<signal_process_id<</*", category="<<event->second<<*/", number of Vertices="<<(*outputEventItr)->vertices_size() );
     char fname[80];
diff --git a/Simulation/SimuJobTransforms/share/CommonSkeletonJobOptions.py b/Simulation/SimuJobTransforms/share/CommonSkeletonJobOptions.py
index 51257d2a0a5c94ebe16804991269bec136409cde..791a3031c5bdcd11f52723c528cce137d27dd870 100644
--- a/Simulation/SimuJobTransforms/share/CommonSkeletonJobOptions.py
+++ b/Simulation/SimuJobTransforms/share/CommonSkeletonJobOptions.py
@@ -35,7 +35,7 @@ if hasattr(runArgs,"beamType"):
 
 # Avoid command line preInclude for event service
 if hasattr(runArgs, "eventService") and runArgs.eventService:
-    include('AthenaMP/AthenaMP_EventService.py')
+    import AthenaMP.EventService
 
 ## autoConfiguration keywords triggering pre-defined functions
 ## if hasattr(runArgs,"autoConfiguration"):
diff --git a/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2010_ttbar_no_pileup.sh b/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2010_ttbar_no_pileup.sh
index b3d133f8d69be918387e5cf54283fb781f448242..81a95418abfcafdec8e32fc8605577ecbef0fe85 100755
--- a/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2010_ttbar_no_pileup.sh
+++ b/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2010_ttbar_no_pileup.sh
@@ -37,7 +37,7 @@ echo "Reference set being used: " ${DigitizationTestsVersion}
 if [ $rc -eq 0 ]
 then
     # Do reference comparisons
-    art-diff.py ./$DigiOutFileName   /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
+    art.py compare ref --diff-pool $DigiOutFileName   /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
     rc1=$?
 fi
 echo  "art-result: $rc1 diff-pool"
@@ -46,7 +46,7 @@ echo  "art-result: $rc1 diff-pool"
 #
 if [ $rc -eq 0 ]
 then
-    art-diff.py ./$DigiOutFileName /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName --diff-type=diff-root --mode=semi-detailed
+    art.py compare ref --mode=semi-detailed --diff-root $DigiOutFileName /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
     rc2=$?
 fi
 echo  "art-result: $rc2 diff-root"
@@ -66,4 +66,4 @@ then
     art.py compare grid --entries 10 ${ArtPackage} ${ArtJobName} --mode=semi-detailed
     rc4=$?
 fi
-echo  "art-result: $rc4 art-compare"
+echo  "art-result: $rc4 regression"
diff --git a/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2011_ttbar_no_pileup.sh b/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2011_ttbar_no_pileup.sh
index c2bd92d54c79b2cea7826b1ef04f152046bd64e0..c9d05fe2faee4da272cf607bf9de999e3c84d82a 100755
--- a/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2011_ttbar_no_pileup.sh
+++ b/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2011_ttbar_no_pileup.sh
@@ -38,7 +38,7 @@ echo "Reference set being used: " ${DigitizationTestsVersion}
 if [ $rc -eq 0 ]
 then
     # Do reference comparisons
-    art-diff.py ./$DigiOutFileName   /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
+    art.py compare ref --diff-pool $DigiOutFileName   /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
     rc1=$?
 fi
 echo  "art-result: $rc1 diff-pool"
@@ -47,7 +47,7 @@ echo  "art-result: $rc1 diff-pool"
 #
 if [ $rc -eq 0 ]
 then
-    art-diff.py ./$DigiOutFileName /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName --diff-type=diff-root --mode=semi-detailed
+    art.py compare ref --mode=semi-detailed --diff-root $DigiOutFileName /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
     rc2=$?
 fi
 echo  "art-result: $rc2 diff-root"
@@ -67,4 +67,4 @@ then
     art.py compare grid --entries 10 ${ArtPackage} ${ArtJobName} --mode=semi-detailed
     rc4=$?
 fi
-echo  "art-result: $rc4 art-compare"
+echo  "art-result: $rc4 regression"
diff --git a/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2012_ttbar_no_pileup.sh b/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2012_ttbar_no_pileup.sh
index c28c93802da151653d4c17e79f7a736635fbe93e..754d8977a3504a777862a7224902ea336470a735 100755
--- a/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2012_ttbar_no_pileup.sh
+++ b/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2012_ttbar_no_pileup.sh
@@ -38,7 +38,7 @@ echo "Reference set being used: " ${DigitizationTestsVersion}
 if [ $rc -eq 0 ]
 then
     # Do reference comparisons
-    art-diff.py ./$DigiOutFileName   /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
+    art.py compare ref --diff-pool $DigiOutFileName   /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
     rc1=$?
 fi
 echo  "art-result: $rc1 diff-pool"
@@ -47,7 +47,7 @@ echo  "art-result: $rc1 diff-pool"
 #
 if [ $rc -eq 0 ]
 then
-    art-diff.py ./$DigiOutFileName /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName --diff-type=diff-root --mode=semi-detailed
+    art.py compare ref --mode=semi-detailed --diff-root $DigiOutFileName /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
     rc2=$?
 fi
 echo  "art-result: $rc2 diff-root"
@@ -67,4 +67,4 @@ then
     art.py compare grid --entries 10 ${ArtPackage} ${ArtJobName} --mode=semi-detailed
     rc4=$?
 fi
-echo  "art-result: $rc4 art-compare"
+echo  "art-result: $rc4 regression"
diff --git a/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2015_cosmics.sh b/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2015_cosmics.sh
index 087907d17cdf495ef998b525ae0c27698a5992fb..0b51453156e1cbc8a5efcf9894a4d6d4d1e17f1e 100755
--- a/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2015_cosmics.sh
+++ b/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2015_cosmics.sh
@@ -36,7 +36,7 @@ echo "Reference set being used: " ${DigitizationTestsVersion}
 if [ $rc -eq 0 ]
 then
     # Do reference comparisons
-    art-diff.py ./$DigiOutFileName   /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
+    art.py compare ref --diff-pool $DigiOutFileName   /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
     rc1=$?
 fi
 echo  "art-result: $rc1 diff-pool"
@@ -45,7 +45,7 @@ echo  "art-result: $rc1 diff-pool"
 #
 if [ $rc -eq 0 ]
 then
-    art-diff.py ./$DigiOutFileName /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName --diff-type=diff-root --mode=semi-detailed
+    art.py compare ref --mode=semi-detailed --diff-root $DigiOutFileName /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
     rc2=$?
 fi
 echo  "art-result: $rc2 diff-root"
@@ -65,4 +65,4 @@ then
     art.py compare grid --entries 10 ${ArtPackage} ${ArtJobName} --mode=semi-detailed
     rc4=$?
 fi
-echo  "art-result: $rc4 art-compare"
+echo  "art-result: $rc4 regression"
diff --git a/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2015_nu_25ns_premixing.sh b/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2015_nu_25ns_premixing.sh
index 0f3befa673873b0f45d66eeacfa72c57d84a252b..b2fea2471ca9e4f48bbe53dcc7710fcb75701fac 100755
--- a/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2015_nu_25ns_premixing.sh
+++ b/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2015_nu_25ns_premixing.sh
@@ -52,7 +52,7 @@ echo "Reference set being used: " ${DigitizationTestsVersion}
 if [ $rc -eq 0 ]
 then
     # Do reference comparisons
-    art-diff.py ./$DigiOutFileName   /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
+    art.py compare ref --diff-pool $DigiOutFileName   /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
     rc1=$?
 fi
 echo  "art-result: $rc1 diff-pool"
@@ -61,7 +61,7 @@ echo  "art-result: $rc1 diff-pool"
 #
 if [ $rc -eq 0 ]
 then
-    art-diff.py ./$DigiOutFileName /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName --diff-type=diff-root --mode=semi-detailed
+    art.py compare ref --mode=semi-detailed --diff-root $DigiOutFileName /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
     rc2=$?
 fi
 echo  "art-result: $rc2 diff-root"
@@ -81,4 +81,4 @@ then
     art.py compare grid --entries 10 ${ArtPackage} ${ArtJobName} --mode=semi-detailed
     rc4=$?
 fi
-echo  "art-result: $rc4 art-compare"
+echo  "art-result: $rc4 regression"
diff --git a/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2015_ttbar_25ns_algs_pileup.sh b/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2015_ttbar_25ns_algs_pileup.sh
index 4d0006575e88676025efddb6d6b8f830218f19b3..f4a1ea09352a330a7231b0b6f023058926f86d83 100755
--- a/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2015_ttbar_25ns_algs_pileup.sh
+++ b/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2015_ttbar_25ns_algs_pileup.sh
@@ -51,7 +51,7 @@ echo "Reference set being used: " ${DigitizationTestsVersion}
 if [ $rc -eq 0 ]
 then
     # Do reference comparisons
-    art-diff.py ./$DigiOutFileName   /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
+    art.py compare ref --diff-pool $DigiOutFileName   /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
     rc1=$?
 fi
 echo  "art-result: $rc1 diff-pool"
@@ -60,7 +60,7 @@ echo  "art-result: $rc1 diff-pool"
 #
 if [ $rc -eq 0 ]
 then
-    art-diff.py ./$DigiOutFileName /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName --diff-type=diff-root --mode=semi-detailed
+    art.py compare ref --mode=semi-detailed --diff-root $DigiOutFileName /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
     rc2=$?
 fi
 echo  "art-result: $rc2 diff-root"
@@ -80,4 +80,4 @@ then
     art.py compare grid --entries 10 ${ArtPackage} ${ArtJobName} --mode=semi-detailed
     rc4=$?
 fi
-echo  "art-result: $rc4 art-compare"
+echo  "art-result: $rc4 regression"
diff --git a/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2015_ttbar_25ns_pileup.sh b/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2015_ttbar_25ns_pileup.sh
index f3fbc977ef1bc4c2a5211552aaee6ccb77c91a70..8916c71323630d5b85601ac4ecf68592dd2052f2 100755
--- a/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2015_ttbar_25ns_pileup.sh
+++ b/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2015_ttbar_25ns_pileup.sh
@@ -48,7 +48,7 @@ echo "Reference set being used: " ${DigitizationTestsVersion}
 if [ $rc -eq 0 ]
 then
     # Do reference comparisons
-    art-diff.py ./$DigiOutFileName   /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
+    art.py compare ref --diff-pool $DigiOutFileName   /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
     rc1=$?
 fi
 echo  "art-result: $rc1 diff-pool"
@@ -57,7 +57,7 @@ echo  "art-result: $rc1 diff-pool"
 #
 if [ $rc -eq 0 ]
 then
-    art-diff.py ./$DigiOutFileName /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName --diff-type=diff-root --mode=semi-detailed
+    art.py compare ref --mode=semi-detailed --diff-root $DigiOutFileName /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
     rc2=$?
 fi
 echo  "art-result: $rc2 diff-root"
@@ -77,4 +77,4 @@ then
     art.py compare grid --entries 10 ${ArtPackage} ${ArtJobName} --mode=semi-detailed
     rc4=$?
 fi
-echo  "art-result: $rc4 art-compare"
+echo  "art-result: $rc4 regression"
diff --git a/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2015_ttbar_25ns_pileup_noNoise.sh b/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2015_ttbar_25ns_pileup_noNoise.sh
index 5577039770c016848386f5583197b251294a304b..41a3755ff45c3d2ce6363d4930401ab3511a9844 100755
--- a/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2015_ttbar_25ns_pileup_noNoise.sh
+++ b/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2015_ttbar_25ns_pileup_noNoise.sh
@@ -50,7 +50,7 @@ echo "Reference set being used: " ${DigitizationTestsVersion}
 if [ $rc -eq 0 ]
 then
     # Do reference comparisons
-    art-diff.py ./$DigiOutFileName   /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
+    art.py compare ref --diff-pool $DigiOutFileName   /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
     rc1=$?
 fi
 echo  "art-result: $rc1 diff-pool"
@@ -59,7 +59,7 @@ echo  "art-result: $rc1 diff-pool"
 #
 if [ $rc -eq 0 ]
 then
-    art-diff.py ./$DigiOutFileName /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName --diff-type=diff-root --mode=semi-detailed
+    art.py compare ref --mode=semi-detailed --diff-root $DigiOutFileName /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
     rc2=$?
 fi
 echo  "art-result: $rc2 diff-root"
@@ -79,4 +79,4 @@ then
     art.py compare grid --entries 10 ${ArtPackage} ${ArtJobName} --mode=semi-detailed
     rc4=$?
 fi
-echo  "art-result: $rc4 art-compare"
+echo  "art-result: $rc4 regression"
diff --git a/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2015_ttbar_50ns_pileup.sh b/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2015_ttbar_50ns_pileup.sh
index 4c5cee56b5daf7e34b3634995f599c59560a4100..84844a169a553e1a17b5c16aff0da4d34633935e 100755
--- a/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2015_ttbar_50ns_pileup.sh
+++ b/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2015_ttbar_50ns_pileup.sh
@@ -49,7 +49,7 @@ echo "Reference set being used: " ${DigitizationTestsVersion}
 if [ $rc -eq 0 ]
 then
     # Do reference comparisons
-    art-diff.py ./$DigiOutFileName   /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
+    art.py compare ref --diff-pool $DigiOutFileName   /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
     rc1=$?
 fi
 echo  "art-result: $rc1 diff-pool"
@@ -58,7 +58,7 @@ echo  "art-result: $rc1 diff-pool"
 #
 if [ $rc -eq 0 ]
 then
-    art-diff.py ./$DigiOutFileName /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName --diff-type=diff-root --mode=semi-detailed
+    art.py compare ref --mode=semi-detailed --diff-root $DigiOutFileName /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
     rc2=$?
 fi
 echo  "art-result: $rc2 diff-root"
@@ -78,4 +78,4 @@ then
     art.py compare grid --entries 10 ${ArtPackage} ${ArtJobName} --mode=semi-detailed
     rc4=$?
 fi
-echo  "art-result: $rc4 art-compare"
+echo  "art-result: $rc4 regression"
diff --git a/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2015_ttbar_no_pileup.sh b/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2015_ttbar_no_pileup.sh
index 4a09715a8ed3f0d7376a424a382d3c8c82d5b30c..2577bffb616fe1cfb620f9362bf1b88b87fd288e 100755
--- a/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2015_ttbar_no_pileup.sh
+++ b/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc15_2015_ttbar_no_pileup.sh
@@ -39,7 +39,7 @@ echo "Reference set being used: " ${DigitizationTestsVersion}
 if [ $rc -eq 0 ]
 then
     # Do reference comparisons
-    art-diff.py ./$DigiOutFileName   /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
+    art.py compare ref --diff-pool $DigiOutFileName   /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
     rc1=$?
 fi
 echo  "art-result: $rc1 diff-pool"
@@ -48,7 +48,7 @@ echo  "art-result: $rc1 diff-pool"
 #
 if [ $rc -eq 0 ]
 then
-    art-diff.py ./$DigiOutFileName /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName --diff-type=diff-root --mode=semi-detailed
+    art.py compare ref --mode=semi-detailed --diff-root $DigiOutFileName /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
     rc2=$?
 fi
 echo  "art-result: $rc2 diff-root"
@@ -68,4 +68,4 @@ then
     art.py compare grid --entries 10 ${ArtPackage} ${ArtJobName} --mode=semi-detailed
     rc4=$?
 fi
-echo  "art-result: $rc4 art-compare"
+echo  "art-result: $rc4 regression"
diff --git a/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc16a_qballs.sh b/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc16a_qballs.sh
index 4ce7e8140e04ee1886146ec94620b0dfc7a77a9b..c921a9a239ce88376c6822b60a646afe8f3fe0b2 100755
--- a/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc16a_qballs.sh
+++ b/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc16a_qballs.sh
@@ -42,4 +42,4 @@ ArtJobName=$2
 
 
 art.py compare grid --entries 10 ${ArtPackage} ${ArtJobName} --mode=semi-detailed
-echo  "art-result: $? art-compare"
+echo  "art-result: $? regression"
diff --git a/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc16a_ttbar.sh b/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc16a_ttbar.sh
index e92b2a25b869f7e364ca99580ef232a70355dbc8..9af380649f491e495983b8ec14051c7a1469a468 100755
--- a/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc16a_ttbar.sh
+++ b/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc16a_ttbar.sh
@@ -49,7 +49,7 @@ echo "Reference set being used: " ${DigitizationTestsVersion}
 if [ $rc -eq 0 ]
 then
     # Do reference comparisons
-    art-diff.py ./$DigiOutFileName   /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
+    art.py compare ref --diff-pool $DigiOutFileName   /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
     rc1=$?
 fi
 echo  "art-result: $rc1 diff-pool"
@@ -58,7 +58,7 @@ echo  "art-result: $rc1 diff-pool"
 #
 if [ $rc -eq 0 ]
 then
-    art-diff.py ./$DigiOutFileName /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName --diff-type=diff-root --mode=semi-detailed
+    art.py compare ref --mode=semi-detailed --diff-root $DigiOutFileName /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
     rc2=$?
 fi
 echo  "art-result: $rc2 diff-root"
@@ -78,4 +78,4 @@ then
     art.py compare grid --entries 10 ${ArtPackage} ${ArtJobName} --mode=semi-detailed
     rc4=$?
 fi
-echo  "art-result: $rc4 art-compare"
+echo  "art-result: $rc4 regression"
diff --git a/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc16d_premixing.sh b/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc16d_premixing.sh
index d4648addc8a0e1673797747b6fdbd26fac5c4d3f..fa896292876eeef91dce7931676aef1a793cb01e 100755
--- a/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc16d_premixing.sh
+++ b/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc16d_premixing.sh
@@ -50,7 +50,7 @@ echo "Reference set being used: " ${DigitizationTestsVersion}
 if [ $rc -eq 0 ]
 then
     # Do reference comparisons
-    art-diff.py ./$DigiOutFileName   /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
+    art.py compare ref --diff-pool $DigiOutFileName   /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
     rc1=$?
 fi
 echo  "art-result: $rc1 diff-pool"
@@ -59,7 +59,7 @@ echo  "art-result: $rc1 diff-pool"
 #
 if [ $rc -eq 0 ]
 then
-    art-diff.py ./$DigiOutFileName /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName --diff-type=diff-root --mode=semi-detailed
+    art.py compare ref --mode=semi-detailed --diff-root $DigiOutFileName /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
     rc2=$?
 fi
 echo  "art-result: $rc2 diff-root"
@@ -79,4 +79,4 @@ then
     art.py compare grid --entries 10 ${ArtPackage} ${ArtJobName} --mode=semi-detailed
     rc4=$?
 fi
-echo  "art-result: $rc4 art-compare"
+echo  "art-result: $rc4 regression"
diff --git a/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc16d_premixing_for_MT.sh b/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc16d_premixing_for_MT.sh
index 9ba3b977fb5a4cc115e771c54d8e31a4b6a73b1a..56b985466e63891865932c754a88f363f6d2d473 100755
--- a/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc16d_premixing_for_MT.sh
+++ b/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc16d_premixing_for_MT.sh
@@ -50,7 +50,7 @@ echo "Reference set being used: " ${DigitizationTestsVersion}
 if [ $rc -eq 0 ]
 then
     # Do reference comparisons
-    art-diff.py ./$DigiOutFileName   /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
+    art.py compare ref --diff-pool $DigiOutFileName   /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
     rc1=$?
 fi
 echo  "art-result: $rc1 diff-pool"
@@ -59,7 +59,7 @@ echo  "art-result: $rc1 diff-pool"
 #
 if [ $rc -eq 0 ]
 then
-    art-diff.py ./$DigiOutFileName /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName --diff-type=diff-root --mode=semi-detailed
+    art.py compare ref --mode=semi-detailed --diff-root $DigiOutFileName /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
     rc2=$?
 fi
 echo  "art-result: $rc2 diff-root"
@@ -79,4 +79,4 @@ then
     art.py compare grid --entries 10 ${ArtPackage} ${ArtJobName} --mode=semi-detailed
     rc4=$?
 fi
-echo  "art-result: $rc4 art-compare"
+echo  "art-result: $rc4 regression"
diff --git a/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc16d_ttbar.sh b/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc16d_ttbar.sh
index 81c05acab1a2989e0b23f8eb0397ac6bdf5f3d6b..26d54d5b1439d75c49f4fce050d8d94aa4c32496 100755
--- a/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc16d_ttbar.sh
+++ b/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc16d_ttbar.sh
@@ -55,7 +55,7 @@ echo "Reference set being used: " ${DigitizationTestsVersion}
 if [ $rc -eq 0 ]
 then
     # Do reference comparisons
-    art-diff.py ./$DigiOutFileName   /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
+    art.py compare ref --diff-pool $DigiOutFileName   /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
     rc1=$?
 fi
 echo  "art-result: $rc1 diff-pool"
@@ -64,7 +64,7 @@ echo  "art-result: $rc1 diff-pool"
 #
 if [ $rc -eq 0 ]
 then
-    art-diff.py ./$DigiOutFileName /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName --diff-type=diff-root --mode=semi-detailed
+    art.py compare ref --mode=semi-detailed --diff-root $DigiOutFileName /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
     rc2=$?
 fi
 echo  "art-result: $rc2 diff-root"
@@ -84,4 +84,4 @@ then
     art.py compare grid --entries 10 ${ArtPackage} ${ArtJobName} --mode=semi-detailed
     rc4=$?
 fi
-echo  "art-result: $rc4 art-compare"
+echo  "art-result: $rc4 regression"
diff --git a/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc16e_ttbar.sh b/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc16e_ttbar.sh
index 7f7edce02f2f0cdb8695217181001a9753fbe1ef..e01270130e5e8fee704f6aeb272976f450b7a114 100755
--- a/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc16e_ttbar.sh
+++ b/Simulation/Tests/DigitizationTests/test/test_Digi_tf_mc16e_ttbar.sh
@@ -55,7 +55,7 @@ echo "Reference set being used: " ${DigitizationTestsVersion}
 if [ $rc -eq 0 ]
 then
     # Do reference comparisons
-    art-diff.py ./$DigiOutFileName   /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
+    art.py compare ref --diff-pool $DigiOutFileName   /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
     rc1=$?
 fi
 echo  "art-result: $rc1 diff-pool"
@@ -64,7 +64,7 @@ echo  "art-result: $rc1 diff-pool"
 #
 if [ $rc -eq 0 ]
 then
-    art-diff.py ./$DigiOutFileName /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName --diff-type=diff-root --mode=semi-detailed
+    art.py compare ref --mode=semi-detailed --diff-root $DigiOutFileName /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/DigitizationTests/ReferenceFiles/$DigitizationTestsVersion/$CMTCONFIG/$DigiOutFileName
     rc2=$?
 fi
 echo  "art-result: $rc2 diff-root"
@@ -84,4 +84,4 @@ then
     art.py compare grid --entries 10 ${ArtPackage} ${ArtJobName} --mode=semi-detailed
     rc4=$?
 fi
-echo  "art-result: $rc4 art-compare"
+echo  "art-result: $rc4 regression"
diff --git a/Simulation/ISF/ISF_Validation/CMakeLists.txt b/Simulation/Tests/ISF_Validation/CMakeLists.txt
similarity index 100%
rename from Simulation/ISF/ISF_Validation/CMakeLists.txt
rename to Simulation/Tests/ISF_Validation/CMakeLists.txt
diff --git a/Simulation/ISF/ISF_Validation/scripts/generate_new_ISF_RTT_MCProd_references.sh b/Simulation/Tests/ISF_Validation/scripts/generate_new_ISF_RTT_MCProd_references.sh
similarity index 100%
rename from Simulation/ISF/ISF_Validation/scripts/generate_new_ISF_RTT_MCProd_references.sh
rename to Simulation/Tests/ISF_Validation/scripts/generate_new_ISF_RTT_MCProd_references.sh
diff --git a/Simulation/ISF/ISF_Validation/scripts/skip_if_dbg.sh b/Simulation/Tests/ISF_Validation/scripts/skip_if_dbg.sh
similarity index 100%
rename from Simulation/ISF/ISF_Validation/scripts/skip_if_dbg.sh
rename to Simulation/Tests/ISF_Validation/scripts/skip_if_dbg.sh
diff --git a/Simulation/ISF/ISF_Validation/test/test_ATLFASTIIF_ttbar_2evts.sh b/Simulation/Tests/ISF_Validation/test/test_ATLFASTIIF_ttbar_2evts.sh
similarity index 100%
rename from Simulation/ISF/ISF_Validation/test/test_ATLFASTIIF_ttbar_2evts.sh
rename to Simulation/Tests/ISF_Validation/test/test_ATLFASTIIF_ttbar_2evts.sh
diff --git a/Simulation/ISF/ISF_Validation/test/test_ATLFASTII_ttbar_2evts.sh b/Simulation/Tests/ISF_Validation/test/test_ATLFASTII_ttbar_2evts.sh
similarity index 100%
rename from Simulation/ISF/ISF_Validation/test/test_ATLFASTII_ttbar_2evts.sh
rename to Simulation/Tests/ISF_Validation/test/test_ATLFASTII_ttbar_2evts.sh
diff --git a/Simulation/ISF/ISF_Validation/test/test_AtlasG4_FullG4_comparison.sh b/Simulation/Tests/ISF_Validation/test/test_AtlasG4_FullG4_comparison.sh
similarity index 100%
rename from Simulation/ISF/ISF_Validation/test/test_AtlasG4_FullG4_comparison.sh
rename to Simulation/Tests/ISF_Validation/test/test_AtlasG4_FullG4_comparison.sh
diff --git a/Simulation/ISF/ISF_Validation/test/test_FatrasGammaCones_FastCalo_2evts.sh b/Simulation/Tests/ISF_Validation/test/test_FatrasGammaCones_FastCalo_2evts.sh
similarity index 100%
rename from Simulation/ISF/ISF_Validation/test/test_FatrasGammaCones_FastCalo_2evts.sh
rename to Simulation/Tests/ISF_Validation/test/test_FatrasGammaCones_FastCalo_2evts.sh
diff --git a/Simulation/ISF/ISF_Validation/test/test_FullG4_LongLived_ZPrime_2evts.sh b/Simulation/Tests/ISF_Validation/test/test_FullG4_LongLived_ZPrime_2evts.sh
similarity index 100%
rename from Simulation/ISF/ISF_Validation/test/test_FullG4_LongLived_ZPrime_2evts.sh
rename to Simulation/Tests/ISF_Validation/test/test_FullG4_LongLived_ZPrime_2evts.sh
diff --git a/Simulation/ISF/ISF_Validation/test/test_FullG4_ttbar_2evts.sh b/Simulation/Tests/ISF_Validation/test/test_FullG4_ttbar_2evts.sh
similarity index 100%
rename from Simulation/ISF/ISF_Validation/test/test_FullG4_ttbar_2evts.sh
rename to Simulation/Tests/ISF_Validation/test/test_FullG4_ttbar_2evts.sh
diff --git a/Simulation/ISF/ISF_Validation/test/test_G4BHadronsOnly_FastCalo_2evts.sh b/Simulation/Tests/ISF_Validation/test/test_G4BHadronsOnly_FastCalo_2evts.sh
similarity index 100%
rename from Simulation/ISF/ISF_Validation/test/test_G4BHadronsOnly_FastCalo_2evts.sh
rename to Simulation/Tests/ISF_Validation/test/test_G4BHadronsOnly_FastCalo_2evts.sh
diff --git a/Simulation/ISF/ISF_Validation/test/test_G4GammaCones_2evts.sh b/Simulation/Tests/ISF_Validation/test/test_G4GammaCones_2evts.sh
similarity index 100%
rename from Simulation/ISF/ISF_Validation/test/test_G4GammaCones_2evts.sh
rename to Simulation/Tests/ISF_Validation/test/test_G4GammaCones_2evts.sh
diff --git a/Simulation/ISF/ISF_Validation/test/test_G4GammaCones_FastCalo_2evts.sh b/Simulation/Tests/ISF_Validation/test/test_G4GammaCones_FastCalo_2evts.sh
similarity index 100%
rename from Simulation/ISF/ISF_Validation/test/test_G4GammaCones_FastCalo_2evts.sh
rename to Simulation/Tests/ISF_Validation/test/test_G4GammaCones_FastCalo_2evts.sh
diff --git a/Simulation/ISF/ISF_Validation/test/test_G4JPsiLeptonsCone_FatrasID_FastCalo_2evts.sh b/Simulation/Tests/ISF_Validation/test/test_G4JPsiLeptonsCone_FatrasID_FastCalo_2evts.sh
similarity index 100%
rename from Simulation/ISF/ISF_Validation/test/test_G4JPsiLeptonsCone_FatrasID_FastCalo_2evts.sh
rename to Simulation/Tests/ISF_Validation/test/test_G4JPsiLeptonsCone_FatrasID_FastCalo_2evts.sh
diff --git a/Simulation/ISF/ISF_Validation/test/test_G4WLeptonsCone_FatrasID_FastCalo_2evts.sh b/Simulation/Tests/ISF_Validation/test/test_G4WLeptonsCone_FatrasID_FastCalo_2evts.sh
similarity index 100%
rename from Simulation/ISF/ISF_Validation/test/test_G4WLeptonsCone_FatrasID_FastCalo_2evts.sh
rename to Simulation/Tests/ISF_Validation/test/test_G4WLeptonsCone_FatrasID_FastCalo_2evts.sh
diff --git a/Simulation/ISF/ISF_Validation/test/test_G4ZDecayProducts_FatrasID_FastCalo_2evts.sh b/Simulation/Tests/ISF_Validation/test/test_G4ZDecayProducts_FatrasID_FastCalo_2evts.sh
similarity index 100%
rename from Simulation/ISF/ISF_Validation/test/test_G4ZDecayProducts_FatrasID_FastCalo_2evts.sh
rename to Simulation/Tests/ISF_Validation/test/test_G4ZDecayProducts_FatrasID_FastCalo_2evts.sh
diff --git a/Simulation/ISF/ISF_Validation/test/test_MC16_ATLFASTIIF_G4MS_ttbar.sh b/Simulation/Tests/ISF_Validation/test/test_MC16_ATLFASTIIF_G4MS_ttbar.sh
similarity index 100%
rename from Simulation/ISF/ISF_Validation/test/test_MC16_ATLFASTIIF_G4MS_ttbar.sh
rename to Simulation/Tests/ISF_Validation/test/test_MC16_ATLFASTIIF_G4MS_ttbar.sh
diff --git a/Simulation/ISF/ISF_Validation/test/test_MC16_ATLFASTIIF_ttbar.sh b/Simulation/Tests/ISF_Validation/test/test_MC16_ATLFASTIIF_ttbar.sh
similarity index 100%
rename from Simulation/ISF/ISF_Validation/test/test_MC16_ATLFASTIIF_ttbar.sh
rename to Simulation/Tests/ISF_Validation/test/test_MC16_ATLFASTIIF_ttbar.sh
diff --git a/Simulation/ISF/ISF_Validation/test/test_MC16_ATLFASTII_ttbar.sh b/Simulation/Tests/ISF_Validation/test/test_MC16_ATLFASTII_ttbar.sh
similarity index 100%
rename from Simulation/ISF/ISF_Validation/test/test_MC16_ATLFASTII_ttbar.sh
rename to Simulation/Tests/ISF_Validation/test/test_MC16_ATLFASTII_ttbar.sh
diff --git a/Simulation/ISF/ISF_Validation/test/test_MC16_FullG4_QS_ZPrimebb_ExtraParticles.sh b/Simulation/Tests/ISF_Validation/test/test_MC16_FullG4_QS_ZPrimebb_ExtraParticles.sh
similarity index 100%
rename from Simulation/ISF/ISF_Validation/test/test_MC16_FullG4_QS_ZPrimebb_ExtraParticles.sh
rename to Simulation/Tests/ISF_Validation/test/test_MC16_FullG4_QS_ZPrimebb_ExtraParticles.sh
diff --git a/Simulation/ISF/ISF_Validation/test/test_MC16_FullG4_QS_ttbar.sh b/Simulation/Tests/ISF_Validation/test/test_MC16_FullG4_QS_ttbar.sh
similarity index 100%
rename from Simulation/ISF/ISF_Validation/test/test_MC16_FullG4_QS_ttbar.sh
rename to Simulation/Tests/ISF_Validation/test/test_MC16_FullG4_QS_ttbar.sh
diff --git a/Simulation/ISF/ISF_Validation/test/test_MC16_FullG4_ttbar.sh b/Simulation/Tests/ISF_Validation/test/test_MC16_FullG4_ttbar.sh
similarity index 100%
rename from Simulation/ISF/ISF_Validation/test/test_MC16_FullG4_ttbar.sh
rename to Simulation/Tests/ISF_Validation/test/test_MC16_FullG4_ttbar.sh
diff --git a/Simulation/ISF/ISF_Validation/test/test_MC16_FullG4_ttbar_2evts.sh b/Simulation/Tests/ISF_Validation/test/test_MC16_FullG4_ttbar_2evts.sh
similarity index 100%
rename from Simulation/ISF/ISF_Validation/test/test_MC16_FullG4_ttbar_2evts.sh
rename to Simulation/Tests/ISF_Validation/test/test_MC16_FullG4_ttbar_2evts.sh
diff --git a/Simulation/ISF/ISF_Validation/test/test_MC16_G4FastCalo_ttbar.sh b/Simulation/Tests/ISF_Validation/test/test_MC16_G4FastCalo_ttbar.sh
similarity index 100%
rename from Simulation/ISF/ISF_Validation/test/test_MC16_G4FastCalo_ttbar.sh
rename to Simulation/Tests/ISF_Validation/test/test_MC16_G4FastCalo_ttbar.sh
diff --git a/Simulation/ISF/ISF_Validation/test/test_RUN3Sym_FullG4_ttbar.sh b/Simulation/Tests/ISF_Validation/test/test_RUN3Sym_FullG4_ttbar.sh
similarity index 100%
rename from Simulation/ISF/ISF_Validation/test/test_RUN3Sym_FullG4_ttbar.sh
rename to Simulation/Tests/ISF_Validation/test/test_RUN3Sym_FullG4_ttbar.sh
diff --git a/Simulation/ISF/ISF_Validation/test/test_RUN3_FullG4_ttbar.sh b/Simulation/Tests/ISF_Validation/test/test_RUN3_FullG4_ttbar.sh
similarity index 100%
rename from Simulation/ISF/ISF_Validation/test/test_RUN3_FullG4_ttbar.sh
rename to Simulation/Tests/ISF_Validation/test/test_RUN3_FullG4_ttbar.sh
diff --git a/Simulation/ISF/ISF_Validation/test/test_Sim_ATLFASTIIF_minbias.sh b/Simulation/Tests/ISF_Validation/test/test_Sim_ATLFASTIIF_minbias.sh
similarity index 100%
rename from Simulation/ISF/ISF_Validation/test/test_Sim_ATLFASTIIF_minbias.sh
rename to Simulation/Tests/ISF_Validation/test/test_Sim_ATLFASTIIF_minbias.sh
diff --git a/Simulation/ISF/ISF_Validation/test/test_Sim_ATLFASTIIF_ttbar.sh b/Simulation/Tests/ISF_Validation/test/test_Sim_ATLFASTIIF_ttbar.sh
similarity index 100%
rename from Simulation/ISF/ISF_Validation/test/test_Sim_ATLFASTIIF_ttbar.sh
rename to Simulation/Tests/ISF_Validation/test/test_Sim_ATLFASTIIF_ttbar.sh
diff --git a/Simulation/ISF/ISF_Validation/test/test_Sim_ATLFASTII_ttbar.sh b/Simulation/Tests/ISF_Validation/test/test_Sim_ATLFASTII_ttbar.sh
similarity index 100%
rename from Simulation/ISF/ISF_Validation/test/test_Sim_ATLFASTII_ttbar.sh
rename to Simulation/Tests/ISF_Validation/test/test_Sim_ATLFASTII_ttbar.sh
diff --git a/Simulation/ISF/ISF_Validation/test/test_Sim_FullG4_CalibrationHits_pions.sh b/Simulation/Tests/ISF_Validation/test/test_Sim_FullG4_CalibrationHits_pions.sh
similarity index 100%
rename from Simulation/ISF/ISF_Validation/test/test_Sim_FullG4_CalibrationHits_pions.sh
rename to Simulation/Tests/ISF_Validation/test/test_Sim_FullG4_CalibrationHits_pions.sh
diff --git a/Simulation/ISF/ISF_Validation/test/test_Sim_FullG4_CosmicSim.sh b/Simulation/Tests/ISF_Validation/test/test_Sim_FullG4_CosmicSim.sh
similarity index 100%
rename from Simulation/ISF/ISF_Validation/test/test_Sim_FullG4_CosmicSim.sh
rename to Simulation/Tests/ISF_Validation/test/test_Sim_FullG4_CosmicSim.sh
diff --git a/Simulation/ISF/ISF_Validation/test/test_Sim_FullG4_CosmicSimTR.sh b/Simulation/Tests/ISF_Validation/test/test_Sim_FullG4_CosmicSimTR.sh
similarity index 100%
rename from Simulation/ISF/ISF_Validation/test/test_Sim_FullG4_CosmicSimTR.sh
rename to Simulation/Tests/ISF_Validation/test/test_Sim_FullG4_CosmicSimTR.sh
diff --git a/Simulation/ISF/ISF_Validation/test/test_Sim_FullG4_ReproducibilityTest.sh b/Simulation/Tests/ISF_Validation/test/test_Sim_FullG4_ReproducibilityTest.sh
similarity index 100%
rename from Simulation/ISF/ISF_Validation/test/test_Sim_FullG4_ReproducibilityTest.sh
rename to Simulation/Tests/ISF_Validation/test/test_Sim_FullG4_ReproducibilityTest.sh
diff --git a/Simulation/ISF/ISF_Validation/test/test_Sim_FullG4_ZPrime_QuasiStable.sh b/Simulation/Tests/ISF_Validation/test/test_Sim_FullG4_ZPrime_QuasiStable.sh
similarity index 100%
rename from Simulation/ISF/ISF_Validation/test/test_Sim_FullG4_ZPrime_QuasiStable.sh
rename to Simulation/Tests/ISF_Validation/test/test_Sim_FullG4_ZPrime_QuasiStable.sh
diff --git a/Simulation/ISF/ISF_Validation/test/test_Sim_FullG4_minbias.sh b/Simulation/Tests/ISF_Validation/test/test_Sim_FullG4_minbias.sh
similarity index 100%
rename from Simulation/ISF/ISF_Validation/test/test_Sim_FullG4_minbias.sh
rename to Simulation/Tests/ISF_Validation/test/test_Sim_FullG4_minbias.sh
diff --git a/Simulation/ISF/ISF_Validation/test/test_Sim_FullG4_ttbar.sh b/Simulation/Tests/ISF_Validation/test/test_Sim_FullG4_ttbar.sh
similarity index 100%
rename from Simulation/ISF/ISF_Validation/test/test_Sim_FullG4_ttbar.sh
rename to Simulation/Tests/ISF_Validation/test/test_Sim_FullG4_ttbar.sh
diff --git a/Simulation/Tests/OverlayTestsMT/test/test_DataOverlay_MT_Zmumu_8threads_NewConfig.sh b/Simulation/Tests/OverlayTestsMT/test/test_DataOverlay_MT_Zmumu_8threads_NewConfig.sh
index 3cf75227136a7ca499b04ddbab821e3d08e8f259..d6dae28b7f0957aa6dc01e1f94a82d4d7431ab21 100755
--- a/Simulation/Tests/OverlayTestsMT/test/test_DataOverlay_MT_Zmumu_8threads_NewConfig.sh
+++ b/Simulation/Tests/OverlayTestsMT/test/test_DataOverlay_MT_Zmumu_8threads_NewConfig.sh
@@ -35,7 +35,7 @@ if [ $rc -eq 0 ]
 then
     ArtPackage=$1
     ArtJobName=$2
-    art.py compare grid --entries 10 "${ArtPackage}" "${ArtJobName}" --mode=semi-detailed --order-trees --diff-root --excluded-vars mc_event_number
+    art.py compare grid --entries 10 "${ArtPackage}" "${ArtJobName}" --mode=semi-detailed --order-trees --diff-root
     rc2=$?
 fi
 echo  "art-result: $rc2 regression"
diff --git a/Simulation/Tests/OverlayTestsMT/test/test_DataOverlay_Zmumu_ST_vs_MT.sh b/Simulation/Tests/OverlayTestsMT/test/test_DataOverlay_Zmumu_ST_vs_MT.sh
index 1f68c10f82eb731620bd0672727ca6082e69ce4e..1cc409bbd392cf83662c2023b5d44f4dea4482b3 100755
--- a/Simulation/Tests/OverlayTestsMT/test/test_DataOverlay_Zmumu_ST_vs_MT.sh
+++ b/Simulation/Tests/OverlayTestsMT/test/test_DataOverlay_Zmumu_ST_vs_MT.sh
@@ -66,7 +66,7 @@ if [ $rc2 -eq 0 ]
 then
     ArtPackage=$1
     ArtJobName=$2
-    art.py compare grid --entries 10 "${ArtPackage}" "${ArtJobName}" --mode=semi-detailed --order-trees --diff-root --excluded-vars mc_event_number
+    art.py compare grid --entries 10 "${ArtPackage}" "${ArtJobName}" --mode=semi-detailed --order-trees --diff-root
     rc4=$?
 fi
 echo  "art-result: $rc4 regression"
diff --git a/Simulation/Tests/OverlayTestsMT/test/test_MCOverlay_MT_ttbar_4threads.sh b/Simulation/Tests/OverlayTestsMT/test/test_MCOverlay_MT_ttbar_4threads.sh
index dfb82ddfc2e8b43303230c3e954646ecddfc476e..6d84f47cf48a9b822bcde95629b34d9f2c0c7cad 100755
--- a/Simulation/Tests/OverlayTestsMT/test/test_MCOverlay_MT_ttbar_4threads.sh
+++ b/Simulation/Tests/OverlayTestsMT/test/test_MCOverlay_MT_ttbar_4threads.sh
@@ -32,7 +32,7 @@ if [ $rc -eq 0 ]
 then
     ArtPackage=$1
     ArtJobName=$2
-    art.py compare grid --entries 10 "${ArtPackage}" "${ArtJobName}" --mode=semi-detailed --order-trees --diff-root --excluded-vars mc_event_number
+    art.py compare grid --entries 10 "${ArtPackage}" "${ArtJobName}" --mode=semi-detailed --order-trees --diff-root
     rc2=$?
 fi
 echo  "art-result: $rc2 regression"
diff --git a/Simulation/Tests/OverlayTestsMT/test/test_MCOverlay_MT_ttbar_8threads_NewConfig.sh b/Simulation/Tests/OverlayTestsMT/test/test_MCOverlay_MT_ttbar_8threads_NewConfig.sh
index 4cdf44b15b7aebfaf1235f8744e2b1525530d3c8..fb5cc58db0f0ff9b3149187131e7fb08bdcee994 100755
--- a/Simulation/Tests/OverlayTestsMT/test/test_MCOverlay_MT_ttbar_8threads_NewConfig.sh
+++ b/Simulation/Tests/OverlayTestsMT/test/test_MCOverlay_MT_ttbar_8threads_NewConfig.sh
@@ -36,7 +36,7 @@ if [ $rc -eq 0 ]
 then
     ArtPackage=$1
     ArtJobName=$2
-    art.py compare grid --entries 10 "${ArtPackage}" "${ArtJobName}" --mode=semi-detailed --order-trees --diff-root --excluded-vars mc_event_number
+    art.py compare grid --entries 10 "${ArtPackage}" "${ArtJobName}" --mode=semi-detailed --order-trees --diff-root
     rc2=$?
 fi
 echo  "art-result: $rc2 regression"
diff --git a/Simulation/Tests/OverlayTestsMT/test/test_MCOverlay_ttbar_ST_vs_MT.sh b/Simulation/Tests/OverlayTestsMT/test/test_MCOverlay_ttbar_ST_vs_MT.sh
index 68fcff8b11bfbb255b34d335f0f4c7fad2bb1804..0d08ec5a84fe7b79cef8e59ab4f47fe0144ff183 100755
--- a/Simulation/Tests/OverlayTestsMT/test/test_MCOverlay_ttbar_ST_vs_MT.sh
+++ b/Simulation/Tests/OverlayTestsMT/test/test_MCOverlay_ttbar_ST_vs_MT.sh
@@ -60,7 +60,7 @@ if [ $rc2 -eq 0 ]
 then
     ArtPackage=$1
     ArtJobName=$2
-    art.py compare grid --entries 10 "${ArtPackage}" "${ArtJobName}" --mode=semi-detailed --order-trees --diff-root --excluded-vars mc_event_number
+    art.py compare grid --entries 10 "${ArtPackage}" "${ArtJobName}" --mode=semi-detailed --order-trees --diff-root
     rc4=$?
 fi
 echo  "art-result: $rc4 regression"
diff --git a/Simulation/Tests/SimCoreTestsMT/test/test_AtlasG4_CalibationHits_pions_MT.sh b/Simulation/Tests/SimCoreTestsMT/test/test_AtlasG4_CalibationHits_pions_MT.sh
index 0c7a6c982c2152389a59d9ae50e6c88e917add08..9d3b3572539006f08c3200284bc4ddb11c474bbb 100755
--- a/Simulation/Tests/SimCoreTestsMT/test/test_AtlasG4_CalibationHits_pions_MT.sh
+++ b/Simulation/Tests/SimCoreTestsMT/test/test_AtlasG4_CalibationHits_pions_MT.sh
@@ -33,7 +33,7 @@ if [ $rc -eq 0 ]
 then
     ArtPackage=$1
     ArtJobName=$2
-    art.py compare grid --entries 10 ${ArtPackage} ${ArtJobName} --mode=semi-detailed --order-trees --excluded-vars mc_event_number
+    art.py compare grid --entries 10 ${ArtPackage} ${ArtJobName} --mode=semi-detailed --order-trees
     rc2=$?
 fi
 echo  "art-result: $rc2 regression"
diff --git a/Simulation/Tests/SimCoreTestsMT/test/test_AtlasG4_TTbarSim_MT.sh b/Simulation/Tests/SimCoreTestsMT/test/test_AtlasG4_TTbarSim_MT.sh
index a480b9c70bf61e389d8660e5cb9f31153a1adab6..22b66dc5abaf7d42ede9039d2bcbfb696ca76654 100755
--- a/Simulation/Tests/SimCoreTestsMT/test/test_AtlasG4_TTbarSim_MT.sh
+++ b/Simulation/Tests/SimCoreTestsMT/test/test_AtlasG4_TTbarSim_MT.sh
@@ -32,7 +32,7 @@ if [ $rc -eq 0 ]
 then
     ArtPackage=$1
     ArtJobName=$2
-    art.py compare grid --entries 10 ${ArtPackage} ${ArtJobName} --mode=semi-detailed --order-trees --excluded-vars mc_event_number
+    art.py compare grid --entries 10 ${ArtPackage} ${ArtJobName} --mode=semi-detailed --order-trees
     rc2=$?
 fi
 echo  "art-result: $rc2 regression"
diff --git a/Simulation/Tools/HitAnalysis/src/TruthHitAnalysis.cxx b/Simulation/Tools/HitAnalysis/src/TruthHitAnalysis.cxx
index 3ddbc8f0f0656e190650ee08b07a41756a3614d9..278d1b99adbf4c3114d8c96cd22373f2b37fb29f 100755
--- a/Simulation/Tools/HitAnalysis/src/TruthHitAnalysis.cxx
+++ b/Simulation/Tools/HitAnalysis/src/TruthHitAnalysis.cxx
@@ -259,14 +259,14 @@ StatusCode TruthHitAnalysis::execute() {
 	m_h_vtx_r->Fill(r);
 	m_h_vtx_z->Fill(z);
 
-	int bcode = (*vtx)->barcode();
+	int bcode = HepMC::barcode(*vtx);
 	m_vtx_x->push_back(x);
 	m_vtx_y->push_back(y);
 	m_vtx_r->push_back(r);
 	m_vtx_z->push_back(z);
 	m_vtx_barcode->push_back(bcode);
 
-	if ((*vtx)->barcode() > -20000) {
+	if (bcode > -20000) {
 	  m_h_vtx_prim_xy->Fill(x,y);
 	  m_h_vtx_prim_zr->Fill(z,r);
 	  ++nvtx;
diff --git a/TileCalorimeter/TileCalib/TileCalibAlgs/TileCalibAlgs/TileLaserDefaultCalibTool.h b/TileCalorimeter/TileCalib/TileCalibAlgs/TileCalibAlgs/TileLaserDefaultCalibTool.h
index a005d5f418ce1ebd7ceae2f563dd55a670d4a223..df76144343b301fe6909f59275a36fcaffe7ca78 100644
--- a/TileCalorimeter/TileCalib/TileCalibAlgs/TileCalibAlgs/TileLaserDefaultCalibTool.h
+++ b/TileCalorimeter/TileCalib/TileCalibAlgs/TileCalibAlgs/TileLaserDefaultCalibTool.h
@@ -1,5 +1,5 @@
 /*
-  Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
 #ifndef TILECALIBALG_TILELASERDEFAULTCALIBTOOL_H
@@ -14,7 +14,6 @@
 #include "TileConditions/ITileDCSTool.h"
 #include "TileEvent/TileRawChannelContainer.h"
 #include "TileEvent/TileLaserObject.h"
-#include "TileConditions/ITileBadChanTool.h"
 #include "TileMonitoring/ITileStuckBitsProbsTool.h"
 
 
diff --git a/TileCalorimeter/TileCalib/TileCalibAlgs/src/TileDigiNoiseCalibAlg.cxx b/TileCalorimeter/TileCalib/TileCalibAlgs/src/TileDigiNoiseCalibAlg.cxx
index 44c26aa9810aea394d12cfb64a0a17fba86698a0..c7cce53a8a00d37773694d09d9177ba1524f7aae 100644
--- a/TileCalorimeter/TileCalib/TileCalibAlgs/src/TileDigiNoiseCalibAlg.cxx
+++ b/TileCalorimeter/TileCalib/TileCalibAlgs/src/TileDigiNoiseCalibAlg.cxx
@@ -1,5 +1,5 @@
 /*
-  Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
 // ********************************************************************
@@ -23,7 +23,6 @@
 
 // Tile includes
 #include "TileCalibAlgs/TileDigiNoiseCalibAlg.h"
-#include "TileEvent/TileRawChannelContainer.h"
 #include "CaloIdentifier/TileID.h"
 #include "TileIdentifier/TileHWID.h"
 #include "TileEvent/TileDigitsContainer.h"
diff --git a/TileCalorimeter/TileCalib/TileCalibAlgs/src/TileRawChNoiseCalibAlg.cxx b/TileCalorimeter/TileCalib/TileCalibAlgs/src/TileRawChNoiseCalibAlg.cxx
index 7ca07f53e033039b5c71982b6933fce8f1938093..f2b4166b7fc03e110fbe651bd28496d8b618a88a 100644
--- a/TileCalorimeter/TileCalib/TileCalibAlgs/src/TileRawChNoiseCalibAlg.cxx
+++ b/TileCalorimeter/TileCalib/TileCalibAlgs/src/TileRawChNoiseCalibAlg.cxx
@@ -1,5 +1,5 @@
 /*
-  Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
 // ****** **************************************************************
@@ -26,7 +26,6 @@
 #include "TileEvent/TileRawChannelContainer.h"
 #include "CaloIdentifier/TileID.h"
 #include "TileIdentifier/TileHWID.h"
-#include "TileCalibBlobObjs/TileCalibUtils.h"
 #include "TileEvent/TileDigitsContainer.h"
 #include "TileEvent/TileBeamElemContainer.h"
 #include "TileByteStream/TileBeamElemContByteStreamCnv.h"
diff --git a/TileCalorimeter/TileCalib/TileCalibBlobObjs/test/TileCalibDrawerFlt_test.cxx b/TileCalorimeter/TileCalib/TileCalibBlobObjs/test/TileCalibDrawerFlt_test.cxx
index 681a89b118f42f99ee23688e9e0255c46685bc40..d52dc9756133e150fcea438d5f9aa8cc0c8ebe2d 100644
--- a/TileCalorimeter/TileCalib/TileCalibBlobObjs/test/TileCalibDrawerFlt_test.cxx
+++ b/TileCalorimeter/TileCalib/TileCalibBlobObjs/test/TileCalibDrawerFlt_test.cxx
@@ -4,10 +4,10 @@
 
 #undef NDEBUG
 
-//=== AttributeList
-//#include "CoralBase/Attribute.h"
+
 #include "TileCalibBlobObjs/TileCalibDrawerFlt.h"
 
+//=== AttributeList
 #include "CoralBase/Blob.h"
 #include "CoralBase/Attribute.h"
 #include "CoralBase/AttributeList.h"
diff --git a/TileCalorimeter/TileCalib/TileCalibBlobObjs/test/TileCalibDrawerOfc_test.cxx b/TileCalorimeter/TileCalib/TileCalibBlobObjs/test/TileCalibDrawerOfc_test.cxx
index f7d0b2a95ec7ad9b76c0010a5aa50b4e03c0ea1b..acb2b7caf826ce32b73135a0bbf0193d8aea5ad4 100644
--- a/TileCalorimeter/TileCalib/TileCalibBlobObjs/test/TileCalibDrawerOfc_test.cxx
+++ b/TileCalorimeter/TileCalib/TileCalibBlobObjs/test/TileCalibDrawerOfc_test.cxx
@@ -4,10 +4,10 @@
 
 #undef NDEBUG
 
-//=== AttributeList
-//#include "CoralBase/Attribute.h"
+
 #include "TileCalibBlobObjs/TileCalibDrawerOfc.h"
 
+//=== AttributeList
 #include "CoralBase/Blob.h"
 #include "CoralBase/Attribute.h"
 #include "CoralBase/AttributeList.h"
diff --git a/TileCalorimeter/TileMonitoring/root/include/tile_monitor.h b/TileCalorimeter/TileMonitoring/root/include/tile_monitor.h
index 3fdc65f75053d7d5798289f8a5ae57c466207f87..dad9b24383625e8e97e75c46c23afcb704996f61 100644
--- a/TileCalorimeter/TileMonitoring/root/include/tile_monitor.h
+++ b/TileCalorimeter/TileMonitoring/root/include/tile_monitor.h
@@ -1,5 +1,5 @@
 /*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
 #include <iostream>
@@ -18,7 +18,6 @@
 #include "TGraph.h"
 #include "TH1.h"
 #include "TH2.h"
-#include "TGraph.h"
 #include "TProfile.h"
 #include "TCanvas.h"
 #include "TStyle.h"
diff --git a/TileCalorimeter/TileMonitoring/src/TileDQFragMonTool.cxx b/TileCalorimeter/TileMonitoring/src/TileDQFragMonTool.cxx
index ed40bdaaf23dce8be1643f9aa4acf4c4da633120..32a248f6c7689115d71f97bfe4a73729e84ffae5 100644
--- a/TileCalorimeter/TileMonitoring/src/TileDQFragMonTool.cxx
+++ b/TileCalorimeter/TileMonitoring/src/TileDQFragMonTool.cxx
@@ -1,5 +1,5 @@
 /*
-  Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
 // ********************************************************************
@@ -27,7 +27,6 @@
 #include "TVirtualPad.h"
 #include "TCanvas.h"
 #include "TStyle.h"
-#include "TCanvas.h"
 #include "TLatex.h"
 
 #include <iostream>
diff --git a/TileCalorimeter/TileRawUtils/src/RegSelCondAlg_Tile.cxx b/TileCalorimeter/TileRawUtils/src/RegSelCondAlg_Tile.cxx
index 6c99ac143f037ecc78718a18ba88f978432fc408..3d567db0f39f9f2d8072a196a7cd021dd5e4d2a8 100644
--- a/TileCalorimeter/TileRawUtils/src/RegSelCondAlg_Tile.cxx
+++ b/TileCalorimeter/TileRawUtils/src/RegSelCondAlg_Tile.cxx
@@ -34,9 +34,6 @@
 #include "TileByteStream/TileROD_Decoder.h"
 #include "TileRawUtils/TileRegionSelectorTable.h"
 
-
-#include "RegionSelector/RegionSelectorLUT.h" 
-
 #include <cmath>
 #include <iostream>
 #include <string>
diff --git a/TileCalorimeter/TileRecUtils/src/TileRawChannelBuilderMF.cxx b/TileCalorimeter/TileRecUtils/src/TileRawChannelBuilderMF.cxx
index b67c1087fb72cea2882577f95a7db03fb6915034..cd970008cd3ef54fcc5f06696745cbd146f3e546 100644
--- a/TileCalorimeter/TileRecUtils/src/TileRawChannelBuilderMF.cxx
+++ b/TileCalorimeter/TileRecUtils/src/TileRawChannelBuilderMF.cxx
@@ -1,5 +1,5 @@
 /*
-  Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
 // small hack to enable datapool usage
@@ -11,7 +11,6 @@
 #include "CaloIdentifier/TileID.h"
 #include "TileIdentifier/TileHWID.h"
 #include "TileConditions/TileInfo.h"
-#include "CLHEP/Matrix/Matrix.h"
 
 // Atlas includes
 #include "AthAllocators/DataPool.h"
diff --git a/TileCalorimeter/TileSimAlgs/TileSimAlgs/TileDigitsMaker.h b/TileCalorimeter/TileSimAlgs/TileSimAlgs/TileDigitsMaker.h
index 917cd1d3b4ac5345c27dff9cf1a993db03a978f9..c7d4db9e4d27360548be91a0f8a9487d0d3cf44a 100644
--- a/TileCalorimeter/TileSimAlgs/TileSimAlgs/TileDigitsMaker.h
+++ b/TileCalorimeter/TileSimAlgs/TileSimAlgs/TileDigitsMaker.h
@@ -1,5 +1,5 @@
 /*
-  Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
 //****************************************************************************
@@ -47,7 +47,6 @@
 #include "GaudiKernel/ServiceHandle.h"
 
 #include "CLHEP/Random/RandomEngine.h"
-#include "TileEvent/TileHitContainer.h"
 
 
 class IAthRNGSvc;
diff --git a/TileCalorimeter/TileSimAlgs/src/TileHitVecToCntTool.cxx b/TileCalorimeter/TileSimAlgs/src/TileHitVecToCntTool.cxx
index 15435c61d4fb3068df69275efc3908ba95427662..5686d6136f480c517040c948c299e5a20511516f 100644
--- a/TileCalorimeter/TileSimAlgs/src/TileHitVecToCntTool.cxx
+++ b/TileCalorimeter/TileSimAlgs/src/TileHitVecToCntTool.cxx
@@ -1,5 +1,5 @@
 /*
-  Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 */
 
 //************************************************************
@@ -30,7 +30,6 @@
 // For the Athena-based random numbers.
 #include "AthenaKernel/IAthRNGSvc.h"
 #include "AthenaKernel/RNGWrapper.h"
-#include "CLHEP/Random/RandomEngine.h"
 
 #include "CLHEP/Random/Randomize.h"
 #include "CLHEP/Random/RandomEngine.h"
diff --git a/TileCalorimeter/TileSvc/TileByteStream/src/TileDigitsContByteStreamCnv.cxx b/TileCalorimeter/TileSvc/TileByteStream/src/TileDigitsContByteStreamCnv.cxx
index 12b172bee4b213e6e5d9d37e395717f9459f0ef1..0df2ace0895bd7e8654dbc82089bf508cfea065e 100644
--- a/TileCalorimeter/TileSvc/TileByteStream/src/TileDigitsContByteStreamCnv.cxx
+++ b/TileCalorimeter/TileSvc/TileByteStream/src/TileDigitsContByteStreamCnv.cxx
@@ -31,7 +31,6 @@
 #include "TileByteStream/TileROD_Decoder.h"
 #include "TileByteStream/TileHid2RESrcID.h"
 #include "TileEvent/TileDigitsContainer.h"
-#include "TileByteStream/TileROD_Decoder.h"
 
 #include <vector> 
 #include <string> 
diff --git a/TileCalorimeter/TileSvc/TileByteStream/src/TileLaserObjByteStreamCnv.cxx b/TileCalorimeter/TileSvc/TileByteStream/src/TileLaserObjByteStreamCnv.cxx
index a2dd4d27410bdd76c1075d065558eb5a63b0eb02..e3534fc9583adb5c347752477a9e4af4901c15a5 100644
--- a/TileCalorimeter/TileSvc/TileByteStream/src/TileLaserObjByteStreamCnv.cxx
+++ b/TileCalorimeter/TileSvc/TileByteStream/src/TileLaserObjByteStreamCnv.cxx
@@ -31,7 +31,6 @@
 #include "TileByteStream/TileHid2RESrcID.h"
 #include "TileIdentifier/TileTBFrag.h"
 #include "TileEvent/TileLaserObject.h"
-#include "TileByteStream/TileROD_Decoder.h"
 
 
 
diff --git a/Tools/FullChainTransforms/share/FastChainSkeleton.EVGENtoRDO.py b/Tools/FullChainTransforms/share/FastChainSkeleton.EVGENtoRDO.py
index 505f47e52d4cb523bea817c3612bdcdaa59515c8..093b50164225e1b87f7431eb9b374a2a08abf385 100644
--- a/Tools/FullChainTransforms/share/FastChainSkeleton.EVGENtoRDO.py
+++ b/Tools/FullChainTransforms/share/FastChainSkeleton.EVGENtoRDO.py
@@ -76,7 +76,7 @@ if hasattr(runArgs,"beamType"):
 
 # Avoid command line preInclude for event service
 if hasattr(runArgs, "eventService") and runArgs.eventService:
-    include('AthenaMP/AthenaMP_EventService.py')
+    import AthenaMP.EventService
 
 
 #####################Back to Skeleton.EVGENtoHIT.py######################
diff --git a/Tools/FullChainTransforms/share/skeleton.EVGENtoRDO.py b/Tools/FullChainTransforms/share/skeleton.EVGENtoRDO.py
index 164ce11c7e119337aeadbf03d81ad32ab2ca0f31..f06ed74dc4777a14157b33afdf65f09c75962b22 100644
--- a/Tools/FullChainTransforms/share/skeleton.EVGENtoRDO.py
+++ b/Tools/FullChainTransforms/share/skeleton.EVGENtoRDO.py
@@ -116,7 +116,7 @@ if jobproperties.Beam.beamType.get_Value() != 'cosmics':
 
 # Avoid command line preInclude for event service
 if hasattr(runArgs, "eventService") and runArgs.eventService:
-    include('AthenaMP/AthenaMP_EventService.py')
+    import AthenaMP.EventService
 
 from ISF_Config.ISF_jobProperties import ISF_Flags
 if jobproperties.Beam.beamType.get_Value() == 'cosmics':
diff --git a/Tools/PROCTools/data/master_q431_AOD_digest.ref b/Tools/PROCTools/data/master_q431_AOD_digest.ref
index 8c22a955a48c87ddbd2b92af40161cc98ec46160..409bddb5d77767f87168ed0676328515813e2329 100644
--- a/Tools/PROCTools/data/master_q431_AOD_digest.ref
+++ b/Tools/PROCTools/data/master_q431_AOD_digest.ref
@@ -2,16 +2,16 @@
       330470  1183722158           1           0           0           0
       330470  1183722342         394         429          16           0
       330470  1183727953         532         588          11           4
-      330470  1183732647         467         485          12           1
+      330470  1183732647         467         484          12           1
       330470  1183733040         381         288           6           1
       330470  1183734651         361         368          14           3
       330470  1183735332         406         386           9           1
       330470  1183736475         741         686          15           1
       330470  1183738728           1           0           0           0
-      330470  1183738949         368         445           9           1
+      330470  1183738949         368         444           9           1
       330470  1183742489         152         127           2           1
       330470  1183743040         285         326           5           0
-      330470  1183746343         492         495          12           0
+      330470  1183746343         492         495          12           1
       330470  1183746710           6           0           0           0
       330470  1183751782         239         246           2           0
       330470  1183752624         347         366           7           3
diff --git a/Tools/PyJobTransforms/share/skeleton.EVNTMerge.py b/Tools/PyJobTransforms/share/skeleton.EVNTMerge.py
index 237269adfef2cf6d884997d28fd8910bf515198a..8767b681f5c42cc8cc5fd34230e2e1efb95cc12e 100644
--- a/Tools/PyJobTransforms/share/skeleton.EVNTMerge.py
+++ b/Tools/PyJobTransforms/share/skeleton.EVNTMerge.py
@@ -33,7 +33,7 @@ if hasattr(runArgs, "preInclude"):
 
 # Avoid command line preInclude for Event Service
 if hasattr(runArgs, "eventService") and runArgs.eventService:
-    include('AthenaMP/AthenaMP_EventService.py')
+    import AthenaMP.EventService
         
 ## Post-include
 if hasattr(runArgs, "postInclude"):
diff --git a/Tracking/TrkExtrapolation/TrkExTools/src/MultipleScatteringUpdator.cxx b/Tracking/TrkExtrapolation/TrkExTools/src/MultipleScatteringUpdator.cxx
index 1478dc36f732f7cbdec2b19f292e2952223ebc4a..225bac7bcb1b6cbc9e7330cf72e3657a3f9f2c2f 100755
--- a/Tracking/TrkExtrapolation/TrkExTools/src/MultipleScatteringUpdator.cxx
+++ b/Tracking/TrkExtrapolation/TrkExTools/src/MultipleScatteringUpdator.cxx
@@ -92,7 +92,6 @@ Trk::MultipleScatteringUpdator::initialize() {
   } else {
     ATH_MSG_VERBOSE("Gaussian mixture model = OFF");
   }
-  ATH_MSG_INFO("initialize() successful");
 
   return StatusCode::SUCCESS;
 }
diff --git a/Tracking/TrkTools/TrkTruthCreatorTools/src/DetailedTrackTruthBuilder.cxx b/Tracking/TrkTools/TrkTruthCreatorTools/src/DetailedTrackTruthBuilder.cxx
index 1d06f4241cc951e0d09a077401eab24436a3a216..89f49bf63f8084430aa8700f8b5165f5b22c4842 100755
--- a/Tracking/TrkTools/TrkTruthCreatorTools/src/DetailedTrackTruthBuilder.cxx
+++ b/Tracking/TrkTools/TrkTruthCreatorTools/src/DetailedTrackTruthBuilder.cxx
@@ -420,7 +420,7 @@ void DetailedTrackTruthBuilder::addTrack(DetailedTrackTruthCollection *output,
     TruthTrajectory traj;
     traj.reserve(2); // The average size is about 1.05.  Hardcode that instead of using slow list::size().
     for(Sprout::const_iterator ppart=s->second.begin(); ppart!=s->second.end(); ppart++) {
-      traj.push_back(HepMcParticleLink(ExtendedEventIndex(s->first, proxy).makeLink((*ppart)->barcode(), proxy)));
+      traj.push_back(HepMcParticleLink(ExtendedEventIndex(s->first, proxy).makeLink(HepMC::barcode(*ppart), proxy)));
     }
 
     // Count PRDs on the TruthTrajectory
diff --git a/Tracking/TrkTools/TrkTruthToTrack/src/TruthTrackRecordToTrack.cxx b/Tracking/TrkTools/TrkTruthToTrack/src/TruthTrackRecordToTrack.cxx
index c8ebc03cfda152e32b9cf9320ebdf756eb42ae9e..d1e8e7209f594771efd3ba05160e20915471c05a 100755
--- a/Tracking/TrkTools/TrkTruthToTrack/src/TruthTrackRecordToTrack.cxx
+++ b/Tracking/TrkTools/TrkTruthToTrack/src/TruthTrackRecordToTrack.cxx
@@ -90,13 +90,13 @@ const Trk::TrackParameters* Trk::TruthTrackRecordToTrack::makeProdVertexParamete
 
   for (TrackRecordCollection::const_iterator record = recordCollection->begin();  record != recordCollection->end();++record){
           
-    if ( (*record).GetBarCode() == part->barcode() ) {
+    if ( (*record).GetBarCode() == HepMC::barcode(part) ) {
 
       id = (*record).GetPDGCode();
       pd = m_particleDataTable->particle(std::abs(id));
       if (!pd) {
         ATH_MSG_WARNING ("found barcode but could not digest pdg_id. " <<
-                         part->barcode() << " , " << id);
+                         HepMC::barcode(part) << " , " << id);
         continue;
       }
 
diff --git a/Tracking/TrkValidation/TrkValAlgs/src/TrackValidationNtupleWriter.cxx b/Tracking/TrkValidation/TrkValAlgs/src/TrackValidationNtupleWriter.cxx
index 8b5204b6b857e3f8ab20ea7643e220b27a29d87c..bd4bb2b95a9e0c10c15bd6113bbb80b10aab13b4 100644
--- a/Tracking/TrkValidation/TrkValAlgs/src/TrackValidationNtupleWriter.cxx
+++ b/Tracking/TrkValidation/TrkValAlgs/src/TrackValidationNtupleWriter.cxx
@@ -635,7 +635,7 @@ StatusCode Trk::TrackValidationNtupleWriter::writeTrackData(unsigned int trackCo
                   if (matchedPartIter == truthData.end()) {
                     // did not find particle in list of selected particles
                     truthIndex = -1;
-                    if (msgLvl(MSG::VERBOSE)) msg(MSG::VERBOSE) << "Matched particle with barcode " << genParticle->barcode() << " is not in list of selected particles" << endmsg;
+                    if (msgLvl(MSG::VERBOSE)) msg(MSG::VERBOSE) << "Matched particle with barcode " << HepMC::barcode(genParticle) << " is not in list of selected particles" << endmsg;
                     if ( genParticle->production_vertex() ) {
                       newTrackPerigee = m_truthToTrack->makePerigeeParameters( genParticle );
                       generatedTrackPerigee = newTrackPerigee;
diff --git a/Tracking/TrkValidation/TrkVertexFitterValidationTools/src/McEventNtupleTool.cxx b/Tracking/TrkValidation/TrkVertexFitterValidationTools/src/McEventNtupleTool.cxx
index 50bcee860a7c3aa13f6665fd6cf81650a1b05c52..5b27ed4b5f887a769d102f68f1682a86ee655160 100755
--- a/Tracking/TrkValidation/TrkVertexFitterValidationTools/src/McEventNtupleTool.cxx
+++ b/Tracking/TrkValidation/TrkVertexFitterValidationTools/src/McEventNtupleTool.cxx
@@ -153,7 +153,7 @@ StatusCode Trk::McEventNtupleTool::fillMcEventData(const HepMC::GenEvent& myEven
 
              if(fabs(lv_pos.perp() - pv_r)<m_radiusRes  && fabs(lv_pos.z() - pv_z)<m_zPosRes)
              {
-               pv_vtx_ids.insert(std::make_pair(par_vert->barcode(),par_vert));
+               pv_vtx_ids.insert(std::make_pair(HepMC::barcode(par_vert),par_vert));
              } else {
                 bool new_sec_vtx = true;
                 //loop over all entries in sec_vtx_ids_vec = vector of sec_vtx_map's
@@ -172,7 +172,7 @@ StatusCode Trk::McEventNtupleTool::fillMcEventData(const HepMC::GenEvent& myEven
 		    // if vertex position are nearly the same, store this vertex in the map
                     if(fabs(lv_pos.perp() - sec_r)< m_radiusRes && fabs(lv_pos.z() - sec_z)<m_zPosRes)
                     {
-                       sec_vtx_map.insert(std::make_pair(par_vert->barcode(),par_vert));
+                       sec_vtx_map.insert(std::make_pair(HepMC::barcode(par_vert),par_vert));
                        new_sec_vtx = false;
                        break;
                     }
diff --git a/Tracking/TrkVertexFitter/TrkVertexSeedFinderTools/src/MCTrueSeedFinder.cxx b/Tracking/TrkVertexFitter/TrkVertexSeedFinderTools/src/MCTrueSeedFinder.cxx
index 2c8b3934c895150188c13f7927214e4e7192a453..e7371f4dc6d10933c9195d3552bd983bf7427e3e 100755
--- a/Tracking/TrkVertexFitter/TrkVertexSeedFinderTools/src/MCTrueSeedFinder.cxx
+++ b/Tracking/TrkVertexFitter/TrkVertexSeedFinderTools/src/MCTrueSeedFinder.cxx
@@ -188,7 +188,7 @@ namespace Trk
 
     bool isEmpty = ( evt->particles_size() == 0 );
     bool isDummy = ( ( evt->event_number() == -1 ) &&
-		     ( evt->signal_process_id() == 0 ) );
+		     ( HepMC::signal_process_id(evt) == 0 ) );
     if( isDummy ) isEmpty = false;
 
     if( isEmpty ) return false;
@@ -208,7 +208,7 @@ namespace Trk
     int gotzero = 1;
     for( ; iter != end; ++iter ) {
       if( ( ( ( *iter )->event_number() == -1 ) &&
-            ( ( *iter )->signal_process_id() == 0 ) ) ) {
+            ( HepMC::signal_process_id( *iter ) == 0 ) ) ) {
 	++gotzero;
       }
       if( evt == *iter ) break;
diff --git a/Trigger/TrigAnalysis/TrigDecisionTool/TrigDecisionTool/DecisionAccess.icc b/Trigger/TrigAnalysis/TrigDecisionTool/TrigDecisionTool/DecisionAccess.icc
index 57bfddb4848192c2a7de452ac0c016f9b4394a08..6debe8b61bc56916644a8f457879508637b98deb 100644
--- a/Trigger/TrigAnalysis/TrigDecisionTool/TrigDecisionTool/DecisionAccess.icc
+++ b/Trigger/TrigAnalysis/TrigDecisionTool/TrigDecisionTool/DecisionAccess.icc
@@ -119,12 +119,12 @@ Trig::DecisionAccess::associateToEventView(SG::ReadHandle<CONTAINER>& inViewCont
     if (!accessor.isAvailable(**it)) { // iterator dereferences to an OBJECT*, OBJECT* dereferences to an OBJECT
       if (isFullscan) {
         ATH_MSG_DEBUG("Supplied container has no viewIndex decoration. But isFullscan flag is TRUE. "
-          << "Returning iterators over whole container.");
-        return std::make_pair(container->begin(), container->end());
+          << "Returning a pair of iterators over the whole of the container.");
+        return std::make_pair(container->begin(), end);
       } else {
-        ATH_MSG_ERROR("Unable to read the viewIndex decoration from the supplied container. Was it created inside an EventView?");
-        throw std::runtime_error("Trig::DecisionAccess::associateToEventView Unable to read the viewIndex decoration from the "
-          "supplied container. Was it created inside an EventView?");
+        ATH_MSG_WARNING("Unable to read the viewIndex decoration from the supplied container. Was it created inside an EventView? "
+          << "Returning a pair of iterators from the end of the container.");
+        return std::make_pair(end, end);
       }
     }
     const ElementLink<TrigRoiDescriptorCollection> objectROI = accessor( **it );
diff --git a/Trigger/TrigAnalysis/TrigInDetAnalysisUtils/TrigInDetAnalysisUtils/T_AnalysisConfig.h b/Trigger/TrigAnalysis/TrigInDetAnalysisUtils/TrigInDetAnalysisUtils/T_AnalysisConfig.h
index d4b82488aa9f2e6306ebd469f15daac4042c6466..fd07e763078463e7d5f4a2e88696a6399b90d5a9 100644
--- a/Trigger/TrigAnalysis/TrigInDetAnalysisUtils/TrigInDetAnalysisUtils/T_AnalysisConfig.h
+++ b/Trigger/TrigAnalysis/TrigInDetAnalysisUtils/TrigInDetAnalysisUtils/T_AnalysisConfig.h
@@ -322,6 +322,10 @@ protected:
 
     SG::ReadHandle<Collection> handle(key);
 
+    CLID checkCLID;
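+    // keyToString() resolves the hashed StoreGate key of the ROI ElementLink back to its string name, purely for this debug message.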
+    const std::string* keyStr = m_provider->evtStore()->keyToString(roi_link.key(), checkCLID);
+    m_provider->msg(MSG::DEBUG) << "Requesting range over (" << key << ") associated to ROI from " << (keyStr == nullptr ? "UNKNOWN" : *keyStr) << endmsg;
+
     itrpair = (*m_tdt)->associateToEventView( handle, roi_link );
 
     return itrpair;
@@ -463,6 +467,10 @@ protected:
 
     SG::ReadHandle<Collection> handle(key);
 
+    CLID checkCLID;
+    const std::string* keyStr = m_provider->evtStore()->keyToString(roi_link.key(), checkCLID);
+    m_provider->msg(MSG::DEBUG) << "Requesting range over (" << key << ") associated to ROI from " << (keyStr == nullptr ? "UNKNOWN" : *keyStr) << endmsg;
+
     itrpair = (*m_tdt)->associateToEventView( handle, roi_link );
 
     if ( itrpair.first != itrpair.second ) {
diff --git a/Trigger/TrigCost/TrigCostAnalysis/CMakeLists.txt b/Trigger/TrigCost/TrigCostAnalysis/CMakeLists.txt
index 72339d0aa2bb808e1ff0ee04ee9defcc4c96002f..ee3c05970eb0f780aa803f683215826687f8b569 100644
--- a/Trigger/TrigCost/TrigCostAnalysis/CMakeLists.txt
+++ b/Trigger/TrigCost/TrigCostAnalysis/CMakeLists.txt
@@ -9,7 +9,7 @@ find_package( ROOT COMPONENTS Core Hist RIO )
 # Athena algorithm to do cost analysis and produce histograms
 atlas_add_component( TrigCostAnalysis
                      src/*.cxx src/monitors/*.cxx src/counters/*.cxx src/components/TrigCostAnalysis_entries.cxx
-                     PRIVATE_LINK_LIBRARIES ${ROOT_LIBRARIES} GaudiKernel AthAnalysisBaseCompsLib TrigDecisionToolLib EnhancedBiasWeighterLib xAODEventInfo PathResolver TrigConfData )
+                     PRIVATE_LINK_LIBRARIES ${ROOT_LIBRARIES} GaudiKernel AthAnalysisBaseCompsLib TrigDataAccessMonitoringLib TrigDecisionToolLib EnhancedBiasWeighterLib xAODEventInfo PathResolver TrigConfData )
 
 # Small helper library used by trigCostHistToCSV
 atlas_add_library( TrigCostAnalysisLib
@@ -25,3 +25,4 @@ atlas_add_executable( trigCostHistToCSV
 
 atlas_install_joboptions( share/TrigCostAnalysis_JobOptions.py )
 atlas_install_scripts( share/RunTrigCostAnalysis.py POST_BUILD_CMD ${ATLAS_FLAKE8} )
+atlas_install_python_modules( python/*.py )
diff --git a/Trigger/TrigCost/TrigCostAnalysis/python/ROSToROB.py b/Trigger/TrigCost/TrigCostAnalysis/python/ROSToROB.py
new file mode 100644
index 0000000000000000000000000000000000000000..25f041386c58c4ee14139e8fa0cbc066f3b8b5bd
--- /dev/null
+++ b/Trigger/TrigCost/TrigCostAnalysis/python/ROSToROB.py
@@ -0,0 +1,2231 @@
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
+
+'''
+@file ROSToROB.py
+@brief Store the ROS to ROB map extracted from /atlas/oks/tdaq-09-02-01/combined/partitions/ATLAS.data.xml
+'''
+
+ros2rob = {
+	'ROS-TDQ-CTP-00': 
+	[
+		0x770000 
+	] ,
+	'ROS-TDQ-CALPP-00': 
+	[
+		0x710000 ,
+		0x710010 ,
+		0x710020 ,
+		0x710030 ,
+		0x710001 ,
+		0x710011 ,
+		0x710021 ,
+		0x710031 ,
+		0x710002 ,
+		0x710012 ,
+		0x710022 ,
+		0x710032 
+	] ,
+	'ROS-TDQ-CALPP-01': 
+	[
+		0x710003 ,
+		0x710013 ,
+		0x710023 ,
+		0x710033 ,
+		0x710004 ,
+		0x710014 ,
+		0x710024 ,
+		0x710034 ,
+		0x710005 ,
+		0x710015 ,
+		0x710025 ,
+		0x710035 ,
+		0x710006 ,
+		0x710016 ,
+		0x710026 ,
+		0x710036 ,
+		0x710007 ,
+		0x710017 ,
+		0x710027 ,
+		0x710037 
+	] ,
+	'ROS-TDQ-CALCJ-00': 
+	[
+		0x720008 ,
+		0x720028 ,
+		0x720009 ,
+		0x720029 ,
+		0x72000a ,
+		0x72002a ,
+		0x72000b ,
+		0x72002b ,
+		0x730088 ,
+		0x730089 ,
+		0x73008a ,
+		0x73008b ,
+		0x74000c ,
+		0x74001c ,
+		0x74002c ,
+		0x74003c ,
+		0x74000d ,
+		0x74001d ,
+		0x74002d ,
+		0x74003d ,
+		0x75008c ,
+		0x75008d 
+	] ,
+	'ROS-TDQ-TOPO-00': 
+	[
+		0x910000 ,
+		0x910010 
+	] ,
+	'ROS-IBL-B-00': 
+	[
+		0x140060 ,
+		0x140061 ,
+		0x140062 ,
+		0x140063 ,
+		0x140070 ,
+		0x140071 ,
+		0x140072 ,
+		0x140073 ,
+		0x140080 ,
+		0x140081 ,
+		0x140082 ,
+		0x140083 ,
+		0x140090 ,
+		0x140091 ,
+		0x140092 ,
+		0x140093 ,
+		0x140100 ,
+		0x140101 ,
+		0x140102 ,
+		0x140103 ,
+		0x140110 ,
+		0x140111 ,
+		0x140112 ,
+		0x140113 
+	] ,
+	'ROS-IBL-B-01': 
+	[
+		0x140120 ,
+		0x140121 ,
+		0x140122 ,
+		0x140123 ,
+		0x140140 ,
+		0x140141 ,
+		0x140142 ,
+		0x140143 ,
+		0x140150 ,
+		0x140151 ,
+		0x140152 ,
+		0x140153 ,
+		0x140160 ,
+		0x140161 ,
+		0x140162 ,
+		0x140163 ,
+		0x140170 ,
+		0x140171 ,
+		0x140172 ,
+		0x140173 ,
+		0x140180 ,
+		0x140181 ,
+		0x140182 ,
+		0x140183 
+	] ,
+	'ROS-IBL-B-02': 
+	[
+		0x140190 ,
+		0x140191 ,
+		0x140192 ,
+		0x140193 ,
+		0x140200 ,
+		0x140201 ,
+		0x140202 ,
+		0x140203 ,
+		0x150210 ,
+		0x150211 ,
+		0x150212 ,
+		0x150213 
+	] ,
+	'ROS-PIX-BL-00': 
+	[
+		0x130108 ,
+		0x130151 ,
+		0x130111 ,
+		0x130112 ,
+		0x130152 ,
+		0x130156 ,
+		0x130116 ,
+		0x130117 ,
+		0x130157 ,
+		0x130160 ,
+		0x130120 ,
+		0x130250 ,
+		0x130210 ,
+		0x130211 ,
+		0x130251 ,
+		0x130252 ,
+		0x130121 ,
+		0x130161 ,
+		0x130145 ,
+		0x130105 ,
+		0x130216 ,
+		0x130256 
+	] ,
+	'ROS-PIX-BL-01': 
+	[
+		0x130212 ,
+		0x130214 ,
+		0x130254 ,
+		0x130255 ,
+		0x130215 ,
+		0x130106 ,
+		0x130146 ,
+		0x130149 ,
+		0x130109 ,
+		0x130110 ,
+		0x130150 ,
+		0x130147 ,
+		0x130107 ,
+		0x130148 ,
+		0x130154 ,
+		0x130114 ,
+		0x130115 ,
+		0x130155 ,
+		0x130158 ,
+		0x130118 ,
+		0x130119 ,
+		0x130159 
+	] ,
+	'ROS-PIX-B-00': 
+	[
+		0x111708 ,
+		0x111710 ,
+		0x111712 ,
+		0x111748 ,
+		0x111750 ,
+		0x111752 ,
+		0x112517 ,
+		0x112519 ,
+		0x112557 ,
+		0x112559 ,
+		0x111715 ,
+		0x111717 ,
+		0x111755 ,
+		0x111812 ,
+		0x111850 ,
+		0x111852 ,
+		0x112406 ,
+		0x112408 ,
+		0x112446 ,
+		0x112521 ,
+		0x112561 
+	] ,
+	'ROS-PIX-B-01': 
+	[
+		0x111707 ,
+		0x111709 ,
+		0x111711 ,
+		0x111747 ,
+		0x111749 ,
+		0x111751 ,
+		0x112516 ,
+		0x112518 ,
+		0x112556 ,
+		0x112558 ,
+		0x111714 ,
+		0x111716 ,
+		0x111754 ,
+		0x111811 ,
+		0x111849 ,
+		0x111851 ,
+		0x112405 ,
+		0x112407 ,
+		0x112445 ,
+		0x112520 ,
+		0x112560 
+	] ,
+	'ROS-PIX-B-02': 
+	[
+		0x111721 ,
+		0x111759 ,
+		0x111761 ,
+		0x111819 ,
+		0x111821 ,
+		0x111859 ,
+		0x111861 ,
+		0x112419 ,
+		0x112421 ,
+		0x112459 ,
+		0x112461 ,
+		0x111706 ,
+		0x111746 ,
+		0x111806 ,
+		0x111808 ,
+		0x111810 ,
+		0x111846 ,
+		0x111848 ,
+		0x112512 ,
+		0x112515 ,
+		0x112552 ,
+		0x112555 
+	] ,
+	'ROS-PIX-B-03': 
+	[
+		0x111720 ,
+		0x111758 ,
+		0x111760 ,
+		0x111818 ,
+		0x111820 ,
+		0x111858 ,
+		0x111860 ,
+		0x112418 ,
+		0x112420 ,
+		0x112458 ,
+		0x112460 ,
+		0x111705 ,
+		0x111745 ,
+		0x111805 ,
+		0x111807 ,
+		0x111809 ,
+		0x111845 ,
+		0x111847 ,
+		0x112511 ,
+		0x112514 ,
+		0x112551 ,
+		0x112554 
+	] ,
+	'ROS-PIX-B-04': 
+	[
+		0x111815 ,
+		0x111855 ,
+		0x111857 ,
+		0x112410 ,
+		0x112412 ,
+		0x112448 ,
+		0x112450 ,
+		0x112452 ,
+		0x112505 ,
+		0x112545 ,
+		0x112549 ,
+		0x111719 ,
+		0x111757 ,
+		0x111817 ,
+		0x112415 ,
+		0x112417 ,
+		0x112455 ,
+		0x112457 ,
+		0x112507 ,
+		0x112509 ,
+		0x112547 
+	] ,
+	'ROS-PIX-B-05': 
+	[
+		0x111814 ,
+		0x111854 ,
+		0x111856 ,
+		0x112409 ,
+		0x112411 ,
+		0x112447 ,
+		0x112449 ,
+		0x112451 ,
+		0x112506 ,
+		0x112546 ,
+		0x112550 ,
+		0x111718 ,
+		0x111756 ,
+		0x111816 ,
+		0x112414 ,
+		0x112416 ,
+		0x112454 ,
+		0x112456 ,
+		0x112508 ,
+		0x112510 ,
+		0x112548 
+	] ,
+	'ROS-PIX-DISK-00': 
+	[
+		0x120309 ,
+		0x120349 ,
+		0x120311 ,
+		0x120351 ,
+		0x120314 ,
+		0x120354 ,
+		0x120316 ,
+		0x120356 ,
+		0x121649 ,
+		0x121651 ,
+		0x121654 ,
+		0x121656 ,
+		0x121616 ,
+		0x121614 ,
+		0x121611 ,
+		0x121609 
+	] ,
+	'ROS-PIX-DISK-01': 
+	[
+		0x121610 ,
+		0x121612 ,
+		0x121615 ,
+		0x121617 ,
+		0x121650 ,
+		0x121652 ,
+		0x121655 ,
+		0x121657 ,
+		0x120310 ,
+		0x120350 ,
+		0x120312 ,
+		0x120352 ,
+		0x120315 ,
+		0x120355 ,
+		0x120317 ,
+		0x120357 
+	] ,
+	'ROS-SCT-B-00': 
+	[
+		0x220100 ,
+		0x220101 ,
+		0x220102 ,
+		0x220103 ,
+		0x220104 ,
+		0x220105 ,
+		0x220106 ,
+		0x220107 ,
+		0x220108 ,
+		0x220109 ,
+		0x22010a ,
+		0x210100 ,
+		0x210101 ,
+		0x210102 ,
+		0x210103 ,
+		0x210104 ,
+		0x210105 ,
+		0x210106 ,
+		0x210107 ,
+		0x210108 ,
+		0x210109 ,
+		0x21010a 
+	] ,
+	'ROS-SCT-B-01': 
+	[
+		0x220005 ,
+		0x220006 ,
+		0x220007 ,
+		0x220008 ,
+		0x220009 ,
+		0x22000a ,
+		0x22000b ,
+		0x22000c ,
+		0x22000d ,
+		0x22000e ,
+		0x22000f ,
+		0x210005 ,
+		0x210006 ,
+		0x210007 ,
+		0x210008 ,
+		0x210009 ,
+		0x21000a ,
+		0x21000b ,
+		0x21000c ,
+		0x21000d ,
+		0x21000e ,
+		0x21000f 
+	] ,
+	'ROS-SCT-B-02': 
+	[
+		0x22010b ,
+		0x22010c ,
+		0x22010d ,
+		0x22010e ,
+		0x22010f ,
+		0x21010b ,
+		0x21010c ,
+		0x21010d ,
+		0x21010e ,
+		0x21010f ,
+		0x220000 ,
+		0x220001 ,
+		0x220002 ,
+		0x220003 ,
+		0x220004 ,
+		0x210000 ,
+		0x210001 ,
+		0x210002 ,
+		0x210003 ,
+		0x210004 
+	] ,
+	'ROS-SCT-ECC-00': 
+	[
+		0x240100 ,
+		0x240101 ,
+		0x240102 ,
+		0x240103 ,
+		0x240104 ,
+		0x240105 ,
+		0x240106 ,
+		0x240107 ,
+		0x24010a ,
+		0x24010d ,
+		0x24010e ,
+		0x24010f 
+	] ,
+	'ROS-SCT-ECC-01': 
+	[
+		0x240000 ,
+		0x240001 ,
+		0x240002 ,
+		0x240003 ,
+		0x240004 ,
+		0x240005 ,
+		0x240006 ,
+		0x240007 ,
+		0x240008 ,
+		0x240009 ,
+		0x24000a ,
+		0x24000b ,
+		0x24000c ,
+		0x24000d ,
+		0x24000e ,
+		0x24000f ,
+		0x240108 ,
+		0x240109 ,
+		0x24010b ,
+		0x24010c 
+	] ,
+	'ROS-SCT-ECA-00': 
+	[
+		0x230008 ,
+		0x230009 ,
+		0x23000b ,
+		0x23000c ,
+		0x230100 ,
+		0x230101 ,
+		0x230102 ,
+		0x230103 ,
+		0x230104 ,
+		0x230105 ,
+		0x230106 ,
+		0x230107 ,
+		0x230108 ,
+		0x230109 ,
+		0x23010a ,
+		0x23010b ,
+		0x23010c ,
+		0x23010d ,
+		0x23010e ,
+		0x23010f 
+	] ,
+	'ROS-SCT-ECA-01': 
+	[
+		0x230000 ,
+		0x230001 ,
+		0x230002 ,
+		0x230003 ,
+		0x230004 ,
+		0x230005 ,
+		0x230006 ,
+		0x230007 ,
+		0x23000a ,
+		0x23000d ,
+		0x23000e ,
+		0x23000f 
+	] ,
+	'ROS-TRT-BA-00': 
+	[
+		0x310100 ,
+		0x310200 ,
+		0x310300 ,
+		0x310400 ,
+		0x310500 ,
+		0x310600 ,
+		0x310700 ,
+		0x310800 ,
+		0x310900 ,
+		0x310a00 ,
+		0x310b00 ,
+		0x310c00 ,
+		0x311900 ,
+		0x311a00 ,
+		0x311b00 ,
+		0x311c00 ,
+		0x311d00 ,
+		0x311e00 ,
+		0x311f00 ,
+		0x312000 
+	] ,
+	'ROS-TRT-BA-01': 
+	[
+		0x310d00 ,
+		0x310e00 ,
+		0x310f00 ,
+		0x311000 ,
+		0x311100 ,
+		0x311200 ,
+		0x311300 ,
+		0x311400 ,
+		0x311500 ,
+		0x311600 ,
+		0x311700 ,
+		0x311800 
+	] ,
+	'ROS-TRT-BC-00': 
+	[
+		0x320100 ,
+		0x320200 ,
+		0x320300 ,
+		0x320400 ,
+		0x320500 ,
+		0x320600 ,
+		0x320700 ,
+		0x320800 ,
+		0x320900 ,
+		0x320a00 ,
+		0x320b00 ,
+		0x320c00 ,
+		0x321900 ,
+		0x321a00 ,
+		0x321b00 ,
+		0x321c00 ,
+		0x321d00 ,
+		0x321e00 ,
+		0x321f00 ,
+		0x322000 
+	] ,
+	'ROS-TRT-BC-01': 
+	[
+		0x320d00 ,
+		0x320e00 ,
+		0x320f00 ,
+		0x321000 ,
+		0x321100 ,
+		0x321200 ,
+		0x321300 ,
+		0x321400 ,
+		0x321500 ,
+		0x321600 ,
+		0x321700 ,
+		0x321800 
+	] ,
+	'ROS-TRT-ECA-00': 
+	[
+		0x330601 ,
+		0x330602 ,
+		0x330701 ,
+		0x330702 ,
+		0x330801 ,
+		0x330802 ,
+		0x330901 ,
+		0x330902 ,
+		0x330a01 ,
+		0x330a02 ,
+		0x330b01 ,
+		0x330b02 ,
+		0x330c01 ,
+		0x330c02 ,
+		0x330d01 ,
+		0x330d02 ,
+		0x330e01 ,
+		0x330e02 ,
+		0x330f01 ,
+		0x330f02 
+	] ,
+	'ROS-TRT-ECA-01': 
+	[
+		0x331001 ,
+		0x331002 ,
+		0x331101 ,
+		0x331102 ,
+		0x331201 ,
+		0x331202 ,
+		0x331301 ,
+		0x331302 ,
+		0x331401 ,
+		0x331402 ,
+		0x331501 ,
+		0x331502 ,
+		0x331601 ,
+		0x331602 ,
+		0x331701 ,
+		0x331702 ,
+		0x331801 ,
+		0x331802 ,
+		0x331901 ,
+		0x331902 ,
+		0x331a01 ,
+		0x331a02 
+	] ,
+	'ROS-TRT-ECA-02': 
+	[
+		0x330101 ,
+		0x330102 ,
+		0x330201 ,
+		0x330202 ,
+		0x330301 ,
+		0x330302 ,
+		0x330401 ,
+		0x330402 ,
+		0x330501 ,
+		0x330502 ,
+		0x331b01 ,
+		0x331b02 ,
+		0x331c01 ,
+		0x331c02 ,
+		0x331d01 ,
+		0x331d02 ,
+		0x331e01 ,
+		0x331e02 ,
+		0x331f01 ,
+		0x331f02 ,
+		0x332001 ,
+		0x332002 
+	] ,
+	'ROS-TRT-ECC-00': 
+	[
+		0x340601 ,
+		0x340602 ,
+		0x340701 ,
+		0x340702 ,
+		0x340801 ,
+		0x340802 ,
+		0x340901 ,
+		0x340902 ,
+		0x340a01 ,
+		0x340a02 ,
+		0x340b01 ,
+		0x340b02 ,
+		0x340c01 ,
+		0x340c02 ,
+		0x340d01 ,
+		0x340d02 ,
+		0x340e01 ,
+		0x340e02 ,
+		0x340f01 ,
+		0x340f02 
+	] ,
+	'ROS-TRT-ECC-01': 
+	[
+		0x341001 ,
+		0x341002 ,
+		0x341102 ,
+		0x341101 ,
+		0x341201 ,
+		0x341202 ,
+		0x341301 ,
+		0x341302 ,
+		0x341402 ,
+		0x341401 ,
+		0x341501 ,
+		0x341502 ,
+		0x341601 ,
+		0x341602 ,
+		0x341701 ,
+		0x341702 ,
+		0x341801 ,
+		0x341802 ,
+		0x341901 ,
+		0x341902 ,
+		0x341a01 ,
+		0x341a02 
+	] ,
+	'ROS-TRT-ECC-02': 
+	[
+		0x340101 ,
+		0x340102 ,
+		0x340201 ,
+		0x340202 ,
+		0x340301 ,
+		0x340302 ,
+		0x340401 ,
+		0x340402 ,
+		0x340501 ,
+		0x340502 ,
+		0x341b01 ,
+		0x341b02 ,
+		0x341c01 ,
+		0x341c02 ,
+		0x341d01 ,
+		0x341d02 ,
+		0x341e01 ,
+		0x341e02 ,
+		0x341f01 ,
+		0x341f02 ,
+		0x342001 ,
+		0x342002 
+	] ,
+	'ROS-LAR-EMBA-00': 
+	[
+		0x41000c ,
+		0x41000d ,
+		0x410001 ,
+		0x41000f ,
+		0x410014 ,
+		0x410015 ,
+		0x410016 ,
+		0x410017 ,
+		0x41001c ,
+		0x41001d ,
+		0x410030 ,
+		0x410031 ,
+		0x410002 ,
+		0x410003 ,
+		0x410011 ,
+		0x410013 ,
+		0x410018 ,
+		0x410019 ,
+		0x41001a ,
+		0x41001b ,
+		0x41001e ,
+		0x41001f ,
+		0x410032 ,
+		0x410033 
+	] ,
+	'ROS-LAR-EMBA-01': 
+	[
+		0x410004 ,
+		0x410005 ,
+		0x410006 ,
+		0x410007 ,
+		0x410000 ,
+		0x41000e ,
+		0x410020 ,
+		0x410021 ,
+		0x410028 ,
+		0x410029 ,
+		0x41002c ,
+		0x41002d ,
+		0x410008 ,
+		0x410009 ,
+		0x41000a ,
+		0x41000b ,
+		0x410010 ,
+		0x410012 ,
+		0x410022 ,
+		0x410023 ,
+		0x41002a ,
+		0x41002b ,
+		0x41002e ,
+		0x41002f 
+	] ,
+	'ROS-LAR-EMBA-02': 
+	[
+		0x410024 ,
+		0x410025 ,
+		0x410026 ,
+		0x410027 ,
+		0x410034 ,
+		0x410035 ,
+		0x410036 ,
+		0x410037 ,
+		0x41005c ,
+		0x41005d ,
+		0x41005e ,
+		0x41005f ,
+		0x41006c ,
+		0x41006d ,
+		0x41006e ,
+		0x41006f 
+	] ,
+	'ROS-LAR-EMBA-03': 
+	[
+		0x410044 ,
+		0x410045 ,
+		0x410039 ,
+		0x410047 ,
+		0x41004c ,
+		0x41004d ,
+		0x41004e ,
+		0x41004f ,
+		0x410054 ,
+		0x410055 ,
+		0x410068 ,
+		0x410069 ,
+		0x41003a ,
+		0x41003b ,
+		0x410049 ,
+		0x41004b ,
+		0x410050 ,
+		0x410051 ,
+		0x410052 ,
+		0x410053 ,
+		0x410056 ,
+		0x410057 ,
+		0x41006a ,
+		0x41006b 
+	] ,
+	'ROS-LAR-EMBA-04': 
+	[
+		0x41003c ,
+		0x41003d ,
+		0x41003e ,
+		0x41003f ,
+		0x410038 ,
+		0x410046 ,
+		0x410058 ,
+		0x410059 ,
+		0x410060 ,
+		0x410061 ,
+		0x410064 ,
+		0x410065 ,
+		0x410040 ,
+		0x410041 ,
+		0x410042 ,
+		0x410043 ,
+		0x410048 ,
+		0x41004a ,
+		0x41005a ,
+		0x41005b ,
+		0x410062 ,
+		0x410063 ,
+		0x410066 ,
+		0x410067 
+	] ,
+	'ROS-LAR-EMBA-05': 
+	[
+		0x41007c ,
+		0x41007d ,
+		0x410071 ,
+		0x41007f ,
+		0x410084 ,
+		0x410085 ,
+		0x410086 ,
+		0x410087 ,
+		0x41008c ,
+		0x41008d ,
+		0x4100a0 ,
+		0x4100a1 ,
+		0x410072 ,
+		0x410073 ,
+		0x410081 ,
+		0x410083 ,
+		0x410088 ,
+		0x410089 ,
+		0x41008a ,
+		0x41008b ,
+		0x41008e ,
+		0x41008f ,
+		0x4100a2 ,
+		0x4100a3 
+	] ,
+	'ROS-LAR-EMBA-06': 
+	[
+		0x410074 ,
+		0x410075 ,
+		0x410076 ,
+		0x410077 ,
+		0x410070 ,
+		0x41007e ,
+		0x410090 ,
+		0x410091 ,
+		0x410098 ,
+		0x410099 ,
+		0x41009c ,
+		0x41009d ,
+		0x410078 ,
+		0x410079 ,
+		0x41007a ,
+		0x41007b ,
+		0x410080 ,
+		0x410082 ,
+		0x410092 ,
+		0x410093 ,
+		0x41009a ,
+		0x41009b ,
+		0x41009e ,
+		0x41009f 
+	] ,
+	'ROS-LAR-EMBA-07': 
+	[
+		0x410094 ,
+		0x410095 ,
+		0x410096 ,
+		0x410097 ,
+		0x4100a4 ,
+		0x4100a5 ,
+		0x4100a6 ,
+		0x4100a7 ,
+		0x4100cc ,
+		0x4100cd ,
+		0x4100ce ,
+		0x4100cf ,
+		0x4100dc ,
+		0x4100dd ,
+		0x4100de ,
+		0x4100df 
+	] ,
+	'ROS-LAR-EMBA-08': 
+	[
+		0x4100b4 ,
+		0x4100b5 ,
+		0x4100a9 ,
+		0x4100b7 ,
+		0x4100bc ,
+		0x4100bd ,
+		0x4100be ,
+		0x4100bf ,
+		0x4100c4 ,
+		0x4100c5 ,
+		0x4100d8 ,
+		0x4100d9 ,
+		0x4100aa ,
+		0x4100ab ,
+		0x4100b9 ,
+		0x4100bb ,
+		0x4100c0 ,
+		0x4100c1 ,
+		0x4100c2 ,
+		0x4100c3 ,
+		0x4100c6 ,
+		0x4100c7 ,
+		0x4100da ,
+		0x4100db 
+	] ,
+	'ROS-LAR-EMBA-09': 
+	[
+		0x4100ac ,
+		0x4100ad ,
+		0x4100ae ,
+		0x4100af ,
+		0x4100a8 ,
+		0x4100b6 ,
+		0x4100c8 ,
+		0x4100c9 ,
+		0x4100d0 ,
+		0x4100d1 ,
+		0x4100d4 ,
+		0x4100d5 ,
+		0x4100b0 ,
+		0x4100b1 ,
+		0x4100b2 ,
+		0x4100b3 ,
+		0x4100b8 ,
+		0x4100ba ,
+		0x4100ca ,
+		0x4100cb ,
+		0x4100d2 ,
+		0x4100d3 ,
+		0x4100d6 ,
+		0x4100d7 
+	] ,
+	'ROS-LAR-EMBC-00': 
+	[
+		0x42000c ,
+		0x42000d ,
+		0x420001 ,
+		0x42000f ,
+		0x420014 ,
+		0x420015 ,
+		0x420016 ,
+		0x420017 ,
+		0x42001c ,
+		0x42001d ,
+		0x420030 ,
+		0x420031 ,
+		0x420002 ,
+		0x420003 ,
+		0x420011 ,
+		0x420013 ,
+		0x420018 ,
+		0x420019 ,
+		0x42001a ,
+		0x42001b ,
+		0x42001e ,
+		0x42001f ,
+		0x420032 ,
+		0x420033 
+	] ,
+	'ROS-LAR-EMBC-01': 
+	[
+		0x420004 ,
+		0x420005 ,
+		0x420006 ,
+		0x420007 ,
+		0x420000 ,
+		0x42000e ,
+		0x420020 ,
+		0x420021 ,
+		0x420028 ,
+		0x420029 ,
+		0x42002c ,
+		0x42002d ,
+		0x420008 ,
+		0x420009 ,
+		0x42000a ,
+		0x42000b ,
+		0x420010 ,
+		0x420012 ,
+		0x420022 ,
+		0x420023 ,
+		0x42002a ,
+		0x42002b ,
+		0x42002e ,
+		0x42002f 
+	] ,
+	'ROS-LAR-EMBC-02': 
+	[
+		0x420024 ,
+		0x420025 ,
+		0x420026 ,
+		0x420027 ,
+		0x420034 ,
+		0x420035 ,
+		0x420036 ,
+		0x420037 ,
+		0x42005c ,
+		0x42005d ,
+		0x42005e ,
+		0x42005f ,
+		0x42006c ,
+		0x42006d ,
+		0x42006e ,
+		0x42006f 
+	] ,
+	'ROS-LAR-EMBC-03': 
+	[
+		0x420044 ,
+		0x420045 ,
+		0x420039 ,
+		0x420047 ,
+		0x42004c ,
+		0x42004d ,
+		0x42004e ,
+		0x42004f ,
+		0x420054 ,
+		0x420055 ,
+		0x420068 ,
+		0x420069 ,
+		0x42003a ,
+		0x42003b ,
+		0x420049 ,
+		0x42004b ,
+		0x420050 ,
+		0x420051 ,
+		0x420052 ,
+		0x420053 ,
+		0x420056 ,
+		0x420057 ,
+		0x42006a ,
+		0x42006b 
+	] ,
+	'ROS-LAR-EMBC-04': 
+	[
+		0x42003c ,
+		0x42003d ,
+		0x42003e ,
+		0x42003f ,
+		0x420038 ,
+		0x420046 ,
+		0x420058 ,
+		0x420059 ,
+		0x420060 ,
+		0x420061 ,
+		0x420064 ,
+		0x420065 ,
+		0x420040 ,
+		0x420041 ,
+		0x420042 ,
+		0x420043 ,
+		0x420048 ,
+		0x42004a ,
+		0x42005a ,
+		0x42005b ,
+		0x420062 ,
+		0x420063 ,
+		0x420066 ,
+		0x420067 
+	] ,
+	'ROS-LAR-EMBC-05': 
+	[
+		0x42007c ,
+		0x42007d ,
+		0x420071 ,
+		0x42007f ,
+		0x420084 ,
+		0x420085 ,
+		0x420086 ,
+		0x420087 ,
+		0x42008c ,
+		0x42008d ,
+		0x4200a0 ,
+		0x4200a1 ,
+		0x420072 ,
+		0x420073 ,
+		0x420081 ,
+		0x420083 ,
+		0x420088 ,
+		0x420089 ,
+		0x42008a ,
+		0x42008b ,
+		0x42008e ,
+		0x42008f ,
+		0x4200a2 ,
+		0x4200a3 
+	] ,
+	'ROS-LAR-EMBC-06': 
+	[
+		0x420074 ,
+		0x420075 ,
+		0x420076 ,
+		0x420077 ,
+		0x420070 ,
+		0x42007e ,
+		0x420090 ,
+		0x420091 ,
+		0x420098 ,
+		0x420099 ,
+		0x42009c ,
+		0x42009d ,
+		0x420078 ,
+		0x420079 ,
+		0x42007a ,
+		0x42007b ,
+		0x420080 ,
+		0x420082 ,
+		0x420092 ,
+		0x420093 ,
+		0x42009a ,
+		0x42009b ,
+		0x42009e ,
+		0x42009f 
+	] ,
+	'ROS-LAR-EMBC-07': 
+	[
+		0x420094 ,
+		0x420095 ,
+		0x420096 ,
+		0x420097 ,
+		0x4200a4 ,
+		0x4200a5 ,
+		0x4200a6 ,
+		0x4200a7 ,
+		0x4200cc ,
+		0x4200cd ,
+		0x4200ce ,
+		0x4200cf ,
+		0x4200dc ,
+		0x4200dd ,
+		0x4200de ,
+		0x4200df 
+	] ,
+	'ROS-LAR-EMBC-08': 
+	[
+		0x4200b4 ,
+		0x4200b5 ,
+		0x4200a9 ,
+		0x4200b7 ,
+		0x4200bc ,
+		0x4200bd ,
+		0x4200be ,
+		0x4200bf ,
+		0x4200c4 ,
+		0x4200c5 ,
+		0x4200d8 ,
+		0x4200d9 ,
+		0x4200aa ,
+		0x4200ab ,
+		0x4200b9 ,
+		0x4200bb ,
+		0x4200c0 ,
+		0x4200c1 ,
+		0x4200c2 ,
+		0x4200c3 ,
+		0x4200c6 ,
+		0x4200c7 ,
+		0x4200da ,
+		0x4200db 
+	] ,
+	'ROS-LAR-EMBC-09': 
+	[
+		0x4200ac ,
+		0x4200ad ,
+		0x4200ae ,
+		0x4200af ,
+		0x4200a8 ,
+		0x4200b6 ,
+		0x4200c8 ,
+		0x4200c9 ,
+		0x4200d0 ,
+		0x4200d1 ,
+		0x4200d4 ,
+		0x4200d5 ,
+		0x4200b0 ,
+		0x4200b1 ,
+		0x4200b2 ,
+		0x4200b3 ,
+		0x4200b8 ,
+		0x4200ba ,
+		0x4200ca ,
+		0x4200cb ,
+		0x4200d2 ,
+		0x4200d3 ,
+		0x4200d6 ,
+		0x4200d7 
+	] ,
+	'ROS-LAR-EMECA-00': 
+	[
+		0x430000 ,
+		0x430001 ,
+		0x430024 ,
+		0x430025 ,
+		0x430026 ,
+		0x430027 ,
+		0x430068 ,
+		0x43006a ,
+		0x43006c ,
+		0x43006d ,
+		0x430078 ,
+		0x430079 ,
+		0x430002 ,
+		0x430003 ,
+		0x430028 ,
+		0x430029 ,
+		0x43002a ,
+		0x43002b ,
+		0x43006e ,
+		0x43006f ,
+		0x43007a ,
+		0x43007b ,
+		0x430069 
+	] ,
+	'ROS-LAR-EMECA-01': 
+	[
+		0x430004 ,
+		0x430005 ,
+		0x430006 ,
+		0x430007 ,
+		0x430008 ,
+		0x430009 ,
+		0x43000a ,
+		0x43000b ,
+		0x43001c ,
+		0x43001d ,
+		0x43001e ,
+		0x43001f ,
+		0x43000c ,
+		0x43000d ,
+		0x43000e ,
+		0x43000f ,
+		0x430010 ,
+		0x430011 ,
+		0x430012 ,
+		0x430013 ,
+		0x430020 ,
+		0x430021 ,
+		0x430022 ,
+		0x430023 
+	] ,
+	'ROS-LAR-EMECA-02': 
+	[
+		0x430014 ,
+		0x430015 ,
+		0x430016 ,
+		0x430017 ,
+		0x43002c ,
+		0x43002d ,
+		0x43002e ,
+		0x43002f ,
+		0x43007d ,
+		0x43007c ,
+		0x430077 ,
+		0x430018 ,
+		0x430019 ,
+		0x43001a ,
+		0x43001b ,
+		0x430030 ,
+		0x430031 ,
+		0x430032 ,
+		0x430033 ,
+		0x430074 ,
+		0x430076 ,
+		0x430075 
+	] ,
+	'ROS-LAR-EMECA-03': 
+	[
+		0x430034 ,
+		0x430035 ,
+		0x430058 ,
+		0x430059 ,
+		0x43005a ,
+		0x43005b ,
+		0x430086 ,
+		0x430038 ,
+		0x430070 ,
+		0x430071 ,
+		0x430081 ,
+		0x430080 ,
+		0x430036 ,
+		0x430037 ,
+		0x43005c ,
+		0x43005d ,
+		0x43005e ,
+		0x43005f ,
+		0x430072 ,
+		0x430073 ,
+		0x43007e ,
+		0x43007f ,
+		0x430087 
+	] ,
+	'ROS-LAR-EMECA-04': 
+	[
+		0x43006b ,
+		0x430039 ,
+		0x43003a ,
+		0x43003b ,
+		0x43003c ,
+		0x43003d ,
+		0x43003e ,
+		0x43003f ,
+		0x430050 ,
+		0x430051 ,
+		0x430052 ,
+		0x430053 ,
+		0x430040 ,
+		0x430041 ,
+		0x430042 ,
+		0x430043 ,
+		0x430044 ,
+		0x430045 ,
+		0x430046 ,
+		0x430047 ,
+		0x430054 ,
+		0x430055 ,
+		0x430056 ,
+		0x430057 
+	] ,
+	'ROS-LAR-EMECA-05': 
+	[
+		0x430048 ,
+		0x430049 ,
+		0x43004a ,
+		0x43004b ,
+		0x430060 ,
+		0x430061 ,
+		0x430062 ,
+		0x430063 ,
+		0x430082 ,
+		0x430083 ,
+		0x430085 ,
+		0x43004c ,
+		0x43004d ,
+		0x43004e ,
+		0x43004f ,
+		0x430064 ,
+		0x430065 ,
+		0x430066 ,
+		0x430067 ,
+		0x430088 ,
+		0x430084 ,
+		0x430089 
+	] ,
+	'ROS-LAR-EMECC-00': 
+	[
+		0x440000 ,
+		0x440001 ,
+		0x440025 ,
+		0x440026 ,
+		0x440027 ,
+		0x440028 ,
+		0x440068 ,
+		0x44006a ,
+		0x44006c ,
+		0x44006d ,
+		0x440078 ,
+		0x440079 ,
+		0x440002 ,
+		0x440003 ,
+		0x440029 ,
+		0x44002a ,
+		0x44002b ,
+		0x440058 ,
+		0x44006e ,
+		0x44006f ,
+		0x44007a ,
+		0x44007b ,
+		0x440069 
+	] ,
+	'ROS-LAR-EMECC-01': 
+	[
+		0x440004 ,
+		0x440005 ,
+		0x440006 ,
+		0x440007 ,
+		0x440008 ,
+		0x440009 ,
+		0x44000a ,
+		0x44000b ,
+		0x44001c ,
+		0x44001d ,
+		0x44001e ,
+		0x44001f ,
+		0x44000c ,
+		0x44000d ,
+		0x44000e ,
+		0x44000f ,
+		0x440010 ,
+		0x440011 ,
+		0x440012 ,
+		0x440013 ,
+		0x440020 ,
+		0x440021 ,
+		0x440022 ,
+		0x440023 
+	] ,
+	'ROS-LAR-EMECC-02': 
+	[
+		0x440015 ,
+		0x440016 ,
+		0x440017 ,
+		0x440018 ,
+		0x44002d ,
+		0x44002e ,
+		0x44002f ,
+		0x440030 ,
+		0x44007d ,
+		0x44007c ,
+		0x440077 ,
+		0x440019 ,
+		0x44001a ,
+		0x44001b ,
+		0x440031 ,
+		0x440032 ,
+		0x440033 ,
+		0x440048 ,
+		0x440060 ,
+		0x440074 ,
+		0x440076 ,
+		0x440075 
+	] ,
+	'ROS-LAR-EMECC-03': 
+	[
+		0x440034 ,
+		0x440035 ,
+		0x440059 ,
+		0x44005a ,
+		0x44005b ,
+		0x44005c ,
+		0x440086 ,
+		0x440038 ,
+		0x440070 ,
+		0x440071 ,
+		0x440081 ,
+		0x440080 ,
+		0x440024 ,
+		0x440036 ,
+		0x440037 ,
+		0x44005d ,
+		0x44005e ,
+		0x44005f ,
+		0x440072 ,
+		0x440073 ,
+		0x44007e ,
+		0x44007f ,
+		0x440087 
+	] ,
+	'ROS-LAR-EMECC-04': 
+	[
+		0x44006b ,
+		0x440039 ,
+		0x44003a ,
+		0x44003b ,
+		0x44003c ,
+		0x44003d ,
+		0x44003e ,
+		0x44003f ,
+		0x440050 ,
+		0x440051 ,
+		0x440052 ,
+		0x440053 ,
+		0x440040 ,
+		0x440041 ,
+		0x440042 ,
+		0x440043 ,
+		0x440044 ,
+		0x440045 ,
+		0x440046 ,
+		0x440047 ,
+		0x440054 ,
+		0x440055 ,
+		0x440056 ,
+		0x440057 
+	] ,
+	'ROS-LAR-EMECC-05': 
+	[
+		0x440049 ,
+		0x44004a ,
+		0x44004b ,
+		0x44004c ,
+		0x440061 ,
+		0x440062 ,
+		0x440063 ,
+		0x440064 ,
+		0x440082 ,
+		0x440083 ,
+		0x440085 ,
+		0x440014 ,
+		0x44002c ,
+		0x44004d ,
+		0x44004e ,
+		0x44004f ,
+		0x440065 ,
+		0x440066 ,
+		0x440067 ,
+		0x440088 ,
+		0x440084 ,
+		0x440089 
+	] ,
+	'ROS-LAR-HECFCAL-00': 
+	[
+		0x450000 ,
+		0x450001 ,
+		0x450002 ,
+		0x450003 ,
+		0x450004 ,
+		0x450005 ,
+		0x450006 ,
+		0x450007 ,
+		0x450008 ,
+		0x450009 ,
+		0x45000a ,
+		0x45000b ,
+		0x460000 ,
+		0x460001 ,
+		0x460002 ,
+		0x460003 ,
+		0x460004 ,
+		0x460005 ,
+		0x460006 ,
+		0x460007 ,
+		0x460008 ,
+		0x460009 ,
+		0x46000a ,
+		0x46000b 
+	] ,
+	'ROS-LAR-HECFCAL-01': 
+	[
+		0x470000 ,
+		0x470001 ,
+		0x470002 ,
+		0x470003 ,
+		0x470004 ,
+		0x470005 ,
+		0x470006 ,
+		0x480000 ,
+		0x480001 ,
+		0x480002 ,
+		0x480003 ,
+		0x480004 ,
+		0x480005 ,
+		0x480006 
+	] ,
+	'ROS-TIL-EBA-00': 
+	[
+		0x530008 ,
+		0x530009 ,
+		0x53000a ,
+		0x53000b ,
+		0x530100 ,
+		0x53000c ,
+		0x53000d ,
+		0x53000e ,
+		0x53000f ,
+		0x530101 ,
+		0x530010 ,
+		0x530011 ,
+		0x530012 ,
+		0x530013 ,
+		0x530102 ,
+		0x530014 ,
+		0x530015 ,
+		0x530016 ,
+		0x530017 ,
+		0x530103 
+	] ,
+	'ROS-TIL-EBA-01': 
+	[
+		0x530018 ,
+		0x530019 ,
+		0x53001a ,
+		0x53001b ,
+		0x530104 ,
+		0x53001c ,
+		0x53001d ,
+		0x53001e ,
+		0x53001f ,
+		0x530105 ,
+		0x530000 ,
+		0x530001 ,
+		0x530002 ,
+		0x530003 ,
+		0x530106 ,
+		0x530004 ,
+		0x530005 ,
+		0x530006 ,
+		0x530007 ,
+		0x530107 
+	] ,
+	'ROS-TIL-LBA-00': 
+	[
+		0x510008 ,
+		0x510009 ,
+		0x51000a ,
+		0x51000b ,
+		0x510104 ,
+		0x51000c ,
+		0x51000d ,
+		0x51000e ,
+		0x51000f ,
+		0x510105 ,
+		0x510010 ,
+		0x510011 ,
+		0x510012 ,
+		0x510013 ,
+		0x510014 ,
+		0x510015 ,
+		0x510016 ,
+		0x510017 
+	] ,
+	'ROS-TIL-LBA-01': 
+	[
+		0x510018 ,
+		0x510019 ,
+		0x51001a ,
+		0x51001b ,
+		0x510106 ,
+		0x51001c ,
+		0x51001d ,
+		0x51001e ,
+		0x51001f ,
+		0x510000 ,
+		0x510001 ,
+		0x510002 ,
+		0x510003 ,
+		0x510004 ,
+		0x510005 ,
+		0x510006 ,
+		0x510007 
+	] ,
+	'ROS-TIL-LBC-00': 
+	[
+		0x520008 ,
+		0x520009 ,
+		0x52000a ,
+		0x52000b ,
+		0x520104 ,
+		0x52000c ,
+		0x52000d ,
+		0x52000e ,
+		0x52000f ,
+		0x520105 ,
+		0x520010 ,
+		0x520011 ,
+		0x520012 ,
+		0x520013 ,
+		0x520014 ,
+		0x520015 ,
+		0x520016 ,
+		0x520017 
+	] ,
+	'ROS-TIL-LBC-01': 
+	[
+		0x520018 ,
+		0x520019 ,
+		0x52001a ,
+		0x52001b ,
+		0x520106 ,
+		0x52001c ,
+		0x52001d ,
+		0x52001e ,
+		0x52001f ,
+		0x520020 ,
+		0x520000 ,
+		0x520001 ,
+		0x520002 ,
+		0x520003 ,
+		0x520004 ,
+		0x520005 ,
+		0x520006 ,
+		0x520007 
+	] ,
+	'ROS-TIL-EBC-00': 
+	[
+		0x540008 ,
+		0x540009 ,
+		0x54000a ,
+		0x54000b ,
+		0x540100 ,
+		0x54000c ,
+		0x54000d ,
+		0x54000e ,
+		0x54000f ,
+		0x540101 ,
+		0x540010 ,
+		0x540011 ,
+		0x540012 ,
+		0x540013 ,
+		0x540102 ,
+		0x540014 ,
+		0x540015 ,
+		0x540016 ,
+		0x540017 ,
+		0x540103 
+	] ,
+	'ROS-TIL-EBC-01': 
+	[
+		0x540018 ,
+		0x540019 ,
+		0x54001a ,
+		0x54001b ,
+		0x540104 ,
+		0x54001c ,
+		0x54001d ,
+		0x54001e ,
+		0x54001f ,
+		0x540105 ,
+		0x540000 ,
+		0x540001 ,
+		0x540002 ,
+		0x540003 ,
+		0x540106 ,
+		0x540004 ,
+		0x540005 ,
+		0x540006 ,
+		0x540007 ,
+		0x540107 
+	] ,
+	'ROS-CSC-ECA-00': 
+	[
+		0x690080 ,
+		0x690081 ,
+		0x690082 ,
+		0x690083 ,
+		0x690084 ,
+		0x690085 ,
+		0x690086 ,
+		0x690087 ,
+		0x690088 ,
+		0x690089 ,
+		0x69008a ,
+		0x69008b ,
+		0x69008c ,
+		0x69008d ,
+		0x69008e ,
+		0x69008f 
+	] ,
+	'ROS-CSC-ECC-00': 
+	[
+		0x6a0080 ,
+		0x6a0081 ,
+		0x6a0082 ,
+		0x6a0083 ,
+		0x6a0084 ,
+		0x6a0085 ,
+		0x6a0086 ,
+		0x6a0087 ,
+		0x6a0088 ,
+		0x6a0089 ,
+		0x6a008a ,
+		0x6a008b ,
+		0x6a008c ,
+		0x6a008d ,
+		0x6a008e ,
+		0x6a008f 
+	] ,
+	'ROS-MDT-BA-00': 
+	[
+		0x610000 ,
+		0x610001 ,
+		0x610008 ,
+		0x610009 ,
+		0x610010 ,
+		0x610011 ,
+		0x610018 ,
+		0x610019 ,
+		0x610020 ,
+		0x610021 ,
+		0x610028 ,
+		0x610029 ,
+		0x610002 ,
+		0x610003 ,
+		0x61000a ,
+		0x61000b ,
+		0x610012 ,
+		0x610013 ,
+		0x61001a ,
+		0x61001b ,
+		0x610022 ,
+		0x610023 ,
+		0x61002a ,
+		0x61002b 
+	] ,
+	'ROS-MDT-BA-01': 
+	[
+		0x610004 ,
+		0x610005 ,
+		0x61000c ,
+		0x61000d ,
+		0x610014 ,
+		0x610015 ,
+		0x61001c ,
+		0x61001d ,
+		0x610024 ,
+		0x610025 ,
+		0x61002c ,
+		0x61002d ,
+		0x610006 ,
+		0x610007 ,
+		0x61000e ,
+		0x61000f ,
+		0x610016 ,
+		0x610017 ,
+		0x61001e ,
+		0x61001f ,
+		0x610026 ,
+		0x610027 ,
+		0x61002e ,
+		0x61002f 
+	] ,
+	'ROS-MDT-BA-02': 
+	[
+		0x610030 ,
+		0x610031 ,
+		0x610080 ,
+		0x610032 ,
+		0x610033 
+	] ,
+	'ROS-MDT-BC-00': 
+	[
+		0x620000 ,
+		0x620001 ,
+		0x620008 ,
+		0x620009 ,
+		0x620010 ,
+		0x620011 ,
+		0x620018 ,
+		0x620019 ,
+		0x620020 ,
+		0x620021 ,
+		0x620028 ,
+		0x620029 ,
+		0x620002 ,
+		0x620003 ,
+		0x62000a ,
+		0x62000b ,
+		0x620012 ,
+		0x620013 ,
+		0x62001a ,
+		0x62001b ,
+		0x620022 ,
+		0x620023 ,
+		0x62002a ,
+		0x62002b 
+	] ,
+	'ROS-MDT-BC-01': 
+	[
+		0x620004 ,
+		0x620005 ,
+		0x62000c ,
+		0x62000d ,
+		0x620014 ,
+		0x620015 ,
+		0x62001c ,
+		0x62001d ,
+		0x620024 ,
+		0x620025 ,
+		0x62002c ,
+		0x62002d ,
+		0x620006 ,
+		0x620007 ,
+		0x62000e ,
+		0x62000f ,
+		0x620016 ,
+		0x620017 ,
+		0x62001e ,
+		0x62001f ,
+		0x620026 ,
+		0x620027 ,
+		0x62002e ,
+		0x62002f 
+	] ,
+	'ROS-MDT-BC-02': 
+	[
+		0x620030 ,
+		0x620031 ,
+		0x620080 ,
+		0x620032 ,
+		0x620033 
+	] ,
+	'ROS-MDT-ECA-00': 
+	[
+		0x630011 ,
+		0x630018 ,
+		0x630019 ,
+		0x630020 ,
+		0x630021 ,
+		0x630028 ,
+		0x630029 ,
+		0x630051 ,
+		0x630058 ,
+		0x630059 ,
+		0x630060 ,
+		0x630061 ,
+		0x630013 ,
+		0x63001a ,
+		0x63001b ,
+		0x630022 ,
+		0x630023 ,
+		0x63002a ,
+		0x63002b ,
+		0x630053 ,
+		0x63005a ,
+		0x63005b ,
+		0x630062 ,
+		0x630063 
+	] ,
+	'ROS-MDT-ECA-01': 
+	[
+		0x630015 ,
+		0x63001c ,
+		0x63001d ,
+		0x630024 ,
+		0x630025 ,
+		0x63002c ,
+		0x63002d ,
+		0x630055 ,
+		0x63005c ,
+		0x63005d ,
+		0x630064 ,
+		0x630065 ,
+		0x630017 ,
+		0x63001e ,
+		0x63001f ,
+		0x630026 ,
+		0x630027 ,
+		0x63002e ,
+		0x63002f ,
+		0x630057 ,
+		0x63005e ,
+		0x63005f ,
+		0x630066 ,
+		0x630067 
+	] ,
+	'ROS-MDT-ECA-02': 
+	[
+		0x630030 ,
+		0x630000 ,
+		0x630001 ,
+		0x630008 ,
+		0x630009 ,
+		0x630010 ,
+		0x630031 ,
+		0x630002 ,
+		0x630003 ,
+		0x630070 ,
+		0x630050 ,
+		0x630071 ,
+		0x63000a ,
+		0x63000b ,
+		0x630012 ,
+		0x630032 ,
+		0x630004 ,
+		0x630005 ,
+		0x63000c ,
+		0x63000d ,
+		0x630014 ,
+		0x630052 ,
+		0x630072 ,
+		0x630054 
+	] ,
+	'ROS-MDT-ECA-03': 
+	[
+		0x630033 ,
+		0x630006 ,
+		0x630007 ,
+		0x63000e ,
+		0x63000f ,
+		0x630016 ,
+		0x630073 ,
+		0x630056 
+	] ,
+	'ROS-MDT-ECC-00': 
+	[
+		0x640017 ,
+		0x64001e ,
+		0x64001f ,
+		0x640026 ,
+		0x640027 ,
+		0x64002e ,
+		0x64002f ,
+		0x640057 ,
+		0x64005e ,
+		0x64005f ,
+		0x640066 ,
+		0x640067 ,
+		0x640011 ,
+		0x640018 ,
+		0x640019 ,
+		0x640020 ,
+		0x640021 ,
+		0x640028 ,
+		0x640029 ,
+		0x640051 ,
+		0x640058 ,
+		0x640059 ,
+		0x640060 ,
+		0x640061 
+	] ,
+	'ROS-MDT-ECC-01': 
+	[
+		0x640013 ,
+		0x64001a ,
+		0x64001b ,
+		0x640022 ,
+		0x640023 ,
+		0x64002a ,
+		0x64002b ,
+		0x640053 ,
+		0x64005a ,
+		0x64005b ,
+		0x640062 ,
+		0x640063 ,
+		0x640015 ,
+		0x64001c ,
+		0x64001d ,
+		0x640024 ,
+		0x640025 ,
+		0x64002c ,
+		0x64002d ,
+		0x640055 ,
+		0x64005c ,
+		0x64005d ,
+		0x640064 ,
+		0x640065 
+	] ,
+	'ROS-MDT-ECC-02': 
+	[
+		0x640033 ,
+		0x640006 ,
+		0x640007 ,
+		0x64000e ,
+		0x64000f ,
+		0x640016 ,
+		0x640030 ,
+		0x640000 ,
+		0x640001 ,
+		0x640073 ,
+		0x640056 ,
+		0x640070 ,
+		0x640008 ,
+		0x640009 ,
+		0x640010 ,
+		0x640031 ,
+		0x640002 ,
+		0x640003 ,
+		0x64000a ,
+		0x64000b ,
+		0x640012 ,
+		0x640050 ,
+		0x640071 ,
+		0x640052 
+	] ,
+	'ROS-MDT-ECC-03': 
+	[
+		0x640032 ,
+		0x640004 ,
+		0x640005 ,
+		0x64000c ,
+		0x64000d ,
+		0x640014 ,
+		0x640072 ,
+		0x640054 
+	] ,
+	'ROS-RPC-BC-00': 
+	[
+		0x660000 ,
+		0x660001 ,
+		0x660002 ,
+		0x660003 ,
+		0x660004 ,
+		0x660005 ,
+		0x660006 ,
+		0x660007 ,
+		0x660008 ,
+		0x660009 ,
+		0x66000a ,
+		0x66000b ,
+		0x66000c ,
+		0x66000d ,
+		0x66000e ,
+		0x66000f 
+	] ,
+	'ROS-RPC-BA-00': 
+	[
+		0x650000 ,
+		0x650001 ,
+		0x650002 ,
+		0x650003 ,
+		0x650004 ,
+		0x650005 ,
+		0x650006 ,
+		0x650007 ,
+		0x650008 ,
+		0x650009 ,
+		0x65000a ,
+		0x65000b ,
+		0x65000c ,
+		0x65000d ,
+		0x65000e ,
+		0x65000f 
+	] ,
+	'ROS-TGC-EC-00': 
+	[
+		0x670001 ,
+		0x670002 ,
+		0x670003 ,
+		0x670004 ,
+		0x670005 ,
+		0x670006 ,
+		0x670007 ,
+		0x670008 ,
+		0x670009 ,
+		0x67000a ,
+		0x67000b ,
+		0x67000c ,
+		0x680001 ,
+		0x680002 ,
+		0x680003 ,
+		0x680004 ,
+		0x680005 ,
+		0x680006 ,
+		0x680007 ,
+		0x680008 ,
+		0x680009 ,
+		0x68000a ,
+		0x68000b ,
+		0x68000c 
+	] ,
+	'ROS-TGC-SL-00': 
+	[
+		0x670011 ,
+		0x670012 ,
+		0x670013 ,
+		0x680011 ,
+		0x680012 ,
+		0x680013 
+	] ,
+	'ROS-FWD-AFP-00': 
+	[
+		0x850001 ,
+		0x850002 
+	] ,
+	'ROS-FWD-ALFA-00': 
+	[
+		0x840000 ,
+		0x840001 
+	] 
+}
+
+class ROSToROBMap:
+	def __init__(self):
+		self.data = ros2rob
+
+	def get_mapping(self):
+		return self.data
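+
+# Typical use (see share/TrigCostAnalysis_JobOptions.py):
+#   from TrigCostAnalysis.ROSToROB import ROSToROBMap
+#   trigCostAnalysis.ROSToROBMap = ROSToROBMap().get_mapping()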
\ No newline at end of file
diff --git a/Trigger/TrigCost/TrigCostAnalysis/python/__init__.py b/Trigger/TrigCost/TrigCostAnalysis/python/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..dbe776ceaeddbc8f13a89bda699e8890181749b2
--- /dev/null
+++ b/Trigger/TrigCost/TrigCostAnalysis/python/__init__.py
@@ -0,0 +1 @@
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
\ No newline at end of file
diff --git a/Trigger/TrigCost/TrigCostAnalysis/share/TrigCostAnalysis_JobOptions.py b/Trigger/TrigCost/TrigCostAnalysis/share/TrigCostAnalysis_JobOptions.py
index 8451c26f70b0202152cc78b178b53e15f0f00cc9..c7dbc9612b6d87c160e0e217c93d4beeb05d6d80 100644
--- a/Trigger/TrigCost/TrigCostAnalysis/share/TrigCostAnalysis_JobOptions.py
+++ b/Trigger/TrigCost/TrigCostAnalysis/share/TrigCostAnalysis_JobOptions.py
@@ -58,6 +58,9 @@ enhancedBiasWeighter.RunNumber = 379158
 enhancedBiasWeighter.UseBunchCrossingTool = False
 
 from AthenaCommon import CfgMgr
+from TrigCostAnalysis.ROSToROB import ROSToROBMap
+rosToRobMap = ROSToROBMap()
+
 trigCostAnalysis = CfgMgr.TrigCostAnalysis()
 trigCostAnalysis.OutputLevel = DEBUG
 trigCostAnalysis.RootStreamName = "COSTSTREAM"
@@ -66,6 +69,7 @@ trigCostAnalysis.EnhancedBiasTool = enhancedBiasWeighter
 trigCostAnalysis.UseEBWeights = False
 trigCostAnalysis.MaxFullEventDumps = 100
 trigCostAnalysis.FullEventDumpProbability = 1 # X. Where probability is 1 in X
+trigCostAnalysis.ROSToROBMap = rosToRobMap.get_mapping()
 topSequence += trigCostAnalysis
 
 from AthenaCommon.AppMgr import ServiceMgr as svcMgr
diff --git a/Trigger/TrigCost/TrigCostAnalysis/src/CostData.cxx b/Trigger/TrigCost/TrigCostAnalysis/src/CostData.cxx
index 1dcc6a8535d6a69f9f5de380713b00e12ac3b249..cc43b20711d7e442becb0d9d392cb595c446f403 100644
--- a/Trigger/TrigCost/TrigCostAnalysis/src/CostData.cxx
+++ b/Trigger/TrigCost/TrigCostAnalysis/src/CostData.cxx
@@ -46,6 +46,10 @@ StatusCode CostData::cache() {
   return StatusCode::SUCCESS;
 }
 
+void CostData::setRosToRobMap(const std::map<std::string, std::vector<uint32_t>>& rosToRobMap) {
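+  // Non-owning pointer: the map is a Gaudi property of TrigCostAnalysis and outlives this per-event object.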
+  m_rosToRob = &rosToRobMap;
+}
+
 void CostData::setLb(uint32_t lb) {
   m_lb = lb;
 }
@@ -80,6 +84,13 @@ const xAOD::TrigCompositeContainer& CostData::rosCollection() const {
   return *m_rosCollection;
 }
 
+const std::map<std::string, std::vector<uint32_t>>& CostData::rosToRobMap() const {
+  return *m_rosToRob;
+}
+
+const std::map<size_t, std::vector<size_t>>& CostData::algToRequestMap() const {
+  return m_algToRos;
+}
 
 float CostData::algTotalTimeMilliSec() const {
   return m_algTotalTime * 1e-3; // microseconds to milliseconds
diff --git a/Trigger/TrigCost/TrigCostAnalysis/src/CostData.h b/Trigger/TrigCost/TrigCostAnalysis/src/CostData.h
index a164eb251462253b32521efbd883d925bfe67c5c..dd54a8fbc8f44b4fc1e0f7f6985ff59f47ed2b7e 100644
--- a/Trigger/TrigCost/TrigCostAnalysis/src/CostData.h
+++ b/Trigger/TrigCost/TrigCostAnalysis/src/CostData.h
@@ -42,7 +42,7 @@ class CostData {
     CostData(const CostData&) = delete;
 
     /**
-     * @brief Cache the cost collection, after formally requesting it from storegate.
+     * @brief Cache the cost and ROS collections, after formally requesting them from StoreGate.
      */
     StatusCode set(const xAOD::TrigCompositeContainer* costCollection, const xAOD::TrigCompositeContainer* rosCollection, uint32_t onlineSlot);
 
@@ -56,6 +56,21 @@ class CostData {
      */
     const xAOD::TrigCompositeContainer& rosCollection() const;
 
+    /**
+     * @brief Getter of the ROS to ROB map.
+     */
+    const std::map<std::string, std::vector<uint32_t>>& rosToRobMap() const;
+
+    /**
+     * @brief Setter of the ROS to ROB map.
+     */
+    void setRosToRobMap(const std::map<std::string, std::vector<uint32_t>>& rosToRobMap);
+
+    /**
+     * @brief Getter of the map between an algorithm (index in costCollection) and its ROS requests (indices in rosCollection)
+     */
+    const std::map<size_t, std::vector<size_t>>& algToRequestMap() const;
+
     /**
      * @brief Setter of effective P1 walltime represented by the current event.
      */
@@ -134,6 +149,8 @@ class CostData {
     bool m_liveTimeIsPerEvent; //!< If the livetime represents a single event or all of the current LB
     const std::unordered_map<uint32_t, std::string>* m_typeMapPtr; //!< Cached non-owning pointer mapping algorithm instance names to types
     std::map<size_t, std::vector<size_t>> m_algToRos; //!< Mapping of indexes from m_costCollection to corresponding ROS requests made by algorithm
+    const std::map<std::string, std::vector<uint32_t>>* m_rosToRob; //!< Cached non-owning pointer to the mapping of ROS names to their ROB ids
+
 };
 
 #endif // TRIGCOSTANALYSIS_COSTDATA_H
diff --git a/Trigger/TrigCost/TrigCostAnalysis/src/TrigCostAnalysis.cxx b/Trigger/TrigCost/TrigCostAnalysis/src/TrigCostAnalysis.cxx
index 0095fa18343d4a8e24c9ec6371eea5523272fec0..c3863ab59eb0799d95e61785ce4b11efe2a15768 100644
--- a/Trigger/TrigCost/TrigCostAnalysis/src/TrigCostAnalysis.cxx
+++ b/Trigger/TrigCost/TrigCostAnalysis/src/TrigCostAnalysis.cxx
@@ -152,6 +152,7 @@ StatusCode TrigCostAnalysis::execute() {
   const uint32_t onlineSlot = getOnlineSlot( costDataHandle.get() );
   CostData costData;
   ATH_CHECK( costData.set(costDataHandle.get(), rosDataHandle.get(), onlineSlot) );
+  costData.setRosToRobMap(m_rosToRob);
   costData.setLb( context.eventID().lumi_block() );
   costData.setTypeMap( m_algTypeMap );
   if (!m_enhancedBiasTool.name().empty()) {
diff --git a/Trigger/TrigCost/TrigCostAnalysis/src/TrigCostAnalysis.h b/Trigger/TrigCost/TrigCostAnalysis/src/TrigCostAnalysis.h
index af2a9016f23f23b5330eb080a58d10b80e0b6e36..99921323a6f390ba9479b028b59ac29cd0bc1db4 100644
--- a/Trigger/TrigCost/TrigCostAnalysis/src/TrigCostAnalysis.h
+++ b/Trigger/TrigCost/TrigCostAnalysis/src/TrigCostAnalysis.h
@@ -12,6 +12,8 @@
 
 #include "EnhancedBiasWeighter/EnhancedBiasWeighter.h"
 
+#include "Gaudi/Parsers/Factory.h"
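+// Pulls in the Gaudi parsers needed for the std::map-valued ROSToROBMap property below.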
+
 #include "MonitoredRange.h"
 
 #include <unordered_map>
@@ -111,6 +113,9 @@ class TrigCostAnalysis: public ::AthHistogramAlgorithm {
     Gaudi::Property<float> m_baseEventWeight { this, "BaseEventWeight", true,
       "Base events weight, other weights may be multiplied on top of this one." };
 
+    Gaudi::Property<std::map<std::string, std::vector<uint32_t>>> m_rosToRob {
+      this, "ROSToROBMap", {}, "ROS to ROB mapping" };
+
     SG::ReadHandleKey<xAOD::TrigCompositeContainer> m_costDataKey { this, "CostReadHandleKey", "HLT_TrigCostContainer",
       "Trigger cost payload container for algorithms" };
 
diff --git a/Trigger/TrigCost/TrigCostAnalysis/src/counters/CounterAlgorithm.cxx b/Trigger/TrigCost/TrigCostAnalysis/src/counters/CounterAlgorithm.cxx
index 9d942f27d495cee52f5ef6f618d0c6a4b057ff8b..23daafff343c86f2c6f1077fb814e7ef517fac70 100644
--- a/Trigger/TrigCost/TrigCostAnalysis/src/counters/CounterAlgorithm.cxx
+++ b/Trigger/TrigCost/TrigCostAnalysis/src/counters/CounterAlgorithm.cxx
@@ -3,6 +3,7 @@
 */
 
 #include "xAODTrigger/TrigCompositeContainer.h"
+#include "TrigDataAccessMonitoring/ROBDataMonitor.h"
 
 #include "CounterAlgorithm.h"
 
@@ -16,6 +17,11 @@ CounterAlgorithm::CounterAlgorithm(const std::string& name, const MonitorBase* p
   regHistogram("AlgCalls_perEvent", "Calls/Event;Calls;Events", VariableType::kPerEvent, kLinear, -0.5, 49.5);
   regHistogram("InEventView_perCall", "In Event View;Yes or No;Calls", VariableType::kPerCall, kLinear, -0.5, 1.5, 2);
   regHistogram("RoIID_perCall", "RoI ID;RoI ID;Calls", VariableType::kPerCall, kLinear, -1.5, 20.5, 22);
+  regHistogram("Request_perEvent", "Number of requests/Event;Number of requests;Events", VariableType::kPerEvent, LogType::kLinear, -0.5, 10.5, 11);
+  regHistogram("NetworkRequest_perEvent", "Number of network requests/Event;Number of requests;Events", VariableType::kPerEvent, LogType::kLinear, -0.5, 10.5, 11);
+  regHistogram("CachedROBSize_perEvent", "Total ROB Size/Event;ROB size;Events", VariableType::kPerEvent, LogType::kLinear, 0, 1024, 50);
+  regHistogram("NetworkROBSize_perEvent", "Total ROB Size/Event;ROB size;Events", VariableType::kPerEvent, LogType::kLinear, 0, 1024, 50);
+  regHistogram("RequestTime_perEvent", "ROB Elapsed Time/Event;Elapsed Time [ms];Events", VariableType::kPerEvent);
 }
 
 
@@ -45,5 +51,36 @@ StatusCode CounterAlgorithm::newEvent(const CostData& data, size_t index, const
 
   ATH_CHECK( fill("RoIID_perCall", alg->getDetail<int32_t>("roi"), weight) );
 
+  // Monitor data requests per algorithm
+  if (data.algToRequestMap().count(index)) {
+    for (size_t requestIdx : data.algToRequestMap().at(index)) {
+      const xAOD::TrigComposite* request = data.rosCollection().at(requestIdx);
+      const std::vector<unsigned> robs_history = request->getDetail<std::vector<unsigned>>("robs_history");
+      const std::vector<uint32_t> robs_size = request->getDetail<std::vector<uint32_t>>("robs_size");
+
+      bool networkRequestIncremented = false;
+      for (size_t i = 0; i < robs_size.size(); ++i) {
+        // ROB request was fetched over the network
+        if (robs_history[i] == robmonitor::RETRIEVED) {
+          ATH_CHECK( fill("NetworkROBSize_perEvent", robs_size[i], weight) );
+          networkRequestIncremented = true;
+        }
+        // ROB request was cached
+        else if (robs_history[i] == robmonitor::HLT_CACHED || robs_history[i] == robmonitor::DCM_CACHED) {
+          ATH_CHECK( fill("CachedROBSize_perEvent", robs_size[i], weight) );
+        }
+      }
+
+      ATH_CHECK( increment("Request_perEvent", weight) );
+
+      if (networkRequestIncremented) {
+        ATH_CHECK( increment("NetworkRequest_perEvent", weight) );
+      }
+
+      const float rosTime = timeToMilliSec(request->getDetail<uint64_t>("start"), request->getDetail<uint64_t>("stop"));
+      ATH_CHECK( fill("RequestTime_perEvent", rosTime, weight) );
+    }
+  }
+
   return StatusCode::SUCCESS;
 }
diff --git a/Trigger/TrigCost/TrigCostAnalysis/src/counters/CounterROS.cxx b/Trigger/TrigCost/TrigCostAnalysis/src/counters/CounterROS.cxx
index 46fd5a1ec308739215890ff89d0b906e070d0974..0f898774e2cdaca820fa98e79a1fa25435fe2418 100644
--- a/Trigger/TrigCost/TrigCostAnalysis/src/counters/CounterROS.cxx
+++ b/Trigger/TrigCost/TrigCostAnalysis/src/counters/CounterROS.cxx
@@ -3,36 +3,91 @@
 */
 
 #include "xAODTrigger/TrigCompositeContainer.h"
+#include "TrigDataAccessMonitoring/ROBDataMonitor.h"
 
 #include "CounterROS.h"
 
+#include <algorithm>
+#include <string>
+#include <vector>
+
 CounterROS::CounterROS(const std::string& name, const MonitorBase* parent) 
-  : CounterBase(name, parent)
-{}
+  : CounterBase(name, parent) {
+
+  regHistogram("Request_perEvent", "Number of requests/Event;Number of requests;Events", VariableType::kPerEvent, LogType::kLinear, -0.5, 10.5, 11);
+  regHistogram("NetworkRequest_perEvent", "Number of network requests/Event;Number of requests;Events", VariableType::kPerEvent, LogType::kLinear, -0.5, 10.5, 11);
+  regHistogram("CachedROBSize_perEvent", "Total ROB Size/Event;ROB size;Events", VariableType::kPerEvent, LogType::kLinear, 0, 1024, 50);
+  regHistogram("NetworkROBSize_perEvent", "Total ROB Size/Event;ROB size;Events", VariableType::kPerEvent, LogType::kLinear, 0, 1024, 50);
+  regHistogram("Time_perEvent", "ROB Elapsed Time/Event;Elapsed Time [ms];Events", VariableType::kPerEvent);
+  regHistogram("ROBStatus_perCall", "ROB status/Call;Status;Events", VariableType::kPerCall, LogType::kLinear, 0, robmonitor::NUM_ROBHIST_CODES+1, robmonitor::NUM_ROBHIST_CODES+1);
+}
+
+StatusCode CounterROS::newEvent(const CostData& data, size_t index, const float weight) {
+
+  // Monitor only the ROB data corresponding to this ROS
+  const xAOD::TrigComposite* tc = data.rosCollection()[index];
+  const std::vector<uint32_t> robIdsPerRequest = tc->getDetail<std::vector<uint32_t>>("robs_id");
+  const std::vector<uint32_t> robs_size = tc->getDetail<std::vector<uint32_t>>("robs_size");
+  const std::vector<unsigned> robs_history = tc->getDetail<std::vector<unsigned>>("robs_history");
+  const std::vector<uint8_t> robs_status = tc->getDetail<std::vector<uint8_t>>("robs_status");
+
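+  // On first call, cache the list of ROB ids served by this ROS (the counter's name is the ROS name).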
+  if (m_robIdsPerROS.empty()) {
+    m_robIdsPerROS = data.rosToRobMap().at(getName());
+  }
+
+  // Find all ROB requests that are both in request and correspond to this ROS
+  bool networkRequestIncremented = false;
+  for (size_t i = 0; i < robIdsPerRequest.size(); ++i) {
+    if (std::find(m_robIdsPerROS.begin(), m_robIdsPerROS.end(), robIdsPerRequest[i]) != m_robIdsPerROS.end()) {
+
+      ATH_CHECK( fill("ROBStatus_perCall", getROBHistoryBin(robs_history[i]), weight) );
+      if (robs_status[i]) {
+        // The last bin of the ROBStatus_perCall histogram stores the isStatusOk flag
+        ATH_CHECK( fill("ROBStatus_perCall", robmonitor::NUM_ROBHIST_CODES, weight) );
+      }
+
+      // ROB request was fetched over the network
+      if (robs_history[i] == robmonitor::RETRIEVED) {
+        ATH_CHECK( fill("NetworkROBSize_perEvent", robs_size[i], weight) );
+        networkRequestIncremented = true;
+      }
+      else {
+        ATH_CHECK( fill("CachedROBSize_perEvent", robs_size[i], weight) );
+      }
+    }
+  }
+
+  ATH_CHECK( increment("Request_perEvent", weight) );
+
+  if (networkRequestIncremented) {
+    ATH_CHECK( increment("NetworkRequest_perEvent", weight) );
+  }
+
+  const float rosTime = timeToMilliSec(tc->getDetail<uint64_t>("start"), tc->getDetail<uint64_t>("stop"));
+  ATH_CHECK( fill("Time_perEvent", rosTime, weight) );
 
-StatusCode CounterROS::newEvent(const CostData& /*data*/, size_t /*incrementWalltime*/, const float /*weight*/) {
   return StatusCode::SUCCESS;
 }
 
 int CounterROS::getROBHistoryBin(const unsigned history){
   int history_bin;
   switch (history) {
-    case 1: // SCHEDULED
+    case robmonitor::SCHEDULED:
       history_bin = 1;
       break;
-    case 2: // RETRIEVED
-      history_bin = 1;
+    case robmonitor::RETRIEVED:
+      history_bin = 2;
       break;
-    case 4: // HLT_CACHED
+    case robmonitor::HLT_CACHED:
       history_bin = 3;
       break;
-    case 8: // DCM_CACHED
+    case robmonitor::DCM_CACHED:
       history_bin = 4;
       break;
-    case 16: // IGNORED
+    case robmonitor::IGNORED:
       history_bin = 5;
       break;
-    case 32: // DISABLED
+    case robmonitor::DISABLED:
       history_bin = 6;
       break;
     default: // UNCLASSIFIED 
diff --git a/Trigger/TrigCost/TrigCostAnalysis/src/counters/CounterROS.h b/Trigger/TrigCost/TrigCostAnalysis/src/counters/CounterROS.h
index 96d6c6b5c042f84e8f49a01e381fa9a575cf670d..88115a3f27a54eda87d1d6e2685971b421a4d7d0 100644
--- a/Trigger/TrigCost/TrigCostAnalysis/src/counters/CounterROS.h
+++ b/Trigger/TrigCost/TrigCostAnalysis/src/counters/CounterROS.h
@@ -44,10 +44,10 @@ class CounterROS : public CounterBase {
     /**
      * @brief Concrete implementation. Monitors global properties in a single LB, or over all LB in a Range
      * @param[in] data Access to event data
-     * @param[in] incrementWalltime If 1, we should add the current events wall time to our internal histogram
+     * @param[in] index Index of data request to monitor
      * @param[in] weight Global event weight
      */
-    virtual StatusCode newEvent(const CostData& data, size_t incrementWalltime, const float weight = 1.) override;
+    virtual StatusCode newEvent(const CostData& data, size_t index, const float weight = 1.) override;
 
   private:
       /**
@@ -55,7 +55,8 @@ class CounterROS : public CounterBase {
      * @param[in] history ROBHistory value
      */
     int getROBHistoryBin(const unsigned history);
-    
+
+    std::vector<uint32_t> m_robIdsPerROS; //!< Cached list of the ROB ids served by this ROS
 };
 
 #endif // TRIGCOSTANALYSIS_COUNTERROS_H
\ No newline at end of file
diff --git a/Trigger/TrigCost/TrigCostAnalysis/src/monitors/MonitorROS.cxx b/Trigger/TrigCost/TrigCostAnalysis/src/monitors/MonitorROS.cxx
index 9352335b70e2fc6f1d10c8eab983c8e8d7a8cff8..6502f0ea2a17cad3b9d3a11c5595c03299bed2d8 100644
--- a/Trigger/TrigCost/TrigCostAnalysis/src/monitors/MonitorROS.cxx
+++ b/Trigger/TrigCost/TrigCostAnalysis/src/monitors/MonitorROS.cxx
@@ -5,12 +5,38 @@
 #include "MonitorROS.h"
 #include "../counters/CounterROS.h"
 
+#include <algorithm>
+
 MonitorROS::MonitorROS(const std::string& name, const MonitoredRange* parent)
   : MonitorBase(name, parent) {
 }
 
 
-StatusCode MonitorROS::newEvent(const CostData& /*data*/, const float /*weight*/) {
+StatusCode MonitorROS::newEvent(const CostData& data, const float weight) {
+
+  // Prepare the map from ROB id to the corresponding ROS name
+  if (m_robToRos.empty()) {
+    const std::map<std::string, std::vector<uint32_t>>& rosToRobMap = data.rosToRobMap();
+    for (const auto& rosRequest : rosToRobMap) {
+      for (uint32_t robId : rosRequest.second) {
+        m_robToRos[robId] = rosRequest.first;
+      }
+    }
+  }
+
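+  // Each entry in rosCollection is one data request; fill every ROS counter it touched exactly once.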
+  for (const xAOD::TrigComposite* tc : data.rosCollection()) {
+    auto robIds = tc->getDetail<std::vector<uint32_t>>("robs_id");
+
+    // Create set of unique ROS for this request
+    std::set<std::string> rosPerRequest;
+    for (uint32_t robId : robIds) {
+      rosPerRequest.insert(m_robToRos[robId]);
+    }
+
+    for (const std::string& rosName : rosPerRequest) {
+      ATH_CHECK( getCounter(rosName)->newEvent(data, tc->index(), weight) );
+    }
+  }
 
   return StatusCode::SUCCESS;
 }
diff --git a/Trigger/TrigCost/TrigCostAnalysis/src/monitors/MonitorROS.h b/Trigger/TrigCost/TrigCostAnalysis/src/monitors/MonitorROS.h
index 610a0606d84fadc796fee3692c815b245cd65da9..47d59d3c50bc7cb5aef28525260ef21c62faa25c 100644
--- a/Trigger/TrigCost/TrigCostAnalysis/src/monitors/MonitorROS.h
+++ b/Trigger/TrigCost/TrigCostAnalysis/src/monitors/MonitorROS.h
@@ -53,6 +53,9 @@ class MonitorROS : public MonitorBase {
      * @return Owning unique ptr object typed on the CounterBase base class which points to concrete Counter of specialised type.
      */
     virtual std::unique_ptr<CounterBase> newCounter(const std::string& name) override; 
+
+  private:
+    std::map<uint32_t, std::string> m_robToRos; //!< Cached mapping of each ROB id to its corresponding ROS name
 };
 
 #endif // TRIGCOSTANALYSIS_MONITORROS_H
\ No newline at end of file
diff --git a/Trigger/TrigHypothesis/TrigEgammaHypo/python/TrigEgammaPrecisionEtcutHypoTool.py b/Trigger/TrigHypothesis/TrigEgammaHypo/python/TrigEgammaPrecisionEtcutHypoTool.py
index 93df21968a2e17c9de87e810adf32b4c7c6b16d3..dc8ce649c0b2f1bd9d8a225e83e4a2113274793b 100644
--- a/Trigger/TrigHypothesis/TrigEgammaHypo/python/TrigEgammaPrecisionEtcutHypoTool.py
+++ b/Trigger/TrigHypothesis/TrigEgammaHypo/python/TrigEgammaPrecisionEtcutHypoTool.py
@@ -1,6 +1,6 @@
 # Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
-def _IncTool(name, threshold, sel):
+def _IncTool(name):
 
     from TrigEgammaHypo.TrigEgammaHypoConf import TrigEgammaPrecisionEtcutHypoTool    
 
@@ -12,16 +12,9 @@ def _IncTool(name, threshold, sel):
 
 def TrigEgammaPrecisionEtcutHypoToolFromDict( d ):
     """ Use menu decoded chain dictionary to configure the tool """
-    cparts = [i for i in d['chainParts'] if ((i['signature']=='Electron') or (i['signature']=='Photon'))]
-
-    def __th(cpart):
-        return cpart['threshold']
-    
-    def __sel(cpart):
-        return cpart['addInfo'][0] if cpart['addInfo'] else cpart['IDinfo']
     
     name = d['chainName']
         
-    return _IncTool( name, __th( cparts[0]),  __sel( cparts[0] ) )
-                   
+    return _IncTool( name )
     
diff --git a/Trigger/TrigHypothesis/TrigEgammaHypo/src/TrigEgammaPrecisionEtcutHypoAlgMT.cxx b/Trigger/TrigHypothesis/TrigEgammaHypo/src/TrigEgammaPrecisionEtcutHypoAlgMT.cxx
index 26f4ee9e12fd162111844f822088b43ab1733814..da60633b8c3ee6ecb49bf765a6d133a7f4e43432 100644
--- a/Trigger/TrigHypothesis/TrigEgammaHypo/src/TrigEgammaPrecisionEtcutHypoAlgMT.cxx
+++ b/Trigger/TrigHypothesis/TrigEgammaHypo/src/TrigEgammaPrecisionEtcutHypoAlgMT.cxx
@@ -18,9 +18,6 @@ TrigEgammaPrecisionEtcutHypoAlgMT::TrigEgammaPrecisionEtcutHypoAlgMT( const std:
 StatusCode TrigEgammaPrecisionEtcutHypoAlgMT::initialize() {
   ATH_CHECK( m_hypoTools.retrieve() );
   
-  ATH_CHECK( m_clustersKey.initialize() );
-  renounce( m_clustersKey );// clusters are made in views, so they are not in the EvtStore: hide them
-
   return StatusCode::SUCCESS;
 }
 
@@ -35,19 +32,19 @@ StatusCode TrigEgammaPrecisionEtcutHypoAlgMT::execute( const EventContext& conte
 
   // new output decisions
   SG::WriteHandle<DecisionContainer> outputHandle = createAndStore(decisionOutput(), context ); 
-  auto outputDecision = outputHandle.ptr();
+  DecisionContainer* outputDecision = outputHandle.ptr();
 
   // input for decision
   std::vector<ITrigEgammaPrecisionEtcutHypoTool::ClusterInfo> toolInput;
 
   // loop over previous decisions
   size_t counter=0;
-  for ( auto previousDecision: *previousDecisionsHandle ) {
+  for ( const Decision* previousDecision: *previousDecisionsHandle ) {
   
     const auto featureEL = findLink<xAOD::CaloClusterContainer>( previousDecision, featureString() );
     ATH_CHECK(featureEL.isValid());
     auto d = newDecisionIn( outputDecision, name() );
-    d->setObjectLink<>( featureString(),  featureEL.link );
+    d->setObjectLink<xAOD::CaloClusterContainer>( featureString(),  featureEL.link );
     
     TrigCompositeUtils::linkToPrevious( d, decisionInput().key(), counter );
     toolInput.emplace_back( d, previousDecision );   
diff --git a/Trigger/TrigHypothesis/TrigEgammaHypo/src/TrigEgammaPrecisionEtcutHypoAlgMT.h b/Trigger/TrigHypothesis/TrigEgammaHypo/src/TrigEgammaPrecisionEtcutHypoAlgMT.h
index dd156fca0673a4e0351d99ea423f041550617b9d..4face508d29b77860271c297fef08f086ad57c45 100644
--- a/Trigger/TrigHypothesis/TrigEgammaHypo/src/TrigEgammaPrecisionEtcutHypoAlgMT.h
+++ b/Trigger/TrigHypothesis/TrigEgammaHypo/src/TrigEgammaPrecisionEtcutHypoAlgMT.h
@@ -31,9 +31,7 @@ class TrigEgammaPrecisionEtcutHypoAlgMT : public ::HypoBase {
 
  private: 
   ToolHandleArray< ITrigEgammaPrecisionEtcutHypoTool > m_hypoTools { this, "HypoTools", {}, "Hypo tools" };
-     
-  SG::ReadHandleKey< xAOD::CaloClusterContainer > m_clustersKey { this, "CaloClusters", "CaloClusters", "CaloClusters in roi" };  
-  
+      
 
 }; 
 
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/AntiBasicCleaner.h b/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/AntiBasicCleaner.h
index c66a4611e51556d0503e97e1d47b245ca30f3fbf..ac617d0e71ca50a2904d3fddf8add6229980d8ec 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/AntiBasicCleaner.h
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/AntiBasicCleaner.h
@@ -28,7 +28,7 @@ public:
                    float negativeEThreshold);
   ~AntiBasicCleaner(){}
   
-  bool operator()(const pHypoJet&) const override;
+  bool operator()(const HypoJet::IJet&) const override;
   std::string getName() const noexcept override;
 };
 
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/AntiLlpCleaner.h b/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/AntiLlpCleaner.h
index ee8e2d62759a036bec7d708c5ac4879a64f3cf65..02bc2461749040651c9a2b52998aa866964db45c 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/AntiLlpCleaner.h
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/AntiLlpCleaner.h
@@ -31,7 +31,7 @@ public:
 
   ~AntiLlpCleaner(){}
 
-  bool operator()(const pHypoJet& jet) const override;
+  bool operator()(const HypoJet::IJet& jet) const override;
   std::string getName() const noexcept override;
 };
 
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/AntiLooseCleaner.h b/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/AntiLooseCleaner.h
index 13785181d35b31e385d777499a868df3f1000bcc..16e87aaa03779804986bf44f8a255bfbe34dd353 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/AntiLooseCleaner.h
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/AntiLooseCleaner.h
@@ -31,7 +31,7 @@ public:
 
   ~AntiLooseCleaner(){}
 
-  bool operator()(const pHypoJet& jet) const override;
+  bool operator()(const HypoJet::IJet& jet) const override;
   std::string getName() const noexcept override;
 
 };
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/AntiTightCleaner.h b/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/AntiTightCleaner.h
index 6493eb0283bf29d68216490d642bfae69f5984f3..cc0a90ec2dc297e983ae4c7f24449ff430037d3c 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/AntiTightCleaner.h
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/AntiTightCleaner.h
@@ -31,7 +31,7 @@ public:
 
   ~AntiTightCleaner() {}
 
-  bool operator()(const pHypoJet& jet) const override;
+  bool operator()(const HypoJet::IJet& jet) const override;
   std::string getName() const noexcept override;
 };
 
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/BasicCleaner.h b/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/BasicCleaner.h
index c1ed78d04b4cb8fe3898c9a0fa2998e466db5359..08c0c9ddb39bf6b9b2185e4348080257893af4e9 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/BasicCleaner.h
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/BasicCleaner.h
@@ -29,7 +29,7 @@ class BasicCleaner: public ICleaner{
   ~BasicCleaner() {}
 
 
-  bool operator()(const pHypoJet& jet) const override;
+  bool operator()(const HypoJet::IJet& jet) const override;
   std::string toString() const noexcept override;
   std::string getName() const noexcept override;
 
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/CleanerBridge.h b/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/CleanerBridge.h
index 89bbaf2e8d4529f10cb51c91b8e0a62398c5745d..21d9eae791bfa23bbe44aa9fcaa58a5fe92e8740 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/CleanerBridge.h
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/CleanerBridge.h
@@ -29,7 +29,7 @@ class CleanerBridge{
   m_pCleaner(cleaner){}
 
   bool operator()(const pHypoJet& jet) const{
-    return m_pCleaner -> operator()(jet);
+    return m_pCleaner -> operator()(*jet);
   }
 
   std::string toString() const {
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/EtaEtCleaner.h b/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/EtaEtCleaner.h
index 5b46fc67a6fbb0906fd916c7dc754704d9ae2e1f..32d89c34bdb48ab8d95c11aac544453d3d978413 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/EtaEtCleaner.h
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/EtaEtCleaner.h
@@ -29,7 +29,7 @@ public:
 
   ~EtaEtCleaner(){}
   
-  bool operator()(const pHypoJet&) const override;
+  bool operator()(const HypoJet::IJet&) const override;
   std::string toString() const noexcept override;
   std::string getName() const noexcept override;
 
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/HypoJetDefs.h b/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/HypoJetDefs.h
index bd6906818eb5ac0583321653f978f33cc67b9ddd..249502d3e9d290889a4434be1114fbcc31eb704b 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/HypoJetDefs.h
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/HypoJetDefs.h
@@ -22,8 +22,8 @@
 #include <queue>
 #include <ostream>
 
-//using pHypoJet = std::shared_ptr<const HypoJet::IJet>;
-using pHypoJet = const HypoJet::IJet*;
+using pHypoJet = std::shared_ptr<const HypoJet::IJet>;
+//using pHypoJet = const HypoJet::IJet*;
 using HypoJetVector = std::vector<pHypoJet>;
 using HypoJetIter = HypoJetVector::iterator;
 using HypoJetCIter = HypoJetVector::const_iterator;
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/ICleaner.h b/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/ICleaner.h
index 03859c040bb91c7cca8f6c106076f4b4792ac5cf..fa27d22a5017b69741df29cb62a0d2a5f1e7f8d4 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/ICleaner.h
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/ICleaner.h
@@ -23,7 +23,7 @@ class ICleaner{
  public:
   // Abstract interface used by a factory function
   virtual ~ICleaner(){}
-  virtual bool operator()(const pHypoJet&) const = 0;
+  virtual bool operator()(const HypoJet::IJet&) const = 0;
   virtual std::string toString() const noexcept = 0;
   virtual std::string getName() const noexcept= 0;
 };
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/LlpCleaner.h b/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/LlpCleaner.h
index 68ea64eb2b441128ec86e12276454f31457831b7..a562cda15d204ca83a9762d215ecbc0a83386ef4 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/LlpCleaner.h
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/LlpCleaner.h
@@ -30,7 +30,7 @@ class LlpCleaner: public ICleaner{
   
   ~LlpCleaner() {}
 
-  bool operator()(const pHypoJet& jet) const override;
+  bool operator()(const HypoJet::IJet& jet) const override;
   std::string toString() const noexcept override;
   std::string getName() const noexcept override;
 
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/LooseCleaner.h b/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/LooseCleaner.h
index 783102c5f59da5efa754e092dec0dc2236b383d8..33d3420406c049b6d133b8b2717023eb4fd4983a 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/LooseCleaner.h
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/LooseCleaner.h
@@ -30,7 +30,7 @@ class LooseCleaner: public ICleaner{
   
   ~LooseCleaner() {}
 
-  bool operator()(const pHypoJet& jet) const override;
+  bool operator()(const HypoJet::IJet& jet) const override;
   std::string toString() const noexcept override;
   std::string getName() const noexcept override;
 
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/NullCleaner.h b/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/NullCleaner.h
index 9d1aa77f8bb8640ace5fa81595579663b701bb62..1fbb1ad8f2759e52c01ab5d562a65199c85c9187 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/NullCleaner.h
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/NullCleaner.h
@@ -29,7 +29,7 @@ class NullCleaner: public ICleaner{
 public:
   ~NullCleaner(){}
 
-  bool operator()(const pHypoJet&) const override;
+  bool operator()(const HypoJet::IJet&) const override;
   std::string toString() const noexcept override;
   std::string getName() const noexcept override;
 };
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/TightCleaner.h b/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/TightCleaner.h
index dd7f06bc5993a834f3702aabf39d9ef42f4f4aaf..f4df147ffb88c9bb730e27986041013d090f91fe 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/TightCleaner.h
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/TrigHLTJetHypo/TrigHLTJetHypoUtils/TightCleaner.h
@@ -30,7 +30,7 @@ class TightCleaner: public ICleaner{
   
   ~TightCleaner() {}
 
-  bool operator()(const pHypoJet& jet) const override;
+  bool operator()(const HypoJet::IJet& jet) const override;
   std::string toString() const noexcept override;
   std::string getName() const noexcept override;
 
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/python/ConditionsToolSetterFastReduction.py b/Trigger/TrigHypothesis/TrigHLTJetHypo/python/ConditionsToolSetterFastReduction.py
index 4c40fe3ef828d8b390e56b12e78b25acbc9cd2a1..2162fa0f2f638fcef1d5f34454c1065c6cb53cce 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/python/ConditionsToolSetterFastReduction.py
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/python/ConditionsToolSetterFastReduction.py
@@ -8,8 +8,6 @@ from __future__ import print_function
 
 from AthenaConfiguration.ComponentFactory import CompFactory
 
-from TrigHLTJetHypo.node import Node
-
 from collections import defaultdict
 
 import copy
@@ -22,7 +20,8 @@ def is_leaf(node):
 
 
 def is_inner(node):
-    return node.scenario in ('root', 'and', 'combgen', 'partgen' , 'inserted')
+    # return node.scenario in ('root', 'and', 'combgen', 'partgen' , 'inserted')
+    return node.scenario in ('root', 'all', 'inserted')
 
 
 class ConditionsToolSetterFastReduction(object):
@@ -227,39 +226,14 @@ class ConditionsToolSetterFastReduction(object):
         more than one set of shared nodes. These are generated
         if an "And" not is present in the hypo tree"""
 
-
-        if node.scenario == 'root':
-            for cn in node.children:
-                self._find_shared(cn, shared)
-
-        elif node.scenario == 'and':
-            for cn in node.children:
-                shared.append([])
-                self._find_shared(cn, shared)
-
-        elif node.scenario == 'partgen':
-            for cn in node.children:
-                self._find_shared(cn, shared)
-
-        elif node.scenario == 'inserted':
-            for cn in node.children:
-                self._find_shared(cn, shared)
-
-        elif is_leaf(node):
-            if len(node.children) == 0:
-                if len(shared) == 0:
-                    shared.append([node])
-                else:
-                    shared[-1].append(node)
-
-            else:
-                for cn in node.children:
-                    self._find_shared(cn, shared)
-
+        if node.scenario == 'simple':
+            shared.append(node.node_id)
         else:
-            raise RuntimeError('%s illegal node. scenario: %s' %
-                               (self.__class__.__name__,
-                               node.scenario))
+            shared.append(-1)
+            
+        for cn in node.children:
+            self._find_shared(cn, shared)
+
 
         return shared
 
@@ -277,7 +251,6 @@ class ConditionsToolSetterFastReduction(object):
 
     def _fill_tree_map(self, node, tmap):
         tmap[node.node_id] = node.parent_id
-
         for cn in node.children:
             self._fill_tree_map(cn, tmap)
 
@@ -313,7 +286,7 @@ class ConditionsToolSetterFastReduction(object):
         for cn in node.children:
             self._check_scenarios(cn)
 
-    def mod(self, node):
+    def mod(self, tree):
         """Entry point for this module. 
         Modifies a  (usually compound) hypo tree node to 
         reduce it to form from whuch the treevector, conditionsVector and
@@ -327,22 +300,17 @@ class ConditionsToolSetterFastReduction(object):
         # navigate the tree filling in node-parent and node- Condtion factory
         # relations
 
-
-        # Alg step 1: add root node
-        root = Node(scenario='root')
-        root.children = [node]
-
-        self._check_scenarios(root)
+        self._check_scenarios(tree)
         
         # add Condition builders to leaf nodes.
-        self._set_conditions(root)
+        self._set_conditions(tree)
         
-        # Alg step 2: remove combgen nodes
-        self._remove_combgen(root)
+#         # Alg step 2: remove combgen nodes
+#         self._remove_combgen(root)
 
         # Alg step 3: split leaf nodes with multiple Conditions with a
         # single Condition
-        self._split_leaves(root)
+#         self._split_leaves(root)
         
         # Alg step 4: remove partgen nodes
         # single Condition
@@ -350,42 +318,20 @@ class ConditionsToolSetterFastReduction(object):
         # Alg step 5: identify the leaf nodes that are to shared
         # ie that see the input jet collection. Then remove And nodes
         shared = []
-        slist = self._find_shared(root, shared)
-
-        # remove top stub node if possible
-        def is_prunable(node):
-            assert root.scenario == 'root'
-            return len(root.children) == 1 and is_inner(root.children[0])
+        self.shared = self._find_shared(tree, shared)
+        if shared[-1] != -1: self.shared.append(-1)
 
-        if is_prunable(root):
-            root = root.children[0]
-            root.scenario
-        
-        root.set_ids(node_id=0, parent_id = 0)
-        
-
-        # would like to pass a list of lists to the C++ tools
-        # but this cannot be done using Gaudi::Properties.
-        # use -1 to separate the list sections all entries of which
-        # are >= 0.
-
-        self.shared = []
-        for ilist in slist:
-            for n in ilist:
-                self.shared.append(n.node_id)
-            self.shared.append(-1)
-
-        self.shared = self.shared[:-1] # remnove trailing -1
-            
+        log.debug('shared %s', self.shared)
         tree_map = {}
-        self._fill_tree_map(root, tree_map)
+        self._fill_tree_map(tree, tree_map)
+
         for k, v in tree_map.items():
-            log.debug("Tree map debug %s %s", str(k), str(v))
+            log.debug("Tree map debug ", str(k), str(v))
             
         self.treeVec = self._map_2_vec(tree_map)
 
         conditionsMap = {}
-        self._fill_conditions_map(root, conditionsMap)
+        self._fill_conditions_map(tree, conditionsMap)
         self.conditionsVec = self._map_2_vec(conditionsMap)
                
         # make a config tool and provide it with condition makers
@@ -394,10 +340,10 @@ class ConditionsToolSetterFastReduction(object):
         config_tool.treeVector = self.treeVec
         config_tool.sharedVector = self.shared
 
-        nodestr = 'n%dp%d' % (node.node_id, node.parent_id)
+        nodestr = 'n%dp%d' % (tree.node_id, tree.parent_id)
         helper_tool = self._get_tool_instance('helper', extra=nodestr)
         helper_tool.HypoConfigurer = config_tool
-        helper_tool.node_id = node.node_id
-        helper_tool.parent_id = node.parent_id
+        helper_tool.node_id = tree.node_id
+        helper_tool.parent_id = tree.parent_id
 
         self.tool = helper_tool
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/python/ConditionsToolSetterHT.py b/Trigger/TrigHypothesis/TrigHLTJetHypo/python/ConditionsToolSetterHT.py
index 4af4a67d4c26bca4901590eafb9cb99c7b3d5878..bc866daca9b8a002ccf706229377d0cc9007bafe 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/python/ConditionsToolSetterHT.py
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/python/ConditionsToolSetterHT.py
@@ -49,7 +49,7 @@ class ConditionsToolSetterHT(object):
 
         return rep
 
-    def mod(self, node):
+    def mod(self, anode):
         """Entry point for this module. HT specific.
         Set up 
         TrigJetConditionConfig_ht,
@@ -60,13 +60,14 @@ class ConditionsToolSetterHT(object):
         # navigate the tree filling in node-parent and node- Condtion factory
         # relations
 
-
-        # root = Node(scenario='root')
-        # root.children = [node]
-
-        # self._check_scenarios(root)
-
         # root is an alias for node - as in ConditionTooSetterFastReduction
+        node = None
+        if anode.scenario == 'root':
+            assert len(anode.children) == 1
+            node = anode.children[0]
+        else:
+            node = anode
+            
         assert node.scenario == 'ht'
 
         print (node)
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/python/NodeSplitterVisitor.py b/Trigger/TrigHypothesis/TrigHLTJetHypo/python/NodeSplitterVisitor.py
new file mode 100644
index 0000000000000000000000000000000000000000..8a6c8187a022bdce144f3f611a58134a89305e7c
--- /dev/null
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/python/NodeSplitterVisitor.py
@@ -0,0 +1,133 @@
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
+
+"""Instantiates TrigJetHypoToolConfig_fastreduction AlgTool 
+from a hypo tree."""
+
+from __future__ import print_function
+
+from TrigHLTJetHypo.node import Node
+
+import copy
+
+from AthenaCommon.Logging import logging
+log = logging.getLogger( 'NodeSplitterVisitor' )
+
+def is_leaf(node):
+    return node.scenario in  ('simple', 'etaet', 'dijet', 'qjet')
+
+
+def is_inner(node):
+    # return node.scenario in ('root', 'and', 'combgen', 'partgen' , 'inserted')
+    return node.scenario in ('root', 'all', 'inserted')
+
+
+class NodeSplitterVisitor(object):
+
+    """Visitor to set instantiated AlgTools to a jet hypo tree"""
+
+
+    def mod(self, node):
+
+        new_children = []
+        for c in node.children:
+            if c.scenario == 'simple':
+                for c_a in c.conf_attrs:
+                    n_c = copy.deepcopy(c)
+                    n_c.conf_attrs = [c_a]
+                    new_children.append(n_c)
+            else:
+                new_children.append(c)
+                
+
+        node.children = new_children
+
+if __name__ == '__main__':
+    from chainDict2jetLabel import chainDict2jetLabel 
+    from TrigHLTJetHypo.treeVisitors import TreeParameterExpander
+    from TriggerMenuMT.HLTMenuConfig.Menu.ChainDefInMenu import ChainProp
+    from TriggerMenuMT.HLTMenuConfig.Menu.DictFromChainName import dictFromChainName
+    from TriggerMenuMT.HLTMenuConfig.Menu.Physics_pp_run3_v1 import (
+        SingleJetGroup,
+        MultiJetGroup)
+    from  TrigHLTJetHypo.ChainLabelParser import ChainLabelParser
+
+
+    def testChainDictMaker():
+        chain_props = [
+            ChainProp(name='HLT_j260_320eta490_L1J75_31ETA49',
+                      groups=SingleJetGroup),
+            
+            ChainProp(name='HLT_j80_j60_L1J15',
+                      l1SeedThresholds=['FSNOSEED']*2, groups=MultiJetGroup),
+            
+            ChainProp(name='HLT_2j80_3j60_L1J15',
+                    l1SeedThresholds=['FSNOSEED']*2, groups=MultiJetGroup),
+
+
+            ChainProp(name='HLT_j0_HTSEP1000htSEP100etSEP0eta320_L1J15',
+                      l1SeedThresholds=['FSNOSEED'], groups=MultiJetGroup),
+
+
+            ChainProp(name='HLT_j80_0eta240_2j60_320eta490_j0_dijetSEP80j1etSEP0j1eta240SEP80j2etSEP0j2eta240SEP700djmass_L1J20',
+                      l1SeedThresholds=['FSNOSEED']*3,
+                      groups=MultiJetGroup),
+            
+            ChainProp(name='HLT_j0_vbenfSEP30etSEP34mass35SEP50fbet_L1J20',
+                      l1SeedThresholds=['FSNOSEED'],
+                      groups=MultiJetGroup),
+            
+            ChainProp(name='HLT_10j40_L1J15',
+                      l1SeedThresholds=['FSNOSEED'], groups=MultiJetGroup),
+            
+
+            # ChainProp(name='HLT_j70_j50 _0eta490_invm1000j50_dphi20_deta40_L1J20',
+            #          l1SeedThresholds=['FSNOSEED']*2,
+            #          groups=MultiJetGroup),
+            
+        ]
+
+        result = []
+        for cp in chain_props:
+            chain_dict = dictFromChainName(cp)
+            result.append((cp.name, chain_dict))
+
+        return result
+
+
+    dicts = testChainDictMaker()
+    for d in dicts:
+        print('')
+        # print (d)
+
+    print ('\n chain_labels:\n')
+
+    for d in dicts:
+        print (d[0])
+        l = chainDict2jetLabel(d[1])
+        print (l)
+        print ()
+
+
+        parser = ChainLabelParser(l)
+        rootless_tree = parser.parse()
+
+        # add a root node so that split simple nodes can connect.
+        tree = Node('root')
+        tree.children = [rootless_tree]
+        tree.node_id = 0
+        tree.parent_id = 0
+        rootless_tree.tree_top = False
+        tree.tree_top = True
+
+        #expand strings of cuts to a cut dictionary
+        visitor = TreeParameterExpander()
+        tree.accept(visitor)
+
+        visitor = NodeSplitterVisitor()
+        tree.accept(visitor)
+        tree.set_ids(0, 0)
+
+        print ('\n - final dump ' + d[0])
+        print ('label ', l, '\n')
+        print (tree.dump())
+        print ('\n - end dump ')
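
The new NodeSplitterVisitor above rewrites a hypo tree in place. A minimal, self-contained illustration of that step, using a toy stand-in for TrigHLTJetHypo.node.Node (the real Node class carries more state): each 'simple' child with N sets of condition attributes becomes N children with one set each.

# Sketch only: ToyNode and split_simple_children are illustrative stand-ins
# for the real Node class and NodeSplitterVisitor.mod.
import copy


class ToyNode:
    def __init__(self, scenario, conf_attrs=None):
        self.scenario = scenario
        self.conf_attrs = conf_attrs or []
        self.children = []


def split_simple_children(node):
    new_children = []
    for c in node.children:
        if c.scenario == 'simple':
            for c_a in c.conf_attrs:
                n_c = copy.deepcopy(c)
                n_c.conf_attrs = [c_a]   # one attribute set per split node
                new_children.append(n_c)
        else:
            new_children.append(c)
    node.children = new_children


root = ToyNode('root')
root.children = [ToyNode('simple', conf_attrs=[{'et': 40}, {'et': 50}])]
split_simple_children(root)
assert len(root.children) == 2
assert all(len(c.conf_attrs) == 1 for c in root.children)
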
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/python/TrigJetHypoToolConfig.py b/Trigger/TrigHypothesis/TrigHLTJetHypo/python/TrigJetHypoToolConfig.py
index f433cd6f8264ba9b78597035d0a7c2a1a1c851a0..e5be52b755bf1dd20bce953e4d71c5f63d19bad0 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/python/TrigJetHypoToolConfig.py
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/python/TrigJetHypoToolConfig.py
@@ -12,9 +12,10 @@ from TrigHLTJetHypo.ConditionsToolSetterHT import ConditionsToolSetterHT
 
 from  TrigHLTJetHypo.chainDict2jetLabel import chainDict2jetLabel
 
-# from TrigHLTJetHypo.chainDict2jetLabel import make_simple_comb_label as make_simple_label # TIMING studies
-
 from  TrigHLTJetHypo.ChainLabelParser import ChainLabelParser
+from TrigHLTJetHypo.node import Node
+from TrigHLTJetHypo.NodeSplitterVisitor import NodeSplitterVisitor
+
 
 from AthenaCommon.Logging import logging
 log = logging.getLogger( 'TrigJetHypoToolConfig' )
@@ -25,13 +26,24 @@ def  trigJetHypoToolHelperFromDict_(chain_label,
 
     parser = ChainLabelParser(chain_label, debug=False)
 
-    tree = parser.parse()
+    rootless_tree = parser.parse()
+    
+    # add a root node so that split simple nodes can connect.
+    tree = Node('root')
+    tree.children = [rootless_tree]
+    tree.node_id = 0
+    tree.parent_id = 0
+    rootless_tree.tree_top = False
+    tree.tree_top = True
 
     #expand strings of cuts to a cut dictionary
     visitor = TreeParameterExpander()
     tree.accept(visitor)
     log.debug(visitor.report())
 
+    visitor = NodeSplitterVisitor()
+    tree.accept(visitor)
+
     # tell the child nodes who their parent is.
     tree.set_ids(node_id=0, parent_id=0)
 
@@ -48,7 +60,7 @@ def  trigJetHypoToolHelperFromDict_(chain_label,
     toolSetter.mod(tree)
     tool = toolSetter.tool
 
-    log.debug(visitor.report())
+    log.debug(toolSetter.report())
 
     return tool
 
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/python/chainDict2jetLabel.py b/Trigger/TrigHypothesis/TrigHLTJetHypo/python/chainDict2jetLabel.py
index 5f87f4e0f1b6c47715f357bdc8f38ab9ed03f418..c1e28d3aaf21eab59af17ce70fe90327ab454a5c 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/python/chainDict2jetLabel.py
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/python/chainDict2jetLabel.py
@@ -72,63 +72,6 @@ def _make_simple_label(chain_parts):
     return label
 
 
-def _make_simple_partition_label(chain_dict):
-    """Marshal information deom the selected chainParts to create a
-    'simple_partition' label.
-    """
-
-    cps = chain_dict['chainParts']
-    if not (_select_simple_chainparts(cps)):
-        raise NotImplementedError(
-            'chain fails substring selection: not "simple": %s' % (
-                chain_dict['chainName']))
-    
-    label = 'simplepartition(['
-    for cp in cps:
-        smcstr =  str(cp['smc'])
-        if smcstr == 'nosmc':
-            smcstr = ''
-        for i in range(int(cp['multiplicity'])):
-            # condition_str = '(%set,%s,%s)' % (str(cp['threshold']),
-            #                                  str(cp['etaRange']),
-            #                                  smcstr,)
-            condition_str = '(%set,%s' % (str(cp['threshold']),
-                                              str(cp['etaRange']),)
-            if smcstr:
-                condition_str += ',%s)'
-            else:
-                condition_str += ')'
-            label += condition_str
-    label += '])'
-    return label
-
-
-def _make_simple_comb_label(chain_dict):
-    """Marshal information deom the selected chainParts to create a
-    'simple' label NOTE: DO NOT USE this method.
-    THIS CHAINLABEL IS FOR TIMING STUDIES ONLY.
-    It has n^2 behaviour rather than n obtained using _make_simple_label.
-    """
-
-    cps = chain_dict['chainParts']
-    if not (_select_simple_chainparts(cps)):
-        raise NotImplementedError(
-            'chain fails substring selection: not "simple": %s' % (
-                chain_dict['chainName']))
-    
-    simple_strs = []
-
-    for cp in cps:
-        print(cp)
-        simple_strs.append(_make_simple_label([cp]))
-
-        label = 'combgen([(%d)]' % len(cps)
-        for s in simple_strs:
-            label += ' %s ' % s
-        label += ')'
-    return label
-
-
 def _args_from_scenario(scenario):
     separator = 'SEP'
     
@@ -161,7 +104,7 @@ def _make_vbenf_label(chain_parts):
     assert scenario.startswith('vbenf')
     args = _args_from_scenario(scenario)
     if not args:
-        return 'and([]simple([(50et)(70et)])combgen([(2)] dijet([(900djmass, 26djdphi)])))'        
+        return 'all([]simple([(50et)(70et)])dijet([(900djmass, 26djdphi)] all[], all[]))'
     arg_res = [
         re.compile(r'(?P<lo>\d*)(?P<key>fbet)(?P<hi>\d*)'),
         re.compile(r'(?P<lo>\d*)(?P<key>mass)(?P<hi>\d*)'),
@@ -199,20 +142,16 @@ def _make_vbenf_label(chain_parts):
     assert len(args) == 0
 
     return """
-    and
+    all
     (
       []
       simple
       (
         [(%(etlo).0fet, 500neta)(%(etlo).0fet, peta500)]
       )
-      combgen
+      dijet
       (
-        [(10et, 0eta320)]
-        dijet
-        (
-          [(%(masslo).0fdjmass, 26djdphi)]
-        ) 
+        [(%(masslo).0fdjmass, 26djdphi)]
         simple
         (
           [(10et, 0eta320)(20et, 0eta320)]
@@ -281,16 +220,12 @@ def _make_dijet_label(chain_parts):
     assert len(args) == 0
 
     return """
-    combgen(
-            [(2)(%(j1etlo).0fet, %(j1etalo).0feta%(j1etahi).0f)
-                (%(j1etlo).0fet, %(j1etalo).0feta%(j1etahi).0f)
-               ]
-    
-            dijet(
-                  [(%(djmasslo).0fdjmass)])
-            simple([(%(j1etlo).0fet, %(j1etalo).0feta%(j1etahi).0f)
-                    (%(j2etlo).0fet, %(j2etalo).0feta%(j2etahi).0f)])
-            )""" % argvals
+    all([]
+        dijet(
+              [(%(djmasslo).0fdjmass)])
+        simple([(%(j1etlo).0fet, %(j1etalo).0feta%(j1etahi).0f)
+                (%(j2etlo).0fet, %(j2etalo).0feta%(j2etahi).0f)])
+    )""" % argvals
 
 
 def _make_agg_label(chain_parts):
@@ -363,45 +298,6 @@ def _make_agg_label(chain_parts):
     return result
     
 
-
-def _make_combinationsTest_label(chain_parts):
-    """make test label for  combinations helper with two simple children."""
-
-    assert len(chain_parts) == 1
-    scenario = chain_parts[0]['hypoScenario']
-    
-    assert scenario == 'combinationsTest'
-
-   
-
-    return """
-    combgen(
-            [(2)(20et, 0eta320)]
-    
-            simple([(40et, 0eta320) (50et, 0eta320)])
-            simple([(35et, 0eta240) (55et, 0eta240)])
-            )"""
-
-
-def _make_partitionsTest_label(chain_parts):
-    """make test label for  combinations helper with two simple children."""
-
-    assert len(chain_parts) == 1
-    scenario = chain_parts[0]['hypoScenario']
-    
-    assert scenario == 'partitionsTest'
-
-   
-
-    return """
-    partgen(
-            [(20et, 0eta320)]
-    
-            simple([(40et, 0eta320) (50et, 0eta320)])
-            simple([(35et, 0eta240) (55et, 0eta240)])
-            )"""
-
-
 def chainDict2jetLabel(chain_dict):
     """Entry point to this Module. Return a chain label according to the
     value of cp['hypoScenario'], where cp is an element of list/
@@ -420,8 +316,6 @@ def chainDict2jetLabel(chain_dict):
         'agg':   _make_agg_label,
         'vbenf': _make_vbenf_label,
         'dijet': _make_dijet_label,
-        'combinationsTest': _make_combinationsTest_label,
-        'partitionsTest': _make_partitionsTest_label,
     }
 
     # chain_part - scenario association
@@ -446,7 +340,7 @@ def chainDict2jetLabel(chain_dict):
     if nlabels == 1: return labels[0]
     if nlabels == 2:
         alabel = """\
-and([]
+all([]
     %s
     %s)""" % (tuple(labels))
         return alabel
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/python/testChainDictMaker.py b/Trigger/TrigHypothesis/TrigHLTJetHypo/python/testChainDictMaker.py
index 312abad23137946c8b0fcaf66f80a6857404749e..21a0691446e324f9bfe6078a2373c848803a3c4e 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/python/testChainDictMaker.py
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/python/testChainDictMaker.py
@@ -41,6 +41,8 @@ def testChainDictMaker():
         ChainProp(name='HLT_10j40_L1J15',
                   l1SeedThresholds=['FSNOSEED'], groups=MultiJetGroup),
 
+        ChainProp(name='HLT_j0_aggSEP1000htSEP30etSEP0eta320_L1J20',
+                  groups=SingleJetGroup),
 
         # ChainProp(name='HLT_j70_j50 _0eta490_invm1000j50_dphi20_deta40_L1J20',
         #          l1SeedThresholds=['FSNOSEED']*2,
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/python/treeVisitors.py b/Trigger/TrigHypothesis/TrigHLTJetHypo/python/treeVisitors.py
index c4c92e78b55762cf8f2cc0f55ca33b174d348923..a226fec52b7ccdc72db13870755088eb32be2343 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/python/treeVisitors.py
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/python/treeVisitors.py
@@ -98,34 +98,6 @@ class TreeBuilder(object):
         return self.tree
 
 
-class TreeToBooleanExpression(object):
-    """visit a hypo tree. If boolean scenarios are present, build a
-    boolean expression string."""
-    
-    def __init__(self):
-        self.stack = []
-
-    def mod(self, node):
-        if node.scenario == 'not':
-            self.stack.append(' ! ')
-            return
-
-        if node.scenario == 'and':
-            self.stack.append(' x ')
-            return
-
-        if node.scenario == 'or':
-            self.stack.append(' + ')
-            return
-
-        self.stack.append(' %s ' %node.tool.name())
-
-    def report(self):
-        s = '%s: ' % self.__class__.__name__
-        while self.stack: s += self.stack.pop()
-        return s.strip()
-
-
 class ConditionsDictMaker(object):
 
     """Convert parameter string into dictionary holding low, high window
@@ -328,12 +300,12 @@ class TreeParameterExpander_dijet(object):
         node.conf_attrs = d
 
     def report(self):
-        return '%s: ' % self.__class__.__name__ + '\n'.join(self.msgs) 
+        return '%s: ' % self.__class__.__name__ + '\n'.join(self.msgs)
 
 
-class TreeParameterExpander_combgen(object):
+class TreeParameterExpander_all(object):
     """Convert parameter string into a dictionary holding low, high window
-    cut vals. Specialistaion for the combgen Tool
+    cut vals. Specialisation for the "all" node
 
     parameter strings look like '40m,100deta200, 50dphi300'
     """
@@ -343,66 +315,18 @@ class TreeParameterExpander_combgen(object):
 
     def mod(self, node):
 
-        ok = True # status flag
-        # the group size must be the first attribute, then the conditions.
-        # size_re = re.compile(r'^\((\d+)\)')
-        parameters = node.parameters[:]
-        # m = size_re.match(parameters)
-        # if m is None:
-        #     self.msgs.append('Error')
-        #     return
-
-        # node.conf_attrs = {'groupSize':int(m.groups()[0])}
-        # remove goup info + 2 parentheses
-        # parameters = parameters[len(m.groups()[0])+2:]
-
-        cdm = ConditionsDictMaker()
-        d, error, msgs = cdm.makeDict(parameters)
-        self.msgs.extend(msgs)
-        node.conf_attrs = d
-        
-
-        if ok:
-            self.msgs = ['All OK']
-        else:
-            self.msgs.append('Error')
-
-        
-    def report(self):
-        return '%s: ' % self.__class__.__name__ + '\n'.join(self.msgs) 
-
-
-class TreeParameterExpander_partgen(object):
-    """Convert parameter string into a dictionary holding low, high window
-    cut vals. Specialistaion for the combgen Tool
-
-    parameter strings look like '40m,100deta200, 50dphi300'
-    """
-    
-    def __init__(self):
-        self.msgs = []
-
-    def mod(self, node):
+        if node.parameters != '':
+            self.msgs.append(
+                'Error, all node with parameters ' + node.parameters)
+            return
 
-        parameters = node.parameters[:]
- 
-        cdm = ConditionsDictMaker()
+        node.conf_attrs = ''
 
-        d, error, msgs = cdm.makeDict(parameters)
+        self.msgs = ['All OK']
 
-        self.msgs.extend(msgs)
-        node.conf_attrs = d
         
-
-        if not error:
-            self.msgs = ['All OK']
-        else:
-            self.msgs.append('Error')
-
-        return d, error, msgs
-    
     def report(self):
-        return '%s: ' % self.__class__.__name__ + '\n'.join(self.msgs) 
+        return '%s: ' % self.__class__.__name__ + '\n'.join(self.msgs)
 
 
 class TreeParameterExpander_null(object):
@@ -418,23 +342,18 @@ class TreeParameterExpander_null(object):
         return '%s: ' % self.__class__.__name__ + '\n'.join(self.msgs) 
     
 
-
 class TreeParameterExpander(object):
     """Class to expand node.parameters string. Delegates to
     specialised expanders."""
     
     router = {
         'z': TreeParameterExpander_null,
+        'root': TreeParameterExpander_null,
         'simple': TreeParameterExpander_simple,
-        'simplepartition': TreeParameterExpander_simple,
         'ht': TreeParameterExpander_simple,
         'dijet': TreeParameterExpander_dijet,
         'qjet': TreeParameterExpander_simple,
-        'not': TreeParameterExpander_null,
-        'and': TreeParameterExpander_null,
-        'or': TreeParameterExpander_null,
-        'combgen': TreeParameterExpander_combgen,
-        'partgen': TreeParameterExpander_partgen,
+        'all': TreeParameterExpander_all,
         'agree': TreeParameterExpander_null,
     }
 
@@ -442,9 +361,9 @@ class TreeParameterExpander(object):
         self.expander = None
 
     def mod(self, node):
+
         self.expander = self.router[node.scenario]()
         self.expander.mod(node)
-        print (self.expander.report())
 
     def report(self):
         return self.expander.report()
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/AcceptAllConditionMT.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/AcceptAllConditionMT.cxx
index 073719bbe194f33b3c82403fa2c7f62bca97ab03..99330271f40f08a325eb8b67dd626629717d9864 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/AcceptAllConditionMT.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/AcceptAllConditionMT.cxx
@@ -28,8 +28,8 @@ AcceptAllConditionMT::isSatisfied(const HypoJetVector& ips,
     
     std::stringstream ss1;
     
-    for(auto ip : ips){
-      address = static_cast<const void*>(ip);
+    for(const auto& ip : ips){
+      address = static_cast<const void*>(ip.get());
       ss1 << "    "  << address << " " << ip->eta() << " e " << ip->e() << '\n';
     }
     ss1 << '\n';
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/CompoundConditionMT.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/CompoundConditionMT.cxx
index af5cb4b646a4f3478c97f24924c007db0e1e4dc4..e8d90d442117937d17ddaac319f10ab7e30d0079 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/CompoundConditionMT.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/CompoundConditionMT.cxx
@@ -42,7 +42,7 @@ bool CompoundConditionMT::isSatisfied(const HypoJetVector& ips,
         << " pass: "  << std::boolalpha << pass << '\n';
 
     for(const auto& ip : ips){
-      auto j_addr = static_cast<const void*>(ip);
+      auto j_addr = static_cast<const void*>(ip.get());
       std::stringstream ss1;
       ss1 <<  "     jet : ("<< j_addr << ")\n";
     
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/DijetConditionMT.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/DijetConditionMT.cxx
index ab69025c4b9d87927bce4e76229303a1de89125e..676eb38b2e80097515c0c032aff79a77ad68ea43 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/DijetConditionMT.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/DijetConditionMT.cxx
@@ -68,8 +68,8 @@ DijetConditionMT::isSatisfied(const HypoJetVector& ips,
 
      std::stringstream ss1;
 
-     for(auto ip : ips){
-       address = static_cast<const void*>(ip);
+     for(const auto& ip : ips){
+       address = static_cast<const void*>(ip.get());
        ss1 << "    "  << address << " " << ip->eta() << " e " << ip->e() << '\n';
      }
      ss1 << '\n';
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/DijetDEtaConditionMT.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/DijetDEtaConditionMT.cxx
index c100fec837cc36fa799315cb0f5dc15ad804f0de..c7c0027bbd4fb270b69263279e497a76dfdf55d8 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/DijetDEtaConditionMT.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/DijetDEtaConditionMT.cxx
@@ -48,8 +48,8 @@ DijetDEtaConditionMT::isSatisfied(const HypoJetVector& ips,
 
      std::stringstream ss1;
 
-     for(auto ip : ips){
-       address = static_cast<const void*>(ip);
+     for(const auto& ip : ips){
+       address = static_cast<const void*>(ip.get());
        ss1 << "    "  << address << " " << ip->eta() << " e " << ip->e() << '\n';
      }
      ss1 << '\n';
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/DijetDPhiConditionMT.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/DijetDPhiConditionMT.cxx
index 567bcaa29aa53960849caf7944f4f3fd6424ea6c..8d20b49abd1e38b6c6985fb1fa1364f16297185d 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/DijetDPhiConditionMT.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/DijetDPhiConditionMT.cxx
@@ -49,8 +49,8 @@ DijetDPhiConditionMT::isSatisfied(const HypoJetVector& ips,
      
      std::stringstream ss1;
      
-     for(auto ip : ips){
-       address = static_cast<const void*>(ip);
+     for(const auto& ip : ips){
+       address = static_cast<const void*>(ip.get());
        ss1 << "    "  << address << " " << ip->eta() << " e " << ip->e() << '\n';
      }
      ss1 << '\n';
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/DijetMassConditionMT.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/DijetMassConditionMT.cxx
index 37a455694117833ff23b3d837076c1499fe37a2b..e368c5fd561f6c02dfffdb24d67349ffc9bf1bbf 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/DijetMassConditionMT.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/DijetMassConditionMT.cxx
@@ -49,8 +49,8 @@ DijetMassConditionMT::isSatisfied(const HypoJetVector& ips,
 
      std::stringstream ss1;
 
-     for(auto ip : ips){
-       address = static_cast<const void*>(ip);
+     for(const auto& ip : ips){
+       address = static_cast<const void*>(ip.get());
        ss1 << "    "  << address << " " << ip->eta() << " e " << ip->e() << '\n';
      }
      ss1 << '\n';
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/EtConditionMT.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/EtConditionMT.cxx
index 4c6560f02fa158d3f45ab7318a7f27f913edbfc5..a3e27ceddbc46bd10295db53abcd1da133b4f427 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/EtConditionMT.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/EtConditionMT.cxx
@@ -29,7 +29,7 @@ bool EtConditionMT::isSatisfied(const pHypoJet& ip,
         << " et thresh " << m_min
         << " pass: "  << std::boolalpha << pass << '\n';
 
-    auto j_addr = static_cast<const void*>(ip);
+    auto j_addr = static_cast<const void*>(ip.get());
     std::stringstream ss1;
     ss1 <<  "     jet : ("<< j_addr << ")"
         " et " << et << '\n';
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/EtaConditionAbsMT.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/EtaConditionAbsMT.cxx
index db606451a340dc218feca61f6fbddc93f0a4deb7..273bd2eff7ee18f43f75d3ade3fb7ac49d96927c 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/EtaConditionAbsMT.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/EtaConditionAbsMT.cxx
@@ -31,7 +31,7 @@ EtaConditionAbsMT::isSatisfied(const pHypoJet& ip,
         << " eta[" << m_min << ", " << m_max << "]" 
         << " pass: "  << std::boolalpha << pass << '\n';
 
-    auto j_addr = static_cast<const void*>(ip);
+    auto j_addr = static_cast<const void*>(ip.get());
     std::stringstream ss1;
     ss1 <<  "     jet : ("<< j_addr << ") abseta " << abseta << '\n';
 
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/EtaConditionSignedMT.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/EtaConditionSignedMT.cxx
index 9bf9e30a203fb693355337ed6e7f38b301aaf6fa..d26c5820d2b17acb263a9a990e36b1fe1bf77cca 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/EtaConditionSignedMT.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/EtaConditionSignedMT.cxx
@@ -30,7 +30,7 @@ bool EtaConditionSignedMT::isSatisfied(const pHypoJet& ip,
         << " eta[" << m_min << ", " << m_max << "]" 
         << " pass: "  << std::boolalpha << pass << '\n';
 
-    auto j_addr = static_cast<const void*>(ip);
+    auto j_addr = static_cast<const void*>(ip.get());
     std::stringstream ss1;
     ss1 <<  "     jet : ("<< j_addr << ") signed eta " << eta << '\n';
 
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/EtaEtAsymmetricConditionMT.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/EtaEtAsymmetricConditionMT.cxx
index bee5de795ed2b0df13d07fa4f4999d502340fcdb..b64816f56ccb5277a9b6066d4b34bd1e7df304ce 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/EtaEtAsymmetricConditionMT.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/EtaEtAsymmetricConditionMT.cxx
@@ -39,7 +39,7 @@ EtaEtAsymmetricConditionMT::isSatisfied(const pHypoJet& ip,
                                       
                                       
     std::stringstream ss1;
-    auto j_addr = static_cast<const void*>(ip);
+    auto j_addr = static_cast<const void*>(ip.get());
     ss1 << "    jet: ("  << j_addr << ")"
         << " eta " << eta
         << " et " << et << '\n';
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/EtaEtConditionMT.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/EtaEtConditionMT.cxx
index 3522bc51cd86f7bcf92061615352eb00f6746c48..2b53c70f5ff632bc7904b58d2f0ef03410d2aec0 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/EtaEtConditionMT.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/EtaEtConditionMT.cxx
@@ -37,7 +37,7 @@ bool EtaEtConditionMT::isSatisfied(const pHypoJet& ip,
         << " et thresh " << m_threshold
         << " pass: "  << std::boolalpha << pass << '\n';
 
-    auto j_addr = static_cast<const void*>(ip);
+    auto j_addr = static_cast<const void*>(ip.get());
     std::stringstream ss1;
     ss1 <<  "     jet : ("<< j_addr << ")"
         " abseta " << abseta << " et " << et << '\n';
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/FastReducer.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/FastReducer.cxx
index c0552f3fa6c70233b4d1fc81825427b5f2c9b18c..5f1ad5000d856933cb54dadf16e3eedf183acbdf 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/FastReducer.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/FastReducer.cxx
@@ -473,7 +473,7 @@ std::string FastReducer::dataStructuresToStr() const {
   for(const auto& pair : m_indJetGroup){
     ss << pair.first << " [";
     for(const auto& j : pair.second){
-      ss << static_cast<const void*>(j) << " ";
+      ss << static_cast<const void*>(j.get()) << " ";
     }
     ss << "]\n";
   }
@@ -530,7 +530,7 @@ void FastReducer::recordJetGroup(std::size_t ind,
   
   std::stringstream ss1;
   for(auto ip : jg){
-    const void* address = static_cast<const void*>(ip);
+    const void* address = static_cast<const void*>(ip.get());
     ss1  << "\n "  << address << " eta " << ip->eta()
 	 << " e " << ip->e()
 	 << " et " << ip->et();
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/HTConditionMT.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/HTConditionMT.cxx
index 2f92a1fd0fe46d2f4c386f1e7476d19b338b8450..0a9cad16e74c79f40f1a691660df3e07162df3d4 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/HTConditionMT.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/HTConditionMT.cxx
@@ -4,7 +4,7 @@
 
 #include "./HTConditionMT.h"
 #include "./ITrigJetHypoInfoCollector.h"
-#include "TrigHLTJetHypo/TrigHLTJetHypoUtils/IJet.h"
+#include "TrigHLTJetHypo/TrigHLTJetHypoUtils/HypoJetDefs.h"
 
 #include <sstream>
 #include <cmath>
@@ -39,7 +39,7 @@ HTConditionMT::isSatisfied(const HypoJetVector& ips,
     return std::accumulate(ips_c.begin(),
 			   iter,
 			   0.0,
-			   [](double sum, const HypoJet::IJet* jp){
+			   [](double sum, const pHypoJet& jp){
 			     return sum + jp->et();}) > m_htMin;
   } else {
     if(infoCollector) {
@@ -49,7 +49,7 @@ HTConditionMT::isSatisfied(const HypoJetVector& ips,
     return std::accumulate(ips.begin(),
 			   ips.end(),
 			   0.0,
-			   [](double sum, const HypoJet::IJet* jp){
+			   [](double sum, const pHypoJet& jp){
 			     return sum + jp->et();}) > m_htMin;
   }
 }
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/JVTConditionMT.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/JVTConditionMT.cxx
index 4c1d1478f2aea62af3b169d841d35b3fead3990f..d6acdda7213d858ccb855a5ff37e8163487a6b62 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/JVTConditionMT.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/JVTConditionMT.cxx
@@ -58,8 +58,8 @@ bool JVTConditionMT::isSatisfied(const HypoJetVector& ips, const std::unique_ptr
 
     std::stringstream ss1;
 
-    for(auto ip : ips){
-      address = static_cast<const void*>(ip);
+    for(const auto& ip : ips){
+      address = static_cast<const void*>(ip.get());
       ss1 << "    "  << address << " " << ip->eta() << " pt " << ip->pt() << '\n';
     }
     ss1 << '\n';
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/MomentConditionMT.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/MomentConditionMT.cxx
index ea09ac6a94068b23adf11cfa5d802cb776dc9244..effde3e73e02cb004ca1cb4758f6d69a44dcfc04 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/MomentConditionMT.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/MomentConditionMT.cxx
@@ -25,7 +25,7 @@ bool MomentConditionMT::isSatisfied(const pHypoJet& ip,
   if (!(ip -> getAttribute(m_moment, momentValue))){
 
     if(collector){
-      auto j_addr = static_cast<const void*>(ip);
+      auto j_addr = static_cast<const void*>(ip.get());
 
       std::stringstream ss0;
       ss0 << "MomentCondition: " 
@@ -50,7 +50,7 @@ bool MomentConditionMT::isSatisfied(const pHypoJet& ip,
         << " moment " << m_moment
         << " pass: "  << std::boolalpha << pass << '\n';
 
-    auto j_addr = static_cast<const void*>(ip);
+    auto j_addr = static_cast<const void*>(ip.get());
     std::stringstream ss1;
     ss1 <<  "     jet : ("<< j_addr << ") "
       "moment " << m_moment << " value: " << momentValue<< '\n';
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/QjetMassConditionMT.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/QjetMassConditionMT.cxx
index 11302690ace4d2dcf32c3b73510ff7dcdb02dc35..601f4524fcac7a907ceb62f15c9d6f31d8f28946 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/QjetMassConditionMT.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/QjetMassConditionMT.cxx
@@ -51,8 +51,8 @@ QjetMassConditionMT::isSatisfied(const HypoJetVector& ips,
 
      std::stringstream ss1;
 
-     for(auto ip : ips){
-       address = static_cast<const void*>(ip);
+     for(const auto& ip : ips){
+       address = static_cast<const void*>(ip.get());
        ss1 << "    "  << address << " eta " << ip->eta()
 	   << " e " << ip->e() << " et: " << ip->et() << '\n';
      }
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/SMCConditionMT.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/SMCConditionMT.cxx
index 02b601e0bb45634df4f03093db295e63f3796856..6695cf6e2b0cf0c857ab362ba93229729af5332b 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/SMCConditionMT.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/SMCConditionMT.cxx
@@ -31,7 +31,7 @@ SMCConditionMT::isSatisfied(const pHypoJet& ip,
         << " mass[" << m_min << ", " << m_max << "]" 
         << " pass: "  << std::boolalpha << pass << '\n';
 
-    auto j_addr = static_cast<const void*>(ip);
+    auto j_addr = static_cast<const void*>(ip.get());
     std::stringstream ss1;
     ss1 <<  "     jet : ("<< j_addr << ") jet mass " << mass << '\n';
 
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypo2.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypo2.cxx
index 0dfa822f868603b97f3a843c22247f1d0eb7b156..9421828473ca56a969de02b3950ade1af03bb721 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypo2.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypo2.cxx
@@ -260,7 +260,7 @@ class xAODJetAsIJetFactory{
 public:
   xAODJetAsIJetFactory(): m_ind(-1){}
   pHypoJet operator() (const xAOD::Jet* j){
-    return new HypoJet::xAODJetAsIJet(j, ++m_ind);}
+    return pHypoJet(new HypoJet::xAODJetAsIJet(j, ++m_ind));}
 private:
   unsigned int m_ind;
 };
@@ -324,9 +324,6 @@ HLT::ErrorCode TrigHLTJetHypo2::hltExecute(const HLT::TriggerElement* outputTE,
     ATH_MSG_ERROR("Trig bits storage failed"); 
      return status;}
   
-  // delete the xAOD::Jet wrappers
-  for(auto i : hypoJets){delete i;}
-
   return HLT::OK;
 }
 
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoBase.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoBase.cxx
index 90966c1bec3f3691c1ffd67e40306d69c121017b..b9fa0f08c8b99c7e9a6bb031a56d0ab172cd489a 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoBase.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoBase.cxx
@@ -188,7 +188,7 @@ class xAODJetAsIJetFactory{
 public:
   xAODJetAsIJetFactory(): m_ind(-1){}
   pHypoJet operator() (const xAOD::Jet* j){
-    return new HypoJet::xAODJetAsIJet(j, ++m_ind);}
+    return pHypoJet(new HypoJet::xAODJetAsIJet(j, ++m_ind));}
 private:
   unsigned int m_ind;
 };
@@ -252,9 +252,6 @@ HLT::ErrorCode TrigHLTJetHypoBase::hltExecute(const HLT::TriggerElement* outputT
     ATH_MSG_ERROR("Trig bits storage failed"); 
      return status;}
   
-  // delete the xAOD::Jet wrappers
-  for(auto i : hypoJets){delete i;}
-
   return HLT::OK;
 }
 
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/AntiBasicCleaner.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/AntiBasicCleaner.cxx
index b92096fdf7a32b4051587e8aec20ab6668588be7..1b3734e9280f9b408ab73590f92a13d3c41e6038 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/AntiBasicCleaner.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/AntiBasicCleaner.cxx
@@ -20,7 +20,7 @@ AntiBasicCleaner::AntiBasicCleaner(float n90Threshold,
                                    float negativeEThreshold):
   BasicCleaner(n90Threshold, presamplerThreshold, negativeEThreshold){}
 
-bool AntiBasicCleaner::operator()(const pHypoJet& jet) const{
+bool AntiBasicCleaner::operator()(const HypoJet::IJet& jet) const{
   return !BasicCleaner::operator()(jet);
 }
 
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/AntiLlpCleaner.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/AntiLlpCleaner.cxx
index b03fd88df6ac92abde427310c5a1e104f3438905..95f94e77abbf8e15ded8b371601acbfbcfe08647 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/AntiLlpCleaner.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/AntiLlpCleaner.cxx
@@ -27,7 +27,7 @@ AntiLlpCleaner::AntiLlpCleaner(float fSampMaxLlpThreshold,
              hecqLlpThreshold,
              avLarQFLlpThreshold){}
 
-bool AntiLlpCleaner::operator()(const pHypoJet& jet) const{
+bool AntiLlpCleaner::operator()(const HypoJet::IJet& jet) const{
   return !LlpCleaner::operator()(jet);
 }
 
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/AntiLooseCleaner.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/AntiLooseCleaner.cxx
index 3dbdc9e3bd55ca259730718e678af8840b49c748..e0749e58cab5481783a53f66e59306cf3c6af65e 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/AntiLooseCleaner.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/AntiLooseCleaner.cxx
@@ -26,7 +26,7 @@ AntiLooseCleaner::AntiLooseCleaner(float fSampMaxLooseThreshold,
                emfHighLooseThreshold, 
                hecfLooseThreshold){}
 
-bool AntiLooseCleaner::operator()(const pHypoJet& jet) const{
+bool AntiLooseCleaner::operator()(const HypoJet::IJet& jet) const{
   return !LooseCleaner::operator()(jet);
 }
 
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/AntiTightCleaner.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/AntiTightCleaner.cxx
index f067c5f627bde096fdff76a09ba10a6433b83d7a..e6632ff05e296fb679f4ee69ee04cf9fdf256893 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/AntiTightCleaner.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/AntiTightCleaner.cxx
@@ -26,7 +26,7 @@ AntiTightCleaner::AntiTightCleaner(float fSampMaxTightThreshold,
                emfHighTightThreshold, 
                hecfTightThreshold){}
 
-bool AntiTightCleaner::operator()(const pHypoJet& jet) const{
+bool AntiTightCleaner::operator()(const HypoJet::IJet& jet) const{
   return !TightCleaner::operator()(jet);
 }
 
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/BasicCleaner.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/BasicCleaner.cxx
index 19760a297a8eb16c7723ddcc659680cd5ea5ffa1..523bb5fe02cb8716826f136cf292d4888db4aedc 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/BasicCleaner.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/BasicCleaner.cxx
@@ -26,13 +26,13 @@ BasicCleaner::BasicCleaner(float n90Threshold,
   m_negativeEThreshold(negativeEThreshold){
   }
   
-bool BasicCleaner::operator()(const pHypoJet& jet) const {
+bool BasicCleaner::operator()(const HypoJet::IJet& jet) const {
   /* make cuts on jet attributes to select clean jets */
   
   bool isClean{true};
 
   float n90jet;
-  if (jet -> getAttribute("N90Constituents", n90jet)){
+  if (jet.getAttribute("N90Constituents", n90jet)){
     //std::cout << "TrigHLTJetHypoHelpers: N90Constituents = " << n90jet << '\n';
     if(n90jet < m_n90Threshold){isClean = false;}
   } else {
@@ -43,7 +43,7 @@ bool BasicCleaner::operator()(const pHypoJet& jet) const {
   //example of code to compute presamplerFraction
   /*
   std::vector<float> vecvalue;
-  if (jet->getAttribute(xAOD::JetAttribute::EnergyPerSampling, vecvalue)) {
+  if (jet.getAttribute(xAOD::JetAttribute::EnergyPerSampling, vecvalue)) {
      int vecsize = vecvalue.size();
      std::cout << "TrigHLTJetHypoHelpers: Got EnergyPerSampling vector, size: " << vecsize << '\n';
      if (vecsize != 0) {
@@ -60,7 +60,7 @@ bool BasicCleaner::operator()(const pHypoJet& jet) const {
            //see Reconstruction/Jet/JetUtils/Root/JetCaloQualityUtils.cxx
            double presamplerE = vecvalue[CaloSampling::PreSamplerB] + vecvalue[CaloSampling::PreSamplerE];
            std::cout << "TrigHLTJetHypoHelpers: presamplerE = " << presamplerE << '\n';
-           double jetE = jet->jetP4(xAOD::JetEMScaleMomentum).E();
+           double jetE = jet.jetP4(xAOD::JetEMScaleMomentum).E();
            std::cout << "TrigHLTJetHypoHelpers: jetE = " << jetE << '\n';
            double presamplerF = 0;
            if(jetE!=0) presamplerF = presamplerE/jetE;
@@ -74,7 +74,7 @@ bool BasicCleaner::operator()(const pHypoJet& jet) const {
   //use of presamplerFraction needs further study, for now not used
   /*
   float presamplerfractionjet;
-  if(jet -> getAttribute<float>("presamplerFraction", presamplerfractionjet)){
+  if(jet.getAttribute<float>("presamplerFraction", presamplerfractionjet)){
     if(presamplerfractionjet > m_presamplerThreshold) {isClean = false;}
   } else {
     throw UncleanableJet("Cleaner Cannot retrieve presamplerFraction");
@@ -82,7 +82,7 @@ bool BasicCleaner::operator()(const pHypoJet& jet) const {
   */
 
   float negEjet;
-  if(jet -> getAttribute("NegativeE", negEjet)){
+  if(jet.getAttribute("NegativeE", negEjet)){
     //std::cout << "TrigHLTJetHypoHelpers: NegativeE = " << negEjet << '\n';
     if(negEjet < m_negativeEThreshold){isClean = false;}
   } else {
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/EtaEtCleaner.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/EtaEtCleaner.cxx
index d9e386715b234342361a605280bb8335130dd0fe..9de5db74fa4621cf3b6f39c37b3c213528e8ee5e 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/EtaEtCleaner.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/EtaEtCleaner.cxx
@@ -26,9 +26,9 @@ EtaEtCleaner::EtaEtCleaner(double etaMin,
   m_etaMin(etaMin), m_etaMax(etaMax), m_etMin(etMin), m_etMax(etMax){
   }
 
-bool EtaEtCleaner::operator()(const pHypoJet& jet) const{
-  auto eta =  std::abs(jet->eta());
-  auto et = jet->et();
+bool EtaEtCleaner::operator()(const HypoJet::IJet& jet) const{
+  auto eta =  std::abs(jet.eta());
+  auto et = jet.et();
   
   return eta >= m_etaMin and eta < m_etaMax and et >= m_etMin and et < m_etMax;
 }
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/HTCondition.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/HTCondition.cxx
index 13198fd6a8c2d2e70e70478b0d82390fc21f4620..c296c950bfcb10f844fab2e906ec8ba9db7514b7 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/HTCondition.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/HTCondition.cxx
@@ -3,7 +3,7 @@
 */
 
 #include "TrigHLTJetHypo/TrigHLTJetHypoUtils/HTCondition.h"
-#include "TrigHLTJetHypo/TrigHLTJetHypoUtils/IJet.h"
+#include "TrigHLTJetHypo/TrigHLTJetHypoUtils/HypoJetDefs.h"
 
 #include <sstream>
 #include <cmath>
@@ -19,7 +19,7 @@ HTCondition::isSatisfied(const HypoJetVector& ips) const {
   return std::accumulate(ips.begin(),
                          ips.end(),
                          0.0,
-                         [](double sum, const HypoJet::IJet* jp){
+                         [](double sum, const pHypoJet& jp){
                            return sum + jp->et();}) > m_htMin;
 }
 
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/HypoJetDefs.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/HypoJetDefs.cxx
index e6b77cec17cad8e1ffa6b88c8b3dc919db52f7e0..e29de4a36ddad603c6d38637a144b1c63196a913 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/HypoJetDefs.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/HypoJetDefs.cxx
@@ -9,7 +9,7 @@ std::ostream& operator << (std::ostream& out, const HypoJetVector& hjv) {
   auto hjv_addr = static_cast<const void*>(&hjv);
   out << "HypoJetVector (" << hjv_addr << ")\n";
   for(const auto& j : hjv) {
-    out << static_cast<const void*>(j)
+    out << static_cast<const void*>(j.get())
 	<< " e " << j->e()
 	<< " et " << j->et()
 	<< " eta " << j->eta() << '\n';
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/LlpCleaner.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/LlpCleaner.cxx
index eaf664a50aeab6f9e4ec66a70b0303a3791e3283..6c7ab3444cbdb41b9e56763bb896bbcdd6db81d7 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/LlpCleaner.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/LlpCleaner.cxx
@@ -30,15 +30,15 @@ LlpCleaner::LlpCleaner(float fSampMaxLlpThreshold,
   m_avLarQFLlpThreshold(avLarQFLlpThreshold){
   }
   
-bool LlpCleaner::operator()(const pHypoJet& jet) const {
+bool LlpCleaner::operator()(const HypoJet::IJet& jet) const {
   /* make cuts on jet attributes to select jets from long-lived particles */
   
   bool isClean{true};
 
   float fsmJet;
   float neJet;
-  if(jet -> getAttribute("FracSamplingMax", fsmJet)){
-    if(jet -> getAttribute("NegativeE", neJet)){
+  if(jet.getAttribute("FracSamplingMax", fsmJet)){
+    if(jet.getAttribute("NegativeE", neJet)){
       if(fsmJet > m_fSampMaxLlpThreshold && 
          std::abs(neJet) > m_negELlpThreshold){isClean = false;}
     } else {
@@ -52,9 +52,9 @@ bool LlpCleaner::operator()(const pHypoJet& jet) const {
   float hecfJet;
   float hecqJet;
   float alqfJet;
-  if(jet -> getAttribute("HECFrac", hecfJet)){
-    if(jet -> getAttribute("HECQuality", hecqJet)){
-      if(jet -> getAttribute("AverageLArQF", alqfJet)){
+  if(jet.getAttribute("HECFrac", hecfJet)){
+    if(jet.getAttribute("HECQuality", hecqJet)){
+      if(jet.getAttribute("AverageLArQF", alqfJet)){
         if(hecfJet > m_hecfLlpThreshold && std::abs(hecqJet) > m_hecqLlpThreshold &&
            alqfJet > m_avLarQFLlpThreshold){isClean = false;}
       } else {
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/LooseCleaner.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/LooseCleaner.cxx
index 1ff40dc12d9c0dcaf7c95cc3712d4ccc0d9f09c0..e87eee79344c098aab31c29a121f5163a542810a 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/LooseCleaner.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/LooseCleaner.cxx
@@ -30,27 +30,27 @@ LooseCleaner::LooseCleaner(float fSampMaxLooseThreshold,
   m_hecfLooseThreshold(hecfLooseThreshold){
   }
   
-bool LooseCleaner::operator()(const pHypoJet& jet) const {
+bool LooseCleaner::operator()(const HypoJet::IJet& jet) const {
   /* make cuts on jet attributes to select clean jets */
   
   bool isClean{true};
 
   float fsmJet;
-  if(jet -> getAttribute("FracSamplingMax", fsmJet)){
-    if(fsmJet > m_fSampMaxLooseThreshold && std::abs(jet->eta()) < m_etaLooseThreshold){isClean = false;}
+  if(jet.getAttribute("FracSamplingMax", fsmJet)){
+    if(fsmJet > m_fSampMaxLooseThreshold && std::abs(jet.eta()) < m_etaLooseThreshold){isClean = false;}
   } else {
     throw UncleanableJet("Cleaner Cannot retrieve FracSamplingMax");
   }
 
   float emfJet;
-  if(jet -> getAttribute("EMFrac", emfJet)){
+  if(jet.getAttribute("EMFrac", emfJet)){
     if(emfJet < m_emfLowLooseThreshold || emfJet > m_emfHighLooseThreshold){isClean = false;}
   } else {
     throw UncleanableJet("Cleaner Cannot retrieve EMFrac");
   }
 
   float hecfJet;
-  if(jet -> getAttribute("HECFrac", hecfJet)){
+  if(jet.getAttribute("HECFrac", hecfJet)){
     if(hecfJet > m_hecfLooseThreshold){isClean = false;}
   } else {
     throw UncleanableJet("Cleaner Cannot retrieve HECFrac");
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/NullCleaner.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/NullCleaner.cxx
index b2375d7d84089940016710caaacd289f08506ae6..f8bab410566444b5f8b6d4623d845853809072ad 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/NullCleaner.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/NullCleaner.cxx
@@ -15,7 +15,7 @@
 #include "TrigHLTJetHypo/TrigHLTJetHypoUtils/NullCleaner.h"
 #include <sstream>
 
-bool NullCleaner::operator()(const pHypoJet&) const {return true;}    
+bool NullCleaner::operator()(const HypoJet::IJet&) const {return true;}    
 
 std::string NullCleaner::toString() const noexcept {
   std::stringstream ss;
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/TightCleaner.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/TightCleaner.cxx
index 4de1ff73dd1c618b9b2c4e5da8ab372f399b66d7..fc3786f3c4f7a7739bc94f8d430e4cc788dec398 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/TightCleaner.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/TrigHLTJetHypoUtils/TightCleaner.cxx
@@ -29,27 +29,27 @@ TightCleaner::TightCleaner(float fSampMaxTightThreshold,
   m_hecfTightThreshold(hecfTightThreshold){
   }
   
-bool TightCleaner::operator()(const pHypoJet& jet) const {
+bool TightCleaner::operator()(const HypoJet::IJet& jet) const {
   /* make cuts on jet attributes to select clean jets */
   
   bool isClean{true};
 
   float fsmJet;
-  if(jet -> getAttribute("FracSamplingMax", fsmJet)){
-    if(fsmJet > m_fSampMaxTightThreshold && std::abs(jet->eta()) < m_etaTightThreshold){isClean = false;}
+  if(jet.getAttribute("FracSamplingMax", fsmJet)){
+    if(fsmJet > m_fSampMaxTightThreshold && std::abs(jet.eta()) < m_etaTightThreshold){isClean = false;}
   } else {
     throw UncleanableJet("Cleaner Cannot retrieve FracSamplingMax");
   }
 
   float emfJet;
-  if(jet -> getAttribute("EMFrac", emfJet)){
+  if(jet.getAttribute("EMFrac", emfJet)){
     if(emfJet < m_emfLowTightThreshold || emfJet > m_emfHighTightThreshold){isClean = false;}
   } else {
     throw UncleanableJet("Cleaner Cannot retrieve EMFrac");
   }
 
   float hecfJet;
-  if(jet -> getAttribute("HECFrac", hecfJet)){
+  if(jet.getAttribute("HECFrac", hecfJet)){
     if(hecfJet > m_hecfTightThreshold){isClean = false;}
   } else {
     throw UncleanableJet("Cleaner Cannot retrieve HECFrac");
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/xAODJetAsIJetFactory.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/xAODJetAsIJetFactory.cxx
index ab7261bdc2a1c00bbfb7b2dc96baca1f7023b027..dca0a3945709a2df5f88a7eb34cf3022d61e9f1a 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/xAODJetAsIJetFactory.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/xAODJetAsIJetFactory.cxx
@@ -6,6 +6,6 @@
 xAODJetAsIJetFactory::xAODJetAsIJetFactory(): m_ind(-1){}
 
 pHypoJet xAODJetAsIJetFactory::operator() (const xAOD::Jet* j){
-  return new HypoJet::xAODJetAsIJet(j, ++m_ind);
+  return pHypoJet(new HypoJet::xAODJetAsIJet(j, ++m_ind));
 }
 
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/xAODJetCollector.h b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/xAODJetCollector.h
index 99de7586ab1487a4bf1bce87142f2f244e293b4d..ca24a33de12498caee6f28a4fb6e9776c44f5d49 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypo/src/xAODJetCollector.h
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypo/src/xAODJetCollector.h
@@ -6,7 +6,6 @@
 #define TRIGLHLTJETHYPO_XAODJETCOLLECTOR_H
 
 #include  "xAODJet/Jet.h"
-#include  "TrigHLTJetHypo/TrigHLTJetHypoUtils/IJet.h"
 #include  "TrigHLTJetHypo/TrigHLTJetHypoUtils/HypoJetDefs.h"
 
 #include <string>
@@ -18,9 +17,6 @@
 // xAODJetCollector - an object send to a (possibly recursive)
 // TrigJetHypoToolHelpers to obtain xAOD jets
 
-namespace HypoJet{
-  class IJet;
-}
 
   
 class xAODJetCollector {
@@ -89,7 +85,7 @@ public:
     auto new_end =
       std::partition(hypoJets.begin(),
 		     hypoJets.end(),
-		     [](const HypoJet::IJet* j){
+		     [](const pHypoJet& j){
 		       return (j->xAODJet()).has_value();});
     // add xAOD::Jet* to m_jets
     std::vector<const xAOD::Jet*> xJets;
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/exerciser/JetHypoExerciserCompareAlg.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/exerciser/JetHypoExerciserCompareAlg.cxx
index 9fb795a5eb334e1b69be9e260964cc06b3ed7c47..dfe58d2cea8d15b54392bb2adba388f10cc0885f 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/exerciser/JetHypoExerciserCompareAlg.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/exerciser/JetHypoExerciserCompareAlg.cxx
@@ -212,8 +212,8 @@ StatusCode JetHypoExerciserCompareAlg::execute() {
 
     bool jagree{true};
     for (std::size_t i = 0; i < njets; i++) {
-      ATH_MSG_INFO(" jet 0 : " << static_cast<const void*>(collected0.at(i)) <<
-		   " jet 1 : " << static_cast<const void*>(collected1.at(i)));
+      ATH_MSG_INFO(" jet 0 : " << static_cast<const void*>(collected0.at(i).get()) <<
+		   " jet 1 : " << static_cast<const void*>(collected1.at(i).get()));
 
       if (collected0.at(i) != collected1.at(i)) {
 	ATH_MSG_INFO(" jets above differ ");
@@ -234,7 +234,6 @@ StatusCode JetHypoExerciserCompareAlg::execute() {
 
   }
   
-  for (auto p : jv) {delete p;}
   return StatusCode::SUCCESS;
 }
 
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/exerciser/RandomSignalEventGenerator.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/exerciser/RandomSignalEventGenerator.cxx
index 33decdcd1a1c45bb29d7cd10696bae0caaef0859..345882842a0be206abe9cfd1d7cd019f4ca00b9c 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/exerciser/RandomSignalEventGenerator.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/exerciser/RandomSignalEventGenerator.cxx
@@ -80,7 +80,7 @@ RandomSignalHypoJetVectorGenerator::get() {
     if(m_flip()){
       tlv.SetZ(-tlv.Z()); // randomly flip the sign of the +vly generated tlv.
     }
-    result.push_back(new TLorentzVectorAsIJet(tlv));
+    result.push_back(std::shared_ptr<const HypoJet::IJet>(new TLorentzVectorAsIJet(tlv)));
   }
 
   if(m_nbkgd){
@@ -92,7 +92,7 @@ RandomSignalHypoJetVectorGenerator::get() {
       if(m_flip()){
 	tlv.SetZ(-tlv.Z()); // randomly flip the sign of the +vly generated tlv.
       }  
-      result.push_back(new TLorentzVectorAsIJet(tlv));
+      result.push_back(std::shared_ptr<const HypoJet::IJet>(new TLorentzVectorAsIJet(tlv)));
     }
   }
 
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/exerciser/SimpleHypoJetVectorGenerator.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/exerciser/SimpleHypoJetVectorGenerator.cxx
index 890b2c2ad3ab124695f66d829014265dcd983243..4668443f0f0a3517e3ed91f6d5cb9e97f781109f 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/exerciser/SimpleHypoJetVectorGenerator.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/exerciser/SimpleHypoJetVectorGenerator.cxx
@@ -63,7 +63,7 @@ SimpleHypoJetVectorGenerator::get() {
     const auto& eta = m_etas[i];
     const auto& et = m_es[i];
     auto tlv = factory->make(eta, et);
-    result.push_back(new TLorentzVectorAsIJet(tlv));
+    result.push_back(std::shared_ptr<const HypoJet::IJet>(new TLorentzVectorAsIJet(tlv)));
   }
   
   if(m_nbkgd){
@@ -81,7 +81,7 @@ SimpleHypoJetVectorGenerator::get() {
       const auto& en = en_distribution(generator);
       // depending on the factory, en is e or et
       auto tlv = factory->make(eta, en);
-      result.push_back(new TLorentzVectorAsIJet(tlv));
+      result.push_back(std::shared_ptr<const HypoJet::IJet>(new TLorentzVectorAsIJet(tlv)));
     }
   }
 
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/src/SpecifiedJetsCondition.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/src/SpecifiedJetsCondition.cxx
index 719ae1b186fbb9ec7753ce77b7aefbf9e65584ad..b98f837f6dec2949b8a066a0445c5cc63bc87447 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/src/SpecifiedJetsCondition.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/src/SpecifiedJetsCondition.cxx
@@ -49,7 +49,7 @@ std::string SpecifiedJetsCondition::toString() const noexcept {
     ss << "group " << count << '\n';
     ++count;
     for(const auto& j : jg){
-      auto j_addr = static_cast<const void*>(j);
+      auto j_addr = static_cast<const void*>(j.get());
       ss << j_addr << " ";
     }
   }
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/src/Timer.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/src/Timer.cxx
index 48f13f9ebfa84f039cd52b33e40a369c66289737..156bd2f77e822609ca7bb264d72bbeccb9218533 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/src/Timer.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/src/Timer.cxx
@@ -35,13 +35,13 @@ class Timer{
 
   void timeit(){
     HypoJetVector jets;
-    TLorentzVectorAsIJet  j0 (m_tl0);
-    TLorentzVectorAsIJet  j1 (m_tl1);
+    auto j0 = std::make_shared<TLorentzVectorAsIJet>(m_tl0);
+    auto j1 = std::make_shared<TLorentzVectorAsIJet>(m_tl1);
     HypoJetVector jets0;
-    jets0.push_back(&j0);
+    jets0.push_back(j0);
 
     HypoJetVector jets1;
-    jets1.push_back(&j1);
+    jets1.push_back(j1);
     HypoJetGroupVector jetGroups{jets0, jets1};
 
     std::vector<double> etaMins{-1., -1., -1.};
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/src/makeHypoJets.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/src/makeHypoJets.cxx
index 4fdfae1690fd87c07be55e55f25f356fae3ee60f..f9ffaf623a02440772d470ccd9f3529c5af2084d 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/src/makeHypoJets.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/src/makeHypoJets.cxx
@@ -15,7 +15,7 @@ HypoJetVector makeHypoJets(const std::vector<double>& etas){
   TLorentzVectorFactory factory;
   
   auto make_jet = [&factory](double eta){
-    return new TLorentzVectorAsIJet(factory.make(eta, 10.));
+    return std::shared_ptr<const HypoJet::IJet>(new TLorentzVectorAsIJet(factory.make(eta, 10.)));
   };
 
   std::transform(etas.begin(),
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/DijetDEtaMassConditionTest.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/DijetDEtaMassConditionTest.cxx
index 04af84e359fdb2fd987f4d450d75e988156f9ba4..a3f27d25e6bb06c6f234e9ddffa69f4f7516693c 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/DijetDEtaMassConditionTest.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/DijetDEtaMassConditionTest.cxx
@@ -80,16 +80,16 @@ TEST_F(DijetDEtaMassConditionTest, artefacts){
 
 TEST_F(DijetDEtaMassConditionTest, accepts) {
     
-    MockJetWithLorentzVector jet0{m_tl0};
-    MockJetWithLorentzVector jet1{m_tl1};
+    auto jet0 = std::make_shared<MockJetWithLorentzVector>(m_tl0);
+    auto jet1 = std::make_shared<MockJetWithLorentzVector>(m_tl1);
        
-    EXPECT_CALL(jet0, et());
-    EXPECT_CALL(jet0, eta());
+    EXPECT_CALL(*jet0, et());
+    EXPECT_CALL(*jet0, eta());
     
-    EXPECT_CALL(jet1, et());
-    EXPECT_CALL(jet1, eta());
+    EXPECT_CALL(*jet1, et());
+    EXPECT_CALL(*jet1, eta());
       
-    HypoJetVector jets{&jet0, &jet1};
+    HypoJetVector jets{jet0, jet1};
     
     DijetDEtaMassCondition condition({0.5-0.001, 0.5-0.001}, 
                                      {0.5 + 0.001,  0.5 + 0.001}, 
@@ -105,16 +105,16 @@ TEST_F(DijetDEtaMassConditionTest, accepts) {
 
 TEST_F(DijetDEtaMassConditionTest, belowAbsEtaMinCut) {
     
-    MockJetWithLorentzVector jet0{m_tl0};
-    MockJetWithLorentzVector jet1{m_tl1};
+    auto jet0 = std::make_shared<MockJetWithLorentzVector>(m_tl0);
+    auto jet1 = std::make_shared<MockJetWithLorentzVector>(m_tl1);
     
-    HypoJetVector jets{&jet0, &jet1};
+    HypoJetVector jets{jet0, jet1};
      
-    EXPECT_CALL(jet0, et()).Times(AnyNumber());
-    EXPECT_CALL(jet0, eta()).Times(AnyNumber());
+    EXPECT_CALL(*jet0, et()).Times(AnyNumber());
+    EXPECT_CALL(*jet0, eta()).Times(AnyNumber());
     
-    EXPECT_CALL(jet1, et()).Times(AnyNumber());
-    EXPECT_CALL(jet1, eta()).Times(AnyNumber());
+    EXPECT_CALL(*jet1, et()).Times(AnyNumber());
+    EXPECT_CALL(*jet1, eta()).Times(AnyNumber());
       
     DijetDEtaMassCondition condition({0.5+0.001, 0.5+0.001}, 
                                      {0.5 + 0.002,  0.5 + 0.002}, 
@@ -139,17 +139,16 @@ TEST_F(DijetDEtaMassConditionTest, aboveAbsEtaMaxCut) {
                                    {104.2 -0.1},
                                    {104.2 + 0.1});
   
-  MockJetWithLorentzVector jet0{m_tl0};
-  MockJetWithLorentzVector jet1{m_tl1};
-
+  auto jet0 = std::make_shared<MockJetWithLorentzVector>(m_tl0);
+  auto jet1 = std::make_shared<MockJetWithLorentzVector>(m_tl1);
        
-  EXPECT_CALL(jet0, et()).Times(AnyNumber());
-  EXPECT_CALL(jet0, eta()).Times(AnyNumber());
+  EXPECT_CALL(*jet0, et()).Times(AnyNumber());
+  EXPECT_CALL(*jet0, eta()).Times(AnyNumber());
     
-  EXPECT_CALL(jet1, et()).Times(AnyNumber());
-  EXPECT_CALL(jet1, eta()).Times(AnyNumber());
+  EXPECT_CALL(*jet1, et()).Times(AnyNumber());
+  EXPECT_CALL(*jet1, eta()).Times(AnyNumber());
       
-  HypoJetVector jets{&jet0, &jet1};
+  HypoJetVector jets{jet0, jet1};
   
   EXPECT_FALSE(condition.isSatisfied(jets));
 }
@@ -165,17 +164,16 @@ TEST_F(DijetDEtaMassConditionTest, belowYStarCut) {
                                      {104.2 + 0.1});
 
 
-    MockJetWithLorentzVector jet0{m_tl0};
-    MockJetWithLorentzVector jet1{m_tl1};
+    auto jet0 = std::make_shared<MockJetWithLorentzVector>(m_tl0);
+    auto jet1 = std::make_shared<MockJetWithLorentzVector>(m_tl1);
 
-           
-    EXPECT_CALL(jet0, et()).Times(AnyNumber());
-    EXPECT_CALL(jet0, eta()).Times(AnyNumber());
+    EXPECT_CALL(*jet0, et()).Times(AnyNumber());
+    EXPECT_CALL(*jet0, eta()).Times(AnyNumber());
     
-    EXPECT_CALL(jet1, et()).Times(AnyNumber());
-    EXPECT_CALL(jet1, eta()).Times(AnyNumber());
+    EXPECT_CALL(*jet1, et()).Times(AnyNumber());
+    EXPECT_CALL(*jet1, eta()).Times(AnyNumber());
       
-    HypoJetVector jets{&jet0, &jet1};
+    HypoJetVector jets{jet0, jet1};
     
     EXPECT_FALSE(condition.isSatisfied(jets));
 }
@@ -191,16 +189,16 @@ TEST_F(DijetDEtaMassConditionTest, aboveYStarCut) {
                                      {104.2 -0.1},
                                      {104.2 + 0.1});
 
-    MockJetWithLorentzVector jet0{m_tl0};
-    MockJetWithLorentzVector jet1{m_tl1};
+    auto jet0 = std::make_shared<MockJetWithLorentzVector>(m_tl0);
+    auto jet1 = std::make_shared<MockJetWithLorentzVector>(m_tl1);
 
-    EXPECT_CALL(jet0, et()).Times(AnyNumber());
-    EXPECT_CALL(jet0, eta()).Times(AnyNumber());
+    EXPECT_CALL(*jet0, et()).Times(AnyNumber());
+    EXPECT_CALL(*jet0, eta()).Times(AnyNumber());
     
-    EXPECT_CALL(jet1, et()).Times(AnyNumber());
-    EXPECT_CALL(jet1, eta()).Times(AnyNumber());
+    EXPECT_CALL(*jet1, et()).Times(AnyNumber());
+    EXPECT_CALL(*jet1, eta()).Times(AnyNumber());
          
-    HypoJetVector jets{&jet0, &jet1};
+    HypoJetVector jets{jet0, jet1};
     
     EXPECT_FALSE(condition.isSatisfied(jets));
 }
@@ -216,16 +214,16 @@ TEST_F(DijetDEtaMassConditionTest, belowMassCut) {
                                      {1.0 + 0.001},
                                      {104.2 + 0.1},
                                      {104.2 + 0.2});
-    MockJetWithLorentzVector jet0{m_tl0};
-    MockJetWithLorentzVector jet1{m_tl1};
+    auto jet0 = std::make_shared<MockJetWithLorentzVector>(m_tl0);
+    auto jet1 = std::make_shared<MockJetWithLorentzVector>(m_tl1);
            
-    EXPECT_CALL(jet0, et()).Times(AnyNumber());
-    EXPECT_CALL(jet0, eta()).Times(AnyNumber());
+    EXPECT_CALL(*jet0, et()).Times(AnyNumber());
+    EXPECT_CALL(*jet0, eta()).Times(AnyNumber());
     
-    EXPECT_CALL(jet1, et()).Times(AnyNumber());
-    EXPECT_CALL(jet1, eta()).Times(AnyNumber());
+    EXPECT_CALL(*jet1, et()).Times(AnyNumber());
+    EXPECT_CALL(*jet1, eta()).Times(AnyNumber());
       
-    HypoJetVector jets{&jet0, &jet1};
+    HypoJetVector jets{jet0, jet1};
     EXPECT_FALSE(condition.isSatisfied(jets));
 }
 
@@ -240,16 +238,16 @@ TEST_F(DijetDEtaMassConditionTest, aboveMassCut) {
                                      {104.2 - 0.2},
                                      {104.2 - 0.1});
 
-    MockJetWithLorentzVector jet0{m_tl0};
-    MockJetWithLorentzVector jet1{m_tl1};
+    auto jet0 = std::make_shared<MockJetWithLorentzVector>(m_tl0);
+    auto jet1 = std::make_shared<MockJetWithLorentzVector>(m_tl1);
            
-    EXPECT_CALL(jet0, et()).Times(AnyNumber());
-    EXPECT_CALL(jet0, eta()).Times(AnyNumber());
+    EXPECT_CALL(*jet0, et()).Times(AnyNumber());
+    EXPECT_CALL(*jet0, eta()).Times(AnyNumber());
     
-    EXPECT_CALL(jet1, et()).Times(AnyNumber());
-    EXPECT_CALL(jet1, eta()).Times(AnyNumber());
+    EXPECT_CALL(*jet1, et()).Times(AnyNumber());
+    EXPECT_CALL(*jet1, eta()).Times(AnyNumber());
       
-    HypoJetVector jets{&jet0, &jet1};
+    HypoJetVector jets{jet0, jet1};
 
     EXPECT_FALSE(condition.isSatisfied(jets));
 }
@@ -266,11 +264,11 @@ TEST_F(DijetDEtaMassConditionTest, tooManyJets) {
                                      {104.2 -0.1},
                                      {104.2 + 0.1});
 
-    MockJetWithLorentzVector jet0{m_tl0};
-    MockJetWithLorentzVector jet1{m_tl1};
+    auto jet0 = std::make_shared<MockJetWithLorentzVector>(m_tl0);
+    auto jet1 = std::make_shared<MockJetWithLorentzVector>(m_tl1);
     
-    HypoJetVector jets{&jet0, &jet1};
-    jets.push_back(&jet0);
+    HypoJetVector jets{jet0, jet1};
+    jets.push_back(jet0);
     
     EXPECT_THROW(condition.isSatisfied(jets), std::runtime_error);
 }
@@ -287,9 +285,9 @@ TEST_F(DijetDEtaMassConditionTest, tooFewJets) {
                                      {104.2 -0.1},
                                      {104.2 + 0.1});
 
-    MockJetWithLorentzVector jet0{m_tl0};
+    auto jet0 = std::make_shared<MockJetWithLorentzVector>(m_tl0);
     
-    HypoJetVector jets{&jet0};
+    HypoJetVector jets{jet0};
     
     
     EXPECT_THROW(condition.isSatisfied(jets), std::runtime_error);
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/EtaEtConditionTest.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/EtaEtConditionTest.cxx
index 1e61ddb66ce7f9c53696904cd19a485836619607..8c44745a155da8320e59ab3977ae1ace36b91123 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/EtaEtConditionTest.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/EtaEtConditionTest.cxx
@@ -62,8 +62,8 @@ TEST_F(EtaEtConditionTest, artefacts){
   EXPECT_NEAR(m_tl0.Eta(), m_eta0, m_eps);
   EXPECT_NEAR(m_tl0.Et(), m_et0, m_eps);
 
-  MockJetWithLorentzVector jet0{m_tl0};
-  HypoJetVector jets{&jet0};
+  auto jet0 = std::make_shared<MockJetWithLorentzVector>(m_tl0);
+  HypoJetVector jets{jet0};
 
   EXPECT_EQ(jets.size(), static_cast<unsigned int>(1));
 }
@@ -73,11 +73,11 @@ TEST_F(EtaEtConditionTest, accepts) {
 
   EtaEtCondition condition(-1., 1., 99.99999);
 
-  MockJetWithLorentzVector jet0{m_tl0};
-  HypoJetVector jets{&jet0};
+  auto jet0 = std::make_shared<MockJetWithLorentzVector>(m_tl0);
+  HypoJetVector jets{jet0};
 
-  EXPECT_CALL(jet0, et()); 
-  EXPECT_CALL(jet0, eta());
+  EXPECT_CALL(*jet0, et()); 
+  EXPECT_CALL(*jet0, eta());
 
 
   EXPECT_TRUE(condition.isSatisfied(jets));
@@ -92,11 +92,11 @@ TEST_F(EtaEtConditionTest, accepts) {
 TEST_F(EtaEtConditionTest, belowEtaMinCut) {
   EtaEtCondition condition(-0.5+0.001, 1., 100.);
 
-  MockJetWithLorentzVector jet0{m_tl0};
-  HypoJetVector jets{&jet0};
+  auto jet0 = std::make_shared<MockJetWithLorentzVector>(m_tl0);
+  HypoJetVector jets{jet0};
 
-  EXPECT_CALL(jet0, et()); 
-  EXPECT_CALL(jet0, eta());
+  EXPECT_CALL(*jet0, et()); 
+  EXPECT_CALL(*jet0, eta());
 
   EXPECT_FALSE(condition.isSatisfied(jets));
 }
@@ -105,11 +105,11 @@ TEST_F(EtaEtConditionTest, belowEtaMinCut) {
 TEST_F(EtaEtConditionTest, aboveEtaMaxCut) {
   EtaEtCondition condition(-1.0, 0.5-0.001, 100.);
 
-  MockJetWithLorentzVector jet0{m_tl0};
-  HypoJetVector jets{&jet0};
+  auto jet0 = std::make_shared<MockJetWithLorentzVector>(m_tl0);
+  HypoJetVector jets{jet0};
 
-  EXPECT_CALL(jet0, et()); 
-  EXPECT_CALL(jet0, eta());
+  EXPECT_CALL(*jet0, et()); 
+  EXPECT_CALL(*jet0, eta());
 
   EXPECT_FALSE(condition.isSatisfied(jets));
 }
@@ -118,11 +118,11 @@ TEST_F(EtaEtConditionTest, aboveEtaMaxCut) {
 TEST_F(EtaEtConditionTest, belowEtCut) {
   EtaEtCondition condition(-1.0, 1.0, 100.001);
 
-  MockJetWithLorentzVector jet0{m_tl0};
-  HypoJetVector jets{&jet0};
+  auto jet0 = std::make_shared<MockJetWithLorentzVector>(m_tl0);
+  HypoJetVector jets{jet0};
 
-  EXPECT_CALL(jet0, et()); 
-  EXPECT_CALL(jet0, eta());
+  EXPECT_CALL(*jet0, et()); 
+  EXPECT_CALL(*jet0, eta());
 
   EXPECT_FALSE(condition.isSatisfied(jets));
 }
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/LlpCleanerTest.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/LlpCleanerTest.cxx
index ccc3d616b9b109aa9da93f17c140b6c35429036f..01b2824ea1f2242032e364010c578c2ca507fb86 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/LlpCleanerTest.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/LlpCleanerTest.cxx
@@ -65,7 +65,7 @@ TEST(LlpCleanerTest, SimpleThresholds) {
   {
     std::array<float, 5> args{1., 1., 1., 1., 1.};
     auto cleaner = makeLlpCleaner(args[0], args[1], args[2], args[3], args[4]);
-    EXPECT_TRUE(cleaner(&jet));
+    EXPECT_TRUE(cleaner(jet));
   }
 
   constexpr float eps = 0.00001;
@@ -75,70 +75,70 @@ TEST(LlpCleanerTest, SimpleThresholds) {
   {
     std::array<float, 5> args{1.-eps, 1., 1,  1., 1.};
     auto cleaner = makeLlpCleaner(args[0], args[1], args[2], args[3], args[4]);
-    EXPECT_TRUE(cleaner(&jet));
+    EXPECT_TRUE(cleaner(jet));
   }
 
   // jet below  fSampMaxTightThreshold, above NegativeE
   {
     std::array<float, 5> args{1., 1.-eps, 1,  1., 1.};
     auto cleaner = makeLlpCleaner(args[0], args[1], args[2], args[3], args[4]);
-    EXPECT_TRUE(cleaner(&jet));
+    EXPECT_TRUE(cleaner(jet));
   }
 
   // jet above  fSampMaxTightThreshold, above NegativeE
   {
     std::array<float, 5> args{1-eps, 1.-eps, 1,  1., 1.};
     auto cleaner = makeLlpCleaner(args[0], args[1], args[2], args[3], args[4]);
-    EXPECT_FALSE(cleaner(&jet));
+    EXPECT_FALSE(cleaner(jet));
   }
 
   // jet above HECFrac below HECQuality below AverageLArQF
   {
     std::array<float, 5> args{1., 1., 1-eps,  1., 1.};
     auto cleaner = makeLlpCleaner(args[0], args[1], args[2], args[3], args[4]);
-    EXPECT_TRUE(cleaner(&jet));
+    EXPECT_TRUE(cleaner(jet));
   }
 
   // jet below HECFrac above HECQuality below AverageLArQF
   {
     std::array<float, 5> args{1., 1., 1,  1.-eps, 1.};
     auto cleaner = makeLlpCleaner(args[0], args[1], args[2], args[3], args[4]);
-    EXPECT_TRUE(cleaner(&jet));
+    EXPECT_TRUE(cleaner(jet));
   }
 
   // jet below HECFrac below HECQuality above AverageLArQF
   {
     std::array<float, 5> args{1., 1., 1,  1., 1.-eps};
     auto cleaner = makeLlpCleaner(args[0], args[1], args[2], args[3], args[4]);
-    EXPECT_TRUE(cleaner(&jet));
+    EXPECT_TRUE(cleaner(jet));
   }
 
   // jet above HECFrac above HECQuality below AverageLArQF
   {
     std::array<float, 5> args{1., 1., 1.-eps,  1.-eps, 1.};
     auto cleaner = makeLlpCleaner(args[0], args[1], args[2], args[3], args[4]);
-    EXPECT_TRUE(cleaner(&jet));
+    EXPECT_TRUE(cleaner(jet));
   }
 
   // jet above HECFrac below HECQuality above AverageLArQF
   {
     std::array<float, 5> args{1., 1., 1.-eps,  1., 1.-eps};
     auto cleaner = makeLlpCleaner(args[0], args[1], args[2], args[3], args[4]);
-    EXPECT_TRUE(cleaner(&jet));
+    EXPECT_TRUE(cleaner(jet));
   }
 
   // jet below HECFrac above HECQuality above AverageLArQF
   {
     std::array<float, 5> args{1., 1., 1.,  1.-eps, 1.-eps};
     auto cleaner = makeLlpCleaner(args[0], args[1], args[2], args[3], args[4]);
-    EXPECT_TRUE(cleaner(&jet));
+    EXPECT_TRUE(cleaner(jet));
   }
 
   // jet above HECFrac above HECQuality above AverageLArQF
   {
     std::array<float, 5> args{1., 1., 1.-eps,  1.-eps, 1.-eps};
     auto cleaner = makeLlpCleaner(args[0], args[1], args[2], args[3], args[4]);
-    EXPECT_FALSE(cleaner(&jet));
+    EXPECT_FALSE(cleaner(jet));
   }
 
 }
@@ -153,5 +153,5 @@ TEST(LlpCleanerTest, ThrowsOnUncleanableJet) {
 
   
 
-  EXPECT_THROW(cleaner(&jet), UncleanableJet);
+  EXPECT_THROW(cleaner(jet), UncleanableJet);
 }
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/LooseCleanerTest.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/LooseCleanerTest.cxx
index 01fe9f437f4ea50f4a7fb483a252e155d959d810..e10d4dc4d17824f6b0bbfc18c1709230742a152a 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/LooseCleanerTest.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/LooseCleanerTest.cxx
@@ -55,7 +55,7 @@ TEST(LooseCleanerTest, SimpleThresholds) {
   {
     std::array<float, 5> args{1., 1., 1., 2., 1.};
     auto cleaner = makeLooseCleaner(args[0], args[1], args[2], args[3], args[4]);
-    EXPECT_TRUE(cleaner(&jet));
+    EXPECT_TRUE(cleaner(jet));
   }
 
   constexpr float eps = 0.00001;
@@ -65,21 +65,21 @@ TEST(LooseCleanerTest, SimpleThresholds) {
   {
     std::array<float, 5> args{1.-eps, 1+ eps, 1,  2., 1.};
     auto cleaner = makeLooseCleaner(args[0], args[1], args[2], args[3], args[4]);
-    EXPECT_FALSE(cleaner(&jet));
+    EXPECT_FALSE(cleaner(jet));
   }
 
   // jet above  fSampMaxTightThreshold, outside applicable eta range
   {
     std::array<float, 5> args{1.-eps, 0.5- eps, 1,  2., 1.};
     auto cleaner = makeLooseCleaner(args[0], args[1], args[2], args[3], args[4]);
-    EXPECT_TRUE(cleaner(&jet));
+    EXPECT_TRUE(cleaner(jet));
   }
 
   // jet below _emfLowTightThreshold
   {
     std::array<float, 5> args{1., 1., 1+eps,  2., 1.};
     auto cleaner = makeLooseCleaner(args[0], args[1], args[2], args[3], args[4]);
-    EXPECT_FALSE(cleaner(&jet));
+    EXPECT_FALSE(cleaner(jet));
   }
 
 
@@ -87,7 +87,7 @@ TEST(LooseCleanerTest, SimpleThresholds) {
   {
     std::array<float, 5> args{1., 1., 0.5,  1.-eps, 1.};
     auto cleaner = makeLooseCleaner(args[0], args[1], args[2], args[3], args[4]);
-    EXPECT_FALSE(cleaner(&jet));
+    EXPECT_FALSE(cleaner(jet));
   }
 
 
@@ -95,7 +95,7 @@ TEST(LooseCleanerTest, SimpleThresholds) {
   {
     std::array<float, 5> args{1., 1., 1.0,  1.0, 1. -eps};
     auto cleaner = makeLooseCleaner(args[0], args[1], args[2], args[3], args[4]);
-    EXPECT_FALSE(cleaner(&jet));
+    EXPECT_FALSE(cleaner(jet));
   }
 
 }
@@ -110,5 +110,5 @@ TEST(LooseCleanerTest, ThrowsOnUncleanableJet) {
 
   
 
-  EXPECT_THROW(cleaner(&jet), UncleanableJet);
+  EXPECT_THROW(cleaner(jet), UncleanableJet);
 }
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/MaximumBipartiteGroupsMatcherMTTest.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/MaximumBipartiteGroupsMatcherMTTest.cxx
index 6299b3570e38384788c8b4a112db728a875a93d6..1640dbd5b0753266520e07c913c780966ed9df47 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/MaximumBipartiteGroupsMatcherMTTest.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/MaximumBipartiteGroupsMatcherMTTest.cxx
@@ -88,10 +88,10 @@ TEST_F(MaximumBipartiteGroupsMatcherMTTest, tooFewSelectedJets){
   auto factory = TLorentzVectorFactory();
   auto tl = factory.make(eta, et);
 
-  MockJetWithLorentzVector jet0(tl);
-  MockJetWithLorentzVector jet1{tl};
+  auto jet0 = std::make_shared<const MockJetWithLorentzVector>(tl);
+  auto jet1 = std::make_shared<const MockJetWithLorentzVector>(tl);
 
-  HypoJetVector jets{&jet0, &jet1};
+  HypoJetVector jets{jet0, jet1};
 
   std::unique_ptr<IGroupsMatcherMT> matcher(nullptr);
   matcher.reset(new MaximumBipartiteGroupsMatcherMT(std::move(m_conditions)));
@@ -125,26 +125,26 @@ TEST_F(MaximumBipartiteGroupsMatcherMTTest, oneSelectedJet){
   auto factory = TLorentzVectorFactory();
   auto tl = factory.make(eta, et);
 
-  MockJetWithLorentzVector jet0(tl);
-  MockJetWithLorentzVector jet1{tl};
-  MockJetWithLorentzVector jet2{tl};
+  auto jet0 = std::make_shared<const MockJetWithLorentzVector>(tl);
+  auto jet1 = std::make_shared<const MockJetWithLorentzVector>(tl);
+  auto jet2 = std::make_shared<const MockJetWithLorentzVector>(tl);
 
   eta = 0;
   et = 150000;
   auto tl0 = factory.make(eta, et);
-  MockJetWithLorentzVector jet3{tl0};
+  auto jet3 = std::make_shared<const MockJetWithLorentzVector>(tl0);
 
-  HypoJetVector jets{&jet0, &jet1, &jet2, &jet3};
+  HypoJetVector jets{jet0, jet1, jet2, jet3};
 
-  EXPECT_CALL(jet0, eta()).Times(m_nconditions);
-  EXPECT_CALL(jet1, eta()).Times(m_nconditions);
-  EXPECT_CALL(jet2, eta()).Times(m_nconditions);
-  EXPECT_CALL(jet3, eta()).Times(m_nconditions);
+  EXPECT_CALL(*jet0, eta()).Times(m_nconditions);
+  EXPECT_CALL(*jet1, eta()).Times(m_nconditions);
+  EXPECT_CALL(*jet2, eta()).Times(m_nconditions);
+  EXPECT_CALL(*jet3, eta()).Times(m_nconditions);
 
-  EXPECT_CALL(jet0, et()).Times(m_nconditions);
-  EXPECT_CALL(jet1, et()).Times(m_nconditions);
-  EXPECT_CALL(jet2, et()).Times(m_nconditions);
-  EXPECT_CALL(jet3, et()).Times(m_nconditions);
+  EXPECT_CALL(*jet0, et()).Times(m_nconditions);
+  EXPECT_CALL(*jet1, et()).Times(m_nconditions);
+  EXPECT_CALL(*jet2, et()).Times(m_nconditions);
+  EXPECT_CALL(*jet3, et()).Times(m_nconditions);
 
   std::unique_ptr<IGroupsMatcherMT> matcher(nullptr);
   matcher.reset(new MaximumBipartiteGroupsMatcherMT(std::move(m_conditions)));
@@ -178,37 +178,37 @@ TEST_F(MaximumBipartiteGroupsMatcherMTTest, twoSelectedJets){
   auto factory = TLorentzVectorFactory();
   auto tl0 = factory.make(eta, et);
 
-  MockJetWithLorentzVector jet0(tl0);
+  auto jet0 = std::make_shared<const MockJetWithLorentzVector>(tl0);
 
   eta = 0.1;
   et = 139;
   auto tl1 = factory.make(eta, et);
 
-  MockJetWithLorentzVector jet1{tl1};
+  auto jet1 = std::make_shared<const MockJetWithLorentzVector>(tl1);
 
 
   eta = 5.;
   et = 100.;
   auto tl2 = factory.make(eta, et);
-  MockJetWithLorentzVector jet2{tl2};
+  auto jet2 = std::make_shared<const MockJetWithLorentzVector>(tl2);
 
   eta = 0;
   et = 150;
   auto tl3 = factory.make(eta, et);
-  MockJetWithLorentzVector jet3{tl3};
+  auto jet3 = std::make_shared<const MockJetWithLorentzVector>(tl3);
 
-  HypoJetVector jets{&jet0, &jet1, &jet2, &jet3};
+  HypoJetVector jets{jet0, jet1, jet2, jet3};
 
 
-  EXPECT_CALL(jet0, eta()).Times(m_nconditions);
-  EXPECT_CALL(jet1, eta()).Times(m_nconditions);
-  EXPECT_CALL(jet2, eta()).Times(m_nconditions);
-  EXPECT_CALL(jet3, eta()).Times(m_nconditions);
+  EXPECT_CALL(*jet0, eta()).Times(m_nconditions);
+  EXPECT_CALL(*jet1, eta()).Times(m_nconditions);
+  EXPECT_CALL(*jet2, eta()).Times(m_nconditions);
+  EXPECT_CALL(*jet3, eta()).Times(m_nconditions);
 
-  EXPECT_CALL(jet0, et()).Times(m_nconditions);
-  EXPECT_CALL(jet1, et()).Times(m_nconditions);
-  EXPECT_CALL(jet2, et()).Times(m_nconditions);
-  EXPECT_CALL(jet3, et()).Times(m_nconditions);
+  EXPECT_CALL(*jet0, et()).Times(m_nconditions);
+  EXPECT_CALL(*jet1, et()).Times(m_nconditions);
+  EXPECT_CALL(*jet2, et()).Times(m_nconditions);
+  EXPECT_CALL(*jet3, et()).Times(m_nconditions);
 
   std::unique_ptr<IGroupsMatcherMT> matcher(nullptr);
   matcher.reset(new MaximumBipartiteGroupsMatcherMT(std::move(m_conditions)));
@@ -246,37 +246,37 @@ TEST_F(MaximumBipartiteGroupsMatcherMTTest, threeSelectedJets){
   auto factory = TLorentzVectorFactory();
   auto tl0 = factory.make(eta, et);
 
-  MockJetWithLorentzVector jet0(tl0);
+  auto jet0 = std::make_shared<const MockJetWithLorentzVector>(tl0);
 
   eta = 0.1;
   et = 139;
   auto tl1 = factory.make(eta, et);
 
-  MockJetWithLorentzVector jet1{tl1};
+  auto jet1 = std::make_shared<const MockJetWithLorentzVector>(tl1);
 
 
   eta = 0.5;
   et = 141.;
   auto tl2 = factory.make(eta, et);
-  MockJetWithLorentzVector jet2{tl2};
+  auto jet2 = std::make_shared<const MockJetWithLorentzVector>(tl2);
 
   eta = -0.2;
   et = 101.;
   auto tl3 = factory.make(eta, et);
-  MockJetWithLorentzVector jet3{tl3};
+  auto jet3 = std::make_shared<const MockJetWithLorentzVector>(tl3);
 
-  HypoJetVector jets{&jet0, &jet1, &jet2, &jet3};
+  HypoJetVector jets{jet0, jet1, jet2, jet3};
 
 
-  EXPECT_CALL(jet1, eta()).Times(m_nconditions);
-  EXPECT_CALL(jet2, eta()).Times(m_nconditions);
-  EXPECT_CALL(jet3, eta()).Times(m_nconditions);
-  EXPECT_CALL(jet0, eta()).Times(m_nconditions);
+  EXPECT_CALL(*jet1, eta()).Times(m_nconditions);
+  EXPECT_CALL(*jet2, eta()).Times(m_nconditions);
+  EXPECT_CALL(*jet3, eta()).Times(m_nconditions);
+  EXPECT_CALL(*jet0, eta()).Times(m_nconditions);
 
-  EXPECT_CALL(jet1, et()).Times(m_nconditions);
-  EXPECT_CALL(jet2, et()).Times(m_nconditions);
-  EXPECT_CALL(jet3, et()).Times(m_nconditions);
-  EXPECT_CALL(jet0, et()).Times(m_nconditions);
+  EXPECT_CALL(*jet1, et()).Times(m_nconditions);
+  EXPECT_CALL(*jet2, et()).Times(m_nconditions);
+  EXPECT_CALL(*jet3, et()).Times(m_nconditions);
+  EXPECT_CALL(*jet0, et()).Times(m_nconditions);
 
   std::unique_ptr<IGroupsMatcherMT> matcher(nullptr);
   matcher.reset(new MaximumBipartiteGroupsMatcherMT(std::move(m_conditions)));
@@ -310,36 +310,36 @@ TEST_F(MaximumBipartiteGroupsMatcherMTTest, fourSelectedJets){
   auto factory = TLorentzVectorFactory();
   auto tl0 = factory.make(eta, et);
 
-  MockJetWithLorentzVector jet0(tl0);
+  auto jet0 = std::make_shared<const MockJetWithLorentzVector>(tl0);
 
   eta = 0.1;
   et = 139;
   auto tl1 = factory.make(eta, et);
 
-  MockJetWithLorentzVector jet1{tl1};
+  auto jet1 = std::make_shared<const MockJetWithLorentzVector>(tl1);
 
 
   eta = 0.5;
   et = 175.;
   auto tl2 = factory.make(eta, et);
-  MockJetWithLorentzVector jet2{tl2};
+  auto jet2 = std::make_shared<const MockJetWithLorentzVector>(tl2);
 
   eta = -0.2;
   et = 101.;
   auto tl3 = factory.make(eta, et);
-  MockJetWithLorentzVector jet3{tl3};
+  auto jet3 = std::make_shared<const MockJetWithLorentzVector>(tl3);
 
-  HypoJetVector jets{&jet0, &jet1, &jet2, &jet3};
+  HypoJetVector jets{jet0, jet1, jet2, jet3};
 
-  EXPECT_CALL(jet0, eta()).Times(m_nconditions);
-  EXPECT_CALL(jet1, eta()).Times(m_nconditions);
-  EXPECT_CALL(jet2, eta()).Times(m_nconditions);
-  EXPECT_CALL(jet3, eta()).Times(m_nconditions);
+  EXPECT_CALL(*jet0, eta()).Times(m_nconditions);
+  EXPECT_CALL(*jet1, eta()).Times(m_nconditions);
+  EXPECT_CALL(*jet2, eta()).Times(m_nconditions);
+  EXPECT_CALL(*jet3, eta()).Times(m_nconditions);
 
-  EXPECT_CALL(jet0, et()).Times(m_nconditions);
-  EXPECT_CALL(jet1, et()).Times(m_nconditions);
-  EXPECT_CALL(jet2, et()).Times(m_nconditions);
-  EXPECT_CALL(jet3, et()).Times(m_nconditions);
+  EXPECT_CALL(*jet0, et()).Times(m_nconditions);
+  EXPECT_CALL(*jet1, et()).Times(m_nconditions);
+  EXPECT_CALL(*jet2, et()).Times(m_nconditions);
+  EXPECT_CALL(*jet3, et()).Times(m_nconditions);
 
   std::unique_ptr<IGroupsMatcherMT> matcher(nullptr);
  matcher.reset(new MaximumBipartiteGroupsMatcherMT(std::move(m_conditions)));
@@ -384,36 +384,36 @@ TEST_F(MaximumBipartiteGroupsMatcherMTTest, overlappingEtaRegions){
   auto factory = TLorentzVectorFactory();
   auto tl0 = factory.make(eta, et);
 
-  MockJetWithLorentzVector jet0(tl0);
+  auto jet0 = std::make_shared<const MockJetWithLorentzVector>(tl0);
 
   eta = 0.1;
   et = 101;
   auto tl1 = factory.make(eta, et);
 
-  MockJetWithLorentzVector jet1{tl1};
+  auto jet1 = std::make_shared<const MockJetWithLorentzVector>(tl1);
 
 
   eta = 0.1;
   et = 91.;
   auto tl2 = factory.make(eta, et);
-  MockJetWithLorentzVector jet2{tl2};
+  auto jet2 = std::make_shared<const MockJetWithLorentzVector>(tl2);
 
   eta = 0.1;
   et = 81.;
   auto tl3 = factory.make(eta, et);
-  MockJetWithLorentzVector jet3{tl3};
+  auto jet3 = std::make_shared<const MockJetWithLorentzVector>(tl3);
 
-  HypoJetVector jets{&jet0, &jet1, &jet2, &jet3};
+  HypoJetVector jets{jet0, jet1, jet2, jet3};
   
-  EXPECT_CALL(jet0, eta()).Times(nconditions);
-  EXPECT_CALL(jet1, eta()).Times(nconditions);
-  EXPECT_CALL(jet2, eta()).Times(nconditions);
-  EXPECT_CALL(jet3, eta()).Times(nconditions);
+  EXPECT_CALL(*jet0, eta()).Times(nconditions);
+  EXPECT_CALL(*jet1, eta()).Times(nconditions);
+  EXPECT_CALL(*jet2, eta()).Times(nconditions);
+  EXPECT_CALL(*jet3, eta()).Times(nconditions);
   
-  EXPECT_CALL(jet0, et()).Times(nconditions);
-  EXPECT_CALL(jet1, et()).Times(nconditions);
-  EXPECT_CALL(jet2, et()).Times(nconditions);
-  EXPECT_CALL(jet3, et()).Times(nconditions);
+  EXPECT_CALL(*jet0, et()).Times(nconditions);
+  EXPECT_CALL(*jet1, et()).Times(nconditions);
+  EXPECT_CALL(*jet2, et()).Times(nconditions);
+  EXPECT_CALL(*jet3, et()).Times(nconditions);
   
   std::unique_ptr<IGroupsMatcherMT> matcher(nullptr);
   matcher.reset(new MaximumBipartiteGroupsMatcherMT(std::move(conditions)));
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/MaximumBipartiteGroupsMatcherMTTest_Multijet.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/MaximumBipartiteGroupsMatcherMTTest_Multijet.cxx
new file mode 100644
index 0000000000000000000000000000000000000000..addcca6599b302836f1a68af240cdf6fa2b0dc6e
--- /dev/null
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/MaximumBipartiteGroupsMatcherMTTest_Multijet.cxx
@@ -0,0 +1,524 @@
+/*
+  Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+*/
+
+#include "TrigHLTJetHypo/../src//MaximumBipartiteGroupsMatcherMT.h"
+#include "TrigHLTJetHypo/TrigHLTJetHypoUtils/HypoJetDefs.h"
+#include "TrigHLTJetHypo/../src/ConditionsDefsMT.h"
+#include "TrigHLTJetHypo/../src/conditionsFactoryMT.h"
+#include "TrigHLTJetHypo/../src/DebugInfoCollector.h"
+#include "TrigHLTJetHypo/../src/xAODJetCollector.h"
+#include "TrigHLTJetHypo/../src/MultijetFlowNetworkBuilder.h"
+#include "TrigHLTJetHypo/TrigHLTJetHypoUtils/CombinationsGrouper.h"
+
+#include "../src/makeHypoJets.h"
+#include "gtest/gtest.h"
+
+#include <TLorentzVector.h>
+#include <memory>
+#include <iostream>
+#include <fstream>
+#include <sstream>
+#include <limits>
+#include <algorithm>
+
+
+/*
+ * MaximumBipartiteMatcher functionality tested:
+ * 0 fail if no jet vector indices
+ * 1 fail if there are fewer passing jets than conditions
+ * 2 pass if there are at least as many passing jets as conditions
+ * 3 conditions are ordered by threshold
+ * 4 jets are ordered by Et
+ *
+ * Mocked objects:
+ * - jet: will be ordered on ET, need TLorentzVector, hence
+ *        MockJetWithLorentzVector
+ * - ICondition
+ */
+
+
+class MaximumBipartiteGroupsMatcherMTTest_Multijet: public ::testing::Test {
+public:
+  MaximumBipartiteGroupsMatcherMTTest_Multijet() {
+  }
+
+  ConditionsMT m_conditions;
+  int m_nconditions;
+  bool m_debug{false};
+  
+  HypoJetGroupVector makeJetGroupsMT(HypoJetIter b, HypoJetIter e){
+    CombinationsGrouper g(2);  // dijet groups
+    return g.group(b, e)[0];
+  }
+
+  void makeConditions(const std::vector<double>& detaMins,
+                      const std::vector<double>& detaMaxs){
+    std::vector<double> massMins{0., 0.};
+    std::vector<double> massMaxs{
+      std::numeric_limits<double>::max(),
+        std::numeric_limits<double>::max(),
+        };
+
+
+    std::vector<double> dphiMins{0., 0.};
+    std::vector<double> dphiMaxs{
+      std::numeric_limits<double>::max(),
+        std::numeric_limits<double>::max(),
+        };
+
+
+    m_conditions = conditionsFactoryDijetMT(massMins, massMaxs,
+                                            detaMins, detaMaxs,
+                                            dphiMins, dphiMaxs);
+    m_nconditions = m_conditions.size();
+  }
+};
+
+
+TEST_F(MaximumBipartiteGroupsMatcherMTTest_Multijet, debugFlagIsFalse){
+  /* idiot test to ensure the debug flag is off prior to committing */
+  EXPECT_FALSE(m_debug);
+}
+
+TEST_F(MaximumBipartiteGroupsMatcherMTTest_Multijet, mj_flowNetworkBuilder_0){
+  /* (j0, j1) -> c0
+     (j0, j2) -> c0
+     (j0, j1) -> c1
+     Fails - j0 is shared.
+  */
+
+  auto out = std::make_unique<std::ofstream>(nullptr);
+  if (m_debug){out.reset(new std::ofstream("mj_flowNetworkBuilder_0.log"));}
+
+  std::vector<double> detaMins{3.6, 5.5};
+
+  std::vector<double> detaMaxs{
+    std::numeric_limits<double>::max(),
+      std::numeric_limits<double>::max(),
+      };
+  makeConditions(detaMins, detaMaxs);
+
+
+  if(out){
+    for(const auto& c : m_conditions){*out << c->toString();}
+  }
+
+
+  std::vector<double> etas{-5.0, 1.0, -1.0, -2.5};
+  EXPECT_TRUE(etas.size() == 4);
+
+    
+
+  auto jets = makeHypoJets(etas);
+
+  EXPECT_TRUE(jets.size() == 4);
+  if(m_debug){
+    for(const auto & j: jets){*out<< j << " " << j->toString() <<'\n';}
+  }
+  EXPECT_TRUE(m_conditions.size() == 2);
+  std::unique_ptr<IFlowNetworkBuilder> builder =
+    std::make_unique<MultijetFlowNetworkBuilder>(std::move(m_conditions));
+
+  std::map<int, pHypoJet> nodeToJet;
+
+  auto groups = makeJetGroupsMT(jets.begin(), jets.end());
+  EXPECT_TRUE(groups.size() == 6);
+
+  auto collector = std::unique_ptr<ITrigJetHypoInfoCollector>();
+
+  collector.reset(new DebugInfoCollector("mj_flowNetworkBuilder_0"));   
+
+  auto G = builder->create(groups.begin(), groups.end(), collector, nodeToJet);
+  EXPECT_FALSE(G.has_value());
+}
+
+TEST_F(MaximumBipartiteGroupsMatcherMTTest_Multijet, mj_flowNetworkBuilder_1){
+  /* (j0, j1) -> c0
+     (j0, j1) -> c1
+     (j3, j4) -> c1
+     Passes.
+  */
+
+  auto out = std::make_unique<std::ofstream>(nullptr);
+  if (m_debug){out.reset(new std::ofstream("mj_flowNetworkBuilder_1.log"));}
+
+  std::vector<double> detaMins{0., 0.};
+
+  std::vector<double> detaMaxs{1.0, 2.0};
+  makeConditions(detaMins, detaMaxs);
+
+
+  if(out){
+    for(const auto& c : m_conditions){*out << c->toString();}
+  }
+
+  std::vector<double> etas{-5.0, -4.9, 3.9, 5.0};
+  EXPECT_TRUE(etas.size() == 4);
+
+    
+  auto jets = makeHypoJets(etas);
+  EXPECT_TRUE(jets.size() == 4);
+  if(m_debug){
+    for(const auto & j: jets){*out<<j<< " " << j->toString() <<'\n';}
+  }
+  EXPECT_TRUE(m_conditions.size() == 2);
+
+  std::unique_ptr<IFlowNetworkBuilder> builder = 
+    std::make_unique<MultijetFlowNetworkBuilder>(std::move(m_conditions));
+
+  std::map<int, pHypoJet> nodeToJet;
+
+  auto groups = makeJetGroupsMT(jets.begin(), jets.end());
+  EXPECT_TRUE(groups.size() == 6);
+  std::unique_ptr<ITrigJetHypoInfoCollector> collector =
+    std::make_unique<DebugInfoCollector>("mj_flowNetworkBuilder_1_collector");
+
+  auto G = builder->create(groups.begin(), groups.end(), collector, nodeToJet);
+  EXPECT_TRUE(G.has_value());
+
+  if(m_debug){
+    std::stringstream ss;
+    ss << **G << '\n'; 
+    collector->collect("FlowNetwork", ss.str());
+    collector->write();
+  }
+
+  EXPECT_TRUE((*G)->V() == 14);
+  EXPECT_TRUE(((*G)->edges()).size() == 13);
+}
+
+TEST_F(MaximumBipartiteGroupsMatcherMTTest_Multijet, mj_flowNetworkBuilder_2){
+  /* (j0, j1) -> c0
+     (j0, j1) -> c1
+     (j3, j4) -> c1
+     Passes.
+     Test with no collector.
+  */
+
+  auto out = std::make_unique<std::ofstream>(nullptr);
+  if (m_debug){out.reset(new std::ofstream("mj_flowNetworkBuilder_2.log"));}
+
+  std::vector<double> detaMins{0., 0.};
+
+  std::vector<double> detaMaxs{1.0, 2.0};
+  makeConditions(detaMins, detaMaxs);
+
+
+  if(out){
+    for(const auto& c : m_conditions){*out << c->toString();}
+  }
+
+  std::vector<double> etas{-5.0, -4.9, 3.9, 5.0};
+  EXPECT_TRUE(etas.size() == 4);
+  
+  auto jets = makeHypoJets(etas);
+
+  EXPECT_TRUE(jets.size() == 4);
+  if(m_debug){
+    for(const auto & j: jets){*out<<j<< " " << j->toString() <<'\n';}
+  }
+  EXPECT_TRUE(m_conditions.size() == 2);
+
+  std::unique_ptr<IFlowNetworkBuilder> builder =
+    std::make_unique<MultijetFlowNetworkBuilder>(std::move(m_conditions));
+  std::map<int, pHypoJet> nodeToJet;
+
+  auto groups = makeJetGroupsMT(jets.begin(), jets.end());
+  EXPECT_TRUE(groups.size() == 6);
+  auto collector = std::unique_ptr<ITrigJetHypoInfoCollector>();
+
+  auto G = builder->create(groups.begin(), groups.end(), collector, nodeToJet);
+  EXPECT_TRUE(G.has_value());
+
+  EXPECT_TRUE((*G)->V() == 14);
+  EXPECT_TRUE(((*G)->edges()).size() == 13);
+}
+
+
+TEST_F(MaximumBipartiteGroupsMatcherMTTest_Multijet, mj0){
+  /* (j0, j1) -> c0
+     (j0, j2) -> c0
+     (j0, j1) -> c1
+     Fails - j0 is shared.
+  */
+
+  auto out = std::make_unique<std::ofstream>(nullptr);
+  if (m_debug){out.reset(new std::ofstream("Multijet_mj0.log"));}
+
+  std::vector<double> detaMins{3.6, 5.5};
+  
+  std::vector<double> detaMaxs{
+    std::numeric_limits<double>::max(),
+      std::numeric_limits<double>::max(),
+      };
+  makeConditions(detaMins, detaMaxs);
+
+
+  if(out){
+    for(const auto& c : m_conditions){*out << c->toString();}
+  }
+
+  std::vector<double> etas{-5.0, 1.0, -1.0, -2.5};
+  EXPECT_TRUE(etas.size() == 4);
+  
+  auto jets = makeHypoJets(etas);
+
+  if(m_debug){
+    for(const auto & j: jets){*out<< j << " " << j->toString() <<'\n';}
+  }
+  EXPECT_TRUE(jets.size() == 4);
+
+  
+  EXPECT_TRUE(m_conditions.size() == 2);
+  
+  auto matcher = std::unique_ptr<IGroupsMatcherMT>(nullptr);
+  matcher.reset(new MaximumBipartiteGroupsMatcherMT(std::move(m_conditions)));
+
+  auto groups = makeJetGroupsMT(jets.begin(), jets.end());
+  EXPECT_TRUE(groups.size() == 6);
+  auto collector = std::unique_ptr<ITrigJetHypoInfoCollector>();
+  collector.reset(new DebugInfoCollector("mj0"));
+
+  xAODJetCollector jetCollector;
+  
+  auto pass = matcher->match(groups.begin(),
+			     groups.end(),
+			     jetCollector,
+			     collector);
+  
+  if (m_debug){collector->write();}
+
+  EXPECT_TRUE(jetCollector.empty());
+  EXPECT_FALSE(*pass);
+}
+
+
+// TEST_F(MaximumBipartiteGroupsMatcherMTTest_Multijet, mj1){
+//   /* (j0, j3) -> c0
+//      (j0, j1) -> c1
+//      Fails - shared j0.
+//   */
+// 
+//   auto out = std::make_unique<std::ofstream>(nullptr);
+//   if (m_debug){out.reset(new std::ofstream("Multijet_mj1.log"));}
+// 
+//   std::vector<double> detaMins{3.6, 5.5};
+//   
+//   std::vector<double> detaMaxs{4.5, 6.5};
+//   makeConditions(detaMins, detaMaxs);
+// 
+// 
+//   if(out){
+//     for(const auto& c : m_conditions){*out << c->toString();}
+//   }
+// 
+//   std::vector<double> etas{-5.0, 1.0, -1.0, -2.5};
+//   EXPECT_TRUE(etas.size() == 4);
+//   
+//   auto jets = makeHypoJets(etas);
+// 
+//   EXPECT_TRUE(jets.size() == 4);
+//   if(m_debug){
+//     for(const auto & j: jets){*out<<j<< " " << j->toString() <<'\n';}
+//   }
+//   EXPECT_TRUE(m_conditions.size() == 2);
+// 
+//   auto matcher = std::unique_ptr<IGroupsMatcherMT>(nullptr);
+//   matcher.reset(new MaximumBipartiteGroupsMatcherMT(std::move(m_conditions)));
+// 
+//   auto groups = makeJetGroupsMT(jets.begin(), jets.end());
+//   EXPECT_TRUE(groups.size() == 6);
+//   auto collector = std::unique_ptr<ITrigJetHypoInfoCollector>();
+// 
+//   if(m_debug){
+//     collector.reset(new DebugInfoCollector("mj1"));
+//   }
+// 
+//   xAODJetCollector jetCollector;
+// 
+//   auto pass = matcher->match(groups.begin(),
+// 			     groups.end(),
+// 			     jetCollector,
+// 			     collector,
+// 			     m_debug);
+// 
+//   if(m_debug){collector->write();}
+//   
+//   EXPECT_TRUE(jetCollector.empty());
+//   EXPECT_FALSE(*pass);
+// }
+// 
+// 
+// TEST_F(MaximumBipartiteGroupsMatcherMTTest_Multijet, mj2){
+//   /* (j0, j1) -> c0
+//      (j3, j4) -> c1
+//      Passes 
+//   */
+// 
+//   auto out = std::make_unique<std::ofstream>(nullptr);
+//   if (m_debug){out.reset(new std::ofstream("Multijet_mj2.log"));}
+// 
+//   std::vector<double> detaMins{0.0, 1.0};
+//   
+//   std::vector<double> detaMaxs{1.5, 2.0};
+//   makeConditions(detaMins, detaMaxs);
+// 
+// 
+//   if(out){
+//     for(const auto& c : m_conditions){*out << c->toString();}
+//   }
+// 
+//   std::vector<double> etas{-5.0, -4.9, 5.0, 3.25};
+//   EXPECT_TRUE(etas.size() == 4);
+// 
+//   auto jets = makeHypoJets(etas);
+//   
+//   EXPECT_TRUE(jets.size() == 4);
+//   if(m_debug){
+//     for(const auto & j: jets){*out<<j<< " " << j->toString() <<'\n';}
+//   }
+//   EXPECT_TRUE(m_conditions.size() == 2);
+// 
+//   auto matcher = std::unique_ptr<IGroupsMatcherMT>(nullptr);
+//   matcher.reset(new MaximumBipartiteGroupsMatcherMT(std::move(m_conditions)));
+// 
+//     auto groups = makeJetGroupsMT(jets.begin(), jets.end());
+//   EXPECT_TRUE(groups.size() == 6);
+//   auto collector = std::unique_ptr<ITrigJetHypoInfoCollector>(nullptr);
+// 
+//   if(m_debug){
+//     collector.reset(new DebugInfoCollector("mj2"));
+//   }
+// 
+//   xAODJetCollector jetCollector;
+// 
+//   auto pass = matcher->match(groups.begin(),
+// 			     groups.end(),
+// 			     jetCollector,
+// 			     collector,
+// 			     m_debug);
+// 
+//   if(m_debug){collector->write();}
+//   
+//   EXPECT_TRUE(jetCollector.empty()); // not xAOD jets
+//   
+//   EXPECT_TRUE(*pass);
+// }
+// 
+// 
+// TEST_F(MaximumBipartiteGroupsMatcherMTTest_Multijet, mj3){
+//   /* (j0, j1) -> c0
+//      (j0, j1) -> c1
+//      (j3, j4) -> c1
+//      Passes.
+//   */
+// 
+//   auto out = std::make_unique<std::ofstream>(nullptr);
+//   if (m_debug){out.reset(new std::ofstream("Multijet_mj3.log"));}
+// 
+//   std::vector<double> detaMins{0., 0.};
+//   
+//   std::vector<double> detaMaxs{1.0, 2.0};
+//   makeConditions(detaMins, detaMaxs);
+// 
+// 
+//   if(out){
+//     for(const auto& c : m_conditions){*out << c->toString();}
+//   }
+// 
+//   std::vector<double> etas{-5.0, -4.9, 4.0, 5.0};
+//   EXPECT_TRUE(etas.size() == 4);
+// 
+//   auto jets = makeHypoJets(etas);
+// 
+//   EXPECT_TRUE(jets.size() == 4);
+// 
+//   if(m_debug){
+//     for(const auto & j: jets){*out<<j<< " " << j->toString() <<'\n';}
+//   }
+//   EXPECT_TRUE(m_conditions.size() == 2);
+// 
+//   auto matcher = std::unique_ptr<IGroupsMatcherMT>(nullptr);
+//   matcher.reset(new MaximumBipartiteGroupsMatcherMT(std::move(m_conditions)));
+//   
+//     auto groups = makeJetGroupsMT(jets.begin(), jets.end());
+//   EXPECT_TRUE(groups.size() == 6);
+//   auto collector = std::unique_ptr<ITrigJetHypoInfoCollector>();
+//   collector.reset(new DebugInfoCollector("mj3"));
+// 
+//   xAODJetCollector jetCollector;
+// 
+//   auto pass = matcher->match(groups.begin(),
+// 			     groups.end(),
+// 			     jetCollector,
+// 			     collector);
+// 
+//   if(m_debug){collector->write();}
+//   
+//   EXPECT_TRUE(jetCollector.empty()); //not xAOD jets
+//   EXPECT_TRUE(*pass);
+// }
+// 
+// 
+// TEST_F(MaximumBipartiteGroupsMatcherMTTest_Multijet, mj4){
+//   /* (j0, j1) -> c0
+//      (j0, j1) -> c1
+//      (j3, j4) -> c0
+//      (j3, j4) -> c1
+// 
+//      Passes.
+//   */
+// 
+//   auto out = std::make_unique<std::ofstream>(nullptr);
+//   if (m_debug){out.reset(new std::ofstream("Multijet_mj4.log"));}
+// 
+//   std::vector<double> detaMins{0., 0.};
+//   
+//   std::vector<double> detaMaxs{1.0, 2.0};
+//   makeConditions(detaMins, detaMaxs);
+// 
+// 
+//   if(out){
+//     for(const auto& c : m_conditions){*out << c->toString();}
+//   }
+// 
+//   std::vector<double> etas{-5., -4.9, 4.9, 5.};
+//   EXPECT_TRUE(etas.size() == 4);
+// 
+// 
+// 
+//   auto jets = makeHypoJets(etas);
+//   
+//   EXPECT_TRUE(jets.size() == 4);
+//   if(m_debug){
+//     for(const auto & j: jets){*out<<j<< " " << j->toString() <<'\n';}
+//   }
+// 
+//   
+//   EXPECT_TRUE(m_conditions.size() == 2);
+// 
+//   auto matcher = std::unique_ptr<IGroupsMatcherMT>(nullptr);
+//   matcher.reset(new MaximumBipartiteGroupsMatcherMT(std::move(m_conditions)));
+// 
+//   auto groups = makeJetGroupsMT(jets.begin(), jets.end());
+//   EXPECT_TRUE(groups.size() == 6);
+//   auto collector = std::unique_ptr<ITrigJetHypoInfoCollector>();
+//   
+//   if(m_debug){
+//     collector.reset(new DebugInfoCollector("mj4"));
+//   }
+// 
+//   xAODJetCollector jetCollector;
+//   auto pass = matcher->match(groups.begin(),
+// 			     groups.end(),
+// 			     jetCollector,
+// 			     collector,
+// 			     m_debug);
+// 
+//   if(m_debug){collector->write();}
+//   
+//   EXPECT_TRUE(jetCollector.empty()); // not xAOD jets
+//   EXPECT_TRUE(*pass);
+// }
+// 
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/MaximumBipartiteGroupsMatcherTest.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/MaximumBipartiteGroupsMatcherTest.cxx
index 23a53fe61e6ac63270fc012bc7736a959891502e..b813e7417cb0dcf8f634a1dc682519cce4b1db06 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/MaximumBipartiteGroupsMatcherTest.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/MaximumBipartiteGroupsMatcherTest.cxx
@@ -85,10 +85,10 @@ TEST_F(MaximumBipartiteGroupsMatcherTest, tooFewSelectedJets){
   auto factory = TLorentzVectorFactory();
   auto tl = factory.make(eta, et);
 
-  MockJetWithLorentzVector jet0(tl);
-  MockJetWithLorentzVector jet1{tl};
+  auto jet0 = std::make_shared<const MockJetWithLorentzVector>(tl);
+  auto jet1 = std::make_shared<const MockJetWithLorentzVector>(tl);
 
-  HypoJetVector jets{&jet0, &jet1};
+  HypoJetVector jets{jet0, jet1};
   auto groups = makeJetGroups(jets.begin(), jets.end());
 
   MaximumBipartiteGroupsMatcher matcher(m_conditions);
@@ -109,26 +109,26 @@ TEST_F(MaximumBipartiteGroupsMatcherTest, oneSelectedJet){
   auto factory = TLorentzVectorFactory();
   auto tl = factory.make(eta, et);
 
-  MockJetWithLorentzVector jet0(tl);
-  MockJetWithLorentzVector jet1{tl};
-  MockJetWithLorentzVector jet2{tl};
+  auto jet0 = std::make_shared<const MockJetWithLorentzVector>(tl);
+  auto jet1 = std::make_shared<const MockJetWithLorentzVector>(tl);
+  auto jet2 = std::make_shared<const MockJetWithLorentzVector>(tl);
 
   eta = 0;
   et = 150;
   auto tl0 = factory.make(eta, et);
-  MockJetWithLorentzVector jet3{tl0};
+  auto jet3 = std::make_shared<const MockJetWithLorentzVector>(tl0);
 
-  HypoJetVector jets{&jet0, &jet1, &jet2, &jet3};
+  HypoJetVector jets{jet0, jet1, jet2, jet3};
 
-  EXPECT_CALL(jet0, eta()).Times(m_nconditions);
-  EXPECT_CALL(jet1, eta()).Times(m_nconditions);
-  EXPECT_CALL(jet2, eta()).Times(m_nconditions);
-  EXPECT_CALL(jet3, eta()).Times(m_nconditions);
+  EXPECT_CALL(*jet0, eta()).Times(m_nconditions);
+  EXPECT_CALL(*jet1, eta()).Times(m_nconditions);
+  EXPECT_CALL(*jet2, eta()).Times(m_nconditions);
+  EXPECT_CALL(*jet3, eta()).Times(m_nconditions);
 
-  EXPECT_CALL(jet0, et()).Times(m_nconditions);
-  EXPECT_CALL(jet1, et()).Times(m_nconditions);
-  EXPECT_CALL(jet2, et()).Times(m_nconditions);
-  EXPECT_CALL(jet3, et()).Times(m_nconditions);
+  EXPECT_CALL(*jet0, et()).Times(m_nconditions);
+  EXPECT_CALL(*jet1, et()).Times(m_nconditions);
+  EXPECT_CALL(*jet2, et()).Times(m_nconditions);
+  EXPECT_CALL(*jet3, et()).Times(m_nconditions);
 
   auto groups = makeJetGroups(jets.begin(), jets.end());
 
@@ -150,38 +150,35 @@ TEST_F(MaximumBipartiteGroupsMatcherTest, twoSelectedJets){
 
   auto factory = TLorentzVectorFactory();
   auto tl0 = factory.make(eta, et);
-
-  MockJetWithLorentzVector jet0(tl0);
+  auto jet0 = std::make_shared<const MockJetWithLorentzVector>(tl0);
 
   eta = 0.1;
   et = 139;
   auto tl1 = factory.make(eta, et);
-
-  MockJetWithLorentzVector jet1{tl1};
-
+  auto jet1 = std::make_shared<const MockJetWithLorentzVector>(tl1);
 
   eta = 5.;
   et = 100.;
   auto tl2 = factory.make(eta, et);
-  MockJetWithLorentzVector jet2{tl2};
+  auto jet2 = std::make_shared<const MockJetWithLorentzVector>(tl2);
 
   eta = 0;
   et = 150;
   auto tl3 = factory.make(eta, et);
-  MockJetWithLorentzVector jet3{tl3};
+  auto jet3 = std::make_shared<const MockJetWithLorentzVector>(tl3);
 
-  HypoJetVector jets{&jet0, &jet1, &jet2, &jet3};
+  HypoJetVector jets{jet0, jet1, jet2, jet3};
 
 
-  EXPECT_CALL(jet0, eta()).Times(m_nconditions);
-  EXPECT_CALL(jet1, eta()).Times(m_nconditions);
-  EXPECT_CALL(jet2, eta()).Times(m_nconditions);
-  EXPECT_CALL(jet3, eta()).Times(m_nconditions);
+  EXPECT_CALL(*jet0, eta()).Times(m_nconditions);
+  EXPECT_CALL(*jet1, eta()).Times(m_nconditions);
+  EXPECT_CALL(*jet2, eta()).Times(m_nconditions);
+  EXPECT_CALL(*jet3, eta()).Times(m_nconditions);
 
-  EXPECT_CALL(jet0, et()).Times(m_nconditions);
-  EXPECT_CALL(jet1, et()).Times(m_nconditions);
-  EXPECT_CALL(jet2, et()).Times(m_nconditions);
-  EXPECT_CALL(jet3, et()).Times(m_nconditions);
+  EXPECT_CALL(*jet0, et()).Times(m_nconditions);
+  EXPECT_CALL(*jet1, et()).Times(m_nconditions);
+  EXPECT_CALL(*jet2, et()).Times(m_nconditions);
+  EXPECT_CALL(*jet3, et()).Times(m_nconditions);
 
   MaximumBipartiteGroupsMatcher matcher(m_conditions);
   auto groups = makeJetGroups(jets.begin(), jets.end());
@@ -204,38 +201,34 @@ TEST_F(MaximumBipartiteGroupsMatcherTest, threeSelectedJets){
 
   auto factory = TLorentzVectorFactory();
   auto tl0 = factory.make(eta, et);
-
-  MockJetWithLorentzVector jet0(tl0);
+  auto jet0 = std::make_shared<const MockJetWithLorentzVector>(tl0);
 
   eta = 0.1;
   et = 139;
   auto tl1 = factory.make(eta, et);
-
-  MockJetWithLorentzVector jet1{tl1};
-
+  auto jet1 = std::make_shared<const MockJetWithLorentzVector>(tl1);
 
   eta = 0.5;
   et = 141.;
   auto tl2 = factory.make(eta, et);
-  MockJetWithLorentzVector jet2{tl2};
+  auto jet2 = std::make_shared<const MockJetWithLorentzVector>(tl2);
 
   eta = -0.2;
   et = 101.;
   auto tl3 = factory.make(eta, et);
-  MockJetWithLorentzVector jet3{tl3};
+  auto jet3 = std::make_shared<const MockJetWithLorentzVector>(tl3);
 
-  HypoJetVector jets{&jet0, &jet1, &jet2, &jet3};
+  HypoJetVector jets{jet0, jet1, jet2, jet3};
 
+  EXPECT_CALL(*jet1, eta()).Times(m_nconditions);
+  EXPECT_CALL(*jet2, eta()).Times(m_nconditions);
+  EXPECT_CALL(*jet3, eta()).Times(m_nconditions);
+  EXPECT_CALL(*jet0, eta()).Times(m_nconditions);
 
-  EXPECT_CALL(jet1, eta()).Times(m_nconditions);
-  EXPECT_CALL(jet2, eta()).Times(m_nconditions);
-  EXPECT_CALL(jet3, eta()).Times(m_nconditions);
-  EXPECT_CALL(jet0, eta()).Times(m_nconditions);
-
-  EXPECT_CALL(jet1, et()).Times(m_nconditions);
-  EXPECT_CALL(jet2, et()).Times(m_nconditions);
-  EXPECT_CALL(jet3, et()).Times(m_nconditions);
-  EXPECT_CALL(jet0, et()).Times(m_nconditions);
+  EXPECT_CALL(*jet1, et()).Times(m_nconditions);
+  EXPECT_CALL(*jet2, et()).Times(m_nconditions);
+  EXPECT_CALL(*jet3, et()).Times(m_nconditions);
+  EXPECT_CALL(*jet0, et()).Times(m_nconditions);
 
   MaximumBipartiteGroupsMatcher matcher(m_conditions);
   auto groups = makeJetGroups(jets.begin(), jets.end());
@@ -258,37 +251,35 @@ TEST_F(MaximumBipartiteGroupsMatcherTest, fourSelectedJets){
 
   auto factory = TLorentzVectorFactory();
   auto tl0 = factory.make(eta, et);
-
-  MockJetWithLorentzVector jet0(tl0);
+  auto jet0 = std::make_shared<const MockJetWithLorentzVector>(tl0);
 
   eta = 0.1;
   et = 139;
   auto tl1 = factory.make(eta, et);
-
-  MockJetWithLorentzVector jet1{tl1};
+  auto jet1 = std::make_shared<const MockJetWithLorentzVector>(tl1);
 
 
   eta = 0.5;
   et = 175.;
   auto tl2 = factory.make(eta, et);
-  MockJetWithLorentzVector jet2{tl2};
+  auto jet2 = std::make_shared<const MockJetWithLorentzVector>(tl2);
 
   eta = -0.2;
   et = 101.;
   auto tl3 = factory.make(eta, et);
-  MockJetWithLorentzVector jet3{tl3};
+  auto jet3 = std::make_shared<const MockJetWithLorentzVector>(tl3);
 
-  HypoJetVector jets{&jet0, &jet1, &jet2, &jet3};
+  HypoJetVector jets{jet0, jet1, jet2, jet3};
 
-  EXPECT_CALL(jet0, eta()).Times(m_nconditions);
-  EXPECT_CALL(jet1, eta()).Times(m_nconditions);
-  EXPECT_CALL(jet2, eta()).Times(m_nconditions);
-  EXPECT_CALL(jet3, eta()).Times(m_nconditions);
+  EXPECT_CALL(*jet0, eta()).Times(m_nconditions);
+  EXPECT_CALL(*jet1, eta()).Times(m_nconditions);
+  EXPECT_CALL(*jet2, eta()).Times(m_nconditions);
+  EXPECT_CALL(*jet3, eta()).Times(m_nconditions);
 
-  EXPECT_CALL(jet0, et()).Times(m_nconditions);
-  EXPECT_CALL(jet1, et()).Times(m_nconditions);
-  EXPECT_CALL(jet2, et()).Times(m_nconditions);
-  EXPECT_CALL(jet3, et()).Times(m_nconditions);
+  EXPECT_CALL(*jet0, et()).Times(m_nconditions);
+  EXPECT_CALL(*jet1, et()).Times(m_nconditions);
+  EXPECT_CALL(*jet2, et()).Times(m_nconditions);
+  EXPECT_CALL(*jet3, et()).Times(m_nconditions);
 
   MaximumBipartiteGroupsMatcher matcher(m_conditions);
   auto groups = makeJetGroups(jets.begin(), jets.end());
@@ -320,37 +311,34 @@ TEST_F(MaximumBipartiteGroupsMatcherTest, overlappingEtaRegions){
 
   auto factory = TLorentzVectorFactory();
   auto tl0 = factory.make(eta, et);
-
-  MockJetWithLorentzVector jet0(tl0);
+  auto jet0 = std::make_shared<const MockJetWithLorentzVector>(tl0);
 
   eta = 0.1;
   et = 101;
   auto tl1 = factory.make(eta, et);
-
-  MockJetWithLorentzVector jet1{tl1};
-
+  auto jet1 = std::make_shared<const MockJetWithLorentzVector>(tl1);
 
   eta = 0.1;
   et = 91.;
   auto tl2 = factory.make(eta, et);
-  MockJetWithLorentzVector jet2{tl2};
+  auto jet2 = std::make_shared<const MockJetWithLorentzVector>(tl2);
 
   eta = 0.1;
   et = 81.;
   auto tl3 = factory.make(eta, et);
-  MockJetWithLorentzVector jet3{tl3};
+  auto jet3 = std::make_shared<const MockJetWithLorentzVector>(tl3);
 
-  HypoJetVector jets{&jet0, &jet1, &jet2, &jet3};
+  HypoJetVector jets{jet0, jet1, jet2, jet3};
 
-  EXPECT_CALL(jet0, eta()).Times(nconditions);
-  EXPECT_CALL(jet1, eta()).Times(nconditions);
-  EXPECT_CALL(jet2, eta()).Times(nconditions);
-  EXPECT_CALL(jet3, eta()).Times(nconditions);
+  EXPECT_CALL(*jet0, eta()).Times(nconditions);
+  EXPECT_CALL(*jet1, eta()).Times(nconditions);
+  EXPECT_CALL(*jet2, eta()).Times(nconditions);
+  EXPECT_CALL(*jet3, eta()).Times(nconditions);
 
-  EXPECT_CALL(jet0, et()).Times(nconditions);
-  EXPECT_CALL(jet1, et()).Times(nconditions);
-  EXPECT_CALL(jet2, et()).Times(nconditions);
-  EXPECT_CALL(jet3, et()).Times(nconditions);
+  EXPECT_CALL(*jet0, et()).Times(nconditions);
+  EXPECT_CALL(*jet1, et()).Times(nconditions);
+  EXPECT_CALL(*jet2, et()).Times(nconditions);
+  EXPECT_CALL(*jet3, et()).Times(nconditions);
 
   MaximumBipartiteGroupsMatcher matcher(conditions);
   auto groups = makeJetGroups(jets.begin(), jets.end());
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/PartitionsGrouperTest.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/PartitionsGrouperTest.cxx
index 1090afa4716af1e5c3d840000ce92b510323d50e..e8143f21e18a6ab9006e819d4cace802ff61995a 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/PartitionsGrouperTest.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/PartitionsGrouperTest.cxx
@@ -40,7 +40,7 @@ TEST(PartitionsGrouperTest, test0){
   constexpr double eta{0.5}; 
   for(int i = 1; i < 6; ++i){
     auto tlv = factory.make(eta, i);
-    TLorentzVectorAsIJet* tl_j = new TLorentzVectorAsIJet(tlv);
+    auto tl_j = std::make_shared<TLorentzVectorAsIJet>(tlv);
     jets.push_back(tl_j);
   }
 
@@ -59,10 +59,6 @@ TEST(PartitionsGrouperTest, test0){
     EXPECT_TRUE(groupVector[0].size()== 2); // two jets first group
     EXPECT_TRUE(groupVector[1].size()== 3); // three jets second group
   }
-
-  auto b = jets.begin();
-  auto e = jets.end();
-  for(auto iter = b; iter != e; ++iter){delete *iter;}
 }
 
 TEST(PartitionsGrouperTest, test1){
@@ -76,7 +72,7 @@ TEST(PartitionsGrouperTest, test1){
   constexpr double eta{0.5}; 
   for(int i = 1; i < 10; ++i){
     auto tlv = factory.make(eta, i);
-    TLorentzVectorAsIJet* tl_j = new TLorentzVectorAsIJet(tlv);
+    auto tl_j = std::make_shared<TLorentzVectorAsIJet>(tlv);
     jets.push_back(tl_j);
   }
 
@@ -96,10 +92,6 @@ TEST(PartitionsGrouperTest, test1){
     EXPECT_TRUE(groupVector[1].size()== 3); // three jets second group
     EXPECT_TRUE(groupVector[2].size()== 1); // one jet third group
   }
-
-  auto b = jets.begin();
-  auto e = jets.end();
-  for(auto iter = b; iter != e; ++iter){delete *iter;}
 }
 
 
@@ -114,7 +106,7 @@ TEST(PartitionsGrouperTest, test2){
   constexpr double eta{0.5}; 
   for(int i = 1; i < 5; ++i){
     auto tlv = factory.make(eta, i);
-    TLorentzVectorAsIJet* tl_j = new TLorentzVectorAsIJet(tlv);
+    auto tl_j = std::make_shared<TLorentzVectorAsIJet>(tlv);
     jets.push_back(tl_j);
   }
 
@@ -128,10 +120,6 @@ TEST(PartitionsGrouperTest, test2){
   // need to place 5 jets in 2 conditions
   // 5.4.3.2/(2!3!) ways to do this (j1j2 and j2j1 in c1 is counted once).
   EXPECT_TRUE(groupVectors.size() == 0);
-
-  auto b = jets.begin();
-  auto e = jets.end();
-  for(auto iter = b; iter != e; ++iter){delete *iter;}
 }
 
 
@@ -147,7 +135,7 @@ TEST(PartitionsGrouperTest, SingleJetGrouperBehavioiur){
   constexpr double eta{0.5}; 
   for(int i = 1; i < 7; ++i){
     auto tlv = factory.make(eta, i);
-    TLorentzVectorAsIJet* tl_j = new TLorentzVectorAsIJet(tlv);
+    auto tl_j = std::make_shared<TLorentzVectorAsIJet>(tlv);
     jets.push_back(tl_j);
   }
 
@@ -177,10 +165,6 @@ TEST(PartitionsGrouperTest, SingleJetGrouperBehavioiur){
       EXPECT_TRUE(jv.size() == 1);
     }
   }
-
-  auto b = jets.begin();
-  auto e = jets.end();
-  for(auto iter = b; iter != e; ++iter){delete *iter;}
 }
 
 
@@ -198,7 +182,7 @@ TEST(PartitionsGrouperTest, CombinationsGrouperBehavioiur){
   constexpr double eta{0.5}; 
   for(int i = 1; i < 7; ++i){
     auto tlv = factory.make(eta, i);
-    TLorentzVectorAsIJet* tl_j = new TLorentzVectorAsIJet(tlv);
+    auto tl_j = std::make_shared<TLorentzVectorAsIJet>(tlv);
     jets.push_back(tl_j);
   }
 
@@ -235,10 +219,6 @@ TEST(PartitionsGrouperTest, CombinationsGrouperBehavioiur){
       EXPECT_TRUE(jv.size() == 2);
     }
   }
-  
-  auto b = jets.begin();
-  auto e = jets.end();
-  for(auto iter = b; iter != e; ++iter){delete *iter;}
 }
 
 TEST(PartitionsGrouperTest, FullPartitionGrouperBehavioiur){
@@ -253,7 +233,7 @@ TEST(PartitionsGrouperTest, FullPartitionGrouperBehavioiur){
   constexpr double eta{0.5}; 
   for(int i = 1; i < 7; ++i){
     auto tlv = factory.make(eta, i);
-    TLorentzVectorAsIJet* tl_j = new TLorentzVectorAsIJet(tlv);
+    auto tl_j = std::make_shared<TLorentzVectorAsIJet>(tlv);
     jets.push_back(tl_j);
   }
 
@@ -295,9 +275,5 @@ TEST(PartitionsGrouperTest, FullPartitionGrouperBehavioiur){
       EXPECT_TRUE(jv.size() == 1);
     }
   }
-  
-  auto b = jets.begin();
-  auto e = jets.end();
-  for(auto iter = b; iter != e; ++iter){delete *iter;}
 }
 
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/PartitionsGroupsMatcherMTTest.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/PartitionsGroupsMatcherMTTest.cxx
index 333e68a3e4ab116b630b908b6ffd53e133973f42..b6708b3f4df116de60932edf8ceba4180b9d257c 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/PartitionsGroupsMatcherMTTest.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/PartitionsGroupsMatcherMTTest.cxx
@@ -93,7 +93,7 @@ TEST_F(PartitionsGroupsMatcherMTTest, tooFewSelectedJets){
    auto factory = TLorentzVectorFactory();
    auto tlv = factory.make(eta, et);
 
-   TLorentzVectorAsIJet* tl_j = new TLorentzVectorAsIJet(tlv);
+   auto tl_j = std::make_shared<const TLorentzVectorAsIJet>(tlv);
    HypoJetVector jets;
    jets.push_back(tl_j);
 
@@ -137,7 +137,7 @@ TEST_F(PartitionsGroupsMatcherMTTest, PassingJets){
   auto factory = TLorentzVectorFactory();
   
   auto makeJetFromEt = [&factory, eta](double et){
-    return new TLorentzVectorAsIJet(factory.make(eta, et));
+    return std::make_shared<const TLorentzVectorAsIJet>(factory.make(eta, et));
   };
 
   std::transform(ets.begin(),
@@ -199,7 +199,7 @@ TEST_F(PartitionsGroupsMatcherMTTest, Passing3Failing1){
   auto factory = TLorentzVectorFactory();
   
   auto makeJetFromEt = [&factory, eta](double et){
-    return new TLorentzVectorAsIJet(factory.make(eta, et));
+    return std::make_shared<const TLorentzVectorAsIJet>(factory.make(eta, et));
   };
 
   std::transform(ets.begin(),
@@ -241,8 +241,6 @@ TEST_F(PartitionsGroupsMatcherMTTest, Passing3Failing1){
   // calls: 4 jets, conditions need three: 4.3.2  = 24
   // pass: ignore failing jet. 3.2.1 = 6
   EXPECT_EQ(npass,  6u);
   EXPECT_EQ(ncall, 24u);
-  
-  for(auto& j : jets){delete j;}
 }
  
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/TightCleanerTest.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/TightCleanerTest.cxx
index 7ad64fa8b1dee2f1f44c19bed8926f6e84b1d9c6..2f2b665de39928ca53cafc1877b23beaf0d3dadd 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/TightCleanerTest.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/TightCleanerTest.cxx
@@ -58,7 +58,7 @@ TEST(TightCleanerTest, SimpleThresholds) {
   {
     std::array<float, 5> args{1., 1., 1., 2., 1.};
     auto cleaner = makeCleaner(args[0], args[1], args[2], args[3], args[4]);
-    EXPECT_TRUE(cleaner(&jet));
+    EXPECT_TRUE(cleaner(jet));
   }
 
   constexpr float eps = 0.00001;
@@ -68,21 +68,21 @@ TEST(TightCleanerTest, SimpleThresholds) {
   {
     std::array<float, 5> args{1.-eps, 1+ eps, 1,  2., 1.};
     auto cleaner = makeCleaner(args[0], args[1], args[2], args[3], args[4]);
-    EXPECT_FALSE(cleaner(&jet));
+    EXPECT_FALSE(cleaner(jet));
   }
 
   // jet above  fSampMaxTightThreshold, outside applicable eta range
   {
     std::array<float, 5> args{1.-eps, 0.5- eps, 1,  2., 1.};
     auto cleaner = makeCleaner(args[0], args[1], args[2], args[3], args[4]);
-    EXPECT_TRUE(cleaner(&jet));
+    EXPECT_TRUE(cleaner(jet));
   }
 
   // jet below _emfLowTightThreshold
   {
     std::array<float, 5> args{1., 1., 1+eps,  2., 1.};
     auto cleaner = makeCleaner(args[0], args[1], args[2], args[3], args[4]);
-    EXPECT_FALSE(cleaner(&jet));
+    EXPECT_FALSE(cleaner(jet));
   }
 
 
@@ -90,7 +90,7 @@ TEST(TightCleanerTest, SimpleThresholds) {
   {
     std::array<float, 5> args{1., 1., 0.5,  1.-eps, 1.};
     auto cleaner = makeCleaner(args[0], args[1], args[2], args[3], args[4]);
-    EXPECT_FALSE(cleaner(&jet));
+    EXPECT_FALSE(cleaner(jet));
   }
 
 
@@ -98,7 +98,7 @@ TEST(TightCleanerTest, SimpleThresholds) {
   {
     std::array<float, 5> args{1., 1., 1.0,  1.0, 1. -eps};
     auto cleaner = makeCleaner(args[0], args[1], args[2], args[3], args[4]);
-    EXPECT_FALSE(cleaner(&jet));
+    EXPECT_FALSE(cleaner(jet));
   }
 
 }
@@ -113,5 +113,5 @@ TEST(TightCleanerTest, ThrowsOnUncleanableJet) {
 
   
 
-  EXPECT_THROW(cleaner(&jet), UncleanableJet);
+  EXPECT_THROW(cleaner(jet), UncleanableJet);
 }
diff --git a/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/xAODJetCollectorTest.cxx b/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/xAODJetCollectorTest.cxx
index 04abacad61108fe08bf1e5ec7ca1a0c074d6245d..fe3713ff9ee6ca711f6454848d161f5fa5544586 100644
--- a/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/xAODJetCollectorTest.cxx
+++ b/Trigger/TrigHypothesis/TrigHLTJetHypoUnitTests/tests/xAODJetCollectorTest.cxx
@@ -35,13 +35,11 @@ TEST_F(xAODJetCollectorTest, multipleInputJets){
   //multiple jets in. Jet collector is not empty and has the correct number.
 
   constexpr std::size_t njets{11};
-  HypoJetVector jets;
   std::vector<xAOD::Jet> xaodjets (njets);
-  std::vector<HypoJet::xAODJetAsIJet> ijets;
-  ijets.reserve (njets);
+  HypoJetVector jets;
+  jets.reserve (njets);
   for(unsigned int ijet = 0; ijet < njets; ++ijet){
-    ijets.emplace_back (&xaodjets[ijet], ijet);
-    jets.push_back(&ijets.back());
+    jets.emplace_back (new HypoJet::xAODJetAsIJet(&xaodjets[ijet], ijet));
   }
 
   xAODJetCollector collector;
@@ -57,12 +55,10 @@ TEST_F(xAODJetCollectorTest, nonXAODJets){
 
   unsigned int njets{11};
   HypoJetVector jets;
-  std::vector<TLorentzVectorAsIJet> ijets;
-  ijets.reserve (njets);
+  jets.reserve (njets);
   for(unsigned int ijet = 0; ijet < njets; ++ijet){
     TLorentzVector v;
-    ijets.emplace_back (v);
-    jets.push_back(&ijets.back());
+    jets.emplace_back (new TLorentzVectorAsIJet(v));
   }
 
   xAODJetCollector collector;
diff --git a/Trigger/TrigTools/TrigInDetConfig/python/InDetSetup.py b/Trigger/TrigTools/TrigInDetConfig/python/InDetSetup.py
index 33b3bf2e4905fd687af528f145a4d1ab40aaf059..80ea72b7108dbf312da5e5862b7666534c2eeffb 100644
--- a/Trigger/TrigTools/TrigInDetConfig/python/InDetSetup.py
+++ b/Trigger/TrigTools/TrigInDetConfig/python/InDetSetup.py
@@ -12,16 +12,7 @@ log = logging.getLogger("InDetSetup")
 
 from AthenaCommon.AthenaCommonFlags import athenaCommonFlags # noqa: F401
 
-if 'InDetTrigFlags' not in dir():
-   # --- setup flags with default values
-   from InDetTrigRecExample.InDetTrigFlags import InDetTrigFlags
-   InDetTrigFlags.doNewTracking.set_Value_and_Lock(True)
-   InDetTrigFlags.primaryVertexSetup = "IterativeFinding"
-   InDetTrigFlags.doRefit = True    # switched on for ATR-12226 (z0 uncertainties in bjets)
-   InDetTrigFlags.doPixelClusterSplitting = False
-   InDetTrigFlags.doPrintConfigurables = False
-
-
+include("InDetTrigRecExample/InDetTrigRec_jobOptions.py")
 
 def makeInDetAlgsNoView( config = None, rois = 'EMViewRoIs', doFTF = True ):
 
@@ -157,11 +148,9 @@ def makeInDetAlgs( config = None, rois = 'EMViewRoIs', doFTF = True, viewVerifie
 
     viewAlgs.append(InDetSCTEventFlagWriter)
 
-
-
   #Pixel clusterisation
   from InDetTrigRecExample.InDetTrigConfigRecLoadTools import TrigPixelLorentzAngleTool, TrigSCTLorentzAngleTool
-
+  
   from SiClusterizationTool.SiClusterizationToolConf import InDet__ClusterMakerTool
   InDetClusterMakerTool = InDet__ClusterMakerTool(name                 = "InDetClusterMakerTool_" + signature,
                                                   SCTLorentzAngleTool = TrigSCTLorentzAngleTool,
@@ -169,7 +158,6 @@ def makeInDetAlgs( config = None, rois = 'EMViewRoIs', doFTF = True, viewVerifie
 
   ToolSvc += InDetClusterMakerTool
 
-
   from SiClusterizationTool.SiClusterizationToolConf import InDet__MergedPixelsTool
   InDetMergedPixelsTool = InDet__MergedPixelsTool(name                    = "InDetMergedPixelsTool_" + signature,
                                                   globalPosAlg            = InDetClusterMakerTool)
@@ -305,7 +293,6 @@ def makeInDetAlgs( config = None, rois = 'EMViewRoIs', doFTF = True, viewVerifie
       from SiSpacePointFormation.SiSpacePointFormationConf import InDet__SiElementPropertiesTableCondAlg
       condSeq += InDet__SiElementPropertiesTableCondAlg(name = "InDetSiElementPropertiesTableCondAlg")
 
-
   #FIXME have a flag for now set for True( as most cases call FTF) but potentially separate
   if doFTF: 
       #Load signature configuration (containing cut values, names of collections, etc)
@@ -323,7 +310,7 @@ def makeInDetAlgs( config = None, rois = 'EMViewRoIs', doFTF = True, viewVerifie
 
       viewAlgs.append(theFTF)
 
-
+      
       from TrigInDetConf.TrigInDetPostTools import  InDetTrigParticleCreatorToolFTF
       from InDetTrigParticleCreation.InDetTrigParticleCreationConf import InDet__TrigTrackingxAODCnvMT
 
diff --git a/Trigger/TrigTruthEvent/TrigTruthEventTPCnv/test/TrigInDetTrackTruthCnv_p1_test.cxx b/Trigger/TrigTruthEvent/TrigTruthEventTPCnv/test/TrigInDetTrackTruthCnv_p1_test.cxx
index bcfa97974d199e6b4a024f552a4dcbc779b3c75a..dde73ed0f0298b50b27b42b106bc9902839005c8 100644
--- a/Trigger/TrigTruthEvent/TrigTruthEventTPCnv/test/TrigInDetTrackTruthCnv_p1_test.cxx
+++ b/Trigger/TrigTruthEvent/TrigTruthEventTPCnv/test/TrigInDetTrackTruthCnv_p1_test.cxx
@@ -54,7 +54,7 @@ public:
     int nstat = 4;
     p.m_nr_common_hits.resize (nstat);
     for (int i=0; i < nstat; i++) {
-      HepMcParticleLink particleLink(genPartVector.at(i)->barcode(),genPartVector.at(i)->parent_event()->event_number());
+      HepMcParticleLink particleLink(HepMC::barcode(genPartVector.at(i)),genPartVector.at(i)->parent_event()->event_number());
       TrigIDHitStats tihs;
       tihs[TrigIDHitStats::PIX] = 12 + i*10;
       tihs[TrigIDHitStats::SCT] = 13 + i*10;
@@ -93,7 +93,7 @@ void test1(std::vector<HepMC::GenParticle*>& genPartVector)
   std::cout << "test1\n";
   const HepMC::GenParticle *particle = genPartVector.at(0);
   // Create HepMcParticleLink outside of leak check.
-  HepMcParticleLink dummyHMPL(particle->barcode(),particle->parent_event()->event_number());
+  HepMcParticleLink dummyHMPL(HepMC::barcode(particle),particle->parent_event()->event_number());
   assert(dummyHMPL.cptr()==particle);
   Athena_test::Leakcheck check;
 
diff --git a/Trigger/TrigTruthEvent/TrigTruthEventTPCnv/test/TrigInDetTrackTruthMapCnv_p1_test.cxx b/Trigger/TrigTruthEvent/TrigTruthEventTPCnv/test/TrigInDetTrackTruthMapCnv_p1_test.cxx
index 9f35e407de0385d77b26128e71af4a17311de5e4..c671675ed0d1f26b116b54f8bde5a23714611a31 100644
--- a/Trigger/TrigTruthEvent/TrigTruthEventTPCnv/test/TrigInDetTrackTruthMapCnv_p1_test.cxx
+++ b/Trigger/TrigTruthEvent/TrigTruthEventTPCnv/test/TrigInDetTrackTruthMapCnv_p1_test.cxx
@@ -56,7 +56,7 @@ public:
     int nstat = 1;
     p.m_nr_common_hits.resize (nstat);
     for (int i=ndx; i < nstat; i++) {
-      HepMcParticleLink particleLink(genPartVector.at(i)->barcode(),genPartVector.at(i)->parent_event()->event_number());
+      HepMcParticleLink particleLink(HepMC::barcode(genPartVector.at(i)),genPartVector.at(i)->parent_event()->event_number());
       TrigIDHitStats tihs;
       tihs[TrigIDHitStats::PIX] = 12 + i*10 + ndx*100;
       tihs[TrigIDHitStats::SCT] = 13 + i*10 + ndx*100;
@@ -106,7 +106,7 @@ void test1(std::vector<HepMC::GenParticle*>& genPartVector)
   std::cout << "test1\n";
   const HepMC::GenParticle *particle = genPartVector.at(0);
   // Create HepMcParticleLink outside of leak check.
-  HepMcParticleLink dummyHMPL(particle->barcode(),particle->parent_event()->event_number());
+  HepMcParticleLink dummyHMPL(HepMC::barcode(particle),particle->parent_event()->event_number());
   assert(dummyHMPL.cptr()==particle);
 
   SG::WriteHandle<TrigInDetTrackCollection> coll{"coll"};
diff --git a/Trigger/TrigValidation/TrigAnalysisTest/share/ref_RDOtoRDOTrig_v1Dev_build.ref b/Trigger/TrigValidation/TrigAnalysisTest/share/ref_RDOtoRDOTrig_v1Dev_build.ref
index 09d8678d668d8af5a0e2cc6c782c978185a62ede..45ca473f70dd75582ec997220a49024b07c6a5bb 100644
--- a/Trigger/TrigValidation/TrigAnalysisTest/share/ref_RDOtoRDOTrig_v1Dev_build.ref
+++ b/Trigger/TrigValidation/TrigAnalysisTest/share/ref_RDOtoRDOTrig_v1Dev_build.ref
@@ -23,7 +23,7 @@ HLT_2e17_etcut_L12EM15VH:
     3: 2
   stepFeatures:
     0: 12
-    1: 314
+    1: 80
     2: 16
     3: 12
 HLT_2e17_lhvloose_L12EM15VH:
@@ -51,17 +51,17 @@ HLT_2e24_lhvloose_L12EM20VH:
   stepFeatures:
     0: 2
 HLT_2e3_etcut_L12EM3:
-  eventCount: 20
+  eventCount: 19
   stepCounts:
     0: 20
-    1: 20
-    2: 20
-    3: 20
+    1: 19
+    2: 19
+    3: 19
   stepFeatures:
     0: 342
-    1: 3730
-    2: 796
-    3: 304
+    1: 976
+    2: 712
+    3: 270
 HLT_2g10_loose_mu20_L1MU20:
   eventCount: 0
   stepCounts:
@@ -157,25 +157,25 @@ HLT_2mu10_bJpsimumu_L12MU10:
   stepCounts:
     0: 2
     1: 2
-    2: 2
-    3: 2
+    2: 1
+    3: 1
   stepFeatures:
     0: 8
-    1: 8
-    2: 8
-    3: 4
+    1: 6
+    2: 4
+    3: 2
 HLT_2mu10_bUpsimumu_L12MU10:
   eventCount: 0
   stepCounts:
     0: 2
     1: 2
-    2: 2
-    3: 2
+    2: 1
+    3: 1
   stepFeatures:
     0: 8
-    1: 8
-    2: 8
-    3: 4
+    1: 6
+    2: 4
+    3: 2
 HLT_2mu14_L12MU10:
   eventCount: 1
   stepCounts:
@@ -197,7 +197,7 @@ HLT_2mu14_l2io_L12MU10:
     3: 1
   stepFeatures:
     0: 8
-    1: 14
+    1: 10
     2: 4
     3: 4
 HLT_2mu15_L12MU10:
@@ -229,37 +229,37 @@ HLT_2mu4_bDimu_L12MU4:
   stepCounts:
     0: 3
     1: 3
-    2: 2
-    3: 2
+    2: 1
+    3: 1
   stepFeatures:
     0: 12
-    1: 12
-    2: 8
-    3: 4
+    1: 10
+    2: 4
+    3: 2
 HLT_2mu4_bJpsimumu_L12MU4:
   eventCount: 0
   stepCounts:
     0: 3
     1: 3
-    2: 2
-    3: 2
+    2: 1
+    3: 1
   stepFeatures:
     0: 12
-    1: 12
-    2: 8
-    3: 4
+    1: 10
+    2: 4
+    3: 2
 HLT_2mu4_bUpsimumu_L12MU4:
   eventCount: 0
   stepCounts:
     0: 3
     1: 3
-    2: 2
-    3: 2
+    2: 1
+    3: 1
   stepFeatures:
     0: 12
-    1: 12
-    2: 8
-    3: 4
+    1: 10
+    2: 4
+    3: 2
 HLT_2mu4_muonqual_L12MU4:
   eventCount: 2
   stepCounts:
@@ -309,35 +309,35 @@ HLT_2mu6_bJpsimumu_L12MU6:
   stepCounts:
     0: 3
     1: 3
-    2: 2
-    3: 2
+    2: 1
+    3: 1
   stepFeatures:
     0: 12
-    1: 12
-    2: 8
-    3: 4
+    1: 10
+    2: 4
+    3: 2
 HLT_2mu6_bJpsimumul2io_L12MU6:
   eventCount: 0
   stepCounts:
     0: 3
-    1: 3
+    1: 2
     2: 2
     3: 2
   stepFeatures:
     0: 12
-    1: 26
+    1: 14
     2: 8
     3: 4
 HLT_2mu6_l2io_L12MU6:
   eventCount: 2
   stepCounts:
     0: 3
-    1: 3
+    1: 2
     2: 2
     3: 2
   stepFeatures:
     0: 12
-    1: 26
+    1: 14
     2: 8
     3: 8
 HLT_2mu6_muonqual_L12MU6:
@@ -514,7 +514,7 @@ HLT_e17_lhloose_mu14_L1EM15VH_MU10:
     8: 2
   stepFeatures:
     0: 2
-    1: 43
+    1: 3
     2: 4
     3: 2
     4: 2
@@ -526,15 +526,15 @@ HLT_e17_lhvloose_nod0_L1EM15VH:
   eventCount: 5
   stepCounts:
     0: 6
-    1: 6
-    2: 6
-    3: 6
+    1: 5
+    2: 5
+    3: 5
     4: 5
   stepFeatures:
     0: 6
-    1: 80
-    2: 9
-    3: 6
+    1: 7
+    2: 7
+    3: 5
     4: 5
 HLT_e20_lhmedium_e15_lhmedium_Zee_L12EM3:
   eventCount: 0
@@ -545,7 +545,7 @@ HLT_e20_lhmedium_e15_lhmedium_Zee_L12EM3:
     3: 1
   stepFeatures:
     0: 2
-    1: 20
+    1: 7
     2: 3
     3: 2
 HLT_e24_lhmedium_2g12_loose_L1EM20VH_3EM10VH:
@@ -558,99 +558,99 @@ HLT_e24_lhvloose_L1EM20VH:
   eventCount: 5
   stepCounts:
     0: 6
-    1: 6
-    2: 6
-    3: 6
+    1: 5
+    2: 5
+    3: 5
     4: 5
   stepFeatures:
     0: 6
-    1: 80
-    2: 9
-    3: 6
+    1: 7
+    2: 7
+    3: 5
     4: 5
 HLT_e26_etcut_L1EM22VHI:
-  eventCount: 6
+  eventCount: 5
   stepCounts:
     0: 6
-    1: 6
-    2: 6
-    3: 6
+    1: 5
+    2: 5
+    3: 5
   stepFeatures:
     0: 7
-    1: 132
-    2: 7
-    3: 7
+    1: 30
+    2: 6
+    3: 6
 HLT_e26_lhloose_L1EM15VH:
   eventCount: 4
   stepCounts:
     0: 5
-    1: 5
-    2: 5
-    3: 5
+    1: 4
+    2: 4
+    3: 4
     4: 4
   stepFeatures:
     0: 5
-    1: 63
-    2: 8
-    3: 5
+    1: 6
+    2: 6
+    3: 4
     4: 4
 HLT_e26_lhloose_L1EM22VHI:
   eventCount: 4
   stepCounts:
     0: 5
-    1: 5
-    2: 5
-    3: 5
+    1: 4
+    2: 4
+    3: 4
     4: 4
   stepFeatures:
     0: 5
-    1: 63
-    2: 8
-    3: 5
+    1: 6
+    2: 6
+    3: 4
     4: 4
 HLT_e26_lhmedium_L1EM15VH:
   eventCount: 4
   stepCounts:
     0: 5
-    1: 5
-    2: 5
-    3: 5
+    1: 4
+    2: 4
+    3: 4
     4: 4
   stepFeatures:
     0: 5
-    1: 63
-    2: 8
-    3: 5
+    1: 6
+    2: 6
+    3: 4
     4: 4
 HLT_e26_lhmedium_L1EM22VHI:
   eventCount: 4
   stepCounts:
     0: 5
-    1: 5
-    2: 5
-    3: 5
+    1: 4
+    2: 4
+    3: 4
     4: 4
   stepFeatures:
     0: 5
-    1: 63
-    2: 8
-    3: 5
+    1: 6
+    2: 6
+    3: 4
     4: 4
 HLT_e26_lhmedium_mu8noL1_L1EM22VHI:
   eventCount: 2
   stepCounts:
     0: 5
-    1: 5
-    2: 5
-    3: 5
+    1: 4
+    2: 4
+    3: 4
     4: 4
     5: 2
     6: 2
   stepFeatures:
     0: 5
-    1: 63
-    2: 8
-    3: 5
+    1: 6
+    2: 6
+    3: 4
     4: 4
     5: 2
     6: 2
@@ -658,99 +658,99 @@ HLT_e26_lhtight_L1EM15VH:
   eventCount: 4
   stepCounts:
     0: 5
-    1: 5
-    2: 5
-    3: 5
+    1: 4
+    2: 4
+    3: 4
     4: 4
   stepFeatures:
     0: 5
-    1: 63
-    2: 8
-    3: 5
+    1: 6
+    2: 6
+    3: 4
     4: 4
 HLT_e26_lhtight_L1EM22VHI:
   eventCount: 4
   stepCounts:
     0: 5
-    1: 5
-    2: 5
-    3: 5
+    1: 4
+    2: 4
+    3: 4
     4: 4
   stepFeatures:
     0: 5
-    1: 63
-    2: 8
-    3: 5
+    1: 6
+    2: 6
+    3: 4
     4: 4
 HLT_e26_lhtight_ivarloose_L1EM22VHI:
   eventCount: 3
   stepCounts:
     0: 5
-    1: 5
-    2: 5
-    3: 5
+    1: 4
+    2: 4
+    3: 4
     4: 3
   stepFeatures:
     0: 5
-    1: 63
-    2: 8
-    3: 5
+    1: 6
+    2: 6
+    3: 4
     4: 3
 HLT_e26_lhtight_ivarmedium_L1EM22VHI:
   eventCount: 3
   stepCounts:
     0: 5
-    1: 5
-    2: 5
-    3: 5
+    1: 4
+    2: 4
+    3: 4
     4: 3
   stepFeatures:
     0: 5
-    1: 63
-    2: 8
-    3: 5
+    1: 6
+    2: 6
+    3: 4
     4: 3
 HLT_e26_lhtight_ivartight_L1EM22VHI:
   eventCount: 3
   stepCounts:
     0: 5
-    1: 5
-    2: 5
-    3: 5
+    1: 4
+    2: 4
+    3: 4
     4: 3
   stepFeatures:
     0: 5
-    1: 63
-    2: 8
-    3: 5
+    1: 6
+    2: 6
+    3: 4
     4: 3
 HLT_e26_lhtight_nod0_L1EM22VHI:
   eventCount: 4
   stepCounts:
     0: 5
-    1: 5
-    2: 5
-    3: 5
+    1: 4
+    2: 4
+    3: 4
     4: 4
   stepFeatures:
     0: 5
-    1: 63
-    2: 8
-    3: 5
+    1: 6
+    2: 6
+    3: 4
     4: 4
 HLT_e26_lhtight_nod0_L1EM24VHI:
   eventCount: 4
   stepCounts:
     0: 5
-    1: 5
-    2: 5
-    3: 5
+    1: 4
+    2: 4
+    3: 4
     4: 4
   stepFeatures:
     0: 5
-    1: 63
-    2: 8
-    3: 5
+    1: 6
+    2: 6
+    3: 4
     4: 4
 HLT_e300_etcut_L1EM22VHI:
   eventCount: 0
@@ -773,9 +773,9 @@ HLT_e3_etcut_L1EM3:
     3: 20
   stepFeatures:
     0: 171
-    1: 1865
-    2: 398
-    3: 152
+    1: 491
+    2: 360
+    3: 136
 HLT_e5_etcut_L1EM3:
   eventCount: 20
   stepCounts:
@@ -785,120 +785,120 @@ HLT_e5_etcut_L1EM3:
     3: 20
   stepFeatures:
     0: 137
-    1: 1656
-    2: 190
-    3: 115
+    1: 458
+    2: 173
+    3: 102
 HLT_e5_lhloose_L1EM3:
   eventCount: 4
   stepCounts:
     0: 18
-    1: 18
-    2: 18
-    3: 18
+    1: 15
+    2: 15
+    3: 15
     4: 4
   stepFeatures:
     0: 55
-    1: 570
-    2: 90
-    3: 51
+    1: 110
+    2: 72
+    3: 40
     4: 5
 HLT_e5_lhloose_noringer_L1EM3:
   eventCount: 5
   stepCounts:
     0: 17
-    1: 17
-    2: 17
-    3: 17
+    1: 15
+    2: 15
+    3: 15
     4: 5
   stepFeatures:
     0: 56
-    1: 651
-    2: 103
-    3: 55
+    1: 128
+    2: 88
+    3: 46
     4: 6
 HLT_e5_lhmedium_L1EM3:
   eventCount: 4
   stepCounts:
     0: 18
-    1: 18
-    2: 18
-    3: 18
+    1: 14
+    2: 14
+    3: 14
     4: 4
   stepFeatures:
     0: 54
-    1: 565
-    2: 88
-    3: 50
+    1: 107
+    2: 70
+    3: 39
     4: 5
 HLT_e5_lhmedium_noringer_L1EM3:
   eventCount: 4
   stepCounts:
     0: 16
-    1: 16
-    2: 16
-    3: 16
+    1: 13
+    2: 13
+    3: 13
     4: 4
   stepFeatures:
     0: 48
-    1: 528
-    2: 81
-    3: 47
+    1: 97
+    2: 66
+    3: 38
     4: 5
 HLT_e5_lhtight_L1EM3:
   eventCount: 4
   stepCounts:
     0: 18
-    1: 18
-    2: 18
-    3: 18
+    1: 14
+    2: 14
+    3: 14
     4: 4
   stepFeatures:
     0: 52
-    1: 546
-    2: 84
-    3: 48
+    1: 103
+    2: 66
+    3: 37
     4: 5
 HLT_e5_lhtight_nod0_L1EM3:
   eventCount: 4
   stepCounts:
     0: 18
-    1: 18
-    2: 18
-    3: 18
+    1: 14
+    2: 14
+    3: 14
     4: 4
   stepFeatures:
     0: 52
-    1: 546
-    2: 84
-    3: 48
+    1: 103
+    2: 66
+    3: 37
     4: 5
 HLT_e5_lhtight_noringer_L1EM3:
   eventCount: 4
   stepCounts:
     0: 16
-    1: 16
-    2: 16
-    3: 16
+    1: 13
+    2: 13
+    3: 13
     4: 4
   stepFeatures:
     0: 45
-    1: 513
-    2: 73
-    3: 44
+    1: 92
+    2: 60
+    3: 36
     4: 5
 HLT_e5_lhtight_noringer_nod0_L1EM3:
   eventCount: 4
   stepCounts:
     0: 16
-    1: 16
-    2: 16
-    3: 16
+    1: 13
+    2: 13
+    3: 13
     4: 4
   stepFeatures:
     0: 45
-    1: 513
-    2: 73
-    3: 44
+    1: 92
+    2: 60
+    3: 36
     4: 5
 HLT_e60_lhmedium_L1EM22VHI:
   eventCount: 2
@@ -910,7 +910,7 @@ HLT_e60_lhmedium_L1EM22VHI:
     4: 2
   stepFeatures:
     0: 2
-    1: 34
+    1: 2
     2: 4
     3: 2
     4: 2
@@ -924,7 +924,7 @@ HLT_e60_lhmedium_nod0_L1EM22VHI:
     4: 2
   stepFeatures:
     0: 2
-    1: 34
+    1: 2
     2: 4
     3: 2
     4: 2
@@ -937,16 +937,16 @@ HLT_e7_etcut_L1EM3:
     3: 20
   stepFeatures:
     0: 89
-    1: 1124
-    2: 122
-    3: 78
+    1: 361
+    2: 117
+    3: 73
 HLT_e7_lhmedium_mu24_L1MU20:
   eventCount: 1
   stepCounts:
     0: 7
-    1: 7
-    2: 7
-    3: 7
+    1: 5
+    2: 5
+    3: 5
     4: 2
     5: 2
     6: 1
@@ -954,9 +954,9 @@ HLT_e7_lhmedium_mu24_L1MU20:
     8: 1
   stepFeatures:
     0: 9
-    1: 97
-    2: 13
-    3: 9
+    1: 10
+    2: 10
+    3: 7
     4: 2
     5: 2
     6: 1
@@ -1479,7 +1479,7 @@ HLT_j45_csskpf_nojcalib_ftf_L1J20:
     1: 17
   stepFeatures:
     0: 19
-    1: 40
+    1: 37
 HLT_j45_cssktc_nojcalib_L1J20:
   eventCount: 15
   stepCounts:
@@ -1515,7 +1515,7 @@ HLT_j45_pf_ftf_L1J20:
     1: 18
   stepFeatures:
     0: 19
-    1: 49
+    1: 50
 HLT_j45_pf_ftf_preselj45_L1J20:
   eventCount: 18
   stepCounts:
@@ -1523,7 +1523,7 @@ HLT_j45_pf_ftf_preselj45_L1J20:
     1: 18
   stepFeatures:
     0: 19
-    1: 49
+    1: 50
 HLT_j45_pf_nojcalib_ftf_L1J20:
   eventCount: 18
   stepCounts:
@@ -1539,17 +1539,17 @@ HLT_j45_pf_subjesgscIS_ftf_L1J20:
     1: 19
   stepFeatures:
     0: 19
-    1: 51
+    1: 52
 HLT_j45_pf_subjesgscIS_ftf_bdl1r70_split_L1J20:
-  eventCount: 15
+  eventCount: 13
   stepCounts:
     0: 19
     1: 18
-    2: 15
+    2: 13
   stepFeatures:
     0: 19
-    1: 48
-    2: 29
+    1: 49
+    2: 25
 HLT_j45_pf_subjesgscIS_ftf_boffperf_split_L1J20:
   eventCount: 18
   stepCounts:
@@ -1558,8 +1558,8 @@ HLT_j45_pf_subjesgscIS_ftf_boffperf_split_L1J20:
     2: 18
   stepFeatures:
     0: 19
-    1: 48
-    2: 48
+    1: 49
+    2: 49
 HLT_j45_sktc_nojcalib_L1J20:
   eventCount: 15
   stepCounts:
@@ -1591,13 +1591,13 @@ HLT_j45_subjesgscIS_ftf_015jvt_L1J15:
     0: 20
     1: 44
 HLT_j45_subjesgscIS_ftf_059jvt_L1J15:
-  eventCount: 18
+  eventCount: 17
   stepCounts:
     0: 20
-    1: 18
+    1: 17
   stepFeatures:
     0: 20
-    1: 43
+    1: 42
 HLT_j45_subjesgscIS_ftf_L1J15:
   eventCount: 19
   stepCounts:
@@ -1607,11 +1607,11 @@ HLT_j45_subjesgscIS_ftf_L1J15:
     0: 20
     1: 48
 HLT_j45_subjesgscIS_ftf_bdl1r70_split_L1J20:
-  eventCount: 14
+  eventCount: 13
   stepCounts:
     0: 19
     1: 19
-    2: 14
+    2: 13
   stepFeatures:
     0: 19
     1: 47
@@ -1625,7 +1625,7 @@ HLT_j45_subjesgscIS_ftf_bmv2c1070_split_L1J20:
   stepFeatures:
     0: 19
     1: 47
-    2: 29
+    2: 27
 HLT_j45_subjesgscIS_ftf_boffperf_split_L1J20:
   eventCount: 19
   stepCounts:
@@ -1719,7 +1719,7 @@ HLT_j85_pf_ftf_L1J20:
     1: 13
   stepFeatures:
     0: 19
-    1: 22
+    1: 21
 HLT_mb_sptrk_L1RD0_FILLED:
   eventCount: 0
 HLT_mu0_muoncalib_L1MU20:
@@ -1758,182 +1758,182 @@ HLT_mu11_mu6_bBmumu_L1MU11_2MU6:
   eventCount: 0
   stepCounts:
     0: 3
-    1: 2
+    1: 1
   stepFeatures:
     0: 6
-    1: 4
+    1: 2
 HLT_mu11_mu6_bDimu2700_L1MU11_2MU6:
   eventCount: 0
   stepCounts:
     0: 3
-    1: 2
+    1: 1
   stepFeatures:
     0: 6
-    1: 4
+    1: 2
 HLT_mu11_mu6_bDimu_L1MU11_2MU6:
   eventCount: 0
   stepCounts:
     0: 3
-    1: 2
+    1: 1
   stepFeatures:
     0: 6
-    1: 4
+    1: 2
 HLT_mu11_mu6_bJpsimumu_L1MU11_2MU6:
   eventCount: 0
   stepCounts:
     0: 3
-    1: 2
+    1: 1
   stepFeatures:
     0: 6
-    1: 4
+    1: 2
 HLT_mu11_mu6_bPhi_L1MU11_2MU6:
   eventCount: 0
   stepCounts:
     0: 3
-    1: 2
+    1: 1
   stepFeatures:
     0: 6
-    1: 4
+    1: 2
 HLT_mu11_mu6_bTau_L1MU11_2MU6:
   eventCount: 0
   stepCounts:
     0: 3
-    1: 2
+    1: 1
   stepFeatures:
     0: 6
-    1: 4
+    1: 2
 HLT_mu11_mu6_bUpsimumu_L1MU11_2MU6:
   eventCount: 0
   stepCounts:
     0: 3
-    1: 2
+    1: 1
   stepFeatures:
     0: 6
-    1: 4
+    1: 2
 HLT_mu14_L1MU10:
   eventCount: 7
   stepCounts:
     0: 10
-    1: 8
-    2: 8
+    1: 7
+    2: 7
     3: 7
   stepFeatures:
     0: 12
-    1: 9
-    2: 10
+    1: 8
+    2: 8
     3: 8
 HLT_mu20_2mu4noL1_L1MU20:
   eventCount: 1
   stepCounts:
     0: 8
-    1: 5
-    2: 5
-    3: 5
+    1: 6
+    2: 6
+    3: 6
     4: 1
     5: 1
   stepFeatures:
     0: 10
-    1: 6
-    2: 6
-    3: 6
+    1: 7
+    2: 7
+    3: 7
     4: 2
     5: 2
 HLT_mu20_ivarmedium_mu4noL1_10invm70_L1MU20:
   eventCount: 2
   stepCounts:
     0: 8
-    1: 5
-    2: 5
-    3: 5
-    4: 3
+    1: 6
+    2: 6
+    3: 6
+    4: 4
     5: 2
     6: 2
   stepFeatures:
     0: 10
-    1: 6
-    2: 6
-    3: 6
-    4: 3
+    1: 7
+    2: 7
+    3: 7
+    4: 4
     5: 2
     6: 2
 HLT_mu20_ivarmedium_mu8noL1_L1MU20:
   eventCount: 1
   stepCounts:
     0: 8
-    1: 5
-    2: 5
-    3: 5
-    4: 3
+    1: 6
+    2: 6
+    3: 6
+    4: 4
     5: 1
     6: 1
   stepFeatures:
     0: 10
-    1: 6
-    2: 6
-    3: 6
-    4: 3
+    1: 7
+    2: 7
+    3: 7
+    4: 4
     5: 1
     6: 1
 HLT_mu22_2mu4noL1_L1MU20:
   eventCount: 1
   stepCounts:
     0: 8
-    1: 5
-    2: 4
-    3: 4
+    1: 6
+    2: 5
+    3: 5
     4: 1
     5: 1
   stepFeatures:
     0: 10
-    1: 6
-    2: 5
-    3: 5
+    1: 7
+    2: 6
+    3: 6
     4: 2
     5: 2
 HLT_mu22_mu8noL1_L1MU20:
   eventCount: 2
   stepCounts:
     0: 8
-    1: 5
-    2: 4
-    3: 4
+    1: 6
+    2: 5
+    3: 5
     4: 2
     5: 2
   stepFeatures:
     0: 10
-    1: 6
-    2: 5
-    3: 5
+    1: 7
+    2: 6
+    3: 6
     4: 3
     5: 3
 HLT_mu24_2mu4noL1_L1MU20:
   eventCount: 1
   stepCounts:
     0: 8
-    1: 5
-    2: 4
-    3: 4
+    1: 6
+    2: 5
+    3: 5
     4: 1
     5: 1
   stepFeatures:
     0: 10
-    1: 6
-    2: 5
-    3: 5
+    1: 7
+    2: 6
+    3: 6
     4: 2
     5: 2
 HLT_mu24_L1MU20:
-  eventCount: 4
+  eventCount: 5
   stepCounts:
     0: 8
-    1: 5
-    2: 4
-    3: 4
-  stepFeatures:
-    0: 10
     1: 6
     2: 5
     3: 5
+  stepFeatures:
+    0: 10
+    1: 7
+    2: 6
+    3: 6
 HLT_mu24_idperf_L1MU20:
   eventCount: 5
   stepCounts:
@@ -1947,137 +1947,137 @@ HLT_mu24_idperf_L1MU20:
     2: 6
     3: 6
 HLT_mu24_ivarmedium_L1MU20:
-  eventCount: 2
+  eventCount: 3
   stepCounts:
     0: 8
-    1: 5
-    2: 4
-    3: 4
-    4: 2
-  stepFeatures:
-    0: 10
     1: 6
     2: 5
     3: 5
-    4: 2
+    4: 3
+  stepFeatures:
+    0: 10
+    1: 7
+    2: 6
+    3: 6
+    4: 3
 HLT_mu24_mu10noL1_L1MU20:
   eventCount: 2
   stepCounts:
     0: 8
-    1: 5
-    2: 4
-    3: 4
+    1: 6
+    2: 5
+    3: 5
     4: 2
     5: 2
   stepFeatures:
     0: 10
-    1: 6
-    2: 5
-    3: 5
+    1: 7
+    2: 6
+    3: 6
     4: 3
     5: 3
 HLT_mu26_L1MU20:
-  eventCount: 3
+  eventCount: 4
   stepCounts:
     0: 8
-    1: 5
-    2: 4
-    3: 3
-  stepFeatures:
-    0: 10
     1: 6
     2: 5
     3: 4
+  stepFeatures:
+    0: 10
+    1: 7
+    2: 6
+    3: 5
 HLT_mu26_ivarmedium_L1MU20:
-  eventCount: 2
+  eventCount: 3
   stepCounts:
     0: 8
-    1: 5
-    2: 4
-    3: 3
-    4: 2
-  stepFeatures:
-    0: 10
     1: 6
     2: 5
     3: 4
-    4: 2
+    4: 3
+  stepFeatures:
+    0: 10
+    1: 7
+    2: 6
+    3: 5
+    4: 3
 HLT_mu26_mu10noL1_L1MU20:
   eventCount: 1
   stepCounts:
     0: 8
-    1: 5
-    2: 4
-    3: 3
+    1: 6
+    2: 5
+    3: 4
     4: 1
     5: 1
   stepFeatures:
     0: 10
-    1: 6
-    2: 5
-    3: 4
+    1: 7
+    2: 6
+    3: 5
     4: 2
     5: 2
 HLT_mu26_mu8noL1_L1MU20:
   eventCount: 1
   stepCounts:
     0: 8
-    1: 5
-    2: 4
-    3: 3
+    1: 6
+    2: 5
+    3: 4
     4: 1
     5: 1
   stepFeatures:
     0: 10
-    1: 6
-    2: 5
-    3: 4
+    1: 7
+    2: 6
+    3: 5
     4: 2
     5: 2
 HLT_mu28_ivarmedium_L1MU20:
-  eventCount: 2
+  eventCount: 3
   stepCounts:
     0: 8
-    1: 5
-    2: 4
-    3: 3
-    4: 2
+    1: 6
+    2: 5
+    3: 4
+    4: 3
   stepFeatures:
     0: 10
-    1: 6
-    2: 5
-    3: 3
-    4: 2
+    1: 7
+    2: 6
+    3: 4
+    4: 3
 HLT_mu28_mu8noL1_L1MU20:
   eventCount: 1
   stepCounts:
     0: 8
-    1: 5
-    2: 4
-    3: 3
+    1: 6
+    2: 5
+    3: 4
     4: 1
     5: 1
   stepFeatures:
     0: 10
-    1: 6
-    2: 5
-    3: 3
+    1: 7
+    2: 6
+    3: 4
     4: 1
     5: 1
 HLT_mu35_ivarmedium_L1MU20:
-  eventCount: 1
+  eventCount: 2
   stepCounts:
     0: 8
-    1: 5
-    2: 3
-    3: 2
-    4: 1
-  stepFeatures:
-    0: 10
     1: 6
     2: 4
-    3: 2
-    4: 1
+    3: 3
+    4: 2
+  stepFeatures:
+    0: 10
+    1: 7
+    2: 5
+    3: 3
+    4: 2
 HLT_mu4_l2io_L1MU4:
   eventCount: 11
   stepCounts:
@@ -2087,32 +2087,32 @@ HLT_mu4_l2io_L1MU4:
     3: 11
   stepFeatures:
     0: 14
-    1: 46
+    1: 28
     2: 15
     3: 15
 HLT_mu50_L1MU20:
   eventCount: 1
   stepCounts:
     0: 8
-    1: 5
+    1: 6
     2: 2
     3: 1
   stepFeatures:
     0: 10
-    1: 6
+    1: 7
     2: 2
     3: 1
 HLT_mu50_RPCPEBSecondaryReadout_L1MU20:
   eventCount: 1
   stepCounts:
     0: 8
-    1: 5
+    1: 6
     2: 2
     3: 1
     4: 1
   stepFeatures:
     0: 10
-    1: 6
+    1: 7
     2: 2
     3: 1
     4: 1
@@ -2126,12 +2126,12 @@ HLT_mu60_L1MU20:
   eventCount: 1
   stepCounts:
     0: 8
-    1: 5
+    1: 6
     2: 1
     3: 1
   stepFeatures:
     0: 10
-    1: 6
+    1: 7
     2: 1
     3: 1
 HLT_mu6_L1MU6:
@@ -2205,21 +2205,21 @@ HLT_mu6_mu4_L12MU4:
     2: 4
     3: 4
 HLT_mu6_mu6noL1_L1MU6:
-  eventCount: 4
+  eventCount: 3
   stepCounts:
     0: 10
     1: 10
     2: 10
     3: 10
     4: 6
-    5: 4
+    5: 3
   stepFeatures:
     0: 13
     1: 12
     2: 13
     3: 13
     4: 8
-    5: 6
+    5: 5
 HLT_mu6_xe30_mht_L1XE10:
   eventCount: 9
   stepCounts:
@@ -2238,11 +2238,11 @@ HLT_mu80_L1MU20:
   eventCount: 0
   stepCounts:
     0: 8
-    1: 5
+    1: 6
     2: 1
   stepFeatures:
     0: 10
-    1: 6
+    1: 7
     2: 1
 HLT_mu80_msonly_3layersEC_L1MU20:
   eventCount: 1
@@ -2407,17 +2407,17 @@ HLT_sct_noise_SCTPEB_L1RD0_EMPTY:
 HLT_tau0_perf_ptonly_L1TAU100:
   eventCount: 0
 HLT_tau0_perf_ptonly_L1TAU12:
-  eventCount: 18
+  eventCount: 15
   stepCounts:
     0: 18
-    1: 18
-    2: 18
-    3: 18
+    1: 15
+    2: 15
+    3: 15
   stepFeatures:
     0: 42
-    1: 27
-    2: 27
-    3: 27
+    1: 23
+    2: 23
+    3: 23
 HLT_tau0_perf_ptonly_L1TAU60:
   eventCount: 3
   stepCounts:
@@ -2451,222 +2451,222 @@ HLT_tau200_medium1_tracktwoEF_L1TAU100:
 HLT_tau200_mediumRNN_tracktwoMVA_L1TAU100:
   eventCount: 0
 HLT_tau25_idperf_track_L1TAU12IM:
-  eventCount: 13
+  eventCount: 11
   stepCounts:
     0: 14
-    1: 13
-    2: 13
-    3: 13
-    4: 13
+    1: 11
+    2: 11
+    3: 11
+    4: 11
   stepFeatures:
     0: 24
-    1: 16
-    2: 16
-    3: 16
-    4: 16
+    1: 15
+    2: 15
+    3: 15
+    4: 15
 HLT_tau25_idperf_tracktwoMVA_L1TAU12IM:
-  eventCount: 11
+  eventCount: 12
   stepCounts:
     0: 14
-    1: 13
+    1: 12
     2: 12
-    3: 11
-    4: 11
+    3: 12
+    4: 12
   stepFeatures:
     0: 24
-    1: 17
+    1: 16
     2: 14
-    3: 13
-    4: 13
+    3: 14
+    4: 14
 HLT_tau25_idperf_tracktwo_L1TAU12IM:
-  eventCount: 11
+  eventCount: 12
   stepCounts:
     0: 14
-    1: 13
+    1: 12
     2: 12
     3: 12
-    4: 11
-    5: 11
+    4: 12
+    5: 12
   stepFeatures:
     0: 24
-    1: 17
+    1: 16
     2: 14
     3: 14
-    4: 13
-    5: 13
+    4: 14
+    5: 14
 HLT_tau25_looseRNN_tracktwoMVA_L1TAU12IM:
   eventCount: 8
   stepCounts:
     0: 12
-    1: 11
+    1: 10
     2: 10
-    3: 9
+    3: 10
     4: 8
   stepFeatures:
     0: 22
-    1: 15
+    1: 14
     2: 12
-    3: 11
+    3: 12
     4: 9
 HLT_tau25_looseRNN_tracktwo_L1TAU12IM:
   eventCount: 0
   stepCounts:
     0: 12
-    1: 11
+    1: 10
     2: 10
   stepFeatures:
     0: 22
-    1: 15
+    1: 14
     2: 12
 HLT_tau25_medium1_tracktwoEF_L1TAU12IM:
   eventCount: 0
   stepCounts:
     0: 12
-    1: 11
+    1: 10
     2: 10
-    3: 9
+    3: 10
   stepFeatures:
     0: 22
-    1: 15
+    1: 14
     2: 12
-    3: 11
+    3: 12
 HLT_tau25_medium1_tracktwoMVA_L1TAU12IM:
   eventCount: 0
   stepCounts:
     0: 12
-    1: 11
+    1: 10
     2: 10
-    3: 9
+    3: 10
   stepFeatures:
     0: 22
-    1: 15
+    1: 14
     2: 12
-    3: 11
+    3: 12
 HLT_tau25_medium1_tracktwo_L1TAU12IM:
   eventCount: 0
   stepCounts:
     0: 12
-    1: 11
+    1: 10
     2: 10
   stepFeatures:
     0: 22
-    1: 15
+    1: 14
     2: 12
 HLT_tau25_mediumRNN_tracktwoMVA_L1TAU12IM:
   eventCount: 5
   stepCounts:
     0: 12
-    1: 11
+    1: 10
     2: 10
-    3: 9
+    3: 10
     4: 5
   stepFeatures:
     0: 22
-    1: 15
+    1: 14
     2: 12
-    3: 11
+    3: 12
     4: 5
 HLT_tau25_mediumRNN_tracktwo_L1TAU12IM:
   eventCount: 0
   stepCounts:
     0: 12
-    1: 11
+    1: 10
     2: 10
   stepFeatures:
     0: 22
-    1: 15
+    1: 14
     2: 12
 HLT_tau25_perf_tracktwoMVA_L1TAU12IM:
-  eventCount: 11
+  eventCount: 12
   stepCounts:
     0: 14
-    1: 13
+    1: 12
     2: 12
-    3: 11
-    4: 11
+    3: 12
+    4: 12
   stepFeatures:
     0: 24
-    1: 17
+    1: 16
     2: 14
-    3: 13
-    4: 13
+    3: 14
+    4: 14
 HLT_tau25_perf_tracktwo_L1TAU12IM:
-  eventCount: 11
+  eventCount: 12
   stepCounts:
     0: 14
-    1: 13
+    1: 12
     2: 12
     3: 12
-    4: 11
-    5: 11
+    4: 12
+    5: 12
   stepFeatures:
     0: 24
-    1: 17
+    1: 16
     2: 14
     3: 14
-    4: 13
-    5: 13
+    4: 14
+    5: 14
 HLT_tau25_tightRNN_tracktwoMVA_L1TAU12IM:
   eventCount: 4
   stepCounts:
     0: 12
-    1: 11
+    1: 10
     2: 10
-    3: 9
+    3: 10
     4: 4
   stepFeatures:
     0: 22
-    1: 15
+    1: 14
     2: 12
-    3: 11
+    3: 12
     4: 4
 HLT_tau25_tightRNN_tracktwo_L1TAU12IM:
   eventCount: 0
   stepCounts:
     0: 12
-    1: 11
+    1: 10
     2: 10
   stepFeatures:
     0: 22
-    1: 15
+    1: 14
     2: 12
 HLT_tau25_verylooseRNN_tracktwoMVA_L1TAU12IM:
-  eventCount: 8
+  eventCount: 9
   stepCounts:
     0: 12
-    1: 11
+    1: 10
     2: 10
-    3: 9
-    4: 8
+    3: 10
+    4: 9
   stepFeatures:
     0: 22
-    1: 15
+    1: 14
     2: 12
-    3: 11
-    4: 9
+    3: 12
+    4: 11
 HLT_tau25_verylooseRNN_tracktwo_L1TAU12IM:
   eventCount: 0
   stepCounts:
     0: 12
-    1: 11
+    1: 10
     2: 10
   stepFeatures:
     0: 22
-    1: 15
+    1: 14
     2: 12
 HLT_tau35_mediumRNN_tracktwoMVA_L1TAU12IM:
   eventCount: 4
   stepCounts:
     0: 11
-    1: 10
+    1: 9
     2: 9
-    3: 8
+    3: 9
     4: 4
   stepFeatures:
     0: 20
-    1: 14
+    1: 13
     2: 11
-    3: 10
+    3: 11
     4: 4
 HLT_tau35_mediumRNN_tracktwoMVA_tau25_mediumRNN_tracktwoMVA_L1DR-TAU20ITAU12I-J25:
   eventCount: 0
@@ -2700,20 +2700,30 @@ HLT_tau80_mediumRNN_tracktwoMVA_tau60_mediumRNN_tracktwoMVA_L1TAU60_2TAU40:
   eventCount: 0
   stepCounts:
     0: 2
+    1: 1
+    2: 1
+    3: 1
   stepFeatures:
     0: 5
+    1: 2
+    2: 2
+    3: 2
 HLT_tilecalib_laser_TilePEB_L1CALREQ2:
   eventCount: 0
 HLT_timeburner_L1All:
   eventCount: 0
 HLT_unconvtrk120_isohpttrack_iaggrmedium_L1XE50:
-  eventCount: 0
+  eventCount: 3
+  stepCounts:
+    0: 3
+  stepFeatures:
+    0: 3
 HLT_unconvtrk120_isohpttrack_imedium_L1XE50:
   eventCount: 6
   stepCounts:
     0: 6
   stepFeatures:
-    0: 6
+    0: 11
 HLT_xe100_cvfpufit_L1XE50:
   eventCount: 6
   stepCounts:
@@ -2745,21 +2755,21 @@ HLT_xe100_mhtpufit_pf_subjesgscIS_L1XE50:
     0: 10
     1: 5
 HLT_xe100_pfopufit_L1XE50:
-  eventCount: 4
+  eventCount: 5
   stepCounts:
     0: 10
-    1: 4
+    1: 5
   stepFeatures:
     0: 10
-    1: 4
+    1: 5
 HLT_xe100_pfsum_L1XE50:
-  eventCount: 5
+  eventCount: 6
   stepCounts:
     0: 10
-    1: 5
+    1: 6
   stepFeatures:
     0: 10
-    1: 5
+    1: 6
 HLT_xe100_tcpufit_L1XE50:
   eventCount: 6
   stepCounts:
@@ -2789,13 +2799,13 @@ HLT_xe110_mht_L1XE50:
   stepFeatures:
     0: 7
 HLT_xe110_pfsum_L1XE50:
-  eventCount: 4
+  eventCount: 5
   stepCounts:
     0: 10
-    1: 4
+    1: 5
   stepFeatures:
     0: 10
-    1: 4
+    1: 5
 HLT_xe110_pfsum_cssk_L1XE50:
   eventCount: 3
   stepCounts:
@@ -2851,13 +2861,13 @@ HLT_xe30_mht_L1XE10:
   stepFeatures:
     0: 19
 HLT_xe30_mhtpufit_em_subjesgscIS_L1XE10:
-  eventCount: 18
+  eventCount: 17
   stepCounts:
     0: 19
-    1: 18
+    1: 17
   stepFeatures:
     0: 19
-    1: 18
+    1: 17
 HLT_xe30_mhtpufit_pf_subjesgscIS_L1XE10:
   eventCount: 17
   stepCounts:
@@ -2875,21 +2885,21 @@ HLT_xe30_pfopufit_L1XE10:
     0: 19
     1: 15
 HLT_xe30_pfsum_L1XE10:
-  eventCount: 17
+  eventCount: 16
   stepCounts:
     0: 19
-    1: 17
+    1: 16
   stepFeatures:
     0: 19
-    1: 17
+    1: 16
 HLT_xe30_pfsum_cssk_L1XE10:
-  eventCount: 16
+  eventCount: 15
   stepCounts:
     0: 19
-    1: 16
+    1: 15
   stepFeatures:
     0: 19
-    1: 16
+    1: 15
 HLT_xe30_pfsum_vssk_L1XE10:
   eventCount: 18
   stepCounts:
@@ -2925,9 +2935,17 @@ HLT_xe65_cell_xe110_tcpufit_L1XE50:
   stepFeatures:
     0: 5
 HLT_xe80_tcpufit_unconvtrk120_isohpttrack_iaggrloose_L1XE50:
-  eventCount: 0
+  eventCount: 3
+  stepCounts:
+    0: 3
+  stepFeatures:
+    0: 3
 HLT_xe80_tcpufit_unconvtrk120_isohpttrack_iaggrmedium_L1XE50:
-  eventCount: 0
+  eventCount: 2
+  stepCounts:
+    0: 2
+  stepFeatures:
+    0: 2
 HLT_xe80_tcpufit_unconvtrk120_isohpttrack_imedium_L1XE50:
   eventCount: 5
   stepCounts:
diff --git a/Trigger/TrigValidation/TriggerTest/share/ref_data_v1Dev_build.ref b/Trigger/TrigValidation/TriggerTest/share/ref_data_v1Dev_build.ref
index 3daf44711e76113490459f3065ea9b6f6e42b4d1..4a0b0fb8a4f79a708e67221d65ea58f016935063 100644
--- a/Trigger/TrigValidation/TriggerTest/share/ref_data_v1Dev_build.ref
+++ b/Trigger/TrigValidation/TriggerTest/share/ref_data_v1Dev_build.ref
@@ -29,17 +29,17 @@ HLT_2e17_lhvloose_L12EM3:
 HLT_2e24_lhvloose_L12EM20VH:
   eventCount: 0
 HLT_2e3_etcut_L12EM3:
-  eventCount: 12
+  eventCount: 9
   stepCounts:
     0: 15
-    1: 12
-    2: 12
-    3: 12
+    1: 9
+    2: 9
+    3: 9
   stepFeatures:
     0: 94
-    1: 294
-    2: 180
-    3: 80
+    1: 114
+    2: 142
+    3: 54
 HLT_2g10_loose_mu20_L1MU20:
   eventCount: 0
 HLT_2g15_tight_dPhi15_L1DPHI-M70-2EM12I:
@@ -283,7 +283,7 @@ HLT_e12_lhloose_2mu10_L12MU10:
     1: 1
   stepFeatures:
     0: 1
-    1: 14
+    1: 4
 HLT_e140_lhloose_L1EM22VHI:
   eventCount: 0
 HLT_e140_lhloose_nod0_L1EM22VHI:
@@ -359,9 +359,9 @@ HLT_e3_etcut_L1EM3:
     3: 13
   stepFeatures:
     0: 48
-    1: 148
-    2: 91
-    3: 41
+    1: 62
+    2: 77
+    3: 31
 HLT_e5_etcut_L1EM3:
   eventCount: 12
   stepCounts:
@@ -371,33 +371,33 @@ HLT_e5_etcut_L1EM3:
     3: 12
   stepFeatures:
     0: 41
-    1: 135
-    2: 51
-    3: 35
+    1: 54
+    2: 41
+    3: 27
 HLT_e5_lhloose_L1EM3:
   eventCount: 0
   stepCounts:
     0: 10
-    1: 9
-    2: 8
-    3: 8
+    1: 8
+    2: 7
+    3: 7
   stepFeatures:
     0: 19
-    1: 56
-    2: 21
-    3: 14
+    1: 22
+    2: 13
+    3: 9
 HLT_e5_lhloose_noringer_L1EM3:
   eventCount: 0
   stepCounts:
     0: 10
-    1: 9
-    2: 9
-    3: 9
+    1: 8
+    2: 8
+    3: 8
   stepFeatures:
     0: 15
-    1: 49
-    2: 27
-    3: 14
+    1: 22
+    2: 23
+    3: 12
 HLT_e5_lhmedium_L1EM3:
   eventCount: 0
   stepCounts:
@@ -407,69 +407,69 @@ HLT_e5_lhmedium_L1EM3:
     3: 8
   stepFeatures:
     0: 21
-    1: 60
-    2: 23
-    3: 16
+    1: 24
+    2: 15
+    3: 11
 HLT_e5_lhmedium_noringer_L1EM3:
   eventCount: 0
   stepCounts:
     0: 9
-    1: 8
-    2: 8
-    3: 8
+    1: 7
+    2: 7
+    3: 7
   stepFeatures:
     0: 13
-    1: 47
-    2: 25
-    3: 12
+    1: 20
+    2: 21
+    3: 10
 HLT_e5_lhtight_L1EM3:
   eventCount: 0
   stepCounts:
     0: 9
-    1: 8
-    2: 7
-    3: 7
+    1: 7
+    2: 6
+    3: 6
   stepFeatures:
     0: 17
-    1: 46
-    2: 18
-    3: 12
+    1: 17
+    2: 10
+    3: 7
 HLT_e5_lhtight_nod0_L1EM3:
   eventCount: 0
   stepCounts:
     0: 9
-    1: 8
-    2: 7
-    3: 7
+    1: 7
+    2: 6
+    3: 6
   stepFeatures:
     0: 17
-    1: 46
-    2: 18
-    3: 12
+    1: 17
+    2: 10
+    3: 7
 HLT_e5_lhtight_noringer_L1EM3:
   eventCount: 0
   stepCounts:
     0: 9
-    1: 8
-    2: 8
-    3: 8
+    1: 7
+    2: 7
+    3: 7
   stepFeatures:
     0: 13
-    1: 47
-    2: 25
-    3: 12
+    1: 20
+    2: 21
+    3: 10
 HLT_e5_lhtight_noringer_nod0_L1EM3:
   eventCount: 0
   stepCounts:
     0: 9
-    1: 8
-    2: 8
-    3: 8
+    1: 7
+    2: 7
+    3: 7
   stepFeatures:
     0: 13
-    1: 47
-    2: 25
-    3: 12
+    1: 20
+    2: 21
+    3: 10
 HLT_e60_lhmedium_L1EM22VHI:
   eventCount: 0
 HLT_e60_lhmedium_nod0_L1EM22VHI:
@@ -483,9 +483,9 @@ HLT_e7_etcut_L1EM3:
     3: 11
   stepFeatures:
     0: 26
-    1: 89
-    2: 26
-    3: 20
+    1: 43
+    2: 22
+    3: 16
 HLT_e7_lhmedium_mu24_L1MU20:
   eventCount: 0
 HLT_eb_low_L1RD2_FILLED:
@@ -1108,10 +1108,8 @@ HLT_mu4_l2io_L1MU4:
   eventCount: 0
   stepCounts:
     0: 2
-    1: 1
   stepFeatures:
     0: 3
-    1: 1
 HLT_mu50_L1MU20:
   eventCount: 0
   stepCounts:
@@ -1369,7 +1367,7 @@ HLT_tau0_perf_ptonly_L1TAU12:
     3: 4
   stepFeatures:
     0: 5
-    1: 5
+    1: 4
     2: 4
     3: 4
 HLT_tau0_perf_ptonly_L1TAU60:
@@ -1404,8 +1402,8 @@ HLT_tau25_idperf_track_L1TAU12IM:
     4: 3
   stepFeatures:
     0: 4
-    1: 4
-    2: 4
+    1: 3
+    2: 3
     3: 3
     4: 3
 HLT_tau25_idperf_tracktwoMVA_L1TAU12IM:
@@ -1418,7 +1416,7 @@ HLT_tau25_idperf_tracktwoMVA_L1TAU12IM:
     4: 3
   stepFeatures:
     0: 4
-    1: 4
+    1: 3
     2: 3
     3: 3
     4: 3
@@ -1433,7 +1431,7 @@ HLT_tau25_idperf_tracktwo_L1TAU12IM:
     5: 3
   stepFeatures:
     0: 4
-    1: 4
+    1: 3
     2: 3
     3: 3
     4: 3
@@ -1448,7 +1446,7 @@ HLT_tau25_looseRNN_tracktwoMVA_L1TAU12IM:
     4: 2
   stepFeatures:
     0: 4
-    1: 4
+    1: 3
     2: 3
     3: 3
     4: 2
@@ -1460,7 +1458,7 @@ HLT_tau25_looseRNN_tracktwo_L1TAU12IM:
     2: 3
   stepFeatures:
     0: 4
-    1: 4
+    1: 3
     2: 3
 HLT_tau25_medium1_tracktwoEF_L1TAU12IM:
   eventCount: 0
@@ -1471,7 +1469,7 @@ HLT_tau25_medium1_tracktwoEF_L1TAU12IM:
     3: 3
   stepFeatures:
     0: 4
-    1: 4
+    1: 3
     2: 3
     3: 3
 HLT_tau25_medium1_tracktwoMVA_L1TAU12IM:
@@ -1483,7 +1481,7 @@ HLT_tau25_medium1_tracktwoMVA_L1TAU12IM:
     3: 3
   stepFeatures:
     0: 4
-    1: 4
+    1: 3
     2: 3
     3: 3
 HLT_tau25_medium1_tracktwo_L1TAU12IM:
@@ -1494,7 +1492,7 @@ HLT_tau25_medium1_tracktwo_L1TAU12IM:
     2: 3
   stepFeatures:
     0: 4
-    1: 4
+    1: 3
     2: 3
 HLT_tau25_mediumRNN_tracktwoMVA_L1TAU12IM:
   eventCount: 1
@@ -1506,7 +1504,7 @@ HLT_tau25_mediumRNN_tracktwoMVA_L1TAU12IM:
     4: 1
   stepFeatures:
     0: 4
-    1: 4
+    1: 3
     2: 3
     3: 3
     4: 1
@@ -1518,7 +1516,7 @@ HLT_tau25_mediumRNN_tracktwo_L1TAU12IM:
     2: 3
   stepFeatures:
     0: 4
-    1: 4
+    1: 3
     2: 3
 HLT_tau25_perf_tracktwoMVA_L1TAU12IM:
   eventCount: 3
@@ -1530,7 +1528,7 @@ HLT_tau25_perf_tracktwoMVA_L1TAU12IM:
     4: 3
   stepFeatures:
     0: 4
-    1: 4
+    1: 3
     2: 3
     3: 3
     4: 3
@@ -1545,7 +1543,7 @@ HLT_tau25_perf_tracktwo_L1TAU12IM:
     5: 3
   stepFeatures:
     0: 4
-    1: 4
+    1: 3
     2: 3
     3: 3
     4: 3
@@ -1560,7 +1558,7 @@ HLT_tau25_tightRNN_tracktwoMVA_L1TAU12IM:
     4: 1
   stepFeatures:
     0: 4
-    1: 4
+    1: 3
     2: 3
     3: 3
     4: 1
@@ -1572,7 +1570,7 @@ HLT_tau25_tightRNN_tracktwo_L1TAU12IM:
     2: 3
   stepFeatures:
     0: 4
-    1: 4
+    1: 3
     2: 3
 HLT_tau25_verylooseRNN_tracktwoMVA_L1TAU12IM:
   eventCount: 2
@@ -1584,7 +1582,7 @@ HLT_tau25_verylooseRNN_tracktwoMVA_L1TAU12IM:
     4: 2
   stepFeatures:
     0: 4
-    1: 4
+    1: 3
     2: 3
     3: 3
     4: 2
@@ -1596,7 +1594,7 @@ HLT_tau25_verylooseRNN_tracktwo_L1TAU12IM:
     2: 3
   stepFeatures:
     0: 4
-    1: 4
+    1: 3
     2: 3
 HLT_tau35_mediumRNN_tracktwoMVA_L1TAU12IM:
   eventCount: 1
@@ -1608,7 +1606,7 @@ HLT_tau35_mediumRNN_tracktwoMVA_L1TAU12IM:
     4: 1
   stepFeatures:
     0: 4
-    1: 4
+    1: 3
     2: 3
     3: 3
     4: 1
@@ -1616,10 +1614,8 @@ HLT_tau35_mediumRNN_tracktwoMVA_tau25_mediumRNN_tracktwoMVA_L1DR-TAU20ITAU12I-J2
   eventCount: 0
   stepCounts:
     0: 1
-    1: 1
   stepFeatures:
     0: 2
-    1: 2
 HLT_tau80_medium1_tracktwo_L1TAU60:
   eventCount: 0
 HLT_tau80_mediumRNN_tracktwoMVA_tau35_mediumRNN_tracktwoMVA_L1TAU60_DR-TAU20ITAU12I:
@@ -1639,13 +1635,13 @@ HLT_unconvtrk120_isohpttrack_iaggrmedium_L1XE50:
   stepCounts:
     0: 3
   stepFeatures:
-    0: 4
+    0: 3
 HLT_unconvtrk120_isohpttrack_imedium_L1XE50:
-  eventCount: 9
+  eventCount: 4
   stepCounts:
-    0: 9
+    0: 4
   stepFeatures:
-    0: 13
+    0: 4
 HLT_xe100_cvfpufit_L1XE50:
   eventCount: 0
   stepCounts:
@@ -1755,13 +1751,13 @@ HLT_xe30_mhtpufit_em_subjesgscIS_L1XE10:
     0: 20
     1: 5
 HLT_xe30_mhtpufit_pf_subjesgscIS_L1XE10:
-  eventCount: 3
+  eventCount: 2
   stepCounts:
     0: 20
-    1: 3
+    1: 2
   stepFeatures:
     0: 20
-    1: 3
+    1: 2
 HLT_xe30_pfopufit_L1XE10:
   eventCount: 2
   stepCounts:
@@ -1819,6 +1815,10 @@ HLT_xe80_tcpufit_unconvtrk120_isohpttrack_iaggrmedium_L1XE50:
 HLT_xe80_tcpufit_unconvtrk120_isohpttrack_imedium_L1XE50:
   eventCount: 0
 HLT_xe80_tcpufit_unconvtrk80_isohpttrack_imedium_L1XE50:
-  eventCount: 0
+  eventCount: 1
+  stepCounts:
+    0: 1
+  stepFeatures:
+    0: 1
 HLT_xe95_trkmht_xe90_tcpufit_xe75_cell_L1XE50:
   eventCount: 0
diff --git a/Trigger/TriggerCommon/TrigEDMConfig/python/TriggerEDMRun3.py b/Trigger/TriggerCommon/TrigEDMConfig/python/TriggerEDMRun3.py
index 3a4aaf807ea4bf3792f3ba682f8c9350634ba035..97d157309544005369f79c0f7828f76a2fcd8b4d 100644
--- a/Trigger/TriggerCommon/TrigEDMConfig/python/TriggerEDMRun3.py
+++ b/Trigger/TriggerCommon/TrigEDMConfig/python/TriggerEDMRun3.py
@@ -97,7 +97,7 @@ TriggerHLTListRun3 = [
     ('xAOD::TrigCompositeContainer#HLT_TrigCostContainer',   'CostMonDS ESD', 'Steer'),
     ('xAOD::TrigCompositeAuxContainer#HLT_TrigCostContainerAux.alg.store.view.thread.thash.slot.roi.start.stop', 'CostMonDS ESD', 'Steer'),
     ('xAOD::TrigCompositeContainer#HLT_TrigCostROSContainer',   'CostMonDS ESD', 'Steer'),
-    ('xAOD::TrigCompositeAuxContainer#HLT_TrigCostROSContainerAux.alg_idx.lvl1ID.robs_is.robs_size.robs_history.robs_status.start.stop', 'CostMonDS ESD', 'Steer'),
+    ('xAOD::TrigCompositeAuxContainer#HLT_TrigCostROSContainerAux.alg_idx.lvl1ID.robs_id.robs_size.robs_history.robs_status.start.stop', 'CostMonDS ESD', 'Steer'),
 
     # Run-2 L1 (temporary)
     ('xAOD::MuonRoIContainer#LVL1MuonRoIs' ,                 'ESD AODFULL AODSLIM AODVERYSLIM AODBLSSLIM', 'L1'),
@@ -126,7 +126,7 @@ TriggerHLTListRun3 = [
     ('xAOD::TrackParticleContainer#HLT_IDTrack_Electron_FTF',        'BS ESD AODFULL', 'Egamma', 'inViews:EMElectronViews'),
     ('xAOD::TrackParticleAuxContainer#HLT_IDTrack_Electron_FTFAux.', 'BS ESD AODFULL', 'Egamma'),
 
-    ('xAOD::TrackParticleContainer#HLT_IDTrack_Electron_IDTrig',        'BS ESD AODFULL', 'Egamma', 'inViews:precisionElectronViews'),
+    ('xAOD::TrackParticleContainer#HLT_IDTrack_Electron_IDTrig',        'BS ESD AODFULL', 'Egamma', 'inViews:precisionEtcutViews'),
     ('xAOD::TrackParticleAuxContainer#HLT_IDTrack_Electron_IDTrigAux.', 'BS ESD AODFULL', 'Egamma'),
 
 
diff --git a/Trigger/TriggerCommon/TriggerJobOpts/share/runHLT_standalone.py b/Trigger/TriggerCommon/TriggerJobOpts/share/runHLT_standalone.py
index 3c16fd19c9aca0e116c78326abfb7ccc8d98222d..8506c7025646daf03652d4dad4870ff92d29d642 100644
--- a/Trigger/TriggerCommon/TriggerJobOpts/share/runHLT_standalone.py
+++ b/Trigger/TriggerCommon/TriggerJobOpts/share/runHLT_standalone.py
@@ -360,9 +360,12 @@ DetFlags.BField_setOn()
 include ("RecExCond/AllDet_detDescr.py")
 
 if ConfigFlags.Trigger.doID:
+    include("InDetTrigRecExample/InDetTrigRec_jobOptions.py")
+    from InDetTrigRecExample.InDetTrigFlags import InDetTrigFlags
+    InDetTrigFlags.doPrintConfigurables = log.getEffectiveLevel() <= logging.DEBUG
     from InDetRecExample.InDetJobProperties import InDetFlags
     InDetFlags.doPrintConfigurables = log.getEffectiveLevel() <= logging.DEBUG
-    include( "InDetRecExample/InDetRecCabling.py" )
+    include("InDetRecExample/InDetRecConditionsAccess.py")
 
 if ConfigFlags.Trigger.doCalo:
     from TrigT2CaloCommon.TrigT2CaloCommonConfig import TrigDataAccess
@@ -377,17 +380,7 @@ if ConfigFlags.Trigger.doMuon:
 
     include ("MuonRecExample/MuonRecLoadTools.py")
 
-# ---------------------------------------------------------------
-# ID conditions
-# ---------------------------------------------------------------
-if ConfigFlags.Trigger.doID:
-    from InDetTrigRecExample.InDetTrigFlags import InDetTrigFlags
-    InDetTrigFlags.doPixelClusterSplitting = False
-
-    # PixelLorentzAngleSvc and SCTLorentzAngleSvc
-    from AthenaCommon.Include import include
-    include("InDetRecExample/InDetRecConditionsAccess.py")
-
+
 # ----------------------------------------------------------------
 # Pool input
 # ----------------------------------------------------------------
diff --git a/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Egamma/ElectronSequenceSetup.py b/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Egamma/ElectronSequenceSetup.py
index fd5ba647e3ff54a1f2ab18daccd7f9906ce2b8d9..8b0c0fc42cc2de9b0b2a3a929eda2c76a3a7f696 100644
--- a/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Egamma/ElectronSequenceSetup.py
+++ b/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Egamma/ElectronSequenceSetup.py
@@ -1,7 +1,6 @@
 #
 #  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 #
-
 from AthenaConfiguration.AllConfigFlags import ConfigFlags 
 
 # menu components   
@@ -32,8 +31,8 @@ def fastElectronSequence(ConfigFlags):
         if "InDetTrigTrackParticleCreatorAlg" in viewAlg.name():
             TrackParticlesName = viewAlg.TrackParticlesName
       
-    from TrigEgammaHypo.TrigEgammaFastElectronFexMTConfig import EgammaFastElectronFex_1
-    theElectronFex= EgammaFastElectronFex_1()
+    from TrigEgammaHypo.TrigEgammaFastElectronFexMTConfig import EgammaFastElectronFex_Clean
+    theElectronFex= EgammaFastElectronFex_Clean()
     theElectronFex.TrigEMClusterName = CaloMenuDefs.L2CaloClusters
     theElectronFex.TrackParticlesName = TrackParticlesName
     theElectronFex.ElectronsName=recordable("HLT_FastElectrons")
diff --git a/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Egamma/PrecisionTrackingSequenceSetup.py b/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Egamma/PrecisionTrackingSequenceSetup.py
index 59c102ccd2dd22384d2e86ab2d93739d2a51c59e..eb6308f137f084f89a2c889e1bd63cd3cdd049dc 100644
--- a/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Egamma/PrecisionTrackingSequenceSetup.py
+++ b/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Egamma/PrecisionTrackingSequenceSetup.py
@@ -45,7 +45,6 @@ def precisionTrackingMenuSequence(name):
     from TrigEgammaHypo.TrigEgammaPrecisionEtcutHypoTool import TrigEgammaPrecisionEtcutHypoToolFromDict
 
     thePrecisionEtcutHypo = TrigEgammaPrecisionEtcutHypoAlgMT(name+"precisionEtcutHypo")
-    thePrecisionEtcutHypo.CaloClusters = caloclusters
 
     return MenuSequence( Sequence    = sequence,
                          Maker       = precisionEtcutViewsMaker, 
diff --git a/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Electron/PrecisionElectronRecoSequences.py b/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Electron/PrecisionElectronRecoSequences.py
index d3afae969162959274015fe9cd4c0a5efcea1a76..1e795360a20c67197c4c4330e0dca7e97698b49d 100644
--- a/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Electron/PrecisionElectronRecoSequences.py
+++ b/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Electron/PrecisionElectronRecoSequences.py
@@ -81,3 +81,4 @@ def precisionElectronRecoSequence(RoIs):
     thesequence += PrecisionElectronSuperClusterMonAlgo
 
     return (thesequence, collectionOut)
+