diff --git a/Calorimeter/CaloRec/python/CaloBCIDCoeffsCondAlgDefault.py b/Calorimeter/CaloRec/python/CaloBCIDCoeffsCondAlgDefault.py
deleted file mode 100644
index fd0b51b5deacfdaea0d8ba102bb1dd51550f276d..0000000000000000000000000000000000000000
--- a/Calorimeter/CaloRec/python/CaloBCIDCoeffsCondAlgDefault.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# Copyright (C) 2002-2023 CERN for the benefit of the ATLAS collaboration
-
-# File: CaloRec/python/CaloBCIDCoeffsCondAlgDefault.py
-# Created: Mar 2020, sss
-# Purpose: Configure CaloBCIDCoeffsCondAlg.
-
-
-from AthenaCommon.AlgSequence import AthSequencer
-from AthenaCommon import CfgMgr
-
-
-def CaloBCIDCoeffsCondAlgDefault():
-    from AthenaCommon.GlobalFlags import globalflags
-    from IOVDbSvc.CondDB import conddb
-    from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
-
-    name = 'CaloBCIDCoeffsCondAlg'
-    condSeq = AthSequencer ('AthCondSeq')
-
-    if hasattr (condSeq, name):
-        return getattr (condSeq, name)
-
-    from LArRecUtils.LArMCSymCondAlg import LArMCSymCondAlgDefault
-    LArMCSymCondAlgDefault()
-
-    if globalflags.DataSource()=='data':
-        if athenaCommonFlags.isOnline:
-            conddb.addFolder("LAR_ONL","/LAR/LArPileup/LArPileupShape<key>LArShape32</key>",className="LArShape32MC")
-            conddb.addFolder("LAR_ONL","/LAR/LArPileup/LArPileupAverage",className="LArMinBiasAverageMC")
-        else:
-            conddb.addFolder("LAR_OFL","/LAR/ElecCalibOfl/LArPileupShape<key>LArShape32</key>",className="LArShape32MC")
-            conddb.addFolder("LAR_OFL","/LAR/ElecCalibOfl/LArPileupAverage",className="LArMinBiasAverageMC")
-
-        #For data, the regular shape is the 4-sample one used to Q-factor computation by LArRawChannelBuilder
-        #Here we need a 32-sample, symmetrized shape. Therfore the re-key'ing and the dedicated LArPileUpShapeSymCondAlg
-
-        from LArRecUtils.LArRecUtilsConf import LArSymConditionsAlg_LArMinBiasAverageMC_LArMinBiasAverageSym_ as LArMinBiasAverageSymAlg
-        if not hasattr (condSeq, 'LArPileUpAvgSymCondAlg'):
-            condSeq+=LArMinBiasAverageSymAlg("LArPileUpAvgSymCondAlg",ReadKey="LArPileupAverage",WriteKey="LArPileupAverageSym")
-
-        from LArRecUtils.LArRecUtilsConf import LArSymConditionsAlg_LArShape32MC_LArShape32Sym_ as LArShapeSymAlg
-        if not hasattr (condSeq, 'LArPileUpShapeSymCondAlg'):
-            condSeq+=LArShapeSymAlg("LArPileUpShapeSymCondAlg",ReadKey="LArShape32",WriteKey="LArShape32Sym")
-
-        ShapeKey = 'LArShape32Sym'
-    else: #MC case
-        from LArRecUtils.LArOFCCondAlgDefault import LArOFCCondAlgDefault
-        from LArRecUtils.LArAutoCorrTotalCondAlgDefault import  LArAutoCorrTotalCondAlgDefault
-        from LArRecUtils.LArADC2MeVCondAlgDefault import LArADC2MeVCondAlgDefault
-        LArADC2MeVCondAlgDefault()
-        LArAutoCorrTotalCondAlgDefault()
-        LArOFCCondAlgDefault()
-        conddb.addFolder("LAR_OFL","/LAR/ElecCalibMC/LArPileupAverage",className="LArMinBiasAverageMC")
-
-        from LArRecUtils.LArRecUtilsConf import LArSymConditionsAlg_LArMinBiasAverageMC_LArMinBiasAverageSym_ as LArMinBiasAverageSymAlg
-        if not hasattr (condSeq, 'LArPileUpAvgSymCondAlg'):
-            condSeq+=LArMinBiasAverageSymAlg("LArPileUpAvgSymCondAlg",ReadKey="LArPileupAverage",WriteKey="LArPileupAverageSym")
-
-        ShapeKey = 'LArShapeSym'
-
-    CaloBCIDCoeffsCondAlg = CfgMgr.CaloBCIDCoeffsCondAlg # CaloRec
-    alg = CaloBCIDCoeffsCondAlg (name,
-                                 MCSymKey = 'LArMCSym',
-                                 OFCKey = 'LArOFC',
-                                 ShapeKey = ShapeKey,
-                                 MinBiasAvgKey = 'LArPileupAverageSym',
-                                 OutputCoeffsKey = 'CaloBCIDCoeffs')
-    condSeq += alg
-    return alg
-
-    
diff --git a/Calorimeter/CaloRec/python/CaloBCIDLumiCondAlgDefault.py b/Calorimeter/CaloRec/python/CaloBCIDLumiCondAlgDefault.py
deleted file mode 100644
index 3f8356aae2afd830e8fee57ae871bbaad7ea4b97..0000000000000000000000000000000000000000
--- a/Calorimeter/CaloRec/python/CaloBCIDLumiCondAlgDefault.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# Copyright (C) 2002-2023 CERN for the benefit of the ATLAS collaboration
-
-# File: CaloRec/python/CaloBCIDLumiCondAlgDefault.py
-# Created: Mar 2020, sss
-# Purpose: Configure CaloBCIDLumiCondAlg.
-
-
-from AthenaCommon.AlgSequence import AthSequencer
-from AthenaCommon import CfgMgr
-
-
-def CaloBCIDLumiCondAlgDefault():
-    from AthenaCommon.GlobalFlags import globalflags
-
-    name = 'CaloBCIDLumiCondAlg'
-    condSeq = AthSequencer ('AthCondSeq')
-
-    if hasattr (condSeq, name):
-        return getattr (condSeq, name)
-
-    from CaloRec.CaloBCIDCoeffsCondAlgDefault import CaloBCIDCoeffsCondAlgDefault
-    CaloBCIDCoeffsCondAlgDefault()
-
-    if globalflags.DataSource()=='data':
-        from LumiBlockComps.LuminosityCondAlgDefault import LuminosityCondAlgDefault
-        LuminosityCondAlgDefault()
-    else: #MC case
-        from LumiBlockComps.BunchCrossingCondAlgDefault import BunchCrossingCondAlgDefault
-        BunchCrossingCondAlgDefault()
-
-    CaloBCIDLumiCondAlg = CfgMgr.CaloBCIDLumiCondAlg # CaloRec
-    alg = CaloBCIDLumiCondAlg (name,
-                               CoeffsKey = 'CaloBCIDCoeffs',
-                               BunchCrossingCondDataKey = 'BunchCrossingData',
-                               LuminosityCondDataKey = 'LuminosityCondData',
-                               isMC = globalflags.DataSource()!='data',
-                               OutputLumiKey = 'CaloBCIDLumi')
-    condSeq += alg
-    return alg
-
-    
diff --git a/Calorimeter/CaloTools/python/CaloEstimatedGainToolDefault.py b/Calorimeter/CaloTools/python/CaloEstimatedGainToolDefault.py
deleted file mode 100644
index b0cf68dc946f3a805e57c09491a58d881f4502ce..0000000000000000000000000000000000000000
--- a/Calorimeter/CaloTools/python/CaloEstimatedGainToolDefault.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
-#
-# File: CaloTools/python/CaloEstimatedGainToolDefault.py
-# Created: Aug 2019, sss
-# Purpose: Configure CaloEstimatedGainTool.
-#
-
-
-def CaloEstimatedGainToolDefault():
-    from LArCabling.LArCablingAccess import LArFebRodMapping, LArCalibIdMapping
-    LArFebRodMapping()
-    LArCalibIdMapping()
-
-    from TileConditions.TileInfoConfigurator import TileInfoConfigurator
-    TileInfoConfigurator()
-
-    from TileConditions.TileCondToolConf import \
-        getTileCondToolEmscale, \
-        getTileCondToolNoiseSample
-    from TileConditions.TileConditionsConf import TileCondIdTransforms
-
-    from LArRecUtils.LArADC2MeVCondAlgDefault import LArADC2MeVCondAlgDefault
-    adc2mev = LArADC2MeVCondAlgDefault()
-
-    from CaloTools.CaloToolsConf import CaloEstimatedGainTool
-    tool = CaloEstimatedGainTool ('CaloEstimatedGainTool',
-                                  ADC2MeVKey = adc2mev.LArADC2MeVKey,
-                                  TileCondIdTransforms = TileCondIdTransforms(),
-                                  TileCondToolEmscale = getTileCondToolEmscale(),
-                                  TileCondToolNoiseSample = getTileCondToolNoiseSample())
-    return tool
-
diff --git a/Control/AthenaCommon/python/AppMgr.py b/Control/AthenaCommon/python/AppMgr.py
index 221e3e4b20f3ce978e69022aa4273b1b5e6c64c5..f331af3f60509dba55f6fb95d67448a4f444bf9a 100755
--- a/Control/AthenaCommon/python/AppMgr.py
+++ b/Control/AthenaCommon/python/AppMgr.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2002-2023 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration
 
 # File: AthenaCommon/share/AppMgr.py
 # Author: Wim Lavrijsen (WLavrijsen@lbl.gov)
@@ -402,10 +402,8 @@ class AthAppMgr( AppMgr ):
             selfOptions = self.bootProps()
             for k,v in selfOptions.items(): setattr(self, k, v)
          svcMgr = self.serviceMgr()
-       # the following code is stopped when minimal to prevent the loading of
-       # ConfigurableDb when no configuration is done; FIXME: minimal is set
-       # to go when all code has been cleaned up
-         if self._opts and not self._opts.minimal:
+       # prevent the loading of ConfigurableDb when no configuration is done
+         if self._opts and not self._opts.fromdb:
             from AthenaCommon.ConfigurableDb import getConfigurable
             if not hasattr(svcMgr, 'JobOptionsSvc'):
                svcMgr += getConfigurable(self.JobOptionsSvcType)("JobOptionsSvc")
@@ -484,7 +482,6 @@ class AthAppMgr( AppMgr ):
           # fire ourselves up anew
             Logging.log.info( 'restarting athena.py from %s ... ', fn )
             sys.argv.insert( 1, fn )
-            sys.argv.append( '--minimal' )
             os.execvp( sys.argv[0], sys.argv )
 
          else:
diff --git a/Control/AthenaCommon/python/AthOptionsParser.py b/Control/AthenaCommon/python/AthOptionsParser.py
index 16c18f90cf2a307e75adfd7116b2056dd546bf99..7b6a565c8add873ff6d4a88b738b6497a1f21980 100644
--- a/Control/AthenaCommon/python/AthOptionsParser.py
+++ b/Control/AthenaCommon/python/AthOptionsParser.py
@@ -193,9 +193,6 @@ def getArgumentParser():
     g.add_argument('--dump-configuration', metavar='FILE', dest='config_dump_file',
                    help='dump an ASCII version of the configuration to %(metavar)s')
 
-    g.add_argument('--no-display', action='store_true',
-                   help='prompt, but no graphics display')
-
     # --------------------------------------------------------------------------
     g = parser.add_argument_group('Monitoring and debugging')
 
@@ -256,9 +253,6 @@ def getArgumentParser():
     g = parser.add_argument_group('Expert options')
     parser.expert_groups.append(g)
 
-    g.add_argument('--minimal', action='store_true',
-                   help="minimal athena setup (used by drop-and-reload)")
-
     g.add_argument('--cppyy_minvmem', type=float, dest='cppyy_minvmem',
                    help="artificial vmem bump around cppys's import")
 
diff --git a/Control/AthenaCommon/python/CfgGetter.py b/Control/AthenaCommon/python/CfgGetter.py
deleted file mode 100644
index dc32accf17522fe9b3eca63ea48553833bffbb87..0000000000000000000000000000000000000000
--- a/Control/AthenaCommon/python/CfgGetter.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
-
-
-# by default only export the getter functions
-__all__ = [ "getPublicTool", "getPublicToolClone", "getPrivateTool", "getPrivateToolClone",
-            "getService", "getServiceClone", "getAlgorithm", "getAlgorithmClone" ]
-
-from AthenaCommon.ConfiguredFactory import ConfiguredFactory
-
-# (private) storage of tools and services
-_configFactory = ConfiguredFactory( propertiesToExcludeIfDefaultValue = [ "EvtStore", "DetStore", "decSvc", "AcceptAlgs", "RequireAlgs", "VetoAlgs" ] )
-
-
-# forward public user access functions
-getPublicTool         = _configFactory.getPublicTool
-getPublicToolClone    = _configFactory.getPublicToolClone
-getPrivateTool        = _configFactory.getPrivateTool
-getPrivateToolClone   = _configFactory.getPrivateToolClone
-getService            = _configFactory.getService
-getServiceClone       = _configFactory.getServiceClone
-getAlgorithm          = _configFactory.getAlgorithm
-getAlgorithmClone     = _configFactory.getAlgorithmClone
-
-addTool               = _configFactory.addTool
-addToolClone          = _configFactory.addToolClone
-addService            = _configFactory.addService
-addServiceClone       = _configFactory.addServiceClone
-addAlgorithm          = _configFactory.addAlgorithm
-addAlgorithmClone     = _configFactory.addAlgorithmClone
-
-addTypesToExcludeIfDefaultValue     = _configFactory.addTypesToExcludeIfDefaultValue
-addNamesToExcludeIfDefaultValue     = _configFactory.addNamesToExcludeIfDefaultValue
-addFullNamesToExcludeIfDefaultValue = _configFactory.addFullNamesToExcludeIfDefaultValue
-addPropertiesToExcludeIfDefault     = _configFactory.addPropertiesToExcludeIfDefault
-addTypesToSkipIfNotAvailable        = _configFactory.addTypesToSkipIfNotAvailable
-addNamesToSkipIfNotAvailable        = _configFactory.addNamesToSkipIfNotAvailable
-addFullNamesToSkipIfNotAvailable    = _configFactory.addFullNamesToSkipIfNotAvailable
-addTypesOnlyToSkip                  = _configFactory.addTypesOnlyToSkip
-
-setLogLevel = _configFactory.setLogLevel
-printStats  = _configFactory.printStats
diff --git a/Control/AthenaCommon/python/ConfiguredFactory.py b/Control/AthenaCommon/python/ConfiguredFactory.py
deleted file mode 100644
index 26d4c637e64af815a3dab1040d0fc71ef8334f3b..0000000000000000000000000000000000000000
--- a/Control/AthenaCommon/python/ConfiguredFactory.py
+++ /dev/null
@@ -1,1299 +0,0 @@
-# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
-
-__author__ = 'Martin Woudstra <martin.woudstra@cern.ch>'
-
-import copy,os,weakref
-
-from GaudiKernel.GaudiHandles import \
-     GaudiHandle,GaudiHandleArray,\
-     PublicToolHandle,PublicToolHandleArray,\
-     PrivateToolHandle,PrivateToolHandleArray,\
-     ServiceHandle,ServiceHandleArray
-
-from AthenaCommon.Configurable import Configurable
-from AthenaCommon import ConfigurableDb
-from AthenaCommon.AppMgr import ToolSvc, ServiceMgr  # noqa: 401
-from AthenaCommon.Logging import logging
-
-
-# Classes-to-be-raised-as-errors
-# NB probably should go in another file somewhere in AthenaCommon
-
-class ConfigurationError(RuntimeError):
-    pass
-
-class ConfigurableInstanceNotFoundError(ConfigurationError):
-    pass
-
-class ConfigurableClassNotFoundError(ConfigurationError):
-    pass
-
-class ConfigurableTypeError(ConfigurationError):
-    pass
-
-
-#
-# Functions for Configurables. These should ideally be member functions of class Configurable
-#
-def getProperty(configurable,property):
-    """Helper function to get a property of a configurable: the set value, user set default or C++ default."""
-    try:
-        # set value
-        return getattr(configurable,property)
-    except AttributeError:
-        retVal = configurable.getDefaultProperty(property)
-        if retVal is None: raise
-        return retVal
-
-def getPropertyNoAutoRetrieve(configurable,property):
-    """Helper function to get a property of a configurable: the set value, user set default or C++ default."""
-    if hasPropertyBeenSet(configurable,property):
-        return getattr(configurable,property)
-    else:
-        retVal = configurable.getDefaultProperty(property)
-        if retVal is None: raise AttributeError("%r does not have attribute %s" % (configurable.__class__, property) )
-        return retVal
-
-
-def getPropertyType(configurable,property):
-    try:
-        return type(configurable._properties[property].default)
-    except KeyError:
-        raise AttributeError("Configurable %r does not have property %s" % (configurable.__class__.__name__,property) ) 
-    
-
-def hasPropertyBeenSet(configurable,property):
-    try:
-        proxy = configurable._properties[property]
-    except KeyError:
-        raise AttributeError("Configurable %r does not have property %s" % (configurable.__class__.__name__,property) ) 
-
-    return configurable in proxy.history
-
-
-#
-# Handy function to get type and name from different kinds of objects: string,Configurable,GaudiHandle
-#
-def getTypeAndName(objectWithTypeAndName):
-    """Return tuple (\"<type>\",\"<name>\") extracted from <objectWithTypeAndName> string or Configurable or GaudiHandle.
-    <objectWithTypeAndName> format if string: \"<type>/<name>\". Short format: \"<name>\" assumes name == type and returns (\"<name>\",\"<name>\" """
-    if type(objectWithTypeAndName) == str:
-        parts = objectWithTypeAndName.split('/')
-        typePart = parts[0]
-        if len(parts) >= 2:
-            namePart = parts[1]
-        else:
-            namePart = typePart
-    else:
-        typePart = objectWithTypeAndName.getType()
-        namePart = objectWithTypeAndName.getName()
-
-    return (typePart,namePart)
-        
-
-#
-# Handy function to get the full name from different kinds of objects: string,Configurable,GaudiHandle
-#
-def getFullName(objectWithTypeAndName):
-    if type(objectWithTypeAndName) == str:
-        return objectWithTypeAndName
-    else:
-        return objectWithTypeAndName.getFullName()
-
-
-# helper function: add element to list only if it is not yet present. Should probably go elsewhere
-def addToListIfNotIn(theList,elementsToAdd):
-    if not isinstance( elementsToAdd, (list,tuple) ): # also covers GaudiHandleArrays (since they derive from list)
-        elementsToAdd = [ elementsToAdd ]
-    for e in elementsToAdd:
-        if e not in theList: theList.append(e)
-
-
-## @brief A helper class that contains all neccessary information to create a configurable instance.
-#
-# Arguments to the constructor:
-# @arg \c configuredMaker is a string of the form \"package.module.maker\", where the "maker" is either
-# a class derived from auto-generated configurable, a factory function (creating a configurable instance),
-# or the name of a C++ class. A configurable instance is also supported temporarily
-# @arg \c configurableArgs are the (optional) named arguments (name=value) which will be passed on to the configuredMaker
-# function or class __init__
-class ConfiguredPlaceHolder(object):
-    def __init__(self,configuredMaker,**configurableArgs):
-        self.configurableArgs = configurableArgs
-        self._originalMaker = configuredMaker
-        self._setMaker( configuredMaker )
-
-
-    def _setMaker(self,configuredMaker):
-        # set optional member based on type of configurableFactory
-        if type(configuredMaker) == str:
-            self._configuredMakerName = configuredMaker
-        elif isinstance(configuredMaker,Configurable):
-            self._configurableInstance  = weakref.ref(configuredMaker)
-            self._configurableClass     = configuredMaker.__class__
-        elif type(configuredMaker).__name__ == "function":
-            self._configurableFunction = configuredMaker
-        elif issubclass(configuredMaker,Configurable):
-            self._configurableClass     = configuredMaker
-        else:
-            raise ConfigurationError("%r is not a Configurable class, a string (class name), a factory function or a configurable instance",
-                                     configuredMaker)
-        
-
-    def _getObject(self,fullName):
-        parts = fullName.split('.')
-        objName = parts[-1]
-        if len(parts) == 1:
-            # assume it's a name of a Configurable class
-            conf = ConfigurableDb.getConfigurable(objName)
-            if conf is None:
-                raise ConfigurableClassNotFoundError("Configurable for class %s not found" % (objName) )
-            return conf
-
-        else:
-            # assume it's the name of an object in a module
-            mods = parts[:-1]
-            modName = '.'.join(mods)
-            # load the top-level module
-            mod = __import__(modName,globals(),locals(),[objName])
-            return getattr(mod,objName)
-            
-
-    def _getConfigurableMaker(self):
-        # first try class
-        try:
-            return self._configurableClass
-        except AttributeError:
-            pass
-
-        # then try factory function
-        try:
-            return self._configurableFunction
-        except AttributeError:
-            pass
-
-        # create one of the two
-        try:
-            name = self._configuredMakerName
-        except AttributeError:
-            pass
-        else:
-            self._setMaker( self._getObject(name) )
-            # now try again with updated maker
-            return self._getConfigurableMaker()
-            
-        raise ConfigurationError("Could not determine configuredMaker for %r" % (self._originalMaker) )
-
-
-    # read-only access
-    configuredMaker = property( _getConfigurableMaker )
-
-
-    def getMakerType(self):
-        if type(self._originalMaker) == str:
-            return "%r" % (self._originalMaker)  # for printout
-        elif isinstance(self._originalMaker,Configurable):
-            return "<instance \'%s.%s\'>" % (self._originalMaker.__module__,self._originalMaker.__class__.__name__)  # for printout
-        elif type(self._originalMaker).__name__ == "function":
-            return "<function \'%s.%s\'>" % (self._originalMaker.__module__,self._originalMaker.__name__)  # for printout
-        elif issubclass(self._originalMaker,Configurable):
-            return "<class \'%s.%s\'>" % (self._originalMaker.__module__,self._originalMaker.__name__)  # for printout
-
-        return "%r" % self._originalMaker
-
-
-    def shortDescription(self,name):
-        typ = None
-        indent = "  "
-        theInstance = self.getInstance()
-        if theInstance is not None:
-            typ = theInstance.getType()
-            indent = "* "
-        else:
-            try:
-                typ = self._configurableClass.getType()
-            except AttributeError:
-                try:
-                    makerName = self._configuredMakerName
-                    if '.' not in makerName: typ = makerName
-                except AttributeError:
-                    pass
-
-        mess = "%s%r  maker=%s" % (indent, name, self.getMakerType())
-        if typ is not None: mess += "  type=%s" % (typ,)
-
-        return mess
-
-
-    def createInstance(self,name,argsOverride=None):
-        theMaker = self._getConfigurableMaker()
-
-        args = dict(self.configurableArgs)
-        if argsOverride: args.update(argsOverride)
-        # make an instance of the class
-        conf = theMaker(name,**args)
-
-        # store for later quick access
-        self._configurableInstance = weakref.ref(conf)
-
-        return conf
-        
-
-    def hasInstance(self):
-        return self.getInstance() is not None
-
-
-    def getInstance(self):
-        try:
-            return self._configurableInstance() # weakref: may return None is object was deleted
-        except AttributeError:
-            return None
-
-
-    ## @brief make a clone of this PlaceHolder, with a new name and possibly updated arguments
-    def clone(self,**kwargs):
-        args = dict(self.configurableArgs) # make a copy
-        args.update(kwargs)
-        return ConfiguredPlaceHolder(self._originalMaker, **args)
-        
-
-# end of class ConfiguredPlaceHolder
-
-#
-# Helper class to keep track of the full hierarchy of failed properties
-#
-class PropertyStack(object):
-    def __init__(self):
-        self._names = [] # list of strings to get unique full property hierarchy name
-        self._props = [] # list of strings
-
-    def push(self,parent,propName):
-        if not self._names:
-            # top one: start with instance name
-            self._names.append( parent.getJobOptName() )
-        # all: use property name
-        self._names.append( propName )
-
-        fullPropName = "%s(%r).%s" % (parent.getType(), parent.getJobOptName(), propName)
-        self._props.append(fullPropName)
-##        print "Adding to property stack %i: %s" % (self.depth(),fullPropName)
-
-
-    def pop(self):
-        self._names.pop()
-        self._props.pop()
-
-    def depth(self):
-        return len(self._props)
-
-    def fullHierarchyName(self):
-        return '.'.join(self._names)
-
-    def toString(self,indent=""):
-        header =  "%s%s property stack details (%i deep, top first):" % (indent,self.fullHierarchyName(),len(self._props))
-        lines = []
-        # print in reverse order (deepest first)
-        for p in self._props:
-            indent+=" "
-            lines.append( indent+p )
-        sep = " ="+os.linesep
-        return header + os.linesep + sep.join(lines)
-
-
-## @brief track some created instances and keep track of the properties they were assigned to
-class ConfigurableInstanceTracker(object):
-    def __init__(self,fullName):
-        self._confName = fullName
-        self._properties = {} # list of all PropertyStacks where this same instance was assigned to
-
-    def addProperty(self,propStack):
-        fullName = propStack.fullHierarchyName()
-        if fullName not in self._properties:
-            self._properties[fullName] = copy.deepcopy(propStack)
-
-    def getType(self):
-        return getTypeAndName(self._confName)[0]
-
-    def __str__(self):
-        lines = []
-        lines.append( "%r as value of the following %i properties:" % \
-                      (self._confName, len(self._properties)) )
-        for p in self._properties.values():
-            lines.append( p.toString("    ") )
-
-        return os.linesep.join(lines)
-
-
-
-## @brief A gathering place where fully configured configurables can be retrieved by name.
-#
-# This is an intermediate solution between the ToolSvc of the old days that accepted anything without
-# any checks, including tools that have not been explicitly added, and the current ToolSvc which only
-# allows access to the tools that have been explicitly added.
-# The ConfiguredFactory keeps a list of recipes to create tools or services by instance name.
-# The tools/services are only created and added to ToolSvc and/or ServiceMgr (where applicable) when
-# someone is requesting the tool via one of the get*() functions or when properties of a requested
-# tool are resolved (i.e. recursively configured).
-# This helps to keep the ToolSvc/ServiceMgr clean by having only the tools/services that are needed.
-#
-# One can declare a 'recipe' for tools and services by adding:
-# - the instance maker, the instance name and its properties to set
-# The maker is given in the form of a string ("module.maker") which points to a Configurable derived class,
-# a Configurable factory function or a Configurable instance.
-# Clients can then request a fully configured tool (public or private, or even a service) by its name,
-# just as with ToolSvc, although the interface is different: factory.getPublicTool("toolName").
-#
-# The configurable is not instantiated immediately when added, but the information is stored in the dedicated
-# class ConfiguredPlaceHolder, which will instantiate the configurable on demand.
-# The main benefit is for the clients of the tools. When a client (algorithm, service, tool) needs a tool
-# (or service), it can just ask the factory to create the configured tool (by name), without having to know
-# the details of how to configure it, because the configuration has been setup by the expert of the tool.
-# The client does not need to know the name of the configurable class, and there is a single place where
-# to find it. The client only needs to know the instance name (which should be documented!).
-#
-# When a tool is being created, all its ToolHandle and ServiceHandle properties are being resolved, meaning
-# that the tools/services they refer to are being configured by the ConfiguredFactory. What happens if
-# the tool/service correspondig to the Handle properties is not found in the ConfiguredFactory depends
-# on the settings of the Factory.
-# Example:<br>
-# <code>
-# confFactory.addTool( "SlaveToolPackage.SlaveToolModule.SlaveToolMaker, "SlaveTool", beObedient=True )<br>
-# confFactory.addTool( "MasterToolPackage.MasterToolModule.MasterToolMaker", "TheMaster", doTheWorkTool = "SlaveTool" )<br>
-# </code>
-# where doTheWorkTool is a ToolHandle property of class MasterTool.
-#
-# Other features:
-# - clone a configurable: make a copy of a configured configurable with a different name.
-#   This is useful if one wants to make a small change w.r.t. an already configured tool.
-#   There are functions line addToolClone() and getPublicToolClone()
-#
-class ConfiguredFactory(object):
-
-    ## @arg @c typesToExcludeDefaults : list of strings with name of configurable type that should not be resolved
-    def __init__(self,
-                 configureSetDependencies=True,
-                 configureDefaultDependencies=True,
-                 configureDependenciesAlsoIfDefaultInstance=False,
-                 requireAllDependencies=False,
-                 typesToExcludeIfDefaultValue=[],
-                 namesToExcludeIfDefaultValue=[],
-                 fullNamesToExcludeIfDefaultValue=[],
-                 propertiesToExcludeIfDefaultValue=[],
-                 typesToSkipIfNotAvailable=[],
-                 namesToSkipIfNotAvailable=[],
-                 fullNamesToSkipIfNotAvailable=[],
-                 tryDefaultTool=False,
-                 tryDefaultService=False,
-                 tryDefaultAlgorithm=False ):
-        
-        self._availableConfigurables = {} # dict with ConfiguredPlaceHolders
-        self._configSetDependencies      = configureSetDependencies
-        self._configDefaultDependencies  = configureDefaultDependencies
-        self._configDependenciesAlsoIfDefaultInstance = configureDependenciesAlsoIfDefaultInstance
-        self._requireAllDependencies = requireAllDependencies
-        self._tryDefaultTool = tryDefaultTool
-        self._tryDefaultService = tryDefaultService
-        self._tryDefaultAlgorithm = tryDefaultAlgorithm
-        self._typesToExcludeDefaults        = copy.copy(typesToExcludeIfDefaultValue)
-        self._namesToExcludeDefaults        = copy.copy(namesToExcludeIfDefaultValue)
-        self._fullNamesToExcludeDefaults    = copy.copy(fullNamesToExcludeIfDefaultValue)
-        self._propertiesToExcludeDefaults   = copy.copy(propertiesToExcludeIfDefaultValue)
-        self._typesToSkipIfNotAvailable     = copy.copy(typesToSkipIfNotAvailable)
-        self._namesToSkipIfNotAvailable     = copy.copy(namesToSkipIfNotAvailable)
-        self._fullNamesToSkipIfNotAvailable = copy.copy(fullNamesToSkipIfNotAvailable)
-        self._typesOnlyToSkip = []
-        
-        self._hasReadDB = False
-
-        # some members for printout
-        # list of default configurable instances that have been created
-        self._createdDefaultInstances = {}
-        self._defaultTypeNotFound= {}
-        # list of configurable types/names that have been excluded from recursive config
-        self._excludedDependencies = []
-        # list of configurable members whose instantiation failure was ignored
-        # One done for C++ defaults, and only if requireAllDependencies=False (otherwise exception is thrown)
-        self._ignoredDependencyErrors = []
-        # list of properties where the default has not been explicitly set
-        self._excludedDefaultProperties = {}
-        # list of default instances that we not created because they had only the type
-        self._excludedOnlyTypes = []
-
-        
-    def logger(self):
-        return logging.getLogger(self.__class__.__name__)
-
-    def setLogLevel(self,level):
-        self.logger().setLevel(level)
-
-    ## Add types to exclude if the value if the C++ default
-    # Argument \<excludeList\> is list of names (strings) of configurable types (C++ class names)
-    # that should not be resolved. A single type name (a string) is also supported.
-    def addTypesToExcludeIfDefaultValue(self,excludeList):
-        addToListIfNotIn(self._typesToExcludeDefaults, excludeList)
-
-    ## Add instance names to exclude if the value if the C++ default
-    # Argument \<excludeList\> is list of names (strings) of configurable instances
-    # that should not be resolved. A single instance name (a string) is also supported.
-    def addNamesToExcludeIfDefaultValue(self,excludeList):
-        addToListIfNotIn(self._namesToExcludeDefaults, excludeList)
-        
-    # Add to list of full names ("type/name") to exclude if the value if the C++ default
-    # This is mainly to exclude defaults which point to an interface type: fullname = "ISomeInterface" (should be fixed in C++ !!!)
-    def addFullNamesToExcludeIfDefaultValue(self,excludeList):
-        addToListIfNotIn(self._fullNamesToExcludeDefaults, excludeList)
-
-    # Add to names of properties not to resolve if their value is the C++ default
-    def addPropertiesToExcludeIfDefault(self,excludeProps):
-        addToListIfNotIn(self._propertiesToExcludeDefaults, excludeProps)
-
-    # Add types to ignore if they are not found in the database
-    def addTypesToSkipIfNotAvailable(self,skipList):
-        addToListIfNotIn(self._typesToSkipIfNotAvailable,skipList)
-                
-    # Add names to ignore if they are not found in the database
-    def addNamesToSkipIfNotAvailable(self,skipList):
-        addToListIfNotIn(self._namesToSkipIfNotAvailable,skipList)
-                
-    # Add full names to ignore if they are not found in the database
-    def addFullNamesToSkipIfNotAvailable(self,skipList):
-        addToListIfNotIn(self._fullNamesToSkipIfNotAvailable,skipList)
-
-    def addTypesOnlyToSkip(self,typeList):
-        addToListIfNotIn(self._typesOnlyToSkip,typeList)
-                
-    def _isExcludedDefaultValue(self,propValue):
-        propType,propName = getTypeAndName(propValue)
-        fullName = getFullName(propValue)
-        if propType in self._typesToExcludeDefaults or \
-               propName in self._namesToExcludeDefaults or \
-               fullName in self._fullNamesToExcludeDefaults:
-            # keep list for printout at the end
-            excluded = "%s(%r)" % (propType,propName)
-            addToListIfNotIn(self._excludedDependencies, excluded)
-                
-            return True
-
-        return False
-
-
-    def _isExcludedDefaultProperty(self,propName):
-        if propName in self._propertiesToExcludeDefaults:
-            n = self._excludedDefaultProperties.setdefault(propName,0)
-            n += 1
-            self._excludedDefaultProperties[propName] = n
-            return True
-
-        return False
-
-
-    def _skipIfNotAvailable(self,propValue):
-        propType,propName = getTypeAndName(propValue)
-        fullName = getFullName(propValue)
-        if propType in self._typesToSkipIfNotAvailable or \
-               propName in self._namesToSkipIfNotAvailable or \
-               fullName in self._fullNamesToSkipIfNotAvailable:
-            # keep list for printout at the end
-            excluded = "%s(%r)" % (propType,propName)
-            addToListIfNotIn(self._excludedDependencies, excluded)
-                
-            return True
-
-        return False
-
-
-    # this is really to counteract a design mistake of (C++) GaudiHandles:
-    # the default type/name is the tool/service/alg type of the Handle
-    def _excludeIfOnlyType(self,objectWithTypeAndName):
-        if type(objectWithTypeAndName) == str:
-            parts = objectWithTypeAndName.split('/')
-            if len(parts) >= 2: return False
-            # now it has only 1 part, either type or name
-            if '::' in objectWithTypeAndName or objectWithTypeAndName in self._typesOnlyToSkip:
-                addToListIfNotIn(self._excludedOnlyTypes, objectWithTypeAndName)
-                return True
-
-            # in case of doubt
-            return False
-
-        else:
-            return self._excludeIfOnlyType(objectWithTypeAndName.getFullName())
-
-
-    ## @arg @c parent : the parent (@c ConfiguredPlaceHolder) of the property
-    def _resolveConfigurable(self,parent,propertyName,propertyValue,getterFunc,checkType,indent,propStack):
-        if propertyValue is None: return None
-        parentType=parent.getType()
-        parentName=parent.getName()
-        isPropDefault = not hasPropertyBeenSet(parent,propertyName)
-
-        propertyType = getPropertyType(parent,propertyName)
-        isPropertyArray = issubclass(propertyType,GaudiHandleArray)
-        isValueArray = type(propertyValue) in (list,tuple) or isinstance(propertyValue,GaudiHandleArray)
-
-        resolvingString = indent+"%s "
-        # for debug printout
-        if self.logger().isEnabledFor(logging.VERBOSE):
-            if ( isPropDefault ):
-                resolvingString += "default "
-            else:
-                resolvingString += "set "
-            resolvingString += "%s(%r).%s = %r" % (parentType,parentName,propertyName,propertyValue)
-
-        if propStack is not None: propStack.push(parent,propertyName)
-
-        try:
-            if isValueArray:
-                self.logger().verbose(resolvingString, "Resolving")
-                newPropertyValue = [] # propertyType()
-                for v in propertyValue:
-                    conf = self._resolveConfigurable(parent,propertyName,v,getterFunc,checkType,indent,propStack)
-                    if conf:
-                        newPropertyValue.append( conf )
-                    else: # keep existing one
-                        newPropertyValue.append( v )
-
-            else: # not an array
-                if isPropDefault and self._isExcludedDefaultValue(propertyValue):
-                    self.logger().verbose(resolvingString, "Keeping existing")
-                    return propertyValue
-                if not isPropertyArray: self.logger().verbose(resolvingString, "Resolving")
-
-                if isinstance(propertyValue,Configurable):
-                    # do not change already assigned configurables
-                    self.logger().verbose(resolvingString, "Keeping existing")
-                    return propertyValue
-                elif isinstance(propertyValue,GaudiHandle) or type(propertyValue) == str:
-                    doCheck=checkType
-                    # tryDefaultConfigurable None instead of False to always retrieve it for the sake of dependencies
-                    tryDefault=self._configDependenciesAlsoIfDefaultInstance or None
-                else:
-                    raise TypeError("%r is not a Configurable, Handle, HandleArray, string or list" % propertyValue)
-
-                nextIndent=indent+" "
-                newPropertyValue = getterFunc(propertyValue, tryDefaultConfigurable=tryDefault, checkType=doCheck, allowNone=True,
-                                              indent=nextIndent,propStack=propStack)
-
-                # for printout at the end
-                if propStack is not None:
-                    valueType,valueName = getTypeAndName(propertyValue)
-                    defaultInstance = self._createdDefaultInstances.get(valueName)
-                    if defaultInstance:
-                        defaultInstance.addProperty(propStack)
-                    typeNotFound = self._defaultTypeNotFound.get(valueName)
-                    if typeNotFound:
-                        typeNotFound.addProperty(propStack)
-
-
-            # for debug printout
-            if self.logger().isEnabledFor(logging.VERBOSE) and \
-                   newPropertyValue is not None and (not isPropertyArray or isValueArray):
-                resolvedString  = indent+"Resolved  "
-                if ( isPropDefault ):
-                    resolvedString  += "default "
-                else:
-                    resolvedString  += "set "
-                # add array name by hand to see the resolving (PublicToolHandleArray will only show tool names)
-                if isPropertyArray and isValueArray:
-                    newValueRepr = "%s(%r)" % (propertyType.__name__, newPropertyValue)
-                else:
-                    newValueRepr = "%r" % newPropertyValue
-                resolvedString  += "%s(%r).%s = %s" % (parentType,parentName,propertyName,newValueRepr)
-                self.logger().verbose(resolvedString)
-
-            if propStack is not None: propStack.pop()
-
-            # return the new value (or old one)
-            if newPropertyValue is None:
-                # Not available, just keep the old one
-                return propertyValue
-            else:
-                return newPropertyValue
-
-        except ConfigurableInstanceNotFoundError:
-            if propStack is not None: propStack.pop()
-            if self._requireAllDependencies:
-                raise
-
-            # Not available, just keep the old one
-            return propertyValue
-
-        except ConfigurationError as err:
-            if propStack is not None: propStack.pop()
-            if self._requireAllDependencies or not isPropDefault:
-                raise
-
-            ignored = "%s(%r).%s = %r\n    Error message: %s" % (parentType,parentName,propertyName,propertyValue,err)
-            if ignored not in self._ignoredDependencyErrors:
-                self._ignoredDependencyErrors.append(ignored)
-            self.logger().debug( "Ignoring C++ default config error: %s", ignored)
-            # Do not include ignored values in return list
-            return None
-        
-
-        # I should never get here
-        raise RuntimeError("Logical Bug in Code. This line should not have been executed")
-
-
-
-
-
-    ## @arg @c parent : the parent (@c ConfiguredPlaceHolder) of the property
-    def _resolveProperty(self,parent,propertyName,propertyValue,checkType=False,indent="",propStack=None):
-        """For Tool/ServiceHandles: turn name of tool/service into configurable instances.
-        For any other, return None so it does not get re-assigned"""
-        if propertyValue is None: return None # valid also for Handles. Nothing special to do.
-        cls = parent.__class__
-        try:
-            cxxdefault = cls._properties[propertyName].default
-        except KeyError:
-            raise AttributeError("%s does not have property %s" % (cls.getType(),propertyName))
-
-        if isinstance(cxxdefault,(PublicToolHandle,PublicToolHandleArray)):
-            confGetter = self.getPublicTool
-        elif isinstance(cxxdefault,(PrivateToolHandle,PrivateToolHandleArray)):
-            confGetter = self.getPrivateTool
-        elif isinstance(cxxdefault,(ServiceHandle,ServiceHandleArray)):
-            confGetter = self.getService
-        else: # any other type of property left untouched
-            return propertyValue
-
-        return self._resolveConfigurable(parent,propertyName,propertyValue,confGetter,checkType,indent,propStack)
-
-
-    ## @arg @c parent : the parent (@c ConfiguredPlaceHolder) of the property
-    def _resolveProperties(self,parent,props,indent="",propStack=None):
-        newProps = {}
-        propnames = list(props.keys())
-        propnames.sort()
-        for name in propnames:
-            value = props[name]
-            if value != Configurable.propertyNoValue:
-                newProps[name] = self._resolveProperty( parent, name, value, indent, propStack )
-
-        return newProps
-
-
-    def _resolveAllProperties(self,conf,indent="",propStack=None):
-
-        defaultProps = conf.getDefaultProperties()
-        propnames = list(defaultProps.keys())
-        propnames.sort()
-        for name in propnames:
-            value = defaultProps[name]
-            # skip non-configurables, since they don't need to be resolved
-            propertyType = getPropertyType(conf,name)
-            if not issubclass(propertyType,(GaudiHandle,GaudiHandleArray,Configurable)):
-                continue
-
-            isPropSet = hasPropertyBeenSet(conf,name)
-            if isPropSet:
-                if not self._configSetDependencies: continue
-            else:
-                if not self._configDefaultDependencies: continue
-                if self._isExcludedDefaultProperty( name ): continue
-
-            # to avoid auto-retrieve of private AlgTools, only call getattr() if already set elsewhere
-            if isPropSet:
-                value = getattr(conf,name)
-            else:
-                value = defaultProps[name]
-
-            resolvedValue = self._resolveProperty(conf,name,value,checkType=isPropSet,indent=indent,propStack=propStack)
-            if resolvedValue is None or resolvedValue == value:
-                self.logger().verbose("%sProperty %s.%s=%r left unchanged", indent, conf.getFullName(),name, value)
-                continue
-
-            try:
-                setattr(conf,name,resolvedValue)
-            except Exception as err:
-                raise ConfigurationError("ERROR in setting %s(%r).%s = %r\n  Exception raised: %s" %
-                                         (conf.getType(),conf.getName(),name,value,err) )
-
-        
-    def numberOfInstances(self):
-        n = 0
-        for c in self._availableConfigurables.values():
-            if c.hasInstance(): n += 1
-
-        return n
-
-
-    def numberOfConfigurables(self):
-        return len(self._availableConfigurables)
-    
-
-    def checkProperties(self, configInstance, propsToCheck):
-        # check that any newly requested properties are consistent with existing tool
-        problem = ""
-        oldValue = None
-        problemName = None
-        newValue = None
-        for n,v in propsToCheck.items():
-            try:
-                oldProp = getPropertyNoAutoRetrieve(configInstance,n)
-            except AttributeError:
-                continue
-            
-            if hasattr(oldProp,'getName') and hasattr(oldProp,'getType'):
-                newType,newName = getTypeAndName(v)
-                oldType,oldName = getTypeAndName(oldProp)
-                if oldName != newName or (newType != newName and oldType != newType):
-                    oldValue = oldProp
-                    problemName = n
-                    newValue = v
-                    break
-            else:
-                if oldProp != v:
-                    oldValue = oldProp
-                    problemName = n
-                    newValue = v
-                    break
-
-        if problemName is not None:
-            problem = "Existing property value %s(%r).%s = %r inconsistent with required property value %r" % \
-                      (configInstance.getType(),configInstance.getName(),problemName,oldValue,newValue)
-
-        return problem
-
-
-    def _addConfigured(self, configuredMaker, instanceName, **configurableArgs):
-        if instanceName in self._availableConfigurables:
-            raise RuntimeError("Configurable with name %r already declared in factory" % (instanceName) )
-
-        conf = ConfiguredPlaceHolder( configuredMaker, **configurableArgs )
-        self._availableConfigurables[instanceName] = conf
-        self.logger().verbose("Added configurable maker %s", conf.shortDescription(instanceName))
-
-
-    def _addConfiguredClone(self, originalName, cloneName, **kwargs):
-        if originalName == cloneName:
-            raise RuntimeError("Can not make clone %r with same name as original" % (cloneName) )
-
-        orig = self._availableConfigurables.get(originalName)
-        if orig is None:
-            raise RuntimeError("Can not make clone %r of non-existing original %r" % (cloneName,originalName))
-
-        if cloneName in self._availableConfigurables:
-            raise RuntimeError("Configurable with name %r already declared in factory" % (cloneName) )
-
-        conf = orig.clone(**kwargs)
-        self._availableConfigurables[cloneName] = conf
-        self.logger().verbose("Added configurable maker clone %s from original %r", conf.shortDescription(cloneName), originalName)
-
-
-    def checkConfigurableType(self,conf,requiredType):
-        """Check that the configurable <conf> has the correct type (conf.getType()). C++ and equivalent python type names can be mixed."""
-        typeRequested = requiredType.translate ( ConfigurableDb._transtable )
-        typeFound     = conf.getType().translate ( ConfigurableDb._transtable )
-        if typeFound != typeRequested:
-            raise TypeError("Requested configurable %s(%r) already exists with different type: %s" % (typeRequested,conf.getName(),typeFound))
-        
-
-    def getConfigured(self, instanceName, tryDefaultConfigurable=None, checkType=True, indent="", propStack = None):
-        if not instanceName: return None
-        if not self._hasReadDB: self.read()
-        # first look in available instances
-        confType,confName = getTypeAndName(instanceName)
-        if not confName: return None
-        self.logger().verbose("%sTrying to instantiate %s(%r)",indent,confType,confName)
-        nextIndent=indent+" "
-        if propStack is None: propStack = PropertyStack()
-        conf = None
-        # look in predefined configurables
-        maker = self._availableConfigurables.get(confName)
-        if maker is not None:
-            conf = maker.getInstance()
-            if conf is not None:
-                self.logger().debug("%sUsing already configured %s(%r)",indent,conf.getType(),conf.getName())            
-            else: # create it
-                conf = maker.createInstance(confName)
-                self.logger().verbose("%sInstantiated %s(%r). Starting to resolve dependencies",indent,conf.getType(),conf.getName())
-                self._resolveAllProperties(conf,nextIndent,propStack)
-                if checkType and confType != confName:
-                    # explicit type given, check that existing type is as requested
-                    self.checkConfigurableType(conf,confType)
-
-                self.logger().debug("%sInstantiated configured %s(%r)",indent,conf.getType(),conf.getName())
-
-        else:
-            # look in default configurables (if requested)
-            if tryDefaultConfigurable or tryDefaultConfigurable is None:
-                # do not make defaults if only the type is given
-                if self._excludeIfOnlyType(instanceName):
-                    self.logger().verbose("%sNot instantiated because %r is only a type",indent,instanceName)
-                    return None
-                # skip explicitly excludes ones
-                if self._skipIfNotAvailable(instanceName):
-                    self.logger().verbose("%sNot instantiated because %r is not in database",indent,instanceName)
-                    return None
-
-                configurableClass = ConfigurableDb.getConfigurable(confType)
-                if configurableClass is None:
-                    self._defaultTypeNotFound.setdefault(confName,ConfigurableInstanceTracker(getFullName(instanceName)))
-                    if tryDefaultConfigurable:
-                        raise ConfigurableClassNotFoundError("Could not find configurable class for %s" % instanceName)
-                    elif tryDefaultConfigurable is None:
-                        self.logger().verbose("%sNot instantiated because configurable class %r does not exist",indent,confType)
-                        return None
-
-                else:
-                    self.logger().verbose("%sGoing to create default configurable instance %s(%r)",indent,confType,confName)
-                    conf = configurableClass(confName)
-                    # this is the reason we make it even if tryDefaultConfigurable is not True
-                    self._resolveAllProperties(conf,nextIndent,propStack)
-
-                    if tryDefaultConfigurable:
-                        self._createdDefaultInstances.setdefault(confName,ConfigurableInstanceTracker(getFullName(conf)))
-                        self.logger().debug("%sInstantiated default %s(%r)",indent,conf.getType(),conf.getName())
-                    else:
-                        self.logger().verbose("%sGoing to delete default configurable instance %s(%r)",indent,conf.getType(),conf.getName())
-                        return None # this will automatically delete the just created configurable (if no other references exist)
-                    
-
-        if conf is None:
-            self.logger().verbose("%sCould not find configurable instance %r", indent, instanceName)
-            raise ConfigurableInstanceNotFoundError("Could not find configurable instance %r" % (instanceName) )
-
-        return conf
-
-
-    def getConfiguredClone(self, cloneName, originalOrName, **kwargs):
-        if not cloneName or not originalOrName: return None
-        if not self._hasReadDB: self.read()
-        # check if original is available (to catch mistakes like swapping cloneName and originalName)
-        if type(originalOrName) == str and originalOrName not in self._availableConfigurables:
-            raise ConfigurationError("Can not make clone %r of non-existing configurable %r" % (cloneName,originalOrName) )
-
-        conf = None
-        # first see if cloneName is already present in list of available configurables
-        maker = self._availableConfigurables.get(cloneName)
-        if maker is not None:
-            conf = maker.getInstance()
-            if conf is None:
-                conf = maker.createInstance(cloneName)
-                self._resolveAllProperties(conf)
-                self.logger().info("Instantiated configurable %s(%r), requested as clone of %r", conf.__class__.__name__, conf.name(), originalOrName)
-            else:
-                self.logger().debug("Using existing configurable %s(%r), requested as clone of %r", conf.__class__.__name__, conf.name(), originalOrName)
-                
-            problem = self.checkProperties(conf,kwargs)
-            if problem:
-                raise ConfigurationError("Existing clone requested with incompatible properties: "+problem)
-
-
-        if conf is None:
-            # if we get here, clone needs to me made
-            if type(originalOrName) == str:
-                originalName = originalOrName # for printout later
-                self._addConfiguredClone(originalName,cloneName,**kwargs)
-                conf = self.getConfigured(cloneName,tryDefaultConfigurable=False,checkType=True)
-                self.logger().info("Instantiated configurable %s(%r) as clone of %r", conf.__class__.__name__, conf.name(), originalName)
-
-            elif isinstance(originalOrName,Configurable):
-                originalName = originalOrName.name() # for printout later
-                conf = originalOrName.clone(cloneName)
-                for n,v in kwargs.items():
-                    setattr(conf,n,v)
-                self._resolveAllProperties(conf)
-                # add to list for later retrieval
-                self._availableConfigurables[cloneName] = ConfiguredPlaceHolder(conf)
-                self.logger().info("Instantiated direct resolved clone %s(%r) from original %r", conf.__class__.__name__, conf.name(), originalName)
-
-
-        if conf is None:
-            # complete failure
-            raise RuntimeError("Could not find configurable %r as source for clone %r" % (originalOrName,cloneName) )
-
-        return conf
-        
-
-    #
-    # Functions for AlgTools
-    #
-    def addTool(self, toolMaker, name, **toolArgs):
-        """Add a recipe to make a tool"""
-        self._addConfigured( toolMaker, name, **toolArgs )
-
-
-    def addToolClone(self, originalName, cloneName, **kwargs):
-        """Add a clone of an existing tool-recipe"""
-        self._addConfiguredClone(originalName, cloneName, **kwargs)
-
-
-    def checkTool(self,tool):
-        """Check that the tool configurable instance is indeed an AlgTool.
-        Raises ConfigurableTypeError exception is case of error, and returns tool in case of success"""
-        if tool.getGaudiType() != 'AlgTool':
-            raise ConfigurableTypeError("%r is not an AlgTool, but a %s" % (tool,tool.getGaudiType()))
-
-
-    def _getTool(self, name, tryDefaultConfigurable, checkType, indent, propStack ):
-        """Get private or public tool."""
-        if not name: return None
-        if tryDefaultConfigurable is None:
-            tryDefault = self._tryDefaultTool or None
-        else:
-            tryDefault = tryDefaultConfigurable
-
-        tool = self.getConfigured( name, tryDefault, checkType, indent, propStack )
-        if tool is None: return None
-        self.checkTool( tool )
-        return tool
-
-
-    def getPublicTool(self, name, tryDefaultConfigurable=None, checkType=False, allowNone=False, indent="", propStack=None):
-        """Get public tool"""
-        if not name:
-            if allowNone: return None
-            else: raise ConfigurationError("Requested Public Tool with empty name")
-
-        global ToolSvc
-        confType,confName = getTypeAndName(name)
-        if not confName: return None
-        # first try to get it from ToolSvc
-        try:
-            theTool = getattr(ToolSvc,confName)
-            if confType != confName and theTool.getType() != confType:
-                if checkType:
-                    raise ConfigurableTypeError("Existing public tool %r has type %s while type %s is requested" % \
-                                                (confName, theTool.getType(), confType) )
-                else:
-                    self.logger().info("Requested type %s of tool named %r overwritten by already defined type %s",
-                                       confType,confName,theTool.getType())
-            return theTool
-        except AttributeError:
-            # make it and add to ToolSvc
-            theTool = self._getTool(name, tryDefaultConfigurable, checkType, indent, propStack)
-            if theTool is None:
-                if allowNone: return None
-                else: raise ConfigurationError("Public Tool %r not found" % (name,) )
-                    
-            # some configurable makers already add the tool to ToolSvc
-            if not hasattr(ToolSvc,theTool.getName()):
-                ToolSvc += theTool
-            return theTool
-
-
-    def getPrivateTool(self, name, tryDefaultConfigurable = None, checkType = True, allowNone=False, indent = "", propStack=None, **toolProps):
-        """Get private tool. Returns a (deep) copy of the tool, so really private"""
-        if not name:
-            if allowNone: return None
-            else: raise ConfigurationError("Requested Private Tool with empty name")
-            
-        tool = self._getTool(name, tryDefaultConfigurable, checkType, indent, propStack)
-        if tool is None:
-            if allowNone: return None
-            else: raise ConfigurationError("Private Tool %r not found" % (name,) )
-
-        # start with a copy
-        tool = copy.deepcopy( tool )
-        # overwrite with user properties
-        extraProps = self._resolveProperties( tool, toolProps, indent, propStack )
-        for n,v in extraProps.items():
-            setattr(tool,n,v)
-        return tool
-
-
-    def getPublicToolClone(self, cloneName, originalOrName, **toolProps):
-        """Get a public clone of a pre-defined (public or private) tool"""
-        if not cloneName or not originalOrName: return None
-        global ToolSvc
-        # check if clone is same as original
-        origType,origName = getTypeAndName(originalOrName)
-        if not origName: return None
-        if origName == cloneName:
-            self.logger().warning("Clone name same as original name. Returning unmodified original AlgTool named %r",cloneName)
-            oldTool = self.getPublicTool(cloneName)
-            problem = self.checkProperties(oldTool,toolProps)
-            if problem:
-                raise ConfigurationError("Existing AlgTool (clone=original) requested with incompatible properties: "+problem)
-
-            return oldTool
-        
-        # try to get existing clone from ToolSvc
-        try:
-            oldTool = getattr(ToolSvc,cloneName)
-            problem = self.checkProperties(oldTool,toolProps)
-            if problem:
-                raise ConfigurationError("Existing clone requested with incompatible properties: "+problem)
-                
-            return oldTool
-
-        except AttributeError:
-            theTool = self.getConfiguredClone( cloneName, origName, **toolProps )
-            if theTool is None: return None
-            self.checkTool(theTool)
-            # some configurable makers already add the tool to ToolSvc
-            if not hasattr(ToolSvc,theTool.getName()):
-                ToolSvc += theTool
-            return theTool
-
-
-    def getPrivateToolClone(self, cloneName, originalOrName, **toolProps):
-        """Get private tool. Returns a (deep) copy of the tool, so really private"""
-        if not cloneName or not originalOrName: return None
-        theClone = self.getConfiguredClone( cloneName, originalOrName, **toolProps )
-        if theClone is None: return None
-        self.checkTool( theClone )
-        return copy.deepcopy( theClone )
-
-
-    #
-    # Functions for Services
-    #
-    def checkService(self,theSvc):
-        """Make sure that theSvc configurable instance is indeed a Service.
-        Raises ConfigurableTypeError exception is case of error, and returns theSvc in case of success"""
-        if theSvc.getGaudiType() != 'Service':
-            raise ConfigurableTypeError("%r is not a Service, but a %s" % (theSvc,theSvc.getGaudiType()))
-
-    def addService( self, serviceMaker, name, **svcArgs ):
-        self._addConfigured( serviceMaker, name, **svcArgs )
-
-    def addServiceClone(self, originalName, cloneName, **kwargs):
-        """Add a clone of a possible service to the list of possible services."""
-        self._addConfiguredClone(originalName, cloneName, **kwargs)
-
-    def getService( self, name, tryDefaultConfigurable=None, checkType=True, allowNone=False, indent="", propStack=None ):
-        if not name:
-            if allowNone: return None
-            else: raise ConfigurationError("Requested Service with empty name")
-
-        confType,confName = getTypeAndName(name)
-        if not confName:
-            if allowNone: return None
-            else: raise ConfigurationError("Requested Service with empty instance name: %r" % (name,) )
-
-
-        global ServiceMgr
-        # first try to get it from ServiceMgr
-        try:
-            return getattr(ServiceMgr,confName)
-        except AttributeError:
-            # then try to create it
-            if tryDefaultConfigurable is None:
-                tryDefault = self._tryDefaultService or None
-            else:
-                tryDefault = tryDefaultConfigurable
-            # make it and add to ServiceMgr
-            theSvc = self.getConfigured( name, tryDefault, checkType, indent, propStack )
-            if theSvc is None:
-                if allowNone: return None
-                else: raise ConfigurationError("Service %r not found" % (name,) )
-
-            self.checkService(theSvc)
-            ServiceMgr += theSvc
-            return theSvc
-
-
-    def getServiceClone( self, cloneName, originalOrName, **kwargs ):
-        if not cloneName or not originalOrName: return None
-        global ServiceMgr
-
-        # check if clone is same as original
-        origType,origName = getTypeAndName(originalOrName)
-        if origName == cloneName:
-            self.logger().warning("Clone name same as original name. Returning unmodified original Service named %r",cloneName)
-            oldSvc = self.getService(cloneName)
-            problem = self.checkProperties(oldSvc,kwargs)
-            if problem:
-                raise RuntimeError("Existing Service (clone=original) requested with incompatible properties: "+problem)
-
-            return oldSvc
-        
-
-        # first try to get clone from ServiceMgr
-        try:
-            oldSvc = getattr(ServiceMgr,cloneName)
-            problem = self.checkProperties(oldSvc,kwargs)
-            if problem:
-                raise ConfigurationError("Existing clone requested with incompatible properties: "+problem)
-                
-            return oldSvc
-
-        except AttributeError:
-            # then create clone from existing definition
-            theSvc = self.getConfiguredClone( cloneName, origName, **kwargs )
-            if theSvc is None: return None
-            self.checkService(theSvc)
-            # some configurable makers already add the service to ServiceMgr
-            if not hasattr(ServiceMgr,theSvc.getName()):
-                ServiceMgr += theSvc
-            return theSvc
-
-
-    def checkAlgorithm(self,theAlg):
-        """Make sure that theAlg configurable instance is indeed an Algorithm.
-        Raises ConfigurableTypeError exception is case of error, and returns theAlg in case of success"""
-        if theAlg.getGaudiType() != 'Algorithm':
-            raise ConfigurableTypeError("%r is not an Algorithm, but a %s" % (theAlg,theAlg.getGaudiType()))
-
-
-    def addAlgorithm(self, algMaker, algName, **algProps ):
-        self._addConfigured( algMaker, algName, **algProps )
-
-
-    def addAlgorithmClone(self, originalName, cloneName, **kwargs):
-        """Add a clone of a possible Algorithm to the list of possible algorithms"""
-        self._addConfiguredClone(originalName, cloneName, **kwargs)
-
-
-    def getAlgorithm( self, name, tryDefaultConfigurable=None, checkType=True, allowNone=False, indent="", propStack=None ):
-        if not name:
-            if allowNone: return None
-            else: raise ConfigurationError("Requested Algorithm with empty name")
-
-        if tryDefaultConfigurable is None:
-            tryDefault = self._tryDefaultAlgorithm or None
-        else:
-            tryDefault = tryDefaultConfigurable
-
-        theAlg = self.getConfigured( name, tryDefault, checkType, indent, propStack )
-        if theAlg is None:
-            if allowNone: return None
-            else: raise ConfigurationError("Algorithm %r not found" % (name,) )
-
-        self.checkAlgorithm(theAlg)        
-        return theAlg
-
-
-    def getAlgorithmClone( self, cloneName, originalOrName, **kwargs ):
-        if not cloneName or not originalOrName: return None
-
-        # check if clone is same as original
-        origType,origName = getTypeAndName(originalOrName)
-        if origName == cloneName:
-            self.logger().warning("Clone name same as original name. Returning unmodified original Algorithm named %r",cloneName)
-            oldAlg = self.getAlgorithm(cloneName)
-            problem = self.checkProperties(oldAlg,kwargs)
-            if problem:
-                raise ConfigurationError("Existing Algorithm (clone=original) requested with incompatible properties: "+problem)
-
-            return oldAlg
-        
-        # make it from original one
-        theAlg = self.getConfiguredClone(cloneName, origName, **kwargs)
-        if theAlg is None: return None
-        self.checkAlgorithm(theAlg)
-        return theAlg
-
-
-    # if called at the end of the job, it will show some statistics
-    def printStats(self):
-        log = self.logger()
-        nConfig = self.numberOfConfigurables()
-        nInstances = self.numberOfInstances()
-        lines = [ "%d configurables were declared, %d instantiated (indicated by *):" % (nConfig, nInstances) ]
-        # print in alphabetic order by name
-        names = self._availableConfigurables.keys()
-        names.sort(key = lambda x : x)
-        for n in names:
-            c = self._availableConfigurables[n]
-            lines.append( c.shortDescription(n) )
-        lines.append( "end of list of declared configurables" )
-        log.info( os.linesep.join(lines) )
-
-
-        # print list of created defaults, in alphabetic order by type
-        defaultInstances = self._createdDefaultInstances.values()
-        defaultInstances.sort(key = lambda x : x.getType())
-        nDefault = len(defaultInstances)
-        if nDefault:
-            lines = [ "%d default configurables were created:" % (nDefault) ]
-            for c in defaultInstances:
-                lines.append( "* %s" % (c) )
-            lines.append("end of list of created default configurables")
-            log.info( os.linesep.join(lines) )
-
-
-        # print list of default configurables where the type was not found
-        defaultTypesNotFound = self._defaultTypeNotFound.values()
-        defaultTypesNotFound.sort(key = lambda x : x.getType())
-        nTypesNotFound = len(defaultTypesNotFound)
-        if nTypesNotFound:
-            lines= [ "%d types not found for default configurable:" % (nTypesNotFound) ]
-            for c in defaultTypesNotFound:
-                lines.append( "  %s" % (c) )
-            log.info( os.linesep.join(lines) )
-
-
-        if self._excludedDependencies:
-            lines = ["Defaults of the following %i configurables were not explicitly configured:" % \
-                     len(self._excludedDependencies) ]
-            for c in self._excludedDependencies:
-                lines.append( "  %s" % c )
-            log.info( os.linesep.join(lines) )
-
-        if self._excludedDefaultProperties:
-            lines = [ "%i properties with the following names and default values were not explicitly configured:" % \
-                      len(self._excludedDefaultProperties) ]
-            for c,count in self._excludedDefaultProperties.items():
-                lines.append("  %s (%d times)" % (c,count) )
-            log.info( os.linesep.join(lines) )
-
-        if self._excludedOnlyTypes:
-            lines = [ "%i default instances were not made because only the type was given:" % \
-                      len(self._excludedOnlyTypes) ]
-            for c in self._excludedOnlyTypes:
-                lines.append("  %s" % c)
-            log.info( os.linesep.join(lines) )                
-        
-        if self._ignoredDependencyErrors:
-            lines = [ "%i errors in configuring C++ defaults were ignored:" % \
-                      len(self._ignoredDependencyErrors) ]
-            for c in self._ignoredDependencyErrors:
-                lines.append("  %s" % c)
-            log.info( os.linesep.join(lines) )
-            
-
-        del log
-        
-
-    def read(self):
-        """Read definitions from Database modules"""
-        import sys, os, os.path, time
-       
-        self.logger().debug( "importing configDb modules..." )
-        nFiles = 0
-        startTime = time.time()
-
-        paths = [p for p in sys.path if p.endswith ('/python')]
-        if not paths: paths = sys.path
-
-        for path in paths:
-            if not os.path.exists( path ):
-                continue
-            if not os.path.isdir (path): continue
-         
-            self.logger().verbose( "searching in %s..." % path )
-         
-            packages = os.listdir(path)
-            for p in packages:
-                dbFile = p+"ConfigDb.py"
-                localfile = os.path.join(p,dbFile)
-                globalfile = os.path.join(path,localfile)
-                if not os.path.isfile(globalfile): continue
-                    
-                # turn filename syntax into module syntax: remove extension and replace / with . (dot)
-                confDbModule = os.path.splitext(localfile)[0].replace(os.sep,'.')
-                self.logger().debug( "importing %s...", confDbModule )
-                try:
-                    mod = __import__( confDbModule, globals(), locals(), [dbFile] )
-                except Exception as err:
-                    self.logger().warning( "Error importing module %s !", confDbModule )
-                    self.logger().warning( "Reason: %s", err )
-                else:
-                    nFiles += 1
-
-        stopTime = time.time()
-        self.logger().info( "imported %i confDb modules in %.2f seconds", nFiles, stopTime-startTime )
-        self._hasReadDB = True
-        
-
-    def instantiateAll(self):
-        """Instantiate all declared tools,services and algorithms. Mainly for testing the full database."""
-        for name in self._availableConfigurables.keys():
-            self.getConfigured(name,tryDefaultConfigurable=False,checkType=False)
-
-# end of class ConfiguredFactory
diff --git a/Control/AthenaCommon/share/Execution.py b/Control/AthenaCommon/share/Execution.py
index c2952e194fd1d6c14846a80ccf5e80030f3b99ea..82179084d615be0d46668543baf1be0597784fa3 100644
--- a/Control/AthenaCommon/share/Execution.py
+++ b/Control/AthenaCommon/share/Execution.py
@@ -87,7 +87,7 @@ else:
       hookDebugger()
 
  ## setup multi-process running and debugging
-   if not opts.minimal and opts.nprocs and (opts.nprocs >= 1 or opts.nprocs==-1):
+   if not opts.fromdb and opts.nprocs and (opts.nprocs >= 1 or opts.nprocs==-1):
       from AthenaCommon.AppMgr import ServiceMgr as svcMgr
       import AthenaMP.PyComps as _amppy
       svcMgr += _amppy.MpEvtLoopMgr(NWorkers=opts.nprocs)
diff --git a/Control/AthenaCommon/share/Preparation.py b/Control/AthenaCommon/share/Preparation.py
index 24fcd501854f08453f352c2d31901e534effe353..5b23d66fdbc607312c2e9067304cb4bb180f1d1b 100644
--- a/Control/AthenaCommon/share/Preparation.py
+++ b/Control/AthenaCommon/share/Preparation.py
@@ -23,8 +23,7 @@ import AthenaCommon.Constants     as Lvl
 import AthenaCommon.SystemOfUnits as Units
 
 ### Athena configuration -----------------------------------------------------
-if not opts.minimal:
-   from AthenaCommon import CfgMgr
+from AthenaCommon import CfgMgr
 from AthenaCommon.AppMgr import theApp
 from AthenaCommon.Logging import log
 
@@ -88,12 +87,6 @@ del min_cppyy_vmem_growth, grow_vmem, vmem_mb
 ## we have to put it there as some jobOptions might steal the event loop...
 ## ie: do theApp.initialize();theApp.nextEvent(...) directly in the jobO.
 
-# this code is to be removed; for now flag it with 'minimal' to test the effects
-# of its removal
-if not opts.minimal:
-   from AthenaPython import PyAthena
-# -- end of minimal
-
 ## user level configuration
 from AthenaCommon.Include import IncludeError
 try:
@@ -116,14 +109,6 @@ if opts.interactive:
 
    del atexit
 
-### pre-import some general job-flags so they can be used in CLI
-
-# this code is to be removed; for now flag it with 'minimal' to test the effects
-# of its removal
-if not opts.minimal:
-   from AthenaCommon.AthenaCommonFlags import jobproperties as jp
-   from AthenaCommon.GlobalFlags import jobproperties as jp
-
 if opts.command:
    _msg.info( 'executing CLI (-c) command: "%s"' % opts.command )
    exec (opts.command)
diff --git a/Control/AthenaConfiguration/python/AthConfigFlags.py b/Control/AthenaConfiguration/python/AthConfigFlags.py
index 8167c23143f6c50f85c70ffc6fe0a9a27847fb83..9dde73038417965f09854d8dde1dc930bfc75e35 100644
--- a/Control/AthenaConfiguration/python/AthConfigFlags.py
+++ b/Control/AthenaConfiguration/python/AthConfigFlags.py
@@ -161,8 +161,7 @@ class FlagAddress(object):
         raise RuntimeError( "No such flag: "+ self._name+".  The name is likely incomplete." )
 
     def __getitem__(self, name):
-        merged = self._name + "." + name
-        return self._flags._get(merged)
+        return getattr(self, name)
 
     def __setitem__(self, name, value):
         setattr(self, name, value)
@@ -236,7 +235,9 @@ class AthConfigFlags(object):
         raise DeprecationWarning("__hash__ method in AthConfigFlags is deprecated. Probably called from function decorator, use AccumulatorCache decorator instead.")
 
     def _calculateHash(self):
-        return hash(frozenset((x, repr(y)) for x, y in self._flagdict.items()))
+        fmap = self._renamed_map()
+        flags = ((fmap[x], y) for x, y in self._flagdict.items() if fmap[x])
+        return hash(frozenset((x, repr(y)) for x, y in flags))
 
     def __getattr__(self, name):
         # Avoid infinite recursion looking up our own attributes
@@ -307,7 +308,6 @@ class AthConfigFlags(object):
 
 
     def _renamed_map(self):
-        self.loadAllDynamicFlags()
         def rename(key):
             for new, old in self._renames.items():
                 if key.startswith(old + '.'):
diff --git a/Control/AthenaConfiguration/test/testAthConfigFlags.py b/Control/AthenaConfiguration/test/testAthConfigFlags.py
index d8b35e3b8f4f2621229b0cb6f2e0e7523f3c02d5..c0aa7dcc8a42116c7db6997acf6b7c2fdc377f85 100755
--- a/Control/AthenaConfiguration/test/testAthConfigFlags.py
+++ b/Control/AthenaConfiguration/test/testAthConfigFlags.py
@@ -324,6 +324,43 @@ class TestFlagsSetupDynamic(FlagsSetup):
         self.assertTrue( self.flags.hasFlag("Z.A") )
         self.assertTrue( self.flags.hasCategory("Z.C") )
 
+    def test_cloneHash(self):
+        # compare copy hash to clone hash, should be equal
+        copyflags = copy.deepcopy(self.flags)
+        copyflags.loadAllDynamicFlags()
+        copyflags.lock()
+        copyhash = copyflags.athHash()
+        cloneflags = self.flags.clone()
+        cloneflags.loadAllDynamicFlags()
+        cloneflags.lock()
+        clonehash = cloneflags.athHash()
+        self.assertEqual(copyhash, clonehash)
+
+        # compare copy hash to cloneAndReplace hash, should not be equal
+        clonew = self.flags.cloneAndReplace('W', 'Z')
+        clonew.loadAllDynamicFlags()
+        clonew.lock()
+        clonewhash = clonew.athHash()
+        # this should not be equal, the flags don't have the same
+        # content
+        self.assertNotEqual(clonewhash, clonehash)
+
+        # copy back into Z
+        clonez = clonew.cloneAndReplace('Z', 'W')
+        clonez.lock()
+        clonezhash = clonez.athHash()
+        self.assertEqual(clonehash, clonezhash)
+
+        # compare first clone to second clone
+        cloneflags2 = self.flags.clone()
+        cloneflags2.loadAllDynamicFlags()
+        cloneflags2.lock()
+        clonehash2 = cloneflags2.athHash()
+        # this should be equal, they have the same flags
+        self.assertEqual(clonehash2, clonehash)
+        # this should not be equal, since a group was replaced
+        self.assertNotEqual(clonehash2, clonewhash)
+
 
 class TestDynamicDependentFlags(unittest.TestCase):
     def test(self):
diff --git a/Control/AthenaServices/share/AthTPCnvSvc_test.py b/Control/AthenaServices/share/AthTPCnvSvc_test.py
index 21eeac4c8735ab4c2ad0e268fdbf8901a947656a..6d86094a20c70c7ae589410caa8f3dcba478b0ac 100644
--- a/Control/AthenaServices/share/AthTPCnvSvc_test.py
+++ b/Control/AthenaServices/share/AthTPCnvSvc_test.py
@@ -1,5 +1,8 @@
 # Unit test for AthTPCnvSvc
 
+from AthenaCommon.AppMgr import theApp
+from AthenaPython import PyAthena
+
 theApp.EvtMax = 1
 theApp.initialize()
 tpsvc=PyAthena.py_svc('AthTPCnvSvc',True,'ITPCnvSvc')
diff --git a/DataQuality/DataQualityConfigurations/config/HLT/HLTcalo/collisions_run.config b/DataQuality/DataQualityConfigurations/config/HLT/HLTcalo/collisions_run.config
index d1353bf9d0f42bdb247b319267e926beb06ae2f2..fbf463c78a3036e3c9e12879812a7155c219807a 100644
--- a/DataQuality/DataQualityConfigurations/config/HLT/HLTcalo/collisions_run.config
+++ b/DataQuality/DataQualityConfigurations/config/HLT/HLTcalo/collisions_run.config
@@ -106,11 +106,11 @@ dir HLT {                                   # dir HLT
     dir HLT_FastCaloEMClusters {            # HLT_FastCaloEMClusters
     
       dir HLT_Clusters {                    # HLT_Clusters
-        algorithm = HLTCalo_KolmogorovTest_MaxDist
         dir (?P<chain>.*) {
           regex       =  1
 
           hist HLT_(eta|num|phi|size) {
+            algorithm = HLTCalo_KolmogorovTest_MaxDist
             output = HLT/TRCAL/Shifter/xAODTrigEMClusters/${chain}
           }
           hist HLT_phi_vs_HLT_eta {
@@ -139,12 +139,17 @@ dir HLT {                                   # dir HLT
         }
       }                                     # HLT_Matched_to_OFF
       dir OFF_Clusters {                    # OFF_Clusters
-        algorithm = HLTCalo_KolmogorovTest_MaxDist
         dir (?P<chain>.*) {
           regex       =  1
-          hist .* {
+          hist OFF_(eta|num|phi|size) {
+            algorithm = HLTCalo_KolmogorovTest_MaxDist
+            output = HLT/TRCAL/Expert/xAODTrigEMClusters/${chain}
+          }
+          hist OFF_phi_vs_OFF_eta {
+            algorithm = HLTCaloHistogram_Not_Empty&GatherData
             output = HLT/TRCAL/Expert/xAODTrigEMClusters/${chain}
           }
+
         }                                   
       }                                     # OFF_Matched_to_HLT
     }                                       # HLT_FastCaloEMClusters
@@ -152,10 +157,10 @@ dir HLT {                                   # dir HLT
     dir HLT_TopoCaloClustersFS {            # HLT_TopoCaloClustersFS
 
       dir HLT_Clusters {                    # HLT_Clusters
-        algorithm = HLTCalo_KolmogorovTest_MaxDist
         dir (?P<chain>.*) {
           regex       =  1
           hist HLT_(eta|num|phi|size) {
+            algorithm = HLTCalo_KolmogorovTest_MaxDist
             output = HLT/TRCAL/Shifter/xAODCaloClusters/HLT_TopoCaloClustersFS/${chain}
           }
           hist HLT_phi_vs_HLT_eta {
@@ -184,19 +189,19 @@ dir HLT {                                   # dir HLT
         }
       }                                       # HLT_Matched_to_OFF
       dir OFF_Clusters {                      # OFF_Clusters
-        algorithm = HLTCalo_KolmogorovTest_MaxDist
         dir (?P<chain>.*) {
           regex       =  1
           hist OFF_(et|eta|num|phi|type) {
+            algorithm = HLTCalo_KolmogorovTest_MaxDist
             output = HLT/TRCAL/Expert/xAODCaloClusters/HLT_TopoCaloClustersFS/${chain}
           }
         }
       }                                       # OFF_Clusters
       dir OFF_Matched_to_HLT {                # OFF_Matched_to_HLT
-        algorithm = HLTCalo_KolmogorovTest_MaxDist
         dir (?P<chain>.*) {
           regex       =  1
           hist OFF_no_HLT_match_num {
+            algorithm = HLTCalo_KolmogorovTest_MaxDist
             output = HLT/TRCAL/Expert/xAODCaloClusters/HLT_TopoCaloClustersFS/${chain}
           }
         }
@@ -207,10 +212,10 @@ dir HLT {                                   # dir HLT
     dir HLT_TopoCaloClustersLC {              # HLT_TopoCaloClustersLC
     
       dir HLT_Clusters {                      # HLT_Clusters
-        algorithm = HLTCalo_KolmogorovTest_MaxDist
         dir (?P<chain>.*) {
           regex       =  1
           hist HLT_(eta|num|phi|size) {
+            algorithm = HLTCalo_KolmogorovTest_MaxDist
             output = HLT/TRCAL/Shifter/xAODCaloClusters/HLT_TopoCaloClustersLC/${chain}
           }
           hist HLT_phi_vs_HLT_eta {
@@ -261,10 +266,10 @@ dir HLT {                                   # dir HLT
     dir HLT_TopoCaloClustersRoI {              # HLT_TopoCaloClustersRoI
       
       dir HLT_Clusters {                       # HLT_Clusters
-        algorithm = HLTCalo_KolmogorovTest_MaxDist
         dir (?P<chain>.*) {
        	  regex       =  1
           hist HLT_(eta|num|phi|size) {
+            algorithm = HLTCalo_KolmogorovTest_MaxDist
             output = HLT/TRCAL/Shifter/xAODCaloClusters/HLT_TopoCaloClustersRoI/${chain}
           }
           hist HLT_phi_vs_HLT_eta {
@@ -293,10 +298,10 @@ dir HLT {                                   # dir HLT
         }
       }                                        # HLT_Matched_to_OFF
       dir OFF_Clusters {                       # OFF_Clusters
-        algorithm = HLTCalo_KolmogorovTest_MaxDist
         dir (?P<chain>.*) {
           regex       =  1
           hist OFF_(et|eta|num|phi|type) {
+            algorithm = HLTCalo_KolmogorovTest_MaxDist
             output = HLT/TRCAL/Expert/xAODCaloClusters/HLT_TopoCaloClustersRoI/${chain}
           }
         }
diff --git a/DataQuality/DataQualityConfigurations/config/HLT/HLTcalo/cosmics_run.config b/DataQuality/DataQualityConfigurations/config/HLT/HLTcalo/cosmics_run.config
index 96b16150fae350fe073d2746c40003edeb89698e..7b0fe950293ddfeefa19f91b2855091403e70e4c 100644
--- a/DataQuality/DataQualityConfigurations/config/HLT/HLTcalo/cosmics_run.config
+++ b/DataQuality/DataQualityConfigurations/config/HLT/HLTcalo/cosmics_run.config
@@ -93,11 +93,11 @@ dir HLT {                                   # dir HLT
     dir HLT_FastCaloEMClusters {            # HLT_FastCaloEMClusters
     
       dir HLT_Clusters {                    # HLT_Clusters
-        algorithm = HLTCalo_KolmogorovTest_MaxDist
         dir (?P<chain>.*) {
           regex       =  1
 
           hist HLT_(eta|num|phi|size) {
+            algorithm = HLTCalo_KolmogorovTest_MaxDist
             output = HLT/TRCAL/Shifter/xAODTrigEMClusters/${chain}
           }
           hist HLT_phi_vs_HLT_eta {
@@ -129,7 +129,12 @@ dir HLT {                                   # dir HLT
         algorithm = HLTCalo_KolmogorovTest_MaxDist
         dir (?P<chain>.*) {
           regex       =  1
-          hist .* {
+          hist OFF_(eta|num|phi|size) {
+            algorithm = HLTCalo_KolmogorovTest_MaxDist
+            output = HLT/TRCAL/Expert/xAODTrigEMClusters/${chain}
+          }
+          hist OFF_phi_vs_OFF_eta {
+            algorithm = HLTCaloHistogram_Not_Empty&GatherData
             output = HLT/TRCAL/Expert/xAODTrigEMClusters/${chain}
           }
         }                                   
@@ -139,10 +144,10 @@ dir HLT {                                   # dir HLT
     dir HLT_TopoCaloClustersFS {            # HLT_TopoCaloClustersFS
 
       dir HLT_Clusters {                    # HLT_Clusters
-        algorithm = HLTCalo_KolmogorovTest_MaxDist
         dir (?P<chain>.*) {
           regex       =  1
           hist HLT_(eta|num|phi|size) {
+            algorithm = HLTCalo_KolmogorovTest_MaxDist
             output = HLT/TRCAL/Shifter/xAODCaloClusters/HLT_TopoCaloClustersFS/${chain}
           }
           hist HLT_phi_vs_HLT_eta {
@@ -171,19 +176,19 @@ dir HLT {                                   # dir HLT
         }
       }                                       # HLT_Matched_to_OFF
       dir OFF_Clusters {                      # OFF_Clusters
-        algorithm = HLTCalo_KolmogorovTest_MaxDist
         dir (?P<chain>.*) {
           regex       =  1
           hist OFF_(et|eta|num|phi|type) {
+            algorithm = HLTCalo_KolmogorovTest_MaxDist
             output = HLT/TRCAL/Expert/xAODCaloClusters/HLT_TopoCaloClustersFS/${chain}
           }
         }
       }                                       # OFF_Clusters
       dir OFF_Matched_to_HLT {                # OFF_Matched_to_HLT
-        algorithm = HLTCalo_KolmogorovTest_MaxDist
         dir (?P<chain>.*) {
           regex       =  1
           hist OFF_no_HLT_match_num {
+            algorithm = HLTCalo_KolmogorovTest_MaxDist
             output = HLT/TRCAL/Expert/xAODCaloClusters/HLT_TopoCaloClustersFS/${chain}
           }
         }
@@ -194,10 +199,10 @@ dir HLT {                                   # dir HLT
     dir HLT_TopoCaloClustersLC {              # HLT_TopoCaloClustersLC
     
       dir HLT_Clusters {                      # HLT_Clusters
-        algorithm = HLTCalo_KolmogorovTest_MaxDist
         dir (?P<chain>.*) {
           regex       =  1
           hist HLT_(eta|num|phi|size) {
+            algorithm = HLTCalo_KolmogorovTest_MaxDist
             output = HLT/TRCAL/Shifter/xAODCaloClusters/HLT_TopoCaloClustersLC/${chain}
           }
           hist HLT_phi_vs_HLT_eta {
@@ -226,10 +231,10 @@ dir HLT {                                   # dir HLT
         }
       }                                       # HLT_Matched_to_OFF
       dir OFF_Clusters {                      # OFF_Clusters
-        algorithm = HLTCalo_KolmogorovTest_MaxDist
         dir (?P<chain>.*) {
           regex       =  1
           hist OFF_(et|eta|num|phi|type) {
+            algorithm = HLTCalo_KolmogorovTest_MaxDist
             output = HLT/TRCAL/Expert/xAODCaloClusters/HLT_TopoCaloClustersLC/${chain}
           }
         }
@@ -248,10 +253,10 @@ dir HLT {                                   # dir HLT
     dir HLT_TopoCaloClustersRoI {              # HLT_TopoCaloClustersRoI
       
       dir HLT_Clusters {                       # HLT_Clusters
-        algorithm = HLTCalo_KolmogorovTest_MaxDist
         dir (?P<chain>.*) {
        	  regex       =  1
           hist HLT_(eta|num|phi|size) {
+            algorithm = HLTCalo_KolmogorovTest_MaxDist
             output = HLT/TRCAL/Shifter/xAODCaloClusters/HLT_TopoCaloClustersRoI/${chain}
           }
           hist HLT_phi_vs_HLT_eta {
@@ -280,10 +285,10 @@ dir HLT {                                   # dir HLT
         }
       }                                        # HLT_Matched_to_OFF
       dir OFF_Clusters {                       # OFF_Clusters
-        algorithm = HLTCalo_KolmogorovTest_MaxDist
         dir (?P<chain>.*) {
           regex       =  1
           hist OFF_(et|eta|num|phi|type) {
+            algorithm = HLTCalo_KolmogorovTest_MaxDist
             output = HLT/TRCAL/Expert/xAODCaloClusters/HLT_TopoCaloClustersRoI/${chain}
           }
         }
diff --git a/DataQuality/DataQualityConfigurations/config/HLT/HLTcalo/heavyions_run.config b/DataQuality/DataQualityConfigurations/config/HLT/HLTcalo/heavyions_run.config
index 5c198e25f592316aeae60f0dc4ed65bf4a0fdb40..19196ac78c9400715e3649d8d91694a2ccb2466c 100644
--- a/DataQuality/DataQualityConfigurations/config/HLT/HLTcalo/heavyions_run.config
+++ b/DataQuality/DataQualityConfigurations/config/HLT/HLTcalo/heavyions_run.config
@@ -101,11 +101,11 @@ dir HLT {                                   # dir HLT
     dir HLT_FastCaloEMClusters {            # HLT_FastCaloEMClusters
     
       dir HLT_Clusters {                    # HLT_Clusters
-        algorithm = HLTCalo_KolmogorovTest_MaxDist
         dir (?P<chain>.*) {
           regex       =  1
 
           hist HLT_(eta|num|phi|size) {
+            algorithm = HLTCalo_KolmogorovTest_MaxDist
             output = HLT/TRCAL/Shifter/xAODTrigEMClusters/${chain}
           }
           hist HLT_phi_vs_HLT_eta {
@@ -134,10 +134,14 @@ dir HLT {                                   # dir HLT
         }
       }                                     # HLT_Matched_to_OFF
       dir OFF_Clusters {                    # OFF_Clusters
-        algorithm = HLTCalo_KolmogorovTest_MaxDist
         dir (?P<chain>.*) {
           regex       =  1
-          hist .* {
+          hist OFF_(eta|num|phi|size) {
+            algorithm = HLTCalo_KolmogorovTest_MaxDist
+            output = HLT/TRCAL/Expert/xAODTrigEMClusters/${chain}
+          }
+          hist OFF_phi_vs_OFF_eta {
+            algorithm = HLTCaloHistogram_Not_Empty&GatherData
             output = HLT/TRCAL/Expert/xAODTrigEMClusters/${chain}
           }
         }                                   
@@ -147,10 +151,10 @@ dir HLT {                                   # dir HLT
     dir HLT_TopoCaloClustersHIRoI {              # HLT_TopoCaloClustersHIRoI
       
       dir HLT_Clusters {                       # HLT_Clusters
-        algorithm = HLTCalo_KolmogorovTest_MaxDist
         dir (?P<chain>.*) {
        	  regex       =  1
           hist HLT_(eta|num|phi|size) {
+            algorithm = HLTCalo_KolmogorovTest_MaxDist
             output = HLT/TRCAL/Shifter/xAODCaloClusters/HLT_TopoCaloClustersHIRoI/${chain}
           }
           hist HLT_phi_vs_HLT_eta {
@@ -179,10 +183,10 @@ dir HLT {                                   # dir HLT
         }
       }                                        # HLT_Matched_to_OFF
       dir OFF_Clusters {                       # OFF_Clusters
-        algorithm = HLTCalo_KolmogorovTest_MaxDist
         dir (?P<chain>.*) {
           regex       =  1
           hist OFF_(et|eta|num|phi|type) {
+            algorithm = HLTCalo_KolmogorovTest_MaxDist
             output = HLT/TRCAL/Expert/xAODCaloClusters/HLT_TopoCaloClustersHIRoI/${chain}
           }
         }
@@ -201,10 +205,10 @@ dir HLT {                                   # dir HLT
     dir HLT_TopoCaloClustersFS {            # HLT_TopoCaloClustersFS
 
       dir HLT_Clusters {                    # HLT_Clusters
-        algorithm = HLTCalo_KolmogorovTest_MaxDist
         dir (?P<chain>.*) {
           regex       =  1
           hist HLT_(eta|num|phi|size) {
+            algorithm = HLTCalo_KolmogorovTest_MaxDist
             output = HLT/TRCAL/Shifter/xAODCaloClusters/HLT_TopoCaloClustersFS/${chain}
           }
           hist HLT_phi_vs_HLT_eta {
@@ -233,19 +237,19 @@ dir HLT {                                   # dir HLT
         }
       }                                       # HLT_Matched_to_OFF
       dir OFF_Clusters {                      # OFF_Clusters
-        algorithm = HLTCalo_KolmogorovTest_MaxDist
         dir (?P<chain>.*) {
           regex       =  1
           hist OFF_(et|eta|num|phi|type) {
+            algorithm = HLTCalo_KolmogorovTest_MaxDist
             output = HLT/TRCAL/Expert/xAODCaloClusters/HLT_TopoCaloClustersFS/${chain}
           }
         }
       }                                       # OFF_Clusters
       dir OFF_Matched_to_HLT {                # OFF_Matched_to_HLT
-        algorithm = HLTCalo_KolmogorovTest_MaxDist
         dir (?P<chain>.*) {
           regex       =  1
           hist OFF_no_HLT_match_num {
+            algorithm = HLTCalo_KolmogorovTest_MaxDist
             output = HLT/TRCAL/Expert/xAODCaloClusters/HLT_TopoCaloClustersFS/${chain}
           }
         }
diff --git a/DataQuality/DataQualityConfigurations/scripts/MergeConfigs.py b/DataQuality/DataQualityConfigurations/scripts/MergeConfigs.py
index 159a28dac7108cad912ee942cf7b68bc9703ce45..99419f70852034cf03c57ab8c2edf3bf1140fe93 100755
--- a/DataQuality/DataQualityConfigurations/scripts/MergeConfigs.py
+++ b/DataQuality/DataQualityConfigurations/scripts/MergeConfigs.py
@@ -13,8 +13,11 @@ def list_directories(parent_dir, recurse=True):
     returns a list of directories in parent_dir
     Directory 'common' will appear first if present
     if recurse = True, also includes all subdirectories in the tree
+    If user has defined DQCONFIG_DIR_WHITELIST env var, will filter based on that list
+    Example values would be: DQCONFIG_DIR_WHITELIST=L1Calo:HLT
     """
     import os
+    whitelist = os.getenv('DQCONFIG_DIR_WHITELIST','').split(":")
     rv = []
     if recurse:
         for root, dum1, dum2 in os.walk(parent_dir):
@@ -31,7 +34,7 @@ def list_directories(parent_dir, recurse=True):
     for x in rv:
         if 'common' in x:
             truerv = [x] + truerv
-        else:
+        elif x.split("/")[-1] in whitelist or whitelist==[]:
             truerv.append(x)
     return truerv
 
@@ -105,7 +108,7 @@ def merge_han_configs(template, parent_dir, out, options):
     outobj = open(out, 'w')
     outobj.write('# ****************************\n')
     outobj.write('metadata GitInfo {\n')
-    outobj.write('  Hash = %s\n' % release_metadata()['nightly release'])
+    outobj.write('  Hash = %s\n' % (release_metadata()['nightly release'] or "unknown"))
     outobj.write('}\n')
     outobj.write('# ****************************\n\n')
     for f in files:
diff --git a/DataQuality/DataQualityUtils/DataQualityUtils/HanOutputFile.h b/DataQuality/DataQualityUtils/DataQualityUtils/HanOutputFile.h
index db478b80355593f1a8e5dddf3738b1036aa018f7..a82f01d40d6058ae05595c2896b6fc90abeb768a 100644
--- a/DataQuality/DataQualityUtils/DataQualityUtils/HanOutputFile.h
+++ b/DataQuality/DataQualityUtils/DataQualityUtils/HanOutputFile.h
@@ -149,7 +149,7 @@ namespace dqutils
   protected:
     virtual void clearData();
     virtual void convertToGraphics(
-      int cnvsType, TCanvas* myC, std::string& json, TImage* img = 0, char** x = 0, int* y = 0);
+      int cnvsType, TCanvas* myC, std::string& json, TImage** img = 0, char** x = 0, int* y = 0);
     virtual void convertToGraphics(int cnvsType, TCanvas* myC, std::string namePNG, std::string nameJSON);
     virtual bool saveFile(
       int cnvsType, std::string pngfName, std::string pngContent, std::string jsonfName, std::string jsonfContent);
diff --git a/DataQuality/DataQualityUtils/src/HanOutputFile.cxx b/DataQuality/DataQualityUtils/src/HanOutputFile.cxx
index 39af410ec3c8429be704cd76063a654d0d9d259f..8cbe3942fb320ae40cffc2aa0bc5f305ea1dcfea 100644
--- a/DataQuality/DataQualityUtils/src/HanOutputFile.cxx
+++ b/DataQuality/DataQualityUtils/src/HanOutputFile.cxx
@@ -31,6 +31,7 @@
 #include <TString.h>
 #include <TStyle.h>
 #include <TText.h>
+#include <TImageDump.h>
 
 #include <boost/algorithm/string/case_conv.hpp>
 #include <boost/lexical_cast.hpp>
@@ -1325,10 +1326,14 @@ namespace dqutils
     return nSaved;
   }
 
-  void getImageBuffer(TImage* img, TCanvas* myC, char** x, int* y)
+  void getImageBuffer ATLAS_NOT_THREAD_SAFE (TImage** img, TCanvas* myC, char** x, int* y)
   {
-    img->FromPad(myC);
-    img->GetImageBuffer(x, y, TImage::kPng);
+    gVirtualPS->Open(myC->GetName(), 114);
+    myC->Paint();
+    (*img) = dynamic_cast<TImageDump*>(gVirtualPS)->GetImage();
+    if (*img) {
+      (*img)->GetImageBuffer(x, y, TImage::kPng);
+    }
   }
 
   bool HanOutputFile::saveHistogramToFile(std::string nameHis, std::string location, TDirectory* groupDir,
@@ -1399,12 +1404,13 @@ namespace dqutils
     gStyle->SetStatW(0.2);
     gStyle->SetStatH(0.1);
 
-    char* x;
+    char* x = nullptr;
     int y;
     std::string json;
-    TImage* img = TImage::Create();
+    TImage* img = nullptr;
 
     gROOT->SetBatch();
+    TImageDump tid;
     std::string pathname(groupDir->GetPath());
     std::string display = "";
     bool WasCollectionReference = false;
@@ -1831,7 +1837,7 @@ namespace dqutils
         tt.SetNDC();
         tt.SetTextSize(0.03);
         tt.DrawLatex(0.02, 0.01, pathName.c_str());
-        convertToGraphics(cnvsType, myC.get(), json, img, &x, &y);
+        convertToGraphics(cnvsType, myC.get(), json, &img, &x, &y);
       }
       else if (h != 0)
       {
@@ -2118,7 +2124,7 @@ namespace dqutils
         tt.SetTextSize(0.03);
         tt.DrawLatex(0.02, 0.01, pathName.c_str());
 
-        convertToGraphics(cnvsType, myC.get(), json, img, &x, &y);
+        convertToGraphics(cnvsType, myC.get(), json, &img, &x, &y);
       }
       // delete myC;
       gStyle->Reset();
@@ -2149,7 +2155,7 @@ namespace dqutils
       tt.DrawLatex(0.02, 0.01, pathName.c_str());
       // myC->SaveAs( name.c_str() );
 
-      convertToGraphics(cnvsType, myC.get(), json, img, &x, &y);
+      convertToGraphics(cnvsType, myC.get(), json, &img, &x, &y);
 
       gStyle->Reset();
     }
@@ -2277,7 +2283,7 @@ namespace dqutils
       tt.SetNDC();
       tt.SetTextSize(0.03);
       tt.DrawLatex(0.02, 0.01, pathName.c_str());
-      convertToGraphics(cnvsType, myC.get(), json, img, &x, &y);
+      convertToGraphics(cnvsType, myC.get(), json, &img, &x, &y);
       gStyle->Reset();
     }
     std::string rv;
@@ -2287,7 +2293,7 @@ namespace dqutils
     };
     std::pair<std::string, std::string> rvPair{ rv, json };
 
-    delete img;
+    free(x);
     delete hobj;
     delete hRef;
     delete legend;
@@ -3715,7 +3721,7 @@ namespace dqutils
     return true;
   }
 
-  void HanOutputFile::convertToGraphics(int cnvsType, TCanvas* myC, std::string& json, TImage* img, char** x, int* y)
+  void HanOutputFile::convertToGraphics(int cnvsType, TCanvas* myC, std::string& json, TImage** img, char** x, int* y)
   {
     if (cnvsType & GENERATE_PNG)
     {
diff --git a/DetectorDescription/GeoModel/DumpGeo/CMakeLists.txt b/DetectorDescription/GeoModel/DumpGeo/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..6e0ff16df43b4cb76d2edc750ffa34c7d58bdfa5
--- /dev/null
+++ b/DetectorDescription/GeoModel/DumpGeo/CMakeLists.txt
@@ -0,0 +1,19 @@
+# Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration
+
+# author: Riccardo Maria BIANCHI <riccardo.maria.bianchi@cern.ch>
+
+# Declare the package name.
+atlas_subdir( DumpGeo )
+
+# External dependencies:
+find_package( GeoModel COMPONENTS GeoModelKernel GeoModelDBManager
+   GeoModelWrite )
+
+# Component(s) in the package.
+atlas_add_component( DumpGeo
+   DumpGeo/*.h src/*.cxx src/components/*.cxx
+   PRIVATE_INCLUDE_DIRS ${GEOMODEL_INCLUDE_DIRS}
+   LINK_LIBRARIES ${GEOMODEL_LIBRARIES} AthenaBaseComps AthenaKernel CxxUtils GaudiKernel GeoModelUtilities )
+
+# Install files from the package.
+atlas_install_python_modules( python/*.py POST_BUILD_CMD ${ATLAS_FLAKE8} )
diff --git a/DetectorDescription/GeoModel/GeoModelStandalone/DumpGeo/DumpGeo/ATLAS_CHECK_THREAD_SAFETY b/DetectorDescription/GeoModel/DumpGeo/DumpGeo/ATLAS_CHECK_THREAD_SAFETY
similarity index 100%
rename from DetectorDescription/GeoModel/GeoModelStandalone/DumpGeo/DumpGeo/ATLAS_CHECK_THREAD_SAFETY
rename to DetectorDescription/GeoModel/DumpGeo/DumpGeo/ATLAS_CHECK_THREAD_SAFETY
diff --git a/DetectorDescription/GeoModel/GeoModelStandalone/DumpGeo/DumpGeo/DumpGeo.h b/DetectorDescription/GeoModel/DumpGeo/DumpGeo/DumpGeo.h
similarity index 74%
rename from DetectorDescription/GeoModel/GeoModelStandalone/DumpGeo/DumpGeo/DumpGeo.h
rename to DetectorDescription/GeoModel/DumpGeo/DumpGeo/DumpGeo.h
index b65f3fbf99690d79e77a122897ee9a188fbc281c..70862d0ebbdfba1210d8d932c71ff3b72a1eb524 100755
--- a/DetectorDescription/GeoModel/GeoModelStandalone/DumpGeo/DumpGeo/DumpGeo.h
+++ b/DetectorDescription/GeoModel/DumpGeo/DumpGeo/DumpGeo.h
@@ -28,29 +28,26 @@
 #include <string>
 #include <vector>
 
-class GeoExporter;
+// class GeoExporter;
 
 // Marked not thread-safe because GeoExporter uses VP1.
-class ATLAS_NOT_THREAD_SAFE DumpGeo: public AthAlgorithm,
-                                     public IIncidentListener
+class ATLAS_NOT_THREAD_SAFE DumpGeo: public AthAlgorithm
 {
  public:
   DumpGeo(const std::string& name, ISvcLocator* pSvcLocator) ATLAS_CTORDTOR_NOT_THREAD_SAFE;
   ~DumpGeo()=default;
 
   StatusCode initialize();
-  StatusCode execute();
-
-  void handle(const Incident& inc);
+  StatusCode execute() {return StatusCode::SUCCESS;};
 
  private:
-  IToolSvc* m_toolSvc;
-  GeoExporter* m_geoExporter;
-
   // Properties
   // -- Athena-related
   Gaudi::Property<std::string> m_atlasRelease{this, "AtlasRelease", "", "The current, in use Atlas release"}; 
-
+  Gaudi::Property<std::string> m_detDescrTag{this, "AtlasVersion", "", "The current, in use Atlas Detector Description Geometry TAG"}; 
+  Gaudi::Property<std::string> m_outFileName{this, "OutSQLiteFileName", "", "The name of the output SQLite file"}; 
+  Gaudi::Property<std::vector<std::string>> m_user_filterDetManagersList
+                                          { this, "UserFilterDetManager", {}, "Doc", "OrderedSet<T>"};
 };
 
 #endif
diff --git a/DetectorDescription/GeoModel/DumpGeo/README.md b/DetectorDescription/GeoModel/DumpGeo/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..3a23a3bd0791d74614ad3802fd7c0545a77bf573
--- /dev/null
+++ b/DetectorDescription/GeoModel/DumpGeo/README.md
@@ -0,0 +1,155 @@
+# DumpGeo - Dump the ATLAS GeoModel to a local file
+
+ * [Intro](#intro) 
+ * [Setup](#build)
+ * [Run](#run)
+ * [Documentation](#documentation)
+
+## Intro
+
+`DumpGeo` is an Athena algorithm inheriting from the class AthAlgorithm. It dumps the GeoModel tree resulting from any Geometry TAG into a local SQLite file.
+
+You can run these instructions on any `lxplus`-like machine (Alma9 or CC7) where the Athena framework is installed, or on macOS with Athena running inside a [a Lima container/light VM](https://atlassoftwaredocs.web.cern.ch/athena/lima/)
+
+
+## Setup 
+
+You should setup Athena, as usual; for example you can setup the latest build of the `24.0` release:
+
+```bash
+setupATLAS
+asetup Athena,24.0,latest
+```
+
+## Run
+
+`DumpGeo` has been migrated to the new Athena Component Accumulator (CA). A new `DumpGeoConfig.py` Python script configures the Athena algorithm.  
+
+`DumpGeo` can be used both as a shell command or as an Athena jobOption, in command-line or embedded within your own jobOption.
+
+### Basic use - Run as a terminal command
+
+After having set Athena, at the prompt, run the command:
+
+
+```sh
+python -m DumpGeo.DumpGeoConfig
+```
+
+this will dump the default geometry tag (the Run3 default tag `ATLAS-R3S-2021-03-02-00`, at the time of writing) into a local file named `geometry-ATLAS-R3S-2021-03-02-00.db`. The file name reflects the geometry tag that has been dumped.
+
+Optionally, you can specify which geometry tag to be dumped by using the `--detDescr` option; for example:
+
+```sh
+python -m DumpGeo.DumpGeoConfig --detDescr=ATLAS-R2-2016-01-00-01
+```
+
+After issuing the command, a file named `geometry-ATLAS-R2-2016-01-00-01.db` will be created in the run folder.
+
+
+### Run it as an Athena jobOption
+
+You can also run `dump-geo` as an Athena jobOption. For example:
+
+```bash
+athena DumpGeo/dump-geo.py -c "DetDescrVersion='ATLAS-R3-2021-01-00-00'"
+```
+
+You can even embed it into your own workflow, within your own jobOption.
+
+
+## Options
+
+### Overwrite the Output File
+
+By default, DumpGeo exits with an error when an output file with the same name is found.
+
+You can force to overwrite the output file with the `--forceOverwrite` or `-f` CLI options:
+
+```sh
+python -m DumpGeo.DumpGeoConfig -f
+```
+
+### Filter DetectorManagers
+
+The CLI option `--filterDetManagers` lets you filter over DetectorManagers.
+
+DetectorManagers are “containers” for subsystems. The class [GeoVDetectorManager](https://gitlab.cern.ch/GeoModelDev/GeoModel/-/blob/main/GeoModelCore/GeoModelKernel/GeoModelKernel/GeoVDetectorManager.h?ref_type=heads) holds all the TreeTops (the top volumes) of a subsystem together.
+
+By using the `--filterDetManagers` CLI option, you can dump the TreeTops belonging to a single DetectorManager:
+
+```sh
+python -m DumpGeo.DumpGeoConfig --filterDetManagers="InDetServMat"
+```
+
+Or you can dump all TreeTops from multiple managers, passing a comma-separated list:
+
+```sh
+python -m DumpGeo.DumpGeoConfig --filterDetManagers="BeamPipe,InDetServMat"
+```
+
+The output file will reflect the content of the GeoModel tree:
+
+```sh
+geometry-ATLAS-R3S-2021-03-02-00-BeamPipe-InDetServMat.db
+```
+
+When dumping the content of a DetectorManager, DumpGeo picks the TreeTop volumes and gets their default Transform (no alignments). 
+
+It then adds to the output “world” volume a GeoNameTag with the name of the DetectorManager, and then it adds the TreeTop volumes to it with their Transforms ahead of them.
+
+```
+World
+|
+|-- GeoNameTag("BeamPipe")
+|
+|-- GeoTransform -- TT1
+|-- GeoVPhysVol -- TT1
+|
+|-- GeoTransform -- TT2
+|-- GeoVPhysVol -- TT2
+|
+|-- GeoTransform -- TT3
+|-- GeoVPhysVol -- TT3
+```
+
+In that way, we get meaningful, comprehensive checkboxes when visualizing the output tree from the SQLite file in [GMEX](https://gitlab.cern.ch/GeoModelDev/GeoModel/-/tree/master/GeoModelVisualization) ([GeoModelExplorer](https://gitlab.cern.ch/GeoModelDev/GeoModel/-/tree/master/GeoModelVisualization))
+
+![a filtered GeoModel tree in GMEX](docs/img/gmex1.png)
+
+
+
+
+
+### Additional Options
+
+You can use all the common Athena flags to steer the dump mechanism. 
+
+With the new CA configuration, you can use the `--help` option to get the list of all available options. 
+
+```bash
+$ python -m DumpGeo.DumpGeoConfig --help
+```
+
+
+As soon as we add options to `DumpGeo`, you will get the new ones listed at the bottom of the “help” output, after the common Athena options
+
+```sh
+$ python -m DumpGeo.DumpGeoConfig --help
+
+[...Athena options...]
+
+--detDescr TAG                           The ATLAS geometry tag you want to dump (a convenience alias for the Athena flag 'GeoModel.AtlasVersion=TAG') (default: ATLAS-R3S-2021-03-02-00)
+
+--filterDetManagers FILTERDETMANAGERS    Only output the GeoModel Detector Managers specified in the FILTER list; input is a comma-separated list (default: None)
+
+-f, --forceOverwrite                     Force to overwrite an existing SQLite output file with the same name, if any (default: False)
+```
+
+
+## Documentation
+
+You can get more information about the GeoModel tree and the content of the output SQLite file on the GeoModel documentation website: https://cern.ch/geomodel
+
+ 
+
diff --git a/DetectorDescription/GeoModel/DumpGeo/docs/img/gmex1.png b/DetectorDescription/GeoModel/DumpGeo/docs/img/gmex1.png
new file mode 100644
index 0000000000000000000000000000000000000000..3ee366f405437fa43d3f2cc25077c0190547a440
Binary files /dev/null and b/DetectorDescription/GeoModel/DumpGeo/docs/img/gmex1.png differ
diff --git a/DetectorDescription/GeoModel/GeoModelStandalone/DumpGeo/python/DumpGeoConfig.py b/DetectorDescription/GeoModel/DumpGeo/python/DumpGeoConfig.py
similarity index 67%
rename from DetectorDescription/GeoModel/GeoModelStandalone/DumpGeo/python/DumpGeoConfig.py
rename to DetectorDescription/GeoModel/DumpGeo/python/DumpGeoConfig.py
index 9ef2f29b82e25b21393490500d45aa1cf07f9ea6..cbbbb8ab5c7563ccd8fc6515014c2111940ae87e 100644
--- a/DetectorDescription/GeoModel/GeoModelStandalone/DumpGeo/python/DumpGeoConfig.py
+++ b/DetectorDescription/GeoModel/DumpGeo/python/DumpGeoConfig.py
@@ -9,7 +9,6 @@ from AthenaConfiguration.ComponentAccumulator import ComponentAccumulator
 from AthenaConfiguration.ComponentFactory import CompFactory
 #from AthenaConfiguration.Enums import Format
 
-import os
 
 def configureGeometry(flags, cfg):
     if flags.Detector.GeometryBpipe:
@@ -67,7 +66,7 @@ def getATLASVersion():
         return os.environ["AtlasBaseVersion"]
     return "Unknown"
 
-def DumpGeoCfg(flags, name="DumpGeoCA", **kwargs):
+def DumpGeoCfg(flags, name="DumpGeoCA", outFileName="", **kwargs):
     # This is based on a few old-style configuation files:
     # JiveXML_RecEx_config.py
     # JiveXML_jobOptionBase.py
@@ -75,24 +74,18 @@ def DumpGeoCfg(flags, name="DumpGeoCA", **kwargs):
 
     #print(dir("args: ", args)) # debug
 
-    # set Alg's properties
+    
+    # set additional Alg's properties
+    # Note: at this point, the user-defined Geo TAG args.detDescr, 
+    #       if set, has already replaced the default GeoModel.AtlasVersion in 'flags';
+    #       so, we can use the latter, directly.
+    _logger.verbose("Using ATLAS/Athena version: %s", getATLASVersion())
+    _logger.verbose("Using GeoModel ATLAS version: %s", flags.GeoModel.AtlasVersion)
     kwargs.setdefault("AtlasRelease", getATLASVersion())
-
-    # TODO: Fix this
-    # This is a temporary hack to reflect how detDescr is handled in the old bash-driven DumpGeo
-    # This should be replaced by proper python flags and Gaudy properties
-    if args.detDescr:
-        os.environ["DUMPGEODETDESCRTAG"] = args.detDescr # save to an env var, for later use in GeoModelStandalone/GeoExporter
-        print("+ DumpGeo -- INFO -- This is the Detector Description geometry TAG you are dumping: '%s'" % args.detDescr)
-    if args.forceOverwrite is True:
-        print("+ DumpGeo -- NOTE -- You chose to overwrite an existing geometry dump file with the same name, if present.")
-        os.environ["DUMPGEOFORCEOVERWRITE"] = "1" # save to an env var, for later use in GeoModelStandalone/GeoExporter
-    # if args.filterTreeTops:
-    #     print("+ DumpGeo -- NOTE -- Your 'GeoModel TreeTop' filter set: '%s'" % args.filterTreeTops)
-    #     os.environ["DUMPGEOFILTERTREETOPS"] = args.filterTreeTops # save to an env var, for later use in GeoModelStandalone/GeoExporter
+    kwargs.setdefault("AtlasVersion", flags.GeoModel.AtlasVersion)
+    kwargs.setdefault("OutSQLiteFileName", outFileName)
     if args.filterDetManagers:
-        print("+ DumpGeo -- NOTE -- Your 'GeoModel Detector Manager' filter set: '%s'" % args.filterDetManagers)
-        os.environ["DUMPGEOFILTERDETMANAGERS"] = args.filterDetManagers # save to an env var, for later use in GeoModelStandalone/GeoExporter
+        kwargs.setdefault("UserFilterDetManager", args.filterDetManagers.split(","))
 
 
     the_alg = CompFactory.DumpGeo(name="DumpGeoAlg", **kwargs)
@@ -101,6 +94,7 @@ def DumpGeoCfg(flags, name="DumpGeoCA", **kwargs):
 
 
 if __name__=="__main__":
+    import os, sys
     # Run with e.g. python -m DumpGeo.DumpGeoConfig --detDescr=<ATLAS-geometry-tag> --filter=[<list of tree tops>]
     
     # from AthenaConfiguration.Enums import Format
@@ -115,8 +109,18 @@ if __name__=="__main__":
 
     from AthenaConfiguration.AllConfigFlags import initConfigFlags
     flags = initConfigFlags()
-    flags.Exec.MaxEvents = 1 
-    # ^ We only need one event to get the GeoModel tree from the GeoModelSvc (even less, we don't need any event at all!)
+    flags.Exec.MaxEvents = 0 
+    # ^ We do not need any events to get the GeoModel tree from the GeoModelSvc.
+    # So, we don't need to run on any events, 
+    # and we don't need to trigger any execute() Athena methods either. 
+    # So, we set 'EvtMax' to 0 and Athena will skip the 'execute' phase; 
+    # only the 'finalize' step will be run after the 'init'.
+    # -- Note: 
+    # Also, if we run on events (even on 1 event) and we dump the Pixel 
+    # as part of the  DetectorManager filter, then we get a crash because 
+    # the PixelDetectorManager complains during the 'execute' phase, 
+    # because we 'stole' a handle on its TreeTop, 
+    # which contains a FullPhysVol and cannot be shared.
     flags.Concurrency.NumThreads = 0 
     # ^ VP1 will not work with the scheduler, since its condition/data dependencies are not known in advance
     # More in details: the scheduler needs to know BEFORE the event, what the dependencies of each Alg are. 
@@ -138,14 +142,13 @@ if __name__=="__main__":
 
     if 'help' in args:
         # No point doing more here, since we just want to print the help.
-        import sys
         sys.exit()
 
     _logger.verbose("+ About to set flags related to the input")
 
     # Empty input is not normal for Athena, so we will need to check 
     # this repeatedly below (the same as with VP1)
-    vp1_empty_input = False  
+    dumpgeo_empty_input = False  
     # This covers the use case where we launch DumpGeo
     # without input files; e.g., to check the detector description
     if (flags.Input.Files == [] or 
@@ -153,7 +156,7 @@ if __name__=="__main__":
         from Campaigns.Utils import Campaign
         from AthenaConfiguration.TestDefaults import defaultGeometryTags
 
-        vp1_empty_input = True
+        dumpgeo_empty_input = True
         # NB Must set e.g. ConfigFlags.Input.Runparse_args() Number and
         # ConfigFlags.Input.TimeStamp before calling the 
         # MainServicesCfg to avoid it attempting auto-configuration 
@@ -172,7 +175,7 @@ if __name__=="__main__":
         flags.Input.MCCampaign = Campaign.Unknown
         flags.GeoModel.AtlasVersion = defaultGeometryTags.RUN3
     _logger.verbose("+ ... Done")
-    _logger.verbose("+ empty input: '%s'" % vp1_empty_input)
+    _logger.verbose("+ empty input: '%s'" % dumpgeo_empty_input)
 
     _logger.verbose("+ detDescr flag: '%s'" % args.detDescr)
 
@@ -180,12 +183,12 @@ if __name__=="__main__":
     _logger.verbose("+ About to set the detector flags")
     # So we can now set up the geometry flags from the input
     from AthenaConfiguration.DetectorConfigFlags import setupDetectorFlags
-    setupDetectorFlags(flags, None, use_metadata=not vp1_empty_input,
+    setupDetectorFlags(flags, None, use_metadata=not dumpgeo_empty_input,
                        toggle_geometry=True, keep_beampipe=True)
     _logger.verbose("+ ... Done")
 
     if args.detDescr:
-        _logger.verbose("+ About to set a custom detector description tag")
+        _logger.verbose("+ About to set a custom user-defined detector description tag: '%s'" % args.detDescr)
         flags.GeoModel.AtlasVersion = args.detDescr
         _logger.verbose("+ ... Done")
 
@@ -193,6 +196,45 @@ if __name__=="__main__":
     # finalize setting flags: lock them.
     flags.lock()
 
+
+
+    # Handle the user's inputs and create a file name 
+    # for the output SQLite, accordingly
+    outFileName = "geometry"
+    filterDetManagers = []
+    # - Put Geometry TAG into the file name
+    # NOTE: at this point, the user-defined Geo TAG args.detDescr, 
+    #       if set, has already replaced the default TAG in 'flags';
+    #       so, we can use the latter, directly.
+    geoTAG = flags.GeoModel.AtlasVersion
+    print("+ DumpGeo -- INFO -- This is the Detector Description geometry TAG you are dumping: '%s'" % geoTAG)
+    outFileName = outFileName + "-" + geoTAG
+    # - Put DetectorManagers' names into the file name, 
+    #   if the filter has been used by the user
+    if args.filterDetManagers:
+        print("+ DumpGeo -- NOTE -- Your 'GeoModel Detector Manager' filter set: '%s'" % args.filterDetManagers)
+        filterDetManagers = args.filterDetManagers.split(",")
+        outFileName = outFileName + "-" + "-".join(filterDetManagers)
+    # - Add the final extension to the name of the output SQLite file 
+    outFileName = outFileName + ".db"
+
+    # Overwrite the output SQLite file, if existing
+    if os.path.exists(outFileName):
+        if args.forceOverwrite is True:
+            print("+ DumpGeo -- NOTE -- You chose to overwrite an existing geometry dump file with the same name, if present.")
+            # os.environ["DUMPGEOFORCEOVERWRITE"] = "1" # save to an env var, for later use in GeoModelStandalone/GeoExporter
+            # Check if the file exists before attempting to delete it   
+            if os.path.exists(outFileName):
+                os.remove(outFileName)
+                print(f"The file {outFileName} has been deleted.")
+            else:
+                print(f"The file {outFileName} does not exist. So, it was not needed to 'force-delete' it. Continuing...")
+        else:
+            print(f"\nDumpGeo -- ERROR! The ouput file '{outFileName}' exists already!\nPlease move or remove it, or use the 'force' option: '-f' or '--forceOverWrite'.\n\n")
+            sys.exit()
+            #raise ValueError("The output file exists already!")
+
+
     # DEBUG -- inspect the flags
     # flags.dump()
     # flags._loadDynaFlags('GeoModel')
@@ -210,6 +252,6 @@ if __name__=="__main__":
     _logger.verbose("+ ...Done")
 
     # configure DumpGeo
-    cfg.merge(DumpGeoCfg(flags, args)) 
+    cfg.merge(DumpGeoCfg(flags, args, outFileName=outFileName)) 
     cfg.run()
 
diff --git a/DetectorDescription/GeoModel/GeoModelStandalone/DumpGeo/share/dump-geo_jobOptions_TileGeometry.py b/DetectorDescription/GeoModel/DumpGeo/share/dump-geo_jobOptions_TileGeometry.py
similarity index 98%
rename from DetectorDescription/GeoModel/GeoModelStandalone/DumpGeo/share/dump-geo_jobOptions_TileGeometry.py
rename to DetectorDescription/GeoModel/DumpGeo/share/dump-geo_jobOptions_TileGeometry.py
index 4232539c0559081fb5b9826394d2b70c7e54fed5..67148da6ac12ca8dbe419018996f82b88c632820 100644
--- a/DetectorDescription/GeoModel/GeoModelStandalone/DumpGeo/share/dump-geo_jobOptions_TileGeometry.py
+++ b/DetectorDescription/GeoModel/DumpGeo/share/dump-geo_jobOptions_TileGeometry.py
@@ -1,4 +1,7 @@
-# 
+
+#TODO: This test script should be updated to CA! (R.M.B.)
+
+
 # An Athena job to dump a specific, custom TileCal geometry tag
 # to a standalone GeoModel SQLite `.db` file. 
 # The .db file can then be visualized with the GeoModel's 
diff --git a/DetectorDescription/GeoModel/DumpGeo/src/DumpGeo.cxx b/DetectorDescription/GeoModel/DumpGeo/src/DumpGeo.cxx
new file mode 100755
index 0000000000000000000000000000000000000000..4587086d4976e5705eefb72451903091167183f7
--- /dev/null
+++ b/DetectorDescription/GeoModel/DumpGeo/src/DumpGeo.cxx
@@ -0,0 +1,250 @@
+/*
+  Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration
+*/
+
+//  Initial version:                                       
+//  - 2017, Sep -- Riccardo Maria BIANCHI                  
+//                 <riccardo.maria.bianchi@cern.ch>        
+//  Main updates:                                          
+//  - 2024, Feb -- Riccardo Maria BIANCHI                  
+//                 <riccardo.maria.bianchi@cern.ch>   
+//                 Moved configuration to ComponentAccumulator (CA)   
+//  - 2024, Mar -- Riccardo Maria BIANCHI                  
+//                 <riccardo.maria.bianchi@cern.ch> 
+//                 Removed GeoExporter, moved all to DumpGeo     
+
+#include "DumpGeo/DumpGeo.h"
+
+// Athena includes
+#include "AthenaBaseComps/AthMsgStreamMacros.h"
+#include "GeoModelUtilities/GeoModelExperiment.h"
+#include "GeoModelInterfaces/IGeoDbTagSvc.h"
+
+// From GeoModel in Externals
+#include "GeoModelKernel/GeoVolumeCursor.h"
+#include "GeoModelKernel/GeoVDetectorManager.h"
+#include "GeoModelKernel/GeoBox.h"
+#include "GeoModelDBManager/GMDBManager.h"
+#include "GeoModelWrite/WriteGeoModel.h"
+// #include "GeoModelHelpers/defineWorld.h" // not available in 24.0... 
+
+// C++ includes
+#include <vector>
+#include <stdexcept>
+#include <iostream>
+#include <iomanip>
+#include <cstdlib> //For setenv
+
+// Units
+#include "GeoModelKernel/Units.h"
+#define UNITS GeoModelKernelUnits  // so we can use, e.g., 'UNITS::cm'
+
+
+//TODO: replace this with GeoModelHelpers/defineWorld.h
+//_____________________________________________________________________________________
+GeoPhysVol* createTheWorld()
+{
+  // Define the units
+  #define gr   UNITS::gram
+  #define mole UNITS::mole
+  #define cm3  UNITS::cm3
+
+  // Define the chemical elements
+  GeoElement*  Nitrogen = new GeoElement ("Nitrogen" ,"N"  ,  7.0 ,  14.0067 *gr/mole);
+  GeoElement*  Oxygen   = new GeoElement ("Oxygen"   ,"O"  ,  8.0 ,  15.9995 *gr/mole);
+  GeoElement*  Argon    = new GeoElement ("Argon"    ,"Ar" , 18.0 ,  39.948  *gr/mole);
+  GeoElement*  Hydrogen = new GeoElement ("Hydrogen" ,"H"  ,  1.0 ,  1.00797 *gr/mole);
+
+  // Define the materials
+  double densityOfAir=0.001214 *gr/cm3;
+  GeoMaterial *air = new GeoMaterial("Air", densityOfAir);
+  air->add(Nitrogen  , 0.7494);
+  air->add(Oxygen, 0.2369);
+  air->add(Argon, 0.0129);
+  air->add(Hydrogen, 0.0008);
+  air->lock();
+ 
+  //-----------------------------------------------------------------------------------//
+  // create the world volume container and
+  // get the 'world' volume, i.e. the root volume of the GeoModel tree
+  const GeoBox* worldBox = new GeoBox(1000*UNITS::cm, 1000*UNITS::cm, 1000*UNITS::cm);
+  const GeoLogVol* worldLog = new GeoLogVol("WorldLog", worldBox, air);
+  GeoPhysVol* world = new GeoPhysVol(worldLog);
+  return world;
+}
+
+//____________________________________________________________________
+DumpGeo::DumpGeo(const std::string& name, ISvcLocator* svcLocator):
+  AthAlgorithm(name, svcLocator)
+{}
+
+//____________________________________________________________________
+StatusCode DumpGeo::initialize()
+{
+  ATH_MSG_DEBUG("in initialize()");
+
+  ATH_MSG_INFO("===================================================");
+  ATH_MSG_INFO("\t\tLaunching DumpGeo");
+  ATH_MSG_INFO("===================================================");
+
+  ATH_MSG_INFO("Accessing the ATLAS geometry...");
+  const GeoModelExperiment * theExpt = nullptr;
+  ATH_CHECK(detStore()->retrieve(theExpt,"ATLAS"));
+
+  // Get the ATLAS GEOMETRY 'World' volume
+  PVConstLink world(theExpt->getPhysVol());
+
+  ATH_MSG_INFO("User filter DetectorManagers: " << m_user_filterDetManagersList);
+
+  ServiceHandle<IGeoDbTagSvc> geoDbTag("GeoDbTagSvc", name());
+  ATH_CHECK(geoDbTag.retrieve());
+  ATH_MSG_INFO("This is the Geometry TAG we are dumping: " << geoDbTag->atlasVersion());
+
+
+  GeoPhysVol* volTop = createTheWorld();
+
+
+  if ( !(m_user_filterDetManagersList.empty()) ) {
+    
+    // Get list of managers
+    // We fill a set from the output vector, 
+    // so we can use its built-in 'count' method later,
+    // to search for DetManagers
+    ATH_MSG_INFO("List of GeoModel Detector Managers: ");
+    std::set<std::string> managersList{};
+    {
+       std::vector<std::string> blub = theExpt->getListOfManagers();
+       managersList.insert(blub.begin(), blub.end());
+    }    
+
+    // Convert the list of det managers passed by the user into a set
+    std::set<std::string> user_managersList{};
+    {
+       user_managersList.insert(m_user_filterDetManagersList.begin(), m_user_filterDetManagersList.end());
+    }
+
+    // safety check: 
+    // check that all DetManagers requested by the user are in the list
+    // If not, print an error message to warn the user and return
+    for (auto& userDet : user_managersList) {
+      if ( !managersList.count(userDet)) {
+        ATH_MSG_FATAL("This Detector Manager you requested to dump is not in the list of DetectorManagers for the geometry tag you are using: " << userDet);
+        throw GaudiException("The Detector Manager you requested to dump is not in the list of DetectorManagers.", 
+                                    "DumpGeo", StatusCode::FAILURE);
+      }
+    }
+
+
+    if ( !(managersList.empty()) ) {
+    for (auto const& mm : managersList)
+      {
+          // get the DetectorManager
+          const GeoVDetectorManager* manager = theExpt->getManager(mm);
+
+          // get the name of the DetectorManager
+          std::string detManName = manager->getName();
+          ATH_MSG_INFO("\tDetectorManager: " << detManName);
+
+          // get the DetManager's TreeTops
+          unsigned int nTreetops = manager->getNumTreeTops();
+          ATH_MSG_INFO("\t" << mm << " - # TreeTops: " << nTreetops);
+
+          // if ( nTreetops > 0) &&  isStringInVector(m_user_filterDetManagersList, detManName) ) {
+          if ( ( nTreetops > 0) &&  user_managersList.count(detManName) ) {
+              
+              for(unsigned int i=0; i < nTreetops; ++i) {
+
+                  PVConstLink treetop(manager->getTreeTop(i));
+
+                  // get treetop's volume
+                  const GeoVPhysVol* vol = treetop;
+                  
+                  // get volume's transform
+                  // NOTE: we use getDefX() to get the transform without any alignment
+                  GeoTransform* volXf = new GeoTransform( vol->getDefX() );
+                  
+                  // get volume's logvol's name
+                  std::string volName = vol->getLogVol()->getName();
+                  ATH_MSG_DEBUG("\t\t treetop: " << volName);
+
+
+                  // Add to the main volume a GeoNameTag with the name of the DetectorManager 
+                  volTop->add(new GeoNameTag(detManName));
+                  // add Transform and Volume to the main PhysVol
+                  volTop->add(volXf);
+                  volTop->add(const_cast<GeoVPhysVol*>(vol));
+
+                  if (msgLvl (MSG::DEBUG)) {
+                    // DEBUG: dive into the Treetop
+                    if ("BeamPipe"==detManName) {
+                      GeoVolumeCursor av(treetop);
+                      while (!av.atEnd()) {
+                          ATH_MSG_DEBUG("\t\ttreetop n." << i << " - child name: "  << av.getName());
+                          av.next(); // increment volume cursor.
+                      } // end while
+                    }
+                  }
+              } // end for
+          } // end if
+      } // end for
+    }
+  } 
+
+  // DEBUG inspection
+  if (msgLvl (MSG::DEBUG)) {
+    ATH_MSG_DEBUG("Looping over top volumes in the GeoModel tree (children of the 'World' volume)...");
+    GeoVolumeCursor av(world);
+    while (!av.atEnd()) {
+      std::string volname = av.getName();
+      ATH_MSG_DEBUG("\t* relevant NameTag:" << volname);
+      av.next(); // increment volume cursor.
+    }
+  }
+
+  ATH_MSG_INFO("Creating the SQLite DB file...");
+  if ( m_outFileName.empty()) {
+    ATH_MSG_FATAL("The name of the output SQLite file is not set!");
+    throw GaudiException("The name of the output SQLite file is not set!", 
+                                "DumpGeo", StatusCode::FAILURE);
+  }
+  ATH_MSG_INFO("Output file name: " << m_outFileName);
+
+  // open the DB connection
+  GMDBManager db(m_outFileName);
+
+  // check the DB connection
+  if (db.checkIsDBOpen())
+      ATH_MSG_INFO("OK! Database is open!");
+  else {
+      ATH_MSG_ERROR(" ***** Database ERROR!! Exiting..." );      
+              throw GaudiException("The GeoModel SQLite .db file could not be opened successfully.", 
+                                    "DumpGeo", StatusCode::FAILURE);
+  }
+
+  ATH_MSG_INFO("Traversing the GeoModel tree...");
+    // Dump the tree volumes into a DB
+    GeoModelIO::WriteGeoModel dumpGeoModelGraph(db); // init the GeoModel node action
+    // visit all GeoModel nodes  
+    if ( !(m_user_filterDetManagersList.empty()) ) {
+      volTop->exec(&dumpGeoModelGraph); 
+    } else {
+      world->exec(&dumpGeoModelGraph); 
+    }
+  ATH_MSG_INFO("Saving the GeoModel tree to the DB...");
+    dumpGeoModelGraph.saveToDB(); // save to the SQlite DB file
+    ATH_MSG_ALWAYS("DONE. Geometry saved to " << m_outFileName);
+
+  // Quick test if DEBUG
+  if (msgLvl (MSG::DEBUG)) {
+    ATH_MSG_DEBUG("Test - list of all the GeoMaterial nodes in the persistified geometry:");
+    db.printAllMaterials();
+    ATH_MSG_DEBUG("Test - list of all the GeoElement nodes in the persistified geometry:");
+    db.printAllElements();
+  }
+
+  ATH_MSG_DEBUG("End of DumpGeo::init().");
+  return StatusCode::SUCCESS;
+}
+
+
+
diff --git a/DetectorDescription/GeoModel/GeoModelStandalone/DumpGeo/src/components/DumpGeo_entries.cxx b/DetectorDescription/GeoModel/DumpGeo/src/components/DumpGeo_entries.cxx
similarity index 100%
rename from DetectorDescription/GeoModel/GeoModelStandalone/DumpGeo/src/components/DumpGeo_entries.cxx
rename to DetectorDescription/GeoModel/DumpGeo/src/components/DumpGeo_entries.cxx
diff --git a/DetectorDescription/GeoModel/GeoModelStandalone/DumpGeo/CMakeLists.txt b/DetectorDescription/GeoModel/GeoModelStandalone/DumpGeo/CMakeLists.txt
deleted file mode 100644
index 9ef8aee98477e24cf11ef382991242ae1b1df330..0000000000000000000000000000000000000000
--- a/DetectorDescription/GeoModel/GeoModelStandalone/DumpGeo/CMakeLists.txt
+++ /dev/null
@@ -1,14 +0,0 @@
-# Copyright (C) 2002-2022 CERN for the benefit of the ATLAS collaboration
-
-# Declare the package name.
-atlas_subdir( DumpGeo )
-
-# Component(s) in the package.
-atlas_add_component( DumpGeo
-   DumpGeo/*.h src/*.cxx src/components/*.cxx
-   LINK_LIBRARIES AthenaBaseComps CxxUtils GaudiKernel GeoExporter )
-
-# Install files from the package.
-atlas_install_python_modules( python/*.py POST_BUILD_CMD ${ATLAS_FLAKE8} )
-atlas_install_joboptions( share/*.py )
-atlas_install_scripts( share/dump-geo )
diff --git a/DetectorDescription/GeoModel/GeoModelStandalone/DumpGeo/README.md b/DetectorDescription/GeoModel/GeoModelStandalone/DumpGeo/README.md
deleted file mode 100644
index 6982c4b0dd697c61b3460abe51046571a06d936c..0000000000000000000000000000000000000000
--- a/DetectorDescription/GeoModel/GeoModelStandalone/DumpGeo/README.md
+++ /dev/null
@@ -1,88 +0,0 @@
-# DumpGeo - Dump the ATLAS GeoModel to a local file
-
- * [Intro](#intro) 
- * [Setup](#build)
- * [Run](#run)
- * [Documentation](#documentation)
-
-## Intro
-
-`DumpGeo` is an Athena algorithm inheriting from the class AthAlgorithm. Internally, it calls the package `GeoExporter` to dump 
-the ATLAS GeoModel into a local file.
-
-*Note:* At the time of writing, the local file is a SQLite file).
-
-Instructions are meant for a `lxplus`-like machine (CC7 or SLC6) where the Athena framework is installed.
-
-
-
-
-## Setup 
-
-You should setup Athena, as usual; for example you can setup the latest nightly build:
-
-```bash
-setupATLAS
-asetup Athena,main,latest
-```
-
-## Run
-
-`dump-geo` can be used both as a shell command or as an Athena jobOption, in command-line or embedded within your own jobOption.
-
-### Basic use - Run as a terminal command
-
-At the prompt, run the command `dump-geo`:
-
-```bash
-dump-geo
-```
-
-this will dump the default geometry tag (`ATLAS-R2-2016-01-00-01`, at the time of writing) 
-into a local file named `geometry-TAG.db`, where `TAG` is replaced with the specific geometry tag being dumped .
-
-Optionally, you can specify which geometry tag to be dumped by adding the `-detdescr` flag and a valid ATLAS Geometry tag; for example:
-
-```bash
-dump-geo -detdescr=ATLAS-R2-2016-01-00-01
-```
-
-After issueing the command, a file named `geometry-ATLAS-R2-2016-01-00-01.db` will be created in the run folder.
-
-
-### Run it as an Athena jobOption
-
-You can also run `dump-geo` as an Athena jobOption. For example:
-
-```bash
-athena DumpGeo/dump-geo.py -c "DetDescrVersion='ATLAS-R3-2021-01-00-00‘"
-```
-
-You can even embed it into your own workflow, within your own jobOption.
-
-
-### Additional Options
-
-You can use several flags to steer the dump mechanism. You can see the full list of options by using:
-
-```bash
-dump-geo -h
-```
-
-For example, you can exclude subsystems by using the `-noX` flags, for example:
-
-```bash
-dump-geo -nomuon -nocalo
-```
-
-will dump only the Inner Detector geometry into the output `geometry.db` file.
-
-## Documentation
-
-You can take a look at the full documentation for `dump-geo`:
-
-- by running `dump-geo -h`
-- by visiting the GeoModel documentation website: https://cern.ch/geomodel
-
- 
-
diff --git a/DetectorDescription/GeoModel/GeoModelStandalone/DumpGeo/share/dump-geo b/DetectorDescription/GeoModel/GeoModelStandalone/DumpGeo/share/dump-geo
deleted file mode 100755
index 0c5dd4c8525e9fa493c8f791b052a01b977551fb..0000000000000000000000000000000000000000
--- a/DetectorDescription/GeoModel/GeoModelStandalone/DumpGeo/share/dump-geo
+++ /dev/null
@@ -1,1036 +0,0 @@
-#/*
-#  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
-#*/
-
-#!/bin/bash
-
-# Script for constructing command which launches athena on dump-geo.py, with appropriate options (in the -c flag).
-#
-# Author: Riccardo.Maria.Bianchi@cern.ch
-# First useful version: July 2007 by Thomas.Kittelmann@cern.ch
-# Modified: Manuel.Proissl@cern.ch, March 2012
-#           Riccardo.Maria.Bianchi@cern.ch, November 2012
-#           Edward.Moyse@cern.ch, multiple times
-#           Riccardo.Maria.Bianchi@cern.ch, November 2017
-#
-#
-# Questions: Riccardo.Maria.Bianchi@cern.ch
-#
-#
-
-
-echo
-echo "******************************************"
-echo
-echo "      Launching ATLASGeoModelDumper"
-echo
-echo "******************************************"
-echo
-echo
-
-##################################################
-#echo "*** VP1 NOTE *** setting QT_PLUGIN_PATH to '${QTDIR}/plugins', to let Qt load the SQLite driver"
-#export QT_PLUGIN_PATH=$QTDIR/plugins
-#echo;
-##################################################
-
-
-#fixme/todo: option to start cruise mode (event/second)
-#fixme/todo: MULTINP_CPY should not be allowed to exist beforehand!
-
-APP=`basename $0`
-
-which athena.py > /dev/null 2>&1
-if [ $? != 0 ]; then
-    echo "Error: athena.py not found in path. Please setup atlas release before running"
-    exit 1
-fi
-
-#######################################
-##       Init config variables       ##
-#######################################
-
-LIVEDEFAULTDETDESCR="ATLAS-GEO-10-00-00"
-LIVEDEFAULTGLOBCOND="COMCOND-BLKPST-004-01"
-LIVEDEFAULTEVENTSRC="https://atlas-live.cern.ch/event_files/L1MinBias/vp1fileinfo.txt"
-LIVELOCALDEFAULTEVENTSRC="/VP1_events/"
-LIVEBOOTSTRAPSRC="http://atlas-vp1.web.cern.ch/atlas-vp1/live-bootstrap/vp1fileinfo.txt"
-
-declare -a FILES_POOLROOT
-declare -a FILES_VP1CFG
-declare -a FILES_EXTRAJOBOPT
-declare -a LOCALEVENTSRC_AVAILDIRS
-
-FLAG_FORCE=0
-FLAG_HELP=0
-FLAG_DATA=0
-FLAG_TEST=0
-ERRORS=""
-DETDESCRTAG=""
-TREETOPFILTER=""
-GLOBCONDTAG=""
-SKIPEVENTS=""
-CRUISEWITHTIME=""
-LOGFILE=""
-FILTEREVENTS=""
-FLAG_LARHV=0
-# FLAG_FULLTOROIDS=0
-FLAG_DONTCLEANUPEVTCPY=0
-FLAG_LIVE=0
-FLAG_LIVELOCAL=0
-FLAG_NOAUTOCONF=0
-FLAG_TRIG=0
-FLAG_NOCALO=0
-FLAG_NOID=0
-FLAG_NOSORTDBREPLICAS=0
-FLAG_NOMUON=0
-FLAG_LUCID=0
-FLAG_ONLINE=0
-FLAG_ALFA=0
-FLAG_FORWARD_REGION=0
-FLAG_ZDC=0
-FLAG_CAVERN=0
-FLAG_TOYDETECTOR=0
-FLAG_GEOMODELSTATS=0
-FLAG_EXTRAPOLATOR=0
-FLAG_SPACEPOINTS=0
-FLAG_NOEXTRAPOLATOR=0
-FLAG_FITTER=0
-FLAG_MC=0
-FLAG_FATRAS=0
-FLAG_NOGUI=0
-FLAG_CGAL=0
-FLAG_SLHC=0
-MUONLAYOUT=""
-NSWAGDDFILE=""
-MUONAGDDFILE=""
-FLAG_CUSTOMGEOMETRY=0
-MULTINP_SRC=""
-MULTINP_CPY=""
-TRIGDEC=""
-FATRAS_TRUTHKEY=""
-ATHENAOPTS=""
-
-#######################################
-##    Decode command line options    ##
-#######################################
-
-i=1
-while [ $i -le $# ]; do
-    arg=${!i}
-    if [ "x${arg}" == "x-h" -o "x${arg}" == "x-help" -o "x${arg}" == "x--help" ]; then
-	FLAG_HELP=1
-    elif [ "x${arg}" == "x-larhv" ]; then
-	FLAG_LARHV=1
-  #     elif [ "x${arg}" == "x-fulltoroids" ]; then
-  # FLAG_FULLTOROIDS=1
-    elif [ "x${arg}" == "x-live" ]; then
-	FLAG_LIVE=1
-    elif [ "x${arg}" == "x-livelocal" ]; then
-	FLAG_LIVELOCAL=1
-    elif [ "x${arg}" == "x-noautoconf" ]; then
-	FLAG_NOAUTOCONF=1
-    elif [ "x${arg}" == "x-nocleanupeventcpy" ]; then
-	FLAG_DONTCLEANUPEVTCPY=1
-    elif [ "x${arg}" == "x-data" ]; then
-	FLAG_DATA=1
-    elif [ "x${arg}" == "x-test" ]; then
-	FLAG_TEST=1
-    elif [ "x${arg}" == "x-trig" ]; then
-        FLAG_TRIG=1
-    elif [ "x${arg}" == "x-nocalo" ]; then
-	FLAG_NOCALO=1
-    elif [ "x${arg}" == "x-noid" ]; then
-	FLAG_NOID=1
-    elif [ "x${arg}" == "x-nomuon" ]; then
-	FLAG_NOMUON=1
-    elif [ "x${arg}" == "x-nosortdbreplicas" ]; then
-	FLAG_NOSORTDBREPLICAS=1
-    elif [ "x${arg}" == "x-lucid" ]; then
-	FLAG_LUCID=1
-    elif [ "x${arg}" == "x-online" ]; then
-	FLAG_ONLINE=1
-    elif [ "x${arg}" == "x-alfa" ]; then
-	FLAG_ALFA=1
-    elif [ "x${arg}" == "x-forwardregion" ]; then
-	FLAG_FORWARD_REGION=1
-    elif [ "x${arg}" == "x-zdc" ]; then
-	FLAG_ZDC=1
-    elif [ "x${arg}" == "x-cavern" ]; then
-	FLAG_CAVERN=1
-    elif [ "x${arg}" == "x-toy-detector" ]; then
-	FLAG_TOYDETECTOR=1
-    elif [ "x${arg}" == "x-geomodel-stats" ]; then
-	FLAG_GEOMODELSTATS=1
-    elif [ "x${arg}" == "x-spacepoints" ]; then
-	FLAG_SPACEPOINTS=1
-    elif [ "x${arg}" == "x-extrapolator" ]; then
-	FLAG_EXTRAPOLATOR=1
-    elif [ "x${arg}" == "x-noextrapolator" ]; then
-	FLAG_NOEXTRAPOLATOR=1
-    elif [ "x${arg}" == "x-refit" ]; then
-	FLAG_FITTER=1
-    elif [ "x${arg}" == "x-fatras" ]; then
-	FLAG_FATRAS=1
-    elif [ "x${arg}" == "x-nogui" ]; then
-	FLAG_NOGUI=1
-    elif [ "x${arg}" == "x-cgal" ]; then
-	FLAG_CGAL=1
-    elif [ "x${arg}" == "x-slhc" ]; then
-        echo "SLHC Mode: ON"
-        FLAG_SLHC=1
-    elif [ "x${arg}" == "x-customgeom" ]; then
-        FLAG_CUSTOMGEOMETRY=1
-    ## Muon Layout
-    elif [ "x${arg:0:11}" == "x-muonLayout" ]; then
-        ERRORS="$ERRORS\nNeed argument to -muonLayout. Example: -muonLayout=MuonSpectrometer-R.09.02.NSW"
-    elif [ "x${arg:0:12}" == "x-muonLayout=" ]; then
-        MUONLAYOUT=${arg:12:$((${#arg}-12))}
-        if [ "x$MUONLAYOUT" == "x" ]; then
-            ERRORS="$ERRORS\nNeed argument to -muonLayout. Example: -muonLayout=MuonSpectrometer-R.09.02.NSW"
-        fi
-    # Muon NSW AGDD custom file
-    elif [ "x${arg:0:8}" == "x-nswagdd" ]; then
-        ERRORS="$ERRORS\nNeed argument to -nswagdd. Example: -nswagdd=stations.v2.07.xml"
-    elif [ "x${arg:0:9}" == "x-nswagdd=" ]; then
-        NSWAGDDFILE=${arg:9:$((${#arg}-9))}
-        if [ "x$NSWAGDDFILE" == "x" ]; then
-            ERRORS="$ERRORS\nNeed argument to -nswagdd. Example: -nswagdd=stations.v2.07.xml"
-        fi
-    ###
-    # Muon NSW AGDD custom file
-    elif [ "x${arg:0:9}" == "x-muonagdd" ]; then
-        ERRORS="$ERRORS\nNeed argument to -muonagdd. Example: -muonagdd=muon_passive_r.08.01.xml"
-    elif [ "x${arg:0:10}" == "x-muonagdd=" ]; then
-        MUONAGDDFILE=${arg:10:$((${#arg}-10))}
-        if [ "x$MUONAGDDFILE" == "x" ]; then
-            ERRORS="$ERRORS\nNeed argument to -muonagdd. Example: -muonagdd=muon_passive_r.08.01.xml"
-        fi
-    ###
-    elif [ "x${arg}" == "x-mc" ]; then
-	FLAG_MC=1
-    elif [ "x${arg}" == "x-multinp" ]; then
-	echo "IGNORING OBSOlETE FLAG -multinp"
-    elif [ "x${arg:0:10}" == "x-detdescr=" ]; then
-	DETDESCRTAG=${arg:10:$((${#arg}-10))}
-	if [ "x$DETDESCRTAG" == "x" ]; then
-	    ERRORS="$ERRORS\nNeed argument to -detdescr"
-	fi
-    elif [ "x${arg:0:9}" == "x-treetop=" ]; then
-	TREETOPFILTER=${arg:9:$((${#arg}-9))}
-	if [ "x$TREETOPFILTER" == "x" ]; then
-	    ERRORS="$ERRORS\nNeed argument to -treetop"
-	fi
-    elif [ "x${arg}" == "x-f" ]; then
-    FLAG_FORCE=1
-    elif [ "x${arg:0:10}" == "x-globcond=" ]; then
-	GLOBCONDTAG=${arg:10:$((${#arg}-10))}
-	if [ "x$GLOBCONDTAG" == "x" ]; then
-	    ERRORS="$ERRORS\nNeed argument to -globcond"
-	fi
-    elif [ "x${arg:0:10}" == "x-eventsrc=" ]; then
-	MULTINP_SRC=${arg:10:$((${#arg}-10))}
-	if [ "x$MULTINP_SRC" == "x" ]; then
-	    ERRORS="$ERRORS\nNeed argument to -eventsrc"
-	fi
-    elif [ "x${arg:0:10}" == "x-eventcpy=" ]; then
-	MULTINP_CPY=${arg:10:$((${#arg}-10))}
-	if [ "x$MULTINP_CPY" == "x" ]; then
-	    ERRORS="$ERRORS\nNeed argument to -eventcpy"
-	fi
-    elif [ "x${arg:0:9}" == "x-logfile=" ]; then
-	LOGFILE=${arg:9:$((${#arg}-9))}
-	if [ "x$LOGFILE" == "x" ]; then
-	    ERRORS="$ERRORS\nNeed argument to -logfile"
-	fi
-    elif [ "x${arg:0:9}" == "x-trigdec=" ]; then
-	TRIGDEC=${arg:9:$((${#arg}-9))}
-	if [ "x$TRIGDEC" == "x" ]; then
-	    ERRORS="$ERRORS\nNeed argument to -trigdec"
-	fi
-    elif [ "x${arg:0:11}" == "x-fatraskey=" ]; then
-	FATRAS_TRUTHKEY=${arg:11:$((${#arg}-11))}
-	if [ "x$FATRAS_TRUTHKEY" == "x" ]; then
-	    ERRORS="$ERRORS\nNeed argument to -fatraskey"
-	fi
-    elif [ "x${arg:0:12}" == "x-skipevents=" ]; then
-	SKIPEVENTS=${arg:12:$((${#arg}-12))}
-	if [ "x$SKIPEVENTS" == "x" ]; then
-	    ERRORS="$ERRORS\nNeed argument to -skipevents"
-	else
-	    #check it is integer:
-	    echo "$SKIPEVENTS" | grep '^[0-9][0-9]*$' > /dev/null 2>&1 || \
-		ERRORS="$ERRORS\nArgument to -skipevents must be an integer!"
-	fi
-    elif [ "x${arg:0:11}" == "x-evtfilter=" ]; then
-	FILTEREVENTS=${arg:11:$((${#arg}-11))}
-	if [ "x$FILTEREVENTS" == "x" ]; then
-	    ERRORS="$ERRORS\nNeed argument to -evtfilter"
-	fi
-    elif [ "x${arg:0:8}" == "x-cruise=" ]; then
-	CRUISEWITHTIME=${arg:8:$((${#arg}-8))}
-	if [ "x$CRUISEWITHTIME" == "x" ]; then
-	    ERRORS="$ERRORS\nNeed argument to -cruise"
-	else
-	    #check it is integer:
-	    echo "$CRUISEWITHTIME" | grep '^[0-9][0-9]*$' > /dev/null 2>&1 || \
-		ERRORS="$ERRORS\nArgument to -cruise must be an integer!"
-	    if [ "x$CRUISEWITHTIME" == "x0" ]; then
-		ERRORS="$ERRORS\nArgument to -cruise must be positive!"
-	    fi
-	fi
-    elif [ "x${arg:0:12}" == "x-athenaopts=" ]; then
-	ATHENAOPTS=${arg:12:$((${#arg}-12))}
-	if [ "x$ATHENAOPTS" == "x" ]; then
-	    ERRORS="$ERRORS\nNeed argument to -athenaopts"
-	fi
-    elif [ "x${arg:0:16}" == "x-extraevtsrcdir=" ]; then
-	EXTRADIR=${arg:16:$((${#arg}-16))}
-	if [ "x$EXTRADIR" == "x" ]; then
-	    ERRORS="$ERRORS\nNeed argument to -extraevtsrcdir"
-	else
-	    if [ ! -d $EXTRADIR ]; then
-		ERRORS="$ERRORS\nExtra event source directory $EXTRADIR does not exist"
-	    else
-		LOCALEVENTSRC_AVAILDIRS[${#LOCALEVENTSRC_AVAILDIRS[@]}]="$EXTRADIR"
-	    fi
-	fi
-    elif [ ${#arg} -gt 4 -a ${arg:$((${#arg}-4)):4} == ".vp1" ]; then
-	FILES_VP1CFG[${#FILES_VP1CFG[@]}]="${arg}"
-    elif [ ${#arg} -gt 3 -a ${arg:$((${#arg}-3)):3} == ".py" ]; then
-	FILES_EXTRAJOBOPT[${#FILES_EXTRAJOBOPT[@]}]="${arg}"
-    elif [[ ${#arg} -gt 3 && ${arg:0:1} != "-" && ( "${arg}" =~ "^LFN:" || -f "${arg}" ) ]]; then
-	FILES_POOLROOT[${#FILES_POOLROOT[@]}]="${arg}"
-    elif [[ ${#arg} -gt 3 && ${arg:0:1} != "-" && ( "${arg}" =~ "^root:" || -f "${arg}" ) ]]; then
-	echo "Opening a file through the 'root://' protocol..."
-    FILES_POOLROOT[${#FILES_POOLROOT[@]}]="${arg}"
-    else
-	ERRORS="$ERRORS\nUnknown argument: ${arg}"
-    fi
-    i=$((i+1))
-done
-
-#live & noautoconf flags (AutoConfiguration is disabled in live and livelocal modes)
-
-if [ "x$FLAG_LIVE" == "x1" ]; then
-    if [ "x$FLAG_MC" == "x1" ]; then
-	echo "Notice: -mc flag is ignored when -live is present"
-    fi
-    FLAG_MC=0
-    if [ "x$FLAG_NOAUTOCONF" == "x1" ]; then
-	echo "Notice: -noautoconf flag is redundant when -live is present"
-    fi
-    FLAG_NOAUTOCONF=1
-    if [ "x$MULTINP_SRC" == "x" ]; then
-	MULTINP_SRC="$LIVEDEFAULTEVENTSRC"
-    fi
-    if [ "x$DETDESCRTAG" == "x" ]; then
-	DETDESCRTAG="$LIVEDEFAULTDETDESCR"
-    fi
-    if [ "x$GLOBCONDTAG" == "x" ]; then
-	GLOBCONDTAG="$LIVEDEFAULTGLOBCOND"
-    fi
-elif [ "x$FLAG_LIVELOCAL" == "x1" ]; then
-    if [ "x$FLAG_MC" == "x1" ]; then
-	echo "Notice: -mc flag is ignored when -livelocal is present"
-    fi
-    FLAG_MC=0
-    if [ "x$FLAG_NOAUTOCONF" == "x1" ]; then
-	echo "Notice: -noautoconf flag is redundant when -livelocal is present"
-    fi
-    FLAG_NOAUTOCONF=1
-    if [ "x$MULTINP_SRC" == "x" ]; then
-	MULTINP_SRC="$LIVELOCALDEFAULTEVENTSRC"
-    fi
-    if [ "x$DETDESCRTAG" == "x" ]; then
-	DETDESCRTAG="$LIVEDEFAULTDETDESCR"
-    fi
-    if [ "x$GLOBCONDTAG" == "x" ]; then
-	GLOBCONDTAG="$LIVEDEFAULTGLOBCOND"
-    fi
-elif [ "x$FLAG_NOAUTOCONF" == "x0" -a ${#FILES_POOLROOT[@]} != 0 ]; then
-    if [ "x$FLAG_MC" == "x1" ]; then
-	echo "Notice: -mc flag is ignored when autoconfiguration is ON"
-    fi
-    FLAG_MC=0
-fi
-
-if [ "x$FLAG_LIVE" == "x1" -a "x$FLAG_LIVELOCAL" == "x1" ]; then
-    ERRORS="$ERRORS\nDon't specify both -live and -livelocal"
-fi
-
-#default:
-if [ "x$MULTINP_SRC" != "x" -a "x$MULTINP_CPY" == "x" ]; then
-    MULTINP_CPY="/tmp/$USER/vp1events/$RANDOM"
-fi
-
-# Sanity checks:
-if [ "x$MULTINP_SRC" == "x" -a "x$FLAG_DONTCLEANUPEVTCPY" == "x1" ]; then
-    ERRORS="$ERRORS\nOption -nocleanupeventcpy can't be set without -eventsrc=..."
-fi
-if [ "x$MULTINP_CPY" != "x" -a "x$MULTINP_SRC" == "x" ]; then
-    ERRORS="$ERRORS\nOption -eventcpy=... can't be set without -eventsrc=..."
-fi
-
-if [ "x$FLAG_NOID" == "x1" -a "x$FLAG_FATRAS" == "x1" ]; then
-    ERRORS="$ERRORS\nOptions -fatras and -noid can not be applied simultaneously"
-fi
-
-if [ "x$FLAG_LIVE" == "x1" -a "x$FLAG_FATRAS" == "x1" ]; then
-    ERRORS="$ERRORS\nOptions -fatras and -live can not be applied simultaneously"
-fi
-
-if [ "x$FLAG_LIVELOCAL" == "x1" -a "x$FLAG_FATRAS" == "x1" ]; then
-    ERRORS="$ERRORS\nOptions -fatras and -livelocal can not be applied simultaneously"
-fi
-
-if [ "x$FLAG_NOID" == "x1" -a "x$FLAG_NOMUON" == "x1" -a "x$FLAG_EXTRAPOLATOR" == "x1" ]; then
-    ERRORS="$ERRORS\nOptions -extrapolator can not be used when both -noid and -nomuon are specified"
-fi
-if [ "x$FLAG_NOID" == "x1" -a "x$FLAG_SPACEPOINTS" == "x1" ]; then
-    ERRORS="$ERRORS\nOptions -spacepoints can not be used when -noid is specified"
-fi
-
-if [ "x$FLAG_EXTRAPOLATOR" == "x1" -a "x$FLAG_NOEXTRAPOLATOR" == "x1" ]; then
-    ERRORS="$ERRORS\nOptions -extrapolator and -noextrapolator are incompatible"
-fi
-
-if [ "x$FLAG_NOID" == "x1" -a "x$FLAG_NOMUON" == "x1" -a "x$FLAG_FITTER" == "x1" ]; then
-    ERRORS="$ERRORS\nOptions -refit can not be used when both -noid and -nomuon are specified"
-fi
-
-if [ "x$MULTINP_CPY" != "x" -a -f "$MULTINP_CPY" ]; then
-    ERRORS="$ERRORS\n-eventcpy directory set to point to a file"
-fi
-
-
-BOOTSTRAPNAME="bootstrapevent.pool.root"
-BOOTSTRAPFILE="${MULTINP_CPY}/${BOOTSTRAPNAME}"
-BOOTSTRAP_HTTPFILEINFO="${MULTINP_CPY}/bootstrap_fileinfo.txt"
-HTTPMODE=0
-if [ "x$MULTINP_SRC" != "x" ]; then
-    if [ ${#FILES_POOLROOT[@]} != 0 ]; then
-            #cant have input files.
-	ERRORS="$ERRORS\nPlease don't specify input files when running with -live, -livelocal or -eventsrc=..."
-    else
-	FILES_POOLROOT[${#FILES_POOLROOT[@]}]="${BOOTSTRAPFILE}"
-    fi
-    HTTPMODE=1
-    echo $MULTINP_SRC|grep '^https://..*/..*' > /dev/null || HTTPMODE=0
-    echo $MULTINP_SRC|grep '/$' > /dev/null && HTTPMODE=0
-    if [ "x$HTTPMODE" == "x0" ]; then
-	if [ "x$MULTINP_SRC" != "x" -a ! -d "$MULTINP_SRC" ]; then
-	    ERRORS="$ERRORS\n-eventsrc directory does not exists"
-	fi
-    elif [ "x$MULTINP_CPY" == "x$MULTINP_SRC" ]; then
-        #Fixme: Could probably cheat this check by presense of trailing '/', symlinks, etc.
-	ERRORS="$ERRORS\n-eventsrc and -eventcpy directories must be different"
-    fi
-fi
-
-if [ ${#LOCALEVENTSRC_AVAILDIRS[@]} != 0 ]; then
-    if [ "x$MULTINP_SRC" == "x" ]; then
-	ERRORS="$ERRORS\n-extraevtsrcdir can not be specified without the -eventsrc flag"
-    fi
-    if [ "x$HTTPMODE" != "x0" ]; then
-	ERRORS="$ERRORS\n-extraevtsrcdir can not be used when -eventsrc is not a local directory"
-    fi
-fi
-
-if [ "x$FLAG_EXTRAPOLATOR" == "x0" -a "x$FLAG_EXTRAPOLATOR" == "x0" ]; then
-    if [ "x$FLAG_NOID" == "x1" -a "x$FLAG_NOMUON" == "x1" ]; then
-	FLAG_NOEXTRAPOLATOR=1
-    else
-	FLAG_EXTRAPOLATOR=1
-    fi
-fi
-
-
-#######################################
-##  Possibly print out errors/usage  ##
-#######################################
-
-if [ "x$ERRORS" != "x" ]; then
-    echo "================================================================="
-    echo "Errors found in commandline options!!!:"
-    echo -e "$ERRORS"
-    echo
-    echo "Run 'vp1 -h' for more information"
-    echo "================================================================="
-    echo
-    exit 1
-fi
-
-if [ "x$FLAG_HELP" != "x0" ]; then
-    echo "The $APP script serves as a frontend which runs athena with vp1.py and appropriate options."
-    echo
-    echo "Usage: "
-    echo
-    echo "$APP [inputfiles] [configfiles] [jobopt fragments] [options]"
-    echo
-    echo "Config files must have the .vp1 extension, joboption fragments the .py extension,"
-    echo "and any other argument which doesn't start with '-' is assumed to be an input file if such a file exists."
-    echo
-    echo "  -h, -help, --help           : Display this help (with examples) and exit."
-    echo
-    echo "  -larhv                      : Access current LAr HV data in the DCS_OFL database via VP1 CaloReadout system."
-    echo "  -data                       : Input files are data [default assumption is simulation]. "
-    echo
-    echo "  -detdescr=TAG               : Set global geometry tag. Overrides autoconfiguration"
-    echo
-    echo "  -treetop=FILTER             : Only output the GeoModel Treetop specified in the FILTER list. "
-    echo "                                Format is a comma separated list of event numbers"
-    echo
-    echo "  -f                          : 'force' mode, which overwrites the geometry dump file in the local folder, if present."
-    echo
-    echo "  -globcond=TAG               : Set global conditions tag. Overrides autoconfiguration"
-    echo
-    echo "  -skipevents=N               : Skip the first N events"
-    echo
-    echo "  -evtfilter=FILTER           : Only process the events in the FILTER list."
-    echo "                                Format is a comma separated list of event numbers, where each event number"
-    echo "                                can optionally be preceeded by a run number and a semicolon."
-    echo "                                Prepend the list with an \"%\" to instead exclude the events."
-    echo "                                For instance select a few nice events like: --evtfilter=7,13,5300:26,54"
-    echo "                                Or exclude a few bad events like: --evtfilter=%7,13,5300:26,54"
-    echo
-    echo "                                It is also possible to specify one or more files with run/evt numbers"
-    echo "                                specified on each line: --evtfilter=%7,13,5300:26,54,myevents1.txt"
-    echo
-    echo "  -cruise=N                   : Start in cruise mode, changing events after N seconds"
-    echo
-    echo "  -logfile=mylogfile.txt      : Leave a copy of the output in mylogfile.txt (both stdout and stderr)"
-    echo
-    echo "  -trig                       : Init trigger information to be displayed with the Trigger Plugin"
-    echo
-    echo "  -nocalo                     : Don't init geometry and converters for calorimeter systems."
-    echo "  -noid                       : Don't init geometry and converters for the inner detector."
-    echo "  -nomuon                     : Don't init geometry and converters for muon systems."
-    # echo "  -fulltoroids                : Build complete and rather heavyweight description of muon dead materials."
-    # echo "                                A lightweight version is built by default"
-    echo "  -lucid                      : Init geometry and converters for the LUCID forward detector."
-    echo "  -alfa                       : Init geometry and converters for the ALFA forward detector."
-    echo "  -forwardregion              : Init geometry and converters for the FrwardRegion."
-    echo "  -zdc                        : Init geometry and converters for the ZDC forward detector."
-    echo "  -cavern                     : Init geometry for the cavern infrastructure."
-    echo
-    echo "  -toy-detector               : Let VP1 visualize the "ToyDetector" volumes of GeoModelExamples (as 'Other unrecognized volumes.' in the Geo system)"
-    echo
-    echo "  -geomodel-stats             : Write GeoModel stats to a file."
-    echo
-    echo "  -extrapolator               : Whether to configure an extrapolator for more precise track representation."
-    echo "  -noextrapolator               Default is -extrapolator unless both -nomuon and -noid are specified."
-    echo
-    echo "  -refit                      : Allows VP1-controlled refit from PrepRawData."
-    echo
-    echo "  -online                     : Use this flag for running VP1 at P1."
-    echo
-    echo "  -mc                         : Tell VP1 that it is running over MC data. The flag is ignored when autoconfiguration is ON"
-    echo "                                (default mode when running over data files)"
-    echo
-    echo "  -fatras                     : Use Fatras (in single-track-simulation mode if no input file with generated event)"
-    echo "  -fatraskey=KEY              : Optionally override truth key used by Fatras (typical values are GEN_EVENT or TruthEvent)"
-    echo
-    echo "  -eventsrc=DIR               : Directory to take single event files from (do not specify input files in this case)."
-    echo "                                To get files from a web server (i.e. live events), put instead the url to the file"
-    echo "                                residing in the same directory (most users should just use the -live option instead)."
-    echo
-    echo "  -extraevtsrcdir=DIR         : Directory to add to the menu to allow user to change eventsrc on the fly."
-    echo "                                This argument can appear several times and should only be used when eventsrc is"
-    echo "                                a local directory"
-    echo
-    echo "  -eventcpy=DIR               : Directory to keep local copies of processed event files."
-    echo "                                If -eventsrc is set, then -eventcpy will default to /tmp/$USER/vp1events/$RANDOM "
-    echo "  -nocleanupeventcpy          : Prevent removal of eventcpy directory after athena process finishes."
-    echo
-    echo "  -live                       : Run on live events from point 1. This is a convenience option which implies"
-    echo "                                other options [unless explicitly set]:"
-    echo "                                  -noautoconf"
-    echo "                                  -eventsrc=$LIVEDEFAULTEVENTSRC"
-    echo "                                  -detdescr=$LIVEDEFAULTDETDESCR"
-    echo "                                  -globcond=$LIVEDEFAULTGLOBCOND"
-    echo
-    echo "  -livelocal                  : Run on live events from point 1 in local directory. This is a convenience option"
-    echo "                                which implies other options [unless explicitly set]:"
-    echo "                                  -noautoconf"
-    echo "                                  -eventsrc=$LIVELOCALDEFAULTEVENTSRC"
-    echo "                                  -detdescr=$LIVEDEFAULTDETDESCR"
-    echo "                                  -globcond=$LIVEDEFAULTGLOBCOND"
-    echo
-    echo "  -noautoconf                 : Disable AutoConfiguration. VP1 runs in the 'data' mode by default"
-    echo "                                use '-mc' flag for switching to the MC mode"
-    echo
-    echo "  -nosortdbreplicas           : Do not attempt to sort DB replicas according to geographical proximity."
-    echo
-    echo "  -trigdec=file.xml           : Use file.xml as source of trigger decision category data."
-    echo "  -trigdec=embedded           : Trigger decision category data is embedded in the input file(s)."
-    echo "  -trigdec=someserver.cern.ch : Trigger decision category data is available from the server someserver.cern.ch."
-    echo
-    echo "  -athenaopts=<options>       : Pass <options> onto athena invocation (type 'athena.py -h' to see available options)."
-    echo "                                NB: You can not use the -c option."
-    echo
-    echo "  -cgal                       : Use CGAL boolean instead of vp1 boolean."
-    echo
-    echo "  -nsw                        : Enable the Muon New Small Wheel geometry (and disable the existing small wheel)."
-    echo
-    echo "  -slhc                        : Enable special configurations for SLHC studies."
-    echo
-    echo "  -customgeom                 : Enable a custom geometry."
-    echo
-    echo "  -test                       : Show the athena command instead of executing it."
-    echo
-    echo "Note that the order of specified files/options is not important, and that they can in general be mixed quite"
-    echo "liberally (when it makes sense - of course dont put e.g. both -trigdec=file.xml and -trigdec=embedded)"
-    echo
-    echo
-    echo "Examples:"
-    echo
-    echo "  * Simply run on the input files myevents1.pool.root and myevents2.pool.root"
-    echo
-    echo "     \$>$APP myevents1.pool.root myevents2.pool.root"
-    echo
-    echo "  * Simply run on the input file myevents.pool.root and have the joboption fragments myopts1.py and myopts2.py"
-    echo "     included after the standard vp1 setup"
-    echo
-    echo "     \$>$APP myevents.pool.root myopts1.py myopts2.py"
-    echo
-    echo "  * Run on myevents1.pool.root, and use a file, mytrigger.xml, as source of trigger decision data:"
-    echo
-    echo "     \$>$APP myevents1.pool.root -trigdec=mytrigger.xml"
-    echo
-    echo "  * Run on myevents1.pool.root, don't init geometry and converters for the calorimeter systems:"
-    echo
-    echo "     \$>$APP myevents1.pool.root -nocalo"
-    echo
-    echo "  * Run on myevents1.pool.root, and use detector description tag \"ATLAS-GEO-10-00-00\":"
-    echo
-    echo "     \$>$APP myevents1.pool.root -detdescr=ATLAS-GEO-10-00-00"
-    echo
-    echo "  * Run on myevents1.pool.root, and pass on a couple of options to the athena invocation (in this case"
-    echo "     we want to run with a debugger and also set the loglevel to WARNING):"
-    echo
-    echo "     \$>$APP myevents1.pool.root -athenaopts='-d -l WARNING'"
-    echo
-    echo "  * Run on myevents1.pool.root, and preload the tab/channel configuration files myconfig1.vp1 and myconfig2.vp1:"
-    echo
-    echo "     \$>$APP myevents1.pool.root myconfig1.vp1 myconfig2.vp1"
-    echo
-    echo "  * Run on myevents1.pool.root, and preload the tab/channel configuration files myconfig1.vp1 and myconfig2.vp1:"
-    echo
-    echo "     \$>$APP myevents1.pool.root myconfig1.vp1 myconfig2.vp1"
-    echo
-    echo "  * Some more advanced examples, using several options at once:"
-    echo
-    echo "     \$>$APP -detdescr=ATLAS-GEO-10-00-00 myevents1.pool.root myevents2.pool.root myconfig1.vp1 -athenaopts='-d'"
-    echo "     \$>$APP -detdescr=ATLAS-GEO-10-00-00 myevents1.pool.root myconfig1.vp1 -nocalo -nomuon -trigdec=embedded"
-    echo
-    exit 0
-fi
-
-########################################
-##  Setup & run according to options  ##
-########################################
-
-if [ "x$FLAG_DONTCLEANUPEVTCPY" != "x1" -a "x$MULTINP_CPY" != "x" ]; then
-    if [ ${#MULTINP_CPY} -lt 8 -o "x$MULTINP_CPY" == "$HOME" -o "x$MULTINP_CPY" == "$HOME/" ]; then
-	echo "NOTICE: Turning off removal of ${MULTINP_CPY} since it seems to be too short or pointing at $HOME."
-	FLAG_DONTCLEANUPEVTCPY=1
-    fi
-fi
-
-if [ "x$FLAG_DATA" != "x0" ]; then
-    echo "Error: Support for the -data flag is actually not implemented yet!"
-    exit 1
-fi
-
-PYTHONFILES=""
-i=0
-while [ $i -le $((${#FILES_VP1CFG[@]}-1)) ]; do
-    if [ "x$PYTHONFILES" == "x" ]; then
-	PYTHONFILES='vp1CfgFiles=["'"${FILES_VP1CFG[$i]}"'"'
-    else
-	PYTHONFILES="${PYTHONFILES},${FILES_VP1CFG[$i]}"'"'
-    fi
-    i=$((i+1))
-done
-if [ "x$PYTHONFILES" != "x" ]; then
-    PYTHONFILES="${PYTHONFILES}"']'
-fi
-
-
-POOLROOTFILES=""
-i=0
-while [ $i -le $((${#FILES_POOLROOT[@]}-1)) ]; do
-    if [ "x$POOLROOTFILES" == "x" ]; then
-	POOLROOTFILES='vp1InputFiles=["'"${FILES_POOLROOT[$i]}"'"'
-    else
-	POOLROOTFILES="${POOLROOTFILES}"',"'"${FILES_POOLROOT[$i]}"'"'
-    fi
-    i=$((i+1))
-done
-if [ "x$POOLROOTFILES" != "x" ]; then
-    POOLROOTFILES="${POOLROOTFILES}"']'
-fi
-
-EXTRAINPUTDIRS=""
-i=0
-while [ $i -le $((${#LOCALEVENTSRC_AVAILDIRS[@]}-1)) ]; do
-    if [ "x$EXTRAINPUTDIRS" == "x" ]; then
-	EXTRAINPUTDIRS='vp1MultiAvailableSrcDirs=["'"${LOCALEVENTSRC_AVAILDIRS[$i]}"'"'
-    else
-	EXTRAINPUTDIRS="${EXTRAINPUTDIRS}"',"'"${LOCALEVENTSRC_AVAILDIRS[$i]}"'"'
-    fi
-    i=$((i+1))
-done
-if [ "x$EXTRAINPUTDIRS" != "x" ]; then
-    EXTRAINPUTDIRS="${EXTRAINPUTDIRS}"']'
-fi
-
-EXTRAJOBOPT=""
-i=0
-while [ $i -le $((${#FILES_EXTRAJOBOPT[@]}-1)) ]; do
-    if [ "x$EXTRAJOBOPT" == "x" ]; then
-	EXTRAJOBOPT="${FILES_EXTRAJOBOPT[$i]}"
-    else
-	EXTRAJOBOPT="${EXTRAJOBOPT} ${FILES_EXTRAJOBOPT[$i]}"
-    fi
-    i=$((i+1))
-done
-
-JOBOPT="DumpGeo/dump-geo.py"
-if [ -f dump-geo.py ]; then
-    JOBOPT=dump-geo.py
-fi
-
-OPTS="$PYTHONFILES"
-if [ "x$POOLROOTFILES" != "x" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}$POOLROOTFILES"
-fi
-if [ "x$EXTRAINPUTDIRS" != "x" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}$EXTRAINPUTDIRS"
-fi
-if [ "x$DETDESCRTAG" != "x" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}DetDescrVersion=\"$DETDESCRTAG\""
-    # save the DetDescr tag for later
-    export DUMPGEODETDESCRTAG=$DETDESCRTAG
-    echo "DetDescrTag: ${DUMPGEODETDESCRTAG}"
-fi
-if [ "x$TREETOPFILTER" != "x" ]; then
-    #if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi # not needed
-    #OPTS="${OPTS}GeoModelTreetopFilter=\"$TREETOPFILTER\"" # not needed
-    # save the GeoModelTreetopFilter tag for later
-    # it will be accessed by the package (Athena algorithm) GeoModelStandalone/DumpGeo
-    export DUMPGEOTREETOPFILTER=$TREETOPFILTER
-    echo "User's 'GeoModel Treetop' filter set: ${DUMPGEOTREETOPFILTER}"
-fi
-if [ "x$FLAG_FORCE" != "x0" ]; then
-    #if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    export DUMPGEOOVERWRITE=$FLAG_FORCE
-    echo "User's Overwrite local geometry dump file, if present: ${DUMPGEOOVERWRITE}"
-fi
-if [ "x$GLOBCONDTAG" != "x" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}vp1GlobCond=\"$GLOBCONDTAG\""
-fi
-if [ "x$SKIPEVENTS" != "x" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}skipEvents=$SKIPEVENTS"
-fi
-if [ "x$FILTEREVENTS" != "x" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}vp1FilterEvents=\"$FILTEREVENTS\""
-fi
-if [ "x$CRUISEWITHTIME" != "x" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}vp1CruiseTime=$CRUISEWITHTIME"
-fi
-if [ "x$FLAG_TRIG" != "x0" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}vp1Trig=True"
-fi
-if [ "x$FLAG_NOCALO" != "x0" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}vp1Calo=False"
-    export DUMPGEO_NOCALO="1";
-fi
-if [ "x$FLAG_NOID" != "x0" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}vp1ID=False"
-    export DUMPGEO_NOID="1";
-fi
-if [ "x$FLAG_NOMUON" != "x0" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}vp1Muon=False"
-    export DUMPGEO_NOMUON="1";
-fi
-if [ "x$FLAG_NOSORTDBREPLICAS" != "x0" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}vp1NoSortDBReplicas=True"
-fi
-if [ "x$FLAG_LARHV" != "x0" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}vp1LarHvData=True"
-fi
-# if [ "x$FLAG_FULLTOROIDS" != "x0" ]; then
-#     if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-#     OPTS="${OPTS}vp1FullToroids=True"
-# fi
-if [ "x$FLAG_ONLINE" != "x0" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}vp1Online=True"
-fi
-if [ "x$FLAG_LUCID" != "x0" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}vp1LUCID=True"
-fi
-if [ "x$FLAG_ALFA" != "x0" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}vp1ALFA=True"
-fi
-if [ "x$FLAG_FORWARD_REGION" != "x0" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}vp1ForwardRegion=True"
-fi
-if [ "x$FLAG_ZDC" != "x0" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}vp1ZDC=True"
-fi
-if [ "x$FLAG_CAVERN" != "x0" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}vp1Cavern=True"
-fi
-if [ "x$FLAG_TOYDETECTOR" != "x0" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}vp1ToyDetector=True"
-fi
-if [ "x$FLAG_GEOMODELSTATS" != "x0" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}vp1GeoModelStats=True"
-fi
-if [ "x$FLAG_SPACEPOINTS" != "x0" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}vp1SpacePoints=True"
-fi
-if [ "x$FLAG_EXTRAPOLATOR" != "x0" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}vp1Extrapolator=True"
-fi
-if [ "x$FLAG_NOEXTRAPOLATOR" != "x0" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}vp1Extrapolator=False"
-fi
-if [ "x$FLAG_NOAUTOCONF" != "x0" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}vp1NoAutoConf=True"
-fi
-if [ "x$FLAG_FITTER" != "x0" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}vp1Fitter=True"
-fi
-if [ "x$FLAG_FATRAS" != "x0" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}vp1Fatras=True"
-fi
-if [ "x$FLAG_SLHC" != "x0" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}vp1SLHC=True"
-fi
-if [ "x$FLAG_CUSTOMGEOMETRY" != "x0" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}vp1CustomGeometry=True"
-fi
-if [ "x$MUONLAYOUT" != "x" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}vp1MuonLayout=\"$MUONLAYOUT\""
-fi
-if [ "x$NSWAGDDFILE" != "x" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}vp1NSWAGDDFiles=[\"${NSWAGDDFILE}\"]"
-fi
-if [ "x$MUONAGDDFILE" != "x" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}vp1MuonAGDDFiles=[\"${MUONAGDDFILE}\"]"
-fi
-if [ "x$FLAG_NOGUI" != "x0" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}vp1NoGui=True"
-fi
-if [ "x$FLAG_MC" != "x0" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}vp1Mc=True"
-fi
-if [ "x$MULTINP_SRC" != "x" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}vp1Multinp=True"
-fi
-if [ "x$MULTINP_SRC" != "x" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}vp1Multinpsrc=\"$MULTINP_SRC\""
-fi
-if [ "x$MULTINP_CPY" != "x" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}vp1Multinpcpy=\"$MULTINP_CPY\""
-fi
-if [ "x$TRIGDEC" != "x" ]; then
-    if [ ${#TRIGDEC} -gt 5 -a ${TRIGDEC:$((${#TRIGDEC}-4)):4} == ".xml" ]; then
-	if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-	OPTS="${OPTS}vp1TrigDecXML=\"$TRIGDEC\""
-    else
-	echo "Error: Support for the -trigdec flag for embedded or server is actually not implemented in this script yet! So far only .xml sources work"
-	exit 1
-    fi
-fi
-if [ "x$FATRAS_TRUTHKEY" != "x" ]; then
-    if [ "x$OPTS" != "x" ]; then OPTS="$OPTS;"; fi
-    OPTS="${OPTS}vp1FatrasTruthKey=\"$FATRAS_TRUTHKEY\""
-fi
-
-if [ "x$LOGFILE" != "x" ]; then
-    #Fixme: Check that this actually redirects stderr correctly!!
-    CMD="$CMD 2>&1|tee $LOGFILE"
-fi
-#Test mode:
-if [ "x$FLAG_TEST" != "x0" ]; then
-    if [ "x$HTTPMODE" == "x1" ]; then
-	echo "--> Start by downloading a fresh file from $MULTINP_SRC to $BOOTSTRAPFILE"
-    fi
-    if [ "x$MULTINP_CPY" != "x" -a ! -d "$MULTINP_CPY" ]; then
-	echo mkdir -p $MULTINP_CPY
-    fi
-    if [ "x$LOGFILE" != "x" ]; then
-	echo athena.py $ATHENAOPTS  -c "'$OPTS'" $JOBOPT $EXTRAJOBOPT "2>&1|tee $LOGFILE"
-    else
-	echo athena.py $ATHENAOPTS  -c "'$OPTS'" $JOBOPT $EXTRAJOBOPT
-    fi
-    exit 0
-fi
-
-#print the final list of options that have been set
-echo "'dump-geo' - Options that have been set to run the application: ${OPTS}"
-
-
-##################################################
-##  Possibly get boot-strap file for http mode  ##
-##################################################
-
-if [ "x$MULTINP_CPY" != "x" -a ! -d "$MULTINP_CPY" ]; then
-    mkdir -p $MULTINP_CPY
-    if [ $? != 0 ]; then
-	echo
-	echo "Failed to create $MULTINP_CPY"
-	echo
-	exit 1
-    fi
-fi
-
-
-if [ "x$HTTPMODE" == "x1" ]; then
-    if [ -f $BOOTSTRAPFILE ]; then
-	echo
-	echo
-	echo "Using existing $BOOTSTRAPFILE as first event."
-	echo
-	echo "In case of problems, try first to remove this file and then restart"
-	echo
-	echo
-    else
-	echo "Need to acquire $BOOTSTRAPFILE"
-	while [ 1 ]; do
-	    rm -f $BOOTSTRAP_HTTPFILEINFO
-	    echo
-	    echo "Executing wget $LIVEBOOTSTRAPSRC --output-document=$BOOTSTRAP_HTTPFILEINFO"
-	    wget $LIVEBOOTSTRAPSRC --output-document=$BOOTSTRAP_HTTPFILEINFO
-	    if [ $? != 0 ]; then
-		echo
-		echo "Download failed!"
-		echo
-		rm -f $BOOTSTRAP_HTTPFILEINFO
-		exit 1
-	    fi
-	    echo "Download succesful"
-	    #Check integrity of downloaded file:
-	    OK=1
-	    grep '^vp1_.*_.*_.*.pool.root' $BOOTSTRAP_HTTPFILEINFO |grep -v "copy " > /dev/null || OK=0
-	    grep 'begin_checksums' $BOOTSTRAP_HTTPFILEINFO > /dev/null || OK=0
-	    grep 'end_checksums' $BOOTSTRAP_HTTPFILEINFO > /dev/null || OK=0
-	    if [ "x$OK" == "x1" ]; then
-		echo "Downloaded info file content seems sane."
-		break
-	    else
-		echo "Problems with content of downloaded info file. Will retry in 5 seconds."
-		sleep 5
-	    fi
-	done
-	#Decode to find recent file:
-	FILELINE=`grep '^vp1_.*_.*_.*.pool.root' $BOOTSTRAP_HTTPFILEINFO |grep -v "copy "|head -1|tr '\t' ' '`
-	FILE=`echo $FILELINE|awk '{print $1}'`
-	SUM=`echo $FILELINE|awk '{print $2}'`
-	FILEURL=`echo $LIVEBOOTSTRAPSRC|sed 's#http://\(.*\)/.*$#http://\1/#'`$FILE
-	echo FILE=$FILE
-	echo FILEURL=$FILEURL
-	echo SUM=$SUM
-	OK=1
-	BOOTSTRAPFILE_SUMFILE=$BOOTSTRAPFILE.md5sum
-	rm -f $BOOTSTRAPFILE $BOOTSTRAPFILE_SUMFILE
-	echo "Executing wget ${FILEURL} --output-document=$BOOTSTRAP_HTTPFILEINFO"
-	wget ${FILEURL} --output-document=$BOOTSTRAPFILE || OK=0
-	if [ $? != 0 ]; then
-	    echo
-	    echo "Download failed!"
-	    echo
-	    rm -f $BOOTSTRAPFILE $BOOTSTRAPFILE_SUMFILE
-	    exit 1
-	fi
-	echo "$SUM  $BOOTSTRAPFILE" > $BOOTSTRAPFILE_SUMFILE
-	if [ $? != 0 ]; then
-	    echo
-	    echo "Could not create md5sum check file"
-	    echo
-	    rm -f $BOOTSTRAPFILE $BOOTSTRAPFILE_SUMFILE
-	    exit 1
-	fi
-	md5sum --status --check $BOOTSTRAPFILE_SUMFILE
-	if [ $? != 0 ]; then
-	    echo
-	    echo "md5sum check of downloaded bootstrap event file failed!"
-	    echo
-	    rm -f $BOOTSTRAPFILE $BOOTSTRAPFILE_SUMFILE
-	    exit 1
-	fi
-	echo "Checksum of downloaded Bootstrap event file is OK. Proceeding to launch athena."
-    fi
-elif [ "x$MULTINP_SRC" != "x" ]; then
-    NEWEST=`ls -1rt $MULTINP_SRC/*_*_*.pool.root |tail -1`
-    if [ ! -f $NEWEST ]; then
-	echo
-	echo "Could not find event file in $MULTINP_SRC to use to bootstrap athena. "
-	echo
-	exit 1
-    fi
-    cp $NEWEST -f $BOOTSTRAPFILE
-    if [ $? != 0 -o ! -f $BOOTSTRAPFILE ]; then
-	echo
-	echo "Could not copy newest event file $BOOTSTRAPFILE!"
-	echo
-	rm -f $BOOTSTRAPFILE
-	exit 1
-    fi
-fi
-
-
-echo "JOBOPT: " ${JOBOPT}
-echo "EXTRAJOBOPT: " ${EXTRAJOBOPT}
-if [ "x$LOGFILE" != "x" ]; then
-    athena.py $ATHENAOPTS  -c "$OPTS" $JOBOPT $EXTRAJOBOPT 2>&1|tee $LOGFILE
-else
-    athena.py $ATHENAOPTS  -c "$OPTS" $JOBOPT $EXTRAJOBOPT
-fi
-
-if [ "x$FLAG_DONTCLEANUPEVTCPY" != "x1" -a "x$MULTINP_CPY" != "x" ]; then
-    if [ -d "${MULTINP_CPY}" -a -f "${MULTINP_CPY}/${BOOTSTRAPNAME}" ]; then
-	rm -rf "${MULTINP_CPY}"
-    fi
-fi
diff --git a/DetectorDescription/GeoModel/GeoModelStandalone/DumpGeo/share/dump-geo.py b/DetectorDescription/GeoModel/GeoModelStandalone/DumpGeo/share/dump-geo.py
deleted file mode 100644
index cfce17503f7f4c6dc82ab4a1dee8adf7c1aba93c..0000000000000000000000000000000000000000
--- a/DetectorDescription/GeoModel/GeoModelStandalone/DumpGeo/share/dump-geo.py
+++ /dev/null
@@ -1,561 +0,0 @@
-#/*
-#  Copyright (C) 2002-2022 CERN for the benefit of the ATLAS collaboration
-#*/
-
-if not 'vp1InputFiles' in dir(): vp1InputFiles = []
-if not 'vp1CfgFiles' in dir(): vp1CfgFiles = []
-if not 'vp1ID' in dir(): vp1ID=True
-if not 'vp1Calo' in dir(): vp1Calo=True
-if not 'vp1Muon' in dir(): vp1Muon=True
-if not 'vp1LUCID' in dir(): vp1LUCID=False
-if not 'vp1ALFA' in dir(): vp1ALFA=False
-if not 'vp1ForwardRegion' in dir(): vp1ForwardRegion=False
-if not 'vp1ZDC' in dir(): vp1ZDC=False
-if not 'vp1Extrapolator' in dir(): vp1Extrapolator=True
-if not 'vp1Fitter' in dir(): vp1Fitter=False
-if not 'vp1Fatras' in dir(): vp1Fatras=False
-if not 'vp1FatrasMuon' in dir(): vp1FatrasMuon=(vp1Fatras and vp1Muon)
-if not 'vp1FatrasCalo' in dir(): vp1FatrasCalo=(vp1Fatras and vp1Calo)
-if not 'vp1FatrasTruthKey' in dir(): vp1FatrasTruthKey=""
-if not 'vp1Online' in dir(): vp1Online=False
-if not 'vp1Mc' in dir(): vp1Mc=False
-if not 'vp1Multinp' in dir(): vp1Multinp=False
-if not 'vp1Multinpsrc' in dir(): vp1Multinpsrc=""
-if not 'vp1Multinpcpy' in dir(): vp1Multinpcpy=""
-if not 'vp1MultiAvailableSrcDirs' in dir(): vp1MultiAvailableSrcDirs = []
-if not 'vp1LarHvData' in dir(): vp1LarHvData=False
-# if not 'vp1FullToroids' in dir(): vp1FullToroids=False
-if not 'vp1CruiseTime' in dir(): vp1CruiseTime=0
-if not 'vp1NoSortDBReplicas' in dir(): vp1NoSortDBReplicas=False
-if not 'vp1FilterEvents' in dir(): vp1FilterEvents=""
-if not 'vp1NoGui' in dir(): vp1NoGui=False
-if not 'vp1SpacePoints' in dir(): vp1SpacePoints=False
-if not 'vp1Cavern' in dir(): vp1Cavern=False
-if not 'vp1ToyDetector' in dir(): vp1ToyDetector=False
-if not 'vp1GeoModelStats' in dir(): vp1GeoModelStats=False
-if not 'vp1NoAutoConf' in dir(): vp1NoAutoConf=False
-if not 'vp1Trig' in dir(): vp1Trig=False
-if not 'vp1CustomGeometry' in dir(): vp1CustomGeometry=False
-if not 'vp1SLHC' in dir(): vp1SLHC=False
-if not 'vp1MuonAGDDFiles' in dir(): vp1MuonAGDDFiles=[]
-if not 'vp1MuonAGDD2GeoSwitches' in dir(): vp1MuonAGDD2GeoSwitches=[]
-if not 'vp1NSWAGDDFiles' in dir(): vp1NSWAGDDFiles=[]
-if not 'vp1MuonLayout' in dir(): vp1MuonLayout=""
-
-def vp1CfgErr(s): print ("VP1 CONFIGURATION ERROR: %s" % s)
-
-if (vp1Fatras and not vp1ID):
-    vp1CfgErr("Fatras can not be enabled without inner detector. Turning off Fatras.")
-    vp1Fatras=False
-if (vp1FatrasMuon and not vp1Fatras):
-    vp1CfgErr("FatrasMuon can not be enabled without Fatras enabled. Turning off FatrasMuon.")
-    vp1FatrasMuon=False
-if (vp1FatrasMuon and not vp1Muon):
-    vp1CfgErr("FatrasMuon can not be enabled without muon systems. Turning off FatrasMuon.")
-    vp1FatrasMuon=False
-if (vp1FatrasCalo and not vp1Fatras):
-    vp1CfgErr("FatrasCalo can not be enabled without Fatras enabled. Turning off FatrasCalo.")
-    vp1FatrasCalo=False
-if (vp1FatrasCalo and not vp1Calo):
-    vp1CfgErr("FatrasCalo can not be enabled without calo systems. Turning off FatrasCalo.")
-    vp1FatrasCalo=False
-if ( vp1FatrasTruthKey != "" and not vp1Fatras ):
-    vp1CfgErr("FatrasTruthKey set but Fatras not enabled. Unsetting FatrasTruthKey.")
-    vp1FatrasTruthKey=""
-
-#Set VP1ATLREL integer so we can do e.g. "if VP1ATLREL>=160002:"
-include('DumpGeo/vp1FragmentConstructVP1ATLREL.py')
-
-#Disable FPE checks :-(
-from RecExConfig.RecFlags import rec
-rec.doFloatingPointException.set_Value_and_Lock(False)
-
-if (vp1InputFiles == []):
-    # Create an event selector:
-    import AthenaCommon.AtlasUnixGeneratorJob
-    # Configure GlobalFlags
-    from AthenaCommon.GlobalFlags import globalflags
-    globalflags.DetGeo.set_Value_and_Lock('atlas')
-    globalflags.DataSource.set_Value_and_Lock('geant4')
-
-    # # Set geometry version
-    if (not "DetDescrVersion" in dir()):
-        
-        #DetDescrVersion = "ATLAS-R2-2015-03-01-00" # default for Rel. 21
-        #DetDescrVersion = "ATLAS-R2-2016-01-00-01" # default Run 2 geometry
-        DetDescrVersion = "ATLAS-R3S-2021-03-02-00" # default Run 3 geometry, with symmetric NSW
-
-        os.environ["DUMPGEODETDESCRTAGDEFAULT"] = "1" # save to an env var, for later use in GeoModelStandalone/GeoExporter
-        os.environ["DUMPGEODETDESCRTAG"] = DetDescrVersion # save to an env var, for later use in GeoModelStandalone/GeoExporter
-    globalflags.DetDescrVersion = DetDescrVersion
-
-    # Set conditions tag
-    if not 'vp1GlobCond' in dir():
-        vp1GlobCond="OFLCOND-SDR-BS7T-05-14"
-    from IOVDbSvc.CondDB import conddb
-    conddb.setGlobalTag(vp1GlobCond)
-else:
-    if(vp1NoAutoConf):
-        # AutoConfiguration disabled
-        import AthenaCommon.AtlasUnixStandardJob
-
-        if(vp1Online):
-            from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
-            athenaCommonFlags.isOnline = True
-
-        from AthenaCommon.GlobalFlags import globalflags
-        globalflags.DetGeo.set_Value_and_Lock('atlas')
-        if(vp1Mc):
-            globalflags.DataSource.set_Value_and_Lock('geant4')
-        else:
-            globalflags.DataSource.set_Value_and_Lock('data')
-        globalflags.InputFormat.set_Value_and_Lock('pool')
-        globalflags.Luminosity.set_Value_and_Lock('zero')
-        from AthenaCommon.BeamFlags import jobproperties
-        jobproperties.Beam.beamType="collisions"
-
-        # Set geometry version
-        if (not "DetDescrVersion" in dir()):
-            DetDescrVersion = "ATLAS-R2-2016-01-00-01" # "ATLAS-R2-2015-03-01-00" for Rel. 21
-
-        globalflags.DetDescrVersion = DetDescrVersion
-
-        # Set conditions tag
-        if not 'vp1GlobCond' in dir():
-            if (vp1Mc):
-                vp1GlobCond="OFLCOND-SIM-BS7T-02"
-            else:
-                vp1GlobCond="COMCOND-BLKPST-004-01"
-
-        from IOVDbSvc.CondDB import conddb
-        conddb.setGlobalTag(vp1GlobCond)
-    else:
-        # AutoConfiguration enabled
-        from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
-        athenaCommonFlags.FilesInput=vp1InputFiles
-        from RecExConfig.RecFlags import rec
-        rec.AutoConfiguration=['everything']
-        rec.doAOD.set_Value_and_Lock(False)
-        rec.doESD.set_Value_and_Lock(False)
-
-        # Override geometry tag from command line
-        if("DetDescrVersion" in dir()):
-            from AthenaCommon.GlobalFlags import globalflags
-            globalflags.DetDescrVersion.set_Value_and_Lock(DetDescrVersion)
-
-        include('RecExCond/RecExCommon_flags.py')
-        theApp.EvtMax = -1
-
-        # Override global conditions tag from command line
-        if('vp1GlobCond' in dir()):
-            from IOVDbSvc.CondDB import conddb
-            conddb.setGlobalTag(vp1GlobCond)
-
-if vp1FilterEvents:
-    import DumpGeo.VP1EvtFilter
-    vp1FilterEvents = DumpGeo.VP1EvtFilter.parseFilterString(vp1FilterEvents)
-    DumpGeo.VP1EvtFilter.installEventFilter(vp1FilterEvents)
-
-from AthenaCommon.AlgSequence import AlgSequence
-topSequence = AlgSequence()
-
-#Detector setup:
-from AthenaCommon.DetFlags import DetFlags
-if (vp1ID): 
-  DetFlags.ID_setOn()
-else:       DetFlags.ID_setOff()
-if (vp1Calo): DetFlags.Calo_setOn()
-else:         DetFlags.Calo_setOff()
-if (vp1Muon): DetFlags.Muon_setOn()
-else:         DetFlags.Muon_setOff()
-if (vp1LUCID): DetFlags.Lucid_setOn()
-else:          DetFlags.Lucid_setOff()
-if (vp1ALFA): DetFlags.ALFA_setOn()
-else:          DetFlags.ALFA_setOff()
-if (vp1ForwardRegion): DetFlags.FwdRegion_setOn()
-else:          DetFlags.FwdRegion_setOff()
-if (vp1ZDC): DetFlags.ZDC_setOn()
-else:          DetFlags.ZDC_setOff()
-DetFlags.Print()
-if (vp1CustomGeometry):
-    print ("Configuring Custom geometry.")
-if (vp1SLHC):
-  print ("Setting up SLHC configuration")
-  rec.doTrigger.set_Value_and_Lock(False)
-  from AthenaCommon.GlobalFlags import globalflags
-  from AthenaCommon.GlobalFlags import jobproperties
-  from InDetRecExample.InDetJobProperties import InDetFlags
-
-  #include("InDetSLHC_Example/preInclude.SLHC.py")
-  #include("InDetSLHC_Example/preInclude.NoTRT.py")
-  #include("InDetSLHC_Example/preInclude.SLHC_Rec.py")
-  # The above don't work for dev (20.9.0) so copied modified versions below:
-  from InDetSLHC_Example.SLHC_JobProperties import SLHC_Flags
-  SLHC_Flags.SLHC_Version = ''
-  DetFlags.ID_setOn()
-  DetFlags.TRT_setOff()
-  DetFlags.detdescr.TRT_setOff()
-  DetFlags.makeRIO.TRT_setOff()
-  DetFlags.Calo_setOn()
-  DetFlags.Muon_setOn()
-  DetFlags.Truth_setOn()
-  from TrkDetDescrSvc.TrkDetDescrJobProperties import TrkDetFlags
-  TrkDetFlags.SLHC_Geometry                   = True
-  TrkDetFlags.MagneticFieldCallbackEnforced   = False
-  TrkDetFlags.TRT_BuildStrawLayers            = False
-  TrkDetFlags.MaterialSource = 'None'
-
-if vp1Muon and vp1MuonLayout!="":
-    print ("*** DumpGeo NOTE *** You specified custom vp1MuonLayout, using %s as muon geometry"%vp1MuonLayout)
-    from GeoModelSvc.GeoModelSvcConf import GeoModelSvc
-    GeoModelSvc = GeoModelSvc()
-    GeoModelSvc.MuonVersionOverride=vp1MuonLayout
-
-# --- GeoModel
-from AtlasGeoModel import SetGeometryVersion
-from AtlasGeoModel import GeoModelInit
-
-from AthenaCommon.AppMgr import ServiceMgr as svcMgr
-
-if vp1GeoModelStats:
-  print ("printing GeoModel stats to file (Svc.StatisticsToFile=TRUE)...")
-  svcMgr.GeoModelSvc.StatisticsToFile = True
-
-if vp1Cavern:
-  print ("vp1.py - Initializing the visualization of the Cavern Infrastructure...")
-  from CavernInfraGeoModel.CavernInfraGeoModelConf import CavernInfraDetectorTool
-  svcMgr.GeoModelSvc.DetectorTools += [ CavernInfraDetectorTool() ]
-
-if vp1ToyDetector:
-  print ("vp1.py - Initializing the visualization of the GeoModelExamples 'ToyDetector'...")
-  from GeoModelExamples.GeoModelExamplesConf import ToyDetectorTool
-  svcMgr.GeoModelSvc.DetectorTools += [ ToyDetectorTool() ]
-
-
-
-
-# --- AGDD2Geo
-# Switch it on if
-#  - Muon is ON
-#  - Major geometry version is greater than 10
-if (vp1Muon):
-
-    
-    # This fixes the same error reported and fixed in ATLASVPONE-641
-    # Details: https://its.cern.ch/jira/browse/ATLASVPONE-641
-    from AthenaCommon.AppMgr import ServiceMgr as svcMgr
-    if not hasattr(svcMgr, "MuonIdHelperSvc"):
-        from MuonIdHelpers.MuonIdHelpersConfigLegacy import MuonIdHelperSvc
-        svcMgr += MuonIdHelperSvc("MuonIdHelperSvc")
-
-
-    from AtlasGeoModel import Agdd2Geo
-
-    if len(vp1MuonAGDDFiles)>0:
-        print ("*** DumpGeo NOTE *** You specified custom vp1MuonAGDDFiles, configuring MuonAGDDTool to read MuonAGDD information from custom file(s) '%s' instead from built-in geometry"%(', '.join(vp1MuonAGDDFiles)))
-        if hasattr(svcMgr,"AGDDtoGeoSvc"):
-            for b in getattr(svcMgr,"AGDDtoGeoSvc").Builders:
-                if b.name()=="MuonSpectrometer":
-                    b.ReadAGDD=False
-                    b.XMLFiles=vp1MuonAGDDFiles
-                    if len(vp1MuonAGDD2GeoSwitches)>0:
-                        print ("*** DumpGeo NOTE *** You specified custom vp1MuonAGDD2GeoSwitches, configuring MuonAGDDTool to build volumes: '%s'"%(', '.join(vp1MuonAGDD2GeoSwitches)))
-                        b.Volumes=vp1MuonAGDD2GeoSwitches
-                    else:
-                        # the default AGDD2GeoSwitches for Run2
-                        b.Volumes=["ECT_Toroids",
-                                   "BAR_Toroid",
-                                   "Feet",
-                                   "RailAssembly",
-                                   "JFSH_Shield",
-                                   "JDSH_Shield",
-                                   "JTSH_Shield",
-                                   "pp2",
-                                   "MBAP_AccessPlatform",
-                                   "MBWH_BigWheels",
-                                   "SADL_CalorimeterSaddle",
-                                   "TBWH_BigWheels",
-                                   "TGC3_BigWheels",
-                                   "TGC1_BigWheels",
-                                   "MDTRail",
-                                   "servicesAtZ0",
-                                   "HFTruckRail",
-                                   "RUN2_Services"]
-    if len(vp1NSWAGDDFiles)>0:
-        print ("*** DumpGeo NOTE *** You specified custom vp1NSWAGDDFiles, configuring NSWAGDDTool to read NSWAGDD information from custom file(s) '%s' instead from built-in geometry"%(', '.join(vp1NSWAGDDFiles)))
-        if hasattr(svcMgr,"AGDDtoGeoSvc"):
-            for b in getattr(svcMgr,"AGDDtoGeoSvc").Builders:
-                if b.name()=="NewSmallWheel":
-                    b.ReadAGDD=False
-                    b.XMLFiles=vp1NSWAGDDFiles
-
-#MagneticField:
-import MagFieldServices.SetupField # NEW
-
-if (vp1Fatras):
-    from FatrasExample.FatrasJobProperties import FatrasFlags
-    FatrasFlags.SingleTrackSimulation.set_Value(vp1InputFiles==[])
-    from FastSimulationConfig.FastSimulationFlags import jobproperties
-    jobproperties.FastSimulation.doFatrasID.set_Value(True)
-    jobproperties.FastSimulation.doFatrasMuon.set_Value(vp1FatrasMuon)
-    FatrasFlags.FastCaloSim.set_Value(vp1FatrasCalo)
-
-if (vp1InputFiles != []):
-    import AthenaPoolCnvSvc.ReadAthenaPool
-
-    if (vp1ID):
-        from InDetRecExample.InDetJobProperties import InDetFlags
-        InDetFlags.loadTools               = (vp1Extrapolator or vp1SpacePoints)
-        InDetFlags.preProcessing           = vp1SpacePoints
-        InDetFlags.doPRDFormation          = False
-        InDetFlags.doSpacePointFormation   = vp1SpacePoints
-        InDetFlags.doNewTracking           = False
-        InDetFlags.doLowPt                 = False
-        InDetFlags.doLowBetaFinder         = False
-        InDetFlags.doBackTracking          = False
-        InDetFlags.doTRTStandalone         = False
-        InDetFlags.doTrtSegments           = False
-        InDetFlags.postProcessing          = False
-        InDetFlags.doSlimming              = False
-        InDetFlags.doVertexFinding         = False
-        InDetFlags.doParticleCreation      = False
-        InDetFlags.doConversions           = False
-        InDetFlags.doSecVertexFinder       = False
-        InDetFlags.doV0Finder              = False
-        InDetFlags.doTrkNtuple             = False
-        InDetFlags.doPixelTrkNtuple        = False
-        InDetFlags.doSctTrkNtuple          = False
-        InDetFlags.doTrtTrkNtuple          = False
-        InDetFlags.doVtxNtuple             = False
-        InDetFlags.doConvVtxNtuple         = False
-        InDetFlags.doV0VtxNtuple           = False
-        InDetFlags.doRefit                 = False
-
-        InDetFlags.doStatistics            = False
-        InDetFlags.useDCS                  = False
-        include( "InDetRecExample/InDetRec_jobOptions.py" )
-
-        if (vp1Fatras and vp1FatrasTruthKey!=""):
-            from FatrasExample.FatrasKeys import FatrasKeyFlags
-            FatrasKeyFlags.InputMcEventCollection.set_Value(vp1FatrasTruthKey)
-
-    if (vp1Trig):
-        from TriggerJobOpts.TriggerConfigGetter import TriggerConfigGetter
-        TriggerConfigGetter()
-
-    if (vp1Calo):
-        from LArConditionsCommon import LArAlignable
-
-        #
-        # Following is needed to display Tile Pulse shapes on ESD files
-        #
-        from TileConditions.TileInfoConfigurator import TileInfoConfigurator
-        tileInfoConfigurator = TileInfoConfigurator()
-
-    if (vp1Muon):
-        include( "AmdcAth/AmdcAth_jobOptions.py" )
-
-    include( "PartPropSvc/PartPropSvc.py" )
-
-    if(vp1NoAutoConf):
-        svcMgr.EventSelector.InputCollections=vp1InputFiles
-    else:
-        svcMgr.EventSelector.InputCollections=athenaCommonFlags.FilesInput()
-
-    if 'skipEvents' in dir():
-        svcMgr.EventSelector.SkipEvents=skipEvents
-else:
-    vp1Extrapolator = False
-
-# do not need to run execute since layout is dumped during initialize
-from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
-athenaCommonFlags.EvtMax = 0
-
-#watch LAr HV:
-if ( vp1LarHvData ):
-    from time import time
-    svcMgr.EventSelector.InitialTimeStamp  = int (time())
-    svcMgr.EventSelector.TimeStampInterval = 1
-    from IOVDbSvc.CondDB import conddb
-    conddb.addFolder("DCS_OFL","/LAR/DCS/HV/BARREl/I16")
-    conddb.addFolder("DCS_OFL","/LAR/DCS/HV/BARREL/I8")
-#------------
-
-if (vp1Fatras):
-    include( "FatrasExample/Fatras_jobOptions.py" )
-
-if (vp1Calo):
-    from AthenaCommon.GlobalFlags import globalflags
-    if globalflags.DataSource() == 'data' :
-        include ("LArConditionsCommon/LArIdMap_comm_jobOptions.py")
-    else:
-        include ("LArConditionsCommon/LArConditionsCommon_MC_jobOptions.py")
-        include ("LArConditionsCommon/LArIdMap_MC_jobOptions.py")
-
-#Extrapolator:
-if vp1Extrapolator and (vp1ID or vp1Muon):
-    # Force tool initializations needed by the extrapolator
-    from VP1TrkAuxAlgs.VP1TrkAuxAlgsConf import VP1TrkInitializer
-    VP1TrkInitializer = VP1TrkInitializer()
-    VP1TrkInitializer.ForceExtrapolatorTools = True
-    topSequence += VP1TrkInitializer
-
-    if not vp1Fatras and vp1Mc:
-        include ('TrkDetDescrSvc/AtlasTrackingGeometrySvc.py')
-
-    from TrkExTools.AtlasExtrapolator import AtlasExtrapolator
-    VP1ExtraPolatorName='VP1Extrapolator';
-    VP1Extrapolator = AtlasExtrapolator(name=VP1ExtraPolatorName)
-
-    # Previous config from SetupFitters.py
-    # from TrkExTools.TrkExToolsConf import Trk__Extrapolator as Extrapolator
-    # VP1Extrapolator = Extrapolator(name='VP1Extrapolator',
-    #                                Navigator = VP1Navigator,
-    #                                MaterialEffectsUpdators = [ VP1MaterialUpdator ],
-    #                                Propagators = [ VP1Propagator ] )
-
-    ToolSvc += VP1Extrapolator
-    print (VP1Extrapolator)
-    #We should append to variable instead:
-    os.putenv("VP1_JOBCFG_EXTRA_VP1_EXTRAPOLATORS","Trk::Extrapolator/"+VP1ExtraPolatorName)
-
-    from TrkExEngine.AtlasExtrapolationEngine import AtlasExtrapolationEngine
-    ExtrapolationEngine = AtlasExtrapolationEngine(name='Extrapolation', nameprefix='Atlas')
-    ToolSvc += ExtrapolationEngine
-    print (ExtrapolationEngine)
-
-#Fitter:
-if vp1Fitter and vp1Extrapolator and (vp1ID or vp1Muon):
-    VP1TrkInitializer.ForceFitterTools = True
-    os.putenv("VP1_DEVEL_ENABLEREFIT","1")
-    ##########################################################
-    # The Extrapolator
-    include('TrkDetDescrSvc/AtlasTrackingGeometrySvc.py')
-
-    from TrkExRungeKuttaPropagator.TrkExRungeKuttaPropagatorConf import Trk__RungeKuttaPropagator as Propagator
-    VP1Propagator = Propagator(name = 'VP1Propagator')
-    ToolSvc += VP1Propagator
-
-    # the Navigator has to get this one
-    from TrkExTools.TrkExToolsConf import Trk__Navigator
-    VP1Navigator = Trk__Navigator(name = 'VP1Navigator')
-    VP1Navigator.TrackingGeometrySvc = svcMgr.AtlasTrackingGeometrySvc
-    ToolSvc += VP1Navigator
-
-    from TrkExTools.TrkExToolsConf import Trk__MaterialEffectsUpdator as MatUpdator
-    VP1MaterialUpdator = MatUpdator(name='VP1MaterialEffectsUpdator')
-    ToolSvc += VP1MaterialUpdator
-
-    from TrkMeasurementUpdator_xk.TrkMeasurementUpdator_xkConf import Trk__KalmanUpdator_xk
-    VP1Updator = Trk__KalmanUpdator_xk(name = 'VP1Updator')
-    ToolSvc += VP1Updator
-
-    from TrkDynamicNoiseAdjustor.TrkDynamicNoiseAdjustorConf import Trk__InDetDynamicNoiseAdjustment
-    VP1DNAdjustor = Trk__InDetDynamicNoiseAdjustment(name    = 'VP1DNAdjustor')
-                                                               #yminmax = 100.0)
-    ToolSvc += VP1DNAdjustor
-    from TrkKalmanFitter.TrkKalmanFitterConf import Trk__ForwardKalmanFitter as PublicFKF
-    VP1FKF = PublicFKF(name                  = 'VP1FKF',
-                        StateChi2PerNDFPreCut = 25.0)
-    ToolSvc += VP1FKF
-
-    from TrkKalmanFitter.TrkKalmanFitterConf import Trk__KalmanSmoother as PublicBKS
-    VP1BKS = PublicBKS(name                        = 'VP1BKS',
-                      InitialCovarianceSeedFactor = 200.)
-    ToolSvc += VP1BKS
-
-    from TrkKalmanFitter.TrkKalmanFitterConf import Trk__KalmanOutlierLogic as PublicKOL
-    VP1KOL = PublicKOL(name               = 'VP1KOL',
-                      StateChi2PerNDFCut = 12.5)
-    ToolSvc += VP1KOL
-
-    #FIXME! Only do this for Muons?
-    from MuonRecExample import MuonRecTools
-    MdtTubeHitOnTrackCreator      = MuonRecTools.getPublicTool("MdtTubeHitOnTrackCreator")
-
-    from TrkRIO_OnTrackCreator.TrkRIO_OnTrackCreatorConf import Trk__RIO_OnTrackCreator
-    VP1RotCreator = Trk__RIO_OnTrackCreator(name = 'VP1RotCreator',
-       ToolMuonDriftCircle = MdtTubeHitOnTrackCreator ,
-                                                 Mode = 'all')
-    ToolSvc += VP1RotCreator
-    print (VP1RotCreator)
-    print (MdtTubeHitOnTrackCreator)
-
-    from TrkKalmanFitter.TrkKalmanFitterConf import Trk__KalmanFitter as ConfiguredKalmanFitter
-    VP1KalmanFitter = ConfiguredKalmanFitter(name                           = 'VP1KalmanFitter',
-                                             ExtrapolatorHandle             = VP1Extrapolator,
-                                             RIO_OnTrackCreatorHandle       = VP1RotCreator,
-                                             MeasurementUpdatorHandle       = VP1Updator,
-                                             ForwardKalmanFitterHandle      = VP1FKF,
-                                             KalmanSmootherHandle           = VP1BKS,
-                                             KalmanOutlierLogicHandle       = VP1KOL,
-                                             DynamicNoiseAdjustorHandle     = None,
-                                             AlignableSurfaceProviderHandle = None)
-
-    ToolSvc += VP1KalmanFitter
-
-    print (VP1KalmanFitter)
-    os.putenv("VP1_JOBCFG_EXTRA_VP1_FITTERS",VP1KalmanFitter.name())
-
-
-    VP1KalmanFitterDNA = ConfiguredKalmanFitter(name                        = 'VP1KalmanFitterDNA',
-                                             ExtrapolatorHandle             = VP1Extrapolator,
-                                             RIO_OnTrackCreatorHandle       = VP1RotCreator,
-                                             MeasurementUpdatorHandle       = VP1Updator,
-                                             ForwardKalmanFitterHandle      = VP1FKF,
-                                             KalmanSmootherHandle           = VP1BKS,
-                                             KalmanOutlierLogicHandle       = VP1KOL,
-                                             DynamicNoiseAdjustorHandle     = VP1DNAdjustor,
-                                             AlignableSurfaceProviderHandle = None)
-
-    ToolSvc += VP1KalmanFitterDNA
-    os.putenv("VP1_JOBCFG_EXTRA_VP1_FITTERS",VP1KalmanFitterDNA.name())
-
-
-    from TrkGlobalChi2Fitter.TrkGlobalChi2FitterConf import Trk__GlobalChi2Fitter
-    VP1GlobalChi2Fitter = Trk__GlobalChi2Fitter(name               = 'VP1GlobalChi2Fitter',
-                                             ExtrapolationTool     = VP1Extrapolator,
-                                             NavigatorTool         = VP1Navigator,
-                                             PropagatorTool        = VP1Propagator,
-                                             RotCreatorTool        = VP1RotCreator,
-                                             MeasurementUpdateTool = VP1Updator,
-                                             StraightLine          = False,
-                                             OutlierCut            = 3.0,
-                                             SignedDriftRadius     = True,
-                                             RecalculateDerivatives= True
-                                             )
-    print (VP1GlobalChi2Fitter)
-    ToolSvc += VP1GlobalChi2Fitter
-
-    VP1GlobalChi2Fitter.OutputLevel=DEBUG
-
-    os.putenv("VP1_JOBCFG_EXTRA_VP1_FITTERS","Trk::KalmanFitter/"+VP1KalmanFitter.name()+";"+"Trk::KalmanFitter/"+VP1KalmanFitterDNA.name()+";"+"Trk::GlobalChi2Fitter/"+VP1GlobalChi2Fitter.name())
-
-#On a machine where the hostname does not indicate domain, pool will
-#fail if trying to find nearest replica. In any case, dblookup.xml
-#needs to be patched when running on real data:
-include('DumpGeo/vp1FragmentFixDBReplica.py')
-
-if vp1NoSortDBReplicas:
-    PoolSvc = Service( "PoolSvc" )
-    PoolSvc.SortReplicas = False
-
-
-#Finally, the VP1 algorithm itself:
-from DumpGeo.DumpGeoConf import DumpGeo
-topSequence += DumpGeo()
-
-DumpGeo.NoGui=vp1NoGui
-
-if vp1CruiseTime > 0:
-    DumpGeo.InitialCruiseMode = "EVENT"
-    DumpGeo.InitialCruiseModePeriod = vp1CruiseTime
-
-DumpGeo.InitiallyLoadedVP1Files = vp1CfgFiles
-if (vp1Multinp):
-    DumpGeo.MultipleFilesON = True
-    DumpGeo.MFSourceDir = vp1Multinpsrc
-    DumpGeo.MFLocalCopyDir = vp1Multinpcpy
-    DumpGeo.MFAvailableLocalInputDirectories = vp1MultiAvailableSrcDirs
-
-topSequence.TimeOut=0
diff --git a/DetectorDescription/GeoModel/GeoModelStandalone/DumpGeo/share/vp1FragmentConstructVP1ATLREL.py b/DetectorDescription/GeoModel/GeoModelStandalone/DumpGeo/share/vp1FragmentConstructVP1ATLREL.py
deleted file mode 100644
index d3c8662e09a0040090c11947e43d3ca83cfaa530..0000000000000000000000000000000000000000
--- a/DetectorDescription/GeoModel/GeoModelStandalone/DumpGeo/share/vp1FragmentConstructVP1ATLREL.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#/*
-#  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
-#*/
-
-#Construct VP1ATLREL integer based on AtlasVersion environment
-#variable, needed to make script work across releases. As an example
-#14.0.1 becomes 140001. Any parts not recognised are replaced with 99.
-if not 'VP1ATLREL' in dir():
-    import os
-    versionMajor=99; versionMinor=99; versionPatch=99
-    if os.getenv("AtlasVersion") == None:
-        VP1ATLREL=999999
-    else:
-        atlVerParts=os.getenv("AtlasVersion").split('.')
-        if len(atlVerParts)>=1 and atlVerParts[0].isdigit():
-            versionMajor=eval(atlVerParts[0])
-            if len(atlVerParts)>=2 and atlVerParts[1].isdigit():
-                versionMinor=eval(atlVerParts[1])
-                if len(atlVerParts)>=3 and atlVerParts[2].isdigit():
-                    versionPatch=eval(atlVerParts[2])
-        if versionMinor>99: versionMinor=99
-        if versionMinor==99 or versionPatch>99: versionPatch=99
-        VP1ATLREL=10000*versionMajor+100*versionMinor+versionPatch
-        print ("VP1ATLREL: "+str(VP1ATLREL))
diff --git a/DetectorDescription/GeoModel/GeoModelStandalone/DumpGeo/share/vp1FragmentFixDBReplica.py b/DetectorDescription/GeoModel/GeoModelStandalone/DumpGeo/share/vp1FragmentFixDBReplica.py
deleted file mode 100644
index 596940e127a22051f7f9cb35f92595af0c5e4709..0000000000000000000000000000000000000000
--- a/DetectorDescription/GeoModel/GeoModelStandalone/DumpGeo/share/vp1FragmentFixDBReplica.py
+++ /dev/null
@@ -1,48 +0,0 @@
-#/*
-#  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
-#*/
-
-from AthenaCommon.GlobalFlags import globalflags
-import os
-
-if globalflags.DataSource() == 'data' and os.getenv("FRONTIER_SERVER") == None :
-
-    # Patch dblookup.xml to avoid any attempt to use sqlite replicas
-    print ("VP1 Info: real data mode and Frontier environment not set - need to patch dblookup.xml to avoid using local sqlite replicas.")
-    if os.path.exists("dblookup.xml"):
-        print ("VP1 Warning: dblookup.xml already found in run directory. Will not attempt to created patched copy.")
-        os.putenv("CORAL_DBLOOKUP_PATH",".")
-    else:
-        if os.getenv("CORAL_AUTH_PATH") == None or not os.path.isdir(os.getenv("CORAL_AUTH_PATH")):
-            print ("VP1 Warning: CORAL_AUTH_PATH is not set or does not point to directory. Can't create patched dblookup.xml.")
-        else:
-            dblookupFileLocation=os.getenv("CORAL_AUTH_PATH")+"/dblookup.xml"
-            if not os.path.exists(dblookupFileLocation) or not os.path.isfile(dblookupFileLocation):
-                print ("VP1 Warning: Did not find CORAL_AUTH_PATH/dblookup.xml. Can't create patched dblookup.xml.")
-            else:
-                print ("VP1 Warning: Attempting to create patched dblookup.xml in current directory")
-                os.system("cat $CORAL_AUTH_PATH/dblookup.xml |grep -v 'ALLP200.db'> dblookup.xml")
-                os.putenv("CORAL_DBLOOKUP_PATH",".")
-
-    if os.getenv("ATLAS_CONDDB")==None and (os.getenv("HOSTNAME") == None or os.getenv("HOSTNAME").find('.')<=0):
-        print ("VP1 Warning: Unable to determine domain from runtime environment. Disabling attempt to find nearest replica.")
-        PoolSvc = Service( "PoolSvc" )
-        PoolSvc.SortReplicas = False
-
-        print ("")
-        print ("  ==========>")
-        print ("  ========>")
-        print ("  ======>")
-        print ("  ====>")
-        print ("  ====> VP1 Warning: Can't find nearest Oracle replica and you are running on real data.")
-        print ("  ====> Thus conditions data will have to come from the default, CERN (or you might crash!).")
-        print ("  ====>")
-        print ("  ====> Notice that this might SIGNIFICANTLY slow down (or even time-out) your job depending")
-        print ("  ====> on your location and connection! You can avoid the issue (and this warning) by fixing")
-        print ("  ====> your environment: either ATLAS_CONDDB or HOSTNAME. Alternatively you can edit a dblookup.xml file in your2")
-        print ("  ====> directory to only leave in references to a closer replica in sections where such a replica is available.")
-        print ("  ====>")
-        print ("  ======>")
-        print ("  ========>")
-        print ("  ==========>")
-        print ("")
diff --git a/DetectorDescription/GeoModel/GeoModelStandalone/DumpGeo/src/DumpGeo.cxx b/DetectorDescription/GeoModel/GeoModelStandalone/DumpGeo/src/DumpGeo.cxx
deleted file mode 100755
index 034feb57373b378e6f9c9bc7dc5754c5cc27eb1b..0000000000000000000000000000000000000000
--- a/DetectorDescription/GeoModel/GeoModelStandalone/DumpGeo/src/DumpGeo.cxx
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
-  Copyright (C) 2002-2022 CERN for the benefit of the ATLAS collaboration
-*/
-
-#include "DumpGeo/DumpGeo.h"
-
-#include "GeoExporter/GeoExporter.h"
-
-#include <vector>
-#include <stdexcept>
-#include <iostream>
-#include <iomanip>
-#include <cstdlib> //For setenv
-
-//____________________________________________________________________
-DumpGeo::DumpGeo(const std::string& name, ISvcLocator* svcLocator):
-  AthAlgorithm(name, svcLocator),
-  m_toolSvc(0),m_geoExporter(0)
-{
-  ::setenv("LCGPATCH_COINMULTISELECT","1",1);
-}
-
-//____________________________________________________________________
-StatusCode DumpGeo::initialize()
-{
-  ATH_MSG_INFO("in initialize()");
-
-  //Create GeoExporter object and see if it considers settings to be valid.
-  m_geoExporter = new GeoExporter(&(*evtStore()), &(*detStore()), serviceLocator(), m_toolSvc);
-  if (!m_geoExporter->argumentsAreValid()) {
-    delete m_geoExporter;
-    m_geoExporter = 0;
-    return StatusCode::FAILURE;
-  }
-
-  try
-  {
-    m_geoExporter->init();//Launch!
-  }
-  catch ( char* c)
-  {
-    ATH_MSG_ERROR("Exception: " << c);
-    return StatusCode::FAILURE;
-  }
-  
-  return StatusCode::SUCCESS;
-}
-
-//____________________________________________________________________
-StatusCode DumpGeo::execute()
-{
-  return StatusCode::SUCCESS;
-}
-
-//____________________________________________________________________
-void DumpGeo::handle(const Incident& inc)
-{
-  ATH_MSG_INFO("Handling incident '" << inc.type() << "'");
-
-  if (!m_geoExporter) {
-    ATH_MSG_INFO("Aborting due to null GeoExporter pointer.");
-    return;
-  }
-}
-
diff --git a/DetectorDescription/GeoModel/GeoModelStandalone/GeoExporter/CMakeLists.txt b/DetectorDescription/GeoModel/GeoModelStandalone/GeoExporter/CMakeLists.txt
deleted file mode 100644
index b1c5eb14a2d935a5c013600372122bf3b8d12852..0000000000000000000000000000000000000000
--- a/DetectorDescription/GeoModel/GeoModelStandalone/GeoExporter/CMakeLists.txt
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (C) 2002-2023 CERN for the benefit of the ATLAS collaboration
-
-# Declare the package name:
-atlas_subdir( GeoExporter )
-
-# External dependencies:
-find_package( Qt5 COMPONENTS Core Network )
-find_package( Boost )
-find_package( GeoModel COMPONENTS GeoModelKernel GeoModelDBManager
-   GeoModelWrite )
-
-# Component(s) in the package:
-atlas_add_library( GeoExporter
-   GeoExporter/*.h src/*.cxx
-   PUBLIC_HEADERS GeoExporter
-   PRIVATE_INCLUDE_DIRS ${Boost_INCLUDE_DIRS} ${GEOMODEL_INCLUDE_DIRS}
-   PRIVATE_LINK_LIBRARIES ${Boost_LIBRARIES} ${GEOMODEL_LIBRARIES} Qt5::Core
-   Qt5::Network GeoModelUtilities VP1Base VP1Utils CxxUtils )
diff --git a/DetectorDescription/GeoModel/GeoModelStandalone/GeoExporter/GeoExporter/ATLAS_CHECK_THREAD_SAFETY b/DetectorDescription/GeoModel/GeoModelStandalone/GeoExporter/GeoExporter/ATLAS_CHECK_THREAD_SAFETY
deleted file mode 100644
index 34833d6fe602954c45cf1c4b47e0a2a52f578b5a..0000000000000000000000000000000000000000
--- a/DetectorDescription/GeoModel/GeoModelStandalone/GeoExporter/GeoExporter/ATLAS_CHECK_THREAD_SAFETY
+++ /dev/null
@@ -1 +0,0 @@
-DetectorDescription/GeoModel/GeoModelStandalone/GeoExporter
diff --git a/DetectorDescription/GeoModel/GeoModelStandalone/GeoExporter/GeoExporter/GeoExporter.h b/DetectorDescription/GeoModel/GeoModelStandalone/GeoExporter/GeoExporter/GeoExporter.h
deleted file mode 100755
index 5fe7a049f6181ecd8037678c1a47d2b50956b357..0000000000000000000000000000000000000000
--- a/DetectorDescription/GeoModel/GeoModelStandalone/GeoExporter/GeoExporter/GeoExporter.h
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
-  Copyright (C) 2002-2023 CERN for the benefit of the ATLAS collaboration
-*/
-
-/////////////////////////////////////////////////////////////
-//                                                         //
-//  Header file for class GeoExporter                      //
-//                                                         //
-//  Author: Riccardo Maria BIANCHI <rbianchi@cern.ch>      //
-//                                                         //
-//  Initial version: Sept 2016                             //
-//  Major updates:   Aug 2018
-//                                                         //
-/////////////////////////////////////////////////////////////
-
-#ifndef GeoExporter_H
-#define GeoExporter_H
-
-//NB: There should never be any Qt (or Athena of course) includes in this file!!!
-
-#include "CxxUtils/checker_macros.h"
-#include <string>
-#include <vector>
-
-class StoreGateSvc;
-class IToolSvc;
-class ISvcLocator;
-
-// Marked not thread-safe because it uses VP1.
-class ATLAS_NOT_THREAD_SAFE GeoExporter {
-public:
-
-  GeoExporter(StoreGateSvc* sg, StoreGateSvc* detstore,
-	          ISvcLocator* svclocator,IToolSvc*toolSvc);//,
-  ~GeoExporter();
-
-  //We can check the validity of the argument already during algorithm initialise:
-  bool argumentsAreValid() const;
-
-  //Called in the first algorithm refresh to launch the gui:
-  void init();
-
-private:
-
-  GeoExporter(const GeoExporter & );
-  GeoExporter & operator= (const GeoExporter & );
-
-  class  Imp;
-  Imp   *m_d;
-
-};
-
-#endif
diff --git a/DetectorDescription/GeoModel/GeoModelStandalone/GeoExporter/README.md b/DetectorDescription/GeoModel/GeoModelStandalone/GeoExporter/README.md
deleted file mode 100644
index d9b47e61ad38ea7a07a099fdcde9b3ebab68147f..0000000000000000000000000000000000000000
--- a/DetectorDescription/GeoModel/GeoModelStandalone/GeoExporter/README.md
+++ /dev/null
@@ -1,6 +0,0 @@
-# GeoExporter
-
-GeoExporter loads and builds the GeoModel tree while running inside Athena 
-and it dumps it into a SQLite file.
-
-GeoExporter is called by the DumpGeo Athena algorithm.
diff --git a/DetectorDescription/GeoModel/GeoModelStandalone/GeoExporter/src/GeoExporter.cxx b/DetectorDescription/GeoModel/GeoModelStandalone/GeoExporter/src/GeoExporter.cxx
deleted file mode 100755
index 153832f99faf00bdeb215890d1b5e49e740343b1..0000000000000000000000000000000000000000
--- a/DetectorDescription/GeoModel/GeoModelStandalone/GeoExporter/src/GeoExporter.cxx
+++ /dev/null
@@ -1,325 +0,0 @@
-/*
-  Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration
-*/
-
-
-///////////////////////////////////////////////////////////////////////
-//                                                                   //
-//  Implementation of class GeoExporter                              //
-//                                                                   //
-//  Author: Riccardo Maria BIANCHI <riccardo.maria.bianchi@cern.ch>  //
-//                                                                   //
-//  Initial version: Sep 2017                                        
-//
-//  Main updates:
-//  - 2024, Feb -- Riccardo Maria BIANCHI <riccardo.maria.bianchi@cern.ch>
-//                 Migrated to CA, added new CLI options, 
-//                 added new filter over DetectorManagers
-//
-///////////////////////////////////////////////////////////////////////
-
-#include "GeoExporter/GeoExporter.h"
-#include "VP1Base/VP1Msg.h"
-
-#include "VP1Utils/VP1JobConfigInfo.h"
-#include "VP1Utils/VP1SGAccessHelper.h"
-
-#include "GeoModelKernel/GeoVolumeCursor.h"
-#include "GeoModelKernel/GeoVDetectorManager.h"
-#include "GeoModelKernel/GeoBox.h"
-
-#include "GeoModelUtilities/GeoModelExperiment.h"
-
-#include "GeoModelDBManager/GMDBManager.h"
-
-#include "GeoModelWrite/WriteGeoModel.h"
-// #include "GeoModelHelpers/defineWorld.h" //TODO: Use this as soon as we have the latest GeoModel in Athena main
-
-#include <QtCore/QStringList>
-#include <QtCore/QFile>
-#include <QtCore/QFileInfo>
-#include <QtCore/QUrl>
-#include <QtNetwork/QSslSocket>
-#include <QProcessEnvironment>
-#include <QDebug>
-
-#include <cassert>
-#include <iostream>
-
-#include <boost/range/irange.hpp>
-
-// Units
-#include "GeoModelKernel/Units.h"
-#define UNITS GeoModelKernelUnits  // so we can use, e.g., 'UNITS::cm'
-
-//TODO: replace this with GeoModelHelpers/defineWorld.h
-//_____________________________________________________________________________________
-GeoPhysVol* createTheWorld()
-{
-  // Define the units
-  #define gr   UNITS::gram
-  #define mole UNITS::mole
-  #define cm3  UNITS::cm3
-
-  // Define the chemical elements
-  GeoElement*  Nitrogen = new GeoElement ("Nitrogen" ,"N"  ,  7.0 ,  14.0067 *gr/mole);
-  GeoElement*  Oxygen   = new GeoElement ("Oxygen"   ,"O"  ,  8.0 ,  15.9995 *gr/mole);
-  GeoElement*  Argon    = new GeoElement ("Argon"    ,"Ar" , 18.0 ,  39.948  *gr/mole);
-  GeoElement*  Hydrogen = new GeoElement ("Hydrogen" ,"H"  ,  1.0 ,  1.00797 *gr/mole);
-
-  // Define the materials
-  double densityOfAir=0.001214 *gr/cm3;
-  GeoMaterial *air = new GeoMaterial("Air", densityOfAir);
-  air->add(Nitrogen  , 0.7494);
-  air->add(Oxygen, 0.2369);
-  air->add(Argon, 0.0129);
-  air->add(Hydrogen, 0.0008);
-  air->lock();
- 
-  //-----------------------------------------------------------------------------------//
-  // create the world volume container and
-  // get the 'world' volume, i.e. the root volume of the GeoModel tree
-  std::cout << "Creating the 'world' volume, i.e. the root volume of the GeoModel tree..." << std::endl;
-  const GeoBox* worldBox = new GeoBox(1000*UNITS::cm, 1000*UNITS::cm, 1000*UNITS::cm);
-  const GeoLogVol* worldLog = new GeoLogVol("WorldLog", worldBox, air);
-  GeoPhysVol* world = new GeoPhysVol(worldLog);
-  return world;
-}
-
-
-//____________________________________________________________________
-class GeoExporter::Imp {
-public:
-  Imp() {};
-  //We hold the arguments here until init is called:
-  StoreGateSvc* sg = nullptr;
-  StoreGateSvc* detstore = nullptr;
-  ISvcLocator* svclocator = nullptr;
-  IToolSvc*toolSvc = nullptr;
-};
-
-
-//____________________________________________________________________
-GeoExporter::GeoExporter(StoreGateSvc* sg,StoreGateSvc* detstore,
-	       ISvcLocator* svclocator,IToolSvc*toolSvc)
- : m_d(new Imp)
-{
-  m_d->sg = sg;
-  m_d->detstore = detstore;
-  m_d->svclocator = svclocator;
-  m_d->toolSvc = toolSvc;
-}
-
-//____________________________________________________________________
-GeoExporter::~GeoExporter()
-{
-  delete m_d; m_d=0;
-}
-
-//____________________________________________________________________
-bool GeoExporter::argumentsAreValid() const
-{
-  //Athena pointers:
-  if (!m_d->sg) {
-    VP1Msg::message("ERROR: Null pointer to event store.");
-    return false;
-  }
-  if (!m_d->detstore) {
-    VP1Msg::message("ERROR: Null pointer to detector store.");
-    return false;
-  }
-
-  return true;
-}
-
-
-//____________________________________________________________________
-void GeoExporter::init()
-{
-  VP1Msg::messageDebug("Start of GeoExporter::init()...");
-
-  VP1Msg::message("");
-  VP1Msg::message("===================================================");
-  VP1Msg::message("               Launching the GeoExporter");
-  VP1Msg::message("===================================================");
-  VP1Msg::message("");
-
-  VP1Msg::message("Accessing the ATLAS geometry...");
-  StoreGateSvc* detstore = m_d->detstore;
- //Get the world volume:
-  const GeoModelExperiment * theExpt = nullptr;
-  if (!VP1SGAccessHelper(detstore).retrieve(theExpt,"ATLAS")) {
-    std::cout << "Error: Could not retrieve the ATLAS GeoModelExperiment from detector store" << std::endl; // TODO: move to ATH_MSG_
-    //ATH_MSG_FATAL ("Error: Could not retrieve the ATLAS GeoModelExperiment from detector store");
-    //return StatusCode::FAILURE;
-    return; // TODO: move to Return statuscode
-  }
-  // GET ATLAS GEOMETRY
-  PVConstLink world(theExpt->getPhysVol());
-
-  // ### Get user's settings ###
-  QProcessEnvironment environment = QProcessEnvironment::systemEnvironment();
-  // -- get Detector Description tag
-  QString default_detdescrtag = environment.value("DUMPGEODETDESCRTAGDEFAULT");
-  QString user_detdescrtag = environment.value("DUMPGEODETDESCRTAG");
-  if ("1"==default_detdescrtag) {
-    VP1Msg::message("The user did not specify a DetDescrTag - Using the default one: " + user_detdescrtag);
-    }
-  else {
-    VP1Msg::message("User's settings - DetDescrTag: " + user_detdescrtag);
-    }
-  // -- get 'forceOverwrite' option
-  bool user_forceOverwrite = ( environment.value("DUMPGEOFORCEOVERWRITE")=="1" ? true : false );
-  VP1Msg::message("User's settings - forceOverwrite option: " + QString::number(user_forceOverwrite) );
-  // -- get sub-systems settings
-  bool user_noid = environment.value("DUMPGEO_NOID").toInt();
-  bool user_nocalo = environment.value("DUMPGEO_NOCALO").toInt();
-  bool user_nomuon = environment.value("DUMPGEO_NOMUON").toInt();
-  QString user_subsystems_filters = "" + QString(((user_noid) ? "-noID" : "")) + QString(((user_nocalo) ? "-noCalo" : "")) + QString(((user_nomuon) ? "-noMuon" : ""));
-  // -- get GeoModel Treetop filter // FIXME: check and update this!!
-  QString user_filterTreeTops = environment.value("DUMPGEOFILTERTREETOPS");
-  VP1Msg::message("User's settings - GeoModel TreeTops filter: " + user_filterTreeTops);
-  // -- get GeoModel Detector Managers filter // FIXME: check and update this!!
-  QString user_filterDetManagers = environment.value("DUMPGEOFILTERDETMANAGERS");
-  VP1Msg::message("User's settings - GeoModel DetectorManagers filter: " + user_filterDetManagers);
-
-
-  // Get list of TreeTops from the TREETOPFILTER
-  QStringList user_treetopslist;
-  if ( ! user_filterTreeTops.isEmpty() ) {
-    user_treetopslist = user_filterTreeTops.split(',');
-  }
-  // Get list of DetectorManagers from the TREETOPFILTER
-  QStringList user_detmanagerslist;
-  if ( ! user_filterDetManagers.isEmpty() ) {
-    user_detmanagerslist = user_filterDetManagers.split(','); 
-  }
-
-  GeoPhysVol* volTop = createTheWorld();
-
-if ( !(user_detmanagerslist.empty()) ) {
-  // Get list of managers
-  std::cout << "\nList of GeoModel managers: " << std::endl;
-  std::vector<std::string> managersList = theExpt->getListOfManagers();
-  if ( !(managersList.empty()) ) {
-   for (auto const& mm : managersList)
-    {
-        // get the DetectorManager
-        const GeoVDetectorManager* manager = theExpt->getManager(mm);
-
-        // get the name of the DetectorManager
-        std::string detManName = manager->getName();
-        std::cout << "\n\t DetectorManager: " << detManName << std::endl;
-
-        // get the DetManager's TreeTops
-        unsigned int nTreetops = manager->getNumTreeTops();
-        std::cout << mm << "\t - n.Treetops: " << nTreetops << std::endl;
-
-        if ( nTreetops > 0 && user_detmanagerslist.contains(QString::fromStdString(detManName)) ) {
-            
-            for(unsigned int i=0; i < nTreetops; ++i) {
-
-                PVConstLink treetop(manager->getTreeTop(i));
-
-                // get treetop's volume
-                const GeoVPhysVol* vol = treetop;
-                
-                // get volume's transform
-                // NOTE: we use getDefX() to get the transform without any alignment
-                GeoTransform* volXf = new GeoTransform( vol->getDefX() );
-                
-                // get volume's logvol's name
-                std::string volName = vol->getLogVol()->getName();
-                std::cout << "\t\t treetop: " << volName << std::endl;
-
-
-                // Add to the main volume a GeoNameTag with the name of the DetectorManager 
-                volTop->add(new GeoNameTag(detManName));
-                // add Transform and Volume to the main PhysVol
-                volTop->add(volXf);
-                volTop->add(const_cast<GeoVPhysVol*>(vol));
-
-                // DEBUG: dive into the Treetop
-                if ("BeamPipe"==detManName) {
-                GeoVolumeCursor av(treetop);
-                while (!av.atEnd()) {
-                    std::cout << "\t\ttreetop n." << i << " - child name: "  << av.getName() << "\n";
-                    av.next(); // increment volume cursor.
-                } // end while
-                }
-            } // end for
-        } // end if
-    } // end for
-  }
-} 
-// if ( !(user_treetopslist.empty()) ) {
-  std::cout << "\nLooping over top volumes in the GeoModel tree (children of the 'World' volume)..." << std::endl;
-  GeoVolumeCursor av(world);
-  while (!av.atEnd()) {
-
-	  std::string volname = av.getName();
-    std::cout << "\t* relevant NameTag:" << volname << std::endl ;
-    
-    av.next(); // increment volume cursor.
-    }
-// }
-
-  std::cout << "Creating the SQLite DB file..." << std::endl;
-  QString fileName = "geometry";
-  if ( !(user_detdescrtag.isEmpty()) ) {
-    fileName = "geometry-" + user_detdescrtag;
-  }
-  if ( !(user_treetopslist.isEmpty()) ) {
-        fileName = fileName + "-" + user_treetopslist.join("-");
-  }
-  if ( !(user_detmanagerslist.isEmpty()) ) {
-        fileName = fileName + "-" + user_detmanagerslist.join("-");
-  }
-  if ( !(user_subsystems_filters.isEmpty()) ) {
-        fileName = fileName + "-" + user_subsystems_filters + ".db";
-  }
-  fileName = fileName + ".db";
-
-  // check if fileName exists and if yes: Is it a file and no directory?
-   bool fileExists = QFileInfo::exists(fileName) && QFileInfo(fileName).isFile();
-  if (fileExists) {
-    if (user_forceOverwrite) {
-        VP1Msg::message("Removing the existing dump file ("+fileName+")...");
-        QFile file (fileName);
-        file.remove();
-    } else if ( !user_forceOverwrite ) {
-        VP1Msg::messageWarningAllRed("The output file ("+fileName+") is already present in the current folder, but you don't use the '-f' flag to overwrite it. The program will be stopped. Please remove or move the existing file to another folder, or use the '-f' flag to replace it.");
-        throw "existing output file";
-    }
-  }
-  // open the DB connection
-  GMDBManager db(fileName.toStdString());
-
-  // check the DB connection
-  if (db.checkIsDBOpen())
-      qDebug() << "OK! Database is open!";
-  else {
-      qDebug() << "Database ERROR!! Exiting...";
-      return;
-  }
-
-   std::cout << "Dumping the GeoModel geometry to the DB file..." << std::endl;
-  // Dump the tree volumes into a DB
-  GeoModelIO::WriteGeoModel dumpGeoModelGraph(db); // init the GeoModel node action
-  // visit all GeoModel nodes  
-  if (!(user_detmanagerslist.empty()) || !(user_treetopslist.empty())) {
-    volTop->exec(&dumpGeoModelGraph); 
-  } else {
-    world->exec(&dumpGeoModelGraph); 
-  }
-  std::cout << "Saving the GeoModel tree to the DB." << std::endl;
-  dumpGeoModelGraph.saveToDB(); // save to the SQlite DB file
-  std::cout << "DONE. Geometry saved." <<std::endl;
-
-  std::cout << "\nTest - list of all the GeoMaterial nodes in the persistified geometry:" << std::endl;
-  db.printAllMaterials();
-  std::cout << "\nTest - list of all the GeoElement nodes in the persistified geometry:" << std::endl;
-  db.printAllElements();
-
-  VP1Msg::messageDebug("end of GeoExporter::init().");
-}
diff --git a/ForwardDetectors/LUCID/LUCID_G4_SD/CMakeLists.txt b/ForwardDetectors/LUCID/LUCID_G4_SD/CMakeLists.txt
index bbbbd178aff2d10d5241d742083ea053638a81a4..58f15e694f3434d10d3e2f990d80ef629fdbac51 100644
--- a/ForwardDetectors/LUCID/LUCID_G4_SD/CMakeLists.txt
+++ b/ForwardDetectors/LUCID/LUCID_G4_SD/CMakeLists.txt
@@ -1,4 +1,4 @@
-# Copyright (C) 2002-2021 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration
 
 # Declare the package name:
 atlas_subdir( LUCID_G4_SD )
@@ -14,7 +14,7 @@ atlas_add_library( LUCID_G4_SDLib
                    OBJECT
                    NO_PUBLIC_HEADERS
                    INCLUDE_DIRS ${CLHEP_INCLUDE_DIRS} ${GEANT4_INCLUDE_DIRS} ${GTEST_INCLUDE_DIRS}
-                   LINK_LIBRARIES ${CLHEP_LIBRARIES} ${GEANT4_LIBRARIES} ${GTEST_LIBRARIES} G4AtlasToolsLib HitManagement LUCID_GeoModelLib LUCID_SimEvent StoreGateLib )
+                   LINK_LIBRARIES ${CLHEP_LIBRARIES} ${GEANT4_LIBRARIES} ${GTEST_LIBRARIES} G4AtlasToolsLib HitManagement LUCID_GeoModelLib LUCID_SimEvent MCTruth StoreGateLib )
 
 atlas_add_library( LUCID_G4_SD
                    src/components/*.cxx
@@ -24,7 +24,7 @@ atlas_add_library( LUCID_G4_SD
 
 atlas_add_test( LUCID_SensitiveDetector_gtest
                 SOURCES test/LUCID_SensitiveDetector_gtest.cxx
-                LINK_LIBRARIES LUCID_G4_SDLib G4AtlasToolsLib TestTools CxxUtils
+                LINK_LIBRARIES LUCID_G4_SDLib G4AtlasToolsLib MCTruth TestTools CxxUtils
                 POST_EXEC_SCRIPT nopost.sh )
 
 # Install files from the package:
diff --git a/ForwardDetectors/LUCID/LUCID_G4_SD/src/LUCID_SensitiveDetector.cxx b/ForwardDetectors/LUCID/LUCID_G4_SD/src/LUCID_SensitiveDetector.cxx
index 568950826212823e21e2cb547ac032a6364f549d..f1b3fe967afc793de6b165a2b019b7bced416066 100644
--- a/ForwardDetectors/LUCID/LUCID_G4_SD/src/LUCID_SensitiveDetector.cxx
+++ b/ForwardDetectors/LUCID/LUCID_G4_SD/src/LUCID_SensitiveDetector.cxx
@@ -1,5 +1,5 @@
 /*
-  Copyright (C) 2002-2021 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration
 */
 
 
@@ -11,6 +11,8 @@
 
 // Athena headers
 #include "LUCID_GeoModel/LUCID_Constants.h"
+#include "MCTruth/TrackHelper.h"
+
 
 // Geant4 headers
 #include "G4Step.hh"
@@ -63,12 +65,13 @@ bool LUCID_SensitiveDetector::ProcessHits(G4Step* aStep, G4TouchableHistory*) {
       G4cout << "LUCID_SensitiveDetector::ProcessHits(): It is from a Cerenkov process "  << G4endl;
     }
 
+  TrackHelper trHelp(aTrack);
   double energy = aTrack->GetKineticEnergy()/CLHEP::eV;
   double lambda = m_hit->GetWaveLength(energy);
 
   m_HitColl->Emplace(m_hit->GetTubNumber(aStep),
-                     aTrack->GetTrackID(),
                      aTrack->GetDefinition()->GetPDGEncoding(),
+                     trHelp.GetParticleLink(),
                      LUCID_HitHelper::GetVolNumber    (aTrack->GetLogicalVolumeAtVertex()->GetName()),
                      m_hit->GetPreStepPoint (aStep).x(),
                      m_hit->GetPreStepPoint (aStep).y(),
diff --git a/ForwardDetectors/LUCID/LUCID_G4_SD/test/LUCID_SensitiveDetector_gtest.cxx b/ForwardDetectors/LUCID/LUCID_G4_SD/test/LUCID_SensitiveDetector_gtest.cxx
index 382353e81b23f03f9fd2e99b24398f268b00fe2b..8ae7529c2a294dc847aa168cf1080a66800c8eea 100644
--- a/ForwardDetectors/LUCID/LUCID_G4_SD/test/LUCID_SensitiveDetector_gtest.cxx
+++ b/ForwardDetectors/LUCID/LUCID_G4_SD/test/LUCID_SensitiveDetector_gtest.cxx
@@ -1,5 +1,5 @@
 /*
-  Copyright (C) 2002-2022 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration
 */
 
 #include "CxxUtils/checker_macros.h"
@@ -38,7 +38,7 @@ class GaudiEnvironment : public ::testing::Environment {
   }
   ISvcLocator* m_svcLoc = nullptr;
 };
-class LUCID_SensitiveDetectortest : public ::testing::Test {	
+class LUCID_SensitiveDetectortest : public ::testing::Test {
   protected:
     virtual void SetUp() override {
     }
@@ -78,8 +78,8 @@ TEST_F( LUCID_SensitiveDetectortest, ProcessHits )
   G4double velocity1 = 99.93100;
   G4double steplength = 1.0;
   G4double charge = 0.0;
-  G4int encoding = 0;
-  G4int antiencoding = 0;
+  G4int encoding = 22;
+  G4int antiencoding = 22;
   G4String astring = "Cerenkov";
   G4ProcessType atype = (G4ProcessType)0;
   G4String nop1 = "opticalphoton";
@@ -94,7 +94,7 @@ TEST_F( LUCID_SensitiveDetectortest, ProcessHits )
   LUCID_SimHitCollection * a = sd2.m_HitColl.ptr();
   ASSERT_EQ( a->begin()->GetTubeID(), 50 ); //test the TubeID value of the Hit generated by the member function ProcessHits and stored in the smart point m_HitColl, it should be 50 based on my setting. The same below
   ASSERT_EQ( a->begin()->GetTrack(), 0 );
-  ASSERT_TRUE( a->begin()->GetPdgCode() == 3 );
+  ASSERT_TRUE( a->begin()->GetPdgCode() == 22 );
   ASSERT_TRUE( a->begin()->GetGenVolume() == 2 );
   ASSERT_TRUE( a->begin()->GetX() == 0 );
   ASSERT_TRUE( a->begin()->GetY() == 0 );
@@ -157,4 +157,3 @@ int main( int argc, char** argv ) {
   return RUN_ALL_TESTS();
 
 }
-
diff --git a/Generators/CosmicGenerator/CMakeLists.txt b/Generators/CosmicGenerator/CMakeLists.txt
index 77f6eb9151d1b28c61b5f87044a0727438c53268..d8d6c6180c018826dd33b8966359682c63e49be7 100644
--- a/Generators/CosmicGenerator/CMakeLists.txt
+++ b/Generators/CosmicGenerator/CMakeLists.txt
@@ -23,5 +23,4 @@ atlas_add_component( CosmicGenerator
 
 # Install files from the package:
 atlas_install_python_modules( python/*.py POST_BUILD_CMD ${ATLAS_FLAKE8} )
-atlas_install_joboptions( share/*.txt share/*.py )
 
diff --git a/Generators/CosmicGenerator/share/CosmicSliceConfig.py b/Generators/CosmicGenerator/share/CosmicSliceConfig.py
deleted file mode 100644
index 340888a5eb7c0b1d8ed792aa81a776d2fa4d007a..0000000000000000000000000000000000000000
--- a/Generators/CosmicGenerator/share/CosmicSliceConfig.py
+++ /dev/null
@@ -1,57 +0,0 @@
-from CosmicGenerator.CosmicGeneratorConf import CosmicGenerator
-cg = CosmicGenerator()
-
-if simFlags.CosmicPtSlice == 'slice1':
-    cg.emin =  10.*1000. #  10 GeV
-    cg.emax = 100.*1000. # 100 GeV
-    cg.xvert_low =  -1000.*200. # 200 m
-    cg.xvert_hig =   1000.*200. # 200 m
-    cg.zvert_low =  -1000.*200. # 200 m
-    cg.zvert_hig =   1000.*200. # 200 m
-elif simFlags.CosmicPtSlice == 'slice2':
-    cg.emin = 100.*1000. # 100 GeV
-    cg.emax = 300.*1000. # 300 GeV
-    cg.xvert_low =  -1000.*600. # 600 m
-    cg.xvert_hig =   1000.*600. # 600 m
-    cg.zvert_low =  -1000.*600. # 600 m
-    cg.zvert_hig =   1000.*600. # 600 m
-elif simFlags.CosmicPtSlice == 'slice3':
-    cg.emin =  300.*1000.  #  300 GeV
-    cg.emax = 1000.*1000.  # 1000 GeV
-    cg.xvert_low =  -1000.*1000. # 1 km
-    cg.xvert_hig =   1000.*1000. # 1 km
-    cg.zvert_low =  -1000.*1000. # 1 km
-    cg.zvert_hig =   1000.*1000. # 1 km
-elif simFlags.CosmicPtSlice == 'slice4':
-    cg.emin = 1000.*1000. # 1 TeV
-    cg.emax = 5000.*1000. # 5 TeV
-    cg.xvert_low =  -1000.*3000. # 3 km
-    cg.xvert_hig =   1000.*3000. # 3 km
-    cg.zvert_low =  -1000.*3000. # 3 km
-    cg.zvert_hig =   1000.*3000. # 3 km
-elif simFlags.CosmicPtSlice != 'NONE':
-    printfunc ('Slice name incorrect!')
-    # TODO: theApp.exit(1)?
-    import sys
-    sys.exit(1)
-
-bedrockDX = (cg.xvert_hig - cg.xvert_low)/2.
-bedrockDZ = (cg.zvert_hig - cg.zvert_low)/2.
-
-if (bedrockDX > 350000 or bedrockDZ > 350000) :
-    newSize = max( bedrockDX , bedrockDZ )
-    printfunc ("Resizing bedrock (mm) to fit cosmic generator:",newSize)
-    from AthenaCommon.Configurable import Configurable
-    if Configurable.allConfigurables.get('GeoModelSvc'):
-        GeoModelSvc = Configurable.allConfigurables.get('GeoModelSvc')
-    else:
-        GeoModelSvc = theApp.service('GeoModelSvc')
-    if (newSize <=  500000) : GeoModelSvc.CavernInfraVersionOverride = 'CavernInfra-03-Bedrock500'
-    elif (newSize <= 1000000) : GeoModelSvc.CavernInfraVersionOverride = 'CavernInfra-03-Bedrock1000'
-    elif (newSize <= 1500000) : GeoModelSvc.CavernInfraVersionOverride = 'CavernInfra-03-Bedrock1500'
-    elif (newSize <= 2000000) : GeoModelSvc.CavernInfraVersionOverride = 'CavernInfra-03-Bedrock2000'
-    elif (newSize <= 3000000) : GeoModelSvc.CavernInfraVersionOverride = 'CavernInfra-03-Bedrock3000'
-    elif (newSize <= 4000000) : GeoModelSvc.CavernInfraVersionOverride = 'CavernInfra-03-Bedrock4000'
-    elif (newSize <= 5000000) : GeoModelSvc.CavernInfraVersionOverride = 'CavernInfra-03-Bedrock5000'
-else :
-    printfunc ("No need to resize the bedrock for cosmic generation")
diff --git a/Generators/CosmicGenerator/share/SetCosmicGenerator.py b/Generators/CosmicGenerator/share/SetCosmicGenerator.py
deleted file mode 100644
index a559e92b2d1ccab53ac88e5629412cc7690ec099..0000000000000000000000000000000000000000
--- a/Generators/CosmicGenerator/share/SetCosmicGenerator.py
+++ /dev/null
@@ -1,57 +0,0 @@
-"""
-Set up cosmic generator for simulation + trigger.
-"""
-
-## Configuring the Athena application for a 'generator' job
-from G4AtlasApps.SimFlags import simFlags
-simFlags.load_cosmics_flags()
-assert hasattr(simFlags, "ReadTR")
-
-## Set up standard algorithms and random seeds
-from AthenaCommon.AlgSequence import AlgSequence
-topSequence = AlgSequence()
-from AthenaCommon.CfgGetter import getAlgorithm
-if simFlags.ReadTR.statusOn:
-    try:
-        cosmicGen = topSequence.TrackRecordCosmicGenerator
-    except:
-        cosmicGen = getAlgorithm("TrackRecordCosmicGenerator")
-else:
-    try:
-        cosmicGen = topSequence.EvgenCosmicGenerator
-    except:
-        cosmicGen = getAlgorithm("EvgenCosmicGenerator")
-
-## Adding the McEventCollection dumper to the list of algs
-#from TruthExamples.TruthExamplesConf import DumpMC
-#topSequence += DumpMC()
-
-printfunc (cosmicGen)
-
-#FIXME temporary hacks to ensure that BeamEffectsAlg runs after the Cosmic Generators
-def moveAlgInSequence(alg, seq, newpos):
-    l = seq.getChildren()
-    delattr(seq, alg.getName())
-    seq.insert(newpos, alg)
-
-def checkCosmicAlgPosition(beamAlg, cosmicAlg):
-    topSeq = AlgSequence()
-
-    if cosmicAlg in topSeq:
-        cosmicInd = topSeq.getChildren().index(cosmicAlg)
-        if beamAlg in topSeq:
-            index = topSeq.getChildren().index(beamAlg)
-            # make sure cosmicAlg is before index
-            if cosmicInd > index:
-                moveAlgInSequence(cosmicAlg, topSeq, index)
-                cosmicInd = index
-        #ensure that timinAlg is before cosmicAlg
-        timingAlg=None
-        if hasattr(topSeq,"SimTimerBegin"):
-            timingAlg = topSeq.SimTimerBegin
-        if timingAlg is not None and timingAlg in topSeq:
-            index = topSeq.getChildren().index(timingAlg)
-            if index > cosmicInd:
-                moveAlgInSequence(timinAlg, topSeq, cosmicInd)
-if hasattr(topSequence, 'BeamEffectsAlg'):
-    checkCosmicAlgPosition(topSequence.BeamEffectsAlg, cosmicGen)
diff --git a/Generators/CosmicGenerator/share/cosmicfile.txt b/Generators/CosmicGenerator/share/cosmicfile.txt
deleted file mode 100644
index 2e18bcd2dfa18e3ca41d6db47e9e403f92b3f392..0000000000000000000000000000000000000000
--- a/Generators/CosmicGenerator/share/cosmicfile.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-     3006     1    5    -759.137    1178.204    1605.107      -0.008      -0.417       0.120
-     5702     1    6    -217.468    1251.892     360.173     -54.664    -239.179     -42.406
-     8607     1    6    -900.194    1074.106    1330.148      65.205    -130.682    -110.031
diff --git a/Generators/CosmicGenerator/share/jobOptions_ConfigCosmicProd.py b/Generators/CosmicGenerator/share/jobOptions_ConfigCosmicProd.py
deleted file mode 100644
index 12e69046055d0af932ff719173c9264507bfad2f..0000000000000000000000000000000000000000
--- a/Generators/CosmicGenerator/share/jobOptions_ConfigCosmicProd.py
+++ /dev/null
@@ -1,76 +0,0 @@
-########################################################
-########## including out Generator options...
-#######################################################
-
-from G4AtlasApps.SimFlags import simFlags
-if not simFlags.CosmicFilterVolumeName.statusOn:
-    printfunc ("Warning CosmicFilterVolumeName not set using default (CaloEntryLayer)")
-    simFlags.CosmicFilterVolumeName = "CaloEntryLayer"
-
-#--------------------------------------------------------------
-# Setup generator + simulation + trigger
-#--------------------------------------------------------------
-
-import AthenaCommon.AtlasUnixGeneratorJob
-
-from AthenaCommon.AppMgr import theApp
-from AthenaCommon.AppMgr import ServiceMgr
-
-# make sure we are loading the ParticleProperty service
-from PartPropSvc.PartPropSvcConf import PartPropSvc
-ServiceMgr += PartPropSvc()
-
-from AthenaCommon.AlgSequence import AlgSequence
-job = AlgSequence()
-from CosmicGenerator.CosmicGeneratorConf import CosmicGenerator
-job += CosmicGenerator()
-
-job.CosmicGenerator.emin = 10000;  # default =10000 #10 GeV
-job.CosmicGenerator.emax = 5000*1000; # 2 TeV
-
-if simFlags.CosmicFilterVolumeName == "Muon":
-    printfunc ('Using muon Volume setup of Cosmic Generator...')
-    job.CosmicGenerator.xvert_low =   -301700.
-    job.CosmicGenerator.xvert_hig =    298300.
-    job.CosmicGenerator.zvert_low =    -300000.
-    job.CosmicGenerator.zvert_hig =    300000.
-    job.CosmicGenerator.Radius= 20000.
-else:
-    printfunc ('Using Non-muon Volume setup of Cosmic Generator...')
-    job.CosmicGenerator.xvert_low =   -200000.
-    job.CosmicGenerator.xvert_hig =    200000.
-    job.CosmicGenerator.zvert_low =   -200000.
-    job.CosmicGenerator.zvert_hig =    200000.
-    job.CosmicGenerator.Radius= 10000. #barrel length ~22m
-
-
-job.CosmicGenerator.yvert_val =    57300+41000.
-job.CosmicGenerator.ctcut     =    0.
-job.CosmicGenerator.OptimizeForCavern= True
-job.CosmicGenerator.IPx=0.
-job.CosmicGenerator.IPy=0.
-job.CosmicGenerator.IPz=0.
-
-
-#special settings from Juerg Beringer
-if simFlags.CosmicFilterVolumeName == "Pixel" or simFlags.CosmicFilterVolumeName2 == "Pixel":
-    job.CosmicGenerator.Radius= 2000.
-    job.CosmicGenerator.doPathLengthCut = True         # Optimization based on box cut in pixel detector plane
-    job.CosmicGenerator.energyCutThreshold = 100.      # - margin of error for energy loss calculation (in MeV)
-    job.CosmicGenerator.doAimedAtPixelsCut = True      # Optimization based on box cut in pixel detector plane
-    job.CosmicGenerator.pixelplane_maxx = 1150.        # - require |x| < value in mm
-    job.CosmicGenerator.pixelplane_maxz = 1650.        # - require |y| < value in mm
-    job.CosmicGenerator.doReweighting = True           # Whether to use reweighting for cosmic ray generation
-    job.CosmicGenerator.rvert_max = 300000.            # - radius in mm for generating primary vertex
-
-
-#fix for bug: 49362
-import sys
-ServiceMgr.EventSelector.EventsPerRun = int(2**31 - 1) #sys.maxint on a 32-bit machine
-
-if simFlags.CosmicPtSlice.statusOn:
-    include('CosmicGenerator/CosmicSliceConfig.py')
-
-#######################################################
-########## End of Cosmic Generator Configuration
-#######################################################
diff --git a/Generators/CosmicGenerator/share/jobOptions_CosmicGenerator.py b/Generators/CosmicGenerator/share/jobOptions_CosmicGenerator.py
deleted file mode 100644
index ea02a914af9c20da90b3fe0f2a7bb2075d8bc82a..0000000000000000000000000000000000000000
--- a/Generators/CosmicGenerator/share/jobOptions_CosmicGenerator.py
+++ /dev/null
@@ -1,68 +0,0 @@
-###############################################################
-#
-# Job options file for testing ParticleGenerator.
-#
-#==============================================================
-
-## configuring the Athena application for a 'generator' job
-import AthenaCommon.AtlasUnixGeneratorJob
-
-## get a handle on the ServiceManager
-from AthenaCommon.AppMgr import ServiceMgr
-
-## make sure we are loading the ParticleProperty service
-from PartPropSvc.PartPropSvcConf import PartPropSvc
-ServiceMgr += PartPropSvc()
-
-## get a handle on the top sequence of algorithms
-from AthenaCommon.AlgSequence import AlgSequence
-topSequence = AlgSequence()
-
-#--------------------------------------------------------------
-# Private Application Configuration options
-#--------------------------------------------------------------
-
-## adding the CosmicGenerator alg. to the list of algorithms to be run
-from CosmicGenerator.CosmicGeneratorConf import CosmicGenerator
-topSequence += CosmicGenerator()
-
-## adding the McEventCollection dumper to the list of algs. to be run
-from TruthExamples.TruthExamplesConf import DumpMC
-topSequence += DumpMC()
-
-#--------------------------------------------------------------
-# Set output level threshold (1=VERBOSE, 2=DEBUG, 3=INFO, 4=WARNING, 5=ERROR, 6=FATAL )
-#--------------------------------------------------------------
-ServiceMgr.MessageSvc.OutputLevel      = VERBOSE
-
-#--------------------------------------------------------------
-# Event related parameters
-#--------------------------------------------------------------
-# Number of events to be processed (default is 10)
-theApp.EvtMax = 20
-ServiceMgr.StoreGateSvc.Dump = FALSE
-
-#--------------------------------------------------------------
-# CosmicGenerator parameters
-#--------------------------------------------------------------
-#
-# Note that in this coordinate frame the y-axis points upward
-# such that the cosmics arrive from upward to downward in y.
-# The production vertex of cosmics is randomly distributed (flat)
-# in the x-z plane with boundaries given below.
-# The energy range is given as well.
-#
-cosmicGenerator = topSequence.CosmicGenerator
-cosmicGenerator.emin = 300
-cosmicGenerator.emax = 310
-cosmicGenerator.xvert_low =   -10.
-cosmicGenerator.xvert_hig =    10.
-cosmicGenerator.zvert_low =     0.
-cosmicGenerator.zvert_hig =    50.
-cosmicGenerator.yvert_val =  5000.
-cosmicGenerator.OptimizeForSR1 = 0
-#==============================================================
-#
-# End of job options file
-#
-###############################################################
diff --git a/Generators/CosmicGenerator/share/jobOptions_CosmicGenerator.txt b/Generators/CosmicGenerator/share/jobOptions_CosmicGenerator.txt
deleted file mode 100644
index 78ff7d1287df9e6618b8e1b0aa6fb227f60441e5..0000000000000000000000000000000000000000
--- a/Generators/CosmicGenerator/share/jobOptions_CosmicGenerator.txt
+++ /dev/null
@@ -1,56 +0,0 @@
-//##############################################################
-//
-// Job options file for testing ParticleGenerator.
-//
-//==============================================================
-
-#include "AthenaCommon/Atlas_Gen.UnixStandardJob.txt"
-#include "PartPropSvc/PartPropSvc.txt"
-
-//--------------------------------------------------------------
-// Private Application Configuration options
-//--------------------------------------------------------------
-//load relevant libraries
-
-ApplicationMgr.Dlls  += { "CosmicGenerator","TruthExamples" };      
-ApplicationMgr.TopAlg = { "CosmicGenerator","DumpMC" };
-
-//--------------------------------------------------------------
-// Set output level threshold (1=VERBOSE, 2=DEBUG, 3=INFO, 4=WARNING, 5=ERROR, 6=FATAL )
-//--------------------------------------------------------------
-
-MessageSvc.OutputLevel      = 1;
-
-//--------------------------------------------------------------
-// Event related parameters
-//--------------------------------------------------------------
-
-// Number of events to be processed (default is 10)
-ApplicationMgr.EvtMax = 20;
-StoreGateSvc.Dump = false;
-
-//--------------------------------------------------------------
-// CosmicGenerator parameters
-//--------------------------------------------------------------
-//
-// Note that in this coordinate frame the y-axis points upward
-// such that the cosmics arrive from upward to downward in y.
-// The production vertex of cosmics is randomly distributed (flat)
-// in the x-z plane with boundaries given below.
-// The energy range is given as well.
-// Note that the energy is given in MeV and distances in mm
-//
-CosmicGenerator.emin = 300000;
-CosmicGenerator.emax = 310000;
-CosmicGenerator.xvert_low =   -100.;
-CosmicGenerator.xvert_hig =    100.;
-CosmicGenerator.zvert_low =      0.;
-CosmicGenerator.zvert_hig =    500.;
-CosmicGenerator.yvert_val =  50000.;
-CosmicGenerator.OptimizeForSR1 = true;
-//==============================================================
-//
-// End of job options file
-//
-//##############################################################
-
diff --git a/Generators/CosmicGenerator/share/jobOptions_MuonEndCapCosmicGenerator.py b/Generators/CosmicGenerator/share/jobOptions_MuonEndCapCosmicGenerator.py
deleted file mode 100644
index 30ad43f4b61e08b59d3ac02ddef925c5417e3694..0000000000000000000000000000000000000000
--- a/Generators/CosmicGenerator/share/jobOptions_MuonEndCapCosmicGenerator.py
+++ /dev/null
@@ -1,64 +0,0 @@
-###############################################################
-#
-# Job options file for testing ParticleGenerator.
-#
-#==============================================================
-import AthenaCommon.AtlasUnixGeneratorJob
-
-from AthenaCommon.AppMgr import theApp
-from AthenaCommon.AppMgr import ServiceMgr
-
-# make sure we are loading the ParticleProperty service
-from PartPropSvc.PartPropSvcConf import PartPropSvc
-ServiceMgr += PartPropSvc()
-
-#--------------------------------------------------------------
-# Private Application Configuration options
-#--------------------------------------------------------------
-
-#--------------------------------------------------------------
-# Set output level threshold (1=VERBOSE, 2=DEBUG, 3=INFO, 4=WARNING, 5=ERROR, 6=FATAL )
-#--------------------------------------------------------------
-ServiceMgr.MessageSvc.OutputLevel      = VERBOSE
-
-#--------------------------------------------------------------
-# Event related parameters
-#--------------------------------------------------------------
-# Number of events to be processed (default is 10)
-theApp.EvtMax = 20
-ServiceMgr.StoreGateSvc.Dump = FALSE
-
-#--------------------------------------------------------------
-# CosmicGenerator parameters
-#--------------------------------------------------------------
-#
-# Note that in this coordinate frame the y-axis points upward
-# such that the cosmics arrive from upward to downward in y.
-# The production vertex of cosmics is randomly distributed (flat)
-# in the x-z plane with boundaries given below.
-# The energy range is given as well.
-#
-from AthenaCommon.AlgSequence import AlgSequence
-job=AlgSequence()
-from CosmicGenerator.CosmicGeneratorConf import CosmicGenerator
-job += CosmicGenerator()
-job.CosmicGenerator = Algorithm( "CosmicGenerator" )
-job.CosmicGenerator.emin = 10000;  # 10 GeV
-job.CosmicGenerator.emax = 2000*1000; # 2 TeV
-job.CosmicGenerator.xvert_low =   -301700.
-job.CosmicGenerator.xvert_hig =   298300.
-job.CosmicGenerator.zvert_low =   -300000.
-job.CosmicGenerator.zvert_hig =   300000.
-job.CosmicGenerator.yvert_val =   57300+41000.
-job.CosmicGenerator.ctcut     =   0.0
-job.CosmicGenerator.OptimizeForMuonEndCap = True
-job.CosmicGenerator.Radius    =   12000.
-job.CosmicGenerator.Zposition =   14500.
-
-from TruthExamples.TruthExamplesConf import DumpMC
-job += DumpMC()
-#==============================================================
-#
-# End of job options file
-#
-###############################################################
diff --git a/Generators/CosmicGenerator/share/jobOptions_PixelEndCapCosmicGenerator.py b/Generators/CosmicGenerator/share/jobOptions_PixelEndCapCosmicGenerator.py
deleted file mode 100644
index c129e3baab905f8e88071f561aaa271f9b283a43..0000000000000000000000000000000000000000
--- a/Generators/CosmicGenerator/share/jobOptions_PixelEndCapCosmicGenerator.py
+++ /dev/null
@@ -1,69 +0,0 @@
-###############################################################
-#
-# Job options file for testing ParticleGenerator.
-#
-#==============================================================
-import AthenaCommon.AtlasUnixGeneratorJob
-
-from AthenaCommon.AppMgr import theApp
-from AthenaCommon.AppMgr import ServiceMgr
-
-# make sure we are loading the ParticleProperty service
-from PartPropSvc.PartPropSvcConf import PartPropSvc
-ServiceMgr += PartPropSvc()
-
-#--------------------------------------------------------------
-# Private Application Configuration options
-#--------------------------------------------------------------
-
-#--------------------------------------------------------------
-# Set output level threshold (1=VERBOSE, 2=DEBUG, 3=INFO, 4=WARNING, 5=ERROR, 6=FATAL )
-#--------------------------------------------------------------
-ServiceMgr.MessageSvc.OutputLevel      = VERBOSE
-#--------------------------------------------------------------
-# Event related parameters
-#--------------------------------------------------------------
-# Number of events to be processed (default is 10)
-theApp.EvtMax = 20
-ServiceMgr.StoreGateSvc.Dump = FALSE
-
-#--------------------------------------------------------------
-# CosmicGenerator parameters
-#--------------------------------------------------------------
-#
-# Note that in this coordinate frame the y-axis points upward
-# such that the cosmics arrive from upward to downward in y.
-# The production vertex of cosmics is randomly distributed (flat)
-# in the x-z plane with boundaries given below.
-# The energy range is given as well.
-#
-from AthenaCommon.AlgSequence import AlgSequence
-job=AlgSequence()
-from CosmicGenerator.CosmicGeneratorConf import CosmicGenerator
-job += CosmicGenerator()
-job.CosmicGenerator = Algorithm( "CosmicGenerator" )
-job.CosmicGenerator.emin = 500
-job.CosmicGenerator.emax = 200000
-job.CosmicGenerator.xvert_low = -229.
-job.CosmicGenerator.xvert_hig = 229.
-job.CosmicGenerator.zvert_low = -356.
-job.CosmicGenerator.zvert_hig = 356.
-# add a centimeter above the scintillator...
-# so that muon has full path through the scintillator volume
-job.CosmicGenerator.yvert_val = 200.
-# remember that theta is measured from the surface of the scintillator
-job.CosmicGenerator.ThetaMin  = 1.
-job.CosmicGenerator.ThetaMax  = 3.1415927
-job.CosmicGenerator.PhiMin    = -3.1415927
-job.CosmicGenerator.PhiMax    = 3.1415927
-job.CosmicGenerator.OptimizeForSR1PixelEndCap = True
-job.CosmicGenerator.SwapYZAxis = True
-
-from TruthExamples.TruthExamplesConf import DumpMC
-job += DumpMC()
-
-#==============================================================
-#
-# End of job options file
-#
-###############################################################
diff --git a/Generators/CosmicGenerator/share/jobOptions_PixelEndCapCosmicGenerator.txt b/Generators/CosmicGenerator/share/jobOptions_PixelEndCapCosmicGenerator.txt
deleted file mode 100644
index dc1e901a874dc39ef31114fd7104bea450acba17..0000000000000000000000000000000000000000
--- a/Generators/CosmicGenerator/share/jobOptions_PixelEndCapCosmicGenerator.txt
+++ /dev/null
@@ -1,61 +0,0 @@
-//##############################################################
-//
-// Job options file for testing ParticleGenerator.
-//
-//==============================================================
-
-#include "AthenaCommon/Atlas_Gen.UnixStandardJob.txt"
-#include "PartPropSvc/PartPropSvc.txt"
-
-//--------------------------------------------------------------
-// Private Application Configuration options
-//--------------------------------------------------------------
-//load relevant libraries
-
-ApplicationMgr.Dlls  += { "CosmicGenerator","TruthExamples" };      
-ApplicationMgr.TopAlg = { "CosmicGenerator","DumpMC" };
-
-//--------------------------------------------------------------
-// Set output level threshold (1=VERBOSE, 2=DEBUG, 3=INFO, 4=WARNING, 5=ERROR, 6=FATAL )
-//--------------------------------------------------------------
-
-MessageSvc.OutputLevel      = 1;
-
-//--------------------------------------------------------------
-// Event related parameters
-//--------------------------------------------------------------
-
-// Number of events to be processed (default is 10)
-ApplicationMgr.EvtMax = 20;
-StoreGateSvc.Dump = false;
-
-//--------------------------------------------------------------
-// CosmicGenerator parameters
-//--------------------------------------------------------------
-//
-// Note that in this coordinate frame the y-axis points upward
-// such that the cosmics arrive from upward to downward in y.
-// The production vertex of cosmics is randomly distributed (flat)
-// in the x-z plane with boundaries given below.
-// The energy range is given as well.
-// Note that the energy is given in MeV and distances in mm
-//
-CosmicGenerator.emin = 300000;
-CosmicGenerator.emax = 310000;
-CosmicGenerator.xvert_low =   -100.;
-CosmicGenerator.xvert_hig =    100.;
-CosmicGenerator.zvert_low =      0.;
-CosmicGenerator.zvert_hig =    500.;
-CosmicGenerator.yvert_val =  50000.;
-CosmicGenerator.ThetaMin  =      0.;
-CosmicGenerator.ThetaMax  =      1.;
-CosmicGenerator.PhiMin    = -3.1415927;
-CosmicGenerator.PhiMax    =  3.1415927;
-CosmicGenerator.OptimizeForSR1PixelEndCap = true;
-CosmicGenerator.SwapYZAxis = true;
-//==============================================================
-//
-// End of job options file
-//
-//##############################################################
-
diff --git a/InnerDetector/InDetMonitoring/InDetAlignmentMonitoringRun3/python/IDAlignMonGenericTracksAlgCfg.py b/InnerDetector/InDetMonitoring/InDetAlignmentMonitoringRun3/python/IDAlignMonGenericTracksAlgCfg.py
index 21a6caa758746197ba5ed1e1e78c42446e44c6d0..ca88e6cd2d6e49eed1d13e7c2dfd172091096fb1 100644
--- a/InnerDetector/InDetMonitoring/InDetAlignmentMonitoringRun3/python/IDAlignMonGenericTracksAlgCfg.py
+++ b/InnerDetector/InDetMonitoring/InDetAlignmentMonitoringRun3/python/IDAlignMonGenericTracksAlgCfg.py
@@ -21,10 +21,11 @@ def IDAlignMonGenericTracksAlgCfg(helper, alg, **kwargs):
     m_rangeTRTHits = 60
     m_etaRange = 2.7
     m_etaBins = 40
+    m_phiBins = 80
     m_d0BsNbins = 100
     m_d0Range = 2
-    m_z0Range = 50.
-    m_d0BsRange = 0.5
+    m_z0Range = 70.
+    m_d0BsRange = 0.05
      
     # Set a folder name from the user options
     folderName = "ExtendedTracks_NoTriggerSelection"
@@ -32,7 +33,7 @@ def IDAlignMonGenericTracksAlgCfg(helper, alg, **kwargs):
         folderName = kwargs["TrackName"]
     
     # this creates a "genericTrackGroup" called "alg" which will put its histograms into the subdirectory "GenericTracks"
-    genericTrackGroup = helper.addGroup(alg, 'Tracks')
+    genericTrackGroup = helper.addGroup(alg, 'IDA_Tracks')
     pathtrack = '/IDAlignMon/'+folderName+'/GenericTracks'
 
     varName = 'm_ngTracks;NTracksPerEvent'
@@ -40,11 +41,11 @@ def IDAlignMonGenericTracksAlgCfg(helper, alg, **kwargs):
     genericTrackGroup.defineHistogram(varName, type='TH1F', path=pathtrack, title=title, xbins=m_NTracksRange+1, xmin=-0.5, xmax=m_NTracksRange +0.5)
     
     varName = 'mu_m;mu_perEvent'
-    title = '#LT#mu#GT average interactions per crossing;#LT#mu#GT per event;Events'
+    title = '#LT#mu#GT average interactions per crossing;#LT#mu#GT;Events'
     genericTrackGroup.defineHistogram(varName, type='TH1F', path=pathtrack, title=title, xbins=101, xmin=-0.5, xmax= 100.5)
 
     varName = 'm_lb;LumiBlock'
-    title = 'Lumiblock of the tracks;Lumiblock;Events'
+    title = 'Lumiblock of the tracks;Lumiblock;Tracks'
     genericTrackGroup.defineHistogram(varName, type='TH1F', path=pathtrack, title=title, xbins=1024, xmin=-0.5, xmax=1023.5)
 
     varName = 'm_beamSpotX,m_beamSpotY;YBs_vs_XBs'
@@ -55,7 +56,7 @@ def IDAlignMonGenericTracksAlgCfg(helper, alg, **kwargs):
     title = 'BeamSpot Position: y vs z; z_{BS} [mm]; y_{BS} [mm]'
     genericTrackGroup.defineHistogram(varName, type='TH2F', path=pathtrack, title=title, xbins=100, xmin= -m_z0Range, xmax= m_z0Range, ybins=100, ymin=-0.8, ymax=-0.1)
 
-    varName = 'm_beamSpotX,m_beamSpotZ;XBs_vs_XZs'
+    varName = 'm_beamSpotX,m_beamSpotZ;XBs_vs_ZBs'
     title = 'BeamSpot Position: x vs z; z_{BS} [mm]; x_{BS} [mm]'
     genericTrackGroup.defineHistogram(varName, type='TH2F', path=pathtrack, title=title, xbins=100, xmin= -m_z0Range, xmax= m_z0Range, ybins=100, ymin=-0.8, ymax=-0.1)
 
@@ -63,35 +64,47 @@ def IDAlignMonGenericTracksAlgCfg(helper, alg, **kwargs):
     title = 'Y BeamSpot position: y vs lumiblock; LumiBlock; y_{BS} [mm]'
     genericTrackGroup.defineHistogram(varName, type='TProfile', path=pathtrack, title=title, xbins=1024, xmin=-0.5, xmax=1023.5, ybins=100, ymin=-0.8, ymax=-0.1)
 
+    varName = 'm_lb,m_beamSpotX;XBs_vs_LumiBlock'
+    title = 'X BeamSpot position: x vs lumiblock; LumiBlock; x_{BS} [mm]'
+    genericTrackGroup.defineHistogram(varName, type='TProfile', path=pathtrack, title=title, xbins=1024, xmin=-0.5, xmax=1023.5, ybins=100, ymin=-0.8, ymax=-0.1)
+
     varName = 'm_nhits_per_track;Nhits_per_track'
     title = 'Number of hits per track;Number of hits;Number of Tracks'
     genericTrackGroup.defineHistogram(varName, type='TH1F', path=pathtrack, title=title, xbins=m_rangePixHits + m_rangeSCTHits + m_rangeTRTHits + 1, xmin=-0.5, xmax=m_rangePixHits + m_rangeSCTHits + m_rangeTRTHits + 0.5)
 
     varName = 'm_npixelhits_per_track;Npixhits_per_track'
-    title = 'Number of PIXEL hits per track;Number of Pixel hits (PIX+IBL);Number of Tracks'
+    title = 'Number of PIXEL (PIX+IBL) hits per track;Number of Pixel hits (PIX+IBL);Number of Tracks'
     genericTrackGroup.defineHistogram(varName, type='TH1F', path=pathtrack, title=title, xbins=m_rangePixHits+1, xmin=-0.5, xmax=m_rangePixHits +0.5)
 
     varName = 'm_npixelhits_per_track_barrel;Npixhits_per_track_barrel'
-    title = 'Number of PIXEL hits per track (Barrel);Number of Pixel hits in Barrel (PIX+IBL);Number of Tracks'
+    title = 'Number of PIXEL (PIX+IBL) hits per track (Barrel);Number of Pixel hits in Barrel (PIX+IBL);Number of Tracks'
     genericTrackGroup.defineHistogram(varName, type='TH1F', path=pathtrack, title=title, xbins=m_rangePixHits+1, xmin=-0.5, xmax=m_rangePixHits +0.5)
     
     varName = 'm_nscthits_per_track;Nscthits_per_track'
-    title = 'Number of SCT hits per track;Number of SCT hits per Tracks;Number of Tracks'
+    title = 'Number of SCT hits per track;Number of SCT hits;Number of Tracks'
+    genericTrackGroup.defineHistogram(varName, type='TH1F', path=pathtrack, title=title, xbins=m_rangeSCTHits+1, xmin=-0.5, xmax=m_rangeSCTHits +0.5)
+
+    varName = 'm_nscthits_per_track_barrel;Nscthits_per_track_barrel'
+    title = 'Number of SCT hits per track (Barrel);Number of SCT hits in Barrel;Number of Tracks'
     genericTrackGroup.defineHistogram(varName, type='TH1F', path=pathtrack, title=title, xbins=m_rangeSCTHits+1, xmin=-0.5, xmax=m_rangeSCTHits +0.5)
 
     varName = 'm_ntrthits_per_track;Ntrthits_per_track'
-    title = 'Number of TRT hits per track;Number of TRT hits per Tracks;Number of Tracks'
+    title = 'Number of TRT hits per track;Number of TRT hits;Number of Tracks'
+    genericTrackGroup.defineHistogram(varName, type='TH1F', path=pathtrack, title=title, xbins=m_rangeTRTHits+1, xmin=-0.5, xmax=m_rangeTRTHits +0.5)
+
+    varName = 'm_ntrthits_per_track_barrel;Ntrthits_per_track_barrel'
+    title = 'Number of TRT hits per track (Barrel);Number of TRT hits in Barrel;Number of Tracks'
     genericTrackGroup.defineHistogram(varName, type='TH1F', path=pathtrack, title=title, xbins=m_rangeTRTHits+1, xmin=-0.5, xmax=m_rangeTRTHits +0.5)
 
-    varName = 'm_eta,m_npixelhits_per_track;Npixhits_vs_eta'
+    varName = 'm_eta_2fillpix,m_npixelhits_per_track_2filleta;Npixhits_vs_eta'
     title = "Number of Pixel his vs track #eta; Track #eta; Number of Pixel hits (PIX+IBL)"
     genericTrackGroup.defineHistogram(varName, type='TH2F', path=pathtrack, title=title, xbins=m_etaBins, xmin=-m_etaRange, xmax=m_etaRange, ybins=m_rangePixHits+1, ymin=-0.5, ymax=m_rangePixHits +0.5)
 
-    varName = 'm_eta,m_nscthits_per_track;Nscthits_vs_eta'
+    varName = 'm_eta_2fillsct,m_nscthits_per_track_2filleta;Nscthits_vs_eta'
     title = "Number of SCT his vs track #eta; Track #eta; Number of SCT hits"
     genericTrackGroup.defineHistogram(varName, type='TH2F', path=pathtrack, title=title, xbins=m_etaBins, xmin=-m_etaRange, xmax=m_etaRange, ybins=m_rangeSCTHits+1, ymin=-0.5, ymax=m_rangeSCTHits +0.5)
 
-    varName = 'm_eta,m_ntrthits_per_track;Ntrthits_vs_eta'
+    varName = 'm_eta_2filltrt,m_ntrthits_per_track_2filleta;Ntrthits_vs_eta'
     title = "Number of TRT his vs track #eta; Track #eta; Number of TRT hits"
     genericTrackGroup.defineHistogram(varName, type='TH2F', path=pathtrack, title=title, xbins=m_etaBins, xmin=-m_etaRange, xmax=m_etaRange, ybins=m_rangeTRTHits+1, ymin=-0.5, ymax=m_rangeTRTHits +0.5)
 
@@ -104,40 +117,60 @@ def IDAlignMonGenericTracksAlgCfg(helper, alg, **kwargs):
     genericTrackGroup.defineHistogram(varName, type='TH1F', path=pathtrack, title=title, xbins=m_etaBins, xmin=-m_etaRange, xmax=m_etaRange)
 
     varName = 'm_eta_pos;eta_pos'
-    title = 'eta for positive tracks; #eta(+);Number of Tracks'
+    title = 'eta for positive tracks; #eta(#plusq);Number of Tracks'
     genericTrackGroup.defineHistogram(varName, type='TH1F', path=pathtrack, title=title, xbins=m_etaBins, xmin=-m_etaRange, xmax=m_etaRange)
 
     varName = 'm_eta_neg;eta_neg'
-    title = 'eta for negative tracks; #eta(-);Number of Tracks'
+    title = 'eta for negative tracks; #eta(#minusq);Number of Tracks'
     genericTrackGroup.defineHistogram(varName, type='TH1F', path=pathtrack, title=title, xbins=m_etaBins, xmin=-m_etaRange, xmax=m_etaRange)
 
     varName = 'm_phi;phi'
     title = 'phi;Track #phi;Number of Tracks'
-    genericTrackGroup.defineHistogram(varName, type='TH1F', path=pathtrack, title=title, xbins=80, xmin=0, xmax= 2 * M_PI)
+    genericTrackGroup.defineHistogram(varName, type='TH1F', path=pathtrack, title=title, xbins=m_phiBins, xmin=0, xmax= 2 * M_PI)
 
-    varName = 'm_z0;z0'
-    title = 'z0;[mm]'
+    varName = 'm_z0;z0_origin'
+    title = 'z_{0} (computed vs origin); z_{0} (origin) [mm]; Tracks'
+    genericTrackGroup.defineHistogram(varName, type='TH1F', path=pathtrack, title=title, xbins=m_d0BsNbins, xmin=-m_z0Range, xmax=m_z0Range)
+
+    varName = 'm_z0_bscorr;z0'
+    title = 'z_{0} (corrected for beamspot);z_{0} [mm]; Tracks'
     genericTrackGroup.defineHistogram(varName, type='TH1F', path=pathtrack, title=title, xbins=m_d0BsNbins, xmin=-m_z0Range, xmax=m_z0Range)
 
     varName = 'm_z0sintheta;z0sintheta'
-    title = 'z0sintheta;[mm]'
+    title = 'z_{0}sin#theta; z_{0}sin#theta [mm]; Tracks'
     genericTrackGroup.defineHistogram(varName, type='TH1F', path=pathtrack, title=title, xbins=m_d0BsNbins, xmin=-m_z0Range, xmax=m_z0Range)
 
     varName = 'm_d0;d0_origin'
-    title = 'd_{0} (computed vs origin); d_{0} (origin) [mm]'
+    title = 'd_{0} (computed vs origin);d_{0} (origin) [mm]; Tracks'
     genericTrackGroup.defineHistogram(varName, type='TH1F', path=pathtrack, title=title, xbins=m_d0BsNbins, xmin=-m_d0Range, xmax=m_d0Range)
 
     varName = 'm_d0_bscorr;d0'
-    title = 'd_{0} (corrected for beamspot);d_{0} (BS) [mm]'
-    genericTrackGroup.defineHistogram(varName, type='TH1F', path=pathtrack, title=title, xbins=m_d0BsNbins, xmin=-m_d0BsRange/10, xmax=m_d0BsRange/10)
+    title = 'd_{0} (corrected for beamspot);d_{0} (BS) [mm]; Tracks'
+    genericTrackGroup.defineHistogram(varName, type='TH1F', path=pathtrack, title=title, xbins=m_d0BsNbins, xmin=-m_d0BsRange, xmax=m_d0BsRange)
 
     varName = 'm_pT;pT'
-    title = 'pT;Signed Track pT [GeV];Number of Tracks'
+    title = 'p_{T};Signed Track p_{T} [GeV];Tracks'
     genericTrackGroup.defineHistogram(varName, type='TH1F', path=pathtrack, title=title, xbins=200, xmin=-m_pTRange, xmax=m_pTRange)
 
     varName = 'm_p;P'
-    title = 'Track Momentum P;Signed Track P [GeV];Number of Tracks'
+    title = 'Track Momentum P;Signed Track P [GeV];Tracks'
     genericTrackGroup.defineHistogram(varName, type='TH1F', path=pathtrack, title=title, xbins=200, xmin=-m_pTRange, xmax=m_pTRange)
+
+    varName = 'm_phi_2filld0bscorr,m_d0_bscorr_2fillphi;D0VsPhi0'
+    title = 'd_{0} (BS) Vs #phi_{0};#phi_{0} [rad];d_{0} (BS) [mm]'
+    genericTrackGroup.defineHistogram(varName, type='TH2F', path=pathtrack, title=title, xbins=m_phiBins, xmin=0, xmax= 2 * M_PI, ybins=m_d0BsNbins, ymin=-m_d0BsRange, ymax=m_d0BsRange)
+   
+    varName = 'm_phi_2filld0bscorrBAR,m_d0_bscorrBAR;D0VsPhi0_Barrel'
+    title = 'd_{0} (BS) Vs #phi_{0} (Barrel);#phi_{0} [rad];d_{0} (BS) [mm]'
+    genericTrackGroup.defineHistogram(varName, type='TH2F', path=pathtrack, title=title, xbins=m_phiBins, xmin=0, xmax= 2 * M_PI, ybins=m_d0BsNbins, ymin=-m_d0BsRange, ymax=m_d0BsRange)
+   
+    varName = 'm_phi_2filld0bscorrECA,m_d0_bscorrECA;D0VsPhi0_ECA'
+    title = 'd_{0} (BS) Vs #phi_{0} (ECA);#phi_{0} [rad];d_{0} (BS) [mm]'
+    genericTrackGroup.defineHistogram(varName, type='TH2F', path=pathtrack, title=title, xbins=m_phiBins, xmin=0, xmax= 2 * M_PI, ybins=m_d0BsNbins, ymin=-m_d0BsRange, ymax=m_d0BsRange)
+   
+    varName = 'm_phi_2filld0bscorrECC,m_d0_bscorrECC;D0VsPhi0_ECC'
+    title = 'd_{0} (BS) Vs #phi_{0} (ECC);#phi_{0} [rad];d_{0} (BS) [mm]'
+    genericTrackGroup.defineHistogram(varName, type='TH2F', path=pathtrack, title=title, xbins=m_phiBins, xmin=0, xmax= 2 * M_PI, ybins=m_d0BsNbins, ymin=-m_d0BsRange, ymax=m_d0BsRange)
    
     # end histograms
 
diff --git a/InnerDetector/InDetMonitoring/InDetAlignmentMonitoringRun3/src/IDAlignMonGenericTracksAlg.cxx b/InnerDetector/InDetMonitoring/InDetAlignmentMonitoringRun3/src/IDAlignMonGenericTracksAlg.cxx
index 220074985e489ed6b17501d854b26786896f3339..1e22066309d731cc493a563281117e48735a7d70 100644
--- a/InnerDetector/InDetMonitoring/InDetAlignmentMonitoringRun3/src/IDAlignMonGenericTracksAlg.cxx
+++ b/InnerDetector/InDetMonitoring/InDetAlignmentMonitoringRun3/src/IDAlignMonGenericTracksAlg.cxx
@@ -47,6 +47,7 @@ IDAlignMonGenericTracksAlg::IDAlignMonGenericTracksAlg( const std::string & name
    m_z0Range(250.0),
    m_etaRange(3.0),
    m_NTracksRange(200),
+   m_barrelEta(0.8), //Tracks with |eta| < 0.8 are considered Barrel tracks; otherwise they are End-Cap tracks
    m_trackSelection( "InDet::InDetTrackSelectionTool/TrackSelectionTool", this)
 {
   m_hitQualityTool = ToolHandle<IInDetAlignHitQualSelTool>("");
@@ -131,7 +132,7 @@ StatusCode IDAlignMonGenericTracksAlg::fillHistograms( const EventContext& ctx )
   using namespace Monitored;
  
   // For histogram naming
-  auto genericTrackGroup = getGroup("Tracks");
+  auto genericTrackGroup = getGroup("IDA_Tracks");
 
   //counters
   int ntrkMax=0;
@@ -219,9 +220,6 @@ StatusCode IDAlignMonGenericTracksAlg::fillHistograms( const EventContext& ctx )
   float mu     = lbAverageInteractionsPerCrossing(ctx);
   auto mu_m    = Monitored::Scalar<float>("mu_m", mu);
 
-  // fill lb & beam spot histograms
-  fill(genericTrackGroup, lb_m);
-	
   if (m_extendedPlots) {
     //Fill BeamSpot Position histos
     auto beamSpotX_m = Monitored::Scalar<float>( "m_beamSpotX", beamSpotX );
@@ -229,13 +227,13 @@ StatusCode IDAlignMonGenericTracksAlg::fillHistograms( const EventContext& ctx )
     auto beamSpotZ_m = Monitored::Scalar<float>( "m_beamSpotZ", beamSpotZ );
     auto beamTiltX_m = Monitored::Scalar<float>( "m_beamTiltX", beamTiltX );
     auto beamTiltY_m = Monitored::Scalar<float>( "m_beamTiltY", beamTiltY );
-
     fill(genericTrackGroup, beamSpotX_m, beamSpotY_m);
-    fill(genericTrackGroup, beamSpotZ_m, beamSpotY_m);
     fill(genericTrackGroup, beamSpotZ_m, beamSpotX_m);
+    fill(genericTrackGroup, beamSpotZ_m, beamSpotY_m);
 
     // beam spot vs LB
     fill(genericTrackGroup, lb_m, beamSpotY_m);
+    fill(genericTrackGroup, lb_m, beamSpotX_m);
 
     // interactions per beam crossing
     fill(genericTrackGroup, mu_m);
@@ -297,6 +295,9 @@ StatusCode IDAlignMonGenericTracksAlg::fillHistograms( const EventContext& ctx )
     float beamX          = 0;
     float beamY          = 0;
     float d0bscorr       = -999;
+    bool  isTrackBarrel  = false;
+    bool  isTrackECA     = false;
+    bool  isTrackECC     = false;
  
     // get fit quality and chi2 probability of track
     const Trk::FitQuality* fitQual = trksItr->fitQuality();
@@ -328,6 +329,10 @@ StatusCode IDAlignMonGenericTracksAlg::fillHistograms( const EventContext& ctx )
       trkpt        = measPer->pT()/1000.;  
       if (qOverP < 0) charge = -1;
       else charge=+1; 
+      if (trketa < - m_barrelEta) isTrackECC = true;
+      if (trketa > m_barrelEta) isTrackECA = true;
+      if (!isTrackECA and !isTrackECC) isTrackBarrel = true;
+
 
       // correct the track d0 for the vertex position
       // would rather corrected for the beamline but could not find beamline
@@ -353,7 +358,8 @@ StatusCode IDAlignMonGenericTracksAlg::fillHistograms( const EventContext& ctx )
     ngTracks++;    
     ATH_MSG_DEBUG(nTracks << " is a good track!");  
 
-   
+    // fill lb histogram for each accepted track 
+    // fill(genericTrackGroup, lb_m);	
 
     int nhpixB=0, nhpixECA=0, nhpixECC=0, nhsctB=0, nhsctECA=0, nhsctECC=0, nhtrtB=0, nhtrtECA=0, nhtrtECC=0;
 
@@ -435,20 +441,30 @@ StatusCode IDAlignMonGenericTracksAlg::fillHistograms( const EventContext& ctx )
     fill(genericTrackGroup, npixelhits_per_track_barrel_m);
     auto nscthits_per_track_m = Monitored::Scalar<float>( "m_nscthits_per_track", nhsct );
     fill(genericTrackGroup, nscthits_per_track_m);
+    auto nscthits_per_track_barrel_m = Monitored::Scalar<float>( "m_nscthits_per_track_barrel", nhsctB );
+    fill(genericTrackGroup, nscthits_per_track_barrel_m);
     auto ntrthits_per_track_m = Monitored::Scalar<float>( "m_ntrthits_per_track", nhtrt );
     fill(genericTrackGroup, ntrthits_per_track_m);
-    
+    auto ntrthits_per_track_barrel_m = Monitored::Scalar<float>( "m_ntrthits_per_track_barrel", nhtrtB );
+    fill(genericTrackGroup, ntrthits_per_track_barrel_m);
+
     auto chi2oDoF_m = Monitored::Scalar<float>( "m_chi2oDoF", chi2oDoF );
     fill(genericTrackGroup, chi2oDoF_m);
     auto eta_m = Monitored::Scalar<float>( "m_eta", trketa );
     fill(genericTrackGroup, eta_m);
 
     // pixel hits vs eta
-    fill(genericTrackGroup, eta_m, npixelhits_per_track_m);    
+    auto eta_2fillpix_m = Monitored::Scalar<float>( "m_eta_2fillpix", trketa );
+    auto npixelhits_per_track_2filleta_m = Monitored::Scalar<float>( "m_npixelhits_per_track_2filleta", nhpix );
+    fill(genericTrackGroup, eta_2fillpix_m, npixelhits_per_track_2filleta_m);    
     // sct hits vs eta
-    fill(genericTrackGroup, eta_m, nscthits_per_track_m);
+    auto eta_2fillsct_m = Monitored::Scalar<float>( "m_eta_2fillsct", trketa );
+    auto nscthits_per_track_2filleta_m = Monitored::Scalar<float>( "m_nscthits_per_track_2filleta", nhsct );
+    fill(genericTrackGroup, eta_2fillsct_m, nscthits_per_track_2filleta_m);
     // trt hits vs eta
-    fill(genericTrackGroup, eta_m, ntrthits_per_track_m);
+    auto eta_2filltrt_m = Monitored::Scalar<float>( "m_eta_2filltrt", trketa );
+    auto ntrthits_per_track_2filleta_m = Monitored::Scalar<float>( "m_ntrthits_per_track_2filleta", nhtrt );
+    fill(genericTrackGroup, eta_2filltrt_m, ntrthits_per_track_2filleta_m);
     
     if (charge>0){
       auto eta_pos_m = Monitored::Scalar<float>( "m_eta_pos", trketa );
@@ -463,6 +479,8 @@ StatusCode IDAlignMonGenericTracksAlg::fillHistograms( const EventContext& ctx )
     fill(genericTrackGroup, phi_m);
     auto z0_m = Monitored::Scalar<float>( "m_z0", trkz0 );
     fill(genericTrackGroup, z0_m);
+    auto z0_bscorr_m = Monitored::Scalar<float>( "m_z0_bscorr", trkz0-beamSpotZ );
+    fill(genericTrackGroup, z0_bscorr_m);
     float z0sintheta = trkz0*(sin(trktheta));
     auto z0sintheta_m = Monitored::Scalar<float>( "m_z0sintheta", z0sintheta );
     fill(genericTrackGroup, z0sintheta_m);
@@ -470,6 +488,28 @@ StatusCode IDAlignMonGenericTracksAlg::fillHistograms( const EventContext& ctx )
     fill(genericTrackGroup, d0_m);
     auto d0_bscorr_m = Monitored::Scalar<float>( "m_d0_bscorr", d0bscorr );
     fill(genericTrackGroup, d0_bscorr_m);
+    //d0 vs phi 
+    auto phi_2filld0bscorr_m = Monitored::Scalar<float>( "m_phi_2filld0bscorr", trkphi );
+    auto d0_bscorr_2fillphi_m = Monitored::Scalar<float>( "m_d0_bscorr_2fillphi", d0bscorr );
+    fill(genericTrackGroup, phi_2filld0bscorr_m, d0_bscorr_2fillphi_m); 
+    //d0 vs phi in barrel
+    if (isTrackBarrel) {
+      auto phi_2filld0bscorrBAR_m = Monitored::Scalar<float>( "m_phi_2filld0bscorrBAR", trkphi );
+      auto d0_bscorrBAR_m = Monitored::Scalar<float>( "m_d0_bscorrBAR", d0bscorr );
+      fill(genericTrackGroup, phi_2filld0bscorrBAR_m, d0_bscorrBAR_m);
+    } 
+    //d0 vs phi in End-cap A
+    if (isTrackECA) {
+      auto phi_2filld0bscorrECA_m = Monitored::Scalar<float>( "m_phi_2filld0bscorrECA", trkphi );
+      auto d0_bscorrECA_m = Monitored::Scalar<float>( "m_d0_bscorrECA", d0bscorr );
+      fill(genericTrackGroup, phi_2filld0bscorrECA_m, d0_bscorrECA_m);
+    } 
+    //d0 vs phi in End-cap C
+    if (isTrackECC) {
+      auto phi_2filld0bscorrECC_m = Monitored::Scalar<float>( "m_phi_2filld0bscorrECC", trkphi );
+      auto d0_bscorrECC_m = Monitored::Scalar<float>( "m_d0_bscorrECC", d0bscorr );
+      fill(genericTrackGroup, phi_2filld0bscorrECC_m, d0_bscorrECC_m);
+    } 
 
     float pT = charge*trkpt;
     auto pT_m = Monitored::Scalar<float>( "m_pT", pT );
diff --git a/InnerDetector/InDetMonitoring/InDetAlignmentMonitoringRun3/src/IDAlignMonGenericTracksAlg.h b/InnerDetector/InDetMonitoring/InDetAlignmentMonitoringRun3/src/IDAlignMonGenericTracksAlg.h
index bb09e5814b67d705d1f63198ea7035e2b81f22fa..0872d80e9550cb2e9478f75a122e6dfd9a0202ec 100644
--- a/InnerDetector/InDetMonitoring/InDetAlignmentMonitoringRun3/src/IDAlignMonGenericTracksAlg.h
+++ b/InnerDetector/InDetMonitoring/InDetAlignmentMonitoringRun3/src/IDAlignMonGenericTracksAlg.h
@@ -83,6 +83,7 @@ private:
   float m_z0Range{};
   float m_etaRange{};
   int   m_NTracksRange{};
+  float m_barrelEta{};
   bool  m_doIP{};
   bool  m_doHitQuality{false};
   bool  m_applyTrkSel{};
diff --git a/LArCalorimeter/LArCalibDataQuality/LArCalibDataQuality/LArCalibValidationAlg.icc b/LArCalorimeter/LArCalibDataQuality/LArCalibDataQuality/LArCalibValidationAlg.icc
index 8f81b0febeeba7a81337fbc39f5ff20731cb3931..59c10d1d1b11f3df4bb2b4b2ae9a5ba3ccae6e16 100644
--- a/LArCalorimeter/LArCalibDataQuality/LArCalibDataQuality/LArCalibValidationAlg.icc
+++ b/LArCalorimeter/LArCalibDataQuality/LArCalibDataQuality/LArCalibValidationAlg.icc
@@ -458,9 +458,6 @@ bool LArCalibValidationAlg<CONDITIONSCONTAINER, REFCONTAINER>::checkCoolChannelC
       }
 
       if(m_patchCBs.size()) { // loop over channels, if connected to asked CBs
-         const LArOnlineID *onlHlp=nullptr;
-         StatusCode sc= detStore()->retrieve(onlHlp, "LArOnlineID");
-         if(sc.isFailure()) return false;
          msg()<<MSG::INFO <<" m_patchCBs.size() "<<m_patchCBs.size()<<"  ";
          for(unsigned i=0; i<m_patchCBs.size();++i) msg()<<MSG::INFO <<m_patchCBs[i]<<" ";
          msg() << endmsg;
@@ -468,19 +465,30 @@ bool LArCalibValidationAlg<CONDITIONSCONTAINER, REFCONTAINER>::checkCoolChannelC
          if(m_isSC) chmax=320;
          for(int ch=0; ch<chmax; ++ch) {
             const HWIdentifier chanId = m_onlineHelper->channel_Id(febId,ch);
+            if(! chanId.is_valid() ) {
+               //ATH_MSG_INFO("Not valid HWIdentifier for channel "<<ch<<"of FEB "<<febId.get_identifier32().get_compact());
+               ATH_MSG_INFO("Not valid HWIdentifier for channel "<<ch);
+               continue;
+            }
+            if( m_isSC && (! m_onlineHelper->is_slar(chanId)) ) {
+               ATH_MSG_WARNING("Not a SC identifier, skipping ! ");
+               continue;
+            }
+            try { const HWIdentifier chanIdd(chanId.get_identifier32().get_compact());chanIdd.show();}
+            catch (...) {
+               ATH_MSG_WARNING("Something wrong with this id, skipping ! ");
+               continue;
+            }
             const std::vector<HWIdentifier>& cLids=clCont->calibSlotLine(chanId); 
-            msg()<<MSG::INFO <<"ch. "<<ch<<" CL size: "<<cLids.size()<<": ";
-            for(unsigned cl=0; cl<cLids.size(); ++cl) msg()<<MSG::INFO <<cLids[cl].get_identifier32().get_compact()<<" ";
+            msg()<<MSG::DEBUG <<" CL size: "<<cLids.size()<<": ";
+            for(unsigned cl=0; cl<cLids.size(); ++cl) msg()<<MSG::DEBUG <<cLids[cl].get_identifier32().get_compact()<<" ";
             msg() << endmsg;
             for(unsigned cl=0; cl<cLids.size(); ++cl) {
-               const HWIdentifier calibModuleID = onlHlp->calib_module_Id(cLids[cl]);
-               msg()<<MSG::INFO <<calibModuleID.get_identifier32().get_compact()<<" ";
+               const HWIdentifier calibModuleID = m_onlineHelper->calib_module_Id(cLids[cl]);
                if (std::find(m_patchCBs.begin(),m_patchCBs.end(),calibModuleID.get_identifier32().get_compact()) != m_patchCBs.end()) { // we should patch this channel
                   missingCBs.push_back(std::make_pair(std::make_pair(febId,gain), ch));
-                  msg()<<MSG::INFO <<" found "; 
                   break;
                }
-               msg() << endmsg;
             } // over CLs
          }// over channels
          retVal1=false;
diff --git a/LArCalorimeter/LArCalibUtils/LArCalibUtils/LArCalibPatchingAlg.h b/LArCalorimeter/LArCalibUtils/LArCalibUtils/LArCalibPatchingAlg.h
index 17f9a9d3fac7613eb386dd9b0494c77eed42a3f8..5ce38d7d390bd79471da366e11e7e61008f4013c 100644
--- a/LArCalorimeter/LArCalibUtils/LArCalibUtils/LArCalibPatchingAlg.h
+++ b/LArCalorimeter/LArCalibUtils/LArCalibUtils/LArCalibPatchingAlg.h
@@ -188,7 +188,7 @@ private:
   BooleanProperty m_useCorrChannel{this, "UseCorrChannels", true, "True: Use separate correction COOL channel, False: Correction + data in the same channel"};
   BooleanProperty m_patchAllMissing{this, "PatchAllMissing", false, "True: Patch missing calibration constants regardless of their bad-channel status"};
   BooleanProperty m_unlock{this, "Unlock", false, "Modify input container"};
-  BooleanProperty m_isSC{this, "SuperCell", false, "Working on the SuperCells ?"};
+  BooleanProperty m_isSC{this, "SuperCells", false, "Working on the SuperCells ?"};
 
   const LArOnlineID_Base* m_onlineHelper;
   const CaloCell_Base_ID* m_caloId;
diff --git a/LArCalorimeter/LArCalibUtils/LArCalibUtils/LArPhysWaveHECTool.h b/LArCalorimeter/LArCalibUtils/LArCalibUtils/LArPhysWaveHECTool.h
index 2e0b78e47134614aec4ef97c966cd79e3327f4b7..d412a84fc0519997c3d4863d7b633d0a10bd1460 100755
--- a/LArCalorimeter/LArCalibUtils/LArCalibUtils/LArPhysWaveHECTool.h
+++ b/LArCalorimeter/LArCalibUtils/LArCalibUtils/LArPhysWaveHECTool.h
@@ -70,10 +70,11 @@ class ATLAS_NOT_THREAD_SAFE LArPhysWaveHECTool : public AthAlgTool
   //LArPhysWave m_gPhys;
   const LArPhysWave* m_gIdealPhys = nullptr;	
   
-  const LArOnlineID* m_onlineHelper = nullptr;
+  const LArOnlineID_Base* m_onlineHelper = nullptr;
   //  bool m_verb;
   //int m_region, m_layer ;
   bool m_normalizeCali , m_timeOriginShift , m_subtractBaseline;
+  bool m_isSC;
 
   double   m_TcalMin, m_TcalMax, m_TcalAverage, 
            m_FstepMin, m_FstepMax, m_FstepAverage;
diff --git a/LArCalorimeter/LArCalibUtils/LArCalibUtils/LArPhysWavePredictor.h b/LArCalorimeter/LArCalibUtils/LArCalibUtils/LArPhysWavePredictor.h
index aeb7bbb62743a8282730fe82ae014057fa950b43..93d82f92b3810efb4fa34219e7671e46b5454c4b 100644
--- a/LArCalorimeter/LArCalibUtils/LArCalibUtils/LArPhysWavePredictor.h
+++ b/LArCalorimeter/LArCalibUtils/LArCalibUtils/LArPhysWavePredictor.h
@@ -1,7 +1,7 @@
 //Dear emacs, this is -*- c++ -*-
 
 /*
-  Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration
 */
 
 
@@ -57,6 +57,7 @@ class ATLAS_NOT_THREAD_SAFE LArPhysWavePredictor : public AthAlgorithm
   std::string m_keyPhys;
   std::string m_keyMphysMcali;
   std::string m_keyIdealPhys;
+  std::string m_keyFcal;
 
   std::string m_groupingType;
   
diff --git a/LArCalorimeter/LArCalibUtils/src/LArCaliWaveBuilder.cxx b/LArCalorimeter/LArCalibUtils/src/LArCaliWaveBuilder.cxx
index 744f86043f49f702da1e83403cc1341a4991c7bc..776a2ca9dd2e92b54c7f40973bd60c3078de0621 100644
--- a/LArCalorimeter/LArCalibUtils/src/LArCaliWaveBuilder.cxx
+++ b/LArCalorimeter/LArCalibUtils/src/LArCaliWaveBuilder.cxx
@@ -144,7 +144,7 @@ StatusCode LArCaliWaveBuilder::execute()
 
  }
 
- if ( m_event_counter < 1000 || m_event_counter%100==0 ) 
+ if ( m_event_counter < 100 || m_event_counter%100==0 ) 
     ATH_MSG_INFO( "Processing event " << m_event_counter );
  
  if (m_keylist.empty()) {
diff --git a/LArCalorimeter/LArCalibUtils/src/LArPhysWaveHECTool.cxx b/LArCalorimeter/LArCalibUtils/src/LArPhysWaveHECTool.cxx
index 286373b7a9c8c957555b2883a45a0085c400c2a3..d5aa2fabf88dd772640b865d204706bf38144cb4 100755
--- a/LArCalorimeter/LArCalibUtils/src/LArPhysWaveHECTool.cxx
+++ b/LArCalorimeter/LArCalibUtils/src/LArPhysWaveHECTool.cxx
@@ -3,6 +3,7 @@
 */
 
 #include "LArCalibUtils/LArPhysWaveHECTool.h" 
+#include "LArIdentifier/LArOnline_SuperCellID.h"
 
 #include <gsl/gsl_integration.h>
 #include <gsl/gsl_errno.h>
@@ -65,6 +66,7 @@ LArPhysWaveHECTool::LArPhysWaveHECTool ( const std::string& type, const std::str
   declareProperty("NormalizeCali",    m_normalizeCali=false) ;//true?
   declareProperty("TimeOriginShift",  m_timeOriginShift=false) ;
   declareProperty("SubtractBaseline", m_subtractBaseline=true) ;
+  declareProperty("isSC",             m_isSC=false) ;
   declareProperty("TcalMin",          m_TcalMin=370) ;
   declareProperty("TcalMax",          m_TcalMax=490) ;
   declareProperty("TcalAverage",      m_TcalAverage=422.2) ;
@@ -78,7 +80,16 @@ LArPhysWaveHECTool::~LArPhysWaveHECTool() {}
 
 StatusCode LArPhysWaveHECTool::initialize()
 {    
-  ATH_CHECK( detStore()->retrieve(m_onlineHelper, "LArOnlineID") );
+  if ( m_isSC ) {
+    ATH_MSG_DEBUG("==== looking at SuperCells ====");
+    const LArOnline_SuperCellID* ll;
+    ATH_CHECK( detStore()->retrieve(ll, "LArOnline_SuperCellID") );
+    m_onlineHelper = (const LArOnlineID_Base*)ll;
+  } else { // m_isSC
+    const LArOnlineID* ll;
+    ATH_CHECK( detStore()->retrieve(ll, "LArOnlineID") );
+    m_onlineHelper = (const LArOnlineID_Base*)ll;
+  }
   return StatusCode::SUCCESS;
 }
 
@@ -94,7 +105,6 @@ StatusCode LArPhysWaveHECTool::makeLArPhysWaveHEC(LArWFParams& wfParam, LArCaliW
   m_Taur   = 0.;
   m_gIdealPhys = &idealPhysWave;
 
-
   if(wfParam.fstep()<m_FstepMin || wfParam.fstep()>m_FstepMax){ 
     ATH_MSG_INFO (" Fstep="<< wfParam.fstep() << " out of accepted region ("<<m_FstepMin<< ","<<m_FstepMax<<") average used instead : "<<m_FstepAverage);
     wfParam.setFstep(m_FstepAverage);
@@ -179,15 +189,19 @@ StatusCode LArPhysWaveHECTool::makeLArPhysWaveHEC(LArWFParams& wfParam, LArCaliW
 	   || repro_count==0){ 
 
     // if deviation is above a limit, move the CALIWAVE by 1.0 ADC counts
-    CALIWAVE_SHIFT=1.0*repro_count;
+    if ( m_normalizeCali || peak_tmp < 1.1) {
+       CALIWAVE_SHIFT=0.0005*repro_count;
+    } else {
+       CALIWAVE_SHIFT=1.0*repro_count;
+    }
 
     if(repro_count>0){
       ATH_MSG_INFO ("FT="<<FT<<" Slot="<<Slot<<" Ch="<<Channel<<" Gain="<<gain<<" adc="<<adc);
       ATH_MSG_INFO (repro_count<<". Iteration of INTEGRATION: CALIWAVE IS MOVED UP by "<<CALIWAVE_SHIFT<<" ADC units");
       if(DIFF_AMPL>=QUAL_REQ_AMPL)
-	ATH_MSG_INFO ("Problematic bin="<<idx_bad_time_ampl<<" AmplPhysGSL="<<Ampl_problem_ampl<<" Time="<< Time_problem_ampl <<" Deviation="<<DIFF_AMPL<<" ADC units"<<" Peak="<<peak_tmp);
+	ATH_MSG_INFO ("Problematic DIFF_AMPL bin="<<idx_bad_time_ampl<<" AmplPhysGSL="<<Ampl_problem_ampl<<" Time="<< Time_problem_ampl <<" Deviation="<<DIFF_AMPL<<" ADC units"<<" Peak="<<peak_tmp);
       if(DIFF>=QUAL_REQ)
-	ATH_MSG_INFO ("Problematic bin="<<idx_bad_time<<" AmplPhysGSL="<<Ampl_problem<<" Time="<<Time_problem<<" Deviation="<<DIFF<<" ADC units"<< " Peak="<<peak_tmp);
+	ATH_MSG_INFO ("Problematic DIFF bin="<<idx_bad_time<<" AmplPhysGSL="<<Ampl_problem<<" Time="<<Time_problem<<" Deviation="<<DIFF<<" ADC units"<< " Peak="<<peak_tmp);
     }
 
     pcal.Reset();
@@ -683,6 +697,7 @@ TF1 * LArPhysWaveHECTool::CaliWave2PhysWaveHEC(TProfile *pcal, Double_t *par, do
    mcut = 0.05*pcal->GetMaximum();
  } 
 
+ ATH_MSG_DEBUG("mcut: "<<mcut);
  i5bin = 0;
  for(i=1; i<=nbin; ++i) {
    x[i-1] = pcal->GetBinCenter(i);
diff --git a/LArCalorimeter/LArCalibUtils/src/LArPhysWavePredictor.cxx b/LArCalorimeter/LArCalibUtils/src/LArPhysWavePredictor.cxx
index 95644a2735557ef0cf451875380be3e0a1006302..8986b5a02ebd2407f94ab85c86a5a9d2e7454a4d 100644
--- a/LArCalorimeter/LArCalibUtils/src/LArPhysWavePredictor.cxx
+++ b/LArCalorimeter/LArCalibUtils/src/LArPhysWavePredictor.cxx
@@ -1,5 +1,5 @@
 /*
-  Copyright (C) 2002-2023 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration
 */
 
 #include "LArCalibUtils/LArPhysWavePredictor.h"
@@ -57,6 +57,7 @@ LArPhysWavePredictor::LArPhysWavePredictor (const std::string& name, ISvcLocator
   declareProperty("KeyCaliList",      m_keyCali);                  // Keys of LArCaliWaveContainers
   declareProperty("KeyPhys",          m_keyPhys = "LArPhysWave") ; // Key of LArPhysWaveContainer
   declareProperty("KeyIdealPhys",     m_keyIdealPhys   = "LArPhysWaveHECIdeal") ; // added by FT    
+  declareProperty("KeyFcal",          m_keyFcal   = "FCALFromTB") ; // added by FT    
   declareProperty("KeyMphysMcali",    m_keyMphysMcali = "LArMphysOverMcal") ; // Key of LArMphysOverMcalComplete
   declareProperty("DumpMphysMcali",   m_dumpMphysMcali = false ) ; // for debugging
   declareProperty("NormalizeCali",    m_normalizeCali = false ) ; // for debugging
@@ -187,7 +188,7 @@ StatusCode LArPhysWavePredictor::stop()
 
   // Retrieve LArPhysWaveHECTool   // added by FT
   ToolHandle<LArPhysWaveHECTool> larPhysWaveHECTool("LArPhysWaveHECTool");
-  if(m_isHEC){
+  if(m_isSC || m_isHEC){
     sc=larPhysWaveHECTool.retrieve();
     if (sc!=StatusCode::SUCCESS) {
       ATH_MSG_ERROR( " Can't get LArPhysWaveHECTool " );
@@ -230,7 +231,7 @@ StatusCode LArPhysWavePredictor::stop()
   }
 
   if ( !m_useJODetCellParams ) {
-    if (!m_isHEC) {
+    if (!m_isHEC ) {
       sc = detStore()->retrieve(larDetCellParams);
       if ( sc == StatusCode::FAILURE ) {
 	ATH_MSG_WARNING( "Cannot retrieve LArDetCellParams" );
@@ -288,6 +289,18 @@ StatusCode LArPhysWavePredictor::stop()
     }
   }
   
+  const LArPhysWaveContainer *fcalPhysWaves=nullptr;
+
+  if ( m_isSC ) { //retrieve FCAL phys waves from COOL
+    sc = detStore()->retrieve(fcalPhysWaves, m_keyFcal);
+    if ( sc.isFailure() || !fcalPhysWaves) {
+      ATH_MSG_WARNING( "Cannot retrieve FCAL Phys waves" ); 
+      return sc;
+    }else {
+      ATH_MSG_INFO( "FCAL Phys waves successfully retrieved" );
+    }
+  }
+
   int nchannel = 0 ;
   
   // Create LArPhysWaveContainer for predicted physics waveforms
@@ -334,7 +347,8 @@ StatusCode LArPhysWavePredictor::stop()
   std::vector<int> noTdrift;
   std::vector<int> noTdiff;
 		  
-  for ( unsigned i=0; i<CaloGain::LARNGAIN; ++i ) {
+  unsigned maxgain = m_isSC ? 1 : CaloGain::LARNGAIN; // SC readout has a single gain; standard readout keeps all LARNGAIN gains
+  for ( unsigned i=0; i<maxgain; ++i ) {
     nTotal.push_back(0);
     noTcali.push_back(0);
     noFstep.push_back(0);
@@ -347,7 +361,7 @@ StatusCode LArPhysWavePredictor::stop()
   /////////////IDEAL PHYSWAVE/////////////////////////////
   // Get current LArPhysWaveContainer
   const LArPhysWaveContainer* larIdealPhysWaveContainer=nullptr;
-  if(m_isHEC){
+  if(m_isHEC || m_isSC){
     ATH_CHECK(detStore()->retrieve(larIdealPhysWaveContainer,m_keyIdealPhys));
     ATH_MSG_INFO("LArPhysWaveContainer with (key = " << m_keyIdealPhys << ") reading from StoreGate" );
   }
@@ -390,13 +404,39 @@ StatusCode LArPhysWavePredictor::stop()
           ATH_MSG_DEBUG((*itVec).size() << " LArCaliWaves found for channel " << m_onlineHelper->channel_name(itVec.channelId()) << " 0x" 
 		       << std::hex << itVec.channelId().get_identifier32().get_compact() << std::dec);
 	  const HWIdentifier chid = itVec.channelId();
+          //
+	  // region and layer information are needed
+	  Identifier id;
+	  try {
+            id = cabling->cnvToIdentifier(chid);
+          } catch (LArID_Exception & execpt) {
+	    ATH_MSG_ERROR( "LArCabling exception caught for channel 0x" << MSG::hex << chid << MSG::dec 
+	        << ". Skipping channel." ) ;
+            continue ;
+	  }
 
-          // Skip if it is FCAL
-          if(m_onlineHelper->isFCALchannel(chid)) continue;
+	  int region = m_caloCellId->region(id);
+          int layer  = m_caloCellId->sampling(id);
 
 	  if ( nchannel < 100 || ( nchannel < 1000 && nchannel%100==0 ) || nchannel%1000==0 ) 
 	     ATH_MSG_INFO( "Processing calibration waveform number " << nchannel );
 
+          if(m_onlineHelper->isFCALchannel(chid)) {
+             if(!m_isSC) continue; // Skip if it is FCAL in standard readout
+             LArPhysWave fcalw; 
+             // we have only 3 FCAL phys waves, according a layer
+             switch (layer) {
+                case 1: fcalw = fcalPhysWaves->get(HWIdentifier(0x3b300000),0); break;
+                case 2: fcalw = fcalPhysWaves->get(HWIdentifier(0x3b348000),0); break;
+                case 3: fcalw = fcalPhysWaves->get(HWIdentifier(0x3b368000),0); break;
+                default : ATH_MSG_ERROR("Wrong layer for FCAL SC: "<<layer<<" skipping channel 0x"<< MSG::hex << chid << MSG::dec); continue;
+             }
+                          
+             larPhysWaveContainer->setPdata(chid,fcalw, gain);
+             continue;
+          } //isFCALchannel 
+
+
 	  if ( larCaliWave.getFlag() == LArWave::dac0 )  continue ; // skip dac0 waves          
 	  // TODO: here we should add a DAC selection mechanism for TCM method
 
@@ -407,26 +447,14 @@ StatusCode LArPhysWavePredictor::stop()
 
 	  // calibration pulse copy (working around the const iterator to be able to manipulate it...)
 	  LArCaliWave theLArCaliWave = larCaliWave;
+          ++nchannel;
 	  
-	  // region and layer information are needed
-	  Identifier id;
-	  try {
-            id = cabling->cnvToIdentifier(chid);
-          } catch (LArID_Exception & execpt) {
-	    ATH_MSG_ERROR( "LArCabling exception caught for channel 0x" << MSG::hex << chid << MSG::dec 
-	        << ". Skipping channel." ) ;
-            continue ;
-	  }
-
 	  if ( !cabling->isOnlineConnected(chid)  ) { // unconnected channel : skipping ...          
 	    ATH_MSG_VERBOSE("Unconnected channel 0x" << MSG::hex << chid << MSG::dec 
 			    << ". Skipping channel.");
 	    continue ; 	  
 	  }
 
-	  int region = m_caloCellId->region(id);
-          int layer  = m_caloCellId->sampling(id);
-
   	  // Get the parameters corresponding to current LArCaliWave
 	  float Tcali;
 	  float Fstep;
@@ -556,7 +584,9 @@ StatusCode LArPhysWavePredictor::stop()
 	  //      
 	  LArPhysWave larPhysWave;
 	  float MphysMcali ;	
-	  if(larIdealPhysWaveContainer && m_caloCellId->is_lar_hec(id)) {
+	  //if(larIdealPhysWaveContainer && m_caloCellId->is_lar_hec(id)) {
+          // decide by online helper, not sure if offline is working for SC
+	  if(larIdealPhysWaveContainer && m_onlineHelper->isHECchannel(chid)) {
 	    const LArPhysWave& laridealPhysWave = larIdealPhysWaveContainer -> get(chid,gain);
 	    int LArWaveFlag=LArWave::predCali;    // 111 - for HEC Wave
 	    //int LArIdealPhysWaveFlag=LArWave::predCali;    // 111 - for HEC Wave
diff --git a/LArCalorimeter/LArCalibUtils/src/LArPhysWaveTool.cxx b/LArCalorimeter/LArCalibUtils/src/LArPhysWaveTool.cxx
index c7c724f999e5741b5760cc8ef39fafc2d9151a28..bff81b7906bda87a8cee6b602c41cbcfebd65586 100755
--- a/LArCalorimeter/LArCalibUtils/src/LArPhysWaveTool.cxx
+++ b/LArCalorimeter/LArCalibUtils/src/LArPhysWaveTool.cxx
@@ -123,8 +123,9 @@ LArWave LArPhysWaveTool::exp2Tri (const LArWave &w,const unsigned N, const doubl
 
 LArWave LArPhysWaveTool::caliPhysCorr(const unsigned N, const double dt, const LArWFParams& params) {
   LArWave w(N,dt);
-  for ( unsigned i=0 ; i<N ; i++ ) 
+  for ( unsigned i=0 ; i<N ; i++ ) {
     w.setSample(i,caliPhysCorr(i*dt,params)) ;
+  }
   return w ;
 }
 
diff --git a/LArCalorimeter/LArCalibUtils/src/LArRampBuilder.cxx b/LArCalorimeter/LArCalibUtils/src/LArRampBuilder.cxx
index 16b625298efb0006c12afe7ade5d826730a46fc0..92d5dfc50c011445317931a67955873c38b165d2 100644
--- a/LArCalorimeter/LArCalibUtils/src/LArRampBuilder.cxx
+++ b/LArCalorimeter/LArCalibUtils/src/LArRampBuilder.cxx
@@ -1,5 +1,5 @@
 /*
-  Copyright (C) 2002-2023 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration
 */
 
 #include "LArRampBuilder.h"
@@ -648,7 +648,11 @@ StatusCode LArRampBuilder::stop()
 	}
 	else if ((m_maxADC > 0)&&(MaxADC >= m_maxADC)) { 
 	  isADCsat = true; // if ADC saturated at least once, it should be notified
-	  ATH_MSG_DEBUG("Saturated: "<<m_onlineHelper->channel_name(chid)<<" at DAC "<<dac_it->first<<" ADC "<< MaxADC);
+          if(ramppoint.DAC < 200){
+             ATH_MSG_DEBUG("Saturated low DAC: "<<m_onlineHelper->channel_name(chid)<<" at DAC "<<dac_it->first<<" ADC "<< MaxADC);
+          } else {
+             ATH_MSG_DEBUG("Saturated: "<<m_onlineHelper->channel_name(chid)<<" at DAC "<<dac_it->first<<" ADC "<< MaxADC);
+          }
 	}else{
 	  ATH_MSG_DEBUG("Fail ramp selection: "<<chid<<" "<<dac_it->first<<" "<<m_minDAC<<" "<<ramppoint.ADC<<" "<<MaxADC<<" "<<m_maxADC);
 	} 
@@ -773,8 +777,17 @@ StatusCode LArRampBuilder::rampfit(unsigned deg, const std::vector<LArRawRamp::R
     for (unsigned int DACIndex=1;DACIndex<linRange;DACIndex++){
       thisslope = (data[DACIndex].ADC - data[DACIndex-1].ADC)/(data[DACIndex].DAC - data[DACIndex-1].DAC);
 
-      //FIXME: this causes some HEC channels to have rampfrom 2 points only !!!!
-      if ( (satpoint == -1) && ((meanslope-thisslope) > meanslope/10.) ) { satpoint = DACIndex; } // saturation was reached
+      float scut;
+      if(m_onlineHelper->isHECchannel(chid) && DACIndex < 5) {
+         scut = meanslope/4.;
+      } else { scut = meanslope/10.;}
+      if ( (satpoint == -1) && ((meanslope-thisslope) > scut) ) { 
+         satpoint = DACIndex; 
+         if (satpoint <= 4) {
+            ATH_MSG_DEBUG("Only "<<satpoint<<" points to fit, chid: "<<std::hex<<chid.get_identifier32().get_compact()<<std::dec);
+            ATH_MSG_DEBUG(meanslope<<" "<<thisslope<<" | "<<data[DACIndex-1].ADC<<" "<<data[DACIndex].ADC);
+         }
+      } // saturation was reached
 
       meanslope = ( thisslope + (DACIndex-1)*(accslope[DACIndex-1]) )/DACIndex;
       accslope.push_back(meanslope);
@@ -793,9 +806,9 @@ StatusCode LArRampBuilder::rampfit(unsigned deg, const std::vector<LArRawRamp::R
   if(m_doBadChannelMask && m_bcMask.cellShouldBeMasked(bcCont,chid)) isgood=false;
   if (deg>linRange) {
     if (cabling->isOnlineConnected(chid) && isgood ) 
-      ATH_MSG_ERROR( "Not enough datapoints before saturation (" << linRange << ") to fit a polynom of degree " << deg );
+      ATH_MSG_ERROR( "Not enough datapoints before saturation (" << linRange << ") to fit a polynom of degree " << deg << " chid: "<<std::hex<<chid.get_identifier32().get_compact()<<std::dec);
     else
-      ATH_MSG_DEBUG("Not enough datapoints before saturation (" << linRange << ") to fit a polynom of degree " << deg 
+      ATH_MSG_DEBUG("Not enough datapoints before saturation (" << linRange << ") to fit a polynom of degree " << deg << " chid: "<<std::hex<<chid.get_identifier32().get_compact()<<std::dec
 		    << " (channel disconnected or known to be bad)");
     
     return StatusCode::FAILURE;
diff --git a/LArCalorimeter/LArCalibUtils/src/LArWFParamTool.cxx b/LArCalorimeter/LArCalibUtils/src/LArWFParamTool.cxx
index 3162fcf129266e9a181d16a68544309c90302927..a3334ff4dac431533cfa15dec548ce60795f63a2 100644
--- a/LArCalorimeter/LArCalibUtils/src/LArWFParamTool.cxx
+++ b/LArCalorimeter/LArCalibUtils/src/LArWFParamTool.cxx
@@ -704,7 +704,6 @@ StatusCode LArWFParamTool::RTM_Taur(const LArWave& gCali, LArWFParams& wf, const
       range.max=gCali.getSize();
     }
 
-    //std::cout << "WL2: min= " << range.min << ", max= " <<  range.max << std::endl;      
     wf.setTaur(taurMinNum ( taurmin, taurmax, m_EPSILON, gCali, wf, range));
     ATH_MSG_VERBOSE( "*** Injection point\t--> m_Taur    = " << wf.taur()) ;
   }
@@ -756,7 +755,6 @@ LArWave LArWFParamTool::cosCorr(const unsigned N, const double dt, const double
  * residual oscillation, to be used in numerical minimisation.
  * ============================================================== */
 double LArWFParamTool::logChi2InjRespRes (const double taur, const LArWave& gCali, const LArWFParams& wf, const waveRange_t& range) const {
-  //if (m_chID==957367040) std::cout << "WL3: " << std::setprecision(8) << taur << std::endl;
   return m_wHelper.getSumSquareRegion(injRespRes(gCali,wf.omega0(),taur), range.min, range.max);
 }
 
@@ -971,7 +969,6 @@ double LArWFParamTool::fminbr(double a, double b, const double tol, double (LArW
 	   fv=ft;
         }
       }
-      
     }			/* ----- end-of-block ----- */
   }		/* ===== End of loop ===== */   
 }
diff --git a/LArCalorimeter/LArCondTools/CMakeLists.txt b/LArCalorimeter/LArCondTools/CMakeLists.txt
index 8c89cd3c1822f89fe30bc956e31def530ffd7d10..aec79f1ea4d8a1ce90c37ee94d46d80c649d98e5 100644
--- a/LArCalorimeter/LArCondTools/CMakeLists.txt
+++ b/LArCalorimeter/LArCondTools/CMakeLists.txt
@@ -15,5 +15,5 @@ atlas_add_component( LArCondTools
                      LINK_LIBRARIES ${CORAL_LIBRARIES} ${ROOT_LIBRARIES} AthenaBaseComps AthenaKernel AthenaPoolUtilities CaloDetDescrLib CaloIdentifier CxxUtils GaudiKernel Identifier LArCOOLConditions LArCablingLib LArElecCalib LArIdentifier LArRawConditions LArRecConditions StoreGateLib )
 
 # Install files from the package:
+atlas_install_python_modules( python/*.py POST_BUILD_CMD ${ATLAS_FLAKE8} )
 atlas_install_joboptions( share/*.py )
-
diff --git a/LArCalorimeter/LArCondTools/python/LArFlatFromFileConfig.py b/LArCalorimeter/LArCondTools/python/LArFlatFromFileConfig.py
new file mode 100755
index 0000000000000000000000000000000000000000..2071d31ec372eec0cea83e793212a153a37244f4
--- /dev/null
+++ b/LArCalorimeter/LArCondTools/python/LArFlatFromFileConfig.py
@@ -0,0 +1,119 @@
+# Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration
+
+from AthenaConfiguration.ComponentFactory import CompFactory
+
+def LArFlatFromFileCfg(flags):
+
+   #Get basic services and cond-algos
+   from LArCalibProcessing.LArCalibBaseConfig import LArCalibBaseCfg
+   result=LArCalibBaseCfg(flags)
+
+   if flags.LArCalib.isSC:
+      ckey="LArOnOffIdMapSC"
+      ngain=1
+   else:   
+      ckey="LArOnOffIdMap"
+      ngain=3
+   result.addEventAlgo(CompFactory.LArFlatFromFile(SuperCells=flags.LArCalib.isSC,
+                                                   CablingKey=ckey,NGains=ngain,
+                                                   OFCInput="",
+                                                   SingleInput=flags.LArCalib.Input.Files[0],
+                                                   Folder=flags.LArCalib.DetCellParams.Folder,
+                                                   Blob=flags.LArCalib.Input.SubDet
+                                                   ))
+
+   from RegistrationServices.OutputConditionsAlgConfig import OutputConditionsAlgCfg
+   result.merge(OutputConditionsAlgCfg(flags,
+                                       outputFile="dummy.root",
+                                       ObjectList=["CondAttrListCollection#"+flags.LArCalib.DetCellParams.Folder, ],
+                                       IOVTagList=[flags.LArCalib.Input.Type],
+                                       Run1=flags.LArCalib.IOVStart,
+                                       Run2=flags.LArCalib.IOVEnd
+                                   ))
+
+   #RegistrationSvc
+   result.addService(CompFactory.IOVRegistrationSvc(RecreateFolders = True, SVFolder=False,
+                                     OverrideNames = [flags.LArCalib.Input.SubDet], OverrideTypes = ["Blob16M"]))
+   result.getService("IOVDbSvc").DBInstance=""
+
+   #MC Event selector since we have no input data file
+   from McEventSelector.McEventSelectorConfig import McEventSelectorCfg
+   result.merge(McEventSelectorCfg(flags,
+                                   RunNumber         = flags.LArCalib.Input.RunNumbers[0],
+                                   EventsPerRun      = 1,
+                                   FirstEvent        = 1,
+                                   InitialTimeStamp  = 0,
+                                   TimeStampInterval = 1))
+
+   return result
+
+if __name__=="__main__":
+
+    import sys
+    import argparse
+ 
+    # now process the CL options and assign defaults
+    parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+    parser.add_argument('-i','--infile', dest='infile', default="", help='Input file with constants', type=str)
+    parser.add_argument('-f','--folder', dest='fld', default="", help='Folder for constants', type=str)
+    parser.add_argument('-t','--tag', dest='tag', default="", help='Folder tag for constants', type=str)
+    parser.add_argument('-b','--blob', dest='blb', default="", help='Blob name for constants', type=str)
+    parser.add_argument('-o','--outfile', dest='outfile', default="Float.db", help='Output sqlite file', type=str)
+    parser.add_argument('-c','--isSC', dest='supercells', default=False, action="store_true", help='is SC data ?')
+ 
+    args = parser.parse_args()
+    if getattr(args, "help", False):
+       parser.print_help()
+       sys.exit(0)
+ 
+    for _, value in args._get_kwargs():
+        if value is not None:
+            print(value)
+
+    from AthenaConfiguration.AllConfigFlags import initConfigFlags
+    flags=initConfigFlags()
+    from LArCalibProcessing.LArCalibConfigFlags import addLArCalibFlags
+    addLArCalibFlags(flags, args.supercells)
+
+    flags.Input.Files=[]
+    flags.LArCalib.isSC = args.supercells
+    flags.LArCalib.Input.RunNumbers = [404400,]
+    flags.Input.RunNumbers=flags.LArCalib.Input.RunNumbers
+
+    flags.IOVDb.DatabaseInstance="CONDBR2"
+    flags.IOVDb.DBConnection="sqlite://;schema=" + args.outfile +";dbname=CONDBR2"
+
+    flags.LAr.doAlign=False
+    from AthenaConfiguration.TestDefaults import defaultGeometryTags
+    flags.GeoModel.AtlasVersion = defaultGeometryTags.RUN3
+
+    #The global tag we are working with
+    flags.IOVDb.GlobalTag = "LARCALIB-RUN2-00"
+
+    # misusing these flags, but do not want to introduce new ones
+    flags.LArCalib.DetCellParams.Folder=args.fld
+    flags.LArCalib.Input.Files=[args.infile]
+    flags.LArCalib.Input.Type=args.tag
+    flags.LArCalib.Input.SubDet=args.blb
+
+    #Define the global output Level:
+    from AthenaCommon.Constants import INFO
+    flags.Exec.OutputLevel = INFO
+
+    flags.Detector.GeometryID = False
+    flags.Detector.GeometryITk = False
+    flags.Detector.GeometryHGTD = False
+    flags.Detector.GeometryCalo = False
+    flags.Detector.GeometryMuon = False
+    flags.Detector.GeometryForward = False
+
+    flags.lock()
+
+    from AthenaConfiguration.MainServicesConfig import MainServicesCfg
+
+    cfg=MainServicesCfg(flags)
+    cfg.merge(LArFlatFromFileCfg(flags))
+
+
+    cfg.run(1)
+
diff --git a/LArCalorimeter/LArCondTools/src/LArFlatFromFile.cxx b/LArCalorimeter/LArCondTools/src/LArFlatFromFile.cxx
index ccfde78a543cfd2350986211fea8b679dd358a29..ca2abe6015946330ad4e99a5045da5c4ad3eb42f 100644
--- a/LArCalorimeter/LArCondTools/src/LArFlatFromFile.cxx
+++ b/LArCalorimeter/LArCondTools/src/LArFlatFromFile.cxx
@@ -1,5 +1,5 @@
 /*
-  Copyright (C) 2002-2022 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration
 */
 
 #include "LArFlatFromFile.h"
@@ -43,36 +43,51 @@ void LArFlatFromFile::singleFloatFlat(const char* blobName, const std::string& i
   std::unique_ptr<CondAttrListCollection> coll= std::make_unique<CondAttrListCollection>(true);
   // we expect line per channel with Id, hash and value
   std::ifstream myfile(input);
-  unsigned long id;
-  unsigned hash;
   std::string line;
+  std::vector< std::vector<float> >  values;
+  for (unsigned gain=0;gain<nGain;++gain) {
+     std::vector<float> gval(m_hashMax);
+     for (unsigned hs=0;hs<m_hashMax;++hs) gval[hs]=1.0; // default
+     values.push_back(gval);
+  }   
+  unsigned id;
+  unsigned hash;
+  float value;
   while (std::getline(myfile, line)) {
-    for (unsigned gain=0;gain<nGain;++gain) {
-      auto attrList = std::make_unique<coral::AttributeList>(*spec);
-      (*attrList)["version"].setValue(0U);
-      coral::Blob& blob=(*attrList)[blobName].data<coral::Blob>();
-      blob.resize(m_hashMax*sizeof(float));
-      float* pblob=static_cast<float*>(blob.startingAddress());
-      for (unsigned hs=0;hs<m_hashMax;++hs) {
-        const HWIdentifier chid=m_onlineID->channel_Id(hs);
-        float value;
-        std::stringstream st(line);
-        st>>std::hex>>id>>std::dec>>hash>>value;
-        if (value < 0) {
-          errIfConnected(chid,gain,blobName);
-          value=1.0; //Default vaue is 1.0, since these are multiplicative constants
-          ++nDefault;
-        } 
-        pblob[hs]=value;
-        ++nChannels;
+      std::stringstream st(line);
+      if(m_isSC) {
+        st>>std::hex>>id>>std::dec>>value;
+        hash=0;
+      } else {
+         st>>std::hex>>id>>std::dec>>hash>>value;
       }
-      unsigned coolChan=gain;
-      //Special case: Store single-gain constant in channel 1 
-      //To avoid AttrList vs AttrListCollection confusion in Athena DB infrastructure
-      if (nGain==1) coolChan=1;
- 
-      coll->add(coolChan,*attrList.release());
+      const HWIdentifier chid(id);
+      if (value < 0) {
+        errIfConnected(chid,hash,blobName);
+        value=1.0; //Default value is 1.0, since these are multiplicative constants
+        ++nDefault;
+      } 
+      if (hash >= nGain) {
+        errIfConnected(chid,hash,blobName," Wrong Gain !!!");
+        hash=0; 
+      }
+
+      values[hash][m_onlineID->channel_Hash(chid)]=value;
+  } // over the input file
+
+  for (unsigned gain=0;gain<nGain;++gain) {
+    auto attrList = std::make_unique<coral::AttributeList>(*spec);
+    (*attrList)["version"].setValue(0U);
+    coral::Blob& blob=(*attrList)[blobName].data<coral::Blob>();
+    blob.resize(m_hashMax*sizeof(float));
+    float* pblob=static_cast<float*>(blob.startingAddress());
+    for (unsigned hs=0;hs<m_hashMax;++hs) {
+      pblob[hs]=values[gain][hs];
+      ++nChannels;
     }
+    unsigned coolChan=gain;
+ 
+    coll->add(coolChan,*attrList.release());
   }
 
   ATH_MSG_INFO( "Converted " << blobName << " to inline storage. Total number of channels=" << nChannels );
@@ -245,6 +260,24 @@ StatusCode LArFlatFromFile::stop() {
     }
   }//end have m_OFCInput
 
+  //Single:
+  if (m_SingleInput.size()) {
+    std::ifstream myfile(m_SingleInput);
+    // new lines will be skipped unless we stop it from happening:    
+    myfile.unsetf(std::ios_base::skipws);
+
+    // count the newlines with an algorithm specialized for counting:
+    unsigned line_count = std::count( std::istream_iterator<char>(myfile),
+                                      std::istream_iterator<char>(), '\n');
+    myfile.close();
+    if (m_ngain*m_hashMax != line_count) {
+	ATH_MSG_ERROR( "Unexpected line count in input file "<<m_SingleInput );
+        ATH_MSG_ERROR( "Line count: "<<line_count<<" expected: "<<m_ngain*m_hashMax<<" "<<m_isSC);
+	return StatusCode::FAILURE;
+    } else {
+         singleFloatFlat(m_BlobName.value().c_str(),m_SingleInput,m_Folder,m_ngain);
+    }
+  }//end have m_SingleInput
 
   return StatusCode::SUCCESS; 
 }
diff --git a/LArCalorimeter/LArCondTools/src/LArFlatFromFile.h b/LArCalorimeter/LArCondTools/src/LArFlatFromFile.h
index d3e1c2563189caff2f53338dd57252bb1428c0e2..870dbe777b4adc7626c372bd16316b98e28016ef 100644
--- a/LArCalorimeter/LArCondTools/src/LArFlatFromFile.h
+++ b/LArCalorimeter/LArCondTools/src/LArFlatFromFile.h
@@ -54,12 +54,19 @@ class LArFlatFromFile: public AthAlgorithm
 
   ///Input files
   Gaudi::Property<std::string> m_OFCInput{this, "OFCInput","","Input file name"};
+  Gaudi::Property<std::string> m_SingleInput{this, "SingleInput","","Input file name"};
 
   // SC switch
   Gaudi::Property<bool> m_isSC{this,"SuperCells",false,"Run for SC ?"};
+
+  // OFC properties
   Gaudi::Property<unsigned> m_ngain{this,"NGains",1,"Number of gains to process"};
   Gaudi::Property<unsigned> m_nsample{this,"NSamples",4,"Number of samples to process"};
 
+  // Single properties
+  Gaudi::Property<std::string> m_Folder{this, "Folder","","Folder to create"};
+  Gaudi::Property<std::string> m_BlobName{this, "Blob","","Blob name"};
+
   bool m_forceStop{};
 }; 
 
diff --git a/LArCalorimeter/LArExample/LArCalibProcessing/CMakeLists.txt b/LArCalorimeter/LArExample/LArCalibProcessing/CMakeLists.txt
index 5bce5f1199fa577ab5a79f813b04df451a8c9fc8..45f6054b1146c212d01eaf954bd9377a086c3449 100644
--- a/LArCalorimeter/LArExample/LArCalibProcessing/CMakeLists.txt
+++ b/LArCalorimeter/LArExample/LArCalibProcessing/CMakeLists.txt
@@ -9,4 +9,4 @@ find_package( cx_Oracle )
 # Install files from the package:
 atlas_install_python_modules( python/*.py POST_BUILD_CMD ${ATLAS_FLAKE8} )
 atlas_install_joboptions( share/LArCalib_*.py share/GetInputFiles.py share/LArHVCool2Ntuple_jobOptions.py share/LArTimePhysPrediction_jobOptions.py share/UseLArExtendedSubDetGrouping.py )
-atlas_install_scripts( share/LAr_updateHVcalib_online.sh share/LAr_updateHVcalibNoise.sh share/LArNewCalib_*.py share/LArSC2NtupleDumper.py share/LArDigits2NtupleDumper.py share/LArNoiseCorrelationMon.py )
+atlas_install_scripts( share/LAr_updateHVcalib_online.sh share/LAr_updateHVcalibNoise.sh share/LArNewCalib_*.py share/LArSC2NtupleDumper.py share/LArDigits2NtupleDumper.py share/LArNoiseCorrelationMon.py python/LArCalib_postProcessingConfig.py python/LArCalib_ToCoolInlineConfig.py )
diff --git a/LArCalorimeter/LArExample/LArCalibProcessing/python/GetInputFiles.py b/LArCalorimeter/LArExample/LArCalibProcessing/python/GetInputFiles.py
index b6bbe3d4b29b3aa52d3b1f05005d986aa4cb426b..54f723215274987c7376222560b62c4b2c8ff8f6 100644
--- a/LArCalorimeter/LArExample/LArCalibProcessing/python/GetInputFiles.py
+++ b/LArCalorimeter/LArExample/LArCalibProcessing/python/GetInputFiles.py
@@ -5,10 +5,7 @@ from os import popen
 def GetInputFiles(inputPath,filePattern):
     print("Searching for files with pattern '",filePattern,"' in ",inputPath)
     fileList=[]
-    if (inputPath[0:5]=='/eos/'):    
-        cmd='/usr/bin/eos ls '
-    else:
-        cmd='ls -1'
+    cmd='ls -1'
 
     for f in popen("%(cmd)s %(path)s | grep '%(pattern)s'" \
                    % {'cmd':cmd,'path':inputPath,'pattern':filePattern}):
diff --git a/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalibConfigFlags.py b/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalibConfigFlags.py
index 6cc39ca44595cd5e3418eedc31bd2e1a4643b96a..f64bd61e69a7f3641e68600f87cee3e90a1aff40 100644
--- a/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalibConfigFlags.py
+++ b/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalibConfigFlags.py
@@ -29,6 +29,7 @@ def addLArCalibFlags(flags, isSC=False):
     flags.addFlag("LArCalib.CaliPulseParams.Folder", lambda prevFlags: _prefix(prevFlags)+"CaliPulseParams/RTM")
     flags.addFlag("LArCalib.DetCellParams.Folder", lambda prevFlags: _prefix(prevFlags)+"DetCellParams/RTM")
 
+    flags.addFlag("LArCalib.FCALPhysWave.Folder", "/LAR/ElecCalibOfl/PhysWaves/FCALFromTB")
 
     flags.addFlag("LArCalib.GroupingType",lambda prevFlags: "SuperCells" if prevFlags.LArCalib.isSC else "ExtendedSubDetector")
     flags.addFlag("LArCalib.Output.POOLFile","ouput.pool.root")
diff --git a/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_Delay_OFCCaliConfig.py b/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_Delay_OFCCaliConfig.py
index ab4209fafbf3a1d677589845595c4ace1bd38881..8411cc5f119105a30aa2770d73dbbce997ea814f 100644
--- a/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_Delay_OFCCaliConfig.py
+++ b/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_Delay_OFCCaliConfig.py
@@ -144,7 +144,7 @@ def LArDelay_OFCCaliCfg(flags):
        theCaliWaveValidationAlg.PatchMissingFEBs=True
        theCaliWaveValidationAlg.UseCorrChannels=False
        theCaliWaveValidationAlg.BadChanKey =  bcKey
-       theCaliWaveValidationAlg.SuperCell =  flags.LArCalib.isSC
+       theCaliWaveValidationAlg.SuperCells =  flags.LArCalib.isSC
 
        if flags.LArCalib.isSC:
           theCaliWaveValidationAlg.CablingKey = "LArOnOffIdMapSC"
@@ -186,6 +186,7 @@ def LArDelay_OFCCaliCfg(flags):
                                                             isSC = flags.LArCalib.isSC,
                                                             BadChanKey = bcKey,
                                                             OffId=True,
+                                                            AddCalib=True,
                                                             SaveJitter=True
                                                         ))
 
@@ -193,7 +194,8 @@ def LArDelay_OFCCaliCfg(flags):
            result.addEventAlgo(CompFactory.LArOFC2Ntuple(ContainerKey = "LArOFC",
                                                       AddFEBTempInfo  = False,
                                                       BadChanKey = bcKey,
-                                                      OffId=True
+                                                      OffId=True,
+                                                      AddCalib=True
                                                   ))
 
         import os
@@ -207,7 +209,8 @@ def LArDelay_OFCCaliCfg(flags):
         result.addEventAlgo(CompFactory.LArOFC2Ntuple(ContainerKey = "LArOFC",
                                                    AddFEBTempInfo  = False,
                                                    NtupleFile = "FILE2",
-                                                   BadChanKey = bcKey
+                                                   BadChanKey = bcKey,
+                                                   AddCalib=True
                                                ))
 
         import os
@@ -272,13 +275,16 @@ def LArDelay_OFCCali_PoolDumpCfg(flags):
                                                             AddFEBTempInfo = False,
                                                             SaveDerivedInfo = True,
                                                             ApplyCorrection = True,
-                                                            BadChanKey = bcKey
+                                                            BadChanKey = bcKey,
+                                                            AddCalib=True,
+                                                            SaveJitter=True
                                                         ))
 
         if not rootfile2:
            result.addEventAlgo(CompFactory.LArOFC2Ntuple(ContainerKey = "LArOFC",
                                                       AddFEBTempInfo  = False,
-                                                      BadChanKey = bcKey
+                                                      BadChanKey = bcKey,
+                                                      AddCalib=True
                                                   ))
 
         import os
@@ -292,13 +298,15 @@ def LArDelay_OFCCali_PoolDumpCfg(flags):
            result.addEventAlgo(CompFactory.LArOFC2Ntuple(ContainerKey = "LArOFC",
                                                    AddFEBTempInfo  = False,
                                                    NtupleFile = "FILE1",
-                                                   BadChanKey = bcKey
+                                                   BadChanKey = bcKey,
+                                                   AddCalib=True
                                                ))
         else:
            result.addEventAlgo(CompFactory.LArOFC2Ntuple(ContainerKey = "LArOFC",
                                                    AddFEBTempInfo  = False,
                                                    NtupleFile = "FILE2",
-                                                   BadChanKey = bcKey
+                                                   BadChanKey = bcKey,
+                                                   AddCalib=True
                                                ))
 
            import os
diff --git a/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_OFCPhysConfig.py b/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_OFCPhysConfig.py
index 84ef5e1155a97c232b6b035073cd3d9429600e4f..f2b89860399701df1d7d4a7d2cdfb658742dbadc 100644
--- a/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_OFCPhysConfig.py
+++ b/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_OFCPhysConfig.py
@@ -58,6 +58,7 @@ def _ofcAlg(flags,postfix,folderSuffix,nPhases,dPhases,nDelays,nColl):
         OFC2Ntup.ContainerKey = "LArOFC_"+postfix
         OFC2Ntup.NtupleName   = "OFC_"+postfix
         OFC2Ntup.AddFEBTempInfo   = False   
+        OFC2Ntup.AddCalib   = True   
         OFC2Ntup.isSC = flags.LArCalib.isSC
         OFC2Ntup.BadChanKey = bcKey
         result.addEventAlgo(OFC2Ntup)
@@ -67,6 +68,7 @@ def _ofcAlg(flags,postfix,folderSuffix,nPhases,dPhases,nDelays,nColl):
             OFC2NtupMu.ContainerKey = "LArOFC_"+postfix+"_mu"
             OFC2NtupMu.NtupleName   = "OFC_"+postfix+"_mu"
             OFC2NtupMu.AddFEBTempInfo   = False   
+            OFC2NtupMu.AddCalib   = True   
             OFC2NtupMu.isSC = flags.LArCalib.isSC
             OFC2NtupMu.BadChanKey = bcKey
             result.addEventAlgo(OFC2NtupMu)
@@ -76,6 +78,7 @@ def _ofcAlg(flags,postfix,folderSuffix,nPhases,dPhases,nDelays,nColl):
         Shape2Ntup.ContainerKey="LArShape_"+postfix
         Shape2Ntup.NtupleName="SHAPE_"+postfix
         Shape2Ntup.AddFEBTempInfo   = False
+        Shape2Ntup.AddCalib   = True   
         Shape2Ntup.isSC = flags.LArCalib.isSC
         Shape2Ntup.BadChanKey = bcKey
         result.addEventAlgo(Shape2Ntup)
@@ -84,7 +87,9 @@ def _ofcAlg(flags,postfix,folderSuffix,nPhases,dPhases,nDelays,nColl):
     objList=["LArOFCComplete#LArOFC_"+postfix+"#"+flags.LArCalib.OFCPhys.Folder+folderSuffix,
              "LArShapeComplete#LArShape_"+postfix+"#"+flags.LArCalib.Shape.Folder+folderSuffix]
 
-    rs=FolderTagResolver()
+    if flags.LArCalib.isSC:
+       FolderTagResolver._defaultSuffix="-UPD3-00"
+    rs=FolderTagResolver(dbname="sqlite://;schema=%s;dbname=CONDBR2"%flags.LArCalib.Input.Database)
     OFCTag=rs.getFolderTag(flags.LArCalib.OFCPhys.Folder+folderSuffix)
     ShapeTag=rs.getFolderTag(flags.LArCalib.Shape.Folder+folderSuffix)
     tagList=[OFCTag,ShapeTag]
@@ -120,11 +125,13 @@ def LArOFCPhysCfg(flags,loadPhysAC=True):
     nColl=flags.LArCalib.OFC.Ncoll
     from LArCalibProcessing.utils import FolderTagResolver
     FolderTagResolver._globalTag=flags.IOVDb.GlobalTag
-    rs=FolderTagResolver()
+    if flags.LArCalib.isSC:
+       FolderTagResolver._defaultSuffix="-UPD3-00"
+    rs=FolderTagResolver(dbname="sqlite://;schema=%s;dbname=CONDBR2"%flags.LArCalib.Input.Database)
     PhysWaveTag=rs.getFolderTag(flags.LArCalib.PhysWave.Folder)
     AutoCorrTag=rs.getFolderTag(flags.LArCalib.AutoCorr.Folder)
     PhysAutoCorrTag= rs.getFolderTag(flags.LArCalib.PhysAutoCorr.Folder)
-    if (nColl>0):
+    if (nColl>0 and "mu" not in PhysAutoCorrTag):
         #Insert mu in tag-name:
         elems=PhysAutoCorrTag.split("-")
         PhysAutoCorrTag="-".join([elems[0]+"_mu_%i"%nColl,]+elems[1:])
@@ -147,19 +154,10 @@ def LArOFCPhysCfg(flags,loadPhysAC=True):
         result.merge(addFolders(flags,flags.LArCalib.PhysCaliTdiff.Folder,detDb="LAR_OFL", db="COMP200", tag=PhysCaliTdiffTag))
 
 
-    #def _ofcAlg(flags,postfix,folderSuffix,nPhases,dPhases,nDelays,nColl):
-    if not loadPhysAC:
-        #post-processing mode, fix SG key to allow subsequent OFC-phase picking
-        key1="_unpicked"
-    else:
-        key1=""
-
     if flags.LArCalib.isSC:
-       result.merge(_ofcAlg(flags,"3ns%s"%key1,"%isamples3bins17phases"%flags.LArCalib.OFC.Nsamples,nPhases=17,dPhases=3,nDelays=24,nColl=nColl))
-       result.merge(_ofcAlg(flags,"1ns","%isamples%s"%(flags.LArCalib.OFC.Nsamples,key1),nPhases=50,dPhases=1,nDelays=24,nColl=nColl))
+       result.merge(_ofcAlg(flags,"1ns","%isamples"%(flags.LArCalib.OFC.Nsamples),nPhases=50,dPhases=1,nDelays=24,nColl=nColl))
     else:
-       result.merge(_ofcAlg(flags,"3ns%s"%key1,"%isamples3bins17phases"%flags.LArCalib.OFC.Nsamples,nPhases=8,dPhases=3,nDelays=24,nColl=nColl))
-       result.merge(_ofcAlg(flags,"1ns","%isamples%s"%(flags.LArCalib.OFC.Nsamples,key1),nPhases=24,dPhases=1,nDelays=24,nColl=nColl))
+       result.merge(_ofcAlg(flags,"3ns","%isamples3bins17phases"%flags.LArCalib.OFC.Nsamples,nPhases=8,dPhases=3,nDelays=24,nColl=nColl))
 
     #RegistrationSvc    
     result.addService(CompFactory.IOVRegistrationSvc(RecreateFolders = False))
diff --git a/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_OFPhasePickerConfig.py b/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_OFPhasePickerConfig.py
index d0b7fd9a1fa1c63164a615c36f1925a3a3f37785..b0439a8dabdfccb74efe9ee55b7a45995eff6926 100644
--- a/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_OFPhasePickerConfig.py
+++ b/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_OFPhasePickerConfig.py
@@ -5,11 +5,11 @@ from AthenaConfiguration.ComponentAccumulator import ComponentAccumulator
 from LArCalibProcessing.utils import FolderTagResolver
 from IOVDbSvc.IOVDbSvcConfig import addFolders
 
-def _OFPhasePickerCfg(flags, inputSuffix="4samples3bins17phases",outputSuffix="4samples1phase",keySuffix="",nColl=0,loadInputs=True):
+def _OFPhasePickerCfg(flags, inputSuffix="4samples3bins17phases",outputSuffix="4samples1phase",keySuffix="",nColl=0,loadInputs=True, storeShape=True):
 
     result=ComponentAccumulator()
     FolderTagResolver._globalTag=flags.IOVDb.GlobalTag
-    rs=FolderTagResolver()
+    rs=FolderTagResolver(dbname="sqlite://;schema=%s;dbname=CONDBR2"%flags.LArCalib.Input.Database)
     if nColl > 0:
        tagstr=rs.getFolderTag(flags.LArCalib.OFCPhys.Folder+inputSuffix)
        tagpref=tagstr[0:tagstr.find(inputSuffix)+len(inputSuffix)]
@@ -25,7 +25,13 @@ def _OFPhasePickerCfg(flags, inputSuffix="4samples3bins17phases",outputSuffix="4
        outputOFCTag=rs.getFolderTag(flags.LArCalib.OFCPhys.Folder+outputSuffix)
 
     inputShapeTag=rs.getFolderTag(flags.LArCalib.Shape.Folder+inputSuffix)
-    outputShapeTag=rs.getFolderTag(flags.LArCalib.Shape.Folder+outputSuffix)
+    if flags.LArCalib.OFC.ShapeCorrection:
+       tagstr=rs.getFolderTag(flags.LArCalib.Shape.Folder+outputSuffix)
+       tagpref=tagstr[0:tagstr.find(outputSuffix)+len(outputSuffix)]
+       tagpost=tagstr[tagstr.find(outputSuffix)+len(outputSuffix):]
+       outputShapeTag=f'{tagpref}-corr{tagpost}'
+    else:
+       outputShapeTag=rs.getFolderTag(flags.LArCalib.Shape.Folder+outputSuffix)
 
 
     del rs #Close database
@@ -33,9 +39,9 @@ def _OFPhasePickerCfg(flags, inputSuffix="4samples3bins17phases",outputSuffix="4
     from LArCalibProcessing.LArCalibBaseConfig import chanSelStr
     if loadInputs:
         result.merge(addFolders(flags,flags.LArCalib.OFCPhys.Folder+inputSuffix,detDb=flags.LArCalib.Input.Database, 
-                                tag=inputOFCTag, modifiers=chanSelStr(flags)+"<key>LArOFC"+keySuffix+"_unpicked</key>"))
+                                tag=inputOFCTag, modifiers=chanSelStr(flags)+"<key>LArOFC"+keySuffix+"</key>"))
         result.merge(addFolders(flags,flags.LArCalib.Shape.Folder+inputSuffix,detDb=flags.LArCalib.Input.Database, 
-                                tag=inputShapeTag, modifiers=chanSelStr(flags)+"<key>LArShape"+keySuffix+"_unpicked</key>"))
+                                tag=inputShapeTag, modifiers=chanSelStr(flags)+"<key>LArShape"+keySuffix+"</key>"))
 
     LArOFPhasePick = CompFactory.LArOFPhasePicker("LArOFPhasePicker"+keySuffix)
     if flags.LArCalib.isSC:
@@ -48,10 +54,19 @@ def _OFPhasePickerCfg(flags, inputSuffix="4samples3bins17phases",outputSuffix="4
 
     else:    
         LArOFPhasePick.KeyPhase = ""
-    LArOFPhasePick.KeyOFC_new = "LArOFC"+keySuffix
-    LArOFPhasePick.KeyOFC = "LArOFC"+keySuffix+"_unpicked"
-    LArOFPhasePick.KeyShape_new = "LArShape"+keySuffix+"_uncorr" if flags.LArCalib.OFC.ShapeCorrection else  "LArShape"+keySuffix
-    LArOFPhasePick.KeyShape = "LArShape"+keySuffix+"_unpicked"
+
+    if nColl > 0:
+       muSuffix="_mu"
+    else:   
+           muSuffix=""
+
+    LArOFPhasePick.KeyOFC_new = "LArOFC"+muSuffix
+    LArOFPhasePick.KeyOFC = "LArOFC"+keySuffix
+    if storeShape:
+       LArOFPhasePick.KeyShape_new = "LArShape"+"_uncorr" if flags.LArCalib.OFC.ShapeCorrection else  "LArShape"
+       LArOFPhasePick.KeyShape = "LArShape"+keySuffix
+    else:   
+       LArOFPhasePick.doShape = False
     LArOFPhasePick.GroupingType = flags.LArCalib.GroupingType
     LArOFPhasePick.DefaultPhase = 4
     LArOFPhasePick.TimeOffsetCorrection = 0
@@ -59,19 +74,24 @@ def _OFPhasePickerCfg(flags, inputSuffix="4samples3bins17phases",outputSuffix="4
 
     result.addEventAlgo(LArOFPhasePick)
 
-    if flags.LArCalib.OFC.ShapeCorrection:
+    if flags.LArCalib.OFC.ShapeCorrection and storeShape:
         result.merge(addFolders(flags,"/LAR/ElecCalibOfl/Shape/Residuals/5samples","LAR_OFL"))
         resShapeCorr=CompFactory.LArShapeCorrector("LArShapeCorr"+keySuffix)
-        resShapeCorr.KeyShape= "LArShape"+keySuffix+"_uncorr" 
-        resShapeCorr.KeyShape_newcorr="LArShape"+keySuffix
+        resShapeCorr.KeyShape= "LArShape_uncorr" 
+        resShapeCorr.KeyShape_newcorr="LArShape"
         result.addEventAlgo(resShapeCorr)
 
     from RegistrationServices.OutputConditionsAlgConfig import OutputConditionsAlgCfg
+    Obj=["LArOFCComplete#LArOFC"+muSuffix+"#"+flags.LArCalib.OFCPhys.Folder+outputSuffix,]
+    Tag=[outputOFCTag,]
+    if storeShape:
+       Obj+=["LArShapeComplete#LArShape#"+flags.LArCalib.Shape.Folder+outputSuffix,]
+       Tag+=[outputShapeTag,]
+    print('Obj: ',Obj)
     result.merge(OutputConditionsAlgCfg(flags,
                                         outputFile=flags.LArCalib.Output.POOLFile,
-                                        ObjectList=["LArOFCComplete#LArOFC"+keySuffix+"#"+flags.LArCalib.OFCPhys.Folder+outputSuffix,
-                                                    "LArShapeComplete#LArShape"+keySuffix+"#"+flags.LArCalib.Shape.Folder+outputSuffix],
-                                        IOVTagList=[outputOFCTag,outputShapeTag],
+                                        ObjectList=Obj,
+                                        IOVTagList=Tag,
                                         Run1=flags.LArCalib.IOVStart,
                                         Run2=flags.LArCalib.IOVEnd
                                     ))
@@ -80,22 +100,20 @@ def _OFPhasePickerCfg(flags, inputSuffix="4samples3bins17phases",outputSuffix="4
     rootfile=flags.LArCalib.Output.ROOTFile
     if rootfile != "":
         bcKey = "LArBadChannelSC" if flags.LArCalib.isSC else "LArBadChannel"     
-        if nColl > 0:
-           muSuffix="_mu"
-        else:   
-           muSuffix=""
-        OFC2Ntup=CompFactory.LArOFC2Ntuple("LArOFC2Ntuple"+keySuffix+muSuffix)
-        OFC2Ntup.ContainerKey = "LArOFC"+keySuffix
+        OFC2Ntup=CompFactory.LArOFC2Ntuple("LArOFC2Ntuple"+muSuffix)
+        OFC2Ntup.ContainerKey = "LArOFC"+muSuffix
         OFC2Ntup.NtupleName   = "OFC"+muSuffix
         OFC2Ntup.AddFEBTempInfo   = False   
+        OFC2Ntup.AddCalib   = True   
         OFC2Ntup.isSC = flags.LArCalib.isSC
         OFC2Ntup.BadChanKey = bcKey
         result.addEventAlgo(OFC2Ntup)
 
-        Shape2Ntup=CompFactory.LArShape2Ntuple("LArShape2Ntuple"+keySuffix)
-        Shape2Ntup.ContainerKey="LArShape"+keySuffix
-        Shape2Ntup.NtupleName="SHAPE"+muSuffix
+        Shape2Ntup=CompFactory.LArShape2Ntuple("LArShape2Ntuple")
+        Shape2Ntup.ContainerKey="LArShape"
+        Shape2Ntup.NtupleName="SHAPE"
         Shape2Ntup.AddFEBTempInfo   = False
+        Shape2Ntup.AddCalib   = True
         Shape2Ntup.isSC = flags.LArCalib.isSC
         Shape2Ntup.BadChanKey = bcKey
         result.addEventAlgo(Shape2Ntup)
@@ -110,13 +128,13 @@ def LArOFPhasePickerCfg(flags,loadInputs=True):
     result=LArCalibBaseCfg(flags)
 
     if flags.LArCalib.isSC:
-       result.merge(_OFPhasePickerCfg(flags, inputSuffix="4samples",outputSuffix="4samples1phase",keySuffix="_3ns", nColl=0, loadInputs=loadInputs))
+       result.merge(_OFPhasePickerCfg(flags, inputSuffix="4samples",outputSuffix="4samples1phase",keySuffix="_1ns", nColl=0, loadInputs=loadInputs))
        if flags.LArCalib.OFC.Ncoll > 0:
-          result.merge(_OFPhasePickerCfg(flags, inputSuffix="4samples",outputSuffix="4samples1phase",keySuffix="_3ns_mu", nColl=flags.LArCalib.OFC.Ncoll, loadInputs=loadInputs))
+          result.merge(_OFPhasePickerCfg(flags, inputSuffix="4samples",outputSuffix="4samples1phase",keySuffix="_1ns_mu", nColl=flags.LArCalib.OFC.Ncoll, loadInputs=loadInputs, storeShape=False))
     else:
        result.merge(_OFPhasePickerCfg(flags, inputSuffix="4samples3bins17phases",outputSuffix="4samples1phase",keySuffix="_3ns", nColl=0, loadInputs=loadInputs))
        if flags.LArCalib.OFC.Ncoll > 0:
-          result.merge(_OFPhasePickerCfg(flags, inputSuffix="4samples3bins17phases",outputSuffix="4samples1phase",keySuffix="_3ns_mu", nColl=flags.LArCalib.OFC.Ncoll, loadInputs=loadInputs))
+          result.merge(_OFPhasePickerCfg(flags, inputSuffix="4samples3bins17phases",outputSuffix="4samples1phase",keySuffix="_3ns_mu", nColl=flags.LArCalib.OFC.Ncoll, loadInputs=loadInputs, storeShape=False))
 
     #RegistrationSvc    
     result.addService(CompFactory.IOVRegistrationSvc(RecreateFolders = False))
diff --git a/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_PedestalAutoCorrConfig.py b/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_PedestalAutoCorrConfig.py
index d2a9935e26479673e09d2e679b22033660c506ba..667a35161fd307fed6996fcf1be1de8861c0ca3f 100644
--- a/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_PedestalAutoCorrConfig.py
+++ b/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_PedestalAutoCorrConfig.py
@@ -77,6 +77,7 @@ def LArPedestalAutoCorrCfg(flags):
                                                             AddFEBTempInfo = False, 
                                                             RealGeometry = True,
                                                             OffId = True,
+                                                            AddCalib = True,
                                                             isSC = flags.LArCalib.isSC,
                                                             BadChanKey = bcKey
                                                         )
@@ -85,7 +86,8 @@ def LArPedestalAutoCorrCfg(flags):
         result.addEventAlgo(CompFactory.LArAutoCorr2Ntuple(ContainerKey = "LArAutoCorr",
                                                            AddFEBTempInfo  = False, isSC = flags.LArCalib.isSC,
                                                            BadChanKey = bcKey,
-                                                           OffId=True
+                                                           OffId=True,
+                                                           AddCalib = True
                                                        )
                         )
 
diff --git a/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_PhysWavePredictionConfig.py b/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_PhysWavePredictionConfig.py
index c73876f730433578f70a2f5ffd84845e5f2e792e..dede5f4323fba2d8e7837b7d74595afd93ab8749 100644
--- a/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_PhysWavePredictionConfig.py
+++ b/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_PhysWavePredictionConfig.py
@@ -11,6 +11,8 @@ def LArPhysWavePredictionCfg(flags):
 
     from LArCalibProcessing.utils import FolderTagResolver
     FolderTagResolver._globalTag=flags.IOVDb.GlobalTag
+    if flags.LArCalib.isSC:
+       FolderTagResolver._defaultSuffix="-RUN2-UPD3-00"
     rs=FolderTagResolver()
     CaliWaveTag=rs.getFolderTag(flags.LArCalib.CaliWave.Folder)
     DetCellParamsTag=rs.getFolderTag(flags.LArCalib.DetCellParams.Folder)
@@ -43,13 +45,19 @@ def LArPhysWavePredictionCfg(flags):
     result.merge(addFolders(flags,flags.LArCalib.CaliWave.Folder,detDb=flags.LArCalib.Input.Database, tag=CaliWaveTag, modifiers=chanSelStr(flags)))
     if flags.LArCalib.isSC:
        result.merge(addFolders(flags,"/LAR/ElecCalibOflSC/Tdrift/Computed",detDb="LAR_OFL",tag="LARElecCalibOflSCTdriftComputed-000"))
+       result.merge(addFolders(flags,"/LAR/ElecCalibOflSC/PhysWaves/HECIdeal",detDb="LAR_OFL",tag="LARElecCalibOflSCPhysWavesHECIdeal-calib-02"))
+       result.merge(addFolders(flags,flags.LArCalib.FCALPhysWave.Folder,detDb="LAR_OFL",tag="LARElecCalibOflPhysWavesFCALFromTB-calib-01",modifiers="<key>FCALFromTB</key>"))
     else:   
        result.merge(addFolders(flags,"/LAR/ElecCalibOfl/Tdrift/Computed",detDb="LAR_OFL",tag="LARElecCalibOflTdriftComputed-calib-03"))
     
 
     if isHEC:
-        result.merge(addFolders(flags,"/LAR/ElecCalibOfl/PhysWaves/HECIdeal",detDb="LAR_OFL",db="COMP200",tag="LARElecCalibOflPhysWavesHECIdeal-calib-02"))
-        result.merge(addFolders(flags,flags.LArCalib.CaliPulseParams.Folder,detDb="LAR_OFL", tag=CaliPulseParamsTag))
+        result.merge(addFolders(flags,"/LAR/ElecCalibOfl/PhysWaves/HECIdeal",detDb="LAR_OFL",tag="LARElecCalibOflPhysWavesHECIdeal-calib-02"))
+        if flags.LArCalib.isSC:
+           result.merge(addFolders(flags,flags.LArCalib.CaliPulseParams.Folder,detDb=flags.LArCalib.Input.Database, tag=CaliPulseParamsTag))
+           result.merge(addFolders(flags,flags.LArCalib.DetCellParams.Folder,detDb=flags.LArCalib.Input.Database, tag=DetCellParamsTag))
+        else:    
+           result.merge(addFolders(flags,flags.LArCalib.CaliPulseParams.Folder,detDb="LAR_OFL", tag=CaliPulseParamsTag))
     else:
         result.merge(addFolders(flags,flags.LArCalib.CaliPulseParams.Folder,detDb=flags.LArCalib.Input.Database, tag=CaliPulseParamsTag))
         result.merge(addFolders(flags,flags.LArCalib.DetCellParams.Folder,detDb=flags.LArCalib.Input.Database, tag=DetCellParamsTag))
@@ -78,16 +86,17 @@ def LArPhysWavePredictionCfg(flags):
 
     result.addEventAlgo(LArPhysWavePredictor)
     
-    if (flags.LArCalib.Input.SubDet == "HEC"):
-        LArPhysWaveHECTool=CompFactory.LArPhysWaveHECTool()
+    if (flags.LArCalib.isSC or flags.LArCalib.Input.SubDet == "HEC"):
+        LArPhysWaveHECTool=CompFactory.LArPhysWaveHECTool("LArPhysWaveHECTool")
         LArPhysWaveHECTool.NormalizeCali     = False  
         LArPhysWaveHECTool.TimeOriginShift   = False
         LArPhysWaveHECTool.SubtractBaseline  = False
+        LArPhysWaveHECTool.isSC              = flags.LArCalib.isSC
         result.addPublicTool(LArPhysWaveHECTool)
 
 
-    else: #not HEC but EM:
-        LArPhysWaveTool=CompFactory.LArPhysWaveTool()
+    if flags.LArCalib.isSC or flags.LArCalib.Input.SubDet != "HEC":
+        LArPhysWaveTool=CompFactory.LArPhysWaveTool("LArPhysWaveTool")
         LArPhysWaveTool.NormalizeCali     = False # this is taken care by LArPhysWavePredictor
         LArPhysWaveTool.TimeOriginShift   = False
         LArPhysWaveTool.SubtractBaseline  = False
@@ -103,6 +112,7 @@ def LArPhysWavePredictionCfg(flags):
       LArPhysWaves2Ntuple.AddFEBTempInfo   = False  
       LArPhysWaves2Ntuple.KeyList      = [ "LArPhysWave"  ]
       LArPhysWaves2Ntuple.isSC = flags.LArCalib.isSC
+      LArPhysWaves2Ntuple.AddCalib = True
       LArPhysWaves2Ntuple.BadChanKey = bcKey
       result.addEventAlgo(LArPhysWaves2Ntuple)
 
diff --git a/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_PileUpAutoCorrConfig.py b/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_PileUpAutoCorrConfig.py
index 5f2b732eddb0a49953da578122773d22295f523b..95f5a33de4d6cbe76e9be894f3d19bd68dda770f 100644
--- a/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_PileUpAutoCorrConfig.py
+++ b/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_PileUpAutoCorrConfig.py
@@ -18,14 +18,14 @@ def LArPileUpAutoCorrCfg(flags):
        # for the SC we need uA2MeV also from MC, because the one used for data are weighted with E->ET conversion
        result.merge(LArElecCalibDBMCSCCfg(flags,["fSamplSC","ShapeSC","MinBiasSC","uA2MeVSC"]))
     else:
-       result.merge(addFolders(flags,"/LAR/ElecCalibMC/Shape",detDb="LAR_OFL", db="OFLP200",tag="LARElecCalibMCShapeLArPileupShape-RUN2-2018",className="LArShape32MC"))
+       result.merge(addFolders(flags,"/LAR/ElecCalibMC/Shape",detDb="LAR_OFL", db="OFLP200",tag="LARElecCalibMCShapeLArPileupShape-RUN2-2018",className="LArShape32MC",modifiers="<key>LArShapeMC</key>"))
        result.merge(addFolders(flags,"/LAR/ElecCalibMC/fSampl",detDb="LAR_OFL", db="OFLP200",tag="LARElecCalibMCfSampl-G4101-20371-FTFP_BERT_BIRK_v2",className="LArfSamplMC"))
        result.merge(addFolders(flags,"/LAR/ElecCalibMC/MinBias",detDb="LAR_OFL", db="OFLP200",tag="LARElecCalibMCMinBias-mc16-Epos-A3-s3687",className="LArMinBiasMC"))
 
        result.addCondAlgo(CompFactory.LArMCSymCondAlg())
 
        LArShapeSymAlg =  CompFactory.getComp("LArSymConditionsAlg<LArShape32MC, LArShape32Sym>")
-       result.addCondAlgo(LArShapeSymAlg(ReadKey="LArShape",WriteKey="LArShapeSym"))
+       result.addCondAlgo(LArShapeSymAlg(ReadKey="LArShapeMC",WriteKey="LArShapeSym"))
 
        LArfSamplSymAlg =  CompFactory.getComp("LArSymConditionsAlg<LArfSamplMC, LArfSamplSym>")
        result.addCondAlgo(LArfSamplSymAlg(ReadKey="LArfSampl", WriteKey="LArfSamplSym"))
@@ -35,6 +35,8 @@ def LArPileUpAutoCorrCfg(flags):
 
     from LArCalibProcessing.utils import FolderTagResolver
     FolderTagResolver._globalTag=flags.IOVDb.GlobalTag
+    if flags.LArCalib.isSC:
+       FolderTagResolver._defaultSuffix="-UPD3-00"
     rs=FolderTagResolver(dbname="sqlite://;schema=%s;dbname=CONDBR2"%flags.LArCalib.Input.Database)
     AutoCorrTag=rs.getFolderTag(flags.LArCalib.AutoCorr.Folder)
     # FIXME these tags has to be re-enabled in 2024 also for main readout:
@@ -118,7 +120,7 @@ def LArPileUpAutoCorrCfg(flags):
     rootfile=flags.LArCalib.Output.ROOTFile
     if rootfile != "":
         result.addEventAlgo(CompFactory.LArAutoCorr2Ntuple(ContainerKey="LArPhysAutoCorr",OffId=True,isSC=flags.LArCalib.isSC,
-                                                           BadChanKey=bcKey))
+                                                           BadChanKey=bcKey, AddCalib=True))
         import os
         if os.path.exists(rootfile):
             os.remove(rootfile)
diff --git a/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_RTMParamsConfig.py b/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_RTMParamsConfig.py
index e11b947f4d899cae227e35a0c56d74757cfbd27c..3b14f77aba0b5667ea09e7089c5223d355865e57 100644
--- a/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_RTMParamsConfig.py
+++ b/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_RTMParamsConfig.py
@@ -72,6 +72,7 @@ def LArRTMParamsCfg(flags):
         LArWFParams2Ntuple.DetCellParamsKey="LArDetCellParams_RTM"
         LArWFParams2Ntuple.BadChanKey = bcKey
         LArWFParams2Ntuple.isSC = flags.LArCalib.isSC
+        LArWFParams2Ntuple.AddCalib = True
         result.addEventAlgo(LArWFParams2Ntuple)
    
         if flags.LArCalib.RTM.DumpOmegaScan:
diff --git a/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_RampConfig.py b/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_RampConfig.py
index 15e2712bf2f547772062ef9366479261eb5b8ac8..3a9224f27580a039d470580fdc79f57eaa358ffc 100644
--- a/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_RampConfig.py
+++ b/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_RampConfig.py
@@ -118,7 +118,7 @@ def LArRampCfg(flags):
         theLArRampPatcher.ContainerKey="LArRamp"
         theLArRampPatcher.BadChanKey=bcKey
         theLArRampPatcher.PatchMethod="PhiAverage"
-        theLArRampPatcher.SuperCell=flags.LArCalib.isSC
+        theLArRampPatcher.SuperCells=flags.LArCalib.isSC
    
         theLArRampPatcher.ProblemsToPatch=["deadCalib","deadReadout","deadPhys","almostDead","short"]
         theLArRampPatcher.UseCorrChannels=False
@@ -197,6 +197,7 @@ def LArRampCfg(flags):
                                                          AddFEBTempInfo = False,
                                                          RealGeometry = True,
                                                          OffId = True,
+                                                         AddCalib = True,
                                                          RawRamp = True,
                                                          SaveAllSamples =  True,
                                                          BadChanKey = bcKey,
diff --git a/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_ToCoolInlineConfig.py b/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_ToCoolInlineConfig.py
new file mode 100755
index 0000000000000000000000000000000000000000..774c48513e1ad9664880a8d007d2867e876ab0e1
--- /dev/null
+++ b/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_ToCoolInlineConfig.py
@@ -0,0 +1,163 @@
+#!/usr/bin/env python
+# Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration
+
+from AthenaConfiguration.ComponentFactory import CompFactory
+
+def LArToCoolInlineCfg(flags,inputFolders,singleV=True):
+
+    from LArCalibProcessing.LArCalibBaseConfig import LArCalibBaseCfg
+    result=LArCalibBaseCfg(flags)
+    from LArCabling.LArCablingConfig import LArOnOffIdMappingCfg 
+    result.merge(LArOnOffIdMappingCfg(flags))
+    if flags.LArCalib.isSC:
+       from LArCabling.LArCablingConfig import LArOnOffIdMappingSCCfg
+       result.merge(LArOnOffIdMappingSCCfg(flags))
+
+    theLArCompleteToFlat = CompFactory.LArCompleteToFlat(FakeEMBPSLowGain=True,isSC=flags.LArCalib.isSC)
+    theLArCompleteToFlat.OutputLevel = 2
+
+    outTypes = []
+    overrides = []
+    outTags=[]
+    for (fldr,ftag,key,classtype) in inputFolders:
+      if "Pedestal" in fldr:
+        outTypes.append("Pedestal")
+        theLArCompleteToFlat.PedestalInput=key
+        overrides.extend(["Pedestal", "PedestalRMS"])
+      elif "Ramp" in fldr:
+        outTypes.append("Ramp")
+        theLArCompleteToFlat.RampInput=key
+        overrides.extend(["RampVec"])
+      elif "OFC" in fldr:
+        outTypes.append("OFC")
+        theLArCompleteToFlat.OFCInput=key
+        overrides.extend(["OFCa", "OFCb","TimeOffset"])
+      elif "MphysOverMcal" in fldr:
+        outTypes.append("MphysOverMcal")
+        theLArCompleteToFlat.MphysOverMcalInput=key
+        overrides.extend(["MphysOverMcal"])
+      elif "Shape" in fldr:
+        outTypes.append("Shape")
+        theLArCompleteToFlat.ShapeInput=key
+        overrides.extend(["Shape","ShapeDer"])
+
+      from IOVDbSvc.IOVDbSvcConfig import addFolders  
+      if len(ftag):
+         result.merge(addFolders(flags,fldr,detDb=flags.LArCalib.Input.Database,tag=ftag))
+         if not singleV: 
+            outTags.append(ftag)
+         else:   
+            outTags.append("")
+      else:
+         result.merge(addFolders(flags,fldr,detDb=flags.LArCalib.Input.Database))
+         outTags.append("")
+      pass
+    
+    result.addEventAlgo(theLArCompleteToFlat)
+    
+    flatName="ElecCalibFlat"
+    if flags.LArCalib.isSC:
+       flatName+="SC"
+    if "outObjects" not in dir():
+       outObjects=["CondAttrListCollection#/LAR/"+flatName+"/"+ot for ot in outTypes] 
+    
+    from RegistrationServices.OutputConditionsAlgConfig import OutputConditionsAlgCfg
+    result.merge(OutputConditionsAlgCfg(flags,
+                                        outputFile="dummy.root",
+                                        ObjectList=outObjects,
+                                        IOVTagList=outTags,
+                                        Run1=flags.LArCalib.IOVStart,
+                                        Run2=flags.LArCalib.IOVEnd
+                                    ))
+    #RegistrationSvc    
+    print("OVERRIDES ARE:",overrides)
+    types=[]
+    for i in range(len(overrides)):
+       types.append("Blob16M")
+    result.addService(CompFactory.IOVRegistrationSvc(RecreateFolders = True, SVFolder=True,OverrideNames = overrides, OverrideTypes = types))
+    result.getService("IOVDbSvc").DBInstance=""
+
+    #MC Event selector since we have no input data file 
+    from McEventSelector.McEventSelectorConfig import McEventSelectorCfg
+    result.merge(McEventSelectorCfg(flags,
+                                    RunNumber         = flags.LArCalib.Input.RunNumbers[0],
+                                    EventsPerRun      = 1,
+                                    FirstEvent	      = 1,
+                                    InitialTimeStamp  = 0,
+                                    TimeStampInterval = 1))
+
+       
+    return result
+
+if __name__=="__main__":
+    import sys,os
+    import argparse
+
+    # now process the CL options and assign defaults
+    parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+    parser.add_argument('-r','--run', dest='run', default=str(0x7FFFFFFF), help='Run number to query input DB', type=str)
+    parser.add_argument('-i','--insqlite', dest='insql', default="freshConstants_AP.db", help='Input sqlite file containing the (merged) output of the AP.', type=str)
+    parser.add_argument('-f','--infolders', dest='infold', default="ConvertToInline.py", help='python file with inputFolders definition', type=str)
+    parser.add_argument('-o','--outsqlite', dest='outsql', default="freshConstants_merged.db", help='Output sqlite file', type=str)
+    parser.add_argument('--iovstart',dest="iovstart", default=0, help="IOV start (run-number)", type=int)
+    parser.add_argument('--isSC', dest='supercells', default=False, help='is SC data ?', action="store_true")
+    parser.add_argument('--poolcat', dest='poolcat', default="freshConstants.xml", help='Catalog of POOL files', type=str)
+    args = parser.parse_args()
+    if 'help' in args and args.help is not None and args.help:
+        parser.print_help()
+        sys.exit(0)
+
+
+
+    
+    #Import the MainServices (boilerplate)
+    from AthenaConfiguration.MainServicesConfig import MainServicesCfg
+   
+    #Import the flag-container that is the argument to the configuration methods
+    from AthenaConfiguration.AllConfigFlags import initConfigFlags
+    from LArCalibProcessing.LArCalibConfigFlags import addLArCalibFlags
+    flags=initConfigFlags()
+    addLArCalibFlags(flags, args.supercells)
+
+    #Now we set the flags as required for this particular job:
+    flags.LArCalib.Input.Database = args.insql
+       
+    flags.IOVDb.DBConnection="sqlite://;schema="+args.outsql +";dbname=CONDBR2"
+
+    #The global tag we are working with
+    flags.IOVDb.GlobalTag = "LARCALIB-RUN2-00"
+    # geometry
+    from AthenaConfiguration.TestDefaults import defaultGeometryTags
+    flags.GeoModel.AtlasVersion = defaultGeometryTags.RUN3
+
+    flags.Input.Files=[]
+    flags.LArCalib.Input.Files = [ ]
+    flags.LArCalib.IOVStart = args.iovstart
+
+    flags.LArCalib.Input.RunNumbers=[int(args.run),]
+    flags.Input.RunNumbers=flags.LArCalib.Input.RunNumbers
+
+    flags.dump()
+    flags.lock()
+   
+    try:
+       import importlib
+       mypath=os.getcwd()
+       from pathlib import Path
+       path=Path(mypath)
+       sys.path.append(str(path.parent))
+       module = importlib.import_module('.'+args.infold, package=str(path.name))
+    except Exception as e:
+       print(e)
+       sys.exit(-1)
+
+    cfg=MainServicesCfg(flags)
+    cfg.merge(LArToCoolInlineCfg(flags,module.inputFolders))
+
+    cfg.getService("PoolSvc").WriteCatalog=("xmlcatalog_file:%s"%args.poolcat)
+    cfg.getService("PoolSvc").ReadCatalog+=["xmlcatalog_file:PoolFileCatalog.xml",]
+    cfg.getService("PoolSvc").SortReplicas = False 
+
+    cfg.getService("MessageSvc").debugLimit=9999999
+
+    cfg.run(1)
diff --git a/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_postProcessingConfig.py b/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_postProcessingConfig.py
old mode 100644
new mode 100755
index 0728aa4657dffb49c1e0f4b29b9853596553b744..86a025c10225371ed044f6f6819674fa9f2a2093
--- a/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_postProcessingConfig.py
+++ b/LArCalorimeter/LArExample/LArCalibProcessing/python/LArCalib_postProcessingConfig.py
@@ -1,3 +1,4 @@
+#!/usr/bin/env python
 # Copyright (C) 2002-2023 CERN for the benefit of the ATLAS collaboration
 
 from AthenaConfiguration.ComponentAccumulator import ComponentAccumulator
@@ -64,6 +65,9 @@ if __name__=="__main__":
 
     #The global tag we are working with
     flags.IOVDb.GlobalTag = "LARCALIB-RUN2-00"
+    # geometry
+    from AthenaConfiguration.TestDefaults import defaultGeometryTags
+    flags.GeoModel.AtlasVersion = defaultGeometryTags.RUN3
 
     flags.Input.Files=[]
     flags.LArCalib.Input.Files = [ ]
@@ -72,7 +76,7 @@ if __name__=="__main__":
 
     flags.LArCalib.PhysACuseHG=True
     flags.LArCalib.OFC.ShapeCorrection=True
-
+    flags.LArCalib.OFC.UsePhysCalibTDiff = False
 
     flags.LAr.doAlign=False
     flags.Input.RunNumbers=flags.LArCalib.Input.RunNumbers
@@ -84,5 +88,6 @@ if __name__=="__main__":
     cfg.merge(finalOFCShapeCfg(flags))
 
     cfg.getService("PoolSvc").ReadCatalog+=["xmlcatalog_file:%s"%args.poolcat,]
+    cfg.getService("PoolSvc").WriteCatalog="xmlcatalog_file:%s"%args.poolcat
 
     sys.exit(cfg.run(1).isFailure())
diff --git a/LArCalorimeter/LArExample/LArCalibProcessing/python/utils.py b/LArCalorimeter/LArExample/LArCalibProcessing/python/utils.py
index 0cee9c7b6f34012b46497fdffefd460780bfa237..e829f3222a963adb0f56f8a944ed661a57cac0ba 100644
--- a/LArCalorimeter/LArExample/LArCalibProcessing/python/utils.py
+++ b/LArCalorimeter/LArExample/LArCalibProcessing/python/utils.py
@@ -51,6 +51,10 @@ class FolderTagResolver:
                 self._msg.warning("\tCould not resolve global tag %s",globalTag)
                 self._msg.warning("\tFalling back to default tag %s",foldertag)
                 pass
+            except cool.TagRelationNotFound:
+                self._msg.warning("\tCould not find tag relation to %s",globalTag)
+                self._msg.warning("\tFalling back to default tag %s",foldertag)
+                pass
         return foldertag
           
             
diff --git a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_ToCoolInline.py b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_ToCoolInline.py
index 4b510e00fea6d4436649b294b7e661555e0d3a38..591cf950caa5d5cb898157a6f188eacaf7f34fa7 100644
--- a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_ToCoolInline.py
+++ b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArCalib_ToCoolInline.py
@@ -57,9 +57,6 @@ if "inputFolders" not in dir():
                ]
 
 
-if not 'online' in dir():
-   online = True #False
-
 if os.access(sqliteOut,os.F_OK):
   printfunc ("File",sqliteOut,"exists already, removing ....")
   os.remove(sqliteOut)
@@ -76,12 +73,13 @@ globalflags.DatabaseInstance=dbname
 from AthenaCommon.AlgSequence import AlgSequence 
 topSequence = AlgSequence()  
 
+online=True
 include ("LArConditionsCommon/LArMinimalSetup.py")
 from LArCabling.LArCablingAccess import LArOnOffIdMapping
 LArOnOffIdMapping()
 if SuperCells:
-  from LArCabling.LArCablingAccess import LArOnOffIdMappingSC,LArCalibIdMappingSC
-  LArOnOffIdMappingSC()
+   from LArCabling.LArCablingAccess import LArOnOffIdMappingSC,LArCalibIdMappingSC
+   LArOnOffIdMappingSC()
 
 
 theApp.EvtMax = 1
@@ -89,7 +87,7 @@ conddb.setGlobalTag(globalTag) #For id mapping
 
 svcMgr.PoolSvc.SortReplicas=False
 
-svcMgr.EventSelector.RunNumber = 2147483647
+svcMgr.EventSelector.RunNumber = 999999
 
 from LArCalibTools.LArCalibToolsConf import LArCompleteToFlat
 theLArCompleteToFlat=LArCompleteToFlat()
@@ -154,6 +152,7 @@ svcMgr.IOVRegistrationSvc.OverrideTypes = types;
 
 svcMgr.DetectorStore.Dump=True
 
+svcMgr.PoolSvc.ReadCatalog += ["xmlcatalog_file:PoolFileCatalog.xml"]
 
 if 'poolcat' in dir():
   svcMgr.PoolSvc.WriteCatalog="xmlcatalog_file:"+poolcat
diff --git a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_DelayDump_OFC_Cali.py b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_DelayDump_OFC_Cali.py
index d056dfb0af9392f6a93405f93020608265f11b47..e0aa962cf9a86d3b9bbb5e63d9508d830f33db02 100755
--- a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_DelayDump_OFC_Cali.py
+++ b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_DelayDump_OFC_Cali.py
@@ -89,6 +89,9 @@ if __name__=='__main__':
 
    #The global tag we are working with
    flags.IOVDb.GlobalTag = "LARCALIB-RUN2-00"
+
+   from AthenaConfiguration.TestDefaults import defaultGeometryTags
+   flags.GeoModel.AtlasVersion = defaultGeometryTags.RUN3
    
    #Define the global output Level:
    from AthenaCommon.Constants import INFO 
diff --git a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_Delay_OFC_Cali.py b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_Delay_OFC_Cali.py
index c68df5d05765a037b1f0fb691b5371e50446a763..4107d99ef71b5ae2284db96a761a025c48db42e5 100755
--- a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_Delay_OFC_Cali.py
+++ b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_Delay_OFC_Cali.py
@@ -14,7 +14,7 @@ if __name__=='__main__':
    parser.add_argument('-r','--run', dest='run', default='00408918', help='Run number string as in input filename', type=str)
    parser.add_argument('-g','--gain', dest='gain', default="MEDIUM", help='Gain string', type=str)
    parser.add_argument('-p','--partition', dest='partition', default="Em", help='Data taking partition string', type=str)
-   parser.add_argument('-f','--fileprefix', dest='fprefix', default="data23_calib", help='File prefix string', type=str)
+   parser.add_argument('-f','--fileprefix', dest='fprefix', default="data24_calib", help='File prefix string', type=str)
    parser.add_argument('-i','--indirprefix', dest='dprefix', default="/eos/atlas/atlastier0/rucio/", help='Input directory prefix string', type=str)
    parser.add_argument('-d','--indir', dest='indir', default="", help='Full input dir string', type=str)
    parser.add_argument('-t','--trigger', dest='trig', default='calibration_', help='Trigger string in filename', type=str)
@@ -125,7 +125,6 @@ if __name__=='__main__':
    
    #Configure the Bad-Channel database we are reading 
    #(the AP typically uses a snapshot in an sqlite file
-   flags.LArCalib.BadChannelTag = "-RUN2-UPD3-00"
    flags.LArCalib.BadChannelDB = args.outpdir + "/" + args.badsql
    
    #Output of this job:
@@ -150,7 +149,10 @@ if __name__=='__main__':
    flags.LArCalib.Output.ROOTFile = args.outrdir + "/" + OutputCaliWaveRootFileName
    flags.LArCalib.Output.POOLFile = args.outpdir + "/" + OutputPoolFileName
    flags.LArCalib.Output.ROOTFile2 = args.outrdir + "/" + OutputOFCCaliRootFileName
-   flags.IOVDb.DBConnection="sqlite://;schema="+args.outpdir + "/" + args.outsql +";dbname=CONDBR2"
+   if args.outsql.startswith("/"):
+      flags.IOVDb.DBConnection="sqlite://;schema=" + args.outsql +";dbname=CONDBR2"
+   else:   
+      flags.IOVDb.DBConnection="sqlite://;schema="+args.outpdir + "/" + args.outsql +";dbname=CONDBR2"
 
    #The global tag we are working with
    flags.IOVDb.GlobalTag = "LARCALIB-RUN2-00"
@@ -162,12 +164,15 @@ if __name__=='__main__':
    #Other potentially useful flags-settings:
    
    #Define the global output Level:
-   from AthenaCommon.Constants import INFO 
+   from AthenaCommon.Constants import INFO
    flags.Exec.OutputLevel = INFO
    
    from AthenaConfiguration.Enums import LHCPeriod
    flags.GeoModel.Run = LHCPeriod.Run3
 
+   from AthenaConfiguration.TestDefaults import defaultGeometryTags
+   flags.GeoModel.AtlasVersion = defaultGeometryTags.RUN3
+
    flags.lock()
    flags.dump()
    
@@ -175,13 +180,14 @@ if __name__=='__main__':
    
    cfg.merge(LArDelay_OFCCaliCfg(flags))
 
+   # Switch on, in case some calib. board is failing:
    # adding new patching, again needed in summer 2023
-   if flags.LArCalib.CorrectBadChannels:
-      if flags.LArCalib.doValidation:
-         cfg.getEventAlgo("CaliWaveVal").PatchCBs=[0x3df70000]
-
-      # block standard patching for this CB
-      cfg.getEventAlgo("LArCaliWavePatch").DoNotPatchCBs=[0x3df70000]
+   #if flags.LArCalib.CorrectBadChannels:
+   #   if flags.LArCalib.doValidation:
+   #      cfg.getEventAlgo("CaliWaveVal").PatchCBs=[0x3df70000]
+   #
+   #   # block standard patching for this CB
+   #   cfg.getEventAlgo("LArCaliWavePatch").DoNotPatchCBs=[0x3df70000]
 
    # ignore some channels ?
    if args.ignoreB:
diff --git a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_PedestalAutoCorr.py b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_PedestalAutoCorr.py
index bb7cd68e31ba29afb31011b66fb21683a090652d..f6bf77652a7d03ce4e80b07a1a7ffcc825bb318a 100755
--- a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_PedestalAutoCorr.py
+++ b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_PedestalAutoCorr.py
@@ -20,7 +20,7 @@ if __name__=='__main__':
    parser.add_argument('-r','--run', dest='run', default='00408913', help='Run number string as in input filename', type=str)
    parser.add_argument('-g','--gain', dest='gain', default="MEDIUM", help='Gain string', type=str)
    parser.add_argument('-p','--partition', dest='partition', default="All", help='Partition string', type=str)
-   parser.add_argument('-f','--fileprefix', dest='fprefix', default="data23_calib", help='File prefix string', type=str)
+   parser.add_argument('-f','--fileprefix', dest='fprefix', default="data24_calib", help='File prefix string', type=str)
    parser.add_argument('-i','--indirprefix', dest='dprefix', default="/eos/atlas/atlastier0/rucio/", help='Input directory prefix string', type=str)
    parser.add_argument('-d','--indir', dest='indir', default="", help='Full input dir string', type=str)
    parser.add_argument('-t','--trigger', dest='trig', default='calibration_', help='Trigger string in filename', type=str)
@@ -83,7 +83,8 @@ if __name__=='__main__':
    flags.LArCalib.Input.RunNumbers = [int(args.run),]
    flags.LArCalib.Input.isRawData = args.rawdata
 
-
+   from AthenaConfiguration.TestDefaults import defaultGeometryTags
+   flags.GeoModel.AtlasVersion = defaultGeometryTags.RUN3
 
    # Input files
    flags.Input.Files=flags.LArCalib.Input.Files
@@ -138,18 +139,24 @@ if __name__=='__main__':
 
    flags.LArCalib.Output.ROOTFile = args.outrdir + "/" + OutputPedAutoCorrRootFileName
    flags.LArCalib.Output.POOLFile = args.outpdir + "/" + OutputPedAutoCorrPoolFileName
-   flags.IOVDb.DBConnection="sqlite://;schema="+args.outpdir + "/" + args.outsql +";dbname=CONDBR2"
+   if args.outsql.startswith("/"):
+      flags.IOVDb.DBConnection="sqlite://;schema=" + args.outsql +";dbname=CONDBR2"
+   else:   
+      flags.IOVDb.DBConnection="sqlite://;schema="+args.outpdir + "/" + args.outsql +";dbname=CONDBR2"
 
    #The global tag we are working with
    flags.IOVDb.GlobalTag = "LARCALIB-RUN2-00"
    
    #BadChannels sqlite file to be created 
-   flags.LArCalib.BadChannelDB = args.outpdir + "/" + args.badsql
+   if args.badsql.startswith("/"):
+      flags.LArCalib.BadChannelDB =  args.badsql
+   else:   
+      flags.LArCalib.BadChannelDB = args.outpdir + "/" + args.badsql
 
    #Other potentially useful flags-settings:
    
    #Define the global output Level:
-   from AthenaCommon.Constants import INFO 
+   from AthenaCommon.Constants import INFO
    flags.Exec.OutputLevel = INFO
 
    from AthenaConfiguration.Enums import LHCPeriod
@@ -158,12 +165,20 @@ if __name__=='__main__':
    flags.lock()
    
    # create bad chan sqlite file
+   cmdlinerm = (['/bin/rm', '-f', flags.LArCalib.BadChannelDB])
    if not flags.LArCalib.isSC:
       cmdline = (['AtlCoolCopy', 'COOLOFL_LAR/CONDBR2', 'sqlite://;schema='+flags.LArCalib.BadChannelDB+';dbname=CONDBR2', '-f', '/LAR/BadChannelsOfl/BadChannels',  '-f', '/LAR/BadChannelsOfl/MissingFEBs', '-t', flags.IOVDb.GlobalTag, '-c', '-a',  '-hitag'])
    else:   
-      cmdline = (['AtlCoolCopy', 'COOLOFL_LAR/CONDBR2', 'sqlite://;schema='+flags.LArCalib.BadChannelDB+';dbname=CONDBR2', '-f', '/LAR/BadChannelsOfl/BadChannels',  '-of', '/LAR/BadChannelsOfl/BadChannelsSC', '-t', 'LARBadChannelsOflBadChannels-RUN2-empty', '-ot', 'LARBadChannelsOflBadChannelsSC-RUN2-UPD3-00', '-c', '-a',  '-hitag', '-ch', '0'])
-      cmdline1 = (['AtlCoolCopy', 'COOLOFL_LAR/CONDBR2', 'sqlite://;schema='+flags.LArCalib.BadChannelDB+';dbname=CONDBR2', '-f', '/LAR/BadChannelsOfl/MissingFEBs', '-of', '/LAR/BadChannelsOfl/MissingFEBsSC', '-t', flags.IOVDb.GlobalTag, '-ot', 'LARBadChannelsOflMissingFEBsSC-RUN2-UPD3-01', '-a',  '-hitag'])
+      cmdline = (['AtlCoolCopy', 'COOLOFL_LAR/CONDBR2', 'sqlite://;schema='+flags.LArCalib.BadChannelDB+';dbname=CONDBR2', '-f', '/LAR/BadChannelsOfl/BadChannelsSC',  '-t', 'LARBadChannelsOflBadChannelsSC'+flags.LArCalib.BadChannelTagSC, '-c', '-a',  '-hitag', '-ch', '0'])
 
+   try:
+      cp = subprocess.run(cmdlinerm, check=True, capture_output=True )
+   except Exception as e:
+      print((" ").join(cmdlinerm))
+      log.info('not existing BadChan sqlite file, fine')
+      sys.exit(-1)
+   print((" ").join(cmdlinerm))
+   print(cp.stdout)
    try:
       cp = subprocess.run(cmdline, check=True, capture_output=True )
    except Exception as e:
@@ -171,14 +186,9 @@ if __name__=='__main__':
       print((" ").join(cmdline))
       log.error('Could not create BadChan sqlite file !!!!')
       sys.exit(-1)
+   print((" ").join(cmdline))
+   print(cp.stdout)
  
-   if flags.LArCalib.isSC:
-      try:
-         cp = subprocess.run(cmdline1, check=True, capture_output=True )
-      except Exception as e:
-         log.error('Could not create BadChan sqlite file !!!!')
-         sys.exit(-1)
-   
    cfg=MainServicesCfg(flags)
 
    cfg.merge(LArPedestalAutoCorrCfg(flags))
@@ -187,7 +197,10 @@ if __name__=='__main__':
    cfg.run() 
 
    #build tag hierarchy in output sqlite file
-   cmdline = (['/afs/cern.ch/user/l/larcalib/LArDBTools/python/BuildTagHierarchy.py',args.outpdir + "/" + args.outsql , flags.IOVDb.GlobalTag])
+   if args.outsql.startswith("/"):
+      cmdline = (['/afs/cern.ch/user/l/larcalib/LArDBTools/python/BuildTagHierarchy.py', args.outsql , flags.IOVDb.GlobalTag])
+   else:   
+      cmdline = (['/afs/cern.ch/user/l/larcalib/LArDBTools/python/BuildTagHierarchy.py',args.outpdir + "/" + args.outsql , flags.IOVDb.GlobalTag])
    log.debug(cmdline)
    try:
       subprocess.run(cmdline, check=True)
diff --git a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_PhysAutoCorr.py b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_PhysAutoCorr.py
index 9dd4c62a7d115f9ab61148a1a3546dd23cadd894..aaad4b9d666fee8286ce2e910db68c8c401b2d00 100755
--- a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_PhysAutoCorr.py
+++ b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_PhysAutoCorr.py
@@ -52,7 +52,10 @@ if __name__=='__main__':
    gainNumMap={"HIGH":0,"MEDIUM":1,"LOW":2}
    flags.LArCalib.Gain=gainNumMap[args.gain.upper()]
 
-   flags.LArCalib.Input.Database = args.outpdir + "/" + args.insqlitefile
+   if args.insqlitefile.startswith("/"):
+      flags.LArCalib.Input.Database = args.insqlitefile
+   else:
+      flags.LArCalib.Input.Database = args.outpdir + "/" + args.insqlitefile
 
    # pileup normalisation
    flags.LArCalib.OFC.Ncoll = 60
@@ -67,7 +70,6 @@ if __name__=='__main__':
 
    #Configure the Bad-Channel database we are reading 
    #(the AP typically uses a snapshot in an sqlite file
-   flags.LArCalib.BadChannelTag = "-RUN2-UPD3-00"
    flags.LArCalib.BadChannelDB = args.badsql
    
    #Output of this job 
@@ -84,14 +86,20 @@ if __name__=='__main__':
 
    flags.LArCalib.Output.ROOTFile = args.outrdir + "/" + OutputRootFileName
    flags.LArCalib.Output.POOLFile = args.outpdir + "/" + OutputPoolFileName
-   flags.IOVDb.DBConnection="sqlite://;schema="+args.outpdir + "/" + args.outsql +";dbname=CONDBR2"
+   if args.outsql.startswith("/"):
+      flags.IOVDb.DBConnection="sqlite://;schema=" + args.outsql +";dbname=CONDBR2"
+   else:
+      flags.IOVDb.DBConnection="sqlite://;schema="+args.outpdir + "/" + args.outsql +";dbname=CONDBR2"
 
    #The global tag we are working with
    flags.IOVDb.GlobalTag = "LARCALIB-RUN2-00"
    
+   from AthenaConfiguration.TestDefaults import defaultGeometryTags
+   flags.GeoModel.AtlasVersion = defaultGeometryTags.RUN3
+
    #Define the global output Level:
-   from AthenaCommon.Constants import DEBUG
-   flags.Exec.OutputLevel = DEBUG
+   from AthenaCommon.Constants import INFO
+   flags.Exec.OutputLevel = INFO
    
    flags.lock()
    
diff --git a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_PhysOFC.py b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_PhysOFC.py
index de2237eb3e106b85c4f69d642522d1a664728625..f96210cb48fcc4d7296cba240f8e076198c86afa 100755
--- a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_PhysOFC.py
+++ b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_PhysOFC.py
@@ -66,7 +66,6 @@ if __name__=='__main__':
 
    #Configure the Bad-Channel database we are reading 
    #(the AP typically uses a snapshot in an sqlite file
-   flags.LArCalib.BadChannelTag = "-RUN2-UPD3-00"
    flags.LArCalib.BadChannelDB = args.badsql
    
    #Output of this job 
@@ -88,6 +87,9 @@ if __name__=='__main__':
    #The global tag we are working with
    flags.IOVDb.GlobalTag = "LARCALIB-RUN2-00"
    
+   from AthenaConfiguration.TestDefaults import defaultGeometryTags
+   flags.GeoModel.AtlasVersion = defaultGeometryTags.RUN3
+
    #Define the global output Level:
    from AthenaCommon.Constants import INFO 
    flags.Exec.OutputLevel = INFO
@@ -102,3 +104,13 @@ if __name__=='__main__':
    #run the application
    cfg.run(1) 
 
+   #build tag hierarchy in output sqlite file
+   import subprocess
+   cmdline = (['/afs/cern.ch/user/l/larcalib/LArDBTools/python/BuildTagHierarchy.py',args.outpdir + "/" + args.outsql , flags.IOVDb.GlobalTag])
+   print(cmdline)
+   try:
+      subprocess.run(cmdline, check=True)
+   except Exception as e:
+      print('Could not create tag hierarchy in output sqlite file !!!!')
+      sys.exit(-1)
+
diff --git a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_PhysOFCPhasePicker.py b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_PhysOFCPhasePicker.py
index 76c7bba8071f1fc34aaf2ef8ce11d22884ecc4e0..a369ff1ecc812b7a4b0ca810c984de2f5f16048c 100755
--- a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_PhysOFCPhasePicker.py
+++ b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_PhysOFCPhasePicker.py
@@ -63,7 +63,6 @@ if __name__=='__main__':
 
    #Configure the Bad-Channel database we are reading 
    #(the AP typically uses a snapshot in an sqlite file
-   flags.LArCalib.BadChannelTag = "-RUN2-UPD3-00"
    flags.LArCalib.BadChannelDB = args.badsql
    
    #Output of this job 
@@ -84,6 +83,9 @@ if __name__=='__main__':
 
    #The global tag we are working with
    flags.IOVDb.GlobalTag = "LARCALIB-RUN2-00"
+
+   from AthenaConfiguration.TestDefaults import defaultGeometryTags
+   flags.GeoModel.AtlasVersion = defaultGeometryTags.RUN3
    
    #Define the global output Level:
    from AthenaCommon.Constants import INFO
diff --git a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_PhysWave.py b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_PhysWave.py
index 4c5bf390723d7c7385046e4e66753f910059d4f3..6deac5e8818048de0e8e0439e67f3d83a0f99546 100755
--- a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_PhysWave.py
+++ b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_PhysWave.py
@@ -53,15 +53,16 @@ if __name__=='__main__':
    gainNumMap={"HIGH":0,"MEDIUM":1,"LOW":2}
    flags.LArCalib.Gain=gainNumMap[args.gain.upper()]
 
-   flags.LArCalib.Input.Database = args.outpdir + "/" + args.insqlitefile
+   if args.insqlitefile.startswith("/"):
+      flags.LArCalib.Input.Database = args.insqlitefile
+   else:   
+      flags.LArCalib.Input.Database = args.outpdir + "/" + args.insqlitefile
 
    # others flags settings
-   flags.LArCalib.isSC = args.supercells
    flags.LArCalib.Input.SubDet=args.subdet
 
    #Configure the Bad-Channel database we are reading 
    #(the AP typically uses a snapshot in an sqlite file
-   flags.LArCalib.BadChannelTag = "-RUN2-UPD3-00"
    flags.LArCalib.BadChannelDB = args.badsql
    
    #Output of this job 
@@ -78,13 +79,19 @@ if __name__=='__main__':
 
    flags.LArCalib.Output.ROOTFile = args.outrdir + "/" + OutputRootFileName
    flags.LArCalib.Output.POOLFile = args.outpdir + "/" + OutputPoolFileName
-   flags.IOVDb.DBConnection="sqlite://;schema="+args.outpdir + "/" + args.outsql +";dbname=CONDBR2"
+   if args.outsql.startswith("/"):
+      flags.IOVDb.DBConnection="sqlite://;schema=" + args.outsql +";dbname=CONDBR2"
+   else:   
+      flags.IOVDb.DBConnection="sqlite://;schema="+args.outpdir + "/" + args.outsql +";dbname=CONDBR2"
 
    #The global tag we are working with
    flags.IOVDb.GlobalTag = "LARCALIB-RUN2-00"
    
+   from AthenaConfiguration.TestDefaults import defaultGeometryTags
+   flags.GeoModel.AtlasVersion = defaultGeometryTags.RUN3
+
    #Define the global output Level:
-   from AthenaCommon.Constants import INFO,VERBOSE
+   from AthenaCommon.Constants import INFO
    flags.Exec.OutputLevel = INFO
    
    flags.lock()
@@ -93,15 +100,16 @@ if __name__=='__main__':
    
    cfg.merge(LArPhysWavePredictionCfg(flags))
 
-   cfg.getEventAlgo("LArPhysWavePredictor").OutputLevel = VERBOSE
+   cfg.printConfig(withDetails=True,printDefaults=True)
+
    cfg.getService("MessageSvc").defaultLimit = 9999999  # all messages
 
    #run the application
    cfg.run(1) 
 
    #Copying the FCAL phys wave to sqlite here 
-   if "HEC" in args.subdet.upper():
-      copycmd='AtlCoolCopy "COOLOFL_LAR/CONDBR2" "'+flags.IOVDb.DBConnection+'" -f /LAR/ElecCalibOfl/PhysWaves/FCALFromTB -t LARElecCalibOflPhysWavesFCALFromTB-calib-01 -of /LAR/ElecCalibOfl/PhysWaves/RTM  -ot LARElecCalibOflPhysWavesRTM-RUN2-UPD3-00  -a '
+   if  "HEC" in args.subdet.upper():
+      copycmd='AtlCoolCopy "COOLOFL_LAR/CONDBR2" "'+flags.IOVDb.DBConnection+'" -f '+ flags.LArCalib.FCALPhysWave.Folder+' -t LARElecCalibOflPhysWavesFCALFromTB-calib-01 -of /LAR/ElecCalibOfl/PhysWaves/RTM  -ot LARElecCalibOflPhysWavesRTM-RUN2-UPD3-00  -a '
       from subprocess import getstatusoutput
       stat,out=getstatusoutput(copycmd)
  
diff --git a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_Ramp.py b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_Ramp.py
index bb572b62b5603bc9e0f81663f29eba8f18e9ac86..b53fecd31ef8bb756a49fb91e5d165c98e6d3308 100755
--- a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_Ramp.py
+++ b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_Ramp.py
@@ -14,7 +14,7 @@ if __name__=='__main__':
    parser.add_argument('-r','--run', dest='run', default='00408920', help='Run number string as in input filename', type=str)
    parser.add_argument('-g','--gain', dest='gain', default="MEDIUM", help='Gain string', type=str)
    parser.add_argument('-p','--partition', dest='partition', default="Em", help='Data taking partition string', type=str)
-   parser.add_argument('-f','--fileprefix', dest='fprefix', default="data23_calib", help='File prefix string', type=str)
+   parser.add_argument('-f','--fileprefix', dest='fprefix', default="data24_calib", help='File prefix string', type=str)
    parser.add_argument('-i','--indirprefix', dest='dprefix', default="/eos/atlas/atlastier0/rucio/", help='Input directory prefix string', type=str)
    parser.add_argument('-d','--indir', dest='indir', default="", help='Full input dir string', type=str)
    parser.add_argument('-t','--trigger', dest='trig', default='calibration_', help='Trigger string in filename', type=str)
@@ -32,6 +32,7 @@ if __name__=='__main__':
    parser.add_argument('-b','--badchansqlite', dest='badsql', default="SnapshotBadChannel.db", help='Output sqlite file, in pool output dir.', type=str)
    parser.add_argument('-x','--ignoreBarrel', dest='ignoreB', default=False, action="store_true", help='ignore Barrel channels ?')
    parser.add_argument('-v','--ignoreEndcap', dest='ignoreE', default=False, action="store_true", help='ignore Endcap channels ?')
+   parser.add_argument('-w','--doValid', dest='doValid', default=False, action="store_true", help='run validation ?')
 
 
    args = parser.parse_args()
@@ -83,7 +84,10 @@ if __name__=='__main__':
    flags.LArCalib.Input.Type = args.trig
    flags.LArCalib.Input.RunNumbers = [int(args.run),]
    flags.LArCalib.Input.Database = args.outpdir + "/" +args.inpsql
-   flags.LArCalib.Input.Database2 = args.outpdir + "/" +args.inofcsql
+   if 'db' in args.inofcsql:
+      flags.LArCalib.Input.Database2 = args.outpdir + "/" +args.inofcsql
+   else:   
+      flags.LArCalib.Input.Database2 = args.inofcsql
    gainNumMap={"HIGH":0,"MEDIUM":1,"LOW":2}
    flags.LArCalib.Gain=gainNumMap[args.gain.upper()]
 
@@ -132,7 +136,6 @@ if __name__=='__main__':
    #Configure the Bad-Channel database we are reading 
    #(the AP typically uses a snapshot in an sqlite file
    flags.LArCalib.BadChannelDB = args.outpdir + "/" + args.badsql
-   flags.LArCalib.BadChannelTag = "-RUN2-UPD3-00"
    
    #Output of this job:
    OutputRampRootFileName = args.outrprefix + "_" + args.run
@@ -155,12 +158,22 @@ if __name__=='__main__':
    #The global tag we are working with
    flags.IOVDb.GlobalTag = "LARCALIB-RUN2-00"
    
-   #Other potentially useful flags-settings:
+   from AthenaConfiguration.TestDefaults import defaultGeometryTags
+   flags.GeoModel.AtlasVersion = defaultGeometryTags.RUN3
+
+   #run validation:
+   if flags.LArCalib.isSC:
+      flags.LArCalib.doValidation= args.doValid
+   else:   
+      flags.LArCalib.doValidation=True 
    
    #Define the global output Level:
+   
    from AthenaCommon.Constants import INFO 
    flags.Exec.OutputLevel = INFO
-   
+   from AthenaCommon.Constants import DEBUG
+   #flags.Exec.OutputLevel = DEBUG
+
    from AthenaConfiguration.Enums import LHCPeriod
    flags.GeoModel.Run = LHCPeriod.Run3
 
@@ -170,13 +183,23 @@ if __name__=='__main__':
    
    cfg.merge(LArRampCfg(flags))
 
-   # adding new patching, again needed in summer 2023
-   if flags.LArCalib.CorrectBadChannels:
-      if flags.LArCalib.doValidation:
-         cfg.getEventAlgo("RampVal").PatchCBs=[0x3df70000]
+   # all debug messages
+   cfg.getService("MessageSvc").debugLimit = 9999999
+   cfg.printConfig()
+   # in case debug is needed
+   #if flags.LArCalib.doValidation:
+   #   from AthenaCommon.Constants import DEBUG
+   #   cfg.getEventAlgo("LArRampPatcher").OutputLevel=DEBUG
+   #   cfg.getEventAlgo("RampVal").OutputLevel=DEBUG
 
-      # block standard patching for this CB
-      cfg.getEventAlgo("LArRampPatcher").DoNotPatchCBs=[0x3df70000]
+   # switch on if some calib. board is failing:
+   # adding new patching, again needed in summer 2023
+   #if flags.LArCalib.CorrectBadChannels:
+   #   if flags.LArCalib.doValidation:
+   #      cfg.getEventAlgo("RampVal").PatchCBs=[0x3fc70000]
+   #
+   #   # block standard patching for this CB
+   #   cfg.getEventAlgo("LArRampPatcher").DoNotPatchCBs=[0x3fc70000]
 
    # ignore some channels ?
    if args.ignoreB:
@@ -184,6 +207,8 @@ if __name__=='__main__':
    if args.ignoreE:
        cfg.getEventAlgo("LArRawSCCalibDataReadingAlg").LATOMEDecoder.IgnoreEndcapChannels=args.ignoreE
 
+   # all messages
+   cfg.getService("MessageSvc").debugLimit = 99999999
 
    #run the application
    cfg.run() 
diff --git a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_tauR.py b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_tauR.py
index f74ccf0d66c3f428819d3b633f32962d66ea0347..ac05556a49dfa24db1fe5730f4b882f6b91a1053 100755
--- a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_tauR.py
+++ b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNewCalib_tauR.py
@@ -61,7 +61,6 @@ if __name__=='__main__':
 
    #Configure the Bad-Channel database we are reading 
    #(the AP typically uses a snapshot in an sqlite file
-   flags.LArCalib.BadChannelTag = "-RUN2-UPD3-00"
    flags.LArCalib.BadChannelDB = args.badsql
    
    #Output of this job 
@@ -84,8 +83,11 @@ if __name__=='__main__':
    #The global tag we are working with
    flags.IOVDb.GlobalTag = "LARCALIB-RUN2-00"
    
+   from AthenaConfiguration.TestDefaults import defaultGeometryTags
+   flags.GeoModel.AtlasVersion = defaultGeometryTags.RUN3
+
    #Define the global output Level:
-   from AthenaCommon.Constants import INFO 
+   from AthenaCommon.Constants import INFO
    flags.Exec.OutputLevel = INFO
    
    flags.LArCalib.RTM.ExtractAll=True
diff --git a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNoiseCorrelationMon.py b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNoiseCorrelationMon.py
index 50cd6d9a78051e49fa0a1c4ad97da5405c161634..ab786ccacd2af066a4af1c4bc0e4ff20ae817854 100755
--- a/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNoiseCorrelationMon.py
+++ b/LArCalorimeter/LArExample/LArCalibProcessing/share/LArNoiseCorrelationMon.py
@@ -98,7 +98,6 @@ if __name__=='__main__':
      if len(flags.LArCalib.Preselection.Side) > 0 or len(flags.LArCalib.Preselection.BEC) > 0 or len(flags.LArCalib.Preselection.FT) > 0:
         log.warning('No preselection yet in reading physics data !!!') 
             
-
   # we need pedestals
   from LArConfiguration.LArElecCalibDBConfig import LArElecCalibDBCfg
   cfg.merge(LArElecCalibDBCfg(flags,["Pedestal"]))
@@ -117,7 +116,6 @@ if __name__=='__main__':
      ppa.Interval = 1000000 # Big number (>evtMax) to do postprocessing during finalization
      rn=flags.Input.RunNumber[0]
      ppa.FileKey = f'/{flags.DQ.FileKey}/run_{rn}/'
-
      cfg.addEventAlgo(ppa, sequenceName='AthEndSeq')
   
   if args.skipev > 0:
diff --git a/LArCalorimeter/LArRecUtils/python/LArAutoCorrNoiseCondAlgDefault.py b/LArCalorimeter/LArRecUtils/python/LArAutoCorrNoiseCondAlgDefault.py
deleted file mode 100644
index 409e0c095886241af7d96d6f9097e3ab619a1159..0000000000000000000000000000000000000000
--- a/LArCalorimeter/LArRecUtils/python/LArAutoCorrNoiseCondAlgDefault.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
-
-from AthenaCommon.Include import include
-from IOVDbSvc.CondDB import conddb
-
-if conddb.isMC:
-    include("LArConditionsCommon/LArConditionsCommon_MC_jobOptions.py")
-else:
-    include("LArConditionsCommon/LArConditionsCommon_comm_jobOptions.py")
-
-from LArRecUtils.LArRecUtilsConf import LArAutoCorrNoiseCondAlg 
-from AthenaCommon.AlgSequence import AthSequencer
-from LArCabling.LArCablingAccess import LArOnOffIdMapping
-from LArRecUtils.LArMCSymCondAlg import LArMCSymCondAlgDefault
-condSeq = AthSequencer("AthCondSeq")
-
-
-def LArAutoCorrNoiseCondAlgDefault():
-
-    LArOnOffIdMapping()
-    LArMCSymCondAlgDefault()
-    condSeq = AthSequencer("AthCondSeq")
-    if hasattr (condSeq,"LArAutoCorrNoiseCondAlg"):
-        return getattr(condSeq,"LArAutoCorrNoiseCondAlg")
-
-    theAutoCorrNoiseCondAlg=LArAutoCorrNoiseCondAlg("LArAutoCorrNoiseCondAlg")
-
-    from LArROD.LArRODFlags import larRODFlags
-    theAutoCorrNoiseCondAlg.nSampl = larRODFlags.nSamples()
- 
-    condSeq+=theAutoCorrNoiseCondAlg
-    return theAutoCorrNoiseCondAlg
diff --git a/LArCalorimeter/LArRecUtils/python/LArAutoCorrNoiseSCCondAlgDefault.py b/LArCalorimeter/LArRecUtils/python/LArAutoCorrNoiseSCCondAlgDefault.py
deleted file mode 100644
index a9af6dd8bc8748353806b1f83add5bf1eebe3331..0000000000000000000000000000000000000000
--- a/LArCalorimeter/LArRecUtils/python/LArAutoCorrNoiseSCCondAlgDefault.py
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
-
-from IOVDbSvc.CondDB import conddb
-from LArRecUtils.LArRecUtilsConf import LArAutoCorrNoiseCondAlg 
-from AthenaCommon.AlgSequence import AthSequencer
-from LArCabling.LArCablingAccess import LArOnOffIdMappingSC
-condSeq = AthSequencer("AthCondSeq")
-
-def addLArFlatFolder (db, obj, calg, folder_base='/LAR/ElecCalibFlat/',qual=''):
-    from AthenaCommon.AlgSequence import AthSequencer
-    condSequence = AthSequencer("AthCondSeq")
-
-    folder = folder_base + obj
-    if not conddb.folderRequested(folder):
-      conddb.addFolder(db, folder + qual,
-                     className = 'CondAttrListCollection')
-      condSequence += calg (ReadKey=folder, WriteKey='LAr'+obj+'SC')
-    return
-
-def LArAutoCorrNoiseSCCondAlgDefault():
-
-    LArOnOffIdMappingSC()
-    condSeq = AthSequencer("AthCondSeq")
-    if hasattr (condSeq,"LArAutoCorrNoiseSCCondAlg"):
-        return getattr(condSeq,"LArAutoCorrNoiseSCCondAlg")
-
-    from LArRecUtils.LArRecUtilsConf import LArFlatConditionsAlg_LArAutoCorrSC_ as LArAutoCorrSCCondAlg
-    addLArFlatFolder ('LAR_OFL', 'AutoCorr', LArAutoCorrSCCondAlg,'/LAR/ElecCalibMCSC/')
-    theAutoCorrNoiseCondAlg=LArAutoCorrNoiseCondAlg("LArAutoCorrNoiseSCCondAlg",isSuperCell=True,LArAutoCorrNoiseObjKey="LArAutoCorrNoiseSC",LArAutoCorrObjKey="LArAutoCorrSC",LArOnOffIdMappingObjKey='LArOnOffIdMapSC')
-
-    from LArROD.LArRODFlags import larRODFlags
-    # we need to keep one sample before, another after
-    theAutoCorrNoiseCondAlg.nSampl = larRODFlags.nSamples() + 2 # See ATLASSIM-5483
- 
-    condSeq+=theAutoCorrNoiseCondAlg
-    return theAutoCorrNoiseCondAlg
diff --git a/MuonSpectrometer/MuonConfig/python/MuonSegmentFindingConfig.py b/MuonSpectrometer/MuonConfig/python/MuonSegmentFindingConfig.py
index 34cb28fddae8f23fbf160d0b7197f3450eab9f47..21105830640eab6d0344e8f4bdf022094dc9b331 100644
--- a/MuonSpectrometer/MuonConfig/python/MuonSegmentFindingConfig.py
+++ b/MuonSpectrometer/MuonConfig/python/MuonSegmentFindingConfig.py
@@ -5,21 +5,12 @@
 # https://gitlab.cern.ch/atlas/athena/blob/master/MuonSpectrometer/MuonReconstruction/MuonRecExample/python/MuonRecTools.py
 # https://gitlab.cern.ch/atlas/athena/blob/master/MuonSpectrometer/MuonReconstruction/MuonRecExample/python/MooreTools.py
 # from https://gitlab.cern.ch/atlas/athena/blob/master/MuonSpectrometer/MuonReconstruction/MuonRecExample/python/CscTools.py
-from AthenaConfiguration.ComponentFactory import CompFactory
-#
-# and algorithms which are defined in several places:
-# 
-#
-# When porting it I have tried to remove anything redundant, and I have tried to simplify the number of configuration techniques 
-# used, so for example I'm no longer using CfgGetter, and am minimising the use of CfgMgr,
 
 # Core
+from AthenaConfiguration.ComponentFactory import CompFactory
 from AthenaConfiguration.ComponentAccumulator import ComponentAccumulator
 from AthenaConfiguration.Enums import BeamType, Format
 
-# Muon
-# Csc2dSegmentMaker, Csc4dSegmentMaker=CompFactory.getComps("Csc2dSegmentMaker","Csc4dSegmentMaker",)
-
 #Local
 from TrkConfig.TrkGlobalChi2FitterConfig import MCTBFitterCfg, MCTBSLFitterCfg, MCTBSLFitterMaterialFromTrackCfg
 from MuonConfig.MuonRecToolsConfig import MuonAmbiProcessorCfg, MuonTrackCleanerCfg, MuonEDMPrinterToolCfg
diff --git a/MuonSpectrometer/MuonValidation/MuonDQA/MuonRawDataMonitoring/MdtRawDataMonitoring/python/MDTMonitorAlgorithm.py b/MuonSpectrometer/MuonValidation/MuonDQA/MuonRawDataMonitoring/MdtRawDataMonitoring/python/MDTMonitorAlgorithm.py
index 7bea88f3ec4e071683cf6f435d594b27581fc9fe..22a3ebb6bc2ee7d1c6f8e7f80869bc01dad3d526 100644
--- a/MuonSpectrometer/MuonValidation/MuonDQA/MuonRawDataMonitoring/MdtRawDataMonitoring/python/MDTMonitorAlgorithm.py
+++ b/MuonSpectrometer/MuonValidation/MuonDQA/MuonRawDataMonitoring/MdtRawDataMonitoring/python/MDTMonitorAlgorithm.py
@@ -36,8 +36,6 @@ def MdtMonitoringConfig(inputFlags):
     from AthenaMonitoring import AthMonitorCfgHelper
     helper = AthMonitorCfgHelper(inputFlags,'MdtAthMonitorCfg')
     
-    #from AthenaCommon.CfgGetter import getAlgorithm
-
     ### STEP 2 ###
     # Adding an algorithm to the helper. Here, we will use the example 
     # algorithm in the AthenaMonitoring package. Just pass the type to the 
diff --git a/Reconstruction/HeavyIonRec/HIGlobal/python/HIGlobalFlags.py b/Reconstruction/HeavyIonRec/HIGlobal/python/HIGlobalFlags.py
deleted file mode 100644
index 93a49dec626343e2bde1cba699bbc4572f3499af..0000000000000000000000000000000000000000
--- a/Reconstruction/HeavyIonRec/HIGlobal/python/HIGlobalFlags.py
+++ /dev/null
@@ -1,160 +0,0 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-# @file: HIGlobalFlags.py
-# @purpose: a container of flags for Heavy Ion Reconstruction of Global variables
-# @author: Andrzej Olszewski <Andrzej.Olszewski@ifj.edu.pl>
-
-"""  A container of flags for heavy ion global reconstruction  
-
-"""
-#
-#
-__author__  = 'Andrzej Olszewski'
-__version__ = "$Revision: 1.3 $"
-__doc__     = "A container of flags for Heavy Ion Reconstruction of Globals"
-
-from AthenaCommon.JobProperties import JobProperty, JobPropertyContainer
-from AthenaCommon.JobProperties import jobproperties
-
-class doHIGlobalVars(JobProperty):
-    """ Run heavy ion global variable reconstruction 
-    """
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = True
-
-class doHIFlow(JobProperty):
-    """ Switch for HIFlow
-    """
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = True
-
-class NumFlowHarmonics(JobProperty):
-    """ Min n to include in vn analysis
-    """
-    statusOn     = True
-    allowedTypes = ['int']
-    StoredValue  = 7
-
-class HIFlowContainerPrefix(JobProperty):
-    """ Max n to include in vn analysis
-    """
-    statusOn     = True
-    allowedTypes = ['str']
-    StoredValue  = 'HIFlowData_v'
-
-
-
-    
-class doHICentrality(JobProperty):
-    """ Switch for HICentrality
-    """
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = True
-
-class doHITRT(JobProperty):
-    """ Switch for HITRT
-    """
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = True
-
-class doHIPixelTracklet(JobProperty):
-    """ Run heavy ion pixel tracklet reconstruction 
-    """
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = True
-
-class doHIGlobalNSiCluster(JobProperty):
-    """ Switch for HIGlobalNSiCluster
-    """
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = True
-
-class EventShapeKey(JobProperty):
-    """ Name of HIEventShape object container
-    """
-    statusOn     = True
-    allowedTypes = ['string']
-    StoredValue  = "HIEventShape"
-
-class EventShapeSummaryKey(JobProperty):
-    """ Name of HIEventShape object container containing summary info
-    """
-    statusOn     = True
-    allowedTypes = ['string']
-    StoredValue  = 'CaloSums'
-
-class SummarySubCalos(JobProperty):
-    """ List of SubCalos to be written in summary
-    """
-    statusOn     = True
-    allowedTypes = ['list']
-    StoredValue  = ['FCal','EMCal','HCal','ALL']
-
-class SummarySamplings(JobProperty):
-    """ List of samplings to be written in summary
-    """
-    statusOn     = True
-    allowedTypes = ['list']
-    StoredValue  = ['FCAL0','FCAL1','FCAL2']
-
-class DoSummary(JobProperty):
-    """ Add extra HIEventShapeContainer containing summary
-    """
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = True
-
-class SummaryPN(JobProperty):
-    """ Compute separate summaries for positive and negative eta
-    """
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-
-class HIGlobalItemList(JobProperty):
-    """ List of items to be written to AOD, initially empty but filled based on configuration
-    """
-    statusOn     = True
-    allowedTypes = ['list']
-    StoredValue  = []
-
-    
-# Defines the container for heavy ion reco flags  
-class HIGlobalFlags(JobPropertyContainer):
-    """ The global heavy ion reconstruction flag/job property container.
-    """
-    pass
-
-# add the flags container to the top container 
-jobproperties.add_Container(HIGlobalFlags)
-
-# We want always the following flags in the container  
-list_jobproperties = [
-    doHIGlobalVars,
-    doHIFlow,
-    doHICentrality,
-    doHITRT,
-    doHIPixelTracklet,
-    doHIGlobalNSiCluster,
-    NumFlowHarmonics,
-    HIFlowContainerPrefix,
-    EventShapeKey,
-    DoSummary,
-    EventShapeSummaryKey,
-    SummarySubCalos,
-    SummarySamplings,
-    SummaryPN,
-    HIGlobalItemList
-    ]
-
-for i in list_jobproperties:
-    jobproperties.HIGlobalFlags.add_JobProperty(i)
-
-## module clean-up
-del list_jobproperties
diff --git a/Reconstruction/HeavyIonRec/HIGlobal/python/HIGlobalUtils.py b/Reconstruction/HeavyIonRec/HIGlobal/python/HIGlobalUtils.py
deleted file mode 100644
index 2ab62c31eb862f4a8311bef92534e8e63c76c44e..0000000000000000000000000000000000000000
--- a/Reconstruction/HeavyIonRec/HIGlobal/python/HIGlobalUtils.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-def AppendOutputList(HIAODItemList) :
-    """Adds HIGlobalAODOutputList to the list passed in as an argument"""
-    #need to add procedure for straight copy of input
-    from HIGlobal.HIGlobalFlags import jobproperties
-    keys=[jobproperties.HIGlobalFlags.EventShapeKey()]
-    if jobproperties.HIGlobalFlags.DoSummary() : keys+=[jobproperties.HIGlobalFlags.EventShapeSummaryKey()]
-
-    existing_keys=[k.split('#')[1] for k in jobproperties.HIGlobalFlags.HIGlobalItemList() ]
-
-    for shape_key in keys :
-        if shape_key not in existing_keys :
-            jobproperties.HIGlobalFlags.HIGlobalItemList+=["xAOD::HIEventShapeContainer#"+shape_key]
-            jobproperties.HIGlobalFlags.HIGlobalItemList+=["xAOD::HIEventShapeAuxContainer#"+shape_key+"Aux."]
-
-    jobproperties.HIGlobalFlags.HIGlobalItemList+=["xAOD::MBTSModuleContainer#MBTSModules"]
-    jobproperties.HIGlobalFlags.HIGlobalItemList+=["xAOD::MBTSModuleAuxContainer#MBTSModulesAux."]
-
-    jobproperties.HIGlobalFlags.HIGlobalItemList+=["xAOD::ForwardEventInfoContainer#MBTSForwardEventInfo"]
-    jobproperties.HIGlobalFlags.HIGlobalItemList+=["xAOD::ForwardEventInfoAuxContainer#MBTSForwardEventInfoAux."]
-
-    HIAODItemList+=jobproperties.HIGlobalFlags.HIGlobalItemList()
diff --git a/Reconstruction/RecJobTransforms/python/RDOFilePeeker.py b/Reconstruction/RecJobTransforms/python/RDOFilePeeker.py
deleted file mode 100644
index 218d24a792a1640696733da6e181533140cc42d2..0000000000000000000000000000000000000000
--- a/Reconstruction/RecJobTransforms/python/RDOFilePeeker.py
+++ /dev/null
@@ -1,114 +0,0 @@
-from past.builtins import basestring
-
-# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
-
-def RDOFilePeeker(runArgs, skeletonLog):
-    from PyUtils.MetaReader import read_metadata
-    try:
-        input_file = runArgs.inputRDOFile[0]
-        metadata_lite = read_metadata(input_file)  # Use this only to read the key 'eventTypes', which is promoted in 'lite' mode.
-        # promote keys stored under input filename key one level up to access them directly
-        metadata_lite = metadata_lite[input_file]
-        # use the mode 'full' to access all metadata (needed for '/Digitization/Parameters')
-        metadata = read_metadata(input_file, mode= 'full')
-        # promote keys stored under input filename key one level up to access them directly
-        metadata = metadata[input_file]
-    except AssertionError:
-        skeletonLog.error("Failed to open input file: %s", runArgs.inputRDOFile[0])
-    #check eventTypes of input file
-    if 'eventTypes' in metadata_lite:
-        if 'IS_SIMULATION' not in metadata_lite['eventTypes']:
-            skeletonLog.error('This input file has incorrect eventTypes: %s', metadata_lite['eventTypes'])
-            skeletonLog.info('Please make sure you have set input file metadata correctly.')
-            skeletonLog.info('Consider using the job transforms for earlier steps if you aren\'t already.')
-            #then exit gracefully
-            raise SystemExit("Input file eventTypes is incorrect, please check your digi, g4sim and evgen jobs.")
-    else:
-        skeletonLog.warning('Could not find \'eventTypes\' key in MetaReader -> metadata. Unable to that check if eventTypes is correct.')
-
-    metadatadict = {}
-    if '/Digitization/Parameters' in metadata:
-        metadatadict = metadata['/Digitization/Parameters']
-        if isinstance(metadatadict, list):
-            skeletonLog.warning("inputfile: %s contained %s sets of Dititization Metadata. Using the final set in the list.",input_file,len(metadatadict))
-            metadatadict = metadatadict[-1]
-    ##Get IOVDbGlobalTag
-        if 'IOVDbGlobalTag' not in metadatadict:
-            try:
-                if metadata['/TagInfo']['IOVDbGlobalTag'] is not None:
-                    metadatadict['IOVDbGlobalTag'] = metadata['/TagInfo']['IOVDbGlobalTag']
-            except Exception:
-                skeletonLog.warning("Failed to find IOVDbGlobalTag.")
-    else:
-        ##Patch for older hit files
-        if 'DigitizedDetectors' not in metadatadict:
-            metadatadict['DigitizedDetectors'] = ['pixel','SCT','TRT','BCM','Lucid','LAr','Tile','MDT','CSC','TGC','RPC','Truth']
-
-    import re
-    from AthenaCommon.GlobalFlags import globalflags
-
-    ## Configure DetDescrVersion
-    if hasattr(runArgs,"geometryVersion"):
-        inputGeometryVersion = runArgs.geometryVersion
-        if isinstance(inputGeometryVersion, basestring) and inputGeometryVersion.endswith("_VALIDATION"):
-            inputGeometryVersion = inputGeometryVersion.replace("_VALIDATION", "")
-        if 'DetDescrVersion' in metadatadict:
-            if not re.match(metadatadict['DetDescrVersion'], inputGeometryVersion):
-                skeletonLog.warning("command-line geometryVersion (%s) does not match the value used in the Simulation step (%s) !",
-                                    inputGeometryVersion, metadatadict['DetDescrVersion'])
-        globalflags.DetDescrVersion.set_Value_and_Lock( inputGeometryVersion )
-        skeletonLog.info("Using geometryVersion from command-line: %s", globalflags.DetDescrVersion.get_Value())
-    elif 'DetDescrVersion' in metadatadict:
-        globalflags.DetDescrVersion.set_Value_and_Lock( metadatadict['DetDescrVersion'] )
-        skeletonLog.info("Using geometryVersion from RDO file metadata %s", globalflags.DetDescrVersion.get_Value())
-    else:
-        raise SystemExit("geometryVersion not found in RDO file metadata or on transform command-line!")
-
-    ## Configure ConditionsTag
-    if hasattr(runArgs,"conditionsTag"):
-        if 'IOVDbGlobalTag' in metadatadict:
-            if not re.match(metadatadict['IOVDbGlobalTag'], runArgs.conditionsTag):
-                skeletonLog.warning("command-line conditionsTag (%s) does not match the value used in the Simulation step (%s) !",
-                                    runArgs.conditionsTag, metadatadict['IOVDbGlobalTag'])
-        #globalflags.ConditionsTag.set_Value_and_Lock( runArgs.conditionsTag ) ## already done in CommonSkeletonJobOptions.py
-        skeletonLog.info("Using conditionsTag from command-line: %s", globalflags.ConditionsTag.get_Value())
-    elif 'IOVDbGlobalTag' in metadatadict:
-        globalflags.ConditionsTag.set_Value_and_Lock( metadatadict['IOVDbGlobalTag'] )
-        skeletonLog.info("Using conditionsTag from RDO file metadata %s", globalflags.ConditionsTag.get_Value())
-    else:
-        raise SystemExit("conditionsTag not found in RDO file metadata or on transform command-line!")
-
-    ## Configure DetFlags
-    if 'DigitizedDetectors' in metadatadict:
-        from AthenaCommon.DetFlags import DetFlags
-        # by default everything is off
-        DetFlags.all_setOff()
-        skeletonLog.debug("Switching on DetFlags for subdetectors which were digitized")
-        if isinstance(metadatadict['DigitizedDetectors'], str):
-            digitizedDetectors = eval(metadatadict['DigitizedDetectors']) # convert from str to list of str
-        else:
-            digitizedDetectors = metadatadict['DigitizedDetectors']
-        digitizedDetectors[:] = [x.lower() if x == 'Pixel' else x for x in digitizedDetectors] # to cope with CA-based inputs where Pixel rather than pixel is used
-        for subdet in digitizedDetectors:
-            cmd='DetFlags.%s_setOn()' % subdet
-            skeletonLog.debug(cmd)
-            try:
-                exec(cmd)
-            except Exception:
-                skeletonLog.warning('Failed to switch on subdetector %s',subdet)
-        #hacks to reproduce the sub-set of DetFlags left on by RecExCond/AllDet_detDescr.py
-        DetFlags.simulate.all_setOff()
-        DetFlags.simulateLVL1.all_setOff()
-        DetFlags.digitize.all_setOff()
-        DetFlags.pileup.all_setOff()
-        DetFlags.readRDOBS.all_setOff()
-        DetFlags.readRDOPool.all_setOff()
-        DetFlags.readRIOBS.all_setOff()
-        DetFlags.readRIOPool.all_setOff()
-        DetFlags.makeRIO.all_setOff()
-        DetFlags.writeBS.all_setOff()
-        DetFlags.writeRDOPool.all_setOff()
-        DetFlags.writeRIOPool.all_setOff()
-
-    return
-
diff --git a/Simulation/G4Atlas/G4AtlasTools/python/G4GeometryToolConfig.py b/Simulation/G4Atlas/G4AtlasTools/python/G4GeometryToolConfig.py
index b265726d6cd94ba54f1f11f843572aeacf539319..adac3554176c79ce430fb5f254363e6643c90120 100644
--- a/Simulation/G4Atlas/G4AtlasTools/python/G4GeometryToolConfig.py
+++ b/Simulation/G4Atlas/G4AtlasTools/python/G4GeometryToolConfig.py
@@ -392,9 +392,6 @@ def generateSubDetectorList(flags):
         toolFwdRegion = result.popToolsAndMerge(ForwardRegionEnvelopeCfg(flags))
         SubDetectorList += [ toolFwdRegion ]
 
-    #if DetFlags.Muon_on(): #HACK
-    #    SubDetectorList += ['MUONQ02'] #FIXME rename to MUON when safe #HACK
-    #SubDetectorList += generateFwdSubDetectorList() #FIXME Fwd Detectors not supported yet.
     result.setPrivateTools(SubDetectorList)
     return result
 
diff --git a/Simulation/G4Extensions/G4CosmicFilter/python/G4CosmicFilterConfig.py b/Simulation/G4Extensions/G4CosmicFilter/python/G4CosmicFilterConfig.py
index a33f688df9c45f6896aa368e8daae09ed66d21b9..b93f1841c2b2a4a6dee5ddcf7120cd81dddeeb41 100644
--- a/Simulation/G4Extensions/G4CosmicFilter/python/G4CosmicFilterConfig.py
+++ b/Simulation/G4Extensions/G4CosmicFilter/python/G4CosmicFilterConfig.py
@@ -47,17 +47,19 @@ def CosmicFilterToolCfg(flags, name="G4UA::G4CosmicFilterTool", **kwargs):
         # need a cosmic AND filter
         kwargs.setdefault("CollectionName",volumes[0])
         kwargs.setdefault("CollectionName2",volumes[1])
-        result.setPrivateTools(CompFactory.G4UA.G4UA__G4CosmicAndFilterTool(name, **kwargs))
+        result.setPrivateTools(CompFactory.G4UA.G4CosmicAndFilterTool(name, **kwargs))
 
     else:
         # need a cosmic OR filter
         kwargs.setdefault("CollectionName",volumes[0])
         kwargs.setdefault("CollectionName2",volumes[1])
         kwargs.setdefault("CollectionName3",volumes[2])
-        result.setPrivateTools(CompFactory.G4UA.G4UA__G4CosmicOrFilterTool(name, **kwargs))
+        result.setPrivateTools(CompFactory.G4UA.G4CosmicOrFilterTool(name, **kwargs))
     return result
 
-# Note - is an ISF one migrated, but todo the G4UA one
-# def getStoppedParticleFilterTool(name="G4UA::StoppedParticleFilterTool", **kwargs):
-#     kwargs.setdefault("CollectionName",'StoppingPositions')
-#     return  CfgMgr.G4UA__G4CosmicFilterTool(name, **kwargs)
+
+def StoppedParticleFilterToolCfg(flags, name="G4UA::StoppedParticleFilterTool", **kwargs):
+    result = ComponentAccumulator()
+    kwargs.setdefault("CollectionName",'StoppingPositions')
+    result.setPrivateTools(CompFactory.G4UA.G4CosmicFilterTool(name, **kwargs))
+    return result
diff --git a/Simulation/G4Sim/MCTruthBase/python/MCTruthBaseConfig.py b/Simulation/G4Sim/MCTruthBase/python/MCTruthBaseConfig.py
index 00c269a7214704424aeae53fe72c75d288a640a2..f9cd34b7fdff49d14198040c1085986b874c0a69 100644
--- a/Simulation/G4Sim/MCTruthBase/python/MCTruthBaseConfig.py
+++ b/Simulation/G4Sim/MCTruthBase/python/MCTruthBaseConfig.py
@@ -4,8 +4,6 @@ from AthenaConfiguration.ComponentAccumulator import ComponentAccumulator
 from AthenaConfiguration.ComponentFactory import CompFactory
 
 def getEnvelopeMap(flags):
-    #from G4AtlasApps.SimFlags import simFlags
-
     # Map of volume name to output collection name
     envelopeMap = dict()
 
diff --git a/Simulation/ISF/ISF_Config/python/ISF_jobProperties.py b/Simulation/ISF/ISF_Config/python/ISF_jobProperties.py
deleted file mode 100644
index 2133eb7a2943c810ac8c53349878734b7c9382b6..0000000000000000000000000000000000000000
--- a/Simulation/ISF/ISF_Config/python/ISF_jobProperties.py
+++ /dev/null
@@ -1,244 +0,0 @@
-# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
-
-## @file ISF_Config/python/ISF_jobProperties.py
-## @brief Python module to hold common flags to configure JobOptions
-##
-
-""" ISF_jobProperties
-
-"""
-
-__author__ = "E. Ritsch"
-__version__= "$Revision: 507036 $"
-__doc__    = "ISF_jobProperties"
-
-__all__    = [ "ISF_jobProperties" ]
-
-# kindly stolen from FatrasJobProperties by A. Salzburger
-
-##-----------------------------------------------------------------------------
-## Import
-
-from AthenaCommon.JobProperties import JobProperty, JobPropertyContainer
-from AthenaCommon.JobProperties import jobproperties
-
-##-----------------------------------------------------------------------------
-## 1st step: define JobProperty classes
-class UseParticleGenerator(JobProperty):
-    """Steering of ISF: use particle generator to create initial particles"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-
-class RunVP1(JobProperty):
-    """Use Virtual Point 1"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-
-class DoPerfMonStats(JobProperty):
-    """Run PerfMon profiling while running"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = True
-
-class DumpMcEvent(JobProperty):
-    """Dump McEvent Collection after each event"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-
-class DumpStoreGate(JobProperty):
-    """Dump StoreGate contents after each event"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-
-class RunValgrind(JobProperty):
-    """Dump StoreGate contents after each event"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-
-class ValidationMode(JobProperty):
-    """Steering of ISF: run ISF internal validation checks"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-    def _do_action(self):
-        jobproperties.ISF_jobProperties.TruthService.set_Value("ISF_ValidationTruthService")
-    def _undo_action(self):
-        jobproperties.ISF_jobProperties.TruthService.set_Value(jobproperties.ISF_jobProperties.TruthService.__class__.StoredValue)
-
-class VertexPositionFromFile(JobProperty):
-    """Steering of ISF: run vertex position from file"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-
-class DoMemoryMonitoring(JobProperty):
-    """Steering of ISF: do memory monitoring"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-
-class DoTimeMonitoring(JobProperty):
-    """Steering of ISF: run time monitoring"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = True
-
-class Input(JobProperty):
-    """Steering of ISF: set input"""
-    statusOn     = True
-    allowedTypes = ['str']
-    StoredValue  = 'NONE'
-
-class Simulator(JobProperty):
-    """Steering of ISF: set simulator"""
-    statusOn     = True
-    allowedTypes = ['str']
-    StoredValue  = 'ATLFASTII'
-    def KernelName(self):
-        if self.statusOn:
-            return 'ISF_Kernel_' + self.StoredValue
-    def configFlagsMethodName(self):
-        if self.statusOn:
-            return 'configureFlags' + self.StoredValue
-    def isFullSim(self):
-        return 'FullG4' in self.StoredValue or 'PassBackG4' in self.StoredValue or 'AtlasG4' in self.StoredValue
-    def usesFastCaloSim(self):
-        return 'ATLFAST' in self.StoredValue or 'G4FastCalo' in self.StoredValue
-    def usesFatras(self):
-        return 'ATLFASTIIF' in self.StoredValue or 'ATLFAST3F' in self.StoredValue # TODO Extend for Acts::Fatras in the future
-    def isQuasiStable(self):
-        return 'QS' in self.StoredValue
-    def isMT(self):
-        return 'MT' in self.StoredValue
-
-class HITSMergingRequired(JobProperty):
-    """The configured set of simulators requires merging of HITS collections"""
-    statusOn     = True
-    allowedTypes = ['dict']
-    StoredValue = {'ID':True, 'CALO':True, 'MUON':True}
-    def anyOn(self):
-        if self.statusOn:
-            return (True in self.StoredValue.values())
-        else:
-            return False
-
-class UsingGeant4(JobProperty):
-    """Will this job be using Geant4?"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-
-
-class ParticleBroker(JobProperty):
-    """Steering of ISF: set the Particle Broker Service"""
-    statusOn     = True
-    allowedTypes = ['str']
-    StoredValue  = 'ISF_ParticleBrokerSvc'
-
-class OverrideInputFiles(JobProperty):
-    """Steering of ISF: set input override"""
-    statusOn     = True
-    allowedTypes = ['list']
-    StoredValue  = []
-
-class RoutingChainBeamPipe(JobProperty):
-    """Steering of ISF: the RoutingChain to be used in the BeamPipe"""
-    statusOn     = True
-    allowedTypes = ['list']
-    StoredValue  = []
-
-class RoutingChainID(JobProperty):
-    """Steering of ISF: the RoutingChain to be used in the InnerDetector"""
-    statusOn     = True
-    allowedTypes = ['list']
-    StoredValue  = []
-
-class RoutingChainCalo(JobProperty):
-    """Steering of ISF: the RoutingChain to be used in the Calorimeter"""
-    statusOn     = True
-    allowedTypes = ['list']
-    StoredValue  = []
-
-class RoutingChainMS(JobProperty):
-    """Steering of ISF: the RoutingChain to be used in the MuonSpectrometer"""
-    statusOn     = True
-    allowedTypes = ['list']
-    StoredValue  = []
-
-class RoutingChainCavern(JobProperty):
-    """Steering of ISF: the RoutingChain to be used in the Cavern"""
-    statusOn     = True
-    allowedTypes = ['list']
-    StoredValue  = []
-
-class GeoIDService(JobProperty):
-    """The GeoIDService to bue used by ISF"""
-    statusOn     = True
-    allowedTypes = ['str']
-    StoredValue  = 'NONE'
-
-class ReSimulation(JobProperty):
-    """Using ReSimulation workflow"""
-    statusOn     = False
-    allowedTypes = ['bool']
-    StoredValue  = False
-
-class UseTrackingGeometryCond(JobProperty):
-    """Using TrackingGeometry from Conditions rather than the Svc"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-##-----------------------------------------------------------------------------
-## 2nd step
-## Definition of the InDet flag container
-class ISF_jobProperties(JobPropertyContainer):
-    """Container for the ISF key flags
-    """
-    pass
-
-
-##-----------------------------------------------------------------------------
-## 3rd step
-## adding the container to the general top-level container
-jobproperties.add_Container(ISF_jobProperties)
-
-
-##-----------------------------------------------------------------------------
-## 4th step
-## adding flags to the jobProperties container
-jobproperties.ISF_jobProperties.add_JobProperty(UseParticleGenerator)
-jobproperties.ISF_jobProperties.add_JobProperty(RunVP1)
-jobproperties.ISF_jobProperties.add_JobProperty(DoPerfMonStats)
-jobproperties.ISF_jobProperties.add_JobProperty(DumpMcEvent)
-jobproperties.ISF_jobProperties.add_JobProperty(DumpStoreGate)
-jobproperties.ISF_jobProperties.add_JobProperty(RunValgrind)
-jobproperties.ISF_jobProperties.add_JobProperty(ValidationMode)
-jobproperties.ISF_jobProperties.add_JobProperty(VertexPositionFromFile)
-jobproperties.ISF_jobProperties.add_JobProperty(DoMemoryMonitoring)
-jobproperties.ISF_jobProperties.add_JobProperty(DoTimeMonitoring)
-jobproperties.ISF_jobProperties.add_JobProperty(Input)
-jobproperties.ISF_jobProperties.add_JobProperty(Simulator)
-jobproperties.ISF_jobProperties.add_JobProperty(HITSMergingRequired)
-jobproperties.ISF_jobProperties.add_JobProperty(UsingGeant4)
-jobproperties.ISF_jobProperties.add_JobProperty(ParticleBroker)
-jobproperties.ISF_jobProperties.add_JobProperty(OverrideInputFiles)
-jobproperties.ISF_jobProperties.add_JobProperty(RoutingChainBeamPipe)
-jobproperties.ISF_jobProperties.add_JobProperty(RoutingChainID)
-jobproperties.ISF_jobProperties.add_JobProperty(RoutingChainCalo)
-jobproperties.ISF_jobProperties.add_JobProperty(RoutingChainMS)
-jobproperties.ISF_jobProperties.add_JobProperty(RoutingChainCavern)
-jobproperties.ISF_jobProperties.add_JobProperty(ReSimulation)
-jobproperties.ISF_jobProperties.add_JobProperty(UseTrackingGeometryCond)
-##-----------------------------------------------------------------------------
-## 5th step
-## short-cut for lazy people
-## carefull: do not select ISF_jobProperties as a short name as well. 
-## otherwise problems with pickle
-## Note: you still have to import it:
-## >>> from ISF_Example.ISF_jobProperties import ISF_Flags
-ISF_Flags = jobproperties.ISF_jobProperties
diff --git a/Simulation/ISF/ISF_FastCaloSim/ISF_FastCaloSimParametrization/python/ISF_NativeFastCaloSimJobProperties.py b/Simulation/ISF/ISF_FastCaloSim/ISF_FastCaloSimParametrization/python/ISF_NativeFastCaloSimJobProperties.py
deleted file mode 100644
index 0026b722c1bff8e4394d1fb6ae11f41ba6f8ff23..0000000000000000000000000000000000000000
--- a/Simulation/ISF/ISF_FastCaloSim/ISF_FastCaloSimParametrization/python/ISF_NativeFastCaloSimJobProperties.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-## @file ISF_NativeFastCaloSimJobProperties.py
-## @brief Python module to hold common flags to configure JobOptions
-##
-
-""" ISF_NativeFastCaloSimJobProperties
-    Python module to hold storegate keys of InDet objects.
-
-"""
-
-__author__ = "KG Tan"
-__version__= "$Revision: 779694 $"
-__doc__    = "ISF_NativeFastCaloSimJobProperties"
-
-__all__    = [ "ISF_NativeFastCaloSimJobProperties" ]
-
-# kindly stolen from AthenaCommonFlags from S. Binet and M. Gallas
-
-##-----------------------------------------------------------------------------
-## Import
-
-from AthenaCommon.JobProperties import JobProperty, JobPropertyContainer
-from AthenaCommon.JobProperties import jobproperties
-
-##-----------------------------------------------------------------------------
-## Define the flag
-
-class NativeFastCaloSimIsActive(JobProperty):
-    """Defines whether or not NativeFastCaloSim is being run in the current athena setup"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-
-class RandomStreamName(JobProperty):
-    """The random number stream used by FastCaloSim"""
-    statusOn     = True
-    allowedTypes = ['str']
-    StoredValue  = 'FastCaloSimRnd'
-
-class CaloCellsName(JobProperty):
-    """StoreGate collection name for FastCaloSim hits"""
-    statusOn     = True
-    allowedTypes = ['str']
-    StoredValue  = 'AllCalo'
-
-class outputFile(JobProperty):
-    statusOn     = False
-    allowedTypes = ['str']
-    StoredValue  = 'ESD_output_test.root'
-
-##-----------------------------------------------------------------------------
-## 2nd step
-## Definition of the InDet flag container
-class ISF_NativeFastCaloSimJobProperties(JobPropertyContainer):
-    """Container for the ISF_FastCaloSim key flags
-    """
-    pass
-
-##-----------------------------------------------------------------------------
-## 3rd step
-## adding the container to the general top-level container
-jobproperties.add_Container(ISF_NativeFastCaloSimJobProperties)
-
-
-##-----------------------------------------------------------------------------
-## 4th step
-## adding the flags to the  container
-jobproperties.ISF_NativeFastCaloSimJobProperties.add_JobProperty( NativeFastCaloSimIsActive        )
-jobproperties.ISF_NativeFastCaloSimJobProperties.add_JobProperty( RandomStreamName           )
-jobproperties.ISF_NativeFastCaloSimJobProperties.add_JobProperty( CaloCellsName              )
-jobproperties.ISF_NativeFastCaloSimJobProperties.add_JobProperty( outputFile              )
-##-----------------------------------------------------------------------------
-## 5th step
-## short-cut for lazy people
-## carefull: do not select ISF_FastCaloSimJobProperties as a short name as well. 
-## otherwise problems with pickle
-## Note: you still have to import it:
-ISF_NativeFastCaloSimFlags = jobproperties.ISF_NativeFastCaloSimJobProperties
diff --git a/Simulation/ISF/ISF_FastCaloSim/ISF_FastCaloSimServices/python/ISF_FastCaloSimJobProperties.py b/Simulation/ISF/ISF_FastCaloSim/ISF_FastCaloSimServices/python/ISF_FastCaloSimJobProperties.py
deleted file mode 100644
index 16bff8f9bd8f3a7953aebb7e15a7019b2c3e35a4..0000000000000000000000000000000000000000
--- a/Simulation/ISF/ISF_FastCaloSim/ISF_FastCaloSimServices/python/ISF_FastCaloSimJobProperties.py
+++ /dev/null
@@ -1,151 +0,0 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-## @file ISF_FastCaloSimJobProperties.py
-## @brief Python module to hold common flags to configure JobOptions
-##
-
-""" ISF_FastCaloSimJobProperties
-    Python module to hold storegate keys of InDet objects.
-
-"""
-
-__author__ = "KG Tan"
-__version__= "$Revision: 476468 $"
-__doc__    = "ISF_FastCaloSimJobProperties"
-
-__all__    = [ "ISF_FastCaloSimJobProperties" ]
-
-# kindly stolen from AthenaCommonFlags from S. Binet and M. Gallas
-
-##-----------------------------------------------------------------------------
-## Import
-
-from AthenaCommon.JobProperties import JobProperty, JobPropertyContainer
-from AthenaCommon.JobProperties import jobproperties
-
-##-----------------------------------------------------------------------------
-## Define the flag
-
-class FastCaloSimIsActive(JobProperty):
-    """Defines whether or not FastCaloSim is being run in the current athena setup"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-
-class RandomStreamName(JobProperty):
-    """The random number stream used by FastCaloSim"""
-    statusOn     = True
-    allowedTypes = ['str']
-    StoredValue  = 'FastCaloSimRnd'
-
-class CaloCellsName(JobProperty):
-    """StoreGate collection name for FastCaloSim hits"""
-    statusOn     = True
-    allowedTypes = ['str']
-    StoredValue  = 'AllCalo'
-
-class FastShowerInputCollection(JobProperty):
-    """StoreGate collection name of modified TruthEvent for legayc FastCaloSim use"""
-    statusOn     = True
-    allowedTypes = ['str']
-    StoredValue  = 'TruthEvent'
-
-class DoRandomFluctuations(JobProperty):
-    """Determines if Random Fluctuations should be used if particle is a pion"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-
-class ParamsInputFilename(JobProperty):
-    """ Filename of the input parametrizations file. """
-    statusOn     = True
-    allowedTypes = ['str']
-    StoredValue  = 'FastCaloSim/MC23/TFCSparam_AF3_MC23_Sep23.root'
-
-class RunOnGPU(JobProperty):
-    """Determines if run the FastCaloSim on GPU or not"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-
-class DoPunchThroughSimulation(JobProperty):
-    """ Determines whether to run the punch through simulation"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = True
-
-class PunchThroughParamsInputFilename(JobProperty):
-    """ Filename of the muon punch through input parametrizations file. """
-    statusOn     = True
-    allowedTypes = ['str']
-    StoredValue  = 'FastCaloSim/MC23/TFCSparam_mpt_v07.root'
-
-class PunchThroughParamsInverseCdfFilename(JobProperty):
-    """ Filename of the muon punch through param inverse cdf config. """
-    statusOn     = True
-    allowedTypes = ['str']
-    StoredValue  = 'FastCaloSim/MC23/TFCSparam_mpt_inverseCdf_v07.xml'
-
-class PunchThroughParamsInversePcaFilename(JobProperty):
-    """ Filename of the muon punch through param inverse pca config. """
-    statusOn     = True
-    allowedTypes = ['str']
-    StoredValue  = 'FastCaloSim/MC23/TFCSparam_mpt_inversePca_v07.xml'
-
-class PunchThroughClassifierScalerFilename(JobProperty):
-    """ Filename of the muon punch through classifier input variable MinMaxScaler. """
-    statusOn     = True
-    allowedTypes = ['str']
-    StoredValue  = 'FastCaloSim/MC23/TFCSparam_mpt_classScaler_v04.xml'
-
-class PunchThroughClassifierNetworkFilename(JobProperty):
-    """ Filename of the muon punch through classifier neural network. """
-    statusOn     = True
-    allowedTypes = ['str']
-    StoredValue  = 'FastCaloSim/MC23/TFCSparam_mpt_classNet_v04.json'
-
-class PunchThroughClassifierCalibratorFilename(JobProperty):
-    """ Filename of the muon punch through classifier calibrator. """
-    statusOn     = True
-    allowedTypes = ['str']
-    StoredValue  = 'FastCaloSim/MC23/TFCSparam_mpt_classCalib_v04.xml'
-##-----------------------------------------------------------------------------
-## 2nd step
-## Definition of the InDet flag container
-class ISF_FastCaloSimJobProperties(JobPropertyContainer):
-    """Container for the ISF_FastCaloSim key flags
-    """
-    pass
-
-##-----------------------------------------------------------------------------
-## 3rd step
-## adding the container to the general top-level container
-jobproperties.add_Container(ISF_FastCaloSimJobProperties)
-
-
-##-----------------------------------------------------------------------------
-## 4th step
-## adding the flags to the  container
-jobproperties.ISF_FastCaloSimJobProperties.add_JobProperty( FastCaloSimIsActive        )
-jobproperties.ISF_FastCaloSimJobProperties.add_JobProperty( RandomStreamName           )
-jobproperties.ISF_FastCaloSimJobProperties.add_JobProperty( CaloCellsName              )
-jobproperties.ISF_FastCaloSimJobProperties.add_JobProperty( FastShowerInputCollection  )
-jobproperties.ISF_FastCaloSimJobProperties.add_JobProperty( DoRandomFluctuations       )
-jobproperties.ISF_FastCaloSimJobProperties.add_JobProperty( ParamsInputFilename        )
-jobproperties.ISF_FastCaloSimJobProperties.add_JobProperty( RunOnGPU )
-jobproperties.ISF_FastCaloSimJobProperties.add_JobProperty( DoPunchThroughSimulation   )
-jobproperties.ISF_FastCaloSimJobProperties.add_JobProperty( PunchThroughParamsInputFilename )
-jobproperties.ISF_FastCaloSimJobProperties.add_JobProperty( PunchThroughParamsInverseCdfFilename )
-jobproperties.ISF_FastCaloSimJobProperties.add_JobProperty( PunchThroughParamsInversePcaFilename )
-jobproperties.ISF_FastCaloSimJobProperties.add_JobProperty( PunchThroughClassifierScalerFilename )
-jobproperties.ISF_FastCaloSimJobProperties.add_JobProperty( PunchThroughClassifierNetworkFilename )
-jobproperties.ISF_FastCaloSimJobProperties.add_JobProperty( PunchThroughClassifierCalibratorFilename )
-
-##-----------------------------------------------------------------------------
-## 5th step
-## short-cut for lazy people
-## carefull: do not select ISF_FastCaloSimJobProperties as a short name as well.
-## otherwise problems with pickle
-## Note: you still have to import it:
-## >>> from ISF_FatrasServices.ISF_FastCaloSimJobProperties import ISF_FastCaloSimFlags
-ISF_FastCaloSimFlags = jobproperties.ISF_FastCaloSimJobProperties
diff --git a/Simulation/ISF/ISF_Fatras/ISF_FatrasServices/python/FatrasPhysicsList.py b/Simulation/ISF/ISF_Fatras/ISF_FatrasServices/python/FatrasPhysicsList.py
deleted file mode 100644
index 87d246ccf4f963a17dfed946a8109f9d02a97de8..0000000000000000000000000000000000000000
--- a/Simulation/ISF/ISF_Fatras/ISF_FatrasServices/python/FatrasPhysicsList.py
+++ /dev/null
@@ -1,96 +0,0 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-#
-## @file FatrasExample/python/FatrasPhysicsList.py
-## @purpose Python module to hold common flags to configure JobOptions
-##
-
-""" FatrasPhysicsList
-    Python module to hold storegate keys of InDet objects.
-
-"""
-
-__author__ = "A. Salzburger"
-__version__= "$Revision: 476468 $"
-__doc__    = "FatrasPhysicsList"
-
-__all__    = [ "FatrasPhysicsList" ]
-
-# kindly stolen from AthenaCommonFlags from S. Binet and M. Gallas
-
-##-----------------------------------------------------------------------------
-## Import
-
-from AthenaCommon.JobProperties import JobProperty, JobPropertyContainer
-from AthenaCommon.JobProperties import jobproperties
-
-##-----------------------------------------------------------------------------
-## 1st step: define JobProperty classes
-class MultipleScattering(JobProperty):
-    """Steering of multiple scattering in simulation/reconstruction"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = True
-
-class EnergyLoss(JobProperty):
-    """Steering of energy loss (ionisation) in simulation/reconstruction"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = True
-
-class Bremsstrahlung(JobProperty):
-    """Steering of energy loss (radiative) in simulation"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = True
-
-class PairProduction(JobProperty):
-    """Steering of pair production in simulation"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = True
-
-class HadronicInteraction(JobProperty):
-    """Steering of nuclear (hadronic) interactions in simulation"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = True
-    
-class ParticleDecay(JobProperty):
-    """Steering of particle decay in simulation"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = True
-
-##-----------------------------------------------------------------------------
-## 2nd step
-## Definition of the InDet flag container
-class FatrasPhysicsList(JobPropertyContainer):
-    """Container for the Fatras physics list flags
-    """
-    pass
-
-
-##-----------------------------------------------------------------------------
-## 3rd step
-## adding the container to the general top-level container
-jobproperties.add_Container(FatrasPhysicsList)
-
-##-----------------------------------------------------------------------------
-## 4th step
-## adding ID flags to the InDetKeys container
-jobproperties.FatrasPhysicsList.add_JobProperty(MultipleScattering)
-jobproperties.FatrasPhysicsList.add_JobProperty(EnergyLoss)
-jobproperties.FatrasPhysicsList.add_JobProperty(Bremsstrahlung)
-jobproperties.FatrasPhysicsList.add_JobProperty(PairProduction)
-jobproperties.FatrasPhysicsList.add_JobProperty(HadronicInteraction)
-jobproperties.FatrasPhysicsList.add_JobProperty(ParticleDecay)
-
-##-----------------------------------------------------------------------------
-## 5th step
-## short-cut for lazy people
-## carefull: do not select FatrasPhysicsList as a short name as well. 
-## otherwise problems with pickle
-## Note: you still have to import it:
-## >>> from FatrasExample.FatrasPhysicsList import FatrasPhysicsListFlags
-FatrasPhysicsListFlags = jobproperties.FatrasPhysicsList
diff --git a/Simulation/ISF/ISF_Fatras/ISF_FatrasServices/python/FatrasTuning.py b/Simulation/ISF/ISF_Fatras/ISF_FatrasServices/python/FatrasTuning.py
deleted file mode 100644
index 00669b7436feee1114cb067a77e91f142408f363..0000000000000000000000000000000000000000
--- a/Simulation/ISF/ISF_Fatras/ISF_FatrasServices/python/FatrasTuning.py
+++ /dev/null
@@ -1,252 +0,0 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-#
-## @file FatrasExample/python/FatrasTuning.py
-## @purpose Python module to hold common flags to configure JobOptions
-##
-
-""" FatrasKeys
-    Python module to hold storegate keys of InDet objects.
-
-"""
-
-__author__ = "A. Salzburger"
-__version__= "$Revision: 578960 $"
-__doc__    = "FatrasTuning"
-
-__all__    = [ "FatrasTuning" ]
-
-# kindly stolen from AthenaCommonFlags from S. Binet and M. Gallas
-
-##-----------------------------------------------------------------------------
-## Import
-
-from AthenaCommon.JobProperties import JobProperty, JobPropertyContainer
-from AthenaCommon.JobProperties import jobproperties
-
-##-----------------------------------------------------------------------------
-## 1st step: define JobProperty classes
-
-class MinimumParticlePtPrimaryTrackCreation(JobProperty):
-    """do not simulate primary tracks below this momentum"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 250.
-
-class MinimumMuonPtTrackCreation(JobProperty):
-    """Kill the particle when it falls underneath this threshold"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 1000.        
-    
-class MinimumParticleMomentum(JobProperty):
-    """Kill the particle when it falls underneath this threshold"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 50.
-
-
-class MinimumBremPhotonMomentum(JobProperty):
-    """Follow the photon if it is over threshold"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 150.
-
-# Conversion Creation: 
-class InteractionVolumeRadius(JobProperty):
-    """Volume (r) in which brem photons / had int are processed"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 1200.
-      
-class InteractionVolumeHalfZ(JobProperty):
-    """Volume (z) in which brem photons / had int are processed"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 4000.    
-      
-## ============================================================================
-## Energy Loss through: Bethe-Heitler distribution 
-class BetheHeitlerScalor(JobProperty):
-    """Scalor to adapt Bethe-Heitler contribution"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 1.
-    
-## ============================================================================
-## Multiple Scattering: Gaussian mixture model 
-class GaussianMixtureModel(JobProperty):
-    """Gaussian mixture model on/off for Multiple Scattering"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = True
-
-## ============================================================================
-## Conversion Creation: 
-class ParticleDecayMinChildEnergy(JobProperty):
-    """minimum child energy from a conversion"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 250.
-        
-## ============================================================================
-## Conversion Creation: 
-class ConversionMinChildEnergy(JobProperty):
-    """minimum child energy from a conversion"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 100.
-    
-class ConversionChildEnergyScalor(JobProperty):
-    """minimum child energy from a conversion"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 2.
- 
-# Conversion Creation: 
-class ConversionProbabilityScalor(JobProperty):
-    """Scalor to adapt the conversion probability"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 0.98
-    
-# Conversion Creation: 
-class ConversionVolumeRadius(JobProperty):
-    """Volume (r) in which conversions are created"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 1200.
-      
-class ConversionVolumeHalfZ(JobProperty):
-    """Volume (z) in which conversions are created"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 4000.
-
-    
-## ============================================================================
-## Hadronic interactions:     
-class HadronicInteractionProbabilityScalor(JobProperty):
-    """ """
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 1.
-
-## Hadronic interactions:     
-class HadronicInteractionMinMomentumIn(JobProperty):
-    """ """
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 1000.
-    
-## Hadronic interactions:     
-class HadronicInteractionMinMomentumOut(JobProperty):
-    """Volume (z) in which conversions are created"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 200.    
-           
-
-## ============================================================================
-## Noise Levels
-class PixNoiseLevel(JobProperty):
-    """Pixel overall noise level (fraction of noisy pixels)"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 1e-5
-
-class SctNoiseLevel(JobProperty):
-    """SCT overall noise level (fraction of noisy strips)"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 1e-5
-
-class TrtNoiseLevel(JobProperty):
-    """TRT overall noise level (fraction of noisy straws)"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 0.02
-
-class TrtMultiHitFlag(JobProperty):
-    """Flag how to deal with TRT multiple hits on straw"""
-    statusOn     = True
-    allowedTypes = ['int']
-    StoredValue  = 1
-
-# Material distortions
-class MaterialScalor(JobProperty):    
-    """efficiencies (vs. eta) for the TRT"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 1.00
-
-class MaterialDistortionsX0(JobProperty):    
-    """efficiencies (vs. eta) for the TRT"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 0.02
-    
-# Material distortions
-class MaterialDistortionsRho(JobProperty):    
-    """efficiencies (vs. eta) for the TRT"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 0.02
-
-class MomCutOffSec(JobProperty):
-    """Steering of ISF: common momentum cut-off for secondaries"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 50.
-    
-##-----------------------------------------------------------------------------
-## 2nd step
-## Definition of the InDet flag container
-class FatrasTuning(JobPropertyContainer):
-    """Container for the Fatras key flags
-    """
-    pass
-
-
-##-----------------------------------------------------------------------------
-## 3rd step
-## adding the container to the general top-level container
-jobproperties.add_Container(FatrasTuning)
-
-
-##-----------------------------------------------------------------------------
-## 4th step
-## adding ID flags to the InDetKeys container
-jobproperties.FatrasTuning.add_JobProperty(MinimumParticlePtPrimaryTrackCreation)
-jobproperties.FatrasTuning.add_JobProperty(MinimumMuonPtTrackCreation)
-jobproperties.FatrasTuning.add_JobProperty(MinimumParticleMomentum)
-jobproperties.FatrasTuning.add_JobProperty(MinimumBremPhotonMomentum)
-jobproperties.FatrasTuning.add_JobProperty(InteractionVolumeRadius)
-jobproperties.FatrasTuning.add_JobProperty(InteractionVolumeHalfZ)
-jobproperties.FatrasTuning.add_JobProperty(BetheHeitlerScalor)
-jobproperties.FatrasTuning.add_JobProperty(GaussianMixtureModel)
-jobproperties.FatrasTuning.add_JobProperty(ParticleDecayMinChildEnergy)
-jobproperties.FatrasTuning.add_JobProperty(ConversionMinChildEnergy)
-jobproperties.FatrasTuning.add_JobProperty(ConversionChildEnergyScalor)
-jobproperties.FatrasTuning.add_JobProperty(ConversionProbabilityScalor)
-jobproperties.FatrasTuning.add_JobProperty(ConversionVolumeRadius)
-jobproperties.FatrasTuning.add_JobProperty(ConversionVolumeHalfZ)     
-jobproperties.FatrasTuning.add_JobProperty(HadronicInteractionProbabilityScalor)
-jobproperties.FatrasTuning.add_JobProperty(HadronicInteractionMinMomentumIn)
-jobproperties.FatrasTuning.add_JobProperty(HadronicInteractionMinMomentumOut)
-jobproperties.FatrasTuning.add_JobProperty(PixNoiseLevel)
-jobproperties.FatrasTuning.add_JobProperty(SctNoiseLevel)
-jobproperties.FatrasTuning.add_JobProperty(TrtNoiseLevel)
-jobproperties.FatrasTuning.add_JobProperty(TrtMultiHitFlag)
-jobproperties.FatrasTuning.add_JobProperty(MaterialScalor)
-jobproperties.FatrasTuning.add_JobProperty(MaterialDistortionsX0)
-jobproperties.FatrasTuning.add_JobProperty(MaterialDistortionsRho)
-jobproperties.FatrasTuning.add_JobProperty(MomCutOffSec)
-#-----------------------------------------------------------------------------
-## 5th step
-## short-cut for lazy people
-## carefull: do not select FatrasKeys as a short name as well. 
-## otherwise problems with pickle
-## Note: you still have to import it:
-## >>> from FatrasExample.FatrasTuning import FatrasTuningFlags
-FatrasTuningFlags = jobproperties.FatrasTuning
diff --git a/Simulation/ISF/ISF_Fatras/ISF_FatrasServices/python/FatrasValidation.py b/Simulation/ISF/ISF_Fatras/ISF_FatrasServices/python/FatrasValidation.py
deleted file mode 100644
index ae1bdd0339b83dc480a3756785ea712a22b43e7a..0000000000000000000000000000000000000000
--- a/Simulation/ISF/ISF_Fatras/ISF_FatrasServices/python/FatrasValidation.py
+++ /dev/null
@@ -1,223 +0,0 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-#
-## @file FatrasExample/python/FatrasValidation.py
-## @purpose Python module to hold common flags to configure JobOptions
-##
-
-""" FatrasValidation
-    Python module to hold storegate keys of InDet objects.
-
-"""
-
-__author__ = "A. Salzburger"
-__version__= "$Revision: 476468 $"
-__doc__    = "FatrasValidation"
-
-__all__    = [ "FatrasValidation" ]
-
-# kindly stolen from AthenaCommonFlags from S. Binet and M. Gallas
-
-##-----------------------------------------------------------------------------
-## Import
-
-from AthenaCommon.JobProperties import JobProperty, JobPropertyContainer
-from AthenaCommon.JobProperties import jobproperties
-
-##-----------------------------------------------------------------------------
-## 1st step: define JobProperty classes
-class OutputFileName(JobProperty):
-    """Steering of output file name for validation"""
-    statusOn     = True
-    allowedTypes = ['str']
-    StoredValue  = 'FatrasOutput.root'
-
-class ValidationStream(JobProperty):
-    """TTree stream to be written at"""
-    statusOn     = True
-    allowedTypes = ['str']
-    StoredValue  = 'val'
-
-class MaterialEffects(JobProperty):
-    """Steering of multiple scattering/eloss (ionisation) in validation"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-
-class MaterialDistortions(JobProperty):
-    """Steering of material distortions in validation"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-
-class Bremsstrahlung(JobProperty):
-    """Steering of energy loss (radiative) in validation"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-
-class CaloEnergyDeposition(JobProperty):
-    """Steering of energy loss (radiative) in validation"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False    
-    
-class PairProduction(JobProperty):
-    """Steering of pair production in validation"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-
-class HadronicInteractions(JobProperty):
-    """Steering of pair production in validation"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-
-class ParticleDecay(JobProperty):
-    """Steering of particle decay in validation"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-
-class PreProcessing(JobProperty):
-    """Steering of validation: validation simulation containers"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-
-class Simulation(JobProperty):
-    """Steering of validation: validation simulation containers"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-
-class Refitting(JobProperty):
-    """Steering of validation: validation refitted track containers"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-        
-class TrkNtuple(JobProperty):
-    """Steering of validation: validation using trk ntuple"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-
-class Residuals(JobProperty):
-    """Steering of validation: validation using residuals for validation"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-
-class TrackPlotter(JobProperty):
-    """Steering of validation: switch for track plotter"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-
-class McNtuple(JobProperty):
-    """Steering of validation: include McNtuple into output ntule"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-
-class PixelClusterSimulation(JobProperty):
-    """Steering of validation: PixelClusterSimulation"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-
-class SctClusterSimulation(JobProperty):
-    """Steering of validation: SCT_ClusterSimulation"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-
-class TrtDriftCircleSimulation(JobProperty):
-    """Steering of validation: TRT_DriftCircleSimulation"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-
-#
-class TrtPostProcessor(JobProperty):
-    """Steering of validation: TRT_Processor"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-
-class MuonSimHitNtuple(JobProperty):
-    """Steering of validation: MuonHitNtuple"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-    
-class MuonSimHitValidation(JobProperty):
-    """Steering of validation: MuonSimHitValidation"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-    
-class MuonMonitoringNtuple(JobProperty):
-    """Steering of validation: Muon MonitoringNtuple"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-
-class CheckGenEventRecord(JobProperty):
-    """Steering of validation: run FatrasTestHepMC to check GenEvent"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-
-##-----------------------------------------------------------------------------
-## 2nd step
-## Definition of the InDet flag container
-class FatrasValidation(JobPropertyContainer):
-    """Container for the Fatras physics list flags
-    """
-    pass
-
-##-----------------------------------------------------------------------------
-## 3rd step
-## adding the container to the general top-level container
-jobproperties.add_Container(FatrasValidation)
-
-##-----------------------------------------------------------------------------
-## 4th step
-## adding ID flags to the InDetKeys container
-jobproperties.FatrasValidation.add_JobProperty(OutputFileName)
-jobproperties.FatrasValidation.add_JobProperty(ValidationStream)
-jobproperties.FatrasValidation.add_JobProperty(MaterialEffects)
-jobproperties.FatrasValidation.add_JobProperty(MaterialDistortions)
-jobproperties.FatrasValidation.add_JobProperty(Bremsstrahlung)
-jobproperties.FatrasValidation.add_JobProperty(CaloEnergyDeposition)
-jobproperties.FatrasValidation.add_JobProperty(PairProduction)
-jobproperties.FatrasValidation.add_JobProperty(HadronicInteractions)
-jobproperties.FatrasValidation.add_JobProperty(ParticleDecay)
-jobproperties.FatrasValidation.add_JobProperty(PreProcessing)
-jobproperties.FatrasValidation.add_JobProperty(Simulation)
-jobproperties.FatrasValidation.add_JobProperty(Refitting)
-jobproperties.FatrasValidation.add_JobProperty(TrkNtuple)
-jobproperties.FatrasValidation.add_JobProperty(PixelClusterSimulation)
-jobproperties.FatrasValidation.add_JobProperty(SctClusterSimulation)
-jobproperties.FatrasValidation.add_JobProperty(TrtDriftCircleSimulation)
-#
-jobproperties.FatrasValidation.add_JobProperty(TrtPostProcessor)
-jobproperties.FatrasValidation.add_JobProperty(Residuals)
-jobproperties.FatrasValidation.add_JobProperty(TrackPlotter)
-jobproperties.FatrasValidation.add_JobProperty(McNtuple)
-jobproperties.FatrasValidation.add_JobProperty(MuonSimHitNtuple)
-jobproperties.FatrasValidation.add_JobProperty(MuonSimHitValidation)
-jobproperties.FatrasValidation.add_JobProperty(MuonMonitoringNtuple)
-jobproperties.FatrasValidation.add_JobProperty(CheckGenEventRecord)
-
-##-----------------------------------------------------------------------------
-## 5th step
-## short-cut for lazy people
-## carefull: do not select FatrasValidation as a short name as well. 
-## otherwise problems with pickle
-## Note: you still have to import it:
-## >>> from FatrasExample.FatrasValidation import FatrasValidationFlags
-FatrasValidationFlags = jobproperties.FatrasValidation
diff --git a/Simulation/ISF/ISF_Fatras/ISF_FatrasServices/python/ISF_FatrasJobProperties.py b/Simulation/ISF/ISF_Fatras/ISF_FatrasServices/python/ISF_FatrasJobProperties.py
deleted file mode 100644
index c8a5efcaa732b6b46209368315a65c2e5afc3b41..0000000000000000000000000000000000000000
--- a/Simulation/ISF/ISF_Fatras/ISF_FatrasServices/python/ISF_FatrasJobProperties.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-## @file ISF_FatrasJobProperties.py
-## @brief Python module to hold common flags to configure JobOptions
-##
-
-""" ISF_FatrasJobProperties
-    Python module to hold storegate keys of InDet objects.
-
-"""
-
-__author__ = "A. Salzburger"
-__version__= "$Revision: 476468 $"
-__doc__    = "FatrasJobProperties"
-
-__all__    = [ "ISF_FatrasJobProperties" ]
-
-# kindly stolen from AthenaCommonFlags from S. Binet and M. Gallas
-
-##-----------------------------------------------------------------------------
-## Import
-
-from AthenaCommon.JobProperties import JobProperty, JobPropertyContainer
-from AthenaCommon.JobProperties import jobproperties
-
-##-----------------------------------------------------------------------------
-## 1st step
-## Define the flag
-class OutputLevelGeneral(JobProperty):
-    """The output level for Tools"""
-    statusOn     = True
-    allowedTypes = ['int']
-    StoredValue  = 3
-
-class OutputLevelToolsID(JobProperty):
-    """The output level for Tools"""
-    statusOn     = True
-    allowedTypes = ['int']
-    StoredValue  = 3
-
-class OutputLevelToolsMS(JobProperty):
-    """The output level for Tools"""
-    statusOn     = True
-    allowedTypes = ['int']
-    StoredValue  = 3
-
-class OutputLevelToolsG4(JobProperty):
-    """The output level for Tools"""
-    statusOn     = True
-    allowedTypes = ['int']
-    StoredValue  = 3
-
-class RandomStreamName(JobProperty):
-    """The output level for Tools"""
-    statusOn     = True
-    allowedTypes = ['str']
-    StoredValue  = 'FatrasRnd'
-
-##-----------------------------------------------------------------------------
-## 2nd step
-## Definition of the InDet flag container
-class ISF_FatrasJobProperties(JobPropertyContainer):
-    """Container for the ISF_Fatras key flags
-    """
-    pass
-
-##-----------------------------------------------------------------------------
-## 3rd step
-## adding the container to the general top-level container
-jobproperties.add_Container(ISF_FatrasJobProperties)
-
-
-##-----------------------------------------------------------------------------
-## 4th step
-## adding the flags to the  container
-jobproperties.ISF_FatrasJobProperties.add_JobProperty(OutputLevelGeneral)
-jobproperties.ISF_FatrasJobProperties.add_JobProperty(OutputLevelToolsID)
-jobproperties.ISF_FatrasJobProperties.add_JobProperty(OutputLevelToolsMS)
-jobproperties.ISF_FatrasJobProperties.add_JobProperty(OutputLevelToolsG4)
-jobproperties.ISF_FatrasJobProperties.add_JobProperty(RandomStreamName)
-
-##-----------------------------------------------------------------------------
-## 5th step
-## short-cut for lazy people
-## carefull: do not select FatrasJobProperties as a short name as well. 
-## otherwise problems with pickle
-## Note: you still have to import it:
-## >>> from ISF_FatrasServices.ISF_FatrasJobProperties import ISF_FatrasFlags
-ISF_FatrasFlags = jobproperties.ISF_FatrasJobProperties
diff --git a/Simulation/ISF/ISF_HepMC/ISF_HepMC_Tools/python/ISF_HepMC_ToolsConfig.py b/Simulation/ISF/ISF_HepMC/ISF_HepMC_Tools/python/ISF_HepMC_ToolsConfig.py
index 103a6bfcefe0886a7244e87590247ff0091840f5..8d495875d700a3a14839ad5ae869ed96f2ff8eb1 100644
--- a/Simulation/ISF/ISF_HepMC/ISF_HepMC_Tools/python/ISF_HepMC_ToolsConfig.py
+++ b/Simulation/ISF/ISF_HepMC/ISF_HepMC_Tools/python/ISF_HepMC_ToolsConfig.py
@@ -191,7 +191,7 @@ def TruthStrategyGroupID_MC15Cfg(flags, name="ISF_MCTruthStrategyGroupID_MC15",
     kwargs.setdefault("VertexTypes", [3, 14, 15, 4, 5, 6, 7, 2, 12, 13])
     kwargs.setdefault("VertexTypeRangeLow", 201)  # All kinds of decay processes
     kwargs.setdefault("VertexTypeRangeHigh", 298)  # ...
-    kwargs.setdefault("Regions", [1,2]) # Could import AtlasDetDescr::AtlasRegion enum as in TruthService CfgGetter methods here
+    kwargs.setdefault("Regions", [1,2]) # Could import AtlasDetDescr::AtlasRegion enum as in TruthService Cfg methods here
     result.setPrivateTools(CompFactory.ISF.GenericTruthStrategy(name, **kwargs))
     return result
 
diff --git a/Trigger/TrigAlgorithms/TrigEFMissingET/CMakeLists.txt b/Trigger/TrigAlgorithms/TrigEFMissingET/CMakeLists.txt
index e3e0506cb8a465c8e32fc99a517c59b69ef5a413..5b30e0529bb45ddf917384f8b85f6940b42c551a 100644
--- a/Trigger/TrigAlgorithms/TrigEFMissingET/CMakeLists.txt
+++ b/Trigger/TrigAlgorithms/TrigEFMissingET/CMakeLists.txt
@@ -1,12 +1,11 @@
-# Copyright (C) 2002-2022 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration
 
 # Declare the package name:
 atlas_subdir( TrigEFMissingET )
 
 # External dependencies:
-find_package( ROOT COMPONENTS Core Hist Matrix )
 find_package( Eigen )
-find_package(lwtnn)
+find_package( lwtnn )
 
 atlas_add_library( TrigEFMissingETLib 
    Root/*.cxx
@@ -20,18 +19,15 @@ atlas_add_library( TrigEFMissingETLib
 # Component(s) in the package:
 atlas_add_component( TrigEFMissingET
    src/*.cxx src/components/*.cxx
-   INCLUDE_DIRS ${ROOT_INCLUDE_DIRS}
-   LINK_LIBRARIES ${ROOT_LIBRARIES}
-      TrigEFMissingETLib AthenaBaseComps AsgTools StoreGateLib AthContainers GaudiKernel AthenaMonitoringKernelLib AthLinks
+   LINK_LIBRARIES
+      TrigEFMissingETLib AthenaBaseComps StoreGateLib AthContainers GaudiKernel AthenaMonitoringKernelLib AthLinks
       CaloGeoHelpers CaloEvent CaloConditions CaloIdentifier 
       InDetTrackSelectionToolLib TrackVertexAssociationToolLib TrkCaloExtension RecoToolInterfaces JetEDM
-      xAODCaloEvent xAODTrigMissingET xAODTracking xAODJet xAODBase xAODEventShape xAODPFlow
+      xAODCaloEvent xAODTrigMissingET xAODJet xAODBase xAODEventShape xAODPFlow
       PathResolver
       ${LWTNN_LIBRARIES}
 )
 
 # Install files from the package:
 atlas_install_python_modules( python/*.py POST_BUILD_CMD ${ATLAS_FLAKE8} --extend-select=ATL900,ATL901 )
-atlas_install_joboptions( share/*.py )
-
 
diff --git a/Trigger/TrigAlgorithms/TrigEFMissingET/share/mc_job_test.py b/Trigger/TrigAlgorithms/TrigEFMissingET/share/mc_job_test.py
deleted file mode 100644
index 3296499cec1c60a83ef4d5ada9365d92338b696d..0000000000000000000000000000000000000000
--- a/Trigger/TrigAlgorithms/TrigEFMissingET/share/mc_job_test.py
+++ /dev/null
@@ -1,20 +0,0 @@
-import AthenaPoolCnvSvc.ReadAthenaPool
-from TrigEFMissingET.TrigEFMissingETMTConfig import getMETMonTool
-
-looper = theApp.EventLoop
-if not hasattr(svcMgr, looper):
-  svcMgr += getattr(CfgMgr, looper)()
-getattr(svcMgr, looper).EventPrintoutInterval = 1000
-
-if not hasattr(svcMgr, "THistSvc"):
-    svcMgr += CfgMgr.THistSvc()
-svcMgr.THistSvc.Output += ["TREE DATAFILE='mc_tree.root' OPT='RECREATE'"]
-
-ToolSvc += CfgMgr.Trig__TrigDecisionTool("TrigDecisionTool")
-
-anaSeq = CfgMgr.AthSequencer("AnalysisSequence")
-athAlgSeq += anaSeq
-
-anaSeq += CfgMgr.HLT__MET__NNHLTFex(
-    MonTool=getMETMonTool()
-)
diff --git a/Trigger/TrigAlgorithms/TrigT2CaloEgamma/CMakeLists.txt b/Trigger/TrigAlgorithms/TrigT2CaloEgamma/CMakeLists.txt
index a99acdf667791376a565148cd71ad1ddc0b79e77..64cead83882ef13d0b8145032cad759dbd04d096 100644
--- a/Trigger/TrigAlgorithms/TrigT2CaloEgamma/CMakeLists.txt
+++ b/Trigger/TrigAlgorithms/TrigT2CaloEgamma/CMakeLists.txt
@@ -8,8 +8,9 @@ atlas_add_library  ( TrigT2CaloEgammaLib
 		     src/*.cxx
                      src/helpers/*.cxx
 		     PUBLIC_HEADERS TrigT2CaloEgamma
-		     LINK_LIBRARIES CaloEvent CaloGeoHelpers SGTools xAODTrigCalo xAODTrigRinger GaudiKernel TrigSteeringEvent TrigT2CaloCommonLib CaloDetDescrLib TrigT2CaloCalibrationLib TrigTimeAlgsLib
-		     PRIVATE_LINK_LIBRARIES CaloIdentifier AthLinks IRegionSelector TrigT1Interfaces )
+		     LINK_LIBRARIES CaloEvent CaloGeoHelpers SGTools xAODTrigCalo xAODTrigRinger GaudiKernel TrigSteeringEvent TrigT2CaloCommonLib CaloDetDescrLib TrigT2CaloCalibrationLib TrigTimeAlgsLib egammaMVACalibLib 
+		     PRIVATE_LINK_LIBRARIES CaloIdentifier AthLinks IRegionSelector TrigT1Interfaces 
+			 )
 atlas_add_component( TrigT2CaloEgamma
                      src/components/*.cxx
                      LINK_LIBRARIES TrigT2CaloEgammaLib )
diff --git a/Trigger/TrigAlgorithms/TrigT2CaloEgamma/python/TrigT2CaloEgammaConfig.py b/Trigger/TrigAlgorithms/TrigT2CaloEgamma/python/TrigT2CaloEgammaConfig.py
index 0476fc5ecb725a70da4a1b5b03e82cdc8f34a122..70082eea379c63daac76cc986593d7c259cfc3e5 100644
--- a/Trigger/TrigAlgorithms/TrigT2CaloEgamma/python/TrigT2CaloEgammaConfig.py
+++ b/Trigger/TrigAlgorithms/TrigT2CaloEgamma/python/TrigT2CaloEgammaConfig.py
@@ -132,7 +132,7 @@ def t2CaloEgamma_ReFastAlgoCfg(flags, name="T2CaloEgamma_ReFastAlgo", ClustersNa
                                          ExtraInputs={('TileEMScale','ConditionStore+TileEMScale'),
                                                       ('TileBadChannels','ConditionStore+TileBadChannels')})
     monTool = GenericMonitoringTool(flags, 'MonTool')
-    monTool.defineHistogram('TrigEMCluster_eT', path='EXPERT', type='TH1F', title="T2Calo Egamma E_T; E_T [ GeV ] ; Nclusters", xbins=80, xmin=0.0, xmax=80.0)
+    monTool.defineHistogram('TrigEMCluster_eT', path='EXPERT', type='TH1F', title="T2Calo Egamma E_T; E_T [ GeV ] ; Nclusters", xbins=80, xmin=0.0, xmax=140.0)
     monTool.defineHistogram('TrigEMCluster_had1', path='EXPERT', type='TH1F', title="T2Calo Egamma had E_T samp1; had E_T samp1 [ GeV ] ; Nclusters", xbins=80, xmin=0.0, xmax=8.0)
     monTool.defineHistogram('TrigEMCluster_eta', path='EXPERT', type='TH1F', title="T2Calo Egamma #eta; #eta ; Nclusters", xbins=100, xmin=-2.5, xmax=2.5)
     monTool.defineHistogram('TrigEMCluster_phi', path='EXPERT', type='TH1F', title="T2Calo Egamma #phi; #phi ; Nclusters", xbins=128, xmin=-3.2, xmax=3.2)
@@ -140,6 +140,10 @@ def t2CaloEgamma_ReFastAlgoCfg(flags, name="T2CaloEgamma_ReFastAlgo", ClustersNa
     monTool.defineHistogram('TrigEMCluster_rEta', path='EXPERT', type='TH1F', title="T2Calo Egamma rEta; rEta (e237/e277) ; Nclusters", xbins=140, xmin=-0.2, xmax=1.2)
     monTool.defineHistogram('TIME_exec', path='EXPERT', type='TH1F', title="T2Calo Egamma time; time [ us ] ; Nruns", xbins=80, xmin=0.0, xmax=8000.0)
     monTool.defineHistogram('TrigEMCluster_eta,TIME_exec', path='EXPERT', type='TH2F', title="T2Calo Egamma time vs #eta ; #eta ; time [ us ]", xbins=100, xmin=-2.5, xmax=2.5, ybins=80, ymin=0.0, ymax=8000.0)
+    monTool.defineHistogram('TrigEMCluster_eta,TrigEMCluster_eT', path='EXPERT', type='TH2F', title="Egamma E_T vs #eta ; #eta; E_T", xbins=100, xmin=-2.5, xmax=2.5)
+    monTool.defineHistogram('TrigEMCluster_eta,TrigEMCluster_et_rings', path='EXPERT', type='TH2F', title="Egamma E_T[calib] vs #eta ; #eta; E_T", xbins=100, xmin=-2.5, xmax=2.5)
+    monTool.defineHistogram('Resolution_et', path='EXPERT', type='TH1F', title="Egamma Resolution E_T; (E_T[calib] - E_T)/ E_T ; Nclusters", xbins=80, xmin=-10.0, xmax=10.0)
+    monTool.defineHistogram('TrigEMCluster_et_rings', path='EXPERT', type='TH1F', title="Egamma E_T [calib]; E_T [ GeV ] ; Nclusters", xbins=80, xmin=0.0, xmax=140.0)
 
     alg = CompFactory.T2CaloEgammaReFastAlgo(
         name,
@@ -162,6 +166,14 @@ def t2CaloEgamma_ReFastAlgoCfg(flags, name="T2CaloEgamma_ReFastAlgo", ClustersNa
                                    ClustersName = ClustersName)
         alg.IReAlgToolList += [ringer]
 
+        if flags.Trigger.egamma.fastCaloETCalibration:
+            ringerCalib = CompFactory.TrigFastCalibWithRings(name="FastCalibRingsTool",
+            CalibPath = flags.Trigger.egamma.fastCaloETCalibrationVersion, RingerKey="HLT_FastCaloRinger")
+            alg.CalibWRingsTool = ringerCalib
+            alg.DoCalibWithRings = True
+
+        alg.UseRings  = True
+
 
     alg.RoIs=RoIs
     alg.ExtraInputs |= ExtraInputs
diff --git a/Trigger/TrigAlgorithms/TrigT2CaloEgamma/src/T2CaloEgammaReFastAlgo.cxx b/Trigger/TrigAlgorithms/TrigT2CaloEgamma/src/T2CaloEgammaReFastAlgo.cxx
index 81e32ad204c3cf012cdb65f818b174d427f176ca..8b3baf5a43cf40c6096b04d96b14cd0d0f5abbe8 100644
--- a/Trigger/TrigAlgorithms/TrigT2CaloEgamma/src/T2CaloEgammaReFastAlgo.cxx
+++ b/Trigger/TrigAlgorithms/TrigT2CaloEgamma/src/T2CaloEgammaReFastAlgo.cxx
@@ -34,6 +34,8 @@ StatusCode T2CaloEgammaReFastAlgo::initialize()
   ATH_CHECK(m_roiCollectionKey.initialize());
   ATH_CHECK( m_bcidAvgKey.initialize() );
   if (! m_monTool.empty() ) ATH_CHECK( m_monTool.retrieve() );
+  if(m_doCalibWithRings) ATH_CHECK( m_calibWRingsTool.retrieve() );
+
   return StatusCode::SUCCESS;
 }
 
@@ -46,7 +48,10 @@ StatusCode T2CaloEgammaReFastAlgo::execute(const EventContext& context) const
   auto clEta = Monitored::Scalar("TrigEMCluster_eta",-999.0);
   auto clPhi = Monitored::Scalar("TrigEMCluster_phi",-999.0);
   auto clReta = Monitored::Scalar("TrigEMCluster_rEta",-999.0);
-  auto monitoring = Monitored::Group( m_monTool, timer, clET, clHET, clEta, clPhi, clReta);
+  auto clETrings = Monitored::Scalar("TrigEMCluster_et_rings",-999.0); 
+  auto res_et = Monitored::Scalar("Resolution_et",-999.0);
+  auto monitoring = Monitored::Group( m_monTool, timer, clET, clHET, clEta, clPhi, clReta,clETrings,res_et);
+ 
 
   SG::WriteHandle<xAOD::TrigEMClusterContainer> trigEmClusterCollection(m_clusterContainerKey, context);
   ATH_CHECK( trigEmClusterCollection.record(std::make_unique<xAOD::TrigEMClusterContainer>(),
@@ -152,6 +157,8 @@ StatusCode T2CaloEgammaReFastAlgo::execute(const EventContext& context) const
       ptrigEmCluster->setEnergy(0.0);
       ptrigEmCluster->setEt(0.0);
     }
+
+
       if ( caloDDE != 0 ){
         if ( caloDDE->is_lar_em_barrel() ){
           for( ToolHandleArray<IEgammaCalibration>::const_iterator
@@ -165,6 +172,17 @@ StatusCode T2CaloEgammaReFastAlgo::execute(const EventContext& context) const
             (*ical)->makeCorrection(ptrigEmCluster,caloDDE);
         }
       }
+     
+     float et_calib = -999.0;
+     float et_uncalib = ptrigEmCluster->et();
+     if ( m_doCalibWithRings ){
+        ATH_CHECK(m_calibWRingsTool->checkRings(context));
+        et_calib = m_calibWRingsTool->makeCalibWRings(context);
+        ptrigEmCluster->setEt(et_calib);
+
+     }
+
+
     float calZ0 = 0;
 
     // Print out Cluster produced
@@ -187,10 +205,15 @@ StatusCode T2CaloEgammaReFastAlgo::execute(const EventContext& context) const
                              << std::dec);
     }
     // my monitoring
-    clET = ptrigEmCluster->et()*1e-3;
+    clET = et_uncalib*1e-3;
     clHET = ptrigEmCluster->ehad1()*1e-3;
     clEta = ptrigEmCluster->eta();
     clPhi = ptrigEmCluster->phi();
+    clETrings = et_calib*1e-3;
+   
+    res_et = (et_calib - et_uncalib)/et_uncalib;
+    
+
     if ( ptrigEmCluster->e277() > 0.01 ) clReta = ptrigEmCluster->e237()/ptrigEmCluster->e277();
 
   } // end of roiCollection iterator
diff --git a/Trigger/TrigAlgorithms/TrigT2CaloEgamma/src/T2CaloEgammaReFastAlgo.h b/Trigger/TrigAlgorithms/TrigT2CaloEgamma/src/T2CaloEgammaReFastAlgo.h
index 78d9ec50b6d65742d142865dc98372a8aaaec4af..2529ac1e8ff302ef7cf2a2a49c9881be72e8afd7 100644
--- a/Trigger/TrigAlgorithms/TrigT2CaloEgamma/src/T2CaloEgammaReFastAlgo.h
+++ b/Trigger/TrigAlgorithms/TrigT2CaloEgamma/src/T2CaloEgammaReFastAlgo.h
@@ -28,6 +28,7 @@
 #include "xAODTrigCalo/TrigEMClusterContainer.h"
 #include "xAODTrigCalo/TrigEMClusterAuxContainer.h"
 #include "AthenaMonitoringKernel/GenericMonitoringTool.h"
+#include "TrigFastCalibWithRings.h"
 
 #include <string>
 
@@ -50,6 +51,13 @@ class T2CaloEgammaReFastAlgo : public AthReentrantAlgorithm {
 
     Gaudi::Property<bool> m_doForward{this, "DoForward", false,
                                        "Do Forward clusters"};
+
+    Gaudi::Property<bool> m_doCalibWithRings{this, "DoCalibWithRings", false,
+                                       "FastCalo Et Calibration using Rings"};
+
+    Gaudi::Property<bool> m_useRings{this, "UseRings", false,
+                                       "Rings are Used in FastCaloFex"};
+
   
     Gaudi::Property<float> m_l1eta{this, "L1ForceEta", -10.0, "Forced LVL1 eta"};
     Gaudi::Property<float> m_l1phi{this, "L1ForcePhi", -10.0, "Forced LVL1 phi"};
@@ -76,9 +84,12 @@ class T2CaloEgammaReFastAlgo : public AthReentrantAlgorithm {
   
     SG::ReadHandleKey<TrigRoiDescriptorCollection> m_roiCollectionKey{
       this, "RoIs", "OutputRoIs", "input RoIs"};
+    
     SG::WriteHandleKey<xAOD::TrigEMClusterContainer> m_clusterContainerKey{
         this, "ClustersName", "CaloClusters", "Calo cluster container"};
     ToolHandle< GenericMonitoringTool > m_monTool { this, "MonTool", "", "Monitoring tool" };
+    ToolHandle< TrigFastCalibWithRings > m_calibWRingsTool { this, "CalibWRingsTool", "", "FastCalo Calib with Rings tool" };
+
 };
 
 #endif
diff --git a/Trigger/TrigAlgorithms/TrigT2CaloEgamma/src/TrigFastCalibWithRings.cxx b/Trigger/TrigAlgorithms/TrigT2CaloEgamma/src/TrigFastCalibWithRings.cxx
new file mode 100644
index 0000000000000000000000000000000000000000..9d9ee9d2ce0c76dbdc2c76b15f5f38e97a5671db
--- /dev/null
+++ b/Trigger/TrigAlgorithms/TrigT2CaloEgamma/src/TrigFastCalibWithRings.cxx
@@ -0,0 +1,205 @@
+/*
+   Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration
+*/
+
+
+
+#include "TrigFastCalibWithRings.h"
+#include <AsgMessaging/MessageCheck.h>
+
+TrigFastCalibWithRings::TrigFastCalibWithRings([[maybe_unused]] const std::string& type, const std::string& myname, [[maybe_unused]] const IInterface* parent):asg::AsgTool(myname){} 
+
+
+TrigFastCalibWithRings::~TrigFastCalibWithRings(){}
+
+StatusCode TrigFastCalibWithRings::initialize() {
+
+    ATH_CHECK(m_ringerKey.initialize());
+
+    //Setup the BDTs ...
+    ATH_CHECK(setupBDTFastCalo(PathResolverFindCalibFile(m_CalibPath)));
+   
+ 
+    return StatusCode::SUCCESS;
+}
+
+StatusCode TrigFastCalibWithRings::execute() const {
+
+    return StatusCode::SUCCESS;
+}
+
+StatusCode TrigFastCalibWithRings::checkRings(const EventContext& ctx ) const {
+   SG::ReadHandle<xAOD::TrigRingerRingsContainer> rgCont( m_ringerKey, ctx);
+   ATH_CHECK(rgCont.isValid());
+   const xAOD::TrigRingerRings_v2 *ring=rgCont->at(0);
+
+
+    if(!ring->emCluster()){
+      ATH_MSG_WARNING("There is no link to emCluster.");
+      return StatusCode::FAILURE;
+    }
+    
+    return StatusCode::SUCCESS;
+
+}
+
+
+StatusCode TrigFastCalibWithRings::setupBDTFastCalo(const std::string& fileName){
+
+
+
+  std::unique_ptr<TFile> f(TFile::Open(fileName.c_str()));
+  if (!f || f->IsZombie()) {
+    ATH_MSG_FATAL("Could not open " << fileName);
+    return StatusCode::FAILURE;
+  }
+
+  // Load hPoly
+  TH2Poly *hPoly = nullptr;
+  f->GetObject("hPoly", hPoly);
+  if (!hPoly) {
+    ATH_MSG_FATAL("Could not find hPoly");
+    return StatusCode::FAILURE;
+  }
+  //pass ownership to class variable
+  m_hPoly.reset(static_cast<TH2Poly*>(hPoly));
+  m_hPoly->SetDirectory(nullptr);
+
+  // Load variables
+  TObjArray *variablesTmp = nullptr;
+  f->GetObject("variables", variablesTmp);
+  if (!variablesTmp) {
+    ATH_MSG_FATAL("Could not find variables");
+    return StatusCode::FAILURE;
+  }
+  auto variables = std::unique_ptr<TObjArray>(variablesTmp);
+  variables->SetOwner(); // to delete the objects when d-tor is called
+
+  // Load shifts
+  TObjArray *shiftsTmp = nullptr;
+  f->GetObject("shifts", shiftsTmp);
+  if (!shiftsTmp) {
+    ATH_MSG_FATAL("Could not find shifts");
+    return StatusCode::FAILURE;
+  }
+  auto shifts = std::unique_ptr<TObjArray>(shiftsTmp);
+  shifts->SetOwner(); // to delete the objects when d-tor is called
+
+  // Load trees
+  TObjArray *treesTmp = nullptr;
+  //std::unique_ptr<TObjArray> trees;
+  TObjArray *trees = nullptr;
+  f->GetObject("trees", treesTmp);
+  if (treesTmp) {
+    trees = treesTmp; 
+    trees->SetOwner(); // to delete the objects when d-tor is called
+    ATH_MSG_DEBUG("setupBDT " << "BDTs read from TObjArray");
+  } else {
+    ATH_MSG_DEBUG("setupBDT " << "Reading trees individually");
+    trees = new TObjArray();
+    trees->SetOwner(); // to delete the objects when d-tor is called
+    for (int i = 0; i < variables->GetEntries(); ++i)
+    {
+      TTree *tree = nullptr;
+      f->GetObject(Form("BDT%d", i), tree);
+      if (tree) tree->SetCacheSize(0);
+      trees->AddAtAndExpand(tree, i);
+    }
+  }
+
+  // Ensure the objects have (the same number of) entries
+  if (!trees->GetEntries() || !(trees->GetEntries() == variables->GetEntries())) {
+    ATH_MSG_FATAL("Tree has size " << trees->GetEntries()
+		  << " while variables has size " << variables->GetEntries());
+    return StatusCode::FAILURE;
+  }
+
+  // Loop simultaneously over trees, variables and shifts
+  // Define the BDTs, the list of variables and the shift for each BDT
+  TObjString *str2;
+
+  TTree *tree;
+  TIter nextTree(trees);
+  TIter nextVariables(variables.get());
+  TIter nextShift(shifts.get());
+  for (int i=0; (tree = (TTree*) nextTree()) && ((TObjString*) nextVariables()); ++i)
+  {
+    m_BDTs.emplace_back(tree);
+
+    std::vector<std::function<float(const xAOD::Egamma*, const xAOD::CaloCluster*)> > funcs;
+    // Loop over variables, which are separated by comma
+    char separator_var = ';';
+    if (getString(variables->At(i)).Index(";") < 1) separator_var = ','; // old versions
+    std::unique_ptr<TObjArray> tokens(getString(variables->At(i)).Tokenize(separator_var));
+    TIter nextVar(tokens.get());
+    while ((str2 = (TObjString*) nextVar()))
+    {
+      const TString& varName = getString(str2);
+      if (!varName.Length()) {
+        ATH_MSG_FATAL("There was an empty variable name!");
+        return StatusCode::FAILURE;
+      }
+    }
+  }
+return StatusCode::SUCCESS;
+}
+
+
+const TString& TrigFastCalibWithRings::getString(TObject* obj) 
+{
+  TObjString *objS = dynamic_cast<TObjString*>(obj);
+  if (!objS) {
+    throw std::runtime_error("egammaMVACalibTool::getString was passed something that was not a string object");
+  }
+  return objS->GetString();
+}
+
+
+
+float TrigFastCalibWithRings::makeCalibWRings(const EventContext& ctx) const {
+    SG::ReadHandle<xAOD::TrigRingerRingsContainer> rgCont( m_ringerKey, ctx);
+    const xAOD::TrigRingerRings_v2 *ring=rgCont->at(0);
+
+
+    //Open the EventContext and create a BDT input vector: Rings + Et + eta
+    float eta_cluster=ring->emCluster()->eta();
+    float et_cluster=ring->emCluster()->et();
+    const static std::vector<float>rings=rgCont->at(0)->rings();
+
+    //Define the Rings to be used as inputs
+    const std::vector<int>inputRingsIndex{0,1,2,3,8,9,10,11,12,13,14,15,72,73,74,75,76,77,78,79,81,82,83,84,88,89,90,91};
+
+    if (!(static_cast<int>(ring ->size()) > inputRingsIndex.back())){
+        throw std::runtime_error("The last ring index is bigger than the ring's lenght");
+    }
+
+    std::vector<float>ringsInput;
+    for(auto index:inputRingsIndex)ringsInput.push_back(rings[index]);
+
+    const TH2Poly* hPoly = m_hPoly.get();
+    const int bin = hPoly->FindFixBin(eta_cluster, et_cluster/Gaudi::Units::GeV) - 1; // poly bins are shifted by one
+
+    ATH_MSG_DEBUG("Using bin: " << bin);
+
+    if (bin < 0) {
+      ATH_MSG_DEBUG("The bin is under/overflow; just return the energy");
+      return et_cluster;
+    }
+
+    if (bin >= static_cast<int>(m_BDTs.size())) {
+      ATH_MSG_WARNING("The bin is outside the range, so just return the energy");
+      return et_cluster;
+    }
+
+    // select the bdt and functions. (shifts are done later if needed)
+    // if there is only one BDT just use that
+    const int bin_BDT = m_BDTs.size() != 1 ? bin : 0;
+    const auto& bdt = m_BDTs[bin_BDT];
+
+    // evaluate the BDT response
+    const float mvaOutput = bdt.GetResponse(ringsInput);
+
+
+
+    return et_cluster*mvaOutput;
+}
diff --git a/Trigger/TrigAlgorithms/TrigT2CaloEgamma/src/TrigFastCalibWithRings.h b/Trigger/TrigAlgorithms/TrigT2CaloEgamma/src/TrigFastCalibWithRings.h
new file mode 100644
index 0000000000000000000000000000000000000000..ce2586382ac43e1a96afdfb3f41bfb11789c4a04
--- /dev/null
+++ b/Trigger/TrigAlgorithms/TrigT2CaloEgamma/src/TrigFastCalibWithRings.h
@@ -0,0 +1,55 @@
+/*
+  Copyright (C) 2002-2023 CERN for the benefit of the ATLAS collaboration
+*/
+
+
+
+#include "egammaMVACalib/egammaMVACalibTool.h"
+#include "EgammaAnalysisInterfaces/IegammaMVACalibTool.h"
+#include <string>
+#include "xAODTrigRinger/TrigRingerRingsContainer.h"
+
+#include "TFile.h"
+#include "TMath.h"
+#include "TObjString.h"
+#include "TTree.h"
+#include "TClass.h"
+
+#include "PathResolver/PathResolver.h"
+#include "CxxUtils/checker_macros.h"
+#include "AsgTools/AsgTool.h"
+
+#ifndef TRIGFASTCALIBWITHRINGS_H
+#define TRIGFASTCALIBWITHRINGS_H
+
+class TrigFastCalibWithRings: public asg::AsgTool
+
+{
+
+    public:
+        TrigFastCalibWithRings(const std::string& type, const std::string& myname, const IInterface* parent);
+        ~TrigFastCalibWithRings();
+        StatusCode initialize() final;
+        StatusCode execute() const;
+        float makeCalibWRings(const EventContext& ctx) const;
+        StatusCode setupBDTFastCalo(const std::string& fileName); 
+        static const TString& getString(TObject* obj);
+        StatusCode checkRings(const EventContext& ctx ) const;
+
+
+    private:
+        Gaudi::Property<std::string> m_CalibPath{this, "CalibPath", "",
+                                       "Path to BDT File"};
+        SG::ReadHandleKey<xAOD::TrigRingerRingsContainer> m_ringerKey{this, "RingerKey" , "HLT_FastCaloRinger", ""}; 
+        /// A TH2Poly used to extract bin numbers. Note there is an offset of 1
+        std::unique_ptr<TH2Poly> m_hPoly;
+
+        /// Where the BDTs are stored
+        std::vector<MVAUtils::BDT> m_BDTs;
+
+        /// shifts formulas
+        std::vector<TFormula> m_shifts;
+
+   };
+
+#endif // TRIGFASTCALIBWITHRINGS_H
\ No newline at end of file
diff --git a/Trigger/TrigAlgorithms/TrigT2CaloEgamma/src/components/TrigT2CaloEgamma_entries.cxx b/Trigger/TrigAlgorithms/TrigT2CaloEgamma/src/components/TrigT2CaloEgamma_entries.cxx
index c3b36673cbb491532562851356eb9d49c5f2cf4a..12de7f43e5334044d00eae00141a730a47d26a2b 100644
--- a/Trigger/TrigAlgorithms/TrigT2CaloEgamma/src/components/TrigT2CaloEgamma_entries.cxx
+++ b/Trigger/TrigAlgorithms/TrigT2CaloEgamma/src/components/TrigT2CaloEgamma_entries.cxx
@@ -7,6 +7,8 @@
 #include "../EgammaReHadEnFex.h"
 #include "TrigT2CaloEgamma/RingerReFex.h"
 #include "../EgammaAllFex.h"
+#include "../TrigFastCalibWithRings.h"
+
 
 DECLARE_COMPONENT( T2CaloEgammaReFastAlgo )
 DECLARE_COMPONENT( T2CaloEgammaForwardReFastAlgo )
@@ -16,3 +18,4 @@ DECLARE_COMPONENT( EgammaReEmEnFex )
 DECLARE_COMPONENT( EgammaReHadEnFex )
 DECLARE_COMPONENT( RingerReFex )
 DECLARE_COMPONENT( EgammaAllFex )
+DECLARE_COMPONENT( TrigFastCalibWithRings )
diff --git a/Trigger/TrigConfiguration/TrigConfigSvc/python/TrigConfigSvcCfg.py b/Trigger/TrigConfiguration/TrigConfigSvc/python/TrigConfigSvcCfg.py
index fcf8f74a5e4861a047fb16fc6ab58f5544656f2e..68ccc044f2f746679b90e384c0a9b5f4945e6b35 100644
--- a/Trigger/TrigConfiguration/TrigConfigSvc/python/TrigConfigSvcCfg.py
+++ b/Trigger/TrigConfiguration/TrigConfigSvc/python/TrigConfigSvcCfg.py
@@ -14,9 +14,9 @@ log = logging.getLogger('TrigConfigSvcCfg')
 # Either via JSON conversion from Run-1&2 or native Run-3 (see ATR-24531).
 def l1menu_generated():
     try:
-        return l1menu_generated._hasRun
+        return l1menu_generated._hasRun # type: ignore
     except AttributeError:
-        l1menu_generated._hasRun = True
+        l1menu_generated._hasRun = True # type: ignore
         return False
 
 
@@ -362,7 +362,7 @@ if __name__ == "__main__":
 
         def setUp(self):
             # Allow multiple L1 menu generations for these tests
-            l1menu_generated._hasRun = False
+            l1menu_generated._hasRun = False # type: ignore
 
         def test_currentMenu(self):
             from AthenaConfiguration.AllConfigFlags import initConfigFlags
diff --git a/Trigger/TrigConfiguration/TrigConfigSvc/python/TriggerConfigAccess.py b/Trigger/TrigConfiguration/TrigConfigSvc/python/TriggerConfigAccess.py
index 4c39382f0838ab159b327ecb3ab2c447c27977d1..5d875c66603d0566ec7053a9030d0c30f620bc81 100644
--- a/Trigger/TrigConfiguration/TrigConfigSvc/python/TriggerConfigAccess.py
+++ b/Trigger/TrigConfiguration/TrigConfigSvc/python/TriggerConfigAccess.py
@@ -1,5 +1,6 @@
 # Copyright (C) 2002-2022 CERN for the benefit of the ATLAS collaboration
 
+from typing import Any, Optional
 from AthenaConfiguration.AccumulatorCache import AccumulatorCache
 from AthenaCommon.Logging import logging
 log = logging.getLogger( "TriggerConfigAccess.py" )
@@ -9,6 +10,9 @@ from .TrigConfigSvcCfg import getTrigConfigFromFlag, getL1MenuFileName, getHLTMe
 from TrigConfIO.L1TriggerConfigAccess import L1MenuAccess, L1PrescalesSetAccess, BunchGroupSetAccess
 from TrigConfIO.HLTTriggerConfigAccess import HLTMenuAccess, HLTPrescalesSetAccess, HLTJobOptionsAccess, HLTMonitoringAccess
 
+from AthenaConfiguration.AutoConfigFlags import GetFileMD
+from AthenaConfiguration.Enums import Format
+
 from functools import lru_cache
 
 """
@@ -45,7 +49,7 @@ are then loaded from the DB.
 """
 
 @lru_cache(maxsize=None)
-def getKeysFromCool(runNr, lbNr = 0):
+def getKeysFromCool(runNr: int, lbNr: int = 0) -> dict[str,int]:
     """Return dictionary of trigger keys for given run and lumiblock number
     """
     from TrigConfStorage.TriggerCoolUtil import TriggerCoolUtil
@@ -74,7 +78,7 @@ def getKeysFromCool(runNr, lbNr = 0):
 
     return d
 
-def getDBKeysFromMetadata(flags):
+def getDBKeysFromMetadata(flags) -> Optional[dict[str, Any]]:
     """Provides access to the database keys from the in-file metadata
 
     Gets the database keys from the in-file metadata which are stored together with the json representation
@@ -83,7 +87,6 @@ def getDBKeysFromMetadata(flags):
 
     @returns: dictionary with the DB keys. Returns 'None' if information is not present.
     """
-    from AthenaConfiguration.AutoConfigFlags import GetFileMD
     metadata = GetFileMD(flags.Input.Files)
     keys = metadata.get("TriggerConfigInfo", None)
     if keys is None:
@@ -99,8 +102,7 @@ def getDBKeysFromMetadata(flags):
 Returns a string-serialised JSON object from the metadata store.
 Checks AOD syntax first, then fully-qualified ESD syntax
 """
-def _getJSONFromMetadata(flags, key):
-    from AthenaConfiguration.Enums import Format
+def _getJSONFromMetadata(flags, key) -> Optional[dict[str,Any]]:
     if flags.Input.Format != Format.POOL:
         raise RuntimeError("Cannot read trigger configuration (%s) from input type %s", key, flags.Input.Format)
     from AthenaConfiguration.AutoConfigFlags import GetFileMD
@@ -124,14 +126,15 @@ L1 information
 
 """
 @AccumulatorCache
-def getL1MenuAccess( flags = None ):
+def getL1MenuAccess( flags = None ) -> L1MenuAccess:
     tc = getTrigConfigFromFlag( flags )
     if tc["SOURCE"] == "FILE":
         cfg = L1MenuAccess( filename = getL1MenuFileName( flags ) )
     elif tc["SOURCE"] == "COOL":
         """This is the case when reconstructing the data."""
-        from RecExConfig.InputFilePeeker import inpSum
-        keysFromCool = getKeysFromCool( inpSum["run_number"] )
+        if len(flags.Input.RunNumbers) == 0:
+            raise RuntimeError("No run number available in input metadata")
+        keysFromCool = getKeysFromCool( flags.Input.RunNumbers[0] )
         cfg = L1MenuAccess( dbalias = keysFromCool["DB"], smkey = keysFromCool['SMK'] )
     elif tc["SOURCE"] == "DB":
         cfg = L1MenuAccess( dbalias = tc["DBCONN"], smkey = tc["SMK"] )
@@ -143,14 +146,15 @@ def getL1MenuAccess( flags = None ):
 
 
 @AccumulatorCache
-def getL1PrescalesSetAccess( flags = None ):
+def getL1PrescalesSetAccess( flags = None ) -> L1PrescalesSetAccess:
     tc = getTrigConfigFromFlag( flags )
     if tc["SOURCE"] == "FILE":
         cfg = L1PrescalesSetAccess( filename = getL1PrescalesSetFileName( flags ) )
     elif tc["SOURCE"] == "COOL":
         """This is the case when reconstructing the data."""
-        from RecExConfig.InputFilePeeker import inpSum
-        keysFromCool = getKeysFromCool( inpSum["run_number"] )
+        if len(flags.Input.RunNumbers) == 0:
+            raise RuntimeError("No run number available in input metadata")
+        keysFromCool = getKeysFromCool( flags.Input.RunNumbers[0] )
         cfg = L1PrescalesSetAccess( dbalias = keysFromCool["DB"], l1pskey = keysFromCool['L1PSK'] )
     elif tc["SOURCE"] == "DB":
         cfg = L1PrescalesSetAccess( dbalias = tc["DBCONN"], l1pskey = tc["L1PSK"] )
@@ -162,21 +166,21 @@ def getL1PrescalesSetAccess( flags = None ):
 
 
 @AccumulatorCache
-def getBunchGroupSetAccess( flags = None ):
+def getBunchGroupSetAccess( flags = None ) -> BunchGroupSetAccess:
     tc = getTrigConfigFromFlag( flags )
     if tc["SOURCE"] == "FILE":
         cfg = BunchGroupSetAccess( filename = getBunchGroupSetFileName( flags ) )
     elif tc["SOURCE"] == "COOL":
         """This is the case when reconstructing the data."""
-        from RecExConfig.InputFilePeeker import inpSum
-        keysFromCool = getKeysFromCool( inpSum["run_number"] )
+        if len(flags.Input.RunNumbers) == 0:
+            raise RuntimeError("No run number available in input metadata")
+        keysFromCool = getKeysFromCool( flags.Input.RunNumbers[0] )
         cfg = BunchGroupSetAccess( dbalias = keysFromCool["DB"], bgskey = keysFromCool['BGSK'] )
     elif tc["SOURCE"] == "DB":
         cfg = BunchGroupSetAccess( dbalias = tc["DBCONN"], bgskey = tc["BGSK"] )
     elif tc["SOURCE"] == "INFILE":
-        from RecExConfig.InputFilePeeker import inputFileSummary as inpSum
-        if inpSum["file_type"] != 'pool':
-            raise RuntimeError("Cannot read trigger configuration (Bunchgroup Set) from input type %s" % inpSum["file_type"])
+        if flags.Input.Format != Format.POOL:
+            raise RuntimeError(f"Cannot read trigger configuration (Bunchgroup Set) from input type {flags.Input.Format}")
         raise NotImplementedError("Python access to the trigger configuration (Bunchgroup Set) from in-file metadata not yet implemented")
     else:
         raise RuntimeError("Unknown source of trigger configuration: %s" % tc["SOURCE"])
@@ -189,14 +193,15 @@ HLT information
 
 """
 @AccumulatorCache
-def getHLTMenuAccess( flags = None ):
+def getHLTMenuAccess( flags = None ) -> HLTMenuAccess:
     tc = getTrigConfigFromFlag( flags )
     if tc["SOURCE"] == "FILE":
         cfg = HLTMenuAccess( filename = getHLTMenuFileName( flags ) )
     elif tc["SOURCE"] == "COOL":
         """This is the case when reconstructing the data."""
-        from RecExConfig.InputFilePeeker import inpSum
-        keysFromCool = getKeysFromCool( inpSum["run_number"] )
+        if len(flags.Input.RunNumbers) == 0:
+            raise RuntimeError("No run number available in input metadata")
+        keysFromCool = getKeysFromCool( flags.Input.RunNumbers[0] )
         cfg = HLTMenuAccess( dbalias = keysFromCool["DB"], smkey = keysFromCool['SMK'] )
     elif tc["SOURCE"] == "DB":
         cfg = HLTMenuAccess( dbalias = tc["DBCONN"], smkey = tc["SMK"] )
@@ -208,14 +213,15 @@ def getHLTMenuAccess( flags = None ):
 
 
 @AccumulatorCache
-def getHLTPrescalesSetAccess( flags = None ):
+def getHLTPrescalesSetAccess( flags = None ) -> HLTPrescalesSetAccess:
     tc = getTrigConfigFromFlag( flags )
     if tc["SOURCE"] == "FILE":
         cfg = HLTPrescalesSetAccess( filename = getHLTPrescalesSetFileName( flags ) )
     elif tc["SOURCE"] == "COOL":
         """This is the case when reconstructing the data."""
-        from RecExConfig.InputFilePeeker import inpSum
-        keysFromCool = getKeysFromCool( inpSum["run_number"] )
+        if len(flags.Input.RunNumbers) == 0:
+            raise RuntimeError("No run number available in input metadata")
+        keysFromCool = getKeysFromCool( flags.Input.RunNumbers[0] )
         cfg = HLTPrescalesSetAccess( dbalias = keysFromCool["DB"], hltpskey = keysFromCool['HLTPSK'] )
     elif tc["SOURCE"] == "DB":
         cfg = HLTPrescalesSetAccess( dbalias = tc["DBCONN"], hltpskey = tc["HLTPSK"] )
@@ -227,14 +233,15 @@ def getHLTPrescalesSetAccess( flags = None ):
 
 
 @AccumulatorCache
-def getHLTJobOptionsAccess( flags = None ):
+def getHLTJobOptionsAccess( flags = None ) -> HLTJobOptionsAccess:
     tc = getTrigConfigFromFlag( flags )
     if tc["SOURCE"] == "FILE":
-        cfg = HLTJobOptionsAccess( filename = getHLTJobOptionsFileName( flags ) )
+        cfg = HLTJobOptionsAccess( filename = getHLTJobOptionsFileName() )
     elif tc["SOURCE"] == "COOL":
         """This is the case when reconstructing the data."""
-        from RecExConfig.InputFilePeeker import inpSum
-        keysFromCool = getKeysFromCool( inpSum["run_number"] )
+        if len(flags.Input.RunNumbers) == 0:
+            raise RuntimeError("No run number available in input metadata")
+        keysFromCool = getKeysFromCool( flags.Input.RunNumbers[0] )
         cfg = HLTJobOptionsAccess( dbalias = keysFromCool["DB"], smkey = keysFromCool['SMK'] )
     elif tc["SOURCE"] == "DB":
         cfg = HLTJobOptionsAccess( dbalias = tc["DBCONN"], smkey = tc["SMK"] )
@@ -246,14 +253,15 @@ def getHLTJobOptionsAccess( flags = None ):
 
 
 @AccumulatorCache
-def getHLTMonitoringAccess( flags = None ):
+def getHLTMonitoringAccess( flags = None ) -> HLTMonitoringAccess:
     tc = getTrigConfigFromFlag( flags )
     if tc["SOURCE"] == "FILE":
         cfg = HLTMonitoringAccess( filename = getHLTMonitoringFileName( flags ) )
     elif tc["SOURCE"] == "COOL":
         """This is the case when reconstructing the data."""
-        from RecExConfig.InputFilePeeker import inpSum
-        keysFromCool = getKeysFromCool( inpSum["run_number"] )
+        if len(flags.Input.RunNumbers) == 0:
+            raise RuntimeError("No run number available in input metadata")
+        keysFromCool = getKeysFromCool( flags.Input.RunNumbers[0] )
         cfg = HLTMonitoringAccess( dbalias = keysFromCool["DB"], smkey = keysFromCool['SMK'] )
     elif tc["SOURCE"] == "DB":
         cfg = HLTMonitoringAccess( dbalias = tc["DBCONN"], smkey = tc["SMK"] )
diff --git a/Trigger/TrigSteer/HLTSeeding/src/L1DataConsistencyChecker.h b/Trigger/TrigSteer/HLTSeeding/src/L1DataConsistencyChecker.h
index 20ff44dc8fffec93602553a25250abc64aee5e72..453ede36f0f3a5247939a80cc8ebd0a54bbe9aca 100644
--- a/Trigger/TrigSteer/HLTSeeding/src/L1DataConsistencyChecker.h
+++ b/Trigger/TrigSteer/HLTSeeding/src/L1DataConsistencyChecker.h
@@ -51,9 +51,9 @@ private:
   Gaudi::Property<std::vector<std::string>> m_excludedThresholdTypes {
     this, "ExcludedThresholdTypes", {
       "internal", // internal to CTP (randoms)
-      "TOPO", "R2TOPO", // topo (data not used in HLTSeeding)
+      "TOPO", "R2TOPO", "ZBTopo", "LArSaturation", // topo (data not used in HLTSeeding)
       "TE", "XE", "XS", "jTE", "jXE", "gTE", "gXE", // energy (data not used in HLTSeeding)
-      "ALFA", "BCM", "BCMCMB", "BPTX", "CALREQ", "LUCID", "MBTS", "MBTSSI", "NIM", "ZDC", "NSWMon", "ZBTopo" // direct signals from detectors to CTP (no TOB readout)
+      "ALFA", "BCM", "BCMCMB", "BPTX", "CALREQ", "LUCID", "MBTS", "MBTSSI", "NIM", "ZDC", "NSWMon" // direct signals from detectors to CTP (no TOB readout)
     },
     "L1 threshold types which are not subject to the consistency check "
     "(because either there are no corresponding TOBs or HLT doesn't use them)"};
diff --git a/Trigger/TrigT1/TrigT1CaloSim/CMakeLists.txt b/Trigger/TrigT1/TrigT1CaloSim/CMakeLists.txt
index ea6cd3de49d29a6b2e11761520c2db1e677132a2..8ac9f1ebd04d58e70b485941aa042a28d893e4b1 100644
--- a/Trigger/TrigT1/TrigT1CaloSim/CMakeLists.txt
+++ b/Trigger/TrigT1/TrigT1CaloSim/CMakeLists.txt
@@ -16,7 +16,6 @@ atlas_add_component( TrigT1CaloSim
 
 # Install files from the package:
 atlas_install_python_modules( python/*.py POST_BUILD_CMD ${ATLAS_FLAKE8} )
-atlas_install_joboptions( share/*.py )
 
 # Tests:
 atlas_add_test( TrigT1CaloSimRun2Config
diff --git a/Trigger/TrigT1/TrigT1CaloSim/share/TestVectors/RoI_elnew.dat b/Trigger/TrigT1/TrigT1CaloSim/share/TestVectors/RoI_elnew.dat
deleted file mode 100755
index 8f20b001f5c20600209be0108390b3ae2227a168..0000000000000000000000000000000000000000
--- a/Trigger/TrigT1/TrigT1CaloSim/share/TestVectors/RoI_elnew.dat
+++ /dev/null
@@ -1,2421 +0,0 @@
-CF0FE
-00000
- 
-CE0F8
-00000
- 
-0F0FE
-00000
- 
-800E0
-00000
- 
-00000
-CF8FE
- 
-8F0FE
-00000
- 
-800E0
-00000
- 
-CE0DE
-00000
- 
-C80F8
-00000
- 
-00000
-0F0FE
- 
-00000
-000C0
- 
-00000
-CF0FE
- 
-00000
-0F0FE
- 
-00000
-4E0FE
- 
-8C0F8
-00000
- 
-4F8FE
-00000
- 
-8E0FE
-00000
- 
-00000
-4F8FE
- 
-00000
-4E0F8
- 
-0F0FE
-00000
- 
-000F0
-00000
- 
-CF8FE
-00000
- 
-00000
-000E0
- 
-00000
-0F0FE
- 
-8E0DE
-00000
- 
-0C0F8
-00000
- 
-00000
-000F0
- 
-00000
-000F0
- 
-8F8FE
-00000
- 
-00000
-0E0D8
- 
-0C0F8
-00000
- 
-00000
-8FCFF
- 
-00000
-0F0FE
- 
-00000
-CC0F8
- 
-00000
-4FCFF
- 
-C80F8
-00000
- 
-CE0F8
-00000
- 
-00000
-CF0FE
- 
-00000
-8E0FE
- 
-800F0
-00000
- 
-8F8FE
-00000
- 
-00000
-400F0
- 
-800C0
-00000
- 
-00000
-4C0F8
- 
-4C0F8
-00000
- 
-C00E0
-00000
- 
-CF0FE
-00000
- 
-CF8FE
-00000
- 
-00000
-4FCFF
- 
-400F0
-00000
- 
-00000
-C00C0
- 
-0F8FE
-00000
- 
-00000
-CF8FE
- 
-CF8FE
-00000
- 
-800E0
-00000
- 
-00000
-4C0F8
- 
-000C0
-00000
- 
-00000
-0E0FE
- 
-0C0F8
-00000
- 
-00000
-8F8FE
- 
-00000
-0F0FE
- 
-CE0F8
-00000
- 
-00000
-800E0
- 
-00000
-800F0
- 
-00000
-800F0
- 
-00000
-0F0FE
- 
-00000
-C80F8
- 
-00000
-4FCFF
- 
-00000
-8F8FE
- 
-CF0FE
-00000
- 
-8F0FE
-00000
- 
-00000
-000E0
- 
-4F8FE
-00000
- 
-0E0F8
-00000
- 
-00000
-000F0
- 
-CC0F8
-00000
- 
-4C0F8
-00000
- 
-CF8FE
-00000
- 
-00000
-400F0
- 
-00000
-000F0
- 
-00000
-CF0FE
- 
-00000
-8C0F8
- 
-00000
-CF8FE
- 
-00000
-0F0FE
- 
-0C0F8
-00000
- 
-00000
-0C0F8
- 
-400E0
-00000
- 
-CF0FE
-00000
- 
-400E0
-00000
- 
-CF0FE
-00000
- 
-4E0F8
-00000
- 
-4E0F8
-00000
- 
-00000
-4E0FE
- 
-00000
-4C0F8
- 
-4F0FE
-00000
- 
-00000
-0F0FE
- 
-000C0
-00000
- 
-00000
-CE0FE
- 
-00000
-8E0F8
- 
-00000
-8F8FE
- 
-00000
-8C0F8
- 
-CC0F8
-00000
- 
-0C0F8
-00000
- 
-00000
-CE0FE
- 
-CC0F8
-00000
- 
-00000
-4F0FE
- 
-00000
-800E0
- 
-C80F8
-00000
- 
-CE0FE
-00000
- 
-00000
-4E0FE
- 
-0C0F8
-00000
- 
-0F8FE
-00000
- 
-00000
-CC0F8
- 
-CE0F8
-00000
- 
-0C0F8
-00000
- 
-CF0FE
-00000
- 
-00000
-0FCFF
- 
-4F0FE
-00000
- 
-00000
-800F0
- 
-0F0FE
-00000
- 
-4E0F8
-00000
- 
-00000
-080F0
- 
-0FCFF
-00000
- 
-0FCFF
-00000
- 
-00000
-4E0F8
- 
-4F0FE
-00000
- 
-00000
-CC0F8
- 
-00000
-CC0F8
- 
-800E0
-00000
- 
-00000
-880F8
- 
-880F8
-00000
- 
-00000
-CC0F8
- 
-00000
-4C0F8
- 
-00000
-0E0FE
- 
-00000
-8F8FE
- 
-00000
-CE0F8
- 
-4FCFF
-00000
- 
-00000
-0E0FE
- 
-00000
-800E0
- 
-000E0
-00000
- 
-000D0
-00000
- 
-00000
-C00F0
- 
-0F8FE
-00000
- 
-0F8FE
-00000
- 
-00000
-0F8FE
- 
-00000
-CF8FE
- 
-00000
-000E0
- 
-00000
-0F8FE
- 
-00000
-8C0F8
- 
-00000
-4E0F8
- 
-080F8
-00000
- 
-00000
-C80F0
- 
-00000
-0F8FE
- 
-C80F0
-00000
- 
-CC0F8
-00000
- 
-CF0FE
-00000
- 
-00000
-8F8FE
- 
-8C0F8
-00000
- 
-CE0FE
-00000
- 
-0E0FE
-00000
- 
-480F0
-00000
- 
-0F8FE
-00000
- 
-0F0FE
-00000
- 
-480F8
-00000
- 
-CF8FE
-00000
- 
-4E0F8
-00000
- 
-4E0FE
-00000
- 
-00000
-CF0FE
- 
-00000
-8F8FE
- 
-00000
-CF0FE
- 
-CF8FE
-00000
- 
-0F0FE
-00000
- 
-00000
-0F8FE
- 
-8E0FE
-00000
- 
-00000
-CF0FE
- 
-4C0F8
-00000
- 
-8F0FE
-00000
- 
-0C0F8
-00000
- 
-00000
-0F0FE
- 
-00000
-000E0
- 
-CE0DE
-00000
- 
-CE0FE
-00000
- 
-00000
-800E0
- 
-00000
-080F8
- 
-00000
-000F0
- 
-4C0F8
-00000
- 
-00000
-CE0F8
- 
-4F0FE
-00000
- 
-0FCFF
-00000
- 
-400C0
-00000
- 
-CE0F8
-00000
- 
-00000
-CF8FE
- 
-4C0F8
-00000
- 
-00000
-8C0F8
- 
-4C0F8
-00000
- 
-00000
-4C0F8
- 
-4F0FE
-00000
- 
-00000
-0C0F8
- 
-8FCFF
-00000
- 
-00000
-4F8FE
- 
-0C0F8
-00000
- 
-00000
-4F0FE
- 
-00000
-4C0F8
- 
-00000
-CE0F8
- 
-00000
-8F8FE
- 
-00000
-000F0
- 
-CE0FE
-00000
- 
-00000
-0F8FE
- 
-8F0FE
-00000
- 
-00000
-000C0
- 
-00000
-400F0
- 
-00000
-8F8FE
- 
-00000
-480F8
- 
-00000
-880F8
- 
-8C0F8
-00000
- 
-00000
-C80F8
- 
-00000
-4E0F8
- 
-00000
-CFCFF
- 
-00000
-8F0FE
- 
-800C0
-00000
- 
-8F0FE
-00000
- 
-8FCFF
-00000
- 
-00000
-880F0
- 
-480F8
-00000
- 
-00000
-0C0F8
- 
-400F0
-00000
- 
-4F8FE
-00000
- 
-C00F0
-00000
- 
-880F8
-00000
- 
-00000
-8F8FE
- 
-CF0FE
-00000
- 
-00000
-8F8FE
- 
-480F8
-00000
- 
-00000
-080F0
- 
-0F0FE
-00000
- 
-4C0F8
-00000
- 
-00000
-CFCFF
- 
-0F0FE
-00000
- 
-800C0
-00000
- 
-8E0FE
-00000
- 
-00000
-8C0F8
- 
-CF8FE
-00000
- 
-00000
-CF0FE
- 
-00000
-8C0F8
- 
-00000
-C00F0
- 
-800E0
-00000
- 
-C80F8
-00000
- 
-00000
-CFCFF
- 
-4F8FE
-00000
- 
-800E0
-00000
- 
-CE0F8
-00000
- 
-00000
-480F8
- 
-00000
-8F0FE
- 
-00000
-8F8FE
- 
-4C0F8
-00000
- 
-8C0F8
-00000
- 
-8F0FE
-00000
- 
-CE0F8
-00000
- 
-00000
-400E0
- 
-00000
-4F8FE
- 
-400F0
-00000
- 
-CE0FE
-00000
- 
-00000
-4FCFF
- 
-4C0F8
-00000
- 
-C00C0
-00000
- 
-4E0DE
-00000
- 
-8E0FE
-00000
- 
-0C0F8
-00000
- 
-00000
-CF0FE
- 
-8E0FE
-00000
- 
-0C0F8
-00000
- 
-00000
-8F0FE
- 
-CF8FE
-00000
- 
-0C0F8
-00000
- 
-00000
-800E0
- 
-00000
-4E0FE
- 
-4C0F8
-00000
- 
-4C0F8
-00000
- 
-00000
-8C0F8
- 
-480F8
-00000
- 
-00000
-8F0FE
- 
-CFCFF
-00000
- 
-00000
-4E0FE
- 
-00000
-4E0F8
- 
-00000
-0F0FE
- 
-400C0
-00000
- 
-00000
-4F8FE
- 
-00000
-8C0F8
- 
-00000
-0F0FE
- 
-00000
-8E0FE
- 
-0C0F8
-00000
- 
-00000
-8F8FE
- 
-CF8FE
-00000
- 
-00000
-080F8
- 
-CFCFF
-00000
- 
-CC0F8
-00000
- 
-00000
-800E0
- 
-080F8
-00000
- 
-00000
-0F8FE
- 
-800F0
-00000
- 
-00000
-C00F0
- 
-00000
-8F0FE
- 
-00000
-0F0FE
- 
-00000
-0E0FE
- 
-CF0FE
-00000
- 
-00000
-000E0
- 
-0F8FE
-00000
- 
-00000
-000E0
- 
-00000
-480F8
- 
-800E0
-00000
- 
-C00E0
-00000
- 
-00000
-4F0FE
- 
-CE0FE
-00000
- 
-0F0FE
-00000
- 
-00000
-0C0F8
- 
-00000
-0F8FE
- 
-00000
-CF8FE
- 
-8F0FE
-00000
- 
-4F8FE
-00000
- 
-000E0
-00000
- 
-00000
-400C0
- 
-00000
-CC0F8
- 
-00000
-CF0FE
- 
-00000
-080F8
- 
-00000
-CE0FE
- 
-00000
-8C0F8
- 
-00000
-000C0
- 
-00000
-4C0F8
- 
-0F8FE
-00000
- 
-4F8FE
-00000
- 
-8F0FE
-00000
- 
-00000
-0C0F8
- 
-00000
-4F8FE
- 
-00000
-0F0FE
- 
-00000
-0E0F8
- 
-4F8FE
-00000
- 
-080F8
-00000
- 
-4E0DE
-00000
- 
-00000
-4F0FE
- 
-080F8
-00000
- 
-00000
-400C0
- 
-CF0FE
-00000
- 
-4C0F8
-00000
- 
-00000
-080F8
- 
-8E0F8
-00000
- 
-00000
-8F0FE
- 
-00000
-8F0FE
- 
-00000
-CE0F8
- 
-8F0FE
-00000
- 
-400E0
-00000
- 
-0F8FE
-00000
- 
-0C0F8
-00000
- 
-00000
-0E0F8
- 
-8F8FE
-00000
- 
-8C0F8
-00000
- 
-400F0
-00000
- 
-00000
-CE0F8
- 
-4C0F8
-00000
- 
-00000
-8F8FE
- 
-400E0
-00000
- 
-C80F8
-00000
- 
-00000
-0FCFF
- 
-00000
-CE0FE
- 
-00000
-0F0FE
- 
-00000
-0E0F8
- 
-00000
-400C0
- 
-00000
-CF8FE
- 
-4E0F8
-00000
- 
-CF0FE
-00000
- 
-4FCFF
-00000
- 
-00000
-4C0F8
- 
-8C0F8
-00000
- 
-C80F8
-00000
- 
-00000
-8E0FE
- 
-00000
-4C0F8
- 
-00000
-480D8
- 
-CF8FE
-00000
- 
-CC0F8
-00000
- 
-00000
-0E0FE
- 
-0F8FE
-00000
- 
-4F8FE
-00000
- 
-00000
-0FCFF
- 
-00000
-4F0FE
- 
-00000
-000C0
- 
-00000
-000C0
- 
-0F8FE
-00000
- 
-00000
-400C0
- 
-00000
-8F0FE
- 
-8F0FE
-00000
- 
-CC0F8
-00000
- 
-00000
-800F0
- 
-00000
-CF0FE
- 
-8C0F8
-00000
- 
-00000
-CC0F8
- 
-8C0F8
-00000
- 
-C80F8
-00000
- 
-0F0FE
-00000
- 
-8C0F8
-00000
- 
-4E0F8
-00000
- 
-0F8FE
-00000
- 
-0F0FE
-00000
- 
-8F8FE
-00000
- 
-00000
-4F8FE
- 
-00000
-800C0
- 
-00000
-8C0F8
- 
-00000
-4C0F8
- 
-8E0F8
-00000
- 
-800E0
-00000
- 
-00000
-8C0F8
- 
-800F0
-00000
- 
-CE0FE
-00000
- 
-00000
-000E0
- 
-4E0F8
-00000
- 
-4F8FE
-00000
- 
-00000
-C80F8
- 
-00000
-080F8
- 
-000C0
-00000
- 
-400E0
-00000
- 
-8E0D8
-00000
- 
-00000
-0F8FE
- 
-0F0FE
-00000
- 
-8C0F8
-00000
- 
-4F0FE
-00000
- 
-0F0FE
-00000
- 
-4E0F8
-00000
- 
-4E0FE
-00000
- 
-00000
-4F0FE
- 
-00000
-0F8FE
- 
-00000
-C00E0
- 
-00000
-C80F0
- 
-8E0F8
-00000
- 
-00000
-C00E0
- 
-00000
-0FCFF
- 
-CF8FE
-00000
- 
-0F8FE
-00000
- 
-0F0FE
-00000
- 
-0E0F8
-00000
- 
-00000
-8E0FE
- 
-00000
-8E0FE
- 
-CE0FE
-00000
- 
-8F0FE
-00000
- 
-0FCFF
-00000
- 
-480F8
-00000
- 
-00000
-CF0FE
- 
-00000
-0E0FE
- 
-4F0FE
-00000
- 
-00000
-400E0
- 
-00000
-CF0FE
- 
-080F8
-00000
- 
-4E0FE
-00000
- 
-00000
-0F0FE
- 
-00000
-0F8FE
- 
-00000
-CE0FE
- 
-00000
-880F0
- 
-480F8
-00000
- 
-00000
-000E0
- 
-8F0FE
-00000
- 
-4FCFF
-00000
- 
-00000
-800E0
- 
-4F8FE
-00000
- 
-00000
-4F8FE
- 
-00000
-4C0F8
- 
-00000
-000F0
- 
-00000
-000E0
- 
-CC0F8
-00000
- 
-00000
-4F0FE
- 
-0F8FE
-00000
- 
-00000
-C00F0
- 
-CF0FE
-00000
- 
-8F0FE
-00000
- 
-00000
-0E0F8
- 
-CF0FE
-00000
- 
-00000
-8C0F8
- 
-0E0FE
-00000
- 
-00000
-800F0
- 
-00000
-400E0
- 
-400E0
-00000
- 
-0E0FE
-00000
- 
-4E0F8
-00000
- 
-CFCFF
-00000
- 
-00000
-0FCFF
- 
-0F0FE
-00000
- 
-00000
-000E0
- 
-00000
-4F8FE
- 
-00000
-800E0
- 
-8F0FE
-00000
- 
-CF0FE
-00000
- 
-4F8DE
-00000
- 
-00000
-8C0F8
- 
-00000
-0F0FE
- 
-00000
-800F0
- 
-080F8
-00000
- 
-00000
-8F0FE
- 
-00000
-0C0F8
- 
-CF8FE
-00000
- 
-00000
-C00C0
- 
-00000
-CE0F8
- 
-00000
-0F0FE
- 
-0F0FE
-00000
- 
-00000
-8F0FE
- 
-00000
-080F0
- 
-00000
-8E0F8
- 
-00000
-4C0F8
- 
-00000
-8F8FE
- 
-4F8FE
-00000
- 
-00000
-8E0FE
- 
-C80F0
-00000
- 
-0F8FE
-00000
- 
-00000
-CF0FE
- 
-080F8
-00000
- 
-00000
-C80F0
- 
-8C0D8
-00000
- 
-4F8FE
-00000
- 
-0FCFF
-00000
- 
-8F0FE
-00000
- 
-00000
-8F0FE
- 
-0F0FE
-00000
- 
-00000
-0C0F8
- 
-00000
-0C0F8
- 
-8F0FE
-00000
- 
-00000
-4C0F8
- 
-00000
-0F0FE
- 
-8F0FE
-00000
- 
-00080
-00000
- 
-00000
-CE0F8
- 
-0C0F8
-00000
- 
-00000
-4C0F8
- 
-0F0FE
-00000
- 
-0F0FE
-00000
- 
-8C0F8
-00000
- 
-00000
-8FCFF
- 
-000E0
-00000
- 
-0E0FE
-00000
- 
-4E0FE
-00000
- 
-00000
-8F0FE
- 
-00000
-CF0FE
- 
-8C0F8
-00000
- 
-480D0
-00000
- 
-00000
-8C0F8
- 
-8FCFF
-00000
- 
-000F0
-00000
- 
-8E0F8
-00000
- 
-CF0FE
-00000
- 
-4F8FE
-00000
- 
-00000
-800F0
- 
-00000
-8E0FE
- 
-4E0FE
-00000
- 
-00000
-4C0F8
- 
-4F0FE
-00000
- 
-0E0F8
-00000
- 
-00000
-4F0FE
- 
-00000
-4F0FE
- 
-8C0F8
-00000
- 
-CF8FE
-00000
- 
-4F8FE
-00000
- 
-00000
-080F8
- 
-0E0F8
-00000
- 
-00000
-CC0F8
- 
-CE0F8
-00000
- 
-0C0F8
-00000
- 
-00000
-8F8FE
- 
-00000
-8F8FE
- 
-00000
-CC0F8
- 
-00000
-0C0F8
- 
-4F0DE
-00000
- 
-00000
-8E0FE
- 
-000E0
-00000
- 
-00000
-CE0F8
- 
-CE8DE
-00000
- 
-00000
-0C0F8
- 
-CF0FE
-00000
- 
-0E0FE
-00000
- 
-000F0
-00000
- 
-0C0F8
-00000
- 
-00000
-480F8
- 
-8C0F8
-00000
- 
-8C0F8
-00000
- 
-00000
-CFEFF
- 
-800E0
-00000
- 
-00000
-0C0F8
- 
-4F0FE
-00000
- 
-00000
-0E0F8
- 
-080F0
-00000
- 
-00000
-0E0F8
- 
-00000
-C00E0
- 
-800F0
-00000
- 
-4F0FE
-00000
- 
-00000
-CC0F8
- 
-00000
-8E0F8
- 
-4E0FE
-00000
- 
-00000
-000E0
- 
-8F0FE
-00000
- 
-8F0FE
-00000
- 
-4E0F8
-00000
- 
-4FCFF
-00000
- 
-CF8FE
-00000
- 
-00000
-080F0
- 
-00000
-0E0FE
- 
-400C0
-00000
- 
-480F0
-00000
- 
-CE0F8
-00000
- 
-00000
-480F8
- 
-0E0FE
-00000
- 
-4E0FE
-00000
- 
-080F0
-00000
- 
-00000
-0FCFF
- 
-00000
-0F0FE
- 
-800E0
-00000
- 
-00000
-CF8FE
- 
-00000
-0C0F8
- 
-0F8FE
-00000
- 
-00000
-4C0F8
- 
-00000
-400E0
- 
-00000
-880F8
- 
-8F0FE
-00000
- 
-00000
-C80F8
- 
-00000
-000E0
- 
-00000
-0F0FE
- 
-8F8FE
-00000
- 
-00000
-CC0F8
- 
-00000
-4E0FE
- 
-CF8FE
-00000
- 
-00000
-0E0FE
- 
-0C0F8
-00000
- 
-00000
-CC0F8
- 
-400C0
-00000
- 
-800E0
-00000
- 
-00000
-8E0FE
- 
-00000
-8F0FE
- 
-00000
-0C0F8
- 
-880F8
-00000
- 
-0C0F8
-00000
- 
-CE0F8
-00000
- 
-8F0FE
-00000
- 
-00000
-CC0F8
- 
-00000
-880F8
- 
-00000
-CF8FE
- 
-00000
-4F8FE
- 
-00000
-0E0F8
- 
-8E0F8
-00000
- 
-00000
-0E0F8
- 
-00000
-400F0
- 
-00000
-CF0FE
- 
-8F8FE
-00000
- 
-8E0F8
-00000
- 
-00000
-8E0FE
- 
-00000
-880F8
- 
-CF8FE
-00000
- 
-CF0FE
-00000
- 
-800C0
-00000
- 
-CC0F8
-00000
- 
-CC0F8
-00000
- 
-00000
-8FCFF
- 
-00000
-CF0FE
- 
-00000
-400C0
- 
-00000
-8C0F8
- 
-0C0F8
-00000
- 
-800E0
-00000
- 
-00000
-0E0F8
- 
-00000
-8F8FE
- 
-00000
-CF0FE
- 
-0E0F8
-00000
- 
-8F0FE
-00000
- 
-00000
-8E0FE
- 
-480F8
-00000
- 
-00000
-400E0
- 
-4F8FE
-00000
- 
-8E0FE
-00000
- 
-00000
-C00E0
- 
-00000
-0C0F8
- 
-C80F8
-00000
- 
-00000
-0C0F8
- 
-400E0
-00000
- 
-4C0F8
-00000
- 
-4C0F8
-00000
- 
-00000
-0F0FE
- 
-CC0F8
-00000
- 
-4FCFF
-00000
- 
-CE0FE
-00000
- 
-0E0FE
-00000
- 
-8F8FE
-00000
- 
-0F0FE
-00000
- 
-00000
-8F0FE
- 
-800E0
-00000
- 
-00000
-0F0FE
- 
-0F8FE
-00000
- 
-0E0FE
-00000
- 
-4F8FE
-00000
- 
-00000
-4E0F8
- 
-00000
-8C0F8
- 
-4E0FE
-00000
- 
-880F8
-00000
- 
-00000
-0F0FE
- 
-8F0FE
-00000
- 
-00000
-4F8FE
- 
-8C0F8
-00000
- 
-00000
-CE0F8
- 
-00000
-080F8
- 
-00000
-800C0
- 
-4F0FE
-00000
- 
-800F0
-00000
- 
-00000
-8F8FE
- 
-C00C0
-00000
- 
-4C0F8
-00000
- 
-4C0F8
-00000
- 
-00000
-0FCFF
- 
-00000
-0E0F8
- 
-00000
-4F0FE
- 
-00000
-880F8
- 
-00000
-CF0FE
- 
-400E0
-00000
- 
-880F8
-00000
- 
-00000
-4F0FE
- 
-00000
-CE0FE
- 
-00000
-CF0FE
- 
-400F0
-00000
- 
-4C0D8
-00000
- 
-00000
-0C0F8
- 
-00000
-CE0FE
- 
-00000
-4E0FE
- 
-000E0
-00000
- 
-00000
-CE0FE
- 
-4C0F8
-00000
- 
-4E0FE
-00000
- 
-00000
-4F0FE
- 
-8E0F8
-00000
- 
-0FCFF
-00000
- 
-00000
-0F0FE
- 
-00000
-4F8FE
- 
-4F0FE
-00000
- 
-400F0
-00000
- 
-00000
-4F8FE
- 
-00000
-400F0
- 
-00000
-C80F8
- 
-CC0F8
-00000
- 
-480F8
-00000
- 
-4F8FE
-00000
- 
-00000
-4F8FE
- 
-00000
-800C0
- 
-00000
-CE0F8
- 
-00000
-4FCFF
- 
-8E0FE
-00000
- 
-CF0FE
-00000
- 
-00000
-8E0FE
- 
-00000
-8E0F8
- 
-400E0
-00000
- 
-00000
-8F0FE
- 
-00000
-000E0
- 
-00000
-CF8FE
- 
-00000
-0F0FE
- 
-00000
-C00F0
- 
-00000
-C80F8
- 
-00000
-CF0DE
- 
-0C0F8
-00000
- 
-00000
-0C0F8
- 
-00000
-8E0F8
- 
-8F8FE
-00000
- 
-00000
-8E0FE
- 
-00000
-CF0FE
- 
-00000
-480F8
- 
-00000
-CC0F8
- 
-00000
-8F0FE
- 
-00000
-4C0F8
- 
-00000
-CC0F8
- 
-C80F0
-00000
- 
-CFCFF
-00000
- 
-4C0F8
-00000
- 
-00000
-0F0FE
- 
-4C0F8
-00000
- 
-CE0F8
-00000
- 
-400C0
-00000
- 
-8E0FE
-00000
- 
-0FCFF
-00000
- 
-480F8
-00000
- 
-CF8FE
-00000
- 
-8F0FE
-00000
- 
-4E0F8
-00000
- 
-4F8FE
-00000
- 
-CF8FE
-00000
- 
-00000
-0E0F8
- 
-8E0FE
-00000
- 
-C00F0
-00000
- 
-00000
-0C0F8
- 
-00000
-C00C0
- 
-00000
-CF0FE
- 
-00000
-4F0FE
- 
-00000
-0E0F8
- 
-400C0
-00000
- 
-00000
-480F8
- 
-8F8FE
-00000
- 
-C00E0
-00000
- 
-8FCFF
-00000
- 
-00000
-0E0FE
- 
-0FCFF
-00000
- 
-400E0
-00000
- 
-CFCFF
-00000
- 
-8F8FE
-00000
- 
-00000
-C80F8
- 
-00000
-CF0FE
- 
-800F0
-00000
- 
-CFCFF
-00000
- 
-0FCFF
-00000
- 
-4E0F8
-00000
- 
-00000
-4F0FE
- 
-8FCFF
-00000
- 
-CF8FE
-00000
- 
-8C0F8
-00000
- 
diff --git a/Trigger/TrigT1/TrigT1CaloSim/share/TrigT1CaloSimJobOptions_ReadTT_Run2Sim.py b/Trigger/TrigT1/TrigT1CaloSim/share/TrigT1CaloSimJobOptions_ReadTT_Run2Sim.py
deleted file mode 100755
index 06aafc70dfc6799f649c5cb163d8d15085dac74b..0000000000000000000000000000000000000000
--- a/Trigger/TrigT1/TrigT1CaloSim/share/TrigT1CaloSimJobOptions_ReadTT_Run2Sim.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# Import the configurable algorithms for TrigT1Calo
-from AthenaCommon.GlobalFlags  import globalflags
-from AthenaCommon.Logging import logging  # loads logger
-
-from TrigT1CaloSim.TrigT1CaloSimConf import LVL1__CPMSim
-from TrigT1CaloSim.TrigT1CaloSimConf import LVL1__JEMJetSim
-from TrigT1CaloSim.TrigT1CaloSimConf import LVL1__JEMEnergySim
-from TrigT1CaloSim.TrigT1CaloSimConf import LVL1__RoIROD
-from TrigT1CaloSim.TrigT1CaloSimConf import LVL1__CPCMX
-from TrigT1CaloSim.TrigT1CaloSimConf import LVL1__JetCMX
-from TrigT1CaloSim.TrigT1CaloSimConf import LVL1__EnergyCMX
-
-# Get the algorithm sequence
-from AthenaCommon.AlgSequence import AlgSequence
-job = AlgSequence()
-
-# Add the required algorithms to the sequence
-job += LVL1__CPMSim( 'CPMSim' )
-job += LVL1__JEMJetSim( 'JEMJetSim' )
-job += LVL1__JEMEnergySim( 'JEMEnergySim' )
-job += LVL1__CPCMX( 'CPCMX' )
-job += LVL1__JetCMX( 'JetCMX' )
-job += LVL1__EnergyCMX( 'EnergyCMX' )
-job += LVL1__RoIROD( 'RoIROD' )
-
-
-
diff --git a/Trigger/TrigT1/TrigT1CaloSim/share/TrigT1CaloSimJobOptions_Run2.py b/Trigger/TrigT1/TrigT1CaloSim/share/TrigT1CaloSimJobOptions_Run2.py
deleted file mode 100755
index 7b9e31200b07725992994dea7dfec5147f374d5d..0000000000000000000000000000000000000000
--- a/Trigger/TrigT1/TrigT1CaloSim/share/TrigT1CaloSimJobOptions_Run2.py
+++ /dev/null
@@ -1,106 +0,0 @@
-# Import the configurable algorithms for TrigT1Calo
-from AthenaCommon.GlobalFlags  import globalflags
-from AthenaCommon.Logging import logging  # loads logger
-log = logging.getLogger( "TrigT1CaloSimJobOptions_Run2" )
-
-from TrigT1CaloSim.TrigT1CaloSimConf import LVL1__Run2TriggerTowerMaker
-from TrigT1CaloSim.TrigT1CaloSimConf import LVL1__Run2CPMTowerMaker
-from TrigT1CaloSim.TrigT1CaloSimConf import LVL1__Run2JetElementMaker
-from TrigT1CaloSim.TrigT1CaloSimConf import LVL1__CPMSim
-from TrigT1CaloSim.TrigT1CaloSimConf import LVL1__JEMJetSim
-from TrigT1CaloSim.TrigT1CaloSimConf import LVL1__JEMEnergySim
-from TrigT1CaloSim.TrigT1CaloSimConf import LVL1__CPCMX
-from TrigT1CaloSim.TrigT1CaloSimConf import LVL1__JetCMX
-from TrigT1CaloSim.TrigT1CaloSimConf import LVL1__EnergyCMX
-from TrigT1CaloSim.TrigT1CaloSimConf import LVL1__RoIROD
-from TrigT1CaloSim.TrigT1CaloSimConf import LVL1__Tester
-
-# Get the algorithm sequence
-from AthenaCommon.AlgSequence import AlgSequence
-job = AlgSequence()
-
-include('TrigT1CaloCalibConditions/L1CaloCalibConditionsMC_jobOptions.py')
-from TrigT1CaloSim.TrigT1CaloSimRun2Config import Run2TriggerTowerMaker25ns, Run2TriggerTowerMaker50ns
-
-from SGComps.AddressRemappingSvc import addInputRename
-addInputRename ( 'xAOD::TriggerTowerContainer', 'xAODTriggerTowers_rerun', 'xAODTriggerTowers')
-
-# try to determine wheter running with 25ns/50ns bunchspacing
-_bunchSpacing = None
-_doPC = True # do pedestal correction?
-_alg = {25 : Run2TriggerTowerMaker25ns, 50 : Run2TriggerTowerMaker50ns}
-
-from PyUtils.MetaReaderPeeker import metadata
-
-from AthenaCommon.GlobalFlags import globalflags
-if globalflags.isOverlay() is True:
-    log.error("L1Calo overlay simulation is currently not supported")
-
-else:
-    try:
-        if not 'metadata' in dir():
-            raise RuntimeError('Input file summary not available.')
-        # try:
-        #     digiParam = metadata['/Digitization/Parameters']
-        # except KeyError:
-        #     raise RuntimeError('Collection /Digitization/Parameters not found in file {}.'.format(metadata['file_name']))
-
-        if 'intraTrainBunchSpacing' not in metadata or not metadata['intraTrainBunchSpacing']:
-            raise RuntimeError('No key "intraTrainBunchSpacing" in /Digitization/Parameters.')
-        _bunchSpacing = int(metadata['intraTrainBunchSpacing'])
-    except RuntimeError as re:
-        log.warning('Could not determine bunch-spacing from input file: %s' % re)
-        log.warning('Configuring for 25ns w/o pedestal correction - a wrong configuration might yield non sensible results!')
-        _bunchSpacing = 25
-        _doPC = False # not enough information to configure pedestal correction
-
-    if _bunchSpacing in _alg:
-        log.info("Scheduling %s" %  _alg[_bunchSpacing].__name__)
-        job += _alg[_bunchSpacing]( 'Run2TriggerTowerMaker' )
-    else:
-        log.warning('No tuned configuration for a bunch-spacing of %s available. Using 25ns settings w/o pedestal correction.' % _bunchSpacing)
-        job += _alg[25]( 'Run2TriggerTowerMaker' )
-        _doPC = False
-
-log.info("Scheduling CPMTowerMaker, JetElementMaker, CPMSim, JEMJetSim, JEMEnergySim, CPCMX, JetCMX, EnergyCMX, RoIROD, Tester")
-
-job += LVL1__Run2CPMTowerMaker( 'CPMTowerMaker' )
-job += LVL1__Run2JetElementMaker( 'JetElementMaker' )
-job += LVL1__CPMSim( 'CPMSim' )
-job += LVL1__JEMJetSim( 'JEMJetSim' )
-job += LVL1__JEMEnergySim( 'JEMEnergySim' )
-job += LVL1__CPCMX( 'CPCMX' )
-job += LVL1__JetCMX( 'JetCMX' )
-job += LVL1__EnergyCMX( 'EnergyCMX' )
-job += LVL1__RoIROD( 'RoIROD' )
-#job += LVL1__Tester( 'Tester' )
-
-from AthenaCommon import CfgMgr
-from AthenaCommon.AppMgr import ToolSvc
-if not hasattr(ToolSvc, 'L1TriggerTowerTool'):
-    ToolSvc += CfgMgr.LVL1__L1TriggerTowerTool('L1TriggerTowerTool')
-
-job.Run2TriggerTowerMaker.ZeroSuppress = True
-
-# autoconfigure pedestal correction based on the input file
-if _doPC and _bunchSpacing not in (25,50):
-    log.warning('Only 25ns intra train bunch spacing currently supported. Dynamic pedestal correction is disabled!')
-    _doPC = False
-
-ToolSvc.L1TriggerTowerTool.BaselineCorrection = _doPC
-
-from TrigBunchCrossingTool.BunchCrossingTool import BunchCrossingTool
-bct = BunchCrossingTool()
-if not hasattr(ToolSvc, bct.getName()):
-    ToolSvc += bct
-else:
-    bct = getattr(ToolSvc, bct.getName())
-    
-if _doPC and not hasattr(ToolSvc, 'L1DynamicPedestalProviderTxt'):
-    ToolSvc += CfgMgr.LVL1__L1DynamicPedestalProviderTxt('L1DynamicPedestalProviderTxt',
-                                                         BunchCrossingTool = bct,
-                                                         InputFileEM_ShortGap='DynamicPedestalCorrection_SG_EM_%dns.txt' % _bunchSpacing,
-                                                         InputFileHAD_ShortGap='DynamicPedestalCorrection_SG_HAD_%dns.txt' % _bunchSpacing,
-                                                         InputFileEM_LongGap='DynamicPedestalCorrection_LG_EM_%dns.txt' % _bunchSpacing,
-                                                         InputFileHAD_LongGap='DynamicPedestalCorrection_LG_HAD_%dns.txt' % _bunchSpacing)
-    ToolSvc.L1TriggerTowerTool.L1DynamicPedestalProvider = ToolSvc.L1DynamicPedestalProviderTxt
diff --git a/Trigger/TrigT1/TrigT1CaloSim/share/TrigT1CaloSimJobOptions_TTL1_NoCalib.py b/Trigger/TrigT1/TrigT1CaloSim/share/TrigT1CaloSimJobOptions_TTL1_NoCalib.py
deleted file mode 100755
index 2a4857ed9a334a70fb79f1cbf2581f0f5746d316..0000000000000000000000000000000000000000
--- a/Trigger/TrigT1/TrigT1CaloSim/share/TrigT1CaloSimJobOptions_TTL1_NoCalib.py
+++ /dev/null
@@ -1 +0,0 @@
-include('TrigT1CaloSim/TrigT1CaloSimJobOptions_Run2.py')
diff --git a/Trigger/TrigT1/TrigT1CaloSim/share/TrigT1CaloSim_Reprocess_Run2.py b/Trigger/TrigT1/TrigT1CaloSim/share/TrigT1CaloSim_Reprocess_Run2.py
deleted file mode 100644
index 9a8549942c47e67f4054b7b45b05bd0b6cdcb4c2..0000000000000000000000000000000000000000
--- a/Trigger/TrigT1/TrigT1CaloSim/share/TrigT1CaloSim_Reprocess_Run2.py
+++ /dev/null
@@ -1,29 +0,0 @@
-include('TrigT1CaloCalibConditions/L1CaloCalibConditions_jobOptions.py')
-
-# Get the algorithm sequence
-from AthenaCommon.AlgSequence import AlgSequence
-topSequence = AlgSequence()
-
-svcMgr.ToolSvc += CfgMgr.LVL1__L1DatabaseOverrideForDataTool('L1DatabaseOverrideForDataTool')
-
-topSequence += CfgMgr.LVL1__TransientDatabaseOverride(
-    'TransientDatabaseOverride',
-    OverrideTools = [ svcMgr.ToolSvc.L1DatabaseOverrideForDataTool ],
-    InDeadChannelsFolder = '/TRIGGER/L1Calo/V1/Calibration/PpmDeadChannels',
-    InDisabledTowersFolder = '/TRIGGER/L1Calo/V1/Conditions/DisabledTowers',
-    OutPprChanDefaultsKey = 'ModifiedPprChanDefaults',
-    OutPprChanCalibKey = 'ModifiedPprChanCalib',
-    OutDeadChannelsKey = 'ModifiedPpmDeadChannels',
-    OutDisabledTowersKey = 'ModifiedDisabledTowers'
-)
-
-topSequence += CfgMgr.LVL1__Run2TriggerTowerMaker(
-    'Run2TriggerTowerMaker',
-    CellType=2, # Trigger Towers
-    TriggerTowerLocation = 'ReprocessedTriggerTowers',
-    ZeroSuppress = False,
-    ChanDefaultsFolderKey = 'ModifiedPprChanDefaults',
-    ChanCalibFolderKey = 'ModifiedPprChanCalib',
-    DeadChannelsFolderKey = 'ModifiedPpmDeadChannels',
-    DisabledTowersFolderKey = 'ModifiedDisabledTowers'
-)
diff --git a/Trigger/TrigValidation/TrigAnalysisTest/test/DISABLED_test_trigAna_BStoBStoESDAOD_v1Dev_build.py b/Trigger/TrigValidation/TrigAnalysisTest/test/DISABLED_test_trigAna_BStoBStoESDAOD_v1Dev_build.py
deleted file mode 100755
index b6cfaf1d2085fa74998231cc1fd2899775a479a6..0000000000000000000000000000000000000000
--- a/Trigger/TrigValidation/TrigAnalysisTest/test/DISABLED_test_trigAna_BStoBStoESDAOD_v1Dev_build.py
+++ /dev/null
@@ -1,74 +0,0 @@
-#!/usr/bin/env python
-# Copyright (C) 2002-2023 CERN for the benefit of the ATLAS collaboration
-#
-# art-description: Run Trigger on data with athena and write ByteStream output, then run offline reco
-# art-type: build
-# art-include: main/Athena
-# art-include: 23.0/Athena
-
-from TrigValTools.TrigValSteering import Test, ExecStep, CheckSteps
-from TrigValTools.TrigValSteering.Common import find_file
-from TrigAnalysisTest.TrigAnalysisSteps import add_analysis_steps
-
-menu_name = 'Dev_pp_run3_v1_TriggerValidation_prescale'
-
-# Run athena BS->BS job
-BStoBS = ExecStep.ExecStep("BStoBS")
-BStoBS.type = 'athena'
-BStoBS.job_options = 'TriggerJobOpts/runHLT_standalone.py'
-BStoBS.input = 'data'
-BStoBS.threads = 1
-BStoBSPreExec = ''.join([
-  'setMenu=\'{:s}\';'.format(menu_name),
-  'doL1Sim=True;',  # Run L1 simulation
-  'doWriteBS=True;',    # Write HLT result to BS
-])
-BStoBS.args = ' -c "{:s}"'.format(BStoBSPreExec)
-
-# Extract the Main stream data
-filterBS = ExecStep.ExecStep('FilterBS')
-filterBS.type = 'other'
-filterBS.executable = 'trigbs_extractStream.py'
-filterBS.input = ''
-filterBS.args = '-s Main ' + find_file('*unknown_SingleStream.daq.RAW.*Athena.*.data')
-
-# Reconstruction step, BS->ESD->AOD
-recoPreExec = ' '.join([
-  # Reco flags to disable things missing inputs from previous step
-  "from RecExConfig.RecFlags import rec;",
-  "rec.doInDet=False;",
-  "rec.doAFP=False;",
-  "rec.doEgamma=False;",
-  "rec.doForwardDet=False;",
-  "rec.doMuon=False;",
-  "rec.doMuonCombined=False;",
-  "rec.doJetMissingETTag=False;",
-  "rec.doTau=False;",
-  "rec.doLucid=False;",
-  "from ParticleBuilderOptions.AODFlags import AODFlags;",
-  "AODFlags.ThinNegativeEnergyNeutralPFOs.set_Value_and_Lock(False);",
-  "AODFlags.AddEgammaMuonTracksInAOD.set_Value_and_Lock(False);",
-  # Trigger flags
-  "from AthenaConfiguration.AllConfigFlags import ConfigFlags;",
-  "ConfigFlags.Trigger.triggerMenuSetup=\'{:s}\';".format(menu_name),
-  "ConfigFlags.Trigger.AODEDMSet=\'AODFULL\';"])
-reco = ExecStep.ExecStep('Tier0Reco')
-reco.type = 'Reco_tf'
-reco.threads = 1
-reco.input = ''
-reco.explicit_input = True
-reco.args = '--inputBSFile=' + find_file('*.physics_Main*.data')  # output of the previous step
-reco.args += ' --outputESDFile=ESD.pool.root --outputAODFile=AOD.pool.root'
-reco.args += ' --conditionsTag=\'CONDBR2-BLKPA-2018-11\' --geometryVersion=\'ATLAS-R2-2016-01-00-01\''
-reco.args += ' --preExec="{:s}"'.format(recoPreExec)
-reco.args += ' --postInclude="TriggerTest/disableChronoStatSvcPrintout.py"'
-
-# Test definition
-test = Test.Test()
-test.art_type = 'build'
-test.exec_steps = [BStoBS, filterBS, reco]
-test.check_steps = CheckSteps.default_check_steps(test)
-add_analysis_steps(test)
-
-import sys
-sys.exit(test.run())
diff --git a/Trigger/TrigValidation/TrigValTools/CMakeLists.txt b/Trigger/TrigValidation/TrigValTools/CMakeLists.txt
index 96ba01d6cdc193264c68923fb0dbfaafd51bee48..f6fe106be27ea05fc55902090a9dea318397e755 100644
--- a/Trigger/TrigValidation/TrigValTools/CMakeLists.txt
+++ b/Trigger/TrigValidation/TrigValTools/CMakeLists.txt
@@ -22,7 +22,6 @@ atlas_add_dictionary( TrigValToolsDict
 # Install files from the package:
 atlas_install_python_modules( python/*.py python/TrigValSteering bin/chainDump.py POST_BUILD_CMD ${ATLAS_FLAKE8} )
 atlas_install_scripts( bin/*.py test/*.py POST_BUILD_CMD ${ATLAS_FLAKE8} )
-atlas_install_scripts( bin/*.pl )
 atlas_install_data( share/*.json share/*.conf )
 
 # Unit tests:
diff --git a/Trigger/TrigValidation/TrigValTools/bin/regtest.pl b/Trigger/TrigValidation/TrigValTools/bin/regtest.pl
deleted file mode 100755
index 0689d3cf24dc7fbff895304426c214237eaf388c..0000000000000000000000000000000000000000
--- a/Trigger/TrigValidation/TrigValTools/bin/regtest.pl
+++ /dev/null
@@ -1,125 +0,0 @@
-#!/usr/bin/perl -w
-use Getopt::Long;
-use File::Basename;
-use File::Spec;
-use File::Copy;
-use File::Path;
-use constant TRUE => 1;
-use constant FALSE => 0;
-use constant UNDEFINED => "undefined";
-$prog = basename $0;
-sub main();
-main();
-exit -10;
-
-sub main(){
-    command_line();
-    $status = regtest();
-    exit($status);
-}
-
-sub prog_error_exit($$){
-    $failkey = 'FAILURE' if (!defined($failkey));
-    my ($message,$exitcode) = @_ or die;
-    print "$prog: $failkey $message\n";
-    exit($exitcode);
-}
-
-sub command_line(){
-  # parse command arguments and check usage
-  $debug = FALSE;
-  $inputfile = '';
-  $reffile = '';
-  $failkey = 'FAILURE';
-  $linematch = 'REGTEST';
-  my $result = GetOptions ('help' => \$help,
-                           'debug!' => \$debug,
-                           'failkey=s' => \$failkey,
-                           'inputfile=s' => \$inputfile,
-                           'reffile=s' => \$reffile,
-			   'linematch=s' => \$linematch);
-  if ($help || !$result) {
-    usage();
-    prog_error_exit("usage",-1);
-  }
-
-}
-
-# usage message
-sub usage(){
-    print "
-  Usage: $prog [options] 
-
-  Testing tool for comparing marked lines in a log file against a reference
-
-  Options:
-
-  --help                show this information
-  --debug               print debug information, for the script maintainer
-  --failkey <string>    keyword to be printed to indicate to the test 
-                        framework that a test has failed, default $failkey
-  --inputfile <string>  specify (athena) log file to take as input
-  --reffile <string>    specifiy reference file, with absolute or reletive path
-  --linematch <string>  only compare lines which match this string
-                        default: REGTEST
-                        Note: this is a perl regexp. See man perlre.
-                        Example to match a particular algorithm:
-                        'TrigJetRec_Cone.+REGTEST'
-
-  Return codes: 0 = success, non-zero = failure of some sort.
-
-  Technical info:
-
-  Lines which match the regular expression
-
-";
-}
-
-sub regtest(){
-    
-    my $newfile = basename($reffile) . ".new";
-    
-    # extract regression test lines from log file
-    open NEW, ">$newfile"
-	or die "$prog: error: failed opening $newfile to write: $!\n";
-    if (! open LOG, "<$inputfile"){
-	print "$prog: error: failed opening $inputfile to read: $!\n";
-	print "-> $failkey Aborting this test\n";
-	return 2; # go on to next test
-    }
-    my $lines=0;
-    my $result;
-    while (<LOG>){
-	if (/$linematch/){
-	    print NEW;
-	    $lines++;
-	}
-    }
-    print "$prog debug: regtest $lines lines matched $linematch in LOG\n" if ($debug);
-    close LOG;
-    close NEW;
-    # check whether any lines matched - if zero it's an error straight away
-    # and no point in doing the diff
-    if ($lines == 0){
-	# print failure keyword here to flag it as an error
-	print "=== Alert! $failkey no lines matching $linematch were found in log file\n";
-	$result = 1;
-    }
-# diff the output and the reference
-# even if verify is off
-    print "$prog: debug: diff -U 2 -b $reffile $newfile\n" if ($debug);
-    my $rc = system("diff -U 2 -b $reffile $newfile ");
-    if ($rc == 0){
-	print "=== Output is the same as reference\n";
-        $result = 0;
-    } else {
-	# print failure keyword here to flag it as an error
-	print "=== Alert! $failkey input file (+) differs from reference (-) \n";
-        print "    If this change is understood, to update the reference file please type:\n";
-        print "    cp ",File::Spec->rel2abs($newfile)," ",File::Spec->rel2abs($reffile),"\n";
-	$result = 1;
-    }
-    
-    print "$prog debug: returning result $result\n" if ($debug);
-    return $result;
-}
diff --git a/Trigger/TrigValidation/TrigValTools/bin/regtest.py b/Trigger/TrigValidation/TrigValTools/bin/regtest.py
deleted file mode 100755
index 84d6aa0d859f4391daf49b617d15a23a67c3b32b..0000000000000000000000000000000000000000
--- a/Trigger/TrigValidation/TrigValTools/bin/regtest.py
+++ /dev/null
@@ -1,136 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
-
-from __future__ import print_function
-
-import re
-import argparse
-import sys
-import os 
-
-def main():
-    commandLine()
-    global status
-    status = regtest()
-    sys.exit(status)
-
-def progErrorExit(message, exitcode):
-    try:
-        failkey
-    except NameError:
-        failkey = 'FAILURE'
-    print('regtest.py: %s %s' % (failkey, message))
-    sys.exit(exitcode)
-        
-def commandLine():
-    global debug
-    global inputfile
-    global failkey
-    global linematch
-    global reffile
-    debug = False
-    inputfile = ''
-    reffile = ''
-    failkey = 'FAILURE'
-    linematch = 'REGTEST'
-    parser = argparse.ArgumentParser(description = 'usage')
-   
-    parser.add_argument('--debug', 
-                        action = 'store_true',
-                        default = False,
-                        help = 'print debug information, for the script maintainer'       
-                        )
-    
-    parser.add_argument('--failkey',
-                        type = str,
-                        default = 'FAILURE',
-                        help =''' keyword to be printed to indicate to the test framework that a
-                               test has failed, default FAILURE'''
-                        )
-   
-    parser.add_argument('--linematch',
-                         default = "REGTEST",
-                         type = str,
-                         help = ' only compare lines which match this string default: REGTEST'
-                        )
-
-    parser.add_argument('--inputfile',
-                        metavar = '<file>',
-                        type = str,
-                        help = 'specify (athena) log file to take as input'
-                        )
-
-    parser.add_argument('--reffile',
-                        metavar = '<file>',
-                        type = str,
-                        help = 'specifiy reference file, with absolute or reletive path'
-                        )
-    global args
-    args = parser.parse_args()
-    if help is True:
-        usage()
-        progErrorExit('usage', -1)
-
-def usage():
-    print('''
- Usage: regtest.py [options] 
-
-  Testing tool for comparing marked lines in a log file against a reference
-
-  Options:
-
-  --help                show this information
-  --debug               print debug information, for the script maintainer
-  --failkey <string>    keyword to be printed to indicate to the test 
-                        framework that a test has failed, default ''', failkey,'''
-  --inputfile <string>  specify (athena) log file to take as input
-  --reffile <string>    specifiy reference file, with absolute or reletive path
-  --linematch <string>  only compare lines which match this string
-                        default: REGTEST
-                       
- Example to match a particular algorithm:
-                       'TrigJetRec_Cone.+REGTEST'
-
-  Return codes: False = success, True = failure of some sort.
-
-  Technical info:
-
-  Lines which match the regular expression
-  ''')
-
-def regtest():
-    with open(args.inputfile,'r') as inpfile:
-        matchline = 0
-        for line in inpfile:
-            if re.search('REGTEST',line):
-                matchline += 1
-   
-    if matchline == 0:
-        print('=== Alert!', failkey, 'no lines matching', linematch, 'in LOG')
-        result = True
-        exit()
-
-    if debug is True:
-       print('regtest.py,: debug: diff -b', args.inputfile, args.reffile)
-  
-    command = 'diff -b ' +  args.inputfile + ' ' +  args.reffile
-    rc = os.system(command)
-    if rc is False:
-       print('=== Output is the same as reference.')
-#       result = False
-       result = 0
-    else:
-        print('''=== Alert!''', failkey, '''input file (<) differs from reference (>)
-        If this change is understood, to update the reference file please type:
-        cp ''', args.inputfile, args.reffile)
-        #        result = True
-        result = 1
-
-
-    if debug is True:
-        print('regtest.py  debug: returning result', result)
-    return result
-
-if __name__ == '__main__':
-    main()
diff --git a/Trigger/TrigValidation/TrigValTools/python/TrigValSteering/CheckSteps.py b/Trigger/TrigValidation/TrigValTools/python/TrigValSteering/CheckSteps.py
index 675139d0fbef71b43ada74ca09845217d7f09d09..e9a2eafd8b868f64af46279180dab338c60b84d5 100644
--- a/Trigger/TrigValidation/TrigValTools/python/TrigValSteering/CheckSteps.py
+++ b/Trigger/TrigValidation/TrigValTools/python/TrigValSteering/CheckSteps.py
@@ -296,16 +296,15 @@ class RegTestStep(RefComparisonStep):
     def __init__(self, name='RegTest'):
         super(RegTestStep, self).__init__(name)
         self.regex = 'REGTEST'
-        self.executable = 'regtest.pl'
+        self.executable = 'diff'
         self.input_base_name = 'athena'
-        self.args += ' --linematch ".*"'
         self.auto_report_result = True
         self.output_stream = Step.OutputStream.FILE_AND_STDOUT
 
     def configure(self, test):
         self.input_file = self.input_base_name+'.regtest'
         RefComparisonStep.configure(self, test)
-        self.args += ' --inputfile {} --reffile {}'.format(self.input_file, self.reference)
+        self.args += ' -U 2 -b {} {}'.format(self.input_file, self.reference)
         Step.configure(self, test)
 
     def prepare_inputs(self):
diff --git a/Trigger/TrigValidation/TriggerTest/test/test_trig_mc_v1Dev_ITk_build.py b/Trigger/TrigValidation/TriggerTest/test/test_trig_mc_v1Dev_ITk_build.py
index 5da0723fd0c213112936e730b920c246ef33a93c..90d58bdea0b5ef1db82ffdd4c40581ce9bff691e 100755
--- a/Trigger/TrigValidation/TriggerTest/test/test_trig_mc_v1Dev_ITk_build.py
+++ b/Trigger/TrigValidation/TriggerTest/test/test_trig_mc_v1Dev_ITk_build.py
@@ -22,7 +22,7 @@ run.flags = ['Trigger.triggerMenuSetup="MC_pp_run4_v1"',
              'ITk.doTruth=False',
              'Tracking.doTruth=False',
              'Trigger.enableL1CaloPhase1=False',
-             'Trigger.enabledSignatures=[\\\"Muon\\\"]']
+             ]
 
 # The full test configuration
 test = Test.Test()
diff --git a/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Bjet/BjetFlavourTaggingConfig.py b/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Bjet/BjetFlavourTaggingConfig.py
index 191b57adb4577d3c7fc446ccb99b6e65f12eb0bb..409193ff9b9e941f6e54cea21b808b573cabe3c7 100644
--- a/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Bjet/BjetFlavourTaggingConfig.py
+++ b/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Bjet/BjetFlavourTaggingConfig.py
@@ -7,6 +7,7 @@ from AthenaConfiguration.ComponentFactory import CompFactory
 from BTagging.JetParticleAssociationAlgConfig import JetParticleAssociationAlgCfg
 from BTagging.BTagTrackAugmenterAlgConfig import BTagTrackAugmenterAlgCfg
 from BTagging.BTagConfig import BTagAlgsCfg
+from JetTagCalibration.JetTagCalibConfig import JetTagCalibCfg
 
 # fast btagging
 from FlavorTagDiscriminants.FlavorTagNNConfig import getStaticTrackVars
@@ -20,6 +21,8 @@ def flavourTaggingCfg( flags, inputJets, inputVertex, inputTracks, BTagName,
 
     acc = ComponentAccumulator()
 
+    acc.merge(JetTagCalibCfg(flags))
+    
     #Track Augmenter
     acc.merge(BTagTrackAugmenterAlgCfg(
         flags,
@@ -67,6 +70,8 @@ def fastFlavourTaggingCfg( flags, inputJets, inputVertex, inputTracks, isPFlow=F
 
     ca = ComponentAccumulator()
 
+    ca.merge(JetTagCalibCfg(flags))
+    
     # first add the track augmentation
     jet_name = inputJets
     if isPFlow:
diff --git a/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/CalibCosmicMon/CosmicChainConfiguration.py b/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/CalibCosmicMon/CosmicChainConfiguration.py
index e418198ed51dbce1d41b3b84e1f2a5de5a456464..1bd125ae352b4ae1b976ea6b20ac389576085d13 100644
--- a/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/CalibCosmicMon/CosmicChainConfiguration.py
+++ b/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/CalibCosmicMon/CosmicChainConfiguration.py
@@ -5,7 +5,7 @@ logging.getLogger().info("Importing %s",__name__)
 log = logging.getLogger(__name__)
 
 from AthenaConfiguration.ComponentFactory import CompFactory
-from TriggerMenuMT.HLT.Config.MenuComponents import MenuSequenceCA, SelectionCA, InViewRecoCA, EmptyMenuSequence
+from TriggerMenuMT.HLT.Config.MenuComponents import MenuSequenceCA, SelectionCA, InViewRecoCA, EmptyMenuSequenceCfg
 from TrigEDMConfig.TriggerEDM import recordable
 import AthenaCommon.SystemOfUnits as Units
 
@@ -53,9 +53,6 @@ def CosmicsTrkSequenceCfg(flags):
                           HypoToolGen = TrackCountHypoToolGen)
 
 
-def EmptyMSBeforeCosmicID(flags):
-    return EmptyMenuSequence("EmptyBeforeCosmicID")
-
 #----------------------------------------------------------------
 class CosmicChainConfiguration(ChainConfigurationBase):
 
@@ -73,7 +70,7 @@ class CosmicChainConfiguration(ChainConfigurationBase):
         # define here the names of the steps and obtain the chainStep configuration         
         # --------------------
         if 'cosmic_id' in self.chainName:
-            steps += [  self.getStep(flags, 1, 'Empty', [EmptyMSBeforeCosmicID]),
+            steps += [  self.getStep(flags, 1, 'Empty', [EmptyMenuSequenceCfg], name="EmptyBeforeCosmicID"),
                         self.getStep(flags, 2, 'CosmicTracking', [CosmicsTrkSequenceCfg]) ]
 
         return self.buildChain(steps)
diff --git a/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/CommonSequences/EventBuildingSequences.py b/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/CommonSequences/EventBuildingSequences.py
index e7acf2d7b05b1491f80b00abad465a2e00b0baec..25c06891a965164baee4fbb1c6aa114fcfa50e90 100644
--- a/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/CommonSequences/EventBuildingSequences.py
+++ b/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/CommonSequences/EventBuildingSequences.py
@@ -1,7 +1,7 @@
 #
 #  Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration
 #
-
+import functools
 from TrigEDMConfig import DataScoutingInfo
 from TrigEDMConfig.TriggerEDM import recordable
 from TriggerMenuMT.HLT.Menu import EventBuildingInfo
@@ -26,7 +26,7 @@ def addEventBuildingSequence(flags, chain, eventBuildType, chainDict):
         log.error('eventBuildType \'%s\' not found in the allowed Event Building identifiers', eventBuildType)
         return
 
-    seq = pebMenuSequenceCfg(flags, chain=chain, eventBuildType=eventBuildType, chainDict=chainDict)
+    seq = functools.partial(pebMenuSequenceCfg, flags, chain=chain, eventBuildType=eventBuildType, chainDict=chainDict)
 
     if len(chain.steps)==0:
         # noalg PEB chain
diff --git a/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/CommonSequences/TLABuildingSequences.py b/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/CommonSequences/TLABuildingSequences.py
index f50c61d0242f69cbb14e2e0d7d46ae4e34c747b7..5e2eda585fe74e60fdd8d7954316bf2c8ec87ab6 100644
--- a/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/CommonSequences/TLABuildingSequences.py
+++ b/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/CommonSequences/TLABuildingSequences.py
@@ -1,6 +1,7 @@
 #
 #  Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration
 #
+import functools
 from TriggerMenuMT.HLT.Config.MenuComponents import ChainStep
 from AthenaCommon.Logging import logging
 from ..Jet.JetChainConfiguration import JetChainConfiguration
@@ -23,7 +24,7 @@ def addTLAStep(flags, chain, chainDict):
         
         log.debug("addTLAStep: processing signature: %s", cPart['signature'] )
         # call the sequence from their respective signatures
-        tlaSequencesList.append(getTLASignatureSequence(flags, chainDict=chainDict, chainPart=cPart)), #signature=cPart['signature'])),
+        tlaSequencesList.append(functools.partial(getTLASignatureSequence, flags, chainDict=chainDict, chainPart=cPart)), #signature=cPart['signature'])),
             
     log.debug("addTLAStep: About to add a step with: %d parallel sequences.", len(tlaSequencesList))            
     
diff --git a/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Config/ChainConfigurationBase.py b/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Config/ChainConfigurationBase.py
index c8c4b3e8a17d3ebc56ef2d575ab0122549b128ca..1cd94b1adef550f7513133a3653d68fe99ac0e7a 100644
--- a/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Config/ChainConfigurationBase.py
+++ b/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Config/ChainConfigurationBase.py
@@ -44,11 +44,14 @@ class ChainConfigurationBase(metaclass=abc.ABCMeta):
         self.chainPartNameNoMultwL1 += "_"+self.chainL1Item
 
     def getStep(self, flags, stepID, stepPartName, sequenceCfgArray, comboHypoCfg=ComboHypoCfg, comboTools=[], **stepArgs):
-        stepName = 'Step%d'%stepID + '_' + stepPartName
+        stepName = 'Step%s'%str(stepID) + '_' + stepPartName
         log.debug("Configuring step %s", stepName)
-        seqArray = []   
-        for sequenceCfg in sequenceCfgArray:
-            seqArray.append (sequenceCfg(flags, **stepArgs) )
+
+        if flags.Trigger.fastMenuGeneration:
+            # do not generate Menu Sequences, just store the functions that can do that
+            seqArray = [functools.partial(gen, flags, **stepArgs) for gen in sequenceCfgArray]   
+        else:
+            seqArray = [gen(flags, **stepArgs) for gen in sequenceCfgArray]
 
         if (len(seqArray)>0):                                
             if inspect.signature(comboHypoCfg).parameters and all(inspect.signature(comboTool).parameters for comboTool in comboTools):                
@@ -61,7 +64,7 @@ class ChainConfigurationBase(metaclass=abc.ABCMeta):
         raise RuntimeError("[getStep] No sequences generated for step %s!", stepPartName)
 
     def getEmptyStep(self, stepID, stepPartName):
-        stepName = 'Step%d'%stepID + '_' + stepPartName
+        stepName = 'Step%s'%str(stepID) + '_' + stepPartName
         log.debug("Configuring empty step %s", stepName)
         return ChainStep(stepName, Sequences=[], multiplicity=[] ,chainDicts=[self.dict])
  
diff --git a/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Config/JSON/HLTMenuJSON.py b/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Config/JSON/HLTMenuJSON.py
index a701bf944cb9873ea6000bd72a1804d89dfa18bd..d7a79def78686b008bcb3d306ba72ae789055012 100644
--- a/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Config/JSON/HLTMenuJSON.py
+++ b/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Config/JSON/HLTMenuJSON.py
@@ -8,7 +8,6 @@ from AthenaCommon.CFElements import getSequenceChildren
 from AthenaCommon.Logging import logging
 __log = logging.getLogger( __name__ )
 
-
 # remove prescale suffixes
 def __getMenuBaseName(menuName):
     pattern = re.compile(r'_v\d+|DC14')
diff --git a/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Config/MenuComponents.py b/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Config/MenuComponents.py
index 0250ada521b03f23cc82b672cf0bb700e54f6f0d..53b21fbb6676cac8ae687ee8dc38f0b262d459d0 100644
--- a/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Config/MenuComponents.py
+++ b/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Config/MenuComponents.py
@@ -15,12 +15,12 @@ from TrigCompositeUtils.TrigCompositeUtils import legName
 from TriggerJobOpts.TriggerConfigFlags import ROBPrefetching
 
 from collections.abc import MutableSequence
+import functools
 import inspect
 import re
 
 from AthenaCommon.Logging import logging
 log = logging.getLogger( __name__ )
-
 # Pool of mutable ComboHypo instances
 _ComboHypoPool = dict()
 
@@ -375,6 +375,11 @@ class EmptyMenuSequence:
         return "MenuSequence::%s \n Hypo::%s \n Maker::%s \n Sequence::%s \n HypoTool::%s\n"\
             %(self.name, "Empty", self.maker.Alg.getName(), self.sequence.Alg.getName(), "None")
 
+def EmptyMenuSequenceCfg(flags, name):
+    return EmptyMenuSequence(name)
+
+def isEmptySequenceCfg(o):
+    return o == EmptyMenuSequenceCfg
 
 class MenuSequenceCA:
     """Class to group reco sequences with the Hypo.
@@ -667,8 +672,21 @@ class ChainStep(object):
                 log.error("[ChainStep] multiplicities: %s",multiplicity)
                 raise RuntimeError("Tried to configure a ChainStep %s with %i Sequences and %i multiplicities. These lists must have the same size" % (name, len(Sequences), len(multiplicity)) )
  
-        self.name      = name
-        self.sequences = Sequences
+        self.name = name
+        self.sequences = []
+        for iseq, seq in enumerate(Sequences):                
+            if isinstance(seq, MenuSequenceCA) or isinstance(seq, EmptyMenuSequence): # this is a stopgap solution to handle jets
+                self.sequences.append(seq)   
+            else:
+                if not isinstance(seq, functools.partial):
+                    log.error("[ChainStep] %s Sequences verification failed, sequence %d is not partial function, likely ChainBase.getStep function was not used", name, iseq)
+                    log.error("[ChainStep] It rather seems to be of type %s trying to print it", type(seq))
+                    raise RuntimeError("Sequence is not packaged in a tuple, see error message above" )
+                    
+                # At the moment sequences are created here,
+                # this will be deferred to later stages in follow-up MRs
+                self.sequences.append(seq())
+
         self.onlyJets  = False
         sig_set = None
         if len(chainDicts) > 0  and 'signature' in chainDicts[0]: 
@@ -798,10 +816,10 @@ class ChainStep(object):
         if len(self.sequences) == 0:
             return "--- ChainStep %s ---\n is Empty, ChainDict = %s "%(self.name,  ' '.join(map(str, [dic['chainName'] for dic in self.stepDicts])) )
         
-        repr_string= "--- ChainStep %s ---\n , multiplicity = %s  ChainDict = %s \n + MenuSequences = %s "%\
+        repr_string= "--- ChainStep %s ---\n , multiplicity = %s  ChainDict = %s \n + MenuSequences size = %d "%\
           (self.name,  ' '.join(map(str,[mult for mult in self.multiplicity])),
              ' '.join(map(str, [dic['chainName'] for dic in self.stepDicts])),
-             ' '.join(map(str, [seq.name for seq in self.sequences]) ))
+             len(self.sequences) )
         if self.combo is not None:
             repr_string += "\n + ComboHypo = %s" % self.combo.Alg.name
             if len(self.comboToolConfs)>0:
diff --git a/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Config/Utility/ChainMerging.py b/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Config/Utility/ChainMerging.py
index 077529893b3b3252785b8cf55179e8d50d2f6ef8..ea0f32ced74bb5199c81595b2aca543badccbae8 100644
--- a/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Config/Utility/ChainMerging.py
+++ b/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Config/Utility/ChainMerging.py
@@ -1,12 +1,12 @@
 # Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration
 
 from TriggerMenuMT.HLT.Config.Utility.MenuAlignmentTools import get_alignment_group_ordering as getAlignmentGroupOrdering
-from TriggerMenuMT.HLT.Config.MenuComponents import Chain, ChainStep, EmptyMenuSequence
+from TriggerMenuMT.HLT.Config.MenuComponents import Chain, ChainStep, EmptyMenuSequence, EmptyMenuSequenceCfg
 
 from AthenaCommon.Logging import logging
 from DecisionHandling.DecisionHandlingConfig import ComboHypoCfg
 from TrigCompositeUtils.TrigCompositeUtils import legName
-
+import functools
 from copy import deepcopy
 import re
 
@@ -545,10 +545,10 @@ def makeCombinedStep(parallel_steps, stepNumber, chainDefList, allSteps = [], cu
             seqName = getEmptySeqName(new_stepDict['signature'], stepNumber, alignment_group)
 
             if isFullScanRoI(chainDefList[chain_index].L1decisions[0]):
-                stepSeq.append(EmptyMenuSequence(seqName+"FS"))
+                stepSeq.append(functools.partial(EmptyMenuSequenceCfg, None, name=seqName+"FS"))
                 currentStepName = 'Empty' + alignment_group +'Align'+str(stepNumber)+'_'+new_stepDict['chainParts'][0]['multiplicity']+new_stepDict['signature']+'FS'
             else:
-                stepSeq.append(EmptyMenuSequence(seqName))
+                stepSeq.append(functools.partial(EmptyMenuSequenceCfg, None, name=seqName))
                 currentStepName = 'Empty' + alignment_group +'Align'+str(stepNumber)+'_'+new_stepDict['chainParts'][0]['multiplicity']+new_stepDict['signature']
 
             log.debug("[makeCombinedStep]  chain_index: %s, step name: %s,  empty sequence name: %s", chain_index, currentStepName, seqName)
@@ -657,10 +657,10 @@ def build_empty_sequences(emptyChainDicts, step_mult, caller, L1decisions, seqNa
     for ileg in range(len(L1decisions)):                        
         if isFullScanRoI(L1decisions[ileg]):
             log.debug("[%s] adding FS empty sequence", caller)
-            emptySequences += [EmptyMenuSequence(seqNames[ileg]+"FS")]
+            emptySequences += [functools.partial(EmptyMenuSequenceCfg, None, name=seqNames[ileg]+"FS")]
         else:
             log.debug("[%s] adding non-FS empty sequence", caller)
-            emptySequences += [EmptyMenuSequence(seqNames[ileg])]
+            emptySequences += [functools.partial(EmptyMenuSequenceCfg, None, name=seqNames[ileg])]
             
     log.verbose("[%s] emptyChainDicts %s", caller, emptyChainDicts)
     log.debug("[%s] %s has number of empty sequences %d and empty legs in stepDicts %d",
diff --git a/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Egamma/TrigEgammaConfigFlags.py b/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Egamma/TrigEgammaConfigFlags.py
index c85d6a4d97fc966e9ab945197243f3c36f563512..c1266ca0532080d4977976ca9e97418e5c52bc5b 100644
--- a/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Egamma/TrigEgammaConfigFlags.py
+++ b/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Egamma/TrigEgammaConfigFlags.py
@@ -34,6 +34,9 @@ def createTrigEgammaConfigFlags():
     flags.addFlag('Trigger.egamma.doNoiseThrRings', False)
     flags.addFlag('Trigger.egamma.sigmaNoiseFactor',2.0)
 
+    # FastCalo BDT calibration
+    flags.addFlag('Trigger.egamma.fastCaloETCalibration',False)
+    flags.addFlag('Trigger.egamma.fastCaloETCalibrationVersion','egammaFastCaloCalib/online/v0')
     return flags
 
 
@@ -42,6 +45,5 @@ if __name__ == "__main__":
     from AthenaConfiguration.TestDefaults import defaultTestFiles
     flags = initConfigFlags()
     flags.Input.Files = defaultTestFiles.RAW_RUN2
-
     flags.lock()
     flags.dump("Egamma|Trigger")
diff --git a/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Menu/MC_pp_run4_v1.py b/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Menu/MC_pp_run4_v1.py
index a7635f804a793ca14d50b02e44dac949a6d9266d..abae6d5c3a2d8ddb8d9d0ab7243b9e35603e6c54 100644
--- a/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Menu/MC_pp_run4_v1.py
+++ b/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Menu/MC_pp_run4_v1.py
@@ -24,7 +24,7 @@ from TriggerMenuMT.HLT.Menu.Physics_pp_run4_v1 import (
     EgammaMuonGroup, EgammaTauGroup, EgammaMETGroup, EgammaJetGroup, 
     #EgammaBjetGroup,
     #UnconvTrkGroup,
-    #SingleBjetGroup, MultiBjetGroup,
+    SingleBjetGroup, MultiBjetGroup,
     PrimaryLegGroup, PrimaryPhIGroup, PrimaryL1MuGroup,
     SupportPhIGroup, SupportLegGroup, SupportGroup,
     TagAndProbePhIGroup, TagAndProbeLegGroup,
@@ -255,45 +255,41 @@ def addMCSignatures(chains):
     ]
 
     chainsMC['Bjet'] = [
-        # Jan 2024: bjet chains do not configure, mention of TRT and then crash in pixel geo.
-        # GeoModelSvc                                                       INFO GeoModelSvc.TRT_DetectorTool     SZ= 2372Kb      Time = 0.04S
-        # Followed by crash
-        # PixelDetectorTool::create() /build/atnight/localbuilds/nightlies/Athena/main/athena/InnerDetector/InDetDetDescr/PixelGeoModel/src/PixelDetectorTool.cxx:103:25 
-
-        # ChainProp(name="HLT_j225_0eta290_020jvt_bdl1d70_pf_ftf_preselj180_L1jJ160", l1SeedThresholds=['FSNOSEED'], groups=PrimaryPhIGroup+SingleBjetGroup, monGroups=['bJetMon:online']),
-        # ChainProp(name="HLT_j300_0eta290_020jvt_bdl1d77_pf_ftf_preselj225_L1jJ160", l1SeedThresholds=['FSNOSEED'], groups=PrimaryPhIGroup+SingleBjetGroup),
-        # ChainProp(name="HLT_j360_0eta290_020jvt_bdl1d85_pf_ftf_preselj225_L1jJ160", l1SeedThresholds=['FSNOSEED'], groups=PrimaryPhIGroup+SingleBjetGroup),
-        # ChainProp(name="HLT_j225_0eta290_020jvt_bdl1d70_pf_ftf_preselj190_L1jJ160", l1SeedThresholds=['FSNOSEED'], groups=PrimaryPhIGroup+SingleBjetGroup, monGroups=['bJetMon:online']),
-        # ChainProp(name="HLT_j225_0eta290_020jvt_bdl1d70_pf_ftf_preselj200_L1jJ160", l1SeedThresholds=['FSNOSEED'], groups=PrimaryPhIGroup+SingleBjetGroup, monGroups=['bJetMon:online']),
-        # ChainProp(name="HLT_j225_0eta290_020jvt_bdl1d60_pf_ftf_preselj180_L1jJ160", l1SeedThresholds=['FSNOSEED'], groups=PrimaryPhIGroup+SingleBjetGroup),
-        # ChainProp(name="HLT_j275_0eta290_020jvt_bdl1d70_pf_ftf_preselj225_L1jJ160", l1SeedThresholds=['FSNOSEED'], groups=PrimaryPhIGroup+SingleBjetGroup),
-        # ChainProp(name="HLT_j300_0eta290_020jvt_bdl1d70_pf_ftf_preselj225_L1jJ160", l1SeedThresholds=['FSNOSEED'], groups=PrimaryPhIGroup+SingleBjetGroup),
-        # ChainProp(name="HLT_j360_0eta290_020jvt_bdl1d77_pf_ftf_preselj225_L1jJ160", l1SeedThresholds=['FSNOSEED'], groups=PrimaryPhIGroup+SingleBjetGroup),
-        # ChainProp(name="HLT_3j65_0eta290_020jvt_bdl1d77_pf_ftf_presel3j45b95_L13jJ70p0ETA23", l1SeedThresholds=['FSNOSEED'], groups=MultiBjetGroup + PrimaryPhIGroup),
-        # ChainProp(name="HLT_4j35_0eta290_020jvt_bdl1d77_pf_ftf_presel4j25b95_L14jJ40p0ETA25", l1SeedThresholds=['FSNOSEED'], groups=MultiBjetGroup + PrimaryPhIGroup),
-        # ChainProp(name="HLT_3j35_0eta290_020jvt_bdl1d70_j35_pf_ftf_presel2j25XX2j25b85_L14jJ40p0ETA25",      l1SeedThresholds=['FSNOSEED','FSNOSEED'], groups=PrimaryPhIGroup+MultiBjetGroup),
-        # ChainProp(name="HLT_2j35_0eta290_020jvt_bdl1d70_2j35_0eta290_020jvt_bdl1d85_pf_ftf_presel4j25b95_L14jJ40p0ETA25", l1SeedThresholds=['FSNOSEED','FSNOSEED'], groups=PrimaryPhIGroup+MultiBjetGroup),
-        # ChainProp(name="HLT_2j55_0eta290_020jvt_bdl1d60_2j55_pf_ftf_presel2j25XX2j25b85_L14jJ40p0ETA25",        l1SeedThresholds=['FSNOSEED','FSNOSEED'], groups=PrimaryPhIGroup+MultiBjetGroup),
-        # ChainProp(name="HLT_2j35_0eta290_020jvt_bdl1d60_3j35_pf_ftf_presel3j25XX2j25b85_L15jJ40p0ETA25",  l1SeedThresholds=['FSNOSEED','FSNOSEED'], groups=PrimaryPhIGroup+MultiBjetGroup),
-        # ChainProp(name="HLT_2j45_0eta290_020jvt_bdl1d60_3j45_pf_ftf_presel3j25XX2j25b85_L15jJ40p0ETA25",  l1SeedThresholds=['FSNOSEED','FSNOSEED'], groups=PrimaryPhIGroup+MultiBjetGroup),
-        # ChainProp(name="HLT_j75_0eta290_020jvt_bdl1d60_3j75_pf_ftf_preselj50b85XX3j50_L14jJ50",           l1SeedThresholds=['FSNOSEED','FSNOSEED'], groups=PrimaryPhIGroup+MultiBjetGroup),
-        # ChainProp(name="HLT_2j45_0eta290_020jvt_bdl1d60_2j45_pf_ftf_presel2j25XX2j25b85_L14jJ40p0ETA25",  l1SeedThresholds=['FSNOSEED','FSNOSEED'], groups=PrimaryPhIGroup+MultiBjetGroup),
-        # ChainProp(name="HLT_j150_2j55_0eta290_020jvt_bdl1d70_pf_ftf_preselj80XX2j45b90_L1jJ140_3jJ60", l1SeedThresholds=['FSNOSEED','FSNOSEED'], groups=PrimaryPhIGroup+MultiBjetGroup),
-        # ChainProp(name="HLT_j175_0eta290_020jvt_bdl1d60_j60_0eta290_020jvt_bdl1d60_pf_ftf_preselj140b85XXj45b85_L1jJ160", l1SeedThresholds=['FSNOSEED','FSNOSEED'], groups=PrimaryPhIGroup+MultiBjetGroup),
-        # ChainProp(name="HLT_2j35c_020jvt_bdl1d60_2j35c_020jvt_pf_ftf_presel2j25XX2j25b85_L14jJ40p0ETA25", l1SeedThresholds=['FSNOSEED','FSNOSEED'], groups=PrimaryPhIGroup+MultiBjetGroup),
-        # ChainProp(name='HLT_2j45_0eta290_020jvt_bdl1d70_j0_HT300_j0_DJMASS700j35_pf_ftf_L1HT150-jJ50s5pETA32_jMJJ-400-CF', l1SeedThresholds=['FSNOSEED']*3, groups=PrimaryPhIGroup+MultiBjetGroup+Topo3Group),
+
+        ChainProp(name="HLT_j225_0eta290_020jvt_bdl1d70_pf_ftf_preselj180_L1jJ160", l1SeedThresholds=['FSNOSEED'], groups=PrimaryPhIGroup+SingleBjetGroup, monGroups=['bJetMon:online']),
+        ChainProp(name="HLT_j300_0eta290_020jvt_bdl1d77_pf_ftf_preselj225_L1jJ160", l1SeedThresholds=['FSNOSEED'], groups=PrimaryPhIGroup+SingleBjetGroup),
+        ChainProp(name="HLT_j360_0eta290_020jvt_bdl1d85_pf_ftf_preselj225_L1jJ160", l1SeedThresholds=['FSNOSEED'], groups=PrimaryPhIGroup+SingleBjetGroup),
+        ChainProp(name="HLT_j225_0eta290_020jvt_bdl1d70_pf_ftf_preselj190_L1jJ160", l1SeedThresholds=['FSNOSEED'], groups=PrimaryPhIGroup+SingleBjetGroup, monGroups=['bJetMon:online']),
+        ChainProp(name="HLT_j225_0eta290_020jvt_bdl1d70_pf_ftf_preselj200_L1jJ160", l1SeedThresholds=['FSNOSEED'], groups=PrimaryPhIGroup+SingleBjetGroup, monGroups=['bJetMon:online']),
+        ChainProp(name="HLT_j225_0eta290_020jvt_bdl1d60_pf_ftf_preselj180_L1jJ160", l1SeedThresholds=['FSNOSEED'], groups=PrimaryPhIGroup+SingleBjetGroup),
+        ChainProp(name="HLT_j275_0eta290_020jvt_bdl1d70_pf_ftf_preselj225_L1jJ160", l1SeedThresholds=['FSNOSEED'], groups=PrimaryPhIGroup+SingleBjetGroup),
+        ChainProp(name="HLT_j300_0eta290_020jvt_bdl1d70_pf_ftf_preselj225_L1jJ160", l1SeedThresholds=['FSNOSEED'], groups=PrimaryPhIGroup+SingleBjetGroup),
+        ChainProp(name="HLT_j360_0eta290_020jvt_bdl1d77_pf_ftf_preselj225_L1jJ160", l1SeedThresholds=['FSNOSEED'], groups=PrimaryPhIGroup+SingleBjetGroup),
+        ChainProp(name="HLT_3j65_0eta290_020jvt_bdl1d77_pf_ftf_presel3j45b95_L13jJ70p0ETA23", l1SeedThresholds=['FSNOSEED'], groups=MultiBjetGroup + PrimaryPhIGroup),
+        ChainProp(name="HLT_4j35_0eta290_020jvt_bdl1d77_pf_ftf_presel4j25b95_L14jJ40p0ETA25", l1SeedThresholds=['FSNOSEED'], groups=MultiBjetGroup + PrimaryPhIGroup),
+        ChainProp(name="HLT_3j35_0eta290_020jvt_bdl1d70_j35_pf_ftf_presel2j25XX2j25b85_L14jJ40p0ETA25",      l1SeedThresholds=['FSNOSEED','FSNOSEED'], groups=PrimaryPhIGroup+MultiBjetGroup),
+        ChainProp(name="HLT_2j35_0eta290_020jvt_bdl1d70_2j35_0eta290_020jvt_bdl1d85_pf_ftf_presel4j25b95_L14jJ40p0ETA25", l1SeedThresholds=['FSNOSEED','FSNOSEED'], groups=PrimaryPhIGroup+MultiBjetGroup),
+        ChainProp(name="HLT_2j55_0eta290_020jvt_bdl1d60_2j55_pf_ftf_presel2j25XX2j25b85_L14jJ40p0ETA25",        l1SeedThresholds=['FSNOSEED','FSNOSEED'], groups=PrimaryPhIGroup+MultiBjetGroup),
+        ChainProp(name="HLT_2j35_0eta290_020jvt_bdl1d60_3j35_pf_ftf_presel3j25XX2j25b85_L15jJ40p0ETA25",  l1SeedThresholds=['FSNOSEED','FSNOSEED'], groups=PrimaryPhIGroup+MultiBjetGroup),
+        ChainProp(name="HLT_2j45_0eta290_020jvt_bdl1d60_3j45_pf_ftf_presel3j25XX2j25b85_L15jJ40p0ETA25",  l1SeedThresholds=['FSNOSEED','FSNOSEED'], groups=PrimaryPhIGroup+MultiBjetGroup),
+        ChainProp(name="HLT_j75_0eta290_020jvt_bdl1d60_3j75_pf_ftf_preselj50b85XX3j50_L14jJ50",           l1SeedThresholds=['FSNOSEED','FSNOSEED'], groups=PrimaryPhIGroup+MultiBjetGroup),
+        ChainProp(name="HLT_2j45_0eta290_020jvt_bdl1d60_2j45_pf_ftf_presel2j25XX2j25b85_L14jJ40p0ETA25",  l1SeedThresholds=['FSNOSEED','FSNOSEED'], groups=PrimaryPhIGroup+MultiBjetGroup),
+        ChainProp(name="HLT_j150_2j55_0eta290_020jvt_bdl1d70_pf_ftf_preselj80XX2j45b90_L1jJ140_3jJ60", l1SeedThresholds=['FSNOSEED','FSNOSEED'], groups=PrimaryPhIGroup+MultiBjetGroup),
+        ChainProp(name="HLT_j175_0eta290_020jvt_bdl1d60_j60_0eta290_020jvt_bdl1d60_pf_ftf_preselj140b85XXj45b85_L1jJ160", l1SeedThresholds=['FSNOSEED','FSNOSEED'], groups=PrimaryPhIGroup+MultiBjetGroup),
+        ChainProp(name="HLT_2j35c_020jvt_bdl1d60_2j35c_020jvt_pf_ftf_presel2j25XX2j25b85_L14jJ40p0ETA25", l1SeedThresholds=['FSNOSEED','FSNOSEED'], groups=PrimaryPhIGroup+MultiBjetGroup),
+        ChainProp(name='HLT_2j45_0eta290_020jvt_bdl1d70_j0_HT300_j0_DJMASS700j35_pf_ftf_L1HT150-jJ50s5pETA32_jMJJ-400-CF', l1SeedThresholds=['FSNOSEED']*3, groups=PrimaryPhIGroup+MultiBjetGroup+Topo3Group),
         
-        # VBF chains
-        # ChainProp(name='HLT_j80c_j60_j45f_SHARED_2j45_0eta290_020jvt_bdl1d60_pf_ftf_preselc60XXj45XXf40_L1jJ80p0ETA25_2jJ55_jJ50p30ETA49', l1SeedThresholds=['FSNOSEED']*4, groups=PrimaryPhIGroup+MultiBjetGroup),
-        # ChainProp(name="HLT_j80_0eta290_020jvt_bdl1d70_j60_0eta290_020jvt_bdl1d85_j45f_pf_ftf_preselj60XXj45XXf40_L1jJ80p0ETA25_2jJ55_jJ50p30ETA49", l1SeedThresholds=['FSNOSEED']*3, groups=PrimaryPhIGroup+MultiBjetGroup),
-        # ChainProp(name="HLT_j55_0eta290_020jvt_bdl1d70_2j45f_pf_ftf_preselj45XX2f40_L1jJ55p0ETA23_2jJ40p30ETA49",l1SeedThresholds=['FSNOSEED']*2, groups=PrimaryPhIGroup+MultiBjetGroup),
-        # ChainProp(name='HLT_j70a_j50a_2j35a_SHARED_2j35_0eta290_020jvt_bdl1d70_j0_DJMASS1000j50_pf_ftf_presela60XXa40XX2a25_L1jMJJ-500-NFF', l1SeedThresholds=['FSNOSEED']*5,stream=['VBFDelayed'], groups=PrimaryPhIGroup+MultiBjetGroup+Topo3Group),
-        # ChainProp(name='HLT_j80c_020jvt_j55c_020jvt_j28c_020jvt_j20c_020jvt_SHARED_3j20c_020jvt_bdl1d82_pf_ftf_presel2c20XX2c20b85_L1jJ85p0ETA21_3jJ40p0ETA25', l1SeedThresholds=['FSNOSEED']*5, groups=PrimaryPhIGroup+MultiBjetGroup),
-        # ChainProp(name='HLT_j80c_020jvt_j55c_020jvt_j28c_020jvt_j20c_020jvt_SHARED_2j20c_020jvt_bdl1d77_pf_ftf_presel2c20XX2c20b85_L1jJ85p0ETA21_3jJ40p0ETA25', l1SeedThresholds=['FSNOSEED']*5, stream=['VBFDelayed'], groups=PrimaryPhIGroup+MultiBjetGroup),
-        # ChainProp(name='HLT_5j35c_020jvt_j25c_020jvt_SHARED_j25c_020jvt_bdl1d60_pf_ftf_presel5c25XXc25b85_L14jJ40', l1SeedThresholds=['FSNOSEED']*3, stream=['VBFDelayed'], groups=PrimaryPhIGroup+MultiBjetGroup),
-        # ChainProp(name='HLT_5j45c_020jvt_j25c_020jvt_SHARED_j25c_020jvt_bdl1d60_pf_ftf_presel5c25XXc25b85_L14jJ40', l1SeedThresholds=['FSNOSEED']*3, stream=['VBFDelayed'], groups=PrimaryPhIGroup+MultiBjetGroup),
+        #VBF chains
+        ChainProp(name='HLT_j80c_j60_j45f_SHARED_2j45_0eta290_020jvt_bdl1d60_pf_ftf_preselc60XXj45XXf40_L1jJ80p0ETA25_2jJ55_jJ50p30ETA49', l1SeedThresholds=['FSNOSEED']*4, groups=PrimaryPhIGroup+MultiBjetGroup),
+        ChainProp(name="HLT_j80_0eta290_020jvt_bdl1d70_j60_0eta290_020jvt_bdl1d85_j45f_pf_ftf_preselj60XXj45XXf40_L1jJ80p0ETA25_2jJ55_jJ50p30ETA49", l1SeedThresholds=['FSNOSEED']*3, groups=PrimaryPhIGroup+MultiBjetGroup),
+        ChainProp(name="HLT_j55_0eta290_020jvt_bdl1d70_2j45f_pf_ftf_preselj45XX2f40_L1jJ55p0ETA23_2jJ40p30ETA49",l1SeedThresholds=['FSNOSEED']*2, groups=PrimaryPhIGroup+MultiBjetGroup),
+        ChainProp(name='HLT_j70a_j50a_2j35a_SHARED_2j35_0eta290_020jvt_bdl1d70_j0_DJMASS1000j50_pf_ftf_presela60XXa40XX2a25_L1jMJJ-500-NFF', l1SeedThresholds=['FSNOSEED']*5,stream=['VBFDelayed'], groups=PrimaryPhIGroup+MultiBjetGroup+Topo3Group),
+        ChainProp(name='HLT_j80c_020jvt_j55c_020jvt_j28c_020jvt_j20c_020jvt_SHARED_3j20c_020jvt_bdl1d82_pf_ftf_presel2c20XX2c20b85_L1jJ85p0ETA21_3jJ40p0ETA25', l1SeedThresholds=['FSNOSEED']*5, groups=PrimaryPhIGroup+MultiBjetGroup),
+        ChainProp(name='HLT_j80c_020jvt_j55c_020jvt_j28c_020jvt_j20c_020jvt_SHARED_2j20c_020jvt_bdl1d77_pf_ftf_presel2c20XX2c20b85_L1jJ85p0ETA21_3jJ40p0ETA25', l1SeedThresholds=['FSNOSEED']*5, stream=['VBFDelayed'], groups=PrimaryPhIGroup+MultiBjetGroup),
+        ChainProp(name='HLT_5j35c_020jvt_j25c_020jvt_SHARED_j25c_020jvt_bdl1d60_pf_ftf_presel5c25XXc25b85_L14jJ40', l1SeedThresholds=['FSNOSEED']*3, stream=['VBFDelayed'], groups=PrimaryPhIGroup+MultiBjetGroup),
+        ChainProp(name='HLT_5j45c_020jvt_j25c_020jvt_SHARED_j25c_020jvt_bdl1d60_pf_ftf_presel5c25XXc25b85_L14jJ40', l1SeedThresholds=['FSNOSEED']*3, stream=['VBFDelayed'], groups=PrimaryPhIGroup+MultiBjetGroup),
     ]
-
+    
     chainsMC['MET'] = [
         ChainProp(name='HLT_xe65_cell_xe90_pfopufit_L1jXE100', l1SeedThresholds=['FSNOSEED']*2, groups=PrimaryPhIGroup+METGroup, monGroups=['metMon:t0']),
         ChainProp(name='HLT_xe65_cell_xe100_pfopufit_L1jXE100', l1SeedThresholds=['FSNOSEED']*2, groups=PrimaryPhIGroup+METGroup, monGroups=['metMon:t0']),
diff --git a/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Menu/P1_run3_v1.py b/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Menu/P1_run3_v1.py
index e099b8f2a2b84924a214db8f9bd0b8fea2c14a5f..de5e0f4b2ac49e3c1879a8d59b1e97e70816909b 100644
--- a/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Menu/P1_run3_v1.py
+++ b/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Menu/P1_run3_v1.py
@@ -104,6 +104,7 @@ def addCommonP1Signatures(chains):
         ChainProp(name='HLT_j140f_LArPEBHLT_L1J75p31ETA49', l1SeedThresholds=['FSNOSEED'], stream=['LArCells'], groups=SingleJetGroup+SupportLegGroup),
         ChainProp(name='HLT_j140f_LArPEBHLT_L1jJ125p30ETA49', l1SeedThresholds=['FSNOSEED'], stream=['LArCells'], groups=SingleJetGroup+SupportPhIGroup),
         ChainProp(name='HLT_j165_LArPEBHLT_L1J100', l1SeedThresholds=['FSNOSEED'], stream=['LArCells'], groups=SingleJetGroup+SupportLegGroup),
+        ChainProp(name='HLT_j165_LArPEBHLT_L1jJ160', l1SeedThresholds=['FSNOSEED'], stream=['LArCells'], groups=SingleJetGroup+SupportPhIGroup),
     ]
 
     chainsP1['Calib'] = [
@@ -445,8 +446,6 @@ def addHighMuP1Signatures(chains):
         ChainProp(name='HLT_j0_pf_ftf_presel6c25_BeamSpotPEB_L14jJ40', l1SeedThresholds=['FSNOSEED'], stream=['BeamSpot'], groups=['RATE:BeamSpot',  'BW:BeamSpot', 'RATE:CPS_4jJ40']+SupportPhIGroup),
         ChainProp(name='HLT_j0_pf_ftf_presel2c20b85_BeamSpotPEB_L1jJ85p0ETA21_3jJ40p0ETA25', l1SeedThresholds=['FSNOSEED'], stream=['BeamSpot'], groups=['RATE:BeamSpot',  'BW:BeamSpot', 'RATE:CPS_jJ85p0ETA21_3jJ40p0ETA25']+SupportPhIGroup),
 
-        # Placed here because not in HI menu
-        ChainProp(name='HLT_j165_LArPEBHLT_L1jJ160', l1SeedThresholds=['FSNOSEED'], stream=['LArCells'], groups=SingleJetGroup+SupportPhIGroup),
     ]
 
     addP1Signatures(chains,chainsP1)
@@ -475,6 +474,7 @@ def addLowMuP1Signatures(chains):
         ChainProp(name='HLT_idcalib_trk9_IDCalibPEB_L1J100', stream=['IDCalib'], groups=SupportLegGroup+['RATE:Calibration','BW:Detector'], l1SeedThresholds=['FSNOSEED']), 
         ChainProp(name='HLT_idcalib_trk9_IDCalibPEB_L1jJ160', stream=['IDCalib'], groups=SupportPhIGroup+['RATE:Calibration','BW:Detector'], l1SeedThresholds=['FSNOSEED']), 
         ChainProp(name='HLT_idcalib_trk9_IDCalibPEB_L1XE50', stream=['IDCalib'], groups=SupportLegGroup+['RATE:Calibration','BW:Detector'], l1SeedThresholds=['FSNOSEED']),
+        ChainProp(name='HLT_idcalib_trk9_IDCalibPEB_L1jXE100', stream=['IDCalib'], groups=SupportPhIGroup+['RATE:Calibration','BW:Detector'], l1SeedThresholds=['FSNOSEED']),
         #IDcalib for lower lumi
         ChainProp(name='HLT_idcalib_trk4_IDCalibPEB_L1jJ60', stream=['IDCalib'], groups=SupportPhIGroup+['RATE:Calibration','BW:Detector'], l1SeedThresholds=['FSNOSEED']),
 
@@ -605,9 +605,6 @@ def addCosmicP1Signatures(chains):
         ChainProp(name='HLT_noalg_L1eEM5_EMPTY', l1SeedThresholds=['FSNOSEED'], stream=['CosmicCalo'],groups=['RATE:Cosmic_Calo','BW:MinBias','RATE:Calibration']+SupportPhIGroup),
     ]
 
-    chainsP1['Jet'] = [
-        ChainProp(name='HLT_j165_LArPEBHLT_L1jJ160', l1SeedThresholds=['FSNOSEED'], stream=['LArCells'], groups=SingleJetGroup+SupportPhIGroup),
-    ]
 
     addP1Signatures(chains,chainsP1)
 
diff --git a/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Menu/PhysicsP1_pp_lowMu_run3_v1.py b/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Menu/PhysicsP1_pp_lowMu_run3_v1.py
index caed61335432b546228b037f4632ff1bcc57be63..b46aee78bfc20f36b2696df5b6abf3f79d2b28fa 100644
--- a/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Menu/PhysicsP1_pp_lowMu_run3_v1.py
+++ b/Trigger/TriggerCommon/TriggerMenuMT/python/HLT/Menu/PhysicsP1_pp_lowMu_run3_v1.py
@@ -243,7 +243,7 @@ def getLowMuPhysicsSignatures():
         ChainProp(name='HLT_j120_L1jJ60', l1SeedThresholds=['FSNOSEED'], stream=[PhysicsStream], groups=SingleJetGroup+LowMuGroupPhI),
         ChainProp(name='HLT_j140_L1jJ90', l1SeedThresholds=['FSNOSEED'], stream=[PhysicsStream], groups=SingleJetGroup+LowMuGroupPhI),
         ChainProp(name='HLT_j175_L1jJ90', l1SeedThresholds=['FSNOSEED'], stream=[PhysicsStream], groups=SingleJetGroup+LowMuGroupPhI),
-        ChainProp(name='HLT_j260_L1jJ90', l1SeedThresholds=['FSNOSEED'], stream=[PhysicsStream], groups=SingleJetGroup+LowMuGroupPhI),
+        ChainProp(name='HLT_j260_L1jJ125', l1SeedThresholds=['FSNOSEED'], stream=[PhysicsStream], groups=SingleJetGroup+LowMuGroupPhI),
 
         ChainProp(name='HLT_j15f_L1jTE5', l1SeedThresholds=['FSNOSEED'], stream=[PhysicsStream], groups=SingleJetGroup+LowMuGroupPhI),
         ChainProp(name='HLT_j25f_L1jTE10', l1SeedThresholds=['FSNOSEED'], stream=[PhysicsStream], groups=SingleJetGroup+LowMuGroupPhI),
@@ -263,7 +263,7 @@ def getLowMuPhysicsSignatures():
         ChainProp(name='HLT_j175_a10r_L1jJ90', l1SeedThresholds=['FSNOSEED'], stream=[PhysicsStream], groups=SingleJetGroup+LowMuGroupPhI),
         ChainProp(name='HLT_j175_a10_lcw_subjes_L1jJ90', l1SeedThresholds=['FSNOSEED'], stream=[PhysicsStream], groups=SingleJetGroup+LowMuGroupPhI),
         ChainProp(name='HLT_j260_a10r_L1jJ90', l1SeedThresholds=['FSNOSEED'], stream=[PhysicsStream], groups=SingleJetGroup+LowMuGroupPhI),
-        ChainProp(name='HLT_j260_a10_lcw_subjes_L1jJ90', l1SeedThresholds=['FSNOSEED'], stream=[PhysicsStream], groups=SingleJetGroup+LowMuGroupPhI),
+        ChainProp(name='HLT_j260_a10_lcw_subjes_L1jJ125', l1SeedThresholds=['FSNOSEED'], stream=[PhysicsStream], groups=SingleJetGroup+LowMuGroupPhI),
 
         ChainProp(name='HLT_j110_a10t_lcw_jes_L1gLJ80p0ETA25', l1SeedThresholds=['FSNOSEED'], groups=SingleJetGroup+SupportPhIGroup+['RATE:CPS_gLJ80p0ETA25']),
         ChainProp(name='HLT_j110_a10sd_cssk_pf_jes_ftf_preselj80_L1gLJ80p0ETA25', l1SeedThresholds=['FSNOSEED'], groups=SingleJetGroup+SupportPhIGroup+['RATE:CPS_gLJ80p0ETA25']),
@@ -287,6 +287,21 @@ def getLowMuPhysicsSignatures():
         ChainProp(name="HLT_j80_0eta290_020jvt_bgn160_pf_ftf_L1J30", l1SeedThresholds=['FSNOSEED'], groups=PrimaryLegGroup+SingleBjetGroup),
         ChainProp(name="HLT_j100_0eta290_020jvt_bgn160_pf_ftf_L1J30", l1SeedThresholds=['FSNOSEED'], groups=PrimaryLegGroup+SingleBjetGroup),
 
+        #Phase1
+        #performance chains
+        ChainProp(name="HLT_j30_0eta290_020jvt_boffperf_pf_ftf_L1jTE50", l1SeedThresholds=['FSNOSEED'], groups=PrimaryPhIGroup+SingleBjetGroup),
+        ChainProp(name="HLT_j45_0eta290_020jvt_boffperf_pf_ftf_L1jJ40", l1SeedThresholds=['FSNOSEED'], groups=PrimaryPhIGroup+SingleBjetGroup),
+        ChainProp(name="HLT_j60_0eta290_020jvt_boffperf_pf_ftf_L1jJ50", l1SeedThresholds=['FSNOSEED'], stream=[PhysicsStream, 'express'], groups=PrimaryPhIGroup+SingleBjetGroup),
+        ChainProp(name="HLT_j80_0eta290_020jvt_boffperf_pf_ftf_L1jJ60", l1SeedThresholds=['FSNOSEED'], groups=PrimaryPhIGroup+SingleBjetGroup),
+        ChainProp(name="HLT_j100_0eta290_020jvt_boffperf_pf_ftf_L1jJ60", l1SeedThresholds=['FSNOSEED'], groups=PrimaryPhIGroup+SingleBjetGroup),
+
+        #bjet chains
+        ChainProp(name="HLT_j30_0eta290_020jvt_bgn160_pf_ftf_L1jTE50", l1SeedThresholds=['FSNOSEED'], groups=PrimaryPhIGroup+SingleBjetGroup),
+        ChainProp(name="HLT_j45_0eta290_020jvt_bgn160_pf_ftf_L1jJ40", l1SeedThresholds=['FSNOSEED'], groups=PrimaryPhIGroup+SingleBjetGroup),
+        ChainProp(name="HLT_j60_0eta290_020jvt_bgn160_pf_ftf_L1jJ50", l1SeedThresholds=['FSNOSEED'], stream=[PhysicsStream, 'express'], groups=PrimaryPhIGroup+SingleBjetGroup),
+        ChainProp(name="HLT_j80_0eta290_020jvt_bgn160_pf_ftf_L1jJ60", l1SeedThresholds=['FSNOSEED'], groups=PrimaryPhIGroup+SingleBjetGroup),
+        ChainProp(name="HLT_j100_0eta290_020jvt_bgn160_pf_ftf_L1jJ60", l1SeedThresholds=['FSNOSEED'], groups=PrimaryPhIGroup+SingleBjetGroup),
+
     ]
 
     chains['Combined'] = [
@@ -324,6 +339,15 @@ def getLowMuPhysicsSignatures():
         ChainProp(name='HLT_mu4_j40_dRAB05_L1MU3V_J20', l1SeedThresholds=['MU3V','FSNOSEED'], stream=[PhysicsStream, 'express'], groups=SingleBjetGroup+LowMuGroup+PrimaryLegGroup),
         ChainProp(name='HLT_mu4_j40_L1MU3V_J20', l1SeedThresholds=['MU3V','FSNOSEED'], stream=[PhysicsStream], groups=SingleBjetGroup+LowMuGroup+PrimaryLegGroup),
 
+        ChainProp(name='HLT_mu4_j60_dRAB05_L1MU3V_jJ60', l1SeedThresholds=['MU3V','FSNOSEED'], stream=[PhysicsStream], groups=SingleBjetGroup+LowMuGroup+PrimaryPhIGroup),
+        ChainProp(name='HLT_mu4_j60_L1MU3V_jJ60', l1SeedThresholds=['MU3V','FSNOSEED'], stream=[PhysicsStream], groups=SingleBjetGroup+LowMuGroup+PrimaryPhIGroup),
+        ChainProp(name='HLT_mu4_j60_dRAB05_L1MU3V_jJ50', l1SeedThresholds=['MU3V','FSNOSEED'], stream=[PhysicsStream], groups=SingleBjetGroup+LowMuGroup+PrimaryPhIGroup),
+        ChainProp(name='HLT_mu4_j60_L1MU3V_jJ50', l1SeedThresholds=['MU3V','FSNOSEED'], stream=[PhysicsStream], groups=SingleBjetGroup+LowMuGroup+PrimaryPhIGroup),
+        ChainProp(name='HLT_mu4_j50_dRAB05_L1MU3V_jJ50', l1SeedThresholds=['MU3V','FSNOSEED'], stream=[PhysicsStream], groups=SingleBjetGroup+LowMuGroup+PrimaryPhIGroup),
+        ChainProp(name='HLT_mu4_j50_L1MU3V_jJ50', l1SeedThresholds=['MU3V','FSNOSEED'], stream=[PhysicsStream], groups=SingleBjetGroup+LowMuGroup+PrimaryPhIGroup),
+        ChainProp(name='HLT_mu4_j40_dRAB05_L1MU3V_jJ50', l1SeedThresholds=['MU3V','FSNOSEED'], stream=[PhysicsStream, 'express'], groups=SingleBjetGroup+LowMuGroup+PrimaryPhIGroup),
+        ChainProp(name='HLT_mu4_j40_L1MU3V_jJ50', l1SeedThresholds=['MU3V','FSNOSEED'], stream=[PhysicsStream], groups=SingleBjetGroup+LowMuGroup+PrimaryPhIGroup),
+
     ]
 
     chains['MinBias'] = [
@@ -374,10 +398,12 @@ def getLowMuPhysicsSignatures():
         ChainProp(name='HLT_mb_sptrk_L1MBTS_2_EMPTY', l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup),
         ChainProp(name='HLT_mb_sp900_trk60_hmt_L1MBTS_1_1', l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup),
         ChainProp(name='HLT_mb_sp1400_trk90_hmt_L1TE5', l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup+LowMuGroupLeg),
+        ChainProp(name='HLT_mb_sp1400_trk90_hmt_L1jTE5', l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup+LowMuGroupPhI),
         ChainProp(name='HLT_mb_sptrk_pt4_L1MBTS_1', l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup),
         ChainProp(name='HLT_mb_sptrk_pt6_L1MBTS_1', l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup),
         ChainProp(name='HLT_mb_sptrk_pt8_L1MBTS_1', l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup),
 
+
         #ATR-27744
         ChainProp(name='HLT_mb_sptrk_L1ZDC_OR', l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup),
         ChainProp(name='HLT_mb_sptrk_L1ZDC_XOR_E2', l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup),
@@ -429,6 +455,7 @@ def getLowMuPhysicsSignatures():
         ChainProp(name='HLT_mb_sp500_trk40_hmt_L1MBTS_3_3',          l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup),
         ChainProp(name='HLT_mb_sp600_trk50_hmt_L1RD0_FILLED',          l1SeedThresholds=['FSNOSEED'], stream=['MinBias','express'], groups=MinBiasGroup+LowMuGroup, monGroups=['mbMon:shifter']),
         ChainProp(name='HLT_mb_sp600_trk50_hmt_L1TE3',          l1SeedThresholds=['FSNOSEED'], stream=['MinBias','express'], groups=MinBiasGroup+LowMuGroup+LowMuGroupLeg, monGroups=['mbMon:shifter']),
+        ChainProp(name='HLT_mb_sp600_trk50_hmt_L1jTE3',          l1SeedThresholds=['FSNOSEED'], stream=['MinBias','express'], groups=MinBiasGroup+LowMuGroup+LowMuGroupPhI, monGroups=['mbMon:shifter']),
         ChainProp(name='HLT_mb_sp600_trk50_hmt_L1MBTS_2',          l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup, monGroups=['mbMon:shifter']),
         ChainProp(name='HLT_mb_sp600_trk50_hmt_L1MBTS_2_2',          l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup),
         ChainProp(name='HLT_mb_sp600_trk50_hmt_L1MBTS_3_3',          l1SeedThresholds=['FSNOSEED'], stream=['MinBias', 'express'], groups=MinBiasGroup+LowMuGroup),
@@ -438,13 +465,16 @@ def getLowMuPhysicsSignatures():
         ChainProp(name='HLT_mb_sp800_trk60_hmt_L1MBTS_2',          l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup),
         ChainProp(name='HLT_mb_sp800_trk60_hmt_L1MBTS_4_4',          l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup),
         ChainProp(name='HLT_mb_sp800_trk60_hmt_L1TE3',          l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup+LowMuGroupLeg),
+        ChainProp(name='HLT_mb_sp800_trk60_hmt_L1jTE3',          l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup+LowMuGroupPhI),
         ChainProp(name='HLT_mb_sp1000_trk80_hmt_L1RD0_FILLED',          l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup, monGroups=['mbMon:t0']),
         ChainProp(name='HLT_mb_sp1000_trk80_hmt_L1MBTS_2',          l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup),
         ChainProp(name='HLT_mb_sp1000_trk80_hmt_L1MBTS_4_4',          l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup),
         ChainProp(name='HLT_mb_sp1000_trk80_hmt_L1TE5',          l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup+LowMuGroupLeg),
+        ChainProp(name='HLT_mb_sp1000_trk80_hmt_L1jTE5',          l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup+LowMuGroupPhI),
         ChainProp(name='HLT_mb_sp1500_trk100_hmt_L1RD0_FILLED',          l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup, monGroups=['mbMon:t0']),
         ChainProp(name='HLT_mb_sp1500_trk100_hmt_L1MBTS_4_4',          l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup),
         ChainProp(name='HLT_mb_sp1500_trk100_hmt_L1TE5',          l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup+LowMuGroupLeg),
+        ChainProp(name='HLT_mb_sp1500_trk100_hmt_L1jTE5',          l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup+LowMuGroupPhI),
         ChainProp(name='HLT_mb_sp2000_trk130_hmt_L1RD0_FILLED',          l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup, monGroups=['mbMon:t0']),
         ChainProp(name='HLT_mb_sp2000_trk130_hmt_L1MBTS_4_4',          l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup),
         ChainProp(name='HLT_mb_sp2000_trk130_hmt_L1TE5',          l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup+LowMuGroupLeg),
@@ -456,6 +486,11 @@ def getLowMuPhysicsSignatures():
         ChainProp(name='HLT_mb_sp5000_trk290_hmt_L1MBTS_4_4',          l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup),
         ChainProp(name='HLT_mb_sp5000_trk290_hmt_L1TE50',          l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup+LowMuGroupLeg),
 
+        ChainProp(name='HLT_mb_sp2000_trk130_hmt_L1jTE5',          l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup+LowMuGroupPhI),
+        ChainProp(name='HLT_mb_sp3000_trk200_hmt_L1jTE20',          l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup+LowMuGroupPhI),
+        ChainProp(name='HLT_mb_sp4100_trk260_hmt_L1jTE20',          l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup+LowMuGroupPhI),
+        ChainProp(name='HLT_mb_sp5000_trk290_hmt_L1jTE50',          l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup+LowMuGroupPhI),
+
         # HMT with pileup suppression
         ChainProp(name='HLT_mb_sp15_pusup0_trk5_hmt_L1RD0_FILLED',          l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup, monGroups=['mbMon:t0']),
         ChainProp(name='HLT_mb_sp500_pusup7_trk40_hmt_L1RD0_FILLED',          l1SeedThresholds=['FSNOSEED'], stream=['MinBias'], groups=MinBiasGroup+LowMuGroup, monGroups=['mbMon:t0']),
@@ -539,21 +574,20 @@ def getLowMuPhysicsSignatures():
         ChainProp(name='HLT_noalg_L1MU8VF',      l1SeedThresholds=['FSNOSEED'], stream=['Main'], groups=['PS:NoBulkMCProd']+SingleMuonGroup),
         ChainProp(name='HLT_noalg_L1MU5VF',      l1SeedThresholds=['FSNOSEED'], stream=['Main'], groups=['PS:NoBulkMCProd']+SingleMuonGroup),
         #EM3 is removed from HI L1 menu
+        ChainProp(name='HLT_noalg_L1EM10VH',     l1SeedThresholds=['FSNOSEED'], stream=['Main'], groups=['PS:NoBulkMCProd']+EgammaStreamersGroup+SupportLegGroup),
         ChainProp(name='HLT_noalg_L1EM12',       l1SeedThresholds=['FSNOSEED'], stream=['Main'], groups=['PS:NoBulkMCProd']+EgammaStreamersGroup+SupportLegGroup),
         ChainProp(name='HLT_noalg_L1EM15',       l1SeedThresholds=['FSNOSEED'], stream=['Main'], groups=['PS:NoBulkMCProd']+EgammaStreamersGroup+SupportLegGroup),
+
         ChainProp(name='HLT_noalg_L1eEM15',       l1SeedThresholds=['FSNOSEED'], stream=['Main'], groups=['PS:NoBulkMCProd']+EgammaStreamersGroup+SupportPhIGroup),
         ChainProp(name='HLT_noalg_L1eEM18',       l1SeedThresholds=['FSNOSEED'], stream=['Main'], groups=['PS:NoBulkMCProd']+EgammaStreamersGroup+SupportPhIGroup),
         ChainProp(name='HLT_noalg_L1eEM26',       l1SeedThresholds=['FSNOSEED'], stream=['Main'], groups=['PS:NoBulkMCProd']+EgammaStreamersGroup+SupportPhIGroup),
-        ChainProp(name='HLT_noalg_L1EM10VH',     l1SeedThresholds=['FSNOSEED'], stream=['Main'], groups=['PS:NoBulkMCProd']+EgammaStreamersGroup+SupportLegGroup),
-
+        ChainProp(name='HLT_noalg_L1eEM12L',     l1SeedThresholds=['FSNOSEED'], stream=['Main'], groups=['PS:NoBulkMCProd']+EgammaStreamersGroup+SupportPhIGroup),
+
         ChainProp(name='HLT_noalg_L1eEM5',       l1SeedThresholds=['FSNOSEED'], stream=['Main'], groups=['PS:NoBulkMCProd']+EgammaStreamersGroup+SupportPhIGroup),
         ChainProp(name='HLT_noalg_L1eEM9',       l1SeedThresholds=['FSNOSEED'], stream=['Main'], groups=['PS:NoBulkMCProd']+EgammaStreamersGroup+SupportPhIGroup),
         ChainProp(name='HLT_noalg_L1eEM26M',       l1SeedThresholds=['FSNOSEED'], stream=['Main'], groups=['PS:NoBulkMCProd']+EgammaStreamersGroup+SupportPhIGroup),
 
-        ChainProp(name='HLT_noalg_L1TAU8',       l1SeedThresholds=['FSNOSEED'], stream=['Main'], groups=['PS:NoBulkMCProd']+TauStreamersGroup+SupportLegGroup),
         ChainProp(name='HLT_noalg_L1eTAU80',     l1SeedThresholds=['FSNOSEED'], stream=['Main'], groups=['PS:NoBulkMCProd']+TauStreamersGroup+SupportPhIGroup),
-        ChainProp(name='HLT_noalg_L1TAU12IM',    l1SeedThresholds=['FSNOSEED'], stream=['Main'], groups=['PS:NoBulkMCProd']+TauStreamersGroup+SupportLegGroup),        
-        ChainProp(name='HLT_noalg_L1TAU20IM',    l1SeedThresholds=['FSNOSEED'], stream=['Main'], groups=['PS:NoBulkMCProd']+TauStreamersGroup+SupportLegGroup),        
 
         ChainProp(name='HLT_noalg_L1J12',        l1SeedThresholds=['FSNOSEED'], stream=['Main'], groups=['PS:NoBulkMCProd']+JetStreamersGroup+SupportLegGroup),
         ChainProp(name='HLT_noalg_L1J15',        l1SeedThresholds=['FSNOSEED'], stream=['Main'], groups=['PS:NoBulkMCProd']+JetStreamersGroup+SupportLegGroup),
@@ -568,6 +602,15 @@ def getLowMuPhysicsSignatures():
 
         ChainProp(name='HLT_noalg_L1jJ20',       l1SeedThresholds=['FSNOSEED'], stream=['Main'], groups=['PS:NoBulkMCProd']+JetStreamersGroup+SupportPhIGroup),
         ChainProp(name='HLT_noalg_L1jJ30',       l1SeedThresholds=['FSNOSEED'], stream=['Main'], groups=['PS:NoBulkMCProd']+JetStreamersGroup+SupportPhIGroup),
+        ChainProp(name='HLT_noalg_L1jJ40',        l1SeedThresholds=['FSNOSEED'], stream=['Main'], groups=['PS:NoBulkMCProd']+JetStreamersGroup+SupportPhIGroup),
+        ChainProp(name='HLT_noalg_L1jJ50',        l1SeedThresholds=['FSNOSEED'], stream=['Main'], groups=['PS:NoBulkMCProd']+JetStreamersGroup+SupportPhIGroup),
+        ChainProp(name='HLT_noalg_L1jJ55',        l1SeedThresholds=['FSNOSEED'], stream=['Main'], groups=['PS:NoBulkMCProd']+JetStreamersGroup+SupportPhIGroup),
+        ChainProp(name='HLT_noalg_L1jJ60',        l1SeedThresholds=['FSNOSEED'], stream=['Main'], groups=['PS:NoBulkMCProd']+JetStreamersGroup+SupportPhIGroup),
+        ChainProp(name='HLT_noalg_L1jJ80',        l1SeedThresholds=['FSNOSEED'], stream=['Main'], groups=['PS:NoBulkMCProd']+JetStreamersGroup+SupportPhIGroup),
+        ChainProp(name='HLT_noalg_L1jJ90',        l1SeedThresholds=['FSNOSEED'], stream=['Main'], groups=['PS:NoBulkMCProd']+JetStreamersGroup+SupportPhIGroup),
+        ChainProp(name='HLT_noalg_L1jJ125',        l1SeedThresholds=['FSNOSEED'], stream=['Main'], groups=['PS:NoBulkMCProd']+JetStreamersGroup+SupportPhIGroup),
+        ChainProp(name='HLT_noalg_L1jJ160',       l1SeedThresholds=['FSNOSEED'], stream=['Main'], groups=['PS:NoBulkMCProd']+JetStreamersGroup+SupportPhIGroup),
+
         
         ChainProp(name='HLT_noalg_L1XE55',       l1SeedThresholds=['FSNOSEED'], stream=['Main'], groups=['PS:NoBulkMCProd']+METStreamersGroup+SupportLegGroup),
 
@@ -607,6 +650,11 @@ def getLowMuPhysicsSignatures():
         ChainProp(name='HLT_noalg_L1AFP_A_AND_C_TOF_J50', l1SeedThresholds=['FSNOSEED'], stream=[PhysicsStream], groups=['PS:NoBulkMCProd']+MinBiasGroup+SupportLegGroup),
         ChainProp(name='HLT_noalg_L1AFP_A_AND_C_TOF_J75', l1SeedThresholds=['FSNOSEED'], stream=[PhysicsStream], groups=['PS:NoBulkMCProd']+MinBiasGroup+SupportLegGroup),
 
+        ChainProp(name='HLT_noalg_L1AFP_A_AND_C_TOF_T0T1_jJ90', l1SeedThresholds=['FSNOSEED'], stream=[PhysicsStream], groups=['PS:NoBulkMCProd']+MinBiasGroup+SupportPhIGroup),
+        ChainProp(name='HLT_noalg_L1AFP_A_AND_C_TOF_T0T1_jJ125', l1SeedThresholds=['FSNOSEED'], stream=[PhysicsStream], groups=['PS:NoBulkMCProd']+MinBiasGroup+SupportPhIGroup),
+        ChainProp(name='HLT_noalg_L1AFP_A_AND_C_TOF_jJ90', l1SeedThresholds=['FSNOSEED'], stream=[PhysicsStream], groups=['PS:NoBulkMCProd']+MinBiasGroup+SupportPhIGroup),
+        ChainProp(name='HLT_noalg_L1AFP_A_AND_C_TOF_jJ125', l1SeedThresholds=['FSNOSEED'], stream=[PhysicsStream], groups=['PS:NoBulkMCProd']+MinBiasGroup+SupportPhIGroup),
+
         # Calibration AFP
         # all mu
         ChainProp(name='HLT_noalg_L1AFP_FSA_BGRP12', l1SeedThresholds=['FSNOSEED'], stream=[PhysicsStream], groups=['PS:NoBulkMCProd']+MinBiasGroup+SupportGroup),
@@ -627,19 +675,31 @@ def getLowMuPhysicsSignatures():
 
     chains['EnhancedBias'] += [
         ChainProp(name='HLT_noalg_eb_L1MU3V',         l1SeedThresholds=['FSNOSEED'], stream=['EnhancedBias'], groups=['PS:NoBulkMCProd', "RATE:EnhancedBias", "BW:Detector"]+SupportGroup ),
+
         ChainProp(name='HLT_noalg_eb_L1EM12',         l1SeedThresholds=['FSNOSEED'], stream=['EnhancedBias'], groups=['PS:NoBulkMCProd', "RATE:EnhancedBias", "BW:Detector"]+SupportLegGroup ),
         ChainProp(name='HLT_noalg_eb_L1EM15',         l1SeedThresholds=['FSNOSEED'], stream=['EnhancedBias'], groups=['PS:NoBulkMCProd', "RATE:EnhancedBias", "BW:Detector"]+SupportLegGroup ),
         ChainProp(name='HLT_noalg_eb_L1eEM15',         l1SeedThresholds=['FSNOSEED'], stream=['EnhancedBias'], groups=['PS:NoBulkMCProd', "RATE:EnhancedBias", "BW:Detector"]+SupportPhIGroup ),
         ChainProp(name='HLT_noalg_eb_L1eEM18',         l1SeedThresholds=['FSNOSEED'], stream=['EnhancedBias'], groups=['PS:NoBulkMCProd', "RATE:EnhancedBias", "BW:Detector"]+SupportPhIGroup ),
         ChainProp(name='HLT_noalg_eb_L1eEM26',         l1SeedThresholds=['FSNOSEED'], stream=['EnhancedBias'], groups=['PS:NoBulkMCProd', "RATE:EnhancedBias", "BW:Detector"]+SupportPhIGroup ),
+
         ChainProp(name='HLT_noalg_eb_L1TE3',         l1SeedThresholds=['FSNOSEED'], stream=['EnhancedBias'], groups=['PS:NoBulkMCProd', "RATE:EnhancedBias", "BW:Detector"]+SupportLegGroup ),
         ChainProp(name='HLT_noalg_eb_L1TE10',         l1SeedThresholds=['FSNOSEED'], stream=['EnhancedBias'], groups=['PS:NoBulkMCProd', "RATE:EnhancedBias", "BW:Detector"]+SupportLegGroup ),
         ChainProp(name='HLT_noalg_eb_L1TE50',         l1SeedThresholds=['FSNOSEED'], stream=['EnhancedBias'], groups=['PS:NoBulkMCProd', "RATE:EnhancedBias", "BW:Detector"]+SupportLegGroup ),
         ChainProp(name='HLT_noalg_eb_L1TE100',         l1SeedThresholds=['FSNOSEED'], stream=['EnhancedBias'], groups=['PS:NoBulkMCProd', "RATE:EnhancedBias", "BW:Detector"]+SupportLegGroup ),
+        ChainProp(name='HLT_noalg_eb_L1jTE3',         l1SeedThresholds=['FSNOSEED'], stream=['EnhancedBias'], groups=['PS:NoBulkMCProd', "RATE:EnhancedBias", "BW:Detector"]+SupportPhIGroup ),
+        ChainProp(name='HLT_noalg_eb_L1jTE10',         l1SeedThresholds=['FSNOSEED'], stream=['EnhancedBias'], groups=['PS:NoBulkMCProd', "RATE:EnhancedBias", "BW:Detector"]+SupportPhIGroup ),
+        ChainProp(name='HLT_noalg_eb_L1jTE50',         l1SeedThresholds=['FSNOSEED'], stream=['EnhancedBias'], groups=['PS:NoBulkMCProd', "RATE:EnhancedBias", "BW:Detector"]+SupportPhIGroup ),
+        ChainProp(name='HLT_noalg_eb_L1jTE100',         l1SeedThresholds=['FSNOSEED'], stream=['EnhancedBias'], groups=['PS:NoBulkMCProd', "RATE:EnhancedBias", "BW:Detector"]+SupportPhIGroup ),
+
         ChainProp(name='HLT_noalg_eb_L1MBTS_1',         l1SeedThresholds=['FSNOSEED'], stream=['EnhancedBias'], groups=['PS:NoBulkMCProd', "RATE:EnhancedBias", "BW:Detector"]+SupportGroup ),
+
         ChainProp(name='HLT_noalg_eb_L1J15',         l1SeedThresholds=['FSNOSEED'], stream=['EnhancedBias'], groups=['PS:NoBulkMCProd', "RATE:EnhancedBias", "BW:Detector"]+SupportLegGroup ),
         ChainProp(name='HLT_noalg_eb_L1J20',         l1SeedThresholds=['FSNOSEED'], stream=['EnhancedBias'], groups=['PS:NoBulkMCProd', "RATE:EnhancedBias", "BW:Detector"]+SupportLegGroup ),
         ChainProp(name='HLT_noalg_eb_L1J30',         l1SeedThresholds=['FSNOSEED'], stream=['EnhancedBias'], groups=['PS:NoBulkMCProd', "RATE:EnhancedBias", "BW:Detector"]+SupportLegGroup ),
+        ChainProp(name='HLT_noalg_eb_L1jJ40',         l1SeedThresholds=['FSNOSEED'], stream=['EnhancedBias'], groups=['PS:NoBulkMCProd', "RATE:EnhancedBias", "BW:Detector"]+SupportPhIGroup ),
+        ChainProp(name='HLT_noalg_eb_L1jJ50',         l1SeedThresholds=['FSNOSEED'], stream=['EnhancedBias'], groups=['PS:NoBulkMCProd', "RATE:EnhancedBias", "BW:Detector"]+SupportPhIGroup ),
+        ChainProp(name='HLT_noalg_eb_L1jJ60',         l1SeedThresholds=['FSNOSEED'], stream=['EnhancedBias'], groups=['PS:NoBulkMCProd', "RATE:EnhancedBias", "BW:Detector"]+SupportPhIGroup ),
+
         ChainProp(name='HLT_noalg_eb_L1RD1_FILLED',         l1SeedThresholds=['FSNOSEED'], stream=['EnhancedBias'], groups=['PS:NoBulkMCProd', "RATE:EnhancedBias", "BW:Detector"]+SupportGroup ),
     ]
 
diff --git a/Trigger/TriggerCommon/TriggerMenuMT/python/L1/Config/ItemDef.py b/Trigger/TriggerCommon/TriggerMenuMT/python/L1/Config/ItemDef.py
index d2b9a6a6881a24f77aaa43ec54a240373870e9ed..3439e826f98deb63d4e0176c0255e7a43aa39b3e 100644
--- a/Trigger/TriggerCommon/TriggerMenuMT/python/L1/Config/ItemDef.py
+++ b/Trigger/TriggerCommon/TriggerMenuMT/python/L1/Config/ItemDef.py
@@ -713,6 +713,8 @@ class ItemDef:
         MenuItem('L1_MU3V_jJ20'      ).setLogic( d.MU3V & d.jJ20    & physcond).setTriggerType(TT.calo) # added temporarily 
         MenuItem('L1_MU3V_jJ30'      ).setLogic( d.MU3V & d.jJ30    & physcond).setTriggerType(TT.calo) # added temporarily 
         MenuItem('L1_MU3V_jJ40'      ).setLogic( d.MU3V & d.jJ40    & physcond).setTriggerType(TT.calo)
+        MenuItem('L1_MU3V_jJ50'      ).setLogic( d.MU3V & d.jJ50    & physcond).setTriggerType(TT.calo)
+        MenuItem('L1_MU3V_jJ60'      ).setLogic( d.MU3V & d.jJ60    & physcond).setTriggerType(TT.calo)
         MenuItem('L1_MU5VF_jJ80'      ).setLogic( d.MU5VF & d.jJ80  & physcond).setTriggerType(TT.calo)
 
         MenuItem('L1_jLJ60'         ).setLogic( d.jLJ60        & physcond).setTriggerType(TT.calo) # Not in commissioning
diff --git a/Trigger/TriggerCommon/TriggerMenuMT/python/L1/Config/MonitorDef.py b/Trigger/TriggerCommon/TriggerMenuMT/python/L1/Config/MonitorDef.py
index 4994043aabfc820d64bd39bfdb7fbcca9959c53e..99f365401d039cafbb2fa1d197c474ab5ec0f7c8 100644
--- a/Trigger/TriggerCommon/TriggerMenuMT/python/L1/Config/MonitorDef.py
+++ b/Trigger/TriggerCommon/TriggerMenuMT/python/L1/Config/MonitorDef.py
@@ -355,7 +355,6 @@ class MonitorDef:
                 "L1_2MU3V_VTE50",
                 # Legacy L1Calo
                 # Tau
-                "L1_TAU8", "L1_TAU12IM",
                 "L1_TAU1_TE4_VTE200",
                 "L1_2TAU1_VTE200",
                 # Jet
@@ -392,7 +391,7 @@ class MonitorDef:
                 # TRT
                 "L1_TRT_VTE50", "L1_TRT_VTE200", "L1_TRT_VTE20",
                 # XE
-                "L1_XE30", "L1_XE50",
+                "L1_XE50",
                 # Phase-I L1Calo
                 "L1_eEM5", "L1_eEM9", "L1_eEM12", "L1_eEM15",
                 "L1_eEM18", "L1_eEM18L",
diff --git a/Trigger/TriggerCommon/TriggerMenuMT/python/L1/Menu/Menu_Physics_HI_run3_v1.py b/Trigger/TriggerCommon/TriggerMenuMT/python/L1/Menu/Menu_Physics_HI_run3_v1.py
index 2e1845088522f308527ef8a724b831ca4d469b08..efd17602e64c74a4389f7e0262c0fd6ae7374d54 100644
--- a/Trigger/TriggerCommon/TriggerMenuMT/python/L1/Menu/Menu_Physics_HI_run3_v1.py
+++ b/Trigger/TriggerCommon/TriggerMenuMT/python/L1/Menu/Menu_Physics_HI_run3_v1.py
@@ -57,10 +57,12 @@ def defineMenu():
         # combined mu - jet
         'L1_MU3V_J12',
         'L1_MU3V_J20',
-        'L1_MU3V_J30',
+        'L1_MU3V_J30',
         'L1_MU3V_jJ40',
+        'L1_MU3V_jJ50',
+        'L1_MU3V_jJ60',
 
-        'L1_TAU8', 'L1_TAU12IM', 'L1_TAU20IM', 'L1_eTAU12_EMPTY', 'L1_eTAU80', 
+        'L1_eTAU12_EMPTY', 'L1_eTAU80', 
 
         # single jet
         'L1_J12','L1_J15','L1_J20','L1_J25', 'L1_J30', 'L1_J40', 'L1_J50' ,'L1_J75','L1_J85', 'L1_J100',
@@ -80,17 +82,17 @@ def defineMenu():
         'L1_jJ40p30ETA49', 'L1_jJ50p30ETA49', 'L1_jJ60p30ETA49', 'L1_jJ90p30ETA49', 'L1_jJ125p30ETA49',
 
         # gJ - ATR-28029
-        "L1_gJ20p0ETA25","L1_gJ400p0ETA25","L1_gLJ80p0ETA25","L1_gTE200","L1_gXEJWOJ100",
+        "L1_gJ20p0ETA25","L1_gJ400p0ETA25","L1_gLJ80p0ETA25","L1_gTE200",
 
         # XE
         'L1_XE50', 'L1_XE55', 
-        'L1_XE30', 'L1_XE300',
+        'L1_XE300',
        
         'L1_J40_XE50', 'L1_J40_XE60',
 
         #ATR-28679
-        'L1_jXE60', 'L1_jXE110', 'L1_jXE120', 
-        'L1_gXEJWOJ60', 'L1_gXEJWOJ110', 'L1_gXEJWOJ120', 'L1_gXEJWOJ500',
+        'L1_jXE100', 'L1_jXE110', 'L1_jXE120', 
+        'L1_gXEJWOJ100', 'L1_gXEJWOJ110', 'L1_gXEJWOJ120', 'L1_gXEJWOJ500',
         'L1_jJ80_jXE120', 'L1_jJ80_jXE100',
  
          # calo