diff --git a/Calorimeter/CaloEvent/CaloEvent/ATLAS_CHECK_THREAD_SAFETY b/Calorimeter/CaloEvent/CaloEvent/ATLAS_CHECK_THREAD_SAFETY
new file mode 100644
index 0000000000000000000000000000000000000000..7815b311cd3f322643de6cd471e2517df1315107
--- /dev/null
+++ b/Calorimeter/CaloEvent/CaloEvent/ATLAS_CHECK_THREAD_SAFETY
@@ -0,0 +1 @@
+Calorimeter/CaloEvent
diff --git a/Calorimeter/CaloEvent/CaloEvent/CaloBinDescriptor.h b/Calorimeter/CaloEvent/CaloEvent/CaloBinDescriptor.h
index 3d7d44c18e1b870aca4e78f8ff23f451339a9af1..87c91cc64cf7ca8d0015a5ace1c5417b04742bf1 100644
--- a/Calorimeter/CaloEvent/CaloEvent/CaloBinDescriptor.h
+++ b/Calorimeter/CaloEvent/CaloEvent/CaloBinDescriptor.h
@@ -115,7 +115,7 @@ class CaloBinDescriptor
   std::vector<T> m_bins;
 
   /*! \brief Out-of-range marker (convention) */
-  static size_t m_outOfRange;  
+  static const size_t m_outOfRange;  
 };
 
 template<typename T>
@@ -134,7 +134,7 @@ CaloBinDescriptor<T>::operator=(const CaloBinDescriptor& theBins)
 }
 
 template<typename T>
-size_t CaloBinDescriptor<T>::m_outOfRange = size_t(-1);
+const size_t CaloBinDescriptor<T>::m_outOfRange = size_t(-1);
 
 template<typename T>
 CaloBinDescriptor<T>::CaloBinDescriptor(const std::vector<T>& theBins)
diff --git a/Calorimeter/CaloEvent/CaloEvent/CaloCluster.h b/Calorimeter/CaloEvent/CaloEvent/CaloCluster.h
index f984a5fd4285f29262c99cca70aed98c5c62d74e..2ef118499b2173f164e7a3e94130f5cd96d6de4f 100644
--- a/Calorimeter/CaloEvent/CaloEvent/CaloCluster.h
+++ b/Calorimeter/CaloEvent/CaloEvent/CaloCluster.h
@@ -61,6 +61,7 @@ Update:  Sep 18, 2005 P Loch
 #include <vector>
 
 #include "GeoPrimitives/GeoPrimitives.h"
+#include "CxxUtils/checker_macros.h"
 
 
 class CaloCell ; 
@@ -70,11 +71,11 @@ class CaloCluster;
 struct CaloClusterSignalState;
 
 
-class CaloCluster :  public CaloCompositeKineBase,
-		     public CaloCompositeCellBase<CaloClusterNavigable>,
-		     virtual public INavigable4Momentum,
-                     public ISignalState,
-                     public AthenaBarCodeBase
+class ATLAS_NOT_THREAD_SAFE CaloCluster :  public CaloCompositeKineBase,
+                            public CaloCompositeCellBase<CaloClusterNavigable>,
+                            virtual public INavigable4Momentum,
+                            public ISignalState,
+                            public AthenaBarCodeBase
 {
 
  public:
@@ -463,7 +464,7 @@ class CaloCluster :  public CaloCompositeKineBase,
   CaloClusterLinkTemplate<CaloShowerContainer>::link_type data_link_type;
 
   /*! \brief Internal error return for real numbers */
-  static double m_errorValue;
+  static const double m_errorValue;
 
   /*!{\ brief Cached Stores */
   CaloSamplingData                    m_dataStore;   /*!< sampling data */
diff --git a/Calorimeter/CaloEvent/CaloEvent/CaloClusterContainer.h b/Calorimeter/CaloEvent/CaloEvent/CaloClusterContainer.h
index 7d6b74a7adc8bde07db7b82e15578095be6dd006..8d4b634accff487ec009851646c5aebea2779417 100644
--- a/Calorimeter/CaloEvent/CaloEvent/CaloClusterContainer.h
+++ b/Calorimeter/CaloEvent/CaloEvent/CaloClusterContainer.h
@@ -28,6 +28,8 @@ PURPOSE:  Container for CaloCluster objects
 #include "CaloEvent/CaloTowerSeg.h"
 #include "CaloEvent/CaloCluster.h"
 #include "NavFourMom/INavigable4MomentumCollection.h"
+#include "CxxUtils/checker_macros.h"
+
 
 DATAVECTOR_VIRTBASES1 (CaloCluster, INavigable4Momentum);
 
@@ -75,7 +77,7 @@ class CaloClusterContainer : public DataVector<CaloCluster>
   void print();
 
   /** reimplementation of push_back */
-  void push_back(const CaloCluster *);
+  void push_back ATLAS_NOT_THREAD_SAFE(const CaloCluster *);
   void push_back(CaloCluster *);
 
  private:
diff --git a/Calorimeter/CaloEvent/CaloEvent/CaloClusterNavigable.h b/Calorimeter/CaloEvent/CaloEvent/CaloClusterNavigable.h
index 8e1281d7a69d3eb388ac97b59b528ae1b6bd448d..e0c2cadcf17fa0816d37c1cbbf9820690aec13a7 100644
--- a/Calorimeter/CaloEvent/CaloEvent/CaloClusterNavigable.h
+++ b/Calorimeter/CaloEvent/CaloEvent/CaloClusterNavigable.h
@@ -26,6 +26,10 @@
 
 #include <algorithm>
 
+#include "GeoPrimitives/GeoPrimitives.h"
+#include "CxxUtils/checker_macros.h"
+
+
 class CaloClusterNavigable : virtual public INavigable
 {
  public:
@@ -137,7 +141,7 @@ class CaloClusterNavigable : virtual public INavigable
 			 const boost::any& rPar) const;
 
   /** \brief replace container for all cells*/
-  virtual bool replaceCellContainer(const CaloCellContainer* newCont) const;
+  virtual bool replaceCellContainer ATLAS_NOT_CONST_THREAD_SAFE(const CaloCellContainer* newCont) const;
 
   virtual bool isCellLinkValid() const ; 
 
@@ -164,7 +168,7 @@ protected:
   friend class CaloClusterContainerCnv_p7;
   friend class CaloClusterContainerCnvTestMakeCluster;
 
-  CaloCellLink*        getCellLink();
+  CaloCellLink*        getCellLink ATLAS_NOT_CONST_THREAD_SAFE();
   const CaloCellLink*  getCellLink() const;
 
   bool setCellLink(CaloCellLinkContainer* pLink);
diff --git a/Calorimeter/CaloEvent/CaloEvent/CaloConstCellContainer.h b/Calorimeter/CaloEvent/CaloEvent/CaloConstCellContainer.h
index e268e806f3160c3313bb21dedd7c79b837e5b2fc..541d07346166ec4a7514817c86a83ad83eb4eab7 100644
--- a/Calorimeter/CaloEvent/CaloEvent/CaloConstCellContainer.h
+++ b/Calorimeter/CaloEvent/CaloEvent/CaloConstCellContainer.h
@@ -20,6 +20,8 @@
 #include "CaloEvent/CaloCellContainer.h"
 #include "AthContainers/ConstDataVector.h"
 
+#include "CxxUtils/checker_macros.h"
+
 
 /**
  * @brief @c CaloCellContainer that can accept const cell pointers.
@@ -65,7 +67,7 @@ public:
 
 
   /** @brief indicate that the container is complete and in order */
-  void setIsOrderedAndComplete(bool ordered);
+  void setIsOrderedAndComplete ATLAS_NOT_CONST_THREAD_SAFE (bool ordered);
 
 
   /** @brief tell wether container is complete and in order */
@@ -77,7 +79,7 @@ public:
 
 
   /** @brief indicates that the container is ordered */
-  void setIsOrdered(bool ordered);
+  void setIsOrdered  ATLAS_NOT_CONST_THREAD_SAFE (bool ordered);
 
 
   /** @brief tell wether container is ordered */
@@ -107,7 +109,7 @@ public:
 
 
   /** @brief set which calo has been filled.  */
-  void setHasCalo(CaloCell_ID::SUBCALO caloNum);
+  void setHasCalo ATLAS_NOT_CONST_THREAD_SAFE (CaloCell_ID::SUBCALO caloNum);
 
 
   /** @brief fill calo iterators and the index of first and last cell 
@@ -147,15 +149,15 @@ public:
 
 
   /** @brief order container */
-  void order();
+  void order ATLAS_NOT_CONST_THREAD_SAFE ();
 
 
   /** @brief reimplementation of push_back to gain speed in readin */
-  void push_back_fast (const CaloCell* cell);
+  void push_back_fast ATLAS_NOT_THREAD_SAFE(const CaloCell* cell);
 
 
   /** @brief reset look up table */
-  void resetLookUpTable();
+  void resetLookUpTable ATLAS_NOT_CONST_THREAD_SAFE();
 
 
 private:
@@ -164,10 +166,10 @@ private:
   /** @brief If @ flag is true, then the container size equals the maximum hash.
    *         Only CaloCellContainerFinalizer tool is allowed to set this.
    */
-  void setHasTotalSize(bool flag);
+  void setHasTotalSize ATLAS_NOT_CONST_THREAD_SAFE (bool flag);
 
   /// Return a non-const pointer to the base container.
-  ::CaloCellContainer* baseContainer();
+  ::CaloCellContainer* baseContainer ATLAS_NOT_CONST_THREAD_SAFE ();
 };
 
 
diff --git a/Calorimeter/CaloEvent/CaloEvent/CaloConstCellContainer.icc b/Calorimeter/CaloEvent/CaloEvent/CaloConstCellContainer.icc
index 2c34139b556f4c381fe15892e97ab465882b0ad3..1be92f170ad56e76ab8543194373f70a9fdf928f 100644
--- a/Calorimeter/CaloEvent/CaloEvent/CaloConstCellContainer.icc
+++ b/Calorimeter/CaloEvent/CaloEvent/CaloConstCellContainer.icc
@@ -199,7 +199,7 @@ CaloConstCellContainer::findCellVector (const std::vector<IdentifierHash> & theV
 
 /** @brief order container */
 inline
-void CaloConstCellContainer::order()
+void CaloConstCellContainer::order ()
 {
   baseContainer()->order();
 }
diff --git a/Calorimeter/CaloEvent/CaloEvent/CaloSamplingHelper.h b/Calorimeter/CaloEvent/CaloEvent/CaloSamplingHelper.h
index 878ae3cfe39936352911b44a17644d5e0dc221b5..2d0e730dfbff630de1cc5594b40e3a215c49b14e 100644
--- a/Calorimeter/CaloEvent/CaloEvent/CaloSamplingHelper.h
+++ b/Calorimeter/CaloEvent/CaloEvent/CaloSamplingHelper.h
@@ -75,12 +75,12 @@ private:
    CaloSamplingHelper( const CaloSamplingHelper& rhs);
    CaloSamplingHelper& operator =( const CaloSamplingHelper& rhs);
 
-   static unsigned int m_EMMask;
-   static unsigned int m_HADMask;
-   static unsigned int m_BarrelMask;
-   static unsigned int m_EndCapMask;
-   static unsigned int m_LArMask;
-   static unsigned int m_TileMask;
+   static const unsigned int m_EMMask;
+   static const unsigned int m_HADMask;
+   static const unsigned int m_BarrelMask;
+   static const unsigned int m_EndCapMask;
+   static const unsigned int m_LArMask;
+   static const unsigned int m_TileMask;
 
    static bool matchPattern(const CaloSample& rSample, 
 			    const unsigned int& mask);   
diff --git a/Calorimeter/CaloEvent/CaloEvent/CaloTowerSeg.h b/Calorimeter/CaloEvent/CaloEvent/CaloTowerSeg.h
index 5bf0ccad00bef7f750287c29ee5883a7a2851b5c..6fbb1efc7ba094e7702bd7f1d646469e1074a442 100644
--- a/Calorimeter/CaloEvent/CaloEvent/CaloTowerSeg.h
+++ b/Calorimeter/CaloEvent/CaloEvent/CaloTowerSeg.h
@@ -414,7 +414,7 @@ class CaloTowerSeg
 
   // Can get rid of this when we move to the new CaloDetDescr
   // in which all CaloPhiRange methods are static.
-  static CaloPhiRange s_range;
+  static const CaloPhiRange s_range;
 };
 
 // ----------------------------------------
diff --git a/Calorimeter/CaloEvent/src/CaloCluster.cxx b/Calorimeter/CaloEvent/src/CaloCluster.cxx
index 4824fb5a91b35e65f230cd3929d229cc6e733294..61fd30fe3499e8b97c70d213d0cf20a75001f4f8 100644
--- a/Calorimeter/CaloEvent/src/CaloCluster.cxx
+++ b/Calorimeter/CaloEvent/src/CaloCluster.cxx
@@ -59,7 +59,7 @@ Updated:  30 Jun, 2005 S.Menke
 #include <algorithm>
 //#include <iostream>
 
-double CaloCluster::m_errorValue = -999.;
+const double CaloCluster::m_errorValue = -999.;
 
 /////////////////
 // Constructor //
diff --git a/Calorimeter/CaloEvent/src/CaloSamplingHelper.cxx b/Calorimeter/CaloEvent/src/CaloSamplingHelper.cxx
index 47504794deb59b4669b61f0e39f716a5ff16eaa2..49633f27b06c1294c3ce56c39e97d9ea884d9189 100644
--- a/Calorimeter/CaloEvent/src/CaloSamplingHelper.cxx
+++ b/Calorimeter/CaloEvent/src/CaloSamplingHelper.cxx
@@ -27,12 +27,12 @@
 #include "Identifier/Identifier.h"
 #include "CaloIdentifier/CaloID.h"
 
-unsigned int CaloSamplingHelper::m_EMMask     = 0x000000ff;
-unsigned int CaloSamplingHelper::m_HADMask    = 0x00ffff00;
-unsigned int CaloSamplingHelper::m_BarrelMask = 0x001ff00f;
-unsigned int CaloSamplingHelper::m_EndCapMask = 0x00e00ff0;
-unsigned int CaloSamplingHelper::m_LArMask    = 0x00e00fff;
-unsigned int CaloSamplingHelper::m_TileMask   = 0x001ff000;
+const unsigned int CaloSamplingHelper::m_EMMask     = 0x000000ff;
+const unsigned int CaloSamplingHelper::m_HADMask    = 0x00ffff00;
+const unsigned int CaloSamplingHelper::m_BarrelMask = 0x001ff00f;
+const unsigned int CaloSamplingHelper::m_EndCapMask = 0x00e00ff0;
+const unsigned int CaloSamplingHelper::m_LArMask    = 0x00e00fff;
+const unsigned int CaloSamplingHelper::m_TileMask   = 0x001ff000;
 
 CaloSamplingHelper::CaloSamplingHelper() {
 }
diff --git a/Calorimeter/CaloEvent/src/CaloTowerSeg.cxx b/Calorimeter/CaloEvent/src/CaloTowerSeg.cxx
index b0661647fd06b3b26d197cb83fab6a094b6b7f5f..9a82c2be4b316217e582954944472ae72f058520 100644
--- a/Calorimeter/CaloEvent/src/CaloTowerSeg.cxx
+++ b/Calorimeter/CaloEvent/src/CaloTowerSeg.cxx
@@ -13,7 +13,7 @@
 
 #include "CaloEvent/CaloTowerSeg.h"
 
-CaloPhiRange CaloTowerSeg::s_range;
+const CaloPhiRange CaloTowerSeg::s_range;
 
 
 /**
@@ -24,7 +24,7 @@ CaloTowerSeg::SubSeg CaloTowerSeg::subseg (double eta, double deta,
                                            double phi, double dphi) const
 {
   typedef SubSeg::index_t index_t;
-  static CaloPhiRange range;
+  static const CaloPhiRange range;
 
   index_t etamin = this->etaIndex (eta - deta + 0.001);
   if (etamin == CaloTowerSeg::outOfRange)
diff --git a/Calorimeter/CaloEvent/test/CaloCellContainerTestCommon.icc b/Calorimeter/CaloEvent/test/CaloCellContainerTestCommon.icc
index 312a684f5ea68867ba6012381b482471a7acbea6..43918d9373d41b914fe01bf2b06b8b966ebc219a 100644
--- a/Calorimeter/CaloEvent/test/CaloCellContainerTestCommon.icc
+++ b/Calorimeter/CaloEvent/test/CaloCellContainerTestCommon.icc
@@ -17,6 +17,9 @@
 
 #include "TestTools/random.h"
 
+#include "CxxUtils/checker_macros.h"
+ATLAS_NO_CHECK_FILE_THREAD_SAFETY;
+
 
 //using Athena_test::randi;
 Athena_test::URNG stlrand;
diff --git a/Calorimeter/CaloEvent/test/CaloCellContainer_test.cxx b/Calorimeter/CaloEvent/test/CaloCellContainer_test.cxx
index 4e31bd39ed888519b3baefac652477dad8f9f165..fb2ea1b812a389caf57ef314bcfdaccd1a9c59c9 100644
--- a/Calorimeter/CaloEvent/test/CaloCellContainer_test.cxx
+++ b/Calorimeter/CaloEvent/test/CaloCellContainer_test.cxx
@@ -27,6 +27,8 @@
 typedef std::vector<CaloCell*> CellVector;
 typedef CaloCellContainer CellContainer;
 
+#include "CxxUtils/checker_macros.h"
+ATLAS_NO_CHECK_FILE_THREAD_SAFETY;
 
 #include "CaloCellContainerTestCommon.icc"
 
diff --git a/Calorimeter/CaloEvent/test/CaloCellPrefetchIterator_test.cxx b/Calorimeter/CaloEvent/test/CaloCellPrefetchIterator_test.cxx
index db331613b1ac741ca7b8cfd4686e14fc57b8c5d5..802f3edb49bd1dea4568be2fd472a5593ac5a4bc 100644
--- a/Calorimeter/CaloEvent/test/CaloCellPrefetchIterator_test.cxx
+++ b/Calorimeter/CaloEvent/test/CaloCellPrefetchIterator_test.cxx
@@ -31,6 +31,8 @@
 #include <iostream>
 #include <cassert>
 
+#include "CxxUtils/checker_macros.h"
+ATLAS_NO_CHECK_FILE_THREAD_SAFETY;
 
 using Athena_test::randf;
 
diff --git a/Calorimeter/CaloEvent/test/CaloConstCellContainer_test.cxx b/Calorimeter/CaloEvent/test/CaloConstCellContainer_test.cxx
index e2729ce4b42401ccb4d2e78e01c0301c874d7fbb..84ea521f5fc3902fd00a9eb406479617026892ac 100644
--- a/Calorimeter/CaloEvent/test/CaloConstCellContainer_test.cxx
+++ b/Calorimeter/CaloEvent/test/CaloConstCellContainer_test.cxx
@@ -28,6 +28,9 @@ typedef std::vector<const CaloCell*> CellVector;
 typedef CaloConstCellContainer CellContainer;
 #define CONST_CONTAINER
 
+#include "CxxUtils/checker_macros.h"
+ATLAS_NO_CHECK_FILE_THREAD_SAFETY;
+
 
 #include "CaloCellContainerTestCommon.icc"
 
diff --git a/Control/AthenaMonitoring/CMakeLists.txt b/Control/AthenaMonitoring/CMakeLists.txt
index 5829d66fa4c0a7d6f5b95072ebe0e9ed8f9eafa6..1d27ee52c1779e53a97650d4fae2bb1e9cc68cb7 100644
--- a/Control/AthenaMonitoring/CMakeLists.txt
+++ b/Control/AthenaMonitoring/CMakeLists.txt
@@ -78,4 +78,4 @@ atlas_add_component(
 atlas_install_python_modules( python/*.py 
                               POST_BUILD_CMD ${ATLAS_FLAKE8} )
 atlas_install_joboptions( share/*.py )
-atlas_install_scripts( share/Run3DQTestingDriver.py )
\ No newline at end of file
+atlas_install_scripts( share/Run3DQTestingDriver.py share/hist_file_dump.py share/hist_diff.sh )
diff --git a/Control/AthenaMonitoring/share/hist_diff.sh b/Control/AthenaMonitoring/share/hist_diff.sh
new file mode 100755
index 0000000000000000000000000000000000000000..f75ff83fd00970d487cfd1223ae10244b62d6673
--- /dev/null
+++ b/Control/AthenaMonitoring/share/hist_diff.sh
@@ -0,0 +1,27 @@
+#!/bin/bash
+
+if [[ ! -f $1 ]] ; then { echo "$1 does not exist"; exit 1; } ; fi
+if [[ ! -f $2 ]] ; then { echo "$2 does not exist"; exit 1; } ; fi
+
+LOG1=$(mktemp FILE1.XXXXXXX)
+LOG2=$(mktemp FILE2.XXXXXXX)
+
+hist_file_dump.py $1 --hash > $LOG1
+RV=$?
+if [ $RV != 0 ]; then { echo "Failure dumping $1"; rm -f $LOG1 $LOG2; exit $RV ; }; fi
+hist_file_dump.py $2 --hash > $LOG2
+RV=$?
+if [ $RV != 0 ]; then { echo "Failure dumping $2"; rm -f $LOG1 $LOG2; exit $RV ; }; fi
+
+diff  $LOG1 $LOG2
+RV=$?
+if [ $RV != 0 ]; then
+    echo "$1 <"
+    echo "$2 >"
+    echo "Files differ"
+else
+    echo "Files match"
+fi
+
+rm -f $LOG1 $LOG2
+exit $RV
diff --git a/Control/AthenaMonitoring/share/hist_file_dump.py b/Control/AthenaMonitoring/share/hist_file_dump.py
new file mode 100755
index 0000000000000000000000000000000000000000..2c4109135d834a7acc3feaf206daeb9153cd6648
--- /dev/null
+++ b/Control/AthenaMonitoring/share/hist_file_dump.py
@@ -0,0 +1,106 @@
+#!/usr/bin/env python
+
+import ROOT
+import sys, os, operator
+import argparse
+import zlib
+
+parser=argparse.ArgumentParser()
+parser.add_argument('filename',
+                    help='Input HIST file name')
+parser.add_argument('-r', '--rankorder', default='onfile',
+                    choices=['onfile', 'uncompressed', 'name'],
+                    help='rankorder is "onfile" (default), "uncompressed" or "name"')
+parser.add_argument('-p', '--path',
+                    help='Only look under this directory')
+parser.add_argument('--hash', action='store_true',
+                    help='Print hashes of objects')
+parser.add_argument('--metadata', action='store_true',
+                    help='Include metadata trees')
+parser.add_argument('--no_onfile', action='store_true',
+                    help="Don't show on file size")
+parser.add_argument('--no_inmem', action='store_true',
+                    help="Don't show in memory size")
+args=parser.parse_args()
+
+ordering = args.rankorder
+
+accounting = {}; hashes = {}; types = {}
+
+ROOT.gInterpreter.LoadText("UInt_t bufferhash(TKey* key) { key->SetBuffer(); key->ReadFile(); UInt_t rv = TString::Hash(key->GetBuffer()+key->GetKeylen(), key->GetNbytes()-key->GetKeylen()); key->DeleteBuffer(); return rv; }")
+ROOT.gInterpreter.LoadText("void* getbuffer(TKey* key) { key->SetBuffer(); key->ReadFile(); return (void*) (key->GetBuffer()+key->GetKeylen()); }")
+ROOT.gInterpreter.LoadText("UInt_t bufferhash2(TKey* key) { TObject* obj = key->ReadObj(); TMessage msg(kMESS_OBJECT); msg.WriteObject(obj); UInt_t rv = TString::Hash(msg.Buffer(), msg.Length()); delete obj; return rv; }")
+ROOT.gInterpreter.LoadText("UInt_t bufferhash3(TKey* key) { TObject* obj = key->ReadObj(); UInt_t rv = obj->Hash(); delete obj; return rv; }")
+ROOT.gSystem.Load('libDataQualityUtils')
+
+def dumpdir(d):
+    thispath = d.GetPath()
+    if ':' in thispath:
+        thispath = thispath.split(':', 1)[1]
+    #print thispath
+    subdirs = []
+    for k in d.GetListOfKeys():
+        if not args.metadata and k.GetName() == 'metadata' and k.GetClassName() == 'TTree':
+            continue
+        if k.GetClassName().startswith('TDirectory'):
+            subdirs.append(k)
+        else:
+            if args.hash:
+                #lhash = ROOT.bufferhash(k)
+                #objsize = (k.GetNbytes()-k.GetKeylen())/8
+                #print (k.GetNbytes()-k.GetKeylen())/8.
+                #buf = ROOT.getbuffer(k); buf.SetSize(objsize)
+                #print buf[objsize-1], objsize
+                #lhash = zlib.adler32(str(buf))
+                #k.DeleteBuffer()
+                #obj=k.ReadObj(); 
+                #tm=ROOT.TMessage(ROOT.TMessage.kMESS_OBJECT)
+                #tm.WriteObject(obj)
+                # This is what we _were_ doing
+                #lhash = ROOT.bufferhash2(k)
+                # How about this?
+                #lhash = ROOT.bufferhash3(k)
+                obj = k.ReadObj(); lhash = obj.Hash(); del obj
+            else:
+                lhash = 0
+            idxname = os.path.join(thispath, k.GetName())
+            accounting[idxname] = (k.GetObjlen(), k.GetNbytes()-k.GetKeylen())
+            hashes[idxname] = lhash
+            types[idxname] = k.GetClassName()
+            #print '%s,' % os.path.join(thispath, k.GetName()),
+            #obj = k.ReadObj(); obj.IsA().Destructor(obj)
+            #print 'OK'
+    for k in subdirs:
+        dumpdir(k.ReadObj())
+
+f = ROOT.TFile.Open(args.filename)
+if args.path:
+    d = f.Get(args.path.rstrip('/'))
+    if not d:
+        print "Can't access path", args.path, "- exiting"
+        sys.exit(1)
+else:
+    d = f
+dumpdir(d)
+
+#sortedl = sorted(accounting.items(), key=operator.itemgetter(0,1), reverse=True)
+if ordering == 'onfile':
+    key=lambda x: (x[1][1], x[1][0], x[0])
+elif ordering == 'uncompressed':
+    key=lambda x: (x[1][0], x[1][1], x[0])
+else:
+    key=lambda x: (x[0], x[1][1], x[1][0])
+sortedl = sorted(accounting.items(), key=key, reverse=True)
+if args.hash:
+    print '\n'.join(('%s %s: '
+                     + ('%d uncompressed' % b if not args.no_inmem else '')
+                     + (', %d on file ' % c if not args.no_onfile else '')
+                     + '(hash %s)')
+                    % (types[a], a, hashes[a]) for a, (b, c) in  sortedl)
+else:
+    print '\n'.join(('%s %s: '
+                     + ('%d uncompressed' % b if not args.no_inmem else '')
+                     + (', %d on file' % c if not args.no_onfile else ''))
+                    % (types[a], a) for a, (b, c) in  sortedl)
+    #print '\n'.join('%s %s: %d uncompressed, %d on file' % (types[a], a, b, c) for a, (b, c) in  sortedl)
+
diff --git a/Control/AthenaMonitoring/test/test_run3dq_r21_esd.sh b/Control/AthenaMonitoring/test/test_run3dq_r21_esd.sh
new file mode 100755
index 0000000000000000000000000000000000000000..85055a596f6a9d1ad0a1eedc2a50507f30ea2ffc
--- /dev/null
+++ b/Control/AthenaMonitoring/test/test_run3dq_r21_esd.sh
@@ -0,0 +1,15 @@
+#!/bin/bash
+# art-description: ESD->HIST, R21 data ESD
+# art-type: grid
+# art-include: master/Athena
+# art-output: ExampleMonitorOutput.root
+
+Run3DQTestingDriver.py 'Input.Files=["/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/Tier0ChainTests/q431/21.0/myESD.pool.root"]' DQ.Steering.doHLTMon=False --postExec 'cfg.getEventAlgo("LArCollisionTimeAlg").cutIteration=False'
+
+echo "art-result: $? HIST_Creation"
+
+ArtPackage=$1
+ArtJobName=$2
+art.py download ${ArtPackage} ${ArtJobName}
+hist_diff.sh ExampleMonitorOutput.root ./ref-*/ExampleMonitorOutput.root
+echo "art-result: $? HIST_Diff"
diff --git a/Control/AthenaMonitoring/test/test_run3dq_r21_esd_mc.sh b/Control/AthenaMonitoring/test/test_run3dq_r21_esd_mc.sh
new file mode 100755
index 0000000000000000000000000000000000000000..f8f4ac0c73c56241222fbbdd4618e8f181fd3621
--- /dev/null
+++ b/Control/AthenaMonitoring/test/test_run3dq_r21_esd_mc.sh
@@ -0,0 +1,15 @@
+#!/bin/bash
+# art-description: ESD->HIST, R21 MC ESD
+# art-type: grid
+# art-include: master/Athena
+# art-output: ExampleMonitorOutput.root
+
+Run3DQTestingDriver.py 'Input.Files=["/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/Tier0ChainTests/q221/21.0/myESD.pool.root"]' DQ.Steering.doHLTMon=False --postExec 'cfg.getEventAlgo("LArCollisionTimeAlg").cutIteration=False'
+
+echo "art-result: $? HIST_Creation"
+
+ArtPackage=$1
+ArtJobName=$2
+art.py download ${ArtPackage} ${ArtJobName}
+hist_diff.sh ExampleMonitorOutput.root ./ref-*/ExampleMonitorOutput.root
+echo "art-result: $? HIST_Diff"
diff --git a/Control/AthenaMonitoring/test/test_run3dq_r21_esd_mt.sh b/Control/AthenaMonitoring/test/test_run3dq_r21_esd_mt.sh
new file mode 100755
index 0000000000000000000000000000000000000000..9e580205c6e64675405a87b63e30b822ea440851
--- /dev/null
+++ b/Control/AthenaMonitoring/test/test_run3dq_r21_esd_mt.sh
@@ -0,0 +1,15 @@
+#!/bin/bash
+# art-description: ESD->HIST, R21 data ESD, MT
+# art-type: grid
+# art-include: master/Athena
+# art-output: ExampleMonitorOutput.root
+
+Run3DQTestingDriver.py 'Input.Files=["/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/Tier0ChainTests/q431/21.0/myESD.pool.root"]' DQ.Steering.doHLTMon=False Concurrency.NumThreads=1 Concurrency.NumConcurrentEvents=1 --postExec 'cfg.getEventAlgo("LArCollisionTimeAlg").cutIteration=False'
+
+echo "art-result: $? HIST_Creation"
+
+ArtPackage=$1
+ArtJobName=$2
+art.py download ${ArtPackage} ${ArtJobName}
+hist_diff.sh ExampleMonitorOutput.root ./ref-*/ExampleMonitorOutput.root
+echo "art-result: $? HIST_Diff"
diff --git a/Control/AthenaMonitoring/test/test_run3dq_r22_aod_trigger.sh b/Control/AthenaMonitoring/test/test_run3dq_r22_aod_trigger.sh
new file mode 100755
index 0000000000000000000000000000000000000000..8e7b5ce43a837e661edd85ab70cae312f111c29e
--- /dev/null
+++ b/Control/AthenaMonitoring/test/test_run3dq_r22_aod_trigger.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+# art-description: AOD->HIST, R22 MC, Trigger Only
+# art-type: grid
+# art-include: master/Athena
+# art-output: ExampleMonitorOutput.root
+
+art.py download TrigAnalysisTest test_trigAna_q221_RDOtoAOD_mt1_grid.sh
+AODFILE=(./ref-*/AOD.pool.root)
+Run3DQTestingDriver.py 'Input.Files=["'${AODFILE}'"]' DQ.Steering.doHLTMon=True --dqOffByDefault
+
+echo "art-result: $? HIST_Creation"
+rm -rf ref-*
+
+ArtPackage=$1
+ArtJobName=$2
+art.py download ${ArtPackage} ${ArtJobName}
+hist_diff.sh ExampleMonitorOutput.root ./ref-*/ExampleMonitorOutput.root
+echo "art-result: $? HIST_Diff"
diff --git a/Control/AthenaMonitoring/test/test_run3dq_r22_esd.sh b/Control/AthenaMonitoring/test/test_run3dq_r22_esd.sh
new file mode 100755
index 0000000000000000000000000000000000000000..59c55230ab015d1ca7b7a33e0dc4f95ffbe9807d
--- /dev/null
+++ b/Control/AthenaMonitoring/test/test_run3dq_r22_esd.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+# art-description: ESD->HIST, R22 data ESD
+# art-type: grid
+# art-include: master/Athena
+# art-output: ExampleMonitorOutput.root
+
+art.py download Tier0ChainTests test_q431.sh
+ESDFILE=(./ref-*/myESD.pool.root)
+Run3DQTestingDriver.py 'Input.Files=["'${ESDFILE}'"]' DQ.Steering.doHLTMon=False
+
+echo "art-result: $? HIST_Creation"
+rm -rf ref-*
+
+ArtPackage=$1
+ArtJobName=$2
+art.py download ${ArtPackage} ${ArtJobName}
+hist_diff.sh ExampleMonitorOutput.root ./ref-*/ExampleMonitorOutput.root
+echo "art-result: $? HIST_Diff"
diff --git a/Database/APR/StorageSvc/src/DbDatabaseObj.cpp b/Database/APR/StorageSvc/src/DbDatabaseObj.cpp
index 1088347cf3aeacd6bbaf3a9fff96786ba3e9249c..a977dd8660ec949e5218b8b4473f5417bf48b63f 100644
--- a/Database/APR/StorageSvc/src/DbDatabaseObj.cpp
+++ b/Database/APR/StorageSvc/src/DbDatabaseObj.cpp
@@ -35,6 +35,11 @@
 using namespace std;
 using namespace pool;
 
+ostream& operator << (ostream& os, const Token::OID_t oid ) {
+   os << "("<<oid.first<<","<<oid.second<<")";
+   return os;
+}
+
 static const Guid s_localDb("00000000-0000-0000-0000-000000000000");
 static DbPrintLvl::MsgLevel dbg_lvl = DbPrintLvl::Debug;
 
@@ -673,29 +678,43 @@ DbStatus DbDatabaseObj::params(Parameters& vals)   {
   return Error;
 }
 
-/// Calculate required OID modification (shift) for source OID (oid) for a given merge section
-DbStatus DbDatabaseObj::getRedirection(const Token::OID_t& oid, int merge_section, Token::OID_t& shift)
+// Calculate new OID from the source OID (oid) for a given merge section
+// Only redirect non-indexed OIDs
+DbStatus DbDatabaseObj::getRedirection(const Token::OID_t& oid, int merge_section, Token::OID_t& new_oid)
 {
-   if( merge_section > 0 ) {
-      // find the offset in the links table
-      Sections::const_iterator j = m_sections.find("##Links");
-      if( j == m_sections.end() )
+   new_oid = oid;
+   DbToken* link = nullptr;
+   if( oid.first > 0xFFFFFFFF ) {
+      // for indexed entries find Link vector entry directly
+      auto iter = m_indexMap.find( oid.first );
+      if( iter == m_indexMap.end() )
          return Error;
-      const ContainerSections& sections = (*j).second;
-      if( merge_section < (int)sections.size() ) {
-         shift.first = sections[merge_section].start;
+      new_oid.first = iter->second;
+      link = m_linkVec[ new_oid.first ];
+   }
+   if( merge_section > 0 ) {
+      if( oid.first <= 0xFFFFFFFF ) {
+         // for non-indexed entries find the correct section for the Links table and add offset
+         // 1. find the offset in the links table
+         Sections::const_iterator j = m_sections.find("##Links");
+         if( j == m_sections.end() )
+            return Error;
+         const ContainerSections& sections = (*j).second;
+         if( merge_section >= (int)sections.size() )
+            return Error;
+         new_oid.first =  oid.first + sections[merge_section].start;
+         link = m_linkVec[ new_oid.first ];
       }
-      DbToken* link = m_linkVec[ oid.first + shift.first ];
-       if( link->isLocal() ) {
-         // MN: assuming only internal links need OID_2 adjustment
+      if( link->isLocal() && oid.second <= 0xFFFFFFFF ) {
+         // MN: assuming only non-indexed internal links need OID_2 adjustment
          // find the start of the section for this container
-         j = m_sections.find( link->contID() );
+         Sections::const_iterator j = m_sections.find( link->contID() );
          if( j == m_sections.end() )
             return Error;
          const ContainerSections& sections = (*j).second;
-         if( merge_section < (int)sections.size() ) {
-            shift.second = sections[merge_section].start;
-         }
+         if( merge_section >= (int)sections.size() )
+            return Error;
+         new_oid.second += sections[merge_section].start; 
       }
    }
    return Success;
@@ -706,24 +725,17 @@ DbStatus DbDatabaseObj::getLink(const Token::OID_t& oid, int merge_section, Toke
 {
    if ( 0 == m_info ) open();
    if ( 0 != m_info && 0 != pTok && oid.first >= 0 ) {
-      Token::OID_t shift(0,0);
-      if( merge_section > 0 ) {
-         if( getRedirection(oid, merge_section, shift) != Success )
-            return Error; 
-      }
-      int lnk = oid.first + shift.first;
-      if( lnk >= int(m_linkVec.size()) )
-         return Error;
-
-      pTok->oid().first  = lnk;
-      pTok->oid().second = oid.second + shift.second;
+      Token::OID_t redirected(0,0);
+      if( getRedirection(oid, merge_section, redirected) != Success )
+         return Error; 
+      pTok->oid() = redirected;
       if( !(pTok->type() & DbToken::TOKEN_FULL_KEY) )  {
          if( typeid(*pTok) == typeid(DbToken) )  {
 	    DbToken* pdbTok = (DbToken*)pTok;
 	    pdbTok->setKey(DbToken::TOKEN_FULL_KEY);
          }
       }
-      m_linkVec[lnk]->set(pTok);
+      m_linkVec[ redirected.first ]->set(pTok);
       return Success;
    }
    return Error;
@@ -798,20 +810,19 @@ DbStatus DbDatabaseObj::read(const Token& token, ShapeH shape, void** object)
          // We get here if the token is *part* of a merged file, but still hosted within this DB
          Redirections::iterator i = m_redirects.find( token.dbID().toString() );
          // find out which section to read from
-         if( i!=m_redirects.end() ) sectionN = i->second.first;
-         if( sectionN > 0 ) {
-            Token::OID_t shift(0,0);
-            getRedirection(oid, sectionN, shift);
-            if( DbPrintLvl::outputLvl == DbPrintLvl::Verbose ) {
-               DbPrint log( name() );
-               log << DbPrintLvl::Verbose << "Reading object OID=(" << oid.first << ", " << oid.second
-                   << ")  from merged file section # " << sectionN
-                   << ", Adjusted OID=(" << oid.first + shift.first << ", " << oid.second + shift.second << ")"
-                   << DbPrint::endmsg;
-            }
-            oid.first += shift.first;
-            oid.second += shift.second;
+         if( i == m_redirects.end() )
+            return Error;
+         sectionN = i->second.first;
+         Token::OID_t redirected(0,0);
+         if( getRedirection(oid, sectionN, redirected) == Error )
+            return Error;
+         if( DbPrintLvl::outputLvl <= DbPrintLvl::Verbose ) {
+            DbPrint log( name() );
+            log << DbPrintLvl::Verbose << "Reading object OID=" << oid
+                << "  from merged file section # " << sectionN
+                << ", Adjusted OID=" << redirected << DbPrint::endmsg;
          }
+         oid = redirected;
          containerName = m_linkVec[ oid.first ]->contID();
       }
 
diff --git a/Database/APR/StorageSvc/src/DbDatabaseObj.h b/Database/APR/StorageSvc/src/DbDatabaseObj.h
index 131dd5c539f7006cd4595c763fa027568ef9d3bc..6c6c7def0ec17cbb40dc3574562f6d8876207876 100644
--- a/Database/APR/StorageSvc/src/DbDatabaseObj.h
+++ b/Database/APR/StorageSvc/src/DbDatabaseObj.h
@@ -150,7 +150,7 @@ namespace pool    {
 
     /// read an object referenced by the token
     DbStatus read(const Token& token, ShapeH shape, void** object);
-    /// Calculate required OID modification (shift) for source OID (oid) for a given merge section 
+    /// Calculate new OID from the source OID (oid) for a given merge section
     DbStatus getRedirection(const Token::OID_t& oid, int merge_section, Token::OID_t& shift);
     /// Expand OID into a full Token, based on the Links table. For merged files provide links section#
     DbStatus getLink(const Token::OID_t& oid, int merge_section, Token* pTok);
diff --git a/DetectorDescription/IRegionSelector/IRegionSelector/IRegSelTool.h b/DetectorDescription/IRegionSelector/IRegionSelector/IRegSelTool.h
new file mode 100644
index 0000000000000000000000000000000000000000..984a9dbcb67732aff8a8ace25b9925206e6361fd
--- /dev/null
+++ b/DetectorDescription/IRegionSelector/IRegionSelector/IRegSelTool.h
@@ -0,0 +1,84 @@
+// emacs: this is -*- c++ -*-
+//
+//   IRegSelTool.h        
+// 
+//    Interface for the new local RegionSelector tool
+// 
+//
+// Copyright (C) 2012-2019 CERN for the benefit of the ATLAS collaboration
+
+
+#ifndef IREGIONSELECTOR_IREGSELTOOL_H
+#define IREGIONSELECTOR_IREGSELTOOL_H
+
+#include "GaudiKernel/IAlgTool.h"
+
+
+#include "Identifier/IdentifierHash.h"
+#include <vector>
+#include <stdint.h>
+
+#include "IRegionSelector/IRoiDescriptor.h"
+#include "GaudiKernel/IAlgTool.h"
+
+
+
+/**
+ * @class IRegSelTool
+ * @brief Interface for the new local RegionSelector tool, providing identifier hash and ROB ID lookups for a given RoI.
+ **/
+
+class IRegSelTool : virtual public IAlgTool {
+
+public: 
+
+  /// InterfaceID
+  DeclareInterfaceID( IRegSelTool, 1, 0 ); 
+
+    
+  /// IdentifierHash methods
+
+    
+  //! HashIDList interface declaration. %return  list of unique IdentifierHash
+
+  /*!
+    \param IRoiDescriptor \c \b roi, the IRoiDescriptor for the roi, all enabled elements in the roi are found.
+    \return std::vector<IdentifierHash> which is a list of non-repeated  %Identifier %Hash numbers.
+  */
+  virtual void HashIDList( const IRoiDescriptor& roi, std::vector<IdentifierHash>& idlist ) const = 0;
+  
+
+  //! HashIDList interface declaration. %return list of non-repeated IdentifierHash
+  /*!
+    \param long           \c \b layer, long int to decide which layer within the detector.
+    \param IRoiDescriptor \c \b roi, the IRoiDescriptor for the roi, all enabled elements in the roi are found.
+    \return std::vector<IdentifierHash> which is a list of non-repeated Offline %Identifier %Hash numbers.
+  */
+  virtual void HashIDList( long layer, const IRoiDescriptor& roi, std::vector<IdentifierHash>& idlist ) const = 0; 
+   
+   
+  /// ROB identifier methods
+
+  //! ROBIDList interface declaration. This interface can be used by the ID subdetectors. %A list of non-repeated ROBIDs (uint32_t) is returned by a reference.
+  /*!
+    \param IRoiDescriptor \c \b the IRoiDescriptor for the roi, all enabled elements in the roi are found. 
+    \return std::vector<uint32_t> which is a list of non-repeated ROBID numbers.
+  */
+
+  virtual void ROBIDList( const IRoiDescriptor& roi, std::vector<uint32_t>& roblist ) const = 0; 
+
+
+  //! ROBIDList interface declaration. This interface can be used by the ID subdetectors. %A list of non-repeated ROBIDs (uint32_t) is returned by a reference.
+  /*!
+    \param long   \c \b layer, long int to decide which layer within the detector.
+    \param IRoiDescriptor \c \b the IRoiDescriptor for the roi, all enabled elements in the roi are found. 
+    \return std::vector<uint32_t> which is a list of non-repeated ROBID numbers.
+  */
+  
+  virtual void ROBIDList( long layer, const IRoiDescriptor& roi, std::vector<uint32_t>& roblist ) const = 0;   
+   
+};
+
+
+
+#endif //  IREGIONSELECTOR_IREGSELTOOL_H
diff --git a/DetectorDescription/RegionSelector/src/RegSelTool.cxx b/DetectorDescription/RegionSelector/src/RegSelTool.cxx
new file mode 100644
index 0000000000000000000000000000000000000000..c86fcd1304d0396f219fa2eecf7a9821a0ea39c7
--- /dev/null
+++ b/DetectorDescription/RegionSelector/src/RegSelTool.cxx
@@ -0,0 +1,183 @@
+/**
+ **   @file   RegSelTool.cxx         
+ **            
+ **           Implementation of a local RegionSelector tool
+ **            
+ **   @author sutt
+ **   @date   Sun 22 Sep 2019 10:21:50 BST
+ **
+ **
+ **   Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+ **/
+
+
+#include "RegSelTool.h"
+#include "RegSelLUT/RegSelRoI.h"
+#include "RegSelLUT/RegSelSiLUT.h"
+// #include "RegSelLUT/RegSelModule.h"
+
+#include "GaudiKernel/ToolHandle.h"
+
+// ???
+#include "RegionSelector/StoreGateRS_ClassDEF.h"
+#include "RegSelLUT/StoreGateIDRS_ClassDEF.h"
+
+
+
+//! Constructor
+RegSelTool::RegSelTool( const std::string& type, const std::string& name, const IInterface*  parent )
+  :  base_class( type, name, parent ),
+     m_dumpTable(false),
+     m_lookuptable{nullptr}
+{
+  //! Declare properties
+  declareProperty( "WriteTables", m_dumpTable,    "write out maps to files for debugging" );
+}
+
+
+//! Standard destructor
+RegSelTool::~RegSelTool() { }
+
+
+StatusCode RegSelTool::initialize() {
+  ATH_MSG_INFO( "Initializing " << name() );
+  return StatusCode::SUCCESS;
+}
+
+
+StatusCode RegSelTool::finalize() {
+  ATH_MSG_INFO( "Finalizing " << name() );
+  return StatusCode::SUCCESS;
+}
+
+
+bool RegSelTool::handle() { 
+  return m_initialised = false;
+}
+
+
+
+// new RegionSelector interface for the Inner Detector
+
+void RegSelTool::getRoIData( const IRoiDescriptor& roi, std::vector<const RegSelModule*>& modules ) const {
+  modules.clear();
+  RegSelRoI roitmp( roi.zedMinus(), roi.zedPlus(), roi.phiMinus(), roi.phiPlus(), roi.etaMinus(), roi.etaPlus() );
+  if ( m_lookuptable ) m_lookuptable->getRoIData( roitmp, modules );
+}
+
+
+
+
+///////////////////////////////////////////////////////////////////////////////////
+
+/// hash id access methods
+
+/// standard roi
+
+void RegSelTool::HashIDList( const IRoiDescriptor& roi, std::vector<IdentifierHash>& idlist ) const {
+
+  if ( roi.composite() ) {
+    idlist.clear();
+    for ( unsigned iroi=roi.size() ; iroi-- ;  )  HashIDList( *(roi.at(iroi)), idlist );
+    if ( roi.size()>1 ) RegSelSiLUT::removeDuplicates( idlist );
+    return;
+  }
+
+  if ( roi.isFullscan() ) return HashIDList( idlist );
+
+  RegSelRoI roitmp( roi.zedMinus(), roi.zedPlus(), roi.phiMinus(), roi.phiPlus(), roi.etaMinus(), roi.etaPlus() );
+  if ( m_lookuptable ) m_lookuptable->getHashList( roitmp, idlist ); 
+}
+
+
+
+/// standard roi for specific layer
+
+void RegSelTool::HashIDList( long layer, const IRoiDescriptor& roi, std::vector<IdentifierHash>& idlist ) const {
+
+  if ( roi.composite() ) { 
+    idlist.clear();
+    for ( unsigned iroi=roi.size() ; iroi-- ;  )  HashIDList( layer, *(roi.at(iroi)), idlist );
+    if ( roi.size()>1 ) RegSelSiLUT::removeDuplicates( idlist );
+    return;
+  }
+
+  if ( roi.isFullscan() ) return HashIDList( layer, idlist );
+
+  RegSelRoI roitmp( roi.zedMinus(), roi.zedPlus(), roi.phiMinus(), roi.phiPlus(), roi.etaMinus(), roi.etaPlus() );
+  if ( m_lookuptable ) m_lookuptable->getHashList( roitmp, layer, idlist ); 
+}
+
+
+
+
+
+
+///////////////////////////////////////////////////////////////////////////////////
+
+/// ROB id access methods
+
+/// standard roi
+
+void RegSelTool::ROBIDList( const IRoiDescriptor& roi, std::vector<uint32_t>& roblist ) const {
+
+  if ( roi.composite() ) { 
+    roblist.clear();
+    for ( unsigned iroi=roi.size() ; iroi-- ;  )  ROBIDList( *(roi.at(iroi)), roblist );
+    RegSelSiLUT::removeDuplicates( roblist );
+    return;
+  }
+
+  if ( roi.isFullscan() ) return ROBIDList( roblist );
+
+  RegSelRoI roitmp( roi.zedMinus(), roi.zedPlus(), roi.phiMinus(), roi.phiPlus(), roi.etaMinus(), roi.etaPlus() );
+  if ( m_lookuptable ) m_lookuptable->getRobList( roitmp, roblist ); 
+}
+
+/// standard roi for specific layer
+
+void RegSelTool::ROBIDList( long layer, const IRoiDescriptor& roi, std::vector<uint32_t>& roblist ) const {
+
+  if ( roi.composite() ) { 
+    roblist.clear();
+    for ( unsigned iroi=roi.size() ; iroi-- ;  )  ROBIDList( layer, *(roi.at(iroi)), roblist );
+    RegSelSiLUT::removeDuplicates( roblist );
+    return;
+  }
+
+  if ( roi.isFullscan() ) return ROBIDList( layer, roblist );
+
+  RegSelRoI roitmp( roi.zedMinus(), roi.zedPlus(), roi.phiMinus(), roi.phiPlus(), roi.etaMinus(), roi.etaPlus() );
+  if ( m_lookuptable ) m_lookuptable->getRobList( roitmp, layer, roblist ); ///  m_duplicateRemoval ); ??? 
+}
+
+
+
+///////////////////////////////////////////////////////////////////////////////////
+
+/// protected fullscan access methods
+
+/// full scan hashid 
+
+void RegSelTool::HashIDList( std::vector<IdentifierHash>& idlist ) const {
+  if ( m_lookuptable ) m_lookuptable->getHashList( idlist ); 
+}
+
+/// fullscan hashid for specific layer 
+
+void RegSelTool::HashIDList( long layer, std::vector<IdentifierHash>& idlist ) const {
+  if ( m_lookuptable ) m_lookuptable->getHashList( layer, idlist ); 
+}
+
+/// full scan robid
+
+void RegSelTool::ROBIDList( std::vector<uint32_t>& roblist ) const {
+  if ( m_lookuptable ) m_lookuptable->getRobList( roblist ); 
+}
+
+/// fullscan robid for specific layer 
+
+void RegSelTool::ROBIDList( long layer, std::vector<uint32_t>& roblist ) const {
+  if ( m_lookuptable ) m_lookuptable->getRobList( layer, roblist ); 
+}
+
diff --git a/DetectorDescription/RegionSelector/src/RegSelTool.h b/DetectorDescription/RegionSelector/src/RegSelTool.h
new file mode 100644
index 0000000000000000000000000000000000000000..7241112e183db6c18cdf91a5f98d58d0c81d61c5
--- /dev/null
+++ b/DetectorDescription/RegionSelector/src/RegSelTool.h
@@ -0,0 +1,108 @@
+/// emacs: this is -*- c++ -*-
+///
+///   @class RegSelTool RegSelTool.h
+/// 
+///     This is a Region Selector tool
+///     
+///   @author Mark Sutton
+///
+///   Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+///
+
+#ifndef REGIONSELECTOR_REGSELTOOL_H
+#define REGIONSELECTOR_REGSELTOOL_H
+
+// interface includes
+#include "IRegionSelector/IRegSelTool.h"
+#include "IRegionSelector/IRoiDescriptor.h"
+
+// spam
+#include "GaudiKernel/StatusCode.h"
+#include "GaudiKernel/ToolHandle.h"
+#include "GaudiKernel/MsgStream.h"
+#include "AthenaBaseComps/AthAlgTool.h"
+
+#include <string>
+#include <iostream>
+#include <vector>
+#include <stdint.h>
+
+
+class RegSelModule;
+class RegSelSiLUT;
+class IInterface;
+
+
+
+class RegSelTool : public extends<AthAlgTool, IRegSelTool> {
+
+ public:
+
+  /** @c Standard constructor for the tool.
+   */
+  RegSelTool( const std::string& type, const std::string& name, const IInterface* parent );
+
+  //! Destructor.
+  virtual ~RegSelTool() override;
+
+
+  //! @method initialize, loads the lookup tables for retrieving %Identifier %Hash and ROBID
+  virtual StatusCode initialize() override;
+
+  //! @method finalize, deletes lookup table from memory
+  virtual StatusCode finalize() override;
+  
+
+  //! @method handle, handles the actual lookup table
+  bool handle(); 
+
+
+  /// IRegSelTool interface ...
+
+  // Interface inherited from IRegSelTool
+
+  void HashIDList( const IRoiDescriptor& roi, std::vector<IdentifierHash>& idlist ) const;
+
+  void HashIDList( long layer, const IRoiDescriptor& roi, std::vector<IdentifierHash>& idlist) const;
+   
+  void ROBIDList( const IRoiDescriptor& roi, std::vector<uint32_t>& roblist ) const;
+
+  void ROBIDList( long layer, const IRoiDescriptor& roi, std::vector<uint32_t>& roblist ) const;
+
+   
+protected:
+
+  // full scan
+  void HashIDList( std::vector<IdentifierHash>& idlist ) const;  
+
+  // full scan for a specific layer
+  void HashIDList( long layer, std::vector<IdentifierHash>& idlist ) const;
+     
+
+  // Methods to obtain the rob id list
+
+  // full scan
+  void ROBIDList( std::vector<uint32_t>& roblist ) const;
+
+  // full scan by layer
+  void ROBIDList( long layer, std::vector<uint32_t>& roblist ) const;
+
+
+  // get list of modules
+  
+  void getRoIData( const IRoiDescriptor& roi, std::vector<const RegSelModule*>& modulelist ) const;
+
+private:
+
+  //! Flag to determine whether it has yet been initialised
+  bool              m_initialised; 
+
+  //! Flag to dump the loaded table to a data file.
+  BooleanProperty  m_dumpTable;
+
+  //! Actual lookup table
+  RegSelSiLUT*      m_lookuptable;
+
+};
+
+#endif // REGIONSELECTOR_REGSELTOOL_H
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.OverlayPool_tf.py b/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.OverlayPool_tf.py
index 38228f4f5a1da1fefa4b7bb0df6dcd6969ed2ba8..13c6f4f9533c1d0fce937bbc79d789ad3640aecd 100644
--- a/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.OverlayPool_tf.py
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.OverlayPool_tf.py
@@ -82,17 +82,17 @@ else:
 
     DetFlags.digitize.LVL1_setOff()
 
-## Tidy up NSW DetFlags: temporary measure
-DetFlags.sTGC_setOff()
-DetFlags.Micromegas_setOff()
-from AtlasGeoModel.CommonGMJobProperties import CommonGeometryFlags
-if (CommonGeometryFlags.Run() in ["RUN3", "RUN4"]):
-    DetFlags.sTGC_setOn()
-    DetFlags.Micromegas_setOn()
-
-from AtlasGeoModel.MuonGMJobProperties import MuonGeometryFlags
-if not MuonGeometryFlags.hasCSC():
-    DetFlags.CSC_setOff()
+if hasattr(runArgs,"geometryVersion") or not globalflags.DetDescrVersion.isDefault():
+    ## Tidy up NSW DetFlags
+    ## only do this if we can be sure globalflags.DetDescrVersion has been configured.
+    from AtlasGeoModel.CommonGMJobProperties import CommonGeometryFlags
+    if CommonGeometryFlags.Run() not in ["RUN3", "RUN4"]:
+        DetFlags.sTGC_setOff()
+        DetFlags.Micromegas_setOff()
+
+    from AtlasGeoModel.MuonGMJobProperties import MuonGeometryFlags
+    if not MuonGeometryFlags.hasCSC():
+        DetFlags.CSC_setOff()
 
 DetFlags.Print()
 
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.Overlay_tf.py b/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.Overlay_tf.py
index 81ba3e27855347b1c578db1744c39bc30618ca60..51bfac1f37623a2912acd1eab3486c0252704b12 100644
--- a/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.Overlay_tf.py
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.Overlay_tf.py
@@ -109,17 +109,19 @@ else:
 
     DetFlags.digitize.LVL1_setOff()
 
-## Tidy up NSW DetFlags: temporary measure
-DetFlags.sTGC_setOff()
-DetFlags.Micromegas_setOff()
-from AtlasGeoModel.CommonGMJobProperties import CommonGeometryFlags
-if (CommonGeometryFlags.Run() in ["RUN3", "RUN4"]):
-    DetFlags.sTGC_setOn()
-    DetFlags.Micromegas_setOn()
-
-from AtlasGeoModel.MuonGMJobProperties import MuonGeometryFlags
-if not MuonGeometryFlags.hasCSC():
-    DetFlags.CSC_setOff()
+if hasattr(runArgs,"geometryVersion") or not globalflags.DetDescrVersion.isDefault():
+    ## Tidy up NSW DetFlags: temporary measure
+    ## only do this if we can be sure globalflags.DetDescrVersion has been configured.
+    DetFlags.sTGC_setOff()
+    DetFlags.Micromegas_setOff()
+    from AtlasGeoModel.CommonGMJobProperties import CommonGeometryFlags
+    if (CommonGeometryFlags.Run() in ["RUN3", "RUN4"]):
+        DetFlags.sTGC_setOn()
+        DetFlags.Micromegas_setOn()
+
+        from AtlasGeoModel.MuonGMJobProperties import MuonGeometryFlags
+        if not MuonGeometryFlags.hasCSC():
+            DetFlags.CSC_setOff()
 
 # TODO: need to do it better
 #DetFlags.makeRIO.all_setOff() ## Currently has to be on otherwise InDetTRTStrawStatusSummarySvc is not created
diff --git a/HLT/Trigger/TrigControl/TrigPSC/src/Config.cxx b/HLT/Trigger/TrigControl/TrigPSC/src/Config.cxx
index ff1c3411b9c8112060430b6468b8140a04a83b41..0f689cc23df479b91e941bfc25dface7cc37b060 100644
--- a/HLT/Trigger/TrigControl/TrigPSC/src/Config.cxx
+++ b/HLT/Trigger/TrigControl/TrigPSC/src/Config.cxx
@@ -328,6 +328,7 @@ void psc::Config::fillopt_common(const ptree& hlt)
   optmap["SOFTTIMEOUTFRACTION"] = hltmppu.get_child("softTimeoutFraction").data();
   optmap["NEVENTSLOTS"]         = hltmppu.get_child("numberOfEventSlots").data();
   optmap["NTHREADS"]            = hltmppu.get_child("numberOfAthenaMTThreads").data();
+  optmap["MAXEVENTSIZEMB"]      = hltmppu.get_child("maximumHltResultMb").data();
 }
 
 ////////////////////////////////////////////////////////////////////////////////
diff --git a/HLT/Trigger/TrigControl/TrigPSC/src/Psc.cxx b/HLT/Trigger/TrigControl/TrigPSC/src/Psc.cxx
index c597ae647872ae8d5607cb9a971f3833dda6798a..17631d31f8fb09e54b4a63ba6a922e4f142e994c 100644
--- a/HLT/Trigger/TrigControl/TrigPSC/src/Psc.cxx
+++ b/HLT/Trigger/TrigControl/TrigPSC/src/Psc.cxx
@@ -381,6 +381,17 @@ bool psc::Psc::configure(const ptree& config)
               <<" number of Sub Det IDs read from OKS = " << m_config->enabled_SubDets.size());
   }
 
+  // Write the maximum HLT output size into the JobOptions Catalogue
+  if (std::string opt = m_config->getOption("MAXEVENTSIZEMB"); !opt.empty()) {
+    StatusCode sc = p_jobOptionSvc->addPropertyToCatalogue("DataFlowConfig",
+      IntegerProperty("DF_MaxEventSizeMB", std::stoi(opt)));
+    if ( sc.isFailure() ) {
+      ERS_PSC_ERROR("psc::Psc::configure: Error could not write DF_MaxEventSizeMB in JobOptions Catalogue");
+      return false;
+    }
+    ERS_DEBUG(1,"psc::Psc::configure: Wrote DF_MaxEventSizeMB=" << opt << " in JobOptions Catalogue");
+  }
+
   // Write configuration for HLT muon calibration infrastructure in JobOptions catalogue
   if ( (m_config->getOption("MUONCALBUFFERNAME") != "NONE") && (m_config->getOption("MUONCALBUFFERNAME") != "") ) {
     std::map<std::string, std::string>  muoncal_properties;
diff --git a/InnerDetector/InDetExample/InDetRecExample/python/TrackingCommon.py b/InnerDetector/InDetExample/InDetRecExample/python/TrackingCommon.py
index 2fbdc80a5e9c7e4c1f5676596679067fc3737166..a658be1691443018299805be6f06f6652c095aa9 100644
--- a/InnerDetector/InDetExample/InDetRecExample/python/TrackingCommon.py
+++ b/InnerDetector/InDetExample/InDetRecExample/python/TrackingCommon.py
@@ -51,6 +51,8 @@ def makePublicTool(tool_creator) :
             if len(args) > 1 :
                 raise Exception('Too many positional arguments')
             tool = tool_creator(name, **kwargs)
+            if tool is None :
+                return None
             if the_name != tool.name() :
                 raise Exception('Tool has not the exepected name %s but %s' % (the_name, tool.the_name()))
             ToolSvc += tool
diff --git a/InnerDetector/InDetRecTools/InDetVKalVxInJetTool/src/InDetTrkInJetType.cxx b/InnerDetector/InDetRecTools/InDetVKalVxInJetTool/src/InDetTrkInJetType.cxx
index 7c1793df39a4788f486e005e90a77ba79d9c51af..7f07f8823df52d5d659ab7f9fe52c2c5cf149cd9 100644
--- a/InnerDetector/InDetRecTools/InDetVKalVxInJetTool/src/InDetTrkInJetType.cxx
+++ b/InnerDetector/InDetRecTools/InDetVKalVxInJetTool/src/InDetTrkInJetType.cxx
@@ -11,6 +11,7 @@
 
 #include "Particle/TrackParticle.h"
 #include "MVAUtils/BDT.h" 
+#include "MVAUtils/TMVAToMVAUtils.h"
 #include "GaudiKernel/IChronoStatSvc.h"
 //
 //-------------------------------------------------
@@ -86,9 +87,9 @@ InDetTrkInJetType::InDetTrkInJetType(const std::string& type,
         bool isGrad       = false;
         if(method_bdt->GetOptions().Contains("UseYesNoLeaf=True")) useYesNoLeaf = true;
         if(method_bdt->GetOptions().Contains("BoostType=Grad")) isGrad = true;
-        m_localBDT = new MVAUtils::BDT( method_bdt, isGrad, useYesNoLeaf);
-	if(!m_localBDT){   ATH_MSG_DEBUG("Error! No_BDT from MVAUtils created");
-                           return StatusCode::SUCCESS; }
+        m_localBDT = TMVAToMVAUtils::convert(method_bdt, isGrad, useYesNoLeaf).release();
+        if(!m_localBDT){   ATH_MSG_DEBUG("Error! No_BDT from MVAUtils created");
+          return StatusCode::SUCCESS; }
      }else{
         ATH_MSG_DEBUG("Error! No calibration for TrackClassification found.");
         return StatusCode::SUCCESS;
diff --git a/LArCalorimeter/LArConfiguration/python/LArMonitoringConfig.py b/LArCalorimeter/LArConfiguration/python/LArMonitoringConfig.py
index 5c0aa9c998c12c12cc4f921819fff34a5ced1c9b..13882a2c060c9da57e083bbd1465a73b57020fa1 100644
--- a/LArCalorimeter/LArConfiguration/python/LArMonitoringConfig.py
+++ b/LArCalorimeter/LArConfiguration/python/LArMonitoringConfig.py
@@ -15,7 +15,8 @@ def LArMonitoringConfig(inputFlags):
     
     # algos which could run anytime (therefore should not in tier0Raw):
     if inputFlags.DQ.Environment != 'tier0Raw':
-        acc.merge(LArAffectedRegionsConfig(inputFlags))
+        if not inputFlags.Input.isMC:
+            acc.merge(LArAffectedRegionsConfig(inputFlags))
 
     # algos which can run in ESD but not AOD:
     if inputFlags.DQ.Environment != 'AOD':
diff --git a/LumiBlock/LumiBlockComps/src/BunchCrossingCondTest.cxx b/LumiBlock/LumiBlockComps/src/BunchCrossingCondTest.cxx
index f81bead3655e5921d223cf16731a9065a2bb1011..d94d8ba54d7ad3de8352c3dbec98206f3f3bb63c 100644
--- a/LumiBlock/LumiBlockComps/src/BunchCrossingCondTest.cxx
+++ b/LumiBlock/LumiBlockComps/src/BunchCrossingCondTest.cxx
@@ -63,6 +63,10 @@ void BunchCrossingCondTest::printInfo(const BunchCrossingCondData* bccd, unsigne
       << ", isBeam1="<< bccd->isBeam1(bcid)  << ", isBeam2=" <<bccd->isBeam2(bcid);
   out << ", distFront=" << bccd->distanceFromFront(bcid) 
       << ", distTail= " << bccd->distanceFromTail(bcid);
+
+  out << ", gapBefore=" << bccd->gapBeforeTrain(bcid)
+      << ", gapAfter=" << bccd->gapAfterTrain(bcid);
+
   out << std::endl;
   
 }
diff --git a/LumiBlock/LumiBlockData/LumiBlockData/BunchCrossingCondData.h b/LumiBlock/LumiBlockData/LumiBlockData/BunchCrossingCondData.h
index 5ee23e6243415881545ee6eb784abd1945d49bdd..60c99481f8ec525a174ce4c847ced7773564e3dc 100644
--- a/LumiBlock/LumiBlockData/LumiBlockData/BunchCrossingCondData.h
+++ b/LumiBlock/LumiBlockData/LumiBlockData/BunchCrossingCondData.h
@@ -92,7 +92,6 @@ public:
    */
   bool isBeam2(const bcid_type bcid ) const;
 
- 
   /// Enumeration specifying the units in which to expect the bunch distance type
   /**
    * To make it clear for the following functions what units to interpret their
@@ -106,6 +105,43 @@ public:
     FilledBunches
   };
 
+  /// Gap before the train this BCID is in
+  /**
+   * Get the gap that's between the train that the specified BCID is in, and
+   * the previous train. This is a useful number for some jet/MET studies.
+   *
+   * Note that the function doesn't work with the FilledBunches type, as the
+   * size of the gaps doesn't have to be a multiple of the bunch distance
+   * within the trains.
+   *
+   * Returns "-1" when there's no right answer to the question. (BCID not
+   * part of a train.)
+   *
+   * @param bcid The bcid whose train should be investigated
+   * @param type The type of the requested return value
+   * @returns The gap before the train of the specified bcid
+   */
+  int gapBeforeTrain( bcid_type bcid = 0,
+		      BunchDistanceType type = NanoSec ) const;
+  /// Gap after the train this BCID is in
+  /**
+   * Get the gap that's between the train that the specified BCID is in, and
+   * the next train. This is a useful number for some jet/MET studies.
+   *
+   * Note that the function doesn't work with the FilledBunches type, as the
+   * size of the gaps doesn't have to be a multiple of the bunch distance
+   * within the trains.
+   *
+   * Returns "-1" when there's no right answer to the question. (BCID not
+   * part of a train.)
+   *
+   * @param bcid The bcid whose train should be investigated
+   * @param type The type of the requested return value
+   * @returns The gap after the train of the specified bcid
+   */
+  int gapAfterTrain( bcid_type bcid = 0,
+		     BunchDistanceType type = NanoSec ) const;
+ 
   /// The distance of the specific bunch crossing from the front of the train
   /**
    * Get the distance of the specified bunch crossing from the front of the
diff --git a/LumiBlock/LumiBlockData/src/BunchCrossingCondData.cxx b/LumiBlock/LumiBlockData/src/BunchCrossingCondData.cxx
index 49eb6ffa7e106ba3c0d7f05e0f0e0cc024d2083f..e8b55c2fbd85519cf428f8c57744efa0e8ea81b1 100644
--- a/LumiBlock/LumiBlockData/src/BunchCrossingCondData.cxx
+++ b/LumiBlock/LumiBlockData/src/BunchCrossingCondData.cxx
@@ -122,3 +122,63 @@ unsigned BunchCrossingCondData::countColliding(int from, int to) const {
   }
   return ncoll;
 }
+
+
+int BunchCrossingCondData::gapBeforeTrain( bcid_type bcid,
+					   BunchDistanceType type) const {
+
+  const bunchTrain_t* bt=findTrain(bcid);
+  if (bt==nullptr) {
+    return -1;
+  }
+
+  int index=bt->m_first-1;
+  if (index<0) {
+    index=m_MAX_BCID-1;
+  }
+
+  int result=0;
+  
+  while (!m_luminous.test(index) && result<m_MAX_BCID) {
+    result++;
+    index--;
+    if (index<0) {
+      index=m_MAX_BCID-1;
+    }
+  }
+
+  if (type==NanoSec) {
+    result*=m_BUNCH_SPACING;
+  }
+
+  return result;
+}
+
+
+
+int BunchCrossingCondData::gapAfterTrain( bcid_type bcid,
+					  BunchDistanceType type) const {
+  const bunchTrain_t* bt=findTrain(bcid);
+  if (bt==nullptr) {
+    return -1;
+  }
+
+  int index=bt->m_last+1;
+  if (index>=m_MAX_BCID) {
+    index=0;
+  }
+  int result=0;
+  while (!m_luminous.test(index) && result<m_MAX_BCID) {
+    result++;
+    index++;
+    if (index>=m_MAX_BCID) {
+      index=0;
+    }
+  }
+
+  if (type==NanoSec) {
+    result*=m_BUNCH_SPACING;
+  }
+
+  return result;
+}
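
Both functions are circular scans over the BCID ring. A standalone sketch of the same backward scan, with the ring size and bunch spacing hard-coded here purely for illustration (in the class they are the m_MAX_BCID and m_BUNCH_SPACING members):

```cpp
#include <bitset>

constexpr int MAX_BCID = 3564;      // LHC ring size assumed for this sketch
constexpr int BUNCH_SPACING = 25;   // ns, assumed for this sketch

// Count empty BCID slots behind the first filled BCID of a train, wrapping
// around the end of the ring, until the previous train (a filled slot) is hit.
int gapBefore(const std::bitset<MAX_BCID>& luminous, int firstBcidOfTrain,
              bool inNanoSec = true) {
  int index = firstBcidOfTrain - 1;
  if (index < 0) index = MAX_BCID - 1;            // wrap around
  int gap = 0;
  while (!luminous.test(index) && gap < MAX_BCID) {
    ++gap;
    if (--index < 0) index = MAX_BCID - 1;        // keep wrapping
  }
  return inNanoSec ? gap * BUNCH_SPACING : gap;
}
```
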
diff --git a/MuonSpectrometer/MuonConfig/python/MuonSegmentFindingConfig.py b/MuonSpectrometer/MuonConfig/python/MuonSegmentFindingConfig.py
index 8036c849fb0ce88e4f56e351f7d6fae2251ce9f1..8ff6f8b03bd5638f8c809e848ca0446f077bac17 100644
--- a/MuonSpectrometer/MuonConfig/python/MuonSegmentFindingConfig.py
+++ b/MuonSpectrometer/MuonConfig/python/MuonSegmentFindingConfig.py
@@ -728,7 +728,10 @@ if __name__=="__main__":
 
     from AthenaConfiguration.AllConfigFlags import ConfigFlags
     from AthenaCommon.Logging import log
-    from AthenaConfiguration.TestDefaults import defaultTestFiles
+    # from AthenaConfiguration.TestDefaults import defaultTestFiles
+    # ConfigFlags.Input.Files = defaultTestFiles.ESD
+    ConfigFlags.Input.Files = ['/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/RecExRecoTest/ESD.16747874._000011_100events.pool.root']
+    
 
     ConfigFlags.Concurrency.NumThreads=args.threads
     ConfigFlags.Concurrency.NumConcurrentEvents=args.threads # Might change this later, but good enough for the moment.
@@ -740,7 +743,6 @@ if __name__=="__main__":
     ConfigFlags.Detector.GeometryCSC   = True     
     ConfigFlags.Detector.GeometryRPC   = True 
     
-    ConfigFlags.Input.Files = defaultTestFiles.ESD
     ConfigFlags.Output.ESDFileName=args.output
     
     from AthenaCommon.Constants import DEBUG
diff --git a/MuonSpectrometer/MuonConfig/python/MuonTrackBuildingConfig.py b/MuonSpectrometer/MuonConfig/python/MuonTrackBuildingConfig.py
index 8ff5d10dae7ec794011a26809e1391f704a6f96e..1bbf5bd4e03fded5697327dc75810a7b09920518 100644
--- a/MuonSpectrometer/MuonConfig/python/MuonTrackBuildingConfig.py
+++ b/MuonSpectrometer/MuonConfig/python/MuonTrackBuildingConfig.py
@@ -386,8 +386,9 @@ if __name__=="__main__":
     ConfigFlags.Detector.GeometryCSC   = True     
     ConfigFlags.Detector.GeometryRPC   = True 
         
-    from AthenaConfiguration.TestDefaults import defaultTestFiles
-    ConfigFlags.Input.Files = defaultTestFiles.ESD
+    # from AthenaConfiguration.TestDefaults import defaultTestFiles
+    # ConfigFlags.Input.Files = defaultTestFiles.ESD
+    ConfigFlags.Input.Files = ['/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/RecExRecoTest/ESD.16747874._000011_100events.pool.root']
     ConfigFlags.Output.ESDFileName=args.output
 
     # from AthenaCommon.Constants import DEBUG
diff --git a/PhysicsAnalysis/ElectronPhotonID/egammaMVACalibAnalysis/Root/egammaMVACalib.cxx b/PhysicsAnalysis/ElectronPhotonID/egammaMVACalibAnalysis/Root/egammaMVACalib.cxx
index d22558c1921b392d413bcb8fc22308bc2ac371e9..adbd8792444157aa891a0ec323cceb096c3c4900 100644
--- a/PhysicsAnalysis/ElectronPhotonID/egammaMVACalibAnalysis/Root/egammaMVACalib.cxx
+++ b/PhysicsAnalysis/ElectronPhotonID/egammaMVACalibAnalysis/Root/egammaMVACalib.cxx
@@ -34,9 +34,10 @@
 
 #include "egammaMVACalib/egammaMVALayerDepth.h"
 #include "egammaMVACalibAnalysis/egammaMVACalib.h"
-#include "MVAUtils/BDT.h"
 #include "PathResolver/PathResolver.h"
 
+#include "MVAUtils/BDT.h"
+#include "MVAUtils/TMVAToMVAUtils.h"
 using namespace MVAUtils;
 
 #define CHECK_SETUPBDT(EXP) { \
@@ -1509,12 +1510,11 @@ void egammaMVACalib::addReaderInfoToArrays(TMVA::Reader *reader,
 
   TMVA::MethodBDT* tbdt = dynamic_cast<TMVA::MethodBDT*>(reader->FindMVA("BDTG"));
   assert(tbdt);
-  BDT *bdt = new BDT(tbdt);
+  std::unique_ptr<BDT> bdt = TMVAToMVAUtils::convert(tbdt);
   TTree *tree = bdt->WriteTree(Form("BDT%d", index));
 
   variables->AddAtAndExpand(new TObjString(*vars), index);
   trees->AddAtAndExpand(tree, index);
-  delete bdt;
 }
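
The change above replaces a manual new/delete pair with factory-returned ownership. A minimal sketch of the resulting pattern; tbdt is assumed to come from TMVA::Reader::FindMVA as in the code above, and the surrounding bookkeeping is omitted.

```cpp
#include <memory>
#include "MVAUtils/TMVAToMVAUtils.h"

TTree* dumpBdt(TMVA::MethodBDT* tbdt, int index) {
  std::unique_ptr<MVAUtils::BDT> bdt = TMVAToMVAUtils::convert(tbdt);
  TTree* tree = bdt->WriteTree(Form("BDT%d", index));
  return tree;   // bdt is released automatically; no explicit delete needed
}
```
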
 
 
diff --git a/Reconstruction/MVAUtils/CMakeLists.txt b/Reconstruction/MVAUtils/CMakeLists.txt
index e2f3ab543fa5722a22936de40aa4954d741b198f..36b1b58419c75daf8f3c59ba0f37a58dcad217d3 100644
--- a/Reconstruction/MVAUtils/CMakeLists.txt
+++ b/Reconstruction/MVAUtils/CMakeLists.txt
@@ -9,8 +9,8 @@ atlas_depends_on_subdirs(
   PRIVATE
   Control/CxxUtils )
 
-# External dependencies:
-find_package( ROOT COMPONENTS Tree TreePlayer TMVA XMLIO Core MathCore Hist RIO pthread )
+# External dependencies for main lib:
+find_package( ROOT COMPONENTS Tree TMVA XMLIO Core MathCore RIO)
 
 # Component(s) in the package:
 atlas_add_library( MVAUtils
@@ -19,11 +19,14 @@ atlas_add_library( MVAUtils
                    INCLUDE_DIRS ${ROOT_INCLUDE_DIRS}
                    LINK_LIBRARIES ${ROOT_LIBRARIES} )
 
-atlas_add_executable( convertXmlToRootTree util/convertXmlToRootTree.cxx
-		      INCLUDE_DIRS MVAUtils
-		      LINK_LIBRARIES MVAUtils CxxUtils)
-
 atlas_add_dictionary( MVAUtilsDict
                       MVAUtils/MVAUtilsDict.h
                       MVAUtils/selection.xml
                       LINK_LIBRARIES egammaMVACalibLib )
+
+# External ROOT dependencies for the utilities:
+atlas_add_executable( convertXmlToRootTree util/convertXmlToRootTree.cxx
+		      INCLUDE_DIRS MVAUtils ${ROOT_INCLUDE_DIRS}
+		      LINK_LIBRARIES MVAUtils CxxUtils ${ROOT_LIBRARIES})
+
+
diff --git a/Reconstruction/MVAUtils/MVAUtils/BDT.h b/Reconstruction/MVAUtils/MVAUtils/BDT.h
index 723c52f4f637195dd4415eb14e424d8f3931ce5f..a3d771fdd41b84e1dda8056f1e88181a97e1c974 100644
--- a/Reconstruction/MVAUtils/MVAUtils/BDT.h
+++ b/Reconstruction/MVAUtils/MVAUtils/BDT.h
@@ -1,7 +1,5 @@
-// dear emacs, this is really -*- C++ -*-
-
 /*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
 */
 
 #ifndef MVAUtils_BDT_H
@@ -9,13 +7,8 @@
 
 #include<vector>
 #include "TString.h"
-
 #include "MVAUtils/Node.h"
 
-namespace TMVA { 
-  class DecisionTreeNode; 
-  class MethodBDT; 
-}
 class TTree;
 
 namespace MVAUtils 
@@ -38,8 +31,29 @@ namespace MVAUtils
   class BDT
   {
   public:
+
+    BDT(float offset,
+        float sumWeights,
+        const std::vector<Node::index_t>& forest,
+        const std::vector<float>& weights,
+        const std::vector<Node>& nodes): 
+      m_offset(offset),
+      m_sumWeights(sumWeights),
+      m_forest(forest),
+      m_weights(weights),
+      m_nodes(nodes){
+      }
+
     BDT(TTree *tree);
-    BDT(TMVA::MethodBDT *bdt, bool isRegression = true, bool useYesNoLeaf = false);
+
+    /** Disable default ctor and copy*/
+    BDT() = delete;
+    BDT (const BDT&) = delete;
+    BDT& operator=(const BDT&) = delete;
+    /** default move ctor, move assignment and dtor*/
+    BDT (BDT&&) = default;
+    BDT& operator=(BDT&&) = default;
+    ~BDT()=default; 
 
     /** return the number of trees in the forest */
     unsigned int GetNTrees() const { return m_forest.size(); }
@@ -72,31 +86,25 @@ namespace MVAUtils
     std::vector<float> GetMultiResponse(const std::vector<float*>& pointers, unsigned int numClasses) const;
     std::vector<float> GetMultiResponse(unsigned int numClasses) const;
 	
+    //for debugging, print out tree or forest to stdout
+    void PrintForest() const;
+    void PrintTree(Node::index_t index) const;
+    //dump out a TTree
+    TTree* WriteTree(TString name = "BDT");
+	
     /** Return the values corresponding to m_pointers (or an empty vector) **/
     std::vector<float> GetValues() const;
 	
-    /** Return stored pointers (which are used by GetResponse with no args) **/
+    /** Return stored pointers (which are used by GetResponse with no args)*/
     std::vector<float*> GetPointers() const { return m_pointers; }
 
-    /** Set the stored pointers so that one can use GetResponse with no args */
-    void SetPointers(std::vector<float*>& pointers) { m_pointers = pointers; }
+    /** Set the stored pointers so that one can use GetResponse with no args (non-const, not MT-safe) */
+    void SetPointers(const std::vector<float*>& pointers) { m_pointers = pointers; }
 	
-    //dump out a TTree
-    TTree* WriteTree(TString name = "BDT");
-	
-    //for debugging, print out tree or forest to stdout
-    void PrintForest() const;
-    void PrintTree(Node::index_t index) const;
-
   private:
 
-    // create new tree from root file
+    // create new tree 
     void newTree(const std::vector<int>& vars, const std::vector<float>& values);
-	
-    // create new tree from decision tree
-    void newTree(const TMVA::DecisionTreeNode *node, bool isRegression, bool useYesNoLeaf); 
-
-
     float GetTreeResponse(const std::vector<float>& values, Node::index_t index) const;
     float GetTreeResponse(const std::vector<float*>& pointers, Node::index_t index) const;
 
@@ -104,9 +112,8 @@ namespace MVAUtils
     float m_sumWeights; //!< the sumOfBoostWeights--no need to recompute each call
     std::vector<Node::index_t> m_forest; //!< indices of the top-level nodes of each tree
     std::vector<float> m_weights; //!< boost weights
-    std::vector<float*> m_pointers; //!< where vars to cut on can be set (but can also be passed)
     std::vector<Node> m_nodes; //!< where the nodes of the forest are stored
-	
+    std::vector<float*> m_pointers; //!< where vars to cut on can be set (but can also be passed) 
   };
 }
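
A minimal sketch of the new value-based constructor: a one-leaf forest built by hand. The Node(var, value, right) convention, with var = -1 marking a leaf, follows the code in this patch; the numeric values are invented for illustration only.

```cpp
#include "MVAUtils/BDT.h"
#include <vector>

int main() {
  std::vector<MVAUtils::Node::index_t> forest = {0};  // tree 0 starts at node 0
  std::vector<float> weights = {1.0f};                // one boost weight
  std::vector<MVAUtils::Node> nodes;
  nodes.emplace_back(-1, 0.5f, -1);                   // a single leaf returning 0.5

  MVAUtils::BDT bdt(/*offset=*/0.0f, /*sumWeights=*/1.0f, forest, weights, nodes);

  std::vector<float> inputs;                          // a leaf-only tree needs no inputs
  const float response = bdt.GetResponse(inputs);
  (void)response;
  return 0;
}
```
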
 
diff --git a/Reconstruction/MVAUtils/MVAUtils/TMVAToMVAUtils.h b/Reconstruction/MVAUtils/MVAUtils/TMVAToMVAUtils.h
new file mode 100644
index 0000000000000000000000000000000000000000..d9dc71c035f3dd60c6cef1e590a751a03aa363ec
--- /dev/null
+++ b/Reconstruction/MVAUtils/MVAUtils/TMVAToMVAUtils.h
@@ -0,0 +1,113 @@
+/*
+  Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+*/
+
+#ifndef MVAUtils_TMVATOMVAUtils_H
+#define MVAUtils_TMVATOMVAUtils_H
+
+#include "MVAUtils/BDT.h"
+#include <stack>
+#include "TMVA/Reader.h"
+#include "TMVA/MethodBDT.h"
+
+namespace TMVAToMVAUtils{
+/** Creates the full tree structure from what is stored in root file **/
+void newTree(const TMVA::DecisionTreeNode *node, 
+             bool isRegression, bool useYesNoLeaf, 
+             std::vector<MVAUtils::Node>& nodes)
+{
+  //index is relative to the current node
+  std::vector<MVAUtils::Node::index_t> right;
+  {
+
+    // not strictly parent if doing a right node
+    std::stack<const TMVA::DecisionTreeNode *> parent; 
+    std::stack<MVAUtils::Node::index_t> parentIndex;
+                    
+    parentIndex.push(-1);
+    parent.push(nullptr);
+                                
+    auto currNode = node;
+    int i = -1;
+    while (currNode) {
+      ++i;
+      right.push_back(-1);
+      if (!currNode->GetLeft()){
+        // a leaf
+        auto currParent = parent.top();
+        auto currParentIndex = parentIndex.top();
+        // if right has not been visited, next will be right
+        if (currParentIndex >= 0) {
+          right[currParentIndex] = i + 1 - currParentIndex;
+          currNode = currParent->GetCutType() ? currParent->GetLeft() : currParent->GetRight();
+        } else {
+          currNode = nullptr;
+        }
+        parent.pop();
+        parentIndex.pop();
+      } else {
+        // not a leaf
+        parent.push(currNode);
+        parentIndex.push(i);
+        currNode = currNode->GetCutType() ? currNode->GetRight() : currNode->GetLeft();
+      }
+    }
+  }
+  {
+    std::stack<const TMVA::DecisionTreeNode *> parent; // not strictly parent if doing a right node
+    parent.push(nullptr);
+    auto currNode = node;
+    int i = -1;
+    while (currNode) {
+      ++i;
+      if (!currNode->GetLeft()){
+        // a leaf
+        nodes.emplace_back(-1, 
+                             isRegression ? 
+                             currNode->GetResponse() : useYesNoLeaf ? currNode->GetNodeType() : currNode->GetPurity(), 
+                             right[i]);
+        auto currParent = parent.top();
+        // if right has not been visited, next will be right
+        if (currParent) {
+          currNode = currParent->GetCutType() ? currParent->GetLeft() : currParent->GetRight();
+        } else {
+          currNode = nullptr;
+        }
+        parent.pop();
+      } else {
+        // not a leaf
+        parent.push(currNode);
+        nodes.emplace_back(currNode->GetSelector(), currNode->GetCutValue(), right[i]);
+
+        currNode = currNode->GetCutType() ? currNode->GetRight() : currNode->GetLeft();
+      }
+    }
+  }
+}
+
+std::unique_ptr<MVAUtils::BDT> convert(TMVA::MethodBDT* bdt, bool isRegression = true, bool useYesNoLeaf = false){
+
+  float sumWeights=0; 
+  float offset=bdt->GetBoostWeights().size() ? bdt->GetBoostWeights()[0] : 0.;  
+  std::vector<MVAUtils::Node::index_t> forest;
+  std::vector<float> weights;
+  std::vector<MVAUtils::Node> nodes;
+  
+  /* Build the BDT*/
+  std::vector<TMVA::DecisionTree*>::const_iterator it;
+  for(it = bdt->GetForest().begin(); it != bdt->GetForest().end(); ++it) {
+    forest.push_back(nodes.size());
+    uint index=it - bdt->GetForest().begin();
+    if( bdt->GetBoostWeights().size() > index ) {
+      weights.push_back( bdt->GetBoostWeights()[index]);
+      sumWeights+=weights.back();
+    }
+    else {
+      weights.push_back(0);
+    }
+    newTree((*it)->GetRoot(), isRegression, useYesNoLeaf,nodes);      
+  }
+ return std::make_unique<MVAUtils::BDT>(offset,sumWeights,forest,weights,nodes);
+}  
+}
+#endif
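
Putting the new header together with TMVA, a hedged sketch of the intended flow. The variable name and the "weights.xml" path are placeholders; only TMVAToMVAUtils::convert and BDT::SetPointers come from this patch.

```cpp
#include "MVAUtils/TMVAToMVAUtils.h"
#include <memory>
#include <vector>

std::unique_ptr<MVAUtils::BDT> loadFromXml(std::vector<float*>& vars) {
  TMVA::Reader reader("Silent");
  reader.AddVariable("var0", vars.at(0));       // one float* per input variable
  TMVA::MethodBDT* method =
      dynamic_cast<TMVA::MethodBDT*>(reader.BookMVA("BDTG", "weights.xml"));
  if (!method) return nullptr;                  // BookMVA did not yield a BDT

  auto bdt = TMVAToMVAUtils::convert(method, /*isRegression=*/true,
                                     /*useYesNoLeaf=*/false);
  bdt->SetPointers(vars);                       // enables GetResponse() with no arguments
  return bdt;
}
```
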
diff --git a/Reconstruction/MVAUtils/Root/BDT.cxx b/Reconstruction/MVAUtils/Root/BDT.cxx
index addd45bd7c92901e34b84558defd16f3cb9e5e9c..25b5e6f95d71a9849cbd078592e1ac9a2fac5516 100644
--- a/Reconstruction/MVAUtils/Root/BDT.cxx
+++ b/Reconstruction/MVAUtils/Root/BDT.cxx
@@ -3,12 +3,8 @@
 */
 
 #include "MVAUtils/BDT.h"
-
-#include "TMVA/MethodBDT.h"
-#include "TMVA/DecisionTree.h"
 #include "TTree.h"
 #include <stack>
-
 // for debugging:
 #include <iostream>
 
@@ -21,53 +17,24 @@ BDT::BDT(TTree *tree)
 {
     std::vector<int> *vars = 0;
     std::vector<float> *values = 0;
-    
     tree->SetBranchAddress("offset", &m_offset);
     tree->SetBranchAddress("vars", &vars);
-    tree->SetBranchAddress("values", &values);
-    
+    tree->SetBranchAddress("values", &values);   
     for (int i = 0; i < tree->GetEntries(); ++i)
     {
-	tree->GetEntry(i);
-	assert (vars);
-	assert (values);
-	m_forest.push_back(m_nodes.size());
-	newTree(*vars, *values);
-	m_weights.push_back(m_offset);
-	m_sumWeights+=m_offset;
+      tree->GetEntry(i);
+      assert (vars);
+      assert (values);
+      m_forest.push_back(m_nodes.size());
+      newTree(*vars, *values);
+      m_weights.push_back(m_offset);
+      m_sumWeights+=m_offset;
     }
     
     m_offset = m_weights[0];//original use of m_offset
 
     delete vars;
     delete values;
-
-    // // For Debug
-    // std::cout << "Constructed from a TTree" << std::endl;
-    // PrintForest();
-
-}
-
-/** c-tor from TMVA::MethodBDT **/
-BDT::BDT(TMVA::MethodBDT* bdt, bool isRegression, bool useYesNoLeaf)
- : m_sumWeights(0)
-{
-    assert(bdt);
-    m_offset = bdt->GetBoostWeights().size() ? bdt->GetBoostWeights()[0] : 0.;
-    std::vector<TMVA::DecisionTree*>::const_iterator it;
-    for(it = bdt->GetForest().begin(); it != bdt->GetForest().end(); ++it) {
-      m_forest.push_back(m_nodes.size());
-      uint index=it - bdt->GetForest().begin();
-      if( bdt->GetBoostWeights().size() > index ) {
-	m_weights.push_back( bdt->GetBoostWeights()[index]);
-	m_sumWeights+=m_weights.back();
-      }
-      else m_weights.push_back(0);
-      newTree((*it)->GetRoot(), isRegression, useYesNoLeaf);      
-    }
-    // // For Debug
-    // std::cout << "Constructed from a MethodBDT" << std::endl;
-    // PrintForest();
 }
 
 
@@ -90,95 +57,17 @@ void BDT::newTree(const std::vector<int>& vars, const std::vector<float>& values
       auto currParent = parent.top();
       // if right has not been visited, next will be right
       if (currParent >= 0) {
-	right[currParent] = i+1-currParent;
+        right[currParent] = i+1-currParent;
       }
       parent.pop();
     }
   }
 
   for (size_t i = 0; i < vars.size(); ++i) {
-    //std::cout << "    i = " << i << ", vars = " << vars[i] << ", values = " << values[i] << ", right = " <<  right[i] << std::endl;
     m_nodes.emplace_back(vars[i], values[i], right[i]);
   }
 }
 
-/**
- * Creates the full tree structure from TMVA::DecisionTree node.
- **/
-void BDT::newTree(const TMVA::DecisionTreeNode *node, bool isRegression, bool useYesNoLeaf)
-{
-
-  // index is relative to the current node
-  std::vector<Node::index_t> right;
-  {
-
-    // not strictly parent if doing a right node
-    std::stack<const TMVA::DecisionTreeNode *> parent; 
-    std::stack<Node::index_t> parentIndex;
-    
-    parentIndex.push(-1);
-    parent.push(nullptr);
-    
-    auto currNode = node;
-    int i = -1;
-    while (currNode) {
-      ++i;
-      right.push_back(-1);
-      if (!currNode->GetLeft()){
-	// a leaf
-	auto currParent = parent.top();
-	auto currParentIndex = parentIndex.top();
-	// if right has not been visited, next will be right
-	if (currParentIndex >= 0) {
-	  right[currParentIndex] = i + 1 - currParentIndex;
-	  currNode = currParent->GetCutType() ? currParent->GetLeft() : currParent->GetRight();
-	} else {
-	  currNode = nullptr;
-	}
-	parent.pop();
-	parentIndex.pop();
-      } else {
-	// not a leaf
-	parent.push(currNode);
-	parentIndex.push(i);
-	currNode = currNode->GetCutType() ? currNode->GetRight() : currNode->GetLeft();
-      }
-    }
-  }
-  {
-    std::stack<const TMVA::DecisionTreeNode *> parent; // not strictly parent if doing a right node
-    
-    parent.push(nullptr);
-
-    auto currNode = node;
-    int i = -1;
-    while (currNode) {
-      ++i;
-      if (!currNode->GetLeft()){
-	// a leaf
-	m_nodes.emplace_back(-1, 
-			     isRegression ? 
-			     currNode->GetResponse() : useYesNoLeaf ? currNode->GetNodeType() : currNode->GetPurity(), 
-			     right[i]);
-	auto currParent = parent.top();
-	// if right has not been visited, next will be right
-	if (currParent) {
-	  currNode = currParent->GetCutType() ? currParent->GetLeft() : currParent->GetRight();
-	} else {
-	  currNode = nullptr;
-	}
-	parent.pop();
-      } else {
-	// not a leaf
-	parent.push(currNode);
-	m_nodes.emplace_back(currNode->GetSelector(), currNode->GetCutValue(), right[i]);
-	
-	currNode = currNode->GetCutType() ? currNode->GetRight() : currNode->GetLeft();
-      }
-    }
-  }
-}
-
 /** Return offset + the sum of the response of each tree  **/
 float BDT::GetResponse(const std::vector<float>& values) const
 {
@@ -333,8 +222,8 @@ TTree* BDT::WriteTree(TString name)
       auto end = static_cast<Node::index_t>(nodeSize);
       if (i+1 < forSize) end = m_forest[i+1];
       for(auto j = beg; j < end; ++j) {
-	vars.push_back(m_nodes[j].GetVar());
-	values.push_back(m_nodes[j].GetVal());
+        vars.push_back(m_nodes[j].GetVar());
+        values.push_back(m_nodes[j].GetVal());
       }
       m_offset = m_weights[i];
       tree->Fill();
@@ -355,7 +244,6 @@ void BDT::PrintForest() const
   }
     
 }
-
 /** For debugging only:
   * Print the tree in a way that can compare implementations
   * Using pre-order search for now
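
The layout that newTree produces is a pre-order array: a node's left child is the next entry, its right child sits at the node's index plus the stored offset, and var == -1 marks a leaf. A standalone walker over such a layout; the cut direction and the numbers below are illustrative assumptions, not lifted from MVAUtils.

```cpp
#include <cstdio>
#include <vector>

struct FlatNode { int var; float val; int right; };

float evaluate(const std::vector<FlatNode>& nodes, const std::vector<float>& x) {
  int i = 0;
  while (nodes[i].var >= 0) {                     // descend until a leaf
    i = (x[nodes[i].var] < nodes[i].val)
          ? i + 1                                 // left child follows directly
          : i + nodes[i].right;                   // right child via relative offset
  }
  return nodes[i].val;                            // leaves store the response
}

int main() {
  // One cut on variable 0 at 1.5, with two leaves:
  const std::vector<FlatNode> tree = { {0, 1.5f, 2}, {-1, 10.f, -1}, {-1, 20.f, -1} };
  std::printf("%.1f %.1f\n", evaluate(tree, {1.0f}), evaluate(tree, {2.0f}));
  return 0;
}
```
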
diff --git a/Reconstruction/MVAUtils/util/convertXmlToRootTree.cxx b/Reconstruction/MVAUtils/util/convertXmlToRootTree.cxx
index 83ddfd5b76f03383027c589ac23315fc9b9b389a..69404140929ade18ba4dea99feab84821a36768d 100644
--- a/Reconstruction/MVAUtils/util/convertXmlToRootTree.cxx
+++ b/Reconstruction/MVAUtils/util/convertXmlToRootTree.cxx
@@ -3,6 +3,7 @@
 */
 
 #include "MVAUtils/BDT.h"
+#include "MVAUtils/TMVAToMVAUtils.h"
 #include "TMVA/Reader.h"
 #include "TMVA/MethodBDT.h"
 
@@ -13,14 +14,13 @@
 #include <TRandom3.h>
 
 #include <vector>
-#include <iostream>
-
+#include <iostream> 
 #include "CxxUtils/checker_macros.h"
 
 using namespace std;
 
 /** 
-    A utility to convert xml files from TMVA into root TTrees for this package.
+    Utility to convert xml files from TMVA into root TTrees for this package.
 
     Usage: convertXmlToRootTree <inFile(xml)> [outFile(root)]
 
@@ -40,7 +40,6 @@ struct XmlVariableInfo {
 TString AnalysisType;
 unsigned int NClass;
 
-
 std::vector<XmlVariableInfo>
 parseVariables(TXMLEngine *xml, void* node, const TString & nodeName)
 {
@@ -178,11 +177,11 @@ int main  ATLAS_NOT_THREAD_SAFE (int argc, char** argv){
     }
     else // should never happen
       {
-	cerr <<"Unknown type from parser "<< infoType.Data()<<endl;
-	//throw std::runtime_error("Unknown type from parser");
-	//	delete vars.back();
-	vars.pop_back();
-	return 0;
+        cerr <<"Unknown type from parser "<< infoType.Data()<<endl;
+        //throw std::runtime_error("Unknown type from parser");
+        //	delete vars.back();
+        vars.pop_back();
+        return 0;
       }
   }
 
@@ -195,7 +194,7 @@ int main  ATLAS_NOT_THREAD_SAFE (int argc, char** argv){
   if(method_bdt->GetOptions().Contains("BoostType=Grad")) isGrad = true;
   cout << "UseYesNoLeaf? " << useYesNoLeaf << endl;
   cout << "Gradient Boost? " << isGrad << endl;
-  MVAUtils::BDT* bdt = new MVAUtils::BDT( method_bdt, isRegression || isGrad, useYesNoLeaf);
+  std::unique_ptr<MVAUtils::BDT> bdt= TMVAToMVAUtils::convert(method_bdt, isRegression || isGrad, useYesNoLeaf);
   bdt->SetPointers(vars);
 
 
@@ -222,10 +221,6 @@ int main  ATLAS_NOT_THREAD_SAFE (int argc, char** argv){
   n->Write();
   f->Close();
   delete f;
-  delete bdt;
-  bdt = nullptr;
-
-
   cout << endl << "Reading BDT from root file and testing " << outFileName << endl;
 
   f = TFile::Open(outFileName, "READ");
@@ -235,7 +230,7 @@ int main  ATLAS_NOT_THREAD_SAFE (int argc, char** argv){
     return 0;
   }
   
-  bdt = new MVAUtils::BDT(bdt_tree);
+  bdt.reset(new MVAUtils::BDT(bdt_tree));
   bdt->SetPointers(vars);
   cout << bdt->GetResponse() << endl;
   cout << "MVAUtils::BDT : "
diff --git a/Reconstruction/MuonIdentification/MuonCombinedRecExample/python/MuonCombinedFitTools.py b/Reconstruction/MuonIdentification/MuonCombinedRecExample/python/MuonCombinedFitTools.py
index 092a44e5ec8153b4bb32292adda2888229c562d5..5a688a837340c0e5ce9a89fdb1905bb30d5df6e6 100644
--- a/Reconstruction/MuonIdentification/MuonCombinedRecExample/python/MuonCombinedFitTools.py
+++ b/Reconstruction/MuonIdentification/MuonCombinedRecExample/python/MuonCombinedFitTools.py
@@ -169,10 +169,9 @@ def MuidMaterialEffectsOnTrackProviderParam( name='MuidMaterialEffectsOnTrackPro
 
 
 def MuonCombinedPropagator( name='MuonCombinedPropagator', **kwargs ):
-    if not TriggerFlags.MuonSlice.doTrigMuonConfig:
-        kwargs.setdefault("AccuracyParameter",   .000001 )
-        kwargs.setdefault("IncludeBgradients",   True )
-        kwargs.setdefault("MaxHelixStep",        .001 )
+    kwargs.setdefault("AccuracyParameter",   .000001 )
+    kwargs.setdefault("IncludeBgradients",   True )
+    kwargs.setdefault("MaxHelixStep",        .001 )
     kwargs.setdefault("MaxStraightLineStep", .001 )
     return CfgMgr.Trk__RungeKuttaPropagator(name,**kwargs)
 
@@ -247,6 +246,7 @@ def CombinedMuonTrackBuilderFit( name='CombinedMuonTrackBuilderFit', **kwargs ):
     return CfgMgr.Rec__CombinedMuonTrackBuilder(name,**kwargs)
 
 def CombinedMuonTrackBuilder( name='CombinedMuonTrackBuilder', **kwargs ):
+    from AthenaCommon.AppMgr import ToolSvc
     kwargs.setdefault("CaloEnergyParam"               , getPublicTool("MuidCaloEnergyToolParam") )
     kwargs.setdefault("CaloTSOS"                      , getPublicTool("MuidCaloTrackStateOnSurface") )
     kwargs.setdefault("CscRotCreator"                 , (getPublicTool("CscClusterOnTrackCreator") if MuonGeometryFlags.hasCSC() else "") )
@@ -254,8 +254,6 @@ def CombinedMuonTrackBuilder( name='CombinedMuonTrackBuilder', **kwargs ):
     kwargs.setdefault("SLFitter"                      , getPublicTool("iPatSLFitter") )
     kwargs.setdefault("MaterialAllocator"             , getPublicTool("MuidMaterialAllocator") )
     kwargs.setdefault("MdtRotCreator"                 , getPublicTool("MdtDriftCircleOnTrackCreator") )
-    kwargs.setdefault("Propagator"                    , getPublicTool("MuonCombinedPropagator") )
-    kwargs.setdefault("SLPropagator"                  , getPublicTool("MuonCombinedPropagator") )
     kwargs.setdefault("CleanCombined"                 , True )
     kwargs.setdefault("CleanStandalone"               , True )
     kwargs.setdefault("BadFitChi2"                    , 2.5 )
@@ -273,11 +271,16 @@ def CombinedMuonTrackBuilder( name='CombinedMuonTrackBuilder', **kwargs ):
 
     if TriggerFlags.MuonSlice.doTrigMuonConfig:
         kwargs.setdefault("MuonHoleRecovery"              , "" )
-        kwargs.setdefault("TrackSummaryTool"              , "MuonTrackSummaryTool" )
+        kwargs.setdefault("TrackSummaryTool"              , getPublicTool("MuonTrackSummaryTool") )
+
+        kwargs.setdefault("Propagator"                    , ToolSvc.AtlasRungeKuttaPropagator)
+        kwargs.setdefault("SLPropagator"                  , ToolSvc.AtlasRungeKuttaPropagator)
     else:
         import MuonCombinedRecExample.CombinedMuonTrackSummary
         kwargs.setdefault("MuonHoleRecovery"              , getPublicTool("MuidSegmentRegionRecoveryTool") )
         kwargs.setdefault("TrackSummaryTool"              , ToolSvc.CombinedMuonTrackSummary )
+        kwargs.setdefault("Propagator"                    , getPublicTool("MuonCombinedPropagator") )
+        kwargs.setdefault("SLPropagator"                  , getPublicTool("MuonCombinedPropagator") )
 
     if beamFlags.beamType() == 'cosmics':
         kwargs.setdefault("MdtRotCreator" ,  "" )
diff --git a/Reconstruction/MuonIdentification/MuonCombinedRecExample/python/MuonCombinedTools.py b/Reconstruction/MuonIdentification/MuonCombinedRecExample/python/MuonCombinedTools.py
index 78bbf6655723c920a4783e676a72e6e297dae3ea..baa6e0380a82954f8f36be2bdaa99d456483e7a2 100644
--- a/Reconstruction/MuonIdentification/MuonCombinedRecExample/python/MuonCombinedTools.py
+++ b/Reconstruction/MuonIdentification/MuonCombinedRecExample/python/MuonCombinedTools.py
@@ -96,11 +96,17 @@ def MuonCombinedTool(name="MuonCombinedTool",**kwargs):
     return CfgMgr.MuonCombined__MuonCombinedTool(name,**kwargs)
 
 def MuonCombinedFitTagTool(name="MuonCombinedFitTagTool",**kwargs):
-    kwargs.setdefault("TrackBuilder",         getPublicTool("CombinedMuonTrackBuilder") )
-    kwargs.setdefault("TrackQuery",           getPublicTool("MuonTrackQuery") )
-    kwargs.setdefault("MatchQuality",         getPublicTool("MuonMatchQuality") )
+    from AthenaCommon.AppMgr import ToolSvc
     if TriggerFlags.MuonSlice.doTrigMuonConfig:
+        from TrkExRungeKuttaIntersector.TrkExRungeKuttaIntersectorConf import Trk__IntersectorWrapper as Propagator
+        TrigMuonPropagator = Propagator(name = 'TrigMuonPropagator')
+        ToolSvc += TrigMuonPropagator
+        kwargs.setdefault("TrackBuilder",         getPublicToolClone("TrigMuonTrackBuilder", "CombinedMuonTrackBuilder", Propagator=TrigMuonPropagator) )
         kwargs.setdefault("VertexContainer", "")
+    else:
+        kwargs.setdefault("TrackBuilder",         getPublicTool("CombinedMuonTrackBuilder") )
+    kwargs.setdefault("TrackQuery",           getPublicTool("MuonTrackQuery") )
+    kwargs.setdefault("MatchQuality",         getPublicTool("MuonMatchQuality") )
     return CfgMgr.MuonCombined__MuonCombinedFitTagTool(name,**kwargs)
                          
 def MuonCombinedStacoTagTool(name="MuonCombinedStacoTagTool",**kwargs):
diff --git a/Reconstruction/RecExample/RecExCommon/share/CombinedRec_config.py b/Reconstruction/RecExample/RecExCommon/share/CombinedRec_config.py
index 7c046ebf5ebd673c806c2f67f9ce5f258ba8419b..18382bb48981a178ddf10ab07d5153c123d4dcfa 100755
--- a/Reconstruction/RecExample/RecExCommon/share/CombinedRec_config.py
+++ b/Reconstruction/RecExample/RecExCommon/share/CombinedRec_config.py
@@ -56,6 +56,17 @@ if rec.doESD() and recAlgs.doTrackParticleCellAssociation() and DetFlags.ID_on()
     topSequence += CfgMgr.TrackParticleCellAssociationAlg("TrackParticleCellAssociationAlg", 
                                                           ParticleCaloCellAssociationTool=caloCellAssociationTool)
 
+#
+# functionality : CaloExtensionBuilder setup to be used in tau and pflow
+#    
+pdr.flag_domain('CaloExtensionBuilder')
+if (rec.doESD()) and (recAlgs.doEFlow() or rec.doTau()) : #   or rec.readESD()
+    try:
+        include( "TrackToCalo/CaloExtensionBuilderAlg_jobOptions.py" )
+        CaloExtensionBuilder("TightPrimary", 500.) #Arguments are cutLevel and minPt for track selection
+    except Exception:
+        pass
+
 #
 # functionality : energy flow
 #                                                                                                 
diff --git a/Reconstruction/RecExample/RecExRecoTest/share/RecExRecoTest_ART_jets_fromESD.py b/Reconstruction/RecExample/RecExRecoTest/share/RecExRecoTest_ART_jets_fromESD.py
new file mode 100644
index 0000000000000000000000000000000000000000..51f56846f745dd652c2c3082c25347eefe30c108
--- /dev/null
+++ b/Reconstruction/RecExample/RecExRecoTest/share/RecExRecoTest_ART_jets_fromESD.py
@@ -0,0 +1,33 @@
+from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
+athenaCommonFlags.FilesInput=["/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/RecExRecoTest/mc16_13TeV.361022.Pythia8EvtGen_A14NNPDF23LO_jetjet_JZ2W.recon.ESD.e3668_s3170_r10572_homeMade.pool.root"]
+
+from RecExConfig.RecFlags import rec
+rec.doEgamma.set_Value_and_Lock(False)
+rec.doMuon.set_Value_and_Lock(True)
+
+from CaloRec.CaloRecFlags import jobproperties
+jobproperties.CaloRecFlags.Enabled.set_Value_and_Lock(False)
+jobproperties.CaloRecFlags.doCaloCluster.set_Value_and_Lock(False)
+jobproperties.CaloRecFlags.doCaloTopoCluster.set_Value_and_Lock(False)
+
+#this turns off CaloCluster2xAOD
+rec.doWritexAOD.set_Value_and_Lock(False)
+#nothing to say on these
+rec.doWriteTAG.set_Value_and_Lock(False)
+rec.doTruth.set_Value_and_Lock(False)
+rec.doAODCaloCells.set_Value_and_Lock(False)
+rec.doTrigger.set_Value_and_Lock(False)
+#Turns off xAODRingSetConfWriter
+rec.doCaloRinger.set_Value_and_Lock(False)
+
+#disables VertexCnvAlg
+from InDetRecExample.InDetJobProperties import jobproperties
+jobproperties.InDetJobProperties.doxAOD.set_Value_and_Lock(False)
+#Disables AllExecutedEvents
+rec.doFileMetaData.set_Value_and_Lock(False)
+
+athenaCommonFlags.EvtMax=10
+UserAlgs=["RecExRecoTest/RecExRecoTest_setupJetEtMissPFlow.py"]
+UserAlgs+=["JetRec/JetRec_jobOptions.py"]
+include ("RecExCommon/RecExCommon_topOptions.py")
+
diff --git a/Reconstruction/RecExample/RecExRecoTest/share/RecExRecoTest_ART_met_fromESD.py b/Reconstruction/RecExample/RecExRecoTest/share/RecExRecoTest_ART_met_fromESD.py
new file mode 100644
index 0000000000000000000000000000000000000000..c464f4af822b9a03d0e895c37329d57178ef0a78
--- /dev/null
+++ b/Reconstruction/RecExample/RecExRecoTest/share/RecExRecoTest_ART_met_fromESD.py
@@ -0,0 +1,33 @@
+from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
+athenaCommonFlags.FilesInput=["/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/RecExRecoTest/mc16_13TeV.361022.Pythia8EvtGen_A14NNPDF23LO_jetjet_JZ2W.recon.ESD.e3668_s3170_r10572_homeMade.pool.root"]
+
+from RecExConfig.RecFlags import rec
+rec.doEgamma.set_Value_and_Lock(False)
+rec.doMuon.set_Value_and_Lock(True)
+
+from CaloRec.CaloRecFlags import jobproperties
+jobproperties.CaloRecFlags.Enabled.set_Value_and_Lock(False)
+jobproperties.CaloRecFlags.doCaloCluster.set_Value_and_Lock(False)
+jobproperties.CaloRecFlags.doCaloTopoCluster.set_Value_and_Lock(False)
+
+#this turns off CaloCluster2xAOD
+rec.doWritexAOD.set_Value_and_Lock(False)
+#nothing to say on these
+rec.doWriteTAG.set_Value_and_Lock(False)
+rec.doTruth.set_Value_and_Lock(False)
+rec.doAODCaloCells.set_Value_and_Lock(False)
+rec.doTrigger.set_Value_and_Lock(False)
+#Turns off xAODRingSetConfWriter
+rec.doCaloRinger.set_Value_and_Lock(False)
+
+#disables VertexCnvAlg
+from InDetRecExample.InDetJobProperties import jobproperties
+jobproperties.InDetJobProperties.doxAOD.set_Value_and_Lock(False)
+#Disables AllExecutedEvents
+rec.doFileMetaData.set_Value_and_Lock(False)
+
+athenaCommonFlags.EvtMax=10
+UserAlgs=["RecExRecoTest/RecExRecoTest_setupJetEtMissPFlow.py"]
+UserAlgs+=["METReconstruction/METReconstruction_jobOptions.py"]
+include ("RecExCommon/RecExCommon_topOptions.py")
+
diff --git a/Reconstruction/RecExample/RecExRecoTest/share/RecExRecoTest_setupJetEtMissPFlow.py b/Reconstruction/RecExample/RecExRecoTest/share/RecExRecoTest_setupJetEtMissPFlow.py
new file mode 100644
index 0000000000000000000000000000000000000000..ad68c4b10b834b4aff5d7f278325c1099aa31c20
--- /dev/null
+++ b/Reconstruction/RecExample/RecExRecoTest/share/RecExRecoTest_setupJetEtMissPFlow.py
@@ -0,0 +1 @@
+from eflowRec import ScheduleCHSPFlowMods
diff --git a/Reconstruction/RecExample/RecExRecoTest/test/test_recexreco_art_jets_fromesd_compareSerialThreadings.sh b/Reconstruction/RecExample/RecExRecoTest/test/test_recexreco_art_jets_fromesd_compareSerialThreadings.sh
new file mode 100755
index 0000000000000000000000000000000000000000..978d48ba216b5949ef02259e6590a00e989b0283
--- /dev/null
+++ b/Reconstruction/RecExample/RecExRecoTest/test/test_recexreco_art_jets_fromesd_compareSerialThreadings.sh
@@ -0,0 +1,14 @@
+#!/bin/sh
+#
+# art-description: Athena runs jet reconstruction from an ESD file
+# art-type: grid
+# art-include: master/Athena
+# art-athena-mt: 8
+# art-output: serial
+# art-output: threadOne
+# art-output: threadTwo
+# art-output: threadFive
+
+export ATHENA_CORE_NUMBER=8
+
+test_compare_SerialAndThreadedAthenas.sh RecExRecoTest/RecExRecoTest_ART_jets_fromESD.py
diff --git a/Reconstruction/RecExample/RecExRecoTest/test/test_recexreco_art_met_fromesd_compareSerialThreadings.sh b/Reconstruction/RecExample/RecExRecoTest/test/test_recexreco_art_met_fromesd_compareSerialThreadings.sh
new file mode 100755
index 0000000000000000000000000000000000000000..e1ef67e74d7330b97b6dc80f586d4408e25ad16b
--- /dev/null
+++ b/Reconstruction/RecExample/RecExRecoTest/test/test_recexreco_art_met_fromesd_compareSerialThreadings.sh
@@ -0,0 +1,14 @@
+#!/bin/sh
+#
+# art-description: Athena runs met reconstruction from an ESD file
+# art-type: grid
+# art-include: master/Athena
+# art-athena-mt: 8
+# art-output: serial
+# art-output: threadOne
+# art-output: threadTwo
+# art-output: threadFive
+
+export ATHENA_CORE_NUMBER=8
+
+test_compare_SerialAndThreadedAthenas.sh RecExRecoTest/RecExRecoTest_ART_met_fromESD.py
diff --git a/Reconstruction/RecoAlgs/CaloRingerAlgs/src/xAODRingSetConfWriter.cxx b/Reconstruction/RecoAlgs/CaloRingerAlgs/src/xAODRingSetConfWriter.cxx
index b06340afea9f3bd7f68a816d1d87a37424f465f5..a018816888000d2280b73a9d932bdebde205ff96 100644
--- a/Reconstruction/RecoAlgs/CaloRingerAlgs/src/xAODRingSetConfWriter.cxx
+++ b/Reconstruction/RecoAlgs/CaloRingerAlgs/src/xAODRingSetConfWriter.cxx
@@ -153,6 +153,7 @@ StatusCode xAODRingSetConfWriter::searchAndCopyCLID(
   // Now loop retrieving them and recording on the outputContainer:
   for ( const auto &key : ringConfKeys ) 
   {
+    if (key[0] == ';' && key[3] == ';') continue; // ignore versioned keys
     ATH_MSG_VERBOSE( "Attempting to copy " << key );
 
     // Check if this meta isn't already available in output, if so, do not copy it:
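
The index test above treats keys of the form ";XX;Name" (semicolons at positions 0 and 3) as versioned. A small hedged helper that spells out the same convention and additionally guards against very short keys:

```cpp
#include <string>

// Illustrative equivalent of the inline check in the loop above; the extra size
// guard avoids reading past the end of very short keys.
inline bool isVersionedKey(const std::string& key) {
  return key.size() > 3 && key[0] == ';' && key[3] == ';';
}
```
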
diff --git a/Reconstruction/RecoTools/TrackToCalo/CMakeLists.txt b/Reconstruction/RecoTools/TrackToCalo/CMakeLists.txt
index 1635c231b001b7f1e09e300d9880d1fbc56dc7db..a8bb5c3d698e3dd7cde05d794626841371976757 100644
--- a/Reconstruction/RecoTools/TrackToCalo/CMakeLists.txt
+++ b/Reconstruction/RecoTools/TrackToCalo/CMakeLists.txt
@@ -12,9 +12,11 @@ atlas_depends_on_subdirs( PUBLIC
                           DetectorDescription/GeoPrimitives
                           Event/xAOD/xAODCaloEvent
                           GaudiKernel
+                          InnerDetector/InDetRecTools/InDetTrackSelectionTool
                           Reconstruction/RecoEvent/ParticleCaloExtension
                           Reconstruction/RecoTools/RecoToolInterfaces
                           Reconstruction/TrackCaloClusterRec/TrackCaloClusterRecTools
+                          Reconstruction/RecoTools/ITrackToVertex
                           Tracking/TrkEvent/TrkCaloExtension
                           Tracking/TrkEvent/TrkParametersIdentificationHelpers
                           PRIVATE
@@ -24,17 +26,20 @@ atlas_depends_on_subdirs( PUBLIC
                           Calorimeter/CaloUtils
                           Control/AthenaBaseComps
                           DetectorDescription/AtlasDetDescr
+                          Event/EventKernel
                           Event/FourMomUtils
                           Event/xAOD/xAODTracking
                           Event/xAOD/xAODMuon
-			  Event/xAOD/xAODEgamma
+			              Event/xAOD/xAODEgamma
                           Event/xAOD/xAODTruth
                           Reconstruction/RecoTools/ParticlesInConeTools
                           InnerDetector/InDetRecTools/TrackVertexAssociationTool
                           Tracking/TrkDetDescr/TrkSurfaces
                           Tracking/TrkEvent/TrkEventPrimitives
                           Tracking/TrkEvent/TrkParameters
+                          Tracking/TrkEvent/TrkTrackLink
                           Tracking/TrkEvent/TrkTrack
+                          Tracking/TrkEvent/VxVertex
                           Tracking/TrkExtrapolation/TrkExInterfaces
                           Tracking/TrkTools/TrkToolInterfaces 
 			  Control/CxxUtils )
@@ -48,13 +53,15 @@ atlas_add_library( TrackToCaloLib
                    PUBLIC_HEADERS TrackToCalo
                    INCLUDE_DIRS ${EIGEN_INCLUDE_DIRS}
                    LINK_LIBRARIES ${EIGEN_LIBRARIES} CaloEvent CaloGeoHelpers GeoPrimitives xAODCaloEvent GaudiKernel ParticleCaloExtension RecoToolInterfaces 
-		   TrackCaloClusterRecToolsLib TrkCaloExtension TrkParametersIdentificationHelpers CaloDetDescrLib CaloUtilsLib
-		   PRIVATE_LINK_LIBRARIES CaloIdentifier AthenaBaseComps AtlasDetDescr FourMomUtils xAODTracking xAODMuon xAODEgamma xAODTruth TrkSurfaces 
-		   TrkEventPrimitives TrkParameters TrkTrack TrkExInterfaces TrkToolInterfaces CxxUtils)
+                   TrackCaloClusterRecToolsLib TrkCaloExtension TrkParametersIdentificationHelpers CaloDetDescrLib CaloUtilsLib
+                   PRIVATE_LINK_LIBRARIES CaloIdentifier AthenaBaseComps AtlasDetDescr FourMomUtils xAODTracking xAODMuon xAODEgamma xAODTruth TrkSurfaces 
+                   TrkEventPrimitives TrkParameters TrkTrack TrkExInterfaces TrkToolInterfaces CxxUtils StoreGateLib EventKernel )
 
-atlas_add_component( TrackToCalo src/components/*.cxx
-   LINK_LIBRARIES TrkEventPrimitives TrkExInterfaces RecoToolInterfaces
-   AthenaBaseComps ParticleCaloExtension StoreGateLib CxxUtils
-   TrackVertexAssociationToolLib GaudiKernel TrackToCaloLib )
+atlas_add_component( TrackToCalo
+                     src/components/*.cxx
+                     INCLUDE_DIRS ${EIGEN_INCLUDE_DIRS}
+                     LINK_LIBRARIES GaudiKernel TrackToCaloLib InDetTrackSelectionToolLib 
+                     ITrackToVertex TrkLinks VxVertex TrackVertexAssociationToolLib)
 
+atlas_install_joboptions( share/*.py )
 atlas_install_python_modules( python/*.py )
diff --git a/Reconstruction/RecoTools/TrackToCalo/share/CaloExtensionBuilderAlg_jobOptions.py b/Reconstruction/RecoTools/TrackToCalo/share/CaloExtensionBuilderAlg_jobOptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..6183c995d8e761bcab0ecc79a5e904cbc0f9ac34
--- /dev/null
+++ b/Reconstruction/RecoTools/TrackToCalo/share/CaloExtensionBuilderAlg_jobOptions.py
@@ -0,0 +1,83 @@
+## CaloExtensionBuilderAlg setup
+# Defined as a function so that the user can change the cut level and minPt
+
+import traceback
+from AthenaCommon.Logging import logging
+mlog = logging.getLogger('CaloExtensionBuilderAlg_jobOptions')
+
+def CaloExtensionBuilder( cutLevel = "TightPrimary", minPT = 100.0 ):
+    try: 
+        from TrkExTools.AtlasExtrapolator import AtlasExtrapolator
+        from TrackToCalo.TrackToCaloConf import Trk__ParticleCaloExtensionTool
+    except:
+        mlog.error("could not import TrackToCaloConf.Trk__ParticleCaloExtensionTool")
+        mlog.error("could not import TrkExTools.AtlasExtrapolator")
+        print traceback.format_exc()
+    try:
+        from TrackToCalo.TrackToCaloConf import Trk__CaloExtensionBuilderAlg as CaloExtensionBuilderAlg
+    except:
+        mlog.error("could not import TrackToCaloConf.Trk__CaloExtensionBuilderAlg")
+        print traceback.format_exc()
+    try:
+        from InDetTrackSelectionTool.InDetTrackSelectionToolConf import InDet__InDetTrackSelectionTool
+        from InDetTrackSelectorTool.InDetTrackSelectorToolConf import InDet__InDetDetailedTrackSelectorTool
+    except:
+        mlog.error("could not import InDetTrackSelectionTool.InDet__InDetTrackSelectionTool")
+        print traceback.format_exc()
+    try:
+        from AthenaCommon.AppMgr import ToolSvc
+    except:
+        mlog.error("could not import ToolSvc")
+        print traceback.format_exc()
+    try:
+        from AthenaCommon.AlgSequence import AlgSequence
+    except:
+        mlog.error("could not import AlgSequence")
+        print traceback.format_exc()
+
+    topSequence=AlgSequence()
+    
+    theAtlasExtrapolator=AtlasExtrapolator(name = "CaloExtensionBuilderAtlasExtrapolator")
+    theAtlasExtrapolator.DoCaloDynamic = False # this turns off dynamic
+
+    pcExtensionTool = Trk__ParticleCaloExtensionTool(Extrapolator = theAtlasExtrapolator)
+    ToolSvc += pcExtensionTool
+
+    CaloExtensionBuilderTool = CaloExtensionBuilderAlg(LastCaloExtentionTool = pcExtensionTool)
+    TrackSelectionToolHC = InDet__InDetTrackSelectionTool(name            = "CaloExtensionBuilderTrackSelectionTool",
+                                                          minPt           = minPT,
+                                                          CutLevel        = cutLevel)#,
+                                                        #   maxD0           = 9999.*mm,
+                                                        #   maxZ0           = 9999.*mm,                                                                 
+                                                        #   minNPixelHits   = 2,  # PixelHits + PixelDeadSensors
+                                                        #   minNSctHits     = 0,  # SCTHits + SCTDeadSensors
+                                                        #   minNSiHits      = 7,  # PixelHits + SCTHits + PixelDeadSensors + SCTDeadSensors
+                                                        #   minNTrtHits     = 0)
+    TrackDetailedSelectionToolHC = InDet__InDetDetailedTrackSelectorTool(name = "CaloExtensionBuilderDetailedTrackSelectionTool",
+                                                                         pTMin                = minPT,
+                                                                         IPd0Max              = 1.,
+                                                                         IPz0Max              = 1.5, 
+                                                                         useTrackSummaryInfo  = True,
+                                                                         nHitBLayer           = 0, 
+                                                                         nHitPix              = 2,  # PixelHits + PixelDeadSensors
+                                                                         nHitSct              = 0,  # SCTHits + SCTDeadSensors
+                                                                         nHitSi               = 7,  # PixelHits + SCTHits + PixelDeadSensors + SCTDeadSensors
+                                                                         nHitTrt              = 0,  # nTRTHits
+                                                                         useSharedHitInfo     = False,
+                                                                         nSharedBLayer        = 99999,
+                                                                         nSharedPix           = 99999,
+                                                                         nSharedSct           = 99999,
+                                                                         nSharedSi            = 99999,
+                                                                         useTrackQualityInfo  = True,
+                                                                         fitChi2OnNdfMax      = 99999,
+                                                                         TrackSummaryTool     = None,
+                                                                         Extrapolator         = theAtlasExtrapolator)
+
+    ToolSvc += TrackSelectionToolHC
+    ToolSvc += TrackDetailedSelectionToolHC
+
+    CaloExtensionBuilderTool.TrkSelection         = TrackSelectionToolHC
+    CaloExtensionBuilderTool.TrkDetailedSelection = TrackDetailedSelectionToolHC
+
+    ToolSvc += CaloExtensionBuilderTool.LastCaloExtentionTool
+
+    topSequence += CaloExtensionBuilderTool
+
+    return True
diff --git a/Reconstruction/RecoTools/TrackToCalo/src/CaloExtensionBuilderAlg.cxx b/Reconstruction/RecoTools/TrackToCalo/src/CaloExtensionBuilderAlg.cxx
new file mode 100644
index 0000000000000000000000000000000000000000..e9bc83f6f6494e73c904251dfbd00a319a0f70b3
--- /dev/null
+++ b/Reconstruction/RecoTools/TrackToCalo/src/CaloExtensionBuilderAlg.cxx
@@ -0,0 +1,133 @@
+/*
+   Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
+*/
+
+/********************************************************************
+NAME:     CaloExtensionBuilderAlg
+PACKAGE:  offline/Reconstruction/RecoTools/TrackToCalo/CaloExtensionBuilderAlg
+
+Based on: offline/Reconstruction/egamma/egammaTrackTools/EMGSFCaloExtensionBuilder
+
+AUTHORS:  Anastopoulos/Capriles
+PURPOSE:  Performs Calo Extension for all selected tracks 
+ **********************************************************************/
+#include "CaloExtensionBuilderAlg.h"
+//
+#include "xAODTracking/TrackParticleContainer.h"
+#include "xAODTracking/TrackParticleAuxContainer.h"
+#include "xAODTracking/TrackParticle.h"
+#include "StoreGate/ReadHandle.h"
+#include "StoreGate/WriteHandle.h"
+
+#include "VxVertex/RecVertex.h"
+#include "VxVertex/VxCandidate.h"
+
+#include "xAODTracking/VertexContainer.h"
+#include "xAODTracking/Vertex.h"
+
+//std includes
+#include <algorithm>
+#include <cmath>
+#include <memory>
+
+StatusCode Trk::CaloExtensionBuilderAlg::initialize() 
+{
+    IToolSvc* myToolSvc;
+
+    ATH_CHECK(m_TrkSelection.retrieve());  
+    ATH_CHECK(m_TrkDetailedSelection.retrieve());  
+    ATH_CHECK(m_particleCaloExtensionTool.retrieve());
+
+    ATH_CHECK(m_ParticleCacheKey.initialize());
+    ATH_CHECK(m_TrkPartContainerKey.initialize());
+    ATH_CHECK(m_vertexInputContainer.initialize());
+
+    if (service("ToolSvc", myToolSvc).isFailure()) {
+        ATH_MSG_WARNING(" Tool Service Not Found");
+        return StatusCode::SUCCESS;
+    }
+
+    if(m_TrkSelection.retrieve().isFailure()){
+        ATH_MSG_ERROR("initialize: Cannot retrieve " << m_TrkSelection);
+        return StatusCode::FAILURE;
+    }else {
+        ATH_MSG_VERBOSE("Successfully retrieved Extrapolation tool "
+                << m_TrkSelection.typeAndName());
+    }
+
+    if(m_TrkDetailedSelection.retrieve().isFailure()){
+        ATH_MSG_ERROR("initialize: Cannot retrieve " << m_TrkDetailedSelection);
+        return StatusCode::FAILURE;
+    }else {
+        ATH_MSG_VERBOSE("Successfully retrieved Extrapolation tool "
+                << m_TrkDetailedSelection.typeAndName());
+    }
+
+    if(m_particleCaloExtensionTool.retrieve().isFailure()){
+        ATH_MSG_ERROR("initialize: Cannot retrieve " << m_particleCaloExtensionTool);
+        return StatusCode::FAILURE;
+    } else {
+        ATH_MSG_VERBOSE("Successfully retrieved Extrapolation tool "
+                << m_particleCaloExtensionTool.typeAndName());
+    }
+    return StatusCode::SUCCESS;
+}  
+
+StatusCode Trk::CaloExtensionBuilderAlg::finalize(){
+    return StatusCode::SUCCESS;
+}
+
+StatusCode Trk::CaloExtensionBuilderAlg::execute()
+{
+    // defining needed objects
+    const xAOD::VertexContainer * vxContainer = 0;
+    const xAOD::Vertex*         primaryVertex = 0;
+
+    SG::ReadHandle<xAOD::VertexContainer> vertexInHandle( m_vertexInputContainer );
+    SG::ReadHandle<xAOD::TrackParticleContainer> tracks(m_TrkPartContainerKey);
+
+    // checking for tracks and vertices being read correctly
+    if (!vertexInHandle.isValid()) {
+      ATH_MSG_ERROR ("Could not retrieve HiveDataObj with key " << vertexInHandle.key());
+      return StatusCode::FAILURE;
+    }
+    if(!tracks.isValid()) {
+        ATH_MSG_FATAL("Failed to retrieve TrackParticle container: "<< m_TrkPartContainerKey.key());
+        return StatusCode::FAILURE;
+    }
+
+    // picking primary vertex
+    vxContainer = vertexInHandle.cptr();
+    if (vxContainer->size()>0) {
+      // simple loop through and get the primary vertex
+      xAOD::VertexContainer::const_iterator vxIter    = vxContainer->begin();
+      xAOD::VertexContainer::const_iterator vxIterEnd = vxContainer->end();
+      for ( size_t ivtx = 0; vxIter != vxIterEnd; ++vxIter, ++ivtx ){
+	    // the first and only primary vertex candidate is picked
+        if ( (*vxIter)->vertexType() ==  xAOD::VxType::PriVtx){
+            primaryVertex = (*vxIter);
+            break;
+        }
+      }
+    }
+    ATH_MSG_VERBOSE("size of VxPrimaryContainer is: "  << vxContainer->size() );
+
+    // creating and saving the calo extension collection
+    SG::WriteHandle<CaloExtensionCollection> lastCache(m_ParticleCacheKey); 
+    ATH_CHECK(lastCache.record(std::make_unique<CaloExtensionCollection>()));
+
+    const xAOD::TrackParticleContainer* ptrTracks=tracks.cptr();
+    CaloExtensionCollection* ptrPart=lastCache.ptr();
+    std::vector<bool> mask (ptrTracks->size(),false);
+    for (auto track: *tracks){
+      if( static_cast<bool>(m_TrkSelection->accept(*track, nullptr)) || 
+          m_TrkDetailedSelection->decision(*track, primaryVertex)    || 
+          m_TrkDetailedSelection->decision(*track, (*vxContainer)[0]) ) {
+        mask[track->index()] = true;
+      }
+    }
+
+    ATH_CHECK(m_particleCaloExtensionTool->caloExtensionCollection(*ptrTracks,mask,*ptrPart));
+
+    return StatusCode::SUCCESS;
+}
\ No newline at end of file
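
The selection in execute() follows a mask pattern: one boolean per input track, and only tracks flagged true are extrapolated by caloExtensionCollection(). A hedged sketch of that pattern in isolation; selectTrack stands in for the combination of selection tools and is not a real tool interface.

```cpp
#include <vector>
#include "xAODTracking/TrackParticleContainer.h"

std::vector<bool> buildMask(const xAOD::TrackParticleContainer& tracks,
                            bool (*selectTrack)(const xAOD::TrackParticle&)) {
  std::vector<bool> mask(tracks.size(), false);
  for (const xAOD::TrackParticle* trk : tracks) {
    if (selectTrack(*trk)) {
      mask[trk->index()] = true;   // index() matches the position in the container
    }
  }
  return mask;                     // passed to caloExtensionCollection() with the tracks
}
```
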
diff --git a/Reconstruction/RecoTools/TrackToCalo/src/CaloExtensionBuilderAlg.h b/Reconstruction/RecoTools/TrackToCalo/src/CaloExtensionBuilderAlg.h
new file mode 100644
index 0000000000000000000000000000000000000000..d7e12ea2e4de5be8b47f0f997eef2c982218a6ae
--- /dev/null
+++ b/Reconstruction/RecoTools/TrackToCalo/src/CaloExtensionBuilderAlg.h
@@ -0,0 +1,68 @@
+/*
+   Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
+ */
+
+#ifndef REC_HEADCALOEXTRAPOLATIONTOOL_H
+#define REC_HEADCALOEXTRAPOLATIONTOOL_H
+/**
+  @class CaloExtensionBuilderAlg
+  Based on EMGSFCaloExtensionBuilder
+  Algorithm which creates calo extension for all
+  Track Particles
+  */
+#include "AthenaBaseComps/AthAlgorithm.h"
+#include "GaudiKernel/ToolHandle.h"
+#include "StoreGate/ReadHandleKey.h"
+#include "StoreGate/WriteHandleKey.h"
+
+#include "TrkCaloExtension/CaloExtensionCollection.h"
+#include "RecoToolInterfaces/IParticleCaloExtensionTool.h"
+
+#include "xAODTracking/TrackParticleContainer.h"
+#include "xAODTracking/VertexContainer.h"
+#include "xAODTracking/Vertex.h"
+
+#include "VxVertex/RecVertex.h"
+#include "ITrackToVertex/ITrackToVertex.h"
+#include "TrkToolInterfaces/ITrackSelectorTool.h"
+#include "InDetTrackSelectionTool/IInDetTrackSelectionTool.h"
+
+namespace Trk 
+{
+  class CaloExtensionBuilderAlg : public AthAlgorithm//, virtual public ITrackSelectorTool
+  {
+  public:
+  using AthAlgorithm::AthAlgorithm;
+
+  virtual StatusCode initialize() override final;
+  virtual StatusCode finalize() override final;
+  virtual StatusCode execute() override final;
+
+  private:
+  /** @brief the Calo Extension tool*/
+  ToolHandle<Trk::IParticleCaloExtensionTool> m_particleCaloExtensionTool {this,
+      "LastCaloExtentionTool", "Trk::CaloExtensionBuilderTool"};
+  
+  //Manages the track selection. It should be able to handle both pflow and tau selections
+  ToolHandle<InDet::IInDetTrackSelectionTool> m_TrkSelection {this,
+      "TrkSelection", "TrkSelectionCaloExtensionBuilder", "Tool that handles the track selection"};
+
+  ToolHandle<Trk::ITrackSelectorTool> m_TrkDetailedSelection {this,
+      "TrkDetailedSelection", "TrkDetailedSelectionCaloExtensionBuilder", "Tool that handles the detailed track selection"};
+
+  //output particle calo extension collection
+  SG::WriteHandleKey<CaloExtensionCollection>  m_ParticleCacheKey{this,
+      "ParticleCache", "ParticleCaloExtension", "Name of the particle measurement extrapolation cache"};
+
+  //input Track collection and vertex
+  SG::ReadHandleKey<xAOD::TrackParticleContainer> m_TrkPartContainerKey {this,
+      "TrkPartContainerName", "InDetTrackParticles", "Container of tracks"};
+
+  //input Vertex collection
+  SG::ReadHandleKey<xAOD::VertexContainer> m_vertexInputContainer{this,
+      "vertexInputContainer", "PrimaryVertices", "input vertex container key"};
+
+  };
+}
+
+#endif //
\ No newline at end of file
diff --git a/Reconstruction/RecoTools/TrackToCalo/src/ParticleCaloExtensionTool.cxx b/Reconstruction/RecoTools/TrackToCalo/src/ParticleCaloExtensionTool.cxx
index 657f62db5e9b0f9e34b270de8944f063f8135b87..9c29df21975df178e8a89a7f7ad38d991e39a349 100644
--- a/Reconstruction/RecoTools/TrackToCalo/src/ParticleCaloExtensionTool.cxx
+++ b/Reconstruction/RecoTools/TrackToCalo/src/ParticleCaloExtensionTool.cxx
@@ -102,6 +102,7 @@ StatusCode ParticleCaloExtensionTool::caloExtensionCollection( const xAOD::IPart
                                                                const std::vector<bool>& mask,
                                                                CaloExtensionCollection& caloextensions) const{
   const size_t numparticles=particles.size();   
+  
   if(mask.size()!=numparticles){
     ATH_MSG_ERROR("mask does not have the same size as in input collection");
     return StatusCode::FAILURE;
diff --git a/Reconstruction/RecoTools/TrackToCalo/src/components/TrackToCalo_entries.cxx b/Reconstruction/RecoTools/TrackToCalo/src/components/TrackToCalo_entries.cxx
index edc15e23bb63fd03b663d242f15e23169fc848d5..9f92120cee85e164000342d014acfc1feb772956 100644
--- a/Reconstruction/RecoTools/TrackToCalo/src/components/TrackToCalo_entries.cxx
+++ b/Reconstruction/RecoTools/TrackToCalo/src/components/TrackToCalo_entries.cxx
@@ -2,6 +2,7 @@
 #include "../ParticleCaloCellAssociationTool.h"
 #include "../ParticleCaloClusterAssociationTool.h"
 #include "../MuonCaloEnergyTool.h"
+#include "../CaloExtensionBuilderAlg.h"
 
 using namespace Trk;
 using namespace Rec;
@@ -9,4 +10,4 @@ DECLARE_COMPONENT( ParticleCaloExtensionTool )
 DECLARE_COMPONENT( ParticleCaloCellAssociationTool )
 DECLARE_COMPONENT( ParticleCaloClusterAssociationTool )
 DECLARE_COMPONENT( MuonCaloEnergyTool )
-
+DECLARE_COMPONENT( CaloExtensionBuilderAlg )
diff --git a/Reconstruction/eflowRec/eflowRec/eflowTrackCaloExtensionTool.h b/Reconstruction/eflowRec/eflowRec/eflowTrackCaloExtensionTool.h
index 323530356f4d3329c4665ee276ee86ad7a22a034..7750b6f2d82dbd4b38f91e184b6fd22c6af1ac4c 100644
--- a/Reconstruction/eflowRec/eflowRec/eflowTrackCaloExtensionTool.h
+++ b/Reconstruction/eflowRec/eflowRec/eflowTrackCaloExtensionTool.h
@@ -53,6 +53,13 @@ private:
 
   std::unique_ptr<Trk::TrackParametersIdHelper> m_trackParametersIdHelper;
 
+  //output particle calo extension collection
+  SG::ReadHandleKey<CaloExtensionCollection>  m_ParticleCacheKey{this,
+      "PFParticleCache", "ParticleCaloExtension", "Name of the particle measurement extrapolation cache"};
+
+  bool m_useOldCalo;
+
+
 };
 
 inline const InterfaceID& eflowTrackCaloExtensionTool::interfaceID() {
diff --git a/Reconstruction/eflowRec/eflowRec/eflowTrackCaloPoints.h b/Reconstruction/eflowRec/eflowRec/eflowTrackCaloPoints.h
index df8a04f73b2fa80769d46b9321c679389b9e1867..b22dfdb08994b6966f8420e274bafb9661cfd0e6 100644
--- a/Reconstruction/eflowRec/eflowRec/eflowTrackCaloPoints.h
+++ b/Reconstruction/eflowRec/eflowRec/eflowTrackCaloPoints.h
@@ -30,7 +30,7 @@ This class stores a map of calorimeter layers and track parameters (the result o
 class eflowTrackCaloPoints {
  public:
 
-  eflowTrackCaloPoints(std::map<eflowCalo::LAYER, const Trk::TrackParameters*> trackParameters);
+  eflowTrackCaloPoints(const std::map<eflowCalo::LAYER, const Trk::TrackParameters*> & trackParameters);
  eflowTrackCaloPoints() : m_isEM1Barrel(false), m_isEM2Barrel(false)  {}
   ~eflowTrackCaloPoints();
 
@@ -60,7 +60,6 @@ class eflowTrackCaloPoints {
 
   Amg::Vector3D parToPosition(const Trk::TrackParameters* extrapolatedParameters);
   Amg::Vector3D parToDirection(const Trk::TrackParameters* extrapolatedParameters);
-  const Trk::TrackParameters* getParameters(eflowCalo::LAYER layer);
 
   bool m_isEM1Barrel;
   bool m_isEM2Barrel;
@@ -76,7 +75,6 @@ class eflowTrackCaloPoints {
   static const std::pair<float, float>  m_defaultEtaPhiPair;
   static const eflowEtaPhiPosition m_defaultEtaPhiPosition;
 
-  std::map<eflowCalo::LAYER, const Trk::TrackParameters*> m_trackParameters;
   std::map<eflowCalo::LAYER, Amg::Vector3D > m_positions;
   std::map<eflowCalo::LAYER, Amg::Vector3D > m_directions;
   std::map<eflowCalo::LAYER, eflowEtaPhiPosition>  m_etaPhiPositions;
diff --git a/Reconstruction/eflowRec/eflowRec/eflowTrackExtrapolatorBaseAlgTool.h b/Reconstruction/eflowRec/eflowRec/eflowTrackExtrapolatorBaseAlgTool.h
index d1a848671114b182190bb88b49a6c843307fe637..f5718a06b60e38e33d76529b5bce0066486c8fe6 100644
--- a/Reconstruction/eflowRec/eflowRec/eflowTrackExtrapolatorBaseAlgTool.h
+++ b/Reconstruction/eflowRec/eflowRec/eflowTrackExtrapolatorBaseAlgTool.h
@@ -27,7 +27,7 @@ Pure virtual base class, inherits from IAlgTool. Defines execute method which ta
 */
 class eflowTrackExtrapolatorBaseAlgTool : virtual public IAlgTool {
  public:
-  virtual std::unique_ptr<eflowTrackCaloPoints> execute(const xAOD::TrackParticle* track) const = 0;
+  virtual std::unique_ptr<eflowTrackCaloPoints> execute(const xAOD::TrackParticle* track) const = 0;  
 };
 
 #endif
diff --git a/Reconstruction/eflowRec/share/PFlowMTConfig.py b/Reconstruction/eflowRec/share/PFlowMTConfig.py
index 5463ee1ccc801d817a2bf4f08219a029e26f2659..6ec524172d87d2651fce4a8257fb09c6bdd994ad 100644
--- a/Reconstruction/eflowRec/share/PFlowMTConfig.py
+++ b/Reconstruction/eflowRec/share/PFlowMTConfig.py
@@ -4,7 +4,6 @@ topSequence += PFLeptonSelector
 
 from eflowRec.eflowRecConf import PFTrackSelector
 PFTrackSelector=PFTrackSelector("PFTrackSelector")
-
 from TrkExTools.AtlasExtrapolator import AtlasExtrapolator
 from TrackToCalo.TrackToCaloConf import Trk__ParticleCaloExtensionTool
 pcExtensionTool = Trk__ParticleCaloExtensionTool(Extrapolator = AtlasExtrapolator())
diff --git a/Reconstruction/eflowRec/share/run_ESDStandardReco.py b/Reconstruction/eflowRec/share/run_ESDStandardReco.py
index 2ff0fd0076384ef5741292055d6813439e1c2a95..d84ebb3c8ea0476f8c80df5298a384f7be051850 100644
--- a/Reconstruction/eflowRec/share/run_ESDStandardReco.py
+++ b/Reconstruction/eflowRec/share/run_ESDStandardReco.py
@@ -22,4 +22,3 @@ athenaCommonFlags.EvtMax=10
 #Run pflopw jet finding - this cannot be enabled via reconstruction flags currently! (without enabling other things we don't want)
 UserAlgs = ["eflowRec/jetAlgs.py"]
 include ("RecExCommon/RecExCommon_topOptions.py")
-
diff --git a/Reconstruction/eflowRec/src/eflowTrackCaloExtensionTool.cxx b/Reconstruction/eflowRec/src/eflowTrackCaloExtensionTool.cxx
index e1b7122097cd118c5b2f282e2ef9083bb45446e4..d70244d5e054673ecf44ff89ba01c9fb3cbfb5a5 100644
--- a/Reconstruction/eflowRec/src/eflowTrackCaloExtensionTool.cxx
+++ b/Reconstruction/eflowRec/src/eflowTrackCaloExtensionTool.cxx
@@ -58,6 +58,16 @@ StatusCode eflowTrackCaloExtensionTool::initialize() {
 		    << m_theTrackExtrapolatorTool.typeAndName());
   }
 
+  /* Fall back to the CaloExtensionTool if the CaloExtensionBuilder cache cannot be set up */
+  if (m_ParticleCacheKey.initialize().isFailure()) {
+    ATH_MSG_WARNING("Setting up the CaloExtensionTool to replace CaloExtensionBuilder");
+    ATH_CHECK( m_theTrackExtrapolatorTool.retrieve() );
+    m_useOldCalo = true;
+  } else {
+    m_useOldCalo = false;
+  }
+
   return StatusCode::SUCCESS;
 }
 
@@ -65,23 +75,45 @@ std::unique_ptr<eflowTrackCaloPoints> eflowTrackCaloExtensionTool::execute(const
 
   ATH_MSG_VERBOSE(" Now running eflowTrackCaloExtensionTool");
 
-  /*make the map*/
+  /*Create a map to index the TrackParameters at the calo (owned by the extension) with respect to the layers*/
   std::map<eflowCalo::LAYER, const Trk::TrackParameters*> parametersMap;
 
   /*get the CaloExtension object*/
-  std::unique_ptr<Trk::CaloExtension> extension = m_theTrackExtrapolatorTool->caloExtension(*track);
-  if (extension.get()!=nullptr) {
+  const Trk::CaloExtension * extension = nullptr;
+  std::unique_ptr<Trk::CaloExtension> uniqueExtension;
+  const int index = track->index();
+
+  if (m_useOldCalo) {
+    /* If CaloExtensionBuilder is unavailable, use the calo extension tool */
+    ATH_MSG_VERBOSE("Using the CaloExtensionTool");
+    uniqueExtension = m_theTrackExtrapolatorTool->caloExtension(*track);
+    extension = uniqueExtension.get();
+  } else {
+    /*get the CaloExtension object from the CaloExtensionBuilder cache*/
+    ATH_MSG_VERBOSE("Using the CaloExtensionBuilder Cache");
+    SG::ReadHandle<CaloExtensionCollection>  particleCache {m_ParticleCacheKey};
+    ATH_MSG_VERBOSE("Getting element " << index << " from the particleCache");
+    extension = (*particleCache)[index];
+    if( not extension ){
+      ATH_MSG_VERBOSE("Cache does not contain a calo extension -> Calculating with the CaloExtensionTool" );
+      uniqueExtension = m_theTrackExtrapolatorTool->caloExtension(*track);
+      extension = uniqueExtension.get();
+    }
+  }
+  
+  if (extension != nullptr) {
 
     /*extract the CurvilinearParameters*/
     const std::vector<const Trk::CurvilinearParameters*>& clParametersVector = extension->caloLayerIntersections();
 
-    /*fill the map*/
-    for (auto clParameter : clParametersVector) {
-      if (parametersMap[getLayer(clParameter)] == NULL) {
-        parametersMap[getLayer(clParameter)] = clParameter->clone();
+    /* The parameters are owned by the CaloExtension, so the eflowTrackCaloPoints
+     * does not take ownership */
+    for ( const Trk::CurvilinearParameters * clParameter : clParametersVector) {
+      if (parametersMap[getLayer(clParameter)] == nullptr) {
+        parametersMap[getLayer(clParameter)] = clParameter;
       } else if (m_trackParametersIdHelper->isEntryToVolume(clParameter->cIdentifier())) {
-        delete parametersMap[getLayer(clParameter)];
-        parametersMap[getLayer(clParameter)] = clParameter->clone();
+        parametersMap[getLayer(clParameter)] = clParameter;
       }
     }
     /*
@@ -91,14 +123,13 @@ std::unique_ptr<eflowTrackCaloPoints> eflowTrackCaloExtensionTool::execute(const
     */
     
     return std::make_unique<eflowTrackCaloPoints>(parametersMap);
+
   }
   else{
     if (track->pt() > 3*Gaudi::Units::GeV) ATH_MSG_WARNING("TrackExtension failed for track with pt and eta " << track->pt() << " and " << track->eta());
     parametersMap[eflowCalo::LAYER::Unknown] = nullptr;
     return std::make_unique<eflowTrackCaloPoints>(parametersMap);
   }
-
-
 }
 
 StatusCode eflowTrackCaloExtensionTool::finalize() {
diff --git a/Reconstruction/eflowRec/src/eflowTrackCaloPoints.cxx b/Reconstruction/eflowRec/src/eflowTrackCaloPoints.cxx
index 5e2290c4106896d6dd86483cdf24d9666edf3449..90b6cc79f03ed0d61c02a5fcc659b3febbdfa4c1 100644
--- a/Reconstruction/eflowRec/src/eflowTrackCaloPoints.cxx
+++ b/Reconstruction/eflowRec/src/eflowTrackCaloPoints.cxx
@@ -28,12 +28,12 @@ const eflowEtaPhiPosition eflowTrackCaloPoints::m_defaultEtaPhiPosition = eflowE
 double eflowTrackCaloPoints::defaultEta()  {return (double)m_defaultEtaPhiPair.first;}
 double eflowTrackCaloPoints::defaultPhi()  {return (double)m_defaultEtaPhiPair.second;}
 
-eflowTrackCaloPoints::eflowTrackCaloPoints(std::map<eflowCalo::LAYER, const Trk::TrackParameters*> trackParameters):
-  m_isEM1Barrel(trackParameters.begin()->first == eflowCalo::EMB1),m_trackParameters(trackParameters) {
+eflowTrackCaloPoints::eflowTrackCaloPoints(const std::map<eflowCalo::LAYER, const Trk::TrackParameters*> & trackParameters):
+  m_isEM1Barrel(trackParameters.begin()->first == eflowCalo::EMB1) {
 
   /* Fill etaPhiPositions map */
-  std::map<eflowCalo::LAYER, const Trk::TrackParameters*>::iterator itPars = m_trackParameters.begin();
-  std::map<eflowCalo::LAYER, const Trk::TrackParameters*>::iterator endPars = m_trackParameters.end();
+  std::map<eflowCalo::LAYER, const Trk::TrackParameters*>::const_iterator itPars = trackParameters.begin();
+  std::map<eflowCalo::LAYER, const Trk::TrackParameters*>::const_iterator endPars = trackParameters.end();
   m_isEM2Barrel = false;
   for (; itPars != endPars; ++itPars) {
     setEtaPhi(itPars->first, parToPosition(itPars->second));
@@ -46,12 +46,6 @@ eflowTrackCaloPoints::eflowTrackCaloPoints(std::map<eflowCalo::LAYER, const Trk:
 
 eflowTrackCaloPoints::~eflowTrackCaloPoints() {
 
-  std::map<eflowCalo::LAYER, const Trk::TrackParameters*>::iterator itPars = m_trackParameters.begin();
-  std::map<eflowCalo::LAYER, const Trk::TrackParameters*>::iterator endPars = m_trackParameters.end();
-
-  for (; itPars != endPars; ++itPars) {
-    delete itPars->second;
-  }
 }
 
 void eflowTrackCaloPoints::setEtaPhi(eflowCalo::LAYER lay, const Amg::Vector3D& vec) {
@@ -73,11 +67,6 @@ const eflowEtaPhiPosition& eflowTrackCaloPoints::getEtaPhiPos(eflowCalo::LAYER l
   return (it == m_etaPhiPositions.end()) ? m_defaultEtaPhiPosition : it->second;
 }
 
-const Trk::TrackParameters* eflowTrackCaloPoints::getParameters(eflowCalo::LAYER layer) {
-  std::map<eflowCalo::LAYER, const Trk::TrackParameters*>::const_iterator it = m_trackParameters.find(layer);
-  return (it == m_trackParameters.end()) ? 0 : it->second;
-}
-
 Amg::Vector3D eflowTrackCaloPoints::getPosition(eflowCalo::LAYER layer) {
   std::map<eflowCalo::LAYER, Amg::Vector3D>::const_iterator it = m_positions.find(layer);
   return (it == m_positions.end()) ? m_nullVector : it->second;
diff --git a/Reconstruction/egamma/egammaMVACalib/src/egammaMVACalibTool.cxx b/Reconstruction/egamma/egammaMVACalib/src/egammaMVACalibTool.cxx
index 956ec9ac3adc90bc7e6849712bec56fc7f06aaee..bf640dc82e8133e090531731b98f4efba9a5e1e4 100644
--- a/Reconstruction/egamma/egammaMVACalib/src/egammaMVACalibTool.cxx
+++ b/Reconstruction/egamma/egammaMVACalib/src/egammaMVACalibTool.cxx
@@ -173,14 +173,14 @@ StatusCode egammaMVACalibTool::setupBDT(const egammaMVAFunctions::funcMap_t& fun
     {
       const TString& varName = getString(str2);
       if (!varName.Length()) {
-	ATH_MSG_FATAL("There was an empty variable name!");
-	return StatusCode::FAILURE;
+        ATH_MSG_FATAL("There was an empty variable name!");
+        return StatusCode::FAILURE;
       }
       try {
-	funcs.push_back(funcLibrary.at(varName.Data()));
+        funcs.push_back(funcLibrary.at(varName.Data()));
       } catch(const std::out_of_range& e) {
-	ATH_MSG_FATAL("Could not find formula for variable " << varName << ", error: " << e.what());
-	return StatusCode::FAILURE;	
+        ATH_MSG_FATAL("Could not find formula for variable " << varName << ", error: " << e.what());
+        return StatusCode::FAILURE;	
       } 
     }
     m_funcs.push_back(std::move(funcs));
diff --git a/Reconstruction/tauRec/python/TauAlgorithmsHolder.py b/Reconstruction/tauRec/python/TauAlgorithmsHolder.py
index 6da27709cb35c9e05de35dd5bd0a3d880a949bc4..149b68ee3dd334adc05cd49b4ed97968f78dcdbe 100644
--- a/Reconstruction/tauRec/python/TauAlgorithmsHolder.py
+++ b/Reconstruction/tauRec/python/TauAlgorithmsHolder.py
@@ -143,7 +143,8 @@ def getParticleCaloExtensionTool():
         return cached_instances[_name]
     
     from TrackToCalo.TrackToCaloConf import Trk__ParticleCaloExtensionTool
-    tauParticleCaloExtensionTool=Trk__ParticleCaloExtensionTool(name = _name, Extrapolator = getAtlasExtrapolator())
+    
+    tauParticleCaloExtensionTool = Trk__ParticleCaloExtensionTool(name = _name, Extrapolator = getAtlasExtrapolator())
     
     ToolSvc += tauParticleCaloExtensionTool  
     cached_instances[_name] = tauParticleCaloExtensionTool
@@ -336,7 +337,8 @@ def getElectronVetoVars():
     from tauRecTools.tauRecToolsConf import TauElectronVetoVariables
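+    # tauEVParticleCache points the tool at the CaloExtensionCollection written by CaloExtensionBuilderAlg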
     TauElectronVetoVariables = TauElectronVetoVariables(name = _name,
                                                         CellCorrection = True,
-                                                        ParticleCaloExtensionTool = getParticleCaloExtensionTool())
+                                                        ParticleCaloExtensionTool = getParticleCaloExtensionTool(),
+                                                        tauEVParticleCache = "ParticleCaloExtension")
     
     cached_instances[_name] = TauElectronVetoVariables
     return TauElectronVetoVariables
@@ -685,6 +687,7 @@ def getTauTrackFinder(removeDuplicateTracks=True):
                                     TrackSelectorToolTau  = getInDetTrackSelectorTool(),
                                     TrackToVertexTool         = getTrackToVertexTool(),
                                     ParticleCaloExtensionTool = getParticleCaloExtensionTool(),
+                                    tauParticleCache = "ParticleCaloExtension",
                                     removeDuplicateCoreTracks = removeDuplicateTracks,
                                     Key_trackPartInputContainer = _DefaultTrackContainer,
                                     #maxDeltaZ0wrtLeadTrk = 2, #in mm
diff --git a/Reconstruction/tauRecTools/src/TauElectronVetoVariables.cxx b/Reconstruction/tauRecTools/src/TauElectronVetoVariables.cxx
index e51669343a3104a5b1b793c1060b0cf0d783840a..a49fc3dedbd4c8b9d8d67ac9f865f6c51d849b1b 100644
--- a/Reconstruction/tauRecTools/src/TauElectronVetoVariables.cxx
+++ b/Reconstruction/tauRecTools/src/TauElectronVetoVariables.cxx
@@ -60,10 +60,12 @@ using Gaudi::Units::GeV;
 TauElectronVetoVariables::TauElectronVetoVariables(const std::string &name) :
 TauRecToolBase(name),
 m_doCellCorrection(false), //FF: don't do cell correction by default
-m_caloExtensionTool("Trk::ParticleCaloExtensionTool/ParticleCaloExtensionTool")
+m_caloExtensionTool("Trk::ParticleCaloExtensionTool/ParticleCaloExtensionTool"),
+m_useOldCalo(false)
 {
     declareProperty("CellCorrection", m_doCellCorrection);
     declareProperty("ParticleCaloExtensionTool",   m_caloExtensionTool );
+    // declareProperty("tauEVParticleCache", m_ParticleCacheKey);
 }
 
 //-------------------------------------------------------------------------
@@ -84,11 +86,24 @@ StatusCode TauElectronVetoVariables::finalize()
 //-------------------------------------------------------------------------
 StatusCode TauElectronVetoVariables::initialize()
 {
-    if (m_caloExtensionTool.retrieve().isFailure()) {
-      ATH_MSG_ERROR("Cannot find tool named <" << m_caloExtensionTool << ">");
+  /* Fall back to the CaloExtensionTool if the CaloExtensionBuilder cache cannot be set up */
+  if (m_ParticleCacheKey.initialize().isFailure()) {
+    ATH_MSG_WARNING("Setting up the CaloExtensionTool to replace CaloExtensionBuilder");
+    m_useOldCalo = true;
+  } else {
+    m_useOldCalo = false;
+  }
+
+  if (m_caloExtensionTool.retrieve().isFailure()) {
+      ATH_MSG_ERROR("initialize: Cannot retrieve " << m_caloExtensionTool);
       return StatusCode::FAILURE;
-    }
-    return StatusCode::SUCCESS;
+  } else {
+      ATH_MSG_VERBOSE("Successfully retrieved Extrapolation tool "
+              << m_caloExtensionTool.typeAndName());
+  }
+  return StatusCode::SUCCESS;
 }
 StatusCode TauElectronVetoVariables::eventInitialize()
 {
@@ -145,6 +160,8 @@ StatusCode TauElectronVetoVariables::execute(xAOD::TauJet& pTau)
 
     const CaloCell *pCell;
 
+    int trackIndex = -1;
+
     //use tau vertex to correct cell position
     bool applyCellCorrection = false;
     if (m_doCellCorrection && pTau.vertexLink()) {
@@ -165,36 +182,60 @@ StatusCode TauElectronVetoVariables::execute(xAOD::TauJet& pTau)
       phi_extrapol[i] = -11111.;
     }
 
-    // get the extrapolation into the calo
-    std::unique_ptr<Trk::CaloExtension> caloExtension = m_caloExtensionTool->caloExtension(*pTau.track(0)->track());
-    if( !caloExtension || caloExtension->caloLayerIntersections().empty() ){
-      ATH_MSG_WARNING("extrapolation of leading track to calo surfaces failed  " );
+    /*get the CaloExtension object*/
+    const Trk::CaloExtension * caloExtension = nullptr;
+    std::unique_ptr<Trk::CaloExtension> uniqueExtension ;
+    const xAOD::TrackParticle *orgTrack = pTau.track(0)->track();
+    trackIndex = orgTrack->index();
+
+    if (m_useOldCalo) {
+      /* If CaloExtensionBuilder is unavailable, use the calo extension tool */
+      ATH_MSG_VERBOSE("Using the CaloExtensionTool");
+      uniqueExtension = m_caloExtensionTool->caloExtension(*orgTrack);
+      caloExtension = uniqueExtension.get();
+    } else {
+      /*get the CaloExtension object*/
+      ATH_MSG_VERBOSE("Using the CaloExtensionBuilder Cache");
+      SG::ReadHandle<CaloExtensionCollection>  particleCache {m_ParticleCacheKey};
+      caloExtension = (*particleCache)[trackIndex];
+      ATH_MSG_VERBOSE("Getting element " << trackIndex << " from the particleCache");
+      if( not caloExtension ){
+        ATH_MSG_VERBOSE("Cache does not contain a calo extension -> Calculating with the CaloExtensionTool" );
+        uniqueExtension = m_caloExtensionTool->caloExtension(*orgTrack);
+        caloExtension = uniqueExtension.get();
+      }
+    }
+
+    if( not caloExtension || caloExtension->caloLayerIntersections().empty() ){
+      ATH_MSG_WARNING("extrapolation of leading track to calo surfaces failed : caloLayerIntersections is empty" );
       return StatusCode::SUCCESS;
     }
 
+    const std::vector<const Trk::CurvilinearParameters*>& clParametersVector = caloExtension->caloLayerIntersections();
+
     // loop over calo layers
-    for( auto cur = caloExtension->caloLayerIntersections().begin(); cur != caloExtension->caloLayerIntersections().end() ; ++cur ){
+    for( const Trk::CurvilinearParameters * cur : clParametersVector ){
       
       // only use entry layer
-      if( !parsIdHelper.isEntryToVolume((*cur)->cIdentifier()) ) continue;
+      if( !parsIdHelper.isEntryToVolume(cur->cIdentifier()) ) continue;
       
-      CaloSampling::CaloSample sample = parsIdHelper.caloSample((*cur)->cIdentifier());
+      CaloSampling::CaloSample sample = parsIdHelper.caloSample(cur->cIdentifier());
       int index = -1;
       if( sample == CaloSampling::PreSamplerE || sample == CaloSampling::PreSamplerB ) index = 0;
       else if( sample == CaloSampling::EME1 || sample == CaloSampling::EMB1 )          index = 1;
       else if( sample == CaloSampling::EME2 || sample == CaloSampling::EMB2 )          index = 2;
       else if( sample == CaloSampling::EME3 || sample == CaloSampling::EMB3 )          index = 3;
       if( index < 0 ) continue;
-      eta_extrapol[index] = (*cur)->position().eta();
-      phi_extrapol[index] = (*cur)->position().phi();
+      eta_extrapol[index] = cur->position().eta();
+      phi_extrapol[index] = cur->position().phi();
     }
 
     for (int i = 0; i < numOfsampEM; ++i) {
       if ( eta_extrapol[i] < -11110. || phi_extrapol[i] < -11110. )
-	{
-	  ATH_MSG_DEBUG("extrapolation of leading track to calo surfaces failed for sampling : " << i );
-	  return StatusCode::SUCCESS;
-	}
+      {
+        ATH_MSG_DEBUG("extrapolation of leading track to calo surfaces failed for sampling : " << i );
+        return StatusCode::SUCCESS;
+      }
     }
 
     const xAOD::Jet* pJetSeed = (*pTau.jetLink());
diff --git a/Reconstruction/tauRecTools/src/TauElectronVetoVariables.h b/Reconstruction/tauRecTools/src/TauElectronVetoVariables.h
index 7414f75a0f9b07d0a50da914e1a07be2a5e3b183..d7b0e928e86f7416c76dbd1d9a915d8e62efa688 100644
--- a/Reconstruction/tauRecTools/src/TauElectronVetoVariables.h
+++ b/Reconstruction/tauRecTools/src/TauElectronVetoVariables.h
@@ -7,6 +7,8 @@
 
 #include "tauRecTools/TauRecToolBase.h"
 #include "GaudiKernel/ToolHandle.h"
+#include "StoreGate/ReadHandleKey.h"
+#include "TrkCaloExtension/CaloExtensionCollection.h"
 
 namespace Trk {
   class IParticleCaloExtensionTool;
@@ -44,6 +46,10 @@ public:
 
     bool m_doCellCorrection; //!< enable cell origin correction
     ToolHandle< Trk::IParticleCaloExtensionTool >  m_caloExtensionTool;
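+    //true if the CaloExtensionBuilder cache is unavailable and the CaloExtensionTool is called directly (set in initialize())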
+    bool m_useOldCalo;
+    //input particle calo extension collection (cache written by CaloExtensionBuilderAlg)
+    SG::ReadHandleKey<CaloExtensionCollection>  m_ParticleCacheKey{this,
+      "tauEVParticleCache", "ParticleCaloExtension", "Name of the particle measurement extrapolation cache for TauEV Algorithm"};
 };
 
 #endif
diff --git a/Reconstruction/tauRecTools/src/TauTrackFinder.cxx b/Reconstruction/tauRecTools/src/TauTrackFinder.cxx
index a9e55cbd1f36f35596ce68a2d6fd0d3a4c8774d3..48b6a93a619b6350235b1f5c2b8ae9b4cc65e0b2 100644
--- a/Reconstruction/tauRecTools/src/TauTrackFinder.cxx
+++ b/Reconstruction/tauRecTools/src/TauTrackFinder.cxx
@@ -25,7 +25,8 @@ TauTrackFinder::TauTrackFinder(const std::string& name ) :
         m_applyZ0cut(false),
         m_storeInOtherTrks(true),
         m_bypassSelector(false),
-        m_bypassExtrapolator(false)
+        m_bypassExtrapolator(false),
+        m_useOldCalo(false)
 {
     declareProperty("MaxJetDrTau", m_maxJetDr_tau = 0.2);
     declareProperty("MaxJetDrWide", m_maxJetDr_wide = 0.4);
@@ -38,6 +39,7 @@ TauTrackFinder::TauTrackFinder(const std::string& name ) :
     declareProperty("removeDuplicateCoreTracks", m_removeDuplicateCoreTracks = true);
     declareProperty("BypassSelector", m_bypassSelector = false);
     declareProperty("BypassExtrapolator", m_bypassExtrapolator = false);
+    // declareProperty("tauParticleCache", m_ParticleCacheKey);
 
     // initialize samplings
     m_EMSamplings = {CaloSampling::EME1, CaloSampling::EMB1};
@@ -59,6 +61,21 @@ StatusCode TauTrackFinder::initialize() {
     ATH_CHECK( m_caloExtensionTool.retrieve() );
 
     ATH_CHECK( m_trackPartInputContainer.initialize() );
+    /* Fall back to the CaloExtensionTool if the CaloExtensionBuilder cache cannot be set up */
+    if (m_ParticleCacheKey.initialize().isFailure()) {
+      ATH_MSG_WARNING("Setting up the CaloExtensionTool to replace CaloExtensionBuilder");
+      m_useOldCalo = true;
+    } else {
+      m_useOldCalo = false;
+    }
 
     return StatusCode::SUCCESS;
 }
@@ -316,10 +333,13 @@ StatusCode TauTrackFinder::extrapolateToCaloSurface(xAOD::TauJet& pTau) {
     Trk::TrackParametersIdHelper parsIdHelper;
 
     //    for (unsigned int itr = 0; itr < 10 && itr < pTau.nAllTracks(); ++itr) {
-    
+    int trackIndex = -1;
+    const Trk::CaloExtension * caloExtension = nullptr;
+    std::unique_ptr<Trk::CaloExtension> uniqueExtension ;
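+    // caloExtension either points into the CaloExtensionBuilder cache or to uniqueExtension, which owns a locally computed extension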
     for( xAOD::TauTrack* tauTrack : pTau.allTracks() ) {
         const xAOD::TrackParticle *orgTrack = tauTrack->track();
-        
         if( !orgTrack ) continue;
+        trackIndex = orgTrack->index();
 
         // set default values
@@ -333,9 +353,26 @@ StatusCode TauTrackFinder::extrapolateToCaloSurface(xAOD::TauJet& pTau) {
                        << ", eta " << orgTrack->eta() 
                        << ", phi" << orgTrack->phi() );
 
-        std::unique_ptr<Trk::CaloExtension> caloExtension = m_caloExtensionTool->caloExtension(*orgTrack);
-        if (not caloExtension
-            or caloExtension->caloLayerIntersections().empty() )
+        if (m_useOldCalo) {
+          /* If CaloExtensionBuilder is unavailable, use the calo extension tool */
+          ATH_MSG_VERBOSE("Using the CaloExtensionTool");
+          uniqueExtension = m_caloExtensionTool->caloExtension(*orgTrack);
+          caloExtension = uniqueExtension.get();
+        } else {
+          /*get the CaloExtension object*/
+          ATH_MSG_VERBOSE("Using the CaloExtensionBuilder Cache");
+          SG::ReadHandle<CaloExtensionCollection>  particleCache {m_ParticleCacheKey};
+          caloExtension = (*particleCache)[trackIndex];
+          ATH_MSG_VERBOSE("Getting element " << trackIndex << " from the particleCache");
+          if( not caloExtension ){
+            ATH_MSG_VERBOSE("Cache does not contain a calo extension -> Calculating with the CaloExtensionTool" );
+            uniqueExtension = m_caloExtensionTool->caloExtension(*orgTrack);
+            caloExtension = uniqueExtension.get();
+          }
+        }
+        /* Copy the intersections only if an extension was found, to avoid dereferencing a null extension */
+        const std::vector<const Trk::CurvilinearParameters*> clParametersVector =
+            caloExtension ? caloExtension->caloLayerIntersections()
+                          : std::vector<const Trk::CurvilinearParameters*>{};
+
+        if (!caloExtension or clParametersVector.empty() )
         { 
             ATH_MSG_DEBUG("Track extrapolation failed");
         }
@@ -343,7 +380,7 @@ StatusCode TauTrackFinder::extrapolateToCaloSurface(xAOD::TauJet& pTau) {
             ATH_MSG_DEBUG("Scanning samplings");
             bool validECal = false;
             bool validHCal = false;
-            for( auto cur : caloExtension->caloLayerIntersections() ){
+            for( const Trk::CurvilinearParameters * cur : clParametersVector ){
                 ATH_MSG_DEBUG("Sampling " << parsIdHelper.caloSample(cur->cIdentifier()) );
                 
                 // only use entry layer
@@ -391,7 +428,6 @@ StatusCode TauTrackFinder::extrapolateToCaloSurface(xAOD::TauJet& pTau) {
         tauTrack->setDetail(xAOD::TauJetParameters::CaloSamplingPhiEM, phiEM);
         tauTrack->setDetail(xAOD::TauJetParameters::CaloSamplingEtaHad, etaHad);
         tauTrack->setDetail(xAOD::TauJetParameters::CaloSamplingPhiHad, phiHad);
-      
     }
 
     return StatusCode::SUCCESS;
diff --git a/Reconstruction/tauRecTools/src/TauTrackFinder.h b/Reconstruction/tauRecTools/src/TauTrackFinder.h
index 9d9aa1a2d766bd8802ba6b1658a57dd5407a84c5..cdd778e5758d38ddc5fa79d893fcb85611b4fa8d 100644
--- a/Reconstruction/tauRecTools/src/TauTrackFinder.h
+++ b/Reconstruction/tauRecTools/src/TauTrackFinder.h
@@ -8,7 +8,9 @@
 
 #include "tauRecTools/TauRecToolBase.h"
 #include "GaudiKernel/ToolHandle.h"
+#include "StoreGate/ReadHandleKey.h"
 #include "ITrackToVertex/ITrackToVertex.h"
+#include "TrkCaloExtension/CaloExtensionCollection.h"
 
 #include "xAODTracking/Vertex.h"
 #include "xAODTracking/TrackParticle.h"
@@ -112,6 +114,9 @@ private:
     ToolHandle< Trk::IParticleCaloExtensionTool >  m_caloExtensionTool;
     ToolHandle<Trk::ITrackSelectorTool> m_trackSelectorTool_tau;
     ToolHandle<Reco::ITrackToVertex> m_trackToVertexTool;
+    //input particle calo extension collection (cache written by CaloExtensionBuilderAlg)
+    SG::ReadHandleKey<CaloExtensionCollection>  m_ParticleCacheKey{this,
+      "tauParticleCache", "ParticleCaloExtension", "Name of the particle measurement extrapolation cache for TauTrackFinder"};
     
     //-------------------------------------------------------------
     //! Input parameters for algorithm
@@ -135,6 +140,7 @@ private:
 
     bool m_bypassSelector;
     bool m_bypassExtrapolator;
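+    //true if the CaloExtensionBuilder cache is unavailable and the CaloExtensionTool is called directly (set in initialize())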
+    bool m_useOldCalo;
 
     //-------------------------------------------------------------
     // Sets of EM/Had samplings for track extrapolation 
diff --git a/Simulation/G4Atlas/G4AtlasAlg/src/G4AtlasAlg.cxx b/Simulation/G4Atlas/G4AtlasAlg/src/G4AtlasAlg.cxx
index 96768d3729272ff4a605772e98b906d0dadb5c82..08ebb978fc011b31984f290ba8006a5f2a04daf4 100644
--- a/Simulation/G4Atlas/G4AtlasAlg/src/G4AtlasAlg.cxx
+++ b/Simulation/G4Atlas/G4AtlasAlg/src/G4AtlasAlg.cxx
@@ -25,6 +25,9 @@
 #include "G4StackManager.hh"
 #include "G4UImanager.hh"
 #include "G4ScoringManager.hh"
+#include "G4VUserPhysicsList.hh"
+#include "G4VModularPhysicsList.hh"
+#include "G4ParallelWorldPhysics.hh"
 
 // CLHEP includes
 #include "CLHEP/Random/RandomEngine.h"
@@ -50,18 +53,6 @@ static std::once_flag releaseGeoModelOnceFlag;
 G4AtlasAlg::G4AtlasAlg(const std::string& name, ISvcLocator* pSvcLocator)
   : AthAlgorithm(name, pSvcLocator)
 {
-  declareProperty( "Dll", m_libList);
-  declareProperty( "Physics", m_physList);
-  declareProperty( "FieldMap", m_fieldMap);
-  declareProperty( "RandomGenerator", m_rndmGen);
-  declareProperty( "ReleaseGeoModel", m_releaseGeoModel);
-  declareProperty( "RecordFlux", m_recordFlux);
-  declareProperty( "KillAbortedEvents", m_killAbortedEvents);
-  declareProperty( "FlagAbortedEvents", m_flagAbortedEvents);
-  declareProperty("G4Commands", m_g4commands, "Commands to send to the G4UI");
-  // Multi-threading specific settings
-  declareProperty("MultiThreading", m_useMT, "Multi-threading specific settings");
-
   // Verbosities
   declareProperty("Verbosities", m_verbosities);
 }
@@ -73,10 +64,6 @@ StatusCode G4AtlasAlg::initialize()
 {
   ATH_MSG_DEBUG("Start of initialize()");
 
-  // Input/Ouput Keys
-  ATH_CHECK( m_inputTruthCollectionKey.initialize());
-  ATH_CHECK( m_outputTruthCollectionKey.initialize());
-
   // Create the scoring manager if requested
   if (m_recordFlux) G4ScoringManager::GetScoringManager();
 
@@ -92,25 +79,24 @@ StatusCode G4AtlasAlg::initialize()
   ATH_CHECK( m_rndmGenSvc.retrieve() );
   ATH_CHECK( m_userActionSvc.retrieve() );
 
-  // FIXME TOO EARLY???
-  ATH_CHECK(m_g4atlasSvc.retrieve());
+  ATH_CHECK(m_senDetTool.retrieve());
+  ATH_CHECK(m_fastSimTool.retrieve());
 
+  // Truth
   ATH_CHECK( m_truthRecordSvc.retrieve() );
   ATH_MSG_INFO( "- Using ISF TruthRecordSvc : " << m_truthRecordSvc.typeAndName() );
   ATH_CHECK( m_geoIDSvc.retrieve() );
   ATH_MSG_INFO( "- Using ISF GeoIDSvc       : " << m_geoIDSvc.typeAndName() );
 
-  ATH_CHECK(m_inputConverter.retrieve());
-
-  ATH_MSG_DEBUG(std::endl << std::endl << std::endl);
-
-
   TruthStrategyManager* sManager = TruthStrategyManager::GetStrategyManager();
   sManager->SetISFTruthSvc( &(*m_truthRecordSvc) );
   sManager->SetISFGeoIDSvc( &(*m_geoIDSvc) );
 
-  ATH_CHECK(m_senDetTool.retrieve());
-  ATH_CHECK(m_fastSimTool.retrieve());
+  // I/O
+  ATH_CHECK( m_inputTruthCollectionKey.initialize());
+  ATH_CHECK( m_outputTruthCollectionKey.initialize());
+
+  ATH_CHECK(m_inputConverter.retrieve());
 
   ATH_MSG_DEBUG("End of initialize()");
   return StatusCode::SUCCESS;
@@ -181,10 +167,39 @@ void G4AtlasAlg::initializeOnce()
     commandLog(returnCode, g4command);
   }
 
-  // G4 init moved to PyG4AtlasAlg / G4AtlasEngine
-  /// @todo Reinstate or delete?! This can't actually be called from the Py algs
-  //ATH_MSG_INFO("Firing initialization of G4!!!");
-  //initializeG4();
+  // Code from G4AtlasSvc
+  auto* rm = G4RunManager::GetRunManager();
+  if(!rm) {
+    throw std::runtime_error("Run manager retrieval has failed");
+  }
+  rm->Initialize();     // Initialization differs slightly in multi-threading.
+  // TODO: add more details about why this is here.
+  if(!m_useMT && rm->ConfirmBeamOnCondition()) {
+    rm->RunInitialization();
+  }
+
+  ATH_MSG_INFO( "retrieving the Detector Geometry Service" );
+  if(m_detGeoSvc.retrieve().isFailure()) {
+    throw std::runtime_error("Could not initialize ATLAS DetectorGeometrySvc!");
+  }
+
+  if(m_userLimitsSvc.retrieve().isFailure()) {
+    throw std::runtime_error("Could not initialize ATLAS UserLimitsSvc!");
+  }
+
+  if (m_activateParallelGeometries) {
+    G4VModularPhysicsList* thePhysicsList=dynamic_cast<G4VModularPhysicsList*>(m_physListSvc->GetPhysicsList());
+    if (!thePhysicsList) {
+      throw std::runtime_error("Failed dynamic_cast!! this is not a G4VModularPhysicsList!");
+    }
+#if G4VERSION_NUMBER >= 1010
+    std::vector<std::string>& parallelWorldNames=m_detGeoSvc->GetParallelWorldNames();
+    for (auto& it: parallelWorldNames) {
+      thePhysicsList->RegisterPhysics(new G4ParallelWorldPhysics(it,true));
+    }
+#endif
+  }
+
   return;
 }
 
diff --git a/Simulation/G4Atlas/G4AtlasAlg/src/G4AtlasAlg.h b/Simulation/G4Atlas/G4AtlasAlg/src/G4AtlasAlg.h
index 416ea22cd7f8d1e8b09bca46cb38e46689152a79..d69a20e2b39db99b96bbc4ec1487d78d9628758f 100644
--- a/Simulation/G4Atlas/G4AtlasAlg/src/G4AtlasAlg.h
+++ b/Simulation/G4Atlas/G4AtlasAlg/src/G4AtlasAlg.h
@@ -1,5 +1,5 @@
 /*
-  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+  Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
 */
 
 #ifndef G4ATLASALG_G4AtlasAlg_H
@@ -25,7 +25,7 @@
 #include "G4AtlasInterfaces/ISensitiveDetectorMasterTool.h"
 #include "G4AtlasInterfaces/IFastSimulationMasterTool.h"
 #include "G4AtlasInterfaces/IPhysicsListSvc.h"
-#include "G4AtlasInterfaces/IG4AtlasSvc.h"
+#include "G4AtlasInterfaces/IUserLimitsSvc.h"
 #include "GeneratorObjects/McEventCollection.h"
 
 // ISF includes
@@ -94,8 +94,8 @@ private:
 
   /// @name Configurable Properties
   /// @{
-  bool m_killAbortedEvents{false};
-  bool m_flagAbortedEvents{false};
+  Gaudi::Property<bool> m_killAbortedEvents{this, "KillAbortedEvents", false, ""};
+  Gaudi::Property<bool> m_flagAbortedEvents{this, "FlagAbortedEvents", false, ""};
   SG::ReadHandleKey<McEventCollection>    m_inputTruthCollectionKey{this, "InputTruthCollection", "BeamTruthEvent", "Input hard scatter collection"}; //!< input hard scatter collection
   SG::WriteHandleKey<McEventCollection>   m_outputTruthCollectionKey{this, "OutputTruthCollection", "TruthEvent", "Output hard scatter truth collection"};//!< output hard scatter truth collection
   /// Central Truth Service
@@ -109,20 +109,21 @@ private:
 
   /// @name Configurable Properties (common with TransportTool)
   /// @{
-  std::string m_libList{""};
-  std::string m_physList{""};
-  std::string m_fieldMap{""};
-  std::string m_rndmGen{"athena"};
-  bool m_releaseGeoModel{true};
-  bool m_recordFlux{false};
+  Gaudi::Property<std::string> m_libList{this, "Dll", "", ""};
+  Gaudi::Property<std::string> m_physList{this, "Physics", "", ""};
+  Gaudi::Property<std::string> m_fieldMap{this, "FieldMap", "", ""};
+  Gaudi::Property<std::string> m_rndmGen{this, "RandomGenerator", "athena", ""};
+  Gaudi::Property<bool> m_releaseGeoModel{this, "ReleaseGeoModel", true, ""};
+  Gaudi::Property<bool> m_recordFlux{this, "RecordFlux", false, ""};
   /// Commands to send to the G4 UI
-  std::vector<std::string> m_g4commands;
+  Gaudi::Property<std::vector<std::string> > m_g4commands{this, "G4Commands", {}, "Commands to send to the G4UI"};
   /// Activate multi-threading configuration
-  bool m_useMT{false};
+  Gaudi::Property<bool> m_useMT{this,"MultiThreading",  false, "Multi-threading specific settings"};
+  Gaudi::Property<bool> m_activateParallelGeometries{this, "ActivateParallelWorlds", false, "Toggle on/off the G4 parallel geometry system"};
   /// Random number service
   ServiceHandle<IAthRNGSvc> m_rndmGenSvc{this, "AtRndmGenSvc", "AthRNGSvc", ""}; // TODO rename property
-  /// G4Atlas Service - handles G4 initialization
-  ServiceHandle<IG4AtlasSvc> m_g4atlasSvc{this, "G4AtlasSvc", "G4AtlasSvc", ""};
+  /// User limits service
+  ServiceHandle<IUserLimitsSvc> m_userLimitsSvc{this, "UserLimitsSvc", "UserLimitsSvc", ""};
   /// User Action Service
   ServiceHandle<G4UA::IUserActionSvc> m_userActionSvc{this, "UserActionSvc", "G4UA::UserActionSvc", ""};
   /// Detector Geometry Service (builds G4 Geometry)
diff --git a/Simulation/G4Atlas/G4AtlasApps/test/test_AtlasG4_cosmics_configuration.py b/Simulation/G4Atlas/G4AtlasApps/test/test_AtlasG4_cosmics_configuration.py
index 55260b5f47ef60be690bfd5ab6b60a19204a0c0c..37484ce4240568633e0704a1a20346b30ba8d4ff 100755
--- a/Simulation/G4Atlas/G4AtlasApps/test/test_AtlasG4_cosmics_configuration.py
+++ b/Simulation/G4Atlas/G4AtlasApps/test/test_AtlasG4_cosmics_configuration.py
@@ -238,7 +238,7 @@ class TestAtlasG4Cosmics(unittest.TestCase):
 
 
     def test___G4AtlasAlg_ListOfSetProperties(self):
-        expected_list = ['AtRndmGenSvc', 'DetGeoSvc', 'DetStore', 'EvtStore', 'ExtraInputs', 'ExtraOutputs', 'FastSimMasterTool', 'FlagAbortedEvents', 'G4AtlasSvc', 'G4Commands', 'GeoIDSvc', 'InputConverter', 'InputTruthCollection', 'KillAbortedEvents', 'MultiThreading', 'NeededResources', 'OutputTruthCollection', 'PhysicsListSvc', 'RandomGenerator', 'RecordFlux', 'ReleaseGeoModel', 'SenDetMasterTool', 'TruthRecordService', 'UserActionSvc', 'Verbosities']
+        expected_list = ['AtRndmGenSvc', 'DetGeoSvc', 'DetStore', 'EvtStore', 'ExtraInputs', 'ExtraOutputs', 'FastSimMasterTool', 'FlagAbortedEvents', 'G4Commands', 'GeoIDSvc', 'InputConverter', 'InputTruthCollection', 'KillAbortedEvents', 'MultiThreading', 'NeededResources', 'OutputTruthCollection', 'PhysicsListSvc', 'RandomGenerator', 'RecordFlux', 'ReleaseGeoModel', 'SenDetMasterTool', 'TruthRecordService', 'UserActionSvc', 'UserLimitsSvc', 'Verbosities']
         g4atlasalg = self._job_config_dict['G4AtlasAlg']
         actual_list = g4atlasalg.keys()
         expected_property_value_sorted = sorted(expected_list)
@@ -282,9 +282,9 @@ class TestAtlasG4Cosmics(unittest.TestCase):
         self._assert_Algorithm_property_equal('G4AtlasAlg', 'SenDetMasterTool', expected_tool_name)
 
 
-    def test___G4AtlasAlg_G4AtlasSvc_setCorrectly(self):
-        expected_service_name = 'G4AtlasSvc'
-        self._assert_Algorithm_property_equal('G4AtlasAlg', 'G4AtlasSvc', expected_service_name)
+    def test___G4AtlasAlg_UserLimitsSvc_setCorrectly(self):
+        expected_service_name = 'UserLimitsSvc'
+        self._assert_Algorithm_property_equal('G4AtlasAlg', 'UserLimitsSvc', expected_service_name)
 
 
     def test___G4AtlasAlg_UserActionSvc_setCorrectly(self):
diff --git a/Simulation/G4Atlas/G4AtlasApps/test/test_AtlasG4_tf_configuration.py b/Simulation/G4Atlas/G4AtlasApps/test/test_AtlasG4_tf_configuration.py
index 6fc74c7b3c67af02231c506473f4dd8e4b9f296a..70be030e872a2c94a41f25a6e4f86e192b2399fe 100755
--- a/Simulation/G4Atlas/G4AtlasApps/test/test_AtlasG4_tf_configuration.py
+++ b/Simulation/G4Atlas/G4AtlasApps/test/test_AtlasG4_tf_configuration.py
@@ -154,7 +154,7 @@ class TestAtlasG4(unittest.TestCase):
 
 
     def test___G4AtlasAlg_ListOfSetProperties(self):
-        expected_list = ['AtRndmGenSvc', 'DetGeoSvc', 'DetStore', 'EvtStore', 'ExtraInputs', 'ExtraOutputs', 'FastSimMasterTool', 'FlagAbortedEvents', 'G4AtlasSvc', 'G4Commands', 'GeoIDSvc', 'InputConverter', 'InputTruthCollection', 'KillAbortedEvents', 'MultiThreading', 'NeededResources', 'OutputTruthCollection', 'PhysicsListSvc', 'RandomGenerator', 'RecordFlux', 'ReleaseGeoModel', 'SenDetMasterTool', 'TruthRecordService', 'UserActionSvc', 'Verbosities']
+        expected_list = ['AtRndmGenSvc', 'DetGeoSvc', 'DetStore', 'EvtStore', 'ExtraInputs', 'ExtraOutputs', 'FastSimMasterTool', 'FlagAbortedEvents', 'G4Commands', 'GeoIDSvc', 'InputConverter', 'InputTruthCollection', 'KillAbortedEvents', 'MultiThreading', 'NeededResources', 'OutputTruthCollection', 'PhysicsListSvc', 'RandomGenerator', 'RecordFlux', 'ReleaseGeoModel', 'SenDetMasterTool', 'TruthRecordService', 'UserActionSvc', 'UserLimitsSvc', 'Verbosities']
         g4atlasalg = self._job_config_dict['G4AtlasAlg']
         actual_list = g4atlasalg.keys()
         expected_property_value_sorted = sorted(expected_list)
@@ -198,9 +198,9 @@ class TestAtlasG4(unittest.TestCase):
         self._assert_Algorithm_property_equal('G4AtlasAlg', 'SenDetMasterTool', expected_tool_name)
 
 
-    def test___G4AtlasAlg_G4AtlasSvc_setCorrectly(self):
-        expected_service_name = 'G4AtlasSvc'
-        self._assert_Algorithm_property_equal('G4AtlasAlg', 'G4AtlasSvc', expected_service_name)
+    def test___G4AtlasAlg_UserLimitsSvc_setCorrectly(self):
+        expected_service_name = 'UserLimitsSvc'
+        self._assert_Algorithm_property_equal('G4AtlasAlg', 'UserLimitsSvc', expected_service_name)
 
 
     def test___G4AtlasAlg_UserActionSvc_setCorrectly(self):
diff --git a/Simulation/G4Atlas/G4AtlasApps/test/test_TestBeam_tf_configuration.py b/Simulation/G4Atlas/G4AtlasApps/test/test_TestBeam_tf_configuration.py
index 7f50108358f495dee92189a93fd0cb93d01ea643..2e759db16d67f38bb29cce937d4c6ef4b05dc472 100755
--- a/Simulation/G4Atlas/G4AtlasApps/test/test_TestBeam_tf_configuration.py
+++ b/Simulation/G4Atlas/G4AtlasApps/test/test_TestBeam_tf_configuration.py
@@ -174,7 +174,7 @@ class TestTestBeam(unittest.TestCase):
 
 
     def test___G4AtlasAlg_ListOfSetProperties(self):
-        expected_list = ['AtRndmGenSvc', 'DetGeoSvc', 'DetStore', 'EvtStore', 'ExtraInputs', 'ExtraOutputs', 'FastSimMasterTool', 'G4AtlasSvc', 'G4Commands', 'GeoIDSvc', 'InputConverter', 'InputTruthCollection', 'MultiThreading', 'NeededResources', 'OutputTruthCollection', 'PhysicsListSvc', 'RandomGenerator', 'RecordFlux', 'ReleaseGeoModel', 'SenDetMasterTool', 'TruthRecordService', 'UserActionSvc', 'Verbosities']
+        expected_list = ['AtRndmGenSvc', 'DetGeoSvc', 'DetStore', 'EvtStore', 'ExtraInputs', 'ExtraOutputs', 'FastSimMasterTool', 'G4Commands', 'GeoIDSvc', 'InputConverter', 'InputTruthCollection', 'MultiThreading', 'NeededResources', 'OutputTruthCollection', 'PhysicsListSvc', 'RandomGenerator', 'RecordFlux', 'ReleaseGeoModel', 'SenDetMasterTool', 'TruthRecordService', 'UserActionSvc', 'UserLimitsSvc', 'Verbosities']
         g4atlasalg = self._job_config_dict['G4AtlasAlg']
         actual_list = g4atlasalg.keys()
         expected_property_value_sorted = sorted(expected_list)
@@ -218,9 +218,9 @@ class TestTestBeam(unittest.TestCase):
         self._assert_Algorithm_property_equal('G4AtlasAlg', 'SenDetMasterTool', expected_tool_name)
 
 
-    def test___G4AtlasAlg_G4AtlasSvc_setCorrectly(self):
-        expected_service_name = 'G4AtlasSvc'
-        self._assert_Algorithm_property_equal('G4AtlasAlg', 'G4AtlasSvc', expected_service_name)
+    def test___G4AtlasAlg_UserLimitsSvc_setCorrectly(self):
+        expected_service_name = 'UserLimitsSvc'
+        self._assert_Algorithm_property_equal('G4AtlasAlg', 'UserLimitsSvc', expected_service_name)
 
 
     def test___G4AtlasAlg_UserActionSvc_setCorrectly(self):
diff --git a/Simulation/ISF/ISF_Geant4/ISF_Geant4Tools/src/TransportTool.cxx b/Simulation/ISF/ISF_Geant4/ISF_Geant4Tools/src/TransportTool.cxx
index 45b164a2b6d64d94ba5b4c582361b01e502760ee..565dc69ecbaae94c69074747c57a35958971fa50 100644
--- a/Simulation/ISF/ISF_Geant4/ISF_Geant4Tools/src/TransportTool.cxx
+++ b/Simulation/ISF/ISF_Geant4/ISF_Geant4Tools/src/TransportTool.cxx
@@ -38,6 +38,9 @@
 #include "G4ScoringManager.hh"
 #include "G4Timer.hh"
 #include "G4SDManager.hh"
+#include "G4VUserPhysicsList.hh"
+#include "G4VModularPhysicsList.hh"
+#include "G4ParallelWorldPhysics.hh"
 
 #include "AtlasDetDescr/AtlasRegionHelper.h"
 
@@ -52,24 +55,10 @@ iGeant4::G4TransportTool::G4TransportTool(const std::string& type,
                                           const IInterface*  parent )
   : ISF::BaseSimulatorTool(type, name, parent)
 {
-  declareProperty("Dll",                   m_libList);
-  declareProperty("Physics",               m_physList);
-  declareProperty("FieldMap",              m_fieldMap);
-  declareProperty("ReleaseGeoModel",       m_releaseGeoModel);
-  declareProperty("RecordFlux",            m_recordFlux);
-  declareProperty("McEventCollection",     m_mcEventCollectionName);
-  declareProperty("G4Commands",            m_g4commands, "Commands to send to the G4UI");
-  declareProperty("MultiThreading",        m_useMT, "Multi-threading specific settings");
   //declareProperty("KillAllNeutrinos",      m_KillAllNeutrinos=true);
   //declareProperty("KillLowEPhotons",       m_KillLowEPhotons=-1.);
-  declareProperty("PrintTimingInfo",      m_doTiming       );
-
 }
 
-//________________________________________________________________________
-iGeant4::G4TransportTool::~G4TransportTool()
-{}
-
 //________________________________________________________________________
 StatusCode iGeant4::G4TransportTool::initialize()
 {
@@ -84,9 +73,10 @@ StatusCode iGeant4::G4TransportTool::initialize()
     m_runTimer->Start();
   }
 
-  ATH_CHECK(m_inputConverter.retrieve());
+  // Create the scoring manager if requested
+  if (m_recordFlux) G4ScoringManager::GetScoringManager();
 
- // One-time initialization
+  // One-time initialization
   try {
     std::call_once(initializeOnceFlag, &iGeant4::G4TransportTool::initializeOnce, this);
   }
@@ -98,15 +88,11 @@ StatusCode iGeant4::G4TransportTool::initialize()
   ATH_CHECK( m_rndmGenSvc.retrieve() );
   ATH_CHECK( m_userActionSvc.retrieve() );
 
-  ATH_CHECK(m_g4atlasSvc.retrieve());
-
-  if (m_recordFlux) G4ScoringManager::GetScoringManager();
-
-  ATH_CHECK (m_detGeoSvc.retrieve());
-
   ATH_CHECK(m_senDetTool.retrieve());
   ATH_CHECK(m_fastSimTool.retrieve());
 
+  ATH_CHECK(m_inputConverter.retrieve());
+
   return StatusCode::SUCCESS;
 }
 
@@ -167,6 +153,39 @@ void iGeant4::G4TransportTool::initializeOnce()
     commandLog(returnCode, g4command);
   }
 
+  // Code from G4AtlasSvc
+  auto* rm = G4RunManager::GetRunManager();
+  if(!rm) {
+    throw std::runtime_error("Run manager retrieval has failed");
+  }
+  rm->Initialize();     // Initialization differs slightly in multi-threading.
+  // TODO: add more details about why this is here.
+  if(!m_useMT && rm->ConfirmBeamOnCondition()) {
+    rm->RunInitialization();
+  }
+
+  ATH_MSG_INFO( "retrieving the Detector Geometry Service" );
+  if(m_detGeoSvc.retrieve().isFailure()) {
+    throw std::runtime_error("Could not initialize ATLAS DetectorGeometrySvc!");
+  }
+
+  if(m_userLimitsSvc.retrieve().isFailure()) {
+    throw std::runtime_error("Could not initialize ATLAS UserLimitsSvc!");
+  }
+
+  if (m_activateParallelGeometries) {
+    G4VModularPhysicsList* thePhysicsList=dynamic_cast<G4VModularPhysicsList*>(m_physListSvc->GetPhysicsList());
+    if (!thePhysicsList) {
+      throw std::runtime_error("Failed dynamic_cast!! this is not a G4VModularPhysicsList!");
+    }
+#if G4VERSION_NUMBER >= 1010
+    std::vector<std::string>& parallelWorldNames=m_detGeoSvc->GetParallelWorldNames();
+    for (auto& it: parallelWorldNames) {
+      thePhysicsList->RegisterPhysics(new G4ParallelWorldPhysics(it,true));
+    }
+#endif
+  }
+
   return;
 }
 
diff --git a/Simulation/ISF/ISF_Geant4/ISF_Geant4Tools/src/TransportTool.h b/Simulation/ISF/ISF_Geant4/ISF_Geant4Tools/src/TransportTool.h
index df0871742452a50565d491bae26a083051299d59..88a82528d910806fbc147b096daebed2d769d9e8 100644
--- a/Simulation/ISF/ISF_Geant4/ISF_Geant4Tools/src/TransportTool.h
+++ b/Simulation/ISF/ISF_Geant4/ISF_Geant4Tools/src/TransportTool.h
@@ -19,12 +19,12 @@
 // Athena headers
 #include "AthenaKernel/IAthRNGSvc.h"
 #include "AthenaKernel/SlotSpecificObj.h"
-#include "G4AtlasInterfaces/IG4AtlasSvc.h"
 #include "G4AtlasInterfaces/IUserActionSvc.h"
 #include "G4AtlasInterfaces/IDetectorGeometrySvc.h"
 #include "G4AtlasInterfaces/ISensitiveDetectorMasterTool.h"
 #include "G4AtlasInterfaces/IFastSimulationMasterTool.h"
 #include "G4AtlasInterfaces/IPhysicsListSvc.h"
+#include "G4AtlasInterfaces/IUserLimitsSvc.h"
 #include "CxxUtils/checker_macros.h"
 
 // ISF includes
@@ -63,7 +63,7 @@ namespace iGeant4
     G4TransportTool(const std::string&,const std::string&,const IInterface*);
 
     /** Destructor */
-    virtual ~G4TransportTool ();
+    virtual ~G4TransportTool () = default;
 
     /** AlgTool initialize method */
     virtual StatusCode initialize() override final;
@@ -102,7 +102,7 @@ namespace iGeant4
     /// @{
 
     // timing checks
-    bool  m_doTiming{true};
+    Gaudi::Property<bool> m_doTiming{this, "PrintTimingInfo", true, ""};
     //float m_runTime;
     float m_accumulatedEventTime{0.};
     float m_accumulatedEventTimeSq{0.};
@@ -120,24 +120,25 @@ namespace iGeant4
       mutex_t m_mutex;
     };
     mutable SG::SlotSpecificObj<Slot> m_slots ATLAS_THREAD_SAFE;
-    std::string m_mcEventCollectionName{"TruthEvent"};
+    Gaudi::Property<std::string> m_mcEventCollectionName{this, "McEventCollection", "TruthEvent", ""};
     /// Helper Tool to provide G4RunManager
     PublicToolHandle<ISF::IG4RunManagerHelper>  m_g4RunManagerHelper{this, "G4RunManagerHelper", "iGeant4::G4RunManagerHelper/G4RunManagerHelper", ""};
     G4AtlasRunManager    *m_pRunMgr{};
 
-    std::string m_libList{""};
-    std::string m_physList{""};
-    std::string m_fieldMap{""};
-    bool   m_releaseGeoModel{true};
-    bool   m_recordFlux{false};
+    Gaudi::Property<std::string> m_libList{this, "Dll", "", ""};
+    Gaudi::Property<std::string> m_physList{this, "Physics", "", ""};
+    Gaudi::Property<std::string> m_fieldMap{this, "FieldMap", "", ""};
+    Gaudi::Property<bool> m_releaseGeoModel{this, "ReleaseGeoModel", true, ""};
+    Gaudi::Property<bool> m_recordFlux{this, "RecordFlux", false, ""};
     /// Commands to send to the G4 UI
-    std::vector<std::string> m_g4commands;
+    Gaudi::Property<std::vector<std::string> > m_g4commands{this, "G4Commands", {}, "Commands to send to the G4UI"};
     /// Activate multi-threading configuration
-    bool m_useMT{false};
+    Gaudi::Property<bool> m_useMT{this,"MultiThreading",  false, "Multi-threading specific settings"};
+    Gaudi::Property<bool> m_activateParallelGeometries{this, "ActivateParallelWorlds", false, "Toggle on/off the G4 parallel geometry system"};
     // Random number service
     ServiceHandle<IAthRNGSvc> m_rndmGenSvc{this, "RandomNumberService", "AthRNGSvc", ""};
-    /// G4AtlasSvc
-    ServiceHandle<IG4AtlasSvc> m_g4atlasSvc{this, "G4AtlasSvc", "G4AtlasSvc", ""};
+    /// User limits service
+    ServiceHandle<IUserLimitsSvc> m_userLimitsSvc{this, "UserLimitsSvc", "UserLimitsSvc", ""};
     /// user action service
     ServiceHandle<G4UA::IUserActionSvc> m_userActionSvc{this, "UserActionSvc", "", ""};
     /// Detector Geometry Service (builds G4 Geometry)
diff --git a/Trigger/TrigAlgorithms/TrigPartialEventBuilding/CMakeLists.txt b/Trigger/TrigAlgorithms/TrigPartialEventBuilding/CMakeLists.txt
index 58755dc51f630ad7d83c79dc1301f7b7443e5c7a..f83096cb031831817ec385919b189bfaa4fee788 100644
--- a/Trigger/TrigAlgorithms/TrigPartialEventBuilding/CMakeLists.txt
+++ b/Trigger/TrigAlgorithms/TrigPartialEventBuilding/CMakeLists.txt
@@ -8,7 +8,9 @@ atlas_subdir( TrigPartialEventBuilding )
 # Declare the package's dependencies
 atlas_depends_on_subdirs( PUBLIC
                           Trigger/TrigSteer/DecisionHandling
-                          Trigger/TrigEvent/TrigSteeringEvent )
+                          Trigger/TrigEvent/TrigSteeringEvent
+                          PRIVATE
+                          Control/CxxUtils )
 
 # Component(s) in the package
 atlas_add_library( TrigPartialEventBuildingLib
diff --git a/Trigger/TrigAlgorithms/TrigPartialEventBuilding/TrigPartialEventBuilding/PEBInfoWriterToolBase.h b/Trigger/TrigAlgorithms/TrigPartialEventBuilding/TrigPartialEventBuilding/PEBInfoWriterToolBase.h
index ed48186827df7e5fd5d7f36d70b9cc0e5fc75bef..1b4fbce2cc175f835505eb7d2e1e90244886e363 100644
--- a/Trigger/TrigAlgorithms/TrigPartialEventBuilding/TrigPartialEventBuilding/PEBInfoWriterToolBase.h
+++ b/Trigger/TrigAlgorithms/TrigPartialEventBuilding/TrigPartialEventBuilding/PEBInfoWriterToolBase.h
@@ -25,15 +25,15 @@ public:
   struct Input {
     Input(TrigCompositeUtils::Decision* d,
           const EventContext& ctx,
-          const TrigRoiDescriptor* r,
+          const ElementLink<TrigRoiDescriptorCollection>& r,
           const TrigCompositeUtils::Decision* pd)
     : decision(d),
       eventContext(ctx),
-      roi(r),
+      roiEL(r),
       previousDecisionIDs(TrigCompositeUtils::decisionIDs(pd).begin(), TrigCompositeUtils::decisionIDs(pd).end()) {}
     TrigCompositeUtils::Decision* decision;
     const EventContext& eventContext;
-    const TrigRoiDescriptor* roi;
+    const ElementLink<TrigRoiDescriptorCollection> roiEL;
     const TrigCompositeUtils::DecisionIDContainer previousDecisionIDs;
   };
   /// Structure holding the list of ROBs and SubDets
@@ -61,6 +61,10 @@ public:
 protected:
   /// Creates the PEBInfo which is attached to the decision in \c decide. Has to be implemented by the derived class.
   virtual PEBInfo createPEBInfo(const Input& input) const = 0;
+  /// MaxRoIs property
+  Gaudi::Property<int> m_maxRoIs {
+    this, "MaxRoIs", -1, "Create PEB list only for the first N RoIs from input decisions (<0 means no limit)"
+  };
 
 private:
   /// The decision id of the tool instance
diff --git a/Trigger/TrigAlgorithms/TrigPartialEventBuilding/python/TrigPartialEventBuildingConfig.py b/Trigger/TrigAlgorithms/TrigPartialEventBuilding/python/TrigPartialEventBuildingConfig.py
index 1c651db5b467de0b8a0f52e523eae5fa92560251..c61845124da1fcba577901eba81fa67c59ce5eb1 100644
--- a/Trigger/TrigAlgorithms/TrigPartialEventBuilding/python/TrigPartialEventBuildingConfig.py
+++ b/Trigger/TrigAlgorithms/TrigPartialEventBuilding/python/TrigPartialEventBuildingConfig.py
@@ -12,7 +12,7 @@ class StaticPEBInfoWriterToolCfg(StaticPEBInfoWriterTool):
         self.ROBList.extend(robs)
 
     def addSubDets(self, dets):
-        self.SubDetList.extend(dets)
+        self.SubDetList.extend([int(detid) for detid in dets])
 
     def addHLTResultToROBList(self, moduleId=getFullHLTResultID()):
         hltResultSID = SourceIdentifier(SubDetector.TDAQ_HLT, moduleId)
diff --git a/Trigger/TrigAlgorithms/TrigPartialEventBuilding/src/PEBInfoWriterAlg.cxx b/Trigger/TrigAlgorithms/TrigPartialEventBuilding/src/PEBInfoWriterAlg.cxx
index 28402562aeb2f2d35fad855fa664a30e3d2c925f..aa6fc55de13ebab010796d2d297f47d4d2c8b03a 100644
--- a/Trigger/TrigAlgorithms/TrigPartialEventBuilding/src/PEBInfoWriterAlg.cxx
+++ b/Trigger/TrigAlgorithms/TrigPartialEventBuilding/src/PEBInfoWriterAlg.cxx
@@ -77,13 +77,16 @@ StatusCode PEBInfoWriterAlg::execute(const EventContext& eventContext) const {
     auto roiELInfo = findLink<TrigRoiDescriptorCollection>(previousDecision, initialRoIString());
     auto roiEL = roiELInfo.link;
     ATH_CHECK(roiEL.isValid());
-    const TrigRoiDescriptor* roi = *roiEL;
 
     // Create new decision
     Decision* newd = newDecisionIn(decisions);
 
+    // Attach empty PEB Info lists to the new decision
+    ATH_CHECK(newd->setDetail(PEBInfoWriterToolBase::robListKey(), std::vector<uint32_t>()));
+    ATH_CHECK(newd->setDetail(PEBInfoWriterToolBase::subDetListKey(), std::vector<uint32_t>()));
+
     // Push_back to toolInput
-    toolInputs.emplace_back(newd, eventContext, roi, previousDecision);
+    toolInputs.emplace_back(newd, eventContext, roiEL, previousDecision);
 
     // Link to new decision
     linkToPrevious(newd, previousDecision, eventContext);
diff --git a/Trigger/TrigAlgorithms/TrigPartialEventBuilding/src/PEBInfoWriterToolBase.cxx b/Trigger/TrigAlgorithms/TrigPartialEventBuilding/src/PEBInfoWriterToolBase.cxx
index 3c57fc6922de0eea271334b59d531b44ff4a78b8..500dd708c0ec7da436b6ad38e2a0b0d09a12d075 100644
--- a/Trigger/TrigAlgorithms/TrigPartialEventBuilding/src/PEBInfoWriterToolBase.cxx
+++ b/Trigger/TrigAlgorithms/TrigPartialEventBuilding/src/PEBInfoWriterToolBase.cxx
@@ -28,6 +28,7 @@ PEBInfoWriterToolBase::~PEBInfoWriterToolBase() {}
 // =============================================================================
 
 StatusCode PEBInfoWriterToolBase::decide(std::vector<Input>& inputs) const {
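+  // RoIs already processed across all input decisions, used to enforce the MaxRoIs limit on unique RoIs only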
+  std::set<ElementLink<TrigRoiDescriptorCollection>> uniqueRoIs;
   for (Input& input : inputs) {
     // Skip if previous step for this chain didn't pass
     if (not TrigCompositeUtils::passed(m_decisionId.numeric(), input.previousDecisionIDs)) {
@@ -35,20 +36,33 @@ StatusCode PEBInfoWriterToolBase::decide(std::vector<Input>& inputs) const {
       continue;
     }
 
+    // Count unique RoIs
+    bool isUnique = uniqueRoIs.insert(input.roiEL).second;
+    ATH_MSG_DEBUG("RoI eta/phi = " << (*input.roiEL)->eta() << "/" << (*input.roiEL)->phi() << " has "
+                  << (isUnique ? "not yet" : "already") << " been processed. So far seen "
+                  << uniqueRoIs.size() << " unique RoIs");
+
+    // Skip processing if max RoIs limit reached
+    if (m_maxRoIs>=0 && static_cast<int>(uniqueRoIs.size()) > m_maxRoIs) {
+      ATH_MSG_DEBUG("Skipping this input decision because number of processed RoIs reached MaxRoIs ("
+                    << m_maxRoIs.value() << ")");
+      // Make an accept decision without adding PEB Info (PEB hypo is pass-through)
+      TrigCompositeUtils::addDecisionID(m_decisionId, input.decision);
+      continue;
+    }
+
     // Create new PEB Info for this input
     PEBInfo pebInfo = createPEBInfo(input);
 
     // Merge with previous ROBs    
     std::vector<uint32_t> previousRobs;
-    if (input.decision->getDetail(robListKey(), previousRobs)) {
-      pebInfo.robs.insert(previousRobs.begin(), previousRobs.end());
-    }
+    ATH_CHECK(input.decision->getDetail(robListKey(), previousRobs));
+    pebInfo.robs.insert(previousRobs.begin(), previousRobs.end());
 
     // Merge with previous SubDets
     std::vector<uint32_t> previousSubDets;
-    if (input.decision->getDetail(subDetListKey(), previousSubDets)) {
-      pebInfo.subdets.insert(previousSubDets.begin(), previousSubDets.end());
-    }
+    ATH_CHECK(input.decision->getDetail(subDetListKey(), previousSubDets));
+    pebInfo.subdets.insert(previousSubDets.begin(), previousSubDets.end());
 
     // Attach the PEB Info to the decision
     std::vector<uint32_t> robVec(pebInfo.robs.begin(), pebInfo.robs.end());
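
[Editor's note: a standalone illustration of the uniqueness bookkeeping introduced above. std::set<T>::insert() reports via its .second member whether the element was new, so one container both deduplicates RoIs and yields the running count compared against MaxRoIs. Integers stand in for the RoI ElementLinks and the limit value is invented.]

#include <iostream>
#include <set>
#include <vector>

int main() {
  const std::vector<int> roiIds{1, 2, 2, 3, 1, 4};  // stand-ins for RoI ElementLinks
  const int maxRoIs = 3;                            // stand-in for the MaxRoIs property
  std::set<int> uniqueRoIs;
  for (int id : roiIds) {
    const bool isUnique = uniqueRoIs.insert(id).second;
    if (maxRoIs >= 0 && static_cast<int>(uniqueRoIs.size()) > maxRoIs) {
      std::cout << "RoI " << id << ": limit reached, accept without adding PEB info\n";
      continue;
    }
    std::cout << "RoI " << id << (isUnique ? " (new)" : " (repeat)") << ": build PEB info\n";
  }
  return 0;
}
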
diff --git a/Trigger/TrigAlgorithms/TrigPartialEventBuilding/src/RoIPEBInfoWriterTool.cxx b/Trigger/TrigAlgorithms/TrigPartialEventBuilding/src/RoIPEBInfoWriterTool.cxx
index eb001c8efbdb8b67e9f20351716e4fbd7ada8b97..5823bcb2dd20a7c13b242f2549b89ad0e7fcc17e 100644
--- a/Trigger/TrigAlgorithms/TrigPartialEventBuilding/src/RoIPEBInfoWriterTool.cxx
+++ b/Trigger/TrigAlgorithms/TrigPartialEventBuilding/src/RoIPEBInfoWriterTool.cxx
@@ -3,7 +3,7 @@
 */
 
 #include "RoIPEBInfoWriterTool.h"
-#include "GaudiKernel/PhysicalConstants.h" // for Gaudi::Units::twopi
+#include "CxxUtils/phihelper.h"
 #include <algorithm>
 #include <unordered_map>
 #include <string_view>
@@ -66,13 +66,8 @@ PEBInfoWriterToolBase::PEBInfo RoIPEBInfoWriterTool::createPEBInfo(const PEBInfo
   // Create output PEBInfo starting from the static extra PEBInfo
   PEBInfo pebi = m_extraPebInfo;
 
-  if (!input.roi) {
-    ATH_MSG_DEBUG("No RoI descriptor in the input for decision, skipping this decision");
-    return pebi;
-  }
-  ATH_MSG_DEBUG("Processing RoI " << *(input.roi));
-
-  float eta = input.roi->eta();
+  ATH_MSG_DEBUG("Processing RoI " << **(input.roiEL));
+  float eta = (*input.roiEL)->eta();
   float etaMin = eta - m_etaWidth;
   float etaMax = eta + m_etaWidth;
   // Stop further execution if RoI is entirely outside the max |eta| range
@@ -85,9 +80,9 @@ PEBInfoWriterToolBase::PEBInfo RoIPEBInfoWriterTool::createPEBInfo(const PEBInfo
   etaMin = std::max(-m_etaEdge.value(), etaMin);
   etaMax = std::min( m_etaEdge.value(), etaMax);
 
-  float phi = input.roi->eta();
-  float phiMin = std::remainder(phi-m_phiWidth, Gaudi::Units::twopi); // range (-pi, pi)
-  float phiMax = std::remainder(phi+m_phiWidth, Gaudi::Units::twopi); // range (-pi, pi)
+  float phi = (*input.roiEL)->phi();
+  float phiMin = CxxUtils::wrapToPi(phi - m_phiWidth); // range (-pi, pi)
+  float phiMax = CxxUtils::wrapToPi(phi + m_phiWidth); // range (-pi, pi)
 
   TrigRoiDescriptor roiForPEB(eta, etaMin, etaMax, phi, phiMin, phiMax);
 
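
[Editor's note: a plain-C++ sketch of the phi wrapping performed above by CxxUtils::wrapToPi, which lives in the ATLAS CxxUtils package. For these illustrative inputs, std::remainder against 2*pi gives the same style of wrapping into roughly (-pi, pi].]

#include <cmath>
#include <iostream>

// Illustrative equivalent of the wrapping step: map any angle into roughly (-pi, pi].
double wrapToPiSketch(double phi) {
  constexpr double twopi = 2.0 * 3.14159265358979323846;
  return std::remainder(phi, twopi);
}

int main() {
  const double phi = 3.0, halfWidth = 0.5;  // hypothetical RoI centre and phi half-width
  std::cout << "phiMin = " << wrapToPiSketch(phi - halfWidth) << "\n";  // 2.5 stays 2.5
  std::cout << "phiMax = " << wrapToPiSketch(phi + halfWidth) << "\n";  // 3.5 wraps to about -2.78
  return 0;
}
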
diff --git a/Trigger/TrigSteer/TrigOutputHandling/src/TriggerEDMSerialiserTool.cxx b/Trigger/TrigSteer/TrigOutputHandling/src/TriggerEDMSerialiserTool.cxx
index 52fb2043d5ded4dc3ca1d968a2f11d721aba60bd..0163e87a8fced9720f6b3de2a6c943e8d51239fd 100644
--- a/Trigger/TrigSteer/TrigOutputHandling/src/TriggerEDMSerialiserTool.cxx
+++ b/Trigger/TrigSteer/TrigOutputHandling/src/TriggerEDMSerialiserTool.cxx
@@ -5,6 +5,7 @@
 #include <cstring>
 #include <boost/core/demangle.hpp>
 #include <boost/algorithm/string.hpp>
+#include "GaudiKernel/IJobOptionsSvc.h"
 #include "GaudiKernel/IToolSvc.h"
 #include "GaudiKernel/System.h"
 #include "AthenaKernel/StorableConversions.h"
@@ -17,6 +18,13 @@
 
 #include "TriggerEDMSerialiserTool.h"
 
+#include <numeric>
+
+namespace {
+  // Special module ID used internally to store total result size limit in the truncation threshold map
+  constexpr uint16_t fullResultTruncationID = std::numeric_limits<uint16_t>::max();
+}
+
 TriggerEDMSerialiserTool::TriggerEDMSerialiserTool( const std::string& type,
 						    const std::string& name,
 						    const IInterface* parent )
@@ -34,6 +42,39 @@ StatusCode TriggerEDMSerialiserTool::initialize() {
   for ( const std::string& typeKeyAuxIDs : m_collectionsToSerialize.value() ) {
     ATH_CHECK(addCollectionToSerialise(typeKeyAuxIDs, m_toSerialise));
   }
+
+  // Retrieve the total result size limit from DataFlowConfig, which is a special object
+  // used online to hold DF properties passed from TDAQ to HLT as run parameters
+  SmartIF<IJobOptionsSvc> jobOptionsSvc = service<IJobOptionsSvc>("JobOptionsSvc", /*createIf=*/ false);
+  if (!jobOptionsSvc.isValid()) {
+    ATH_MSG_WARNING("Could not retrieve JobOptionsSvc, will not update the EventSizeHardLimitMB property");
+  }
+  else {
+    const Gaudi::Details::PropertyBase* prop = jobOptionsSvc->getClientProperty("DataFlowConfig", "DF_MaxEventSizeMB");
+    if (prop && m_eventSizeHardLimitMB.assign(*prop)) {
+      ATH_MSG_DEBUG("Updated EventSizeHardLimitMB to " << m_eventSizeHardLimitMB.value()
+                    << " from DataFlowConfig.DF_MaxEventSizeMB");
+    }
+    else {
+      ATH_MSG_DEBUG("Could not retrieve DataFlowConfig.DF_MaxEventSizeMB from JobOptionsSvc. This is fine if running "
+                    << "offline, but should not happen online. Leaving EventSizeHardLimitMB="
+                    << m_eventSizeHardLimitMB.value());
+    }
+  }
+
+  // Add the total result size limit to truncation threshold map
+  if (m_eventSizeHardLimitMB >= 0) {
+    if (m_fullResultTruncationFrac > 1.0) {
+      ATH_MSG_ERROR("Fraction cannot be > 1.0, but FullResultTruncationFrac is set to " << m_fullResultTruncationFrac);
+      return StatusCode::FAILURE;
+    }
+    float totalResultSizeLimitBytes = m_fullResultTruncationFrac * m_eventSizeHardLimitMB * 1024. * 1024.;
+    m_truncationThresholds[fullResultTruncationID] = static_cast<uint32_t>(totalResultSizeLimitBytes);
+  }
+  else {
+    m_truncationThresholds[fullResultTruncationID] = std::numeric_limits<uint32_t>::max();
+  }
+
   return StatusCode::SUCCESS;
 }
 
@@ -374,19 +415,32 @@ StatusCode TriggerEDMSerialiserTool::tryAddData(HLT::HLTResultMT& hltResult,
     return StatusCode::FAILURE;
   }
 
+  // Size in this module
   const uint32_t currentSizeBytes = hltResult.getSerialisedData().count(id)==0
                                     ? 0 : hltResult.getSerialisedData().at(id).size()*sizeof(uint32_t);
+  // Total size
+  size_t currentTotalSizeWords = 0;
+  for (const auto& [id, data] : hltResult.getSerialisedData()) currentTotalSizeWords += data.size();
+  const uint32_t currentTotalSizeBytes = currentTotalSizeWords*sizeof(uint32_t);
+  // Size to be added
   const uint32_t extraSizeBytes = data.size()*sizeof(uint32_t);
-  if (currentSizeBytes+extraSizeBytes < m_truncationThresholds.value().at(id)) {
-    // The data fits, so add it to the result
-    ATH_MSG_DEBUG("Adding data to result with module ID " << id);
-    hltResult.addSerialisedData(id, data);
+
+  if (currentTotalSizeBytes+extraSizeBytes > m_truncationThresholds.value().at(fullResultTruncationID)) {
+    // The data doesn't fit, flag the full result as truncated
+    ATH_MSG_DEBUG("Skipping adding data to result with module ID " << id << " because of full-result truncation");
+    hltResult.addTruncatedModuleId(fullResultTruncationID);
+    hltResult.addTruncatedModuleId(id);
   }
-  else {
-    // The data doesn't fit, flag the result as truncated
+  else if (currentSizeBytes+extraSizeBytes > m_truncationThresholds.value().at(id)) {
+    // The data doesn't fit, flag this module's result as truncated
     ATH_MSG_DEBUG("Skipping adding data to truncated result with module ID " << id);
     hltResult.addTruncatedModuleId(id);
   }
+  else {
+    // The data fits, so add it to the result
+    ATH_MSG_DEBUG("Adding data to result with module ID " << id);
+    hltResult.addSerialisedData(id, data);
+  }
   return StatusCode::SUCCESS;
 }
 
@@ -394,6 +448,15 @@ StatusCode TriggerEDMSerialiserTool::fillDebugInfo(const TruncationInfoMap& trun
                                                    xAOD::TrigCompositeContainer& debugInfoCont,
                                                    HLT::HLTResultMT& resultToFill,
                                                    SGImplSvc* evtStore) const {
+  // If full result truncation happened, flag all results as truncated to produce debug info for all
+  if (resultToFill.getTruncatedModuleIds().count(fullResultTruncationID)>0) {
+    ATH_MSG_ERROR("HLT result truncation on total size! Limit of "
+                  << m_truncationThresholds.value().at(fullResultTruncationID)/1024./1024.
+                  << " MB exceeded. Flagging all module IDs as truncated.");
+    for (const auto& [id, data] : resultToFill.getSerialisedData()) {
+      resultToFill.addTruncatedModuleId(id);
+    }
+  }
   // Loop over truncation info and fill histograms and debug info objects
   for (const auto& [id, truncationInfoVec] : truncationInfoMap) {
     if (resultToFill.getTruncatedModuleIds().count(id)>0) {
@@ -405,8 +468,8 @@ StatusCode TriggerEDMSerialiserTool::fillDebugInfo(const TruncationInfoMap& trun
       xAOD::TrigComposite::Accessor<std::vector<std::string>> typeNameVec("typeName");
       xAOD::TrigComposite::Accessor<std::vector<uint32_t>> sizeVec("size");
       xAOD::TrigComposite::Accessor<std::vector<char>> isRecordedVec("isRecorded");
-      std::pair<std::string, size_t> largestRecorded;
-      std::pair<std::string, size_t> largestDropped;
+      std::pair<std::string, size_t> largestRecorded{"None", 0};
+      std::pair<std::string, size_t> largestDropped{"None", 0};
       moduleId(*debugInfoThisModule) = id;
       uint32_t sizeSum = 0;
       for (const TruncationInfo& truncationInfo : truncationInfoVec) {
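
[Editor's note: a self-contained sketch of the two-level check that tryAddData now performs, with per-module thresholds plus a whole-result threshold stored in the same map under a sentinel module ID (uint16_t max, as in the patch). All module IDs and byte sizes below are invented for illustration.]

#include <cstdint>
#include <iostream>
#include <limits>
#include <map>

int main() {
  constexpr uint16_t fullResultID = std::numeric_limits<uint16_t>::max();
  // Per-module limits plus the total limit keyed by the sentinel ID (values invented).
  const std::map<uint16_t, uint32_t> thresholds{{0, 1000}, {5, 500}, {fullResultID, 1200}};
  std::map<uint16_t, uint32_t> written;  // bytes already written per module

  auto tryAdd = [&](uint16_t id, uint32_t extra) {
    uint32_t total = 0;
    for (const auto& [mid, bytes] : written) total += bytes;
    if (total + extra > thresholds.at(fullResultID))
      std::cout << "module " << id << ": full-result truncation, data skipped\n";
    else if (written[id] + extra > thresholds.at(id))
      std::cout << "module " << id << ": per-module truncation, data skipped\n";
    else {
      written[id] += extra;
      std::cout << "module " << id << ": added " << extra << " bytes\n";
    }
  };

  tryAdd(0, 600);  // fits
  tryAdd(5, 400);  // fits
  tryAdd(0, 600);  // exceeds the 1200-byte total limit -> full-result truncation
  return 0;
}
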
diff --git a/Trigger/TrigSteer/TrigOutputHandling/src/TriggerEDMSerialiserTool.h b/Trigger/TrigSteer/TrigOutputHandling/src/TriggerEDMSerialiserTool.h
index d5d81e07d872b31b1cc5e012e9678f03e9e5a6cf..4b5ef8da79f0af7539f312607e6ae9d3667fdd28 100644
--- a/Trigger/TrigSteer/TrigOutputHandling/src/TriggerEDMSerialiserTool.h
+++ b/Trigger/TrigSteer/TrigOutputHandling/src/TriggerEDMSerialiserTool.h
@@ -63,6 +63,15 @@ class TriggerEDMSerialiserTool: public extends<AthAlgTool, HLTResultMTMakerTool>
   Gaudi::Property<std::map<uint16_t,uint32_t>> m_truncationThresholds {
     this, "TruncationThresholds", {}, "HLT result truncation thresholds. Key is module ID, value is max size in bytes"
   };
+  Gaudi::Property<int> m_eventSizeHardLimitMB {
+    this, "EventSizeHardLimitMB", -1,
+    "Hard limit for output event size in megabytes. Set automatically in initialize from run parameters. "
+    "In partition, it corresponds to DCM sbaBlockSize_MiB. Value <0 means no limit."
+  };
+  Gaudi::Property<float> m_fullResultTruncationFrac {
+    this, "FullResultTruncationFrac", 0.8,
+    "Fraction of EventSizeHardLimitMB which defines the limit on the total size of HLT output (sum of all modules)"
+  };
   /// StoreGate key for the truncation debug info object
   SG::WriteHandleKey<xAOD::TrigCompositeContainer> m_debugInfoWHKey {
     this, "DebugInfoWHKey", "TruncationDebugInfo"
diff --git a/Trigger/TrigValidation/TrigAnalysisTest/share/q221_RDOtoRDOTrig_mt1_build.ref b/Trigger/TrigValidation/TrigAnalysisTest/share/q221_RDOtoRDOTrig_mt1_build.ref
index 757683d74d9ae684e5494b682782e6859e28aaec..fb53c911cd9190d79f396b53e45da08ab1f639c7 100644
--- a/Trigger/TrigValidation/TrigAnalysisTest/share/q221_RDOtoRDOTrig_mt1_build.ref
+++ b/Trigger/TrigValidation/TrigAnalysisTest/share/q221_RDOtoRDOTrig_mt1_build.ref
@@ -113,15 +113,15 @@ TrigSignatureMoniMT                                INFO HLT_mu6Comb_L1MU6 decisi
 TrigSignatureMoniMT                                INFO HLT_mu6_L1MU6                 10        10        10        5         5         5         0         0         5
 TrigSignatureMoniMT                                INFO HLT_mu6_L1MU6 decisions                           14        6         6         6         0         0
 TrigSignatureMoniMT                                INFO HLT_mu6_idperf_L1MU6          10        10        10        10        10        10        0         0         10
-TrigSignatureMoniMT                                INFO HLT_mu6_idperf_L1MU6 decisions                    14        14        17        20        0         0
+TrigSignatureMoniMT                                INFO HLT_mu6_idperf_L1MU6 decisions                    14        14        17        21        0         0
 TrigSignatureMoniMT                                INFO HLT_mu6_ivarmedium_L1MU6      10        10        10        5         5         5         5         0         5
 TrigSignatureMoniMT                                INFO HLT_mu6_ivarmedium_L1MU6 decisions                    14        6         6         6         6         0
 TrigSignatureMoniMT                                INFO HLT_mu6_msonly_L1MU6          10        10        10        0         10        0         0         0         10
 TrigSignatureMoniMT                                INFO HLT_mu6_msonly_L1MU6 decisions                    14        0         17        0         0         0
 TrigSignatureMoniMT                                INFO HLT_mu6_mu4_L12MU4            4         4         4         3         1         1         0         0         1
 TrigSignatureMoniMT                                INFO HLT_mu6_mu4_L12MU4 decisions                      40        18        6         6         0         0
-TrigSignatureMoniMT                                INFO HLT_mu6_mu6noL1_L1MU6         10        10        10        5         5         5         3         1         1
-TrigSignatureMoniMT                                INFO HLT_mu6_mu6noL1_L1MU6 decisions                    14        6         6         6         8         2
+TrigSignatureMoniMT                                INFO HLT_mu6_mu6noL1_L1MU6         10        10        10        5         5         5         3         0         0
+TrigSignatureMoniMT                                INFO HLT_mu6_mu6noL1_L1MU6 decisions                    14        6         6         6         8         0
 TrigSignatureMoniMT                                INFO HLT_mu6fast_L1MU6             10        10        10        0         0         0         0         0         10
 TrigSignatureMoniMT                                INFO HLT_mu6fast_L1MU6 decisions                       14        0         0         0         0         0
 TrigSignatureMoniMT                                INFO HLT_mu80_msonly_3layersEC_L1MU208         8         8         0         0         0         0         0         0
diff --git a/Trigger/TrigValidation/TrigUpgradeTest/share/full_menu_build.ref b/Trigger/TrigValidation/TrigUpgradeTest/share/full_menu_build.ref
index e4042a271aa6aa8f7c80c56a57e87d8ac28b80a4..1329b1801dfc8bac16a08745673c1aabdb2dc9a9 100644
--- a/Trigger/TrigValidation/TrigUpgradeTest/share/full_menu_build.ref
+++ b/Trigger/TrigValidation/TrigUpgradeTest/share/full_menu_build.ref
@@ -30,8 +30,8 @@ TrigSignatureMoniMT                                 INFO HLT_4mu4_L14MU4
 TrigSignatureMoniMT                                 INFO HLT_4mu4_L14MU4 decisions                         0         0         0         0         0         0         
 TrigSignatureMoniMT                                 INFO HLT_5j70_0eta240_L14J20       20        20        0         0         0         0         0         0         0         
 TrigSignatureMoniMT                                 INFO HLT_5j70_0eta240_L14J20 decisions                    0         0         0         0         0         0         
-TrigSignatureMoniMT                                 INFO HLT_beamspot_allTE_trkfast_L1J1520        20        20        0         0         0         0         0         20        
-TrigSignatureMoniMT                                 INFO HLT_beamspot_allTE_trkfast_L1J15 decisions                    20        0         0         0         0         0         
+TrigSignatureMoniMT                                 INFO HLT_beamspot_allTE_trkfast_BeamSpotPEB_L1J1520        20        20        20        0         0         0         0         20        
+TrigSignatureMoniMT                                 INFO HLT_beamspot_allTE_trkfast_BeamSpotPEB_L1J15 decisions                    20        20        0         0         0         0         
 TrigSignatureMoniMT                                 INFO HLT_costmonitor_CostMonDS_L1All20        20        20        20        0         0         0         0         20        
 TrigSignatureMoniMT                                 INFO HLT_costmonitor_CostMonDS_L1All decisions                    20        20        0         0         0         0         
 TrigSignatureMoniMT                                 INFO HLT_e26_etcut_L1EM22VHI       20        20        1         1         1         0         0         0         1         
@@ -117,7 +117,7 @@ TrigSignatureMoniMT                                 INFO HLT_mu6Comb_L1MU6 decis
 TrigSignatureMoniMT                                 INFO HLT_mu6_L1MU6                 20        20        3         1         1         1         0         0         1         
 TrigSignatureMoniMT                                 INFO HLT_mu6_L1MU6 decisions                           3         1         1         1         0         0         
 TrigSignatureMoniMT                                 INFO HLT_mu6_idperf_L1MU6          20        20        3         3         2         2         0         0         2         
-TrigSignatureMoniMT                                 INFO HLT_mu6_idperf_L1MU6 decisions                    3         3         2         2         0         0         
+TrigSignatureMoniMT                                 INFO HLT_mu6_idperf_L1MU6 decisions                    3         3         2         3         0         0         
 TrigSignatureMoniMT                                 INFO HLT_mu6_ivarmedium_L1MU6      20        20        3         1         1         1         1         0         1         
 TrigSignatureMoniMT                                 INFO HLT_mu6_ivarmedium_L1MU6 decisions                    3         1         1         1         1         0         
 TrigSignatureMoniMT                                 INFO HLT_mu6_msonly_L1MU6          20        20        3         0         2         0         0         0         2         
diff --git a/Trigger/TriggerCommon/TriggerMenuMT/CMakeLists.txt b/Trigger/TriggerCommon/TriggerMenuMT/CMakeLists.txt
index a15fa81213a2a70d5b85cdb6d437838e2b59c7ce..aea35c58a945d813f55d58378bb66a2cc65c1adf 100644
--- a/Trigger/TriggerCommon/TriggerMenuMT/CMakeLists.txt
+++ b/Trigger/TriggerCommon/TriggerMenuMT/CMakeLists.txt
@@ -133,6 +133,9 @@ atlas_add_test( generateMenuMT SCRIPT bash test_HLTmenu.sh
 #----------------------------------
 # List of menus to be created:
 atlas_build_lvl1_trigger_menu( LS2_v1 )
+atlas_build_lvl1_trigger_menu( Physics_pp_run3_v1 )
+atlas_build_lvl1_trigger_menu( PhysicsP1_pp_run3_v1 )
+atlas_build_lvl1_trigger_menu( MC_pp_run3_v1 )
 atlas_build_lvl1_trigger_menu( Cosmic_pp_run3_v1 )
 
 
diff --git a/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/CommonSequences/EventBuildingSequenceSetup.py b/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/CommonSequences/EventBuildingSequenceSetup.py
index 06d20f66a81e7215348da2e7a57e4cd6fd203c90..eb25a96e8ede41f3a82b25ccc667874ee807b06d 100644
--- a/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/CommonSequences/EventBuildingSequenceSetup.py
+++ b/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/CommonSequences/EventBuildingSequenceSetup.py
@@ -7,6 +7,7 @@ from TriggerMenuMT.HLTMenuConfig.Menu.MenuComponents import ChainStep, MenuSeque
 from TrigPartialEventBuilding.TrigPartialEventBuildingConf import PEBInfoWriterAlg
 from TrigPartialEventBuilding.TrigPartialEventBuildingConfig import StaticPEBInfoWriterToolCfg, RoIPEBInfoWriterToolCfg
 from DecisionHandling.DecisionHandlingConf import InputMakerForRoI
+from libpyeformat_helper import SubDetector
 from AthenaCommon.CFElements import seqAND, findAlgorithm
 from AthenaCommon.Logging import logging
 log = logging.getLogger('EventBuildingSequenceSetup')
@@ -43,15 +44,28 @@ def pebInfoWriterTool(name, eventBuildType):
     Create PEBInfoWriterTool configuration for the eventBuildType
     '''
     tool = None
-    if 'LArPEB' in eventBuildType:
+    if 'BeamSpotPEB' in eventBuildType:
+        tool = StaticPEBInfoWriterToolCfg(name)
+        tool.addSubDets([
+            SubDetector.PIXEL_BARREL,
+            SubDetector.PIXEL_DISK_SIDE, # note different name in C++, ADHI-4753
+            SubDetector.PIXEL_B_LAYER,
+            SubDetector.PIXEL_IBL,
+            SubDetector.SCT_BARREL_A_SIDE,
+            SubDetector.SCT_BARREL_C_SIDE,
+            SubDetector.SCT_ENDCAP_A_SIDE,
+            SubDetector.SCT_ENDCAP_C_SIDE,
+            SubDetector.TDAQ_CTP
+        ])
+    elif 'LArPEB' in eventBuildType:
         tool = RoIPEBInfoWriterToolCfg(name)
         tool.DetNames = ['PIXEL', 'SCT', 'TRT', 'TTEM', 'TTHEC', 'FCALEM', 'FCALHAD']
-        # TODO: tool.MaxRoIsPerEvent = 5
+        tool.MaxRoIs = 5
         tool.addHLTResultToROBList()  # add the main (full) HLT result to the list
         tool.addCTPResultToROBList()  # add the CTP result to the list
     elif 'RPCPEBSecondaryReadout' in eventBuildType:
         tool = StaticPEBInfoWriterToolCfg(name)
-        tool.ROBList = [0x610080, 0x620080]
+        tool.addROBs([0x610080, 0x620080])
     elif eventBuildType in EventBuildingInfo.getAllDataScoutingIdentifiers():
         # Pure DataScouting configuration
         tool = StaticPEBInfoWriterToolCfg(name)
diff --git a/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Menu/EventBuildingInfo.py b/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Menu/EventBuildingInfo.py
index f125e83fbece6ea6bab35c66d4ec17d3a490fe25..67ab9b35c79b8c19bb00efe630a0226594d14b1d 100644
--- a/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Menu/EventBuildingInfo.py
+++ b/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Menu/EventBuildingInfo.py
@@ -16,6 +16,7 @@ LArPEB, LumiPEB, RPCPEB, RPCPEBSecondaryReadout, TrkPEB, JetDS, PhotonDS
 
 # PEB identifiers
 PartialEventBuildingIdentifiers = [
+  'BeamSpotPEB',
   'LArPEB',
   'RPCPEBSecondaryReadout'
 ]
diff --git a/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Menu/MenuPrescaleConfig.py b/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Menu/MenuPrescaleConfig.py
index 1545b08dc7ee026848032c9c0dce851f05b0b2c4..52147d0b54b6a8b8d1c496002a3439b6e3ae7a53 100755
--- a/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Menu/MenuPrescaleConfig.py
+++ b/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Menu/MenuPrescaleConfig.py
@@ -64,6 +64,20 @@ def MenuPrescaleConfig(triggerConfigHLT):
             L1Prescales = Prescales.L1Prescales
             HLTPrescales = Prescales.HLTPrescales        
 
+    elif menu_name.startswith('MC_pp_run3_v1'):
+        log.info('MC_pp_run3_v1 menu setup')
+        from TriggerMenuMT.HLTMenuConfig.Menu.MC_pp_run3_v1 import setupMenu, Prescales
+        setupMenu()
+        if 'cosmics_prescale' in menu_name:
+            L1Prescales = Prescales.L1Prescales_cosmics
+            HLTPrescales = Prescales.HLTPrescales_cosmics
+        elif 'tight_mc_prescale' in menu_name:
+            L1Prescales = Prescales.L1Prescales_tight_mc_prescale
+            HLTPrescales = Prescales.HLTPrescales_tight_mc_prescale
+        else:
+            L1Prescales = Prescales.L1Prescales
+            HLTPrescales = Prescales.HLTPrescales        
+
 
     elif menu_name.startswith('LS2_emu_v1'):
         log.info('LS2_v1 menu setup')
diff --git a/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Menu/Physics_pp_run3_v1.py b/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Menu/Physics_pp_run3_v1.py
index 08bdd380a5b3bf4c29bbe2e6c9968aa0a146929d..9bc367136f5bcba34eb51141be51bb031da76c0c 100644
--- a/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Menu/Physics_pp_run3_v1.py
+++ b/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Menu/Physics_pp_run3_v1.py
@@ -104,7 +104,7 @@ def setupMenu():
    ]
     TriggerFlags.HeavyIonSlice.signatures  = []
     TriggerFlags.BeamspotSlice.signatures  = [
-        ChainProp(name='HLT_beamspot_allTE_trkfast_L1J15',  l1SeedThresholds=[''], stream=['BeamSpot'], groups=['RATE:BeamSpot',  'BW:BeamSpot']),
+        ChainProp(name='HLT_beamspot_allTE_trkfast_BeamSpotPEB_L1J15',  l1SeedThresholds=[''], stream=['BeamSpot'], groups=['RATE:BeamSpot',  'BW:BeamSpot']),
         #ChainProp(name='HLT_beamspot_activeTE_trkfast_L1J15',  l1SeedThresholds=[''], stream=['BeamSpot'], groups=['RATE:BeamSpot',  'BW:BeamSpot']),
         #ChainProp(name='HLT_beamspot_trkFS_trkfast_L1J15',  l1SeedThresholds=[''], stream=['BeamSpot'], groups=['RATE:BeamSpot',  'BW:BeamSpot']),
     ]
diff --git a/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Muon/MuonSequenceSetup.py b/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Muon/MuonSequenceSetup.py
index 4427b21cb87c417d49270b9bf8b412ddca835545..fd57504582b027139b71117a8e2ce43c7f97975d 100644
--- a/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Muon/MuonSequenceSetup.py
+++ b/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Muon/MuonSequenceSetup.py
@@ -19,6 +19,9 @@ muonRecFlags.TrackPerfSummaryLevel = 2
 muonRecFlags.TrackPerfDebugLevel   = 5
 muonCombinedRecFlags.doCaloTrkMuId = False
 muonCombinedRecFlags.printSummary = False
+muonCombinedRecFlags.doSiAssocForwardMuons = False
+muonCombinedRecFlags.doStatisticalCombination = False
+muonCombinedRecFlags.doCombinedFit = True
 
 from ViewAlgs.ViewAlgsConf import EventViewCreatorAlgorithm
 
diff --git a/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Muon/MuonSetup.py b/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Muon/MuonSetup.py
index 194508340988491c860e885005be0049c0a795e2..56037f4e5f3728b21494749b2832c633cc9fd6b1 100644
--- a/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Muon/MuonSetup.py
+++ b/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Muon/MuonSetup.py
@@ -482,12 +482,10 @@ def muEFSARecoSequence( RoIs, name ):
 def muEFCBRecoSequence( RoIs, name ):
   global TrackParticlesName
 
-  from AthenaCommon.AppMgr import ToolSvc
-  import AthenaCommon.CfgGetter as CfgGetter
-
-  from AthenaCommon.CfgGetter import getPublicTool, getPublicToolClone
+  from AthenaCommon.CfgGetter import getPublicToolClone
   from AthenaCommon import CfgMgr
   from AthenaCommon.CFElements import parOR, seqAND
+  from MuonCombinedRecExample.MuonCombinedAlgs import MuonCombinedInDetCandidateAlg, MuonCombinedAlg
 
   efAlgs = []
   muEFCBRecoSequence = parOR("efcbViewNode_"+name)
@@ -550,57 +548,18 @@ def muEFCBRecoSequence( RoIs, name ):
   trackParticles = PTTrackParticles[-1]
 
   #Make InDetCandidates
-  from TrkExTools.AtlasExtrapolator import AtlasExtrapolator
-  from TrackToCalo.TrackToCaloConf import Trk__ParticleCaloExtensionTool
-  from MuonTGRecTools.MuonTGRecToolsConf import Muon__MuonSystemExtensionTool
-  pcExtensionTool = Trk__ParticleCaloExtensionTool(Extrapolator = AtlasExtrapolator())
-  
-  muonExtTool = Muon__MuonSystemExtensionTool(Extrapolator = AtlasExtrapolator(),
-                                              ParticleCaloExtensionTool = pcExtensionTool)
-
-  theIndetCandidateAlg = CfgMgr.MuonCombinedInDetCandidateAlg("TrigMuonCombinedInDetCandidateAlg_"+name,TrackSelector=getPublicTool("MuonCombinedInDetDetailedTrackSelectorTool"),TrackParticleLocation = [trackParticles],ForwardParticleLocation=trackParticles, MuonSystemExtensionTool=muonExtTool, InDetCandidateLocation="InDetCandidates_"+name)
-
-  theCaloEnergyTool = getPublicToolClone("TrigCaloEnergyTool_"+name, "MuidCaloEnergyTool", EnergyLossMeasurement=False, MopParametrization=True, TrackIsolation=False)
-
-  from TrkExRungeKuttaIntersector.TrkExRungeKuttaIntersectorConf import Trk__IntersectorWrapper as Propagator
-  TrigMuonPropagator = Propagator(name = 'TrigMuonPropagator')
-  ToolSvc += TrigMuonPropagator
-
-  theCaloTSOS = getPublicToolClone("TrigCaloTrackStateOnSurface_"+name, "MuidCaloTrackStateOnSurface", CaloEnergyDeposit=theCaloEnergyTool, CaloEnergyParam=theCaloEnergyTool, Propagator =TrigMuonPropagator, MinRemainingEnergy= 200, ParamPtCut= 3000)
-  from MuidCaloScatteringTools.MuidCaloScatteringToolsConf import Rec__MuidMaterialEffectsOnTrackProvider
-  Rec__MuidMaterialEffectsOnTrackProvider.TSOSTool=theCaloTSOS
-
-  theErrorOptimiser = getPublicToolClone("TrigMuonErrorOptimiser_"+name, "MuonErrorOptimisationTool", PrepareForFit=False, RecreateStartingParameters=False,RefitTool=getPublicToolClone("TrigMuidRefitTool_"+name, "MuonRefitTool", AlignmentErrors = False, Fitter = CfgGetter.getPublicTool("iPatFitter")))
-
-  theTrackCleaner = getPublicToolClone("TrigMuonTrackCleaner_"+name, "MuonTrackCleaner", Fitter='TMEF_iPatFitter', SLFitter='TMEF_iPatFitter')
-
-  from TrkTrackSummaryTool.TrkTrackSummaryToolConf import Trk__TrackSummaryTool
-  trkSummaryTool =  Trk__TrackSummaryTool( 'TrigMuonTrackSummary',MuonSummaryHelperTool=getPublicTool('MuonTrackSummaryHelperTool'), doSharedHits=False)
-  if DetFlags.detdescr.ID_on():
-    from InDetTrigRecExample.InDetTrigConfigRecLoadTools import InDetTrigTrackSummaryHelperTool
-    trkSummaryTool.InDetSummaryHelperTool=InDetTrigTrackSummaryHelperTool
-    trkSummaryTool.doHolesInDet=True
-  ToolSvc += Trk__TrackSummaryTool('TrigMuonTrackSummary')
-
-  theTrackQueryNoFit = getPublicToolClone("TrigMuonTrackQueryNoFit_"+name, "MuonTrackQuery", Fitter="")
-  theTrackBuilderTool = getPublicToolClone("TrigCombinedMuonTrackBuilder_"+name,"CombinedMuonTrackBuilder", UseCaloTG = True, CaloTSOS=theCaloTSOS, CaloMaterialProvider='TMEF_TrkMaterialProviderTool', MuonHoleRecovery="",CaloEnergyParam=theCaloEnergyTool,MuonErrorOptimizer=theErrorOptimiser, Fitter='TMEF_iPatFitter', MaterialAllocator="TMEF_MaterialAllocator", Propagator=TrigMuonPropagator, LargeMomentumError=0.5, PerigeeAtSpectrometerEntrance=False, ReallocateMaterial=False, TrackSummaryTool=trkSummaryTool, Cleaner=theTrackCleaner,TrackQuery=theTrackQueryNoFit)
-  theTrackQuery = getPublicToolClone("TrigMuonTrackQuery_"+name, "MuonTrackQuery", Fitter=theTrackBuilderTool)
-
+  theIndetCandidateAlg = MuonCombinedInDetCandidateAlg("TrigMuonCombinedInDetCandidateAlg_"+name,TrackParticleLocation = [trackParticles],ForwardParticleLocation=trackParticles, 
+                                                       InDetCandidateLocation="InDetCandidates_"+name)
 
   #MS ID combination
-  theMuonCombinedFitTagTool = getPublicToolClone("TrigMuonCombinedFitTagTool_"+name, "MuonCombinedFitTagTool",TrackBuilder=theTrackBuilderTool,MuonRecovery=getPublicToolClone("TrigMuonRecovery_"+name,"MuidMuonRecovery", TrackBuilder=theTrackBuilderTool), TrackQuery=theTrackQuery, MatchQuality = getPublicToolClone("TrigMuonMatchQuality_"+name, "MuonMatchQuality", TrackQuery=theTrackQuery))
-  tools=[]
-  tools.append(theMuonCombinedFitTagTool)
-  theMuonCombinedTool = getPublicToolClone("TrigMuonCombinedToolCB_"+name, "MuonCombinedTool", MuonCombinedTagTools=tools)
-
   candidatesName = "MuonCandidates"
   if 'FS' in name:
     candidatesName = "MuonCandidates_FS"
-
-  theMuonCombinedAlg = CfgMgr.MuonCombinedAlg("TrigMuonCombinedAlg_"+name, MuonCandidateLocation=candidatesName, MuonCombinedTool=theMuonCombinedTool, CombinedTagMaps=["muidcoTagMap"], InDetCandidateLocation="InDetCandidates_"+name)
+  theMuonCombinedAlg = MuonCombinedAlg("TrigMuonCombinedAlg_"+name, MuonCandidateLocation=candidatesName, InDetCandidateLocation="InDetCandidates_"+name)
 
 
   #Create xAOD Muons
+  theTrackQueryNoFit = getPublicToolClone("TrigMuonTrackQueryNoFit_"+name, "MuonTrackQuery", Fitter="")
   muonparticlecreator = getPublicToolClone("MuonParticleCreatorCB_"+name, "TrackParticleCreatorTool", UseTrackSummaryTool=False, UseMuonSummaryTool=True, KeepAllPerigee=True)
   thecreatortoolCB= getPublicToolClone("MuonCreatorTool_triggerCB_"+name, "MuonCreatorTool", ScatteringAngleTool="", CaloMaterialProvider='TMEF_TrkMaterialProviderTool', MuonSelectionTool="", FillTimingInformation=False, UseCaloCells=False,TrackQuery=theTrackQueryNoFit,TrackParticleCreator=muonparticlecreator)
 
@@ -646,6 +605,7 @@ def muEFInsideOutRecoSequence(RoIs, name):
   from AthenaCommon.AppMgr import ToolSvc, ServiceMgr
   import AthenaCommon.CfgGetter as CfgGetter
   from MuonRecExample.MuonStandalone import MooSegmentFinderAlg
+  from MuonCombinedRecExample.MuonCombinedAlgs import MuonCombinedInDetCandidateAlg
 
   efAlgs = []
 
@@ -690,15 +650,8 @@ def muEFInsideOutRecoSequence(RoIs, name):
     trackParticles = PTTrackParticles[-1]
 
     #Make InDetCandidates
-    from TrkExTools.AtlasExtrapolator import AtlasExtrapolator
-    from TrackToCalo.TrackToCaloConf import Trk__ParticleCaloExtensionTool
-    from MuonTGRecTools.MuonTGRecToolsConf import Muon__MuonSystemExtensionTool
-    pcExtensionTool = Trk__ParticleCaloExtensionTool(Extrapolator = AtlasExtrapolator())
-  
-    muonExtTool = Muon__MuonSystemExtensionTool(Extrapolator = AtlasExtrapolator(),
-                                                ParticleCaloExtensionTool = pcExtensionTool)
-
-    theIndetCandidateAlg = CfgMgr.MuonCombinedInDetCandidateAlg("TrigMuonCombinedInDetCandidateAlg_"+name,TrackSelector=getPublicTool("MuonCombinedInDetDetailedTrackSelectorTool"),TrackParticleLocation = [trackParticles],ForwardParticleLocation=trackParticles, MuonSystemExtensionTool=muonExtTool, InDetCandidateLocation="InDetCandidates_"+name)
+    theIndetCandidateAlg = MuonCombinedInDetCandidateAlg("TrigMuonCombinedInDetCandidateAlg_"+name,TrackParticleLocation = [trackParticles],ForwardParticleLocation=trackParticles, 
+                                                         InDetCandidateLocation="InDetCandidates_"+name)
 
     efAlgs.append(theIndetCandidateAlg)
 
diff --git a/Trigger/TriggerCommon/TriggerMenuMT/scripts/generateL1MenuRun3.py b/Trigger/TriggerCommon/TriggerMenuMT/scripts/generateL1MenuRun3.py
index 9e9fc24fed8ff0d070a4313e639a1a7a9fdd8fbc..660e80ceece5def0d086931861f028f6f7d5d14b 100755
--- a/Trigger/TriggerCommon/TriggerMenuMT/scripts/generateL1MenuRun3.py
+++ b/Trigger/TriggerCommon/TriggerMenuMT/scripts/generateL1MenuRun3.py
@@ -58,6 +58,9 @@ def main():
         ("mc"   , "MC_pp_v8"     ),
         ("mcp"  , ["MC_pp_v8_no_prescale", "MC_pp_v8_loose_mc_prescale", "MC_pp_v8_tight_mc_prescale"]),
         ("ls"   , "LS2_v1"       ),
+        ("phyr3v1"   , "Physics_pp_run3_v1"       ),
+        ("phyp1r3v1"   , "PhysicsP1_pp_run3_v1"       ),
+        ("mcr3v1"   , "MC_pp_run3_v1"       ),
         ("cosmic", "Cosmic_pp_run3_v1"),
         ("hiphy4","Physics_HI_v4"),
         ("hiphy", "Physics_HI_v4"),
diff --git a/Trigger/TriggerCommon/TriggerMenuMT/scripts/generateL1TopoMenuMT.py b/Trigger/TriggerCommon/TriggerMenuMT/scripts/generateL1TopoMenuMT.py
index fb6c6350bc9eba7616e58b5c8b70f8c6e02d7118..2aef0e0ebaf53375ba24385a27f078810f65ec03 100755
--- a/Trigger/TriggerCommon/TriggerMenuMT/scripts/generateL1TopoMenuMT.py
+++ b/Trigger/TriggerCommon/TriggerMenuMT/scripts/generateL1TopoMenuMT.py
@@ -30,9 +30,12 @@ def main():
         #generateL1TopoMenu(menu="MC_pp_v7")
         #generateL1TopoMenu(menu="Physics_pp_v7")
         generateL1TopoMenu(menu="LS2_v1" )
+        generateL1TopoMenu(menu="Physics_pp_run3_v1" )
+        generateL1TopoMenu(menu="PhysicsP1_pp_run3_v1" )
+        generateL1TopoMenu(menu="MC_pp_run3_v1" )
         return 0
 
-    if sys.argv[1] in ["LS2_v1","Physics_pp_v7", "MC_pp_v7"]: # explicit names for TMXML nightly
+    if sys.argv[1] in ["LS2_v1","Physics_pp_v7", "MC_pp_v7", "Physics_pp_run3_v1", "PhysicsP1_pp_run3_v1", "MC_pp_run3_v1"]: # explicit names for TMXML nightly
         generateL1TopoMenu(menu=sys.argv[1])
         return 0
 
diff --git a/Trigger/TriggerCommon/TriggerMenuMT/scripts/generateLVL1MenuMT.py b/Trigger/TriggerCommon/TriggerMenuMT/scripts/generateLVL1MenuMT.py
index 5072938db801ea09e28818f2842d63075027b681..c557f68e81bb197e55289840009ef6ac70468c78 100755
--- a/Trigger/TriggerCommon/TriggerMenuMT/scripts/generateLVL1MenuMT.py
+++ b/Trigger/TriggerCommon/TriggerMenuMT/scripts/generateLVL1MenuMT.py
@@ -76,7 +76,8 @@ def findUnneededRun2():
 
 
 def findRequiredItemsFromXML():
-    menus = ['Physics_pp_v7','MC_pp_v7','LS2_v1','Cosmic_pp_run3_v1']
+    
+    menus = ['Physics_pp_v7','MC_pp_v7','LS2_v1', 'Physics_pp_run3_v1', 'PhysicsP1_pp_run3_v1', 'MC_pp_run3_v1', 'Cosmic_pp_run3_v1']
 
     from TriggerMenu.l1.XMLReader import L1MenuXMLReader
 
@@ -139,6 +140,9 @@ def main():
     if len(sys.argv)==1:
         
         generateL1Menu(menu="LS2_v1" )
+        generateL1Menu(menu="Physics_pp_run3_v1" )
+        generateL1Menu(menu="PhysicsP1_pp_run3_v1" )
+        generateL1Menu(menu="MC_pp_run3_v1" )
         generateL1Menu(menu="Cosmic_pp_run3_v1")
         return 0
 
@@ -147,6 +151,21 @@ def main():
         readL1MenuFromXML(sys.argv[1])
         return 0
     
+    if sys.argv[1].lower().startswith("physics_pp_run3_v1"):
+        menu = generateL1Menu(menu="Physics_pp_run3_v1")
+        menu.printCabling()
+        return 0
+
+    if sys.argv[1].lower().startswith("physicsp1_pp_run3_v1"):
+        menu = generateL1Menu(menu="PhysicsP1_pp_run3_v1")
+        menu.printCabling()
+        return 0
+
+    if sys.argv[1].lower().startswith("mc_pp_run3_v1"):
+        menu = generateL1Menu(menu="MC_pp_run3_v1")
+        menu.printCabling()
+        return 0
+
     if sys.argv[1].lower().startswith("phy6"):
         menu = generateL1Menu(menu="Physics_pp_v6")
         if printCabling: