diff --git a/Calorimeter/CaloRec/python/CaloTopoClusterConfig.py b/Calorimeter/CaloRec/python/CaloTopoClusterConfig.py index 7f334ebd4458aafe89e96d7e0028bfc962d7c952..a3120b9b97140791b94e50ca40b059c325502b7e 100644 --- a/Calorimeter/CaloRec/python/CaloTopoClusterConfig.py +++ b/Calorimeter/CaloRec/python/CaloTopoClusterConfig.py @@ -76,7 +76,7 @@ def getTopoClusterLocalCalibTools(configFlags): LCDeadMaterial = CaloLCDeadMaterialTool("LCDeadMaterial") LCDeadMaterial.HadDMCoeffKey = "HadDMCoeff2" LCDeadMaterial.ClusterRecoStatus = 0 - LCDeadMaterial.WeightModeDM = 2 + LCDeadMaterial.WeightModeDM = 2 LCDeadMaterial.UseHadProbability = True LCDeadMaterial.WeightingOfNegClusters = configFlags.Calo.TopoCluster.doTreatEnergyCutAsAbsolute @@ -101,13 +101,13 @@ def getTopoMoments(configFlags): TopoMoments.MaxAxisAngle = 20*deg TopoMoments.TwoGaussianNoise = configFlags.Calo.TopoCluster.doTwoGaussianNoise TopoMoments.MinBadLArQuality = 4000 - TopoMoments.MomentsNames = ["FIRST_PHI" + TopoMoments.MomentsNames = ["FIRST_PHI" ,"FIRST_ETA" - ,"SECOND_R" + ,"SECOND_R" ,"SECOND_LAMBDA" ,"DELTA_PHI" ,"DELTA_THETA" - ,"DELTA_ALPHA" + ,"DELTA_ALPHA" ,"CENTER_X" ,"CENTER_Y" ,"CENTER_Z" @@ -115,12 +115,12 @@ def getTopoMoments(configFlags): ,"CENTER_LAMBDA" ,"LATERAL" ,"LONGITUDINAL" - ,"FIRST_ENG_DENS" - ,"ENG_FRAC_EM" - ,"ENG_FRAC_MAX" - ,"ENG_FRAC_CORE" - ,"FIRST_ENG_DENS" - ,"SECOND_ENG_DENS" + ,"FIRST_ENG_DENS" + ,"ENG_FRAC_EM" + ,"ENG_FRAC_MAX" + ,"ENG_FRAC_CORE" + ,"FIRST_ENG_DENS" + ,"SECOND_ENG_DENS" ,"ISOLATION" ,"ENG_BAD_CELLS" ,"N_BAD_CELLS" @@ -222,7 +222,7 @@ def getTopoCalibMoments(configFlags): ,"ENG_CALIB_FRAC_EM" ,"ENG_CALIB_FRAC_HAD" ,"ENG_CALIB_FRAC_REST"] - + TopoCalibMoments.CalibrationHitContainerNames = ["LArCalibrationHitInactive" ,"LArCalibrationHitActive" ,"TileCalibHitActiveCell" @@ -231,43 +231,11 @@ def getTopoCalibMoments(configFlags): ,"TileCalibHitDeadMaterial"] return TopoCalibMoments -# Steering options for trigger -# Maybe offline reco options should 
be extracted from flags elsewhere -def CaloTopoClusterCfg(configFlags,cellsname="AllCalo",clustersname="",doLCCalib=None,sequenceName='AthAlgSeq'): +def CaloTopoClusterToolCfg(configFlags, cellsname): result=ComponentAccumulator() - if (sequenceName != 'AthAlgSeq'): - from AthenaCommon.CFElements import seqAND - #result.mainSeq( seqAND( sequenceName ) ) - result.addSequence( seqAND(sequenceName) ) - - if not clustersname: - clustersname = "CaloTopoClusters" - - from LArGeoAlgsNV.LArGMConfig import LArGMCfg - from TileGeoModel.TileGMConfig import TileGMCfg - from CaloTools.CaloNoiseCondAlgConfig import CaloNoiseCondAlgCfg - # Schedule total noise cond alg - result.merge(CaloNoiseCondAlgCfg(configFlags,"totalNoise")) - # Schedule electronic noise cond alg (needed for LC weights) - result.merge(CaloNoiseCondAlgCfg(configFlags,"electronicNoise")) - - CaloTopoClusterMaker, CaloTopoClusterSplitter, CaloClusterMaker, CaloClusterSnapshot=CompFactory.getComps("CaloTopoClusterMaker","CaloTopoClusterSplitter","CaloClusterMaker","CaloClusterSnapshot",) - - result.merge(LArGMCfg(configFlags)) - - from LArCalibUtils.LArHVScaleConfig import LArHVScaleCfg - result.merge(LArHVScaleCfg(configFlags)) - - result.merge(TileGMCfg(configFlags)) - - if not doLCCalib: - theCaloClusterSnapshot=CaloClusterSnapshot(OutputName=clustersname+"snapshot",SetCrossLinks=True) - else: - theCaloClusterSnapshot=CaloClusterSnapshot(OutputName=clustersname,SetCrossLinks=True) - # maker tools - TopoMaker = CaloTopoClusterMaker("TopoMaker") - + TopoMaker = CompFactory.CaloTopoClusterMaker("TopoMaker") + TopoMaker.CellsName = cellsname TopoMaker.CalorimeterNames=["LAREM", "LARHEC", @@ -281,7 +249,7 @@ def CaloTopoClusterCfg(configFlags,cellsname="AllCalo",clustersname="",doLCCalib "TileBar0", "TileBar1", "TileBar2", "TileExt0", "TileExt1", "TileExt2", "TileGap1", "TileGap2", "TileGap3", - "FCAL0", "FCAL1", "FCAL2"] + "FCAL0", "FCAL1", "FCAL2"] TopoMaker.NeighborOption = "super3D" 
TopoMaker.RestrictHECIWandFCalNeighbors = False TopoMaker.RestrictPSNeighbors = True @@ -294,18 +262,22 @@ def CaloTopoClusterCfg(configFlags,cellsname="AllCalo",clustersname="",doLCCalib TopoMaker.CutOOTseed = configFlags.Calo.TopoCluster.extendTimeCut and configFlags.Calo.TopoCluster.doTimeCut TopoMaker.UseTimeCutUpperLimit = configFlags.Calo.TopoCluster.useUpperLimitForTimeCut TopoMaker.TimeCutUpperLimit = 20.0 - - # note E or AbsE + + # note E or AbsE # # the following property must be set to TRUE in order to make double - # sided cuts on the seed and the cluster level + # sided cuts on the seed and the cluster level # TopoMaker.SeedCutsInAbsE = True TopoMaker.ClusterEtorAbsEtCut = 0.0*MeV # use 2-gaussian or single gaussian noise for TileCal TopoMaker.TwoGaussianNoise = configFlags.Calo.TopoCluster.doTwoGaussianNoise - - TopoSplitter = CaloTopoClusterSplitter("TopoSplitter") + result.setPrivateTools(TopoMaker) + return result + +def CaloTopoClusterSplitterToolCfg(configFlags): + result=ComponentAccumulator() + TopoSplitter = CompFactory.CaloTopoClusterSplitter("TopoSplitter") # cells from the following samplings will be able to form local # maxima. 
The excluded samplings are PreSamplerB, EMB1, # PreSamplerE, EME1, all Tile samplings, all HEC samplings and the @@ -326,6 +298,45 @@ def CaloTopoClusterCfg(configFlags,cellsname="AllCalo",clustersname="",doLCCalib TopoSplitter.ShareBorderCells = True TopoSplitter.RestrictHECIWandFCalNeighbors = False TopoSplitter.WeightingOfNegClusters = configFlags.Calo.TopoCluster.doTreatEnergyCutAsAbsolute + result.setPrivateTools(TopoSplitter) + return result + +# Steering options for trigger +# Maybe offline reco options should be extracted from flags elsewhere +def CaloTopoClusterCfg(configFlags,cellsname="AllCalo",clustersname="",doLCCalib=None,sequenceName='AthAlgSeq'): + result=ComponentAccumulator() + if (sequenceName != 'AthAlgSeq'): + from AthenaCommon.CFElements import seqAND + #result.mainSeq( seqAND( sequenceName ) ) + result.addSequence( seqAND(sequenceName) ) + + if not clustersname: + clustersname = "CaloTopoClusters" + + from LArGeoAlgsNV.LArGMConfig import LArGMCfg + from TileGeoModel.TileGMConfig import TileGMCfg + from CaloTools.CaloNoiseCondAlgConfig import CaloNoiseCondAlgCfg + # Schedule total noise cond alg + result.merge(CaloNoiseCondAlgCfg(configFlags,"totalNoise")) + # Schedule electronic noise cond alg (needed for LC weights) + result.merge(CaloNoiseCondAlgCfg(configFlags,"electronicNoise")) + + CaloClusterMaker, CaloClusterSnapshot=CompFactory.getComps("CaloClusterMaker","CaloClusterSnapshot",) + + result.merge(LArGMCfg(configFlags)) + + from LArCalibUtils.LArHVScaleConfig import LArHVScaleCfg + result.merge(LArHVScaleCfg(configFlags)) + + result.merge(TileGMCfg(configFlags)) + + if not doLCCalib: + theCaloClusterSnapshot=CaloClusterSnapshot(OutputName=clustersname+"snapshot",SetCrossLinks=True) + else: + theCaloClusterSnapshot=CaloClusterSnapshot(OutputName=clustersname,SetCrossLinks=True) + + TopoMaker = result.popToolsAndMerge( CaloTopoClusterToolCfg(configFlags, cellsname=cellsname)) + TopoSplitter = result.popToolsAndMerge( 
CaloTopoClusterSplitterToolCfg(configFlags) ) # # the following options are not set, since these are the default # values @@ -333,13 +344,13 @@ def CaloTopoClusterCfg(configFlags,cellsname="AllCalo",clustersname="",doLCCalib # NeighborOption = "super3D", # NumberOfCellsCut = 4, # EnergyCut = 500*MeV, - + CaloTopoCluster=CaloClusterMaker(clustersname) CaloTopoCluster.ClustersOutputName=clustersname CaloTopoCluster.ClusterMakerTools = [TopoMaker, TopoSplitter] - + from CaloBadChannelTool.CaloBadChanToolConfig import CaloBadChanToolCfg caloBadChanTool = result.popToolsAndMerge( CaloBadChanToolCfg(configFlags) ) CaloClusterBadChannelList=CompFactory.CaloClusterBadChannelList @@ -375,20 +386,20 @@ if __name__=="__main__": ConfigFlags.lock() - from AthenaConfiguration.MainServicesConfig import MainServicesCfg + from AthenaConfiguration.MainServicesConfig import MainServicesCfg from AthenaPoolCnvSvc.PoolReadConfig import PoolReadCfg cfg=MainServicesCfg(ConfigFlags) cfg.merge(PoolReadCfg(ConfigFlags)) # from IOVDbSvc.IOVDbSvcConfig import IOVDbSvcCfg # cfg.mergeAll(IOVDbSvcCfg(ConfigFlags)) - + theKey="CaloCalTopoClustersNew" topoAcc=CaloTopoClusterCfg(ConfigFlags) topoAlg = topoAcc.getPrimary() topoAlg.ClustersOutputName=theKey - + cfg.merge(topoAcc) from OutputStreamAthenaPool.OutputStreamConfig import OutputStreamCfg @@ -405,11 +416,11 @@ if __name__=="__main__": StreamName = 'StreamAOD' ) cfg.addEventAlgo(theNegativeEnergyCaloClustersThinner,"AthAlgSeq") - + # cfg.getService("StoreGateSvc").Dump=True cfg.run(10) #f=open("CaloTopoCluster.pkl","wb") #cfg.store(f) #f.close() - + diff --git a/Control/AthViews/src/SimpleView.cxx b/Control/AthViews/src/SimpleView.cxx index 9c232b98026abfc7b5c723a32bae6d5c010740f0..f8de70c5b0dcab27579b9430921e353803ccd91d 100644 --- a/Control/AthViews/src/SimpleView.cxx +++ b/Control/AthViews/src/SimpleView.cxx @@ -120,7 +120,6 @@ SG::DataProxy * SimpleView::findProxy( const CLID& id, const std::string& key, c */ SG::DataProxy * 
SimpleView::proxy( const void* const pTransient ) const { - throw std::runtime_error( "Not implemented: SimpleView::proxy" ); return m_store->proxy( pTransient ); } diff --git a/Control/AthenaConfiguration/python/ComponentAccumulator.py b/Control/AthenaConfiguration/python/ComponentAccumulator.py index b08120e3ecfde2dc733a5ac949c1abceb4d0a528..74d2ec9dbc945469758176660fe324f2f4cdd0ab 100644 --- a/Control/AthenaConfiguration/python/ComponentAccumulator.py +++ b/Control/AthenaConfiguration/python/ComponentAccumulator.py @@ -9,6 +9,7 @@ from AthenaCommon.Constants import INFO import GaudiKernel.GaudiHandles as GaudiHandles import GaudiConfig2 +import AthenaPython from AthenaConfiguration.Deduplication import deduplicate, DeduplicationFailed import collections @@ -331,8 +332,11 @@ class ComponentAccumulator(object): raise ConfigurationError("Can not find sequence %s" % sequenceName ) for algo in algorithms: - if algo.__component_type__ != "Algorithm": + if not isinstance(algo,GaudiConfig2._configurables.Configurable) and not isinstance(algo,AthenaPython.Configurables.CfgPyAlgorithm): raise TypeError("Attempt to add wrong type: %s as event algorithm" % type( algo ).__name__) + + if algo.__component_type__ != "Algorithm": + raise TypeError("Attempt to add an %s as event algorithm" % algo.__component_type__) if algo.name in self._algorithms: self._algorithms[algo.name].merge(algo) @@ -368,8 +372,11 @@ class ComponentAccumulator(object): return list( set( sum( flatSequencers( seq, algsCollection=self._algorithms ).values(), []) ) ) def addCondAlgo(self,algo,primary=False): - if algo.__component_type__ != "Algorithm": + if not isinstance(algo,GaudiConfig2._configurables.Configurable) and not isinstance(algo,AthenaPython.Configurables.CfgPyAlgorithm): raise TypeError("Attempt to add wrong type: %s as conditions algorithm" % type( algo ).__name__) + + if algo.__component_type__ != "Algorithm": + raise TypeError("Attempt to add wrong type: %s as conditions algorithm" % 
algo.__component_type__) pass deduplicate(algo,self._conditionsAlgs) #will raise on conflict if primary: @@ -389,6 +396,10 @@ class ComponentAccumulator(object): return hits[0] def addService(self,newSvc,primary=False,create=False): + + if not isinstance(newSvc,GaudiConfig2._configurables.Configurable) and not isinstance(newSvc,AthenaPython.Configurables.CfgPyService): + raise TypeError("Attempt to add wrong type: %s as service" % type( newSvc ).__name__) + if newSvc.__component_type__ != "Service": raise TypeError("Attempt to add wrong type: %s as service" % newSvc.__component_type__) pass @@ -408,8 +419,11 @@ class ComponentAccumulator(object): return def addPublicTool(self,newTool,primary=False): + if not isinstance(newTool,GaudiConfig2._configurables.Configurable) and not isinstance(newTool,AthenaPython.Configurables.CfgPyAlgTool): + raise TypeError("Attempt to add wrong type: %s as public AlgTool" % type( newTool ).__name__) + if newTool.__component_type__ != "AlgTool": - raise TypeError("Attempt to add wrong type: %s as AlgTool" % type( newTool ).__name__) + raise TypeError("Attempt to add wrong type: %s as public AlgTool" % newTool.__component_type__) deduplicate(newTool,self._publicTools) if primary: diff --git a/Control/AthenaMonitoringKernel/AthenaMonitoringKernel/HistogramFiller.h b/Control/AthenaMonitoringKernel/AthenaMonitoringKernel/HistogramFiller.h index 59f1e5f31bbf74fb59a8607eb7226331cfb5da10..60e5cb7560171dd2118d3bf8a237180eab2bb4a2 100644 --- a/Control/AthenaMonitoringKernel/AthenaMonitoringKernel/HistogramFiller.h +++ b/Control/AthenaMonitoringKernel/AthenaMonitoringKernel/HistogramFiller.h @@ -14,6 +14,8 @@ #include "AthenaMonitoringKernel/IHistogramProvider.h" #include "AthenaMonitoringKernel/IMonitoredVariable.h" +class TProfile; + namespace Monitored { // Forward declare generic histogram filler (see HistogramFillerUtils.h) diff --git a/Control/AthenaMonitoringKernel/src/HistogramFiller/VecHistogramFiller1D.h 
b/Control/AthenaMonitoringKernel/src/HistogramFiller/VecHistogramFiller1D.h index 27ecc1c19d164512785a5911276f544c6c78052d..fd212d1f54c519520ddaabff8b2c06695a641943 100644 --- a/Control/AthenaMonitoringKernel/src/HistogramFiller/VecHistogramFiller1D.h +++ b/Control/AthenaMonitoringKernel/src/HistogramFiller/VecHistogramFiller1D.h @@ -15,7 +15,7 @@ namespace Monitored { virtual unsigned fill(const HistogramFiller::VariablesPack& vars) const override { - if ( ATH_UNLIKELY( vars.var[0] == nullptr or vars.size() != 0 ) ) { return 0; } + if ( ATH_UNLIKELY(vars.size() == 0 or vars.var[0] == nullptr) ) { return 0; } std::function<bool(size_t)> cutMaskAccessor; if (vars.cut) { @@ -33,7 +33,7 @@ namespace Monitored { auto histogram = this->histogram<TH1>(); const unsigned offset = m_histDef->kVecUO ? 0 : 1; for (unsigned i = 0; i < vars.var[0]->size(); ++i) { - if (cutMaskAccessor && cutMaskAccessor(i)) { + if (cutMaskAccessor == nullptr or cutMaskAccessor(i)) { const double value = vars.var[0]->get(i); histogram->AddBinContent(i+offset, value); histogram->SetEntries(histogram->GetEntries() + value); diff --git a/Control/AthenaMonitoringKernel/test/HistogramFillerRebinable1DTestSuite.cxx b/Control/AthenaMonitoringKernel/test/HistogramFillerRebinable1DTestSuite.cxx index 4133e65d0efb96343a29913f455dee260cac8006..9723b256e8accd9b03bfeb86b79ffbc852db6946 100644 --- a/Control/AthenaMonitoringKernel/test/HistogramFillerRebinable1DTestSuite.cxx +++ b/Control/AthenaMonitoringKernel/test/HistogramFillerRebinable1DTestSuite.cxx @@ -23,6 +23,7 @@ #include "AthenaMonitoringKernel/MonitoredScalar.h" #include "../src/HistogramFiller/HistogramFillerRebinable.h" +#include "mocks/MockHistogramDef.h" #include "mocks/MockHistogramProvider.h" using namespace std; @@ -223,7 +224,7 @@ class HistogramFillerRebinable1DTestSuite { private: MsgStream m_log; - HistogramDef m_histogramDef; + MockHistogramDef m_histogramDef; shared_ptr<MockHistogramProvider> m_histogramProvider; shared_ptr<TH1D> 
m_histogram; diff --git a/Control/AthenaMonitoringKernel/test/HistogramFillerRebinable2DTestSuite.cxx b/Control/AthenaMonitoringKernel/test/HistogramFillerRebinable2DTestSuite.cxx index f55abb00313aa8dc81bd34a738c6fe8c2514d0f8..dfc5328225e65a1dca76eb4dd83b4bcaaa850abb 100644 --- a/Control/AthenaMonitoringKernel/test/HistogramFillerRebinable2DTestSuite.cxx +++ b/Control/AthenaMonitoringKernel/test/HistogramFillerRebinable2DTestSuite.cxx @@ -26,6 +26,7 @@ #include "AthenaMonitoringKernel/MonitoredScalar.h" #include "../src/HistogramFiller/HistogramFillerRebinable.h" +#include "mocks/MockHistogramDef.h" #include "mocks/MockHistogramProvider.h" using namespace std; @@ -122,7 +123,7 @@ class HistogramFillerRebinable2DTestSuite { private: MsgStream m_log; - HistogramDef m_histogramDef; + MockHistogramDef m_histogramDef; shared_ptr<MockHistogramProvider> m_histogramProvider; shared_ptr<TH2D> m_histogram; diff --git a/Control/AthenaMonitoringKernel/test/HistogramFillerVec1DTestSuite.cxx b/Control/AthenaMonitoringKernel/test/HistogramFillerVec1DTestSuite.cxx new file mode 100644 index 0000000000000000000000000000000000000000..c4f9c011a55fdbe595534a52cb76d347bd2800a9 --- /dev/null +++ b/Control/AthenaMonitoringKernel/test/HistogramFillerVec1DTestSuite.cxx @@ -0,0 +1,192 @@ +/* + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +*/ + +#include <list> +#include <functional> +#include <memory> + +#include "TestTools/initGaudi.h" +#include "TestTools/expect.h" +#include "GaudiKernel/MsgStream.h" +#include "GaudiKernel/ITHistSvc.h" +#include "AthenaKernel/getMessageSvc.h" + +#include "TH1.h" + +#include "AthenaMonitoringKernel/MonitoredCollection.h" +#include "../src/HistogramFiller/VecHistogramFiller1D.h" + +#include "mocks/MockHistogramDef.h" +#include "mocks/MockHistogramProvider.h" + +using namespace std; +using namespace Monitored; + +#define REGISTER_TEST_CASE(TEST_CASE_NAME, KVECUO) registerTestCase(&HistogramFillerVec1DTestSuite::TEST_CASE_NAME, 
#TEST_CASE_NAME, KVECUO) + +class HistogramFillerVec1DTestSuite { + // ==================== All registered test cases ==================== + private: + list<function<void(void)>> registeredTestCases() { + return { + REGISTER_TEST_CASE(test_fillWithVector, false), + REGISTER_TEST_CASE(test_fillWithShortVector, false), + REGISTER_TEST_CASE(test_fillWithVectorUO, true), + REGISTER_TEST_CASE(test_fillWithShortVectorUO, true) + }; + } + + // ==================== Test code ==================== + private: + void beforeEach(bool kVecUO) { + if (kVecUO) { + m_histogramDef.kVecUO = true; + } else { + m_histogramDef.kVec = true; + } + m_histogramProvider.reset(new MockHistogramProvider()); + m_histogram.reset(new TH1D("MockHistogram", "Mock Histogram", 5, 0.0, 5.0)); + m_testObj.reset(new VecHistogramFiller1D(m_histogramDef, m_histogramProvider)); + + m_histogramProvider->mock_histogram = [this]() { return m_histogram.get(); }; + } + + void afterEach() { + } + + void test_fillWithVector() { + + using Coll = vector<double>; + Coll values({1., 2., 3., 4., 5.}); + auto var = Monitored::Collection("values", values); + + HistogramFiller::VariablesPack vars({&var}); + + VALUE(m_histogram->GetXaxis()->GetNbins()) EXPECTED(5); + VALUE(m_histogram->GetXaxis()->GetXmin()) EXPECTED(0.0); + VALUE(m_histogram->GetXaxis()->GetXmax()) EXPECTED(5.0); + + m_testObj->fill(vars); + + for (unsigned i: {0, 6}) { + VALUE(m_histogram->GetBinContent(i)) EXPECTED(0.0); + } + for (unsigned i = 0; i != values.size(); ++ i) { + VALUE(m_histogram->GetBinContent(i+1)) EXPECTED(values[i]); + } + } + + void test_fillWithShortVector() { + + using Coll = vector<double>; + Coll values({1., 2., 3.}); + auto var = Monitored::Collection("values", values); + + HistogramFiller::VariablesPack vars({&var}); + + VALUE(m_histogram->GetXaxis()->GetNbins()) EXPECTED(5); + VALUE(m_histogram->GetXaxis()->GetXmin()) EXPECTED(0.0); + VALUE(m_histogram->GetXaxis()->GetXmax()) EXPECTED(5.0); + + m_testObj->fill(vars); + + for 
(unsigned i: {0, 4, 5, 6}) { + VALUE(m_histogram->GetBinContent(i)) EXPECTED(0.0); + } + for (unsigned i = 0; i != values.size(); ++ i) { + VALUE(m_histogram->GetBinContent(i+1)) EXPECTED(values[i]); + } + } + + void test_fillWithVectorUO() { + + using Coll = vector<double>; + Coll values({1., 2., 3., 4., 5., 6., 7.}); + auto var = Monitored::Collection("values", values); + + HistogramFiller::VariablesPack vars({&var}); + + VALUE(m_histogram->GetXaxis()->GetNbins()) EXPECTED(5); + VALUE(m_histogram->GetXaxis()->GetXmin()) EXPECTED(0.0); + VALUE(m_histogram->GetXaxis()->GetXmax()) EXPECTED(5.0); + + m_testObj->fill(vars); + + for (unsigned i = 0; i != values.size(); ++ i) { + VALUE(m_histogram->GetBinContent(i)) EXPECTED(values[i]); + } + } + + void test_fillWithShortVectorUO() { + + using Coll = vector<double>; + Coll values({1., 2., 3., 4., 5.}); + auto var = Monitored::Collection("values", values); + + HistogramFiller::VariablesPack vars({&var}); + + VALUE(m_histogram->GetXaxis()->GetNbins()) EXPECTED(5); + VALUE(m_histogram->GetXaxis()->GetXmin()) EXPECTED(0.0); + VALUE(m_histogram->GetXaxis()->GetXmax()) EXPECTED(5.0); + + m_testObj->fill(vars); + + for (unsigned i: {5, 6}) { + VALUE(m_histogram->GetBinContent(i)) EXPECTED(0.0); + } + for (unsigned i = 0; i != values.size(); ++ i) { + VALUE(m_histogram->GetBinContent(i)) EXPECTED(values[i]); + } + } + + // ==================== Helper methods ==================== + private: + + // ==================== Initialization & run ==================== + public: + HistogramFillerVec1DTestSuite() + : m_log(Athena::getMessageSvc(), "HistogramFillerVec1DTestSuite") { + } + + void run() { + for (function<void(void)> testCase : registeredTestCases()) { + testCase(); + } + } + + // ==================== Test case registration ==================== + private: + typedef void (HistogramFillerVec1DTestSuite::*TestCase)(void); + + function<void(void)> registerTestCase(TestCase testCase, const string& testCaseName, bool kVecUO) { + 
return [this, testCase, testCaseName, kVecUO]() { + m_log << MSG::INFO << "Current test case: " << testCaseName << endmsg; + beforeEach(kVecUO); + invoke(testCase, this); + afterEach(); + }; + } + + // ==================== Properties ==================== + private: + MsgStream m_log; + + MockHistogramDef m_histogramDef; + shared_ptr<MockHistogramProvider> m_histogramProvider; + shared_ptr<TH1D> m_histogram; + + shared_ptr<VecHistogramFiller1D> m_testObj; +}; + +int main() { + ISvcLocator* pSvcLoc; + + if (!Athena_test::initGaudi("GenericMon.txt", pSvcLoc)) { + throw runtime_error("This test can not be run: GenericMon.txt is missing"); + } + + HistogramFillerVec1DTestSuite().run(); + + return 0; +} diff --git a/Control/PerformanceMonitoring/PerfMonComps/CMakeLists.txt b/Control/PerformanceMonitoring/PerfMonComps/CMakeLists.txt index e2512aedb0f2e8677ac158ab46ebc8e22920425f..7e335590382b9b60a73475aa6421e1a6d38e7565 100644 --- a/Control/PerformanceMonitoring/PerfMonComps/CMakeLists.txt +++ b/Control/PerformanceMonitoring/PerfMonComps/CMakeLists.txt @@ -23,5 +23,5 @@ atlas_add_component( PerfMonComps AthDSoCallBacks nlohmann_json::nlohmann_json) # Install files from the package: -atlas_install_python_modules( python/*.py ) -atlas_install_joboptions( share/*.py ) +atlas_install_python_modules( python/*.py POST_BUILD_CMD ${ATLAS_FLAKE8} ) +atlas_install_joboptions( share/*.py POST_BUILD_CMD ${ATLAS_FLAKE8} ) diff --git a/Control/PerformanceMonitoring/PerfMonComps/python/DomainsRegistry.py b/Control/PerformanceMonitoring/PerfMonComps/python/DomainsRegistry.py index ad3867be4e2205256f7770d38541d23299d3985a..57deaf97bb05f7d02dad2a29f3aa9fe8764494f7 100644 --- a/Control/PerformanceMonitoring/PerfMonComps/python/DomainsRegistry.py +++ b/Control/PerformanceMonitoring/PerfMonComps/python/DomainsRegistry.py @@ -1,4 +1,4 @@ -# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration # @file 
PerfMonComps/python/DomainsRegistry.py # @purpose hold a registry of alg names and their association w/ domain @@ -158,8 +158,6 @@ class Registry(object): """ if registry is None: registry=self._registry - start_alg = None - idx = None for ielmt, elmt in enumerate(registry): if elmt[0] == name: return ielmt, elmt[1] @@ -328,7 +326,7 @@ class Registry(object): if not self._dirty_db: return dict(self._d2a_db) # side-effect of calling self.algs: will build self._d2a_db - a2d = self.algs + a2d = self.algs # noqa: F841 return dict(self._d2a_db) @property @@ -443,9 +441,6 @@ def _test_main(): print(" ref: ",ref[d]) assert algs == ref[d] - db = pdr.a2d_db() - db = pdr.d2a_db() - print("OK") return 0 diff --git a/Control/PerformanceMonitoring/PerfMonComps/python/JobOptCfg.py b/Control/PerformanceMonitoring/PerfMonComps/python/JobOptCfg.py index 639ead38e6495143ce8af05700b4d71258c157e4..bd31661d30beef3f6c8a67591cdc33305ce8e699 100644 --- a/Control/PerformanceMonitoring/PerfMonComps/python/JobOptCfg.py +++ b/Control/PerformanceMonitoring/PerfMonComps/python/JobOptCfg.py @@ -1,4 +1,4 @@ -# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration # @file: JobOptCfg.py # @purpose: a customized Configurable class for the PerfMonSvc @@ -145,11 +145,11 @@ class PerfMonSvc( _PerfMonSvc ): ioLabels = [ "streamRDO","streamESD", "streamAOD","streamTAG", "inputBackNav","inputFile" ] - for l in ioLabels: + for z in ioLabels: try: - ioContainers.extend(keystore[l].list()) + ioContainers.extend(keystore[z].list()) except AttributeError: - for k,v in keystore[l].items(): + for k,v in keystore[z].items(): ioContainers += [ "%s#%s" % (k,c) for c in v ] pass ## collect everything diff --git a/Control/PerformanceMonitoring/PerfMonComps/python/MTJobOptCfg.py b/Control/PerformanceMonitoring/PerfMonComps/python/MTJobOptCfg.py index 
a366480824bf6c0be85767e778913f24be9a6de6..3562eb45f3c73b1809f8c74d1092f1ecea522e92 100644 --- a/Control/PerformanceMonitoring/PerfMonComps/python/MTJobOptCfg.py +++ b/Control/PerformanceMonitoring/PerfMonComps/python/MTJobOptCfg.py @@ -1,4 +1,4 @@ -# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration # Job options configuration file for PerfMonMTSvc @@ -26,8 +26,6 @@ class PerfMonMTSvc ( _PerfMonMTSvc ): if not isinstance(handle, PerfMonMTSvc): return - from AthenaCommon import CfgMgr - ## Enable the auditors from AthenaCommon.AppMgr import theApp theApp.AuditAlgorithms = True diff --git a/Control/PerformanceMonitoring/PerfMonComps/python/PMonSD.py b/Control/PerformanceMonitoring/PerfMonComps/python/PMonSD.py index ec3404dbe2d75206484881a6f67429823d5fbb22..cd47b4052bcaaeb029913c434e6eca9b399ffbd2 100644 --- a/Control/PerformanceMonitoring/PerfMonComps/python/PMonSD.py +++ b/Control/PerformanceMonitoring/PerfMonComps/python/PMonSD.py @@ -1,5 +1,4 @@ -# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration -from __future__ import print_function +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration __doc__ ='Module for parsing and basic analysis of Semi-Detailed PerfMon (PMonSD) output. More info at https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PerfMonSD' __author__='Thomas Kittelmann <thomas.kittelmann@cern.ch>' @@ -15,7 +14,8 @@ def pmonsd_version(): def parse(infile,outfile=None): """Parse PMonSD output and return list of dictionaries. 
Optionally save output in pickle file.""" p=__smart_parse(infile) - if p==None: return None + if p is None: + return None if outfile: __save_output(p,outfile,'.psd',infile) return p @@ -25,14 +25,15 @@ def deparse(infile): identical to the ones it was parsed from""" out=[] p=__smart_parse(infile) - if p==None: return [] + if p is None: + return [] for e in p: out+=__deparse_single(e) return out -def need_line(l): +def need_line(z): """To identify lines which PMonSD needs for parsing""" - return l.startswith(_appname) + return z.startswith(_appname) def print_ascii(infile): """Print parsed PMonSD info to stdout""" @@ -52,13 +53,16 @@ def _validate_identical(infile1,infile2): #For validation p1=__smart_parse(infile1) p2=__smart_parse(infile2) - if p1==None or p2==None: return False + if p1 is None or p2 is None: + return False return p1==p2 def force_share(obj): """Dive into lists and dictionaries and make sure strings with similar content gets shared""" - if type(obj)==list: __fs_list(obj) - elif type(obj)==dict: __fs_dict(obj) + if type(obj)==list: + __fs_list(obj) + elif type(obj)==dict: + __fs_dict(obj) def get_shared_string(s): return __get_shared_string(s) @@ -94,7 +98,7 @@ def __save_output(data,outfile,prefix,infile=None): fh=gzip.open(outfile,'w') else: fh=open(outfile,'w') - if infile!=None and outfile==infile: + if infile is not None and outfile==infile: print("%s.parse WARNING: output file %s equals input file. 
Won't dump."%(_appname,outfile)) else: import cPickle @@ -146,8 +150,10 @@ def __smart_parse(infile): else: #in case this is already parsed info, make sure we just return it as it is: if type(infile)==list: - if len(infile)==0: return infile - if type(infile[0])==dict and 'steps_comps' in infile[0].keys(): return infile + if len(infile)==0: + return infile + if type(infile[0])==dict and 'steps_comps' in infile[0].keys(): + return infile #Hopefully this is something we can iterate through (like a list of strings or a file-handle): return __actual_parse(infile) @@ -168,13 +174,13 @@ def __actual_parse(filehandle): return (float(v),int(i)) d=new_dict() stepcount={}#for keeping track of in what order within each step a component is listed - for l in filehandle: - if not l.startswith(_prefix): + for z in filehandle: + if not z.startswith(_prefix): continue #ensure the first thing we pick up is the version: - if version==None: - if intro_version in l: - vstr=l.split(intro_version)[1].split()[0] + if version is None: + if intro_version in z: + vstr=z.split(intro_version)[1].split()[0] full_info=vstr[-1]=='f' v_major,v_minor=vstr[:-1].split('.') version=(int(v_major),int(v_minor)) @@ -185,22 +191,23 @@ def __actual_parse(filehandle): print("WARNING: Using PMonSD of version %f to parse output made with version %f"%(pmonsd_version(),version)) continue #remove prefix: - l=l[len(_prefix):].strip() - if l.startswith('WARNING'): continue - if l.startswith('=='): + z=z[len(_prefix):].strip() + if z.startswith('WARNING'): + continue + if z.startswith('=='): #This is a comment/separator. Look for end marker: - if end_marker in l: + if end_marker in z: #found. 
Grab parsed info and make room for more (in case of concatenated logs) output+=[d] d=new_dict() version=None#reset - elif 'Full output inside:' in l: - filename=l.split('Full output inside:')[1].split('===')[0].strip() + elif 'Full output inside:' in z: + filename=z.split('Full output inside:')[1].split('===')[0].strip() d['fulloutput_file']=filename continue - if not l.startswith('['): + if not z.startswith('['): continue#ignore column headers - f=l.split() + f=z.split() if f[0]=='[---]' and '=' in f[1]: for valfield in f[1:]: n,vstr=valfield.split('=',1) @@ -226,15 +233,17 @@ def __actual_parse(filehandle): d['special']['snapshots'][comp]={'n':n,'cpu':float(f[0]),'wall':float(f[1]), 'vmem':float(f[2]),'malloc':float(f[3])} else: - if not step in d['steps_comps'].keys(): + if step not in d['steps_comps'].keys(): d['steps_comps'][step]={} d['steps_totals'][step]={} stepcount[step]=0 iorder=stepcount[step] stepcount[step]+=1 #workaround situation where two collapsed or total lines have same form (nentries is always different): - if is_collapsed and comp in d['steps_comps'][step].keys(): comp+=':n=%i'%n - if is_total and comp in d['steps_totals'][step].keys(): comp+=':n=%i'%n + if is_collapsed and comp in d['steps_comps'][step].keys(): + comp+=':n=%i'%n + if is_total and comp in d['steps_totals'][step].keys(): + comp+=':n=%i'%n if len(f)==6: #has max@evt info d['steps_comps'][step][comp]={'order':iorder,'n':n,'cpu':float(f[0]),'vmem':float(f[2]),'malloc':float(f[4])} @@ -242,8 +251,10 @@ def __actual_parse(filehandle): else: #doesn't have max@evt info (step!='evt' or 'evt' but collapsed or total) nfo={'order':iorder,'n':n,'cpu':float(f[0]),'vmem':float(f[1]),'malloc':float(f[2])} - if is_total: d['steps_totals'][step][comp]=nfo - else: d['steps_comps'][step][comp]=nfo + if is_total: + d['steps_totals'][step][comp]=nfo + else: + d['steps_comps'][step][comp]=nfo force_share(output)#make sure we register shared strings return output @@ -251,14 +262,18 @@ def 
__deparse_single(d): _prefix=_appname+' ' out=[] assert type(d)==dict - def header(l,s,center=True): - if center: s=(' %s '%s).center(82,'=') - else: s=(' %s '%s).ljust(82,'=') - l+=[ _prefix+'==='+s+'==='] + def header(z,s,center=True): + if center: + s=(' %s '%s).center(82,'=') + else: + s=(' %s '%s).ljust(82,'=') + z+=[ _prefix+'==='+s+'==='] full_info=d['full_info'] - if full_info: fullstr='f' - else: fullstr='c' + if full_info: + fullstr='f' + else: + fullstr='c' header(out,'semi-detailed perfmon info v%i.%i%s / start'%(d['version'][0],d['version'][1],fullstr)) header(out,'Documentation: https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PerfMonSD',center=False) header(out,'Note that documentation includes recipe for easy parsing from python. ',center=False) @@ -268,7 +283,7 @@ def __deparse_single(d): stdsteps=['ini','1st','cbk','evt','fin'] steps=[] for step in d['steps_comps'].keys(): - if not step in stdsteps and not step in steps: + if step not in stdsteps and step not in steps: steps+=[step] steps.sort() steps=stdsteps+steps @@ -284,20 +299,25 @@ def __deparse_single(d): is_evt=step=='evt' header(out,'step %s'%step) entries=[] - if not step in d['steps_comps'].keys(): continue + if step not in d['steps_comps'].keys(): + continue for comp,compdata in d['steps_comps'][step].items(): - if '_comps]:n=' in comp: comp=comp.split('_comps]:n=')[0]+'_comps]' + if '_comps]:n=' in comp: + comp=comp.split('_comps]:n=')[0]+'_comps]' if is_evt and comp in d['evt_max_info'].keys(): s=format_evt_withmax%(compdata['n'],compdata['cpu'],format_max(d['evt_max_info'][comp]['cpu']), compdata['vmem'],format_max(d['evt_max_info'][comp]['vmem']), compdata['malloc'],format_max(d['evt_max_info'][comp]['malloc']),comp) else: - if is_evt: format=format_evt_nomax - else: format=format_notevt + if is_evt: + format=format_evt_nomax + else: + format=format_notevt s=format%(compdata['n'],compdata['cpu'],compdata['vmem'],compdata['malloc'],comp) entries+=[(compdata['order'],comp,s)] for 
comp,compdata in d['steps_totals'][step].items(): - if '_comps]:n=' in comp: comp=comp.split('_comps]:n=')[0]+'_comps]' + if '_comps]:n=' in comp: + comp=comp.split('_comps]:n=')[0]+'_comps]' format='%4i %6i %7i %7i %s' if is_evt: format='%4i %6i %7i %7i %s' @@ -305,8 +325,10 @@ def __deparse_single(d): entries+=[(compdata['order'],comp,s)] if entries: entries.sort() - if is_evt: out+=[ _prefix+' '*len(step)+colheader_evt] - else: out+=[ _prefix+' '*len(step)+colheader_std] + if is_evt: + out+=[ _prefix+' '*len(step)+colheader_evt] + else: + out+=[ _prefix+' '*len(step)+colheader_std] for _,_,s in entries: out+=[ '%s[%s] %s'%(_prefix,step,s)] header(out,'special info') @@ -323,8 +345,6 @@ def __deparse_single(d): for leak in leaks: dl=d['special']['leaks'][leak] out+=[ '%s[---] %4i - - %8i %8i %s'%(_prefix,dl['n'],dl['vmem'],dl['malloc'],leak)] - specialvals=d['special']['values'].keys() - svs=[] order=[['vmem_peak','vmem_mean','rss_mean'], ['jobcfg_walltime','jobstart'], ['cpu_bmips','cpu_res','release'], @@ -335,8 +355,10 @@ def __deparse_single(d): lineformat=[] for sv in lineorder: v=d['special']['values'][sv] - if type(v)==float: v_str='%i'%v - else: v_str=v + if type(v)==float: + v_str='%i'%v + else: + v_str=v lineformat+=['%s=%s'%(sv,v_str)] out+=['%s[---] %s'%(_prefix,' '.join(lineformat))] header(out,'semi-detailed perfmon info / end') @@ -352,17 +374,18 @@ def _validate_deparsing(f): fh=gzip_fastopen(f) else: fh=open(f) - for l in fh: - if l.startswith(_prefix): - if l.startswith(_prefix+'WARNING'): + for z in fh: + if z.startswith(_prefix): + if z.startswith(_prefix+'WARNING'): continue - if l.endswith('\n'): l=l[0:-1] - lines+=[l] + if z.endswith('\n'): + z=z[0:-1] + lines+=[z] if len(lines)==0: print("File does not have %s lines!"%_appname) return False d=__smart_parse(lines) - if d==None: + if d is None: return False lines2=deparse(d) if len(lines)!=len(lines2): @@ -388,7 +411,8 @@ def _validate_deparsing(f): def __actual_diff(infile1,infile2): 
d1=__smart_parse(infile1) d2=__smart_parse(infile2) - if d1==None or d2==None: return False + if d1 is None or d2 is None: + return False #Gymnastics to accept separate types: if type(d1)==list and type(d2)==list: if len(d1)!=len(d2): @@ -422,13 +446,15 @@ def __actual_diff(infile1,infile2): anycollapsed=False for comp,data in compdata.items(): n=data['n'] - if not n in nentries2ncomps.keys(): nentries2ncomps[n]=0 + if n not in nentries2ncomps.keys(): + nentries2ncomps[n]=0 if comp.startswith('[collapsed_'): anycollapsed=True - nc=int(comp.split('_')[1]) + #nc=int(comp.split('_')[1]) else: - nc=1 - nentries2ncomps[n]+=1 + pass + #nc=1 + nentries2ncomps[n]+=1 # check if this should be +=nc return nentries2ncomps,anycollapsed @@ -455,7 +481,7 @@ def __actual_diff(infile1,infile2): if not anycollapsed1 and not anycollapsed2: #awesome, we can check all comps completely before vs. after for comp,compdata in d1['steps_comps'][step].items(): - if not comp in d2['steps_comps'][step].keys(): + if comp not in d2['steps_comps'][step].keys(): print("Difference: Component %s only present in one input in step %s"%(comp,step)) return False check+=[(comp,compdata,d2['steps_comps'][step][comp])] @@ -477,14 +503,17 @@ def __get_shared_string(s): global __allstrings return __allstrings.setdefault(s,s) -def __fs_list(l): - i=len(l) +def __fs_list(z): + i=len(z) while i: i-=1 - t=type(l[i]) - if t==str: l[i]=__get_shared_string(l[i]) - elif t==list: __fs_list(l[i]) - elif t==dict: __fs_dict(l[i]) + t=type(z[i]) + if t==str: + z[i]=__get_shared_string(z[i]) + elif t==list: + __fs_list(z[i]) + elif t==dict: + __fs_dict(z[i]) def __fs_dict(d): keys=d.keys() @@ -492,7 +521,10 @@ def __fs_dict(d): o=d[k] del d[k] t=type(o) - if t==str: o=__get_shared_string(o) - elif t==list: __fs_list(o) - elif t==dict: __fs_dict(o) + if t==str: + o=__get_shared_string(o) + elif t==list: + __fs_list(o) + elif t==dict: + __fs_dict(o) d[__get_shared_string(k)]=o diff --git 
a/Control/PerformanceMonitoring/PerfMonComps/python/PerfMonFlags.py b/Control/PerformanceMonitoring/PerfMonComps/python/PerfMonFlags.py index 565bb21ad05de13700d44ffabe18b094df03554a..150ecf02ad8d4192369a5c2d8c44233abd49f458 100644 --- a/Control/PerformanceMonitoring/PerfMonComps/python/PerfMonFlags.py +++ b/Control/PerformanceMonitoring/PerfMonComps/python/PerfMonFlags.py @@ -1,4 +1,4 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration # @file: PerfMonFlags.py # @purpose: a container of flags for Performance Monitoring @@ -58,8 +58,6 @@ class doPersistencyMonitoring(JobProperty): if not jobproperties.PerfMonFlags.doMonitoring(): jobproperties.PerfMonFlags.doMonitoring = True pass - from AthenaCommon.AppMgr import ServiceMgr as svcMgr - #svcMgr.PerfMonSvc.MonLvl = -1 return # class doDetailedMonitoring(JobProperty): @@ -105,8 +103,6 @@ class doFullMon(JobProperty): jobproperties.PerfMonFlags.doFastMon = False jobproperties.PerfMonFlags.doMonitoring = True # setup values - from AthenaCommon.AppMgr import ServiceMgr as svcMgr - #svcMgr.PerfMonSvc.MonLvl = -1 # enable DSO monitoring jobproperties.PerfMonFlags.doDsoMonitoring = True # activate persistency monitoring too @@ -410,7 +406,7 @@ def _decode_pmon_opts(opts): elif opt.startswith('+'): val = True flag_name = flag_name[1:] - if not flag_name in dispatch: + if flag_name not in dispatch: raise ValueError( '[%s] is not a valid PerfMonFlag (allowed: %r)' % (flag_name, dispatch.keys()) diff --git a/Control/PerformanceMonitoring/PerfMonComps/python/PerfMonSerializer.py b/Control/PerformanceMonitoring/PerfMonComps/python/PerfMonSerializer.py index 94cc5df2b969fcdcde4f5282c799a4536498b6b3..ab092b64cc30a40beadc4d77f3aa2aeb78c46324 100644 --- a/Control/PerformanceMonitoring/PerfMonComps/python/PerfMonSerializer.py +++ b/Control/PerformanceMonitoring/PerfMonComps/python/PerfMonSerializer.py @@ -1,9 +1,7 @@ -# Copyright (C) 
2002-2019 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration # @file PerfMonComps/python/PerfMonSerializer -from __future__ import with_statement, print_function - __version__ = "$Revision: 524466 $" __doc__ = "various utils to encode/decode perfmon (meta)data with base64" __author__ = "Sebastien Binet, Thomas Kittlemann" @@ -161,23 +159,22 @@ def iextract_pmon_data(fname): else: raise ValueError("expect a xyz.pmon.gz or xyz.stream file (got [%s])"%(fname,)) - from collections import defaultdict import numpy as np out = _init_pmon_data() with open(stream_fname, 'r') as f: - for l in f: + for z in f: data, step, idx, comp = (None, ) * 4 - if l.startswith('#'): + if z.startswith('#'): continue #print("[%s]" % l.strip()) # handle things like: # /io/std::vector<unsigned int>#L1CaloUnpackingErrors ... # /io/std::map<std::string,std::vector<int> >#mapdata ... - l = l.replace('unsigned int', 'unsigned-int')\ + z = z.replace('unsigned int', 'unsigned-int')\ .replace('> >', '>->') - fields = l.split() + fields = z.split() #print("##",repr(l)) if fields[0].startswith(('/ini/','/evt/','/fin/', '/cbk/','/usr/', @@ -345,7 +342,7 @@ def iextract_pmon_data(fname): pass else: print("warning: unhandled field [%s]" % (fields[0],)) - print(repr(l)) + print(repr(z)) # yields what we got so far yield step, idx, comp, out @@ -391,7 +388,8 @@ def encode(data, use_base64=True): def decode(s): """decode a (compressed) string into a python object """ - if not s: return None + if not s: + return None import zlib import cPickle as pickle if s[0]=='B': @@ -399,7 +397,6 @@ def decode(s): s=base64.b64decode(s[1:]) else: s=s[1:] - d=pickle.loads(zlib.decompress(s)) return pickle.loads(zlib.decompress(s)) def build_callgraph(fname): @@ -417,9 +414,7 @@ def build_callgraph(fname): current_step = 'ini' local_ctx = None - out = None for step, idx, comp, table in iextract_pmon_data(fname): - out = table if idx is None: if comp == 
'PerfMonSliceIo': # ignore this component for now... @@ -493,7 +488,7 @@ def build_callgraph(fname): # push the stack of contexes parent_ctx = local_ctx local_ctx = GraphNode(comp, parent=parent_ctx) - if not step in graph.keys(): + if step not in graph.keys(): local_ctx.ctype = step parent_ctx.children.append(local_ctx) elif idx == 1: diff --git a/Control/PerformanceMonitoring/PerfMonComps/python/PyComps.py b/Control/PerformanceMonitoring/PerfMonComps/python/PyComps.py index 305c6a877a770203437560d99871a0fe5e10b5e2..0646fbafff59c9aa1eda8d7e875c55b50c94b151 100644 --- a/Control/PerformanceMonitoring/PerfMonComps/python/PyComps.py +++ b/Control/PerformanceMonitoring/PerfMonComps/python/PyComps.py @@ -1,15 +1,13 @@ -# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration # @file: PerfMonComps/python/PyComps.py # @purpose: a set of python components to perform performance monitoring # @author: Sebastien Binet <binet@cern.ch> -from __future__ import print_function __doc__ = 'a set of python components to perform performance monitoring' __version__ = '$Revision: 298807 $' __author__ = 'Sebastien Binet <binet@cern.ch>' -import AthenaCommon.SystemOfUnits as Units import AthenaPython.PyAthena as PyAthena from AthenaPython.PyAthena import StatusCode @@ -105,8 +103,8 @@ class PyStorePayloadMon (PyAthena.Svc): tp_name = clid2name(p.clID()) print(fmt, (mem_0, mem_1, mem_0 - mem_1, tp_name, p.name()), file=fd) pass - mem_store_0 = long(mem_store_0) - mem_store_1 = long(mem_store_1) + mem_store_0 = int(mem_store_0) + mem_store_1 = int(mem_store_1) print(fmt, ( mem_store_0, mem_store_1, mem_store_0 - mem_store_1, @@ -133,7 +131,7 @@ class PyStorePayloadMon (PyAthena.Svc): ## mem_0, mem_1, mem_1 - mem_0, ncalls_0, ncalls_1, ## p.clID(), p.name() ## )) - return (p, long(mem_0), long(mem_1)) + return (p, int(mem_0), int(mem_1)) def finalize(self): self.msg.info('==> finalize...') diff --git 
a/Control/PerformanceMonitoring/PerfMonComps/python/PyMonUtils.py b/Control/PerformanceMonitoring/PerfMonComps/python/PyMonUtils.py index 78bd444393e970e36d22d259f3a810c2eadde1fc..488d9145455de5945dadb1dfdf2dbe5303747f98 100644 --- a/Control/PerformanceMonitoring/PerfMonComps/python/PyMonUtils.py +++ b/Control/PerformanceMonitoring/PerfMonComps/python/PyMonUtils.py @@ -1,8 +1,7 @@ -# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration # @file: PyMonUtils.py # @author: Sebastien Binet <binet@cern.ch> -from __future__ import print_function __author__ = "Sebastien Binet <binet@cern.ch>" __version__ = "$Revision: 1.3 $" @@ -44,7 +43,7 @@ def mon_push_back (sgname='StoreGateSvc'): # no double counting from symlinks # FIXME: it is actually valid to have 2 different collections # (=/= CLIDs) with the same key... - if wasted.has_key(k): + if k in wasted: continue clid = dp.clID() klass = "%s" % cl.typename(clid) @@ -81,18 +80,22 @@ def mon_push_back (sgname='StoreGateSvc'): def dump_smaps (fname=None): import os,sys - if not (fname is None): o = open (fname, 'w') - else: o = sys.stdout - for l in open('/proc/%d/smaps'%os.getpid()): - print(l, file=o) + if not (fname is None): + o = open (fname, 'w') + else: + o = sys.stdout + for z in open('/proc/%d/smaps'%os.getpid()): + print(z, file=o) if not (fname is None): o.close() return def loaded_libs (fname=None, pid=None, show=False): import os,sys,re - if not (fname is None): o = open (fname, 'w') - else: o = sys.stdout + if not (fname is None): + o = open (fname, 'w') + else: + o = sys.stdout pat = re.compile(r'(?P<addr_beg>\w*?)\-(?P<addr_end>\w*?)\s'\ r'(?P<perm>.{4})\s(?P<offset>\w*?)\s'\ r'(?P<devmajor>\d{2}):(?P<devminor>\d{2})\s'\ @@ -102,13 +105,13 @@ def loaded_libs (fname=None, pid=None, show=False): if pid is None: pid = os.getpid() for line in open('/proc/%s/smaps'%pid): - l = line.strip() - res = re.match(pat,l) + z = 
line.strip() + res = re.match(pat,z) if res: g = res.group libname = g('libname').strip() libs.add(_realpath(libname)) - libs = sorted([l for l in libs], reverse=True) + libs = sorted([z for z in libs], reverse=True) if show: for libname in libs: print(libname, file=o) @@ -117,8 +120,6 @@ def loaded_libs (fname=None, pid=None, show=False): import sys if sys.platform == 'darwin': def pymon(): - from os import getpid,sysconf - from sys import platform from resource import getrusage, RUSAGE_SELF cpu = getrusage(RUSAGE_SELF) cpu = (cpu.ru_utime+cpu.ru_stime) * 1e3 # in milliseconds @@ -138,7 +139,6 @@ if sys.platform == 'darwin': else: def pymon(): from os import getpid,sysconf - from sys import platform from resource import getrusage, RUSAGE_SELF cpu = getrusage(RUSAGE_SELF) cpu = (cpu.ru_utime+cpu.ru_stime) * 1e3 # in milliseconds @@ -152,13 +152,17 @@ else: def lshosts_infos(): import socket,commands hostname = '<unknown>' - try: hostname = socket.gethostname() - except Exception: pass + try: + hostname = socket.gethostname() + except Exception: + pass sc,out = commands.getstatusoutput('which lshosts') - if sc != 0: return ('no lshosts command',0.) # no lshosts could be found + if sc != 0: + return ('no lshosts command',0.) # no lshosts could be found cmd = out sc,out = commands.getstatusoutput("%s %s"%(cmd,hostname)) - if sc != 0: return ('host not in db', 0.) + if sc != 0: + return ('host not in db', 0.) 
cpu_infos = {} try: title,data = out.splitlines() diff --git a/Control/PerformanceMonitoring/PerfMonComps/python/PyPerfMon.py b/Control/PerformanceMonitoring/PerfMonComps/python/PyPerfMon.py index 60d0dc473fbcddc9be1cde13dffcee6ccd3595ae..90ef5b7bdbd1b5a7fa980de4cb4e1cc205162875 100644 --- a/Control/PerformanceMonitoring/PerfMonComps/python/PyPerfMon.py +++ b/Control/PerformanceMonitoring/PerfMonComps/python/PyPerfMon.py @@ -2,7 +2,6 @@ # @file: PyPerfMon.py # @author: Sebastien Binet <binet@cern.ch> -from __future__ import with_statement __author__ = "Sebastien Binet <binet@cern.ch>" __version__ = "$Revision: 1.51 $" @@ -10,17 +9,12 @@ __doc__ = """python module holding a python service to monitor athena perfor """ import os,sys -from time import time -import resource -from resource import getrusage as resource_getrusage -import string -import array import AthenaCommon.Logging as L _perfMonStates = ('ini','evt','fin') -from PerfMonComps.PyMonUtils import * +from PerfMonComps.PyMonUtils import Units, pymon from PyUtils.Decorators import memoize, forking @@ -54,7 +48,6 @@ class Svc(object): instances = {} def __init__(self, name, properties = None): - import AthenaCommon.Logging as L ## init base class super(Svc,self).__init__() self.name = name @@ -94,9 +87,12 @@ class Svc(object): cfg_module = 'PerfMonComps' elif c in cfgs: cfg = cfgs[c] - if isinstance(cfg, ConfigurableAlgorithm): cfg_type = 'alg' - elif isinstance(cfg, ConfigurableAlgTool): cfg_type = 'algtool' - elif isinstance(cfg, ConfigurableService): cfg_type = 'svc' + if isinstance(cfg, ConfigurableAlgorithm): + cfg_type = 'alg' + elif isinstance(cfg, ConfigurableAlgTool): + cfg_type = 'algtool' + elif isinstance(cfg, ConfigurableService): + cfg_type = 'svc' cfg_class = cfg.__class__.__name__ cfg_module = cfg.__class__.__module__ else: @@ -139,7 +135,7 @@ class Svc(object): ## perfmon domains try: - import DomainsRegistry as pdr + import PerfMonComps.DomainsRegistry as pdr self.meta['domains_a2d'] = 
pdr.a2d_db() except Exception: _msg.info('problem retrieving domains-registry...') @@ -159,7 +155,9 @@ class Svc(object): 'rt': (0.,0.), } - import gc; gc.collect(); del gc + import gc + gc.collect() + del gc return def domains_db(self): @@ -170,7 +168,6 @@ class Svc(object): @property def msg(self): - import AthenaCommon.Logging as L return L.logging.getLogger(self.name) def _set_stats(self, name, @@ -227,7 +224,7 @@ class Svc(object): self._do_malloc_mon = False _msg.info('installing pmon-malloc hooks: %s', self._do_malloc_mon) import AthenaPython.PyAthena as PyAthena - lib = PyAthena.load_library('PerfMonEventDict') + PyAthena.load_library('PerfMonEventDict') memstats = PyAthena.PerfMon.MemStats memstats.enable(bool(self._do_malloc_mon)) _msg.info('pmon-malloc hooks enabled: %s', bool(memstats.enabled())) @@ -308,10 +305,10 @@ class Svc(object): statm = {} from sys import platform if platform != 'darwin' : - for l in open('/proc/self/status', 'r'): + for z in open('/proc/self/status', 'r'): # lines are of the form: # VmPeak: some value - ll = list(map(str.strip, l.split(':'))) + ll = list(map(str.strip, z.split(':'))) k = ll[0] v = ' '.join(ll[1:]) statm[k] = v @@ -343,9 +340,9 @@ class Svc(object): for evtstr,fitn,fitted_slope in self._slope_data['fits']: maxfitn=max(maxfitn,fitn) for evtstr,fitn,fitted_slope in self._slope_data['fits']: - _msg.info( ' evt %s fitted vmem-slope (%s points): %s'% - (evtstr,str(fitn).rjust(len(str(maxfitn))), - '%7.1f kb/evt'%fitted_slope if fitn>=2 else 'N/A') ) + _msg.info( ' evt %s fitted vmem-slope (%s points): %s', + evtstr,str(fitn).rjust(len(str(maxfitn))), + '%7.1f kb/evt'%fitted_slope if fitn>=2 else 'N/A' ) summary['job']['vmem_slope'] = self._slope_data else: _msg.info('vmem-leak estimation: [N/A]') @@ -353,8 +350,10 @@ class Svc(object): ## try to recoup some memory by flushing out ROOT stuff... 
headerFile = os.path.splitext(self.outFileName)[0]+".dat" - if os.path.exists(headerFile): os.remove(headerFile) - if os.path.exists(self.outFileName): os.remove(self.outFileName) + if os.path.exists(headerFile): + os.remove(headerFile) + if os.path.exists(self.outFileName): + os.remove(self.outFileName) ## build the callgraph... #import PerfMonComps.PerfMonSerializer as pmon_ser @@ -431,12 +430,15 @@ class Svc(object): ## write out meta-data import PyUtils.dbsqlite as dbs meta = dbs.open(headerFile, 'n') - for k,v in six.iteritems (self.meta): meta[k] = v + for k,v in six.iteritems (self.meta): + meta[k] = v meta['version_id'] = '0.4.0' # stream-format + header file meta['pmon_tuple_files'] = map( os.path.basename, outFiles[1:] ) import socket - try: meta['hostname'] = socket.gethostname() - except Exception: meta['hostname'] = '<unknown>' + try: + meta['hostname'] = socket.gethostname() + except Exception: + meta['hostname'] = '<unknown>' meta.close() @@ -447,8 +449,10 @@ class Svc(object): try: for outFile in outFiles: outFileDirName = os.path.dirname(outFile) - try: os.chdir(outFileDirName) - except OSError as err: pass + try: + os.chdir(outFileDirName) + except OSError: + pass outFile = os.path.basename(outFile) _msg.info(' --> [%s] => %8.3f kb', outFile, @@ -517,10 +521,12 @@ class PoolMonTool(object): from AthenaCommon import CfgMgr from AthenaCommon.Configurable import Configurable for c in list(Configurable.allConfigurables.values()): - if not isinstance(c, CfgMgr.AthenaOutputStream): continue + if not isinstance(c, CfgMgr.AthenaOutputStream): + continue try: outFile = c.properties()["OutputFile"] - except KeyError: continue + except KeyError: + continue if outFile.startswith("ROOTTREE:"): outFile = outFile[len("ROOTTREE:"):] outFiles.add( outFile ) @@ -530,7 +536,6 @@ class PoolMonTool(object): @property def msg(self): - import AthenaCommon.Logging as L return L.logging.getLogger(self.name) def initialize(self): @@ -620,7 +625,8 @@ class 
PoolMonTool(object): self.msg.info( "Could not run checkFile on [%s] !!", inFileName ) self.msg.info( "Reason: %s", err ) - if 'inFile' in dir(): del inFile + if 'inFile' in dir(): + del inFile _msg.unMute() if len(self.outputPoolFiles)>0: self.msg.info( "Content of output POOL files:" ) @@ -651,7 +657,8 @@ class PoolMonTool(object): self.msg.info( "Could not run checkFile on [%s] !!", outFileName ) self.msg.info( "Reason: %s", err ) - if 'outFile' in dir(): del outFile + if 'outFile' in dir(): + del outFile _msg.unMute() return @@ -678,13 +685,13 @@ class HephaestusMonTool(object): # during our finalize. self._heph_has_checkPoint = False import sys - if not 'Hephaestus.atexit' in sys.modules.keys(): + if 'Hephaestus.atexit' not in sys.modules.keys(): self.msg.warning('Hephaestus was not correctly initialized !') self.msg.warning('Final report may be inaccurate...') self.msg.warning('(to fix this, run athena with --leak-check)') import dl, Hephaestus.MemoryTracker as m - _hephLib = dl.open (m.__file__, dl.RTLD_GLOBAL | dl.RTLD_NOW) + dl.open (m.__file__, dl.RTLD_GLOBAL | dl.RTLD_NOW) memtrack = m from os.path import splitext @@ -742,7 +749,7 @@ class HephaestusMonTool(object): # consolidate last events with end-of-job leak report _clearCheckPoint = self.memtrack.CheckPoints.clearCheckPoint - for _ in xrange(self.lag): + for _ in range(self.lag): _clearCheckPoint( 0 ) # put the per-evt leaks into a different file diff --git a/Control/PerformanceMonitoring/PerfMonComps/share/FastMon.py b/Control/PerformanceMonitoring/PerfMonComps/share/FastMon.py index bead3f619f1d913b46b7992b552ffd468eaedc6a..6c175942e6a6276d83dac82269c506ab36c6fffd 100644 --- a/Control/PerformanceMonitoring/PerfMonComps/share/FastMon.py +++ b/Control/PerformanceMonitoring/PerfMonComps/share/FastMon.py @@ -1,3 +1,5 @@ +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration + # @file: PerfMonComps/FastMon.py # @author: Sebastien Binet # $Id: FastMon.py,v 1.2 2007-12-03 19:07:38 
binet Exp $ diff --git a/Control/PerformanceMonitoring/PerfMonComps/share/FullMon.py b/Control/PerformanceMonitoring/PerfMonComps/share/FullMon.py index 8cf777264c1c95ab5e2614279fdbaf7c149a95ab..0f5ab9cf19d244f7b09704f5d32c58436e0ca5ae 100644 --- a/Control/PerformanceMonitoring/PerfMonComps/share/FullMon.py +++ b/Control/PerformanceMonitoring/PerfMonComps/share/FullMon.py @@ -1,3 +1,5 @@ +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration + # @file: PerfMonComps/FullMon.py # @author: Sebastien Binet # $Id$ diff --git a/Control/PerformanceMonitoring/PerfMonComps/share/PerfMonMTSvc_jobOptions.py b/Control/PerformanceMonitoring/PerfMonComps/share/PerfMonMTSvc_jobOptions.py index b8adada555d2b950e92575b9950033913c317ff6..b3cb87c5506ecd7ae7223eb80f87d625a24c3763 100644 --- a/Control/PerformanceMonitoring/PerfMonComps/share/PerfMonMTSvc_jobOptions.py +++ b/Control/PerformanceMonitoring/PerfMonComps/share/PerfMonMTSvc_jobOptions.py @@ -1,3 +1,5 @@ +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration + ############################### # Print what we're doing ############################### diff --git a/Control/PerformanceMonitoring/PerfMonComps/share/PerfMonSvc_jobOptions.py b/Control/PerformanceMonitoring/PerfMonComps/share/PerfMonSvc_jobOptions.py index 8b732fc34a69275448fde4fc548b82bf08bb8437..5f546a8b942a677fe02312986b99e0f3ff0ad962 100644 --- a/Control/PerformanceMonitoring/PerfMonComps/share/PerfMonSvc_jobOptions.py +++ b/Control/PerformanceMonitoring/PerfMonComps/share/PerfMonSvc_jobOptions.py @@ -1,3 +1,5 @@ +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration + # @file: PerfMonComps/PerfMonSvc_jobOptions.py # @author: Sebastien Binet # $Id: PerfMonSvc_jobOptions.py,v 1.3 2007-08-01 20:58:52 binet Exp $ diff --git a/Control/StoreGate/src/SGImplSvc.cxx b/Control/StoreGate/src/SGImplSvc.cxx index 66d8f08d59c3160a9dbb9a31ebfc12b2672b89cb..225df8b112723ab9fdf22b6333d635018e36e5da 100644 --- 
a/Control/StoreGate/src/SGImplSvc.cxx +++ b/Control/StoreGate/src/SGImplSvc.cxx @@ -1311,15 +1311,21 @@ SGImplSvc::removeProxy(DataProxy* proxy, const void* pTrans, } // remove all entries from t2p map - this->t2pRemove(pTrans); - SG::DataProxy::CLIDCont_t clids = proxy->transientID(); - for (SG::DataProxy::CLIDCont_t::const_iterator i = clids.begin(); - i != clids.end(); - ++i) + // --- only if the proxy actually has an object! + // otherwise, we can trigger I/O. + // besides being useless here, we can get deadlocks if we + // call into the I/O code while holding the SG lock. + if (proxy->isValidObject()) { + this->t2pRemove(pTrans); + SG::DataProxy::CLIDCont_t clids = proxy->transientID(); + for (SG::DataProxy::CLIDCont_t::const_iterator i = clids.begin(); + i != clids.end(); + ++i) { void* ptr = SG::DataProxy_cast (proxy, *i); this->t2pRemove(ptr); } + } // remove from store return m_pStore->removeProxy(proxy, forceRemove, true); diff --git a/Control/xAODRootAccess/test/ut_xaodrootaccess_transtree_test.cxx b/Control/xAODRootAccess/test/ut_xaodrootaccess_transtree_test.cxx index f7b4e3d8138b29605cc06257e78569977c9e8f4f..c58c63c9e4bee08a1b02a8f1cf29524da97ecd4c 100644 --- a/Control/xAODRootAccess/test/ut_xaodrootaccess_transtree_test.cxx +++ b/Control/xAODRootAccess/test/ut_xaodrootaccess_transtree_test.cxx @@ -1,5 +1,5 @@ /* - Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration */ // System include(s): @@ -17,7 +17,6 @@ // Local include(s): #include "xAODRootAccess/Init.h" #include "xAODRootAccess/MakeTransientTree.h" -#include "xAODRootAccess/tools/ReturnCheck.h" #include "xAODRootAccess/tools/Message.h" int main() { @@ -26,7 +25,10 @@ int main() { static const char* APP_NAME = "ut_xaodrootaccess_transtree_test"; // Initialise the environment: - RETURN_CHECK( APP_NAME, xAOD::Init( APP_NAME ) ); + if( ! 
xAOD::Init( APP_NAME ).isSuccess() ) { + ::Error( APP_NAME, XAOD_MESSAGE( "Failed to call xAOD::Init()" ) ); + return 1; + } // Open it using a TFile: std::unique_ptr< ::TFile > ifile( ::TFile::Open( "$ASG_TEST_FILE_MC", diff --git a/DataQuality/DataQualityUtils/src/HanOutputFile.cxx b/DataQuality/DataQualityUtils/src/HanOutputFile.cxx index cc6c4bbbe024296fadcf29241e6068f485870d16..8ef2fb04bb40e3afd172fb38b0a982dc23860c8a 100644 --- a/DataQuality/DataQualityUtils/src/HanOutputFile.cxx +++ b/DataQuality/DataQualityUtils/src/HanOutputFile.cxx @@ -36,6 +36,7 @@ #include <TBufferJSON.h> #include <TString.h> #include <TEfficiency.h> +#include <TGraphAsymmErrors.h> #include "TPluginManager.h" #define BINLOEDGE(h,n) h->GetXaxis()->GetBinLowEdge(n) @@ -1008,8 +1009,10 @@ std::pair<std::string,std::string> HanOutputFile:: getHistogram( std::string nam TObject* hobj = hkey->ReadObj(); TObject* ref(0); TH1* hRef(0); + TEfficiency* eRef(0); TH2* h2Ref(0); std::vector<TH1*> hRefs; + std::vector<TEfficiency*> eRefs; bool hasPlotted(false); TH1* h = dynamic_cast<TH1*>( hobj ); TH2* h2 = dynamic_cast<TH2*>( h ); @@ -1305,7 +1308,7 @@ std::pair<std::string,std::string> HanOutputFile:: getHistogram( std::string nam while ((ref2=icolln->Next())) { hRef = dynamic_cast<TH1*>(ref2); if (hRef) { - if (hRef->GetDimension() == h->GetDimension()) { + if (hRef->GetDimension() == h->GetDimension()) { hRefs.push_back(hRef); } } @@ -1315,6 +1318,7 @@ std::pair<std::string,std::string> HanOutputFile:: getHistogram( std::string nam } groupDir->cd(); } + if( hRefs.size() > 0 ){ legend = new TLegend(0.55,0.77,0.87,0.87); legend->SetTextFont(62); @@ -1517,10 +1521,75 @@ std::pair<std::string,std::string> HanOutputFile:: getHistogram( std::string nam /*************************************************************************************************************/ if( e != 0 ) { + hasPlotted=false; auto myC = std::make_unique<TCanvas>( nameHis.c_str(), "myC", ww, wh ); - myC->cd(); formatTEfficiency( 
myC.get(), e ); - e->Draw((std::string("AP") + drawopt).c_str()); + if(drawRefs){ + groupDir->cd((nameHis+"_/Results").c_str()); + gDirectory->GetObject("Reference;1",ref); + eRef = dynamic_cast<TEfficiency*>(ref); + if(eRef){ + eRefs.push_back(eRef); + } else{ + TCollection* colln = dynamic_cast<TCollection*>(ref); + if (colln) { + WasCollectionReference = true; + TIterator* icolln = colln->MakeIterator(); + TObject* ref2; + while ((ref2=icolln->Next())) { + eRef = dynamic_cast<TEfficiency*>(ref2); + if (eRef) { + if (eRef->GetDimension() == e->GetDimension()) { + eRefs.push_back(eRef); + } + } + else std::cout << "eRef cast failed!!!" << std::endl; + } + } + } + groupDir->cd(); + } + if(eRefs.size() > 0 ){ + legend = new TLegend(0.55,0.77,0.87,0.87); + legend->SetTextFont(62); + legend->SetMargin(0.15); + legend->SetFillStyle(0); + legend->SetBorderSize(0); + legend->AddEntry(e,datatitle.c_str()); + int itrcolor(0); + for (auto eRef : eRefs) { + myC->cd(); + e->Draw(""); + eRef->Draw(""); + gPad->Update(); + + int local_color = root_color_choices[itrcolor]; + itrcolor++; + + formatTEfficiency( myC.get(), eRef ); + eRef->SetMarkerColor(local_color); + eRef->SetLineColor(local_color); + + if (!hasPlotted) { + e->Draw((std::string("AP") + drawopt).c_str()); + hasPlotted=true; + } + eRef->Draw("SAME"); + myC->Update(); + + if (WasCollectionReference) { + legend->AddEntry(eRef, eRef->GetName()); + } else { + std::string refInfo = getStringName(pathname + "/"+ nameHis+"_/Config/annotations/refInfo"); + legend->AddEntry(eRef, refInfo != "Undefined" ? 
refInfo.c_str() : "Reference"); + } + } + legend->Draw(); + } else { + myC->cd(); + e->Draw((std::string("AP") + drawopt).c_str()); + } + myC->cd(); displayExtra(myC.get(),display); TLatex t; t.SetNDC(); @@ -1550,6 +1619,7 @@ std::pair<std::string,std::string> HanOutputFile:: getHistogram( std::string nam bool HanOutputFile::saveHistogramToFileSuperimposed( std::string nameHis, std::string location, TDirectory* groupDir1, TDirectory* groupDir2, bool drawRefs,std::string run_min_LB, std::string pathName,int cnvsType){ + dqi::DisableMustClean disabled; groupDir1->cd(); gStyle->SetFrameBorderMode(0); @@ -1778,7 +1848,7 @@ bool HanOutputFile::saveHistogramToFileSuperimposed( std::string nameHis, std::s tt.SetNDC(); tt.SetTextSize(0.03); tt.DrawLatex(0.02,0.01,pathName.c_str()); - + convertToGraphics(cnvsType,myC.get(),namePNG,nameJSON); gStyle->Reset(); diff --git a/DataQuality/GoodRunsLists/CMakeLists.txt b/DataQuality/GoodRunsLists/CMakeLists.txt index 40364193734d17cd5ebaaa87188dcd1478b20142..cc83d9c442b469eae166e48be591a1647ec18510 100644 --- a/DataQuality/GoodRunsLists/CMakeLists.txt +++ b/DataQuality/GoodRunsLists/CMakeLists.txt @@ -15,12 +15,9 @@ atlas_add_root_dictionary( GoodRunsListsLib GoodRunsLists/DQHelperFunctions.h Root/LinkDef.h EXTERNAL_PACKAGES LibXml2 ROOT ) -if( NOT XAOD_STANDALONE ) - if( XAOD_ANALYSIS ) - set( extra_libs ) - else() - set( extra_libs DerivationFrameworkInterfaces ) - endif() +set( extra_libs DerivationFrameworkInterfaces ) +if( XAOD_STANDALONE ) + set( extra_libs ) endif() atlas_add_library( GoodRunsListsLib diff --git a/DataQuality/GoodRunsLists/src/GoodRunsListSelectorTool.cxx b/DataQuality/GoodRunsLists/src/GoodRunsListSelectorTool.cxx index 38d0f9564ea22eacffd7af148dc1df66c6b43c56..afc0afd744c961cb1fb20ede5c1bde655f54777a 100644 --- a/DataQuality/GoodRunsLists/src/GoodRunsListSelectorTool.cxx +++ b/DataQuality/GoodRunsLists/src/GoodRunsListSelectorTool.cxx @@ -1,5 +1,3 @@ -#ifndef XAOD_ANALYSIS - /* Copyright (C) 2002-2020 CERN 
for the benefit of the ATLAS collaboration */ @@ -24,7 +22,7 @@ using namespace std; -GoodRunsListSelectorTool::GoodRunsListSelectorTool( const std::string& type, const std::string& name, const IInterface* parent ) +GoodRunsListSelectorTool::GoodRunsListSelectorTool( const std::string& type, const std::string& name, const IInterface* parent ) : AthAlgTool( type, name, parent ) , m_reader(0) , m_boolop(0) @@ -66,7 +64,7 @@ GoodRunsListSelectorTool::~GoodRunsListSelectorTool() } -StatusCode +StatusCode GoodRunsListSelectorTool::queryInterface( const InterfaceID& riid, void** ppvIf ) { if ( riid == IGoodRunsListSelectorTool::interfaceID() ) { @@ -126,7 +124,7 @@ GoodRunsListSelectorTool::initialize() /// start reading xml files if ( !m_goodrunslistVec.empty() ) { m_reader->Reset(); - for (itr=m_goodrunslistVec.begin(); itr!=m_goodrunslistVec.end() && !m_usecool; ++itr) { + for (itr=m_goodrunslistVec.begin(); itr!=m_goodrunslistVec.end() && !m_usecool; ++itr) { //const char* fname; std::string fname; if ( itr->find("/")==0 || itr->find("$")==0 || itr->find(".")==0 || itr->find(":")!=string::npos ) { @@ -163,8 +161,8 @@ GoodRunsListSelectorTool::initialize() } -bool -GoodRunsListSelectorTool::passEvent(const EventIDBase& pEvent) +bool +GoodRunsListSelectorTool::passEvent(const EventIDBase& pEvent) { ATH_MSG_DEBUG ("passEvent() "); @@ -224,7 +222,7 @@ GoodRunsListSelectorTool::passThisRunLB( const std::vector<std::string>& grlname if (m_passthrough) { ATH_MSG_DEBUG ("passThisRunLB() :: Pass through mode."); pass = true; - } + } /// decide from XML files else { pass = this->passRunLB(runNumber,lumiBlockNr,grlnameVec,brlnameVec); @@ -245,9 +243,9 @@ GoodRunsListSelectorTool::passRunLB( int runNumber, int lumiBlockNr, if (m_passthrough) { ATH_MSG_DEBUG ("passRunLB() :: Pass through mode."); return true; - } + } - /// decision based on merged blackrunslist + /// decision based on merged blackrunslist if ( m_rejectanybrl && m_eventselectormode ) { if ( 
m_brlcollection->HasRunLumiBlock(runNumber,lumiBlockNr) ) { ATH_MSG_DEBUG ("passRunLB() :: Event rejected by (_any_ of) merged black runs list."); @@ -261,7 +259,7 @@ GoodRunsListSelectorTool::passRunLB( int runNumber, int lumiBlockNr, brlitr = m_brlcollection->find(brlnameVec[i]); if (brlitr!=m_brlcollection->end()) reject = brlitr->HasRunLumiBlock(runNumber,lumiBlockNr); - } + } if (reject) { ATH_MSG_DEBUG ("passRunLB() :: Event rejected by specific black runs list."); return false; @@ -276,23 +274,23 @@ GoodRunsListSelectorTool::passRunLB( int runNumber, int lumiBlockNr, grlitr = m_grlcollection->find(grlnameVec[i]); if (grlitr!=m_grlcollection->end()) pass = grlitr->HasRunLumiBlock(runNumber,lumiBlockNr); - } + } if (pass) { ATH_MSG_DEBUG ("passRunLB() :: Event accepted by specific good runs list."); return true; - } - /// decision based on merged goodrunslist + } + /// decision based on merged goodrunslist } else if (m_grlcollection->HasRunLumiBlock(runNumber,lumiBlockNr)) { ATH_MSG_DEBUG ("passRunLB() :: Event accepted by (_any_ of) merged good runs list."); return true; - } + } ATH_MSG_DEBUG ("passRunLB() :: Event rejected, not in (any) good runs list."); return false; } -StatusCode +StatusCode GoodRunsListSelectorTool::finalize() { ATH_MSG_DEBUG ("finalize() "); @@ -305,7 +303,7 @@ GoodRunsListSelectorTool::fileExists(const char* fileName) { struct stat info; int ret = -1; - + //get the file attributes ret = stat(fileName, &info); @@ -313,7 +311,7 @@ GoodRunsListSelectorTool::fileExists(const char* fileName) /// stat() is able to get the file attributes, so the file obviously exists /// if filesize==0 assume the copying failed. //if (info.st_size == 0) return false; - //else + //else return true; } else { /// stat() is not able to get the file attributes, so the file obviously does not exist. 
@@ -322,7 +320,7 @@ GoodRunsListSelectorTool::fileExists(const char* fileName) } -bool +bool GoodRunsListSelectorTool::registerGRLSelector(const std::string& name, const std::vector<std::string>& grlnameVec, const std::vector<std::string>& brlnameVec) { if (m_registry.find(name)!=m_registry.end()) { @@ -358,5 +356,3 @@ GoodRunsListSelectorTool::registerGRLSelector(const std::string& name, const std m_registry[name] = vvPair(grlnameVec,brlnameVec); return true; } - -#endif // XAOD_ANALYSIS diff --git a/DataQuality/dqm_algorithms/src/Chi2Test.cxx b/DataQuality/dqm_algorithms/src/Chi2Test.cxx index 171360340c70e9015938ee0e490a8bb168165692..f7e314ab80ea93659137abee13c1220595e2b4ac 100644 --- a/DataQuality/dqm_algorithms/src/Chi2Test.cxx +++ b/DataQuality/dqm_algorithms/src/Chi2Test.cxx @@ -10,6 +10,7 @@ #include <dqm_algorithms/Chi2Test.h> #include <dqm_algorithms/tools/AlgorithmHelper.h> #include <TH1.h> +#include <TEfficiency.h> #include <TF1.h> #include <TClass.h> #include <ers/ers.h> @@ -42,26 +43,49 @@ dqm_algorithms::Chi2Test::execute( const std::string & name , const TObject & object, const dqm_core::AlgorithmConfig & config ) { - const TH1 * histogram; - + const TH1* histogram = 0; + const TEfficiency* efficiency = 0; + TH1* passed_histogram = 0; + TH1* total_histogram = 0; + if(object.IsA()->InheritsFrom( "TH1" )) { histogram = static_cast<const TH1*>( &object ); if (histogram->GetDimension() > 2 ){ throw dqm_core::BadConfig( ERS_HERE, name, "dimension > 2 " ); } + } else if(object.IsA()->InheritsFrom( "TEfficiency" )) { + // get the histograms from TEfficiency object to perform Chi2Test + efficiency = static_cast<const TEfficiency*>( &object); + if (efficiency->GetDimension() > 2 ){ + throw dqm_core::BadConfig( ERS_HERE, name, "dimension > 2 " ); + } + + passed_histogram = efficiency->GetCopyPassedHisto(); + total_histogram = efficiency->GetCopyTotalHisto(); + passed_histogram->Divide(total_histogram); + } else { - throw dqm_core::BadConfig( ERS_HERE, name, 
"does not inherit from TH1" ); + throw dqm_core::BadConfig( ERS_HERE, name, "does not inherit from TH1 or TEfficiency"); } const double minstat = dqm_algorithms::tools::GetFirstFromMap( "MinStat", config.getParameters(), -1); - - if (histogram->GetEntries() < minstat ) { + double current_stat = 0; + + if(object.IsA()->InheritsFrom( "TH1" )) { + current_stat = histogram->GetEntries(); + } else if(object.IsA()->InheritsFrom( "TEfficiency" )){ + current_stat = total_histogram->GetEntries(); + } + + if(current_stat < minstat ) { dqm_core::Result *result = new dqm_core::Result(dqm_core::Result::Undefined); - result->tags_["InsufficientEntries"] = histogram->GetEntries(); + result->tags_["InsufficientEntries"] = current_stat; return result; } - - TH1 * refhist; + + TH1 * refhist = 0; + TH1 * ref_total_hist; + TEfficiency * refeff; double gthresho; double rthresho; std::string option; @@ -90,24 +114,54 @@ dqm_algorithms::Chi2Test::execute( const std::string & name , } - try { - refhist = dynamic_cast<TH1 *>( config.getReference() ); - } - catch ( dqm_core::Exception & ex ) { - throw dqm_core::BadRefHist(ERS_HERE,name," Could not retreive reference"); - } + if(object.IsA()->InheritsFrom( "TH1" )) { + try { + refhist = dynamic_cast<TH1 *>( config.getReference() ); + } + catch ( dqm_core::Exception & ex ) { + throw dqm_core::BadRefHist(ERS_HERE,name," Could not retreive reference"); + } + } else if(object.IsA()->InheritsFrom( "TEfficiency" )){ + try { + refeff = dynamic_cast<TEfficiency *>( config.getReference() ); + } + catch ( dqm_core::Exception & ex ) { + throw dqm_core::BadRefHist(ERS_HERE,name," Could not retreive reference"); + } + + refhist = refeff->GetCopyPassedHisto(); + ref_total_hist = refeff->GetCopyTotalHisto(); + refhist->Divide(ref_total_hist); + } + if (!refhist) { throw dqm_core::BadRefHist(ERS_HERE,name,"Bad reference type"); } + + double value = 0; + if(object.IsA()->InheritsFrom( "TH1" )) { - if (histogram->GetDimension() != refhist->GetDimension() ) { 
- throw dqm_core::BadRefHist( ERS_HERE, "Dimension", name ); - } - - if ((histogram->GetNbinsX() != refhist->GetNbinsX()) || (histogram->GetNbinsY() != refhist->GetNbinsY())) { - throw dqm_core::BadRefHist( ERS_HERE, "number of bins", name ); - } + if (histogram->GetDimension() != refhist->GetDimension() ) { + throw dqm_core::BadRefHist( ERS_HERE, "Dimension", name ); + } + if ((histogram->GetNbinsX() != refhist->GetNbinsX()) || (histogram->GetNbinsY() != refhist->GetNbinsY())) { + throw dqm_core::BadRefHist( ERS_HERE, "number of bins", name ); + } - double value = histogram->Chi2Test( refhist, option.c_str() ); + value = histogram->Chi2Test( refhist, option.c_str() ); + + } else if(object.IsA()->InheritsFrom( "TEfficiency" )){ + + if (passed_histogram->GetDimension() != refhist->GetDimension() ) { + throw dqm_core::BadRefHist( ERS_HERE, "Dimension", name ); + } + + if ((passed_histogram->GetNbinsX() != refhist->GetNbinsX()) || (passed_histogram->GetNbinsY() != refhist->GetNbinsY())) { + throw dqm_core::BadRefHist( ERS_HERE, "number of bins", name ); + } + + value = passed_histogram->Chi2Test( refhist, option.c_str() ); + } + ERS_DEBUG(1,"Green threshold: "<< gthresho << "; Red threshold: " << rthresho ); ERS_DEBUG(1,"Chi2 Test with Option " << option << " is " << value ); @@ -132,6 +186,7 @@ dqm_algorithms::Chi2Test::execute( const std::string & name , } } + ERS_DEBUG(2,"Result: "<<*result); return result; diff --git a/DataQuality/dqm_algorithms/src/RepeatAlgorithm.cxx b/DataQuality/dqm_algorithms/src/RepeatAlgorithm.cxx index 0913db52bef955527292688e70635575498067ba..bacb12659e5ab14c0f995d8f52b78dbbcab74641 100644 --- a/DataQuality/dqm_algorithms/src/RepeatAlgorithm.cxx +++ b/DataQuality/dqm_algorithms/src/RepeatAlgorithm.cxx @@ -118,12 +118,13 @@ execute( const std::string& name, const TObject& data, const dqm_core::Algorithm } tags[ireference->GetName() + std::string("|Status")] = subResult->status_; if ( 
dqm_algorithms::tools::GetFirstFromMap("RepeatAlgorithm--ResultsNEntries", config.getParameters(), 0) > 0 ) { - TH1* hireference = dynamic_cast<TH1*>(ireference); - if (hireference) { - tags[ireference->GetName() + std::string("|NEntries")] = hireference->GetEntries(); - } else { - throw dqm_core::BadConfig( ERS_HERE, "RepeatAlgorithm", std::string("Reference ") + ireference->GetName() + " is not TH1, yet we want to get # entries" ); - } + + if( ireference->IsA()->InheritsFrom( "TH1" )){ + TH1* hireference = dynamic_cast<TH1*>(ireference); + if (hireference) { + tags[ireference->GetName() + std::string("|NEntries")] = hireference->GetEntries(); + } + } } if (subResult->getObject()) { diff --git a/DetectorDescription/AtlasDetDescr/src/AtlasDetectorIDHelper.cxx b/DetectorDescription/AtlasDetDescr/src/AtlasDetectorIDHelper.cxx index c6097206e00b77597f066d2ff03c9c6da0bed707..77154ebd815bf7f95215d78a3563cc26317ab9f1 100755 --- a/DetectorDescription/AtlasDetDescr/src/AtlasDetectorIDHelper.cxx +++ b/DetectorDescription/AtlasDetDescr/src/AtlasDetectorIDHelper.cxx @@ -1,88 +1,40 @@ /* - Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration */ -/*************************************************************************** - - ----------------------------------------- - ***************************************************************************/ - -//<doc><file> $Id: AtlasDetectorIDHelper.cxx,v 1.6 2006-01-11 09:24:23 schaffer Exp $ -//<version> $Name: not supported by cvs2svn $ - -//<<<<<< INCLUDES >>>>>> - #include "AtlasDetectorIDHelper.h" #include "IdDict/IdDictDefs.h" #include "AtlasDetDescr/AtlasDetectorID.h" #include "GaudiKernel/MsgStream.h" #include <iostream> -//<<<<<< PRIVATE DEFINES >>>>>> -//<<<<<< PRIVATE CONSTANTS >>>>>> -//<<<<<< PRIVATE TYPES >>>>>> -//<<<<<< PRIVATE VARIABLE DEFINITIONS >>>>>> -//<<<<<< PUBLIC VARIABLE DEFINITIONS >>>>>> -//<<<<<< CLASS STRUCTURE 
INITIALIZATION >>>>>> -//<<<<<< PRIVATE FUNCTION DEFINITIONS >>>>>> -//<<<<<< PUBLIC FUNCTION DEFINITIONS >>>>>> -//<<<<<< MEMBER FUNCTION DEFINITIONS >>>>>> - -AtlasDetectorIDHelper::AtlasDetectorIDHelper(void) - : - m_isSLHC(false), - m_pixel_region_index(UNDEFINED), - m_sct_region_index(UNDEFINED), - m_trt_region_index(UNDEFINED), - m_lar_em_region_index(UNDEFINED), - m_lar_hec_region_index(UNDEFINED), - m_lar_fcal_region_index(UNDEFINED), - m_lvl1_region_index(UNDEFINED), - m_dm_region_index(UNDEFINED), - m_tile_region_index(UNDEFINED), - m_mdt_region_index(UNDEFINED), - m_csc_region_index(UNDEFINED), - m_rpc_region_index(UNDEFINED), - m_tgc_region_index(UNDEFINED), - m_mm_region_index(UNDEFINED), - m_stgc_region_index(UNDEFINED), - m_muon_station_index(UNDEFINED), - m_alfa_region_index(UNDEFINED), - m_bcm_region_index(UNDEFINED), - m_lucid_region_index(UNDEFINED), - m_zdc_region_index(UNDEFINED), - m_initialized(false), - m_station_field(0), - m_msgSvc(0) -{} - -AtlasDetectorIDHelper::~AtlasDetectorIDHelper(void) -{ +AtlasDetectorIDHelper::AtlasDetectorIDHelper(void) : + m_isSLHC(false), + m_pixel_region_index(UNDEFINED), + m_sct_region_index(UNDEFINED), + m_trt_region_index(UNDEFINED), + m_lar_em_region_index(UNDEFINED), + m_lar_hec_region_index(UNDEFINED), + m_lar_fcal_region_index(UNDEFINED), + m_lvl1_region_index(UNDEFINED), + m_dm_region_index(UNDEFINED), + m_tile_region_index(UNDEFINED), + m_mdt_region_index(UNDEFINED), + m_csc_region_index(UNDEFINED), + m_rpc_region_index(UNDEFINED), + m_tgc_region_index(UNDEFINED), + m_mm_region_index(UNDEFINED), + m_stgc_region_index(UNDEFINED), + m_muon_station_index(UNDEFINED), + m_alfa_region_index(UNDEFINED), + m_bcm_region_index(UNDEFINED), + m_lucid_region_index(UNDEFINED), + m_zdc_region_index(UNDEFINED), + m_initialized(false), + m_station_field(nullptr), + m_msgSvc(nullptr) { } -// AtlasDetectorIDHelper::AtlasDetectorIDHelper (const AtlasDetectorIDHelper &) -// { -// } - -// AtlasDetectorIDHelper & 
AtlasDetectorIDHelper::operator= (const AtlasDetectorIDHelper & other) -// { -// return other; -// } - - -// AtlasDetectorIDHelper * -// AtlasDetectorIDHelper::instance() -// { - -// static AtlasDetectorIDHelper* instance = 0; - -// if (instance == 0) { -// instance = new AtlasDetectorIDHelper; -// } -// return instance; -// } - - int AtlasDetectorIDHelper::initialize_from_dictionary(const IdDictMgr& dict_mgr, bool quiet) diff --git a/DetectorDescription/AtlasDetDescr/src/AtlasDetectorIDHelper.h b/DetectorDescription/AtlasDetDescr/src/AtlasDetectorIDHelper.h index 1302c606d06c8ab9dc8561ed8adf8925161dbd45..c168f01f0e86c2885fc51304e71e4d1c81438daa 100755 --- a/DetectorDescription/AtlasDetDescr/src/AtlasDetectorIDHelper.h +++ b/DetectorDescription/AtlasDetDescr/src/AtlasDetectorIDHelper.h @@ -1,19 +1,9 @@ /* - Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration */ -/*************************************************************************** - - ----------------------------------------- - ***************************************************************************/ - -//<doc><file> $Id: AtlasDetectorIDHelper.h,v 1.6 2006-01-11 09:24:23 schaffer Exp $ -//<version> $Name: not supported by cvs2svn $ - #ifndef SRC_ATLASDETECTORIDHELPER_H -# define SRC_ATLASDETECTORIDHELPER_H - -//<<<<<< INCLUDES >>>>>> +#define SRC_ATLASDETECTORIDHELPER_H #include "Identifier/Identifier.h" #include "Identifier/IdContext.h" @@ -21,18 +11,9 @@ #include <string> #include <vector> -//<<<<<< PUBLIC DEFINES >>>>>> -//<<<<<< PUBLIC CONSTANTS >>>>>> -//<<<<<< PUBLIC TYPES >>>>>> - class IdDictField; -//<<<<<< PUBLIC VARIABLES >>>>>> -//<<<<<< PUBLIC FUNCTIONS >>>>>> -//<<<<<< CLASS DECLARATIONS >>>>>> - -class AtlasDetectorIDHelper -{ +class AtlasDetectorIDHelper { public: enum ERRORS { UNDEFINED = 999 }; @@ -48,7 +29,7 @@ public: /// Initialization from the identifier dictionary int 
initialize_from_dictionary(const IdDictMgr& dict_mgr, bool quiet); - ~AtlasDetectorIDHelper(void); + ~AtlasDetectorIDHelper()=default; size_type pixel_region_index(); size_type sct_region_index(); diff --git a/Event/xAOD/xAODCaloEventAthenaPool/CMakeLists.txt b/Event/xAOD/xAODCaloEventAthenaPool/CMakeLists.txt index 94e47eebce8ad8af75fb63ae87971f985f75488d..c9651a3619579e8522eb4e3bffe3dda5f2f9d5c2 100644 --- a/Event/xAOD/xAODCaloEventAthenaPool/CMakeLists.txt +++ b/Event/xAOD/xAODCaloEventAthenaPool/CMakeLists.txt @@ -3,6 +3,12 @@ # Declare the package name: atlas_subdir( xAODCaloEventAthenaPool ) +# Extra libraries, depending on the build environment. +set( extra_libs ) +if( NOT XAOD_ANALYSIS ) + set( extra_libs CaloInterfaceLib ) +endif() + # Component(s) in the package: atlas_add_poolcnv_library( xAODCaloEventAthenaPoolPoolCnv src/*.h src/*.cxx @@ -13,9 +19,8 @@ atlas_add_poolcnv_library( xAODCaloEventAthenaPoolPoolCnv TYPES_WITH_NAMESPACE xAOD::CaloClusterContainer xAOD::CaloClusterAuxContainer xAOD::CaloTowerContainer xAOD::CaloTowerAuxContainer CNV_PFX xAOD - LINK_LIBRARIES CaloInterfaceLib AthContainers AthenaKernel - AthenaPoolCnvSvcLib AthenaPoolUtilities xAODCaloEvent - GaudiKernel ) + LINK_LIBRARIES AthContainers AthenaKernel AthenaPoolCnvSvcLib + AthenaPoolUtilities xAODCaloEvent GaudiKernel ${extra_libs} ) # Install files from the package. 
atlas_install_joboptions( share/*.py ) diff --git a/Event/xAOD/xAODMetaData/Root/FileMetaData_v1.cxx b/Event/xAOD/xAODMetaData/Root/FileMetaData_v1.cxx index efc4dd3e0b424bcd0429511ca9fdf1f6e12f065d..33644eb58b2d5ad7a73ef2e48f0e65df5b28111e 100644 --- a/Event/xAOD/xAODMetaData/Root/FileMetaData_v1.cxx +++ b/Event/xAOD/xAODMetaData/Root/FileMetaData_v1.cxx @@ -1,5 +1,5 @@ /* - Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration */ // $Id: FileMetaData_v1.cxx 683694 2015-07-17 09:03:52Z krasznaa $ @@ -362,8 +362,6 @@ namespace xAOD { return true; } -} // namespace xAOD - /// Helper macro used to print MetaDataType values #define PRINT_TYPE( TYPE ) \ case xAOD::FileMetaData_v1::TYPE: \ @@ -404,3 +402,5 @@ std::ostream& operator<< ( std::ostream& out, return out; } + +} // namespace xAOD diff --git a/Event/xAOD/xAODMetaData/xAODMetaData/versions/FileMetaData_v1.h b/Event/xAOD/xAODMetaData/xAODMetaData/versions/FileMetaData_v1.h index 19f0fb56072a9e2368635fd9308bd5f2863bd079..990b15f55c1b2d94a04ccd867cfcc3cd38423e49 100644 --- a/Event/xAOD/xAODMetaData/xAODMetaData/versions/FileMetaData_v1.h +++ b/Event/xAOD/xAODMetaData/xAODMetaData/versions/FileMetaData_v1.h @@ -1,7 +1,7 @@ // Dear emacs, this is -*- c++ -*- /* - Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration */ // $Id: FileMetaData_v1.h 685184 2015-07-23 20:25:43Z cranshaw $ @@ -106,11 +106,11 @@ namespace xAOD { }; // class FileMetaData_v1 -} // namespace xAOD + /// A convenience print operator for xAOD::FileMetaData_v1::MetaDataType + std::ostream& operator<< ( std::ostream& out, + xAOD::FileMetaData_v1::MetaDataType type ); -/// A convenience print operator for xAOD::FileMetaData_v1::MetaDataType -std::ostream& operator<< ( std::ostream& out, - xAOD::FileMetaData_v1::MetaDataType type ); +} // namespace xAOD // Declare a 
base class for the type: #include "xAODCore/BaseInfo.h" diff --git a/Event/xAOD/xAODTracking/Root/NeutralParticle_v1.cxx b/Event/xAOD/xAODTracking/Root/NeutralParticle_v1.cxx index a1e966310951471acfac32ef0bdd07e79c65fd26..e3e1bd851b8c4618af5e80177c6f0ba45832046c 100644 --- a/Event/xAOD/xAODTracking/Root/NeutralParticle_v1.cxx +++ b/Event/xAOD/xAODTracking/Root/NeutralParticle_v1.cxx @@ -1,5 +1,5 @@ /* - Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration */ // Misc includes @@ -30,9 +30,9 @@ namespace xAOD { if(!hasStore() ) makePrivateStore(); this->IParticle::operator=( tp ); -#ifndef XAOD_STANDALONE +#ifndef XAOD_ANALYSIS m_perigeeParameters.reset(); -#endif // not XAOD_STANDALONE +#endif // not XAOD_ANALYSIS return *this; } @@ -107,11 +107,11 @@ namespace xAOD { } void NeutralParticle_v1::setDefiningParameters(float d0, float z0, float phi0, float theta, float oneOverP) { -#ifndef XAOD_STANDALONE +#ifndef XAOD_ANALYSIS if(m_perigeeParameters.isValid()) { m_perigeeParameters.reset(); } -#endif // not XAOD_STANDALONE +#endif // not XAOD_ANALYSIS static const Accessor< float > acc1( "d0" ); acc1( *this ) = d0; @@ -131,11 +131,11 @@ namespace xAOD { } void NeutralParticle_v1::setDefiningParametersCovMatrix(const xAOD::ParametersCovMatrix_t& cov){ -#ifndef XAOD_STANDALONE +#ifndef XAOD_ANALYSIS if(m_perigeeParameters.isValid()) { m_perigeeParameters.reset(); } -#endif // not XAOD_STANDALONE +#endif // not XAOD_ANALYSIS static const Accessor< std::vector<float> > acc( "definingParametersCovMatrix" ); std::vector<float>& v = acc(*this); @@ -184,7 +184,7 @@ namespace xAOD { acc3( *this ) = z; } -#ifndef XAOD_STANDALONE +#ifndef XAOD_ANALYSIS const Trk::NeutralPerigee& NeutralParticle_v1::perigeeParameters() const { // Require the cache to be valid and check if the cached pointer has been set @@ -210,12 +210,12 @@ namespace xAOD { m_perigeeParameters.set(tmpPerigeeParameters); 
return *(m_perigeeParameters.ptr()); } -#endif // not XAOD_STANDALONE +#endif // not XAOD_ANALYSIS void NeutralParticle_v1::resetCache() { -#ifndef XAOD_STANDALONE +#ifndef XAOD_ANALYSIS m_perigeeParameters.reset(); -#endif // not XAOD_STANDALONE +#endif // not XAOD_ANALYSIS } } // namespace xAOD diff --git a/Event/xAOD/xAODTracking/Root/TrackParticle_v1.cxx b/Event/xAOD/xAODTracking/Root/TrackParticle_v1.cxx index 293206b7db312f7f7dcf1c7276cd22304848c209..862de111f020e9a736bb487482d0b3e350857406 100644 --- a/Event/xAOD/xAODTracking/Root/TrackParticle_v1.cxx +++ b/Event/xAOD/xAODTracking/Root/TrackParticle_v1.cxx @@ -61,10 +61,10 @@ namespace xAOD { makePrivateStore(); } this->IParticle::operator=( tp ); -#ifndef XAOD_STANDALONE +#ifndef XAOD_ANALYSIS // assume that this copy will create new cache as needed m_perigeeParameters.reset(); -#endif // not XAOD_STANDALONE +#endif // not XAOD_ANALYSIS return *this; } @@ -149,12 +149,12 @@ namespace xAOD { } void TrackParticle_v1::setDefiningParameters(float d0, float z0, float phi0, float theta, float qOverP) { -#ifndef XAOD_STANDALONE +#ifndef XAOD_ANALYSIS // reset perigee cache if existing if(m_perigeeParameters.isValid()) { m_perigeeParameters.reset(); } -#endif // not XAOD_STANDALONE +#endif // not XAOD_ANALYSIS static const Accessor< float > acc1( "d0" ); acc1( *this ) = d0; @@ -180,12 +180,12 @@ namespace xAOD { void TrackParticle_v1::setDefiningParametersCovMatrix(const xAOD::ParametersCovMatrix_t& cov){ -#ifndef XAOD_STANDALONE +#ifndef XAOD_ANALYSIS // reset perigee cache if existing if(m_perigeeParameters.isValid()) { m_perigeeParameters.reset(); } -#endif // not XAOD_STANDALONE +#endif // not XAOD_ANALYSIS // Extract the diagonal elements from the matrix. 
std::vector< float > diagVec; @@ -437,7 +437,7 @@ namespace xAOD { acc3( *this ) = z; } -#ifndef XAOD_STANDALONE +#ifndef XAOD_ANALYSIS const Trk::Perigee& TrackParticle_v1::perigeeParameters() const { // Require the cache to be valid and check if the cached pointer has been set @@ -470,7 +470,7 @@ namespace xAOD { m_perigeeParameters.set(tmpPerigeeParameters); return *(m_perigeeParameters.ptr()); } -#endif // not XAOD_STANDALONE +#endif // not XAOD_ANALYSIS AUXSTORE_PRIMITIVE_GETTER(TrackParticle_v1, float, chiSquared) AUXSTORE_PRIMITIVE_GETTER(TrackParticle_v1, float, numberDoF) @@ -615,8 +615,8 @@ namespace xAOD { acc( *this ).at(index) = static_cast<uint8_t>(pos); } -#ifndef XAOD_STANDALONE - const Trk::CurvilinearParameters TrackParticle_v1::curvilinearParameters(unsigned int index) const { +#ifndef XAOD_ANALYSIS + const Trk::CurvilinearParameters TrackParticle_v1::curvilinearParameters(unsigned int index) const { static const Accessor< std::vector<float> > acc( "trackParameterCovarianceMatrices" ); unsigned int offset = index*15; @@ -631,7 +631,7 @@ namespace xAOD { return param; } -#endif // not XAOD_STANDALONE +#endif // not XAOD_ANALYSIS AUXSTORE_PRIMITIVE_GETTER_WITH_CAST(TrackParticle_v1, uint8_t, xAOD::TrackProperties,trackProperties) AUXSTORE_PRIMITIVE_SETTER_WITH_CAST(TrackParticle_v1, uint8_t, xAOD::TrackProperties,trackProperties, setTrackProperties) @@ -687,7 +687,7 @@ namespace xAOD { } -#ifndef XAOD_STANDALONE +#ifndef XAOD_ANALYSIS /// The function will return an invalid ElementLink in case nothing was set /// for it yet. This is to avoid users having to always check both for /// the decoration being available, and the link being valid. 
@@ -733,13 +733,13 @@ namespace xAOD { } return *( acc( *this ) ); - } -#endif // not XAOD_STANDALONE - + } +#endif // not XAOD_ANALYSIS + void TrackParticle_v1::resetCache(){ -#ifndef XAOD_STANDALONE +#ifndef XAOD_ANALYSIS m_perigeeParameters.reset(); -#endif // not XAOD_STANDALONE +#endif // not XAOD_ANALYSIS } } // namespace xAOD diff --git a/Event/xAOD/xAODTracking/Root/Vertex_v1.cxx b/Event/xAOD/xAODTracking/Root/Vertex_v1.cxx index 3569fe59887aba3b09c9ea1207a3cc88da811db3..85c9c4d601056a8d084f805f9ea0e0f85d4f8f22 100644 --- a/Event/xAOD/xAODTracking/Root/Vertex_v1.cxx +++ b/Event/xAOD/xAODTracking/Root/Vertex_v1.cxx @@ -18,12 +18,12 @@ namespace xAOD { Vertex_v1::Vertex_v1() : SG::AuxElement(), - m_position(), + m_position(), m_covariance() { } - Vertex_v1::Vertex_v1( const Vertex_v1& other ) + Vertex_v1::Vertex_v1( const Vertex_v1& other ) : SG::AuxElement(other), m_position( other.m_position ), m_covariance( other.m_covariance ){ @@ -157,7 +157,7 @@ namespace xAOD { AUXSTORE_PRIMITIVE_SETTER_WITH_CAST( Vertex_v1, short, VxType::VertexType, vertexType, setVertexType ) -#if ( ! defined(XAOD_STANDALONE) ) && ( ! 
defined(XAOD_MANACORE) ) +#ifndef XAOD_ANALYSIS /// Helper object for implementing the vxTrackAtVertex functions static const SG::AuxElement::Accessor< std::vector< Trk::VxTrackAtVertex > > vxVertAcc( "vxTrackAtVertex" ); @@ -202,7 +202,7 @@ namespace xAOD { return vxVertAcc.isAvailable( *this ); } -#endif // not XAOD_STANDALONE and not XAOD_MANACORE +#endif // not XAOD_ANALYSIS ///////////////////////////////////////////////////////////////////////////// // diff --git a/Event/xAOD/xAODTracking/xAODTracking/ParticleCaloExtension.h b/Event/xAOD/xAODTracking/xAODTracking/ParticleCaloExtension.h index 2e403e4086713ec2d44ac40c10ea2feb0e50d233..374b4fbda9ca1f0a1f5fa81a0812a7b9d5e2f1df 100644 --- a/Event/xAOD/xAODTracking/xAODTracking/ParticleCaloExtension.h +++ b/Event/xAOD/xAODTracking/xAODTracking/ParticleCaloExtension.h @@ -1,19 +1,17 @@ /* - Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration */ -#ifndef XAOD_PARTICLECALOEXTENSION_H +#ifndef XAOD_PARTICLECALOEXTENSION_H #define XAOD_PARTICLECALOEXTENSION_H -#include "xAODTracking/TrackingPrimitives.h" +#include "xAODTracking/TrackingPrimitives.h" #include "EventPrimitives/EventPrimitivesHelpers.h" -#ifndef XAOD_STANDALONE -#ifndef XAOD_MANACORE +#ifndef XAOD_ANALYSIS // Athena includes #include "TrkParameters/TrackParameters.h" -#endif // not XAOD_MANACORE -#endif // not XAOD_STANDALONE +#endif // not XAOD_ANALYSIS #include <vector> class CaloCell; @@ -24,9 +22,9 @@ namespace xAOD { class ParticleCaloExtension { public: /** constructor taking calo extrapolation as input. 
All vectors should be of the same length */ - ParticleCaloExtension( float charge, - std::vector<std::vector<float> >&& parameters, - std::vector<std::vector<float> >&& parametersCovariance, + ParticleCaloExtension( float charge, + std::vector<std::vector<float> >&& parameters, + std::vector<std::vector<float> >&& parametersCovariance, std::vector<int>&& identifiers ); /** destructor */ @@ -38,8 +36,8 @@ namespace xAOD { /** no assignment operator */ ParticleCaloExtension& operator=(const ParticleCaloExtension&) = delete; - /// Returns the number of additional parameters stored in the Particle. - size_t numberOfParameters() const; + /// Returns the number of additional parameters stored in the Particle. + size_t numberOfParameters() const; /// Returns the track parameter vector at 'index'. const CurvilinearParameters_t trackParameters(unsigned int index) const; @@ -50,11 +48,11 @@ namespace xAOD { /// @brief fill the matrix with the covariance at position 'index', returns false if the parameters at 'index' does not have a covariance bool trackParameterCovarianceMatrix(ParametersCovMatrix_t& matrix, unsigned int index) const; -#if ( ! defined(XAOD_STANDALONE) ) && ( ! defined(XAOD_MANACORE) ) +#ifndef XAOD_ANALYSIS /// @brief Returns a curvilinear representation of the parameters at 'index'. - /// @note This is only available in Athena. - const Trk::CurvilinearParameters curvilinearParameters(unsigned int index) const; -#endif // not XAOD_STANDALONE and not XAOD_MANACORE + /// @note This is only available in Athena. 
+ const Trk::CurvilinearParameters curvilinearParameters(unsigned int index) const; +#endif // not XAOD_ANALYSIS /** return whether cells were already associated or not */ bool cellsAreAssociated() const; @@ -77,13 +75,13 @@ namespace xAOD { /// identifiers of the intersections with detector layers std::vector<int> m_identifiers; - + /** cell information */ bool m_cellsAreSet; // bool to store whether already set std::vector<CaloCell*> m_caloCells; // vector of cells }; - /// Returns the number of additional parameters stored in the Particle. + /// Returns the number of additional parameters stored in the Particle. inline size_t ParticleCaloExtension::numberOfParameters() const { return m_parameters.size(); } @@ -94,7 +92,7 @@ namespace xAOD { tmp << m_parameters[index][0],m_parameters[index][1],m_parameters[index][2], m_parameters[index][3],m_parameters[index][4],m_parameters[index][5]; return tmp; - } + } inline bool ParticleCaloExtension::trackParameterCovarianceMatrix(ParametersCovMatrix_t& cov, unsigned int index) const { const std::vector<float>& covVec = m_parametersCovariance[index]; @@ -105,12 +103,12 @@ namespace xAOD { } return true; } - + /// @brief Return the ParameterPosition of the parameters at 'index'. inline int ParticleCaloExtension::parameterIdentifier(unsigned int index) const { return m_identifiers[index]; } - + inline bool ParticleCaloExtension::cellsAreAssociated() const { return m_cellsAreSet; } @@ -124,13 +122,13 @@ namespace xAOD { m_cellsAreSet=true; } -#if ( ! defined(XAOD_STANDALONE) ) && ( ! 
defined(XAOD_MANACORE) ) - inline const Trk::CurvilinearParameters ParticleCaloExtension::curvilinearParameters(unsigned int index) const { +#ifndef XAOD_ANALYSIS + inline const Trk::CurvilinearParameters ParticleCaloExtension::curvilinearParameters(unsigned int index) const { // copy the correct values into the temp matrix ParametersCovMatrix_t* cov = 0; if( !m_parametersCovariance[index].empty() ) { - cov = new ParametersCovMatrix_t(); + cov = new ParametersCovMatrix_t(); trackParameterCovarianceMatrix(*cov,index); } // retrieve the parameters to build the curvilinear frame @@ -140,7 +138,7 @@ namespace xAOD { return param; } -#endif // not XAOD_STANDALONE and not XAOD_MANACORE +#endif // not XAOD_ANALYSIS } diff --git a/Event/xAOD/xAODTracking/xAODTracking/versions/NeutralParticleAuxContainer_v1.h b/Event/xAOD/xAODTracking/xAODTracking/versions/NeutralParticleAuxContainer_v1.h index 2471aad6d1d16a7fb908e020b9c94ee36806adc1..da942e9e4bd82ac7b2ba80f3c9f5cc43245ec517 100644 --- a/Event/xAOD/xAODTracking/xAODTracking/versions/NeutralParticleAuxContainer_v1.h +++ b/Event/xAOD/xAODTracking/xAODTracking/versions/NeutralParticleAuxContainer_v1.h @@ -1,31 +1,19 @@ // Dear emacs, this is -*- c++ -*- - /* - Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration */ - -// $Id: NeutralParticleAuxContainer_v1.h 573493 2013-12-03 13:05:51Z salzburg $ #ifndef XAODTRACKING_VERSIONS_NEUTRALPARTICLEAUXCONTAINER_V1_H #define XAODTRACKING_VERSIONS_NEUTRALPARTICLEAUXCONTAINER_V1_H - + // System include(s): #include <stdint.h> #include <vector> - + // EDM include(s): #include "xAODCore/AuxContainerBase.h" -#include "AthLinks/ElementLink.h" -#ifndef XAOD_STANDALONE -#ifndef XAOD_MANACORE -#include "TrkTrack/TrackCollection.h" -#endif // not XAOD_MANACORE -#endif // not XAOD_STANDALONE - -// Want to be able to switch this off and on for the moment -//#define XAODTRACKING_SUMMARYDYNAMIC namespace 
xAOD { - + /// Temporary container used until we have I/O for AuxStoreInternal /// /// This class is meant to serve as a temporary way to provide an auxiliary @@ -35,17 +23,17 @@ namespace xAOD { /// @author Andreas Salzburger /// class NeutralParticleAuxContainer_v1 : public AuxContainerBase { - + public: /// Default constructor NeutralParticleAuxContainer_v1(); - + private: /// Dumps contents (for debugging) void dump() const; - + /// @name Defining parameters (perigee) - /// @{ + /// @{ std::vector< float > d0; std::vector< float > z0; std::vector< float > phi; @@ -57,12 +45,11 @@ namespace xAOD { std::vector< float > vx; std::vector< float > vy; std::vector< float > vz; - }; // class NeutralParticleAuxContainer_v1 - + } // namespace xAOD - + #include "xAODCore/BaseInfo.h" SG_BASE( xAOD::NeutralParticleAuxContainer_v1, xAOD::AuxContainerBase ); diff --git a/Event/xAOD/xAODTracking/xAODTracking/versions/NeutralParticle_v1.h b/Event/xAOD/xAODTracking/xAODTracking/versions/NeutralParticle_v1.h index e35519f6576b881f78ed32615e036b6800a3a29c..ed8187a3a2ac375bd6f2cd20917fc577af9a321a 100644 --- a/Event/xAOD/xAODTracking/xAODTracking/versions/NeutralParticle_v1.h +++ b/Event/xAOD/xAODTracking/xAODTracking/versions/NeutralParticle_v1.h @@ -1,7 +1,7 @@ // Dear emacs, this is -*- c++ -*- /* - Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration */ #ifndef XAODTRACKING_VERSIONS_NEUTRALPARTICLE_V1_H @@ -15,15 +15,15 @@ // xAOD include(s): #include "xAODBase/IParticle.h" -#include "xAODTracking/TrackingPrimitives.h" -#include "xAODTracking/VertexContainerFwd.h" +#include "xAODTracking/TrackingPrimitives.h" +#include "xAODTracking/VertexContainerFwd.h" #include <bitset> #include <stdint.h> -#ifndef XAOD_STANDALONE +#ifndef XAOD_ANALYSIS #include "TrkNeutralParameters/NeutralParameters.h" -#endif // not XAOD_STANDALONE +#endif // not XAOD_ANALYSIS // ROOT include(s): #include 
"Math/Vector4D.h" @@ -36,7 +36,7 @@ namespace xAOD { /// /// @author Andreas Salzburger <Andreas.Salzburger@cern.ch> /// @nosubgrouping - + class NeutralParticle_v1 : public IParticle { public: @@ -49,7 +49,7 @@ namespace xAOD { NeutralParticle_v1(const NeutralParticle_v1& o ); /// Assignment operator. This can involve creating and copying an Auxilary store, and so should be used sparingly. NeutralParticle_v1& operator=(const NeutralParticle_v1& tp ); - + /// @name xAOD::IParticle functions /// @{ /// The transverse momentum (\f$p_T\f$) of the particle. @@ -64,10 +64,10 @@ namespace xAOD { virtual double e() const; /// The true rapidity (y) of the particle. virtual double rapidity() const; - + /// Definition of the 4-momentum type. typedef IParticle::FourMom_t FourMom_t; - + /// The full 4-momentum of the particle. virtual FourMom_t p4() const; @@ -76,11 +76,11 @@ namespace xAOD { /// The full 4-momentum of the particle : GenVector form GenVecFourMom_t genvecP4() const; - + /// The type of the object as a simple enumeration virtual Type::ObjectType type() const; /// @} - + /// @name Defining parameters functions /// The 'defining parameters' are key to the concept of a NeutralParticle, and give the values for the IParticle interface /// ( pt(), phi(), eta() etc.). @@ -89,7 +89,7 @@ namespace xAOD { /// The parameters are expressed with respect to an origin (returned by vx(), vy() and vy() ), currently intended to be the 'beamspot'. /// This origin is expected to be the same for all track particles in a collection (and this may be be enforced). /// @{ - + /// Returns the \f$d_0\f$ parameter float d0() const; /// Returns the \f$z_0\f$ parameter @@ -100,22 +100,22 @@ namespace xAOD { float theta() const; /// Returns the \f$q/p\f$ parameter float oneOverP() const; - /// @brief Returns a SVector of the Perigee track parameters. + /// @brief Returns a SVector of the Perigee track parameters. /// i.e. 
a vector of /// \f$\left(\begin{array}{c}d_0\\z_0\\\phi_0\\\theta\\q/p\end{array}\right)\f$ const DefiningParameters_t definingParameters() const; /// Returns the 5x5 symmetric matrix containing the defining parameters covariance matrix. - const ParametersCovMatrix_t definingParametersCovMatrix() const; + const ParametersCovMatrix_t definingParametersCovMatrix() const; /// Returns the vector of the covariance values - 15 elements const std::vector<float>& definingParametersCovMatrixVec() const; - - /// Set the defining parameters. + + /// Set the defining parameters. void setDefiningParameters(float d0, float z0, float phi0, float theta, float qOverP); /// Set the defining parameters covariance matrix. void setDefiningParametersCovMatrix(const ParametersCovMatrix_t& cov); /// Set the defining parameters covariance matrix using a length 15 vector. void setDefiningParametersCovMatrixVec(const std::vector<float>& cov); - + /// The x origin for the parameters. float vx() const; /// The y origin for the parameters. @@ -125,24 +125,24 @@ namespace xAOD { /// Set the origin for the parameters. void setParametersOrigin(float x, float y, float z); -#ifndef XAOD_STANDALONE +#ifndef XAOD_ANALYSIS /// @brief Returns the Trk::NeutralPerigee track parameters. /// /// These are defined as: /// \f$\left(\begin{array}{c}d_0\\z_0\\\phi_0\\\theta\\1/p\\\end{array}\right)\f$ - /// @note This is only available in Athena. + /// @note This is only available in Athena. const Trk::NeutralPerigee& perigeeParameters() const; -#endif // not XAOD_STANDALONE +#endif // not XAOD_ANALYSIS /// Reset the internal cache of the object void resetCache(); - + private: -#if ( ! defined(XAOD_STANDALONE) ) && ( ! defined(__CLING__) ) +#if ( ! defined(XAOD_ANALYSIS) ) && ( ! defined(__CLING__) ) /// @brief Cached NeutralPerigee, built from this object. /// @note This is only available in Athena. 
CxxUtils::CachedValue<Trk::NeutralPerigee> m_perigeeParameters; -#endif // not XAOD_STANDALONE and not __CLING__ +#endif // not XAOD_ANALYSIS and not __CLING__ }; // class NeutralParticle_v1 diff --git a/Event/xAOD/xAODTracking/xAODTracking/versions/TrackParticle_v1.h b/Event/xAOD/xAODTracking/xAODTracking/versions/TrackParticle_v1.h index b2b0a0c24c0b9afc481437347226aa5aa1b5ebe6..c4bbcf0f915e40f101d7fba6163156ac47e8dcbd 100644 --- a/Event/xAOD/xAODTracking/xAODTracking/versions/TrackParticle_v1.h +++ b/Event/xAOD/xAODTracking/xAODTracking/versions/TrackParticle_v1.h @@ -20,13 +20,13 @@ extern "C" { // xAOD include(s): #include "xAODBase/IParticle.h" -#include "xAODTracking/TrackingPrimitives.h" +#include "xAODTracking/TrackingPrimitives.h" -#ifndef XAOD_STANDALONE +#ifndef XAOD_ANALYSIS // Athena includes #include "TrkParameters/TrackParameters.h" #include "TrkTrack/TrackCollection.h" -#endif // not XAOD_STANDALONE +#endif // not XAOD_ANALYSIS // ROOT include(s): #include "Math/Vector4D.h" @@ -52,7 +52,7 @@ namespace xAOD { TrackParticle_v1(const TrackParticle_v1& o ); /// Assignment operator. This can involve creating and copying an Auxilary store, and so should be used sparingly. TrackParticle_v1& operator=(const TrackParticle_v1& tp ); - + /// @name IParticle functions /// @{ /// The transverse momentum (\f$p_T\f$) of the particle. @@ -105,7 +105,7 @@ namespace xAOD { float theta() const; /// Returns the \f$q/p\f$ parameter float qOverP() const; - /// @brief Returns a SVector of the Perigee track parameters. + /// @brief Returns a SVector of the Perigee track parameters. /// i.e. a vector of /// \f$\left(\begin{array}{c}d_0\\z_0\\\phi_0\\\theta\\q/p\end{array}\right)\f$ DefiningParameters_t definingParameters() const; @@ -120,7 +120,7 @@ namespace xAOD { /// Returns the length 6 vector containing the elements of defining parameters covariance matrix. 
std::vector<float> definingParametersCovMatrixVec() const; bool definingParametersCovMatrixOffDiagCompr() const ; - /// Set the defining parameters. + /// Set the defining parameters. void setDefiningParameters(float d0, float z0, float phi0, float theta, float qOverP); /// Set the defining parameters covariance matrix. void setDefiningParametersCovMatrix(const ParametersCovMatrix_t& cov); @@ -141,22 +141,22 @@ namespace xAOD { /// Set the origin for the parameters. void setParametersOrigin(float x, float y, float z); -#ifndef XAOD_STANDALONE +#ifndef XAOD_ANALYSIS /// @brief Returns the Trk::MeasuredPerigee track parameters. /// /// These are defined as: /// \f$\left(\begin{array}{c}d_0\\z_0\\\phi_0\\\theta\\q/p\\\end{array}\right)\f$ - /// @note This is only available in Athena. + /// @note This is only available in Athena. const Trk::Perigee& perigeeParameters() const; -#endif // not XAOD_STANDALONE +#endif // not XAOD_ANALYSIS /// @} /// @name Curvilinear functions /// The set of functions which return other track parameters. - /// The remaining track parameters (i.e. not the 'defining parameters') use the 'curvilinear' coordinate system, + /// The remaining track parameters (i.e. not the 'defining parameters') use the 'curvilinear' coordinate system, /// and are represented by the parameters @f$(x,y,z,p_x,p_y,p_z)@f$. /// The parameters can have an associated local 5x5 error/covariance matrix. They are expressed at various points through the - /// detector, which can be determined by the parameterPosition() method. + /// detector, which can be determined by the parameterPosition() method. /// @code /// // Example code to use parameters /// unsigned int index=0; @@ -165,8 +165,8 @@ namespace xAOD { /// } /// @endcode /// @{ - /// Returns the number of additional parameters stored in the TrackParticle. - size_t numberOfParameters() const; + /// Returns the number of additional parameters stored in the TrackParticle. 
+ size_t numberOfParameters() const; /// Returns the track parameter vector at 'index'. const CurvilinearParameters_t trackParameters(unsigned int index) const; /// Returns the parameter x position, for 'index'. @@ -180,17 +180,17 @@ namespace xAOD { /// Returns the parameter y momentum component, for 'index'. float parameterPY(unsigned int index) const; /// Returns the parameter z momentum component, for 'index'. - float parameterPZ(unsigned int index) const; - /// Set the parameters via the passed vector of vectors. + float parameterPZ(unsigned int index) const; + /// Set the parameters via the passed vector of vectors. /// The vector<float> should be of size 6: x,y,z,px,py,pz (charge is stored elsewhere) void setTrackParameters(std::vector<std::vector<float> >& parameters); - /// @brief Returns the TrackParticleCovMatrix_t (covariance matrix) at 'index', + /// @brief Returns the TrackParticleCovMatrix_t (covariance matrix) at 'index', /// which corresponds to the parameters at the same index. ParametersCovMatrix_t trackParameterCovarianceMatrix(unsigned int index) const; /// Set the cov matrix of the parameter at 'index', using a vector of floats. - /// The vector @f$\mathrm{v}(a1,a2,a3 ... a_{15})@f$ represents the lower diagonal, i.e. it gives a matrix of + /// The vector @f$\mathrm{v}(a1,a2,a3 ... a_{15})@f$ represents the lower diagonal, i.e. it gives a matrix of /// \f$\left(\begin{array}{ccccc} a_1 & a_2 & a_4 & a_7 & a_{11} \\ a_2 & a_3 & a_5 & a_8 & a_{12} \\ a_4 & a_5 & a_6 & a_9 & a_{13} \\ a_7 & a_8 & a_9 & a_{10} & a_{14} \\ a_{11} & a_{12} & a_{13} & a_{14} & a_{15} \end{array}\right)\f$ - void setTrackParameterCovarianceMatrix(unsigned int index, std::vector<float>& cov); + void setTrackParameterCovarianceMatrix(unsigned int index, std::vector<float>& cov); /// @brief Return the ParameterPosition of the parameters at 'index'. 
xAOD::ParameterPosition parameterPosition(unsigned int index) const; /// @brief Function to determine if this TrackParticle contains track parameters at a certain position, and if so, what the 'index' is. @@ -198,30 +198,30 @@ namespace xAOD { /// @param[out] position The location in the detector of the required track parameters. /// @return Returns 'true' if the TrackParticle parameters at 'position', returns False otherwise. bool indexOfParameterAtPosition(unsigned int& index, ParameterPosition position) const; - /// Set the 'position' (i.e. where it is in ATLAS) of the parameter at 'index', using the ParameterPosition enum. + /// Set the 'position' (i.e. where it is in ATLAS) of the parameter at 'index', using the ParameterPosition enum. void setParameterPosition(unsigned int index, ParameterPosition pos); -#ifndef XAOD_STANDALONE +#ifndef XAOD_ANALYSIS /// @brief Returns a curvilinear representation of the parameters at 'index'. - /// @note This is only available in Athena. - const Trk::CurvilinearParameters curvilinearParameters(unsigned int index) const; -#endif // not XAOD_STANDALONE + /// @note This is only available in Athena. + const Trk::CurvilinearParameters curvilinearParameters(unsigned int index) const; +#endif // not XAOD_ANALYSIS /// Returns the radius of the first hit. float radiusOfFirstHit() const; /// Set the radius of the first hit. void setRadiusOfFirstHit(float radius); - + /// Returns the offline identifier of the first hit. uint64_t identifierOfFirstHit() const; /// Set the offline identifier of the first hit. void setIdentifierOfFirstHit( uint64_t id); - + float beamlineTiltX() const; void setBeamlineTiltX(float tiltX); - + float beamlineTiltY() const; void setBeamlineTiltY(float tiltY); - + uint32_t hitPattern() const; void setHitPattern(uint32_t hitpattern); @@ -239,26 +239,26 @@ namespace xAOD { /// Returns the @f$ \chi^2 @f$ of the overall track fit. 
float chiSquared() const; /// Returns the number of degrees of freedom of the overall track or vertex fit as float. - float numberDoF() const; + float numberDoF() const; /// Set the 'Fit Quality' information. - void setFitQuality(float chiSquared, float numberDoF); + void setFitQuality(float chiSquared, float numberDoF); /// @} /// @name TrackInfo functions /// Contains information about the 'fitter' of this Trk::Track / TrackParticle. - /// Additionally there is some information about how the e.g. fit was configured. + /// Additionally there is some information about how the e.g. fit was configured. /// Also the information on the properties of the track fit is stored. /// @{ /// Methods setting the TrackProperties. void setTrackProperties (const TrackProperties properties) ; - /// Method setting the pattern recognition algorithm, using a bitset. + /// Method setting the pattern recognition algorithm, using a bitset. /// The bitset should be created using the TrackPatternRecoInfo enum as follows: /// @code /// const std::bitset<xAOD::NumberOfTrackRecoInfo> patternReco; /// patternReco.set(xAOD::Fatras); /// @endcode void setPatternRecognitionInfo(const std::bitset<xAOD::NumberOfTrackRecoInfo>& patternReco) ; - /// Method setting the pattern recognition algorithm, using a 64-bit int (which is faster than using a bitset). + /// Method setting the pattern recognition algorithm, using a 64-bit int (which is faster than using a bitset). /// The bit set should be created using the TrackPatternRecoInfo enum as follows: /// @code /// uint64_t patternReco; @@ -269,10 +269,10 @@ namespace xAOD { void setTrackFitter(const TrackFitter fitter) ; /// Method for setting the particle type, using the ParticleHypothesis enum. 
void setParticleHypothesis(const ParticleHypothesis hypo); - ///Access methods for track properties, which returns 'true' + ///Access methods for track properties, which returns 'true' /// if a logical AND of the parameter 'proprty' and the stored properties returns true. - /// i.e. you do: - /// @code + /// i.e. you do: + /// @code /// TrackProperties testProperty; /// testProperty.set(SOMEPROPERTY); /// if (trackParticle.trackProperties(testProperty)) doSomething(); @@ -289,17 +289,17 @@ namespace xAOD { /// Accessor for TrackSummary values. - /// If 'information' is stored in this TrackParticle and is of the correct templated type T, - /// then the function fills 'value' and returns 'true', otherwise it returns 'false', and does not touch 'value'. + /// If 'information' is stored in this TrackParticle and is of the correct templated type T, + /// then the function fills 'value' and returns 'true', otherwise it returns 'false', and does not touch 'value'. /// See below for an example of how this is intended to be used. /// @code /// int numberOfBLayerHits=0; /// if( myParticle.summaryValue(numberOfBLayerHits,xAOD::numberOfBLayerHits) ){ - /// ATH_MSG_INFO("Successfully retrieved the integer value, numberOfBLayerHits"); + /// ATH_MSG_INFO("Successfully retrieved the integer value, numberOfBLayerHits"); /// } /// float numberOfCscPhiHits=0.0; //Wrong! This is actually an int too. /// if( !myParticle.summaryValue(numberOfCscPhiHits,xAOD::numberOfCscPhiHits) ){ - /// ATH_MSG_INFO("Types must match!"); + /// ATH_MSG_INFO("Types must match!"); /// } /// @endcode /// @param[in] information The information being requested. This is not guaranteed to be stored in all TrackParticles. 
@@ -313,26 +313,26 @@ namespace xAOD { /// @copydoc TrackParticle_v1::setSummaryValue(uint8_t& value, const SummaryType &information) void setSummaryValue(float& value, const SummaryType &information); /// @} - + /// @name Links /// @{ -#ifndef XAOD_STANDALONE +#ifndef XAOD_ANALYSIS /// @brief Returns a link (which can be invalid) to the Trk::Track which was used to make this TrackParticle. - /// @note This is only available in Athena. + /// @note This is only available in Athena. const ElementLink< TrackCollection >& trackLink() const; /// @brief Set the link to the original track /// @note This is only available in Athena. void setTrackLink(const ElementLink< TrackCollection >& track); /// @brief Returns a pointer (which can be NULL) to the Trk::Track which was used to make this TrackParticle. - /// @note This is only available in Athena. - const Trk::Track* track() const; -#endif // not XAOD_STANDALONE + /// @note This is only available in Athena. + const Trk::Track* track() const; +#endif // not XAOD_ANALYSIS /// @} /// Reset the internal cache of the object void resetCache(); - + private: enum covMatrixIndex{d0_index=0, z0_index=1, phi_index=2, th_index=3, qp_index=4}; @@ -340,11 +340,11 @@ private: static const std::size_t COVMATRIX_OFFDIAG_VEC_COMPR_SIZE = 6; -#if ( ! defined(XAOD_STANDALONE) ) && ( ! defined(__CLING__) ) +#if ( ! defined(XAOD_ANALYSIS) ) && ( ! defined(__CLING__) ) /// @brief Cached MeasuredPerigee, built from this object. /// @note This is only available in Athena. 
CxxUtils::CachedValue<Trk::Perigee> m_perigeeParameters; -#endif // not XAOD_STANDALONE and not __CLING__ +#endif // not XAOD_ANALYSIS and not __CLING__ }; // class Track Particle diff --git a/Event/xAOD/xAODTracking/xAODTracking/versions/Vertex_v1.h b/Event/xAOD/xAODTracking/xAODTracking/versions/Vertex_v1.h index ee71bebc05ff37e4db2fec7f3cc0288bc72dd527..8d0c8ab17e816f1e5b25e91fa46d6f63b1ce4ef2 100644 --- a/Event/xAOD/xAODTracking/xAODTracking/versions/Vertex_v1.h +++ b/Event/xAOD/xAODTracking/xAODTracking/versions/Vertex_v1.h @@ -1,7 +1,7 @@ // Dear emacs, this is -*- c++ -*- /* - Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration */ #ifndef XAODTRACKING_VERSIONS_VERTEX_V1_H @@ -17,11 +17,9 @@ // EDM include(s): #include "EventPrimitives/EventPrimitives.h" #include "GeoPrimitives/GeoPrimitives.h" -#ifndef XAOD_STANDALONE -#ifndef XAOD_MANACORE +#ifndef XAOD_ANALYSIS # include "VxVertex/VxTrackAtVertex.h" -#endif // not XAOD_MANACORE -#endif // not XAOD_STANDALONE +#endif // not XAOD_ANALYSIS // xAOD include(s): #include "xAODTracking/TrackingPrimitives.h" @@ -90,7 +88,7 @@ namespace xAOD { /// Returns the @f$ \chi^2 @f$ of the vertex fit as float. float chiSquared() const; /// Returns the number of degrees of freedom of the vertex fit as float. - float numberDoF() const; + float numberDoF() const; /// Set the 'Fit Quality' information. void setFitQuality( float chiSquared, float numberDoF ); @@ -101,14 +99,14 @@ namespace xAOD { /// Set the type of the vertex void setVertexType( VxType::VertexType vType ); -#if ( ! defined(XAOD_STANDALONE) ) && ( ! defined(XAOD_MANACORE) ) +#ifndef XAOD_ANALYSIS /// Non-const access to the VxTrackAtVertex vector std::vector< Trk::VxTrackAtVertex >& vxTrackAtVertex(); /// Const access to the vector of tracks fitted to the vertex (may not exist!) 
const std::vector< Trk::VxTrackAtVertex >& vxTrackAtVertex() const; /// Check if VxTrackAtVertices are attached to the object bool vxTrackAtVertexAvailable() const; -#endif // not XAOD_STANDALONE and not XAOD_MANACORE +#endif // not XAOD_ANALYSIS /// @name Track particle contents operations /// @{ diff --git a/Event/xAOD/xAODTracking/xAODTracking/xAODTrackingDict.h b/Event/xAOD/xAODTracking/xAODTracking/xAODTrackingDict.h index 78f6ff384d6fdbf02003d1ed6a44c7095c6c426c..4d8baa0f69c19b00a1b8f46677db9f53da0d6ef8 100644 --- a/Event/xAOD/xAODTracking/xAODTracking/xAODTrackingDict.h +++ b/Event/xAOD/xAODTracking/xAODTracking/xAODTrackingDict.h @@ -4,7 +4,7 @@ */ #ifndef XAODTRACKING_XAODTRACKINGDICT_H #define XAODTRACKING_XAODTRACKINGDICT_H - + // Local include(s). #include "xAODTracking/TrackParticle.h" #include "xAODTracking/TrackParticleContainer.h" @@ -74,9 +74,9 @@ namespace { SCTRawHitValidationContainer_v1 ); // Type(s) needed for the dictionary generation to succeed. -#ifndef XAOD_STANDALONE +#ifndef XAOD_ANALYSIS XAOD_INSTANTIATE_CONTAINER_TYPES( TrackCollection ); -#endif // not XAOD_STANDALONE +#endif // not XAOD_ANALYSIS xAOD::CurvilinearParameters_t dummy; }; } diff --git a/Event/xAOD/xAODTrackingCnv/CMakeLists.txt b/Event/xAOD/xAODTrackingCnv/CMakeLists.txt index d48e57d6c758d15465cab18ea560221a7686d01b..a093942c0072680dd1aef773374a72d0ae5fb947 100644 --- a/Event/xAOD/xAODTrackingCnv/CMakeLists.txt +++ b/Event/xAOD/xAODTrackingCnv/CMakeLists.txt @@ -3,17 +3,34 @@ # Declare the package name: atlas_subdir( xAODTrackingCnv ) -# Component(s) in the package: -atlas_add_library( xAODTrackingCnvLib - xAODTrackingCnv/*.h - INTERFACE - PUBLIC_HEADERS xAODTrackingCnv - LINK_LIBRARIES xAODTracking TrkTrack GaudiKernel ) - -atlas_add_component( xAODTrackingCnv - src/*.h src/*.cxx src/components/*.cxx - LINK_LIBRARIES xAODTracking TrkTrack AthenaBaseComps AthenaKernel - EventPrimitives GaudiKernel GeneratorObjects MCTruthClassifierLib Particle - ParticleTruth TrkLinks 
TrkParticleBase TrkTruthData VxVertex - TrkToolInterfaces xAODCore xAODTrackingCnvLib - PRIVATE_LINK_LIBRARIES CxxUtils ) +# Component(s) in the package. Built in a much more lightweight fashion for +# AthAnalysis. +if( XAOD_ANALYSIS ) + + atlas_add_library( xAODTrackingCnvLib + xAODTrackingCnv/ITrackParticleCompressorTool.h + INTERFACE + PUBLIC_HEADERS xAODTrackingCnv + LINK_LIBRARIES xAODTracking GaudiKernel ) + + atlas_add_component( xAODTrackingCnv + src/TrackParticleCompressorTool.* src/components/*.cxx + LINK_LIBRARIES AthenaBaseComps CxxUtils xAODTrackingCnvLib ) + +else() + + atlas_add_library( xAODTrackingCnvLib + xAODTrackingCnv/*.h + INTERFACE + PUBLIC_HEADERS xAODTrackingCnv + LINK_LIBRARIES xAODTracking TrkTrack GaudiKernel ) + + atlas_add_component( xAODTrackingCnv + src/*.h src/*.cxx src/components/*.cxx + LINK_LIBRARIES xAODTracking TrkTrack AthenaBaseComps AthenaKernel + EventPrimitives GaudiKernel GeneratorObjects MCTruthClassifierLib Particle + ParticleTruth TrkLinks TrkParticleBase TrkTruthData VxVertex + TrkToolInterfaces xAODCore xAODTrackingCnvLib + PRIVATE_LINK_LIBRARIES CxxUtils ) + +endif() diff --git a/Event/xAOD/xAODTrackingCnv/src/components/xAODTrackingCnv_entries.cxx b/Event/xAOD/xAODTrackingCnv/src/components/xAODTrackingCnv_entries.cxx index 2e08c663b6c39cfee89cb0ddc5600f19508fd287..9c54f444966682fb2b9125bc0635a36958a2c5cc 100644 --- a/Event/xAOD/xAODTrackingCnv/src/components/xAODTrackingCnv_entries.cxx +++ b/Event/xAOD/xAODTrackingCnv/src/components/xAODTrackingCnv_entries.cxx @@ -1,12 +1,19 @@ -#include "../TrackCollectionCnvTool.h" -#include "../RecTrackParticleContainerCnvTool.h" -#include "../TrackParticleCnvAlg.h" -#include "../VertexCnvAlg.h" +// +// Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +// + +#ifndef XAOD_ANALYSIS +# include "../TrackCollectionCnvTool.h" +# include "../RecTrackParticleContainerCnvTool.h" +# include "../TrackParticleCnvAlg.h" +# include "../VertexCnvAlg.h" +#endif // NOT 
XAOD_ANALYSIS #include "../TrackParticleCompressorTool.h" -DECLARE_COMPONENT( xAODMaker::RecTrackParticleContainerCnvTool ) -DECLARE_COMPONENT( xAODMaker::TrackCollectionCnvTool ) +#ifndef XAOD_ANALYSIS + DECLARE_COMPONENT( xAODMaker::RecTrackParticleContainerCnvTool ) + DECLARE_COMPONENT( xAODMaker::TrackCollectionCnvTool ) + DECLARE_COMPONENT( xAODMaker::TrackParticleCnvAlg ) + DECLARE_COMPONENT( xAODMaker::VertexCnvAlg ) +#endif // NOT XAOD_ANALYSIS DECLARE_COMPONENT( xAODMaker::TrackParticleCompressorTool ) -DECLARE_COMPONENT( xAODMaker::TrackParticleCnvAlg ) -DECLARE_COMPONENT( xAODMaker::VertexCnvAlg ) - diff --git a/Event/xAOD/xAODTriggerAthenaPool/CMakeLists.txt b/Event/xAOD/xAODTriggerAthenaPool/CMakeLists.txt index a9f51cd369034a5f26c96de41aedd7b3aa78424f..840ffb1469e87f8613e1421a73b81dbb76638e07 100644 --- a/Event/xAOD/xAODTriggerAthenaPool/CMakeLists.txt +++ b/Event/xAOD/xAODTriggerAthenaPool/CMakeLists.txt @@ -37,7 +37,7 @@ atlas_add_poolcnv_library( xAODTriggerAthenaPoolPoolCnv xAOD::BunchConfContainer xAOD::BunchConfAuxContainer xAOD::TrigPassBitsContainer xAOD::TrigPassBitsAuxContainer CNV_PFX xAOD - LINK_LIBRARIES TrigNavToolsLib AthContainers AthenaKernel AthenaPoolCnvSvcLib + LINK_LIBRARIES AthContainers AthenaKernel AthenaPoolCnvSvcLib AthenaPoolUtilities xAODTrigger ) # Install files from the package. 
diff --git a/ForwardDetectors/ZDC/ZdcConditions/ZdcConditions/ZdcCablingService.h b/ForwardDetectors/ZDC/ZdcConditions/ZdcConditions/ZdcCablingService.h index 3f44a8c2e84334c18abec1818a151f00dc303c9e..5c153baf2e3f6c8f521d43ae9fb881ecc9489bc1 100755 --- a/ForwardDetectors/ZDC/ZdcConditions/ZdcConditions/ZdcCablingService.h +++ b/ForwardDetectors/ZDC/ZdcConditions/ZdcConditions/ZdcCablingService.h @@ -74,7 +74,6 @@ public: int m_channel_db[4][64]; int m_hv_db[4][64]; int m_ppm_db[16]; - int m_crate_db[8]; int m_crate_index[4]; int m_ncrate; //int m_crate_lookup[2][3][2]; diff --git a/InnerDetector/InDetConfig/CMakeLists.txt b/InnerDetector/InDetConfig/CMakeLists.txt index 193803d2174d22f4af3e6978d3d272d42e5d05b9..7c6b3a570c5aa1f0d1a4fd9a283ba233c054e28a 100644 --- a/InnerDetector/InDetConfig/CMakeLists.txt +++ b/InnerDetector/InDetConfig/CMakeLists.txt @@ -14,3 +14,10 @@ atlas_add_test( TrackingCutsFlags_test SCRIPT python -m InDetConfig.TrackingCutsFlags POST_EXEC_SCRIPT nopost.sh) +atlas_add_test( BackTrackingConfig_test + SCRIPT python -m InDetConfig.BackTrackingConfig --norun + POST_EXEC_SCRIPT nopost.sh) + +atlas_add_test( TRTSegmentFindingConfig_test + SCRIPT python -m InDetConfig.TRTSegmentFindingConfig --norun + POST_EXEC_SCRIPT nopost.sh) diff --git a/InnerDetector/InDetConfig/python/BackTrackingConfig.py b/InnerDetector/InDetConfig/python/BackTrackingConfig.py index 5d2714b8d588a25aaca0742a73e555b90b3bb0df..e0aba1065e439c63579fabc9320857fe1988e985 100644 --- a/InnerDetector/InDetConfig/python/BackTrackingConfig.py +++ b/InnerDetector/InDetConfig/python/BackTrackingConfig.py @@ -3,7 +3,7 @@ from AthenaConfiguration.ComponentAccumulator import ComponentAccumulator from AthenaConfiguration.ComponentFactory import CompFactory import InDetConfig.TrackingCommonConfig as TC -def SiDetElementsRoadMaker_xkCfg(flags, name = 'InDetTRT_SeededSiRoad', TrackingFlags=None, **kwargs): +def SiDetElementsRoadMaker_xkCfg(flags, name = 'InDetTRT_SeededSiRoad', **kwargs): acc = 
ComponentAccumulator() # # Silicon det elements road maker tool @@ -12,9 +12,9 @@ def SiDetElementsRoadMaker_xkCfg(flags, name = 'InDetTRT_SeededSiRoad', Tracking acc.addPublicTool(InDetPatternPropagator) kwargs.setdefault("PropagatorTool", InDetPatternPropagator) - kwargs.setdefault("usePixel", TrackingFlags.usePixel) + kwargs.setdefault("usePixel", flags.InDet.Tracking.usePixel) kwargs.setdefault("PixManagerLocation", 'Pixel') # InDetKeys.PixelManager() - kwargs.setdefault("useSCT", TrackingFlags.useSCT) + kwargs.setdefault("useSCT", flags.InDet.Tracking.useSCT) kwargs.setdefault("SCTManagerLocation", 'SCT') # InDetKeys.SCT_Manager() kwargs.setdefault("RoadWidth", 35.) kwargs.setdefault("MaxStep", 20.) @@ -22,7 +22,7 @@ def SiDetElementsRoadMaker_xkCfg(flags, name = 'InDetTRT_SeededSiRoad', Tracking if flags.Beam.Type == "cosmics": kwargs.setdefault("RoadWidth", 50) # Condition algorithm for InDet__SiDetElementsRoadMaker_xk - if TrackingFlags.useSCT: + if flags.InDet.Tracking.useSCT: acc.addCondAlgo(CompFactory.InDet.SiDetElementsRoadCondAlg_xk(name = "InDet__SiDetElementsRoadCondAlg_xk")) InDetTRT_SeededSiRoadMaker = CompFactory.InDet.SiDetElementsRoadMaker_xk(name = name, **kwargs) @@ -52,7 +52,7 @@ def SimpleTRT_SeededSpacePointFinder_ATLCfg(flags, name='InDetTRT_SeededSpFinder acc.setPrivateTools(InDetTRT_SeededSpacePointFinder) return acc -def TRT_SeededSpacePointFinder_ATLCfg(flags, name='InDetTRT_SeededSpFinder', TrackingFlags=None, InputCollections=[], **kwargs): +def TRT_SeededSpacePointFinder_ATLCfg(flags, name='InDetTRT_SeededSpFinder', InputCollections=[], **kwargs): acc = ComponentAccumulator() # # --- decide if use the association tool @@ -74,13 +74,13 @@ def TRT_SeededSpacePointFinder_ATLCfg(flags, name='InDetTRT_SeededSpFinder', Tra kwargs.setdefault("NeighborSearch", True) kwargs.setdefault("LoadFull", False) kwargs.setdefault("DoCosmics", flags.Beam.Type == "cosmics") - kwargs.setdefault("pTmin", TrackingFlags.minSecondaryPt) + 
kwargs.setdefault("pTmin", flags.InDet.Tracking.minSecondaryPt) InDetTRT_SeededSpacePointFinder = CompFactory.InDet.TRT_SeededSpacePointFinder_ATL(name = name, **kwargs) acc.setPrivateTools(InDetTRT_SeededSpacePointFinder) return acc -def TRT_SeededTrackFinder_ATLCfg(flags, name='InDetTRT_SeededTrackMaker', TrackingFlags=None, InputCollections=[], **kwargs): +def TRT_SeededTrackFinder_ATLCfg(flags, name='InDetTRT_SeededTrackMaker', InputCollections=[], **kwargs): acc = ComponentAccumulator() # # --- TRT seeded back tracking tool @@ -94,8 +94,8 @@ def TRT_SeededTrackFinder_ATLCfg(flags, name='InDetTRT_SeededTrackMaker', Tracki InDetSiComTrackFinder = acc.popToolsAndMerge(TC.SiCombinatorialTrackFinder_xkCfg(flags)) acc.addPublicTool(InDetSiComTrackFinder) - if (TrackingFlags.usePixel and TrackingFlags.useSCT) is not False: - InDetTRT_SeededSiRoadMaker = acc.popToolsAndMerge(SiDetElementsRoadMaker_xkCfg(flags, TrackingFlags=TrackingFlags)) + if (flags.InDet.Tracking.usePixel and flags.InDet.Tracking.useSCT) is not False: + InDetTRT_SeededSiRoadMaker = acc.popToolsAndMerge(SiDetElementsRoadMaker_xkCfg(flags)) acc.addPublicTool(InDetTRT_SeededSiRoadMaker) kwargs.setdefault("RoadTool", InDetTRT_SeededSiRoadMaker) @@ -104,7 +104,6 @@ def TRT_SeededTrackFinder_ATLCfg(flags, name='InDetTRT_SeededTrackMaker', Tracki # if flags.InDet.loadTRTSeededSPFinder: InDetTRT_SeededSpacePointFinder = acc.popToolsAndMerge(TRT_SeededSpacePointFinder_ATLCfg(flags, - TrackingFlags=TrackingFlags, InputCollections=InputCollections)) elif flags.InDet.loadSimpleTRTSeededSPFinder: InDetTRT_SeededSpacePointFinder = acc.popToolsAndMerge(SimpleTRT_SeededSpacePointFinder_ATLCfg(flags, InputCollections=InputCollections)) @@ -115,11 +114,11 @@ def TRT_SeededTrackFinder_ATLCfg(flags, name='InDetTRT_SeededTrackMaker', Tracki kwargs.setdefault("UpdatorTool", InDetPatternUpdator) kwargs.setdefault("SeedTool", InDetTRT_SeededSpacePointFinder) kwargs.setdefault("CombinatorialTrackFinder", 
InDetSiComTrackFinder) - kwargs.setdefault("pTmin", TrackingFlags.minSecondaryPt) - kwargs.setdefault("nHolesMax", TrackingFlags.SecondarynHolesMax) - kwargs.setdefault("nHolesGapMax", TrackingFlags.SecondarynHolesGapMax) - kwargs.setdefault("Xi2max", TrackingFlags.SecondaryXi2max) - kwargs.setdefault("Xi2maxNoAdd", TrackingFlags.SecondaryXi2maxNoAdd) + kwargs.setdefault("pTmin", flags.InDet.Tracking.minSecondaryPt) + kwargs.setdefault("nHolesMax", flags.InDet.Tracking.SecondarynHolesMax) + kwargs.setdefault("nHolesGapMax", flags.InDet.Tracking.SecondarynHolesGapMax) + kwargs.setdefault("Xi2max", flags.InDet.Tracking.SecondaryXi2max) + kwargs.setdefault("Xi2maxNoAdd", flags.InDet.Tracking.SecondaryXi2maxNoAdd) kwargs.setdefault("SearchInCaloROI", False) kwargs.setdefault("InputClusterContainerName", 'InDetCaloClusterROIs') # InDetKeys.CaloClusterROIContainer() kwargs.setdefault("ConsistentSeeds", True) @@ -132,7 +131,7 @@ def TRT_SeededTrackFinder_ATLCfg(flags, name='InDetTRT_SeededTrackMaker', Tracki acc.setPrivateTools(InDetTRT_SeededTrackTool) return acc -def TRT_SeededTrackFinderCfg(flags, name='InDetTRT_SeededTrackFinder', TrackingFlags = None, InputCollections=[], **kwargs): +def TRT_SeededTrackFinderCfg(flags, name='InDetTRT_SeededTrackFinder', InputCollections=[], **kwargs): acc = ComponentAccumulator() # @@ -160,7 +159,7 @@ def TRT_SeededTrackFinderCfg(flags, name='InDetTRT_SeededTrackFinder', TrackingF InDetTrackSummaryToolNoHoleSearch = acc.popToolsAndMerge(TC.InDetTrackSummaryToolNoHoleSearchCfg(flags)) acc.addPublicTool(InDetTrackSummaryToolNoHoleSearch) - InDetTRTExtensionTool = acc.popToolsAndMerge(TC.InDetTRT_ExtensionToolCfg(flags, TrackingFlags = TrackingFlags)) + InDetTRTExtensionTool = acc.popToolsAndMerge(TC.InDetTRT_ExtensionToolCfg(flags)) acc.addPublicTool(InDetTRTExtensionTool) from InDetConfig.InDetRecToolConfig import InDetExtrapolatorCfg @@ -169,7 +168,6 @@ def TRT_SeededTrackFinderCfg(flags, name='InDetTRT_SeededTrackFinder', TrackingF 
acc.merge(tmpAcc) InDetTRT_SeededTrackTool = acc.popToolsAndMerge(TRT_SeededTrackFinder_ATLCfg(flags, - TrackingFlags = TrackingFlags, InputCollections=InputCollections)) acc.addPublicTool(InDetTRT_SeededTrackTool) @@ -178,24 +176,24 @@ def TRT_SeededTrackFinderCfg(flags, name='InDetTRT_SeededTrackFinder', TrackingF kwargs.setdefault("PRDtoTrackMap", prefix+'PRDtoTrackMap'+suffix if usePrdAssociationTool else "") kwargs.setdefault("TrackSummaryTool", InDetTrackSummaryToolNoHoleSearch) kwargs.setdefault("TrackExtensionTool", InDetTRTExtensionTool) - kwargs.setdefault("MinTRTonSegment", TrackingFlags.minSecondaryTRTonTrk) - kwargs.setdefault("MinTRTonly", TrackingFlags.minTRTonly) + kwargs.setdefault("MinTRTonSegment", flags.InDet.Tracking.minSecondaryTRTonTrk) + kwargs.setdefault("MinTRTonly", flags.InDet.Tracking.minTRTonly) kwargs.setdefault("TrtExtension", True) - kwargs.setdefault("SiExtensionCuts", TrackingFlags.SiExtensionCuts) - kwargs.setdefault("minPt", TrackingFlags.minSecondaryPt) - kwargs.setdefault("maxRPhiImp", TrackingFlags.maxSecondaryImpact) - kwargs.setdefault("maxZImp", TrackingFlags.maxZImpact) - kwargs.setdefault("maxEta", TrackingFlags.maxEta) + kwargs.setdefault("SiExtensionCuts", flags.InDet.Tracking.SiExtensionCuts) + kwargs.setdefault("minPt", flags.InDet.Tracking.minSecondaryPt) + kwargs.setdefault("maxRPhiImp", flags.InDet.Tracking.maxSecondaryImpact) + kwargs.setdefault("maxZImp", flags.InDet.Tracking.maxZImpact) + kwargs.setdefault("maxEta", flags.InDet.Tracking.maxEta) kwargs.setdefault("Extrapolator", InDetExtrapolator) - kwargs.setdefault("RejectShortExtension", TrackingFlags.rejectShortExtensions) + kwargs.setdefault("RejectShortExtension", flags.InDet.Tracking.rejectShortExtensions) kwargs.setdefault("FinalRefit", False) kwargs.setdefault("FinalStatistics", False) kwargs.setdefault("OutputSegments", False) kwargs.setdefault("InputSegmentsLocation", 'TRTSegments') # InDetKeys.TRT_Segments() kwargs.setdefault("OutputTracksLocation", 
TRTSeededTracks) - kwargs.setdefault("CaloClusterEt", TrackingFlags.minRoIClusterEt) + kwargs.setdefault("CaloClusterEt", flags.InDet.Tracking.minRoIClusterEt) - if TrackingFlags.RoISeededBackTracking: + if flags.InDet.Tracking.RoISeededBackTracking: from RegionSelector.RegSelToolConfig import regSelTool_SCT_Cfg RegSelTool_SCT = acc.popToolsAndMerge(regSelTool_SCT_Cfg(flags)) acc.addPublicTool(RegSelTool_SCT) @@ -221,10 +219,10 @@ def TrkAmbiguityScoreCfg(name='InDetTRT_SeededAmbiguityScore', **kwargs): acc.addEventAlgo(InDetAmbiguityScore) return acc -def InDetAmbiTrackSelectionToolCfg(flags, name='InDetTRT_SeededAmbiTrackSelectionTool', TrackingFlags=None, **kwargs): +def InDetAmbiTrackSelectionToolCfg(flags, name='InDetTRT_SeededAmbiTrackSelectionTool', **kwargs): acc = ComponentAccumulator() - InDetTRTDriftCircleCut = TC.InDetTRTDriftCircleCutForPatternRecoCfg(flags, TrackingFlags= TrackingFlags) + InDetTRTDriftCircleCut = TC.InDetTRTDriftCircleCutForPatternRecoCfg(flags) acc.addPublicTool(InDetTRTDriftCircleCut) InDetPRDtoTrackMapToolGangedPixels = TC.InDetPRDtoTrackMapToolGangedPixelsCfg(flags) @@ -233,11 +231,11 @@ def InDetAmbiTrackSelectionToolCfg(flags, name='InDetTRT_SeededAmbiTrackSelectio kwargs.setdefault("DriftCircleCutTool", InDetTRTDriftCircleCut) kwargs.setdefault("AssociationTool", InDetPRDtoTrackMapToolGangedPixels) kwargs.setdefault("minScoreShareTracks", -1.) # off ! 
- kwargs.setdefault("minHits", TrackingFlags.minSecondaryClusters) - kwargs.setdefault("minNotShared", TrackingFlags.minSecondarySiNotShared) - kwargs.setdefault("maxShared", TrackingFlags.maxSecondaryShared) - kwargs.setdefault("minTRTHits", TrackingFlags.minSecondaryTRTonTrk) - kwargs.setdefault("UseParameterization", TrackingFlags.useParameterizedTRTCuts) + kwargs.setdefault("minHits", flags.InDet.Tracking.minSecondaryClusters) + kwargs.setdefault("minNotShared", flags.InDet.Tracking.minSecondarySiNotShared) + kwargs.setdefault("maxShared", flags.InDet.Tracking.maxSecondaryShared) + kwargs.setdefault("minTRTHits", flags.InDet.Tracking.minSecondaryTRTonTrk) + kwargs.setdefault("UseParameterization", flags.InDet.Tracking.useParameterizedTRTCuts) kwargs.setdefault("Cosmics", flags.Beam.Type == "cosmics") kwargs.setdefault("doPixelSplitting", flags.InDet.doPixelClusterSplitting) @@ -245,7 +243,7 @@ def InDetAmbiTrackSelectionToolCfg(flags, name='InDetTRT_SeededAmbiTrackSelectio acc.setPrivateTools(InDetTRT_SeededAmbiTrackSelectionTool) return acc -def SimpleAmbiguityProcessorToolCfg(flags, name='InDetTRT_SeededAmbiguityProcessor', TrackingFlags=None, ClusterSplitProbContainer="", **kwargs): +def SimpleAmbiguityProcessorToolCfg(flags, name='InDetTRT_SeededAmbiguityProcessor', ClusterSplitProbContainer="", **kwargs): acc = ComponentAccumulator() # # --- load Ambiguity Processor @@ -260,17 +258,17 @@ def SimpleAmbiguityProcessorToolCfg(flags, name='InDetTRT_SeededAmbiguityProcess # --- set up special Scoring Tool for TRT seeded tracks # if flags.Beam.Type == "cosmics": - InDetTRT_SeededScoringTool = acc.popToolsAndMerge(TC.InDetCosmicScoringTool_TRTCfg(flags, TrackingFlags=TrackingFlags)) + InDetTRT_SeededScoringTool = acc.popToolsAndMerge(TC.InDetCosmicScoringTool_TRTCfg(flags)) acc.addPublicTool(InDetTRT_SeededScoringTool) InDetTRT_SeededSummaryTool = acc.popToolsAndMerge(TC.InDetTrackSummaryToolSharedHitsCfg(flags)) acc.addPublicTool(InDetTRT_SeededSummaryTool) 
else: - InDetTRT_SeededScoringTool = acc.popToolsAndMerge(TC.InDetTRT_SeededScoringToolCfg(flags, TrackingFlags=TrackingFlags)) + InDetTRT_SeededScoringTool = acc.popToolsAndMerge(TC.InDetTRT_SeededScoringToolCfg(flags)) acc.addPublicTool(InDetTRT_SeededScoringTool) InDetTRT_SeededSummaryTool = acc.popToolsAndMerge(TC.InDetTrackSummaryToolCfg(flags)) acc.addPublicTool(InDetTRT_SeededSummaryTool) - InDetTRT_SeededAmbiTrackSelectionTool = acc.popToolsAndMerge(InDetAmbiTrackSelectionToolCfg(flags, TrackingFlags=TrackingFlags)) + InDetTRT_SeededAmbiTrackSelectionTool = acc.popToolsAndMerge(InDetAmbiTrackSelectionToolCfg(flags)) acc.addPublicTool(InDetTRT_SeededAmbiTrackSelectionTool) kwargs.setdefault("Fitter", InDetTrackFitterBT) @@ -278,7 +276,7 @@ def SimpleAmbiguityProcessorToolCfg(flags, name='InDetTRT_SeededAmbiguityProcess kwargs.setdefault("TrackSummaryTool", InDetTRT_SeededSummaryTool) kwargs.setdefault("SelectionTool", InDetTRT_SeededAmbiTrackSelectionTool) kwargs.setdefault("InputClusterSplitProbabilityName", ClusterSplitProbContainer) - kwargs.setdefault("OutputClusterSplitProbabilityName", 'InDetTRT_SeededAmbiguityProcessorSplitProb'+TrackingFlags.extension) + kwargs.setdefault("OutputClusterSplitProbabilityName", 'InDetTRT_SeededAmbiguityProcessorSplitProb'+flags.InDet.Tracking.extension) kwargs.setdefault("RefitPrds", not flags.InDet.refitROT) kwargs.setdefault("SuppressTrackFit", False) kwargs.setdefault("SuppressHoleSearch", False) @@ -293,13 +291,12 @@ def SimpleAmbiguityProcessorToolCfg(flags, name='InDetTRT_SeededAmbiguityProcess acc.setPrivateTools(InDetTRT_SeededAmbiguityProcessor) return acc -def TrkAmbiguitySolverCfg(flags, name='InDetTRT_SeededAmbiguitySolver', TrackingFlags=None, ClusterSplitProbContainer ='', **kwargs): +def TrkAmbiguitySolverCfg(flags, name='InDetTRT_SeededAmbiguitySolver', ClusterSplitProbContainer ='', **kwargs): acc = ComponentAccumulator() ResolvedTRTSeededTracks = 'ResolvedTRTSeededTracks' # 
InDetKeys.ResolvedTRTSeededTracks() InDetTRT_SeededAmbiguityProcessor = acc.popToolsAndMerge(SimpleAmbiguityProcessorToolCfg(flags, - TrackingFlags=TrackingFlags, ClusterSplitProbContainer=ClusterSplitProbContainer)) acc.addPublicTool(InDetTRT_SeededAmbiguityProcessor) @@ -317,7 +314,7 @@ def TrkAmbiguitySolverCfg(flags, name='InDetTRT_SeededAmbiguitySolver', Tracking # # ------------------------------------------------------------ -def BackTrackingCfg(flags, InputCollections = None, TrackingFlags = None, TrackCollectionKeys=[] , TrackCollectionTruthKeys=[], ClusterSplitProbContainer=''): +def BackTrackingCfg(flags, InputCollections = None, TrackCollectionKeys=[] , TrackCollectionTruthKeys=[], ClusterSplitProbContainer=''): acc = ComponentAccumulator() # ------------------------------------------------------------ # @@ -330,7 +327,6 @@ def BackTrackingCfg(flags, InputCollections = None, TrackingFlags = None, TrackC # --- decide which TRT seed space point finder to use # acc.merge(TRT_SeededTrackFinderCfg( flags, - TrackingFlags=TrackingFlags, InputCollections=InputCollections)) # ------------------------------------------------------------ # @@ -340,7 +336,6 @@ def BackTrackingCfg(flags, InputCollections = None, TrackingFlags = None, TrackC if flags.InDet.doResolveBackTracks: acc.merge(TrkAmbiguityScoreCfg()) acc.merge(TrkAmbiguitySolverCfg(flags, - TrackingFlags=TrackingFlags, ClusterSplitProbContainer = ClusterSplitProbContainer)) return acc @@ -433,13 +428,11 @@ if __name__ == "__main__": ######################################## TRTSegmentFinding Configuration ########################################### InputCollections = [] - TrackingFlags = ConfigFlags.InDet.Tracking from InDetConfig.TRTSegmentFindingConfig import TRTSegmentFindingCfg top_acc.merge(TRTSegmentFindingCfg( ConfigFlags, extension = "", InputCollections = InputCollections, - TrackingFlags = TrackingFlags, BarrelSegments = 'TRTSegments', # InDetKeys.TRT_Segments doPhase = False)) @@ -448,7 +441,6 @@ 
if __name__ == "__main__": top_acc.merge(BackTrackingCfg( ConfigFlags, InputCollections = InputCollections, - TrackingFlags = TrackingFlags, TrackCollectionKeys=TrackCollectionKeys, TrackCollectionTruthKeys=[], ClusterSplitProbContainer='')) @@ -459,5 +451,8 @@ if __name__ == "__main__": iovsvc.OutputLevel=5 top_acc.printConfig() - top_acc.run(25) - top_acc.store(open("test_BackTrackingConfig.pkl", "wb")) \ No newline at end of file + top_acc.store(open("test_BackTrackingConfig.pkl", "wb")) + import sys + if "--norun" not in sys.argv: + sc = top_acc.run(25) + sys.exit(not sc.isSuccess()) diff --git a/InnerDetector/InDetConfig/python/TRTSegmentFindingConfig.py b/InnerDetector/InDetConfig/python/TRTSegmentFindingConfig.py index fdf5a0e45ca184ebba797cae3f7eee263078dbe1..deeec8e92527ce818bcc5e887d3ae31cf3ca9527 100644 --- a/InnerDetector/InDetConfig/python/TRTSegmentFindingConfig.py +++ b/InnerDetector/InDetConfig/python/TRTSegmentFindingConfig.py @@ -13,7 +13,7 @@ def TRT_TrackSegmentsMaker_BarrelCosmicsCfg(flags, name='InDetTRTSegmentsMaker', acc.setPrivateTools(CompFactory.InDet.TRT_TrackSegmentsMaker_BarrelCosmics(name = name, **kwargs)) return acc -def TRT_TrackSegmentsMaker_ATLxkCfg(flags, name = 'InDetTRT_SeedsMaker', extension = '', TrackingFlags = None, InputCollections = None, **kwargs): +def TRT_TrackSegmentsMaker_ATLxkCfg(flags, name = 'InDetTRT_SeedsMaker', extension = '', InputCollections = None, **kwargs): acc = ComponentAccumulator() # # --- decide if use the association tool @@ -30,21 +30,21 @@ def TRT_TrackSegmentsMaker_ATLxkCfg(flags, name = 'InDetTRT_SeedsMaker', extensi # if extension == "_TRT": # TRT Subdetector segment finding - MinNumberDCs = TrackingFlags.minTRTonly - pTmin = TrackingFlags.minPT - sharedFrac = TrackingFlags.maxTRTonlyShared + MinNumberDCs = flags.InDet.Tracking.minTRTonly + pTmin = flags.InDet.Tracking.minPT + sharedFrac = flags.InDet.Tracking.maxTRTonlyShared else: # TRT-only/back-tracking segment finding - MinNumberDCs = 
TrackingFlags.minSecondaryTRTonTrk - pTmin = TrackingFlags.minSecondaryPt - sharedFrac = TrackingFlags.maxSecondaryTRTShared + MinNumberDCs = flags.InDet.Tracking.minSecondaryTRTonTrk + pTmin = flags.InDet.Tracking.minSecondaryPt + sharedFrac = flags.InDet.Tracking.maxSecondaryTRTShared # # --- offline version of TRT segemnt making # InDetPatternPropagator = TC.InDetPatternPropagatorCfg() acc.addPublicTool(InDetPatternPropagator) - InDetTRTExtensionTool = acc.popToolsAndMerge(TC.InDetTRT_ExtensionToolCfg(flags, TrackingFlags = TrackingFlags)) + InDetTRTExtensionTool = acc.popToolsAndMerge(TC.InDetTRT_ExtensionToolCfg(flags)) acc.addPublicTool(InDetTRTExtensionTool) kwargs.setdefault("TRT_ClustersContainer", 'TRT_DriftCircles') # InDetKeys.TRT_DriftCircles @@ -53,7 +53,7 @@ def TRT_TrackSegmentsMaker_ATLxkCfg(flags, name = 'InDetTRT_SeedsMaker', extensi kwargs.setdefault("PRDtoTrackMap", prefix+'PRDtoTrackMap'+suffix if usePrdAssociationTool else '') kwargs.setdefault("RemoveNoiseDriftCircles", flags.InDet.removeTRTNoise) kwargs.setdefault("MinNumberDriftCircles", MinNumberDCs) - kwargs.setdefault("NumberMomentumChannel", TrackingFlags.TRTSegFinderPtBins) + kwargs.setdefault("NumberMomentumChannel", flags.InDet.Tracking.TRTSegFinderPtBins) kwargs.setdefault("pTmin", pTmin) kwargs.setdefault("sharedFrac", sharedFrac) @@ -61,30 +61,30 @@ def TRT_TrackSegmentsMaker_ATLxkCfg(flags, name = 'InDetTRT_SeedsMaker', extensi acc.setPrivateTools(InDetTRT_TrackSegmentsMaker) return acc -def TRT_TrackSegmentsMakerCondAlg_ATLxkCfg(name = 'InDetTRT_SeedsMakerCondAlg', extension = '', TrackingFlags = None, **kwargs): +def TRT_TrackSegmentsMakerCondAlg_ATLxkCfg(flags, name = 'InDetTRT_SeedsMakerCondAlg', extension = '', **kwargs): acc = ComponentAccumulator() # # --- cut values # if extension == "_TRT": # TRT Subdetector segment finding - pTmin = TrackingFlags.minPT + pTmin = flags.InDet.Tracking.minPT else: # TRT-only/back-tracking segment finding - pTmin = 
TrackingFlags.minSecondaryPt + pTmin = flags.InDet.Tracking.minSecondaryPt InDetPatternPropagator = TC.InDetPatternPropagatorCfg() acc.addPublicTool(InDetPatternPropagator) kwargs.setdefault("PropagatorTool", InDetPatternPropagator) - kwargs.setdefault("NumberMomentumChannel", TrackingFlags.TRTSegFinderPtBins) + kwargs.setdefault("NumberMomentumChannel", flags.InDet.Tracking.TRTSegFinderPtBins) kwargs.setdefault("pTmin", pTmin) InDetTRT_TrackSegmentsMakerCondAlg = CompFactory.InDet.TRT_TrackSegmentsMakerCondAlg_ATLxk(name = name, **kwargs) acc.addCondAlgo(InDetTRT_TrackSegmentsMakerCondAlg) return acc -def TRT_TrackSegmentsFinderCfg(flags, name = 'InDetTRT_TrackSegmentsFinderPhase', extension = '', TrackingFlags = None, BarrelSegments = None, InputCollections =None, doPhase = False, **kwargs): +def TRT_TrackSegmentsFinderCfg(flags, name = 'InDetTRT_TrackSegmentsFinderPhase', extension = '', BarrelSegments = None, InputCollections =None, doPhase = False, **kwargs): acc = ComponentAccumulator() # --------------------------------------------------------------- @@ -107,7 +107,7 @@ def TRT_TrackSegmentsFinderCfg(flags, name = 'InDetTRT_TrackSegmentsFinderPhase' TRT_ClustersContainer = 'TRT_DriftCircles')) # InDetKeys.TRT_DriftCircles acc.addPublicTool(InDetTRT_TrackSegmentsMaker) - if flags.InDet.doCaloSeededTRTSegments or TrackingFlags.RoISeededBackTracking: + if flags.InDet.doCaloSeededTRTSegments or flags.InDet.Tracking.RoISeededBackTracking: kwargs.setdefault("SegmentsMakerTool", InDetTRT_TrackSegmentsMaker) kwargs.setdefault("SegmentsLocation", BarrelSegments) kwargs.setdefault("useCaloSeeds", True) @@ -123,31 +123,30 @@ def TRT_TrackSegmentsFinderCfg(flags, name = 'InDetTRT_TrackSegmentsFinderPhase' InDetTRT_TrackSegmentsMaker = acc.popToolsAndMerge(TRT_TrackSegmentsMaker_ATLxkCfg( flags, name = 'InDetTRT_SeedsMaker'+extension, extension = extension, - TrackingFlags = TrackingFlags, InputCollections = InputCollections)) 
acc.addPublicTool(InDetTRT_TrackSegmentsMaker) kwargs.setdefault("SegmentsMakerTool", InDetTRT_TrackSegmentsMaker) - acc.merge(TRT_TrackSegmentsMakerCondAlg_ATLxkCfg(name = 'InDetTRT_SeedsMakerCondAlg'+ extension, - extension = extension, - TrackingFlags=TrackingFlags)) + acc.merge(TRT_TrackSegmentsMakerCondAlg_ATLxkCfg(flags, + name = 'InDetTRT_SeedsMakerCondAlg'+ extension, + extension = extension)) acc.addEventAlgo(CompFactory.InDet.TRT_TrackSegmentsFinder( name = name, **kwargs)) return acc -def SegmentDriftCircleAssValidationCfg(flags, name="InDetSegmentDriftCircleAssValidation", extension='', TrackingFlags=None, BarrelSegments='', **kwargs): +def SegmentDriftCircleAssValidationCfg(flags, name="InDetSegmentDriftCircleAssValidation", extension='', BarrelSegments='', **kwargs): acc = ComponentAccumulator() # # --- cut values # if extension == "_TRT": # TRT Subdetector segment finding - MinNumberDCs = TrackingFlags.minTRTonly - pTmin = TrackingFlags.minPT + MinNumberDCs = flags.InDet.Tracking.minTRTonly + pTmin = flags.InDet.Tracking.minPT else: # TRT-only/back-tracking segment finding - MinNumberDCs = TrackingFlags.minSecondaryTRTonTrk - pTmin = TrackingFlags.minSecondaryPt + MinNumberDCs = flags.InDet.Tracking.minSecondaryTRTonTrk + pTmin = flags.InDet.Tracking.minSecondaryPt #kwargs.setdefault("OrigTracksLocation", BarrelSegments) kwargs.setdefault("TRT_DriftCirclesName", 'TRT_DriftCircles') # InDetKeys.TRT_DriftCircles @@ -173,7 +172,7 @@ def TRTActiveCondAlgCfg(flags, name="TRTActiveCondAlg", **kwargs): acc.addCondAlgo(TRTActiveCondAlg) return acc -def TRTSegmentFindingCfg(flags, extension = "", InputCollections = None, TrackingFlags = None, BarrelSegments = None, doPhase = False): +def TRTSegmentFindingCfg(flags, extension = "", InputCollections = None, BarrelSegments = None, doPhase = False): acc = ComponentAccumulator() # # --- decide if use the association tool @@ -195,7 +194,6 @@ def TRTSegmentFindingCfg(flags, extension = "", InputCollections = None, 
Trackin acc.merge(TRT_TrackSegmentsFinderCfg( flags, name = 'InDetTRT_TrackSegmentsFinderPhase'+extension, extension =extension, - TrackingFlags = TrackingFlags, BarrelSegments=BarrelSegments, InputCollections = InputCollections, doPhase = doPhase)) @@ -206,7 +204,6 @@ def TRTSegmentFindingCfg(flags, extension = "", InputCollections = None, Trackin if flags.InDet.doTruth and not flags.Beam.Type == "cosmics": acc.merge(SegmentDriftCircleAssValidationCfg(flags, name="InDetSegmentDriftCircleAssValidation"+extension, - TrackingFlags = TrackingFlags, BarrelSegments=BarrelSegments)) return acc @@ -253,7 +250,6 @@ if __name__ == "__main__": # NewTracking collection keys InputCombinedInDetTracks = [] - TrackingFlags = ConfigFlags.InDet.Tracking ############################################################################# top_acc.merge(TRTActiveCondAlgCfg(ConfigFlags)) top_acc.merge(TC.TRT_DetElementsRoadCondAlgCfg()) @@ -265,7 +261,6 @@ if __name__ == "__main__": top_acc.merge(TRTSegmentFindingCfg( ConfigFlags, "", InputCombinedInDetTracks, - TrackingFlags, 'TRTSegments')) # InDetKeys.TRT_Segments ############################################################################# @@ -273,5 +268,9 @@ if __name__ == "__main__": iovsvc.OutputLevel=5 top_acc.getService('StoreGateSvc').Dump = True top_acc.printConfig(withDetails = True, summariseProps = True) - top_acc.run(25) - top_acc.store(open("test_TRTSegmentFinding.pkl", "wb")) \ No newline at end of file + top_acc.store(open("test_TRTSegmentFinding.pkl", "wb")) + + import sys + if "--norun" not in sys.argv: + sc = top_acc.run(25) + sys.exit(not sc.isSuccess()) diff --git a/InnerDetector/InDetConfig/python/TrackingCommonConfig.py b/InnerDetector/InDetConfig/python/TrackingCommonConfig.py index 13107c433f5a52c3a9c6101a3bf23d53298038d5..ac7fe2bab99aedf59903d90484c80ba04258c8b7 100644 --- a/InnerDetector/InDetConfig/python/TrackingCommonConfig.py +++ b/InnerDetector/InDetConfig/python/TrackingCommonConfig.py @@ -322,11 +322,11 @@ 
def InDetTrackPRD_AssociationCfg(flags, name='InDetTrackPRD_Association', **kwar acc.addEventAlgo(CompFactory.InDet.InDetTrackPRD_Association(name = the_name, **kwargs)) return acc -def InDetTRTDriftCircleCutForPatternRecoCfg(flags, name='InDetTRTDriftCircleCutForPatternReco', TrackingFlags=None, **kwargs): +def InDetTRTDriftCircleCutForPatternRecoCfg(flags, name='InDetTRTDriftCircleCutForPatternReco', **kwargs): the_name = makeName( name, kwargs) kwargs.setdefault("MinOffsetDCs", 5) - kwargs.setdefault("UseNewParameterization", TrackingFlags.useNewParameterizationTRT) + kwargs.setdefault("UseNewParameterization", flags.InDet.Tracking.useNewParameterizationTRT) kwargs.setdefault("UseActiveFractionSvc", flags.Detector.RecoTRT) return CompFactory.InDet.InDetTrtDriftCircleCutTool(the_name, **kwargs) @@ -1110,7 +1110,7 @@ def InDetPatternUpdatorCfg(name='InDetPatternUpdator', **kwargs): the_name = makeName(name, kwargs) return CompFactory.Trk.KalmanUpdator_xk(name = the_name, **kwargs) -def InDetTRT_TrackExtensionTool_xkCfg(flags, name='InDetTRT_ExtensionTool', TrackingFlags=None, **kwargs): +def InDetTRT_TrackExtensionTool_xkCfg(flags, name='InDetTRT_ExtensionTool', **kwargs): acc = ComponentAccumulator() the_name = makeName( name, kwargs) @@ -1125,7 +1125,7 @@ def InDetTRT_TrackExtensionTool_xkCfg(flags, name='InDetTRT_ExtensionTool', Trac kwargs.setdefault("UpdatorTool", InDetPatternUpdator) if 'DriftCircleCutTool' not in kwargs : - InDetTRTDriftCircleCutForPatternReco = InDetTRTDriftCircleCutForPatternRecoCfg(flags, TrackingFlags=TrackingFlags) + InDetTRTDriftCircleCutForPatternReco = InDetTRTDriftCircleCutForPatternRecoCfg(flags) acc.addPublicTool(InDetTRTDriftCircleCutForPatternReco) kwargs.setdefault("DriftCircleCutTool", InDetTRTDriftCircleCutForPatternReco) @@ -1143,14 +1143,14 @@ def InDetTRT_TrackExtensionTool_xkCfg(flags, name='InDetTRT_ExtensionTool', Trac kwargs.setdefault("TRT_ClustersContainer", 'TRT_DriftCircles') # InDetKeys.TRT_DriftCircles() 
kwargs.setdefault("TrtManagerLocation", 'TRT') # InDetKeys.TRT_Manager() kwargs.setdefault("UseDriftRadius", not flags.InDet.noTRTTiming) - kwargs.setdefault("MinNumberDriftCircles", TrackingFlags.minTRTonTrk) + kwargs.setdefault("MinNumberDriftCircles", flags.InDet.Tracking.minTRTonTrk) kwargs.setdefault("ScaleHitUncertainty", 2) kwargs.setdefault("RoadWidth", 20.) - kwargs.setdefault("UseParameterization", TrackingFlags.useParameterizedTRTCuts) + kwargs.setdefault("UseParameterization", flags.InDet.Tracking.useParameterizedTRTCuts) kwargs.setdefault("maxImpactParameter", 500 if flags.InDet.doBeamHalo or flags.InDet.doBeamGas else 50 ) # single beam running, open cuts - if TrackingFlags.RoISeededBackTracking: - kwargs.setdefault("minTRTSegmentpT", TrackingFlags.minSecondaryPt) + if flags.InDet.Tracking.RoISeededBackTracking: + kwargs.setdefault("minTRTSegmentpT", flags.InDet.Tracking.minSecondaryPt) acc.setPrivateTools(CompFactory.InDet.TRT_TrackExtensionTool_xk(the_name, **kwargs)) return acc @@ -1219,13 +1219,13 @@ def InDetTRT_TrackExtensionTool_DAFCfg(flags, name='TRT_TrackExtensionTool_DAF', acc.setPrivateTools(CompFactory.InDet.TRT_TrackExtensionTool_DAF(the_name,**kwargs)) return acc -def InDetTRT_ExtensionToolCfg(flags, TrackingFlags=None, **kwargs): +def InDetTRT_ExtensionToolCfg(flags, **kwargs): # @TODO set all names to InDetTRT_ExtensionTool ? 
if (flags.InDet.trtExtensionType == 'xk') or (not flags.InDet.doNewTracking) : if (flags.Beam.Type == "cosmics"): return InDetTRT_ExtensionToolCosmicsCfg(flags, **kwargs) else: - return InDetTRT_TrackExtensionTool_xkCfg(flags, TrackingFlags=TrackingFlags, **kwargs) + return InDetTRT_TrackExtensionTool_xkCfg(flags, **kwargs) elif flags.InDet.trtExtensionType == 'DAF' : return InDetTRT_TrackExtensionTool_DAFCfg(flags, name = 'InDetTRT_ExtensionTool',**kwargs) @@ -1264,7 +1264,6 @@ def InDetROIInfoVecCondAlgCfg(name='InDetROIInfoVecCondAlg', **kwargs) : def InDetAmbiScoringToolBaseCfg(flags, name='InDetAmbiScoringTool', **kwargs) : acc = ComponentAccumulator() - TrackingFlags = kwargs.pop("TrackingFlags") from InDetConfig.InDetRecToolConfig import InDetExtrapolatorCfg tmpAcc = InDetExtrapolatorCfg(flags) @@ -1275,7 +1274,7 @@ def InDetAmbiScoringToolBaseCfg(flags, name='InDetAmbiScoringTool', **kwargs) : InDetTrackSummaryTool = acc.popToolsAndMerge(InDetTrackSummaryToolCfg(flags)) acc.addPublicTool(InDetTrackSummaryTool) - InDetTRTDriftCircleCutForPatternReco = InDetTRTDriftCircleCutForPatternRecoCfg(flags, TrackingFlags=TrackingFlags) + InDetTRTDriftCircleCutForPatternReco = InDetTRTDriftCircleCutForPatternRecoCfg(flags) acc.addPublicTool(InDetTRTDriftCircleCutForPatternReco) from AthenaCommon.DetFlags import DetFlags @@ -1287,23 +1286,23 @@ def InDetAmbiScoringToolBaseCfg(flags, name='InDetAmbiScoringTool', **kwargs) : kwargs.setdefault("DriftCircleCutTool", InDetTRTDriftCircleCutForPatternReco ) kwargs.setdefault("useAmbigFcn", True ) kwargs.setdefault("useTRT_AmbigFcn", False ) - kwargs.setdefault("maxZImp", TrackingFlags.maxZImpact ) - kwargs.setdefault("maxEta", TrackingFlags.maxEta ) - kwargs.setdefault("usePixel", TrackingFlags.usePixel ) - kwargs.setdefault("useSCT", TrackingFlags.useSCT ) + kwargs.setdefault("maxZImp", flags.InDet.Tracking.maxZImpact ) + kwargs.setdefault("maxEta", flags.InDet.Tracking.maxEta ) + kwargs.setdefault("usePixel", 
flags.InDet.Tracking.usePixel ) + kwargs.setdefault("useSCT", flags.InDet.Tracking.useSCT ) kwargs.setdefault("doEmCaloSeed", have_calo_rois ) acc.setPrivateTools(CompFactory.InDet.InDetAmbiScoringTool(name = name, **kwargs)) return acc def InDetCosmicsScoringToolBaseCfg(flags, name='InDetCosmicsScoringTool', **kwargs) : acc = ComponentAccumulator() - TrackingFlags = kwargs.pop("TrackingFlags") + # cut values are read directly from flags.InDet.Tracking the_name=makeName(name, kwargs) InDetTrackSummaryTool = acc.popToolsAndMerge(InDetTrackSummaryToolCfg(flags)) acc.setPrivateTools(InDetTrackSummaryTool) - kwargs.setdefault("nWeightedClustersMin", TrackingFlags.nWeightedClustersMin ) + kwargs.setdefault("nWeightedClustersMin", flags.InDet.Tracking.nWeightedClustersMin ) kwargs.setdefault("minTRTHits", 0 ) kwargs.setdefault("SummaryTool", InDetTrackSummaryTool ) @@ -1322,11 +1321,11 @@ def InDetTRT_ExtensionToolPhaseCfg(flags, name='InDetTRT_ExtensionToolPhase', ** acc.setPrivateTools(acc.popToolsAndMerge(InDetTRT_ExtensionToolCosmicsCfg(flags, name = name, **kwargs))) return acc -def InDetCosmicExtenScoringToolCfg(flags, TrackingFlags, name='InDetCosmicExtenScoringTool',**kwargs) : +def InDetCosmicExtenScoringToolCfg(flags, name='InDetCosmicExtenScoringTool',**kwargs) : acc = ComponentAccumulator() - kwargs.setdefault("TrackingFlags", TrackingFlags) + # cut values are read directly from flags.InDet.Tracking kwargs.setdefault("nWeightedClustersMin", 0) - kwargs.setdefault("minTRTHits", TrackingFlags.minTRTonTrk ) + kwargs.setdefault("minTRTHits", flags.InDet.Tracking.minTRTonTrk ) acc.setPrivateTools(acc.popToolsAndMerge(InDetCosmicsScoringToolBaseCfg(flags, name = 'InDetCosmicExtenScoringTool', **kwargs))) return acc @@ -1359,34 +1358,32 @@ def SiCombinatorialTrackFinder_xkCfg(flags, name='InDetSiComTrackFinder', **kwar acc.setPrivateTools(InDetSiComTrackFinder) return acc -def InDetCosmicScoringTool_TRTCfg(flags, TrackingFlags, 
name='InDetCosmicExtenScoringTool',**kwargs) : +def InDetCosmicScoringTool_TRTCfg(flags, name='InDetCosmicExtenScoringTool',**kwargs) : acc = ComponentAccumulator() InDetTrackSummaryToolNoHoleSearch = acc.popToolsAndMerge(InDetTrackSummaryToolNoHoleSearchCfg(flags)) acc.addPublicTool(InDetTrackSummaryToolNoHoleSearch) - kwargs.setdefault("minTRTHits", TrackingFlags.minSecondaryTRTonTrk) + kwargs.setdefault("minTRTHits", flags.InDet.Tracking.minSecondaryTRTonTrk) kwargs.setdefault("SummaryTool", InDetTrackSummaryToolNoHoleSearch) acc.setPrivateTools(acc.popToolsAndMerge(InDetCosmicExtenScoringToolCfg(flags, - TrackingFlags=TrackingFlags, name = 'InDetCosmicScoringTool_TRT', **kwargs))) return acc -def InDetTRT_SeededScoringToolCfg(flags, name='InDetTRT_SeededScoringTool', TrackingFlags=None, **kwargs) : +def InDetTRT_SeededScoringToolCfg(flags, name='InDetTRT_SeededScoringTool', **kwargs) : acc = ComponentAccumulator() - kwargs.setdefault("TrackingFlags", TrackingFlags) kwargs.setdefault("useAmbigFcn", not flags.InDet.doNewTracking) # full search => use NewT kwargs.setdefault("useTRT_AmbigFcn", flags.InDet.doNewTracking) # full search => use NewT - kwargs.setdefault("minTRTonTrk", TrackingFlags.minSecondaryTRTonTrk) - kwargs.setdefault("minTRTPrecisionFraction", TrackingFlags.minSecondaryTRTPrecFrac) - kwargs.setdefault("minPt", TrackingFlags.minSecondaryPt) - kwargs.setdefault("maxRPhiImp", TrackingFlags.maxSecondaryImpact) - kwargs.setdefault("minSiClusters", TrackingFlags.minSecondaryClusters) - kwargs.setdefault("maxSiHoles", TrackingFlags.maxSecondaryHoles) - kwargs.setdefault("maxPixelHoles", TrackingFlags.maxSecondaryPixelHoles) - kwargs.setdefault("maxSCTHoles", TrackingFlags.maxSecondarySCTHoles) - kwargs.setdefault("maxDoubleHoles", TrackingFlags.maxSecondaryDoubleHoles) + kwargs.setdefault("minTRTonTrk", flags.InDet.Tracking.minSecondaryTRTonTrk) + kwargs.setdefault("minTRTPrecisionFraction", flags.InDet.Tracking.minSecondaryTRTPrecFrac) + 
kwargs.setdefault("minPt", flags.InDet.Tracking.minSecondaryPt) + kwargs.setdefault("maxRPhiImp", flags.InDet.Tracking.maxSecondaryImpact) + kwargs.setdefault("minSiClusters", flags.InDet.Tracking.minSecondaryClusters) + kwargs.setdefault("maxSiHoles", flags.InDet.Tracking.maxSecondaryHoles) + kwargs.setdefault("maxPixelHoles", flags.InDet.Tracking.maxSecondaryPixelHoles) + kwargs.setdefault("maxSCTHoles", flags.InDet.Tracking.maxSecondarySCTHoles) + kwargs.setdefault("maxDoubleHoles", flags.InDet.Tracking.maxSecondaryDoubleHoles) acc.setPrivateTools(acc.popToolsAndMerge(InDetAmbiScoringToolBaseCfg(flags, name=name, **kwargs))) return acc @@ -1395,31 +1392,31 @@ def InDetTRT_SeededScoringToolCfg(flags, name='InDetTRT_SeededScoringTool', Trac #TRTExtension ######################################################################################################### -def InDetAmbiScoringToolCfg(flags, name='InDetAmbiScoringTool', TrackingFlags=None, **kwargs) : +def InDetAmbiScoringToolCfg(flags, name='InDetAmbiScoringTool', **kwargs) : acc = ComponentAccumulator() - kwargs.setdefault("TrackingFlags", TrackingFlags ) + # cut values are read directly from flags.InDet.Tracking kwargs.setdefault("useAmbigFcn", True ) kwargs.setdefault("useTRT_AmbigFcn", False ) kwargs.setdefault("minTRTonTrk", 0 ) kwargs.setdefault("minTRTPrecisionFraction", 0 ) - kwargs.setdefault("minPt", TrackingFlags.minPT ) - kwargs.setdefault("maxRPhiImp", TrackingFlags.maxPrimaryImpact ) - kwargs.setdefault("minSiClusters", TrackingFlags.minClusters ) - kwargs.setdefault("minPixel", TrackingFlags.minPixel ) - kwargs.setdefault("maxSiHoles", TrackingFlags.maxHoles ) - kwargs.setdefault("maxPixelHoles", TrackingFlags.maxPixelHoles ) - kwargs.setdefault("maxSCTHoles", TrackingFlags.maxSctHoles ) - kwargs.setdefault("maxDoubleHoles", TrackingFlags.maxDoubleHoles ) - acc.setPrivateTools(acc.popToolsAndMerge(InDetAmbiScoringToolBaseCfg(flags, name = name + TrackingFlags.extension, **kwargs ))) + 
kwargs.setdefault("minPt", flags.InDet.Tracking.minPT ) + kwargs.setdefault("maxRPhiImp", flags.InDet.Tracking.maxPrimaryImpact ) + kwargs.setdefault("minSiClusters", flags.InDet.Tracking.minClusters ) + kwargs.setdefault("minPixel", flags.InDet.Tracking.minPixel ) + kwargs.setdefault("maxSiHoles", flags.InDet.Tracking.maxHoles ) + kwargs.setdefault("maxPixelHoles", flags.InDet.Tracking.maxPixelHoles ) + kwargs.setdefault("maxSCTHoles", flags.InDet.Tracking.maxSctHoles ) + kwargs.setdefault("maxDoubleHoles", flags.InDet.Tracking.maxDoubleHoles ) + acc.setPrivateTools(acc.popToolsAndMerge(InDetAmbiScoringToolBaseCfg(flags, name = name + flags.InDet.Tracking.extension, **kwargs ))) return acc -def InDetExtenScoringToolCfg(flags, TrackingFlags, name='InDetExtenScoringTool', **kwargs) : +def InDetExtenScoringToolCfg(flags, name='InDetExtenScoringTool', **kwargs) : acc = ComponentAccumulator() if flags.InDet.trackFitterType in ['KalmanFitter', 'KalmanDNAFitter', 'ReferenceKalmanFitter']: kwargs.setdefault("minTRTPrecisionFraction", 0.2) - kwargs.setdefault("minTRTonTrk", TrackingFlags.minTRTonTrk) - kwargs.setdefault("minTRTPrecisionFraction", TrackingFlags.minTRTPrecFrac) - acc.setPrivateTools(acc.popToolsAndMerge(InDetAmbiScoringToolCfg(flags, name = name, TrackingFlags = TrackingFlags, **kwargs))) + kwargs.setdefault("minTRTonTrk", flags.InDet.Tracking.minTRTonTrk) + kwargs.setdefault("minTRTPrecisionFraction", flags.InDet.Tracking.minTRTPrecFrac) + acc.setPrivateTools(acc.popToolsAndMerge(InDetAmbiScoringToolCfg(flags, name = name, **kwargs))) return acc ############################################################################################# @@ -1458,7 +1455,6 @@ def PRDtoTrackMapToolCfg(name='PRDtoTrackMapTool',**kwargs) : def InDetNNScoringToolBaseCfg(flags, name='InDetNNScoringTool', **kwargs) : acc = ComponentAccumulator() - TrackingFlags = kwargs.pop("TrackingFlags") the_name=makeName(name,kwargs) from AthenaCommon.DetFlags import DetFlags @@ -1475,7 
+1471,7 @@ def InDetNNScoringToolBaseCfg(flags, name='InDetNNScoringTool', **kwargs) : InDetTrackSummaryTool = acc.popToolsAndMerge(InDetTrackSummaryToolCfg(flags)) acc.addPublicTool(InDetTrackSummaryTool) - InDetTRTDriftCircleCutForPatternReco = InDetTRTDriftCircleCutForPatternRecoCfg(flags, TrackingFlags=TrackingFlags) + InDetTRTDriftCircleCutForPatternReco = InDetTRTDriftCircleCutForPatternRecoCfg(flags) acc.addPublicTool(InDetTRTDriftCircleCutForPatternReco) kwargs.setdefault("nnCutConfig", "dev/TrackingCP/LRTAmbiNetwork/20200727_225401/nn-config.json" ) @@ -1485,33 +1481,32 @@ def InDetNNScoringToolBaseCfg(flags, name='InDetNNScoringTool', **kwargs) : kwargs.setdefault("DriftCircleCutTool", InDetTRTDriftCircleCutForPatternReco ) kwargs.setdefault("useAmbigFcn", True ) kwargs.setdefault("useTRT_AmbigFcn", False ) - kwargs.setdefault("maxZImp", TrackingFlags.maxZImpact ) - kwargs.setdefault("maxEta", TrackingFlags.maxEta ) - kwargs.setdefault("usePixel", TrackingFlags.usePixel ) - kwargs.setdefault("useSCT", TrackingFlags.useSCT ) + kwargs.setdefault("maxZImp", flags.InDet.Tracking.maxZImpact ) + kwargs.setdefault("maxEta", flags.InDet.Tracking.maxEta ) + kwargs.setdefault("usePixel", flags.InDet.Tracking.usePixel ) + kwargs.setdefault("useSCT", flags.InDet.Tracking.useSCT ) kwargs.setdefault("doEmCaloSeed", have_calo_rois ) acc.setPrivateTools(CompFactory.InDet.InDetNNScoringTool(name = the_name, **kwargs )) return acc -def InDetNNScoringToolCfg(flags, TrackingFlags, name='InDetNNScoringTool', **kwargs) : - kwargs.setdefault("TrackingFlags", TrackingFlags ) +def InDetNNScoringToolCfg(flags, name='InDetNNScoringTool', **kwargs) : + # cut values are read directly from flags.InDet.Tracking kwargs.setdefault("useAmbigFcn", True ) kwargs.setdefault("useTRT_AmbigFcn", False ) kwargs.setdefault("minTRTonTrk", 0 ) kwargs.setdefault("minTRTPrecisionFraction", 0 ) - kwargs.setdefault("minPt", TrackingFlags.minPT ) - kwargs.setdefault("maxRPhiImp", 
TrackingFlags.maxPrimaryImpact ) - kwargs.setdefault("minSiClusters", TrackingFlags.minClusters ) - kwargs.setdefault("minPixel", TrackingFlags.minPixel ) - kwargs.setdefault("maxSiHoles", TrackingFlags.maxHoles ) - kwargs.setdefault("maxPixelHoles", TrackingFlags.maxPixelHoles ) - kwargs.setdefault("maxSCTHoles", TrackingFlags.maxSctHoles ) - kwargs.setdefault("maxDoubleHoles", TrackingFlags.maxDoubleHoles) - - return InDetNNScoringToolBaseCfg(flags, name=name+TrackingFlags.extension, **kwargs ) - -def InDetCosmicsScoringToolCfg(flags, TrackingFlags, name='InDetCosmicsScoringTool', **kwargs) : + kwargs.setdefault("minPt", flags.InDet.Tracking.minPT ) + kwargs.setdefault("maxRPhiImp", flags.InDet.Tracking.maxPrimaryImpact ) + kwargs.setdefault("minSiClusters", flags.InDet.Tracking.minClusters ) + kwargs.setdefault("minPixel", flags.InDet.Tracking.minPixel ) + kwargs.setdefault("maxSiHoles", flags.InDet.Tracking.maxHoles ) + kwargs.setdefault("maxPixelHoles", flags.InDet.Tracking.maxPixelHoles ) + kwargs.setdefault("maxSCTHoles", flags.InDet.Tracking.maxSctHoles ) + kwargs.setdefault("maxDoubleHoles", flags.InDet.Tracking.maxDoubleHoles) + + return InDetNNScoringToolBaseCfg(flags, name=name+flags.InDet.Tracking.extension, **kwargs ) + +def InDetCosmicsScoringToolCfg(flags, name='InDetCosmicsScoringTool', **kwargs) : return InDetCosmicsScoringToolBaseCfg(flags, - name=name+TrackingFlags.extension, - TrackingFlags=TrackingFlags) + name=name+flags.InDet.Tracking.extension) diff --git a/InnerDetector/InDetExample/InDetRecExample/CMakeLists.txt b/InnerDetector/InDetExample/InDetRecExample/CMakeLists.txt index 194eb7184dcc2332c318a3e55520b40b8b8779b1..0573d1101cb425963f2204763604dac24127f1eb 100644 --- a/InnerDetector/InDetExample/InDetRecExample/CMakeLists.txt +++ b/InnerDetector/InDetExample/InDetRecExample/CMakeLists.txt @@ -4,6 +4,6 @@ atlas_subdir( InDetRecExample ) # Install files from the package: -atlas_install_python_modules( python/*.py ) 
+atlas_install_python_modules( python/*.py POST_BUILD_CMD ${ATLAS_FLAKE8} ) atlas_install_joboptions( share/*.py ) atlas_install_runtime( share/jobOptions.py ) diff --git a/InnerDetector/InDetExample/InDetRecExample/python/ConfiguredNewTrackingCuts.py b/InnerDetector/InDetExample/InDetRecExample/python/ConfiguredNewTrackingCuts.py index c6cbe58ebdfcba888e403f6506dc59b17e683303..5c4d468c1ef1c9eac589fb6eb3f7d0f931f7b4ac 100755 --- a/InnerDetector/InDetExample/InDetRecExample/python/ConfiguredNewTrackingCuts.py +++ b/InnerDetector/InDetExample/InDetRecExample/python/ConfiguredNewTrackingCuts.py @@ -1,8 +1,5 @@ # Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration -from __future__ import print_function - - ######################################################################### # ConfiguredNewtrackingCuts class ######################################################################### @@ -22,10 +19,7 @@ class ConfiguredNewTrackingCuts : self.__set_indetflags() #pointer to InDetFlags, don't use them directly #to allow sharing this code with the trigger - from AthenaCommon.GlobalFlags import globalflags from AthenaCommon.DetFlags import DetFlags - from AthenaCommon.BeamFlags import jobproperties - from RecExConfig.RecFlags import rec # --- put defaults to run Pixel/SCT/TRT self.__usePixel = DetFlags.haveRIO.pixel_on() diff --git a/InnerDetector/InDetExample/InDetRecExample/python/ConfiguredSecondaryVertexCuts.py b/InnerDetector/InDetExample/InDetRecExample/python/ConfiguredSecondaryVertexCuts.py index 27515663fc48ef96618f34ed99750fc6b4d667e8..9e19d16c2ec20aa8bd31baf0bc35d59199664c03 100644 --- a/InnerDetector/InDetExample/InDetRecExample/python/ConfiguredSecondaryVertexCuts.py +++ b/InnerDetector/InDetExample/InDetRecExample/python/ConfiguredSecondaryVertexCuts.py @@ -1,7 +1,4 @@ -# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration - -from __future__ import print_function - +# Copyright (C) 2002-2020 CERN for the benefit of the 
ATLAS collaboration ######################################################################## # ConfiguredSecondaryVertexCuts class @@ -19,13 +16,8 @@ class ConfiguredSecondaryVertexCuts : self.__mode = mode self.__extension = "" self.__set_indetflags() # pointer to InDetFlags, don't use them directly - # to allow sharing this code with the trigger + # to allow sharing this code with the trigger - from AthenaCommon.GlobalFlags import globalflags - from AthenaCommon.DetFlags import DetFlags - from AthenaCommon.BeamFlags import jobproperties - from RecExConfig.RecFlags import rec - # ------------------------------------------------ # --- secondary vertexing setup of cuts # ------------------------------------------------ diff --git a/InnerDetector/InDetExample/InDetRecExample/python/ConfiguredVertexingCuts.py b/InnerDetector/InDetExample/InDetRecExample/python/ConfiguredVertexingCuts.py index 889bc5fb701b0895aa76f18bb203e6221b290688..1507f2b33d1c80844e1a520e6ad751ec6e6d01f7 100644 --- a/InnerDetector/InDetExample/InDetRecExample/python/ConfiguredVertexingCuts.py +++ b/InnerDetector/InDetExample/InDetRecExample/python/ConfiguredVertexingCuts.py @@ -1,4 +1,4 @@ -# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration from __future__ import print_function @@ -32,10 +32,7 @@ class ConfiguredVertexingCuts : #pointer to InDetFlags, don't use them directly #to allow sharing this code with the trigger self.__set_indetflags() - - from AthenaCommon.GlobalFlags import globalflags - from AthenaCommon.DetFlags import DetFlags - from AthenaCommon.BeamFlags import jobproperties + from RecExConfig.RecFlags import rec #----------------------------------------------------------------------- diff --git a/InnerDetector/InDetExample/InDetRecExample/python/InDetJobProperties.py b/InnerDetector/InDetExample/InDetRecExample/python/InDetJobProperties.py index 
e945c5532be84cc23f7534525f515bec5d4b4d14..bf315e2c2a919dbaacbf93edc77c14200d5de215 100644 --- a/InnerDetector/InDetExample/InDetRecExample/python/InDetJobProperties.py +++ b/InnerDetector/InDetExample/InDetRecExample/python/InDetJobProperties.py @@ -1,8 +1,5 @@ # Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration -from __future__ import print_function - - # ## @file InDetRecExample/python/InDetJobProperties.py ## @brief Python module to hold common flags to configure JobOptions @@ -26,7 +23,6 @@ __all__ = [ "InDetJobProperties" ] from AthenaCommon.JobProperties import JobProperty, JobPropertyContainer from AthenaCommon.JobProperties import jobproperties -import AthenaCommon.SystemOfUnits as Units ##----------------------------------------------------------------------------- ## 0th step: define infrastructure JPs DO NOT MODIFY THEM!!! @@ -45,7 +41,7 @@ class Enabled(JobProperty): if not(obj._locked): obj.__dict__['statusOn']=True else: - obj._log.info('The JobProperty %s is blocked' % obj.__name__) + obj._log.info('The JobProperty %s is blocked', obj.__name__) def set_Off_NoDoAction(self, obj): """ Sets statusOn equals to False w/o _undo_action. @@ -53,7 +49,7 @@ class Enabled(JobProperty): if not(obj._locked): obj.__dict__['statusOn']=False else: - obj._log.info('The JobProperty %s is blocked' % obj.__name__) + obj._log.info('The JobProperty %s is blocked', obj.__name__) def _do_action(self): for obj in self._the_same_context_objects(): @@ -1685,9 +1681,7 @@ class InDetJobProperties(JobPropertyContainer): # THIS METHOD MUST BE THE FIRST TO BE CALLED. 
DO NOT MOVE IT OR ADD THINGS IN FRONT self.setupDefaults() - from AthenaCommon.GlobalFlags import globalflags from AthenaCommon.DetFlags import DetFlags - from RecExConfig.RecFlags import rec # ------------------------------------------------------------------- # Cosmics TRT extension, NewTracking needs full tracking geometry @@ -1741,7 +1735,7 @@ class InDetJobProperties(JobPropertyContainer): # # no new tracking if pixel or sct off (new tracking = inside out only) self.doNewTracking = self.doNewTracking() and (DetFlags.haveRIO.pixel_on() or DetFlags.haveRIO.SCT_on()) - # always use truth tracking for SplitReco and never use it if pixel or sct off + # always use truth tracking for SplitReco and never use it if pixel or sct off self.doPseudoTracking = (self.doPseudoTracking() or self.doSplitReco()) and (DetFlags.haveRIO.pixel_on() or DetFlags.haveRIO.SCT_on()) # # no low pt tracking if no new tracking before or if pixels are off (since low-pt tracking is pixel seeded)! Explicitly veto for cosmics to aid T0 @@ -1884,22 +1878,22 @@ class InDetJobProperties(JobPropertyContainer): # ---- Refit of tracks # -------------------------------------------------------------------- # - if (self.trackFitterType() is not 'KalmanFitter' and self.trackFitterType() is not 'KalmanDNAFitter') : + if (self.trackFitterType() != 'KalmanFitter' and self.trackFitterType() != 'KalmanDNAFitter') : self.refitROT = True if not self.refitROT() and not self.redoTRT_LR() : print('ConfiguredInDetFlags.py WARNING refitROT and redoTRT_LR are both False, NOT RECOMMENDED!') # # refKF needs a new method in IUpdator, where there is currently only one implementation - if (self.trackFitterType() is 'ReferenceKalmanFitter'): + if (self.trackFitterType() == 'ReferenceKalmanFitter'): self.kalmanUpdator = 'amg' # # check if a valid fitter has been used - if not ( (self.trackFitterType() is 'KalmanFitter') - or (self.trackFitterType() is 'KalmanDNAFitter') - or (self.trackFitterType() is 
'ReferenceKalmanFitter') - or (self.trackFitterType() is 'DistributedKalmanFilter') - or (self.trackFitterType() is 'GlobalChi2Fitter' ) - or (self.trackFitterType() is 'GaussianSumFilter') ): + if not ( (self.trackFitterType() == 'KalmanFitter') + or (self.trackFitterType() == 'KalmanDNAFitter') + or (self.trackFitterType() == 'ReferenceKalmanFitter') + or (self.trackFitterType() == 'DistributedKalmanFilter') + or (self.trackFitterType() == 'GlobalChi2Fitter' ) + or (self.trackFitterType() == 'GaussianSumFilter') ): print('InDetJobProperties.py WARNING unregistered or invalid track fitter setup.') print(' --> re-setting to TrkKalmanFitter.') self.trackFitterType = 'KalmanFitter' @@ -2011,7 +2005,7 @@ class InDetJobProperties(JobPropertyContainer): def doNtupleCreation(self): return (self.doSctClusterNtuple() or - self.doTrkNtuple() or self.doPixelTrkNtuple() or self.doSctTrkNtuple() or + self.doTrkNtuple() or self.doPixelTrkNtuple() or self.doSctTrkNtuple() or self.doTrtTrkNtuple() or self.doVtxNtuple() or self.doConvVtxNtuple() or self.doV0VtxNtuple()) @@ -2567,14 +2561,14 @@ class InDetJobProperties(JobPropertyContainer): print('* load TrackingGeometry') if self.loadExtrapolator() : print('* load Extrapolator:') - if self.propagatorType() is 'RungeKutta' : + if self.propagatorType() == 'RungeKutta' : print('* - load Runge Kutta propagator') - elif self.propagatorType() is 'STEP' : + elif self.propagatorType() == 'STEP' : print('* - load STEP propagator') if self.materialInteractions() : print('* - use material corrections of type %s in extrapolation and fit'% self.materialInteractionsType()) if self.loadUpdator() : - if self.kalmanUpdator() is "fast" : + if self.kalmanUpdator() == "fast" : print('* load MeasurementUpdator_xk') else: print('* load MeasurementUpdator') diff --git a/InnerDetector/InDetExample/InDetRecExample/python/InDetKeys.py b/InnerDetector/InDetExample/InDetRecExample/python/InDetKeys.py index 
8a54ef539b9d3bd36f947a98b64f037372598fb4..9fe123b801b07dc238c708668fbf9b1d49d1fbcc 100644 --- a/InnerDetector/InDetExample/InDetRecExample/python/InDetKeys.py +++ b/InnerDetector/InDetExample/InDetRecExample/python/InDetKeys.py @@ -1,7 +1,4 @@ -# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration - -from __future__ import print_function - +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration ## ## @file InDetRecExample/python/InDetKeys.py @@ -17,8 +14,6 @@ __author__ = "A. Wildauer" __version__= "$Revision: 1.19 $" __doc__ = "InDetContainerKeys" -__all__ = [ "InDetKeyContainers" ] - # kindly stolen from AthenaCommonFlags from S. Binet and M. Gallas ##----------------------------------------------------------------------------- @@ -108,19 +103,18 @@ class TRT_RDOs(JobProperty): allowedTypes = ['str'] StoredValue = 'TRT_RDOs' - class PixelPURDOs(JobProperty): """StoreGate key for PU pixel raw data objects""" statusOn = True allowedTypes = ['str'] StoredValue = 'Pixel_PU_RDOs' - + class SCT_PU_RDOs(JobProperty): """StoreGate key for PU SCT raw data objects""" statusOn = True allowedTypes = ['str'] StoredValue = 'SCT_PU_RDOs' - + class TRT_PU_RDOs(JobProperty): """StoreGate key for PU TRT raw data objects""" statusOn = True @@ -603,7 +597,7 @@ class TRTTracks_NewT(JobProperty): statusOn = True allowedTypes = ['str'] StoredValue = 'TRTStandaloneTracks' - + class PseudoTracks(JobProperty): """StoreGate key for the final track collection (PseudoTracking)""" statusOn = True diff --git a/InnerDetector/InDetExample/InDetRecExample/python/InDetRecExampleConfig.py b/InnerDetector/InDetExample/InDetRecExample/python/InDetRecExampleConfig.py index d4968cf69d368df1653b95da87cab84a80a5c3b6..1cbd4a3f59351d7ef7f1005da0f67cc1b812cf23 100644 --- a/InnerDetector/InDetExample/InDetRecExample/python/InDetRecExampleConfig.py +++ b/InnerDetector/InDetExample/InDetRecExample/python/InDetRecExampleConfig.py @@ -1,6 +1,6 @@ # Copyright (C) 2002-2020 
CERN for the benefit of the ATLAS collaboration -from AthenaCommon import CfgMgr,CfgGetter +from AthenaCommon import CfgGetter import AthenaCommon.SystemOfUnits as Units from InDetRecExample.TrackingCommon import setDefaults,copyArgs @@ -195,7 +195,7 @@ def KalmanDNAFitter(name='KalmanDNAFitter',**kwargs) : def DistributedKalmanFilter(name="DistributedKalmanFilter", **kwargs) : pix_cluster_on_track_args = stripArgs(kwargs,['SplitClusterMapExtension','ClusterSplitProbabilityName','nameSuffix']) - + from InDetRecExample import TrackingCommon from InDetRecExample.TrackingCommon import setDefaults if 'ExtrapolatorTool' not in kwargs : kwargs = setDefaults(kwargs, ExtrapolatorTool = TrackingCommon.getInDetExtrapolator()) @@ -313,7 +313,6 @@ def InDetGlobalChi2FitterLowPt(name='InDetGlobalChi2FitterLowPt', **kwargs) : RotCreatorTool = TrackingCommon.getInDetRotCreator(**pix_cluster_on_track_args)) from InDetRecExample.InDetJobProperties import InDetFlags - use_broad_cluster_any = InDetFlags.useBroadClusterErrors() and (not InDetFlags.doDBMstandalone()) if 'BroadRotCreatorTool' not in kwargs and not InDetFlags.doRefit(): kwargs=setDefaults(kwargs, BroadRotCreatorTool = TrackingCommon.getInDetBroadRotCreator(**pix_cluster_on_track_args)) diff --git a/InnerDetector/InDetExample/InDetRecExample/python/TrackingCommon.py b/InnerDetector/InDetExample/InDetRecExample/python/TrackingCommon.py index 90142d7220a34b3ba9d815ab657ad53945d8b4c6..761913acf116c19969b1f994c19b59084829ef5d 100644 --- a/InnerDetector/InDetExample/InDetRecExample/python/TrackingCommon.py +++ b/InnerDetector/InDetExample/InDetRecExample/python/TrackingCommon.py @@ -1,7 +1,6 @@ # Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration -from __future__ import print_function - +from AthenaCommon.GlobalFlags import globalflags from AthenaCommon.Logging import logging log = logging.getLogger('TrackingCommon') @@ -95,7 +94,7 @@ def makePublicTool(tool_creator) : if the_name != tool.name() : raise 
Exception('Tool has not the exepected name %s but %s' % (the_name, tool.name())) if private is False : - log.debug ('Add to ToolSvc %s' % (tool.name())) + log.debug ('Add to ToolSvc %s', tool.name()) ToolSvc += tool return tool else : @@ -211,7 +210,6 @@ def getRIO_OnTrackErrorScalingCondAlg( **kwargs) : def getEventInfoKey() : - from AthenaCommon.GlobalFlags import globalflags from AthenaCommon.DetFlags import DetFlags isData = (globalflags.DataSource == 'data') @@ -792,7 +790,6 @@ def getInDetPrdAssociationTool_setup(name='InDetPrdAssociationTool_setup',**kwar return getInDetPrdAssociationTool(name, **setDefaults(kwargs, SetupCorrect = True) ) def getInDetPixelConditionsSummaryTool() : - from AthenaCommon.GlobalFlags import globalflags from InDetRecExample.InDetJobProperties import InDetFlags from PixelConditionsTools.PixelConditionsToolsConf import PixelConditionsSummaryTool pixelConditionsSummaryToolSetup = PixelConditionsSummaryTool("PixelConditionsSummaryTool", @@ -880,7 +877,6 @@ def getInDetSCT_ConditionsSummaryTool() : def getInDetBoundaryCheckTool(name="InDetBoundarySearchTool", **kwargs): the_name = makeName(name, kwargs) from AthenaCommon.DetFlags import DetFlags - from InDetRecExample.InDetJobProperties import InDetFlags if 'SctSummaryTool' not in kwargs : kwargs = setDefaults( kwargs, SctSummaryTool = getInDetSCT_ConditionsSummaryTool() if DetFlags.haveRIO.SCT_on() else None) @@ -900,7 +896,6 @@ def getInDetBoundaryCheckTool(name="InDetBoundarySearchTool", **kwargs): @makePublicTool def getInDetHoleSearchTool(name = 'InDetHoleSearchTool', **kwargs) : the_name = makeName( name, kwargs) - from AthenaCommon.DetFlags import DetFlags from InDetRecExample.InDetJobProperties import InDetFlags if 'Extrapolator' not in kwargs : @@ -945,7 +940,6 @@ def getInDetRecTestBLayerTool(name='InDetRecTestBLayerTool', **kwargs) : @makePublicTool def getInDetTRTStrawStatusSummaryTool(name = "InDetTRT_StrawStatusSummaryTool", **kwargs) : the_name = makeName( name, kwargs) 
- from AthenaCommon.GlobalFlags import globalflags kwargs = setDefaults( kwargs, isGEANT4 = (globalflags.DataSource == 'geant4')) from TRT_ConditionsServices.TRT_ConditionsServicesConf import TRT_StrawStatusSummaryTool return TRT_StrawStatusSummaryTool(name = the_name, **kwargs ) @@ -977,7 +971,6 @@ def getInDetTRT_dEdxTool(name = "InDetTRT_dEdxTool", **kwargs) : or InDetFlags.useExistingTracksAsInput(): # TRT_RDOs (used by the TRT_LocalOccupancy tool) are not present in ESD return None - from AthenaCommon.GlobalFlags import globalflags kwargs = setDefaults( kwargs, TRT_dEdx_isData = (globalflags.DataSource == 'data')) if 'TRT_LocalOccupancyTool' not in kwargs : diff --git a/InnerDetector/InDetMonitoring/PixelMonitoring/src/PixelAthErrorMonAlg.cxx b/InnerDetector/InDetMonitoring/PixelMonitoring/src/PixelAthErrorMonAlg.cxx index c5fac8bb94bed53dae9f4cb1944b6e198169cc37..e496478075cd52a30830b7ca7345cd807617b921 100644 --- a/InnerDetector/InDetMonitoring/PixelMonitoring/src/PixelAthErrorMonAlg.cxx +++ b/InnerDetector/InDetMonitoring/PixelMonitoring/src/PixelAthErrorMonAlg.cxx @@ -137,8 +137,8 @@ StatusCode PixelAthErrorMonAlg::fillHistograms( const EventContext& ctx ) const is_fei4 = false; } // flagging/counting categorized errors per module. 
- bool has_err_cat[ErrorCategory::COUNT][nFEIBL2D] = {false}; - int nerrors_cat_rodmod[ErrorCategoryRODMOD::COUNT][nFEIBL2D] = {0}; + bool has_err_cat[ErrorCategory::COUNT][nFEIBL2D] = {{false}}; + int nerrors_cat_rodmod[ErrorCategoryRODMOD::COUNT][nFEIBL2D] = {{0}}; // count number of words w/ MCC/FE flags per module unsigned int num_femcc_errwords = 0; diff --git a/InnerDetector/InDetRecTools/InDetMultipleVertexSeedFinder/InDetMultipleVertexSeedFinder/DivisiveMultiSeedFinder.h b/InnerDetector/InDetRecTools/InDetMultipleVertexSeedFinder/InDetMultipleVertexSeedFinder/DivisiveMultiSeedFinder.h index 7d11fb5e92aed41ac1db46604966b4d3b8c6a8bb..b262ba99b6bd46beba3da14ed4f68eda1e25515d 100644 --- a/InnerDetector/InDetRecTools/InDetMultipleVertexSeedFinder/InDetMultipleVertexSeedFinder/DivisiveMultiSeedFinder.h +++ b/InnerDetector/InDetRecTools/InDetMultipleVertexSeedFinder/InDetMultipleVertexSeedFinder/DivisiveMultiSeedFinder.h @@ -41,11 +41,11 @@ namespace InDet /** * Clustering method itself */ - std::vector< std::vector<const Trk::Track *> > seeds(const std::vector<const Trk::Track*>& tracks )const; + virtual std::vector< std::vector<const Trk::Track *> > seeds(const std::vector<const Trk::Track*>& tracks )const override; - std::vector< std::vector<const Trk::TrackParticleBase *> > seeds(const std::vector<const Trk::TrackParticleBase*>& tracks )const; + virtual std::vector< std::vector<const Trk::TrackParticleBase *> > seeds(const std::vector<const Trk::TrackParticleBase*>& tracks )const override; - std::vector< std::vector<const Trk::TrackParameters *> > seeds(const std::vector<const xAOD::TrackParticle*>& tracks )const; + virtual std::vector< std::vector<const Trk::TrackParameters *> > seeds(const std::vector<const xAOD::TrackParticle*>& tracks )const override; private: diff --git a/InnerDetector/InDetRecTools/InDetMultipleVertexSeedFinder/InDetMultipleVertexSeedFinder/HistogrammingMultiSeedFinder.h 
b/InnerDetector/InDetRecTools/InDetMultipleVertexSeedFinder/InDetMultipleVertexSeedFinder/HistogrammingMultiSeedFinder.h index 2ccf1445c2f98f168126c0cc403a59541d5e18e4..702d3d481c1c89482490bc9ea4b9f96aa7398577 100644 --- a/InnerDetector/InDetRecTools/InDetMultipleVertexSeedFinder/InDetMultipleVertexSeedFinder/HistogrammingMultiSeedFinder.h +++ b/InnerDetector/InDetRecTools/InDetMultipleVertexSeedFinder/InDetMultipleVertexSeedFinder/HistogrammingMultiSeedFinder.h @@ -54,11 +54,11 @@ namespace InDet /** * Clustering method itself */ - std::vector< std::vector<const Trk::Track *> > seeds(const std::vector<const Trk::Track*>& tracks )const; + virtual std::vector< std::vector<const Trk::Track *> > seeds(const std::vector<const Trk::Track*>& tracks )const override; - std::vector< std::vector<const Trk::TrackParticleBase *> > seeds(const std::vector<const Trk::TrackParticleBase*>& tracks )const; + virtual std::vector< std::vector<const Trk::TrackParticleBase *> > seeds(const std::vector<const Trk::TrackParticleBase*>& tracks )const override; - std::vector< std::vector<const Trk::TrackParameters *> > seeds(const std::vector<const xAOD::TrackParticle*>& tracks )const; + virtual std::vector< std::vector<const Trk::TrackParameters *> > seeds(const std::vector<const xAOD::TrackParticle*>& tracks )const override; // std::pair<std::vector<const Trk::TrackParameters *>, // std::vector<const xAOD::TrackParticle *> > m_clusterAndOutliers(std::vector<const xAOD::TrackParticle *> tracks_to_clean, xAOD::Vertex * beamposition) const; diff --git a/InnerDetector/InDetRecTools/InDetMultipleVertexSeedFinder/InDetMultipleVertexSeedFinder/SlidingWindowMultiSeedFinder.h b/InnerDetector/InDetRecTools/InDetMultipleVertexSeedFinder/InDetMultipleVertexSeedFinder/SlidingWindowMultiSeedFinder.h index fbbf71e2a2343608e715312e69359f0788742d00..4f6bef4829916b1bbbac5ea32c02210e490c68ff 100644 --- 
a/InnerDetector/InDetRecTools/InDetMultipleVertexSeedFinder/InDetMultipleVertexSeedFinder/SlidingWindowMultiSeedFinder.h +++ b/InnerDetector/InDetRecTools/InDetMultipleVertexSeedFinder/InDetMultipleVertexSeedFinder/SlidingWindowMultiSeedFinder.h @@ -41,11 +41,11 @@ namespace InDet /** * Clustering method itself */ - std::vector< std::vector<const Trk::Track *> > seeds(const std::vector<const Trk::Track*>& tracks )const; + virtual std::vector< std::vector<const Trk::Track *> > seeds(const std::vector<const Trk::Track*>& tracks )const override; - std::vector< std::vector<const Trk::TrackParticleBase *> > seeds(const std::vector<const Trk::TrackParticleBase*>& tracks )const; + virtual std::vector< std::vector<const Trk::TrackParticleBase *> > seeds(const std::vector<const Trk::TrackParticleBase*>& tracks )const override; - std::vector< std::vector<const Trk::TrackParameters *> > seeds(const std::vector<const xAOD::TrackParticle*>& tracks )const; + virtual std::vector< std::vector<const Trk::TrackParameters *> > seeds(const std::vector<const xAOD::TrackParticle*>& tracks )const override; //std::vector<int> m_z0sort(std::vector<const xAOD::TrackParticle*>& perigeeList,xAOD::Vertex * beamVtx) const; diff --git a/InnerDetector/InDetRecTools/InDetRecToolInterfaces/InDetRecToolInterfaces/IInDetTestBLayerTool.h b/InnerDetector/InDetRecTools/InDetRecToolInterfaces/InDetRecToolInterfaces/IInDetTestBLayerTool.h index a294fbeb9ba4d5dc62b887465c78055f68217ad6..4a2d1835a5b5c37bd41f1ba2d1afe3dcdd01edb9 100755 --- a/InnerDetector/InDetRecTools/InDetRecToolInterfaces/InDetRecToolInterfaces/IInDetTestBLayerTool.h +++ b/InnerDetector/InDetRecTools/InDetRecToolInterfaces/InDetRecToolInterfaces/IInDetTestBLayerTool.h @@ -1,19 +1,25 @@ /* - Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration */ #ifndef IInDetTestBLayerTool_H #define IInDetTestBLayerTool_H -#include "GaudiKernel/EventContext.h" 
#include "GaudiKernel/IAlgTool.h" -#include "GaudiKernel/ThreadLocalContext.h" +#include "GaudiKernel/ThreadLocalContext.h" //for Gaudi::Hive::currentContext() #include "TrkParameters/TrackParameters.h" -#include "TrkEventPrimitives/ResidualPull.h" #include <vector> -#include <string> -namespace Trk { class Track; class TrackParticleBase; class IResidualPullCalculator;} -namespace InDet { class TrackStateOnBLayerInfo; } +namespace Trk { + class Track; + class IResidualPullCalculator; + class ResidualPull; +} + +namespace InDet { +class TrackStateOnBLayerInfo; +} + +class EventContext; namespace InDet { @@ -32,8 +38,6 @@ namespace InDet { virtual bool expectHitInBLayer(const EventContext& ctx, const Trk::Track* track, bool recompute = false) const = 0; - virtual bool expectHitInBLayer(const Trk::TrackParticleBase*, - bool recompute = false) const = 0; bool expectHitInBLayer(const Trk::Track* track, bool recompute = false) const { return expectHitInBLayer(Gaudi::Hive::currentContext(), track, recompute); @@ -42,10 +46,7 @@ namespace InDet { const Trk::TrackParameters* trackpar) const = 0; virtual const Trk::ResidualPull* bLayerHitResidual(const Trk::Track* ) const=0; - virtual const Trk::ResidualPull* bLayerHitResidual(const Trk::TrackParticleBase*) const=0; - virtual bool getTrackStateOnBlayerInfo(const Trk::TrackParticleBase*, - std::vector<TrackStateOnBLayerInfo>& infoList) const=0; virtual bool getTrackStateOnBlayerInfo(const Trk::Track*, std::vector<TrackStateOnBLayerInfo>& infoList) const=0; virtual bool getTrackStateOnBlayerInfo(const Trk::TrackParameters* trackpar, @@ -55,9 +56,6 @@ namespace InDet { const EventContext& ctx, const Trk::Track* track, bool recompute = false) const = 0; - virtual bool expectHitInInnermostPixelLayer( - const Trk::TrackParticleBase*, - bool recompute = false) const = 0; bool expectHitInInnermostPixelLayer(const Trk::Track* track, bool recompute = false) const { @@ -68,10 +66,7 @@ namespace InDet { const Trk::TrackParameters* 
trackpar) const = 0; virtual const Trk::ResidualPull* innermostPixelLayerHitResidual(const Trk::Track* ) const=0; - virtual const Trk::ResidualPull* innermostPixelLayerHitResidual(const Trk::TrackParticleBase*) const=0; - virtual bool getTrackStateOnInnermostPixelLayerInfo(const Trk::TrackParticleBase*, - std::vector<TrackStateOnBLayerInfo>& infoList) const=0; virtual bool getTrackStateOnInnermostPixelLayerInfo(const Trk::Track*, std::vector<TrackStateOnBLayerInfo>& infoList) const=0; virtual bool getTrackStateOnInnermostPixelLayerInfo(const Trk::TrackParameters* trackpar, @@ -81,9 +76,6 @@ namespace InDet { const EventContext& ctx, const Trk::Track* track, bool recompute = false) const = 0; - virtual bool expectHitInNextToInnermostPixelLayer( - const Trk::TrackParticleBase*, - bool recompute = false) const = 0; bool expectHitInNextToInnermostPixelLayer(const Trk::Track* track, bool recompute = false) const { @@ -95,10 +87,7 @@ namespace InDet { virtual const Trk::ResidualPull* nextToInnermostPixelLayerHitResidual( const Trk::Track*) const = 0; - virtual const Trk::ResidualPull* nextToInnermostPixelLayerHitResidual(const Trk::TrackParticleBase*) const=0; - virtual bool getTrackStateOnNextToInnermostPixelLayerInfo(const Trk::TrackParticleBase*, - std::vector<TrackStateOnBLayerInfo>& infoList) const=0; virtual bool getTrackStateOnNextToInnermostPixelLayerInfo(const Trk::Track*, std::vector<TrackStateOnBLayerInfo>& infoList) const=0; virtual bool getTrackStateOnNextToInnermostPixelLayerInfo(const Trk::TrackParameters* trackpar, diff --git a/InnerDetector/InDetRecTools/InDetTestBLayer/InDetTestBLayer/InDetTestBLayerTool.h b/InnerDetector/InDetRecTools/InDetTestBLayer/InDetTestBLayer/InDetTestBLayerTool.h index 3e42bc06d7a5ba711de47ba26c423570a01bd43b..3cb1da4f566af8a550c7398561f459ebc3b39aaa 100644 --- a/InnerDetector/InDetRecTools/InDetTestBLayer/InDetTestBLayer/InDetTestBLayerTool.h +++ 
b/InnerDetector/InDetRecTools/InDetTestBLayer/InDetTestBLayer/InDetTestBLayerTool.h @@ -8,26 +8,24 @@ #include "InDetRecToolInterfaces/IInDetTestBLayerTool.h" #include "AthenaBaseComps/AthAlgTool.h" #include "GaudiKernel/ToolHandle.h" -#include "GaudiKernel/ServiceHandle.h" +#include "GaudiKernel/ThreadLocalContext.h" //for Gaudi::Hive::currentContext() #include "TrkParameters/TrackParameters.h" -#include "TrkEventPrimitives/ResidualPull.h" #include "TrkExInterfaces/IExtrapolator.h" #include "TrkToolInterfaces/IResidualPullCalculator.h" - -#include "InDetTestBLayer/TrackStateOnBLayerInfo.h" #include "InDetConditionsSummaryService/IInDetConditionsTool.h" #include <vector> #include <string> +#include <memory> namespace Trk { -class Track; -class TrackParticleBase; + class Track; + class ResidualPull; } -namespace Rec { class TrackParticle; } + class AtlasDetectorID; -class Identifier; class PixelID; - +class TrackStateOnBLayerInfo; +class EventContext; namespace InDet { @@ -45,18 +43,13 @@ namespace InDet { const Trk::Track*, bool recompute = false) const override final; - virtual bool expectHitInBLayer(const Trk::TrackParticleBase*, - bool recompute = false) const override final; virtual bool expectHitInBLayer( const Trk::TrackParameters* trackpar) const override final; virtual const Trk::ResidualPull* bLayerHitResidual( const Trk::Track*) const override; - virtual const Trk::ResidualPull* bLayerHitResidual(const Trk::TrackParticleBase*) const override; //// return false if extrapolation failed - virtual bool getTrackStateOnBlayerInfo(const Trk::TrackParticleBase*, - std::vector<TrackStateOnBLayerInfo>& infoList) const override; virtual bool getTrackStateOnBlayerInfo(const Trk::Track*, std::vector<TrackStateOnBLayerInfo>& infoList) const override; virtual bool getTrackStateOnBlayerInfo(const Trk::TrackParameters* trackpar, @@ -66,19 +59,11 @@ namespace InDet { const EventContext& ctx, const Trk::Track* track, bool recompute = false) const override final; - virtual 
bool expectHitInInnermostPixelLayer( - const Trk::TrackParticleBase*, - bool recompute = false) const override final; virtual bool expectHitInInnermostPixelLayer( const Trk::TrackParameters* trackpar) const override final; virtual const Trk::ResidualPull* innermostPixelLayerHitResidual(const Trk::Track*) const override; - virtual const Trk::ResidualPull* innermostPixelLayerHitResidual( - const Trk::TrackParticleBase*) const override; - virtual bool getTrackStateOnInnermostPixelLayerInfo( - const Trk::TrackParticleBase*, - std::vector<TrackStateOnBLayerInfo>& infoList) const override; virtual bool getTrackStateOnInnermostPixelLayerInfo( const Trk::Track*, @@ -92,19 +77,11 @@ namespace InDet { const EventContext& ctx, const Trk::Track*, bool recompute = false) const override final; - virtual bool expectHitInNextToInnermostPixelLayer( - const Trk::TrackParticleBase*, - bool recompute = false) const override final; virtual bool expectHitInNextToInnermostPixelLayer( const Trk::TrackParameters* trackpar) const override final; virtual const Trk::ResidualPull* nextToInnermostPixelLayerHitResidual(const Trk::Track*) const override; - virtual const Trk::ResidualPull* nextToInnermostPixelLayerHitResidual( - const Trk::TrackParticleBase*) const override; - virtual bool getTrackStateOnNextToInnermostPixelLayerInfo( - const Trk::TrackParticleBase*, - std::vector<TrackStateOnBLayerInfo>& infoList) const override; virtual bool getTrackStateOnNextToInnermostPixelLayerInfo( const Trk::Track*, std::vector<TrackStateOnBLayerInfo>& infoList) const override; @@ -117,9 +94,6 @@ namespace InDet { const Trk::Track*, int layer, bool recompute = false) const; - bool expectHitInPixelLayer(const Trk::TrackParticleBase*, - int layer, - bool recompute = false) const; bool expectHitInPixelLayer(const EventContext& ctx, const Trk::TrackParameters* trackpar, int layer) const; @@ -132,11 +106,7 @@ namespace InDet { const Trk::ResidualPull* pixelLayerHitResidual(const Trk::Track*, int layer) const; - 
const Trk::ResidualPull* pixelLayerHitResidual(const Trk::TrackParticleBase*, int layer) const; - bool getTrackStateOnPixelLayerInfo(const Trk::TrackParticleBase*, - std::vector<TrackStateOnBLayerInfo>& infoList, - int layer) const; bool getTrackStateOnPixelLayerInfo(const Trk::Track*, std::vector<TrackStateOnBLayerInfo>& infoList, int layer) const; diff --git a/InnerDetector/InDetRecTools/InDetTestBLayer/src/InDetTestBLayerTool.cxx b/InnerDetector/InDetRecTools/InDetTestBLayer/src/InDetTestBLayerTool.cxx index 53a0749595c2167aa453a9cac1b4f881de9bab61..583477a96cf3074676cd6293e6203be8f9d8014a 100644 --- a/InnerDetector/InDetRecTools/InDetTestBLayer/src/InDetTestBLayerTool.cxx +++ b/InnerDetector/InDetRecTools/InDetTestBLayer/src/InDetTestBLayerTool.cxx @@ -1,40 +1,31 @@ /* Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration */ +#include "InDetTestBLayer/InDetTestBLayerTool.h" -#include "AthenaBaseComps/AthAlgTool.h" -#include "AthenaBaseComps/AthService.h" - +#include "TrkEventPrimitives/ResidualPull.h" #include "InDetReadoutGeometry/SiDetectorElement.h" #include "PixelReadoutGeometry/PixelModuleDesign.h" -#include "InDetTestBLayer/InDetTestBLayerTool.h" +#include "InDetTestBLayer/TrackStateOnBLayerInfo.h" +#include "TrkEventPrimitives/ResidualPull.h" #include "TrkTrack/Track.h" -#include "TrkParameters/TrackParameters.h" #include "Particle/TrackParticle.h" #include "TrkMeasurementBase/MeasurementBase.h" - #include "TrkSurfaces/CylinderSurface.h" - #include "TrkGeometry/Layer.h" - #include "Identifier/Identifier.h" #include "InDetIdentifier/PixelID.h" #include "AtlasDetDescr/AtlasDetectorID.h" #include "IdDictDetDescr/IdDictManager.h" - - -#include <iostream> -#include <sstream> +#include "GaudiKernel/EventContext.h" using Amg::Transform3D; // don't want to include TrackSummary in the header // therefore anonymous "static" definition in the implementation file -//namespace { - static const Trk::SummaryType s_layerSummaryTypeExpectHit[2] { - 
Trk::expectInnermostPixelLayerHit, - Trk::expectNextToInnermostPixelLayerHit - }; -//} +static const Trk::SummaryType s_layerSummaryTypeExpectHit[2] { + Trk::expectInnermostPixelLayerHit, + Trk::expectNextToInnermostPixelLayerHit +}; namespace InDet { @@ -133,35 +124,6 @@ namespace InDet { return StatusCode::SUCCESS; } - const Trk::ResidualPull* InDet::InDetTestBLayerTool::bLayerHitResidual(const Trk::TrackParticleBase* trackparticle) const - { - return pixelLayerHitResidual(trackparticle,0); - } - - const Trk::ResidualPull* InDet::InDetTestBLayerTool::innermostPixelLayerHitResidual(const Trk::TrackParticleBase* trackparticle) const - { - return pixelLayerHitResidual(trackparticle,0); - } - - const Trk::ResidualPull* InDet::InDetTestBLayerTool::nextToInnermostPixelLayerHitResidual(const Trk::TrackParticleBase* trackparticle) const - { - return pixelLayerHitResidual(trackparticle,1); - } - - const Trk::ResidualPull* InDet::InDetTestBLayerTool::pixelLayerHitResidual(const Trk::TrackParticleBase* trackparticle, int layer) const - { - assert(layer>=0 && layer<=1); - const Trk::Track* track = trackparticle->originalTrack(); - - if (!track) { - ATH_MSG_DEBUG( "No original track, residual calculation for " << s_layerNames[layer] << " can not be performed" ); - return 0; - } - - return(this->pixelLayerHitResidual(track,layer)); - } - - const Trk::ResidualPull* InDet::InDetTestBLayerTool::bLayerHitResidual(const Trk::Track* track) const { return pixelLayerHitResidual(track,0); @@ -295,56 +257,6 @@ namespace InDet { } } - bool InDet::InDetTestBLayerTool::expectHitInBLayer(const Trk::TrackParticleBase* track, bool recompute) const - { - return expectHitInPixelLayer(track,0,recompute); - } - - bool InDet::InDetTestBLayerTool::expectHitInInnermostPixelLayer(const Trk::TrackParticleBase* track, bool recompute) const - { - return expectHitInPixelLayer(track,0,recompute); - } - - - bool InDet::InDetTestBLayerTool::expectHitInNextToInnermostPixelLayer(const Trk::TrackParticleBase* 
track, bool recompute) const - { - return expectHitInPixelLayer(track,1,recompute); - } - - bool InDet::InDetTestBLayerTool::expectHitInPixelLayer(const Trk::TrackParticleBase *track, int layer, bool recompute) const { - assert( layer>=0 && layer<=1); - if(!recompute){ - const Trk::TrackSummary* ts = track->trackSummary(); - if(ts){ - int ehbl = ts->get(s_layerSummaryTypeExpectHit[layer]); - if(0==ehbl || 1==ehbl ){ - ATH_MSG_DEBUG("Found the expected hit in the " << s_layerNames[layer] << " info in TrackSummary: return cached value" ); - return ehbl; - } - } - } - else{ - ATH_MSG_DEBUG("Forced to recompute whether a hit is expected in the " << s_layerNames[layer] << " or not." ); - } - - ATH_MSG_DEBUG("Computing whether a hit is expected in the " << s_layerNames[layer] << " or not." ); - - const Trk::Perigee* mp = track->perigee(); - - if(!mp) - { - //This can happen if re-creating the summary for tracks prior to ambi-solving and final fit, e.g. in StatisticAlg - ATH_MSG_DEBUG("Found TrackParticle with no perigee parameters: no information whether a hit is expected in the " << s_layerNames[layer] << " will be provided." 
); - return false; - } - else - { - ATH_MSG_DEBUG("TrackParticle perigee parameters"); - // mp->dump(mLog); - return (this->expectHitInPixelLayer(mp,layer)); - } - } - bool InDet::InDetTestBLayerTool::expectHitInBLayer(const Trk::TrackParameters* trackpar) const { @@ -507,42 +419,6 @@ namespace InDet { } - bool InDet::InDetTestBLayerTool::getTrackStateOnBlayerInfo(const Trk::TrackParticleBase* trackparticle, - std::vector<TrackStateOnBLayerInfo>& infoList) const { - return getTrackStateOnPixelLayerInfo(trackparticle, infoList,0); - } - - bool InDet::InDetTestBLayerTool::getTrackStateOnInnermostPixelLayerInfo(const Trk::TrackParticleBase* trackparticle, - std::vector<TrackStateOnBLayerInfo>& infoList) const { - return getTrackStateOnPixelLayerInfo(trackparticle, infoList,0); - } - - - bool InDet::InDetTestBLayerTool::getTrackStateOnNextToInnermostPixelLayerInfo(const Trk::TrackParticleBase* trackparticle, - std::vector<TrackStateOnBLayerInfo>& infoList) const { - return getTrackStateOnPixelLayerInfo(trackparticle, infoList,1); - } - - - bool InDet::InDetTestBLayerTool::getTrackStateOnPixelLayerInfo(const Trk::TrackParticleBase* track, - std::vector<TrackStateOnBLayerInfo>& infoList, - int layer) const - { - assert( layer>=0 && layer<=1); - - const Trk::Perigee* startParameters = track->perigee(); - - if(!startParameters){ - //This can happen if re-creating the summary for tracks prior to ambi-solving and final fit, e.g. 
in StatisticAlg - ATH_MSG_DEBUG("Found TrackParticle with no perigee parameters: no " << s_layerNames[layer] << " info will be provided"); - return false; - } - - return getTrackStateOnPixelLayerInfo(startParameters, infoList, layer); - - } - - bool InDet::InDetTestBLayerTool::getTrackStateOnBlayerInfo(const Trk::TrackParameters* trackpar, std::vector<TrackStateOnBLayerInfo>& infoList) const { return getTrackStateOnPixelLayerInfo(trackpar,infoList,0); diff --git a/InnerDetector/InDetRecTools/InDetTrackSelectionTool/Root/InDetTrackAccessor.h b/InnerDetector/InDetRecTools/InDetTrackSelectionTool/Root/InDetTrackAccessor.h index f7891ef01b044c40af86917c6bda1db1d2d1236b..0014795a5661d05601d50a2cd78556c3617cf36a 100644 --- a/InnerDetector/InDetRecTools/InDetTrackSelectionTool/Root/InDetTrackAccessor.h +++ b/InnerDetector/InDetRecTools/InDetTrackSelectionTool/Root/InDetTrackAccessor.h @@ -1,7 +1,7 @@ // -*- c++ -*- /* - Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration */ // InDetTrackAccessor.h @@ -89,7 +89,7 @@ namespace InDet { template <size_t index_i, size_t index_j> class ParamCovAccessor : public virtual TrackAccessor { public: - ParamCovAccessor<index_i, index_j>(const asg::IAsgTool*); + ParamCovAccessor(const asg::IAsgTool*); StatusCode access ( const xAOD::TrackParticle& track, const xAOD::Vertex* vertex = 0 ); #ifndef XAOD_ANALYSIS diff --git a/InnerDetector/InDetValidation/InDetPhysValMonitoring/share/InDetPVMPlotDefCommon.xml b/InnerDetector/InDetValidation/InDetPhysValMonitoring/share/InDetPVMPlotDefCommon.xml index 8a3e04e692761681962c5724779d1e466257fb66..91db4d14cd2dabc29bc979e78270fb3cefcc3ad7 100644 --- a/InnerDetector/InDetValidation/InDetPhysValMonitoring/share/InDetPVMPlotDefCommon.xml +++ b/InnerDetector/InDetValidation/InDetPhysValMonitoring/share/InDetPVMPlotDefCommon.xml @@ -630,6 +630,10 @@ <x title="p_{T} [GeV]" n="25" lo="0" hi="50"/> <y 
title="Efficiency" lo="0.0" hi="2.0"/> </h> +<h id="efficiency_vs_pt_high" type="TEfficiency" title="Fraction of reco-matched truth track"> + <x title="p_{T} [GeV]" n="29" lo="50" hi="1500"/> + <y title="Efficiency" lo="0.0" hi="2.0"/> +</h> <h id="efficiency_vs_pt_low" type="TEfficiency" title="Fraction of reco-matched truth track"> <x title="p_{T} [GeV]" n="52" lo="0.4" hi="3"/> <y title="Efficiency" lo="0.0" hi="2.0"/> @@ -639,7 +643,7 @@ <y title="Efficiency" lo="0.0" hi="2.0"/> </h> <h id="efficiency_vs_d0" type="TEfficiency" title="Fraction of reco-matched truth track"> - <x title="d_{0} [mm]" n="100" lo="-25" hi="25"/> + <x title="d_{0} [mm]" n="80" lo="-10" hi="10"/> <y title="Efficiency" lo="0.0" hi="2.0"/> </h> <h id="efficiency_vs_z0" type="TEfficiency" title="Fraction of reco-matched truth track"> @@ -663,11 +667,19 @@ <y title="Efficiency" lo="0.0" hi="2.0"/> </h> <h id="efficiency_vs_prodR" type="TEfficiency" title="Track Efficiency vs Production Vertex Radius"> - <x title="prod_R [mm]" n="100" lo="0" hi="1500"/> + <x title="prod_R [mm]" n="65" lo="0" hi="650"/> + <y title="Efficiency" lo="0.0" hi="2.0"/> +</h> +<h id="efficiency_vs_prodR_extended" type="TEfficiency" title="Track Efficiency vs Production Vertex Radius"> + <x title="prod_R [mm]" n="40" lo="0" hi="1200"/> <y title="Efficiency" lo="0.0" hi="2.0"/> </h> <h id="efficiency_vs_prodZ" type="TEfficiency" title="Track Efficiency vs Production Vertex Z"> - <x title="prod_Z [mm]" n="100" lo="0" hi="2000"/> + <x title="prod_Z [mm]" n="50" lo="0" hi="300"/> + <y title="Efficiency" lo="0.0" hi="2.0"/> +</h> +<h id="efficiency_vs_prodZ_extended" type="TEfficiency" title="Track Efficiency vs Production Vertex Z"> + <x title="prod_Z [mm]" n="40" lo="0" hi="1000"/> <y title="Efficiency" lo="0.0" hi="2.0"/> </h> <!-- Fake rate plots --> diff --git a/InnerDetector/InDetValidation/InDetPhysValMonitoring/src/InDetPerfPlot_Efficiency.cxx 
b/InnerDetector/InDetValidation/InDetPhysValMonitoring/src/InDetPerfPlot_Efficiency.cxx index e62298d7631a5b31d89a632159d4ccb24497d9b7..66bed68a4bcf626683ca3abb4794ec7a2be7565a 100644 --- a/InnerDetector/InDetValidation/InDetPhysValMonitoring/src/InDetPerfPlot_Efficiency.cxx +++ b/InnerDetector/InDetValidation/InDetPhysValMonitoring/src/InDetPerfPlot_Efficiency.cxx @@ -15,6 +15,7 @@ InDetPerfPlot_Efficiency::InDetPerfPlot_Efficiency(InDetPlotBase* pParent, const m_efficiency_vs_eta{}, m_efficiency_vs_pt{}, m_efficiency_vs_pt_low{}, + m_efficiency_vs_pt_high{}, m_efficiency_vs_phi{}, m_efficiency_vs_d0{}, m_efficiency_vs_z0{}, @@ -23,7 +24,9 @@ InDetPerfPlot_Efficiency::InDetPerfPlot_Efficiency(InDetPlotBase* pParent, const m_extended_efficiency_vs_d0{}, m_extended_efficiency_vs_z0{}, m_efficiency_vs_prodR{}, - m_efficiency_vs_prodZ{} { + m_efficiency_vs_prodR_extended{}, + m_efficiency_vs_prodZ{}, + m_efficiency_vs_prodZ_extended{} { // nop } @@ -33,6 +36,7 @@ InDetPerfPlot_Efficiency::initializePlots() { book(m_efficiency_vs_eta, "efficiency_vs_eta"); book(m_efficiency_vs_pt, "efficiency_vs_pt"); book(m_efficiency_vs_pt_low, "efficiency_vs_pt_low"); + book(m_efficiency_vs_pt_high, "efficiency_vs_pt_high"); book(m_efficiency_vs_phi, "efficiency_vs_phi"); book(m_efficiency_vs_d0, "efficiency_vs_d0"); book(m_efficiency_vs_z0, "efficiency_vs_z0"); @@ -42,7 +46,9 @@ InDetPerfPlot_Efficiency::initializePlots() { book(m_extended_efficiency_vs_d0, "extended_efficiency_vs_d0"); book(m_extended_efficiency_vs_z0, "extended_efficiency_vs_z0"); book(m_efficiency_vs_prodR, "efficiency_vs_prodR"); + book(m_efficiency_vs_prodR_extended, "efficiency_vs_prodR_extended"); book(m_efficiency_vs_prodZ, "efficiency_vs_prodZ"); + book(m_efficiency_vs_prodZ_extended, "efficiency_vs_prodZ_extended"); } @@ -55,6 +61,7 @@ InDetPerfPlot_Efficiency::fill(const xAOD::TruthParticle& truth, const bool isGo fillHisto(m_efficiency_vs_eta, eta, isGood); fillHisto(m_efficiency_vs_pt, pt, isGood); 
fillHisto(m_efficiency_vs_pt_low, pt, isGood); + fillHisto(m_efficiency_vs_pt_high, pt, isGood); fillHisto(m_efficiency_vs_phi, phi, isGood); double d0 = truth.auxdata<float>("d0"); @@ -73,7 +80,9 @@ InDetPerfPlot_Efficiency::fill(const xAOD::TruthParticle& truth, const bool isGo double prod_rad = vtx->perp(); double prod_z = vtx->z(); fillHisto(m_efficiency_vs_prodR, prod_rad, isGood); + fillHisto(m_efficiency_vs_prodR_extended, prod_rad, isGood); fillHisto(m_efficiency_vs_prodZ, prod_z, isGood); + fillHisto(m_efficiency_vs_prodZ_extended, prod_z, isGood); } } diff --git a/InnerDetector/InDetValidation/InDetPhysValMonitoring/src/InDetPerfPlot_Efficiency.h b/InnerDetector/InDetValidation/InDetPhysValMonitoring/src/InDetPerfPlot_Efficiency.h index e075aedf929f56d62a51c9a543c7f35a9e76e43f..b03ded92b8d424693767c3c8f96354145dd8a932 100644 --- a/InnerDetector/InDetValidation/InDetPhysValMonitoring/src/InDetPerfPlot_Efficiency.h +++ b/InnerDetector/InDetValidation/InDetPhysValMonitoring/src/InDetPerfPlot_Efficiency.h @@ -34,6 +34,7 @@ private: TEfficiency* m_efficiency_vs_eta; TEfficiency* m_efficiency_vs_pt; TEfficiency* m_efficiency_vs_pt_low; + TEfficiency* m_efficiency_vs_pt_high; TEfficiency* m_efficiency_vs_phi; TEfficiency* m_efficiency_vs_d0; TEfficiency* m_efficiency_vs_z0; @@ -43,7 +44,9 @@ private: TEfficiency* m_extended_efficiency_vs_d0; TEfficiency* m_extended_efficiency_vs_z0; TEfficiency* m_efficiency_vs_prodR; + TEfficiency* m_efficiency_vs_prodR_extended; TEfficiency* m_efficiency_vs_prodZ; + TEfficiency* m_efficiency_vs_prodZ_extended; // plot base has nop default implementation of this; we use it to book the histos void initializePlots(); diff --git a/LumiBlock/LumiBlockComps/CMakeLists.txt b/LumiBlock/LumiBlockComps/CMakeLists.txt index 2e4d24ca88b7b992d403b8d711e1e1154eb51a36..b28292f863222e428be03ec5b20ea1ad9fedfe1d 100644 --- a/LumiBlock/LumiBlockComps/CMakeLists.txt +++ b/LumiBlock/LumiBlockComps/CMakeLists.txt @@ -17,7 +17,7 @@ if( NOT 
XAOD_ANALYSIS ) endif() atlas_add_library( LumiBlockCompsLib LumiBlockComps/*.h src/*.h Root/*.cxx - src/CreateLumiBlockCollectionFromFile.cxx + src/CreateLumiBlockCollectionFromFile.cxx ${extra_srcs} PUBLIC_HEADERS LumiBlockComps INCLUDE_DIRS ${CORAL_INCLUDE_DIRS} ${ROOT_INCLUDE_DIRS} @@ -48,17 +48,20 @@ atlas_install_python_modules( python/*.py atlas_install_joboptions( share/*.py ) atlas_install_joboptions( share/*.txt ) -atlas_add_test( LBDurationCondAlg_test - SOURCES test/LBDurationCondAlg_test.cxx - LINK_LIBRARIES GaudiKernel LumiBlockCompsLib TestTools ) +if( NOT XAOD_ANALYSIS ) + atlas_add_test( LBDurationCondAlg_test + SOURCES test/LBDurationCondAlg_test.cxx + LINK_LIBRARIES GaudiKernel LumiBlockData LumiBlockCompsLib TestTools ) -atlas_add_test( LuminosityCondAlg_test - SOURCES test/LuminosityCondAlg_test.cxx - LINK_LIBRARIES GaudiKernel LumiBlockCompsLib AthenaPoolUtilities TestTools ) + atlas_add_test( LuminosityCondAlg_test + SOURCES test/LuminosityCondAlg_test.cxx + LINK_LIBRARIES GaudiKernel LumiBlockData LumiBlockCompsLib + AthenaPoolUtilities TestTools ) -atlas_add_test( TrigLiveFractionCondAlg_test - SOURCES test/TrigLiveFractionCondAlg_test.cxx - LINK_LIBRARIES GaudiKernel LumiBlockCompsLib TestTools ) + atlas_add_test( TrigLiveFractionCondAlg_test + SOURCES test/TrigLiveFractionCondAlg_test.cxx + LINK_LIBRARIES GaudiKernel LumiBlockData LumiBlockCompsLib TestTools ) +endif() atlas_add_test( LuminosityCondAlgConfig_test SCRIPT python -m LumiBlockComps.LuminosityCondAlgConfig @@ -76,6 +79,6 @@ atlas_add_test( LumiBlockMuWriterConfig_test SCRIPT python -m LumiBlockComps.LumiBlockMuWriterConfig LOG_SELECT_PATTERN "ComponentAccumulator|^---|^IOVDbSvc" ) -atlas_add_test( BunchCrossingCondAlg +atlas_add_test( BunchCrossingCondAlg SCRIPT python ${CMAKE_CURRENT_SOURCE_DIR}/python/BunchCrossingCondAlgTest.py POST_EXEC_SCRIPT " /usr/bin/diff BCData.txt ${CMAKE_CURRENT_SOURCE_DIR}/share/BCData.txt.ref > BC_diff.log " ) diff --git 
a/LumiBlock/LumiBlockComps/util/getLumi.cxx b/LumiBlock/LumiBlockComps/util/getLumi.cxx index 6d0d087351270c7ec801427c50f7157e689bf374..c2b9d4361d9d13cd86c3077d008e42b21fafc70d 100644 --- a/LumiBlock/LumiBlockComps/util/getLumi.cxx +++ b/LumiBlock/LumiBlockComps/util/getLumi.cxx @@ -1,5 +1,5 @@ /* - Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration */ ///getLumi - the lumi helper application @@ -34,6 +34,7 @@ int main( int, char** ) { #include "TKey.h" #include "TDirectory.h" #include "TSystem.h" +#include "TObjString.h" #include <getopt.h> #include <cstdlib> @@ -56,7 +57,7 @@ struct lbx { int main(int argc, char* argv[]) { const char* optstring = "m"; bool showMissing = false; -static struct option long_options[] = +static struct option long_options[] = { /* name has_arg flag val */ {"showMissing", no_argument , NULL, 'm'} @@ -73,9 +74,9 @@ static struct option long_options[] = case 'm' : showMissing = true;nOptions++; break; - case '?' : + case '?' : printf("Please supply a valid option to the program. Exiting\n"); - exit(1); + exit(1); break; } } @@ -106,7 +107,7 @@ static struct option long_options[] = grlMaskReader.AddXMLFile( s ); hasMask=true; } - Root::TGoodRunsList grlMask; + Root::TGoodRunsList grlMask; if(hasMask) { grlMaskReader.Interpret();grlMask.AddGRL(grlMaskReader.GetMergedGoodRunsList()); } @@ -121,7 +122,7 @@ static struct option long_options[] = TFile *file = TFile::Open(argv[i]); if(file==0) continue; std::cout << s; - + //check if this file is a LumiMetaData file ... if it is we add to the list for later TTree *tmp = (TTree*)file->Get( "LumiMetaData" ); if(tmp) { @@ -130,13 +131,13 @@ static struct option long_options[] = lumiFiles.push_back(s); continue; } - + //try to get lumi - //if this is xAOD, it will have a MetaData tree ... + //if this is xAOD, it will have a MetaData tree ... 
if(file->Get("MetaData")!=0) { TTree* metaTree = static_cast<TTree*>(file->Get("MetaData")); - //use TEvent if the MetaData tree contains an EventFormat branch and only 1 entry + //use TEvent if the MetaData tree contains an EventFormat branch and only 1 entry //WB : 21/11/2016 - actually just always avoid using TEvent bool useTEvent(false); /*if(metaTree->GetEntries()==1 && metaTree->FindBranch("EventFormat")) { @@ -192,12 +193,12 @@ static struct option long_options[] = } } - //add lb to grl + //add lb to grl for(auto lbr : *lbrs) { for(uint runNum = lbr->startRunNumber(); runNum <= lbr->stopRunNumber(); runNum++) { if(lbr->startLumiBlockNumber()!=lbr->stopLumiBlockNumber()) {std::cout << " Unexpected behaviour. Please report! " << std::endl; exit(1);} for(uint lb = lbr->startLumiBlockNumber(); lb <= lbr->stopLumiBlockNumber(); lb++) { - lbxs[runNum][lb].nSeen += lbr->eventsSeen(); + lbxs[runNum][lb].nSeen += lbr->eventsSeen(); if(lbxs[runNum][lb].nExpected!=0 && lbxs[runNum][lb].nExpected != lbr->eventsExpected()) { std::cout << "...mismatch on expected events in [run,lb]=[" << runNum << "," << lb << "] got " << lbr->eventsExpected() << " but had " << lbxs[runNum][lb].nExpected << std::endl; std::cout << "...PLEASE REPORT THIS to hn-atlas-PATHelp@cern.ch ... for now I will assume the larger number is correct" << std::endl; @@ -251,12 +252,12 @@ static struct option long_options[] = } } - //add lb to grl + //add lb to grl for(auto lbr : *lbrs) { for(uint runNum = lbr->startRunNumber(); runNum <= lbr->stopRunNumber(); runNum++) { if(lbr->startLumiBlockNumber()!=lbr->stopLumiBlockNumber()) {std::cout << " Unexpected behaviour. Please report! 
" << std::endl; exit(1);} for(uint lb = lbr->startLumiBlockNumber(); lb <= lbr->stopLumiBlockNumber(); lb++) { - lbxs[runNum][lb].nSeen += lbr->eventsSeen(); + lbxs[runNum][lb].nSeen += lbr->eventsSeen(); if(lbxs[runNum][lb].nExpected!=0 && lbxs[runNum][lb].nExpected != lbr->eventsExpected()) { std::cout << "...mismatch on expected events in [run,lb]=[" << runNum << "," << lb << "] got " << lbr->eventsExpected() << " but had " << lbxs[runNum][lb].nExpected << std::endl; std::cout << "...PLEASE REPORT THIS to hn-atlas-PATHelp@cern.ch ... for now I will assume the larger number is correct" << std::endl; @@ -311,12 +312,12 @@ static struct option long_options[] = r->setEventsExpected(eventsExpected.at(j));r->setEventsSeen(eventsSeen.at(j)); } } - //add lb to grl + //add lb to grl for(auto lbr : *lbrs) { for(uint runNum = lbr->startRunNumber(); runNum <= lbr->stopRunNumber(); runNum++) { if(lbr->startLumiBlockNumber()!=lbr->stopLumiBlockNumber()) {std::cout << " Unexpected behaviour. Please report! 
" << std::endl; exit(1);} for(uint lb = lbr->startLumiBlockNumber(); lb <= lbr->stopLumiBlockNumber(); lb++) { - lbxs[runNum][lb].nSeen += lbr->eventsSeen(); + lbxs[runNum][lb].nSeen += lbr->eventsSeen(); lbxs[runNum][lb].fromSuspect = true; if(lbxs[runNum][lb].nExpected!=0 && lbxs[runNum][lb].nExpected != lbr->eventsExpected()) { std::cout << "...mismatch on expected events in [run,lb]=[" << runNum << "," << lb << "] got " << lbr->eventsExpected() << " but had " << lbxs[runNum][lb].nExpected << std::endl; @@ -381,14 +382,14 @@ static struct option long_options[] = for(auto& it2 : it.second) { uint lbn = it2.first; if(it2.second.nSeen > it2.second.nExpected) { fromXAODSuspect.AddRunLumiBlock(runNum,lbn); continue; } - + if(it2.second.fromSuspect) { fromXAODPossiblySuspect.AddRunLumiBlock(runNum,lbn);} - + if(it2.second.nSeen==it2.second.nExpected) { fromXAOD.AddRunLumiBlock(runNum,lbn); } else { fromXAODIncomplete.AddRunLumiBlock(runNum,lbn); } } } - + Root::TGoodRunsList l;Root::TGoodRunsList lIncomplete;Root::TGoodRunsList lSuspect;Root::TGoodRunsList lPossiblySuspect; l.AddGRL(fromXAOD); lIncomplete.AddGRL(fromXAODIncomplete); lSuspect.AddGRL(fromXAODSuspect);lPossiblySuspect.AddGRL(fromXAODPossiblySuspect);//have to do this because of weird GRL behaviour if(readXML) { @@ -400,10 +401,10 @@ static struct option long_options[] = std::map<UInt_t, float> missingRuns;std::map<UInt_t,bool> allRuns;std::set<UInt_t> incompleteRuns;std::set<UInt_t> suspectRuns; std::map<UInt_t, std::string> missingRunLB; //lumiblocks that are missing - - + + Root::TGoodRunsList providedL; //keeps track of what came from lumicalc file @@ -420,13 +421,13 @@ static struct option long_options[] = std::cout << "Could not open lumicalc file: " << argv[1+nOptions] << std::endl; return 0; } - + TTree *tmp = (TTree*)lumicalcFile->Get( "LumiMetaData" ); if(!tmp) { std::cout << "Could not find LumiMetaData tree in lumicalc file: " << argv[1+nOptions] << std::endl; return 0; } - + //structure 
expected is as given by iLumiCalc: // RunNbr, AvergeInteractionPerXing, IntLumi UInt_t runNbr=0;Float_t intLumi=0;TBranch *b_runNbr;TBranch *b_intLumi; @@ -441,9 +442,9 @@ static struct option long_options[] = std::cout << "Could not find IntLumi branch in Data TTree" << std::endl; return 0; } long nEntries = tmp->GetEntries(); - - int startMissingBlock = -1;UInt_t lastRunNumber=0;int lastLb=0;double missingLumi=0; - + + int startMissingBlock = -1;UInt_t lastRunNumber=0;int lastLb=0;double missingLumi=0; + for(long i=0;i<nEntries;i++) { b_runNbr->GetEntry(i);b_intLumi->GetEntry(i);b_lb->GetEntry(i); if(hasMask && !grlMask.HasRunLumiBlock(runNbr,lb)) continue; @@ -452,9 +453,9 @@ static struct option long_options[] = if(hasLumi) totalLumi += intLumi; else if(lIncomplete.HasRunLumiBlock(runNbr,lb)) {hasLumi=true; totalLumiIncomplete += intLumi; incompleteRuns.insert(runNbr);} //else if ensures we never double count lumi else if(lSuspect.HasRunLumiBlock(runNbr,lb)) {hasLumi=true;totalLumiSuspect += intLumi; suspectRuns.insert(runNbr); definitelySuspect << "(" << runNbr << "," << lb << "),";} - + if(lPossiblySuspect.HasRunLumiBlock(runNbr,lb)) {totalLumiPossiblySuspect += intLumi; possiblySuspect << "(" << runNbr << "," << lb << "),"; } - + if(!hasLumi && intLumi==0.) 
hasLumi=true; //if there is no lumi, it's as good as having some if((lastRunNumber!=runNbr&&startMissingBlock>=0) || (hasLumi && startMissingBlock>=0)) { //print now, if startMissingBlock not negative @@ -483,7 +484,7 @@ static struct option long_options[] = std::cout << "***************LUMI REPORT******************" << std::endl << std::endl; - + if(possiblySuspect.str().size()) { std::cout << "Possibly suspect lumiblocks: " << possiblySuspect.str() << std::endl; } @@ -522,7 +523,7 @@ static struct option long_options[] = if(allMissing) { std::cout << "(Missing Lumonisity = " << allMissing << " pb-1) (this is luminosity in your lumicalc files that you appear not to have run over)"; //already divided by 1E6 in loop above //if(!showMissing) std::cout << " rerun with the '-m' option to see runs where this luminosity resides"; - std::cout << std::endl; + std::cout << std::endl; } /* diff --git a/MuonSpectrometer/MuonCnv/MuonEventAthenaPool/share/CscRawDataCnv_p2_test.ref b/MuonSpectrometer/MuonCnv/MuonEventAthenaPool/share/CscRawDataCnv_p2_test.ref index b63df6294ab66a2c6ffe0b0e845f35d7a9e087b1..a832e5ee14169e095d5f658a38e219711e32d1d7 100644 --- a/MuonSpectrometer/MuonCnv/MuonEventAthenaPool/share/CscRawDataCnv_p2_test.ref +++ b/MuonSpectrometer/MuonCnv/MuonEventAthenaPool/share/CscRawDataCnv_p2_test.ref @@ -3,15 +3,9 @@ ApplicationMgr INFO Application Manager Configured successfully EventLoopMgr WARNING Unable to locate service "EventSelector" EventLoopMgr WARNING No events will be processed from external input. -HistogramPersis...WARNING Histograms saving not required. 
ApplicationMgr INFO Application Manager Initialized successfully ApplicationMgr Ready -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find mm region index: group, region size 0 0 -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find stgc region index: group, region size 0 0 -AtlasDetectorID::initLevelsFromDict - there are no sTGC entries in the dictionary! -AtlasDetectorID::initLevelsFromDict - there are no MM entries in the dictionary! -AtlasDetectorID DEBUG Could not get value for label 'no_side' of field 'DetZside' in dictionary Calorimeter - AtlasDetectorID::initialize_from_dictionary - OK +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper INFO MultiRange built successfully to doubletR: MultiRange size is 241 RpcIdHelper INFO MultiRange built successfully to detectorElement: DetectorElement MultiRange size is 241 RpcIdHelper INFO MultiRange built successfully to rpcStrip: MultiRange size is 241 @@ -19,12 +13,7 @@ RpcIdHelper INFO Initializing RPC hash indices ... RpcIdHelper INFO The element hash max is 600 RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find mm region index: group, region size 0 0 -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find stgc region index: group, region size 0 0 -AtlasDetectorID::initLevelsFromDict - there are no sTGC entries in the dictionary! -AtlasDetectorID::initLevelsFromDict - there are no MM entries in the dictionary! 
-AtlasDetectorID DEBUG Could not get value for label 'no_side' of field 'DetZside' in dictionary Calorimeter - AtlasDetectorID::initialize_from_dictionary - OK +AtlasDetectorID INFO initialize_from_dictionary - OK CscIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 210 CscIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 237 CscIdHelper INFO MultiRange built successfully to cscStrip: MultiRange size is 241 @@ -33,12 +22,7 @@ CscIdHelper INFO The element hash max is 32 CscIdHelper INFO The detector element hash max is 64 CscIdHelper INFO The channel hash max is 61440 CscIdHelper INFO Initializing CSC hash indices for finding neighbors ... -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find mm region index: group, region size 0 0 -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find stgc region index: group, region size 0 0 -AtlasDetectorID::initLevelsFromDict - there are no sTGC entries in the dictionary! -AtlasDetectorID::initLevelsFromDict - there are no MM entries in the dictionary! -AtlasDetectorID DEBUG Could not get value for label 'no_side' of field 'DetZside' in dictionary Calorimeter - AtlasDetectorID::initialize_from_dictionary - OK +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 210 MdtIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 241 MdtIdHelper INFO MultiRange built successfully to tube: MultiRange size is 241 @@ -46,12 +30,7 @@ MdtIdHelper INFO Initializing MDT hash indices ... MdtIdHelper INFO The element hash max is 1188 MdtIdHelper INFO The detector element hash max is 2328 MdtIdHelper INFO Initializing MDT hash indices for finding neighbors ... 
-AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find mm region index: group, region size 0 0 -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find stgc region index: group, region size 0 0 -AtlasDetectorID::initLevelsFromDict - there are no sTGC entries in the dictionary! -AtlasDetectorID::initLevelsFromDict - there are no MM entries in the dictionary! -AtlasDetectorID DEBUG Could not get value for label 'no_side' of field 'DetZside' in dictionary Calorimeter - AtlasDetectorID::initialize_from_dictionary - OK +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 210 TgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 210 TgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 241 diff --git a/MuonSpectrometer/MuonCnv/MuonEventAthenaPool/share/CscRawDataCnv_p3_test.ref b/MuonSpectrometer/MuonCnv/MuonEventAthenaPool/share/CscRawDataCnv_p3_test.ref index b63df6294ab66a2c6ffe0b0e845f35d7a9e087b1..a832e5ee14169e095d5f658a38e219711e32d1d7 100644 --- a/MuonSpectrometer/MuonCnv/MuonEventAthenaPool/share/CscRawDataCnv_p3_test.ref +++ b/MuonSpectrometer/MuonCnv/MuonEventAthenaPool/share/CscRawDataCnv_p3_test.ref @@ -3,15 +3,9 @@ ApplicationMgr INFO Application Manager Configured successfully EventLoopMgr WARNING Unable to locate service "EventSelector" EventLoopMgr WARNING No events will be processed from external input. -HistogramPersis...WARNING Histograms saving not required. ApplicationMgr INFO Application Manager Initialized successfully ApplicationMgr Ready -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find mm region index: group, region size 0 0 -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find stgc region index: group, region size 0 0 -AtlasDetectorID::initLevelsFromDict - there are no sTGC entries in the dictionary! 
-AtlasDetectorID::initLevelsFromDict - there are no MM entries in the dictionary! -AtlasDetectorID DEBUG Could not get value for label 'no_side' of field 'DetZside' in dictionary Calorimeter - AtlasDetectorID::initialize_from_dictionary - OK +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper INFO MultiRange built successfully to doubletR: MultiRange size is 241 RpcIdHelper INFO MultiRange built successfully to detectorElement: DetectorElement MultiRange size is 241 RpcIdHelper INFO MultiRange built successfully to rpcStrip: MultiRange size is 241 @@ -19,12 +13,7 @@ RpcIdHelper INFO Initializing RPC hash indices ... RpcIdHelper INFO The element hash max is 600 RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find mm region index: group, region size 0 0 -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find stgc region index: group, region size 0 0 -AtlasDetectorID::initLevelsFromDict - there are no sTGC entries in the dictionary! -AtlasDetectorID::initLevelsFromDict - there are no MM entries in the dictionary! -AtlasDetectorID DEBUG Could not get value for label 'no_side' of field 'DetZside' in dictionary Calorimeter - AtlasDetectorID::initialize_from_dictionary - OK +AtlasDetectorID INFO initialize_from_dictionary - OK CscIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 210 CscIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 237 CscIdHelper INFO MultiRange built successfully to cscStrip: MultiRange size is 241 @@ -33,12 +22,7 @@ CscIdHelper INFO The element hash max is 32 CscIdHelper INFO The detector element hash max is 64 CscIdHelper INFO The channel hash max is 61440 CscIdHelper INFO Initializing CSC hash indices for finding neighbors ... 
-AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find mm region index: group, region size 0 0 -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find stgc region index: group, region size 0 0 -AtlasDetectorID::initLevelsFromDict - there are no sTGC entries in the dictionary! -AtlasDetectorID::initLevelsFromDict - there are no MM entries in the dictionary! -AtlasDetectorID DEBUG Could not get value for label 'no_side' of field 'DetZside' in dictionary Calorimeter - AtlasDetectorID::initialize_from_dictionary - OK +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 210 MdtIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 241 MdtIdHelper INFO MultiRange built successfully to tube: MultiRange size is 241 @@ -46,12 +30,7 @@ MdtIdHelper INFO Initializing MDT hash indices ... MdtIdHelper INFO The element hash max is 1188 MdtIdHelper INFO The detector element hash max is 2328 MdtIdHelper INFO Initializing MDT hash indices for finding neighbors ... -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find mm region index: group, region size 0 0 -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find stgc region index: group, region size 0 0 -AtlasDetectorID::initLevelsFromDict - there are no sTGC entries in the dictionary! -AtlasDetectorID::initLevelsFromDict - there are no MM entries in the dictionary! 
-AtlasDetectorID DEBUG Could not get value for label 'no_side' of field 'DetZside' in dictionary Calorimeter - AtlasDetectorID::initialize_from_dictionary - OK +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 210 TgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 210 TgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 241 diff --git a/MuonSpectrometer/MuonCnv/MuonEventAthenaPool/share/CscRawDataCollectionCnv_p2_test.ref b/MuonSpectrometer/MuonCnv/MuonEventAthenaPool/share/CscRawDataCollectionCnv_p2_test.ref index b63df6294ab66a2c6ffe0b0e845f35d7a9e087b1..a832e5ee14169e095d5f658a38e219711e32d1d7 100644 --- a/MuonSpectrometer/MuonCnv/MuonEventAthenaPool/share/CscRawDataCollectionCnv_p2_test.ref +++ b/MuonSpectrometer/MuonCnv/MuonEventAthenaPool/share/CscRawDataCollectionCnv_p2_test.ref @@ -3,15 +3,9 @@ ApplicationMgr INFO Application Manager Configured successfully EventLoopMgr WARNING Unable to locate service "EventSelector" EventLoopMgr WARNING No events will be processed from external input. -HistogramPersis...WARNING Histograms saving not required. ApplicationMgr INFO Application Manager Initialized successfully ApplicationMgr Ready -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find mm region index: group, region size 0 0 -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find stgc region index: group, region size 0 0 -AtlasDetectorID::initLevelsFromDict - there are no sTGC entries in the dictionary! -AtlasDetectorID::initLevelsFromDict - there are no MM entries in the dictionary! 
-AtlasDetectorID DEBUG Could not get value for label 'no_side' of field 'DetZside' in dictionary Calorimeter - AtlasDetectorID::initialize_from_dictionary - OK +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper INFO MultiRange built successfully to doubletR: MultiRange size is 241 RpcIdHelper INFO MultiRange built successfully to detectorElement: DetectorElement MultiRange size is 241 RpcIdHelper INFO MultiRange built successfully to rpcStrip: MultiRange size is 241 @@ -19,12 +13,7 @@ RpcIdHelper INFO Initializing RPC hash indices ... RpcIdHelper INFO The element hash max is 600 RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find mm region index: group, region size 0 0 -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find stgc region index: group, region size 0 0 -AtlasDetectorID::initLevelsFromDict - there are no sTGC entries in the dictionary! -AtlasDetectorID::initLevelsFromDict - there are no MM entries in the dictionary! -AtlasDetectorID DEBUG Could not get value for label 'no_side' of field 'DetZside' in dictionary Calorimeter - AtlasDetectorID::initialize_from_dictionary - OK +AtlasDetectorID INFO initialize_from_dictionary - OK CscIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 210 CscIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 237 CscIdHelper INFO MultiRange built successfully to cscStrip: MultiRange size is 241 @@ -33,12 +22,7 @@ CscIdHelper INFO The element hash max is 32 CscIdHelper INFO The detector element hash max is 64 CscIdHelper INFO The channel hash max is 61440 CscIdHelper INFO Initializing CSC hash indices for finding neighbors ... 
-AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find mm region index: group, region size 0 0 -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find stgc region index: group, region size 0 0 -AtlasDetectorID::initLevelsFromDict - there are no sTGC entries in the dictionary! -AtlasDetectorID::initLevelsFromDict - there are no MM entries in the dictionary! -AtlasDetectorID DEBUG Could not get value for label 'no_side' of field 'DetZside' in dictionary Calorimeter - AtlasDetectorID::initialize_from_dictionary - OK +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 210 MdtIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 241 MdtIdHelper INFO MultiRange built successfully to tube: MultiRange size is 241 @@ -46,12 +30,7 @@ MdtIdHelper INFO Initializing MDT hash indices ... MdtIdHelper INFO The element hash max is 1188 MdtIdHelper INFO The detector element hash max is 2328 MdtIdHelper INFO Initializing MDT hash indices for finding neighbors ... -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find mm region index: group, region size 0 0 -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find stgc region index: group, region size 0 0 -AtlasDetectorID::initLevelsFromDict - there are no sTGC entries in the dictionary! -AtlasDetectorID::initLevelsFromDict - there are no MM entries in the dictionary! 
-AtlasDetectorID DEBUG Could not get value for label 'no_side' of field 'DetZside' in dictionary Calorimeter - AtlasDetectorID::initialize_from_dictionary - OK +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 210 TgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 210 TgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 241 diff --git a/MuonSpectrometer/MuonCnv/MuonEventAthenaPool/share/CscRawDataCollectionCnv_p3_test.ref b/MuonSpectrometer/MuonCnv/MuonEventAthenaPool/share/CscRawDataCollectionCnv_p3_test.ref index b63df6294ab66a2c6ffe0b0e845f35d7a9e087b1..a832e5ee14169e095d5f658a38e219711e32d1d7 100644 --- a/MuonSpectrometer/MuonCnv/MuonEventAthenaPool/share/CscRawDataCollectionCnv_p3_test.ref +++ b/MuonSpectrometer/MuonCnv/MuonEventAthenaPool/share/CscRawDataCollectionCnv_p3_test.ref @@ -3,15 +3,9 @@ ApplicationMgr INFO Application Manager Configured successfully EventLoopMgr WARNING Unable to locate service "EventSelector" EventLoopMgr WARNING No events will be processed from external input. -HistogramPersis...WARNING Histograms saving not required. ApplicationMgr INFO Application Manager Initialized successfully ApplicationMgr Ready -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find mm region index: group, region size 0 0 -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find stgc region index: group, region size 0 0 -AtlasDetectorID::initLevelsFromDict - there are no sTGC entries in the dictionary! -AtlasDetectorID::initLevelsFromDict - there are no MM entries in the dictionary! 
-AtlasDetectorID DEBUG Could not get value for label 'no_side' of field 'DetZside' in dictionary Calorimeter - AtlasDetectorID::initialize_from_dictionary - OK +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper INFO MultiRange built successfully to doubletR: MultiRange size is 241 RpcIdHelper INFO MultiRange built successfully to detectorElement: DetectorElement MultiRange size is 241 RpcIdHelper INFO MultiRange built successfully to rpcStrip: MultiRange size is 241 @@ -19,12 +13,7 @@ RpcIdHelper INFO Initializing RPC hash indices ... RpcIdHelper INFO The element hash max is 600 RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find mm region index: group, region size 0 0 -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find stgc region index: group, region size 0 0 -AtlasDetectorID::initLevelsFromDict - there are no sTGC entries in the dictionary! -AtlasDetectorID::initLevelsFromDict - there are no MM entries in the dictionary! -AtlasDetectorID DEBUG Could not get value for label 'no_side' of field 'DetZside' in dictionary Calorimeter - AtlasDetectorID::initialize_from_dictionary - OK +AtlasDetectorID INFO initialize_from_dictionary - OK CscIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 210 CscIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 237 CscIdHelper INFO MultiRange built successfully to cscStrip: MultiRange size is 241 @@ -33,12 +22,7 @@ CscIdHelper INFO The element hash max is 32 CscIdHelper INFO The detector element hash max is 64 CscIdHelper INFO The channel hash max is 61440 CscIdHelper INFO Initializing CSC hash indices for finding neighbors ... 
-AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find mm region index: group, region size 0 0 -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find stgc region index: group, region size 0 0 -AtlasDetectorID::initLevelsFromDict - there are no sTGC entries in the dictionary! -AtlasDetectorID::initLevelsFromDict - there are no MM entries in the dictionary! -AtlasDetectorID DEBUG Could not get value for label 'no_side' of field 'DetZside' in dictionary Calorimeter - AtlasDetectorID::initialize_from_dictionary - OK +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 210 MdtIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 241 MdtIdHelper INFO MultiRange built successfully to tube: MultiRange size is 241 @@ -46,12 +30,7 @@ MdtIdHelper INFO Initializing MDT hash indices ... MdtIdHelper INFO The element hash max is 1188 MdtIdHelper INFO The detector element hash max is 2328 MdtIdHelper INFO Initializing MDT hash indices for finding neighbors ... -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find mm region index: group, region size 0 0 -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find stgc region index: group, region size 0 0 -AtlasDetectorID::initLevelsFromDict - there are no sTGC entries in the dictionary! -AtlasDetectorID::initLevelsFromDict - there are no MM entries in the dictionary! 
-AtlasDetectorID DEBUG Could not get value for label 'no_side' of field 'DetZside' in dictionary Calorimeter - AtlasDetectorID::initialize_from_dictionary - OK +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 210 TgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 210 TgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 241 diff --git a/MuonSpectrometer/MuonCnv/MuonEventAthenaPool/share/RpcPadContainerCnv_p1_test.ref b/MuonSpectrometer/MuonCnv/MuonEventAthenaPool/share/RpcPadContainerCnv_p1_test.ref index 4a294b1ab94c2abc84db380982653e6158dcc16f..d56b817d62cf49ed5d326a92dffc2e7831420d20 100644 --- a/MuonSpectrometer/MuonCnv/MuonEventAthenaPool/share/RpcPadContainerCnv_p1_test.ref +++ b/MuonSpectrometer/MuonCnv/MuonEventAthenaPool/share/RpcPadContainerCnv_p1_test.ref @@ -1,25 +1,11 @@ -Initializing Gaudi ApplicationMgr using job opts ../share/MuonEventAthenaPool_test.txt -JobOptionsSvc INFO # =======> /afs/cern.ch/user/s/ssnyder/atlas-work3/MuonSpectrometer/MuonCnv/MuonEventAthenaPool/share/../share/MuonEventAthenaPool_test.txt -JobOptionsSvc INFO # (1,1): ApplicationMgr.Dlls += ["StoreGate", "CLIDComps"] -JobOptionsSvc INFO # (2,1): ApplicationMgr.ExtSvc += ["StoreGateSvc", "StoreGateSvc/DetectorStore"] -JobOptionsSvc INFO # (3,1): TGCcablingServerSvc.forcedUse = 1 -JobOptionsSvc INFO Job options successfully read in from ../share/MuonEventAthenaPool_test.txt -ApplicationMgr SUCCESS -==================================================================================================================================== - Welcome to ApplicationMgr (GaudiCoreSvc v29r0) - running on lxplus000.cern.ch on Wed Nov 22 22:09:33 2017 -==================================================================================================================================== -ApplicationMgr INFO Successfully loaded modules : 
StoreGate, CLIDComps ApplicationMgr INFO Application Manager Configured successfully -ClassIDSvc INFO getRegistryEntries: read 1844 CLIDRegistry entries for module ALL EventLoopMgr WARNING Unable to locate service "EventSelector" EventLoopMgr WARNING No events will be processed from external input. -HistogramPersis...WARNING Histograms saving not required. ApplicationMgr INFO Application Manager Initialized successfully ApplicationMgr Ready - AtlasDetectorID::initialize_from_dictionary - OK +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper INFO MultiRange built successfully to doubletR: MultiRange size is 261 RpcIdHelper INFO MultiRange built successfully to detectorElement: DetectorElement MultiRange size is 261 RpcIdHelper INFO MultiRange built successfully to rpcStrip: MultiRange size is 261 @@ -27,5 +13,4 @@ RpcIdHelper INFO Initializing RPC hash indices ... RpcIdHelper INFO The element hash max is 600 RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... 
-ClassIDSvc INFO getRegistryEntries: read 372 CLIDRegistry entries for module ALL test1 diff --git a/MuonSpectrometer/MuonCnv/MuonEventAthenaPool/share/RpcPadContainerCnv_p2_test.ref b/MuonSpectrometer/MuonCnv/MuonEventAthenaPool/share/RpcPadContainerCnv_p2_test.ref index 7b7f6c23c624511708b9042832633aa24a2d7919..a246a707f206dfe9493c7d1435a8d8e55fa901bb 100644 --- a/MuonSpectrometer/MuonCnv/MuonEventAthenaPool/share/RpcPadContainerCnv_p2_test.ref +++ b/MuonSpectrometer/MuonCnv/MuonEventAthenaPool/share/RpcPadContainerCnv_p2_test.ref @@ -1,25 +1,11 @@ -Initializing Gaudi ApplicationMgr using job opts ../share/MuonEventAthenaPool_test.txt -JobOptionsSvc INFO # =======> /afs/cern.ch/user/s/ssnyder/atlas-work3/MuonSpectrometer/MuonCnv/MuonEventAthenaPool/share/../share/MuonEventAthenaPool_test.txt -JobOptionsSvc INFO # (1,1): ApplicationMgr.Dlls += ["StoreGate", "CLIDComps"] -JobOptionsSvc INFO # (2,1): ApplicationMgr.ExtSvc += ["StoreGateSvc", "StoreGateSvc/DetectorStore"] -JobOptionsSvc INFO # (3,1): TGCcablingServerSvc.forcedUse = 1 -JobOptionsSvc INFO Job options successfully read in from ../share/MuonEventAthenaPool_test.txt -ApplicationMgr SUCCESS -==================================================================================================================================== - Welcome to ApplicationMgr (GaudiCoreSvc v29r0) - running on lxplus000.cern.ch on Wed Nov 22 21:23:47 2017 -==================================================================================================================================== -ApplicationMgr INFO Successfully loaded modules : StoreGate, CLIDComps ApplicationMgr INFO Application Manager Configured successfully -ClassIDSvc INFO getRegistryEntries: read 1844 CLIDRegistry entries for module ALL EventLoopMgr WARNING Unable to locate service "EventSelector" EventLoopMgr WARNING No events will be processed from external input. -HistogramPersis...WARNING Histograms saving not required. 
ApplicationMgr INFO Application Manager Initialized successfully ApplicationMgr Ready - AtlasDetectorID::initialize_from_dictionary - OK +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper INFO MultiRange built successfully to doubletR: MultiRange size is 261 RpcIdHelper INFO MultiRange built successfully to detectorElement: DetectorElement MultiRange size is 261 RpcIdHelper INFO MultiRange built successfully to rpcStrip: MultiRange size is 261 @@ -27,6 +13,5 @@ RpcIdHelper INFO Initializing RPC hash indices ... RpcIdHelper INFO The element hash max is 600 RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... -ClassIDSvc INFO getRegistryEntries: read 372 CLIDRegistry entries for module ALL test1 test INFO RPCcablingSvc obtained - hashmax = 3 diff --git a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/CompetingMuonClustersOnTrackCnv_p2_test.ref b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/CompetingMuonClustersOnTrackCnv_p2_test.ref index 309d5a1a4b96696a99981f7107372429c86a22e3..eafa59f97879b9e447a9cdea019dc6bb7238a360 100644 --- a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/CompetingMuonClustersOnTrackCnv_p2_test.ref +++ b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/CompetingMuonClustersOnTrackCnv_p2_test.ref @@ -1,18 +1,6 @@ -Initializing Gaudi ApplicationMgr using job opts /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # =======> /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # (1,1): ApplicationMgr.Dlls += ["StoreGate", "CLIDComps"] -JobOptionsSvc INFO # (2,1): ApplicationMgr.ExtSvc += ["StoreGateSvc", "StoreGateSvc/DetectorStore", "StoreGateSvc/ConditionStore", "Athena::RCUSvc"] -JobOptionsSvc INFO Job options successfully read in from 
/home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -ApplicationMgr SUCCESS -==================================================================================================================================== - Welcome to ApplicationMgr (GaudiCoreSvc v34r0) - running on pcumass4 on Wed Nov 4 09:34:39 2020 -==================================================================================================================================== -ApplicationMgr INFO Successfully loaded modules : StoreGate, CLIDComps ApplicationMgr INFO Application Manager Configured successfully -ClassIDSvc INFO getRegistryEntries: read 12361 CLIDRegistry entries for module ALL EventLoopMgr WARNING Unable to locate service "EventSelector" EventLoopMgr WARNING No events will be processed from external input. ApplicationMgr INFO Application Manager Initialized successfully @@ -22,22 +10,7 @@ ApplicationMgr Ready Volume pmdum2 not found: returning 0 Volume pmdum3 not found: returning 0 INFO Initialize from dictionary - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG pixel decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,0,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 0 1 0 2 mode enumerated - DEBUG bec_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG lay_disk decode 0 vals 0:2 mask/zero mask/shift/bits/offset 3 fe7fffffffffffff 55 2 7 indexes mode both_bounded - DEBUG lay_disk_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_mod decode 0 vals 0:51 mask/zero mask/shift/bits/offset 3f ff81ffffffffffff 49 6 
9 indexes mode both_bounded - DEBUG phi_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG eta_mod decode 1 vals -6:6 mask/zero mask/shift/bits/offset f fffe1fffffffffff 45 4 15 indexes mode both_bounded - DEBUG eta_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_index decode 0 vals 0:327 mask/zero mask/shift/bits/offset 1ff ffffe00fffffffff 36 9 19 indexes mode both_bounded - DEBUG eta_index decode 0 vals 0:191 mask/zero mask/shift/bits/offset ff fffffff00fffffff 28 8 28 indexes mode both_bounded - DEBUG bec_eta_mod decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded indet 1 2,4,5,7,10,11,12,13 7 1fffffffffffffff 61 3 0 min/max 2 13 values 2 4 5 7 10 11 12 13 indexes 0 0 1 2 0 3 0 0 4 5 6 7 indices 8 prev 0 next 0 mode enumerated cont mode none pixel 1 1:3 3 e7ffffffffffffff 59 2 3 min/max 1 3 values indexes indices 3 prev 0 next 0 mode both_bounded cont mode none bec 1 -2,0,2 3 f9ffffffffffffff 57 2 5 min/max -2 2 values -2 0 2 indexes 0 0 1 0 2 indices 3 prev 0 next 0 mode enumerated cont mode none @@ -60,87 +33,28 @@ phi_module 4 eta_module 5 phi_index 6 eta_index 7 - DEBUG PixelID::initialize_from_dict Set barrel field values: 0 - DEBUG PixelID::initialize_from_dict Set dbm field values: -999,999 - DEBUG PixelID::initialize_from_dict Found field values: InDet/Pixel 2/1 - DEBUG PixelID::init_neighbors - DEBUG PixelID::initialize_from_dict - DEBUG Wafer range -> 2/1/0/0/0:21/-6:6 | 2/1/0/1/0:37/-6:6 | 2/1/0/2/0:51/-6:6 | 2/1/-2,2/0:2/0:47/0 - DEBUG Pixel range -> 2/1/0/0/0:21/-6:6/0:327/0:191 | 2/1/0/1/0:37/-6:6/0:327/0:143 | 2/1/0/2/0:51/-6:6/0:327/0:143 | 2/1/-2,2/0:2/0:47/0/0:327/0:143 INFO SCT_ID::initialize_from_dictionary AtlasDetectorID::initialize_from_dictionary - OK INFO Initialize from dictionary cout 0 - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields 
for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG trt decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,-1,1,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 1 0 2 3 mode enumerated - DEBUG phi_mod decode 0 vals 0:31 mask/zero mask/shift/bits/offset 1f fe0fffffffffffff 52 5 7 indexes mode both_bounded - DEBUG lay_wheel decode 0 vals 0:17 mask/zero mask/shift/bits/offset 1f fff07fffffffffff 47 5 12 indexes mode both_bounded - DEBUG str_lay decode 0 vals 0:29 mask/zero mask/shift/bits/offset 1f ffff83ffffffffff 42 5 17 indexes mode both_bounded - DEBUG straw decode 0 vals 0:28 mask/zero mask/shift/bits/offset 1f fffffc1fffffffff 37 5 22 indexes mode both_bounded - DEBUG TRT_ID::initialize_from_dict Set barrel field values: -1,1 - DEBUG TRT_ID::initialize_from_dict Found field values: InDet/TRT 2/3 - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG TRT_ID::init_hashes module hash max 1344 - DEBUG TRT_ID::init_hashes straw layer hash max 19008 - DEBUG TRT_ID::init_hashes straw hash max 424576 - DEBUG TRT_ID::initialize_from_dict - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG Straw layer range -> 2/3/-1,1/0:31/0/0 | 2/3/-1,1/0:31/0/1:4 | 2/3/-1,1/0:31/0/5:9 | 2/3/-1,1/0:31/0/10:14 | 2/3/-1,1/0:31/0/15:17 | 2/3/-1,1/0:31/0/18 | 2/3/-1,1/0:31/1/0 | 2/3/-1,1/0:31/1/1:5 | 2/3/-1,1/0:31/1/6:10 | 2/3/-1,1/0:31/1/11:15 | 2/3/-1,1/0:31/1/16:20 | 2/3/-1,1/0:31/1/21:22 | 2/3/-1,1/0:31/1/23 | 2/3/-1,1/0:31/2/0 | 2/3/-1,1/0:31/2/1:4 | 2/3/-1,1/0:31/2/5:9 | 2/3/-1,1/0:31/2/10:14 | 2/3/-1,1/0:31/2/15:19 | 2/3/-1,1/0:31/2/20:24 | 2/3/-1,1/0:31/2/25:28 | 
2/3/-1,1/0:31/2/29 | 2/3/-2,2/0:31/0:5/0:15 | 2/3/-2,2/0:31/6:13/0:7 | 2/3/-2,2/0:31/14:17/0:15 - DEBUG Straw range -> 2/3/-1,1/0:31/0/0/0:14 | 2/3/-1,1/0:31/0/1:4/0:15 | 2/3/-1,1/0:31/0/5:9/0:16 | 2/3/-1,1/0:31/0/10:14/0:17 | 2/3/-1,1/0:31/0/15:17/0:18 | 2/3/-1,1/0:31/0/18/0:17 | 2/3/-1,1/0:31/1/0/0:18 | 2/3/-1,1/0:31/1/1:5/0:19 | 2/3/-1,1/0:31/1/6:10/0:20 | 2/3/-1,1/0:31/1/11:15/0:21 | 2/3/-1,1/0:31/1/16:20/0:22 | 2/3/-1,1/0:31/1/21:22/0:23 | 2/3/-1,1/0:31/1/23/0:22 | 2/3/-1,1/0:31/2/0/0:22 | 2/3/-1,1/0:31/2/1:4/0:23 | 2/3/-1,1/0:31/2/5:9/0:24 | 2/3/-1,1/0:31/2/10:14/0:25 | 2/3/-1,1/0:31/2/15:19/0:26 | 2/3/-1,1/0:31/2/20:24/0:27 | 2/3/-1,1/0:31/2/25:28/0:28 | 2/3/-1,1/0:31/2/29/0:27 | 2/3/-2,2/0:31/0:5/0:15/0:23 | 2/3/-2,2/0:31/6:13/0:7/0:23 | 2/3/-2,2/0:31/14:17/0:15/0:17 - AtlasDetectorID::initialize_from_dictionary - OK -CscIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -CscIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -CscIdHelper INFO MultiRange built successfully to cscStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK CscIdHelper INFO Initializing CSC hash indices ... -CscIdHelper INFO The element hash max is 32 -CscIdHelper INFO The detector element hash max is 64 CscIdHelper INFO The channel hash max is 61440 CscIdHelper INFO Initializing CSC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -RpcIdHelper INFO MultiRange built successfully to doubletR: MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to detectorElement: DetectorElement MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to rpcStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper INFO Initializing RPC hash indices ... 
-RpcIdHelper INFO The element hash max is 600 -RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -TgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper INFO Initializing TGC hash indices ... -TgcIdHelper INFO The element hash max is 1578 -TgcIdHelper INFO The detector element hash max is 1578 TgcIdHelper INFO Initializing TGC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -MdtIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MdtIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 261 -MdtIdHelper INFO MultiRange built successfully to tube: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper INFO Initializing MDT hash indices ... -MdtIdHelper INFO The element hash max is 1188 -MdtIdHelper INFO The detector element hash max is 2328 MdtIdHelper INFO Initializing MDT hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -sTgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -sTgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 253 -sTgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK sTgcIdHelper INFO Initializing sTGC hash indices ... -sTgcIdHelper INFO The element hash max is 96 -sTgcIdHelper INFO The detector element hash max is 192 sTgcIdHelper INFO Initializing sTGC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -MmIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MmIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -MmIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MmIdHelper INFO Initializing MicroMegas hash indices ... -MmIdHelper INFO The element hash max is 64 -MmIdHelper INFO The detector element hash max is 128 MmIdHelper INFO Initializing MicroMegas hash indices for finding neighbors ... AtlasDetectorID::initialize_from_dictionary - OK -ClassIDSvc INFO getRegistryEntries: read 379 CLIDRegistry entries for module ALL test1 -ClassIDSvc INFO getRegistryEntries: read 320 CLIDRegistry entries for module ALL -ClassIDSvc INFO getRegistryEntries: read 170 CLIDRegistry entries for module ALL diff --git a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/CscClusterOnTrackCnv_p2_test.ref b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/CscClusterOnTrackCnv_p2_test.ref index 43c07e16f5d6b16f88d482d7e34635c9f0093691..eafa59f97879b9e447a9cdea019dc6bb7238a360 100644 --- a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/CscClusterOnTrackCnv_p2_test.ref +++ b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/CscClusterOnTrackCnv_p2_test.ref @@ -1,18 +1,6 @@ -Initializing Gaudi ApplicationMgr using job opts /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # =======> /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # (1,1): ApplicationMgr.Dlls += ["StoreGate", "CLIDComps"] -JobOptionsSvc INFO # (2,1): ApplicationMgr.ExtSvc += ["StoreGateSvc", "StoreGateSvc/DetectorStore", "StoreGateSvc/ConditionStore", "Athena::RCUSvc"] -JobOptionsSvc INFO Job options successfully read in from 
/home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -ApplicationMgr SUCCESS -==================================================================================================================================== - Welcome to ApplicationMgr (GaudiCoreSvc v34r0) - running on pcumass4 on Wed Nov 4 09:34:27 2020 -==================================================================================================================================== -ApplicationMgr INFO Successfully loaded modules : StoreGate, CLIDComps ApplicationMgr INFO Application Manager Configured successfully -ClassIDSvc INFO getRegistryEntries: read 12361 CLIDRegistry entries for module ALL EventLoopMgr WARNING Unable to locate service "EventSelector" EventLoopMgr WARNING No events will be processed from external input. ApplicationMgr INFO Application Manager Initialized successfully @@ -22,22 +10,7 @@ ApplicationMgr Ready Volume pmdum2 not found: returning 0 Volume pmdum3 not found: returning 0 INFO Initialize from dictionary - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG pixel decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,0,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 0 1 0 2 mode enumerated - DEBUG bec_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG lay_disk decode 0 vals 0:2 mask/zero mask/shift/bits/offset 3 fe7fffffffffffff 55 2 7 indexes mode both_bounded - DEBUG lay_disk_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_mod decode 0 vals 0:51 mask/zero mask/shift/bits/offset 3f ff81ffffffffffff 49 6 
9 indexes mode both_bounded - DEBUG phi_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG eta_mod decode 1 vals -6:6 mask/zero mask/shift/bits/offset f fffe1fffffffffff 45 4 15 indexes mode both_bounded - DEBUG eta_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_index decode 0 vals 0:327 mask/zero mask/shift/bits/offset 1ff ffffe00fffffffff 36 9 19 indexes mode both_bounded - DEBUG eta_index decode 0 vals 0:191 mask/zero mask/shift/bits/offset ff fffffff00fffffff 28 8 28 indexes mode both_bounded - DEBUG bec_eta_mod decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded indet 1 2,4,5,7,10,11,12,13 7 1fffffffffffffff 61 3 0 min/max 2 13 values 2 4 5 7 10 11 12 13 indexes 0 0 1 2 0 3 0 0 4 5 6 7 indices 8 prev 0 next 0 mode enumerated cont mode none pixel 1 1:3 3 e7ffffffffffffff 59 2 3 min/max 1 3 values indexes indices 3 prev 0 next 0 mode both_bounded cont mode none bec 1 -2,0,2 3 f9ffffffffffffff 57 2 5 min/max -2 2 values -2 0 2 indexes 0 0 1 0 2 indices 3 prev 0 next 0 mode enumerated cont mode none @@ -60,87 +33,28 @@ phi_module 4 eta_module 5 phi_index 6 eta_index 7 - DEBUG PixelID::initialize_from_dict Set barrel field values: 0 - DEBUG PixelID::initialize_from_dict Set dbm field values: -999,999 - DEBUG PixelID::initialize_from_dict Found field values: InDet/Pixel 2/1 - DEBUG PixelID::init_neighbors - DEBUG PixelID::initialize_from_dict - DEBUG Wafer range -> 2/1/0/0/0:21/-6:6 | 2/1/0/1/0:37/-6:6 | 2/1/0/2/0:51/-6:6 | 2/1/-2,2/0:2/0:47/0 - DEBUG Pixel range -> 2/1/0/0/0:21/-6:6/0:327/0:191 | 2/1/0/1/0:37/-6:6/0:327/0:143 | 2/1/0/2/0:51/-6:6/0:327/0:143 | 2/1/-2,2/0:2/0:47/0/0:327/0:143 INFO SCT_ID::initialize_from_dictionary AtlasDetectorID::initialize_from_dictionary - OK INFO Initialize from dictionary cout 0 - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields 
for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG trt decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,-1,1,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 1 0 2 3 mode enumerated - DEBUG phi_mod decode 0 vals 0:31 mask/zero mask/shift/bits/offset 1f fe0fffffffffffff 52 5 7 indexes mode both_bounded - DEBUG lay_wheel decode 0 vals 0:17 mask/zero mask/shift/bits/offset 1f fff07fffffffffff 47 5 12 indexes mode both_bounded - DEBUG str_lay decode 0 vals 0:29 mask/zero mask/shift/bits/offset 1f ffff83ffffffffff 42 5 17 indexes mode both_bounded - DEBUG straw decode 0 vals 0:28 mask/zero mask/shift/bits/offset 1f fffffc1fffffffff 37 5 22 indexes mode both_bounded - DEBUG TRT_ID::initialize_from_dict Set barrel field values: -1,1 - DEBUG TRT_ID::initialize_from_dict Found field values: InDet/TRT 2/3 - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG TRT_ID::init_hashes module hash max 1344 - DEBUG TRT_ID::init_hashes straw layer hash max 19008 - DEBUG TRT_ID::init_hashes straw hash max 424576 - DEBUG TRT_ID::initialize_from_dict - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG Straw layer range -> 2/3/-1,1/0:31/0/0 | 2/3/-1,1/0:31/0/1:4 | 2/3/-1,1/0:31/0/5:9 | 2/3/-1,1/0:31/0/10:14 | 2/3/-1,1/0:31/0/15:17 | 2/3/-1,1/0:31/0/18 | 2/3/-1,1/0:31/1/0 | 2/3/-1,1/0:31/1/1:5 | 2/3/-1,1/0:31/1/6:10 | 2/3/-1,1/0:31/1/11:15 | 2/3/-1,1/0:31/1/16:20 | 2/3/-1,1/0:31/1/21:22 | 2/3/-1,1/0:31/1/23 | 2/3/-1,1/0:31/2/0 | 2/3/-1,1/0:31/2/1:4 | 2/3/-1,1/0:31/2/5:9 | 2/3/-1,1/0:31/2/10:14 | 2/3/-1,1/0:31/2/15:19 | 2/3/-1,1/0:31/2/20:24 | 2/3/-1,1/0:31/2/25:28 | 
2/3/-1,1/0:31/2/29 | 2/3/-2,2/0:31/0:5/0:15 | 2/3/-2,2/0:31/6:13/0:7 | 2/3/-2,2/0:31/14:17/0:15 - DEBUG Straw range -> 2/3/-1,1/0:31/0/0/0:14 | 2/3/-1,1/0:31/0/1:4/0:15 | 2/3/-1,1/0:31/0/5:9/0:16 | 2/3/-1,1/0:31/0/10:14/0:17 | 2/3/-1,1/0:31/0/15:17/0:18 | 2/3/-1,1/0:31/0/18/0:17 | 2/3/-1,1/0:31/1/0/0:18 | 2/3/-1,1/0:31/1/1:5/0:19 | 2/3/-1,1/0:31/1/6:10/0:20 | 2/3/-1,1/0:31/1/11:15/0:21 | 2/3/-1,1/0:31/1/16:20/0:22 | 2/3/-1,1/0:31/1/21:22/0:23 | 2/3/-1,1/0:31/1/23/0:22 | 2/3/-1,1/0:31/2/0/0:22 | 2/3/-1,1/0:31/2/1:4/0:23 | 2/3/-1,1/0:31/2/5:9/0:24 | 2/3/-1,1/0:31/2/10:14/0:25 | 2/3/-1,1/0:31/2/15:19/0:26 | 2/3/-1,1/0:31/2/20:24/0:27 | 2/3/-1,1/0:31/2/25:28/0:28 | 2/3/-1,1/0:31/2/29/0:27 | 2/3/-2,2/0:31/0:5/0:15/0:23 | 2/3/-2,2/0:31/6:13/0:7/0:23 | 2/3/-2,2/0:31/14:17/0:15/0:17 - AtlasDetectorID::initialize_from_dictionary - OK -CscIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -CscIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -CscIdHelper INFO MultiRange built successfully to cscStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK CscIdHelper INFO Initializing CSC hash indices ... -CscIdHelper INFO The element hash max is 32 -CscIdHelper INFO The detector element hash max is 64 CscIdHelper INFO The channel hash max is 61440 CscIdHelper INFO Initializing CSC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -RpcIdHelper INFO MultiRange built successfully to doubletR: MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to detectorElement: DetectorElement MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to rpcStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper INFO Initializing RPC hash indices ... 
-RpcIdHelper INFO The element hash max is 600 -RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -TgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper INFO Initializing TGC hash indices ... -TgcIdHelper INFO The element hash max is 1578 -TgcIdHelper INFO The detector element hash max is 1578 TgcIdHelper INFO Initializing TGC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -MdtIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MdtIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 261 -MdtIdHelper INFO MultiRange built successfully to tube: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper INFO Initializing MDT hash indices ... -MdtIdHelper INFO The element hash max is 1188 -MdtIdHelper INFO The detector element hash max is 2328 MdtIdHelper INFO Initializing MDT hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -sTgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -sTgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 253 -sTgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK sTgcIdHelper INFO Initializing sTGC hash indices ... -sTgcIdHelper INFO The element hash max is 96 -sTgcIdHelper INFO The detector element hash max is 192 sTgcIdHelper INFO Initializing sTGC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -MmIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MmIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -MmIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MmIdHelper INFO Initializing MicroMegas hash indices ... -MmIdHelper INFO The element hash max is 64 -MmIdHelper INFO The detector element hash max is 128 MmIdHelper INFO Initializing MicroMegas hash indices for finding neighbors ... AtlasDetectorID::initialize_from_dictionary - OK -ClassIDSvc INFO getRegistryEntries: read 379 CLIDRegistry entries for module ALL test1 -ClassIDSvc INFO getRegistryEntries: read 320 CLIDRegistry entries for module ALL -ClassIDSvc INFO getRegistryEntries: read 170 CLIDRegistry entries for module ALL diff --git a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/CscPrepDataContainerCnv_p1_test.ref b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/CscPrepDataContainerCnv_p1_test.ref index 54c1ab9d9c33189c409ea8cebf49e05632695d2c..eafa59f97879b9e447a9cdea019dc6bb7238a360 100644 --- a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/CscPrepDataContainerCnv_p1_test.ref +++ b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/CscPrepDataContainerCnv_p1_test.ref @@ -1,18 +1,6 @@ -Initializing Gaudi ApplicationMgr using job opts /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # =======> /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # (1,1): ApplicationMgr.Dlls += ["StoreGate", "CLIDComps"] -JobOptionsSvc INFO # (2,1): ApplicationMgr.ExtSvc += ["StoreGateSvc", "StoreGateSvc/DetectorStore", "StoreGateSvc/ConditionStore", "Athena::RCUSvc"] -JobOptionsSvc INFO Job options successfully read in from 
/home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -ApplicationMgr SUCCESS -==================================================================================================================================== - Welcome to ApplicationMgr (GaudiCoreSvc v34r0) - running on pcumass4 on Wed Nov 4 09:34:57 2020 -==================================================================================================================================== -ApplicationMgr INFO Successfully loaded modules : StoreGate, CLIDComps ApplicationMgr INFO Application Manager Configured successfully -ClassIDSvc INFO getRegistryEntries: read 12360 CLIDRegistry entries for module ALL EventLoopMgr WARNING Unable to locate service "EventSelector" EventLoopMgr WARNING No events will be processed from external input. ApplicationMgr INFO Application Manager Initialized successfully @@ -22,22 +10,7 @@ ApplicationMgr Ready Volume pmdum2 not found: returning 0 Volume pmdum3 not found: returning 0 INFO Initialize from dictionary - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG pixel decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,0,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 0 1 0 2 mode enumerated - DEBUG bec_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG lay_disk decode 0 vals 0:2 mask/zero mask/shift/bits/offset 3 fe7fffffffffffff 55 2 7 indexes mode both_bounded - DEBUG lay_disk_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_mod decode 0 vals 0:51 mask/zero mask/shift/bits/offset 3f ff81ffffffffffff 49 6 
9 indexes mode both_bounded - DEBUG phi_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG eta_mod decode 1 vals -6:6 mask/zero mask/shift/bits/offset f fffe1fffffffffff 45 4 15 indexes mode both_bounded - DEBUG eta_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_index decode 0 vals 0:327 mask/zero mask/shift/bits/offset 1ff ffffe00fffffffff 36 9 19 indexes mode both_bounded - DEBUG eta_index decode 0 vals 0:191 mask/zero mask/shift/bits/offset ff fffffff00fffffff 28 8 28 indexes mode both_bounded - DEBUG bec_eta_mod decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded indet 1 2,4,5,7,10,11,12,13 7 1fffffffffffffff 61 3 0 min/max 2 13 values 2 4 5 7 10 11 12 13 indexes 0 0 1 2 0 3 0 0 4 5 6 7 indices 8 prev 0 next 0 mode enumerated cont mode none pixel 1 1:3 3 e7ffffffffffffff 59 2 3 min/max 1 3 values indexes indices 3 prev 0 next 0 mode both_bounded cont mode none bec 1 -2,0,2 3 f9ffffffffffffff 57 2 5 min/max -2 2 values -2 0 2 indexes 0 0 1 0 2 indices 3 prev 0 next 0 mode enumerated cont mode none @@ -60,85 +33,28 @@ phi_module 4 eta_module 5 phi_index 6 eta_index 7 - DEBUG PixelID::initialize_from_dict Set barrel field values: 0 - DEBUG PixelID::initialize_from_dict Set dbm field values: -999,999 - DEBUG PixelID::initialize_from_dict Found field values: InDet/Pixel 2/1 - DEBUG PixelID::init_neighbors - DEBUG PixelID::initialize_from_dict - DEBUG Wafer range -> 2/1/0/0/0:21/-6:6 | 2/1/0/1/0:37/-6:6 | 2/1/0/2/0:51/-6:6 | 2/1/-2,2/0:2/0:47/0 - DEBUG Pixel range -> 2/1/0/0/0:21/-6:6/0:327/0:191 | 2/1/0/1/0:37/-6:6/0:327/0:143 | 2/1/0/2/0:51/-6:6/0:327/0:143 | 2/1/-2,2/0:2/0:47/0/0:327/0:143 INFO SCT_ID::initialize_from_dictionary AtlasDetectorID::initialize_from_dictionary - OK INFO Initialize from dictionary cout 0 - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields 
for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG trt decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,-1,1,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 1 0 2 3 mode enumerated - DEBUG phi_mod decode 0 vals 0:31 mask/zero mask/shift/bits/offset 1f fe0fffffffffffff 52 5 7 indexes mode both_bounded - DEBUG lay_wheel decode 0 vals 0:17 mask/zero mask/shift/bits/offset 1f fff07fffffffffff 47 5 12 indexes mode both_bounded - DEBUG str_lay decode 0 vals 0:29 mask/zero mask/shift/bits/offset 1f ffff83ffffffffff 42 5 17 indexes mode both_bounded - DEBUG straw decode 0 vals 0:28 mask/zero mask/shift/bits/offset 1f fffffc1fffffffff 37 5 22 indexes mode both_bounded - DEBUG TRT_ID::initialize_from_dict Set barrel field values: -1,1 - DEBUG TRT_ID::initialize_from_dict Found field values: InDet/TRT 2/3 - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG TRT_ID::init_hashes module hash max 1344 - DEBUG TRT_ID::init_hashes straw layer hash max 19008 - DEBUG TRT_ID::init_hashes straw hash max 424576 - DEBUG TRT_ID::initialize_from_dict - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG Straw layer range -> 2/3/-1,1/0:31/0/0 | 2/3/-1,1/0:31/0/1:4 | 2/3/-1,1/0:31/0/5:9 | 2/3/-1,1/0:31/0/10:14 | 2/3/-1,1/0:31/0/15:17 | 2/3/-1,1/0:31/0/18 | 2/3/-1,1/0:31/1/0 | 2/3/-1,1/0:31/1/1:5 | 2/3/-1,1/0:31/1/6:10 | 2/3/-1,1/0:31/1/11:15 | 2/3/-1,1/0:31/1/16:20 | 2/3/-1,1/0:31/1/21:22 | 2/3/-1,1/0:31/1/23 | 2/3/-1,1/0:31/2/0 | 2/3/-1,1/0:31/2/1:4 | 2/3/-1,1/0:31/2/5:9 | 2/3/-1,1/0:31/2/10:14 | 2/3/-1,1/0:31/2/15:19 | 2/3/-1,1/0:31/2/20:24 | 2/3/-1,1/0:31/2/25:28 | 
2/3/-1,1/0:31/2/29 | 2/3/-2,2/0:31/0:5/0:15 | 2/3/-2,2/0:31/6:13/0:7 | 2/3/-2,2/0:31/14:17/0:15 - DEBUG Straw range -> 2/3/-1,1/0:31/0/0/0:14 | 2/3/-1,1/0:31/0/1:4/0:15 | 2/3/-1,1/0:31/0/5:9/0:16 | 2/3/-1,1/0:31/0/10:14/0:17 | 2/3/-1,1/0:31/0/15:17/0:18 | 2/3/-1,1/0:31/0/18/0:17 | 2/3/-1,1/0:31/1/0/0:18 | 2/3/-1,1/0:31/1/1:5/0:19 | 2/3/-1,1/0:31/1/6:10/0:20 | 2/3/-1,1/0:31/1/11:15/0:21 | 2/3/-1,1/0:31/1/16:20/0:22 | 2/3/-1,1/0:31/1/21:22/0:23 | 2/3/-1,1/0:31/1/23/0:22 | 2/3/-1,1/0:31/2/0/0:22 | 2/3/-1,1/0:31/2/1:4/0:23 | 2/3/-1,1/0:31/2/5:9/0:24 | 2/3/-1,1/0:31/2/10:14/0:25 | 2/3/-1,1/0:31/2/15:19/0:26 | 2/3/-1,1/0:31/2/20:24/0:27 | 2/3/-1,1/0:31/2/25:28/0:28 | 2/3/-1,1/0:31/2/29/0:27 | 2/3/-2,2/0:31/0:5/0:15/0:23 | 2/3/-2,2/0:31/6:13/0:7/0:23 | 2/3/-2,2/0:31/14:17/0:15/0:17 - AtlasDetectorID::initialize_from_dictionary - OK -CscIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -CscIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -CscIdHelper INFO MultiRange built successfully to cscStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK CscIdHelper INFO Initializing CSC hash indices ... -CscIdHelper INFO The element hash max is 32 -CscIdHelper INFO The detector element hash max is 64 CscIdHelper INFO The channel hash max is 61440 CscIdHelper INFO Initializing CSC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -RpcIdHelper INFO MultiRange built successfully to doubletR: MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to detectorElement: DetectorElement MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to rpcStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper INFO Initializing RPC hash indices ... 
-RpcIdHelper INFO The element hash max is 600 -RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -TgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper INFO Initializing TGC hash indices ... -TgcIdHelper INFO The element hash max is 1578 -TgcIdHelper INFO The detector element hash max is 1578 TgcIdHelper INFO Initializing TGC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -MdtIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MdtIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 261 -MdtIdHelper INFO MultiRange built successfully to tube: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper INFO Initializing MDT hash indices ... -MdtIdHelper INFO The element hash max is 1188 -MdtIdHelper INFO The detector element hash max is 2328 MdtIdHelper INFO Initializing MDT hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -sTgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -sTgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 253 -sTgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK sTgcIdHelper INFO Initializing sTGC hash indices ... -sTgcIdHelper INFO The element hash max is 96 -sTgcIdHelper INFO The detector element hash max is 192 sTgcIdHelper INFO Initializing sTGC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -MmIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MmIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -MmIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MmIdHelper INFO Initializing MicroMegas hash indices ... -MmIdHelper INFO The element hash max is 64 -MmIdHelper INFO The detector element hash max is 128 MmIdHelper INFO Initializing MicroMegas hash indices for finding neighbors ... AtlasDetectorID::initialize_from_dictionary - OK -ClassIDSvc INFO getRegistryEntries: read 379 CLIDRegistry entries for module ALL test1 diff --git a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/CscPrepDataContainerCnv_p2_test.ref b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/CscPrepDataContainerCnv_p2_test.ref index f8e21d8dc42f6ac9851732dae83d45f52a34e48e..eafa59f97879b9e447a9cdea019dc6bb7238a360 100644 --- a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/CscPrepDataContainerCnv_p2_test.ref +++ b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/CscPrepDataContainerCnv_p2_test.ref @@ -1,18 +1,6 @@ -Initializing Gaudi ApplicationMgr using job opts /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # =======> /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # (1,1): ApplicationMgr.Dlls += ["StoreGate", "CLIDComps"] -JobOptionsSvc INFO # (2,1): ApplicationMgr.ExtSvc += ["StoreGateSvc", "StoreGateSvc/DetectorStore", "StoreGateSvc/ConditionStore", "Athena::RCUSvc"] -JobOptionsSvc INFO Job options successfully read in from /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -ApplicationMgr SUCCESS 
-==================================================================================================================================== - Welcome to ApplicationMgr (GaudiCoreSvc v34r0) - running on pcumass4 on Wed Nov 4 09:34:59 2020 -==================================================================================================================================== -ApplicationMgr INFO Successfully loaded modules : StoreGate, CLIDComps ApplicationMgr INFO Application Manager Configured successfully -ClassIDSvc INFO getRegistryEntries: read 12360 CLIDRegistry entries for module ALL EventLoopMgr WARNING Unable to locate service "EventSelector" EventLoopMgr WARNING No events will be processed from external input. ApplicationMgr INFO Application Manager Initialized successfully @@ -22,22 +10,7 @@ ApplicationMgr Ready Volume pmdum2 not found: returning 0 Volume pmdum3 not found: returning 0 INFO Initialize from dictionary - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG pixel decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,0,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 0 1 0 2 mode enumerated - DEBUG bec_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG lay_disk decode 0 vals 0:2 mask/zero mask/shift/bits/offset 3 fe7fffffffffffff 55 2 7 indexes mode both_bounded - DEBUG lay_disk_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_mod decode 0 vals 0:51 mask/zero mask/shift/bits/offset 3f ff81ffffffffffff 49 6 9 indexes mode both_bounded - DEBUG phi_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode 
both_bounded - DEBUG eta_mod decode 1 vals -6:6 mask/zero mask/shift/bits/offset f fffe1fffffffffff 45 4 15 indexes mode both_bounded - DEBUG eta_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_index decode 0 vals 0:327 mask/zero mask/shift/bits/offset 1ff ffffe00fffffffff 36 9 19 indexes mode both_bounded - DEBUG eta_index decode 0 vals 0:191 mask/zero mask/shift/bits/offset ff fffffff00fffffff 28 8 28 indexes mode both_bounded - DEBUG bec_eta_mod decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded indet 1 2,4,5,7,10,11,12,13 7 1fffffffffffffff 61 3 0 min/max 2 13 values 2 4 5 7 10 11 12 13 indexes 0 0 1 2 0 3 0 0 4 5 6 7 indices 8 prev 0 next 0 mode enumerated cont mode none pixel 1 1:3 3 e7ffffffffffffff 59 2 3 min/max 1 3 values indexes indices 3 prev 0 next 0 mode both_bounded cont mode none bec 1 -2,0,2 3 f9ffffffffffffff 57 2 5 min/max -2 2 values -2 0 2 indexes 0 0 1 0 2 indices 3 prev 0 next 0 mode enumerated cont mode none @@ -60,85 +33,28 @@ phi_module 4 eta_module 5 phi_index 6 eta_index 7 - DEBUG PixelID::initialize_from_dict Set barrel field values: 0 - DEBUG PixelID::initialize_from_dict Set dbm field values: -999,999 - DEBUG PixelID::initialize_from_dict Found field values: InDet/Pixel 2/1 - DEBUG PixelID::init_neighbors - DEBUG PixelID::initialize_from_dict - DEBUG Wafer range -> 2/1/0/0/0:21/-6:6 | 2/1/0/1/0:37/-6:6 | 2/1/0/2/0:51/-6:6 | 2/1/-2,2/0:2/0:47/0 - DEBUG Pixel range -> 2/1/0/0/0:21/-6:6/0:327/0:191 | 2/1/0/1/0:37/-6:6/0:327/0:143 | 2/1/0/2/0:51/-6:6/0:327/0:143 | 2/1/-2,2/0:2/0:47/0/0:327/0:143 INFO SCT_ID::initialize_from_dictionary AtlasDetectorID::initialize_from_dictionary - OK INFO Initialize from dictionary cout 0 - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 
indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG trt decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,-1,1,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 1 0 2 3 mode enumerated - DEBUG phi_mod decode 0 vals 0:31 mask/zero mask/shift/bits/offset 1f fe0fffffffffffff 52 5 7 indexes mode both_bounded - DEBUG lay_wheel decode 0 vals 0:17 mask/zero mask/shift/bits/offset 1f fff07fffffffffff 47 5 12 indexes mode both_bounded - DEBUG str_lay decode 0 vals 0:29 mask/zero mask/shift/bits/offset 1f ffff83ffffffffff 42 5 17 indexes mode both_bounded - DEBUG straw decode 0 vals 0:28 mask/zero mask/shift/bits/offset 1f fffffc1fffffffff 37 5 22 indexes mode both_bounded - DEBUG TRT_ID::initialize_from_dict Set barrel field values: -1,1 - DEBUG TRT_ID::initialize_from_dict Found field values: InDet/TRT 2/3 - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG TRT_ID::init_hashes module hash max 1344 - DEBUG TRT_ID::init_hashes straw layer hash max 19008 - DEBUG TRT_ID::init_hashes straw hash max 424576 - DEBUG TRT_ID::initialize_from_dict - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG Straw layer range -> 2/3/-1,1/0:31/0/0 | 2/3/-1,1/0:31/0/1:4 | 2/3/-1,1/0:31/0/5:9 | 2/3/-1,1/0:31/0/10:14 | 2/3/-1,1/0:31/0/15:17 | 2/3/-1,1/0:31/0/18 | 2/3/-1,1/0:31/1/0 | 2/3/-1,1/0:31/1/1:5 | 2/3/-1,1/0:31/1/6:10 | 2/3/-1,1/0:31/1/11:15 | 2/3/-1,1/0:31/1/16:20 | 2/3/-1,1/0:31/1/21:22 | 2/3/-1,1/0:31/1/23 | 2/3/-1,1/0:31/2/0 | 2/3/-1,1/0:31/2/1:4 | 2/3/-1,1/0:31/2/5:9 | 2/3/-1,1/0:31/2/10:14 | 2/3/-1,1/0:31/2/15:19 | 2/3/-1,1/0:31/2/20:24 | 2/3/-1,1/0:31/2/25:28 | 2/3/-1,1/0:31/2/29 | 2/3/-2,2/0:31/0:5/0:15 | 2/3/-2,2/0:31/6:13/0:7 | 2/3/-2,2/0:31/14:17/0:15 - DEBUG Straw range -> 
2/3/-1,1/0:31/0/0/0:14 | 2/3/-1,1/0:31/0/1:4/0:15 | 2/3/-1,1/0:31/0/5:9/0:16 | 2/3/-1,1/0:31/0/10:14/0:17 | 2/3/-1,1/0:31/0/15:17/0:18 | 2/3/-1,1/0:31/0/18/0:17 | 2/3/-1,1/0:31/1/0/0:18 | 2/3/-1,1/0:31/1/1:5/0:19 | 2/3/-1,1/0:31/1/6:10/0:20 | 2/3/-1,1/0:31/1/11:15/0:21 | 2/3/-1,1/0:31/1/16:20/0:22 | 2/3/-1,1/0:31/1/21:22/0:23 | 2/3/-1,1/0:31/1/23/0:22 | 2/3/-1,1/0:31/2/0/0:22 | 2/3/-1,1/0:31/2/1:4/0:23 | 2/3/-1,1/0:31/2/5:9/0:24 | 2/3/-1,1/0:31/2/10:14/0:25 | 2/3/-1,1/0:31/2/15:19/0:26 | 2/3/-1,1/0:31/2/20:24/0:27 | 2/3/-1,1/0:31/2/25:28/0:28 | 2/3/-1,1/0:31/2/29/0:27 | 2/3/-2,2/0:31/0:5/0:15/0:23 | 2/3/-2,2/0:31/6:13/0:7/0:23 | 2/3/-2,2/0:31/14:17/0:15/0:17 - AtlasDetectorID::initialize_from_dictionary - OK -CscIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -CscIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -CscIdHelper INFO MultiRange built successfully to cscStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK CscIdHelper INFO Initializing CSC hash indices ... -CscIdHelper INFO The element hash max is 32 -CscIdHelper INFO The detector element hash max is 64 CscIdHelper INFO The channel hash max is 61440 CscIdHelper INFO Initializing CSC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -RpcIdHelper INFO MultiRange built successfully to doubletR: MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to detectorElement: DetectorElement MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to rpcStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper INFO Initializing RPC hash indices ... -RpcIdHelper INFO The element hash max is 600 -RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -TgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper INFO Initializing TGC hash indices ... -TgcIdHelper INFO The element hash max is 1578 -TgcIdHelper INFO The detector element hash max is 1578 TgcIdHelper INFO Initializing TGC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -MdtIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MdtIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 261 -MdtIdHelper INFO MultiRange built successfully to tube: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper INFO Initializing MDT hash indices ... -MdtIdHelper INFO The element hash max is 1188 -MdtIdHelper INFO The detector element hash max is 2328 MdtIdHelper INFO Initializing MDT hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -sTgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -sTgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 253 -sTgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK sTgcIdHelper INFO Initializing sTGC hash indices ... -sTgcIdHelper INFO The element hash max is 96 -sTgcIdHelper INFO The detector element hash max is 192 sTgcIdHelper INFO Initializing sTGC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -MmIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MmIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -MmIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MmIdHelper INFO Initializing MicroMegas hash indices ... -MmIdHelper INFO The element hash max is 64 -MmIdHelper INFO The detector element hash max is 128 MmIdHelper INFO Initializing MicroMegas hash indices for finding neighbors ... AtlasDetectorID::initialize_from_dictionary - OK -ClassIDSvc INFO getRegistryEntries: read 379 CLIDRegistry entries for module ALL test1 diff --git a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/CscStripPrepDataContainerCnv_p1_test.ref b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/CscStripPrepDataContainerCnv_p1_test.ref index 3f1b0d76cd201bdbb47bf8574da218b87b1d1316..eafa59f97879b9e447a9cdea019dc6bb7238a360 100644 --- a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/CscStripPrepDataContainerCnv_p1_test.ref +++ b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/CscStripPrepDataContainerCnv_p1_test.ref @@ -1,18 +1,6 @@ -Initializing Gaudi ApplicationMgr using job opts /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # =======> /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # (1,1): ApplicationMgr.Dlls += ["StoreGate", "CLIDComps"] -JobOptionsSvc INFO # (2,1): ApplicationMgr.ExtSvc += ["StoreGateSvc", "StoreGateSvc/DetectorStore", "StoreGateSvc/ConditionStore", "Athena::RCUSvc"] -JobOptionsSvc INFO Job options successfully read in from /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -ApplicationMgr SUCCESS 
-==================================================================================================================================== - Welcome to ApplicationMgr (GaudiCoreSvc v34r0) - running on pcumass4 on Wed Nov 4 09:35:01 2020 -==================================================================================================================================== -ApplicationMgr INFO Successfully loaded modules : StoreGate, CLIDComps ApplicationMgr INFO Application Manager Configured successfully -ClassIDSvc INFO getRegistryEntries: read 12362 CLIDRegistry entries for module ALL EventLoopMgr WARNING Unable to locate service "EventSelector" EventLoopMgr WARNING No events will be processed from external input. ApplicationMgr INFO Application Manager Initialized successfully @@ -22,22 +10,7 @@ ApplicationMgr Ready Volume pmdum2 not found: returning 0 Volume pmdum3 not found: returning 0 INFO Initialize from dictionary - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG pixel decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,0,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 0 1 0 2 mode enumerated - DEBUG bec_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG lay_disk decode 0 vals 0:2 mask/zero mask/shift/bits/offset 3 fe7fffffffffffff 55 2 7 indexes mode both_bounded - DEBUG lay_disk_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_mod decode 0 vals 0:51 mask/zero mask/shift/bits/offset 3f ff81ffffffffffff 49 6 9 indexes mode both_bounded - DEBUG phi_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode 
both_bounded - DEBUG eta_mod decode 1 vals -6:6 mask/zero mask/shift/bits/offset f fffe1fffffffffff 45 4 15 indexes mode both_bounded - DEBUG eta_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_index decode 0 vals 0:327 mask/zero mask/shift/bits/offset 1ff ffffe00fffffffff 36 9 19 indexes mode both_bounded - DEBUG eta_index decode 0 vals 0:191 mask/zero mask/shift/bits/offset ff fffffff00fffffff 28 8 28 indexes mode both_bounded - DEBUG bec_eta_mod decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded indet 1 2,4,5,7,10,11,12,13 7 1fffffffffffffff 61 3 0 min/max 2 13 values 2 4 5 7 10 11 12 13 indexes 0 0 1 2 0 3 0 0 4 5 6 7 indices 8 prev 0 next 0 mode enumerated cont mode none pixel 1 1:3 3 e7ffffffffffffff 59 2 3 min/max 1 3 values indexes indices 3 prev 0 next 0 mode both_bounded cont mode none bec 1 -2,0,2 3 f9ffffffffffffff 57 2 5 min/max -2 2 values -2 0 2 indexes 0 0 1 0 2 indices 3 prev 0 next 0 mode enumerated cont mode none @@ -60,85 +33,28 @@ phi_module 4 eta_module 5 phi_index 6 eta_index 7 - DEBUG PixelID::initialize_from_dict Set barrel field values: 0 - DEBUG PixelID::initialize_from_dict Set dbm field values: -999,999 - DEBUG PixelID::initialize_from_dict Found field values: InDet/Pixel 2/1 - DEBUG PixelID::init_neighbors - DEBUG PixelID::initialize_from_dict - DEBUG Wafer range -> 2/1/0/0/0:21/-6:6 | 2/1/0/1/0:37/-6:6 | 2/1/0/2/0:51/-6:6 | 2/1/-2,2/0:2/0:47/0 - DEBUG Pixel range -> 2/1/0/0/0:21/-6:6/0:327/0:191 | 2/1/0/1/0:37/-6:6/0:327/0:143 | 2/1/0/2/0:51/-6:6/0:327/0:143 | 2/1/-2,2/0:2/0:47/0/0:327/0:143 INFO SCT_ID::initialize_from_dictionary AtlasDetectorID::initialize_from_dictionary - OK INFO Initialize from dictionary cout 0 - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 
indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG trt decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,-1,1,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 1 0 2 3 mode enumerated - DEBUG phi_mod decode 0 vals 0:31 mask/zero mask/shift/bits/offset 1f fe0fffffffffffff 52 5 7 indexes mode both_bounded - DEBUG lay_wheel decode 0 vals 0:17 mask/zero mask/shift/bits/offset 1f fff07fffffffffff 47 5 12 indexes mode both_bounded - DEBUG str_lay decode 0 vals 0:29 mask/zero mask/shift/bits/offset 1f ffff83ffffffffff 42 5 17 indexes mode both_bounded - DEBUG straw decode 0 vals 0:28 mask/zero mask/shift/bits/offset 1f fffffc1fffffffff 37 5 22 indexes mode both_bounded - DEBUG TRT_ID::initialize_from_dict Set barrel field values: -1,1 - DEBUG TRT_ID::initialize_from_dict Found field values: InDet/TRT 2/3 - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG TRT_ID::init_hashes module hash max 1344 - DEBUG TRT_ID::init_hashes straw layer hash max 19008 - DEBUG TRT_ID::init_hashes straw hash max 424576 - DEBUG TRT_ID::initialize_from_dict - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG Straw layer range -> 2/3/-1,1/0:31/0/0 | 2/3/-1,1/0:31/0/1:4 | 2/3/-1,1/0:31/0/5:9 | 2/3/-1,1/0:31/0/10:14 | 2/3/-1,1/0:31/0/15:17 | 2/3/-1,1/0:31/0/18 | 2/3/-1,1/0:31/1/0 | 2/3/-1,1/0:31/1/1:5 | 2/3/-1,1/0:31/1/6:10 | 2/3/-1,1/0:31/1/11:15 | 2/3/-1,1/0:31/1/16:20 | 2/3/-1,1/0:31/1/21:22 | 2/3/-1,1/0:31/1/23 | 2/3/-1,1/0:31/2/0 | 2/3/-1,1/0:31/2/1:4 | 2/3/-1,1/0:31/2/5:9 | 2/3/-1,1/0:31/2/10:14 | 2/3/-1,1/0:31/2/15:19 | 2/3/-1,1/0:31/2/20:24 | 2/3/-1,1/0:31/2/25:28 | 2/3/-1,1/0:31/2/29 | 2/3/-2,2/0:31/0:5/0:15 | 2/3/-2,2/0:31/6:13/0:7 | 2/3/-2,2/0:31/14:17/0:15 - DEBUG Straw range -> 
2/3/-1,1/0:31/0/0/0:14 | 2/3/-1,1/0:31/0/1:4/0:15 | 2/3/-1,1/0:31/0/5:9/0:16 | 2/3/-1,1/0:31/0/10:14/0:17 | 2/3/-1,1/0:31/0/15:17/0:18 | 2/3/-1,1/0:31/0/18/0:17 | 2/3/-1,1/0:31/1/0/0:18 | 2/3/-1,1/0:31/1/1:5/0:19 | 2/3/-1,1/0:31/1/6:10/0:20 | 2/3/-1,1/0:31/1/11:15/0:21 | 2/3/-1,1/0:31/1/16:20/0:22 | 2/3/-1,1/0:31/1/21:22/0:23 | 2/3/-1,1/0:31/1/23/0:22 | 2/3/-1,1/0:31/2/0/0:22 | 2/3/-1,1/0:31/2/1:4/0:23 | 2/3/-1,1/0:31/2/5:9/0:24 | 2/3/-1,1/0:31/2/10:14/0:25 | 2/3/-1,1/0:31/2/15:19/0:26 | 2/3/-1,1/0:31/2/20:24/0:27 | 2/3/-1,1/0:31/2/25:28/0:28 | 2/3/-1,1/0:31/2/29/0:27 | 2/3/-2,2/0:31/0:5/0:15/0:23 | 2/3/-2,2/0:31/6:13/0:7/0:23 | 2/3/-2,2/0:31/14:17/0:15/0:17 - AtlasDetectorID::initialize_from_dictionary - OK -CscIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -CscIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -CscIdHelper INFO MultiRange built successfully to cscStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK CscIdHelper INFO Initializing CSC hash indices ... -CscIdHelper INFO The element hash max is 32 -CscIdHelper INFO The detector element hash max is 64 CscIdHelper INFO The channel hash max is 61440 CscIdHelper INFO Initializing CSC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -RpcIdHelper INFO MultiRange built successfully to doubletR: MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to detectorElement: DetectorElement MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to rpcStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper INFO Initializing RPC hash indices ... -RpcIdHelper INFO The element hash max is 600 -RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -TgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper INFO Initializing TGC hash indices ... -TgcIdHelper INFO The element hash max is 1578 -TgcIdHelper INFO The detector element hash max is 1578 TgcIdHelper INFO Initializing TGC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -MdtIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MdtIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 261 -MdtIdHelper INFO MultiRange built successfully to tube: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper INFO Initializing MDT hash indices ... -MdtIdHelper INFO The element hash max is 1188 -MdtIdHelper INFO The detector element hash max is 2328 MdtIdHelper INFO Initializing MDT hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -sTgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -sTgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 253 -sTgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK sTgcIdHelper INFO Initializing sTGC hash indices ... -sTgcIdHelper INFO The element hash max is 96 -sTgcIdHelper INFO The detector element hash max is 192 sTgcIdHelper INFO Initializing sTGC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -MmIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MmIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -MmIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MmIdHelper INFO Initializing MicroMegas hash indices ... -MmIdHelper INFO The element hash max is 64 -MmIdHelper INFO The detector element hash max is 128 MmIdHelper INFO Initializing MicroMegas hash indices for finding neighbors ... AtlasDetectorID::initialize_from_dictionary - OK -ClassIDSvc INFO getRegistryEntries: read 379 CLIDRegistry entries for module ALL test1 diff --git a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/MMPrepDataContainerCnv_p1_test.ref b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/MMPrepDataContainerCnv_p1_test.ref index 3cd171c475c5b6de76afa2ab29a7a99fabb39915..eafa59f97879b9e447a9cdea019dc6bb7238a360 100644 --- a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/MMPrepDataContainerCnv_p1_test.ref +++ b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/MMPrepDataContainerCnv_p1_test.ref @@ -1,18 +1,6 @@ -Initializing Gaudi ApplicationMgr using job opts /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # =======> /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # (1,1): ApplicationMgr.Dlls += ["StoreGate", "CLIDComps"] -JobOptionsSvc INFO # (2,1): ApplicationMgr.ExtSvc += ["StoreGateSvc", "StoreGateSvc/DetectorStore", "StoreGateSvc/ConditionStore", "Athena::RCUSvc"] -JobOptionsSvc INFO Job options successfully read in from /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -ApplicationMgr SUCCESS 
-==================================================================================================================================== - Welcome to ApplicationMgr (GaudiCoreSvc v34r0) - running on pcumass4 on Wed Nov 4 09:35:20 2020 -==================================================================================================================================== -ApplicationMgr INFO Successfully loaded modules : StoreGate, CLIDComps ApplicationMgr INFO Application Manager Configured successfully -ClassIDSvc INFO getRegistryEntries: read 12360 CLIDRegistry entries for module ALL EventLoopMgr WARNING Unable to locate service "EventSelector" EventLoopMgr WARNING No events will be processed from external input. ApplicationMgr INFO Application Manager Initialized successfully @@ -22,22 +10,7 @@ ApplicationMgr Ready Volume pmdum2 not found: returning 0 Volume pmdum3 not found: returning 0 INFO Initialize from dictionary - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG pixel decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,0,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 0 1 0 2 mode enumerated - DEBUG bec_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG lay_disk decode 0 vals 0:2 mask/zero mask/shift/bits/offset 3 fe7fffffffffffff 55 2 7 indexes mode both_bounded - DEBUG lay_disk_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_mod decode 0 vals 0:51 mask/zero mask/shift/bits/offset 3f ff81ffffffffffff 49 6 9 indexes mode both_bounded - DEBUG phi_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode 
both_bounded - DEBUG eta_mod decode 1 vals -6:6 mask/zero mask/shift/bits/offset f fffe1fffffffffff 45 4 15 indexes mode both_bounded - DEBUG eta_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_index decode 0 vals 0:327 mask/zero mask/shift/bits/offset 1ff ffffe00fffffffff 36 9 19 indexes mode both_bounded - DEBUG eta_index decode 0 vals 0:191 mask/zero mask/shift/bits/offset ff fffffff00fffffff 28 8 28 indexes mode both_bounded - DEBUG bec_eta_mod decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded indet 1 2,4,5,7,10,11,12,13 7 1fffffffffffffff 61 3 0 min/max 2 13 values 2 4 5 7 10 11 12 13 indexes 0 0 1 2 0 3 0 0 4 5 6 7 indices 8 prev 0 next 0 mode enumerated cont mode none pixel 1 1:3 3 e7ffffffffffffff 59 2 3 min/max 1 3 values indexes indices 3 prev 0 next 0 mode both_bounded cont mode none bec 1 -2,0,2 3 f9ffffffffffffff 57 2 5 min/max -2 2 values -2 0 2 indexes 0 0 1 0 2 indices 3 prev 0 next 0 mode enumerated cont mode none @@ -60,85 +33,28 @@ phi_module 4 eta_module 5 phi_index 6 eta_index 7 - DEBUG PixelID::initialize_from_dict Set barrel field values: 0 - DEBUG PixelID::initialize_from_dict Set dbm field values: -999,999 - DEBUG PixelID::initialize_from_dict Found field values: InDet/Pixel 2/1 - DEBUG PixelID::init_neighbors - DEBUG PixelID::initialize_from_dict - DEBUG Wafer range -> 2/1/0/0/0:21/-6:6 | 2/1/0/1/0:37/-6:6 | 2/1/0/2/0:51/-6:6 | 2/1/-2,2/0:2/0:47/0 - DEBUG Pixel range -> 2/1/0/0/0:21/-6:6/0:327/0:191 | 2/1/0/1/0:37/-6:6/0:327/0:143 | 2/1/0/2/0:51/-6:6/0:327/0:143 | 2/1/-2,2/0:2/0:47/0/0:327/0:143 INFO SCT_ID::initialize_from_dictionary AtlasDetectorID::initialize_from_dictionary - OK INFO Initialize from dictionary cout 0 - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 
indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG trt decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,-1,1,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 1 0 2 3 mode enumerated - DEBUG phi_mod decode 0 vals 0:31 mask/zero mask/shift/bits/offset 1f fe0fffffffffffff 52 5 7 indexes mode both_bounded - DEBUG lay_wheel decode 0 vals 0:17 mask/zero mask/shift/bits/offset 1f fff07fffffffffff 47 5 12 indexes mode both_bounded - DEBUG str_lay decode 0 vals 0:29 mask/zero mask/shift/bits/offset 1f ffff83ffffffffff 42 5 17 indexes mode both_bounded - DEBUG straw decode 0 vals 0:28 mask/zero mask/shift/bits/offset 1f fffffc1fffffffff 37 5 22 indexes mode both_bounded - DEBUG TRT_ID::initialize_from_dict Set barrel field values: -1,1 - DEBUG TRT_ID::initialize_from_dict Found field values: InDet/TRT 2/3 - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG TRT_ID::init_hashes module hash max 1344 - DEBUG TRT_ID::init_hashes straw layer hash max 19008 - DEBUG TRT_ID::init_hashes straw hash max 424576 - DEBUG TRT_ID::initialize_from_dict - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG Straw layer range -> 2/3/-1,1/0:31/0/0 | 2/3/-1,1/0:31/0/1:4 | 2/3/-1,1/0:31/0/5:9 | 2/3/-1,1/0:31/0/10:14 | 2/3/-1,1/0:31/0/15:17 | 2/3/-1,1/0:31/0/18 | 2/3/-1,1/0:31/1/0 | 2/3/-1,1/0:31/1/1:5 | 2/3/-1,1/0:31/1/6:10 | 2/3/-1,1/0:31/1/11:15 | 2/3/-1,1/0:31/1/16:20 | 2/3/-1,1/0:31/1/21:22 | 2/3/-1,1/0:31/1/23 | 2/3/-1,1/0:31/2/0 | 2/3/-1,1/0:31/2/1:4 | 2/3/-1,1/0:31/2/5:9 | 2/3/-1,1/0:31/2/10:14 | 2/3/-1,1/0:31/2/15:19 | 2/3/-1,1/0:31/2/20:24 | 2/3/-1,1/0:31/2/25:28 | 2/3/-1,1/0:31/2/29 | 2/3/-2,2/0:31/0:5/0:15 | 2/3/-2,2/0:31/6:13/0:7 | 2/3/-2,2/0:31/14:17/0:15 - DEBUG Straw range -> 
2/3/-1,1/0:31/0/0/0:14 | 2/3/-1,1/0:31/0/1:4/0:15 | 2/3/-1,1/0:31/0/5:9/0:16 | 2/3/-1,1/0:31/0/10:14/0:17 | 2/3/-1,1/0:31/0/15:17/0:18 | 2/3/-1,1/0:31/0/18/0:17 | 2/3/-1,1/0:31/1/0/0:18 | 2/3/-1,1/0:31/1/1:5/0:19 | 2/3/-1,1/0:31/1/6:10/0:20 | 2/3/-1,1/0:31/1/11:15/0:21 | 2/3/-1,1/0:31/1/16:20/0:22 | 2/3/-1,1/0:31/1/21:22/0:23 | 2/3/-1,1/0:31/1/23/0:22 | 2/3/-1,1/0:31/2/0/0:22 | 2/3/-1,1/0:31/2/1:4/0:23 | 2/3/-1,1/0:31/2/5:9/0:24 | 2/3/-1,1/0:31/2/10:14/0:25 | 2/3/-1,1/0:31/2/15:19/0:26 | 2/3/-1,1/0:31/2/20:24/0:27 | 2/3/-1,1/0:31/2/25:28/0:28 | 2/3/-1,1/0:31/2/29/0:27 | 2/3/-2,2/0:31/0:5/0:15/0:23 | 2/3/-2,2/0:31/6:13/0:7/0:23 | 2/3/-2,2/0:31/14:17/0:15/0:17 - AtlasDetectorID::initialize_from_dictionary - OK -CscIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -CscIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -CscIdHelper INFO MultiRange built successfully to cscStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK CscIdHelper INFO Initializing CSC hash indices ... -CscIdHelper INFO The element hash max is 32 -CscIdHelper INFO The detector element hash max is 64 CscIdHelper INFO The channel hash max is 61440 CscIdHelper INFO Initializing CSC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -RpcIdHelper INFO MultiRange built successfully to doubletR: MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to detectorElement: DetectorElement MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to rpcStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper INFO Initializing RPC hash indices ... -RpcIdHelper INFO The element hash max is 600 -RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -TgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper INFO Initializing TGC hash indices ... -TgcIdHelper INFO The element hash max is 1578 -TgcIdHelper INFO The detector element hash max is 1578 TgcIdHelper INFO Initializing TGC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -MdtIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MdtIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 261 -MdtIdHelper INFO MultiRange built successfully to tube: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper INFO Initializing MDT hash indices ... -MdtIdHelper INFO The element hash max is 1188 -MdtIdHelper INFO The detector element hash max is 2328 MdtIdHelper INFO Initializing MDT hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -sTgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -sTgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 253 -sTgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK sTgcIdHelper INFO Initializing sTGC hash indices ... -sTgcIdHelper INFO The element hash max is 96 -sTgcIdHelper INFO The detector element hash max is 192 sTgcIdHelper INFO Initializing sTGC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -MmIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MmIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -MmIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MmIdHelper INFO Initializing MicroMegas hash indices ... -MmIdHelper INFO The element hash max is 64 -MmIdHelper INFO The detector element hash max is 128 MmIdHelper INFO Initializing MicroMegas hash indices for finding neighbors ... AtlasDetectorID::initialize_from_dictionary - OK -ClassIDSvc INFO getRegistryEntries: read 379 CLIDRegistry entries for module ALL test1 diff --git a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/MM_ClusterOnTrackCnv_p1_test.ref b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/MM_ClusterOnTrackCnv_p1_test.ref index bc750a9634ec0a785026b427cfe48233001a0f2d..eafa59f97879b9e447a9cdea019dc6bb7238a360 100644 --- a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/MM_ClusterOnTrackCnv_p1_test.ref +++ b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/MM_ClusterOnTrackCnv_p1_test.ref @@ -1,18 +1,6 @@ -Initializing Gaudi ApplicationMgr using job opts /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # =======> /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # (1,1): ApplicationMgr.Dlls += ["StoreGate", "CLIDComps"] -JobOptionsSvc INFO # (2,1): ApplicationMgr.ExtSvc += ["StoreGateSvc", "StoreGateSvc/DetectorStore", "StoreGateSvc/ConditionStore", "Athena::RCUSvc"] -JobOptionsSvc INFO Job options successfully read in from /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -ApplicationMgr SUCCESS 
-==================================================================================================================================== - Welcome to ApplicationMgr (GaudiCoreSvc v34r0) - running on pcumass4 on Wed Nov 4 09:34:33 2020 -==================================================================================================================================== -ApplicationMgr INFO Successfully loaded modules : StoreGate, CLIDComps ApplicationMgr INFO Application Manager Configured successfully -ClassIDSvc INFO getRegistryEntries: read 12361 CLIDRegistry entries for module ALL EventLoopMgr WARNING Unable to locate service "EventSelector" EventLoopMgr WARNING No events will be processed from external input. ApplicationMgr INFO Application Manager Initialized successfully @@ -22,22 +10,7 @@ ApplicationMgr Ready Volume pmdum2 not found: returning 0 Volume pmdum3 not found: returning 0 INFO Initialize from dictionary - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG pixel decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,0,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 0 1 0 2 mode enumerated - DEBUG bec_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG lay_disk decode 0 vals 0:2 mask/zero mask/shift/bits/offset 3 fe7fffffffffffff 55 2 7 indexes mode both_bounded - DEBUG lay_disk_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_mod decode 0 vals 0:51 mask/zero mask/shift/bits/offset 3f ff81ffffffffffff 49 6 9 indexes mode both_bounded - DEBUG phi_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode 
both_bounded - DEBUG eta_mod decode 1 vals -6:6 mask/zero mask/shift/bits/offset f fffe1fffffffffff 45 4 15 indexes mode both_bounded - DEBUG eta_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_index decode 0 vals 0:327 mask/zero mask/shift/bits/offset 1ff ffffe00fffffffff 36 9 19 indexes mode both_bounded - DEBUG eta_index decode 0 vals 0:191 mask/zero mask/shift/bits/offset ff fffffff00fffffff 28 8 28 indexes mode both_bounded - DEBUG bec_eta_mod decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded indet 1 2,4,5,7,10,11,12,13 7 1fffffffffffffff 61 3 0 min/max 2 13 values 2 4 5 7 10 11 12 13 indexes 0 0 1 2 0 3 0 0 4 5 6 7 indices 8 prev 0 next 0 mode enumerated cont mode none pixel 1 1:3 3 e7ffffffffffffff 59 2 3 min/max 1 3 values indexes indices 3 prev 0 next 0 mode both_bounded cont mode none bec 1 -2,0,2 3 f9ffffffffffffff 57 2 5 min/max -2 2 values -2 0 2 indexes 0 0 1 0 2 indices 3 prev 0 next 0 mode enumerated cont mode none @@ -60,87 +33,28 @@ phi_module 4 eta_module 5 phi_index 6 eta_index 7 - DEBUG PixelID::initialize_from_dict Set barrel field values: 0 - DEBUG PixelID::initialize_from_dict Set dbm field values: -999,999 - DEBUG PixelID::initialize_from_dict Found field values: InDet/Pixel 2/1 - DEBUG PixelID::init_neighbors - DEBUG PixelID::initialize_from_dict - DEBUG Wafer range -> 2/1/0/0/0:21/-6:6 | 2/1/0/1/0:37/-6:6 | 2/1/0/2/0:51/-6:6 | 2/1/-2,2/0:2/0:47/0 - DEBUG Pixel range -> 2/1/0/0/0:21/-6:6/0:327/0:191 | 2/1/0/1/0:37/-6:6/0:327/0:143 | 2/1/0/2/0:51/-6:6/0:327/0:143 | 2/1/-2,2/0:2/0:47/0/0:327/0:143 INFO SCT_ID::initialize_from_dictionary AtlasDetectorID::initialize_from_dictionary - OK INFO Initialize from dictionary cout 0 - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 
indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG trt decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,-1,1,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 1 0 2 3 mode enumerated - DEBUG phi_mod decode 0 vals 0:31 mask/zero mask/shift/bits/offset 1f fe0fffffffffffff 52 5 7 indexes mode both_bounded - DEBUG lay_wheel decode 0 vals 0:17 mask/zero mask/shift/bits/offset 1f fff07fffffffffff 47 5 12 indexes mode both_bounded - DEBUG str_lay decode 0 vals 0:29 mask/zero mask/shift/bits/offset 1f ffff83ffffffffff 42 5 17 indexes mode both_bounded - DEBUG straw decode 0 vals 0:28 mask/zero mask/shift/bits/offset 1f fffffc1fffffffff 37 5 22 indexes mode both_bounded - DEBUG TRT_ID::initialize_from_dict Set barrel field values: -1,1 - DEBUG TRT_ID::initialize_from_dict Found field values: InDet/TRT 2/3 - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG TRT_ID::init_hashes module hash max 1344 - DEBUG TRT_ID::init_hashes straw layer hash max 19008 - DEBUG TRT_ID::init_hashes straw hash max 424576 - DEBUG TRT_ID::initialize_from_dict - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG Straw layer range -> 2/3/-1,1/0:31/0/0 | 2/3/-1,1/0:31/0/1:4 | 2/3/-1,1/0:31/0/5:9 | 2/3/-1,1/0:31/0/10:14 | 2/3/-1,1/0:31/0/15:17 | 2/3/-1,1/0:31/0/18 | 2/3/-1,1/0:31/1/0 | 2/3/-1,1/0:31/1/1:5 | 2/3/-1,1/0:31/1/6:10 | 2/3/-1,1/0:31/1/11:15 | 2/3/-1,1/0:31/1/16:20 | 2/3/-1,1/0:31/1/21:22 | 2/3/-1,1/0:31/1/23 | 2/3/-1,1/0:31/2/0 | 2/3/-1,1/0:31/2/1:4 | 2/3/-1,1/0:31/2/5:9 | 2/3/-1,1/0:31/2/10:14 | 2/3/-1,1/0:31/2/15:19 | 2/3/-1,1/0:31/2/20:24 | 2/3/-1,1/0:31/2/25:28 | 2/3/-1,1/0:31/2/29 | 2/3/-2,2/0:31/0:5/0:15 | 2/3/-2,2/0:31/6:13/0:7 | 2/3/-2,2/0:31/14:17/0:15 - DEBUG Straw range -> 
2/3/-1,1/0:31/0/0/0:14 | 2/3/-1,1/0:31/0/1:4/0:15 | 2/3/-1,1/0:31/0/5:9/0:16 | 2/3/-1,1/0:31/0/10:14/0:17 | 2/3/-1,1/0:31/0/15:17/0:18 | 2/3/-1,1/0:31/0/18/0:17 | 2/3/-1,1/0:31/1/0/0:18 | 2/3/-1,1/0:31/1/1:5/0:19 | 2/3/-1,1/0:31/1/6:10/0:20 | 2/3/-1,1/0:31/1/11:15/0:21 | 2/3/-1,1/0:31/1/16:20/0:22 | 2/3/-1,1/0:31/1/21:22/0:23 | 2/3/-1,1/0:31/1/23/0:22 | 2/3/-1,1/0:31/2/0/0:22 | 2/3/-1,1/0:31/2/1:4/0:23 | 2/3/-1,1/0:31/2/5:9/0:24 | 2/3/-1,1/0:31/2/10:14/0:25 | 2/3/-1,1/0:31/2/15:19/0:26 | 2/3/-1,1/0:31/2/20:24/0:27 | 2/3/-1,1/0:31/2/25:28/0:28 | 2/3/-1,1/0:31/2/29/0:27 | 2/3/-2,2/0:31/0:5/0:15/0:23 | 2/3/-2,2/0:31/6:13/0:7/0:23 | 2/3/-2,2/0:31/14:17/0:15/0:17 - AtlasDetectorID::initialize_from_dictionary - OK -CscIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -CscIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -CscIdHelper INFO MultiRange built successfully to cscStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK CscIdHelper INFO Initializing CSC hash indices ... -CscIdHelper INFO The element hash max is 32 -CscIdHelper INFO The detector element hash max is 64 CscIdHelper INFO The channel hash max is 61440 CscIdHelper INFO Initializing CSC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -RpcIdHelper INFO MultiRange built successfully to doubletR: MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to detectorElement: DetectorElement MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to rpcStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper INFO Initializing RPC hash indices ... -RpcIdHelper INFO The element hash max is 600 -RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -TgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper INFO Initializing TGC hash indices ... -TgcIdHelper INFO The element hash max is 1578 -TgcIdHelper INFO The detector element hash max is 1578 TgcIdHelper INFO Initializing TGC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -MdtIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MdtIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 261 -MdtIdHelper INFO MultiRange built successfully to tube: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper INFO Initializing MDT hash indices ... -MdtIdHelper INFO The element hash max is 1188 -MdtIdHelper INFO The detector element hash max is 2328 MdtIdHelper INFO Initializing MDT hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -sTgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -sTgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 253 -sTgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK sTgcIdHelper INFO Initializing sTGC hash indices ... -sTgcIdHelper INFO The element hash max is 96 -sTgcIdHelper INFO The detector element hash max is 192 sTgcIdHelper INFO Initializing sTGC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -MmIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MmIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -MmIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MmIdHelper INFO Initializing MicroMegas hash indices ... -MmIdHelper INFO The element hash max is 64 -MmIdHelper INFO The detector element hash max is 128 MmIdHelper INFO Initializing MicroMegas hash indices for finding neighbors ... AtlasDetectorID::initialize_from_dictionary - OK -ClassIDSvc INFO getRegistryEntries: read 379 CLIDRegistry entries for module ALL test1 -ClassIDSvc INFO getRegistryEntries: read 320 CLIDRegistry entries for module ALL -ClassIDSvc INFO getRegistryEntries: read 170 CLIDRegistry entries for module ALL diff --git a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/MM_DigitContainerCnv_p1_test.ref b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/MM_DigitContainerCnv_p1_test.ref index 2585fcf71181472422a23a067e84aea2246af506..eafa59f97879b9e447a9cdea019dc6bb7238a360 100644 --- a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/MM_DigitContainerCnv_p1_test.ref +++ b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/MM_DigitContainerCnv_p1_test.ref @@ -1,18 +1,6 @@ -Initializing Gaudi ApplicationMgr using job opts /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # =======> /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # (1,1): ApplicationMgr.Dlls += ["StoreGate", "CLIDComps"] -JobOptionsSvc INFO # (2,1): ApplicationMgr.ExtSvc += ["StoreGateSvc", "StoreGateSvc/DetectorStore", "StoreGateSvc/ConditionStore", "Athena::RCUSvc"] -JobOptionsSvc INFO Job options successfully read in from 
/home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -ApplicationMgr SUCCESS -==================================================================================================================================== - Welcome to ApplicationMgr (GaudiCoreSvc v34r0) - running on pcumass4 on Wed Nov 4 09:35:35 2020 -==================================================================================================================================== -ApplicationMgr INFO Successfully loaded modules : StoreGate, CLIDComps ApplicationMgr INFO Application Manager Configured successfully -ClassIDSvc INFO getRegistryEntries: read 12351 CLIDRegistry entries for module ALL EventLoopMgr WARNING Unable to locate service "EventSelector" EventLoopMgr WARNING No events will be processed from external input. ApplicationMgr INFO Application Manager Initialized successfully @@ -22,22 +10,7 @@ ApplicationMgr Ready Volume pmdum2 not found: returning 0 Volume pmdum3 not found: returning 0 INFO Initialize from dictionary - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG pixel decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,0,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 0 1 0 2 mode enumerated - DEBUG bec_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG lay_disk decode 0 vals 0:2 mask/zero mask/shift/bits/offset 3 fe7fffffffffffff 55 2 7 indexes mode both_bounded - DEBUG lay_disk_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_mod decode 0 vals 0:51 mask/zero mask/shift/bits/offset 3f ff81ffffffffffff 49 6 
9 indexes mode both_bounded - DEBUG phi_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG eta_mod decode 1 vals -6:6 mask/zero mask/shift/bits/offset f fffe1fffffffffff 45 4 15 indexes mode both_bounded - DEBUG eta_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_index decode 0 vals 0:327 mask/zero mask/shift/bits/offset 1ff ffffe00fffffffff 36 9 19 indexes mode both_bounded - DEBUG eta_index decode 0 vals 0:191 mask/zero mask/shift/bits/offset ff fffffff00fffffff 28 8 28 indexes mode both_bounded - DEBUG bec_eta_mod decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded indet 1 2,4,5,7,10,11,12,13 7 1fffffffffffffff 61 3 0 min/max 2 13 values 2 4 5 7 10 11 12 13 indexes 0 0 1 2 0 3 0 0 4 5 6 7 indices 8 prev 0 next 0 mode enumerated cont mode none pixel 1 1:3 3 e7ffffffffffffff 59 2 3 min/max 1 3 values indexes indices 3 prev 0 next 0 mode both_bounded cont mode none bec 1 -2,0,2 3 f9ffffffffffffff 57 2 5 min/max -2 2 values -2 0 2 indexes 0 0 1 0 2 indices 3 prev 0 next 0 mode enumerated cont mode none @@ -60,85 +33,28 @@ phi_module 4 eta_module 5 phi_index 6 eta_index 7 - DEBUG PixelID::initialize_from_dict Set barrel field values: 0 - DEBUG PixelID::initialize_from_dict Set dbm field values: -999,999 - DEBUG PixelID::initialize_from_dict Found field values: InDet/Pixel 2/1 - DEBUG PixelID::init_neighbors - DEBUG PixelID::initialize_from_dict - DEBUG Wafer range -> 2/1/0/0/0:21/-6:6 | 2/1/0/1/0:37/-6:6 | 2/1/0/2/0:51/-6:6 | 2/1/-2,2/0:2/0:47/0 - DEBUG Pixel range -> 2/1/0/0/0:21/-6:6/0:327/0:191 | 2/1/0/1/0:37/-6:6/0:327/0:143 | 2/1/0/2/0:51/-6:6/0:327/0:143 | 2/1/-2,2/0:2/0:47/0/0:327/0:143 INFO SCT_ID::initialize_from_dictionary AtlasDetectorID::initialize_from_dictionary - OK INFO Initialize from dictionary cout 0 - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields 
for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG trt decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,-1,1,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 1 0 2 3 mode enumerated - DEBUG phi_mod decode 0 vals 0:31 mask/zero mask/shift/bits/offset 1f fe0fffffffffffff 52 5 7 indexes mode both_bounded - DEBUG lay_wheel decode 0 vals 0:17 mask/zero mask/shift/bits/offset 1f fff07fffffffffff 47 5 12 indexes mode both_bounded - DEBUG str_lay decode 0 vals 0:29 mask/zero mask/shift/bits/offset 1f ffff83ffffffffff 42 5 17 indexes mode both_bounded - DEBUG straw decode 0 vals 0:28 mask/zero mask/shift/bits/offset 1f fffffc1fffffffff 37 5 22 indexes mode both_bounded - DEBUG TRT_ID::initialize_from_dict Set barrel field values: -1,1 - DEBUG TRT_ID::initialize_from_dict Found field values: InDet/TRT 2/3 - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG TRT_ID::init_hashes module hash max 1344 - DEBUG TRT_ID::init_hashes straw layer hash max 19008 - DEBUG TRT_ID::init_hashes straw hash max 424576 - DEBUG TRT_ID::initialize_from_dict - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG Straw layer range -> 2/3/-1,1/0:31/0/0 | 2/3/-1,1/0:31/0/1:4 | 2/3/-1,1/0:31/0/5:9 | 2/3/-1,1/0:31/0/10:14 | 2/3/-1,1/0:31/0/15:17 | 2/3/-1,1/0:31/0/18 | 2/3/-1,1/0:31/1/0 | 2/3/-1,1/0:31/1/1:5 | 2/3/-1,1/0:31/1/6:10 | 2/3/-1,1/0:31/1/11:15 | 2/3/-1,1/0:31/1/16:20 | 2/3/-1,1/0:31/1/21:22 | 2/3/-1,1/0:31/1/23 | 2/3/-1,1/0:31/2/0 | 2/3/-1,1/0:31/2/1:4 | 2/3/-1,1/0:31/2/5:9 | 2/3/-1,1/0:31/2/10:14 | 2/3/-1,1/0:31/2/15:19 | 2/3/-1,1/0:31/2/20:24 | 2/3/-1,1/0:31/2/25:28 | 
2/3/-1,1/0:31/2/29 | 2/3/-2,2/0:31/0:5/0:15 | 2/3/-2,2/0:31/6:13/0:7 | 2/3/-2,2/0:31/14:17/0:15 - DEBUG Straw range -> 2/3/-1,1/0:31/0/0/0:14 | 2/3/-1,1/0:31/0/1:4/0:15 | 2/3/-1,1/0:31/0/5:9/0:16 | 2/3/-1,1/0:31/0/10:14/0:17 | 2/3/-1,1/0:31/0/15:17/0:18 | 2/3/-1,1/0:31/0/18/0:17 | 2/3/-1,1/0:31/1/0/0:18 | 2/3/-1,1/0:31/1/1:5/0:19 | 2/3/-1,1/0:31/1/6:10/0:20 | 2/3/-1,1/0:31/1/11:15/0:21 | 2/3/-1,1/0:31/1/16:20/0:22 | 2/3/-1,1/0:31/1/21:22/0:23 | 2/3/-1,1/0:31/1/23/0:22 | 2/3/-1,1/0:31/2/0/0:22 | 2/3/-1,1/0:31/2/1:4/0:23 | 2/3/-1,1/0:31/2/5:9/0:24 | 2/3/-1,1/0:31/2/10:14/0:25 | 2/3/-1,1/0:31/2/15:19/0:26 | 2/3/-1,1/0:31/2/20:24/0:27 | 2/3/-1,1/0:31/2/25:28/0:28 | 2/3/-1,1/0:31/2/29/0:27 | 2/3/-2,2/0:31/0:5/0:15/0:23 | 2/3/-2,2/0:31/6:13/0:7/0:23 | 2/3/-2,2/0:31/14:17/0:15/0:17 - AtlasDetectorID::initialize_from_dictionary - OK -CscIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -CscIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -CscIdHelper INFO MultiRange built successfully to cscStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK CscIdHelper INFO Initializing CSC hash indices ... -CscIdHelper INFO The element hash max is 32 -CscIdHelper INFO The detector element hash max is 64 CscIdHelper INFO The channel hash max is 61440 CscIdHelper INFO Initializing CSC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -RpcIdHelper INFO MultiRange built successfully to doubletR: MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to detectorElement: DetectorElement MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to rpcStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper INFO Initializing RPC hash indices ... 
-RpcIdHelper INFO The element hash max is 600 -RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -TgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper INFO Initializing TGC hash indices ... -TgcIdHelper INFO The element hash max is 1578 -TgcIdHelper INFO The detector element hash max is 1578 TgcIdHelper INFO Initializing TGC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -MdtIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MdtIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 261 -MdtIdHelper INFO MultiRange built successfully to tube: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper INFO Initializing MDT hash indices ... -MdtIdHelper INFO The element hash max is 1188 -MdtIdHelper INFO The detector element hash max is 2328 MdtIdHelper INFO Initializing MDT hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -sTgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -sTgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 253 -sTgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK sTgcIdHelper INFO Initializing sTGC hash indices ... -sTgcIdHelper INFO The element hash max is 96 -sTgcIdHelper INFO The detector element hash max is 192 sTgcIdHelper INFO Initializing sTGC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -MmIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MmIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -MmIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MmIdHelper INFO Initializing MicroMegas hash indices ... -MmIdHelper INFO The element hash max is 64 -MmIdHelper INFO The detector element hash max is 128 MmIdHelper INFO Initializing MicroMegas hash indices for finding neighbors ... AtlasDetectorID::initialize_from_dictionary - OK -ClassIDSvc INFO getRegistryEntries: read 379 CLIDRegistry entries for module ALL test1 diff --git a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/MM_DigitContainerCnv_p2_test.ref b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/MM_DigitContainerCnv_p2_test.ref index 7c761e86cccb1ba2df451664333647771bd75996..eafa59f97879b9e447a9cdea019dc6bb7238a360 100644 --- a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/MM_DigitContainerCnv_p2_test.ref +++ b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/MM_DigitContainerCnv_p2_test.ref @@ -1,18 +1,6 @@ -Initializing Gaudi ApplicationMgr using job opts /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # =======> /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # (1,1): ApplicationMgr.Dlls += ["StoreGate", "CLIDComps"] -JobOptionsSvc INFO # (2,1): ApplicationMgr.ExtSvc += ["StoreGateSvc", "StoreGateSvc/DetectorStore", "StoreGateSvc/ConditionStore", "Athena::RCUSvc"] -JobOptionsSvc INFO Job options successfully read in from /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -ApplicationMgr SUCCESS 
-==================================================================================================================================== - Welcome to ApplicationMgr (GaudiCoreSvc v34r0) - running on pcumass4 on Wed Nov 4 09:35:37 2020 -==================================================================================================================================== -ApplicationMgr INFO Successfully loaded modules : StoreGate, CLIDComps ApplicationMgr INFO Application Manager Configured successfully -ClassIDSvc INFO getRegistryEntries: read 12351 CLIDRegistry entries for module ALL EventLoopMgr WARNING Unable to locate service "EventSelector" EventLoopMgr WARNING No events will be processed from external input. ApplicationMgr INFO Application Manager Initialized successfully @@ -22,22 +10,7 @@ ApplicationMgr Ready Volume pmdum2 not found: returning 0 Volume pmdum3 not found: returning 0 INFO Initialize from dictionary - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG pixel decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,0,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 0 1 0 2 mode enumerated - DEBUG bec_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG lay_disk decode 0 vals 0:2 mask/zero mask/shift/bits/offset 3 fe7fffffffffffff 55 2 7 indexes mode both_bounded - DEBUG lay_disk_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_mod decode 0 vals 0:51 mask/zero mask/shift/bits/offset 3f ff81ffffffffffff 49 6 9 indexes mode both_bounded - DEBUG phi_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode 
both_bounded - DEBUG eta_mod decode 1 vals -6:6 mask/zero mask/shift/bits/offset f fffe1fffffffffff 45 4 15 indexes mode both_bounded - DEBUG eta_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_index decode 0 vals 0:327 mask/zero mask/shift/bits/offset 1ff ffffe00fffffffff 36 9 19 indexes mode both_bounded - DEBUG eta_index decode 0 vals 0:191 mask/zero mask/shift/bits/offset ff fffffff00fffffff 28 8 28 indexes mode both_bounded - DEBUG bec_eta_mod decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded indet 1 2,4,5,7,10,11,12,13 7 1fffffffffffffff 61 3 0 min/max 2 13 values 2 4 5 7 10 11 12 13 indexes 0 0 1 2 0 3 0 0 4 5 6 7 indices 8 prev 0 next 0 mode enumerated cont mode none pixel 1 1:3 3 e7ffffffffffffff 59 2 3 min/max 1 3 values indexes indices 3 prev 0 next 0 mode both_bounded cont mode none bec 1 -2,0,2 3 f9ffffffffffffff 57 2 5 min/max -2 2 values -2 0 2 indexes 0 0 1 0 2 indices 3 prev 0 next 0 mode enumerated cont mode none @@ -60,85 +33,28 @@ phi_module 4 eta_module 5 phi_index 6 eta_index 7 - DEBUG PixelID::initialize_from_dict Set barrel field values: 0 - DEBUG PixelID::initialize_from_dict Set dbm field values: -999,999 - DEBUG PixelID::initialize_from_dict Found field values: InDet/Pixel 2/1 - DEBUG PixelID::init_neighbors - DEBUG PixelID::initialize_from_dict - DEBUG Wafer range -> 2/1/0/0/0:21/-6:6 | 2/1/0/1/0:37/-6:6 | 2/1/0/2/0:51/-6:6 | 2/1/-2,2/0:2/0:47/0 - DEBUG Pixel range -> 2/1/0/0/0:21/-6:6/0:327/0:191 | 2/1/0/1/0:37/-6:6/0:327/0:143 | 2/1/0/2/0:51/-6:6/0:327/0:143 | 2/1/-2,2/0:2/0:47/0/0:327/0:143 INFO SCT_ID::initialize_from_dictionary AtlasDetectorID::initialize_from_dictionary - OK INFO Initialize from dictionary cout 0 - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 
indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG trt decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,-1,1,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 1 0 2 3 mode enumerated - DEBUG phi_mod decode 0 vals 0:31 mask/zero mask/shift/bits/offset 1f fe0fffffffffffff 52 5 7 indexes mode both_bounded - DEBUG lay_wheel decode 0 vals 0:17 mask/zero mask/shift/bits/offset 1f fff07fffffffffff 47 5 12 indexes mode both_bounded - DEBUG str_lay decode 0 vals 0:29 mask/zero mask/shift/bits/offset 1f ffff83ffffffffff 42 5 17 indexes mode both_bounded - DEBUG straw decode 0 vals 0:28 mask/zero mask/shift/bits/offset 1f fffffc1fffffffff 37 5 22 indexes mode both_bounded - DEBUG TRT_ID::initialize_from_dict Set barrel field values: -1,1 - DEBUG TRT_ID::initialize_from_dict Found field values: InDet/TRT 2/3 - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG TRT_ID::init_hashes module hash max 1344 - DEBUG TRT_ID::init_hashes straw layer hash max 19008 - DEBUG TRT_ID::init_hashes straw hash max 424576 - DEBUG TRT_ID::initialize_from_dict - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG Straw layer range -> 2/3/-1,1/0:31/0/0 | 2/3/-1,1/0:31/0/1:4 | 2/3/-1,1/0:31/0/5:9 | 2/3/-1,1/0:31/0/10:14 | 2/3/-1,1/0:31/0/15:17 | 2/3/-1,1/0:31/0/18 | 2/3/-1,1/0:31/1/0 | 2/3/-1,1/0:31/1/1:5 | 2/3/-1,1/0:31/1/6:10 | 2/3/-1,1/0:31/1/11:15 | 2/3/-1,1/0:31/1/16:20 | 2/3/-1,1/0:31/1/21:22 | 2/3/-1,1/0:31/1/23 | 2/3/-1,1/0:31/2/0 | 2/3/-1,1/0:31/2/1:4 | 2/3/-1,1/0:31/2/5:9 | 2/3/-1,1/0:31/2/10:14 | 2/3/-1,1/0:31/2/15:19 | 2/3/-1,1/0:31/2/20:24 | 2/3/-1,1/0:31/2/25:28 | 2/3/-1,1/0:31/2/29 | 2/3/-2,2/0:31/0:5/0:15 | 2/3/-2,2/0:31/6:13/0:7 | 2/3/-2,2/0:31/14:17/0:15 - DEBUG Straw range -> 
2/3/-1,1/0:31/0/0/0:14 | 2/3/-1,1/0:31/0/1:4/0:15 | 2/3/-1,1/0:31/0/5:9/0:16 | 2/3/-1,1/0:31/0/10:14/0:17 | 2/3/-1,1/0:31/0/15:17/0:18 | 2/3/-1,1/0:31/0/18/0:17 | 2/3/-1,1/0:31/1/0/0:18 | 2/3/-1,1/0:31/1/1:5/0:19 | 2/3/-1,1/0:31/1/6:10/0:20 | 2/3/-1,1/0:31/1/11:15/0:21 | 2/3/-1,1/0:31/1/16:20/0:22 | 2/3/-1,1/0:31/1/21:22/0:23 | 2/3/-1,1/0:31/1/23/0:22 | 2/3/-1,1/0:31/2/0/0:22 | 2/3/-1,1/0:31/2/1:4/0:23 | 2/3/-1,1/0:31/2/5:9/0:24 | 2/3/-1,1/0:31/2/10:14/0:25 | 2/3/-1,1/0:31/2/15:19/0:26 | 2/3/-1,1/0:31/2/20:24/0:27 | 2/3/-1,1/0:31/2/25:28/0:28 | 2/3/-1,1/0:31/2/29/0:27 | 2/3/-2,2/0:31/0:5/0:15/0:23 | 2/3/-2,2/0:31/6:13/0:7/0:23 | 2/3/-2,2/0:31/14:17/0:15/0:17 - AtlasDetectorID::initialize_from_dictionary - OK -CscIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -CscIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -CscIdHelper INFO MultiRange built successfully to cscStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK CscIdHelper INFO Initializing CSC hash indices ... -CscIdHelper INFO The element hash max is 32 -CscIdHelper INFO The detector element hash max is 64 CscIdHelper INFO The channel hash max is 61440 CscIdHelper INFO Initializing CSC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -RpcIdHelper INFO MultiRange built successfully to doubletR: MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to detectorElement: DetectorElement MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to rpcStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper INFO Initializing RPC hash indices ... -RpcIdHelper INFO The element hash max is 600 -RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -TgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper INFO Initializing TGC hash indices ... -TgcIdHelper INFO The element hash max is 1578 -TgcIdHelper INFO The detector element hash max is 1578 TgcIdHelper INFO Initializing TGC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -MdtIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MdtIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 261 -MdtIdHelper INFO MultiRange built successfully to tube: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper INFO Initializing MDT hash indices ... -MdtIdHelper INFO The element hash max is 1188 -MdtIdHelper INFO The detector element hash max is 2328 MdtIdHelper INFO Initializing MDT hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -sTgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -sTgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 253 -sTgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK sTgcIdHelper INFO Initializing sTGC hash indices ... -sTgcIdHelper INFO The element hash max is 96 -sTgcIdHelper INFO The detector element hash max is 192 sTgcIdHelper INFO Initializing sTGC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -MmIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MmIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -MmIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MmIdHelper INFO Initializing MicroMegas hash indices ... -MmIdHelper INFO The element hash max is 64 -MmIdHelper INFO The detector element hash max is 128 MmIdHelper INFO Initializing MicroMegas hash indices for finding neighbors ... AtlasDetectorID::initialize_from_dictionary - OK -ClassIDSvc INFO getRegistryEntries: read 379 CLIDRegistry entries for module ALL test1 diff --git a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/MdtDriftCircleOnTrackCnv_p2_test.ref b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/MdtDriftCircleOnTrackCnv_p2_test.ref index bbf4b00b1aee50566dfbbeb5acbf480cf5fc497c..eafa59f97879b9e447a9cdea019dc6bb7238a360 100644 --- a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/MdtDriftCircleOnTrackCnv_p2_test.ref +++ b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/MdtDriftCircleOnTrackCnv_p2_test.ref @@ -1,18 +1,6 @@ -Initializing Gaudi ApplicationMgr using job opts /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # =======> /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # (1,1): ApplicationMgr.Dlls += ["StoreGate", "CLIDComps"] -JobOptionsSvc INFO # (2,1): ApplicationMgr.ExtSvc += ["StoreGateSvc", "StoreGateSvc/DetectorStore", "StoreGateSvc/ConditionStore", "Athena::RCUSvc"] -JobOptionsSvc INFO Job options successfully read in from /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -ApplicationMgr SUCCESS 
-==================================================================================================================================== - Welcome to ApplicationMgr (GaudiCoreSvc v34r0) - running on pcumass4 on Wed Nov 4 09:34:37 2020 -==================================================================================================================================== -ApplicationMgr INFO Successfully loaded modules : StoreGate, CLIDComps ApplicationMgr INFO Application Manager Configured successfully -ClassIDSvc INFO getRegistryEntries: read 12361 CLIDRegistry entries for module ALL EventLoopMgr WARNING Unable to locate service "EventSelector" EventLoopMgr WARNING No events will be processed from external input. ApplicationMgr INFO Application Manager Initialized successfully @@ -22,22 +10,7 @@ ApplicationMgr Ready Volume pmdum2 not found: returning 0 Volume pmdum3 not found: returning 0 INFO Initialize from dictionary - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG pixel decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,0,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 0 1 0 2 mode enumerated - DEBUG bec_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG lay_disk decode 0 vals 0:2 mask/zero mask/shift/bits/offset 3 fe7fffffffffffff 55 2 7 indexes mode both_bounded - DEBUG lay_disk_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_mod decode 0 vals 0:51 mask/zero mask/shift/bits/offset 3f ff81ffffffffffff 49 6 9 indexes mode both_bounded - DEBUG phi_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode 
both_bounded - DEBUG eta_mod decode 1 vals -6:6 mask/zero mask/shift/bits/offset f fffe1fffffffffff 45 4 15 indexes mode both_bounded - DEBUG eta_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_index decode 0 vals 0:327 mask/zero mask/shift/bits/offset 1ff ffffe00fffffffff 36 9 19 indexes mode both_bounded - DEBUG eta_index decode 0 vals 0:191 mask/zero mask/shift/bits/offset ff fffffff00fffffff 28 8 28 indexes mode both_bounded - DEBUG bec_eta_mod decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded indet 1 2,4,5,7,10,11,12,13 7 1fffffffffffffff 61 3 0 min/max 2 13 values 2 4 5 7 10 11 12 13 indexes 0 0 1 2 0 3 0 0 4 5 6 7 indices 8 prev 0 next 0 mode enumerated cont mode none pixel 1 1:3 3 e7ffffffffffffff 59 2 3 min/max 1 3 values indexes indices 3 prev 0 next 0 mode both_bounded cont mode none bec 1 -2,0,2 3 f9ffffffffffffff 57 2 5 min/max -2 2 values -2 0 2 indexes 0 0 1 0 2 indices 3 prev 0 next 0 mode enumerated cont mode none @@ -60,87 +33,28 @@ phi_module 4 eta_module 5 phi_index 6 eta_index 7 - DEBUG PixelID::initialize_from_dict Set barrel field values: 0 - DEBUG PixelID::initialize_from_dict Set dbm field values: -999,999 - DEBUG PixelID::initialize_from_dict Found field values: InDet/Pixel 2/1 - DEBUG PixelID::init_neighbors - DEBUG PixelID::initialize_from_dict - DEBUG Wafer range -> 2/1/0/0/0:21/-6:6 | 2/1/0/1/0:37/-6:6 | 2/1/0/2/0:51/-6:6 | 2/1/-2,2/0:2/0:47/0 - DEBUG Pixel range -> 2/1/0/0/0:21/-6:6/0:327/0:191 | 2/1/0/1/0:37/-6:6/0:327/0:143 | 2/1/0/2/0:51/-6:6/0:327/0:143 | 2/1/-2,2/0:2/0:47/0/0:327/0:143 INFO SCT_ID::initialize_from_dictionary AtlasDetectorID::initialize_from_dictionary - OK INFO Initialize from dictionary cout 0 - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 
indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG trt decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,-1,1,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 1 0 2 3 mode enumerated - DEBUG phi_mod decode 0 vals 0:31 mask/zero mask/shift/bits/offset 1f fe0fffffffffffff 52 5 7 indexes mode both_bounded - DEBUG lay_wheel decode 0 vals 0:17 mask/zero mask/shift/bits/offset 1f fff07fffffffffff 47 5 12 indexes mode both_bounded - DEBUG str_lay decode 0 vals 0:29 mask/zero mask/shift/bits/offset 1f ffff83ffffffffff 42 5 17 indexes mode both_bounded - DEBUG straw decode 0 vals 0:28 mask/zero mask/shift/bits/offset 1f fffffc1fffffffff 37 5 22 indexes mode both_bounded - DEBUG TRT_ID::initialize_from_dict Set barrel field values: -1,1 - DEBUG TRT_ID::initialize_from_dict Found field values: InDet/TRT 2/3 - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG TRT_ID::init_hashes module hash max 1344 - DEBUG TRT_ID::init_hashes straw layer hash max 19008 - DEBUG TRT_ID::init_hashes straw hash max 424576 - DEBUG TRT_ID::initialize_from_dict - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG Straw layer range -> 2/3/-1,1/0:31/0/0 | 2/3/-1,1/0:31/0/1:4 | 2/3/-1,1/0:31/0/5:9 | 2/3/-1,1/0:31/0/10:14 | 2/3/-1,1/0:31/0/15:17 | 2/3/-1,1/0:31/0/18 | 2/3/-1,1/0:31/1/0 | 2/3/-1,1/0:31/1/1:5 | 2/3/-1,1/0:31/1/6:10 | 2/3/-1,1/0:31/1/11:15 | 2/3/-1,1/0:31/1/16:20 | 2/3/-1,1/0:31/1/21:22 | 2/3/-1,1/0:31/1/23 | 2/3/-1,1/0:31/2/0 | 2/3/-1,1/0:31/2/1:4 | 2/3/-1,1/0:31/2/5:9 | 2/3/-1,1/0:31/2/10:14 | 2/3/-1,1/0:31/2/15:19 | 2/3/-1,1/0:31/2/20:24 | 2/3/-1,1/0:31/2/25:28 | 2/3/-1,1/0:31/2/29 | 2/3/-2,2/0:31/0:5/0:15 | 2/3/-2,2/0:31/6:13/0:7 | 2/3/-2,2/0:31/14:17/0:15 - DEBUG Straw range -> 
2/3/-1,1/0:31/0/0/0:14 | 2/3/-1,1/0:31/0/1:4/0:15 | 2/3/-1,1/0:31/0/5:9/0:16 | 2/3/-1,1/0:31/0/10:14/0:17 | 2/3/-1,1/0:31/0/15:17/0:18 | 2/3/-1,1/0:31/0/18/0:17 | 2/3/-1,1/0:31/1/0/0:18 | 2/3/-1,1/0:31/1/1:5/0:19 | 2/3/-1,1/0:31/1/6:10/0:20 | 2/3/-1,1/0:31/1/11:15/0:21 | 2/3/-1,1/0:31/1/16:20/0:22 | 2/3/-1,1/0:31/1/21:22/0:23 | 2/3/-1,1/0:31/1/23/0:22 | 2/3/-1,1/0:31/2/0/0:22 | 2/3/-1,1/0:31/2/1:4/0:23 | 2/3/-1,1/0:31/2/5:9/0:24 | 2/3/-1,1/0:31/2/10:14/0:25 | 2/3/-1,1/0:31/2/15:19/0:26 | 2/3/-1,1/0:31/2/20:24/0:27 | 2/3/-1,1/0:31/2/25:28/0:28 | 2/3/-1,1/0:31/2/29/0:27 | 2/3/-2,2/0:31/0:5/0:15/0:23 | 2/3/-2,2/0:31/6:13/0:7/0:23 | 2/3/-2,2/0:31/14:17/0:15/0:17 - AtlasDetectorID::initialize_from_dictionary - OK -CscIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -CscIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -CscIdHelper INFO MultiRange built successfully to cscStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK CscIdHelper INFO Initializing CSC hash indices ... -CscIdHelper INFO The element hash max is 32 -CscIdHelper INFO The detector element hash max is 64 CscIdHelper INFO The channel hash max is 61440 CscIdHelper INFO Initializing CSC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -RpcIdHelper INFO MultiRange built successfully to doubletR: MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to detectorElement: DetectorElement MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to rpcStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper INFO Initializing RPC hash indices ... -RpcIdHelper INFO The element hash max is 600 -RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -TgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper INFO Initializing TGC hash indices ... -TgcIdHelper INFO The element hash max is 1578 -TgcIdHelper INFO The detector element hash max is 1578 TgcIdHelper INFO Initializing TGC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -MdtIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MdtIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 261 -MdtIdHelper INFO MultiRange built successfully to tube: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper INFO Initializing MDT hash indices ... -MdtIdHelper INFO The element hash max is 1188 -MdtIdHelper INFO The detector element hash max is 2328 MdtIdHelper INFO Initializing MDT hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -sTgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -sTgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 253 -sTgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK sTgcIdHelper INFO Initializing sTGC hash indices ... -sTgcIdHelper INFO The element hash max is 96 -sTgcIdHelper INFO The detector element hash max is 192 sTgcIdHelper INFO Initializing sTGC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -MmIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MmIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -MmIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MmIdHelper INFO Initializing MicroMegas hash indices ... -MmIdHelper INFO The element hash max is 64 -MmIdHelper INFO The detector element hash max is 128 MmIdHelper INFO Initializing MicroMegas hash indices for finding neighbors ... AtlasDetectorID::initialize_from_dictionary - OK -ClassIDSvc INFO getRegistryEntries: read 379 CLIDRegistry entries for module ALL test1 -ClassIDSvc INFO getRegistryEntries: read 320 CLIDRegistry entries for module ALL -ClassIDSvc INFO getRegistryEntries: read 170 CLIDRegistry entries for module ALL diff --git a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/MdtPrepDataContainerCnv_p1_test.ref b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/MdtPrepDataContainerCnv_p1_test.ref index 911568af82bf9370064f58b4e2d76d45e7651bbf..eafa59f97879b9e447a9cdea019dc6bb7238a360 100644 --- a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/MdtPrepDataContainerCnv_p1_test.ref +++ b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/MdtPrepDataContainerCnv_p1_test.ref @@ -1,18 +1,6 @@ -Initializing Gaudi ApplicationMgr using job opts /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # =======> /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # (1,1): ApplicationMgr.Dlls += ["StoreGate", "CLIDComps"] -JobOptionsSvc INFO # (2,1): ApplicationMgr.ExtSvc += ["StoreGateSvc", "StoreGateSvc/DetectorStore", "StoreGateSvc/ConditionStore", "Athena::RCUSvc"] -JobOptionsSvc INFO Job options successfully read in from 
/home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -ApplicationMgr SUCCESS -==================================================================================================================================== - Welcome to ApplicationMgr (GaudiCoreSvc v34r0) - running on pcumass4 on Wed Nov 4 09:35:03 2020 -==================================================================================================================================== -ApplicationMgr INFO Successfully loaded modules : StoreGate, CLIDComps ApplicationMgr INFO Application Manager Configured successfully -ClassIDSvc INFO getRegistryEntries: read 12360 CLIDRegistry entries for module ALL EventLoopMgr WARNING Unable to locate service "EventSelector" EventLoopMgr WARNING No events will be processed from external input. ApplicationMgr INFO Application Manager Initialized successfully @@ -22,22 +10,7 @@ ApplicationMgr Ready Volume pmdum2 not found: returning 0 Volume pmdum3 not found: returning 0 INFO Initialize from dictionary - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG pixel decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,0,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 0 1 0 2 mode enumerated - DEBUG bec_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG lay_disk decode 0 vals 0:2 mask/zero mask/shift/bits/offset 3 fe7fffffffffffff 55 2 7 indexes mode both_bounded - DEBUG lay_disk_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_mod decode 0 vals 0:51 mask/zero mask/shift/bits/offset 3f ff81ffffffffffff 49 6 
9 indexes mode both_bounded - DEBUG phi_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG eta_mod decode 1 vals -6:6 mask/zero mask/shift/bits/offset f fffe1fffffffffff 45 4 15 indexes mode both_bounded - DEBUG eta_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_index decode 0 vals 0:327 mask/zero mask/shift/bits/offset 1ff ffffe00fffffffff 36 9 19 indexes mode both_bounded - DEBUG eta_index decode 0 vals 0:191 mask/zero mask/shift/bits/offset ff fffffff00fffffff 28 8 28 indexes mode both_bounded - DEBUG bec_eta_mod decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded indet 1 2,4,5,7,10,11,12,13 7 1fffffffffffffff 61 3 0 min/max 2 13 values 2 4 5 7 10 11 12 13 indexes 0 0 1 2 0 3 0 0 4 5 6 7 indices 8 prev 0 next 0 mode enumerated cont mode none pixel 1 1:3 3 e7ffffffffffffff 59 2 3 min/max 1 3 values indexes indices 3 prev 0 next 0 mode both_bounded cont mode none bec 1 -2,0,2 3 f9ffffffffffffff 57 2 5 min/max -2 2 values -2 0 2 indexes 0 0 1 0 2 indices 3 prev 0 next 0 mode enumerated cont mode none @@ -60,85 +33,28 @@ phi_module 4 eta_module 5 phi_index 6 eta_index 7 - DEBUG PixelID::initialize_from_dict Set barrel field values: 0 - DEBUG PixelID::initialize_from_dict Set dbm field values: -999,999 - DEBUG PixelID::initialize_from_dict Found field values: InDet/Pixel 2/1 - DEBUG PixelID::init_neighbors - DEBUG PixelID::initialize_from_dict - DEBUG Wafer range -> 2/1/0/0/0:21/-6:6 | 2/1/0/1/0:37/-6:6 | 2/1/0/2/0:51/-6:6 | 2/1/-2,2/0:2/0:47/0 - DEBUG Pixel range -> 2/1/0/0/0:21/-6:6/0:327/0:191 | 2/1/0/1/0:37/-6:6/0:327/0:143 | 2/1/0/2/0:51/-6:6/0:327/0:143 | 2/1/-2,2/0:2/0:47/0/0:327/0:143 INFO SCT_ID::initialize_from_dictionary AtlasDetectorID::initialize_from_dictionary - OK INFO Initialize from dictionary cout 0 - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields 
for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG trt decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,-1,1,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 1 0 2 3 mode enumerated - DEBUG phi_mod decode 0 vals 0:31 mask/zero mask/shift/bits/offset 1f fe0fffffffffffff 52 5 7 indexes mode both_bounded - DEBUG lay_wheel decode 0 vals 0:17 mask/zero mask/shift/bits/offset 1f fff07fffffffffff 47 5 12 indexes mode both_bounded - DEBUG str_lay decode 0 vals 0:29 mask/zero mask/shift/bits/offset 1f ffff83ffffffffff 42 5 17 indexes mode both_bounded - DEBUG straw decode 0 vals 0:28 mask/zero mask/shift/bits/offset 1f fffffc1fffffffff 37 5 22 indexes mode both_bounded - DEBUG TRT_ID::initialize_from_dict Set barrel field values: -1,1 - DEBUG TRT_ID::initialize_from_dict Found field values: InDet/TRT 2/3 - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG TRT_ID::init_hashes module hash max 1344 - DEBUG TRT_ID::init_hashes straw layer hash max 19008 - DEBUG TRT_ID::init_hashes straw hash max 424576 - DEBUG TRT_ID::initialize_from_dict - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG Straw layer range -> 2/3/-1,1/0:31/0/0 | 2/3/-1,1/0:31/0/1:4 | 2/3/-1,1/0:31/0/5:9 | 2/3/-1,1/0:31/0/10:14 | 2/3/-1,1/0:31/0/15:17 | 2/3/-1,1/0:31/0/18 | 2/3/-1,1/0:31/1/0 | 2/3/-1,1/0:31/1/1:5 | 2/3/-1,1/0:31/1/6:10 | 2/3/-1,1/0:31/1/11:15 | 2/3/-1,1/0:31/1/16:20 | 2/3/-1,1/0:31/1/21:22 | 2/3/-1,1/0:31/1/23 | 2/3/-1,1/0:31/2/0 | 2/3/-1,1/0:31/2/1:4 | 2/3/-1,1/0:31/2/5:9 | 2/3/-1,1/0:31/2/10:14 | 2/3/-1,1/0:31/2/15:19 | 2/3/-1,1/0:31/2/20:24 | 2/3/-1,1/0:31/2/25:28 | 
2/3/-1,1/0:31/2/29 | 2/3/-2,2/0:31/0:5/0:15 | 2/3/-2,2/0:31/6:13/0:7 | 2/3/-2,2/0:31/14:17/0:15 - DEBUG Straw range -> 2/3/-1,1/0:31/0/0/0:14 | 2/3/-1,1/0:31/0/1:4/0:15 | 2/3/-1,1/0:31/0/5:9/0:16 | 2/3/-1,1/0:31/0/10:14/0:17 | 2/3/-1,1/0:31/0/15:17/0:18 | 2/3/-1,1/0:31/0/18/0:17 | 2/3/-1,1/0:31/1/0/0:18 | 2/3/-1,1/0:31/1/1:5/0:19 | 2/3/-1,1/0:31/1/6:10/0:20 | 2/3/-1,1/0:31/1/11:15/0:21 | 2/3/-1,1/0:31/1/16:20/0:22 | 2/3/-1,1/0:31/1/21:22/0:23 | 2/3/-1,1/0:31/1/23/0:22 | 2/3/-1,1/0:31/2/0/0:22 | 2/3/-1,1/0:31/2/1:4/0:23 | 2/3/-1,1/0:31/2/5:9/0:24 | 2/3/-1,1/0:31/2/10:14/0:25 | 2/3/-1,1/0:31/2/15:19/0:26 | 2/3/-1,1/0:31/2/20:24/0:27 | 2/3/-1,1/0:31/2/25:28/0:28 | 2/3/-1,1/0:31/2/29/0:27 | 2/3/-2,2/0:31/0:5/0:15/0:23 | 2/3/-2,2/0:31/6:13/0:7/0:23 | 2/3/-2,2/0:31/14:17/0:15/0:17 - AtlasDetectorID::initialize_from_dictionary - OK -CscIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -CscIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -CscIdHelper INFO MultiRange built successfully to cscStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK CscIdHelper INFO Initializing CSC hash indices ... -CscIdHelper INFO The element hash max is 32 -CscIdHelper INFO The detector element hash max is 64 CscIdHelper INFO The channel hash max is 61440 CscIdHelper INFO Initializing CSC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -RpcIdHelper INFO MultiRange built successfully to doubletR: MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to detectorElement: DetectorElement MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to rpcStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper INFO Initializing RPC hash indices ... 
-RpcIdHelper INFO The element hash max is 600 -RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -TgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper INFO Initializing TGC hash indices ... -TgcIdHelper INFO The element hash max is 1578 -TgcIdHelper INFO The detector element hash max is 1578 TgcIdHelper INFO Initializing TGC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -MdtIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MdtIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 261 -MdtIdHelper INFO MultiRange built successfully to tube: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper INFO Initializing MDT hash indices ... -MdtIdHelper INFO The element hash max is 1188 -MdtIdHelper INFO The detector element hash max is 2328 MdtIdHelper INFO Initializing MDT hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -sTgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -sTgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 253 -sTgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK sTgcIdHelper INFO Initializing sTGC hash indices ... -sTgcIdHelper INFO The element hash max is 96 -sTgcIdHelper INFO The detector element hash max is 192 sTgcIdHelper INFO Initializing sTGC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -MmIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MmIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -MmIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MmIdHelper INFO Initializing MicroMegas hash indices ... -MmIdHelper INFO The element hash max is 64 -MmIdHelper INFO The detector element hash max is 128 MmIdHelper INFO Initializing MicroMegas hash indices for finding neighbors ... AtlasDetectorID::initialize_from_dictionary - OK -ClassIDSvc INFO getRegistryEntries: read 379 CLIDRegistry entries for module ALL test1 diff --git a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/MdtPrepDataContainerCnv_p2_test.ref b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/MdtPrepDataContainerCnv_p2_test.ref index 3934b603b410976ca5f8d36552efe31f9788695a..eafa59f97879b9e447a9cdea019dc6bb7238a360 100644 --- a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/MdtPrepDataContainerCnv_p2_test.ref +++ b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/MdtPrepDataContainerCnv_p2_test.ref @@ -1,18 +1,6 @@ -Initializing Gaudi ApplicationMgr using job opts /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # =======> /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # (1,1): ApplicationMgr.Dlls += ["StoreGate", "CLIDComps"] -JobOptionsSvc INFO # (2,1): ApplicationMgr.ExtSvc += ["StoreGateSvc", "StoreGateSvc/DetectorStore", "StoreGateSvc/ConditionStore", "Athena::RCUSvc"] -JobOptionsSvc INFO Job options successfully read in from /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -ApplicationMgr SUCCESS 
-==================================================================================================================================== - Welcome to ApplicationMgr (GaudiCoreSvc v34r0) - running on pcumass4 on Wed Nov 4 09:35:05 2020 -==================================================================================================================================== -ApplicationMgr INFO Successfully loaded modules : StoreGate, CLIDComps ApplicationMgr INFO Application Manager Configured successfully -ClassIDSvc INFO getRegistryEntries: read 12360 CLIDRegistry entries for module ALL EventLoopMgr WARNING Unable to locate service "EventSelector" EventLoopMgr WARNING No events will be processed from external input. ApplicationMgr INFO Application Manager Initialized successfully @@ -22,22 +10,7 @@ ApplicationMgr Ready Volume pmdum2 not found: returning 0 Volume pmdum3 not found: returning 0 INFO Initialize from dictionary - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG pixel decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,0,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 0 1 0 2 mode enumerated - DEBUG bec_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG lay_disk decode 0 vals 0:2 mask/zero mask/shift/bits/offset 3 fe7fffffffffffff 55 2 7 indexes mode both_bounded - DEBUG lay_disk_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_mod decode 0 vals 0:51 mask/zero mask/shift/bits/offset 3f ff81ffffffffffff 49 6 9 indexes mode both_bounded - DEBUG phi_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode 
both_bounded - DEBUG eta_mod decode 1 vals -6:6 mask/zero mask/shift/bits/offset f fffe1fffffffffff 45 4 15 indexes mode both_bounded - DEBUG eta_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_index decode 0 vals 0:327 mask/zero mask/shift/bits/offset 1ff ffffe00fffffffff 36 9 19 indexes mode both_bounded - DEBUG eta_index decode 0 vals 0:191 mask/zero mask/shift/bits/offset ff fffffff00fffffff 28 8 28 indexes mode both_bounded - DEBUG bec_eta_mod decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded indet 1 2,4,5,7,10,11,12,13 7 1fffffffffffffff 61 3 0 min/max 2 13 values 2 4 5 7 10 11 12 13 indexes 0 0 1 2 0 3 0 0 4 5 6 7 indices 8 prev 0 next 0 mode enumerated cont mode none pixel 1 1:3 3 e7ffffffffffffff 59 2 3 min/max 1 3 values indexes indices 3 prev 0 next 0 mode both_bounded cont mode none bec 1 -2,0,2 3 f9ffffffffffffff 57 2 5 min/max -2 2 values -2 0 2 indexes 0 0 1 0 2 indices 3 prev 0 next 0 mode enumerated cont mode none @@ -60,85 +33,28 @@ phi_module 4 eta_module 5 phi_index 6 eta_index 7 - DEBUG PixelID::initialize_from_dict Set barrel field values: 0 - DEBUG PixelID::initialize_from_dict Set dbm field values: -999,999 - DEBUG PixelID::initialize_from_dict Found field values: InDet/Pixel 2/1 - DEBUG PixelID::init_neighbors - DEBUG PixelID::initialize_from_dict - DEBUG Wafer range -> 2/1/0/0/0:21/-6:6 | 2/1/0/1/0:37/-6:6 | 2/1/0/2/0:51/-6:6 | 2/1/-2,2/0:2/0:47/0 - DEBUG Pixel range -> 2/1/0/0/0:21/-6:6/0:327/0:191 | 2/1/0/1/0:37/-6:6/0:327/0:143 | 2/1/0/2/0:51/-6:6/0:327/0:143 | 2/1/-2,2/0:2/0:47/0/0:327/0:143 INFO SCT_ID::initialize_from_dictionary AtlasDetectorID::initialize_from_dictionary - OK INFO Initialize from dictionary cout 0 - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 
indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG trt decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,-1,1,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 1 0 2 3 mode enumerated - DEBUG phi_mod decode 0 vals 0:31 mask/zero mask/shift/bits/offset 1f fe0fffffffffffff 52 5 7 indexes mode both_bounded - DEBUG lay_wheel decode 0 vals 0:17 mask/zero mask/shift/bits/offset 1f fff07fffffffffff 47 5 12 indexes mode both_bounded - DEBUG str_lay decode 0 vals 0:29 mask/zero mask/shift/bits/offset 1f ffff83ffffffffff 42 5 17 indexes mode both_bounded - DEBUG straw decode 0 vals 0:28 mask/zero mask/shift/bits/offset 1f fffffc1fffffffff 37 5 22 indexes mode both_bounded - DEBUG TRT_ID::initialize_from_dict Set barrel field values: -1,1 - DEBUG TRT_ID::initialize_from_dict Found field values: InDet/TRT 2/3 - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG TRT_ID::init_hashes module hash max 1344 - DEBUG TRT_ID::init_hashes straw layer hash max 19008 - DEBUG TRT_ID::init_hashes straw hash max 424576 - DEBUG TRT_ID::initialize_from_dict - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG Straw layer range -> 2/3/-1,1/0:31/0/0 | 2/3/-1,1/0:31/0/1:4 | 2/3/-1,1/0:31/0/5:9 | 2/3/-1,1/0:31/0/10:14 | 2/3/-1,1/0:31/0/15:17 | 2/3/-1,1/0:31/0/18 | 2/3/-1,1/0:31/1/0 | 2/3/-1,1/0:31/1/1:5 | 2/3/-1,1/0:31/1/6:10 | 2/3/-1,1/0:31/1/11:15 | 2/3/-1,1/0:31/1/16:20 | 2/3/-1,1/0:31/1/21:22 | 2/3/-1,1/0:31/1/23 | 2/3/-1,1/0:31/2/0 | 2/3/-1,1/0:31/2/1:4 | 2/3/-1,1/0:31/2/5:9 | 2/3/-1,1/0:31/2/10:14 | 2/3/-1,1/0:31/2/15:19 | 2/3/-1,1/0:31/2/20:24 | 2/3/-1,1/0:31/2/25:28 | 2/3/-1,1/0:31/2/29 | 2/3/-2,2/0:31/0:5/0:15 | 2/3/-2,2/0:31/6:13/0:7 | 2/3/-2,2/0:31/14:17/0:15 - DEBUG Straw range -> 
2/3/-1,1/0:31/0/0/0:14 | 2/3/-1,1/0:31/0/1:4/0:15 | 2/3/-1,1/0:31/0/5:9/0:16 | 2/3/-1,1/0:31/0/10:14/0:17 | 2/3/-1,1/0:31/0/15:17/0:18 | 2/3/-1,1/0:31/0/18/0:17 | 2/3/-1,1/0:31/1/0/0:18 | 2/3/-1,1/0:31/1/1:5/0:19 | 2/3/-1,1/0:31/1/6:10/0:20 | 2/3/-1,1/0:31/1/11:15/0:21 | 2/3/-1,1/0:31/1/16:20/0:22 | 2/3/-1,1/0:31/1/21:22/0:23 | 2/3/-1,1/0:31/1/23/0:22 | 2/3/-1,1/0:31/2/0/0:22 | 2/3/-1,1/0:31/2/1:4/0:23 | 2/3/-1,1/0:31/2/5:9/0:24 | 2/3/-1,1/0:31/2/10:14/0:25 | 2/3/-1,1/0:31/2/15:19/0:26 | 2/3/-1,1/0:31/2/20:24/0:27 | 2/3/-1,1/0:31/2/25:28/0:28 | 2/3/-1,1/0:31/2/29/0:27 | 2/3/-2,2/0:31/0:5/0:15/0:23 | 2/3/-2,2/0:31/6:13/0:7/0:23 | 2/3/-2,2/0:31/14:17/0:15/0:17 - AtlasDetectorID::initialize_from_dictionary - OK -CscIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -CscIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -CscIdHelper INFO MultiRange built successfully to cscStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK CscIdHelper INFO Initializing CSC hash indices ... -CscIdHelper INFO The element hash max is 32 -CscIdHelper INFO The detector element hash max is 64 CscIdHelper INFO The channel hash max is 61440 CscIdHelper INFO Initializing CSC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -RpcIdHelper INFO MultiRange built successfully to doubletR: MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to detectorElement: DetectorElement MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to rpcStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper INFO Initializing RPC hash indices ... -RpcIdHelper INFO The element hash max is 600 -RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -TgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper INFO Initializing TGC hash indices ... -TgcIdHelper INFO The element hash max is 1578 -TgcIdHelper INFO The detector element hash max is 1578 TgcIdHelper INFO Initializing TGC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -MdtIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MdtIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 261 -MdtIdHelper INFO MultiRange built successfully to tube: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper INFO Initializing MDT hash indices ... -MdtIdHelper INFO The element hash max is 1188 -MdtIdHelper INFO The detector element hash max is 2328 MdtIdHelper INFO Initializing MDT hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -sTgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -sTgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 253 -sTgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK sTgcIdHelper INFO Initializing sTGC hash indices ... -sTgcIdHelper INFO The element hash max is 96 -sTgcIdHelper INFO The detector element hash max is 192 sTgcIdHelper INFO Initializing sTGC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -MmIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MmIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -MmIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MmIdHelper INFO Initializing MicroMegas hash indices ... -MmIdHelper INFO The element hash max is 64 -MmIdHelper INFO The detector element hash max is 128 MmIdHelper INFO Initializing MicroMegas hash indices for finding neighbors ... AtlasDetectorID::initialize_from_dictionary - OK -ClassIDSvc INFO getRegistryEntries: read 379 CLIDRegistry entries for module ALL test1 diff --git a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/RpcClusterOnTrackCnv_p3_test.ref b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/RpcClusterOnTrackCnv_p3_test.ref index 1a0f13b1c0c227435ce376c2e4cc5c8e01a7f8a5..eafa59f97879b9e447a9cdea019dc6bb7238a360 100644 --- a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/RpcClusterOnTrackCnv_p3_test.ref +++ b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/RpcClusterOnTrackCnv_p3_test.ref @@ -1,18 +1,6 @@ -Initializing Gaudi ApplicationMgr using job opts /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # =======> /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # (1,1): ApplicationMgr.Dlls += ["StoreGate", "CLIDComps"] -JobOptionsSvc INFO # (2,1): ApplicationMgr.ExtSvc += ["StoreGateSvc", "StoreGateSvc/DetectorStore", "StoreGateSvc/ConditionStore", "Athena::RCUSvc"] -JobOptionsSvc INFO Job options successfully read in from /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -ApplicationMgr SUCCESS 
-==================================================================================================================================== - Welcome to ApplicationMgr (GaudiCoreSvc v34r0) - running on pcumass4 on Wed Nov 4 09:34:29 2020 -==================================================================================================================================== -ApplicationMgr INFO Successfully loaded modules : StoreGate, CLIDComps ApplicationMgr INFO Application Manager Configured successfully -ClassIDSvc INFO getRegistryEntries: read 12361 CLIDRegistry entries for module ALL EventLoopMgr WARNING Unable to locate service "EventSelector" EventLoopMgr WARNING No events will be processed from external input. ApplicationMgr INFO Application Manager Initialized successfully @@ -22,22 +10,7 @@ ApplicationMgr Ready Volume pmdum2 not found: returning 0 Volume pmdum3 not found: returning 0 INFO Initialize from dictionary - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG pixel decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,0,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 0 1 0 2 mode enumerated - DEBUG bec_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG lay_disk decode 0 vals 0:2 mask/zero mask/shift/bits/offset 3 fe7fffffffffffff 55 2 7 indexes mode both_bounded - DEBUG lay_disk_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_mod decode 0 vals 0:51 mask/zero mask/shift/bits/offset 3f ff81ffffffffffff 49 6 9 indexes mode both_bounded - DEBUG phi_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode 
both_bounded - DEBUG eta_mod decode 1 vals -6:6 mask/zero mask/shift/bits/offset f fffe1fffffffffff 45 4 15 indexes mode both_bounded - DEBUG eta_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_index decode 0 vals 0:327 mask/zero mask/shift/bits/offset 1ff ffffe00fffffffff 36 9 19 indexes mode both_bounded - DEBUG eta_index decode 0 vals 0:191 mask/zero mask/shift/bits/offset ff fffffff00fffffff 28 8 28 indexes mode both_bounded - DEBUG bec_eta_mod decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded indet 1 2,4,5,7,10,11,12,13 7 1fffffffffffffff 61 3 0 min/max 2 13 values 2 4 5 7 10 11 12 13 indexes 0 0 1 2 0 3 0 0 4 5 6 7 indices 8 prev 0 next 0 mode enumerated cont mode none pixel 1 1:3 3 e7ffffffffffffff 59 2 3 min/max 1 3 values indexes indices 3 prev 0 next 0 mode both_bounded cont mode none bec 1 -2,0,2 3 f9ffffffffffffff 57 2 5 min/max -2 2 values -2 0 2 indexes 0 0 1 0 2 indices 3 prev 0 next 0 mode enumerated cont mode none @@ -60,87 +33,28 @@ phi_module 4 eta_module 5 phi_index 6 eta_index 7 - DEBUG PixelID::initialize_from_dict Set barrel field values: 0 - DEBUG PixelID::initialize_from_dict Set dbm field values: -999,999 - DEBUG PixelID::initialize_from_dict Found field values: InDet/Pixel 2/1 - DEBUG PixelID::init_neighbors - DEBUG PixelID::initialize_from_dict - DEBUG Wafer range -> 2/1/0/0/0:21/-6:6 | 2/1/0/1/0:37/-6:6 | 2/1/0/2/0:51/-6:6 | 2/1/-2,2/0:2/0:47/0 - DEBUG Pixel range -> 2/1/0/0/0:21/-6:6/0:327/0:191 | 2/1/0/1/0:37/-6:6/0:327/0:143 | 2/1/0/2/0:51/-6:6/0:327/0:143 | 2/1/-2,2/0:2/0:47/0/0:327/0:143 INFO SCT_ID::initialize_from_dictionary AtlasDetectorID::initialize_from_dictionary - OK INFO Initialize from dictionary cout 0 - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 
indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG trt decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,-1,1,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 1 0 2 3 mode enumerated - DEBUG phi_mod decode 0 vals 0:31 mask/zero mask/shift/bits/offset 1f fe0fffffffffffff 52 5 7 indexes mode both_bounded - DEBUG lay_wheel decode 0 vals 0:17 mask/zero mask/shift/bits/offset 1f fff07fffffffffff 47 5 12 indexes mode both_bounded - DEBUG str_lay decode 0 vals 0:29 mask/zero mask/shift/bits/offset 1f ffff83ffffffffff 42 5 17 indexes mode both_bounded - DEBUG straw decode 0 vals 0:28 mask/zero mask/shift/bits/offset 1f fffffc1fffffffff 37 5 22 indexes mode both_bounded - DEBUG TRT_ID::initialize_from_dict Set barrel field values: -1,1 - DEBUG TRT_ID::initialize_from_dict Found field values: InDet/TRT 2/3 - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG TRT_ID::init_hashes module hash max 1344 - DEBUG TRT_ID::init_hashes straw layer hash max 19008 - DEBUG TRT_ID::init_hashes straw hash max 424576 - DEBUG TRT_ID::initialize_from_dict - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG Straw layer range -> 2/3/-1,1/0:31/0/0 | 2/3/-1,1/0:31/0/1:4 | 2/3/-1,1/0:31/0/5:9 | 2/3/-1,1/0:31/0/10:14 | 2/3/-1,1/0:31/0/15:17 | 2/3/-1,1/0:31/0/18 | 2/3/-1,1/0:31/1/0 | 2/3/-1,1/0:31/1/1:5 | 2/3/-1,1/0:31/1/6:10 | 2/3/-1,1/0:31/1/11:15 | 2/3/-1,1/0:31/1/16:20 | 2/3/-1,1/0:31/1/21:22 | 2/3/-1,1/0:31/1/23 | 2/3/-1,1/0:31/2/0 | 2/3/-1,1/0:31/2/1:4 | 2/3/-1,1/0:31/2/5:9 | 2/3/-1,1/0:31/2/10:14 | 2/3/-1,1/0:31/2/15:19 | 2/3/-1,1/0:31/2/20:24 | 2/3/-1,1/0:31/2/25:28 | 2/3/-1,1/0:31/2/29 | 2/3/-2,2/0:31/0:5/0:15 | 2/3/-2,2/0:31/6:13/0:7 | 2/3/-2,2/0:31/14:17/0:15 - DEBUG Straw range -> 
2/3/-1,1/0:31/0/0/0:14 | 2/3/-1,1/0:31/0/1:4/0:15 | 2/3/-1,1/0:31/0/5:9/0:16 | 2/3/-1,1/0:31/0/10:14/0:17 | 2/3/-1,1/0:31/0/15:17/0:18 | 2/3/-1,1/0:31/0/18/0:17 | 2/3/-1,1/0:31/1/0/0:18 | 2/3/-1,1/0:31/1/1:5/0:19 | 2/3/-1,1/0:31/1/6:10/0:20 | 2/3/-1,1/0:31/1/11:15/0:21 | 2/3/-1,1/0:31/1/16:20/0:22 | 2/3/-1,1/0:31/1/21:22/0:23 | 2/3/-1,1/0:31/1/23/0:22 | 2/3/-1,1/0:31/2/0/0:22 | 2/3/-1,1/0:31/2/1:4/0:23 | 2/3/-1,1/0:31/2/5:9/0:24 | 2/3/-1,1/0:31/2/10:14/0:25 | 2/3/-1,1/0:31/2/15:19/0:26 | 2/3/-1,1/0:31/2/20:24/0:27 | 2/3/-1,1/0:31/2/25:28/0:28 | 2/3/-1,1/0:31/2/29/0:27 | 2/3/-2,2/0:31/0:5/0:15/0:23 | 2/3/-2,2/0:31/6:13/0:7/0:23 | 2/3/-2,2/0:31/14:17/0:15/0:17 - AtlasDetectorID::initialize_from_dictionary - OK -CscIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -CscIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -CscIdHelper INFO MultiRange built successfully to cscStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK CscIdHelper INFO Initializing CSC hash indices ... -CscIdHelper INFO The element hash max is 32 -CscIdHelper INFO The detector element hash max is 64 CscIdHelper INFO The channel hash max is 61440 CscIdHelper INFO Initializing CSC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -RpcIdHelper INFO MultiRange built successfully to doubletR: MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to detectorElement: DetectorElement MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to rpcStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper INFO Initializing RPC hash indices ... -RpcIdHelper INFO The element hash max is 600 -RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -TgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper INFO Initializing TGC hash indices ... -TgcIdHelper INFO The element hash max is 1578 -TgcIdHelper INFO The detector element hash max is 1578 TgcIdHelper INFO Initializing TGC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -MdtIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MdtIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 261 -MdtIdHelper INFO MultiRange built successfully to tube: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper INFO Initializing MDT hash indices ... -MdtIdHelper INFO The element hash max is 1188 -MdtIdHelper INFO The detector element hash max is 2328 MdtIdHelper INFO Initializing MDT hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -sTgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -sTgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 253 -sTgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK sTgcIdHelper INFO Initializing sTGC hash indices ... -sTgcIdHelper INFO The element hash max is 96 -sTgcIdHelper INFO The detector element hash max is 192 sTgcIdHelper INFO Initializing sTGC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -MmIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MmIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -MmIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MmIdHelper INFO Initializing MicroMegas hash indices ... -MmIdHelper INFO The element hash max is 64 -MmIdHelper INFO The detector element hash max is 128 MmIdHelper INFO Initializing MicroMegas hash indices for finding neighbors ... AtlasDetectorID::initialize_from_dictionary - OK -ClassIDSvc INFO getRegistryEntries: read 379 CLIDRegistry entries for module ALL test1 -ClassIDSvc INFO getRegistryEntries: read 320 CLIDRegistry entries for module ALL -ClassIDSvc INFO getRegistryEntries: read 170 CLIDRegistry entries for module ALL diff --git a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/RpcCoinDataContainerCnv_p1_test.ref b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/RpcCoinDataContainerCnv_p1_test.ref index 37d3bc57a50bddab07999e2574bfcba359b79c0b..eafa59f97879b9e447a9cdea019dc6bb7238a360 100644 --- a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/RpcCoinDataContainerCnv_p1_test.ref +++ b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/RpcCoinDataContainerCnv_p1_test.ref @@ -1,18 +1,6 @@ -Initializing Gaudi ApplicationMgr using job opts /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # =======> /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # (1,1): ApplicationMgr.Dlls += ["StoreGate", "CLIDComps"] -JobOptionsSvc INFO # (2,1): ApplicationMgr.ExtSvc += ["StoreGateSvc", "StoreGateSvc/DetectorStore", "StoreGateSvc/ConditionStore", "Athena::RCUSvc"] -JobOptionsSvc INFO Job options successfully read in from 
/home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -ApplicationMgr SUCCESS -==================================================================================================================================== - Welcome to ApplicationMgr (GaudiCoreSvc v34r0) - running on pcumass4 on Wed Nov 4 09:35:26 2020 -==================================================================================================================================== -ApplicationMgr INFO Successfully loaded modules : StoreGate, CLIDComps ApplicationMgr INFO Application Manager Configured successfully -ClassIDSvc INFO getRegistryEntries: read 12350 CLIDRegistry entries for module ALL EventLoopMgr WARNING Unable to locate service "EventSelector" EventLoopMgr WARNING No events will be processed from external input. ApplicationMgr INFO Application Manager Initialized successfully @@ -22,22 +10,7 @@ ApplicationMgr Ready Volume pmdum2 not found: returning 0 Volume pmdum3 not found: returning 0 INFO Initialize from dictionary - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG pixel decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,0,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 0 1 0 2 mode enumerated - DEBUG bec_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG lay_disk decode 0 vals 0:2 mask/zero mask/shift/bits/offset 3 fe7fffffffffffff 55 2 7 indexes mode both_bounded - DEBUG lay_disk_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_mod decode 0 vals 0:51 mask/zero mask/shift/bits/offset 3f ff81ffffffffffff 49 6 
9 indexes mode both_bounded - DEBUG phi_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG eta_mod decode 1 vals -6:6 mask/zero mask/shift/bits/offset f fffe1fffffffffff 45 4 15 indexes mode both_bounded - DEBUG eta_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_index decode 0 vals 0:327 mask/zero mask/shift/bits/offset 1ff ffffe00fffffffff 36 9 19 indexes mode both_bounded - DEBUG eta_index decode 0 vals 0:191 mask/zero mask/shift/bits/offset ff fffffff00fffffff 28 8 28 indexes mode both_bounded - DEBUG bec_eta_mod decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded indet 1 2,4,5,7,10,11,12,13 7 1fffffffffffffff 61 3 0 min/max 2 13 values 2 4 5 7 10 11 12 13 indexes 0 0 1 2 0 3 0 0 4 5 6 7 indices 8 prev 0 next 0 mode enumerated cont mode none pixel 1 1:3 3 e7ffffffffffffff 59 2 3 min/max 1 3 values indexes indices 3 prev 0 next 0 mode both_bounded cont mode none bec 1 -2,0,2 3 f9ffffffffffffff 57 2 5 min/max -2 2 values -2 0 2 indexes 0 0 1 0 2 indices 3 prev 0 next 0 mode enumerated cont mode none @@ -60,85 +33,28 @@ phi_module 4 eta_module 5 phi_index 6 eta_index 7 - DEBUG PixelID::initialize_from_dict Set barrel field values: 0 - DEBUG PixelID::initialize_from_dict Set dbm field values: -999,999 - DEBUG PixelID::initialize_from_dict Found field values: InDet/Pixel 2/1 - DEBUG PixelID::init_neighbors - DEBUG PixelID::initialize_from_dict - DEBUG Wafer range -> 2/1/0/0/0:21/-6:6 | 2/1/0/1/0:37/-6:6 | 2/1/0/2/0:51/-6:6 | 2/1/-2,2/0:2/0:47/0 - DEBUG Pixel range -> 2/1/0/0/0:21/-6:6/0:327/0:191 | 2/1/0/1/0:37/-6:6/0:327/0:143 | 2/1/0/2/0:51/-6:6/0:327/0:143 | 2/1/-2,2/0:2/0:47/0/0:327/0:143 INFO SCT_ID::initialize_from_dictionary AtlasDetectorID::initialize_from_dictionary - OK INFO Initialize from dictionary cout 0 - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields 
for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG trt decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,-1,1,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 1 0 2 3 mode enumerated - DEBUG phi_mod decode 0 vals 0:31 mask/zero mask/shift/bits/offset 1f fe0fffffffffffff 52 5 7 indexes mode both_bounded - DEBUG lay_wheel decode 0 vals 0:17 mask/zero mask/shift/bits/offset 1f fff07fffffffffff 47 5 12 indexes mode both_bounded - DEBUG str_lay decode 0 vals 0:29 mask/zero mask/shift/bits/offset 1f ffff83ffffffffff 42 5 17 indexes mode both_bounded - DEBUG straw decode 0 vals 0:28 mask/zero mask/shift/bits/offset 1f fffffc1fffffffff 37 5 22 indexes mode both_bounded - DEBUG TRT_ID::initialize_from_dict Set barrel field values: -1,1 - DEBUG TRT_ID::initialize_from_dict Found field values: InDet/TRT 2/3 - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG TRT_ID::init_hashes module hash max 1344 - DEBUG TRT_ID::init_hashes straw layer hash max 19008 - DEBUG TRT_ID::init_hashes straw hash max 424576 - DEBUG TRT_ID::initialize_from_dict - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG Straw layer range -> 2/3/-1,1/0:31/0/0 | 2/3/-1,1/0:31/0/1:4 | 2/3/-1,1/0:31/0/5:9 | 2/3/-1,1/0:31/0/10:14 | 2/3/-1,1/0:31/0/15:17 | 2/3/-1,1/0:31/0/18 | 2/3/-1,1/0:31/1/0 | 2/3/-1,1/0:31/1/1:5 | 2/3/-1,1/0:31/1/6:10 | 2/3/-1,1/0:31/1/11:15 | 2/3/-1,1/0:31/1/16:20 | 2/3/-1,1/0:31/1/21:22 | 2/3/-1,1/0:31/1/23 | 2/3/-1,1/0:31/2/0 | 2/3/-1,1/0:31/2/1:4 | 2/3/-1,1/0:31/2/5:9 | 2/3/-1,1/0:31/2/10:14 | 2/3/-1,1/0:31/2/15:19 | 2/3/-1,1/0:31/2/20:24 | 2/3/-1,1/0:31/2/25:28 | 
2/3/-1,1/0:31/2/29 | 2/3/-2,2/0:31/0:5/0:15 | 2/3/-2,2/0:31/6:13/0:7 | 2/3/-2,2/0:31/14:17/0:15 - DEBUG Straw range -> 2/3/-1,1/0:31/0/0/0:14 | 2/3/-1,1/0:31/0/1:4/0:15 | 2/3/-1,1/0:31/0/5:9/0:16 | 2/3/-1,1/0:31/0/10:14/0:17 | 2/3/-1,1/0:31/0/15:17/0:18 | 2/3/-1,1/0:31/0/18/0:17 | 2/3/-1,1/0:31/1/0/0:18 | 2/3/-1,1/0:31/1/1:5/0:19 | 2/3/-1,1/0:31/1/6:10/0:20 | 2/3/-1,1/0:31/1/11:15/0:21 | 2/3/-1,1/0:31/1/16:20/0:22 | 2/3/-1,1/0:31/1/21:22/0:23 | 2/3/-1,1/0:31/1/23/0:22 | 2/3/-1,1/0:31/2/0/0:22 | 2/3/-1,1/0:31/2/1:4/0:23 | 2/3/-1,1/0:31/2/5:9/0:24 | 2/3/-1,1/0:31/2/10:14/0:25 | 2/3/-1,1/0:31/2/15:19/0:26 | 2/3/-1,1/0:31/2/20:24/0:27 | 2/3/-1,1/0:31/2/25:28/0:28 | 2/3/-1,1/0:31/2/29/0:27 | 2/3/-2,2/0:31/0:5/0:15/0:23 | 2/3/-2,2/0:31/6:13/0:7/0:23 | 2/3/-2,2/0:31/14:17/0:15/0:17 - AtlasDetectorID::initialize_from_dictionary - OK -CscIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -CscIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -CscIdHelper INFO MultiRange built successfully to cscStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK CscIdHelper INFO Initializing CSC hash indices ... -CscIdHelper INFO The element hash max is 32 -CscIdHelper INFO The detector element hash max is 64 CscIdHelper INFO The channel hash max is 61440 CscIdHelper INFO Initializing CSC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -RpcIdHelper INFO MultiRange built successfully to doubletR: MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to detectorElement: DetectorElement MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to rpcStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper INFO Initializing RPC hash indices ... 
-RpcIdHelper INFO The element hash max is 600 -RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -TgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper INFO Initializing TGC hash indices ... -TgcIdHelper INFO The element hash max is 1578 -TgcIdHelper INFO The detector element hash max is 1578 TgcIdHelper INFO Initializing TGC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -MdtIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MdtIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 261 -MdtIdHelper INFO MultiRange built successfully to tube: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper INFO Initializing MDT hash indices ... -MdtIdHelper INFO The element hash max is 1188 -MdtIdHelper INFO The detector element hash max is 2328 MdtIdHelper INFO Initializing MDT hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -sTgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -sTgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 253 -sTgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK sTgcIdHelper INFO Initializing sTGC hash indices ... -sTgcIdHelper INFO The element hash max is 96 -sTgcIdHelper INFO The detector element hash max is 192 sTgcIdHelper INFO Initializing sTGC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -MmIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MmIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -MmIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MmIdHelper INFO Initializing MicroMegas hash indices ... -MmIdHelper INFO The element hash max is 64 -MmIdHelper INFO The detector element hash max is 128 MmIdHelper INFO Initializing MicroMegas hash indices for finding neighbors ... AtlasDetectorID::initialize_from_dictionary - OK -ClassIDSvc INFO getRegistryEntries: read 379 CLIDRegistry entries for module ALL test1 diff --git a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/RpcPrepDataContainerCnv_p1_test.ref b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/RpcPrepDataContainerCnv_p1_test.ref index 5636415539d52778261301827ad0f11a8a8290c7..eafa59f97879b9e447a9cdea019dc6bb7238a360 100644 --- a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/RpcPrepDataContainerCnv_p1_test.ref +++ b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/RpcPrepDataContainerCnv_p1_test.ref @@ -1,18 +1,6 @@ -Initializing Gaudi ApplicationMgr using job opts /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # =======> /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # (1,1): ApplicationMgr.Dlls += ["StoreGate", "CLIDComps"] -JobOptionsSvc INFO # (2,1): ApplicationMgr.ExtSvc += ["StoreGateSvc", "StoreGateSvc/DetectorStore", "StoreGateSvc/ConditionStore", "Athena::RCUSvc"] -JobOptionsSvc INFO Job options successfully read in from /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -ApplicationMgr SUCCESS 
-==================================================================================================================================== - Welcome to ApplicationMgr (GaudiCoreSvc v34r0) - running on pcumass4 on Wed Nov 4 09:35:06 2020 -==================================================================================================================================== -ApplicationMgr INFO Successfully loaded modules : StoreGate, CLIDComps ApplicationMgr INFO Application Manager Configured successfully -ClassIDSvc INFO getRegistryEntries: read 12360 CLIDRegistry entries for module ALL EventLoopMgr WARNING Unable to locate service "EventSelector" EventLoopMgr WARNING No events will be processed from external input. ApplicationMgr INFO Application Manager Initialized successfully @@ -22,22 +10,7 @@ ApplicationMgr Ready Volume pmdum2 not found: returning 0 Volume pmdum3 not found: returning 0 INFO Initialize from dictionary - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG pixel decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,0,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 0 1 0 2 mode enumerated - DEBUG bec_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG lay_disk decode 0 vals 0:2 mask/zero mask/shift/bits/offset 3 fe7fffffffffffff 55 2 7 indexes mode both_bounded - DEBUG lay_disk_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_mod decode 0 vals 0:51 mask/zero mask/shift/bits/offset 3f ff81ffffffffffff 49 6 9 indexes mode both_bounded - DEBUG phi_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode 
both_bounded - DEBUG eta_mod decode 1 vals -6:6 mask/zero mask/shift/bits/offset f fffe1fffffffffff 45 4 15 indexes mode both_bounded - DEBUG eta_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_index decode 0 vals 0:327 mask/zero mask/shift/bits/offset 1ff ffffe00fffffffff 36 9 19 indexes mode both_bounded - DEBUG eta_index decode 0 vals 0:191 mask/zero mask/shift/bits/offset ff fffffff00fffffff 28 8 28 indexes mode both_bounded - DEBUG bec_eta_mod decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded indet 1 2,4,5,7,10,11,12,13 7 1fffffffffffffff 61 3 0 min/max 2 13 values 2 4 5 7 10 11 12 13 indexes 0 0 1 2 0 3 0 0 4 5 6 7 indices 8 prev 0 next 0 mode enumerated cont mode none pixel 1 1:3 3 e7ffffffffffffff 59 2 3 min/max 1 3 values indexes indices 3 prev 0 next 0 mode both_bounded cont mode none bec 1 -2,0,2 3 f9ffffffffffffff 57 2 5 min/max -2 2 values -2 0 2 indexes 0 0 1 0 2 indices 3 prev 0 next 0 mode enumerated cont mode none @@ -60,85 +33,28 @@ phi_module 4 eta_module 5 phi_index 6 eta_index 7 - DEBUG PixelID::initialize_from_dict Set barrel field values: 0 - DEBUG PixelID::initialize_from_dict Set dbm field values: -999,999 - DEBUG PixelID::initialize_from_dict Found field values: InDet/Pixel 2/1 - DEBUG PixelID::init_neighbors - DEBUG PixelID::initialize_from_dict - DEBUG Wafer range -> 2/1/0/0/0:21/-6:6 | 2/1/0/1/0:37/-6:6 | 2/1/0/2/0:51/-6:6 | 2/1/-2,2/0:2/0:47/0 - DEBUG Pixel range -> 2/1/0/0/0:21/-6:6/0:327/0:191 | 2/1/0/1/0:37/-6:6/0:327/0:143 | 2/1/0/2/0:51/-6:6/0:327/0:143 | 2/1/-2,2/0:2/0:47/0/0:327/0:143 INFO SCT_ID::initialize_from_dictionary AtlasDetectorID::initialize_from_dictionary - OK INFO Initialize from dictionary cout 0 - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 
indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG trt decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,-1,1,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 1 0 2 3 mode enumerated - DEBUG phi_mod decode 0 vals 0:31 mask/zero mask/shift/bits/offset 1f fe0fffffffffffff 52 5 7 indexes mode both_bounded - DEBUG lay_wheel decode 0 vals 0:17 mask/zero mask/shift/bits/offset 1f fff07fffffffffff 47 5 12 indexes mode both_bounded - DEBUG str_lay decode 0 vals 0:29 mask/zero mask/shift/bits/offset 1f ffff83ffffffffff 42 5 17 indexes mode both_bounded - DEBUG straw decode 0 vals 0:28 mask/zero mask/shift/bits/offset 1f fffffc1fffffffff 37 5 22 indexes mode both_bounded - DEBUG TRT_ID::initialize_from_dict Set barrel field values: -1,1 - DEBUG TRT_ID::initialize_from_dict Found field values: InDet/TRT 2/3 - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG TRT_ID::init_hashes module hash max 1344 - DEBUG TRT_ID::init_hashes straw layer hash max 19008 - DEBUG TRT_ID::init_hashes straw hash max 424576 - DEBUG TRT_ID::initialize_from_dict - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG Straw layer range -> 2/3/-1,1/0:31/0/0 | 2/3/-1,1/0:31/0/1:4 | 2/3/-1,1/0:31/0/5:9 | 2/3/-1,1/0:31/0/10:14 | 2/3/-1,1/0:31/0/15:17 | 2/3/-1,1/0:31/0/18 | 2/3/-1,1/0:31/1/0 | 2/3/-1,1/0:31/1/1:5 | 2/3/-1,1/0:31/1/6:10 | 2/3/-1,1/0:31/1/11:15 | 2/3/-1,1/0:31/1/16:20 | 2/3/-1,1/0:31/1/21:22 | 2/3/-1,1/0:31/1/23 | 2/3/-1,1/0:31/2/0 | 2/3/-1,1/0:31/2/1:4 | 2/3/-1,1/0:31/2/5:9 | 2/3/-1,1/0:31/2/10:14 | 2/3/-1,1/0:31/2/15:19 | 2/3/-1,1/0:31/2/20:24 | 2/3/-1,1/0:31/2/25:28 | 2/3/-1,1/0:31/2/29 | 2/3/-2,2/0:31/0:5/0:15 | 2/3/-2,2/0:31/6:13/0:7 | 2/3/-2,2/0:31/14:17/0:15 - DEBUG Straw range -> 
2/3/-1,1/0:31/0/0/0:14 | 2/3/-1,1/0:31/0/1:4/0:15 | 2/3/-1,1/0:31/0/5:9/0:16 | 2/3/-1,1/0:31/0/10:14/0:17 | 2/3/-1,1/0:31/0/15:17/0:18 | 2/3/-1,1/0:31/0/18/0:17 | 2/3/-1,1/0:31/1/0/0:18 | 2/3/-1,1/0:31/1/1:5/0:19 | 2/3/-1,1/0:31/1/6:10/0:20 | 2/3/-1,1/0:31/1/11:15/0:21 | 2/3/-1,1/0:31/1/16:20/0:22 | 2/3/-1,1/0:31/1/21:22/0:23 | 2/3/-1,1/0:31/1/23/0:22 | 2/3/-1,1/0:31/2/0/0:22 | 2/3/-1,1/0:31/2/1:4/0:23 | 2/3/-1,1/0:31/2/5:9/0:24 | 2/3/-1,1/0:31/2/10:14/0:25 | 2/3/-1,1/0:31/2/15:19/0:26 | 2/3/-1,1/0:31/2/20:24/0:27 | 2/3/-1,1/0:31/2/25:28/0:28 | 2/3/-1,1/0:31/2/29/0:27 | 2/3/-2,2/0:31/0:5/0:15/0:23 | 2/3/-2,2/0:31/6:13/0:7/0:23 | 2/3/-2,2/0:31/14:17/0:15/0:17 - AtlasDetectorID::initialize_from_dictionary - OK -CscIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -CscIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -CscIdHelper INFO MultiRange built successfully to cscStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK CscIdHelper INFO Initializing CSC hash indices ... -CscIdHelper INFO The element hash max is 32 -CscIdHelper INFO The detector element hash max is 64 CscIdHelper INFO The channel hash max is 61440 CscIdHelper INFO Initializing CSC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -RpcIdHelper INFO MultiRange built successfully to doubletR: MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to detectorElement: DetectorElement MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to rpcStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper INFO Initializing RPC hash indices ... -RpcIdHelper INFO The element hash max is 600 -RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -TgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper INFO Initializing TGC hash indices ... -TgcIdHelper INFO The element hash max is 1578 -TgcIdHelper INFO The detector element hash max is 1578 TgcIdHelper INFO Initializing TGC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -MdtIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MdtIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 261 -MdtIdHelper INFO MultiRange built successfully to tube: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper INFO Initializing MDT hash indices ... -MdtIdHelper INFO The element hash max is 1188 -MdtIdHelper INFO The detector element hash max is 2328 MdtIdHelper INFO Initializing MDT hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -sTgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -sTgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 253 -sTgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK sTgcIdHelper INFO Initializing sTGC hash indices ... -sTgcIdHelper INFO The element hash max is 96 -sTgcIdHelper INFO The detector element hash max is 192 sTgcIdHelper INFO Initializing sTGC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -MmIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MmIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -MmIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MmIdHelper INFO Initializing MicroMegas hash indices ... -MmIdHelper INFO The element hash max is 64 -MmIdHelper INFO The detector element hash max is 128 MmIdHelper INFO Initializing MicroMegas hash indices for finding neighbors ... AtlasDetectorID::initialize_from_dictionary - OK -ClassIDSvc INFO getRegistryEntries: read 379 CLIDRegistry entries for module ALL test1 diff --git a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/RpcPrepDataContainerCnv_p2_test.ref b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/RpcPrepDataContainerCnv_p2_test.ref index 997bc0557f5f592867d15fea25ef3d70677acc51..eafa59f97879b9e447a9cdea019dc6bb7238a360 100644 --- a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/RpcPrepDataContainerCnv_p2_test.ref +++ b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/RpcPrepDataContainerCnv_p2_test.ref @@ -1,18 +1,6 @@ -Initializing Gaudi ApplicationMgr using job opts /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # =======> /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # (1,1): ApplicationMgr.Dlls += ["StoreGate", "CLIDComps"] -JobOptionsSvc INFO # (2,1): ApplicationMgr.ExtSvc += ["StoreGateSvc", "StoreGateSvc/DetectorStore", "StoreGateSvc/ConditionStore", "Athena::RCUSvc"] -JobOptionsSvc INFO Job options successfully read in from /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -ApplicationMgr SUCCESS 
-==================================================================================================================================== - Welcome to ApplicationMgr (GaudiCoreSvc v34r0) - running on pcumass4 on Wed Nov 4 09:35:08 2020 -==================================================================================================================================== -ApplicationMgr INFO Successfully loaded modules : StoreGate, CLIDComps ApplicationMgr INFO Application Manager Configured successfully -ClassIDSvc INFO getRegistryEntries: read 12360 CLIDRegistry entries for module ALL EventLoopMgr WARNING Unable to locate service "EventSelector" EventLoopMgr WARNING No events will be processed from external input. ApplicationMgr INFO Application Manager Initialized successfully @@ -22,22 +10,7 @@ ApplicationMgr Ready Volume pmdum2 not found: returning 0 Volume pmdum3 not found: returning 0 INFO Initialize from dictionary - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG pixel decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,0,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 0 1 0 2 mode enumerated - DEBUG bec_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG lay_disk decode 0 vals 0:2 mask/zero mask/shift/bits/offset 3 fe7fffffffffffff 55 2 7 indexes mode both_bounded - DEBUG lay_disk_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_mod decode 0 vals 0:51 mask/zero mask/shift/bits/offset 3f ff81ffffffffffff 49 6 9 indexes mode both_bounded - DEBUG phi_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode 
both_bounded - DEBUG eta_mod decode 1 vals -6:6 mask/zero mask/shift/bits/offset f fffe1fffffffffff 45 4 15 indexes mode both_bounded - DEBUG eta_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_index decode 0 vals 0:327 mask/zero mask/shift/bits/offset 1ff ffffe00fffffffff 36 9 19 indexes mode both_bounded - DEBUG eta_index decode 0 vals 0:191 mask/zero mask/shift/bits/offset ff fffffff00fffffff 28 8 28 indexes mode both_bounded - DEBUG bec_eta_mod decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded indet 1 2,4,5,7,10,11,12,13 7 1fffffffffffffff 61 3 0 min/max 2 13 values 2 4 5 7 10 11 12 13 indexes 0 0 1 2 0 3 0 0 4 5 6 7 indices 8 prev 0 next 0 mode enumerated cont mode none pixel 1 1:3 3 e7ffffffffffffff 59 2 3 min/max 1 3 values indexes indices 3 prev 0 next 0 mode both_bounded cont mode none bec 1 -2,0,2 3 f9ffffffffffffff 57 2 5 min/max -2 2 values -2 0 2 indexes 0 0 1 0 2 indices 3 prev 0 next 0 mode enumerated cont mode none @@ -60,85 +33,28 @@ phi_module 4 eta_module 5 phi_index 6 eta_index 7 - DEBUG PixelID::initialize_from_dict Set barrel field values: 0 - DEBUG PixelID::initialize_from_dict Set dbm field values: -999,999 - DEBUG PixelID::initialize_from_dict Found field values: InDet/Pixel 2/1 - DEBUG PixelID::init_neighbors - DEBUG PixelID::initialize_from_dict - DEBUG Wafer range -> 2/1/0/0/0:21/-6:6 | 2/1/0/1/0:37/-6:6 | 2/1/0/2/0:51/-6:6 | 2/1/-2,2/0:2/0:47/0 - DEBUG Pixel range -> 2/1/0/0/0:21/-6:6/0:327/0:191 | 2/1/0/1/0:37/-6:6/0:327/0:143 | 2/1/0/2/0:51/-6:6/0:327/0:143 | 2/1/-2,2/0:2/0:47/0/0:327/0:143 INFO SCT_ID::initialize_from_dictionary AtlasDetectorID::initialize_from_dictionary - OK INFO Initialize from dictionary cout 0 - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 
indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG trt decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,-1,1,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 1 0 2 3 mode enumerated - DEBUG phi_mod decode 0 vals 0:31 mask/zero mask/shift/bits/offset 1f fe0fffffffffffff 52 5 7 indexes mode both_bounded - DEBUG lay_wheel decode 0 vals 0:17 mask/zero mask/shift/bits/offset 1f fff07fffffffffff 47 5 12 indexes mode both_bounded - DEBUG str_lay decode 0 vals 0:29 mask/zero mask/shift/bits/offset 1f ffff83ffffffffff 42 5 17 indexes mode both_bounded - DEBUG straw decode 0 vals 0:28 mask/zero mask/shift/bits/offset 1f fffffc1fffffffff 37 5 22 indexes mode both_bounded - DEBUG TRT_ID::initialize_from_dict Set barrel field values: -1,1 - DEBUG TRT_ID::initialize_from_dict Found field values: InDet/TRT 2/3 - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG TRT_ID::init_hashes module hash max 1344 - DEBUG TRT_ID::init_hashes straw layer hash max 19008 - DEBUG TRT_ID::init_hashes straw hash max 424576 - DEBUG TRT_ID::initialize_from_dict - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG Straw layer range -> 2/3/-1,1/0:31/0/0 | 2/3/-1,1/0:31/0/1:4 | 2/3/-1,1/0:31/0/5:9 | 2/3/-1,1/0:31/0/10:14 | 2/3/-1,1/0:31/0/15:17 | 2/3/-1,1/0:31/0/18 | 2/3/-1,1/0:31/1/0 | 2/3/-1,1/0:31/1/1:5 | 2/3/-1,1/0:31/1/6:10 | 2/3/-1,1/0:31/1/11:15 | 2/3/-1,1/0:31/1/16:20 | 2/3/-1,1/0:31/1/21:22 | 2/3/-1,1/0:31/1/23 | 2/3/-1,1/0:31/2/0 | 2/3/-1,1/0:31/2/1:4 | 2/3/-1,1/0:31/2/5:9 | 2/3/-1,1/0:31/2/10:14 | 2/3/-1,1/0:31/2/15:19 | 2/3/-1,1/0:31/2/20:24 | 2/3/-1,1/0:31/2/25:28 | 2/3/-1,1/0:31/2/29 | 2/3/-2,2/0:31/0:5/0:15 | 2/3/-2,2/0:31/6:13/0:7 | 2/3/-2,2/0:31/14:17/0:15 - DEBUG Straw range -> 
2/3/-1,1/0:31/0/0/0:14 | 2/3/-1,1/0:31/0/1:4/0:15 | 2/3/-1,1/0:31/0/5:9/0:16 | 2/3/-1,1/0:31/0/10:14/0:17 | 2/3/-1,1/0:31/0/15:17/0:18 | 2/3/-1,1/0:31/0/18/0:17 | 2/3/-1,1/0:31/1/0/0:18 | 2/3/-1,1/0:31/1/1:5/0:19 | 2/3/-1,1/0:31/1/6:10/0:20 | 2/3/-1,1/0:31/1/11:15/0:21 | 2/3/-1,1/0:31/1/16:20/0:22 | 2/3/-1,1/0:31/1/21:22/0:23 | 2/3/-1,1/0:31/1/23/0:22 | 2/3/-1,1/0:31/2/0/0:22 | 2/3/-1,1/0:31/2/1:4/0:23 | 2/3/-1,1/0:31/2/5:9/0:24 | 2/3/-1,1/0:31/2/10:14/0:25 | 2/3/-1,1/0:31/2/15:19/0:26 | 2/3/-1,1/0:31/2/20:24/0:27 | 2/3/-1,1/0:31/2/25:28/0:28 | 2/3/-1,1/0:31/2/29/0:27 | 2/3/-2,2/0:31/0:5/0:15/0:23 | 2/3/-2,2/0:31/6:13/0:7/0:23 | 2/3/-2,2/0:31/14:17/0:15/0:17 - AtlasDetectorID::initialize_from_dictionary - OK -CscIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -CscIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -CscIdHelper INFO MultiRange built successfully to cscStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK CscIdHelper INFO Initializing CSC hash indices ... -CscIdHelper INFO The element hash max is 32 -CscIdHelper INFO The detector element hash max is 64 CscIdHelper INFO The channel hash max is 61440 CscIdHelper INFO Initializing CSC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -RpcIdHelper INFO MultiRange built successfully to doubletR: MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to detectorElement: DetectorElement MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to rpcStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper INFO Initializing RPC hash indices ... -RpcIdHelper INFO The element hash max is 600 -RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -TgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper INFO Initializing TGC hash indices ... -TgcIdHelper INFO The element hash max is 1578 -TgcIdHelper INFO The detector element hash max is 1578 TgcIdHelper INFO Initializing TGC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -MdtIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MdtIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 261 -MdtIdHelper INFO MultiRange built successfully to tube: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper INFO Initializing MDT hash indices ... -MdtIdHelper INFO The element hash max is 1188 -MdtIdHelper INFO The detector element hash max is 2328 MdtIdHelper INFO Initializing MDT hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -sTgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -sTgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 253 -sTgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK sTgcIdHelper INFO Initializing sTGC hash indices ... -sTgcIdHelper INFO The element hash max is 96 -sTgcIdHelper INFO The detector element hash max is 192 sTgcIdHelper INFO Initializing sTGC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -MmIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MmIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -MmIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MmIdHelper INFO Initializing MicroMegas hash indices ... -MmIdHelper INFO The element hash max is 64 -MmIdHelper INFO The detector element hash max is 128 MmIdHelper INFO Initializing MicroMegas hash indices for finding neighbors ... AtlasDetectorID::initialize_from_dictionary - OK -ClassIDSvc INFO getRegistryEntries: read 379 CLIDRegistry entries for module ALL test1 diff --git a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/RpcPrepDataContainerCnv_p3_test.ref b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/RpcPrepDataContainerCnv_p3_test.ref index 7f8be9acd74deb1ce4dbb998df15218e3264ad49..eafa59f97879b9e447a9cdea019dc6bb7238a360 100644 --- a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/RpcPrepDataContainerCnv_p3_test.ref +++ b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/RpcPrepDataContainerCnv_p3_test.ref @@ -1,18 +1,6 @@ -Initializing Gaudi ApplicationMgr using job opts /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # =======> /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # (1,1): ApplicationMgr.Dlls += ["StoreGate", "CLIDComps"] -JobOptionsSvc INFO # (2,1): ApplicationMgr.ExtSvc += ["StoreGateSvc", "StoreGateSvc/DetectorStore", "StoreGateSvc/ConditionStore", "Athena::RCUSvc"] -JobOptionsSvc INFO Job options successfully read in from /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -ApplicationMgr SUCCESS 
-==================================================================================================================================== - Welcome to ApplicationMgr (GaudiCoreSvc v34r0) - running on pcumass4 on Wed Nov 4 09:35:10 2020 -==================================================================================================================================== -ApplicationMgr INFO Successfully loaded modules : StoreGate, CLIDComps ApplicationMgr INFO Application Manager Configured successfully -ClassIDSvc INFO getRegistryEntries: read 12360 CLIDRegistry entries for module ALL EventLoopMgr WARNING Unable to locate service "EventSelector" EventLoopMgr WARNING No events will be processed from external input. ApplicationMgr INFO Application Manager Initialized successfully @@ -22,22 +10,7 @@ ApplicationMgr Ready Volume pmdum2 not found: returning 0 Volume pmdum3 not found: returning 0 INFO Initialize from dictionary - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG pixel decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,0,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 0 1 0 2 mode enumerated - DEBUG bec_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG lay_disk decode 0 vals 0:2 mask/zero mask/shift/bits/offset 3 fe7fffffffffffff 55 2 7 indexes mode both_bounded - DEBUG lay_disk_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_mod decode 0 vals 0:51 mask/zero mask/shift/bits/offset 3f ff81ffffffffffff 49 6 9 indexes mode both_bounded - DEBUG phi_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode 
both_bounded - DEBUG eta_mod decode 1 vals -6:6 mask/zero mask/shift/bits/offset f fffe1fffffffffff 45 4 15 indexes mode both_bounded - DEBUG eta_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_index decode 0 vals 0:327 mask/zero mask/shift/bits/offset 1ff ffffe00fffffffff 36 9 19 indexes mode both_bounded - DEBUG eta_index decode 0 vals 0:191 mask/zero mask/shift/bits/offset ff fffffff00fffffff 28 8 28 indexes mode both_bounded - DEBUG bec_eta_mod decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded indet 1 2,4,5,7,10,11,12,13 7 1fffffffffffffff 61 3 0 min/max 2 13 values 2 4 5 7 10 11 12 13 indexes 0 0 1 2 0 3 0 0 4 5 6 7 indices 8 prev 0 next 0 mode enumerated cont mode none pixel 1 1:3 3 e7ffffffffffffff 59 2 3 min/max 1 3 values indexes indices 3 prev 0 next 0 mode both_bounded cont mode none bec 1 -2,0,2 3 f9ffffffffffffff 57 2 5 min/max -2 2 values -2 0 2 indexes 0 0 1 0 2 indices 3 prev 0 next 0 mode enumerated cont mode none @@ -60,85 +33,28 @@ phi_module 4 eta_module 5 phi_index 6 eta_index 7 - DEBUG PixelID::initialize_from_dict Set barrel field values: 0 - DEBUG PixelID::initialize_from_dict Set dbm field values: -999,999 - DEBUG PixelID::initialize_from_dict Found field values: InDet/Pixel 2/1 - DEBUG PixelID::init_neighbors - DEBUG PixelID::initialize_from_dict - DEBUG Wafer range -> 2/1/0/0/0:21/-6:6 | 2/1/0/1/0:37/-6:6 | 2/1/0/2/0:51/-6:6 | 2/1/-2,2/0:2/0:47/0 - DEBUG Pixel range -> 2/1/0/0/0:21/-6:6/0:327/0:191 | 2/1/0/1/0:37/-6:6/0:327/0:143 | 2/1/0/2/0:51/-6:6/0:327/0:143 | 2/1/-2,2/0:2/0:47/0/0:327/0:143 INFO SCT_ID::initialize_from_dictionary AtlasDetectorID::initialize_from_dictionary - OK INFO Initialize from dictionary cout 0 - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 
indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG trt decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,-1,1,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 1 0 2 3 mode enumerated - DEBUG phi_mod decode 0 vals 0:31 mask/zero mask/shift/bits/offset 1f fe0fffffffffffff 52 5 7 indexes mode both_bounded - DEBUG lay_wheel decode 0 vals 0:17 mask/zero mask/shift/bits/offset 1f fff07fffffffffff 47 5 12 indexes mode both_bounded - DEBUG str_lay decode 0 vals 0:29 mask/zero mask/shift/bits/offset 1f ffff83ffffffffff 42 5 17 indexes mode both_bounded - DEBUG straw decode 0 vals 0:28 mask/zero mask/shift/bits/offset 1f fffffc1fffffffff 37 5 22 indexes mode both_bounded - DEBUG TRT_ID::initialize_from_dict Set barrel field values: -1,1 - DEBUG TRT_ID::initialize_from_dict Found field values: InDet/TRT 2/3 - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG TRT_ID::init_hashes module hash max 1344 - DEBUG TRT_ID::init_hashes straw layer hash max 19008 - DEBUG TRT_ID::init_hashes straw hash max 424576 - DEBUG TRT_ID::initialize_from_dict - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG Straw layer range -> 2/3/-1,1/0:31/0/0 | 2/3/-1,1/0:31/0/1:4 | 2/3/-1,1/0:31/0/5:9 | 2/3/-1,1/0:31/0/10:14 | 2/3/-1,1/0:31/0/15:17 | 2/3/-1,1/0:31/0/18 | 2/3/-1,1/0:31/1/0 | 2/3/-1,1/0:31/1/1:5 | 2/3/-1,1/0:31/1/6:10 | 2/3/-1,1/0:31/1/11:15 | 2/3/-1,1/0:31/1/16:20 | 2/3/-1,1/0:31/1/21:22 | 2/3/-1,1/0:31/1/23 | 2/3/-1,1/0:31/2/0 | 2/3/-1,1/0:31/2/1:4 | 2/3/-1,1/0:31/2/5:9 | 2/3/-1,1/0:31/2/10:14 | 2/3/-1,1/0:31/2/15:19 | 2/3/-1,1/0:31/2/20:24 | 2/3/-1,1/0:31/2/25:28 | 2/3/-1,1/0:31/2/29 | 2/3/-2,2/0:31/0:5/0:15 | 2/3/-2,2/0:31/6:13/0:7 | 2/3/-2,2/0:31/14:17/0:15 - DEBUG Straw range -> 
2/3/-1,1/0:31/0/0/0:14 | 2/3/-1,1/0:31/0/1:4/0:15 | 2/3/-1,1/0:31/0/5:9/0:16 | 2/3/-1,1/0:31/0/10:14/0:17 | 2/3/-1,1/0:31/0/15:17/0:18 | 2/3/-1,1/0:31/0/18/0:17 | 2/3/-1,1/0:31/1/0/0:18 | 2/3/-1,1/0:31/1/1:5/0:19 | 2/3/-1,1/0:31/1/6:10/0:20 | 2/3/-1,1/0:31/1/11:15/0:21 | 2/3/-1,1/0:31/1/16:20/0:22 | 2/3/-1,1/0:31/1/21:22/0:23 | 2/3/-1,1/0:31/1/23/0:22 | 2/3/-1,1/0:31/2/0/0:22 | 2/3/-1,1/0:31/2/1:4/0:23 | 2/3/-1,1/0:31/2/5:9/0:24 | 2/3/-1,1/0:31/2/10:14/0:25 | 2/3/-1,1/0:31/2/15:19/0:26 | 2/3/-1,1/0:31/2/20:24/0:27 | 2/3/-1,1/0:31/2/25:28/0:28 | 2/3/-1,1/0:31/2/29/0:27 | 2/3/-2,2/0:31/0:5/0:15/0:23 | 2/3/-2,2/0:31/6:13/0:7/0:23 | 2/3/-2,2/0:31/14:17/0:15/0:17 - AtlasDetectorID::initialize_from_dictionary - OK -CscIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -CscIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -CscIdHelper INFO MultiRange built successfully to cscStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK CscIdHelper INFO Initializing CSC hash indices ... -CscIdHelper INFO The element hash max is 32 -CscIdHelper INFO The detector element hash max is 64 CscIdHelper INFO The channel hash max is 61440 CscIdHelper INFO Initializing CSC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -RpcIdHelper INFO MultiRange built successfully to doubletR: MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to detectorElement: DetectorElement MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to rpcStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper INFO Initializing RPC hash indices ... -RpcIdHelper INFO The element hash max is 600 -RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -TgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper INFO Initializing TGC hash indices ... -TgcIdHelper INFO The element hash max is 1578 -TgcIdHelper INFO The detector element hash max is 1578 TgcIdHelper INFO Initializing TGC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -MdtIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MdtIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 261 -MdtIdHelper INFO MultiRange built successfully to tube: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper INFO Initializing MDT hash indices ... -MdtIdHelper INFO The element hash max is 1188 -MdtIdHelper INFO The detector element hash max is 2328 MdtIdHelper INFO Initializing MDT hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -sTgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -sTgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 253 -sTgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK sTgcIdHelper INFO Initializing sTGC hash indices ... -sTgcIdHelper INFO The element hash max is 96 -sTgcIdHelper INFO The detector element hash max is 192 sTgcIdHelper INFO Initializing sTGC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -MmIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MmIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -MmIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MmIdHelper INFO Initializing MicroMegas hash indices ... -MmIdHelper INFO The element hash max is 64 -MmIdHelper INFO The detector element hash max is 128 MmIdHelper INFO Initializing MicroMegas hash indices for finding neighbors ... AtlasDetectorID::initialize_from_dictionary - OK -ClassIDSvc INFO getRegistryEntries: read 379 CLIDRegistry entries for module ALL test1 diff --git a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/STGC_ClusterOnTrackCnv_p1_test.ref b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/STGC_ClusterOnTrackCnv_p1_test.ref index e9908470fc969545fb3be6a0c793add316c2c4ba..eafa59f97879b9e447a9cdea019dc6bb7238a360 100644 --- a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/STGC_ClusterOnTrackCnv_p1_test.ref +++ b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/STGC_ClusterOnTrackCnv_p1_test.ref @@ -1,18 +1,6 @@ -Initializing Gaudi ApplicationMgr using job opts /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # =======> /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # (1,1): ApplicationMgr.Dlls += ["StoreGate", "CLIDComps"] -JobOptionsSvc INFO # (2,1): ApplicationMgr.ExtSvc += ["StoreGateSvc", "StoreGateSvc/DetectorStore", "StoreGateSvc/ConditionStore", "Athena::RCUSvc"] -JobOptionsSvc INFO Job options successfully read in from /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -ApplicationMgr SUCCESS 
-==================================================================================================================================== - Welcome to ApplicationMgr (GaudiCoreSvc v34r0) - running on pcumass4 on Wed Nov 4 09:34:35 2020 -==================================================================================================================================== -ApplicationMgr INFO Successfully loaded modules : StoreGate, CLIDComps ApplicationMgr INFO Application Manager Configured successfully -ClassIDSvc INFO getRegistryEntries: read 12361 CLIDRegistry entries for module ALL EventLoopMgr WARNING Unable to locate service "EventSelector" EventLoopMgr WARNING No events will be processed from external input. ApplicationMgr INFO Application Manager Initialized successfully @@ -22,22 +10,7 @@ ApplicationMgr Ready Volume pmdum2 not found: returning 0 Volume pmdum3 not found: returning 0 INFO Initialize from dictionary - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG pixel decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,0,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 0 1 0 2 mode enumerated - DEBUG bec_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG lay_disk decode 0 vals 0:2 mask/zero mask/shift/bits/offset 3 fe7fffffffffffff 55 2 7 indexes mode both_bounded - DEBUG lay_disk_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_mod decode 0 vals 0:51 mask/zero mask/shift/bits/offset 3f ff81ffffffffffff 49 6 9 indexes mode both_bounded - DEBUG phi_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode 
both_bounded - DEBUG eta_mod decode 1 vals -6:6 mask/zero mask/shift/bits/offset f fffe1fffffffffff 45 4 15 indexes mode both_bounded - DEBUG eta_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_index decode 0 vals 0:327 mask/zero mask/shift/bits/offset 1ff ffffe00fffffffff 36 9 19 indexes mode both_bounded - DEBUG eta_index decode 0 vals 0:191 mask/zero mask/shift/bits/offset ff fffffff00fffffff 28 8 28 indexes mode both_bounded - DEBUG bec_eta_mod decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded indet 1 2,4,5,7,10,11,12,13 7 1fffffffffffffff 61 3 0 min/max 2 13 values 2 4 5 7 10 11 12 13 indexes 0 0 1 2 0 3 0 0 4 5 6 7 indices 8 prev 0 next 0 mode enumerated cont mode none pixel 1 1:3 3 e7ffffffffffffff 59 2 3 min/max 1 3 values indexes indices 3 prev 0 next 0 mode both_bounded cont mode none bec 1 -2,0,2 3 f9ffffffffffffff 57 2 5 min/max -2 2 values -2 0 2 indexes 0 0 1 0 2 indices 3 prev 0 next 0 mode enumerated cont mode none @@ -60,87 +33,28 @@ phi_module 4 eta_module 5 phi_index 6 eta_index 7 - DEBUG PixelID::initialize_from_dict Set barrel field values: 0 - DEBUG PixelID::initialize_from_dict Set dbm field values: -999,999 - DEBUG PixelID::initialize_from_dict Found field values: InDet/Pixel 2/1 - DEBUG PixelID::init_neighbors - DEBUG PixelID::initialize_from_dict - DEBUG Wafer range -> 2/1/0/0/0:21/-6:6 | 2/1/0/1/0:37/-6:6 | 2/1/0/2/0:51/-6:6 | 2/1/-2,2/0:2/0:47/0 - DEBUG Pixel range -> 2/1/0/0/0:21/-6:6/0:327/0:191 | 2/1/0/1/0:37/-6:6/0:327/0:143 | 2/1/0/2/0:51/-6:6/0:327/0:143 | 2/1/-2,2/0:2/0:47/0/0:327/0:143 INFO SCT_ID::initialize_from_dictionary AtlasDetectorID::initialize_from_dictionary - OK INFO Initialize from dictionary cout 0 - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 
indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG trt decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,-1,1,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 1 0 2 3 mode enumerated - DEBUG phi_mod decode 0 vals 0:31 mask/zero mask/shift/bits/offset 1f fe0fffffffffffff 52 5 7 indexes mode both_bounded - DEBUG lay_wheel decode 0 vals 0:17 mask/zero mask/shift/bits/offset 1f fff07fffffffffff 47 5 12 indexes mode both_bounded - DEBUG str_lay decode 0 vals 0:29 mask/zero mask/shift/bits/offset 1f ffff83ffffffffff 42 5 17 indexes mode both_bounded - DEBUG straw decode 0 vals 0:28 mask/zero mask/shift/bits/offset 1f fffffc1fffffffff 37 5 22 indexes mode both_bounded - DEBUG TRT_ID::initialize_from_dict Set barrel field values: -1,1 - DEBUG TRT_ID::initialize_from_dict Found field values: InDet/TRT 2/3 - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG TRT_ID::init_hashes module hash max 1344 - DEBUG TRT_ID::init_hashes straw layer hash max 19008 - DEBUG TRT_ID::init_hashes straw hash max 424576 - DEBUG TRT_ID::initialize_from_dict - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG Straw layer range -> 2/3/-1,1/0:31/0/0 | 2/3/-1,1/0:31/0/1:4 | 2/3/-1,1/0:31/0/5:9 | 2/3/-1,1/0:31/0/10:14 | 2/3/-1,1/0:31/0/15:17 | 2/3/-1,1/0:31/0/18 | 2/3/-1,1/0:31/1/0 | 2/3/-1,1/0:31/1/1:5 | 2/3/-1,1/0:31/1/6:10 | 2/3/-1,1/0:31/1/11:15 | 2/3/-1,1/0:31/1/16:20 | 2/3/-1,1/0:31/1/21:22 | 2/3/-1,1/0:31/1/23 | 2/3/-1,1/0:31/2/0 | 2/3/-1,1/0:31/2/1:4 | 2/3/-1,1/0:31/2/5:9 | 2/3/-1,1/0:31/2/10:14 | 2/3/-1,1/0:31/2/15:19 | 2/3/-1,1/0:31/2/20:24 | 2/3/-1,1/0:31/2/25:28 | 2/3/-1,1/0:31/2/29 | 2/3/-2,2/0:31/0:5/0:15 | 2/3/-2,2/0:31/6:13/0:7 | 2/3/-2,2/0:31/14:17/0:15 - DEBUG Straw range -> 
2/3/-1,1/0:31/0/0/0:14 | 2/3/-1,1/0:31/0/1:4/0:15 | 2/3/-1,1/0:31/0/5:9/0:16 | 2/3/-1,1/0:31/0/10:14/0:17 | 2/3/-1,1/0:31/0/15:17/0:18 | 2/3/-1,1/0:31/0/18/0:17 | 2/3/-1,1/0:31/1/0/0:18 | 2/3/-1,1/0:31/1/1:5/0:19 | 2/3/-1,1/0:31/1/6:10/0:20 | 2/3/-1,1/0:31/1/11:15/0:21 | 2/3/-1,1/0:31/1/16:20/0:22 | 2/3/-1,1/0:31/1/21:22/0:23 | 2/3/-1,1/0:31/1/23/0:22 | 2/3/-1,1/0:31/2/0/0:22 | 2/3/-1,1/0:31/2/1:4/0:23 | 2/3/-1,1/0:31/2/5:9/0:24 | 2/3/-1,1/0:31/2/10:14/0:25 | 2/3/-1,1/0:31/2/15:19/0:26 | 2/3/-1,1/0:31/2/20:24/0:27 | 2/3/-1,1/0:31/2/25:28/0:28 | 2/3/-1,1/0:31/2/29/0:27 | 2/3/-2,2/0:31/0:5/0:15/0:23 | 2/3/-2,2/0:31/6:13/0:7/0:23 | 2/3/-2,2/0:31/14:17/0:15/0:17 - AtlasDetectorID::initialize_from_dictionary - OK -CscIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -CscIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -CscIdHelper INFO MultiRange built successfully to cscStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK CscIdHelper INFO Initializing CSC hash indices ... -CscIdHelper INFO The element hash max is 32 -CscIdHelper INFO The detector element hash max is 64 CscIdHelper INFO The channel hash max is 61440 CscIdHelper INFO Initializing CSC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -RpcIdHelper INFO MultiRange built successfully to doubletR: MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to detectorElement: DetectorElement MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to rpcStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper INFO Initializing RPC hash indices ... -RpcIdHelper INFO The element hash max is 600 -RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -TgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper INFO Initializing TGC hash indices ... -TgcIdHelper INFO The element hash max is 1578 -TgcIdHelper INFO The detector element hash max is 1578 TgcIdHelper INFO Initializing TGC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -MdtIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MdtIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 261 -MdtIdHelper INFO MultiRange built successfully to tube: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper INFO Initializing MDT hash indices ... -MdtIdHelper INFO The element hash max is 1188 -MdtIdHelper INFO The detector element hash max is 2328 MdtIdHelper INFO Initializing MDT hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -sTgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -sTgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 253 -sTgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK sTgcIdHelper INFO Initializing sTGC hash indices ... -sTgcIdHelper INFO The element hash max is 96 -sTgcIdHelper INFO The detector element hash max is 192 sTgcIdHelper INFO Initializing sTGC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -MmIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MmIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -MmIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MmIdHelper INFO Initializing MicroMegas hash indices ... -MmIdHelper INFO The element hash max is 64 -MmIdHelper INFO The detector element hash max is 128 MmIdHelper INFO Initializing MicroMegas hash indices for finding neighbors ... AtlasDetectorID::initialize_from_dictionary - OK -ClassIDSvc INFO getRegistryEntries: read 379 CLIDRegistry entries for module ALL test1 -ClassIDSvc INFO getRegistryEntries: read 320 CLIDRegistry entries for module ALL -ClassIDSvc INFO getRegistryEntries: read 170 CLIDRegistry entries for module ALL diff --git a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/STGC_DigitContainerCnv_p1_test.ref b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/STGC_DigitContainerCnv_p1_test.ref index cb664f54651404e964537757e952f5e504da71f1..eafa59f97879b9e447a9cdea019dc6bb7238a360 100644 --- a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/STGC_DigitContainerCnv_p1_test.ref +++ b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/STGC_DigitContainerCnv_p1_test.ref @@ -1,18 +1,6 @@ -Initializing Gaudi ApplicationMgr using job opts /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # =======> /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # (1,1): ApplicationMgr.Dlls += ["StoreGate", "CLIDComps"] -JobOptionsSvc INFO # (2,1): ApplicationMgr.ExtSvc += ["StoreGateSvc", "StoreGateSvc/DetectorStore", "StoreGateSvc/ConditionStore", "Athena::RCUSvc"] -JobOptionsSvc INFO Job options successfully read in from 
/home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -ApplicationMgr SUCCESS -==================================================================================================================================== - Welcome to ApplicationMgr (GaudiCoreSvc v34r0) - running on pcumass4 on Wed Nov 4 09:35:40 2020 -==================================================================================================================================== -ApplicationMgr INFO Successfully loaded modules : StoreGate, CLIDComps ApplicationMgr INFO Application Manager Configured successfully -ClassIDSvc INFO getRegistryEntries: read 12351 CLIDRegistry entries for module ALL EventLoopMgr WARNING Unable to locate service "EventSelector" EventLoopMgr WARNING No events will be processed from external input. ApplicationMgr INFO Application Manager Initialized successfully @@ -22,22 +10,7 @@ ApplicationMgr Ready Volume pmdum2 not found: returning 0 Volume pmdum3 not found: returning 0 INFO Initialize from dictionary - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG pixel decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,0,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 0 1 0 2 mode enumerated - DEBUG bec_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG lay_disk decode 0 vals 0:2 mask/zero mask/shift/bits/offset 3 fe7fffffffffffff 55 2 7 indexes mode both_bounded - DEBUG lay_disk_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_mod decode 0 vals 0:51 mask/zero mask/shift/bits/offset 3f ff81ffffffffffff 49 6 
9 indexes mode both_bounded - DEBUG phi_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG eta_mod decode 1 vals -6:6 mask/zero mask/shift/bits/offset f fffe1fffffffffff 45 4 15 indexes mode both_bounded - DEBUG eta_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_index decode 0 vals 0:327 mask/zero mask/shift/bits/offset 1ff ffffe00fffffffff 36 9 19 indexes mode both_bounded - DEBUG eta_index decode 0 vals 0:191 mask/zero mask/shift/bits/offset ff fffffff00fffffff 28 8 28 indexes mode both_bounded - DEBUG bec_eta_mod decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded indet 1 2,4,5,7,10,11,12,13 7 1fffffffffffffff 61 3 0 min/max 2 13 values 2 4 5 7 10 11 12 13 indexes 0 0 1 2 0 3 0 0 4 5 6 7 indices 8 prev 0 next 0 mode enumerated cont mode none pixel 1 1:3 3 e7ffffffffffffff 59 2 3 min/max 1 3 values indexes indices 3 prev 0 next 0 mode both_bounded cont mode none bec 1 -2,0,2 3 f9ffffffffffffff 57 2 5 min/max -2 2 values -2 0 2 indexes 0 0 1 0 2 indices 3 prev 0 next 0 mode enumerated cont mode none @@ -60,85 +33,28 @@ phi_module 4 eta_module 5 phi_index 6 eta_index 7 - DEBUG PixelID::initialize_from_dict Set barrel field values: 0 - DEBUG PixelID::initialize_from_dict Set dbm field values: -999,999 - DEBUG PixelID::initialize_from_dict Found field values: InDet/Pixel 2/1 - DEBUG PixelID::init_neighbors - DEBUG PixelID::initialize_from_dict - DEBUG Wafer range -> 2/1/0/0/0:21/-6:6 | 2/1/0/1/0:37/-6:6 | 2/1/0/2/0:51/-6:6 | 2/1/-2,2/0:2/0:47/0 - DEBUG Pixel range -> 2/1/0/0/0:21/-6:6/0:327/0:191 | 2/1/0/1/0:37/-6:6/0:327/0:143 | 2/1/0/2/0:51/-6:6/0:327/0:143 | 2/1/-2,2/0:2/0:47/0/0:327/0:143 INFO SCT_ID::initialize_from_dictionary AtlasDetectorID::initialize_from_dictionary - OK INFO Initialize from dictionary cout 0 - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields 
for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG trt decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,-1,1,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 1 0 2 3 mode enumerated - DEBUG phi_mod decode 0 vals 0:31 mask/zero mask/shift/bits/offset 1f fe0fffffffffffff 52 5 7 indexes mode both_bounded - DEBUG lay_wheel decode 0 vals 0:17 mask/zero mask/shift/bits/offset 1f fff07fffffffffff 47 5 12 indexes mode both_bounded - DEBUG str_lay decode 0 vals 0:29 mask/zero mask/shift/bits/offset 1f ffff83ffffffffff 42 5 17 indexes mode both_bounded - DEBUG straw decode 0 vals 0:28 mask/zero mask/shift/bits/offset 1f fffffc1fffffffff 37 5 22 indexes mode both_bounded - DEBUG TRT_ID::initialize_from_dict Set barrel field values: -1,1 - DEBUG TRT_ID::initialize_from_dict Found field values: InDet/TRT 2/3 - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG TRT_ID::init_hashes module hash max 1344 - DEBUG TRT_ID::init_hashes straw layer hash max 19008 - DEBUG TRT_ID::init_hashes straw hash max 424576 - DEBUG TRT_ID::initialize_from_dict - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG Straw layer range -> 2/3/-1,1/0:31/0/0 | 2/3/-1,1/0:31/0/1:4 | 2/3/-1,1/0:31/0/5:9 | 2/3/-1,1/0:31/0/10:14 | 2/3/-1,1/0:31/0/15:17 | 2/3/-1,1/0:31/0/18 | 2/3/-1,1/0:31/1/0 | 2/3/-1,1/0:31/1/1:5 | 2/3/-1,1/0:31/1/6:10 | 2/3/-1,1/0:31/1/11:15 | 2/3/-1,1/0:31/1/16:20 | 2/3/-1,1/0:31/1/21:22 | 2/3/-1,1/0:31/1/23 | 2/3/-1,1/0:31/2/0 | 2/3/-1,1/0:31/2/1:4 | 2/3/-1,1/0:31/2/5:9 | 2/3/-1,1/0:31/2/10:14 | 2/3/-1,1/0:31/2/15:19 | 2/3/-1,1/0:31/2/20:24 | 2/3/-1,1/0:31/2/25:28 | 
2/3/-1,1/0:31/2/29 | 2/3/-2,2/0:31/0:5/0:15 | 2/3/-2,2/0:31/6:13/0:7 | 2/3/-2,2/0:31/14:17/0:15 - DEBUG Straw range -> 2/3/-1,1/0:31/0/0/0:14 | 2/3/-1,1/0:31/0/1:4/0:15 | 2/3/-1,1/0:31/0/5:9/0:16 | 2/3/-1,1/0:31/0/10:14/0:17 | 2/3/-1,1/0:31/0/15:17/0:18 | 2/3/-1,1/0:31/0/18/0:17 | 2/3/-1,1/0:31/1/0/0:18 | 2/3/-1,1/0:31/1/1:5/0:19 | 2/3/-1,1/0:31/1/6:10/0:20 | 2/3/-1,1/0:31/1/11:15/0:21 | 2/3/-1,1/0:31/1/16:20/0:22 | 2/3/-1,1/0:31/1/21:22/0:23 | 2/3/-1,1/0:31/1/23/0:22 | 2/3/-1,1/0:31/2/0/0:22 | 2/3/-1,1/0:31/2/1:4/0:23 | 2/3/-1,1/0:31/2/5:9/0:24 | 2/3/-1,1/0:31/2/10:14/0:25 | 2/3/-1,1/0:31/2/15:19/0:26 | 2/3/-1,1/0:31/2/20:24/0:27 | 2/3/-1,1/0:31/2/25:28/0:28 | 2/3/-1,1/0:31/2/29/0:27 | 2/3/-2,2/0:31/0:5/0:15/0:23 | 2/3/-2,2/0:31/6:13/0:7/0:23 | 2/3/-2,2/0:31/14:17/0:15/0:17 - AtlasDetectorID::initialize_from_dictionary - OK -CscIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -CscIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -CscIdHelper INFO MultiRange built successfully to cscStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK CscIdHelper INFO Initializing CSC hash indices ... -CscIdHelper INFO The element hash max is 32 -CscIdHelper INFO The detector element hash max is 64 CscIdHelper INFO The channel hash max is 61440 CscIdHelper INFO Initializing CSC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -RpcIdHelper INFO MultiRange built successfully to doubletR: MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to detectorElement: DetectorElement MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to rpcStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper INFO Initializing RPC hash indices ... 
-RpcIdHelper INFO The element hash max is 600 -RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -TgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper INFO Initializing TGC hash indices ... -TgcIdHelper INFO The element hash max is 1578 -TgcIdHelper INFO The detector element hash max is 1578 TgcIdHelper INFO Initializing TGC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -MdtIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MdtIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 261 -MdtIdHelper INFO MultiRange built successfully to tube: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper INFO Initializing MDT hash indices ... -MdtIdHelper INFO The element hash max is 1188 -MdtIdHelper INFO The detector element hash max is 2328 MdtIdHelper INFO Initializing MDT hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -sTgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -sTgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 253 -sTgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK sTgcIdHelper INFO Initializing sTGC hash indices ... -sTgcIdHelper INFO The element hash max is 96 -sTgcIdHelper INFO The detector element hash max is 192 sTgcIdHelper INFO Initializing sTGC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -MmIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MmIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -MmIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MmIdHelper INFO Initializing MicroMegas hash indices ... -MmIdHelper INFO The element hash max is 64 -MmIdHelper INFO The detector element hash max is 128 MmIdHelper INFO Initializing MicroMegas hash indices for finding neighbors ... AtlasDetectorID::initialize_from_dictionary - OK -ClassIDSvc INFO getRegistryEntries: read 379 CLIDRegistry entries for module ALL test1 diff --git a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/STGC_RawDataContainerCnv_p1_test.ref b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/STGC_RawDataContainerCnv_p1_test.ref index 54c3f168fdbbed6cb04a47a8adebae4bc48f81e3..d23421fb19a6f208fb187d09c9984383a4ab247b 100644 --- a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/STGC_RawDataContainerCnv_p1_test.ref +++ b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/STGC_RawDataContainerCnv_p1_test.ref @@ -1,18 +1,6 @@ -Initializing Gaudi ApplicationMgr using job opts /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # =======> /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # (1,1): ApplicationMgr.Dlls += ["StoreGate", "CLIDComps"] -JobOptionsSvc INFO # (2,1): ApplicationMgr.ExtSvc += ["StoreGateSvc", "StoreGateSvc/DetectorStore", "StoreGateSvc/ConditionStore", "Athena::RCUSvc"] -JobOptionsSvc INFO Job options successfully read in from /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -ApplicationMgr SUCCESS 
-==================================================================================================================================== - Welcome to ApplicationMgr (GaudiCoreSvc v34r0) - running on pcumass4 on Wed Nov 4 09:35:42 2020 -==================================================================================================================================== -ApplicationMgr INFO Successfully loaded modules : StoreGate, CLIDComps ApplicationMgr INFO Application Manager Configured successfully -ClassIDSvc INFO getRegistryEntries: read 12350 CLIDRegistry entries for module ALL EventLoopMgr WARNING Unable to locate service "EventSelector" EventLoopMgr WARNING No events will be processed from external input. ApplicationMgr INFO Application Manager Initialized successfully @@ -22,22 +10,7 @@ ApplicationMgr Ready Volume pmdum2 not found: returning 0 Volume pmdum3 not found: returning 0 INFO Initialize from dictionary - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG pixel decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,0,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 0 1 0 2 mode enumerated - DEBUG bec_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG lay_disk decode 0 vals 0:2 mask/zero mask/shift/bits/offset 3 fe7fffffffffffff 55 2 7 indexes mode both_bounded - DEBUG lay_disk_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_mod decode 0 vals 0:51 mask/zero mask/shift/bits/offset 3f ff81ffffffffffff 49 6 9 indexes mode both_bounded - DEBUG phi_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode 
both_bounded - DEBUG eta_mod decode 1 vals -6:6 mask/zero mask/shift/bits/offset f fffe1fffffffffff 45 4 15 indexes mode both_bounded - DEBUG eta_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_index decode 0 vals 0:327 mask/zero mask/shift/bits/offset 1ff ffffe00fffffffff 36 9 19 indexes mode both_bounded - DEBUG eta_index decode 0 vals 0:191 mask/zero mask/shift/bits/offset ff fffffff00fffffff 28 8 28 indexes mode both_bounded - DEBUG bec_eta_mod decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded indet 1 2,4,5,7,10,11,12,13 7 1fffffffffffffff 61 3 0 min/max 2 13 values 2 4 5 7 10 11 12 13 indexes 0 0 1 2 0 3 0 0 4 5 6 7 indices 8 prev 0 next 0 mode enumerated cont mode none pixel 1 1:3 3 e7ffffffffffffff 59 2 3 min/max 1 3 values indexes indices 3 prev 0 next 0 mode both_bounded cont mode none bec 1 -2,0,2 3 f9ffffffffffffff 57 2 5 min/max -2 2 values -2 0 2 indexes 0 0 1 0 2 indices 3 prev 0 next 0 mode enumerated cont mode none @@ -60,87 +33,30 @@ phi_module 4 eta_module 5 phi_index 6 eta_index 7 - DEBUG PixelID::initialize_from_dict Set barrel field values: 0 - DEBUG PixelID::initialize_from_dict Set dbm field values: -999,999 - DEBUG PixelID::initialize_from_dict Found field values: InDet/Pixel 2/1 - DEBUG PixelID::init_neighbors - DEBUG PixelID::initialize_from_dict - DEBUG Wafer range -> 2/1/0/0/0:21/-6:6 | 2/1/0/1/0:37/-6:6 | 2/1/0/2/0:51/-6:6 | 2/1/-2,2/0:2/0:47/0 - DEBUG Pixel range -> 2/1/0/0/0:21/-6:6/0:327/0:191 | 2/1/0/1/0:37/-6:6/0:327/0:143 | 2/1/0/2/0:51/-6:6/0:327/0:143 | 2/1/-2,2/0:2/0:47/0/0:327/0:143 INFO SCT_ID::initialize_from_dictionary AtlasDetectorID::initialize_from_dictionary - OK INFO Initialize from dictionary cout 0 - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 
indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG trt decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,-1,1,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 1 0 2 3 mode enumerated - DEBUG phi_mod decode 0 vals 0:31 mask/zero mask/shift/bits/offset 1f fe0fffffffffffff 52 5 7 indexes mode both_bounded - DEBUG lay_wheel decode 0 vals 0:17 mask/zero mask/shift/bits/offset 1f fff07fffffffffff 47 5 12 indexes mode both_bounded - DEBUG str_lay decode 0 vals 0:29 mask/zero mask/shift/bits/offset 1f ffff83ffffffffff 42 5 17 indexes mode both_bounded - DEBUG straw decode 0 vals 0:28 mask/zero mask/shift/bits/offset 1f fffffc1fffffffff 37 5 22 indexes mode both_bounded - DEBUG TRT_ID::initialize_from_dict Set barrel field values: -1,1 - DEBUG TRT_ID::initialize_from_dict Found field values: InDet/TRT 2/3 - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG TRT_ID::init_hashes module hash max 1344 - DEBUG TRT_ID::init_hashes straw layer hash max 19008 - DEBUG TRT_ID::init_hashes straw hash max 424576 - DEBUG TRT_ID::initialize_from_dict - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG Straw layer range -> 2/3/-1,1/0:31/0/0 | 2/3/-1,1/0:31/0/1:4 | 2/3/-1,1/0:31/0/5:9 | 2/3/-1,1/0:31/0/10:14 | 2/3/-1,1/0:31/0/15:17 | 2/3/-1,1/0:31/0/18 | 2/3/-1,1/0:31/1/0 | 2/3/-1,1/0:31/1/1:5 | 2/3/-1,1/0:31/1/6:10 | 2/3/-1,1/0:31/1/11:15 | 2/3/-1,1/0:31/1/16:20 | 2/3/-1,1/0:31/1/21:22 | 2/3/-1,1/0:31/1/23 | 2/3/-1,1/0:31/2/0 | 2/3/-1,1/0:31/2/1:4 | 2/3/-1,1/0:31/2/5:9 | 2/3/-1,1/0:31/2/10:14 | 2/3/-1,1/0:31/2/15:19 | 2/3/-1,1/0:31/2/20:24 | 2/3/-1,1/0:31/2/25:28 | 2/3/-1,1/0:31/2/29 | 2/3/-2,2/0:31/0:5/0:15 | 2/3/-2,2/0:31/6:13/0:7 | 2/3/-2,2/0:31/14:17/0:15 - DEBUG Straw range -> 
2/3/-1,1/0:31/0/0/0:14 | 2/3/-1,1/0:31/0/1:4/0:15 | 2/3/-1,1/0:31/0/5:9/0:16 | 2/3/-1,1/0:31/0/10:14/0:17 | 2/3/-1,1/0:31/0/15:17/0:18 | 2/3/-1,1/0:31/0/18/0:17 | 2/3/-1,1/0:31/1/0/0:18 | 2/3/-1,1/0:31/1/1:5/0:19 | 2/3/-1,1/0:31/1/6:10/0:20 | 2/3/-1,1/0:31/1/11:15/0:21 | 2/3/-1,1/0:31/1/16:20/0:22 | 2/3/-1,1/0:31/1/21:22/0:23 | 2/3/-1,1/0:31/1/23/0:22 | 2/3/-1,1/0:31/2/0/0:22 | 2/3/-1,1/0:31/2/1:4/0:23 | 2/3/-1,1/0:31/2/5:9/0:24 | 2/3/-1,1/0:31/2/10:14/0:25 | 2/3/-1,1/0:31/2/15:19/0:26 | 2/3/-1,1/0:31/2/20:24/0:27 | 2/3/-1,1/0:31/2/25:28/0:28 | 2/3/-1,1/0:31/2/29/0:27 | 2/3/-2,2/0:31/0:5/0:15/0:23 | 2/3/-2,2/0:31/6:13/0:7/0:23 | 2/3/-2,2/0:31/14:17/0:15/0:17 - AtlasDetectorID::initialize_from_dictionary - OK -CscIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -CscIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -CscIdHelper INFO MultiRange built successfully to cscStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK CscIdHelper INFO Initializing CSC hash indices ... -CscIdHelper INFO The element hash max is 32 -CscIdHelper INFO The detector element hash max is 64 CscIdHelper INFO The channel hash max is 61440 CscIdHelper INFO Initializing CSC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -RpcIdHelper INFO MultiRange built successfully to doubletR: MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to detectorElement: DetectorElement MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to rpcStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper INFO Initializing RPC hash indices ... -RpcIdHelper INFO The element hash max is 600 -RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -TgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper INFO Initializing TGC hash indices ... -TgcIdHelper INFO The element hash max is 1578 -TgcIdHelper INFO The detector element hash max is 1578 TgcIdHelper INFO Initializing TGC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -MdtIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MdtIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 261 -MdtIdHelper INFO MultiRange built successfully to tube: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper INFO Initializing MDT hash indices ... -MdtIdHelper INFO The element hash max is 1188 -MdtIdHelper INFO The detector element hash max is 2328 MdtIdHelper INFO Initializing MDT hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -sTgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -sTgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 253 -sTgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK sTgcIdHelper INFO Initializing sTGC hash indices ... -sTgcIdHelper INFO The element hash max is 96 -sTgcIdHelper INFO The detector element hash max is 192 sTgcIdHelper INFO Initializing sTGC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -MmIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MmIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -MmIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MmIdHelper INFO Initializing MicroMegas hash indices ... -MmIdHelper INFO The element hash max is 64 -MmIdHelper INFO The detector element hash max is 128 MmIdHelper INFO Initializing MicroMegas hash indices for finding neighbors ... AtlasDetectorID::initialize_from_dictionary - OK -ClassIDSvc INFO getRegistryEntries: read 379 CLIDRegistry entries for module ALL test1 Before has size: 2, after has size: 2 Collection #1 diff --git a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/TgcClusterOnTrackCnv_p2_test.ref b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/TgcClusterOnTrackCnv_p2_test.ref index a216b0a3c4c5fb12f88a79d4d1abd15c9cfa1e37..eafa59f97879b9e447a9cdea019dc6bb7238a360 100644 --- a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/TgcClusterOnTrackCnv_p2_test.ref +++ b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/TgcClusterOnTrackCnv_p2_test.ref @@ -1,18 +1,6 @@ -Initializing Gaudi ApplicationMgr using job opts /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # =======> /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # (1,1): ApplicationMgr.Dlls += ["StoreGate", "CLIDComps"] -JobOptionsSvc INFO # (2,1): ApplicationMgr.ExtSvc += ["StoreGateSvc", "StoreGateSvc/DetectorStore", "StoreGateSvc/ConditionStore", "Athena::RCUSvc"] -JobOptionsSvc INFO Job options successfully read in from /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -ApplicationMgr SUCCESS 
-==================================================================================================================================== - Welcome to ApplicationMgr (GaudiCoreSvc v34r0) - running on pcumass4 on Wed Nov 4 09:34:31 2020 -==================================================================================================================================== -ApplicationMgr INFO Successfully loaded modules : StoreGate, CLIDComps ApplicationMgr INFO Application Manager Configured successfully -ClassIDSvc INFO getRegistryEntries: read 12361 CLIDRegistry entries for module ALL EventLoopMgr WARNING Unable to locate service "EventSelector" EventLoopMgr WARNING No events will be processed from external input. ApplicationMgr INFO Application Manager Initialized successfully @@ -22,22 +10,7 @@ ApplicationMgr Ready Volume pmdum2 not found: returning 0 Volume pmdum3 not found: returning 0 INFO Initialize from dictionary - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG pixel decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,0,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 0 1 0 2 mode enumerated - DEBUG bec_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG lay_disk decode 0 vals 0:2 mask/zero mask/shift/bits/offset 3 fe7fffffffffffff 55 2 7 indexes mode both_bounded - DEBUG lay_disk_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_mod decode 0 vals 0:51 mask/zero mask/shift/bits/offset 3f ff81ffffffffffff 49 6 9 indexes mode both_bounded - DEBUG phi_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode 
both_bounded - DEBUG eta_mod decode 1 vals -6:6 mask/zero mask/shift/bits/offset f fffe1fffffffffff 45 4 15 indexes mode both_bounded - DEBUG eta_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_index decode 0 vals 0:327 mask/zero mask/shift/bits/offset 1ff ffffe00fffffffff 36 9 19 indexes mode both_bounded - DEBUG eta_index decode 0 vals 0:191 mask/zero mask/shift/bits/offset ff fffffff00fffffff 28 8 28 indexes mode both_bounded - DEBUG bec_eta_mod decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded indet 1 2,4,5,7,10,11,12,13 7 1fffffffffffffff 61 3 0 min/max 2 13 values 2 4 5 7 10 11 12 13 indexes 0 0 1 2 0 3 0 0 4 5 6 7 indices 8 prev 0 next 0 mode enumerated cont mode none pixel 1 1:3 3 e7ffffffffffffff 59 2 3 min/max 1 3 values indexes indices 3 prev 0 next 0 mode both_bounded cont mode none bec 1 -2,0,2 3 f9ffffffffffffff 57 2 5 min/max -2 2 values -2 0 2 indexes 0 0 1 0 2 indices 3 prev 0 next 0 mode enumerated cont mode none @@ -60,87 +33,28 @@ phi_module 4 eta_module 5 phi_index 6 eta_index 7 - DEBUG PixelID::initialize_from_dict Set barrel field values: 0 - DEBUG PixelID::initialize_from_dict Set dbm field values: -999,999 - DEBUG PixelID::initialize_from_dict Found field values: InDet/Pixel 2/1 - DEBUG PixelID::init_neighbors - DEBUG PixelID::initialize_from_dict - DEBUG Wafer range -> 2/1/0/0/0:21/-6:6 | 2/1/0/1/0:37/-6:6 | 2/1/0/2/0:51/-6:6 | 2/1/-2,2/0:2/0:47/0 - DEBUG Pixel range -> 2/1/0/0/0:21/-6:6/0:327/0:191 | 2/1/0/1/0:37/-6:6/0:327/0:143 | 2/1/0/2/0:51/-6:6/0:327/0:143 | 2/1/-2,2/0:2/0:47/0/0:327/0:143 INFO SCT_ID::initialize_from_dictionary AtlasDetectorID::initialize_from_dictionary - OK INFO Initialize from dictionary cout 0 - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 
indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG trt decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,-1,1,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 1 0 2 3 mode enumerated - DEBUG phi_mod decode 0 vals 0:31 mask/zero mask/shift/bits/offset 1f fe0fffffffffffff 52 5 7 indexes mode both_bounded - DEBUG lay_wheel decode 0 vals 0:17 mask/zero mask/shift/bits/offset 1f fff07fffffffffff 47 5 12 indexes mode both_bounded - DEBUG str_lay decode 0 vals 0:29 mask/zero mask/shift/bits/offset 1f ffff83ffffffffff 42 5 17 indexes mode both_bounded - DEBUG straw decode 0 vals 0:28 mask/zero mask/shift/bits/offset 1f fffffc1fffffffff 37 5 22 indexes mode both_bounded - DEBUG TRT_ID::initialize_from_dict Set barrel field values: -1,1 - DEBUG TRT_ID::initialize_from_dict Found field values: InDet/TRT 2/3 - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG TRT_ID::init_hashes module hash max 1344 - DEBUG TRT_ID::init_hashes straw layer hash max 19008 - DEBUG TRT_ID::init_hashes straw hash max 424576 - DEBUG TRT_ID::initialize_from_dict - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG Straw layer range -> 2/3/-1,1/0:31/0/0 | 2/3/-1,1/0:31/0/1:4 | 2/3/-1,1/0:31/0/5:9 | 2/3/-1,1/0:31/0/10:14 | 2/3/-1,1/0:31/0/15:17 | 2/3/-1,1/0:31/0/18 | 2/3/-1,1/0:31/1/0 | 2/3/-1,1/0:31/1/1:5 | 2/3/-1,1/0:31/1/6:10 | 2/3/-1,1/0:31/1/11:15 | 2/3/-1,1/0:31/1/16:20 | 2/3/-1,1/0:31/1/21:22 | 2/3/-1,1/0:31/1/23 | 2/3/-1,1/0:31/2/0 | 2/3/-1,1/0:31/2/1:4 | 2/3/-1,1/0:31/2/5:9 | 2/3/-1,1/0:31/2/10:14 | 2/3/-1,1/0:31/2/15:19 | 2/3/-1,1/0:31/2/20:24 | 2/3/-1,1/0:31/2/25:28 | 2/3/-1,1/0:31/2/29 | 2/3/-2,2/0:31/0:5/0:15 | 2/3/-2,2/0:31/6:13/0:7 | 2/3/-2,2/0:31/14:17/0:15 - DEBUG Straw range -> 
2/3/-1,1/0:31/0/0/0:14 | 2/3/-1,1/0:31/0/1:4/0:15 | 2/3/-1,1/0:31/0/5:9/0:16 | 2/3/-1,1/0:31/0/10:14/0:17 | 2/3/-1,1/0:31/0/15:17/0:18 | 2/3/-1,1/0:31/0/18/0:17 | 2/3/-1,1/0:31/1/0/0:18 | 2/3/-1,1/0:31/1/1:5/0:19 | 2/3/-1,1/0:31/1/6:10/0:20 | 2/3/-1,1/0:31/1/11:15/0:21 | 2/3/-1,1/0:31/1/16:20/0:22 | 2/3/-1,1/0:31/1/21:22/0:23 | 2/3/-1,1/0:31/1/23/0:22 | 2/3/-1,1/0:31/2/0/0:22 | 2/3/-1,1/0:31/2/1:4/0:23 | 2/3/-1,1/0:31/2/5:9/0:24 | 2/3/-1,1/0:31/2/10:14/0:25 | 2/3/-1,1/0:31/2/15:19/0:26 | 2/3/-1,1/0:31/2/20:24/0:27 | 2/3/-1,1/0:31/2/25:28/0:28 | 2/3/-1,1/0:31/2/29/0:27 | 2/3/-2,2/0:31/0:5/0:15/0:23 | 2/3/-2,2/0:31/6:13/0:7/0:23 | 2/3/-2,2/0:31/14:17/0:15/0:17 - AtlasDetectorID::initialize_from_dictionary - OK -CscIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -CscIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -CscIdHelper INFO MultiRange built successfully to cscStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK CscIdHelper INFO Initializing CSC hash indices ... -CscIdHelper INFO The element hash max is 32 -CscIdHelper INFO The detector element hash max is 64 CscIdHelper INFO The channel hash max is 61440 CscIdHelper INFO Initializing CSC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -RpcIdHelper INFO MultiRange built successfully to doubletR: MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to detectorElement: DetectorElement MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to rpcStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper INFO Initializing RPC hash indices ... -RpcIdHelper INFO The element hash max is 600 -RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -TgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper INFO Initializing TGC hash indices ... -TgcIdHelper INFO The element hash max is 1578 -TgcIdHelper INFO The detector element hash max is 1578 TgcIdHelper INFO Initializing TGC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -MdtIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MdtIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 261 -MdtIdHelper INFO MultiRange built successfully to tube: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper INFO Initializing MDT hash indices ... -MdtIdHelper INFO The element hash max is 1188 -MdtIdHelper INFO The detector element hash max is 2328 MdtIdHelper INFO Initializing MDT hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -sTgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -sTgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 253 -sTgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK sTgcIdHelper INFO Initializing sTGC hash indices ... -sTgcIdHelper INFO The element hash max is 96 -sTgcIdHelper INFO The detector element hash max is 192 sTgcIdHelper INFO Initializing sTGC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -MmIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MmIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -MmIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MmIdHelper INFO Initializing MicroMegas hash indices ... -MmIdHelper INFO The element hash max is 64 -MmIdHelper INFO The detector element hash max is 128 MmIdHelper INFO Initializing MicroMegas hash indices for finding neighbors ... AtlasDetectorID::initialize_from_dictionary - OK -ClassIDSvc INFO getRegistryEntries: read 379 CLIDRegistry entries for module ALL test1 -ClassIDSvc INFO getRegistryEntries: read 320 CLIDRegistry entries for module ALL -ClassIDSvc INFO getRegistryEntries: read 170 CLIDRegistry entries for module ALL diff --git a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/TgcCoinDataContainerCnv_p1_test.ref b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/TgcCoinDataContainerCnv_p1_test.ref index fe8dd235360ef206269804df65cfa2414a93839b..eafa59f97879b9e447a9cdea019dc6bb7238a360 100644 --- a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/TgcCoinDataContainerCnv_p1_test.ref +++ b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/TgcCoinDataContainerCnv_p1_test.ref @@ -1,18 +1,6 @@ -Initializing Gaudi ApplicationMgr using job opts /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # =======> /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # (1,1): ApplicationMgr.Dlls += ["StoreGate", "CLIDComps"] -JobOptionsSvc INFO # (2,1): ApplicationMgr.ExtSvc += ["StoreGateSvc", "StoreGateSvc/DetectorStore", "StoreGateSvc/ConditionStore", "Athena::RCUSvc"] -JobOptionsSvc INFO Job options successfully read in from 
/home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -ApplicationMgr SUCCESS -==================================================================================================================================== - Welcome to ApplicationMgr (GaudiCoreSvc v34r0) - running on pcumass4 on Wed Nov 4 09:35:27 2020 -==================================================================================================================================== -ApplicationMgr INFO Successfully loaded modules : StoreGate, CLIDComps ApplicationMgr INFO Application Manager Configured successfully -ClassIDSvc INFO getRegistryEntries: read 12350 CLIDRegistry entries for module ALL EventLoopMgr WARNING Unable to locate service "EventSelector" EventLoopMgr WARNING No events will be processed from external input. ApplicationMgr INFO Application Manager Initialized successfully @@ -22,22 +10,7 @@ ApplicationMgr Ready Volume pmdum2 not found: returning 0 Volume pmdum3 not found: returning 0 INFO Initialize from dictionary - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG pixel decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,0,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 0 1 0 2 mode enumerated - DEBUG bec_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG lay_disk decode 0 vals 0:2 mask/zero mask/shift/bits/offset 3 fe7fffffffffffff 55 2 7 indexes mode both_bounded - DEBUG lay_disk_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_mod decode 0 vals 0:51 mask/zero mask/shift/bits/offset 3f ff81ffffffffffff 49 6 
9 indexes mode both_bounded - DEBUG phi_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG eta_mod decode 1 vals -6:6 mask/zero mask/shift/bits/offset f fffe1fffffffffff 45 4 15 indexes mode both_bounded - DEBUG eta_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_index decode 0 vals 0:327 mask/zero mask/shift/bits/offset 1ff ffffe00fffffffff 36 9 19 indexes mode both_bounded - DEBUG eta_index decode 0 vals 0:191 mask/zero mask/shift/bits/offset ff fffffff00fffffff 28 8 28 indexes mode both_bounded - DEBUG bec_eta_mod decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded indet 1 2,4,5,7,10,11,12,13 7 1fffffffffffffff 61 3 0 min/max 2 13 values 2 4 5 7 10 11 12 13 indexes 0 0 1 2 0 3 0 0 4 5 6 7 indices 8 prev 0 next 0 mode enumerated cont mode none pixel 1 1:3 3 e7ffffffffffffff 59 2 3 min/max 1 3 values indexes indices 3 prev 0 next 0 mode both_bounded cont mode none bec 1 -2,0,2 3 f9ffffffffffffff 57 2 5 min/max -2 2 values -2 0 2 indexes 0 0 1 0 2 indices 3 prev 0 next 0 mode enumerated cont mode none @@ -60,85 +33,28 @@ phi_module 4 eta_module 5 phi_index 6 eta_index 7 - DEBUG PixelID::initialize_from_dict Set barrel field values: 0 - DEBUG PixelID::initialize_from_dict Set dbm field values: -999,999 - DEBUG PixelID::initialize_from_dict Found field values: InDet/Pixel 2/1 - DEBUG PixelID::init_neighbors - DEBUG PixelID::initialize_from_dict - DEBUG Wafer range -> 2/1/0/0/0:21/-6:6 | 2/1/0/1/0:37/-6:6 | 2/1/0/2/0:51/-6:6 | 2/1/-2,2/0:2/0:47/0 - DEBUG Pixel range -> 2/1/0/0/0:21/-6:6/0:327/0:191 | 2/1/0/1/0:37/-6:6/0:327/0:143 | 2/1/0/2/0:51/-6:6/0:327/0:143 | 2/1/-2,2/0:2/0:47/0/0:327/0:143 INFO SCT_ID::initialize_from_dictionary AtlasDetectorID::initialize_from_dictionary - OK INFO Initialize from dictionary cout 0 - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields 
for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG trt decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,-1,1,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 1 0 2 3 mode enumerated - DEBUG phi_mod decode 0 vals 0:31 mask/zero mask/shift/bits/offset 1f fe0fffffffffffff 52 5 7 indexes mode both_bounded - DEBUG lay_wheel decode 0 vals 0:17 mask/zero mask/shift/bits/offset 1f fff07fffffffffff 47 5 12 indexes mode both_bounded - DEBUG str_lay decode 0 vals 0:29 mask/zero mask/shift/bits/offset 1f ffff83ffffffffff 42 5 17 indexes mode both_bounded - DEBUG straw decode 0 vals 0:28 mask/zero mask/shift/bits/offset 1f fffffc1fffffffff 37 5 22 indexes mode both_bounded - DEBUG TRT_ID::initialize_from_dict Set barrel field values: -1,1 - DEBUG TRT_ID::initialize_from_dict Found field values: InDet/TRT 2/3 - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG TRT_ID::init_hashes module hash max 1344 - DEBUG TRT_ID::init_hashes straw layer hash max 19008 - DEBUG TRT_ID::init_hashes straw hash max 424576 - DEBUG TRT_ID::initialize_from_dict - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG Straw layer range -> 2/3/-1,1/0:31/0/0 | 2/3/-1,1/0:31/0/1:4 | 2/3/-1,1/0:31/0/5:9 | 2/3/-1,1/0:31/0/10:14 | 2/3/-1,1/0:31/0/15:17 | 2/3/-1,1/0:31/0/18 | 2/3/-1,1/0:31/1/0 | 2/3/-1,1/0:31/1/1:5 | 2/3/-1,1/0:31/1/6:10 | 2/3/-1,1/0:31/1/11:15 | 2/3/-1,1/0:31/1/16:20 | 2/3/-1,1/0:31/1/21:22 | 2/3/-1,1/0:31/1/23 | 2/3/-1,1/0:31/2/0 | 2/3/-1,1/0:31/2/1:4 | 2/3/-1,1/0:31/2/5:9 | 2/3/-1,1/0:31/2/10:14 | 2/3/-1,1/0:31/2/15:19 | 2/3/-1,1/0:31/2/20:24 | 2/3/-1,1/0:31/2/25:28 | 
2/3/-1,1/0:31/2/29 | 2/3/-2,2/0:31/0:5/0:15 | 2/3/-2,2/0:31/6:13/0:7 | 2/3/-2,2/0:31/14:17/0:15 - DEBUG Straw range -> 2/3/-1,1/0:31/0/0/0:14 | 2/3/-1,1/0:31/0/1:4/0:15 | 2/3/-1,1/0:31/0/5:9/0:16 | 2/3/-1,1/0:31/0/10:14/0:17 | 2/3/-1,1/0:31/0/15:17/0:18 | 2/3/-1,1/0:31/0/18/0:17 | 2/3/-1,1/0:31/1/0/0:18 | 2/3/-1,1/0:31/1/1:5/0:19 | 2/3/-1,1/0:31/1/6:10/0:20 | 2/3/-1,1/0:31/1/11:15/0:21 | 2/3/-1,1/0:31/1/16:20/0:22 | 2/3/-1,1/0:31/1/21:22/0:23 | 2/3/-1,1/0:31/1/23/0:22 | 2/3/-1,1/0:31/2/0/0:22 | 2/3/-1,1/0:31/2/1:4/0:23 | 2/3/-1,1/0:31/2/5:9/0:24 | 2/3/-1,1/0:31/2/10:14/0:25 | 2/3/-1,1/0:31/2/15:19/0:26 | 2/3/-1,1/0:31/2/20:24/0:27 | 2/3/-1,1/0:31/2/25:28/0:28 | 2/3/-1,1/0:31/2/29/0:27 | 2/3/-2,2/0:31/0:5/0:15/0:23 | 2/3/-2,2/0:31/6:13/0:7/0:23 | 2/3/-2,2/0:31/14:17/0:15/0:17 - AtlasDetectorID::initialize_from_dictionary - OK -CscIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -CscIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -CscIdHelper INFO MultiRange built successfully to cscStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK CscIdHelper INFO Initializing CSC hash indices ... -CscIdHelper INFO The element hash max is 32 -CscIdHelper INFO The detector element hash max is 64 CscIdHelper INFO The channel hash max is 61440 CscIdHelper INFO Initializing CSC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -RpcIdHelper INFO MultiRange built successfully to doubletR: MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to detectorElement: DetectorElement MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to rpcStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper INFO Initializing RPC hash indices ... 
-RpcIdHelper INFO The element hash max is 600 -RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -TgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper INFO Initializing TGC hash indices ... -TgcIdHelper INFO The element hash max is 1578 -TgcIdHelper INFO The detector element hash max is 1578 TgcIdHelper INFO Initializing TGC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -MdtIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MdtIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 261 -MdtIdHelper INFO MultiRange built successfully to tube: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper INFO Initializing MDT hash indices ... -MdtIdHelper INFO The element hash max is 1188 -MdtIdHelper INFO The detector element hash max is 2328 MdtIdHelper INFO Initializing MDT hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -sTgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -sTgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 253 -sTgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK sTgcIdHelper INFO Initializing sTGC hash indices ... -sTgcIdHelper INFO The element hash max is 96 -sTgcIdHelper INFO The detector element hash max is 192 sTgcIdHelper INFO Initializing sTGC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -MmIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MmIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -MmIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MmIdHelper INFO Initializing MicroMegas hash indices ... -MmIdHelper INFO The element hash max is 64 -MmIdHelper INFO The detector element hash max is 128 MmIdHelper INFO Initializing MicroMegas hash indices for finding neighbors ... AtlasDetectorID::initialize_from_dictionary - OK -ClassIDSvc INFO getRegistryEntries: read 379 CLIDRegistry entries for module ALL test1 diff --git a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/TgcCoinDataContainerCnv_p2_test.ref b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/TgcCoinDataContainerCnv_p2_test.ref index 7e2ea0595251a74c2261f9fa6cb21779e7215f1d..eafa59f97879b9e447a9cdea019dc6bb7238a360 100644 --- a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/TgcCoinDataContainerCnv_p2_test.ref +++ b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/TgcCoinDataContainerCnv_p2_test.ref @@ -1,18 +1,6 @@ -Initializing Gaudi ApplicationMgr using job opts /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # =======> /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # (1,1): ApplicationMgr.Dlls += ["StoreGate", "CLIDComps"] -JobOptionsSvc INFO # (2,1): ApplicationMgr.ExtSvc += ["StoreGateSvc", "StoreGateSvc/DetectorStore", "StoreGateSvc/ConditionStore", "Athena::RCUSvc"] -JobOptionsSvc INFO Job options successfully read in from /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -ApplicationMgr SUCCESS 
-==================================================================================================================================== - Welcome to ApplicationMgr (GaudiCoreSvc v34r0) - running on pcumass4 on Wed Nov 4 09:35:29 2020 -==================================================================================================================================== -ApplicationMgr INFO Successfully loaded modules : StoreGate, CLIDComps ApplicationMgr INFO Application Manager Configured successfully -ClassIDSvc INFO getRegistryEntries: read 12350 CLIDRegistry entries for module ALL EventLoopMgr WARNING Unable to locate service "EventSelector" EventLoopMgr WARNING No events will be processed from external input. ApplicationMgr INFO Application Manager Initialized successfully @@ -22,22 +10,7 @@ ApplicationMgr Ready Volume pmdum2 not found: returning 0 Volume pmdum3 not found: returning 0 INFO Initialize from dictionary - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG pixel decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,0,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 0 1 0 2 mode enumerated - DEBUG bec_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG lay_disk decode 0 vals 0:2 mask/zero mask/shift/bits/offset 3 fe7fffffffffffff 55 2 7 indexes mode both_bounded - DEBUG lay_disk_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_mod decode 0 vals 0:51 mask/zero mask/shift/bits/offset 3f ff81ffffffffffff 49 6 9 indexes mode both_bounded - DEBUG phi_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode 
both_bounded - DEBUG eta_mod decode 1 vals -6:6 mask/zero mask/shift/bits/offset f fffe1fffffffffff 45 4 15 indexes mode both_bounded - DEBUG eta_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_index decode 0 vals 0:327 mask/zero mask/shift/bits/offset 1ff ffffe00fffffffff 36 9 19 indexes mode both_bounded - DEBUG eta_index decode 0 vals 0:191 mask/zero mask/shift/bits/offset ff fffffff00fffffff 28 8 28 indexes mode both_bounded - DEBUG bec_eta_mod decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded indet 1 2,4,5,7,10,11,12,13 7 1fffffffffffffff 61 3 0 min/max 2 13 values 2 4 5 7 10 11 12 13 indexes 0 0 1 2 0 3 0 0 4 5 6 7 indices 8 prev 0 next 0 mode enumerated cont mode none pixel 1 1:3 3 e7ffffffffffffff 59 2 3 min/max 1 3 values indexes indices 3 prev 0 next 0 mode both_bounded cont mode none bec 1 -2,0,2 3 f9ffffffffffffff 57 2 5 min/max -2 2 values -2 0 2 indexes 0 0 1 0 2 indices 3 prev 0 next 0 mode enumerated cont mode none @@ -60,85 +33,28 @@ phi_module 4 eta_module 5 phi_index 6 eta_index 7 - DEBUG PixelID::initialize_from_dict Set barrel field values: 0 - DEBUG PixelID::initialize_from_dict Set dbm field values: -999,999 - DEBUG PixelID::initialize_from_dict Found field values: InDet/Pixel 2/1 - DEBUG PixelID::init_neighbors - DEBUG PixelID::initialize_from_dict - DEBUG Wafer range -> 2/1/0/0/0:21/-6:6 | 2/1/0/1/0:37/-6:6 | 2/1/0/2/0:51/-6:6 | 2/1/-2,2/0:2/0:47/0 - DEBUG Pixel range -> 2/1/0/0/0:21/-6:6/0:327/0:191 | 2/1/0/1/0:37/-6:6/0:327/0:143 | 2/1/0/2/0:51/-6:6/0:327/0:143 | 2/1/-2,2/0:2/0:47/0/0:327/0:143 INFO SCT_ID::initialize_from_dictionary AtlasDetectorID::initialize_from_dictionary - OK INFO Initialize from dictionary cout 0 - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 
indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG trt decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,-1,1,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 1 0 2 3 mode enumerated - DEBUG phi_mod decode 0 vals 0:31 mask/zero mask/shift/bits/offset 1f fe0fffffffffffff 52 5 7 indexes mode both_bounded - DEBUG lay_wheel decode 0 vals 0:17 mask/zero mask/shift/bits/offset 1f fff07fffffffffff 47 5 12 indexes mode both_bounded - DEBUG str_lay decode 0 vals 0:29 mask/zero mask/shift/bits/offset 1f ffff83ffffffffff 42 5 17 indexes mode both_bounded - DEBUG straw decode 0 vals 0:28 mask/zero mask/shift/bits/offset 1f fffffc1fffffffff 37 5 22 indexes mode both_bounded - DEBUG TRT_ID::initialize_from_dict Set barrel field values: -1,1 - DEBUG TRT_ID::initialize_from_dict Found field values: InDet/TRT 2/3 - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG TRT_ID::init_hashes module hash max 1344 - DEBUG TRT_ID::init_hashes straw layer hash max 19008 - DEBUG TRT_ID::init_hashes straw hash max 424576 - DEBUG TRT_ID::initialize_from_dict - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG Straw layer range -> 2/3/-1,1/0:31/0/0 | 2/3/-1,1/0:31/0/1:4 | 2/3/-1,1/0:31/0/5:9 | 2/3/-1,1/0:31/0/10:14 | 2/3/-1,1/0:31/0/15:17 | 2/3/-1,1/0:31/0/18 | 2/3/-1,1/0:31/1/0 | 2/3/-1,1/0:31/1/1:5 | 2/3/-1,1/0:31/1/6:10 | 2/3/-1,1/0:31/1/11:15 | 2/3/-1,1/0:31/1/16:20 | 2/3/-1,1/0:31/1/21:22 | 2/3/-1,1/0:31/1/23 | 2/3/-1,1/0:31/2/0 | 2/3/-1,1/0:31/2/1:4 | 2/3/-1,1/0:31/2/5:9 | 2/3/-1,1/0:31/2/10:14 | 2/3/-1,1/0:31/2/15:19 | 2/3/-1,1/0:31/2/20:24 | 2/3/-1,1/0:31/2/25:28 | 2/3/-1,1/0:31/2/29 | 2/3/-2,2/0:31/0:5/0:15 | 2/3/-2,2/0:31/6:13/0:7 | 2/3/-2,2/0:31/14:17/0:15 - DEBUG Straw range -> 
2/3/-1,1/0:31/0/0/0:14 | 2/3/-1,1/0:31/0/1:4/0:15 | 2/3/-1,1/0:31/0/5:9/0:16 | 2/3/-1,1/0:31/0/10:14/0:17 | 2/3/-1,1/0:31/0/15:17/0:18 | 2/3/-1,1/0:31/0/18/0:17 | 2/3/-1,1/0:31/1/0/0:18 | 2/3/-1,1/0:31/1/1:5/0:19 | 2/3/-1,1/0:31/1/6:10/0:20 | 2/3/-1,1/0:31/1/11:15/0:21 | 2/3/-1,1/0:31/1/16:20/0:22 | 2/3/-1,1/0:31/1/21:22/0:23 | 2/3/-1,1/0:31/1/23/0:22 | 2/3/-1,1/0:31/2/0/0:22 | 2/3/-1,1/0:31/2/1:4/0:23 | 2/3/-1,1/0:31/2/5:9/0:24 | 2/3/-1,1/0:31/2/10:14/0:25 | 2/3/-1,1/0:31/2/15:19/0:26 | 2/3/-1,1/0:31/2/20:24/0:27 | 2/3/-1,1/0:31/2/25:28/0:28 | 2/3/-1,1/0:31/2/29/0:27 | 2/3/-2,2/0:31/0:5/0:15/0:23 | 2/3/-2,2/0:31/6:13/0:7/0:23 | 2/3/-2,2/0:31/14:17/0:15/0:17 - AtlasDetectorID::initialize_from_dictionary - OK -CscIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -CscIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -CscIdHelper INFO MultiRange built successfully to cscStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK CscIdHelper INFO Initializing CSC hash indices ... -CscIdHelper INFO The element hash max is 32 -CscIdHelper INFO The detector element hash max is 64 CscIdHelper INFO The channel hash max is 61440 CscIdHelper INFO Initializing CSC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -RpcIdHelper INFO MultiRange built successfully to doubletR: MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to detectorElement: DetectorElement MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to rpcStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper INFO Initializing RPC hash indices ... -RpcIdHelper INFO The element hash max is 600 -RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -TgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper INFO Initializing TGC hash indices ... -TgcIdHelper INFO The element hash max is 1578 -TgcIdHelper INFO The detector element hash max is 1578 TgcIdHelper INFO Initializing TGC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -MdtIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MdtIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 261 -MdtIdHelper INFO MultiRange built successfully to tube: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper INFO Initializing MDT hash indices ... -MdtIdHelper INFO The element hash max is 1188 -MdtIdHelper INFO The detector element hash max is 2328 MdtIdHelper INFO Initializing MDT hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -sTgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -sTgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 253 -sTgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK sTgcIdHelper INFO Initializing sTGC hash indices ... -sTgcIdHelper INFO The element hash max is 96 -sTgcIdHelper INFO The detector element hash max is 192 sTgcIdHelper INFO Initializing sTGC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -MmIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MmIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -MmIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MmIdHelper INFO Initializing MicroMegas hash indices ... -MmIdHelper INFO The element hash max is 64 -MmIdHelper INFO The detector element hash max is 128 MmIdHelper INFO Initializing MicroMegas hash indices for finding neighbors ... AtlasDetectorID::initialize_from_dictionary - OK -ClassIDSvc INFO getRegistryEntries: read 379 CLIDRegistry entries for module ALL test1 diff --git a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/TgcCoinDataContainerCnv_p3_test.ref b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/TgcCoinDataContainerCnv_p3_test.ref index 08753e8b94b72c88e0f387016049c33fe74ea582..eafa59f97879b9e447a9cdea019dc6bb7238a360 100644 --- a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/TgcCoinDataContainerCnv_p3_test.ref +++ b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/TgcCoinDataContainerCnv_p3_test.ref @@ -1,18 +1,6 @@ -Initializing Gaudi ApplicationMgr using job opts /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # =======> /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # (1,1): ApplicationMgr.Dlls += ["StoreGate", "CLIDComps"] -JobOptionsSvc INFO # (2,1): ApplicationMgr.ExtSvc += ["StoreGateSvc", "StoreGateSvc/DetectorStore", "StoreGateSvc/ConditionStore", "Athena::RCUSvc"] -JobOptionsSvc INFO Job options successfully read in from /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -ApplicationMgr SUCCESS 
-==================================================================================================================================== - Welcome to ApplicationMgr (GaudiCoreSvc v34r0) - running on pcumass4 on Wed Nov 4 09:35:31 2020 -==================================================================================================================================== -ApplicationMgr INFO Successfully loaded modules : StoreGate, CLIDComps ApplicationMgr INFO Application Manager Configured successfully -ClassIDSvc INFO getRegistryEntries: read 12350 CLIDRegistry entries for module ALL EventLoopMgr WARNING Unable to locate service "EventSelector" EventLoopMgr WARNING No events will be processed from external input. ApplicationMgr INFO Application Manager Initialized successfully @@ -22,22 +10,7 @@ ApplicationMgr Ready Volume pmdum2 not found: returning 0 Volume pmdum3 not found: returning 0 INFO Initialize from dictionary - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG pixel decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,0,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 0 1 0 2 mode enumerated - DEBUG bec_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG lay_disk decode 0 vals 0:2 mask/zero mask/shift/bits/offset 3 fe7fffffffffffff 55 2 7 indexes mode both_bounded - DEBUG lay_disk_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_mod decode 0 vals 0:51 mask/zero mask/shift/bits/offset 3f ff81ffffffffffff 49 6 9 indexes mode both_bounded - DEBUG phi_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode 
both_bounded - DEBUG eta_mod decode 1 vals -6:6 mask/zero mask/shift/bits/offset f fffe1fffffffffff 45 4 15 indexes mode both_bounded - DEBUG eta_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_index decode 0 vals 0:327 mask/zero mask/shift/bits/offset 1ff ffffe00fffffffff 36 9 19 indexes mode both_bounded - DEBUG eta_index decode 0 vals 0:191 mask/zero mask/shift/bits/offset ff fffffff00fffffff 28 8 28 indexes mode both_bounded - DEBUG bec_eta_mod decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded indet 1 2,4,5,7,10,11,12,13 7 1fffffffffffffff 61 3 0 min/max 2 13 values 2 4 5 7 10 11 12 13 indexes 0 0 1 2 0 3 0 0 4 5 6 7 indices 8 prev 0 next 0 mode enumerated cont mode none pixel 1 1:3 3 e7ffffffffffffff 59 2 3 min/max 1 3 values indexes indices 3 prev 0 next 0 mode both_bounded cont mode none bec 1 -2,0,2 3 f9ffffffffffffff 57 2 5 min/max -2 2 values -2 0 2 indexes 0 0 1 0 2 indices 3 prev 0 next 0 mode enumerated cont mode none @@ -60,85 +33,28 @@ phi_module 4 eta_module 5 phi_index 6 eta_index 7 - DEBUG PixelID::initialize_from_dict Set barrel field values: 0 - DEBUG PixelID::initialize_from_dict Set dbm field values: -999,999 - DEBUG PixelID::initialize_from_dict Found field values: InDet/Pixel 2/1 - DEBUG PixelID::init_neighbors - DEBUG PixelID::initialize_from_dict - DEBUG Wafer range -> 2/1/0/0/0:21/-6:6 | 2/1/0/1/0:37/-6:6 | 2/1/0/2/0:51/-6:6 | 2/1/-2,2/0:2/0:47/0 - DEBUG Pixel range -> 2/1/0/0/0:21/-6:6/0:327/0:191 | 2/1/0/1/0:37/-6:6/0:327/0:143 | 2/1/0/2/0:51/-6:6/0:327/0:143 | 2/1/-2,2/0:2/0:47/0/0:327/0:143 INFO SCT_ID::initialize_from_dictionary AtlasDetectorID::initialize_from_dictionary - OK INFO Initialize from dictionary cout 0 - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 
indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG trt decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,-1,1,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 1 0 2 3 mode enumerated - DEBUG phi_mod decode 0 vals 0:31 mask/zero mask/shift/bits/offset 1f fe0fffffffffffff 52 5 7 indexes mode both_bounded - DEBUG lay_wheel decode 0 vals 0:17 mask/zero mask/shift/bits/offset 1f fff07fffffffffff 47 5 12 indexes mode both_bounded - DEBUG str_lay decode 0 vals 0:29 mask/zero mask/shift/bits/offset 1f ffff83ffffffffff 42 5 17 indexes mode both_bounded - DEBUG straw decode 0 vals 0:28 mask/zero mask/shift/bits/offset 1f fffffc1fffffffff 37 5 22 indexes mode both_bounded - DEBUG TRT_ID::initialize_from_dict Set barrel field values: -1,1 - DEBUG TRT_ID::initialize_from_dict Found field values: InDet/TRT 2/3 - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG TRT_ID::init_hashes module hash max 1344 - DEBUG TRT_ID::init_hashes straw layer hash max 19008 - DEBUG TRT_ID::init_hashes straw hash max 424576 - DEBUG TRT_ID::initialize_from_dict - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG Straw layer range -> 2/3/-1,1/0:31/0/0 | 2/3/-1,1/0:31/0/1:4 | 2/3/-1,1/0:31/0/5:9 | 2/3/-1,1/0:31/0/10:14 | 2/3/-1,1/0:31/0/15:17 | 2/3/-1,1/0:31/0/18 | 2/3/-1,1/0:31/1/0 | 2/3/-1,1/0:31/1/1:5 | 2/3/-1,1/0:31/1/6:10 | 2/3/-1,1/0:31/1/11:15 | 2/3/-1,1/0:31/1/16:20 | 2/3/-1,1/0:31/1/21:22 | 2/3/-1,1/0:31/1/23 | 2/3/-1,1/0:31/2/0 | 2/3/-1,1/0:31/2/1:4 | 2/3/-1,1/0:31/2/5:9 | 2/3/-1,1/0:31/2/10:14 | 2/3/-1,1/0:31/2/15:19 | 2/3/-1,1/0:31/2/20:24 | 2/3/-1,1/0:31/2/25:28 | 2/3/-1,1/0:31/2/29 | 2/3/-2,2/0:31/0:5/0:15 | 2/3/-2,2/0:31/6:13/0:7 | 2/3/-2,2/0:31/14:17/0:15 - DEBUG Straw range -> 
2/3/-1,1/0:31/0/0/0:14 | 2/3/-1,1/0:31/0/1:4/0:15 | 2/3/-1,1/0:31/0/5:9/0:16 | 2/3/-1,1/0:31/0/10:14/0:17 | 2/3/-1,1/0:31/0/15:17/0:18 | 2/3/-1,1/0:31/0/18/0:17 | 2/3/-1,1/0:31/1/0/0:18 | 2/3/-1,1/0:31/1/1:5/0:19 | 2/3/-1,1/0:31/1/6:10/0:20 | 2/3/-1,1/0:31/1/11:15/0:21 | 2/3/-1,1/0:31/1/16:20/0:22 | 2/3/-1,1/0:31/1/21:22/0:23 | 2/3/-1,1/0:31/1/23/0:22 | 2/3/-1,1/0:31/2/0/0:22 | 2/3/-1,1/0:31/2/1:4/0:23 | 2/3/-1,1/0:31/2/5:9/0:24 | 2/3/-1,1/0:31/2/10:14/0:25 | 2/3/-1,1/0:31/2/15:19/0:26 | 2/3/-1,1/0:31/2/20:24/0:27 | 2/3/-1,1/0:31/2/25:28/0:28 | 2/3/-1,1/0:31/2/29/0:27 | 2/3/-2,2/0:31/0:5/0:15/0:23 | 2/3/-2,2/0:31/6:13/0:7/0:23 | 2/3/-2,2/0:31/14:17/0:15/0:17 - AtlasDetectorID::initialize_from_dictionary - OK -CscIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -CscIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -CscIdHelper INFO MultiRange built successfully to cscStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK CscIdHelper INFO Initializing CSC hash indices ... -CscIdHelper INFO The element hash max is 32 -CscIdHelper INFO The detector element hash max is 64 CscIdHelper INFO The channel hash max is 61440 CscIdHelper INFO Initializing CSC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -RpcIdHelper INFO MultiRange built successfully to doubletR: MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to detectorElement: DetectorElement MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to rpcStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper INFO Initializing RPC hash indices ... -RpcIdHelper INFO The element hash max is 600 -RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -TgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper INFO Initializing TGC hash indices ... -TgcIdHelper INFO The element hash max is 1578 -TgcIdHelper INFO The detector element hash max is 1578 TgcIdHelper INFO Initializing TGC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -MdtIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MdtIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 261 -MdtIdHelper INFO MultiRange built successfully to tube: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper INFO Initializing MDT hash indices ... -MdtIdHelper INFO The element hash max is 1188 -MdtIdHelper INFO The detector element hash max is 2328 MdtIdHelper INFO Initializing MDT hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -sTgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -sTgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 253 -sTgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK sTgcIdHelper INFO Initializing sTGC hash indices ... -sTgcIdHelper INFO The element hash max is 96 -sTgcIdHelper INFO The detector element hash max is 192 sTgcIdHelper INFO Initializing sTGC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -MmIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MmIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -MmIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MmIdHelper INFO Initializing MicroMegas hash indices ... -MmIdHelper INFO The element hash max is 64 -MmIdHelper INFO The detector element hash max is 128 MmIdHelper INFO Initializing MicroMegas hash indices for finding neighbors ... AtlasDetectorID::initialize_from_dictionary - OK -ClassIDSvc INFO getRegistryEntries: read 379 CLIDRegistry entries for module ALL test1 diff --git a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/TgcPrepDataContainerCnv_p1_test.ref b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/TgcPrepDataContainerCnv_p1_test.ref index 6b326a928c9fffe92efb257b6bb87389cf012149..eafa59f97879b9e447a9cdea019dc6bb7238a360 100644 --- a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/TgcPrepDataContainerCnv_p1_test.ref +++ b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/TgcPrepDataContainerCnv_p1_test.ref @@ -1,18 +1,6 @@ -Initializing Gaudi ApplicationMgr using job opts /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # =======> /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # (1,1): ApplicationMgr.Dlls += ["StoreGate", "CLIDComps"] -JobOptionsSvc INFO # (2,1): ApplicationMgr.ExtSvc += ["StoreGateSvc", "StoreGateSvc/DetectorStore", "StoreGateSvc/ConditionStore", "Athena::RCUSvc"] -JobOptionsSvc INFO Job options successfully read in from /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -ApplicationMgr SUCCESS 
-==================================================================================================================================== - Welcome to ApplicationMgr (GaudiCoreSvc v34r0) - running on pcumass4 on Wed Nov 4 09:35:12 2020 -==================================================================================================================================== -ApplicationMgr INFO Successfully loaded modules : StoreGate, CLIDComps ApplicationMgr INFO Application Manager Configured successfully -ClassIDSvc INFO getRegistryEntries: read 12360 CLIDRegistry entries for module ALL EventLoopMgr WARNING Unable to locate service "EventSelector" EventLoopMgr WARNING No events will be processed from external input. ApplicationMgr INFO Application Manager Initialized successfully @@ -22,22 +10,7 @@ ApplicationMgr Ready Volume pmdum2 not found: returning 0 Volume pmdum3 not found: returning 0 INFO Initialize from dictionary - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG pixel decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,0,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 0 1 0 2 mode enumerated - DEBUG bec_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG lay_disk decode 0 vals 0:2 mask/zero mask/shift/bits/offset 3 fe7fffffffffffff 55 2 7 indexes mode both_bounded - DEBUG lay_disk_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_mod decode 0 vals 0:51 mask/zero mask/shift/bits/offset 3f ff81ffffffffffff 49 6 9 indexes mode both_bounded - DEBUG phi_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode 
both_bounded - DEBUG eta_mod decode 1 vals -6:6 mask/zero mask/shift/bits/offset f fffe1fffffffffff 45 4 15 indexes mode both_bounded - DEBUG eta_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_index decode 0 vals 0:327 mask/zero mask/shift/bits/offset 1ff ffffe00fffffffff 36 9 19 indexes mode both_bounded - DEBUG eta_index decode 0 vals 0:191 mask/zero mask/shift/bits/offset ff fffffff00fffffff 28 8 28 indexes mode both_bounded - DEBUG bec_eta_mod decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded indet 1 2,4,5,7,10,11,12,13 7 1fffffffffffffff 61 3 0 min/max 2 13 values 2 4 5 7 10 11 12 13 indexes 0 0 1 2 0 3 0 0 4 5 6 7 indices 8 prev 0 next 0 mode enumerated cont mode none pixel 1 1:3 3 e7ffffffffffffff 59 2 3 min/max 1 3 values indexes indices 3 prev 0 next 0 mode both_bounded cont mode none bec 1 -2,0,2 3 f9ffffffffffffff 57 2 5 min/max -2 2 values -2 0 2 indexes 0 0 1 0 2 indices 3 prev 0 next 0 mode enumerated cont mode none @@ -60,85 +33,28 @@ phi_module 4 eta_module 5 phi_index 6 eta_index 7 - DEBUG PixelID::initialize_from_dict Set barrel field values: 0 - DEBUG PixelID::initialize_from_dict Set dbm field values: -999,999 - DEBUG PixelID::initialize_from_dict Found field values: InDet/Pixel 2/1 - DEBUG PixelID::init_neighbors - DEBUG PixelID::initialize_from_dict - DEBUG Wafer range -> 2/1/0/0/0:21/-6:6 | 2/1/0/1/0:37/-6:6 | 2/1/0/2/0:51/-6:6 | 2/1/-2,2/0:2/0:47/0 - DEBUG Pixel range -> 2/1/0/0/0:21/-6:6/0:327/0:191 | 2/1/0/1/0:37/-6:6/0:327/0:143 | 2/1/0/2/0:51/-6:6/0:327/0:143 | 2/1/-2,2/0:2/0:47/0/0:327/0:143 INFO SCT_ID::initialize_from_dictionary AtlasDetectorID::initialize_from_dictionary - OK INFO Initialize from dictionary cout 0 - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 
indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG trt decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,-1,1,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 1 0 2 3 mode enumerated - DEBUG phi_mod decode 0 vals 0:31 mask/zero mask/shift/bits/offset 1f fe0fffffffffffff 52 5 7 indexes mode both_bounded - DEBUG lay_wheel decode 0 vals 0:17 mask/zero mask/shift/bits/offset 1f fff07fffffffffff 47 5 12 indexes mode both_bounded - DEBUG str_lay decode 0 vals 0:29 mask/zero mask/shift/bits/offset 1f ffff83ffffffffff 42 5 17 indexes mode both_bounded - DEBUG straw decode 0 vals 0:28 mask/zero mask/shift/bits/offset 1f fffffc1fffffffff 37 5 22 indexes mode both_bounded - DEBUG TRT_ID::initialize_from_dict Set barrel field values: -1,1 - DEBUG TRT_ID::initialize_from_dict Found field values: InDet/TRT 2/3 - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG TRT_ID::init_hashes module hash max 1344 - DEBUG TRT_ID::init_hashes straw layer hash max 19008 - DEBUG TRT_ID::init_hashes straw hash max 424576 - DEBUG TRT_ID::initialize_from_dict - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG Straw layer range -> 2/3/-1,1/0:31/0/0 | 2/3/-1,1/0:31/0/1:4 | 2/3/-1,1/0:31/0/5:9 | 2/3/-1,1/0:31/0/10:14 | 2/3/-1,1/0:31/0/15:17 | 2/3/-1,1/0:31/0/18 | 2/3/-1,1/0:31/1/0 | 2/3/-1,1/0:31/1/1:5 | 2/3/-1,1/0:31/1/6:10 | 2/3/-1,1/0:31/1/11:15 | 2/3/-1,1/0:31/1/16:20 | 2/3/-1,1/0:31/1/21:22 | 2/3/-1,1/0:31/1/23 | 2/3/-1,1/0:31/2/0 | 2/3/-1,1/0:31/2/1:4 | 2/3/-1,1/0:31/2/5:9 | 2/3/-1,1/0:31/2/10:14 | 2/3/-1,1/0:31/2/15:19 | 2/3/-1,1/0:31/2/20:24 | 2/3/-1,1/0:31/2/25:28 | 2/3/-1,1/0:31/2/29 | 2/3/-2,2/0:31/0:5/0:15 | 2/3/-2,2/0:31/6:13/0:7 | 2/3/-2,2/0:31/14:17/0:15 - DEBUG Straw range -> 
2/3/-1,1/0:31/0/0/0:14 | 2/3/-1,1/0:31/0/1:4/0:15 | 2/3/-1,1/0:31/0/5:9/0:16 | 2/3/-1,1/0:31/0/10:14/0:17 | 2/3/-1,1/0:31/0/15:17/0:18 | 2/3/-1,1/0:31/0/18/0:17 | 2/3/-1,1/0:31/1/0/0:18 | 2/3/-1,1/0:31/1/1:5/0:19 | 2/3/-1,1/0:31/1/6:10/0:20 | 2/3/-1,1/0:31/1/11:15/0:21 | 2/3/-1,1/0:31/1/16:20/0:22 | 2/3/-1,1/0:31/1/21:22/0:23 | 2/3/-1,1/0:31/1/23/0:22 | 2/3/-1,1/0:31/2/0/0:22 | 2/3/-1,1/0:31/2/1:4/0:23 | 2/3/-1,1/0:31/2/5:9/0:24 | 2/3/-1,1/0:31/2/10:14/0:25 | 2/3/-1,1/0:31/2/15:19/0:26 | 2/3/-1,1/0:31/2/20:24/0:27 | 2/3/-1,1/0:31/2/25:28/0:28 | 2/3/-1,1/0:31/2/29/0:27 | 2/3/-2,2/0:31/0:5/0:15/0:23 | 2/3/-2,2/0:31/6:13/0:7/0:23 | 2/3/-2,2/0:31/14:17/0:15/0:17 - AtlasDetectorID::initialize_from_dictionary - OK -CscIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -CscIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -CscIdHelper INFO MultiRange built successfully to cscStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK CscIdHelper INFO Initializing CSC hash indices ... -CscIdHelper INFO The element hash max is 32 -CscIdHelper INFO The detector element hash max is 64 CscIdHelper INFO The channel hash max is 61440 CscIdHelper INFO Initializing CSC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -RpcIdHelper INFO MultiRange built successfully to doubletR: MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to detectorElement: DetectorElement MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to rpcStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper INFO Initializing RPC hash indices ... -RpcIdHelper INFO The element hash max is 600 -RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -TgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper INFO Initializing TGC hash indices ... -TgcIdHelper INFO The element hash max is 1578 -TgcIdHelper INFO The detector element hash max is 1578 TgcIdHelper INFO Initializing TGC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -MdtIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MdtIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 261 -MdtIdHelper INFO MultiRange built successfully to tube: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper INFO Initializing MDT hash indices ... -MdtIdHelper INFO The element hash max is 1188 -MdtIdHelper INFO The detector element hash max is 2328 MdtIdHelper INFO Initializing MDT hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -sTgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -sTgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 253 -sTgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK sTgcIdHelper INFO Initializing sTGC hash indices ... -sTgcIdHelper INFO The element hash max is 96 -sTgcIdHelper INFO The detector element hash max is 192 sTgcIdHelper INFO Initializing sTGC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -MmIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MmIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -MmIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MmIdHelper INFO Initializing MicroMegas hash indices ... -MmIdHelper INFO The element hash max is 64 -MmIdHelper INFO The detector element hash max is 128 MmIdHelper INFO Initializing MicroMegas hash indices for finding neighbors ... AtlasDetectorID::initialize_from_dictionary - OK -ClassIDSvc INFO getRegistryEntries: read 379 CLIDRegistry entries for module ALL test1 diff --git a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/TgcPrepDataContainerCnv_p2_test.ref b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/TgcPrepDataContainerCnv_p2_test.ref index 6443a5ffd3b1d391f4ce5197943e88b050f741b1..eafa59f97879b9e447a9cdea019dc6bb7238a360 100644 --- a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/TgcPrepDataContainerCnv_p2_test.ref +++ b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/TgcPrepDataContainerCnv_p2_test.ref @@ -1,18 +1,6 @@ -Initializing Gaudi ApplicationMgr using job opts /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # =======> /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # (1,1): ApplicationMgr.Dlls += ["StoreGate", "CLIDComps"] -JobOptionsSvc INFO # (2,1): ApplicationMgr.ExtSvc += ["StoreGateSvc", "StoreGateSvc/DetectorStore", "StoreGateSvc/ConditionStore", "Athena::RCUSvc"] -JobOptionsSvc INFO Job options successfully read in from /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -ApplicationMgr SUCCESS 
-==================================================================================================================================== - Welcome to ApplicationMgr (GaudiCoreSvc v34r0) - running on pcumass4 on Wed Nov 4 09:35:14 2020 -==================================================================================================================================== -ApplicationMgr INFO Successfully loaded modules : StoreGate, CLIDComps ApplicationMgr INFO Application Manager Configured successfully -ClassIDSvc INFO getRegistryEntries: read 12360 CLIDRegistry entries for module ALL EventLoopMgr WARNING Unable to locate service "EventSelector" EventLoopMgr WARNING No events will be processed from external input. ApplicationMgr INFO Application Manager Initialized successfully @@ -22,22 +10,7 @@ ApplicationMgr Ready Volume pmdum2 not found: returning 0 Volume pmdum3 not found: returning 0 INFO Initialize from dictionary - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG pixel decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,0,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 0 1 0 2 mode enumerated - DEBUG bec_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG lay_disk decode 0 vals 0:2 mask/zero mask/shift/bits/offset 3 fe7fffffffffffff 55 2 7 indexes mode both_bounded - DEBUG lay_disk_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_mod decode 0 vals 0:51 mask/zero mask/shift/bits/offset 3f ff81ffffffffffff 49 6 9 indexes mode both_bounded - DEBUG phi_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode 
both_bounded - DEBUG eta_mod decode 1 vals -6:6 mask/zero mask/shift/bits/offset f fffe1fffffffffff 45 4 15 indexes mode both_bounded - DEBUG eta_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_index decode 0 vals 0:327 mask/zero mask/shift/bits/offset 1ff ffffe00fffffffff 36 9 19 indexes mode both_bounded - DEBUG eta_index decode 0 vals 0:191 mask/zero mask/shift/bits/offset ff fffffff00fffffff 28 8 28 indexes mode both_bounded - DEBUG bec_eta_mod decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded indet 1 2,4,5,7,10,11,12,13 7 1fffffffffffffff 61 3 0 min/max 2 13 values 2 4 5 7 10 11 12 13 indexes 0 0 1 2 0 3 0 0 4 5 6 7 indices 8 prev 0 next 0 mode enumerated cont mode none pixel 1 1:3 3 e7ffffffffffffff 59 2 3 min/max 1 3 values indexes indices 3 prev 0 next 0 mode both_bounded cont mode none bec 1 -2,0,2 3 f9ffffffffffffff 57 2 5 min/max -2 2 values -2 0 2 indexes 0 0 1 0 2 indices 3 prev 0 next 0 mode enumerated cont mode none @@ -60,85 +33,28 @@ phi_module 4 eta_module 5 phi_index 6 eta_index 7 - DEBUG PixelID::initialize_from_dict Set barrel field values: 0 - DEBUG PixelID::initialize_from_dict Set dbm field values: -999,999 - DEBUG PixelID::initialize_from_dict Found field values: InDet/Pixel 2/1 - DEBUG PixelID::init_neighbors - DEBUG PixelID::initialize_from_dict - DEBUG Wafer range -> 2/1/0/0/0:21/-6:6 | 2/1/0/1/0:37/-6:6 | 2/1/0/2/0:51/-6:6 | 2/1/-2,2/0:2/0:47/0 - DEBUG Pixel range -> 2/1/0/0/0:21/-6:6/0:327/0:191 | 2/1/0/1/0:37/-6:6/0:327/0:143 | 2/1/0/2/0:51/-6:6/0:327/0:143 | 2/1/-2,2/0:2/0:47/0/0:327/0:143 INFO SCT_ID::initialize_from_dictionary AtlasDetectorID::initialize_from_dictionary - OK INFO Initialize from dictionary cout 0 - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 
indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG trt decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,-1,1,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 1 0 2 3 mode enumerated - DEBUG phi_mod decode 0 vals 0:31 mask/zero mask/shift/bits/offset 1f fe0fffffffffffff 52 5 7 indexes mode both_bounded - DEBUG lay_wheel decode 0 vals 0:17 mask/zero mask/shift/bits/offset 1f fff07fffffffffff 47 5 12 indexes mode both_bounded - DEBUG str_lay decode 0 vals 0:29 mask/zero mask/shift/bits/offset 1f ffff83ffffffffff 42 5 17 indexes mode both_bounded - DEBUG straw decode 0 vals 0:28 mask/zero mask/shift/bits/offset 1f fffffc1fffffffff 37 5 22 indexes mode both_bounded - DEBUG TRT_ID::initialize_from_dict Set barrel field values: -1,1 - DEBUG TRT_ID::initialize_from_dict Found field values: InDet/TRT 2/3 - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG TRT_ID::init_hashes module hash max 1344 - DEBUG TRT_ID::init_hashes straw layer hash max 19008 - DEBUG TRT_ID::init_hashes straw hash max 424576 - DEBUG TRT_ID::initialize_from_dict - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG Straw layer range -> 2/3/-1,1/0:31/0/0 | 2/3/-1,1/0:31/0/1:4 | 2/3/-1,1/0:31/0/5:9 | 2/3/-1,1/0:31/0/10:14 | 2/3/-1,1/0:31/0/15:17 | 2/3/-1,1/0:31/0/18 | 2/3/-1,1/0:31/1/0 | 2/3/-1,1/0:31/1/1:5 | 2/3/-1,1/0:31/1/6:10 | 2/3/-1,1/0:31/1/11:15 | 2/3/-1,1/0:31/1/16:20 | 2/3/-1,1/0:31/1/21:22 | 2/3/-1,1/0:31/1/23 | 2/3/-1,1/0:31/2/0 | 2/3/-1,1/0:31/2/1:4 | 2/3/-1,1/0:31/2/5:9 | 2/3/-1,1/0:31/2/10:14 | 2/3/-1,1/0:31/2/15:19 | 2/3/-1,1/0:31/2/20:24 | 2/3/-1,1/0:31/2/25:28 | 2/3/-1,1/0:31/2/29 | 2/3/-2,2/0:31/0:5/0:15 | 2/3/-2,2/0:31/6:13/0:7 | 2/3/-2,2/0:31/14:17/0:15 - DEBUG Straw range -> 
2/3/-1,1/0:31/0/0/0:14 | 2/3/-1,1/0:31/0/1:4/0:15 | 2/3/-1,1/0:31/0/5:9/0:16 | 2/3/-1,1/0:31/0/10:14/0:17 | 2/3/-1,1/0:31/0/15:17/0:18 | 2/3/-1,1/0:31/0/18/0:17 | 2/3/-1,1/0:31/1/0/0:18 | 2/3/-1,1/0:31/1/1:5/0:19 | 2/3/-1,1/0:31/1/6:10/0:20 | 2/3/-1,1/0:31/1/11:15/0:21 | 2/3/-1,1/0:31/1/16:20/0:22 | 2/3/-1,1/0:31/1/21:22/0:23 | 2/3/-1,1/0:31/1/23/0:22 | 2/3/-1,1/0:31/2/0/0:22 | 2/3/-1,1/0:31/2/1:4/0:23 | 2/3/-1,1/0:31/2/5:9/0:24 | 2/3/-1,1/0:31/2/10:14/0:25 | 2/3/-1,1/0:31/2/15:19/0:26 | 2/3/-1,1/0:31/2/20:24/0:27 | 2/3/-1,1/0:31/2/25:28/0:28 | 2/3/-1,1/0:31/2/29/0:27 | 2/3/-2,2/0:31/0:5/0:15/0:23 | 2/3/-2,2/0:31/6:13/0:7/0:23 | 2/3/-2,2/0:31/14:17/0:15/0:17 - AtlasDetectorID::initialize_from_dictionary - OK -CscIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -CscIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -CscIdHelper INFO MultiRange built successfully to cscStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK CscIdHelper INFO Initializing CSC hash indices ... -CscIdHelper INFO The element hash max is 32 -CscIdHelper INFO The detector element hash max is 64 CscIdHelper INFO The channel hash max is 61440 CscIdHelper INFO Initializing CSC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -RpcIdHelper INFO MultiRange built successfully to doubletR: MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to detectorElement: DetectorElement MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to rpcStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper INFO Initializing RPC hash indices ... -RpcIdHelper INFO The element hash max is 600 -RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -TgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper INFO Initializing TGC hash indices ... -TgcIdHelper INFO The element hash max is 1578 -TgcIdHelper INFO The detector element hash max is 1578 TgcIdHelper INFO Initializing TGC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -MdtIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MdtIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 261 -MdtIdHelper INFO MultiRange built successfully to tube: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper INFO Initializing MDT hash indices ... -MdtIdHelper INFO The element hash max is 1188 -MdtIdHelper INFO The detector element hash max is 2328 MdtIdHelper INFO Initializing MDT hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -sTgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -sTgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 253 -sTgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK sTgcIdHelper INFO Initializing sTGC hash indices ... -sTgcIdHelper INFO The element hash max is 96 -sTgcIdHelper INFO The detector element hash max is 192 sTgcIdHelper INFO Initializing sTGC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -MmIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MmIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -MmIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MmIdHelper INFO Initializing MicroMegas hash indices ... -MmIdHelper INFO The element hash max is 64 -MmIdHelper INFO The detector element hash max is 128 MmIdHelper INFO Initializing MicroMegas hash indices for finding neighbors ... AtlasDetectorID::initialize_from_dictionary - OK -ClassIDSvc INFO getRegistryEntries: read 379 CLIDRegistry entries for module ALL test1 diff --git a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/TgcPrepDataContainerCnv_p3_test.ref b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/TgcPrepDataContainerCnv_p3_test.ref index e4fb9369a6ad8e7659aeb821200a3dcbd2a0d08b..eafa59f97879b9e447a9cdea019dc6bb7238a360 100644 --- a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/TgcPrepDataContainerCnv_p3_test.ref +++ b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/TgcPrepDataContainerCnv_p3_test.ref @@ -1,18 +1,6 @@ -Initializing Gaudi ApplicationMgr using job opts /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # =======> /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # (1,1): ApplicationMgr.Dlls += ["StoreGate", "CLIDComps"] -JobOptionsSvc INFO # (2,1): ApplicationMgr.ExtSvc += ["StoreGateSvc", "StoreGateSvc/DetectorStore", "StoreGateSvc/ConditionStore", "Athena::RCUSvc"] -JobOptionsSvc INFO Job options successfully read in from /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -ApplicationMgr SUCCESS 
-==================================================================================================================================== - Welcome to ApplicationMgr (GaudiCoreSvc v34r0) - running on pcumass4 on Wed Nov 4 09:35:16 2020 -==================================================================================================================================== -ApplicationMgr INFO Successfully loaded modules : StoreGate, CLIDComps ApplicationMgr INFO Application Manager Configured successfully -ClassIDSvc INFO getRegistryEntries: read 12360 CLIDRegistry entries for module ALL EventLoopMgr WARNING Unable to locate service "EventSelector" EventLoopMgr WARNING No events will be processed from external input. ApplicationMgr INFO Application Manager Initialized successfully @@ -22,22 +10,7 @@ ApplicationMgr Ready Volume pmdum2 not found: returning 0 Volume pmdum3 not found: returning 0 INFO Initialize from dictionary - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG pixel decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,0,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 0 1 0 2 mode enumerated - DEBUG bec_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG lay_disk decode 0 vals 0:2 mask/zero mask/shift/bits/offset 3 fe7fffffffffffff 55 2 7 indexes mode both_bounded - DEBUG lay_disk_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_mod decode 0 vals 0:51 mask/zero mask/shift/bits/offset 3f ff81ffffffffffff 49 6 9 indexes mode both_bounded - DEBUG phi_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode 
both_bounded - DEBUG eta_mod decode 1 vals -6:6 mask/zero mask/shift/bits/offset f fffe1fffffffffff 45 4 15 indexes mode both_bounded - DEBUG eta_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_index decode 0 vals 0:327 mask/zero mask/shift/bits/offset 1ff ffffe00fffffffff 36 9 19 indexes mode both_bounded - DEBUG eta_index decode 0 vals 0:191 mask/zero mask/shift/bits/offset ff fffffff00fffffff 28 8 28 indexes mode both_bounded - DEBUG bec_eta_mod decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded indet 1 2,4,5,7,10,11,12,13 7 1fffffffffffffff 61 3 0 min/max 2 13 values 2 4 5 7 10 11 12 13 indexes 0 0 1 2 0 3 0 0 4 5 6 7 indices 8 prev 0 next 0 mode enumerated cont mode none pixel 1 1:3 3 e7ffffffffffffff 59 2 3 min/max 1 3 values indexes indices 3 prev 0 next 0 mode both_bounded cont mode none bec 1 -2,0,2 3 f9ffffffffffffff 57 2 5 min/max -2 2 values -2 0 2 indexes 0 0 1 0 2 indices 3 prev 0 next 0 mode enumerated cont mode none @@ -60,85 +33,28 @@ phi_module 4 eta_module 5 phi_index 6 eta_index 7 - DEBUG PixelID::initialize_from_dict Set barrel field values: 0 - DEBUG PixelID::initialize_from_dict Set dbm field values: -999,999 - DEBUG PixelID::initialize_from_dict Found field values: InDet/Pixel 2/1 - DEBUG PixelID::init_neighbors - DEBUG PixelID::initialize_from_dict - DEBUG Wafer range -> 2/1/0/0/0:21/-6:6 | 2/1/0/1/0:37/-6:6 | 2/1/0/2/0:51/-6:6 | 2/1/-2,2/0:2/0:47/0 - DEBUG Pixel range -> 2/1/0/0/0:21/-6:6/0:327/0:191 | 2/1/0/1/0:37/-6:6/0:327/0:143 | 2/1/0/2/0:51/-6:6/0:327/0:143 | 2/1/-2,2/0:2/0:47/0/0:327/0:143 INFO SCT_ID::initialize_from_dictionary AtlasDetectorID::initialize_from_dictionary - OK INFO Initialize from dictionary cout 0 - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 
indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG trt decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,-1,1,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 1 0 2 3 mode enumerated - DEBUG phi_mod decode 0 vals 0:31 mask/zero mask/shift/bits/offset 1f fe0fffffffffffff 52 5 7 indexes mode both_bounded - DEBUG lay_wheel decode 0 vals 0:17 mask/zero mask/shift/bits/offset 1f fff07fffffffffff 47 5 12 indexes mode both_bounded - DEBUG str_lay decode 0 vals 0:29 mask/zero mask/shift/bits/offset 1f ffff83ffffffffff 42 5 17 indexes mode both_bounded - DEBUG straw decode 0 vals 0:28 mask/zero mask/shift/bits/offset 1f fffffc1fffffffff 37 5 22 indexes mode both_bounded - DEBUG TRT_ID::initialize_from_dict Set barrel field values: -1,1 - DEBUG TRT_ID::initialize_from_dict Found field values: InDet/TRT 2/3 - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG TRT_ID::init_hashes module hash max 1344 - DEBUG TRT_ID::init_hashes straw layer hash max 19008 - DEBUG TRT_ID::init_hashes straw hash max 424576 - DEBUG TRT_ID::initialize_from_dict - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG Straw layer range -> 2/3/-1,1/0:31/0/0 | 2/3/-1,1/0:31/0/1:4 | 2/3/-1,1/0:31/0/5:9 | 2/3/-1,1/0:31/0/10:14 | 2/3/-1,1/0:31/0/15:17 | 2/3/-1,1/0:31/0/18 | 2/3/-1,1/0:31/1/0 | 2/3/-1,1/0:31/1/1:5 | 2/3/-1,1/0:31/1/6:10 | 2/3/-1,1/0:31/1/11:15 | 2/3/-1,1/0:31/1/16:20 | 2/3/-1,1/0:31/1/21:22 | 2/3/-1,1/0:31/1/23 | 2/3/-1,1/0:31/2/0 | 2/3/-1,1/0:31/2/1:4 | 2/3/-1,1/0:31/2/5:9 | 2/3/-1,1/0:31/2/10:14 | 2/3/-1,1/0:31/2/15:19 | 2/3/-1,1/0:31/2/20:24 | 2/3/-1,1/0:31/2/25:28 | 2/3/-1,1/0:31/2/29 | 2/3/-2,2/0:31/0:5/0:15 | 2/3/-2,2/0:31/6:13/0:7 | 2/3/-2,2/0:31/14:17/0:15 - DEBUG Straw range -> 
2/3/-1,1/0:31/0/0/0:14 | 2/3/-1,1/0:31/0/1:4/0:15 | 2/3/-1,1/0:31/0/5:9/0:16 | 2/3/-1,1/0:31/0/10:14/0:17 | 2/3/-1,1/0:31/0/15:17/0:18 | 2/3/-1,1/0:31/0/18/0:17 | 2/3/-1,1/0:31/1/0/0:18 | 2/3/-1,1/0:31/1/1:5/0:19 | 2/3/-1,1/0:31/1/6:10/0:20 | 2/3/-1,1/0:31/1/11:15/0:21 | 2/3/-1,1/0:31/1/16:20/0:22 | 2/3/-1,1/0:31/1/21:22/0:23 | 2/3/-1,1/0:31/1/23/0:22 | 2/3/-1,1/0:31/2/0/0:22 | 2/3/-1,1/0:31/2/1:4/0:23 | 2/3/-1,1/0:31/2/5:9/0:24 | 2/3/-1,1/0:31/2/10:14/0:25 | 2/3/-1,1/0:31/2/15:19/0:26 | 2/3/-1,1/0:31/2/20:24/0:27 | 2/3/-1,1/0:31/2/25:28/0:28 | 2/3/-1,1/0:31/2/29/0:27 | 2/3/-2,2/0:31/0:5/0:15/0:23 | 2/3/-2,2/0:31/6:13/0:7/0:23 | 2/3/-2,2/0:31/14:17/0:15/0:17 - AtlasDetectorID::initialize_from_dictionary - OK -CscIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -CscIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -CscIdHelper INFO MultiRange built successfully to cscStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK CscIdHelper INFO Initializing CSC hash indices ... -CscIdHelper INFO The element hash max is 32 -CscIdHelper INFO The detector element hash max is 64 CscIdHelper INFO The channel hash max is 61440 CscIdHelper INFO Initializing CSC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -RpcIdHelper INFO MultiRange built successfully to doubletR: MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to detectorElement: DetectorElement MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to rpcStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper INFO Initializing RPC hash indices ... -RpcIdHelper INFO The element hash max is 600 -RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -TgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper INFO Initializing TGC hash indices ... -TgcIdHelper INFO The element hash max is 1578 -TgcIdHelper INFO The detector element hash max is 1578 TgcIdHelper INFO Initializing TGC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -MdtIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MdtIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 261 -MdtIdHelper INFO MultiRange built successfully to tube: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper INFO Initializing MDT hash indices ... -MdtIdHelper INFO The element hash max is 1188 -MdtIdHelper INFO The detector element hash max is 2328 MdtIdHelper INFO Initializing MDT hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -sTgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -sTgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 253 -sTgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK sTgcIdHelper INFO Initializing sTGC hash indices ... -sTgcIdHelper INFO The element hash max is 96 -sTgcIdHelper INFO The detector element hash max is 192 sTgcIdHelper INFO Initializing sTGC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -MmIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MmIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -MmIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MmIdHelper INFO Initializing MicroMegas hash indices ... -MmIdHelper INFO The element hash max is 64 -MmIdHelper INFO The detector element hash max is 128 MmIdHelper INFO Initializing MicroMegas hash indices for finding neighbors ... AtlasDetectorID::initialize_from_dictionary - OK -ClassIDSvc INFO getRegistryEntries: read 379 CLIDRegistry entries for module ALL test1 diff --git a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/sTgcPrepDataContainerCnv_p1_test.ref b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/sTgcPrepDataContainerCnv_p1_test.ref index 27d72f519f66cf0fcc18f9474536686e04d67d17..eafa59f97879b9e447a9cdea019dc6bb7238a360 100644 --- a/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/sTgcPrepDataContainerCnv_p1_test.ref +++ b/MuonSpectrometer/MuonCnv/MuonEventTPCnv/share/sTgcPrepDataContainerCnv_p1_test.ref @@ -1,18 +1,6 @@ -Initializing Gaudi ApplicationMgr using job opts /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # =======> /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -JobOptionsSvc INFO # (1,1): ApplicationMgr.Dlls += ["StoreGate", "CLIDComps"] -JobOptionsSvc INFO # (2,1): ApplicationMgr.ExtSvc += ["StoreGateSvc", "StoreGateSvc/DetectorStore", "StoreGateSvc/ConditionStore", "Athena::RCUSvc"] -JobOptionsSvc INFO Job options successfully read in from /home/emoyse/master2/build/x86_64-centos7-gcc8-opt/jobOptions/MuonEventTPCnv/MuonEventTPCnv_test.txt -ApplicationMgr SUCCESS 
-==================================================================================================================================== - Welcome to ApplicationMgr (GaudiCoreSvc v34r0) - running on pcumass4 on Wed Nov 4 09:35:18 2020 -==================================================================================================================================== -ApplicationMgr INFO Successfully loaded modules : StoreGate, CLIDComps ApplicationMgr INFO Application Manager Configured successfully -ClassIDSvc INFO getRegistryEntries: read 12360 CLIDRegistry entries for module ALL EventLoopMgr WARNING Unable to locate service "EventSelector" EventLoopMgr WARNING No events will be processed from external input. ApplicationMgr INFO Application Manager Initialized successfully @@ -22,22 +10,7 @@ ApplicationMgr Ready Volume pmdum2 not found: returning 0 Volume pmdum3 not found: returning 0 INFO Initialize from dictionary - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG pixel decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,0,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 0 1 0 2 mode enumerated - DEBUG bec_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG lay_disk decode 0 vals 0:2 mask/zero mask/shift/bits/offset 3 fe7fffffffffffff 55 2 7 indexes mode both_bounded - DEBUG lay_disk_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_mod decode 0 vals 0:51 mask/zero mask/shift/bits/offset 3f ff81ffffffffffff 49 6 9 indexes mode both_bounded - DEBUG phi_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode 
both_bounded - DEBUG eta_mod decode 1 vals -6:6 mask/zero mask/shift/bits/offset f fffe1fffffffffff 45 4 15 indexes mode both_bounded - DEBUG eta_mod_shift decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded - DEBUG phi_index decode 0 vals 0:327 mask/zero mask/shift/bits/offset 1ff ffffe00fffffffff 36 9 19 indexes mode both_bounded - DEBUG eta_index decode 0 vals 0:191 mask/zero mask/shift/bits/offset ff fffffff00fffffff 28 8 28 indexes mode both_bounded - DEBUG bec_eta_mod decode 0 vals 0 mask/zero mask/shift/bits/offset 0 0 0 0 0 indexes mode both_bounded indet 1 2,4,5,7,10,11,12,13 7 1fffffffffffffff 61 3 0 min/max 2 13 values 2 4 5 7 10 11 12 13 indexes 0 0 1 2 0 3 0 0 4 5 6 7 indices 8 prev 0 next 0 mode enumerated cont mode none pixel 1 1:3 3 e7ffffffffffffff 59 2 3 min/max 1 3 values indexes indices 3 prev 0 next 0 mode both_bounded cont mode none bec 1 -2,0,2 3 f9ffffffffffffff 57 2 5 min/max -2 2 values -2 0 2 indexes 0 0 1 0 2 indices 3 prev 0 next 0 mode enumerated cont mode none @@ -60,85 +33,28 @@ phi_module 4 eta_module 5 phi_index 6 eta_index 7 - DEBUG PixelID::initialize_from_dict Set barrel field values: 0 - DEBUG PixelID::initialize_from_dict Set dbm field values: -999,999 - DEBUG PixelID::initialize_from_dict Found field values: InDet/Pixel 2/1 - DEBUG PixelID::init_neighbors - DEBUG PixelID::initialize_from_dict - DEBUG Wafer range -> 2/1/0/0/0:21/-6:6 | 2/1/0/1/0:37/-6:6 | 2/1/0/2/0:51/-6:6 | 2/1/-2,2/0:2/0:47/0 - DEBUG Pixel range -> 2/1/0/0/0:21/-6:6/0:327/0:191 | 2/1/0/1/0:37/-6:6/0:327/0:143 | 2/1/0/2/0:51/-6:6/0:327/0:143 | 2/1/-2,2/0:2/0:47/0/0:327/0:143 INFO SCT_ID::initialize_from_dictionary AtlasDetectorID::initialize_from_dictionary - OK INFO Initialize from dictionary cout 0 - DEBUG (Re)initialize AtlasDetectorID::initialize_from_dictionary - OK - DEBUG decode index and bit fields for each level: - DEBUG indet decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 
indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated - DEBUG trt decode 1 vals 1:3 mask/zero mask/shift/bits/offset 3 e7ffffffffffffff 59 2 3 indexes mode both_bounded - DEBUG bec decode 1 vals -2,-1,1,2 mask/zero mask/shift/bits/offset 3 f9ffffffffffffff 57 2 5 indexes 0 1 0 2 3 mode enumerated - DEBUG phi_mod decode 0 vals 0:31 mask/zero mask/shift/bits/offset 1f fe0fffffffffffff 52 5 7 indexes mode both_bounded - DEBUG lay_wheel decode 0 vals 0:17 mask/zero mask/shift/bits/offset 1f fff07fffffffffff 47 5 12 indexes mode both_bounded - DEBUG str_lay decode 0 vals 0:29 mask/zero mask/shift/bits/offset 1f ffff83ffffffffff 42 5 17 indexes mode both_bounded - DEBUG straw decode 0 vals 0:28 mask/zero mask/shift/bits/offset 1f fffffc1fffffffff 37 5 22 indexes mode both_bounded - DEBUG TRT_ID::initialize_from_dict Set barrel field values: -1,1 - DEBUG TRT_ID::initialize_from_dict Found field values: InDet/TRT 2/3 - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG TRT_ID::init_hashes module hash max 1344 - DEBUG TRT_ID::init_hashes straw layer hash max 19008 - DEBUG TRT_ID::init_hashes straw hash max 424576 - DEBUG TRT_ID::initialize_from_dict - DEBUG Module range -> 2/3/-1,1/0:31/0 | 2/3/-1,1/0:31/1 | 2/3/-1,1/0:31/2 | 2/3/-2,2/0:31/0:5 | 2/3/-2,2/0:31/6:13 | 2/3/-2,2/0:31/14:17 - DEBUG Straw layer range -> 2/3/-1,1/0:31/0/0 | 2/3/-1,1/0:31/0/1:4 | 2/3/-1,1/0:31/0/5:9 | 2/3/-1,1/0:31/0/10:14 | 2/3/-1,1/0:31/0/15:17 | 2/3/-1,1/0:31/0/18 | 2/3/-1,1/0:31/1/0 | 2/3/-1,1/0:31/1/1:5 | 2/3/-1,1/0:31/1/6:10 | 2/3/-1,1/0:31/1/11:15 | 2/3/-1,1/0:31/1/16:20 | 2/3/-1,1/0:31/1/21:22 | 2/3/-1,1/0:31/1/23 | 2/3/-1,1/0:31/2/0 | 2/3/-1,1/0:31/2/1:4 | 2/3/-1,1/0:31/2/5:9 | 2/3/-1,1/0:31/2/10:14 | 2/3/-1,1/0:31/2/15:19 | 2/3/-1,1/0:31/2/20:24 | 2/3/-1,1/0:31/2/25:28 | 2/3/-1,1/0:31/2/29 | 2/3/-2,2/0:31/0:5/0:15 | 2/3/-2,2/0:31/6:13/0:7 | 2/3/-2,2/0:31/14:17/0:15 - DEBUG Straw range -> 
2/3/-1,1/0:31/0/0/0:14 | 2/3/-1,1/0:31/0/1:4/0:15 | 2/3/-1,1/0:31/0/5:9/0:16 | 2/3/-1,1/0:31/0/10:14/0:17 | 2/3/-1,1/0:31/0/15:17/0:18 | 2/3/-1,1/0:31/0/18/0:17 | 2/3/-1,1/0:31/1/0/0:18 | 2/3/-1,1/0:31/1/1:5/0:19 | 2/3/-1,1/0:31/1/6:10/0:20 | 2/3/-1,1/0:31/1/11:15/0:21 | 2/3/-1,1/0:31/1/16:20/0:22 | 2/3/-1,1/0:31/1/21:22/0:23 | 2/3/-1,1/0:31/1/23/0:22 | 2/3/-1,1/0:31/2/0/0:22 | 2/3/-1,1/0:31/2/1:4/0:23 | 2/3/-1,1/0:31/2/5:9/0:24 | 2/3/-1,1/0:31/2/10:14/0:25 | 2/3/-1,1/0:31/2/15:19/0:26 | 2/3/-1,1/0:31/2/20:24/0:27 | 2/3/-1,1/0:31/2/25:28/0:28 | 2/3/-1,1/0:31/2/29/0:27 | 2/3/-2,2/0:31/0:5/0:15/0:23 | 2/3/-2,2/0:31/6:13/0:7/0:23 | 2/3/-2,2/0:31/14:17/0:15/0:17 - AtlasDetectorID::initialize_from_dictionary - OK -CscIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -CscIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -CscIdHelper INFO MultiRange built successfully to cscStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK CscIdHelper INFO Initializing CSC hash indices ... -CscIdHelper INFO The element hash max is 32 -CscIdHelper INFO The detector element hash max is 64 CscIdHelper INFO The channel hash max is 61440 CscIdHelper INFO Initializing CSC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -RpcIdHelper INFO MultiRange built successfully to doubletR: MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to detectorElement: DetectorElement MultiRange size is 261 -RpcIdHelper INFO MultiRange built successfully to rpcStrip: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper INFO Initializing RPC hash indices ... -RpcIdHelper INFO The element hash max is 600 -RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -TgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 218 -TgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper INFO Initializing TGC hash indices ... -TgcIdHelper INFO The element hash max is 1578 -TgcIdHelper INFO The detector element hash max is 1578 TgcIdHelper INFO Initializing TGC hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -MdtIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MdtIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 261 -MdtIdHelper INFO MultiRange built successfully to tube: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper INFO Initializing MDT hash indices ... -MdtIdHelper INFO The element hash max is 1188 -MdtIdHelper INFO The detector element hash max is 2328 MdtIdHelper INFO Initializing MDT hash indices for finding neighbors ... - AtlasDetectorID::initialize_from_dictionary - OK -sTgcIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -sTgcIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 253 -sTgcIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK sTgcIdHelper INFO Initializing sTGC hash indices ... -sTgcIdHelper INFO The element hash max is 96 -sTgcIdHelper INFO The detector element hash max is 192 sTgcIdHelper INFO Initializing sTGC hash indices for finding neighbors ... 
- AtlasDetectorID::initialize_from_dictionary - OK -MmIdHelper INFO MultiRange built successfully to Technology: MultiRange size is 218 -MmIdHelper INFO MultiRange built successfully to detector element: Multilayer MultiRange size is 257 -MmIdHelper INFO MultiRange built successfully to channel: MultiRange size is 261 +AtlasDetectorID INFO initialize_from_dictionary - OK MmIdHelper INFO Initializing MicroMegas hash indices ... -MmIdHelper INFO The element hash max is 64 -MmIdHelper INFO The detector element hash max is 128 MmIdHelper INFO Initializing MicroMegas hash indices for finding neighbors ... AtlasDetectorID::initialize_from_dictionary - OK -ClassIDSvc INFO getRegistryEntries: read 379 CLIDRegistry entries for module ALL test1 diff --git a/MuonSpectrometer/MuonDetDescr/MuonRegionSelector/src/MuonRegSelCondAlg.cxx b/MuonSpectrometer/MuonDetDescr/MuonRegionSelector/src/MuonRegSelCondAlg.cxx index a7799acd7afc6b4d2b95985f3fa1958493a24eb6..eb1741b66b3743743bdead9f29e1465522dfb68c 100644 --- a/MuonSpectrometer/MuonDetDescr/MuonRegionSelector/src/MuonRegSelCondAlg.cxx +++ b/MuonSpectrometer/MuonDetDescr/MuonRegionSelector/src/MuonRegSelCondAlg.cxx @@ -77,7 +77,11 @@ StatusCode MuonRegSelCondAlg::execute(const EventContext& ctx ) const // write out new new LUT to a file if need be - if ( m_printTable ) dynamic_cast<const RegSelSiLUT*>(rd.get())->write( name()+".map" ); + if ( m_printTable ) { + if (auto lut = dynamic_cast<const RegSelSiLUT*>(rd.get())) { + lut->write( name()+".map" ); + } + } /// create the conditions data for storage diff --git a/MuonSpectrometer/MuonIdHelpers/MuonIdHelpers/CscIdHelper.h b/MuonSpectrometer/MuonIdHelpers/MuonIdHelpers/CscIdHelper.h index 904c884a909db1a404925c498897a23e3785050f..cc98712eabfea3d0893c052f1768ec54cc2d2e7f 100644 --- a/MuonSpectrometer/MuonIdHelpers/MuonIdHelpers/CscIdHelper.h +++ b/MuonSpectrometer/MuonIdHelpers/MuonIdHelpers/CscIdHelper.h @@ -59,7 +59,7 @@ class CscIdHelper : public MuonIdHelper // Destructor - 
virtual ~CscIdHelper(); + virtual ~CscIdHelper()=default; ///////////// compact identifier stuff begins ////////////////////////////////////// diff --git a/MuonSpectrometer/MuonIdHelpers/MuonIdHelpers/MuonIdHelper.h b/MuonSpectrometer/MuonIdHelpers/MuonIdHelpers/MuonIdHelper.h index de81c50f2de418230c095c05f61b55a1248da8b4..011841697370c0bc2c20f10eaeb5e9b19262eac4 100644 --- a/MuonSpectrometer/MuonIdHelpers/MuonIdHelpers/MuonIdHelper.h +++ b/MuonSpectrometer/MuonIdHelpers/MuonIdHelpers/MuonIdHelper.h @@ -2,19 +2,9 @@ Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration */ -// ****************************************************************************** -// ATLAS Muon Identifier Helpers Package -// ----------------------------------------- -// ****************************************************************************** - -//<doc><file> $Id: MuonIdHelper.h,v 1.31 2007-11-25 18:19:00 ketevi Exp $ -//<version> $Name: not supported by cvs2svn $ - #ifndef DETECTORDESCRIPTION_MUONIDHELPER_H #define DETECTORDESCRIPTION_MUONIDHELPER_H -// Includes - #include "AtlasDetDescr/AtlasDetectorID.h" #include "Identifier/IdentifierHash.h" #include "IdDict/IdDictFieldImplementation.h" @@ -30,7 +20,6 @@ #include <stdexcept> class IdDictDictionary; -class IMessageSvc; // ****************************************************************************** // class MuonIdHelper @@ -98,7 +87,7 @@ class MuonIdHelper : public AtlasDetectorID // Destructor - virtual ~MuonIdHelper(); + virtual ~MuonIdHelper()=default; // Build identifier diff --git a/MuonSpectrometer/MuonIdHelpers/MuonIdHelpers/TgcIdHelper.h b/MuonSpectrometer/MuonIdHelpers/MuonIdHelpers/TgcIdHelper.h index d01841da25f326360e9b3dc2f0ad18371e2097b1..94934bcca0bf961c1debdbe0c68b91ef0f5f80c8 100644 --- a/MuonSpectrometer/MuonIdHelpers/MuonIdHelpers/TgcIdHelper.h +++ b/MuonSpectrometer/MuonIdHelpers/MuonIdHelpers/TgcIdHelper.h @@ -2,19 +2,9 @@ Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration 
*/ -// ****************************************************************************** -// ATLAS Muon Identifier Helpers Package -// ----------------------------------------- -// ****************************************************************************** - -//<doc><file> $Id: TgcIdHelper.h,v 1.32 2009-01-20 22:44:13 kblack Exp $ -//<version> $Name: not supported by cvs2svn $ - #ifndef MUONIDHELPERS_TGCIDHELPER_H #define MUONIDHELPERS_TGCIDHELPER_H -// Includes -class MsgStream; #include "MuonIdHelpers/MuonIdHelper.h" // ****************************************************************************** @@ -67,7 +57,7 @@ class TgcIdHelper : public MuonIdHelper // Destructor - virtual ~TgcIdHelper(); + virtual ~TgcIdHelper()=default; ///////////// compact identifier stuff begins ////////////////////////////////////// diff --git a/MuonSpectrometer/MuonIdHelpers/share/muon_id_test.ref b/MuonSpectrometer/MuonIdHelpers/share/muon_id_test.ref index b004cb36366fd7eed5713a77250cccca51968b1a..a4a61d6d926c4fb1b3606b3c4ca383e3f6bca296 100644 --- a/MuonSpectrometer/MuonIdHelpers/share/muon_id_test.ref +++ b/MuonSpectrometer/MuonIdHelpers/share/muon_id_test.ref @@ -1,11 +1,11 @@ =========> checking dictionnary file=IdDictMuonSpectrometer_R.03.xml MdtIdHelper DEBUG (Re)initialize -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find mm region index: group, region size 0 0 -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find stgc region index: group, region size 0 0 -AtlasDetectorID::initLevelsFromDict - there are no sTGC entries in the dictionary! -AtlasDetectorID::initLevelsFromDict - there are no MM entries in the dictionary! -AtlasDetectorID DEBUG Could not get value for label 'no_side' of field 'DetZside' in dictionary Calorimeter - AtlasDetectorID::initialize_from_dictionary - OK +AtlasDetectorID... DEBUG initialize_from_dictionary - unable to find mm region index: group, region size 0 0 +AtlasDetectorID... 
DEBUG initialize_from_dictionary - unable to find stgc region index: group, region size 0 0 +AtlasDetectorID DEBUG initLevelsFromDict - there are no sTGC entries in the dictionary! +AtlasDetectorID DEBUG initLevelsFromDict - there are no MM entries in the dictionary! +AtlasDetectorID DEBUG initLevelsFromDict - Could not get value for label 'no_side' of field 'DetZside' in dictionary +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper DEBUG MDT decode index and bit fields for each level: MdtIdHelper DEBUG muon decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated MdtIdHelper DEBUG station decode 1 vals 0,1,2,3,4,5,6,7,8,9,10,13,14,15,17,18,20,21,41,42,43,44,45,46,47,48,49,50,51,52,53,54 mask/zero mask/shift/bits/offset 1f e0ffffffffffffff 56 5 3 indexes 0 1 2 3 4 5 6 7 8 9 10 0 0 11 12 13 0 14 15 0 16 17 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 18 19 20 21 22 23 24 25 26 27 28 29 30 31 mode enumerated @@ -442,12 +442,12 @@ MdtIdHelper INFO Initializing MDT hash indices for finding neighbors .. MdtIdHelper VERBOSE MuonIdHelper::init_neighbors MdtIdHelper DEBUG Maximum number of MDT tubes is 78 RpcIdHelper DEBUG (Re)initialize -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find mm region index: group, region size 0 0 -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find stgc region index: group, region size 0 0 -AtlasDetectorID::initLevelsFromDict - there are no sTGC entries in the dictionary! -AtlasDetectorID::initLevelsFromDict - there are no MM entries in the dictionary! -AtlasDetectorID DEBUG Could not get value for label 'no_side' of field 'DetZside' in dictionary Calorimeter - AtlasDetectorID::initialize_from_dictionary - OK +AtlasDetectorID... DEBUG initialize_from_dictionary - unable to find mm region index: group, region size 0 0 +AtlasDetectorID... 
DEBUG initialize_from_dictionary - unable to find stgc region index: group, region size 0 0 +AtlasDetectorID DEBUG initLevelsFromDict - there are no sTGC entries in the dictionary! +AtlasDetectorID DEBUG initLevelsFromDict - there are no MM entries in the dictionary! +AtlasDetectorID DEBUG initLevelsFromDict - Could not get value for label 'no_side' of field 'DetZside' in dictionary +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper DEBUG RPC decode index and bit fields for each level: RpcIdHelper DEBUG muon decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated RpcIdHelper DEBUG station decode 1 vals 0,1,2,3,4,5,6,7,8,9,10,13,14,15,17,18,20,21,41,42,43,44,45,46,47,48,49,50,51,52,53,54 mask/zero mask/shift/bits/offset 1f e0ffffffffffffff 56 5 3 indexes 0 1 2 3 4 5 6 7 8 9 10 0 0 11 12 13 0 14 15 0 16 17 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 18 19 20 21 22 23 24 25 26 27 28 29 30 31 mode enumerated @@ -629,1043 +629,17 @@ RpcIdHelper DEBUG full module range size is 54 RpcIdHelper DEBUG full channel range size is 54 RpcIdHelper INFO Initializing RPC hash indices ... 
RpcIdHelper INFO The element hash max is 600 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6248880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6248980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6248c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6248d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6249880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6249980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6249c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6249d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6238880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6238c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6239880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6239980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6239c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6239d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x624a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x624a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x624ac80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x624ad80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x624e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x624e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x624ec80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x624ed80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623ac80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623ad80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623ec80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623ed80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x624b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x624bc80000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x624c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x624cc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623bc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623cc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623dc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623fc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x624d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x624dc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x624f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x624fc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6230880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6230980000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6230c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6230d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6231880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6231980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6231c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6231d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6232880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6232980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6232c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6232d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6233880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6233980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6233c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6233d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6234880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6234980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x6234c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6234d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6235880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6235980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6235c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6235d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6236880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6236980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6236c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6236d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6237880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6237980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6237c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6237d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6250880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6250980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x6250c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6250d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6251880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6251980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6251c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6251d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6252880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6252980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6252c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6252d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6253880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6253980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6253c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6253d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6254880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6254980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x6254c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6254d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6255880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6255980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6255c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6255d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6256880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6256980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6256c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6256d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6257880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6257980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6257c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6257d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6228880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6228980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6228c80000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6228d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6229880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6229980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6229c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6229d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622ac80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622ad80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622bc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622bd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622cc80000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622cd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622d980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622dc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622dd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622ec80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622ed80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622fc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622fd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6258880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6258980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6258c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x6258d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6259880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6259980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6259c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6259d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625ac80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625ad80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625bc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625bd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625cc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x625cd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625d980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625dc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625dd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625ec80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625ed80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625fc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625fd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6220880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6220c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6221880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x6221c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6222880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6222c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6223880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6223c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6224880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6224c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6225880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6225c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6227880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6227c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6260880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6260c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6261880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6261c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6262880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6262c80000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6263880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6263c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6264880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6264c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6265880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6265c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6267880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6267c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6226880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6226c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6266880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6266c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6218880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6218c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6219880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6219c80000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621ac80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621bc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621cc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621dc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621fc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6268880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6268c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6269880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6269c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x626a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x626ac80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x626b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x626bc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x626c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x626cc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x626d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x626dc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x626f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x626fc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621ec80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621ed80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x626e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x626e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x626ec80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x626ed80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x6210880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6210980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6210c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6210d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6211880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6211980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6211c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6211d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6212880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6212980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6212c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6212d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6213880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6213980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6213c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6213d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x6214880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6214980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6214c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6214d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6215880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6215980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6215c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6215d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6217880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6217980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6217c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6217d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6270880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6270980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6270c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6270d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6271880000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6271980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6271c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6271d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6272880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6272980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6272c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6272d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6273880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6273980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6273c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6273d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6274880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6274980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6274c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6274d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6275880000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6275980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6275c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6275d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6277880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6277980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6277c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6277d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6216880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6276880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6208880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6209880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x620a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x620b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x620c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x620d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x620f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x6278880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6279880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x627a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x627b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x627c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x627d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x627f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6338880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6338980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6338c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6338d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6339880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6339980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6339c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6339d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x633a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x633a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x633ac80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x633ad80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x633b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x633b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x633bc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x633bd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x633c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x633c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x633cc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x633cd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x633f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x633f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x633fc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x633fd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6348880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x6348980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6348c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6348d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6349880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6349980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6349c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6349d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x634a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x634a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x634ac80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x634ad80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x634b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x634b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x634bc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x634bd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x634c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x634c980000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x634cc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x634cd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x634f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x634f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x634fc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x634fd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6330880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6330980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6330c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6330d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6331880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6331980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6331c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6331d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6332880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6332980000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6332c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6332d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6333880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6333980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6333c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6333d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6334880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6334980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6334c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6334d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6337880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6337980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6337c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6337d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6350880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6350980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x6350c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6350d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6351880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6351980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6351c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6351d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6352880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6352980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6352c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6352d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6353880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6353980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6353c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6353d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6354880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6354980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x6354c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6354d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6357880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6357980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6357c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6357d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6328880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6328980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6328c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6328d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6329880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6329980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6329c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6329d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x632a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x632a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x632ac80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x632ad80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x632b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x632b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x632bc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x632bd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x632c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x632c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x632cc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x632cd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x632f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x632f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x632fc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x632fd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6358880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6358980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6358c80000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6358d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6359880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6359980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6359c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6359d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x635a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x635a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x635ac80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x635ad80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x635b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x635b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x635bc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x635bd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x635c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x635c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x635cc80000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x635cd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x635f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x635f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x635fc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x635fd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6320880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6321880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6322880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6323880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6324880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6327880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6360880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6361880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6362880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6363880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6364880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x6367880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6320c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6320d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6321c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6321d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6322c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6322d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6323c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6323d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6324c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6324d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6327c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6327d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6360c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6360d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6361c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x6361d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6362c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6362d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6363c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6363d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6364c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6364d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6367c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6367d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6318880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6318c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6319880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6319c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x631a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x631ac80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x631b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x631bc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x631c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x631cc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x631f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x631fc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6368880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6368c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6369880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6369c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x636a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x636ac80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x636b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x636bc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x636c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x636cc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x636f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x636fc80000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6310880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6310980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6310c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6310d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6311880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6311980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6311c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6311d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6312880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6312980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6312c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6312d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6313880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6313980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6313c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6313d80000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6314880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6314980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6314c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6314d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6317880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6317980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6317c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6317d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6370880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6370980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6370c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6370d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6371880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6371980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6371c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6371d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x6372880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6372980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6372c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6372d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6373880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6373980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6373c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6373d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6374880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6374980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6374c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6374d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6377880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6377980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6377c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6377d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x6448880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6448980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6449880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6449980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x644a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x644a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x644b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x644b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x644c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x644c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x644d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x644d980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x644e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x644e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x644f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x644f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x6438880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6438980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6439880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6439980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x643a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x643a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x643b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x643b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x643c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x643c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x643d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x643d980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x643e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x643e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x643f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x643f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6430880000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6430980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6431880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6431980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6432880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6432980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6433880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6433980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6434880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6434980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6435880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6435980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6436880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6436980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6437880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6437980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6450880000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6450980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6451880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6451980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6452880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6452980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6453880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6453980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6454880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6454980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6455880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6455980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6456880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6456980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6457880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6457980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6428880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x6428980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6429880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6429980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642d980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6458880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x6458980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6459880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6459980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x645a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x645a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x645b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x645b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x645c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x645c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x645d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x645d980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x645e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x645e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x645f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x645f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6420880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x6420980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6421880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6421980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6422880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6422980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6423880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6423980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6424880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6424980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6425880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6425980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6426880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6426980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6427880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6427980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6460880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6460980000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6461880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6461980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6462880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6462980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6463880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6463980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6464880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6464980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6465880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6465980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6466880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6466980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6467880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6467980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6418880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6418980000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6419880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6419980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641d980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6468880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6468980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x6469880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6469980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x646a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x646a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x646b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x646b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x646c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x646c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x646d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x646d980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x646e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x646e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x646f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x646f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6410880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6410980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x6411880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6411980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6412880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6412980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6413880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6413980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6414880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6414980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6415880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6415980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6416880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6416980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6417880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6417980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6470880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6470980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x6471880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6471980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6472880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6472980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6473880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6473980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6474880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6474980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6475880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6475980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6476880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6476980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6477880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6477980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6538880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6538980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6539880000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6539980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x653a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x653a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x653b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x653b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x653c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x653c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x653f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x653f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6548880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6548980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6549880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6549980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x654a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x654a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x654b880000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x654b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x654c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x654c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x654f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x654f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6530880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6530980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6531880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6531980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6532880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6532980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6533880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6533980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6534880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6534980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6537880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x6537980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6550880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6550980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6551880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6551980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6552880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6552980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6553880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6553980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6554880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6554980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6557880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6557980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6528880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6528980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6529880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x6529980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x652a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x652a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x652b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x652b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x652c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x652c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x652f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x652f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6558880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6558980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6559880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6559980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x655a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x655a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x655b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x655b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x655c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x655c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x655f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x655f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6520880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6520980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6521880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6521980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6522880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6522980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6523880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6523980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6524880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6524980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6527880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6527980000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6560880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6560980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6561880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6561980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6562880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6562980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6563880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6563980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6564880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6564980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6567880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6567980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6518880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6518980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6519880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6519980000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x651a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x651a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x651b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x651b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x651c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x651c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x651f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x651f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6568880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6568980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6569880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6569980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x656a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x656a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x656b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x656b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x656c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x656c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x656f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x656f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6510880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6510980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6511880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6511980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6512880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6512980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6513880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6513980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6514880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6514980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6517880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6517980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x6570880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6570980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6571880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6571980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6572880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6572980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6573880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6573980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6574880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6574980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6577880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6577980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x683d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x683d980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x683dc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x683dd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x683e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x683e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x683ec80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x683ed80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x684d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x684d980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x684dc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x684dd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x684e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x684e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x684ec80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x684ed80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6835880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6835980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6835c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6835d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6836880000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6836980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6836c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6836d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6855880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6855980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6855c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6855d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6856880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6856980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6856c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6856d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x682d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x682e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x685d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x685e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x682dc80000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x682dd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x682ec80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x682ed80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x685dc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x685dd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x685ec80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x685ed80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x693d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x693d980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x693e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x693e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x694d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x694d980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x694e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x694e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6935880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x6935980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6935c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6935d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6936880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6936980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6936c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6936d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6955880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6955980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6955c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6955d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6956880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6956980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6956c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6956d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x692d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x692d980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x692dc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x692dd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x692e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x692e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x692ec80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x692ed80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x695d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x695d980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x695dc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x695dd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x695e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x695e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x695ec80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x695ed80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6925880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x6925c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6926880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6926c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6965880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6965c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6966880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6966c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a45880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a46880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a3d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a3e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a4d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a4e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a35880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a36880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a55880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a56880000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a2d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a2dc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a2e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a2ec80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a5d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a5dc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a5e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a5ec80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a25880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a25c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a26880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a26c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a65880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a65c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a66880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a66c80000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x7e3e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x7e3ec80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x7e4e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x7e4ec80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6406880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6486880000000000 RpcIdHelper INFO The detector element hash max is 1122 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... RpcIdHelper VERBOSE MuonIdHelper::init_neighbors RpcIdHelper DEBUG Maximum number of RPC gas gaps is 2 TgcIdHelper DEBUG (Re)initialize -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find mm region index: group, region size 0 0 -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find stgc region index: group, region size 0 0 -AtlasDetectorID::initLevelsFromDict - there are no sTGC entries in the dictionary! -AtlasDetectorID::initLevelsFromDict - there are no MM entries in the dictionary! -AtlasDetectorID DEBUG Could not get value for label 'no_side' of field 'DetZside' in dictionary Calorimeter - AtlasDetectorID::initialize_from_dictionary - OK +AtlasDetectorID... DEBUG initialize_from_dictionary - unable to find mm region index: group, region size 0 0 +AtlasDetectorID... DEBUG initialize_from_dictionary - unable to find stgc region index: group, region size 0 0 +AtlasDetectorID DEBUG initLevelsFromDict - there are no sTGC entries in the dictionary! +AtlasDetectorID DEBUG initLevelsFromDict - there are no MM entries in the dictionary! 
+AtlasDetectorID DEBUG initLevelsFromDict - Could not get value for label 'no_side' of field 'DetZside' in dictionary +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper DEBUG TGC decode index and bit fields for each level: TgcIdHelper DEBUG muon decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated TgcIdHelper DEBUG station decode 1 vals 0,1,2,3,4,5,6,7,8,9,10,13,14,15,17,18,20,21,41,42,43,44,45,46,47,48,49,50,51,52,53,54 mask/zero mask/shift/bits/offset 1f e0ffffffffffffff 56 5 3 indexes 0 1 2 3 4 5 6 7 8 9 10 0 0 11 12 13 0 14 15 0 16 17 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 18 19 20 21 22 23 24 25 26 27 28 29 30 31 mode enumerated @@ -1760,12 +734,12 @@ TgcIdHelper INFO The detector element hash max is 1578 TgcIdHelper INFO Initializing TGC hash indices for finding neighbors ... TgcIdHelper VERBOSE MuonIdHelper::init_neighbors CscIdHelper DEBUG (Re)initialize -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find mm region index: group, region size 0 0 -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find stgc region index: group, region size 0 0 -AtlasDetectorID::initLevelsFromDict - there are no sTGC entries in the dictionary! -AtlasDetectorID::initLevelsFromDict - there are no MM entries in the dictionary! -AtlasDetectorID DEBUG Could not get value for label 'no_side' of field 'DetZside' in dictionary Calorimeter - AtlasDetectorID::initialize_from_dictionary - OK +AtlasDetectorID... DEBUG initialize_from_dictionary - unable to find mm region index: group, region size 0 0 +AtlasDetectorID... DEBUG initialize_from_dictionary - unable to find stgc region index: group, region size 0 0 +AtlasDetectorID DEBUG initLevelsFromDict - there are no sTGC entries in the dictionary! +AtlasDetectorID DEBUG initLevelsFromDict - there are no MM entries in the dictionary! 
+AtlasDetectorID DEBUG initLevelsFromDict - Could not get value for label 'no_side' of field 'DetZside' in dictionary +AtlasDetectorID INFO initialize_from_dictionary - OK CscIdHelper DEBUG CSC decode index and bit fields for each level: CscIdHelper DEBUG muon decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated CscIdHelper DEBUG station decode 1 vals 0,1,2,3,4,5,6,7,8,9,10,13,14,15,17,18,20,21,41,42,43,44,45,46,47,48,49,50,51,52,53,54 mask/zero mask/shift/bits/offset 1f e0ffffffffffffff 56 5 3 indexes 0 1 2 3 4 5 6 7 8 9 10 0 0 11 12 13 0 14 15 0 16 17 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 18 19 20 21 22 23 24 25 26 27 28 29 30 31 mode enumerated @@ -1803,8 +777,8 @@ CscIdHelper INFO Initializing CSC hash indices for finding neighbors .. CscIdHelper VERBOSE MuonIdHelper::init_neighbors =========> checking dictionnary file=IdDictMuonSpectrometer_R.09.02.Asym.xml MdtIdHelper DEBUG (Re)initialize -AtlasDetectorID DEBUG Could not get value for label 'no_side' of field 'DetZside' in dictionary Calorimeter - AtlasDetectorID::initialize_from_dictionary - OK +AtlasDetectorID DEBUG initLevelsFromDict - Could not get value for label 'no_side' of field 'DetZside' in dictionary +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper DEBUG MDT decode index and bit fields for each level: MdtIdHelper DEBUG muon decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated MdtIdHelper DEBUG station decode 1 vals 0,1,2,3,4,5,6,7,8,9,10,13,14,15,17,18,20,21,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58 mask/zero mask/shift/bits/offset 3f e07fffffffffffff 55 6 3 indexes 0 1 2 3 4 5 6 7 8 9 10 0 0 11 12 13 0 14 15 0 16 17 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 mode enumerated @@ -2247,8 +1221,8 @@ MdtIdHelper INFO Initializing MDT hash indices for 
finding neighbors .. MdtIdHelper VERBOSE MuonIdHelper::init_neighbors MdtIdHelper DEBUG Maximum number of MDT tubes is 108 RpcIdHelper DEBUG (Re)initialize -AtlasDetectorID DEBUG Could not get value for label 'no_side' of field 'DetZside' in dictionary Calorimeter - AtlasDetectorID::initialize_from_dictionary - OK +AtlasDetectorID DEBUG initLevelsFromDict - Could not get value for label 'no_side' of field 'DetZside' in dictionary +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper DEBUG RPC decode index and bit fields for each level: RpcIdHelper DEBUG muon decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated RpcIdHelper DEBUG station decode 1 vals 0,1,2,3,4,5,6,7,8,9,10,13,14,15,17,18,20,21,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58 mask/zero mask/shift/bits/offset 3f e07fffffffffffff 55 6 3 indexes 0 1 2 3 4 5 6 7 8 9 10 0 0 11 12 13 0 14 15 0 16 17 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 mode enumerated @@ -2433,1055 +1407,13 @@ RpcIdHelper DEBUG full module range size is 55 RpcIdHelper DEBUG full channel range size is 55 RpcIdHelper INFO Initializing RPC hash indices ... 
RpcIdHelper INFO The element hash max is 608 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6124440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61244c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6124640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61246c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6124c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6124cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6124e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6124ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611c440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611c640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611cc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611ccc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611ce40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611cec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6125440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x61254c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6125640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61256c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6127440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61274c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6127640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61276c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611d440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611d4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611d640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611d6c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611f440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611f4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611f640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611f6c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6125c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6125e40000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6126440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6126640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611dc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611de40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611e440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611e640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611ec40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611ee40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611fc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611fe40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6126c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6126e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6127c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6127e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6118440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61184c0000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6118640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61186c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6118c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6118cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6118e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6118ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6119440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61194c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6119640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61196c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6119c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6119cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6119e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6119ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611a440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611a4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x611a640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611a6c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611ac40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611acc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611ae40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611aec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611b440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611b4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611b640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611b6c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611bc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611bcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611be40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x611bec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6128440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61284c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x6128640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61286c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6128c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6128cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6128e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6128ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6129440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61294c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6129640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61296c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6129c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6129cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6129e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6129ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612a440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612a4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x612a640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612a6c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612ac40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612acc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612ae40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612aec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612b440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612b4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612b640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612b6c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612bc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612bcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612be40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612bec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6114440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61144c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6114640000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61146c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6114c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6114cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6114e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6114ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6115440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61154c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6115640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61156c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6115c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6115cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6115e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6115ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6116440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61164c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6116640000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61166c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6116c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6116cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6116e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6116ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6117440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61174c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6117640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61176c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6117c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6117cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6117e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6117ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612c440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612c4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612c640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x612c6c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612cc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612ccc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612ce40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612cec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612d440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612d4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612d640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612d6c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612dc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612dcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612de40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612dec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612e440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612e4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612e640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x612e6c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612ec40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612ecc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612ee40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612eec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612f440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612f4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612f640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612f6c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612fc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612fcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612fe40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x612fec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6110440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6110640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6110c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x6110e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6111440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6111640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6111c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6111e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6112440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6112640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6112c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6112e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6113c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6113e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6130440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6130640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6130c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6130e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6131440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6131640000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6131c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6131e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6132440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6132640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6132c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6132e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6133c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6133e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6113440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6113640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6133440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6133640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610c440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610c640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610cc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610ce40000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610d440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610d640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610dc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610de40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610e440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610e640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610ec40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610ee40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610fc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610fe40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6134440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6134640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6134c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6134e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6135440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6135640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x6135c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6135e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6136440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6136640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6136c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6136e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6137c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6137e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610f440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610f4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610f640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610f6c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6137440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61374c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6137640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61376c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x6108440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61084c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6108640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61086c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6108c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6108cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6108e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6108ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6109440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61094c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6109640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61096c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6109c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6109cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6109e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6109ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x610a440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610a4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610a640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610a6c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610ac40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610acc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610ae40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610aec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610bc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610bcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610be40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610bec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6138440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61384c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6138640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61386c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6138c40000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6138cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6138e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6138ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6139440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61394c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6139640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61396c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6139c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6139cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6139e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6139ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x613a440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x613a4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x613a640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x613a6c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x613ac40000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x613acc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x613ae40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x613aec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x613bc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x613bcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x613be40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x613bec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610b440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x613b440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6104440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6104c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6105440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6105c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6106440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6106c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6107c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x613c440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x613cc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x613d440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x613dc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x613e440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x613ec40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x613fc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x619c440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x619c4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x619c640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x619c6c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x619cc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x619ccc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x619ce40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x619cec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x619d440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x619d4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x619d640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x619d6c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x619dc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x619dcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x619de40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x619dec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x619e440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x619e4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x619e640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x619e6c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x619fc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x619fcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x619fe40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x619fec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a4440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x61a44c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a4640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a46c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a4c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a4cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a4e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a4ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a5440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a54c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a5640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a56c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a5c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a5cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a5e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a5ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a6440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a64c0000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a6640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a66c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a7c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a7cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a7e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a7ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6198440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61984c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6198640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61986c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6198c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6198cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6198e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6198ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6199440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61994c0000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6199640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61996c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6199c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6199cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6199e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6199ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x619a440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x619a4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x619a640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x619a6c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x619bc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x619bcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x619be40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x619bec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a8440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a84c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x61a8640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a86c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a8c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a8cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a8e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a8ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a9440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a94c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a9640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a96c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a9c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a9cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a9e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61a9ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61aa440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61aa4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x61aa640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61aa6c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61abc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61abcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61abe40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61abec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6194440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61944c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6194640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61946c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6194c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6194cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6194e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6194ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6195440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61954c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x6195640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61956c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6195c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6195cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6195e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6195ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6196440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61964c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6196640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61966c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6197c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6197cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6197e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6197ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61ac440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61ac4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61ac640000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61ac6c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61acc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61accc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61ace40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61acec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61ad440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61ad4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61ad640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61ad6c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61adc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61adcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61ade40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61adec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61ae440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61ae4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61ae640000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61ae6c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61afc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61afcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61afe40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61afec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6190440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6190c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6191440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6191c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6192440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6193c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b0440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b0c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b1440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b1c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b2440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x61b3c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6190640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61906c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6190e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6190ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6191640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61916c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6191e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6191ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6192640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61926c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6193e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6193ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b0640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b06c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b0e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x61b0ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b1640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b16c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b1e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b1ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b2640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b26c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b3e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b3ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x618c440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x618c640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x618cc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x618ce40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x618d440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x618d640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x618dc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x618de40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x618e440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x618e640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x618fc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x618fe40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b4440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b4640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b4c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b4e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b5440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b5640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b5c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b5e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b6440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b6640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b7c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b7e40000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6188440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61884c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6188640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61886c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6188c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6188cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6188e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6188ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6189440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61894c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6189640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61896c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6189c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6189cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6189e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6189ec0000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x618a440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x618a4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x618a640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x618a6c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x618bc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x618bcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x618be40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x618bec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b8440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b84c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b8640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b86c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b8c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b8cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b8e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b8ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x61b9440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b94c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b9640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b96c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b9c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b9cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b9e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61b9ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61ba440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61ba4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61ba640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61ba6c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61bbc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61bbcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61bbe40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x61bbec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x6224440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62244c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6224c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6224cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6225440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62254c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6225c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6225cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6226440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62264c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6226c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6226cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6227440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62274c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6227c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6227cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x621c440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621c4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621cc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621ccc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621d440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621d4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621dc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621dcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621e440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621e4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621ec40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621ecc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621f440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621f4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621fc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621fcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6218440000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62184c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6218c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6218cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6219440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62194c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6219c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6219cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621a440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621a4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621ac40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621acc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621b440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621b4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621bc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621bcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6228440000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62284c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6228c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6228cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6229440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62294c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6229c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6229cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622a440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622a4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622ac40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622acc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622b440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622b4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622bc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622bcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6214440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x62144c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6214c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6214cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6215440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62154c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6215c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6215cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6216440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62164c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6216c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6216cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6217440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62174c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6217c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6217cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622c440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x622c4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622cc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622ccc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622d440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622d4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622dc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622dcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622e440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622e4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622ec40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622ecc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622f440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622f4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622fc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622fcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6210440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x62104c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6210c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6210cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6211440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62114c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6211c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6211cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6212440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62124c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6212c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6212cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6213440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62134c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6213c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6213cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6230440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62304c0000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6230c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6230cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6231440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62314c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6231c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6231cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6232440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62324c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6232c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6232cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6233440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62334c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6233c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6233cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x620c440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x620c4c0000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x620cc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x620ccc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x620d440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x620d4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x620dc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x620dcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x620e440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x620e4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x620ec40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x620ecc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x620f440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x620f4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x620fc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x620fcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6234440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62344c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x6234c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6234cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6235440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62354c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6235c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6235cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6236440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62364c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6236c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6236cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6237440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62374c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6237c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6237cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6208440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62084c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x6208c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6208cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6209440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62094c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6209c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6209cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x620a440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x620a4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x620ac40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x620acc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x620b440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x620b4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x620bc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x620bcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6238440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62384c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x6238c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6238cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6239440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62394c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6239c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6239cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623a440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623a4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623ac40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623acc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623b440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623b4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623bc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623bcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x629c440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x629c4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x629cc40000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x629ccc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x629d440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x629d4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x629dc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x629dcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x629e440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x629e4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x629fc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x629fcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62a4440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62a44c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62a4c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62a4cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62a5440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62a54c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62a5c40000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62a5cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62a6440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62a64c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62a7c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62a7cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6298440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62984c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6298c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6298cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6299440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62994c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6299c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6299cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x629a440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x629a4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x629bc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x629bcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62a8440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62a84c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62a8c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62a8cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62a9440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62a94c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62a9c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62a9cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62aa440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62aa4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62abc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62abcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6294440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62944c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6294c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x6294cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6295440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62954c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6295c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6295cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6296440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62964c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6297c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6297cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62ac440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62ac4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62acc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62accc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62ad440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62ad4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62adc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x62adcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62ae440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62ae4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62afc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62afcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6290440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62904c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6290c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6290cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6291440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62914c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6291c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6291cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6292440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62924c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6293c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6293cc0000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62b0440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62b04c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62b0c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62b0cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62b1440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62b14c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62b1c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62b1cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62b2440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62b24c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62b3c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62b3cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x628c440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x628c4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x628cc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x628ccc0000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x628d440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x628d4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x628dc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x628dcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x628e440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x628e4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x628fc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x628fcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62b4440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62b44c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62b4c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62b4cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62b5440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62b54c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62b5c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62b5cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x62b6440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62b64c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62b7c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62b7cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6288440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62884c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6288c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6288cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6289440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62894c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6289c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6289cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x628a440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x628a4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x628bc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x628bcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x62b8440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62b84c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62b8c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62b8cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62b9440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62b94c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62b9c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62b9cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62ba440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62ba4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62bbc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x62bbcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641ec40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641ecc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641ee40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641eec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x641f440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641f4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641f640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641f6c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6426c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6426cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6426e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6426ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6427440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x64274c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6427640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x64276c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641ac40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641acc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641ae40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641aec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641b440000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641b4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641b640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641b6c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642ac40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642acc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642ae40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642aec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642b440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642b4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642b640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642b6c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6416c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6417440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642ec40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642f440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6416e40000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6416ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6417640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x64176c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642ee40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642eec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642f640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642f6c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x649ec40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x649ecc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x649f440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x649f4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x64a6c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x64a6cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x64a7440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x64a74c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x649ac40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x649acc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x649ae40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x649aec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x649b440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x649b4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x649b640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x649b6c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x64aac40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x64aacc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x64aae40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x64aaec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x64ab440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x64ab4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x64ab640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x64ab6c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6496c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x6496cc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6496e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6496ec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6497440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x64974c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6497640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x64976c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x64aec40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x64aecc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x64aee40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x64aeec0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x64af440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x64af4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x64af640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x64af6c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6492c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x6492e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6493440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6493640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x64b2c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x64b2e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x64b3440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x64b3640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6522c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6523440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x651ec40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x651f440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6526c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6527440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x651ac40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x651b440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x652ac40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x652b440000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6516c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6516e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6517440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6517640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x652ec40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x652ee40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x652f440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x652f640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6512c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6512e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6513440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6513640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6532c40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6532e40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6533440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6533640000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6f1f440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6f1f640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6f27440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6f27640000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6203440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6243440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x60bc440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x60bc4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x60bcc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x60bccc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x60bd440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x60bd4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x60bdc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x60bdcc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x60be440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x60be4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x60bec40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x60becc0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x60bf440000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x60bf4c0000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x60bfc40000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x60bfcc0000000000 RpcIdHelper INFO The detector element hash max is 1138 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... RpcIdHelper VERBOSE MuonIdHelper::init_neighbors RpcIdHelper DEBUG Maximum number of RPC gas gaps is 3 TgcIdHelper DEBUG (Re)initialize -AtlasDetectorID DEBUG Could not get value for label 'no_side' of field 'DetZside' in dictionary Calorimeter - AtlasDetectorID::initialize_from_dictionary - OK +AtlasDetectorID DEBUG initLevelsFromDict - Could not get value for label 'no_side' of field 'DetZside' in dictionary +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper DEBUG TGC decode index and bit fields for each level: TgcIdHelper DEBUG muon decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated TgcIdHelper DEBUG station decode 1 vals 0,1,2,3,4,5,6,7,8,9,10,13,14,15,17,18,20,21,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58 mask/zero mask/shift/bits/offset 3f e07fffffffffffff 55 6 3 indexes 0 1 2 3 4 5 6 7 8 9 10 0 0 11 12 13 0 14 15 0 16 17 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 mode enumerated @@ -3576,8 +1508,8 @@ TgcIdHelper INFO The detector element hash max is 1554 TgcIdHelper INFO Initializing TGC hash 
indices for finding neighbors ... TgcIdHelper VERBOSE MuonIdHelper::init_neighbors CscIdHelper DEBUG (Re)initialize -AtlasDetectorID DEBUG Could not get value for label 'no_side' of field 'DetZside' in dictionary Calorimeter - AtlasDetectorID::initialize_from_dictionary - OK +AtlasDetectorID DEBUG initLevelsFromDict - Could not get value for label 'no_side' of field 'DetZside' in dictionary +AtlasDetectorID INFO initialize_from_dictionary - OK CscIdHelper DEBUG CSC decode index and bit fields for each level: CscIdHelper DEBUG muon decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated CscIdHelper DEBUG station decode 1 vals 0,1,2,3,4,5,6,7,8,9,10,13,14,15,17,18,20,21,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58 mask/zero mask/shift/bits/offset 3f e07fffffffffffff 55 6 3 indexes 0 1 2 3 4 5 6 7 8 9 10 0 0 11 12 13 0 14 15 0 16 17 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 mode enumerated @@ -3606,8 +1538,8 @@ CscIdHelper INFO The channel hash max is 15360 CscIdHelper INFO Initializing CSC hash indices for finding neighbors ... 
CscIdHelper VERBOSE MuonIdHelper::init_neighbors TgcIdHelper DEBUG (Re)initialize -AtlasDetectorID DEBUG Could not get value for label 'no_side' of field 'DetZside' in dictionary Calorimeter - AtlasDetectorID::initialize_from_dictionary - OK +AtlasDetectorID DEBUG initLevelsFromDict - Could not get value for label 'no_side' of field 'DetZside' in dictionary +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper DEBUG TGC decode index and bit fields for each level: TgcIdHelper DEBUG muon decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated TgcIdHelper DEBUG station decode 1 vals 0,1,2,3,4,5,6,7,8,9,10,13,14,15,17,18,20,21,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58 mask/zero mask/shift/bits/offset 3f e07fffffffffffff 55 6 3 indexes 0 1 2 3 4 5 6 7 8 9 10 0 0 11 12 13 0 14 15 0 16 17 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 mode enumerated @@ -3702,8 +1634,8 @@ TgcIdHelper INFO The detector element hash max is 1554 TgcIdHelper INFO Initializing TGC hash indices for finding neighbors ... 
TgcIdHelper VERBOSE MuonIdHelper::init_neighbors MmIdHelper DEBUG (Re)initialize -AtlasDetectorID DEBUG Could not get value for label 'no_side' of field 'DetZside' in dictionary Calorimeter - AtlasDetectorID::initialize_from_dictionary - OK +AtlasDetectorID DEBUG initLevelsFromDict - Could not get value for label 'no_side' of field 'DetZside' in dictionary +AtlasDetectorID INFO initialize_from_dictionary - OK MmIdHelper DEBUG MicroMegas decode index and bit fields for each level: MmIdHelper DEBUG muon decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated MmIdHelper DEBUG station decode 1 vals 0,1,2,3,4,5,6,7,8,9,10,13,14,15,17,18,20,21,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58 mask/zero mask/shift/bits/offset 3f e07fffffffffffff 55 6 3 indexes 0 1 2 3 4 5 6 7 8 9 10 0 0 11 12 13 0 14 15 0 16 17 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 mode enumerated @@ -3739,9 +1671,9 @@ MmIdHelper INFO Initializing MicroMegas hash indices for finding neigh MmIdHelper VERBOSE MuonIdHelper::init_neighbors =========> checking dictionnary file=IdDictMuonSpectrometer_R.09.02.xml MdtIdHelper DEBUG (Re)initialize -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find csc region index: group, region size 0 0 -AtlasDetectorID DEBUG Could not get value for label 'no_side' of field 'DetZside' in dictionary Calorimeter - AtlasDetectorID::initialize_from_dictionary - OK +AtlasDetectorID... 
DEBUG initialize_from_dictionary - unable to find csc region index: group, region size 0 0 +AtlasDetectorID DEBUG initLevelsFromDict - Could not get value for label 'no_side' of field 'DetZside' in dictionary +AtlasDetectorID INFO initialize_from_dictionary - OK MdtIdHelper DEBUG MDT decode index and bit fields for each level: MdtIdHelper DEBUG muon decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated MdtIdHelper DEBUG station decode 1 vals 0,1,2,3,4,5,6,7,8,9,10,13,14,15,17,18,20,21,41,42,43,44,45,46,48,52,53,54,55,56,57,58 mask/zero mask/shift/bits/offset 1f e0ffffffffffffff 56 5 3 indexes 0 1 2 3 4 5 6 7 8 9 10 0 0 11 12 13 0 14 15 0 16 17 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 18 19 20 21 22 23 0 24 0 0 0 25 26 27 28 29 30 31 mode enumerated @@ -4160,9 +2092,9 @@ MdtIdHelper INFO Initializing MDT hash indices for finding neighbors .. MdtIdHelper VERBOSE MuonIdHelper::init_neighbors MdtIdHelper DEBUG Maximum number of MDT tubes is 108 RpcIdHelper DEBUG (Re)initialize -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find csc region index: group, region size 0 0 -AtlasDetectorID DEBUG Could not get value for label 'no_side' of field 'DetZside' in dictionary Calorimeter - AtlasDetectorID::initialize_from_dictionary - OK +AtlasDetectorID... 
DEBUG initialize_from_dictionary - unable to find csc region index: group, region size 0 0 +AtlasDetectorID DEBUG initLevelsFromDict - Could not get value for label 'no_side' of field 'DetZside' in dictionary +AtlasDetectorID INFO initialize_from_dictionary - OK RpcIdHelper DEBUG RPC decode index and bit fields for each level: RpcIdHelper DEBUG muon decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated RpcIdHelper DEBUG station decode 1 vals 0,1,2,3,4,5,6,7,8,9,10,13,14,15,17,18,20,21,41,42,43,44,45,46,48,52,53,54,55,56,57,58 mask/zero mask/shift/bits/offset 1f e0ffffffffffffff 56 5 3 indexes 0 1 2 3 4 5 6 7 8 9 10 0 0 11 12 13 0 14 15 0 16 17 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 18 19 20 21 22 23 0 24 0 0 0 25 26 27 28 29 30 31 mode enumerated @@ -4347,1072 +2279,14 @@ RpcIdHelper DEBUG full module range size is 55 RpcIdHelper DEBUG full channel range size is 55 RpcIdHelper INFO Initializing RPC hash indices ... 
RpcIdHelper INFO The element hash max is 616 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6248880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6248980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6248c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6248d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6249880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6249980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6249c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6249d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6238880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6238c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6239880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6239980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6239c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6239d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x624a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x624a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x624ac80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x624ad80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x624e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x624e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x624ec80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x624ed80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623ac80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623ad80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623ec80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623ed80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x624b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x624bc80000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x624c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x624cc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623bc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623cc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623dc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x623fc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x624d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x624dc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x624f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x624fc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6230880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6230980000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6230c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6230d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6231880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6231980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6231c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6231d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6232880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6232980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6232c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6232d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6233880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6233980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6233c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6233d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6234880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6234980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x6234c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6234d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6235880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6235980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6235c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6235d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6236880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6236980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6236c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6236d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6237880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6237980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6237c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6237d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6250880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6250980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x6250c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6250d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6251880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6251980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6251c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6251d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6252880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6252980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6252c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6252d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6253880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6253980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6253c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6253d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6254880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6254980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x6254c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6254d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6255880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6255980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6255c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6255d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6256880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6256980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6256c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6256d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6257880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6257980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6257c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6257d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6228880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6228980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6228c80000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6228d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6229880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6229980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6229c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6229d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622ac80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622ad80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622bc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622bd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622cc80000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622cd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622d980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622dc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622dd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622ec80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622ed80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622fc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x622fd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6258880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6258980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6258c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x6258d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6259880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6259980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6259c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6259d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625ac80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625ad80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625bc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625bd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625cc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x625cd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625d980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625dc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625dd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625ec80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625ed80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625fc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x625fd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6220880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6220c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6221880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x6221c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6222880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6222c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6223880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6223c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6224880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6224c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6225880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6225c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6227880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6227c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6260880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6260c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6261880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6261c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6262880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6262c80000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6263880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6263c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6264880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6264c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6265880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6265c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6267880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6267c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6226880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6226c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6266880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6266c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6218880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6218c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6219880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6219c80000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621ac80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621bc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621cc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621dc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621fc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6268880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6268c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6269880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6269c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x626a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x626ac80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x626b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x626bc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x626c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x626cc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x626d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x626dc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x626f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x626fc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621ec80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x621ed80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x626e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x626e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x626ec80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x626ed80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x6210880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6210980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6210c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6210d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6211880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6211980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6211c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6211d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6212880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6212980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6212c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6212d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6213880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6213980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6213c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6213d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x6214880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6214980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6214c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6214d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6215880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6215980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6215c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6215d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6217880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6217980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6217c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6217d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6270880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6270980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6270c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6270d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6271880000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6271980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6271c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6271d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6272880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6272980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6272c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6272d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6273880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6273980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6273c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6273d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6274880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6274980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6274c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6274d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6275880000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6275980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6275c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6275d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6277880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6277980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6277c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6277d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6216880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6276880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6208880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6209880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x620a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x620b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x620c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x620d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x620f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x6278880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6279880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x627a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x627b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x627c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x627d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x627f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6338880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6338980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6338c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6338d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6339880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6339980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6339c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6339d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x633a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x633a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x633ac80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x633ad80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x633b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x633b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x633bc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x633bd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x633c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x633c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x633cc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x633cd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x633f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x633f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x633fc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x633fd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6348880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x6348980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6348c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6348d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6349880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6349980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6349c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6349d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x634a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x634a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x634ac80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x634ad80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x634b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x634b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x634bc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x634bd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x634c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x634c980000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x634cc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x634cd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x634f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x634f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x634fc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x634fd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6330880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6330980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6330c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6330d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6331880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6331980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6331c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6331d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6332880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6332980000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6332c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6332d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6333880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6333980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6333c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6333d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6334880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6334980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6334c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6334d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6337880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6337980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6337c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6337d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6350880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6350980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x6350c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6350d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6351880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6351980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6351c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6351d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6352880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6352980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6352c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6352d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6353880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6353980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6353c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6353d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6354880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6354980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x6354c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6354d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6357880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6357980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6357c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6357d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6328880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6328980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6328c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6328d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6329880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6329980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6329c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6329d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x632a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x632a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x632ac80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x632ad80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x632b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x632b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x632bc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x632bd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x632c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x632c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x632cc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x632cd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x632f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x632f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x632fc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x632fd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6358880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6358980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6358c80000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6358d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6359880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6359980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6359c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6359d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x635a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x635a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x635ac80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x635ad80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x635b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x635b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x635bc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x635bd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x635c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x635c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x635cc80000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x635cd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x635f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x635f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x635fc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x635fd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6320880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6321880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6322880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6323880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6324880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6327880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6360880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6361880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6362880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6363880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6364880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x6367880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6320c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6320d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6321c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6321d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6322c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6322d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6323c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6323d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6324c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6324d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6327c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6327d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6360c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6360d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6361c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x6361d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6362c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6362d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6363c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6363d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6364c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6364d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6367c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6367d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6318880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6318c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6319880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6319c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x631a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x631ac80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x631b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x631bc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x631c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x631cc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x631f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x631fc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6368880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6368c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6369880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6369c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x636a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x636ac80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x636b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x636bc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x636c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x636cc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x636f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x636fc80000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6310880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6310980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6310c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6310d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6311880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6311980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6311c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6311d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6312880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6312980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6312c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6312d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6313880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6313980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6313c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6313d80000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6314880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6314980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6314c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6314d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6317880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6317980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6317c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6317d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6370880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6370980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6370c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6370d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6371880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6371980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6371c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6371d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x6372880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6372980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6372c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6372d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6373880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6373980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6373c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6373d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6374880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6374980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6374c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6374d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6377880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6377980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6377c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6377d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x6448880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6448980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6449880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6449980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x644a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x644a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x644b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x644b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x644c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x644c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x644d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x644d980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x644e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x644e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x644f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x644f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x6438880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6438980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6439880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6439980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x643a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x643a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x643b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x643b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x643c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x643c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x643d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x643d980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x643e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x643e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x643f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x643f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6430880000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6430980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6431880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6431980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6432880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6432980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6433880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6433980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6434880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6434980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6435880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6435980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6436880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6436980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6437880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6437980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6450880000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6450980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6451880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6451980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6452880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6452980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6453880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6453980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6454880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6454980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6455880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6455980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6456880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6456980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6457880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6457980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6428880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x6428980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6429880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6429980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642d980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x642f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6458880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x6458980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6459880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6459980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x645a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x645a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x645b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x645b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x645c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x645c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x645d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x645d980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x645e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x645e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x645f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x645f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6420880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x6420980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6421880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6421980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6422880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6422980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6423880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6423980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6424880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6424980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6425880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6425980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6426880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6426980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6427880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6427980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6460880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6460980000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6461880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6461980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6462880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6462980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6463880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6463980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6464880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6464980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6465880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6465980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6466880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6466980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6467880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6467980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6418880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6418980000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6419880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6419980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641d980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x641f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6468880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6468980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x6469880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6469980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x646a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x646a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x646b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x646b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x646c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x646c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x646d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x646d980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x646e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x646e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x646f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x646f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6410880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6410980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x6411880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6411980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6412880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6412980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6413880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6413980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6414880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6414980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6415880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6415980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6416880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6416980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6417880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6417980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6470880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6470980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x6471880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6471980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6472880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6472980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6473880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6473980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6474880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6474980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6475880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6475980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6476880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6476980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6477880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6477980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6538880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6538980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6539880000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6539980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x653a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x653a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x653b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x653b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x653c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x653c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x653f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x653f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6548880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6548980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6549880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6549980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x654a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x654a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x654b880000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x654b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x654c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x654c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x654f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x654f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6530880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6530980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6531880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6531980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6532880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6532980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6533880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6533980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6534880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6534980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6537880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x6537980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6550880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6550980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6551880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6551980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6552880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6552980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6553880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6553980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6554880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6554980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6557880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6557980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6528880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6528980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6529880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x6529980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x652a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x652a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x652b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x652b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x652c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x652c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x652f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x652f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6558880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6558980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6559880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6559980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x655a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x655a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x655b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x655b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x655c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x655c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x655f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x655f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6520880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6520980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6521880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6521980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6522880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6522980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6523880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6523980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6524880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6524980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6527880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6527980000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6560880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6560980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6561880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6561980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6562880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6562980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6563880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6563980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6564880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6564980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6567880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6567980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6518880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6518980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6519880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6519980000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x651a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x651a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x651b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x651b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x651c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x651c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x651f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x651f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6568880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6568980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6569880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6569980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x656a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x656a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x656b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x656b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x656c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x656c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x656f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x656f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6510880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6510980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6511880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6511980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6512880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6512980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6513880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6513980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6514880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6514980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6517880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6517980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x6570880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6570980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6571880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6571980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6572880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6572980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6573880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6573980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6574880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6574980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6577880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6577980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x683d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x683d980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x683dc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x683dd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x683e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x683e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x683ec80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x683ed80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x684d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x684d980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x684dc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x684dd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x684e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x684e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x684ec80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x684ed80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6835880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6835980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6835c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6835d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6836880000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6836980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6836c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6836d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6855880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6855980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6855c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6855d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6856880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6856980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6856c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6856d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x682d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x682e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x685d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x685e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x682dc80000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x682dd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x682ec80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x682ed80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x685dc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x685dd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x685ec80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x685ed80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x693d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x693d980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x693e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x693e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x694d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x694d980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x694e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x694e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6935880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x6935980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6935c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6935d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6936880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6936980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6936c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6936d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6955880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6955980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6955c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6955d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6956880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6956980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6956c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6956d80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x692d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x692d980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x692dc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x692dd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x692e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x692e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x692ec80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x692ed80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x695d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x695d980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x695dc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x695dd80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x695e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x695e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x695ec80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x695ed80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6925880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 
0x6925c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6926880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6926c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6965880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6965c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6966880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6966c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a45880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a46880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a3d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a3e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a4d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a4e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a35880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a36880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a55880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a56880000000000 -RpcIdHelper 
DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a2d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a2dc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a2e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a2ec80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a5d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a5dc80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a5e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a5ec80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a25880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a25c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a26880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a26c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a65880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a65c80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a66880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6a66c80000000000 -RpcIdHelper DEBUG 
init_detectorElement_hashes Please check the dictionary for possible duplication for 0x7a3e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x7a3ec80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x7a4e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x7a4ec80000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6406880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6486880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6108880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6108980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6109880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6109980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes 
Please check the dictionary for possible duplication for 0x610d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610d980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x610f980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6178880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6178980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6179880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x6179980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x617a880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x617a980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x617b880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x617b980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x617c880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x617c980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for 
possible duplication for 0x617d880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x617d980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x617e880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x617e980000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x617f880000000000 -RpcIdHelper DEBUG init_detectorElement_hashes Please check the dictionary for possible duplication for 0x617f980000000000 RpcIdHelper INFO The detector element hash max is 1154 RpcIdHelper INFO Initializing RPC hash indices for finding neighbors ... RpcIdHelper VERBOSE MuonIdHelper::init_neighbors RpcIdHelper DEBUG Maximum number of RPC gas gaps is 3 TgcIdHelper DEBUG (Re)initialize -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find csc region index: group, region size 0 0 -AtlasDetectorID DEBUG Could not get value for label 'no_side' of field 'DetZside' in dictionary Calorimeter - AtlasDetectorID::initialize_from_dictionary - OK +AtlasDetectorID... 
DEBUG initialize_from_dictionary - unable to find csc region index: group, region size 0 0 +AtlasDetectorID DEBUG initLevelsFromDict - Could not get value for label 'no_side' of field 'DetZside' in dictionary +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper DEBUG TGC decode index and bit fields for each level: TgcIdHelper DEBUG muon decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated TgcIdHelper DEBUG station decode 1 vals 0,1,2,3,4,5,6,7,8,9,10,13,14,15,17,18,20,21,41,42,43,44,45,46,48,52,53,54,55,56,57,58 mask/zero mask/shift/bits/offset 1f e0ffffffffffffff 56 5 3 indexes 0 1 2 3 4 5 6 7 8 9 10 0 0 11 12 13 0 14 15 0 16 17 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 18 19 20 21 22 23 0 24 0 0 0 25 26 27 28 29 30 31 mode enumerated @@ -5503,9 +2377,9 @@ TgcIdHelper INFO The detector element hash max is 1530 TgcIdHelper INFO Initializing TGC hash indices for finding neighbors ... TgcIdHelper VERBOSE MuonIdHelper::init_neighbors TgcIdHelper DEBUG (Re)initialize -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find csc region index: group, region size 0 0 -AtlasDetectorID DEBUG Could not get value for label 'no_side' of field 'DetZside' in dictionary Calorimeter - AtlasDetectorID::initialize_from_dictionary - OK +AtlasDetectorID... 
DEBUG initialize_from_dictionary - unable to find csc region index: group, region size 0 0 +AtlasDetectorID DEBUG initLevelsFromDict - Could not get value for label 'no_side' of field 'DetZside' in dictionary +AtlasDetectorID INFO initialize_from_dictionary - OK TgcIdHelper DEBUG TGC decode index and bit fields for each level: TgcIdHelper DEBUG muon decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated TgcIdHelper DEBUG station decode 1 vals 0,1,2,3,4,5,6,7,8,9,10,13,14,15,17,18,20,21,41,42,43,44,45,46,48,52,53,54,55,56,57,58 mask/zero mask/shift/bits/offset 1f e0ffffffffffffff 56 5 3 indexes 0 1 2 3 4 5 6 7 8 9 10 0 0 11 12 13 0 14 15 0 16 17 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 18 19 20 21 22 23 0 24 0 0 0 25 26 27 28 29 30 31 mode enumerated @@ -5596,9 +2470,9 @@ TgcIdHelper INFO The detector element hash max is 1530 TgcIdHelper INFO Initializing TGC hash indices for finding neighbors ... TgcIdHelper VERBOSE MuonIdHelper::init_neighbors MmIdHelper DEBUG (Re)initialize -AtlasDetectorIDHelper::initialize_from_dictionary - Warning: unable to find csc region index: group, region size 0 0 -AtlasDetectorID DEBUG Could not get value for label 'no_side' of field 'DetZside' in dictionary Calorimeter - AtlasDetectorID::initialize_from_dictionary - OK +AtlasDetectorID... 
DEBUG initialize_from_dictionary - unable to find csc region index: group, region size 0 0 +AtlasDetectorID DEBUG initLevelsFromDict - Could not get value for label 'no_side' of field 'DetZside' in dictionary +AtlasDetectorID INFO initialize_from_dictionary - OK MmIdHelper DEBUG MicroMegas decode index and bit fields for each level: MmIdHelper DEBUG muon decode 1 vals 2,4,5,7,10,11,12,13 mask/zero mask/shift/bits/offset 7 1fffffffffffffff 61 3 0 indexes 0 0 1 2 0 3 0 0 4 5 6 7 mode enumerated MmIdHelper DEBUG station decode 1 vals 0,1,2,3,4,5,6,7,8,9,10,13,14,15,17,18,20,21,41,42,43,44,45,46,48,52,53,54,55,56,57,58 mask/zero mask/shift/bits/offset 1f e0ffffffffffffff 56 5 3 indexes 0 1 2 3 4 5 6 7 8 9 10 0 0 11 12 13 0 14 15 0 16 17 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 18 19 20 21 22 23 0 24 0 0 0 25 26 27 28 29 30 31 mode enumerated diff --git a/MuonSpectrometer/MuonIdHelpers/src/CscIdHelper.cxx b/MuonSpectrometer/MuonIdHelpers/src/CscIdHelper.cxx index 9607c7116c7ca74b5959e7da11c81855713c7e17..234511e12503b5de24015e4bc6ce73d50df576ca 100644 --- a/MuonSpectrometer/MuonIdHelpers/src/CscIdHelper.cxx +++ b/MuonSpectrometer/MuonIdHelpers/src/CscIdHelper.cxx @@ -1,37 +1,16 @@ /* - Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration */ -/** - * ============================================================================== - * ATLAS Muon Identifier Helpers Package - * ============================================================================== - */ - #include "MuonIdHelpers/CscIdHelper.h" +#include "AthenaKernel/getMessageSvc.h" -#include "GaudiKernel/ISvcLocator.h" -#include "GaudiKernel/Bootstrap.h" -#include "GaudiKernel/MsgStream.h" -#include "GaudiKernel/IMessageSvc.h" #include <mutex> -/// Constructor/Destructor - CscIdHelper::CscIdHelper() : MuonIdHelper("CscIdHelper"), m_CHAMBERLAYER_INDEX(0), m_WIRELAYER_INDEX(0), m_MEASURESPHI_INDEX(0), m_stripMaxPhi(UINT_MAX), 
m_stripMaxEta(UINT_MAX), m_hasChamLay1(false) {} -/// Destructor - -CscIdHelper::~CscIdHelper() -{ - // m_Log deleted in base class. -} - - - /// Initialize dictionary - int CscIdHelper::initialize_from_dictionary(const IdDictMgr& dict_mgr) { int status = 0; @@ -46,7 +25,7 @@ int CscIdHelper::initialize_from_dictionary(const IdDictMgr& dict_mgr) } /// init base object - + AtlasDetectorID::setMessageSvc(Athena::getMessageSvc()); if (AtlasDetectorID::initialize_from_dictionary(dict_mgr)) return (1); // Register version of the MuonSpectrometer dictionary diff --git a/MuonSpectrometer/MuonIdHelpers/src/MdtIdHelper.cxx b/MuonSpectrometer/MuonIdHelpers/src/MdtIdHelper.cxx index a433dbc3150ba30ce56d16260e702103448623d2..c82c55b75d304c448767e462217de1d0bb13d410 100644 --- a/MuonSpectrometer/MuonIdHelpers/src/MdtIdHelper.cxx +++ b/MuonSpectrometer/MuonIdHelpers/src/MdtIdHelper.cxx @@ -2,19 +2,8 @@ Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration */ -/** - * ============================================================================== - * ATLAS Muon Identifier Helpers Package - * ----------------------------------------- - * ============================================================================== - */ - #include "MuonIdHelpers/MdtIdHelper.h" - -#include "GaudiKernel/ISvcLocator.h" -#include "GaudiKernel/Bootstrap.h" -#include "GaudiKernel/MsgStream.h" -#include "GaudiKernel/IMessageSvc.h" +#include "AthenaKernel/getMessageSvc.h" MdtIdHelper::MdtIdHelper() : MuonIdHelper("MdtIdHelper"), @@ -37,7 +26,7 @@ int MdtIdHelper::initialize_from_dictionary(const IdDictMgr& dict_mgr) } /// init base object - + AtlasDetectorID::setMessageSvc(Athena::getMessageSvc()); if (AtlasDetectorID::initialize_from_dictionary(dict_mgr)) return (1); // Register version of the MuonSpectrometer dictionary diff --git a/MuonSpectrometer/MuonIdHelpers/src/MmIdHelper.cxx b/MuonSpectrometer/MuonIdHelpers/src/MmIdHelper.cxx index 
fc754d33835e0f220b6f9d5c50de9e1e55540f82..49a8cd78d6515e25b1a3adeddf7ed88289b2944d 100644 --- a/MuonSpectrometer/MuonIdHelpers/src/MmIdHelper.cxx +++ b/MuonSpectrometer/MuonIdHelpers/src/MmIdHelper.cxx @@ -2,19 +2,8 @@ Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration */ -/** - * ============================================================================== - * ATLAS Muon Identifier Helpers Package - * ----------------------------------------- - * ============================================================================== - */ - -// Includes #include "MuonIdHelpers/MmIdHelper.h" -#include "GaudiKernel/ISvcLocator.h" -#include "GaudiKernel/Bootstrap.h" -#include "GaudiKernel/MsgStream.h" -#include "GaudiKernel/IMessageSvc.h" +#include "AthenaKernel/getMessageSvc.h" /*******************************************************************************/ // Constructor/Destructor @@ -36,6 +25,7 @@ int MmIdHelper::initialize_from_dictionary(const IdDictMgr& dict_mgr) { } // init base object + AtlasDetectorID::setMessageSvc(Athena::getMessageSvc()); if(AtlasDetectorID::initialize_from_dictionary(dict_mgr)) return (1); // Register version of the MuonSpectrometer dictionary diff --git a/MuonSpectrometer/MuonIdHelpers/src/MuonIdHelper.cxx b/MuonSpectrometer/MuonIdHelpers/src/MuonIdHelper.cxx index 92a53fc19d2c66d051b10444e1d51c4407e7cc10..7f6366917af368c631c5a66055183ac91f3b1901 100644 --- a/MuonSpectrometer/MuonIdHelpers/src/MuonIdHelper.cxx +++ b/MuonSpectrometer/MuonIdHelpers/src/MuonIdHelper.cxx @@ -2,16 +2,6 @@ Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration */ -// ****************************************************************************** -// ATLAS Muon Identifier Helpers Package -// ----------------------------------------- -// ****************************************************************************** - -//<doc><file> $Id: MuonIdHelper.cxx,v 1.48 2007-11-25 16:33:41 ketevi Exp $ -//<version> $Name: not supported by 
cvs2svn $ - -// Includes - #include "MuonIdHelpers/MuonIdHelper.h" #include "GaudiKernel/ISvcLocator.h" @@ -19,11 +9,8 @@ #include "GaudiKernel/MsgStream.h" #include "GaudiKernel/IMessageSvc.h" - const std::string MuonIdHelper::BAD_NAME = "UNKNOWN"; -// Constructor - MuonIdHelper::MuonIdHelper(std::string logName): m_station_region_index(0), m_module_hash_max(0), m_channel_hash_max(0), m_detectorElement_hash_max(0), m_init(false) { @@ -45,12 +32,6 @@ MuonIdHelper::MuonIdHelper(std::string logName): m_station_region_index(0), m_mo m_Log = std::make_unique<MsgStream>(msgSvc, logName.empty() ? "MuonIdHelper" : logName); } -// Destructor - -MuonIdHelper::~MuonIdHelper() -{ -} - int MuonIdHelper::initialize_from_dictionary(const IdDictMgr& dict_mgr) { diff --git a/MuonSpectrometer/MuonIdHelpers/src/RpcIdHelper.cxx b/MuonSpectrometer/MuonIdHelpers/src/RpcIdHelper.cxx index da77bfb09a81df888984d1f0713ba848867dee9c..d8ba7bf39de2bd9f14a3fb21eda2704511a9791a 100644 --- a/MuonSpectrometer/MuonIdHelpers/src/RpcIdHelper.cxx +++ b/MuonSpectrometer/MuonIdHelpers/src/RpcIdHelper.cxx @@ -3,11 +3,7 @@ */ #include "MuonIdHelpers/RpcIdHelper.h" - -#include "GaudiKernel/ISvcLocator.h" -#include "GaudiKernel/Bootstrap.h" -#include "GaudiKernel/MsgStream.h" -#include "GaudiKernel/IMessageSvc.h" +#include "AthenaKernel/getMessageSvc.h" RpcIdHelper::RpcIdHelper(): MuonIdHelper("RpcIdHelper"), @@ -33,6 +29,7 @@ int RpcIdHelper::initialize_from_dictionary(const IdDictMgr& dict_mgr) } // init base object + AtlasDetectorID::setMessageSvc(Athena::getMessageSvc()); if(AtlasDetectorID::initialize_from_dictionary(dict_mgr)) return (1); // Register version of the MuonSpectrometer dictionary diff --git a/MuonSpectrometer/MuonIdHelpers/src/TgcIdHelper.cxx b/MuonSpectrometer/MuonIdHelpers/src/TgcIdHelper.cxx index 88bd80817a83f262702c951866a902b8cb80d5cd..629f1236b8e75899aa7753122b6a2d3712e845a0 100644 --- a/MuonSpectrometer/MuonIdHelpers/src/TgcIdHelper.cxx +++ 
b/MuonSpectrometer/MuonIdHelpers/src/TgcIdHelper.cxx @@ -1,41 +1,14 @@ /* - Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration */ -/** - * ============================================================================== - * ATLAS Muon Identifier Helpers Package - * ----------------------------------------- - * ============================================================================== - */ - -//<doc><file> $Id: TgcIdHelper.cxx,v 1.39 2009-01-20 22:44:13 kblack Exp $ -//<version> $Name: not supported by cvs2svn $ - -// Includes - #include "MuonIdHelpers/TgcIdHelper.h" - -#include "GaudiKernel/ISvcLocator.h" -#include "GaudiKernel/Bootstrap.h" -#include "GaudiKernel/MsgStream.h" -#include "GaudiKernel/IMessageSvc.h" - - -// Constructor/Destructor +#include "AthenaKernel/getMessageSvc.h" TgcIdHelper::TgcIdHelper() : MuonIdHelper("TgcIdHelper"), m_GASGAP_INDEX(0), m_ISSTRIP_INDEX(0) {} -// Destructor - -TgcIdHelper::~TgcIdHelper() -{ - // m_Log deleted in base class. 
-} - // Initialize dictionary - int TgcIdHelper::initialize_from_dictionary(const IdDictMgr& dict_mgr) { int status = 0; @@ -50,6 +23,7 @@ int TgcIdHelper::initialize_from_dictionary(const IdDictMgr& dict_mgr) } // init base object + AtlasDetectorID::setMessageSvc(Athena::getMessageSvc()); if(AtlasDetectorID::initialize_from_dictionary(dict_mgr)) return (1); // Register version of the MuonSpectrometer dictionary diff --git a/MuonSpectrometer/MuonIdHelpers/src/sTgcIdHelper.cxx b/MuonSpectrometer/MuonIdHelpers/src/sTgcIdHelper.cxx index 294533ef5cd5914aa3343bbc4ff0d5ecb77cec64..2d42c13ef4fc3770bf5592b462c5d4812b4bdd28 100644 --- a/MuonSpectrometer/MuonIdHelpers/src/sTgcIdHelper.cxx +++ b/MuonSpectrometer/MuonIdHelpers/src/sTgcIdHelper.cxx @@ -2,20 +2,8 @@ Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration */ -/** - * ============================================================================== - * ATLAS Muon Identifier Helpers Package - * ----------------------------------------- - * ============================================================================== - */ - - -// Includes #include "MuonIdHelpers/sTgcIdHelper.h" -#include "GaudiKernel/ISvcLocator.h" -#include "GaudiKernel/Bootstrap.h" -#include "GaudiKernel/MsgStream.h" -#include "GaudiKernel/IMessageSvc.h" +#include "AthenaKernel/getMessageSvc.h" /*******************************************************************************/ // Constructor/Destructor @@ -38,6 +26,7 @@ int sTgcIdHelper::initialize_from_dictionary(const IdDictMgr& dict_mgr) { } // init base object + AtlasDetectorID::setMessageSvc(Athena::getMessageSvc()); if(AtlasDetectorID::initialize_from_dictionary(dict_mgr)) return (1); // Register version of the MuonSpectrometer dictionary diff --git a/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackFinderTools/src/MuonSegmentRegionRecoveryTool.h 
b/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackFinderTools/src/MuonSegmentRegionRecoveryTool.h index f45b7fc80096cd4e2d38bc189eb1e5f42ea2b6d7..01a31046769023e181d910f424aa721a007dec7d 100644 --- a/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackFinderTools/src/MuonSegmentRegionRecoveryTool.h +++ b/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackFinderTools/src/MuonSegmentRegionRecoveryTool.h @@ -110,15 +110,15 @@ namespace Muon { virtual ~MuonSegmentRegionRecoveryTool () = default; /** @brief AlgTool initialize */ - StatusCode initialize(); + virtual StatusCode initialize() override; /** @brief AlgTool finalize */ - StatusCode finalize(); + virtual StatusCode finalize() override; /** @brief returns a new track with segments recovered using RegionSelector*/ - Trk::Track* recover( const Trk::Track& track ) const; + virtual Trk::Track* recover( const Trk::Track& track ) const override; - void cleanUp() const override; + virtual void cleanUp() const override; private: diff --git a/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackSteeringTools/src/MooCandidateMatchingTool.h b/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackSteeringTools/src/MooCandidateMatchingTool.h index bda954a90c03799f6b0bd3aaad6c1908c2ecbb1f..ace936628b1fa5100afd164e3146a57d5658b45a 100644 --- a/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackSteeringTools/src/MooCandidateMatchingTool.h +++ b/MuonSpectrometer/MuonReconstruction/MuonTrackMakers/MuonTrackMakerTools/MuonTrackSteeringTools/src/MooCandidateMatchingTool.h @@ -68,13 +68,13 @@ namespace Muon { MooCandidateMatchingTool(const std::string&, const std::string&, const IInterface*); /** destructor */ - ~MooCandidateMatchingTool(); + virtual ~MooCandidateMatchingTool(); /** initialize method, method taken from bass-class AlgTool */ - StatusCode 
initialize(); + virtual StatusCode initialize() override; /** finialize method, method taken from bass-class AlgTool */ - StatusCode finalize(); + virtual StatusCode finalize() override; /** @brief access to tool interface */ static const InterfaceID& interfaceID() { return IID_MooCandidateMatchingTool; } @@ -89,7 +89,7 @@ namespace Muon { bool match( const MuPatCandidateBase& entry1, const MuPatSegment& entry2, bool useTightCuts = false ) const; /** @brief match a track with a segment */ - bool match( const Trk::Track& track, const MuonSegment& segment, bool useTightCuts ) const; + virtual bool match( const Trk::Track& track, const MuonSegment& segment, bool useTightCuts ) const override; /** @brief calculate the info needed for the matching decision */ void calculateTrackSegmentMatchResult( const MuPatTrack& entry1, const MuPatSegment& entry2, MooTrackSegmentMatchResult& info ) const; diff --git a/PhysicsAnalysis/AnalysisCommon/HDF5Utils/CMakeLists.txt b/PhysicsAnalysis/AnalysisCommon/HDF5Utils/CMakeLists.txt index 6f55e05d3ffb2215cddd37e9d8a7e9bec879632b..505f9ad07e01bf8860e20eb20ef8b50f5d4ffb43 100644 --- a/PhysicsAnalysis/AnalysisCommon/HDF5Utils/CMakeLists.txt +++ b/PhysicsAnalysis/AnalysisCommon/HDF5Utils/CMakeLists.txt @@ -1,14 +1,16 @@ +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration + # Set the project's name and version. atlas_subdir( HDF5Utils ) # Grab HDF5 from AnalysisBaseExternals. 
-find_package( HDF5 1.10.1 REQUIRED COMPONENTS C CXX ) +find_package( HDF5 1.10.1 COMPONENTS CXX ) # find root -find_package(ROOT REQUIRED COMPONENTS RIO Hist Tree Net Core TreePlayer) +find_package(ROOT COMPONENTS RIO Hist Tree Net Core TreePlayer) # find boost -find_package( Boost 1.54.0 REQUIRED COMPONENTS program_options) +find_package( Boost 1.54.0 COMPONENTS program_options ) # Add the hdf tuple library atlas_add_library(HDF5Utils @@ -18,8 +20,9 @@ atlas_add_library(HDF5Utils Root/IH5Merger.cxx Root/MergeUtils.cxx PUBLIC_HEADERS HDF5Utils - INCLUDE_DIRS ${HDF5_INCLUDE_DIRS} - LINK_LIBRARIES ${HDF5_LIBRARIES}) + DEFINITIONS ${HDF5_CXX_DEFINITIONS} + INCLUDE_DIRS ${HDF5_CXX_INCLUDE_DIRS} + LINK_LIBRARIES ${HDF5_CXX_LIBRARIES}) # build a translation utility set( _exe_sources @@ -29,15 +32,17 @@ set( _exe_sources util/ttree2hdf5.cxx) atlas_add_executable(ttree2hdf5 ${_exe_sources} - INCLUDE_DIRS ${ROOT_INCLUDE_DIRS} util ${Boost_INCLUDE_DIRS} ${HDF5_INCLUDE_DIRS} - LINK_LIBRARIES HDF5Utils ${Boost_LIBRARIES} ${ROOT_LIBRARIES} ${HDF5_LIBRARIES} ) + DEFINITIONS ${HDF5_CXX_DEFINITIONS} + INCLUDE_DIRS ${ROOT_INCLUDE_DIRS} util ${Boost_INCLUDE_DIRS} ${HDF5_CXX_INCLUDE_DIRS} + LINK_LIBRARIES HDF5Utils ${Boost_LIBRARIES} ${ROOT_LIBRARIES} ${HDF5_CXX_LIBRARIES} ) unset(_exe_sources) # add the merge utility atlas_add_executable( hdf5-merge util/hdf5-merge.cxx - INCLUDE_DIRS ${Boost_INCLUDE_DIRS} ${HDF5_INCLUDE_DIRS} - LINK_LIBRARIES HDF5Utils ${Boost_LIBRARIES} ${HDF5_LIBRARIES} ) + DEFINITIONS ${HDF5_CXX_DEFINITIONS} + INCLUDE_DIRS ${Boost_INCLUDE_DIRS} ${HDF5_CXX_INCLUDE_DIRS} + LINK_LIBRARIES HDF5Utils ${Boost_LIBRARIES} ${HDF5_CXX_LIBRARIES} ) atlas_install_scripts( bin/hdf5-merge-nolock ) diff --git a/PhysicsAnalysis/AnalysisCommon/ParticleJetTools/CMakeLists.txt b/PhysicsAnalysis/AnalysisCommon/ParticleJetTools/CMakeLists.txt index c6037c209eb88a409839fb44ed9c6389efe4b50c..d8f44123c870aa9c8e97457b93661e8c792695f3 100644 --- 
a/PhysicsAnalysis/AnalysisCommon/ParticleJetTools/CMakeLists.txt +++ b/PhysicsAnalysis/AnalysisCommon/ParticleJetTools/CMakeLists.txt @@ -3,6 +3,8 @@ # Declare the package name: atlas_subdir( ParticleJetTools ) +find_package( ROOT ) + # Component(s) in the package: if( XAOD_STANDALONE ) atlas_add_library( ParticleJetToolsLib diff --git a/PhysicsAnalysis/BPhys/BPhysTools/BPhysTools/BPhysBlindingTool.h b/PhysicsAnalysis/BPhys/BPhysTools/BPhysTools/BPhysBlindingTool.h new file mode 100644 index 0000000000000000000000000000000000000000..c7aed623cecbc41d57615896170a84ec1a8dd296 --- /dev/null +++ b/PhysicsAnalysis/BPhys/BPhysTools/BPhysTools/BPhysBlindingTool.h @@ -0,0 +1,278 @@ +// Dear emacs, this is -*- c++ -*- + +/* + Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration +*/ + +/** + * @file BPhysBlindingTool.h + * @author Wolfgang Walkowiak <Wolfgang.Walkowiak@cern.ch> + * + * @brief Dual-use tool for blinding and unblinding certain float values + */ + +// $Id: $ +#ifndef BPHYSTOOLS_BPHYSBLINDINGTOOL_H +#define BPHYSTOOLS_BPHYSBLINDINGTOOL_H + +// Framework includes +#include "AsgTools/AsgTool.h" + +// System include(s): +#include <memory> + +// Local includes +#include "BPhysTools/IBPhysBlindingTool.h" +#include "BPhysTools/SimpleEncrypter.h" + + +// EDM includes +#include "xAODTracking/VertexAuxContainer.h" + +namespace xAOD { + /// + /// @class BPhysBlindingToll + /// @author Wolfgang Walkowiak <Wolfgang.Walkowiak@cern.ch> + /// + /// Dual-use tool for blinding and unblinding certain float values + /// provided as variables in a container. + /// + /// This tool can be used in two ways: + /// 1. As a tool to blind or unblind arbitrary positive float values + /// using doBlind(float val) or doUnblind(float val). + /// For this mode to work only the corresponding key + /// (JO BlindingKey or UnblindingKey) needs to be set. + /// 2. As a tool to blind a list of variables (VarToBlindNames) + /// for a certain xAOD::VertexContainer's vertices. 
+ /// Since this only works for positive float values, + /// an the values may be scaled by a factor and an + /// offset may be added to the values, specified separately for + /// each variable (BlindingFactors, BlindingOffsets). + /// In addition a negative sign may be added to the resulting blinded + /// value (NegativeSigns) as a convenience. + /// If this tool is used for unblinding the same values for + /// BlindingFactors, BlindingOffsets and NegativeSigns need to be + /// provided. + /// Depending on the mode, the BlindingKey or Unblindingkey need + /// to be set. + /// + /// @Note: Key pairs may be produced using the createBlindingKeys + /// utility. + /// + /// Job options: + /// - BlindingKey + /// Hex string providing the (public) blinding key. + /// - UnblindingKey + /// Hex string providing the (private) unblinding key. + /// - VertexContainerName + /// Name of the vertex container to be used + /// - BlindingFlag + /// Flag to indicate candidates for blinding ("pass_XXXX") + /// Blind values for all candidates if empty. + /// - VarToBlindNames + /// String with list of float variables to blind (delimiter: .) + /// - BlindingOffsets + /// Offsets applied to values before blinding + /// List must have same length as VarToBlindNames or zero. + /// - BlindingFactors + /// Scale factors applied before blinding + /// List must have same length as VarToBlindNames or zero. + /// - NegativeSigns + /// Flip signs to negative range? + /// List must have same length as VarToBlindNames or zero. 
+ /// + /// + class BPhysBlindingTool : + public asg::AsgTool, virtual public xAOD::IBPhysBlindingTool { + + /// Declare the correct constructor for Athena + ASG_TOOL_CLASS( BPhysBlindingTool, xAOD::IBPhysBlindingTool ) + + public: + /// + /// @brief Regular AsgTool constructor + BPhysBlindingTool(const std::string& name = "BPhysBlindingTool"); + /// + /// @brief Method initialising the tool + virtual StatusCode initialize() override; + + /// + /// @brief Method finalizing the tool + virtual StatusCode finalize() override; + + /// + /// @brief Simply blind one positive float value + /// + /// @param[in] val : positive float value to blind. + /// + /// @returns Blinded positive float value; same value as input on error. + virtual float doBlind(const float& val) override; + + /// + /// @name Methods to be called by user classes + /// @{ + /// + /// @brief Simply unblind one positive float value + /// + /// @param[in] val : Blinded positive float value. + /// + /// @returns Unblinded positive float value; same value as input on error. + virtual float doUnblind(const float& val) override; + + /// + /// @brief Simply blind one (positive) float value with corrections + /// + /// @param[in] val : float value to blind. + /// @param[in] negativeSign : flip sign after blinding + /// @param[in] offset : before blinding, shift by offset + /// @param[in] factor : before blinding, stretch by factor + /// + /// @returns Blinded float value; same value as input on error. + virtual float doBlind(const float& val, + const bool& negativeSign, + const float& offset, + const float& factor) override; + + /// + /// @name Methods to be called by user classes + /// @{ + /// + /// @brief Simply unblind one (positive) float value with corrections + /// + /// @param[in] val : Blinded float value. 
+ /// @param[in] negativeSign : flip sign before unblinding + /// @param[in] offset : after unblinding, shift by offset + /// @param[in] factor : after unblinding, stretch by factor + /// + /// @returns Unblinded positive float value; same value as input on error. + virtual float doUnblind(const float& val, + const bool& negativeSign, + const float& offset, + const float& factor) override; + + /// + /// @brief Perform blinding of requested variables + virtual StatusCode doBlind() override; + + /// + /// @brief Perform unblinding of requested variables + virtual StatusCode doUnblind() override; + + protected: + /// + /// @name Perform blinding or unblinding action + /// + virtual StatusCode doBlindingAction(bool unblind=false); + /// + /// @name Utility methods + /// @{ + /// + /// @brief Check whether an element is marked as passing a hypothesis. + /// + virtual bool pass(const SG::AuxElement& em, std::string hypo); + + /// + /// @brief Tokenize a string using certain separators + /// + virtual std::vector<std::string> getTokens(std::string input, + std::string seperators); + /// + /// @brief Convert vector of floats to string + /// + virtual std::string vecToString(const std::vector<float>& v) const; + /// + /// @brief Convert vector of bools to string + /// + virtual std::string vecToString(const std::vector<bool>& v) const; + /// + /// @name Cache current event. + /// + virtual StatusCode cacheEvent(); + /// @} + + protected: + /// + /// @name Job options + /// @{ + /// + /// @brief Vertex container name + std::string m_vertexContainerName; + /// + /// @brief List of variables to blind + /// + /// (as concatenated string using . as delimiter) + std::string m_varToBlindNames; + /// + /// @brief Flag to indicate candidates for blinding + /// + /// Left empty: Blind values for all candidates. + std::string m_blindingFlag; + /// + /// @brief Offsets applied to values before blinding + /// + /// List must have same length as VarToBlindNames or zero. 
+ /// Applied before blinding/after unblinding. + std::vector<float> m_vOffsets; + /// + /// @brief Scale factors applied before blinding + /// + /// List must have same length as VarToBlindNames or zero. + /// Applied before blinding/after unblinding. + std::vector<float> m_vFactors; + /// + /// @brief Flip signs to negative range? + /// + /// List must have same length as VarToBlindNames or zero. + /// Applied after blinding/before unblinding. + std::vector<bool> m_vNegSigns; + /// + /// @brief Key for blinding + std::string m_blindKey; + /// + /// @brief Key for unblinding + std::string m_unblindKey; + /// @} + + /// + /// @name Containers + /// @{ + xAOD::VertexContainer* m_vtxContainer; //! + xAOD::VertexAuxContainer* m_vtxAuxContainer; //! + /// @} + + /// + /// @name Event caching + /// @{ + int m_cachedRun; //! + int m_cachedEvent; //! + /// @} + + /// + /// + /// @name Counters + /// @{ + long m_eventsForBlindingSeen; //! + long m_candidatesForBlindingSeen; //! + long m_eventsForUnblindingSeen; //! + long m_candidatesForUnblindingSeen; //! + long m_eventsBlinded; //! + long m_candidatesBlinded; //! + long m_eventsUnblinded; //! + long m_candidatesUnblinded; //! + /// @} + private: + /// + /// @brief Vector of variable names + /// + std::vector<std::string> m_vVarNames; //! + + /// + /// @brief Instance of SimpleEncrypter + /// + SimpleEncrypter m_senc; //! + + }; // class BPhysBlindingTool + +} // namespace xAOD + +#endif // BPHYSTOOLS_BPHYSBLINDINGTOOL_H diff --git a/PhysicsAnalysis/BPhys/BPhysTools/BPhysTools/BPhysToolsDict.h b/PhysicsAnalysis/BPhys/BPhysTools/BPhysTools/BPhysToolsDict.h new file mode 100644 index 0000000000000000000000000000000000000000..bf760bccff1e8bf495f39c86da2d5871e58b8e37 --- /dev/null +++ b/PhysicsAnalysis/BPhys/BPhysTools/BPhysTools/BPhysToolsDict.h @@ -0,0 +1,16 @@ +// This file's extension implies that it's C, but it's really -*- C++ -*-. 
+ +/* + Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration +*/ + +/** + * @file BPhysTools/BPhysToolsDict.h + * @author Wolfgang Walkowiak <wolfgang.walkowiak@cern.ch> + * @date Mar 2018 + * @brief Dictionary header for BPhysTools. + */ + +#include "BPhysTools/BPhysBlindingTool.h" +#include "BPhysTools/BPhysTrackVertexMapTool.h" +#include "BPhysTools/SimpleEncrypter.h" diff --git a/PhysicsAnalysis/BPhys/BPhysTools/BPhysTools/BPhysTrackVertexMapTool.h b/PhysicsAnalysis/BPhys/BPhysTools/BPhysTools/BPhysTrackVertexMapTool.h new file mode 100644 index 0000000000000000000000000000000000000000..597539c71b99b2ce32f03375ea48a80f54289641 --- /dev/null +++ b/PhysicsAnalysis/BPhys/BPhysTools/BPhysTools/BPhysTrackVertexMapTool.h @@ -0,0 +1,196 @@ +// Dear emacs, this is -*- c++ -*- + +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ + +// $Id: $ +#ifndef BPHYSTOOLS_BPHYSTRACKVERTEXMAPTOOL_H +#define BPHYSTOOLS_BPHYSTRACKVERTEXMAPTOOL_H + +// Framework includes +#include "BPhysTools/IBPhysTrackVertexMapTool.h" +#include "AsgTools/AsgTool.h" + +// System include(s): +#include <memory> + +// EDM includes +#include "xAODTracking/TrackParticleAuxContainer.h" +#include "xAODTracking/VertexAuxContainer.h" + +namespace xAOD { + /// + /// Dual-use tool createing a track-to-vertex map from + /// the vertex-to-track information. + /// + /// Job options provided by this class: + /// - VertexContainerName -- name of container for secondary vertices + /// - RefPVContainerName -- name of container for refitted PVs + /// - PVContainerName -- name of container for primary vertices + /// - TrackParticleContainerName -- name of container for TrackParticles + /// - DebugTrkToVtxMaxEvents -- Maximum number of events to produce + /// detailed log output for the + /// track-to-vertex association maps. + /// Set to -1 for infinity. + /// - DumpPrefix -- Line prefix for log dump lines. + /// - HypoName -- Hypothesis name + /// (for picking up inv. 
mass values) + /// May be a set of hypo names to be + /// tested, delimited by '|'. + /// + /// @author Wolfgang Walkowiak <Wolfgang.Walkowiak@cern.ch> + /// + /// $Revision:$ + /// $Date: $ + /// + class BPhysTrackVertexMapTool : + public asg::AsgTool, virtual public xAOD::IBPhysTrackVertexMapTool { + + /// Declare the correct constructor for Athena + ASG_TOOL_CLASS( BPhysTrackVertexMapTool, xAOD::IBPhysTrackVertexMapTool ) + + public: + /// Regular AsgTool constructor + BPhysTrackVertexMapTool(const std::string& name = + "BPhysTrackVertexMapTool"); + + /// Function initialising the tool + virtual StatusCode initialize() override; + + /// Function being excuted for each event + virtual StatusCode logEvent() override; + + /// Function finalizing the tool + virtual StatusCode finalize() override; + + /// Function indicating whether log counter allows logging of current event + virtual bool doLog() const override; + + /// convenience method to wrap output lines by a prefix + static std::string wrapLines(std::string lines, std::string prefix); + + protected: + /// @name Functions to be called by user classes + /// @{ + /// fill cache for current event + virtual StatusCode cacheEvent() override; + + /// obtain primary vertices for a given ID track (may return empty vector) + virtual std::vector<const xAOD::Vertex*> + pvsForIDTrack(const xAOD::TrackParticle* track) const override; + + /// obtain refitted primary vertices for a given ID track + /// (may return empty vector) + virtual std::vector<const xAOD::Vertex*> + refPVsForIDTrack(const xAOD::TrackParticle* track) const override; + + /// obtain secondary vertices for a given ID track (may return empty vector) + virtual std::vector<const xAOD::Vertex*> + svsForIDTrack(const xAOD::TrackParticle* track) const override; + + // track-vertex association related + virtual std::string idTrackToString(const xAOD::TrackParticle* track, + unsigned int indent=0, + bool withPV=false, + bool withRefPV=false, + bool withSV=false) 
override; + + virtual std::string pvToString(const xAOD::Vertex* vtx, + unsigned int indent=0, + bool withTracks=false) override; + + virtual std::string refPVToString(const xAOD::Vertex* vtx, + unsigned int indent=0, + bool withTracks=false) override; + virtual std::string svToString(const xAOD::Vertex* vtx, + unsigned int indent=0, + bool withTracks=false, + bool withMasses=false) override; + virtual std::string idTracksToString(const xAOD::TrackParticleContainer* + tpc, + unsigned int indent=0, + bool withPV=false, + bool withRefPV=false, + bool withSV=false) override; + + virtual std::string pvsToString(const xAOD::VertexContainer* pvc, + unsigned int indent=0, + bool withTracks=false) override; + virtual std::string refPVsToString(const xAOD::VertexContainer* rpvc, + unsigned int indent=0, + bool withTracks=false) override; + virtual std::string svsToString(const xAOD::VertexContainer* svc, + unsigned int indent=0, + bool withTracks=false, + bool withMasses=false) override; + virtual std::string summaryToString(std::string prefix) override; + /// @} + + protected: + virtual float getFloat(std::string name, const xAOD::Vertex* b); + + virtual std::vector<std::string> getTokens(std::string input, + std::string seperators); + + + private: + // track-vertex association related + typedef std::map<const xAOD::TrackParticle*, + std::vector<const xAOD::Vertex*> > TrackToVertexMap_t; + + virtual void initTrackVertexMaps(const xAOD::TrackParticleContainer* tpc, + const xAOD::VertexContainer* pvc, + const xAOD::VertexContainer* rpvc, + const xAOD::VertexContainer* svc); + virtual void addVertexToTrackVertexMap(TrackToVertexMap_t& map, + const xAOD::TrackParticle* track, + const xAOD::Vertex* vtx); + virtual std::string pvName(const xAOD::Vertex* vtx); + virtual std::string refPVName(const xAOD::Vertex* vtx); + virtual std::string svName(const xAOD::Vertex* vtx); + virtual std::string idTrackName(const xAOD::TrackParticle* vtx); + + protected: + // job options + 
std::string m_vertexContainerName; + std::string m_refPVContainerName; + std::string m_pvContainerName; + std::string m_trackParticleContainerName; + int m_debugTrkToVtxMaxEvents; + std::string m_dumpPrefix; + std::string m_hypoName; + + // containers + const xAOD::TrackParticleContainer* m_tracks; + const xAOD::TrackParticleAuxContainer* m_tracksAux; + const xAOD::VertexContainer* m_pvtxContainer; + const xAOD::VertexContainer* m_svtxContainer; + const xAOD::VertexAuxContainer* m_svtxAuxContainer; + const xAOD::VertexContainer* m_refPVContainer; + const xAOD::VertexAuxContainer* m_refPVAuxContainer; + + unsigned int m_nEvtsSeen; + + int m_cachedRun; + int m_cachedEvent; + + private: + // track-vertex association related + typedef std::map<const xAOD::Vertex*, std::string> VertexNameMap_t; + VertexNameMap_t m_pvNameMap; + VertexNameMap_t m_refPVNameMap; + VertexNameMap_t m_svNameMap; + + typedef std::map<const xAOD::TrackParticle*, std::string> TrackNameMap_t; + TrackNameMap_t m_idTrackNameMap; + + TrackToVertexMap_t m_idTrackToPVMap; + TrackToVertexMap_t m_idTrackToRefPVMap; + TrackToVertexMap_t m_idTrackToSVMap; + + }; // class BPhysTrackVertexMapTool + +} // namespace xAOD + +#endif // BPHYSTOOLS_BPHYSTRACKVERTEXMAPTOOL_H diff --git a/PhysicsAnalysis/BPhys/BPhysTools/BPhysTools/IBPhysBlindingTool.h b/PhysicsAnalysis/BPhys/BPhysTools/BPhysTools/IBPhysBlindingTool.h new file mode 100644 index 0000000000000000000000000000000000000000..8891bb12cd779c1b8f1fc28ea5e637d50f851816 --- /dev/null +++ b/PhysicsAnalysis/BPhys/BPhysTools/BPhysTools/IBPhysBlindingTool.h @@ -0,0 +1,79 @@ +// Dear emacs, this is -*- c++ -*- + +/* + Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration +*/ + +/** + * @file IBPhysBlindingTool.h + * @author Wolfgang Walkowiak <Wolfgang.Walkowiak@cern.ch> + * + * @brief Interface for dual-use tool for (un-)blinding of float values. 
+ */ + +#ifndef BPHYSTOOLS_IBPHYSBLINDINGTOOL_H +#define BPHYSTOOLS_IBPHYSBLINDINGTOOL_H + +// Framework includes +#include "AsgTools/IAsgTool.h" + +// System include(s): +#include <string> +#include <vector> + +// EDM includes +#include "xAODTracking/VertexContainer.h" + +namespace xAOD { + /// + /// Interface for dual-use tool for blinding and unblinding + /// certain float values provided as variables in a container. + /// + /// @author Wolfgang Walkowiak <Wolfgang.Walkowiak@cern.ch> + /// + /// + class IBPhysBlindingTool : virtual public asg::IAsgTool { + + public: + /// Declare the correct interface for Athena + ASG_TOOL_INTERFACE( xAOD::IBPhysBlindingTool ) + + /// @ brief Function finalizing the tool + virtual StatusCode finalize() = 0; + + /// + /// @name Methods to be called by user classes + /// @{ + /// + /// @brief Simply blind one positive float value + virtual float doBlind(const float& val) = 0; + + /// + /// @brief Simply unblind one positive float value + virtual float doUnblind(const float& val) = 0; + + /// + /// @brief Simply blind one (positive) float value with corretions + virtual float doBlind(const float& val, const bool& negativeSign, + const float& offset, const float& factor) = 0; + + /// + /// @brief Simply unblind one (positive) float value with corrections + virtual float doUnblind(const float& val, const bool& negativeSign, + const float& offset, const float& factor) = 0; + + /// + /// @brief Perform blinding of requested variables + virtual StatusCode doBlind() = 0; + + /// + /// @brief Perform unblinding of requested variables + virtual StatusCode doUnblind() = 0; + + /// @} + + }; // class IBPhysBlindingTool + +} // namespace xAOD + +#endif // BPHYSTOOLS_IBPHYSBLINDINGTOOL_H diff --git a/PhysicsAnalysis/BPhys/BPhysTools/BPhysTools/IBPhysTrackVertexMapTool.h b/PhysicsAnalysis/BPhys/BPhysTools/BPhysTools/IBPhysTrackVertexMapTool.h new file mode 100644 index 
0000000000000000000000000000000000000000..bb0857657df6ce4aa38b26679795f5ac70a10ee7 --- /dev/null +++ b/PhysicsAnalysis/BPhys/BPhysTools/BPhysTools/IBPhysTrackVertexMapTool.h @@ -0,0 +1,109 @@ +// Dear emacs, this is -*- c++ -*- + +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ + +// $Id: $ +#ifndef BPHYSTOOLS_IBPHYSTRACKVERTEXMAPTOOL_H +#define BPHYSTOOLS_IBPHYSTRACKVERTEXMAPTOOL_H + +// Framework includes +#include "AsgTools/IAsgTool.h" + +// System include(s): +#include <string> +#include <memory> +#include <vector> + +// EDM includes +#include "xAODTracking/TrackParticleContainer.h" +#include "xAODTracking/VertexContainer.h" + +namespace xAOD { + /// + /// Interface for dual-use tool createing a track-to-vertex map from + /// the vertex-to-track information. + /// + /// @author Wolfgang Walkowiak <Wolfgang.Walkowiak@cern.ch> + /// + /// $Revision:$ + /// $Date: $ + /// + class IBPhysTrackVertexMapTool : virtual public asg::IAsgTool { + + public: + /// Declare the correct interface for Athena + ASG_TOOL_INTERFACE( xAOD::IBPhysTrackVertexMapTool ) + + /// Function being excuted for each event + virtual StatusCode logEvent() = 0; + + /// Function finalizing the tool + virtual StatusCode finalize() = 0; + + /// Function indicating whether log counter allows logging of current event + virtual bool doLog() const = 0; + + public: + /// @name Functions to be called by user classes + /// @{ + /// fill cache for current event + virtual StatusCode cacheEvent() = 0; + + /// obtain primary vertices for a given ID track (may return empty vector) + virtual std::vector<const xAOD::Vertex*> + pvsForIDTrack(const xAOD::TrackParticle* track) const = 0; + + /// obtain refitted primary vertices for a given ID track + /// (may return empty vector) + virtual std::vector<const xAOD::Vertex*> + refPVsForIDTrack(const xAOD::TrackParticle* track) const = 0; + + /// obtain secondary vertices for a given ID track (may return empty vector) + virtual 
std::vector<const xAOD::Vertex*> + svsForIDTrack(const xAOD::TrackParticle* track) const = 0; + + // track-vertex association related + virtual std::string idTrackToString(const xAOD::TrackParticle* track, + unsigned int indent=0, + bool withPV=false, + bool withRefPV=false, + bool withSV=false) = 0; + + virtual std::string pvToString(const xAOD::Vertex* vtx, + unsigned int indent=0, + bool withTracks=false) = 0; + + virtual std::string refPVToString(const xAOD::Vertex* vtx, + unsigned int indent=0, + bool withTracks=false) = 0; + virtual std::string svToString(const xAOD::Vertex* vtx, + unsigned int indent=0, + bool withTracks=false, + bool withMasses=false) = 0; + virtual std::string idTracksToString(const xAOD::TrackParticleContainer* + tpc, + unsigned int indent=0, + bool withPV=false, + bool withRefPV=false, + bool withSV=false) = 0; + + virtual std::string pvsToString(const xAOD::VertexContainer* pvc, + unsigned int indent=0, + bool withTracks=false) = 0; + virtual std::string refPVsToString(const xAOD::VertexContainer* rpvc, + unsigned int indent=0, + bool withTracks=false) =0; + virtual std::string svsToString(const xAOD::VertexContainer* svc, + unsigned int indent=0, + bool withTracks=false, + bool withMasses=false) = 0; + virtual std::string summaryToString(std::string prefix) = 0; + /// @} + + }; // class IBPhysTrackVertexMapTool + +} // namespace xAOD + +#endif // BPHYSTOOLS_IBPHYSTRACKVERTEXMAPTOOL_H diff --git a/PhysicsAnalysis/BPhys/BPhysTools/BPhysTools/SimpleEncrypter.h b/PhysicsAnalysis/BPhys/BPhysTools/BPhysTools/SimpleEncrypter.h new file mode 100644 index 0000000000000000000000000000000000000000..59c351aa638a78e74815fb032dfc2ace646923ec --- /dev/null +++ b/PhysicsAnalysis/BPhys/BPhysTools/BPhysTools/SimpleEncrypter.h @@ -0,0 +1,244 @@ +// Dear emacs, this is -*- c++ -*- + +/* + Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration +*/ + +/** + * @file SimpleEncrypter.h + * @author Wolfgang Walkowiak 
<Wolfgang.Walkowiak@cern.ch> + * + * @brief Provide simple asymmetric encryption for blinding of float values. + */ + +#ifndef BPHYSTOOLS_SIMPLEENCRYPTER_H +#define BPHYSTOOLS_SIMPLEENCRYPTER_H + +// Framework includes +#include "AsgMessaging/AsgMessaging.h" + +// System includes +#include <string> +#include <set> + +namespace xAOD { + /// + /// @class SimpleEncrypter + /// @author Wolfgang Walkowiak <Wolfgang.Walkowiak@cern.ch.> + /// + /// @brief Provide simple asymmetric encryption for blinding of float values. + /// + /// Provides asymmetric key encryption for blinding of positive float + /// values. Internally it uses a simple RSA encryption of bits in + /// the floating point numbers. + /// This class is used by the BPhysBlindingTool. + /// + class SimpleEncrypter : public asg::AsgMessaging { + + public: + /// @brief Useful typedefs + typedef long long int LLI_t; + typedef unsigned long long int ULLI_t; + + /// + /// @brief Main constructor + /// + /// @param[in] name of instance + /// + SimpleEncrypter(const std::string& name = "SimpleEncrypter"); + + /// + /// @brief Default destructor + /// + virtual ~SimpleEncrypter(); + + /// + /// @brief Generate private and public keys + /// + /// @returns key pair as string: [private key, public key] + /// + virtual std::pair<std::string, std::string> genKeyPair(); + + /// + /// @brief Set private key + /// + /// @param[in] hex string with private key + /// + virtual void setPrivKey(std::string keystr); + + /// + /// @brief Set public key + /// + /// @param[in] hex string with public key + /// + virtual void setPubKey(std::string keystr); + + /// + /// @brief Get private key + /// + /// @returns hex string with private key + /// + virtual std::string getPrivKey() const; + + /// + /// @brief Get public key + /// + /// @returns hex string with public key + /// + virtual std::string getPubKey() const; + + /// + /// @brief Encrypt a positive integer value + /// + /// @param[in] unsigned integer value to be encrypted + /// 
+ /// @returns encrypted unsigned integer value + /// + virtual ULLI_t encrypt(ULLI_t x); + + /// + /// @brief Decrypt a positive integer value + /// + /// @param[in] unsigned integer value to be decrypted + /// + /// @returns encrypted unsigned integer value + /// + virtual ULLI_t decrypt(ULLI_t x); + + /// + /// @brief Encrypt a positive float value + /// + /// @param[in] positive float value to be encrypted + /// + /// @returns encrypted float value + /// + virtual float encrypt(float x); + + /// + /// @brief Decrypt a positive float value + /// + /// @param[in] positive float value to be decrypted + /// + /// @returns encrypted float value + /// + virtual float decrypt(float x); + + + private: + /// + /// @name Key generation utilities + /// @{ + /// + /// @brief Internally generate numeric representation of key pair + /// + virtual void genKeyPairInternal(); + /// + /// @brief Find a prime number + /// + virtual ULLI_t genPrime() const; + /// + /// @brief Check for being a prime number + /// + virtual bool isPrime(ULLI_t n) const; + /// + /// @brief Find greatest common denominator + /// + virtual ULLI_t greatestCommonDenominator(ULLI_t n1, ULLI_t n2) const; + /// + /// @brief Find a coprime number + /// + virtual ULLI_t genCoprime(ULLI_t n) const; + /// + /// @brief Find decryption exponent + /// + virtual ULLI_t genDecryptionExponent(ULLI_t phi, ULLI_t e) const; + /// + /// @} + /// + /// @name Key conversion utilities + /// @{ + /// + /// @brief Convert key to hex string + /// + virtual std::string keyToString(ULLI_t a, ULLI_t b) const; + /// + /// @brief Decode hex string to two integers + /// + virtual std::pair<ULLI_t, ULLI_t> decodeKeyString(std::string str) const; + /// @} + /// + /// @name float <-> int conversion utilities + /// @{ + /// + /// @brief Interpret bits of floating point number as integer + /// + virtual ULLI_t floatBitsToInt(float val) const; + /// + /// @brief Interpret bits of integer as floating point number + /// + virtual float 
intBitsToFloat(ULLI_t val) const; + /// @} + /// + /// @name Internal en-/decryption methods + /// @{ + /// + /// @brief Encrypt using format preserving encryption w.r.t. RSA modulus + /// + ULLI_t encryptFPECycle(ULLI_t a) const; + /// + /// @brief Decrypt using format preserving encryption w.r.t. RSA modulus + /// + ULLI_t decryptFPECycle(ULLI_t a) const; + /// + /// @brief Encrypt integer (internal) + /// + ULLI_t encryptInternal(ULLI_t x) const; + /// + /// @brief Decrypt integer (internal) + /// + ULLI_t decryptInternal(ULLI_t x) const; + /// + /// @brief Exponentiate a with d observing modulus n + /// + ULLI_t powerMod(ULLI_t a, ULLI_t d, ULLI_t n) const; + /// + /// @brief Check setup readiness for encryption + /// + bool isOkForEnc(); + /// + /// @brief Check setup readiness for decryption + /// + bool isOkForDec(); + /// + /// @} + + private: + /// + /// @name Internal static consts + /// + /// @brief Approximate range for prime numbers to be generated in + static const ULLI_t m_MAXRANGE; + static const ULLI_t m_MINRANGE; + /// @brief maximum number of hex digits for key parts + static const unsigned int m_MAXHEXDIGITS; + + /// + /// @name Internal member variables + /// + /// RSA modulus: common part of both keys + ULLI_t m_n; + /// encryption exponent: public key part II + ULLI_t m_e; + /// decryption exponent: private key part II + ULLI_t m_d; + + /// indicates that keys are set and range checks are ok + bool m_isOkForEnc; + bool m_isOkForDec; + + }; // class + +} // namespace xAOD + +#endif // BPHYSTOOLS_SIMPLEENCRYPTER_H + diff --git a/PhysicsAnalysis/BPhys/BPhysTools/BPhysTools/selection.xml b/PhysicsAnalysis/BPhys/BPhysTools/BPhysTools/selection.xml new file mode 100644 index 0000000000000000000000000000000000000000..a6539c15eec73ab662301f22499c834b31880ede --- /dev/null +++ b/PhysicsAnalysis/BPhys/BPhysTools/BPhysTools/selection.xml @@ -0,0 +1,10 @@ +<!-- + Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration +--> + +<lcgdict> + 
<!-- BPhysTools --> + <class name="xAOD::BPhysBlindingTool" /> + <class name="xAOD::BPhysTrackVertexMapTool" /> + <class name="xAOD::SimpleEncrypter" /> +</lcgdict> diff --git a/PhysicsAnalysis/BPhys/BPhysTools/CMakeLists.txt b/PhysicsAnalysis/BPhys/BPhysTools/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..4229cef3b69381f839c1ccb4ecedc06e2f53274c --- /dev/null +++ b/PhysicsAnalysis/BPhys/BPhysTools/CMakeLists.txt @@ -0,0 +1,84 @@ +# +# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration +# + +# $Id: CMakeLists.txt 805745 2017-05-31 17:23:48Z wwalko $ +# +# Build configuration for the package. +# +#******************************************** +set(extra_dep) +if( XAOD_STANDALONE ) + set(extra_dep) +else() + set( extra_dep + GaudiKernel + Control/AthenaKernel + ) +endif() +#******************************************** +set(extra_libs) +if( XAOD_STANDALONE ) + set(extra_libs) +else() + set( extra_libs + GaudiKernel + AthenaKernel + ) +endif() +#******************************************** +# The name of the package: +atlas_subdir( BPhysTools ) + + +# Used external(s): +find_package( ROOT COMPONENTS Core Physics Matrix ) +find_package( Boost ) + +# Build the main library of the package: +atlas_add_library( BPhysToolsLib + BPhysTools/*.h Root/*.cxx src/*.cxx + PUBLIC_HEADERS BPhysTools + INCLUDE_DIRS ${ROOT_INCLUDE_DIRS} + PRIVATE_INCLUDE_DIRS ${Boost_INCLUDE_DIRS} + LINK_LIBRARIES ${ROOT_LIBRARIES} + xAODTracking + xAODBPhysLib + AsgTools + ${extra_libs} + PRIVATE_LINK_LIBRARIES ${Boost_LIBRARIES} + xAODEventInfo + ) + +if(NOT XAOD_STANDALONE) +atlas_add_component( BPhysTools + src/components/*.cxx + INCLUDE_DIRS ${ROOT_INCLUDE_DIRS} + PRIVATE_INCLUDE_DIRS ${Boost_INCLUDE_DIRS} + LINK_LIBRARIES ${ROOT_LIBRARIES} + xAODTracking + xAODBPhysLib + AsgTools + ${extra_libs} + BPhysToolsLib + PRIVATE_LINK_LIBRARIES ${Boost_LIBRARIES} + xAODEventInfo + ) +endif() + +# Build the dictionary +atlas_add_dictionary( 
BPhysToolsDict + BPhysTools/BPhysToolsDict.h + BPhysTools/selection.xml + LINK_LIBRARIES BPhysToolsLib + ) + + +# Executables in util subdirectory +file (GLOB util_sources RELATIVE "${CMAKE_CURRENT_SOURCE_DIR}" + "${CMAKE_CURRENT_SOURCE_DIR}/util/*.cxx") +foreach (source ${util_sources}) + string (REGEX REPLACE "util/(.*).cxx" "\\1" util ${source}) + atlas_add_executable (${util} ${source} LINK_LIBRARIES BPhysToolsLib) +endforeach (source ${util_sources}) + diff --git a/PhysicsAnalysis/BPhys/BPhysTools/Root/BPhysBlindingTool.cxx b/PhysicsAnalysis/BPhys/BPhysTools/Root/BPhysBlindingTool.cxx new file mode 100644 index 0000000000000000000000000000000000000000..269f6699792335847d80b1c4985607456834f4b1 --- /dev/null +++ b/PhysicsAnalysis/BPhys/BPhysTools/Root/BPhysBlindingTool.cxx @@ -0,0 +1,464 @@ +/* + Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration +*/ + +// system include: +#include "boost/tokenizer.hpp" +#include <boost/algorithm/string.hpp> +#include <set> +#include <cmath> + +// EDM includes: +#include "xAODEventInfo/EventInfo.h" + +// ROOT includes +#include "TString.h" + +// Local include(s): +#include "BPhysTools/BPhysBlindingTool.h" + +namespace xAOD { + + //-------------------------------------------------------------------------- + // Constructor + //-------------------------------------------------------------------------- + BPhysBlindingTool::BPhysBlindingTool( const std::string& name ) + : asg::AsgTool( name ), + m_vtxContainer(nullptr), m_vtxAuxContainer(nullptr), + m_cachedRun(-1), m_cachedEvent(-1), + m_eventsForBlindingSeen(0), + m_candidatesForBlindingSeen(0), + m_eventsForUnblindingSeen(0), + m_candidatesForUnblindingSeen(0), + m_eventsBlinded(0), + m_candidatesBlinded(0), + m_eventsUnblinded(0), + m_candidatesUnblinded(0) { + +#ifdef ASGTOOL_ATHENA + declareInterface< IBPhysBlindingTool >( this ); +#endif // ASGTOOL_ATHENA + + // Vertex container + declareProperty("VertexContainerName", m_vertexContainerName = ""); + + // 
List of variables to blind + // (as concatenated string using . as delimiter) + declareProperty("VarToBlindNames", m_varToBlindNames = ""); + + // Flag to indicate candidates for blinding + // Left empty: Blind values for all candidates. + declareProperty("BlindingFlag" , m_blindingFlag = ""); + + // Offsets applied to values before blinding + // List must have same length as VarToBlindNames or zero. + declareProperty("BlindingOffsets", m_vOffsets); + + // Scale factors applied before blinding + // List must have same length as VarToBlindNames or zero. + declareProperty("BlindingFactors", m_vFactors); + + // Flip signs to negative range? + declareProperty("NegativeSigns" , m_vNegSigns); + + // Key for blinding + declareProperty("BlindingKey" , m_blindKey = ""); + + // Key for unblinding + declareProperty("UnblindingKey" , m_unblindKey = ""); + + } + //-------------------------------------------------------------------------- + StatusCode BPhysBlindingTool::initialize() { + + // Greet the user: + ATH_MSG_DEBUG( "Initializing xAOD::BPhysBlindingTool" ); + + // Setup of variables + if ( m_vertexContainerName == "" ) { + ATH_MSG_INFO("No vertex container name provided."); + } + + if ( m_varToBlindNames != "" ) { + m_vVarNames = getTokens(m_varToBlindNames, ".,:;|"); + } + + // Blinding and unblinding keys + if ( m_blindKey == "" && m_unblindKey == "" ) { + ATH_MSG_ERROR("You must at least set a key for blinding or unblinding!"); + } else { + if ( m_blindKey != "" ) { + m_senc.setPubKey(m_blindKey); + ATH_MSG_INFO("Setting blinding key."); + } + if ( m_unblindKey != "" ) { + m_senc.setPrivKey(m_unblindKey); + ATH_MSG_INFO("Setting unblinding key."); + } + } + + // make sure offsets vector is of correct length + if ( m_vOffsets.size() < m_vVarNames.size() ) { + for (uint i=m_vOffsets.size(); i<m_vVarNames.size(); ++i) { + m_vOffsets.push_back(0.); + } + ATH_MSG_INFO("Extending BlindingOffsets list ..."); + } else if ( m_vOffsets.size() > m_vVarNames.size() ) { + 
ATH_MSG_WARNING("BlindingOffsets list longer than VarToBlindNames."); + } + + // make sure scale factors vector is of correct length + if ( m_vFactors.size() < m_vVarNames.size() ) { + for (uint i=m_vFactors.size(); i<m_vVarNames.size(); ++i) { + m_vFactors.push_back(1.); + } + ATH_MSG_INFO("Extending BlindingOffsets list ..."); + } else if ( m_vFactors.size() > m_vVarNames.size() ) { + ATH_MSG_WARNING("BlindingFactors list longer than VarToBlindNames."); + } + + // make sure negative signs vector is of correct length + if ( m_vNegSigns.size() < m_vVarNames.size() ) { + for (uint i=m_vNegSigns.size(); i<m_vVarNames.size(); ++i) { + m_vNegSigns.push_back(1.); + } + ATH_MSG_INFO("Extending NegativeSigns list ..."); + } else if ( m_vNegSigns.size() > m_vVarNames.size() ) { + ATH_MSG_WARNING("NegativeSigns list longer than VarToBlindNames."); + } + + // some info for the job log + ATH_MSG_INFO("VertexContainerName : " << m_vertexContainerName); + ATH_MSG_INFO("BlindingFlag : " << m_blindingFlag); + ATH_MSG_INFO("VarToBlindNames : " << m_varToBlindNames); + ATH_MSG_INFO("BlindingOffsets : " << vecToString(m_vOffsets)); + ATH_MSG_INFO("BlindingFactors : " << vecToString(m_vFactors)); + ATH_MSG_INFO("NegativeSigns : " << vecToString(m_vNegSigns)); + ATH_MSG_INFO("BlindingKey : " << m_blindKey); + ATH_MSG_INFO("UnblindingKey : " << m_unblindKey); + + // Return gracefully: + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + StatusCode BPhysBlindingTool::finalize() { + + ATH_MSG_DEBUG( "Finalizing xAOD::BPhysBlindingTool" ); + + ATH_MSG_INFO("Statistics for " << name() << ":"); + ATH_MSG_INFO(Form("N_eventsForBlindingSeen : %10ld", + m_eventsForBlindingSeen)); + ATH_MSG_INFO(Form("N_eventsBlinded : %10ld", + m_eventsBlinded)); + ATH_MSG_INFO(Form("N_eventsForUnblindingSeen : %10ld", + m_eventsForUnblindingSeen)); + ATH_MSG_INFO(Form("N_eventsUnblinded : %10ld", + m_eventsUnblinded)); + 
ATH_MSG_INFO(Form("N_candidatesForBlindingSeen : %10ld", + m_candidatesForBlindingSeen)); + ATH_MSG_INFO(Form("N_candidatesBlinded : %10ld", + m_candidatesBlinded)); + ATH_MSG_INFO(Form("N_candidatesForUnblindingSeen : %10ld", + m_candidatesForUnblindingSeen)); + ATH_MSG_INFO(Form("N_candidatesUnblinded : %10ld", + m_candidatesUnblinded)); + + // Return gracefully: + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + // Simply blind one positive float value + //-------------------------------------------------------------------------- + float BPhysBlindingTool::doBlind(const float& val) { + + return m_senc.encrypt(val); + } + //-------------------------------------------------------------------------- + // Simply unblind one positive float value + //-------------------------------------------------------------------------- + float BPhysBlindingTool::doUnblind(const float& val) { + + return m_senc.decrypt(val); + } + //-------------------------------------------------------------------------- + // Simply blind one (positive) float value + //-------------------------------------------------------------------------- + float BPhysBlindingTool::doBlind(const float& val, + const bool& negativeSign, + const float& offset, + const float& factor) { + + // adjustment if requested + float bval(val); + float cval = val*factor + offset; + if ( cval > 0. 
) { + // perform actual blinding + bval = m_senc.encrypt(cval); + if (negativeSign) bval *= -1.; + } else { + ATH_MSG_WARNING("Blinding: Corrected value not positive: " + << val << Form(" (%a) -> ", val) + << cval << Form(" (%a)", cval)); + } // if cval > 0 + + return bval; + } + //-------------------------------------------------------------------------- + // Simply unblind one (positive) float value + //-------------------------------------------------------------------------- + float BPhysBlindingTool::doUnblind(const float& val, + const bool& negativeSign, + const float& offset, + const float& factor) { + + float bval(val), cval(val); + if (negativeSign) bval *= -1.; + // if ( bval > 0. || isnan(bval) ) { + if ( bval > 0. || !std::isnormal(bval) ) { + // perform actual unblinding + cval = m_senc.decrypt(bval); + if ( factor != 0. ) { + cval = (cval - offset)/factor; + } else { + ATH_MSG_WARNING("Unblinding: BlindingFactor == 0!: " + << val << Form(" (%a)", val)); + } // if m_vFactors[ivtx] != 0 + } else { + ATH_MSG_WARNING("Unblinding: Corrected value not positive: " + << val << Form(" (%a) -> ", val) + << bval << Form(" (%a)", bval)); + } // if bval > 0 + + return cval; + } + //-------------------------------------------------------------------------- + // Perform blinding of requested variables + //-------------------------------------------------------------------------- + StatusCode BPhysBlindingTool::doBlind() { + + if ( m_blindKey == "" ) { + ATH_MSG_WARNING("Can not blind without blinding key!"); + } else { + ATH_CHECK( doBlindingAction(false) ); + } + + // Return gracefully: + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + // Perform unblinding of requested variables + //-------------------------------------------------------------------------- + StatusCode BPhysBlindingTool::doUnblind() { + + if ( m_unblindKey == "" ) { + ATH_MSG_WARNING("Can not unblind without unblinding key!"); + } 
else { + ATH_CHECK( doBlindingAction(true) ); + } + + // Return gracefully: + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + + //-------------------------------------------------------------------------- + // Protected methods + //-------------------------------------------------------------------------- + + //-------------------------------------------------------------------------- + // Perform blinding or unblinding action + //-------------------------------------------------------------------------- + StatusCode BPhysBlindingTool::doBlindingAction(bool unblind) { + + ATH_CHECK(cacheEvent()); + + // counters + if ( unblind ) { + ++m_eventsForUnblindingSeen; + } else { + ++m_eventsForBlindingSeen; + } + + if ( m_vVarNames.size() > 0 ) { + long candidatesBlinded(0); + long candidatesUnblinded(0); + // loop over vertices + // int ivtx(0); + for (xAOD::VertexContainer::const_iterator + vtxItr = m_vtxContainer->begin(); + vtxItr != m_vtxContainer->end(); ++vtxItr) { + // counters + if ( unblind ) { + ++m_candidatesForUnblindingSeen; + } else { + ++m_candidatesForBlindingSeen; + } + const xAOD::Vertex* vtx = *vtxItr; + // check whether to apply (un-)blinding to this candidate + if ( m_blindingFlag == "" || pass(*vtx, m_blindingFlag) ) { + // counters + if ( unblind ) { + ++candidatesUnblinded; + } else { + ++candidatesBlinded; + } + // loop over variable names + for (size_t iv=0; iv<m_vVarNames.size(); ++iv) { + SG::AuxElement::Decorator<float> floatDec(m_vVarNames[iv]); + // check for variable + if ( floatDec.isAvailable(*vtx) ) { + float val = floatDec(*vtx); + if ( unblind ) { + // unblinding + floatDec(*vtx) = doUnblind(val, m_vNegSigns[iv], + m_vOffsets[iv], m_vFactors[iv]); + ATH_MSG_DEBUG("Unblind: " << val << Form(" (%a) -> ", val) + << floatDec(*vtx) + << Form(" (%a)", floatDec(*vtx))); + } else { + // blinding + floatDec(*vtx) = doBlind(val, m_vNegSigns[iv], + m_vOffsets[iv], m_vFactors[iv]); 
+ ATH_MSG_DEBUG("Blind: " << val << Form(" (%a) -> ", val) + << floatDec(*vtx) + << Form(" (%a)", floatDec(*vtx))); + } // if unblind + } else { + ATH_MSG_WARNING("Missing variable " << m_vVarNames[iv]); + } // if isAvailable + } // for m_vVarNames + } // if blinding + } // for iv + // counters + if ( unblind ) { + m_candidatesUnblinded += candidatesUnblinded; + if ( candidatesUnblinded > 0 ) ++m_eventsUnblinded; + } else { + m_candidatesBlinded += candidatesBlinded; + if ( candidatesBlinded > 0 ) ++m_eventsBlinded; + } + } // if m_vVarNames.size() + + // Return gracefully: + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + // Cache current event. + // + // Call this once per event. + // Repeated calls for the same run/event are not updating the cache again. + //-------------------------------------------------------------------------- + StatusCode BPhysBlindingTool::cacheEvent() { + + ATH_MSG_DEBUG("BPhysBlindingTool::cacheEvent -- begin"); + + const xAOD::EventInfo* eventInfo = NULL; + ATH_CHECK(evtStore()->retrieve(eventInfo, "EventInfo")); + + if ( m_cachedRun != (int)eventInfo->runNumber() || + m_cachedEvent != (int)eventInfo->eventNumber() ) { + + // note update + m_cachedRun = eventInfo->runNumber(); + m_cachedEvent = eventInfo->eventNumber(); + + ATH_MSG_DEBUG("BPhysBlindingTool::cacheEvent: caching now: " + << "run " << m_cachedRun << " event " << m_cachedEvent); + + // retrieve vertices container + m_vtxContainer = nullptr; + m_vtxAuxContainer = nullptr; + + if ( evtStore()->transientContains<xAOD::VertexContainer>(m_vertexContainerName) ) { + ATH_MSG_DEBUG("In transient store: " << m_vertexContainerName); + ATH_CHECK(evtStore()->retrieve(m_vtxContainer, + m_vertexContainerName)); + ATH_CHECK(evtStore()->retrieve(m_vtxAuxContainer, + m_vertexContainerName+"Aux.")); + } else { + ATH_MSG_DEBUG("Not in transient store: " << m_vertexContainerName); + const xAOD::VertexContainer* constVtxContainer 
= nullptr; + const xAOD::VertexAuxContainer* constVtxAuxContainer = nullptr; + ATH_CHECK(evtStore()->retrieve(constVtxContainer, + m_vertexContainerName)); + ATH_CHECK(evtStore()->retrieve(constVtxAuxContainer, + m_vertexContainerName+"Aux.")); + // create a copy + m_vtxContainer = new xAOD::VertexContainer(); + m_vtxAuxContainer = new xAOD::VertexAuxContainer(); + m_vtxContainer->setStore(m_vtxAuxContainer); + for (const xAOD::Vertex* constVtx : *constVtxContainer) { + xAOD::Vertex* vtx = new xAOD::Vertex(); + m_vtxContainer->push_back(vtx); + *vtx = *constVtx; + } + ATH_CHECK(evtStore()->record(m_vtxContainer, + m_vertexContainerName)); + ATH_CHECK(evtStore()->record(m_vtxAuxContainer, + m_vertexContainerName+"Aux.")); + } + + ATH_MSG_DEBUG("Found vertex collection with key " + << m_vertexContainerName); + + } // if new run/event + + ATH_MSG_DEBUG("BPhysBlindingTool::cacheEvent -- end"); + + // Return gracefully: + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + // Helper to check whether an element is marked as passing a specific + // hypothesis. 
+ //-------------------------------------------------------------------------- + bool BPhysBlindingTool::pass(const SG::AuxElement& em, std::string hypo) { + + if ( !boost::algorithm::starts_with(hypo, "passed_") ) + hypo = "passed_" + hypo; + SG::AuxElement::Accessor<Char_t> flagAcc(hypo); + return flagAcc.isAvailable(em) && flagAcc(em) != 0; + } + //-------------------------------------------------------------------------- + // Tokenize a string using certain separators + //-------------------------------------------------------------------------- + std::vector<std::string> + BPhysBlindingTool::getTokens(std::string input, std::string seperators) { + + std::vector<std::string> tokens; + boost::char_separator<char> sep(seperators.c_str()); + typedef boost::tokenizer<boost::char_separator<char> > Tokenizer_t; + Tokenizer_t tokenizer(input, sep); + for (auto& token : tokenizer) { + tokens.push_back(token); + } + return tokens; + } + //-------------------------------------------------------------------------- + // Format vector of floats as string + //-------------------------------------------------------------------------- + std::string BPhysBlindingTool::vecToString(const std::vector<float>& v) + const { + std::string str("["); + for (unsigned int i=0; i<v.size(); ++i) { + str += std::to_string(v[i]); + if ( i < v.size()-1 ) str += ","; + } + str += "]"; + return str; + } + //-------------------------------------------------------------------------- + // Format vector of bools as string + //-------------------------------------------------------------------------- + std::string BPhysBlindingTool::vecToString(const std::vector<bool>& v) + const { + std::string str("["); + for (unsigned int i=0; i<v.size(); ++i) { + str += std::to_string(v[i]); + if ( i < v.size()-1 ) str += ","; + } + str += "]"; + return str; + } + //-------------------------------------------------------------------------- +} // namespace xAOD diff --git 
a/PhysicsAnalysis/BPhys/BPhysTools/Root/BPhysTrackVertexMapTool.cxx b/PhysicsAnalysis/BPhys/BPhysTools/Root/BPhysTrackVertexMapTool.cxx new file mode 100644 index 0000000000000000000000000000000000000000..c0cf97b32de914890642c851bc0e8aacbf0965ab --- /dev/null +++ b/PhysicsAnalysis/BPhys/BPhysTools/Root/BPhysTrackVertexMapTool.cxx @@ -0,0 +1,664 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ + +// $Id: $ + +// system include: +#include "boost/format.hpp" +#include "boost/tokenizer.hpp" + +// EDM includes: +#include "xAODEventInfo/EventInfo.h" +#include "xAODBPhys/BPhysHypoHelper.h" + +// Local include(s): +#include "BPhysTools/BPhysTrackVertexMapTool.h" + +namespace xAOD { + + //-------------------------------------------------------------------------- + // Static utility method to prefix every line by a certain string + //-------------------------------------------------------------------------- + std::string BPhysTrackVertexMapTool::wrapLines(std::string lines, + std::string prefix) { + + std::string ostr; + std::istringstream stream(lines); + std::string line; + while ( std::getline(stream, line) ) { + if ( !ostr.empty() ) ostr += "\n"; + ostr += prefix + line; + } + return ostr; + } + //-------------------------------------------------------------------------- + // Constructor + //-------------------------------------------------------------------------- + BPhysTrackVertexMapTool::BPhysTrackVertexMapTool( const std::string& name ) + : asg::AsgTool( name ), + m_tracks(NULL), m_tracksAux(NULL), m_pvtxContainer(NULL), + m_svtxContainer(NULL), m_svtxAuxContainer(NULL), m_refPVContainer(NULL), + m_refPVAuxContainer(NULL), + m_nEvtsSeen (0), m_cachedRun(-1), m_cachedEvent(-1) { + +#ifdef ASGTOOL_ATHENA + declareInterface< IBPhysTrackVertexMapTool >( this ); +#endif // ASGTOOL_ATHENA + + // Necessary containers + declareProperty("VertexContainerName", m_vertexContainerName); + declareProperty("TrackParticleContainerName", + 
m_trackParticleContainerName="InDetTrackParticles"); + declareProperty("PVContainerName", m_pvContainerName = "PrimaryVertices"); + declareProperty("RefPVContainerName", m_refPVContainerName); + + // Maximum number of events to dump maps to log file for + declareProperty("DebugTrkToVtxMaxEvents", m_debugTrkToVtxMaxEvents = 0); + + // Prefix for log dump lines + declareProperty("DumpPrefix", m_dumpPrefix="TTV> "); + + // Hypothesis name (for mass value pickup) + declareProperty("HypoName", m_hypoName="__NONE__"); + } + //-------------------------------------------------------------------------- + StatusCode BPhysTrackVertexMapTool::initialize() { + + // Greet the user: + ATH_MSG_DEBUG( "Initializing xAOD::BPhysTrackVertexMapTool" ); + + if ( m_vertexContainerName == "" ) { + ATH_MSG_ERROR("No vertex container name provided!"); + } + if ( m_refPVContainerName == "" ) { + ATH_MSG_ERROR("No refitted PV container name provided!"); + } + if ( m_trackParticleContainerName == "" ) { + ATH_MSG_ERROR("No track particle container name provided!"); + } + if ( m_pvContainerName == "" ) { + ATH_MSG_ERROR("No PV container name provided!"); + } + // some info for the job log + ATH_MSG_INFO("VertexContainerName : " << m_vertexContainerName); + ATH_MSG_INFO("PVContainerName : " << m_pvContainerName); + ATH_MSG_INFO("RefPVContainerName : " << m_refPVContainerName); + ATH_MSG_INFO("TrackParticleContainerName : " + << m_trackParticleContainerName); + ATH_MSG_INFO("DebugTrkToVtxMaxEvents : " << m_debugTrkToVtxMaxEvents); + ATH_MSG_INFO("DumpPrefix : " << m_dumpPrefix); + ATH_MSG_INFO("HypoName : " << m_hypoName); + + // Return gracefully: + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + StatusCode BPhysTrackVertexMapTool::finalize() { + + ATH_MSG_DEBUG( "Finalizing xAOD::BPhysTrackVertexMapTool" ); + + // Return gracefully: + return StatusCode::SUCCESS; + } + 
//-------------------------------------------------------------------------- + StatusCode BPhysTrackVertexMapTool::logEvent() { + + ATH_MSG_DEBUG( "logEvent in xAOD::BPhysTrackVertexMapTool" ); + + // read info into maps cache + ATH_CHECK(cacheEvent()); + + // dump info from maps if requested + if ( doLog() ) { + + ATH_MSG_INFO("Track-to-vertex association map:"); + + std::cout << summaryToString(m_dumpPrefix) << std::endl; + + } // if requested + + // increment counter + m_nEvtsSeen++; + + // Return gracefully: + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + bool BPhysTrackVertexMapTool::doLog() const { + + return ( m_debugTrkToVtxMaxEvents < 0 || + m_debugTrkToVtxMaxEvents > (int)m_nEvtsSeen ); + } + //-------------------------------------------------------------------------- + // + // Cache maps for current event. + // + // Call this once per event. + // Repeated calls for the same run/event are not updating the cache again. 
+ // + //-------------------------------------------------------------------------- + StatusCode BPhysTrackVertexMapTool::cacheEvent() { + + ATH_MSG_DEBUG("BPhysTrackVertexMapTool::cacheEvent -- begin"); + + const xAOD::EventInfo* eventInfo = NULL; + ATH_CHECK(evtStore()->retrieve(eventInfo, "EventInfo")); + + if ( m_cachedRun != (int)eventInfo->runNumber() || + m_cachedEvent != (int)eventInfo->eventNumber() ) { + + // note update + m_cachedRun = eventInfo->runNumber(); + m_cachedEvent = eventInfo->eventNumber(); + + ATH_MSG_DEBUG("BPhysTrackVertexMapTool::cacheEvent: caching now: " + << "run " << m_cachedRun << " event " << m_cachedEvent); + + // retrieve primary vertices container + m_pvtxContainer = NULL; + ATH_CHECK(evtStore()->retrieve(m_pvtxContainer, m_pvContainerName)); + ATH_MSG_DEBUG("Found PV collection with key " << m_pvContainerName); + + // retrieve ID track container + m_tracks = NULL; + m_tracksAux = NULL; + ATH_CHECK(evtStore()->retrieve(m_tracks, m_trackParticleContainerName)); + if (evtStore()->contains<xAOD:: + TrackParticleAuxContainer>(m_trackParticleContainerName+"Aux.")) { + ATH_CHECK(evtStore()->retrieve(m_tracksAux, + m_trackParticleContainerName+"Aux.")); + } else { + ATH_MSG_DEBUG("No aux track collection with key " + << m_trackParticleContainerName+"Aux."); + } + ATH_MSG_DEBUG("Found track collection with key " + << m_trackParticleContainerName); + + // vertex container and its auxilliary store + m_svtxContainer = NULL; + m_svtxAuxContainer = NULL; + ATH_CHECK(evtStore()->retrieve(m_svtxContainer, m_vertexContainerName)); + ATH_CHECK(evtStore()->retrieve(m_svtxAuxContainer, + m_vertexContainerName+"Aux.")); + ATH_MSG_DEBUG("Found SV collection with key " << m_vertexContainerName); + + // refitted primary vertex container and its auxilliary store + m_refPVContainer = NULL; + m_refPVAuxContainer = NULL; + ATH_CHECK(evtStore()->retrieve(m_refPVContainer, m_refPVContainerName)); + ATH_CHECK(evtStore()->retrieve(m_refPVAuxContainer, + 
m_refPVContainerName+"Aux.")); + ATH_MSG_DEBUG("Found refitted PV collection with key " + << m_refPVContainerName); + + // initialize track, PV and refPV maps + initTrackVertexMaps(m_tracks, m_pvtxContainer, m_refPVContainer, + m_svtxContainer); + } // if new run/event + + ATH_MSG_DEBUG("BPhysTrackVertexMapTool::cacheEvent -- end"); + + // Return gracefully: + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + // + // Retrieve primary vertices for ID track from map. + // + //-------------------------------------------------------------------------- + std::vector<const xAOD::Vertex*> + BPhysTrackVertexMapTool::pvsForIDTrack(const xAOD::TrackParticle* track) + const { + + TrackToVertexMap_t::const_iterator it = m_idTrackToPVMap.find(track); + + if ( it != m_idTrackToPVMap.end() ) { + return it->second; + } else { + std::vector<const xAOD::Vertex*> dummy; + return dummy; + } + } + //-------------------------------------------------------------------------- + // + // Retrieve refitted primary vertices for ID track from map. + // + //-------------------------------------------------------------------------- + std::vector<const xAOD::Vertex*> + BPhysTrackVertexMapTool::refPVsForIDTrack(const xAOD::TrackParticle* track) + const { + + TrackToVertexMap_t::const_iterator it = m_idTrackToRefPVMap.find(track); + + if ( it != m_idTrackToRefPVMap.end() ) { + return it->second; + } else { + std::vector<const xAOD::Vertex*> dummy; + return dummy; + } + } + //-------------------------------------------------------------------------- + // + // Retrieve secondary vertices for ID track from map. 
+ // + //-------------------------------------------------------------------------- + std::vector<const xAOD::Vertex*> + BPhysTrackVertexMapTool::svsForIDTrack(const xAOD::TrackParticle* track) + const { + + TrackToVertexMap_t::const_iterator it = m_idTrackToSVMap.find(track); + + if ( it != m_idTrackToSVMap.end() ) { + return it->second; + } else { + std::vector<const xAOD::Vertex*> dummy; + return dummy; + } + } + //-------------------------------------------------------------------------- + // + // Initialize ID tracks, PV and refPV related maps. + // + //-------------------------------------------------------------------------- + void BPhysTrackVertexMapTool + ::initTrackVertexMaps(const xAOD::TrackParticleContainer* tpc, + const xAOD::VertexContainer* pvc, + const xAOD::VertexContainer* rpvc, + const xAOD::VertexContainer* svc) { + + // clear previous entries + m_pvNameMap.clear(); + m_refPVNameMap.clear(); + m_svNameMap.clear(); + m_idTrackNameMap.clear(); + m_idTrackToPVMap.clear(); + m_idTrackToRefPVMap.clear(); + m_idTrackToSVMap.clear(); + + // initialize maps for PVs + for (xAOD::VertexContainer::const_iterator vtxItr = pvc->begin(); + vtxItr != pvc->end(); ++vtxItr) { + const xAOD::Vertex* vtx = *vtxItr; + pvName(vtx); + for (size_t i = 0; i < vtx->nTrackParticles(); ++i) { + const xAOD::TrackParticle* track = vtx->trackParticle(i); + // m_idTrackToPVMap[track] = vtx; + addVertexToTrackVertexMap(m_idTrackToPVMap, track, vtx); + } + } + // initialize maps for refitted PVs + for (xAOD::VertexContainer::const_iterator vtxItr = rpvc->begin(); + vtxItr != rpvc->end(); ++vtxItr) { + const xAOD::Vertex* vtx = *vtxItr; + refPVName(vtx); + for (size_t i = 0; i < vtx->nTrackParticles(); ++i) { + const xAOD::TrackParticle* track = vtx->trackParticle(i); + // m_idTrackToRefPVMap[track] = vtx; + addVertexToTrackVertexMap(m_idTrackToRefPVMap, track, vtx); + } + } + + // initialize maps for SVs + for (xAOD::VertexContainer::const_iterator vtxItr = svc->begin(); + 
vtxItr != svc->end(); ++vtxItr) { + const xAOD::Vertex* vtx = *vtxItr; + svName(vtx); + for (size_t i = 0; i < vtx->nTrackParticles(); ++i) { + const xAOD::TrackParticle* track = vtx->trackParticle(i); + // m_idTrackToSVMap[track] = vtx; + addVertexToTrackVertexMap(m_idTrackToSVMap, track, vtx); + } + } + // initialize maps for ID tracks + for (xAOD::TrackParticleContainer::const_iterator trkItr = tpc->begin(); + trkItr != tpc->end(); ++trkItr) { + const xAOD::TrackParticle* track = *trkItr; + idTrackName(track); + } + } + //-------------------------------------------------------------------------- + // + // Add vertex to track-to-vertex map with vector of vertices. + // + //-------------------------------------------------------------------------- + void BPhysTrackVertexMapTool + ::addVertexToTrackVertexMap(TrackToVertexMap_t& map, + const xAOD::TrackParticle* track, + const xAOD::Vertex* vtx) { + + TrackToVertexMap_t::const_iterator it = map.find(track); + + if ( it == map.end() ) { + map[track] = std::vector<const xAOD::Vertex*>(); + } + map[track].push_back(vtx); + } + //-------------------------------------------------------------------------- + // Lookup name for PV -- add as next if not yet known + //-------------------------------------------------------------------------- + std::string BPhysTrackVertexMapTool::pvName(const xAOD::Vertex* vtx) { + + if ( m_pvNameMap.find(vtx) == m_pvNameMap.end() ) { + boost::format f("PV%03d"); + f % m_pvNameMap.size(); + m_pvNameMap[vtx] = f.str(); + } + return m_pvNameMap[vtx]; + } + //-------------------------------------------------------------------------- + // Lookup name for refitted PV -- add as next if not yet known + //-------------------------------------------------------------------------- + std::string BPhysTrackVertexMapTool::refPVName(const xAOD::Vertex* vtx) { + + if ( m_refPVNameMap.find(vtx) == m_refPVNameMap.end() ) { + boost::format f("RV%03d"); + f % m_refPVNameMap.size(); + m_refPVNameMap[vtx] = 
f.str(); + } + return m_refPVNameMap[vtx]; + } + //-------------------------------------------------------------------------- + // Lookup name for SV -- add as next if not yet known + //-------------------------------------------------------------------------- + std::string BPhysTrackVertexMapTool::svName(const xAOD::Vertex* vtx) { + + if ( m_svNameMap.find(vtx) == m_svNameMap.end() ) { + boost::format f("SV%03d"); + f % m_svNameMap.size(); + m_svNameMap[vtx] = f.str(); + } + return m_svNameMap[vtx]; + } + //-------------------------------------------------------------------------- + // Lookup name for ID track -- add as next if not yet known + //-------------------------------------------------------------------------- + std::string + BPhysTrackVertexMapTool::idTrackName(const xAOD::TrackParticle* track) { + + if ( m_idTrackNameMap.find(track) == m_idTrackNameMap.end() ) { + boost::format f("T%04d"); + f % m_idTrackNameMap.size(); + m_idTrackNameMap[track] = f.str(); + } + return m_idTrackNameMap[track]; + } + //-------------------------------------------------------------------------- + // Print Track information to string -- optionally adding PVs or refPVs + //-------------------------------------------------------------------------- + std::string + BPhysTrackVertexMapTool::idTrackToString(const xAOD::TrackParticle* track, + unsigned int indent, + bool withPV, bool withRefPV, + bool withSV) { + + std::string sind(indent, ' '); + boost::format f1("%s %-5s %p (%10.4f, %10.4f, %10.4f) VL %p"); + f1 % sind % idTrackName(track) % track + % track->pt() % track->eta() % track->phi(); //% track->vertex(); REMOVE IN MIGRATION TO MAKER + std::string str = f1.str(); + if ( withPV ) { + TrackToVertexMap_t::iterator it = m_idTrackToPVMap.find(track); + if ( it != m_idTrackToPVMap.end() ) { + for ( auto vtx : it->second ) { + str += "\n" + pvToString(vtx, indent+2, false); + } + } else { + boost::format f2("\n%s %s"); + f2 % sind % "NOPV"; + str += f2.str(); + + } + } + if ( 
withRefPV ) { + TrackToVertexMap_t::iterator it = m_idTrackToRefPVMap.find(track); + if ( it != m_idTrackToRefPVMap.end() ) { + for ( auto vtx : it->second ) { + str += "\n" + refPVToString(vtx, indent+2, false); + } + } else { + boost::format f2("\n%s %s"); + f2 % sind % "NORV"; + str += f2.str(); + } + } + if ( withSV ) { + TrackToVertexMap_t::iterator it = m_idTrackToSVMap.find(track); + if ( it != m_idTrackToSVMap.end() ) { + for ( auto vtx : it->second ) { + str += "\n" + svToString(vtx, indent+2, false); + } + } else { + boost::format f2("\n%s %s"); + f2 % sind % "NOSV"; + str += f2.str(); + } + } + return str; + } + //-------------------------------------------------------------------------- + // Print PV information to string -- optionally adding tracks + //-------------------------------------------------------------------------- + std::string + BPhysTrackVertexMapTool::pvToString(const xAOD::Vertex* vtx, + unsigned int indent, + bool withTracks) { + + std::string sind(indent, ' '); + boost::format f1("%s %-5s %p (%10.4f, %10.4f, %10.4f) NT %4d VT %d"); + f1 % sind % pvName(vtx) % vtx % vtx->x() % vtx->y() % vtx->z() + % vtx->nTrackParticles() % vtx->vertexType(); + std::string str = f1.str(); + if ( withTracks ) { + for (size_t i=0; i < vtx->nTrackParticles(); ++i) { + boost::format f2("\n%s %4d %s"); + f2 % sind % i + % idTrackToString(vtx->trackParticle(i), 0, false, false); + str += f2.str(); + } // for + } + + return str; + } + //-------------------------------------------------------------------------- + // Print refitted PV information to string -- optionally adding tracks + //-------------------------------------------------------------------------- + std::string + BPhysTrackVertexMapTool::refPVToString(const xAOD::Vertex* vtx, + unsigned int indent, + bool withTracks) { + + std::string sind(indent, ' '); + boost::format f1("%s %-5s %p (%10.4f, %10.4f, %10.4f) NT %4d VT %d"); + f1 % sind % refPVName(vtx) % vtx % vtx->x() % vtx->y() % vtx->z() + % 
vtx->nTrackParticles() % vtx->vertexType(); + std::string str = f1.str(); + if ( withTracks ) { + for (size_t i=0; i < vtx->nTrackParticles(); ++i) { + boost::format f2("\n%s %4d %s"); + f2 % sind % i + % idTrackToString(vtx->trackParticle(i), 0, false, false); + str += f2.str(); + } // for + } + + return str; + } + //-------------------------------------------------------------------------- + // Print SV information to string -- optionally adding tracks + //-------------------------------------------------------------------------- + std::string + BPhysTrackVertexMapTool::svToString(const xAOD::Vertex* vtx, + unsigned int indent, + bool withTracks, + bool withMasses) { + + std::string sind(indent, ' '); + boost::format f1("%s %-5s %p (%10.4f, %10.4f, %10.4f) NT %4d VT %d"); + f1 % sind % svName(vtx) % vtx % vtx->x() % vtx->y() % vtx->z() + % vtx->nTrackParticles() % vtx->vertexType(); + std::string str = f1.str(); + if ( withMasses && m_hypoName != "__NONE__" ) { + // vector of possible hypo names + std::vector<std::string> hypoNames = getTokens(m_hypoName, "|;/"); + for ( auto hypoName : hypoNames ) { + BPhysHypoHelper bhh(hypoName, vtx); + float bMass = bhh.mass(); + float bMassErr = bhh.massErr(); + float bMucMass = getFloat(hypoName+"_MUCALC_mass", vtx); + float bMucMassErr = getFloat(hypoName+"_MUCALC_massErr", vtx); + if ( bMass > 0. || bMassErr > 0. + || bMucMass > 0. || bMucMassErr > 0. ) { + boost::format f3("\n%s %-10s : mass : (%15.4f +/- %15.4f) MeV"); + + boost::format f4("\n%s %-10s : m(MUCALC): (%15.4f +/- %15.4f) MeV"); + f3 % sind % hypoName % bMass % bMassErr; + f4 % sind % hypoName % bMucMass % bMucMassErr; + str += f3.str() + f4.str(); + } // if one > 0. 
+ } // for hypoNames + } // if withMasses + if ( withTracks ) { + for (size_t i=0; i < vtx->nTrackParticles(); ++i) { + boost::format f2("\n%s %4d %s"); + f2 % sind % i + % idTrackToString(vtx->trackParticle(i), 0, false, false); + str += f2.str(); + } // for + } + return str; + } + //-------------------------------------------------------------------------- + // Print track container information to string + // -- optionally adding PVs and refitted PVs + //-------------------------------------------------------------------------- + std::string + BPhysTrackVertexMapTool::idTracksToString(const xAOD::TrackParticleContainer* + tpc, + unsigned int indent, + bool withPV, + bool withRefPV, + bool withSV) { + + std::string str; + std::string sind(indent, ' '); + str += sind + "ID tracks: (" + std::to_string(tpc->size()) + ")\n"; + str += sind + std::string(80-indent, '-'); + // loop over ID tracks + for (xAOD::TrackParticleContainer::const_iterator trkItr = tpc->begin(); + trkItr != tpc->end(); ++trkItr) { + const xAOD::TrackParticle* track = *trkItr; + str += "\n" + + idTrackToString(track, indent+2, withPV, withRefPV, withSV); + } + return str; + } + //-------------------------------------------------------------------------- + // Print PV container information to string -- optionally adding tracks + //-------------------------------------------------------------------------- + std::string + BPhysTrackVertexMapTool::pvsToString(const xAOD::VertexContainer* pvc, + unsigned int indent, + bool withTracks) { + + std::string str; + std::string sind(indent, ' '); + str += sind + "Primary vertices: (" + std::to_string(pvc->size()) + ")\n"; + str += sind + std::string(80-indent, '-'); + for (xAOD::VertexContainer::const_iterator vtxItr = pvc->begin(); + vtxItr != pvc->end(); ++vtxItr) { + const xAOD::Vertex* vtx = *vtxItr; + str += "\n" + pvToString(vtx, indent+2, withTracks); + } // for + + return str; + } + 
//-------------------------------------------------------------------------- + // Print refitted PV container information to string + // -- optionally adding tracks + //-------------------------------------------------------------------------- + std::string + BPhysTrackVertexMapTool::refPVsToString(const xAOD::VertexContainer* rpvc, + unsigned int indent, + bool withTracks) { + + std::string str; + std::string sind(indent, ' '); + str += sind + "Refitted primary vertices: (" + std::to_string(rpvc->size()) + ")\n"; + str += sind + std::string(80-indent, '-'); + for (xAOD::VertexContainer::const_iterator vtxItr = rpvc->begin(); + vtxItr != rpvc->end(); ++vtxItr) { + const xAOD::Vertex* vtx = *vtxItr; + str += "\n" + refPVToString(vtx, indent+2, withTracks); + } // for + + return str; + } + //-------------------------------------------------------------------------- + // Print SV container information to string -- optionally adding tracks + //-------------------------------------------------------------------------- + std::string + BPhysTrackVertexMapTool::svsToString(const xAOD::VertexContainer* svc, + unsigned int indent, + bool withTracks, + bool withMasses) { + + std::string str; + std::string sind(indent, ' '); + str += sind + "Secondary vertices: (" + std::to_string(svc->size()) + ")\n"; + str += sind + std::string(80-indent, '-'); + for (xAOD::VertexContainer::const_iterator vtxItr = svc->begin(); + vtxItr != svc->end(); ++vtxItr) { + const xAOD::Vertex* vtx = *vtxItr; + str += "\n" + svToString(vtx, indent+2, withTracks, withMasses); + } // for + + return str; + } + //-------------------------------------------------------------------------- + // Print a summary of all maps to string -- optionally adding a prefix + //-------------------------------------------------------------------------- + std::string BPhysTrackVertexMapTool::summaryToString(std::string prefix) { + + boost::format form("%s\n\nRun: %d Event: %d\n\n"); + form % name() % m_cachedRun % 
m_cachedEvent; + std::string dstr = + wrapLines("\n"+form.str() + + pvsToString(m_pvtxContainer, 0, true) + "\n\n" + + refPVsToString(m_refPVContainer, 0, true) + "\n\n" + + svsToString(m_svtxContainer, 0, true, true) + "\n\n" + + idTracksToString(m_tracks, 0, true, true, true) + "\n", + prefix); + + return dstr; + } + //-------------------------------------------------------------------------- + // Pick up a float from StoreGate. + //-------------------------------------------------------------------------- + float BPhysTrackVertexMapTool::getFloat(std::string name, + const xAOD::Vertex* b) { + + float res = -999999.; + + SG::AuxElement::Accessor<float> floatAcc(name); + if ( floatAcc.isAvailable(*b) ) res = floatAcc(*b); + + return res; + } + //-------------------------------------------------------------------------- + // Tokenize a string using certain separators + //-------------------------------------------------------------------------- + std::vector<std::string> BPhysTrackVertexMapTool + ::getTokens(std::string input, std::string seperators) { + + std::vector<std::string> tokens; + boost::char_separator<char> sep(seperators.c_str()); + typedef boost::tokenizer<boost::char_separator<char> > Tokenizer_t; + Tokenizer_t tokenizer(input, sep); + for (auto& token : tokenizer) { + tokens.push_back(token); + } + return tokens; + } + //-------------------------------------------------------------------------- +} // namespace xAOD diff --git a/PhysicsAnalysis/BPhys/BPhysTools/Root/SimpleEncrypter.cxx b/PhysicsAnalysis/BPhys/BPhysTools/Root/SimpleEncrypter.cxx new file mode 100644 index 0000000000000000000000000000000000000000..77cc5475f19267425b78afc72be5caa696740bb2 --- /dev/null +++ b/PhysicsAnalysis/BPhys/BPhysTools/Root/SimpleEncrypter.cxx @@ -0,0 +1,516 @@ +/* + Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration +*/ + +// system include: +#include <climits> +#include <vector> +#include <algorithm> +#include <cstdlib> +#include <ctime> 
+#include <cmath> + +// ROOT includes +#include <TString.h> + +// Local include(s): +#include "BPhysTools/SimpleEncrypter.h" + +namespace xAOD { + + //-------------------------------------------------------------------------- + // Private static constants + //-------------------------------------------------------------------------- + const SimpleEncrypter::ULLI_t SimpleEncrypter::m_MAXRANGE = + (SimpleEncrypter::ULLI_t)pow(std::numeric_limits<ULLI_t>::max(), 0.25); + const SimpleEncrypter::ULLI_t SimpleEncrypter::m_MINRANGE = + (SimpleEncrypter::ULLI_t)SimpleEncrypter::m_MAXRANGE/10; + const unsigned int SimpleEncrypter::m_MAXHEXDIGITS = + (unsigned int)(log(pow(SimpleEncrypter::m_MAXRANGE,2))/log(16.))+3; + + //-------------------------------------------------------------------------- + // Public methods + //-------------------------------------------------------------------------- + + //-------------------------------------------------------------------------- + // Constructor + //-------------------------------------------------------------------------- + SimpleEncrypter::SimpleEncrypter(const std::string& name) : + asg::AsgMessaging(name), m_n(0), m_e(0), m_d(0), + m_isOkForEnc(false), m_isOkForDec(false) { + + // initialize random number generator + srand(static_cast<unsigned>(time(0))); + } + + //-------------------------------------------------------------------------- + // Destructor + //-------------------------------------------------------------------------- + SimpleEncrypter::~SimpleEncrypter() { + + } + + //-------------------------------------------------------------------------- + // Generation of key pair as pair of hex strings + //-------------------------------------------------------------------------- + std::pair<std::string, std::string> SimpleEncrypter::genKeyPair() { + + // default preset + std::pair<std::string, std::string> keys = + std::make_pair("__NO_PRIV_KEY__", "__NO_PUB_KEY__"); + + // generate keys + genKeyPairInternal(); + + if ( 
isOkForEnc() && isOkForDec() ) { + keys = std::make_pair(getPrivKey(), getPubKey()); + } + return keys; + } + + //-------------------------------------------------------------------------- + // Set private key + //-------------------------------------------------------------------------- + void SimpleEncrypter::setPrivKey(std::string keystr) { + + std::pair<ULLI_t, ULLI_t> keys = decodeKeyString(keystr); + + if ( m_n > 0 && m_n != keys.first ) { + ATH_MSG_WARNING("RSA module already set!"); + } + m_n = keys.first; + m_d = keys.second; + m_isOkForDec = false; + } + //-------------------------------------------------------------------------- + // Set public key + //-------------------------------------------------------------------------- + void SimpleEncrypter::setPubKey(std::string keystr) { + + std::pair<ULLI_t, ULLI_t> keys = decodeKeyString(keystr); + + if ( m_n > 0 && m_n != keys.second ) { + ATH_MSG_WARNING("RSA module already set!"); + } + m_e = keys.first; + m_n = keys.second; + m_isOkForEnc = false; + } + //-------------------------------------------------------------------------- + // Get private key + //-------------------------------------------------------------------------- + std::string SimpleEncrypter::getPrivKey() const { + + return keyToString(m_n, m_d); + } + //-------------------------------------------------------------------------- + // Get public key + //-------------------------------------------------------------------------- + std::string SimpleEncrypter::getPubKey() const { + + return keyToString(m_e, m_n); + } + //-------------------------------------------------------------------------- + // Encrypt unsigned integer value + //-------------------------------------------------------------------------- + SimpleEncrypter::ULLI_t SimpleEncrypter::encrypt(ULLI_t a) { + + ULLI_t b = a; + + if ( isOkForEnc() ) { + b = encryptFPECycle(a); + } + return b; + } + //-------------------------------------------------------------------------- + // 
Decrypt unsigned integer value + //-------------------------------------------------------------------------- + SimpleEncrypter::ULLI_t SimpleEncrypter::decrypt(ULLI_t a) { + + ULLI_t b = a; + + if ( isOkForDec() ) { + b = decryptFPECycle(a); + } + return b; + } + //-------------------------------------------------------------------------- + // Encrypt positive float value + //-------------------------------------------------------------------------- + float SimpleEncrypter::encrypt(float a) { + + float b = a; + + if ( a > 0. ) { + if ( isOkForEnc() ) { + ULLI_t ia = floatBitsToInt(a); + ULLI_t ib = encryptFPECycle(ia); + b = intBitsToFloat(ib); + } + } else { + ATH_MSG_WARNING("Encrypt: Float value not positive: " + << a << Form(" (%a) !", a)); + } // if a > 0 + return b; + } + + //-------------------------------------------------------------------------- + // Decrypt positive float value + //-------------------------------------------------------------------------- + float SimpleEncrypter::decrypt(float a) { + + float b = a; + + // As nan is a valid encrypted value, decrypt it as well. + if ( a > 0. 
|| std::isnan(a) ) { + if ( isOkForDec() ) { + ULLI_t ia = floatBitsToInt(a); + ULLI_t ib = decryptFPECycle(ia); + b = intBitsToFloat(ib); + } + } else { + ATH_MSG_WARNING("Decrypt: Float value not positive: " + << a << Form(" (%a) !", a)); + } // if a > 0 + return b; + } + + //-------------------------------------------------------------------------- + // Private methods + //-------------------------------------------------------------------------- + + //-------------------------------------------------------------------------- + // Generate numeric representation of the keys + //-------------------------------------------------------------------------- + void SimpleEncrypter::genKeyPairInternal() { + + // Generate prime numbers p != q + ULLI_t p(1); + ULLI_t q(1); + // Euler's phi function + ULLI_t phi(1); + + // reset encryption and decryption exponent + m_e = 0; + m_d = 0; + while ( p == q || m_e < 2 || m_e >= phi || m_d < 2 + || m_e*m_d % phi != 1 ) { + double dlog2 = 0.; + while ( p == q || dlog2 < 0.1 || dlog2 > 30. 
) { + p = genPrime(); + q = genPrime(); + dlog2 = fabs(log2(p)-log2(q)); + } // inner while loop + phi = (p-1)*(q-1); + m_n = p*q; + m_e = genCoprime(phi); + m_d = genDecryptionExponent(phi, m_e); + } // outer while loop + m_isOkForDec = false; + m_isOkForEnc = false; + } + //-------------------------------------------------------------------------- + // Find a prime number + //-------------------------------------------------------------------------- + SimpleEncrypter::ULLI_t SimpleEncrypter::genPrime() const { + + ULLI_t t = (m_MINRANGE + rand()) % (m_MAXRANGE-1); + do { + t++; + } while ( !isPrime(t) || t < m_MINRANGE ); + return t; + } + //-------------------------------------------------------------------------- + // Test for being a prime number + //-------------------------------------------------------------------------- + bool SimpleEncrypter::isPrime(ULLI_t n) const { + + bool isPrime = true; + if (n != 2) { + for (LLI_t i = 2; i < (LLI_t)sqrt(n) + 1; ++i) { + if (n % i == 0) { + isPrime = false; + break; + } + } + } + return isPrime; + } + //-------------------------------------------------------------------------- + // Greatest common denominator + //-------------------------------------------------------------------------- + SimpleEncrypter::ULLI_t + SimpleEncrypter::greatestCommonDenominator(ULLI_t n1, ULLI_t n2) const { + + std::vector<LLI_t> r; + LLI_t i = 1; + r.push_back(std::max(n1, n2)); + r.push_back(std::min(n1, n2)); + while (r[i] != 0) { + ++i; + r.push_back(r[i-2] % r[i-1]); + } + return r[i-1]; + } + //-------------------------------------------------------------------------- + // Find coprime number + //-------------------------------------------------------------------------- + SimpleEncrypter::ULLI_t SimpleEncrypter::genCoprime(ULLI_t n) const { + + // make sure coprime is larger than 5th Fermat number (2^16+1 = 65537) + ULLI_t i = (65537 + rand()) % (m_MAXRANGE -1); + do { + ++i; + } while (greatestCommonDenominator(n, i) != 1); + 
return i; + } + //-------------------------------------------------------------------------- + // Find decryption exponent + //-------------------------------------------------------------------------- + SimpleEncrypter::ULLI_t + SimpleEncrypter::genDecryptionExponent(ULLI_t phi, ULLI_t e) const { + + for (ULLI_t i=1; i<m_MAXRANGE; ++i) { + if ( ((phi * i + 1) % e) == 0 ) { + return (ULLI_t)((phi * i + 1) / e); + } + } + return 0; + } + //-------------------------------------------------------------------------- + // Convert key to a hex string + //-------------------------------------------------------------------------- + std::string SimpleEncrypter::keyToString(ULLI_t a, ULLI_t b) const { + + // length of keys w.r.t. hex digits + unsigned int ra = (unsigned int)(log(a)/log(16.))+1; + unsigned int rb = (unsigned int)(log(b)/log(16.))+1; + + // random numbers for padding + unsigned int r1 = rand() & ((1 << 4*(m_MAXHEXDIGITS-ra))-1); + unsigned int r2 = rand() & ((1 << 4*(m_MAXHEXDIGITS-rb))-1); + + // format string + TString tstr = Form("%02x%02x%02x%0*x%0*llx%0*x%0*llx", + m_MAXHEXDIGITS, ra, rb, + m_MAXHEXDIGITS-ra, r1, ra, a, + m_MAXHEXDIGITS-rb, r2, rb, b); + + return std::string(tstr.Data()); + } + //-------------------------------------------------------------------------- + // Convert hex string to two integers + //-------------------------------------------------------------------------- + std::pair<SimpleEncrypter::ULLI_t, SimpleEncrypter::ULLI_t> + SimpleEncrypter::decodeKeyString(std::string hstr) const { + + std::pair<ULLI_t, ULLI_t> keys(0,0); + + TString str(hstr); + if (str.IsHex() && str.Length() > 3) { + str.ToLower(); + unsigned int ndigits = strtoul(TString(str(0,2)).Data(), nullptr, 16); + unsigned int ra = strtoul(TString(str(2,2)).Data(), nullptr, 16); + unsigned int rb = strtoul(TString(str(4,2)).Data(), nullptr, 16); + if ( str.Length() == (int)(2*ndigits + 6) ) { + keys.first = strtoll(TString(str(ndigits+6-ra, ra)).Data(), + nullptr, 16); 
+ keys.second = strtoll(TString(str(2*ndigits+6-rb, rb)).Data(), + nullptr, 16); + } else { + ATH_MSG_ERROR("Private/public key must be a hex string of " << + 2*m_MAXHEXDIGITS+6 << " digits!"); + } // if Length() + } else { + ATH_MSG_ERROR("Private/public key must be a hex string of " << + 2*m_MAXHEXDIGITS+6 << " digits!"); + } // if IsHex() ... + + return keys; + } + //-------------------------------------------------------------------------- + // Interpret bits of positive floating point number as integer + //-------------------------------------------------------------------------- + SimpleEncrypter::ULLI_t SimpleEncrypter::floatBitsToInt(float val) const { + + ULLI_t res(0); + + if ( val < 0. ) { + ATH_MSG_ERROR("Float value needs to be positive!"); + } else { + // convert floating point number to ULLI_t if size fits + if ( sizeof(float) <= sizeof(ULLI_t) ) { + // check whether a quick conversion is possible + if ( sizeof(float) == sizeof(int) ) { + int* p = reinterpret_cast<int*>(&val); + res = *p; + } else { + // do a slow conversion + char* pval = reinterpret_cast<char*>(&val); + // loop over bytes + for (unsigned int i=0; i<sizeof(float); ++i) { + // loop over bits + for (unsigned int j=0; j<CHAR_BIT; ++j) { + unsigned int n = i*CHAR_BIT + j; + unsigned int bit = (*(pval+i) >> j) & 1; + if ( bit > 0 ) res |= 1 << n; + } // for bits + } // for bytes + } // if sizeof + } else { + ATH_MSG_ERROR("sizeof(float) > sizeof(ULLI_t): " + << sizeof(float) << " > " << sizeof(LLI_t)); + } // if sizeof + } // if val < 0. 
+ + return res; + } + //-------------------------------------------------------------------------- + // Interpret bits of positive integer as floating point number + //-------------------------------------------------------------------------- + float SimpleEncrypter::intBitsToFloat(ULLI_t val) const { + + float res(0.); + + // number of bits needed + unsigned int r = (int)(std::log2(val))+1; + + // convert ULLI_t to floating point number if size fits + if ( sizeof(float)*CHAR_BIT >= r ) { + // check whether a quick conversion is possible + if ( sizeof(float) == sizeof(int) ) { + float* p = reinterpret_cast<float*>(&val); + res = *p; + } else { + // do a slow conversion + char* pres = reinterpret_cast<char*>(&res); + // loop over bytes + for (unsigned int i=0; i<sizeof(float); ++i) { + // loop over bits + for (unsigned int j=0; j<CHAR_BIT; ++j) { + unsigned int n = i*CHAR_BIT + j; + unsigned int bit = (val >> n) & 1; + if ( bit > 0 ) *(pres+i) |= 1 << j; + } // for bits + } // for bytes + } // if sizeof + } else { + ATH_MSG_WARNING("sizeof(float)*CHAR_BIT < r: " + << sizeof(float)*CHAR_BIT << " < " << r); + } // if sizeof + + return res; + } + //-------------------------------------------------------------------------- + // Encrypt using format preserving encryption w.r.t. RSA modulus + // via cycling + //-------------------------------------------------------------------------- + SimpleEncrypter::ULLI_t SimpleEncrypter::encryptFPECycle(ULLI_t a) const { + + ULLI_t enc = 0; + if ( a > 0 ) { + ULLI_t r = (int)(std::log2(m_n)); + ULLI_t rmask = pow(2,r)-1; + ULLI_t c = a & rmask; + ULLI_t b = a - c; + do { + c = encryptInternal(c); + } while ( c > rmask ); + enc = b + c; + } // if + return enc; + } + //-------------------------------------------------------------------------- + // Decrypt using format preserving encryption w.r.t. 
RSA modulus + // via cycling + //-------------------------------------------------------------------------- + SimpleEncrypter::ULLI_t SimpleEncrypter::decryptFPECycle(ULLI_t enc) const { + + ULLI_t dec = 0; + if ( enc > 0 ) { + ULLI_t r = (int)(std::log2(m_n)); + ULLI_t rmask = pow(2,r)-1; + ULLI_t d = enc & rmask; + ULLI_t b = enc - d; + do { + d = decryptInternal(d); + } while ( d > rmask ); + dec = d + b; + } // if + return dec; + } + //-------------------------------------------------------------------------- + // Encrypt integer + //-------------------------------------------------------------------------- + SimpleEncrypter::ULLI_t SimpleEncrypter::encryptInternal(ULLI_t x) const { + + return powerMod(x, m_e, m_n); + } + //-------------------------------------------------------------------------- + // Decrypt integer + //-------------------------------------------------------------------------- + SimpleEncrypter::ULLI_t SimpleEncrypter::decryptInternal(ULLI_t x) const { + + return powerMod(x, m_d, m_n); + } + //-------------------------------------------------------------------------- + // Exponentiate a with d observing modulus n + //-------------------------------------------------------------------------- + SimpleEncrypter::ULLI_t + SimpleEncrypter::powerMod(ULLI_t a, ULLI_t d, ULLI_t n) const { + + int bin[sizeof(ULLI_t)*CHAR_BIT]; + ULLI_t dec[sizeof(ULLI_t)*CHAR_BIT]; + + ULLI_t r = (ULLI_t)(std::log2(d))+1; + ULLI_t tmp = d; + // decompose exponent into binary number (reverse order!) 
+ for (ULLI_t i=0; i < r; ++i) { + bin[r-i-1] = tmp % 2; + tmp = (LLI_t)(tmp/2); + } // for i + + // perform the exponentiation taking modulus into account + dec[0] = a; + for (ULLI_t i=1; i < r; ++i) { + ULLI_t d2 = dec[i-1]*dec[i-1] % n; + if ( bin[i] > 0 ) d2 *= a; + dec[i] = d2 % n; + } // for i + + return dec[r-1]; + } + //-------------------------------------------------------------------------- + // Check setup readiness for encryption + //-------------------------------------------------------------------------- + bool SimpleEncrypter::isOkForEnc() { + + if ( !m_isOkForEnc ) { + if ( m_n > 0 && m_e > 1 && m_e < m_n ) { + m_isOkForEnc = true; + } else { + ATH_MSG_ERROR("Setup not OK for encryption: public key set?"); + } + } // if ! m_isOkForEnc + + return m_isOkForEnc; + } + + //-------------------------------------------------------------------------- + // Check setup readiness for decryption + //-------------------------------------------------------------------------- + bool SimpleEncrypter::isOkForDec() { + + if ( !m_isOkForDec ) { + if ( m_n > 0 && m_d > 1 && m_d < m_n ) { + m_isOkForDec = true; + } else { + ATH_MSG_ERROR("Setup not OK for decryption: private key set?"); + } + } // if ! 
m_isOkForDec + + return m_isOkForDec; + } + + //-------------------------------------------------------------------------- +} // namespace xAOD diff --git a/PhysicsAnalysis/BPhys/BPhysTools/src/components/BPhysTools_entries.cxx b/PhysicsAnalysis/BPhys/BPhysTools/src/components/BPhysTools_entries.cxx new file mode 100644 index 0000000000000000000000000000000000000000..2298acdeb1092567af4e6b0e3d194a7184f22b62 --- /dev/null +++ b/PhysicsAnalysis/BPhys/BPhysTools/src/components/BPhysTools_entries.cxx @@ -0,0 +1,4 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ + diff --git a/PhysicsAnalysis/BPhys/BPhysTools/util/createBlindingKeys.cxx b/PhysicsAnalysis/BPhys/BPhysTools/util/createBlindingKeys.cxx new file mode 100644 index 0000000000000000000000000000000000000000..3be9cb51430fdfd9c2ae506a52e856ff30fcf84b --- /dev/null +++ b/PhysicsAnalysis/BPhys/BPhysTools/util/createBlindingKeys.cxx @@ -0,0 +1,83 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ + +/** + * @file creteBlindingKeys.cxx + * @author Wolfgang Walkowiak <Wolfgang.Walkowiak@cern.ch> + * + * @brief Utility to create a set of blinding keys + * + * @param[in] option: -c : perform a quick encoding/decoding check + */ + +// system includes: +#include <iostream> +#include <iomanip> +#include <set> +#include <string> +#include <cstdlib> +#include <ctime> + +// Local include(s): +#include "BPhysTools/SimpleEncrypter.h" + +int main(int argc, char* argv[]) { + + // Enable check (-c flag) + bool doCheck(false); + int nChecks(1); + if ( argc > 1 ) { + std::string arg(argv[1]); + if ( arg == "-c" ) doCheck = true; + } + if ( argc > 2 ) { + nChecks = atoi(argv[2]); + } + + // Helper object + xAOD::SimpleEncrypter senc; + + // Create key pair + std::pair<std::string, std::string> keys = senc.genKeyPair(); + + std::cout << std::endl; + std::cout << "Blinding keys generated:" << std::endl; + std::cout << " Private key: " << keys.first << std::endl; + 
std::cout << " Public key: " << keys.second << std::endl; + std::cout << std::endl; + + // check that encryption works + if ( doCheck ) { + srand(static_cast<unsigned>(time(0))); + + std::cout << "Encryption test:" << std::endl; + int nOK(0); + for (int i=0; i<nChecks; ++i) { + float val = 10000.* + static_cast <float>(rand())/(static_cast <float> (RAND_MAX)); + // float val = 5267.23; + float enc = senc.encrypt(val); + float dec = senc.decrypt(enc); + if ( dec == val ) ++nOK; + if ( i == 0 || dec != val ) { + std::cout << " Test # " << i << std::endl; + std::cout << " val = " << val << std::endl; + std::cout << " enc = " << enc << std::endl; + std::cout << " dec = " << dec << std::endl; + if ( dec == val ) { + std::cout << " => worked!" << std::endl; + } else { + std::cout << " => FAILED!" << std::endl; + } + } // if + } // for + std::cout << std::endl; + std::cout << "Summary:" << std::endl; + std::cout << " nChecks: " << std::setw(12) << nChecks << std::endl; + std::cout << " nOK : " << std::setw(12) << nOK << std::endl; + std::cout << " nFailed: " << std::setw(12) << nChecks - nOK << std::endl; + } // if + + exit(0); +} diff --git a/PhysicsAnalysis/D3PDMaker/D3PDMakerConfig/share/AODToEgammaD3PD.py b/PhysicsAnalysis/D3PDMaker/D3PDMakerConfig/share/AODToEgammaD3PD.py index 3a7512fb4bf6feff19ec760965674ea376b81519..dfa8e02ffd393d1475db8f6d90137207f9bfb8b0 100644 --- a/PhysicsAnalysis/D3PDMaker/D3PDMakerConfig/share/AODToEgammaD3PD.py +++ b/PhysicsAnalysis/D3PDMaker/D3PDMakerConfig/share/AODToEgammaD3PD.py @@ -13,6 +13,8 @@ from AthenaCommon.AthenaCommonFlags import athenaCommonFlags athenaCommonFlags.FilesInput= ["AOD.pool.root"] +from AthenaConfiguration.AllConfigFlags import ConfigFlags +ConfigFlags.Input.Files = athenaCommonFlags.FilesInput() ###################################################################3 diff --git a/PhysicsAnalysis/D3PDMaker/D3PDMakerConfig/share/EgammaD3PD_prodJobOFragment.py 
b/PhysicsAnalysis/D3PDMaker/D3PDMakerConfig/share/EgammaD3PD_prodJobOFragment.py index a65e3009e84fc224d28a2f8b1edc0999c941855b..2b11e34ce1e6e94b26261a115de79c831823c7a6 100644 --- a/PhysicsAnalysis/D3PDMaker/D3PDMakerConfig/share/EgammaD3PD_prodJobOFragment.py +++ b/PhysicsAnalysis/D3PDMaker/D3PDMakerConfig/share/EgammaD3PD_prodJobOFragment.py @@ -23,9 +23,6 @@ if prodFlags.WriteEgammaD3PD.isVirtual: raise NameError( "Egamma D3PD set to be a virtual stream" ) pass -#configure MuonScatteringAngleSignificanceTool -include("JetTagD3PDMaker/MuonScatteringSigToolConfig.py") - # Construct the stream and file names for the SUSY D3PD: streamName = prodFlags.WriteEgammaD3PD.StreamName fileName = buildFileName( prodFlags.WriteEgammaD3PD ) diff --git a/PhysicsAnalysis/D3PDMaker/MissingETD3PDMaker/CMakeLists.txt b/PhysicsAnalysis/D3PDMaker/MissingETD3PDMaker/CMakeLists.txt index 5f934fcd6f0af52d328bbce19ba9c4c13d063f4f..8681b0e32097705a902daee95cc504dfc6cfbb3a 100644 --- a/PhysicsAnalysis/D3PDMaker/MissingETD3PDMaker/CMakeLists.txt +++ b/PhysicsAnalysis/D3PDMaker/MissingETD3PDMaker/CMakeLists.txt @@ -15,6 +15,6 @@ atlas_add_component( MissingETD3PDMaker LINK_LIBRARIES ${ROOT_LIBRARIES} ${CLHEP_LIBRARIES} CaloEvent StoreGateLib EventKernel GaudiKernel D3PDMakerInterfaces D3PDMakerUtils JetEvent MissingETEvent MissingETGoodnessLib MissingETPerformanceLib Particle AthenaKernel FourMomUtils xAODMissingET muonEvent egammaEvent tauEvent ) # Install files from the package: -atlas_install_python_modules( python/*.py ) +atlas_install_python_modules( python/*.py POST_BUILD_CMD ${ATLAS_FLAKE8} ) atlas_install_joboptions( share/*.py ) diff --git a/PhysicsAnalysis/D3PDMaker/MissingETD3PDMaker/python/METGetterTrack.py b/PhysicsAnalysis/D3PDMaker/MissingETD3PDMaker/python/METGetterTrack.py deleted file mode 100644 index 69d30a442ef5e5fa26e199c0a34019456beed525..0000000000000000000000000000000000000000 --- a/PhysicsAnalysis/D3PDMaker/MissingETD3PDMaker/python/METGetterTrack.py +++ /dev/null 
@@ -1,100 +0,0 @@ -# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration - -############################################################################################## -# -# METTrack Getters for MissingET.METAlg -# -# Author : Jet Goodson after S. Resconi -# Date : 27 March 2010 -# -#How to run this: -#include this file -#then add the line: -#METGetterTrack() -#before your alg (ie, the d3pd) -############################################################################################## - -from AthenaCommon.SystemOfUnits import * # loads MeV etc... -from AthenaCommon.Constants import * # Loads DEBUG INFO etc.. -from AthenaCommon.Logging import logging # loads logger -import traceback # to allow printout of trace back - -from RecExConfig.Configured import Configured # import base class - -from MissingET.METRefGetter import getStandardCalibTool # to get calib from DB - -from CaloTools.CaloNoiseToolDefault import CaloNoiseToolDefault -theCaloNoiseTool=CaloNoiseToolDefault() -from AthenaCommon.AppMgr import ToolSvc -ToolSvc+=theCaloNoiseTool - -class METGetterTrack ( Configured ): - - def configure(self): - mlog = logging.getLogger ('METGetterTrack::configure:') - mlog.info ('entering') - - # now configure the algorithm - try: - from MissingET.MissingETConf import METAlg - theMETAlg=METAlg("METAlg") - except: - mlog.error("could not import MissingET.METAlg") - mlog.error(traceback.format_exc()) - return False - - - from AthenaCommon.DetFlags import DetFlags - if DetFlags.detdescr.ID_on(): - try: - from MissingET.MissingETConf import METTrackTool - theMETTrackTool = METTrackTool("METTrack"); - - - # if doFastCaloSim set calibrator tool for ATLFAST2 - from CaloRec.CaloCellFlags import jobproperties - if jobproperties.CaloCellFlags.doFastCaloSim: - doAtlfastII=True - else: - doAtlfastII=False - - cellcalibtool = getStandardCalibTool(doAtlfastII); - calibtool_name = cellcalibtool.name(); - - # add track select user interface - theMETTrackTool.trackd0 
= 1.5 # cut on trackd0 - theMETTrackTool.trackz0 = 1.50 # cut on trackz0 - theMETTrackTool.trackPtMin = 500.0 # cut on trackPtMin - theMETTrackTool.trackPtMax = 9999999.0 - theMETTrackTool.trackPixelHits = 1 - theMETTrackTool.trackSCTHits = 6 - theMETTrackTool.trackChi2OverNdf = 999999 - theMETTrackTool.UseInsideOut = 1 - - theMETTrackTool.outKey = "MET_Track" - - except: - mlog.error("could not get handle to METTrackTool Quit") - mlog.error(traceback.format_exc()) - return False - - # add cellcalibtool - theMETTrackTool += cellcalibtool - - # add METTrackTool to list of tools - theMETAlg.AlgTools+= [ theMETTrackTool.getFullName() ] - - # add tools to alg - theMETAlg += theMETTrackTool - - -#------------------------------------------------------------------------------------------------ - # add algorithm to topSequence (this should always come at the end) - - mlog.info(" now adding to topSequence") - from AthenaCommon.AlgSequence import AlgSequence - topSequence = AlgSequence() - topSequence += theMETAlg - - return True - diff --git a/PhysicsAnalysis/D3PDMaker/MissingETD3PDMaker/python/MissingETCompAssociation.py b/PhysicsAnalysis/D3PDMaker/MissingETD3PDMaker/python/MissingETCompAssociation.py deleted file mode 100644 index 27b3d46d3622300dde190c5bd379d56bb20f94fc..0000000000000000000000000000000000000000 --- a/PhysicsAnalysis/D3PDMaker/MissingETD3PDMaker/python/MissingETCompAssociation.py +++ /dev/null @@ -1,54 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -# $Id$ -# -# @file MissingETD3PDMaker/python/MissingETAssociation.py -# @author Jet Goodson <jgoodson@cern.ch> -# @date September, 2010 -# @brief Helper for setting up a MissingET association to a contained object. 
-# - -import D3PDMakerCoreComps -import MissingETD3PDMaker -from D3PDMakerCoreComps.D3PDObject import D3PDObject - - -def MissingETCompAssociation (parent, - type_name, - default_sgkey, - assocTool = MissingETD3PDMaker.MissingETCompAssociationTool, - prefix = '', - matched = '', - level = 0, - objectType = 'egamma', - blockname = None, - allowMissing = False, - *args, **kw): - - if blockname == None: - blockname = prefix + 'METCompAssoc' - - def maker (name, prefix, object_name, - sgkey = default_sgkey, - getter = None, - assoc = None): - - if not getter: - getter = D3PDMakerCoreComps.SGObjGetterTool \ - (name + '_Getter', - TypeName = type_name, - SGKey = sgkey) - - assoc = assocTool (name + 'Assoc', - Getter = getter, - ObjectType = objectType, - AllowMissing = allowMissing) - - return D3PDMakerCoreComps.ContainedAssociationFillerTool (name, - Prefix = prefix, - Associator = assoc, - Matched = matched) - - obj = D3PDObject (maker, prefix) - parent.defineBlock (level, blockname, obj) - return obj diff --git a/PhysicsAnalysis/D3PDMaker/MissingETD3PDMaker/python/MissingETD3PD.py b/PhysicsAnalysis/D3PDMaker/MissingETD3PDMaker/python/MissingETD3PD.py index 8ddaf7dbed5eb50ca7fd5d24c11d26fae85895b0..c37c458c371f499de43a2afedb449bfc532aaa2c 100644 --- a/PhysicsAnalysis/D3PDMaker/MissingETD3PDMaker/python/MissingETD3PD.py +++ b/PhysicsAnalysis/D3PDMaker/MissingETD3PDMaker/python/MissingETD3PD.py @@ -1,67 +1,20 @@ -# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration import D3PDMakerCoreComps -from D3PDMakerConfig.D3PDMakerFlags import D3PDMakerFlags -from RecExConfig.RecFlags import rec -from MissingETD3PDMaker.MissingETD3PDMakerFlags import * -from MissingETD3PDMaker.MissingETD3PDObject import * -from MissingETD3PDMaker.MissingETGoodnessD3PDObject import MissingETGoodnessD3PDObject -## from TrackD3PDMaker.TrackD3PDMakerFlags import * -## from CaloD3PDMaker.ClusterD3PDObject 
import ClusterD3PDObject -## from egammaD3PDMaker.ElectronD3PDObject import ElectronD3PDObject -## from egammaD3PDMaker.PhotonD3PDObject import PhotonD3PDObject -## from MuonD3PDMaker.MuonD3PDObject import MuonD3PDObject -## from JetD3PDMaker.JetD3PDObject import JetD3PDObject -## from TauD3PDMaker.TauD3PDObject import TauD3PDObject +from MissingETD3PDMaker.MissingETD3PDObject import MissingETD3PDObject from AthenaCommon.AlgSequence import AlgSequence topSequence = AlgSequence() - + def MissingETD3PD (file, level = 10, tuplename = 'METD3PD', seq = topSequence, D3PDSvc = 'D3PD::RootD3PDSvc'): - + alg = D3PDMakerCoreComps.MakerAlg(tuplename, seq, file = file, D3PDSvc = D3PDSvc) - alg += MissingETD3PDObject (level=level, allowMissing = True, exclude=['MET_Base', 'MET_Truth_Int', 'MET_RefFinal_Phi', 'MET_MuonBoy_Et', 'MET_RefJet_SumEt'])#, exclude = ['MET_Muon_Isol_Muid', 'MET_Muon_Total_Muid', 'MET_Muon_NonIsol_Muid','MET_Muon_Isol_Staco', 'MET_Muon_Total_Staco', 'MET_Muon_NonIsol_Staco', 'MET_SoftJets', 'MET_RefMuon_Muid', 'MET_RefMuon_Staco', 'MET_RefMuon_Track_Muid', 'MET_RefMuon_Track_Staco']) - ##the terms in the exclude before this are specialized terms used by MET experts. 
They're not in regular AODs or ESDs [yet?], just specialized datasets used for the Pisa Hadronic Cal Workshop - so you may want to exclude them, otherwise they may show up as zeroed sets - # alg += MissingETGoodnessD3PDObject (level=level, allowMissing = True) - - ##Example of a custom object - #alg += MissingETD3PDObject (level=level, sgkey = MissingETD3PDMakerFlags.METRefEleSGKey(), prefix=MissingETD3PDMakerFlags.METRefEleSGKey(), allowMissing = True) - - ## #More involved custom example - these are setup for custom MET D3PDs for the Pisa Hadronic Cal Workshop -## customMETs_Staco = ['MET_RefFinal', 'MET_RefGamma', 'MET_RefEle', 'MET_RefTau', 'MET_RefJet', 'MET_RefMuon' ,'MET_RefMuon_Staco', 'MET_CellOut', 'MET_Cryo', 'MET_Muon_Isol_Staco', 'MET_Muon_NonIsol_Staco', 'MET_Muon_Total_Staco', 'MET_SoftJets', 'MET_RefMuon_Track', 'MET_RefMuon_Track_Staco'] - - -## customMETs_Muid = ['MET_RefFinal', 'MET_RefGamma', 'MET_RefEe', 'MET_RefTau', 'MET_RefJet', 'MET_RefMuon' ,'MET_RefMuon_Muid', 'MET_CellOut', 'MET_Cryo', 'MET_Muon_Isol_Muid', 'MET_Muon_NonIsol_Muid', 'MET_Muon_Total_Muid', 'MET_SoftJets', 'MET_RefMuon_Track', 'MET_RefMuon_Track_Muid'] - - -## for custom in customMETs_Staco: -## alg += MissingETD3PDObject (level=0, sgkey = custom+'_LCW_pt20', prefix=custom+'_LCW_pt20', allowMissing = True) -## alg += MissingETD3PDObject (level=0, sgkey = custom+'_GCW_pt20', prefix=custom+'_GCW_pt20', allowMissing = True) -## alg += MissingETD3PDObject (level=0, sgkey = custom+'_LCW_NI_pt20_noSoftJets_eflow', prefix=custom+'_LCW_NI_pt20_noSoftJets_eflow', allowMissing = True) - -## for custom in customMETs_Muid: -## alg += MissingETD3PDObject (level=0, sgkey = custom+'_GCW_NI_pt20_Muid', prefix=custom+'_GCW_NI_pt20_Muid', allowMissing = True) -## alg += MissingETD3PDObject (level=0, sgkey = custom+'_LCW_NI_pt20_Muid_eflow', prefix=custom+'_LCW_NI_pt20_Muid_eflow', allowMissing = True) - - - ## alg += ElectronD3PDObject(0) -## alg += PhotonD3PDObject(0) -## alg += 
TauD3PDObject(0) -## alg += MuonD3PDObject(0, sgkey='StacoMuonCollection', prefix="mu_staco_") -## alg += MuonD3PDObject(0, sgkey='MuidMuonCollection', prefix="mu_muid_") -## alg += JetD3PDObject(0, sgkey=MissingETD3PDMakerFlags.METDefaultJetCollectionSGKey(), prefix="jet_"+MissingETD3PDMakerFlags.METDefaultJetCollectionSGKey()[:-4].lower()+"_") -## alg += ClusterD3PDObject(0, prefix='cl_') -## #TrackD3PDMakerFlags.stor eTrackPredictionAtBLayer = False - ##If you want a custom objecto MissingEtCalo type use MissingETCaloD3PDObject - ##Or TruthMETD3PDObject - ##JetsInfoMETD3PDObject - ##EtaRingsMETD3PDObject - ##those and MissingETD3PDObject are the distinct types + alg += MissingETD3PDObject (level=level, allowMissing = True, exclude=['MET_Base', 'MET_Truth_Int', 'MET_RefFinal_Phi', 'MET_MuonBoy_Et', 'MET_RefJet_SumEt']) return alg diff --git a/PhysicsAnalysis/D3PDMaker/MissingETD3PDMaker/python/MissingETD3PDMakerFlags.py b/PhysicsAnalysis/D3PDMaker/MissingETD3PDMaker/python/MissingETD3PDMakerFlags.py index 72875c15b0ce71921c46566026a4cd50059bea17..70553395ae129eac9978889a6c3b39bb9a27d342 100644 --- a/PhysicsAnalysis/D3PDMaker/MissingETD3PDMaker/python/MissingETD3PDMakerFlags.py +++ b/PhysicsAnalysis/D3PDMaker/MissingETD3PDMaker/python/MissingETD3PDMakerFlags.py @@ -1,4 +1,4 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration # $Id$ # @@ -97,9 +97,9 @@ _sgkey_prop ('METMuonsSpectroSGKey', 'MET_Muons_Spectro') # 3rd chain _sgkey_prop ('METMuonsTrackSGKey', 'MET_Muons_Track') # 3rd chain _sgkey_prop ('METTruthPileUpSGKey', 'MET_Truth_PileUp') -_sgkey_prop ('AllCaloCellsSGKey', 'AllCalo');##change to AllCalo for ESD use AODCellContainer for AOD -_sgkey_prop ('METDefaultJetCollectionSGKey', 'AntiKt4LCTopoJets,Cone4H1TopoJets'); ##probably want to key to default later -_sgkey_prop ('METDefaultTrackCollectionSGKey', 'TrackParticleCandidate'); ##probably want to key to 
default later +_sgkey_prop ('AllCaloCellsSGKey', 'AllCalo') ##change to AllCalo for ESD use AODCellContainer for AOD +_sgkey_prop ('METDefaultJetCollectionSGKey', 'AntiKt4LCTopoJets,Cone4H1TopoJets') ##probably want to key to default later +_sgkey_prop ('METDefaultTrackCollectionSGKey', 'TrackParticleCandidate') ##probably want to key to default later _sgkey_prop ('METRefFinalEMSGKey', 'MET_RefFinal_em') _sgkey_prop ('METRefEleEMSGKey', 'MET_RefEle_em') _sgkey_prop ('METRefJetEMSGKey', 'MET_RefJet_em') @@ -131,11 +131,11 @@ _sgkey_prop ('METDefaultJetPrefix', 'jet_antikt4LCtopo_MET_') _sgkey_prop ('METDefaultTrackPrefix', 'trk_MET_') ##########Trigger Flags -_sgkey_prop ('METL1SGKey' , 'LVL1_ROI'); -_sgkey_prop ('METL2SGKey' , 'HLT_T2MissingET'); -_sgkey_prop ('METEFSGKey' , 'HLT_TrigEFMissingET'); -_sgkey_prop ('METEFNoiseSGKey' , 'HLT_TrigEFMissingET_noiseSupp'); -_sgkey_prop ('METEFFEBSGKey' , 'HLT_TrigEFMissingET_FEB'); +_sgkey_prop ('METL1SGKey' , 'LVL1_ROI') +_sgkey_prop ('METL2SGKey' , 'HLT_T2MissingET') +_sgkey_prop ('METEFSGKey' , 'HLT_TrigEFMissingET') +_sgkey_prop ('METEFNoiseSGKey' , 'HLT_TrigEFMissingET_noiseSupp') +_sgkey_prop ('METEFFEBSGKey' , 'HLT_TrigEFMissingET_FEB') class DoTruth (JobProperty): """If true, put truth information in D3PD.""" diff --git a/PhysicsAnalysis/D3PDMaker/MissingETD3PDMaker/python/MissingETD3PDTriggerBitsObject.py b/PhysicsAnalysis/D3PDMaker/MissingETD3PDMaker/python/MissingETD3PDTriggerBitsObject.py deleted file mode 100644 index b6e6c9a9b53d34f592cc1548b38c0c06e5d6a597..0000000000000000000000000000000000000000 --- a/PhysicsAnalysis/D3PDMaker/MissingETD3PDMaker/python/MissingETD3PDTriggerBitsObject.py +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -# $Id$ -# -#@file MissingETD3PDMaker/python/MissingETD3PDTriggerBitsObject.py -#@author Jet Goodson <jgoodson@cern.ch> (copied Haifeng Li's & Scott Snyder's tool in egamma) -#@date 12 Nov, 2009 -#@brief Define trigger 
bit blocks for MissingET -# - -from D3PDMakerCoreComps.D3PDObject import make_Void_D3PDObject - - -METD3PDTriggerBitsObject = \ - make_Void_D3PDObject (default_name = 'MissingETTriggerBitsFiller') - -# -# The MET trigger bits are now added in MissingETD3PDObject; -# this file is kept just for backwards compatibility. diff --git a/PhysicsAnalysis/D3PDMaker/MissingETD3PDMaker/python/MissingETD3PD_GoodnessModule.py b/PhysicsAnalysis/D3PDMaker/MissingETD3PDMaker/python/MissingETD3PD_GoodnessModule.py deleted file mode 100644 index 74163f63b95259b74d5402e8e81fe30f6f84136d..0000000000000000000000000000000000000000 --- a/PhysicsAnalysis/D3PDMaker/MissingETD3PDMaker/python/MissingETD3PD_GoodnessModule.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -import D3PDMakerCoreComps -from D3PDMakerConfig.D3PDMakerFlags import D3PDMakerFlags -from RecExConfig.RecFlags import rec -from MissingETD3PDMaker.MissingETD3PDMakerFlags import * -from MissingETD3PDMaker.MissingETD3PDGoodnessD3PDObject import * - -from AthenaCommon.AlgSequence import AlgSequence -topSequence = AlgSequence() - -## config file in case one only wishes to add the met goodness module - -def METPhysicsD3PDGoodness (file, - level = 4, - tuplename = 'METPhysicsD3PD', - seq = topSequence, - D3PDSvc = 'D3PD::RootD3PDSvc'): - - alg = D3PDMakerCoreComps.MakerAlg(tuplename, seq, - file = file, D3PDSvc = D3PDSvc) - - ## goodness filler - alg += MissingETGoodnessD3PDObject (level, allow_missing = True) - #Level 4 Objects ---- MetPerf Cleaning Variables - - alg += JetsInfoMETD3PDObject (level, allow_missing = True) - - return alg - diff --git a/PhysicsAnalysis/D3PDMaker/MissingETD3PDMaker/python/MissingETGoodnessD3PDObject.py b/PhysicsAnalysis/D3PDMaker/MissingETD3PDMaker/python/MissingETGoodnessD3PDObject.py index e748bea49aa65dd8b59354cb2115a9c3c992e029..392680b5549d5eedc7522e2f16942c68ccdff31b 100644 --- 
a/PhysicsAnalysis/D3PDMaker/MissingETD3PDMaker/python/MissingETGoodnessD3PDObject.py +++ b/PhysicsAnalysis/D3PDMaker/MissingETD3PDMaker/python/MissingETGoodnessD3PDObject.py @@ -1,11 +1,9 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration import MissingETD3PDMaker -import D3PDMakerCoreComps from D3PDMakerCoreComps.D3PDObject import make_SG_D3PDObject from D3PDMakerConfig.D3PDMakerFlags import D3PDMakerFlags -from MissingETD3PDMaker.MissingETD3PDMakerFlags import * -from MissingETD3PDMaker.MissingETD3PDMakerConf import * +from MissingETD3PDMaker.MissingETD3PDMakerFlags import MissingETD3PDMakerFlags MissingETGoodnessD3PDObject = \ make_SG_D3PDObject ('MissingET', diff --git a/PhysicsAnalysis/D3PDMaker/MissingETD3PDMaker/python/STVFMETGetter.py b/PhysicsAnalysis/D3PDMaker/MissingETD3PDMaker/python/STVFMETGetter.py deleted file mode 100644 index b55512488d4e1e2f93cfea032ed5b909e7e02f2f..0000000000000000000000000000000000000000 --- a/PhysicsAnalysis/D3PDMaker/MissingETD3PDMaker/python/STVFMETGetter.py +++ /dev/null @@ -1,113 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -# $Id: STVFMETGetter.py 525597 2012-11-13 12:54:50Z ssnyder $ -# -# This configurable can schedule the STVF MET reconstruction if it's needed -# by the D3PDMaker job. - -# Gaudi/Athena import(s): -from AthenaCommon.Logging import logging -from AthenaCommon.AlgSequence import AlgSequence -from RecExConfig.Configured import Configured - -# D3PDMaker import(s): -from D3PDMakerConfig.D3PDMakerFlags import D3PDMakerFlags - -## -# @short Class decorator for implementing the singleton pattern -# -# I just copy-pasted this code from the TauDiscriGetter code. -# The idea is the same. The STVF MET reconstruction should only be -# scheduled once into a job. 
-# -def singleton( cls ): - - __log = logging.getLogger( "%s::__init__" % cls.__name__ ) - __instances = {} - - def getinstance( *args, **kwargs ): - - # Check if the singleton has already been created: - if cls in __instances: - __log.debug( "STVF MET reconstruction already configured" ) - return __instances[ cls ] - - # Create the singleton: - obj = cls( *args, **kwargs ) - __instances[ cls ] = obj - return obj - - return getinstance - -## -# @short Singleton class setting up the STVF MET reconstruction -# -# This class is responsible for setting up the STVF MET reconstruction for -# D3PDMaker jobs. -# -# @author Attila Krasznahorkay <Attila.Krasznahorkay@cern.ch> -# -# $Revision: 525597 $ -# $Date: 2012-11-13 13:54:50 +0100 (Tue, 13 Nov 2012) $ -@singleton -class STVFMETGetter( Configured ): - - def __init__( self, - name = "STVFMETGetter", - sequence = AlgSequence( D3PDMakerFlags.PreD3PDAlgSeqName() ), - **kw): - - # Remember the parameter(s): - self.__name = name - self.__sequence = sequence - Configured.__init__( self, **kw ) - - def configure( self ): - - # Let the user know what we're doing: - __log = logging.getLogger( "STVFMETGetter" ) - __log.info( "Configuring STVF MET reconstruction" ) - - # Sort out the sequence first. This is not pretty, but is needed - # in order to be able to use the default configurable nicely outside - # of D3PDMaker jobs. 
- topSeq = AlgSequence() - if self.__sequence != topSeq and \ - not hasattr( topSeq, self.__sequence._name ): - topSeq += self.__sequence - pass - - # Turn on MET reconstruction: - from RecExConfig.RecAlgsFlags import recAlgs - recAlgs.doMissingET.set_Value_and_Lock( True ) - - # Schedule the reconstruction of the STVF objects: - from MissingET.METRefGetter_newplup import make_METRefAlg - from AthenaCommon.SystemOfUnits import GeV - METalg_STVF = make_METRefAlg( _suffix = '_STVF' ) - METalg_STVF.sequence = self.__sequence - METalg_STVF.jet_JetInputCollectionKey = 'AntiKt4LCTopoJets' - METalg_STVF.jet_JetPtCut = 20.0 * GeV - METalg_STVF.jet_ApplyJetScale = "Yes" - METalg_STVF.jet_UseJetMomentForScale = True - METalg_STVF.jet_JetMomentForScale = "LCJES" - METalg_STVF.jet_ApplyJetJVF = "Yes" - METalg_STVF.jet_RunSoftJetsTool = False - METalg_STVF.jet_calibType = 'LocHad' - METalg_STVF.ele_calibType = 'RefCalib' - METalg_STVF.gamma_calibType = 'EmScale' - METalg_STVF.plupSuppCorr = 'STVF' - METalg_STVF.celloutCorrection = 'STVF' - METalg_STVF.cellout_calibType = 'Eflow' - METalg_STVF.tau_calibType = 'ExclRefCalib' - METalg_STVF.cryo_ApplyCorrection = "Off" - METalg_STVF.muon_algorithm = "Staco" - METalg_STVF.muon_isolationAlg = "dRJet" - - # Only run this if MET_RefFinal_STVF doesn't exist already. 
- METalg_STVF._output = {'MissingET' : ['MET_RefFinal_STVF']} - - METalg_STVF() - - # Signal that everything went okay: - return True diff --git a/PhysicsAnalysis/D3PDMaker/TileD3PDMaker/CMakeLists.txt b/PhysicsAnalysis/D3PDMaker/TileD3PDMaker/CMakeLists.txt index 4207f67cf1f74bc255b50d9a596582fb085c5e9c..b62564de17cdcf463967e745dd48433a1330c0b3 100644 --- a/PhysicsAnalysis/D3PDMaker/TileD3PDMaker/CMakeLists.txt +++ b/PhysicsAnalysis/D3PDMaker/TileD3PDMaker/CMakeLists.txt @@ -1,26 +1,19 @@ -################################################################################ -# Package: TileD3PDMaker -################################################################################ +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration # Declare the package name: atlas_subdir( TileD3PDMaker ) # External dependencies: -find_package( Boost COMPONENTS filesystem thread system ) -find_package( CORAL COMPONENTS CoralBase CoralKernel RelationalAccess ) -find_package( ROOT COMPONENTS Core Tree MathCore Hist RIO pthread ) +find_package( ROOT COMPONENTS Core MathCore ) # Component(s) in the package: atlas_add_component( TileD3PDMaker src/*.cxx src/components/*.cxx - INCLUDE_DIRS ${ROOT_INCLUDE_DIRS} ${Boost_INCLUDE_DIRS} ${CORAL_INCLUDE_DIRS} - LINK_LIBRARIES ${ROOT_LIBRARIES} ${Boost_LIBRARIES} ${CORAL_LIBRARIES} AtlasHepMCLib CaloEvent CaloGeoHelpers AthContainers AthenaBaseComps - StoreGateLib SGtests GeoModelUtilities Identifier EventInfo xAODCaloEvent xAODEventInfo xAODMissingET xAODMuon xAODPrimitives xAODTracking - xAODTrigger GaudiKernel GeneratorObjects D3PDMakerUtils RecoToolInterfaces ITrackToVertex TileEvent TileIdentifier - TrkParameters TrkParametersIdentificationHelpers VxVertex TrigInDetEvent TrigMuonEvent ) + INCLUDE_DIRS ${ROOT_INCLUDE_DIRS} + LINK_LIBRARIES ${ROOT_LIBRARIES} AthContainers AthenaBaseComps AtlasHepMCLib CaloEvent CaloGeoHelpers D3PDMakerUtils EventInfo GaudiKernel GeneratorObjects GeoModelInterfaces GeoModelUtilities 
GeoPrimitives ITrackToVertex Identifier RDBAccessSvcLib RecoToolInterfaces StoreGateLib TileEvent TileIdentifier TrkParameters TrkParametersIdentificationHelpers VxVertex xAODCaloEvent xAODEventInfo xAODMissingET xAODMuon xAODPrimitives xAODTracking xAODTrigger ) # Install files from the package: -atlas_install_python_modules( python/*.py ) +atlas_install_python_modules( python/*.py POST_BUILD_CMD ${ATLAS_FLAKE8} ) atlas_install_joboptions( share/*.py ) diff --git a/PhysicsAnalysis/D3PDMaker/TileD3PDMaker/python/TileD3PD.py b/PhysicsAnalysis/D3PDMaker/TileD3PDMaker/python/TileD3PD.py deleted file mode 100644 index 05d66e192230e9d6d6d7ccb93dd0ece757312822..0000000000000000000000000000000000000000 --- a/PhysicsAnalysis/D3PDMaker/TileD3PDMaker/python/TileD3PD.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -# -*- coding: iso-8859-1 -*- -# -# File: TileD3PD.py -# Author: Marco van Woerden <mvanwoer@cern.ch> -# Date: September 2012 -# - -import TileD3PDMaker.TileD3PDObject -from TileD3PDMaker.TileD3PDObject import * -from TileD3PDMaker import * -from D3PDMakerCoreComps.resolveSGKey import testSGKey - -def mu_D3PD (alg = None, - file = 'tile.root', - tuplename = 'calo'): - - if not alg: - from OutputStreamAthenaPool.MultipleStreamManager import MSMgr - alg = MSMgr.NewRootStream( tuplename, file ) - alg += TileD3PDMaker.TileD3PDObject.TileCellD3PDObject(10) - alg += TileD3PDMaker.TileD3PDObject.TileMBTSD3PDObject(10) - alg += TileD3PDMaker.TileD3PDObject.TileStacoMuonD3PDObject(10) - alg += TileD3PDMaker.TileD3PDObject.TileEventD3PDObject(10) - - return alg - -def Ep_D3PD (alg = None, - file = 'tile.root', - tuplename = 'calo'): - - if not alg: - from OutputStreamAthenaPool.MultipleStreamManager import MSMgr - alg = MSMgr.NewRootStream( tuplename, file ) - alg += TileD3PDMaker.TileD3PDObject.TileTrackD3PDObject(10) - alg += TileD3PDMaker.TileD3PDObject.TileCellD3PDObject(10) - alg += 
TileD3PDMaker.TileD3PDObject.TileEventD3PDObject(10) - #alg += TileD3PDMaker.TileD3PDObject.TileMuonD3PDObject(10) - alg += TileD3PDMaker.TileD3PDObject.TileClusterD3PDObject(10) - - return alg diff --git a/PhysicsAnalysis/D3PDMaker/TileD3PDMaker/python/TileD3PDObject.py b/PhysicsAnalysis/D3PDMaker/TileD3PDMaker/python/TileD3PDObject.py index 8ee6a2937ed0ddf4cc73cebf2e5e8d3634748607..a019c5db7c0e7041c387b51cbc5dc71aaadf414e 100644 --- a/PhysicsAnalysis/D3PDMaker/TileD3PDMaker/python/TileD3PDObject.py +++ b/PhysicsAnalysis/D3PDMaker/TileD3PDMaker/python/TileD3PDObject.py @@ -1,4 +1,4 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration # # File: TileAnalysisD3PDObject.py @@ -8,22 +8,9 @@ # # IMPORT MODULES -import TileD3PDMaker import D3PDMakerCoreComps -import EventCommonD3PDMaker -from D3PDMakerCoreComps.D3PDObject import D3PDObject from D3PDMakerConfig.D3PDMakerFlags import D3PDMakerFlags -from D3PDMakerCoreComps.D3PDObject import make_SGDataVector_D3PDObject -from D3PDMakerCoreComps.D3PDObject import make_Void_D3PDObject -from D3PDMakerCoreComps.D3PDObject import make_SG_D3PDObject -from D3PDMakerCoreComps.SimpleAssociation import SimpleAssociation -from D3PDMakerCoreComps.IndexAssociation import IndexAssociation -from D3PDMakerCoreComps.ContainedVectorMultiAssociation import ContainedVectorMultiAssociation -from D3PDMakerCoreComps.IndexMultiAssociation import IndexMultiAssociation -from AthenaCommon.AlgSequence import AlgSequence -from TileD3PDMaker import * -from CaloIdentifier import SUBCALO -from AthenaCommon.AppMgr import ServiceMgr as svcMgr + # VECTORFILLER DEFINITIONS def make_Cell_D3PDVectorFiller(name, prefix, object_name, getter = None,sgkey = None,label = None): @@ -65,76 +52,3 @@ def make_Cluster_D3PDVectorFiller(name, prefix, object_name, getter = None,sgkey # CREATE SELECTED return D3PDMakerCoreComps.VectorFillerTool('cluster_Getter',Prefix = 
'clusters_',Getter = calocluster_getter, ObjectName = 'caloclusters',SaveMetadata = D3PDMakerFlags.SaveObjectMetadata()) - -###################################################### -# RETRIEVE D3PD OBJECT CONTAINING SELECTED TRACKS # -###################################################### -#TileTrackD3PDObject = D3PDObject(make_Track_D3PDVectorFiller, 'tracks_' , 'TracksD3PDObject') -#TileTrackD3PDObject.defineBlock(0, 'TrackParticleDump', TileD3PDMaker.TileTrackFillerTool) - -###################################################### -# RETRIEVE D3PD OBJECT CONTAINING SELECTED CALOCELLS # -###################################################### -#TileCellD3PDObject = D3PDObject(make_Cell_D3PDVectorFiller, 'cells_' , 'CellsD3PDObject') -#TileCellD3PDObject.defineBlock(0, 'CaloCellDump', TileD3PDMaker.TileCellFillerTool) - -#TileMBTSD3PDObject = D3PDObject(make_MBTS_D3PDVectorFiller, 'mbts_' , 'MBTSD3PDObject') -#TileMBTSD3PDObject.defineBlock(0, 'MBTSDump', TileD3PDMaker.TileMBTSFillerTool) - -###################################################### -# RETRIEVE D3PD OBJECT CONTAINING SELECTED CLUSTERS # -###################################################### -#TileClusterD3PDObject = D3PDObject(make_Cluster_D3PDVectorFiller, 'clusters_' , 'ClustersD3PDObject') -#TileClusterD3PDObject.defineBlock(0, 'CaloClusterDump', TileD3PDMaker.TileCaloClusterFillerTool) - -######################################### -# RETRIEVE D3PD OBJECT CONTAINING MUONS # -######################################### -#TileStacoMuonD3PDObject = make_SGDataVector_D3PDObject ('Analysis::MuonContainer','StacoMuonCollection','mu_', 'TileStacoMuonD3PDObject') -#TileStacoMuonD3PDObject.defineBlock(0, 'StacoDump', TileD3PDMaker.TileMuonFillerTool) - -#TileMuidMuonD3PDObject = make_SGDataVector_D3PDObject ('Analysis::MuonContainer','MuidMuonCollection','muid_', 'TileMuidMuonD3PDObject') -#TileMuidMuonD3PDObject.defineBlock(0, 'MuidDump', TileD3PDMaker.TileMuonFillerTool) - -####################################### 
-# EVENT BY EVENT INFORMATION TO STORE # -####################################### -#TileEventD3PDObject = make_SG_D3PDObject( "EventInfo", D3PDMakerFlags.EventInfoSGKey(), 'evt_', "TileEventD3PDObject" ) -#TileEventD3PDObject.defineBlock(0, 'EventDump', TileD3PDMaker.TileEventFillerTool) - - -###################### -# SETUP ASSOCIATIONS # -###################### -#IndexMultiAssociation(parent=TileCellD3PDObject,assoctool=TileD3PDMaker.TileCellMuAssociationTool,target="mu_",prefix="cellsmu_",level=0,blockname="AssocCellToMuon") - -#IndexMultiAssociation(parent=TileMuonD3PDObject,assoctool=TileD3PDMaker.TileMuCellAssociationTool,target="cells_",prefix="mucells_",level=0,blockname="AssocMuonToCell") - -#IndexMultiAssociation(parent=TileClusterD3PDObject,assoctool=TileD3PDMaker.TileClusterCellAssociationTool,target="cells_",prefix="clustercells_",level=0,blockname="AssocClusterToCell") - -#IndexMultiAssociation(parent=TileTrackD3PDObject,assoctool=TileD3PDMaker.TileTrackCellAssociationTool,target="cells_",prefix="trackcells_",level=0,blockname="AssocTrackToCell") - -#IndexMultiAssociation(parent=TileTrackD3PDObject,assoctool=TileD3PDMaker.TileTrackClusterAssociationTool,target="clusters_",prefix="trackclusters_",level=0,blockname="AssocTrackToCluster") - - - - - - - - - - - - - - - - - - - - - - - diff --git a/PhysicsAnalysis/D3PDMaker/TrackD3PDMaker/CMakeLists.txt b/PhysicsAnalysis/D3PDMaker/TrackD3PDMaker/CMakeLists.txt index f50e867947f636a1f4bdd32f8ec6b7959b9239ae..c83691606fe4c663034612c159e355bfde95a051 100644 --- a/PhysicsAnalysis/D3PDMaker/TrackD3PDMaker/CMakeLists.txt +++ b/PhysicsAnalysis/D3PDMaker/TrackD3PDMaker/CMakeLists.txt @@ -9,4 +9,4 @@ atlas_add_component( TrackD3PDMaker LINK_LIBRARIES TrkParameters AthenaKernel CxxUtils Identifier EventPrimitives xAODBase xAODPrimitives xAODTracking GaudiKernel InDetIdentifier InDetReadoutGeometry InDetTestBLayerLib ParticleEvent D3PDMakerInterfaces D3PDMakerUtils Particle InDetRecToolInterfaces ITrackToVertex 
RecoToolInterfaces TrkEventPrimitives TrkParticleBase VxVertex InDetBeamSpotServiceLib ) # Install files from the package: -atlas_install_python_modules( python/*.py ) +atlas_install_python_modules( python/*.py POST_BUILD_CMD ${ATLAS_FLAKE8} ) diff --git a/PhysicsAnalysis/D3PDMaker/TrackD3PDMaker/python/TrackParticleImpactParameters.py b/PhysicsAnalysis/D3PDMaker/TrackD3PDMaker/python/TrackParticleImpactParameters.py index e5988f53775b5cbe5763a53414f38019346af3c3..b492d007bd8a13e33d1f61265b2083a8d208f6b1 100644 --- a/PhysicsAnalysis/D3PDMaker/TrackD3PDMaker/python/TrackParticleImpactParameters.py +++ b/PhysicsAnalysis/D3PDMaker/TrackD3PDMaker/python/TrackParticleImpactParameters.py @@ -1,6 +1,5 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration -# $Id$ # # @file TrackD3PDMaker/python/TrackParticleImpactParameters.py # @author scott snyder <snyder@bnl.gov> @@ -11,9 +10,6 @@ import TrackD3PDMaker from D3PDMakerCoreComps.SimpleAssociation import SimpleAssociation -from D3PDMakerConfig.D3PDMakerFlags import D3PDMakerFlags -import D3PDMakerCoreComps - def TrackParticleImpactParameters (TPD3PDObject, prefix = 'track', diff --git a/PhysicsAnalysis/D3PDMaker/TrackD3PDMaker/python/__init__.py b/PhysicsAnalysis/D3PDMaker/TrackD3PDMaker/python/__init__.py index ac486e0201f5f1c0f78dc91cb71a414c166d8144..8629e6ab6e8c2895f2069ebec3953b01c8fd14ea 100644 --- a/PhysicsAnalysis/D3PDMaker/TrackD3PDMaker/python/__init__.py +++ b/PhysicsAnalysis/D3PDMaker/TrackD3PDMaker/python/__init__.py @@ -5,10 +5,6 @@ for k, v in TrackD3PDMakerConf.__dict__.items(): if k.startswith ('D3PD__'): globals()[k[6:]] = v -# Backwards compatibility. -TrackPerigeeFillerTool = PerigeeFillerTool - - # Copy these here from TrackSummary.h so that we don't need to load # all the EDM libraries to get these during configuration. 
# (FIXME: The enums should be split into a separate dictionary diff --git a/PhysicsAnalysis/D3PDMaker/TrackD3PDMaker/python/xAODTrackD3PDObject.py b/PhysicsAnalysis/D3PDMaker/TrackD3PDMaker/python/xAODTrackD3PDObject.py index b1db9a65a70737c0a168e72b62b3dcc1bf3337df..033f4311cf710d265d9b6015801948e6240cfec2 100644 --- a/PhysicsAnalysis/D3PDMaker/TrackD3PDMaker/python/xAODTrackD3PDObject.py +++ b/PhysicsAnalysis/D3PDMaker/TrackD3PDMaker/python/xAODTrackD3PDObject.py @@ -7,7 +7,6 @@ from D3PDMakerCoreComps.D3PDObject import make_SGDataVector_D3PDObject from D3PDMakerCoreComps.IndexAssociation import IndexAssociation from TrackD3PDMaker.PerigeeAssociation import PerigeeAssociation from TrackD3PDMaker.TrackD3PDMakerFlags import TrackD3PDFlags -from AthenaCommon.AppMgr import ToolSvc def xAODTrackD3PDObject(_label='trkTrack', @@ -54,16 +53,14 @@ def xAODTrackD3PDObject(_label='trkTrack', InDetTestBLayerTool = ToolSvc.InDetRecTestBLayerTool) # perigee at Primary Vertex - PerigeeAtPVAssoc = PerigeeAssociation\ - (object, + PerigeeAtPVAssoc = PerigeeAssociation(object, # noqa: F841 TrackD3PDMaker.TrackParticlePerigeeAtPVAssociationTool, "PerigeeAtPV", suffix='_wrtPV', levelName = 'trackParametersAtPrimaryVertexLevelOfDetails') # perigee at Beam Spot - PerigeeAtBSAssoc = PerigeeAssociation\ - (object, + PerigeeAtBSAssoc = PerigeeAssociation(object, # noqa: F841 TrackD3PDMaker.TrackParticlePerigeeAtBSAssociationTool, "PerigeeAtBS", suffix='_wrtBS', @@ -242,8 +239,7 @@ def xAODTrackD3PDObject(_label='trkTrack', 'patternRecoInfo']) # Vertex association - VertexAssoc = IndexAssociation ( - object, + VertexAssoc = IndexAssociation(object, # noqa: F841 TrackD3PDMaker.TrackParticleVertexAssociationTool, vertexTarget, prefix = vertexPrefix, diff --git a/PhysicsAnalysis/D3PDMaker/TrackD3PDMaker/python/xAODTrackSummaryFiller.py b/PhysicsAnalysis/D3PDMaker/TrackD3PDMaker/python/xAODTrackSummaryFiller.py index 
26280a322ccd5b6eca84068bda4240c4188fb966..8e39cc4810d1cf0ea852083246577e1a1e6b29e4 100644 --- a/PhysicsAnalysis/D3PDMaker/TrackD3PDMaker/python/xAODTrackSummaryFiller.py +++ b/PhysicsAnalysis/D3PDMaker/TrackD3PDMaker/python/xAODTrackSummaryFiller.py @@ -118,7 +118,7 @@ def xAODTrackSummaryFiller (obj, lod, blockName, **kw): varlist = [] for tags, v in sumvars: - if type(tags) != type([]): tags = [tags] + if not isinstance(tags, list): tags = [tags] sel = FullInfo for t in tags: sel += eval(t) diff --git a/PhysicsAnalysis/D3PDMaker/TrackD3PDMaker/src/PerigeeBLPredictionFillerTool.cxx b/PhysicsAnalysis/D3PDMaker/TrackD3PDMaker/src/PerigeeBLPredictionFillerTool.cxx index 1e7172b2637c49a47b523eb23556f99742cafd84..80a67a86219e6cd87f50a17de62c3ac633d433f5 100644 --- a/PhysicsAnalysis/D3PDMaker/TrackD3PDMaker/src/PerigeeBLPredictionFillerTool.cxx +++ b/PhysicsAnalysis/D3PDMaker/TrackD3PDMaker/src/PerigeeBLPredictionFillerTool.cxx @@ -12,7 +12,7 @@ #include "PerigeeBLPredictionFillerTool.h" #include "InDetTestBLayer/InDetTestBLayerTool.h" -//#include "TrkParameters/Perigee.h" +#include "InDetTestBLayer/TrackStateOnBLayerInfo.h" #include "AthenaKernel/errorcheck.h" #include "InDetReadoutGeometry/SiDetectorElement.h" #include "InDetIdentifier/PixelID.h" diff --git a/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/CMakeLists.txt b/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/CMakeLists.txt index d74201ba19fbad4804a9e2913f5751adcf7ee57d..9e2c231d3fec147a21a12a16a5bc70d45c04f856 100644 --- a/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/CMakeLists.txt +++ b/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/CMakeLists.txt @@ -1,27 +1,24 @@ -################################################################################ -# Package: TriggerD3PDMaker -################################################################################ +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration # Declare the package name: atlas_subdir( TriggerD3PDMaker ) # External dependencies: 
-find_package( Boost COMPONENTS filesystem thread system ) +find_package( Boost ) # Component(s) in the package: atlas_add_library( TriggerD3PDMakerLib src/*.cxx PUBLIC_HEADERS TriggerD3PDMaker INCLUDE_DIRS ${Boost_INCLUDE_DIRS} - LINK_LIBRARIES ${Boost_LIBRARIES} AthenaBaseComps AthenaKernel CxxUtils EventKernel FourMomUtils GaudiKernel D3PDMakerUtils TrigSteeringEvent TrigDecisionToolLib TrigObjectMatchingLib StoreGateLib SGtests - PRIVATE_LINK_LIBRARIES xAODTrigger AnalysisTriggerEvent TrigConfHLTData TrigConfL1Data TrigMonitoringEvent TrigT1Interfaces TrigT1Result TrigAnalysisInterfaces ) + LINK_LIBRARIES ${Boost_LIBRARIES} AthenaBaseComps AthenaKernel CxxUtils D3PDMakerInterfaces D3PDMakerUtils EventKernel FourMomUtils GaudiKernel TrigAnalysisInterfaces TrigDecisionToolLib TrigObjectMatchingLib TrigT1Result + PRIVATE_LINK_LIBRARIES AnalysisTriggerEvent StoreGateLib TrigConfHLTData TrigConfInterfaces TrigConfL1Data TrigMonitoringEvent TrigSteeringEvent TrigT1Interfaces xAODTrigger ) atlas_add_component( TriggerD3PDMaker src/components/*.cxx - INCLUDE_DIRS ${Boost_INCLUDE_DIRS} - LINK_LIBRARIES ${Boost_LIBRARIES} AthenaBaseComps AthenaKernel CxxUtils EventKernel FourMomUtils GaudiKernel D3PDMakerUtils TrigDecisionToolLib TrigObjectMatchingLib TrigSteeringEvent StoreGateLib SGtests xAODTrigger AnalysisTriggerEvent TrigConfHLTData TrigConfL1Data TrigMonitoringEvent TrigT1Interfaces TrigT1Result TriggerD3PDMakerLib TrigAnalysisInterfaces ) + LINK_LIBRARIES TriggerD3PDMakerLib ) # Install files from the package: -atlas_install_python_modules( python/*.py ) +atlas_install_python_modules( python/*.py POST_BUILD_CMD ${ATLAS_FLAKE8} ) atlas_install_joboptions( share/*.py ) diff --git a/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/python/BunchStructureMetadata.py b/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/python/BunchStructureMetadata.py index 0c9a6c95edcfb177bb464bd05e3d1ebdfab7c51d..e9b207ff260fc9a8472a6843918147224c2bd03b 100644 --- 
a/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/python/BunchStructureMetadata.py +++ b/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/python/BunchStructureMetadata.py @@ -1,6 +1,5 @@ # Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration -# $Id: BunchStructureMetadata.py 345964 2011-02-15 15:25:18Z ssnyder $ ## @package BunchStructureMetadata # @@ -9,8 +8,6 @@ # # @author Attila Krasznahorkay <Attila.Krasznahorkay@cern.ch> # -# $Revision: 345964 $ -# $Date: 2011-02-15 16:25:18 +0100 (Tue, 15 Feb 2011) $ ## # @short Function adding the bunch structure metadata to the D3PD @@ -27,13 +24,9 @@ # # @author Attila Krasznahorkay <Attila.Krasznahorkay@cern.ch> # -# $Revision: 345964 $ -# $Date: 2011-02-15 16:25:18 +0100 (Tue, 15 Feb 2011) $ -# def addBunchStructureMetadata( d3pdalg = None, source = "" ): # Create a logger for the function: - if "logger" in dir(): orig_logger = logger from AthenaCommon.Logging import logging logger = logging.getLogger( "addBunchStructureMetadata" ) @@ -55,9 +48,9 @@ def addBunchStructureMetadata( d3pdalg = None, source = "" ): _d3pdSvc = getattr( ServiceMgr, _d3pdSvcName ) # If no D3PD::MakerAlg has been provided, create a dummy one: - if d3pdalg == None: + if d3pdalg is None: logger.warning( "No D3PD MakerAlg given to function!" 
) - logger.warning( "The bunch configuration will be saved into file: " + + logger.warning( "The bunch configuration will be saved into file: " "\"BunchConfig.root\"" ) from AthenaCommon.AlgSequence import AlgSequence theJob = AlgSequence() @@ -68,7 +61,7 @@ def addBunchStructureMetadata( d3pdalg = None, source = "" ): # Add the metadata tool: _d3pdToolName = "BunchStructureMetadataTool" - if not _d3pdToolName in [ t.name() for t in d3pdalg.MetadataTools ]: + if _d3pdToolName not in [ t.name() for t in d3pdalg.MetadataTools ]: import TriggerD3PDMaker from TrigBunchCrossingTool.BunchCrossingConfProvider import BunchCrossingConfProvider d3pdalg.MetadataTools += [ @@ -90,7 +83,4 @@ def addBunchStructureMetadata( d3pdalg = None, source = "" ): else: logger.info( "BunchConfigIDD3PDObject already added to the D3PD::MakerAlg" ) - # Restore the original logger if necessary: - if "orig_logger" in dir(): logger = orig_logger - return diff --git a/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/python/CTPD3PDObject.py b/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/python/CTPD3PDObject.py index 9daa23f56b1c7f27d8e58b8c16af5be74dd10572..4441f41195a83483547cd0e5519dc5cca44eb398 100644 --- a/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/python/CTPD3PDObject.py +++ b/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/python/CTPD3PDObject.py @@ -1,4 +1,4 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration # # D3PD object saving the CTP_RDO information into the D3PD @@ -6,8 +6,6 @@ # is active...) 
# -import D3PDMakerCoreComps -from D3PDMakerCoreComps.D3PDObject import D3PDObject from D3PDMakerCoreComps.D3PDObject import make_SG_D3PDObject import TriggerD3PDMaker diff --git a/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/python/EmTauROID3PDObject.py b/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/python/EmTauROID3PDObject.py index 20f851907c4c9a89de0cce4d5db5090a198c1788..702f619311551f0502fc7e5fde8be97336b0f75d 100644 --- a/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/python/EmTauROID3PDObject.py +++ b/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/python/EmTauROID3PDObject.py @@ -1,13 +1,11 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration -# $Id: EmTauROID3PDObject.py 633616 2014-12-04 10:10:12Z ssnyder $ # # D3PD object saving the LVL1 Em/Tau RoI information into the D3PD # from D3PDMakerCoreComps.D3PDObject import make_SGDataVector_D3PDObject import TriggerD3PDMaker -import EventCommonD3PDMaker import D3PDMakerCoreComps diff --git a/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/python/MuCTPID3PDObject.py b/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/python/MuCTPID3PDObject.py index c000fad52131fdfd30aedf72af37d8749f689e9f..02b95661bcc085648481e1baa0062fa16ea8547e 100644 --- a/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/python/MuCTPID3PDObject.py +++ b/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/python/MuCTPID3PDObject.py @@ -1,4 +1,4 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration # # D3PD object saving the MuCTPI_RDO information into the D3PD @@ -6,8 +6,6 @@ # is active...) 
# -import D3PDMakerCoreComps -from D3PDMakerCoreComps.D3PDObject import D3PDObject from D3PDMakerCoreComps.D3PDObject import make_SG_D3PDObject import TriggerD3PDMaker @@ -28,8 +26,8 @@ MuCTPID3PDObject = make_SG_D3PDObject( "MuCTPI_RDO", "MUCTPI_RDO", if _haveRDO: # Make sure the cabling services are configured: - import TrigT1RPCRecRoiSvc.TrigT1RPCRecRoiConfig - import TrigT1TGCRecRoiSvc.TrigT1TGCRecRoiConfig + import TrigT1RPCRecRoiSvc.TrigT1RPCRecRoiConfig # noqa: F401 + import TrigT1TGCRecRoiSvc.TrigT1TGCRecRoiConfig # noqa: F401 # Define the blocks: MuCTPID3PDObject.defineBlock( 0, "RDOInfo", diff --git a/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/python/MuonROID3PDObject.py b/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/python/MuonROID3PDObject.py index f1a36fe504536b82b9f35d2796c1411d1589487a..bd396c7f0b1781ce6e9b0032a79681b7403809f4 100644 --- a/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/python/MuonROID3PDObject.py +++ b/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/python/MuonROID3PDObject.py @@ -1,13 +1,11 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration -# $Id: MuonROID3PDObject.py 620244 2014-10-06 19:02:48Z ssnyder $ # # D3PD object saving the LVL1 muon RoI information into the D3PD # from D3PDMakerCoreComps.D3PDObject import make_SGDataVector_D3PDObject import TriggerD3PDMaker -import EventCommonD3PDMaker import D3PDMakerCoreComps MuonROID3PDObject = \ diff --git a/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/python/TrigConfMetadata.py b/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/python/TrigConfMetadata.py index 26b66437fe4413c4a6cf9efea6f7316508ae9792..3b18eed10f4c61ba3bce55a37339d885eb190e96 100644 --- a/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/python/TrigConfMetadata.py +++ b/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/python/TrigConfMetadata.py @@ -5,7 +5,7 @@ # metadata to the D3PD. 
# -def addTrigConfMetadata( d3pdalg = None, useTrigConfEventSummaries = False, doCostL2 = False, doCostEF = False, doCostHLT = False, saveKeys = True, tuplePath = "" ): +def addTrigConfMetadata( d3pdalg = None, useTrigConfEventSummaries = False, saveKeys = True, tuplePath = "" ): """Helper function that adds the necessary tool(s) and service(s) to the job to save the trigger configuration metadata to the output D3PD @@ -20,7 +20,6 @@ def addTrigConfMetadata( d3pdalg = None, useTrigConfEventSummaries = False, doCo """ # Create a logger for the function: - if "logger" in dir(): orig_logger = logger from AthenaCommon.Logging import logging logger = logging.getLogger( "addTrigConfMetadata" ) @@ -41,9 +40,9 @@ def addTrigConfMetadata( d3pdalg = None, useTrigConfEventSummaries = False, doCo _d3pdSvc = getattr( ServiceMgr, _d3pdSvcName ) # If no D3PD::MakerAlg has been provided, create a dummy one: - if d3pdalg == None: + if d3pdalg is None: logger.warning( "No D3PD MakerAlg given to function!" 
) - logger.warning( "The trigger configuration will be saved into file: " + + logger.warning( "The trigger configuration will be saved into file: " "\"TrigConfig.root\"" ) from AthenaCommon.AlgSequence import AlgSequence theJob = AlgSequence() @@ -53,7 +52,7 @@ def addTrigConfMetadata( d3pdalg = None, useTrigConfEventSummaries = False, doCo # Add the metadata tool: _d3pdToolName = "TrigConfMetadataTool" - if not _d3pdToolName in [ t.name() for t in d3pdalg.MetadataTools ]: + if _d3pdToolName not in [ t.name() for t in d3pdalg.MetadataTools ]: import TriggerD3PDMaker if (tuplePath == ""): tuplePath = d3pdalg.TuplePath @@ -62,26 +61,17 @@ def addTrigConfMetadata( d3pdalg = None, useTrigConfEventSummaries = False, doCo ConfigDir = tuplePath + "Meta" ) _trigConfTool.UseTrigConfEventSummaries = useTrigConfEventSummaries if useTrigConfEventSummaries: - # Figure out if old or new style HLT if using CostMon to get correct storegate key - # Old key fomat was HLT_OPI_HLT_monitoring_config - if (doCostL2 == True or doCostEF == True or doCostHLT == True): - logger.info( "TrigConfMetadataTool will use passed arguments [L2="+str(doCostL2)+",EF="+str(doCostEF)+",HLT="+str(doCostHLT)+"]" ) - if (doCostL2 == True or doCostEF == True): - _trigConfTool.keyConfig = "HLT_TrigMonConfigCollection_OPI_EF_monitoring_config" - elif (doCostHLT == True): + logger.info( "TrigConfMetadataTool will use TriggerFlags flags for config" ) + from TriggerJobOpts.TriggerFlags import TriggerFlags + if TriggerFlags.doHLT(): _trigConfTool.keyConfig = "HLT_TrigMonConfigCollection_OPI_HLT_monitoring_config" - else: - logger.info( "TrigConfMetadataTool will use TriggerFlags flags for config" ) - from TriggerJobOpts.TriggerFlags import TriggerFlags - if TriggerFlags.doHLT(): - _trigConfTool.keyConfig = "HLT_TrigMonConfigCollection_OPI_HLT_monitoring_config" - logger.info( "TrigConfMetadataTool will use the StoreGate key " + _trigConfTool.keyConfig ) + logger.info( "TrigConfMetadataTool will use the StoreGate 
key %s", _trigConfTool.keyConfig ) d3pdalg.MetadataTools += [ _trigConfTool ] else: logger.info( "TrigConfMetadataTool was already added to the D3PD::MakerAlg" ) # Add the DB key filler object: - if saveKeys == True: + if saveKeys is True: _dbKeysFillerName = "TrigDBKeysFiller" if not hasattr( d3pdalg, _dbKeysFillerName ): from TriggerD3PDMaker.TrigDBKeysD3PDObject import TrigDBKeysD3PDObject @@ -89,7 +79,4 @@ def addTrigConfMetadata( d3pdalg = None, useTrigConfEventSummaries = False, doCo else: logger.info( "TrigDBKeysD3PDObject already added to the D3PD::MakerAlg" ) - # Restore the original logger if necessary: - if "orig_logger" in dir(): logger = orig_logger - return diff --git a/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/python/TrigRoiDescD3PDObject.py b/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/python/TrigRoiDescD3PDObject.py index d81c2ce3197653f840cd6fa5c708c1ba6ef93849..3c76c4fe7111283e1853761cf9222e7083c97d98 100644 --- a/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/python/TrigRoiDescD3PDObject.py +++ b/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/python/TrigRoiDescD3PDObject.py @@ -1,12 +1,10 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration -# $Id: TrigRoiDescD3PDObject.py 338058 2010-12-20 13:20:43Z krasznaa $ # # D3PD object saving information about TrigRoiDescriptor objects. # By default it saves the "initial RoIs", the RoIs that are given # to the LVL2 algorithms from LVL1. 
-from D3PDMakerCoreComps.D3PDObject import D3PDObject from D3PDMakerCoreComps.D3PDObject import make_SGDataVector_D3PDObject import TriggerD3PDMaker diff --git a/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/python/defineTriggerBits.py b/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/python/defineTriggerBits.py index 21be38ef0912dcfe6c8b99892ada3c8e00c2f595..af6ba52ec989330982d0d89c2b2da6e89cc23154 100644 --- a/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/python/defineTriggerBits.py +++ b/PhysicsAnalysis/D3PDMaker/TriggerD3PDMaker/python/defineTriggerBits.py @@ -1,6 +1,5 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration -# $Id: defineTriggerBits.py 484175 2012-02-21 13:06:42Z krasznaa $ # # @file TriggerD3PDMaker/python/addTriggerBits.py # @author scott snyder <snyder@bnl.gov> @@ -28,7 +27,6 @@ import TriggerD3PDMaker from D3PDMakerCoreComps.D3PDObject import make_Void_D3PDObject from D3PDMakerConfig.D3PDMakerFlags import D3PDMakerFlags -from AthenaCommon.AppMgr import theApp triggerBitsD3PDObject = \ @@ -84,7 +82,7 @@ This allows one to segregate the trigger decision flags in a separate tree. 
pass pat = pattern - if type(pat) != type([]): + if not isinstance(pat, list): pat = [pat] pass filler.TriggerBitsFiller_TriggerBits.Triggers += pat diff --git a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/CMakeLists.txt b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/CMakeLists.txt index 3d9ab004df548d5272481cba205f6b6ece841e42..ec53ce1d2b03e0ecc7690a5bf578f629efc31e0b 100644 --- a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/CMakeLists.txt +++ b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/CMakeLists.txt @@ -16,6 +16,6 @@ atlas_add_component( TruthD3PDMaker LINK_LIBRARIES ${Boost_LIBRARIES} ${HEPPDT_LIBRARIES} ${CLHEP_LIBRARIES} AtlasHepMCLib AthenaBaseComps AthenaKernel Navigation EventInfo xAODTruth GaudiKernel GeneratorObjects TruthUtils D3PDMakerInterfaces D3PDMakerUtils TruthD3PDAnalysisLib MCTruthClassifierLib McParticleEvent McParticleKernel JetEvent TrkToolInterfaces GenInterfacesLib ) # Install files from the package: -atlas_install_python_modules( python/*.py ) +atlas_install_python_modules( python/*.py POST_BUILD_CMD ${ATLAS_FLAKE8} ) atlas_install_joboptions( share/*.py ) diff --git a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/Atlfast1ElectronD3PDObject.py b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/Atlfast1ElectronD3PDObject.py index 8cc3725ea955a16eb12afb07730330b9811037a0..111afefc8002b6f98c69f9564bcfbe620b378131 100644 --- a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/Atlfast1ElectronD3PDObject.py +++ b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/Atlfast1ElectronD3PDObject.py @@ -1,6 +1,5 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration -from egammaD3PDMaker.isem_version import isem_version from EventCommonD3PDMaker.DRAssociation import DRAssociation from D3PDMakerCoreComps.D3PDObject import make_SGDataVector_D3PDObject from D3PDMakerCoreComps.SimpleAssociation import SimpleAssociation @@ -9,10 +8,7 @@ from 
TruthD3PDMaker.Atlfast1D3PDMakerFlags import Atlfast1D3PDMakerFlags from RecExConfig.RecFlags import rec import egammaD3PDMaker import EventCommonD3PDMaker -import D3PDMakerCoreComps -from ROOT import egammaParameters -from ROOT import egammaPID Atlfast1ElectronD3PDObject = \ make_SGDataVector_D3PDObject ('ElectronContainer', @@ -29,7 +25,7 @@ Atlfast1ElectronD3PDObject.defineBlock (0, 'Charge', EventCommonD3PDMaker.ChargeFillerTool) if rec.doTruth(): - import TruthD3PDMaker.MCTruthClassifierConfig + import TruthD3PDMaker.MCTruthClassifierConfig # noqa: F401 (import side-effect) Atlfast1ElectronD3PDObject.defineBlock (1, 'TruthClassification', egammaD3PDMaker.egammaTruthClassificationFillerTool) Atlfast1ElectronGenPartAssoc = SimpleAssociation \ diff --git a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/Atlfast1MissingETD3PDObject.py b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/Atlfast1MissingETD3PDObject.py index adbb5738cb1993249a05b98cd77c9a2d4bc97163..6cf07a528921bae8e92182beca426b39687b29d1 100644 --- a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/Atlfast1MissingETD3PDObject.py +++ b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/Atlfast1MissingETD3PDObject.py @@ -1,11 +1,9 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration import MissingETD3PDMaker -import D3PDMakerCoreComps -from D3PDMakerCoreComps.D3PDObject import D3PDObject from D3PDMakerCoreComps.D3PDObject import make_SG_D3PDObject from D3PDMakerConfig.D3PDMakerFlags import D3PDMakerFlags -from MissingETD3PDMaker.MissingETD3PDMakerFlags import * +from MissingETD3PDMaker.MissingETD3PDMakerFlags import MissingETD3PDMakerFlags Atlfast1MissingETD3PDObject = \ make_SG_D3PDObject ('MissingET', diff --git a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/Atlfast1PhotonD3PDObject.py b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/Atlfast1PhotonD3PDObject.py index 
61a009a401408fe779cbd711b1dbd2f1c491a819..3fdcefad52f9fd7d845c35caad039be87c4bd6b6 100644 --- a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/Atlfast1PhotonD3PDObject.py +++ b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/Atlfast1PhotonD3PDObject.py @@ -1,6 +1,5 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration -from egammaD3PDMaker.isem_version import isem_version from EventCommonD3PDMaker.DRAssociation import DRAssociation from D3PDMakerCoreComps.D3PDObject import make_SGDataVector_D3PDObject from D3PDMakerCoreComps.SimpleAssociation import SimpleAssociation @@ -9,10 +8,6 @@ from TruthD3PDMaker.Atlfast1D3PDMakerFlags import Atlfast1D3PDMakerFlags from RecExConfig.RecFlags import rec import egammaD3PDMaker import EventCommonD3PDMaker -import D3PDMakerCoreComps - -from ROOT import egammaParameters -from ROOT import egammaPID Atlfast1PhotonD3PDObject = \ @@ -28,7 +23,7 @@ Atlfast1PhotonD3PDObject.defineBlock (0, 'Kinematics', WriteRect = True) if rec.doTruth(): - import TruthD3PDMaker.MCTruthClassifierConfig + import TruthD3PDMaker.MCTruthClassifierConfig # noqa: F401 (import side-effect) Atlfast1PhotonD3PDObject.defineBlock (1, 'TruthClassification', egammaD3PDMaker.egammaTruthClassificationFillerTool) Atlfast1PhotonGenPartAssoc = SimpleAssociation \ diff --git a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/GenEventD3PDObject.py b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/GenEventD3PDObject.py index 89e6bb49ff9e9a0cc947e58d5e0d1a6619bc7028..aa69372d00552fd0569b21f903da5a67eb67c548 100644 --- a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/GenEventD3PDObject.py +++ b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/GenEventD3PDObject.py @@ -1,4 +1,4 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration # ## @file 
TruthD3PDMaker/python/GenParticleD3PDObject.py @@ -7,8 +7,6 @@ ## @date Nov, 2010 ## -from AthenaCommon.AppMgr import ToolSvc - import TruthD3PDMaker import D3PDMakerCoreComps @@ -17,7 +15,6 @@ from D3PDMakerConfig.D3PDMakerFlags import D3PDMakerFlags from TruthD3PDAnalysis.AllTruthFilterTool import AllTruthFilterTool from TruthD3PDMaker.TruthD3PDMakerKeys import TruthD3PDKeys -from TruthD3PDMaker.TruthD3PDMakerFlags import TruthD3PDFlags def make_GenEvent_D3PDObject( default_prefix, default_sgkey, default_object_name = "", @@ -28,9 +25,9 @@ def make_GenEvent_D3PDObject( default_prefix, default_sgkey, getter = None, sgkey = None, filter = default_filter, label = default_label, **kw ): - if sgkey == None: sgkey = default_sgkey - if label == None: label = TruthD3PDKeys.GenEventGetterLabel() - if getter == None: + if sgkey is None: sgkey = default_sgkey + if label is None: label = TruthD3PDKeys.GenEventGetterLabel() + if getter is None: getter = TruthD3PDMaker.GenEventGetterTool( name + '_Getter', Label = label, Selector = filter, diff --git a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/GenParticleD3PDObject.py b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/GenParticleD3PDObject.py index d1c4ecb0614b43ce73f49e6581c05cb09d63491f..410fa23e6a5862c9c537bd5b67e917a688f1a254 100644 --- a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/GenParticleD3PDObject.py +++ b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/GenParticleD3PDObject.py @@ -1,4 +1,4 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration # ## @file TruthD3PDMaker/python/GenParticleD3PDObject.py @@ -31,9 +31,9 @@ def make_GenParticle_D3PDObject( default_prefix, default_sgkey, getter = None, sgkey = None, filter = default_filter, label = default_label, **kw ): - if sgkey == None: sgkey = default_sgkey - if label == None: label = prefix - if getter == None: + if sgkey is None: sgkey = default_sgkey + if 
label is None: label = prefix + if getter is None: getter = TruthD3PDMaker.GenParticleGetterTool (name + "_Getter", Label = label, SGKey = sgkey, @@ -62,14 +62,14 @@ GenParticleD3PDObject = make_GenParticle_D3PDObject( TruthD3PDKeys.GenParticlePr GenParticleD3PDObject.defineBlock( 0, 'GenParticle', TruthD3PDMaker.GenParticleFillerTool ) -if TruthD3PDFlags.GenEventAssocLabel() != None and TruthD3PDFlags.GenEventAssocLabel() != "": +if TruthD3PDFlags.GenEventAssocLabel() is not None and TruthD3PDFlags.GenEventAssocLabel() != "": GenPartEventAssoc = IndexAssociation( GenParticleD3PDObject, TruthD3PDMaker.GenParticleEventAssociationTool, TruthD3PDFlags.GenEventAssocLabel(), blockname = "GenPartEventAssoc", prefix = 'mcevt_' ) -if TruthD3PDFlags.GenVertexAssocLabel() != None and TruthD3PDFlags.GenVertexAssocLabel() != "": +if TruthD3PDFlags.GenVertexAssocLabel() is not None and TruthD3PDFlags.GenVertexAssocLabel() != "": GenPartProdVertexAssoc = IndexAssociation( GenParticleD3PDObject, TruthD3PDMaker.GenParticleVertexAssociationTool, TruthD3PDFlags.GenVertexAssocLabel(), @@ -91,7 +91,7 @@ if TruthD3PDFlags.GenParticleMother(): prefix = 'mother_', InParticles = True) -if TruthD3PDFlags.GenVertexAssocLabel() != None and TruthD3PDFlags.GenVertexAssocLabel() != "": +if TruthD3PDFlags.GenVertexAssocLabel() is not None and TruthD3PDFlags.GenVertexAssocLabel() != "": GenPartDecayVertexAssoc = IndexAssociation( GenParticleD3PDObject, TruthD3PDMaker.GenParticleVertexAssociationTool, TruthD3PDFlags.GenVertexAssocLabel(), @@ -113,7 +113,7 @@ if TruthD3PDFlags.GenParticleChild(): prefix = 'child_', InParticles = False ) -if TruthD3PDFlags.TruthTrackAssocLabel() != None and TruthD3PDFlags.TruthTrackAssocLabel() != "": +if TruthD3PDFlags.TruthTrackAssocLabel() is not None and TruthD3PDFlags.TruthTrackAssocLabel() != "": GenPartTruthTrackAssoc = IndexAssociation( GenParticleD3PDObject, TruthD3PDMaker.GenParticleParticleAssociationTool, TruthD3PDFlags.TruthTrackAssocLabel(), diff --git 
a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/GenVertexD3PDObject.py b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/GenVertexD3PDObject.py index 7e5462cda3ec6c1c17cc1066d24146f34044255c..035d510283a6ca12a12b2e6054c2925e55f1a7f2 100644 --- a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/GenVertexD3PDObject.py +++ b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/GenVertexD3PDObject.py @@ -1,4 +1,4 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration # ## @file TruthD3PDMaker/python/GenParticleD3PDObject.py @@ -29,9 +29,9 @@ def make_GenVertex_D3PDObject( default_prefix, default_sgkey, getter = None, sgkey = None, filter = default_filter, label = default_label, **kw ): - if sgkey == None: sgkey = default_sgkey - if label == None: label = TruthD3PDKeys.GenVertexGetterLabel() - if getter == None: + if sgkey is None: sgkey = default_sgkey + if label is None: label = TruthD3PDKeys.GenVertexGetterLabel() + if getter is None: getter = TruthD3PDMaker.GenVertexGetterTool( name + '_Getter', Label = label, Selector = filter, @@ -60,7 +60,7 @@ GenVertexD3PDObject.defineBlock( 0, TruthD3PDMaker.GenVertexFillerTool, WriteID=TruthD3PDFlags.WriteTruthVertexIDs() ) -if TruthD3PDFlags.GenParticleAssocLabel() != None and TruthD3PDFlags.GenParticleAssocLabel() != "": +if TruthD3PDFlags.GenParticleAssocLabel() is not None and TruthD3PDFlags.GenParticleAssocLabel() != "": if TruthD3PDFlags.GenVertexInPartAssoc(): GenVertexPartInAssoc = \ IndexMultiAssociation( GenVertexD3PDObject, @@ -79,7 +79,7 @@ if TruthD3PDFlags.GenParticleAssocLabel() != None and TruthD3PDFlags.GenParticle prefix = 'outpart_', InParticles = False ) -if TruthD3PDFlags.GenEventAssocLabel() != None and TruthD3PDFlags.GenEventAssocLabel() != "": +if TruthD3PDFlags.GenEventAssocLabel() is not None and TruthD3PDFlags.GenEventAssocLabel() != "": GenVertexEventAssoc = IndexAssociation( GenVertexD3PDObject, 
TruthD3PDMaker.GenVertexEventAssociationTool, TruthD3PDFlags.GenEventAssocLabel(), diff --git a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/HforConfig.py b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/HforConfig.py index ac8720e1ec515bebbb7fcca451fc3a38fdc3f8a4..58e38bb33173dc66efc08ce0a6c273a557bfc112 100644 --- a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/HforConfig.py +++ b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/HforConfig.py @@ -1,4 +1,4 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration class HforConfig: """ @@ -35,7 +35,6 @@ class HforConfig: self.getConfigFile(runArgs, filepath) self.getDSID(runArgs) self.getHforType() - #self.checkConsistency() return self.config_String @@ -50,12 +49,11 @@ class HforConfig: """ #Identify Hfor type - from AthenaCommon.Utils.unixtools import FindFile - import sys, os + import sys try: self.file = open(filepath) - except: + except Exception: currentError = "Exiting. Configuration file should be in "+str(filepath) self.hforLog.error(currentError) sys.exit(0) @@ -87,7 +85,7 @@ class HforConfig: try: #try to get the DSID from runArgs self.curr_DSID = runArgs.RunNumber - except: + except Exception: #if cannot access runargs parse input file name for DSID if len(self.fList) != 0: files = self.fList @@ -98,7 +96,7 @@ class HforConfig: self.curr_DSID = firstFile[index+1] try: int(self.curr_DSID) - except: + except Exception: self.hforLog.error("Could not find DSID from filename. The Hfor tool will not be correctly configured! Have you obeyed the naming convention?") self.curr_DSID = 0 @@ -139,33 +137,3 @@ class HforConfig: if self.config_String == "fail": self.hforLog.warning("failed to find DSID in configuration file. Hfor has not been activated. Does this sample require Hfor? ") - - - - - def checkConsistency(self): - """ - Checks that all the files have the same DSID - Currently not used - remove? 
- - """ - import re - - - #check that all samples are of the same Hfor type otherwise exit - #What to do in case of runArgs being used? - for newfile in self.fList: - tmp2 = newfile.split(".") - for index, x in enumerate(tmp2): - if re.search('mc[1234567890]{2}', x) is not None: - thisDSID = index - - try: - if proc_dict[tmp2[thisDSID+1]] != self.config_String: - self.hforLog.error("This tool must be used with samples of the same Hfor type. Terminating now") - sys.exit(0) - except: - - self.hforLog.error("failure when checking if all DSIDs are of same type") - - diff --git a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/PartonJetConfig.py b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/PartonJetConfig.py index b46163c34a40778ad7738cb28e051556834bf971..32577c8e7ad349cffa7c3c3bc796ca0aaf3e0839 100644 --- a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/PartonJetConfig.py +++ b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/PartonJetConfig.py @@ -9,19 +9,11 @@ # Reconstruction/Jet/JetSimTools/PartonTruthJets_jobOptions.py # - -import EventCommonD3PDMaker -from D3PDMakerConfig.D3PDMakerFlags import D3PDMakerFlags -from AthenaCommon.AlgSequence import AlgSequence +from AthenaCommon.SystemOfUnits import GeV from RecExConfig.ObjKeyStore import cfgKeyStore from RecExConfig.RecFlags import rec -from JetRec.JetMomentGetter import make_JetMomentGetter -from JetRec.JetGetters import * - -from AthenaCommon.SystemOfUnits import MeV, GeV -from JetRec.JetRecConf import JetAlgorithm - +from JetRec.JetGetters import make_StandardJetGetter from JetSimTools.JetSimToolsConf import JetPartonSelectorTool def PartonJetConfig (finder = 'AntiKt', @@ -50,11 +42,10 @@ def PartonJetConfig (finder = 'AntiKt', jetPartonSelectorTool.DoPythia = doPythia jetPartonSelectorTool.DoHerwig = doHerwig jetPartonSelectorTool.max_absEta = absEtaMax - #jetPartonSelectorTool.OutputLevel = INFO ToolSvc += jetPartonSelectorTool # Configure jets builder - if inputCollections != None: + if inputCollections 
is not None: partonJetAlg = make_StandardJetGetter(finder,size,'Truth',inputSuff='Parton'+suffix, inputCollectionNames=inputCollections, inputTools=[jetPartonSelectorTool]).jetAlgorithmHandle() @@ -63,7 +54,5 @@ def PartonJetConfig (finder = 'AntiKt', else: partonJetAlg = make_StandardJetGetter(finder,size,'Truth',inputSuff='Parton'+suffix).jetAlgorithmHandle() partonJetAlg.AlgTools['JetFinalPtCut'].MinimumSignal = minJetPt - #partonJetAlg.AlgTools['InputToJet'].InputSelector = jetPartonSelectorTool - #partonJetAlg.OutputLevel = INFO return diff --git a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/PileUpInfoD3PDObject.py b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/PileUpInfoD3PDObject.py index b0b4c96010ecd0d9b801e7312a9aec255cacadc5..1a29127936b9944ea765595eb3275816248897d8 100644 --- a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/PileUpInfoD3PDObject.py +++ b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/PileUpInfoD3PDObject.py @@ -1,10 +1,7 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration import TruthD3PDMaker -import D3PDMakerCoreComps -from D3PDMakerCoreComps.D3PDObject import * -from D3PDMakerConfig.D3PDMakerFlags import D3PDMakerFlags -from TruthD3PDMaker.TruthD3PDMakerConf import * +from D3PDMakerCoreComps.D3PDObject import make_SG_D3PDObject from D3PDMakerCoreComps.ContainedVectorMultiAssociation import ContainedVectorMultiAssociation diff --git a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/TruthJetD3PDObject.py b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/TruthJetD3PDObject.py index e3b5b17e614377f5739fb9e1f6a611744c8739ee..c8d57d3a164a94700fe291ccacd0b6e77456d34c 100644 --- a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/TruthJetD3PDObject.py +++ b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/TruthJetD3PDObject.py @@ -1,9 +1,7 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 
2002-2020 CERN for the benefit of the ATLAS collaboration -import D3PDMakerCoreComps import EventCommonD3PDMaker import JetD3PDMaker -from D3PDMakerCoreComps.D3PDObject import D3PDObject from D3PDMakerCoreComps.D3PDObject import make_SGDataVector_D3PDObject from D3PDMakerConfig.D3PDMakerFlags import D3PDMakerFlags from RecExConfig.RecFlags import rec diff --git a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/TruthJetFilterConfig.py b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/TruthJetFilterConfig.py index dd93bf3c72a3d3f45298dfb7a6e83fd33196358c..c19a5236101c5f7be99096add7574dc042aaf5fc 100644 --- a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/TruthJetFilterConfig.py +++ b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/TruthJetFilterConfig.py @@ -1,6 +1,5 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration -# $Id$ # # @file TruthD3PDMaker/python/TruthJetFilterConfig.py # @author Renaud Bruneliere <Renaud.Bruneliere@cern.ch> @@ -8,11 +7,9 @@ # @brief Build truth container to be used for parton-jet building # -import EventCommonD3PDMaker from D3PDMakerConfig.D3PDMakerFlags import D3PDMakerFlags from McParticleAlgs.JobOptCfg import createMcAodBuilder from RecExConfig.RecFlags import rec -from AthenaCommon.AlgSequence import AlgSequence from RecExConfig.ObjKeyStore import cfgKeyStore from AthenaCommon import CfgMgr from TruthD3PDMaker.TruthD3PDMakerConf import D3PD__TruthJetFilterTool diff --git a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/TruthLeptonParentAssociation.py b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/TruthLeptonParentAssociation.py index 975f5b841d3a7b3e8643d184646f250cc9109e7e..e1c6a2d64a7e81594fef61f93c790de167823118 100644 --- a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/TruthLeptonParentAssociation.py +++ b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/TruthLeptonParentAssociation.py @@ -1,6 +1,5 @@ -# Copyright (C) 
2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration -# $Id$ # # @file TruthD3PDMaker/python/TruthLeptonParentAssociation.py # @author Zach Marshall <zach.marshall@cern.ch> @@ -18,7 +17,7 @@ def TruthLeptonParentAssociation (parent, *args, **kw): """Helper for setting up an association to lepton parents in the truth record""" - if blockname == None: + if blockname is None: blockname = prefix + 'LeptonParentMultiAssoc' return IndexMultiAssociation (parent, diff --git a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/TruthParticleChildAssociation.py b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/TruthParticleChildAssociation.py index 795b767121eea2196b17ba3adb15a573d814fb4c..e071911ab3f113cdc5c65dd6408e0821e92ccc50 100644 --- a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/TruthParticleChildAssociation.py +++ b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/TruthParticleChildAssociation.py @@ -1,6 +1,5 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration -# $Id$ # # @file TruthD3PDMaker/python/TruthParticleChildAssociation.py # @author Ryan Reece <ryan.reece@cern.ch> @@ -10,9 +9,7 @@ # -import D3PDMakerCoreComps import TruthD3PDMaker -from D3PDMakerCoreComps.D3PDObject import D3PDObject from D3PDMakerCoreComps.IndexMultiAssociation import IndexMultiAssociation @@ -25,7 +22,7 @@ def TruthParticleChildAssociation (parent, """Helper for setting up an association for truth particle children by index. 
""" - if blockname == None: + if blockname is None: blockname = prefix + 'TruthParticleChildAssociation' return IndexMultiAssociation (parent, diff --git a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/TruthParticleFakerObject.py b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/TruthParticleFakerObject.py index 3ad98ababf79e6cd04023ae3b3014dbcce70230d..d36f9a04b027bb01f16c7a06f4b31b69c5f9db06 100644 --- a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/TruthParticleFakerObject.py +++ b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/TruthParticleFakerObject.py @@ -1,4 +1,4 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration ## @file TruthD3PDMaker/python/TruthParticleFakerObject.py ## @brief Truth D3PD object for single particles @@ -13,9 +13,6 @@ from D3PDMakerCoreComps.D3PDObject import D3PDObject from D3PDMakerConfig.D3PDMakerFlags import D3PDMakerFlags from TruthD3PDAnalysis.AllTruthFilterTool import AllTruthFilterTool -from TruthD3PDAnalysis.StableChargedTruthFilterTool import StableChargedTruthFilterTool -from TruthD3PDMaker.TruthD3PDMakerKeys import TruthD3PDKeys -from TruthD3PDMaker.TruthD3PDMakerFlags import TruthD3PDFlags from AthenaCommon.SystemOfUnits import GeV @@ -28,9 +25,9 @@ def make_TruthParticleFaker_D3PDObject( default_prefix, default_sgkey, getter = None, sgkey = None, filter = default_filter, label = default_label, **kw ): - if sgkey == None: sgkey = default_sgkey - if label == None: label = prefix - if getter == None: + if sgkey is None: sgkey = default_sgkey + if label is None: label = prefix + if getter is None: getter = TruthD3PDMaker.GenParticleGetterTool (name + "_Getter", Label = label, SGKey = sgkey, diff --git a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/TruthParticleParentAssociation.py b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/TruthParticleParentAssociation.py index 
f0ee62e6ac07a7e1645b80cbcbc8b0ba6ccbf2a1..2b34e7984799b7f177cc353c12c083d24afc4093 100644 --- a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/TruthParticleParentAssociation.py +++ b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/TruthParticleParentAssociation.py @@ -1,6 +1,5 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration -# $Id$ # # @file TruthD3PDMaker/python/TruthParticleParentAssociation.py # @author Ryan Reece <ryan.reece@cern.ch> @@ -10,9 +9,7 @@ # -import D3PDMakerCoreComps import TruthD3PDMaker -from D3PDMakerCoreComps.D3PDObject import D3PDObject from D3PDMakerCoreComps.IndexMultiAssociation import IndexMultiAssociation @@ -25,7 +22,7 @@ def TruthParticleParentAssociation (parent, """Helper for setting up an association for truth particle parents by index. """ - if blockname == None: + if blockname is None: blockname = prefix + 'TruthParticleParentAssociation' return IndexMultiAssociation (parent, diff --git a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/TruthTauDecayAssociation.py b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/TruthTauDecayAssociation.py index 0f70fffb537b414ff733888686e7668e8d9c4157..1abfaf6e357fd63e3f24e51457717ab870183461 100644 --- a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/TruthTauDecayAssociation.py +++ b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/TruthTauDecayAssociation.py @@ -1,15 +1,12 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration -# $Id$ # # @file TruthD3PDMaker/python/TruthTauDecayAssociation.py # @author Zach Marshall <zach.marshall@cern.ch> # @date June 2013 # @brief Helper for setting up an association to tau decay products in the truth record -#import D3PDMakerCoreComps import TruthD3PDMaker -#from D3PDMakerCoreComps.D3PDObject import D3PDObject from D3PDMakerCoreComps.IndexMultiAssociation 
import IndexMultiAssociation def TruthTauDecayAssociation (parent, @@ -20,7 +17,7 @@ def TruthTauDecayAssociation (parent, *args, **kw): """Helper for setting up an association to tau decay products in the truth record""" - if blockname == None: + if blockname is None: blockname = prefix + 'TauDecayMultiAssoc' return IndexMultiAssociation (parent, diff --git a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/atlfast1D3PD.py b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/atlfast1D3PD.py index bf4cd19a34fbc12c382c4d9456c2f403306f25c5..0f9616d9f4dc253be617952db8617e0b88572c9a 100644 --- a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/atlfast1D3PD.py +++ b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/atlfast1D3PD.py @@ -1,6 +1,5 @@ -# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration -# $Id$ # # @file TruthD3PDMaker/python/atlfast1D3PD.py # @author Renaud Bruneliere <Renaud.Bruneliere@cern.ch> @@ -10,8 +9,6 @@ import D3PDMakerCoreComps -from D3PDMakerConfig.D3PDMakerFlags import D3PDMakerFlags - from EventCommonD3PDMaker.EventInfoD3PDObject import EventInfoD3PDObject from TruthD3PDMaker.Atlfast1ElectronD3PDObject import Atlfast1ElectronD3PDObject @@ -19,17 +16,11 @@ from TruthD3PDMaker.Atlfast1PhotonD3PDObject import Atlfast1PhotonD3PDOb from MuonD3PDMaker.MuonD3PDObject import MuonD3PDObject from JetD3PDMaker.JetD3PDObject import JetD3PDObject from TruthD3PDMaker.TruthJetD3PDObject import TruthJetD3PDObject -#from TauD3PDMaker.TauD3PDObject import TauD3PDObject -from MissingETD3PDMaker.MissingETD3PDMakerFlags import MissingETD3PDMakerFlags from TruthD3PDMaker.Atlfast1MissingETD3PDObject import Atlfast1MissingETD3PDObject from TruthD3PDMaker.Atlfast1MissingETD3PDObject import TruthMETD3PDObject -from EventCommonD3PDMaker.LBMetadataConfig import LBMetadataConfig -from HforD3PDObject import HforD3PDObject - from TruthD3PDMaker.GenEventD3PDObject import 
GenEventD3PDObject from TruthD3PDAnalysis.truthParticleConfig import truthParticleConfig -#from TruthD3PDAnalysis.TruthJetFilterConfig import TruthJetFilterConfig from TruthD3PDMaker.TruthParticleD3PDObject import TruthParticleD3PDObject from TruthD3PDMaker.PartonJetConfig import PartonJetConfig from RecExConfig.RecFlags import rec @@ -73,6 +64,5 @@ def atlfast1D3PD (file, alg += TruthMETD3PDObject (level=10) alg += TruthJetD3PDObject (level=10, sgkey='AntiKt4TruthJets', prefix='AntiKt4TruthJets_') alg += TruthJetD3PDObject (level=10, sgkey='AntiKt4TruthPartonJets', prefix='AntiKt4TruthPartonJets_') - alg += HforD3PDObject (**_args (0, 'HforInfo', kw)) return alg diff --git a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/evgenD3PD.py b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/evgenD3PD.py index 207221d26a7c11307de23cf48941826b41d787dc..d3a17cc0a05908f5f451a024664c3de80d2eb921 100644 --- a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/evgenD3PD.py +++ b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/python/evgenD3PD.py @@ -9,13 +9,9 @@ import D3PDMakerCoreComps -from D3PDMakerConfig.D3PDMakerFlags import D3PDMakerFlags - from EventCommonD3PDMaker.EventInfoD3PDObject import EventInfoD3PDObject -from JetD3PDMaker.JetD3PDObject import JetD3PDObject from TruthD3PDMaker.TruthJetD3PDObject import TruthJetD3PDObject -from MissingETD3PDMaker.MissingETD3PDMakerFlags import MissingETD3PDMakerFlags from TruthD3PDMaker.Atlfast1MissingETD3PDObject import TruthMETD3PDObject from TruthD3PDMaker.GenEventD3PDObject import GenEventD3PDObject @@ -24,7 +20,7 @@ from TruthD3PDMaker.TruthJetFilterConfig import TruthJetFilterConfig from TruthD3PDMaker.TruthParticleD3PDObject import TruthParticleD3PDObject from TruthD3PDMaker.PartonJetConfig import PartonJetConfig from RecExConfig.RecFlags import rec -from JetRec.JetGetters import * +from JetRec.JetGetters import make_StandardJetGetter from AthenaCommon.AlgSequence import AlgSequence topSequence = AlgSequence() @@ -76,8 +72,8 @@ def 
evgenD3PD (file, antikt6truthAlg.AlgTools['InputToJet'].InputCollectionKeys = ['FilteredD3PDTruth'] if doExcludeWZdecays: # Reconstruct standard ATLAS truth jets - antikt4truthAlgStd = make_StandardJetGetter('AntiKt',0.4,'Truth',disable=False).jetAlgorithmHandle() - antikt6truthAlgStd = make_StandardJetGetter('AntiKt',0.6,'Truth',disable=False).jetAlgorithmHandle() + antikt4truthAlgStd = make_StandardJetGetter('AntiKt',0.4,'Truth',disable=False).jetAlgorithmHandle() # noqa: F841 + antikt6truthAlgStd = make_StandardJetGetter('AntiKt',0.6,'Truth',disable=False).jetAlgorithmHandle() # noqa: F841 #-------------------------------------------------------------------------- diff --git a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/share/GenD3PDExample_jobOptions.py b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/share/GenD3PDExample_jobOptions.py index d01dcc54d2fbd5aafbb6618d1f70d830900758af..fb0777e983f30315fedcd63b25bfac8d6a4587fe 100644 --- a/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/share/GenD3PDExample_jobOptions.py +++ b/PhysicsAnalysis/D3PDMaker/TruthD3PDMaker/share/GenD3PDExample_jobOptions.py @@ -64,8 +64,5 @@ alg += GenParticleD3PDObject( 10, filter = AllTrackFilterTool() ) from TruthD3PDMaker.GenParticleD3PDObject import GenTruthTrackD3PDObject alg += GenTruthTrackD3PDObject( 10, filter = TruthTrackFilterTool() ) -#from TruthD3PDMaker.HforD3PDObject import HforD3PDObject -#alg += HforD3PDObject(**_args(0,'HforInfo',kw)) - ### you can link to the gen particle (e.g from tracks or btag truth lepton info) ### using the gen particle getter label: TruthD3PDKeys.GenParticleGetterLabel() diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/CMakeLists.txt b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..357dae19e36b473b6185c0840aef7313c2c1c835 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/CMakeLists.txt @@ -0,0 +1,26 @@ +# 
Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +################################################################################ +# Package: DerivationFrameworkBPhys +################################################################################ + +# Declare the package name: +atlas_subdir( DerivationFrameworkBPhys ) +find_package( ROOT COMPONENTS Core MathCore ) + +# Component(s) in the package: +atlas_add_component( DerivationFrameworkBPhys + DerivationFrameworkBPhys/*.h src/*.cxx src/components/*.cxx + INCLUDE_DIRS ${ROOT_INCLUDE_DIRS} + LINK_LIBRARIES xAODMuon AthenaBaseComps JpsiUpsilonToolsLib + MuonSelectorToolsLib ${ROOT_LIBRARIES} + xAODTracking xAODBPhysLib AthenaKernel RecoToolInterfaces EventPrimitives + DerivationFrameworkInterfaces BPhysToolsLib TrackVertexAssociationToolLib + xAODBase xAODMetaData AsgTools CaloInterfaceLib TrackToCaloLib + xAODEventInfo AthenaPoolUtilities xAODPrimitives TrigDecisionToolLib + BeamSpotConditionsData TrkVertexAnalysisUtilsLib ITrackToVertex + InDetTrackSelectionToolLib InDetV0FinderLib) + + +# Install files from the package: +atlas_install_python_modules( python/*.py POST_BUILD_CMD ${ATLAS_FLAKE8}) +atlas_install_joboptions( share/*.py ) diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/AugOriginalCounts.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/AugOriginalCounts.h new file mode 100644 index 0000000000000000000000000000000000000000..47c5b166c795ed1c5cecd6a67eb7289b72c130f2 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/AugOriginalCounts.h @@ -0,0 +1,91 @@ +/* + Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration +*/ + +/** + * @file AugOriginalCounts.h + * + * @brief Augmentation with primary vertex counts (before thinning) + */ + +#ifndef DERIVATIONFRAMEWORKBPHYS_AUGORIGINALCOUNTS_H +#define 
DERIVATIONFRAMEWORKBPHYS_AUGORIGINALCOUNTS_H + +#include "DerivationFrameworkInterfaces/IAugmentationTool.h" + + +#include "AthenaBaseComps/AthAlgTool.h" +#include "StoreGate/WriteDecorHandleKey.h" + +#include "xAODTracking/TrackParticleContainer.h" +#include "xAODTracking/VertexContainer.h" +#include "xAODEventInfo/EventInfo.h" + +#include <string> + + +#include "AthenaBaseComps/AthAlgTool.h" +#include "DerivationFrameworkInterfaces/IAugmentationTool.h" +#include "xAODTracking/TrackParticleContainer.h" +#include "xAODTracking/VertexContainer.h" +#include <string> +#include "xAODEventInfo/EventInfo.h" +#include <StoreGate/WriteDecorHandleKey.h> + +namespace DerivationFramework { + /// + /// @class AugOriginalCounts + /// + /// @brief Augmentation with primary vertex counts (before thinning) + /// + /// This tool adds primary vertex counts and track counts + /// to the EventInfo container in order to preserve them in + /// case the primary vertex or track collections are thinned. + /// + /// ### Job options + /// <table border="0"> + /// <tr><th align="left">Name</td> + /// <th align="left">Description</th></tr> + /// <tr><td>TrackContainer</td> + /// <td>name of the TrackParticle container to be used</td> + /// </tr> + /// <tr><td>VertexContainer</td> + /// <td>name of the Vertex container to be used</td> + /// </tr> + /// <tr><td>AddPVCountsByType</td> + /// <td>add PV counts by PV type (default: false)</td> + /// </td> + /// </table> + /// + class AugOriginalCounts : public AthAlgTool, public IAugmentationTool { + public: + /// @brief Main constructor + AugOriginalCounts(const std::string& t, const std::string& n, + const IInterface* p); + /// @brief Main method called for each event + virtual StatusCode addBranches() const override; + virtual StatusCode initialize() override; + private: + /// + /// @name job options + /// @{ + SG::WriteDecorHandleKey<xAOD::EventInfo> m_OrigPVNTracks{this, "DO_NOT_SET1", "", "internal property"}; + 
SG::WriteDecorHandleKey<xAOD::EventInfo> m_OrigNTracksKeys{this, "DO_NOT_SET2", "", "internal property"}; + SG::WriteDecorHandleKey<xAOD::EventInfo> m_OrigNtype0{this, "DO_NOT_SET3", "", "internal property"}; + SG::WriteDecorHandleKey<xAOD::EventInfo> m_OrigNtype1{this, "DO_NOT_SET4", "", "internal property"}; + SG::WriteDecorHandleKey<xAOD::EventInfo> m_OrigNtype2{this, "DO_NOT_SET5", "", "internal property"}; + SG::WriteDecorHandleKey<xAOD::EventInfo> m_OrigNtype3{this, "DO_NOT_SET6", "", "internal property"}; + SG::WriteDecorHandleKey<xAOD::EventInfo> m_OrigNtypeUnknown{this, "DO_NOT_SET7", "", "internal property"}; + + SG::WriteDecorHandleKey<xAOD::VertexContainer> m_OrigSqrtPt2Sum{this, "DO_NOT_SET8", "", "internal property"}; + SG::WriteDecorHandleKey<xAOD::VertexContainer> m_d_nPVTracks{this, "DO_NOT_SET9", "", "internal property"}; + SG::ReadHandleKey<xAOD::TrackParticleContainer> m_TrackContainername; + SG::ReadHandleKey<xAOD::VertexContainer> m_PVContainername; + bool m_addPVCountsByType; + bool m_addNTracksToPVs; + bool m_addSqrtPt2SumToPVs; + /// @} + }; +} + +#endif // DERIVATIONFRAMEWORKBPHYS_AUGORIGINALCOUNTS_H diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BMuonTrackIsoTool.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BMuonTrackIsoTool.h new file mode 100644 index 0000000000000000000000000000000000000000..b670df14553eb9f885fb5201c586f0e4dfb61283 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BMuonTrackIsoTool.h @@ -0,0 +1,111 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ + +//============================================================================ +// BMuonTrackIsoTool.h +//============================================================================ +// +// Author : Wolfgang Walkowiak <Wolfgang.Walkowiak@cern.ch.> +// Changes: +// +// Add muon track 
isolation information for different configurations, +// different track selections and different PV-to-SV association methods. +// +// For an usage example see BPHY8.py . +// +//============================================================================ +// +#ifndef DERIVATIONFRAMEWORK_BMuonTrackIsoTool_H +#define DERIVATIONFRAMEWORK_BMuonTrackIsoTool_H + +#include "DerivationFrameworkBPhys/BPhysVertexTrackBase.h" +#include "xAODMuon/MuonContainer.h" +#include "boost/multi_array.hpp" + +namespace InDet { + class IInDetTrackSelectionTool; +} + +namespace DerivationFramework { + + class BMuonTrackIsoTool : virtual public BPhysVertexTrackBase { + + private: + typedef BPhysVertexTrackBase super; + + // + // internal helper class + // + protected: + class MuIsoItem : public BaseItem { + + public: + MuIsoItem(std::string Name="_none_", std::string Bname="muiso", + std::string Prefix=""); + virtual ~MuIsoItem(); + + virtual void resetVals(); + virtual void copyVals(const BaseItem& item); + virtual void copyVals(const MuIsoItem& item); + virtual void fill(double isoValue=-2., int nTracks=-1, + const xAOD::Muon* muon=NULL); + virtual std::string muIsoName(); + virtual std::string nTracksName(); + virtual std::string muLinkName(); + + public: + mutable std::vector<float> vIsoValues; + mutable std::vector<int> vNTracks; + mutable MuonBag vMuons; + }; // MuIsoItem + + public: + BMuonTrackIsoTool(const std::string& t, const std::string& n, + const IInterface* p); + + protected: + // Hook methods + virtual StatusCode initializeHook(); + virtual StatusCode finalizeHook(); + + virtual StatusCode addBranchesVCSetupHook(size_t ivc) const; + + virtual StatusCode addBranchesSVLoopHook(const xAOD::Vertex* vtx) const; + + virtual StatusCode calcValuesHook(const xAOD::Vertex* vtx, + const unsigned int ipv, + const unsigned int its, + const unsigned int itt) const; + virtual bool fastFillHook(const xAOD::Vertex* vtx, + const int ipv) const; + + private: + virtual StatusCode 
saveIsolation(const xAOD::Vertex* vtx) const; + virtual void initResults(); + virtual void setResultsPrefix(std::string prefix) const; + + virtual std::string buildBranchName(unsigned int ic, + unsigned int its, + unsigned int ipv, + unsigned int itt) const; + + private: + // job options + std::string m_muonContainerName; + std::vector<double> m_isoConeSizes; + std::vector<double> m_isoTrkImpLogChi2Max; + std::vector<int> m_isoDoTrkImpLogChi2Cut; + + // containers + mutable const xAOD::MuonContainer* m_muons; + + + // results array + typedef boost::multi_array<MuIsoItem, 4> MuIsoItem4_t; + mutable MuIsoItem4_t m_results; + + }; // BMuonTrackIsoTool +} // namespace + +#endif // DERIVATIONFRAMEWORK_BMuonTrackIsoTool_H diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BPhysAddMuonBasedInvMass.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BPhysAddMuonBasedInvMass.h new file mode 100644 index 0000000000000000000000000000000000000000..3b87f7528241bb851ec4209b5cb249b97787bac8 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BPhysAddMuonBasedInvMass.h @@ -0,0 +1,304 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ + +/** + * @file BPhysAddMuonBasedInvMass.h + * @author Wolfgang Walkowiak <wolfgang.walkowiak@cern.ch> + * + * @brief Augmentation with muon-information based invariant mass. 
+ * + */ +// +#ifndef DERIVATIONFRAMEWORK_BPhysAddMuonBasedInvMass_H +#define DERIVATIONFRAMEWORK_BPhysAddMuonBasedInvMass_H + +#include <string> +#include <vector> +#include <set> +#include <map> + +#include "AthenaBaseComps/AthAlgTool.h" +#include "DerivationFrameworkInterfaces/IAugmentationTool.h" +#include "GaudiKernel/ToolHandle.h" +#include "xAODBPhys/BPhysHelper.h" +#include "EventPrimitives/EventPrimitives.h" +#include "ITrackToVertex/ITrackToVertex.h" + +namespace DerivationFramework { + // + // typedefs -- to abbreviate long lines + // + typedef std::vector<const xAOD::TrackParticle*> TrackBag; + typedef std::vector<const xAOD::Muon*> MuonBag; + + /// + /// @class BPhysAddMuonBasedInvMass + /// @author Wolfgang Walkowiak <wolfgang.walkowiak@cern.ch> + /// + /// @brief Augment secondary vertices with muon-information-based mass. + /// + /// Add muon-information based invarient mass to secondary vertices using + /// a four vector sum. Optionally, it also calculates the minimum + /// chi2 for all muon tracks of the secondary vertex candidate w.r.t. + /// any primary vertex matching the selection criteria. 
+ /// + /// ### Job options provided by this class: + /// <table border="0"> + /// <tr><th align="left">Name</th> + /// <th align="left">Description</th> + /// </tr> + /// <tr><td>BranchPrefix</td> + /// <td>assign the prefix of added branches + /// (possibly the derivation format's name)</td> + /// </tr> + /// <tr><td>VertexContainerName</td> + /// <td>name of container for vertices</td> + /// </tr> + /// <tr><td>TrkMasses</td> + /// <td>ordered list of track masses + /// (Important to keep proper order: J/psi muons go first!)</td> + /// </tr> + /// <tr><td>TrackToVertexTool</td> + /// <td>ToolHandle for track-to-vertex tool</td> + /// </tr> + /// <tr><td>AdjustToMuonKinematics</td> + /// <td>Adjust the primary track particle's kinematics to the one of + /// the muon.</td> + /// </tr> + /// <tr><td valign="top">AddMinChi2ToAnyPVMode</td> + /// <td>mode of minLogChi2ToAnyPV calculation: (default: 0) + /// <table border="0"> + /// <tr><th align="left">Value</th> + /// <th align="left">Explanation<th></tr> + /// <tr><td> 0 </td><td>no such calculation</td></tr> + /// <tr><td> 1 </td><td>use all PVs of requested + /// type(s)</td></tr> + /// <tr><td> 2 </td><td>exclude PVs associated to SVs</td></tr> + /// <tr><td> 3 </td><td>replace PVs associated to SVs by + /// corresponding refitted PVs</td></tr> + /// </table></td> + /// </tr> + /// <tr><td>PrimaryVertexContainerName</td> + /// <td>name of container for primary vertices</td> + /// </tr> + /// <tr><td>MinNTracksInPV</td> + /// <td>minimum number of tracks in PV + /// for PV to be considered in calculation + /// of minChi2MuToAnyPV variable.</td> + /// </tr> + /// <tr><td>PVTypesToConsider</td> + /// <td>list of primary vertex types to consider + /// (default: {1, 3})</td> + /// </tr> + /// <tr><td>DoVertexType</td> + /// <td>PV-to-SV association types to be considered (bitwise variable, + /// see xAODBPhys::BPhysHelper)<br> + /// Note: only needed for AddMinChi2ToAnyPVMode > 1</td> + /// </tr> + /// </table> + /// 
+ /// @note + /// + /// For a usage example see BPHY8.py . + /// + class BPhysAddMuonBasedInvMass : virtual public AthAlgTool, + virtual public IAugmentationTool { + + public: + /// + /// @brief Main contructor + /// + BPhysAddMuonBasedInvMass(const std::string& t, const std::string& n, + const IInterface* p); + + /// @brief Initialize augmentation tool. + virtual StatusCode initialize(); + /// @brief Finalize augmentation tool. + virtual StatusCode finalize(); + /// @brief Main method called for each event. + virtual StatusCode addBranches() const; + + protected: + /// + /// @name Internal protected methods + /// @{ + /// + /// @brief Calculate muon-information based mass values if available. + /// + /// @param[in] vtx secondary vertex + /// @param[in] trkMasses ordered vector of track mass values + /// @param[in] nMuRequested number of muons requested + /// @returns muon-information based invariant mass for secondary + /// vertex and the corresponding uncertainty + /// + std::pair<double, double> getMuCalcMass(xAOD::BPhysHelper& vtx, + std::vector<double> + trkMasses, + int nMuRequested) const; + /// + /// @brief Obtain a set of tracks with muon track information if available + /// + /// @param[in] vtx secondary vertex + /// @returns container of muon tracks, number of muons found + /// + std::pair<TrackBag, int> getTracksWithMuons(xAOD::BPhysHelper& vtx) const; + /// + /// @brief Calculate invariant mass and uncertainty from a set of tracks. + /// + /// Returns invariant mass and mass error given + /// a set of tracks, their mass hypotheses and a reference position. + /// Each track must have a separate mass hypothesis in + /// the vector, and they must be in the same order as the tracks in the + /// track vector. Otherwise it will go horribly wrong. 
+ /// + /// @param[in] trksIn container with tracks to be considered + /// @param[in] massHypoTheses vector of mass hypotheses in the same + /// order as the tracks + /// @param[in] pos position of the vertex + /// @returns invariant mass value and uncertainty + /// + std::pair<double,double> + getInvariantMassWithError(TrackBag trksIn, + std::vector<double> massHypotheses, + const Amg::Vector3D& pos) const; + /// + /// @brief Determine minimum log chi2 of signal muon tracks w.r.t. + // any primary vertex. + /// + /// Find minimum log chi2 distance of signal muons w.r.t any primary + /// vertex of required types and with a minimum number of tracks cut. + /// It also depends on the mode w.r.t. the treatment of the associated + /// primary vertex and the type of PV-to-SV association. + /// Returns this minimum chi2. + /// + /// @param[in] vtx secondary vertex + /// @param[in] pvContainer container of primary vertices + /// @parma[in] pvtypes vector of primary vertex types to be considered + /// @param[in] minNTracksInPV minimum number of tracks in primary + /// vertex for it to be considered + /// @param[in] mode mode of operation (possible values: 0, 1, 2 ,3) + /// @param[in] pv_type type of PV-to-SV association + /// @returns minimum log chi2 = log(d0^2/d0e^+z0^2/z0e^2) w.r.t. + /// any primary vertex + /// + double getMinChi2ToAnyPV(xAOD::BPhysHelper& vtx, + const xAOD::VertexContainer* pvContainer, + const std::vector<int>& pvtypes, + const int minNTracksInPV, + const int mode, + const xAOD::BPhysHelper::pv_type& + pvAssocType) const; + /// + /// @brief Calculate log chi2 value of a track w.r.t. a position. + /// + /// Calculate the log chi2 ( = log((d0/d0e)^2+(z0/z0e)^2) contribution + /// of a track at the position closest to the given PV. 
+ /// + /// @param[in] track track considered + /// @param[in] pos position considered + /// @returns log chi2 value + /// + double getTrackPVChi2(const xAOD::TrackParticle& track, + const Amg::Vector3D& pos) const; + /// + /// @brief Extract 3x3 momentum covariance matrix from a TrackParticle. + /// + /// Extract the 3x3 momentum covariance matrix in (x,y,z) notation + /// from the (phi, theta, qoverp) notation from a TrackParticle. + /// + /// @param[in] track TrackParticle considered + /// @returns 3x3 momentum covariance matrix + /// + AmgSymMatrix(3) getMomentumCov(const xAOD::TrackParticle* track) const; + /// + /// @brief Extract 3x3 momentum covariance matrix from a Perigee. + /// + /// Extract the 3x3 momentum covariance matrix in (x,y,z) notation + /// from the (phi, theta, qoverp) notation from a Perigee. + /// + /// @param[in] perigee Trk::Perigee considered + /// @returns 3x3 momentum covariance matrix + /// + AmgSymMatrix(3) getMomentumCov(const Trk::Perigee* perigee) const; + /// + /// @brief Extract 3x3 momentum covariance matrix from a track parameter + /// vector and 5x5 covariance matrix. + /// + /// Extract the 3x3 momentum covariance matrix in (x,y,z) notation + /// from the (phi, theta, qoverp) notation from a vector of + /// track parameters and the 5x5 error matrix. + /// + /// @param[in] pars 5-vector of track parameters + /// @param[in] cov 5x5 covariance matrix of track parameters + /// @returns 3x3 momentum covariance matrix + /// + AmgSymMatrix(3) getMomentumCov(const AmgVector(5)& pars, + const AmgSymMatrix(5)& cov) const; + /// + /// @brief Find all muons associated to secondary vertex. + /// + /// Returns a vector of xAOD::Muon objects found + /// in this vertex and subsequent decay vertices. + /// Recursively calls itself if necessary. 
+ /// + /// @param[in] vtx secondary vertex + /// @returns container of muons found + /// + MuonBag findAllMuonsInDecay(xAOD::BPhysHelper& vtx) const; + /// + /// @brief Obtain a set of ID tracks for a set of muons. + /// + /// @param[in] muons container of muon objects + /// @returns container of associated ID tracks + /// + TrackBag getIdTracksForMuons(MuonBag& muons) const; + /// + /// @brief Extract TrackParticle for Muon and adjust kinematics. + /// + /// Extract primary track particle from muon; + /// if configured adjust pt, eta and phi of it before returning + /// a pointer to it. + /// + /// @param[in] muon pointer to muon + /// @returns TrackParticle pointer + /// + const xAOD::TrackParticle* adjustTrackParticle(const xAOD::Muon* muon) + const; + /// + /// @brief Initialize PV-to-SV association type vector. + /// + void initPvAssocTypeVec(); + /// + /// @brief Clear the cache of adjusted TrackParticles. + /// + void clearAdjTpCache() const; + /// @} + private: + /// @name job options + /// @{ + std::string m_branchPrefix; + std::string m_vertexContainerName; + std::vector<double> m_trkMasses; + ToolHandle<Reco::ITrackToVertex> m_trackToVertexTool; + bool m_adjustToMuonKinematics; + int m_addMinChi2ToAnyPVMode; + std::string m_pvContainerName; + int m_minNTracksInPV; + std::vector<int> m_pvTypesToConsider; + int m_doVertexType; + /// @} + /// + /// map original -> adjusted track particles + typedef std::map<const xAOD::TrackParticle*, const xAOD::TrackParticle*> + TpMap_t; + /// map of adjusted track particles as cache + mutable TpMap_t m_adjTpCache; + + /// cache for individual vertex types + std::vector<xAOD::BPhysHelper::pv_type> m_pvAssocTypes; + + }; // class +} // namespace + +#endif // DERIVATIONFRAMEWORK_BPhysAddMuonBasedInvMass_H diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BPhysConversionFinder.h 
b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BPhysConversionFinder.h new file mode 100644 index 0000000000000000000000000000000000000000..50a429669624464377daaa5a0be20f8836623618 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BPhysConversionFinder.h @@ -0,0 +1,76 @@ +/* + Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration +*/ +/////////////////////////////////////////////////////////////////// +// BPhysConversionFinder.h, (c) ATLAS Detector software +/////////////////////////////////////////////////////////////////// +// Author: A. Chisholm <andrew.chisholm@cern.ch> +#ifndef DERIVATIONFRAMEWORK_BPHYSCONVERSIONFINDER_H +#define DERIVATIONFRAMEWORK_BPHYSCONVERSIONFINDER_H + +#include <string> + +#include "AthenaBaseComps/AthAlgTool.h" +#include "GaudiKernel/ToolHandle.h" +#include "DerivationFrameworkInterfaces/IAugmentationTool.h" + +#include "InDetConversionFinderTools/VertexPointEstimator.h" +#include "InDetConversionFinderTools/ConversionPostSelector.h" +#include "TrkVertexSeedFinderUtils/ITrkDistanceFinder.h" + +#include "TLorentzVector.h" + +namespace Trk +{ + class V0Tools; + class IVertexFitter; + class TrkVKalVrtFitter; +} + +namespace InDet +{ + class VertexPointEstimator; + class TrackPairsSelector; + class ConversionPostSelector; +} + +namespace DerivationFramework { + +class BPhysConversionFinder : public AthAlgTool, public IAugmentationTool { + + public: + + BPhysConversionFinder(const std::string& t, const std::string& n, const IInterface* p); + + StatusCode initialize() override; + StatusCode finalize() override; + + virtual StatusCode addBranches() const override; + + private: + + StatusCode doCascadeFit(const xAOD::Vertex * diMuonVertex, const xAOD::Vertex * convVertex, const double diMuonMassConstraint, TLorentzVector & fitMom, float & chiSq) const; + + std::string m_diMuonCollectionToCheck; + std::vector<std::string> 
m_passFlagsToCheck; + + ToolHandle <Trk::V0Tools> m_v0Tools; + ToolHandle <Trk::IVertexFitter> m_vertexFitter; + ToolHandle <InDet::VertexPointEstimator> m_vertexEstimator; + ToolHandle <Trk::ITrkDistanceFinder> m_distanceTool; + ToolHandle <InDet::ConversionPostSelector> m_postSelector; + ToolHandle <Trk::TrkVKalVrtFitter > m_cascadeFitter; + + std::string m_inputTrackParticleContainerName; + std::string m_conversionContainerName; + + float m_maxDistBetweenTracks; + float m_maxDeltaCotTheta; + + bool m_requireDeltaM; + float m_maxDeltaM; + + }; +} + +#endif // DERIVATIONFRAMEWORK_BPhysConversionFinder_H diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BPhysMetadataBase.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BPhysMetadataBase.h new file mode 100644 index 0000000000000000000000000000000000000000..d4a5ffd9085689269c4530ece8f75cb818f33c44 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BPhysMetadataBase.h @@ -0,0 +1,100 @@ +/* + Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration +*/ + +//============================================================================ +// BPhysMetadataBase.h +//============================================================================ +// +// Author : Wolfgang Walkowiak <Wolfgang.Walkowiak@cern.ch.> +// Changes: +// - w.w., 2017-01-22: Added use of BPhysMetaDataTool. +// - w.w., 2019-12-05: Added long and vector<long> types +// +// Store JO metadata in the output file. +// +// It uses the BPhysMetaDataTool (default) or the IOVDbMetaDataTool to +// store job option information as metadata in a specific branch whose +// name needs to be prefixed by the derivation format name. +// Note: Metadata stored by the IOVDbMetaDataTool is not readable on +// 'RootCore' level. +// +// This is a base class. Inherit from it to add the job options you want +// to store. 
For a usage example, see +// Bmumu_metadata.h / Bmumu_metadata.cxx +// and +// BPHY8.py . +// +//============================================================================ +// +#ifndef DERIVATIONFRAMEWORK_BPhysMetadataBase_H +#define DERIVATIONFRAMEWORK_BPhysMetadataBase_H + +#include <string> +#include <map> +#include <vector> + +#include "AthenaBaseComps/AthAlgTool.h" +#include "DerivationFrameworkInterfaces/IAugmentationTool.h" +#include "GaudiKernel/ToolHandle.h" + +namespace DerivationFramework { + + class BPhysMetadataBase : virtual public AthAlgTool, + virtual public IAugmentationTool { + public: + BPhysMetadataBase(const std::string& t, const std::string& n, + const IInterface* p); + + virtual StatusCode initialize(); + virtual StatusCode finalize(); + + virtual StatusCode addBranches() const; + + protected: + virtual void recordPropertyI(const std::string& name, int val); + virtual void recordPropertyL(const std::string& name, long val); + virtual void recordPropertyD(const std::string& name, double val); + virtual void recordPropertyB(const std::string& name, bool val); + virtual void recordPropertyS(const std::string& name, const std::string& val); + + virtual void recordPropertyVI(const std::string& name, const std::vector<int>& val); + virtual void recordPropertyVL(const std::string& name, const std::vector<long>& val); + virtual void recordPropertyVD(const std::string& name, const std::vector<double>& val); + virtual void recordPropertyVB(const std::string& name, const std::vector<bool>& val); + virtual void recordPropertyVS(const std::string& name, + const std::vector<std::string>& val); + + private: + virtual StatusCode saveMetaDataBPhys() const; + virtual std::string buildFolderName(const std::string& fname="") const; + virtual std::string vecToString(const std::vector<int>& v) const; + virtual std::string vecToString(const std::vector<long>& v) const; + virtual std::string vecToString(const std::vector<double>& v) const; + virtual std::string 
vecToString(const std::vector<bool>& v) const; + virtual std::string vecToString(const std::vector<std::string>& v) const; + + private: + /// Object accessing the output metadata store + mutable ServiceHandle< StoreGateSvc > m_outputMetaStore; + + // job options + std::string m_derivationName; + std::string m_mdFolderName; + std::string m_prefix; + + // maps for different types of JOs + std::map<std::string, int> m_propInt; + std::map<std::string, long> m_propLong; + std::map<std::string, double> m_propDouble; + std::map<std::string, bool> m_propBool; + std::map<std::string, std::string> m_propString; + std::map<std::string, std::vector<int> > m_propVInt; + std::map<std::string, std::vector<long> > m_propVLong; + std::map<std::string, std::vector<double> > m_propVDouble; + std::map<std::string, std::vector<bool> > m_propVBool; + std::map<std::string, std::vector<std::string> > m_propVString; + }; // class +} // namespace + +#endif // DERIVATIONFRAMEWORK_BPhysMetadataBase_H diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BPhysPVCascadeTools.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BPhysPVCascadeTools.h new file mode 100644 index 0000000000000000000000000000000000000000..c1561102db7691fc7fbcd2a311956d694943ad03 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BPhysPVCascadeTools.h @@ -0,0 +1,159 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ + +#ifndef DERIVATIONFRAMEWORK_PVCASCADETOOLS_H +#define DERIVATIONFRAMEWORK_PVCASCADETOOLS_H + +#include "GaudiKernel/ToolHandle.h" +#include "xAODBPhys/BPhysHypoHelper.h" +#include "DerivationFrameworkBPhys/CascadeTools.h" +#include "EventKernel/PdtPdg.h" + +#include <vector> +// Authors: Adam Barton <abarton@SPAMMENOTTtttcern.ch> +// Eva Bouhova <bouhova@SPAMMENOTTtttcern.ch> + + +//class CascadeTools; +namespace Trk { + class V0Tools; 
+ class VxCascadeInfo; +} + +namespace Analysis{ + class PrimaryVertexRefitter; +} +namespace InDet{ +class BeamSpotData; +} + +namespace HepPDT{ + class ParticleDataTable; +} + +namespace DerivationFramework { + + class BPhysPVCascadeTools { + typedef ElementLink<xAOD::VertexContainer> VertexLink; + typedef std::vector<VertexLink> VertexLinkVector; + private: + const Trk::V0Tools *m_v0Tools; + const CascadeTools *m_cascadeTools; + const InDet::BeamSpotData *m_beamSpotData; + + /// minimum number of tracks required in PVs considered + size_t m_PV_minNTracks; + + public: + bool m_copyAllVertices; + BPhysPVCascadeTools(const CascadeTools *cascadeTools); + BPhysPVCascadeTools(const CascadeTools *cascadeTools, + const InDet::BeamSpotData*); + + + void ProcessVertex(const std::vector<TLorentzVector> &mom, Amg::MatrixX cov, xAOD::BPhysHypoHelper &vtx, xAOD::BPhysHelper::pv_type pvtype, double mass) const; + + static void FillBPhysHelperNULL(xAOD::BPhysHelper &vtx, const xAOD::VertexContainer* PvContainer, + xAOD::BPhysHelper::pv_type pvtype); + + ///Fills the BPhysHelper object with the standard parameters + void FillBPhysHelper(const std::vector<TLorentzVector> &mom, Amg::MatrixX cov, xAOD::BPhysHelper &vtx, const xAOD::Vertex* refPV,const xAOD::VertexContainer* refPvContainer, + xAOD::BPhysHelper::pv_type pvtype, int) const; + + ///Returns the index integer of the vertex with the lowest Z in relation to the given vertex + size_t FindLowZIndex(const std::vector<TLorentzVector> &mom, const xAOD::BPhysHelper &Obj, + const std::vector<const xAOD::Vertex*> &PVlist, + const size_t PV_minNTracks=0) const; + ///Returns the index integer of the vertex with the lowest A0 in relation to the given vertex + size_t FindLowA0Index(const std::vector<TLorentzVector> &mom, const xAOD::BPhysHelper &Obj, + const std::vector<const xAOD::Vertex*> &PVlist, + const size_t PV_minNTracks=0) const; + + static size_t FindHighPtIndex(const std::vector<const xAOD::Vertex*> &PVlist); + + template< 
size_t NTracks> //NTracks = number of tracks in this type of vertex, if this is not known do not use this method + static bool VerticesMatchTracks(const xAOD::Vertex* v1, const xAOD::Vertex* v2); + + template< size_t NTracks> + static const xAOD::Vertex* FindVertex(const xAOD::VertexContainer* c, const xAOD::Vertex* v); + + /// Static method call with + /// DerivationFramework::BPhysDerHelpers::GetGoodPV + /// Returns a std::vector containing only PVs of type 1 and 3 - HighPt + /// and Pileup, which have at least PV_minNTracks tracks. + static std::vector<const xAOD::Vertex*> GetGoodPV(const xAOD::VertexContainer* pvContainer); + + /// Set the minimum number of tracks required for primary vertices to be + /// considered for primary vertex association to a secondary vertex. + /// Note that this requirement will not be applied for finding + /// the vertex with the highest pT sum (FindHighPtIndex()) since + /// it would possibly exclude this vertex which has been marked + /// earlier in the tool chain. + void SetMinNTracksInPV(size_t PV_minNTracks); + + /// Get the current beamspot position either from cache or from + /// BeamCondSvc. + /// Before processing a new event, make sure to call + /// GetBeamSpot(); + [[nodiscard]] const Amg::Vector3D& GetBeamSpot() const noexcept; + + /// Find the index for the PV with the lowest distance in z of + /// the SV's DOCA point w.r.t. the beamline and the PV. + size_t FindLowZ0BAIndex(const std::vector<TLorentzVector> &mom, const xAOD::BPhysHelper &obj, + const std::vector<const xAOD::Vertex*> &PVlist, + const size_t PV_minNTracks=0) const; + /// Calculate the distance along z axis between the PV and + /// SV's DOCA point w.r.t. the beamline. + double DistInZtoDOCA(const std::vector<TLorentzVector> &mom, const xAOD::BPhysHelper &obj, + const xAOD::Vertex* vertex) const; + /// Point of DOCA w.r.t. the beamline backward extrapolated + /// along the B candidate's momentum direction. 
+ Amg::Vector3D DocaExtrapToBeamSpot(const std::vector<TLorentzVector> &mom, const xAOD::BPhysHelper &obj) const; + + static void PrepareVertexLinks(Trk::VxCascadeInfo *result, const xAOD::TrackParticleContainer* importedTrackCollection); + + StatusCode FillCandwithRefittedVertices( bool refitPV, + const xAOD::VertexContainer* pvContainer, xAOD::VertexContainer* refPvContainer, + const Analysis::PrimaryVertexRefitter *pvRefitter, size_t in_PV_max, int DoVertexType, + Trk::VxCascadeInfo* casc, int index, + double mass, xAOD::BPhysHypoHelper &vtx); + + static std::vector<const xAOD::TrackParticle*> CollectAllChargedTracks(const std::vector<xAOD::Vertex*> &cascadeVertices); + + static void SetVectorInfo(xAOD::BPhysHelper &, const Trk::VxCascadeInfo*); + static bool uniqueCollection(const std::vector<const xAOD::TrackParticle*>&); + static bool uniqueCollection(const std::vector<const xAOD::TrackParticle*>&, const std::vector<const xAOD::TrackParticle*>&); + static bool LinkVertices(SG::AuxElement::Decorator<VertexLinkVector> &decor, const std::vector<const xAOD::Vertex*>& vertices, + const xAOD::VertexContainer* vertexContainer, const xAOD::Vertex* vert); + static double getParticleMass(const HepPDT::ParticleDataTable* pdt, int pdg); + }; // class BPhysPVCascadeTools + +} // namespace DerivationFramework + + +//added by ab +template< size_t NTracks> +bool DerivationFramework::BPhysPVCascadeTools::VerticesMatchTracks(const xAOD::Vertex* v1, const xAOD::Vertex* v2) +{ + if(v1->nTrackParticles() != v2->nTrackParticles()) return false; + assert(v1->nTrackParticles() == NTracks); + std::array<const xAOD::TrackParticle*, NTracks> a1; + std::array<const xAOD::TrackParticle*, NTracks> a2; + for(size_t i=0;i<NTracks;i++){ + a1[i] = v1->trackParticle(i); + a2[i] = v2->trackParticle(i); + } + std::sort(a1.begin(), a1.end()); + std::sort(a2.begin(), a2.end()); + return a1 == a2; +} + +template< size_t NTracks> +const xAOD::Vertex* 
DerivationFramework::BPhysPVCascadeTools::FindVertex(const xAOD::VertexContainer* c, const xAOD::Vertex* v){ + for (const xAOD::Vertex* a : *c){ + if(VerticesMatchTracks<NTracks>(a,v)) return a; + } + return nullptr; +} +#endif // DERIVATIONFRAMEWORK_PVCASCADETOOLS_H diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BPhysPVThinningTool.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BPhysPVThinningTool.h new file mode 100644 index 0000000000000000000000000000000000000000..62d62d559433af5e05faa519d33e3593ffa9872f --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BPhysPVThinningTool.h @@ -0,0 +1,53 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ + +/////////////////////////////////////////////////////////////////// +// BPhysPVThinningTool.h +/////////////////////////////////////////////////////////////////// + +#ifndef DERIVATIONFRAMEWORK_BPhysPVThinningTool_H +#define DERIVATIONFRAMEWORK_BPhysPVThinningTool_H 1 + +#include "xAODTracking/VertexContainer.h" +// Gaudi & Athena basics +#include "AthenaBaseComps/AthAlgTool.h" +#include "StoreGate/ReadHandleKeyArray.h" +#include "StoreGate/ThinningHandleKey.h" +// DerivationFramework includes +#include "DerivationFrameworkInterfaces/IThinningTool.h" + +namespace DerivationFramework { + + + class BPhysPVThinningTool : public AthAlgTool, public IThinningTool { + + public: + /** Constructor with parameters */ + BPhysPVThinningTool( const std::string& t, const std::string& n, const IInterface* p ); + + /** Destructor */ + ~BPhysPVThinningTool(); + + // Athena algtool's Hooks + virtual StatusCode initialize() override; + virtual StatusCode finalize() override; + + /** Check that the current event passes this filter */ + virtual StatusCode doThinning() const override; + + private: + StringProperty m_streamName{ this, "StreamName", "", "Name of the 
stream being thinned" }; + SG::ReadHandleKeyArray<xAOD::VertexContainer> m_BPhyCandList; + SG::ThinningHandleKey<xAOD::TrackParticleContainer> m_TrackContainerName; + SG::ThinningHandleKey<xAOD::VertexContainer> m_PVContainerName; + mutable std::atomic<unsigned int> m_ntot; + mutable std::atomic<unsigned int> m_npass; + mutable std::atomic<unsigned int> m_tracks_kept; + bool m_keepTracks; + }; + +} + + +#endif diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BPhysPVTools.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BPhysPVTools.h new file mode 100644 index 0000000000000000000000000000000000000000..86412eb46d9a2035c1ff0e390a11dc331720adbd --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BPhysPVTools.h @@ -0,0 +1,108 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ + +#ifndef DERIVATIONFRAMEWORK_PVTOOLS_H +#define DERIVATIONFRAMEWORK_PVTOOLS_H + +#include "GaudiKernel/ToolHandle.h" +#include "xAODBPhys/BPhysHelper.h" +#include <vector> + +// Author: Adam Barton <abarton@SPAMMENOTTtttcern.ch> +namespace InDet{ +class BeamSpotData; +} +namespace Trk { + class V0Tools; +} +namespace Analysis{ + class PrimaryVertexRefitter; +} + + +namespace DerivationFramework { + + class BPhysPVTools { + + private: + const Trk::V0Tools *m_v0Tools; + const InDet::BeamSpotData *m_beamSpotData; + + /// minimum number of tracks required in PVs considered + size_t m_PV_minNTracks; + + bool m_3dCalc; + + public: + + BPhysPVTools(const Trk::V0Tools *v0Tools); + BPhysPVTools(const Trk::V0Tools *v0Tools, const InDet::BeamSpotData*); + void SetSave3d(bool v) { m_3dCalc =v; } + StatusCode FillCandExistingVertices(xAOD::VertexContainer* vtxContainer, const xAOD::VertexContainer* pvContainer, int DoVertexType); + + static void FillBPhysHelperNULL(xAOD::BPhysHelper &vtx, const xAOD::VertexContainer* PvContainer, + 
xAOD::BPhysHelper::pv_type pvtype, bool do3d = false); + + StatusCode FillCandwithRefittedVertices(xAOD::VertexContainer* vtxContainer, const xAOD::VertexContainer* pvContainer,xAOD::VertexContainer* refPvContainer, const Analysis::PrimaryVertexRefitter* , size_t in_PV_max, int DoVertexType); + + void DecorateWithNULL(xAOD::VertexContainer* vtxContainer,const xAOD::VertexContainer* pvContainer, int DoVertexType) const; + + void DecorateWithDummyVertex(xAOD::VertexContainer* vtxContainer, const xAOD::VertexContainer* pvContainer, const xAOD::Vertex* Dummy, int DoVertexType, bool SetOrignal) const; + + ///Fills the BPhysHelper object with the standard parameters + void FillBPhysHelper(xAOD::BPhysHelper &vtx, const xAOD::Vertex* refPV,const xAOD::VertexContainer* refPvContainer, + xAOD::BPhysHelper::pv_type pvtype, int) const; + + ///Returns the index integer of the vertex with the lowest Z in relation to the given vertex + size_t FindLowZIndex(const xAOD::BPhysHelper &Obj, + const std::vector<const xAOD::Vertex*> &PVlist, + const size_t PV_minNTracks=0) const; + ///Returns the index integer of the vertex with the lowest A0 in relation to the given vertex + size_t FindLowA0Index(const xAOD::BPhysHelper &Obj, + const std::vector<const xAOD::Vertex*> &PVlist, + const size_t PV_minNTracks=0) const; + + static size_t FindHighPtIndex(const std::vector<const xAOD::Vertex*> &PVlist); + + /// Static method call with + /// DerivationFramework::BPhysDerHelpers::GetGoodPV + /// Returns a std::vector containing only PVs of type 1 and 3 - HighPt + /// and Pileup, which have at least PV_minNTracks tracks. + static std::vector<const xAOD::Vertex*> GetGoodPV(const xAOD::VertexContainer* pvContainer); + + /// Set the minimum number of tracks required for primary vertices to be + /// considered for primary vertex association to a secondary vertex. 
+ /// Note that this requirement will not be applied for finding + /// the vertex with the highest pT sum (FindHighPtIndex()) since + /// it would possibly exclude this vertex which has been marked + /// earlier in the tool chain. + void SetMinNTracksInPV(size_t PV_minNTracks); + + /// Get the current beamspot position either from cache or from + /// BeamCondSvc. + /// Before processing a new event, make sure to call + /// GetBeamSpot(); + [[nodiscard]] const Amg::Vector3D& GetBeamSpot() const noexcept; + + /// Find the index for the PV with the lowest distance in z of + /// the SV's DOCA point w.r.t. the beamline and the PV. + size_t FindLowZ0BAIndex(const xAOD::BPhysHelper &obj, + const std::vector<const xAOD::Vertex*> &PVlist, + const size_t PV_minNTracks=0) const; + /// Calculate the distance along z axis between the PV and + /// SV's DOCA point w.r.t. the beamline. + double DistInZtoDOCA(const xAOD::BPhysHelper &obj, + const xAOD::Vertex* vertex) const; + /// Point of DOCA w.r.t. the beamline backward extrapolated + /// along the B candidate's momentum direction. 
+ Amg::Vector3D DocaExtrapToBeamSpot(const xAOD::BPhysHelper &obj) const; + + static void PrepareVertexLinks(xAOD::Vertex* theResult, + const xAOD::TrackParticleContainer* importedTrackCollection); + }; // class BPhysPVTools + +} // namespace DerivationFramework + + +#endif // DERIVATIONFRAMEWORK_PVTOOLS_H diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BPhysVarBlinder.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BPhysVarBlinder.h new file mode 100644 index 0000000000000000000000000000000000000000..02e572a4f598a7d8b8d98d96f8b25389978df38d --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BPhysVarBlinder.h @@ -0,0 +1,76 @@ +/* + Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration +*/ + +/// +/// @file BPhysVarBlinder.h +/// @author Wolfgang Walkowiak <Wolfgang.Walkowiak@cern.ch> +/// +/// @brief Vertex variable(s) blinding tool +/// +#ifndef DERIVATIONFRAMEWORK_BPhysVarBlinder_H +#define DERIVATIONFRAMEWORK_BPhysVarBlinder_H + +#include "DerivationFrameworkInterfaces/IAugmentationTool.h" +#include "DerivationFrameworkBPhys/CfAthAlgTool.h" +#include "GaudiKernel/ToolHandle.h" + +/// +/// forward declarations +/// +namespace xAOD { + class BPhysBlindingTool; +} +namespace DerivationFramework { + + /// + /// @class BPhysVarBlinder + /// @author Wolfgang Walkowiak <Wolfgang.Walkowiak@cern.ch> + /// + /// @ brief Vertex variable(s) blinding tool + /// + /// This is an AthAlgTool wrapper around the BPhysBlindingTool + /// from the package BPhysTools. + /// + /// Job options: + /// - BlindingTool : ToolHandle for xAOD::BPhysBlindingTool + /// - EnableBlinding : Switch to easily en-/disable this tool + /// + /// For an example configuration using this tool see BPHY8.py. 
+ /// + class BPhysVarBlinder : public CfAthAlgTool, public IAugmentationTool { + + public: + /// + /// @brief Constructor + /// + BPhysVarBlinder(const std::string& t, const std::string& n, + const IInterface* p); + /// + /// @brief Initialization + /// + StatusCode initialize(); + /// + /// @brief Finalization + /// + StatusCode finalize(); + /// + /// @brief Perform blinding per event (if enabled) + /// + virtual StatusCode addBranches() const; + + private: + /// + /// @name Job options + /// @{ + /// + /// @brief ToolHandle for blinding tool + ToolHandle<xAOD::BPhysBlindingTool> m_blindingTool; + /// + /// @brief Switch for enabling blinding + bool m_enableBlinding; + /// @} + + }; +} +#endif // DERIVATIONFRAMEWORK_BPhysVarBlinder_H diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BPhysVertexTrackBase.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BPhysVertexTrackBase.h new file mode 100644 index 0000000000000000000000000000000000000000..28a8f683152f0d015855083d0946ca05a966276f --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BPhysVertexTrackBase.h @@ -0,0 +1,303 @@ +/* + Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration +*/ + +//============================================================================ +// BPhysVertexTrackBase.h +//============================================================================ +// +// Author : Wolfgang Walkowiak <Wolfgang.Walkowiak@cern.ch.> +// Changes: +// +// Base class for vertex-track related classes in need of +// track-to-vertex association handling. +// +// For an usage example see BVertexTrackIsoTool and BPHY8.py . 
+// +//============================================================================ +// +#ifndef DERIVATIONFRAMEWORK_BPhysVertexTrackBase_H +#define DERIVATIONFRAMEWORK_BPhysVertexTrackBase_H + +#include "AthenaBaseComps/AthAlgTool.h" +#include "DerivationFrameworkInterfaces/IAugmentationTool.h" +#include "GaudiKernel/ToolHandle.h" +#include "xAODEventInfo/EventInfo.h" +#include "xAODBPhys/BPhysHelper.h" +#include "EventPrimitives/EventPrimitives.h" +#include "ITrackToVertex/ITrackToVertex.h" +#include "TrackVertexAssociationTool/ITrackVertexAssociationTool.h" +#include "xAODTracking/TrackParticleContainer.h" +#include "xAODTracking/TrackParticleAuxContainer.h" +#include "xAODTracking/VertexContainer.h" +#include "xAODTracking/VertexAuxContainer.h" + +#include <string> +#include <vector> +#include <map> + +// forward declarations +namespace InDet { + class IInDetTrackSelectionTool; +} + +class TVector3; + +namespace DerivationFramework { + // + // typedefs -- to abbreviate long lines + // + typedef std::vector<const xAOD::TrackParticle*> TrackBag; + typedef std::vector<const xAOD::Muon*> MuonBag; + typedef InDet::IInDetTrackSelectionTool TrkSelTool; + + class BPhysVertexTrackBase : public AthAlgTool, + virtual public IAugmentationTool { + + protected: + class BaseItem { + + public: + BaseItem(std::string Name="_none_", std::string Bname="iso", + std::string Prefix=""); + virtual ~BaseItem(); + + virtual void setup(std::string Name, std::string Bname="iso", + std::string Prefix=""); + virtual void setPrefix(std::string Prefix); + virtual void resetVals(); + virtual void copyVals(const BaseItem& item) = 0; + virtual std::string buildName(std::string qualifier="", + std::string suffix=""); + virtual std::string toString() const; + + public: + std::string name; + std::string bname; + std::string prefix; + }; + + protected: + class TrackTypeCounter { + + public: + TrackTypeCounter(BPhysVertexTrackBase& Parent, std::string Name); + virtual ~TrackTypeCounter(); + + 
virtual void addToCounter(uint64_t atype, uint64_t rtype=0, + std::string prefix="", std::string suffix="", + uint64_t counts=1); + + virtual void addToCounter(std::string name, uint64_t atype=0, + uint64_t counts=1); + + virtual std::string countsToString(uint indent=0) const; + + public: + std::string name; + + private: + typedef std::map<std::string, std::pair<uint64_t, uint64_t> > + NameCountMap_t; + NameCountMap_t m_cnts; + BPhysVertexTrackBase& m_parent; + }; + + public: + // + // enumeration for types of tracks to be considered + // + enum track_type {ASSOCPV, PVTYPE0, PVTYPE1, PVTYPE2, PVTYPE3, NONE, + NULLVP, + CAPVRFN3U0, CAPVNRN3U0, CAPVRF3DU0, CAPVNR3DU0, + CAPVRFN3U1, CAPVNRN3U1, CAPVRF3DU1, CAPVNR3DU1, + CAPVRFN3U2, CAPVNRN3U2, CAPVRF3DU2, CAPVNR3DU2, + CAPVRFNNU3, CAPVNRNNU3, CAPVRFNNU4, CAPVNRNNU4, + CAPVRFNNU5, CAPVNRNNU5, CAPVRFNNU6, CAPVNRNNU6, + CAPVRFNNU7, CAPVNRNNU7, CAPVRFNNU8, CAPVNRNNU8, + CAPVRFNNU9, CAPVNRNNU9 }; + static const int n_track_types; + static const std::string track_type_str[]; + static const uint64_t track_type_bit[]; + private: + static uint64_t s_track_type_all_cached; + + public: + // + // convenience methods + // + static const std::string tts(track_type type); + static uint64_t ttb(track_type type); + static uint64_t ttall(); + static uint64_t ttallMin(); + static uint64_t rttor(const std::vector<uint64_t> &vtypes); + static std::string wrapLines(std::string lines, + std::string prefix); + static std::string trackToString(const xAOD::TrackParticle* track); + + public: + // + // public methods called by the framework + // + BPhysVertexTrackBase(const std::string& t, const std::string& n, + const IInterface* p); + + virtual StatusCode initialize(); + virtual StatusCode finalize(); + virtual StatusCode addBranches() const; + + protected: + // + // Hook methods -- need be be overwritten in the concrete class + // + virtual StatusCode initializeHook(); + virtual StatusCode finalizeHook(); + virtual StatusCode 
addBranchesHook() const; + virtual StatusCode addBranchesVCSetupHook(size_t ivc) const; + virtual StatusCode addBranchesSVLoopHook(const xAOD::Vertex* vtx) const; + virtual StatusCode calcValuesHook(const xAOD::Vertex* vtx, + const unsigned int ipv, + const unsigned int its, + const unsigned int itt) const; + virtual bool fastFillHook(const xAOD::Vertex* vtx, + const int ipv) const; + + // + // Methods to be called from within addBranchesSVLoopHook() + // + virtual StatusCode calculateValues(const xAOD::Vertex* vtx) const; + + // + // internal methods + // + // name string for vertex pointer and PV index + virtual std::string buildPvAssocCacheName(const xAOD::Vertex* vtx, + const int ipv) const; + + virtual void initPvAssocTypeVec(); + virtual TrackBag findAllTracksInDecay(xAOD::BPhysHelper& vtx) const; + virtual void findAllTracksInDecay(xAOD::BPhysHelper& vtx, + TrackBag& tracks) const; + virtual MuonBag findAllMuonsInDecay(xAOD::BPhysHelper& vtx) const; + virtual void findAllMuonsInDecay(xAOD::BPhysHelper& vtx, + MuonBag& muons) const; + virtual TrackBag findAllMuonIdTracksInDecay(xAOD::BPhysHelper& vtx, + MuonBag& muons) const; + virtual std::vector<TVector3> + findMuonRefTrackMomenta(xAOD::BPhysHelper& vtx, MuonBag& muons) const; + + virtual TrackBag selectTracks(const xAOD::TrackParticleContainer* + inpTracks, + xAOD::BPhysHelper& cand, + const unsigned int ipv, + const unsigned int its, + const unsigned int itt) const; + virtual TrackBag selectTracks(const xAOD::TrackParticleContainer* + inpTracks, + const TrackBag& exclTracks, + xAOD::BPhysHelper& cand, + const unsigned int ipv, + const unsigned int its, + const unsigned int itt) const; + virtual uint64_t detTrackTypes(const xAOD::TrackParticle* track, + const xAOD::Vertex* candPV, + const xAOD::Vertex* candRefPV) const; + virtual double getTrackCandPVLogChi2(const xAOD::TrackParticle* + track, + const xAOD::Vertex* vtx, + bool doDCAin3D=false, + int chi2DefToUse=0) const; + virtual std::vector<double> 
getTrackLogChi2DCA(const xAOD::TrackParticle* + track, + const xAOD::Vertex* vtx, + bool doDCAin3D=false, + int chi2DefToUse=0) + const; + virtual std::string buildBranchBaseName(unsigned int its, + unsigned int ipv, + unsigned int itt, + std::string preSuffix="") const; + + virtual std::pair<const xAOD::Vertex*, double> + findMinChi2PV(const xAOD::TrackParticle* track, + const xAOD::Vertex* candPV, + const xAOD::Vertex* candRefPV, + const std::vector<uint64_t>& pvtypes, + const int minNTracksInPV, + const bool useRefittedPvs, + const bool doDCAin3D, + const int chi2DefToUse) const; + + virtual const xAOD::Vertex* + findAssocPV(const xAOD::TrackParticle* track, + const xAOD::Vertex* candPV, + const xAOD::Vertex* candRefPV, + const std::vector<uint64_t>& pvtypes, + const int minNTracksInPV, + const bool useRefittedPvs) const; + + protected: + // job options + std::vector<std::string> m_branchPrefixes; + std::string m_branchBaseName; + std::string m_branchSuffix; + std::vector<std::string> m_vertexContainerNames; + std::string m_trackParticleContainerName; + ToolHandleArray<TrkSelTool> m_trackSelectionTools; + + ToolHandle<Reco::ITrackToVertex> m_trackToVertexTool; + + ToolHandle<CP::ITrackVertexAssociationTool> m_tvaTool; + + std::string m_pvContainerName; + std::vector<std::string> m_refPVContainerNames; + + int m_doVertexType; + std::vector<uint64_t> m_useTrackTypes; + bool m_incPrecVerticesInDecay; + int m_minNTracksInPV; + std::vector<uint64_t> m_pvTypesToConsider; + int m_debugTrackTypes; + std::vector<uint64_t> m_debugTracksInEvents; + + // working point of TVA tool + bool m_tvaToolHasWpLoose; + + // containers + mutable const xAOD::TrackParticleContainer* m_tracks; + mutable const xAOD::TrackParticleAuxContainer* m_tracksAux; + mutable const xAOD::VertexContainer* m_pvtxContainer; + mutable const xAOD::VertexContainer* m_svtxContainer; + mutable const xAOD::VertexAuxContainer* m_svtxAuxContainer; + mutable const xAOD::VertexContainer* m_refPVContainer; + 
mutable const xAOD::VertexAuxContainer* m_refPVAuxContainer; + + // cache for individual vertex types + std::vector<xAOD::BPhysHelper::pv_type> m_pvAssocTypes; + + mutable unsigned int m_nEvtsSeen; + + // event info + mutable const xAOD::EventInfo* m_eventInfo; + + // cache for similar PV-to-SV associations + typedef std::map<std::string, int> StringIntMap_t; + mutable StringIntMap_t m_pvAssocResMap; + + // track types considered + uint64_t m_trackTypesUsed; + + // track type counter map (for debugging) + std::unique_ptr<TrackTypeCounter> m_mttc; + + // run and event numbers (see EventIDBase.h for types) + mutable unsigned int m_runNumber; + mutable uint64_t m_evtNumber; + + // debug tracks in the current event? + mutable bool m_debugTracksInThisEvent; + + }; // class +} // namespace + +#endif // DERIVATIONFRAMEWORK_BPhysVertexTrackBase_H diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BTrackVertexMapLogger.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BTrackVertexMapLogger.h new file mode 100644 index 0000000000000000000000000000000000000000..0b20390c590ad9ce57e4c5599d56256a11d6fa72 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BTrackVertexMapLogger.h @@ -0,0 +1,49 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ + +//============================================================================ +// BTrackVertexMapLogger.h +//============================================================================ +// +// Author : Wolfgang Walkowiak <Wolfgang.Walkowiak@cern.ch.> +// Changes: +// +// Initiate dumps of track-to-vertex maps to log file which are provided +// by BPhysTrackVertexMapTool instances. +// +// The BPhysTrackVertexMapTool instances need to be configured separately +// and handed to this tool. 
+// +//============================================================================ +// +#ifndef DERIVATIONFRAMEWORK_BTrackVertexMapLogger_H +#define DERIVATIONFRAMEWORK_BTrackVertexMapLogger_H + +#include "AthenaBaseComps/AthAlgTool.h" +#include "DerivationFrameworkInterfaces/IAugmentationTool.h" +#include "GaudiKernel/ToolHandle.h" +#include "BPhysTools/IBPhysTrackVertexMapTool.h" + +namespace DerivationFramework { + + class BTrackVertexMapLogger : virtual public AthAlgTool, + virtual public IAugmentationTool { + public: + BTrackVertexMapLogger(const std::string& t, const std::string& n, + const IInterface* p); + + virtual StatusCode initialize(); + virtual StatusCode finalize(); + + virtual StatusCode addBranches() const; + + private: + // job options + ToolHandleArray<xAOD::IBPhysTrackVertexMapTool> m_ttvmTools; + bool m_enable; + + }; // class +} // namespace + +#endif // DERIVATIONFRAMEWORK_BTrackVertexMapLogger_H diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BVertexClosestTrackTool.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BVertexClosestTrackTool.h new file mode 100644 index 0000000000000000000000000000000000000000..2b704516a23f3b1da1bf37b78b26b4a6b6bfb60a --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BVertexClosestTrackTool.h @@ -0,0 +1,149 @@ +/* + Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration +*/ + +//============================================================================ +// BVertexClosestTrackTool.h +//============================================================================ +// +// Author : Wolfgang Walkowiak <Wolfgang.Walkowiak@cern.ch.> +// Changes: +// +// Add B vertex closest track information for different configurations, +// different track selections and different PV-to-SV association methods. +// +// For an usage example see BPHY8.py . 
+// +//============================================================================ +// +#ifndef DERIVATIONFRAMEWORK_BVertexClosestTrackTool_H +#define DERIVATIONFRAMEWORK_BVertexClosestTrackTool_H + +#include "DerivationFrameworkBPhys/BPhysVertexTrackBase.h" +#include "boost/multi_array.hpp" + +namespace InDet { + class IInDetTrackSelectionTool; +} + +namespace DerivationFramework { + + class BVertexClosestTrackTool : virtual public BPhysVertexTrackBase { + + private: + typedef BPhysVertexTrackBase super; + + // + // internal helper class + // + protected: + class CtItem : public BaseItem { + + public: + CtItem(std::string Name="_none_", + std::string Bname = "ctrk", + std::string Prefix="", + double Dca=-999., double DcaErr=-99., + double Zca=-999., double ZcaErr=-99., + double VtxNDErr2=-99., double TrkNDErr2=-99., + double Phi0Used=-999., + int NTrksChi2=0, xAOD::TrackParticle* CloseTrack=NULL, + TrackBag Tracks = {}, + std::vector<std::vector<double> > Vtap = {}, + std::vector<unsigned short> Selpat = {}); + + virtual ~CtItem(); + + virtual void setup(std::string Name="_none_", + std::string Bname="ctrk", + std::string Prefix=""); + virtual void setup(std::string Name, std::string Bname, + std::string Prefix, + double Dca, double DcaErr, + double Zca, double ZcaErr, + double VtxNDErr2, double TrkNDErr2, + double Phi0Used, + int NTrksChi2, + xAOD::TrackParticle* CloseTrack=NULL, + TrackBag Tracks = {}, + std::vector<std::vector<double> > Vtap = {}, + std::vector<unsigned short> Selpat = {}); + virtual void resetVals(); + virtual void copyVals(const BaseItem& item); + virtual void copyVals(const CtItem& item); + virtual std::string dcaName(); + virtual std::string dcaErrName(); + virtual std::string zcaName(); + virtual std::string zcaErrName(); + virtual std::string vtxNDErr2Name(); + virtual std::string trkNDErr2Name(); + virtual std::string phi0UsedName(); + virtual std::string nTrksChi2Name(); + virtual std::string closeTrackName(); + virtual std::string 
toString() const; + + public: + mutable double dca; + mutable double dcaErr; + mutable double zca; + mutable double zcaErr; + mutable double vtxNDErr2; + mutable double trkNDErr2; + mutable double phi0Used; + mutable int nTrksChi2; + const xAOD::TrackParticle* closeTrack; + mutable TrackBag tracks; + mutable std::vector<std::vector<double> > vtap; + mutable std::vector<unsigned short> selpat; + + }; // CtItem + + public: + BVertexClosestTrackTool(const std::string& t, const std::string& n, + const IInterface* p); + + protected: + // Hook methods + virtual StatusCode initializeHook(); + virtual StatusCode finalizeHook(); + + virtual StatusCode addBranchesVCSetupHook(size_t ivc) const; + + virtual StatusCode addBranchesSVLoopHook(const xAOD::Vertex* vtx) const; + + virtual StatusCode calcValuesHook(const xAOD::Vertex* vtx, + const unsigned int ipv, + const unsigned int its, + const unsigned int itt) const; + virtual bool fastFillHook(const xAOD::Vertex* vtx, + const int ipv) const; + private: + virtual StatusCode saveClosestTrack(const xAOD::Vertex* vtx) const; + virtual void initResults(); + virtual void setResultsPrefix(std::string prefix) const; + virtual StatusCode logCloseTracksDebugInfo() const; + + private: + // job options + + std::vector<std::string> m_closeTrackChi2SetName; + std::vector<int> m_closeTrackCorrChi2; + std::vector<bool> m_minDCAin3D; + std::vector<double> m_closeTrackMaxLogChi2; + std::vector<double> m_nCloseTrackMaxLogChi2; + + // results array + typedef boost::multi_array<CtItem, 4> CtItem4_t; + mutable CtItem4_t m_results; + + // last run and event numbers seen + mutable unsigned int m_lastRunNumber; + mutable uint64_t m_lastEvtNumber; + + // last secondary vertex (candidate) index + mutable unsigned int m_svIdx; + + }; // BVertexClosestTrackTool +} // namespace + +#endif // DERIVATIONFRAMEWORK_BVertexClosestTrackTool_H diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BVertexTrackIsoTool.h 
b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BVertexTrackIsoTool.h new file mode 100644 index 0000000000000000000000000000000000000000..03e645b85146a973ab987fef508b9fe3c899300b --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BVertexTrackIsoTool.h @@ -0,0 +1,119 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ + +//============================================================================ +// BVertexTrackIsoTool.h +//============================================================================ +// +// Author : Wolfgang Walkowiak <Wolfgang.Walkowiak@cern.ch.> +// Changes: +// +// Add B vertex track isolation information for different configurations, +// different track selections and different PV-to-SV association methods. +// +// For an usage example see BPHY8.py . +// +//============================================================================ +// +#ifndef DERIVATIONFRAMEWORK_BVertexTrackIsoTool_H +#define DERIVATIONFRAMEWORK_BVertexTrackIsoTool_H + +#include "DerivationFrameworkBPhys/BPhysVertexTrackBase.h" +#include "boost/multi_array.hpp" + +namespace InDet { + class IInDetTrackSelectionTool; +} + +namespace DerivationFramework { + + class BVertexTrackIsoTool : virtual public BPhysVertexTrackBase { + + private: + typedef BPhysVertexTrackBase super; + + // + // internal helper class + // + protected: + class IsoItem : public BaseItem { + + public: + IsoItem(std::string Name="_none_", std::string Bname="iso", + std::string Prefix="", + double IsoValue=-1., int NTracks=0); + virtual ~IsoItem(); + + virtual void setup(std::string Name, std::string Bname="iso", + std::string Prefix=""); + virtual void setup(std::string Name, std::string Bname, + std::string Prefix, + double IsoValue, int NTracks=0); + virtual void resetVals(); + virtual void copyVals(const BaseItem& item); + virtual void copyVals(const IsoItem& item); + virtual 
std::string isoName(); + virtual std::string nTracksName(); + + public: + mutable double isoValue; + mutable int nTracks; + }; // IsoItem + + public: + BVertexTrackIsoTool(const std::string& t, const std::string& n, + const IInterface* p); + + protected: + // Hook methods + virtual StatusCode initializeHook(); + virtual StatusCode finalizeHook(); + + virtual StatusCode addBranchesVCSetupHook(size_t ivc) const; + + virtual StatusCode addBranchesSVLoopHook(const xAOD::Vertex* vtx) const; + + virtual StatusCode calcValuesHook(const xAOD::Vertex* vtx, + const unsigned int ipv, + const unsigned int its, + const unsigned int itt) const; + virtual bool fastFillHook(const xAOD::Vertex* vtx, + const int ipv) const; + + private: + virtual StatusCode saveIsolation(const xAOD::Vertex* vtx) const; + virtual StatusCode calculateIsolation(const xAOD::Vertex* vtx) const; + virtual StatusCode calcIsolation(const IsoItem& iso, + const xAOD::Vertex* vtx, + const double coneSize, + const double logChi2Max, + const int doLogChi2, + const ToolHandle<TrkSelTool>& tSelTool, + const xAOD::BPhysHelper::pv_type + pvAssocType, + const int trackTypes ) const; + + virtual void initResults(); + virtual void setResultsPrefix(std::string prefix) const; + + virtual std::string buildBranchName(unsigned int ic, + unsigned int its, + unsigned int ipv, + unsigned int itt) const; + + private: + // job options + std::vector<double> m_isoConeSizes; + std::vector<double> m_isoTrkImpLogChi2Max; + std::vector<int> m_isoDoTrkImpLogChi2Cut; + bool m_useOptimizedAlgo; + + // results array + typedef boost::multi_array<IsoItem, 4> IsoItem4_t; + mutable IsoItem4_t m_results; + + }; // BVertexTrackIsoTool +} // namespace + +#endif // DERIVATIONFRAMEWORK_BVertexTrackIsoTool_H diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BmumuThinningTool.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BmumuThinningTool.h new file mode 
100644 index 0000000000000000000000000000000000000000..bba64bb5fcb4d22afc44f5456c5c674f54d9a94f --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/BmumuThinningTool.h @@ -0,0 +1,441 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ + +// +/** + * @file BmumuThinningTool.h + * @author Wolfgang Walkowiak <wolfgang.walkowiak@cern.ch> + * + * @brief Primary vertex, track and muon thinning for Bmumu analysis. + */ + +#ifndef DERIVATIONFRAMEWORK_BmumuThinningTool_H +#define DERIVATIONFRAMEWORK_BmumuThinningTool_H + +#include "DerivationFrameworkBPhys/CfAthAlgTool.h" +#include "DerivationFrameworkInterfaces/IThinningTool.h" +#include "GaudiKernel/ToolHandle.h" +#include "xAODBPhys/BPhysHypoHelper.h" + +#include <string> + +class IThinningSvc; + +namespace SG { + class AuxElement; +} + +namespace xAOD { + class AuxContainerBase; +} + +namespace DerivationFramework { + /// + /// @class BmumuThinningTool + /// @author Wolfgang Walkowiak <wolfgang.walkowiak@cern.ch> + /// + /// @brief Primary vertex, track and muon thinning for Bmumu analysis. + /// + /// This class provides a thinning tool to thin the primary vertex, + /// the muon and calibrated muon collections as well as the inner + /// detector track selection depending on: + /// - the selected secondary vertices + /// - the selected PV-to-SV vertex association method + /// - additional track sub-collection (e.g. "closest tracks") + /// + /// This tool is primarily written for the B(s)->mumu analysis + /// and used by the BPHY8 derivation. 
+ /// + /// ### Job options + /// <table border="0"> + /// <tr><th align="left">Name</td> + /// <th align="left">Description</th></tr> + /// <tr><td valign="top">ThinningService</td> + /// <td>Thinning service handle</td></tr> + /// <tr><td valign="top">TrackParticleContainerName</td> + /// <td>TrackParticle container name + /// (default: InDetTrackParticles)</td></tr> + /// <tr><td valign="top">VertexContainerNames</td> + /// <td>List of secondary vertex container names</td></tr> + /// <tr><td valign="top">VertexPassFlags</td> + /// <td>List of pass flags for the seconary vertices + /// empty list lets all vertices pass. + /// List length needs to be identical to length of + /// VertexContainerNames list if AlignPassToVertexList + /// is True</td></tr> + /// <tr><td valign="top">AlignPassToVertexList</td> + /// <td>Align VertexPassFlags to VertexContainerNames list? + /// This option causes a 1:1 correlation between the two lists, + /// i.e. a flag is only applied to the corresponding container + /// if this option is set to True. (default: false)</td></tr> + /// <tr><td valign="top">PVContainerName</td> + /// <td>Primary vertex container name</td></tr> + /// <tr><td valign="top">RefPVContainerNames</td> + /// <td>Refitted primary vertex container names. + /// This list must be of same length and order as the + /// m_vtxContNames list. 
+ /// (or empty: no thinning of refitted primary + /// vertex containers)</td></tr> + /// <tr><td valign="top">MuonContainerName</td> + /// <td> Name of the used muon container (default: "")</td></tr> + /// <tr><td valign="top">CalibMuonContainerName</td> + /// <td>Name of the calibrated muons container (default: "")</td></tr> + /// <tr><td valign="top">CloseTrackBranchBaseName</td> + /// <td>Closest track branch base name</td></tr> + /// <tr><td valign="top">CloseTrackBranchPrefixes</td> + /// <td>Closest track branch prefixes</td></tr> + /// <tr><td valign="top">KeepTracksForSelectedPVs</td> + /// <td>Keep tracks for selected (refitted) primary vertices? + /// (default: false)</td></tr> + /// <tr><td valign="top">MatchCalibratedMuons</td> + /// <td>Match vertex muons with calibrated muons? + /// (default: false)</td></tr> + /// <tr><td valign="top">MarkMatchedMuons</td> + /// <td>Mark orginal muons for matched calibrated muons as well? + /// (only makes sense if MatchCalibratedMuons = True; + /// default: false)</td></tr> + /// <tr><td valign="top">MarkMatchedCalMuons</td> + /// <td>Mark calibrated muons for matched calibrated muons as well? + /// (only makes sense if MatchedCalibratedMuons = False; + /// default: false)</td></tr> + /// <tr><td valign="top">SyncMatchedMuonsBothWays</td> + /// <td> Force syncing marked muons both ways? + /// (default: false)</td></tr> + /// <tr><td valign="top">AllowFastMuonMaskSync</td> + /// <td>Allow fast sync of myon masks? + /// (Set to 'False' to force in-depth synchronization + /// of muon masks. Default: false)</td></tr> + /// <tr><td valign="top">KeepCloseTracks</td> + /// <td>Keep tracks for closest tracks? (default: false)</td></tr> + /// <tr><td valign="top">KeepTracksForMuons</td> + /// <td>Keep tracks for selected muons? (default: false)</td></tr> + /// <tr><td valign="top">KeepTracksForCalMuons</td> + /// <td>Keep tracks for selected calibrated muons? 
+ /// (default: false)</td></tr> + /// <tr><td valign="top">KeepMuonsForTracks</td> + /// <td>Keep (original) muons for selected tracks? + /// (default: false)</td></tr> + /// <tr><td valign="top">KeepCalMuonsForTracks</td> + /// <td>Keep calibrated muons for selected tracks? + /// (default: false)</td></tr> + /// <tr><td valign="top">ApplyAndForVertices</td> + /// <td>Apply AND for mask matching for vertices? + /// (default: false)</td></tr> + /// <tr><td valign="top">ApplyAndForTracks</td> + /// <td>Apply AND for mask matching for tracks? + /// (default: false)</td></tr> + /// <tr><td valign="top">ApplyAndForMuons</td> + /// <td>Apply AND for mask matching for muons? + /// (default: false)</td></tr> + /// <tr><td valign="top">ThinPVs"</td> + /// <td>Thin primary vertex collection? (default: true)</td></tr> + /// <tr><td valign="top">ThinRefittedPVs"</td> + /// <td>Thin refitted primary vertex collection? + /// (default: true)</td></tr> + /// <tr><td valign="top">ThinTracks"</td> + /// <td>Thin ID track collection? + /// (default: true)</td></tr> + /// <tr><td valign="top">ThinMuons"</td> + /// <td>Thin muon collections? 
+ /// (default: true)</td></tr> + /// </table> + /// + class BmumuThinningTool : public CfAthAlgTool, public IThinningTool { + + // useful typedefs + typedef xAOD::BPhysHelper::pv_type pv_type; + typedef ElementLink<xAOD::TrackParticleContainer> TrackParticleLink; + + public: + /// @name pv_type to string map + // Note: may later be migrated to xAODBPhys/BPhysHelper + static std::map<pv_type, std::string> PvTypeToVarNameMap; + + public: + /// @brief Main constructor + BmumuThinningTool(const std::string& t, const std::string& n, + const IInterface* p); + /// @brief Default destructor + ~BmumuThinningTool(); + /// @brief Initialize tool + StatusCode initialize(); + /// @brief Finalize tool + StatusCode finalize(); + /// @brief Main thinning method executed for each event + virtual StatusCode doThinning() const; + + protected: + /// + /// @brief Helper checking for hypothesis passing + /// + /// Helper to check whether an element is marked as passing a specific + /// hypothesis. + /// + /// @param[in] em auxillary storage element + /// @param[in] hypo name of the hypothesis + /// @returns true if hypothesis element contains true + /// + bool pass(const SG::AuxElement& em, std::string hypo) const; + /// + /// @brief Helper to get a TrackParticle link + /// + /// @param[in] vtx secondary vertex containing link + /// @param[in] name name of the link + /// @returns pointer to TrackParticle (NULL if not found) + /// + const xAOD::TrackParticle* getTrackParticle(const xAOD::Vertex* vtx, + std::string name) const; + + template<typename TYPE> + StatusCode applyThinMask(SG::ThinningHandle<TYPE> &trkCont, + const std::vector<bool>& trkMask, + bool doAnd) const; + + + /// + /// @brief Mark muons matched to secondary vertices + /// + /// @param[in] muCont pointer to MuonContainer + /// @param[in,out] muMask vector with mask per muon + /// @param[in] vtx secondary vertex + /// @param[in] counterName name of counter + /// @returns StatusCode + /// + StatusCode matchMuons(const 
xAOD::MuonContainer* muCont, + std::vector<bool>& muMask, + xAOD::BPhysHelper& vtx, + std::string counterName) const; + /// + /// @name Sync-mark methods + /// + /// @{ + /// + /// @brief Mark original muons for accepted calibrated muons + /// + /// @param[in] muCont pointer to MuonContainer of + /// (original) muons + /// @param[in] cmuCont pointer to MuonContainer of + /// calibrated muons + /// @param[in,out] muMask mask for (original) muons + /// @param[in] cmuMask mask for calibrated muons + /// @param[in] counterName base name for counters + /// @param[in] allowFastSync use fast synchronization method + /// @returns StatusCode + /// + StatusCode markOrigMuons(const xAOD::MuonContainer* muCont, + const xAOD::MuonContainer* cmuCont, + std::vector<bool>& muMask, + std::vector<bool>& cmuMask, + std::string counterName, + bool allowFastSync=true) const; + /// + /// @brief Mark calibrated muons for accepted (original) muons + /// + /// @param[in] muCont pointer to MuonContainer of + /// (original) muons + /// @param[in] cmuCont pointer to MuonContainer of + /// calibrated muons + /// @param[in] muMask mask for (original) muons + /// @param[in,out] cmuMask mask for calibrated muons + /// @param[in] counterName base name for counters + /// @param[in] allowFastSync use fast synchronization method + /// @returns StatusCode + /// + StatusCode markCalibMuons(const xAOD::MuonContainer* muCont, + const xAOD::MuonContainer* cmuCont, + std::vector<bool>& muMask, + std::vector<bool>& cmuMask, + std::string counterName, + bool allowFastSync) const; + /// + /// @brief Mark ID tracks of selected (original or calibrated) muons + /// + /// @param[in] trkPartCont pointer to TrackParticle container + /// @param[in,out] trkMask mask for tracks + /// @param[in] muCont pointer to MuonContainer + /// @param[in] muMask mask for muons + /// @param[in] counterName base name for counters + /// @returns StatusCode + /// + StatusCode markTrksForSelMuons(const xAOD::TrackParticleContainer* + 
trkPartCont, + std::vector<bool>& trkMask, + const xAOD::MuonContainer* muCont, + std::vector<bool>& muMask, + std::string counterName) const; + /// + /// @brief Mark muons for selected ID tracks + /// + /// @param[in] trkPartCont pointer to TrackParticle container + /// @param[in] trkMask mask for tracks + /// @param[in,out] muCont pointer to MuonContainer + /// @param[in] muMask mask for muons + /// @param[in] counterName base name for counters + /// @returns StatusCode + /// + StatusCode markMuonsForSelTracks(const xAOD::TrackParticleContainer* + trkPartCont, + std::vector<bool>& trkMask, + const xAOD::MuonContainer* muCont, + std::vector<bool>& muMask, + std::string counterName) const; + /// @} + /// + + /// + /// @brief Obtain all auxillary elements matching a certain pattern. + /// + /// Helper to filter all names of auxillary elements of an aux container + /// according to a certain pattern. The pattern must be a regular + /// expression pattern. + /// + /// @param[in] auxCont pointer to AuxContainer + /// @param[in] pattern regular expression pattern to be matched by names + /// @returns vector<string> of auxillary element names + /// + std::vector<std::string> + filterAuxElements(const xAOD::AuxContainerBase* auxCont, + std::string pattern) const; + /// + /// @brief Determine aux elements to be looked at -- for (refitted) PVs + /// + /// @param[in] auxCont pointer to AuxContainer + /// @param[out] vLinkNames vector of aux element names selected + /// @param[out] vLinkTypes vector of PV-to-SV types corresponding to + /// aux element names selected + /// @param[in] pattern regular expression pattern to be matched by names + /// + void selectAuxElements(const xAOD::AuxContainerBase* auxCont, + std::vector<std::string>& vLinkNames, + std::vector<pv_type>& vLinkTypes, + std::string pattern) const; + /// + /// @brief Determine aux elements to be looked at -- for closest tracks + /// + /// @param[in] auxCont pointer to AuxContainer + /// @param[out] vLinkNames 
vector of aux element names selected + /// @param[in] vPrefixes vector of prefixes to be concatenated with + /// pattern for search + /// @param[out] vLinkTypes vector of PV-to-SV types corresponding to + /// aux element names selected + /// @param[in] pattern regular expression pattern to be matched by names + /// + void selectAuxElements(const xAOD::AuxContainerBase* auxCont, + std::vector<std::string>& vLinkNames, + std::vector<std::string> vPrefixes, + std::vector<pv_type>& vLinkTypes, + std::string pattern) const; + /// + /// @brief Dump a vector<str> to a string + /// + /// @param[in] vs vector<string> to be dumped + /// @param[in] header header string to be prepended + /// @param[in] nBlanks number of blanks to prepend each line with + /// + std::string dumpVS(const std::vector<std::string>& vs, + const std::string header="", + size_t nBlanks=0) const; + /// + /// @brief Wrap string at line breaks and print with + /// appropriate message level + /// + /// @param[in] str string to be printed + /// @param[in] lvl MSG::Level chosen + /// + void logWrappedMsg(const std::string& str, const MSG::Level lvl) const; + /// + /// @brief Check two masks for consistency + /// + /// This is a method returning debugging information. 
+ /// + /// @param[in] mask1 first mask vector to be checked + /// @param[in] mask2 second mask vector to be checked + /// @param[in] name1 name of first mask vector + /// @param[in] name2 name of second mask vector + /// @param[in] header text to be prepended to output string + /// @returns string with debugging information + /// + std::string checkMaskConsistency(const std::vector<bool>& mask1, + const std::vector<bool>& mask2, + const std::string name1, + const std::string name2, + const std::string header="") const; + + private: + /// + /// @name Job options + /// @{ + ServiceHandle<IThinningSvc> m_thinningSvc; + std::string m_trkPartContName; + std::vector<std::string> m_vtxContNames; + std::vector<std::string> m_vtxPassFlags; + std::string m_PVContName; + std::vector<std::string> m_refPVContNames; + std::string m_muonContName; + std::string m_calMuonContName; + std::string m_ctBranchBaseName; + std::vector<std::string> m_ctBranchPrefixes; + bool m_alignPassToVertexList; + bool m_keepPVTracks; + bool m_matchCalMuons; + bool m_markMuons; + bool m_markCalMuons; + bool m_syncMuonsBothWays; + bool m_keepCloseTracks; + bool m_keepSelMuonTracks; + bool m_keepSelCalMuonTracks; + bool m_keepSelTrackMuons; + bool m_keepSelTrackCalMuons; + bool m_allowFastMuonMaskSync; + bool m_thinPVs; + bool m_thinRefPVs; + bool m_thinTracks; + bool m_thinMuons; + bool m_vertexAnd; + bool m_trackAnd; + bool m_muonAnd; + /// @} + + /// + /// @name internal member variables + /// + /// process close tracks + bool m_doCloseTracks; + /// process primary vertices + bool m_doPVs; + /// process refitted primary vertices + bool m_doRefPVs; + /// process (original) muons + bool m_doMuons; + /// process refitted muons + bool m_doCalMuons; + /// process ID tracks + bool m_doTracks; + /// @} + + /// + /// @name aux element link name caches + /// + /// @{ + /// + /// caching aux element link names (and pv types) + /// for original and refitted PVs + /// + mutable 
std::vector<std::vector<std::string> > m_vvOrigPVLinkNames; + mutable std::vector<std::vector<pv_type> > m_vvOrigPVLinkTypes; + mutable std::vector<std::vector<std::string> > m_vvRefPVLinkNames; + mutable std::vector<std::vector<pv_type> > m_vvRefPVLinkTypes; + + /// + /// caching aux element link names (and pv types) + /// for closest tracks + mutable std::vector<std::vector<std::string> > m_vvCtLinkNames; + mutable std::vector<std::vector<pv_type> > m_vvCtLinkTypes; + /// @} + + }; + +} // namespace DerivationFramework + +#endif // DERIVATIONFRAMEWORK_BmumuThinningTool_H diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/Bmumu_metadata.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/Bmumu_metadata.h new file mode 100644 index 0000000000000000000000000000000000000000..4e040a8ad14d6cf3a3905fffc94d15228f3b43a7 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/Bmumu_metadata.h @@ -0,0 +1,42 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ + +/** + * @file Bmumu_metadata.h + * @author Wolfgang Walkowiak <Wolfgang.Walkowiak@cern.ch> + * + * @brief Store JO metadata specific to the Bmumu analysis. + */ + +#ifndef DERIVATIONFRAMEWORK_Bmumu_metadata_H +#define DERIVATIONFRAMEWORK_Bmumu_metadata_H + +#include <string> +#include <map> +#include <vector> + +#include "DerivationFrameworkBPhys/BPhysMetadataBase.h" +#include "AthenaBaseComps/AthAlgTool.h" +#include "DerivationFrameworkInterfaces/IAugmentationTool.h" +#include "GaudiKernel/ToolHandle.h" + +namespace DerivationFramework { + /// + /// @class Bmumu_metadata + /// @author Wolfgang Walkowiak <Wolfgang.Walkowiak@cern.ch.> + /// + /// @brief Store JO metadata specific to the Bmumu analysis. + /// + /// Store JO metadata specific to the Bmumu analysis in the output file. + /// This class inherits from BPhysMetadataBase. 
+ /// + class Bmumu_metadata : virtual public BPhysMetadataBase { + public: + /// @brief Main constructor + Bmumu_metadata(const std::string& t, const std::string& n, + const IInterface* p); + }; // class +} // namespace + +#endif // DERIVATIONFRAMEWORK_Bmumu_metadata_H diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/Bmumu_reco_mumu.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/Bmumu_reco_mumu.h new file mode 100644 index 0000000000000000000000000000000000000000..efe1ab4ee9149a6368bc8c8e5d174869315d660d --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/Bmumu_reco_mumu.h @@ -0,0 +1,73 @@ +/* + Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration +*/ + +/////////////////////////////////////////////////////////////////// +// Bmumu_reco_mumu.h +/////////////////////////////////////////////////////////////////// +// +// Author : Wolfgang Walkowiak <Wolfgang.Walkowiak@cern.ch.> +// Original author (Reco_mumu): +// Daniel Scheirich <daniel.scheirich@cern.ch> +// +// Changes: +// Basic dimuon reconstruction for the derivation framework. +// This class inherits from CfAthAlgTool instead of AthAlgTool in order +// to have access to the CutFlowSvc instance. 
+//
+//============================================================================
+//
+#ifndef DERIVATIONFRAMEWORK_Bmumu_reco_mumu_H
+#define DERIVATIONFRAMEWORK_Bmumu_reco_mumu_H
+
+#include <string>
+
+#include "DerivationFrameworkBPhys/CfAthAlgTool.h"
+#include "GaudiKernel/ToolHandle.h"
+#include "DerivationFrameworkInterfaces/IAugmentationTool.h"
+#include "JpsiUpsilonTools/JpsiFinder.h"
+#include "JpsiUpsilonTools/PrimaryVertexRefitter.h"
+#include "BeamSpotConditionsData/BeamSpotData.h"
+
+/** forward declarations
+ */
+namespace Trk {
+  class V0Tools;
+}
+
+/** THE reconstruction tool
+ *
+ * Basic dimuon reconstruction for the derivation framework (see file
+ * header above): dimuon candidates are built by Analysis::JpsiFinder and
+ * the associated primary vertices may be refitted.  Inherits from
+ * CfAthAlgTool (rather than AthAlgTool) in order to have access to the
+ * CutFlowSvc instance.
+ */
+namespace DerivationFramework {
+
+  class Bmumu_reco_mumu : public CfAthAlgTool, public IAugmentationTool {
+    public:
+      /// Standard AlgTool constructor.
+      Bmumu_reco_mumu(const std::string& t, const std::string& n,
+                      const IInterface* p);
+
+      StatusCode initialize();
+      StatusCode finalize();
+
+      /// Build the candidates and record them to the output containers
+      /// (IAugmentationTool interface).
+      virtual StatusCode addBranches() const;
+
+    private:
+      /** tools
+       */
+      ToolHandle<Trk::V0Tools>                    m_v0Tools;    //!< Trk::V0Tools handle
+      ToolHandle<Analysis::JpsiFinder>            m_jpsiFinder; //!< dimuon candidate finder
+      ToolHandle<Analysis::PrimaryVertexRefitter> m_pvRefitter; //!< PV refitting tool
+      SG::ReadCondHandleKey<InDet::BeamSpotData>  m_beamSpotKey { this, "BeamSpotKey", "BeamSpotData", "SG key for beam spot" };
+
+      /** job options
+       */
+      std::string m_outputVtxContainerName; //!< name of the output vertex container
+      std::string m_pvContainerName;        //!< name of the input PV container
+      std::string m_refPVContainerName;     //!< name of the refitted PV container
+      bool        m_refitPV;                //!< refit primary vertices if true
+      int         m_PV_max;                 //!< max number of PVs to consider
+      int         m_DoVertexType;           //!< PV association types -- NOTE(review): bit-pattern semantics not visible here, confirm against caller
+      size_t      m_PV_minNTracks;          //!< min number of tracks required in a PV
+      bool        m_do3d;                   //!< presumably enables 3d quantities -- TODO confirm
+  };
+}
+
+#endif // DERIVATIONFRAMEWORK_Bmumu_reco_mumu_H
diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/Cascade3Plus1.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/Cascade3Plus1.h
new file mode 100644
index 0000000000000000000000000000000000000000..7ed5f72f719f9a8bc93552831c421f068da3d599
--- /dev/null
+++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/Cascade3Plus1.h
@@ -0,0 +1,99 @@
+/*
+  Copyright (C) 2002-2018
CERN for the benefit of the ATLAS collaboration +*/ +#ifndef DERIVATIONFRAMEWORKBPHYS_CASCADE3PLUS1_H +#define DERIVATIONFRAMEWORKBPHYS_CASCADE3PLUS1_H +//********************* +// Cascade3Plus1 header file +// +// Adam Barton <abarton@cern.ch> +#include "AthenaBaseComps/AthAlgTool.h" +#include "GaudiKernel/ToolHandle.h" +#include <vector> +#include "DerivationFrameworkInterfaces/IAugmentationTool.h" +#include "xAODTracking/TrackParticle.h" +#include "JpsiUpsilonTools/PrimaryVertexRefitter.h" +#include "InDetConversionFinderTools/VertexPointEstimator.h" +#include "BeamSpotConditionsData/BeamSpotData.h" + +namespace Trk { + class IVertexFitter; + class TrkVKalVrtFitter; + class VxTrackAtVertex; + class ITrackSelectorTool; + class VxCascadeInfo; + class V0Tools; +} + + +namespace DerivationFramework { + class CascadeTools; +} + +namespace DerivationFramework { + static const InterfaceID IID_Cascade3Plus1("Cascade3Plus1", 1, 0); +class Cascade3Plus1 : virtual public AthAlgTool, public IAugmentationTool +{ + +public: + static const InterfaceID& interfaceID() { return IID_Cascade3Plus1;} + Cascade3Plus1(const std::string& t, const std::string& n, const IInterface* p); + virtual ~Cascade3Plus1(); + virtual StatusCode initialize() override; + virtual StatusCode addBranches() const override; + +private: + static constexpr int s_topoN = 2; + + ToolHandle < Trk::ITrackSelectorTool > m_trkSelector; + ToolHandle < Trk::TrkVKalVrtFitter > m_iVertexFitter; + ToolHandle < Trk::V0Tools > m_V0Tools; + ToolHandle < DerivationFramework::CascadeTools > m_CascadeTools; + ToolHandle < Analysis::PrimaryVertexRefitter > m_pvRefitter; + ToolHandle < InDet::VertexPointEstimator > m_vertexEstimator; + SG::ReadCondHandleKey<InDet::BeamSpotData> m_beamSpotKey { this, "BeamSpotKey", "BeamSpotData", "SG key for beam spot" }; + + std::unique_ptr<Trk::VxCascadeInfo> CascadeFit(std::array<const xAOD::TrackParticle*, 4> &Track)const; + + std::vector<double> m_trackMasses; + std::vector<std::string> 
m_cascadeOutputsKeys; + double m_2trackmassMin = 979.45; + double m_2trackmassMax = 1059.45; + double m_3trackmassMin = 1800.47; + double m_3trackmassMax = 2168.47; + double m_4trackmassMin = 5200.0; + double m_4trackmassMax = 5450.0; + double m_3tracksMass = 1968.47; + double m_4tracksMass = 5366.79; + double m_2tracksMass = 0; + + double m_4trackmassFinalMin = 0; + double m_4trackmassFinalMax = 0; + std::string m_hypoName; //!< name of the mass hypothesis. E.g. Jpsi, Upsi, etc. Will be used as a prefix for decorations + std::string m_3TrackName; + int m_PV_max; + int m_DoVertexType; + size_t m_PV_minNTracks; + std::string m_VxPrimaryCandidateName; //!< Name of primary vertex container + std::string m_refPVContainerName; + double m_Chi2NDFCut=0.; + float m_3TrackChi2NDFCut=0.; + double m_tauCut = -999999; + bool m_refitPV; + bool m_3TrackMassConstraint = false; + bool m_2TrackMassConstraint = false; + bool m_eliminateBad3Tracksfrom4Track = false; + bool m_copyAllVertices = false; + std::bitset<4> m_muonTrackBit{0}; + std::vector<int> m_requireMuonsOnTrack; + std::string m_3TrackVertexOutput; + std::unique_ptr<xAOD::Vertex> StandardFit(const std::vector<const xAOD::TrackParticle*> &inputTracks, const xAOD::TrackParticleContainer* importedTrackCollection) const; + std::vector<double> m_ptCutPerTrack; + std::array<double, 3> m_ptCutPerVertex; + const std::vector<const xAOD::TrackParticle*>& ApplyAdditionalCuts(const std::vector<const xAOD::TrackParticle*>&, + const std::vector<const xAOD::TrackParticle*>&, + std::vector<const xAOD::TrackParticle*>&, size_t) const; +}; +} + +#endif diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/CascadeTools.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/CascadeTools.h new file mode 100644 index 0000000000000000000000000000000000000000..932b814d3d2ad23faf6746c15cfaf25e2de5cfb5 --- /dev/null +++ 
b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/CascadeTools.h
@@ -0,0 +1,87 @@
+/*
+  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+*/
+#ifndef DERIVATIONFRAMEWORKBPHYS_CASCADETOOLS_H
+#define DERIVATIONFRAMEWORKBPHYS_CASCADETOOLS_H
+//*********************
+// CascadeTools header file
+//
+// Eva Bouhova <e.bouhova@cern.ch>
+// Adam Barton <abarton@cern.ch>
+//
+// Utility AlgTool computing kinematic quantities (mass, pT, flight
+// distance, lifetime, impact parameter, pointing angle) and their errors
+// for a secondary vertex (SV) with respect to a primary vertex (PV).
+// NOTE(review): uses TLorentzVector and Amg types via transitive includes
+// of the headers below -- confirm.
+
+#include "AthenaBaseComps/AthAlgTool.h"
+#include "CLHEP/Vector/LorentzVector.h"
+#include "xAODTracking/Vertex.h"
+
+
+namespace DerivationFramework {
+
+  static const InterfaceID IID_CascadeTools("CascadeTools", 1, 1);
+
+  class CascadeTools : public AthAlgTool{
+
+  public:
+
+/**
+ * Default constructor due to Athena interface
+ */
+    CascadeTools(const std::string& t, const std::string& n, const IInterface* p);
+
+/**
+ * Virtual destructor
+ */
+    ~CascadeTools();
+
+/**
+ * Standard AlgTool methods
+ */
+    //Nothing done not needed
+    //StatusCode initialize() override;
+    //StatusCode finalize() override;
+
+/**
+ * AlgTool interface methods
+ */
+    static const InterfaceID& interfaceID()
+    {
+      return IID_CascadeTools;
+    }
+
+    /// Summed momentum vector of the given particle momenta.
+    Amg::Vector3D momentum(const std::vector<TLorentzVector> &particleMom) const;
+    /// Point of closest approach of the SV momentum line to the PV.
+    Amg::Vector3D pca(const std::vector<TLorentzVector> &particleMom, const xAOD::Vertex* SV, const xAOD::Vertex* PV) const;
+
+    /// Invariant mass (with per-track mass hypotheses in the second form)
+    /// and its error from the covariance matrix.
+    double invariantMass(const std::vector<TLorentzVector> &moms, const std::vector<double> &masses) const;
+    double invariantMass(const std::vector<TLorentzVector> &moms) const;
+    double invariantMassError(const std::vector<TLorentzVector> &moms, const Amg::MatrixX& cov, const std::vector<double> &masses) const;
+    double invariantMassError(const std::vector<TLorentzVector> &moms, const Amg::MatrixX& cov) const;
+    /// Transverse momentum of the summed momenta, and its error.
+    double pT(const std::vector<TLorentzVector> &moms) const;
+    double pTError(const std::vector<TLorentzVector> &moms, const Amg::MatrixX& cov) const;
+    /// Transverse flight distance of SV w.r.t. PV, and its error.
+    double lxy(const std::vector<TLorentzVector> &particleMom, const xAOD::Vertex* SV, const xAOD::Vertex* PV) const;
+    double lxyError(const std::vector<TLorentzVector> &particleMom, const Amg::MatrixX& cov, const xAOD::Vertex* SV, const xAOD::Vertex* PV) const;
+    /// Proper time; the overloads taking M use a fixed mass hypothesis.
+    double tau(const std::vector<TLorentzVector> &particleMom, const xAOD::Vertex* SV, const xAOD::Vertex* PV) const;
+    double tauError(const std::vector<TLorentzVector> &particleMom, const Amg::MatrixX& cov, const xAOD::Vertex* SV, const xAOD::Vertex* PV) const;
+    double tau(const std::vector<TLorentzVector> &particleMom, const xAOD::Vertex* SV, const xAOD::Vertex* PV, double M) const;
+    double tauError(const std::vector<TLorentzVector> &particleMom, const Amg::MatrixX& cov, const xAOD::Vertex* SV, const xAOD::Vertex* PV, double M) const;
+    /// Impact parameters (z, xy projections and 3D) and their errors.
+    double a0z(const std::vector<TLorentzVector> &particleMom, const xAOD::Vertex* SV, const xAOD::Vertex* PV) const;
+    double a0zError(const std::vector<TLorentzVector> &particleMom, const Amg::MatrixX& cov, const xAOD::Vertex* SV, const xAOD::Vertex* PV) const;
+    double a0xy(const std::vector<TLorentzVector> &particleMom, const xAOD::Vertex* SV, const xAOD::Vertex* PV) const;
+    double a0xyError(const std::vector<TLorentzVector> &particleMom, const Amg::MatrixX& cov, const xAOD::Vertex* SV, const xAOD::Vertex* PV) const;
+    double a0(const std::vector<TLorentzVector> &particleMom, const xAOD::Vertex* SV, const xAOD::Vertex* PV) const;
+    double a0Error(const std::vector<TLorentzVector> &particleMom, const Amg::MatrixX& cov, const xAOD::Vertex* SV, const xAOD::Vertex* PV) const;
+    /// Pointing angle between momentum and the PV->SV direction (3D / xy).
+    double cosTheta(const std::vector<TLorentzVector> &particleMom, const xAOD::Vertex* SV, const xAOD::Vertex* PV) const;
+    double cosTheta_xy(const std::vector<TLorentzVector> &particleMom, const xAOD::Vertex* SV, const xAOD::Vertex* PV) const;
+    double massProbability(double V0Mass, double mass, double massErr) const;
+    double vertexProbability(int ndf, double chi2) const;
+
+    /// Covariance-matrix helpers.
+    Amg::MatrixX * convertCovMatrix(const xAOD::Vertex * vxCandidate) const;
+    Amg::MatrixX SetFullMatrix(int NTrk, const std::vector<float> & Matrix) const;
+
+  //private:
+
+  }; //end of class definitions
+
+} //end of namespace definitions
+
+
+#endif
diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/CfAthAlgTool.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/CfAthAlgTool.h
new file mode 100644
index 0000000000000000000000000000000000000000..f5280c5ccde81ceb70539c64a490e7bd7862a7fc
--- /dev/null
+++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/CfAthAlgTool.h
@@ -0,0 +1,86 @@
+/*
+  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+*/
+
+//============================================================================
+// CfAthAlgTool.h
+//============================================================================
+//
+// Author : Wolfgang Walkowiak <Wolfgang.Walkowiak@cern.ch>
+// Changes:
+//
+// Wrapper around AthAlgTool to provide easy access to CutFlowSvc
+// and some utility methods for it.
+// Methods for accessing the CutFlowSvc are modelled after
+// AthFilterAlgorithm's implementation.
+//
+// This class inherits from AthAlgTool. It should be inherited from.
+//
+//============================================================================
+//
+#ifndef DERIVATIONFRAMEWORK_CfAthAlgTool_H
+#define DERIVATIONFRAMEWORK_CfAthAlgTool_H
+
+#include "GaudiKernel/ToolHandle.h"
+#include "GaudiKernel/ServiceHandle.h"
+#include "AthenaKernel/ICutFlowSvc.h"
+#include "AthenaBaseComps/AthAlgTool.h"
+
+#include <string>
+#include <map>
+
+namespace DerivationFramework {
+
+  /// AthAlgTool with convenient access to the CutFlowSvc: named counters
+  /// are created on demand and events/weights can be added to them.
+  class CfAthAlgTool : public AthAlgTool {
+    public:
+      // constructor with parameters
+      CfAthAlgTool(const std::string& t, const std::string& n,
+                   const IInterface* p);
+      // destructor
+      virtual ~CfAthAlgTool();
+
+      // return a handle to an ICutFlowSvc instance
+      ServiceHandle<ICutFlowSvc>& cutFlowSvc() const;
+
+      // Initialization method invoked by the framework.
+      virtual StatusCode sysInitialize() override;
+
+      // add event to a named counter -- returns counts after adding
+      virtual bool addEvent(const std::string &name, double weight=1.) const;
+
+      // add to a named counter -- returns counts after adding
+      // if counts > 1 : same weight is added multiple times
+      virtual bool addToCounter(const std::string &name, uint64_t counts=1,
+                                double weight=1.) const;
+
+    protected:
+      // add a counter by name -- returns id if it already exists
+      virtual CutIdentifier getCounter(const std::string &name) const;
+
+      // returns counter name by id
+      virtual std::string getCounterNameById(CutIdentifier id) const;
+
+      // returns counter id by name
+      virtual CutIdentifier getCounterIdByName(const std::string &name) const;
+
+    private:
+      // typedef for ServiceHandle<ICutFlowSvc>
+      typedef ServiceHandle<ICutFlowSvc> ICutFlowSvc_t;
+      // handle to the service holding tables of cut-flows for filtering algs.
+      // (mutable: counters are updated from const methods above)
+      mutable ICutFlowSvc_t m_cutFlowSvc;
+
+      // base name for counters
+      std::string m_ctbasename;
+
+      // map of counter names to counter ids
+      typedef std::map<std::string, CutIdentifier> NameIdMap_t;
+      mutable NameIdMap_t m_mctn;
+
+      // base counter and whether it has been set yet
+      mutable CutIdentifier m_bid;
+      mutable bool m_bidisset;
+
+  }; // class
+} // namespace
+
+#endif // DERIVATIONFRAMEWORK_CfAthAlgTool_H
diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/FourMuonTool.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/FourMuonTool.h
new file mode 100644
index 0000000000000000000000000000000000000000..88348740f02c9f70d41afaf7af61ada4a3169d79
--- /dev/null
+++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/FourMuonTool.h
@@ -0,0 +1,188 @@
+/*
+  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+*/
+
+// ****************************************************************************
+// ----------------------------------------------------------------------------
+// FourMuonTool header file
+//
+// James Catmore <James.Catmore@cern.ch>
+
+// ----------------------------------------------------------------------------
+// ****************************************************************************
+#ifndef BPHY4TOOL_H
+#define BPHY4TOOL_H
+#include "AthenaBaseComps/AthAlgorithm.h"
+#include "GaudiKernel/ToolHandle.h"
+#include "TrkVKalVrtFitter/TrkVKalVrtFitter.h"
+#include "InDetConversionFinderTools/InDetConversionFinderTools.h"
+#include "BeamSpotConditionsData/BeamSpotData.h"
+#include "xAODMuon/Muon.h"
+#include "xAODMuon/MuonContainer.h"
+
+#include <vector>
+#include <string>
+/////////////////////////////////////////////////////////////////////////////
+
+namespace Trk {
+  class IVertexFitter;
+  // class VxCandidate;
+  // class TrackParticleBase;
+  // class VxTrackAtVertex;
+  // class RecVertex;
+  class TrkV0VertexFitter;
+  class
ITrackSelectorTool; + class V0Tools; + // class ExtendedVxCandidate; +} + +namespace InDet { class VertexPointEstimator; class BeamSpotData;} + +namespace DerivationFramework { + + static const InterfaceID IID_FourMuonTool("FourMuonTool", 1, 0); + + // Struct and enum to associate muon pairs with track pairs + // and make the program flow more straightforward + struct Combination + { + std::vector<const xAOD::Muon*> muons; + std::vector<unsigned int> quadIndices; + std::pair<unsigned int, unsigned int> pairIndices; + + std::string combinationCharges() { + std::string chargeStr = ""; + if (muons.at(0)->charge() > 0) {chargeStr += "+";} + else {chargeStr += "-";} + if (muons.at(1)->charge() > 0) {chargeStr += "+";} + else {chargeStr += "-";} + if (muons.size()==4) { + if (muons.at(2)->charge() > 0) {chargeStr += "+";} + else {chargeStr += "-";} + if (muons.at(3)->charge() > 0) {chargeStr += "+";} + else {chargeStr += "-";} + } + return chargeStr; + } + + std::string combinationIndices() { + std::string indexStr = ""; + std::stringstream ss; + if (muons.size()==2) { + ss.str(""); ss.clear(); + ss << pairIndices.first; + indexStr+=ss.str(); + ss.str(""); ss.clear(); + ss << pairIndices.second; + indexStr+=ss.str(); + } + if (muons.size()==4) { + for (unsigned int i=0; i<4; ++i) { + ss.str(""); ss.clear(); + ss << quadIndices[i]; + indexStr+=ss.str(); + } + } + return indexStr; + } + + const xAOD::TrackParticle* GetMuonTrack(const xAOD::Muon* mu) const{ + auto& link = mu->inDetTrackParticleLink(); + return link.isValid() ? 
*link : nullptr; + } + + std::vector<const xAOD::TrackParticle*> trackParticles(std::string specify) { + std::vector<const xAOD::TrackParticle*> theTracks; + bool oppCh(false); + if (muons.at(0)->charge()*muons.at(1)->charge() < 0) oppCh=true; + if (specify=="pair1") { + theTracks.push_back(GetMuonTrack(muons.at(0))); + theTracks.push_back(GetMuonTrack(muons.at(1))); + } + if (specify=="pair2") { + theTracks.push_back(GetMuonTrack(muons.at(2))); + theTracks.push_back(GetMuonTrack(muons.at(3))); + } + if (specify=="DC") { + if (oppCh) { + theTracks.push_back(GetMuonTrack(muons.at(0))); + theTracks.push_back(GetMuonTrack(muons.at(1))); + theTracks.push_back(GetMuonTrack(muons.at(2))); + theTracks.push_back(GetMuonTrack(muons.at(3))); + } else { + theTracks.push_back(GetMuonTrack(muons.at(0))); + theTracks.push_back(GetMuonTrack(muons.at(2))); + theTracks.push_back(GetMuonTrack(muons.at(1))); + theTracks.push_back(GetMuonTrack(muons.at(3))); + } + } + if (specify=="AC") { + theTracks.push_back(GetMuonTrack(muons.at(0))); + theTracks.push_back(GetMuonTrack(muons.at(3))); + theTracks.push_back(GetMuonTrack(muons.at(1))); + theTracks.push_back(GetMuonTrack(muons.at(2))); + } + if (specify=="SS") { + if (oppCh) { + theTracks.push_back(GetMuonTrack(muons.at(0))); + theTracks.push_back(GetMuonTrack(muons.at(2))); + theTracks.push_back(GetMuonTrack(muons.at(1))); + theTracks.push_back(GetMuonTrack(muons.at(3))); + } else { + theTracks.push_back(GetMuonTrack(muons.at(0))); + theTracks.push_back(GetMuonTrack(muons.at(1))); + theTracks.push_back(GetMuonTrack(muons.at(2))); + theTracks.push_back(GetMuonTrack(muons.at(3))); + } + } + return theTracks; + } + + }; + + class FourMuonTool: virtual public AthAlgTool + { + public: + FourMuonTool(const std::string& t, const std::string& n, const IInterface* p); + ~FourMuonTool(); + StatusCode initialize(); + + static const InterfaceID& interfaceID() { return IID_FourMuonTool;} + + 
//------------------------------------------------------------------------------------- + //Doing Calculation and inline functions + StatusCode performSearch(xAOD::VertexContainer*& pairVxContainer, xAOD::VertexAuxContainer*& pairVxAuxContainer, + xAOD::VertexContainer*& quadVxContainer, xAOD::VertexAuxContainer*& quadVxAuxContainer, bool &acceptEvent) const; + xAOD::Vertex* fit(const std::vector<const xAOD::TrackParticle*>& ,const xAOD::TrackParticleContainer* importedTrackCollection, const Amg::Vector3D &beamSpot) const; + static std::vector<std::vector<unsigned int> > getQuadIndices(unsigned int length); + static std::vector<std::pair<unsigned int, unsigned int> > getPairIndices(unsigned int length); + static std::vector<std::vector<unsigned int> > mFromN(unsigned int m, unsigned int n); + static void combinatorics(unsigned int offset, + unsigned int k, + std::vector<unsigned int> &combination, + std::vector<unsigned int> &mainList, + std::vector<std::vector<unsigned int> > &allCombinations); + static void buildCombinations(const std::vector<const xAOD::Muon*> &muonsIn, + std::vector<Combination> &pairs, + std::vector<Combination> &quadruplets, + unsigned int nSelectedMuons); + static bool passesQuadSelection(const std::vector<const xAOD::Muon*> &muonsIn); + //------------------------------------------------------------------------------------- + + private: + double m_ptCut; + double m_etaCut; + bool m_useV0Fitter; + SG::ReadHandleKey<xAOD::MuonContainer> m_muonCollectionKey; + SG::ReadHandleKey<xAOD::TrackParticleContainer> m_TrkParticleCollection; + ToolHandle < Trk::IVertexFitter > m_iVertexFitter; + ToolHandle < Trk::IVertexFitter > m_iV0VertexFitter; + ToolHandle < Trk::V0Tools > m_V0Tools; + ToolHandle < Trk::ITrackSelectorTool > m_trkSelector; + ToolHandle < InDet::VertexPointEstimator > m_vertexEstimator; + SG::ReadCondHandleKey<InDet::BeamSpotData> m_beamSpotKey { this, "BeamSpotKey", "BeamSpotData", "SG key for beam spot" }; + 
SG::WriteDecorHandleKey<xAOD::MuonContainer> m_muonIndex{this, "muonIndexDec", "Muons.BPHY4MuonIndex"}; + + }; +} // end of namespace +#endif diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/JpsiPlusDpstCascade.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/JpsiPlusDpstCascade.h new file mode 100644 index 0000000000000000000000000000000000000000..7376be66ae176609cc284c6342b29a6e8d9c6ec8 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/JpsiPlusDpstCascade.h @@ -0,0 +1,96 @@ +/* + Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration +*/ +#ifndef JPSIPLUSDPSTCASCADE_H +#define JPSIPLUSDPSTCASCADE_H +//********************* +// JpsiPlusDpstCascade header file +// +// Eva Bouhova <e.bouhova@cern.ch> +// Adam Barton <abarton@cern.ch> + +#include "AthenaBaseComps/AthAlgTool.h" +#include "GaudiKernel/ToolHandle.h" +#include "DerivationFrameworkInterfaces/IAugmentationTool.h" +#include "JpsiUpsilonTools/PrimaryVertexRefitter.h" +#include <vector> +#include "BeamSpotConditionsData/BeamSpotData.h" + +namespace Trk { + class IVertexFitter; + class TrkVKalVrtFitter; + class IVertexCascadeFitter; + class VxCascadeInfo; + class V0Tools; + class ParticleDataTable; +} + +namespace DerivationFramework { + class CascadeTools; +} + +namespace DerivationFramework { + + static const InterfaceID IID_JpsiPlusDpstCascade("JpsiPlusDpstCascade", 1, 0); + + class JpsiPlusDpstCascade : virtual public AthAlgTool, public IAugmentationTool + { + public: + static const InterfaceID& interfaceID() { return IID_JpsiPlusDpstCascade;} + JpsiPlusDpstCascade(const std::string& t, const std::string& n, const IInterface* p); + ~JpsiPlusDpstCascade(); + virtual StatusCode initialize() override; + StatusCode performSearch(std::vector<Trk::VxCascadeInfo*> *cascadeinfoContainer ) const; + virtual StatusCode addBranches() const override; + + 
private: + std::string m_vertexContainerKey; + std::string m_vertexD0ContainerKey; + std::vector<std::string> m_cascadeOutputsKeys; + + std::string m_VxPrimaryCandidateName; //!< Name of primary vertex container + + double m_jpsiMassLower; + double m_jpsiMassUpper; + double m_jpsipiMassLower; + double m_jpsipiMassUpper; + double m_D0MassLower; + double m_D0MassUpper; + double m_DstMassLower; + double m_DstMassUpper; + double m_MassLower; + double m_MassUpper; + double m_vtx0MassHypo; // mass hypothesis of vertex 0 + double m_vtx1MassHypo; // mass hypothesis of vertex 1 + double m_vtx0Daug1MassHypo; // mass hypothesis of 1st daughter from vertex 0 + double m_vtx0Daug2MassHypo; // mass hypothesis of 2nd daughter from vertex 0 + double m_vtx0Daug3MassHypo; // mass hypothesis of 3rd daughter from vertex 0 + double m_vtx1Daug1MassHypo; // mass hypothesis of 1st daughter from vertex 1 + double m_vtx1Daug2MassHypo; // mass hypothesis of 2nd daughter from vertex 1 + + + double m_mass_jpsi; + int m_Dx_pid; + bool m_constrD0; + bool m_constrJpsi; + double m_chi2cut; + + SG::ReadCondHandleKey<InDet::BeamSpotData> m_beamSpotKey { this, "BeamSpotKey", "BeamSpotData", "SG key for beam spot" }; + ToolHandle < Trk::TrkVKalVrtFitter > m_iVertexFitter; + ToolHandle < Analysis::PrimaryVertexRefitter > m_pvRefitter; + ToolHandle < Trk::V0Tools > m_V0Tools; + ToolHandle < DerivationFramework::CascadeTools > m_CascadeTools; + + bool m_refitPV; + std::string m_refPVContainerName; + std::string m_hypoName; //!< name of the mass hypothesis. E.g. Jpsi, Upsi, etc. 
Will be used as a prefix for decorations + //This parameter will allow us to optimize the number of PVs under consideration as the probability + //of a useful primary vertex drops significantly the higher you go + int m_PV_max; + int m_DoVertexType; + size_t m_PV_minNTracks; + + }; +} + +#endif diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/JpsiPlusDs1Cascade.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/JpsiPlusDs1Cascade.h new file mode 100644 index 0000000000000000000000000000000000000000..d923f5fe4f9125d8de51a3817b0a09f01b4cdf6f --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/JpsiPlusDs1Cascade.h @@ -0,0 +1,105 @@ +/* + Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration +*/ +#ifndef JPSIPLUSDS1CASCADE_H +#define JPSIPLUSDS1CASCADE_H +//********************* +// JpsiPlusDs1Cascade header file +// +// Eva Bouhova <e.bouhova@cern.ch> +// Adam Barton <abarton@cern.ch> + +#include "AthenaBaseComps/AthAlgTool.h" +#include "GaudiKernel/ToolHandle.h" +#include "HepPDT/ParticleDataTable.hh" +#include "DerivationFrameworkInterfaces/IAugmentationTool.h" +#include "JpsiUpsilonTools/PrimaryVertexRefitter.h" +#include <vector> +#include "BeamSpotConditionsData/BeamSpotData.h" + +namespace Trk { + class IVertexFitter; + class TrkVKalVrtFitter; + class IVertexCascadeFitter; + class VxCascadeInfo; + class V0Tools; + class ParticleDataTable; +} + +namespace DerivationFramework { + class CascadeTools; +} + +namespace DerivationFramework { + + static const InterfaceID IID_JpsiPlusDs1Cascade("JpsiPlusDs1Cascade", 1, 0); + + class JpsiPlusDs1Cascade : virtual public AthAlgTool, public IAugmentationTool + { + public: + static const InterfaceID& interfaceID() { return IID_JpsiPlusDs1Cascade;} + JpsiPlusDs1Cascade(const std::string& t, const std::string& n, const IInterface* p); + ~JpsiPlusDs1Cascade(); + virtual 
StatusCode initialize() override; + StatusCode performSearch(std::vector<Trk::VxCascadeInfo*> *cascadeinfoContainer ) const; + virtual StatusCode addBranches() const override; + + private: + std::string m_vertexContainerKey; + std::string m_vertexD0ContainerKey; + std::string m_vertexK0ContainerKey; + std::vector<std::string> m_cascadeOutputsKeys; + + std::string m_VxPrimaryCandidateName; //!< Name of primary vertex container + + double m_jpsiMassLower; + double m_jpsiMassUpper; + double m_jpsipiMassLower; + double m_jpsipiMassUpper; + double m_D0MassLower; + double m_D0MassUpper; + double m_K0MassLower; + double m_K0MassUpper; + double m_DstMassLower; + double m_DstMassUpper; + double m_MassLower; + double m_MassUpper; + double m_vtx0MassHypo; // mass hypothesis of vertex 0 + double m_vtx1MassHypo; // mass hypothesis of vertex 1 + double m_vtx2MassHypo; // mass hypothesis of vertex 2 + double m_vtx0Daug1MassHypo; // mass hypothesis of 1st daughter from vertex 0 + double m_vtx0Daug2MassHypo; // mass hypothesis of 2nd daughter from vertex 0 + double m_vtx0Daug3MassHypo; // mass hypothesis of 3rd daughter from vertex 0 + double m_vtx1Daug1MassHypo; // mass hypothesis of 1st daughter from vertex 1 + double m_vtx1Daug2MassHypo; // mass hypothesis of 2nd daughter from vertex 1 + double m_vtx2Daug1MassHypo; // mass hypothesis of 1st daughter from vertex 2 + double m_vtx2Daug2MassHypo; // mass hypothesis of 2nd daughter from vertex 2 + + const HepPDT::ParticleDataTable* m_particleDataTable; + double m_mass_jpsi; + int m_Dx_pid; + bool m_constrD0; + bool m_constrK0; + bool m_constrJpsi; + double m_chi2cut; + + SG::ReadCondHandleKey<InDet::BeamSpotData> m_beamSpotKey { this, "BeamSpotKey", "BeamSpotData", "SG key for beam spot" }; + ToolHandle < Trk::TrkVKalVrtFitter > m_iVertexFitter; + ToolHandle < Analysis::PrimaryVertexRefitter > m_pvRefitter; + ToolHandle < Trk::V0Tools > m_V0Tools; + ToolHandle < DerivationFramework::CascadeTools > m_CascadeTools; + + bool m_refitPV; 
+ std::string m_refPVContainerName; + std::string m_hypoName; //!< name of the mass hypothesis. E.g. Jpsi, Upsi, etc. Will be used as a prefix for decorations + //This parameter will allow us to optimize the number of PVs under consideration as the probability + //of a useful primary vertex drops significantly the higher you go + int m_PV_max; + int m_DoVertexType; + size_t m_PV_minNTracks; + + double getParticleMass(int particlecode) const; + }; +} + +#endif diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/JpsiPlusDsCascade.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/JpsiPlusDsCascade.h new file mode 100644 index 0000000000000000000000000000000000000000..024fe6a73133f005a708787207cfa0aaad66419b --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/JpsiPlusDsCascade.h @@ -0,0 +1,91 @@ +/* + Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration +*/ +#ifndef JPSIPLUSDSCASCADE_H +#define JPSIPLUSDSCASCADE_H +//********************* +// JpsiPlusDsCascade header file +// +// Eva Bouhova <e.bouhova@cern.ch> +// Adam Barton <abarton@cern.ch> + +#include "AthenaBaseComps/AthAlgTool.h" +#include "GaudiKernel/ToolHandle.h" +#include "DerivationFrameworkInterfaces/IAugmentationTool.h" +#include "JpsiUpsilonTools/PrimaryVertexRefitter.h" +#include <vector> +#include "BeamSpotConditionsData/BeamSpotData.h" + +namespace Trk { + class IVertexFitter; + class TrkVKalVrtFitter; + class IVertexCascadeFitter; + class VxCascadeInfo; + class V0Tools; + class ParticleDataTable; +} + +namespace DerivationFramework { + class CascadeTools; +} + +namespace DerivationFramework { + + static const InterfaceID IID_JpsiPlusDsCascade("JpsiPlusDsCascade", 1, 0); + + class JpsiPlusDsCascade : virtual public AthAlgTool, public IAugmentationTool + { + public: + static const InterfaceID& interfaceID() { return IID_JpsiPlusDsCascade;} + 
JpsiPlusDsCascade(const std::string& t, const std::string& n, const IInterface* p); + ~JpsiPlusDsCascade(); + virtual StatusCode initialize() override; + StatusCode performSearch(std::vector<Trk::VxCascadeInfo*> *cascadeinfoContainer ) const; + virtual StatusCode addBranches() const override; + + private: + std::string m_vertexContainerKey; + std::string m_vertexDxContainerKey; + std::vector<std::string> m_cascadeOutputsKeys; + + std::string m_VxPrimaryCandidateName; //!< Name of primary vertex container + + double m_jpsiMassLower; + double m_jpsiMassUpper; + double m_DxMassLower; + double m_DxMassUpper; + double m_MassLower; + double m_MassUpper; + double m_vtx0MassHypo; // 1st vertex mass hypothesis + double m_vtx1MassHypo; // 2nd vertex mass hypothesis + double m_vtx0Daug1MassHypo; // mass hypothesis of 1st daughter from vertex 0 + double m_vtx0Daug2MassHypo; // mass hypothesis of 2nd daughter from vertex 0 + double m_vtx1Daug1MassHypo; // mass hypothesis of 1st daughter from vertex 1 + double m_vtx1Daug2MassHypo; // mass hypothesis of 2nd daughter from vertex 1 + double m_vtx1Daug3MassHypo; // mass hypothesis of 3rd daughter from vertex 1 + + double m_mass_jpsi; + int m_Dx_pid; + bool m_constrDx; + bool m_constrJpsi; + double m_chi2cut; + + SG::ReadCondHandleKey<InDet::BeamSpotData> m_beamSpotKey { this, "BeamSpotKey", "BeamSpotData", "SG key for beam spot" }; + ToolHandle < Trk::TrkVKalVrtFitter > m_iVertexFitter; + ToolHandle < Analysis::PrimaryVertexRefitter > m_pvRefitter; + ToolHandle < Trk::V0Tools > m_V0Tools; + ToolHandle < DerivationFramework::CascadeTools > m_CascadeTools; + + bool m_refitPV; + std::string m_refPVContainerName; + std::string m_hypoName; //!< name of the mass hypothesis. E.g. Jpis, Upsi, etc. 
Will be used as a prefix for decorations + //This parameter will allow us to optimize the number of PVs under consideration as the probability + //of a useful primary vertex drops significantly the higher you go + int m_PV_max; + int m_DoVertexType; + size_t m_PV_minNTracks; + + }; +} + +#endif diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/JpsiPlusV0Cascade.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/JpsiPlusV0Cascade.h new file mode 100644 index 0000000000000000000000000000000000000000..9720b3d6ca36829dc4eda1e7d95491881eba0845 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/JpsiPlusV0Cascade.h @@ -0,0 +1,94 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ +#ifndef JPSIPLUSV0CASCADE_H +#define JPSIPLUSV0CASCADE_H +//********************* +// JpsiPlusV0Cascade header file +// +// Eva Bouhova <e.bouhova@cern.ch> +// Adam Barton <abarton@cern.ch> + +#include "AthenaBaseComps/AthAlgTool.h" +#include "GaudiKernel/ToolHandle.h" + +#include "DerivationFrameworkInterfaces/IAugmentationTool.h" +#include "JpsiUpsilonTools/PrimaryVertexRefitter.h" +#include <vector> +#include "BeamSpotConditionsData/BeamSpotData.h" + + +namespace Trk { + class IVertexFitter; + class TrkVKalVrtFitter; + class IVertexCascadeFitter; + class VxCascadeInfo; + class V0Tools; + class ParticleDataTable; +} + +namespace DerivationFramework { + class CascadeTools; +} + + +namespace DerivationFramework { + + static const InterfaceID IID_JpsiPlusV0Cascade("JpsiPlusV0Cascade", 1, 0); + + class JpsiPlusV0Cascade : virtual public AthAlgTool, public IAugmentationTool + { + + std::string m_vertexContainerKey; + std::string m_vertexV0ContainerKey; + std::vector<std::string> m_cascadeOutputsKeys; + + std::string m_VxPrimaryCandidateName; //!< Name of primary vertex container + + double m_jpsiMassLower; + double 
m_jpsiMassUpper; + double m_V0MassLower; + double m_V0MassUpper; + double m_MassLower; + double m_MassUpper; + + double m_mass_muon; + double m_mass_pion; + double m_mass_proton; + double m_mass_lambda; + double m_mass_ks; + double m_mass_jpsi; + double m_mass_b0; + double m_mass_lambdaB; + int m_v0_pid; + bool m_constrV0; + bool m_constrJpsi; + + SG::ReadCondHandleKey<InDet::BeamSpotData> m_beamSpotKey { this, "BeamSpotKey", "BeamSpotData", "SG key for beam spot" }; + ToolHandle < Trk::TrkVKalVrtFitter > m_iVertexFitter; + ToolHandle < Analysis::PrimaryVertexRefitter > m_pvRefitter; + ToolHandle < Trk::V0Tools > m_V0Tools; + ToolHandle < DerivationFramework::CascadeTools > m_CascadeTools; + + bool m_refitPV; + std::string m_refPVContainerName; + std::string m_hypoName; //!< name of the mass hypothesis. E.g. Jpis, Upsi, etc. Will be used as a prefix for decorations + //This parameter will allow us to optimize the number of PVs under consideration as the probability + //of a useful primary vertex drops significantly the higher you go + int m_PV_max; + int m_DoVertexType; + size_t m_PV_minNTracks; + + public: + static const InterfaceID& interfaceID() { return IID_JpsiPlusV0Cascade;} + JpsiPlusV0Cascade(const std::string& t, const std::string& n, const IInterface* p); + ~JpsiPlusV0Cascade(); + StatusCode initialize() override; + StatusCode performSearch(std::vector<Trk::VxCascadeInfo*> *cascadeinfoContainer ) const; + virtual StatusCode addBranches() const override; + }; +} + + +#endif + diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/LocalVector.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/LocalVector.h new file mode 100644 index 0000000000000000000000000000000000000000..1865c520e35f949b66eda6975784ac7bbdd34c85 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/LocalVector.h @@ -0,0 +1,72 @@ +/* + Copyright (C) 
2002-2017 CERN for the benefit of the ATLAS collaboration +*/ +#ifndef LOCALVECTOR_H +#define LOCALVECTOR_H + +#include <array> +#include <algorithm> +#include <iterator> + +template <class A_Type, size_t SIZE> +class LocalVector +{ +protected: + std::array<A_Type, SIZE> m_memblock; + size_t m_x; +public: + LocalVector() : m_x(0) { } + size_t size() const noexcept { + return m_x; + } + constexpr size_t max_size() const noexcept { + return SIZE; + } + typename std::array<A_Type, SIZE>::iterator begin() noexcept { + return m_memblock.begin(); + } + typename std::array<A_Type, SIZE>::iterator end() noexcept { + auto it = m_memblock.begin(); + std::advance(it, m_x); + return it; + } + typename std::array<A_Type, SIZE>::const_iterator begin() const noexcept { + return m_memblock.cbegin(); + } + typename std::array<A_Type, SIZE>::const_iterator end() const noexcept { + auto it = m_memblock.cbegin(); + std::advance(it, m_x); + return it; + } + A_Type& operator[]( size_t pos ) { + return m_memblock[pos]; + } + const A_Type& operator[]( size_t pos ) const { + return m_memblock[pos]; + } + bool contains(const A_Type& a) const { + return std::find(begin(), end(), a) != end(); + } + void push_back(const A_Type& a) { + m_memblock[m_x++] = a; + } + void pop_back() { + --m_x; + } + void clear() noexcept { + m_x = 0; + } + A_Type& back( ) { + return m_memblock[m_x-1]; + } + const A_Type& back( ) const { + return m_memblock[m_x-1]; + } + A_Type& front( ) { + return m_memblock.front(); + } + const A_Type& front( ) const { + return m_memblock.front(); + } +}; +#endif diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/MuonExtrapolationTool.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/MuonExtrapolationTool.h new file mode 100644 index 0000000000000000000000000000000000000000..7464e520f1075582e78d42361171d1c410526862 --- /dev/null +++ 
b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/MuonExtrapolationTool.h @@ -0,0 +1,59 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration + */ + +////////////////////////////////////////////////////////////////////////////// +// MuonExtrapolationTool +////////////////////////////////////////////////////////////////////////////// +#ifndef MuonExtrapolationTool_H +#define MuonExtrapolationTool_H + +#include "AthenaBaseComps/AthAlgTool.h" +#include "DerivationFrameworkInterfaces/IAugmentationTool.h" +#include "xAODTracking/TrackParticle.h" +#include "xAODMuon/Muon.h" +#include "TrkParameters/TrackParameters.h" + +namespace Trk { + class IExtrapolator; +} + +namespace DerivationFramework { + class MuonExtrapolationTool : public AthAlgTool, public IAugmentationTool { + + public: + MuonExtrapolationTool(const std::string& t, const std::string& n, const IInterface *p); + + virtual StatusCode initialize(); + virtual StatusCode addBranches() const; + + ToolHandle<Trk::IExtrapolator> m_extrapolator; + private: + + /// run the extrapolation - only available in full athena + const Trk::TrackParameters* extrapolateToTriggerPivotPlane(const xAOD::TrackParticle& track) const; + + // Utility method to handle extrapolation and decoration for one TrackParticle. + // It looks for the decoration, and, if it is missing, runs track extrapolation, decorating the result + // to the particle to avoid repeating the process unnecessarily. + // Returns success (true) or failure (false) of the procedure, fills eta and phi coordinates via reference + // If the extrapolation fails or the decoration is missing in AthAnalysis, it will *not* change eta and phi + // So you can set them to defaults before calling this guy, and they will be preserved in case of failure. 
+ // Will not run outside athena, because it requires the extrapolator + bool extrapolateAndDecorateTrackParticle(const xAOD::TrackParticle* particle, float & eta, float & phi) const; + + // utility method: Obtains the track particle which we want to extrapolate into the MS. + // Works for all kinds of probes. + const xAOD::TrackParticle* getPreferredTrackParticle (const xAOD::IParticle* probe) const; + + // these define the surfaces that we extrapolate to. + // We approximate the pivot plane in the form of a cylinder surface and two disks + double m_endcapPivotPlaneZ; + double m_endcapPivotPlaneMinimumRadius; + double m_endcapPivotPlaneMaximumRadius; + double m_barrelPivotPlaneRadius; + double m_barrelPivotPlaneHalfLength; + std::string m_muonContainerName; + }; +} +#endif // MuonExtrapolationTool_H diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/ReVertex.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/ReVertex.h new file mode 100644 index 0000000000000000000000000000000000000000..853f03bcf72f2b8d53d7ffa1b85e4d27e64d4cdb --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/ReVertex.h @@ -0,0 +1,98 @@ +/* + Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration +*/ +// **************************************************************************** +// ---------------------------------------------------------------------------- +// ReVertex header file +// +// Konstantin Beloborodov <Konstantin.Beloborodov@cern.ch> +// +// ---------------------------------------------------------------------------- +// **************************************************************************** + +#ifndef DERIVATIONFRAMEWORK_ReVertex_H +#define DERIVATIONFRAMEWORK_ReVertex_H + +#include "AthenaBaseComps/AthAlgTool.h" +#include "GaudiKernel/ToolHandle.h" +#include "DerivationFrameworkInterfaces/IAugmentationTool.h" +#include 
"xAODTracking/Vertex.h" +#include "BeamSpotConditionsData/BeamSpotData.h" +/** forward declarations + */ +namespace Trk { + class IVertexFitter; + class TrkV0VertexFitter; + class ITrackSelectorTool; + class V0Tools; + class TrkVKalVrtFitter; +} + +namespace Analysis { + class PrimaryVertexRefitter; +} + +namespace InDet { class VertexPointEstimator; } + +namespace DerivationFramework { + +class ReVertex : public AthAlgTool, public IAugmentationTool { +public: + + ReVertex(const std::string& t, const std::string& n, const IInterface* p); + + virtual StatusCode initialize() override; + //virtual StatusCode finalize() override; + + virtual StatusCode addBranches() const override; + + void fitAndStore(xAOD::VertexContainer* vtxContainer, + const xAOD::Vertex* v, + const xAOD::VertexContainer *InVtxContainer, + const std::vector<const xAOD::TrackParticle*> &inputTracks, + const xAOD::TrackParticleContainer* importedTrackCollection, + const xAOD::VertexContainer* pvContainer) const; + xAOD::Vertex* fit(const std::vector<const xAOD::TrackParticle*> &inputTracks, + const xAOD::Vertex* pv) const; +private: + std::vector<int> m_TrackIndices; + ToolHandle < InDet::VertexPointEstimator > m_vertexEstimator; + ToolHandle < Trk::IVertexFitter > m_iVertexFitter; + Trk::TrkVKalVrtFitter* m_VKVFitter; + SG::WriteHandleKey<xAOD::VertexContainer> m_OutputContainerName; + SG::ReadHandleKey<xAOD::VertexContainer> m_inputContainerName; + SG::ReadHandleKey<xAOD::TrackParticleContainer> m_trackContainer; + SG::WriteHandleKey<xAOD::VertexContainer> m_refPVContainerName; + SG::ReadHandleKey<xAOD::VertexContainer> m_pvContainerName; + + + std::vector<double> m_trkMasses; + std::vector<int> m_indices; + double m_massConst; + double m_totalMassConst; + std::vector<std::string> m_hypoNames; + + ToolHandle<Trk::V0Tools> m_v0Tools; + ToolHandle<Analysis::PrimaryVertexRefitter> m_pvRefitter; + SG::ReadCondHandleKey<InDet::BeamSpotData> m_beamSpotKey { this, "BeamSpotKey", "BeamSpotData", "SG key 
for beam spot" }; + int m_PV_max; + int m_DoVertexType; + size_t m_PV_minNTracks; + bool m_do3d; + bool m_AddPVData; + bool m_refitPV; + bool m_doMassConst; + bool m_startingpoint0; + + bool m_vertexFittingWithPV; + + double m_BMassUpper; + double m_BMassLower; + double m_chi2cut; // chi2/Ndof of the final veretx + double m_trkDeltaZ; // DeltaZ between the JPsi vertex and hadronic tracks Z0 + + bool m_useAdditionalTrack; +}; +} + +#endif diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/Reco_4mu.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/Reco_4mu.h new file mode 100644 index 0000000000000000000000000000000000000000..5b311a975fdd0b362613210939fdb5f5bfbfc031 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/Reco_4mu.h @@ -0,0 +1,63 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ + +/////////////////////////////////////////////////////////////////// +// Reco_4mu.h +/////////////////////////////////////////////////////////////////// + +#ifndef DERIVATIONFRAMEWORK_Reco_4mu_H +#define DERIVATIONFRAMEWORK_Reco_4mu_H + +#include <string> + +#include "AthenaBaseComps/AthAlgTool.h" +#include "DerivationFrameworkInterfaces/ISkimmingTool.h" +#include "DerivationFrameworkBPhys/FourMuonTool.h" +#include "JpsiUpsilonTools/PrimaryVertexRefitter.h" +#include "xAODBPhys/BPhysHelper.h" + +/** forward declarations + */ +namespace Trk { + class V0Tools; +} + +namespace xAOD { + class BPhysHypoHelper; +} + +/** THE reconstruction tool + */ +namespace DerivationFramework { + + class Reco_4mu : public AthAlgTool, public ISkimmingTool { + public: + Reco_4mu(const std::string& t, const std::string& n, const IInterface* p); + + StatusCode initialize(); + StatusCode finalize(); + + virtual bool eventPassesFilter() const; + + private: + /** tools + */ + void ProcessVertex(xAOD::BPhysHypoHelper&, 
xAOD::BPhysHelper::pv_type, std::vector<double> trackMasses) const; + ToolHandle<Trk::V0Tools> m_v0Tools; + ToolHandle<DerivationFramework::FourMuonTool> m_fourMuonTool; + ToolHandle<Analysis::PrimaryVertexRefitter> m_pvRefitter; + + /** job options + */ + std::string m_pairName; + std::string m_quadName; + std::string m_pvContainerName; + std::string m_refPVContainerName; + bool m_refitPV; + int m_PV_max; + int m_DoVertexType; + }; +} + +#endif // DERIVATIONFRAMEWORK_Reco_4mu_H diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/Reco_V0Finder.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/Reco_V0Finder.h new file mode 100644 index 0000000000000000000000000000000000000000..ceb666c2393104246f1eb43feaf18fefc493796a --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/Reco_V0Finder.h @@ -0,0 +1,63 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ +/////////////////////////////////////////////////////////////////// +// Reco_V0Finder.h, (c) ATLAS Detector software +/////////////////////////////////////////////////////////////////// + +#ifndef DERIVATIONFRAMEWORK_V0FINDER_H +#define DERIVATIONFRAMEWORK_V0FINDER_H + +#include <string> + +#include "AthenaBaseComps/AthAlgTool.h" +#include "GaudiKernel/ToolHandle.h" +#include "DerivationFrameworkInterfaces/IAugmentationTool.h" +#include "InDetV0Finder/InDetV0FinderTool.h" +#include "HepPDT/ParticleDataTable.hh" + +/** forward declarations + */ +namespace Trk +{ + class V0Tools; + class ParticleDataTable; +} + +namespace DerivationFramework { + + class Reco_V0Finder : public AthAlgTool, public IAugmentationTool { + public: + Reco_V0Finder(const std::string& t, const std::string& n, const IInterface* p); + + StatusCode initialize() override; + StatusCode finalize() override; + + virtual StatusCode addBranches() const override; + + private: + + 
std::vector<std::string> m_CollectionsToCheck; + ToolHandle <InDet::InDetV0FinderTool> m_v0FinderTool; + ToolHandle <Trk::V0Tools> m_V0Tools; + const HepPDT::ParticleDataTable *m_particleDataTable; + + int m_masses; //!< = 1 if using PDG values, = 2 if user set (1) + double m_masspi; //!< pion mass (139.57 MeV) + double m_massp; //!< proton mass (938.272 MeV) + double m_masse; //!< electron mass (0.510999 MeV) + double m_massK0S; //!< Kshort mass (497.672 MeV) + double m_massLambda; //!< Lambda mass (1115.68 MeV) + + std::string m_VxPrimaryCandidateName; //!< Name of primary vertex container + + std::string m_v0ContainerName; + std::string m_ksContainerName; + std::string m_laContainerName; + std::string m_lbContainerName; + + }; +} + +#endif // DERIVATIONFRAMEWORK_Reco_dimuTrk_H + diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/Reco_Vertex.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/Reco_Vertex.h new file mode 100644 index 0000000000000000000000000000000000000000..57e82e2048e8acb55d5c5698e29cb2b1f02efb00 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/Reco_Vertex.h @@ -0,0 +1,54 @@ +/* + Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration +*/ + +/////////////////////////////////////////////////////////////////// +// Reco_Vertex.h +/////////////////////////////////////////////////////////////////// + +#ifndef DERIVATIONFRAMEWORK_Reco_Vertex_H +#define DERIVATIONFRAMEWORK_Reco_Vertex_H + +#include "AthenaBaseComps/AthAlgTool.h" +#include "GaudiKernel/ToolHandle.h" +#include "TrkVertexAnalysisUtils/V0Tools.h" +#include "DerivationFrameworkInterfaces/IAugmentationTool.h" +#include "JpsiUpsilonTools/ICandidateSearch.h" +#include "JpsiUpsilonTools/PrimaryVertexRefitter.h" +#include "BeamSpotConditionsData/BeamSpotData.h" +#include "StoreGate/ReadHandleKeyArray.h" + +namespace DerivationFramework { 
+ + class Reco_Vertex : public AthAlgTool, public IAugmentationTool { + public: + Reco_Vertex(const std::string& t, const std::string& n, const IInterface* p); + + virtual StatusCode initialize(); + + virtual StatusCode addBranches() const; + + private: + /** tools + */ + ToolHandle<Trk::V0Tools> m_v0Tools; + ToolHandle<Analysis::ICandidateSearch> m_SearchTool; + ToolHandle<Analysis::PrimaryVertexRefitter> m_pvRefitter; + SG::ReadCondHandleKey<InDet::BeamSpotData> m_beamSpotKey { this, "BeamSpotKey", "BeamSpotData", "SG key for beam spot" }; + + /** job options + */ + SG::WriteHandleKey<xAOD::VertexContainer> m_outputVtxContainerName; + SG::ReadHandleKey<xAOD::VertexContainer> m_pvContainerName; + SG::WriteHandleKey<xAOD::VertexContainer> m_refPVContainerName; + bool m_refitPV; + int m_PV_max; + int m_DoVertexType; + size_t m_PV_minNTracks; + bool m_do3d; + bool m_checkCollections; + SG::ReadHandleKeyArray<xAOD::VertexContainer> m_CollectionsToCheck; + }; +} + +#endif // DERIVATIONFRAMEWORK_Reco_Vertex_H diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/Select_Bmumu.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/Select_Bmumu.h new file mode 100644 index 0000000000000000000000000000000000000000..cf43240a3ac86d169cf540e30d13d86a32c5d5e9 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/Select_Bmumu.h @@ -0,0 +1,116 @@ +/* + Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration +*/ + +//============================================================================ +// Select_Bmumu.h +//============================================================================ +// +// Author : Wolfgang Walkowiak <Wolfgang.Walkowiak@cern.ch.> +// Changes: +// +// Based on Select_onia2mumu.h. 
+// Original author: Daniel Scheirich <daniel.scheirich@cern.ch> +// +// Select B candidates for the B(s)mumu analysis including for +// the reference channels used. +// +// For an example see BPHY8.py . +// +// Job options provided by this class: +// +//============================================================================ +// +#ifndef DERIVATIONFRAMEWORK_Select_Bmumu_H +#define DERIVATIONFRAMEWORK_Select_Bmumu_H + +#include <string> + +#include "GaudiKernel/ToolHandle.h" +#include "AthenaBaseComps/AthAlgTool.h" +#include "DerivationFrameworkBPhys/CfAthAlgTool.h" +#include "DerivationFrameworkInterfaces/IAugmentationTool.h" +#include "MuonAnalysisInterfaces/IMuonSelectionTool.h" +#include "JpsiUpsilonTools/JpsiFinder.h" +#include "xAODBPhys/BPhysHelper.h" + +/** forward declarations + */ +namespace Trk { + class V0Tools; +} + +namespace xAOD { + class BPhysHypoHelper; +} + +namespace SG { + class AuxElement; +} + +/** THE candidate selection tool + */ +namespace DerivationFramework { + + class Select_Bmumu : public CfAthAlgTool, public IAugmentationTool { + public: + Select_Bmumu(const std::string& t, const std::string& n, const IInterface* p); + + /** initialization and finalization + */ + StatusCode initialize() override; + StatusCode finalize() override; + + /** @brief: augmentation and selection + * Retrieved vertices are augmented with usual information. + * Selection is performed and each candidate is decorated with the + * Char_t flag named "passed_"+name() to indicate whether if the candidate + * passed the selection. This flag is then used by the event selection tool + * and by the vertex thinning tool. 
+ */ + virtual StatusCode addBranches() const override; + + private: + void ProcessVertex(xAOD::BPhysHypoHelper&, xAOD::BPhysHelper::pv_type) const; + bool massCuts(float mass) const; + bool massInBlindedRegion(float mass) const; + + bool checkAllMuonsTight(const std::vector<const xAOD::Muon*>& muons, + int maxMuonsToCheck=-1) const; + + bool pass(const SG::AuxElement& em, std::string hypo) const; + bool setPass(const SG::AuxElement& em, std::string hypo, bool passVal) const; + bool setPassIfNotAvailable(SG::AuxElement& em, std::string hypo, + bool passVal) const; + // std::vector<xAOD::Vertex*> getPrecedingVertices(const xAOD::Vertex* vtx); + + /** tools + */ + ToolHandle<Trk::V0Tools> m_v0Tools; + ToolHandle<CP::IMuonSelectionTool> m_muSelectionTool; + + /** job options + */ + std::string m_hypoName; //!< name of the mass hypothesis. E.g. Jpis, Upsi, etc. Will be used as a prefix for decorations + std::string m_inputVtxContainerName; //!< name of the input container name + std::vector<double> m_trkMasses; //!< track mass hypotheses + double m_massHypo; //!< vertex mass hypothesis + double m_massMax; //!< invariant mass range + double m_massMin; //!< invariant mass range + double m_chi2Max; //!< max chi2 cut + int m_DoVertexType; //!< Allows user to skip certain vertexes - bitwise test 7==all(111) + bool m_do3d; //!< add 3d proper time + double m_blindMassMin; //!< blinding mass range + double m_blindMassMax; //!< blinding mass range + bool m_doBlinding; //!< enable blinding range + bool m_doCutBlinded; //!< enable cutting blinded vertices + bool m_blindOnlyAllMuonsTight; //!< only blind candidates with all tight muons + bool m_useMuCalcMass; //!< also check against MUCALC mass + + std::vector<std::string> m_subDecVtxContNames; //!< names of sub-decay vertex containers + std::vector<std::string> m_subDecVtxHypoCondNames; //!< hypo names for sub-decays to be considered + std::vector<std::string> m_subDecVtxHypoFlagNames; //!< names of hypo flags set on sub-decays 
if passing + }; +} + +#endif // DERIVATIONFRAMEWORK_Select_Bmumu_H diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/Select_onia2mumu.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/Select_onia2mumu.h new file mode 100644 index 0000000000000000000000000000000000000000..f2157f8b24eac7a96cbad16a72803d26a4cb5dd5 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/Select_onia2mumu.h @@ -0,0 +1,71 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ + +/////////////////////////////////////////////////////////////////// +// Select_onia2mumu.h +/////////////////////////////////////////////////////////////////// + +#ifndef DERIVATIONFRAMEWORK_Select_onia2mumu_H +#define DERIVATIONFRAMEWORK_Select_onia2mumu_H + + +#include "AthenaBaseComps/AthAlgTool.h" +#include "DerivationFrameworkInterfaces/IAugmentationTool.h" +#include "JpsiUpsilonTools/JpsiFinder.h" +#include "xAODBPhys/BPhysHelper.h" +#include <string> + +/** forward declarations + */ +namespace Trk { + class V0Tools; +} + +namespace xAOD { + class BPhysHypoHelper; +} + +/** THE candidate selection tool + */ +namespace DerivationFramework { + + class Select_onia2mumu : public AthAlgTool, public IAugmentationTool { + public: + Select_onia2mumu(const std::string& t, const std::string& n, const IInterface* p); + + /** inirialization and finalization + */ + StatusCode initialize() override; + + /** @brief: augmentation and selection + * Retrieved vertices are augmented with usual information. + * Selection is performed and each candidate is decorated with the + * Char_t flag named "passed_"+name() to indicate whether if the candidate + * passed the selection. This flag is then used by the event selection tool + * and by the vertex thinning tool. 
+ */ + virtual StatusCode addBranches() const override; + + private: + void ProcessVertex(xAOD::BPhysHypoHelper&, xAOD::BPhysHelper::pv_type) const; + /** tools + */ + ToolHandle<Trk::V0Tools> m_v0Tools; + + /** job options + */ + std::string m_hypoName; //!< name of the mass hypothesis. E.g. Jpis, Upsi, etc. Will be used as a prefix for decorations + SG::ReadHandleKey<xAOD::VertexContainer> m_inputVtxContainerName; //!< name of the input container name + std::vector<double> m_trkMasses; //!< track mass hypotheses + double m_massHypo; //!< vertex mass hypothesis + double m_massMax; //!< invariant mass range + double m_massMin; //!< invariant mass range + double m_chi2Max; //!< max chi2 cut + double m_lxyMin; //!< min lxy cut + int m_DoVertexType; //!< Allows user to skip certain vertexes - bitwise test 7==all(111) + bool m_do3d; + }; +} + +#endif // DERIVATIONFRAMEWORK_Select_onia2mumu_H diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/Thin_vtxDuplicates.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/Thin_vtxDuplicates.h new file mode 100644 index 0000000000000000000000000000000000000000..77aa7c8138698b902447a3f4f86517880b43fd1a --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/Thin_vtxDuplicates.h @@ -0,0 +1,37 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ + +#ifndef DERIVATIONFRAMEWORK_Thin_vtxDuplicates_H +#define DERIVATIONFRAMEWORK_Thin_vtxDuplicates_H + +#include "xAODTracking/VertexContainer.h" +#include "AthenaBaseComps/AthAlgTool.h" +#include "DerivationFrameworkInterfaces/IThinningTool.h" +#include "GaudiKernel/ToolHandle.h" +#include "StoreGate/ThinningHandleKey.h" +#include "StoreGate/ReadDecorHandleKeyArray.h" +class IThinningSvc; + +namespace DerivationFramework { + + class Thin_vtxDuplicates : public AthAlgTool, public IThinningTool { + public: + 
Thin_vtxDuplicates(const std::string& t, const std::string& n, const IInterface* p); + ~Thin_vtxDuplicates(); + virtual StatusCode initialize(); + virtual StatusCode finalize(); + virtual StatusCode doThinning() const; + + private: + bool m_noFlags; + StringProperty m_streamName{ this, "StreamName", "", "Name of the stream being thinned" }; + mutable std::atomic<unsigned int> m_nVtxTot, m_nVtxPass; + + SG::ThinningHandleKey< xAOD::VertexContainer > m_vertexContainerNames; + SG::ReadDecorHandleKeyArray<xAOD::VertexContainer> m_passFlags; + bool m_and; + }; +} + +#endif diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/Thin_vtxTrk.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/Thin_vtxTrk.h new file mode 100644 index 0000000000000000000000000000000000000000..3222f0ffcfd5d44045760fd7a058d52554045fe0 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/Thin_vtxTrk.h @@ -0,0 +1,54 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ + +/////////////////////////////////////////////////////////////////// +// Thin_vtxTrk.h +/////////////////////////////////////////////////////////////////// + +#ifndef DERIVATIONFRAMEWORK_Thin_vtxTrk_H +#define DERIVATIONFRAMEWORK_Thin_vtxTrk_H + +#include "xAODTracking/TrackParticleContainer.h" +#include "xAODTracking/VertexContainer.h" +#include "AthenaBaseComps/AthAlgTool.h" +#include "DerivationFrameworkInterfaces/IThinningTool.h" +#include "GaudiKernel/ToolHandle.h" +#include "StoreGate/ThinningHandleKey.h" +#include "StoreGate/HandleKeyArray.h" +#include "StoreGate/ReadDecorHandleKeyArray.h" +#include <string> + +namespace SG{ + template <class T> + using ThinningHandleKeyArray = HandleKeyArray<ReadHandle<T>, ThinningHandleKey<T>, Gaudi::DataHandle::Reader >; +} + +namespace DerivationFramework { + + class Thin_vtxTrk : public AthAlgTool, public IThinningTool { 
+ public: + Thin_vtxTrk(const std::string& t, const std::string& n, const IInterface* p); + ~Thin_vtxTrk(); + StatusCode initialize(); + StatusCode finalize(); + virtual StatusCode doThinning() const; + + private: + StringProperty m_streamName{ this, "StreamName", "", "Name of the stream being thinned" }; + mutable std::atomic<unsigned int> m_ntot, m_npass; + double m_acceptanceR; + mutable std::atomic<unsigned int> m_nVtxTot, m_nVtxPass; + + SG::ThinningHandleKey<xAOD::TrackParticleContainer> m_trackParticleContainerName; + SG::ThinningHandleKeyArray<xAOD::VertexContainer> m_vertexContainerName; + std::vector<std::string> m_passFlags; + SG::ReadDecorHandleKeyArray<xAOD::VertexContainer> m_passArray{this, "INTERNALARRAY", {}}; + bool m_and; + bool m_trackAnd; + bool m_thinTracks; + bool m_noFlags; //To take all entries, regardless of flags + }; +} + +#endif diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/TriggerCountToMetadata.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/TriggerCountToMetadata.h new file mode 100644 index 0000000000000000000000000000000000000000..69290703ebd604d17daf4c7b8672678ba9c4eae9 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/TriggerCountToMetadata.h @@ -0,0 +1,51 @@ +/* +Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ +//============================================================================ +// +// Author : Matteo Bedognetti <matteo.bedognetti@cern.ch.> +// Changes: +// +// Store trigger counts for specific chains in the DAOD's MetaData. 
+// This allows it to store information about triggers upon which events are NOT selected during the derivation +// +// Job options: +// - TriggerList -- a vector containing all triggers to store as strings +// - TrigDecisionTool -- if one wants to pass this a specific TrigDecisionTool +// +//============================================================================ +// +#ifndef DERIVATIONFRAMEWORK_TriggerCountToMetadata_H +#define DERIVATIONFRAMEWORK_TriggerCountToMetadata_H + +#include "DerivationFrameworkInterfaces/IAugmentationTool.h" +#include "GaudiKernel/ToolHandle.h" +#include "DerivationFrameworkBPhys/CfAthAlgTool.h" +#include "TrigDecisionTool/TrigDecisionTool.h" + +#include <string> +#include <vector> + +namespace Trig{ + class TrigDecisionTool; +} + +namespace DerivationFramework { + + class TriggerCountToMetadata : virtual public CfAthAlgTool, virtual public IAugmentationTool { + + public: + TriggerCountToMetadata(const std::string& t, const std::string& n, const IInterface* p); + virtual StatusCode initialize() override; + virtual StatusCode addBranches() const override; + + private: //Don't use protected for this one! 
+ + std::vector<std::string> m_triggerList; + ToolHandle<Trig::TrigDecisionTool> m_trigDecisionTool; + std::string m_folderName; + + }; // class +} // namespace + +#endif // DERIVATIONFRAMEWORK_TriggerCountToMetadata_H diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/VertexCaloIsolation.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/VertexCaloIsolation.h new file mode 100644 index 0000000000000000000000000000000000000000..50e7b4d252dac51a57027e9609ec8b3f2d3cf8d3 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/VertexCaloIsolation.h @@ -0,0 +1,89 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ + +/////////////////////////////////////////////////////////////////// +// VertexCaloIsolation.h by Matteo Bedognetti +/////////////////////////////////////////////////////////////////// +// +// This code is based on CaloIsolationTool of IsolationTools package +// +// Etcone is determined as a topoCluster-isolation value minus Energy Density (ED) correction and minus the energy depositions of the muons +// Muon's energy deposition is already stored in side the xAOD::Muon objects, but the muon-clusters are used to correct for the fact that they muons may have overlapping clusters +// The muon-clusters are stored as well in connection with the muons themselves +// +// The idea of comparing topoClusters with muon-clusters to decide what part of the muon's deposition is of +// importance had to be abandoned because topCluster cells are not present in xAOD +// +// It enforces the fact that for muons no core-surface is removed for the energy-density correction (thus the corrections are independent from each other) +// +// "isReliable" flag reports of each isolation value if all particles crossing the cone have been correctly corrected for. 
+// In the case of 2mu+ 1 track it mirrors the fact that the track does not extrapolate into the cone (as tracks have no muon-cluster from which to determine the core-correction) +// +#ifndef DERIVATIONFRAMEWORK_VertexCaloIsolation_H +#define DERIVATIONFRAMEWORK_VertexCaloIsolation_H + +#include <string> + +#include "AthenaBaseComps/AthAlgTool.h" +#include "DerivationFrameworkInterfaces/IAugmentationTool.h" +#include "JpsiUpsilonTools/PrimaryVertexRefitter.h" +//#include "MuonIdHelpers/MuonIdHelperTool.h" +//#include "IsolationTool/CaloIsolationTool.h" +//#include "TrkCaloExtension/CaloExtension.h" + +#include "RecoToolInterfaces/IParticleCaloExtensionTool.h" +#include "RecoToolInterfaces/ICaloTopoClusterIsolationTool.h" + +//#include "TrackToCalo/CaloCellCollector.h" +//#include "CaloInterface/ICaloNoiseTool.h" +#include "xAODBPhys/BPhysHelper.h" +//#include "xAODPrimitives/IsolationType.h" // + + +/** THE reconstruction tool + */ +namespace DerivationFramework { + + class VertexCaloIsolation : public AthAlgTool, public IAugmentationTool { + public: + VertexCaloIsolation(const std::string& t, const std::string& n, const IInterface* p); + + StatusCode initialize(); + StatusCode finalize(); + + StatusCode addBranches() const; + bool extrapolateTrack(TLorentzVector& extr_tp, const xAOD::IParticle& tp) const; + bool extrapolateMuon(TLorentzVector& extr_tp, const xAOD::CaloCluster* cluster) const; + xAOD::TrackParticle& makeSlyTrack(xAOD::TrackParticle&, const TLorentzVector& candidate, const xAOD::Vertex* vertex, xAOD::BPhysHelper::pv_type vertexType) const; + + + private: + + //ToolHandle<xAOD::ICaloCellIsolationTool> m_caloIsoTool; + ToolHandle<xAOD::ICaloTopoClusterIsolationTool> m_caloIsoTool; + std::string m_trackContainerName; + std::string m_vertexContainerName; + std::string m_caloClusterContainerName; + std::string m_cellContainerName; + std::string m_muonContainerName; + ToolHandle<Trk::IParticleCaloExtensionTool> m_caloExtTool; + std::vector<unsigned int> 
m_cones; //I cannot use xAOD::Iso::IsolationType as a type here, as it clashes with setProperty() + std::vector<std::string> m_passFlags; + + + // ToolHandle <ICaloNoiseTool> m_caloNoiseTool; //Removed to reduce requirements + //Rec::CaloCellCollector m_cellCollector; //Seems to be a plain class, so no need for handles + + /// Number of sigma for calo cell noise cut + float m_sigmaCaloNoiseCut; + + int m_vertexType; + + + + }; +} + +#endif + diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/VertexPlus1TrackCascade.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/VertexPlus1TrackCascade.h new file mode 100644 index 0000000000000000000000000000000000000000..416b42c5a0716c71b0d12904e4ddf0e922e2e196 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/VertexPlus1TrackCascade.h @@ -0,0 +1,74 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ + +#ifndef VERTEXPLUS1TRACKCASCADE_H +#define VERTEXPLUS1TRACKCASCADE_H +//********************* +// VertexPlus1Cascade header file +// +// Adam Barton <abarton@cern.ch> + +#include "AthenaBaseComps/AthAlgTool.h" +#include "GaudiKernel/ToolHandle.h" +#include "xAODMuon/MuonContainer.h" +#include "xAODTracking/TrackParticle.h" +#include "xAODTracking/TrackParticleContainer.h" +#include "xAODTracking/VertexContainer.h" +#include <vector> + + +namespace Trk { + class IVertexFitter; + class ITrackSelectorTool; + class TrkVKalVrtFitter; + class IVertexCascadeFitter; + class VxCascadeInfo; +} + +namespace DerivationFramework { + + static const InterfaceID IID_VertexPlus1TrackCascade("VertexPlus1TrackCascade", 1, 0); + + class VertexPlus1TrackCascade : virtual public AthAlgTool + { + + + SG::ReadHandleKey<xAOD::VertexContainer> m_vertexContainerKey; + SG::ReadHandleKey<xAOD::TrackParticleContainer> m_TrackPContainerKey; + SG::ReadHandleKey<xAOD::MuonContainer> 
m_MuonsUsedInJpsiKey; + + std::vector<double> m_massHypothesis; + + std::vector<int> m_massConstraintTracksVtx1; + std::vector<int> m_massConstraintTracksVtx2; + double m_Vtx1MassConstraint; + double m_Vtx2MassConstraint; + + double m_trkThresholdPt; + double m_trkMaxEta; +// double m_BThresholdPt; +// double m_BMassUpper; +// double m_BMassLower; + + double m_roughMassLower; + double m_roughMassUpper; + ToolHandle < Trk::TrkVKalVrtFitter > m_iVertexFitter; + ToolHandle < Trk::ITrackSelectorTool > m_trkSelector; + + public: + static const InterfaceID& interfaceID() { return IID_VertexPlus1TrackCascade;} + VertexPlus1TrackCascade(const std::string& t, const std::string& n, const IInterface* p); + ~VertexPlus1TrackCascade(); + StatusCode initialize() override; + StatusCode finalize() override; + static double getInvariantMass(const std::vector<const xAOD::TrackParticle*> &Tracks, const std::vector<double> &massHypotheses); + static bool isContainedIn(const xAOD::TrackParticle* theTrack, const xAOD::MuonContainer* theColl); + StatusCode performSearch(std::vector<Trk::VxCascadeInfo*> *cascadeinfoContainer ) const; + + }; +} + + +#endif + diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/VertexTrackIsolation.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/VertexTrackIsolation.h new file mode 100644 index 0000000000000000000000000000000000000000..603da6cef65cb579803455b07ad3589a5700d794 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/DerivationFrameworkBPhys/VertexTrackIsolation.h @@ -0,0 +1,52 @@ +/* + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +*/ + +/////////////////////////////////////////////////////////////////// +// VertexTrackIsolation.h, +/////////////////////////////////////////////////////////////////// + +#ifndef DERIVATIONFRAMEWORK_VertexTrackIsolation_H +#define DERIVATIONFRAMEWORK_VertexTrackIsolation_H + 
+#include <string> + +#include "AthenaBaseComps/AthAlgTool.h" +#include "DerivationFrameworkInterfaces/IAugmentationTool.h" +#include "JpsiUpsilonTools/PrimaryVertexRefitter.h" +//#include "MuonIdHelpers/MuonIdHelperTool.h" +//#include "IsolationTool/TrackIsolationTool.h" +#include "RecoToolInterfaces/ITrackIsolationTool.h" +//#include "xAODPrimitives/IsolationType.h" +#include <vector> +/** THE reconstruction tool + */ +namespace DerivationFramework { + + class VertexTrackIsolation : public AthAlgTool, public IAugmentationTool { + public: + VertexTrackIsolation(const std::string& t, const std::string& n, const IInterface* p); + + StatusCode initialize(); + StatusCode finalize(); + + virtual StatusCode addBranches() const; + + bool isSame(const xAOD::Vertex* theVtx1, const xAOD::Vertex* theVtx2) const; + bool isContainedIn(const xAOD::Vertex* theVtx, const std::vector<const xAOD::Vertex*> &theColl) const; + + private: + + ToolHandle<xAOD::ITrackIsolationTool> m_trackIsoTool; + std::string m_trackContainerName; + std::string m_vertexContainerName; + std::vector<unsigned int> m_cones; + std::vector<std::string> m_passFlags; + int m_vertexType; //Which type of primary vertices should be used? (7 = 0b111 are all at the moment) + + bool m_doIsoPerTrk; + int m_removeDuplicate; + }; +} + +#endif diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/python/BPhysPyHelpers.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/python/BPhysPyHelpers.py new file mode 100644 index 0000000000000000000000000000000000000000..c30320e4e2fc7d849c4ba0d2272cbfb931ca64eb --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/python/BPhysPyHelpers.py @@ -0,0 +1,84 @@ +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration + +#==================================================================== +# BPhysPyHelpers.py +# +# Python helper classes for BPHY derivations. +# +# authors: W. 
Walkowiak <wolfgang.walkowiak@cern.ch>, 2016-05-19 +# changed: +# +# This file contains a set of Python helper classes for the +# BPHY derivations. +# +# Available helper classes and methods: +# - BPyWrapper -- wrap configurable to ensure default contents of +# __slots__ dict are available as attributes +# Usage example: see BPHY8.py +# Note: Unfortunately, this doesn't quite work, since the +# wrapped class is no longer recognized as Configurable +# - BPhysEnsureAttributes(algtool) +# -- ensure default contents of __slots__ dict a +# are available as attributes to the class +# - BPhysFilterBranches(...) +# -- create list of isolation or closest track branches +# to be thinned +# +#==================================================================== + +#-------------------------------------------------------------------- +class BPyWrapper(object): + __slots__ = {'wclass' : object} + + def __init__(self, wclass, *args, **kargs): + object.__setattr__(self, 'wclass', wclass(*args, **kargs)) + # the important part: make __slot__ variables attributes + for n,v in self.wclass.__slots__.items(): + if not hasattr(self.wclass, n): + setattr(self.wclass, n, v) + + def __getattr__(self, attr): + return self.wclass.__getattribute__(attr) + + def __setattr__(self, attr, value): + setattr(self.wclass, attr, value) + + def __call__(self, *args, **kwargs): + return self.wclass(*args, **kwargs) +#-------------------------------------------------------------------- +# +# ensure default contents of __slots__ dict are available as attributes +# +def BPhysEnsureAttributes(algtool): + + for n,v in algtool.__slots__.items(): + if not hasattr(algtool, n): + setattr(algtool, n, v) + return algtool +#-------------------------------------------------------------------- +# +# create list of isolation or closest track branches to be thinned +# (used by BPHY8) +# +def BPhysFilterBranches(name, brPrefixList, brIncludeList, doVertexTypeList, + categoryList, trackTypeList, coneOrChi2SetList, + 
forCloseTrack=False): + res = "" + brIncludes = [tuple(x.split('|',3)) for x in brIncludeList] + for bntup in brPrefixList: + bn, sep, bnsuf = bntup.partition('+') + for i, cstr in enumerate(coneOrChi2SetList): + for itt in trackTypeList: + ittstr = "T%010d" % itt + for itcstr in categoryList: + if brIncludes and not (cstr,str(itt),itcstr) in brIncludes: + for dvs in doVertexTypeList: + if forCloseTrack: + fbn = '_'.join(filter(None, [name,bn,ittstr,itcstr, + dvs,cstr,bnsuf])) + else: + fbn = '_'.join(filter(None, [name,bn,cstr,ittstr, + itcstr,dvs,bnsuf])) + res += ".-"+fbn + return res +#-------------------------------------------------------------------- diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/python/__init__.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/python/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..74583d364ec2ca794156596c7254d9b234a940c6 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/python/__init__.py @@ -0,0 +1,2 @@ +# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration + diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY1.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY1.py new file mode 100644 index 0000000000000000000000000000000000000000..6b20ae9dacb394283984509c5d0448e33dd1ba67 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY1.py @@ -0,0 +1,274 @@ +#==================================================================== +# BPHY1.py +# This an example job options script showing how to set up a +# derivation of the data using the derivation framework. +# It requires the reductionConf flag BPHY1 in Reco_tf.py +#==================================================================== + +# Set up common services and job object. 
+# This should appear in ALL derivation job options +from DerivationFrameworkCore.DerivationFrameworkMaster import * + +isSimulation = False +if globalflags.DataSource()=='geant4': + isSimulation = True + +print (isSimulation) + +#==================================================================== +# AUGMENTATION TOOLS +#==================================================================== +## 1/ setup vertexing tools and services +#include( "JpsiUpsilonTools/configureServices.py" ) + +include("DerivationFrameworkBPhys/configureVertexing.py") +BPHY1_VertexTools = BPHYVertexTools("BPHY1") + +#-------------------------------------------------------------------- +## 2/ Setup the vertex fitter tools (e.g. JpsiFinder, JpsiPlus1Track, etc). +## These are general tools independent of DerivationFramework that do the +## actual vertex fitting and some pre-selection. +from JpsiUpsilonTools.JpsiUpsilonToolsConf import Analysis__JpsiFinder +BPHY1JpsiFinder = Analysis__JpsiFinder( + name = "BPHY1JpsiFinder", + OutputLevel = INFO, + muAndMu = True, + muAndTrack = False, + TrackAndTrack = False, + assumeDiMuons = True, # If true, will assume dimu hypothesis and use PDG value for mu mass + invMassUpper = 100000.0, + invMassLower = 0.0, + Chi2Cut = 200., + oppChargesOnly = True, + atLeastOneComb = True, + useCombinedMeasurement = False, # Only takes effect if combOnly=True + muonCollectionKey = "Muons", + TrackParticleCollection = "InDetTrackParticles", + V0VertexFitterTool = BPHY1_VertexTools.TrkV0Fitter, # V0 vertex fitter + useV0Fitter = False, # if False a TrkVertexFitterTool will be used + TrkVertexFitterTool = BPHY1_VertexTools.TrkVKalVrtFitter, # VKalVrt vertex fitter + TrackSelectorTool = BPHY1_VertexTools.InDetTrackSelectorTool, + VertexPointEstimator = BPHY1_VertexTools.VtxPointEstimator, + useMCPCuts = False ) + +ToolSvc += BPHY1JpsiFinder +print (BPHY1JpsiFinder) + +#-------------------------------------------------------------------- +## 3/ setup the vertex 
reconstruction "call" tool(s). They are part of the derivation framework. +## These Augmentation tools add output vertex collection(s) into the StoreGate and add basic +## decorations which do not depend on the vertex mass hypothesis (e.g. lxy, ptError, etc). +## There should be one tool per topology, i.e. Jpsi and Psi(2S) do not need two instance of the +## Reco tool is the JpsiFinder mass window is wide enough. + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Reco_Vertex +BPHY1_Reco_mumu = DerivationFramework__Reco_Vertex( + name = "BPHY1_Reco_mumu", + VertexSearchTool = BPHY1JpsiFinder, + OutputVtxContainerName = "BPHY1OniaCandidates", + PVContainerName = "PrimaryVertices", + RefPVContainerName = "BPHY1RefittedPrimaryVertices", + RefitPV = True, + MaxPVrefit = 100000, + DoVertexType = 7) + +ToolSvc += BPHY1_Reco_mumu +print (BPHY1_Reco_mumu) + +#-------------------------------------------------------------------- +## 4/ setup the vertex selection and augmentation tool(s). These tools decorate the vertices with +## variables that depend on the vertex mass hypothesis, e.g. invariant mass, proper decay time, etc. +## Property HypothesisName is used as a prefix for these decorations. +## They also perform tighter selection, flagging the vertecis that passed. The flag is a Char_t branch +## named "passed_"+HypothesisName. It is used later by the "SelectEvent" and "Thin_vtxTrk" tools +## to determine which events and candidates should be kept in the output stream. +## Multiple instances of the Select_* tools can be used on a single input collection as long as they +## use different "HypothesisName" flags. 
+ +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Select_onia2mumu + +## a/ augment and select Jpsi->mumu candidates +BPHY1_Select_Jpsi2mumu = DerivationFramework__Select_onia2mumu( + name = "BPHY1_Select_Jpsi2mumu", + HypothesisName = "Jpsi", + InputVtxContainerName = "BPHY1OniaCandidates", + VtxMassHypo = 3096.916, + MassMin = 2000.0, + MassMax = 3600.0, + Chi2Max = 200, + DoVertexType = 7) + +ToolSvc += BPHY1_Select_Jpsi2mumu +print (BPHY1_Select_Jpsi2mumu) + +## b/ augment and select Psi(2S)->mumu candidates +BPHY1_Select_Psi2mumu = DerivationFramework__Select_onia2mumu( + name = "BPHY1_Select_Psi2mumu", + HypothesisName = "Psi", + InputVtxContainerName = "BPHY1OniaCandidates", + VtxMassHypo = 3686.09, + MassMin = 3300.0, + MassMax = 4500.0, + Chi2Max = 200, + DoVertexType = 7) + +ToolSvc += BPHY1_Select_Psi2mumu +print (BPHY1_Select_Psi2mumu) + +# Added by ASC +## c/ augment and select Upsilon(nS)->mumu candidates +BPHY1_Select_Upsi2mumu = DerivationFramework__Select_onia2mumu( + name = "BPHY1_Select_Upsi2mumu", + HypothesisName = "Upsi", + InputVtxContainerName = "BPHY1OniaCandidates", + VtxMassHypo = 9460.30, + MassMin = 7000.0, + MassMax = 12500.0, + Chi2Max = 200, + DoVertexType = 7) + +ToolSvc += BPHY1_Select_Upsi2mumu +print (BPHY1_Select_Upsi2mumu) + + + +#==================================================================== +# SET UP STREAM +#==================================================================== +streamName = derivationFlags.WriteDAOD_BPHY1Stream.StreamName +fileName = buildFileName( derivationFlags.WriteDAOD_BPHY1Stream ) +BPHY1Stream = MSMgr.NewPoolRootStream( streamName, fileName ) +BPHY1Stream.AcceptAlgs(["BPHY1Kernel"]) +# Special lines for thinning +# Thinning service name must match the one passed to the thinning tools +augStream = MSMgr.GetStream( streamName ) + + + +#-------------------------------------------------------------------- +## 5/ select the event. 
We only want to keep events that contain certain vertices which passed certain selection. +## This is specified by the "SelectionExpression" property, which contains the expression in the following format: +## +## "ContainerName.passed_HypoName > count" +## +## where "ContainerName" is output container form some Reco_* tool, "HypoName" is the hypothesis name setup in some "Select_*" +## tool and "count" is the number of candidates passing the selection you want to keep. + +expression = "count(BPHY1OniaCandidates.passed_Jpsi) > 0 || count(BPHY1OniaCandidates.passed_Psi) > 0 || count(BPHY1OniaCandidates.passed_Upsi) > 0" +from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__xAODStringSkimmingTool +BPHY1_SelectEvent = DerivationFramework__xAODStringSkimmingTool(name = "BPHY1_SelectEvent", + expression = expression) +ToolSvc += BPHY1_SelectEvent +print (BPHY1_SelectEvent) + +#-------------------------------------------------------------------- +## 6/ track and vertex thinning. We want to remove all reconstructed secondary vertices +## which hasn't passed any of the selections defined by (Select_*) tools. +## We also want to keep only tracks which are associates with either muons or any of the +## vertices that passed the selection. Multiple thinning tools can perform the +## selection. The final thinning decision is based OR of all the decisions (by default, +## although it can be changed by the JO). + +## a) thining out vertices that didn't pass any selection and idetifying tracks associated with +## selected vertices. The "VertexContainerNames" is a list of the vertex containers, and "PassFlags" +## contains all pass flags for Select_* tools that must be satisfied. The vertex is kept is it +## satisfy any of the listed selections. 
+ +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Thin_vtxTrk +BPHY1Thin_vtxTrk = DerivationFramework__Thin_vtxTrk( + name = "BPHY1Thin_vtxTrk", + TrackParticleContainerName = "InDetTrackParticles", + StreamName = streamName, + VertexContainerNames = ["BPHY1OniaCandidates"], + PassFlags = ["passed_Jpsi", "passed_Psi", "passed_Upsi"] ) + +ToolSvc += BPHY1Thin_vtxTrk + +## b) thinning out tracks that are not attached to muons. The final thinning decision is based on the OR operation +## between decision from this and the previous tools. +from DerivationFrameworkInDet.DerivationFrameworkInDetConf import DerivationFramework__MuonTrackParticleThinning +BPHY1MuonTPThinningTool = DerivationFramework__MuonTrackParticleThinning(name = "BPHY1MuonTPThinningTool", + MuonKey = "Muons", + StreamName = streamName, + InDetTrackParticlesKey = "InDetTrackParticles") +ToolSvc += BPHY1MuonTPThinningTool + +# Added by ASC +# Only save truth informtion directly associated with Onia +from DerivationFrameworkMCTruth.DerivationFrameworkMCTruthConf import DerivationFramework__GenericTruthThinning +BPHY1TruthThinTool = DerivationFramework__GenericTruthThinning(name = "BPHY1TruthThinTool", + StreamName = streamName, + ParticleSelectionString = "TruthParticles.pdgId == 443 || TruthParticles.pdgId == 100443 || TruthParticles.pdgId == 553 || TruthParticles.pdgId == 100553 || TruthParticles.pdgId == 200553", + PreserveDescendants = True, + PreserveAncestors = True) +ToolSvc += BPHY1TruthThinTool +print (BPHY1TruthThinTool) + + +#==================================================================== +# CREATE THE DERIVATION KERNEL ALGORITHM AND PASS THE ABOVE TOOLS +#==================================================================== +## 7/ IMPORTANT bit. Don't forget to pass the tools to the DerivationKernel! If you don't do that, they will not be +## be executed! 
+ +# Added by ASC +BPHY1ThinningTools = [BPHY1Thin_vtxTrk, BPHY1MuonTPThinningTool] +if globalflags.DataSource()=='geant4': + BPHY1ThinningTools.append(BPHY1TruthThinTool) + +# The name of the kernel (BPHY1Kernel in this case) must be unique to this derivation +from DerivationFrameworkCore.DerivationFrameworkCoreConf import DerivationFramework__DerivationKernel +DerivationFrameworkJob += CfgMgr.DerivationFramework__DerivationKernel( + "BPHY1Kernel", + AugmentationTools = [BPHY1_Reco_mumu, BPHY1_Select_Jpsi2mumu, BPHY1_Select_Psi2mumu, BPHY1_Select_Upsi2mumu], + SkimmingTools = [BPHY1_SelectEvent], + ThinningTools = BPHY1ThinningTools + ) + + + +#==================================================================== +# Slimming +#==================================================================== + +# Added by ASC +from DerivationFrameworkCore.SlimmingHelper import SlimmingHelper +BPHY1SlimmingHelper = SlimmingHelper("BPHY1SlimmingHelper") +AllVariables = [] +StaticContent = [] + +# Needed for trigger objects +BPHY1SlimmingHelper.IncludeMuonTriggerContent = True +BPHY1SlimmingHelper.IncludeBPhysTriggerContent = True + +## primary vertices +AllVariables += ["PrimaryVertices"] +StaticContent += ["xAOD::VertexContainer#BPHY1RefittedPrimaryVertices"] +StaticContent += ["xAOD::VertexAuxContainer#BPHY1RefittedPrimaryVerticesAux."] + +## ID track particles +AllVariables += ["InDetTrackParticles"] + +## combined / extrapolated muon track particles +## (note: for tagged muons there is no extra TrackParticle collection since the ID tracks +## are store in InDetTrackParticles collection) +AllVariables += ["CombinedMuonTrackParticles"] +AllVariables += ["ExtrapolatedMuonTrackParticles"] + +## muon container +AllVariables += ["Muons"] + +## Jpsi candidates +StaticContent += ["xAOD::VertexContainer#%s" % BPHY1_Reco_mumu.OutputVtxContainerName] +StaticContent += ["xAOD::VertexAuxContainer#%sAux." 
% BPHY1_Reco_mumu.OutputVtxContainerName] +## we have to disable vxTrackAtVertex branch since it is not xAOD compatible +StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY1_Reco_mumu.OutputVtxContainerName] + +# Added by ASC +# Truth information for MC only +if isSimulation: + AllVariables += ["TruthEvents","TruthParticles","TruthVertices","MuonTruthParticles"] + +BPHY1SlimmingHelper.AllVariables = AllVariables +BPHY1SlimmingHelper.StaticContent = StaticContent +BPHY1SlimmingHelper.AppendContentToStream(BPHY1Stream) diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY10.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY10.py new file mode 100644 index 0000000000000000000000000000000000000000..2f9dcb083a9ee0f4a42a59c2935014c754bce043 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY10.py @@ -0,0 +1,477 @@ +#==================================================================== +# BPHY10.py +# Bs>J/psiKK +# It requires the reductionConf flag BPHY10 in Reco_tf.py +#==================================================================== + +# Set up common services and job object. 
+# This should appear in ALL derivation job options +from DerivationFrameworkCore.DerivationFrameworkMaster import * + +isSimulation = False +if globalflags.DataSource()=='geant4': + isSimulation = True + +print(isSimulation) + + +#==================================================================== +# AUGMENTATION TOOLS +#==================================================================== +## 1/ setup vertexing tools and services +#include( "JpsiUpsilonTools/configureServices.py" ) + +include("DerivationFrameworkBPhys/configureVertexing.py") +BPHY10_VertexTools = BPHYVertexTools("BPHY10") + + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__AugOriginalCounts +BPHY10_AugOriginalCounts = DerivationFramework__AugOriginalCounts( + name = "BPHY10_AugOriginalCounts", + VertexContainer = "PrimaryVertices", + TrackContainer = "InDetTrackParticles" ) +ToolSvc += BPHY10_AugOriginalCounts + + +#-------------------------------------------------------------------- +## 2/ setup JpsiFinder tool +## These are general tools independent of DerivationFramework that do the +## actual vertex fitting and some pre-selection. 
+from JpsiUpsilonTools.JpsiUpsilonToolsConf import Analysis__JpsiFinder +BPHY10JpsiFinder = Analysis__JpsiFinder( + name = "BPHY10JpsiFinder", + OutputLevel = INFO, + muAndMu = True, + muAndTrack = False, + TrackAndTrack = False, + assumeDiMuons = True, + invMassUpper = 4000.0, + invMassLower = 2600.0, + Chi2Cut = 200., + oppChargesOnly = True, + combOnly = True, + atLeastOneComb = False, + useCombinedMeasurement = False, # Only takes effect if combOnly=True + muonCollectionKey = "Muons", + TrackParticleCollection = "InDetTrackParticles", + V0VertexFitterTool = BPHY10_VertexTools.TrkV0Fitter, # V0 vertex fitter + useV0Fitter = False, # if False a TrkVertexFitterTool will be used + TrkVertexFitterTool = BPHY10_VertexTools.TrkVKalVrtFitter, # VKalVrt vertex fitter + TrackSelectorTool = BPHY10_VertexTools.InDetTrackSelectorTool, + VertexPointEstimator = BPHY10_VertexTools.VtxPointEstimator, + useMCPCuts = False) + +ToolSvc += BPHY10JpsiFinder +print(BPHY10JpsiFinder) + +#-------------------------------------------------------------------- +## 3/ setup the vertex reconstruction "call" tool(s). They are part of the derivation framework. +## These Augmentation tools add output vertex collection(s) into the StoreGate and add basic +## decorations which do not depend on the vertex mass hypothesis (e.g. lxy, ptError, etc). +## There should be one tool per topology, i.e. Jpsi and Psi(2S) do not need two instance of the +## Reco tool is the JpsiFinder mass window is wide enough. 
+from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Reco_Vertex +BPHY10JpsiSelectAndWrite = DerivationFramework__Reco_Vertex( + name = "BPHY10JpsiSelectAndWrite", + VertexSearchTool = BPHY10JpsiFinder, + OutputVtxContainerName = "BPHY10JpsiCandidates", + PVContainerName = "PrimaryVertices", + RefPVContainerName = "SHOULDNOTBEUSED", + # RefPVContainerName = "BPHY10RefJpsiPrimaryVertices", + # RefitPV = True, + # MaxPVrefit = 10000, + DoVertexType = 1) + +ToolSvc += BPHY10JpsiSelectAndWrite +print(BPHY10JpsiSelectAndWrite) + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Select_onia2mumu + +## a/ augment and select Jpsi->mumu candidates +BPHY10_Select_Jpsi2mumu = DerivationFramework__Select_onia2mumu( + name = "BPHY10_Select_Jpsi2mumu", + HypothesisName = "Jpsi", + InputVtxContainerName = "BPHY10JpsiCandidates", + VtxMassHypo = 3096.916, + MassMin = 2600.0, + MassMax = 4000.0, + Chi2Max = 200, + DoVertexType =1) + + +ToolSvc += BPHY10_Select_Jpsi2mumu +print(BPHY10_Select_Jpsi2mumu) + + +## 4/ setup a new vertexing tool (necessary due to use of mass constraint) +from TrkVKalVrtFitter.TrkVKalVrtFitterConf import Trk__TrkVKalVrtFitter +BdKstVertexFit = Trk__TrkVKalVrtFitter( + name = "BdKstVertexFit", + Extrapolator = BPHY10_VertexTools.InDetExtrapolator, + FirstMeasuredPoint = True, + MakeExtendedVertex = True) + +ToolSvc += BdKstVertexFit +print(BdKstVertexFit) + +## 5/ setup the Jpsi+2 track finder +from JpsiUpsilonTools.JpsiUpsilonToolsConf import Analysis__JpsiPlus2Tracks +BPHY10BdJpsiKst = Analysis__JpsiPlus2Tracks( + name = "BPHY10BdJpsiKst", + OutputLevel = INFO, + kaonkaonHypothesis = False, + pionpionHypothesis = False, + kaonpionHypothesis = True, + trkThresholdPt = 500.0, + trkMaxEta = 3.0, + BThresholdPt = 5000., + BMassLower = 4300.0, + BMassUpper = 6300.0, + JpsiContainerKey = "BPHY10JpsiCandidates", + TrackParticleCollection = "InDetTrackParticles", + #MuonsUsedInJpsi = 
"Muons", #Don't remove all muons, just those in J/psi candidate (see the following cut) + ExcludeCrossJpsiTracks = False, #setting this to False rejects the muons from J/psi candidate + TrkVertexFitterTool = BdKstVertexFit, + TrackSelectorTool = BPHY10_VertexTools.InDetTrackSelectorTool, + VertexPointEstimator = BPHY10_VertexTools.VtxPointEstimator, + UseMassConstraint = True, + #DiTrackMassUpper = 1500., + #DiTrackMassLower = 500., + Chi2Cut = 10.0, + DiTrackPt = 500., + TrkQuadrupletMassLower = 3500.0, + TrkQuadrupletMassUpper = 6800.0, + #FinalDiTrackMassUpper = 1000., + #FinalDiTrackMassLower = 800., + #TrkDeltaZ = 20., #Normally, this cut should not be used since it is lifetime-dependent + FinalDiTrackPt = 500. + ) + +ToolSvc += BPHY10BdJpsiKst +print(BPHY10BdJpsiKst) + + +## 6/ setup the combined augmentation/skimming tool for the BdKst +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Reco_Vertex +BPHY10BdKstSelectAndWrite = DerivationFramework__Reco_Vertex( + name = "BPHY10BdKstSelectAndWrite", + Jpsi2PlusTrackName = BPHY10BdJpsiKst, + OutputVtxContainerName = "BPHY10BdJpsiKstCandidates", + PVContainerName = "PrimaryVertices", + RefPVContainerName = "BPHY10RefittedPrimaryVertices", + RefitPV = True, + MaxPVrefit = 10000, + DoVertexType = 7) + +ToolSvc += BPHY10BdKstSelectAndWrite +print(BPHY10BdKstSelectAndWrite) + +## b/ augment and select Bd->JpsiKst candidates +# set mass hypothesis (K pi) +BPHY10_Select_Bd2JpsiKst = DerivationFramework__Select_onia2mumu( + name = "BPHY10_Select_Bd2JpsiKst", + HypothesisName = "Bd", + InputVtxContainerName = "BPHY10BdJpsiKstCandidates", + TrkMasses = [105.658, 105.658, 493.677, 139.570], + VtxMassHypo = 5279.6, + MassMin = 100.0, #no mass cuts here + MassMax = 100000.0, #no mass cuts here + Chi2Max = 200) + +ToolSvc += BPHY10_Select_Bd2JpsiKst +print(BPHY10_Select_Bd2JpsiKst) + +## c/ augment and select Bdbar->JpsiKstbar candidates +# set mass hypothesis (pi K) 
# BPHY10 (Bd -> J/psi K*) derivation job options -- cascade-fit section.
# Every tool below is a Gaudi configurable: "ToolSvc += x" registers it with
# the framework and print(x) merely echoes the configuration to the log.
# Mass/energy values appear to be PDG masses in MeV (105.658 ~ mu,
# 5279.6 ~ Bd) -- TODO(review): confirm the fitter's expected units.

## Conjugate-hypothesis selector: flags Bdbar -> J/psi K*bar candidates in the
## same vertex container (track mass assignments: mu mu pi K).
BPHY10_Select_Bd2JpsiKstbar = DerivationFramework__Select_onia2mumu(
    name                  = "BPHY10_Select_Bd2JpsiKstbar",
    HypothesisName        = "Bdbar",
    InputVtxContainerName = "BPHY10BdJpsiKstCandidates",
    TrkMasses             = [105.658, 105.658, 139.570, 493.677],
    VtxMassHypo           = 5279.6,
    MassMin               = 100.0,      #no mass cuts here
    MassMax               = 100000.0,   #no mass cuts here
    Chi2Max               = 200)

ToolSvc += BPHY10_Select_Bd2JpsiKstbar
print(BPHY10_Select_Bd2JpsiKstbar)


## 7/ call the V0Finder if a Jpsi has been found
doSimpleV0Finder = False
if doSimpleV0Finder:
    include("DerivationFrameworkBPhys/configureSimpleV0Finder.py")
else:
    include("DerivationFrameworkBPhys/configureV0Finder.py")

# BPHYV0FinderTools is brought into scope by the include() above.
BPHY10_V0FinderTools = BPHYV0FinderTools("BPHY10")
print(BPHY10_V0FinderTools)

from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Reco_V0Finder
BPHY10_Reco_V0Finder = DerivationFramework__Reco_V0Finder(
    name                   = "BPHY10_Reco_V0Finder",
    V0FinderTool           = BPHY10_V0FinderTools.V0FinderTool,
    #OutputLevel            = DEBUG,
    V0ContainerName        = "BPHY10RecoV0Candidates",
    KshortContainerName    = "BPHY10RecoKshortCandidates",
    LambdaContainerName    = "BPHY10RecoLambdaCandidates",
    LambdabarContainerName = "BPHY10RecoLambdabarCandidates",
    # only run the V0 search for events that have at least one J/psi candidate
    CheckVertexContainers  = ['BPHY10JpsiCandidates'])

ToolSvc += BPHY10_Reco_V0Finder
print(BPHY10_Reco_V0Finder)

## 8/ setup the cascade vertexing tool
from TrkVKalVrtFitter.TrkVKalVrtFitterConf import Trk__TrkVKalVrtFitter
JpsiV0VertexFit = Trk__TrkVKalVrtFitter(
    name                 = "JpsiV0VertexFit",
    #OutputLevel          = DEBUG,
    Extrapolator         = BPHY10_VertexTools.InDetExtrapolator,
    #FirstMeasuredPoint   = True,
    FirstMeasuredPoint   = False,
    CascadeCnstPrecision = 1e-6,
    MakeExtendedVertex   = True)

ToolSvc += JpsiV0VertexFit
print(JpsiV0VertexFit)

## 9/ setup the Jpsi+V0 finder
## a/ Bd->JpsiKshort
from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__JpsiPlusV0Cascade
BPHY10JpsiKshort = DerivationFramework__JpsiPlusV0Cascade(
    name                    = "BPHY10JpsiKshort",
    #OutputLevel             = DEBUG,
    HypothesisName          = "Bd",
    TrkVertexFitterTool     = JpsiV0VertexFit,
    V0Hypothesis            = 310,   # presumably the K_S0 PDG id -- confirm
    JpsiMassLowerCut        = 2800.,
    JpsiMassUpperCut        = 4000.,
    V0MassLowerCut          = 400.,
    V0MassUpperCut          = 600.,
    MassLowerCut            = 4300.,
    MassUpperCut            = 6300.,
    RefitPV                 = True,
    RefPVContainerName      = "BPHY10RefittedPrimaryVertices",
    JpsiVertices            = "BPHY10JpsiCandidates",
    CascadeVertexCollections= ["BPHY10JpsiKshortCascadeSV2", "BPHY10JpsiKshortCascadeSV1"],
    V0Vertices              = "BPHY10RecoV0Candidates")

ToolSvc += BPHY10JpsiKshort
print(BPHY10JpsiKshort)

## b/ Lambda_b->JpsiLambda
from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__JpsiPlusV0Cascade
BPHY10JpsiLambda = DerivationFramework__JpsiPlusV0Cascade(
    name                    = "BPHY10JpsiLambda",
    #OutputLevel             = DEBUG,
    HypothesisName          = "Lambda_b",
    TrkVertexFitterTool     = JpsiV0VertexFit,
    V0Hypothesis            = 3122,  # presumably the Lambda PDG id -- confirm
    JpsiMassLowerCut        = 2800.,
    JpsiMassUpperCut        = 4000.,
    V0MassLowerCut          = 1050.,
    V0MassUpperCut          = 1250.,
    MassLowerCut            = 4600.,
    MassUpperCut            = 6600.,
    RefitPV                 = True,
    RefPVContainerName      = "BPHY10RefittedPrimaryVertices",
    JpsiVertices            = "BPHY10JpsiCandidates",
    CascadeVertexCollections= ["BPHY10JpsiLambdaCascadeSV2", "BPHY10JpsiLambdaCascadeSV1"],
    V0Vertices              = "BPHY10RecoV0Candidates")

ToolSvc += BPHY10JpsiLambda
print(BPHY10JpsiLambda)

## c/ Lambda_bbar->JpsiLambdabar
from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__JpsiPlusV0Cascade
BPHY10JpsiLambdabar = DerivationFramework__JpsiPlusV0Cascade(
    name                    = "BPHY10JpsiLambdabar",
    HypothesisName          = "Lambda_bbar",
    #OutputLevel             = DEBUG,
    TrkVertexFitterTool     = JpsiV0VertexFit,
    V0Hypothesis            = -3122, # anti-Lambda: conjugate of the above
    JpsiMassLowerCut        = 2800.,
    JpsiMassUpperCut        = 4000.,
    V0MassLowerCut          = 1050.,
    V0MassUpperCut          = 1250.,
    MassLowerCut            = 4600.,
    MassUpperCut            = 6600.,
    RefitPV                 = True,
    RefPVContainerName      = "BPHY10RefittedPrimaryVertices",
    JpsiVertices            = "BPHY10JpsiCandidates",
    CascadeVertexCollections= ["BPHY10JpsiLambdabarCascadeSV2", "BPHY10JpsiLambdabarCascadeSV1"],
    V0Vertices              = "BPHY10RecoV0Candidates")

ToolSvc += BPHY10JpsiLambdabar
print(BPHY10JpsiLambdabar)

# Collect every cascade output container name; used further down for the
# slimming StaticContent loop.
CascadeCollections = []
CascadeCollections += BPHY10JpsiKshort.CascadeVertexCollections
CascadeCollections += BPHY10JpsiLambda.CascadeVertexCollections
CascadeCollections += BPHY10JpsiLambdabar.CascadeVertexCollections



if not isSimulation: #Only Skim Data
    from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__xAODStringSkimmingTool
    BPHY10_SelectBdJpsiKstEvent = DerivationFramework__xAODStringSkimmingTool(
        name = "BPHY10_SelectBdJpsiKstEvent",
        #expression = "(count(BPHY10BdJpsiKstCandidates.passed_Bd > 0) + count(BPHY10BdJpsiKstCandidates.passed_BdBar > 0) + count(BPHY10RecoV0Candidates) + count(RecoKshortContainerName) + count(RecoLambdaContainerName) + count(RecoLambdabarContainerName) ) > 0")
        expression = "(count(BPHY10BdJpsiKstCandidates.passed_Bd > 0) + count(BPHY10BdJpsiKstCandidates.passed_Bdbar > 0) + count(BPHY10JpsiKshortCascadeSV1.x > -999) + count(BPHY10JpsiLambdaCascadeSV1.x > -999) + count(BPHY10JpsiLambdabarCascadeSV1.x > -999) ) > 0")

    ToolSvc += BPHY10_SelectBdJpsiKstEvent
    print(BPHY10_SelectBdJpsiKstEvent)




    #====================================================================
    # Make event selection based on an OR of the input skimming tools
    #====================================================================

    from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__FilterCombinationOR
    BPHY10SkimmingOR = CfgMgr.DerivationFramework__FilterCombinationOR(
        "BPHY10SkimmingOR",
        FilterList = [BPHY10_SelectBdJpsiKstEvent],)
    ToolSvc += BPHY10SkimmingOR
    print(BPHY10SkimmingOR)

from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Thin_vtxTrk
+BPHY10_thinningTool_Tracks = DerivationFramework__Thin_vtxTrk( + name = "BPHY10_thinningTool_Tracks", + TrackParticleContainerName = "InDetTrackParticles", + VertexContainerNames = ["BPHY10BdJpsiKstCandidates","BPHY10JpsiKshortCascadeSV1","BPHY10JpsiKshortCascadeSV2","BPHY10JpsiLambdaCascadeSV1","BPHY10JpsiLambdaCascadeSV2","BPHY10JpsiLambdabarCascadeSV1","BPHY10JpsiLambdabarCascadeSV2"], + PassFlags = ["passed_Bd", "passed_Bdbar"] ) + +ToolSvc += BPHY10_thinningTool_Tracks + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__BPhysPVThinningTool +BPHY10_thinningTool_PV = DerivationFramework__BPhysPVThinningTool( + name = "BPHY10_thinningTool_PV", + CandidateCollections = ["BPHY10BdJpsiKstCandidates","BPHY10JpsiKshortCascadeSV1","BPHY10JpsiKshortCascadeSV2","BPHY10JpsiLambdaCascadeSV1","BPHY10JpsiLambdaCascadeSV2","BPHY10JpsiLambdabarCascadeSV1","BPHY10JpsiLambdabarCascadeSV2"], + KeepPVTracks =True + ) + +ToolSvc += BPHY10_thinningTool_PV + + +## b) thinning out tracks that are not attached to muons. The final thinning decision is based on the OR operation +## between decision from this and the previous tools. 
from DerivationFrameworkInDet.DerivationFrameworkInDetConf import DerivationFramework__MuonTrackParticleThinning
BPHY10MuonTPThinningTool = DerivationFramework__MuonTrackParticleThinning(
    name                   = "BPHY10MuonTPThinningTool",
    MuonKey                = "Muons",
    InDetTrackParticlesKey = "InDetTrackParticles")
ToolSvc += BPHY10MuonTPThinningTool


#====================================================================
# CREATE THE DERIVATION KERNEL ALGORITHM AND PASS THE ABOVE TOOLS
#====================================================================

# NOTE(review): the thinning tools configured above are registered with
# ToolSvc but are never added to this list, so the kernel below runs with
# NO thinning. The BPHY11 script in this same patch does populate its
# equivalent list -- confirm this emptiness is intentional.
# ("thining" spelling kept as in the original.)
thiningCollection = []

print(thiningCollection)


# The name of the kernel (BPHY10Kernel in this case) must be unique to this derivation
from DerivationFrameworkCore.DerivationFrameworkCoreConf import DerivationFramework__DerivationKernel
DerivationFrameworkJob += CfgMgr.DerivationFramework__DerivationKernel(
    "BPHY10Kernel",
    AugmentationTools = [BPHY10JpsiSelectAndWrite, BPHY10_Select_Jpsi2mumu,
                         BPHY10BdKstSelectAndWrite, BPHY10_Select_Bd2JpsiKst, BPHY10_Select_Bd2JpsiKstbar,
                         BPHY10_Reco_V0Finder, BPHY10JpsiKshort, BPHY10JpsiLambda, BPHY10JpsiLambdabar,
                         BPHY10_AugOriginalCounts],
    #Only skim if not MC
    SkimmingTools = [BPHY10SkimmingOR] if not isSimulation else [],
    ThinningTools = thiningCollection
    )

#====================================================================
# SET UP STREAM
#====================================================================
streamName = derivationFlags.WriteDAOD_BPHY10Stream.StreamName
fileName   = buildFileName( derivationFlags.WriteDAOD_BPHY10Stream )
BPHY10Stream = MSMgr.NewPoolRootStream( streamName, fileName )
BPHY10Stream.AcceptAlgs(["BPHY10Kernel"])

# Special lines for thinning
# Thinning service name must match the one passed to the thinning tools
from AthenaServices.Configurables import ThinningSvc, createThinningSvc
augStream = MSMgr.GetStream( streamName )
evtStream = augStream.GetEventStream()

BPHY10ThinningSvc = createThinningSvc( svcName="BPHY10ThinningSvc", outStreams=[evtStream] )
svcMgr += BPHY10ThinningSvc

#====================================================================
# Slimming
#====================================================================

# Added by ASC
from DerivationFrameworkCore.SlimmingHelper import SlimmingHelper
BPHY10SlimmingHelper = SlimmingHelper("BPHY10SlimmingHelper")
AllVariables  = []
StaticContent = []

# Needed for trigger objects
# NOTE(review): TRUE (all caps) relies on the Athena job-option globals;
# plain Python True would be the safer spelling -- confirm before changing.
BPHY10SlimmingHelper.IncludeMuonTriggerContent  = TRUE
BPHY10SlimmingHelper.IncludeBPhysTriggerContent = TRUE

## primary vertices
AllVariables  += ["PrimaryVertices"]
StaticContent += ["xAOD::VertexContainer#BPHY10RefittedPrimaryVertices"]
StaticContent += ["xAOD::VertexAuxContainer#BPHY10RefittedPrimaryVerticesAux."]

## ID track particles
AllVariables += ["InDetTrackParticles"]

## combined / extrapolated muon track particles
## (note: for tagged muons there is no extra TrackParticle collection since the ID tracks
##        are store in InDetTrackParticles collection)
AllVariables += ["CombinedMuonTrackParticles"]
AllVariables += ["ExtrapolatedMuonTrackParticles"]

## muon container
AllVariables += ["Muons"]


## Jpsi candidates
StaticContent += ["xAOD::VertexContainer#%s" % BPHY10JpsiSelectAndWrite.OutputVtxContainerName]
## we have to disable vxTrackAtVertex branch since it is not xAOD compatible
StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY10JpsiSelectAndWrite.OutputVtxContainerName]

StaticContent += ["xAOD::VertexContainer#%s" % BPHY10BdKstSelectAndWrite.OutputVtxContainerName]
StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY10BdKstSelectAndWrite.OutputVtxContainerName]

StaticContent += ["xAOD::VertexContainer#%s" % 'BPHY10RecoV0Candidates']
StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % 'BPHY10RecoV0Candidates']
StaticContent += ["xAOD::VertexContainer#%s" % 'BPHY10RecoKshortCandidates']
StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % 'BPHY10RecoKshortCandidates']
StaticContent += ["xAOD::VertexContainer#%s" % 'BPHY10RecoLambdaCandidates']
StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % 'BPHY10RecoLambdaCandidates']
StaticContent += ["xAOD::VertexContainer#%s" % 'BPHY10RecoLambdabarCandidates']
StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % 'BPHY10RecoLambdabarCandidates']

# Keep every cascade vertex container produced above.
for cascades in CascadeCollections:
    StaticContent += ["xAOD::VertexContainer#%s" % cascades]
    StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % cascades]

# Tagging information (in addition to that already requested by usual algorithms)
AllVariables += ["GSFTrackParticles", "MuonSpectrometerTrackParticles" ]



# Added by ASC
# Truth information for MC only
if isSimulation:
    AllVariables += ["TruthEvents","TruthParticles","TruthVertices","MuonTruthParticles"]

AllVariables = list(set(AllVariables)) # remove duplicates

BPHY10SlimmingHelper.AllVariables  = AllVariables
BPHY10SlimmingHelper.StaticContent = StaticContent
BPHY10SlimmingHelper.SmartCollections = []

BPHY10SlimmingHelper.AppendContentToStream(BPHY10Stream)


diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY11.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY11.py
new file mode 100644
index 0000000000000000000000000000000000000000..a0ea555b5e45204860b990d96b493a50674d0e43
--- /dev/null
+++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY11.py
@@ -0,0 +1,506 @@
#====================================================================
# BPHY11.py
# Lambda_b -> J/psi p K
# It requires the reductionConf flag BPHY11 in Reco_tf.py
#====================================================================

# Set up common services and job object.
# This should appear in ALL derivation job options
from DerivationFrameworkCore.DerivationFrameworkMaster import *

# Data/MC switch: skimming further down is only applied on data.
isSimulation = False
if globalflags.DataSource()=='geant4':
    isSimulation = True

print(isSimulation)


#====================================================================
# AUGMENTATION TOOLS
#====================================================================
## 1/ setup vertexing tools and services
#include( "JpsiUpsilonTools/configureServices.py" )

# BPHYVertexTools is brought into scope by this include().
include("DerivationFrameworkBPhys/configureVertexing.py")
BPHY11_VertexTools = BPHYVertexTools("BPHY11")


# Record the original vertex/track multiplicities before thinning.
from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__AugOriginalCounts
BPHY11_AugOriginalCounts = DerivationFramework__AugOriginalCounts(
    name            = "BPHY11_AugOriginalCounts",
    VertexContainer = "PrimaryVertices",
    TrackContainer  = "InDetTrackParticles"
)

ToolSvc += BPHY11_AugOriginalCounts


#--------------------------------------------------------------------
## 2/ setup JpsiFinder tool
##    These are general tools independent of DerivationFramework that do the
##    actual vertex fitting and some pre-selection.
from JpsiUpsilonTools.JpsiUpsilonToolsConf import Analysis__JpsiFinder
BPHY11_JpsiFinder = Analysis__JpsiFinder(
    name                    = "BPHY11_JpsiFinder",
    OutputLevel             = INFO,
    muAndMu                 = True,
    muAndTrack              = False,
    TrackAndTrack           = False,
    assumeDiMuons           = True,
    invMassUpper            = 3600.0,
    invMassLower            = 2600.0,
    Chi2Cut                 = 30.0,
    oppChargesOnly          = False,
    allChargeCombinations   = True,
    combOnly                = False,
    atLeastOneComb          = True,
    useCombinedMeasurement  = False, # Only takes effect if combOnly=True
    muonCollectionKey       = "Muons",
    TrackParticleCollection = "InDetTrackParticles",
    V0VertexFitterTool      = BPHY11_VertexTools.TrkV0Fitter,      # V0 vertex fitter
    useV0Fitter             = False,                               # if False a TrkVertexFitterTool will be used
    TrkVertexFitterTool     = BPHY11_VertexTools.TrkVKalVrtFitter, # VKalVrt vertex fitter
    TrackSelectorTool       = BPHY11_VertexTools.InDetTrackSelectorTool,
    VertexPointEstimator    = BPHY11_VertexTools.VtxPointEstimator,
    useMCPCuts              = False
)

ToolSvc += BPHY11_JpsiFinder
print(BPHY11_JpsiFinder)

#--------------------------------------------------------------------
## 3/ setup the vertex reconstruction "call" tool(s). They are part of the derivation framework.
##    These Augmentation tools add output vertex collection(s) into the StoreGate and add basic
##    decorations which do not depend on the vertex mass hypothesis (e.g. lxy, ptError, etc).
##    There should be one tool per topology, i.e. Jpsi and Psi(2S) do not need two instance of the
##    Reco tool is the JpsiFinder mass window is wide enough.
from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Reco_Vertex
BPHY11_JpsiSelectAndWrite = DerivationFramework__Reco_Vertex(
    name                   = "BPHY11_JpsiSelectAndWrite",
    VertexSearchTool       = BPHY11_JpsiFinder,
    OutputVtxContainerName = "BPHY11_JpsiCandidates",
    PVContainerName        = "PrimaryVertices",
    RefPVContainerName     = "SHOULDNOTBEUSED",  # no PV refit at this stage
    DoVertexType           = 1
)

ToolSvc += BPHY11_JpsiSelectAndWrite
print(BPHY11_JpsiSelectAndWrite)


from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Select_onia2mumu

## a/ augment and select Jpsi->mumu candidates
BPHY11_Select_Jpsi2mumu = DerivationFramework__Select_onia2mumu(
    name                  = "BPHY11_Select_Jpsi2mumu",
    HypothesisName        = "Jpsi",
    InputVtxContainerName = "BPHY11_JpsiCandidates",
    VtxMassHypo           = 3096.900,
    MassMin               = 2600.0,
    MassMax               = 3600.0,
    Chi2Max               = 30.0,
    DoVertexType          = 1
)

ToolSvc += BPHY11_Select_Jpsi2mumu
print(BPHY11_Select_Jpsi2mumu)


## 4/ setup a new vertexing tool (necessary due to use of mass constraint)
from TrkVKalVrtFitter.TrkVKalVrtFitterConf import Trk__TrkVKalVrtFitter
LbJpsipKVertexFit = Trk__TrkVKalVrtFitter(
    name                  = "LbJpsipKVertexFit",
    Extrapolator          = BPHY11_VertexTools.InDetExtrapolator,
    FirstMeasuredPoint    = False,
    MakeExtendedVertex    = True,
    usePassWithTrkErrCnst = True
)

ToolSvc += LbJpsipKVertexFit
print(LbJpsipKVertexFit)


## 5/ setup the Jpsi+2 track finder
from JpsiUpsilonTools.JpsiUpsilonToolsConf import Analysis__JpsiPlus2Tracks
BPHY11_LbJpsipK = Analysis__JpsiPlus2Tracks(
    name                    = "BPHY11_LbJpsipK",
    OutputLevel             = INFO,
    kaonkaonHypothesis      = False,
    pionpionHypothesis      = False,
    kaonpionHypothesis      = False,
    kaonprotonHypothesis    = True,
    oppChargesOnly          = False,
    trkThresholdPt          = 1500.0,
    trkMaxEta               = 3.0,
    BMassUpper              = 6500.0,
    BMassLower              = 4000.0,
#    DiTrackMassUpper        = 10000.,
    DiTrackMassLower        = 1000.,
    Chi2Cut                 = 200.0,
    TrkQuadrupletMassUpper  = 7000.0,
    TrkQuadrupletMassLower  = 4000.0,
    JpsiContainerKey        = "BPHY11_JpsiCandidates",
    TrackParticleCollection = "InDetTrackParticles",
    MuonsUsedInJpsi         = "Muons",
    TrkVertexFitterTool     = LbJpsipKVertexFit,
    TrackSelectorTool       = BPHY11_VertexTools.InDetTrackSelectorTool,
    UseMassConstraint       = True,
    UseVertexFittingWithPV  = True,
    VertexContainer         = "PrimaryVertices"
)

ToolSvc += BPHY11_LbJpsipK
print(BPHY11_LbJpsipK)

## 6/ setup the combined augmentation/skimming tool for the Bpm
from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Reco_Vertex
# NOTE(review): this Reco_Vertex instance passes the finder via
# "Jpsi2PlusTrackName", while BPHY11_JpsiSelectAndWrite above uses
# "VertexSearchTool" -- confirm both property names exist on this
# configurable in the targeted release.
BPHY11_LbJpsipKSelectAndWrite = DerivationFramework__Reco_Vertex(
    name                   = "BPHY11_LbJpsipKSelectAndWrite",
    Jpsi2PlusTrackName     = BPHY11_LbJpsipK,
    OutputVtxContainerName = "LbJpsipKCandidates",
    PVContainerName        = "PrimaryVertices",
    RefPVContainerName     = "BPHY11_RefittedPrimaryVertices",
    RefitPV                = True,
    MaxPVrefit             = 10000,
    DoVertexType           = 7
)

ToolSvc += BPHY11_LbJpsipKSelectAndWrite
print(BPHY11_LbJpsipKSelectAndWrite)

## b/ augment and select Lb->JpsipK candidates
BPHY11_Select_Lb2JpsipK = DerivationFramework__Select_onia2mumu(
    name                  = "BPHY11_Select_Lb2JpsipK",
    HypothesisName        = "Lb_pK",
    InputVtxContainerName = "LbJpsipKCandidates",
    TrkMasses             = [105.658, 105.658, 938.272, 493.677],
    VtxMassHypo           = 5619.6,
    MassMin               = 4000.0,
    MassMax               = 6500.0,
    Chi2Max               = 200,
    LxyMin                = 0.3
)

ToolSvc += BPHY11_Select_Lb2JpsipK
print(BPHY11_Select_Lb2JpsipK)


## b/ augment and select Lb->JpsiKp candidates
## (same container, track-mass assignment of K and p swapped)
BPHY11_Select_Lb2JpsiKp = DerivationFramework__Select_onia2mumu(
    name                  = "BPHY11_Select_Lb2JpsiKp",
    HypothesisName        = "Lb_Kp",
    InputVtxContainerName = "LbJpsipKCandidates",
    TrkMasses             = [105.658, 105.658, 493.677, 938.272],
    VtxMassHypo           = 5619.6,
    MassMin               = 4000.0,
    MassMax               = 6500.0,
    Chi2Max               = 200.0,
    LxyMin                = 0.3
)

ToolSvc += BPHY11_Select_Lb2JpsiKp
print(BPHY11_Select_Lb2JpsiKp)

#-------------------------------------------------------
# Refit the Lb candidates with one additional track attached
# (UseAdditionalTrack=True), all non-muon tracks assigned the pion mass.
from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__ReVertex
BPHY11_LbPlusTrk = DerivationFramework__ReVertex(
    name                   = "BPHY11_LbPlusTrk",
    InputVtxContainerName  = "LbJpsipKCandidates",
    HypothesisNames        = [ BPHY11_Select_Lb2JpsipK.HypothesisName, BPHY11_Select_Lb2JpsiKp.HypothesisName ],
    TrackIndices           = [ 0, 1, 2, 3 ],
    UseAdditionalTrack     = True,
    UseMassConstraint      = True,
    UseVertexFittingWithPV = True,
#    VertexMass             = 5619.6,
    SubVertexMass          = 3096.900,
    MassInputParticles     = [ 105.658, 105.658, 139.57, 139.57, 139.57 ],
    SubVertexTrackIndices  = [ 1, 2 ],
    BMassUpper             = 10000.0,
    BMassLower             = 4000.0,
    Chi2Cut                = 5.0,
    TrkVertexFitterTool    = LbJpsipKVertexFit,
    OutputVtxContainerName = "LbJpsipKTrkCandidates"
)

ToolSvc += BPHY11_LbPlusTrk
print(BPHY11_LbPlusTrk)

BPHY11_Select_LbPlusTrk = DerivationFramework__Select_onia2mumu(
    name                  = "BPHY11_Select_LbPlusTrk",
    HypothesisName        = "LbPlusTrk",
    InputVtxContainerName = "LbJpsipKTrkCandidates",
    TrkMasses             = BPHY11_LbPlusTrk.MassInputParticles,
    VtxMassHypo           = 5619.6,
    MassMin               = 4000.0,
    MassMax               = 10000.0,
    Chi2Max               = 50.0
)

ToolSvc += BPHY11_Select_LbPlusTrk
print(BPHY11_Select_LbPlusTrk)

#-------------------------------------------------------
# Refit the selected Lb_pK candidates with both the Lb and the J/psi
# sub-vertex mass constraints applied.
BPHY11_Lb_pK_ReFit = DerivationFramework__ReVertex(
    name                   = "BPHY11_Lb_pK_ReFit",
    InputVtxContainerName  = "LbJpsipKCandidates",
    HypothesisNames        = [ BPHY11_Select_Lb2JpsipK.HypothesisName ],
    TrackIndices           = [ 0, 1, 2, 3 ],
    UseMassConstraint      = True,
    UseVertexFittingWithPV = True,
    VertexMass             = 5619.6,
    SubVertexMass          = 3096.900,
    MassInputParticles     = [ 105.658, 105.658, 938.272, 493.677 ],
    SubVertexTrackIndices  = [ 1, 2 ],
    TrkVertexFitterTool    = LbJpsipKVertexFit,
    OutputVtxContainerName = "LbJpsipKCandidatesReFit"
)

ToolSvc += BPHY11_Lb_pK_ReFit
print(BPHY11_Lb_pK_ReFit)

# Wide-open selector: decorates the refit container without cutting.
BPHY11_Select_Lb_pK_ReFit = DerivationFramework__Select_onia2mumu(
    name                  = "BPHY11_Select_Lb_pK_ReFit",
    HypothesisName        = "Lb_pK_ReFit",
    InputVtxContainerName = "LbJpsipKCandidatesReFit",
    TrkMasses             = BPHY11_Lb_pK_ReFit.MassInputParticles,
    VtxMassHypo           = 5619.6,
    MassMin               = 0.0,
    MassMax               = 1.0e10,
    Chi2Max               = 1.0e10
)

ToolSvc += BPHY11_Select_Lb_pK_ReFit
print(BPHY11_Select_Lb_pK_ReFit)

# Same refit for the conjugate Lb_Kp track-mass assignment.
BPHY11_Lb_Kp_ReFit = DerivationFramework__ReVertex(
    name                   = "BPHY11_Lb_Kp_ReFit",
    InputVtxContainerName  = "LbJpsipKCandidates",
    HypothesisNames        = [ BPHY11_Select_Lb2JpsiKp.HypothesisName ],
    TrackIndices           = [ 0, 1, 2, 3 ],
    UseMassConstraint      = True,
    UseVertexFittingWithPV = True,
    VertexMass             = 5619.6,
    SubVertexMass          = 3096.900,
    MassInputParticles     = [ 105.658, 105.658, 493.677, 938.272 ],
    SubVertexTrackIndices  = [ 1, 2 ],
    TrkVertexFitterTool    = LbJpsipKVertexFit,
    OutputVtxContainerName = "LbJpsiKpCandidatesReFit"
)

ToolSvc += BPHY11_Lb_Kp_ReFit
print(BPHY11_Lb_Kp_ReFit)

BPHY11_Select_Lb_Kp_ReFit = DerivationFramework__Select_onia2mumu(
    name                  = "BPHY11_Select_Lb_Kp_ReFit",
    HypothesisName        = "Lb_Kp_ReFit",
    InputVtxContainerName = "LbJpsiKpCandidatesReFit",
    TrkMasses             = BPHY11_Lb_Kp_ReFit.MassInputParticles,
    VtxMassHypo           = 5619.6,
    MassMin               = 0.0,
    MassMax               = 1.0e10,
    Chi2Max               = 1.0e10
)

ToolSvc += BPHY11_Select_Lb_Kp_ReFit
print(BPHY11_Select_Lb_Kp_ReFit)



#from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__SelectEvent

if not isSimulation: #Only Skim Data
    from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__xAODStringSkimmingTool
    BPHY11_SelectLdJpsipKEvent = DerivationFramework__xAODStringSkimmingTool(
        name       = "BPHY11_SelectLdJpsipKEvent",
        expression = "count(LbJpsipKCandidates.passed_Lb_pK > 0) > 0"
    )

    ToolSvc += BPHY11_SelectLdJpsipKEvent
    print(BPHY11_SelectLdJpsipKEvent)

    BPHY11_SelectLdJpsiKpEvent = DerivationFramework__xAODStringSkimmingTool(
        name       = "BPHY11_SelectLdJpsiKpEvent",
        expression = "count(LbJpsipKCandidates.passed_Lb_Kp > 0) > 0"
    )

    ToolSvc += BPHY11_SelectLdJpsiKpEvent
    print(BPHY11_SelectLdJpsiKpEvent)

    #====================================================================
    # Make event selection based on an OR of the input skimming tools
    #====================================================================

    from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__FilterCombinationOR
    BPHY11_SkimmingOR = CfgMgr.DerivationFramework__FilterCombinationOR(
        "BPHY11_SkimmingOR",
        FilterList = [BPHY11_SelectLdJpsipKEvent,BPHY11_SelectLdJpsiKpEvent],)

    ToolSvc += BPHY11_SkimmingOR
    print(BPHY11_SkimmingOR)

## a) thinning: keep only tracks belonging to candidates flagged by one of
##    the PassFlags decorations.
from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Thin_vtxTrk
BPHY11_thinningTool_Tracks = DerivationFramework__Thin_vtxTrk(
    name                       = "BPHY11_thinningTool_Tracks",
    TrackParticleContainerName = "InDetTrackParticles",
    VertexContainerNames       = ["LbJpsipKCandidates"],
    PassFlags                  = ["passed_Lb_pK","passed_Lb_Kp"]
)

ToolSvc += BPHY11_thinningTool_Tracks

from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__BPhysPVThinningTool
BPHY11_thinningTool_PV = DerivationFramework__BPhysPVThinningTool(
    name                 = "BPHY11_thinningTool_PV",
    CandidateCollections = ["LbJpsipKCandidates"],
    KeepPVTracks         = True
)

ToolSvc += BPHY11_thinningTool_PV


## b) thinning out tracks that are not attached to muons. The final thinning decision is based on the OR operation
## between decision from this and the previous tools.
+from DerivationFrameworkInDet.DerivationFrameworkInDetConf import DerivationFramework__MuonTrackParticleThinning +BPHY11_MuonTPThinningTool = DerivationFramework__MuonTrackParticleThinning( + name = "BPHY11_MuonTPThinningTool", + MuonKey = "Muons", + InDetTrackParticlesKey = "InDetTrackParticles" +) + +ToolSvc += BPHY11_MuonTPThinningTool + +from DerivationFrameworkInDet.DerivationFrameworkInDetConf import DerivationFramework__EgammaTrackParticleThinning +BPHY11_ElectronTPThinningTool = DerivationFramework__EgammaTrackParticleThinning( + name = "BPHY11_ElectronTPThinningTool", + SGKey = "Electrons", + GSFTrackParticlesKey = "GSFTrackParticles", + InDetTrackParticlesKey = "InDetTrackParticles", + SelectionString = "", + BestMatchOnly = True, + ConeSize = 0.3, + ApplyAnd = False +) + +ToolSvc+=BPHY11_ElectronTPThinningTool + +#==================================================================== +# CREATE THE DERIVATION KERNEL ALGORITHM AND PASS THE ABOVE TOOLS +#==================================================================== + +BPHY11_ThiningCollection = [BPHY11_thinningTool_Tracks, + BPHY11_thinningTool_PV, + BPHY11_MuonTPThinningTool, + BPHY11_ElectronTPThinningTool] +print(BPHY11_ThiningCollection) + + +# The name of the kernel (BPHY11_Kernel in this case) must be unique to this derivation +from DerivationFrameworkCore.DerivationFrameworkCoreConf import DerivationFramework__DerivationKernel +DerivationFrameworkJob += CfgMgr.DerivationFramework__DerivationKernel( + "BPHY11_Kernel", + AugmentationTools = [BPHY11_JpsiSelectAndWrite, BPHY11_Select_Jpsi2mumu, + BPHY11_LbJpsipKSelectAndWrite, BPHY11_Select_Lb2JpsipK, BPHY11_Select_Lb2JpsiKp, + BPHY11_LbPlusTrk, BPHY11_Select_LbPlusTrk, + BPHY11_Lb_pK_ReFit, BPHY11_Select_Lb_pK_ReFit, + BPHY11_Lb_Kp_ReFit, BPHY11_Select_Lb_Kp_ReFit, + BPHY11_AugOriginalCounts], + #Only skim if not MC + SkimmingTools = [BPHY11_SkimmingOR] if not isSimulation else [], + ThinningTools = BPHY11_ThiningCollection +) + 
#====================================================================
# SET UP STREAM
#====================================================================
streamName = derivationFlags.WriteDAOD_BPHY11Stream.StreamName
fileName   = buildFileName( derivationFlags.WriteDAOD_BPHY11Stream )
BPHY11Stream = MSMgr.NewPoolRootStream( streamName, fileName )
BPHY11Stream.AcceptAlgs(["BPHY11_Kernel"])

# Special lines for thinning
# Thinning service name must match the one passed to the thinning tools
from AthenaServices.Configurables import ThinningSvc, createThinningSvc
augStream = MSMgr.GetStream( streamName )
evtStream = augStream.GetEventStream()

BPHY11_ThinningSvc = createThinningSvc( svcName="BPHY11_ThinningSvc", outStreams=[evtStream] )
svcMgr += BPHY11_ThinningSvc

#====================================================================
# Slimming
#====================================================================

# Added by ASC
from DerivationFrameworkCore.SlimmingHelper import SlimmingHelper
BPHY11_SlimmingHelper = SlimmingHelper("BPHY11_SlimmingHelper")
AllVariables  = []
StaticContent = []

# Needed for trigger objects
# NOTE(review): TRUE relies on the Athena job-option globals; plain True
# would be the safer spelling -- confirm before changing.
BPHY11_SlimmingHelper.IncludeMuonTriggerContent  = TRUE
BPHY11_SlimmingHelper.IncludeBPhysTriggerContent = TRUE

## primary vertices
AllVariables  += ["PrimaryVertices"]
StaticContent += ["xAOD::VertexContainer#BPHY11_RefittedPrimaryVertices"]
StaticContent += ["xAOD::VertexAuxContainer#BPHY11_RefittedPrimaryVerticesAux."]

## ID track particles
AllVariables += ["InDetTrackParticles"]

## combined / extrapolated muon track particles
## (note: for tagged muons there is no extra TrackParticle collection since the ID tracks
##        are store in InDetTrackParticles collection)
AllVariables += ["CombinedMuonTrackParticles"]
AllVariables += ["ExtrapolatedMuonTrackParticles"]

## muon container
AllVariables += ["Muons"]

## Jpsi candidates
StaticContent += ["xAOD::VertexContainer#%s" % BPHY11_JpsiSelectAndWrite.OutputVtxContainerName]
## we have to disable vxTrackAtVertex branch since it is not xAOD compatible
StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY11_JpsiSelectAndWrite.OutputVtxContainerName]

StaticContent += ["xAOD::VertexContainer#%s" % BPHY11_LbJpsipKSelectAndWrite.OutputVtxContainerName]
StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY11_LbJpsipKSelectAndWrite.OutputVtxContainerName]

StaticContent += ["xAOD::VertexContainer#%s" % BPHY11_LbPlusTrk.OutputVtxContainerName]
StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY11_LbPlusTrk.OutputVtxContainerName]

StaticContent += ["xAOD::VertexContainer#%s" % BPHY11_Lb_pK_ReFit.OutputVtxContainerName]
StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY11_Lb_pK_ReFit.OutputVtxContainerName]

StaticContent += ["xAOD::VertexContainer#%s" % BPHY11_Lb_Kp_ReFit.OutputVtxContainerName]
StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY11_Lb_Kp_ReFit.OutputVtxContainerName]



# Tagging information (in addition to that already requested by usual algorithms)
#AllVariables += ["Electrons"]
AllVariables += ["GSFTrackParticles"]
tagJetCollections = ['AntiKt4LCTopoJets']

# Keep each jet collection and its b-tagging containers; "[:-4]" strips
# the trailing "Jets" from the collection name.
for jet_collection in tagJetCollections:
    AllVariables += [jet_collection]
    AllVariables += ["BTagging_%s"       % (jet_collection[:-4]) ]
    AllVariables += ["BTagging_%sJFVtx"  % (jet_collection[:-4]) ]
    AllVariables += ["BTagging_%sSecVtx" % (jet_collection[:-4]) ]




# Added by ASC
# Truth information for MC only
if isSimulation:
    AllVariables += ["TruthEvents","TruthParticles","TruthVertices","MuonTruthParticles"]
    AllVariables += ["AntiKt4TruthJets","egammaTruthParticles"]

BPHY11_SlimmingHelper.AllVariables  = AllVariables
BPHY11_SlimmingHelper.StaticContent = StaticContent
BPHY11_SlimmingHelper.SmartCollections = ["Electrons" , "Photons"]
BPHY11_SlimmingHelper.AppendContentToStream(BPHY11Stream)


diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY12.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY12.py
new file mode 100644
index 0000000000000000000000000000000000000000..90f52acbc5658799b6e7ef213c84ab9ae324c293
--- /dev/null
+++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY12.py
@@ -0,0 +1,389 @@
#====================================================================
# BPHY12.py
# This an example job options script showing how to set up a
# derivation of the data using the derivation framework.
# It requires the reductionConf flag BPHY12 in Reco_tf.py
#====================================================================

#====================================================================
# FLAGS TO PERSONALIZE THE DERIVATION
#====================================================================

skimTruth = False

# Set up common services and job object.
# This should appear in ALL derivation job options
from DerivationFrameworkCore.DerivationFrameworkMaster import *

isSimulation = False
if globalflags.DataSource()=='geant4':
    isSimulation = True

print('is this simulation? ', isSimulation)

#====================================================================
# AUGMENTATION TOOLS
#====================================================================
## 1/ setup vertexing tools and services
include("DerivationFrameworkBPhys/configureVertexing.py")
BPHY12_VertexTools = BPHYVertexTools("BPHY12")

print('********************** VERTEX TOOLS ***********************')
print(BPHY12_VertexTools)
print(BPHY12_VertexTools.TrkV0Fitter)
print('********************** END VERTEX TOOLS ***********************')

#====================================================================
# TriggerCounting for Kernel1 #Added by Matteo
#====================================================================
#List of trigggers to be counted (high Sig-eff*Lumi ones are in)
triggersToMetadata= [
"HLT_mu11_mu6_bBmumuxv2",
"HLT_2mu10_bBmumuxv2",
"HLT_2mu6_bBmumuxv2_L1LFV-MU6",
"HLT_mu11_mu6_bBmumux_BpmumuKp",
"HLT_2mu6_bBmumux_BpmumuKp_L1BPH-2M9-2MU6_BPH-2DR15-2MU6",

"HLT_mu11_mu6_bDimu",
"HLT_4mu4_bDimu6000"
  ]


from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__TriggerCountToMetadata
BPHY12TriggerCountToMetadata = DerivationFramework__TriggerCountToMetadata(name        = "BPHY12TriggerCount",
                                                                           TriggerList = triggersToMetadata,
                                                                           FolderName  = "BPHY12")

ToolSvc += BPHY12TriggerCountToMetadata

#====================================================================
# PRESELECTION for Kernel1 #Added by Matteo
#====================================================================
## 1/ Setup the skimming based on triggers
##

# NOTE(review): this duplicates triggersToMetadata above item by item --
# kept separate as in the original, but the two lists could be shared.
triggerList = [
"HLT_mu11_mu6_bBmumuxv2",
"HLT_2mu10_bBmumuxv2",
"HLT_2mu6_bBmumuxv2_L1LFV-MU6",
"HLT_mu11_mu6_bBmumux_BpmumuKp",
"HLT_2mu6_bBmumux_BpmumuKp_L1BPH-2M9-2MU6_BPH-2DR15-2MU6",
"HLT_mu11_mu6_bDimu",
"HLT_4mu4_bDimu6000"
  ]

from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__TriggerSkimmingTool
BPHY12TriggerSkim = DerivationFramework__TriggerSkimmingTool(name           = "BPHY12TriggerSkim",
                                                             TriggerListOR  = triggerList,
                                                             TriggerListAND = [] )

ToolSvc += BPHY12TriggerSkim

#--------------------------------------------------------------------
## 2/ Setup the vertex fitter tools (e.g. JpsiFinder, JpsiPlus1Track, etc).
##    These are general tools independent of DerivationFramework that do the
##    actual vertex fitting and some pre-selection.
from JpsiUpsilonTools.JpsiUpsilonToolsConf import Analysis__JpsiFinder
BPHY12DiMuonFinder = Analysis__JpsiFinder(
    name                    = "BPHY12DiMuonFinder",
    OutputLevel             = INFO,
    muAndMu                 = True,
    muAndTrack              = False,
    TrackAndTrack           = False,
    assumeDiMuons           = True, # If true, will assume dimu hypothesis and use PDG value for mu mass
    invMassUpper            = 100000.0,
    invMassLower            = 0.0,
    Chi2Cut                 = 200.,
    oppChargesOnly          = True,
    atLeastOneComb          = True,
    useCombinedMeasurement  = False, # Only takes effect if combOnly=True
    muonCollectionKey       = "Muons",
    TrackParticleCollection = "InDetTrackParticles",
    V0VertexFitterTool      = BPHY12_VertexTools.TrkV0Fitter,      # V0 vertex fitter
    useV0Fitter             = False,                               # if False a TrkVertexFitterTool will be used
    TrkVertexFitterTool     = BPHY12_VertexTools.TrkVKalVrtFitter, # VKalVrt vertex fitter
    TrackSelectorTool       = BPHY12_VertexTools.InDetTrackSelectorTool,
    VertexPointEstimator    = BPHY12_VertexTools.VtxPointEstimator,
    useMCPCuts              = False )

ToolSvc += BPHY12DiMuonFinder
print(BPHY12DiMuonFinder)

#--------------------------------------------------------------------
## 3/ setup the vertex reconstruction "call" tool(s). They are part of the derivation framework.
##    These Augmentation tools add output vertex collection(s) into the StoreGate and add basic
##    decorations which do not depend on the vertex mass hypothesis (e.g. lxy, ptError, etc).
##    There should be one tool per topology, i.e. Jpsi and Psi(2S) do not need two instance of the
##    Reco tool is the JpsiFinder mass window is wide enough.
+ +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Reco_Vertex +BPHY12_Reco_DiMuon = DerivationFramework__Reco_Vertex( + name = "BPHY12_Reco_DiMuon", + VertexSearchTool = BPHY12DiMuonFinder, + OutputVtxContainerName = "BPHY12DiMuonCandidates", + PVContainerName = "PrimaryVertices", + RefPVContainerName = "BPHY12RefittedPrimaryVertices", + RefitPV = True, + MaxPVrefit = 100000, + DoVertexType = 7) + +ToolSvc += BPHY12_Reco_DiMuon +print(BPHY12_Reco_DiMuon) + +#-------------------------------------------------------------------- +## 4/ setup the vertex selection and augmentation tool(s). These tools decorate the vertices with +## variables that depend on the vertex mass hypothesis, e.g. invariant mass, proper decay time, etc. +## Property HypothesisName is used as a prefix for these decorations. +## They also perform tighter selection, flagging the vertecis that passed. The flag is a Char_t branch +## named "passed_"+HypothesisName. It is used later by the "SelectEvent" and "Thin_vtxTrk" tools +## to determine which events and candidates should be kept in the output stream. +## Multiple instances of the Select_* tools can be used on a single input collection as long as they +## use different "HypothesisName" flags. 
+ +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Select_onia2mumu + +## a/ augment and select Jpsi->mumu candidates +BPHY12_Select_DiMuons = DerivationFramework__Select_onia2mumu( + name = "BPHY12_Select_DiMuons", + HypothesisName = "Jpsi", + InputVtxContainerName = "BPHY12DiMuonCandidates", + VtxMassHypo = 3096.916, + MassMin = 1.0, + MassMax = 7000.0, + Chi2Max = 200., + DoVertexType = 7) + +ToolSvc += BPHY12_Select_DiMuons +print(BPHY12_Select_DiMuons) + +## 4/ setup a new vertexing tool (necessary due to use of mass constraint) +from TrkVKalVrtFitter.TrkVKalVrtFitterConf import Trk__TrkVKalVrtFitter +BmumuKstVertexFit = Trk__TrkVKalVrtFitter( + name = "BmumuKstVertexFit", + Extrapolator = BPHY12_VertexTools.InDetExtrapolator, + FirstMeasuredPoint = True, + MakeExtendedVertex = True) + +ToolSvc += BmumuKstVertexFit +print(BmumuKstVertexFit) + +## 5/ setup the Jpsi+2 track finder +from JpsiUpsilonTools.JpsiUpsilonToolsConf import Analysis__JpsiPlus2Tracks +BPHY12BmumuKstFinder = Analysis__JpsiPlus2Tracks( + name = "BPHY12BmumuKstFinder", + OutputLevel = INFO, #can also be DEBUG, WARNING, VERBOSE + kaonkaonHypothesis = False, + pionpionHypothesis = False, + kaonpionHypothesis = True, + trkThresholdPt = 500.0, #minimum track pT in MeV + trkMaxEta = 3.0, + BThresholdPt = 1000., + BMassLower = 3000.0, #OI makes no sense below Jpsi mass #same values as BPHY18 (original) - Bs->JpsiKK + BMassUpper = 6500.0, + JpsiContainerKey = "BPHY12DiMuonCandidates", + TrackParticleCollection = "InDetTrackParticles", + #MuonsUsedInJpsi = "Muons", #Don't remove all muons, just those in J/psi candidate (see the following cut) + ExcludeCrossJpsiTracks = False, #setting this to False rejects the muons from J/psi candidate + TrkVertexFitterTool = BmumuKstVertexFit, + TrackSelectorTool = BPHY12_VertexTools.InDetTrackSelectorTool, + UseMassConstraint = False, #Set to True, according to Bs->JpsiKK DAOD + DiTrackMassUpper = 1110., #OI was 1500. 
Can eventually set these to be the K* mass? + DiTrackMassLower = 690., #OI was 500 + Chi2Cut = 15., #THIS IS CHI2/NDOF, checked the code!!! + DiTrackPt = 500., + TrkQuadrupletMassLower = 1000.0, #Two electrons + two tracks (one K, one pi) + TrkQuadrupletMassUpper = 100000.0, # same as BPHY18, original + #FinalDiTrackMassUpper = 1000., + #FinalDiTrackMassLower = 800., + #TrkDeltaZ = 20., #Normally, this cut should not be used since it is lifetime-dependent + FinalDiTrackPt = 500., +#OI DoElectrons = True, + #UseGSFTrackIndices = [0,1] + ) + +ToolSvc += BPHY12BmumuKstFinder +print(BPHY12BmumuKstFinder) +## 6/ setup the combined augmentation/skimming tool for the BeeKst +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Reco_Vertex +BPHY12_Reco_BmumuKst = DerivationFramework__Reco_Vertex( + name = "BPHY12_Reco_BmumuKst", + Jpsi2PlusTrackName = BPHY12BmumuKstFinder, + OutputVtxContainerName = "BPHY12BmumuKstCandidates", + PVContainerName = "PrimaryVertices", + RefPVContainerName = "BPHY12RefittedPrimaryVertices", + RefitPV = True, + MaxPVrefit = 10000, + DoVertexType = 7) + +ToolSvc += BPHY12_Reco_BmumuKst +print(BPHY12_Reco_BmumuKst) + +## b/ augment and select B->eeKst candidates +# set mass hypothesis (K pi) +BPHY12_Select_BmumuKst = DerivationFramework__Select_onia2mumu( + name = "BPHY12_Select_BmumuKst", + HypothesisName = "Bd", #creates output variable pass_Bd + InputVtxContainerName = "BPHY12BmumuKstCandidates", + TrkMasses = [105.658, 105.658, 493.677, 139.570], + VtxMassHypo = 5279.6, #mass of B + MassMin = 1.0, #no mass cuts here + MassMax = 10000.0, #no mass cuts here + Chi2Max = 30.0) #THIS IS CHI2! NOT CHI2/NDOF! Careful! + +ToolSvc += BPHY12_Select_BmumuKst +print(BPHY12_Select_BmumuKst) + +#-------------------------------------------------------------------- +## 5/ select the event. We only want to keep events that contain certain vertices which passed certain selection. 
## This is specified by the "SelectionExpression" property, which contains the expression in the following format:
##
##       "ContainerName.passed_HypoName > count"
##
## where "ContainerName" is an output container from some Reco_* tool, "HypoName" is the hypothesis name set up in some
## "Select_*" tool and "count" is the number of candidates passing the selection you want to keep.

if skimTruth or not isSimulation:  # Only skim data
    expression = "count(BPHY12BmumuKstCandidates.passed_Bd) > 0"
    from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__xAODStringSkimmingTool
    BPHY12_SelectEvent = DerivationFramework__xAODStringSkimmingTool(
        name       = "BPHY12_SelectEvent",
        expression = expression)
    ToolSvc += BPHY12_SelectEvent
    print(BPHY12_SelectEvent)

    #====================================================================
    # Make the event selection based on an OR of the input skimming tools
    # (the string-based selection above and the trigger skim).
    #====================================================================
    # NOTE(review): use the configurable imported on the previous line directly,
    # rather than going through CfgMgr, for consistency with how
    # DerivationFramework__xAODStringSkimmingTool is instantiated above.
    from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__FilterCombinationOR
    BPHY12SkimmingOR = DerivationFramework__FilterCombinationOR(
        name       = "BPHY12SkimmingOR",
        FilterList = [BPHY12_SelectEvent, BPHY12TriggerSkim])  # OR of all the different filters
    ToolSvc += BPHY12SkimmingOR
    print(BPHY12SkimmingOR)

#--------------------------------------------------------------------
## 6/ track and vertex thinning. We want to remove all reconstructed secondary vertices
## which haven't passed any of the selections defined by the (Select_*) tools.
## We also want to keep only tracks which are associated with either muons or any of the
## vertices that passed the selection. Multiple thinning tools can perform the
## selection. The final thinning decision is based on the OR of all the decisions (by default,
## although it can be changed by the JO).
## a) thinning out vertices that didn't pass any selection and identifying tracks associated with
##    selected vertices. The "VertexContainerNames" is a list of the vertex containers, and "PassFlags"
##    contains all pass flags for Select_* tools that must be satisfied. The vertex is kept if it
##    satisfies any of the listed selections.

from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Thin_vtxTrk
BPHY12Thin_vtxTrk = DerivationFramework__Thin_vtxTrk(
    name                       = "BPHY12Thin_vtxTrk",
    TrackParticleContainerName = "InDetTrackParticles",
    VertexContainerNames       = ["BPHY12BmumuKstCandidates"],
    PassFlags                  = ["passed_Bd"] )

ToolSvc += BPHY12Thin_vtxTrk

## b) thinning out tracks that are not attached to muons. The final thinning decision is based on the OR operation
##    between the decision from this and the previous tools.
from DerivationFrameworkInDet.DerivationFrameworkInDetConf import DerivationFramework__MuonTrackParticleThinning
BPHY12MuonTPThinningTool = DerivationFramework__MuonTrackParticleThinning(
    name                   = "BPHY12MuonTPThinningTool",
    MuonKey                = "Muons",
    InDetTrackParticlesKey = "InDetTrackParticles")
ToolSvc += BPHY12MuonTPThinningTool

# Added by ASC
# Only save truth information directly associated with Onia
# (the selection string keeps B0 / anti-B0 (pdgId +-511) and B_s0 / anti-B_s0 (pdgId +-531),
#  together with their ancestors and descendants).
from DerivationFrameworkMCTruth.DerivationFrameworkMCTruthConf import DerivationFramework__GenericTruthThinning
BPHY12TruthThinTool = DerivationFramework__GenericTruthThinning(
    name                    = "BPHY12TruthThinTool",
    ParticleSelectionString = "TruthParticles.pdgId == 511 || TruthParticles.pdgId == -511 || TruthParticles.pdgId == 531 || TruthParticles.pdgId == -531",
    PreserveDescendants     = True,
    PreserveAncestors       = True)
ToolSvc += BPHY12TruthThinTool
print(BPHY12TruthThinTool)


#====================================================================
# CREATE THE DERIVATION KERNEL ALGORITHM AND PASS THE ABOVE TOOLS
#====================================================================
## 7/ IMPORTANT bit.
Don't forget to pass the tools to the DerivationKernel! If you don't do that, they will not be +## be executed! + +# Added by ASC +BPHY12ThinningTools = [BPHY12Thin_vtxTrk, BPHY12MuonTPThinningTool] +if globalflags.DataSource()=='geant4': + BPHY12ThinningTools.append(BPHY12TruthThinTool) + +# The name of the kernel (BPHY12Kernel in this case) must be unique to this derivation +from DerivationFrameworkCore.DerivationFrameworkCoreConf import DerivationFramework__DerivationKernel +DerivationFrameworkJob += CfgMgr.DerivationFramework__DerivationKernel( + "BPHY12Kernel", + AugmentationTools = [BPHY12_Reco_DiMuon, BPHY12_Select_DiMuons, + BPHY12_Reco_BmumuKst, BPHY12_Select_BmumuKst], + SkimmingTools = [BPHY12SkimmingOR] if skimTruth or not isSimulation else [], + ThinningTools = BPHY12ThinningTools + ) + +#==================================================================== +# SET UP STREAM +#==================================================================== +streamName = derivationFlags.WriteDAOD_BPHY12Stream.StreamName +fileName = buildFileName( derivationFlags.WriteDAOD_BPHY12Stream ) +BPHY12Stream = MSMgr.NewPoolRootStream( streamName, fileName ) +BPHY12Stream.AcceptAlgs(["BPHY12Kernel"]) +# Special lines for thinning +# Thinning service name must match the one passed to the thinning tools +from AthenaServices.Configurables import ThinningSvc, createThinningSvc +augStream = MSMgr.GetStream( streamName ) +evtStream = augStream.GetEventStream() +svcMgr += createThinningSvc( svcName="BPHY12ThinningSvc", outStreams=[evtStream] ) + + +#==================================================================== +# Slimming +#==================================================================== + +# Added by ASC +from DerivationFrameworkCore.SlimmingHelper import SlimmingHelper +BPHY12SlimmingHelper = SlimmingHelper("BPHY12SlimmingHelper") +AllVariables = [] +StaticContent = [] + +# Needed for trigger objects +BPHY12SlimmingHelper.IncludeMuonTriggerContent = True 
+BPHY12SlimmingHelper.IncludeBPhysTriggerContent = True + +## primary vertices +AllVariables += ["PrimaryVertices"] +StaticContent += ["xAOD::VertexContainer#BPHY12RefittedPrimaryVertices"] +StaticContent += ["xAOD::VertexAuxContainer#BPHY12RefittedPrimaryVerticesAux."] + +## ID track particles +AllVariables += ["InDetTrackParticles"] + +## combined / extrapolated muon track particles +## (note: for tagged muons there is no extra TrackParticle collection since the ID tracks +## are store in InDetTrackParticles collection) +AllVariables += ["CombinedMuonTrackParticles"] +AllVariables += ["ExtrapolatedMuonTrackParticles"] + +## muon container +AllVariables += ["Muons"] + +## Jpsi candidates +StaticContent += ["xAOD::VertexContainer#%s" % BPHY12_Reco_DiMuon.OutputVtxContainerName] +StaticContent += ["xAOD::VertexAuxContainer#%sAux." % BPHY12_Reco_DiMuon.OutputVtxContainerName] +## we have to disable vxTrackAtVertex branch since it is not xAOD compatible +StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY12_Reco_DiMuon.OutputVtxContainerName] + +StaticContent += ["xAOD::VertexContainer#%s" % BPHY12_Reco_BmumuKst.OutputVtxContainerName] +StaticContent += ["xAOD::VertexAuxContainer#%sAux." 
% BPHY12_Reco_BmumuKst.OutputVtxContainerName] +## we have to disable vxTrackAtVertex branch since it is not xAOD compatible +StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY12_Reco_BmumuKst.OutputVtxContainerName] + +# Added by ASC +# Truth information for MC only +if isSimulation: + AllVariables += ["TruthEvents","TruthParticles","TruthVertices","MuonTruthParticles"] + +BPHY12SlimmingHelper.AllVariables = AllVariables +BPHY12SlimmingHelper.StaticContent = StaticContent +BPHY12SlimmingHelper.AppendContentToStream(BPHY12Stream) diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY13.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY13.py new file mode 100644 index 0000000000000000000000000000000000000000..b0549a616181683eadfe5fe73637fca5d0f49b53 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY13.py @@ -0,0 +1,483 @@ +#==================================================================== +# BPHY13.py (Based on BPHY8, BPHY16, and the old BPHY13) +# Contact: xin.chen@cern.ch +#==================================================================== + +# Set up common services and job object. +# This should appear in ALL derivation job options +from DerivationFrameworkCore.DerivationFrameworkMaster import * + +isSimulation = False +if globalflags.DataSource()=='geant4': + isSimulation = True + +print(isSimulation) + +#==================================================================== +# AUGMENTATION TOOLS +#==================================================================== +## 1/ setup vertexing tools and services +include("DerivationFrameworkBPhys/configureVertexing.py") +BPHY13_VertexTools = BPHYVertexTools("BPHY13") + +#-------------------------------------------------------------------- +## 2/ Setup the vertex fitter tools (e.g. JpsiFinder, JpsiPlus1Track, etc). 
+## These are general tools independent of DerivationFramework that do the +## actual vertex fitting and some pre-selection. +from JpsiUpsilonTools.JpsiUpsilonToolsConf import Analysis__JpsiFinder +BPHY13JpsiFinder = Analysis__JpsiFinder( + name = "BPHY13JpsiFinder", + OutputLevel = INFO, + muAndMu = True, + muAndTrack = False, + TrackAndTrack = False, + assumeDiMuons = True, # If true, will assume dimu hypothesis and use PDG value for mu mass + trackThresholdPt = 2500., + invMassUpper = 12500., + invMassLower = 2000., + Chi2Cut = 200., + oppChargesOnly = True, + atLeastOneComb = True, + useCombinedMeasurement = False, # Only takes effect if combOnly=True + muonCollectionKey = "Muons", + TrackParticleCollection = "InDetTrackParticles", + V0VertexFitterTool = BPHY13_VertexTools.TrkV0Fitter, # V0 vertex fitter + useV0Fitter = False, # if False a TrkVertexFitterTool will be used + TrkVertexFitterTool = BPHY13_VertexTools.TrkVKalVrtFitter, # VKalVrt vertex fitter + TrackSelectorTool = BPHY13_VertexTools.InDetTrackSelectorTool, + VertexPointEstimator = BPHY13_VertexTools.VtxPointEstimator, + useMCPCuts = False ) + +ToolSvc += BPHY13JpsiFinder +print(BPHY13JpsiFinder) + +#-------------------------------------------------------------------- +## 3/ setup the vertex reconstruction "call" tool(s). They are part of the derivation framework. +## These Augmentation tools add output vertex collection(s) into the StoreGate and add basic +## decorations which do not depend on the vertex mass hypothesis (e.g. lxy, ptError, etc). +## There should be one tool per topology, i.e. Jpsi and Psi(2S) do not need two instance of the +## Reco tool if the JpsiFinder mass window is wide enough. 
+ +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Reco_Vertex +BPHY13_Reco_mumu = DerivationFramework__Reco_Vertex( + name = "BPHY13_Reco_mumu", + VertexSearchTool = BPHY13JpsiFinder, + OutputVtxContainerName = "BPHY13OniaCandidates", + PVContainerName = "PrimaryVertices", + RefPVContainerName = "SHOULDNOTBEUSED", +# RefPVContainerName = "BPHY13RefittedPrimaryVertices", +# RefitPV = True, +# MaxPVrefit = 10000, +#https://gitlab.cern.ch/atlas/athena/-/blob/21.2/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BPhysPVTools.cxx#L259 +# bit pattern: doZ0BA|doZ0|doA0|doPt + DoVertexType = 1) + +ToolSvc += BPHY13_Reco_mumu +print(BPHY13_Reco_mumu) + +## 4/ setup a new vertexing tool (necessary due to use of mass constraint) +from TrkVKalVrtFitter.TrkVKalVrtFitterConf import Trk__TrkVKalVrtFitter +BPHY13VertexFit = Trk__TrkVKalVrtFitter( + name = "BPHY13VertexFit", + Extrapolator = BPHY13_VertexTools.InDetExtrapolator, +# FirstMeasuredPoint = True, + FirstMeasuredPoint = False, + MakeExtendedVertex = True) +ToolSvc += BPHY13VertexFit +print(BPHY13VertexFit) + +## 5/ setup the Jpsi+2 track finder +# https://gitlab.cern.ch/atlas/athena/-/blob/21.2/PhysicsAnalysis/JpsiUpsilonTools/src/JpsiPlus2Tracks.cxx +from JpsiUpsilonTools.JpsiUpsilonToolsConf import Analysis__JpsiPlus2Tracks +BPHY13Plus2Tracks = Analysis__JpsiPlus2Tracks( + name = "BPHY13Plus2Tracks", + # OutputLevel = DEBUG, + kaonkaonHypothesis = False, + pionpionHypothesis = False, + kaonpionHypothesis = False, + ManualMassHypo = [ 105.658, 105.658, 105.658, 105.658 ], + trkThresholdPt = 1500., + trkMaxEta = 2.5, + oppChargesOnly = False, + DiTrackMassUpper = 12500., + DiTrackMassLower = 2000., + TrkQuadrupletMassUpper = 25000., + TrkQuadrupletMassLower = 0., + Chi2Cut = 200., + JpsiContainerKey = "BPHY13OniaCandidates", + TrackParticleCollection = "InDetTrackParticles", + MuonsUsedInJpsi = "Muons", + ExcludeJpsiMuonsOnly = True, + RequireNMuonTracks = 1, + 
TrkVertexFitterTool = BPHY13VertexFit, + TrackSelectorTool = BPHY13_VertexTools.InDetTrackSelectorTool, + UseMassConstraint = False) + +ToolSvc += BPHY13Plus2Tracks +print(BPHY13Plus2Tracks) + +## 6/ setup the combined augmentation/skimming tool +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Reco_Vertex +BPHY13FourTrackSelectAndWrite = DerivationFramework__Reco_Vertex( + name = "BPHY13FourTrackSelectAndWrite", + Jpsi2PlusTrackName = BPHY13Plus2Tracks, + OutputVtxContainerName = "BPHY13FourTrack", + PVContainerName = "PrimaryVertices", + RefPVContainerName = "BPHY13RefittedPrimaryVertices", + RefitPV = True, + MaxPVrefit = 10000, + DoVertexType = 7) + +ToolSvc += BPHY13FourTrackSelectAndWrite +print(BPHY13FourTrackSelectAndWrite) + + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Select_onia2mumu + +BPHY13_Select_FourTrack = DerivationFramework__Select_onia2mumu( + name = "BPHY13_Select_FourTrack", + HypothesisName = "FourTracks", + InputVtxContainerName = "BPHY13FourTrack", + TrkMasses = [105.658, 105.658, 105.658, 105.658], + VtxMassHypo = 6900.0, # for decay time + MassMin = 0., + MassMax = 25000., + Chi2Max = 200.) 
+ +ToolSvc += BPHY13_Select_FourTrack +print(BPHY13_Select_FourTrack) + + +#==================================================================== +# Isolation +#==================================================================== + +#Track isolation for candidates +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__VertexTrackIsolation +BPHY13TrackIsolationDecorator = DerivationFramework__VertexTrackIsolation( + name = "BPHY13TrackIsolationDecorator", + OutputLevel = INFO, + TrackIsoTool = "xAOD::TrackIsolationTool", + TrackContainer = "InDetTrackParticles", + InputVertexContainer = "BPHY13FourTrack", + PassFlags = ["passed_FourTracks"], + DoIsoPerTrk = True, + RemoveDuplicate = 2 +) + +ToolSvc += BPHY13TrackIsolationDecorator +print(BPHY13TrackIsolationDecorator) + + +#==================================================================== +# Revertex with mass constraint +#==================================================================== + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__ReVertex +BPHY13_Revertex_2mu = DerivationFramework__ReVertex( + name = "BPHY13_Revertex_2mu", + InputVtxContainerName = "BPHY13FourTrack", + TrackIndices = [ 0, 1 ], + RefitPV = True, + RefPVContainerName = "BPHY13RefittedPrimaryVertices", # use existing refitted PVs + UseMassConstraint = True, + VertexMass = 3096.916, + MassInputParticles = [105.658, 105.658], + TrkVertexFitterTool = BPHY13VertexFit, + OutputVtxContainerName = "BPHY13TwoMuon") + +ToolSvc += BPHY13_Revertex_2mu +print(BPHY13_Revertex_2mu) + +BPHY13_Select_TwoMuon = DerivationFramework__Select_onia2mumu( + name = "BPHY13_Select_TwoMuon", + HypothesisName = "TwoMuons", + InputVtxContainerName = "BPHY13TwoMuon", + TrkMasses = [105.658, 105.658], + VtxMassHypo = 3096.916, + MassMin = 2000., + MassMax = 3600., + Chi2Max = 200) + +ToolSvc += BPHY13_Select_TwoMuon +print(BPHY13_Select_TwoMuon) + +BPHY13_Revertex_2trk = 
DerivationFramework__ReVertex( + name = "BPHY13_Revertex_2trk", + InputVtxContainerName = "BPHY13FourTrack", + TrackIndices = [ 2, 3 ], + RefitPV = True, + RefPVContainerName = "BPHY13RefittedPrimaryVertices", # use existing refitted PVs + UseMassConstraint = True, + VertexMass = 3096.916, + MassInputParticles = [105.658, 105.658], + TrkVertexFitterTool = BPHY13VertexFit, + OutputVtxContainerName = "BPHY13TwoTrack") + +ToolSvc += BPHY13_Revertex_2trk +print(BPHY13_Revertex_2trk) + +BPHY13_Select_TwoTrack = DerivationFramework__Select_onia2mumu( + name = "BPHY13_Select_TwoTrack", + HypothesisName = "TwoTracks", + InputVtxContainerName = "BPHY13TwoTrack", + TrkMasses = [105.658, 105.658], + VtxMassHypo = 3096.916, + MassMin = 2000., + MassMax = 3600., + Chi2Max = 200) + +ToolSvc += BPHY13_Select_TwoTrack +print(BPHY13_Select_TwoTrack) + + +BPHY13_Revertex_2muHi = DerivationFramework__ReVertex( + name = "BPHY13_Revertex_2muHi", + InputVtxContainerName = "BPHY13FourTrack", + TrackIndices = [ 0, 1 ], + RefitPV = True, + RefPVContainerName = "BPHY13RefittedPrimaryVertices", # use existing refitted PVs + UseMassConstraint = True, + VertexMass = 9460.30, + MassInputParticles = [105.658, 105.658], + TrkVertexFitterTool = BPHY13VertexFit, + OutputVtxContainerName = "BPHY13TwoMuonHi") + +ToolSvc += BPHY13_Revertex_2muHi +print(BPHY13_Revertex_2muHi) + +BPHY13_Select_TwoMuonHi = DerivationFramework__Select_onia2mumu( + name = "BPHY13_Select_TwoMuonHi", + HypothesisName = "TwoMuonsHi", + InputVtxContainerName = "BPHY13TwoMuonHi", + TrkMasses = [105.658, 105.658], + VtxMassHypo = 9460.30, + MassMin = 8500., + MassMax = 11000., + Chi2Max = 200) + +ToolSvc += BPHY13_Select_TwoMuonHi +print(BPHY13_Select_TwoMuonHi) + +BPHY13_Revertex_2trkHi = DerivationFramework__ReVertex( + name = "BPHY13_Revertex_2trkHi", + InputVtxContainerName = "BPHY13FourTrack", + TrackIndices = [ 2, 3 ], + RefitPV = True, + RefPVContainerName = "BPHY13RefittedPrimaryVertices", # use existing refitted PVs + 
UseMassConstraint = True, + VertexMass = 9460.30, + MassInputParticles = [105.658, 105.658], + TrkVertexFitterTool = BPHY13VertexFit, + OutputVtxContainerName = "BPHY13TwoTrackHi") + +ToolSvc += BPHY13_Revertex_2trkHi +print(BPHY13_Revertex_2trkHi) + +BPHY13_Select_TwoTrackHi = DerivationFramework__Select_onia2mumu( + name = "BPHY13_Select_TwoTrackHi", + HypothesisName = "TwoTracksHi", + InputVtxContainerName = "BPHY13TwoTrackHi", + TrkMasses = [105.658, 105.658], + VtxMassHypo = 9460.30, + MassMin = 8500., + MassMax = 11000., + Chi2Max = 200) + +ToolSvc += BPHY13_Select_TwoTrackHi +print(BPHY13_Select_TwoTrackHi) + + +BPHY13_Revertex_2muMed = DerivationFramework__ReVertex( + name = "BPHY13_Revertex_2muMed", + InputVtxContainerName = "BPHY13FourTrack", + TrackIndices = [ 0, 1 ], + RefitPV = True, + RefPVContainerName = "BPHY13RefittedPrimaryVertices", # use existing refitted PVs + UseMassConstraint = True, + VertexMass = 3686.10, + MassInputParticles = [105.658, 105.658], + TrkVertexFitterTool = BPHY13VertexFit, + OutputVtxContainerName = "BPHY13TwoMuonMed") + +ToolSvc += BPHY13_Revertex_2muMed +print(BPHY13_Revertex_2muMed) + +BPHY13_Select_TwoMuonMed = DerivationFramework__Select_onia2mumu( + name = "BPHY13_Select_TwoMuonMed", + HypothesisName = "TwoMuonsMed", + InputVtxContainerName = "BPHY13TwoMuonMed", + TrkMasses = [105.658, 105.658], + VtxMassHypo = 3686.10, + MassMin = 3300.0, + MassMax = 4500.0, + Chi2Max = 200) + +ToolSvc += BPHY13_Select_TwoMuonMed +print(BPHY13_Select_TwoMuonMed) + +BPHY13_Revertex_2trkMed = DerivationFramework__ReVertex( + name = "BPHY13_Revertex_2trkMed", + InputVtxContainerName = "BPHY13FourTrack", + TrackIndices = [ 2, 3 ], + RefitPV = True, + RefPVContainerName = "BPHY13RefittedPrimaryVertices", # use existing refitted PVs + UseMassConstraint = True, + VertexMass = 3686.10, + MassInputParticles = [105.658, 105.658], + TrkVertexFitterTool = BPHY13VertexFit, + OutputVtxContainerName = "BPHY13TwoTrackMed") + +ToolSvc += 
BPHY13_Revertex_2trkMed +print(BPHY13_Revertex_2trkMed) + +BPHY13_Select_TwoTrackMed = DerivationFramework__Select_onia2mumu( + name = "BPHY13_Select_TwoTrackMed", + HypothesisName = "TwoTracksMed", + InputVtxContainerName = "BPHY13TwoTrackMed", + TrkMasses = [105.658, 105.658], + VtxMassHypo = 3686.10, + MassMin = 3300., + MassMax = 4500., + Chi2Max = 200) + +ToolSvc += BPHY13_Select_TwoTrackMed +print(BPHY13_Select_TwoTrackMed) + +#-------------------------------------------------------------------- +## 7/ select the event. We only want to keep events that contain certain vertices which passed certain selection. +## This is specified by the "SelectionExpression" property, which contains the expression in the following format: +## +## "ContainerName.passed_HypoName > count" +## +## where "ContainerName" is output container from some Reco_* tool, "HypoName" is the hypothesis name setup in some "Select_*" +## tool and "count" is the number of candidates passing the selection you want to keep. + +#expression = "count(BPHY13FourTrack.passed_FourTracks) > 0" +expression = "count(BPHY13FourTrack.passed_FourTracks) > 0 && ( count(BPHY13TwoMuon.passed_TwoMuons) + count(BPHY13TwoTrack.passed_TwoTracks) > 1 || count(BPHY13TwoMuonMed.passed_TwoMuonsMed) + count(BPHY13TwoTrackMed.passed_TwoTracksMed) > 1 || count(BPHY13TwoMuon.passed_TwoMuons) + count(BPHY13TwoTrackMed.passed_TwoTracksMed) > 1 || count(BPHY13TwoMuonMed.passed_TwoMuonsMed) + count(BPHY13TwoTrack.passed_TwoTracks) > 1 || count(BPHY13TwoMuonHi.passed_TwoMuonsHi) + count(BPHY13TwoTrackHi.passed_TwoTracksHi) > 0 )" + +from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__xAODStringSkimmingTool +BPHY13_SelectEvent = DerivationFramework__xAODStringSkimmingTool(name = "BPHY13_SelectEvent", + expression = expression) +ToolSvc += BPHY13_SelectEvent +print(BPHY13_SelectEvent) + +#-------------------------------------------------------------------- +## 8/ track and vertex thinning. 
We want to remove all reconstructed secondary vertices +## which hasn't passed any of the selections defined by (Select_*) tools. +## We also want to keep only tracks which are associates with either muons or any of the +## vertices that passed the selection. Multiple thinning tools can perform the +## selection. The final thinning decision is based OR of all the decisions (by default, +## although it can be changed by the JO). + +## a) thining out vertices that didn't pass any selection and idetifying tracks associated with +## selected vertices. The "VertexContainerNames" is a list of the vertex containers, and "PassFlags" +## contains all pass flags for Select_* tools that must be satisfied. The vertex is kept is it +## satisfy any of the listed selections. + + +#==================================================================== +# CREATE THE DERIVATION KERNEL ALGORITHM AND PASS THE ABOVE TOOLS +#==================================================================== +## 9/ IMPORTANT bit. Don't forget to pass the tools to the DerivationKernel! If you don't do that, they will not be +## be executed! 
+ + +# The name of the kernel (BPHY13Kernel in this case) must be unique to this derivation +from DerivationFrameworkCore.DerivationFrameworkCoreConf import DerivationFramework__DerivationKernel +DerivationFrameworkJob += CfgMgr.DerivationFramework__DerivationKernel( + "BPHY13Kernel", + AugmentationTools = [BPHY13_Reco_mumu, BPHY13FourTrackSelectAndWrite, BPHY13_Select_FourTrack, BPHY13TrackIsolationDecorator, BPHY13_Revertex_2mu, BPHY13_Select_TwoMuon, BPHY13_Revertex_2trk, BPHY13_Select_TwoTrack, BPHY13_Revertex_2muHi, BPHY13_Select_TwoMuonHi, BPHY13_Revertex_2trkHi, BPHY13_Select_TwoTrackHi, BPHY13_Revertex_2muMed, BPHY13_Select_TwoMuonMed, BPHY13_Revertex_2trkMed, BPHY13_Select_TwoTrackMed], + SkimmingTools = [BPHY13_SelectEvent] + ) + +#==================================================================== +# SET UP STREAM +#==================================================================== +streamName = derivationFlags.WriteDAOD_BPHY13Stream.StreamName +fileName = buildFileName( derivationFlags.WriteDAOD_BPHY13Stream ) +BPHY13Stream = MSMgr.NewPoolRootStream( streamName, fileName ) +BPHY13Stream.AcceptAlgs(["BPHY13Kernel"]) +# Special lines for thinning +# Thinning service name must match the one passed to the thinning tools +from AthenaServices.Configurables import ThinningSvc, createThinningSvc +augStream = MSMgr.GetStream( streamName ) +evtStream = augStream.GetEventStream() +svcMgr += createThinningSvc( svcName="BPHY13ThinningSvc", outStreams=[evtStream] ) + + +#==================================================================== +# Slimming +#==================================================================== + +from DerivationFrameworkCore.SlimmingHelper import SlimmingHelper +BPHY13SlimmingHelper = SlimmingHelper("BPHY13SlimmingHelper") +BPHY13_AllVariables = [] +BPHY13_StaticContent = [] + +# Needed for trigger objects +BPHY13SlimmingHelper.IncludeMuonTriggerContent = True +BPHY13SlimmingHelper.IncludeBPhysTriggerContent = True + +## primary vertices 
+BPHY13_AllVariables += ["PrimaryVertices"] +BPHY13_StaticContent += ["xAOD::VertexContainer#BPHY13RefittedPrimaryVertices"] +BPHY13_StaticContent += ["xAOD::VertexAuxContainer#BPHY13RefittedPrimaryVerticesAux."] + +## ID track particles +BPHY13_AllVariables += ["InDetTrackParticles"] + +## combined / extrapolated muon track particles +## (note: for tagged muons there is no extra TrackParticle collection since the ID tracks +## are store in InDetTrackParticles collection) +BPHY13_AllVariables += ["CombinedMuonTrackParticles"] +BPHY13_AllVariables += ["ExtrapolatedMuonTrackParticles"] + +## muon container +BPHY13_AllVariables += ["Muons"] + + +BPHY13_StaticContent += ["xAOD::VertexContainer#%s" % BPHY13FourTrackSelectAndWrite.OutputVtxContainerName] +BPHY13_StaticContent += ["xAOD::VertexAuxContainer#%sAux." % BPHY13FourTrackSelectAndWrite.OutputVtxContainerName] +## we have to disable vxTrackAtVertex branch since it is not xAOD compatible +BPHY13_StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY13FourTrackSelectAndWrite.OutputVtxContainerName] + +BPHY13_StaticContent += ["xAOD::VertexContainer#%s" % BPHY13_Revertex_2mu.OutputVtxContainerName] +BPHY13_StaticContent += ["xAOD::VertexAuxContainer#%sAux." % BPHY13_Revertex_2mu.OutputVtxContainerName] +## we have to disable vxTrackAtVertex branch since it is not xAOD compatible +BPHY13_StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY13_Revertex_2mu.OutputVtxContainerName] + +BPHY13_StaticContent += ["xAOD::VertexContainer#%s" % BPHY13_Revertex_2trk.OutputVtxContainerName] +BPHY13_StaticContent += ["xAOD::VertexAuxContainer#%sAux." 
% BPHY13_Revertex_2trk.OutputVtxContainerName] +## we have to disable vxTrackAtVertex branch since it is not xAOD compatible +BPHY13_StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY13_Revertex_2trk.OutputVtxContainerName] + +BPHY13_StaticContent += ["xAOD::VertexContainer#%s" % BPHY13_Revertex_2muHi.OutputVtxContainerName] +BPHY13_StaticContent += ["xAOD::VertexAuxContainer#%sAux." % BPHY13_Revertex_2muHi.OutputVtxContainerName] +## we have to disable vxTrackAtVertex branch since it is not xAOD compatible +BPHY13_StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY13_Revertex_2muHi.OutputVtxContainerName] + +BPHY13_StaticContent += ["xAOD::VertexContainer#%s" % BPHY13_Revertex_2trkHi.OutputVtxContainerName] +BPHY13_StaticContent += ["xAOD::VertexAuxContainer#%sAux." % BPHY13_Revertex_2trkHi.OutputVtxContainerName] +## we have to disable vxTrackAtVertex branch since it is not xAOD compatible +BPHY13_StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY13_Revertex_2trkHi.OutputVtxContainerName] + +BPHY13_StaticContent += ["xAOD::VertexContainer#%s" % BPHY13_Revertex_2muMed.OutputVtxContainerName] +BPHY13_StaticContent += ["xAOD::VertexAuxContainer#%sAux." % BPHY13_Revertex_2muMed.OutputVtxContainerName] +## we have to disable vxTrackAtVertex branch since it is not xAOD compatible +BPHY13_StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY13_Revertex_2muMed.OutputVtxContainerName] + +BPHY13_StaticContent += ["xAOD::VertexContainer#%s" % BPHY13_Revertex_2trkMed.OutputVtxContainerName] +BPHY13_StaticContent += ["xAOD::VertexAuxContainer#%sAux." 
% BPHY13_Revertex_2trkMed.OutputVtxContainerName] +## we have to disable vxTrackAtVertex branch since it is not xAOD compatible +BPHY13_StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY13_Revertex_2trkMed.OutputVtxContainerName] + + +# Truth information for MC only +if isSimulation: + BPHY13_AllVariables += ["TruthEvents","TruthParticles","TruthVertices","MuonTruthParticles"] + +BPHY13SlimmingHelper.AllVariables = BPHY13_AllVariables +BPHY13SlimmingHelper.StaticContent = BPHY13_StaticContent +BPHY13SlimmingHelper.AppendContentToStream(BPHY13Stream) + diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY14.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY14.py new file mode 100644 index 0000000000000000000000000000000000000000..411fe0b4a3c4e194d12f7dad825c3f048c3bb610 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY14.py @@ -0,0 +1,329 @@ +#==================================================================== +# BPHY14.py +# This an example job options script showing how to set up a +# derivation of the data using the derivation framework. +# It requires the reductionConf flag BPHY14 in Reco_tf.py +#==================================================================== + +# Set up common services and job object. 
+# This should appear in ALL derivation job options +from DerivationFrameworkCore.DerivationFrameworkMaster import * + +isSimulation = False +if globalflags.DataSource()=='geant4': + isSimulation = True + + +print(isSimulation) + + +#==================================================================== +# AUGMENTATION TOOLS +#==================================================================== +## 1/ setup vertexing tools and services +#include( "JpsiUpsilonTools/configureServices.py" ) + +include("DerivationFrameworkBPhys/configureVertexing.py") +BPHY14_VertexTools = BPHYVertexTools("BPHY14") + +#-------------------------------------------------------------------- +## 2/ Setup the vertex fitter tools (e.g. JpsiFinder, JpsiPlus1Track, etc). +## These are general tools independent of DerivationFramework that do the +## actual vertex fitting and some pre-selection. +from JpsiUpsilonTools.JpsiUpsilonToolsConf import Analysis__JpsiFinder +BPHY14JpsiFinder = Analysis__JpsiFinder( + name = "BPHY14JpsiFinder", + OutputLevel = INFO, + muAndMu = True, + muAndTrack = False, + TrackAndTrack = False, + assumeDiMuons = True, # If true, will assume dimu hypothesis and use PDG value for mu mass + invMassUpper = 15000.0, + invMassLower = 2000., + Chi2Cut = 200., + muonThresholdPt = 2500., + oppChargesOnly = True, + atLeastOneComb = False, + combOnly = True, + useCombinedMeasurement = False, # Only takes effect if combOnly=True + muonCollectionKey = "Muons", + TrackParticleCollection = "InDetTrackParticles", + V0VertexFitterTool = BPHY14_VertexTools.TrkV0Fitter, # V0 vertex fitter + useV0Fitter = False, # if False a TrkVertexFitterTool will be used + TrkVertexFitterTool = BPHY14_VertexTools.TrkVKalVrtFitter, # VKalVrt vertex fitter + TrackSelectorTool = BPHY14_VertexTools.InDetTrackSelectorTool, + VertexPointEstimator = BPHY14_VertexTools.VtxPointEstimator, + useMCPCuts = False ) + +ToolSvc += BPHY14JpsiFinder +print(BPHY14JpsiFinder) + 
+#-------------------------------------------------------------------- +## 3/ setup the vertex reconstruction "call" tool(s). They are part of the derivation framework. +## These Augmentation tools add output vertex collection(s) into the StoreGate and add basic +## decorations which do not depend on the vertex mass hypothesis (e.g. lxy, ptError, etc). +## There should be one tool per topology, i.e. Jpsi and Psi(2S) do not need two instance of the +## Reco tool is the JpsiFinder mass window is wide enough. + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Reco_Vertex +BPHY14_Reco_mumu = DerivationFramework__Reco_Vertex( + name = "BPHY14_Reco_mumu", + VertexSearchTool = BPHY14JpsiFinder, + OutputVtxContainerName = "BPHY14OniaCandidates", + PVContainerName = "PrimaryVertices", + RefPVContainerName = "BPHY14RefittedPrimaryVertices", + RefitPV = True, + MaxPVrefit = 100000, + DoVertexType = 7) + +ToolSvc += BPHY14_Reco_mumu +print(BPHY14_Reco_mumu) + +#-------------------------------------------------------------------- +## 4/ setup the vertex selection and augmentation tool(s). These tools decorate the vertices with +## variables that depend on the vertex mass hypothesis, e.g. invariant mass, proper decay time, etc. +## Property HypothesisName is used as a prefix for these decorations. +## They also perform tighter selection, flagging the vertecis that passed. The flag is a Char_t branch +## named "passed_"+HypothesisName. It is used later by the "SelectEvent" and "Thin_vtxTrk" tools +## to determine which events and candidates should be kept in the output stream. +## Multiple instances of the Select_* tools can be used on a single input collection as long as they +## use different "HypothesisName" flags. 
+ +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Select_onia2mumu + +## a/ augment and select Jpsi->mumu candidates +BPHY14_Select_Jpsi2mumu = DerivationFramework__Select_onia2mumu( + name = "BPHY14_Select_Jpsi2mumu", + HypothesisName = "Jpsi", + InputVtxContainerName = "BPHY14OniaCandidates", + VtxMassHypo = 3096.916, + MassMin = 2000.0, + MassMax = 3600.0, + Chi2Max = 200, + DoVertexType = 7) + +ToolSvc += BPHY14_Select_Jpsi2mumu +print(BPHY14_Select_Jpsi2mumu) + +## b/ augment and select Psi(2S)->mumu candidates +BPHY14_Select_Psi2mumu = DerivationFramework__Select_onia2mumu( + name = "BPHY14_Select_Psi2mumu", + HypothesisName = "Psi", + InputVtxContainerName = "BPHY14OniaCandidates", + VtxMassHypo = 3686.09, + MassMin = 3300.0, + MassMax = 4500.0, + Chi2Max = 200, + DoVertexType = 7) + +ToolSvc += BPHY14_Select_Psi2mumu +print(BPHY14_Select_Psi2mumu) + +# Added by ASC +## c/ augment and select Upsilon(nS)->mumu candidates +BPHY14_Select_Upsi2mumu = DerivationFramework__Select_onia2mumu( + name = "BPHY14_Select_Upsi2mumu", + HypothesisName = "Upsi", + InputVtxContainerName = "BPHY14OniaCandidates", + VtxMassHypo = 9460.30, + MassMin = 7000.0, + MassMax = 12500.0, + Chi2Max = 200, + DoVertexType = 7) + +ToolSvc += BPHY14_Select_Upsi2mumu +print(BPHY14_Select_Upsi2mumu) + +#-------------------------------------------------------------------- +## 5/ select the event. We only want to keep events that contain certain vertices which passed certain selection. +## This is specified by the "SelectionExpression" property, which contains the expression in the following format: +## +## "ContainerName.passed_HypoName > count" +## +## where "ContainerName" is output container form some Reco_* tool, "HypoName" is the hypothesis name setup in some "Select_*" +## tool and "count" is the number of candidates passing the selection you want to keep. 
+ +#==================================================================== +# Photon things +#==================================================================== +from DerivationFrameworkCore.DerivationFrameworkMaster import * +from DerivationFrameworkInDet.InDetCommon import * +from DerivationFrameworkMuons.MuonsCommon import * +from DerivationFrameworkJetEtMiss.JetCommon import * +from DerivationFrameworkJetEtMiss.METCommon import * +from DerivationFrameworkEGamma.EGammaCommon import * + +#photonRequirements = '(DFCommonPhotons_et >= 5*GeV) && (abs(DFCommonPhotons_eta) < 2.6)'# && (Photons.Loose)' +photonRequirements = 'DFCommonPhotons_et > 5*GeV' + + +expression = "(count(BPHY14OniaCandidates.passed_Jpsi) > 0 || count(BPHY14OniaCandidates.passed_Psi) > 0 || count(BPHY14OniaCandidates.passed_Upsi) > 0) && count("+photonRequirements+") >0" +from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__xAODStringSkimmingTool +BPHY14_SelectEvent = DerivationFramework__xAODStringSkimmingTool(name = "BPHY14_SelectEvent", + expression = expression) +ToolSvc += BPHY14_SelectEvent +print(BPHY14_SelectEvent) + +#-------------------------------------------------------------------- +## 6/ track and vertex thinning. We want to remove all reconstructed secondary vertices +## which hasn't passed any of the selections defined by (Select_*) tools. +## We also want to keep only tracks which are associates with either muons or any of the +## vertices that passed the selection. Multiple thinning tools can perform the +## selection. The final thinning decision is based OR of all the decisions (by default, +## although it can be changed by the JO). + +## a) thining out vertices that didn't pass any selection and idetifying tracks associated with +## selected vertices. The "VertexContainerNames" is a list of the vertex containers, and "PassFlags" +## contains all pass flags for Select_* tools that must be satisfied. 
The vertex is kept is it +## satisfy any of the listed selections. + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Thin_vtxTrk +BPHY14Thin_vtxTrk = DerivationFramework__Thin_vtxTrk( + name = "BPHY14Thin_vtxTrk", + TrackParticleContainerName = "InDetTrackParticles", + VertexContainerNames = ["BPHY14OniaCandidates"], + PassFlags = ["passed_Jpsi", "passed_Psi", "passed_Upsi"] ) + +ToolSvc += BPHY14Thin_vtxTrk + +## b) thinning out tracks that are not attached to muons. The final thinning decision is based on the OR operation +## between decision from this and the previous tools. +from DerivationFrameworkInDet.DerivationFrameworkInDetConf import DerivationFramework__MuonTrackParticleThinning +BPHY14MuonTPThinningTool = DerivationFramework__MuonTrackParticleThinning(name = "BPHY14MuonTPThinningTool", + MuonKey = "Muons", + InDetTrackParticlesKey = "InDetTrackParticles") +ToolSvc += BPHY14MuonTPThinningTool + + +# Tracks associated with Photons +from DerivationFrameworkInDet.DerivationFrameworkInDetConf import DerivationFramework__EgammaTrackParticleThinning +BPHY14PhotonTPThinningTool = DerivationFramework__EgammaTrackParticleThinning(name = "BPHY14PhotonTPThinningTool", + SGKey = "Photons", + GSFTrackParticlesKey = "GSFTrackParticles", + InDetTrackParticlesKey = "InDetTrackParticles", + SelectionString = photonRequirements, + BestMatchOnly = False, + ConeSize = 0.6, + ApplyAnd = False) +ToolSvc += BPHY14PhotonTPThinningTool + + + + +# Added by ASC +# Only save truth informtion directly associated with Onia +from DerivationFrameworkMCTruth.DerivationFrameworkMCTruthConf import DerivationFramework__GenericTruthThinning +BPHY14TruthThinTool = DerivationFramework__GenericTruthThinning(name = "BPHY14TruthThinTool", + ParticleSelectionString = "TruthParticles.pdgId == 22 || TruthParticles.pdgId == 443 || TruthParticles.pdgId == 100443 || TruthParticles.pdgId == 553 || TruthParticles.pdgId == 100553 || TruthParticles.pdgId == 200553", 
+ PreserveDescendants = True, + PreserveAncestors = True) +ToolSvc += BPHY14TruthThinTool +print(BPHY14TruthThinTool) + + +#==================================================================== +# CREATE THE DERIVATION KERNEL ALGORITHM AND PASS THE ABOVE TOOLS +#==================================================================== +## 7/ IMPORTANT bit. Don't forget to pass the tools to the DerivationKernel! If you don't do that, they will not be +## be executed! + +# Added by ASC +BPHY14ThinningTools = [BPHY14Thin_vtxTrk, BPHY14MuonTPThinningTool,BPHY14PhotonTPThinningTool] +if globalflags.DataSource()=='geant4': + BPHY14ThinningTools.append(BPHY14TruthThinTool) + +# The name of the kernel (BPHY14Kernel in this case) must be unique to this derivation +from DerivationFrameworkCore.DerivationFrameworkCoreConf import DerivationFramework__DerivationKernel +DerivationFrameworkJob += CfgMgr.DerivationFramework__DerivationKernel( + "BPHY14Kernel", + AugmentationTools = [BPHY14_Reco_mumu, BPHY14_Select_Jpsi2mumu, BPHY14_Select_Psi2mumu, BPHY14_Select_Upsi2mumu], + SkimmingTools = [BPHY14_SelectEvent], + ThinningTools = BPHY14ThinningTools + ) + +#==================================================================== +# SET UP STREAM +#==================================================================== +streamName = derivationFlags.WriteDAOD_BPHY14Stream.StreamName +fileName = buildFileName( derivationFlags.WriteDAOD_BPHY14Stream ) +BPHY14Stream = MSMgr.NewPoolRootStream( streamName, fileName ) +BPHY14Stream.AcceptAlgs(["BPHY14Kernel"]) +# Special lines for thinning +# Thinning service name must match the one passed to the thinning tools +from AthenaServices.Configurables import ThinningSvc, createThinningSvc +augStream = MSMgr.GetStream( streamName ) +evtStream = augStream.GetEventStream() +svcMgr += createThinningSvc( svcName="BPHY14ThinningSvc", outStreams=[evtStream] ) + + +#==================================================================== +# Slimming 
+#==================================================================== + +# Added by ASC +from DerivationFrameworkCore.SlimmingHelper import SlimmingHelper +BPHY14SlimmingHelper = SlimmingHelper("BPHY14SlimmingHelper") +BPHY14_AllVariables = [] +BPHY14_StaticContent = [] +BPHY14_SmartCollections = [] +BPHY14_ExtraVariables = [] + +# Needed for trigger objects +BPHY14SlimmingHelper.IncludeMuonTriggerContent = True +BPHY14SlimmingHelper.IncludeBPhysTriggerContent = True +BPHY14SlimmingHelper.IncludeEGammaTriggerContent = True + +## primary vertices +BPHY14_SmartCollections += ["PrimaryVertices"] +BPHY14_StaticContent += ["xAOD::VertexContainer#BPHY14RefittedPrimaryVertices"] +BPHY14_StaticContent += ["xAOD::VertexAuxContainer#BPHY14RefittedPrimaryVerticesAux."] + +## ID track particles +BPHY14_SmartCollections += ["InDetTrackParticles"] +BPHY14_ExtraVariables += ["%s.vx.vy.vz" % "InDetTrackParticles"] +#BPHY14_AllVariables += ["InDetTrackParticles"] + + +## combined / extrapolated muon track particles +## (note: for tagged muons there is no extra TrackParticle collection since the ID tracks +## are store in InDetTrackParticles collection) +BPHY14_AllVariables += ["CombinedMuonTrackParticles"] +BPHY14_AllVariables += ["ExtrapolatedMuonTrackParticles"] + +## muon container +BPHY14_SmartCollections += ["Muons"] +BPHY14_ExtraVariables += ["%s.etcone30.etcone40" % "Muons" + +".momentumBalanceSignificance" + +".scatteringCurvatureSignificance" + +".scatteringNeighbourSignificance" + +".msInnerMatchDOF.msInnerMatchChi2" + +".msOuterMatchDOF.msOuterMatchChi2" + +".EnergyLoss.ParamEnergyLoss.MeasEnergyLoss" + +".ET_Core" ] +#BPHY14_AllVariables += ["Muons"] + +## Jpsi candidates +BPHY14_StaticContent += ["xAOD::VertexContainer#%s" % BPHY14_Reco_mumu.OutputVtxContainerName] +BPHY14_StaticContent += ["xAOD::VertexAuxContainer#%sAux." 
% BPHY14_Reco_mumu.OutputVtxContainerName] +## we have to disable vxTrackAtVertex branch since it is not xAOD compatible +BPHY14_StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY14_Reco_mumu.OutputVtxContainerName] + +# Truth information for MC only +if isSimulation: + BPHY14_StaticContent += ["xAOD::TruthParticleContainer#TruthMuons","xAOD::TruthParticleAuxContainer#TruthMuonsAux."] + BPHY14_StaticContent += ["xAOD::TruthParticleContainer#TruthPhotons","xAOD::TruthParticleAuxContainer#TruthPhotonsAux."] + BPHY14_AllVariables += ["TruthEvents","TruthParticles","TruthVertices","MuonTruthParticles"] + +#Photon Information +#AllVariables += ["Photons"] +BPHY14_SmartCollections += ["Photons"] #,"Muons","InDetTrackParticles","PrimaryVertices"] +from DerivationFrameworkSM.STDMExtraContent import * +BPHY14_ExtraVariables.extend(ExtraContentPhotons) + + +BPHY14SlimmingHelper.AllVariables = BPHY14_AllVariables +BPHY14SlimmingHelper.StaticContent = BPHY14_StaticContent +BPHY14SlimmingHelper.SmartCollections = BPHY14_SmartCollections +BPHY14SlimmingHelper.ExtraVariables = BPHY14_ExtraVariables +BPHY14SlimmingHelper.AppendContentToStream(BPHY14Stream) + diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY15.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY15.py new file mode 100644 index 0000000000000000000000000000000000000000..be74b9f1394794d40ed252fa0cc3bc73a4715855 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY15.py @@ -0,0 +1,855 @@ +#2018/11/24 +#==================================================================== +# BPHY15.py +# Bc+>J/psiD_s+, Bc+>J/psiD+, Bc+>J/psiD*+, Bc+>J/psiD_s1+ +# It requires the reductionConf flag BPHY15 in Reco_tf.py +#==================================================================== + +# Set up common services and job object. 
+# This should appear in ALL derivation job options +from DerivationFrameworkCore.DerivationFrameworkMaster import * + +isSimulation = False +if globalflags.DataSource()=='geant4': + isSimulation = True + +print(isSimulation) + + +#==================================================================== +# AUGMENTATION TOOLS +#==================================================================== +## 1/ setup vertexing tools and services +include("DerivationFrameworkBPhys/configureVertexing.py") +BPHY15_VertexTools = BPHYVertexTools("BPHY15") + + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__AugOriginalCounts +BPHY15_AugOriginalCounts = DerivationFramework__AugOriginalCounts( + name = "BPHY15_AugOriginalCounts", + VertexContainer = "PrimaryVertices", + TrackContainer = "InDetTrackParticles" ) +ToolSvc += BPHY15_AugOriginalCounts + + +#-------------------------------------------------------------------- +# 2/ Select J/psi>mu+mu- +#-------------------------------------------------------------------- +## a/ setup JpsiFinder tool +## These are general tools independent of DerivationFramework that do the +## actual vertex fitting and some pre-selection. 
+from JpsiUpsilonTools.JpsiUpsilonToolsConf import Analysis__JpsiFinder +BPHY15JpsiFinder = Analysis__JpsiFinder( + name = "BPHY15JpsiFinder", + OutputLevel = INFO, + muAndMu = True, + muAndTrack = False, + TrackAndTrack = False, + assumeDiMuons = True, + muonThresholdPt = 2700, + invMassUpper = 3400.0, + invMassLower = 2800.0, + Chi2Cut = 10., + oppChargesOnly = True, + allMuons = True, + combOnly = False, + atLeastOneComb = False, + useCombinedMeasurement = False, # Only takes effect if combOnly=True + muonCollectionKey = "Muons", + TrackParticleCollection = "InDetTrackParticles", + V0VertexFitterTool = BPHY15_VertexTools.TrkV0Fitter, # V0 vertex fitter + useV0Fitter = False, # if False a TrkVertexFitterTool will be used + TrkVertexFitterTool = BPHY15_VertexTools.TrkVKalVrtFitter, # VKalVrt vertex fitter + TrackSelectorTool = BPHY15_VertexTools.InDetTrackSelectorTool, + VertexPointEstimator = BPHY15_VertexTools.VtxPointEstimator, + useMCPCuts = False) + +ToolSvc += BPHY15JpsiFinder +print(BPHY15JpsiFinder) + +#-------------------------------------------------------------------- +## b/ setup the vertex reconstruction "call" tool(s). They are part of the derivation framework. +## These Augmentation tools add output vertex collection(s) into the StoreGate and add basic +## decorations which do not depend on the vertex mass hypothesis (e.g. lxy, ptError, etc). +## There should be one tool per topology, i.e. Jpsi and Psi(2S) do not need two instance of the +## Reco tool is the JpsiFinder mass window is wide enough. 
+from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Reco_Vertex +BPHY15JpsiSelectAndWrite = DerivationFramework__Reco_Vertex( + name = "BPHY15JpsiSelectAndWrite", + VertexSearchTool = BPHY15JpsiFinder, + OutputVtxContainerName = "BPHY15JpsiCandidates", + PVContainerName = "PrimaryVertices", + RefPVContainerName = "SHOULDNOTBEUSED", + DoVertexType = 1) + +ToolSvc += BPHY15JpsiSelectAndWrite +print(BPHY15JpsiSelectAndWrite) + +#-------------------------------------------------------------------- +## c/ augment and select Jpsi->mumu candidates +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Select_onia2mumu +BPHY15_Select_Jpsi2mumu = DerivationFramework__Select_onia2mumu( + name = "BPHY15_Select_Jpsi2mumu", + HypothesisName = "Jpsi", + InputVtxContainerName = "BPHY15JpsiCandidates", + VtxMassHypo = 3096.900, + MassMin = 2600.0, + MassMax = 3600.0, + Chi2Max = 200, + LxyMin = 0.1, + DoVertexType = 1) + +ToolSvc += BPHY15_Select_Jpsi2mumu +print(BPHY15_Select_Jpsi2mumu) + +#-------------------------------------------------------------------- +# 3/ select B_c+->J/psi pi+ +#-------------------------------------------------------------------- +## a/ setup a new vertexing tool (necessary due to use of mass constraint) +from TrkVKalVrtFitter.TrkVKalVrtFitterConf import Trk__TrkVKalVrtFitter +BcJpsipiVertexFit = Trk__TrkVKalVrtFitter( + name = "BcJpsipiVertexFit", + Extrapolator = BPHY15_VertexTools.InDetExtrapolator, + FirstMeasuredPoint = True, + MakeExtendedVertex = True) + +ToolSvc += BcJpsipiVertexFit +print(BcJpsipiVertexFit) + +#-------------------------------------------------------------------- +## b/ setup the Jpsi+1 track finder +from JpsiUpsilonTools.JpsiUpsilonToolsConf import Analysis__JpsiPlus1Track +BPHY15BcJpsipi = Analysis__JpsiPlus1Track( + name = "BPHY15BcJpsipi", + OutputLevel = INFO, #DEBUG, + pionHypothesis = True, #False, + kaonHypothesis = False,#True, + trkThresholdPt = 2700, 
+ trkMaxEta = 2.7, + BThresholdPt = 100.0, + BMassUpper = 6900.0, + BMassLower = 5600.0, + JpsiContainerKey = "BPHY15JpsiCandidates", + TrackParticleCollection = "InDetTrackParticles", + MuonsUsedInJpsi = "Muons", + TrkVertexFitterTool = BcJpsipiVertexFit, + TrackSelectorTool = BPHY15_VertexTools.InDetTrackSelectorTool, + UseMassConstraint = True, + Chi2Cut = 5, + TrkTrippletMassUpper = 6900, + TrkTrippletMassLower = 5600) + +ToolSvc += BPHY15BcJpsipi +print(BPHY15BcJpsipi) + +#-------------------------------------------------------------------- +## c/ setup the combined augmentation/skimming tool for the Bc+>J/psi pi+ +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Reco_Vertex +BPHY15BcJpsipiSelectAndWrite = DerivationFramework__Reco_Vertex( + name = "BPHY15BcJpsipiSelectAndWrite", + Jpsi1PlusTrackName = BPHY15BcJpsipi, + OutputVtxContainerName = "BPHY15BcJpsipiCandidates", + PVContainerName = "PrimaryVertices", + RefPVContainerName = "BPHY15RefittedPrimaryVertices", + RefitPV = True, + MaxPVrefit = 1000) + +ToolSvc += BPHY15BcJpsipiSelectAndWrite +print(BPHY15BcJpsipiSelectAndWrite) + +#-------------------------------------------------------------------- +## d/ augment and select B_c+>Jpsi pi+ candidates +BPHY15_Select_Bc2Jpsipi = DerivationFramework__Select_onia2mumu( + name = "BPHY15_Select_Bc2Jpsipi", + HypothesisName = "Bc", + InputVtxContainerName = "BPHY15BcJpsipiCandidates", + TrkMasses = [105.658, 105.658, 139.571], + VtxMassHypo = 6274.9, + MassMin = 5600.0, + MassMax = 6900.0, + Chi2Max = 200) + +ToolSvc += BPHY15_Select_Bc2Jpsipi +print(BPHY15_Select_Bc2Jpsipi) + +#-------------------------------------------------------------------- +# 4/ select J/psi pi+ +#-------------------------------------------------------------------- +## a/ setup the Jpsi+1 track finder +BPHY15JpsipiFinder = Analysis__JpsiPlus1Track( + name = "BPHY15JpsipiFinder", + OutputLevel = INFO, #DEBUG, + pionHypothesis = True, #False, + 
kaonHypothesis = False,#True, + trkThresholdPt = 350.0, + trkMaxEta = 2.7, + BThresholdPt = 5000.0, + BMassUpper = 3600.0, + BMassLower = 3200.0, + TrkDeltaZ = 20., + TrkQuadrupletPt = 5000, + JpsiContainerKey = "BPHY15JpsiCandidates", + TrackParticleCollection = "InDetTrackParticles", + MuonsUsedInJpsi = "Muons", + TrkVertexFitterTool = BcJpsipiVertexFit, + TrackSelectorTool = BPHY15_VertexTools.InDetTrackSelectorTool, + UseMassConstraint = True, + Chi2Cut = 5, + TrkTrippletMassUpper = 3600, + TrkTrippletMassLower = 3200) + +ToolSvc += BPHY15JpsipiFinder +print(BPHY15JpsipiFinder) + +#-------------------------------------------------------------------- +## b/ setup the combined augmentation/skimming tool for J/psi pi+ +BPHY15JpsipiSelectAndWrite = DerivationFramework__Reco_Vertex( + name = "BPHY15JpsipiSelectAndWrite", + Jpsi1PlusTrackName = BPHY15JpsipiFinder, + OutputVtxContainerName = "BPHY15JpsipiCandidates", + PVContainerName = "PrimaryVertices", + RefPVContainerName = "SHOULDNOTBEUSED", + #RefitPV = True, + MaxPVrefit = 1000) + +ToolSvc += BPHY15JpsipiSelectAndWrite +print(BPHY15JpsipiSelectAndWrite) + +#-------------------------------------------------------------------- +## c/ augment and select Jpsi pi+ candidates for the J/psi D*+ and J/psi D_s1+ modes +BPHY15_Select_Jpsipi = DerivationFramework__Select_onia2mumu( + name = "BPHY15_Select_Jpsipi", + HypothesisName = "Jpsipi", + TrkMasses = [105.658, 105.658, 139.571], + InputVtxContainerName = "BPHY15JpsipiCandidates", + VtxMassHypo = 3396.900, + MassMin = 3200.0, + MassMax = 3600.0, + Chi2Max = 200, + LxyMin = 0.1, + DoVertexType = 1) + +ToolSvc += BPHY15_Select_Jpsipi +print(BPHY15_Select_Jpsipi) + +#-------------------------------------------------------------------- +# 5/ Select K+K-, pi+K- and K+pi- +#-------------------------------------------------------------------- +## a/ Setup the vertex fitter tools +BPHY15DiTrkFinder = Analysis__JpsiFinder( + name = "BPHY15DiTrkFinder", + OutputLevel = INFO, + 
muAndMu = False, + muAndTrack = False, + TrackAndTrack = True, + assumeDiMuons = False, # If true, will assume dimu hypothesis and use PDG value for mu mass + trackThresholdPt = 900, + invMassUpper = 1900.0, + invMassLower = 280.0, + Chi2Cut = 10., + oppChargesOnly = True, + atLeastOneComb = False, + useCombinedMeasurement = False, # Only takes effect if combOnly=True + muonCollectionKey = "Muons", + TrackParticleCollection = "InDetTrackParticles", + V0VertexFitterTool = BPHY15_VertexTools.TrkV0Fitter, # V0 vertex fitter + useV0Fitter = False, # if False a TrkVertexFitterTool will be used + TrkVertexFitterTool = BPHY15_VertexTools.TrkVKalVrtFitter, # VKalVrt vertex fitter + TrackSelectorTool = BPHY15_VertexTools.InDetTrackSelectorTool, + VertexPointEstimator = BPHY15_VertexTools.VtxPointEstimator, + useMCPCuts = False, + track1Mass = 139.571, # Not very important, only used to calculate inv. mass cut, leave it loose here + track2Mass = 139.571) + +ToolSvc += BPHY15DiTrkFinder +print(BPHY15DiTrkFinder) + +#-------------------------------------------------------------------- +## b/ setup the vertex reconstruction "call" tool(s). 
+BPHY15DiTrkSelectAndWrite = DerivationFramework__Reco_Vertex( + name = "BPHY15DiTrkSelectAndWrite", + VertexSearchTool = BPHY15DiTrkFinder, + OutputVtxContainerName = "BPHY15DiTrkCandidates", + PVContainerName = "PrimaryVertices", + RefPVContainerName = "SHOULDNOTBEUSED", + CheckCollections = True, + CheckVertexContainers = ['BPHY15JpsiCandidates'], + DoVertexType = 1) + +ToolSvc += BPHY15DiTrkSelectAndWrite +print(BPHY15DiTrkSelectAndWrite) + +#-------------------------------------------------------------------- +## c/ augment and select D0 candidates +BPHY15_Select_D0 = DerivationFramework__Select_onia2mumu( + name = "BPHY15_Select_D0", + HypothesisName = "D0", + InputVtxContainerName = "BPHY15DiTrkCandidates", + TrkMasses = [139.571, 493.677], + VtxMassHypo = 1864.83, + MassMin = 1864.83-170, + MassMax = 1864.83+170, + LxyMin = 0.15, + Chi2Max = 200) + +ToolSvc += BPHY15_Select_D0 +print(BPHY15_Select_D0) + +#-------------------------------------------------------------------- +## d/ augment and select D0bar candidates +BPHY15_Select_D0b = DerivationFramework__Select_onia2mumu( + name = "BPHY15_Select_D0b", + HypothesisName = "D0b", + InputVtxContainerName = "BPHY15DiTrkCandidates", + TrkMasses = [493.677, 139.571], + VtxMassHypo = 1864.83, + MassMin = 1864.83-170, + MassMax = 1864.83+170, + LxyMin = 0.15, + Chi2Max = 200) + +ToolSvc += BPHY15_Select_D0b +print(BPHY15_Select_D0b) + +#-------------------------------------------------------------------- +# 6/ select D_s+>K+K-pi+ and D+>K+pi-pi- candidates +#-------------------------------------------------------------------- +## a/ setup a new vertexing tool (necessary due to use of mass constraint) +Dh3VertexFit = Trk__TrkVKalVrtFitter( + name = "Dh3VertexFit", + Extrapolator = BPHY15_VertexTools.InDetExtrapolator, + FirstMeasuredPoint = True, + MakeExtendedVertex = True) + +ToolSvc += Dh3VertexFit +print(Dh3VertexFit) + +#-------------------------------------------------------------------- +## b/ setup the 
Jpsi+1 track finder +BPHY15Dh3Finder = Analysis__JpsiPlus1Track( + name = "BPHY15Dh3Finder", + OutputLevel = INFO, + pionHypothesis = True, + kaonHypothesis = False, + trkThresholdPt = 900.0, + trkMaxEta = 2.7, # is this value fine?? default would be 102.5 + BThresholdPt = 2000.0, + #BThresholdPt = 3000.0, + BMassUpper = 1800.0, # What is this?? + BMassLower = 500.0, + TrkDeltaZ = 20., + TrkTrippletMassUpper = 1800, + TrkTrippletMassLower = 500, + TrkQuadrupletPt = 2000, + #TrkQuadrupletPt = 3000, + JpsiContainerKey = "BPHY15DiTrkCandidates", + TrackParticleCollection = "InDetTrackParticles", + MuonsUsedInJpsi = "NONE", # ? + ExcludeCrossJpsiTracks = False, + TrkVertexFitterTool = Dh3VertexFit, + TrackSelectorTool = BPHY15_VertexTools.InDetTrackSelectorTool, + UseMassConstraint = False, + Chi2Cut = 5) #Cut on chi2/Ndeg_of_freedom + +ToolSvc += BPHY15Dh3Finder +print(BPHY15Dh3Finder) + +#-------------------------------------------------------------------- +## c/ setup the combined augmentation/skimming tool for the D(s)+ +BPHY15Dh3SelectAndWrite = DerivationFramework__Reco_Vertex( + name = "BPHY15Dh3SelectAndWrite", + OutputLevel = INFO, + Jpsi1PlusTrackName = BPHY15Dh3Finder, + OutputVtxContainerName = "BPHY15Dh3Candidates", + PVContainerName = "PrimaryVertices", + RefPVContainerName = "SHOULDNOTBEUSED", + MaxPVrefit = 1000) + +ToolSvc += BPHY15Dh3SelectAndWrite +print(BPHY15Dh3SelectAndWrite) + + +#-------------------------------------------------------------------- +## d/ augment and select D_s+/- candidates +BPHY15_Select_Ds = DerivationFramework__Select_onia2mumu( + name = "BPHY15_Select_Ds", + HypothesisName = "Ds", + TrkMasses = [493.677, 493.677, 139.571], + InputVtxContainerName = "BPHY15Dh3Candidates", + VtxMassHypo = 1968.28, + MassMin = 1968.28-200, + MassMax = 1968.28+200, + Chi2Max = 200, + LxyMin = 0.1, + DoVertexType = 1) + +ToolSvc += BPHY15_Select_Ds +print(BPHY15_Select_Ds) + +#-------------------------------------------------------------------- 
+## e/ augment and select D+ candidates +BPHY15_Select_Dp = DerivationFramework__Select_onia2mumu( + name = "BPHY15_Select_Dp", + HypothesisName = "Dp", + TrkMasses = [139.571, 493.677, 139.571], + InputVtxContainerName = "BPHY15Dh3Candidates", + VtxMassHypo = 1869.59, + MassMin = 1869.59-200, + MassMax = 1869.59+200, + Chi2Max = 200, + LxyMin = 0.1, + DoVertexType = 1) + +ToolSvc += BPHY15_Select_Dp +print(BPHY15_Select_Dp) + +#-------------------------------------------------------------------- +## c/ augment and select D- candidates +BPHY15_Select_Dm = DerivationFramework__Select_onia2mumu( + name = "BPHY15_Select_Dm", + HypothesisName = "Dm", + TrkMasses = [493.677, 139.571, 139.571], + InputVtxContainerName = "BPHY15Dh3Candidates", + VtxMassHypo = 1869.59, + MassMin = 1869.59-200, + MassMax = 1869.59+200, + Chi2Max = 200, + LxyMin = 0.1, + DoVertexType = 1) + +ToolSvc += BPHY15_Select_Dm +print(BPHY15_Select_Dm) + + +#-------------------------------------------------------------------- +# 7/ select Bc+>J/psi D_(s)+/- +#-------------------------------------------------------------------- +## a/ setup the cascade vertexing tool +BcJpsiDxVertexFit = Trk__TrkVKalVrtFitter( + name = "BcJpsiDxVertexFit", + Extrapolator = BPHY15_VertexTools.InDetExtrapolator, + #FirstMeasuredPoint = True, + FirstMeasuredPoint = False, + CascadeCnstPrecision = 1e-6, + MakeExtendedVertex = True) + +ToolSvc += BcJpsiDxVertexFit +print(BcJpsiDxVertexFit) + +#-------------------------------------------------------------------- +## b/ setup the Jpsi Ds finder +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__JpsiPlusDsCascade +BPHY15JpsiDs = DerivationFramework__JpsiPlusDsCascade( + name = "BPHY15JpsiDs", + HypothesisName = "Bc", + TrkVertexFitterTool = BcJpsiDxVertexFit, + DxHypothesis = 431, + ApplyDxMassConstraint = True, + ApplyJpsiMassConstraint = True, + JpsiMassLowerCut = 2600., + JpsiMassUpperCut = 3600., + DxMassLowerCut = 1968.28 - 200., + 
DxMassUpperCut = 1968.28 + 200., + MassLowerCut = 6274.90 - 600., + MassUpperCut = 6274.90 + 600., + Chi2Cut = 10, + RefitPV = True, + RefPVContainerName = "BPHY15RefittedPrimaryVertices", + JpsiVertices = "BPHY15JpsiCandidates", + CascadeVertexCollections = ["BcJpsiDsCascadeSV2", "BcJpsiDsCascadeSV1"], + DxVertices = "BPHY15Dh3Candidates") + +ToolSvc += BPHY15JpsiDs +print(BPHY15JpsiDs) + +#-------------------------------------------------------------------- +## c/ setup the Jpsi D+ finder +BPHY15JpsiDp = DerivationFramework__JpsiPlusDsCascade( + name = "BPHY15JpsiDp", + HypothesisName = "Bc", + TrkVertexFitterTool = BcJpsiDxVertexFit, + DxHypothesis = 411, + ApplyDxMassConstraint = True, + ApplyJpsiMassConstraint = True, + JpsiMassLowerCut = 2600., + JpsiMassUpperCut = 3600., + DxMassLowerCut = 1869.59 - 180., + DxMassUpperCut = 1869.59 + 180., + MassLowerCut = 6274.90 - 600., + MassUpperCut = 6274.90 + 600., + Chi2Cut = 10, + RefitPV = True, + RefPVContainerName = "BPHY15RefittedPrimaryVertices", + JpsiVertices = "BPHY15JpsiCandidates", + CascadeVertexCollections = ["BcJpsiDpCascadeSV2", "BcJpsiDpCascadeSV1"], + DxVertices = "BPHY15Dh3Candidates") + +ToolSvc += BPHY15JpsiDp +print(BPHY15JpsiDp) + +#-------------------------------------------------------------------- +# 8/ select Bc+>J/psi D*+/- +#-------------------------------------------------------------------- +## a/ setup the cascade vertexing tool +BcJpsiDstVertexFit = Trk__TrkVKalVrtFitter( + name = "BcJpsiDstVertexFit", + Extrapolator = BPHY15_VertexTools.InDetExtrapolator, + #FirstMeasuredPoint = True, + FirstMeasuredPoint = False, + CascadeCnstPrecision = 1e-6, + MakeExtendedVertex = True) + +ToolSvc += BcJpsiDstVertexFit +print(BcJpsiDstVertexFit) + +#-------------------------------------------------------------------- +## b/ setup Jpsi D*+ finder +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__JpsiPlusDpstCascade +BPHY15JpsiDpst = 
DerivationFramework__JpsiPlusDpstCascade( + name = "BPHY15JpsiDpst", + HypothesisName = "Bc", + TrkVertexFitterTool = BcJpsiDstVertexFit, + DxHypothesis = 421, + ApplyD0MassConstraint = True, + ApplyJpsiMassConstraint = True, + JpsiMassLowerCut = 2600., + JpsiMassUpperCut = 3600., + JpsipiMassLowerCut = 2600., + JpsipiMassUpperCut = 6800., + D0MassLowerCut = 1864.83 - 200., + D0MassUpperCut = 1864.83 + 200., + DstMassLowerCut = 2010.26 - 300., + DstMassUpperCut = 2010.26 + 300., + MassLowerCut = 5400, + MassUpperCut = 6274.90 + 600., + Chi2Cut = 10, + RefitPV = True, + RefPVContainerName = "BPHY15RefittedPrimaryVertices", + JpsipiVertices = "BPHY15JpsipiCandidates", + CascadeVertexCollections = ["BcJpsiDpstCascadeSV2", "BcJpsiDpstCascadeSV1"], + D0Vertices = "BPHY15DiTrkCandidates") + +ToolSvc += BPHY15JpsiDpst +print(BPHY15JpsiDpst) + + + +#-------------------------------------------------------------------- +# 9/ select K_S0>pi+pi- +#-------------------------------------------------------------------- + +include("DerivationFrameworkBPhys/configureV0Finder.py") +BPHY15_K0FinderTools = BPHYV0FinderTools("BPHY15") +print(BPHY15_K0FinderTools) + +## a/ Setup the vertex fitter tools +BPHY15K0Finder = Analysis__JpsiFinder( + name = "BPHY15K0Finder", + OutputLevel = INFO, + muAndMu = False, + muAndTrack = False, + TrackAndTrack = True, + assumeDiMuons = False, # If true, will assume dimu hypothesis and use PDG value for mu mass + trackThresholdPt = 400, + #trackThresholdPt = 500, + invMassUpper = 600.0, + invMassLower = 400.0, + Chi2Cut = 20, + #Chi2Cut = 5., + oppChargesOnly = True, + atLeastOneComb = False, + useCombinedMeasurement = False, # Only takes effect if combOnly=True + muonCollectionKey = "Muons", + TrackParticleCollection = "InDetTrackParticles", + V0VertexFitterTool = BPHY15_VertexTools.TrkV0Fitter, # V0 vertex fitter + useV0Fitter = True, # if False a TrkVertexFitterTool will be used + #useV0Fitter = False, # if False a TrkVertexFitterTool will be used + 
TrkVertexFitterTool = BPHY15_VertexTools.TrkVKalVrtFitter, # VKalVrt vertex fitter + #TrackSelectorTool = BPHY15_VertexTools.InDetTrackSelectorTool, + TrackSelectorTool = BPHY15_K0FinderTools.InDetV0VxTrackSelector, + VertexPointEstimator = BPHY15_K0FinderTools.V0VtxPointEstimator, + #VertexPointEstimator = BPHY15_VertexTools.VtxPointEstimator, + useMCPCuts = False, + track1Mass = 139.571, # Not very important, only used to calculate inv. mass cut, leave it loose here + track2Mass = 139.571) + +ToolSvc += BPHY15K0Finder +print(BPHY15K0Finder) + +#-------------------------------------------------------------------- +## b/ setup the vertex reconstruction "call" tool(s). +BPHY15K0SelectAndWrite = DerivationFramework__Reco_Vertex( + name = "BPHY15K0SelectAndWrite", + VertexSearchTool = BPHY15K0Finder, + OutputVtxContainerName = "BPHY15K0Candidates", + PVContainerName = "PrimaryVertices", + RefPVContainerName = "SHOULDNOTBEUSED", + CheckCollections = True, + CheckVertexContainers = ['BPHY15JpsipiCandidates','BPHY15DiTrkCandidates','BcJpsiDpstCascadeSV1'], + DoVertexType = 1) + +ToolSvc += BPHY15K0SelectAndWrite +print(BPHY15K0SelectAndWrite) + +#-------------------------------------------------------------------- +## c/ augment and select K_S0 candidates +BPHY15_Select_K0 = DerivationFramework__Select_onia2mumu( + name = "BPHY15_Select_K0", + HypothesisName = "K0", + InputVtxContainerName = "BPHY15K0Candidates", + TrkMasses = [139.571, 139.571], + VtxMassHypo = 497.672, + MassMin = 400, + MassMax = 600, + LxyMin = 0.2, + Chi2Max = 200) + +ToolSvc += BPHY15_Select_K0 +print(BPHY15_Select_K0) + +#-------------------------------------------------------------------- +# 10/ select Bc+>J/psi D_s1+/- +#-------------------------------------------------------------------- +## a/ setup the cascade vertexing tool +BcJpsiDs1VertexFit = Trk__TrkVKalVrtFitter( + name = "BcJpsiDs1VertexFit", + Extrapolator = BPHY15_VertexTools.InDetExtrapolator, + #FirstMeasuredPoint = True, + 
FirstMeasuredPoint = False, + CascadeCnstPrecision = 1e-6, + MakeExtendedVertex = True) + +ToolSvc += BcJpsiDs1VertexFit +print(BcJpsiDs1VertexFit) + +#-------------------------------------------------------------------- +## b/ setup Jpsi D_s1+ finder +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__JpsiPlusDs1Cascade +BPHY15JpsiDps1 = DerivationFramework__JpsiPlusDs1Cascade( + name = "BPHY15JpsiDps1", + HypothesisName = "Bc", + TrkVertexFitterTool = BcJpsiDs1VertexFit, + DxHypothesis = 421, + ApplyD0MassConstraint = True, + ApplyK0MassConstraint = True, + ApplyJpsiMassConstraint = True, + JpsiMassLowerCut = 2600., + JpsiMassUpperCut = 3600., + JpsipiMassLowerCut = 2600., + JpsipiMassUpperCut = 6800., + D0MassLowerCut = 1864.83 - 180., + D0MassUpperCut = 1864.83 + 180., + K0MassLowerCut = 400., + K0MassUpperCut = 600., + DstMassLowerCut = 2010.26 - 300., + DstMassUpperCut = 2010.26 + 300., + MassLowerCut = 6274.90 - 600, + MassUpperCut = 6274.90 + 600., + Chi2Cut = 10, + RefitPV = True, + RefPVContainerName = "BPHY15RefittedPrimaryVertices", + JpsipiVertices = "BPHY15JpsipiCandidates", + CascadeVertexCollections = ["BcJpsiDps1CascadeSV3", "BcJpsiDps1CascadeSV2", "BcJpsiDps1CascadeSV1"], + K0Vertices = "BPHY15K0Candidates", + D0Vertices = "BPHY15DiTrkCandidates") + +ToolSvc += BPHY15JpsiDps1 +print(BPHY15JpsiDps1) + +#-------------------------------------------------------------------- + +CascadeCollections = [] + +CascadeCollections += BPHY15JpsiDs.CascadeVertexCollections +CascadeCollections += BPHY15JpsiDp.CascadeVertexCollections + +CascadeCollections += BPHY15JpsiDpst.CascadeVertexCollections +CascadeCollections += BPHY15JpsiDps1.CascadeVertexCollections + +#-------------------------------------------------------------------- + + +if not isSimulation: #Only Skim Data + from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__xAODStringSkimmingTool + BPHY15_SelectBcJpsipiEvent = 
DerivationFramework__xAODStringSkimmingTool( + name = "BPHY15_SelectBcJpsipiEvent", + expression = "( count(BPHY15BcJpsipiCandidates.passed_Bc > 0) + count(BcJpsiDsCascadeSV1.x > -999) + count(BcJpsiDpCascadeSV1.x > -999) + count(BcJpsiDpstCascadeSV1.x > -999) + count(BcJpsiDps1CascadeSV1.x > -999) ) > 0") + + ToolSvc += BPHY15_SelectBcJpsipiEvent + print(BPHY15_SelectBcJpsipiEvent) + + #==================================================================== + # Make event selection based on an OR of the input skimming tools + #==================================================================== + + from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__FilterCombinationOR + BPHY15SkimmingOR = CfgMgr.DerivationFramework__FilterCombinationOR( + "BPHY15SkimmingOR", + FilterList = [BPHY15_SelectBcJpsipiEvent] ) + ToolSvc += BPHY15SkimmingOR + print(BPHY15SkimmingOR) + +#-------------------------------------------------------------------- +##10/ track and vertex thinning. We want to remove all reconstructed secondary vertices +## which haven't passed any of the selections defined by (Select_*) tools. +## We also want to keep only tracks which are associated with either muons or any of the +## vertices that passed the selection. Multiple thinning tools can perform the +## selection. The final thinning decision is based on the OR of all the decisions (by default, +## although it can be changed by the JO). + +## a) thinning out vertices that didn't pass any selection and identifying tracks associated with +## selected vertices. The "VertexContainerNames" is a list of the vertex containers, and "PassFlags" +## contains all pass flags for Select_* tools that must be satisfied. The vertex is kept if it +## satisfies any of the listed selections. 
+from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Thin_vtxTrk +BPHY15_thinningTool_Tracks = DerivationFramework__Thin_vtxTrk( + name = "BPHY15_thinningTool_Tracks", + TrackParticleContainerName = "InDetTrackParticles", + VertexContainerNames = ["BPHY15BcJpsipiCandidates", "BcJpsiDsCascadeSV1", "BcJpsiDsCascadeSV2", "BcJpsiDpCascadeSV1", "BcJpsiDpCascadeSV2", "BcJpsiDpstCascadeSV1", "BcJpsiDpstCascadeSV2", "BcJpsiDps1CascadeSV1", "BcJpsiDps1CascadeSV2", "BcJpsiDps1CascadeSV3"], + PassFlags = ["passed_Bc"]) + +ToolSvc += BPHY15_thinningTool_Tracks +print(BPHY15_thinningTool_Tracks) + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__BPhysPVThinningTool +BPHY15_thinningTool_PV = DerivationFramework__BPhysPVThinningTool( + name = "BPHY15_thinningTool_PV", + CandidateCollections = ["BPHY15BcJpsipiCandidates", "BcJpsiDsCascadeSV1", "BcJpsiDsCascadeSV2", "BcJpsiDpCascadeSV1", "BcJpsiDpCascadeSV2", "BcJpsiDpstCascadeSV1", "BcJpsiDpstCascadeSV2", "BcJpsiDps1CascadeSV1", "BcJpsiDps1CascadeSV2", "BcJpsiDps1CascadeSV3"], + KeepPVTracks = True) + +ToolSvc += BPHY15_thinningTool_PV +print(BPHY15_thinningTool_PV) + +## b) thinning out tracks that are not attached to muons. The final thinning decision is based on the OR operation +## between decision from this and the previous tools. 
+from DerivationFrameworkInDet.DerivationFrameworkInDetConf import DerivationFramework__MuonTrackParticleThinning +BPHY15MuonTPThinningTool = DerivationFramework__MuonTrackParticleThinning( + name = "BPHY15MuonTPThinningTool", + MuonKey = "Muons", + InDetTrackParticlesKey = "InDetTrackParticles") + +ToolSvc += BPHY15MuonTPThinningTool +print(BPHY15MuonTPThinningTool) + +#==================================================================== +# CREATE THE DERIVATION KERNEL ALGORITHM AND PASS THE ABOVE TOOLS +#==================================================================== + +thiningCollection = [] + +print(thiningCollection) + +# The name of the kernel (BPHY15Kernel in this case) must be unique to this derivation +from DerivationFrameworkCore.DerivationFrameworkCoreConf import DerivationFramework__DerivationKernel +DerivationFrameworkJob += CfgMgr.DerivationFramework__DerivationKernel( + "BPHY15Kernel", + AugmentationTools = [BPHY15JpsiSelectAndWrite, BPHY15_Select_Jpsi2mumu, + BPHY15BcJpsipiSelectAndWrite, BPHY15_Select_Bc2Jpsipi, + BPHY15JpsipiSelectAndWrite, BPHY15_Select_Jpsipi, + BPHY15DiTrkSelectAndWrite, BPHY15_Select_D0, BPHY15_Select_D0b, + BPHY15Dh3SelectAndWrite, BPHY15_Select_Ds, BPHY15_Select_Dp, BPHY15_Select_Dm, + BPHY15JpsiDs, + BPHY15JpsiDp, + BPHY15JpsiDpst, + BPHY15K0SelectAndWrite, BPHY15_Select_K0, + BPHY15JpsiDps1, + BPHY15_AugOriginalCounts], + #Only skim if not MC + SkimmingTools = [BPHY15SkimmingOR] if not isSimulation else [], + ThinningTools = thiningCollection + ) + +#==================================================================== +# SET UP STREAM +#==================================================================== +streamName = derivationFlags.WriteDAOD_BPHY15Stream.StreamName +fileName = buildFileName( derivationFlags.WriteDAOD_BPHY15Stream ) +BPHY15Stream = MSMgr.NewPoolRootStream( streamName, fileName ) +BPHY15Stream.AcceptAlgs(["BPHY15Kernel"]) + +# Special lines for thinning +# Thinning service name must match the one 
passed to the thinning tools +from AthenaServices.Configurables import ThinningSvc, createThinningSvc +augStream = MSMgr.GetStream( streamName ) +evtStream = augStream.GetEventStream() + +BPHY15ThinningSvc = createThinningSvc( svcName="BPHY15ThinningSvc", outStreams=[evtStream] ) +svcMgr += BPHY15ThinningSvc + +#==================================================================== +# Slimming +#==================================================================== + +# Added by ASC +from DerivationFrameworkCore.SlimmingHelper import SlimmingHelper +BPHY15SlimmingHelper = SlimmingHelper("BPHY15SlimmingHelper") +AllVariables = [] +StaticContent = [] + +# Needed for trigger objects +BPHY15SlimmingHelper.IncludeMuonTriggerContent = TRUE +BPHY15SlimmingHelper.IncludeBPhysTriggerContent = TRUE + +## primary vertices +AllVariables += ["PrimaryVertices"] +StaticContent += ["xAOD::VertexContainer#BPHY15RefittedPrimaryVertices"] +StaticContent += ["xAOD::VertexAuxContainer#BPHY15RefittedPrimaryVerticesAux."] + +## ID track particles +AllVariables += ["InDetTrackParticles"] + +## combined / extrapolated muon track particles +## (note: for tagged muons there is no extra TrackParticle collection since the ID tracks +## are store in InDetTrackParticles collection) +AllVariables += ["CombinedMuonTrackParticles"] +AllVariables += ["ExtrapolatedMuonTrackParticles"] + +## muon container +AllVariables += ["Muons"] + + +## Jpsi candidates +StaticContent += ["xAOD::VertexContainer#%s" % BPHY15JpsiSelectAndWrite.OutputVtxContainerName] +## we have to disable vxTrackAtVertex branch since it is not xAOD compatible +StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY15JpsiSelectAndWrite.OutputVtxContainerName] + +## Bc+>J/psi pi+ candidates +StaticContent += ["xAOD::VertexContainer#%s" % BPHY15BcJpsipiSelectAndWrite.OutputVtxContainerName] +StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY15BcJpsipiSelectAndWrite.OutputVtxContainerName] + +## 
K+K-, Kpi, D0/D0bar candidates +#StaticContent += ["xAOD::VertexContainer#%s" % BPHY15DiTrkSelectAndWrite.OutputVtxContainerName] +#StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY15DiTrkSelectAndWrite.OutputVtxContainerName] + +## D_(s)+/- candidates +#StaticContent += ["xAOD::VertexContainer#%s" % BPHY15Dh3SelectAndWrite.OutputVtxContainerName] +#StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY15Dh3SelectAndWrite.OutputVtxContainerName] + +## Jpsi pi+ candidates +#StaticContent += ["xAOD::VertexContainer#%s" % BPHY15JpsipiSelectAndWrite.OutputVtxContainerName] +#StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY15JpsipiSelectAndWrite.OutputVtxContainerName] + +## K_S0 candidates +#StaticContent += ["xAOD::VertexContainer#%s" % BPHY15K0SelectAndWrite.OutputVtxContainerName] +#StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY15K0SelectAndWrite.OutputVtxContainerName] + +## Bc+>J/psi D_(s)+/-, J/psi D*+/- and J/psi D_s1+/- candidates +for cascades in CascadeCollections: + StaticContent += ["xAOD::VertexContainer#%s" % cascades] + StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % cascades] + +# Tagging information (in addition to that already requested by usual algorithms) +AllVariables += ["GSFTrackParticles", "MuonSpectrometerTrackParticles" ] + +# Added by ASC +# Truth information for MC only +if isSimulation: + AllVariables += ["TruthEvents","TruthParticles","TruthVertices","MuonTruthParticles"] + +AllVariables = list(set(AllVariables)) # remove duplicates + +BPHY15SlimmingHelper.AllVariables = AllVariables +BPHY15SlimmingHelper.StaticContent = StaticContent +BPHY15SlimmingHelper.SmartCollections = [] + +BPHY15SlimmingHelper.AppendContentToStream(BPHY15Stream) diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY16.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY16.py new file mode 
100644 index 0000000000000000000000000000000000000000..44d8383d94328881876741e9a996c5b6c200ec8f --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY16.py @@ -0,0 +1,288 @@ +#==================================================================== +# BPHY16.py +#==================================================================== + +# Set up common services and job object. +# This should appear in ALL derivation job options +from DerivationFrameworkCore.DerivationFrameworkMaster import * + +isSimulation = False +if globalflags.DataSource()=='geant4': + isSimulation = True + +print(isSimulation) + +#==================================================================== +# AUGMENTATION TOOLS +#==================================================================== +## 1/ setup vertexing tools and services +include("DerivationFrameworkBPhys/configureVertexing.py") +BPHY16_VertexTools = BPHYVertexTools("BPHY16") + +#-------------------------------------------------------------------- +## 2/ Setup the vertex fitter tools (e.g. JpsiFinder, JpsiPlus1Track, etc). +## These are general tools independent of DerivationFramework that do the +## actual vertex fitting and some pre-selection. 
+from JpsiUpsilonTools.JpsiUpsilonToolsConf import Analysis__JpsiFinder +BPHY16JpsiFinder = Analysis__JpsiFinder( + name = "BPHY16JpsiFinder", + OutputLevel = INFO, + muAndMu = True, + muAndTrack = False, + TrackAndTrack = False, + assumeDiMuons = True, # If true, will assume dimu hypothesis and use PDG value for mu mass + invMassUpper = 12000.0, + invMassLower = 8000., + Chi2Cut = 20., + oppChargesOnly = True, + atLeastOneComb = True, + useCombinedMeasurement = False, # Only takes effect if combOnly=True + muonCollectionKey = "Muons", + TrackParticleCollection = "InDetTrackParticles", + V0VertexFitterTool = BPHY16_VertexTools.TrkV0Fitter, # V0 vertex fitter + useV0Fitter = False, # if False a TrkVertexFitterTool will be used + TrkVertexFitterTool = BPHY16_VertexTools.TrkVKalVrtFitter, # VKalVrt vertex fitter + TrackSelectorTool = BPHY16_VertexTools.InDetTrackSelectorTool, + VertexPointEstimator = BPHY16_VertexTools.VtxPointEstimator, + useMCPCuts = False ) + +ToolSvc += BPHY16JpsiFinder +print(BPHY16JpsiFinder) + +#-------------------------------------------------------------------- +## 3/ setup the vertex reconstruction "call" tool(s). They are part of the derivation framework. +## These Augmentation tools add output vertex collection(s) into the StoreGate and add basic +## decorations which do not depend on the vertex mass hypothesis (e.g. lxy, ptError, etc). +## There should be one tool per topology, i.e. Jpsi and Psi(2S) do not need two instances of the +## Reco tool if the JpsiFinder mass window is wide enough. 
+ +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Reco_Vertex +BPHY16_Reco_mumu = DerivationFramework__Reco_Vertex( + name = "BPHY16_Reco_mumu", + VertexSearchTool = BPHY16JpsiFinder, + OutputVtxContainerName = "BPHY16OniaCandidates", + PVContainerName = "PrimaryVertices", + RefPVContainerName = "BPHY16RefittedPrimaryVertices", + RefitPV = True, + MaxPVrefit = 100000, + DoVertexType = 7) + +ToolSvc += BPHY16_Reco_mumu +print(BPHY16_Reco_mumu) + + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Select_onia2mumu + +## a/ augment and select Jpsi->mumu candidates +BPHY16_Select_Upsi = DerivationFramework__Select_onia2mumu( + name = "BPHY16_Select_Upsi", + HypothesisName = "Upsilon", + InputVtxContainerName = "BPHY16OniaCandidates", + VtxMassHypo = 9460.30, + MassMin = 8000., + MassMax = 12000., + Chi2Max = 200, + DoVertexType = 7) + +ToolSvc += BPHY16_Select_Upsi +print(BPHY16_Select_Upsi) + + +## 4/ setup a new vertexing tool (necessary due to use of mass constraint) +from TrkVKalVrtFitter.TrkVKalVrtFitterConf import Trk__TrkVKalVrtFitter +BPHY16VertexFit = Trk__TrkVKalVrtFitter( + name = "BPHY16VertexFit", + Extrapolator = BPHY16_VertexTools.InDetExtrapolator, + FirstMeasuredPoint = True, + MakeExtendedVertex = True) +ToolSvc += BPHY16VertexFit +print(BPHY16VertexFit) + +## 5/ setup the Jpsi+2 track finder +from JpsiUpsilonTools.JpsiUpsilonToolsConf import Analysis__JpsiPlus2Tracks +BPHY16Plus2Tracks = Analysis__JpsiPlus2Tracks(name = "BPHY16Plus2Tracks", +# OutputLevel = DEBUG, +kaonkaonHypothesis = False, +pionpionHypothesis = False, +kaonpionHypothesis = False, +ManualMassHypo = [ 105.658, 105.658, 105.658, 105.658 ], +trkThresholdPt = 0.0, +trkMaxEta = 3.0, +BMassUpper = 50000.0, +BMassLower = 0, +oppChargesOnly = False, +#DiTrackMassUpper = 1019.445 + 100., +#DiTrackMassLower = 1019.445 - 100., +Chi2Cut = 100.0, +#TrkQuadrupletMassUpper = 60000.0, +#TrkQuadrupletMassLower = 0.01, 
+JpsiContainerKey = "BPHY16OniaCandidates", +TrackParticleCollection = "InDetTrackParticles", +MuonsUsedInJpsi = "Muons", +ExcludeJpsiMuonsOnly = True, +RequireNMuonTracks = 1, +TrkVertexFitterTool = BPHY16VertexFit, +TrackSelectorTool = BPHY16_VertexTools.InDetTrackSelectorTool, +UseMassConstraint = False) + +ToolSvc += BPHY16Plus2Tracks +print(BPHY16Plus2Tracks) + +## 6/ setup the combined augmentation/skimming tool for the Bpm +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Reco_Vertex +BPHY16FourTrackSelectAndWrite = DerivationFramework__Reco_Vertex(name = "BPHY16FourTrackSelectAndWrite", + Jpsi2PlusTrackName = BPHY16Plus2Tracks, + OutputVtxContainerName = "BPHY16FourTrack", + PVContainerName = "PrimaryVertices", + RefPVContainerName = "BPHY16RefittedPrimaryVertices", + RefitPV = True, + MaxPVrefit = 10000, DoVertexType = 7) +ToolSvc += BPHY16FourTrackSelectAndWrite +print(BPHY16FourTrackSelectAndWrite) + + +BPHY16_Select_FourTrack = DerivationFramework__Select_onia2mumu( + name = "BPHY16_Select_FourTracks", + HypothesisName = "FourTracks", + InputVtxContainerName = "BPHY16FourTrack", + TrkMasses = [105.658, 105.658, 105.658, 105.658], + VtxMassHypo = 18100.0, + MassMin = 0, + MassMax = 500000, + Chi2Max = BPHY16Plus2Tracks.Chi2Cut) + +ToolSvc += BPHY16_Select_FourTrack +print(BPHY16_Select_FourTrack) + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__ReVertex +BPHY16_Revertex = DerivationFramework__ReVertex( + name = "BPHY16_ReVertex", + InputVtxContainerName = "BPHY16FourTrack", + TrackIndices = [ 2, 3 ], + TrkVertexFitterTool = BPHY16VertexFit, + OutputVtxContainerName = "BPHY16TwoTrack" +) + +ToolSvc += BPHY16_Revertex +print(BPHY16_Revertex) + +BPHY16_Select_TwoTrack = DerivationFramework__Select_onia2mumu( + name = "BPHY16_Select_TwoTracks", + HypothesisName = "TwoTracks", + InputVtxContainerName = "BPHY16TwoTrack", + TrkMasses = [105.658, 105.658], + VtxMassHypo = 18100.0, + 
MassMin = 1, + MassMax = 500000, + Chi2Max = 90) + +ToolSvc += BPHY16_Select_TwoTrack +print(BPHY16_Select_TwoTrack) + +expression = "count(BPHY16FourTrack.passed_FourTracks) > 0" + +from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__xAODStringSkimmingTool +BPHY16_SelectEvent = DerivationFramework__xAODStringSkimmingTool(name = "BPHY16_SelectEvent", + expression = expression) +ToolSvc += BPHY16_SelectEvent +print(BPHY16_SelectEvent) + +#-------------------------------------------------------------------- +## 6/ track and vertex thinning. We want to remove all reconstructed secondary vertices +## which haven't passed any of the selections defined by (Select_*) tools. +## We also want to keep only tracks which are associated with either muons or any of the +## vertices that passed the selection. Multiple thinning tools can perform the +## selection. The final thinning decision is based on the OR of all the decisions (by default, +## although it can be changed by the JO). + +## a) thinning out vertices that didn't pass any selection and identifying tracks associated with +## selected vertices. The "VertexContainerNames" is a list of the vertex containers, and "PassFlags" +## contains all pass flags for Select_* tools that must be satisfied. The vertex is kept if it +## satisfies any of the listed selections. + + +#==================================================================== +# CREATE THE DERIVATION KERNEL ALGORITHM AND PASS THE ABOVE TOOLS +#==================================================================== +## 7/ IMPORTANT bit. Don't forget to pass the tools to the DerivationKernel! If you don't do that, they will not +## be executed! 
+ + +# The name of the kernel (BPHY16Kernel in this case) must be unique to this derivation +from DerivationFrameworkCore.DerivationFrameworkCoreConf import DerivationFramework__DerivationKernel +DerivationFrameworkJob += CfgMgr.DerivationFramework__DerivationKernel( + "BPHY16Kernel", + AugmentationTools = [BPHY16_Reco_mumu, BPHY16_Select_Upsi, BPHY16FourTrackSelectAndWrite, BPHY16_Select_FourTrack, BPHY16_Revertex, BPHY16_Select_TwoTrack], + SkimmingTools = [BPHY16_SelectEvent] + ) + +#==================================================================== +# SET UP STREAM +#==================================================================== +streamName = derivationFlags.WriteDAOD_BPHY16Stream.StreamName +fileName = buildFileName( derivationFlags.WriteDAOD_BPHY16Stream ) +BPHY16Stream = MSMgr.NewPoolRootStream( streamName, fileName ) +BPHY16Stream.AcceptAlgs(["BPHY16Kernel"]) +# Special lines for thinning +# Thinning service name must match the one passed to the thinning tools +from AthenaServices.Configurables import ThinningSvc, createThinningSvc +augStream = MSMgr.GetStream( streamName ) +evtStream = augStream.GetEventStream() +svcMgr += createThinningSvc( svcName="BPHY16ThinningSvc", outStreams=[evtStream] ) + + +#==================================================================== +# Slimming +#==================================================================== + +from DerivationFrameworkCore.SlimmingHelper import SlimmingHelper +BPHY16SlimmingHelper = SlimmingHelper("BPHY16SlimmingHelper") +AllVariables = [] +StaticContent = [] + +# Needed for trigger objects +BPHY16SlimmingHelper.IncludeMuonTriggerContent = True +BPHY16SlimmingHelper.IncludeBPhysTriggerContent = True + +## primary vertices +AllVariables += ["PrimaryVertices"] +StaticContent += ["xAOD::VertexContainer#BPHY16RefittedPrimaryVertices"] +StaticContent += ["xAOD::VertexAuxContainer#BPHY16RefittedPrimaryVerticesAux."] + +## ID track particles +AllVariables += ["InDetTrackParticles"] + +## combined 
/ extrapolated muon track particles +## (note: for tagged muons there is no extra TrackParticle collection since the ID tracks +## are stored in InDetTrackParticles collection) +AllVariables += ["CombinedMuonTrackParticles"] +AllVariables += ["ExtrapolatedMuonTrackParticles"] + +## muon container +AllVariables += ["Muons"] + + +StaticContent += ["xAOD::VertexContainer#%s" % BPHY16_Reco_mumu.OutputVtxContainerName] +StaticContent += ["xAOD::VertexAuxContainer#%sAux." % BPHY16_Reco_mumu.OutputVtxContainerName] +## we have to disable vxTrackAtVertex branch since it is not xAOD compatible +StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY16_Reco_mumu.OutputVtxContainerName] + +StaticContent += ["xAOD::VertexContainer#%s" % BPHY16FourTrackSelectAndWrite.OutputVtxContainerName] +StaticContent += ["xAOD::VertexAuxContainer#%sAux." % BPHY16FourTrackSelectAndWrite.OutputVtxContainerName] +## we have to disable vxTrackAtVertex branch since it is not xAOD compatible +StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY16FourTrackSelectAndWrite.OutputVtxContainerName] + +StaticContent += ["xAOD::VertexContainer#%s" % BPHY16_Revertex.OutputVtxContainerName] +StaticContent += ["xAOD::VertexAuxContainer#%sAux." 
% BPHY16_Revertex.OutputVtxContainerName] +## we have to disable vxTrackAtVertex branch since it is not xAOD compatible +StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY16_Revertex.OutputVtxContainerName] + + +# Truth information for MC only +if isSimulation: + AllVariables += ["TruthEvents","TruthParticles","TruthVertices","MuonTruthParticles"] + +BPHY16SlimmingHelper.AllVariables = AllVariables +BPHY16SlimmingHelper.StaticContent = StaticContent +BPHY16SlimmingHelper.AppendContentToStream(BPHY16Stream) + diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY17.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY17.py new file mode 100644 index 0000000000000000000000000000000000000000..fcd3b47a2e685c8752b611f0fb6c434e3fcbade8 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY17.py @@ -0,0 +1,316 @@ +#==================================================================== +# BPHY17.py +# This is an example job options script showing how to set up a +# derivation of the data using the derivation framework. +# It requires the reductionConf flag BPHY17 in Reco_tf.py +#==================================================================== + +# Set up common services and job object. 
+# This should appear in ALL derivation job options +from DerivationFrameworkCore.DerivationFrameworkMaster import * + +isSimulation = globalflags.DataSource()=='geant4' + +print(isSimulation) + + +#==================================================================== +# AUGMENTATION TOOLS +#==================================================================== +## 1/ setup vertexing tools and services +include("DerivationFrameworkBPhys/configureVertexing.py") +BPHY17_VertexTools = BPHYVertexTools("BPHY17") + +from InDetTrackSelectorTool.InDetTrackSelectorToolConf import InDet__InDetDetailedTrackSelectorTool +InDetTrackSelectorTool = InDet__InDetDetailedTrackSelectorTool(name = "BPHY17_CascadeTrackSelectorTool", + pTMin = 1000.0, + IPd0Max = 10000.0, + IPz0Max = 10000.0, + z0Max = 10000.0, + sigIPd0Max = 10000.0, + sigIPz0Max = 10000.0, + d0significanceMax = -1., + z0significanceMax = -1., + etaMax = 9999., + useTrackSummaryInfo = True, + nHitBLayer = 0, + nHitPix = 1, + nHitBLayerPlusPix = 1, + nHitSct = 2, + nHitSi = 3, + nHitTrt = 0, + nHitTrtHighEFractionMax = 10000.0, + useSharedHitInfo = False, + useTrackQualityInfo = True, + fitChi2OnNdfMax = 10000.0, + TrtMaxEtaAcceptance = 1.9, + TrackSummaryTool = BPHY17_VertexTools.InDetTrackSummaryTool, + Extrapolator = BPHY17_VertexTools.InDetExtrapolator + ) + +ToolSvc += InDetTrackSelectorTool +print(InDetTrackSelectorTool) + +#BPHY17_VertexTools.TrkVKalVrtFitter.OutputLevel = DEBUG + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Cascade3Plus1 +BPHY17BsDsPi = DerivationFramework__Cascade3Plus1( + name = "BPHY17BsDsPi", + TrackMassHyp = [ 493.677, 493.677, 139.57018, 139.57018 ], + PTCutPerTrack = [ 0,0,1500, 1500], + HypothesisName = "Bs", + Track3Name = "Ds", + TrackSelectorTool = InDetTrackSelectorTool, + TrkVertexFitterTool = BPHY17_VertexTools.TrkVKalVrtFitter, + PVContainerName = "PrimaryVertices", + RefitPV = True, + #OutputLevel = DEBUG, + RefPVContainerName = 
"BPHY17RefittedPrimaryVertices", + CascadeVertexCollections= ["BPHY17CascadeVtx1", "BPHY17CascadeVtx2"], + ThreeVertexOutputContainer = "BPHY17DsKaonVertexes", EliminateBad3Tracksfrom4Track = True, + ThreeTrackChi2NDF = 6, + TwoTrackMassMin = 1009.0, + TwoTrackMassMax = 1031.0, + ThreeTrackMassMin = 1800.47, + ThreeTrackMassMax = 2100.0, + FourTrackMassMin = 5100.0, + FourTrackMassMax = 5600.0, + ThreeTracksMass = 1968.47, + FourTracksMass = 5366.79, + Chi2NDFCut = 10, + FourTrackMassFinalMin = 5150., + FourTrackMassFinalMax = 5650.0, + ThreeTrackMassConstraint = False, + CopyAllVertices = False + ) + +ToolSvc += BPHY17BsDsPi +print(BPHY17BsDsPi) + +### +BPHY17BsDsPiMuons = DerivationFramework__Cascade3Plus1( + name = "BPHY17BsDsPiMuons", + TrackMassHyp = [ 105.658374, 105.658374, 139.57018, 139.57018 ], + PTCutPerTrack = [ 0,0,1500, 1500], + HypothesisName = "Bs", + Track3Name = "Ds", + TrackSelectorTool = InDetTrackSelectorTool, + TrkVertexFitterTool = BPHY17_VertexTools.TrkVKalVrtFitter, + PVContainerName = "PrimaryVertices", + RefitPV = True, + #OutputLevel = DEBUG, + RefPVContainerName = "BPHY17RefittedPrimaryVerticesMuons", + CascadeVertexCollections= ["BPHY17CascadeMuonVtx1", "BPHY17CascadeMuonVtx2"], + ThreeVertexOutputContainer = "BPHY17DsMuonVertexes", EliminateBad3Tracksfrom4Track = True, + ThreeTrackChi2NDF = 10, + TwoTrackMassMin = 860.0, + TwoTrackMassMax = 1180.0, + ThreeTrackMassMin = 1800.47, + ThreeTrackMassMax = 2100.0, + FourTrackMassMin = 5100.0, + FourTrackMassMax = 5550.0, + ThreeTracksMass = 1968.47, + FourTracksMass = 5366.79, + Chi2NDFCut = 10, + FourTrackMassFinalMin = 5150., + FourTrackMassFinalMax = 5500.0, + ThreeTrackMassConstraint = False, + UseMuonsForTracks = [0, 1], + CopyAllVertices = False + ) + +ToolSvc += BPHY17BsDsPiMuons +print(BPHY17BsDsPiMuons) + +### +BPHY17BsDsMuSemiLepMuons = DerivationFramework__Cascade3Plus1( + name = "BPHY17BsDsMuSemiLepMuons", + TrackMassHyp = [ 105.658374, 105.658374, 139.57018, 105.658374 ], + 
PTCutPerTrack = [ 0,0,1500, 0], + HypothesisName = "Bs", + Track3Name = "Ds", + TrackSelectorTool = InDetTrackSelectorTool, + TrkVertexFitterTool = BPHY17_VertexTools.TrkVKalVrtFitter, + PVContainerName = "PrimaryVertices", + RefitPV = True, + #OutputLevel = DEBUG, + RefPVContainerName = "BPHY17RefittedPrimaryVerticesMuonsSemiLep", + CascadeVertexCollections= ["BPHY17CascadeMuonSemiLepVtx1", "BPHY17CascadeMuonSemiLepVtx2"], + ThreeVertexOutputContainer = "BPHY17DsMuonSemiLepVertexes", EliminateBad3Tracksfrom4Track = True, + ThreeTrackChi2NDF = 10, + TwoTrackMassMin = 860.0, + TwoTrackMassMax = 1180.0, + ThreeTrackMassMin = 1800.47, + ThreeTrackMassMax = 2100.0, + FourTrackMassMin = 0., + FourTrackMassMax = 999999.0, + ThreeTracksMass = 1968.47, + FourTracksMass = 5366.79, + Chi2NDFCut = 10, + FourTrackMassFinalMin = 0, + FourTrackMassFinalMax = 999999.0, + ThreeTrackMassConstraint = False, + UseMuonsForTracks = [0, 1, 3], + CopyAllVertices = False + ) + +ToolSvc += BPHY17BsDsMuSemiLepMuons +print(BPHY17BsDsMuSemiLepMuons) + + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Select_onia2mumu +BPHY17_Select_DsPhiKK = DerivationFramework__Select_onia2mumu( + name = "BPHY17_Select_DsPhiKK", + HypothesisName = "DsPhiKK", + InputVtxContainerName = BPHY17BsDsPi.ThreeVertexOutputContainer, + TrkMasses = [493.677, 493.677, 139.57018], + VtxMassHypo = 1968.47, + MassMin = 1000.0, + MassMax = 3000.0, Do3d = False, DoVertexType = 1, + Chi2Max = 200) + +ToolSvc += BPHY17_Select_DsPhiKK +print(BPHY17_Select_DsPhiKK) + +BPHY17_Select_DsPhiMM = DerivationFramework__Select_onia2mumu( + name = "BPHY17_Select_DsPhiMM", + HypothesisName = "DsPhiMM", + InputVtxContainerName = BPHY17BsDsPiMuons.ThreeVertexOutputContainer, + TrkMasses = [105.658374, 105.658374, 139.57018], + VtxMassHypo = 1968.47, + MassMin = 1000.0, + MassMax = 3000.0, Do3d = False, DoVertexType = 1, + Chi2Max = 200) + +ToolSvc += BPHY17_Select_DsPhiMM +print(BPHY17_Select_DsPhiMM) 
+ + +BPHY17_Select_DsPhiMMSemi = DerivationFramework__Select_onia2mumu( + name = "BPHY17_Select_DsPhiMMSemi", + HypothesisName = "DsPhiMMSemiLep", + InputVtxContainerName = BPHY17BsDsMuSemiLepMuons.ThreeVertexOutputContainer, + TrkMasses = [105.658374, 105.658374, 139.57018], + VtxMassHypo = 1968.47, + MassMin = 1000.0, + MassMax = 3000.0, Do3d = False, DoVertexType = 1, + Chi2Max = 200) + +ToolSvc += BPHY17_Select_DsPhiMMSemi +print(BPHY17_Select_DsPhiMMSemi) + +### + +#-------------------------------------------------------------------- +## 5/ select the event. We only want to keep events that contain certain vertices which passed certain selection. +## This is specified by the "SelectionExpression" property, which contains the expression in the following format: +## +## "ContainerName.passed_HypoName > count" +## +## where "ContainerName" is output container form some Reco_* tool, "HypoName" is the hypothesis name setup in some "Select_*" +## tool and "count" is the number of candidates passing the selection you want to keep. 
+ +expression = "count(%s.x > -999) > 0" % BPHY17BsDsPi.CascadeVertexCollections[0] +expression += " || count(%s.x > -999) > 0" % BPHY17BsDsPiMuons.CascadeVertexCollections[0] +expression += " || count(%s.x > -999) > 0" % BPHY17BsDsPi.ThreeVertexOutputContainer +expression += " || count(%s.x > -999) > 0" % BPHY17BsDsPiMuons.ThreeVertexOutputContainer +expression += " || count(%s.x > -999) > 0" % BPHY17BsDsMuSemiLepMuons.CascadeVertexCollections[0] +expression += " || count(%s.x > -999) > 0" % BPHY17BsDsMuSemiLepMuons.ThreeVertexOutputContainer + +from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__xAODStringSkimmingTool +BPHY17_SelectEvent = DerivationFramework__xAODStringSkimmingTool(name = "BPHY17_SelectEvent", expression = expression) +ToolSvc += BPHY17_SelectEvent +print(BPHY17_SelectEvent) + +MyVertexCollections = BPHY17BsDsPi.CascadeVertexCollections + BPHY17BsDsPiMuons.CascadeVertexCollections + BPHY17BsDsMuSemiLepMuons.CascadeVertexCollections + \ + [ BPHY17BsDsPi.ThreeVertexOutputContainer, BPHY17BsDsPiMuons.ThreeVertexOutputContainer, BPHY17BsDsMuSemiLepMuons.ThreeVertexOutputContainer ] + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Thin_vtxTrk +BPHY17_thinningTool_Tracks = DerivationFramework__Thin_vtxTrk( + name = "BPHY17_thinningTool_Tracks", + TrackParticleContainerName = "InDetTrackParticles", + IgnoreFlags = True, + VertexContainerNames = MyVertexCollections, + PassFlags = ["passed_DsPhiMM", "passed_DsPhiKK"] ) + +ToolSvc += BPHY17_thinningTool_Tracks + + +# Only save truth information directly associated with Onia +from DerivationFrameworkMCTruth.DerivationFrameworkMCTruthConf import DerivationFramework__GenericTruthThinning +BPHY17TruthThinTool = DerivationFramework__GenericTruthThinning(name = "BPHY17TruthThinTool", ParticleSelectionString = "TruthParticles.pdgId == 511 || TruthParticles.pdgId == -511 || TruthParticles.pdgId == 531 || TruthParticles.pdgId == -531", 
+ PreserveDescendants = True, + PreserveAncestors = True) +ToolSvc += BPHY17TruthThinTool +print(BPHY17TruthThinTool) + + +#==================================================================== +# CREATE THE DERIVATION KERNEL ALGORITHM AND PASS THE ABOVE TOOLS +#==================================================================== +## 7/ IMPORTANT bit. Don't forget to pass the tools to the DerivationKernel! If you don't do that, they will not be +## be executed! + +# Added by ASC +BPHY17ThinningTools = [BPHY17_thinningTool_Tracks] +if globalflags.DataSource()=='geant4': + BPHY17ThinningTools.append(BPHY17TruthThinTool) + +# The name of the kernel (BPHY17Kernel in this case) must be unique to this derivation +from DerivationFrameworkCore.DerivationFrameworkCoreConf import DerivationFramework__DerivationKernel +DerivationFrameworkJob += CfgMgr.DerivationFramework__DerivationKernel( + "BPHY17Kernel", + AugmentationTools = [BPHY17BsDsPi, BPHY17BsDsPiMuons, BPHY17BsDsMuSemiLepMuons, BPHY17_Select_DsPhiKK, BPHY17_Select_DsPhiMM, BPHY17_Select_DsPhiMMSemi], + SkimmingTools = [BPHY17_SelectEvent], + ThinningTools = BPHY17ThinningTools, +# OutputLevel = DEBUG + ) + +#==================================================================== +# SET UP STREAM +#==================================================================== +streamName = derivationFlags.WriteDAOD_BPHY17Stream.StreamName +fileName = buildFileName( derivationFlags.WriteDAOD_BPHY17Stream ) +BPHY17Stream = MSMgr.NewPoolRootStream( streamName, fileName ) +BPHY17Stream.AcceptAlgs(["BPHY17Kernel"]) +# Special lines for thinning +# Thinning service name must match the one passed to the thinning tools +from AthenaServices.Configurables import ThinningSvc, createThinningSvc +augStream = MSMgr.GetStream( streamName ) +evtStream = augStream.GetEventStream() +svcMgr += createThinningSvc( svcName="BPHY17ThinningSvc", outStreams=[evtStream] ) + + +#==================================================================== +# 
Slimming +#==================================================================== + +# Added by ASC +from DerivationFrameworkCore.SlimmingHelper import SlimmingHelper +BPHY17SlimmingHelper = SlimmingHelper("BPHY17SlimmingHelper") +AllVariables = [] +StaticContent = [] + +# Needed for trigger objects +BPHY17SlimmingHelper.IncludeBPhysTriggerContent = False +BPHY17SlimmingHelper.IncludeMuonTriggerContent = False +## primary vertices +AllVariables += ["PrimaryVertices"] + +for f in MyVertexCollections + [BPHY17BsDsPi.RefPVContainerName, BPHY17BsDsPiMuons.RefPVContainerName + BPHY17BsDsMuSemiLepMuons.RefPVContainerName ]: + StaticContent += ["xAOD::VertexContainer#%s" % f] + StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % f] + +## ID track particles +AllVariables += ["InDetTrackParticles"] +AllVariables += ["Muons"] + +# Truth information for MC only +if isSimulation: + AllVariables += ["TruthEvents","TruthParticles","TruthVertices"] + +BPHY17SlimmingHelper.AllVariables = AllVariables +BPHY17SlimmingHelper.StaticContent = StaticContent +BPHY17SlimmingHelper.AppendContentToStream(BPHY17Stream) diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY18.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY18.py new file mode 100644 index 0000000000000000000000000000000000000000..fdabaaacdfbd025ddbd50e894e39630a73296a47 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY18.py @@ -0,0 +1,506 @@ +#==================================================================== +# BPHY18.py +# B0 -> K*ee +# It requires the reductionConf flag BPHY18 in Reco_tf.py +#==================================================================== + + +#==================================================================== +# FLAGS TO PERSONALIZE THE DERIVATION +#==================================================================== + +onlyAugmentations = False +thinTruth = False +skimTruth = False 
+ + +# Set up common services and job object. +from DerivationFrameworkCore.DerivationFrameworkMaster import * +from DerivationFrameworkMuons.MuonsCommon import * +from DerivationFrameworkJetEtMiss.JetCommon import * +from DerivationFrameworkEGamma.EGammaCommon import * +from DerivationFrameworkEGamma.ElectronsCPContent import * + +isSimulation = False +if globalflags.DataSource()=='geant4': + isSimulation = True + +print(isSimulation) + + +#==================================================================== +# AUGMENTATION TOOLS +#==================================================================== +## 1/ setup vertexing tools and services +include("DerivationFrameworkBPhys/configureVertexing.py") +BPHY18_VertexTools = BPHYVertexTools("BPHY18") + + +print('********************** VERTEX TOOLS ***********************') +print(BPHY18_VertexTools) +print(BPHY18_VertexTools.TrkV0Fitter) +print('********************** END VERTEX TOOLS ***********************') + +#==================================================================== +# TriggerCounting for Kernel1 #Added by Matteo +#==================================================================== +#List of trigggers to be counted (high Sig-eff*Lumi ones are in) +triggersToMetadata = [ +"HLT_2e5_lhvloose_nod0_bBeexM6000t", #37,143,877 inb +"HLT_e5_lhvloose_nod0_bBeexM6000t", #37,143,877 +"HLT_e5_lhvloose_nod0_bBeexM6000t_2mu4_nomucomb_L1BPH-0DR3-EM7J15_2MU4", #37,312,506 +"HLT_e5_lhvloose_nod0_bBeexM6000t_mu6_nomucomb_L1BPH-0DR3-EM7J15_MU6", #27,041,892 +"HLT_e5_lhvloose_nod0_bBeexM6000_mu6_nomucomb_L1BPH-0DR3-EM7J15_MU6", #149,100 +"HLT_e9_lhloose_bBeexM2700_2mu4_nomucomb_L1BPH-0DR3-EM7J15_2MU4", #2,681,764 +"HLT_e9_lhloose_bBeexM2700_mu6_nomucomb_L1BPH-0DR3-EM7J15_MU6", #1,979,362 +"HLT_e9_lhloose_bBeexM6000_2mu4_nomucomb_L1BPH-0DR3-EM7J15_2MU4", #3,359,105 +"HLT_e9_lhloose_bBeexM6000_mu6_nomucomb_L1BPH-0DR3-EM7J15_MU6", #2,426,663 +"HLT_e9_lhloose_e5_lhloose_bBeexM2700_2mu4_nomucomb_L1BPH-0M9-EM7-EM5_2MU4", 
#2,950,935 +"HLT_e9_lhloose_e5_lhloose_bBeexM2700_mu6_nomucomb_L1BPH-0M9-EM7-EM5_MU6", #2,928,030 +"HLT_e9_lhloose_e5_lhloose_bBeexM6000_2mu4_nomucomb_L1BPH-0M9-EM7-EM5_2MU4", #3,647,507 +"HLT_e9_lhloose_e5_lhloose_bBeexM6000_mu6_nomucomb_L1BPH-0M9-EM7-EM5_MU6", #3,605,371 +"HLT_e9_lhvloose_nod0_e5_lhvloose_nod0_bBeexM6000t_2mu4_nomucomb_L1BPH-0M9-EM7-EM5_2MU4", #40,169,436 +"HLT_e9_lhvloose_nod0_e5_lhvloose_nod0_bBeexM6000t_mu6_nomucomb_L1BPH-0M9-EM7-EM5_MU6", #37,312,506 +"HLT_e9_lhvloose_nod0_e5_lhvloose_nod0_bBeexM6000_mu6_nomucomb_L1BPH-0M9-EM7-EM5_MU6", #677,340 + +'HLT_3mu4_bDimu2700', 'HLT_3mu6_bDimu', 'HLT_mu6_2mu4_bDimu2700','HLT_mu11_mu6_bJpsimumu', 'HLT_3mu4_bJpsi', 'HLT_mu11_mu6_bDimu', 'HLT_2mu6_bBmumu_Lxy0_L1BPH-2M9-2MU6_BPH-2DR15-2MU6','HLT_2mu6_bJpsimumu_L1BPH-2M9-2MU6_BPH-2DR15-2MU6', 'HLT_2mu10_bBmumuxv2', 'HLT_mu6_mu4_bDimu', 'HLT_2mu6_bBmumuxv2_L1LFV-MU6', 'HLT_mu11_mu6_bJpsimumu_Lxy0', 'HLT_mu11_mu6_bDimu2700', 'HLT_mu11_mu6_bBmumux_BpmumuKp', 'HLT_mu6_mu4_bJpsimumu_Lxy0_L1BPH-2M9-MU6MU4_BPH-0DR15-MU6MU4', 'HLT_mu11_mu6_bBmumuxv2', 'HLT_mu6_2mu4_bJpsi', 'HLT_2mu6_bBmumux_BpmumuKp_L1BPH-2M9-2MU6_BPH-2DR15-2MU6', 'HLT_2mu6_bJpsimumu_Lxy0_L1BPH-2M9-2MU6_BPH-2DR15-2MU6', 'HLT_3mu6_bJpsi', 'HLT_mu11_mu6_bBmumu'] + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__TriggerCountToMetadata +BPHY18TriggerCountToMetadata = DerivationFramework__TriggerCountToMetadata(name = "BPHY18TriggerCount", + TriggerList = triggersToMetadata, + FolderName = "BPHY18") + +ToolSvc += BPHY18TriggerCountToMetadata + +#==================================================================== +# PRESELECTION for Kernel1 #Added by Matteo +#==================================================================== +## 1/ Setup the skimming based on triggers +## + +triggerList = ["HLT_e5_lhvloose_nod0_bBeexM6000t_2mu4_nomucomb_L1BPH-0DR3-EM7J15_2MU4", #37,312,506 +"HLT_e5_lhvloose_nod0_bBeexM6000t_mu6_nomucomb_L1BPH-0DR3-EM7J15_MU6", #27,041,892 
+"HLT_e5_lhvloose_nod0_bBeexM6000_mu6_nomucomb_L1BPH-0DR3-EM7J15_MU6", #149,100 +"HLT_e9_lhloose_bBeexM2700_2mu4_nomucomb_L1BPH-0DR3-EM7J15_2MU4", #2,681,764 +"HLT_e9_lhloose_bBeexM2700_mu6_nomucomb_L1BPH-0DR3-EM7J15_MU6", #1,979,362 +"HLT_e9_lhloose_bBeexM6000_2mu4_nomucomb_L1BPH-0DR3-EM7J15_2MU4", #3,359,105 +"HLT_e9_lhloose_bBeexM6000_mu6_nomucomb_L1BPH-0DR3-EM7J15_MU6", #2,426,663 +"HLT_e9_lhloose_e5_lhloose_bBeexM2700_2mu4_nomucomb_L1BPH-0M9-EM7-EM5_2MU4", #2,950,935 +"HLT_e9_lhloose_e5_lhloose_bBeexM2700_mu6_nomucomb_L1BPH-0M9-EM7-EM5_MU6", #2,928,030 +"HLT_e9_lhloose_e5_lhloose_bBeexM6000_2mu4_nomucomb_L1BPH-0M9-EM7-EM5_2MU4", #3,647,507 +"HLT_e9_lhloose_e5_lhloose_bBeexM6000_mu6_nomucomb_L1BPH-0M9-EM7-EM5_MU6", #3,605,371 +"HLT_e9_lhvloose_nod0_e5_lhvloose_nod0_bBeexM6000t_2mu4_nomucomb_L1BPH-0M9-EM7-EM5_2MU4", #40,169,436 +"HLT_e9_lhvloose_nod0_e5_lhvloose_nod0_bBeexM6000t_mu6_nomucomb_L1BPH-0M9-EM7-EM5_MU6", #37,312,506 +"HLT_e9_lhvloose_nod0_e5_lhvloose_nod0_bBeexM6000_mu6_nomucomb_L1BPH-0M9-EM7-EM5_MU6", #677,340 +"HLT_2e5_lhvloose_nod0_bBeexM6000t", #37,143,877 inb +"HLT_e5_lhvloose_nod0_bBeexM6000t" #37,143,877 +] # Seeded + Unseeded BeeX triggers + +triggerList_unseeded = ["HLT_2e5_lhvloose_nod0_bBeexM6000t", #37,143,877 inb +"HLT_e5_lhvloose_nod0_bBeexM6000t" #37,143,877 +] + +from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__TriggerSkimmingTool +BPHY18TriggerSkim = DerivationFramework__TriggerSkimmingTool(name = "BPHY18TriggerSkim", + TriggerListOR = triggerList, + TriggerListORHLTOnly = triggerList_unseeded ) + +ToolSvc += BPHY18TriggerSkim +print(BPHY18TriggerSkim) + +#do not know what this does, but let's leave it for now, until we see if it's useful or not! 
+from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__AugOriginalCounts +BPHY18_AugOriginalCounts = DerivationFramework__AugOriginalCounts(name = "BPHY18_AugOriginalCounts", + VertexContainer = "PrimaryVertices", + TrackContainer = "InDetTrackParticles" ) +ToolSvc += BPHY18_AugOriginalCounts + +#lhvloose_nod0 +from ElectronPhotonSelectorTools.ElectronPhotonSelectorToolsConf import AsgElectronLikelihoodTool +ElectronLHSelectorLHvloose_nod0 = AsgElectronLikelihoodTool("ElectronLHSelectorLHvloosenod0", +ConfigFile="ElectronPhotonSelectorTools/offline/mc16_20190328_nod0/ElectronLikelihoodVeryLooseOfflineConfig2017_Smooth_nod0.conf") +ElectronLHSelectorLHvloose_nod0.primaryVertexContainer = "PrimaryVertices" +ToolSvc += ElectronLHSelectorLHvloose_nod0 +print(ElectronLHSelectorLHvloose_nod0) + +# decorate electrons with the output of LH vloose nod0 +ElectronPassLHvloosenod0 = DerivationFramework__EGSelectionToolWrapper(name = "ElectronPassLHvloosenod0", + EGammaSelectionTool = ElectronLHSelectorLHvloose_nod0, + EGammaFudgeMCTool = "", + CutType = "", + StoreGateEntryName = "DFCommonElectronsLHVeryLoosenod0", + ContainerName = "Electrons") +ToolSvc += ElectronPassLHvloosenod0 +print(ElectronPassLHvloosenod0) + +#-------------------------------------------------------------------- +## 2/ setup JpsiFinder tool +from JpsiUpsilonTools.JpsiUpsilonToolsConf import Analysis__JpsiFinder_ee +BPHY18DiElectronFinder = Analysis__JpsiFinder_ee( + name = "BPHY18DiElectronFinder", + OutputLevel = INFO, + elAndEl = True, + elAndTrack = False, + TrackAndTrack = False, + assumeDiElectrons = True, + elThresholdPt = 4000.0, + invMassUpper = 7000.0, + invMassLower = 1.0, + Chi2Cut = 30., + oppChargesOnly = False, + allChargeCombinations = True, + useElectronTrackMeasurement = True, + electronCollectionKey = "Electrons", + TrackParticleCollection = "GSFTrackParticles", + useEgammaCuts = True, + V0VertexFitterTool = BPHY18_VertexTools.TrkV0Fitter, + useV0Fitter 
= False, + TrkVertexFitterTool = BPHY18_VertexTools.TrkVKalVrtFitter, + TrackSelectorTool = BPHY18_VertexTools.InDetTrackSelectorTool, + VertexPointEstimator = BPHY18_VertexTools.VtxPointEstimator, + ElectronSelection = "d0_or_nod0" + ) + +ToolSvc += BPHY18DiElectronFinder +print(BPHY18DiElectronFinder) + +#-------------------------------------------------------------------- +## 3/ setup the vertex reconstruction "call" tool(s). +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Reco_Vertex +BPHY18DiElectronSelectAndWrite = DerivationFramework__Reco_Vertex( + name = "BPHY18DiElectronSelectAndWrite", + VertexSearchTool = BPHY18DiElectronFinder, + OutputVtxContainerName = "BPHY18DiElectronCandidates", + PVContainerName = "PrimaryVertices", + RefPVContainerName = "SHOULDNOTBEUSED", + DoVertexType = 7 + ) + +ToolSvc += BPHY18DiElectronSelectAndWrite +print(BPHY18DiElectronSelectAndWrite) + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Select_onia2mumu +BPHY18_Select_DiElectrons = DerivationFramework__Select_onia2mumu( + name = "BPHY18_Select_DiElectrons", + HypothesisName = "Jpsi", + InputVtxContainerName = "BPHY18DiElectronCandidates", + VtxMassHypo = 3096.916, + MassMin = 1.0, + MassMax = 7000.0, + Chi2Max = 30, + DoVertexType = 7 + ) + +ToolSvc += BPHY18_Select_DiElectrons +print(BPHY18_Select_DiElectrons) + +## 4/ setup a new vertexing tool (necessary due to use of mass constraint) +from TrkVKalVrtFitter.TrkVKalVrtFitterConf import Trk__TrkVKalVrtFitter +BeeKstVertexFit = Trk__TrkVKalVrtFitter( + name = "BeeKstVertexFit", + Extrapolator = BPHY18_VertexTools.InDetExtrapolator, + FirstMeasuredPoint = True, + MakeExtendedVertex = True + ) + +ToolSvc += BeeKstVertexFit +print(BeeKstVertexFit) + +## 5/ setup the Jpsi+2 track finder +from JpsiUpsilonTools.JpsiUpsilonToolsConf import Analysis__JpsiPlus2Tracks +BPHY18BeeKst = Analysis__JpsiPlus2Tracks( + name = "BPHY18BeeKstFinder", + 
OutputLevel = INFO, + kaonkaonHypothesis = False, + pionpionHypothesis = False, + kaonpionHypothesis = True, + oppChargesOnly = False, + SameChargesOnly = False, + trkThresholdPt = 500.0, + trkMaxEta = 3.0, + BThresholdPt = 1000., + BMassLower = 3000.0, + BMassUpper = 6500.0, + JpsiContainerKey = "BPHY18DiElectronCandidates", + TrackParticleCollection = "InDetTrackParticles", + ExcludeCrossJpsiTracks = False, + TrkVertexFitterTool = BeeKstVertexFit, + TrackSelectorTool = BPHY18_VertexTools.InDetTrackSelectorTool, + UseMassConstraint = False, + DiTrackMassUpper = 1110., + DiTrackMassLower = 690., + Chi2Cut = 15.0, + DiTrackPt = 500., + TrkQuadrupletMassLower = 1000.0, + TrkQuadrupletMassUpper = 10000.0, + FinalDiTrackPt = 500., + UseGSFTrackIndices = [0,1] + ) + +ToolSvc += BPHY18BeeKst +print(BPHY18BeeKst) + +## 6/ setup the combined augmentation/skimming tool for the BeeKst +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Reco_Vertex +BPHY18BeeKstSelectAndWrite = DerivationFramework__Reco_Vertex( + name = "BPHY18BeeKstSelectAndWrite", + Jpsi2PlusTrackName = BPHY18BeeKst, + OutputVtxContainerName = "BeeKstCandidates", + PVContainerName = "PrimaryVertices", + RefPVContainerName = "BPHY18RefittedPrimaryVertices", + RefitPV = True, + MaxPVrefit = 10000, + DoVertexType = 7 + ) + +ToolSvc += BPHY18BeeKstSelectAndWrite +print(BPHY18BeeKstSelectAndWrite) + +## b/ augment and select B->eeKst candidates +BPHY18_Select_BeeKst = DerivationFramework__Select_onia2mumu( + name = "BPHY18_Select_BeeKst", + HypothesisName = "Bd", + InputVtxContainerName = "BeeKstCandidates", + TrkMasses = [0.511, 0.511, 493.677, 139.570], + VtxMassHypo = 5279.6, + MassMin = 1.0, + MassMax = 10000.0, + Chi2Max = 30.0 + ) + +ToolSvc += BPHY18_Select_BeeKst +print(BPHY18_Select_BeeKst) + +## c/ augment and select Bdbar->eeKstbar candidates +BPHY18_Select_BeeKstbar = DerivationFramework__Select_onia2mumu( + name = "BPHY18_Select_Bd2JpsiKstbar", + HypothesisName = 
"Bdbar", + InputVtxContainerName = "BeeKstCandidates", + TrkMasses = [0.511, 0.511, 139.570, 493.677], + VtxMassHypo = 5279.6, + MassMin = 1.0, + MassMax = 10000.0, + Chi2Max = 30.0 + ) + +ToolSvc += BPHY18_Select_BeeKstbar +print(BPHY18_Select_BeeKstbar) + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__ReVertex +BPHY18_diMeson_revertex = DerivationFramework__ReVertex( + name = "BPHY18_diMeson_revertex", + InputVtxContainerName = "BeeKstCandidates", + TrackIndices = [ 2, 3 ], + TrkVertexFitterTool = BeeKstVertexFit, + OutputVtxContainerName = "BPHY18DiMeson" + ) + +ToolSvc += BPHY18_diMeson_revertex +print(BPHY18_diMeson_revertex) + +BPHY18_Select_Kpi = DerivationFramework__Select_onia2mumu( + name = "BPHY18_Select_Kpi", + HypothesisName = "Kpi", + InputVtxContainerName = "BPHY18DiMeson", + TrkMasses = [ 493.677, 139.570 ], + VtxMassHypo = 891.66, + MassMin = 1.0, + MassMax = 100000.0, + Chi2Max = 100.0 + ) + +ToolSvc += BPHY18_Select_Kpi +print(BPHY18_Select_Kpi) + +BPHY18_Select_piK = DerivationFramework__Select_onia2mumu( + name = "BPHY18_Select_piK", + HypothesisName = "piK", + InputVtxContainerName = "BPHY18DiMeson", + TrkMasses = [ 139.570, 493.677 ], + VtxMassHypo = 891.66, + MassMin = 1.0, + MassMax = 100000.0, + Chi2Max = 100.0 + ) + +ToolSvc += BPHY18_Select_piK +print(BPHY18_Select_piK) + +if True: + from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__xAODStringSkimmingTool + BPHY18_SelectBeeKstEvent = DerivationFramework__xAODStringSkimmingTool( + name = "BPHY18_SelectBeeKstEvent", + expression = "(count(BeeKstCandidates.passed_Bd > 0) + count(BeeKstCandidates.passed_Bdbar > 0)) > 0") + ToolSvc += BPHY18_SelectBeeKstEvent + print(BPHY18_SelectBeeKstEvent) + + #==================================================================== + # Make event selection based on an OR of the input skimming tools + #==================================================================== + from 
DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__FilterCombinationAND + BPHY18SkimmingAND = CfgMgr.DerivationFramework__FilterCombinationAND( + "BPHY18SkimmingAND", + FilterList = [BPHY18_SelectBeeKstEvent, BPHY18TriggerSkim]) + ToolSvc += BPHY18SkimmingAND + print(BPHY18SkimmingAND) + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Thin_vtxTrk +BPHY18_thinningTool_Tracks = DerivationFramework__Thin_vtxTrk( + name = "BPHY18_thinningTool_Tracks", + TrackParticleContainerName = "InDetTrackParticles", + VertexContainerNames = ["BeeKstCandidates"], + PassFlags = ["passed_Bd", "passed_Bdbar"] ) + +BPHY18_thinningTool_GSFTracks = DerivationFramework__Thin_vtxTrk( + name = "BPHY18_thinningTool_GSFTracks", + TrackParticleContainerName = "GSFTrackParticles", + VertexContainerNames = ["BeeKstCandidates"], + PassFlags = ["passed_Bd", "passed_Bdbar"] ) + +ToolSvc += BPHY18_thinningTool_Tracks +ToolSvc += BPHY18_thinningTool_GSFTracks + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__BPhysPVThinningTool +BPHY18_thinningTool_PV = DerivationFramework__BPhysPVThinningTool( + name = "BPHY18_thinningTool_PV", + CandidateCollections = ["BeeKstCandidates"], + KeepPVTracks = True + ) + +ToolSvc += BPHY18_thinningTool_PV + +## b) thinning out tracks that are not attached to muons. 
+from DerivationFrameworkInDet.DerivationFrameworkInDetConf import DerivationFramework__MuonTrackParticleThinning +BPHY18MuonTPThinningTool = DerivationFramework__MuonTrackParticleThinning( + name = "BPHY18MuonTPThinningTool", + MuonKey = "Muons", + InDetTrackParticlesKey = "InDetTrackParticles") +ToolSvc += BPHY18MuonTPThinningTool + +from DerivationFrameworkInDet.DerivationFrameworkInDetConf import DerivationFramework__EgammaTrackParticleThinning +BPHY18EgammaTPThinningTool = DerivationFramework__EgammaTrackParticleThinning( + name = "BPHY18EgammaTPThinningTool", + SGKey = "Electrons", + InDetTrackParticlesKey = "InDetTrackParticles") +ToolSvc += BPHY18EgammaTPThinningTool + +# Only save truth informtion directly associated with: mu Ds+ D+ D*+ Ds*+ D0 D*0 B+ B*+ B0 B*0 +from DerivationFrameworkMCTruth.DerivationFrameworkMCTruthConf import DerivationFramework__GenericTruthThinning +BPHY18TruthThinTool = DerivationFramework__GenericTruthThinning(name = "BPHY18TruthThinTool", + ParticleSelectionString = "abs(TruthParticles.pdgId) == 11 || abs(TruthParticles.pdgId) == 13 || abs(TruthParticles.pdgId) == 10311 || abs(TruthParticles.pdgId) == 521 || abs(TruthParticles.pdgId) == 523 || TruthParticles.pdgId == 511 || TruthParticles.pdgId == 513", + PreserveDescendants = True, + PreserveAncestors = True) +ToolSvc += BPHY18TruthThinTool + +# Only save truth neutrino and b/c quarks information +from DerivationFrameworkMCTruth.DerivationFrameworkMCTruthConf import DerivationFramework__GenericTruthThinning +BPHY18TruthThinNoChainTool = DerivationFramework__GenericTruthThinning(name = "BPHY18TruthThinNoChainTool", + ParticleSelectionString = "abs(TruthParticles.pdgId) == 5 || abs(TruthParticles.pdgId) == 12 || abs(TruthParticles.pdgId) == 14", + PreserveDescendants = False, + PreserveAncestors = False) +ToolSvc += BPHY18TruthThinNoChainTool + +#==================================================================== +# CREATE THE DERIVATION KERNEL ALGORITHM AND PASS THE ABOVE TOOLS 
+#==================================================================== + +thinningCollection = [ BPHY18_thinningTool_Tracks, BPHY18_thinningTool_GSFTracks, + BPHY18_thinningTool_PV, #BPHY18_thinningTool_PV_GSF, + BPHY18EgammaTPThinningTool, BPHY18MuonTPThinningTool + ] + +#if we're doing truth, add these [BPHY18TruthThinTool,BPHY18TruthThinNoChainTool] +if isSimulation: + thinningCollection += [BPHY18TruthThinTool,BPHY18TruthThinNoChainTool] + +print(thinningCollection) + +from DerivationFrameworkCore.DerivationFrameworkCoreConf import DerivationFramework__DerivationKernel +DerivationFrameworkJob += CfgMgr.DerivationFramework__DerivationKernel( + "BPHY18Kernel", + + AugmentationTools = [ ElectronPassLHvloosenod0,BPHY18DiElectronSelectAndWrite, + BPHY18_Select_DiElectrons, + BPHY18BeeKstSelectAndWrite, BPHY18_Select_BeeKst, BPHY18_Select_BeeKstbar, + BPHY18_diMeson_revertex, BPHY18_Select_Kpi, BPHY18_Select_piK ], + + #Only skim if not MC + SkimmingTools = [BPHY18SkimmingAND], + ThinningTools = thinningCollection + ) + + +#==================================================================== +# SET UP STREAM +#==================================================================== +streamName = derivationFlags.WriteDAOD_BPHY18Stream.StreamName +fileName = buildFileName( derivationFlags.WriteDAOD_BPHY18Stream ) +BPHY18Stream = MSMgr.NewPoolRootStream( streamName, fileName ) +BPHY18Stream.AcceptAlgs(["BPHY18Kernel"]) + +# Special lines for thinning +from AthenaServices.Configurables import ThinningSvc, createThinningSvc +augStream = MSMgr.GetStream( streamName ) +evtStream = augStream.GetEventStream() + +BPHY18ThinningSvc = createThinningSvc( svcName="BPHY18ThinningSvc", outStreams=[evtStream] ) +svcMgr += BPHY18ThinningSvc + +#==================================================================== +# Slimming +#==================================================================== + +# Added by ASC +from DerivationFrameworkCore.SlimmingHelper import SlimmingHelper 
+BPHY18SlimmingHelper = SlimmingHelper("BPHY18SlimmingHelper") +AllVariables = [] +StaticContent = [] +ExtraVariables = [] +BPHY18SlimmingHelper.SmartCollections = ["Electrons", "Muons", "InDetTrackParticles" ] + +# Needed for trigger objects +BPHY18SlimmingHelper.IncludeMuonTriggerContent = False +BPHY18SlimmingHelper.IncludeBPhysTriggerContent = False +BPHY18SlimmingHelper.IncludeEGammaTriggerContent = True + +## primary vertices +AllVariables += ["PrimaryVertices"] +StaticContent += ["xAOD::VertexContainer#BPHY18RefittedPrimaryVertices"] +StaticContent += ["xAOD::VertexAuxContainer#BPHY18RefittedPrimaryVerticesAux."] + +ExtraVariables += ["Muons.etaLayer1Hits.etaLayer2Hits.etaLayer3Hits.etaLayer4Hits.phiLayer1Hits.phiLayer2Hits.phiLayer3Hits.phiLayer4Hits", + "Muons.numberOfTriggerEtaLayers.numberOfPhiLayers", + "InDetTrackParticles.numberOfTRTHits.numberOfTRTHighThresholdHits.vx.vy.vz", + "PrimaryVertices.chiSquared.covariance", "Electrons.deltaEta1.DFCommonElectronsLHVeryLoosenod0","egammaClusters.calE.calEta.calPhi.e_sampl.eta_sampl.etaCalo.phiCalo.ETACALOFRAME.PHICALOFRAME","HLT_xAOD__ElectronContainer_egamma_ElectronsAuxDyn.charge"] + +## Jpsi candidates +StaticContent += ["xAOD::VertexContainer#%s" % BPHY18DiElectronSelectAndWrite.OutputVtxContainerName] +StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY18DiElectronSelectAndWrite.OutputVtxContainerName] + +StaticContent += ["xAOD::VertexContainer#%s" % BPHY18BeeKstSelectAndWrite.OutputVtxContainerName] +StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY18BeeKstSelectAndWrite.OutputVtxContainerName] + +StaticContent += ["xAOD::VertexContainer#%s" % BPHY18_diMeson_revertex.OutputVtxContainerName] +StaticContent += ["xAOD::VertexAuxContainer#%sAux." 
% BPHY18_diMeson_revertex.OutputVtxContainerName] +StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY18_diMeson_revertex.OutputVtxContainerName] + +AllVariables += [ "GSFTrackParticles"] + + +# Added by ASC +# Truth information for MC only +if isSimulation: + AllVariables += ["TruthEvents","TruthParticles","TruthVertices", "ElectronTruthParticles"] + +AllVariables = list(set(AllVariables)) # remove duplicates + +BPHY18SlimmingHelper.AllVariables = AllVariables +BPHY18SlimmingHelper.ExtraVariables = ExtraVariables + +BPHY18SlimmingHelper.StaticContent = StaticContent + +from DerivationFrameworkEGamma.ElectronsCPDetailedContent import * +BPHY18SlimmingHelper.ExtraVariables += ElectronsCPDetailedContent +BPHY18SlimmingHelper.ExtraVariables += GSFTracksCPDetailedContent + +BPHY18SlimmingHelper.AppendContentToStream(BPHY18Stream) diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY19.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY19.py new file mode 100644 index 0000000000000000000000000000000000000000..a8679449c86d89ebc03abcbae6942407a25442b6 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY19.py @@ -0,0 +1,313 @@ +# +# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration +# +#==================================================================== +# BPHY19.py +# Derivation for dimuon + photon conversion (chi_c/b) +# Contact: A. Chisholm <andrew.chisholm@cern.ch> +#==================================================================== + +# Set up common services and job object. 
+# This should appear in ALL derivation job options +from DerivationFrameworkCore.DerivationFrameworkMaster import * + +isSimulation = False +if globalflags.DataSource()=='geant4': + isSimulation = True + +print(isSimulation) + +#-------------------------------------------------------------------- +# Setup the JpsiFinder vertex fitter tools +#-------------------------------------------------------------------- + +include("DerivationFrameworkBPhys/configureVertexing.py") +BPHY19_VertexTools = BPHYVertexTools("BPHY19") + +from JpsiUpsilonTools.JpsiUpsilonToolsConf import Analysis__JpsiFinder +BPHY19JpsiFinder = Analysis__JpsiFinder( + name = "BPHY19JpsiFinder", + OutputLevel = INFO, + muAndMu = True, + muAndTrack = False, + TrackAndTrack = False, + assumeDiMuons = True, # If true, will assume dimu hypothesis and use PDG value for mu mass + invMassUpper = 100000.0, + invMassLower = 0.0, + Chi2Cut = 200., + oppChargesOnly = True, + atLeastOneComb = True, + useCombinedMeasurement = False, # Only takes effect if combOnly=True + muonCollectionKey = "Muons", + TrackParticleCollection = "InDetTrackParticles", + V0VertexFitterTool = BPHY19_VertexTools.TrkV0Fitter, # V0 vertex fitter + useV0Fitter = False, # if False a TrkVertexFitterTool will be used + TrkVertexFitterTool = BPHY19_VertexTools.TrkVKalVrtFitter, # VKalVrt vertex fitter + TrackSelectorTool = BPHY19_VertexTools.InDetTrackSelectorTool, + VertexPointEstimator = BPHY19_VertexTools.VtxPointEstimator, + useMCPCuts = False ) + +ToolSvc += BPHY19JpsiFinder +print(BPHY19JpsiFinder) + +#-------------------------------------------------------------------- +# Setup the vertex reconstruction tools +#-------------------------------------------------------------------- + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Reco_Vertex + +BPHY19_Reco_mumu = DerivationFramework__Reco_Vertex( + name = "BPHY19_Reco_mumu", + VertexSearchTool = BPHY19JpsiFinder, + OutputVtxContainerName = 
"BPHY19OniaCandidates", + PVContainerName = "PrimaryVertices", + RefPVContainerName = "BPHY19RefittedPrimaryVertices", + RefitPV = True, + MaxPVrefit = 100000, + DoVertexType = 7) + +ToolSvc += BPHY19_Reco_mumu +print(BPHY19_Reco_mumu) + +#-------------------------------------------------------------------- +# Setup the vertex selection and augmentation tools +#-------------------------------------------------------------------- + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Select_onia2mumu + +# psi(nS)->mu+mu- candidates +BPHY19_Select_Psi2mumu = DerivationFramework__Select_onia2mumu( + name = "BPHY19_Select_Psi2mumu", + HypothesisName = "Psi", + InputVtxContainerName = "BPHY19OniaCandidates", + VtxMassHypo = 3096.916, + MassMin = 2000.0, + MassMax = 4500.0, + Chi2Max = 200, + DoVertexType = 7) + +ToolSvc += BPHY19_Select_Psi2mumu +print(BPHY19_Select_Psi2mumu) + +# Y(nS)->mu+mu- candidates +BPHY19_Select_Upsi2mumu = DerivationFramework__Select_onia2mumu( + name = "BPHY19_Select_Upsi2mumu", + HypothesisName = "Upsi", + InputVtxContainerName = "BPHY19OniaCandidates", + VtxMassHypo = 9460.30, + MassMin = 8000.0, + MassMax = 12000.0, + Chi2Max = 200, + DoVertexType = 7) + +ToolSvc += BPHY19_Select_Upsi2mumu +print(BPHY19_Select_Upsi2mumu) + + +#-------------------------------------------------------------------- +# Configure the conversion finder +#-------------------------------------------------------------------- + +from TrkVKalVrtFitter.TrkVKalVrtFitterConf import Trk__TrkVKalVrtFitter +BPHY19_CascadeVertexFitter = Trk__TrkVKalVrtFitter( + name = "BPHY19_CascadeVertexFit", + Extrapolator = BPHY19_VertexTools.InDetExtrapolator, + #FirstMeasuredPoint = True, + FirstMeasuredPoint = False, + CascadeCnstPrecision = 1e-6, + MakeExtendedVertex = True) + +ToolSvc += BPHY19_CascadeVertexFitter +print(BPHY19_CascadeVertexFitter) + +include("DerivationFrameworkBPhys/configureConversionFinder.py") +BPHY19_ConvTools = 
BPHYConversionFinderTools("BPHY19") + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__BPhysConversionFinder +BPHY19_ConversionFinder = DerivationFramework__BPhysConversionFinder( + name = "BPHY19_ConversionFinder", + VertexFitterTool = BPHY19_ConvTools.InDetSecVxFitterTool, + VertexEstimator = BPHY19_ConvTools.InDetSecVtxPointEstimator, + DistanceTool = BPHY19_ConvTools.InDetSecVxTrkDistanceFinder, + ConversionPostSelector = BPHY19_ConvTools.InDetSecVtxPostSelector, + CascadeFitter = BPHY19_CascadeVertexFitter, + InputTrackParticleContainerName = "InDetTrackParticles", + ConversionContainerName = "BPhysConversionCandidates", + DiMuonVertexContainer = "BPHY19OniaCandidates", + PassFlagsToCheck = ["passed_Psi","passed_Upsi"], + RequireDeltaM = True, # Only save conversion if it's a chi_c,b candidate (passes "MaxDeltaM" cut w.r.t. any di-muon candidate) + MaxDeltaM = 2000.0) + +ToolSvc += BPHY19_ConversionFinder +print(BPHY19_ConversionFinder) + +#-------------------------------------------------------------------- +# Select the events to save +#-------------------------------------------------------------------- + +# Require at least one conversion AND one di-muon +BPHY19_expression = "count(BPhysConversionCandidates.passed) > 0 && (count(BPHY19OniaCandidates.passed_Psi) > 0 || count(BPHY19OniaCandidates.passed_Upsi) > 0)" + +from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__xAODStringSkimmingTool +BPHY19_SelectEvent = DerivationFramework__xAODStringSkimmingTool(name = "BPHY19_SelectEvent", + expression = BPHY19_expression) +ToolSvc += BPHY19_SelectEvent +print(BPHY19_SelectEvent) + +#-------------------------------------------------------------------- +# Thin Collections +#-------------------------------------------------------------------- + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Thin_vtxTrk + +# Keep tracks from di-muon vertices 
+BPHY19Thin_vtxTrk = DerivationFramework__Thin_vtxTrk( + name = "BPHY19Thin_vtxTrk", + TrackParticleContainerName = "InDetTrackParticles", + VertexContainerNames = ["BPHY19OniaCandidates"], + PassFlags = ["passed_Psi", "passed_Upsi"] ) + +ToolSvc += BPHY19Thin_vtxTrk + +# Keep tracks from conversions +BPHY19Thin_ConvTrk = DerivationFramework__Thin_vtxTrk( + name = "BPHY19Thin_ConvTrk", + TrackParticleContainerName = "InDetTrackParticles", + VertexContainerNames = ["BPhysConversionCandidates"], + PassFlags = ["passed"] ) + +ToolSvc += BPHY19Thin_ConvTrk + +# Keep tracks from all muons +from DerivationFrameworkInDet.DerivationFrameworkInDetConf import DerivationFramework__MuonTrackParticleThinning +BPHY19MuonTPThinningTool = DerivationFramework__MuonTrackParticleThinning(name = "BPHY19MuonTPThinningTool", + MuonKey = "Muons", + InDetTrackParticlesKey = "InDetTrackParticles") + +ToolSvc += BPHY19MuonTPThinningTool + +#-------------------------------------------------------------------- +# Truth Particle Thinning +#-------------------------------------------------------------------- + +BPHY19_TruthIDString = "" +BPHY19_TruthIDString += "TruthParticles.pdgId == 443" #J/psi +BPHY19_TruthIDString += " || " +BPHY19_TruthIDString += "TruthParticles.pdgId == 100443" #psi(2S) +BPHY19_TruthIDString += " || " +BPHY19_TruthIDString += "TruthParticles.pdgId == 10441" #chi_c0(1P) +BPHY19_TruthIDString += " || " +BPHY19_TruthIDString += "TruthParticles.pdgId == 20443" #chi_c1(1P) +BPHY19_TruthIDString += " || " +BPHY19_TruthIDString += "TruthParticles.pdgId == 445" #chi_c2(1P) +BPHY19_TruthIDString += " || " +BPHY19_TruthIDString += "TruthParticles.pdgId == 553" #Y(1S) +BPHY19_TruthIDString += " || " +BPHY19_TruthIDString += "TruthParticles.pdgId == 100553" #Y(2S) +BPHY19_TruthIDString += " || " +BPHY19_TruthIDString += "TruthParticles.pdgId == 200553" #Y(3S) +BPHY19_TruthIDString += " || " +BPHY19_TruthIDString += "TruthParticles.pdgId == 10551" #chi_b0(1P) +BPHY19_TruthIDString 
+= " || " +BPHY19_TruthIDString += "TruthParticles.pdgId == 110551" #chi_b0(2P) +BPHY19_TruthIDString += " || " +BPHY19_TruthIDString += "TruthParticles.pdgId == 210551" #chi_b0(3P) +BPHY19_TruthIDString += " || " +BPHY19_TruthIDString += "TruthParticles.pdgId == 20553" #chi_b1(1P) +BPHY19_TruthIDString += " || " +BPHY19_TruthIDString += "TruthParticles.pdgId == 120553" #chi_b1(2P) +BPHY19_TruthIDString += " || " +BPHY19_TruthIDString += "TruthParticles.pdgId == 220553" #chi_b1(3P) +BPHY19_TruthIDString += " || " +BPHY19_TruthIDString += "TruthParticles.pdgId == 555" #chi_b2(1P) +BPHY19_TruthIDString += " || " +BPHY19_TruthIDString += "TruthParticles.pdgId == 100555" #chi_b2(2P) +BPHY19_TruthIDString += " || " +BPHY19_TruthIDString += "TruthParticles.pdgId == 200555" #chi_b2(3P) + +print("PDG IDs to save:") +print(BPHY19_TruthIDString) + +# Only save truth informtion directly associated with Onia +from DerivationFrameworkMCTruth.DerivationFrameworkMCTruthConf import DerivationFramework__GenericTruthThinning +BPHY19TruthThinTool = DerivationFramework__GenericTruthThinning(name = "BPHY19TruthThinTool", + ParticleSelectionString = BPHY19_TruthIDString, + PreserveDescendants = True, + PreserveAncestors = True) +ToolSvc += BPHY19TruthThinTool +print(BPHY19TruthThinTool) + +#-------------------------------------------------------------------- +# Create the derivation kernel +#-------------------------------------------------------------------- + +BPHY19ThinningTools = [BPHY19Thin_vtxTrk, BPHY19MuonTPThinningTool, BPHY19Thin_ConvTrk] +if isSimulation: + BPHY19ThinningTools.append(BPHY19TruthThinTool) + +from DerivationFrameworkCore.DerivationFrameworkCoreConf import DerivationFramework__DerivationKernel +DerivationFrameworkJob += CfgMgr.DerivationFramework__DerivationKernel( + "BPHY19Kernel", + AugmentationTools = [BPHY19_Reco_mumu, BPHY19_Select_Psi2mumu, BPHY19_Select_Upsi2mumu,BPHY19_ConversionFinder], + SkimmingTools = [BPHY19_SelectEvent], + ThinningTools = 
BPHY19ThinningTools + ) + +#-------------------------------------------------------------------- +# Create the stream +#-------------------------------------------------------------------- + +streamName = derivationFlags.WriteDAOD_BPHY19Stream.StreamName +fileName = buildFileName( derivationFlags.WriteDAOD_BPHY19Stream ) +BPHY19Stream = MSMgr.NewPoolRootStream( streamName, fileName ) +BPHY19Stream.AcceptAlgs(["BPHY19Kernel"]) +# Special lines for thinning +# Thinning service name must match the one passed to the thinning tools +from AthenaServices.Configurables import ThinningSvc, createThinningSvc +augStream = MSMgr.GetStream( streamName ) +evtStream = augStream.GetEventStream() +svcMgr += createThinningSvc( svcName="BPHY19ThinningSvc", outStreams=[evtStream] ) + +#-------------------------------------------------------------------- +# Generic Collection Slimming +#-------------------------------------------------------------------- + +from DerivationFrameworkCore.SlimmingHelper import SlimmingHelper +BPHY19SlimmingHelper = SlimmingHelper("BPHY19SlimmingHelper") +AllVariables = [] +StaticContent = [] + +# Trigger information +BPHY19SlimmingHelper.IncludeMuonTriggerContent = True +BPHY19SlimmingHelper.IncludeBPhysTriggerContent = True + +## Primary vertices +AllVariables += ["PrimaryVertices"] +StaticContent += ["xAOD::VertexContainer#BPHY19RefittedPrimaryVertices"] +StaticContent += ["xAOD::VertexAuxContainer#BPHY19RefittedPrimaryVerticesAux."] + +## ID track particles +AllVariables += ["InDetTrackParticles"] +AllVariables += ["CombinedMuonTrackParticles"] +AllVariables += ["ExtrapolatedMuonTrackParticles"] + +## Muon container +AllVariables += ["Muons"] + +## Di-muon candidates +StaticContent += ["xAOD::VertexContainer#%s" % BPHY19_Reco_mumu.OutputVtxContainerName] +## we have to disable vxTrackAtVertex branch since it is not xAOD compatible +StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY19_Reco_mumu.OutputVtxContainerName] + +# 
Conversions +StaticContent += ["xAOD::VertexContainer#%s" % BPHY19_ConversionFinder.ConversionContainerName] +## we have to disable vxTrackAtVertex branch since it is not xAOD compatible +StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY19_ConversionFinder.ConversionContainerName] + +# Truth information for MC only +if isSimulation: + AllVariables += ["TruthEvents","TruthParticles","TruthVertices","MuonTruthParticles"] + +BPHY19SlimmingHelper.AllVariables = AllVariables +BPHY19SlimmingHelper.StaticContent = StaticContent +BPHY19SlimmingHelper.AppendContentToStream(BPHY19Stream) diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY2.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY2.py new file mode 100644 index 0000000000000000000000000000000000000000..ea9966593f4dd79ebdbeb405adf5acbe6373f2ed --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY2.py @@ -0,0 +1,332 @@ +#==================================================================== +# BPHY2.py +# BPHY2 repurposed for students analysis of Bs->Upsilon phi Analysis +# It requires the reductionConf flag BPHY2 in Reco_tf.py +#==================================================================== + +# Set up common services and job object. 
+# This should appear in ALL derivation job options +from DerivationFrameworkCore.DerivationFrameworkMaster import * + +isSimulation = False +if globalflags.DataSource()=='geant4': + isSimulation = True + +print(isSimulation) + + +#==================================================================== +# AUGMENTATION TOOLS +#==================================================================== + +include("DerivationFrameworkBPhys/configureVertexing.py") +BPHY2_VertexTools = BPHYVertexTools("BPHY2") + + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__AugOriginalCounts +BPHY2_AugOriginalCounts = DerivationFramework__AugOriginalCounts( + name = "BPHY2_AugOriginalCounts", + VertexContainer = "PrimaryVertices", + TrackContainer = "InDetTrackParticles" ) +ToolSvc += BPHY2_AugOriginalCounts + + +#-------------------------------------------------------------------- +## 2/ setup JpsiFinder tool +## These are general tools independent of DerivationFramework that do the +## actual vertex fitting and some pre-selection. 
+from JpsiUpsilonTools.JpsiUpsilonToolsConf import Analysis__JpsiFinder +BPHY2JpsiFinder = Analysis__JpsiFinder(name = "BPHY2JpsiFinder", + OutputLevel = INFO, + muAndMu = True, + muAndTrack = False, + TrackAndTrack = False, + assumeDiMuons = True, + invMassUpper = 4700.0, + invMassLower = 2600.0, + Chi2Cut = 15., + oppChargesOnly = True, + combOnly = True, + atLeastOneComb = False, + useCombinedMeasurement = False, # Only takes effect if combOnly=True + muonCollectionKey = "Muons", + TrackParticleCollection = "InDetTrackParticles", + V0VertexFitterTool = BPHY2_VertexTools.TrkV0Fitter, # V0 vertex fitter + useV0Fitter = False, # if False a TrkVertexFitterTool will be used + TrkVertexFitterTool = BPHY2_VertexTools.TrkVKalVrtFitter, # VKalVrt vertex fitter + TrackSelectorTool = BPHY2_VertexTools.InDetTrackSelectorTool, + VertexPointEstimator = BPHY2_VertexTools.VtxPointEstimator, + useMCPCuts = False) +ToolSvc += BPHY2JpsiFinder +print(BPHY2JpsiFinder) + +#-------------------------------------------------------------------- +## 3/ setup the vertex reconstruction "call" tool(s). They are part of the derivation framework. +## These Augmentation tools add output vertex collection(s) into the StoreGate and add basic +## decorations which do not depend on the vertex mass hypothesis (e.g. lxy, ptError, etc). +## There should be one tool per topology, i.e. Jpsi and Psi(2S) do not need two instance of the +## Reco tool is the JpsiFinder mass window is wide enough. 
+from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Reco_Vertex +BPHY2JpsiSelectAndWrite = DerivationFramework__Reco_Vertex(name = "BPHY2JpsiSelectAndWrite", + VertexSearchTool = BPHY2JpsiFinder, + OutputVtxContainerName = "BPHY2JpsiCandidates", + PVContainerName = "PrimaryVertices", + RefPVContainerName = "SHOULDNOTBEUSED", + DoVertexType =1) +ToolSvc += BPHY2JpsiSelectAndWrite +print(BPHY2JpsiSelectAndWrite) + + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Select_onia2mumu + + + +## 4/ setup a new vertexing tool (necessary due to use of mass constraint) +from TrkVKalVrtFitter.TrkVKalVrtFitterConf import Trk__TrkVKalVrtFitter +BPHY2BsKKVertexFit = Trk__TrkVKalVrtFitter( + name = "BPHY2BsKKVertexFit", + Extrapolator = BPHY2_VertexTools.InDetExtrapolator, + FirstMeasuredPoint = True, + MakeExtendedVertex = True) +ToolSvc += BPHY2BsKKVertexFit +print(BPHY2BsKKVertexFit) + + + +## 5/ setup the Jpsi+2 track finder +from JpsiUpsilonTools.JpsiUpsilonToolsConf import Analysis__JpsiPlus2Tracks +BPHY2BsJpsiKK = Analysis__JpsiPlus2Tracks(name = "BPHY2BsJpsiKK", + OutputLevel = INFO, +kaonkaonHypothesis = True, +pionpionHypothesis = False, +kaonpionHypothesis = False, +trkThresholdPt = 800.0, +trkMaxEta = 3.0, +BMassUpper = 5800.0, +BMassLower = 5000.0, +DiTrackMassUpper = 1019.445 + 100., +DiTrackMassLower = 1019.445 - 100., +Chi2Cut = 8.0, +TrkQuadrupletMassUpper = 6000.0, +TrkQuadrupletMassLower = 4800.0, +JpsiContainerKey = "BPHY2JpsiCandidates", +TrackParticleCollection = "InDetTrackParticles", +MuonsUsedInJpsi = "Muons", +TrkVertexFitterTool = BPHY2BsKKVertexFit, +TrackSelectorTool = BPHY2_VertexTools.InDetTrackSelectorTool, +UseMassConstraint = False) + +ToolSvc += BPHY2BsJpsiKK +print(BPHY2BsJpsiKK) + + + + +## 6/ setup the combined augmentation/skimming tool for the Bpm +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Reco_Vertex 
+BPHY2BsKKSelectAndWrite = DerivationFramework__Reco_Vertex(name = "BPHY2BsKKSelectAndWrite", + VertexSearchTool = BPHY2BsJpsiKK, + OutputVtxContainerName = "BPHY2BsJpsiKKCandidates", + PVContainerName = "PrimaryVertices", + RefPVContainerName = "BPHY2RefittedPrimaryVertices", + RefitPV = True, + MaxPVrefit = 10000, DoVertexType = 7) +ToolSvc += BPHY2BsKKSelectAndWrite +print(BPHY2BsKKSelectAndWrite) + + +## b/ augment and select Psi(2S)->mumu candidates +BPHY2_Select_Psi2mumu = DerivationFramework__Select_onia2mumu( + name = "BPHY2_Select_Psi2mumu", + HypothesisName = "Psi", + InputVtxContainerName = "BPHY2JpsiCandidates", + VtxMassHypo = 3686.09, + MassMin = 3300.0, + MassMax = 4500.0, + Chi2Max = 200, + DoVertexType = 7) + +ToolSvc += BPHY2_Select_Psi2mumu +print(BPHY2_Select_Psi2mumu) + + +## a/ augment and select Jpsi->mumu candidates +BPHY2_Select_Jpsi2mumu = DerivationFramework__Select_onia2mumu( + name = "BPHY2_Select_Jpsi2mumu", + HypothesisName = "Jpsi", + InputVtxContainerName = "BPHY2JpsiCandidates", + VtxMassHypo = 3096.916, + MassMin = 2000.0, + MassMax = 3600.0, + Chi2Max = 200, + DoVertexType = 7) + +ToolSvc += BPHY2_Select_Jpsi2mumu +print(BPHY2_Select_Jpsi2mumu) + +## b/ augment and select Bs->JpsiKK candidates +BPHY2_Select_Bs2JpsiKK = DerivationFramework__Select_onia2mumu( + name = "BPHY2_Select_Bs2JpsiKK", + HypothesisName = "Bs", + InputVtxContainerName = "BPHY2BsJpsiKKCandidates", + TrkMasses = [105.658, 105.658, 493.677, 493.677], + VtxMassHypo = 5366.3, + MassMin = 5000.0, + MassMax = 5800.0, + Chi2Max = 200) + +ToolSvc += BPHY2_Select_Bs2JpsiKK +print(BPHY2_Select_Bs2JpsiKK) + +#==================================================================== +# SET UP STREAM +#==================================================================== +streamName = derivationFlags.WriteDAOD_BPHY2Stream.StreamName +fileName = buildFileName( derivationFlags.WriteDAOD_BPHY2Stream ) +BPHY2Stream = MSMgr.NewPoolRootStream( streamName, fileName ) 
+BPHY2Stream.AcceptAlgs(["BPHY2Kernel"]) + +# Special lines for thinning +# Thinning service name must match the one passed to the thinning tools +augStream = MSMgr.GetStream( streamName ) + + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Thin_vtxTrk +BPHY2_thinningTool_Tracks = DerivationFramework__Thin_vtxTrk( + name = "BPHY2_thinningTool_Tracks", + TrackParticleContainerName = "InDetTrackParticles", + StreamName = streamName, + VertexContainerNames = ["BPHY2BsJpsiKKCandidates", "BPHY2JpsiCandidates"], + PassFlags = ["passed_Bs", "passed_Psi", "passed_Jpsi"] ) + +ToolSvc += BPHY2_thinningTool_Tracks + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__BPhysPVThinningTool +BPHY2_thinningTool_PV = DerivationFramework__BPhysPVThinningTool( + name = "BPHY2_thinningTool_PV", + CandidateCollections = ["BPHY2BsJpsiKKCandidates"], + StreamName = streamName, + KeepPVTracks =True) + +ToolSvc += BPHY2_thinningTool_PV + +if not isSimulation: #Only Skim Data + from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__xAODStringSkimmingTool + + BPHY2_SelectBsJpsiKKEvent = DerivationFramework__xAODStringSkimmingTool( + name = "BPHY2_SelectBsJpsiKKEvent", + expression = "count(BPHY2BsJpsiKKCandidates.passed_Bs > 0) > 0") + ToolSvc += BPHY2_SelectBsJpsiKKEvent + print(BPHY2_SelectBsJpsiKKEvent) + + + #==================================================================== + # Make event selection based on an OR of the input skimming tools + #==================================================================== + from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__FilterCombinationOR + BPHY2SkimmingOR = CfgMgr.DerivationFramework__FilterCombinationOR("BPHY2SkimmingOR", + FilterList = [BPHY2_SelectBsJpsiKKEvent ])#, BPHY2_SelectBplJpsiKplEventBc + PassFlags = ["passed_Bs"] + ToolSvc += BPHY2SkimmingOR + print(BPHY2SkimmingOR) + + + +## b) 
thinning out tracks that are not attached to muons. The final thinning decision is based on the OR operation +## between decision from this and the previous tools. +from DerivationFrameworkInDet.DerivationFrameworkInDetConf import DerivationFramework__MuonTrackParticleThinning +BPHY2MuonTPThinningTool = DerivationFramework__MuonTrackParticleThinning(name = "BPHY2MuonTPThinningTool", + MuonKey = "Muons", + StreamName = streamName, + InDetTrackParticlesKey = "InDetTrackParticles") +ToolSvc += BPHY2MuonTPThinningTool + + +#==================================================================== +# CREATE THE DERIVATION KERNEL ALGORITHM AND PASS THE ABOVE TOOLS +#==================================================================== + +thiningCollection = [BPHY2_thinningTool_Tracks, BPHY2_thinningTool_PV, BPHY2MuonTPThinningTool] +print(thiningCollection) + +BPHY2Seq = CfgMgr.AthSequencer("BPHY2Sequence") +DerivationFrameworkJob += BPHY2Seq + +# The name of the kernel (BPHY2Kernel in this case) must be unique to this derivation +from DerivationFrameworkCore.DerivationFrameworkCoreConf import DerivationFramework__DerivationKernel +BPHY2Seq += CfgMgr.DerivationFramework__DerivationKernel("BPHY2Kernel", + AugmentationTools = [ BPHY2JpsiSelectAndWrite, + BPHY2BsKKSelectAndWrite, + BPHY2_Select_Psi2mumu, + BPHY2_Select_Jpsi2mumu, BPHY2_Select_Bs2JpsiKK, + BPHY2_AugOriginalCounts], + #Only skim if not MC + SkimmingTools = [BPHY2SkimmingOR] if not isSimulation else [], + ThinningTools = thiningCollection + + ) + + +#==================================================================== +# Slimming +#==================================================================== + +# Added by ASC +from DerivationFrameworkCore.SlimmingHelper import SlimmingHelper +BPHY2SlimmingHelper = SlimmingHelper("BPHY2SlimmingHelper") +AllVariables = [] +StaticContent = [] + +# Needed for trigger objects +BPHY2SlimmingHelper.IncludeMuonTriggerContent = TRUE +BPHY2SlimmingHelper.IncludeBPhysTriggerContent = 
TRUE +SmartVar = [] +## primary vertices +SmartVar += ["PrimaryVertices"] +StaticContent += ["xAOD::VertexContainer#BPHY2RefittedPrimaryVertices"] +StaticContent += ["xAOD::VertexAuxContainer#BPHY2RefittedPrimaryVerticesAux."] + + + +## ID track particles +AllVariables += ["InDetTrackParticles"] + +## combined / extrapolated muon track particles +## (note: for tagged muons there is no extra TrackParticle collection since the ID tracks +## are store in InDetTrackParticles collection) +AllVariables += ["CombinedMuonTrackParticles"] + + +## muon container +SmartVar += ["Muons"] + + +## Jpsi candidates +StaticContent += ["xAOD::VertexContainer#%s" % BPHY2JpsiSelectAndWrite.OutputVtxContainerName] +## we have to disable vxTrackAtVertex branch since it is not xAOD compatible +StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY2JpsiSelectAndWrite.OutputVtxContainerName] + +StaticContent += ["xAOD::VertexContainer#%s" % BPHY2BsKKSelectAndWrite.OutputVtxContainerName] +StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY2BsKKSelectAndWrite.OutputVtxContainerName] + + +# Tagging information (in addition to that already requested by usual algorithms) +AllVariables += ["MuonSpectrometerTrackParticles" ] + + + + + +# Added by ASC +# Truth information for MC only +if isSimulation: + AllVariables += ["TruthEvents","TruthParticles","TruthVertices","MuonTruthParticles" ] + + +AllVariables = list(set(AllVariables)) # remove duplicates + +BPHY2SlimmingHelper.AllVariables = AllVariables +BPHY2SlimmingHelper.StaticContent = StaticContent +BPHY2SlimmingHelper.SmartCollections = SmartVar + +BPHY2SlimmingHelper.AppendContentToStream(BPHY2Stream) diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY20.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY20.py new file mode 100644 index 0000000000000000000000000000000000000000..1e8f6f8fd313eb8740f2df844e9bdc8e2008b80f --- /dev/null +++ 
b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY20.py @@ -0,0 +1,775 @@ +#==================================================================== +# +# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration +# +# BPHY20.py +# +# R_Jpsi analysis (currently muon channel only) +# +#==================================================================== +from AthenaCommon.AppMgr import ServiceMgr as svcMgr +if not hasattr(svcMgr, 'ItemListSvc'): svcMgr += CfgMgr.ItemListSvc() +svcMgr.ItemListSvc.OutputLevel = DEBUG + +#==================================================================== +# FLAGS TO PERSONALIZE THE DERIVATION +#==================================================================== + +onlyAugmentations = False # Set to True to deactivate thinning and skimming, and only keep augmentations (to generate a sample with full xAOD plus all the extra) +thinTruth = True +addMuExtrapolationForTrigger = True + +from DerivationFrameworkCore.DerivationFrameworkMaster import * +isSimulation = False +if globalflags.DataSource()=='geant4': + isSimulation = True + +print(isSimulation) + + +from DerivationFrameworkJetEtMiss.JetCommon import * +from DerivationFrameworkJetEtMiss.METCommon import * + +#==================================================================== +# SET UP STREAM +#==================================================================== +streamName = derivationFlags.WriteDAOD_BPHY20Stream.StreamName +fileName = buildFileName( derivationFlags.WriteDAOD_BPHY20Stream ) + +BPHY20Stream = MSMgr.NewPoolRootStream( streamName, fileName ) +BPHY20Stream.AcceptAlgs(["BPHY20Kernel"]) + +#==================================================================== +# AUGMENTATION TOOLS +#==================================================================== +## 1/ setup vertexing tools and services + +include("DerivationFrameworkBPhys/configureVertexing.py") +BPHY20_VertexTools = BPHYVertexTools("BPHY20") + +from 
DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__AugOriginalCounts +BPHY20_AugOriginalCounts = DerivationFramework__AugOriginalCounts( + name = "BPHY20_AugOriginalCounts", + VertexContainer = "PrimaryVertices", + TrackContainer = "InDetTrackParticles" ) +ToolSvc += BPHY20_AugOriginalCounts + +#==================================================================== +# TriggerCounting for Kernel1 (from BPHY7) +#==================================================================== +#List of trigggers to be counted (high Sig-eff*Lumi ones are in) +triggersToMetadata= [ +### 2018 +"HLT_mu11_mu6_bJpsimumu", +"HLT_2mu10_bJpsimumu", +"HLT_mu6_2mu4_bJpsi", +"HLT_3mu4_bJpsi", +"HLT_mu11_mu6_bJpsimumu_Lxy0", +"HLT_3mu6_bJpsi", +"HLT_mu11_mu6_bJpsimumu_L1LFV-MU11", +"HLT_2mu6_bJpsimumu_L1BPH-2M9-2MU6_BPH-2DR15-2MU6", +"HLT_mu11_mu6_bJpsimumu_Lxy0_L1LFV-MU11", +"HLT_2mu6_bJpsimumu_Lxy0_L1BPH-2M9-2MU6_BPH-2DR15-2MU6", +"HLT_mu6_mu4_bJpsimumu_Lxy0_L1BPH-2M9-MU6MU4_BPH-0DR15-MU6MU4", +"HLT_2mu4_bJpsimumu_Lxy0_L1BPH-2M9-2MU4_BPH-0DR15-2MU4", +"HLT_mu6_mu4_bJpsimumu_L1BPH-2M9-MU6MU4_BPH-0DR15-MU6MU4", +"HLT_mu10_bJpsi_TrkPEB", +"HLT_mu6_bJpsi_TrkPEB", +"HLT_2mu6_bJpsimumu", +"HLT_mu6_mu2noL1_msonly_bJpsimumu_noid_PEB", + +### 2017 +#HLT_2mu10_bJpsimumu +"HLT_2mu10_bJpsimumu_noL2", +"HLT_2mu6_bJpsimumu", +#HLT_2mu6_bJpsimumu_L1BPH-2M9-2MU6_BPH-2DR15-2MU6 +"HLT_2mu6_bJpsimumu_Lxy0", +#HLT_2mu6_bJpsimumu_Lxy0_L1BPH-2M9-2MU6_BPH-2DR15-2MU6 +#HLT_3mu4_bJpsi +#HLT_3mu6_bJpsi +#HLT_mu10_bJpsi_TrkPEB +"HLT_mu10_mu6_bJpsimumu", +"HLT_mu10_mu6_bJpsimumu_Lxy0", +#HLT_mu11_mu6_bJpsimumu +#HLT_mu11_mu6_bJpsimumu_Lxy0 +"HLT_mu14_bJpsi_Trkloose", +"HLT_mu14_bJpsi_TrkPEB", +"HLT_mu20_2mu2noL1_JpsimumuFS", +"HLT_mu20_2mu4_JpsimumuL2", +"HLT_mu20_bJpsi_Trkloose", +"HLT_mu20_bJpsi_TrkPEB", +#HLT_mu6_bJpsi_TrkPEB +#HLT_mu6_mu4_bJpsimumu_L1BPH-2M9-MU6MU4_BPH-0DR15-MU6MU4 +#HLT_mu6_mu4_bJpsimumu_Lxy0_L1BPH-2M9-MU6MU4_BPH-0DR15-MU6MU4 + +### 2016 +#HLT_2mu10_bJpsimumu 
+"HLT_2mu10_bJpsimumu_delayed", +"HLT_2mu10_bJpsimumu_noL2", +"HLT_2mu4_bJpsimumu_delayed_L1BPH-2M8-2MU4", +"HLT_2mu4_bJpsimumu_L1BPH-2M8-2MU4", +"HLT_2mu4_bJpsimumu_Lxy0_delayed_L1BPH-2M8-2MU4", +"HLT_2mu6_bJpsimumu", +"HLT_2mu6_bJpsimumu_delayed", +"HLT_2mu6_bJpsimumu_delayed_L1BPH-2M9-2MU6_BPH-2DR15-2MU6", +#HLT_2mu6_bJpsimumu_L1BPH-2M9-2MU6_BPH-2DR15-2MU6 +"HLT_2mu6_bJpsimumu_Lxy0", +"HLT_2mu6_bJpsimumu_Lxy0_delayed", +"HLT_2mu6_bJpsimumu_Lxy0_delayed_L1BPH-2M9-2MU6_BPH-2DR15-2MU6", +#HLT_2mu6_bJpsimumu_Lxy0_L1BPH-2M9-2MU6_BPH-2DR15-2MU6 +#HLT_3mu4_bJpsi +"HLT_3mu4_bJpsi_delayed", +#HLT_3mu6_bJpsi +"HLT_mu10_mu6_bJpsimumu", +"HLT_mu10_mu6_bJpsimumu_delayed", +"HLT_mu10_mu6_bJpsimumu_Lxy0", +"HLT_mu10_mu6_bJpsimumu_Lxy0_delayed", +"HLT_mu18_bJpsi_Trkloose", +"HLT_mu20_2mu0noL1_JpsimumuFS", +"HLT_mu20_2mu4_JpsimumuL2", +#HLT_mu6_2mu4_bJpsi +"HLT_mu6_2mu4_bJpsi_delayed", +"HLT_mu6_mu4_bJpsimumu", +"HLT_mu6_mu4_bJpsimumu_delayed", +"HLT_mu6_mu4_bJpsimumu_delayed_L1BPH-2M8-MU6MU4_BPH-0DR15-MU6MU4", +"HLT_mu6_mu4_bJpsimumu_delayed_L1MU6MU4-BO", +"HLT_mu6_mu4_bJpsimumu_L12MU4-B", +"HLT_mu6_mu4_bJpsimumu_L1BPH-2M8-MU6MU4_BPH-0DR15-MU6MU4", +"HLT_mu6_mu4_bJpsimumu_L1MU6MU4-BO", +"HLT_mu6_mu4_bJpsimumu_Lxy0", +"HLT_mu6_mu4_bJpsimumu_Lxy0_delayed", +"HLT_mu6_mu4_bJpsimumu_Lxy0_delayed_L1BPH-2M8-MU6MU4_BPH-0DR15-MU6MU4", +"HLT_mu6_mu4_bJpsimumu_Lxy0_L1BPH-2M8-MU6MU4_BPH-0DR15-MU6MU4", + +### 2015 +#HLT_2mu10_bJpsimumu +#HLT_2mu10_bJpsimumu_noL2 +"HLT_2mu10_l2msonly_bJpsimumu_noL2", +"HLT_2mu4_bJpsimumu", +"HLT_2mu4_bJpsimumu_noL2", +"HLT_2mu4_l2msonly_bJpsimumu_noL2", +#HLT_2mu6_bJpsimumu +"HLT_2mu6_bJpsimumu_noL2", +"HLT_2mu6_l2msonly_bJpsimumu_noL2", +#HLT_3mu4_bJpsi +#HLT_3mu6_bJpsi +"HLT_mu10_mu10_l2msonly_bJpsimumu_noL2", +"HLT_mu18_2mu0noL1_JpsimumuFS", +"HLT_mu18_2mu4_JpsimumuL2", +#HLT_mu18_bJpsi_Trkloose +#HLT_mu20_2mu0noL1_JpsimumuFS +#HLT_mu20_2mu4_JpsimumuL2 +"HLT_mu4_mu4_l2msonly_bJpsimumu_noL2", +"HLT_mu6_l2msonly_mu4_bJpsimumu_noL2", 
+"HLT_mu6_l2msonly_mu4_l2msonly_bJpsimumu_noL2", +#HLT_mu6_mu4_bJpsimumu +"HLT_mu6_mu4_bJpsimumu_noL2", +"HLT_mu6_mu4_l2msonly_bJpsimumu_noL2", +"HLT_mu6_mu6_l2msonly_bJpsimumu_noL2", + +"HLT_mu4","HLT_mu6","HLT_mu10","HLT_mu14","HLT_mu18","HLT_mu24" #5% + + ] + +triggersToMetadata_filter = list( set(triggersToMetadata) ) + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__TriggerCountToMetadata +BPHY20TriggerCountToMetadata = DerivationFramework__TriggerCountToMetadata(name = "BPHY20TriggerCount", + TriggerList = triggersToMetadata_filter, + FolderName = "BPHY20") + +ToolSvc += BPHY20TriggerCountToMetadata + +#==================================================================== +# PRESELECTION for Kernel1 #Added by Matteo +#==================================================================== +## 1/ Setup the skimming based on triggers +## + +triggerList = [ +## 2018 +"HLT_mu11_mu6_bJpsimumu", +"HLT_2mu10_bJpsimumu", +"HLT_mu6_2mu4_bJpsi", +"HLT_3mu4_bJpsi", +"HLT_mu11_mu6_bJpsimumu_Lxy0", +"HLT_3mu6_bJpsi", +"HLT_mu11_mu6_bJpsimumu_L1LFV-MU11", +"HLT_2mu6_bJpsimumu_L1BPH-2M9-2MU6_BPH-2DR15-2MU6", +"HLT_mu11_mu6_bJpsimumu_Lxy0_L1LFV-MU11", +"HLT_2mu6_bJpsimumu_Lxy0_L1BPH-2M9-2MU6_BPH-2DR15-2MU6", +"HLT_mu6_mu4_bJpsimumu_Lxy0_L1BPH-2M9-MU6MU4_BPH-0DR15-MU6MU4", +"HLT_2mu4_bJpsimumu_Lxy0_L1BPH-2M9-2MU4_BPH-0DR15-2MU4", +"HLT_mu6_mu4_bJpsimumu_L1BPH-2M9-MU6MU4_BPH-0DR15-MU6MU4", +"HLT_mu10_bJpsi_TrkPEB", +"HLT_mu6_bJpsi_TrkPEB", +"HLT_2mu6_bJpsimumu", +"HLT_mu6_mu2noL1_msonly_bJpsimumu_noid_PEB", + + +"HLT_mu22_mu8noL1_TagandProbe", + +### 2017 +#HLT_2mu10_bJpsimumu +"HLT_2mu10_bJpsimumu_noL2", +"HLT_2mu6_bJpsimumu", +#HLT_2mu6_bJpsimumu_L1BPH-2M9-2MU6_BPH-2DR15-2MU6 +"HLT_2mu6_bJpsimumu_Lxy0", +#HLT_2mu6_bJpsimumu_Lxy0_L1BPH-2M9-2MU6_BPH-2DR15-2MU6 +#HLT_3mu4_bJpsi +#HLT_3mu6_bJpsi +#HLT_mu10_bJpsi_TrkPEB +"HLT_mu10_mu6_bJpsimumu", +"HLT_mu10_mu6_bJpsimumu_Lxy0", +#HLT_mu11_mu6_bJpsimumu +#HLT_mu11_mu6_bJpsimumu_Lxy0 
+"HLT_mu14_bJpsi_Trkloose", +"HLT_mu14_bJpsi_TrkPEB", +"HLT_mu20_2mu2noL1_JpsimumuFS", +"HLT_mu20_2mu4_JpsimumuL2", +"HLT_mu20_bJpsi_Trkloose", +"HLT_mu20_bJpsi_TrkPEB", +#HLT_mu6_bJpsi_TrkPEB +#HLT_mu6_mu4_bJpsimumu_L1BPH-2M9-MU6MU4_BPH-0DR15-MU6MU4 +#HLT_mu6_mu4_bJpsimumu_Lxy0_L1BPH-2M9-MU6MU4_BPH-0DR15-MU6MU4 + +### 2016 +#HLT_2mu10_bJpsimumu +"HLT_2mu10_bJpsimumu_delayed", +"HLT_2mu10_bJpsimumu_noL2", +"HLT_2mu4_bJpsimumu_delayed_L1BPH-2M8-2MU4", +"HLT_2mu4_bJpsimumu_L1BPH-2M8-2MU4", +"HLT_2mu4_bJpsimumu_Lxy0_delayed_L1BPH-2M8-2MU4", +"HLT_2mu6_bJpsimumu", +"HLT_2mu6_bJpsimumu_delayed", +"HLT_2mu6_bJpsimumu_delayed_L1BPH-2M9-2MU6_BPH-2DR15-2MU6", +#HLT_2mu6_bJpsimumu_L1BPH-2M9-2MU6_BPH-2DR15-2MU6 +"HLT_2mu6_bJpsimumu_Lxy0", +"HLT_2mu6_bJpsimumu_Lxy0_delayed", +"HLT_2mu6_bJpsimumu_Lxy0_delayed_L1BPH-2M9-2MU6_BPH-2DR15-2MU6", +#HLT_2mu6_bJpsimumu_Lxy0_L1BPH-2M9-2MU6_BPH-2DR15-2MU6 +#HLT_3mu4_bJpsi +"HLT_3mu4_bJpsi_delayed", +#HLT_3mu6_bJpsi +"HLT_mu10_mu6_bJpsimumu", +"HLT_mu10_mu6_bJpsimumu_delayed", +"HLT_mu10_mu6_bJpsimumu_Lxy0", +"HLT_mu10_mu6_bJpsimumu_Lxy0_delayed", +"HLT_mu18_bJpsi_Trkloose", +"HLT_mu20_2mu0noL1_JpsimumuFS", +"HLT_mu20_2mu4_JpsimumuL2", +#HLT_mu6_2mu4_bJpsi +"HLT_mu6_2mu4_bJpsi_delayed", +"HLT_mu6_mu4_bJpsimumu", +"HLT_mu6_mu4_bJpsimumu_delayed", +"HLT_mu6_mu4_bJpsimumu_delayed_L1BPH-2M8-MU6MU4_BPH-0DR15-MU6MU4", +"HLT_mu6_mu4_bJpsimumu_delayed_L1MU6MU4-BO", +"HLT_mu6_mu4_bJpsimumu_L12MU4-B", +"HLT_mu6_mu4_bJpsimumu_L1BPH-2M8-MU6MU4_BPH-0DR15-MU6MU4", +"HLT_mu6_mu4_bJpsimumu_L1MU6MU4-BO", +"HLT_mu6_mu4_bJpsimumu_Lxy0", +"HLT_mu6_mu4_bJpsimumu_Lxy0_delayed", +"HLT_mu6_mu4_bJpsimumu_Lxy0_delayed_L1BPH-2M8-MU6MU4_BPH-0DR15-MU6MU4", +"HLT_mu6_mu4_bJpsimumu_Lxy0_L1BPH-2M8-MU6MU4_BPH-0DR15-MU6MU4", + +### 2015 +#HLT_2mu10_bJpsimumu +#HLT_2mu10_bJpsimumu_noL2 +"HLT_2mu10_l2msonly_bJpsimumu_noL2", +"HLT_2mu4_bJpsimumu", +"HLT_2mu4_bJpsimumu_noL2", +"HLT_2mu4_l2msonly_bJpsimumu_noL2", +#HLT_2mu6_bJpsimumu +"HLT_2mu6_bJpsimumu_noL2", 
+"HLT_2mu6_l2msonly_bJpsimumu_noL2", +#HLT_3mu4_bJpsi +#HLT_3mu6_bJpsi +"HLT_mu10_mu10_l2msonly_bJpsimumu_noL2", +"HLT_mu18_2mu0noL1_JpsimumuFS", +"HLT_mu18_2mu4_JpsimumuL2", +#HLT_mu18_bJpsi_Trkloose +#HLT_mu20_2mu0noL1_JpsimumuFS +#HLT_mu20_2mu4_JpsimumuL2 +"HLT_mu4_mu4_l2msonly_bJpsimumu_noL2", +"HLT_mu6_l2msonly_mu4_bJpsimumu_noL2", +"HLT_mu6_l2msonly_mu4_l2msonly_bJpsimumu_noL2", +#HLT_mu6_mu4_bJpsimumu +"HLT_mu6_mu4_bJpsimumu_noL2", +"HLT_mu6_mu4_l2msonly_bJpsimumu_noL2", +"HLT_mu6_mu6_l2msonly_bJpsimumu_noL2" , + + + +"HLT_mu4","HLT_mu6","HLT_mu10","HLT_mu14","HLT_mu18","HLT_mu24", #5% + + +"HLT_.*mu11_mu6.*", # Recent triggers +"HLT_.*mu.*imedium.*", # Trigger with looser isolation selection +"HLT_.*mu.*iloose.*", +"HLT_.*mu6.*2mu4.*", +"HLT_.*2mu.*", +"HLT_.*mu11.*2mu4noL1.*", +"HLT_.*2mu14_nomucomb.*", +"HLT_.*bTau.*", # Our tau triggers +"HLT_.*bDimu2700.*", +"HLT_.*bPhi.*" + ] + +from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__TriggerSkimmingTool +BPHY20TriggerSkim = DerivationFramework__TriggerSkimmingTool(name = "BPHY20TriggerSkim", + TriggerListOR = triggerList, + TriggerListAND = [] ) + +ToolSvc += BPHY20TriggerSkim + +#-------------------------------------------------------------------- +# 2/ Select J/psi>mu+mu- +#-------------------------------------------------------------------- +## a/ setup JpsiFinder tool +## These are general tools independent of DerivationFramework that do the +## actual vertex fitting and some pre-selection. 
+from JpsiUpsilonTools.JpsiUpsilonToolsConf import Analysis__JpsiFinder +BPHY20JpsiFinder = Analysis__JpsiFinder( + name = "BPHY20JpsiFinder", + OutputLevel = INFO, + muAndMu = True, + muAndTrack = False, + TrackAndTrack = False, + assumeDiMuons = True, + muonThresholdPt = 1000, + invMassUpper = 4500.0, + invMassLower = 2000.0, + Chi2Cut = 20., + oppChargesOnly = True, +# allChargeCombinations = True, + combOnly = False, + atLeastOneComb = True, + useCombinedMeasurement = False, # Only takes effect if combOnly=True + muonCollectionKey = "Muons", + TrackParticleCollection = "InDetTrackParticles", + V0VertexFitterTool = BPHY20_VertexTools.TrkV0Fitter, # V0 vertex fitter + useV0Fitter = False, # if False a TrkVertexFitterTool will be used + TrkVertexFitterTool = BPHY20_VertexTools.TrkVKalVrtFitter, # VKalVrt vertex fitter + TrackSelectorTool = BPHY20_VertexTools.InDetTrackSelectorTool, + VertexPointEstimator = BPHY20_VertexTools.VtxPointEstimator, + useMCPCuts = False) + +ToolSvc += BPHY20JpsiFinder +print(BPHY20JpsiFinder) + +#-------------------------------------------------------------------- +## b/ setup the vertex reconstruction "call" tool(s). They are part of the derivation framework. +## These Augmentation tools add output vertex collection(s) into the StoreGate and add basic +## decorations which do not depend on the vertex mass hypothesis (e.g. lxy, ptError, etc). +## There should be one tool per topology, i.e. Jpsi and Psi(2S) do not need two instance of the +## Reco tool is the JpsiFinder mass window is wide enough. 
+from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Reco_Vertex +BPHY20JpsiSelectAndWrite = DerivationFramework__Reco_Vertex( + name = "BPHY20JpsiSelectAndWrite", + VertexSearchTool = BPHY20JpsiFinder, + OutputVtxContainerName = "BPHY20JpsiCandidates", + PVContainerName = "PrimaryVertices", + RefPVContainerName = "SHOULDNOTBEUSED", + DoVertexType = 1) + +ToolSvc += BPHY20JpsiSelectAndWrite +print(BPHY20JpsiSelectAndWrite) + +#-------------------------------------------------------------------- +## c/ augment and select Jpsi->mumu candidates +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Select_onia2mumu +BPHY20_Select_Jpsi2mumu = DerivationFramework__Select_onia2mumu( + name = "BPHY20_Select_Jpsi2mumu", + HypothesisName = "Jpsi", + InputVtxContainerName = "BPHY20JpsiCandidates", + VtxMassHypo = 3096.900, + MassMin = 2000.0, + MassMax = 4500.0, + Chi2Max = 20, + DoVertexType = 1) + +ToolSvc += BPHY20_Select_Jpsi2mumu +print(BPHY20_Select_Jpsi2mumu) + +#-------------------------------------------------------------------- +# 3/ select B_c+->J/psi mu +#-------------------------------------------------------------------- +## a/ setup a new vertexing tool (necessary due to use of mass constraint) +from TrkVKalVrtFitter.TrkVKalVrtFitterConf import Trk__TrkVKalVrtFitter +BcJpsiMuVertexFit = Trk__TrkVKalVrtFitter( + name = "BcJpsiMuVertexFit", + Extrapolator = BPHY20_VertexTools.InDetExtrapolator, + FirstMeasuredPoint = True, + MakeExtendedVertex = True) + +ToolSvc += BcJpsiMuVertexFit +print(BcJpsiMuVertexFit) + +#-------------------------------------------------------------------- +## b/ setup the Jpsi+1 track finder +from JpsiUpsilonTools.JpsiUpsilonToolsConf import Analysis__JpsiPlus1Track +BPHY20BcJpsiMu = Analysis__JpsiPlus1Track( + name = "BPHY20BcJpsiMu", + OutputLevel = INFO, #DEBUG, + pionHypothesis = True, #False, + kaonHypothesis = False,#True, + trkThresholdPt = 1000, + trkMaxEta = 
3.0, + BThresholdPt = 1000.0, + BMassUpper = 6900.0, + BMassLower = 2000.0, + JpsiContainerKey = "BPHY20JpsiCandidates", + TrackParticleCollection = "InDetTrackParticles", + MuonsUsedInJpsi = "Muons", + ExcludeCrossJpsiTracks = False, + TrkVertexFitterTool = BcJpsiMuVertexFit, + TrackSelectorTool = BPHY20_VertexTools.InDetTrackSelectorTool, + UseMassConstraint = True, + RequireNMuonTracks = 1, + Chi2Cut = 1000, #5 + TrkTrippletMassUpper = 6900, + TrkTrippletMassLower = 2000.0) + +ToolSvc += BPHY20BcJpsiMu +print(BPHY20BcJpsiMu) + +#-------------------------------------------------------------------- +## c/ setup the combined augmentation/skimming tool for the Bc+>J/psi mu +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Reco_Vertex +BPHY20BcJpsiMuSelectAndWrite = DerivationFramework__Reco_Vertex( + name = "BPHY20BcJpsiMuSelectAndWrite", + OutputLevel = INFO, + VertexSearchTool = BPHY20BcJpsiMu, + OutputVtxContainerName = "BPHY20BcJpsiMuCandidates", + PVContainerName = "PrimaryVertices", + RefPVContainerName = "BPHY20RefittedPrimaryVertices", + RefitPV = True, + MaxPVrefit = 1000) + +ToolSvc += BPHY20BcJpsiMuSelectAndWrite +print(BPHY20BcJpsiMuSelectAndWrite) + +#-------------------------------------------------------------------- +## c/ augment and select B_c+>Jpsi mu candidates +BPHY20_Select_Bc2JpsiMu = DerivationFramework__Select_onia2mumu( + name = "BPHY20_Select_Bc2JpsiMu", + HypothesisName = "Bc", + InputVtxContainerName = "BPHY20BcJpsiMuCandidates", + TrkMasses = [105.658, 105.658, 105.658], + VtxMassHypo = 6274.9, + MassMin = 2000.0, + MassMax = 6900.0, + Chi2Max = 1000) + +ToolSvc += BPHY20_Select_Bc2JpsiMu +print(BPHY20_Select_Bc2JpsiMu) + + + +#==================================================================== +# Isolation +#==================================================================== + + +#Track isolation for candidates +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import 
DerivationFramework__VertexTrackIsolation +BPHY20TrackIsolationDecorator = DerivationFramework__VertexTrackIsolation( + name = "BPHY20TrackIsolationDecorator", + OutputLevel = INFO, + TrackIsoTool = "xAOD::TrackIsolationTool", + TrackContainer = "InDetTrackParticles", + InputVertexContainer = "BPHY20BcJpsiMuCandidates", + PassFlags = ["passed_Bc"] ) + +ToolSvc += BPHY20TrackIsolationDecorator + +#CaloIsolationTool explicitly declared to avoid pointless warnings (it works!!!) +from IsolationTool.IsolationToolConf import xAOD__CaloIsolationTool +BPHY20CaloIsolationTool = xAOD__CaloIsolationTool( + name = "BPHY20CaloIsolationTool", + OutputLevel = WARNING, + saveOnlyRequestedCorrections = True, + IsoLeakCorrectionTool = "" ) #Workaround for a bug in older versions + +ToolSvc += BPHY20CaloIsolationTool + +#Calo isolation for candidates +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__VertexCaloIsolation +BPHY20CaloIsolationDecorator = DerivationFramework__VertexCaloIsolation( + name = "BPHY20CaloIsolationDecorator", + OutputLevel = INFO, + CaloIsoTool = BPHY20CaloIsolationTool, #"xAOD::CaloIsolationTool", + TrackContainer = "InDetTrackParticles", + InputVertexContainer = "BPHY20BcJpsiMuCandidates", + CaloClusterContainer = "CaloCalTopoClusters", + ParticleCaloExtensionTool = "Trk::ParticleCaloExtensionTool/ParticleCaloExtensionTool", + PassFlags = ["passed_Bc"] ) + +ToolSvc += BPHY20CaloIsolationDecorator + + + +#==================================================================== +# Skimming tool to select only events with the correct vertices +#==================================================================== + +#-------------------------------------------------------------------- +## 9/ select the event. We only want to keep events that contain certain three-mu vertices which passed certain selection. +## Exactly like in the preselection, where only 2mu vertices are treated. 
+ +# +#expression = "count(BPHY20JpsiCandidates.x > -999.0)+count(BPHY20BcJpsiMuCandidates.x > -999.0)+ count(BPHY20BcJpsiMuCandidates.passed_Bc) >0 " + +from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__xAODStringSkimmingTool +BPHY20_SelectBcJpsiMuEvent = DerivationFramework__xAODStringSkimmingTool( + name = "BPHY20_SelectBcJpsiMuEvent", + expression = "count(BPHY20BcJpsiMuCandidates.passed_Bc) >= 1 ") + +ToolSvc += BPHY20_SelectBcJpsiMuEvent +print(BPHY20_SelectBcJpsiMuEvent) + + #==================================================================== + # Make event selection based on an OR of the input skimming tools + #==================================================================== + +# from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__FilterCombinationOR +# BPHY20SkimmingOR = CfgMgr.DerivationFramework__FilterCombinationOR( +# "BPHY20SkimmingOR", +# FilterList = [BPHY20_SelectBcJpsiMuEvent] ) +# ToolSvc += BPHY20SkimmingOR +# print BPHY20SkimmingOR + + +#==================================================================== +# Add Extrapolation of muons to trigger layers +#==================================================================== + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__MuonExtrapolationTool +BPHY20_Extrap_Tool = DerivationFramework__MuonExtrapolationTool( name = "BPHY20_ExtrapolationTool", OutputLevel = INFO ) + +ToolSvc += BPHY20_Extrap_Tool + + + + + +#==================================================================== +# Thinning Helper and various thinning tools +#==================================================================== + +#-------------------------------------------------------------------- +## 10/ Setup the thinning helper, only tool able to perform thinning of trigger navigation information + +from DerivationFrameworkCore.ThinningHelper import ThinningHelper +BPHY20ThinningHelper = ThinningHelper( 
"BPHY20ThinningHelper" ) +BPHY20ThinningHelper.TriggerChains = 'HLT_.*mu.*' #triggerList # . = any character; * = 0 or more times; + = 1 or more times; ? 0 or 1 times "Regular_Expression" +BPHY20ThinningHelper.AppendToStream( BPHY20Stream ) + + +#-------------------------------------------------------------------- +## 11/ track and vertex thinning. We want to remove all reconstructed secondary vertices +## which haven't passed any of the selections defined by (Select_*) tools. +## We also want to keep only tracks which are associated with either muons or any of the +## vertices that passed the selection. Multiple thinning tools can perform the +## selection. The final thinning decision is based on the OR of all the decisions (by default, +## although it can be changed by the JO). + +## 12/ Cleans up, removing duplicate vertices. An issue caused by the logic of Jpsi+1 track in the case of 3-muon candidates + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Thin_vtxDuplicates +BPHY20Thin_vtxDuplicates = DerivationFramework__Thin_vtxDuplicates(name = "BPHY20Thin_vtxDuplicates", + VertexContainerName = "BPHY20BcJpsiMuCandidates", + PassFlags = ["passed_Bc"]) + +ToolSvc += BPHY20Thin_vtxDuplicates + +## a) thinning out vertices that didn't pass any selection and identifying tracks associated with +## selected vertices. The "VertexContainerNames" is a list of the vertex containers, and "PassFlags" +## contains all pass flags for Select_* tools that must be satisfied. The vertex is kept if it +## satisfies any of the listed selections. 
+ +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Thin_vtxTrk +BPHY20Thin_vtxTrk = DerivationFramework__Thin_vtxTrk( + name = "BPHY20Thin_vtxTrk", + OutputLevel = INFO, + TrackParticleContainerName = "InDetTrackParticles", + AcceptanceRadius = 1., + VertexContainerNames = ["BPHY20BcJpsiMuCandidates"], + PassFlags = ["passed_Bc"], + ApplyAnd = True ) # "and" requirement for Vertices + +ToolSvc += BPHY20Thin_vtxTrk + + +## 13/ thinning out tracks that are not attached to muons. The final thinning decision is based on the OR operation +## between decision from this and the previous tools. +from DerivationFrameworkInDet.DerivationFrameworkInDetConf import DerivationFramework__MuonTrackParticleThinning +BPHY20MuonTPThinningTool = DerivationFramework__MuonTrackParticleThinning(name = "BPHY20MuonTPThinningTool", + MuonKey = "Muons", + InDetTrackParticlesKey = "InDetTrackParticles") +ToolSvc += BPHY20MuonTPThinningTool + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__BPhysPVThinningTool +BPHY20_thinningTool_PV = DerivationFramework__BPhysPVThinningTool(name = "BPHY20_thinningTool_PV", + CandidateCollections = ["BPHY20BcJpsiMuCandidates"], + KeepPVTracks =True) + +ToolSvc += BPHY20_thinningTool_PV + +from DerivationFrameworkInDet.DerivationFrameworkInDetConf import DerivationFramework__TauTrackParticleThinning +BPHY20TauTPThinningTool = DerivationFramework__TauTrackParticleThinning(name = "BPHY20TauTPThinningTool", + TauKey = "TauJets", + InDetTrackParticlesKey = "InDetTrackParticles") +ToolSvc += BPHY20TauTPThinningTool + +# Only save truth informtion directly associated with: mu Ds+ D+ D*+ Ds*+ D0 D*0 B+ B*+ B0 B*0 +from DerivationFrameworkMCTruth.DerivationFrameworkMCTruthConf import DerivationFramework__GenericTruthThinning +BPHY20TruthThinTool = DerivationFramework__GenericTruthThinning(name = "BPHY20TruthThinTool", + ParticleSelectionString = "abs(TruthParticles.pdgId) == 13 || 
abs(TruthParticles.pdgId) == 15 || abs(TruthParticles.pdgId) == 541 || abs(TruthParticles.pdgId) == 431 || abs(TruthParticles.pdgId) == 411 || abs(TruthParticles.pdgId) == 413 || abs(TruthParticles.pdgId) == 433 || TruthParticles.pdgId == 421 || TruthParticles.pdgId == 423 || abs(TruthParticles.pdgId) == 521 || abs(TruthParticles.pdgId) == 523 || TruthParticles.pdgId == 511 || TruthParticles.pdgId == 513", + PreserveDescendants = True, + PreserveAncestors = True) +ToolSvc += BPHY20TruthThinTool + +# Only save truth neutrino and b/c quarks information +from DerivationFrameworkMCTruth.DerivationFrameworkMCTruthConf import DerivationFramework__GenericTruthThinning +BPHY20TruthThinNoChainTool = DerivationFramework__GenericTruthThinning(name = "BPHY20TruthThinNoChainTool", + ParticleSelectionString = "abs(TruthParticles.pdgId) == 4 || abs(TruthParticles.pdgId) == 5 || abs(TruthParticles.pdgId) == 12 || abs(TruthParticles.pdgId) == 14 || abs(TruthParticles.pdgId) == 16", + PreserveDescendants = False, + PreserveAncestors = False) +ToolSvc += BPHY20TruthThinNoChainTool + + +#==================================================================== +# CREATE THE DERIVATION KERNEL ALGORITHM AND PASS THE ABOVE TOOLS +#==================================================================== + +BPHY20ThinningTools = [ BPHY20MuonTPThinningTool, BPHY20Thin_vtxDuplicates, + BPHY20Thin_vtxTrk, BPHY20_thinningTool_PV, + BPHY20TauTPThinningTool] + +BPHY20SkimmingTools = [BPHY20_SelectBcJpsiMuEvent] + +BPHY20AugmentationTools = [BPHY20JpsiSelectAndWrite, BPHY20_Select_Jpsi2mumu, + BPHY20BcJpsiMuSelectAndWrite, BPHY20_Select_Bc2JpsiMu, + BPHY20_AugOriginalCounts, + BPHY20TrackIsolationDecorator, BPHY20CaloIsolationDecorator] + +if addMuExtrapolationForTrigger: + BPHY20AugmentationTools.append(BPHY20_Extrap_Tool) + +Kernel1Tools = [BPHY20TriggerSkim] + +if isSimulation: + #BPHY20AugmentationTools.append(DFCommonTauTruthMatchingWrapper) + if thinTruth: + 
BPHY20ThinningTools.append(BPHY20TruthThinTool) + BPHY20ThinningTools.append(BPHY20TruthThinNoChainTool) + +#The sequence object. Is in principle just a wrapper which allows to run two kernels in sequence +BPHY20_Sequence = CfgMgr.AthSequencer("BPHY20_Sequence") +from DerivationFrameworkFlavourTag.FlavourTagCommon import FlavorTagInit +FlavorTagInit(JetCollections=['AntiKt4EMPFlowJets'], Sequencer=BPHY20_Sequence) + +#onlyAugmentations implementation +if onlyAugmentations: + Kernel1Tools = [] + BPHY20SkimmingTools = [] + BPHY20ThinningTools = [] + +# Kernel n1 PRESELECTION +# The name of the kernel (BPHY20Kernel1 in this case) must be unique to this derivation +from DerivationFrameworkCore.DerivationFrameworkCoreConf import DerivationFramework__DerivationKernel +BPHY20_Sequence += CfgMgr.DerivationFramework__DerivationKernel("BPHY20Kernel_trigPresel", + AugmentationTools = [BPHY20TriggerCountToMetadata] , + SkimmingTools = Kernel1Tools) +# Kernel n2 deep Derivation +# The name of the kernel (BPHY20Kernel2 in this case) must be unique to this derivation +from DerivationFrameworkCore.DerivationFrameworkCoreConf import DerivationFramework__DerivationKernel +BPHY20_Sequence += CfgMgr.DerivationFramework__DerivationKernel("BPHY20Kernel", + AugmentationTools = BPHY20AugmentationTools, + SkimmingTools = BPHY20SkimmingTools, + ThinningTools = BPHY20ThinningTools) + +#Vital, replaces the adding of kernels directly +DerivationFrameworkJob += BPHY20_Sequence + +#==================================================================== +# Slimming +#==================================================================== + +from DerivationFrameworkCore.SlimmingHelper import SlimmingHelper +BPHY20SlimmingHelper = SlimmingHelper("BPHY20SlimmingHelper") +AllVariables = [] +StaticContent = [] + + + +SmartCollections = [ + "Photons", + "TauJets", + "AntiKt4EMTopoJets_BTagging201810", + "BTagging_AntiKt4EMTopo_201810", + "PrimaryVertices", + "Muons", + "InDetTrackParticles", + 
"MET_Reference_AntiKt4EMTopo" + ] + + +AllVariables = ["METAssoc_AntiKt4EMTopo", + "MET_Core_AntiKt4EMTopo", + "MET_Truth", + "MET_Track", + "MET_LocHadTopo"] + +AllVariables += ["Kt4EMTopoOriginEventShape", + "Kt4EMTopoEventShape"] + +AllVariables += ["CombinedMuonTrackParticles", + "ExtrapolatedMuonTrackParticles", + "MuonSpectrometerTrackParticles"] + + +ExtraVariables = ["Photons.pt.eta.phi.m", + "Electrons.pt.eta.phi.m","TauJets.pt.eta.phi.m.IsTruthMatched.truthJetLink.truthParticleLink", + "AntiKt4EMTopoJets_BTagging201810.JetPileupScaleMomentum_pt.JetPileupScaleMomentum_eta.JetPileupScaleMomentum_phi.JetPileupScaleMomentum_m", + "AntiKt4EMTopoJets_BTagging201810.JvtJvfcorr.HECFrac.LArQuality.HECQuality.NegativeE.AverageLArQF", + "AntiKt4EMTopoJets_BTagging201810.JetEtaJESScaleMomentum_pt.JetEtaJESScaleMomentum_eta.JetEtaJESScaleMomentum_phi.JetEtaJESScaleMomentum_m"] + +ExtraVariables += ["Muons.etaLayer1Hits.etaLayer2Hits.etaLayer3Hits.etaLayer4Hits.phiLayer1Hits.phiLayer2Hits.phiLayer3Hits.phiLayer4Hits", + "Muons.numberOfTriggerEtaLayers.numberOfPhiLayers", + "CombinedMuonTrackParticles.numberOfTRTHits.numberOfTRTHighThresholdHits", + "InDetTrackParticles.numberOfTRTHits.numberOfTRTHighThresholdHits.vx.vy.vz", + "PrimaryVertices.chiSquared.covariance"] + + +StaticContent = ["xAOD::VertexContainer#BPHY20RefittedPrimaryVertices", + "xAOD::VertexAuxContainer#BPHY20RefittedPrimaryVerticesAux."] + + +## Jpsi candidates +StaticContent += ["xAOD::VertexContainer#%s" % BPHY20JpsiSelectAndWrite.OutputVtxContainerName] +## we have to disable vxTrackAtVertex branch since it is not xAOD compatible +StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY20JpsiSelectAndWrite.OutputVtxContainerName] + +## Bc+>J/psi Mu+ candidates +StaticContent += ["xAOD::VertexContainer#%s" % BPHY20BcJpsiMuSelectAndWrite.OutputVtxContainerName] +StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY20BcJpsiMuSelectAndWrite.OutputVtxContainerName] 
+ + +# Truth information for MC only +if isSimulation: + AllVariables += ["TruthEvents","TruthParticles","TruthVertices","MuonTruthParticles", "METMap_Truth"] + SmartCollections += ["AntiKt4TruthJets"] + +# Needed for trigger objects +BPHY20SlimmingHelper.IncludeMuonTriggerContent = True +BPHY20SlimmingHelper.IncludeBPhysTriggerContent = True + +# Pass all lists to the SlimmingHelper +BPHY20SlimmingHelper.ExtraVariables = ExtraVariables +BPHY20SlimmingHelper.AllVariables = AllVariables +BPHY20SlimmingHelper.StaticContent = StaticContent +BPHY20SlimmingHelper.SmartCollections = SmartCollections +BPHY20SlimmingHelper.AppendContentToStream(BPHY20Stream) + diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY21.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY21.py new file mode 100644 index 0000000000000000000000000000000000000000..def21d405a711adadab0dbc3fa347adc759dfec1 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY21.py @@ -0,0 +1,329 @@ +#2019/11/18 +#==================================================================== +# BPHY21.py +# W -> (J/psi + D_s) +# It requires the reductionConf flag BPHY21 in Reco_tf.py +#==================================================================== + +# Set up common services and job object. 
+# This should appear in ALL derivation job options +from DerivationFrameworkCore.DerivationFrameworkMaster import * + +isSimulation = False +if globalflags.DataSource()=='geant4': + isSimulation = True + +print(isSimulation) + + +#==================================================================== +# AUGMENTATION TOOLS +#==================================================================== +## 1/ setup vertexing tools and services +include("DerivationFrameworkBPhys/configureVertexing.py") +BPHY21_VertexTools = BPHYVertexTools("BPHY21") + + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__AugOriginalCounts +BPHY21_AugOriginalCounts = DerivationFramework__AugOriginalCounts( + name = "BPHY21_AugOriginalCounts", + VertexContainer = "PrimaryVertices", + TrackContainer = "InDetTrackParticles" ) +ToolSvc += BPHY21_AugOriginalCounts + +#==================================================================== +# TriggerCounting for Kernel1 +#==================================================================== +#List of trigggers to be counted +BPHY21_triggersToMetadata= [ + "HLT_2mu10", + "HLT_2mu10_nomucomb", + "HLT_2mu14", + "HLT_2mu14_nomucomb", + "HLT_mu18_mu8noL1", + "HLT_mu18_nomucomb_mu8noL1", + "HLT_mu20_mu8noL1", + "HLT_mu20_nomucomb_mu8noL1", + "HLT_mu22_mu8noL1", + "HLT_mu22_nomucomb_mu8noL1", + "HLT_mu20_mu8noL1", + "HLT_mu24_mu8noL1", + "HLT_mu10_mu6_bJpsimumu", + "HLT_mu22_mu8noL1_calotag_0eta010_L1MU1" + ] + + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__TriggerCountToMetadata +BPHY21_TriggerCountToMetadata = DerivationFramework__TriggerCountToMetadata(name = "BPHY21_TriggerCount", + TriggerList = BPHY21_triggersToMetadata, + FolderName = "BPHY21") + +ToolSvc += BPHY21_TriggerCountToMetadata + +#==================================================================== + + +#==================================================================== 
+#==================================================================== +## 1/ Setup the skimming based on triggers +## + +BPHY21_triggerList = [ + "HLT_2mu10", + "HLT_2mu10_nomucomb", + "HLT_2mu14", + "HLT_2mu14_nomucomb", + "HLT_mu18_mu8noL1", + "HLT_mu18_nomucomb_mu8noL1", + "HLT_mu20_mu8noL1", + "HLT_mu20_nomucomb_mu8noL1", + "HLT_mu22_mu8noL1", + "HLT_mu22_nomucomb_mu8noL1", + "HLT_mu20_mu8noL1", + "HLT_mu24_mu8noL1", + "HLT_mu10_mu6_bJpsimumu", + "HLT_mu22_mu8noL1_calotag_0eta010_L1MU1" + ] + +from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__TriggerSkimmingTool +BPHY21_TriggerSkim = DerivationFramework__TriggerSkimmingTool(name = "BPHY21_TriggerSkim", + TriggerListOR = BPHY21_triggerList) + +ToolSvc += BPHY21_TriggerSkim + + +#-------------------------------------------------------------------- +# 2/ Select J/psi>mu+mu- +#-------------------------------------------------------------------- +## a/ setup JpsiFinder tool +## These are general tools independent of DerivationFramework that do the +## actual vertex fitting and some pre-selection. 
+from JpsiUpsilonTools.JpsiUpsilonToolsConf import Analysis__JpsiFinder +BPHY21_JpsiFinder = Analysis__JpsiFinder( + name = "BPHY21_JpsiFinder", + OutputLevel = INFO, + muAndMu = True, + muAndTrack = False, + TrackAndTrack = False, + assumeDiMuons = True, + muonThresholdPt = 2700, + invMassUpper = 3400.0, + invMassLower = 2800.0, + Chi2Cut = 10., + oppChargesOnly = True, + combOnly = True, + atLeastOneComb = False, + useCombinedMeasurement = False, # Only takes effect if combOnly=True + muonCollectionKey = "Muons", + TrackParticleCollection = "InDetTrackParticles", + V0VertexFitterTool = BPHY21_VertexTools.TrkV0Fitter, # V0 vertex fitter + useV0Fitter = False, # if False a TrkVertexFitterTool will be used + TrkVertexFitterTool = BPHY21_VertexTools.TrkVKalVrtFitter, # VKalVrt vertex fitter + TrackSelectorTool = BPHY21_VertexTools.InDetTrackSelectorTool, + VertexPointEstimator = BPHY21_VertexTools.VtxPointEstimator, + useMCPCuts = False) + +ToolSvc += BPHY21_JpsiFinder +print(BPHY21_JpsiFinder) + +#-------------------------------------------------------------------- +## b/ setup the vertex reconstruction "call" tool(s). They are part of the derivation framework. +## These Augmentation tools add output vertex collection(s) into the StoreGate and add basic +## decorations which do not depend on the vertex mass hypothesis (e.g. lxy, ptError, etc). +## There should be one tool per topology, i.e. Jpsi and Psi(2S) do not need two instance of the +## Reco tool is the JpsiFinder mass window is wide enough. 
+from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Reco_Vertex +BPHY21_JpsiSelectAndWrite = DerivationFramework__Reco_Vertex( + name = "BPHY21_JpsiSelectAndWrite", + VertexSearchTool = BPHY21_JpsiFinder, + OutputVtxContainerName = "BPHY21_JpsiCandidates", + PVContainerName = "PrimaryVertices", + RefPVContainerName = "SHOULDNOTBEUSED", + DoVertexType = 1) + +ToolSvc += BPHY21_JpsiSelectAndWrite +print(BPHY21_JpsiSelectAndWrite) + +#-------------------------------------------------------------------- +## c/ augment and select Jpsi->mumu candidates +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Select_onia2mumu +BPHY21_Select_Jpsi2mumu = DerivationFramework__Select_onia2mumu( + name = "BPHY21_Select_Jpsi2mumu", + HypothesisName = "Jpsi", + InputVtxContainerName = "BPHY21_JpsiCandidates", + VtxMassHypo = 3096.900, + MassMin = 2600.0, + MassMax = 3600.0, + Chi2Max = 200, + LxyMin = 0.1, + DoVertexType = 1) + +ToolSvc += BPHY21_Select_Jpsi2mumu +print(BPHY21_Select_Jpsi2mumu) + + +#-------------------------------------------------------------------- + +BPHY21_CascadeCollections = [] + + +#-------------------------------------------------------------------- + + +if not isSimulation: #Only Skim Data + from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__xAODStringSkimmingTool + BPHY21_SelectJpsiEvent = DerivationFramework__xAODStringSkimmingTool( + name = "BPHY21_SelectJpsiEvent", + expression = "count(BPHY21_JpsiCandidates.passed_Jpsi) > 0") + + ToolSvc += BPHY21_SelectJpsiEvent + print(BPHY21_SelectJpsiEvent) + + #==================================================================== + # Make event selection based on an OR of the input skimming tools + #==================================================================== + + from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__FilterCombinationOR + BPHY21_SkimmingOR = 
CfgMgr.DerivationFramework__FilterCombinationOR( + "BPHY21_SkimmingOR", + FilterList = [ BPHY21_TriggerSkim, BPHY21_SelectJpsiEvent] ) + ToolSvc += BPHY21_SkimmingOR + print(BPHY21_SkimmingOR) + +#-------------------------------------------------------------------- +##10/ track and vertex thinning. We want to remove all reconstructed secondary vertices +## which haven't passed any of the selections defined by (Select_*) tools. +## We also want to keep only tracks which are associated with either muons or any of the +## vertices that passed the selection. Multiple thinning tools can perform the +## selection. The final thinning decision is based on the OR of all the decisions (by default, +## although it can be changed by the JO). + +## a) thinning out vertices that didn't pass any selection and identifying tracks associated with +## selected vertices. The "VertexContainerNames" is a list of the vertex containers, and "PassFlags" +## contains all pass flags for Select_* tools that must be satisfied. The vertex is kept if it +## satisfies any of the listed selections. +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Thin_vtxTrk +BPHY21_thinningTool_Tracks = DerivationFramework__Thin_vtxTrk( + name = "BPHY21_thinningTool_Tracks", + TrackParticleContainerName = "InDetTrackParticles", + VertexContainerNames = ["BPHY21_JpsiCandidates"], + PassFlags = ["passed_Jpsi"]) + +ToolSvc += BPHY21_thinningTool_Tracks +print(BPHY21_thinningTool_Tracks) +''' +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__BPhysPVThinningTool +BPHY21_thinningTool_PV = DerivationFramework__BPhysPVThinningTool( + name = "BPHY21_thinningTool_PV", + CandidateCollections = ["BPHY21_JpsiCandidates"], + KeepPVTracks = True) + +ToolSvc += BPHY21_thinningTool_PV +print BPHY21_thinningTool_PV +''' +## b) thinning out tracks that are not attached to muons. 
The final thinning decision is based on the OR operation +## between decision from this and the previous tools. +from DerivationFrameworkInDet.DerivationFrameworkInDetConf import DerivationFramework__MuonTrackParticleThinning +BPHY21_MuonTPThinningTool = DerivationFramework__MuonTrackParticleThinning( + name = "BPHY21_MuonTPThinningTool", + MuonKey = "Muons", + InDetTrackParticlesKey = "InDetTrackParticles") + +ToolSvc += BPHY21_MuonTPThinningTool +print(BPHY21_MuonTPThinningTool) + +#==================================================================== +# CREATE THE DERIVATION KERNEL ALGORITHM AND PASS THE ABOVE TOOLS +#==================================================================== + +BPHY21_thiningCollection = [] + +print(BPHY21_thiningCollection) + +# The name of the kernel (BPHY21_Kernel in this case) must be unique to this derivation +from DerivationFrameworkCore.DerivationFrameworkCoreConf import DerivationFramework__DerivationKernel +DerivationFrameworkJob += CfgMgr.DerivationFramework__DerivationKernel( + "BPHY21_Kernel", + AugmentationTools = [BPHY21_JpsiSelectAndWrite, BPHY21_Select_Jpsi2mumu, + + BPHY21_AugOriginalCounts], + #Only skim if not MC + SkimmingTools = [BPHY21_SkimmingOR] if not isSimulation else [], + ThinningTools = BPHY21_thiningCollection + ) + +#==================================================================== +# SET UP STREAM +#==================================================================== +BPHY21_streamName = derivationFlags.WriteDAOD_BPHY21Stream.StreamName +BPHY21_fileName = buildFileName( derivationFlags.WriteDAOD_BPHY21Stream ) +BPHY21_Stream = MSMgr.NewPoolRootStream( BPHY21_streamName, BPHY21_fileName ) +BPHY21_Stream.AcceptAlgs(["BPHY21_Kernel"]) + +# Special lines for thinning +# Thinning service name must match the one passed to the thinning tools +from AthenaServices.Configurables import ThinningSvc, createThinningSvc +BPHY21_augStream = MSMgr.GetStream( BPHY21_streamName ) +BPHY21_evtStream = 
BPHY21_augStream.GetEventStream() + +BPHY21_ThinningSvc = createThinningSvc( svcName="BPHY21_ThinningSvc", outStreams=[BPHY21_evtStream] ) +svcMgr += BPHY21_ThinningSvc + +#==================================================================== +# Slimming +#==================================================================== + +# Added by ASC +from DerivationFrameworkCore.SlimmingHelper import SlimmingHelper +BPHY21_SlimmingHelper = SlimmingHelper("BPHY21_SlimmingHelper") +BPHY21_AllVariables = [] +BPHY21_StaticContent = [] + +# Needed for trigger objects +BPHY21_SlimmingHelper.IncludeMuonTriggerContent = True +BPHY21_SlimmingHelper.IncludeBPhysTriggerContent = True + +## primary vertices +BPHY21_AllVariables += ["PrimaryVertices"] +BPHY21_StaticContent += ["xAOD::VertexContainer#BPHY21_RefittedPrimaryVertices"] +BPHY21_StaticContent += ["xAOD::VertexAuxContainer#BPHY21_RefittedPrimaryVerticesAux."] + +## ID track particles +BPHY21_AllVariables += ["InDetTrackParticles"] + +## combined / extrapolated muon track particles +## (note: for tagged muons there is no extra TrackParticle collection since the ID tracks +## are store in InDetTrackParticles collection) +BPHY21_AllVariables += ["CombinedMuonTrackParticles"] +BPHY21_AllVariables += ["ExtrapolatedMuonTrackParticles"] + +## muon container +BPHY21_AllVariables += ["Muons"] + + +## Jpsi candidates +BPHY21_StaticContent += ["xAOD::VertexContainer#%s" % BPHY21_JpsiSelectAndWrite.OutputVtxContainerName] +## we have to disable vxTrackAtVertex branch since it is not xAOD compatible +BPHY21_StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY21_JpsiSelectAndWrite.OutputVtxContainerName] + + +# Tagging information (in addition to that already requested by usual algorithms) +#AllVariables += ["GSFTrackParticles", "MuonSpectrometerTrackParticles" ] + +# Added by ASC +# Truth information for MC only +if isSimulation: + BPHY21_AllVariables += 
["TruthEvents","TruthParticles","TruthVertices","MuonTruthParticles"] + + +BPHY21_AllVariables = list(set(BPHY21_AllVariables)) # remove duplicates + +BPHY21_SlimmingHelper.AllVariables = BPHY21_AllVariables +BPHY21_SlimmingHelper.StaticContent = BPHY21_StaticContent +BPHY21_SlimmingHelper.SmartCollections = [] + +BPHY21_SlimmingHelper.AppendContentToStream(BPHY21_Stream) + +#==================================================================== +# END OF BPHY21.py +#==================================================================== \ No newline at end of file diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY22.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY22.py new file mode 100644 index 0000000000000000000000000000000000000000..791a4f5b028b874cbb4102da0a3442dd8d947fdc --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY22.py @@ -0,0 +1,22 @@ +#==================================================================== +# BPHY22.py +# This is an example job options script showing how to set up a +# derivation of the data using the derivation framework. +# It requires the reductionConf flag BPHY22 in Reco_tf.py +#==================================================================== + +# Set up common services and job object. +# This should appear in ALL derivation job options +from DerivationFrameworkCore.DerivationFrameworkMaster import * + +# This is a dummy file thus just printing something + +print("") +print("BPHY22 dummy file ... doing nothing.") +print("") +print("Please make sure that all local variables in this python") +print("script are prefixed by BPHY22_ in order to avoid collisions") +print("in case this derivation format is run in a train with others.") +print("Please ensure that it is python3 compatible e.g. 
by using") +print("print() instead of just print without parentheses.") +print("") diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY3.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY3.py new file mode 100644 index 0000000000000000000000000000000000000000..20f6984828012f4f81f917eb3ec9337e8bb7e7c3 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY3.py @@ -0,0 +1,284 @@ +#==================================================================== +# BPHY3.py +# This is an example job options script showing how to set up a +# derivation of the data using the derivation framework. +# It requires the reductionConf flag BPHY3 in Reco_tf.py +#==================================================================== + +# Set up common services and job object. +# This should appear in ALL derivation job options +from DerivationFrameworkCore.DerivationFrameworkMaster import * + +isSimulation = False +if globalflags.DataSource()=='geant4': + isSimulation = True + +print(isSimulation) + +#==================================================================== +# AUGMENTATION TOOLS +#==================================================================== +## 1/ setup vertexing tools and services +#include( "JpsiUpsilonTools/configureServices.py" ) + +include("DerivationFrameworkBPhys/configureVertexing.py") +BPHY3_VertexTools = BPHYVertexTools("BPHY3") + +#-------------------------------------------------------------------- +## 2/ Setup the vertex fitter tools (e.g. JpsiFinder, JpsiPlus1Track, etc). +## These are general tools independent of DerivationFramework that do the +## actual vertex fitting and some pre-selection. 
+from JpsiUpsilonTools.JpsiUpsilonToolsConf import Analysis__JpsiFinder +BPHY3JpsiFinder = Analysis__JpsiFinder( + name = "BPHY3JpsiFinder", + OutputLevel = INFO, + muAndMu = False, + muAndTrack = False, + TrackAndTrack = True, + assumeDiMuons = False, # If true, will assume dimu hypothesis and use PDG value for mu mass + invMassUpper = 10000.0, + invMassLower = 0.0, + Chi2Cut = 100., + oppChargesOnly = True, + atLeastOneComb = False, + useCombinedMeasurement = False, # Only takes effect if combOnly=True + muonCollectionKey = "Muons", + TrackParticleCollection = "InDetTrackParticles", + V0VertexFitterTool = BPHY3_VertexTools.TrkV0Fitter, # V0 vertex fitter + useV0Fitter = False, # if False a TrkVertexFitterTool will be used + TrkVertexFitterTool = BPHY3_VertexTools.TrkVKalVrtFitter, # VKalVrt vertex fitter + TrackSelectorTool = BPHY3_VertexTools.InDetTrackSelectorTool, + VertexPointEstimator = BPHY3_VertexTools.VtxPointEstimator, + useMCPCuts = False, + track1Mass = 139.57, # Not very important, only used to calculate inv. 
mass cut, leave it loose here + track2Mass = 139.57) + +ToolSvc += BPHY3JpsiFinder +print(BPHY3JpsiFinder) + +#-------------------------------------------------------------------- +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Reco_Vertex +BPHY3_Reco_diTrk = DerivationFramework__Reco_Vertex( + name = "BPHY3_Reco_diTrk", + VertexSearchTool = BPHY3JpsiFinder, + OutputVtxContainerName = "BPHY3VertexCandidates", + PVContainerName = "PrimaryVertices", + RefPVContainerName = "BPHY3RefittedPrimaryVertices") + +ToolSvc += BPHY3_Reco_diTrk +print(BPHY3_Reco_diTrk) +#-------------------------------------------------------------------- + + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Select_onia2mumu + +#-------------------------------------------------------------------- +## a/ augment and select X->pi+pi- candidates +BPHY3_Select_PiPi = DerivationFramework__Select_onia2mumu( + name = "BPHY3_Select_PiPi", + HypothesisName = "PiPi", + InputVtxContainerName = "BPHY3VertexCandidates", + TrkMasses = [139.57,139.57], + VtxMassHypo = 497.614, + MassMin = 300.0, + MassMax = 700.0, + Chi2Max = 20) + +ToolSvc += BPHY3_Select_PiPi +print(BPHY3_Select_PiPi) +#-------------------------------------------------------------------- + +#-------------------------------------------------------------------- +## a/ augment and select X->piK candidates +BPHY3_Select_PiK = DerivationFramework__Select_onia2mumu( + name = "BPHY3_Select_PiK", + HypothesisName = "PiK", + InputVtxContainerName = "BPHY3VertexCandidates", + TrkMasses = [139.57,493.677], + VtxMassHypo = 892., + MassMin = 0.0, + MassMax = 3500.0, + Chi2Max = 10) + +ToolSvc += BPHY3_Select_PiK +print(BPHY3_Select_PiK) +#-------------------------------------------------------------------- + +#-------------------------------------------------------------------- +## a/ augment and select X->KPi candidates +BPHY3_Select_KPi = 
DerivationFramework__Select_onia2mumu( + name = "BPHY3_Select_KPi", + HypothesisName = "KPi", + InputVtxContainerName = "BPHY3VertexCandidates", + TrkMasses = [493.677,139.57], + VtxMassHypo = 892., + MassMin = 0.0, + MassMax = 3500.0, + Chi2Max = 10) + +ToolSvc += BPHY3_Select_KPi +print(BPHY3_Select_KPi) +#-------------------------------------------------------------------- + + +#-------------------------------------------------------------------- +## a/ augment and select X->K+K- candidates +BPHY3_Select_KK = DerivationFramework__Select_onia2mumu( + name = "BPHY3_Select_KK", + HypothesisName = "KK", + InputVtxContainerName = "BPHY3VertexCandidates", + TrkMasses = [493.677,493.677], + VtxMassHypo = 1019.461, + MassMin = 0.0, + MassMax = 1100.0, + Chi2Max = 20) + +ToolSvc += BPHY3_Select_KK +print(BPHY3_Select_KK) +#-------------------------------------------------------------------- + +#-------------------------------------------------------------------- +## a/ augment and select X->ppbar candidates +BPHY3_Select_PP = DerivationFramework__Select_onia2mumu( + name = "BPHY3_Select_PP", + HypothesisName = "PP", + InputVtxContainerName = "BPHY3VertexCandidates", + TrkMasses = [938.272,938.272], + VtxMassHypo = 3096.916, + MassMin = 2800.0, + MassMax = 3600.0, + Chi2Max = 1) + +ToolSvc += BPHY3_Select_PP +print(BPHY3_Select_PP) +#-------------------------------------------------------------------- + + +#-------------------------------------------------------------------- +## 5/ select the event. We only want to keep events that contain certain vertices which passed certain selection. +## This is specified by the "SelectionExpression" property, which contains the expression in the following format: +## +## "ContainerName.passed_HypoName > count" +## +## where "ContainerName" is output container form some Reco_* tool, "HypoName" is the hypothesis name setup in some "Select_*" +## tool and "count" is the number of candidates passing the selection you want to keep. 
+ +expression = "count(BPHY3VertexCandidates.passed_PiPi) > 0 || count(BPHY3VertexCandidates.passed_KPi) > 0 || count(BPHY3VertexCandidates.passed_PiK) > 0 || count(BPHY3VertexCandidates.passed_KK) > 0 || count(BPHY3VertexCandidates.passed_PP) > 0" + +from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__xAODStringSkimmingTool +BPHY3_SelectEvent = DerivationFramework__xAODStringSkimmingTool(name = "BPHY3_SelectEvent", + expression = expression) +ToolSvc += BPHY3_SelectEvent +print(BPHY3_SelectEvent) + + +#==================================================================== +# SET UP STREAM +#==================================================================== +streamName = derivationFlags.WriteDAOD_BPHY3Stream.StreamName +fileName = buildFileName( derivationFlags.WriteDAOD_BPHY3Stream ) +BPHY3Stream = MSMgr.NewPoolRootStream( streamName, fileName ) +BPHY3Stream.AcceptAlgs(["BPHY3Kernel"]) +# Special lines for thinning +# Thinning service name must match the one passed to the thinning tools +augStream = MSMgr.GetStream( streamName ) +evtStream = augStream.GetEventStream() + + + + +#-------------------------------------------------------------------- +## 6/ track and vertex thinning. We want to remove all reconstructed secondary vertices +## which hasn't passed any of the selections defined by (Select_*) tools. +## We also want to keep only tracks which are associates with either muons or any of the +## vertices that passed the selection. Multiple thinning tools can perform the +## selection. The final thinning decision is based OR of all the decisions (by default, +## although it can be changed by the JO). + +## a) thining out vertices that didn't pass any selection and idetifying tracks associated with +## selected vertices. The "VertexContainerNames" is a list of the vertex containers, and "PassFlags" +## contains all pass flags for Select_* tools that must be satisfied. 
The vertex is kept is it +## satisfy any of the listed selections. + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Thin_vtxTrk +BPHY3Thin_vtxTrk = DerivationFramework__Thin_vtxTrk( + name = "BPHY3Thin_vtxTrk", + TrackParticleContainerName = "InDetTrackParticles", + VertexContainerNames = ["BPHY3VertexCandidates"], + StreamName = streamName, + PassFlags = ["passed_PiPi","passed_KPi","passed_PiK","passed_KK","passed_PP"]) + +ToolSvc += BPHY3Thin_vtxTrk + + + +# Added by ASC +# Only save truth informtion directly associated with Onia +#from DerivationFrameworkMCTruth.DerivationFrameworkMCTruthConf import DerivationFramework__GenericTruthThinning +#BPHY1TruthThinTool = DerivationFramework__GenericTruthThinning(name = "BPHY1TruthThinTool", +# ParticleSelectionString = "TruthParticles.pdgId == 443 || TruthParticles.pdgId == 100443", +# PreserveDescendants = True, +# PreserveAncestors = True) +#ToolSvc += BPHY1TruthThinTool +#print BPHY1TruthThinTool + + +#==================================================================== +# CREATE THE DERIVATION KERNEL ALGORITHM AND PASS THE ABOVE TOOLS +#==================================================================== +## 7/ IMPORTANT bit. Don't forget to pass the tools to the DerivationKernel! If you don't do that, they will not be +## be executed! 
+ +# Added by ASC +BPHY3ThinningTools = [BPHY3Thin_vtxTrk] +#if globalflags.DataSource()=='geant4': +# BPHY3ThinningTools.append(BPHY3TruthThinTool) + +# The name of the kernel (BPHY1Kernel in this case) must be unique to this derivation +from DerivationFrameworkCore.DerivationFrameworkCoreConf import DerivationFramework__DerivationKernel +DerivationFrameworkJob += CfgMgr.DerivationFramework__DerivationKernel( + "BPHY3Kernel", + AugmentationTools = [BPHY3_Reco_diTrk,BPHY3_Select_PiPi,BPHY3_Select_KPi,BPHY3_Select_PiK,BPHY3_Select_KK,BPHY3_Select_PP], + SkimmingTools = [BPHY3_SelectEvent], + ThinningTools = BPHY3ThinningTools + + ) + + + +#==================================================================== +# Slimming +#==================================================================== + +# Added by ASC +from DerivationFrameworkCore.SlimmingHelper import SlimmingHelper +BPHY3SlimmingHelper = SlimmingHelper("BPHY3SlimmingHelper") +AllVariables = [] +StaticContent = [] + +# Needed for trigger objects +#BPHY3SlimmingHelper.IncludeMuonTriggerContent = True +#BPHY3SlimmingHelper.IncludeBPhysTriggerContent = True + +## primary vertices +AllVariables += ["PrimaryVertices"] +#StaticContent += ["xAOD::VertexContainer#BPHY3RefittedPrimaryVertices"] +#StaticContent += ["xAOD::VertexAuxContainer#BPHY3RefittedPrimaryVerticesAux."] + +## ID track particles +AllVariables += ["InDetTrackParticles"] + +## Vertex candidates +StaticContent += ["xAOD::VertexContainer#%s" % BPHY3_Reco_diTrk.OutputVtxContainerName] +StaticContent += ["xAOD::VertexAuxContainer#%sAux." 
% BPHY3_Reco_diTrk.OutputVtxContainerName] +## we have to disable vxTrackAtVertex branch since it is not xAOD compatible +StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY3_Reco_diTrk.OutputVtxContainerName] + +# Added by ASC +# Truth information for MC only +#if isSimulation: +# AllVariables += ["TruthEvents","TruthParticles","TruthVertices","MuonTruthParticles"] + +BPHY3SlimmingHelper.AllVariables = AllVariables +BPHY3SlimmingHelper.StaticContent = StaticContent +BPHY3SlimmingHelper.AppendContentToStream(BPHY3Stream) diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY4.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY4.py new file mode 100644 index 0000000000000000000000000000000000000000..9a973b28e4d5c47f3d480de615ac627ac504b2fe --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY4.py @@ -0,0 +1,155 @@ +#==================================================================== +# BPHY4.py +#==================================================================== +#ServiceMgr.MessageSvc.debugLimit=100000000 +from DerivationFrameworkCore.DerivationFrameworkMaster import * + +isSimulation = False +if globalflags.DataSource()=='geant4': + isSimulation = True + +print(isSimulation) + +#==================================================================== +# AUGMENTATION TOOLS +#==================================================================== +## 1/ setup vertexing tools and services +include("DerivationFrameworkBPhys/configureVertexing.py") +BPHY4_VertexTools = BPHYVertexTools("BPHY4") + +#-------------------------------------------------------------------- +## 2/ Setup the vertex fitter tools +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__FourMuonTool +BPHY4FourMuonTool = DerivationFramework__FourMuonTool( + name = "BPHY4FourMuonTool", + OutputLevel = INFO, + ptCut = 2500.0, + etaCut = 2.5, + muonCollectionKey 
= "Muons", + TrackParticleCollection = "InDetTrackParticles", + V0VertexFitterTool = BPHY4_VertexTools.TrkV0Fitter, # V0 vertex fitter + useV0Fitter = False, # if False a TrkVertexFitterTool will be used + TrkVertexFitterTool = BPHY4_VertexTools.TrkVKalVrtFitter, # VKalVrt vertex fitter + TrackSelectorTool = BPHY4_VertexTools.InDetTrackSelectorTool, + VertexPointEstimator = BPHY4_VertexTools.VtxPointEstimator) + +ToolSvc += BPHY4FourMuonTool +print(BPHY4FourMuonTool) + +#-------------------------------------------------------------------- +## 3/ setup the vertex reconstruction "call" tool(s). + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Reco_4mu +BPHY4_Reco_4mu = DerivationFramework__Reco_4mu( + name = "BPHY4_Reco_4mu", + OutputLevel = INFO, + FourMuonTool = BPHY4FourMuonTool, + PairContainerName = "BPHY4Pairs", + QuadrupletContainerName = "BPHY4Quads", + PVContainerName = "PrimaryVertices", + RefPVContainerName = "BPHY4RefittedPrimaryVertices", + RefitPV = True, + MaxPVrefit = 100000, + DoVertexType = 7) + +ToolSvc += BPHY4_Reco_4mu +print(BPHY4_Reco_4mu) + +#==================================================================== +# SET UP STREAM +#==================================================================== +streamName = derivationFlags.WriteDAOD_BPHY4Stream.StreamName +fileName = buildFileName( derivationFlags.WriteDAOD_BPHY4Stream ) +BPHY4Stream = MSMgr.NewPoolRootStream( streamName, fileName ) +BPHY4Stream.AcceptAlgs(["BPHY4Kernel"]) +# Special lines for thinning +# Thinning service name must match the one passed to the thinning tools + +augStream = MSMgr.GetStream( streamName ) + + +#-------------------------------------------------------------------- +## thinning out tracks that are not attached to muons/electrons. The final thinning decision is based on the OR operation +## between decision from this and the previous tools. 
+from DerivationFrameworkInDet.DerivationFrameworkInDetConf import DerivationFramework__MuonTrackParticleThinning +BPHY4MuonTPThinningTool = DerivationFramework__MuonTrackParticleThinning(name = "BPHY4MuonTPThinningTool", + MuonKey = "Muons", + StreamName = streamName, + InDetTrackParticlesKey = "InDetTrackParticles") +ToolSvc += BPHY4MuonTPThinningTool +BPHY4ThinningTools = [BPHY4MuonTPThinningTool] + +from DerivationFrameworkInDet.DerivationFrameworkInDetConf import DerivationFramework__EgammaTrackParticleThinning +BPHY4ElectronTPThinningTool = DerivationFramework__EgammaTrackParticleThinning(name = "BPHY4ElectronTPThinningTool", + SGKey = "Electrons", + GSFTrackParticlesKey = "GSFTrackParticles", + StreamName = streamName, + InDetTrackParticlesKey = "InDetTrackParticles") +ToolSvc += BPHY4ElectronTPThinningTool +BPHY4ThinningTools += [BPHY4ElectronTPThinningTool] + +#==================================================================== +# CREATE THE DERIVATION KERNEL ALGORITHM AND PASS THE ABOVE TOOLS +#==================================================================== + +# The name of the kernel (BPHY4Kernel in this case) must be unique to this derivation +from DerivationFrameworkCore.DerivationFrameworkCoreConf import DerivationFramework__DerivationKernel +DerivationFrameworkJob += CfgMgr.DerivationFramework__DerivationKernel( + "BPHY4Kernel", + SkimmingTools = [BPHY4_Reco_4mu], + ThinningTools = BPHY4ThinningTools + ) + + + +#==================================================================== +# Slimming +#==================================================================== + +# Added by ASC +from DerivationFrameworkCore.SlimmingHelper import SlimmingHelper +BPHY4SlimmingHelper = SlimmingHelper("BPHY4SlimmingHelper") +BPHY4AllVariables = [] +BPHY4SmartVariables = [] +BPHY4StaticContent = [] + +# Needed for trigger objects +BPHY4SlimmingHelper.IncludeMuonTriggerContent = True +BPHY4SlimmingHelper.IncludeBPhysTriggerContent = True + +## primary vertices 
+BPHY4AllVariables += ["PrimaryVertices"] +BPHY4StaticContent += ["xAOD::VertexContainer#BPHY4RefittedPrimaryVertices"] +BPHY4StaticContent += ["xAOD::VertexAuxContainer#BPHY4RefittedPrimaryVerticesAux."] + +## ID track particles +BPHY4AllVariables += ["InDetTrackParticles"] +BPHY4SmartVariables += ["InDetTrackParticles"] + +## combined / extrapolated muon track particles +## (note: for tagged muons there is no extra TrackParticle collection since the ID tracks +## are store in InDetTrackParticles collection) +BPHY4AllVariables += ["CombinedMuonTrackParticles"] +BPHY4AllVariables += ["ExtrapolatedMuonTrackParticles"] + +## muon container +BPHY4AllVariables += ["Muons"] +BPHY4SmartVariables += ["Muons"] + +## Electron container +BPHY4SmartVariables += ["Electrons"] + +## Pair/quad candidates +BPHY4StaticContent += ["xAOD::VertexContainer#%s" % BPHY4_Reco_4mu.PairContainerName] +BPHY4StaticContent += ["xAOD::VertexAuxContainer#%sAux." % BPHY4_Reco_4mu.PairContainerName] +BPHY4StaticContent += ["xAOD::VertexContainer#%s" % BPHY4_Reco_4mu.QuadrupletContainerName] +BPHY4StaticContent += ["xAOD::VertexAuxContainer#%sAux." 
% BPHY4_Reco_4mu.QuadrupletContainerName] + +## we have to disable vxTrackAtVertex branch since it is not xAOD compatible +BPHY4StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY4_Reco_4mu.PairContainerName] +BPHY4StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY4_Reco_4mu.QuadrupletContainerName] + +BPHY4SlimmingHelper.AllVariables = BPHY4AllVariables +BPHY4SlimmingHelper.StaticContent = BPHY4StaticContent +BPHY4SlimmingHelper.SmartCollections = BPHY4SmartVariables +BPHY4SlimmingHelper.AppendContentToStream(BPHY4Stream) diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY5.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY5.py new file mode 100644 index 0000000000000000000000000000000000000000..0b041d9fc3e37590f44c44318d27cc4e28fabf2d --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY5.py @@ -0,0 +1,558 @@ +#==================================================================== +# BPHY5.py +# Bs>J/psiKK +# It requires the reductionConf flag BPHY5 in Reco_tf.py +#==================================================================== + +# Set up common services and job object. 
+# This should appear in ALL derivation job options +from DerivationFrameworkCore.DerivationFrameworkMaster import * + +isSimulation = False +if globalflags.DataSource()=='geant4': + isSimulation = True + +print( isSimulation ) + + +#==================================================================== +# AUGMENTATION TOOLS +#==================================================================== +## 1/ setup vertexing tools and services +#include( "JpsiUpsilonTools/configureServices.py" ) + +include("DerivationFrameworkBPhys/configureVertexing.py") +BPHY5_VertexTools = BPHYVertexTools("BPHY5") + + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__AugOriginalCounts +BPHY5_AugOriginalCounts = DerivationFramework__AugOriginalCounts( + name = "BPHY5_AugOriginalCounts", + VertexContainer = "PrimaryVertices", + TrackContainer = "InDetTrackParticles" ) +ToolSvc += BPHY5_AugOriginalCounts + + +#-------------------------------------------------------------------- +## 2/ setup JpsiFinder tool +## These are general tools independent of DerivationFramework that do the +## actual vertex fitting and some pre-selection. 
+from JpsiUpsilonTools.JpsiUpsilonToolsConf import Analysis__JpsiFinder +BPHY5JpsiFinder = Analysis__JpsiFinder(name = "BPHY5JpsiFinder", + OutputLevel = INFO, + muAndMu = True, + muAndTrack = False, + TrackAndTrack = False, + assumeDiMuons = True, + invMassUpper = 3600.0, + invMassLower = 2600.0, + Chi2Cut = 30., + oppChargesOnly = True, + combOnly = True, + atLeastOneComb = False, + useCombinedMeasurement = False, # Only takes effect if combOnly=True + muonCollectionKey = "Muons", + TrackParticleCollection = "InDetTrackParticles", + V0VertexFitterTool = BPHY5_VertexTools.TrkV0Fitter, # V0 vertex fitter + useV0Fitter = False, # if False a TrkVertexFitterTool will be used + TrkVertexFitterTool = BPHY5_VertexTools.TrkVKalVrtFitter, # VKalVrt vertex fitter + TrackSelectorTool = BPHY5_VertexTools.InDetTrackSelectorTool, + VertexPointEstimator = BPHY5_VertexTools.VtxPointEstimator, + useMCPCuts = False) +ToolSvc += BPHY5JpsiFinder +print (BPHY5JpsiFinder) + +#-------------------------------------------------------------------- +## 3/ setup the vertex reconstruction "call" tool(s). They are part of the derivation framework. +## These Augmentation tools add output vertex collection(s) into the StoreGate and add basic +## decorations which do not depend on the vertex mass hypothesis (e.g. lxy, ptError, etc). +## There should be one tool per topology, i.e. Jpsi and Psi(2S) do not need two instance of the +## Reco tool is the JpsiFinder mass window is wide enough. 
+from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Reco_Vertex +BPHY5JpsiSelectAndWrite = DerivationFramework__Reco_Vertex(name = "BPHY5JpsiSelectAndWrite", + VertexSearchTool = BPHY5JpsiFinder, + OutputVtxContainerName = "BPHY5JpsiCandidates", + PVContainerName = "PrimaryVertices", + RefPVContainerName = "SHOULDNOTBEUSED", + DoVertexType =1) +ToolSvc += BPHY5JpsiSelectAndWrite +print (BPHY5JpsiSelectAndWrite) + + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Select_onia2mumu + +## a/ augment and select Jpsi->mumu candidates +BPHY5_Select_Jpsi2mumu = DerivationFramework__Select_onia2mumu( + name = "BPHY5_Select_Jpsi2mumu", + HypothesisName = "Jpsi", + InputVtxContainerName = "BPHY5JpsiCandidates", + VtxMassHypo = 3096.916, + MassMin = 2000.0, + MassMax = 3600.0, + Chi2Max = 200, Do3d = False, + DoVertexType =1) + + +ToolSvc += BPHY5_Select_Jpsi2mumu +print (BPHY5_Select_Jpsi2mumu) + + + + +## 4/ setup a new vertexing tool (necessary due to use of mass constraint) +from TrkVKalVrtFitter.TrkVKalVrtFitterConf import Trk__TrkVKalVrtFitter +BsKKVertexFit = Trk__TrkVKalVrtFitter( + name = "BsKKVertexFit", + Extrapolator = BPHY5_VertexTools.InDetExtrapolator, + FirstMeasuredPoint = False, + MakeExtendedVertex = True) +ToolSvc += BsKKVertexFit +print (BsKKVertexFit) + +from TrkVKalVrtFitter.TrkVKalVrtFitterConf import Trk__TrkVKalVrtFitter +BplKplVertexFit = Trk__TrkVKalVrtFitter( + name = "BplKplVertexFit", + Extrapolator = BPHY5_VertexTools.InDetExtrapolator, + FirstMeasuredPoint = False, + MakeExtendedVertex = True) +ToolSvc += BplKplVertexFit +print (BplKplVertexFit) + +#Add the B to pi pi Jpsi X final states +from TrkVKalVrtFitter.TrkVKalVrtFitterConf import Trk__TrkVKalVrtFitter +BpipiXVertexFit = Trk__TrkVKalVrtFitter( + name = "BpipiXVertexFit", + Extrapolator = BPHY5_VertexTools.InDetExtrapolator, + FirstMeasuredPoint = False, + MakeExtendedVertex = True) +ToolSvc += BpipiXVertexFit 
+print (BpipiXVertexFit) + +## 5/ setup the Jpsi+2 track finder +from JpsiUpsilonTools.JpsiUpsilonToolsConf import Analysis__JpsiPlus2Tracks +BPHY5BsJpsiKK = Analysis__JpsiPlus2Tracks(name = "BPHY5BsJpsiKK", + OutputLevel = INFO, +kaonkaonHypothesis = True, +pionpionHypothesis = False, +kaonpionHypothesis = False, +trkThresholdPt = 800.0, +trkMaxEta = 3.0, +BMassUpper = 5800.0, +BMassLower = 5000.0, +#DiTrackMassUpper = 1019.445 + 100., +#DiTrackMassLower = 1019.445 - 100., +Chi2Cut = 15.0, +TrkQuadrupletMassUpper = 6000.0, +TrkQuadrupletMassLower = 4800.0, +JpsiContainerKey = "BPHY5JpsiCandidates", +TrackParticleCollection = "InDetTrackParticles", +MuonsUsedInJpsi = "Muons", +TrkVertexFitterTool = BsKKVertexFit, +TrackSelectorTool = BPHY5_VertexTools.InDetTrackSelectorTool, +UseMassConstraint = True) + +ToolSvc += BPHY5BsJpsiKK +print (BPHY5BsJpsiKK) + +from JpsiUpsilonTools.JpsiUpsilonToolsConf import Analysis__JpsiPlus2Tracks +BPHY5BdJpsiKst = Analysis__JpsiPlus2Tracks( + name = "BPHY5BdJpsiKst", + OutputLevel = INFO, + kaonkaonHypothesis = False, + pionpionHypothesis = False, + kaonpionHypothesis = True, + trkThresholdPt = 800.0, + trkMaxEta = 3.0, + BThresholdPt = 5000., + BMassLower = 4300.0, + BMassUpper = 6300.0, + JpsiContainerKey = "BPHY5JpsiCandidates", + TrackParticleCollection = "InDetTrackParticles", + #MuonsUsedInJpsi = "Muons", #Don't remove all muons, just those in J/psi candidate (see the following cut) + ExcludeCrossJpsiTracks = False, #setting this to False rejects the muons from J/psi candidate + TrkVertexFitterTool = BsKKVertexFit, + TrackSelectorTool = BPHY5_VertexTools.InDetTrackSelectorTool, + VertexPointEstimator = BPHY5_VertexTools.VtxPointEstimator, + UseMassConstraint = True, + Chi2Cut = 15.0, + TrkQuadrupletMassLower = 3500.0, + TrkQuadrupletMassUpper = 6800.0, + ) + +ToolSvc += BPHY5BdJpsiKst +print (BPHY5BdJpsiKst) + + +## 5a/ setup the Jpsi+1 track finder +from JpsiUpsilonTools.JpsiUpsilonToolsConf import Analysis__JpsiPlus1Track 
## Jpsi+1 track finder for B+ -> J/psi K+ (and B_c -> J/psi pi+) candidates.
BPHY5BplJpsiKpl = Analysis__JpsiPlus1Track(
    name                    = "BPHY5BplJpsiKpl",
    OutputLevel             = INFO,  #DEBUG,
    pionHypothesis          = True,
    kaonHypothesis          = True,
    trkThresholdPt          = 750.0,
    trkMaxEta               = 3.0,
    BThresholdPt            = 4000.0,
    BMassUpper              = 7000.0,
    BMassLower              = 4500.0,
    Chi2Cut                 = 15.0,
    TrkTrippletMassUpper    = 8000,
    TrkTrippletMassLower    = 4000,
    JpsiContainerKey        = "BPHY5JpsiCandidates",
    TrackParticleCollection = "InDetTrackParticles",
    MuonsUsedInJpsi         = "Muons",
    TrkVertexFitterTool     = BplKplVertexFit,
    TrackSelectorTool       = BPHY5_VertexTools.InDetTrackSelectorTool,
    UseMassConstraint       = True,
    ExcludeCrossJpsiTracks  = False,
    ExcludeJpsiMuonsOnly    = True)

ToolSvc += BPHY5BplJpsiKpl
print (BPHY5BplJpsiKpl)

## 5b/ setup the Jpsi+pi+pi+X track finder
BPHY5BJpsipipiX = Analysis__JpsiPlus2Tracks(
    name                    = "BPHY5BJpsipipiX",
    OutputLevel             = INFO,
    kaonkaonHypothesis      = False,
    pionpionHypothesis      = True,
    kaonpionHypothesis      = False,
    trkThresholdPt          = 800.0,
    trkMaxEta               = 3.0,
    BMassUpper              = 5800.0,
    BMassLower              = 3400.0,
    #DiTrackMassUpper       = 1019.445 + 100.,
    #DiTrackMassLower       = 1019.445 - 100.,
    Chi2Cut                 = 15.0,
    TrkQuadrupletMassUpper  = 5800.0,
    TrkQuadrupletMassLower  = 3400.0,
    JpsiContainerKey        = "BPHY5JpsiCandidates",
    TrackParticleCollection = "InDetTrackParticles",
    MuonsUsedInJpsi         = "Muons",
    TrkVertexFitterTool     = BpipiXVertexFit,
    TrackSelectorTool       = BPHY5_VertexTools.InDetTrackSelectorTool,
    UseMassConstraint       = True,
    ExcludeCrossJpsiTracks  = False,
    ExcludeJpsiMuonsOnly    = True)

ToolSvc += BPHY5BJpsipipiX
print (BPHY5BJpsipipiX)

## 6/ setup the combined augmentation/skimming tools for each B hypothesis
from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Reco_Vertex
BPHY5BsKKSelectAndWrite = DerivationFramework__Reco_Vertex(
    name                   = "BPHY5BsKKSelectAndWrite",
    VertexSearchTool       = BPHY5BsJpsiKK,
    OutputVtxContainerName = "BPHY5BsJpsiKKCandidates",
    PVContainerName        = "PrimaryVertices",
    RefPVContainerName     = "BPHY5RefittedPrimaryVertices",
    RefitPV                = True,
    Do3d                   = False,
    MaxPVrefit             = 10000,
    DoVertexType           = 7)
ToolSvc += BPHY5BsKKSelectAndWrite
print (BPHY5BsKKSelectAndWrite)

BPHY5BplKplSelectAndWrite = DerivationFramework__Reco_Vertex(
    name                   = "BPHY5BplKplSelectAndWrite",
    VertexSearchTool       = BPHY5BplJpsiKpl,
    OutputVtxContainerName = "BPHY5BpmJpsiKpmCandidates",
    PVContainerName        = "PrimaryVertices",
    RefPVContainerName     = "BPHY5RefBplJpsiKplPrimaryVertices",
    RefitPV                = True,
    MaxPVrefit             = 10000 )
ToolSvc += BPHY5BplKplSelectAndWrite
print (BPHY5BplKplSelectAndWrite)

BPHY5BpipiXSelectAndWrite = DerivationFramework__Reco_Vertex(
    name                   = "BPHY5BpipiXSelectAndWrite",
    VertexSearchTool       = BPHY5BJpsipipiX,
    OutputVtxContainerName = "BPHY5BJpsipipiXCandidates",
    PVContainerName        = "PrimaryVertices",
    RefPVContainerName     = "BPHY5RefittedBPipiPrimaryVertices",
    RefitPV                = True,
    Do3d                   = False,
    MaxPVrefit             = 10000,
    DoVertexType           = 7)
ToolSvc += BPHY5BpipiXSelectAndWrite
print (BPHY5BpipiXSelectAndWrite)

BPHY5BdKstSelectAndWrite = DerivationFramework__Reco_Vertex(
    name                   = "BPHY5BdKstSelectAndWrite",
    VertexSearchTool       = BPHY5BdJpsiKst,
    OutputVtxContainerName = "BPHY5BdJpsiKstCandidates",
    PVContainerName        = "PrimaryVertices",
    RefPVContainerName     = "BPHY5RefittedKstPrimaryVertices",
    RefitPV                = True,
    MaxPVrefit             = 10000,
    DoVertexType           = 7)
ToolSvc += BPHY5BdKstSelectAndWrite
print (BPHY5BdKstSelectAndWrite)

## b/ augment and select Bd->JpsiKst candidates
#    set mass hypothesis (K pi)
BPHY5_Select_Bd2JpsiKst = DerivationFramework__Select_onia2mumu(
    name                  = "BPHY5_Select_Bd2JpsiKst",
    HypothesisName        = "Bd",
    InputVtxContainerName = "BPHY5BdJpsiKstCandidates",
    TrkMasses             = [105.658, 105.658, 493.677, 139.570],
    VtxMassHypo           = 5279.6,
    MassMin               = 100.0,      #no mass cuts here
    MassMax               = 100000.0,   #no mass cuts here
    Chi2Max               = 200)

ToolSvc += BPHY5_Select_Bd2JpsiKst
print (BPHY5_Select_Bd2JpsiKst)

## c/ augment and select Bdbar->JpsiKstbar candidates
#    set mass hypothesis (pi K)
BPHY5_Select_Bd2JpsiKstbar = DerivationFramework__Select_onia2mumu(
    name                  = "BPHY5_Select_Bd2JpsiKstbar",
    HypothesisName        = "Bdbar",
    InputVtxContainerName = "BPHY5BdJpsiKstCandidates",
    TrkMasses             = [105.658, 105.658, 139.570, 493.677],
    VtxMassHypo           = 5279.6,
    MassMin               = 100.0,      #no mass cuts here
    MassMax               = 100000.0,   #no mass cuts here
    Chi2Max               = 200)

ToolSvc += BPHY5_Select_Bd2JpsiKstbar
print (BPHY5_Select_Bd2JpsiKstbar)

## d/ augment and select Bs->JpsiKK candidates
BPHY5_Select_Bs2JpsiKK = DerivationFramework__Select_onia2mumu(
    name                  = "BPHY5_Select_Bs2JpsiKK",
    HypothesisName        = "Bs",
    InputVtxContainerName = "BPHY5BsJpsiKKCandidates",
    TrkMasses             = [105.658, 105.658, 493.677, 493.677],
    VtxMassHypo           = 5366.3,
    MassMin               = 5000.0,
    MassMax               = 5800.0,
    Do3d                  = False,
    Chi2Max               = 200)

ToolSvc += BPHY5_Select_Bs2JpsiKK
print (BPHY5_Select_Bs2JpsiKK)

BPHY5_Select_Bpl2JpsiKpl = DerivationFramework__Select_onia2mumu(
    name                  = "BPHY5_Select_Bpl2JpsiKpl",
    HypothesisName        = "Bplus",
    InputVtxContainerName = "BPHY5BpmJpsiKpmCandidates",
    TrkMasses             = [105.658, 105.658, 493.677],
    VtxMassHypo           = 5279.26,
    MassMin               = 5279.26 - 500,
    Do3d                  = False,
    MassMax               = 5279.26 + 500,
    Chi2Max               = 200 )

ToolSvc += BPHY5_Select_Bpl2JpsiKpl
print (BPHY5_Select_Bpl2JpsiKpl)

BPHY5_Select_Bpl2JpsiPi = DerivationFramework__Select_onia2mumu(
    name                  = "BPHY5_Select_Bpl2JpsiPi",
    HypothesisName        = "Bc",
    InputVtxContainerName = "BPHY5BpmJpsiKpmCandidates",
    TrkMasses             = [105.658, 105.658, 139.570],
    VtxMassHypo           = 6275.1,
    Do3d                  = False,
    MassMin               = 6275.1 - 500,
    MassMax               = 6275.1 + 500,
    Chi2Max               = 200 )

ToolSvc += BPHY5_Select_Bpl2JpsiPi
print (BPHY5_Select_Bpl2JpsiPi)

BPHY5_Select_B2JpsipipiX = DerivationFramework__Select_onia2mumu(
    name                  = "BPHY5_Select_B2JpsipipiX",
    HypothesisName        = "pipiJpsi",
    InputVtxContainerName = "BPHY5BJpsipipiXCandidates",
    TrkMasses             = [105.658, 105.658, 139.570, 139.570],
    VtxMassHypo           = 4260,
    MassMin               = 3400.0,
    MassMax               = 5800.0,
    Do3d                  = False,
    Chi2Max               = 200)

ToolSvc += BPHY5_Select_B2JpsipipiX
print (BPHY5_Select_B2JpsipipiX)

#expression = "count(BPHY5BpmJpsiKpmCandidates.passed_Bplus) > 0"
#from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__xAODStringSkimmingTool
#BPHY5_SelectEvent = DerivationFramework__xAODStringSkimmingTool(name = "BPHY5_SelectEvent",
#                                                                expression = expression)
#ToolSvc += BPHY5_SelectEvent
#print BPHY5_SelectEvent

#from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__SelectEvent

if not isSimulation:  #Only Skim Data
    from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__xAODStringSkimmingTool
    BPHY5_SelectBsJpsiKKEvent = DerivationFramework__xAODStringSkimmingTool(
        name       = "BPHY5_SelectBsJpsiKKEvent",
        expression = "count(BPHY5BsJpsiKKCandidates.passed_Bs > 0) > 0")

    ToolSvc += BPHY5_SelectBsJpsiKKEvent
    print (BPHY5_SelectBsJpsiKKEvent)

    BPHY5_SelectBplJpsiKplEvent = DerivationFramework__xAODStringSkimmingTool(
        name       = "BPHY5_SelectBplJpsiKplEvent",
        expression = "count(BPHY5BpmJpsiKpmCandidates.passed_Bplus>0) > 0")
    ToolSvc += BPHY5_SelectBplJpsiKplEvent
    print (BPHY5_SelectBplJpsiKplEvent)

    BPHY5_SelectBplJpsiKplEventBc = DerivationFramework__xAODStringSkimmingTool(
        name       = "BPHY5_SelectBplJpsiKplEventBc",
        expression = "count(BPHY5BpmJpsiKpmCandidates.passed_Bc>0) > 0")
    ToolSvc += BPHY5_SelectBplJpsiKplEventBc
    print (BPHY5_SelectBplJpsiKplEventBc)

    BPHY5_SelectBdKstarEventBd = DerivationFramework__xAODStringSkimmingTool(
        name       = "BPHY5_SelectBdKstarEventBd",
        expression = "count(BPHY5BdJpsiKstCandidates.passed_Bd>0) > 0")
    ToolSvc += BPHY5_SelectBdKstarEventBd
    print (BPHY5_SelectBdKstarEventBd)

    BPHY5_SelectBdKstarEventBdBar = DerivationFramework__xAODStringSkimmingTool(
        name       = "BPHY5_SelectBdKstarEventBdbar",
        expression = "count(BPHY5BdJpsiKstCandidates.passed_Bdbar>0) > 0")
    ToolSvc += BPHY5_SelectBdKstarEventBdBar
    print (BPHY5_SelectBdKstarEventBdBar)

    #====================================================================
    # Make event selection based on an OR of the input skimming tools
    #====================================================================
    # NOTE(review): no skim is applied on passed_pipiJpsi, so the J/psi pi pi X
    # channel relies entirely on the other filters firing — confirm intended.
    from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__FilterCombinationOR
    BPHY5SkimmingOR = CfgMgr.DerivationFramework__FilterCombinationOR(
        "BPHY5SkimmingOR",
        FilterList = [BPHY5_SelectBsJpsiKKEvent, BPHY5_SelectBplJpsiKplEvent, BPHY5_SelectBplJpsiKplEventBc,
                      BPHY5_SelectBdKstarEventBd, BPHY5_SelectBdKstarEventBdBar])
    ToolSvc += BPHY5SkimmingOR
    print (BPHY5SkimmingOR)


## b) thinning out tracks that are not attached to muons. The final thinning decision is based on the OR operation
##    between the decision from this and the previous tools.


#====================================================================
# CREATE THE DERIVATION KERNEL ALGORITHM AND PASS THE ABOVE TOOLS
#====================================================================

# FIX: renamed from misspelled 'thiningCollection'; no thinning tools are configured for BPHY5.
thinningCollection = []
print (thinningCollection)

import DerivationFrameworkJetEtMiss.JetCommon
bphy5Seq = CfgMgr.AthSequencer("BPHY5Sequence")
DerivationFrameworkJob += bphy5Seq

# The name of the kernel (BPHY5Kernel in this case) must be unique to this derivation
from DerivationFrameworkCore.DerivationFrameworkCoreConf import DerivationFramework__DerivationKernel
bphy5Seq += CfgMgr.DerivationFramework__DerivationKernel(
    "BPHY5Kernel",
    AugmentationTools = [BPHY5JpsiSelectAndWrite,  BPHY5_Select_Jpsi2mumu,
                         BPHY5BsKKSelectAndWrite,  BPHY5_Select_Bs2JpsiKK,
                         BPHY5BplKplSelectAndWrite, BPHY5BpipiXSelectAndWrite,
                         BPHY5_Select_Bpl2JpsiKpl, BPHY5_Select_Bpl2JpsiPi, BPHY5_Select_B2JpsipipiX,
                         BPHY5BdKstSelectAndWrite, BPHY5_Select_Bd2JpsiKst, BPHY5_Select_Bd2JpsiKstbar,
                         BPHY5_AugOriginalCounts],
    # Only skim if not MC (BPHY5SkimmingOR exists only in the data case above)
    SkimmingTools     = [BPHY5SkimmingOR] if not isSimulation else [],
    ThinningTools     = thinningCollection
    )

#====================================================================
# SET UP STREAM
#====================================================================
streamName  = derivationFlags.WriteDAOD_BPHY5Stream.StreamName
fileName    = buildFileName( derivationFlags.WriteDAOD_BPHY5Stream )
BPHY5Stream = MSMgr.NewPoolRootStream( streamName, fileName )
BPHY5Stream.AcceptAlgs(["BPHY5Kernel"])

# Special lines for thinning
# Thinning service name must match the one passed to the thinning tools
#from AthenaServices.Configurables import ThinningSvc, createThinningSvc
augStream = MSMgr.GetStream( streamName )
evtStream = augStream.GetEventStream()

#BPHY5ThinningSvc = createThinningSvc( svcName="BPHY5ThinningSvc", outStreams=[evtStream] )
#svcMgr += BPHY5ThinningSvc

#====================================================================
# Slimming
#====================================================================

# Added by ASC
from DerivationFrameworkCore.SlimmingHelper import SlimmingHelper
BPHY5SlimmingHelper = SlimmingHelper("BPHY5SlimmingHelper")
AllVariables  = []
StaticContent = []

# Needed for trigger objects
# FIX: 'TRUE' is not a Python builtin and is not defined here; use 'True'
# (consistent with the other BPHY derivations).
BPHY5SlimmingHelper.IncludeMuonTriggerContent  = True
BPHY5SlimmingHelper.IncludeBPhysTriggerContent = True

## primary vertices
AllVariables  += ["PrimaryVertices"]
StaticContent += ["xAOD::VertexContainer#BPHY5RefittedPrimaryVertices"]
StaticContent += ["xAOD::VertexAuxContainer#BPHY5RefittedPrimaryVerticesAux."]
StaticContent += ["xAOD::VertexContainer#BPHY5RefBplJpsiKplPrimaryVertices"]
StaticContent += ["xAOD::VertexAuxContainer#BPHY5RefBplJpsiKplPrimaryVerticesAux."]
StaticContent += ["xAOD::VertexContainer#BPHY5RefittedBPipiPrimaryVertices"]
StaticContent += ["xAOD::VertexAuxContainer#BPHY5RefittedBPipiPrimaryVerticesAux."]
StaticContent += ["xAOD::VertexContainer#BPHY5RefittedKstPrimaryVertices"]
StaticContent += ["xAOD::VertexAuxContainer#BPHY5RefittedKstPrimaryVerticesAux."]

## ID track particles
AllVariables += ["InDetTrackParticles"]

## combined / extrapolated muon track particles
## (note: for tagged muons there is no extra TrackParticle collection since the ID tracks
## are stored in the InDetTrackParticles collection)
AllVariables += ["CombinedMuonTrackParticles"]
AllVariables += ["ExtrapolatedMuonTrackParticles"]

## muon container
AllVariables += ["Muons"]


## B-physics candidate vertices: keep the container plus its aux store for every
## select-and-write tool. The vxTrackAtVertex branch is dropped from the aux
## store since it is not xAOD compatible.
for vtxTool in (BPHY5JpsiSelectAndWrite,
                BPHY5BsKKSelectAndWrite,
                BPHY5BplKplSelectAndWrite,
                BPHY5BpipiXSelectAndWrite,
                BPHY5BdKstSelectAndWrite):
    StaticContent += ["xAOD::VertexContainer#%s" % vtxTool.OutputVtxContainerName]
    StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % vtxTool.OutputVtxContainerName]

# Tagging information (in addition to that already requested by usual algorithms)
#AllVariables += ["Electrons"]
AllVariables += ["GSFTrackParticles", "Electrons", "Photons", "MuonSpectrometerTrackParticles"]
tagJetCollections = ['AntiKt4LCTopoJets', 'AntiKt4EMTopoJets', 'AntiKt4PV0TrackJets']
+AllVariables += [ "Kt4LCTopoOriginEventShape", "Kt4EMTopoOriginEventShape" ] +SmartVar = ["Photons" ] #[ tagJetCollections ] + + + + +for jet_collection in tagJetCollections: + AllVariables += [jet_collection] + AllVariables += ["BTagging_%s" % (jet_collection[:-4]) ] + AllVariables += ["BTagging_%sJFVtx" % (jet_collection[:-4]) ] + AllVariables += ["BTagging_%sSecVtx" % (jet_collection[:-4]) ] + +#addStandardJets("AntiKt", 0.4, "PV0Track", 2000, mods="track_ungroomed", algseq=bphy5Seq, outputGroup="BPHY5") + + +# Added by ASC +# Truth information for MC only +if isSimulation: + AllVariables += ["TruthEvents","TruthParticles","TruthVertices","MuonTruthParticles", "egammaTruthParticles" ] + AllVariables += ["AntiKt4TruthJets", "AntiKt4TruthWZJets" ] +# addStandardJets("AntiKt", 0.4, "Truth", 5000, mods="truth_ungroomed", algseq=bphy5Seq, outputGroup="BPHY5") +# addStandardJets("AntiKt", 0.4, "TruthWZ", 5000, mods="truth_ungroomed", algseq=bphy5Seq, outputGroup="BPHY5") + tagJetCollections += [ "AntiKt4TruthJets", "AntiKt4TruthWZJets" ] + +from DerivationFrameworkJetEtMiss.ExtendedJetCommon import replaceAODReducedJets +replaceAODReducedJets(tagJetCollections, bphy5Seq , "BPHY5" ) + + +AllVariables = list(set(AllVariables)) # remove duplicates + +BPHY5SlimmingHelper.AllVariables = AllVariables +BPHY5SlimmingHelper.StaticContent = StaticContent +BPHY5SlimmingHelper.SmartCollections = SmartVar + +BPHY5SlimmingHelper.AppendContentToStream(BPHY5Stream) + + diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY6.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY6.py new file mode 100644 index 0000000000000000000000000000000000000000..23b5bb5095dfea9408c43d241cf2e6fb524a72bc --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY6.py @@ -0,0 +1,344 @@ +#==================================================================== +# BPHY6.py +# This an example job options script showing how 
# to set up a derivation of the data using the derivation framework.
# It requires the reductionConf flag BPHY6 in Reco_tf.py
#====================================================================

# Set up common services and job object.
# This should appear in ALL derivation job options
from DerivationFrameworkCore.DerivationFrameworkMaster import *

isSimulation = False
if globalflags.DataSource()=='geant4':
    isSimulation = True

print(isSimulation)

#====================================================================
# AUGMENTATION TOOLS
#====================================================================
## 1/ setup vertexing tools and services
#include( "JpsiUpsilonTools/configureServices.py" )

include("DerivationFrameworkBPhys/configureVertexing.py")
BPHY6_VertexTools = BPHYVertexTools("BPHY6")

# General variables: the common di-muon window and vertex quality cut
dimuon_chi2_max = 50.
dimuon_mass_min = 100.
dimuon_mass_max = 150e3

from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__MuonExtrapolationTool
BPHY6_Extrap_Tool = DerivationFramework__MuonExtrapolationTool(
    name        = "BPHY6_ExtrapolationTool",
    OutputLevel = INFO )
ToolSvc += BPHY6_Extrap_Tool

#--------------------------------------------------------------------
## 2/ Setup the vertex fitter tools (e.g. JpsiFinder, JpsiPlus1Track, etc).
##    These are general tools independent of DerivationFramework that do the
##    actual vertex fitting and some pre-selection.
from JpsiUpsilonTools.JpsiUpsilonToolsConf import Analysis__JpsiFinder
BPHY6JpsiFinder = Analysis__JpsiFinder(
    name                    = "BPHY6JpsiFinder",
    OutputLevel             = INFO,
    muAndMu                 = True,
    muAndTrack              = False,
    TrackAndTrack           = False,
    assumeDiMuons           = True,   # If true, will assume dimu hypothesis and use PDG value for mu mass
    invMassUpper            = dimuon_mass_max,
    invMassLower            = dimuon_mass_min,
    Chi2Cut                 = dimuon_chi2_max,
    oppChargesOnly          = True,
    atLeastOneComb          = True,
    useCombinedMeasurement  = False,  # Only takes effect if combOnly=True
    muonCollectionKey       = "Muons",
    TrackParticleCollection = "InDetTrackParticles",
    V0VertexFitterTool      = BPHY6_VertexTools.TrkV0Fitter,       # V0 vertex fitter
    useV0Fitter             = False,                               # if False a TrkVertexFitterTool will be used
    TrkVertexFitterTool     = BPHY6_VertexTools.TrkVKalVrtFitter,  # VKalVrt vertex fitter
    TrackSelectorTool       = BPHY6_VertexTools.InDetTrackSelectorTool,
    VertexPointEstimator    = BPHY6_VertexTools.VtxPointEstimator,
    useMCPCuts              = False )

ToolSvc += BPHY6JpsiFinder
print(BPHY6JpsiFinder)


from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Reco_Vertex

BPHY6_Reco_mumu = DerivationFramework__Reco_Vertex(
    name                   = "BPHY6_Reco_mumu",
    VertexSearchTool       = BPHY6JpsiFinder,
    OutputVtxContainerName = "BPHY6OniaCandidates",
    PVContainerName        = "PrimaryVertices",
    RefPVContainerName     = "BPHY6RefittedPrimaryVertices")

ToolSvc += BPHY6_Reco_mumu
print(BPHY6_Reco_mumu)


from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Select_onia2mumu

## a/ augment and select Jpsi->mumu candidates
BPHY6_Select_Jpsi2mumu = DerivationFramework__Select_onia2mumu(
    name                  = "BPHY6_Select_Jpsi2mumu",
    HypothesisName        = "Jpsi",
    InputVtxContainerName = "BPHY6OniaCandidates",
    VtxMassHypo           = 3096.916,
    MassMin               = 2700.0,
    MassMax               = 3500.0,
    Chi2Max               = 20)

ToolSvc += BPHY6_Select_Jpsi2mumu
print(BPHY6_Select_Jpsi2mumu)

## b/ augment and select Psi(2S)->mumu candidates
BPHY6_Select_Psi2mumu = DerivationFramework__Select_onia2mumu(
    name                  = "BPHY6_Select_Psi2mumu",
    HypothesisName        = "Psi",
    InputVtxContainerName = "BPHY6OniaCandidates",
    VtxMassHypo           = 3686.09,
    MassMin               = 3200.0,
    MassMax               = 4200.0,
    Chi2Max               = 20)

ToolSvc += BPHY6_Select_Psi2mumu
print(BPHY6_Select_Psi2mumu)

# Added by ASC
## c/ augment and select Upsilon(nS)->mumu candidates
BPHY6_Select_Upsi2mumu = DerivationFramework__Select_onia2mumu(
    name                  = "BPHY6_Select_Upsi2mumu",
    HypothesisName        = "Upsi",
    InputVtxContainerName = "BPHY6OniaCandidates",
    VtxMassHypo           = 9460.30,
    MassMin               = 8000.0,
    MassMax               = 12000.0,
    Chi2Max               = 20)

ToolSvc += BPHY6_Select_Upsi2mumu
print(BPHY6_Select_Upsi2mumu)

BPHY6_Select_Bmumu2mumu = DerivationFramework__Select_onia2mumu(
    name                  = "BPHY6_Select_Bmumu2mumu",
    HypothesisName        = "Bmumu",
    InputVtxContainerName = "BPHY6OniaCandidates",
    VtxMassHypo           = 5366.77,
    MassMin               = 4200.0,
    MassMax               = 8000.0,
    Chi2Max               = 20)

ToolSvc += BPHY6_Select_Bmumu2mumu
print(BPHY6_Select_Bmumu2mumu)

BPHY6_Select_Zmumu2mumu = DerivationFramework__Select_onia2mumu(
    name                  = "BPHY6_Select_Zmumu2mumu",
    HypothesisName        = "Zmumu",
    InputVtxContainerName = "BPHY6OniaCandidates",
    VtxMassHypo           = 91187.6,
    MassMin               = 60000.0,
    MassMax               = 120000.0,
    Chi2Max               = 20)

ToolSvc += BPHY6_Select_Zmumu2mumu
print(BPHY6_Select_Zmumu2mumu)

# Catch-all hypothesis spanning the full di-muon window
BPHY6_Select_Onia2mumu = DerivationFramework__Select_onia2mumu(
    name                  = "BPHY6_Select_Onia2mumu",
    HypothesisName        = "Onia",
    InputVtxContainerName = "BPHY6OniaCandidates",
    VtxMassHypo           = 3096.916,
    MassMin               = dimuon_mass_min,
    MassMax               = dimuon_mass_max,
    Chi2Max               = 20)

ToolSvc += BPHY6_Select_Onia2mumu
print(BPHY6_Select_Onia2mumu)


# Skim on any single- or multi-muon HLT chain
trigger_list = [r'HLT_\d?mu\d+']

from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__TriggerSkimmingTool
BPHY6TrigSkimmingTool = DerivationFramework__TriggerSkimmingTool(
    name          = "BPHY6TrigSkimmingTool",
    TriggerListOR = trigger_list )
ToolSvc += BPHY6TrigSkimmingTool


expression = "count(BPHY6OniaCandidates.passed_Onia) > 0 "
from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__xAODStringSkimmingTool
BPHY6_SelectEvent = DerivationFramework__xAODStringSkimmingTool(
    name       = "BPHY6_SelectEvent",
    expression = expression)
ToolSvc += BPHY6_SelectEvent
print(BPHY6_SelectEvent)

#--------------------------------------------------------------------
## 6/ track and vertex thinning. We want to remove all reconstructed secondary vertices
##    which haven't passed any of the selections defined by the (Select_*) tools.
##    We also want to keep only tracks which are associated with either muons or any of the
##    vertices that passed the selection. Multiple thinning tools can perform the
##    selection. The final thinning decision is based on the OR of all the decisions
##    (by default, although it can be changed by the JO).

## a) thinning out vertices that didn't pass any selection and identifying tracks associated with
##    selected vertices. The "VertexContainerNames" is a list of the vertex containers, and "PassFlags"
##    contains all pass flags for Select_* tools that must be satisfied. The vertex is kept if it
##    satisfies any of the listed selections.


#====================================================================
# SET UP STREAM
#====================================================================
streamName  = derivationFlags.WriteDAOD_BPHY6Stream.StreamName
fileName    = buildFileName( derivationFlags.WriteDAOD_BPHY6Stream )
BPHY6Stream = MSMgr.NewPoolRootStream( streamName, fileName )
BPHY6Stream.AcceptAlgs(["BPHY6Kernel"])
# Special lines for thinning
# Thinning service name must match the one passed to the thinning tools
augStream = MSMgr.GetStream( streamName )
evtStream = augStream.GetEventStream()


from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Thin_vtxTrk
BPHY6Thin_vtxTrk = DerivationFramework__Thin_vtxTrk(
    name                       = "BPHY6Thin_vtxTrk",
    TrackParticleContainerName = "InDetTrackParticles",
    StreamName                 = streamName,
    VertexContainerNames       = ["BPHY6OniaCandidates"],
    PassFlags                  = ["passed_Onia"] )

ToolSvc += BPHY6Thin_vtxTrk


## b) thinning out tracks that are not attached to muons. The final thinning decision is based on the OR operation
##    between the decision from this and the previous tools.
from DerivationFrameworkInDet.DerivationFrameworkInDetConf import DerivationFramework__MuonTrackParticleThinning
BPHY6MuonTPThinningTool = DerivationFramework__MuonTrackParticleThinning(
    name                   = "BPHY6MuonTPThinningTool",
    MuonKey                = "Muons",
    StreamName             = streamName,
    InDetTrackParticlesKey = "InDetTrackParticles")
ToolSvc += BPHY6MuonTPThinningTool

# Added by ASC
# Only save truth information directly associated with Onia
from DerivationFrameworkMCTruth.DerivationFrameworkMCTruthConf import DerivationFramework__GenericTruthThinning
BPHY6TruthThinTool = DerivationFramework__GenericTruthThinning(
    name                    = "BPHY6TruthThinTool",
    ParticleSelectionString = "TruthParticles.pdgId == 443 || TruthParticles.pdgId == 100443 || TruthParticles.pdgId == 553 || TruthParticles.pdgId == 100553 || TruthParticles.pdgId == 200553 || TruthParticles.pdgId == 23 || TruthParticles.pdgId == 531 || TruthParticles.pdgId == 511 || TruthParticles.pdgId == 521 || TruthParticles.pdgId == 541",
    PreserveDescendants     = True,
    PreserveAncestors       = True)
ToolSvc += BPHY6TruthThinTool
print(BPHY6TruthThinTool)


#====================================================================
# CREATE THE DERIVATION KERNEL ALGORITHM AND PASS THE ABOVE TOOLS
#====================================================================
## 7/ IMPORTANT bit. Don't forget to pass the tools to the DerivationKernel!
##    If you don't do that, they will not be executed!
+ +# Added by ASC +BPHY6ThinningTools = [BPHY6Thin_vtxTrk, BPHY6MuonTPThinningTool] +if globalflags.DataSource()=='geant4': + BPHY6ThinningTools.append(BPHY6TruthThinTool) + + +# Build a tool to apply the OR combination of the String expression skimming tool, and the Trigger Skimming Tool +from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__FilterCombinationOR +SkimmingORTool = CfgMgr.DerivationFramework__FilterCombinationOR("BPHY6SkimmingOR", + FilterList = [BPHY6_SelectEvent,BPHY6TrigSkimmingTool],) +ToolSvc += SkimmingORTool +print(SkimmingORTool) + +# The name of the kernel (BPHY6Kernel in this case) must be unique to this derivation +from DerivationFrameworkCore.DerivationFrameworkCoreConf import DerivationFramework__DerivationKernel +DerivationFrameworkJob += CfgMgr.DerivationFramework__DerivationKernel( + "BPHY6Kernel", + AugmentationTools = [BPHY6_Reco_mumu, + BPHY6_Select_Jpsi2mumu, BPHY6_Select_Psi2mumu, BPHY6_Select_Upsi2mumu,BPHY6_Select_Bmumu2mumu, + BPHY6_Select_Zmumu2mumu,BPHY6_Select_Onia2mumu, BPHY6_Extrap_Tool], + SkimmingTools = [SkimmingORTool], + # ThinningTools = [BPHY6Thin_vtxTrk, BPHY6MuonTPThinningTool] + ThinningTools = BPHY6ThinningTools + + ) + + + + +#==================================================================== +# Slimming +#==================================================================== + +# Added by ASC +from DerivationFrameworkCore.SlimmingHelper import SlimmingHelper +BPHY6SlimmingHelper = SlimmingHelper("BPHY6SlimmingHelper") +AllVariables = [] +StaticContent = [] + +# Needed for trigger objects +BPHY6SlimmingHelper.IncludeMuonTriggerContent = True +BPHY6SlimmingHelper.IncludeBPhysTriggerContent = True + +AllVariables += ["LVL1MuonRoIs"] + +## primary vertices +AllVariables += ["PrimaryVertices"] +StaticContent += ["xAOD::VertexContainer#BPHY6RefittedPrimaryVertices"] +StaticContent += ["xAOD::VertexAuxContainer#BPHY6RefittedPrimaryVerticesAux."] + +## ID track particles +AllVariables 
+= ["InDetTrackParticles"] + +AllVariables += ["HLT_xAOD__TrackParticleContainer_InDetTrigTrackingxAODCnv_Muon_EFID"] +AllVariables += ["HLT_xAOD__TrackParticleContainer_InDetTrigTrackingxAODCnv_Muon_IDTrig"] +AllVariables += ["HLT_xAOD__TrackParticleContainer_InDetTrigTrackingxAODCnv_Muon_FTF"] +AllVariables += ["HLT_xAOD__TrackParticleContainer_InDetTrigTrackingxAODCnv_Bphysics_FTF"] +AllVariables += ["HLT_xAOD__TrackParticleContainer_InDetTrigTrackingxAODCnv_Bphysics_IDTrig"] + + + +## combined / extrapolated muon track particles +## (note: for tagged muons there is no extra TrackParticle collection since the ID tracks +## are store in InDetTrackParticles collection) +AllVariables += ["CombinedMuonTrackParticles"] +AllVariables += ["ExtrapolatedMuonTrackParticles"] +AllVariables += ["MuonSpectrometerTrackParticles"] + +## muon container +AllVariables += ["Muons"] +AllVariables += ["HLT_xAOD__L2StandAloneMuonContainer_MuonL2SAInfo"] +AllVariables += ["HLT_xAOD__L2CombinedMuonContainer_MuonL2CBInfo"] +AllVariables += ["HLT_xAOD__MuonContainer_MuonEFInfo"] + + +AllVariables += ["HLT_xAOD__TrigBphysContainer_L2BMuMuXFex" ] +AllVariables += ["HLT_xAOD__TrigBphysContainer_EFBMuMuXFex" ] +AllVariables += ["HLT_xAOD__TrigBphysContainer_L2BMuMuFex" ] +AllVariables += ["HLT_xAOD__TrigBphysContainer_EFBMuMuFex" ] +AllVariables += ["HLT_xAOD__TrigBphysContainer_L2TrackMass" ] +AllVariables += ["HLT_xAOD__TrigBphysContainer_EFTrackMass" ] +AllVariables += ["HLT_xAOD__TrigBphysContainer_L2MultiMuFex"] +AllVariables += ["HLT_xAOD__TrigBphysContainer_EFMultiMuFex"] + + +## Jpsi candidates +StaticContent += ["xAOD::VertexContainer#%s" % BPHY6_Reco_mumu.OutputVtxContainerName] +StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY6_Reco_mumu.OutputVtxContainerName] + +if isSimulation: + AllVariables += ["TruthEvents","TruthParticles","TruthVertices","MuonTruthParticles"] + +BPHY6SlimmingHelper.AllVariables = AllVariables +BPHY6SlimmingHelper.StaticContent = 
StaticContent +BPHY6SlimmingHelper.AppendContentToStream(BPHY6Stream) + + diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY7.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY7.py new file mode 100644 index 0000000000000000000000000000000000000000..b53b1a403bc9d9a8a99e52556890f6c59b6afabd --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY7.py @@ -0,0 +1,648 @@ +#==================================================================== +# BPHY7.py +# +# https://twiki.cern.ch/twiki/bin/view/AtlasProtected/LfvBphy7 +#==================================================================== + + +#==================================================================== +# FLAGS TO PERSONALIZE THE DERIVATION +#==================================================================== + +onlyAugmentations = False # Set to True to deactivate thinning and skimming, and only keep augmentations (to generate a sample with full xAOD plus all the extra) +thinTruth = True +addMuExtrapolationForTrigger = True + + +from DerivationFrameworkCore.DerivationFrameworkMaster import * +isSimulation = False +if globalflags.DataSource()=='geant4': + isSimulation = True + +from DerivationFrameworkJetEtMiss.JetCommon import * +from DerivationFrameworkJetEtMiss.METCommon import * + + +#==================================================================== +# SET UP STREAM +#==================================================================== +streamName = derivationFlags.WriteDAOD_BPHY7Stream.StreamName +fileName = buildFileName( derivationFlags.WriteDAOD_BPHY7Stream ) + +BPHY7Stream = MSMgr.NewPoolRootStream( streamName, fileName ) +BPHY7Stream.AcceptAlgs(["BPHY7Kernel2"]) + +## 0/ setup vertexing tools and services +#include( "JpsiUpsilonTools/configureServices.py" ) + +include("DerivationFrameworkBPhys/configureVertexing.py") +BPHY7_VertexTools = BPHYVertexTools("BPHY7") + + 
+#==================================================================== +# TriggerCounting for Kernel1 #Added by Matteo +#==================================================================== +#List of trigggers to be counted (high Sig-eff*Lumi ones are in) +triggersToMetadata= ["HLT_2mu10", + "HLT_2mu10_bJpsimumu", + "HLT_2mu10_bJpsimumu_delayed", + "HLT_2mu10_l2msonly", + "HLT_2mu10_nomucomb", + "HLT_2mu14", + "HLT_2mu14_nomucomb", + "HLT_2mu4", + "HLT_2mu4_bBmumuxv2", + "HLT_2mu4_bDimu_noinvm_novtx_ss", + "HLT_2mu6", + "HLT_2mu6_10invm30_pt2_z10", + "HLT_2mu6_bBmumu", + "HLT_2mu6_bBmumux_Taumumux", + "HLT_2mu6_bBmumuxv2", + "HLT_2mu6_bBmumuxv2_delayed", + "HLT_2mu6_bDimu_noinvm_novtx_ss", + "HLT_2mu6_bJpsimumu", + "HLT_2mu6_bJpsimumu_delayed", + "HLT_2mu6_bJpsimumu_Lxy0_delayed", + "HLT_2mu6_nomucomb_bPhi", + "HLT_2mu6_nomucomb_mu4_nomucomb_bTau_L12MU6_3MU4", + "HLT_3mu4", + "HLT_3mu4_bDimu", + "HLT_3mu4_bDimu2700", + "HLT_3mu4_bTau", + "HLT_3mu4_l2msonly", + "HLT_3mu4_nomucomb_bTau", + "HLT_3mu4_nomucomb_delayed", + "HLT_3mu6", + "HLT_3mu6_bTau", + "HLT_3mu6_msonly", + "HLT_mu10_mu6_bBmumux_BcmumuDsloose_delayed", + "HLT_mu10_mu6_bBmumux_Taumumux", + "HLT_mu10_mu6_bBmumux_Taumumux_noL2", + "HLT_mu10_mu6_bBmumuxv2", + "HLT_mu10_mu6_bBmumuxv2_delayed", + "HLT_mu10_mu6_bJpsimumu", + "HLT_mu10_mu6_bJpsimumu_Lxy0", + "HLT_mu10_mu6_bJpsimumu_Lxy0_delayed", + "HLT_mu10_mu6_bUpsimumu", + "HLT_mu11_mu6_bBmumu", + "HLT_mu11_mu6_bBmumux_BpmumuKp", + "HLT_mu11_mu6_bBmumuxv2", + "HLT_mu11_mu6_bDimu", + "HLT_mu11_mu6_bDimu2700", + "HLT_mu11_mu6_bDimu2700_Lxy0", + "HLT_mu11_mu6_bDimu_Lxy0", + "HLT_mu11_mu6_bJpsimumu", + "HLT_mu11_mu6_bJpsimumu_Lxy0", + "HLT_mu11_mu6_bPhi", + "HLT_mu11_mu6_bTau", + "HLT_mu11_mu6_bUpsimumu", + "HLT_mu11_mu6noL1_bPhi_L1MU11_2MU6", + "HLT_mu10_mu6_bDimu", + "HLT_2mu6_bBmumu_Lxy0_L1BPH-2M9-2MU6_BPH-2DR15-2MU6", + "HLT_2mu6_bJpsimumu_Lxy0_L1BPH-2M9-2MU6_BPH-2DR15-2MU6", + "HLT_2mu10_bDimu", + "HLT_mu11_2mu4noL1_nscan03_L1MU11_2MU6", + 
"HLT_mu11_L1MU10_2mu4noL1_nscan03_L1MU10_2MU6", + "HLT_mu11_nomucomb_2mu4noL1_nscan03_L1MU11_2MU6", + "HLT_mu11_nomucomb_2mu4noL1_nscan03_L1MU11_2MU6_bTau", + "HLT_mu11_nomucomb_mu6noL1_nscan03_L1MU11_2MU6", + "HLT_mu11_nomucomb_mu6noL1_nscan03_L1MU11_2MU6_bTau", + "HLT_mu11_nomucomb_mu6noL1_nscan03_L1MU11_2MU6_bTau_delayed", + "HLT_mu18_2mu4noL1", + "HLT_mu18_mu8noL1", + "HLT_mu20_2mu4noL1", + "HLT_mu20_l2idonly_mu6noL1_nscan03", + "HLT_mu20_l2idonly_mu6noL1_nscan03_bTau", + "HLT_mu20_msonly_mu6noL1_msonly_nscan05", + "HLT_mu20_mu8noL1", + "HLT_mu20_nomucomb_mu6noL1_nscan03", + "HLT_mu20_nomucomb_mu6noL1_nscan03_bTau", + "HLT_mu22_2mu4noL1", + "HLT_mu22_mu8noL1", + "HLT_mu24_2mu4noL1", + "HLT_mu24_imedium", + "HLT_mu24_mu8noL1", + "HLT_mu26_ivarmedium", + "HLT_mu26i", + "HLT_mu50", + "HLT_mu6_2mu4", + "HLT_mu6_2mu4_bJpsi_delayed", + "HLT_mu6_2mu4_bTau_noL2", + "HLT_mu6_l2msonly_2mu4_l2msonly_L1MU6_3MU4", + "HLT_mu6_mu4_bBmumuxv2", + "HLT_mu6_mu4_bBmumuxv2_delayed", + "HLT_mu6_mu4_bDimu_noinvm_novtx_ss", + "HLT_mu6_nomucomb_2mu4_nomucomb_bTau_L1MU6_3MU4", + "HLT_mu6_nomucomb_2mu4_nomucomb_delayed_L1MU6_3MU4", + "HLT_mu20_mu6noL1_bTau", + "HLT_2mu6_mu4_bTau_L12MU6_3MU4", + "HLT_mu6_2mu4_bTau_L1MU6_3MU4", + "HLT_mu11_2mu4noL1_bTau_L1MU11_2MU6", + "HLT_mu11_mu6noL1_bTau_L1MU11_2MU6", + "HLT_3mu4_bPhi", + "HLT_mu11_mu6_bPhi", + "HLT_mu11_nomucomb_mu6_nomucomb_bPhi", + "HLT_mu11_nomucomb_mu6noL1_nscan03_L1MU11_2MU6_bPhi", + "HLT_mu6_2mu4_bTau_L1MU6_3MU4", + "HLT_mu20_mu6btrk_bTauTight", + "HLT_mu20_2mu2btrk_bTauTight", + "HLT_mu11_2mu2btrk_bTauTight_L1MU11_2MU6", + "HLT_3mu4_bPhi", + "HLT_mu11_mu6_bPhi", + "HLT_mu11_mu6noL1_bPhi_L1MU11_2MU6", + "HLT_mu11_mu6_bPhi_L1LFV-MU11", + "HLT_2mu6_bPhi_L1LFV-MU6" ] + + + + +triggersToMetadata_filter = list( set(triggersToMetadata) ) + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__TriggerCountToMetadata +BPHY7TriggerCountToMetadata = DerivationFramework__TriggerCountToMetadata(name = 
"BPHY7TriggerCount", + TriggerList = triggersToMetadata_filter, + FolderName = "BPHY7") + +ToolSvc += BPHY7TriggerCountToMetadata + +#==================================================================== +# PRESELECTION for Kernel1 #Added by Matteo +#==================================================================== +## 1/ Setup the skimming based on triggers +## + +triggerList = [ "HLT_2mu10", + "HLT_2mu10_l2msonly", + "HLT_2mu10_nomucomb", + "HLT_2mu14", + "HLT_mu50", + "HLT_2mu14_l2msonly", + "HLT_2mu14_nomucomb", + "HLT_2mu6_l2msonly_mu4_l2msonly_L12MU6_3MU4", + "HLT_2mu6_nomucomb_mu4_nomucomb_L12MU6_3MU4", + "HLT_mu6_2mu4", + "HLT_mu6_l2msonly_2mu4_l2msonly_L1MU6_3MU4", + "HLT_mu6_nomucomb_2mu4_nomucomb_L1MU6_3MU4", + "HLT_3mu6", + "HLT_3mu6_msonly", + "HLT_3mu6_nomucomb", + "HLT_mu4","HLT_mu6","HLT_mu10","HLT_mu18", + "HLT_mu14", + "HLT_mu24", + "HLT_mu24_L1MU15", + "HLT_2mu4", + "HLT_2mu6", + "HLT_mu20_L1MU15", + "HLT_mu18_2mu4noL1", + "HLT_mu18_nomucomb_2mu4noL1", + "HLT_mu20_2mu4noL1", + "HLT_mu20_l2idonly_2mu4noL1", + "HLT_mu20_nomucomb_2mu4noL1", + "HLT_mu18_mu8noL1", + "HLT_mu18_nomucomb_mu8noL1", + "HLT_mu20_mu8noL1", + "HLT_mu20_l2idonly_2mu4noL1", + "HLT_mu20_nomucomb_mu8noL1", + "HLT_mu22_mu8noL1", + "HLT_mu22_l2idonly_2mu4noL1", + "HLT_mu22_nomucomb_mu8noL1", + "HLT_mu22_2mu4noL1", + "HLT_mu22_nomucomb_2mu4noL1", + "HLT_mu20_2mu4noL1", "HLT_mu20_mu8noL1", + "HLT_mu14_tau25_medium1_tracktwo", + "HLT_mu14_tau35_medium1_tracktwo", + "HLT_mu14_tau25_medium1_tracktwo_xe50", + "HLT_mu14_tau35_medium1_tracktwo_L1TAU20", + "HLT_mu24_mu8noL1", + "HLT_mu6_nomucomb_2mu4_nomucomb_delayed_L1MU6_3MU4", + "HLT_2mu6_bBmumuxv2_delayed", + "HLT_2mu4_bDimu_noinvm_novtx_ss", + "HLT_2mu6_bDimu_noinvm_novtx_ss", + "HLT_mu24_2mu4noL1", + "HLT_mu10_mu6_bUpsimumu", + "HLT_mu10_mu6_bBmumuxv2", + "HLT_mu10_mu6_bJpsimumu", + "HLT_mu6_mu4_bBmumuxv2_delayed", + "HLT_2mu6_10invm30_pt2_z10", + "HLT_2mu6_nomucomb_bPhi", + "HLT_mu6_mu4_bDimu_noinvm_novtx_ss", + 
"HLT_mu11_mu6_bDimu2700", + "HLT_2mu6_bBmumux_Taumumux", + "HLT_mu10_mu6_bBmumux_Taumumux", + "HLT_mu10_mu6_bBmumux_Taumumux_noL2", + "HLT_.*mu11_mu6.*", # Recent triggers + "HLT_.*3mu4.*", + "HLT_.*mu.*imedium.*", # Trigger with looser isolation selection + "HLT_.*mu.*iloose.*", + "HLT_.*mu6.*2mu4.*", + "HLT_.*mu11.*2mu4noL1.*", + "HLT_.*2mu14_nomucomb.*", + "HLT_.*bTau.*", # Our tau triggers + "HLT_.*bDimu2700.*", + "HLT_.*bPhi.*", + "HLT_.*bBmumuxv2.*", + "HLT_.*nscan.*" ] # Narrow scan triggers + +from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__TriggerSkimmingTool +BPHY7TriggerSkim = DerivationFramework__TriggerSkimmingTool(name = "BPHY7TriggerSkim", + TriggerListOR = triggerList, + TriggerListAND = [] ) + +ToolSvc += BPHY7TriggerSkim + + +#==================================================================== +# 2mu vertex for Kernel2 #Added by Matteo +#==================================================================== + +from JpsiUpsilonTools.JpsiUpsilonToolsConf import Analysis__JpsiFinder +BPHY7DiMuon_Finder = Analysis__JpsiFinder(name = "BPHY7DiMuon_Finder", + # OutputLevel = DEBUG, + muAndMu = True, + muAndTrack = False, + TrackAndTrack = False, + assumeDiMuons = True, + invMassUpper = 2900.0, # Cut just below the J/psi + invMassLower = 0.0, + Chi2Cut = 110., #CHANGED! 
Was 200 + oppChargesOnly = False, + allChargeCombinations = True, + combOnly = False, + atLeastOneComb = True, + useCombinedMeasurement = False, # Only takes effect if combOnly=True + muonCollectionKey = "Muons", + TrackParticleCollection = "InDetTrackParticles", + V0VertexFitterTool = BPHY7_VertexTools.TrkV0Fitter, # V0 vertex fitter + useV0Fitter = False, # if False a TrkVertexFitterTool will be used + TrkVertexFitterTool = BPHY7_VertexTools.TrkVKalVrtFitter, # VKalVrt vertex fitter + TrackSelectorTool = BPHY7_VertexTools.InDetTrackSelectorTool, + VertexPointEstimator = BPHY7_VertexTools.VtxPointEstimator, + useMCPCuts = False) +ToolSvc += BPHY7DiMuon_Finder + +#-------------------------------------------------------------------- +##Comment from BPHY2... +## 3/ setup the vertex reconstruction "call" tool(s). They are part of the derivation framework. +## These Augmentation tools add output vertex collection(s) into the StoreGate and add basic +## decorations which do not depend on the vertex mass hypothesis (e.g. lxy, ptError, etc). +## There should be one tool per topology, i.e. Jpsi and Psi(2S) do not need two instance of the +## Reco tool is the JpsiFinder mass window is wide enough. 
+from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Reco_Vertex +BPHY7DiMuon_SelectAndWrite = DerivationFramework__Reco_Vertex(name = "BPHY7DiMuon_SelectAndWrite", + VertexSearchTool = BPHY7DiMuon_Finder, + OutputVtxContainerName = "BPHY7TwoMuCandidates", + PVContainerName = "PrimaryVertices", + RefPVContainerName = "SHOULDNOTBEUSED_DiMuonRefittedPV") +ToolSvc += BPHY7DiMuon_SelectAndWrite + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Select_onia2mumu + +## a/ augment and select Jpsi->mumu candidates +BPHY7DiMuon_Decorator = DerivationFramework__Select_onia2mumu(name = "BPHY7DiMuon_Decorator", + HypothesisName = "Jpsi", + InputVtxContainerName = "BPHY7TwoMuCandidates", + VtxMassHypo = 1230, # used to determine time-of-flight and thus lifetime (deviations and sigmas are also added to the vertex) + MassMin = 0.0, + MassMax = 2900.0, + Chi2Max = 200, + DoVertexType =1) # 1 = Pt, 2 = A0, 4 = Z0 + +ToolSvc += BPHY7DiMuon_Decorator +#==================================================================== +# 3mu/2mu+trk vertex for Kernel2 #Added by Matteo +#==================================================================== +## 4/ setup a new vertexing tool (necessary due to use of mass constraint) +from TrkVKalVrtFitter.TrkVKalVrtFitterConf import Trk__TrkVKalVrtFitter +BpmVertexFit = Trk__TrkVKalVrtFitter(name = "BpmVertexFit", + Extrapolator = BPHY7_VertexTools.InDetExtrapolator, + FirstMeasuredPoint = True, + MakeExtendedVertex = True) +ToolSvc += BpmVertexFit + +## 5/ setup the Jpsi+1 track finder +from JpsiUpsilonTools.JpsiUpsilonToolsConf import Analysis__JpsiPlus1Track +BPHY7ThreeMuon_Finder = Analysis__JpsiPlus1Track(name = "BPHY7ThreeMuon_Finder", + OutputLevel = INFO, + pionHypothesis = True, + kaonHypothesis = False, + trkThresholdPt = 1000.0, + #trkMaxEta = 2.5, # is this value fine?? default would be 102.5 + BThresholdPt = 1000.0, + BMassUpper = 5000.0, # What is this?? 
+ BMassLower = 0.0, + JpsiContainerKey = "BPHY7TwoMuCandidates", + TrackParticleCollection = "InDetTrackParticles", + MuonsUsedInJpsi = "NONE", #cannnot allow, would kill 3muons + ExcludeCrossJpsiTracks = False, + TrkVertexFitterTool = BpmVertexFit, + TrackSelectorTool = BPHY7_VertexTools.InDetTrackSelectorTool, + UseMassConstraint = False, + Chi2Cut = 150) #Cut on chi2/Ndeg_of_freedom, so is very loose + + +ToolSvc += BPHY7ThreeMuon_Finder + +## 6/ setup the combined augmentation/skimming tool for the Bpm +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Reco_Vertex +BPHY7ThreeMuon_SelectAndWrite = DerivationFramework__Reco_Vertex(name = "BPHY7ThreeMuon_SelectAndWrite", + OutputLevel = INFO, + VertexSearchTool = BPHY7ThreeMuon_Finder, + OutputVtxContainerName = "BPHY7Tau3MuCandidates", + PVContainerName = "PrimaryVertices", + RefPVContainerName = "BPHY7RefittedPrimaryVertices", + RefitPV = True, + MaxPVrefit = 1000) +ToolSvc += BPHY7ThreeMuon_SelectAndWrite + +## b/ augment and select Bplus->JpsiKplus candidates +BPHY7ThreeMuon_Decorator = DerivationFramework__Select_onia2mumu( + name = "BPHY7ThreeMuon_Decorator", + OutputLevel = INFO, + HypothesisName = "Tau3MuLoose", + InputVtxContainerName = "BPHY7Tau3MuCandidates", + TrkMasses = [105.658, 105.658, 105.658], + VtxMassHypo = 1777., + MassMin = 0.0, + MassMax = 5000., # If the two selections start differing one might have to check that the tools below still run on the right vertices + Chi2Max = 100.) 
+ +ToolSvc += BPHY7ThreeMuon_Decorator + +## b/ augment and select Bplus->JpsiKplus candidates +BPHY7ThreeMuon_Decorator2 = DerivationFramework__Select_onia2mumu( + name = "BPHY7ThreeMuon_Decorator2", + OutputLevel = INFO, + HypothesisName = "Ds2MuPi", + InputVtxContainerName = "BPHY7Tau3MuCandidates", + TrkMasses = [105.658, 105.658, 139.57], + VtxMassHypo = 1968.3, + MassMin = 0.0, + MassMax = 5000., # If the two selections start differing one might have to check that the tools below still run on the right vertices + Chi2Max = 100.) + +ToolSvc += BPHY7ThreeMuon_Decorator2 + +#Track isolation for candidates +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__VertexTrackIsolation +BPHY7TrackIsolationDecorator = DerivationFramework__VertexTrackIsolation( + name = "BPHY7TrackIsolationDecorator", + OutputLevel = INFO, + TrackIsoTool = "xAOD::TrackIsolationTool", + TrackContainer = "InDetTrackParticles", + InputVertexContainer = "BPHY7Tau3MuCandidates", + PassFlags = ["passed_Tau3MuLoose", "passed_Ds2MuPi"] ) + +ToolSvc += BPHY7TrackIsolationDecorator + +#CaloIsolationTool explicitly declared to avoid pointless warnings (it works!!!) 
+from IsolationTool.IsolationToolConf import xAOD__CaloIsolationTool +BPHY7CaloIsolationTool = xAOD__CaloIsolationTool( + name = "BPHY7CaloIsolationTool", + OutputLevel = WARNING, + saveOnlyRequestedCorrections = True, + IsoLeakCorrectionTool = "" ) #Workaround for a bug in older versions + +ToolSvc += BPHY7CaloIsolationTool + +#Calo isolation for candidates +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__VertexCaloIsolation +BPHY7CaloIsolationDecorator = DerivationFramework__VertexCaloIsolation( + name = "BPHY7CaloIsolationDecorator", + OutputLevel = INFO, + CaloIsoTool = BPHY7CaloIsolationTool, #"xAOD::CaloIsolationTool", + TrackContainer = "InDetTrackParticles", + InputVertexContainer = "BPHY7Tau3MuCandidates", + CaloClusterContainer = "CaloCalTopoClusters", + ParticleCaloExtensionTool = "Trk::ParticleCaloExtensionTool/ParticleCaloExtensionTool", + PassFlags = ["passed_Tau3MuLoose", "passed_Ds2MuPi"] ) + +ToolSvc += BPHY7CaloIsolationDecorator + +#==================================================================== +# Skimming tool to select only events with the correct vertices +#==================================================================== + +#-------------------------------------------------------------------- +## 9/ select the event. We only want to keep events that contain certain three-mu vertices which passed certain selection. +## Exactly like in the preselection, where only 2mu vertices are treated. 
+ +expression = "count(BPHY7Tau3MuCandidates.passed_Tau3MuLoose) > 0 || count(BPHY7Tau3MuCandidates.passed_Ds2MuPi) > 0" + +from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__xAODStringSkimmingTool +BPHY7_SelectEvent = DerivationFramework__xAODStringSkimmingTool(name = "BPHY7_SelectEvent", + OutputLevel = INFO, + expression = expression) + +ToolSvc += BPHY7_SelectEvent +print(BPHY7_SelectEvent) + +#==================================================================== +# Add Extrapolation of muons to trigger layers +#==================================================================== + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__MuonExtrapolationTool +BPHY7_Extrap_Tool = DerivationFramework__MuonExtrapolationTool( name = "BPHY7_ExtrapolationTool", OutputLevel = INFO ) + +ToolSvc += BPHY7_Extrap_Tool + + +#==================================================================== +# Thinning Helper and various thinning tools +#==================================================================== + +#-------------------------------------------------------------------- +## 10/ Setup the thinning helper, only tool able to perform thinning of trigger navigation information + +from DerivationFrameworkCore.ThinningHelper import ThinningHelper +BPHY7ThinningHelper = ThinningHelper( "BPHY7ThinningHelper" ) +BPHY7ThinningHelper.TriggerChains = 'HLT_.*mu.*' #triggerList # . = any character; * = 0 or more times; + = 1 or more times; ? 0 or 1 times "Regular_Expression" +BPHY7ThinningHelper.AppendToStream( BPHY7Stream ) + + +#-------------------------------------------------------------------- +## 11/ track and vertex thinning. We want to remove all reconstructed secondary vertices +## which haven't passed any of the selections defined by (Select_*) tools. +## We also want to keep only tracks which are associates with either muons or any of the +## vertices that passed the selection. 
Multiple thinning tools can perform the +## selection. The final thinning decision is based OR of all the decisions (by default, +## although it can be changed by the JO). + +## 12/ Cleans up, removing duplicate vertices. An issue caused by the logic of Jpsi+1 track in the case of 3-muon candidates + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Thin_vtxDuplicates +BPHY7Thin_vtxDuplicates = DerivationFramework__Thin_vtxDuplicates(name = "BPHY7Thin_vtxDuplicates", + OutputLevel = INFO, + VertexContainerName = "BPHY7Tau3MuCandidates", + PassFlags = ["passed_Tau3MuLoose", "passed_Ds2MuPi"]) + +ToolSvc += BPHY7Thin_vtxDuplicates + +## a) thining out vertices that didn't pass any selection and idetifying tracks associated with +## selected vertices. The "VertexContainerNames" is a list of the vertex containers, and "PassFlags" +## contains all pass flags for Select_* tools that must be satisfied. The vertex is kept is it +## satisfy any of the listed selections. + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Thin_vtxTrk +BPHY7Thin_vtxTrk = DerivationFramework__Thin_vtxTrk( + name = "BPHY7Thin_vtxTrk", + OutputLevel = INFO, + TrackParticleContainerName = "InDetTrackParticles", + AcceptanceRadius = 1., + VertexContainerNames = ["BPHY7Tau3MuCandidates"], + PassFlags = ["passed_Tau3MuLoose", "passed_Ds2MuPi"], + ApplyAnd = True ) # "and" requirement for Vertices + +ToolSvc += BPHY7Thin_vtxTrk + + +## 13/ thinning out tracks that are not attached to muons. The final thinning decision is based on the OR operation +## between decision from this and the previous tools. 
+from DerivationFrameworkInDet.DerivationFrameworkInDetConf import DerivationFramework__MuonTrackParticleThinning +BPHY7MuonTPThinningTool = DerivationFramework__MuonTrackParticleThinning(name = "BPHY7MuonTPThinningTool", + MuonKey = "Muons", + InDetTrackParticlesKey = "InDetTrackParticles") +ToolSvc += BPHY7MuonTPThinningTool + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__BPhysPVThinningTool +BPHY7_thinningTool_PV = DerivationFramework__BPhysPVThinningTool(name = "BPHY7_thinningTool_PV", + CandidateCollections = ["BPHY7Tau3MuCandidates"], + KeepPVTracks =True) + +ToolSvc += BPHY7_thinningTool_PV + +from DerivationFrameworkInDet.DerivationFrameworkInDetConf import DerivationFramework__TauTrackParticleThinning +BPHY7TauTPThinningTool = DerivationFramework__TauTrackParticleThinning(name = "BPHY7TauTPThinningTool", + TauKey = "TauJets", + InDetTrackParticlesKey = "InDetTrackParticles") +ToolSvc += BPHY7TauTPThinningTool + +# Only save truth informtion directly associated with: mu Ds+ D+ D*+ Ds*+ D0 D*0 B+ B*+ B0 B*0 +from DerivationFrameworkMCTruth.DerivationFrameworkMCTruthConf import DerivationFramework__GenericTruthThinning +BPHY7TruthThinTool = DerivationFramework__GenericTruthThinning(name = "BPHY7TruthThinTool", + ParticleSelectionString = "abs(TruthParticles.pdgId) == 13 || abs(TruthParticles.pdgId) == 431 || abs(TruthParticles.pdgId) == 411 || abs(TruthParticles.pdgId) == 413 || abs(TruthParticles.pdgId) == 433 || TruthParticles.pdgId == 421 || TruthParticles.pdgId == 423 || abs(TruthParticles.pdgId) == 521 || abs(TruthParticles.pdgId) == 523 || TruthParticles.pdgId == 511 || TruthParticles.pdgId == 513", + PreserveDescendants = True, + PreserveAncestors = True) +ToolSvc += BPHY7TruthThinTool + +# Only save truth neutrino and b/c quarks information +from DerivationFrameworkMCTruth.DerivationFrameworkMCTruthConf import DerivationFramework__GenericTruthThinning +BPHY7TruthThinNoChainTool = 
DerivationFramework__GenericTruthThinning(name = "BPHY7TruthThinNoChainTool", + ParticleSelectionString = "abs(TruthParticles.pdgId) == 4 || abs(TruthParticles.pdgId) == 5 || abs(TruthParticles.pdgId) == 12 || abs(TruthParticles.pdgId) == 14 || abs(TruthParticles.pdgId) == 16", + PreserveDescendants = False, + PreserveAncestors = False) +ToolSvc += BPHY7TruthThinNoChainTool + + +#==================================================================== +# CREATE THE DERIVATION KERNEL ALGORITHM AND PASS THE ABOVE TOOLS +#==================================================================== + +BPHY7ThinningTools = [ BPHY7MuonTPThinningTool, BPHY7Thin_vtxDuplicates, BPHY7Thin_vtxTrk, BPHY7_thinningTool_PV, BPHY7TauTPThinningTool] + +BPHY7SkimmingTools = [BPHY7_SelectEvent] + +BPHY7AugmentationTools = [BPHY7DiMuon_SelectAndWrite, BPHY7DiMuon_Decorator, BPHY7ThreeMuon_SelectAndWrite, BPHY7ThreeMuon_Decorator, BPHY7ThreeMuon_Decorator2, BPHY7TrackIsolationDecorator, BPHY7CaloIsolationDecorator] + +if addMuExtrapolationForTrigger: + BPHY7AugmentationTools.append(BPHY7_Extrap_Tool) + +Kernel1Tools = [BPHY7TriggerSkim] + +if isSimulation: + #BPHY7AugmentationTools.append(DFCommonTauTruthMatchingWrapper) + if thinTruth: + BPHY7ThinningTools.append(BPHY7TruthThinTool) + BPHY7ThinningTools.append(BPHY7TruthThinNoChainTool) + +#The sequence object. 
Is in principle just a wrapper which allows to run two kernels in sequence +BPHY7_Sequence = CfgMgr.AthSequencer("BPHY7_Sequence") +from DerivationFrameworkFlavourTag.FlavourTagCommon import FlavorTagInit +FlavorTagInit(JetCollections=['AntiKt4EMPFlowJets'], Sequencer=BPHY7_Sequence) + + +#onlyAugmentations implementation +if onlyAugmentations: + Kernel1Tools = [] + BPHY7SkimmingTools = [] + BPHY7ThinningTools = [] + +# Kernel n1 PRESELECTION +# The name of the kernel (BPHY7Kernel1 in this case) must be unique to this derivation +from DerivationFrameworkCore.DerivationFrameworkCoreConf import DerivationFramework__DerivationKernel +BPHY7_Sequence += CfgMgr.DerivationFramework__DerivationKernel("BPHY7Kernel1", + AugmentationTools = [BPHY7TriggerCountToMetadata] , + SkimmingTools = Kernel1Tools) +# Kernel n2 deep Derivation +# The name of the kernel (BPHY7Kernel2 in this case) must be unique to this derivation +from DerivationFrameworkCore.DerivationFrameworkCoreConf import DerivationFramework__DerivationKernel +BPHY7_Sequence += CfgMgr.DerivationFramework__DerivationKernel("BPHY7Kernel2", + AugmentationTools = BPHY7AugmentationTools, + SkimmingTools = BPHY7SkimmingTools, + ThinningTools = BPHY7ThinningTools) + +#Vital, replaces the adding of kernels directly +DerivationFrameworkJob += BPHY7_Sequence + +#==================================================================== +# Slimming +#==================================================================== + +from DerivationFrameworkCore.SlimmingHelper import SlimmingHelper +BPHY7SlimmingHelper = SlimmingHelper("BPHY7SlimmingHelper") + + +SmartCollections = ["Electrons", "Photons", "TauJets", "AntiKt4EMTopoJets_BTagging201810", "BTagging_AntiKt4EMTopo_201810", "PrimaryVertices", "Muons", "InDetTrackParticles", "MET_Reference_AntiKt4EMTopo"] + + +AllVariables = ["METAssoc_AntiKt4EMTopo", + "MET_Core_AntiKt4EMTopo", + "MET_Truth", + "MET_Track", + "MET_LocHadTopo"] + +AllVariables += ["Kt4EMTopoOriginEventShape", + 
"Kt4EMTopoEventShape"] + +AllVariables += ["CombinedMuonTrackParticles", + "ExtrapolatedMuonTrackParticles", + "MuonSpectrometerTrackParticles"] + + +ExtraVariables = ["Photons.pt.eta.phi.m", + "Electrons.pt.eta.phi.m","TauJets.pt.eta.phi.m.IsTruthMatched.truthJetLink.truthParticleLink", + "AntiKt4EMTopoJets_BTagging201810.JetPileupScaleMomentum_pt.JetPileupScaleMomentum_eta.JetPileupScaleMomentum_phi.JetPileupScaleMomentum_m", + "AntiKt4EMTopoJets_BTagging201810.JvtJvfcorr.HECFrac.LArQuality.HECQuality.NegativeE.AverageLArQF", + "AntiKt4EMTopoJets_BTagging201810.JetEtaJESScaleMomentum_pt.JetEtaJESScaleMomentum_eta.JetEtaJESScaleMomentum_phi.JetEtaJESScaleMomentum_m"] + +ExtraVariables += ["Muons.etaLayer1Hits.etaLayer2Hits.etaLayer3Hits.etaLayer4Hits.phiLayer1Hits.phiLayer2Hits.phiLayer3Hits.phiLayer4Hits", + "Muons.numberOfTriggerEtaLayers.numberOfPhiLayers", + "CombinedMuonTrackParticles.numberOfTRTHits.numberOfTRTHighThresholdHits", + "InDetTrackParticles.numberOfTRTHits.numberOfTRTHighThresholdHits.vx.vy.vz", + "PrimaryVertices.chiSquared.covariance"] + + +StaticContent = ["xAOD::VertexContainer#BPHY7RefittedPrimaryVertices", + "xAOD::VertexAuxContainer#BPHY7RefittedPrimaryVerticesAux."] + +# ThreeBody candidates (vertices) +StaticContent += ["xAOD::VertexContainer#%s" % BPHY7ThreeMuon_SelectAndWrite.OutputVtxContainerName] +StaticContent += ["xAOD::VertexAuxContainer#%sAux." 
% BPHY7ThreeMuon_SelectAndWrite.OutputVtxContainerName] +## we have to disable vxTrackAtVertex branch since it is not xAOD compatible +StaticContent += ["xAOD::VertexAuxContainer#%sAux.-vxTrackAtVertex" % BPHY7ThreeMuon_SelectAndWrite.OutputVtxContainerName] + +# Truth information for MC only +if isSimulation: + AllVariables += ["TruthEvents","TruthParticles","TruthVertices","MuonTruthParticles", "METMap_Truth"] + SmartCollections += ["AntiKt4TruthJets"] + +# Needed for trigger objects +BPHY7SlimmingHelper.IncludeMuonTriggerContent = True +BPHY7SlimmingHelper.IncludeBPhysTriggerContent = True + +# Pass all lists to the SlimmingHelper +BPHY7SlimmingHelper.ExtraVariables = ExtraVariables +BPHY7SlimmingHelper.AllVariables = AllVariables +BPHY7SlimmingHelper.StaticContent = StaticContent +BPHY7SlimmingHelper.SmartCollections = SmartCollections +BPHY7SlimmingHelper.AppendContentToStream(BPHY7Stream) + diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY8.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY8.py new file mode 100644 index 0000000000000000000000000000000000000000..5e3de69ae0d083e354ce42d16a8bd9f0ca505d8c --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY8.py @@ -0,0 +1,2200 @@ +#==================================================================== +# +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +# +# @file BPHY8.py +# +# @author W. Walkowiak, <wolfgang.walkowiak@cern.ch> +# +# Based on example derivation formats. +# It requires the reductionConf flag BPHY8 in Reco_tf.py +# +# Produces DxAODs for the B(s)->mu+mu- analysis including the reference +# channels B+->J/psiK+ and Bs->J/psiPhi: +# * For data vertex containers for all three channels are produced +# in parallel. +# * For signal or reference channel MC the appropriate configuration +# is set according to the dataset number (DSN). 
The list associating +# known dataset numbers to decay channels (below) needs to be adjusted +# in case there are new MC samples with new numbers. +# +#==================================================================== +# Set up common services and job object. +# This should appear in ALL derivation job options +from DerivationFrameworkCore.DerivationFrameworkMaster import * + +# for debugging output +from pprint import pprint +from egammaRec.Factories import getPropertyValue + +# more debug messages +## svcMgr.MessageSvc.debugLimit = 5000000 +## svcMgr.MessageSvc.debugLimit = 5000 + +# Set up Bmumu configuration (metadata) tracking tool. +# This tool imports our defaults from Bmumu_metadata.cxx. +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf \ + import DerivationFramework__Bmumu_metadata +BPHY8_MetaDataTool = DerivationFramework__Bmumu_metadata( + name = "BPHY8_metadata", + DerivationName = "BPHY8", + OutputLevel = WARNING, + # verbosity of python script (0 - 10) + verbose = 10 +) +# local shorthand and ensure default contents of __slots__ dict are +# available as attributes +from DerivationFrameworkBPhys.BPhysPyHelpers import BPhysEnsureAttributes +BPHY8cf = BPhysEnsureAttributes(BPHY8_MetaDataTool) + +# add it to the ToolSvc chain +ToolSvc += BPHY8_MetaDataTool + +print(BPHY8_MetaDataTool) +pprint(BPHY8_MetaDataTool.properties()) + +# data or simulation? +if globalflags.DataSource() == 'geant4': + BPHY8cf.isSimulation = True + +# project tag +BPHY8cf.projectTag = rec.projectName() + +# trigger stream name +from RecExConfig.InputFilePeeker import inputFileSummary +if inputFileSummary is not None: + BPHY8cf.triggerStream = inputFileSummary['tag_info']['triggerStreamOfFile'] + +# release 21 or newer? 
+from PyJobTransforms.trfUtils import releaseIsOlderThan +BPHY8cf.isRelease21 = not releaseIsOlderThan(21,0) + +# MC campaigns by MC run number +BPHY8MCcampaigns = {284500 : 'mc16a', + 300000 : 'mc16d', + 310000 : 'mc16e'} + +# run number and MC campaign by MC run number +from AthenaCommon.AthenaCommonFlags import athenaCommonFlags +import PyUtils.AthFile as BPHY8_af +BPHY8_f = BPHY8_af.fopen(athenaCommonFlags.PoolAODInput()[0]) +BPHY8cf.mcCampaign = 'unknown' +if len(BPHY8_f.run_numbers) > 0: + BPHY8cf.runNumber = int(BPHY8_f.run_numbers[0]) + if BPHY8cf.isSimulation and BPHY8cf.runNumber in BPHY8MCcampaigns: + BPHY8cf.mcCampaign = BPHY8MCcampaigns[BPHY8cf.runNumber] + +print("BPHY8: isSimulation = %s" % BPHY8cf.isSimulation) +print("BPHY8: project tag = %s" % BPHY8cf.projectTag) +print("BPHY8: MC campaign = %s" % BPHY8cf.mcCampaign) +print("BPHY8: DerivationFrameworkHasTruth = %s" % \ + DerivationFrameworkHasTruth) +print("BPHY8: release 21 or up: %s" % BPHY8cf.isRelease21) + +#==================================================================== +# MC dataset categories (lists of dataset numbers) +#==================================================================== +BPHY8cf.mcBsmumu = [300203,300306,300307,300308,300402,300426,300430,300446,300447] +BPHY8cf.mcBplusJpsiKplus = [300203,300306,300307,300308,300997,300999,300404,300405,300406,300437] +BPHY8cf.mcBsJpsiPhi = [300203,300306,300307,300308,300401,300438,300448,300449] +BPHY8cf.mcBplusJpsiPiplus = [300406,300437] +BPHY8cf.mcBhh = [300431,300432,300433,300434] +BPHY8cf.mcNoTrigger = [300446,300447,300448,300449] + +#==================================================================== +# Data datasets to receive special treatment +#==================================================================== +# Remember our special runs used during validation +## BPHY8cf.specDataRuns = [302393,339849,358096] +BPHY8cf.specDataRuns = [] +# +# for testing only +## BPHY8cf.specDataRuns += [337491] + 
+#==================================================================== +# MC datasets to receive special treatment +#==================================================================== +# Remember our special MC datasets used during validation +## BPHY8cf.specMcChannels = [300307,300404,300405,300426,300430,300438] +BPHY8cf.specMcChannels = [] +# +#==================================================================== +# Defaults for BPHY8 configuration +#==================================================================== +# +# Blind search setup +# +# Enable? +BPHY8cf.doBmumuBlinding = True +# Cut blinded values/vertices? +BPHY8cf.doCutBlinded = False +## BPHY8cf.doCutBlinded = True +# Blind only candidates where all muons are of quality tight +BPHY8cf.blindOnlyAllMuonsTight = True +# Variables to blind (of vertices) +BPHY8cf.BlindedVars = "Bsmumu_mass.Bsmumu_MUCALC_mass" +# Pass flag indicating blinded candidates +BPHY8cf.BlindingFlag = "Bsmumu_blinded" +# Blinding key for testing +## BPHY8cf.BlindingKey = "0b0408d1f5c4760e7d4b50e97095" +# Blinding key for production +# key for Run 2 - 2015/16 only analysis +## BPHY8cf.BlindingKey = "0b04087bdac4564252fd778ac351" +# keys for full Run 2 analysis +BPHY8_data15BlindingKey = "0b040820229968c09fec401ace33" +BPHY8_data16BlindingKey = "0b04083901ad3d2bb3881ffca6a7" +BPHY8_data17BlindingKey = "0b040831a13a9c83f9936cf5b703" +BPHY8_data18BlindingKey = "0b040893fc715e9b346759bf4f3b" +# default is data15 +BPHY8cf.BlindingKey = BPHY8_data15BlindingKey +# +# Thinning level +# 0 - simple vertex thinning using Thin_vtxTrk. +# 1 - thinning subdecay vertex candidates using Thin_vtxTrk. +# 2 - thinning subdecay vertex candidates using BmumuThinningTool. 
+# 3 - thinning subdecay vertex candidates using BmumuThinningTool, +# but keeping all PVs and refittedPVs +# 4 - thinning subdecay vertex candidates using BmumuThinningTool, +# but keeping all PVs and refittedPVs and all ID tracks +# 5 - thinning subdecay vertex candidates using BmumuThinningTool, +# but keeping all PVs, all refittedPVs, all ID tracks and all muons +# +BPHY8cf.thinLevel = 3 +# +# Track particle collection +BPHY8cf.TrkPartContName = "InDetTrackParticles" +# +# Primary vertex collection +BPHY8cf.PVContName = "PrimaryVertices" +# +# Trigger navigation object thinning +# +# Apply thinning? +BPHY8cf.doTrigNavThinning = True +# +# Keep muon based HLT items for now +BPHY8cf.TrigNavThinList = [ "HLT_[0-9]*mu[0-9]+.*" ] +# +# Add containers for soft B tagging vertices +BPHY8cf.doAddSoftBVertices = True +# +# Muon collection +BPHY8cf.MuonCollection = "Muons" +# +# Apply MCP calibration to muons? (only for MC) +# +# Options: +# 0 : none +# 1 : apply calibration on-the-fly inside the muon container +# (calibration effects B candidate building) +# 2 : apply calibration and store it in an extra container for +# calibrated muons +# (calibration does not effect B candidate building) +# 3 : apply calibration and store it in an extra container for +# calibrated muons and use this container for B candidate building +# +BPHY8cf.useCalibratedMuons = 3 +## BPHY8cf.useCalibratedMuons = 2 + +# make sure data are not affected +# Update: Needs to be applied to data as well +## if not BPHY8cf.isSimulation: +## BPHY8cf.useCalibratedMuons = 0 + +# +# Configuration for MuonCalibrationAndSmearingTool +# +# Set string values to "_READ_" to only read the default values +# from the tool. +# +# THE FOLLOWING COMMENT IS VOID AS OF 2017-07-30. +# Note: Several JOs of the MuonCalibrationAndSmearingTool are not +# (yet) available in AtlasDerivation-20.7.8.x caches which uses +# MuonMomentumCorrections-01-00-35 instead of the latest +# MuonMomentumCorrections-01-00-60. 
# For now these options will be disabled further below.
#
# Note: (2018-05-05 -> 2018-11-29)
# Formerly used the rel. 21 pre-recommendations from
# https://twiki.cern.ch/twiki/bin/view/AtlasProtected/MCPAnalysisGuidelinesMC16
#
# Note: (2018-11-29)
# Now updated to new rel. 21 pre-recommendations
# Page revision r18 (as of 2018-11-27)
# https://twiki.cern.ch/twiki/bin/view/AtlasProtected/MCPAnalysisConsolidationMC16
#
# Note: (2020-01-15)
# Now updated to new release 21 recommendations for full run 2 (winter update)
# Page revision r17 (as of 2019-11-13)
# https://twiki.cern.ch/twiki/bin/view/AtlasProtected/MCPAnalysisWinterMC16#Momentum_corrections
#
# Note: (2020-03-31)
# Now updated to new release 21 recommendations for full run 2 / setup 1
# Page revision r37 (as of 2020-03-23)
# https://twiki.cern.ch/twiki/bin/view/AtlasProtected/MCPAnalysisGuidelinesMC16#Momentum_corrections
#
def BPHY8_setupMcstConfig(config, year):
    """Apply the common MuonCalibrationAndSmearingTool settings.

    Only the data-taking year -- and hence the suffix of the
    sagitta-bias release -- differs between the MC campaigns and the
    data periods; every other setting is identical, so it is set here
    exactly once instead of being repeated per campaign/period.
    """
    config.McstYear                  = year
    config.McstRelease               = "Recs2020_03_03"
    config.McstStatComb              = False
    config.McstSagittaCorr           = True
    config.McstSagittaRelease        = "sagittaBiasDataAll_03_02_19_" + year
    config.McstDoSagittaMCDistortion = False
    config.McstSagittaCorrPhaseSpace = True

# MC: choose the year by campaign (default: like mc16a)
if BPHY8cf.isSimulation:
    BPHY8_mcstYearByCampaign = {"mc16a": "Data16",
                                "mc16d": "Data17",
                                "mc16e": "Data18"}
    BPHY8_setupMcstConfig(BPHY8cf,
                          BPHY8_mcstYearByCampaign.get(BPHY8cf.mcCampaign,
                                                       "Data16"))
# data: choose the year by project tag
else:
    # Note: The recommendation page sets McstYear to 'Data16' for data15
    if BPHY8cf.projectTag.startswith("data15"):
        BPHY8_setupMcstConfig(BPHY8cf, "Data16")
    if BPHY8cf.projectTag.startswith("data16"):
        BPHY8_setupMcstConfig(BPHY8cf, "Data16")
    if BPHY8cf.projectTag.startswith("data17"):
        BPHY8_setupMcstConfig(BPHY8cf, "Data17")
    if BPHY8cf.projectTag.startswith("data18"):
        BPHY8_setupMcstConfig(BPHY8cf, "Data18")

# wide mumu mass range?
BPHY8cf.doUseWideMuMuMassRange = False

# other default settings
BPHY8cf.GlobalChi2CutBase = 15.

# Global mass values (in MeV, from PDG 2015)
BPHY8cf.GlobalMuonMass  = 105.6584
BPHY8cf.GlobalPionMass  = 139.57061
BPHY8cf.GlobalKaonMass  = 493.677
BPHY8cf.GlobalJpsiMass  = 3096.92
BPHY8cf.GlobalBplusMass = 5279.29
BPHY8cf.GlobalB0Mass    = 5279.61
BPHY8cf.GlobalBsMass    = 5366.79

# Cut values for kaon candidates
BPHY8cf.GlobalKaonPtCut  = 1000.
BPHY8cf.GlobalKaonEtaCut = 2.5

# primary vertex association types (interpreted bit-wise)
## BPHY8cf.doVertexType = 15 # ALL
BPHY8cf.doVertexType = 8 # only Z0_BA
## BPHY8cf.doVertexType = 9 # only SUM_PT2 and Z0_BA

# minimum number of tracks in PV considered for PV association
BPHY8cf.minNTracksInPV = 3

# add 3-dimensional proper time information?
BPHY8cf.do3dProperTime = True

# use invariant mass based on combined muon track information in mass cuts?
BPHY8cf.useMuCalcMass = True

# add MUCALC mass from non-modified muons for debugging
BPHY8cf.addMucalcMassForDebug = False

# PV types to be considered in calculation of minLogChi2ToAnyPV variable
BPHY8cf.MinChi2ToAnyPVTypes = [1, 3]

# MCP cuts for JpsiFinder
BPHY8cf.useJpsiFinderMCPCuts = False

# reject muons in JpsiPlus1Track or JpsiPlus2Track finders
BPHY8cf.GlobalMuonsUsedInJpsi = "NONE" # default to turn it off

# mode of minLogChi2ToAnyPV calculation:
# 0 : no such calculation
# 1 : use all PVs of requested type(s)
# 2 : exclude PVs associated to SVs
# 3 : replace PVs associated to SVs by corresponding refitted PVs
BPHY8cf.AddMinChi2ToAnyPVMode = 3

# Vertex isolation -- track selection requirements
# (Sizes of all lists below need to be identical!)
# Set to "Custom" (for strings) or -1. (for numerics) to disable setting
BPHY8cf.IsoTrackCategoryName = ["LoosePt05", "LooSiHi1Pt05"]
BPHY8cf.IsoTrackCutLevel     = ["Loose"    , "Loose"       ]
BPHY8cf.IsoTrackPtCut        = [ 500.      ,  500.         ]
BPHY8cf.IsoTrackEtaCut       = [  -1.      ,   -1.         ]
BPHY8cf.IsoTrackPixelHits    = [  -1       ,    1          ]
BPHY8cf.IsoTrackSCTHits      = [  -1       ,    2          ]
BPHY8cf.IsoTrackbLayerHits   = [  -1       ,   -1          ]
BPHY8cf.IsoTrackIBLHits      = [  -1       ,   -1          ]
# Vertex isolation -- cone sizes
# (Sizes of all lists below need to be identical!)
# Note: IsoDoTrkImpLogChi2Cut = 2 implements the old method used
#       for the 2015/16 analysis
BPHY8cf.IsolationConeSizes    = [0.7, 0.7, 1.0]
BPHY8cf.IsoTrkImpLogChi2Max   = [5.0, 5.0, 0.0]
BPHY8cf.IsoDoTrkImpLogChi2Cut = [2  , 1  , 0  ]
# Track types to be used (bit pattern).  The same encoding applies
# to useIsoTrackTypes, useMuIsoTrackTypes and useCloseTrackTypes
# below; the table is given only once here.
# bit : meaning
# 0   : tracks close to PV associated with SV
# 1   : tracks associated with dummy PV ("type-0 PV tracks")
# 2   : tracks associated with PV of type 1
# 3   : tracks associated with PV of type 2
# 4   : tracks associated with PV of type 3
# 5   : tracks associated with PV with types other than 0 to 4.
# 6   : tracks with missing pointer to PV (NULL pointer)
# 7-32: tracks being closest to assoc. PV
#           decimal useRefittedPVs doDCAin3D chi2DefToUse
# 7  :          128 yes            no        0
# 8  :          256 no             no        0
# 9  :          512 yes            yes       0
# 10 :         1024 no             yes       0
# 11 :         2048 yes            no        1
# 12 :         4096 no             no        1
# 13 :         8192 yes            yes       1
# 14 :        16384 no             yes       1
# 15 :        32768 yes            no        2
# 16 :        65536 no             no        2
# 17 :       131072 yes            yes       2
# 18 :       262144 no             yes       2
# 19 :       524288 yes            --        3
# 20 :      1048576 no             --        3
# 21 :      2097152 yes            --        4
# 22 :      4194304 no             --        4
# 23 :      8388608 yes            --        5
# 24 :     16777216 no             --        5
# 25 :     33554432 yes            yes       6
# 26 :     67108864 no             yes       6
# 27 :    134217728 yes            yes       7
# 28 :    268435456 no             yes       7
# 29 :    536870912 yes            yes       8
# 30 :   1073741824 no             yes       8
# 31 :   2147483648 yes            yes       9
# 32 :   4294967296 no             yes       9
# useRefittedPVs: replace the PV associated to the decay candidate
#                 by the refitted PV
# doDCAin3D     : use d0 and z0 in the determination of the point
#                 of closest approach of a track to a vertex
# chi2DefToUse  : PV uncertainties in the chi2 calculation
#                 in addition to track uncertainties
#   0 : from track perigee (old method, only track uncertainties)
#   1 : from track perigee with uncertainties from track and vertex
#   2 : simple extrapolation from track parameters with uncertainties
#       from track and vertex (extrapolation used for track swimming)
#   3 : CalcLogChi2toPV method from NtupleMaker using
#       xAOD::TrackingHelpers (only track uncertainties)
#   4 : CalcLogChi2toPV method from NtupleMaker using
#       xAOD::TrackingHelpers (track and vertex uncertainties)
#   5 : use TrackVertexAssociationTool
#   6 : full 3D chi2 from track perigee with uncertainties
#       from track and vertex (sum of 3x3 covariance matrices)
#   7 : full 3D chi2 from track perigee with uncertainties
#       from track and vertex (sum of 2x2 covariance matrices)
#   8 : simple extrapolation from track parameters with uncertainties
#       from track and vertex (sum of 3x3 covariance matrices)
#   9 : simple extrapolation from track parameters with uncertainties
#       from track and vertex (sum of 2x2 covariance matrices)
# (E.g. 127 means to consider all tracks.)
BPHY8cf.useIsoTrackTypes = [35, 8388608, 134217728, 127]
# Working point for TrackVertexAssociationTool (for chi2DefToUse == 5)
BPHY8cf.IsoTvaWorkingPoint = "Loose"
# use of speed-optimized algorithm
BPHY8cf.IsoUseOptimizedAlgo = True
## BPHY8cf.IsoUseOptimizedAlgo = False
#
# Combinations to keep: save from removal as non-needed branches
# Tuples: (isolation settings|track types|ID track selection)
# Note: use an empty list to keep all
BPHY8cf.IsoIncludes = ['07_LC50d2|35|LoosePt05',           # ACH
                       '10_LC00d0|134217728|LooSiHi1Pt05', # BEJ
                       '10_LC00d0|8388608|LooSiHi1Pt05',   # BGJ
                       '07_LC50d1|127|LooSiHi1Pt05' ]      # BDI

# Isolation for muons from the B candidate -- track selection
# requirements (sizes of all lists below need to be identical!)
# Set to "Custom" (for strings) or -1. (for numerics) to disable setting
BPHY8cf.MuIsoTrackCategoryName = ["LoosePt05", "LooSiHi1Pt05"]
BPHY8cf.MuIsoTrackCutLevel     = ["Loose"    , "Loose"       ]
BPHY8cf.MuIsoTrackPtCut        = [ 500.      ,  500.         ]
BPHY8cf.MuIsoTrackEtaCut       = [  -1.      ,   -1.         ]
BPHY8cf.MuIsoTrackPixelHits    = [  -1       ,    1          ]
BPHY8cf.MuIsoTrackSCTHits      = [  -1       ,    2          ]
BPHY8cf.MuIsoTrackbLayerHits   = [  -1       ,   -1          ]
BPHY8cf.MuIsoTrackIBLHits      = [  -1       ,   -1          ]
# Muon isolation -- cone sizes
# (Sizes of all lists below need to be identical!)
# Note: MuIsoDoTrkImpLogChi2Cut = 2 implements the old method used
#       for the 2015/16 analysis
BPHY8cf.MuIsolationConeSizes    = [0.7, 0.7, 1.0]
BPHY8cf.MuIsoTrkImpLogChi2Max   = [5.0, 5.0, 0.0]
BPHY8cf.MuIsoDoTrkImpLogChi2Cut = [2  , 1  , 0  ]
# Track types to be used: same bit pattern as for useIsoTrackTypes
# (see the table above).
BPHY8cf.useMuIsoTrackTypes = [35, 8388608, 134217728, 127]
# Working point for TrackVertexAssociationTool (for chi2DefToUse == 5)
BPHY8cf.MuIsoTvaWorkingPoint = "Loose"
#
# Combinations to keep: save from removal as non-needed branches
# Tuples: (isolation settings|track types|ID track selection)
# Note: use an empty list to keep all
BPHY8cf.MuIsoIncludes = ['07_LC50d2|35|LoosePt05',           # ACH
                         '10_LC00d0|134217728|LooSiHi1Pt05', # BEJ
                         '10_LC00d0|8388608|LooSiHi1Pt05',   # BGJB
                         '07_LC50d1|127|LooSiHi1Pt05' ]      # BDI

# Closest track finding -- track selection requirements
# Set to "Custom" (for strings) or -1. (for numerics) to disable setting
BPHY8cf.CloseTrackCategoryName = ["LoosePt05", "LooSiHi1Pt05"]
BPHY8cf.CloseTrackCutLevel     = ["Loose"    , "Loose"       ]
BPHY8cf.CloseTrackPtCut        = [ 500.      ,  500.         ]
BPHY8cf.CloseTrackEtaCut       = [  -1.      ,   -1.         ]
BPHY8cf.CloseTrackPixelHits    = [  -1       ,    1          ]
BPHY8cf.CloseTrackSCTHits      = [  -1       ,    2          ]
BPHY8cf.CloseTrackbLayerHits   = [  -1       ,   -1          ]
BPHY8cf.CloseTrackIBLHits      = [  -1       ,   -1          ]
# Track types to be used: same bit pattern as for useIsoTrackTypes
# (see the table above).
#
# Correspondence to Run 1 settings:
#
# Option to only use tracks from specific primary vertices
# (always excluding B decay tracks), CloseTrackOption:
#  old   new
#   0 :   63 : use all tracks (default)
#   1 :    1 : use only tracks from PV associated with B vertex
#   2 :    1 : use all tracks which are not from PVs other than
#              the PV associated with B vertex
#   3 :   35 : use all tracks which are not from PVs other than
#              the PV associated with B vertex but including those
#              from the dummy vertex (type 0 vertex)
#   4 :  127 : same as option 3 but using the vertex pointers
#              for comparing in the old setup; including tracks
#              with broken (NULL) vertex pointers as well
BPHY8cf.useCloseTrackTypes = [35, 8388608, 134217728]
# Working point for TrackVertexAssociationTool (for chi2DefToUse == 5)
BPHY8cf.CloseTrackTvaWorkingPoint = "Loose"
#
# Close tracks chi2 related settings
# (The next five lists need to be exactly of the same length.)
BPHY8cf.CloseTrackChi2SetName = ["201516", "f2dc2"]
# Chi2 calculation variant per set -- whether and how vertex (SV)
# uncertainties enter in addition to the track uncertainties:
# 0 : from track perigee (old method, only track uncertainties)
# 1 : from track perigee with uncertainties from track and vertex
# 2 : simple extrapolation from track parameters with uncertainties
#     from track and vertex (extrapolation used for track swimming)
# 3 : CalcLogChi2toPV method from NtupleMaker using
#     xAOD::TrackingHelpers (only track uncertainties)
# 4 : CalcLogChi2toPV method from NtupleMaker using
#     xAOD::TrackingHelpers (track and vertex uncertainties)
# 5 : use TrackVertexAssociationTool
# 6 : full 3D chi2 from track perigee with uncertainties
#     from track and vertex (sum of 3x3 covariance matrices)
# 7 : full 3D chi2 from track perigee with uncertainties
#     from track and vertex (sum of 2x2 covariance matrices)
# 8 : simple extrapolation from track parameters with uncertainties
#     from track and vertex (sum of 3x3 covariance matrices)
# 9 : simple extrapolation from track parameters with uncertainties
#     from track and vertex (sum of 2x2 covariance matrices)
# N.B.: Settings 3, 4 and 5 may be less reasonable here. Do not use.
BPHY8cf.CloseTrackCorrChi2 = [0, 7]
# use 3-dimensional information in the minimization
BPHY8cf.CloseTrackMinDCAin3D = [True, True]
# maximum chi2 distance of the closest track to the B vertex
BPHY8cf.CloseTrackMaxLogChi2 = [7., 7.]
# maximum chi2 distance of the closest track to the B vertex
# for track counting
BPHY8cf.NCloseTrackMaxLogChi2 = [1., 2.]
#
# Combinations to keep: save from removal as non-needed branches
# Tuples: (close track chi2 set|track types|ID track selection)
# Note: use an empty list to keep all
BPHY8cf.CloseTrackIncludes = ['201516|35|LoosePt05',          # ACK
                              'f2dc2|134217728|LooSiHi1Pt05', # BEL
                              'f2dc2|8388608|LooSiHi1Pt05' ]  # BGL

# track/muon isolation and closest track tools:
# debugging level for track types (output to log)
# (Set to 1 to enable, 0 otherwise.)
BPHY8cf.DebugTrackTypes = 0

# BTrackVertexMapLogger / BPhysTrackVertexMapTools
# maximum number of events to dump track-to-vertex assoc. maps for
# (Set to -1 for no limit, to 0 to disable.)
BPHY8cf.DebugTrkToVtxMaxEvents = 0
#====================================================================
# General job setup
#====================================================================
# For MC run the specific channel(s) only; for data run the 2-, 3-
# and 4-prong algorithms in parallel.
if BPHY8cf.isSimulation:
    # MC channel number (i.e. dataset number for MC)
    if len(BPHY8_f.infos['mc_channel_number']) > 0:
        BPHY8cf.mcChNumber = int((BPHY8_f.infos['mc_channel_number'])[0])
        # map the dataset number onto the analysis channel(s)
        for BPHY8_dsnList, BPHY8_chan in [
                (BPHY8cf.mcBsmumu,          "Bsmumu"),
                (BPHY8cf.mcBplusJpsiKplus,  "BJpsiK"),
                (BPHY8cf.mcBsJpsiPhi,       "BsJpsiPhi"),
                (BPHY8cf.mcBplusJpsiPiplus, "BJpsiPi"),
                (BPHY8cf.mcBhh,             "Bhh")]:
            if BPHY8cf.mcChNumber in BPHY8_dsnList:
                BPHY8cf.doChannels.append(BPHY8_chan)
        # use trigger?
        if BPHY8cf.mcChNumber in BPHY8cf.mcNoTrigger:
            BPHY8cf.doTriggerInfo = False
    # for special MC channels keep all ID tracks and all muons
    if BPHY8cf.mcChNumber in BPHY8cf.specMcChannels:
        BPHY8cf.thinLevel = 5
    # no blind search for MC
    BPHY8cf.doBmumuBlinding = False
else:
    # data: run all three channels
    BPHY8cf.doChannels += ["Bsmumu", "BJpsiK", "BsJpsiPhi"]
    # for special data runs keep all ID tracks and all muons
    if BPHY8cf.runNumber in BPHY8cf.specDataRuns:
        BPHY8cf.thinLevel = 5
    # pick the blinding key by data-taking year
    if BPHY8cf.projectTag.startswith("data15"):
        BPHY8cf.BlindingKey = BPHY8_data15BlindingKey
    elif BPHY8cf.projectTag.startswith("data16"):
        BPHY8cf.BlindingKey = BPHY8_data16BlindingKey
    elif BPHY8cf.projectTag.startswith("data17"):
        BPHY8cf.BlindingKey = BPHY8_data17BlindingKey
    elif BPHY8cf.projectTag.startswith("data18"):
        BPHY8cf.BlindingKey = BPHY8_data18BlindingKey

# disable soft B tagging vertices if the BJpsiK channel is not run
if "BJpsiK" not in BPHY8cf.doChannels:
    BPHY8cf.doAddSoftBVertices = False

print("BPHY8 job setup: run : %d" % BPHY8cf.runNumber)
print("BPHY8 job setup: MC channel number : %d" % BPHY8cf.mcChNumber)
print("BPHY8 job setup: isSimulation : %s" % BPHY8cf.isSimulation)
print("BPHY8 job setup: doChannels :", end=' ')
for BPHY8_channel in BPHY8cf.doChannels:
    print("%s" % (BPHY8_channel), end=' ')
print()
print("BPHY8 job setup: thin level : %d" % BPHY8cf.thinLevel)
print("BPHY8 job setup: soft B vertices : %d" % BPHY8cf.doAddSoftBVertices)

# abort if no channels are to be run on
assert len(BPHY8cf.doChannels) > 0

#====================================================================
# Mass ranges
#====================================================================
BPHY8cf.GlobalBMassUpperCut      = 7000.
BPHY8cf.GlobalBMassLowerCut      = 3500.
BPHY8cf.GlobalTrksMassUpperCut   = 7500.
BPHY8cf.GlobalTrksMassLowerCut   = 3000.
BPHY8cf.GlobalDiMuonMassUpperCut = 7000.
BPHY8cf.GlobalDiMuonMassLowerCut = 2000.
BPHY8cf.GlobalJpsiMassUpperCut = 7000.
BPHY8cf.GlobalJpsiMassLowerCut = 2000.
BPHY8cf.GlobalBlindLowerCut    = 5166.
BPHY8cf.GlobalBlindUpperCut    = 5526.

# widened windows for the wide dimuon mass range analysis
if BPHY8cf.doUseWideMuMuMassRange:
    BPHY8cf.GlobalBMassUpperCut      = 10000.
    BPHY8cf.GlobalBMassLowerCut      = 3250.
    BPHY8cf.GlobalTrksMassUpperCut   = 10500.
    BPHY8cf.GlobalTrksMassLowerCut   = 2750.
    BPHY8cf.GlobalDiMuonMassUpperCut = 10000.
    BPHY8cf.GlobalDiMuonMassLowerCut = 2000.
    BPHY8cf.GlobalJpsiMassUpperCut   = 10000.
    BPHY8cf.GlobalJpsiMassLowerCut   = 2000.

#====================================================================
# Vertexing chi2 cuts for n-prong decays
#====================================================================
# the global base chi2 cut scaled by the number of degrees of freedom
BPHY8cf.Chi2Cut2Prong = BPHY8cf.GlobalChi2CutBase * 1.
BPHY8cf.Chi2Cut3Prong = BPHY8cf.GlobalChi2CutBase * 4.
BPHY8cf.Chi2Cut4Prong = BPHY8cf.GlobalChi2CutBase * 6.

#====================================================================
# Muons or tracks for JpsiFinder
#====================================================================
# two ID tracks (instead of two muons) only for the B->hh channel
BPHY8_useTrackPair = "Bhh" in BPHY8cf.doChannels
BPHY8cf.JfTwoMuons         = not BPHY8_useTrackPair
BPHY8cf.JfTwoTracks        = BPHY8_useTrackPair
BPHY8cf.JfTrackThresholdPt = 3500. if BPHY8_useTrackPair else 0.
# MeV + +#==================================================================== +# DEBUGGING SETUP +#==================================================================== +# +# Dump contents of this file to log +if BPHY8cf.verbose > 4: + import inspect + thisfile = inspect.getfile(inspect.currentframe()) + print("# >>>------------------ %s ------------------------" % thisfile) + with open (thisfile, 'r') as fin: + print(fin.read()) + print("# <<<------------------ %s ------------------------" % thisfile) + +# required for track jets for Soft B Tagging +if BPHY8cf.doAddSoftBVertices: + from DerivationFrameworkJetEtMiss.JetCommon import * + +#==================================================================== +# CALIBRATION SEQUENCES +#==================================================================== +# +# For parameters of the MuonCalibrationAndSmearingTool see: +# https://twiki.cern.ch/twiki/bin/view/AtlasProtected/MCPAnalysisGuidelinesMC15#Muon_momentum_scale_and_resoluti +# +# ordered dicts +from collections import OrderedDict +BPHY8_CalibrationAlgs = OrderedDict() + +# Create calibrated muons if requested +BPHY8cf.CalMuonCollection = BPHY8cf.MuonCollection +BPHY8cf.UsedMuonCollection = BPHY8cf.MuonCollection +BPHY8cf.AllMuonCollections = [ BPHY8cf.MuonCollection ] +if BPHY8cf.useCalibratedMuons > 0: + BPHY8cf.adjustMucalcKinematics = True + if BPHY8cf.useCalibratedMuons > 1: + BPHY8cf.CalMuonCollection = BPHY8cf.DerivationName+"_CalibratedMuons" + BPHY8cf.adjustMucalcKinematics = False + BPHY8cf.AllMuonCollections += [ BPHY8cf.CalMuonCollection ] + if BPHY8cf.useCalibratedMuons > 2: + BPHY8cf.UsedMuonCollection = BPHY8cf.CalMuonCollection + BPHY8cf.adjustMucalcKinematics = True + BPHY8_MuonCalTool = CfgMgr.CP__MuonCalibrationAndSmearingTool( + BPHY8cf.DerivationName+"_MCPTool", + OutputLevel = INFO ) + if BPHY8cf.McstYear != "_READ_": + BPHY8_MuonCalTool.Year = BPHY8cf.McstYear + if BPHY8cf.McstRelease != "_READ_": + BPHY8_MuonCalTool.Release = 
BPHY8cf.McstRelease + # read back string values + BPHY8cf.McstYear = getPropertyValue(BPHY8_MuonCalTool, "Year") + BPHY8cf.McstRelease = getPropertyValue(BPHY8_MuonCalTool, "Release") + # additional options for MuonMomentumCorrections-01-00-64 and up + # Don't decorate with Eigen (from MuonMomentumCorrections-01-00-62 + # onwards, see ATLASG-1126) + BPHY8_MuonCalTool.noEigenDecor = True + BPHY8_MuonCalTool.StatComb = BPHY8cf.McstStatComb + BPHY8_MuonCalTool.SagittaCorr = BPHY8cf.McstSagittaCorr + BPHY8_MuonCalTool.doSagittaMCDistortion = BPHY8cf.McstDoSagittaMCDistortion + BPHY8_MuonCalTool.SagittaCorrPhaseSpace = BPHY8cf.McstSagittaCorrPhaseSpace + if BPHY8cf.McstSagittaRelease != "_READ_": + BPHY8_MuonCalTool.SagittaRelease = BPHY8cf.McstSagittaRelease + # read back string value + BPHY8cf.McstSagittaRelease = getPropertyValue(BPHY8_MuonCalTool, + "SagittaRelease") + ToolSvc += BPHY8_MuonCalTool + print(BPHY8_MuonCalTool) + pprint(BPHY8_MuonCalTool.properties()) + BPHY8_CalibrationAlgs["CalMuonProvider"] = CfgMgr.CP__CalibratedMuonsProvider( + BPHY8cf.DerivationName+"_CalMuonProvider", + Input = BPHY8cf.MuonCollection, + Output = BPHY8cf.CalMuonCollection, + Tool = BPHY8_MuonCalTool, + OutputLevel = INFO ) # output only if set to VERBOSE + +# for quick debugging +## BPHY8cf.adjustMucalcKinematics = False + +for BPHY8_name in list(BPHY8_CalibrationAlgs.keys()): + print(BPHY8_CalibrationAlgs[BPHY8_name]) + pprint(BPHY8_CalibrationAlgs[BPHY8_name].properties()) + +#==================================================================== +# Muon extrapolation for trigger scaling +#==================================================================== +# Introduced by BPhys trigger group, see merge request 7857 +# (https://gitlab.cern.ch/atlas/athena/merge_requests/7857) +# +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf \ + import DerivationFramework__MuonExtrapolationTool + +BPHY8_MuonExtrapTool = DerivationFramework__MuonExtrapolationTool( + name = 
"BPHY8_MuonExtrapolationTool", + MuonCollection = BPHY8cf.UsedMuonCollection, + OutputLevel = INFO ) + +ToolSvc += BPHY8_MuonExtrapTool +print(BPHY8_MuonExtrapTool) +pprint(BPHY8_MuonExtrapTool.properties()) + +#==================================================================== +# AUGMENTATION TOOLS +#==================================================================== +# setup vertexing tools and services +# superseeded by BPHYVertexTools +## include( "JpsiUpsilonTools/configureServices.py" ) + +# we need to have the DiMuon finder running for channels with Jpsi as well +BPHY8_recoList = [] +if [BPHY8_i for BPHY8_i in BPHY8cf.doChannels \ + if BPHY8_i in ["Bsmumu", "BJpsiK", "BsJpsiPhi", "BJpsiPi", "Bhh"]]: + BPHY8_recoList += ["DiMuon"] +if "BJpsiK" in BPHY8cf.doChannels: BPHY8_recoList += [ "BJpsiK" ] +if "BsJpsiPhi" in BPHY8cf.doChannels: BPHY8_recoList += [ "BsJpsiPhi" ] +if "BJpsiPi" in BPHY8cf.doChannels: BPHY8_recoList += [ "BJpsiPi" ] + +# setup of vertexing tools per channel +include("DerivationFrameworkBPhys/configureVertexing.py") +BPHY8_VertexTools = OrderedDict() +for BPHY8_reco in BPHY8_recoList: + BPHY8_VertexTools[BPHY8_reco] = BPHYVertexTools(BPHY8cf.DerivationName+"_"+BPHY8_reco) + print(BPHY8_VertexTools[BPHY8_reco]) + +# setup of vertex finder tools +from JpsiUpsilonTools.JpsiUpsilonToolsConf import \ + Analysis__JpsiFinder, Analysis__JpsiPlus1Track, Analysis__JpsiPlus2Tracks + +BPHY8_FinderTools = OrderedDict() +for BPHY8_reco in BPHY8_recoList: +# a) for DiMuon + if BPHY8_reco == "DiMuon": + BPHY8_FinderTools[BPHY8_reco] = Analysis__JpsiFinder( + name = BPHY8cf.DerivationName+"_"+BPHY8_reco+"_Finder", + OutputLevel = INFO, + muAndMu = BPHY8cf.JfTwoMuons, + muAndTrack = False, + TrackAndTrack = BPHY8cf.JfTwoTracks, + doTagAndProbe = False, + assumeDiMuons = False, # If true, will assume dimu hypothesis and use PDG value for mu mass + track1Mass = BPHY8cf.GlobalMuonMass, + track2Mass = BPHY8cf.GlobalMuonMass, + muonThresholdPt = 0., + 
trackThresholdPt = BPHY8cf.JfTrackThresholdPt, + invMassUpper = BPHY8cf.GlobalDiMuonMassUpperCut, + invMassLower = BPHY8cf.GlobalDiMuonMassLowerCut, + # For JpsiFinder the cut is really on chi2 and not on chi2/ndf + Chi2Cut = BPHY8cf.Chi2Cut2Prong, + oppChargesOnly = True, + sameChargesOnly = False, + allChargeCombinations = False, + allMuons = True, + combOnly = False, + atLeastOneComb = False, + useCombinedMeasurement = False, # Only takes effect if combOnly=True + muonCollectionKey = BPHY8cf.UsedMuonCollection, + TrackParticleCollection = BPHY8cf.TrkPartContName, + V0VertexFitterTool = BPHY8_VertexTools[BPHY8_reco].TrkV0Fitter, # V0 vertex fitter + useV0Fitter = False, # if False a TrkVertexFitterTool will be used + TrkVertexFitterTool = BPHY8_VertexTools[BPHY8_reco].TrkVKalVrtFitter, # VKalVrt vertex fitter + TrackSelectorTool = BPHY8_VertexTools[BPHY8_reco].InDetTrackSelectorTool, + VertexPointEstimator = BPHY8_VertexTools[BPHY8_reco].VtxPointEstimator, + useMCPCuts = BPHY8cf.useJpsiFinderMCPCuts ) + +# b) for BJpsiK or BJpsiPi + if BPHY8_reco in [ "BJpsiK", "BJpsiPi" ] : + BPHY8_kaonHypo = (False if BPHY8_reco == "BJpsiPi" else True) + BPHY8_FinderTools[BPHY8_reco] = Analysis__JpsiPlus1Track( + name = BPHY8cf.DerivationName+"_"+BPHY8_reco+"_Finder", + OutputLevel = INFO, + pionHypothesis = not BPHY8_kaonHypo, + kaonHypothesis = BPHY8_kaonHypo, + trkThresholdPt = BPHY8cf.GlobalKaonPtCut, + trkMaxEta = BPHY8cf.GlobalKaonEtaCut, + BThresholdPt = 1000., + TrkTrippletMassUpper = BPHY8cf.GlobalTrksMassUpperCut, + TrkTrippletMassLower = BPHY8cf.GlobalTrksMassLowerCut, + BMassUpper = BPHY8cf.GlobalBMassUpperCut, + BMassLower = BPHY8cf.GlobalBMassLowerCut, + JpsiContainerKey = BPHY8cf.DerivationName+"DiMuonCandidates", + JpsiMassUpper = BPHY8cf.GlobalJpsiMassUpperCut, + JpsiMassLower = BPHY8cf.GlobalJpsiMassLowerCut, + MuonsUsedInJpsi = BPHY8cf.GlobalMuonsUsedInJpsi, + TrackParticleCollection = BPHY8cf.TrkPartContName, + TrkVertexFitterTool = 
BPHY8_VertexTools[BPHY8_reco].TrkVKalVrtFitter, # VKalVrt vertex fitter + TrackSelectorTool = BPHY8_VertexTools[BPHY8_reco].InDetTrackSelectorTool, + # This JO should rather be named Chi2byNdfCut + Chi2Cut = BPHY8cf.GlobalChi2CutBase, + UseMassConstraint = True, + ExcludeJpsiMuonsOnly = True, + ExcludeCrossJpsiTracks = False) + +# c) for BsJpsiPhi + if BPHY8_reco == "BsJpsiPhi": + BPHY8_FinderTools[BPHY8_reco] = Analysis__JpsiPlus2Tracks( + name = BPHY8cf.DerivationName+"_"+BPHY8_reco+"_Finder", + OutputLevel = INFO, + pionpionHypothesis = False, + kaonkaonHypothesis = True, + kaonpionHypothesis = False, + trkThresholdPt = BPHY8cf.GlobalKaonPtCut, + trkMaxEta = BPHY8cf.GlobalKaonEtaCut, + BThresholdPt = 1000., + TrkQuadrupletMassUpper = BPHY8cf.GlobalTrksMassUpperCut, + TrkQuadrupletMassLower = BPHY8cf.GlobalTrksMassLowerCut, + BMassUpper = BPHY8cf.GlobalBMassUpperCut, + BMassLower = BPHY8cf.GlobalBMassLowerCut, + JpsiContainerKey = BPHY8cf.DerivationName+"DiMuonCandidates", + JpsiMassUpper = BPHY8cf.GlobalJpsiMassUpperCut, + JpsiMassLower = BPHY8cf.GlobalJpsiMassLowerCut, + MuonsUsedInJpsi = BPHY8cf.GlobalMuonsUsedInJpsi, + TrackParticleCollection = BPHY8cf.TrkPartContName, + TrkVertexFitterTool = BPHY8_VertexTools[BPHY8_reco].TrkVKalVrtFitter, # VKalVrt vertex fitter + TrackSelectorTool = BPHY8_VertexTools[BPHY8_reco].InDetTrackSelectorTool, + # This JO should rather be named Chi2byNdfCut + Chi2Cut = BPHY8cf.GlobalChi2CutBase, + UseMassConstraint = True, + ExcludeJpsiMuonsOnly = True, + ExcludeCrossJpsiTracks = False) + +ToolSvc += list(BPHY8_FinderTools.values()) +for BPHY8_name in list(BPHY8_FinderTools.keys()): + print(BPHY8_FinderTools[BPHY8_name]) + pprint(BPHY8_FinderTools[BPHY8_name].properties()) + +#-------------------------------------------------------------------- +# Setup the vertex reconstruction "call" tool(s). They are part of the +# derivation framework. 
+# These Augmentation tools add output vertex collection(s) into the +# StoreGate and add basic decorations which do not depend on the vertex +# mass hypothesis (e.g. lxy, ptError, etc). +# There should be one tool per topology, i.e. Jpsi and Psi(2S) do not need +# two instance of the Reco tool if the JpsiFinder mass window is wide enough. +# +# The reconstruction tools must be interleaved with the vertex selection +# and augmentation tools as e.g. the Jpsimumu container ist needed for +# ???? +# + +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf \ + import DerivationFramework__Reco_Vertex +BPHY8_RecoTools = OrderedDict() +for BPHY8_reco in BPHY8_recoList: +# a) for DiMuon + if BPHY8_reco == "DiMuon": + BPHY8_RecoTools[BPHY8_reco] = DerivationFramework__Reco_Vertex( + name = BPHY8cf.DerivationName+"_"+BPHY8_reco+"_Reco", + JpsiFinder = BPHY8_FinderTools[BPHY8_reco], + OutputVtxContainerName = BPHY8cf.DerivationName+BPHY8_reco+"Candidates", + PVContainerName = BPHY8cf.PVContName, + RefPVContainerName = BPHY8cf.DerivationName+"DiMuonRefittedPrimaryVertices", + RefitPV = True, + Do3d = BPHY8cf.do3dProperTime, + MaxPVrefit = 100000, + MinNTracksInPV = BPHY8cf.minNTracksInPV, + DoVertexType = BPHY8cf.doVertexType) +# b) for BJpsiK + if BPHY8_reco in [ "BJpsiK", "BJpsiPi" ] : + BPHY8_RecoTools[BPHY8_reco] = DerivationFramework__Reco_Vertex( + name = BPHY8cf.DerivationName+"_"+BPHY8_reco+"_Reco", + Jpsi1PlusTrackName = BPHY8_FinderTools[BPHY8_reco], + OutputVtxContainerName = BPHY8cf.DerivationName+BPHY8_reco+"Candidates", + PVContainerName = BPHY8cf.PVContName, + RefPVContainerName = BPHY8cf.DerivationName+BPHY8_reco+"RefittedPrimaryVertices", + RefitPV = True, + Do3d = BPHY8cf.do3dProperTime, + MaxPVrefit = 100000, + MinNTracksInPV = BPHY8cf.minNTracksInPV, + DoVertexType = BPHY8cf.doVertexType) +# c) for BsJpsiPhi + if BPHY8_reco == "BsJpsiPhi": + BPHY8_RecoTools[BPHY8_reco] = DerivationFramework__Reco_Vertex( + name = 
BPHY8cf.DerivationName+"_"+BPHY8_reco+"_Reco", + Jpsi2PlusTrackName = BPHY8_FinderTools[BPHY8_reco], + OutputVtxContainerName = BPHY8cf.DerivationName+BPHY8_reco+"Candidates", + PVContainerName = BPHY8cf.PVContName, + RefPVContainerName = BPHY8cf.DerivationName+BPHY8_reco+"RefittedPrimaryVertices", + RefitPV = True, + Do3d = BPHY8cf.do3dProperTime, + MaxPVrefit = 100000, + MinNTracksInPV = BPHY8cf.minNTracksInPV, + DoVertexType = BPHY8cf.doVertexType) + +ToolSvc += list(BPHY8_RecoTools.values()) +for BPHY8_name in list(BPHY8_RecoTools.keys()): + print(BPHY8_RecoTools[BPHY8_name]) + pprint(BPHY8_RecoTools[BPHY8_name].properties()) + +#-------------------------------------------------------------------- +# Augmentation of vertices by MUCALC mass and it's error +# +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf \ + import DerivationFramework__BPhysAddMuonBasedInvMass + +BPHY8_MuMassTools = OrderedDict() +# a) for Bsmumu +if "Bsmumu" in BPHY8cf.doChannels: + # augment B(s)->mumu candidates + BPHY8_MuMassTools["Bsmumu"] = DerivationFramework__BPhysAddMuonBasedInvMass( + name = "BPHY8_MuMass_Bsmumu", + BranchPrefix = "Bsmumu", + OutputLevel = WARNING, + AdjustToMuonKinematics = BPHY8cf.adjustMucalcKinematics, + VertexContainerName = BPHY8cf.DerivationName+"DiMuonCandidates", + TrkMasses = [BPHY8cf.GlobalMuonMass, BPHY8cf.GlobalMuonMass], + AddMinChi2ToAnyPVMode = BPHY8cf.AddMinChi2ToAnyPVMode, + PrimaryVertexContainerName = BPHY8cf.PVContName, + MinNTracksInPV = BPHY8cf.minNTracksInPV, + PVTypesToConsider = BPHY8cf.MinChi2ToAnyPVTypes, + DoVertexType = BPHY8cf.doVertexType) +# b) for BJpsiK, BsJpsiPhi and BJpsiPi retain the Jpsi +if [i for i in BPHY8cf.doChannels if i in ["BJpsiK", "BsJpsiPhi", "BJpsiPi"]]: + BPHY8_MuMassTools["Jpsimumu"] = DerivationFramework__BPhysAddMuonBasedInvMass( + name = "BPHY8_MuMass_Jpsimumu", + BranchPrefix = "Jpsimumu", + OutputLevel = WARNING, + AdjustToMuonKinematics = BPHY8cf.adjustMucalcKinematics, + VertexContainerName = 
BPHY8cf.DerivationName+"DiMuonCandidates", + TrkMasses = [BPHY8cf.GlobalMuonMass, BPHY8cf.GlobalMuonMass], + AddMinChi2ToAnyPVMode = BPHY8cf.AddMinChi2ToAnyPVMode, + PrimaryVertexContainerName = BPHY8cf.PVContName, + MinNTracksInPV = BPHY8cf.minNTracksInPV, + PVTypesToConsider = BPHY8cf.MinChi2ToAnyPVTypes, + DoVertexType = BPHY8cf.doVertexType) +# c) for BJpsiK +if "BJpsiK" in BPHY8cf.doChannels: + # augment B+/- ->JpsiK+/- candidates + BPHY8_MuMassTools["BJpsiK"] = DerivationFramework__BPhysAddMuonBasedInvMass( + name = "BPHY8_MuMass_BJpsiK", + BranchPrefix = "BJpsiK", + OutputLevel = WARNING, + AdjustToMuonKinematics = BPHY8cf.adjustMucalcKinematics, + VertexContainerName = BPHY8cf.DerivationName+"BJpsiKCandidates", + TrkMasses = [BPHY8cf.GlobalMuonMass, BPHY8cf.GlobalMuonMass, BPHY8cf.GlobalKaonMass], + AddMinChi2ToAnyPVMode = BPHY8cf.AddMinChi2ToAnyPVMode, + PrimaryVertexContainerName = BPHY8cf.PVContName, + MinNTracksInPV = BPHY8cf.minNTracksInPV, + PVTypesToConsider = BPHY8cf.MinChi2ToAnyPVTypes, + DoVertexType = BPHY8cf.doVertexType) +# d) for BsJpsiPhi +if "BsJpsiPhi" in BPHY8cf.doChannels: + # augment Bs ->JpsiPhi candidates + BPHY8_MuMassTools["BsJpsiPhi"] = DerivationFramework__BPhysAddMuonBasedInvMass( + name = "BPHY8_MuMass_BsJpsiPhi", + BranchPrefix = "BsJpsiPhi", + OutputLevel = WARNING, + AdjustToMuonKinematics = BPHY8cf.adjustMucalcKinematics, + VertexContainerName = BPHY8cf.DerivationName+"BsJpsiPhiCandidates", + TrkMasses = [BPHY8cf.GlobalMuonMass, BPHY8cf.GlobalMuonMass, BPHY8cf.GlobalKaonMass, BPHY8cf.GlobalKaonMass], + AddMinChi2ToAnyPVMode = BPHY8cf.AddMinChi2ToAnyPVMode, + PrimaryVertexContainerName = BPHY8cf.PVContName, + MinNTracksInPV = BPHY8cf.minNTracksInPV, + PVTypesToConsider = BPHY8cf.MinChi2ToAnyPVTypes, + DoVertexType = BPHY8cf.doVertexType) +# e) for BJpsiPi +if "BJpsiPi" in BPHY8cf.doChannels: + # augment B+/- ->JpsiPi+/- candidates + BPHY8_MuMassTools["BJpsiPi"] = DerivationFramework__BPhysAddMuonBasedInvMass( + name = 
"BPHY8_MuMass_BJpsiPi", + BranchPrefix = "BJpsiPi", + OutputLevel = WARNING, + AdjustToMuonKinematics = BPHY8cf.adjustMucalcKinematics, + VertexContainerName = BPHY8cf.DerivationName+"BJpsiPiCandidates", + TrkMasses = [BPHY8cf.GlobalMuonMass, BPHY8cf.GlobalMuonMass, BPHY8cf.GlobalPionMass], + AddMinChi2ToAnyPVMode = BPHY8cf.AddMinChi2ToAnyPVMode, + PrimaryVertexContainerName = BPHY8cf.PVContName, + MinNTracksInPV = BPHY8cf.minNTracksInPV, + PVTypesToConsider = BPHY8cf.MinChi2ToAnyPVTypes, + DoVertexType = BPHY8cf.doVertexType) +# f) for Bhh +if "Bhh" in BPHY8cf.doChannels: + # augment B->hh candidates + BPHY8_MuMassTools["Bhh"] = DerivationFramework__BPhysAddMuonBasedInvMass( + name = "BPHY8_MuMass_Bhh", + BranchPrefix = "Bhh", + OutputLevel = WARNING, + AdjustToMuonKinematics = BPHY8cf.adjustMucalcKinematics, + VertexContainerName = BPHY8cf.DerivationName+"DiMuonCandidates", + TrkMasses = [BPHY8cf.GlobalMuonMass, BPHY8cf.GlobalMuonMass], + AddMinChi2ToAnyPVMode = BPHY8cf.AddMinChi2ToAnyPVMode, + PrimaryVertexContainerName = BPHY8cf.PVContName, + MinNTracksInPV = BPHY8cf.minNTracksInPV, + PVTypesToConsider = BPHY8cf.MinChi2ToAnyPVTypes, + DoVertexType = BPHY8cf.doVertexType) + +######################## duplication for debugging only ####################### + +if BPHY8cf.addMucalcMassForDebug: + # a) for Bsmumu + if "Bsmumu" in BPHY8cf.doChannels: + # augment B(s)->mumu candidates + BPHY8_MuMassTools["Bsmumu2"] = DerivationFramework__BPhysAddMuonBasedInvMass( + name = "BPHY8_MuMass_Bsmumu2", + BranchPrefix = "Bsmumu2", + OutputLevel = WARNING, + AdjustToMuonKinematics = False, + VertexContainerName = BPHY8cf.DerivationName+"DiMuonCandidates", + TrkMasses = [BPHY8cf.GlobalMuonMass, BPHY8cf.GlobalMuonMass], + AddMinChi2ToAnyPVMode = BPHY8cf.AddMinChi2ToAnyPVMode, + PrimaryVertexContainerName = BPHY8cf.PVContName, + MinNTracksInPV = BPHY8cf.minNTracksInPV, + PVTypesToConsider = BPHY8cf.MinChi2ToAnyPVTypes, + DoVertexType = BPHY8cf.doVertexType) + # b) for BJpsiK and 
BsJpsiPhi retain the Jpsi + if [i for i in BPHY8cf.doChannels if i in ["BJpsiK", "BsJpsiPhi"]]: + BPHY8_MuMassTools["Jpsimumu2"] = DerivationFramework__BPhysAddMuonBasedInvMass( + name = "BPHY8_MuMass_Jpsimumu2", + BranchPrefix = "Jpsimumu2", + OutputLevel = WARNING, + AdjustToMuonKinematics = False, + VertexContainerName = BPHY8cf.DerivationName+"DiMuonCandidates", + TrkMasses = [BPHY8cf.GlobalMuonMass, BPHY8cf.GlobalMuonMass], + AddMinChi2ToAnyPVMode = BPHY8cf.AddMinChi2ToAnyPVMode, + PrimaryVertexContainerName = BPHY8cf.PVContName, + MinNTracksInPV = BPHY8cf.minNTracksInPV, + PVTypesToConsider = BPHY8cf.MinChi2ToAnyPVTypes, + DoVertexType = BPHY8cf.doVertexType) + # c) for BJpsiK + if "BJpsiK" in BPHY8cf.doChannels: + # augment B+/- ->JpsiK+/- candidates + BPHY8_MuMassTools["BJpsiK2"] = DerivationFramework__BPhysAddMuonBasedInvMass( + name = "BPHY8_MuMass_BJpsiK2", + BranchPrefix = "BJpsiK2", + OutputLevel = WARNING, + AdjustToMuonKinematics = False, + VertexContainerName = BPHY8cf.DerivationName+"BJpsiKCandidates", + TrkMasses = [BPHY8cf.GlobalMuonMass, BPHY8cf.GlobalMuonMass, BPHY8cf.GlobalKaonMass], + AddMinChi2ToAnyPVMode = BPHY8cf.AddMinChi2ToAnyPVMode, + PrimaryVertexContainerName = BPHY8cf.PVContName, + MinNTracksInPV = BPHY8cf.minNTracksInPV, + PVTypesToConsider = BPHY8cf.MinChi2ToAnyPVTypes, + DoVertexType = BPHY8cf.doVertexType) + # d) for BsJpsiPhi + if "BsJpsiPhi" in BPHY8cf.doChannels: + # augment Bs ->JpsiPhi candidates + BPHY8_MuMassTools["BsJpsiPhi2"] = DerivationFramework__BPhysAddMuonBasedInvMass( + name = "BPHY8_MuMass_BsJpsiPhi2", + BranchPrefix = "BsJpsiPhi2", + OutputLevel = WARNING, + AdjustToMuonKinematics = False, + VertexContainerName = BPHY8cf.DerivationName+"BsJpsiPhiCandidates", + TrkMasses = [BPHY8cf.GlobalMuonMass, BPHY8cf.GlobalMuonMass, BPHY8cf.GlobalKaonMass, BPHY8cf.GlobalKaonMass], + AddMinChi2ToAnyPVMode = BPHY8cf.AddMinChi2ToAnyPVMode, + PrimaryVertexContainerName = BPHY8cf.PVContName, + MinNTracksInPV = 
BPHY8cf.minNTracksInPV, + PVTypesToConsider = BPHY8cf.MinChi2ToAnyPVTypes, + DoVertexType = BPHY8cf.doVertexType) + # e) for BJpsiPi + if "BJpsiPi" in BPHY8cf.doChannels: + # augment B+/- ->JpsiPi+/- candidates + BPHY8_MuMassTools["BJpsiPi2"] = DerivationFramework__BPhysAddMuonBasedInvMass( + name = "BPHY8_MuMass_BJpsiPi2", + BranchPrefix = "BJpsiPi2", + OutputLevel = WARNING, + AdjustToMuonKinematics = False, + VertexContainerName = BPHY8cf.DerivationName+"BJpsiPiCandidates", + TrkMasses = [BPHY8cf.GlobalMuonMass, BPHY8cf.GlobalMuonMass, BPHY8cf.GlobalPionMass], + AddMinChi2ToAnyPVMode = BPHY8cf.AddMinChi2ToAnyPVMode, + PrimaryVertexContainerName = BPHY8cf.PVContName, + MinNTracksInPV = BPHY8cf.minNTracksInPV, + PVTypesToConsider = BPHY8cf.MinChi2ToAnyPVTypes, + DoVertexType = BPHY8cf.doVertexType) + # f) for Bhh + if "Bhh" in BPHY8cf.doChannels: + # augment B->hh candidates + BPHY8_MuMassTools["Bhh2"] = DerivationFramework__BPhysAddMuonBasedInvMass( + name = "BPHY8_MuMass_Bhh2", + BranchPrefix = "Bhh2", + OutputLevel = WARNING, + AdjustToMuonKinematics = False, + VertexContainerName = BPHY8cf.DerivationName+"DiMuonCandidates", + TrkMasses = [BPHY8cf.GlobalMuonMass, BPHY8cf.GlobalMuonMass], + AddMinChi2ToAnyPVMode = BPHY8cf.AddMinChi2ToAnyPVMode, + PrimaryVertexContainerName = BPHY8cf.PVContName, + MinNTracksInPV = BPHY8cf.minNTracksInPV, + PVTypesToConsider = BPHY8cf.MinChi2ToAnyPVTypes, + DoVertexType = BPHY8cf.doVertexType) + +######################## duplication for debugging only ####################### + +ToolSvc += list(BPHY8_MuMassTools.values()) +for BPHY8_name in list(BPHY8_MuMassTools.keys()): + print(BPHY8_MuMassTools[BPHY8_name]) + pprint(BPHY8_MuMassTools[BPHY8_name].properties()) + +#-------------------------------------------------------------------- +# Augmentation of vertices by vertex track isolation values, +# closest track information and muon isolation values. 
+# +# First: Set up track selections +# +from InDetTrackSelectionTool.InDetTrackSelectionToolConf \ + import InDet__InDetTrackSelectionTool + +# a) for BVertexTrackIsoTool +BPHY8_IsoTrkSelTools = OrderedDict() +for i in range(len(BPHY8cf.IsoTrackCategoryName)): + BPHY8_name = BPHY8cf.DerivationName+"_iso_"+BPHY8cf.IsoTrackCategoryName[i] + BPHY8_Tool = InDet__InDetTrackSelectionTool( + name = BPHY8_name, + OutputLevel = INFO + ) + if BPHY8cf.IsoTrackCutLevel[i] != "Custom" : + BPHY8_Tool.CutLevel = BPHY8cf.IsoTrackCutLevel[i] + if BPHY8cf.IsoTrackPtCut[i] > -1. : + BPHY8_Tool.minPt = BPHY8cf.IsoTrackPtCut[i] + if BPHY8cf.IsoTrackEtaCut[i] > -1. : + BPHY8_Tool.maxAbsEta = BPHY8cf.IsoTrackEtaCut[i] + if BPHY8cf.IsoTrackPixelHits[i] > -1 : + BPHY8_Tool.minNPixelHits = BPHY8cf.IsoTrackPixelHits[i] + if BPHY8cf.IsoTrackSCTHits[i] > -1 : + BPHY8_Tool.minNSctHits = BPHY8cf.IsoTrackSCTHits[i] + if BPHY8cf.IsoTrackIBLHits[i] > -1 : + BPHY8_Tool.minNInnermostLayerHits = BPHY8cf.IsoTrackIBLHits[i] + if BPHY8cf.IsoTrackbLayerHits[i] > -1 : + BPHY8_Tool.minNNextToInnermostLayerHits = BPHY8cf.IsoTrackbLayerHits[i] + BPHY8_IsoTrkSelTools[BPHY8_name] = BPHY8_Tool + +ToolSvc += list(BPHY8_IsoTrkSelTools.values()) +for BPHY8_name in list(BPHY8_IsoTrkSelTools.keys()): + print(BPHY8_IsoTrkSelTools[BPHY8_name]) + pprint(BPHY8_IsoTrkSelTools[BPHY8_name].properties()) + +# b) for BMuonTrackIsoTool +BPHY8_MuIsoTrkSelTools = OrderedDict() +for i in range(len(BPHY8cf.MuIsoTrackCategoryName)): + BPHY8_name = BPHY8cf.DerivationName+"_muiso_" + \ + BPHY8cf.MuIsoTrackCategoryName[i] + BPHY8_Tool = InDet__InDetTrackSelectionTool( + name = BPHY8_name, + OutputLevel = INFO + ) + if BPHY8cf.MuIsoTrackCutLevel[i] != "Custom" : + BPHY8_Tool.CutLevel = BPHY8cf.MuIsoTrackCutLevel[i] + if BPHY8cf.MuIsoTrackPtCut[i] > -1. : + BPHY8_Tool.minPt = BPHY8cf.MuIsoTrackPtCut[i] + if BPHY8cf.MuIsoTrackEtaCut[i] > -1. 
: + BPHY8_Tool.maxAbsEta = BPHY8cf.MuIsoTrackEtaCut[i] + if BPHY8cf.MuIsoTrackPixelHits[i] > -1 : + BPHY8_Tool.minNPixelHits = BPHY8cf.MuIsoTrackPixelHits[i] + if BPHY8cf.MuIsoTrackSCTHits[i] > -1 : + BPHY8_Tool.minNSctHits = BPHY8cf.MuIsoTrackSCTHits[i] + if BPHY8cf.MuIsoTrackIBLHits[i] > -1 : + BPHY8_Tool.minNInnermostLayerHits = BPHY8cf.MuIsoTrackIBLHits[i] + if BPHY8cf.MuIsoTrackbLayerHits[i] > -1 : + BPHY8_Tool.minNNextToInnermostLayerHits \ + = BPHY8cf.MuIsoTrackbLayerHits[i] + BPHY8_MuIsoTrkSelTools[BPHY8_name] = BPHY8_Tool + +ToolSvc += list(BPHY8_MuIsoTrkSelTools.values()) +for BPHY8_name in list(BPHY8_MuIsoTrkSelTools.keys()): + print(BPHY8_MuIsoTrkSelTools[BPHY8_name]) + pprint(BPHY8_MuIsoTrkSelTools[BPHY8_name].properties()) + +# c) for ClosestTrackTool +BPHY8_CtTrkSelTools = OrderedDict() +for i in range(len(BPHY8cf.CloseTrackCategoryName)): + BPHY8_name = BPHY8cf.DerivationName+"_ct_"+BPHY8cf.CloseTrackCategoryName[i] + BPHY8_Tool = InDet__InDetTrackSelectionTool( + name = BPHY8_name, + OutputLevel = INFO + ) + if BPHY8cf.CloseTrackCutLevel[i] != "Custom" : + BPHY8_Tool.CutLevel = BPHY8cf.CloseTrackCutLevel[i] + if BPHY8cf.CloseTrackPtCut[i] > -1. : + BPHY8_Tool.minPt = BPHY8cf.CloseTrackPtCut[i] + if BPHY8cf.CloseTrackEtaCut[i] > -1. 
: + BPHY8_Tool.maxAbsEta = BPHY8cf.CloseTrackEtaCut[i] + if BPHY8cf.CloseTrackPixelHits[i] > -1 : + BPHY8_Tool.minNPixelHits = BPHY8cf.CloseTrackPixelHits[i] + if BPHY8cf.CloseTrackSCTHits[i] > -1 : + BPHY8_Tool.minNSctHits = BPHY8cf.CloseTrackSCTHits[i] + if BPHY8cf.CloseTrackIBLHits[i] > -1 : + BPHY8_Tool.minNInnermostLayerHits = BPHY8cf.CloseTrackIBLHits[i] + if BPHY8cf.CloseTrackbLayerHits[i] > -1 : + BPHY8_Tool.minNNextToInnermostLayerHits \ + = BPHY8cf.CloseTrackbLayerHits[i] + BPHY8_CtTrkSelTools[BPHY8_name] = BPHY8_Tool + +ToolSvc += list(BPHY8_CtTrkSelTools.values()) +for BPHY8_name in list(BPHY8_CtTrkSelTools.keys()): + print(BPHY8_CtTrkSelTools[BPHY8_name]) + pprint(BPHY8_CtTrkSelTools[BPHY8_name].properties()) + +# +# Step 1.5: Set up track vertex assocation tool +# +from TrackVertexAssociationTool.TrackVertexAssociationToolConf \ + import CP__TrackVertexAssociationTool + +BPHY8_TvaTools = OrderedDict() + +# a) for BVertexTrackIsoTool +BPHY8_TvaTools["TrackVtxIsoTva"] = CP__TrackVertexAssociationTool( + name = BPHY8cf.DerivationName+"_VtxIsoTvaTool", + WorkingPoint = BPHY8cf.IsoTvaWorkingPoint, + OutputLevel = WARNING) + +# b) for BMuonTrackIsoTool +BPHY8_TvaTools["TrackMuonIsoTva"] = CP__TrackVertexAssociationTool( + name = BPHY8cf.DerivationName+"_MuonIsoTvaTool", + WorkingPoint = BPHY8cf.MuIsoTvaWorkingPoint, + OutputLevel = WARNING) + +# c) for ClosestTrackTool +BPHY8_TvaTools["TrackVtxCtTva"] = CP__TrackVertexAssociationTool( + name = BPHY8cf.DerivationName+"_VtxCtTvaTool", + WorkingPoint = BPHY8cf.CloseTrackTvaWorkingPoint, + OutputLevel = WARNING) + +# attach to ToolSvc +ToolSvc += list(BPHY8_TvaTools.values()) +for BPHY8_name in list(BPHY8_TvaTools.keys()): + print(BPHY8_TvaTools[BPHY8_name]) + pprint(BPHY8_TvaTools[BPHY8_name].properties()) + +# +# Second: Set up the B candidate vertex container arrays +# +BPHY8cf.VtxContNames = []; +BPHY8cf.RefPVContNames = []; +BPHY8cf.BranchPrefixes = []; +if [i for i in BPHY8cf.doChannels \ + if i in 
["Bsmumu", "BJpsiK", "BsJpsiPhi", "BJpsiPi", "Bhh"]]: + BPHY8cf.VtxContNames += [ BPHY8cf.DerivationName+"DiMuonCandidates" ] + BPHY8cf.RefPVContNames += [ BPHY8cf.DerivationName + +"DiMuonRefittedPrimaryVertices" ] + BPHY8cf.BranchPrefixes += [ "DiMuon" ]; +if "BJpsiK" in BPHY8cf.doChannels: + BPHY8cf.VtxContNames += [ BPHY8cf.DerivationName+"BJpsiKCandidates" ] + BPHY8cf.RefPVContNames += [ BPHY8cf.DerivationName + +"BJpsiKRefittedPrimaryVertices" ] + BPHY8cf.BranchPrefixes += [ "BJpsiK" ]; +if "BsJpsiPhi" in BPHY8cf.doChannels: + BPHY8cf.VtxContNames += [ BPHY8cf.DerivationName+"BsJpsiPhiCandidates" ] + BPHY8cf.RefPVContNames += [ BPHY8cf.DerivationName + +"BsJpsiPhiRefittedPrimaryVertices" ] + BPHY8cf.BranchPrefixes += [ "BsJpsiPhi" ]; +if "BJpsiPi" in BPHY8cf.doChannels: + BPHY8cf.VtxContNames += [ BPHY8cf.DerivationName+"BJpsiPiCandidates" ] + BPHY8cf.RefPVContNames += [ BPHY8cf.DerivationName + +"BJpsiPiRefittedPrimaryVertices" ] + BPHY8cf.BranchPrefixes += [ "BJpsiPi" ]; + +# +# Third: Set up the real tools +# +BPHY8_IsoTools = OrderedDict() + +# a) BVertexTrackIsoTool +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf \ + import DerivationFramework__BVertexTrackIsoTool + +BPHY8_IsoTools["TrackVtxIso"] = DerivationFramework__BVertexTrackIsoTool( + name = "BPHY8_VtxIsoTool", + BranchPrefixes = BPHY8cf.BranchPrefixes, + BranchBaseName = "iso", + OutputLevel = INFO, + VertexContainerNames = BPHY8cf.VtxContNames, + RefPVContainerNames = BPHY8cf.RefPVContNames, + TrackParticleContainerName = BPHY8cf.TrkPartContName, + PVContainerName = BPHY8cf.PVContName, + PVTypesToConsider = BPHY8cf.MinChi2ToAnyPVTypes, + TrackSelectionTools = list(BPHY8_IsoTrkSelTools.values()), + TVATool = BPHY8_TvaTools["TrackVtxIsoTva"], + IsolationConeSizes = BPHY8cf.IsolationConeSizes, + IsoTrkImpLogChi2Max = BPHY8cf.IsoTrkImpLogChi2Max, + IsoDoTrkImpLogChi2Cut = BPHY8cf.IsoDoTrkImpLogChi2Cut, + DoVertexType = BPHY8cf.doVertexType, + UseTrackTypes = BPHY8cf.useIsoTrackTypes, + 
UseOptimizedAlgo = BPHY8cf.IsoUseOptimizedAlgo, + DebugTrackTypes = BPHY8cf.DebugTrackTypes, + DebugTracksInEvents = []) + +# b) BMuonTrackIsoTool +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf \ + import DerivationFramework__BMuonTrackIsoTool + +BPHY8_IsoTools["TrackMuonIso"] = DerivationFramework__BMuonTrackIsoTool( + name = "BPHY8_MuonIsoTool", + BranchPrefixes = BPHY8cf.BranchPrefixes, + BranchBaseName = "muiso", + OutputLevel = INFO, + VertexContainerNames = BPHY8cf.VtxContNames, + RefPVContainerNames = BPHY8cf.RefPVContNames, + TrackParticleContainerName = BPHY8cf.TrkPartContName, + PVContainerName = BPHY8cf.PVContName, + PVTypesToConsider = BPHY8cf.MinChi2ToAnyPVTypes, + MuonContainerName = BPHY8cf.UsedMuonCollection, + TrackSelectionTools = list(BPHY8_MuIsoTrkSelTools.values()), + TVATool = BPHY8_TvaTools["TrackMuonIsoTva"], + IsolationConeSizes = BPHY8cf.MuIsolationConeSizes, + IsoTrkImpLogChi2Max = BPHY8cf.MuIsoTrkImpLogChi2Max, + IsoDoTrkImpLogChi2Cut = BPHY8cf.MuIsoDoTrkImpLogChi2Cut, + DoVertexType = BPHY8cf.doVertexType, + UseTrackTypes = BPHY8cf.useMuIsoTrackTypes, + DebugTrackTypes = BPHY8cf.DebugTrackTypes, + DebugTracksInEvents = []) + +# c) BVertexClosestTrackTool +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf \ + import DerivationFramework__BVertexClosestTrackTool + +BPHY8_IsoTools["TrackVtxCt"] = DerivationFramework__BVertexClosestTrackTool( + name = "BPHY8_VtxClosestTrkTool", + BranchPrefixes = BPHY8cf.BranchPrefixes, + BranchBaseName = "ct", + OutputLevel = INFO, + VertexContainerNames = BPHY8cf.VtxContNames, + RefPVContainerNames = BPHY8cf.RefPVContNames, + TrackParticleContainerName = BPHY8cf.TrkPartContName, + PVContainerName = BPHY8cf.PVContName, + TrackSelectionTools = list(BPHY8_CtTrkSelTools.values()), + TVATool = BPHY8_TvaTools["TrackVtxCtTva"], + PVTypesToConsider = BPHY8cf.MinChi2ToAnyPVTypes, + DoVertexType = BPHY8cf.doVertexType, + UseTrackTypes = BPHY8cf.useCloseTrackTypes, + CloseTrackChi2SetName = 
BPHY8cf.CloseTrackChi2SetName, + CloseTrackCorrChi2 = BPHY8cf.CloseTrackCorrChi2, + CloseTrackMinDCAin3D = BPHY8cf.CloseTrackMinDCAin3D, + CloseTrackMaxLogChi2 = BPHY8cf.CloseTrackMaxLogChi2, + NCloseTrackMaxLogChi2 = BPHY8cf.NCloseTrackMaxLogChi2, + DebugTrackTypes = BPHY8cf.DebugTrackTypes, + DebugTracksInEvents = []) + +# +# Fourth: Add track-to-vertex map debugging +# +BPHY8_TtvmTools = OrderedDict(); + +# a) configure BPhysTrackVertexMapTools +# Configure only if not 0 events requested +if BPHY8cf.DebugTrkToVtxMaxEvents != 0 : + for BPHY8_prefix, BPHY8_SVcont, BPHY8_refPVcont in \ + zip(BPHY8cf.BranchPrefixes, BPHY8cf.VtxContNames, + BPHY8cf.RefPVContNames): + BPHY8_hypos = BPHY8_prefix + if BPHY8_hypos == "DiMuon": + BPHY8_hypos += "|Bsmumu|Jpsimumu" + BPHY8_TtvmTools[BPHY8_prefix] = CfgMgr.xAOD__BPhysTrackVertexMapTool( + "BPHY8_ttvm_"+BPHY8_prefix, + OutputLevel = INFO, + VertexContainerName = BPHY8_SVcont, + RefPVContainerName = BPHY8_refPVcont, + PVContainerName = BPHY8cf.PVContName, + TrackParticleContainerName = BPHY8cf.TrkPartContName, + DebugTrkToVtxMaxEvents = BPHY8cf.DebugTrkToVtxMaxEvents, + DumpPrefix = "TTV2> ", + HypoName = BPHY8_hypos ) + + ToolSvc += list(BPHY8_TtvmTools.values()) + for BPHY8_name in list(BPHY8_TtvmTools.keys()): + print(BPHY8_TtvmTools[BPHY8_name]) + pprint(BPHY8_TtvmTools[BPHY8_name].properties()) + +# b) wrap into logger algorithm +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf \ + import DerivationFramework__BTrackVertexMapLogger + +# Configure only if not 0 events requested +if BPHY8cf.DebugTrkToVtxMaxEvents != 0 : + BPHY8_IsoTools["TvmLogger"] = DerivationFramework__BTrackVertexMapLogger( + name = "BPHY8_TrackVertexMapLogger", + OutputLevel = INFO, + TrackVertexMapTools = list(BPHY8_TtvmTools.values()), + Enable = True) + +# +# Fifth: Attach to ToolSvc +# +ToolSvc += list(BPHY8_IsoTools.values()) +for BPHY8_name in list(BPHY8_IsoTools.keys()): + print(BPHY8_IsoTools[BPHY8_name]) + 
pprint(BPHY8_IsoTools[BPHY8_name].properties()) + +#-------------------------------------------------------------------- +# Record the original counts for primary vertices and tracks +#-------------------------------------------------------------------- +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf \ + import DerivationFramework__AugOriginalCounts +BPHY8_AugOriginalCounts = DerivationFramework__AugOriginalCounts( + name = "BPHY8_AugOriginalCounts", + VertexContainer = BPHY8cf.PVContName, + TrackContainer = BPHY8cf.TrkPartContName, + AddPVCountsByType = True, + AddNTracksToPVs = True, + AddSqrtPt2SumToPVs = True) + +ToolSvc += BPHY8_AugOriginalCounts +pprint(BPHY8_AugOriginalCounts.properties()) + +#-------------------------------------------------------------------- +# Setup the vertex selection and augmentation tool(s). These tools decorate +# the vertices with variables that depend on the vertex mass hypothesis, +# e.g. invariant mass, proper decay time, etc. +# Property HypothesisName is used as a prefix for these decorations. +# They also perform tighter selection, flagging the vertecis that passed. +# The flag is a Char_t branch named "passed_"+HypothesisName. It is used +# later by the "SelectEvent" and "Thin_vtxTrk" tools to determine which +# events and candidates should be kept in the output stream. +# Multiple instances of the Select_* tools can be used on a single input +# collection as long as they use different "HypothesisName" flags. 
+ +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf \ + import DerivationFramework__Select_Bmumu + +BPHY8_SelectTools = OrderedDict() +# a) for Bsmumu +if "Bsmumu" in BPHY8cf.doChannels: + # augment and select B(s)->mumu candidates + BPHY8_SelectTools["Bsmumu"] = DerivationFramework__Select_Bmumu( + name = "BPHY8_Select_Bsmumu", + HypothesisName = "Bsmumu", + InputVtxContainerName = BPHY8cf.DerivationName+"DiMuonCandidates", + TrkMasses = [BPHY8cf.GlobalMuonMass, BPHY8cf.GlobalMuonMass], + VtxMassHypo = BPHY8cf.GlobalBsMass, + MassMin = BPHY8cf.GlobalBMassLowerCut, + MassMax = BPHY8cf.GlobalBMassUpperCut, + Chi2Max = BPHY8cf.Chi2Cut2Prong, + DoVertexType = BPHY8cf.doVertexType, + Do3d = BPHY8cf.do3dProperTime, + BlindMassMin = BPHY8cf.GlobalBlindLowerCut, + BlindMassMax = BPHY8cf.GlobalBlindUpperCut, + DoBlinding = BPHY8cf.doBmumuBlinding, + DoCutBlinded = BPHY8cf.doCutBlinded, + BlindOnlyAllMuonsTight = BPHY8cf.blindOnlyAllMuonsTight, + UseMuCalcMass = BPHY8cf.useMuCalcMass, + OutputLevel = WARNING) +# b) for BJpsiK and BsJpsiPhi retain the Jpsi +if [i for i in BPHY8cf.doChannels if i in ["BJpsiK", "BsJpsiPhi"]]: + BPHY8_SelectTools["Jpsimumu"] = DerivationFramework__Select_Bmumu( + name = "BPHY8_Select_Jpsimumu", + HypothesisName = "Jpsimumu", + InputVtxContainerName = BPHY8cf.DerivationName+"DiMuonCandidates", + TrkMasses = [BPHY8cf.GlobalMuonMass, BPHY8cf.GlobalMuonMass], + VtxMassHypo = BPHY8cf.GlobalJpsiMass, + MassMin = BPHY8cf.GlobalJpsiMassLowerCut, + MassMax = BPHY8cf.GlobalJpsiMassUpperCut, + Chi2Max = BPHY8cf.Chi2Cut2Prong, + DoVertexType = BPHY8cf.doVertexType, + Do3d = BPHY8cf.do3dProperTime, + UseMuCalcMass = BPHY8cf.useMuCalcMass, + OutputLevel = WARNING) +# c) for BJpsiK +if "BJpsiK" in BPHY8cf.doChannels: + # augment and select B+/- ->JpsiK+/- candidates + BPHY8_SelectTools["BJpsiK"] = DerivationFramework__Select_Bmumu( + name = "BPHY8_Select_BJpsiK", + HypothesisName = "BJpsiK", + InputVtxContainerName = 
BPHY8cf.DerivationName+"BJpsiKCandidates", + TrkMasses = [BPHY8cf.GlobalMuonMass, BPHY8cf.GlobalMuonMass, BPHY8cf.GlobalKaonMass], + VtxMassHypo = BPHY8cf.GlobalBplusMass, + MassMin = BPHY8cf.GlobalBMassLowerCut, + MassMax = BPHY8cf.GlobalBMassUpperCut, + Chi2Max = BPHY8cf.Chi2Cut3Prong, + DoVertexType = BPHY8cf.doVertexType, + Do3d = BPHY8cf.do3dProperTime, + UseMuCalcMass = BPHY8cf.useMuCalcMass, + SubDecVtxContNames = [BPHY8cf.DerivationName+"DiMuonCandidates"], + SubDecVtxHypoCondNames = ["Jpsimumu"], + SubDecVtxHypoFlagNames = ["JpsimumuSubDecay"], + OutputLevel = WARNING) +# d) for BsJpsiPhi +if "BsJpsiPhi" in BPHY8cf.doChannels: + # augment and select Bs ->JpsiPhi candidates + BPHY8_SelectTools["BsJpsiPhi"] = DerivationFramework__Select_Bmumu( + name = "BPHY8_Select_BsJpsiPhi", + HypothesisName = "BsJpsiPhi", + InputVtxContainerName = BPHY8cf.DerivationName+"BsJpsiPhiCandidates", + TrkMasses = [BPHY8cf.GlobalMuonMass, BPHY8cf.GlobalMuonMass, BPHY8cf.GlobalKaonMass, BPHY8cf.GlobalKaonMass], + VtxMassHypo = BPHY8cf.GlobalBsMass, + MassMin = BPHY8cf.GlobalBMassLowerCut, + MassMax = BPHY8cf.GlobalBMassUpperCut, + Chi2Max = BPHY8cf.Chi2Cut4Prong, + DoVertexType = BPHY8cf.doVertexType, + Do3d = BPHY8cf.do3dProperTime, + UseMuCalcMass = BPHY8cf.useMuCalcMass, + SubDecVtxContNames = [BPHY8cf.DerivationName+"DiMuonCandidates"], + SubDecVtxHypoCondNames = ["Jpsimumu"], + SubDecVtxHypoFlagNames = ["JpsimumuSubDecay"], + OutputLevel = WARNING) +# e) for BJpsiPi +if "BJpsiPi" in BPHY8cf.doChannels: + # augment and select B+/- ->JpsiPi+/- candidates + BPHY8_SelectTools["BJpsiPi"] = DerivationFramework__Select_Bmumu( + name = "BPHY8_Select_BJpsiPi", + HypothesisName = "BJpsiPi", + InputVtxContainerName = BPHY8cf.DerivationName+"BJpsiPiCandidates", + TrkMasses = [BPHY8cf.GlobalMuonMass, BPHY8cf.GlobalMuonMass, BPHY8cf.GlobalPionMass], + VtxMassHypo = BPHY8cf.GlobalBplusMass, + MassMin = BPHY8cf.GlobalBMassLowerCut, + MassMax = BPHY8cf.GlobalBMassUpperCut, + Chi2Max = 
BPHY8cf.Chi2Cut3Prong, + DoVertexType = BPHY8cf.doVertexType, + Do3d = BPHY8cf.do3dProperTime, + UseMuCalcMass = BPHY8cf.useMuCalcMass, + SubDecVtxContNames = [BPHY8cf.DerivationName+"DiMuonCandidates"], + SubDecVtxHypoCondNames = ["Jpsimumu"], + SubDecVtxHypoFlagNames = ["JpsimumuSubDecay"], + OutputLevel = WARNING) +# f) for Bhh +if "Bhh" in BPHY8cf.doChannels: + # augment and select B->hh candidates + BPHY8_SelectTools["Bhh"] = DerivationFramework__Select_Bmumu( + name = "BPHY8_Select_Bhh", + HypothesisName = "Bhh", + InputVtxContainerName = BPHY8cf.DerivationName+"DiMuonCandidates", + TrkMasses = [BPHY8cf.GlobalMuonMass, BPHY8cf.GlobalMuonMass], + VtxMassHypo = BPHY8cf.GlobalBsMass, + MassMin = BPHY8cf.GlobalBMassLowerCut, + MassMax = BPHY8cf.GlobalBMassUpperCut, + Chi2Max = BPHY8cf.Chi2Cut2Prong, + DoVertexType = BPHY8cf.doVertexType, + Do3d = BPHY8cf.do3dProperTime, + UseMuCalcMass = BPHY8cf.useMuCalcMass, + OutputLevel = WARNING) + +ToolSvc += list(BPHY8_SelectTools.values()) +for BPHY8_name in list(BPHY8_SelectTools.keys()): + print(BPHY8_SelectTools[BPHY8_name]) + pprint(BPHY8_SelectTools[BPHY8_name].properties()) + +#-------------------------------------------------------------------- +# Setup the vertex variable blinding tools. +# These tools are only used by the Bsmumu channel in case +# blinding is enabled. 
+ +BPHY8_BlindingTools = OrderedDict() +BPHY8_BlinderTools = OrderedDict() + +if BPHY8cf.doBmumuBlinding and not BPHY8cf.doCutBlinded: + # BlindingTools + from BPhysTools.BPhysToolsConf import xAOD__BPhysBlindingTool + # 1) for Bsmumu + if "Bsmumu" in BPHY8cf.doChannels : + # setup blinding tool + BPHY8_BlindingTools["Bsmumu"] = xAOD__BPhysBlindingTool( + name = "BPHY8_BlindingTool_Bsmumu", + VertexContainerName = BPHY8cf.DerivationName+"DiMuonCandidates", + VarToBlindNames = BPHY8cf.BlindedVars, + BlindingFlag = BPHY8cf.BlindingFlag, + NegativeSigns = [True, True], + BlindingKey = BPHY8cf.BlindingKey, + OutputLevel = INFO) + + ToolSvc += list(BPHY8_BlindingTools.values()) + for BPHY8_name in list(BPHY8_BlindingTools.keys()): + print(BPHY8_BlindingTools[BPHY8_name]) + pprint(BPHY8_BlindingTools[BPHY8_name].properties()) + + # Blinders + from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf \ + import DerivationFramework__BPhysVarBlinder + # a2) for Bsmumu + if "Bsmumu" in BPHY8cf.doChannels : + # blind mass values for B(s)->mumu candidates + BPHY8_BlinderTools["Bsmumu"] = DerivationFramework__BPhysVarBlinder( + name = "BPHY8_Blinder_Bsmumu", + BlindingTool = BPHY8_BlindingTools["Bsmumu"], + EnableBlinding = True, + OutputLevel = INFO) + + ToolSvc += list(BPHY8_BlinderTools.values()) + for BPHY8_name in list(BPHY8_BlinderTools.keys()): + print(BPHY8_BlinderTools[BPHY8_name]) + pprint(BPHY8_BlinderTools[BPHY8_name].properties()) + +#==================================================================== +# Skimming tool to select only events with the correct vertices +#==================================================================== +#-------------------------------------------------------------------- +# Select the event. We only want to keep events that contain certain +# vertices which passed certain selection. 
This is specified by the +# "SelectionExpression" property, which contains the expression in the +# following format: +# +# "ContainerName.passed_HypoName > count" +# +# where "ContainerName" is output container form some Reco_* tool, +# "HypoName" is the hypothesis name setup in some "Select_*" tool and +# "count" is the number of candidates passing the selection you want to keep. +# + +# Build expression depending on "select tools" used: +# If any of them marked any candidate passed, add expression for it. +BPHY8_expressions = [] +for BPHY8_tool in list(BPHY8_SelectTools.values()): + BPHY8_passName = BPHY8_tool.HypothesisName + if BPHY8_tool.HypothesisName == "Jpsimumu" and BPHY8cf.thinLevel > 0: + BPHY8_passName = "JpsimumuSubDecay" + BPHY8_expressions += [ "count(%s.passed_%s) > 0" % \ + (BPHY8_tool.InputVtxContainerName, + BPHY8_passName) ] +BPHY8cf.SelExpression = " || ".join(BPHY8_expressions) + +from DerivationFrameworkTools.DerivationFrameworkToolsConf \ + import DerivationFramework__xAODStringSkimmingTool +BPHY8_SkimmingTool = DerivationFramework__xAODStringSkimmingTool( + name = "BPHY8_SelectEvent", + expression = BPHY8cf.SelExpression) + +ToolSvc += BPHY8_SkimmingTool +print(BPHY8_SkimmingTool) +pprint(BPHY8_SkimmingTool.properties()) + +# Check for global ToolSvc: +print(">>> Checking ToolSvc tools: <<<") +for BPHY8_i in ToolSvc: + print(BPHY8_i) +print(">>> End of ToolSvc tools. 
<<<") + +#==================================================================== +# SET UP STREAM +#==================================================================== +BPHY8_streamName = derivationFlags.WriteDAOD_BPHY8Stream.StreamName +BPHY8_fileName = buildFileName( derivationFlags.WriteDAOD_BPHY8Stream ) +BPHY8Stream = MSMgr.NewPoolRootStream(BPHY8_streamName, BPHY8_fileName ) +BPHY8Stream.AcceptAlgs(["BPHY8Kernel"]) +# +# Special lines for thinning +# Thinning service name must match the one passed to the thinning tools +## +## NOTE: We use the ThinningHelper which already instantiates a ThinningSvc +## +## from AthenaServices.Configurables import ThinningSvc, createThinningSvc +## BPHY8_augStream = MSMgr.GetStream( BPHY8_streamName ) +## BPHY8_evtStream = BPHY8_augStream.GetEventStream() +## svcMgr += createThinningSvc(svcName=BPHY8cf.DerivationName+"ThinningSvc", +## outStreams=[BPHY8_evtStream] ) + +# Additional metadata output +BPHY8Stream.AddMetaDataItem([ "xAOD::FileMetaData#%s*" % + BPHY8cf.DerivationName, + "xAOD::FileMetaDataAuxInfo#%s*Aux." % + BPHY8cf.DerivationName] ) + +#==================================================================== +# Thinning Helper and various thinning tools +#==================================================================== +#-------------------------------------------------------------------- +# Setup the thinning helper, only tool able to perform thinning +# of trigger navigation information. 
+# +from DerivationFrameworkCore.ThinningHelper import ThinningHelper +BPHY8ThinningHelper = ThinningHelper( BPHY8cf.DerivationName+"ThinningHelper" ) + +if BPHY8cf.doTrigNavThinning and BPHY8cf.doTriggerInfo: + BPHY8ThinningHelper.TriggerChains = '|'.join(BPHY8cf.TrigNavThinList) + +BPHY8ThinningHelper.AppendToStream( BPHY8Stream ) + +#-------------------------------------------------------------------- +# Thinning tools +BPHY8ThinningTools = [] + +# +# MC Truth Thinning +# +if BPHY8cf.isSimulation: + # + # PDG-ID list of truth decay particles whose decay chains are to be recorded + # B mesons + BPHY8cf.TruthDecayParents = [511, 521, 10511, 10521, 513, 523, 10513, 10523, 20513, 20523, 515, 525, 531, 10531, 533, 10533, 20533, 535, 541, 10541, 543, 10543, 20543, 545] + # b bbar mesons + BPHY8cf.TruthDecayParents += [551,10551,100551,110551,200551,210551,553,10553,20553,30553,100553,110553,120553,130553,200553,210553,220553,300553,9000553,9010553,555,10555,20555,100555,110555,120555,200555,557,100557] + BPHY8cf.TruthDecayParents += [5122,5112,5212,5222,5114,5214,5224,5132,5232,5312,5322,5314,5324,5332,5334,5142,5242,5412,5422,5414,5424,5342,5432,5434,5442,5444,5512,5522,5514,5524,5532,5534,5542,5544,5554] + # Charmed mesons + ## BPHY8cf.TruthDecayParents += [411, 421, 10411, 10421, 413, 423, 10413, 10423, 20413, 20423, 415, 425, 431, 10431, 433, 10433, 20433, 435] + # c cbar mesons + ## BPHY8cf.TruthDecayParents += [441, 10441, 100441, 443, 10443, 20443, 100443, 30443, 9000443, 9010443, 9020443, 445, 100445] + # charmed baryons + ## BPHY8cf.TruthDecayParents += [4122, 4222, 4212, 4112, 4224, 4214, 4114, 4232, 4132, 4322, 4312, 4324, 4314, 4332, 4334, 4412, 4422, 4414, 4424, 4432, 4434, 4444] + + # compose ParticleSelectionString + BPHY8_ParticleSelConds = [] + for BPHY8_pdgid in BPHY8cf.TruthDecayParents: + BPHY8_ParticleSelConds.append("abs(TruthParticles.pdgId) == %d" % + BPHY8_pdgid) + BPHY8_ParticleSelection = " || ".join(BPHY8_ParticleSelConds) + + # Only save 
truth information directly associated with B decays. + # We'll skip the GEANT particles (barcode >= 200000). + from DerivationFrameworkMCTruth.DerivationFrameworkMCTruthConf \ + import DerivationFramework__GenericTruthThinning + BPHY8TruthThinTool = DerivationFramework__GenericTruthThinning( + name = BPHY8cf.DerivationName+"TruthThinTool", + ParticleSelectionString = BPHY8_ParticleSelection, + PreserveGeneratorDescendants = True, + PreserveDescendants = False, + PreserveAncestors = False) + + ToolSvc += BPHY8TruthThinTool + BPHY8ThinningTools.append(BPHY8TruthThinTool) + print(BPHY8TruthThinTool) + pprint(BPHY8TruthThinTool.properties()) + +# +# Vertex thinning +# +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf \ + import DerivationFramework__Thin_vtxTrk + +# Build list of input containers and passed flags depending on +# "select tools" used. +import re +BPHY8_vtxContainers = [] +BPHY8_refPVContainers = [] +BPHY8_passedFlags = [] +for BPHY8_tool in list(BPHY8_SelectTools.values()): + BPHY8_vtxContainers.append(BPHY8_tool.InputVtxContainerName) + BPHY8_refPVContainers.append(re.sub('Candidates$', + 'RefittedPrimaryVertices', + BPHY8_tool.InputVtxContainerName)) + BPHY8_passName = BPHY8_tool.HypothesisName + if BPHY8_tool.HypothesisName == "Jpsimumu" and BPHY8cf.thinLevel > 0: + BPHY8_passName = "JpsimumuSubDecay" + BPHY8_passedFlags.append("passed_%s" % BPHY8_passName) +# +# Use the general Thin_vtxTrk tool to thin the vertex containers only. 
+# +if BPHY8cf.thinLevel < 2: + BPHY8Thin_vtxTrk = DerivationFramework__Thin_vtxTrk( + name = BPHY8cf.DerivationName+"Thin_vtxTrk", + TrackParticleContainerName = BPHY8cf.TrkPartContName, + VertexContainerNames = BPHY8_vtxContainers, + PassFlags = BPHY8_passedFlags, + ThinTracks = False) + ToolSvc += BPHY8Thin_vtxTrk + BPHY8ThinningTools.append(BPHY8Thin_vtxTrk) + print(BPHY8Thin_vtxTrk) + pprint(BPHY8Thin_vtxTrk.properties()) + +# +# Bmumu PV, muon collections and ID track thinnning +# +if BPHY8cf.thinLevel > 1: + from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf \ + import DerivationFramework__BmumuThinningTool + BPHY8BmumuThinningTool = DerivationFramework__BmumuThinningTool( + name = BPHY8cf.DerivationName+"BmumuThinningTool", + TrackParticleContainerName = BPHY8cf.TrkPartContName, + VertexContainerNames = BPHY8_vtxContainers, + VertexPassFlags = BPHY8_passedFlags, + RefPVContainerNames = BPHY8_refPVContainers, + # RefPVContainerNames = BPHY8cf.RefPVContNames, + AlignPassToVertexList = True, + PVContainerName = BPHY8cf.PVContName, + MuonContainerName = BPHY8cf.MuonCollection, + CalibMuonContainerName = BPHY8cf.CalMuonCollection, + MatchCalibratedMuons = (BPHY8cf.useCalibratedMuons > 2), + MarkMatchedMuons = (BPHY8cf.useCalibratedMuons > 2), + MarkMatchedCalMuons = (BPHY8cf.useCalibratedMuons > 1 + and BPHY8cf.useCalibratedMuons < 3), + SyncMatchedMuonsBothWays = True, + AllowFastMuonMaskSync = True, + KeepTracksForMuons = True, + KeepTracksForCalMuons = True, + KeepMuonsForTracks = True, + KeepCalMuonsForTracks = True, + KeepCloseTracks = True, + ThinMuons = (BPHY8cf.thinLevel < 5), + CloseTrackBranchPrefixes = BPHY8cf.BranchPrefixes, + CloseTrackBranchBaseName = BPHY8_IsoTools["TrackVtxCt"].BranchBaseName, + ThinPVs = (BPHY8cf.thinLevel == 2), + ThinRefittedPVs = (BPHY8cf.thinLevel == 2), + ThinTracks = (BPHY8cf.thinLevel < 4), + KeepTracksForSelectedPVs = False, + OutputLevel = INFO) + ToolSvc += BPHY8BmumuThinningTool + 
BPHY8ThinningTools.append(BPHY8BmumuThinningTool) + print(BPHY8BmumuThinningTool) + pprint(BPHY8BmumuThinningTool.properties()) + +#==================================================================== +# CREATE THE DERIVATION KERNEL ALGORITHM AND PASS THE ABOVE TOOLS +#==================================================================== +# IMPORTANT bit. Don't forget to pass the tools to the DerivationKernel! +# If you don't do that, they will not be be executed! +# The name of the kernel (BPHY8Kernel in this case) must be unique to +# this derivation. +# Make use of a AthSequence in order to run the muon calibrations +# beforehand if requested. +from DerivationFrameworkCore.DerivationFrameworkCoreConf import DerivationFramework__DerivationKernel +BPHY8_seq = CfgMgr.AthSequencer("BPHY8Sequence") +DerivationFrameworkJob += BPHY8_seq +BPHY8_seq += list(BPHY8_CalibrationAlgs.values()) + +# required for Soft B tagging +if BPHY8cf.doAddSoftBVertices: + from DerivationFrameworkJetEtMiss.ExtendedJetCommon import replaceAODReducedJets + OutputJets["BPHY8"] = [] + reducedJetList = ["AntiKt4PV0TrackJets"] + replaceAODReducedJets(reducedJetList, BPHY8_seq, "BPHY8") + + from SoftBVrtClusterTool.SoftBVrtConfig import addSoftBVrt + addSoftBVrt(BPHY8_seq,'Loose') + addSoftBVrt(BPHY8_seq,'Medium') + addSoftBVrt(BPHY8_seq,'Tight') + +BPHY8_seq += CfgMgr.DerivationFramework__DerivationKernel( + "BPHY8Kernel", + OutputLevel = INFO, + AugmentationTools = [ BPHY8_MetaDataTool, BPHY8_AugOriginalCounts, + BPHY8_MuonExtrapTool] \ + + list(BPHY8_RecoTools.values()) + list(BPHY8_MuMassTools.values()) \ + + list(BPHY8_IsoTools.values()) \ + + list(BPHY8_SelectTools.values()) \ + + list(BPHY8_BlinderTools.values()), + SkimmingTools = [BPHY8_SkimmingTool], + ThinningTools = BPHY8ThinningTools + ) + +#==================================================================== +# Slimming +#==================================================================== +from DerivationFrameworkCore.SlimmingHelper 
import SlimmingHelper +BPHY8SlimmingHelper = SlimmingHelper("BPHY8SlimmingHelper") +BPHY8_AllVariables = [] +BPHY8_StaticContent = [] +BPHY8_SmartCollections = [] +BPHY8_ExtraVariables = [] + +# Needed for trigger objects +BPHY8SlimmingHelper.IncludeMuonTriggerContent = BPHY8cf.doTriggerInfo +BPHY8SlimmingHelper.IncludeBPhysTriggerContent = BPHY8cf.doTriggerInfo + +# primary vertices +BPHY8_SmartCollections += [BPHY8cf.PVContName] +# +# 2018-03-12: These extra variables are not used by NtupleMaker +# which are removed by using SmartCollection instead of +# AllVariables. +## BPHY8_ExtraVariables += ["%s.covariance" % BPHY8cf.PVContName +## + ".chiSquared.numberDoF.sumPt2" +## + ".trackParticleLinks.trackWeights.neutralWeights"] +# +# 2018-05-04: The covariance matrix is occasionally used by the NtupleMaker +# 2019-11-15: adding original number of tracks and sqrt(sum(pt^2)) +BPHY8_ExtraVariables += ["%s.covariance" % BPHY8cf.PVContName + +".OrigNTracks.OrigSqrtPt2Sum"] + +for BPHY8_reco in BPHY8_recoList: + BPHY8_StaticContent \ + += ["xAOD::VertexContainer#BPHY8"+BPHY8_reco+"RefittedPrimaryVertices"] + BPHY8_StaticContent \ + += ["xAOD::VertexAuxContainer#BPHY8"+BPHY8_reco+"RefittedPrimaryVerticesAux."] + +# combined / extrapolated muon track particles +# (note: for tagged muons there is no extra TrackParticle collection since +# the ID tracks are stored in InDetTrackParticles collection) +BPHY8_AllVariables += ["CombinedMuonTrackParticles"] + +BPHY8_AllVariables += ["ExtrapolatedMuonTrackParticles"] +# TODO: copy smart slimming for calibrated muons. 
+if BPHY8cf.useCalibratedMuons > 1: + BPHY8_AllVariables += [BPHY8cf.CalMuonCollection] + BPHY8SlimmingHelper.AppendToDictionary = { + '%s' % BPHY8cf.CalMuonCollection : 'xAOD::MuonContainer', + '%sAux' % BPHY8cf.CalMuonCollection : 'xAOD::ShallowAuxContainer' } + +# muon container +## AllVariables += [BPHY8cf.MuonCollection] +# smart collection adds info needed for CP tools +BPHY8_SmartCollections += [BPHY8cf.MuonCollection] +BPHY8_ExtraVariables += ["%s.etcone30.etcone40" % BPHY8cf.MuonCollection + +".momentumBalanceSignificance" + +".scatteringCurvatureSignificance" + +".scatteringNeighbourSignificance" + +".msInnerMatchDOF.msInnerMatchChi2" + +".msOuterMatchDOF.msOuterMatchChi2" + +".EnergyLoss.ParamEnergyLoss.MeasEnergyLoss" + +".ET_Core" ] + +# ID track particles +BPHY8_SmartCollections += [BPHY8cf.TrkPartContName] +BPHY8_ExtraVariables += ["%s.vx.vy" % BPHY8cf.TrkPartContName] + +# decay candidates +# we have to disable vxTrackAtVertex branch since it is not xAOD compatible +# also remove not needed isolation and close-track branches from DxAOD +BPHY8_DoVertexTypeStr = ['PV_MIN_Z0_BA'] +BPHY8_IsoBranches = ['iso', 'iso_Ntracks'] +BPHY8_MuIsoBranches = ['muiso', 'muiso_Ntracks', 'muiso+muLink'] +BPHY8_CtBranches = ['ct_DCA', 'ct_DCAError', 'ct_ZCA', 'ct_ZCAError', + 'ct_NTracksChi2','ct_CloseTrack+Link'] +from DerivationFrameworkBPhys.BPhysPyHelpers import BPhysFilterBranches +for BPHY8_name in list(BPHY8_RecoTools.keys()): + BPHY8_StaticContent += ["xAOD::VertexContainer#%s" % + BPHY8_RecoTools[BPHY8_name].OutputVtxContainerName] + BPHY8_StaticContent += ["xAOD::VertexAuxContainer#%sAux." 
% + BPHY8_RecoTools[BPHY8_name].OutputVtxContainerName] + BPHY8_str = "xAOD::VertexAuxContainer#%sAux" % \ + BPHY8_RecoTools[BPHY8_name].OutputVtxContainerName + BPHY8_str += ".-vxTrackAtVertex" + # isolation branches + BPHY8_cones = ["%02d_LC%02dd%01d" % \ + (int(cs*10), int(BPHY8cf.IsoTrkImpLogChi2Max[i]*10), + BPHY8cf.IsoDoTrkImpLogChi2Cut[i]) + for i,cs in enumerate(BPHY8cf.IsolationConeSizes)] + BPHY8_str += BPhysFilterBranches(BPHY8_name, + BPHY8_IsoBranches, + BPHY8cf.IsoIncludes, + BPHY8_DoVertexTypeStr, + BPHY8cf.IsoTrackCategoryName, + BPHY8cf.useIsoTrackTypes, + BPHY8_cones, + False) + # muon isolation branches + BPHY8_cones = ["%02d_LC%02dd%01d" % \ + (int(cs*10), int(BPHY8cf.MuIsoTrkImpLogChi2Max[i]*10), + BPHY8cf.MuIsoDoTrkImpLogChi2Cut[i]) + for i,cs in enumerate(BPHY8cf.MuIsolationConeSizes)] + BPHY8_str += BPhysFilterBranches(BPHY8_name, + BPHY8_MuIsoBranches, + BPHY8cf.MuIsoIncludes, + BPHY8_DoVertexTypeStr, + BPHY8cf.MuIsoTrackCategoryName, + BPHY8cf.useMuIsoTrackTypes, + BPHY8_cones, + False) + # close track branches + BPHY8_str += BPhysFilterBranches(BPHY8_name, + BPHY8_CtBranches, + BPHY8cf.CloseTrackIncludes, + BPHY8_DoVertexTypeStr, + BPHY8cf.CloseTrackCategoryName, + BPHY8cf.useCloseTrackTypes, + BPHY8cf.CloseTrackChi2SetName, + True) + print(("Branches to be removed: %s" % BPHY8_str)) + BPHY8_StaticContent += [ BPHY8_str ] + +# Truth information for MC only +if BPHY8cf.isSimulation: + BPHY8_AllVariables += ["TruthEvents","TruthParticles","TruthVertices"] + +# required for Soft B Tagging +if BPHY8cf.doAddSoftBVertices: + excludedVertexAuxData = "-vxTrackAtVertex.-MvfFitInfo.-isInitialized.-VTAV" + BPHY8_StaticContent += ["xAOD::VertexContainer#" + +"SoftBVrtClusterTool_Tight_Vertices"] + BPHY8_StaticContent += ["xAOD::VertexAuxContainer#" + +"SoftBVrtClusterTool_Tight_VerticesAux." 
+ + excludedVertexAuxData] + BPHY8_StaticContent += ["xAOD::VertexContainer#" + +"SoftBVrtClusterTool_Medium_Vertices"] + BPHY8_StaticContent += ["xAOD::VertexAuxContainer#" + +"SoftBVrtClusterTool_Medium_VerticesAux." + + excludedVertexAuxData] + BPHY8_StaticContent += ["xAOD::VertexContainer#" + +"SoftBVrtClusterTool_Loose_Vertices"] + BPHY8_StaticContent += ["xAOD::VertexAuxContainer#" + +"SoftBVrtClusterTool_Loose_VerticesAux." + + excludedVertexAuxData] + +BPHY8SlimmingHelper.AllVariables = BPHY8_AllVariables +BPHY8SlimmingHelper.SmartCollections = BPHY8_SmartCollections +BPHY8SlimmingHelper.StaticContent = BPHY8_StaticContent +BPHY8SlimmingHelper.ExtraVariables = BPHY8_ExtraVariables +BPHY8SlimmingHelper.AppendContentToStream(BPHY8Stream) + +#==================================================================== +# END OF BPHY8.py +#==================================================================== diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY9.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY9.py new file mode 100644 index 0000000000000000000000000000000000000000..a617264e2ce5a11e017cbd6893d9cf476fe66c61 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/BPHY9.py @@ -0,0 +1,398 @@ +#******************************************************************** +# BPHY9.py +# reductionConf flag BPHY9 in Reco_tf.py +#******************************************************************** + +from DerivationFrameworkCore.DerivationFrameworkMaster import * +from DerivationFrameworkInDet.InDetCommon import * +from DerivationFrameworkJetEtMiss.JetCommon import * +from DerivationFrameworkEGamma.EGammaCommon import * +from DerivationFrameworkMuons.MuonsCommon import * +from DerivationFrameworkHiggs.TruthCategories import * +from AthenaCommon.GlobalFlags import globalflags + +is_MC = globalflags.DataSource()=='geant4' + +print('is_MC = ',is_MC) + + +if is_MC: + from 
DerivationFrameworkMCTruth.MCTruthCommon import addStandardTruthContents + addStandardTruthContents() + from DerivationFrameworkMCTruth.HFHadronsCommon import * + +#==================================================================== +# SET UP STREAM +#==================================================================== +streamName = derivationFlags.WriteDAOD_BPHY9Stream.StreamName +fileName = buildFileName( derivationFlags.WriteDAOD_BPHY9Stream ) +BPHY9Stream = MSMgr.NewPoolRootStream( streamName, fileName ) +BPHY9Stream.AcceptAlgs(['BPHY9Kernel']) + +#==================================================================== +# AUGMENTATION TOOLS +#==================================================================== +augmentationTools = [] + +#-------------------------------------------------------------------- +# Jpsi vertexing Tool +#-------------------------------------------------------------------- +# 1/ setup vertexing tools and services +include('DerivationFrameworkBPhys/configureVertexing.py') +BPHY9_VertexTools = BPHYVertexTools('BPHY9') + +# 2/ Setup the vertex fitter tools (e.g. JpsiFinder, JpsiPlus1Track, etc). +# These are general tools independent of DerivationFramework that do the +# actual vertex fitting and some pre-selection. 
+from JpsiUpsilonTools.JpsiUpsilonToolsConf import Analysis__JpsiFinder +BPHY9JpsiFinder = Analysis__JpsiFinder(name = 'BPHY9JpsiFinder', + OutputLevel = INFO, + muAndMu = True, + muAndTrack = False, + TrackAndTrack = False, + assumeDiMuons = True, # If true, will assume dimu hypothesis and use PDG value for mu mass + invMassUpper = 100000.0, + invMassLower = 0.0, + Chi2Cut = 200., + oppChargesOnly = True, + atLeastOneComb = True, + useCombinedMeasurement = False, # Only takes effect if combOnly=True + muonCollectionKey = 'Muons', + TrackParticleCollection = 'InDetTrackParticles', + V0VertexFitterTool = BPHY9_VertexTools.TrkV0Fitter, # V0 vertex fitter + useV0Fitter = False, # if False a TrkVertexFitterTool will be used + TrkVertexFitterTool = BPHY9_VertexTools.TrkVKalVrtFitter, # VKalVrt vertex fitter + TrackSelectorTool = BPHY9_VertexTools.InDetTrackSelectorTool, + VertexPointEstimator = BPHY9_VertexTools.VtxPointEstimator, + useMCPCuts = False) +ToolSvc += BPHY9JpsiFinder +print(BPHY9JpsiFinder) + +# 3/ Setup the vertex reconstruction 'call' tool(s). They are part of the derivation framework. These Augmentation tools add +# output vertex collection(s) into the StoreGate and add basic decorations which do not depend on the vertex mass hypothesis +# (e.g. lxy, ptError, etc). There should be one tool per topology, i.e. Jpsi and Psi(2S) do not need two instance of the +# Reco tool is the JpsiFinder mass window is wide enough. +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Reco_Vertex +BPHY9_Reco_mumu = DerivationFramework__Reco_Vertex(name = 'BPHY9_Reco_mumu', + VertexSearchTool = BPHY9JpsiFinder, + OutputVtxContainerName = 'BPHY9OniaCandidates', + PVContainerName = 'PrimaryVertices', + RefPVContainerName = 'BPHY9RefittedPrimaryVertices', + RefitPV = True, + MaxPVrefit = 100000, + DoVertexType = 7) +ToolSvc += BPHY9_Reco_mumu +print(BPHY9_Reco_mumu) + +# 4/ Setup the vertex selection and augmentation tool(s). 
These tools decorate the vertices with variables that depend +# on the vertex mass hypothesis, e.g. invariant mass, proper decay time, etc. Property HypothesisName is used as a +# prefix for these decorations. They also perform tighter selection, flagging the vertecis that passed. The flag is +# a Char_t branch named 'passed_'+HypothesisName. It is used later by the 'SelectEvent' and 'Thin_vtxTrk' tools to +# determine which events and candidates should be kept in the output stream. Multiple instances of the Select_* tools +# can be used on a single input collection as long as they use different 'HypothesisName' flags. +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Select_onia2mumu + +# 4a/ augment and select Jpsi->mumu candidates +BPHY9_Select_Jpsi2mumu = DerivationFramework__Select_onia2mumu(name = 'BPHY9_Select_Jpsi2mumu', + HypothesisName = 'Jpsi', + InputVtxContainerName = 'BPHY9OniaCandidates', + VtxMassHypo = 3096.916, + MassMin = 2000.0, + MassMax = 3600.0, + Chi2Max = 200, + DoVertexType = 7) +ToolSvc += BPHY9_Select_Jpsi2mumu +print(BPHY9_Select_Jpsi2mumu) + +# 4b/ augment and select Psi(2S)->mumu candidates +BPHY9_Select_Psi2mumu = DerivationFramework__Select_onia2mumu(name = 'BPHY9_Select_Psi2mumu', + HypothesisName = 'Psi', + InputVtxContainerName = 'BPHY9OniaCandidates', + VtxMassHypo = 3686.09, + MassMin = 3300.0, + MassMax = 4500.0, + Chi2Max = 200, + DoVertexType = 7) +ToolSvc += BPHY9_Select_Psi2mumu +print(BPHY9_Select_Psi2mumu) + +# 5/ select the event. We only want to keep events that contain certain vertices which passed certain selection. 
+# This is specified by the 'SelectionExpression' property, which contains the expression in the following format: +# 'ContainerName.passed_HypoName > count' +# where 'ContainerName' is output container form some Reco_* tool, 'HypoName' is the hypothesis name setup in some 'Select_*' +# tool and 'count' is the number of candidates passing the selection you want to keep. + +# Skimming +# a/ High pt lepton +ptSelection = '( count(Electrons.pt > 20*GeV) > 0 || count(Muons.pt > 20*GeV) > 0 )' +# b/ >3 total leptons +threelepSelection = '( count(Muons.pt > 0) + count(Electrons.pt > 0) >= 3 )' +# c/ di-muon vertex near Onia peak +oniaSelection = '( count(BPHY9OniaCandidates.passed_Jpsi) > 0 || count(BPHY9OniaCandidates.passed_Psi) > 0 )' +# & +expression = oniaSelection + ' && ' + threelepSelection + ' && ' + ptSelection +from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__xAODStringSkimmingTool +BPHY9_SelectEvent = DerivationFramework__xAODStringSkimmingTool(name = 'BPHY9_SelectEvent', + expression = expression) +ToolSvc += BPHY9_SelectEvent + +# 6/ Track and vertex thinning. We want to remove all reconstructed secondary vertices which hasn't passed any of the +# selections defined by (Select_*) tools. We also want to keep only tracks which are associates with either muons or +# any of the vertices that passed the selection. Multiple thinning tools can perform the selection. The final thinning +# decision is based OR of all the decisions (by default, although it can be changed by the JO). + +# 6a/ Thining out vertices that didn't pass any selection and idetifying tracks associated with selected vertices. The +# 'VertexContainerNames' is a list of the vertex containers, and 'PassFlags' contains all pass flags for Select_* +# tools that must be satisfied. The vertex is kept is it satisfy any of the listed selections. 
+ +from DerivationFrameworkBPhys.DerivationFrameworkBPhysConf import DerivationFramework__Thin_vtxTrk +BPHY9Thin_vtxTrk = DerivationFramework__Thin_vtxTrk(name = 'BPHY9Thin_vtxTrk', + TrackParticleContainerName = 'InDetTrackParticles', + VertexContainerNames = ['BPHY9OniaCandidates'], + PassFlags = ['passed_Jpsi', 'passed_Psi'] ) +ToolSvc += BPHY9Thin_vtxTrk + +# 6b/ thinning out tracks that are not attached to muons. The final thinning decision is based on the OR operation +# between decision from this and the previous tools. +from DerivationFrameworkInDet.DerivationFrameworkInDetConf import DerivationFramework__MuonTrackParticleThinning +BPHY9MuonTPThinningTool = DerivationFramework__MuonTrackParticleThinning(name = 'BPHY9MuonTPThinningTool', + MuonKey = 'Muons', + InDetTrackParticlesKey = 'InDetTrackParticles') +ToolSvc += BPHY9MuonTPThinningTool + +# 6c/ Only save truth informtion directly associated with Onia +from DerivationFrameworkMCTruth.DerivationFrameworkMCTruthConf import DerivationFramework__GenericTruthThinning +BPHY9TruthThinTool = DerivationFramework__GenericTruthThinning(name = 'BPHY9TruthThinTool', + ParticleSelectionString = 'TruthParticles.pdgId==443 || TruthParticles.pdgId==100443', + PreserveDescendants = True, + PreserveAncestors = True) +ToolSvc += BPHY9TruthThinTool +print(BPHY9TruthThinTool) + +#============================================================================== +# BACKGROUND ELECTRON DECORATION TYPE/ORIGIN +#============================================================================== +# PhysicsAnalysis/DerivationFramework/DerivationFrameworkEGamma/trunk/src/BkgElectronClassification.cxx + +if is_MC: + from MCTruthClassifier.MCTruthClassifierBase import MCTruthClassifier as BkgElectronMCTruthClassifier + from DerivationFrameworkEGamma.DerivationFrameworkEGammaConf import DerivationFramework__BkgElectronClassification + BPHY9BkgElectronClassificationTool = DerivationFramework__BkgElectronClassification (name = 
'BkgElectronClassificationTool', + MCTruthClassifierTool = BkgElectronMCTruthClassifier) + ToolSvc += BPHY9BkgElectronClassificationTool + augmentationTools.append(BPHY9BkgElectronClassificationTool) + print('BkgElectronClassificationTool: ', BPHY9BkgElectronClassificationTool) + +#==================================================================== +# THINNING TOOLS +#==================================================================== +thinningTools=[] + +# Establish the thinning helper (which will set up the services behind the scenes) +from DerivationFrameworkCore.ThinningHelper import ThinningHelper +BPHY9ThinningHelper = ThinningHelper( 'BPHY9ThinningHelper' ) + +# Trigger Thinning Tool +elTrig = '^(?!.*_[0-9]*(mu|j|xe|tau|ht|xs|te))(?!HLT_e.*_[0-9]*e.*)HLT_e.*lhloose.*'\ + +'|^(?!.*_[0-9]*(mu|j|xe|tau|ht|xs|te))(?!HLT_e.*_[0-9]*e.*)HLT_e.*lhmedium.*'\ + +'|^(?!.*_[0-9]*(mu|j|xe|tau|ht|xs|te))(?!HLT_e.*_[0-9]*e.*)HLT_e.*lhtight.*'\ + +'|^(?!.*_[0-9]*(mu|j|xe|tau|ht|xs|te))(?!HLT_e.*_[0-9]*e.*)HLT_e.*lhvloose.*' +muTrig = '^(?!.*_[0-9]*(e|j|xe|tau|ht|xs|te))(?!HLT_mu.*_[0-9]*mu.*)HLT_mu.*' +BPHY9ThinningHelper.TriggerChains = elTrig + '|' + muTrig +BPHY9ThinningHelper.AppendToStream( BPHY9Stream ) + +# Jet tracks +from DerivationFrameworkInDet.DerivationFrameworkInDetConf import DerivationFramework__JetTrackParticleThinning +BPHY9JetTPThinningTool = DerivationFramework__JetTrackParticleThinning(name = 'BPHY9JetTPThinningTool', + JetKey = 'AntiKt4EMTopoJets', + InDetTrackParticlesKey = 'InDetTrackParticles', + ApplyAnd = True) +ToolSvc += BPHY9JetTPThinningTool +thinningTools.append(BPHY9JetTPThinningTool) + +# Tracks associated with Muons +from DerivationFrameworkInDet.DerivationFrameworkInDetConf import DerivationFramework__MuonTrackParticleThinning +BPHY9MuonTPThinningTool = DerivationFramework__MuonTrackParticleThinning(name = 'BPHY9MuonTPThinningTool', + MuonKey = 'Muons', + InDetTrackParticlesKey = 'InDetTrackParticles') +ToolSvc += 
BPHY9MuonTPThinningTool +thinningTools.append(BPHY9MuonTPThinningTool) + +# Tracks associated with Electrons +from DerivationFrameworkInDet.DerivationFrameworkInDetConf import DerivationFramework__EgammaTrackParticleThinning +BPHY9ElectronTPThinningTool = DerivationFramework__EgammaTrackParticleThinning(name = 'BPHY9ElectronTPThinningTool', + SGKey = 'Electrons', + InDetTrackParticlesKey = 'InDetTrackParticles') +ToolSvc += BPHY9ElectronTPThinningTool +thinningTools.append(BPHY9ElectronTPThinningTool) + +#==================================================================== +# Truth Thinning +#==================================================================== +# Truth selection strings +truth_cond_lep_list = [ +'(abs(TruthParticles.pdgId)>=11 && abs(TruthParticles.pdgId)<=14)', +'(TruthParticles.pt > 4.0*GeV)', +'(TruthParticles.status == 1)', +'(TruthParticles.barcode<200000)'] +truth_cond_lep = ' && '.join(truth_cond_lep_list) +truth_cond_photon = '(abs(TruthParticles.pdgId)==22) && (TruthParticles.pt>1*GeV)' +truth_cond_comb = '('+truth_cond_lep+') || ('+truth_cond_photon+')' + +# PreserveGeneratorDescendants only keeps particles that came directly from the event generator +# PreserveDescendants keeps all particles including those that come from Geant processes +BPHY9TruthTool = DerivationFramework__GenericTruthThinning(name = 'BPHY9TruthTool', + ParticleSelectionString = truth_cond_comb, + PreserveDescendants = True, + PreserveGeneratorDescendants = False, + PreserveAncestors = True, + TauHandling = False) + +from DerivationFrameworkMCTruth.DerivationFrameworkMCTruthConf import DerivationFramework__MenuTruthThinning +BPHY9TruthToolMenu = DerivationFramework__MenuTruthThinning(name = 'BPHY9TruthToolMenu', + WritePartons = False, + WriteHadrons = False, + WriteBHadrons = False, + WriteGeant = False, + GeantPhotonPtThresh = -1.0, + WriteTauHad = False, + PartonPtThresh = -1.0, + WriteBSM = False, + WriteBosons = True, + WriteBSMProducts = False, + 
WriteBosonProducts = True, + WriteTopAndDecays = True, + WriteEverything = False, + WriteAllLeptons = True, + WriteStatus3 = False, + WriteFirstN = -1) + +if is_MC: + ToolSvc += BPHY9TruthTool + thinningTools.append(BPHY9TruthTool) + ToolSvc += BPHY9TruthToolMenu + thinningTools.append(BPHY9TruthToolMenu) + +#======================================= +# CREATE PRIVATE SEQUENCE +#======================================= +BPHY9Seq = CfgMgr.AthSequencer('BPHY9Sequence') +from DerivationFrameworkFlavourTag.FlavourTagCommon import FlavorTagInit +FlavorTagInit(JetCollections=['AntiKt4EMPFlowJets'], Sequencer=BPHY9Seq) +#======================================= +# CREATE THE DERIVATION KERNEL ALGORITHM +#======================================= +BPHY9ThinningTools = [BPHY9Thin_vtxTrk, BPHY9MuonTPThinningTool] +if is_MC: + BPHY9ThinningTools.append(BPHY9TruthThinTool) + +from DerivationFrameworkCore.DerivationFrameworkCoreConf import DerivationFramework__DerivationKernel +DerivationFrameworkJob += CfgMgr.DerivationFramework__DerivationKernel( + 'BPHY9Kernel', + AugmentationTools = [BPHY9_Reco_mumu, BPHY9_Select_Jpsi2mumu, BPHY9_Select_Psi2mumu], + SkimmingTools = [BPHY9_SelectEvent]) + +#==================================================================== +# JetTagNonPromptLepton decorations +#==================================================================== +import JetTagNonPromptLepton.JetTagNonPromptLeptonConfig as JetTagConfig +# Build AntiKt4PV0TrackJets and run b-tagging +JetTagConfig.ConfigureAntiKt4PV0TrackJets(BPHY9Seq, 'BPHY9') +# Add BDT decoration algs +BPHY9Seq += JetTagConfig.GetDecoratePromptLeptonAlgs() +DerivationFrameworkJob += BPHY9Seq + +#==================================================================== +# SLIMMING TOOL +#==================================================================== +from DerivationFrameworkCore.SlimmingHelper import SlimmingHelper +BPHY9SlimmingHelper = SlimmingHelper('BPHY9SlimmingHelper') + +# Slimming for recontruction 
content +BPHY9SlimmingHelper.AllVariables = [] + +BPHY9SlimmingHelper.SmartCollections = ['AntiKt4EMPFlowJets', + 'AntiKt4EMPFlowJets_BTagging201903', + 'AntiKt4EMTopoJets', + 'AntiKt4EMTopoJets_BTagging201810', + 'BTagging_AntiKt4EMPFlow_201903', + 'BTagging_AntiKt4EMTopo_201810', + 'Electrons', + 'InDetTrackParticles', + 'MET_Reference_AntiKt4EMPFlow', + 'MET_Reference_AntiKt4EMTopo', + 'Muons', + 'PrimaryVertices'] + +extraJetVariables = '.JetEMScaleMomentum_pt.JetEMScaleMomentum_eta.JetEMScaleMomentum_phi.JetEMScaleMomentum_m'\ + +'.ConeTruthLabelID.PartonTruthLabelID.SumPtTrkPt1000.Jvt.JvtJvfcorr.JvtRpt'\ + +'.HECFrac.LArQuality.HECQuality.NegativeE.AverageLArQF' + +BPHY9SlimmingHelper.ExtraVariables = ['AntiKt4EMPFlowJets'+extraJetVariables, + 'AntiKt4EMTopoJets'+extraJetVariables, + 'CombinedMuonTrackParticles'+'.z0' + +'.vz' + +'.definingParametersCovMatrix', + 'Electrons'+'.author' + +'.charge', + 'ExtrapolatedMuonTrackParticles'+'.z0' + +'.vz' + +'.definingParametersCovMatrix', + 'GSFTrackParticles'+'.z0' + +'.vz' + +'.definingParametersCovMatrix', + 'Muons'+'.clusterLink' + +'.allAuthors' + +'.charge' + +'.extrapolatedMuonSpectrometerTrackParticleLink' + +'.scatteringCurvatureSignificance' + +'.scatteringNeighbourSignificance', + 'PrimaryVertices'+'.x' + +'.y'] + +BPHY9Stream.StaticContent = [] + +# Slimming for truth content +if is_MC: + BPHY9SlimmingHelper.AllVariables += ['TruthParticles', + 'TruthEvents', + 'TruthVertices'] + + BPHY9SlimmingHelper.SmartCollections += ['AntiKt4TruthJets', + 'AntiKt4TruthWZJets'] + + BPHY9SlimmingHelper.ExtraVariables += ['CombinedMuonTrackParticles'+'.truthOrigin' + +'.truthType' + +'.truthParticleLink', + 'Electrons'+'.truthOrigin' + +'.truthType' + +'.truthParticleLink' + +'.bkgTruthType' + +'.bkgTruthOrigin' + +'.bkgTruthParticleLink' + +'.bkgMotherPdgId' + +'.deltaPhi1', + 'InDetTrackParticles'+'.truthOrigin' + +'.truthType' + +'.truthParticleLink', + 'MuonTruthParticles'+'.truthOrigin' + +'.truthType' + 
+'.truthParticleLink'] + + BPHY9SlimmingHelper.StaticContent += ['xAOD::TruthParticleContainer#TruthMuons', + 'xAOD::TruthParticleAuxContainer#TruthMuonsAux.', + 'xAOD::TruthParticleContainer#TruthElectrons', + 'xAOD::TruthParticleAuxContainer#TruthElectronsAux.', + 'xAOD::TruthParticleContainer#TruthNeutrinos', + 'xAOD::TruthParticleAuxContainer#TruthNeutrinosAux.'] + +# Slimming for trigger content +BPHY9SlimmingHelper.IncludeEGammaTriggerContent = True +BPHY9SlimmingHelper.IncludeMuonTriggerContent = True + +# Slimming for charmonia content +BPHY9Stream.AddItem('xAOD::VertexContainer#BPHY9OniaCandidates') +BPHY9Stream.AddItem('xAOD::VertexAuxContainer#BPHY9OniaCandidatesAux.') +BPHY9Stream.AddItem('xAOD::VertexAuxContainer#BPHY9OniaCandidatesAux.-vxTrackAtVertex') + +BPHY9SlimmingHelper.AppendContentToStream(BPHY9Stream) \ No newline at end of file diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/SaveExtraMetadataInMerge_jobOFragment.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/SaveExtraMetadataInMerge_jobOFragment.py new file mode 100644 index 0000000000000000000000000000000000000000..0e7742c69c7fed4252af80ee52c95a2ee8ff1c1d --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/SaveExtraMetadataInMerge_jobOFragment.py @@ -0,0 +1,53 @@ +# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# +# JobOption fragment to be used during DAOD merging. It takes care of +# propagating all B-physics metadata objects to the output file. 
+# + +# Python import(s): +import re + +# Core import(s): +from RecExConfig.InputFilePeeker import inputFileSummary +from OutputStreamAthenaPool.MultipleStreamManager import MSMgr +from AthenaCommon.Logging import logging + +# Create a logger: +_logger = logging.getLogger( "SaveExtraMetadataInMerge_jobOFragment" ) + +# Find the exact name of xAOD::FileMetaData_vX in the inputFileSummary +# dictionary: +mdType = "" +for key in list(inputFileSummary[ 'metadata_itemsDic' ].keys()): + if re.match( 'xAOD::FileMetaData_v[0-9]+', key ): + mdType = key + break + pass + +# If there is, then let's do the rest of the setup: +if mdType != "": + + # Loop over the keys of all the xAOD::FileMetaData objects: + for key in inputFileSummary[ 'metadata_itemsDic' ][ mdType ]: + + # If it doesn't look like a b-physics metadata object, then leave + # it alone: + if not key.endswith( '_MetaData' ): + continue + + # Create the metadata tool for propagating this info: + toolName = "BPhysFileMetadataTool_%s" % key + ToolSvc += CfgMgr.xAODMaker__FileMetaDataTool( toolName, + InputKey = key, + OutputKey = key ) + svcMgr.MetaDataSvc.MetaDataTools += [ getattr( ToolSvc, toolName ) ] + _logger.info( "Created tool: %s" % toolName ) + + # Add the metadata to the output stream(s): + outputItems = [ 'xAOD::FileMetaData#%s' % key, + 'xAOD::FileMetaDataAuxInfo#%sAux.' 
% key ] + MSMgr.AddMetaDataItemToAllStreams( outputItems ) + _logger.info( "Added metatata items: %s" % str( outputItems ) ) + + pass + pass diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/configureConversionFinder.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/configureConversionFinder.py new file mode 100644 index 0000000000000000000000000000000000000000..67b775ec06119d79d621e4512ab815cc6710f1b0 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/configureConversionFinder.py @@ -0,0 +1,69 @@ +# +# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration +# +#==================================================================== +# Collection of tools to run the conversion finder +#==================================================================== + +class BPHYConversionFinderTools: + + def __init__(self, derivation = ""): + + if derivation == "": + print('--------> FATAL: BPHYConversionFinderTools, "derivation" string not set!') + import sys + sys.exit() + + + prefix = derivation+"ConversionFinderTools" + + from AthenaCommon.AppMgr import ToolSvc + + # set up extrapolator + from TrkExTools.AtlasExtrapolator import AtlasExtrapolator + self.Extrapolator = AtlasExtrapolator(name = prefix+"_AtlasExtrapolator") + ToolSvc += self.Extrapolator + print(self.Extrapolator) + + from TrkVKalVrtFitter.TrkVKalVrtFitterConf import Trk__TrkVKalVrtFitter + self.InDetSecVxFitterTool = Trk__TrkVKalVrtFitter(name = prefix+"_Fitter", + Extrapolator = self.Extrapolator, + MakeExtendedVertex = True, + FirstMeasuredPoint = False, + Robustness = 6, + InputParticleMasses = [0.511,0.511], + VertexForConstraint = [0.,0.,0.], + CovVrtForConstraint = [0.015*0.015,0.,0.015*0.015,0.,0.,10000.*10000.], + FirstMeasuredPointLimit = True, + usePhiCnst = True, + useThetaCnst = True) + ToolSvc += self.InDetSecVxFitterTool + print(self.InDetSecVxFitterTool) + + + from 
TrkVertexSeedFinderUtils.TrkVertexSeedFinderUtilsConf import Trk__SeedNewtonTrkDistanceFinder + self.InDetSecVxTrkDistanceFinder = Trk__SeedNewtonTrkDistanceFinder(name = prefix+"_TrkDistanceFinder") + ToolSvc += self.InDetSecVxTrkDistanceFinder + print(self.InDetSecVxTrkDistanceFinder) + + + from InDetConversionFinderTools.InDetConversionFinderToolsConf import InDet__VertexPointEstimator + self.InDetSecVtxPointEstimator = InDet__VertexPointEstimator(name = prefix+"_PointEstimator", + MinDeltaR = [-5.,-25.,-50.], # D-R1-R2 min cut + MaxDeltaR = [5.,10.,10.] , # D-R1-R2 max cut + MaxPhi = [0.05, 0.5, 0.5]) # dphi cut at vertex + ToolSvc += self.InDetSecVtxPointEstimator + print(self.InDetSecVtxPointEstimator) + + + from InDetConversionFinderTools.InDetConversionFinderToolsConf import InDet__ConversionPostSelector + self.InDetSecVtxPostSelector = InDet__ConversionPostSelector(name = prefix+"_PostSelector", + MaxChi2Vtx = [10.,10.,10.], + MaxInvariantMass = [10000.,10000.,10000.], + MinFitMomentum = [0.,0.,0.], + MinRadius = [10.0,10.0,10.0], + MinPt = 0.0, + MaxdR = -10000.0, + MaxPhiVtxTrk = 10000.0) + ToolSvc += self.InDetSecVtxPostSelector + print(self.InDetSecVtxPostSelector) diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/configureSimpleV0Finder.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/configureSimpleV0Finder.py new file mode 100644 index 0000000000000000000000000000000000000000..bf1f8ca02ee874fbaa7be1cf51331b92b3aa3c5b --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/configureSimpleV0Finder.py @@ -0,0 +1,201 @@ +#==================================================================== +# Collection of tools required by V0Finder +#==================================================================== + +class BPHYV0FinderTools: + + def __init__(self, derivation = ""): + + if derivation == "": + print('--------> FATAL: BPHYV0FinderTools, "derivation" string not set!') + 
import sys + sys.exit() + + from AthenaCommon.AppMgr import ToolSvc + + # set up extrapolator + from TrkExTools.AtlasExtrapolator import AtlasExtrapolator + self.InDetExtrapolator = AtlasExtrapolator(name = derivation+"_AtlasExtrapolator") + ToolSvc += self.InDetExtrapolator + print(self.InDetExtrapolator) + + # Vertex point estimator + from InDetConversionFinderTools.InDetConversionFinderToolsConf import InDet__VertexPointEstimator + self.V0VtxPointEstimator = InDet__VertexPointEstimator(name = derivation+"_VtxPointEstimator", + MaxTrkXYDiffAtVtx = [20.,20.,20.], + MaxTrkZDiffAtVtx = [100.,100.,100.], + MaxTrkXYValue = [400.,400.,400.], + MinArcLength = [-800.,-800.,-800.], + MaxArcLength = [800.,800.,800.], + MinDeltaR = [-10000.,-10000.,-10000.], + MaxDeltaR = [10000.,10000.,10000.], + MaxPhi = [10000., 10000., 10000.], + MaxChi2OfVtxEstimation = 2000.) + ToolSvc += self.V0VtxPointEstimator + print(self.V0VtxPointEstimator) + + from InDetRecExample.InDetKeys import InDetKeys + + from InDetAssociationTools.InDetAssociationToolsConf import InDet__InDetPRD_AssociationToolGangedPixels + self.V0PrdAssociationTool = InDet__InDetPRD_AssociationToolGangedPixels(name = derivation+"_V0PrdAssociationTool", + PixelClusterAmbiguitiesMapName = InDetKeys.GangedPixelMap()) + ToolSvc += self.V0PrdAssociationTool + print(self.V0PrdAssociationTool) + + from RecExConfig.RecFlags import rec + CountDeadModulesAfterLastHit=False + #rec.Commissioning=False + + from InDetTrackHoleSearch.InDetTrackHoleSearchConf import InDet__InDetTrackHoleSearchTool + self.V0HoleSearchTool = InDet__InDetTrackHoleSearchTool(name = derivation+"_V0HoleSearchTool", + Extrapolator = self.InDetExtrapolator, + usePixel = DetFlags.haveRIO.pixel_on(), + useSCT = DetFlags.haveRIO.SCT_on(), + #Commissioning = rec.Commissioning()) + CountDeadModulesAfterLastHit = CountDeadModulesAfterLastHit) + ToolSvc += self.V0HoleSearchTool + print(self.V0HoleSearchTool) + + from 
InDetTrackSummaryHelperTool.InDetTrackSummaryHelperToolConf import InDet__InDetTrackSummaryHelperTool + self.V0TrackSummaryHelperTool = InDet__InDetTrackSummaryHelperTool(name = derivation+"_InDetSummaryHelper", + AssoTool = self.V0PrdAssociationTool, + DoSharedHits = False, + HoleSearch = self.V0HoleSearchTool, + usePixel = DetFlags.haveRIO.pixel_on(), + useSCT = DetFlags.haveRIO.SCT_on(), + useTRT = DetFlags.haveRIO.TRT_on()) + ToolSvc += self.V0TrackSummaryHelperTool + print(self.V0TrackSummaryHelperTool) + + from TrkTrackSummaryTool.TrkTrackSummaryToolConf import Trk__TrackSummaryTool + self.V0TrackSummaryTool = Trk__TrackSummaryTool(name = derivation+"_V0TrackSummaryTool", + InDetSummaryHelperTool = self.V0TrackSummaryHelperTool, + doSharedHits = False, + InDetHoleSearchTool = self.V0HoleSearchTool) + ToolSvc += self.V0TrackSummaryTool + print(self.V0TrackSummaryTool) + + + + from InDetTrackSelectorTool.InDetTrackSelectorToolConf import InDet__InDetConversionTrackSelectorTool + self.InDetV0VxTrackSelector = InDet__InDetConversionTrackSelectorTool(name = derivation+"InDetV0VxTrackSelector", + TrackSummaryTool = self.V0TrackSummaryTool, + Extrapolator = self.InDetExtrapolator, + # Extrapolator = "Trk::Extrapolator/InDetExtrapolator", + maxTrtD0 = 50., + maxSiZ0 = 250., + significanceD0_Si = 1., + significanceD0_Trt = 1., + significanceZ0_Trt = 3., + minPt = 400.0, + IsConversion = False) + ToolSvc += self.InDetV0VxTrackSelector + print(self.InDetV0VxTrackSelector) + + + # configure vertex fitters + + from TrkV0Fitter.TrkV0FitterConf import Trk__TrkV0VertexFitter + self.BPhysV0Fitter = Trk__TrkV0VertexFitter(name = derivation+'_BPhysV0Fitter', + MaxIterations = 10, + Use_deltaR = False, + FirstMeasuredPoint = True, + Extrapolator = self.InDetExtrapolator) + ToolSvc += self.BPhysV0Fitter + print(self.BPhysV0Fitter) +# + from TrkVKalVrtFitter.TrkVKalVrtFitterConf import Trk__TrkVKalVrtFitter + self.BPhysVKVertexFitter = Trk__TrkVKalVrtFitter(name = 
derivation+"_BPhysVKVFitter", + Extrapolator = self.InDetExtrapolator, + IterationNumber = 30, + FirstMeasuredPoint = True, + MakeExtendedVertex = True) + ToolSvc += self.BPhysVKVertexFitter + print(self.BPhysVKVertexFitter) + +# + from TrkVKalVrtFitter.TrkVKalVrtFitterConf import Trk__TrkVKalVrtFitter + self.BPhysKshortFitter = Trk__TrkVKalVrtFitter(name = derivation+"_BPhysVKKVFitter", + Extrapolator = self.InDetExtrapolator, + IterationNumber = 30, + FirstMeasuredPoint = True, + MakeExtendedVertex = True, + InputParticleMasses = [139.57,139.57], + MassForConstraint = 497.672) + ToolSvc += self.BPhysKshortFitter + print(self.BPhysKshortFitter) +# + from TrkVKalVrtFitter.TrkVKalVrtFitterConf import Trk__TrkVKalVrtFitter + self.BPhysLambdaFitter = Trk__TrkVKalVrtFitter(name = derivation+"_BPhysVKLFitter", + Extrapolator = self.InDetExtrapolator, + IterationNumber = 30, + FirstMeasuredPoint = True, + MakeExtendedVertex = True, + InputParticleMasses = [938.272,139.57], + MassForConstraint = 1115.68) + ToolSvc += self.BPhysLambdaFitter + print(self.BPhysLambdaFitter) +# + from TrkVKalVrtFitter.TrkVKalVrtFitterConf import Trk__TrkVKalVrtFitter + self.BPhysLambdabarFitter = Trk__TrkVKalVrtFitter(name = derivation+"_BPhysVKLbFitter", + Extrapolator = self.InDetExtrapolator, + IterationNumber = 30, + FirstMeasuredPoint = True, + MakeExtendedVertex = True, + InputParticleMasses = [139.57,938.272], + MassForConstraint = 1115.68) + ToolSvc += self.BPhysLambdabarFitter + print(self.BPhysLambdabarFitter) +# + from TrkVKalVrtFitter.TrkVKalVrtFitterConf import Trk__TrkVKalVrtFitter + self.BPhysGammaFitter = Trk__TrkVKalVrtFitter(name = derivation+"_BPhysVKGFitter", + Extrapolator = self.InDetExtrapolator, + IterationNumber = 30, + Robustness = 6, + FirstMeasuredPoint = True, + MakeExtendedVertex = True, + usePhiCnst = True, + useThetaCnst = True, + InputParticleMasses = [0.511,0.511]) + ToolSvc += self.BPhysGammaFitter + print(self.BPhysGammaFitter) + 
+##-------------------------------------------- +## Setup V0Finder +##-------------------------------------------- + from InDetV0Finder.InDetV0FinderConf import InDet__InDetV0FinderTool + self.V0FinderTool = InDet__InDetV0FinderTool(name = derivation+'_InDetV0FinderTool', + TrackParticleCollection = InDetKeys.xAODTrackParticleContainer(), + #TrackParticleCollection = "InDetTrackParticles", + useV0Fitter = True, + VertexFitterTool = self.BPhysV0Fitter, + VKVertexFitterTool = self.BPhysVKVertexFitter, + KshortFitterTool = self.BPhysKshortFitter, + LambdaFitterTool = self.BPhysLambdaFitter, + LambdabarFitterTool = self.BPhysLambdabarFitter, + GammaFitterTool = self.BPhysGammaFitter, + TrackSelectorTool = self.InDetV0VxTrackSelector, + VertexPointEstimator = self.V0VtxPointEstimator, + doSimpleV0 = True, + #useorigin = False, + #useTRTplusTRT = True, + #useTRTplusSi = True, + useVertexCollection = True, + #trkSelPV = True, + Extrapolator = self.InDetExtrapolator) + #Extrapolator = "Trk::Extrapolator/InDetExtrapolator") + ToolSvc += self.V0FinderTool + print(self.V0FinderTool) + + #from InDetV0Finder.InDetV0FinderConf import InDet__InDetV0Finder + #self.InDetV0Finder = InDet__InDetV0Finder(name = derivation+'InDetV0Finder', + # #decorateV0 = False, + # InDetV0FinderToolName = V0FinderTool, + # V0ContainerName = InDetKeys.xAODV0VertexContainer(), + # KshortContainerName = InDetKeys.xAODKshortVertexContainer(), + # LambdaContainerName = InDetKeys.xAODLambdaVertexContainer(), + # LambdabarContainerName = InDetKeys.xAODLambdabarVertexContainer()) + #topSequence += self.InDetV0Finder + #print self.InDetV0Finder + + diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/configureV0Finder.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/configureV0Finder.py new file mode 100644 index 0000000000000000000000000000000000000000..0add564e26b901c7cb0df76857ed8529fdf2dc45 --- /dev/null +++ 
b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/configureV0Finder.py @@ -0,0 +1,202 @@ +#==================================================================== +# Collection of tools required by V0Finder +#==================================================================== + +class BPHYV0FinderTools: + + def __init__(self, derivation = ""): + + if derivation == "": + print('--------> FATAL: BPHYV0FinderTools, "derivation" string not set!') + import sys + sys.exit() + + from AthenaCommon.AppMgr import ToolSvc + + # set up extrapolator + from TrkExTools.AtlasExtrapolator import AtlasExtrapolator + self.InDetExtrapolator = AtlasExtrapolator(name = derivation+"_AtlasExtrapolator") + ToolSvc += self.InDetExtrapolator + print(self.InDetExtrapolator) + + # Vertex point estimator + from InDetConversionFinderTools.InDetConversionFinderToolsConf import InDet__VertexPointEstimator + self.V0VtxPointEstimator = InDet__VertexPointEstimator(name = derivation+"_VtxPointEstimator", + MaxTrkXYDiffAtVtx = [20.,20.,20.], + MaxTrkZDiffAtVtx = [100.,100.,100.], + MaxTrkXYValue = [400.,400.,400.], + MinArcLength = [-800.,-800.,-800.], + MaxArcLength = [800.,800.,800.], + MinDeltaR = [-10000.,-10000.,-10000.], + MaxDeltaR = [10000.,10000.,10000.], + MaxPhi = [10000., 10000., 10000.], + MaxChi2OfVtxEstimation = 2000.) 
+ ToolSvc += self.V0VtxPointEstimator + print(self.V0VtxPointEstimator) + + from InDetRecExample.InDetKeys import InDetKeys + + from InDetAssociationTools.InDetAssociationToolsConf import InDet__InDetPRD_AssociationToolGangedPixels + self.V0PrdAssociationTool = InDet__InDetPRD_AssociationToolGangedPixels(name = derivation+"_V0PrdAssociationTool", + PixelClusterAmbiguitiesMapName = InDetKeys.GangedPixelMap()) + ToolSvc += self.V0PrdAssociationTool + print(self.V0PrdAssociationTool) + + from RecExConfig.RecFlags import rec + CountDeadModulesAfterLastHit=False + #rec.Commissioning=False + + from InDetTrackHoleSearch.InDetTrackHoleSearchConf import InDet__InDetTrackHoleSearchTool + self.V0HoleSearchTool = InDet__InDetTrackHoleSearchTool(name = derivation+"_V0HoleSearchTool", + Extrapolator = self.InDetExtrapolator, + usePixel = DetFlags.haveRIO.pixel_on(), + useSCT = DetFlags.haveRIO.SCT_on(), + #Commissioning = rec.Commissioning()) + CountDeadModulesAfterLastHit = CountDeadModulesAfterLastHit) + ToolSvc += self.V0HoleSearchTool + print(self.V0HoleSearchTool) + + from InDetTrackSummaryHelperTool.InDetTrackSummaryHelperToolConf import InDet__InDetTrackSummaryHelperTool + self.V0TrackSummaryHelperTool = InDet__InDetTrackSummaryHelperTool(name = derivation+"_InDetSummaryHelper", + AssoTool = self.V0PrdAssociationTool, + DoSharedHits = False, + HoleSearch = self.V0HoleSearchTool, + usePixel = DetFlags.haveRIO.pixel_on(), + useSCT = DetFlags.haveRIO.SCT_on(), + useTRT = DetFlags.haveRIO.TRT_on()) + ToolSvc += self.V0TrackSummaryHelperTool + print(self.V0TrackSummaryHelperTool) + + from TrkTrackSummaryTool.TrkTrackSummaryToolConf import Trk__TrackSummaryTool + self.V0TrackSummaryTool = Trk__TrackSummaryTool(name = derivation+"_V0TrackSummaryTool", + InDetSummaryHelperTool = self.V0TrackSummaryHelperTool, + doSharedHits = False, + InDetHoleSearchTool = self.V0HoleSearchTool) + ToolSvc += self.V0TrackSummaryTool + print(self.V0TrackSummaryTool) + + + + from 
InDetTrackSelectorTool.InDetTrackSelectorToolConf import InDet__InDetConversionTrackSelectorTool + self.InDetV0VxTrackSelector = InDet__InDetConversionTrackSelectorTool(name = derivation+"InDetV0VxTrackSelector", + TrackSummaryTool = self.V0TrackSummaryTool, + Extrapolator = self.InDetExtrapolator, + # Extrapolator = "Trk::Extrapolator/InDetExtrapolator", + maxTrtD0 = 50., + maxSiZ0 = 250., + significanceD0_Si = 1., + significanceD0_Trt = 1., + significanceZ0_Trt = 3., + minPt = 400.0, + IsConversion = False) + ToolSvc += self.InDetV0VxTrackSelector + print(self.InDetV0VxTrackSelector) + + + # configure vertex fitters + + from TrkV0Fitter.TrkV0FitterConf import Trk__TrkV0VertexFitter + self.BPhysV0Fitter = Trk__TrkV0VertexFitter(name = derivation+'_BPhysV0Fitter', + MaxIterations = 10, + Use_deltaR = False, + FirstMeasuredPoint = True, + Extrapolator = self.InDetExtrapolator) + ToolSvc += self.BPhysV0Fitter + print(self.BPhysV0Fitter) +# + from TrkVKalVrtFitter.TrkVKalVrtFitterConf import Trk__TrkVKalVrtFitter + self.BPhysVKVertexFitter = Trk__TrkVKalVrtFitter(name = derivation+"_BPhysVKVFitter", + Extrapolator = self.InDetExtrapolator, + IterationNumber = 30, + FirstMeasuredPoint = True, + MakeExtendedVertex = True) + ToolSvc += self.BPhysVKVertexFitter + print(self.BPhysVKVertexFitter) + +# + from TrkVKalVrtFitter.TrkVKalVrtFitterConf import Trk__TrkVKalVrtFitter + self.BPhysKshortFitter = Trk__TrkVKalVrtFitter(name = derivation+"_BPhysVKKVFitter", + Extrapolator = self.InDetExtrapolator, + IterationNumber = 30, + FirstMeasuredPoint = True, + MakeExtendedVertex = True, + InputParticleMasses = [139.57,139.57], + MassForConstraint = 497.672) + ToolSvc += self.BPhysKshortFitter + print(self.BPhysKshortFitter) +# + from TrkVKalVrtFitter.TrkVKalVrtFitterConf import Trk__TrkVKalVrtFitter + self.BPhysLambdaFitter = Trk__TrkVKalVrtFitter(name = derivation+"_BPhysVKLFitter", + Extrapolator = self.InDetExtrapolator, + IterationNumber = 30, + FirstMeasuredPoint = True, + 
MakeExtendedVertex = True, + InputParticleMasses = [938.272,139.57], + MassForConstraint = 1115.68) + ToolSvc += self.BPhysLambdaFitter + print(self.BPhysLambdaFitter) +# + from TrkVKalVrtFitter.TrkVKalVrtFitterConf import Trk__TrkVKalVrtFitter + self.BPhysLambdabarFitter = Trk__TrkVKalVrtFitter(name = derivation+"_BPhysVKLbFitter", + Extrapolator = self.InDetExtrapolator, + IterationNumber = 30, + FirstMeasuredPoint = True, + MakeExtendedVertex = True, + InputParticleMasses = [139.57,938.272], + MassForConstraint = 1115.68) + ToolSvc += self.BPhysLambdabarFitter + print(self.BPhysLambdabarFitter) +# + from TrkVKalVrtFitter.TrkVKalVrtFitterConf import Trk__TrkVKalVrtFitter + self.BPhysGammaFitter = Trk__TrkVKalVrtFitter(name = derivation+"_BPhysVKGFitter", + Extrapolator = self.InDetExtrapolator, + IterationNumber = 30, + Robustness = 6, + FirstMeasuredPoint = True, + MakeExtendedVertex = True, + usePhiCnst = True, + useThetaCnst = True, + InputParticleMasses = [0.511,0.511]) + ToolSvc += self.BPhysGammaFitter + print(self.BPhysGammaFitter) + +##-------------------------------------------- +## Setup V0Finder +##-------------------------------------------- + from InDetV0Finder.InDetV0FinderConf import InDet__InDetV0FinderTool + self.V0FinderTool = InDet__InDetV0FinderTool(name = derivation+'_InDetV0FinderTool', + TrackParticleCollection = InDetKeys.xAODTrackParticleContainer(), + #TrackParticleCollection = "InDetTrackParticles", + useV0Fitter = True, + VertexFitterTool = self.BPhysV0Fitter, + VKVertexFitterTool = self.BPhysVKVertexFitter, + KshortFitterTool = self.BPhysKshortFitter, + LambdaFitterTool = self.BPhysLambdaFitter, + LambdabarFitterTool = self.BPhysLambdabarFitter, + GammaFitterTool = self.BPhysGammaFitter, + TrackSelectorTool = self.InDetV0VxTrackSelector, + VertexPointEstimator = self.V0VtxPointEstimator, + doSimpleV0 = False, + #doSimpleV0 = True, + #useorigin = False, + #useTRTplusTRT = True, + #useTRTplusSi = True, + useVertexCollection = True, + 
#trkSelPV = True, + Extrapolator = self.InDetExtrapolator) + #Extrapolator = "Trk::Extrapolator/InDetExtrapolator") + ToolSvc += self.V0FinderTool + print(self.V0FinderTool) + + #from InDetV0Finder.InDetV0FinderConf import InDet__InDetV0Finder + #self.InDetV0Finder = InDet__InDetV0Finder(name = derivation+'InDetV0Finder', + # #decorateV0 = False, + # InDetV0FinderToolName = V0FinderTool, + # V0ContainerName = InDetKeys.xAODV0VertexContainer(), + # KshortContainerName = InDetKeys.xAODKshortVertexContainer(), + # LambdaContainerName = InDetKeys.xAODLambdaVertexContainer(), + # LambdabarContainerName = InDetKeys.xAODLambdabarVertexContainer()) + #topSequence += self.InDetV0Finder + #print self.InDetV0Finder + + diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/configureVertexing.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/configureVertexing.py new file mode 100644 index 0000000000000000000000000000000000000000..a3746d92509cfd9dea64d859f4fa12750fbc2097 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/share/configureVertexing.py @@ -0,0 +1,148 @@ +#==================================================================== +# Collection of tools required by JpsiFinder +# Based on JpsiUpsilonTools/configureServices.py +#==================================================================== + +class BPHYVertexTools: + + def __init__(self, derivation = ""): + + if derivation == "": + print ('--------> FATAL: BPHYVertexTools, "derivation" string not set!') + import sys + sys.exit() + + from AthenaCommon.AppMgr import ToolSvc + + # set up extrapolator + from TrkExTools.AtlasExtrapolator import AtlasExtrapolator + self.InDetExtrapolator = AtlasExtrapolator(name = derivation+"_AtlasExtrapolator") + ToolSvc += self.InDetExtrapolator + print((self.InDetExtrapolator)) + + # Vertex point estimator + from InDetConversionFinderTools.InDetConversionFinderToolsConf import InDet__VertexPointEstimator + 
self.VtxPointEstimator = InDet__VertexPointEstimator(name = derivation+"_VtxPointEstimator", + MinDeltaR = [-10000.,-10000.,-10000.], + MaxDeltaR = [10000.,10000.,10000.], + MaxPhi = [10000., 10000., 10000.], + MaxChi2OfVtxEstimation = 2000.) + ToolSvc += self.VtxPointEstimator + print((self.VtxPointEstimator)) + + from InDetConversionFinderTools.InDetConversionFinderToolsConf import InDet__ConversionFinderUtils + self.InDetConversionHelper = InDet__ConversionFinderUtils(name = derivation+"_InDetConversionFinderUtils") + ToolSvc += self.InDetConversionHelper + print((self.InDetConversionHelper)) + + from InDetRecExample.InDetKeys import InDetKeys + + from InDetAssociationTools.InDetAssociationToolsConf import InDet__InDetPRD_AssociationToolGangedPixels + self.InDetPrdAssociationTool = InDet__InDetPRD_AssociationToolGangedPixels(name = derivation+"_InDetPrdAssociationTool", + PixelClusterAmbiguitiesMapName = InDetKeys.GangedPixelMap()) + ToolSvc += self.InDetPrdAssociationTool + print((self.InDetPrdAssociationTool)) + + from RecExConfig.RecFlags import rec + CountDeadModulesAfterLastHit=False + #rec.Commissioning=False + + from InDetTrackHoleSearch.InDetTrackHoleSearchConf import InDet__InDetTrackHoleSearchTool + self.InDetHoleSearchTool = InDet__InDetTrackHoleSearchTool(name = derivation+"_InDetHoleSearchTool", + Extrapolator = self.InDetExtrapolator, + #usePixel = DetFlags.haveRIO.pixel_on(), + #useSCT = DetFlags.haveRIO.SCT_on(), + #Commissioning = rec.Commissioning()) + CountDeadModulesAfterLastHit = CountDeadModulesAfterLastHit) + ToolSvc += self.InDetHoleSearchTool + print((self.InDetHoleSearchTool)) + + from InDetTrackSummaryHelperTool.InDetTrackSummaryHelperToolConf import InDet__InDetTrackSummaryHelperTool + self.InDetTrackSummaryHelperTool = InDet__InDetTrackSummaryHelperTool(name = derivation+"_InDetSummaryHelper", + AssoTool = self.InDetPrdAssociationTool, + DoSharedHits = False, + HoleSearch = self.InDetHoleSearchTool, + usePixel = 
DetFlags.haveRIO.pixel_on(), + useSCT = DetFlags.haveRIO.SCT_on(), + useTRT = DetFlags.haveRIO.TRT_on()) + ToolSvc += self.InDetTrackSummaryHelperTool + print((self.InDetTrackSummaryHelperTool)) + + from TrkTrackSummaryTool.TrkTrackSummaryToolConf import Trk__TrackSummaryTool + self.InDetTrackSummaryTool = Trk__TrackSummaryTool(name = derivation+"_InDetTrackSummaryTool", + InDetSummaryHelperTool = self.InDetTrackSummaryHelperTool, + doSharedHits = False, + #InDetHoleSearchTool = self.InDetHoleSearchTool + ) + ToolSvc += self.InDetTrackSummaryTool + print((self.InDetTrackSummaryTool)) + + # ===================================================== + # THIS IS WHERE THE USER CONTROLS MAIN TRACK SELECTIONS + # ===================================================== + from InDetTrackSelectorTool.InDetTrackSelectorToolConf import InDet__InDetDetailedTrackSelectorTool + self.InDetTrackSelectorTool = InDet__InDetDetailedTrackSelectorTool(name = derivation+"_InDetDetailedTrackSelectorTool", + pTMin = 400.0, + IPd0Max = 10000.0, + IPz0Max = 10000.0, + z0Max = 10000.0, + sigIPd0Max = 10000.0, + sigIPz0Max = 10000.0, + d0significanceMax = -1., + z0significanceMax = -1., + etaMax = 9999., + useTrackSummaryInfo = True, + nHitBLayer = 0, + nHitPix = 1, + nHitBLayerPlusPix = 1, + nHitSct = 2, + nHitSi = 3, + nHitTrt = 0, + nHitTrtHighEFractionMax = 10000.0, + useSharedHitInfo = False, + useTrackQualityInfo = True, + fitChi2OnNdfMax = 10000.0, + TrtMaxEtaAcceptance = 1.9, + TrackSummaryTool = self.InDetTrackSummaryTool, + Extrapolator = self.InDetExtrapolator + ) + + ToolSvc += self.InDetTrackSelectorTool + print((self.InDetTrackSelectorTool)) + + # configure vertex fitters + from TrkVKalVrtFitter.TrkVKalVrtFitterConf import Trk__TrkVKalVrtFitter + self.TrkVKalVrtFitter = Trk__TrkVKalVrtFitter( + name = derivation+"_VKalVrtFitter", + Extrapolator = self.InDetExtrapolator, + # MagFieldSvc = InDetMagField, + FirstMeasuredPoint = False, + #FirstMeasuredPointLimit = True, + 
MakeExtendedVertex = True) + ToolSvc += self.TrkVKalVrtFitter + print((self.TrkVKalVrtFitter)) + + from TrkVertexFitterUtils.TrkVertexFitterUtilsConf import Trk__FullLinearizedTrackFactory + self.InDetLinFactory = Trk__FullLinearizedTrackFactory(name = derivation+"_Trk::InDetFullLinearizedTrackFactory", + Extrapolator = self.InDetExtrapolator, + # MagneticFieldTool = InDetMagField + ) + ToolSvc += self.InDetLinFactory + print((self.InDetLinFactory)) + + + from TrkV0Fitter.TrkV0FitterConf import Trk__TrkV0VertexFitter + self.TrkV0Fitter = Trk__TrkV0VertexFitter(name = derivation+"_TrkV0FitterName", + MaxIterations = 10, + Use_deltaR = False, + Extrapolator = self.InDetExtrapolator, + # MagneticFieldTool = InDetMagField + ) + ToolSvc += self.TrkV0Fitter + print((self.TrkV0Fitter)) + + # Primary vertex refitting + from TrkVertexFitterUtils.TrkVertexFitterUtilsConf import Trk__KalmanVertexUpdator + self.VertexUpdator = Trk__KalmanVertexUpdator(name = derivation+"_KalmanVertexUpdator") + ToolSvc += self.VertexUpdator + print((self.VertexUpdator)) diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/AugOriginalCounts.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/AugOriginalCounts.cxx new file mode 100644 index 0000000000000000000000000000000000000000..68c294d70fbe60396b5eb7985cf15a199dafda44 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/AugOriginalCounts.cxx @@ -0,0 +1,150 @@ +/* + Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration +*/ + +/** + * @file AugOriginalCounts.cxx + * + */ + +#include "DerivationFrameworkBPhys/AugOriginalCounts.h" +#include <StoreGate/WriteDecorHandle.h> +#include "GaudiKernel/EventContext.h" + +using namespace xAOD; +namespace DerivationFramework { + + AugOriginalCounts::AugOriginalCounts(const std::string& t, + const std::string& n, + const IInterface* p) : + AthAlgTool(t,n,p), + m_TrackContainername("InDetTrackParticles"), + 
m_PVContainername("PrimaryVertices") + { + declareInterface<DerivationFramework::IAugmentationTool>(this); + + declareProperty("TrackContainer", m_TrackContainername); + declareProperty("VertexContainer", m_PVContainername); + declareProperty("AddPVCountsByType", m_addPVCountsByType = false); + // decorate PVs with track counts and/or sqrt(sum(pt^2)) + // (needed if track collection will be thinned) + declareProperty("AddNTracksToPVs", m_addNTracksToPVs = false); + declareProperty("AddSqrtPt2SumToPVs", m_addSqrtPt2SumToPVs = false); + } + + StatusCode AugOriginalCounts::initialize() + { + ATH_CHECK(m_TrackContainername.initialize(SG::AllowEmpty)); + ATH_CHECK(m_PVContainername.initialize(SG::AllowEmpty)); + + if(!m_PVContainername.empty()){ + std::string pvstring = "EventInfo.OriginalCount_"; + pvstring += m_PVContainername.key(); + m_OrigPVNTracks = std::move(pvstring); + ATH_CHECK(m_OrigPVNTracks.initialize()); + } + if ( m_addPVCountsByType ) { + std::string pv0string = "EventInfo.OriginalCount_type0_"+m_PVContainername.key(); + std::string pv1string = "EventInfo.OriginalCount_type1_"+m_PVContainername.key(); + std::string pv2string = "EventInfo.OriginalCount_type2_"+m_PVContainername.key(); + std::string pv3string = "EventInfo.OriginalCount_type3_"+m_PVContainername.key(); + std::string pvUstring = "EventInfo.OriginalCount_typeUnknown_"+m_PVContainername.key(); + m_OrigNtype0 = std::move(pv0string); + m_OrigNtype1 = std::move(pv1string); + m_OrigNtype2 = std::move(pv2string); + m_OrigNtype3 = std::move(pv3string); + m_OrigNtypeUnknown = std::move(pvUstring); + ATH_CHECK(m_OrigNtype0.initialize()); + ATH_CHECK(m_OrigNtype1.initialize()); + ATH_CHECK(m_OrigNtype2.initialize()); + ATH_CHECK(m_OrigNtype3.initialize()); + ATH_CHECK(m_OrigNtypeUnknown.initialize()); + } + if ( m_addSqrtPt2SumToPVs ) { + std::string trackcon = m_PVContainername.key(); + trackcon += ".OriginalCount_"; + trackcon += m_TrackContainername.key(); + m_OrigSqrtPt2Sum = std::move(trackcon); + 
ATH_CHECK(m_OrigSqrtPt2Sum.initialize()); + } + if ( m_addNTracksToPVs ) { + std::string name = m_PVContainername.key(); + name+= ".OrigNTracks"; + m_d_nPVTracks = std::move(name); + ATH_CHECK(m_d_nPVTracks.initialize()); + } + if(!m_TrackContainername.empty()){ + m_OrigNTracksKeys = "EventInfo.OriginalCount_" + m_TrackContainername.key(); + ATH_CHECK(m_OrigNTracksKeys.initialize()); + } + return StatusCode::SUCCESS; + } + + StatusCode AugOriginalCounts::addBranches() const + { + + const EventContext& ctx = Gaudi::Hive::currentContext(); + + if(!m_PVContainername.empty()){ + + SG::WriteDecorHandle<xAOD::EventInfo, int> PV_count(m_OrigPVNTracks, ctx); + SG::ReadHandle<xAOD::VertexContainer> vertices(m_PVContainername, ctx); + PV_count(0) = vertices->size(); + + if ( m_addPVCountsByType ) { + SG::WriteDecorHandle<xAOD::EventInfo, int> PV0_count(m_OrigNtype0, ctx); + SG::WriteDecorHandle<xAOD::EventInfo, int> PV1_count(m_OrigNtype1, ctx); + SG::WriteDecorHandle<xAOD::EventInfo, int> PV2_count(m_OrigNtype2, ctx); + SG::WriteDecorHandle<xAOD::EventInfo, int> PV3_count(m_OrigNtype3, ctx); + SG::WriteDecorHandle<xAOD::EventInfo, int> PVUnk_count(m_OrigNtypeUnknown, ctx); + + // now count + constexpr int nvtypes = 5; + int nvtc[] = {0, 0, 0, 0, 0}; + for (auto vtx : *vertices) { + VxType::VertexType vt = vtx->vertexType(); + if ( vt >=0 && vt < nvtypes ) { + nvtc[vt]++; // vertex types 0 - 3 + } else { + nvtc[nvtypes-1]++; // unknown + } + } + PV0_count(0) = nvtc[0]; + PV1_count(0) = nvtc[1]; + PV2_count(0) = nvtc[2]; + PV3_count(0) = nvtc[3]; + PVUnk_count(0) = nvtc[4]; + } // m_addPVCountsByType + + // decorate PVs with track counts + // (needed if track collection will be thinned) + if ( m_addNTracksToPVs ) { + SG::WriteDecorHandle<xAOD::VertexContainer, int> d_nPVTracks(m_d_nPVTracks, ctx); + for (auto vtx : *vertices) { + d_nPVTracks(*vtx) = (int)vtx->nTrackParticles(); + } + } // m_addNTracksToPVs + + // decorate PVs with sqrt(sum(pt^2)) of tracks + // (needed if 
track collection will be thinned) + if ( m_addSqrtPt2SumToPVs ) { + SG::WriteDecorHandle<xAOD::VertexContainer, float> d_pvSqrtPt2Sum(m_OrigSqrtPt2Sum, ctx); + for (auto vtx : *vertices) { + float sqrtPt2Sum(0.); + for (auto tp : vtx->trackParticleLinks()) { + sqrtPt2Sum += std::sqrt(pow((*tp)->pt(),2)); + } + d_pvSqrtPt2Sum(*vtx) = sqrtPt2Sum; + } + } // m_addSqrtPt2SumToPVs + } + + if(!m_TrackContainername.empty()){ + SG::ReadHandle<xAOD::TrackParticleContainer> tracks(m_TrackContainername, ctx); + SG::WriteDecorHandle<xAOD::EventInfo, int> track_count(m_OrigNTracksKeys, ctx); + track_count(0) = tracks->size(); + } + + return StatusCode::SUCCESS; + } +} diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BMuonTrackIsoTool.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BMuonTrackIsoTool.cxx new file mode 100644 index 0000000000000000000000000000000000000000..6cc8a053a1e858be7600953689a5440e63b3c3e1 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BMuonTrackIsoTool.cxx @@ -0,0 +1,448 @@ +/* + Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration +*/ + +//============================================================================ +// BMuonTrackIsoTool.cxx +//============================================================================ +// +// Author : Wolfgang Walkowiak <Wolfgang.Walkowiak@cern.ch.> +// Changes: +// +// Add muon track isolation information for different configurations, +// different track selections and different PV-to-SV association methods. +// +// For an usage example see BPHY8.py . +// +// Job options provided by this class: +// - MuonContainerName -- name of muon container +// - IsolationConeSizes -- List of isolation cone sizes +// - IsoTrkImpLogChi2Max -- List of maximum log(chi2) cuts for +// association of tracks to the primary +// vertex picked. 
+// - IsoDoTrkImpLogChi2Cut -- apply log(chi2) cuts +// 0 : don't apply log(chi2) cuts +// 1 : apply log(chi2) cuts +// 2 : apply log(chi2) cuts [former version] +// (The last two job options must +// contain the same number of elements +// as the IsolationConeSizes list.) +// +//============================================================================ +// +#include "DerivationFrameworkBPhys/BMuonTrackIsoTool.h" +#include "xAODMuon/MuonContainer.h" +#include "xAODEventInfo/EventInfo.h" +#include "xAODBPhys/BPhysHelper.h" +#include "InDetTrackSelectionTool/IInDetTrackSelectionTool.h" +#include "EventPrimitives/EventPrimitivesHelpers.h" + +#include "boost/format.hpp" +#include "TVector3.h" +#include <algorithm> +#include <sstream> +#include <string> + +namespace DerivationFramework { + + //------------------------------------------------------------------------- + // + // helper class + BMuonTrackIsoTool::MuIsoItem::MuIsoItem(std::string Name, + std::string Bname, + std::string Prefix) : + BaseItem(Name, Bname, Prefix) { + } + + BMuonTrackIsoTool::MuIsoItem::~MuIsoItem() { + } + + void BMuonTrackIsoTool::MuIsoItem::resetVals() { + vIsoValues.clear(); + vNTracks.clear(); + vMuons.clear(); + } + + void BMuonTrackIsoTool::MuIsoItem::copyVals(const BaseItem& item) { + copyVals((const MuIsoItem&)item); + } + + void BMuonTrackIsoTool::MuIsoItem::copyVals(const MuIsoItem& item) { + vIsoValues = item.vIsoValues; + vNTracks = item.vNTracks; + vMuons = item.vMuons; + } + + void BMuonTrackIsoTool::MuIsoItem::fill(double isoValue, int nTracks, + const xAOD::Muon* muon) { + vIsoValues.push_back(isoValue); + vNTracks.push_back(nTracks); + vMuons.push_back(muon); + } + + std::string BMuonTrackIsoTool::MuIsoItem::muIsoName() { + return buildName(); + } + + std::string BMuonTrackIsoTool::MuIsoItem::nTracksName() { + return buildName("Ntracks"); + } + + std::string BMuonTrackIsoTool::MuIsoItem::muLinkName() { + return buildName("", "_muLink"); + } + + 
//-------------------------------------------------------------------------- + BMuonTrackIsoTool::BMuonTrackIsoTool(const std::string& t, + const std::string& n, + const IInterface* p) + : BPhysVertexTrackBase(t,n,p) { + + declareInterface<DerivationFramework::IAugmentationTool>(this); + + declareProperty("MuonContainerName" , m_muonContainerName=""); + declareProperty("IsolationConeSizes" , m_isoConeSizes); + declareProperty("IsoTrkImpLogChi2Max" , m_isoTrkImpLogChi2Max); + declareProperty("IsoDoTrkImpLogChi2Cut" , m_isoDoTrkImpLogChi2Cut); + } + //-------------------------------------------------------------------------- + StatusCode BMuonTrackIsoTool::initializeHook() { + + ATH_MSG_DEBUG("BMuonTrackIsoTool::initializeHook() -- begin"); + + // check like-sized arrays + if ( m_isoConeSizes.size() != m_isoTrkImpLogChi2Max.size() || + m_isoConeSizes.size() != m_isoDoTrkImpLogChi2Cut.size() ) { + ATH_MSG_ERROR("Size mismatch of IsolationConeSizes (" + << m_isoConeSizes.size() + << "), IsoTrkImpChi2Max (" + << m_isoTrkImpLogChi2Max.size() + << ") and IsoDoTrkImpChi2Cut (" + << m_isoDoTrkImpLogChi2Cut.size() << ") lists!"); + } + + // check muon container name + if ( m_muonContainerName == "" ) { + ATH_MSG_ERROR("No muon container name provided!"); + } + + // initialize results array + initResults(); + + ATH_MSG_DEBUG("BMuonTrackIsoTool::initializeHook() -- end"); + + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + StatusCode BMuonTrackIsoTool::finalizeHook() { + + ATH_MSG_DEBUG("BMuonTrackIsoTool::finalizeHook()"); + + // everything all right + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + StatusCode + BMuonTrackIsoTool::addBranchesVCSetupHook(size_t ivc) const { + + ATH_MSG_DEBUG("BMuonTrackIsoTool::addBranchesVCLoopHook() -- begin"); + + ATH_MSG_DEBUG("BMuonTrackisoTool::addBranchesVCSetupHook: " + << "Vertex container index " << ivc + 
<< " for collection " << m_vertexContainerNames[ivc] + << " with prefix " << m_branchPrefixes[ivc]); + + setResultsPrefix(m_branchPrefixes[ivc]); + + ATH_MSG_DEBUG("BMuonTrackIsoTool::addBranchesVCSetupHook() -- end"); + + // nothing to do here + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + StatusCode + BMuonTrackIsoTool::addBranchesSVLoopHook(const xAOD::Vertex* vtx) const { + + ATH_MSG_DEBUG("BMuonTrackIsoTool::addBranchesSVLoopHook() -- begin"); + + // retrieve muon container + m_muons = NULL; + if ( m_muonContainerName != "" ) { + CHECK(evtStore()->retrieve(m_muons, m_muonContainerName)); + ATH_MSG_DEBUG("Found muon collection with key " << m_muonContainerName); + } + + ATH_MSG_DEBUG("BMuonTrackIsoTool::addBranchesSVLoopHook(): " + "calculate muon track isolation ..."); + CHECK(calculateValues(vtx)); + + ATH_MSG_DEBUG("BMuonTrackIsoTool::addBranchesSVLoopHook(): " + "save muon track isolation ..."); + // save the isolation values + CHECK(saveIsolation(vtx)); + + ATH_MSG_DEBUG("BMuonTrackIsoTool::addBranchesSVLoopHook() -- end"); + + // nothing to do here + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + // Calculate track isolation variables -- faster method with caching + //-------------------------------------------------------------------------- + StatusCode + BMuonTrackIsoTool::calcValuesHook(const xAOD::Vertex* vtx, + const unsigned int ipv, + const unsigned int its, + const unsigned int itt) const { + + ATH_MSG_DEBUG("calcValuesHook: ipv: " << ipv + << ", its: " << its << ", itt: " << itt); + + // candidate tracks and momentum + xAOD::BPhysHelper cand(vtx); + TVector3 candP = cand.totalP(); + const xAOD::Vertex* candRefPV = cand.pv(m_pvAssocTypes[ipv]); + + MuonBag muons; + // TrackBag candMuTracks = findAllMuonIdTracksInDecay(cand, muons); + std::vector<TVector3> candMuTracks = findMuonRefTrackMomenta(cand, muons); + + 
TrackBag tracks = selectTracks(m_tracks, cand, ipv, its, itt); + + ATH_MSG_DEBUG("calcValuesHook: found " << muons.size() << + " muons and " << candMuTracks.size() << + " tracks from B cand; " << tracks.size() << + " tracks to check."); + + // loop over isolation cones (pt and deltaR) + unsigned int nCones = m_isoConeSizes.size(); + for (unsigned int ic = 0; ic < nCones; ++ic) { + MuIsoItem& iso = m_results[ic][its][ipv][itt]; + // reset + iso.resetVals(); + + // loop over refitted ID tracks for muons in candidate + unsigned int id(0); + // for (TrackBag::const_iterator muTrkItr = candMuTracks.begin(); + // muTrkItr != candMuTracks.end(); ++muTrkItr, ++id) { + for (id=0; id < candMuTracks.size(); ++id) { + + // make sure there was an ID track for the muon + // if ( *muTrkItr != NULL ) { + if ( candMuTracks[id].Mag() > 0. ) { + + const double& coneSize = m_isoConeSizes[ic]; + const double& logChi2Max = m_isoTrkImpLogChi2Max[ic]; + const int& doLogChi2 = m_isoDoTrkImpLogChi2Cut[ic]; + + double nTracksInCone = 0; + double ptSumInCone = 0.; + + double isoValue(-5.); + + // make sure candRefPV exists + if ( candRefPV != NULL ) { + + for (TrackBag::const_iterator trkItr = tracks.begin(); + trkItr != tracks.end(); ++trkItr) { + double deltaR = candMuTracks[id].DeltaR((*trkItr)->p4().Vect()); + if ( deltaR < coneSize ) { + double logChi2 = (doLogChi2 > 0) ? + getTrackCandPVLogChi2(*trkItr, candRefPV) : -9999.; + // next line needed exactly as is for backward validation + if ( doLogChi2 == 2 ) logChi2 = abs(logChi2); + if ( doLogChi2 == 0 || logChi2 < logChi2Max ) { + nTracksInCone++; + ptSumInCone += (*trkItr)->pt(); + } + } // deltaR + } + // calculate result + if ( ptSumInCone + candMuTracks[id].Pt() > 0. ) { + isoValue = candMuTracks[id].Pt() + / ( ptSumInCone + candMuTracks[id].Pt() ); + } + + } else { + isoValue = -10.; + } // if candRefPV != NULL + + const xAOD::Muon* muon = id < muons.size() ? 
muons.at(id) : NULL; + iso.fill(isoValue, nTracksInCone, muon); + } // if *muTrkItr != NULL + } // for muTrkItr + } // for ic + + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + // Fill track isolation values from cache if found + //-------------------------------------------------------------------------- + bool BMuonTrackIsoTool::fastFillHook(const xAOD::Vertex* vtx, + const int ipv) const { + + ATH_MSG_DEBUG("fastFillHook: ipv: " << ipv); + + bool found(false); + + StringIntMap_t::iterator itpv = + m_pvAssocResMap.find(buildPvAssocCacheName(vtx, ipv)); + if ( itpv != m_pvAssocResMap.end() ) { + found = true; + unsigned int nCones = m_isoConeSizes.size(); + unsigned int nTrackSels = m_trackSelectionTools.size(); + unsigned int nTrackTypes = m_useTrackTypes.size(); + for (unsigned int its = 0; its < nTrackSels; ++its) { + for (unsigned int ic = 0; ic < nCones; ++ic) { + for (unsigned int itt = 0; itt < nTrackTypes; ++itt) { + m_results[ic][its][ipv][itt] + .copyVals(m_results[ic][its][itpv->second][itt]); + } // for its + } // for ic + } // for itt + } // if found + + ATH_MSG_DEBUG("fastFillHook: cache index: " + << buildPvAssocCacheName(vtx, ipv) + << ", found ? " << found + << ", ipv_ref: " + << (found ? 
itpv->second : -1)); + + return found; + } + //-------------------------------------------------------------------------- + StatusCode + BMuonTrackIsoTool::saveIsolation(const xAOD::Vertex* vtx) const { + + typedef ElementLink<xAOD::MuonContainer> MuonLink_t; + typedef std::vector<MuonLink_t> MuonLinkVector_t; + + unsigned int nCones = m_isoConeSizes.size(); + unsigned int nTrackSels = m_trackSelectionTools.size(); + unsigned int nPvAssocs = m_pvAssocTypes.size(); + unsigned int nTrackTypes = m_useTrackTypes.size(); + + for (unsigned int its = 0; its < nTrackSels; ++its) { + for (unsigned int ipv = 0; ipv < nPvAssocs; ++ipv) { + for (unsigned int ic = 0; ic < nCones; ++ic) { + for (unsigned int itt = 0; itt < nTrackTypes; ++itt) { + MuIsoItem result = m_results[ic][its][ipv][itt]; + SG::AuxElement::Decorator< std::vector<float> > + dv_iso_values(result.muIsoName()); + SG::AuxElement::Decorator< std::vector<int> > + dv_iso_ntracks(result.nTracksName()); + dv_iso_values(*vtx) = result.vIsoValues; + dv_iso_ntracks(*vtx) = result.vNTracks; + ATH_MSG_DEBUG("BMuonTrackIsoTool::saveIsolation() -- isobn: " + << result.muIsoName() << ", ntbn: " + << result.nTracksName()); + ATH_MSG_DEBUG("BMuonTrackIsoTool::saveIsolation() -- vertex: (" + << vtx->x() << ", " + << vtx->y() << ", " + << vtx->z() << "), N(iso): " + << result.vIsoValues.size() << ", N(nTracks): " + << result.vNTracks.size()); + MuonLinkVector_t links; + for (const xAOD::Muon* muon : result.vMuons) { + if ( muon != NULL ) { + MuonLink_t link(muon, *m_muons); + links.push_back(link); + } else { + ATH_MSG_WARNING("BMuonTrackIsoTool::saveIsolation(): " + << " *muon == NULL -- EL not saved!"); + } + } + vtx->auxdecor<MuonLinkVector_t>(result.muLinkName()) = links; + ATH_MSG_DEBUG("BMuonTrackIsoTool::saveIsolation() -- muLinks: " + << "N_saved = " << links.size() ); + } // for itt + } // for ic + } // for ipv + } // for its + + return StatusCode::SUCCESS; + } + 
//-------------------------------------------------------------------------- + void BMuonTrackIsoTool::setResultsPrefix(std::string prefix) const { + + ATH_MSG_DEBUG("BMuonTrackIsoTool::setResultsPrefix -- begin"); + + unsigned int nCones = m_isoConeSizes.size(); + unsigned int nTrackSels = m_trackSelectionTools.size(); + unsigned int nPvAssocs = m_pvAssocTypes.size(); + unsigned int nTrackTypes = m_useTrackTypes.size(); + + for (unsigned int its = 0; its < nTrackSels; ++its) { + for (unsigned int ipv = 0; ipv < nPvAssocs; ++ipv) { + for (unsigned int ic = 0; ic < nCones; ++ic) { + for (unsigned int itt = 0; itt < nTrackTypes; ++itt) { + m_results[ic][its][ipv][itt].setPrefix(prefix); + } // for itt + } // for ic + } // for ipv + } // for its + + ATH_MSG_DEBUG("BMuonTrackIsoTool::setResultsPrefix -- end"); + } + //-------------------------------------------------------------------------- + void BMuonTrackIsoTool::initResults() { + + unsigned int nCones = m_isoConeSizes.size(); + unsigned int nTrackSels = m_trackSelectionTools.size(); + unsigned int nPvAssocs = m_pvAssocTypes.size(); + unsigned int nTrackTypes = m_useTrackTypes.size(); + + ATH_MSG_DEBUG("BMuonTrackIsoTool::initResults -- begin"); + ATH_MSG_DEBUG("BMuonTrackIsoTool::initResults : nCones = " << nCones); + ATH_MSG_DEBUG("BMuonTrackIsoTool::initResults : nTrackSels = " + << nTrackSels); + ATH_MSG_DEBUG("BMuonTrackIsoTool::initResults : nPvAssocs = " + << nPvAssocs); + ATH_MSG_DEBUG("BMuonTrackIsoTool::initResults : nTrackTypes = " + << nTrackTypes); + m_results.resize(boost::extents[nCones][nTrackSels][nPvAssocs][nTrackTypes]); + for (unsigned int its = 0; its < nTrackSels; ++its) { + ATH_MSG_DEBUG("BMuonTrackIsoTool::initResults -- its = " << its); + for (unsigned int ipv = 0; ipv < nPvAssocs; ++ipv) { + ATH_MSG_DEBUG("BMuonTrackIsoTool::initResults -- ipv = " << ipv); + for (unsigned int ic = 0; ic < nCones; ++ic) { + ATH_MSG_DEBUG("BMuonTrackIsoTool::initResults -- ic = " << ic); + for (unsigned int 
itt = 0; itt < nTrackTypes; ++itt) { + ATH_MSG_DEBUG("BMuonTrackIsoTool::initResults -- itt = " << itt); + + ATH_MSG_DEBUG("BMuonTrackIsoTool::initResults :" + << m_branchBaseName << buildBranchName(ic, its, + ipv, itt)); + + m_results[ic][its][ipv][itt].setup(buildBranchName(ic, its, + ipv, itt), + m_branchBaseName); + } // for itt + } // for ic + } // for ipv + } // for its + + ATH_MSG_DEBUG("BMuonTrackIsoTool::initResults -- end"); + } + //-------------------------------------------------------------------------- + std::string BMuonTrackIsoTool::buildBranchName(unsigned int ic, + unsigned int its, + unsigned int ipv, + unsigned int itt) const { + ATH_MSG_DEBUG("BMuonTrackIsoTool::buildBranchName -- begin"); + + double coneSize = m_isoConeSizes[ic]; + double logChi2Max = m_isoTrkImpLogChi2Max[ic]; + int doLogChi2 = m_isoDoTrkImpLogChi2Cut[ic]; + + // format it nicely + boost::format f("%02d_LC%02dd%1d_%s"); + f % (int)(coneSize*10.) % (int)(logChi2Max*10.) % doLogChi2 + % buildBranchBaseName(its, ipv, itt); + + ATH_MSG_DEBUG("BMuonTrackIsoTool::buildBranchName: " << f.str()); + + return f.str(); + } + //-------------------------------------------------------------------------- +} diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BPhysAddMuonBasedInvMass.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BPhysAddMuonBasedInvMass.cxx new file mode 100644 index 0000000000000000000000000000000000000000..79a285ffcfe3b100fc47719c7082c3479b1c7127 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BPhysAddMuonBasedInvMass.cxx @@ -0,0 +1,702 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ + +/** + * @file BPhysAddMuonBasedInvMass.cxx + * @author Wolfgang Walkowiak <wolfgang.walkowiak@cern.ch> + */ + +// includes +#include "DerivationFrameworkBPhys/BPhysAddMuonBasedInvMass.h" +#include "xAODTracking/VertexContainer.h" +#include 
"xAODTracking/VertexAuxContainer.h" +#include "xAODTracking/TrackParticleContainer.h" +#include "xAODMuon/MuonContainer.h" +#include "xAODBase/IParticleHelpers.h" +#include "xAODBPhys/BPhysHelper.h" +// for Amg::error(): +#include "EventPrimitives/EventPrimitivesHelpers.h" + +#include <sstream> +#include <limits> + +namespace DerivationFramework { + + //-------------------------------------------------------------------------- + BPhysAddMuonBasedInvMass::BPhysAddMuonBasedInvMass(const std::string& t, + const std::string& n, + const IInterface* p) + : AthAlgTool(t,n,p), m_trackToVertexTool("Reco::TrackToVertex") { + + declareInterface<DerivationFramework::IAugmentationTool>(this); + + // Declare branch prefix + declareProperty("BranchPrefix", m_branchPrefix = "_NONE_"); + // Necessary containers + declareProperty("VertexContainerName" , m_vertexContainerName = ""); + // track mass assignment + declareProperty("TrkMasses", m_trkMasses = std::vector<double>()); + // track-to-vertex tool + declareProperty("TrackToVertexTool" , m_trackToVertexTool); + // adjust track from muon kinematics? 
+ declareProperty("AdjustToMuonKinematics", m_adjustToMuonKinematics = false); + // add minChi2ToAnyPV decoration + declareProperty("AddMinChi2ToAnyPVMode" , m_addMinChi2ToAnyPVMode = 0); + // name of container for primary vertices + declareProperty("PrimaryVertexContainerName", m_pvContainerName = ""); + // minimum number of tracks in PV for PV to be considered in calculation + // of minChi2MuToAnyPV variable + declareProperty("MinNTracksInPV" , m_minNTracksInPV = 0); + // list of primary vertex types to consider + declareProperty("PVTypesToConsider" , m_pvTypesToConsider = {1,3}); + // PV-to-SV association types to be considered + declareProperty("DoVertexType" , m_doVertexType = 63); + } + //-------------------------------------------------------------------------- + StatusCode BPhysAddMuonBasedInvMass::initialize() { + + ATH_MSG_DEBUG("BPhysAddMuonBasedInvMass::initialize() -- begin"); + + // candidate vertices container + if ( m_vertexContainerName == "" ) { + ATH_MSG_ERROR("No vertex container name provided!"); + } + + // TrackToVertexTool + CHECK(m_trackToVertexTool.retrieve()); + + // PV container if needed + if ( m_addMinChi2ToAnyPVMode > 0 && m_pvContainerName == "" ) { + ATH_MSG_ERROR("No primary vertex container name provided!"); + } + + // PV type list if needed + if ( m_addMinChi2ToAnyPVMode > 0 && m_pvTypesToConsider.size() == 0 ) { + ATH_MSG_ERROR("No primary vertex types to be considered provided!"); + } + + // PV-to-SV association type if needed + if ( m_addMinChi2ToAnyPVMode > 1 && m_doVertexType < 1 ) { + ATH_MSG_ERROR("No PV-to-SV association types to be considered provided!"); + } + + ATH_MSG_INFO("BPhysAddMuonBasedInvMass::initialize(): " + << "AdjustToMuonKinematics = " << m_adjustToMuonKinematics); + + ATH_MSG_INFO("BPhysAddMuonBasedInvMass::initialize(): " + << "AddMinChi2ToAnyPVMode = " << m_addMinChi2ToAnyPVMode); + + // initialize PV-to-SV association type vector + initPvAssocTypeVec(); + + 
ATH_MSG_DEBUG("BPhysAddMuonBasedInvMass::initialize() -- end"); + + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + StatusCode BPhysAddMuonBasedInvMass::finalize() { + + ATH_MSG_DEBUG("BPhysAddMuonBasedInvMass::finalize()"); + + // everything all right + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + StatusCode BPhysAddMuonBasedInvMass::addBranches() const { + + ATH_MSG_DEBUG("BPhysAddMuonBasedInvMass::addBranches() -- begin"); + + // vertex container and its auxilliary store + xAOD::VertexContainer* vtxContainer = NULL; + xAOD::VertexAuxContainer* vtxAuxContainer = NULL; + + // retrieve from StoreGate + CHECK(evtStore()->retrieve(vtxContainer , m_vertexContainerName)); + CHECK(evtStore()->retrieve(vtxAuxContainer, m_vertexContainerName+"Aux.")); + + const xAOD::VertexContainer* pvContainer = NULL; + if ( m_addMinChi2ToAnyPVMode > 0 ) { + CHECK(evtStore()->retrieve(pvContainer , m_pvContainerName)); + } + + // apply the decorations + std::string branchPrefix(""); + if ( m_branchPrefix != "" && m_branchPrefix != "_NONE_" ) { + branchPrefix = m_branchPrefix + "_"; + } + + // loop over secondary vertices + for (xAOD::VertexContainer::iterator vtxItr = vtxContainer->begin(); + vtxItr!=vtxContainer->end(); ++vtxItr) { + + xAOD::BPhysHelper vtx(*vtxItr); + + SG::AuxElement::Decorator< float > + d_mucalc_mass(branchPrefix+"MUCALC_mass"); + SG::AuxElement::Decorator< float > + d_mucalc_massErr(branchPrefix+"MUCALC_massErr"); + + // TODO: check number of muons requested! + std::pair<double,double> MuCalcCandMass = + getMuCalcMass(vtx, m_trkMasses, 2); + + // fill default values + d_mucalc_mass(**vtxItr) = MuCalcCandMass.first; + d_mucalc_massErr(**vtxItr) = MuCalcCandMass.second; + + // add MinChi2ToAnyPV information if requested + if ( m_addMinChi2ToAnyPVMode > 0 ) { + + if (m_addMinChi2ToAnyPVMode == 1) { + // w.r.t. 
to all PVs + SG::AuxElement::Decorator< float > + d_minChi2ToAnyPV(branchPrefix+"minLogChi2ToAnyPV"); + // fill it + d_minChi2ToAnyPV(**vtxItr) = + getMinChi2ToAnyPV(vtx, pvContainer, m_pvTypesToConsider, + m_minNTracksInPV, m_addMinChi2ToAnyPVMode, + xAOD::BPhysHelper::PV_MIN_A0); // dummy + } else if (m_addMinChi2ToAnyPVMode > 1 && m_addMinChi2ToAnyPVMode < 4) { + // skip or replace associated PVs + for (auto pvAssocType : m_pvAssocTypes) { + SG::AuxElement::Decorator< float > + d_minChi2ToAnyPV(branchPrefix+"minLogChi2ToAnyPV_" + +xAOD::BPhysHelper::pv_type_str[pvAssocType]); + // fill it + d_minChi2ToAnyPV(**vtxItr) = + getMinChi2ToAnyPV(vtx, pvContainer, m_pvTypesToConsider, + m_minNTracksInPV, m_addMinChi2ToAnyPVMode, + pvAssocType); + + } // for pvAssocType + } else { + ATH_MSG_WARNING("BPhysAddMuonBasedInvMass::addBranches():" + << " Undefined AddMinChi2ToAnyPVMode value: " + << m_addMinChi2ToAnyPVMode); + } + } // if m_addMinChi2ToAnyPVMode + } // end of loop over vertices + + // clean cache + clearAdjTpCache(); + + ATH_MSG_DEBUG("BPhysAddMuonBasedInvMass::addBranches() -- end"); + + // nothing to do here + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + // Calculate invariant mass based on muon system information if available. + // + std::pair<double, double> + BPhysAddMuonBasedInvMass::getMuCalcMass(xAOD::BPhysHelper& vtx, + std::vector<double> trkMasses, + int nMuRequested) const { + + std::pair<double, double> mpe(0., -1.); + + std::pair<TrackBag, int> tracksWithMu = getTracksWithMuons(vtx); + + if ( tracksWithMu.second == nMuRequested ) { + if ( tracksWithMu.first.size() == trkMasses.size() ) { + mpe = getInvariantMassWithError(tracksWithMu.first, + trkMasses, + vtx.vtx()->position()); + } else { + ATH_MSG_WARNING("BPhysAddMuonBasedInvMass::getMuCalcMass:" + << " vector sizes disagree!" 
+ << " tracksWithMu: " << tracksWithMu.first.size() + << " BtrkMasses: " << trkMasses.size()); + } + } else { + mpe.second = -10 - tracksWithMu.second; + ATH_MSG_DEBUG("BPhysAddMuonBasedInvMass::getMuCalcMass:" + << " muon number mismatch:" + << " tracksWithMu: " << tracksWithMu.second + << " requested: " << nMuRequested); + } + return mpe; + } + //-------------------------------------------------------------------------- + // Obtain a set of ID tracks for a set of muons + //-------------------------------------------------------------------------- + TrackBag BPhysAddMuonBasedInvMass::getIdTracksForMuons(MuonBag& muons) const { + + TrackBag muTracks; + + for (auto &muon : muons) { + if ( muon != nullptr ) { + const xAOD::TrackParticle* trk = + muon->trackParticle(xAOD::Muon::InnerDetectorTrackParticle); + if ( trk != nullptr ) { + muTracks.push_back(trk); + } else { + ATH_MSG_WARNING("BPhysAddMuonBasedInvMass::getIdTracksForMuon:" + << " no ID track for muon found."); + } + } else { + ATH_MSG_WARNING("BPhysAddMuonBasedInvMass::getIdTracksForMuon:" + << " muon pointer is NULL!"); + } + } // for muon + return muTracks; + } + //-------------------------------------------------------------------------- + // Obtain a set of tracks with muon track information if available + //-------------------------------------------------------------------------- + std::pair<TrackBag, int> + BPhysAddMuonBasedInvMass::getTracksWithMuons(xAOD::BPhysHelper& vtx) const { + + TrackBag tracksWithMu; + int nMuFound = 0; + std::vector<int> vnMuFound; + + MuonBag muons = findAllMuonsInDecay(vtx); + + if ( muons.size() > 0 ) { + for (int itrk=0; itrk<vtx.nRefTrks(); ++itrk) { + // only charged tracks are of interest + if ( abs(vtx.refTrkCharge(itrk)) > 0. 
) { + const xAOD::TrackParticle* trkParticle = + (xAOD::TrackParticle*)vtx.refTrkOrigin(itrk); + for (unsigned int imu = 0; imu<muons.size(); ++imu) { + if ( vtx.refTrkOrigin(itrk) == + muons.at(imu)->trackParticle(xAOD::Muon::InnerDetectorTrackParticle) ) { + const xAOD::TrackParticle* trkMuon = + adjustTrackParticle(muons.at(imu)); + if ( trkMuon != NULL ) { + trkParticle = trkMuon; + nMuFound++; + break; + } + } + } // for imu + tracksWithMu.push_back(trkParticle); + vnMuFound.push_back(nMuFound); + } // for charged track + } // for itrk + } else { + ATH_MSG_DEBUG("BPhysAddMuonBasedInvMass::getTracksWithMuons: " + "vertex contains no muons, but " + << vtx.nRefTrks() << " refitted tracks ..."); + } + // debug output + std::string svnMuFound = "["; + for (unsigned int i=0; i<vnMuFound.size(); ++i) { + svnMuFound += std::to_string(vnMuFound[i]) + ','; + } + svnMuFound.back() = ']'; + ATH_MSG_DEBUG("BPhysAddMuonBasedInvMass::getTracksWithMuons: " + "nMuFound = " << nMuFound + << "\nvnMuFound = " << svnMuFound ); + + return std::pair<TrackBag, int>(tracksWithMu, nMuFound); + } + //-------------------------------------------------------------------------- + // adjustTrackParticle: extract primary track particle from muon + // if configured adjust pt, eta and phi of it before returning + // a pointer to it. 
+ //-------------------------------------------------------------------------- + const xAOD::TrackParticle* BPhysAddMuonBasedInvMass + ::adjustTrackParticle(const xAOD::Muon* muon) const { + + const xAOD::TrackParticle* tp = NULL; + const xAOD::TrackParticle* org = muon->primaryTrackParticle(); + + if ( m_adjustToMuonKinematics ) { + if ( org != NULL ) { + TpMap_t::iterator it = m_adjTpCache.find(org); + if ( it != m_adjTpCache.end() ) { + // copy cached link + tp = it->second; + ATH_MSG_DEBUG("adjustTrackParticle(): from cache: tp = " << tp); + } else { + // copy object -- this does not work because of seg fault later + // xAOD::TrackParticle* newTp = new xAOD::TrackParticle(*org); + + // create new object and copy properties + xAOD::TrackParticle* newTp = new xAOD::TrackParticle(); + newTp->makePrivateStore(*org); + + // ajdust pt, eta and phi to the muon's properties + xAOD::IParticle::FourMom_t p4 = muon->p4(); + float qoverp = p4.P() > 0. ? 1./p4.P() : 10.e6; + if ( org->qOverP() < 0. 
) qoverp *= -1.; + newTp->setDefiningParameters(org->d0(), org->z0(), + p4.Phi(), p4.Theta(), qoverp); + // cache new TrackParticle + m_adjTpCache[org] = newTp; + tp = newTp; + ATH_MSG_DEBUG("adjustTrackParticle(): new tp = " << tp + << " org = " << org); + } // if it != end() + } // if != NULL + } else { + // copy pointer + tp = org; + ATH_MSG_DEBUG("adjustTrackParticle(): copy: org: " << org + << " -> tp: " << tp); + } + + // debug output + if ( org != NULL ) { + ATH_MSG_DEBUG("adjustTrackParticle(): org: " << org << " (" + << org->d0() << "," << org->z0() << "," << org->phi0() + << "," << org->theta() << "," << org->qOverP() << ") pt: " + << org->pt()); + } else { + ATH_MSG_DEBUG("adjustTrackParticle(): org = NULL"); + } + if ( org != NULL ) { + ATH_MSG_DEBUG("adjustTrackParticle(): tp : " << tp << " (" + << tp->d0() << "," << tp->z0() << "," << tp->phi0() + << "," << tp->theta() << "," << tp->qOverP() << ") pt: " + << tp->pt()); + } else { + ATH_MSG_DEBUG("adjustTrackParticle(): tp = NULL"); + } + return tp; + } + //-------------------------------------------------------------------------- + // clearAdjTpCache: clear the cache of adjusted TrackParticles + //-------------------------------------------------------------------------- + void BPhysAddMuonBasedInvMass::clearAdjTpCache() const { + + for ( TpMap_t::iterator it = m_adjTpCache.begin(); it != m_adjTpCache.end(); + ++it) { + if ( it->second != NULL ) { + const_cast<xAOD::TrackParticle*>(it->second)->releasePrivateStore(); + delete(it->second); + it->second = NULL; + } + m_adjTpCache.clear(); + } + } + //-------------------------------------------------------------------------- + // findAllMuonsInDecay: returns a vector of xAOD::Muon objects found + // in this vertex and subsequent decay vertices. + // Recursively calls itself if necessary. 
+ //-------------------------------------------------------------------------- + MuonBag BPhysAddMuonBasedInvMass::findAllMuonsInDecay(xAOD::BPhysHelper& vtx) + const { + + MuonBag muons = vtx.muons(); + + // loop over preceeding vertices + for (int ivtx = 0; ivtx < vtx.nPrecedingVertices(); ++ivtx) { + xAOD::BPhysHelper precVtx(vtx.precedingVertex(ivtx)); + MuonBag muonsForVtx = findAllMuonsInDecay(precVtx); + muons.insert(muons.end(), muonsForVtx.begin(), muonsForVtx.end()); + } + return muons; + } + //-------------------------------------------------------------------------- + // getMinChi2ToAnyPV: + // Find minimum chi2 distance of signal muons w.r.t any primary vertex + // of required types and with a minimum number of tracks cut. + // It also depends on the mode w.r.t. the treatment of the associated + // primary vertex and the type of PV-to-SV association. + // Returns this minimum chi2. + //-------------------------------------------------------------------------- + double + BPhysAddMuonBasedInvMass::getMinChi2ToAnyPV(xAOD::BPhysHelper& vtx, + const xAOD::VertexContainer* + pvContainer, + const std::vector<int>& pvtypes, + const int minNTracksInPV, + const int mode, + const xAOD::BPhysHelper::pv_type& + pvAssocType) const { + + MuonBag muons = findAllMuonsInDecay(vtx); + TrackBag tracks = getIdTracksForMuons(muons); + const xAOD::Vertex* origPV = nullptr; + const xAOD::Vertex* refPV = nullptr; + + if ( mode > 1 ) { + // need to obtain original PV + origPV = vtx.origPv(pvAssocType); + if ( origPV == nullptr ) { + ATH_MSG_WARNING("BPhysAddMuonBasedInvMass::getMinChi2ToAnyPV:" + << " origPV == NULL for pvAssocType = " + << pvAssocType); + } + if ( mode > 2 ) { + refPV = vtx.pv(pvAssocType); + if ( refPV == nullptr ) { + ATH_MSG_WARNING("BPhysAddMuonBasedInvMass::getMinChi2ToAnyPV:" + << " refPV == NULL for pvAssocType = " + << pvAssocType); + } + } + } + + double minChi2 = std::numeric_limits<double>::max(); + + for (const auto pvtx : *pvContainer) { + if ( 
pvtx != nullptr ) { + if ( std::find(pvtypes.begin(),pvtypes.end(),pvtx->vertexType()) + != pvtypes.end() ) { + const xAOD::Vertex* cvtx = pvtx; + // switch if PV matches original PV and replacement is requested + if ( mode > 1 && pvtx == origPV ) { + // mode 2 -- skip + switch(mode) { + case 2: // skip current PV + continue; + break; + case 3: // replace by refitted PV + if ( refPV != nullptr ) { + cvtx = refPV; + } else { + ATH_MSG_WARNING("BPhysAddMuonBasedInvMass::getMinChi2ToAnyPV:" + << " refPV == NULL!"); + continue; + } + break; + } + } + if ( (int)cvtx->nTrackParticles() >= minNTracksInPV ) { + for (auto &track : tracks) { + const Amg::Vector3D pos = cvtx->position(); + minChi2 = std::min(minChi2, getTrackPVChi2(*track, pos)); + } // for track + } // if minNTracksInPV + } // if pvTypes in pvtypes vector + } else { + ATH_MSG_WARNING("BPhysAddMuonBasedInvMass::getMinChi2ToAnyPV:" + << " pvtx == NULL!"); + } // if vtx != nullptr + } // for pvtx + + return minChi2; + } + //-------------------------------------------------------------------------- + // getTrackPVChi2: + // Calculate the chi2 ( = log((d0/d0e)^2+(z0/z0e)^2) contribution of + // a track at the position closest to the given PV. + //-------------------------------------------------------------------------- + double + BPhysAddMuonBasedInvMass::getTrackPVChi2(const xAOD::TrackParticle& track, + const Amg::Vector3D& pos) const { + + double chi2 = -100.; + + const Trk::Perigee* trkPerigee = + m_trackToVertexTool->perigeeAtVertex(track, pos); + if ( trkPerigee != NULL ) { + const AmgSymMatrix(5)* locError = trkPerigee->covariance(); + if ( locError != NULL ) { + double d0 = trkPerigee->parameters()[Trk::d0]; + double z0 = trkPerigee->parameters()[Trk::z0]; + double d0Err = Amg::error(*locError, Trk::d0); + double z0Err = Amg::error(*locError, Trk::z0); + if (fabs(d0Err) > 0. && fabs(z0Err) > 0.) 
{ + chi2 = log( pow(d0/d0Err,2.0) + pow(z0/z0Err,2.0) ); + } else { + ATH_MSG_WARNING("BPhysAddMuonBasedInvMass::getTrackPVChi2():" + << " d0 = " << d0 << ", d0Err = " << d0Err + << ", z0 = " << z0 << ", z0Err = " << z0Err); + } + } // locError != NULL + delete trkPerigee; + trkPerigee = nullptr; + } else { + ATH_MSG_WARNING("getTrackPVChi2: Could not get perigee"); + } + + return chi2; +} + //-------------------------------------------------------------------------- + // getInvariantMassWithError: returns invariant mass and mass error given + // a set of tracks, their mass hypotheses and a reference position. + // Each track must have a separate mass hypothesis in + // the vector, and they must be in the same order as the tracks in the + // track vector. Otherwise it will go horribly wrong. + //-------------------------------------------------------------------------- + std::pair<double,double> BPhysAddMuonBasedInvMass:: + getInvariantMassWithError(TrackBag trksIn, + std::vector<double> massHypotheses, + const Amg::Vector3D& pos) const { + + std::pair<double, double> mass(0.,0.); + + // ensure there is a mass hypothesis for each track + if ( trksIn.size() == massHypotheses.size() ) { + std::vector<const xAOD::TrackParticle*>::iterator trItr = trksIn.begin(); + std::vector<const xAOD::TrackParticle*>::iterator trItrEnd =trksIn.end(); + std::vector<double>::iterator massHypItr = massHypotheses.begin(); + + double pxTmp,pyTmp,pzTmp,massTmp,eTmp; + + std::vector<TLorentzVector> trkMom; + TLorentzVector totMom; + std::vector<const Trk::Perigee*> trkPer; + + for (;trItr != trItrEnd; trItr++,massHypItr++){ + const Trk::Perigee* trkPerigee = + m_trackToVertexTool->perigeeAtVertex(*(*trItr), pos); + trkPer.push_back(trkPerigee); + if ( trkPerigee != NULL ) { + // try to get the correct momentum measurement + pxTmp = trkPerigee->momentum()[Trk::px]; + pyTmp = trkPerigee->momentum()[Trk::py]; + pzTmp = trkPerigee->momentum()[Trk::pz]; + 
ATH_MSG_DEBUG("getInvariantMassWithError(): pvec = (" + << pxTmp << "," << pyTmp << "," << pzTmp << ")"); + } else { + // otherwise default to this one + pxTmp = ((*trItr)->p4()).Px(); + pyTmp = ((*trItr)->p4()).Py(); + pzTmp = ((*trItr)->p4()).Pz(); + ATH_MSG_WARNING("BPhysAddMuonBasedInvMass::getInvariantMassError: " + "defaulting to simple momentum!"); + } + massTmp = *massHypItr; + eTmp = pxTmp*pxTmp+pyTmp*pyTmp+pzTmp*pzTmp+massTmp*massTmp; + eTmp = eTmp > 0. ? sqrt(eTmp) : 0.; + TLorentzVector tmpMom(pxTmp, pyTmp, pzTmp, eTmp); + trkMom.push_back(tmpMom); + totMom += tmpMom; + } + mass.first = totMom.M(); + double mErr2 = 0.; + // reset trItr + trItr = trksIn.begin(); + std::vector<TLorentzVector>::iterator tmItr = trkMom.begin(); + std::vector<const Trk::Perigee*>::iterator perItr = trkPer.begin(); + AmgVector(3) dMdP; + dMdP.setZero(); + for (; tmItr != trkMom.end(); ++tmItr, ++trItr, ++perItr) { + dMdP(0) = (totMom.E() * tmItr->Px()/tmItr->E() - totMom.Px())/totMom.M(); + dMdP(1) = (totMom.E() * tmItr->Py()/tmItr->E() - totMom.Py())/totMom.M(); + dMdP(2) = (totMom.E() * tmItr->Pz()/tmItr->E() - totMom.Pz())/totMom.M(); + if ( *perItr != NULL ) { + mErr2 += (dMdP.transpose() * getMomentumCov(*perItr) * dMdP)(0,0); + } else { + mErr2 += (dMdP.transpose() * getMomentumCov(*trItr ) * dMdP)(0,0); + } + } + mass.second = mErr2 > 0. ? sqrt(mErr2) : 0.; + // clean up + for ( perItr = trkPer.begin(); perItr != trkPer.end(); ++perItr) { + delete (*perItr); + } + } else { + ATH_MSG_WARNING("BPhysAddMuonBasedInvMass::getInvariantMassError: " + "size mismatch of tracks and mass hypotheses vectors!"); + } // if size comparison + + return mass; + } + //-------------------------------------------------------------------------- + // + // Extract the 3x3 momentum covariance matrix in (x,y,z) notation + // from the (phi, theta, qoverp) notation from a TrackParticle. 
+ // + //-------------------------------------------------------------------------- + AmgSymMatrix(3) BPhysAddMuonBasedInvMass + ::getMomentumCov(const xAOD::TrackParticle* track) const { + + AmgSymMatrix(3) cov; + cov.setZero(); + + if ( track != NULL ) { + cov = getMomentumCov( &track->perigeeParameters() ); + } + return cov; + } + //-------------------------------------------------------------------------- + // + // Extract the 3x3 momentum covariance matrix in (x,y,z) notation + // from the (phi, theta, qoverp) notation from a Perigee. + // + //-------------------------------------------------------------------------- + AmgSymMatrix(3) BPhysAddMuonBasedInvMass + ::getMomentumCov(const Trk::Perigee* perigee) const { + + AmgSymMatrix(3) cov; + cov.setZero(); + + if ( perigee != NULL ) { + cov = getMomentumCov(perigee->parameters(), *perigee->covariance()); + } + return cov; + } + //-------------------------------------------------------------------------- + // Extract the 3x3 momentum covariance matrix in (x,y,z) notation + // from the (phi, theta, qoverp) notation from a vector of + // track parameters and the error matrix + // + // Coding ideas orignally taken from + // V0Tools::massErrorVKalVrt(...), + // Code converted from BPhysToolBox::getMomentumCov(...). + //-------------------------------------------------------------------------- + // + AmgSymMatrix(3) BPhysAddMuonBasedInvMass + ::getMomentumCov(const AmgVector(5)& pars, + const AmgSymMatrix(5)& cMatrix) const { + + AmgSymMatrix(3) cov; + cov.setZero(); + + AmgMatrix(3,3) der; + der.setZero(); + + double phi = pars[Trk::phi]; + double theta = pars[Trk::theta]; + double qoverp = pars[Trk::qOverP]; + + if ( qoverp != 0. 
) { + AmgVector(3) p( cos(phi)*sin(theta)/fabs(qoverp), + sin(phi)*sin(theta)/fabs(qoverp), + cos(theta)/fabs(qoverp) ); + + // d(px,py,pz)/d(phi,theta,qoverp) + der(0,0) = - p.y(); + der(1,0) = p.x(); + der(2,0) = 0.; + der(0,1) = cos(phi) * p.z(); + der(1,1) = sin(phi) * p.z(); + der(2,1) = - sin(theta) / fabs(qoverp); + der(0,2) = - p.x()/qoverp; + der(1,2) = - p.y()/qoverp; + der(2,2) = - p.z()/qoverp; + + for (unsigned int i=0; i<3; i++) { + for (unsigned int j=0; j<3; j++) { + for (unsigned int k=0; k<3; k++) { + for (unsigned int l=0; l<3; l++) { + cov(i,j) += der(i,k)*cMatrix(k+2,l+2)*der(j,l); + } + } + } + } + + // debug output + ATH_MSG_DEBUG("BPhysAddMuonBasedInvMass::getTracksWithMuons:" + << "\nlocalErrCov:\n" + << std::setprecision(10) << cMatrix + << "\ncov:\n" + << std::setprecision(10) << cov + << "\np: " << std::setprecision(10) << p + << "\nder:\n" + << std::setprecision(10) << der); + } // if qoverp + + return cov; + } + //-------------------------------------------------------------------------- + // Initialize PV-to-SV association type vector + //-------------------------------------------------------------------------- + void BPhysAddMuonBasedInvMass::initPvAssocTypeVec() { + + m_pvAssocTypes.clear(); + for (unsigned int i=0; i<xAOD::BPhysHelper::n_pv_types; ++i) { + if ( (m_doVertexType & (1 << i)) > 0 ) + m_pvAssocTypes.push_back((xAOD::BPhysHelper::pv_type)i); + } + } + //-------------------------------------------------------------------------- +} diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BPhysConversionFinder.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BPhysConversionFinder.cxx new file mode 100644 index 0000000000000000000000000000000000000000..de43d2bf11847dcfcdb451c977a8f8ea5bc1a6ae --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BPhysConversionFinder.cxx @@ -0,0 +1,589 @@ +/* + Copyright (C) 2002-2019 CERN for the benefit of the ATLAS 
collaboration +*/ +///////////////////////////////////////////////////////////////// +// BPhysConversionFinder.cxx, (c) ATLAS Detector software +/////////////////////////////////////////////////////////////////// +// Author: A. Chisholm <andrew.chisholm@cern.ch> +#include "DerivationFrameworkBPhys/BPhysConversionFinder.h" +#include "xAODTracking/VertexContainer.h" +#include "xAODTracking/VertexAuxContainer.h" +#include "TrkVertexAnalysisUtils/V0Tools.h" +#include "GaudiKernel/IPartPropSvc.h" +#include "GeoPrimitives/GeoPrimitivesHelpers.h" +#include "TrkVertexFitterInterfaces/IVertexFitter.h" +#include "TrkVKalVrtFitter/TrkVKalVrtFitter.h" + +namespace DerivationFramework { + + BPhysConversionFinder::BPhysConversionFinder(const std::string& t, + const std::string& n, + const IInterface* p) : + AthAlgTool(t,n,p), + m_v0Tools("Trk::V0Tools"), + m_vertexFitter("Trk::TrkVKalVrtFitter"), + m_vertexEstimator("InDet::VertexPointEstimator"), + m_distanceTool("Trk::SeedNewtonDistanceFinder/InDetConversionTrkDistanceFinder"), + m_postSelector("InDet::ConversionPostSelector"), + m_cascadeFitter("Trk::TrkVKalVrtFitter"), + m_inputTrackParticleContainerName("InDetTrackParticles"), + m_conversionContainerName("BPhysConversionCandidates"), + m_maxDistBetweenTracks(10.0), + m_maxDeltaCotTheta(0.3), + m_requireDeltaM(true), + m_maxDeltaM(3000.0) + { + declareInterface<DerivationFramework::IAugmentationTool>(this); + + // Declare user-defined properties + declareProperty("DiMuonVertexContainer", m_diMuonCollectionToCheck); + declareProperty("PassFlagsToCheck", m_passFlagsToCheck); + declareProperty("V0Tools", m_v0Tools); + declareProperty("VertexFitterTool", m_vertexFitter); + declareProperty("VertexEstimator", m_vertexEstimator); + declareProperty("DistanceTool", m_distanceTool); + declareProperty("ConversionPostSelector", m_postSelector); + declareProperty("CascadeFitter", m_cascadeFitter); + declareProperty("InputTrackParticleContainerName", m_inputTrackParticleContainerName); + 
declareProperty("ConversionContainerName", m_conversionContainerName); + declareProperty("MaxDistBetweenTracks", m_maxDistBetweenTracks = 10.0); // Maximum allowed distance of minimum approach + declareProperty("MaxDeltaCotTheta", m_maxDeltaCotTheta = 0.3); // Maximum allowed dCotTheta between tracks + declareProperty("RequireDeltaM", m_requireDeltaM = true); // Only save a conversions if it's a chi_c,b candidate (must then pass "MaxDeltaM" requirement), if "False" all conversions in the event will be saved + declareProperty("MaxDeltaM", m_maxDeltaM = 3000.0); // Maximum mass difference between di-muon+conversion and di-muon + + } + + // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + + StatusCode BPhysConversionFinder::initialize() + { + + ATH_MSG_DEBUG("in initialize()"); + + ATH_CHECK( m_v0Tools.retrieve() ); + ATH_CHECK( m_vertexFitter.retrieve() ); + ATH_CHECK( m_vertexEstimator.retrieve() ); + ATH_CHECK( m_distanceTool.retrieve() ); + ATH_CHECK( m_postSelector.retrieve() ); + ATH_CHECK( m_cascadeFitter.retrieve() ); + + return StatusCode::SUCCESS; + + } + + // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + + StatusCode BPhysConversionFinder::finalize() + { + // everything all right + return StatusCode::SUCCESS; + } + + // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + + StatusCode BPhysConversionFinder::addBranches() const + { + + int nTrackPairs_Init = 0; + int nTrackPairs_Selected = 0; + int nConv_VertexFit = 0; + int nConv_Selected = 0; + int nConv_Selected_DeltaM = 0; + + std::vector<const xAOD::Vertex*> oniaVertices; + oniaVertices.clear(); + + //------------------------------------ + // Look for di-muons + //------------------------------------ + const xAOD::VertexContainer* diMuonContainer = NULL; + ATH_CHECK( evtStore()->retrieve(diMuonContainer, m_diMuonCollectionToCheck) ); + + if(diMuonContainer->size() == 0) { + + ATH_MSG_DEBUG("Vertex Container (" << 
m_diMuonCollectionToCheck << ") is empty"); + + } else { + + ATH_MSG_DEBUG("Vertex Container (" << m_diMuonCollectionToCheck << ") contains " << diMuonContainer->size() << " vertices"); + + for(xAOD::VertexContainer::const_iterator vtxItr = diMuonContainer->begin(); vtxItr != diMuonContainer->end(); ++vtxItr) { + + const xAOD::Vertex* vertex = (*vtxItr); + + bool passedHypothesis = false; + + for(const auto &flag : m_passFlagsToCheck) { + bool pass = vertex->auxdata<Char_t>(flag); + if(pass) passedHypothesis = true; + } + + if(passedHypothesis) { + oniaVertices.push_back(vertex); + } + + } + } + //------------------------------------ + + // Output conversion container + std::unique_ptr<xAOD::VertexContainer> conversionContainer( new xAOD::VertexContainer() ); + std::unique_ptr<xAOD::VertexAuxContainer> conversionAuxContainer( new xAOD::VertexAuxContainer() ); + conversionContainer->setStore(conversionAuxContainer.get()); + + // Only call conversion finder if we've found a di-muon candidate or + // we really want to look for conversions independently + const bool callConvFinder = !m_requireDeltaM || oniaVertices.size() > 0; + + if(callConvFinder) { + + // Retrieve track particles from StoreGate + const xAOD::TrackParticleContainer* inputTrackParticles = NULL; + ATH_CHECK( evtStore()->retrieve(inputTrackParticles,m_inputTrackParticleContainerName) ); + + ATH_MSG_DEBUG("Track particle container size " << inputTrackParticles->size()); + + // Track Selection + std::vector<const xAOD::TrackParticle*> posTracks; posTracks.clear(); + std::vector<const xAOD::TrackParticle*> negTracks; negTracks.clear(); + + // Track Loop + for(xAOD::TrackParticleContainer::const_iterator trkItr = inputTrackParticles->begin(); trkItr != inputTrackParticles->end(); ++trkItr) { + + const xAOD::TrackParticle* track = (*trkItr); + + uint8_t nSCT(0); + uint8_t nPIX(0); + + track->summaryValue(nPIX,xAOD::numberOfPixelHits); + track->summaryValue(nSCT,xAOD::numberOfSCTHits); + + // Don't want 
TRT-only tracks + // Require Si hits on all tracks + if( nSCT + nPIX < 1 ) continue; + + if( track->charge() > 0.0) { + posTracks.push_back(track); + } else { + negTracks.push_back(track); + } + + } // Track Loop + + ATH_MSG_DEBUG(posTracks.size() + negTracks.size() << " tracks pass pre-selection"); + + std::vector<const xAOD::TrackParticle*>::const_iterator tpIt1; + std::vector<const xAOD::TrackParticle*>::const_iterator tpIt2; + + // Pos Track Loop + for(tpIt1 = posTracks.begin(); tpIt1 != posTracks.end(); ++tpIt1) { + + const xAOD::TrackParticle* trackParticle1 = (*tpIt1); + + const Trk::Perigee& trackPerigee1 = trackParticle1->perigeeParameters(); + + // Neg Track Loop + for(tpIt2 = negTracks.begin(); tpIt2 != negTracks.end(); ++tpIt2) { + + if (*tpIt1 == *tpIt2) continue; + + const xAOD::TrackParticle* trackParticle2 = (*tpIt2); + + const Trk::Perigee& trackPerigee2 = trackParticle2->perigeeParameters(); + + nTrackPairs_Init++; + + //------------------------------------ + // Track pair selection + //------------------------------------ + const double deltaCotTheta = fabs(1./tan(trackPerigee1.parameters()[Trk::theta]) - 1./tan(trackPerigee2.parameters()[Trk::theta])); + if(deltaCotTheta > m_maxDeltaCotTheta) continue; + + double distance = 1000000.; + std::optional<std::pair<Amg::Vector3D,Amg::Vector3D>> result = m_distanceTool->CalculateMinimumDistance(trackParticle1->perigeeParameters(),trackParticle2->perigeeParameters() ); + bool gotDistance = result.has_value(); + if(gotDistance) distance = Amg::distance (result->first, result->second); + if(!gotDistance || (distance > m_maxDistBetweenTracks)) continue; + //------------------------------------ + + //------------------------------------ + // Estimate starting point + cuts on compatiblity of tracks + //------------------------------------ + int sflag = 0; + int errorcode = 0; + std::map<std::string, float> vertexOutput; + Amg::Vector3D startingPoint = 
m_vertexEstimator->getCirclesIntersectionPoint(&trackPerigee1,&trackPerigee2,sflag,errorcode, vertexOutput); + if(errorcode != 0) continue; + //------------------------------------ + + nTrackPairs_Selected++; + + std::vector<const xAOD::TrackParticle*> trackPair; + trackPair.clear(); + trackPair.push_back(trackParticle1); + trackPair.push_back(trackParticle2); + + // Do the vertex fit + std::unique_ptr<xAOD::Vertex> convVertexCandidate( m_vertexFitter->fit(trackPair, startingPoint) ); + + // Check for successful fit + if(convVertexCandidate != NULL) { + + ATH_MSG_DEBUG("Vertex Fit Succeeded"); + + convVertexCandidate->clearTracks(); + ElementLink<xAOD::TrackParticleContainer> newLink1; + newLink1.setElement(*tpIt1); + newLink1.setStorableObject(*inputTrackParticles); + ElementLink<xAOD::TrackParticleContainer> newLink2; + newLink2.setElement(*tpIt2); + newLink2.setStorableObject(*inputTrackParticles); + convVertexCandidate->addTrackAtVertex(newLink1); + convVertexCandidate->addTrackAtVertex(newLink2); + + nConv_VertexFit++; + + //------------------------------------ + // Post-vertexing cuts + //------------------------------------ + + // This is empty and only present for compatiblity. 
+ // The cut this informtion pertains to is not used for Si-Si conversions so this is OK + std::vector<Amg::Vector3D> positionList; + + // Apply Si-Si converion post-selection + if( !m_postSelector->selectConversionCandidate(convVertexCandidate.get(),0,positionList) ) { + convVertexCandidate.reset(); + continue; + } + //------------------------------------ + + nConv_Selected++; + + // Get photon momentum 3-vector + const xAOD::Vertex * constConvVertex = convVertexCandidate.get(); + Amg::Vector3D momentum = m_v0Tools->V0Momentum(constConvVertex); + + TLorentzVector photon; + photon.SetXYZM(momentum.x(),momentum.y(),momentum.z(),0.0); + + //------------------------------------ + // Check if conversion is consistent with a chi_c,b candidate + // by requiring a small mass difference w.r.t. any di-muon in event + //------------------------------------ + bool passDeltaM = false; + + // Use to keep track of which dimuon(s) gave a chi_c/b candidate + std::vector<const xAOD::Vertex*> candidateOniaVertices; + candidateOniaVertices.clear(); + + for ( std::vector<const xAOD::Vertex*>::const_iterator vtxItr = oniaVertices.begin(); vtxItr != oniaVertices.end(); ++vtxItr ) { + + const xAOD::Vertex* oniaVertex = (*vtxItr); + + std::vector<float> diMuon_Px = oniaVertex->auxdata< std::vector<float> >("RefTrackPx"); + std::vector<float> diMuon_Py = oniaVertex->auxdata< std::vector<float> >("RefTrackPy"); + std::vector<float> diMuon_Pz = oniaVertex->auxdata< std::vector<float> >("RefTrackPz"); + + TLorentzVector muon1, muon2; + muon1.SetXYZM(diMuon_Px.at(0),diMuon_Py.at(0),diMuon_Pz.at(0),105.658); + muon2.SetXYZM(diMuon_Px.at(1),diMuon_Py.at(1),diMuon_Pz.at(1),105.658); + + TLorentzVector diMuon = muon1 + muon2; + + const double deltaM = (diMuon+photon).M() - diMuon.M(); + + ATH_MSG_DEBUG("Candidate DeltaM = " << deltaM << " MeV DiMuon " << oniaVertex->index() << " ( Mass = " << diMuon.M() << " MeV )"); + + // Did we find a one di-muon + photon candidate with a mass diff. 
consistent with chi_c/b? + if(deltaM < m_maxDeltaM) { + passDeltaM = true; + candidateOniaVertices.push_back(oniaVertex); + } + + } + + // Only keep the conversion candidate if it's consistent with a chi_c,b decay + if(m_requireDeltaM && !passDeltaM) { + convVertexCandidate.reset(); + continue; + } + //------------------------------------ + + //------------------------------------ + // Final conversion candidates + //------------------------------------ + nConv_Selected_DeltaM++; + + // Keep track of which dimuon(s) gave a chi_c/b candidate + std::vector< ElementLink<xAOD::VertexContainer> > diMuonLinks; + diMuonLinks.clear(); + + // Output of cascade fits with various di-muon mass hypotheses + std::vector<float> fit_Psi1S_Px, fit_Psi1S_Py, fit_Psi1S_Pz, fit_Psi1S_M, fit_Psi1S_ChiSq; + std::vector<float> fit_Psi2S_Px, fit_Psi2S_Py, fit_Psi2S_Pz, fit_Psi2S_M, fit_Psi2S_ChiSq; + std::vector<float> fit_Upsi1S_Px, fit_Upsi1S_Py, fit_Upsi1S_Pz, fit_Upsi1S_M, fit_Upsi1S_ChiSq; + std::vector<float> fit_Upsi2S_Px, fit_Upsi2S_Py, fit_Upsi2S_Pz, fit_Upsi2S_M, fit_Upsi2S_ChiSq; + std::vector<float> fit_Upsi3S_Px, fit_Upsi3S_Py, fit_Upsi3S_Pz, fit_Upsi3S_M, fit_Upsi3S_ChiSq; + + // Loop over di-muon vertices associated with a candidate + for(std::vector<const xAOD::Vertex*>::const_iterator vtxItr = candidateOniaVertices.begin(); vtxItr != candidateOniaVertices.end(); ++vtxItr ) { + + //------------------------------------ + // Add an element link to each dimuon which formed a + // candidate, leading to the decision to save this conversion + //------------------------------------ + ElementLink<xAOD::VertexContainer> myLink; + myLink.setElement(*vtxItr); + myLink.setStorableObject(*diMuonContainer); + + if(!myLink.isValid()) { + ATH_MSG_WARNING("Invalid DiMuon ElementLink!"); + } + + diMuonLinks.push_back(myLink); + //------------------------------------ + + // Check which mass window this di-muon passed + bool passed_Psi = (*vtxItr)->auxdata<Char_t>("passed_Psi"); + bool 
passed_Upsi = (*vtxItr)->auxdata<Char_t>("passed_Upsi"); + + //------------------------------------ + // Cascade fit with J/psi mass hypothesis + //------------------------------------ + float fitChiSq_Psi1S = 99999; + TLorentzVector fitResult_Psi1S; + + // Only bother with the fit if di-muon mass is within the relveant range, + // but still fill an dummy 4-vector to preserve one to one correspondance with "DiMuonLinks" + if(passed_Psi) { + ATH_CHECK( doCascadeFit(*vtxItr,constConvVertex,3096.916,fitResult_Psi1S,fitChiSq_Psi1S) ); + } + + fit_Psi1S_Px.push_back(fitResult_Psi1S.Px()); + fit_Psi1S_Py.push_back(fitResult_Psi1S.Py()); + fit_Psi1S_Pz.push_back(fitResult_Psi1S.Pz()); + fit_Psi1S_M.push_back(fitResult_Psi1S.M()); + fit_Psi1S_ChiSq.push_back(fitChiSq_Psi1S); + + //------------------------------------ + // Cascade fit with psi(2S) mass hypothesis + //------------------------------------ + float fitChiSq_Psi2S = 99999; + TLorentzVector fitResult_Psi2S; + + // Only bother with the fit if di-muon mass is within the relveant range, + // but still fill an dummy 4-vector to preserve one to one correspondance with "DiMuonLinks" + if(passed_Psi) { + ATH_CHECK( doCascadeFit(*vtxItr,constConvVertex,3686.097,fitResult_Psi2S,fitChiSq_Psi2S) ); + } + + fit_Psi2S_Px.push_back(fitResult_Psi2S.Px()); + fit_Psi2S_Py.push_back(fitResult_Psi2S.Py()); + fit_Psi2S_Pz.push_back(fitResult_Psi2S.Pz()); + fit_Psi2S_M.push_back(fitResult_Psi2S.M()); + fit_Psi2S_ChiSq.push_back(fitChiSq_Psi2S); + + //------------------------------------ + // Cascade fit with Upsi(1S) mass hypothesis + //------------------------------------ + float fitChiSq_Upsi1S = 99999; + TLorentzVector fitResult_Upsi1S; + + // Only bother with the fit if di-muon mass is within the relveant range, + // but still fill an dummy 4-vector to preserve one to one correspondance with "DiMuonLinks" + if(passed_Upsi) { + ATH_CHECK( doCascadeFit(*vtxItr,constConvVertex,9460.30,fitResult_Upsi1S,fitChiSq_Upsi1S) ); + } + + 
fit_Upsi1S_Px.push_back(fitResult_Upsi1S.Px()); + fit_Upsi1S_Py.push_back(fitResult_Upsi1S.Py()); + fit_Upsi1S_Pz.push_back(fitResult_Upsi1S.Pz()); + fit_Upsi1S_M.push_back(fitResult_Upsi1S.M()); + fit_Upsi1S_ChiSq.push_back(fitChiSq_Upsi1S); + + //------------------------------------ + // Cascade fit with Upsi(2S) mass hypothesis + //------------------------------------ + float fitChiSq_Upsi2S = 99999; + TLorentzVector fitResult_Upsi2S; + + // Only bother with the fit if di-muon mass is within the relveant range, + // but still fill an dummy 4-vector to preserve one to one correspondance with "DiMuonLinks" + if(passed_Upsi) { + ATH_CHECK( doCascadeFit(*vtxItr,constConvVertex,10023.26,fitResult_Upsi2S,fitChiSq_Upsi2S) ); + } + + fit_Upsi2S_Px.push_back(fitResult_Upsi2S.Px()); + fit_Upsi2S_Py.push_back(fitResult_Upsi2S.Py()); + fit_Upsi2S_Pz.push_back(fitResult_Upsi2S.Pz()); + fit_Upsi2S_M.push_back(fitResult_Upsi2S.M()); + fit_Upsi2S_ChiSq.push_back(fitChiSq_Upsi2S); + + //------------------------------------ + // Cascade fit with Upsi(3S) mass hypothesis + //------------------------------------ + float fitChiSq_Upsi3S = 99999; + TLorentzVector fitResult_Upsi3S; + + // Only bother with the fit if di-muon mass is within the relveant range, + // but still fill an dummy 4-vector to preserve one to one correspondance with "DiMuonLinks" + if(passed_Upsi) { + ATH_CHECK( doCascadeFit(*vtxItr,constConvVertex,10355.2,fitResult_Upsi3S,fitChiSq_Upsi3S) ); + } + + fit_Upsi3S_Px.push_back(fitResult_Upsi3S.Px()); + fit_Upsi3S_Py.push_back(fitResult_Upsi3S.Py()); + fit_Upsi3S_Pz.push_back(fitResult_Upsi3S.Pz()); + fit_Upsi3S_M.push_back(fitResult_Upsi3S.M()); + fit_Upsi3S_ChiSq.push_back(fitChiSq_Upsi3S); + + } + + //------------------------------------ + // Decorate selected conversions + //------------------------------------ + ATH_MSG_DEBUG("Decorating conversion vertices"); + + convVertexCandidate->auxdata< std::vector< ElementLink<xAOD::VertexContainer> > >("DiMuonLinks") = 
diMuonLinks; + + convVertexCandidate->auxdata< std::vector<float> >("CascadeFit_Psi1S_Px") = fit_Psi1S_Px; + convVertexCandidate->auxdata< std::vector<float> >("CascadeFit_Psi1S_Py") = fit_Psi1S_Py; + convVertexCandidate->auxdata< std::vector<float> >("CascadeFit_Psi1S_Pz") = fit_Psi1S_Pz; + convVertexCandidate->auxdata< std::vector<float> >("CascadeFit_Psi1S_M") = fit_Psi1S_M; + convVertexCandidate->auxdata< std::vector<float> >("CascadeFit_Psi1S_ChiSq") = fit_Psi1S_ChiSq; + + convVertexCandidate->auxdata< std::vector<float> >("CascadeFit_Psi2S_Px") = fit_Psi2S_Px; + convVertexCandidate->auxdata< std::vector<float> >("CascadeFit_Psi2S_Py") = fit_Psi2S_Py; + convVertexCandidate->auxdata< std::vector<float> >("CascadeFit_Psi2S_Pz") = fit_Psi2S_Pz; + convVertexCandidate->auxdata< std::vector<float> >("CascadeFit_Psi2S_M") = fit_Psi2S_M; + convVertexCandidate->auxdata< std::vector<float> >("CascadeFit_Psi2S_ChiSq") = fit_Psi2S_ChiSq; + + convVertexCandidate->auxdata< std::vector<float> >("CascadeFit_Upsi1S_Px") = fit_Upsi1S_Px; + convVertexCandidate->auxdata< std::vector<float> >("CascadeFit_Upsi1S_Py") = fit_Upsi1S_Py; + convVertexCandidate->auxdata< std::vector<float> >("CascadeFit_Upsi1S_Pz") = fit_Upsi1S_Pz; + convVertexCandidate->auxdata< std::vector<float> >("CascadeFit_Upsi1S_M") = fit_Upsi1S_M; + convVertexCandidate->auxdata< std::vector<float> >("CascadeFit_Upsi1S_ChiSq") = fit_Upsi1S_ChiSq; + + convVertexCandidate->auxdata< std::vector<float> >("CascadeFit_Upsi2S_Px") = fit_Upsi2S_Px; + convVertexCandidate->auxdata< std::vector<float> >("CascadeFit_Upsi2S_Py") = fit_Upsi2S_Py; + convVertexCandidate->auxdata< std::vector<float> >("CascadeFit_Upsi2S_Pz") = fit_Upsi2S_Pz; + convVertexCandidate->auxdata< std::vector<float> >("CascadeFit_Upsi2S_M") = fit_Upsi2S_M; + convVertexCandidate->auxdata< std::vector<float> >("CascadeFit_Upsi2S_ChiSq") = fit_Upsi2S_ChiSq; + + convVertexCandidate->auxdata< std::vector<float> >("CascadeFit_Upsi3S_Px") = fit_Upsi3S_Px; + 
convVertexCandidate->auxdata< std::vector<float> >("CascadeFit_Upsi3S_Py") = fit_Upsi3S_Py; + convVertexCandidate->auxdata< std::vector<float> >("CascadeFit_Upsi3S_Pz") = fit_Upsi3S_Pz; + convVertexCandidate->auxdata< std::vector<float> >("CascadeFit_Upsi3S_M") = fit_Upsi3S_M; + convVertexCandidate->auxdata< std::vector<float> >("CascadeFit_Upsi3S_ChiSq") = fit_Upsi3S_ChiSq; + + convVertexCandidate->auxdata<float>("px") = momentum.x(); + convVertexCandidate->auxdata<float>("py") = momentum.y(); + convVertexCandidate->auxdata<float>("pz") = momentum.z(); + + convVertexCandidate->auxdata<float>("deltaCotThetaTrk") = deltaCotTheta; + convVertexCandidate->auxdata<float>("minimumDistanceTrk") = distance; + + convVertexCandidate->auxdata<float>("deltaPhiTracks") = vertexOutput["deltaPhiTracks"]; + convVertexCandidate->auxdata<float>("DR1R2") = vertexOutput["DR1R2"]; + + convVertexCandidate->auxdata<Char_t>("passed") = true; // Used in event skimming + + conversionContainer->push_back(convVertexCandidate.release()); + + } else { + ATH_MSG_DEBUG("Vertex Fit Failed"); + } + + } // Neg Track Loop + + } // Pos Track Loop + + } // callConvFinder + + // Write the results to StoreGate + CHECK(evtStore()->record(conversionContainer.release(), m_conversionContainerName)); + CHECK(evtStore()->record(conversionAuxContainer.release(), m_conversionContainerName+"Aux.")); + + ATH_MSG_DEBUG("-------------------------"); + ATH_MSG_DEBUG("Number of track pairs: " << nTrackPairs_Init); + ATH_MSG_DEBUG("Number of track pairs selected: " << nTrackPairs_Selected); + ATH_MSG_DEBUG("Number of successful vertex fits: " << nConv_VertexFit); + ATH_MSG_DEBUG("Number of selected vertices: " << nConv_Selected); + ATH_MSG_DEBUG("Number of selected vertices (after DeltaM req.): " << nConv_Selected_DeltaM); + + return StatusCode::SUCCESS; + } + + StatusCode BPhysConversionFinder::doCascadeFit(const xAOD::Vertex * diMuonVertex, const xAOD::Vertex * convVertex, const double diMuonMassConstraint, 
TLorentzVector & fitMom, float & chiSq) const { + + std::vector<const xAOD::TrackParticle*> diMuonTracks; + diMuonTracks.push_back(diMuonVertex->trackParticle(0)); + diMuonTracks.push_back(diMuonVertex->trackParticle(1)); + + std::vector<double> diMuonTrackMasses; + diMuonTrackMasses.push_back(105.658); + diMuonTrackMasses.push_back(105.658); + + std::vector<const xAOD::TrackParticle*> convTracks; + convTracks.push_back(convVertex->trackParticle(0)); + convTracks.push_back(convVertex->trackParticle(1)); + + std::vector<double> convTrackMasses; + convTrackMasses.push_back(0.511); + convTrackMasses.push_back(0.511); + + // Reset + std::unique_ptr<Trk::IVKalState> state = m_cascadeFitter->makeState(); + + // Set Robustness + m_cascadeFitter->setRobustness(0, *state); + + // Build up the topology + + // Vertex list + std::vector<Trk::VertexID> vrtList; + // V0 vertex + Trk::VertexID vID; + vID = m_cascadeFitter->startVertex(convTracks,convTrackMasses,*state,0.0); // Constrain converision mass to zero + + vrtList.push_back(vID); + + // chi_c/b vertex + Trk::VertexID vID2 = m_cascadeFitter->nextVertex(diMuonTracks,diMuonTrackMasses,vrtList,*state); + + std::vector<Trk::VertexID> cnstV; + cnstV.clear(); + if ( !m_cascadeFitter->addMassConstraint(vID2,diMuonTracks,cnstV,*state,diMuonMassConstraint).isSuccess() ) { + ATH_MSG_WARNING("addMassConstraint failed"); + } + + // Do the fit + std::unique_ptr<Trk::VxCascadeInfo> result(m_cascadeFitter->fitCascade(*state)); + + const std::vector< std::vector<TLorentzVector> > &moms = result->getParticleMoms(); + + // Check for a successful fit + if(result != NULL) { + + if(moms.size() > 2) ATH_MSG_WARNING("DoCascadeFit - More than two output momentum!?"); + + TLorentzVector conv_Fit = moms.at(0).at(0) + moms.at(0).at(1); + TLorentzVector diMuon_Fit = moms.at(1).at(0) + moms.at(1).at(1); + + // Momentum of DiMuon + photon system + fitMom = diMuon_Fit + conv_Fit; + + chiSq = result->fitChi2()/result->nDoF(); + + // Done with the fit 
result + result.reset(); + + } + + return StatusCode::SUCCESS; + + } + + +} diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BPhysMetadataBase.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BPhysMetadataBase.cxx new file mode 100644 index 0000000000000000000000000000000000000000..cfabb3f235681ce21ee2b1076458c6ed83aeec18 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BPhysMetadataBase.cxx @@ -0,0 +1,270 @@ +/* + Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration +*/ + +//============================================================================ +// BPhysMetadataBase.cxx +//============================================================================ +// +// Author : Wolfgang Walkowiak <Wolfgang.Walkowiak@cern.ch.> +// Changes: +// - w.w., 2017-01-22: Added use of BPhysMetaDataTool. +// - w.w., 2017-05-27: Removed use of BPhysMetaDataTool and +// IOVDbMetaDataTool. +// - w.w., 2019-12-05: Added long and vector<long> types +// +// Store JO metadata in the output file. +// +// It uses a FileMetaData object to store job option information +// as metadata in a specific branch whose name needs to prefixed by +// the derivation format name. +// +// This is a base class. Inherit from it to add the job options you want +// to store. For a usage example, see +// Bmumu_metadata.h / Bmumu_metadata.cxx +// and +// BPHY8.py . +// +// Job options provided by the base class: +// - DerivationName -- assign the name of the derivation format +// - MetadataFolderName -- assign the name of the metadata folder, +// should start with the derivation format name, +// defaults to DerivationName if not set. 
+// +//============================================================================ +// + +#include "DerivationFrameworkBPhys/BPhysMetadataBase.h" +#include "xAODMetaData/FileMetaData.h" +#include "xAODMetaData/FileMetaDataAuxInfo.h" + +namespace DerivationFramework { + + //-------------------------------------------------------------------------- + BPhysMetadataBase::BPhysMetadataBase(const std::string& t, + const std::string& n, + const IInterface* p) + : AthAlgTool(t,n,p), + m_outputMetaStore("StoreGateSvc/MetaDataStore", n) { + + declareInterface<DerivationFramework::IAugmentationTool>(this); + + // Declare derivation format name + declareProperty("DerivationName", m_derivationName = "_NOSUCHFORMAT_"); + + // Declare metadata folder name (should start with derivation name) + declareProperty("MetadataFolderName", m_mdFolderName = "_NONE_"); + + // Prefix would typically be the derivation format name + declareProperty("Prefix", m_prefix = ""); + + } + //-------------------------------------------------------------------------- + StatusCode BPhysMetadataBase::initialize() { + + ATH_MSG_DEBUG("BPhysMetaDataBase::initialize() -- begin"); + + // handle general prefix + if ( m_prefix == "" ) { + if ( m_derivationName == "_NOSUCHFORMAT_" ) { + m_prefix = name() +"_"; + } else { + m_prefix = m_derivationName + "_"; + } + } + + CHECK( saveMetaDataBPhys() ); + + ATH_MSG_DEBUG("BPhysMetaDataBase::initialize() -- end"); + + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + StatusCode BPhysMetadataBase::finalize() { + + ATH_MSG_DEBUG("BPhysMetaDataBase::finalize()"); + + // everything all right + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + StatusCode BPhysMetadataBase::addBranches() const { + + // nothing to do here + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- +#define 
SET_VALUES_IMP( TYPE, MAP ) \ + for (auto const &ent : MAP) { \ + fm->auxdata< TYPE >( m_prefix + ent.first ) = ent.second; \ + } + + StatusCode BPhysMetadataBase::saveMetaDataBPhys() const { + + ATH_MSG_DEBUG("BPhysMetaDataBase::saveMetaDataBPhys() -- begin"); + + std::string mdFolderKey = buildFolderName() + "_MetaData"; + // protection + if ( m_outputMetaStore->contains< xAOD::FileMetaData >( mdFolderKey ) ) { + ATH_MSG_WARNING("saveMetaDataBPhys2: " + "xAOD::FileMetaData already in output: " + << mdFolderKey + << " -- BPhys metadata will NOT be saved!"); + } else { + // create a FileMetaData object + auto fm = std::make_unique< xAOD::FileMetaData >(); + auto fmAux = std::make_unique< xAOD::FileMetaDataAuxInfo >(); + fm->setStore( fmAux.get() ); + + // fill it + fm->auxdata< std::string >(m_prefix+"DerivationName" ) = + m_derivationName; + fm->auxdata< std::string >(m_prefix+"MetaDataFolderName") = + m_mdFolderName; + + // fill it with contents of maps + SET_VALUES_IMP( int , m_propInt ); + SET_VALUES_IMP( long , m_propLong ); + SET_VALUES_IMP( double , m_propDouble ); + SET_VALUES_IMP( bool , m_propBool ); + SET_VALUES_IMP( std::string , m_propString ); + SET_VALUES_IMP( std::vector<int> , m_propVInt ); + SET_VALUES_IMP( std::vector<long> , m_propVLong ); + SET_VALUES_IMP( std::vector<double> , m_propVDouble ); + SET_VALUES_IMP( std::vector<bool> , m_propVBool ); + SET_VALUES_IMP( std::vector<std::string>, m_propVString ); + + // record it + ATH_CHECK( m_outputMetaStore->record( std::move(fm), mdFolderKey ) ); + ATH_CHECK( m_outputMetaStore->record( std::move(fmAux), + mdFolderKey+"Aux." 
) ); + } + + return StatusCode::SUCCESS; + } +#undef SET_VALUES_IMP + //-------------------------------------------------------------------------- + std::string BPhysMetadataBase::buildFolderName(const std::string& fname) const { + + std::string result = fname; + if ( m_mdFolderName != "_NONE_" && m_mdFolderName != "" ) { + result += m_mdFolderName; + } else { + if ( m_derivationName != "_NOSUCHFORMAT_" && m_derivationName != "" ) { + result += m_derivationName; + } else { + // default to the tool's name + result += name(); + } + } + return result; + } + //-------------------------------------------------------------------------- + void BPhysMetadataBase::recordPropertyI(const std::string& name, int val) { + ATH_MSG_INFO("Calling recordProperty(int)"); + declareProperty(name, m_propInt[name] = val); + } + //-------------------------------------------------------------------------- + void BPhysMetadataBase::recordPropertyL(const std::string& name, long val) { + ATH_MSG_INFO("Calling recordProperty(long)"); + declareProperty(name, m_propLong[name] = val); + } + //-------------------------------------------------------------------------- + void BPhysMetadataBase::recordPropertyD(const std::string& name, double val) { + ATH_MSG_INFO("Calling recordProperty(double)"); + declareProperty(name, m_propDouble[name] = val); + } + //-------------------------------------------------------------------------- + void BPhysMetadataBase::recordPropertyB(const std::string& name, bool val) { + ATH_MSG_INFO("Calling recordProperty(bool)"); + declareProperty(name, m_propBool[name] = val); + } + //-------------------------------------------------------------------------- + void BPhysMetadataBase::recordPropertyS(const std::string& name, const std::string& val) { + ATH_MSG_INFO("Calling recordProperty(string)"); + declareProperty(name, m_propString[name] = val); + } + //-------------------------------------------------------------------------- + void 
BPhysMetadataBase::recordPropertyVI(const std::string& name, + const std::vector<int>& val) { + ATH_MSG_INFO("Calling recordProperty(vector<int>)"); + declareProperty(name, m_propVInt[name] = val); + } + //-------------------------------------------------------------------------- + void BPhysMetadataBase::recordPropertyVL(const std::string& name, + const std::vector<long>& val) { + ATH_MSG_INFO("Calling recordProperty(vector<long>)"); + declareProperty(name, m_propVLong[name] = val); + } + //-------------------------------------------------------------------------- + void BPhysMetadataBase::recordPropertyVD(const std::string& name, + const std::vector<double>& val) { + ATH_MSG_INFO("Calling recordProperty(vector<double>)"); + declareProperty(name, m_propVDouble[name] = val); + } + //-------------------------------------------------------------------------- + void BPhysMetadataBase::recordPropertyVB(const std::string& name, + const std::vector<bool>& val) { + ATH_MSG_INFO("Calling recordProperty(vector<bool>)"); + declareProperty(name, m_propVBool[name] = val); + } + //-------------------------------------------------------------------------- + void BPhysMetadataBase::recordPropertyVS(const std::string& name, + const std::vector<std::string>& val) { + ATH_MSG_INFO("Calling recordProperty(vector<string>)"); + declareProperty(name, m_propVString[name] = val); + } + //-------------------------------------------------------------------------- + std::string BPhysMetadataBase::vecToString(const std::vector<int>& v) const { + std::string str("["); + for (unsigned int i=0; i<v.size(); ++i) { + str += std::to_string(v[i]); + if ( i < v.size()-1 ) str += ","; + } + str += "]"; + return str; + } + //-------------------------------------------------------------------------- + std::string BPhysMetadataBase::vecToString(const std::vector<long>& v) const { + std::string str("["); + for (unsigned int i=0; i<v.size(); ++i) { + str += std::to_string(v[i]); + if ( i < v.size()-1 ) str 
+= ","; + } + str += "]"; + return str; + } + //-------------------------------------------------------------------------- + std::string BPhysMetadataBase::vecToString(const std::vector<double>& v) const { + std::string str("["); + for (unsigned int i=0; i<v.size(); ++i) { + str += std::to_string(v[i]); + if ( i < v.size()-1 ) str += ","; + } + str += "]"; + return str; + } + //-------------------------------------------------------------------------- + std::string BPhysMetadataBase::vecToString(const std::vector<bool>& v) const { + std::string str("["); + for (unsigned int i=0; i<v.size(); ++i) { + str += std::to_string(v[i]); + if ( i < v.size()-1 ) str += ","; + } + str += "]"; + return str; + } + //-------------------------------------------------------------------------- + std::string BPhysMetadataBase::vecToString(const std::vector<std::string>& v) const { + std::string str("["); + for (unsigned int i=0; i<v.size(); ++i) { + str += "'"; + str += v[i]; + str += "'"; + if ( i < v.size()-1 ) str += ","; + } + str += "]"; + return str; + } + //-------------------------------------------------------------------------- +} diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BPhysPVCascadeTools.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BPhysPVCascadeTools.cxx new file mode 100644 index 0000000000000000000000000000000000000000..09a3822d6d731145838c39f488dfedae463f09a4 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BPhysPVCascadeTools.cxx @@ -0,0 +1,475 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ + +#include "DerivationFrameworkBPhys/BPhysPVCascadeTools.h" +#include "xAODTracking/VertexContainer.h" +#include "xAODBPhys/BPhysHelper.h" +#include "TVector3.h" +#include "DerivationFrameworkBPhys/BPhysPVTools.h" +#include "TrkVKalVrtFitter/VxCascadeInfo.h" +#include "DerivationFrameworkBPhys/LocalVector.h" +#include 
"JpsiUpsilonTools/PrimaryVertexRefitter.h" +#include "HepPDT/ParticleDataTable.hh" +#include "BeamSpotConditionsData/BeamSpotData.h" +#include <limits> +#include <iostream> + +DerivationFramework::BPhysPVCascadeTools::BPhysPVCascadeTools(const CascadeTools *cascadeTools) : + m_cascadeTools(cascadeTools), m_beamSpotData(nullptr), m_PV_minNTracks(0), + m_copyAllVertices(true) +{ +} + +DerivationFramework::BPhysPVCascadeTools::BPhysPVCascadeTools(const CascadeTools *cascadeTools, + const InDet::BeamSpotData* beamSpotSvc) : + m_cascadeTools(cascadeTools), m_beamSpotData(beamSpotSvc), m_PV_minNTracks(0), + m_copyAllVertices(true) +{ +} + +void DerivationFramework::BPhysPVCascadeTools::FillBPhysHelper(const std::vector<TLorentzVector> &mom, Amg::MatrixX cov, xAOD::BPhysHelper &vtx, + const xAOD::Vertex* PV, const xAOD::VertexContainer* PvContainer, + xAOD::BPhysHelper::pv_type pvtype, int refitCode) const { + + BPHYS_CHECK( vtx.setPv ( PV, PvContainer, pvtype ) ); + + // cout << "BPhysPVCascadeTools::FillBPhysHelper for pvtype = " << pvtype << endl; + // cout << "lxy " << m_cascadeTools->lxy(mom, vtx.vtx(), PV) << " error " << m_cascadeTools->lxyError(mom, cov, vtx.vtx(), PV) << endl; + + // set variables calculated from PV + BPHYS_CHECK( vtx.setLxy ( m_cascadeTools->lxy (mom, vtx.vtx(), PV), pvtype ) ); + BPHYS_CHECK( vtx.setLxyErr ( m_cascadeTools->lxyError (mom, cov, vtx.vtx(), PV), pvtype ) ); + BPHYS_CHECK( vtx.setA0 ( m_cascadeTools->a0 (mom, vtx.vtx(), PV), pvtype ) ); + BPHYS_CHECK( vtx.setA0Err ( m_cascadeTools->a0Error (mom, cov, vtx.vtx(), PV), pvtype ) ); + BPHYS_CHECK( vtx.setA0xy ( m_cascadeTools->a0xy (mom, vtx.vtx(), PV), pvtype ) ); + BPHYS_CHECK( vtx.setA0xyErr( m_cascadeTools->a0xyError (mom, cov, vtx.vtx(), PV), pvtype ) ); + BPHYS_CHECK( vtx.setZ0 ( m_cascadeTools->a0z (mom, vtx.vtx(), PV), pvtype ) ); + BPHYS_CHECK( vtx.setZ0Err ( m_cascadeTools->a0zError (mom, cov, vtx.vtx(), PV), pvtype ) ); + BPHYS_CHECK( vtx.setRefitPVStatus ( refitCode, pvtype 
) ); + +} + +void DerivationFramework::BPhysPVCascadeTools::ProcessVertex(const std::vector<TLorentzVector> &mom, Amg::MatrixX cov, xAOD::BPhysHypoHelper &vtx, + xAOD::BPhysHelper::pv_type pvtype, double mass) const { + + const xAOD::Vertex* pv = vtx.pv(pvtype); + if (pv) { + // decorate the vertex. + vtx.setTau( m_cascadeTools->tau(mom, vtx.vtx(), pv), pvtype, xAOD::BPhysHypoHelper::TAU_INV_MASS ); + vtx.setTauErr( m_cascadeTools->tauError(mom, cov, vtx.vtx(), pv), pvtype, xAOD::BPhysHypoHelper::TAU_INV_MASS ); + // Proper decay time assuming constant mass hypothesis + vtx.setTau( m_cascadeTools->tau(mom, vtx.vtx(), pv, mass), pvtype, xAOD::BPhysHypoHelper::TAU_CONST_MASS ); + vtx.setTauErr( m_cascadeTools->tauError(mom, cov, vtx.vtx(), pv, mass), pvtype, xAOD::BPhysHypoHelper::TAU_CONST_MASS ); + //enum pv_type {PV_MAX_SUM_PT2, PV_MIN_A0, PV_MIN_Z0, PV_MIN_Z0_BA}; + } else { + const float errConst = -9999999.; + BPHYS_CHECK( vtx.setTau( errConst, pvtype, xAOD::BPhysHypoHelper::TAU_INV_MASS) ); + BPHYS_CHECK( vtx.setTauErr( errConst, pvtype, xAOD::BPhysHypoHelper::TAU_INV_MASS) ); + BPHYS_CHECK( vtx.setTau(errConst, pvtype, xAOD::BPhysHypoHelper::TAU_CONST_MASS) ); + BPHYS_CHECK( vtx.setTauErr( errConst, pvtype, xAOD::BPhysHypoHelper::TAU_CONST_MASS) ); + } + +} + +void DerivationFramework::BPhysPVCascadeTools::FillBPhysHelperNULL(xAOD::BPhysHelper &vtx, + const xAOD::VertexContainer* PvContainer, + xAOD::BPhysHelper::pv_type pvtype) { + DerivationFramework::BPhysPVTools::FillBPhysHelperNULL(vtx, PvContainer, pvtype); +} + +size_t DerivationFramework::BPhysPVCascadeTools::FindLowZIndex(const std::vector<TLorentzVector> &mom, const xAOD::BPhysHelper &Obj, + const std::vector<const xAOD::Vertex*> &PVlist, + const size_t PV_minNTracks) const { + size_t lowZ = 0; + if(PVlist.empty()) { + lowZ=std::numeric_limits<std::size_t>::max(); + return lowZ; + } + size_t size = PVlist.size(); + double lowA0zcalc = fabs(m_cascadeTools->a0z (mom, Obj.vtx(), PVlist[0])); + 
for(size_t i =1; i<size; i++) { + if ( PVlist[i]->nTrackParticles() >= PV_minNTracks ) { + double a0z = fabs(m_cascadeTools->a0z(mom, Obj.vtx(), PVlist[i])); + if(a0z < lowA0zcalc) { + lowA0zcalc = a0z; + lowZ =i; + } + } + } + return lowZ; +} + +size_t DerivationFramework::BPhysPVCascadeTools::FindHighPtIndex(const std::vector<const xAOD::Vertex*> &PVlist) { + return DerivationFramework::BPhysPVTools::FindHighPtIndex(PVlist); +} + +size_t DerivationFramework::BPhysPVCascadeTools::FindLowA0Index(const std::vector<TLorentzVector> &mom, const xAOD::BPhysHelper &Obj, + const std::vector<const xAOD::Vertex*> &PVlist, + const size_t PV_minNTracks) const { + size_t lowA0 = 0; + if(PVlist.empty()) { + lowA0=std::numeric_limits<std::size_t>::max(); + return lowA0; + } + size_t size = PVlist.size(); + double lowA0calc = m_cascadeTools->a0(mom, Obj.vtx(), PVlist[0]); + for(size_t i =1; i<size; i++) { + if ( PVlist[i]->nTrackParticles() >= PV_minNTracks ) { + double a0 = m_cascadeTools->a0(mom, Obj.vtx(), PVlist[i]); + if(a0 < lowA0calc) { + lowA0calc = a0; + lowA0 =i; + } + } + } + return lowA0; +} + +std::vector<const xAOD::Vertex*> DerivationFramework::BPhysPVCascadeTools::GetGoodPV(const xAOD::VertexContainer* pvContainer) { + typedef xAOD::VxType::VertexType VertexType; + VertexType Pvtx = xAOD::VxType::PriVtx; + VertexType Pileupvtx = xAOD::VxType::PileUp; + std::vector<const xAOD::Vertex*> goodPrimaryVertices; + goodPrimaryVertices.reserve(pvContainer->size()); + + for (auto ptr = pvContainer->begin(); ptr!= pvContainer->end(); ++ptr) { + VertexType thistype = (*ptr)->vertexType(); + if ( thistype == Pileupvtx || thistype == Pvtx ) { + goodPrimaryVertices.push_back(*ptr); + } else { +// cout << "vertex type " << thistype << endl; + } + } + return goodPrimaryVertices; +} +//----------------------------------------------------------------------------- +// +void DerivationFramework::BPhysPVCascadeTools::SetMinNTracksInPV(size_t PV_minNTracks) +{ + + m_PV_minNTracks = 
PV_minNTracks; +} +//----------------------------------------------------------------------------- +// +const Amg::Vector3D& DerivationFramework::BPhysPVCascadeTools::GetBeamSpot() const noexcept { + if(m_beamSpotData) return m_beamSpotData->beamPos(); + else { + static const Amg::Vector3D defaultBS(-10000.,-10000.,-10000.); + return defaultBS; + } +} +//----------------------------------------------------------------------------- +// +size_t DerivationFramework::BPhysPVCascadeTools::FindLowZ0BAIndex(const std::vector<TLorentzVector> &mom, const xAOD::BPhysHelper &obj, + const std::vector<const xAOD::Vertex*> &PVlist, + const size_t PV_minNTracks) const { + + size_t ilowZ0BA = std::numeric_limits<std::size_t>::max(); + double lowZ0BAcalc = std::numeric_limits<double>::max(); + for (size_t i = 0; i<PVlist.size(); ++i) { + if ( PVlist[i]->nTrackParticles() >= PV_minNTracks ) { + double z0BA = m_cascadeTools->a0(mom, obj.vtx(), PVlist[i]); + if (z0BA < lowZ0BAcalc) { + lowZ0BAcalc = z0BA; + ilowZ0BA = i; + } + } + } + return ilowZ0BA; +} +//----------------------------------------------------------------------------- +// +double DerivationFramework::BPhysPVCascadeTools::DistInZtoDOCA(const std::vector<TLorentzVector> &mom, const xAOD::BPhysHelper &obj, const xAOD::Vertex* vertex) const { + + Amg::Vector3D pv = vertex->position(); + Amg::Vector3D xDOCA = DocaExtrapToBeamSpot(mom, obj); + Amg::Vector3D vec = pv - xDOCA; + return vec.z(); +} +//----------------------------------------------------------------------------- +// +Amg::Vector3D DerivationFramework::BPhysPVCascadeTools::DocaExtrapToBeamSpot(const std::vector<TLorentzVector> &mom, const xAOD::BPhysHelper& obj) const { + + Amg::Vector3D xDOCA(-99999., -99999., -99999.); + TLorentzVector totalMom; + unsigned int NTrk = mom.size(); + for( unsigned int it=0; it<NTrk; it++) totalMom += mom[it]; + TVector3 totP = totalMom.Vect(); + Amg::Vector3D pSV(totP.X(), totP.Y(), totP.Z()); + Amg::Vector3D pT(pSV.x(), pSV.y(), 
0.); + if ( pT.mag2() > 0 ) { + Amg::Vector3D xBS = GetBeamSpot(); + Amg::Vector3D xSV = obj.vtx()->position(); + Amg::Vector3D xT(xSV.x()-xBS.x(), xSV.y()-xBS.y(), 0.); + xDOCA = xSV - pSV*pT.dot(xT)/pT.mag2(); + } else { + std::cout << "BPhysPVCascadeTools::DocaExtrapToBeamSpot: WARNING pT == 0." + << std::endl; + } + return xDOCA; +} + +void DerivationFramework::BPhysPVCascadeTools::PrepareVertexLinks(Trk::VxCascadeInfo *result, const xAOD::TrackParticleContainer* importedTrackCollection) +{ + auto &collection = result->vertices(); + for(auto v : collection) + { + std::vector<ElementLink<DataVector<xAOD::TrackParticle> > > newLinkVector; + for(unsigned int i=0; i< v->trackParticleLinks().size(); i++) + { + ElementLink<DataVector<xAOD::TrackParticle> > mylink=v->trackParticleLinks()[i]; // makes a copy (non-const) + mylink.setStorableObject(*importedTrackCollection, true); + newLinkVector.push_back( mylink ); + } + v->clearTracks(); + v->setTrackParticleLinks( newLinkVector ); + } +} + +std::vector<const xAOD::TrackParticle*> DerivationFramework::BPhysPVCascadeTools::CollectAllChargedTracks(const std::vector<xAOD::Vertex*> &cascadeVertices) +{ + std::vector<const xAOD::TrackParticle*> exclTrk; + for( size_t jt=0; jt<cascadeVertices.size(); jt++) { + for( size_t it=0; it<cascadeVertices[jt]->vxTrackAtVertex().size(); it++) { + if(cascadeVertices[jt]->trackParticle(it)->charge() != 0) exclTrk.push_back(cascadeVertices[jt]->trackParticle(it)); + } + } + return exclTrk; +} + +StatusCode DerivationFramework::BPhysPVCascadeTools::FillCandwithRefittedVertices( bool refitPV, + const xAOD::VertexContainer* pvContainer, xAOD::VertexContainer* refPvContainer, + const Analysis::PrimaryVertexRefitter *pvRefitter, size_t in_PV_max, int DoVertexType, + Trk::VxCascadeInfo* casc, int index, + double mass, xAOD::BPhysHypoHelper &vtx) +{ + const std::vector<TLorentzVector> &mom = casc->getParticleMoms()[index]; + const Amg::MatrixX &cov = casc->getCovariance()[index]; + const 
std::vector<xAOD::Vertex*> &cascadeVertices = casc->vertices(); + const bool doPt = (DoVertexType & 1) != 0; + const bool doA0 = (DoVertexType & 2) != 0; + const bool doZ0 = (DoVertexType & 4) != 0; + const bool doZ0BA = (DoVertexType & 8) != 0; + + // Collect the tracks that should be excluded from the PV + std::vector<const xAOD::TrackParticle*> exclTrk = CollectAllChargedTracks(cascadeVertices); + + + const std::vector<const xAOD::Vertex*> GoodPVs = GetGoodPV(pvContainer); + // 2) PV dependent variables + if (GoodPVs.empty() == false) { + if (refitPV) { + size_t pVmax =std::min((size_t)in_PV_max, GoodPVs.size()); + std::vector<const xAOD::Vertex*> refPVvertexes; + std::vector<const xAOD::Vertex*> refPVvertexes_toDelete; + std::vector<int> exitCode; + refPVvertexes.reserve(pVmax); + refPVvertexes_toDelete.reserve(pVmax); + exitCode.reserve(pVmax); + + // Refit the primary vertex and set the related decorations. + + for (size_t i =0; i < pVmax ; i++) { + const xAOD::Vertex* oldPV = GoodPVs.at(i); + // when set to false this will return null when a new vertex is not required +// ATH_MSG_DEBUG("old PV x " << oldPV->x() << " y " << oldPV->y() << " z " << oldPV->z()); + int exitcode = 0; + const xAOD::Vertex* refPV = pvRefitter->refitVertex(oldPV, exclTrk, m_copyAllVertices, &exitcode); +// if (refPV) ATH_MSG_DEBUG("ref PV x " << refPV->x() << " y " << refPV->y() << " z " << refPV->z()); + exitCode.push_back(exitcode); + // we want positioning to match the goodPrimaryVertices + if (refPV == nullptr) { + refPVvertexes.push_back(oldPV); + refPVvertexes_toDelete.push_back(nullptr); + } else { + refPVvertexes.push_back(refPV); + refPVvertexes_toDelete.push_back(refPV); + } + } + LocalVector<size_t, 4> indexesUsed; + LocalVector<std::pair<size_t, xAOD::BPhysHelper::pv_type>, 4> indexestoProcess; + + if(doPt){ + indexestoProcess.push_back(std::make_pair + (FindHighPtIndex(refPVvertexes), xAOD::BPhysHelper::PV_MAX_SUM_PT2)); + } + if(doA0) { + 
indexestoProcess.push_back(std::make_pair( FindLowA0Index(mom, vtx, refPVvertexes, m_PV_minNTracks), + xAOD::BPhysHelper::PV_MIN_A0)); + } + if(doZ0) { + indexestoProcess.push_back(std::make_pair(FindLowZIndex(mom, vtx, refPVvertexes, m_PV_minNTracks), + xAOD::BPhysHelper::PV_MIN_Z0)); + } + if(doZ0BA) { + size_t lowZBA = FindLowZ0BAIndex(mom, vtx, refPVvertexes, m_PV_minNTracks); + if( lowZBA < pVmax ) { + indexestoProcess.push_back(std::make_pair(lowZBA, xAOD::BPhysHelper::PV_MIN_Z0_BA)); + } + else FillBPhysHelperNULL(vtx, pvContainer, xAOD::BPhysHelper::PV_MIN_Z0_BA); + } + + for(size_t i =0 ; i<indexestoProcess.size(); i++){ + //if refitted add to refitted container + auto index = indexestoProcess[i].first; + auto pvtype = indexestoProcess[i].second; + const xAOD::VertexContainer* ParentContainer = + (refPVvertexes_toDelete.at(index)) ? refPvContainer : pvContainer; + if(ParentContainer == refPvContainer && !indexesUsed.contains(index)) { + // store the new vertex + refPvContainer->push_back(const_cast<xAOD::Vertex*>(refPVvertexes.at(index))); + indexesUsed.push_back(index); + } + FillBPhysHelper(mom, cov, vtx, refPVvertexes[index], + ParentContainer, pvtype, exitCode[index]); + vtx.setOrigPv(GoodPVs[index], pvContainer, pvtype); + } + //nullify ptrs we want to keep so these won't get deleted + //"delete null" is valid in C++ and does nothing so this is quicker than a lot of if statements + for(size_t x : indexesUsed) refPVvertexes_toDelete[x] = nullptr; + //Loop over toDELETE container, anything that is used or was not refitted is null + //This cleans up all extra vertices that were created and not used + for(const xAOD::Vertex* ptr : refPVvertexes_toDelete) delete ptr; + refPVvertexes.clear(); // Clear lists of now dangling ptrs + refPVvertexes_toDelete.clear(); + exitCode.clear(); + + } else { + // 2.a) the first PV with the largest sum pT. 
+ if(doPt) { + size_t highPtindex = FindHighPtIndex(GoodPVs); // Should be 0 in PV ordering + FillBPhysHelper(mom, cov, vtx, GoodPVs[highPtindex], pvContainer, xAOD::BPhysHelper::PV_MAX_SUM_PT2, 0); + } + // 2.b) the closest in 3D: + if(doA0) { + size_t lowA0 = FindLowA0Index(mom, vtx, GoodPVs, m_PV_minNTracks); + FillBPhysHelper(mom, cov, vtx, GoodPVs[lowA0], pvContainer, xAOD::BPhysHelper::PV_MIN_A0, 0); + } + // 2.c) the closest in Z: + if(doZ0) { + size_t lowZ = FindLowZIndex(mom, vtx, GoodPVs, m_PV_minNTracks); + FillBPhysHelper(mom, cov, vtx, GoodPVs[lowZ], pvContainer, xAOD::BPhysHelper::PV_MIN_Z0, 0); + } + // 2.d) the closest in Z (DOCA w.r.t. beam axis): + if(doZ0BA) { + size_t lowZBA = FindLowZ0BAIndex(mom, vtx, GoodPVs, m_PV_minNTracks); + if ( lowZBA < GoodPVs.size() ) { // safety against vector index out-of-bounds + FillBPhysHelper(mom, cov, vtx, GoodPVs[lowZBA], pvContainer, xAOD::BPhysHelper::PV_MIN_Z0_BA, 0); + } else { + // nothing found -- fill NULL + FillBPhysHelperNULL(vtx, pvContainer, xAOD::BPhysHelper::PV_MIN_Z0_BA); + } + } + } // refitPV + } else { + + if(pvContainer->empty()) return StatusCode::FAILURE; + const xAOD::Vertex* Dummy = pvContainer->at(0); + + // 2.a) the first PV with the largest sum pT. + if(doPt) { + FillBPhysHelper(mom, cov, vtx, Dummy, pvContainer, xAOD::BPhysHelper::PV_MAX_SUM_PT2, 0); + if(refitPV) vtx.setOrigPv(Dummy, pvContainer, xAOD::BPhysHelper::PV_MAX_SUM_PT2); + } + // 2.b) the closest in 3D: + if(doA0) { + FillBPhysHelper(mom, cov, vtx, Dummy, pvContainer, xAOD::BPhysHelper::PV_MIN_A0, 0); + if(refitPV) vtx.setOrigPv(Dummy, pvContainer, xAOD::BPhysHelper::PV_MIN_A0); + } + // 2.c) the closest in Z: + if(doZ0) { + FillBPhysHelper(mom, cov, vtx, Dummy, pvContainer, xAOD::BPhysHelper::PV_MIN_Z0, 0); + if(refitPV) vtx.setOrigPv(Dummy, pvContainer, xAOD::BPhysHelper::PV_MIN_Z0); + } + // 2.d) the closest in Z (DOCA w.r.t. 
beam axis): + if(doZ0BA) { + FillBPhysHelper(mom, cov, vtx, Dummy, pvContainer, xAOD::BPhysHelper::PV_MIN_Z0_BA, 0); + if(refitPV) vtx.setOrigPv(Dummy, pvContainer, xAOD::BPhysHelper::PV_MIN_Z0_BA); + } + } // GoodPVs.empty() + + // 3) proper decay time and error: + // retrieve the refitted PV (or the original one, if the PV refitting was turned off) + if(doPt) ProcessVertex(mom, cov, vtx, xAOD::BPhysHelper::PV_MAX_SUM_PT2, mass); + if(doA0) ProcessVertex(mom, cov, vtx, xAOD::BPhysHelper::PV_MIN_A0, mass); + if(doZ0) ProcessVertex(mom, cov, vtx, xAOD::BPhysHelper::PV_MIN_Z0, mass); + if(doZ0BA) ProcessVertex(mom, cov, vtx, xAOD::BPhysHelper::PV_MIN_Z0_BA, mass); + + return StatusCode::SUCCESS; +} + +//----------------------------------------------------------------------------- + +void DerivationFramework::BPhysPVCascadeTools::SetVectorInfo(xAOD::BPhysHelper &vtx, const Trk::VxCascadeInfo* casc){ + + const std::vector< std::vector<TLorentzVector> > &moms = casc->getParticleMoms(); + const std::vector<xAOD::Vertex*> &cascadeVertices = casc->vertices(); + // Get refitted track momenta from all vertices, charged tracks only + std::vector<float> px; + std::vector<float> py; + std::vector<float> pz; + for( size_t jt=0; jt<moms.size(); jt++) { + for( size_t it=0; it<cascadeVertices[jt]->vxTrackAtVertex().size(); it++) { + px.push_back( moms[jt][it].Px() ); + py.push_back( moms[jt][it].Py() ); + pz.push_back( moms[jt][it].Pz() ); + } + } + vtx.setRefTrks(std::move(px),std::move(py),std::move(pz)); + +} + +bool DerivationFramework::BPhysPVCascadeTools::uniqueCollection(const std::vector<const xAOD::TrackParticle*>&col){ + for(auto p : col){ + if(std::count(col.begin(), col.end(), p) > 1) return false; + } + return true; +} + +bool DerivationFramework::BPhysPVCascadeTools::uniqueCollection(const std::vector<const xAOD::TrackParticle*>&col1, const std::vector<const xAOD::TrackParticle*>&col2){ + for(auto p : col1){ + if((std::count(col1.begin(), col1.end(), p) + 
std::count(col2.begin(), col2.end(), p)) > 1) return false; + } + for(auto p : col2){ + if((std::count(col1.begin(), col1.end(), p) + std::count(col2.begin(), col2.end(), p)) > 1) return false; + } + return true; +} + +bool DerivationFramework::BPhysPVCascadeTools::LinkVertices(SG::AuxElement::Decorator<VertexLinkVector> &decor, const std::vector<const xAOD::Vertex*>& vertices, + const xAOD::VertexContainer* vertexContainer, const xAOD::Vertex* vert){ + // create tmp vector of preceding vertex links + VertexLinkVector precedingVertexLinks; + + // loop over input precedingVertices + auto precedingVerticesItr = vertices.begin(); + for(; precedingVerticesItr!=vertices.end(); ++precedingVerticesItr) { + // sanity check 1: protect against null pointers + if( !(*precedingVerticesItr) ) + return false; + + // create element link + VertexLink vertexLink; + vertexLink.setElement(*precedingVerticesItr); + vertexLink.setStorableObject(*vertexContainer); + + // sanity check 2: is the link valid? + if( !vertexLink.isValid() ) + return false; + + // link is OK, store it in the tmp vector + precedingVertexLinks.push_back( vertexLink ); + + } // end of loop over preceding vertices + + // all OK: store preceding vertex links in the aux store + decor(*vert) = precedingVertexLinks; + return true; +} + +double DerivationFramework::BPhysPVCascadeTools::getParticleMass(const HepPDT::ParticleDataTable* pdt, int pdgcode){ + auto ptr = pdt->particle( pdgcode ); + return ptr ? 
ptr->mass() : 0.; +} + diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BPhysPVThinningTool.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BPhysPVThinningTool.cxx new file mode 100644 index 0000000000000000000000000000000000000000..33e5030c830f1f9835590d6ec4e2df90da8ad2b7 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BPhysPVThinningTool.cxx @@ -0,0 +1,134 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ + +///////////////////////////////////////////////////////////////// +// ThinningToolExample.cxx +/////////////////////////////////////////////////////////////////// +// Author: James Catmore (James.Catmore@cern.ch) +// This is a trivial example of an implementation of a thinning tool +// which removes all ID tracks which do not pass a user-defined cut + +#include "DerivationFrameworkBPhys/BPhysPVThinningTool.h" +#include "xAODTracking/TrackParticleContainer.h" +#include "xAODTracking/TrackParticle.h" +#include "StoreGate/ThinningHandle.h" +#include "xAODBPhys/BPhysHelper.h" +#include "StoreGate/ThinningHandle.h" +#include <algorithm> +#include <numeric> +#include <vector> +#include <string> + +using namespace std; +using namespace xAOD; +// Constructor +DerivationFramework::BPhysPVThinningTool::BPhysPVThinningTool(const std::string& t, + const std::string& n, + const IInterface* p ) : + AthAlgTool(t,n,p), + m_TrackContainerName("InDetTrackParticles"), + m_PVContainerName("PrimaryVertices"), + m_ntot(0), + m_npass(0), m_tracks_kept(0), m_keepTracks(false) +{ + declareInterface<DerivationFramework::IThinningTool>(this); + declareProperty("CandidateCollections" , m_BPhyCandList); + declareProperty("KeepPVTracks", m_keepTracks); + declareProperty("TrackParticleContainerName", m_TrackContainerName); + declareProperty("PrimaryVertexContainerName", m_PVContainerName); +} + +// Destructor 
+DerivationFramework::BPhysPVThinningTool::~BPhysPVThinningTool() { +} + +// Athena initialize and finalize +StatusCode DerivationFramework::BPhysPVThinningTool::initialize() +{ + ATH_MSG_VERBOSE("initialize() ..."); + ATH_CHECK(m_BPhyCandList.initialize()); + if(not m_TrackContainerName.key().empty()) ATH_CHECK(m_TrackContainerName.initialize(m_streamName)); + ATH_CHECK(m_PVContainerName.initialize(m_streamName)); + + return StatusCode::SUCCESS; +} +StatusCode DerivationFramework::BPhysPVThinningTool::finalize() +{ + ATH_MSG_VERBOSE("finalize() ..."); + ATH_MSG_INFO("Processed "<< m_ntot <<" PV, "<< m_npass<< " were retained "); + if(m_keepTracks) ATH_MSG_INFO("Additional tracks kept " << m_tracks_kept); + return StatusCode::SUCCESS; +} + +// The thinning itself +StatusCode DerivationFramework::BPhysPVThinningTool::doThinning() const +{ + + // Get the track container + SG::ThinningHandle<xAOD::VertexContainer> PV_col(m_PVContainerName); + if(!PV_col.isValid()) { + ATH_MSG_ERROR ("Couldn't retrieve VertexContainer with key PrimaryVertices"); + return StatusCode::FAILURE; + } + m_ntot+=PV_col->size(); + // Loop over tracks, see if they pass, set mask + std::vector<bool> mask(PV_col->size(), false); + + BPhysHelper::pv_type pvtypes[] = {BPhysHelper::PV_MAX_SUM_PT2, + BPhysHelper::PV_MIN_A0, + BPhysHelper::PV_MIN_Z0, + BPhysHelper::PV_MIN_Z0_BA}; + + + + for(auto &str : m_BPhyCandList) { + SG::ReadHandle<xAOD::VertexContainer> Container(str); + ATH_CHECK(Container.isValid()); + size_t s = Container->size(); + for(size_t i = 0; i<s; i++) { + xAOD::BPhysHelper vtx(Container->at(i)); + + for(size_t i =0; i < 4; i++) { + const xAOD::Vertex* origPv = vtx.origPv(pvtypes[i]); + if(origPv==nullptr) continue; + auto pvit = std::find (PV_col->begin(), PV_col->end(), origPv); + if(pvit == PV_col->end()) { + ATH_MSG_WARNING("PV not found in container"); + continue; + } + size_t x = std::distance(PV_col->begin(), pvit); + mask.at(x) = true; + } + + } + } + + m_npass += 
std::accumulate(mask.begin(), mask.end(), 0); + + if(m_keepTracks){ + SG::ThinningHandle<xAOD::TrackParticleContainer> importedTrackParticles(m_TrackContainerName); + std::vector<bool> trackmask(importedTrackParticles->size(), false); + size_t pvnum = mask.size(); + for(size_t i =0; i<pvnum;i++){ + if(mask[i] == false) continue; + auto vtx = PV_col->at(i); + size_t s = vtx->nTrackParticles(); + for(size_t j=0;j<s;j++){ + auto trackit = std::find(importedTrackParticles->begin(), importedTrackParticles->end(), vtx->trackParticle(j)); + if(trackit == importedTrackParticles->end()){ + ATH_MSG_WARNING("track not found in container"); + continue; + } + size_t x = std::distance(importedTrackParticles->begin(), trackit); + trackmask.at(x) = true; + } + } + importedTrackParticles.keep(trackmask, SG::ThinningHandleBase::Op::Or); + m_tracks_kept += std::accumulate(trackmask.begin(), trackmask.end(), 0); + } + PV_col.keep(mask, SG::ThinningHandleBase::Op::Or); + + return StatusCode::SUCCESS; +} + diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BPhysPVTools.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BPhysPVTools.cxx new file mode 100644 index 0000000000000000000000000000000000000000..6bb75c4d31d35b852762eebc74d14e2b47c37abf --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BPhysPVTools.cxx @@ -0,0 +1,545 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ + +#include "DerivationFrameworkBPhys/BPhysPVTools.h" +#include "xAODTracking/VertexContainer.h" +#include "TrkVertexAnalysisUtils/V0Tools.h" +#include "xAODBPhys/BPhysHelper.h" +#include "JpsiUpsilonTools/PrimaryVertexRefitter.h" +#include "TVector3.h" +#include "BeamSpotConditionsData/BeamSpotData.h" + +#include <limits> +#include <iostream> +#include <cmath> +using namespace std; + +DerivationFramework::BPhysPVTools::BPhysPVTools(const Trk::V0Tools *v0Tools) : + m_v0Tools(v0Tools), 
m_beamSpotData(nullptr), m_PV_minNTracks(0), + m_3dCalc(false) +{ +} + +DerivationFramework::BPhysPVTools::BPhysPVTools(const Trk::V0Tools *v0Tools, const InDet::BeamSpotData *beamSpotSvc) : + m_v0Tools(v0Tools), m_beamSpotData(beamSpotSvc), m_PV_minNTracks(0), + m_3dCalc(false) +{ +} + +void DerivationFramework::BPhysPVTools::FillBPhysHelper(xAOD::BPhysHelper &vtx, + const xAOD::Vertex* PV, const xAOD::VertexContainer* PvContainer, + xAOD::BPhysHelper::pv_type pvtype, int refitCode) const { + + BPHYS_CHECK( vtx.setPv ( PV, PvContainer, pvtype ) ); + + // cout << "BPhysPVTools::FillBPhysHelper for pvtype = " << pvtype << endl; + + // set variables calculated from PV + if(m_3dCalc){ + BPHYS_CHECK( vtx.setLxyz ( m_v0Tools->lxyz (vtx.vtx(), PV), pvtype ) ); + BPHYS_CHECK( vtx.setLxyzErr ( m_v0Tools->lxyzError (vtx.vtx(), PV), pvtype ) ); + } + BPHYS_CHECK( vtx.setLxy ( m_v0Tools->lxy (vtx.vtx(), PV), pvtype ) ); + BPHYS_CHECK( vtx.setLxyErr ( m_v0Tools->lxyError (vtx.vtx(), PV), pvtype ) ); + BPHYS_CHECK( vtx.setA0 ( m_v0Tools->a0 (vtx.vtx(), PV), pvtype ) ); + BPHYS_CHECK( vtx.setA0Err ( m_v0Tools->a0Error (vtx.vtx(), PV), pvtype ) ); + BPHYS_CHECK( vtx.setA0xy ( m_v0Tools->a0xy (vtx.vtx(), PV), pvtype ) ); + BPHYS_CHECK( vtx.setA0xyErr( m_v0Tools->a0xyError (vtx.vtx(), PV), pvtype ) ); + BPHYS_CHECK( vtx.setZ0 ( m_v0Tools->a0z (vtx.vtx(), PV), pvtype ) ); + BPHYS_CHECK( vtx.setZ0Err ( m_v0Tools->a0zError (vtx.vtx(), PV), pvtype ) ); + BPHYS_CHECK( vtx.setRefitPVStatus ( refitCode, pvtype ) ); + +} + +void DerivationFramework::BPhysPVTools::FillBPhysHelperNULL(xAOD::BPhysHelper &vtx, + const xAOD::VertexContainer* PvContainer, + xAOD::BPhysHelper::pv_type pvtype, bool do3d) { + const xAOD::Vertex* PV = nullptr; + BPHYS_CHECK( vtx.setPv ( PV, PvContainer, pvtype ) ); + constexpr float errConst = std::numeric_limits<float>::lowest(); + // set variables claculated from PV + if(do3d){ + BPHYS_CHECK( vtx.setLxyz ( errConst, pvtype ) ); + BPHYS_CHECK( vtx.setLxyzErr ( 
errConst, pvtype ) ); + } + BPHYS_CHECK( vtx.setLxy ( errConst, pvtype ) ); + BPHYS_CHECK( vtx.setLxyErr ( errConst, pvtype ) ); + BPHYS_CHECK( vtx.setA0 ( errConst, pvtype ) ); + BPHYS_CHECK( vtx.setA0Err ( errConst, pvtype ) ); + BPHYS_CHECK( vtx.setA0xy ( errConst, pvtype ) ); + BPHYS_CHECK( vtx.setA0xyErr( errConst, pvtype ) ); + BPHYS_CHECK( vtx.setZ0 ( errConst, pvtype ) ); + BPHYS_CHECK( vtx.setZ0Err ( errConst, pvtype ) ); + BPHYS_CHECK( vtx.setRefitPVStatus ( 0, pvtype ) ); +} + +size_t DerivationFramework::BPhysPVTools::FindLowZIndex(const xAOD::BPhysHelper &Obj, + const std::vector<const xAOD::Vertex*> &PVlist, + const size_t PV_minNTracks) const { + size_t lowZ = 0; + if(PVlist.empty()) { + lowZ=std::numeric_limits<std::size_t>::max(); + return lowZ; + } + size_t size = PVlist.size(); + double lowA0zcalc = fabs(m_v0Tools->a0z (Obj.vtx(), PVlist[0])); + for(size_t i =1; i<size; i++) { + if ( PVlist[i]->nTrackParticles() >= PV_minNTracks ) { + double a0z = fabs(m_v0Tools->a0z(Obj.vtx(), PVlist[i])); + if(a0z < lowA0zcalc) { + lowA0zcalc = a0z; + lowZ =i; + } + } + } + return lowZ; +} + +void DerivationFramework::BPhysPVTools::DecorateWithDummyVertex(xAOD::VertexContainer* vtxContainer, + const xAOD::VertexContainer* pvContainer, const xAOD::Vertex* Dummy, + int DoVertexType, const bool SetOrignal) const { + const bool doPt = (DoVertexType & 1) != 0; + const bool doA0 = (DoVertexType & 2) != 0; + const bool doZ0 = (DoVertexType & 4) != 0; + const bool doZ0BA = (DoVertexType & 8) != 0; + + xAOD::VertexContainer::iterator vtxItr = vtxContainer->begin(); + for(; vtxItr!=vtxContainer->end(); ++vtxItr) { + xAOD::BPhysHelper vtx(*vtxItr); + + // 1) pT error + double ptErr = m_v0Tools->pTError( vtx.vtx() ); + BPHYS_CHECK( vtx.setPtErr(ptErr) ); + if(doPt) { + // 2.a) the first PV with the largest sum pT. 
+ FillBPhysHelper(vtx, Dummy, pvContainer, xAOD::BPhysHelper::PV_MAX_SUM_PT2, 0); + if(SetOrignal) vtx.setOrigPv(Dummy, pvContainer, xAOD::BPhysHelper::PV_MAX_SUM_PT2); + } + + if(doA0) { + // 2.b) the closest in 3D: + FillBPhysHelper(vtx, Dummy, pvContainer, xAOD::BPhysHelper::PV_MIN_A0, 0); + if(SetOrignal) vtx.setOrigPv(Dummy, pvContainer, xAOD::BPhysHelper::PV_MIN_A0); + } + + if(doZ0) { + FillBPhysHelper(vtx, Dummy, pvContainer, xAOD::BPhysHelper::PV_MIN_Z0, 0); + if(SetOrignal) vtx.setOrigPv(Dummy, pvContainer, xAOD::BPhysHelper::PV_MIN_Z0); + } + + if(doZ0BA) { + FillBPhysHelper(vtx, Dummy, pvContainer, xAOD::BPhysHelper::PV_MIN_Z0_BA, 0); + if(SetOrignal) vtx.setOrigPv(Dummy, pvContainer, xAOD::BPhysHelper::PV_MIN_Z0_BA); + } + } +} + +void DerivationFramework::BPhysPVTools::DecorateWithNULL(xAOD::VertexContainer* vtxContainer, + const xAOD::VertexContainer* pvContainer, int DoVertexType) const { + const bool doPt = (DoVertexType & 1) != 0; + const bool doA0 = (DoVertexType & 2) != 0; + const bool doZ0 = (DoVertexType & 4) != 0; + const bool doZ0BA = (DoVertexType & 8) != 0; + xAOD::VertexContainer::iterator vtxItr = vtxContainer->begin(); + for(; vtxItr!=vtxContainer->end(); ++vtxItr) { + xAOD::BPhysHelper vtx(*vtxItr); + + // 1) pT error + double ptErr = m_v0Tools->pTError( vtx.vtx() ); + BPHYS_CHECK( vtx.setPtErr(ptErr) ); + if(doPt) { + // 2.a) the first PV with the largest sum pT. 
+ FillBPhysHelperNULL(vtx, pvContainer, xAOD::BPhysHelper::PV_MAX_SUM_PT2, m_3dCalc); + } + + if(doA0) { + // 2.b) the closest in 3D: + FillBPhysHelperNULL(vtx, pvContainer, xAOD::BPhysHelper::PV_MIN_A0, m_3dCalc); + } + + if(doZ0) { + FillBPhysHelperNULL(vtx, pvContainer, xAOD::BPhysHelper::PV_MIN_Z0, m_3dCalc); + } + if(doZ0BA) { + FillBPhysHelperNULL(vtx, pvContainer, xAOD::BPhysHelper::PV_MIN_Z0_BA, m_3dCalc); + } + } +} + +StatusCode DerivationFramework::BPhysPVTools::FillCandExistingVertices(xAOD::VertexContainer* vtxContainer, + const xAOD::VertexContainer* pvContainer, int DoVertexType) { + + + + //---------------------------------------------------- + // decorate the vertex + //---------------------------------------------------- + // loop over candidates -- Don't apply PV_minNTracks requirement here + // because it may result in exclusion of the high-pt PV. + const std::vector<const xAOD::Vertex*> GoodPVs = GetGoodPV(pvContainer); + + + if(GoodPVs.empty() == false) { + + const bool doPt = (DoVertexType & 1) != 0; + const bool doA0 = (DoVertexType & 2) != 0; + const bool doZ0 = (DoVertexType & 4) != 0; + const bool doZ0BA = (DoVertexType & 8) != 0; + + xAOD::VertexContainer::iterator vtxItr = vtxContainer->begin(); + for(; vtxItr!=vtxContainer->end(); ++vtxItr) { + xAOD::BPhysHelper vtx(*vtxItr); + + // 1) pT error + double ptErr = m_v0Tools->pTError( vtx.vtx() ); + BPHYS_CHECK( vtx.setPtErr(ptErr) ); + + // 2) refit the primary vertex and set the related decorations. + if(doPt) { + size_t highPtindex = FindHighPtIndex(GoodPVs); //Should be 0 in PV ordering + // 2.a) the first PV with the largest sum pT. 
+ FillBPhysHelper(vtx, GoodPVs[highPtindex], pvContainer, xAOD::BPhysHelper::PV_MAX_SUM_PT2, 0); + } + + if(doA0) { + // 2.b) the closest in 3D: + size_t lowA0 = FindLowA0Index(vtx, GoodPVs, m_PV_minNTracks); + FillBPhysHelper(vtx, GoodPVs[lowA0], pvContainer, xAOD::BPhysHelper::PV_MIN_A0, 0); + } + + if(doZ0) { + size_t lowZ = FindLowZIndex(vtx, GoodPVs, m_PV_minNTracks); + FillBPhysHelper(vtx, GoodPVs[lowZ], pvContainer, xAOD::BPhysHelper::PV_MIN_Z0, 0); + } + + if(doZ0BA) { + size_t lowZBA = FindLowZ0BAIndex(vtx, GoodPVs, m_PV_minNTracks); + if ( lowZBA < GoodPVs.size() ) { // safety against vector index out-of-bounds + FillBPhysHelper(vtx, GoodPVs[lowZBA], pvContainer, xAOD::BPhysHelper::PV_MIN_Z0_BA, 0); + } else { + // nothing found -- fill nullptr + FillBPhysHelperNULL(vtx, pvContainer, xAOD::BPhysHelper::PV_MIN_Z0_BA, m_3dCalc); + } + } + + }// end of loop over vertices + } //end of check for vertices + else { + // cout << "Warning: DerivationFramework::BPhysPVTools::FillCandExistingVertices No Primary Vertices Found trying to decorate wilth dummy \n"; + if(pvContainer->empty()) return StatusCode::FAILURE; + const xAOD::Vertex* dummy = pvContainer->at(0); //No good vertices so last vertex must be dummy + DecorateWithDummyVertex(vtxContainer, pvContainer, dummy, DoVertexType, false); + } + return StatusCode::SUCCESS; +} + + +StatusCode DerivationFramework::BPhysPVTools::FillCandwithRefittedVertices(xAOD::VertexContainer* vtxContainer, + const xAOD::VertexContainer* pvContainer, xAOD::VertexContainer* refPvContainer, + const Analysis::PrimaryVertexRefitter *pvRefitter, size_t in_PV_max, int DoVertexType) { + + + //---------------------------------------------------- + // decorate the vertex + //---------------------------------------------------- + // loop over candidates -- Don't apply PV_minNTracks requirement here + // because it may result in exclusion of the high-pt PV. 
+ std::vector<const xAOD::Vertex*> goodPrimaryVertices = GetGoodPV(pvContainer); + + /* + cout << "BPhysPVTools::FillCandwithRefittedVertices: #PVs: all: " + << pvContainer->size() << " ref: " << refPvContainer->size() + << " good: " << goodPrimaryVertices.size() + << " PV_minNTracks: " << m_PV_minNTracks << endl; + */ + if(goodPrimaryVertices.empty() == false) { + + size_t pVmax =std::min(in_PV_max, goodPrimaryVertices.size()); + std::vector<const xAOD::Vertex*> refPVvertexes; + std::vector<const xAOD::Vertex*> refPVvertexes_toDelete; + std::vector<int> exitCode; + refPVvertexes.reserve(pVmax); + refPVvertexes_toDelete.reserve(pVmax); + exitCode.reserve(pVmax); + + bool doPt = (DoVertexType & 1) != 0; + bool doA0 = (DoVertexType & 2) != 0; + bool doZ0 = (DoVertexType & 4) != 0; + bool doZ0BA = (DoVertexType & 8) != 0; + + xAOD::VertexContainer::iterator vtxItr = vtxContainer->begin(); + for(; vtxItr!=vtxContainer->end(); ++vtxItr) { + xAOD::BPhysHelper vtx(*vtxItr); + + // 1) pT error + double ptErr = m_v0Tools->pTError( vtx.vtx() ); + BPHYS_CHECK( vtx.setPtErr(ptErr) ); + + for(size_t i =0; i < pVmax ; i++) { + const xAOD::Vertex* oldPV = goodPrimaryVertices.at(i); + //when set to false this will return nullptr when a new vertex is not required + int exit =0; + const xAOD::Vertex* refPV = pvRefitter->refitVertex(oldPV, vtx.vtx(), false, &exit); + exitCode.push_back(exit); + //I want positioning to match the goodPrimaryVertices + if(refPV == nullptr){ + refPVvertexes.push_back(oldPV); + refPVvertexes_toDelete.push_back(nullptr); + }else{ + refPVvertexes.push_back(refPV); + refPVvertexes_toDelete.push_back(refPV); + } + } + + // 2) refit the primary vertex and set the related decorations. + + size_t highPtindex = doPt ? FindHighPtIndex(refPVvertexes) : 9999999; //Should be 0 in PV ordering + size_t lowA0 = doA0 ? + FindLowA0Index(vtx, refPVvertexes, m_PV_minNTracks) : 9999998; + size_t lowZ = doZ0 ? 
+ FindLowZIndex(vtx, refPVvertexes, m_PV_minNTracks) : 9999997; + size_t lowZBA = doZ0BA ? + FindLowZ0BAIndex(vtx, refPVvertexes, m_PV_minNTracks) : 9999996; + /* + cout << "BPhysPVTools::FillCandwithRefittedVertices: in_PV_max/pVMax = " + << in_PV_max << ", " << pVmax << endl; + cout << "BPhysPVTools::FillCandwithRefittedVertices: m_PV_minNTracks = " + << m_PV_minNTracks << endl; + cout << "BPhysPVTools::FillCandwithRefittedVertices: hPt,lowA0/Z/ZBA = " + << highPtindex << ", " + << lowA0 << ", " << lowZ << ", " << lowZBA << " " + << (lowA0 != lowZ ? "1!" : " ") + << (lowA0 != lowZBA ? "2!" : " ") + << (lowZ != lowZBA ? "3!" : " ") + << (highPtindex != lowA0 ? "4!" : " ") + << (highPtindex != lowZ ? "5!" : " ") + << (highPtindex != lowZBA ? "6!" : " ") + << endl; + */ + if(doPt) { + //Choose old PV container if not refitted + const xAOD::VertexContainer* ParentContainer = + (refPVvertexes_toDelete.at(highPtindex)) ? refPvContainer : pvContainer; + if(ParentContainer == refPvContainer) //if refitted add to refitted container + refPvContainer->push_back(const_cast<xAOD::Vertex*>(refPVvertexes.at(highPtindex))); // store the new vertex + + FillBPhysHelper(vtx, refPVvertexes[highPtindex], + ParentContainer, xAOD::BPhysHelper::PV_MAX_SUM_PT2, exitCode[highPtindex]); + vtx.setOrigPv(goodPrimaryVertices[highPtindex], pvContainer, xAOD::BPhysHelper::PV_MAX_SUM_PT2); + } + + if(doA0) { + const xAOD::VertexContainer* ParentContainer = + (refPVvertexes_toDelete.at(lowA0)) ? 
refPvContainer : pvContainer; + if(ParentContainer == refPvContainer && highPtindex!=lowA0) + refPvContainer->push_back(const_cast<xAOD::Vertex*>(refPVvertexes.at(lowA0))); // store the new vertex + + FillBPhysHelper(vtx, refPVvertexes[lowA0], + ParentContainer, xAOD::BPhysHelper::PV_MIN_A0, exitCode[lowA0]); + vtx.setOrigPv(goodPrimaryVertices[lowA0], pvContainer, xAOD::BPhysHelper::PV_MIN_A0); + } + + + // 2.c) the closest in Z: + if(doZ0) { + + const xAOD::VertexContainer* ParentContainer = + (refPVvertexes_toDelete.at(lowZ)) ? refPvContainer : pvContainer; + if(ParentContainer == refPvContainer && highPtindex!=lowZ && lowZ!=lowA0) + refPvContainer->push_back(const_cast<xAOD::Vertex*>(refPVvertexes.at(lowZ))); // store the new vertex + + FillBPhysHelper(vtx, refPVvertexes[lowZ], + ParentContainer, xAOD::BPhysHelper::PV_MIN_Z0, exitCode[lowZ]); + vtx.setOrigPv(goodPrimaryVertices[lowZ], pvContainer, xAOD::BPhysHelper::PV_MIN_Z0); + } + + // 2.d) the closest in Z (DOCA w.r.t. beam axis): + if (doZ0BA) { + if ( lowZBA < pVmax ) { // safety for vector indices + const xAOD::VertexContainer* ParentContainer = + (refPVvertexes_toDelete.at(lowZBA)) ? 
+ refPvContainer : pvContainer; + if (ParentContainer == refPvContainer && highPtindex!=lowZBA + && lowZBA!=lowA0 && lowZBA != lowZ) { + // store the new vertex + refPvContainer->push_back(const_cast<xAOD::Vertex*> + (refPVvertexes.at(lowZBA))); + } + FillBPhysHelper(vtx, refPVvertexes[lowZBA], + ParentContainer, xAOD::BPhysHelper::PV_MIN_Z0_BA, + exitCode[lowZBA]); + vtx.setOrigPv(goodPrimaryVertices[lowZBA], pvContainer, + xAOD::BPhysHelper::PV_MIN_Z0_BA); + } else { + // nothing found -- fill nullptr + FillBPhysHelperNULL(vtx, pvContainer, xAOD::BPhysHelper::PV_MIN_Z0_BA, m_3dCalc); + // nothing found -- fill dummy vertex (type-0 vertex) + // if(pvContainer->empty()) return StatusCode::FAILURE; + // const xAOD::Vertex* dummy = pvContainer->at(pvContainer->size()-1); //No good vertices so last vertex must be dummy + // FillBPhysHelper(vtx, dummy, pvContainer, xAOD::BPhysHelper::PV_MIN_Z0_BA, 0); + // vtx.setOrigPv(Dummy, pvContainer, xAOD::BPhysHelper::PV_MIN_Z0_BA); + } + } + + //nullptrify ptrs we want to keep so these won't get deleted + //"delete nullptr" is valid in C++ and does nothing so this is quicker than a lot of if statements + if(doPt) refPVvertexes_toDelete[highPtindex] = nullptr; + if(doA0) refPVvertexes_toDelete[lowA0] = nullptr; + if(doZ0) refPVvertexes_toDelete[lowZ] = nullptr; + if(doZ0BA && lowZBA < pVmax) refPVvertexes_toDelete[lowZBA] = nullptr; + //Loop over toDELETE container, anything that is used or was not refitted is nullptr + //This cleans up all extra vertices that were created and not used + for(const xAOD::Vertex* ptr : refPVvertexes_toDelete) { + delete ptr; + } + refPVvertexes.clear();// Clear lists of now dangling ptrs + refPVvertexes_toDelete.clear(); + exitCode.clear(); + } // end of loop over vertices + + } else { +// cout << "Warning: DerivationFramework::BPhysPVTools::FillCandwithRefittedVertices No Primary Vertices Found trying to decorate wilth dummy \n"; + if(pvContainer->empty()) return StatusCode::FAILURE; + const 
xAOD::Vertex* dummy = pvContainer->at(0); //No good vertices so last vertex must be dummy + DecorateWithDummyVertex(vtxContainer, pvContainer, dummy, DoVertexType, true); + } + + return StatusCode::SUCCESS; +} + +size_t DerivationFramework::BPhysPVTools::FindHighPtIndex(const std::vector<const xAOD::Vertex*> &PVlist) { + // it SHOULD be the first one in the collection but it shouldn't take long to do a quick check + for(size_t i =0; i<PVlist.size(); i++) { + if(PVlist[i]->vertexType() == xAOD::VxType::PriVtx) return i; + } + cout << "FATAL ERROR High Pt Primary vertex not found - this should not happen\n"; + return std::numeric_limits<std::size_t>::max(); //This should not happen +} + +size_t DerivationFramework::BPhysPVTools::FindLowA0Index(const xAOD::BPhysHelper &Obj, + const std::vector<const xAOD::Vertex*> &PVlist, + const size_t PV_minNTracks) const { + size_t lowA0 = 0; + if(PVlist.empty()) { + lowA0=std::numeric_limits<std::size_t>::max(); + return lowA0; + } + size_t size = PVlist.size(); + double lowA0calc = m_v0Tools->a0(Obj.vtx(), PVlist[0]); + for(size_t i =1; i<size; i++) { + if ( PVlist[i]->nTrackParticles() >= PV_minNTracks ) { + double a0 = m_v0Tools->a0(Obj.vtx(), PVlist[i]); + if(a0 < lowA0calc) { + lowA0calc = a0; + lowA0 =i; + } + } + } + return lowA0; +} + +vector<const xAOD::Vertex*> DerivationFramework::BPhysPVTools::GetGoodPV(const xAOD::VertexContainer* pvContainer) { + typedef xAOD::VxType::VertexType VertexType; + VertexType Pvtx = xAOD::VxType::PriVtx; + VertexType Pileupvtx = xAOD::VxType::PileUp; + std::vector<const xAOD::Vertex*> goodPrimaryVertices; + goodPrimaryVertices.reserve(pvContainer->size()); + + for (auto ptr = pvContainer->begin(); ptr!= pvContainer->end(); ++ptr) { + VertexType thistype = (*ptr)->vertexType(); + if ( thistype == Pileupvtx || thistype == Pvtx ) { + goodPrimaryVertices.push_back(*ptr); + } else { +// cout << "vertex type " << thistype << endl; + } + } + return goodPrimaryVertices; +} 
+//----------------------------------------------------------------------------- +// added by WW: +// +void DerivationFramework::BPhysPVTools::SetMinNTracksInPV(size_t PV_minNTracks) +{ + + m_PV_minNTracks = PV_minNTracks; +} +//----------------------------------------------------------------------------- +// added by WW: +// +const Amg::Vector3D& DerivationFramework::BPhysPVTools::GetBeamSpot() const noexcept { + + if(m_beamSpotData) return m_beamSpotData->beamPos(); + else { + static const Amg::Vector3D defaultBS(-10000.,-10000.,-10000.); + return defaultBS; + } +} +//----------------------------------------------------------------------------- +// added by WW: +// +size_t DerivationFramework::BPhysPVTools::FindLowZ0BAIndex(const xAOD::BPhysHelper &obj, + const std::vector<const xAOD::Vertex*> &PVlist, + const size_t PV_minNTracks) const { + + size_t ilowZ0BA = std::numeric_limits<std::size_t>::max(); + double lowZ0BAcalc = std::numeric_limits<double>::max(); + for (size_t i = 0; i<PVlist.size(); ++i) { + if ( PVlist[i]->nTrackParticles() >= PV_minNTracks ) { + double z0BA = m_v0Tools->a0(obj.vtx(), PVlist[i]); + if (z0BA < lowZ0BAcalc) { + lowZ0BAcalc = z0BA; + ilowZ0BA = i; + } + } + } + return ilowZ0BA; +} +//----------------------------------------------------------------------------- +// added by WW: +// +double DerivationFramework::BPhysPVTools::DistInZtoDOCA(const xAOD::BPhysHelper &obj, const xAOD::Vertex* vertex) const { + + Amg::Vector3D pv = vertex->position(); + Amg::Vector3D xDOCA = DocaExtrapToBeamSpot(obj); + Amg::Vector3D vec = pv - xDOCA; + return vec.z(); +} +//----------------------------------------------------------------------------- +// added by WW: +// +Amg::Vector3D DerivationFramework::BPhysPVTools::DocaExtrapToBeamSpot(const xAOD::BPhysHelper& obj) const { + + Amg::Vector3D xDOCA(-99999., -99999., -99999.); + TVector3 totP(const_cast<xAOD::BPhysHelper&>(obj).totalP()); + Amg::Vector3D pSV(totP.X(), totP.Y(), totP.Z()); + Amg::Vector3D 
pT(pSV.x(), pSV.y(), 0.); + if ( pT.mag2() > 0 ) { + Amg::Vector3D xBS = GetBeamSpot(); + Amg::Vector3D xSV = m_v0Tools->vtx(obj.vtx()); + Amg::Vector3D xT(xSV.x()-xBS.x(), xSV.y()-xBS.y(), 0.); + xDOCA = xSV - pSV*pT.dot(xT)/pT.mag2(); + } else { + std::cout << "BPhysPVTools::DocaExtrapToBeamSpot: WARNING pT == 0." + << std::endl; + } + return xDOCA; +} + +void DerivationFramework::BPhysPVTools::PrepareVertexLinks(xAOD::Vertex* theResult, + const xAOD::TrackParticleContainer* importedTrackCollection) +{ + std::vector<ElementLink<DataVector<xAOD::TrackParticle> > > newLinkVector; + const auto &trkprtl = theResult->trackParticleLinks(); + for(unsigned int i=0; i< trkprtl.size(); i++) + { + ElementLink<DataVector<xAOD::TrackParticle> > mylink=trkprtl[i]; //makes a copy (non-const) + mylink.setStorableObject(*importedTrackCollection, true); + newLinkVector.push_back( mylink ); + } + theResult->clearTracks(); + theResult->setTrackParticleLinks( newLinkVector ); +} + + +//----------------------------------------------------------------------------- diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BPhysVarBlinder.cxxNoCompile b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BPhysVarBlinder.cxxNoCompile new file mode 100644 index 0000000000000000000000000000000000000000..13a63f881babe5a25fe704fefc3dac11235f0068 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BPhysVarBlinder.cxxNoCompile @@ -0,0 +1,72 @@ +/* + Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration +*/ + +//============================================================================ +// BPhysVarBlinder.cxx +//============================================================================ +// +// Author : Wolfgang Walkowiak <Wolfgang.Walkowiak@cern.ch.> +// Changes: +// +//============================================================================ +// +#include "BPhysTools/BPhysBlindingTool.h" +#include 
"DerivationFrameworkBPhys/BPhysVarBlinder.h" + +namespace DerivationFramework { + + //--------------------------------------------------------------------------- + // Constructor + //--------------------------------------------------------------------------- + BPhysVarBlinder::BPhysVarBlinder(const std::string& t, + const std::string& n, + const IInterface* p) : + CfAthAlgTool(t,n,p), + m_blindingTool("xAOD::BPhysBlindingTool") { + + declareInterface<DerivationFramework::IAugmentationTool>(this); + + // Declare tools + declareProperty("BlindingTool", m_blindingTool); + + // Declare user-defined properties + declareProperty("EnableBlinding", m_enableBlinding = true); + + } + //--------------------------------------------------------------------------- + // Initialization + //--------------------------------------------------------------------------- + StatusCode BPhysVarBlinder::initialize() { + + ATH_MSG_DEBUG("in initialize()"); + + // retrieve blinding tool + CHECK( m_blindingTool.retrieve() ); + + ATH_MSG_INFO("initialize(): EnableBlinding: " << m_enableBlinding); + + return StatusCode::SUCCESS; + } + //--------------------------------------------------------------------------- + // Finalization + //--------------------------------------------------------------------------- + StatusCode BPhysVarBlinder::finalize() { + + // everything all right + return StatusCode::SUCCESS; + } + //--------------------------------------------------------------------------- + // Perform blinding + //--------------------------------------------------------------------------- + StatusCode BPhysVarBlinder::addBranches() const { + + if ( m_enableBlinding ) { + CHECK( m_blindingTool->doBlind() ); + } + + return StatusCode::SUCCESS; + } + //--------------------------------------------------------------------------- + +} // namespace DerivationFramework diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BPhysVertexTrackBase.cxx 
b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BPhysVertexTrackBase.cxx new file mode 100644 index 0000000000000000000000000000000000000000..2f2cef79bfca9169fddd8060020ccf6d9264a38e --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BPhysVertexTrackBase.cxx @@ -0,0 +1,1480 @@ +/* + Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration +*/ + +//============================================================================ +// BPhysVertexTrackBase.cxx +//============================================================================ +// +// Author : Wolfgang Walkowiak <Wolfgang.Walkowiak@cern.ch.> +// Changes: +// +// Base class for vertex-track related classes in need of +// track-to-vertex association handling. +// +// Derived classes should overwrite the following methods instead +// of the orginal initialize() etc methods: +// - initializeHook() +// - finalizeHook() +// - addBranchesHook() -- only called at end of addBranches() +// - addBranchesSVLoopHook() -- preferred (inside SV loop) +// +// From within addBranchesSVLoopHook() +// - calculateValues() +// should be called and the following hook methods should be overwritten: +// - calcValuesHook() -- called for the actual calculation of values +// - fastFillHook() -- called to check and apply caching of values +// +// For an usage example see BVertexTrackIsoTool and BPHY8.py . +// +// Job options provided by this class: +// - BranchPrefixes -- vector of prefixes to assign to +// added branches +// (Must be of same size as VertexContainerNames +// and in the same order.) 
+// - BranchBaseName -- assign the base name of added branches +// (default: iso) +// - BranchSuffix -- assign the suffix of added branches +// (default: empty = none) +// - VertexContainerNames -- names of containers for secondary vertices +// - TrackParticleContainerName -- name of container for TrackParticles +// - TrackToVertexTool -- ToolHandle for track-to-vertex tool +// - TrackSelectionTools -- Array of ToolHandles for track +// selection tools; each tool should +// be named XXXX_YYYY, where the YYYY +// suffix string which needs to be unique; +// will be used to name the isolation track +// category (part of the new attribute names) +// - RefPVContainerNames -- List of refitted PV container names. +// (Must be of same size as VertexContainerNames +// and in the same order.) +// - DoVertexType -- PV-to-SV association types to be +// considered (bitwise variable, see +// xAODBPhys::BPhysHelper) +// - UseTrackTypes -- List of or-ed bit-wise selection of +// track sets to consider: +// bit : meaning +// 0 : tracks close to PV associated +// with SV +// 1 : tracks associated with dummy PV +// ("type-0 PV tracks") +// 2 : tracks associated with PV of type 1 +// 3 : tracks associated with PV of type 2 +// 4 : tracks associated with PV of type 3 +// 5 : tracks associated with PV with types +// other than 0 to 4. +// 6 : tracks with missing pointer to +// PV (NULL pointer) +// 7-22: tracks being closest to assoc. 
PV +// useRefittedPVs doDCAin3D chi2DefToUse +// 7 : yes no 0 +// 8 : no no 0 +// 9 : yes yes 0 +// 10 : no yes 0 +// 11 : yes no 1 +// 12 : no no 1 +// 13 : yes yes 1 +// 14 : no yes 1 +// 15 : yes no 2 +// 16 : no no 2 +// 17 : yes yes 2 +// 18 : no yes 2 +// 19 : yes -- 3 +// 20 : no -- 3 +// 21 : yes -- 4 +// 22 : no -- 4 +// 23 : yes -- 5 +// 24 : no -- 5 +// 25 : yes yes 6 +// 26 : no yes 6 +// 27 : yes yes 7 +// 28 : no yes 7 +// 29 : yes yes 8 +// 30 : no yes 8 +// 31 : yes yes 9 +// 32 : no yes 9 +// useRefittedPVs: +// replace PV associated to decay candidate +// by the refitted PV +// doDCAin3D: +// use d0 and z0 in the determination of +// of the point of closest approach of +// a track to a vertex +// chi2DefToUse: +// PV uncertainties in the chi2 calculation +// in addition to track uncertainties +// 0 : use old method +// (only track uncertainties) +// 1 : from track perigee with +// uncertainties from track and vertex +// 2 : simple extrapolation from track +// parameters with uncertainties from +// track and vertex (extrapolation +// used for track swimming) +// 3 : CalcLogChi2toPV method from NtupleMaker +// using xAOD::TrackingHelpers. +// (only track uncertainties) +// 4 : CalcLogChi2toPV method from NtupleMaker +// using xAOD::TrackingHelpers. +// (track and vertex uncertainties) +// 5 : use TrackVertexAssociationTool +// 6 : full 3D chi2 from track perigee +// with uncertainties from track and +// vertex (sum of 3x3 covariance matrices) +// 7 : full 3D chi2 from track perigee with +// uncertainties from track and vertex +// (sum of 2x2 covariance matrices) +// 8 : simple extrapolation from track +// parameters with uncertainties +// from track and vertex +// (sum of 3x3 covariance matrices) +// 9 simple extrapolation from track +// parameters with uncertainties +// from track and vertex +// (sum of 2x2 covariance matrices) +// (E.g. 127 means to consider all tracks.) 
+// - IncPrecVerticesInDecay -- Include preceeding vertices in search +// for ID tracks and muons from decaying +// particle. (May be a bit slower but +// more accurate. Double-counting of track +// or muon objects is excluded. +// Default: True) +// - MinNTracksInPV -- Minimum number of tracks in PV for +// PV to be considered in calculation +// of closest PV to a track +// - PVTypesToConsider -- List of primary vertex types to consider +// in calculation of closest PV to a track +// - DebugTrackTypes -- Count tracks of specific types (bit +// patterns w.r.t. vertex association) +// and dump statistics to log file +// 0 : disabled +// 1 : count tracks of certain types +// - DebugTracksInEvents -- debug track selections in detail for +// a list of event numbers. +// +// +//============================================================================ +// +#include "DerivationFrameworkBPhys/BPhysVertexTrackBase.h" +#include "xAODTracking/TrackParticlexAODHelpers.h" +#include "xAODBPhys/BPhysHelper.h" +#include "InDetTrackSelectionTool/IInDetTrackSelectionTool.h" +#include "EventPrimitives/EventPrimitivesHelpers.h" +#include "TrackVertexAssociationTool/TrackVertexAssociationTool.h" +#include "TVector3.h" +#include "TString.h" +#include "boost/format.hpp" +#include <algorithm> +#include <sstream> +#include <limits> + +namespace DerivationFramework { + + //------------------------------------------------------------------------- + // + // helper class + BPhysVertexTrackBase::BaseItem::BaseItem(std::string Name, + std::string Bname, + std::string Prefix) : + name(std::move(Name)), bname(std::move(Bname)), prefix(std::move(Prefix)) { + } + + BPhysVertexTrackBase::BaseItem::~BaseItem() { + } + + void BPhysVertexTrackBase::BaseItem::setup(std::string Name, + std::string Bname, + std::string Prefix) { + name = std::move(Name); + bname = std::move(Bname); + prefix = std::move(Prefix); + } + + void BPhysVertexTrackBase::BaseItem::setPrefix(std::string Prefix) { + prefix = 
std::move(Prefix); + } + + void BPhysVertexTrackBase::BaseItem::resetVals() { + // needs to be implemented by derived class + } + + std::string BPhysVertexTrackBase::BaseItem::buildName(std::string qualifier, + std::string suffix) { + boost::format f("%s%s%s%s%s"); + f % (prefix.length() > 0 ? prefix+"_" : "") + % (bname.length() > 0 ? bname+"_" : "") + % (qualifier.length() > 0 ? qualifier+"_" : "") + % name + % suffix; + return f.str(); + } + + std::string BPhysVertexTrackBase::BaseItem::toString() const { + boost::format f("nm: %s\nbn: %s"); + f % name % bname; + return f.str(); + } + //------------------------------------------------------------------------- + // + // helper class (for track types) + // + BPhysVertexTrackBase::TrackTypeCounter:: + TrackTypeCounter(BPhysVertexTrackBase& Parent, std::string Name) + : name(std::move(Name)), m_parent(Parent) { + } + + BPhysVertexTrackBase::TrackTypeCounter::~TrackTypeCounter() { + } + + void BPhysVertexTrackBase::TrackTypeCounter::addToCounter(uint64_t atype, + uint64_t rtype, + std::string prefix, + std::string suffix, + uint64_t counts) { + boost::format f("%sT%010d_R%010d%s"); + f % (prefix.length() > 0 ? prefix+"_" : "") + % atype + % m_parent.m_useTrackTypes[rtype] + % (suffix.length() > 0 ? 
"_"+suffix : ""); + + addToCounter(f.str(), atype, counts); + } + + void BPhysVertexTrackBase::TrackTypeCounter::addToCounter(std::string name, + uint64_t atype, + uint64_t counts) { + + NameCountMap_t::const_iterator it = m_cnts.find(name); + + if ( it != m_cnts.end() ) { + m_cnts[name].first += counts; + } else { + m_cnts[name] = std::make_pair(counts, atype); + } + } + + std::string BPhysVertexTrackBase::TrackTypeCounter:: + countsToString(uint indent) const { + + boost::format f("%sCounters for %s:\n"); + f % boost::io::group(std::setw(indent), " ") % name; + std::string str = f.str(); + + int lmax(0); + for (NameCountMap_t::const_iterator it = m_cnts.begin(); + it != m_cnts.end(); ++it) { + lmax = std::max(lmax, (int)(it->first).length()); + } + + for (NameCountMap_t::const_iterator it = m_cnts.begin(); + it != m_cnts.end(); ++it) { + boost::format f("%s%-s : %10lld %33s"); + f % boost::io::group(std::setw(indent+4), " ") + % boost::io::group(std::setw(lmax), it->first) + % (it->second).first + % std::bitset<33>((it->second).second).to_string(); + str += f.str() + "\n"; + } + // clean up last newline + str.erase(str.length()-1); + + return str; + } + //-------------------------------------------------------------------------- + //------------------------------------------------------------------------- + // static members + const int BPhysVertexTrackBase::n_track_types = 33; + const std::string BPhysVertexTrackBase::track_type_str[] = + {"ASSOCPV", "PVTYPE0", "PVTYPE1", "PVTYPE2", "PVTYPE3", "NONE", "NULLVP", + "CAPVRFN3U0", "CAPVNRN3U0", "CAPVRF3DU0", "CAPVNR3DU0", + "CAPVRFN3U1", "CAPVNRN3U1", "CAPVRF3DU1", "CAPVNR3DU1", + "CAPVRFN3U2", "CAPVNRN3U2", "CAPVRF3DU2", "CAPVNR3DU2", + "CAPVRFNNU3", "CAPVNRNNU3", "CAPVRFNNU4", "CAPVNRNNU4", + "CAPVRFNNU5", "CAPVNRNNU5", "CAPVRFNNU6", "CAPVNRNNU6", + "CAPVRFNNU7", "CAPVNRNNU7", "CAPVRFNNU8", "CAPVNRNNU8", + "CAPVRFNNU9", "CAPVNRNNU9"}; + const uint64_t BPhysVertexTrackBase::track_type_bit[] = + {0x1, 0x2, 0x4, 0x8, 
0x10, 0x20, 0x40, + 0x80, 0x100, 0x200, 0x400, + 0x800, 0x1000, 0x2000, 0x4000, + 0x8000, 0x10000, 0x20000, 0x40000, + 0x80000, 0x100000, 0x200000, 0x400000, + 0x800000, 0x1000000, 0x2000000, 0x4000000, + 0x8000000, 0x10000000, 0x20000000, 0x40000000, + 0x80000000, 0x100000000}; + uint64_t BPhysVertexTrackBase::s_track_type_all_cached = 0x0; + + // static methods + const std::string + BPhysVertexTrackBase::tts(BPhysVertexTrackBase::track_type type) { + return track_type_str[type]; + } + + uint64_t BPhysVertexTrackBase::ttb(BPhysVertexTrackBase::track_type type) { + return track_type_bit[type]; + } + + uint64_t BPhysVertexTrackBase::ttallMin() { + // only bits 0 - 6 + return 127; + } + + uint64_t BPhysVertexTrackBase::ttall() { + if ( s_track_type_all_cached == 0x0 ) { + for (unsigned int i=0; i<n_track_types; ++i) { + s_track_type_all_cached |= track_type_bit[i]; + } + } + return s_track_type_all_cached; + } + + uint64_t BPhysVertexTrackBase::rttor(const std::vector<uint64_t> &vtypes) { + // or of requested track types + uint64_t ttor(0); + for (size_t i=0; i<vtypes.size(); ++i) { + ttor |= vtypes[i]; + } + return ttor; + } + + // track to string + std::string + BPhysVertexTrackBase::trackToString(const xAOD::TrackParticle* track) { + std::string str; + if (track != nullptr) { + boost::format f("p(%10.4f,%10.4f,%10.4f)\n" + "d:(%10.5f,%10.5f,%10.5f,%10.5f,%10.6f)"); + f % (track->p4()).Px() % (track->p4()).Py() % (track->p4()).Pz(); + f % track->d0() % track->z0() % track->phi0() % track->theta(); + f % track->qOverP(); + str = f.str(); + } // if track + return str; + } + + //-------------------------------------------------------------------------- + // Static utility method to prefix every line by a certain string + //-------------------------------------------------------------------------- + std::string BPhysVertexTrackBase::wrapLines(std::string lines, + std::string prefix) { + + std::string ostr; + std::istringstream stream(lines); + std::string line; + while 
( std::getline(stream, line) ) { + if ( !ostr.length() == 0 ) ostr += "\n"; + ostr += prefix + line; + } + return ostr; + } + //-------------------------------------------------------------------------- + //-------------------------------------------------------------------------- + BPhysVertexTrackBase::BPhysVertexTrackBase(const std::string& t, + const std::string& n, + const IInterface* p) + : AthAlgTool(t,n,p), m_trackToVertexTool("Reco::TrackToVertex"), + m_tvaTool("CP::TrackVertexAssociationTool"), + m_tvaToolHasWpLoose(false), + m_tracks(NULL), m_tracksAux(NULL), m_nEvtsSeen(0), m_eventInfo(nullptr), + m_trackTypesUsed(0), m_runNumber(0), m_evtNumber(0), + m_debugTracksInThisEvent(false) { + + declareInterface<DerivationFramework::IAugmentationTool>(this); + + // Declare branch prefix + declareProperty("BranchPrefixes", m_branchPrefixes); + declareProperty("BranchBaseName", m_branchBaseName = "iso"); + declareProperty("BranchSuffix" , m_branchSuffix = "" ); + + // Necessary containers + declareProperty("VertexContainerNames" , m_vertexContainerNames); + declareProperty("TrackParticleContainerName", + m_trackParticleContainerName); + declareProperty("TrackToVertexTool" , m_trackToVertexTool); + declareProperty("TrackSelectionTools" , m_trackSelectionTools); + declareProperty("TVATool" , m_tvaTool); + declareProperty("PVContainerName", m_pvContainerName = "PrimaryVertices"); + declareProperty("RefPVContainerNames" , m_refPVContainerNames); + declareProperty("DoVertexType" , m_doVertexType = 8); + declareProperty("UseTrackTypes" , m_useTrackTypes = {7}); + declareProperty("IncPrecVerticesInDecay", m_incPrecVerticesInDecay = true); + declareProperty("MinNTracksInPV" , m_minNTracksInPV = 0); + declareProperty("PVTypesToConsider" , m_pvTypesToConsider = {1,3}); + declareProperty("DebugTrackTypes" , m_debugTrackTypes=0); + declareProperty("DebugTracksInEvents" , m_debugTracksInEvents = {}); + } + 
//-------------------------------------------------------------------------- + StatusCode BPhysVertexTrackBase::initialize() { + + ATH_MSG_DEBUG("BPhysVertexTrackBase::initialize() -- begin"); + + if ( m_vertexContainerNames.size() == 0 ) { + ATH_MSG_ERROR("No vertex container names provided!"); + } + if ( m_refPVContainerNames.size() == 0 ) { + ATH_MSG_ERROR("No refitted PV container names provided!"); + } + if ( m_trackParticleContainerName == "" ) { + ATH_MSG_ERROR("No track particle container name provided!"); + } + if ( m_pvContainerName == "" ) { + ATH_MSG_ERROR("No PV container name provided!"); + } + if ( m_vertexContainerNames.size() != m_refPVContainerNames.size() ) { + ATH_MSG_ERROR("Size mismatch of VertexContainerNames (" + << m_vertexContainerNames.size() + << ") and RefPVContainerNames (" + << m_refPVContainerNames.size() << ") lists!"); + } + + if ( m_vertexContainerNames.size() != m_branchPrefixes.size() ) { + ATH_MSG_ERROR("Size mismatch of VertexContainerNames (" + << m_vertexContainerNames.size() + << ") and BranchPrefixes (" + << m_branchPrefixes.size() << ") lists!"); + } + + // TrackToVertexTool + ATH_CHECK(m_trackToVertexTool.retrieve()); + + // TrackSelectionTools + for (auto selTool : m_trackSelectionTools ) { + ATH_CHECK(selTool.retrieve()); + } + + // TrackVertexAssociationTool + ATH_CHECK(m_tvaTool.retrieve()); + // take note of working point + // const std::string tvaWp("Loose"); + const std::string tvaWp = + dynamic_cast<CP::TrackVertexAssociationTool*>(m_tvaTool.get())->getProperty("WorkingPoint").toString(); + m_tvaToolHasWpLoose = (tvaWp == "Loose"); + + // initialize PV-to-SV association type vector + initPvAssocTypeVec(); + + // initialize track type request pattern + m_trackTypesUsed = rttor(m_useTrackTypes); + + // initialize track type counters + if ( m_debugTrackTypes > 0 ) { + m_mttc = std::make_unique<TrackTypeCounter>(*this, name()); + } + + ATH_MSG_DEBUG("BPhysVertexTrackBase::initialize() -- end"); + + return 
initializeHook(); + } + //-------------------------------------------------------------------------- + StatusCode BPhysVertexTrackBase::finalize() { + + ATH_MSG_DEBUG("BPhysVertexTrackBase::finalize()"); + + // dump track type counters to log + if ( m_debugTrackTypes > 0 ) { + ATH_MSG_INFO("Track type counters:\n" << m_mttc->countsToString()); + } + + // everything all right + return finalizeHook(); + } + //-------------------------------------------------------------------------- + StatusCode BPhysVertexTrackBase::addBranches() const { + + ATH_MSG_DEBUG("BPhysVertexTrackBase::addBranches() -- begin"); + + // counter + m_nEvtsSeen++; + + // run and event numbers + CHECK(evtStore()->retrieve(m_eventInfo)); + m_runNumber = m_eventInfo->runNumber(); + m_evtNumber = m_eventInfo->eventNumber(); + + // debug tracks in current event? + m_debugTracksInThisEvent = (std::find(m_debugTracksInEvents.begin(), + m_debugTracksInEvents.end(), + m_evtNumber) + != m_debugTracksInEvents.end()); + + // retrieve primary vertices container + m_pvtxContainer = NULL; + CHECK(evtStore()->retrieve(m_pvtxContainer, m_pvContainerName)); + ATH_MSG_DEBUG("Found PV collection with key " << m_pvContainerName); + + + // retrieve ID track container + m_tracks = NULL; + m_tracksAux = NULL; + CHECK(evtStore()->retrieve(m_tracks, m_trackParticleContainerName)); + if (evtStore()->contains<xAOD:: + TrackParticleAuxContainer>(m_trackParticleContainerName+"Aux.")) { + CHECK(evtStore()->retrieve(m_tracksAux, + m_trackParticleContainerName+"Aux.")); + } else { + ATH_MSG_DEBUG("No aux track collection with key " + << m_trackParticleContainerName+"Aux."); + } + ATH_MSG_DEBUG("Found track collection with key " + << m_trackParticleContainerName); + + // Loop over all vertex containers + for (size_t i=0; i<m_vertexContainerNames.size(); ++i) { + // vertex container and its auxilliary store + const xAOD::VertexContainer* svtxContainer = NULL; + const xAOD::VertexAuxContainer* svtxAuxContainer = NULL; + // 
refitted primary vertex container and its auxilliary store + const xAOD::VertexContainer* refPVContainer = NULL; + const xAOD::VertexAuxContainer* refPVAuxContainer = NULL; + + // retrieve from StoreGate + CHECK(evtStore()->retrieve(svtxContainer, m_vertexContainerNames[i])); + CHECK(evtStore()->retrieve(svtxAuxContainer, + m_vertexContainerNames[i]+"Aux.")); + ATH_MSG_DEBUG("Found SV collection with key " + << m_vertexContainerNames[i]); + CHECK(evtStore()->retrieve(refPVContainer , + m_refPVContainerNames[i])); + CHECK(evtStore()->retrieve(refPVAuxContainer, + m_refPVContainerNames[i]+"Aux.")); + ATH_MSG_DEBUG("Found refitted PV collection with key " + << m_refPVContainerNames[i]); + + // vertex container depending setup in derived class + CHECK(addBranchesVCSetupHook(i)); + + // loop over secondary vertices + for (xAOD::VertexContainer::const_iterator vtxItr = + svtxContainer->begin(); vtxItr!=svtxContainer->end(); + ++vtxItr) { + + CHECK(addBranchesSVLoopHook(*vtxItr)); + + } // end of loop over vertices + } // end of loop over vertex container names + + ATH_MSG_DEBUG("BPhysVertexTrackBase::addBranches() -- end"); + + // nothing to do here + return addBranchesHook(); + } + + //-------------------------------------------------------------------------- + // Hook method for initialize() -- to be overwritten by derived class + //-------------------------------------------------------------------------- + StatusCode BPhysVertexTrackBase::initializeHook() { + + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + // Hook method for finalize() -- to be overwritten by derived class + //-------------------------------------------------------------------------- + StatusCode BPhysVertexTrackBase::finalizeHook() { + + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + // Hook method for addBranches() -- to be overwritten by derived class + 
//-------------------------------------------------------------------------- + StatusCode BPhysVertexTrackBase::addBranchesHook() const { + + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + // Hook method for addBranches() VC setup + // -- to be overwritten by derived class + //-------------------------------------------------------------------------- + StatusCode BPhysVertexTrackBase::addBranchesVCSetupHook(size_t ivc) const { + + // just to avoid a compiler warning + ATH_MSG_DEBUG("addBranchesVCSetupHook: Vertex container index " << ivc + << " for collection " << m_vertexContainerNames[ivc] + << " with prefix " << m_branchPrefixes[ivc]); + + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + // Hook method for addBranches() SV loop -- to be overwritten by derived class + //-------------------------------------------------------------------------- + StatusCode + BPhysVertexTrackBase::addBranchesSVLoopHook(const xAOD::Vertex* vtx) const { + + // just to avoid a compiler warning + ATH_MSG_DEBUG("addBranchesSVLoopHook: Vertex " << vtx); + + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + // Calculate values -- used by calculateValues() + // -- to be overwritten by derived class + //-------------------------------------------------------------------------- + StatusCode + BPhysVertexTrackBase::calcValuesHook(const xAOD::Vertex* vtx, + const unsigned int ipv, + const unsigned int its, + const unsigned int itt) const { + + // just to avoid a compiler warning + ATH_MSG_DEBUG("calcIsolationOpti: vtx: " << vtx << ", ipv: " << ipv + << ", its: " << its << ", itt: " << itt); + + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + // Fill values from cache if found -- used by calculateValues() + // -- to be overwritten by 
derived class + //-------------------------------------------------------------------------- + bool BPhysVertexTrackBase::fastFillHook(const xAOD::Vertex* vtx, + const int ipv) const { + + // just to avoid a compiler warning + ATH_MSG_DEBUG("fastIsoFill: vtx: " << vtx << ", ipv: " << ipv); + + return false; + } + //-------------------------------------------------------------------------- + // Calculation loops -- needs to be called from inside the implementation + // of addBranchesSVLoopHook() in the derived class. + // Derived class also needs to provide override methods for + // - fastFillHook -- needs to return true if cached value is used + // - calcValuesHook -- actually calculating value(s) + //-------------------------------------------------------------------------- + StatusCode + BPhysVertexTrackBase::calculateValues(const xAOD::Vertex* vtx) const { + + ATH_MSG_DEBUG("BPhysVertexTrackBase::calculateValues -- begin"); + + unsigned int nPvAssocs = m_pvAssocTypes.size(); + unsigned int nTrackSels = m_trackSelectionTools.size(); + unsigned int nTrackTypes = m_useTrackTypes.size(); + + m_pvAssocResMap.clear(); + + const xAOD::BPhysHelper cand(vtx); + for (unsigned int ipv = 0; ipv < nPvAssocs; ++ipv) { + if ( ipv == 0 || ! 
fastFillHook(vtx, ipv) ) { + for (unsigned int its = 0; its < nTrackSels; ++its) { + for (unsigned int itt = 0; itt < nTrackTypes; ++itt) { + ATH_MSG_DEBUG("Calling calcValuesHook with ipv: " << ipv + << ", its: " << its << ", itt: " << itt); + CHECK(calcValuesHook(vtx, ipv, its, itt)); + } // for itt + } // for its + // cache result index -- only needed once per ipv value + m_pvAssocResMap[buildPvAssocCacheName(vtx, ipv)] = ipv; + ATH_MSG_DEBUG("calculateValues: cache index: " + << buildPvAssocCacheName(vtx, ipv) + << " -- cached ipv: " << ipv); + } // if !fastFillHook() + } // for ipv + + return StatusCode::SUCCESS; + } + //------------------------------------------------------------------------- + // Build SV-to-PV association cache index string + //------------------------------------------------------------------------- + std::string + BPhysVertexTrackBase::buildPvAssocCacheName(const xAOD::Vertex* vtx, + const int ipv) const { + xAOD::BPhysHelper cand(vtx); + boost::format f("SV_%p_RPV_%p"); + f % cand.vtx() % cand.pv(m_pvAssocTypes[ipv]); + + return f.str(); + } + //-------------------------------------------------------------------------- + // getTrackCandPVLogChi2() + // Calculate the logChi2 (= log((d0/d0e)^2+(z0/z0e)^2) contribution of a + // track at the position closest to the PV associated with the SV. + //-------------------------------------------------------------------------- + double BPhysVertexTrackBase::getTrackCandPVLogChi2(const xAOD::TrackParticle* + track, + const xAOD::Vertex* vtx, + bool doDCAin3D, + int chi2DefToUse + ) const { + + return getTrackLogChi2DCA(track, vtx, doDCAin3D, chi2DefToUse)[4]; + } + //-------------------------------------------------------------------------- + // getTrackLogChi2DCA() + // Calculate logChi2 (= log((d0/d0e)^2+(z0/z0e)^2) contribution of a + // track at the position closest to a position and + // the distance of closest approach of a track w.r.t. + // a position. 
Either only in the transverse plane or in 3 dimensions. + // Option chi2DefToUse: + // 0 : from track perigee with uncertainties from track only + // 1 : from track perigee with uncertainties from track and vertex + // 2 : simple extrapolation from track parameters + // with uncertainties from track and vertex + // 3 : CalcLogChi2toPV method from NtupleMaker using xAOD::TrackingHelpers. + // (only track uncertainties) + // 4 : CalcLogChi2toPV method from NtupleMaker using xAOD::TrackingHelpers. + // (track and vertex uncertainties) + // 5 : use TrackVertexAssociationTool + // 6 : full 3D chi2 from track perigee with uncertainties + // from track and vertex (sum of 3x3 covariance matrices) + // 7 : full 3D chi2 from track perigee with uncertainties + // from track and vertex (sum of 2x2 covariance matrices) + // 8 : simple extrapolation from track parameters with uncertainties + // from track and vertex (sum of 3x3 covariance matrices) + // 9 simple extrapolation from track parameters with uncertainties + // from track and vertex (sum of 2x2 covariance matrices) + // Returned vector components: + // 0: d0, 1: d0Err, 2: z0, 3: z0Err, 4: logChi2, 5: dca, 6: okFlag + // 7: vtxErrPart2, 8: trkErrPart2, 9: phi0Used + //-------------------------------------------------------------------------- + std::vector<double> + BPhysVertexTrackBase::getTrackLogChi2DCA(const xAOD::TrackParticle* track, + const xAOD::Vertex* vtx, + bool doDCAin3D, + int chi2DefToUse) const { + // presets + std::vector<double> res = {-999., -99., -999., -99., -100., -100., -1., + -99., -99., -999.}; + + const Amg::Vector3D pos = vtx->position(); + const AmgSymMatrix(3) poscov = vtx->covariancePosition(); + + if ( track != NULL ) { + if ( chi2DefToUse < 2 || (chi2DefToUse > 5 && chi2DefToUse < 8) ) { + // use track perigee method + std::unique_ptr<const Trk::Perigee> + trkPerigee(m_trackToVertexTool->perigeeAtVertex(*track, pos)); + if ( trkPerigee != NULL ) { + res[0] = 
trkPerigee->parameters()[Trk::d0]; + res[2] = trkPerigee->parameters()[Trk::z0]; + const AmgSymMatrix(5)* locError = trkPerigee->covariance(); + if ( locError != NULL ) { + // uncertainties from track + res[1] = Amg::error(*locError, Trk::d0); + res[3] = Amg::error(*locError, Trk::z0); + if ( chi2DefToUse == 1 ) { + // add uncertainties from vertex + Amg::Vector3D perppt(trkPerigee->momentum().y()/trkPerigee->pT(), + -trkPerigee->momentum().x()/trkPerigee->pT(), + 0.); + double vtxD0Err2 = perppt.transpose()*poscov*perppt; + res[1] = sqrt( pow(res[1], 2.) + vtxD0Err2 ); + res[3] = sqrt( pow(res[3], 2.) + poscov(2,2) ); + } + if ( chi2DefToUse < 2 ) { + if ( fabs(res[1]) > 0. && fabs(res[3]) > 0. ) { + res[4] = log( pow(res[0]/res[1], 2.) + + pow(res[2]/res[3], 2.) ); + res[6] = 2.; // ok + } else { + ATH_MSG_WARNING("BPhysVertexTrackBase::getTrackLogChi2DCA():" + << " d0 = " << res[0] << ", d0Err = " + << res[1] << ", z0 = " << res[2] + << ", z0Err = " << res[3]); + } + } + // chi2DefToUse 6 or 7 + if ( chi2DefToUse > 5 && chi2DefToUse < 8 ) { + double phi0 = trkPerigee->parameters()[Trk::phi0]; + double doca = sqrt(pow(res[0],2.) + pow(res[2], 2.)); + res[9] = phi0; + if ( doca > 0. 
) { + if ( chi2DefToUse == 6 ) { + AmgMatrix(5,3) dmat = AmgMatrix(5,3)::Zero(); + dmat(0,0) = -sin(phi0); + dmat(0,1) = cos(phi0); + dmat(1,2) = 1.; + dmat(2,0) = -res[0]*cos(phi0); + dmat(2,1) = -res[0]*sin(phi0); + AmgSymMatrix(3) mCovTrk3D = dmat.transpose()*(*locError)*dmat; + Amg::Vector3D dvec(-res[0]*sin(phi0), res[0]*cos(phi0), + res[2]); // (x,y,z) + Amg::Vector3D duvec = dvec.unit(); + // log(chi2) = log( docavec^T * V^-1 * docavec ) + res[4] = log( dvec.transpose() * (poscov+mCovTrk3D).inverse() + * dvec ); + res[7] = duvec.transpose()*poscov*duvec; + res[8] = duvec.transpose()*mCovTrk3D*duvec; + res[6] = 3.; // ok + } + if ( chi2DefToUse == 7 ) { + AmgMatrix(3,2) dmat = AmgMatrix(3,2)::Zero(); + dmat(0,0) = -sin(phi0); + dmat(1,0) = cos(phi0); + dmat(2,0) = 0.; + dmat(0,1) = 0.; + dmat(1,1) = 0.; + dmat(2,1) = 1.; + AmgSymMatrix(2) mCovVtx2D = dmat.transpose()*poscov*dmat; + AmgSymMatrix(2) mCovTrk2D = AmgSymMatrix(2)::Zero(); + mCovTrk2D(0,0) = (*locError)(Trk::d0,Trk::d0); + mCovTrk2D(0,1) = (*locError)(Trk::d0,Trk::z0); + mCovTrk2D(1,0) = (*locError)(Trk::d0,Trk::z0); + mCovTrk2D(1,1) = (*locError)(Trk::z0,Trk::z0); + Amg::Vector2D dvec(res[0], res[2]); // (d0, z0) + Amg::Vector2D duvec = dvec.unit(); + // log(chi2) = log( (d0, z0) * V^-1 * (d0, z0)^T ) + res[4] = log( dvec.transpose()*(mCovVtx2D+mCovTrk2D).inverse() + * dvec ); + res[7] = duvec.transpose()*mCovVtx2D*duvec; + res[8] = duvec.transpose()*mCovTrk2D*duvec; + res[6] = 4.; // ok + } + } else { + ATH_MSG_WARNING("BPhysVertexTrackBase::getTrackLogChi2DCA():" + << " doca == 0 !"); + } + } // if chi2DefToUse > 5 && chi2DefToUse < 8 + res[5] = doDCAin3D ? + sqrt( pow(res[0], 2.) + pow(res[2], 2.) 
) : res[0]; + res[6] += 1.; // ok + } else { + ATH_MSG_WARNING("BPhysVertexTrackBase::getTrackLogChi2DCA():" + " locError pointer is NULL!"); + } + } else { + ATH_MSG_WARNING("BPhysVertexTrackBase::getTrackLogChi2DCA():" + " trkPerigee pointer is NULL!"); + } // if trkPerigee + + } else if ( chi2DefToUse == 2 + || (chi2DefToUse > 7 && chi2DefToUse < 10 )) { + // simple extrapolation method + // (directly taken from NtupleMaker for comparisons) + + // SV position and covariance matrix + TVector3 SV_def(vtx->x(), vtx->y(), vtx->z()); + const AmgSymMatrix(3)& SV_cov = poscov; + + // chi2 track to SV + double px = ( track->p4() ).Px(); + double py = ( track->p4() ).Py(); + double pt = ( track->p4() ).Pt(); + double d0 = track->d0(); + double d0Err2 = track->definingParametersCovMatrixVec()[0]; + double z0 = track->z0(); + double z0Err2 = track->definingParametersCovMatrixVec()[2]; + double theta = track->theta(); + double d0z0Cov = track->definingParametersCovMatrixVec()[1]; + double phi = track->phi(); + + TVector3 trk_origin( track->vx(), track->vy(), track->vz() ); + TVector3 SV = SV_def - trk_origin; + + // calc. error in direction perpendicular to pT (still x-y plane) + double upx = py/pt; + double upy = -px/pt; + double d0toSV = d0 + (SV[0]*upx + SV[1]*upy); + double d0toSVErr2 = upx*SV_cov(0, 0)*upx + 2*upx*SV_cov(1, 0)*upy + + upy*SV_cov(1, 1)*upy + d0Err2; + + upx = px/pt; + upy = py/pt; + double cot_theta = cos(theta)/sin(theta); + double z0corr = (SV[0]*upx + SV[1]*upy)*cot_theta; + double z0toSV = z0 + z0corr - SV[2]; + double z0toSVErr2 = SV_cov(2, 2) + z0Err2; + + double docaSV = sqrt( pow(d0toSV, 2) + pow(z0toSV, 2) ); + + double chi2testSV(999.); + if ( chi2DefToUse == 2 ) { + if (d0toSVErr2 !=0 && z0toSVErr2 != 0) + chi2testSV = log(pow( d0toSV, 2)/d0toSVErr2 + + pow( z0toSV, 2)/z0toSVErr2); + // set results + res = {d0toSV, sqrt(d0toSVErr2), z0toSV, sqrt(z0toSVErr2), + chi2testSV, (doDCAin3D ? 
docaSV : d0toSV), 4, + -99., -99., -999.}; + } + if ( chi2DefToUse > 7 && chi2DefToUse < 10 ) { + if ( docaSV > 0. ) { + if ( chi2DefToUse == 8 ) { + AmgMatrix(5,3) dmat = AmgMatrix(5,3)::Zero(); + dmat(0,0) = -sin(phi); + dmat(0,1) = cos(phi); + dmat(1,2) = 1.; + dmat(2,0) = -d0toSV*cos(phi); + dmat(2,1) = -d0toSV*sin(phi); + const AmgSymMatrix(5) mCovTrk5D = + track->definingParametersCovMatrix(); + AmgSymMatrix(3) mCovTrk3D = dmat.transpose()*mCovTrk5D*dmat; + Amg::Vector3D dvec(-d0toSV*sin(phi), d0toSV*cos(phi), + z0toSV); // (x,y,z) + Amg::Vector3D duvec = dvec.unit(); + // log(chi2) = log( docavec^T * V^-1 * docavec ) + double chi2testSV = log( dvec.transpose() + * (poscov+mCovTrk3D).inverse() + * dvec ); + double vtx3DErr2 = duvec.transpose()*poscov*duvec; + double trk3DErr2 = duvec.transpose()*mCovTrk3D*duvec; + // set results + res = {d0toSV, sqrt(d0Err2), z0toSV, sqrt(z0Err2), + chi2testSV, (doDCAin3D ? docaSV : d0toSV), 5, + vtx3DErr2, trk3DErr2, phi}; + } + if ( chi2DefToUse == 9 ) { + AmgMatrix(3,2) dmat = AmgMatrix(3,2)::Zero(); + dmat(0,0) = -sin(phi); + dmat(1,0) = cos(phi); + dmat(2,0) = 0.; + dmat(0,1) = 0.; + dmat(1,1) = 0.; + dmat(2,1) = 1.; + AmgSymMatrix(2) mCovVtx2D = dmat.transpose()*SV_cov*dmat; + AmgSymMatrix(2) mCovTrk2D = AmgSymMatrix(2)::Zero(); + mCovTrk2D(0,0) = d0Err2; + mCovTrk2D(0,1) = d0z0Cov; + mCovTrk2D(1,0) = d0z0Cov; + mCovTrk2D(1,1) = z0Err2; + Amg::Vector2D dvec(d0toSV, z0toSV); + Amg::Vector2D duvec = dvec.unit(); + // log(chi2) = log( (d0, z0) * V^-1 * (d0, z0)^T ) + chi2testSV = log( dvec.transpose()*(mCovVtx2D+mCovTrk2D).inverse() + * dvec ); + double vtx2DErr2 = duvec.transpose()*mCovVtx2D*duvec; + double trk2DErr2 = duvec.transpose()*mCovTrk2D*duvec; + + if ( vtx2DErr2 < 0. || trk2DErr2 < 0. 
) { + ATH_MSG_WARNING("BPhysVertexTrackBase::" + "getTrackLogChi2DCA(): " + << "vtx2DErr2 = " << vtx2DErr2 + << " trk2DErr2 = " << trk2DErr2 + << " chi2testSV = " << chi2testSV); + ATH_MSG_WARNING("dvec = " << dvec); + ATH_MSG_WARNING("mCovVtx2D = " << mCovVtx2D); + ATH_MSG_WARNING("mCovTrk2D = " << mCovTrk2D); + ATH_MSG_WARNING("dmat = " << dmat); + ATH_MSG_WARNING("SV_cov = " << SV_cov); + ATH_MSG_WARNING("det(mCovVtx2D) = " << mCovVtx2D.determinant()); + ATH_MSG_WARNING("det(mCovTrk2D) = " << mCovTrk2D.determinant()); + ATH_MSG_WARNING("det(SV_cov) = " << SV_cov.determinant()); + ATH_MSG_WARNING("d0toSV = " << d0toSV + << " z0toSV = " << z0toSV + << " phi = " << phi + << " docaSV = " << docaSV); + } + + // set results + res = {d0toSV, sqrt(d0Err2), z0toSV, sqrt(z0Err2), + chi2testSV, (doDCAin3D ? docaSV : d0toSV), 6, + vtx2DErr2, trk2DErr2, phi}; + } + } else { + ATH_MSG_WARNING("BPhysVertexTrackBase::getTrackLogChi2DCA():" + << " docaSV == 0 !"); + } + } // if chi2DefToUse > 7 && chi2DefToUse < 10 + + } else if ( chi2DefToUse > 2 && chi2DefToUse < 5 ) { + // CalcLogChi2toPV method using xAOD::TrackingHelpers + // (simply taken from NtupleMaker for comparisons) + // N.B. z0significance method of the helper doesn't include pv_z0 + // uncertainty + double d0sign(0.); + if (chi2DefToUse == 4) { + d0sign = + xAOD::TrackingHelpers::d0significance(track, + m_eventInfo->beamPosSigmaX(), + m_eventInfo->beamPosSigmaY(), + m_eventInfo->beamPosSigmaXY() + ); + } else { + d0sign = xAOD::TrackingHelpers::d0significance( track ); + } + // trk z0 is expressed relative to the beamspot position along z-axis + // (trk->vz()) + // DCA always in 3D + double z0toPV = track->z0() + track->vz() - vtx->z(); + double z0Err2 = track->definingParametersCovMatrixVec()[2]; + if (chi2DefToUse == 4) z0Err2+= vtx->covariancePosition()(2,2); + double z0sign = z0toPV / sqrt( z0Err2 ); + double chi2 = log( pow(d0sign, 2.) + pow(z0sign, 2.) 
); + // set results + res = {-999., -99., z0toPV, sqrt(z0Err2), chi2, -100., 4, -99., -99., + -999.}; + + } // if chi2DefToUse + } else { + ATH_MSG_WARNING("BPhysVertexTrackBase::getTrackLogChi2DCA():" + " track pointer is NULL!"); + res[6] = -2.; + } // if track != NULL + return res; + } + //-------------------------------------------------------------------------- + // detTrackTypes(): returns a bit pattern of the applicable + // track types from {ASSOCPV, PVTYPE0, PVTYPE1, PVTYPE2, PVTYPE3, NONE, + // NULLVP, CAPVXXXXXXX, ...} (or'd). + //-------------------------------------------------------------------------- + uint64_t BPhysVertexTrackBase::detTrackTypes(const xAOD::TrackParticle* track, + const xAOD::Vertex* candPV, + const xAOD::Vertex* candRefPV) const { + int bits = 0x0; + + // PVTYPE0 - PVTYPE3, NONE + ATH_MSG_ERROR("BPhysVertexTrackBase::detTrackTypes must be adjusted due to changes in TrackParticle"); + + // ASOCPV + if ( candPV != NULL ) { + bool found(false); + for (size_t i=0; i<candPV->nTrackParticles(); ++i) { + if ( track == candPV->trackParticle(i) ) { + found = true; + break; + } + } + if ( found ) bits |= track_type_bit[ASSOCPV]; + // + // CLOSEAPV + for (unsigned int i=7; i<n_track_types; ++i) { + if ( (track_type_bit[i] & m_trackTypesUsed) > 0x0 ) { + bool useRefittedPvs = ( i%2 == 1 ); + bool doDCAin3D = ( (i-7)%4 > 1 ); + int chi2DefToUse = (i-7)/4; + // adjustment above bit 20 + if ( i > 20 ) { + doDCAin3D = true; + chi2DefToUse = (i-13)/2; + } + const xAOD::Vertex* minChi2PV(nullptr); + if ( chi2DefToUse == 5 ) { + minChi2PV = + findAssocPV(track, candPV, candRefPV, m_pvTypesToConsider, + m_minNTracksInPV, useRefittedPvs); + } else { + minChi2PV = + findMinChi2PV(track, candPV, candRefPV, m_pvTypesToConsider, + m_minNTracksInPV, useRefittedPvs, + doDCAin3D, chi2DefToUse).first; + } // if chi2DefToUse + if ( candPV == minChi2PV + || (candRefPV != nullptr && candRefPV == minChi2PV) ) { + bits |= track_type_bit[i]; + } + } // if 
m_trackTypesUsed + } // for i + + } // if candPV != NULL + + return bits; + } + //-------------------------------------------------------------------------- + // findAllTracksInDecay: returns a vector of xAOD::TrackParticle objects + // found in this vertex and subsequent decay vertices (if chosen). + //-------------------------------------------------------------------------- + TrackBag BPhysVertexTrackBase::findAllTracksInDecay(xAOD::BPhysHelper& vtx) + const { + + TrackBag tracks; + findAllTracksInDecay(vtx, tracks); + + return tracks; + } + //-------------------------------------------------------------------------- + // findAllTracksInDecay: fills a vector of xAOD::TrackParticle objects + // found in this vertex and subsequent decay vertices (if chosen). + // Method avoids duplicate entries in vector. + // Recursively calls itself if necessary. + //-------------------------------------------------------------------------- + void BPhysVertexTrackBase::findAllTracksInDecay(xAOD::BPhysHelper& vtx, + TrackBag& tracks) + const { + + for (unsigned int i=0; i < vtx.vtx()->nTrackParticles(); ++i) { + const xAOD::TrackParticle* track = vtx.vtx()->trackParticle(i); + if ( std::find(tracks.begin(),tracks.end(),track) == tracks.end() ) { + tracks.push_back(track); + } // if + } // for + // loop over preceeding vertices + if ( m_incPrecVerticesInDecay ) { + for (int ivtx = 0; ivtx < vtx.nPrecedingVertices(); ++ivtx) { + xAOD::BPhysHelper precVtx(vtx.precedingVertex(ivtx)); + findAllTracksInDecay(precVtx, tracks); + } // if + } // for + } + //-------------------------------------------------------------------------- + // findAllMuonsInDecay: returns a vector of xAOD::Muon objects + // found in this vertex and subsequent decay vertices (if chosen). 
+ //-------------------------------------------------------------------------- + MuonBag BPhysVertexTrackBase::findAllMuonsInDecay(xAOD::BPhysHelper& vtx) + const { + + MuonBag muons; + findAllMuonsInDecay(vtx, muons); + + return muons; + } + //-------------------------------------------------------------------------- + // findAllMuonsInDecay: fills vector of xAOD::Muon objects + // found in this vertex and subsequent decay vertices (if chosen). + // Method avoids duplicate entries in vector. + // Recursively calls itself if necessary. + //-------------------------------------------------------------------------- + void BPhysVertexTrackBase::findAllMuonsInDecay(xAOD::BPhysHelper& vtx, + MuonBag& muons) + const { + + for (int i=0; i < vtx.nMuons(); ++i) { + if ( std::find(muons.begin(),muons.end(),vtx.muon(i)) == muons.end() ) { + muons.push_back(vtx.muon(i)); + } // if + } // for + // loop over preceeding vertices + if ( m_incPrecVerticesInDecay ) { + for (int ivtx = 0; ivtx < vtx.nPrecedingVertices(); ++ivtx) { + xAOD::BPhysHelper precVtx(vtx.precedingVertex(ivtx)); + findAllMuonsInDecay(precVtx, muons); + } // for + } // if + } + //-------------------------------------------------------------------------- + // findAllMuonsIdTracksInDecay: returns a vector of xAOD::TrackParticle + // objects found in this vertex and subsequent decay vertices. + // Returns the tracks. + // The vector of track pointers reeturned may contain NULL elements. 
+ //-------------------------------------------------------------------------- + TrackBag + BPhysVertexTrackBase::findAllMuonIdTracksInDecay(xAOD::BPhysHelper& vtx, + MuonBag& muons) const { + + TrackBag tracks; + muons = findAllMuonsInDecay(vtx); + + for (MuonBag::const_iterator muItr = muons.begin(); muItr != muons.end(); + ++muItr) { + const xAOD::TrackParticle* track = + (*muItr)->trackParticle(xAOD::Muon::InnerDetectorTrackParticle); + tracks.push_back(track); + } // for + + return tracks; + } + //-------------------------------------------------------------------------- + // findMuonRefTrackMomenta: returns a vector<TVector3> containing the + // three momenta of refitted tracks identified as muons. + // The vector may contain (0,0,0) elements indicating an error. + //-------------------------------------------------------------------------- + std::vector<TVector3> + BPhysVertexTrackBase::findMuonRefTrackMomenta(xAOD::BPhysHelper& vtx, + MuonBag& muons) const { + + std::vector<TVector3> refMuTracks; + + // quick solution if nRefTrks == nMuons: + if ( vtx.nRefTrks() == vtx.nMuons() && !m_incPrecVerticesInDecay ) { + muons = vtx.muons(); + for ( auto refMuTrack : vtx.refTrks() ) { + refMuTracks.push_back(refMuTrack); + } + } else { + TrackBag muonIdTracks = findAllMuonIdTracksInDecay(vtx, muons); + if ( vtx.nRefTrks() == (int)vtx.vtx()->nTrackParticles() ) { + for (int i=0; i<vtx.nRefTrks(); ++i) { + const xAOD::TrackParticle* otp = + (const xAOD::TrackParticle*)vtx.refTrkOrigin(i); + if ( otp != NULL ) { + if ( std::find(muonIdTracks.begin(), muonIdTracks.end(), otp) + != muonIdTracks.end() ) { + refMuTracks.push_back(vtx.refTrk(i)); + } + } else { + ATH_MSG_WARNING("BPhysVertexTrackBase::findMuonRefTrackMomenta():" + " refTrkOrigin == NULL for refTrk # " + << i << " !"); + } + } // for + } else { + ATH_MSG_WARNING("BPhysVertexTrackBase::findMuonRefTrackMomenta():" + " size mismatch #refTrks = " << vtx.nRefTrks() + << "#trackParticles = " << 
vtx.vtx()->nTrackParticles() + << " !"); + } // if nRefTracks == nTrackParticles + // loop over preceeding vertices -- only if not all refMuTrks found yet + if ( m_incPrecVerticesInDecay && muons.size() > refMuTracks.size() ) { + for (int ivtx = 0; ivtx < vtx.nPrecedingVertices(); ++ivtx) { + xAOD::BPhysHelper precVtx(vtx.precedingVertex(ivtx)); + std::vector<TVector3> precRefMuTracks = + findMuonRefTrackMomenta(precVtx, muons); + // append only if not yet contained in + for ( auto precRefMuTrack : precRefMuTracks ) { + if ( std::find(refMuTracks.begin(), refMuTracks.end(), + precRefMuTrack) == refMuTracks.end() ) { + refMuTracks.push_back(precRefMuTrack); + } // if + } // for + } // for ivtx + } // if + } // if (shortcut) + + // debug output + if ( msgLvl( MSG::DEBUG ) ) { + ATH_MSG_DEBUG("BPhysVertexTrackBase::findMuonRefTrackMomenta():" + << " #muons: " << muons.size() + << " #refMuTrks: " << refMuTracks.size()); + TString str = Form(">> refMuTracks(%d):\n", (int)refMuTracks.size()); + for (unsigned int i=0; i < refMuTracks.size(); ++i) { + str += Form("(%10.4f,%10.4f,%10.4f) ", + refMuTracks[i].x(), refMuTracks[i].y(), + refMuTracks[i].z()); + } + ATH_MSG_DEBUG(str.Data()); + } + + return refMuTracks; + } + + //-------------------------------------------------------------------------- + // selectTracks: returns a vector of xAOD::TrackParticle objects + // seleted from the input track collection according to the selection + // criteria and with respect to the B candidate vertex. 
+ //-------------------------------------------------------------------------- + TrackBag BPhysVertexTrackBase::selectTracks(const + xAOD::TrackParticleContainer* + inpTracks, + xAOD::BPhysHelper& cand, + const unsigned int ipv, + const unsigned int its, + const unsigned int itt) const { + + return selectTracks(inpTracks, findAllTracksInDecay(cand), cand, + ipv, its, itt); + } + //-------------------------------------------------------------------------- + // selectTracks: returns a vector of xAOD::TrackParticle objects + // seleted from the input track collection according to the selection + // criteria and with respect to the B candidate vertex. + //-------------------------------------------------------------------------- + TrackBag BPhysVertexTrackBase::selectTracks(const + xAOD::TrackParticleContainer* + inpTracks, + const TrackBag& exclTracks, + xAOD::BPhysHelper& cand, + const unsigned int ipv, + const unsigned int its, + const unsigned int itt) const { + + const xAOD::Vertex* candRefPV = cand.pv(m_pvAssocTypes[ipv]); + const xAOD::Vertex* candPV = cand.origPv(m_pvAssocTypes[ipv]); + + ATH_MSG_DEBUG("selectTracks: Found " << exclTracks.size() + << " " << exclTracks + << " for decay candidate " << cand.vtx() + << "; candPV: " << candPV << " candRefPV: " << candRefPV); + + std::string bname(buildBranchBaseName(its, ipv, itt)); + + // tracks to be considered + TrackBag tracks; + for (xAOD::TrackParticleContainer::const_iterator trkItr = + inpTracks->begin(); trkItr != inpTracks->end(); ++trkItr) { + const xAOD::TrackParticle* track = *trkItr; + uint64_t trackTypesForTrack(0x0); + // debug track types (before any cuts) + if ( m_debugTrackTypes > 0 ) { + trackTypesForTrack = detTrackTypes(track, candPV, candRefPV); + m_mttc->addToCounter(trackTypesForTrack, itt, bname, "all"); + } + // track selection check + if ( ! 
m_trackSelectionTools[its]->accept(*track, candRefPV) ) continue; + // debug track types (after track selection cuts) + if ( m_debugTrackTypes > 0 ) { + m_mttc->addToCounter(trackTypesForTrack, itt, bname, "ats"); + } + + // calcluation of track type bits not necessary if all bits requested + if ( ! ((unsigned int)m_useTrackTypes[itt] == ttall() || + (unsigned int)m_useTrackTypes[itt] == ttallMin()) ) { + // track type check -- determination if not in debugging mode + // delayed for execution speed reasons + if ( trackTypesForTrack == 0x0 ) { + trackTypesForTrack = detTrackTypes(track, candPV, candRefPV); + } + if ( (trackTypesForTrack & m_useTrackTypes[itt]) == 0x0 ) { + continue; + } + } + // debug track types (after track type cuts) + if ( m_debugTrackTypes > 0 ) { + m_mttc->addToCounter(trackTypesForTrack, itt, bname, "att"); + } + // track not in list of tracks to exclude + if ( std::find(exclTracks.begin(), exclTracks.end(), track) + != exclTracks.end() ) continue; + // debug track types (after all cuts) + if ( m_debugTrackTypes > 0 ) { + m_mttc->addToCounter(trackTypesForTrack, itt, bname, "fin"); + } + // tracks that survived so far + tracks.push_back(track); + } // for + + return tracks; + } + //-------------------------------------------------------------------------- + // buildBranchBaseName: build branch name from track selection, primary + // vertex association and track type qualifiers. 
+ //-------------------------------------------------------------------------- + std::string BPhysVertexTrackBase::buildBranchBaseName(unsigned int its, + unsigned int ipv, + unsigned int itt, + std::string preSuffix) + const { + + ATH_MSG_DEBUG("BPhysVertexTrackBase::buildBranchBaseName -- begin"); + + std::string tsName = m_trackSelectionTools[its].name(); + std::string pvAssoc = xAOD::BPhysHelper::pv_type_str[m_pvAssocTypes[ipv]]; + + // need to get part of tsname after last underscore + std::size_t ipos = tsName.find_last_of("_"); + if ( ipos != std::string::npos ) tsName = tsName.substr(ipos+1); + + // format it nicely + boost::format f("T%010d_%s_%s%s%s"); + f % m_useTrackTypes[itt] % tsName % pvAssoc; + f % (preSuffix.length() > 0 ? "_"+preSuffix : ""); + f % (m_branchSuffix.length() > 0 ? "_"+m_branchSuffix : ""); + + ATH_MSG_DEBUG("BPhysVertexBaseTrackBase::buildBranchBaseName: " << f.str()); + + return f.str(); + } + //-------------------------------------------------------------------------- + // + // Initialize PV-to-SV association type vector + // + //-------------------------------------------------------------------------- + void BPhysVertexTrackBase::initPvAssocTypeVec() { + + m_pvAssocTypes.clear(); + for (unsigned int i=0; i<xAOD::BPhysHelper::n_pv_types; ++i) { + if ( (m_doVertexType & (1 << i)) > 0 ) + m_pvAssocTypes.push_back((xAOD::BPhysHelper::pv_type)i); + } + } + //-------------------------------------------------------------------------- + // + // Find primary vertex to which a track is closest to in terms of minimum + // chi2 to any primary vertex. Replace primary vertex by refitted primary + // vertex (for B candidate associated primary vertices) + // if appropriate (and available). + // Only consider primary vertices of specified primary vertex types and + // with a minimum number of tracks. 
+ // + //-------------------------------------------------------------------------- + std::pair<const xAOD::Vertex*, double> + BPhysVertexTrackBase::findMinChi2PV(const xAOD::TrackParticle* track, + const xAOD::Vertex* candPV, + const xAOD::Vertex* candRefPV, + const std::vector<uint64_t>& pvtypes, + const int minNTracksInPV, + const bool useRefittedPvs, + const bool doDCAin3D, + const int chi2DefToUse) const { + + double minChi2 = std::numeric_limits<double>::max(); + const xAOD::Vertex* minChi2PV(nullptr); + + for (auto pvtx: *m_pvtxContainer) { + if ( pvtx != nullptr ) { + if ( std::find(pvtypes.begin(),pvtypes.end(),pvtx->vertexType()) + != pvtypes.end() ) { + const xAOD::Vertex* cvtx = pvtx; + // replace by refitted PV if associated PV matches orignal PV + if ( useRefittedPvs && pvtx == candPV ) { + if ( candRefPV != nullptr ) { + cvtx = candRefPV; + } else { + ATH_MSG_WARNING(" BPhysVertexTrackBase::findMinChi2PV:" + << " candRefPV == NULL!"); + continue; + } + } // if pvtx == candPV + if ( (int)cvtx->nTrackParticles() >= minNTracksInPV ) { + double chi2 = getTrackLogChi2DCA(track, cvtx, doDCAin3D, + chi2DefToUse)[4]; + if ( chi2 < minChi2 ) { + minChi2 = chi2; + minChi2PV = cvtx; + } // if chi2 < minChi2 + } // if minNTracksInPV + } // if pvTypes in pvtypes vector + } // if pvtx != nullptr + } // for pvtx + + return std::make_pair(minChi2PV, minChi2); + } + //-------------------------------------------------------------------------- + // + // Find primary vertex to which a track is closest using the + // TrackVertexAssociationTool. Replace primary vertex by refitted primary + // vertex (for B candidate associated primary vertices) + // if appropriate (and available). + // Only consider primary vertices of specified primary vertex types and + // with a minimum number of tracks. 
+ // + //-------------------------------------------------------------------------- + const xAOD::Vertex* + BPhysVertexTrackBase::findAssocPV(const xAOD::TrackParticle* track, + const xAOD::Vertex* candPV, + const xAOD::Vertex* candRefPV, + const std::vector<uint64_t>& pvtypes, + const int minNTracksInPV, + const bool useRefittedPvs) const { + + // select PVs to be considered/replace candPV by candRefPV if requested + std::vector<const xAOD::Vertex*> vpvtx; + for (auto pvtx: *m_pvtxContainer) { + if ( pvtx != nullptr ) { + if ( std::find(pvtypes.begin(),pvtypes.end(),pvtx->vertexType()) + != pvtypes.end() ) { + const xAOD::Vertex* cvtx = pvtx; + // replace by refitted PV if associated PV matches orignal PV + if ( useRefittedPvs && pvtx == candPV ) { + if ( candRefPV != nullptr ) { + cvtx = candRefPV; + } else { + ATH_MSG_WARNING("BPhysVertexTrackBase::findAssocPV:" + << " candRefPV == NULL!"); + continue; + } + } // if pvtx == candPV + if ( (int)cvtx->nTrackParticles() >= minNTracksInPV ) { + vpvtx.push_back(cvtx); + } // if minNTracksInPV + } // if pvTypes in pvtypes vector + } // if pvtx != nullptr + } // for pvtx + + const xAOD::Vertex* assocPV(NULL); + if ( useRefittedPvs && m_tvaToolHasWpLoose ) { + // check whether track is in refitted PV - if so accept + // Need to do this here as the TrackVertexAssociationTool + // with WP 'Loose' only checks the track->vertex() pointer + // which always points to the original PV. 
+ for (const auto &tp : candRefPV->trackParticleLinks()) { + if ( *tp == track ) { + // track is part of refitted PV -- accept it + assocPV = candRefPV; + break; + } + } // for tp + // if not matching use the TrackVertexAssociationTool (other PVs etc) + if ( assocPV == nullptr ) { + assocPV = m_tvaTool->getUniqueMatchVertex(*track, vpvtx); + } + } else { + assocPV = m_tvaTool->getUniqueMatchVertex(*track, vpvtx); + } // if useRefittedPvs && m_tvaToolHasWpLoose + if ( assocPV == nullptr ) { + ATH_MSG_WARNING("BPhysVertexTrackBase::findAssocPV:" + << " assocPV == NULL for track!" + << " len(vpvtx) = " << vpvtx.size() + << " useRefittedPvs = " << useRefittedPvs + << " minNTracksInPV = " << minNTracksInPV); + } + + return assocPV; + } + //-------------------------------------------------------------------------- +} diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BTrackVertexMapLogger.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BTrackVertexMapLogger.cxx new file mode 100644 index 0000000000000000000000000000000000000000..aa3a40ab2a591b1ae9ebf340948ce0750ee5575c --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BTrackVertexMapLogger.cxx @@ -0,0 +1,104 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ + +//============================================================================ +// BTrackVertexMapLogger.cxx +//============================================================================ +// +// Author : Wolfgang Walkowiak <Wolfgang.Walkowiak@cern.ch.> +// Changes: +// - w.w., 2017-01-22: Added use of BPhysMetaDataTool. +// +// Store JO metadata in the output file. +// +// It uses the BPhysMetaDataTool (default) or the IOVDbMetaDataTool to +// store job option information as metadata in a specific branch whose +// name needs to prefixed by the deriviation format name. 
+// Note: Metadata stored by the IOVDbMetaDataTool is not readable on +// 'RootCore' level. +// +// This is a base class. Inherit from it to add the job options you want +// to store. For a usage example, see +// Bmumu_metadata.h / Bmumu_metadata.cxx +// and +// BPHY8.py . +// +// Job options provided by the base class: +// - DerivationName -- assign the name of the derivation format +// - MetadataFolderName -- assign the name of the metadata folder, +// should start with the derivation format name, +// defaults to DerivationName if not set. +// - UseIOVDbMetaDataTool -- use the IOVDbMetaDataTool to store +// the additional metadata +// - UseBPhysMetaDataTool -- use the BPhysMetaDataTool to store +// the additional metadata +// +//============================================================================ +// + +#include "DerivationFrameworkBPhys/BTrackVertexMapLogger.h" +#include "AthenaPoolUtilities/CondAttrListCollection.h" + +namespace DerivationFramework { + + //-------------------------------------------------------------------------- + BTrackVertexMapLogger::BTrackVertexMapLogger(const std::string& t, + const std::string& n, + const IInterface* p) + : AthAlgTool(t,n,p) { + + declareInterface<DerivationFramework::IAugmentationTool>(this); + + // Declare BPhysTrackVertexMapTool handles + declareProperty("TrackVertexMapTools", m_ttvmTools); + + // Enable log output? 
+ declareProperty("Enable", m_enable = true); + } + //-------------------------------------------------------------------------- + StatusCode BTrackVertexMapLogger::initialize() { + + ATH_MSG_DEBUG("BTrackVertexMapLogger::initialize() -- begin"); + + // get the BPhysTrackVertexMapTools + if ( m_enable ) { + for (auto ttvmTool : m_ttvmTools) { + ATH_CHECK( ttvmTool.retrieve() ); + ATH_MSG_INFO("initialize: Successfully retrieved " + << ttvmTool.name() << " ...."); + } + } // if m_enable + + ATH_MSG_DEBUG("BTrackVertexMapLogger::initialize() -- end"); + + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + StatusCode BTrackVertexMapLogger::finalize() { + + ATH_MSG_DEBUG("BTrackVertexMapLogger::finalize()"); + + // everything all right + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + StatusCode BTrackVertexMapLogger::addBranches() const { + + ATH_MSG_DEBUG("BTrackVertexMapLogger::addBranches()"); + + // call the BPhysTrackVertexMapTools + if ( m_enable ) { + for (auto ttvmTool : m_ttvmTools) { + if ( ttvmTool->doLog() ) { + ATH_MSG_INFO("addBranches: dump for " << ttvmTool.name() << ":"); + ATH_CHECK( ttvmTool->logEvent() ); + } // if doLog() + } // for + } // if m_enable + + // still everything is ok + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- +} diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BVertexClosestTrackTool.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BVertexClosestTrackTool.cxx new file mode 100644 index 0000000000000000000000000000000000000000..e6771bb459f9df167f6f11c511a1ef12c9b764e8 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BVertexClosestTrackTool.cxx @@ -0,0 +1,672 @@ +/* + Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration +*/ + 
+//============================================================================ +// BVertexClosestTrackTool.cxx +//============================================================================ +// +// Author : Wolfgang Walkowiak <Wolfgang.Walkowiak@cern.ch.> +// Changes: +// +// Add B vertex closest track information for different configurations, +// different track selections and different PV-to-SV association methods. +// +// For an usage example see BPHY8.py . +// +// Job options provided by this class: +// - CloseTrackChi2SetName -- list with labels for the following +// four settings (all five lists must +// be of exactly same length) +// - CloseTrackCorrChi2 -- list with options for using the +// SV uncertainties in the chi2 calculation +// in addition to track uncertainties +// 0 : use old method +// (only track uncertainties) +// 1 : from track perigee with +// uncertainties from track and vertex +// 2 : simple extrapolation from track +// parameters with uncertainties from +// track and vertex (extrapolation +// used for track swimming) +// - CloseTrackMinDCAin3D -- use 3-dimensional information in +// minimization (list) +// - CloseTrackMaxLogChi2 -- maximum chi2 distance of closest track +// to B vertex (list) +// - NCloseTrackMaxLogChi2 -- maximum chi2 distance of track +// to B vertex for track counting (list) +// +//============================================================================ +// +#include "DerivationFrameworkBPhys/BVertexClosestTrackTool.h" +#include "xAODMuon/MuonContainer.h" +#include "xAODEventInfo/EventInfo.h" +#include "xAODBPhys/BPhysHelper.h" +#include "InDetTrackSelectionTool/IInDetTrackSelectionTool.h" +#include "EventPrimitives/EventPrimitivesHelpers.h" +#include "AthLinks/ElementLink.h" + +#include "boost/format.hpp" +#include "TVector3.h" +#include <algorithm> +#include <sstream> + +namespace DerivationFramework { + + //------------------------------------------------------------------------- + // + // helper class + 
BVertexClosestTrackTool::CtItem::CtItem(std::string Name, std::string Prefix, + std::string Bname, + double Dca, double DcaErr, + double Zca, double ZcaErr, + double VtxNDErr2, double TrkNDErr2, double Phi0Used, + int NTrksChi2, + xAOD::TrackParticle* CloseTrack, + TrackBag Tracks, + std::vector<std::vector<double> > Vtap, + std::vector<unsigned short> Selpat) + : BaseItem(Name, Bname, Prefix), dca(Dca), dcaErr(DcaErr), + zca(Zca), zcaErr(ZcaErr), vtxNDErr2(VtxNDErr2), trkNDErr2(TrkNDErr2), + phi0Used(Phi0Used), + nTrksChi2(NTrksChi2), closeTrack(CloseTrack), + tracks(Tracks), vtap(Vtap), selpat(Selpat) { + } + + BVertexClosestTrackTool::CtItem::~CtItem() { + } + + void BVertexClosestTrackTool::CtItem::setup(std::string Name, + std::string Bname, + std::string Prefix) { + BaseItem::setup(Name, Bname, Prefix); + dca = -999.; + dcaErr = -99.; + zca = -999.; + zcaErr = -99.; + vtxNDErr2 = -99.; + trkNDErr2 = -99.; + phi0Used = -999.; + nTrksChi2 = 0; + closeTrack = NULL; + tracks.clear(); + vtap.clear(); + selpat.clear(); + } + + void BVertexClosestTrackTool::CtItem::setup(std::string Name, + std::string Bname, + std::string Prefix, + double Dca, double DcaErr, + double Zca, double ZcaErr, + double VtxNDErr2, double TrkNDErr2, double Phi0Used, + int NTrksChi2, + xAOD::TrackParticle* + CloseTrack, + TrackBag Tracks, + std::vector<std::vector<double> > Vtap, + std::vector<unsigned short> Selpat) { + BaseItem::setup(Name, Bname, Prefix); + dca = Dca; + dcaErr = DcaErr; + zca = Zca; + zcaErr = ZcaErr; + vtxNDErr2 = VtxNDErr2; + trkNDErr2 = TrkNDErr2; + phi0Used = Phi0Used; + nTrksChi2 = NTrksChi2; + closeTrack = CloseTrack; + tracks = Tracks; + vtap = Vtap; + selpat = Selpat; + } + + void BVertexClosestTrackTool::CtItem::resetVals() { + dca = -999.; + dcaErr = -99.; + zca = -999.; + zcaErr = -99.; + vtxNDErr2 = -99.; + trkNDErr2 = -99.; + phi0Used = -999.; + nTrksChi2 = 0; + closeTrack = NULL; + tracks.clear(); + vtap.clear(); + selpat.clear(); + } + + void 
BVertexClosestTrackTool::CtItem::copyVals(const BaseItem& item) { + copyVals((const CtItem&)item); + } + + void BVertexClosestTrackTool::CtItem::copyVals(const CtItem& item) { + dca = item.dca; + dcaErr = item.dcaErr; + zca = item.zca; + zcaErr = item.zcaErr; + vtxNDErr2 = item.vtxNDErr2; + trkNDErr2 = item.trkNDErr2; + phi0Used = item.phi0Used; + nTrksChi2 = item.nTrksChi2; + closeTrack = item.closeTrack; + tracks = item.tracks; + vtap = item.vtap; + selpat = item.selpat; + } + + std::string BVertexClosestTrackTool::CtItem::dcaName() { + return buildName("DCA"); + } + + std::string BVertexClosestTrackTool::CtItem::dcaErrName() { + return buildName("DCAError"); + } + + std::string BVertexClosestTrackTool::CtItem::zcaName() { + return buildName("ZCA"); + } + + std::string BVertexClosestTrackTool::CtItem::zcaErrName() { + return buildName("ZCAError"); + } + + std::string BVertexClosestTrackTool::CtItem::vtxNDErr2Name() { + return buildName("VtxNDError2"); + } + + std::string BVertexClosestTrackTool::CtItem::trkNDErr2Name() { + return buildName("TrkNDError2"); + } + + std::string BVertexClosestTrackTool::CtItem::phi0UsedName() { + return buildName("Phi0Used"); + } + + std::string BVertexClosestTrackTool::CtItem::nTrksChi2Name() { + return buildName("NTracksChi2"); + } + + std::string BVertexClosestTrackTool::CtItem::closeTrackName() { + return buildName("CloseTrack", "_Link"); + } + + std::string BVertexClosestTrackTool::CtItem::toString() const { + boost::format f1("dca: %10.6f %10.6f zca: %10.6f %10.6f nt: %10d"); + f1 % dca % dcaErr % zca % zcaErr % nTrksChi2; + boost::format f2("%s\n %s\n"); + f2 % BPhysVertexTrackBase::BaseItem::toString(); + f2 % f1.str(); + std::string rstr = f2.str(); + rstr += "per track: p(px, py, pz)\n"; + rstr += " d(d0, z0, phi, theta, qoverp)\n"; + rstr += " d0, d0Err, z0, z0Err, logChi2, dca, okFlag\n"; + rstr += " vtxNDErr2, trkNDErr2, phi0Used\n"; + rstr += " vtxNDErr, trkNDErr, log(chi2Err2Sum)\n"; + // loop over tracks + if 
(tracks.size() == vtap.size() && vtap.size() == selpat.size()) { + for (unsigned int i=0; i<tracks.size(); ++i) { + boost::format f3(" %3d %2d "); + f3 % i % selpat[i]; + std::string f3str = f3.str(); + // 0: d0, 1: d0Err, 2: z0, 3: z0Err, 4: logChi2, 5: dca, 6: okFlag + // 7: vtxNDErr2, 8: trkNDErr2, 9: phi0Used + boost::format f4("%s\nd0: %10.4f %10.4f z0: %10.4f %10.4f " + "lc2: %10.4f dca: %10.4f ok: %3f\n" + "vtxNDErr2: %10.4f trkNDErr2: %10.4f " + "phi0Used: %10.4f\n" + "vtxNDErr: %10.4f trkNDErr2 %10.4f " + "logChi2Err2Sum: %10.4f"); + f4 % trackToString(tracks[i]); + f4 % vtap[i][0] % vtap[i][1] % vtap[i][2] % vtap[i][3]; + f4 % vtap[i][4] % vtap[i][5] % vtap[i][6]; + f4 % vtap[i][7] % vtap[i][8] % vtap[i][9]; + f4 % (vtap[i][7] < 0. ? -99. : sqrt(vtap[i][7])); + f4 % (vtap[i][8] < 0. ? -99. : sqrt(vtap[i][8])); + f4 % (vtap[i][7]+vtap[i][8] > 0. ? + log(vtap[i][5]*vtap[i][5]/(vtap[i][7]+vtap[i][8])) : -999.); + std::string tstr = wrapLines(f4.str(), + std::string(f3str.length(), ' ')); + tstr.replace(0,f3str.length(),f3str); + rstr.append(tstr+"\n"); + } // for i + } else { + boost::format f5("Mismatch: nTracks: %d nVtap: %d nSelpat: %d\n"); + f5 % tracks.size() % vtap.size() % selpat.size(); + rstr.append(f5.str()); + } // if sizes + + rstr.erase(rstr.length()-1); + return rstr; + } + + //-------------------------------------------------------------------------- + BVertexClosestTrackTool::BVertexClosestTrackTool(const std::string& t, + const std::string& n, + const IInterface* p) + : BPhysVertexTrackBase(t,n,p), m_lastRunNumber(0), m_lastEvtNumber(0), + m_svIdx(0) { + + declareInterface<DerivationFramework::IAugmentationTool>(this); + + declareProperty("CloseTrackChi2SetName", m_closeTrackChi2SetName = {"def"}); + declareProperty("CloseTrackCorrChi2" , m_closeTrackCorrChi2 = {0}); + declareProperty("CloseTrackMinDCAin3D" , m_minDCAin3D = {true}); + declareProperty("CloseTrackMaxLogChi2" , m_closeTrackMaxLogChi2 = {-1.}); + 
declareProperty("NCloseTrackMaxLogChi2", m_nCloseTrackMaxLogChi2 = {-1.}); + } + //-------------------------------------------------------------------------- + StatusCode BVertexClosestTrackTool::initializeHook() { + + ATH_MSG_DEBUG("BVertexClosestTrackTool::initializeHook() -- begin"); + + // check job options + if ( m_closeTrackChi2SetName.size() == 0 ) { + ATH_MSG_ERROR("No chi2 set names provided!"); + } + if ( m_closeTrackCorrChi2.size() == 0 ) { + ATH_MSG_ERROR("No use of corrected chi2 settings provided!"); + } + if ( m_minDCAin3D.size() == 0 ) { + ATH_MSG_ERROR("No use of min DCA in 3D settings provided!"); + } + if ( m_closeTrackMaxLogChi2.size() == 0 ) { + ATH_MSG_ERROR("No cuts on max log chi2 for DOCA calculation provided!"); + } + if ( m_nCloseTrackMaxLogChi2.size() == 0 ) { + ATH_MSG_ERROR("No cuts on max log chi2 for nClosetTracks calculation " + "provided!"); + } + if ( m_closeTrackCorrChi2.size() != m_closeTrackChi2SetName.size() || + m_minDCAin3D.size() != m_closeTrackChi2SetName.size() || + m_closeTrackMaxLogChi2.size() != m_closeTrackChi2SetName.size() || + m_nCloseTrackMaxLogChi2.size() != m_closeTrackChi2SetName.size() ) { + ATH_MSG_ERROR("Size mismatch of CloseTrackChi2SetName (" + << m_closeTrackChi2SetName.size() << "), " + << "CloseTrackCorrChi2 (" + << m_closeTrackCorrChi2 << "), " + << "CloseTrackMinDCAin3D (" + << m_minDCAin3D.size() << "), " + << "CloseTrackMaxLogChi2 (" + << m_closeTrackMaxLogChi2.size() << ") and/or " + << "NCloseTrackMaxLogChi2 (" + << m_nCloseTrackMaxLogChi2.size() << ")"); + } + + // initialize results array + initResults(); + + ATH_MSG_DEBUG("BVertexClosestTrackTool::initializeHook() -- end"); + + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + StatusCode BVertexClosestTrackTool::finalizeHook() { + + ATH_MSG_DEBUG("BVertexClosestTrackTool::finalizeHook()"); + + // everything all right + return StatusCode::SUCCESS; + } + 
//-------------------------------------------------------------------------- + StatusCode + BVertexClosestTrackTool::addBranchesVCSetupHook(size_t ivc) const { + + ATH_MSG_DEBUG("BVertexClosestTrackTool::addBranchesVCLoopHook() -- begin"); + + ATH_MSG_DEBUG("BVertexClosestTrackTool::addBranchesVCSetupHook: " + << "Vertex container index " << ivc + << " for collection " << m_vertexContainerNames[ivc] + << " with prefix " << m_branchPrefixes[ivc]); + + setResultsPrefix(m_branchPrefixes[ivc]); + + ATH_MSG_DEBUG("BVertexClosestTrackTool::addBranchesVCSetupHook() -- end"); + + // nothing to do here + return StatusCode::SUCCESS; + } + + //-------------------------------------------------------------------------- + StatusCode + BVertexClosestTrackTool::addBranchesSVLoopHook(const xAOD::Vertex* vtx) const { + + ATH_MSG_DEBUG("BVertexClosestTrackTool::addBranchesSVLoopHook() -- begin"); + + // calculate closest track values + ATH_MSG_DEBUG("BVertexClosestTrackTool::addBranchesSVLoopHook(): " + "calculate closest track ..."); + CHECK(calculateValues(vtx)); + + // save the closest track values + ATH_MSG_DEBUG("BVertexClosestTrackTool::addBranchesSVLoopHook(): " + "save closest track ..."); + CHECK(saveClosestTrack(vtx)); + + // dump close track item debugging information + if (m_debugTracksInThisEvent) { + CHECK(logCloseTracksDebugInfo()); + } + + ATH_MSG_DEBUG("BVertexClosestTrackTool::addBranchesSVLoopHook() -- end"); + + // nothing to do here + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + // Calculate closest track variables -- method called from caching loop + //-------------------------------------------------------------------------- + StatusCode + BVertexClosestTrackTool::calcValuesHook(const xAOD::Vertex* vtx, + const unsigned int ipv, + const unsigned int its, + const unsigned int itt) const { + + ATH_MSG_DEBUG("calcValuesHook: ipv: " << ipv + << ", its: " << its << ", itt: " << itt); + + // 
candidate tracks and momentum + xAOD::BPhysHelper cand(vtx); + + // tracks to be considered + TrackBag tracks = selectTracks(m_tracks, cand, ipv, its, itt); + + // loop over chi2 setting sets + unsigned int nChi2Sets = m_closeTrackChi2SetName.size(); + for (unsigned int ics = 0; ics < nChi2Sets; ++ics) { + + CtItem& cti = m_results[its][ipv][itt][ics]; + + // presets + cti.resetVals(); + + double closestTrkDCA = 9999.; + int closestTrkIdx(-1); + unsigned int trkIdx(0); + for (TrackBag::const_iterator trkItr = tracks.begin(); + trkItr != tracks.end(); ++trkItr, ++trkIdx) { + + // + // track selection bit pattern: + // bit 0 : included in number of close tracks + // bit 1 : chosen as closest track + // + unsigned short selpat(0); + + // Returned vector components: + // 0: d0, 1: d0Err, 2: z0, 3: z0Err, 4: logChi2, 5: dca, 6: okFlag + // 7: vtxErrPart2, 8: trkErrPart2, 9: phi0Used + std::vector<double> vtap = + getTrackLogChi2DCA(*trkItr, cand.vtx(), + m_minDCAin3D[ics], m_closeTrackCorrChi2[ics]); + ATH_MSG_DEBUG("calcValuesHook: track: " << *trkItr + << ", logChi2: " << vtap[4] << ", dca: " << vtap[5]); + + // track values at perigee found? + if ( vtap[6] >= 0. 
) { + ATH_MSG_DEBUG("calcValuesHook: checking track count for " + "m_nCloseTrackMaxLogChi2[ics] = " + << m_nCloseTrackMaxLogChi2[ics]); + // count tracks + if ( vtap[4] < m_nCloseTrackMaxLogChi2[ics] ) { + cti.nTrksChi2++; + selpat |= 1; + ATH_MSG_DEBUG("calcValuesHook: nTrksChi2++ for track " << *trkItr); + } + // find closest track + ATH_MSG_DEBUG("calcValuesHook: log(chi2) check: " + "m_closeTrackMaxLogChi2[ics]: " + << m_closeTrackMaxLogChi2[ics] + << ", logChi2: " << vtap[4] + << ", closestTrkDCA: " << closestTrkDCA + << ", dca: " << fabs(vtap[5])); + if ( fabs(vtap[5]) < closestTrkDCA && + vtap[4] < m_closeTrackMaxLogChi2[ics] ) { + closestTrkDCA = fabs(vtap[5]); + cti.dca = vtap[0]; + cti.dcaErr = vtap[1]; + cti.zca = vtap[2]; + cti.zcaErr = vtap[3]; + cti.vtxNDErr2 = vtap[7]; + cti.trkNDErr2 = vtap[8]; + cti.phi0Used = vtap[9]; + cti.closeTrack = *trkItr; + closestTrkIdx = trkIdx; + ATH_MSG_DEBUG("calcValuesHook: closestTrkDCA now: " + << closestTrkDCA + << " for track " << *trkItr); + } + } // if ok + cti.tracks.push_back(*trkItr); + cti.vtap.push_back(vtap); + cti.selpat.push_back(selpat); + } // for tracks + // mark closest track + if (closestTrkIdx > -1 && closestTrkIdx < (int)cti.selpat.size()) { + cti.selpat[closestTrkIdx] |= 2; + } + } // for ics + + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + // Fill closest track values from cache if found + //-------------------------------------------------------------------------- + bool BVertexClosestTrackTool::fastFillHook(const xAOD::Vertex* vtx, + const int ipv) const { + + ATH_MSG_DEBUG("fastFillHook: ipv: " << ipv); + + bool found(false); + + StringIntMap_t::iterator itpv = + m_pvAssocResMap.find(buildPvAssocCacheName(vtx, ipv)); + if ( itpv != m_pvAssocResMap.end() ) { + found = true; + unsigned int nTrackSels = m_trackSelectionTools.size(); + unsigned int nTrackTypes = m_useTrackTypes.size(); + unsigned int nChi2Sets = 
m_closeTrackChi2SetName.size(); + for (unsigned int its = 0; its < nTrackSels; ++its) { + for (unsigned int itt = 0; itt < nTrackTypes; ++itt) { + for (unsigned int ics = 0; ics < nChi2Sets; ++ics) { + m_results[its][ipv][itt][ics] + .copyVals(m_results[its][itpv->second][itt][ics]); + } // for ics + } // for its + } // for itt + } // if found + + ATH_MSG_DEBUG("fastFillHook: cache index: " + << buildPvAssocCacheName(vtx, ipv) + << ", found ? " << found + << ", ipv_ref: " + << (found ? itpv->second : -1)); + + return found; + } + //-------------------------------------------------------------------------- + StatusCode + BVertexClosestTrackTool::saveClosestTrack(const xAOD::Vertex* vtx) const { + + typedef ElementLink< xAOD::TrackParticleContainer > TrackParticleLink_t; + + unsigned int nTrackSels = m_trackSelectionTools.size(); + unsigned int nPvAssocs = m_pvAssocTypes.size(); + unsigned int nTrackTypes = m_useTrackTypes.size(); + unsigned int nChi2Sets = m_closeTrackChi2SetName.size(); + + for (unsigned int its = 0; its < nTrackSels; ++its) { + for (unsigned int ipv = 0; ipv < nPvAssocs; ++ipv) { + for (unsigned int itt = 0; itt < nTrackTypes; ++itt) { + for (unsigned int ics = 0; ics < nChi2Sets; ++ics) { + CtItem result = m_results[its][ipv][itt][ics]; + SG::AuxElement::Decorator< float > + d_dca_value(result.dcaName()); + SG::AuxElement::Decorator< float > + d_dcaErr_value(result.dcaErrName()); + SG::AuxElement::Decorator< float > + d_zca_value(result.zcaName()); + SG::AuxElement::Decorator< float > + d_zcaErr_value(result.zcaErrName()); + SG::AuxElement::Decorator< int > + d_nTrksChi2_value(result.nTrksChi2Name()); + d_dca_value(*vtx) = result.dca; + d_dcaErr_value(*vtx) = result.dcaErr; + d_zca_value(*vtx) = result.zca; + d_zcaErr_value(*vtx) = result.zcaErr; + d_nTrksChi2_value(*vtx) = result.nTrksChi2; + ATH_MSG_DEBUG("BVertexClosestTrackTool::saveClosestTrack() " + << "-- dca: " << result.dcaName() + << ", dcaErr: " << result.dcaErrName() + << ", zca: " << 
result.zcaName() + << ", zcaErr: " << result.zcaErrName() + << ", nTrksChi2: " << result.nTrksChi2Name()); + ATH_MSG_DEBUG("BVertexClosestTrackTool::saveClosestTrack() " + << "-- vertex: (" + << vtx->x() << ", " + << vtx->y() << ", " + << vtx->z() << ")" + << ", dca: " << result.dca + << ", dcaErr: " << result.dcaErr + << ", zca: " << result.zca + << ", zcaErr: " << result.zcaErr + << ", nTrksChi2: " << result.nTrksChi2); + // add ElementLink to closest track + std::string linkName = result.closeTrackName(); + SG::AuxElement::Decorator<TrackParticleLink_t> + tpLinkDecor(linkName); + TrackParticleLink_t tpLink; + if ( result.closeTrack != NULL ) { + tpLink.toContainedElement( *m_tracks, result.closeTrack ); + } + ATH_MSG_DEBUG("saveClosestTrack: Decorate vtx " + << vtx << " with " << linkName + << ", closeTrkPtr: " << result.closeTrack); + tpLinkDecor(*vtx) = tpLink; + if ( tpLink.isValid() ) { + ATH_MSG_DEBUG("saveClosestTrack: Decorated vtx " + << vtx << " with " << linkName + << ", closeTrkPtr: " << result.closeTrack); + } else { + ATH_MSG_VERBOSE("saveClosestTrack: Failed to decorate vtx " + << vtx << " with " << linkName + << ", closeTrkPtr: " + << result.closeTrack << " !"); + } + // if valid + } // for ics + } // for itt + } // for ipv + } // for its + + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + void BVertexClosestTrackTool::setResultsPrefix(std::string prefix) const { + + ATH_MSG_DEBUG("BVertexClosestTrackTool::setResultsPrefix -- begin"); + + unsigned int nTrackSels = m_trackSelectionTools.size(); + unsigned int nPvAssocs = m_pvAssocTypes.size(); + unsigned int nTrackTypes = m_useTrackTypes.size(); + unsigned int nChi2Sets = m_closeTrackChi2SetName.size(); + + for (unsigned int its = 0; its < nTrackSels; ++its) { + for (unsigned int ipv = 0; ipv < nPvAssocs; ++ipv) { + for (unsigned int itt = 0; itt < nTrackTypes; ++itt) { + for (unsigned int ics = 0; ics < nChi2Sets; ++ics) { + 
m_results[its][ipv][itt][ics].setPrefix(prefix); + } // for ics + } // for itt + } // for ipv + } // for its + + ATH_MSG_DEBUG("BVertexClosestTrackTool::setResultsPrefix -- end"); + } + //-------------------------------------------------------------------------- + void BVertexClosestTrackTool::initResults() { + + ATH_MSG_DEBUG("BVertexClosestTrackTool::initResults -- begin"); + + unsigned int nTrackSels = m_trackSelectionTools.size(); + unsigned int nPvAssocs = m_pvAssocTypes.size(); + unsigned int nTrackTypes = m_useTrackTypes.size(); + unsigned int nChi2Sets = m_closeTrackChi2SetName.size(); + + ATH_MSG_DEBUG("BVertexClosestTrackTool::initResults : nTrackSels = " + << nTrackSels); + ATH_MSG_DEBUG("BVertexClosestTrackTool::initResults : nPvAssocs = " + << nPvAssocs); + ATH_MSG_DEBUG("BVertexClosestTrackTool::initResults : nTrackTypes = " + << nTrackTypes); + m_results.resize(boost::extents[nTrackSels][nPvAssocs][nTrackTypes][nChi2Sets]); + for (unsigned int its = 0; its < nTrackSels; ++its) { + ATH_MSG_DEBUG("BVertexClosestTrackTool::initResults -- its = " << its); + for (unsigned int ipv = 0; ipv < nPvAssocs; ++ipv) { + ATH_MSG_DEBUG("BVertexClosestTrackTool::initResults -- ipv = " << ipv); + for (unsigned int itt = 0; itt < nTrackTypes; ++itt) { + ATH_MSG_DEBUG("BVertexClosestTrackTool::initResults -- itt = " + << itt); + for (unsigned int ics = 0; ics < nChi2Sets; ++ics) { + ATH_MSG_DEBUG("BVertexClosestTrackTool::initResults -- ics = " + << ics); + std::string csname = m_closeTrackChi2SetName[ics]; + ATH_MSG_DEBUG("BVertexClosestTrackTool::initResults : " + << m_branchBaseName << ", " + << buildBranchBaseName(its, ipv, itt, csname)); + m_results[its][ipv][itt][ics].setup(buildBranchBaseName(its, ipv, + itt, + csname), + m_branchBaseName); + } // for ics + } // for itt + } // for ipv + } // for its + + ATH_MSG_DEBUG("BVertexClosestTrackTool::initResults -- end"); + } + //-------------------------------------------------------------------------- + // Dump 
CloseTracks debug information to log file + //-------------------------------------------------------------------------- + StatusCode BVertexClosestTrackTool::logCloseTracksDebugInfo() const { + + // Count candidates + if (m_runNumber != m_lastRunNumber || m_evtNumber != m_lastEvtNumber) { + m_lastRunNumber = m_runNumber; + m_lastEvtNumber = m_evtNumber; + m_svIdx = 0; + } else { + m_svIdx++; + } + + std::string str(">>>>> logCloseTracksDebugInfo:\n"); + boost::format f("Run %d Event %d SV %d\n"); + f % m_runNumber % m_evtNumber % m_svIdx; + str.append(f.str()); + + unsigned int nTrackSels = m_trackSelectionTools.size(); + unsigned int nPvAssocs = m_pvAssocTypes.size(); + unsigned int nTrackTypes = m_useTrackTypes.size(); + unsigned int nChi2Sets = m_closeTrackChi2SetName.size(); + + for (unsigned int its = 0; its < nTrackSels; ++its) { + for (unsigned int ipv = 0; ipv < nPvAssocs; ++ipv) { + for (unsigned int itt = 0; itt < nTrackTypes; ++itt) { + for (unsigned int ics = 0; ics < nChi2Sets; ++ics) { + boost::format f1("its: %d ipv: %d itt: %d ics: %d\n"); + f1 % its % ipv % itt % its; + str.append(f1.str()); + CtItem result = m_results[its][ipv][itt][ics]; + str.append(result.toString()+"\n"); + } // for ics + } // for itt + } // for ipv + } // for its + + str.append("<<<<< logCloseTracksDebugInfo"); + ATH_MSG_INFO(str); + + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- +} diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BVertexTrackIsoTool.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BVertexTrackIsoTool.cxx new file mode 100644 index 0000000000000000000000000000000000000000..6bb527ea80ebf43fe0a5ceb76be139a433e39f42 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BVertexTrackIsoTool.cxx @@ -0,0 +1,511 @@ +/* + Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration +*/ + 
+//============================================================================
+// BVertexTrackIsoTool.cxx
+//============================================================================
+//
+// Author : Wolfgang Walkowiak <Wolfgang.Walkowiak@cern.ch>
+// Changes:
+//
+// Add B vertex track isolation information for different configurations,
+// different track selections and different PV-to-SV association methods.
+//
+// For a usage example see BPHY8.py .
+//
+// Job options provided by this class:
+// - IsolationConeSizes     -- List of isolation cone sizes
+// - IsoTrkImpLogChi2Max    -- List of maximum log(chi2) cuts for
+//                             association of tracks to the primary
+//                             vertex picked.
+// - IsoDoTrkImpLogChi2Cut  -- apply log(chi2) cuts
+//                             0 : don't apply log(chi2) cuts
+//                             1 : apply log(chi2) cuts
+//                             2 : apply log(chi2) cuts [former version]
+//                             (The last two job options must
+//                             contain the same number of elements
+//                             as the IsolationConeSizes list.)
+// - UseOptimizedAlgo       -- Use the speed-optimized algorithm.
+// +//============================================================================ +// +#include "DerivationFrameworkBPhys/BVertexTrackIsoTool.h" +#include "xAODEventInfo/EventInfo.h" +#include "xAODBPhys/BPhysHelper.h" +#include "InDetTrackSelectionTool/IInDetTrackSelectionTool.h" +#include "EventPrimitives/EventPrimitivesHelpers.h" + +#include "boost/format.hpp" +#include "TVector3.h" +#include <algorithm> +#include <sstream> + +namespace DerivationFramework { + + //------------------------------------------------------------------------- + // + // helper class + BVertexTrackIsoTool::IsoItem::IsoItem(std::string Name, + std::string Bname, + std::string Prefix, + double IsoValue, + int NTracks) : + BaseItem(Name, Bname, Prefix), isoValue(IsoValue), nTracks(NTracks) { + } + + BVertexTrackIsoTool::IsoItem::~IsoItem() { + } + + void BVertexTrackIsoTool::IsoItem::setup(std::string Name, + std::string Bname, + std::string Prefix) { + BaseItem::setup(Name, Bname, Prefix); + isoValue = -1.; + nTracks = 0; + } + + void BVertexTrackIsoTool::IsoItem::setup(std::string Name, + std::string Bname, + std::string Prefix, + double IsoValue, + int NTracks) { + BaseItem::setup(Name, Bname, Prefix); + isoValue = IsoValue; + nTracks = NTracks; + } + + void BVertexTrackIsoTool::IsoItem::resetVals() { + isoValue = -2.; + nTracks = -1; + } + + void BVertexTrackIsoTool::IsoItem::copyVals(const BaseItem& item) { + copyVals((const IsoItem&)item); + } + + void BVertexTrackIsoTool::IsoItem::copyVals(const IsoItem& item) { + isoValue = item.isoValue; + nTracks = item.nTracks; + } + + std::string BVertexTrackIsoTool::IsoItem::isoName() { + return buildName(); + } + + std::string BVertexTrackIsoTool::IsoItem::nTracksName() { + return buildName("Ntracks"); + } + + //-------------------------------------------------------------------------- + BVertexTrackIsoTool::BVertexTrackIsoTool(const std::string& t, + const std::string& n, + const IInterface* p) + : BPhysVertexTrackBase(t,n,p) { + + 
declareInterface<DerivationFramework::IAugmentationTool>(this); + + declareProperty("IsolationConeSizes" , m_isoConeSizes); + declareProperty("IsoTrkImpLogChi2Max" , m_isoTrkImpLogChi2Max); + declareProperty("IsoDoTrkImpLogChi2Cut" , m_isoDoTrkImpLogChi2Cut); + declareProperty("UseOptimizedAlgo" , m_useOptimizedAlgo = true); + } + //-------------------------------------------------------------------------- + StatusCode BVertexTrackIsoTool::initializeHook() { + + ATH_MSG_DEBUG("BVertexTrackIsoTool::initializeHook() -- begin"); + + // check like-sized arrays + if ( m_isoConeSizes.size() != m_isoTrkImpLogChi2Max.size() || + m_isoConeSizes.size() != m_isoDoTrkImpLogChi2Cut.size() ) { + ATH_MSG_ERROR("Size mismatch of IsolationConeSizes (" + << m_isoConeSizes.size() + << "), IsoTrkImpChi2Max (" + << m_isoTrkImpLogChi2Max.size() + << ") and IsoDoTrkImpChi2Cut (" + << m_isoDoTrkImpLogChi2Cut.size() << ") lists!"); + } + + // initialize results array + initResults(); + + // info output + ATH_MSG_INFO("calculateIsolation: using " + << (m_useOptimizedAlgo ? 
+ "optimized (faster)" : "regular (slower)") + << "track isolation calculation methd."); + + ATH_MSG_DEBUG("BVertexTrackIsoTool::initializeHook() -- end"); + + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + StatusCode BVertexTrackIsoTool::finalizeHook() { + + ATH_MSG_DEBUG("BVertexTrackIsoTool::finalizeHook()"); + + // everything all right + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + StatusCode + BVertexTrackIsoTool::addBranchesVCSetupHook(size_t ivc) const { + + ATH_MSG_DEBUG("BVertexTrackIsoTool::addBranchesVCLoopHook() -- begin"); + + ATH_MSG_DEBUG("BVertexTrackIsoTool::addBranchesVCSetupHook: " + << "Vertex container index " << ivc + << " for collection " << m_vertexContainerNames[ivc] + << " with prefix " << m_branchPrefixes[ivc]); + + setResultsPrefix(m_branchPrefixes[ivc]); + + ATH_MSG_DEBUG("BVertexTrackIsoTool::addBranchesVCSetupHook() -- end"); + + // nothing to do here + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + StatusCode + BVertexTrackIsoTool::addBranchesSVLoopHook(const xAOD::Vertex* vtx) const { + + ATH_MSG_DEBUG("BVertexTrackIsoTool::addBranchesSVLoopHook() -- begin"); + + ATH_MSG_DEBUG("BVertexTrackIsoTool::addBranchesSVLoopHook(): " + "calculate isolation ..."); + if ( m_useOptimizedAlgo ) { + CHECK(calculateValues(vtx)); + } else { + CHECK(calculateIsolation(vtx)); + } + ATH_MSG_DEBUG("BVertexTrackIsoTool::addBranchesSVLoopHook(): " + "save isolation ..."); + // save the isolation values + CHECK(saveIsolation(vtx)); + + ATH_MSG_DEBUG("BVertexTrackIsoTool::addBranchesSVLoopHook() -- end"); + + // nothing to do here + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + // Calculate track isolation variables -- faster method with caching + 
//-------------------------------------------------------------------------- + StatusCode + BVertexTrackIsoTool::calcValuesHook(const xAOD::Vertex* vtx, + const unsigned int ipv, + const unsigned int its, + const unsigned int itt) const { + + ATH_MSG_DEBUG("calcValuesHook: ipv: " << ipv + << ", its: " << its << ", itt: " << itt); + + // candidate tracks and momentum + xAOD::BPhysHelper cand(vtx); + TVector3 candP = cand.totalP(); + const xAOD::Vertex* candRefPV = cand.pv(m_pvAssocTypes[ipv]); + + TrackBag tracks = selectTracks(m_tracks, cand, ipv, its, itt); + + // loop over isolation cones (pt and deltaR) + unsigned int nCones = m_isoConeSizes.size(); + for (unsigned int ic = 0; ic < nCones; ++ic) { + + IsoItem& iso = m_results[ic][its][ipv][itt]; + const double& coneSize = m_isoConeSizes[ic]; + const double& logChi2Max = m_isoTrkImpLogChi2Max[ic]; + const int& doLogChi2 = m_isoDoTrkImpLogChi2Cut[ic]; + + // presets + iso.resetVals(); + + double nTracksInCone = 0; + double ptSumInCone = 0.; + + // make sure candRefPV exists + if ( candRefPV != NULL ) { + + for (TrackBag::const_iterator trkItr = tracks.begin(); + trkItr != tracks.end(); ++trkItr) { + double deltaR = candP.DeltaR((*trkItr)->p4().Vect()); + if ( deltaR < coneSize ) { + double logChi2 = (doLogChi2 > 0) ? + getTrackCandPVLogChi2(*trkItr, candRefPV) : -9999.; + // next line needed exactly as is for backward validation + if ( doLogChi2 == 2 ) logChi2 = abs(logChi2); + if ( doLogChi2 == 0 || logChi2 < logChi2Max ) { + nTracksInCone++; + ptSumInCone += (*trkItr)->pt(); + } // logChi2 + } // deltaR + } + // calculate result + if ( ptSumInCone + candP.Pt() > 0. 
) { + iso.isoValue = candP.Pt() / ( ptSumInCone + candP.Pt() ); + } else { + iso.isoValue = -5.; + } + + } else { + iso.isoValue = -10.; + } // if candRefPV != NULL + + iso.nTracks = nTracksInCone; + } // for ic + + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + // Fill track isolation values from cache if found + //-------------------------------------------------------------------------- + bool BVertexTrackIsoTool::fastFillHook(const xAOD::Vertex* vtx, + const int ipv) const { + + ATH_MSG_DEBUG("fastFillHook: ipv: " << ipv); + + bool found(false); + + StringIntMap_t::iterator itpv = + m_pvAssocResMap.find(buildPvAssocCacheName(vtx, ipv)); + if ( itpv != m_pvAssocResMap.end() ) { + found = true; + unsigned int nCones = m_isoConeSizes.size(); + unsigned int nTrackSels = m_trackSelectionTools.size(); + unsigned int nTrackTypes = m_useTrackTypes.size(); + for (unsigned int its = 0; its < nTrackSels; ++its) { + for (unsigned int ic = 0; ic < nCones; ++ic) { + for (unsigned int itt = 0; itt < nTrackTypes; ++itt) { + m_results[ic][its][ipv][itt] + .copyVals(m_results[ic][its][itpv->second][itt]); + } // for its + } // for ic + } // for itt + } // if found + + ATH_MSG_DEBUG("fastFillHook: cache index: " + << buildPvAssocCacheName(vtx, ipv) + << ", found ? " << found + << ", ipv_ref: " + << (found ? 
itpv->second : -1)); + + return found; + } + //-------------------------------------------------------------------------- + // Track isolation calculation loops -- slower method + //-------------------------------------------------------------------------- + StatusCode + BVertexTrackIsoTool::calculateIsolation(const xAOD::Vertex* vtx) const { + + ATH_MSG_DEBUG("BVertexTrackIsoTool::calculateIsolation -- begin"); + + unsigned int nCones = m_isoConeSizes.size(); + unsigned int nTrackSels = m_trackSelectionTools.size(); + unsigned int nPvAssocs = m_pvAssocTypes.size(); + unsigned int nTrackTypes = m_useTrackTypes.size(); + + for (unsigned int its = 0; its < nTrackSels; ++its) { + for (unsigned int ipv = 0; ipv < nPvAssocs; ++ipv) { + for (unsigned int ic = 0; ic < nCones; ++ic) { + for (unsigned int itt = 0; itt < nTrackTypes; ++itt) { + CHECK(calcIsolation(m_results[ic][its][ipv][itt], vtx, + m_isoConeSizes[ic], m_isoTrkImpLogChi2Max[ic], + m_isoDoTrkImpLogChi2Cut[ic], + m_trackSelectionTools[its], + m_pvAssocTypes[ipv], m_useTrackTypes[itt])); + } // for itt + } // for ic + } // for ipv + } // for its + + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + // Calculate track isolation variables -- slower method + //-------------------------------------------------------------------------- + StatusCode BVertexTrackIsoTool:: + calcIsolation(const IsoItem& iso, + const xAOD::Vertex* vtx, + const double coneSize, + const double logChi2Max, + const int doLogChi2, + const ToolHandle<TrkSelTool>& tSelTool, + const xAOD::BPhysHelper::pv_type pvAssocType, + const int trackTypes ) const { + + // preset + iso.nTracks = -1; + iso.isoValue = -2.; + + // candidate tracks and momentum + xAOD::BPhysHelper cand(vtx); + TrackBag candTracks = findAllTracksInDecay(cand); + TVector3 candP = cand.totalP(); + const xAOD::Vertex* candRefPV = cand.pv(pvAssocType); + const xAOD::Vertex* candPV = cand.origPv(pvAssocType); + + // 
tracks to be considered + TrackBag tracks; + for (xAOD::TrackParticleContainer::const_iterator trkItr = + m_tracks->begin(); trkItr != m_tracks->end(); ++trkItr) { + const xAOD::TrackParticle* track = *trkItr; + // track selection check + if ( ! tSelTool->accept(*track, candRefPV) ) continue; + // track type check + if ( ! ((unsigned int)trackTypes == ttall() || + (unsigned int)trackTypes == ttallMin() || + (detTrackTypes(track, candPV, candRefPV) + & trackTypes) > 0x0) ) continue; + // track not in SV + if ( std::find(candTracks.begin(), candTracks.end(), track) + != candTracks.end() ) continue; + // tracks that survived so far + tracks.push_back(track); + } + + double nTracksInCone = 0; + double ptSumInCone = 0.; + for (TrackBag::const_iterator trkItr = tracks.begin(); + trkItr != tracks.end(); ++trkItr) { + double deltaR = candP.DeltaR((*trkItr)->p4().Vect()); + if ( deltaR < coneSize ) { + double logChi2 = (doLogChi2 > 0) ? + getTrackCandPVLogChi2(*trkItr, candRefPV) : -9999.; + // next line needed exactly as is for backward validation + if ( doLogChi2 == 2 ) logChi2 = abs(logChi2); + if ( doLogChi2 == 0 || logChi2 < logChi2Max ) { + nTracksInCone++; + ptSumInCone += (*trkItr)->pt(); + } + } // deltaR + } + // calculate result + if ( ptSumInCone + candP.Pt() > 0. 
) { + iso.isoValue = candP.Pt() / ( ptSumInCone + candP.Pt() ); + } else { + iso.isoValue = -5; + } + iso.nTracks = nTracksInCone; + + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + StatusCode + BVertexTrackIsoTool::saveIsolation(const xAOD::Vertex* vtx) const { + + unsigned int nCones = m_isoConeSizes.size(); + unsigned int nTrackSels = m_trackSelectionTools.size(); + unsigned int nPvAssocs = m_pvAssocTypes.size(); + unsigned int nTrackTypes = m_useTrackTypes.size(); + + for (unsigned int its = 0; its < nTrackSels; ++its) { + for (unsigned int ipv = 0; ipv < nPvAssocs; ++ipv) { + for (unsigned int ic = 0; ic < nCones; ++ic) { + for (unsigned int itt = 0; itt < nTrackTypes; ++itt) { + IsoItem result = m_results[ic][its][ipv][itt]; + SG::AuxElement::Decorator< float > + d_iso_value(result.isoName()); + SG::AuxElement::Decorator< int > + d_iso_ntracks(result.nTracksName()); + d_iso_value(*vtx) = result.isoValue; + d_iso_ntracks(*vtx) = result.nTracks; + ATH_MSG_DEBUG("BVertexTrackIsoTool::saveIsolation() -- isobn: " + << result.isoName() << ", ntbn: " + << result.nTracksName()); + ATH_MSG_DEBUG("BVertexTrackIsoTool::saveIsolation() -- vertex: (" + << vtx->x() << ", " + << vtx->y() << ", " + << vtx->z() << "), iso: " + << result.isoValue << ", nTracks: " + << result.nTracks); + } // for itt + } // for ic + } // for ipv + } // for its + + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + void BVertexTrackIsoTool::setResultsPrefix(std::string prefix) const { + + ATH_MSG_DEBUG("BVertexTrackIsoTool::setResultsPrefix -- begin"); + + unsigned int nCones = m_isoConeSizes.size(); + unsigned int nTrackSels = m_trackSelectionTools.size(); + unsigned int nPvAssocs = m_pvAssocTypes.size(); + unsigned int nTrackTypes = m_useTrackTypes.size(); + + for (unsigned int its = 0; its < nTrackSels; ++its) { + for (unsigned int ipv = 0; ipv < nPvAssocs; ++ipv) { 
+ for (unsigned int ic = 0; ic < nCones; ++ic) { + for (unsigned int itt = 0; itt < nTrackTypes; ++itt) { + m_results[ic][its][ipv][itt].setPrefix(prefix); + } // for itt + } // for ic + } // for ipv + } // for its + + ATH_MSG_DEBUG("BVertexTrackIsoTool::setResultsPrefix -- end"); + } + //-------------------------------------------------------------------------- + void BVertexTrackIsoTool::initResults() { + + unsigned int nCones = m_isoConeSizes.size(); + unsigned int nTrackSels = m_trackSelectionTools.size(); + unsigned int nPvAssocs = m_pvAssocTypes.size(); + unsigned int nTrackTypes = m_useTrackTypes.size(); + + ATH_MSG_DEBUG("BVertexTrackIsoTool::initResults -- begin"); + ATH_MSG_DEBUG("BVertexTrackIsoTool::initResults : nCones = " << nCones); + ATH_MSG_DEBUG("BVertexTrackIsoTool::initResults : nTrackSels = " + << nTrackSels); + ATH_MSG_DEBUG("BVertexTrackIsoTool::initResults : nPvAssocs = " + << nPvAssocs); + ATH_MSG_DEBUG("BVertexTrackIsoTool::initResults : nTrackTypes = " + << nTrackTypes); + m_results.resize(boost::extents[nCones][nTrackSels][nPvAssocs][nTrackTypes]); + for (unsigned int its = 0; its < nTrackSels; ++its) { + ATH_MSG_DEBUG("BVertexTrackIsoTool::initResults -- its = " << its); + for (unsigned int ipv = 0; ipv < nPvAssocs; ++ipv) { + ATH_MSG_DEBUG("BVertexTrackIsoTool::initResults -- ipv = " << ipv); + for (unsigned int ic = 0; ic < nCones; ++ic) { + ATH_MSG_DEBUG("BVertexTrackIsoTool::initResults -- ic = " << ic); + for (unsigned int itt = 0; itt < nTrackTypes; ++itt) { + ATH_MSG_DEBUG("BVertexTrackIsoTool::initResults -- itt = " << itt); + + ATH_MSG_DEBUG("BVertexTrackIsoTool::initResults :" + << m_branchBaseName << buildBranchName(ic, its, + ipv, itt)); + + m_results[ic][its][ipv][itt].setup(buildBranchName(ic, its, + ipv, itt), + m_branchBaseName); + } // for itt + } // for ic + } // for ipv + } // for its + + ATH_MSG_DEBUG("BVertexTrackIsoTool::initResults -- end"); + } + 
//-------------------------------------------------------------------------- + std::string BVertexTrackIsoTool::buildBranchName(unsigned int ic, + unsigned int its, + unsigned int ipv, + unsigned int itt) const { + ATH_MSG_DEBUG("BVertexTrackIsoTool::buildBranchName -- begin"); + + double coneSize = m_isoConeSizes[ic]; + double logChi2Max = m_isoTrkImpLogChi2Max[ic]; + int doLogChi2 = m_isoDoTrkImpLogChi2Cut[ic]; + + // format it nicely + boost::format f("%02d_LC%02dd%1d_%s"); + f % (int)(coneSize*10.) % (int)(logChi2Max*10.) % doLogChi2 + % buildBranchBaseName(its, ipv, itt); + + ATH_MSG_DEBUG("BVertexTrackIsoTool::buildBranchName: " << f.str()); + + return f.str(); + } + //-------------------------------------------------------------------------- +} + diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BmumuThinningTool.cxx_NoCompile b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BmumuThinningTool.cxx_NoCompile new file mode 100644 index 0000000000000000000000000000000000000000..c8a539972c6a664c0b76e37c03bf56dfe9d0b2dd --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/BmumuThinningTool.cxx_NoCompile @@ -0,0 +1,1167 @@ +/* + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +*/ + +/** + * @file BmumuThinningTool.cxx + * @author Wolfgang Walkowiak <wolfgang.walkowiak@cern.ch> + * + */ + +#include "DerivationFrameworkBPhys/BmumuThinningTool.h" +#include "AthenaKernel/IThinningSvc.h" +#include "xAODTracking/TrackParticleContainer.h" +#include "xAODTracking/VertexContainer.h" +#include "xAODTracking/VertexAuxContainer.h" +#include "xAODBase/IParticleHelpers.h" +#include "AthContainers/AuxElement.h" +#include "AthContainers/AuxTypeRegistry.h" + +#include <string> +#include <sstream> +#include <istream> +#include <vector> +#include <iomanip> +#include <algorithm> +#include <numeric> +#include <regex> +#include <boost/algorithm/string.hpp> + +namespace 
DerivationFramework { + + // static members + // Note: may later be migrated to xAODBPhys/BPhysHelper + std::map<xAOD::BPhysHelper::pv_type, std::string> + BmumuThinningTool::PvTypeToVarNameMap = + { {xAOD::BPhysHelper::PV_MAX_SUM_PT2, "MaxSumPt2"}, + {xAOD::BPhysHelper::PV_MIN_A0 , "MinA0" }, + {xAOD::BPhysHelper::PV_MIN_Z0 , "MinZ0" }, + {xAOD::BPhysHelper::PV_MIN_Z0_BA , "MinZ0BA" } }; + + //-------------------------------------------------------------------------- + // Constructor + //-------------------------------------------------------------------------- + BmumuThinningTool::BmumuThinningTool(const std::string& t, + const std::string& n, + const IInterface* p) : + CfAthAlgTool(t, n, p), + m_thinningSvc("ThinningSvc", n), + m_doCloseTracks(false), + m_doPVs(false), + m_doRefPVs(false), + m_doMuons(false), + m_doCalMuons(false), + m_doTracks(false) { + + declareInterface<DerivationFramework::IThinningTool>(this); + + // thinning service + declareProperty("ThinningService" , m_thinningSvc); + // TrackParticle container name + declareProperty("TrackParticleContainerName", + m_trkPartContName = "InDetTrackParticles"); + // list of secondary vertex container names + declareProperty("VertexContainerNames" , m_vtxContNames); + // list of pass flags for the seconary vertices + // empty list lets all vertices pass + // list length needs to be identical to length of + // VertexContainerNames list if AlignPassToVertexList is True + declareProperty("VertexPassFlags" , m_vtxPassFlags); + // align VertexPassFlags to VertexContainerNames list? + // This option causes a 1:1 correlation between the two lists, + // i.e. a flag is only applied to the corresponding container + // if this option is set to True. 
(default: false) + declareProperty("AlignPassToVertexList", + m_alignPassToVertexList = false); + // Primary vertex container name + declareProperty("PVContainerName" , m_PVContName); + // Refitted primary vertex container names + // This list must be of same length and order as the m_vtxContNames list + // (or empty => no thinning of refitted primary vertex containers) + declareProperty("RefPVContainerNames" , m_refPVContNames); + // name of the used muon container + declareProperty("MuonContainerName" , m_muonContName = ""); + // name of the calibrated muons container + declareProperty("CalibMuonContainerName" , m_calMuonContName = ""); + // closest track branch base name + declareProperty("CloseTrackBranchBaseName", m_ctBranchBaseName); + // closest track branch prefixes + declareProperty("CloseTrackBranchPrefixes", m_ctBranchPrefixes); + // keep tracks for selected (refitted) primary vertices + declareProperty("KeepTracksForSelectedPVs", m_keepPVTracks = false); + // match vertex muons with calibrated muons + declareProperty("MatchCalibratedMuons" , m_matchCalMuons = false); + // mark orginal muons for matched calibrated muons as well + // (only makes sense if MatchCalibratedMuons = True) + declareProperty("MarkMatchedMuons" , m_markMuons = false); + // mark calibrated muons for matched original muons as well + // (only makes sense if MatchCalibratedMuons = False) + declareProperty("MarkMatchedCalMuons" , m_markCalMuons = false); + // sync marked muons both ways (forces it) + declareProperty("SyncMatchedMuonsBothWays", m_syncMuonsBothWays = false); + // allow fast sync of myon masks + // (Set to 'False' to force in-depth synchronization of muon masks.) 
+ declareProperty("AllowFastMuonMaskSync" , m_allowFastMuonMaskSync = true); + // keep tracks for closest tracks + declareProperty("KeepCloseTracks" , m_keepCloseTracks = false); + // keep tracks for selected muons + declareProperty("KeepTracksForMuons" , m_keepSelMuonTracks = false); + // keep tracks for selected calibrated muons + declareProperty("KeepTracksForCalMuons" , m_keepSelCalMuonTracks = false); + // keep (original) muons for selected tracks + declareProperty("KeepMuonsForTracks" , m_keepSelTrackMuons = false); + // keep calibrated muons for selected tracks + declareProperty("KeepCalMuonsForTracks" , m_keepSelTrackCalMuons = false); + // apply AND for mask matching for vertices (default: false) + declareProperty("ApplyAndForVertices" , m_vertexAnd = false); + // apply AND for mask matching for tracks (default: false) + declareProperty("ApplyAndForTracks" , m_trackAnd = false); + // apply AND for mask matching for muons (default: false) + declareProperty("ApplyAndForMuons" , m_muonAnd = false); + // thin primary vertex collection + declareProperty("ThinPVs" , m_thinPVs = true); + // thin refittd primary vertex collections + declareProperty("ThinRefittedPVs" , m_thinRefPVs = true); + // thin ID track collection + declareProperty("ThinTracks" , m_thinTracks = true); + // thin muon collections + declareProperty("ThinMuons" , m_thinMuons = true); + } + //-------------------------------------------------------------------------- + // Destructor + //-------------------------------------------------------------------------- + BmumuThinningTool::~BmumuThinningTool() { + } + //-------------------------------------------------------------------------- + // initialization + //-------------------------------------------------------------------------- + StatusCode BmumuThinningTool::initialize() { + + ATH_MSG_INFO("BmumuThinningTool::initialize()"); + + // check TrackParticle container name + if ( m_trkPartContName == "" ) { + ATH_MSG_INFO("No ID track collection 
provided for thinning."); + } else { + ATH_MSG_INFO("Using " << m_trkPartContName + << " as the source collection for ID track particles."); + m_doTracks = true; + } + + // check secondary vertex container names + if ( m_vtxContNames.empty() ) { + ATH_MSG_FATAL("No secondary vertex collections provided for thinning."); + return StatusCode::FAILURE; + } else { + for (std::vector<std::string>::iterator it = m_vtxContNames.begin(); + it != m_vtxContNames.end(); ++it) { + ATH_MSG_INFO("Using " << *it + << " as a source collection for secondary vertices."); + } + } + + // check vertex pass flags + if ( m_alignPassToVertexList ) { + if ( m_vtxPassFlags.size() != m_vtxContNames.size() ) { + ATH_MSG_FATAL("Size mismatch of VertexContainerNames (" + << m_vtxContNames.size() + << ") and VertexPassFlags (" + << m_vtxPassFlags.size() << ")"); + return StatusCode::FAILURE; + } else { + ATH_MSG_INFO(std::left << std::setw(35) << "VertexContainerNames" + << " : " << "VertexPassFlags"); + ATH_MSG_INFO(std::setfill('-') << std::setw(70) << "" + << std::setfill(' ')); + for (size_t i=0; i<m_vtxContNames.size(); ++i) { + ATH_MSG_INFO(std::left << std::setw(35) << m_vtxContNames[i] + << " : " << m_vtxPassFlags[i]); + } + } + } else { + if ( m_vtxPassFlags.empty() ) { + ATH_MSG_INFO("No VertexPassFlags: all secondary vertices will be " + << "accepted."); + } else { + std::string str; + for (size_t i=0; i < m_vtxPassFlags.size(); ++i) { + if (i > 0) str += ", "; + str += m_vtxPassFlags[i]; + } + ATH_MSG_INFO("VertexPassFlags applied to all vertices:"); + ATH_MSG_INFO(str); + } + } + + // check primary vertex container name + if ( m_PVContName == "" ) { + ATH_MSG_FATAL("No primary vertex collection provided for thinning."); + return StatusCode::FAILURE; + } else { + ATH_MSG_INFO("Using " << m_PVContName + << " as the source collection for primary vertices."); + m_doPVs = true; + } + + // check refitted primary vertex container names + if ( m_refPVContNames.empty() ) { + ATH_MSG_INFO("No 
refitted PV collections provided for thinning."); + } else { + if ( m_refPVContNames.size() != m_vtxContNames.size() ) { + ATH_MSG_FATAL("Size mismatch of VertexContainerNames (" + << m_vtxContNames.size() + << ") and RefPVContainerNames (" + << m_refPVContNames.size() << ")"); + return StatusCode::FAILURE; + } else { + for (std::vector<std::string>::iterator it = m_refPVContNames.begin(); + it != m_refPVContNames.end(); ++it) { + ATH_MSG_INFO("Using " << *it + << " as a source collection for refitted PVs."); + } + m_doRefPVs = true; + } + } + + // check muon container name + if ( m_muonContName == "" ) { + ATH_MSG_INFO("No (orginal) muon collection provided for thinning."); + } else { + ATH_MSG_INFO("Using " << m_muonContName + << " as a source collection for (original) muons."); + m_doMuons = true; + } + + // check calibrated muons container name + if ( m_calMuonContName == "" ) { + ATH_MSG_INFO("No calibrated muons collection provided for thinning."); + } else { + ATH_MSG_INFO("Using " << m_calMuonContName + << " as a source collection for calibrated muons."); + m_doCalMuons = true; + } + + // check muon thinning settings + if ( m_thinMuons ) { + if ( (m_matchCalMuons || m_markCalMuons) && !m_doCalMuons ) { + ATH_MSG_ERROR("No container for calibrated muons given!"); + } + if ( (!m_matchCalMuons || m_markMuons) && !m_doMuons ) { + ATH_MSG_ERROR("No container for (original) muons given!"); + } + if ( m_matchCalMuons && m_markCalMuons ) { + ATH_MSG_WARNING("Configuration issue: both MatchWithCalMuons and " + << "MarkMatchedCalMuons set to true! " + << "Ignoring the second setting."); + } + if ( !m_matchCalMuons && m_markMuons ) { + ATH_MSG_WARNING("Configuration issue: MatchWithCalMuons set to " + << "false and " + << "MarkMatchedMuons set to true! 
" + << "Ignoring the second setting."); + } + ATH_MSG_INFO("MatchWithCalMuons: " << m_matchCalMuons + << ", MarkMatchedMuons: " << m_markMuons + << ", MarkMatchedCalMuons: " << m_markCalMuons); + } + + // check closest track settings + m_doCloseTracks = (m_ctBranchBaseName != "" && !m_ctBranchPrefixes.empty()); + if ( m_doCloseTracks ) { + for (std::vector<std::string>::iterator it = m_ctBranchPrefixes.begin(); + it != m_ctBranchPrefixes.end(); ++it) { + + ATH_MSG_INFO("Keeping tracks for " + << *it << "_" << m_ctBranchBaseName << "_*"); + } + } else { + ATH_MSG_INFO("Not keeping anything for closest tracks in thinning."); + } + + // check track container for combination of track and muon thinning + if ( (m_thinTracks || m_thinMuons) && !m_doTracks) { + ATH_MSG_FATAL("Requested track or muon thinning but required " + "track container not provided"); + return StatusCode::FAILURE; + } + + // Output of options + ATH_MSG_INFO("=== Option settings - begin ==="); + ATH_MSG_INFO("KeepTracksForSelectedPVs : " << m_keepPVTracks); + ATH_MSG_INFO("MatchCalibratedMuons : " << m_matchCalMuons); + ATH_MSG_INFO("MarkMatchedMuons : " << m_markMuons); + ATH_MSG_INFO("MarkMatchedCalMuons : " << m_markCalMuons); + ATH_MSG_INFO("SyncMatchedMuonsBothWays : " << m_syncMuonsBothWays); + ATH_MSG_INFO("AllowFastMuonMaskSync : " << m_allowFastMuonMaskSync); + ATH_MSG_INFO("KeepCloseTracks : " << m_keepCloseTracks); + ATH_MSG_INFO("KeepTracksForMuons : " << m_keepSelMuonTracks); + ATH_MSG_INFO("KeepTracksForCalMuons : " << m_keepSelCalMuonTracks); + ATH_MSG_INFO("KeepMuonsForTracks : " << m_keepSelTrackMuons); + ATH_MSG_INFO("KeepCalMuonsForTracks : " << m_keepSelTrackCalMuons); + ATH_MSG_INFO("ApplyAndForVertices : " << m_vertexAnd); + ATH_MSG_INFO("ApplyAndForTracks : " << m_trackAnd); + ATH_MSG_INFO("ApplyAndForMuons : " << m_muonAnd); + ATH_MSG_INFO("ThinPVs : " << m_thinPVs); + ATH_MSG_INFO("ThinRefittedPVs : " << m_thinRefPVs); + ATH_MSG_INFO("ThinTracks : " << m_thinTracks); + 
ATH_MSG_INFO("ThinMuons : " << m_thinMuons); + ATH_MSG_INFO("=== Option settings - end ==="); + + + // initialize cache vector-of-vectors (one per vertex container) + for (size_t ivcname=0; ivcname < m_vtxContNames.size(); ++ivcname) { + m_vvOrigPVLinkNames.push_back(std::vector<std::string>()); + m_vvOrigPVLinkTypes.push_back(std::vector<pv_type>()); + m_vvRefPVLinkNames.push_back(std::vector<std::string>()); + m_vvRefPVLinkTypes.push_back(std::vector<pv_type>()); + m_vvCtLinkNames.push_back(std::vector<std::string>()); + m_vvCtLinkTypes.push_back(std::vector<pv_type>()); + } // for ivcname + + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + // finalization + //-------------------------------------------------------------------------- + StatusCode BmumuThinningTool::finalize() { + + ATH_MSG_INFO("BmumuThinningTool::finalize()"); + + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + // apply thinning + //-------------------------------------------------------------------------- + StatusCode BmumuThinningTool::doThinning() const { + + ATH_MSG_DEBUG("BmumuThinningTool::doThinning()"); + + // retrieve TrackParticle container + const xAOD::TrackParticleContainer* trkPartCont = nullptr; + std::vector<bool> trkMask; + if ( m_doTracks && (m_thinTracks || m_thinMuons) ) { + CHECK( evtStore()->retrieve(trkPartCont, m_trkPartContName) ); + // default: keep no track + trkMask.assign(trkPartCont->size(), false); + } + + // retrieve PV container + const xAOD::VertexContainer* pvCont = nullptr; + std::vector<bool> pvMask; + if ( m_doPVs && (m_thinPVs || m_keepPVTracks) ) { + CHECK( evtStore()->retrieve(pvCont, m_PVContName) ); + // default: keep no PV + pvMask.assign(pvCont->size(), false); + } + // retrieve refitted PV container + std::vector<const xAOD::VertexContainer*> vRefPvCont = + std::vector<const xAOD::VertexContainer*>(m_refPVContNames.size(), 
+ nullptr); + std::vector<std::vector<bool> > vRefPvMasks; + if ( m_thinRefPVs && m_doRefPVs ) { + for (size_t irpv = 0; irpv < m_refPVContNames.size(); ++irpv) { + CHECK( evtStore()->retrieve(vRefPvCont[irpv], m_refPVContNames[irpv]) ); + // default: keep no refitted PV + vRefPvMasks.push_back(std::vector<bool>(vRefPvCont[irpv]->size(), + false)); + } + } + + // retrieve container of (original) muons + const xAOD::MuonContainer* muonCont = nullptr; + std::vector<bool> muonMask; + if ( m_thinMuons && m_doMuons && (!m_matchCalMuons || m_markMuons) ) { + CHECK( evtStore()->retrieve(muonCont, m_muonContName) ); + // default: keep no muon + muonMask.assign(muonCont->size(), false); + } + + // retrieve container of calibrated muons + const xAOD::MuonContainer* calMuonCont = nullptr; + std::vector<bool> calMuonMask; + if ( m_thinMuons && m_doCalMuons && (m_matchCalMuons || m_markCalMuons) ) { + CHECK( evtStore()->retrieve(calMuonCont, m_calMuonContName) ); + // default: keep no muon + calMuonMask.assign(calMuonCont->size(), false); + } + + // retrieve vertex containers + for (size_t ivcname=0; ivcname < m_vtxContNames.size(); ++ivcname) { + auto &vtxContName = m_vtxContNames[ivcname]; + ATH_MSG_DEBUG("doThinning(): vtxContName: " << vtxContName); + const xAOD::VertexContainer* vtxCont = nullptr; + const xAOD::VertexAuxContainer* vtxAuxCont = nullptr; + CHECK( evtStore()->retrieve(vtxCont , vtxContName) ); + CHECK( evtStore()->retrieve(vtxAuxCont, vtxContName+"Aux.") ); + size_t vtxContSize = vtxCont->size(); + std::vector<bool> vtxMask; + // default: keep no vertex + vtxMask.assign(vtxContSize, false); + ATH_MSG_DEBUG("doThinning(): vtxContSize: " << vtxContSize); + // loop over vertices + for (size_t ivtx = 0; ivtx < vtxContSize; ++ivtx) { + xAOD::BPhysHelper vtx(vtxCont->at(ivtx)); + bool vtxPassed = false; + addToCounter(vtxContName+"_allVertices"); + ATH_MSG_DEBUG("doThinning(): ivtx = " << ivtx); + if ( m_alignPassToVertexList ) { + ATH_MSG_DEBUG("doThinning(): 1: 
passFlag(" << ivcname << ") = " + << m_vtxPassFlags[ivcname]); + vtxPassed = pass(*vtx.vtx(), m_vtxPassFlags[ivcname]); + } else { + for (size_t ipass = 0; ipass < m_vtxPassFlags.size(); ++ipass) { + ATH_MSG_DEBUG("doThinning(): 2: passFlag(" << ipass << ") = " + << m_vtxPassFlags[ipass]); + if ( pass(*vtx.vtx(), m_vtxPassFlags[ipass]) ) { + vtxPassed = true; + break; + } + } // for ipass + } + if ( vtxPassed ) { + // + // vertex passed selection + // + ATH_MSG_DEBUG("doThinning(): ivtx " << ivtx << " passed selection"); + vtxMask[ivtx] = true; + addToCounter(vtxContName+"_passedVertices"); + + // keep tracks from secondary vertices + if ( m_doTracks && (m_thinTracks || m_thinMuons) ) { + for (size_t itrk=0; itrk < trkPartCont->size(); ++itrk) { + // only consider if not yet kept + if ( !trkMask[itrk] ) { + for (size_t ivt=0; ivt < vtx.vtx()->nTrackParticles(); ++ivt) { + if ( vtx.vtx()->trackParticle(ivt) + == trkPartCont->at(itrk) ) { + trkMask[itrk] = true; + addToCounter(vtxContName+"_accTracks"); + } + } // for trk + } + } // for itrk + } // if m_doTracks + + // find aux variable names for closest tracks + if ( (m_thinTracks || m_thinMuons) && m_keepCloseTracks + && m_doCloseTracks && m_vvCtLinkNames[ivcname].empty() ) { + std::string prefix = + m_vtxContNames[ivcname]+"_"+m_ctBranchBaseName; + ATH_MSG_DEBUG("doThinning(): CT basename: " << prefix); + selectAuxElements(vtxAuxCont, m_vvCtLinkNames[ivcname], + m_ctBranchPrefixes, + m_vvCtLinkTypes[ivcname], + "_"+m_ctBranchBaseName+".*._Link"); + if ( msgLvl(MSG::INFO) ) { + std::string sAuxVars = + dumpVS(m_vvCtLinkNames[ivcname], + "doThinning(): "+vtxContName+": CT aux vars: (" + +std::to_string(m_vvCtLinkNames[ivcname].size())+")", + 4); + logWrappedMsg(sAuxVars, MSG::INFO); + } + } + + // keep tracks identified as closest tracks + if ( (m_thinTracks || m_thinMuons) && m_doTracks && m_keepCloseTracks + && m_doCloseTracks ) { + for (size_t i = 0; i < m_vvCtLinkNames[ivcname].size(); ++i) { + const 
xAOD::TrackParticle* closeTrack = + getTrackParticle(vtx.vtx(), m_vvCtLinkNames[ivcname][i]); + if ( closeTrack == nullptr ) continue; + auto tpit = std::find(trkPartCont->begin(), trkPartCont->end(), + closeTrack); + if ( tpit == trkPartCont->end() ) { + ATH_MSG_WARNING("ClosestTrack not found in " + << m_trkPartContName + << " for " << m_vvCtLinkNames[ivcname][i]); + continue; + } + size_t x = std::distance(trkPartCont->begin(), tpit); + if ( !trkMask.at(x) ) { + trkMask.at(x) = true; + addToCounter(vtxContName+"_addTracksByCT"); + } + } // for i (CT link names) + } // if + + // find aux variable names for original PVs + if ( m_thinPVs && m_vvOrigPVLinkNames[ivcname].empty() ) { + selectAuxElements(vtxAuxCont, m_vvOrigPVLinkNames[ivcname], + m_vvOrigPVLinkTypes[ivcname], "OrigPv.*.Link"); + if ( msgLvl(MSG::INFO) ) { + std::string sAuxVars = + dumpVS(m_vvOrigPVLinkNames[ivcname], + "doThinning(): "+vtxContName+": OrigPV aux vars: (" + +std::to_string(m_vvOrigPVLinkNames[ivcname].size())+")", + 4); + logWrappedMsg(sAuxVars, MSG::INFO); + } + } + // find aux variable names for refitted PVs + if ( m_thinRefPVs && m_vvRefPVLinkNames[ivcname].empty() ) { + selectAuxElements(vtxAuxCont, m_vvRefPVLinkNames[ivcname], + m_vvRefPVLinkTypes[ivcname], "Pv.*.Link"); + if ( msgLvl(MSG::INFO) ) { + std::string sAuxVars = + dumpVS(m_vvRefPVLinkNames[ivcname], + "doThinning() :"+vtxContName+": RefPV aux vars: (" + +std::to_string(m_vvRefPVLinkNames[ivcname].size())+")", + 4); + logWrappedMsg(sAuxVars, MSG::INFO); + } + } + + // debug stuff + if ( msgLvl(MSG::VERBOSE) ) { + std::vector<std::string> auxVars = + filterAuxElements(vtxAuxCont, ".*Link"); + std::string sAuxVars = + dumpVS(auxVars, + "doThinning(): "+vtxContName+": vtxAuxContVarNames:", 4); + logWrappedMsg(sAuxVars, MSG::DEBUG); + } + + // now mark associated PVs + if ( m_thinPVs && m_doPVs ) { + for (size_t i = 0; i < m_vvOrigPVLinkTypes[ivcname].size(); ++i) { + const xAOD::Vertex* origPv = + 
vtx.origPv(m_vvOrigPVLinkTypes[ivcname][i]); + if ( origPv == nullptr ) continue; + auto pvit = std::find(pvCont->begin(), pvCont->end(), origPv); + if ( pvit == pvCont->end() ) { + ATH_MSG_WARNING("PV not found in " << m_PVContName + << " for " << m_vvOrigPVLinkNames[ivcname][i]); + continue; + } + size_t x = std::distance(pvCont->begin(), pvit); + if ( !pvMask.at(x) ) { + pvMask.at(x) = true; + addToCounter(vtxContName+"_accAssocPVs"); + } + // keep tracks for selected PVs + if ( (m_thinTracks || m_thinMuons) && m_doTracks && m_keepPVTracks ) { + for (size_t ipvt=0; ipvt < (*pvit)->nTrackParticles(); + ++ipvt) { + const xAOD::TrackParticle* tp = (*pvit)->trackParticle(ipvt); + if ( tp == nullptr ) continue; + auto tpit = std::find(trkPartCont->begin(), + trkPartCont->end(), tp); + if ( tpit == trkPartCont->end() ) { + ATH_MSG_WARNING("PV track not found in " + << m_trkPartContName << " for PV from " + << m_PVContName); + continue; + } + size_t x = std::distance(trkPartCont->begin(), tpit); + if ( !trkMask.at(x) ) { + trkMask.at(x) = true; + addToCounter(vtxContName+"_addTracksBySelPVs"); + } + } // for ipvt + } // if m_keepPVTracks + } // for i (PVs) + } // if m_thinPVs && m_doPVs + + // now mark associated refittedPVs + if ( m_thinRefPVs && m_doRefPVs ) { + for (size_t i = 0; i < m_vvRefPVLinkTypes[ivcname].size(); ++i) { + const xAOD::Vertex* refPv = + vtx.pv(m_vvRefPVLinkTypes[ivcname][i]); + if ( refPv == nullptr ) continue; + auto pvit = std::find(vRefPvCont[ivcname]->begin(), + vRefPvCont[ivcname]->end(), refPv); + if ( pvit == vRefPvCont[ivcname]->end() ) { + ATH_MSG_WARNING("PV not found in " << m_refPVContNames[ivcname] + << " for " << m_vvRefPVLinkNames[ivcname][i]); + continue; + } + size_t x = std::distance(vRefPvCont[ivcname]->begin(), pvit); + if ( !vRefPvMasks[ivcname].at(x) ) { + vRefPvMasks[ivcname].at(x) = true; + addToCounter(vtxContName+"_accAssocRefPVs"); + } + // keep tracks for associated refitted PVs + if ( (m_thinTracks || m_thinMuons) && 
m_doTracks + && m_keepPVTracks ) { + for (size_t ipvt=0; ipvt < (*pvit)->nTrackParticles(); + ++ipvt) { + const xAOD::TrackParticle* tp = (*pvit)->trackParticle(ipvt); + if ( tp == nullptr ) continue; + auto tpit = std::find(trkPartCont->begin(), + trkPartCont->end(), tp); + if ( tpit == trkPartCont->end() ) { + ATH_MSG_WARNING("Refitted PV track not found in " + << m_trkPartContName + << " for refitted PV from " + << m_refPVContNames[ivcname][i]); + continue; + } + size_t x = std::distance(trkPartCont->begin(), tpit); + if ( !trkMask.at(x) ) { + trkMask.at(x) = true; + addToCounter(vtxContName+"_addTracksByAssocRefPVs"); + } + } // for ipvt + } // if (m_thinTracks || m_thinMuons) && m_doTracks + // && m_keepPVTracks + } // for i (refPVs) + } // if m_thinRefPVs && m_doRefPVs + + // keep muons from secondary vertices + if ( m_thinMuons ) { + if ( m_matchCalMuons ) { // calibrated muons collection + if ( m_doCalMuons ) { + addToCounter(vtxContName+"_accCalMuons_calls"); + CHECK( matchMuons(calMuonCont, calMuonMask, vtx, + vtxContName+"_accCalMuons") ); + } + } else { // original muons collection + if ( m_doMuons ) { + addToCounter(vtxContName+"_accMuons_calls"); + CHECK( matchMuons(muonCont, muonMask, vtx, + vtxContName+"_accMuons") ); + } + } // if m_matchCalMuons + } // if m_thinMuons + } else { + ATH_MSG_DEBUG("doThinning(): ivtx " << ivtx + << " did not pass selection"); + } + } // for ivtx (B vertex) + + // Apply the thinning service based on vtxMask + CHECK( applyThinMask(vtxCont, vtxMask, m_vertexAnd, vtxContName) ); + + } // for m_vtxContNames + + // Keep tracks for all PVs (i.e. 
if all PVs are kept) + if ( (m_thinTracks || m_thinMuons) && m_doTracks && m_keepPVTracks && + !m_thinPVs && m_doPVs ) { + for (auto &pv : *pvCont) { + for (size_t ipvt=0; ipvt < pv->nTrackParticles(); ++ipvt) { + const xAOD::TrackParticle* tp = pv->trackParticle(ipvt); + if ( tp == nullptr ) continue; + auto tpit = std::find(trkPartCont->begin(), trkPartCont->end(), tp); + if ( tpit == trkPartCont->end() ) { + ATH_MSG_WARNING("PV track not found in " + << m_trkPartContName << " for PV from " + << m_PVContName); + continue; + } + size_t x = std::distance(trkPartCont->begin(), tpit); + if ( !trkMask.at(x) ) { + trkMask.at(x) = true; + addToCounter("addTracksByAllPVs"); + } + } // for ipvt + } // for pv + } // if m_keepPVTracks && m_doTracks && m_keepPVTracks && !m_thinPVs + + // Keep (original) muons for selected ID tracks + if (m_keepSelTrackMuons && m_thinMuons && m_doMuons && m_doTracks) { + CHECK( markMuonsForSelTracks(trkPartCont, trkMask, muonCont, muonMask, + "addMuonsBySelTracks") ); + } + + // Keep (original) muons for selected ID tracks + if (m_keepSelTrackCalMuons && m_thinMuons && m_doCalMuons && m_doTracks) { + CHECK( markMuonsForSelTracks(trkPartCont, trkMask, calMuonCont, + calMuonMask, "addCalMuonsBySelTracks") ); + } + + // mark 'other' muon container elements if requested + if ( m_thinMuons ) { + if ( m_syncMuonsBothWays || m_matchCalMuons ) { + // calibrated muons -> original muons + if ( (m_markMuons || m_syncMuonsBothWays) + && m_doMuons && m_doCalMuons) { + CHECK( markOrigMuons(muonCont, calMuonCont, muonMask, calMuonMask, + "addMarkedMuons", m_allowFastMuonMaskSync) ); + } + } // if m_syncMuonsBothWays || m_matchCalMuons ) { + if ( m_syncMuonsBothWays || !m_matchCalMuons ) { + // 'orignal' muons -> calibrated muons + if ( (m_markCalMuons || m_syncMuonsBothWays) + && m_doCalMuons && m_doMuons ) { + CHECK( markCalibMuons(muonCont, calMuonCont, muonMask, calMuonMask, + "addMarkedCalMuons", m_allowFastMuonMaskSync) ); + } + } // if 
m_syncMuonsBothWays || !m_matchCalMuons ) { + } // if m_thinMuons + + // Keep tracks for selected (original) muons + if ( (m_thinTracks || m_thinMuons) && m_doTracks && m_keepSelMuonTracks + && m_doMuons ) { + CHECK( markTrksForSelMuons(trkPartCont, trkMask, muonCont, muonMask, + "addTracksBySelMuons") ); + } // if (m_thinTracks || m_thinMuons) && m_doTracks && m_keepSelMuonTracks + // && m_doMuons + + // Keep tracks for selected (calibrated) muons + if ( (m_thinTracks || m_thinMuons) && m_doTracks && m_keepSelCalMuonTracks + && m_doCalMuons ) { + CHECK( markTrksForSelMuons(trkPartCont, trkMask, calMuonCont, + calMuonMask, "addTracksBySelCalMuons") ); + } // if (m_thinTracks || m_thinMuons) && m_doTracks + // && m_keepSelCalMuonTracks && m_doCalMuons + + // debug: check muon masks' consistency + if ( msgLvl(MSG::DEBUG) ) { + std::string msg = + checkMaskConsistency(muonMask, calMuonMask, + m_muonContName+"Mask", + m_calMuonContName+"Mask", + "Muon mask consistency check:"); + logWrappedMsg(msg, MSG::DEBUG); + } + + // Apply the thinning service for TrackParticles based on trkMask + if ( m_thinTracks && m_doTracks ) { + addToCounter(m_trkPartContName+"_allTracks", trkPartCont->size()); + addToCounter(m_trkPartContName+"_passedTracks", + std::accumulate(trkMask.begin(), trkMask.end(), 0)); + CHECK( applyThinMask(trkPartCont, trkMask, m_trackAnd, + m_trkPartContName) ); + } // if m_thinTracks && m_doTracks + + // Apply the thinning service for PVs based on pvMask + if ( m_thinPVs && m_doPVs ) { + addToCounter(m_PVContName+"_allVertices", pvCont->size()); + addToCounter(m_PVContName+"_passedVertices", + std::accumulate(pvMask.begin(), pvMask.end(), 0)); + CHECK( applyThinMask(pvCont, pvMask, m_vertexAnd, m_PVContName) ); + } // if m_doPVs + + // Apply the thinning service for refPVs based on vRefPvMasks + if ( m_thinRefPVs && m_doRefPVs ) { + for (size_t irpv = 0; irpv < m_refPVContNames.size(); ++irpv) { + addToCounter(m_refPVContNames[irpv]+"_allVertices", + 
vRefPvCont[irpv]->size()); + addToCounter(m_refPVContNames[irpv]+"_passedVertices", + std::accumulate(vRefPvMasks[irpv].begin(), + vRefPvMasks[irpv].end(), 0)); + CHECK( applyThinMask(vRefPvCont[irpv], vRefPvMasks[irpv], + m_vertexAnd, m_refPVContNames[irpv]) ); + } // for irpv + } // if m_doRefPVs + + // Apply the thinning service for (original) Muons based on muonMask + if ( m_thinMuons && m_doMuons ) { + addToCounter(m_muonContName+"_allMuons", muonCont->size()); + addToCounter(m_muonContName+"_passedMuons", + std::accumulate(muonMask.begin(), muonMask.end(), 0)); + CHECK( applyThinMask(muonCont, muonMask, m_muonAnd, + m_muonContName) ); + } // if m_thinMuons && m_doMuons + + // Apply the thinning service for calibrated Muons based on calMuonMask + if ( m_thinMuons && m_doCalMuons ) { + addToCounter(m_calMuonContName+"_allMuons", calMuonCont->size()); + addToCounter(m_calMuonContName+"_passedMuons", + std::accumulate(calMuonMask.begin(), calMuonMask.end(), + 0)); + CHECK( applyThinMask(calMuonCont, calMuonMask, m_muonAnd, + m_calMuonContName) ); + } // if m_thinMuons && m_doCalMuons + + return StatusCode::SUCCESS; + } + + //-------------------------------------------------------------------------- + // Helper to apply thinning service mask -- for all Containers + //-------------------------------------------------------------------------- + template<typename TYPE> + StatusCode + BmumuThinningTool::applyThinMask(SG::ThinningHandle<TYPE> &muCont, + const std::vector<bool>& muMask, + bool doAnd) const { + + if (doAnd) { + ATH_MSG_DEBUG("doThinning(): apply thinning (AND) for " << muCont.key()); + muCont.keep(muMask, SG::ThinningHandleBase::Op::And); + } else { + ATH_MSG_DEBUG("doThinning(): apply thinning (OR) for " << muCont.key()); + muCont.keep(muMask, SG::ThinningHandleBase::Op::Or); + } + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + // Helper to check whether an element is marked as passing a 
specific + // hypothesis. + //-------------------------------------------------------------------------- + bool BmumuThinningTool::pass(const SG::AuxElement& em, std::string hypo) + const { + + if ( !boost::algorithm::starts_with(hypo, "passed_") ) + hypo = "passed_" + hypo; + SG::AuxElement::Accessor<Char_t> flagAcc(hypo); + return flagAcc.isAvailable(em) && flagAcc(em) != 0; + } + //-------------------------------------------------------------------------- + // Helper to get a TrackParticle link + //-------------------- ------------------------------------------------------ + const xAOD::TrackParticle* + BmumuThinningTool::getTrackParticle(const xAOD::Vertex* vtx, + std::string name) const { + SG::AuxElement::Accessor<TrackParticleLink> tpLinkAcc(name); + if (!tpLinkAcc.isAvailable(*vtx)) { + return nullptr; + } + const TrackParticleLink& tpLink = tpLinkAcc(*vtx); + if (!tpLink.isValid()) { + return nullptr; + } + return *tpLink; + } + //-------------------------------------------------------------------------- + // Helper to filter all names of auxillary elements of an aux container + // according to a certain pattern. The pattern must be a regular + // expression pattern. 
+ //-------------------------------------------------------------------------- + std::vector<std::string> + BmumuThinningTool::filterAuxElements(const xAOD::AuxContainerBase* auxCont, + std::string pattern) const { + + SG::AuxTypeRegistry& reg = SG::AuxTypeRegistry::instance(); + + std::vector<std::string> vElNames; + std::regex re(pattern); + + const SG::auxid_set_t& auxids = auxCont->getAuxIDs(); + for ( SG::auxid_t auxid : auxids ) { + const std::string name = reg.getName(auxid); + if ( std::regex_match(name, re) ) { + vElNames.push_back(name); + } + } // for auxids + + return vElNames; + } + //-------------------------------------------------------------------------- + // Determine aux elements to be looked at -- for (refitted) PVs + //-------------------------------------------------------------------------- + void + BmumuThinningTool::selectAuxElements(const xAOD::AuxContainerBase* auxCont, + std::vector<std::string>& vLinkNames, + std::vector<pv_type>& vLinkTypes, + std::string pattern) const { + // find aux element names matching pattern + std::vector<std::string> vAuxNames = + filterAuxElements(auxCont, pattern); + + // select aux element names matching our PV-to-SV association types + for (auto &name : vAuxNames) { + for (size_t ipvt=0; ipvt < xAOD::BPhysHelper::n_pv_types; ++ipvt) { + if ( name.find(PvTypeToVarNameMap[(pv_type)ipvt] + +"Link") != std::string::npos) { + vLinkNames.push_back(name); + vLinkTypes.push_back((pv_type)ipvt); + } + } // for ipvt + } // for name + } + //-------------------------------------------------------------------------- + // Determine aux elements to be looked at -- for closest tracks + //-------------------------------------------------------------------------- + void + BmumuThinningTool::selectAuxElements(const xAOD::AuxContainerBase* auxCont, + std::vector<std::string>& vLinkNames, + std::vector<std::string> vPrefixes, + std::vector<pv_type>& vLinkTypes, + std::string pattern) const { + // find aux element names 
matching pattern + std::vector<std::string> vAuxNames; + for (auto &prefix : vPrefixes) { + std::string cpat = prefix+pattern; + std::vector<std::string> vMoreAuxNames = + filterAuxElements(auxCont, cpat); + vAuxNames.insert(vAuxNames.end(), vMoreAuxNames.begin(), + vMoreAuxNames.end()); + } // for prefix + + // select aux element names matching our PV-to-SV association types + for (auto &name : vAuxNames) { + for (size_t ipvt=0; ipvt < xAOD::BPhysHelper::n_pv_types; ++ipvt) { + std::regex re(".*"+xAOD::BPhysHelper::pv_type_str[ipvt]+".*_Link"); + if ( std::regex_match(name, re) ) { + vLinkNames.push_back(name); + vLinkTypes.push_back((pv_type)ipvt); + } + } // for ipvt + } // for name + } + //-------------------------------------------------------------------------- + // Mark muons matched to secondary vertices + //-------------------------------------------------------------------------- + StatusCode + BmumuThinningTool::matchMuons(const xAOD::MuonContainer* muCont, + std::vector<bool>& muMask, + xAOD::BPhysHelper& vtx, + std::string counterName) const { + + for (size_t imu=0; imu < muCont->size(); ++imu) { + // only consider if not yet kept + if ( !muMask[imu] ) { + for (int ivm=0; ivm < vtx.nMuons(); ++ivm) { + if ( vtx.muon(ivm) == muCont->at(imu) ) { + muMask[imu] = true; + addToCounter(counterName); + } + } // for ivm + } + } // for imu + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + // Mark original muons for accepted calibrated muons + //-------------------------------------------------------------------------- + StatusCode + BmumuThinningTool::markOrigMuons(const xAOD::MuonContainer* muCont, + const xAOD::MuonContainer* cmuCont, + std::vector<bool>& muMask, + std::vector<bool>& cmuMask, + std::string counterName, + bool allowFastSync) const { + + bool fastSync = allowFastSync; + // go to slow sync if muon mask sizes do not match + if ( muMask.size() != cmuMask.size() ) { + fastSync = 
false; + addToCounter(counterName+"_maskSizeMismatches"); + } + + for (size_t imu=0; imu < cmuMask.size(); ++imu) { + if ( cmuMask[imu] ) { + if ( fastSync ) { + if ( !muMask[imu] ) { + muMask[imu] = true; + addToCounter(counterName); + } + } else { + const xAOD::Muon* cMuon = cmuCont->at(imu); + if ( cMuon != nullptr ) { + const xAOD::Muon* oMuon = + (const xAOD::Muon*)xAOD::getOriginalObject(*cMuon); + if ( oMuon != nullptr ) { + auto muit = std::find(muCont->begin(), muCont->end(), oMuon); + if ( muit == muCont->end() ) { + ATH_MSG_WARNING("Muon not found in " << m_muonContName + << " for calibrated muon index " << imu); + } else { + size_t x = std::distance(muCont->begin(), muit); + if ( !muMask.at(x) ) { + muMask.at(x) = true; + addToCounter(counterName); + } + } + } else { + ATH_MSG_WARNING("No orignal muon for calibrated muon index " + << imu); + } + } else { + ATH_MSG_WARNING("No calibrated muon for index " << imu); + } + } // if fastSync + } + } // for imu + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + // Mark calibrated muons for accepted (original) muons + //-------------------------------------------------------------------------- + StatusCode + BmumuThinningTool::markCalibMuons(const xAOD::MuonContainer* muCont, + const xAOD::MuonContainer* cmuCont, + std::vector<bool>& muMask, + std::vector<bool>& cmuMask, + std::string counterName, + bool allowFastSync) const { + + bool fastSync = allowFastSync; + // go to slow sync if muon mask sizes do not match + if ( muMask.size() != cmuMask.size() ) { + fastSync = false; + addToCounter(counterName+"_maskSizeMismatches"); + } + + for (size_t imu=0; imu < muMask.size(); ++imu) { + if ( muMask[imu] ) { + if ( fastSync ) { + if ( !cmuMask[imu] ) { + cmuMask[imu] = true; + addToCounter(counterName); + } + } else { + const xAOD::Muon* oMuon = muCont->at(imu); + if ( oMuon != nullptr ) { + bool found = false; + for (size_t icmu = 0; icmu < 
cmuCont->size(); ++icmu) { + const xAOD::Muon* cMuon = cmuCont->at(icmu); + if ( cMuon != nullptr ) { + const xAOD::Muon* aMuon = + (const xAOD::Muon*)xAOD::getOriginalObject(*cMuon); + if ( aMuon == oMuon ) { + found = true; + if ( !cmuMask.at(icmu) ) { + cmuMask.at(icmu) = true; + addToCounter(counterName); + } + } + } else { + ATH_MSG_WARNING("No calibrated muon for calibrated " + << "muon index " << icmu); + } + } // for icmu + if ( !found ) { + ATH_MSG_WARNING("No calibrated muon found for orignal " + << "muon index " << imu); + } + } else { + ATH_MSG_WARNING("No (original) muon for index " << imu); + } + } // if fastSync + } + } // for imu + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + // Mark ID tracks of selected (original or calibrated) muons + //-------------------------------------------------------------------------- + StatusCode + BmumuThinningTool::markTrksForSelMuons(const xAOD::TrackParticleContainer* + trkPartCont, + std::vector<bool>& trkMask, + const xAOD::MuonContainer* muCont, + std::vector<bool>& muMask, + std::string counterName) const { + + for (size_t itrk=0; itrk < trkPartCont->size(); ++itrk) { + if ( trkMask[itrk] ) continue; + const xAOD::TrackParticle* tp = trkPartCont->at(itrk); + if ( tp == nullptr ) continue; + for (size_t imu=0; imu < muCont->size(); ++imu) { + if ( muMask[imu] ) { + const xAOD::Muon* muon = muCont->at(imu); + if ( muon != nullptr ) { + const xAOD::TrackParticle* mutp = + muon->trackParticle(xAOD::Muon::InnerDetectorTrackParticle); + if ( mutp == tp ) { + trkMask[itrk] = true; + addToCounter(counterName); + } + } + } + } // for imu + } // for itrk + return StatusCode::SUCCESS; + } + //-------------------------------------------------------------------------- + // Mark muons for selected ID tracks + //-------------------------------------------------------------------------- + StatusCode + BmumuThinningTool::markMuonsForSelTracks(const 
xAOD::TrackParticleContainer* + trkPartCont, + std::vector<bool>& trkMask, + const xAOD::MuonContainer* muCont, + std::vector<bool>& muMask, + std::string counterName) const { + + for (size_t imu=0; imu < muCont->size(); ++imu) { + if ( muMask[imu] ) continue; + const xAOD::Muon* muon = muCont->at(imu); + if ( muon == nullptr ) continue; + const xAOD::TrackParticle* mutp = + muon->trackParticle(xAOD::Muon::InnerDetectorTrackParticle); + if ( mutp == nullptr) continue; + auto tpit = std::find(trkPartCont->begin(), trkPartCont->end(), mutp); + if ( tpit == trkPartCont->end() ) { + ATH_MSG_WARNING("Muon track not found in " << m_trkPartContName + << " for counter " << counterName); + addToCounter(counterName+"_missingTracksForMuons"); + continue; + } + size_t x = std::distance(trkPartCont->begin(), tpit); + if ( trkMask.at(x) ) { + muMask[imu] = true; + addToCounter(counterName); + } + } // for imu + return StatusCode::SUCCESS; + } + + //-------------------------------------------------------------------------- + // Check two masks for consistency + //-------------------------------------------------------------------------- + std::string + BmumuThinningTool::checkMaskConsistency(const std::vector<bool>& mask1, + const std::vector<bool>& mask2, + const std::string name1, + const std::string name2, + const std::string header) const { + + bool sizesMatch = (mask1.size() == mask2.size()); + int nTrueMask1 = std::accumulate(mask1.begin(), mask1.end(), 0); + int nTrueMask2 = std::accumulate(mask2.begin(), mask2.end(), 0); + + std::string basecname = name1+"_"+name2; + + int nEntryMismatches(0); + int nMoreTrueMask1(0); + int nMoreTrueMask2(0); + for (size_t i=0; i < std::min(mask1.size(), mask2.size()); ++i) { + if ( mask1[i] != mask2[i] ) nEntryMismatches++; + if ( mask1[i] && !mask2[i] ) nMoreTrueMask1++; + if ( !mask1[i] && mask2[i] ) nMoreTrueMask2++; + } + + std::string str(header); + if ( str.length() > 0 ) str += "\n"; + if ( sizesMatch && nTrueMask1 == nTrueMask2 
&& nEntryMismatches == 0 ) { + str += "Masks match OK: "+name1+" ("+mask1.size()+") : " + + name2+" ("+mask2.size()+")"; + addToCounter(basecname+"_matchedOK"); + } else { + str += "Masks do NOT match: "+name1+" ("+mask1.size()+") : " + + name2+" ("+mask2.size()+")"; + str += "\nnTrueMask1: "+std::to_string(nTrueMask1) + +", nTrueMask2: "+std::to_string(nTrueMask2); + str += "\nnEntryMismatches: "+std::to_string(nEntryMismatches) + +"nMoreTrueMas1: "+std::to_string(nMoreTrueMask1) + +", nMoreTrueMask2: "+std::to_string(nMoreTrueMask2); + addToCounter(basecname+"_NOTmatched"); + if (!sizesMatch) addToCounter(basecname+"_sizeMismatch"); + addToCounter(basecname+"_nEntryMismatches", nEntryMismatches); + addToCounter(basecname+"_nMoreTrueMask1", nMoreTrueMask1); + addToCounter(basecname+"_nMoreTrueMask2", nMoreTrueMask2); + } + return str; + } + //-------------------------------------------------------------------------- + // Dump a vector<string> to string. + //-------------------------------------------------------------------------- + std::string BmumuThinningTool::dumpVS(const std::vector<std::string>& vs, + const std::string header, + size_t nBlanks) const { + std::string str(header); + for (const std::string& s : vs) { + if ( str.length() > 0 ) str += "\n"; + str += std::string(nBlanks, ' ') + s; + } // for s + + return str; + } + //-------------------------------------------------------------------------- + // Wrap string at line breaks and print with appropriate message level + //-------------------------------------------------------------------------- + void BmumuThinningTool::logWrappedMsg(const std::string& str, + const MSG::Level lvl) const { + std::istringstream isstr(str); + std::string s; + while (std::getline(isstr, s)) { + msg(lvl) << s << endreq; + } + } + //-------------------------------------------------------------------------- + +} // namespace DerivationFramework diff --git 
/*
  Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
*/

/**
 * @file   Bmumu_metadata.cxx
 * @author Wolfgang Walkowiak <wolfgang.walkowiak@cern.ch>
 *
 * Records all BPHY8 (B->mumu) job configuration values as metadata
 * properties via the BPhysMetadataBase recordProperty*() helpers.
 */

#include "DerivationFrameworkBPhys/Bmumu_metadata.h"

namespace DerivationFramework {

  //--------------------------------------------------------------------------
  // Constructor: registers every configurable property with its default.
  // recordProperty{I,B,S,D} = int/bool/string/double;
  // recordPropertyV{I,S,D,L,B} = the corresponding vector types.
  //--------------------------------------------------------------------------
  Bmumu_metadata::Bmumu_metadata(const std::string& t,
                                 const std::string& n,
                                 const IInterface*  p) :
    AthAlgTool(t,n,p), BPhysMetadataBase(t,n,p) {

    // configuration defaults etc.
    recordPropertyI("verbose", 0);
    recordPropertyB("isSimulation", false);
    recordPropertyS("projectTag", "__NONE__");
    recordPropertyB("isRelease21" , true);
    recordPropertyS("mcCampaign", "__NONE__");
    recordPropertyS("triggerStream", "__NONE__");

    // MC dataset number lists
    recordPropertyVI("mcBsmumu"          , {});
    recordPropertyVI("mcBplusJpsiKplus"  , {});
    recordPropertyVI("mcBsJpsiPhi"       , {});
    recordPropertyVI("mcBplusJpsiPiplus", {});
    recordPropertyVI("mcBhh"             , {});

    // MC datasets without trigger information
    recordPropertyVI("mcNoTrigger"  , {});

    // special data runs
    recordPropertyVI("specDataRuns", {});

    // special MC channels
    recordPropertyVI("specMcChannels", {});

    // blind search
    recordPropertyB("doBmumuBlinding"       , true);
    recordPropertyB("doCutBlinded"          , true);
    recordPropertyB("blindOnlyAllMuonsTight", true);
    recordPropertyS("BlindingKey"           , "");
    recordPropertyS("BlindedVars"           , "");
    recordPropertyS("BlindingFlag"          , "");

    // include trigger
    recordPropertyB("doTriggerInfo"     , true);

    // include soft B tagging vertex containers
    recordPropertyB("doAddSoftBVertices" , true);

    // trigger navigation thinning
    recordPropertyB("doTrigNavThinning", true);
    recordPropertyVS("TrigNavThinList" , {});

    // wide mumu mass range
    recordPropertyB("doUseWideMuMuMassRange", false);

    // use mass calculated using the combined muon track information in cuts?
    recordPropertyB("useMuCalcMass", true);

    // use calibrated muons instead of the original ones
    recordPropertyI("useCalibratedMuons", 0);

    // adjust primary track to muon kinematics for MUCALC mass
    recordPropertyB("adjustMucalcKinematics", false);

    // add MUCALC mass from non-modified muons for debugging
    recordPropertyB("addMucalcMassForDebug", false);

    // primary vertex types to consider for MinChi2ToAnyPV
    recordPropertyVI("MinChi2ToAnyPVTypes", {1, 3});

    // JpsiFinder: muAndMu or TrackAndTrack option?
    recordPropertyB("JfTwoMuons" , true );
    recordPropertyB("JfTwoTracks", false);

    // JpsiFinder: TrackThresholdPt
    recordPropertyD("JfTrackThresholdPt", 0.);

    // muon calibration and smearing tool configuration
    recordPropertyS("McstYear"                 , "Data16");
    recordPropertyS("McstRelease"              , "_NONE_");
    recordPropertyB("McstStatComb"             , true);
    recordPropertyB("McstSagittaCorr"          , true);
    recordPropertyS("McstSagittaRelease"       , "_NONE_");
    recordPropertyB("McstDoSagittaMCDistortion", false);
    recordPropertyB("McstSagittaCorrPhaseSpace", true);

    // muon collections
    recordPropertyS("MuonCollection"     , "Muons");
    recordPropertyS("CalMuonCollection"  , "Muons");
    recordPropertyS("UsedMuonCollection" , "Muons");
    recordPropertyVS("AllMuonCollections", {}     );

    // Global mass values (in MeV, from PDG 2015)
    recordPropertyD("GlobalMuonMass" , 105.6584);
    recordPropertyD("GlobalPionMass" , 139.57061);
    recordPropertyD("GlobalKaonMass" , 493.677  );
    recordPropertyD("GlobalJpsiMass" , 3096.92  );
    recordPropertyD("GlobalBplusMass", 5279.29  );
    recordPropertyD("GlobalB0Mass"   , 5279.61  );
    recordPropertyD("GlobalBsMass"   , 5366.79  );

    // mass ranges
    recordPropertyD("GlobalBMassUpperCut"     , 7000.);
    recordPropertyD("GlobalBMassLowerCut"     , 3500.);
    recordPropertyD("GlobalDiMuonMassUpperCut", 7000.);
    recordPropertyD("GlobalDiMuonMassLowerCut", 2000.);
    recordPropertyD("GlobalJpsiMassUpperCut"  , 7000.);
    recordPropertyD("GlobalJpsiMassLowerCut"  , 2000.);
    // NOTE(review): "Upper" (5166.) < "Lower" (5526.) here -- either the
    // names refer to the sideband edges bounding the blinded region, or
    // the two values are swapped; confirm against the blinding convention.
    recordPropertyD("GlobalBlindUpperCut"     , 5166.);
    recordPropertyD("GlobalBlindLowerCut"     , 5526.);
    recordPropertyD("GlobalTrksMassUpperCut"  , 7500.);
    recordPropertyD("GlobalTrksMassLowerCut"  , 3000.);

    // Global chi2 cut for vertexing
    recordPropertyD("GlobalChi2CutBase", 15.0);
    // Different chi2 cuts for 2-, 3- and 4-prong vertices
    recordPropertyD("Chi2Cut2Prong"    , 30.0);
    recordPropertyD("Chi2Cut3Prong"    , 45.0);
    recordPropertyD("Chi2Cut4Prong"    , 60.0);

    // Cut values for kaon candidates
    recordPropertyD("GlobalKaonPtCut" , 1000.); // MeV
    recordPropertyD("GlobalKaonEtaCut", 2.5  );

    // MCP cuts for JpsiFinder
    recordPropertyB("useJpsiFinderMCPCuts", false);

    // reject muons in JpsiPlus1Track or JpsiPlus2Track finders
    recordPropertyS("GlobalMuonsUsedInJpsi", "NONE"); // turn off by default

    // run number
    recordPropertyI("runNumber", -1);

    // MC channel number
    recordPropertyI("mcChNumber", -1);

    // channels to be processed
    recordPropertyVS("doChannels", {});

    // vertex types to be done
    recordPropertyI("doVertexType", 7);

    // minimum number of tracks in PV considered for PV association
    recordPropertyI("minNTracksInPV", 0);

    // mode of minLogChi2ToAnyPV calculation
    recordPropertyI("AddMinChi2ToAnyPVMode", 0);

    // record 3-dimensional proper time in addition
    recordPropertyB("do3dProperTime", false);

    // thinning level
    recordPropertyI("thinLevel", 0);

    // selection expression
    recordPropertyS("SelExpression", "");

    // MC truth decay parents
    recordPropertyVI("TruthDecayParents", {});

    // vertex isolation properties
    recordPropertyVS("IsoTrackCategoryName" , {});
    recordPropertyVS("IsoTrackCutLevel"     , {});
    recordPropertyVD("IsoTrackPtCut"        , {});
    recordPropertyVD("IsoTrackEtaCut"       , {});
    recordPropertyVI("IsoTrackPixelHits"    , {});
    recordPropertyVI("IsoTrackSCTHits"      , {});
    recordPropertyVI("IsoTrackbLayerHits"   , {});
    recordPropertyVI("IsoTrackIBLHits"      , {});
    recordPropertyVD("IsolationConeSizes"   , {});
    recordPropertyVD("IsoTrkImpLogChi2Max"  , {});
    recordPropertyVI("IsoDoTrkImpLogChi2Cut", {});
    recordPropertyVL("useIsoTrackTypes"     , {});
    recordPropertyB("IsoUseOptimizedAlgo"   , true);
    recordPropertyS("IsoTvaWorkingPoint"    , "Nominal");
    recordPropertyVS("IsoIncludes"          , {});


    // muon isolation properties (muons of B candidate)
    recordPropertyVS("MuIsoTrackCategoryName" , {});
    recordPropertyVS("MuIsoTrackCutLevel"     , {});
    recordPropertyVD("MuIsoTrackPtCut"        , {});
    recordPropertyVD("MuIsoTrackEtaCut"       , {});
    recordPropertyVI("MuIsoTrackPixelHits"    , {});
    recordPropertyVI("MuIsoTrackSCTHits"      , {});
    recordPropertyVI("MuIsoTrackbLayerHits"   , {});
    recordPropertyVI("MuIsoTrackIBLHits"      , {});
    recordPropertyVD("MuIsolationConeSizes"   , {});
    recordPropertyVD("MuIsoTrkImpLogChi2Max"  , {});
    recordPropertyVI("MuIsoDoTrkImpLogChi2Cut", {});
    recordPropertyVL("useMuIsoTrackTypes"     , {});
    recordPropertyS("MuIsoTvaWorkingPoint"    , "Nominal");
    recordPropertyVS("MuIsoIncludes"          , {});

    // closest track properties
    recordPropertyVS("CloseTrackCategoryName" , {});
    recordPropertyVS("CloseTrackCutLevel"     , {});
    recordPropertyVD("CloseTrackPtCut"        , {});
    recordPropertyVD("CloseTrackEtaCut"       , {});
    recordPropertyVI("CloseTrackPixelHits"    , {});
    recordPropertyVI("CloseTrackSCTHits"      , {});
    recordPropertyVI("CloseTrackbLayerHits"   , {});
    recordPropertyVI("CloseTrackIBLHits"      , {});
    recordPropertyVL("useCloseTrackTypes"     , {});
    recordPropertyVS("CloseTrackChi2SetName"  , {});
    recordPropertyVI("CloseTrackCorrChi2"     , {});
    recordPropertyVB("CloseTrackMinDCAin3D"   , {});
    recordPropertyVD("CloseTrackMaxLogChi2"   , {});
    recordPropertyVD("NCloseTrackMaxLogChi2"  , {});
    recordPropertyS("CloseTrackTvaWorkingPoint", "Nominal");
    recordPropertyVS("CloseTrackIncludes"     , {});

    // debug track types for isolation and closest track tools
    recordPropertyI("DebugTrackTypes", 0);

    // track-to-vertex association check tool
    recordPropertyI("DebugTrkToVtxMaxEvents" , 0);

    // output containers and branch prefixes
    // (mostly used for isolation tools)
    recordPropertyS("TrkPartContName", "InDetTrackParticles");
    recordPropertyS("PVContName"     , "PrimaryVertices");
    recordPropertyVS("VtxContNames"  , {} );
    recordPropertyVS("RefPVContNames", {} );
    recordPropertyVS("BranchPrefixes", {} );

  }
  //--------------------------------------------------------------------------
} // namespace

// ==== file boundary: Bmumu_reco_mumu.cxx (next new file in this patch) ====

/*
  Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
*/

/////////////////////////////////////////////////////////////////
// Bmumu_reco_mumu.cxx
///////////////////////////////////////////////////////////////////
//
// Author : Wolfgang Walkowiak <Wolfgang.Walkowiak@cern.ch.>
// Original author (Reco_mumu):
//          Daniel Scheirich <daniel.scheirich@cern.ch>
//
// Changes:
// Basic dimuon reconstruction for the derivation framework.
// This class inherits from CfAthAlgTool instead of AthAlgTool in order
// to have access to the CutFlowSvc instance.
//
//============================================================================
//

#include "DerivationFrameworkBPhys/Bmumu_reco_mumu.h"

#include "xAODTracking/VertexContainer.h"
#include "xAODTracking/VertexAuxContainer.h"
#include "TrkVertexAnalysisUtils/V0Tools.h"
#include "BeamSpotConditionsData/BeamSpotData.h"
#include "DerivationFrameworkBPhys/BPhysPVTools.h"


namespace DerivationFramework {

  // Constructor: declares the tool handles and job properties.
  Bmumu_reco_mumu::Bmumu_reco_mumu(const std::string& t,
                                   const std::string& n,
                                   const IInterface* p) :
    CfAthAlgTool(t,n,p),
    m_v0Tools("Trk::V0Tools"),
    m_jpsiFinder("Analysis::JpsiFinder"),
    m_pvRefitter("Analysis::PrimaryVertexRefitter")
  {
    declareInterface<DerivationFramework::IAugmentationTool>(this);

    // Declare tools
    declareProperty("V0Tools"   , m_v0Tools);
    declareProperty("JpsiFinder", m_jpsiFinder);
    declareProperty("PVRefitter", m_pvRefitter);

    // Declare user-defined properties
    declareProperty("OutputVtxContainerName", m_outputVtxContainerName = "OniaCandidates");
    declareProperty("PVContainerName"       , m_pvContainerName        = "PrimaryVertices");
    declareProperty("RefPVContainerName"    , m_refPVContainerName     = "RefittedPrimaryVertices");
    declareProperty("RefitPV"               , m_refitPV                = false);
    declareProperty("MaxPVrefit"            , m_PV_max                 = 1);
    declareProperty("DoVertexType"          , m_DoVertexType           = 1);
    // minimum number of tracks for PV to be considered for PV association
    declareProperty("MinNTracksInPV"        , m_PV_minNTracks          = 0);
    declareProperty("Do3d"                  , m_do3d                   = false);
  }

  // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *

  // Retrieve all tool handles and initialize the beam-spot key.
  StatusCode Bmumu_reco_mumu::initialize()
  {

    ATH_MSG_DEBUG("in initialize()");

    // retrieve V0 tools
    CHECK( m_v0Tools.retrieve() );

    // get the JpsiFinder tool
    CHECK( m_jpsiFinder.retrieve() );

    // get the PrimaryVertexRefitter tool
    CHECK( m_pvRefitter.retrieve() );

    // Get the beam spot service
    CHECK( m_beamSpotKey.initialize() );

    return StatusCode::SUCCESS;

  }

  // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *

  StatusCode Bmumu_reco_mumu::finalize()
  {
    // everything all right
    return StatusCode::SUCCESS;
  }

  // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *

  // Per-event augmentation: runs the JpsiFinder, associates (optionally
  // refitted) primary vertices to the candidates and records the output
  // containers in StoreGate.
  StatusCode Bmumu_reco_mumu::addBranches() const
  {
    // Jpsi container and its auxilliary store
    xAOD::VertexContainer*    vtxContainer = NULL;
    xAOD::VertexAuxContainer* vtxAuxContainer = NULL;

    //----------------------------------------------------
    // call Jpsi finder
    //----------------------------------------------------
    if( !m_jpsiFinder->performSearch(vtxContainer, vtxAuxContainer).isSuccess() ) {
      ATH_MSG_FATAL("Jpsi finder (" << m_jpsiFinder << ") failed.");
      return StatusCode::FAILURE;
    }

    //----------------------------------------------------
    // retrieve primary vertices
    //----------------------------------------------------
    const xAOD::VertexContainer* pvContainer = NULL;
    CHECK( evtStore()->retrieve(pvContainer, m_pvContainerName) );

    //----------------------------------------------------
    // Try to retrieve refitted primary vertices
    //----------------------------------------------------
    bool refPvExists = false;
    xAOD::VertexContainer*    refPvContainer = NULL;
    xAOD::VertexAuxContainer* refPvAuxContainer = NULL;
    if(m_refitPV) {
      if(evtStore()->contains<xAOD::VertexContainer>(m_refPVContainerName)) {
        // refitted PV container exists. Get it from the store gate
        CHECK( evtStore()->retrieve(refPvContainer, m_refPVContainerName) );
        CHECK( evtStore()->retrieve(refPvAuxContainer, m_refPVContainerName+"Aux.") );
        refPvExists = true;
      } else {
        // refitted PV container does not exist. Create a new one.
        refPvContainer = new xAOD::VertexContainer;
        refPvAuxContainer = new xAOD::VertexAuxContainer;
        refPvContainer->setStore(refPvAuxContainer);
      }
    }

    // Give the helper class the ptr to v0tools and beamSpotsSvc to use
    SG::ReadCondHandle<InDet::BeamSpotData> beamSpotHandle { m_beamSpotKey };
    if(not beamSpotHandle.isValid()) ATH_MSG_ERROR("Cannot Retrieve " << m_beamSpotKey.key() );
    BPhysPVTools helper(&(*m_v0Tools), beamSpotHandle.cptr());
    helper.SetMinNTracksInPV(m_PV_minNTracks);
    helper.SetSave3d(m_do3d);

    if(m_refitPV){
      if(vtxContainer->size() >0){
        StatusCode SC = helper.FillCandwithRefittedVertices(vtxContainer,  pvContainer, refPvContainer, &(*m_pvRefitter) , m_PV_max, m_DoVertexType);
        if(SC.isFailure()){
          ATH_MSG_FATAL("refitting failed - check the vertices you passed");
          return SC;
        }
      }
    }else{
      if(vtxContainer->size() >0)CHECK(helper.FillCandExistingVertices(vtxContainer, pvContainer, m_DoVertexType));
    }


    //----------------------------------------------------
    // save in the StoreGate
    //----------------------------------------------------
    // NOTE(review): if the container already exists in StoreGate, the
    // locally created vtxContainer/vtxAuxContainer are not recorded and
    // appear to be leaked -- confirm ownership semantics with JpsiFinder.
    if (!evtStore()->contains<xAOD::VertexContainer>(m_outputVtxContainerName))
      CHECK(evtStore()->record(vtxContainer, m_outputVtxContainerName));

    if (!evtStore()->contains<xAOD::VertexAuxContainer>(m_outputVtxContainerName+"Aux."))
      CHECK(evtStore()->record(vtxAuxContainer, m_outputVtxContainerName+"Aux."));

    if(!refPvExists && m_refitPV) {
      CHECK(evtStore()->record(refPvContainer   , m_refPVContainerName));
      CHECK(evtStore()->record(refPvAuxContainer, m_refPVContainerName+"Aux."));
    }

    // add counter for number of events seen
    addEvent("dimuEvents");
    // add counter for the number of events with >= 1 reco'd vertices
    if ( vtxContainer->size() > 0 ) {
      addEvent("dimuWithVertexCand");
    }
    // add counter for the number of vertices
    addToCounter("dimuNumVertices", vtxContainer->size());

    return StatusCode::SUCCESS;
  }
}
a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/Cascade3Plus1.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/Cascade3Plus1.cxx new file mode 100644 index 0000000000000000000000000000000000000000..fbc231d3666cf2cb79d4326a1c5ae23a40ec2c9c --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/Cascade3Plus1.cxx @@ -0,0 +1,524 @@ +/* + Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration +*/ +///////////////////////////////////////////////////////////////// +// Cascade3Plus1.cxx, (c) ATLAS Detector software +///////////////////////////////////////////////////////////////// +#include "DerivationFrameworkBPhys/Cascade3Plus1.h" +#include "xAODTracking/TrackParticle.h" +#include "xAODTracking/TrackParticleContainer.h" +#include "TrkToolInterfaces/ITrackSelectorTool.h" +#include "TrkVKalVrtFitter/TrkVKalVrtFitter.h" +#include "xAODTracking/VertexContainer.h" +#include "xAODTracking/VertexAuxContainer.h" +#include "DerivationFrameworkBPhys/BPhysPVCascadeTools.h" +#include "TrkVertexAnalysisUtils/V0Tools.h" +#include "DerivationFrameworkBPhys/BPhysPVTools.h" +#include "xAODBPhys/BPhysHelper.h" +#include "Math/Vector4D.h" + +namespace DerivationFramework { +typedef ElementLink<xAOD::VertexContainer> VertexLink; +typedef std::vector<VertexLink> VertexLinkVector; +typedef std::vector<const xAOD::TrackParticle*> TrackBag; +/// Base 4 Momentum type for TrackParticle +typedef ROOT::Math::LorentzVector<ROOT::Math::PtEtaPhiM4D<double> > GenVecFourMom_t; + +template<size_t N> +struct Candidate { + std::array<const xAOD::TrackParticle*, N> tracks; +}; + +struct VertexCand : Candidate<4> { + std::unique_ptr<Trk::VxCascadeInfo> cascVertex; +}; + +template<size_t N> +GenVecFourMom_t SumVector(const std::array<GenVecFourMom_t, N> &vectors) { + GenVecFourMom_t total = vectors[0]; + for(size_t i =1; i<N; i++) total+= vectors[i]; + return total; +} + +Cascade3Plus1::Cascade3Plus1(const std::string& t, 
const std::string& n, const IInterface* p) : AthAlgTool(t,n,p), + m_trkSelector("InDet::TrackSelectorTool"), + m_iVertexFitter("Trk::TrkVKalVrtFitter"), + m_V0Tools("Trk::V0Tools"), + m_CascadeTools("DerivationFramework::CascadeTools"), + m_pvRefitter("Analysis::PrimaryVertexRefitter"), + m_cascadeOutputsKeys{ "CascadeVtx1", "CascadeVtx2" } +{ + + declareProperty("TrackMassHyp", m_trackMasses); + declareProperty("CascadeVertexCollections", m_cascadeOutputsKeys); + declareProperty("TwoTrackMassMin", m_2trackmassMin); + declareProperty("TwoTrackMassMax", m_2trackmassMax); + declareProperty("ThreeTrackMassMin", m_3trackmassMin); + declareProperty("ThreeTrackMassMax", m_3trackmassMax); + declareProperty("FourTrackMassMin", m_4trackmassMin); + declareProperty("FourTrackMassMax", m_4trackmassMax); + declareProperty("TwoTracksMass", m_2tracksMass); + declareProperty("ThreeTracksMass", m_3tracksMass); + declareProperty("FourTracksMass", m_4tracksMass); + declareProperty("TrackSelectorTool",m_trkSelector); + declareProperty("CascadeTools", m_CascadeTools); + declareProperty("MinNTracksInPV", m_PV_minNTracks = 0); + declareProperty("HypothesisName", m_hypoName = "Bs"); + declareProperty("Track3Name", m_3TrackName = "Ds"); + declareProperty("MaxnPV", m_PV_max = 999); + declareProperty("DoVertexType", m_DoVertexType = 7); + declareProperty("RefitPV", m_refitPV = true); + declareProperty("RefPVContainerName", m_refPVContainerName = "RefittedPrimaryVertices"); + declareProperty("PVRefitter", m_pvRefitter); + declareProperty("TrkVertexFitterTool", m_iVertexFitter); + declareProperty("PVContainerName", m_VxPrimaryCandidateName); + declareProperty("ThreeTrackMassConstraint", m_3TrackMassConstraint); + declareProperty("TwoTrackMassConstraint", m_2TrackMassConstraint); + declareProperty("Chi2NDFCut", m_Chi2NDFCut); + + declareProperty("FourTrackMassFinalMin", m_4trackmassFinalMin); + declareProperty("FourTrackMassFinalMax", m_4trackmassFinalMax); + declareProperty("FourTrackTauCut", 
m_tauCut); + declareProperty("UseMuonsForTracks", m_requireMuonsOnTrack); + declareProperty("ThreeVertexOutputContainer", m_3TrackVertexOutput); + declareProperty("VertexEstimator", m_vertexEstimator); + declareProperty("ThreeTrackChi2NDF", m_3TrackChi2NDFCut); + declareProperty("EliminateBad3Tracksfrom4Track", m_eliminateBad3Tracksfrom4Track); + declareProperty("CopyAllVertices", m_copyAllVertices); + declareProperty("PTCutPerTrack", m_ptCutPerTrack); + m_ptCutPerVertex.fill(0); + declareProperty("PTCutVertex1", m_ptCutPerVertex[0]); + declareProperty("PTCutVertex2", m_ptCutPerVertex[1]); + declareProperty("PTCutVertex3", m_ptCutPerVertex[2]); +} + +StatusCode Cascade3Plus1::initialize() { + if(m_trackMasses.size()!=4) { + ATH_MSG_ERROR("4 mass hypotheses must be provided"); + return StatusCode::FAILURE; + } + if(m_cascadeOutputsKeys.size() !=s_topoN) { + ATH_MSG_FATAL("Incorrect number of VtxContainers"); + return StatusCode::FAILURE; + } + // retrieving vertex Fitter + ATH_CHECK( m_iVertexFitter.retrieve()); + + // retrieving the Cascade tools + ATH_CHECK( m_CascadeTools.retrieve()); + + // Get the beam spot service + CHECK( m_beamSpotKey.initialize() ); + + ATH_CHECK(m_vertexEstimator.retrieve()); + if(m_eliminateBad3Tracksfrom4Track && m_3TrackChi2NDFCut<=0.0) { + ATH_MSG_FATAL("Invalid configuration"); + return StatusCode::FAILURE; + } + + if(m_ptCutPerTrack.size() == 1 || m_ptCutPerTrack.size() > 4){ + ATH_MSG_FATAL("Invalid configuration"); + return StatusCode::FAILURE; + } + if(m_ptCutPerTrack.size() >=2 && m_ptCutPerTrack[0] != m_ptCutPerTrack[1]){ + ATH_MSG_FATAL("Invalid configuration"); + return StatusCode::FAILURE; + } + m_muonTrackBit.reset(); + for(int i : m_requireMuonsOnTrack) { + if(i>=4) { + ATH_MSG_FATAL("Invalid configuration" << " muon track " << i); + return StatusCode::FAILURE; + } + m_muonTrackBit[i] = true; + } + m_requireMuonsOnTrack.clear(); + m_requireMuonsOnTrack.shrink_to_fit(); + + if(m_muonTrackBit[0] != m_muonTrackBit[1]) + { + 
ATH_MSG_FATAL("Invalid configuration" << " variable is " << m_muonTrackBit.to_string()); + return StatusCode::FAILURE; + } + + + return StatusCode::SUCCESS; +} + +Cascade3Plus1::~Cascade3Plus1() { } + + +const TrackBag& Cascade3Plus1::ApplyAdditionalCuts(const TrackBag& alltracks, const TrackBag& muonTracks, TrackBag& cuttracks, size_t track) const { + const TrackBag& tracks = m_muonTrackBit[track] ? muonTracks : alltracks; + if(track >= m_ptCutPerTrack.size()) return tracks; + double ptCut = m_ptCutPerTrack.at(track); + if(ptCut <=0.0) return tracks; + cuttracks.clear();//reset any previous selections + for(auto ptr : tracks){ + if(ptr->pt() > ptCut) cuttracks.push_back(ptr); + } + return cuttracks; +} + +StatusCode Cascade3Plus1::addBranches() const +{ + const xAOD::TrackParticleContainer *trackContainer(nullptr); + ATH_CHECK(evtStore()->retrieve(trackContainer, "InDetTrackParticles" )); + + //---------------------------------------------------- + // Try to retrieve refitted primary vertices + //---------------------------------------------------- + xAOD::VertexContainer* refPvContainer = nullptr; + xAOD::VertexAuxContainer* refPvAuxContainer = nullptr; + if (m_refitPV) { + if (evtStore()->contains<xAOD::VertexContainer>(m_refPVContainerName)) { + // refitted PV container exists. Get it from the store gate + ATH_CHECK(evtStore()->retrieve(refPvContainer, m_refPVContainerName )); + ATH_CHECK(evtStore()->retrieve(refPvAuxContainer, m_refPVContainerName + "Aux.")); + } else { + // refitted PV container does not exist. Create a new one. 
+ refPvContainer = new xAOD::VertexContainer; + refPvAuxContainer = new xAOD::VertexAuxContainer; + refPvContainer->setStore(refPvAuxContainer); + ATH_CHECK(evtStore()->record(refPvContainer, m_refPVContainerName)); + ATH_CHECK(evtStore()->record(refPvAuxContainer, m_refPVContainerName+"Aux.")); + } + } + + std::array<xAOD::VertexContainer*, s_topoN> Vtxwritehandles; + std::array<xAOD::VertexAuxContainer*, s_topoN> Vtxwritehandlesaux; + + for(int i =0; i<s_topoN; i++) { + Vtxwritehandles[i] = new xAOD::VertexContainer(); + Vtxwritehandlesaux[i] = new xAOD::VertexAuxContainer(); + Vtxwritehandles[i]->setStore(Vtxwritehandlesaux[i]); + ATH_CHECK(evtStore()->record(Vtxwritehandles[i], m_cascadeOutputsKeys[i] )); + ATH_CHECK(evtStore()->record(Vtxwritehandlesaux[i], m_cascadeOutputsKeys[i] + "Aux.")); + } + xAOD::VertexContainer *v3container = nullptr; + if(!m_3TrackVertexOutput.empty()) { + v3container = new xAOD::VertexContainer(); + auto vcontaineraux = new xAOD::VertexAuxContainer(); + v3container->setStore(vcontaineraux); + ATH_CHECK(evtStore()->record(v3container, m_3TrackVertexOutput )); + ATH_CHECK(evtStore()->record(vcontaineraux, m_3TrackVertexOutput + "Aux.")); + } + //---------------------------------------------------- + // retrieve primary vertices + //---------------------------------------------------- + + const xAOD::Vertex * primaryVertex(nullptr); + const xAOD::VertexContainer *pvContainer(nullptr); + ATH_CHECK(evtStore()->retrieve(pvContainer, m_VxPrimaryCandidateName)); + + if (pvContainer->size()==0) { + ATH_MSG_WARNING("You have no primary vertices: " << pvContainer->size()); + return StatusCode::RECOVERABLE; + } else { + primaryVertex = (*pvContainer)[0]; + } + + + TrackBag theIDTracksAfterSelection; + TrackBag theIDTracksAfterAdditionalSelection; + for(auto x : *trackContainer) { + if ( m_trkSelector->decision(*x, nullptr) ) theIDTracksAfterSelection.push_back(x); + } + ATH_MSG_DEBUG("Found good tracks N="<<theIDTracksAfterSelection.size()); + 
TrackBag theMuonsAfterSelection; + if(m_muonTrackBit.any()) { + const xAOD::MuonContainer* importedMuonCollection(0); + ATH_CHECK(evtStore()->retrieve(importedMuonCollection, "Muons")); + for(auto muon : *importedMuonCollection) { + if(muon->muonType() == xAOD::Muon::SiliconAssociatedForwardMuon) continue; + auto ptr = muon->trackParticle( xAOD::Muon::InnerDetectorTrackParticle ); + if(ptr) theMuonsAfterSelection.push_back(ptr); + } + } + + std::vector<Candidate<2>> Initialcandidates; + { //Isolate scope for safety + const TrackBag &Tracksfor2Vertex = ApplyAdditionalCuts(theIDTracksAfterSelection, theMuonsAfterSelection, theIDTracksAfterAdditionalSelection, 0); + for(auto track1itr = Tracksfor2Vertex.cbegin(); track1itr != Tracksfor2Vertex.cend(); ++track1itr) { + Candidate<2> cand; + std::array<GenVecFourMom_t, 2> vectors; + cand.tracks[0] = *track1itr; + vectors[0].SetCoordinates(cand.tracks[0]->pt(), cand.tracks[0]->eta(), cand.tracks[0]->phi(), m_trackMasses[0]); + for(auto track2itr = track1itr+1; track2itr != Tracksfor2Vertex.cend(); ++track2itr) { + cand.tracks[1] = *track2itr; + if(cand.tracks[0]->qOverP() * cand.tracks[1]->qOverP() >= 0.) 
continue; //Skip same signed + vectors[1].SetCoordinates(cand.tracks[1]->pt(), cand.tracks[1]->eta(), cand.tracks[1]->phi(), m_trackMasses[1]); + GenVecFourMom_t pair = SumVector(vectors); + if(pair.Pt() < m_ptCutPerVertex[0]) continue; + if(pair.M() >= m_2trackmassMin && pair.M() < m_2trackmassMax) { + ATH_MSG_VERBOSE("2 Track candidate found: " << pair.M() << " Within " << m_2trackmassMin << " and " << m_2trackmassMax); + Initialcandidates.push_back(cand); + } + } + } + } + ATH_MSG_DEBUG("2 Track candidates found: " << Initialcandidates.size()); + if(Initialcandidates.empty()) { + //No work to do Leave method early + return StatusCode::SUCCESS; + } + std::vector<Candidate<3>> Candidates3; + + {//isolate scope + const TrackBag &Tracksfor3Vertex = ApplyAdditionalCuts(theIDTracksAfterSelection, theMuonsAfterSelection, theIDTracksAfterAdditionalSelection, 2); + for(auto &c : Initialcandidates) { + Candidate<3> c3; + std::copy(c.tracks.begin(), c.tracks.end(), c3.tracks.begin()); + std::array<GenVecFourMom_t, 3> vectors; + vectors[0].SetCoordinates(c.tracks[0]->pt(), c.tracks[0]->eta(), c.tracks[0]->phi(), m_trackMasses[0]); + vectors[1].SetCoordinates(c.tracks[1]->pt(), c.tracks[1]->eta(), c.tracks[1]->phi(), m_trackMasses[1]); + for(auto track3itr = Tracksfor3Vertex.cbegin(); track3itr != Tracksfor3Vertex.cend(); ++track3itr) { + if(std::find(c3.tracks.begin(), c3.tracks.end(), *track3itr) != c3.tracks.end()) continue; + c3.tracks[2] = *track3itr; + vectors[2].SetCoordinates(c3.tracks[2]->pt(), c3.tracks[2]->eta(), c3.tracks[2]->phi(), m_trackMasses[2]); + GenVecFourMom_t tripple = SumVector(vectors); + if(tripple.Pt() < m_ptCutPerVertex[1]) continue; + if(tripple.M() >= m_3trackmassMin && tripple.M() < m_3trackmassMax) { + ATH_MSG_VERBOSE("3 Track candidate found: " << tripple.M() << " Within " << m_3trackmassMin << " and " << m_3trackmassMax); + Candidates3.push_back(c3); + } + } + } + } + Initialcandidates.clear(); + Initialcandidates.shrink_to_fit(); + + 
ATH_MSG_DEBUG("3 Track candidates found: " << Candidates3.size()); + std::map<const std::array<const xAOD::TrackParticle*, 3>, xAOD::Vertex* > threeVertexMap; + + if(!m_3TrackVertexOutput.empty()) { + SG::ReadCondHandle<InDet::BeamSpotData> beamSpotHandle { m_beamSpotKey }; + if(not beamSpotHandle.isValid()) ATH_MSG_ERROR("Cannot Retrieve " << m_beamSpotKey.key() ); + BPhysPVTools helper(&(*m_V0Tools), beamSpotHandle.cptr()); + helper.SetMinNTracksInPV(0); + helper.SetSave3d(false); + std::vector<const xAOD::TrackParticle*> tracksforfit; + std::vector<Candidate<3>> Candidates3PassCuts; + if(m_eliminateBad3Tracksfrom4Track) Candidates3PassCuts.reserve(Candidates3.size()); + for(const auto &c3 : Candidates3) { + tracksforfit.assign(c3.tracks.begin(), c3.tracks.end()); + auto v = StandardFit(tracksforfit, trackContainer); + if(v==nullptr) { + ATH_MSG_DEBUG("3Vertex fit returned null"); + continue; + } + if(m_3TrackChi2NDFCut > 0. && v->chiSquared() / v->numberDoF() > m_3TrackChi2NDFCut) { + ATH_MSG_DEBUG("Rejecting 3 track vertex because Chi " << v->chiSquared() / v->numberDoF() << " > " << m_3TrackChi2NDFCut); + continue; + } + if(m_eliminateBad3Tracksfrom4Track) Candidates3PassCuts.push_back(c3); + threeVertexMap[c3.tracks] = v.get(); + xAOD::BPhysHelper bHelper(v.get());//"get" does not "release" still automatically deleted + bHelper.setRefTrks(); + v3container->push_back(v.release()); + } + + if(v3container->size() >0) ATH_CHECK(helper.FillCandExistingVertices(v3container, pvContainer, 1)); + + if(m_eliminateBad3Tracksfrom4Track) { + ATH_MSG_DEBUG("Swapping container to N = "<< Candidates3PassCuts.size() << " from " << Candidates3.size()); + Candidates3PassCuts.swap(Candidates3);//Swap old container with one that passed cuts + } + + } + std::vector<VertexCand> Candidates4; + {//isolate scope + const TrackBag &Tracksfor4Vertex = ApplyAdditionalCuts(theIDTracksAfterSelection, theMuonsAfterSelection, theIDTracksAfterAdditionalSelection, 3); + for(auto &c : 
Candidates3) { + VertexCand c4; + std::copy(c.tracks.begin(), c.tracks.end(), c4.tracks.begin()); + std::array<GenVecFourMom_t, 4> vectors; + vectors[0].SetCoordinates(c.tracks[0]->pt(), c.tracks[0]->eta(), c.tracks[0]->phi(), m_trackMasses[0]); + vectors[1].SetCoordinates(c.tracks[1]->pt(), c.tracks[1]->eta(), c.tracks[1]->phi(), m_trackMasses[1]); + vectors[2].SetCoordinates(c.tracks[2]->pt(), c.tracks[2]->eta(), c.tracks[2]->phi(), m_trackMasses[2]); + for(auto track4itr = Tracksfor4Vertex.cbegin(); track4itr != Tracksfor4Vertex.cend(); ++track4itr) { + if(std::find(c4.tracks.begin(), c4.tracks.end(), *track4itr) != c4.tracks.end()) continue; + c4.tracks[3] = *track4itr; + if(c4.tracks[2]->qOverP() * c4.tracks[3]->qOverP() >= 0.) continue; //Skip same signed + vectors[3].SetCoordinates(c4.tracks[3]->pt(), c4.tracks[3]->eta(), c4.tracks[3]->phi(), m_trackMasses[3]); + GenVecFourMom_t fourtrack = SumVector(vectors); + if(fourtrack.Pt() < m_ptCutPerVertex[2]) continue; + if(fourtrack.M() >= m_4trackmassMin && fourtrack.M() < m_4trackmassMax) { + ATH_MSG_VERBOSE("3 Track candidate found: " << fourtrack.M() << " Within " << m_4trackmassMin << " and " << m_4trackmassMax); + Candidates4.push_back(std::move(c4)); + } + } + } + } + Candidates3.clear(); + Candidates3.shrink_to_fit(); + + ATH_MSG_DEBUG("4 Track candidates found: " << Candidates4.size() << " running cascade"); + for(auto &c : Candidates4) { + c.cascVertex = CascadeFit(c.tracks); + if(c.cascVertex!=nullptr) { + c.cascVertex->setSVOwnership(true); + } + } + + SG::AuxElement::Decorator<VertexLinkVector> CascadeLinksDecor("CascadeVertexLinks"); + SG::AuxElement::Decorator<VertexLink> Vertex3Decor(m_3TrackName+ "_VertexLink"); + SG::AuxElement::Decorator<float> chi2_decor("ChiSquared"); + SG::AuxElement::Decorator<float> ndof_decor("NumberDoF"); +// SG::AuxElement::Decorator<float> TotalProb_decor("TotalProb"); + SG::AuxElement::Decorator<float> Pt_decor("Pt"); + SG::AuxElement::Decorator<float> 
PtErr_decor("PtErr"); + SG::AuxElement::Decorator<float> Mass_svdecor(m_3TrackName+ "_mass"); + SG::AuxElement::Decorator<float> MassErr_svdecor(m_3TrackName+"_massErr"); + SG::AuxElement::Decorator<float> Pt_svdecor(m_3TrackName+"_Pt"); + SG::AuxElement::Decorator<float> PtErr_svdecor(m_3TrackName+"_PtErr"); + SG::AuxElement::Decorator<float> Lxy_svdecor(m_3TrackName+"_Lxy"); + SG::AuxElement::Decorator<float> LxyErr_svdecor(m_3TrackName+"_LxyErr"); + SG::AuxElement::Decorator<float> Tau_svdecor(m_3TrackName+"_Tau"); + SG::AuxElement::Decorator<float> TauErr_svdecor(m_3TrackName+"_TauErr"); + + + SG::ReadCondHandle<InDet::BeamSpotData> beamSpotHandle { m_beamSpotKey }; + if(not beamSpotHandle.isValid()) ATH_MSG_ERROR("Cannot Retrieve " << m_beamSpotKey.key() ); + BPhysPVCascadeTools helper(&(*m_CascadeTools), beamSpotHandle.cptr()); + helper.SetMinNTracksInPV(m_PV_minNTracks); + helper.m_copyAllVertices = this->m_copyAllVertices; + + + + int totalnotnull=0; + for(auto &c : Candidates4) { + if(c.cascVertex==nullptr) { + totalnotnull++; + continue; + } + auto x = c.cascVertex.get(); + const std::vector<xAOD::Vertex*> &cascadeVertices = x->vertices(); + if(cascadeVertices.size()!=s_topoN) { + ATH_MSG_ERROR("Incorrect number of vertices"); + continue; + } + if(cascadeVertices[0] == nullptr || cascadeVertices[1] == nullptr) { + ATH_MSG_ERROR("Error null vertex"); + continue; + } + if( m_Chi2NDFCut > 0.0 && (x->fitChi2() / x->nDoF()) > m_Chi2NDFCut) { + continue; + } + BPhysPVCascadeTools::PrepareVertexLinks(c.cascVertex.get(), trackContainer); + const std::vector< std::vector<TLorentzVector> > &moms = x->getParticleMoms(); + double mass1 = m_CascadeTools->invariantMass(moms[1]); + if(m_4trackmassFinalMin > 0. && mass1 < m_4trackmassFinalMin) continue; + if(m_4trackmassFinalMax > 0. 
&& mass1 > m_4trackmassFinalMax) continue; + double tau = m_CascadeTools->tau(moms[1],cascadeVertices[1],primaryVertex); + if(tau < m_tauCut) continue; +// ATH_MSG_INFO("Total chi " << x->fitChi2()<< " sum chi2 " << cascadeVertices[0]->chiSquared() + cascadeVertices[1]->chiSquared() ); + // Keep vertices (bear in mind that they come in reverse order!) + for(int i =0; i<s_topoN; i++) Vtxwritehandles[i]->push_back(cascadeVertices[i]); + x->setSVOwnership(false); // Prevent Container from deleting vertices + const auto mainVertex = cascadeVertices[1]; // this is the B vertex + + // Set links to cascade vertices + std::vector<const xAOD::Vertex*> verticestoLink; + verticestoLink.push_back(cascadeVertices[0]); + if(!BPhysPVCascadeTools::LinkVertices(CascadeLinksDecor, verticestoLink, Vtxwritehandles[0], cascadeVertices[1])) { + ATH_MSG_ERROR("Error decorating with cascade vertices"); + } + + // loop over candidates -- Don't apply PV_minNTracks requirement here + // because it may result in exclusion of the high-pt PV. + // get good PVs + + xAOD::BPhysHypoHelper vtx(m_hypoName, mainVertex); + + // Get refitted track momenta from all vertices, charged tracks only + BPhysPVCascadeTools::SetVectorInfo(vtx, x); + + // Decorate main vertex + // + // 1.a) mass, mass error + + BPHYS_CHECK( vtx.setMass(m_CascadeTools->invariantMass(moms[1])) ); + BPHYS_CHECK( vtx.setMassErr(m_CascadeTools->invariantMassError(moms[1],x->getCovariance()[1])) ); + // 1.b) pt and pT error (the default pt of mainVertex is != the pt of the full cascade fit!) + Pt_decor(*mainVertex) = m_CascadeTools->pT(moms[1]); + PtErr_decor(*mainVertex) = m_CascadeTools->pTError(moms[1],x->getCovariance()[1]); + // 1.c) chi2 and ndof (the default chi2 of mainVertex is != the chi2 of the full cascade fit!) 
+ chi2_decor(*mainVertex) = x->fitChi2(); + ndof_decor(*mainVertex) = x->nDoF(); + ATH_CHECK(helper.FillCandwithRefittedVertices(m_refitPV, pvContainer, + refPvContainer, &(*m_pvRefitter), m_PV_max, m_DoVertexType, x, 1, m_4tracksMass, vtx)); + // 4) decorate the main vertex with D0 vertex mass, pt, lifetime and lxy values (plus errors) + // D0 points to the main vertex, so lifetime and lxy are w.r.t the main vertex + Mass_svdecor(*mainVertex) = m_CascadeTools->invariantMass(moms[0]); + MassErr_svdecor(*mainVertex) = m_CascadeTools->invariantMassError(moms[0],x->getCovariance()[0]); + Pt_svdecor(*mainVertex) = m_CascadeTools->pT(moms[0]); + PtErr_svdecor(*mainVertex) = m_CascadeTools->pTError(moms[0],x->getCovariance()[0]); + Lxy_svdecor(*mainVertex) = m_CascadeTools->lxy(moms[0],cascadeVertices[0],cascadeVertices[1]); + LxyErr_svdecor(*mainVertex) = m_CascadeTools->lxyError(moms[0],x->getCovariance()[0], cascadeVertices[0],cascadeVertices[1]); + Tau_svdecor(*mainVertex) = m_CascadeTools->tau(moms[0],cascadeVertices[0],cascadeVertices[1]); + TauErr_svdecor(*mainVertex) = m_CascadeTools->tauError(moms[0],x->getCovariance()[0], cascadeVertices[0],cascadeVertices[1]); + if(!threeVertexMap.empty()) { + std::array<const xAOD::TrackParticle*, 3> lookuparray; + std::copy(c.tracks.begin(), c.tracks.begin()+3, lookuparray.begin()); + auto ptr = threeVertexMap[lookuparray]; + if(ptr == nullptr) ATH_MSG_WARNING("3Vertex lookup found null"); + Vertex3Decor(*mainVertex) = ptr ? 
VertexLink(ptr, *v3container) : VertexLink(); + } + } + + + ATH_MSG_DEBUG("Found " << Vtxwritehandles[0]->size() << " candidates " << totalnotnull << " were null"); + if(Vtxwritehandles[0]->size() > 200) ATH_MSG_WARNING("A lot of candidates N=" << Vtxwritehandles[0]->size()); + return StatusCode::SUCCESS; +} + +std::unique_ptr<Trk::VxCascadeInfo> Cascade3Plus1::CascadeFit(std::array<const xAOD::TrackParticle*, 4> &Track) const { +// ATH_MSG_DEBUG("Running Cascade Fit"); + std::vector<const xAOD::TrackParticle*> tracksDs(Track.begin(), Track.begin()+3); + std::vector<const xAOD::TrackParticle*> tracksBs(1, Track[3]); + + std::vector<double> massesDs(m_trackMasses.begin(), m_trackMasses.begin()+3); + std::vector<double> massesBs(1, m_trackMasses[3]); + + std::unique_ptr<Trk::IVKalState> state = m_iVertexFitter->makeState(); + int robustness = 0; + m_iVertexFitter->setRobustness(robustness, *state); +// if(tracksDs.size() != massesDs.size()) ATH_MSG_ERROR("Track sizes do not match"); +// for(int i =0;i < tracksDs.size();i++) ATH_MSG_DEBUG("Num " << i << " track " << tracksDs[i] << " mass " << massesDs[i]); + // Vertex list + std::vector<Trk::VertexID> vrtList; + // Ds vertex + auto vID = m_iVertexFitter->startVertex(tracksDs, massesDs, *state, m_3TrackMassConstraint ? 
m_3tracksMass : 0.0); + std::vector<Trk::VertexID> cnstV; + if (m_2TrackMassConstraint && !m_iVertexFitter->addMassConstraint(vID, tracksDs, cnstV, *state, m_2tracksMass).isSuccess() ) { + ATH_MSG_WARNING("addMassConstraint failed"); + } + vrtList.push_back(vID); + // Bs vertex + m_iVertexFitter->nextVertex(tracksBs,massesBs,vrtList, *state); + // Do the work + auto x = std::unique_ptr<Trk::VxCascadeInfo> (m_iVertexFitter->fitCascade(*state)); + if(x==nullptr) ATH_MSG_VERBOSE("Cascade returned null"); + return x; +} + +std::unique_ptr<xAOD::Vertex> Cascade3Plus1::StandardFit(const std::vector<const xAOD::TrackParticle*> &inputTracks, const xAOD::TrackParticleContainer* importedTrackCollection) const { + assert(inputTracks.size() >=2); + const Trk::Perigee& aPerigee1 = inputTracks[0]->perigeeParameters(); + const Trk::Perigee& aPerigee2 = inputTracks[1]->perigeeParameters(); + int sflag = 0; + int errorcode = 0; + Amg::Vector3D startingPoint = m_vertexEstimator->getCirclesIntersectionPoint(&aPerigee1,&aPerigee2,sflag,errorcode); + if (errorcode != 0) { + startingPoint(0) = 0.0; + startingPoint(1) = 0.0; + startingPoint(2) = 0.0; + } + std::unique_ptr<Trk::IVKalState> state = m_iVertexFitter->makeState(); + auto theResult = std::unique_ptr<xAOD::Vertex>( m_iVertexFitter->fit(inputTracks, startingPoint, *state)); + if(theResult) BPhysPVTools::PrepareVertexLinks(theResult.get(), importedTrackCollection); + return theResult; +} + +} diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/CascadeTools.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/CascadeTools.cxx new file mode 100644 index 0000000000000000000000000000000000000000..7721d36d9ee24de55cde55e8701efc15514fa50e --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/CascadeTools.cxx @@ -0,0 +1,608 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ 
+///////////////////////////////////////////////////////////////// +// CascadeTools.cxx, (c) ATLAS Detector software +///////////////////////////////////////////////////////////////// +#include "DerivationFrameworkBPhys/CascadeTools.h" +#include "CLHEP/GenericFunctions/CumulativeChiSquare.hh" + + +namespace DerivationFramework { + + +CascadeTools::CascadeTools(const std::string& t, const std::string& n, const IInterface* p) : + AthAlgTool(t,n,p) +{ + declareInterface<CascadeTools>(this); +} + +CascadeTools::~CascadeTools() {} + +//Light speed constant for various calculations +constexpr double s_CONST = 1000./299.792; + +double CascadeTools::invariantMass(const std::vector<TLorentzVector> &particleMom) const +{ + if(particleMom.size() == 0) return -999999.; + TLorentzVector totalMom; + unsigned int NTrk = particleMom.size(); + for( unsigned int it=0; it<NTrk; it++) totalMom += particleMom[it]; + return totalMom.M(); +} + +double CascadeTools::invariantMass(const std::vector<TLorentzVector> &particleMom2, const std::vector<double> &masses) const +{ + if(particleMom2.size() == 0) return -999999.; + TLorentzVector totalMom; + unsigned int NTrk = particleMom2.size(); + if (masses.size() != NTrk) { + ATH_MSG_DEBUG("The provided number of masses does not match the number of tracks in the vertex"); + return -999999.; + } + TLorentzVector temp; + for( unsigned int it=0; it<NTrk; it++) { + double esq = particleMom2[it].Px()*particleMom2[it].Px() + particleMom2[it].Py()*particleMom2[it].Py() + + particleMom2[it].Pz()*particleMom2[it].Pz() + masses[it]*masses[it]; + double e = (esq>0.) ? 
sqrt(esq) : 0.; + + temp.SetPxPyPzE(particleMom2[it].Px(),particleMom2[it].Py(),particleMom2[it].Pz(),e); + totalMom += temp; + } + return totalMom.M(); +} + +double CascadeTools::invariantMassError(const std::vector<TLorentzVector> &particleMom, const Amg::MatrixX& cov) const +{ + if(particleMom.size() == 0) return -999999.; + unsigned int NTrk = particleMom.size(); + TLorentzVector totalMom; + for( unsigned int it=0; it<NTrk; it++) totalMom += particleMom[it]; + + Amg::MatrixX D_vec(3*NTrk+3,1); D_vec.setZero(); + for( unsigned int it=0; it<NTrk; it++) { + D_vec(3*it+3) = 2.*(totalMom.E()*particleMom[it].Px()/particleMom[it].E()-totalMom.Px()); + D_vec(3*it+4) = 2.*(totalMom.E()*particleMom[it].Py()/particleMom[it].E()-totalMom.Py()); + D_vec(3*it+5) = 2.*(totalMom.E()*particleMom[it].Pz()/particleMom[it].E()-totalMom.Pz()); + } + Amg::MatrixX merr = D_vec.transpose() * cov * D_vec; + double massVarsq = merr(0,0); + if (massVarsq <= 0.) ATH_MSG_DEBUG("massError: negative sqrt massVarsq " << massVarsq); + double massVar = (massVarsq>0.) ? sqrt(massVarsq) : 0.; + double massErr = massVar/(2.*totalMom.M()); + return massErr; +} + +double CascadeTools::invariantMassError(const std::vector<TLorentzVector> &particleMom2, const Amg::MatrixX& cov, const std::vector<double> &masses) const +{ + if(particleMom2.size() == 0) return -999999.; + unsigned int NTrk = particleMom2.size(); + if (masses.size() != NTrk) { + ATH_MSG_DEBUG("The provided number of masses does not match the number of tracks in the vertex"); + return -999999.; + } + std::vector<TLorentzVector> particleMom(NTrk); particleMom.clear(); + for( unsigned int it=0; it<NTrk; it++) { + double esq = particleMom2[it].Px()*particleMom2[it].Px() + particleMom2[it].Py()*particleMom2[it].Py() + + particleMom2[it].Pz()*particleMom2[it].Pz() + masses[it]*masses[it]; + double e = (esq>0.) ? 
sqrt(esq) : 0.; + particleMom[it].SetPxPyPzE(particleMom2[it].Px(),particleMom2[it].Py(),particleMom2[it].Pz(),e); + } + TLorentzVector totalMom; + for( unsigned int it=0; it<NTrk; it++) totalMom += particleMom[it]; + + std::vector<double>dm2dpx(NTrk), dm2dpy(NTrk), dm2dpz(NTrk); + for( unsigned int it=0; it<NTrk; it++) { + dm2dpx[it] = 2.*(totalMom.E()*particleMom[it].Px()/particleMom[it].E()-totalMom.Px()); + dm2dpy[it] = 2.*(totalMom.E()*particleMom[it].Py()/particleMom[it].E()-totalMom.Py()); + dm2dpz[it] = 2.*(totalMom.E()*particleMom[it].Pz()/particleMom[it].E()-totalMom.Pz()); + } + Amg::MatrixX D_vec(3*NTrk+3,1); D_vec.setZero(); + for( unsigned int it=0; it<NTrk; it++) { + D_vec(3*it+3) = dm2dpx[it]; + D_vec(3*it+4) = dm2dpy[it]; + D_vec(3*it+5) = dm2dpz[it]; + } + Amg::MatrixX merr = D_vec.transpose() * cov * D_vec; + double massVarsq = merr(0,0); + if (massVarsq <= 0.) ATH_MSG_DEBUG("massError: negative sqrt massVarsq " << massVarsq); + double massVar = (massVarsq>0.) ? sqrt(massVarsq) : 0.; + double massErr = massVar/(2.*totalMom.M()); + return massErr; +} + +double CascadeTools::pT(const std::vector<TLorentzVector> &particleMom) const +{ + if(particleMom.size() == 0) return -999999.; + Amg::Vector3D P = momentum(particleMom);; + return P.perp(); +} + +double CascadeTools::pTError(const std::vector<TLorentzVector> &particleMom, const Amg::MatrixX& cov) const +{ + if(particleMom.size() == 0) return -999999.; + Amg::Vector3D P = momentum(particleMom);; + double Px = P.x(); + double Py = P.y(); + double PT = P.perp(); + + unsigned int NTrk = particleMom.size(); + Amg::MatrixX D_vec(3*NTrk+3,1); D_vec.setZero(); + for( unsigned int it=0; it<NTrk; it++) { + D_vec(3*it+3) = Px/PT; + D_vec(3*it+4) = Py/PT; + D_vec(3*it+5) = 0.; + } + Amg::MatrixX PtErrSq = D_vec.transpose() * cov * D_vec; + double PtErrsq = PtErrSq(0,0); + if (PtErrsq <= 0.) ATH_MSG_DEBUG("ptError: negative sqrt PtErrsq " << PtErrsq); + return (PtErrsq>0.) ? 
sqrt(PtErrsq) : 0.; +} + +double CascadeTools::lxy(const std::vector<TLorentzVector> &particleMom, const xAOD::Vertex* SV, const xAOD::Vertex* PV) const +{ + if(particleMom.size() == 0) return -999999.; + auto vert = SV->position() - PV->position(); + double dx = vert.x(); + double dy = vert.y(); + Amg::Vector3D P = momentum(particleMom);; + double dxy = (P.x()*dx + P.y()*dy)/P.perp(); + return dxy; +} + +double CascadeTools::lxyError(const std::vector<TLorentzVector> &particleMom, const Amg::MatrixX& cov, const xAOD::Vertex* SV, const xAOD::Vertex* PV) const +{ + if(particleMom.size() == 0) return -999999.; + auto vert = SV->position() - PV->position(); + double dx = vert.x(); + double dy = vert.y(); + Amg::Vector3D P = momentum(particleMom);; + double Px = P.x(); + double Py = P.y(); + double PT = P.perp(); + double LXYoverPT = (Px*dx+Py*dy)/(PT*PT); + + unsigned int NTrk = particleMom.size(); + + double dLxydx = Px/PT; + double dLxydy = Py/PT; + double dLxydx0 = -dLxydx; + double dLxydy0 = -dLxydy; + + Amg::MatrixX D_vec(3*NTrk+6,1); D_vec.setZero(); + D_vec(0) = dLxydx; + D_vec(1) = dLxydy; + D_vec(2) = 0.; + for( unsigned int it=0; it<NTrk; it++) { + D_vec(3*it+3) = (dx - LXYoverPT*Px)/PT; + D_vec(3*it+4) = (dy - LXYoverPT*Py)/PT; + D_vec(3*it+5) = 0.; + } + D_vec(3*NTrk+3) = dLxydx0; + D_vec(3*NTrk+4) = dLxydy0; + D_vec(3*NTrk+5) = 0.; + + unsigned int ndim = 3*NTrk+3; + Amg::MatrixX W_mat(3*NTrk+6,3*NTrk+6); W_mat.setZero(); + W_mat.block(0,0,ndim,ndim) = cov; + W_mat.block(3*NTrk+3,3*NTrk+3,3,3) = PV->covariancePosition(); + Amg::MatrixX V_err = D_vec.transpose() * W_mat * D_vec; + + double LxyErrsq = V_err(0,0); + if (LxyErrsq <= 0.) ATH_MSG_DEBUG("lxyError: negative sqrt LxyErrsq " << LxyErrsq); + return (LxyErrsq>0.) ? 
sqrt(LxyErrsq) : 0.; +} + +double CascadeTools::tau(const std::vector<TLorentzVector> &particleMom, const xAOD::Vertex* SV, const xAOD::Vertex* PV) const +{ + if(particleMom.size() == 0) return -999999.; + double M = invariantMass(particleMom); + double LXY = lxy(particleMom,SV,PV); + double PT = pT(particleMom); + return s_CONST*M*LXY/PT; +} + +double CascadeTools::tauError(const std::vector<TLorentzVector> &particleMom, const Amg::MatrixX& cov, const xAOD::Vertex* SV, const xAOD::Vertex* PV) const +{ + if(particleMom.size() == 0) return -999999.; + double M = invariantMass(particleMom); + auto vert = SV->position() - PV->position(); + double dx = vert.x(); + double dy = vert.y(); + Amg::Vector3D P = momentum(particleMom);; + double Px = P.x(); + double Py = P.y(); + double PT = P.perp(); + double LXY = Px*dx+Py*dy; + + unsigned int NTrk = particleMom.size(); + TLorentzVector totalMom; + for( unsigned int it=0; it<NTrk; it++) totalMom += particleMom[it]; + + double dTaudx = (M*Px)/(PT*PT); + double dTaudy = (M*Py)/(PT*PT); + double dTaudx0 = -dTaudx; + double dTaudy0 = -dTaudy; + + Amg::MatrixX D_vec(3*NTrk+6,1); D_vec.setZero(); + D_vec(0) = dTaudx; + D_vec(1) = dTaudy; + D_vec(2) = 0.; + for( unsigned int it=0; it<NTrk; it++) { + D_vec(3*it+3) = (((totalMom.E()*particleMom[it].Px()*LXY)/(M*particleMom[it].E()))-Px*LXY/M+M*dx)/(PT*PT) - 2.*M*LXY*Px/(PT*PT*PT*PT); + D_vec(3*it+4) = (((totalMom.E()*particleMom[it].Py()*LXY)/(M*particleMom[it].E()))-Py*LXY/M+M*dy)/(PT*PT) - 2.*M*LXY*Py/(PT*PT*PT*PT); + D_vec(3*it+5) = 0.; + } + D_vec(3*NTrk+3) = dTaudx0; + D_vec(3*NTrk+4) = dTaudy0; + D_vec(3*NTrk+5) = 0.; + + unsigned int ndim = 3*NTrk+3; + Amg::MatrixX W_mat(3*NTrk+6,3*NTrk+6); W_mat.setZero(); + W_mat.block(0,0,ndim,ndim) = cov; + W_mat.block(3*NTrk+3,3*NTrk+3,3,3) = PV->covariancePosition(); + Amg::MatrixX V_err = D_vec.transpose() * W_mat * D_vec; + + double tauErrsq = V_err(0,0); + if (tauErrsq <= 0.) 
ATH_MSG_DEBUG("tauError: negative sqrt tauErrsq " << tauErrsq); + double tauErr = (tauErrsq>0.) ? sqrt(tauErrsq) : 0.; + return s_CONST*tauErr; +} + +double CascadeTools::tau(const std::vector<TLorentzVector> &particleMom, const xAOD::Vertex* SV, const xAOD::Vertex* PV, double M) const +{ + if(particleMom.size() == 0) return -999999.; + double LXY = lxy(particleMom,SV,PV); + double PT = pT(particleMom); + return s_CONST*M*LXY/PT; +} + +double CascadeTools::tauError(const std::vector<TLorentzVector> &particleMom, const Amg::MatrixX& cov, const xAOD::Vertex* SV, const xAOD::Vertex* PV, double M) const +{ + if(particleMom.size() == 0) return -999999.; + auto vert = SV->position() - PV->position(); + double dx = vert.x(); + double dy = vert.y(); + Amg::Vector3D P = momentum(particleMom);; + double Px = P.x(); + double Py = P.y(); + double PT = P.perp(); + double LXY = Px*dx+Py*dy; + + unsigned int NTrk = particleMom.size(); + TLorentzVector totalMom; + for( unsigned int it=0; it<NTrk; it++) totalMom += particleMom[it]; + + double dTaudx = (M*Px)/(PT*PT); + double dTaudy = (M*Py)/(PT*PT); + double dTaudx0 = -dTaudx; + double dTaudy0 = -dTaudy; + + Amg::MatrixX D_vec(3*NTrk+6,1); D_vec.setZero(); + D_vec(0) = dTaudx; + D_vec(1) = dTaudy; + D_vec(2) = 0.; + for( unsigned int it=0; it<NTrk; it++) { + D_vec(3*it+3) = (M*dx)/(PT*PT) - 2.*M*LXY*Px/(PT*PT*PT*PT); + D_vec(3*it+4) = (M*dy)/(PT*PT) - 2.*M*LXY*Py/(PT*PT*PT*PT); + D_vec(3*it+5) = 0.; + } + D_vec(3*NTrk+3) = dTaudx0; + D_vec(3*NTrk+4) = dTaudy0; + D_vec(3*NTrk+5) = 0.; + + unsigned int ndim = 3*NTrk+3; + Amg::MatrixX W_mat(3*NTrk+6,3*NTrk+6); W_mat.setZero(); + W_mat.block(0,0,ndim,ndim) = cov; + W_mat.block(3*NTrk+3,3*NTrk+3,3,3) = PV->covariancePosition(); + Amg::MatrixX V_err = D_vec.transpose() * W_mat * D_vec; + + double tauErrsq = V_err(0,0); + if (tauErrsq <= 0.) ATH_MSG_DEBUG("tauError: negative sqrt tauErrsq " << tauErrsq); + double tauErr = (tauErrsq>0.) ? 
sqrt(tauErrsq) : 0.; + return s_CONST*tauErr; +} + +Amg::Vector3D CascadeTools::pca(const std::vector<TLorentzVector> &particleMom, const xAOD::Vertex* SV, const xAOD::Vertex* PV) const +{ + if(particleMom.size() == 0) { + Amg::Vector3D p; p.setZero(); + return p; + } + Amg::Vector3D pv = PV->position(); + Amg::Vector3D sv = SV->position(); + Amg::Vector3D P = momentum(particleMom);; + double p2 = P.mag2(); + double pdr = P.dot((sv - pv)); + return sv - P*pdr/p2; +} + +double CascadeTools::cosTheta(const std::vector<TLorentzVector> &particleMom, const xAOD::Vertex* SV, const xAOD::Vertex* PV) const +{ + if(particleMom.size() == 0) return -999999.; + Amg::Vector3D P = momentum(particleMom);; + Amg::Vector3D vtx = SV->position(); + vtx -= PV->position(); + return (P.dot(vtx))/(P.mag()*vtx.mag()); +} + +double CascadeTools::cosTheta_xy(const std::vector<TLorentzVector> &particleMom, const xAOD::Vertex* SV, const xAOD::Vertex* PV) const +{ + if(particleMom.size() == 0) return -999999.; + Amg::Vector3D P = momentum(particleMom);; + Amg::Vector3D vtx = SV->position(); + vtx -= PV->position(); + double pT = P.perp(); + return (P.x()*vtx.x()+P.y()*vtx.y())/(pT*vtx.perp()); +} + +double CascadeTools::a0z(const std::vector<TLorentzVector> &particleMom, const xAOD::Vertex* SV, const xAOD::Vertex* PV) const +{ + if(particleMom.size() == 0) return -999999.; + Amg::Vector3D pv = PV->position(); + Amg::Vector3D ca_point = pca(particleMom,SV,PV); + Amg::Vector3D a0_vec = pv - ca_point; + return a0_vec.z(); +} + +double CascadeTools::a0zError(const std::vector<TLorentzVector> &particleMom, const Amg::MatrixX& cov, const xAOD::Vertex* SV, const xAOD::Vertex* PV) const +{ + if(particleMom.size() == 0) return -999999.; + auto vert = SV->position() - PV->position(); + double dx = vert.x(); + double dy = vert.y(); + double dz = vert.z(); + Amg::Vector3D P = momentum(particleMom);; + double Px = P.x(); + double Py = P.y(); + double Pz = P.z(); + double P2 = P.mag2(); + double L = 
Px*dx+Py*dy+Pz*dz; + + unsigned int NTrk = particleMom.size(); + + + double da0zdx = (Px*Pz)/P2; + double da0zdy = (Py*Pz)/P2; + double da0zdz = (Pz*Pz)/P2 - 1.; + double da0zdx0 = -da0zdx; + double da0zdy0 = -da0zdy; + double da0zdz0 = -da0zdz; + + Amg::MatrixX D_vec(3*NTrk+6,1); D_vec.setZero(); + D_vec(0) = da0zdx; + D_vec(1) = da0zdy; + D_vec(2) = da0zdz; + for( unsigned int it=0; it<NTrk; it++) { + D_vec(3*it+3) = (Pz*(P2*dx-2.*L*Px))/(P2*P2); + D_vec(3*it+4) = (Pz*(P2*dy-2.*L*Py))/(P2*P2); + D_vec(3*it+5) = (Pz*(P2*dz-2.*L*Pz))/(P2*P2)+L/P2; + } + D_vec(3*NTrk+3) = da0zdx0; + D_vec(3*NTrk+4) = da0zdy0; + D_vec(3*NTrk+5) = da0zdz0; + + unsigned int ndim = 3*NTrk+3; + Amg::MatrixX W_mat(3*NTrk+6,3*NTrk+6); W_mat.setZero(); + W_mat.block(0,0,ndim,ndim) = cov; + W_mat.block(3*NTrk+3,3*NTrk+3,3,3) = PV->covariancePosition(); + Amg::MatrixX V_err = D_vec.transpose() * W_mat * D_vec; + + double a0zErrsq = V_err(0,0); + if (a0zErrsq <= 0.) ATH_MSG_DEBUG("a0zError: negative sqrt a0zErrsq " << a0zErrsq); + return (a0zErrsq>0.) ? sqrt(a0zErrsq) : 0.; +} + +double CascadeTools::a0xy(const std::vector<TLorentzVector> &particleMom, const xAOD::Vertex* SV, const xAOD::Vertex* PV) const +{ + if(particleMom.size() == 0) return -999999.; + double cosineTheta_xy = cosTheta_xy(particleMom,SV,PV); + double sinTheta_xy = ((1.-cosineTheta_xy*cosineTheta_xy)>0.) ? 
sqrt((1.-cosineTheta_xy*cosineTheta_xy)) : 0.; + return (SV->position()-PV->position()).perp() * sinTheta_xy; +} + +double CascadeTools::a0xyError(const std::vector<TLorentzVector> &particleMom, const Amg::MatrixX& cov, const xAOD::Vertex* SV, const xAOD::Vertex* PV) const +{ + if(particleMom.size() == 0) return -999999.; + auto vert = SV->position() - PV->position(); + double dx = vert.x(); + double dy = vert.y(); + Amg::Vector3D P = momentum(particleMom);; + double Px = P.x(); + double Py = P.y(); + double P2 = P.perp()*P.perp(); + double L = Px*dx+Py*dy; + double dR2 = vert.perp()*vert.perp(); + double d = sqrt((P2*dR2-L*L)/P2); + + unsigned int NTrk = particleMom.size(); + + double da0dx = (P2*dx-L*Px)/(P2*d); + double da0dy = (P2*dy-L*Py)/(P2*d); + double da0dx0 = -da0dx; + double da0dy0 = -da0dy; + + Amg::MatrixX D_vec(3*NTrk+6,1); D_vec.setZero(); + D_vec(0) = da0dx; + D_vec(1) = da0dy; + D_vec(2) = 0.; + for( unsigned int it=0; it<NTrk; it++) { + D_vec(3*it+3) = (L*(L*Px-P2*dx))/(P2*P2*d); + D_vec(3*it+4) = (L*(L*Py-P2*dy))/(P2*P2*d); + D_vec(3*it+5) = 0.; + } + D_vec(3*NTrk+3) = da0dx0; + D_vec(3*NTrk+4) = da0dy0; + D_vec(3*NTrk+5) = 0.; + + unsigned int ndim = 3*NTrk+3; + Amg::MatrixX W_mat(3*NTrk+6,3*NTrk+6); W_mat.setZero(); + W_mat.block(0,0,ndim,ndim) = cov; + W_mat.block(3*NTrk+3,3*NTrk+3,3,3) = PV->covariancePosition(); + Amg::MatrixX V_err = D_vec.transpose() * W_mat * D_vec; + + double a0xyErrsq = V_err(0,0); + if (a0xyErrsq <= 0.) ATH_MSG_DEBUG("a0xyError: negative sqrt a0xyErrsq " << a0xyErrsq); + return (a0xyErrsq>0.) ? sqrt(a0xyErrsq) : 0.; +} + +double CascadeTools::a0(const std::vector<TLorentzVector> &particleMom, const xAOD::Vertex* SV, const xAOD::Vertex* PV) const +{ + if(particleMom.size() == 0) return -999999.; + double cosineTheta = cosTheta(particleMom,SV,PV); + double sinTheta = ((1.-cosineTheta*cosineTheta)>0.) ? 
sqrt((1.-cosineTheta*cosineTheta)) : 0.; + return (SV->position()-PV->position()).mag() * sinTheta; +} + +double CascadeTools::a0Error(const std::vector<TLorentzVector> &particleMom, const Amg::MatrixX& cov, const xAOD::Vertex* SV, const xAOD::Vertex* PV) const +{ + if(particleMom.size() == 0) return -999999.; + auto vert = SV->position() - PV->position(); + double dx = vert.x(); + double dy = vert.y(); + double dz = vert.z(); + Amg::Vector3D P = momentum(particleMom);; + double Px = P.x(); + double Py = P.y(); + double Pz = P.z(); + double P2 = P.mag2(); + double L = Px*dx+Py*dy+Pz*dz; + double dR2 = vert.mag2(); + double d = sqrt((P2*dR2-L*L)/P2); + + unsigned int NTrk = particleMom.size(); + + double da0dx = (P2*dx-L*Px)/(P2*d); + double da0dy = (P2*dy-L*Py)/(P2*d); + double da0dz = (P2*dz-L*Pz)/(P2*d); + double da0dx0 = -da0dx; + double da0dy0 = -da0dy; + double da0dz0 = -da0dz; + + Amg::MatrixX D_vec(3*NTrk+6,1); D_vec.setZero(); + D_vec(0) = da0dx; + D_vec(1) = da0dy; + D_vec(2) = da0dz; + for( unsigned int it=0; it<NTrk; it++) { + D_vec(3*it+3) = (L*(L*Px-P2*dx))/(P2*P2*d); + D_vec(3*it+4) = (L*(L*Py-P2*dy))/(P2*P2*d); + D_vec(3*it+5) = (L*(L*Pz-P2*dz))/(P2*P2*d); + } + D_vec(3*NTrk+3) = da0dx0; + D_vec(3*NTrk+4) = da0dy0; + D_vec(3*NTrk+5) = da0dz0; + + unsigned int ndim = 3*NTrk+3; + Amg::MatrixX W_mat(3*NTrk+6,3*NTrk+6); W_mat.setZero(); + W_mat.block(0,0,ndim,ndim) = cov; + W_mat.block(3*NTrk+3,3*NTrk+3,3,3) = PV->covariancePosition(); + Amg::MatrixX V_err = D_vec.transpose() * W_mat * D_vec; + + double a0Errsq = V_err(0,0); + if (a0Errsq <= 0.) ATH_MSG_DEBUG("a0Error: negative sqrt a0Errsq " << a0Errsq); + return (a0Errsq>0.) ? 
sqrt(a0Errsq) : 0.; +} + +Amg::Vector3D CascadeTools::momentum(const std::vector<TLorentzVector> &particleMom) const +{ + if(particleMom.size() == 0) { + Amg::Vector3D p; p.setZero(); + return p; + } + TLorentzVector totalMom; + unsigned int NTrk = particleMom.size(); + for( unsigned int it=0; it<NTrk; it++) totalMom += particleMom[it]; + TVector3 P3 = totalMom.Vect(); + Amg::Vector3D mom(P3.Px(),P3.Py(),P3.Pz()); + return mom; +} + +double CascadeTools::massProbability(double V0Mass, double mass, double massErr) const +{ + if(massErr > 0.) { + double chi2 = (V0Mass - mass)*(V0Mass - mass)/(massErr*massErr); + int ndf = 1; + Genfun::CumulativeChiSquare myCumulativeChiSquare(ndf); + if (chi2 > 0.) { + double achi2prob = 1.-myCumulativeChiSquare(chi2); + return achi2prob; + } else { + ATH_MSG_DEBUG("chi2 <= 0"); + return -1.; + } + } else { + return -1.; + } +} + +double CascadeTools::vertexProbability(int ndf, double chi2) const +{ + if (ndf > 0.) { + Genfun::CumulativeChiSquare myCumulativeChiSquare(ndf); + if (chi2 > 0.) { + double chi2prob = 1.-myCumulativeChiSquare(chi2); + return chi2prob; + } else { + ATH_MSG_DEBUG("chi2 <= 0"); + return -1.; + } + } else { + ATH_MSG_DEBUG("ndf <= 0"); + return -1.; + } +} + + +Amg::MatrixX * CascadeTools::convertCovMatrix(const xAOD::Vertex * vxCandidate) const +{ + unsigned int NTrk = vxCandidate->nTrackParticles(); + std::vector<float> matrix = vxCandidate->covariance(); + + int ndim = 0; + + if ( matrix.size() == (3*NTrk+3)*(3*NTrk+3+1)/2) { + ndim = 3*NTrk+3; + } else if (matrix.size() == (5*NTrk+3)*(5*NTrk+3+1)/2) { + ndim = 5*NTrk+3; + } else { + return nullptr; + } + + Amg::MatrixX* mtx = new Amg::MatrixX(ndim,ndim); + + long int ij=0; + for (int i=1; i<= ndim; i++) { + for (int j=1; j<=i; j++){ + if (i==j) { + (*mtx)(i-1,j-1)=matrix[ij]; + } else { + (*mtx).fillSymmetric(i-1,j-1,matrix[ij]); + } + ij++; + } + } + // NOTE: mtx is a pointer! Take care of deleting it after you do not need it anymore!!! 
+ + return mtx; +} + +Amg::MatrixX CascadeTools::SetFullMatrix(int NTrk, const std::vector<float> & Matrix) const +{ + + Amg::MatrixX mtx(3+3*NTrk,3+3*NTrk); // Create identity matrix of needed size + + int ij=0; + + for(int i=0; i<(3+3*NTrk); i++){ + for(int j=0; j<=i; j++){ + mtx(i,j)=Matrix[ij]; + if(i!=j) mtx(j,i)=Matrix[ij]; + ij++; + } + } + + return mtx; +} + +} //end of namespace definitions + diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/CfAthAlgTool.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/CfAthAlgTool.cxx new file mode 100644 index 0000000000000000000000000000000000000000..299340b7b3accab4460b059bc9341e5c5efdd67f --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/CfAthAlgTool.cxx @@ -0,0 +1,178 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ + +//============================================================================ +// CfAthAlgTool.h +//============================================================================ +// +// Author : Wolfgang Walkowiak <Wolfgang.Walkowiak@cern.ch.> +// Changes: +// +// Wrapper around AthAlgTool to provide easy access to CutFlowSvc +// and some utility methods for it. +// Methods for accessing the CutFlowSvc are modelled after +// AthFilterAlgorithm's implementation. +// +// This class inherits from AthAlgTool. It should be inherited from. +// +//----------------------------------------------------------------------------- +// +// Usage information +// +// Example: +// +// // Bmumu_reco_mumu.h: +// class Bmumu_reco_mumu : public CfAthAlgTool, public IAugmentationTool { +// public: +// Bmumu_reco_mumu(const std::string& t, const std::string& n, +// const IInterface* p); +// ... +// +// // Bmumu_reco_mumu.cxx: +// Bmumu_reco_mumu::Bmumu_reco_mumu(const std::string& t, +// const std::string& n, +// const IInterface* p) : +// CfAthAlgTool(t,n,p), +// ... 
+// +// // inside a method like Bmumu_reco_mumu::addBranches(): +// ... +// // add counter for number of events seen +// addEvent("dimuEvents"); +// // add counter for the number of events with >= 1 reco'd vertices +// if ( vtxContainer->size() > 0 ) { +// addEvent("dimuWithVertexCand"); +// } +// // add counter for the number of vertices +// addToCounter("dimuNumVertices", vtxContainer->size()); +// ... +// +// Please note that a line for +// addEvent(nameString, weight=1.); +// or +// addToCounter(nameString, counts=1, weight=1.); +// is sufficient. +// In a case a counter with that name does not exist yet, it will be +// initialized automatically. +// +//============================================================================ +// +#include "DerivationFrameworkBPhys/CfAthAlgTool.h" + +namespace DerivationFramework { + + //-------------------------------------------------------------------------- + // Constructor + CfAthAlgTool::CfAthAlgTool(const std::string& t, + const std::string& n, + const IInterface* p) : + AthAlgTool(t,n,p), + m_cutFlowSvc("CutFlowSvc/CutFlowSvc", n), + m_ctbasename(n), + m_bid(0), m_bidisset(false) { + + ATH_MSG_DEBUG("Calling constructor with parameters"); + + // Declare counters base name + declareProperty("CountersBaseName", m_ctbasename); + + // clean-up counter base name + std::string fstr("ToolSvc."); + std::string::size_type ind = m_ctbasename.find(fstr); + if (ind != std::string::npos) m_ctbasename.erase(ind, fstr.length()); + + } + //-------------------------------------------------------------------------- + // Destructor + CfAthAlgTool::~CfAthAlgTool() { + + ATH_MSG_DEBUG("Calling destructor"); + } + //-------------------------------------------------------------------------- + // return a handle to an ICutFlowSvc instance + ServiceHandle<ICutFlowSvc>& CfAthAlgTool::cutFlowSvc() const { + + return m_cutFlowSvc; + } + //-------------------------------------------------------------------------- + // Initialization method 
invoked by the framework. + StatusCode CfAthAlgTool::sysInitialize() { + + // retrieve CutFlowSvc instance + CHECK( cutFlowSvc().retrieve() ); + + // re-direct to base class... + return AthAlgTool::sysInitialize(); + } + //-------------------------------------------------------------------------- + // add one event to a named counter -- returns true on success + bool CfAthAlgTool::addEvent(const std::string &name, double weight) const { + + CutIdentifier id = getCounter(name); + if ( id > 0 ) { + cutFlowSvc()->addEvent(id, weight); + } + return (id > 0); + } + //-------------------------------------------------------------------------- + // add to a named counter -- returns true on success + // if counts > 1 : same weight is added multiple times + bool CfAthAlgTool::addToCounter(const std::string &name, uint64_t counts, + double weight) const { + + CutIdentifier id = getCounter(name); + if ( id > 0 ) { + for (uint64_t i=0; i<counts; ++i) { + cutFlowSvc()->addEvent(id, weight); + } + } + return (id > 0); + } + //-------------------------------------------------------------------------- + // add a counter by name -- simply returns id if counter already exists + CutIdentifier CfAthAlgTool::getCounter(const std::string &name) const { + + CutIdentifier id = getCounterIdByName(name); + if ( id < 1 ) { + std::string fullname = m_ctbasename + "_" + name; + if ( ! m_bidisset ) { + throw std::runtime_error("cutFlowSvc()->registerFilter is no longer supported. code an alternative here"); + //id = cutFlowSvc()->registerFilter(fullname, "N/A"); + m_bid = id; + } else { + throw std::runtime_error("cutFlowSvc()->registerCut is no longer supported. 
code an alternative here"); + //id = cutFlowSvc()->registerCut(fullname, "N/A", m_bid); + } + m_mctn[name] = id; + } + return id; + } + //-------------------------------------------------------------------------- + // returns counter name by id + std::string CfAthAlgTool::getCounterNameById(CutIdentifier id) const { + + std::string res = "__NOT_FOUND__"; + + for (NameIdMap_t::iterator it = m_mctn.begin(); it != m_mctn.end(); ++it) { + if ( it->second == id ) { + res = it->first; + break; + } + } + return res; + } + //-------------------------------------------------------------------------- + // returns counter id by name + CutIdentifier CfAthAlgTool::getCounterIdByName(const std::string &name) const { + + CutIdentifier id = 0; + + NameIdMap_t::const_iterator it = m_mctn.find(name); + if ( it != m_mctn.end() ) { + id = it->second; + } + return id; + } + //-------------------------------------------------------------------------- +} // namespace diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/FourMuonTool.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/FourMuonTool.cxx new file mode 100644 index 0000000000000000000000000000000000000000..bdf1acabf9e5827e83b3429985f5d9ffbd3073e6 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/FourMuonTool.cxx @@ -0,0 +1,449 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ + +// **************************************************************************** +// ---------------------------------------------------------------------------- +// FourMuonTool +// James Catmore <James.Catmore@cern.ch> +// Evelina Bouhova-Thacker <e.bouhova@cern.ch> +// ---------------------------------------------------------------------------- +// **************************************************************************** + +#include "DerivationFrameworkBPhys/FourMuonTool.h" +#include "DerivationFrameworkBPhys/BPhysPVTools.h" +#include 
"xAODBPhys/BPhysHelper.h" +#include "TrkVertexAnalysisUtils/V0Tools.h" +#include "TrkVertexFitterInterfaces/IVertexFitter.h" +#include "TrkVKalVrtFitter/TrkVKalVrtFitter.h" +#include "TrkV0Fitter/TrkV0VertexFitter.h" +#include "InDetConversionFinderTools/ConversionFinderUtils.h" +#include "InDetConversionFinderTools/VertexPointEstimator.h" +#include "TrkToolInterfaces/ITrackSelectorTool.h" +#include "AthLinks/ElementLink.h" +#include "BeamSpotConditionsData/BeamSpotData.h" + +#include "xAODTracking/Vertex.h" +#include "xAODTracking/VertexContainer.h" +#include "xAODTracking/VertexAuxContainer.h" +#include "xAODTracking/TrackParticle.h" +#include "xAODTracking/TrackParticleContainer.h" +#include "StoreGate/WriteDecorHandle.h" + +#include <algorithm> + +namespace DerivationFramework { + + StatusCode FourMuonTool::initialize() { + + // retrieving vertex Fitter + if ( m_iVertexFitter.retrieve().isFailure() ) { + ATH_MSG_FATAL("Failed to retrieve tool " << m_iVertexFitter); + return StatusCode::FAILURE; + } else { + ATH_MSG_DEBUG("Retrieved tool " << m_iVertexFitter); + } + + // retrieving V0 Fitter + if ( m_iV0VertexFitter.retrieve().isFailure() ) { + ATH_MSG_FATAL("Failed to retrieve tool " << m_iV0VertexFitter); + return StatusCode::FAILURE; + } else { + ATH_MSG_DEBUG("Retrieved tool " << m_iV0VertexFitter); + } + + // Get the track selector tool from ToolSvc + if ( m_trkSelector.retrieve().isFailure() ) { + ATH_MSG_FATAL("Failed to retrieve tool " << m_trkSelector); + return StatusCode::FAILURE; + } else { + ATH_MSG_DEBUG("Retrieved tool " << m_trkSelector); + } + + // uploading the V0 tools + if ( m_V0Tools.retrieve().isFailure() ) { + ATH_MSG_FATAL("Failed to retrieve tool " << m_V0Tools); + return StatusCode::FAILURE; + } else { + ATH_MSG_DEBUG("Retrieved tool " << m_V0Tools); + } + + // Get the vertex point estimator tool from ToolSvc + if ( m_vertexEstimator.retrieve().isFailure() ) { + ATH_MSG_FATAL("Failed to retrieve tool " << m_vertexEstimator); + return 
StatusCode::FAILURE; + } else { + ATH_MSG_DEBUG("Retrieved tool " << m_vertexEstimator); + } + + // Get the beam spot service + CHECK( m_beamSpotKey.initialize() ); + + m_muonIndex = m_muonCollectionKey.key() + ".BPHY4MuonIndex"; + ATH_CHECK(m_muonIndex.initialize()); + ATH_MSG_DEBUG("Initialize successful"); + + return StatusCode::SUCCESS; + + } + + FourMuonTool::FourMuonTool(const std::string& t, const std::string& n, const IInterface* p) : AthAlgTool(t,n,p), + m_ptCut(0.0), + m_etaCut(0.0), + m_useV0Fitter(false), + m_muonCollectionKey("Muons"), + m_TrkParticleCollection("TrackParticleCandidate"), + m_iVertexFitter("Trk::TrkVKalVrtFitter"), + m_iV0VertexFitter("Trk::V0VertexFitter"), + m_V0Tools("Trk::V0Tools"), + m_trkSelector("InDet::TrackSelectorTool"), + m_vertexEstimator("InDet::VertexPointEstimator") + { + declareInterface<FourMuonTool>(this); + declareProperty("ptCut",m_ptCut); + declareProperty("etaCut",m_etaCut); + declareProperty("useV0Fitter",m_useV0Fitter); + declareProperty("muonCollectionKey",m_muonCollectionKey); + declareProperty("TrackParticleCollection",m_TrkParticleCollection); + declareProperty("TrkVertexFitterTool",m_iVertexFitter); + declareProperty("V0VertexFitterTool",m_iV0VertexFitter); + declareProperty("V0Tools",m_V0Tools); + declareProperty("TrackSelectorTool",m_trkSelector); + declareProperty("VertexPointEstimator",m_vertexEstimator); + } + + FourMuonTool::~FourMuonTool() { } + + //------------------------------------------------------------------------------------- + // Find the candidates + //------------------------------------------------------------------------------------- + StatusCode FourMuonTool::performSearch(xAOD::VertexContainer*& pairVxContainer, xAOD::VertexAuxContainer*& pairVxAuxContainer, + xAOD::VertexContainer*& quadVxContainer, xAOD::VertexAuxContainer*& quadVxAuxContainer, bool &selectEvent) const + { + ATH_MSG_DEBUG( "FourMuonTool::performSearch" ); + selectEvent = false; + + // pairs + pairVxContainer = new 
xAOD::VertexContainer; + pairVxAuxContainer = new xAOD::VertexAuxContainer; + pairVxContainer->setStore(pairVxAuxContainer); + // quads + quadVxContainer = new xAOD::VertexContainer; + quadVxAuxContainer = new xAOD::VertexAuxContainer; + quadVxContainer->setStore(quadVxAuxContainer); + + + // Get the muons from StoreGate + SG::ReadHandle<xAOD::MuonContainer> importedMuonCollection(m_muonCollectionKey); + ATH_CHECK(importedMuonCollection.isValid()); + ATH_MSG_DEBUG("Muon container size "<<importedMuonCollection->size()); + + // Get ID tracks + SG::ReadHandle<xAOD::TrackParticleContainer> importedTrackCollection(m_TrkParticleCollection); + ATH_CHECK(importedTrackCollection.isValid()); + ATH_MSG_DEBUG("ID TrackParticle container size "<< importedTrackCollection->size()); + + // Select the muons + std::vector<const xAOD::Muon*> theMuonsAfterSelection; + SG::WriteDecorHandle<xAOD::MuonContainer, int> muonDecorator(m_muonIndex); + unsigned int nCombMuons = 0; + unsigned int nSegmentTaggedMuons = 0; + + for (auto muItr=importedMuonCollection->begin(); muItr!=importedMuonCollection->end(); ++muItr) { + if ( *muItr == NULL ) continue; + muonDecorator(**muItr) = -1; // all muons must be decorated + if ( ((*muItr)->muonType() != xAOD::Muon::Combined ) && ((*muItr)->muonType() != xAOD::Muon::SegmentTagged ) ) continue; + if (!(*muItr)->inDetTrackParticleLink().isValid()) continue; // No muons without ID tracks + auto& link = (*muItr)->inDetTrackParticleLink(); + const xAOD::TrackParticle* muonTrk = *link; + if ( muonTrk==NULL) continue; + const xAOD::Vertex* vx = nullptr; + if ( !m_trkSelector->decision(*muonTrk, vx) ) continue; // all ID tracks must pass basic tracking cuts + if ( fabs(muonTrk->pt())<m_ptCut ) continue; // pt cut + if ( fabs(muonTrk->eta())>m_etaCut ) continue; // eta cut + if ( (*muItr)->muonType() == xAOD::Muon::Combined ) ++nCombMuons; + if ( (*muItr)->muonType() == xAOD::Muon::SegmentTagged ) ++nSegmentTaggedMuons; + 
theMuonsAfterSelection.push_back(*muItr); + } + unsigned int nSelectedMuons = theMuonsAfterSelection.size(); + ATH_MSG_DEBUG("Number of muons after selection: " << nSelectedMuons); + ATH_MSG_DEBUG("of which " << nCombMuons << " are combined"); + ATH_MSG_DEBUG("and " << nSegmentTaggedMuons << " are segment tagged"); + if ( (nSelectedMuons < 4) || (nCombMuons < 1) ) { + ATH_MSG_DEBUG("Muon criteria not met. Skipping event."); + return StatusCode::SUCCESS; + } + selectEvent = true; // if we got this far we should definitively accept the event + + // Decorators + SG::AuxElement::Decorator< std::string > indexDecorator("CombinationCode"); + SG::AuxElement::Decorator< std::string > chargeDecorator("ChargeCode"); + //SG::AuxElement::Decorator< double > acdcDecorator("ACminusDC"); + //SG::AuxElement::Decorator< double > ssdcDecorator("SSminusDC"); + + // Order by pT + std::sort(theMuonsAfterSelection.begin(), theMuonsAfterSelection.end(), [](const xAOD::Muon *a, const xAOD::Muon *b) { + return b->pt() < a->pt(); + }); + + // Decorate the selected muons (now pT ordered) with their index + unsigned int muonIndex(0); + for (auto selMuon : theMuonsAfterSelection) { + muonDecorator(*selMuon) = muonIndex; + ++muonIndex; + } + + // Quadruplet combinatorics + std::vector<Combination> quadruplets; + std::vector<Combination> pairs; + buildCombinations(theMuonsAfterSelection,pairs,quadruplets,nSelectedMuons); + if (quadruplets.size()==0) { + ATH_MSG_DEBUG("No acceptable quadruplets"); + return StatusCode::SUCCESS; + } + + // Get the beam spot (for the vertexing starting point) + SG::ReadCondHandle<InDet::BeamSpotData> beamSpotHandle { m_beamSpotKey }; + if(not beamSpotHandle.isValid()) ATH_MSG_ERROR("Cannot Retrieve " << m_beamSpotKey.key() ); + const Amg::Vector3D &beamSpot = beamSpotHandle->beamPos(); + + // fit pairs + ATH_MSG_DEBUG("Successful pairs....."); + for (std::vector<Combination>::iterator pairItr = pairs.begin(); pairItr!=pairs.end(); ++pairItr) { + std::vector<const 
xAOD::TrackParticle*> theTracks = (*pairItr).trackParticles("pair1"); + xAOD::Vertex* pairVxCandidate = fit(theTracks,importedTrackCollection.get(),beamSpot); // This line actually does the fitting and object making + if (pairVxCandidate != 0) { + // decorate the candidate with its codes + indexDecorator(*pairVxCandidate) = (*pairItr).combinationIndices(); + chargeDecorator(*pairVxCandidate) = (*pairItr).combinationCharges(); + // decorate the candidate with refitted tracks and muons via the BPhysHelper + xAOD::BPhysHelper helper(pairVxCandidate); + helper.setRefTrks(); + std::vector<const xAOD::Muon*> theStoredMuons; + theStoredMuons = (*pairItr).muons; + helper.setMuons(theStoredMuons,importedMuonCollection.get()); + // Retain the vertex + pairVxContainer->push_back(pairVxCandidate); + ATH_MSG_DEBUG("..... indices: " << (*pairItr).combinationIndices() << + " charges: " << (*pairItr).combinationCharges() << + " chi2: " << pairVxCandidate->chiSquared()); + } else { // fit failed + ATH_MSG_DEBUG("Fitter failed!"); + } + } + ATH_MSG_DEBUG("pairContainer size " << pairVxContainer->size()); + + // fit quadruplets + ATH_MSG_DEBUG("Successful quadruplets....."); + for (std::vector<Combination>::iterator quadItr = quadruplets.begin(); quadItr!=quadruplets.end(); ++quadItr) { + std::vector<const xAOD::TrackParticle*> theDCTracks; theDCTracks.clear(); + //std::vector<const xAOD::TrackParticle*> theACTracks; theACTracks.clear(); + //std::vector<const xAOD::TrackParticle*> theSSTracks; theSSTracks.clear(); + theDCTracks = (*quadItr).trackParticles("DC"); + //theACTracks = (*quadItr).trackParticles("AC"); + //theSSTracks = (*quadItr).trackParticles("SS"); + xAOD::Vertex* dcVxCandidate = fit(theDCTracks,importedTrackCollection.get(), beamSpot); + //xAOD::Vertex* acVxCandidate = fit(theACTracks,importedTrackCollection, beamSpot); + //xAOD::Vertex* ssVxCandidate = fit(theSSTracks,importedTrackCollection, beamSpot); + // Get the chi2 for each one + //double acChi2(0.0); + //double 
ssChi2(0.0); + //if (acVxCandidate != 0) {acChi2 = acVxCandidate->chiSquared();} + //if (ssVxCandidate != 0) {ssChi2 = ssVxCandidate->chiSquared();} + if (dcVxCandidate != 0) { + // decorate the candidate with its codes + indexDecorator(*dcVxCandidate) = (*quadItr).combinationIndices(); + chargeDecorator(*dcVxCandidate) = (*quadItr).combinationCharges(); + // Decorate the DC candidate with the differences between its chi2 and the other + double dcChi2 = dcVxCandidate->chiSquared(); + //acdcDecorator(*dcVxCandidate) = acChi2 - dcChi2; + //ssdcDecorator(*dcVxCandidate) = ssChi2 - dcChi2; + // decorate the candidate with refitted tracks and muons via the BPhysHelper + xAOD::BPhysHelper helper(dcVxCandidate); + helper.setRefTrks(); + const std::vector<const xAOD::Muon*> &theStoredMuons = (*quadItr).muons; + helper.setMuons(theStoredMuons,importedMuonCollection.get()); + // Retain the vertex + quadVxContainer->push_back(dcVxCandidate); + ATH_MSG_DEBUG("..... indices: " << (*quadItr).combinationIndices() << + " charges: " << (*quadItr).combinationCharges() << + " chi2(DC): " << dcChi2); + //" chi2(AC): " << acChi2 << + //" chi2(SS): " << ssChi2); + } else { // fit failed + ATH_MSG_DEBUG("Fitter failed!"); + } + } + ATH_MSG_DEBUG("quadruplet container size " << quadVxContainer->size()); + + return StatusCode::SUCCESS;; + } + + // ********************************************************************************* + + // --------------------------------------------------------------------------------- + // fit - does the fit + // --------------------------------------------------------------------------------- + + xAOD::Vertex* FourMuonTool::fit(const std::vector<const xAOD::TrackParticle*> &inputTracks, + const xAOD::TrackParticleContainer* importedTrackCollection, + const Amg::Vector3D &beamSpot) const { + + const Trk::TrkV0VertexFitter* concreteVertexFitter=0; + if (m_useV0Fitter) { + // making a concrete fitter for the V0Fitter + concreteVertexFitter = dynamic_cast<const 
Trk::TrkV0VertexFitter * >(&(*m_iV0VertexFitter)); + if(concreteVertexFitter == 0) { + ATH_MSG_FATAL("The vertex fitter passed is not a V0 Vertex Fitter"); + return NULL; + } + } + + //int sflag = 0; + //int errorcode = 0; + //Amg::Vector3D startingPoint = m_vertexEstimator->getCirclesIntersectionPoint(&aPerigee1,&aPerigee2,sflag,errorcode); + //startingPoint(0) = 0.0; startingPoint(1) = 0.0; startingPoint(2) = 0.0;} + //Trk::Vertex vertex(beamSpot); + + xAOD::Vertex* myVxCandidate = nullptr; + if (m_useV0Fitter) { + myVxCandidate = concreteVertexFitter->fit(inputTracks, beamSpot /*vertex startingPoint*/ ); + } else { + myVxCandidate = m_iVertexFitter->fit(inputTracks, beamSpot /*vertex startingPoint*/ ); + } + + if(myVxCandidate) BPhysPVTools::PrepareVertexLinks(myVxCandidate, importedTrackCollection); + + return myVxCandidate; + + } // End of fit method + + + // ********************************************************************************* + + // --------------------------------------------------------------------------------- + // getQuadIndices: forms up index lists + // --------------------------------------------------------------------------------- + + std::vector<std::vector<unsigned int> > FourMuonTool::getQuadIndices(unsigned int length) { + + std::vector<std::vector<unsigned int> > quadIndices = mFromN(4,length); + return(quadIndices); + } + + + // ********************************************************************************* + + // --------------------------------------------------------------------------------- + // mFromN and combinatorics + // --------------------------------------------------------------------------------- + std::vector<std::vector<unsigned int> > FourMuonTool::mFromN(unsigned int m, unsigned int N) { + + std::vector<std::vector<unsigned int> > allCombinations; + std::vector<unsigned int> mainList; + std::vector<unsigned int> combination; + for (unsigned int i=0; i<N; ++i) mainList.push_back(i); + 
combinatorics(0,m,combination,mainList,allCombinations); + return allCombinations; + } + + void FourMuonTool::combinatorics(unsigned int offset, + unsigned int k, + std::vector<unsigned int> &combination, + std::vector<unsigned int> &mainList, + std::vector<std::vector<unsigned int> > &allCombinations) { + if (k==0) { + allCombinations.push_back(combination); + return; + } + if (k>0) { + for (unsigned int i=offset; i<=mainList.size()-k; ++i) { + combination.push_back(mainList[i]); + combinatorics(i+1,k-1,combination,mainList,allCombinations); + combination.pop_back(); + } + } + } + + // --------------------------------------------------------------------------------- + // getPairIndices + // --------------------------------------------------------------------------------- + + std::vector<std::pair<unsigned int, unsigned int> > FourMuonTool::getPairIndices(unsigned int length){ + + std::vector<std::pair<unsigned int, unsigned int> > uniquePairs; + std::vector<std::vector<unsigned int> > doublets = mFromN(2,length); + for (std::vector<std::vector<unsigned int> >::iterator it=doublets.begin(); it!=doublets.end(); ++it) { + std::pair<unsigned int, unsigned int> tmpPair = std::make_pair((*it).at(0),(*it).at(1)); + uniquePairs.push_back(tmpPair); + } + + return(uniquePairs); + } + + + + // ********************************************************************************* + + // --------------------------------------------------------------------------------- + // buildCombinations: forms up the quadruplet of muons/tracks + // --------------------------------------------------------------------------------- + + void FourMuonTool::buildCombinations(const std::vector<const xAOD::Muon*> &muonsIn, + std::vector<Combination> &pairs, + std::vector<Combination> &quadruplets, + unsigned int nSelectedMuons) { + + std::vector<std::vector<unsigned int> > quadrupletIndices = getQuadIndices(nSelectedMuons); + std::vector<std::pair<unsigned int, unsigned int> > pairIndices = 
getPairIndices(nSelectedMuons); + + // Quadruplets + std::vector<std::vector<unsigned int> >::iterator quadItr; + for (quadItr=quadrupletIndices.begin(); quadItr!=quadrupletIndices.end(); ++quadItr) { + const std::vector<unsigned int> &quad = (*quadItr); + std::vector<const xAOD::Muon*> theMuons = {muonsIn[quad[0]],muonsIn[quad[1]],muonsIn[quad[2]],muonsIn[quad[3]]}; + if (!passesQuadSelection(theMuons)) continue; + Combination tmpQuad; + tmpQuad.muons = std::move(theMuons); + tmpQuad.quadIndices = quad; + quadruplets.emplace_back(std::move(tmpQuad)); + } + if (quadruplets.size() == 0) return; + + // pairs + std::vector<std::pair<unsigned int, unsigned int> >::iterator pairItr; + for (pairItr=pairIndices.begin(); pairItr!=pairIndices.end(); ++pairItr) { + std::pair<unsigned int, unsigned int> pair = (*pairItr); + Combination tmpPair; + std::vector<const xAOD::Muon*> theMuons = {muonsIn[pair.first],muonsIn[pair.second]}; + tmpPair.muons = std::move(theMuons); + tmpPair.pairIndices = pair; + pairs.emplace_back(std::move(tmpPair)); + } + + return; + + } + + // ********************************************************************************* + + // --------------------------------------------------------------------------------- + // passesQuadSelection: 4-muon selection + // --------------------------------------------------------------------------------- + + bool FourMuonTool::passesQuadSelection(const std::vector<const xAOD::Muon*> &muons) { + bool accept(false); + bool charges(true); + bool quality(false); + //unsigned int sumCharges = abs(mu0->charge() + mu1->charge() + mu2->charge() + mu3->charge()); + //if (sumCharges<4) charges = true; + if (( muons.at(0)->muonType() == xAOD::Muon::Combined ) || + ( muons.at(1)->muonType() == xAOD::Muon::Combined ) || + ( muons.at(2)->muonType() == xAOD::Muon::Combined ) || + ( muons.at(3)->muonType() == xAOD::Muon::Combined ) + ) quality = true; + if (charges && quality) accept = true; + return accept; + } + +} diff --git 
a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/JpsiPlusDpstCascade.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/JpsiPlusDpstCascade.cxx new file mode 100644 index 0000000000000000000000000000000000000000..04f187bf3d8cce3aced6599e20e6fea15357756d --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/JpsiPlusDpstCascade.cxx @@ -0,0 +1,724 @@ +/* + Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration +*/ +///////////////////////////////////////////////////////////////// +// JpsiPlusDpstCascade.cxx, (c) ATLAS Detector software +///////////////////////////////////////////////////////////////// +#include "DerivationFrameworkBPhys/JpsiPlusDpstCascade.h" +#include "TrkVertexFitterInterfaces/IVertexFitter.h" +#include "TrkVKalVrtFitter/TrkVKalVrtFitter.h" +#include "TrkVertexAnalysisUtils/V0Tools.h" +#include "GaudiKernel/IPartPropSvc.h" +#include "DerivationFrameworkBPhys/CascadeTools.h" +#include "DerivationFrameworkBPhys/BPhysPVCascadeTools.h" +#include "xAODTracking/VertexAuxContainer.h" +#include "xAODBPhys/BPhysHypoHelper.h" +#include <algorithm> +#include "xAODTracking/VertexContainer.h" +#include "DerivationFrameworkBPhys/LocalVector.h" +#include "HepPDT/ParticleDataTable.hh" + +namespace DerivationFramework { + typedef ElementLink<xAOD::VertexContainer> VertexLink; + typedef std::vector<VertexLink> VertexLinkVector; + typedef std::vector<const xAOD::TrackParticle*> TrackBag; + + + StatusCode JpsiPlusDpstCascade::initialize() { + + // retrieving vertex Fitter + ATH_CHECK( m_iVertexFitter.retrieve()); + + // retrieving the V0 tools + ATH_CHECK( m_V0Tools.retrieve()); + + // retrieving the Cascade tools + ATH_CHECK( m_CascadeTools.retrieve()); + + // Get the beam spot service + ATH_CHECK(m_beamSpotKey.initialize()); + + IPartPropSvc* partPropSvc = nullptr; + ATH_CHECK( service("PartPropSvc", partPropSvc, true) ); + auto pdt = partPropSvc->PDT(); + + // retrieve particle 
masses + if(m_mass_jpsi < 0. ) m_mass_jpsi = BPhysPVCascadeTools::getParticleMass(pdt, PDG::J_psi); + if(m_vtx0MassHypo < 0.) m_vtx0MassHypo = BPhysPVCascadeTools::getParticleMass(pdt, PDG::B_c_plus); + if(m_vtx1MassHypo < 0.) m_vtx1MassHypo = BPhysPVCascadeTools::getParticleMass(pdt, PDG::D0); + + if(m_vtx0Daug1MassHypo < 0.) m_vtx0Daug1MassHypo = BPhysPVCascadeTools::getParticleMass(pdt, PDG::mu_minus); + if(m_vtx0Daug2MassHypo < 0.) m_vtx0Daug2MassHypo = BPhysPVCascadeTools::getParticleMass(pdt, PDG::mu_minus); + if(m_vtx0Daug3MassHypo < 0.) m_vtx0Daug3MassHypo = BPhysPVCascadeTools::getParticleMass(pdt, PDG::pi_plus); + if(m_vtx1Daug1MassHypo < 0.) m_vtx1Daug1MassHypo = BPhysPVCascadeTools::getParticleMass(pdt, PDG::pi_plus); + if(m_vtx1Daug2MassHypo < 0.) m_vtx1Daug2MassHypo = BPhysPVCascadeTools::getParticleMass(pdt, PDG::K_plus); + + return StatusCode::SUCCESS; + } + + + StatusCode JpsiPlusDpstCascade::addBranches() const + { + std::vector<Trk::VxCascadeInfo*> cascadeinfoContainer; + constexpr int topoN = 2; + std::array<xAOD::VertexContainer*, topoN> Vtxwritehandles; + std::array<xAOD::VertexAuxContainer*, topoN> Vtxwritehandlesaux; + if(m_cascadeOutputsKeys.size() !=topoN) { ATH_MSG_FATAL("Incorrect number of VtxContainers"); return StatusCode::FAILURE; } + + for(int i =0; i<topoN;i++){ + Vtxwritehandles[i] = new xAOD::VertexContainer(); + Vtxwritehandlesaux[i] = new xAOD::VertexAuxContainer(); + Vtxwritehandles[i]->setStore(Vtxwritehandlesaux[i]); + ATH_CHECK(evtStore()->record(Vtxwritehandles[i] , m_cascadeOutputsKeys[i] )); + ATH_CHECK(evtStore()->record(Vtxwritehandlesaux[i], m_cascadeOutputsKeys[i] + "Aux.")); + } + + //---------------------------------------------------- + // retrieve primary vertices + //---------------------------------------------------- + const xAOD::Vertex * primaryVertex(nullptr); + const xAOD::VertexContainer *pvContainer(nullptr); + ATH_CHECK(evtStore()->retrieve(pvContainer, m_VxPrimaryCandidateName)); + ATH_MSG_DEBUG("Found 
" << m_VxPrimaryCandidateName << " in StoreGate!"); + + if (pvContainer->size()==0){ + ATH_MSG_WARNING("You have no primary vertices: " << pvContainer->size()); + return StatusCode::RECOVERABLE; + } else { + primaryVertex = (*pvContainer)[0]; + } + + //---------------------------------------------------- + // Try to retrieve refitted primary vertices + //---------------------------------------------------- + xAOD::VertexContainer* refPvContainer = nullptr; + xAOD::VertexAuxContainer* refPvAuxContainer = nullptr; + if (m_refitPV) { + if (evtStore()->contains<xAOD::VertexContainer>(m_refPVContainerName)) { + // refitted PV container exists. Get it from the store gate + ATH_CHECK(evtStore()->retrieve(refPvContainer , m_refPVContainerName )); + ATH_CHECK(evtStore()->retrieve(refPvAuxContainer, m_refPVContainerName + "Aux.")); + } else { + // refitted PV container does not exist. Create a new one. + refPvContainer = new xAOD::VertexContainer; + refPvAuxContainer = new xAOD::VertexAuxContainer; + refPvContainer->setStore(refPvAuxContainer); + ATH_CHECK(evtStore()->record(refPvContainer , m_refPVContainerName)); + ATH_CHECK(evtStore()->record(refPvAuxContainer, m_refPVContainerName+"Aux.")); + } + } + + ATH_CHECK(performSearch(&cascadeinfoContainer)); + + SG::ReadCondHandle<InDet::BeamSpotData> beamSpotHandle { m_beamSpotKey }; + if(not beamSpotHandle.isValid()) ATH_MSG_ERROR("Cannot Retrieve " << m_beamSpotKey.key() ); + BPhysPVCascadeTools helper(&(*m_CascadeTools), beamSpotHandle.cptr()); + helper.SetMinNTracksInPV(m_PV_minNTracks); + + // Decorators for the main vertex: chi2, ndf, pt and pt error, plus the D0 vertex variables + SG::AuxElement::Decorator<VertexLinkVector> CascadeLinksDecor("CascadeVertexLinks"); + SG::AuxElement::Decorator<VertexLinkVector> JpsipiLinksDecor("JpsipiVertexLinks"); + SG::AuxElement::Decorator<VertexLinkVector> D0LinksDecor("D0VertexLinks"); + SG::AuxElement::Decorator<float> chi2_decor("ChiSquared"); + SG::AuxElement::Decorator<float> 
ndof_decor("NumberDoF"); + SG::AuxElement::Decorator<float> Pt_decor("Pt"); + SG::AuxElement::Decorator<float> PtErr_decor("PtErr"); + SG::AuxElement::Decorator<float> Mass_svdecor("D0_mass"); + SG::AuxElement::Decorator<float> MassErr_svdecor("D0_massErr"); + SG::AuxElement::Decorator<float> Pt_svdecor("D0_Pt"); + SG::AuxElement::Decorator<float> PtErr_svdecor("D0_PtErr"); + SG::AuxElement::Decorator<float> Lxy_svdecor("D0_Lxy"); + SG::AuxElement::Decorator<float> LxyErr_svdecor("D0_LxyErr"); + SG::AuxElement::Decorator<float> Tau_svdecor("D0_Tau"); + SG::AuxElement::Decorator<float> TauErr_svdecor("D0_TauErr"); + + SG::AuxElement::Decorator<float> MassMumu_decor("Mumu_mass"); + SG::AuxElement::Decorator<float> MassKpi_svdecor("Kpi_mass"); + SG::AuxElement::Decorator<float> MassJpsi_decor("Jpsi_mass"); + SG::AuxElement::Decorator<float> MassPiD0_decor("PiD0_mass"); + + ATH_MSG_DEBUG("cascadeinfoContainer size " << cascadeinfoContainer.size()); + + // Get Jpsi+pi container and identify the input Jpsi+pi + const xAOD::VertexContainer *jpsipiContainer(nullptr); + ATH_CHECK(evtStore()->retrieve(jpsipiContainer , m_vertexContainerKey )); + const xAOD::VertexContainer *d0Container(nullptr); + ATH_CHECK(evtStore()->retrieve(d0Container , m_vertexD0ContainerKey )); + + for (Trk::VxCascadeInfo* x : cascadeinfoContainer) { + if(x==nullptr) ATH_MSG_ERROR("cascadeinfoContainer is null"); + + // the cascade fitter returns: + // std::vector<xAOD::Vertex*>, each xAOD::Vertex contains the refitted track parameters (perigee at the vertex position) + // vertices[iv] the links to the original TPs and a covariance of size 3+5*NTRK; the chi2 of the total fit + // is split between the cascade vertices as per track contribution + // std::vector< std::vector<TLorentzVector> >, each std::vector<TLorentzVector> contains the refitted momenta (TLorentzVector) + // momenta[iv][...] 
of all tracks in the corresponding vertex, including any pseudotracks (from cascade vertices) + // originating in this vertex; the masses are as assigned in the cascade fit + // std::vector<Amg::MatrixX>, the corresponding covariance matrices in momentum space + // covariance[iv] + // int nDoF, double Chi2 + // + // the invariant mass, pt, lifetime etc. errors should be calculated using the covariance matrices in momentum space as these + // take into account the full track-track and track-vertex correlations + // + // in the case of Jpsi+V0: vertices[0] is the V0 vertex, vertices[1] is the B/Lambda_b(bar) vertex, containing the 2 Jpsi tracks. + // The covariance terms between the two vertices are not stored. In momentum space momenta[0] contains the 2 V0 tracks, + // their momenta add up to the momentum of the 3rd track in momenta[1], the first two being the Jpsi tracks + + const std::vector<xAOD::Vertex*> &cascadeVertices = x->vertices(); + if(cascadeVertices.size()!=topoN) + ATH_MSG_ERROR("Incorrect number of vertices"); + if(cascadeVertices[0] == nullptr || cascadeVertices[1] == nullptr) ATH_MSG_ERROR("Error null vertex"); + // Keep vertices (bear in mind that they come in reverse order!) 
+ for(int i =0;i<topoN;i++) Vtxwritehandles[i]->push_back(cascadeVertices[i]); + + x->setSVOwnership(false); // Prevent Container from deleting vertices + const auto mainVertex = cascadeVertices[1]; // this is the B_c+/- vertex + const std::vector< std::vector<TLorentzVector> > &moms = x->getParticleMoms(); + + // Set links to cascade vertices + std::vector<const xAOD::Vertex*> verticestoLink; + verticestoLink.push_back(cascadeVertices[0]); + if(Vtxwritehandles[1] == nullptr) ATH_MSG_ERROR("Vtxwritehandles[1] is null"); + if(!BPhysPVCascadeTools::LinkVertices(CascadeLinksDecor, verticestoLink, Vtxwritehandles[0], cascadeVertices[1])) + ATH_MSG_ERROR("Error decorating with cascade vertices"); + + // Identify the input Jpsi+pi + const xAOD::Vertex* jpsipiVertex = BPhysPVCascadeTools::FindVertex<3>(jpsipiContainer, cascadeVertices[1]); + ATH_MSG_DEBUG("1 pt Jpsi+pi tracks " << cascadeVertices[1]->trackParticle(0)->pt() << ", " << cascadeVertices[1]->trackParticle(1)->pt() << ", " << cascadeVertices[1]->trackParticle(2)->pt()); + if (jpsipiVertex) ATH_MSG_DEBUG("2 pt Jpsi+pi tracks " << jpsipiVertex->trackParticle(0)->pt() << ", " << jpsipiVertex->trackParticle(1)->pt() << ", " << jpsipiVertex->trackParticle(2)->pt()); + + // Identify the input D0 + const xAOD::Vertex* d0Vertex = BPhysPVCascadeTools::FindVertex<2>(d0Container, cascadeVertices[0]);; + ATH_MSG_DEBUG("1 pt D0 tracks " << cascadeVertices[0]->trackParticle(0)->pt() << ", " << cascadeVertices[0]->trackParticle(1)->pt()); + if (d0Vertex) ATH_MSG_DEBUG("2 pt D0 tracks " << d0Vertex->trackParticle(0)->pt() << ", " << d0Vertex->trackParticle(1)->pt()); + + // Set links to input vertices + std::vector<const xAOD::Vertex*> jpsipiVerticestoLink; + if (jpsipiVertex) jpsipiVerticestoLink.push_back(jpsipiVertex); + else ATH_MSG_WARNING("Could not find linking Jpsi+pi"); + if(!BPhysPVCascadeTools::LinkVertices(JpsipiLinksDecor, jpsipiVerticestoLink, jpsipiContainer, cascadeVertices[1])) + ATH_MSG_ERROR("Error 
decorating with Jpsi+pi vertices"); + + std::vector<const xAOD::Vertex*> d0VerticestoLink; + if (d0Vertex) d0VerticestoLink.push_back(d0Vertex); + else ATH_MSG_WARNING("Could not find linking D0"); + if(!BPhysPVCascadeTools::LinkVertices(D0LinksDecor, d0VerticestoLink, d0Container, cascadeVertices[1])) + ATH_MSG_ERROR("Error decorating with D0 vertices"); + + bool tagD0(true); + if (jpsipiVertex){ + if(abs(m_Dx_pid)==421 && (jpsipiVertex->trackParticle(2)->charge()==-1)) tagD0 = false; + } + + double mass_b = m_vtx0MassHypo; + double mass_d0 = m_vtx1MassHypo; + std::vector<double> massesJpsipi; + massesJpsipi.push_back(m_vtx0Daug1MassHypo); + massesJpsipi.push_back(m_vtx0Daug2MassHypo); + massesJpsipi.push_back(m_vtx0Daug3MassHypo); + std::vector<double> massesD0; + if(tagD0){ + massesD0.push_back(m_vtx1Daug1MassHypo); + massesD0.push_back(m_vtx1Daug2MassHypo); + }else{ // Change the oreder of masses for D*-->D0bar pi-, D0bar->K+pi- + massesD0.push_back(m_vtx1Daug2MassHypo); + massesD0.push_back(m_vtx1Daug1MassHypo); + } + std::vector<double> Masses; + Masses.push_back(m_vtx0Daug1MassHypo); + Masses.push_back(m_vtx0Daug2MassHypo); + Masses.push_back(m_vtx0Daug3MassHypo); + Masses.push_back(m_vtx1MassHypo); + + // loop over candidates -- Don't apply PV_minNTracks requirement here + // because it may result in exclusion of the high-pt PV. + // get good PVs + + xAOD::BPhysHypoHelper vtx(m_hypoName, mainVertex); + + // Get refitted track momenta from all vertices, charged tracks only + BPhysPVCascadeTools::SetVectorInfo(vtx, x); + + // Decorate main vertex + // + // 1.a) mass, mass error + BPHYS_CHECK( vtx.setMass(m_CascadeTools->invariantMass(moms[1])) ); + BPHYS_CHECK( vtx.setMassErr(m_CascadeTools->invariantMassError(moms[1],x->getCovariance()[1])) ); + // 1.b) pt and pT error (the default pt of mainVertex is != the pt of the full cascade fit!) 
+ Pt_decor(*mainVertex) = m_CascadeTools->pT(moms[1]); + PtErr_decor(*mainVertex) = m_CascadeTools->pTError(moms[1],x->getCovariance()[1]); + // 1.c) chi2 and ndof (the default chi2 of mainVertex is != the chi2 of the full cascade fit!) + chi2_decor(*mainVertex) = x->fitChi2(); + ndof_decor(*mainVertex) = x->nDoF(); + + float massMumu = 0.; + if (jpsipiVertex) { + TLorentzVector p4_mu1, p4_mu2; + p4_mu1.SetPtEtaPhiM(jpsipiVertex->trackParticle(0)->pt(), + jpsipiVertex->trackParticle(0)->eta(), + jpsipiVertex->trackParticle(0)->phi(), m_vtx0Daug1MassHypo); + p4_mu2.SetPtEtaPhiM(jpsipiVertex->trackParticle(1)->pt(), + jpsipiVertex->trackParticle(1)->eta(), + jpsipiVertex->trackParticle(1)->phi(), m_vtx0Daug2MassHypo); + massMumu = (p4_mu1 + p4_mu2).M(); + } + MassMumu_decor(*mainVertex) = massMumu; + + float massKpi = 0.; + if (d0Vertex) { + TLorentzVector p4_ka, p4_pi; + if(tagD0){ + p4_pi.SetPtEtaPhiM(d0Vertex->trackParticle(0)->pt(), + d0Vertex->trackParticle(0)->eta(), + d0Vertex->trackParticle(0)->phi(), m_vtx1Daug1MassHypo); + p4_ka.SetPtEtaPhiM(d0Vertex->trackParticle(1)->pt(), + d0Vertex->trackParticle(1)->eta(), + d0Vertex->trackParticle(1)->phi(), m_vtx1Daug2MassHypo); + }else{ // Change the oreder of masses for D*-->D0bar pi-, D0bar->K+pi- + p4_pi.SetPtEtaPhiM(d0Vertex->trackParticle(1)->pt(), + d0Vertex->trackParticle(1)->eta(), + d0Vertex->trackParticle(1)->phi(), m_vtx1Daug1MassHypo); + p4_ka.SetPtEtaPhiM(d0Vertex->trackParticle(0)->pt(), + d0Vertex->trackParticle(0)->eta(), + d0Vertex->trackParticle(0)->phi(), m_vtx1Daug2MassHypo); + } + massKpi = (p4_ka + p4_pi).M(); + } + MassKpi_svdecor(*mainVertex) = massKpi; + MassJpsi_decor(*mainVertex) = (moms[1][0] + moms[1][1]).M(); + MassPiD0_decor(*mainVertex) = (moms[1][2] + moms[1][3]).M(); + + + ATH_CHECK(helper.FillCandwithRefittedVertices(m_refitPV, pvContainer, + refPvContainer, &(*m_pvRefitter), m_PV_max, m_DoVertexType, x, 1, mass_b, vtx)); + + // 4) decorate the main vertex with D0 vertex mass, pt, 
lifetime and lxy values (plus errors) + // D0 points to the main vertex, so lifetime and lxy are w.r.t the main vertex + Mass_svdecor(*mainVertex) = m_CascadeTools->invariantMass(moms[0]); + MassErr_svdecor(*mainVertex) = m_CascadeTools->invariantMassError(moms[0],x->getCovariance()[0]); + Pt_svdecor(*mainVertex) = m_CascadeTools->pT(moms[0]); + PtErr_svdecor(*mainVertex) = m_CascadeTools->pTError(moms[0],x->getCovariance()[0]); + Lxy_svdecor(*mainVertex) = m_CascadeTools->lxy(moms[0],cascadeVertices[0],cascadeVertices[1]); + LxyErr_svdecor(*mainVertex) = m_CascadeTools->lxyError(moms[0],x->getCovariance()[0],cascadeVertices[0],cascadeVertices[1]); + Tau_svdecor(*mainVertex) = m_CascadeTools->tau(moms[0],cascadeVertices[0],cascadeVertices[1]); + TauErr_svdecor(*mainVertex) = m_CascadeTools->tauError(moms[0],x->getCovariance()[0],cascadeVertices[0],cascadeVertices[1]); + + // Some checks in DEBUG mode + ATH_MSG_DEBUG("chi2 " << x->fitChi2() + << " chi2_1 " << m_V0Tools->chisq(cascadeVertices[0]) + << " chi2_2 " << m_V0Tools->chisq(cascadeVertices[1]) + << " vprob " << m_CascadeTools->vertexProbability(x->nDoF(),x->fitChi2())); + ATH_MSG_DEBUG("ndf " << x->nDoF() << " ndf_1 " << m_V0Tools->ndof(cascadeVertices[0]) << " ndf_2 " << m_V0Tools->ndof(cascadeVertices[1])); + ATH_MSG_DEBUG("V0Tools mass_d0 " << m_V0Tools->invariantMass(cascadeVertices[0],massesD0) + << " error " << m_V0Tools->invariantMassError(cascadeVertices[0],massesD0) + << " mass_J " << m_V0Tools->invariantMass(cascadeVertices[1],massesJpsipi) + << " error " << m_V0Tools->invariantMassError(cascadeVertices[1],massesJpsipi)); + // masses and errors, using track masses assigned in the fit + double Mass_B = m_CascadeTools->invariantMass(moms[1]); + double Mass_D0 = m_CascadeTools->invariantMass(moms[0]); + double Mass_B_err = m_CascadeTools->invariantMassError(moms[1],x->getCovariance()[1]); + double Mass_D0_err = m_CascadeTools->invariantMassError(moms[0],x->getCovariance()[0]); + ATH_MSG_DEBUG("Mass_B " 
<< Mass_B << " Mass_D0 " << Mass_D0); + ATH_MSG_DEBUG("Mass_B_err " << Mass_B_err << " Mass_D0_err " << Mass_D0_err); + double mprob_B = m_CascadeTools->massProbability(mass_b,Mass_B,Mass_B_err); + double mprob_D0 = m_CascadeTools->massProbability(mass_d0,Mass_D0,Mass_D0_err); + ATH_MSG_DEBUG("mprob_B " << mprob_B << " mprob_D0 " << mprob_D0); + // masses and errors, assigning user defined track masses + ATH_MSG_DEBUG("Mass_b " << m_CascadeTools->invariantMass(moms[1],Masses) + << " Mass_d0 " << m_CascadeTools->invariantMass(moms[0],massesD0)); + ATH_MSG_DEBUG("Mass_b_err " << m_CascadeTools->invariantMassError(moms[1],x->getCovariance()[1],Masses) + << " Mass_d0_err " << m_CascadeTools->invariantMassError(moms[0],x->getCovariance()[0],massesD0)); + ATH_MSG_DEBUG("pt_b " << m_CascadeTools->pT(moms[1]) + << " pt_d " << m_CascadeTools->pT(moms[0]) + << " pt_d0 " << m_V0Tools->pT(cascadeVertices[0])); + ATH_MSG_DEBUG("ptErr_b " << m_CascadeTools->pTError(moms[1],x->getCovariance()[1]) + << " ptErr_d " << m_CascadeTools->pTError(moms[0],x->getCovariance()[0]) + << " ptErr_d0 " << m_V0Tools->pTError(cascadeVertices[0])); + ATH_MSG_DEBUG("lxy_B " << m_V0Tools->lxy(cascadeVertices[1],primaryVertex) << " lxy_D " << m_V0Tools->lxy(cascadeVertices[0],cascadeVertices[1])); + ATH_MSG_DEBUG("lxy_b " << m_CascadeTools->lxy(moms[1],cascadeVertices[1],primaryVertex) << " lxy_d " << m_CascadeTools->lxy(moms[0],cascadeVertices[0],cascadeVertices[1])); + ATH_MSG_DEBUG("lxyErr_b " << m_CascadeTools->lxyError(moms[1],x->getCovariance()[1],cascadeVertices[1],primaryVertex) + << " lxyErr_d " << m_CascadeTools->lxyError(moms[0],x->getCovariance()[0],cascadeVertices[0],cascadeVertices[1]) + << " lxyErr_d0 " << m_V0Tools->lxyError(cascadeVertices[0],cascadeVertices[1])); + ATH_MSG_DEBUG("tau_B " << m_CascadeTools->tau(moms[1],cascadeVertices[1],primaryVertex,mass_b) + << " tau_d0 " << m_V0Tools->tau(cascadeVertices[0],cascadeVertices[1],massesD0)); + ATH_MSG_DEBUG("tau_b " << 
m_CascadeTools->tau(moms[1],cascadeVertices[1],primaryVertex) + << " tau_d " << m_CascadeTools->tau(moms[0],cascadeVertices[0],cascadeVertices[1]) + << " tau_D " << m_CascadeTools->tau(moms[0],cascadeVertices[0],cascadeVertices[1],mass_d0)); + ATH_MSG_DEBUG("tauErr_b " << m_CascadeTools->tauError(moms[1],x->getCovariance()[1],cascadeVertices[1],primaryVertex) + << " tauErr_d " << m_CascadeTools->tauError(moms[0],x->getCovariance()[0],cascadeVertices[0],cascadeVertices[1]) + << " tauErr_d0 " << m_V0Tools->tauError(cascadeVertices[0],cascadeVertices[1],massesD0)); + ATH_MSG_DEBUG("TauErr_b " << m_CascadeTools->tauError(moms[1],x->getCovariance()[1],cascadeVertices[1],primaryVertex,mass_b) + << " TauErr_d " << m_CascadeTools->tauError(moms[0],x->getCovariance()[0],cascadeVertices[0],cascadeVertices[1],mass_d0) + << " TauErr_d0 " << m_V0Tools->tauError(cascadeVertices[0],cascadeVertices[1],massesD0,mass_d0)); + + ATH_MSG_DEBUG("CascadeTools main vert wrt PV " << " CascadeTools SV " << " V0Tools SV"); + ATH_MSG_DEBUG("a0z " << m_CascadeTools->a0z(moms[1],cascadeVertices[1],primaryVertex) + << ", " << m_CascadeTools->a0z(moms[0],cascadeVertices[0],cascadeVertices[1]) + << ", " << m_V0Tools->a0z(cascadeVertices[0],cascadeVertices[1])); + ATH_MSG_DEBUG("a0zErr " << m_CascadeTools->a0zError(moms[1],x->getCovariance()[1],cascadeVertices[1],primaryVertex) + << ", " << m_CascadeTools->a0zError(moms[0],x->getCovariance()[0],cascadeVertices[0],cascadeVertices[1]) + << ", " << m_V0Tools->a0zError(cascadeVertices[0],cascadeVertices[1])); + ATH_MSG_DEBUG("a0xy " << m_CascadeTools->a0xy(moms[1],cascadeVertices[1],primaryVertex) + << ", " << m_CascadeTools->a0xy(moms[0],cascadeVertices[0],cascadeVertices[1]) + << ", " << m_V0Tools->a0xy(cascadeVertices[0],cascadeVertices[1])); + ATH_MSG_DEBUG("a0xyErr " << m_CascadeTools->a0xyError(moms[1],x->getCovariance()[1],cascadeVertices[1],primaryVertex) + << ", " << 
m_CascadeTools->a0xyError(moms[0],x->getCovariance()[0],cascadeVertices[0],cascadeVertices[1]) + << ", " << m_V0Tools->a0xyError(cascadeVertices[0],cascadeVertices[1])); + ATH_MSG_DEBUG("a0 " << m_CascadeTools->a0(moms[1],cascadeVertices[1],primaryVertex) + << ", " << m_CascadeTools->a0(moms[0],cascadeVertices[0],cascadeVertices[1]) + << ", " << m_V0Tools->a0(cascadeVertices[0],cascadeVertices[1])); + ATH_MSG_DEBUG("a0Err " << m_CascadeTools->a0Error(moms[1],x->getCovariance()[1],cascadeVertices[1],primaryVertex) + << ", " << m_CascadeTools->a0Error(moms[0],x->getCovariance()[0],cascadeVertices[0],cascadeVertices[1]) + << ", " << m_V0Tools->a0Error(cascadeVertices[0],cascadeVertices[1])); + ATH_MSG_DEBUG("x0 " << m_V0Tools->vtx(cascadeVertices[0]).x() << " y0 " << m_V0Tools->vtx(cascadeVertices[0]).y() << " z0 " << m_V0Tools->vtx(cascadeVertices[0]).z()); + ATH_MSG_DEBUG("x1 " << m_V0Tools->vtx(cascadeVertices[1]).x() << " y1 " << m_V0Tools->vtx(cascadeVertices[1]).y() << " z1 " << m_V0Tools->vtx(cascadeVertices[1]).z()); + ATH_MSG_DEBUG("X0 " << primaryVertex->x() << " Y0 " << primaryVertex->y() << " Z0 " << primaryVertex->z()); + ATH_MSG_DEBUG("rxy0 " << m_V0Tools->rxy(cascadeVertices[0]) << " rxyErr0 " << m_V0Tools->rxyError(cascadeVertices[0])); + ATH_MSG_DEBUG("rxy1 " << m_V0Tools->rxy(cascadeVertices[1]) << " rxyErr1 " << m_V0Tools->rxyError(cascadeVertices[1])); + ATH_MSG_DEBUG("Rxy0 wrt PV " << m_V0Tools->rxy(cascadeVertices[0],primaryVertex) << " RxyErr0 wrt PV " << m_V0Tools->rxyError(cascadeVertices[0],primaryVertex)); + ATH_MSG_DEBUG("Rxy1 wrt PV " << m_V0Tools->rxy(cascadeVertices[1],primaryVertex) << " RxyErr1 wrt PV " << m_V0Tools->rxyError(cascadeVertices[1],primaryVertex)); + ATH_MSG_DEBUG("number of covariance matrices " << (x->getCovariance()).size()); + } // loop over cascadeinfoContainer + + // Deleting cascadeinfo since this won't be stored. 
+ // Vertices have been kept in m_cascadeOutputs and should be owned by their container + for (auto x : cascadeinfoContainer) delete x; + + return StatusCode::SUCCESS; + } + + + JpsiPlusDpstCascade::JpsiPlusDpstCascade(const std::string& t, const std::string& n, const IInterface* p) : AthAlgTool(t,n,p), + m_vertexContainerKey(""), + m_vertexD0ContainerKey(""), + m_cascadeOutputsKeys{ "JpsiPlusDpstCascadeVtx1", "JpsiPlusDpstCascadeVtx2" }, + m_VxPrimaryCandidateName("PrimaryVertices"), + m_jpsiMassLower(0.0), + m_jpsiMassUpper(10000.0), + m_jpsipiMassLower(0.0), + m_jpsipiMassUpper(10000.0), + m_D0MassLower(0.0), + m_D0MassUpper(10000.0), + m_DstMassLower(0.0), + m_DstMassUpper(10000.0), + m_MassLower(0.0), + m_MassUpper(20000.0), + m_vtx0MassHypo(-1), + m_vtx1MassHypo(-1), + m_vtx0Daug1MassHypo(-1), + m_vtx0Daug2MassHypo(-1), + m_vtx0Daug3MassHypo(-1), + m_vtx1Daug1MassHypo(-1), + m_vtx1Daug2MassHypo(-1), + m_mass_jpsi(-1), + m_Dx_pid(421), + m_constrD0(true), + m_constrJpsi(true), + m_chi2cut(-1.0), + m_iVertexFitter("Trk::TrkVKalVrtFitter"), + m_pvRefitter("Analysis::PrimaryVertexRefitter"), + m_V0Tools("Trk::V0Tools"), + m_CascadeTools("DerivationFramework::CascadeTools") + { + declareProperty("JpsipiVertices", m_vertexContainerKey); + declareProperty("D0Vertices", m_vertexD0ContainerKey); + declareProperty("VxPrimaryCandidateName", m_VxPrimaryCandidateName); + declareProperty("RefPVContainerName", m_refPVContainerName = "RefittedPrimaryVertices"); + declareProperty("JpsiMassLowerCut", m_jpsiMassLower); + declareProperty("JpsiMassUpperCut", m_jpsiMassUpper); + declareProperty("JpsipiMassLowerCut", m_jpsipiMassLower); + declareProperty("JpsipiMassUpperCut", m_jpsipiMassUpper); + declareProperty("D0MassLowerCut", m_D0MassLower); + declareProperty("D0MassUpperCut", m_D0MassUpper); + declareProperty("DstMassLowerCut", m_DstMassLower); + declareProperty("DstMassUpperCut", m_DstMassUpper); + declareProperty("MassLowerCut", m_MassLower); + 
declareProperty("MassUpperCut", m_MassUpper); + declareProperty("HypothesisName", m_hypoName = "Bc"); + declareProperty("Vtx0MassHypo", m_vtx0MassHypo); + declareProperty("Vtx1MassHypo", m_vtx1MassHypo); + declareProperty("Vtx0Daug1MassHypo", m_vtx0Daug1MassHypo); + declareProperty("Vtx0Daug2MassHypo", m_vtx0Daug2MassHypo); + declareProperty("Vtx0Daug3MassHypo", m_vtx0Daug3MassHypo); + declareProperty("Vtx1Daug1MassHypo", m_vtx1Daug1MassHypo); + declareProperty("Vtx1Daug2MassHypo", m_vtx1Daug2MassHypo); + declareProperty("JpsiMass", m_mass_jpsi); + declareProperty("DxHypothesis", m_Dx_pid); + declareProperty("ApplyD0MassConstraint", m_constrD0); + declareProperty("ApplyJpsiMassConstraint", m_constrJpsi); + declareProperty("Chi2Cut", m_chi2cut); + declareProperty("RefitPV", m_refitPV = true); + declareProperty("MaxnPV", m_PV_max = 999); + declareProperty("MinNTracksInPV", m_PV_minNTracks = 0); + declareProperty("DoVertexType", m_DoVertexType = 7); + declareProperty("TrkVertexFitterTool", m_iVertexFitter); + declareProperty("PVRefitter", m_pvRefitter); + declareProperty("V0Tools", m_V0Tools); + declareProperty("CascadeTools", m_CascadeTools); + declareProperty("CascadeVertexCollections", m_cascadeOutputsKeys); + } + + JpsiPlusDpstCascade::~JpsiPlusDpstCascade(){ } + + StatusCode JpsiPlusDpstCascade::performSearch(std::vector<Trk::VxCascadeInfo*> *cascadeinfoContainer) const + { + ATH_MSG_DEBUG( "JpsiPlusDpstCascade::performSearch" ); + assert(cascadeinfoContainer!=nullptr); + + // Get TrackParticle container (for setting links to the original tracks) + const xAOD::TrackParticleContainer *trackContainer(nullptr); + ATH_CHECK(evtStore()->retrieve(trackContainer , "InDetTrackParticles" )); + + // Get Jpsi+pi container + const xAOD::VertexContainer *jpsipiContainer(nullptr); + ATH_CHECK(evtStore()->retrieve(jpsipiContainer , m_vertexContainerKey )); + + // Get D0 container + const xAOD::VertexContainer *d0Container(nullptr); + ATH_CHECK(evtStore()->retrieve(d0Container , 
m_vertexD0ContainerKey )); + + double mass_d0 = m_vtx1MassHypo; + std::vector<const xAOD::TrackParticle*> tracksJpsipi; + std::vector<const xAOD::TrackParticle*> tracksJpsi; + std::vector<const xAOD::TrackParticle*> tracksD0; + std::vector<const xAOD::TrackParticle*> tracksBc; + std::vector<double> massesJpsipi; + massesJpsipi.push_back(m_vtx0Daug1MassHypo); + massesJpsipi.push_back(m_vtx0Daug2MassHypo); + massesJpsipi.push_back(m_vtx0Daug3MassHypo); + std::vector<double> massesD0; + massesD0.push_back(m_vtx1Daug1MassHypo); + massesD0.push_back(m_vtx1Daug2MassHypo); + std::vector<double> massesD0b; // Change the oreder of masses for D*-->D0bar pi-, D0bar->K+pi- + massesD0b.push_back(m_vtx1Daug2MassHypo); + massesD0b.push_back(m_vtx1Daug1MassHypo); + std::vector<double> Masses; + Masses.push_back(m_vtx0Daug1MassHypo); + Masses.push_back(m_vtx0Daug2MassHypo); + Masses.push_back(m_vtx0Daug3MassHypo); + Masses.push_back(m_vtx1MassHypo); + + // Select J/psi pi+ candidates before calling cascade fit + std::vector<const xAOD::Vertex*> selectedJpsipiCandidates; + for(auto vxcItr=jpsipiContainer->cbegin(); vxcItr!=jpsipiContainer->cend(); ++vxcItr) { + + // Check the passed flag first + const xAOD::Vertex* vtx = *vxcItr; + SG::AuxElement::Accessor<Char_t> flagAcc1("passed_Jpsipi"); + if(flagAcc1.isAvailable(*vtx)){ + if(!flagAcc1(*vtx)) continue; + } + + // Check J/psi candidate invariant mass and skip if need be + TLorentzVector p4Mup_in, p4Mum_in; + p4Mup_in.SetPtEtaPhiM((*vxcItr)->trackParticle(0)->pt(), + (*vxcItr)->trackParticle(0)->eta(), + (*vxcItr)->trackParticle(0)->phi(), m_vtx0Daug1MassHypo); + p4Mum_in.SetPtEtaPhiM((*vxcItr)->trackParticle(1)->pt(), + (*vxcItr)->trackParticle(1)->eta(), + (*vxcItr)->trackParticle(1)->phi(), m_vtx0Daug2MassHypo); + double mass_Jpsi = (p4Mup_in + p4Mum_in).M(); + ATH_MSG_DEBUG("Jpsi mass " << mass_Jpsi); + if (mass_Jpsi < m_jpsiMassLower || mass_Jpsi > m_jpsiMassUpper) { + ATH_MSG_DEBUG(" Original Jpsi candidate rejected by the 
mass cut: mass = " + << mass_Jpsi << " != (" << m_jpsiMassLower << ", " << m_jpsiMassUpper << ")" ); + continue; + } + + // Check J/psi pi+ candidate invariant mass and skip if need be + double mass_Jpsipi = m_V0Tools->invariantMass(*vxcItr, massesJpsipi); + ATH_MSG_DEBUG("Jpsipi mass " << mass_Jpsipi); + if (mass_Jpsipi < m_jpsipiMassLower || mass_Jpsipi > m_jpsipiMassUpper) { + ATH_MSG_DEBUG(" Original Jpsipi candidate rejected by the mass cut: mass = " + << mass_Jpsipi << " != (" << m_jpsipiMassLower << ", " << m_jpsipiMassUpper << ")" ); + continue; + } + + selectedJpsipiCandidates.push_back(*vxcItr); + } + if(selectedJpsipiCandidates.size()<1) return StatusCode::SUCCESS; + + // Select the D0/D0b candidates before calling cascade fit + std::vector<const xAOD::Vertex*> selectedD0Candidates; + for(auto vxcItr=d0Container->cbegin(); vxcItr!=d0Container->cend(); ++vxcItr) { + + // Check the passed flag first + const xAOD::Vertex* vtx = *vxcItr; + SG::AuxElement::Accessor<Char_t> flagAcc1("passed_D0"); + SG::AuxElement::Accessor<Char_t> flagAcc2("passed_D0b"); + bool isD0(true); + bool isD0b(true); + if(flagAcc1.isAvailable(*vtx)){ + if(!flagAcc1(*vtx)) isD0 = false; + } + if(flagAcc2.isAvailable(*vtx)){ + if(!flagAcc2(*vtx)) isD0b = false; + } + if(!(isD0||isD0b)) continue; + + // Ensure the total charge is correct + if ((*vxcItr)->trackParticle(0)->charge() != 1 || (*vxcItr)->trackParticle(1)->charge() != -1) { + ATH_MSG_DEBUG(" Original D0/D0-bar candidate rejected by the charge requirement: " + << (*vxcItr)->trackParticle(0)->charge() << ", " << (*vxcItr)->trackParticle(1)->charge() ); + continue; + } + + // Check D0/D0bar candidate invariant mass and skip if need be + double mass_D0 = m_V0Tools->invariantMass(*vxcItr,massesD0); + double mass_D0b = m_V0Tools->invariantMass(*vxcItr,massesD0b); + ATH_MSG_DEBUG("D0 mass " << mass_D0 << ", D0b mass "<<mass_D0b); + if ((mass_D0 < m_D0MassLower || mass_D0 > m_D0MassUpper) && (mass_D0b < m_D0MassLower || mass_D0b > 
m_D0MassUpper)) { + ATH_MSG_DEBUG(" Original D0 candidate rejected by the mass cut: mass = " + << mass_D0 << " != (" << m_D0MassLower << ", " << m_D0MassUpper << ") " + << mass_D0b << " != (" << m_D0MassLower << ", " << m_D0MassUpper << ") " ); + continue; + } + + selectedD0Candidates.push_back(*vxcItr); + } + if(selectedD0Candidates.size()<1) return StatusCode::SUCCESS; + + // Select J/psi D*+ candidates + // Iterate over Jpsi+pi vertices + for(auto jpsipiItr=selectedJpsipiCandidates.cbegin(); jpsipiItr!=selectedJpsipiCandidates.cend(); ++jpsipiItr) { + + size_t jpsipiTrkNum = (*jpsipiItr)->nTrackParticles(); + tracksJpsipi.clear(); + tracksJpsi.clear(); + for( unsigned int it=0; it<jpsipiTrkNum; it++) tracksJpsipi.push_back((*jpsipiItr)->trackParticle(it)); + for( unsigned int it=0; it<jpsipiTrkNum-1; it++) tracksJpsi.push_back((*jpsipiItr)->trackParticle(it)); + + if (tracksJpsipi.size() != 3 || massesJpsipi.size() != 3 ) { + ATH_MSG_INFO("problems with Jpsi+pi input"); + } + + bool tagD0(true); + if(abs(m_Dx_pid)==421 && (*jpsipiItr)->trackParticle(2)->charge()==-1) tagD0 = false; + + TLorentzVector p4_pi1; // Momentum of soft pion + p4_pi1.SetPtEtaPhiM((*jpsipiItr)->trackParticle(2)->pt(), + (*jpsipiItr)->trackParticle(2)->eta(), + (*jpsipiItr)->trackParticle(2)->phi(), m_vtx0Daug3MassHypo); + + // Iterate over D0/D0bar vertices + for(auto d0Itr=selectedD0Candidates.cbegin(); d0Itr!=selectedD0Candidates.cend(); ++d0Itr) { + + // Check identical tracks in input + if(std::find(tracksJpsipi.cbegin(), tracksJpsipi.cend(), (*d0Itr)->trackParticle(0)) != tracksJpsipi.cend()) continue; + if(std::find(tracksJpsipi.cbegin(), tracksJpsipi.cend(), (*d0Itr)->trackParticle(1)) != tracksJpsipi.cend()) continue; + + + TLorentzVector p4_ka, p4_pi2; + if(tagD0){ // for D*+ + p4_pi2.SetPtEtaPhiM((*d0Itr)->trackParticle(0)->pt(), + (*d0Itr)->trackParticle(0)->eta(), + (*d0Itr)->trackParticle(0)->phi(), m_vtx1Daug1MassHypo); + p4_ka.SetPtEtaPhiM( (*d0Itr)->trackParticle(1)->pt(), 
+ (*d0Itr)->trackParticle(1)->eta(), + (*d0Itr)->trackParticle(1)->phi(), m_vtx1Daug2MassHypo); + }else{ // change the order in the case of D*- + p4_pi2.SetPtEtaPhiM((*d0Itr)->trackParticle(1)->pt(), + (*d0Itr)->trackParticle(1)->eta(), + (*d0Itr)->trackParticle(1)->phi(), m_vtx1Daug1MassHypo); + p4_ka.SetPtEtaPhiM( (*d0Itr)->trackParticle(0)->pt(), + (*d0Itr)->trackParticle(0)->eta(), + (*d0Itr)->trackParticle(0)->phi(), m_vtx1Daug2MassHypo); + } + // Check D*+/- candidate invariant mass and skip if need be + double mass_Dst= (p4_pi1 + p4_ka + p4_pi2).M(); + ATH_MSG_DEBUG("D*+/- mass " << mass_Dst); + if (mass_Dst < m_DstMassLower || mass_Dst > m_DstMassUpper) { + ATH_MSG_DEBUG(" Original D*+/- candidate rejected by the mass cut: mass = " + << mass_Dst << " != (" << m_DstMassLower << ", " << m_DstMassUpper << ")" ); + continue; + } + + size_t d0TrkNum = (*d0Itr)->nTrackParticles(); + tracksD0.clear(); + for( unsigned int it=0; it<d0TrkNum; it++) tracksD0.push_back((*d0Itr)->trackParticle(it)); + if (tracksD0.size() != 2 || massesD0.size() != 2 ) { + ATH_MSG_INFO("problems with D0 input"); + } + + ATH_MSG_DEBUG("using tracks" << tracksJpsipi[0] << ", " << tracksJpsipi[1] << ", " << tracksJpsipi[2] << ", " << tracksD0[0] << ", " << tracksD0[1]); + ATH_MSG_DEBUG("Charge of Jpsi+pi tracks: "<<(*jpsipiItr)->trackParticle(0)->charge()<<", "<<(*jpsipiItr)->trackParticle(1)->charge()<<", "<<(*jpsipiItr)->trackParticle(2)->charge()); + ATH_MSG_DEBUG("Charge of D0 tracks: "<<(*d0Itr)->trackParticle(0)->charge()<<", "<<(*d0Itr)->trackParticle(1)->charge()); + + tracksBc.clear(); + for( unsigned int it=0; it<jpsipiTrkNum; it++) tracksBc.push_back((*jpsipiItr)->trackParticle(it)); + for( unsigned int it=0; it<d0TrkNum; it++) tracksBc.push_back((*d0Itr)->trackParticle(it)); + + + // Apply the user's settings to the fitter + // Reset + std::unique_ptr<Trk::IVKalState> state (m_iVertexFitter->makeState()); + // Robustness + int robustness = 0; + 
m_iVertexFitter->setRobustness(robustness, *state); + // Build up the topology + // Vertex list + std::vector<Trk::VertexID> vrtList; + // D0 vertex + Trk::VertexID vID; + if (m_constrD0) { + if(tagD0) vID = m_iVertexFitter->startVertex(tracksD0,massesD0,*state,mass_d0); + else vID = m_iVertexFitter->startVertex(tracksD0,massesD0b,*state,mass_d0); + } else { + if(tagD0) vID = m_iVertexFitter->startVertex(tracksD0,massesD0,*state); + else vID = m_iVertexFitter->startVertex(tracksD0,massesD0b,*state); + } + vrtList.push_back(vID); + // B vertex including Jpsi+pi + Trk::VertexID vID2 = m_iVertexFitter->nextVertex(tracksJpsipi,massesJpsipi,vrtList,*state); + if (m_constrJpsi) { + std::vector<Trk::VertexID> cnstV; + cnstV.clear(); + if ( !m_iVertexFitter->addMassConstraint(vID2,tracksJpsi,cnstV,*state,m_mass_jpsi).isSuccess() ) { + ATH_MSG_WARNING("addMassConstraint failed"); + //return StatusCode::FAILURE; + } + } + // Do the work + std::unique_ptr<Trk::VxCascadeInfo> result(m_iVertexFitter->fitCascade(*state)); + + if (result != nullptr) { + + // reset links to original tracks + BPhysPVCascadeTools::PrepareVertexLinks(result.get(), trackContainer); + ATH_MSG_DEBUG("storing tracks " << ((result->vertices())[0])->trackParticle(0) << ", " + << ((result->vertices())[0])->trackParticle(1) << ", " + << ((result->vertices())[1])->trackParticle(0) << ", " + << ((result->vertices())[1])->trackParticle(1) << ", " + << ((result->vertices())[1])->trackParticle(2)); + // necessary to prevent memory leak + result->setSVOwnership(true); + + // Chi2/DOF cut + double bChi2DOF = result->fitChi2()/result->nDoF(); + ATH_MSG_DEBUG("Candidate chi2/DOF is " << bChi2DOF); + bool chi2CutPassed = (m_chi2cut <= 0.0 || bChi2DOF < m_chi2cut); + + const std::vector< std::vector<TLorentzVector> > &moms = result->getParticleMoms(); + double mass = m_CascadeTools->invariantMass(moms[1]); + if(chi2CutPassed) { + if (mass >= m_MassLower && mass <= m_MassUpper) { + 
cascadeinfoContainer->push_back(result.release()); + } else { + ATH_MSG_DEBUG("Candidate rejected by the mass cut: mass = " + << mass << " != (" << m_MassLower << ", " << m_MassUpper << ")" ); + } + } + } + + } //Iterate over D0 vertices + + } //Iterate over Jpsi+pi vertices + + ATH_MSG_DEBUG("cascadeinfoContainer size " << cascadeinfoContainer->size()); + + return StatusCode::SUCCESS; + } + +} diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/JpsiPlusDs1Cascade.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/JpsiPlusDs1Cascade.cxx new file mode 100644 index 0000000000000000000000000000000000000000..e49a95911bc838327dc5c49d35b0c21060bd8b9a --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/JpsiPlusDs1Cascade.cxx @@ -0,0 +1,905 @@ +/* + Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration +*/ +///////////////////////////////////////////////////////////////// +// JpsiPlusDs1Cascade.cxx, (c) ATLAS Detector software +///////////////////////////////////////////////////////////////// +#include "DerivationFrameworkBPhys/JpsiPlusDs1Cascade.h" +#include "TrkVertexFitterInterfaces/IVertexFitter.h" +#include "TrkVKalVrtFitter/TrkVKalVrtFitter.h" +#include "TrkVertexAnalysisUtils/V0Tools.h" +#include "GaudiKernel/IPartPropSvc.h" +#include "DerivationFrameworkBPhys/CascadeTools.h" +#include "DerivationFrameworkBPhys/BPhysPVCascadeTools.h" +#include "xAODTracking/VertexAuxContainer.h" +#include "xAODBPhys/BPhysHypoHelper.h" +#include <algorithm> +#include "xAODTracking/VertexContainer.h" +#include "DerivationFrameworkBPhys/LocalVector.h" + +namespace DerivationFramework { + typedef ElementLink<xAOD::VertexContainer> VertexLink; + typedef std::vector<VertexLink> VertexLinkVector; + typedef std::vector<const xAOD::TrackParticle*> TrackBag; + + double JpsiPlusDs1Cascade::getParticleMass(int pdgcode) const{ + auto ptr = m_particleDataTable->particle( pdgcode ); + return ptr ? 
ptr->mass() : 0.; + } + + StatusCode JpsiPlusDs1Cascade::initialize() { + + // retrieving vertex Fitter + ATH_CHECK( m_iVertexFitter.retrieve()); + + // retrieving the V0 tools + ATH_CHECK( m_V0Tools.retrieve()); + + // retrieving the Cascade tools + ATH_CHECK( m_CascadeTools.retrieve()); + + // Get the beam spot service + ATH_CHECK(m_beamSpotKey.initialize()); + + IPartPropSvc* partPropSvc = nullptr; + ATH_CHECK( service("PartPropSvc", partPropSvc, true) ); + m_particleDataTable = partPropSvc->PDT(); + + // retrieve particle masses + if(m_mass_jpsi < 0. ) m_mass_jpsi = getParticleMass(PDG::J_psi); + if(m_vtx0MassHypo < 0.) m_vtx0MassHypo = getParticleMass(PDG::B_c_plus); + if(m_vtx1MassHypo < 0.) m_vtx1MassHypo = getParticleMass(PDG::D0); + if(m_vtx2MassHypo < 0.) m_vtx2MassHypo = getParticleMass(PDG::K_S0); + + if(m_vtx0Daug1MassHypo < 0.) m_vtx0Daug1MassHypo = getParticleMass(PDG::mu_minus); + if(m_vtx0Daug2MassHypo < 0.) m_vtx0Daug2MassHypo = getParticleMass(PDG::mu_minus); + if(m_vtx0Daug3MassHypo < 0.) m_vtx0Daug3MassHypo = getParticleMass(PDG::pi_plus); + if(m_vtx1Daug1MassHypo < 0.) m_vtx1Daug1MassHypo = getParticleMass(PDG::pi_plus); + if(m_vtx1Daug2MassHypo < 0.) m_vtx1Daug2MassHypo = getParticleMass(PDG::K_plus); + if(m_vtx2Daug1MassHypo < 0.) m_vtx2Daug1MassHypo = getParticleMass(PDG::pi_plus); + if(m_vtx2Daug2MassHypo < 0.) 
m_vtx2Daug2MassHypo = getParticleMass(PDG::pi_plus); + + return StatusCode::SUCCESS; + } + + + StatusCode JpsiPlusDs1Cascade::addBranches() const + { + std::vector<Trk::VxCascadeInfo*> cascadeinfoContainer; + constexpr int topoN = 3; + std::array<xAOD::VertexContainer*, topoN> Vtxwritehandles; + std::array<xAOD::VertexAuxContainer*, topoN> Vtxwritehandlesaux; + if(m_cascadeOutputsKeys.size() !=topoN) { ATH_MSG_FATAL("Incorrect number of VtxContainers"); return StatusCode::FAILURE; } + + for(int i =0; i<topoN;i++){ + Vtxwritehandles[i] = new xAOD::VertexContainer(); + Vtxwritehandlesaux[i] = new xAOD::VertexAuxContainer(); + Vtxwritehandles[i]->setStore(Vtxwritehandlesaux[i]); + ATH_CHECK(evtStore()->record(Vtxwritehandles[i] , m_cascadeOutputsKeys[i] )); + ATH_CHECK(evtStore()->record(Vtxwritehandlesaux[i], m_cascadeOutputsKeys[i] + "Aux.")); + } + + //---------------------------------------------------- + // retrieve primary vertices + //---------------------------------------------------- + const xAOD::Vertex * primaryVertex(nullptr); + const xAOD::VertexContainer *pvContainer(nullptr); + ATH_CHECK(evtStore()->retrieve(pvContainer, m_VxPrimaryCandidateName)); + ATH_MSG_DEBUG("Found " << m_VxPrimaryCandidateName << " in StoreGate!"); + + if (pvContainer->size()==0){ + ATH_MSG_WARNING("You have no primary vertices: " << pvContainer->size()); + return StatusCode::RECOVERABLE; + } else { + primaryVertex = (*pvContainer)[0]; + } + + //---------------------------------------------------- + // Try to retrieve refitted primary vertices + //---------------------------------------------------- + xAOD::VertexContainer* refPvContainer = nullptr; + xAOD::VertexAuxContainer* refPvAuxContainer = nullptr; + if (m_refitPV) { + if (evtStore()->contains<xAOD::VertexContainer>(m_refPVContainerName)) { + // refitted PV container exists. 
Get it from the store gate + ATH_CHECK(evtStore()->retrieve(refPvContainer , m_refPVContainerName )); + ATH_CHECK(evtStore()->retrieve(refPvAuxContainer, m_refPVContainerName + "Aux.")); + } else { + // refitted PV container does not exist. Create a new one. + refPvContainer = new xAOD::VertexContainer; + refPvAuxContainer = new xAOD::VertexAuxContainer; + refPvContainer->setStore(refPvAuxContainer); + ATH_CHECK(evtStore()->record(refPvContainer , m_refPVContainerName)); + ATH_CHECK(evtStore()->record(refPvAuxContainer, m_refPVContainerName+"Aux.")); + } + } + + ATH_CHECK(performSearch(&cascadeinfoContainer)); + + SG::ReadCondHandle<InDet::BeamSpotData> beamSpotHandle { m_beamSpotKey }; + if(not beamSpotHandle.isValid()) ATH_MSG_ERROR("Cannot Retrieve " << m_beamSpotKey.key() ); + BPhysPVCascadeTools helper(&(*m_CascadeTools), beamSpotHandle.cptr()); + helper.SetMinNTracksInPV(m_PV_minNTracks); + + // Decorators for the main vertex: chi2, ndf, pt and pt error, plus the D0, K0 vertex variables + SG::AuxElement::Decorator<VertexLinkVector> CascadeV1LinksDecor("CascadeVertex1Links"); + SG::AuxElement::Decorator<VertexLinkVector> CascadeV2LinksDecor("CascadeVertex2Links"); + SG::AuxElement::Decorator<VertexLinkVector> JpsipiLinksDecor("JpsipiVertexLinks"); + SG::AuxElement::Decorator<VertexLinkVector> D0LinksDecor("D0VertexLinks"); + SG::AuxElement::Decorator<VertexLinkVector> K0LinksDecor("K0VertexLinks"); + SG::AuxElement::Decorator<float> chi2_decor("ChiSquared"); + SG::AuxElement::Decorator<float> ndof_decor("NumberDoF"); + SG::AuxElement::Decorator<float> Pt_decor("Pt"); + SG::AuxElement::Decorator<float> PtErr_decor("PtErr"); + SG::AuxElement::Decorator<float> Mass_svdecor("D0_mass"); + SG::AuxElement::Decorator<float> MassErr_svdecor("D0_massErr"); + SG::AuxElement::Decorator<float> Pt_svdecor("D0_Pt"); + SG::AuxElement::Decorator<float> PtErr_svdecor("D0_PtErr"); + SG::AuxElement::Decorator<float> Lxy_svdecor("D0_Lxy"); + SG::AuxElement::Decorator<float> 
LxyErr_svdecor("D0_LxyErr"); + SG::AuxElement::Decorator<float> Tau_svdecor("D0_Tau"); + SG::AuxElement::Decorator<float> TauErr_svdecor("D0_TauErr"); + + SG::AuxElement::Decorator<float> Mass_sv2decor("K0_mass"); + SG::AuxElement::Decorator<float> MassErr_sv2decor("K0_massErr"); + SG::AuxElement::Decorator<float> Pt_sv2decor("K0_Pt"); + SG::AuxElement::Decorator<float> PtErr_sv2decor("K0_PtErr"); + SG::AuxElement::Decorator<float> Lxy_sv2decor("K0_Lxy"); + SG::AuxElement::Decorator<float> LxyErr_sv2decor("K0_LxyErr"); + SG::AuxElement::Decorator<float> Tau_sv2decor("K0_Tau"); + SG::AuxElement::Decorator<float> TauErr_sv2decor("K0_TauErr"); + + SG::AuxElement::Decorator<float> MassJpsi_decor("Jpsi_mass"); + SG::AuxElement::Decorator<float> MassPiD0_decor("PiD0_mass"); + SG::AuxElement::Decorator<float> MassPiD0K0_decor("PiD0K0_mass"); + + SG::AuxElement::Decorator<float> MassMumu_decor("Mumu_mass"); + SG::AuxElement::Decorator<float> MassKpi_svdecor("Kpi_mass"); + SG::AuxElement::Decorator<float> MassPipi_sv2decor("Pipi_mass"); + + ATH_MSG_DEBUG("cascadeinfoContainer size " << cascadeinfoContainer.size()); + + // Get Jpsi+pi container and identify the input Jpsi+pi + const xAOD::VertexContainer *jpsipiContainer(nullptr); + ATH_CHECK(evtStore()->retrieve(jpsipiContainer , m_vertexContainerKey )); + // Get D0 container and identify the input D0 + const xAOD::VertexContainer *d0Container(nullptr); + ATH_CHECK(evtStore()->retrieve(d0Container , m_vertexD0ContainerKey )); + // Get K0 container and identify the input K0 + const xAOD::VertexContainer *k0Container(nullptr); + ATH_CHECK(evtStore()->retrieve(k0Container , m_vertexK0ContainerKey )); + + for (Trk::VxCascadeInfo* x : cascadeinfoContainer) { + if(x==nullptr) ATH_MSG_ERROR("cascadeinfoContainer is null"); + + // the cascade fitter returns: + // std::vector<xAOD::Vertex*>, each xAOD::Vertex contains the refitted track parameters (perigee at the vertex position) + // vertices[iv] the links to the original TPs and a 
covariance of size 3+5*NTRK; the chi2 of the total fit + // is split between the cascade vertices as per track contribution + // std::vector< std::vector<TLorentzVector> >, each std::vector<TLorentzVector> contains the refitted momenta (TLorentzVector) + // momenta[iv][...] of all tracks in the corresponding vertex, including any pseudotracks (from cascade vertices) + // originating in this vertex; the masses are as assigned in the cascade fit + // std::vector<Amg::MatrixX>, the corresponding covariance matrices in momentum space + // covariance[iv] + // int nDoF, double Chi2 + // + // the invariant mass, pt, lifetime etc. errors should be calculated using the covariance matrices in momentum space as these + // take into account the full track-track and track-vertex correlations + // + // in the case of Jpsi+V0: vertices[0] is the V0 vertex, vertices[1] is the B/Lambda_b(bar) vertex, containing the 2 Jpsi tracks. + // The covariance terms between the two vertices are not stored. In momentum space momenta[0] contains the 2 V0 tracks, + // their momenta add up to the momentum of the 3rd track in momenta[1], the first two being the Jpsi tracks + + const std::vector<xAOD::Vertex*> &cascadeVertices = x->vertices(); + if(cascadeVertices.size()!=topoN) + ATH_MSG_ERROR("Incorrect number of vertices"); + if(cascadeVertices[0] == nullptr || cascadeVertices[1] == nullptr || cascadeVertices[2] == nullptr) ATH_MSG_ERROR("Error null vertex"); + // Keep vertices (bear in mind that they come in reverse order!) 
+ for(int i =0;i<topoN;i++) Vtxwritehandles[i]->push_back(cascadeVertices[i]); + + x->setSVOwnership(false); // Prevent Container from deleting vertices + const auto mainVertex = cascadeVertices[2]; // this is the B_c+/- vertex + const std::vector< std::vector<TLorentzVector> > &moms = x->getParticleMoms(); + + // Set links to cascade vertices + std::vector<const xAOD::Vertex*> verticestoLink; + verticestoLink.push_back(cascadeVertices[0]); + //if(Vtxwritehandles[1] == nullptr) ATH_MSG_ERROR("Vtxwritehandles[1] is null"); + if(Vtxwritehandles[2] == nullptr) ATH_MSG_ERROR("Vtxwritehandles[2] is null"); + if(!BPhysPVCascadeTools::LinkVertices(CascadeV1LinksDecor, verticestoLink, Vtxwritehandles[0], cascadeVertices[2])) + ATH_MSG_ERROR("Error decorating with cascade vertices"); + + verticestoLink.clear(); + verticestoLink.push_back(cascadeVertices[1]); + if(!BPhysPVCascadeTools::LinkVertices(CascadeV2LinksDecor, verticestoLink, Vtxwritehandles[1], cascadeVertices[2])) + ATH_MSG_ERROR("Error decorating with cascade vertices"); + + // Identify the input Jpsi+pi + const xAOD::Vertex* jpsipiVertex = BPhysPVCascadeTools::FindVertex<3>(jpsipiContainer, cascadeVertices[2]); + ATH_MSG_DEBUG("1 pt Jpsi+pi tracks " << cascadeVertices[2]->trackParticle(0)->pt() << ", " << cascadeVertices[2]->trackParticle(1)->pt() << ", " << cascadeVertices[2]->trackParticle(2)->pt()); + if (jpsipiVertex) ATH_MSG_DEBUG("2 pt Jpsi+pi tracks " << jpsipiVertex->trackParticle(0)->pt() << ", " << jpsipiVertex->trackParticle(1)->pt() << ", " << jpsipiVertex->trackParticle(2)->pt()); + + // Identify the input D0 + const xAOD::Vertex* d0Vertex = BPhysPVCascadeTools::FindVertex<2>(d0Container, cascadeVertices[1]);; + ATH_MSG_DEBUG("1 pt D0 tracks " << cascadeVertices[1]->trackParticle(0)->pt() << ", " << cascadeVertices[1]->trackParticle(1)->pt()); + if (d0Vertex) ATH_MSG_DEBUG("2 pt D0 tracks " << d0Vertex->trackParticle(0)->pt() << ", " << d0Vertex->trackParticle(1)->pt()); + + // Identify the input 
K_S0 + const xAOD::Vertex* k0Vertex = BPhysPVCascadeTools::FindVertex<2>(k0Container, cascadeVertices[0]);; + ATH_MSG_DEBUG("1 pt K_S0 tracks " << cascadeVertices[0]->trackParticle(0)->pt() << ", " << cascadeVertices[0]->trackParticle(1)->pt()); + if (k0Vertex) ATH_MSG_DEBUG("2 pt K_S0 tracks " << k0Vertex->trackParticle(0)->pt() << ", " << k0Vertex->trackParticle(1)->pt()); + + // Set links to input vertices + std::vector<const xAOD::Vertex*> jpsipiVerticestoLink; + if (jpsipiVertex) jpsipiVerticestoLink.push_back(jpsipiVertex); + else ATH_MSG_WARNING("Could not find linking Jpsi+pi"); + if(!BPhysPVCascadeTools::LinkVertices(JpsipiLinksDecor, jpsipiVerticestoLink, jpsipiContainer, cascadeVertices[2])) + ATH_MSG_ERROR("Error decorating with Jpsi+pi vertices"); + + std::vector<const xAOD::Vertex*> d0VerticestoLink; + if (d0Vertex) d0VerticestoLink.push_back(d0Vertex); + else ATH_MSG_WARNING("Could not find linking D0"); + if(!BPhysPVCascadeTools::LinkVertices(D0LinksDecor, d0VerticestoLink, d0Container, cascadeVertices[2])) + ATH_MSG_ERROR("Error decorating with D0 vertices"); + + std::vector<const xAOD::Vertex*> k0VerticestoLink; + if (k0Vertex) k0VerticestoLink.push_back(k0Vertex); + else ATH_MSG_WARNING("Could not find linking K_S0"); + if(!BPhysPVCascadeTools::LinkVertices(K0LinksDecor, k0VerticestoLink, k0Container, cascadeVertices[2])) + ATH_MSG_ERROR("Error decorating with K_S0 vertices"); + + bool tagD0(true); + if (jpsipiVertex){ + if(abs(m_Dx_pid)==421 && (jpsipiVertex->trackParticle(2)->charge()==-1)) tagD0 = false; + } + + double mass_b = m_vtx0MassHypo; + double mass_d0 = m_vtx1MassHypo; + double mass_k0 = m_vtx2MassHypo; + std::vector<double> massesJpsipi; + massesJpsipi.push_back(m_vtx0Daug1MassHypo); + massesJpsipi.push_back(m_vtx0Daug2MassHypo); + massesJpsipi.push_back(m_vtx0Daug3MassHypo); + std::vector<double> massesD0; + if(tagD0){ + massesD0.push_back(m_vtx1Daug1MassHypo); + massesD0.push_back(m_vtx1Daug2MassHypo); + }else{ // Change the oreder 
of masses for D*-->D0bar pi-, D0bar->K+pi- + massesD0.push_back(m_vtx1Daug2MassHypo); + massesD0.push_back(m_vtx1Daug1MassHypo); + } + std::vector<double> massesK0; + massesK0.push_back(m_vtx2Daug1MassHypo); + massesK0.push_back(m_vtx2Daug2MassHypo); + std::vector<double> Masses; + Masses.push_back(m_vtx0Daug1MassHypo); + Masses.push_back(m_vtx0Daug2MassHypo); + Masses.push_back(m_vtx0Daug3MassHypo); + Masses.push_back(m_vtx1MassHypo); + Masses.push_back(m_vtx2MassHypo); + + // loop over candidates -- Don't apply PV_minNTracks requirement here + // because it may result in exclusion of the high-pt PV. + // get good PVs + + xAOD::BPhysHypoHelper vtx(m_hypoName, mainVertex); + + BPhysPVCascadeTools::SetVectorInfo(vtx, x); + + // Decorate main vertex + // + // 1.a) mass, mass error + BPHYS_CHECK( vtx.setMass(m_CascadeTools->invariantMass(moms[2])) ); + BPHYS_CHECK( vtx.setMassErr(m_CascadeTools->invariantMassError(moms[2],x->getCovariance()[2])) ); + // 1.b) pt and pT error (the default pt of mainVertex is != the pt of the full cascade fit!) + Pt_decor(*mainVertex) = m_CascadeTools->pT(moms[2]); + PtErr_decor(*mainVertex) = m_CascadeTools->pTError(moms[2],x->getCovariance()[2]); + // 1.c) chi2 and ndof (the default chi2 of mainVertex is != the chi2 of the full cascade fit!) 
+ chi2_decor(*mainVertex) = x->fitChi2(); + ndof_decor(*mainVertex) = x->nDoF(); + + float massMumu = 0.; + if (jpsipiVertex) { + TLorentzVector p4_mu1, p4_mu2; + p4_mu1.SetPtEtaPhiM(jpsipiVertex->trackParticle(0)->pt(), + jpsipiVertex->trackParticle(0)->eta(), + jpsipiVertex->trackParticle(0)->phi(), m_vtx0Daug1MassHypo); + p4_mu2.SetPtEtaPhiM(jpsipiVertex->trackParticle(1)->pt(), + jpsipiVertex->trackParticle(1)->eta(), + jpsipiVertex->trackParticle(1)->phi(), m_vtx0Daug2MassHypo); + massMumu = (p4_mu1 + p4_mu2).M(); + } + MassMumu_decor(*mainVertex) = massMumu; + + float massKpi = 0.; + if (d0Vertex) { + TLorentzVector p4_ka, p4_pi; + if(tagD0){ + p4_pi.SetPtEtaPhiM(d0Vertex->trackParticle(0)->pt(), + d0Vertex->trackParticle(0)->eta(), + d0Vertex->trackParticle(0)->phi(), m_vtx1Daug1MassHypo); + p4_ka.SetPtEtaPhiM(d0Vertex->trackParticle(1)->pt(), + d0Vertex->trackParticle(1)->eta(), + d0Vertex->trackParticle(1)->phi(), m_vtx1Daug2MassHypo); + }else{ // Change the oreder of masses for D*-->D0bar pi-, D0bar->K+pi- + p4_pi.SetPtEtaPhiM(d0Vertex->trackParticle(1)->pt(), + d0Vertex->trackParticle(1)->eta(), + d0Vertex->trackParticle(1)->phi(), m_vtx1Daug1MassHypo); + p4_ka.SetPtEtaPhiM(d0Vertex->trackParticle(0)->pt(), + d0Vertex->trackParticle(0)->eta(), + d0Vertex->trackParticle(0)->phi(), m_vtx1Daug2MassHypo); + } + massKpi = (p4_ka + p4_pi).M(); + } + MassKpi_svdecor(*mainVertex) = massKpi; + + float massPipi = 0.; + if (k0Vertex) { + TLorentzVector p4_pip, p4_pim; + p4_pip.SetPtEtaPhiM(k0Vertex->trackParticle(0)->pt(), + k0Vertex->trackParticle(0)->eta(), + k0Vertex->trackParticle(0)->phi(), m_vtx2Daug1MassHypo); + p4_pim.SetPtEtaPhiM(k0Vertex->trackParticle(1)->pt(), + k0Vertex->trackParticle(1)->eta(), + k0Vertex->trackParticle(1)->phi(), m_vtx2Daug2MassHypo); + massPipi = (p4_pip + p4_pim).M(); + } + MassPipi_sv2decor(*mainVertex) = massPipi; + + MassJpsi_decor(*mainVertex) = (moms[2][0] + moms[2][1]).M(); + MassPiD0_decor(*mainVertex) = (moms[2][2] + 
moms[2][4]).M(); + MassPiD0K0_decor(*mainVertex) = (moms[2][2] + moms[2][4] + moms[2][3]).M(); + + ATH_CHECK(helper.FillCandwithRefittedVertices(m_refitPV, pvContainer, + refPvContainer, &(*m_pvRefitter), m_PV_max, m_DoVertexType, x, 2, mass_b, vtx)); + + // 4) decorate the main vertex with D0 vertex mass, pt, lifetime and lxy values (plus errors) + // D0 points to the main vertex, so lifetime and lxy are w.r.t the main vertex + Mass_svdecor(*mainVertex) = m_CascadeTools->invariantMass(moms[1]); + MassErr_svdecor(*mainVertex) = m_CascadeTools->invariantMassError(moms[1],x->getCovariance()[1]); + Pt_svdecor(*mainVertex) = m_CascadeTools->pT(moms[1]); + PtErr_svdecor(*mainVertex) = m_CascadeTools->pTError(moms[1],x->getCovariance()[1]); + Lxy_svdecor(*mainVertex) = m_CascadeTools->lxy(moms[1],cascadeVertices[1],cascadeVertices[2]); + LxyErr_svdecor(*mainVertex) = m_CascadeTools->lxyError(moms[1],x->getCovariance()[1],cascadeVertices[1],cascadeVertices[2]); + Tau_svdecor(*mainVertex) = m_CascadeTools->tau(moms[1],cascadeVertices[1],cascadeVertices[2]); + TauErr_svdecor(*mainVertex) = m_CascadeTools->tauError(moms[1],x->getCovariance()[1],cascadeVertices[1],cascadeVertices[2]); + + // 5) decorate the main vertex with K_S0 vertex mass, pt, lifetime and lxy values (plus errors) + // K_S0 points to the main vertex, so lifetime and lxy are w.r.t the main vertex + Mass_sv2decor(*mainVertex) = m_CascadeTools->invariantMass(moms[0]); + MassErr_sv2decor(*mainVertex) = m_CascadeTools->invariantMassError(moms[0],x->getCovariance()[0]); + Pt_sv2decor(*mainVertex) = m_CascadeTools->pT(moms[0]); + PtErr_sv2decor(*mainVertex) = m_CascadeTools->pTError(moms[0],x->getCovariance()[0]); + Lxy_sv2decor(*mainVertex) = m_CascadeTools->lxy(moms[0],cascadeVertices[0],cascadeVertices[2]); + LxyErr_sv2decor(*mainVertex) = m_CascadeTools->lxyError(moms[0],x->getCovariance()[0],cascadeVertices[0],cascadeVertices[2]); + Tau_sv2decor(*mainVertex) = 
m_CascadeTools->tau(moms[0],cascadeVertices[0],cascadeVertices[2]); + TauErr_sv2decor(*mainVertex) = m_CascadeTools->tauError(moms[0],x->getCovariance()[0],cascadeVertices[0],cascadeVertices[2]); + + // Some checks in DEBUG mode + ATH_MSG_DEBUG("chi2 " << x->fitChi2() + << " chi2_1 " << m_V0Tools->chisq(cascadeVertices[0]) + << " chi2_2 " << m_V0Tools->chisq(cascadeVertices[1]) + << " chi2_3 " << m_V0Tools->chisq(cascadeVertices[2]) + << " vprob " << m_CascadeTools->vertexProbability(x->nDoF(),x->fitChi2())); + ATH_MSG_DEBUG("ndf " << x->nDoF() << " ndf_1 " << m_V0Tools->ndof(cascadeVertices[0]) << " ndf_2 " << m_V0Tools->ndof(cascadeVertices[1]) << " ndf_3 " << m_V0Tools->ndof(cascadeVertices[2])); + ATH_MSG_DEBUG("V0Tools mass_k0 " << m_V0Tools->invariantMass(cascadeVertices[0],massesK0) + << " error " << m_V0Tools->invariantMassError(cascadeVertices[0],massesK0) + << " mass_d0 " << m_V0Tools->invariantMass(cascadeVertices[1],massesD0) + << " error " << m_V0Tools->invariantMassError(cascadeVertices[1],massesD0) + << " mass_J " << m_V0Tools->invariantMass(cascadeVertices[2],massesJpsipi) + << " error " << m_V0Tools->invariantMassError(cascadeVertices[2],massesJpsipi)); + // masses and errors, using track masses assigned in the fit + double Mass_B = m_CascadeTools->invariantMass(moms[2]); + double Mass_D0 = m_CascadeTools->invariantMass(moms[1]); + double Mass_K0 = m_CascadeTools->invariantMass(moms[0]); + double Mass_B_err = m_CascadeTools->invariantMassError(moms[2],x->getCovariance()[2]); + double Mass_D0_err = m_CascadeTools->invariantMassError(moms[1],x->getCovariance()[1]); + double Mass_K0_err = m_CascadeTools->invariantMassError(moms[0],x->getCovariance()[0]); + ATH_MSG_DEBUG("Mass_B " << Mass_B << " Mass_D0 " << Mass_D0 << " Mass_K0 " << Mass_K0); + ATH_MSG_DEBUG("Mass_B_err " << Mass_B_err << " Mass_D0_err " << Mass_D0_err << " Mass_K0_err " << Mass_K0_err); + double mprob_B = m_CascadeTools->massProbability(mass_b,Mass_B,Mass_B_err); + double mprob_D0 = 
m_CascadeTools->massProbability(mass_d0,Mass_D0,Mass_D0_err); + double mprob_K0 = m_CascadeTools->massProbability(mass_k0,Mass_K0,Mass_K0_err); + ATH_MSG_DEBUG("mprob_B " << mprob_B << " mprob_D0 " << mprob_D0 << " mprob_K0 " << mprob_K0); + // masses and errors, assigning user defined track masses + ATH_MSG_DEBUG("Mass_b " << m_CascadeTools->invariantMass(moms[2],Masses) + << " Mass_d0 " << m_CascadeTools->invariantMass(moms[1],massesD0) + << " Mass_k0 " << m_CascadeTools->invariantMass(moms[0],massesD0)); + ATH_MSG_DEBUG("Mass_b_err " << m_CascadeTools->invariantMassError(moms[2],x->getCovariance()[2],Masses) + << " Mass_d0_err " << m_CascadeTools->invariantMassError(moms[1],x->getCovariance()[1],massesD0) + << " Mass_k0_err " << m_CascadeTools->invariantMassError(moms[0],x->getCovariance()[0],massesK0)); + ATH_MSG_DEBUG("pt_b " << m_CascadeTools->pT(moms[2]) + << " pt_d " << m_CascadeTools->pT(moms[1]) + << " pt_d0 " << m_V0Tools->pT(cascadeVertices[1]) + << " pt_k " << m_CascadeTools->pT(moms[0]) + << " pt_k0 " << m_V0Tools->pT(cascadeVertices[0])); + ATH_MSG_DEBUG("ptErr_b " << m_CascadeTools->pTError(moms[2],x->getCovariance()[2]) + << " ptErr_d " << m_CascadeTools->pTError(moms[1],x->getCovariance()[1]) + << " ptErr_d0 " << m_V0Tools->pTError(cascadeVertices[1]) + << " ptErr_k " << m_CascadeTools->pTError(moms[0],x->getCovariance()[0]) + << " ptErr_k0 " << m_V0Tools->pTError(cascadeVertices[0])); + ATH_MSG_DEBUG("lxy_B " << m_V0Tools->lxy(cascadeVertices[2],primaryVertex) << " lxy_D " << m_V0Tools->lxy(cascadeVertices[1],cascadeVertices[2]) << " lxy_K " << m_V0Tools->lxy(cascadeVertices[0],cascadeVertices[2])); + ATH_MSG_DEBUG("lxy_b " << m_CascadeTools->lxy(moms[2],cascadeVertices[2],primaryVertex) << " lxy_d " << m_CascadeTools->lxy(moms[1],cascadeVertices[1],cascadeVertices[2]) << " lxy_k " << m_CascadeTools->lxy(moms[0],cascadeVertices[0],cascadeVertices[2])); + ATH_MSG_DEBUG("lxyErr_b " << 
m_CascadeTools->lxyError(moms[2],x->getCovariance()[2],cascadeVertices[2],primaryVertex) + << " lxyErr_d " << m_CascadeTools->lxyError(moms[1],x->getCovariance()[1],cascadeVertices[1],cascadeVertices[2]) + << " lxyErr_d0 " << m_V0Tools->lxyError(cascadeVertices[1],cascadeVertices[2]) + << " lxyErr_k " << m_CascadeTools->lxyError(moms[0],x->getCovariance()[0],cascadeVertices[0],cascadeVertices[2]) + << " lxyErr_k0 " << m_V0Tools->lxyError(cascadeVertices[0],cascadeVertices[2])); + ATH_MSG_DEBUG("tau_B " << m_CascadeTools->tau(moms[2],cascadeVertices[2],primaryVertex,mass_b) + << " tau_d0 " << m_V0Tools->tau(cascadeVertices[1],cascadeVertices[2],massesD0) + << " tau_k0 " << m_V0Tools->tau(cascadeVertices[0],cascadeVertices[2],massesK0)); + ATH_MSG_DEBUG("tau_b " << m_CascadeTools->tau(moms[2],cascadeVertices[2],primaryVertex) + << " tau_d " << m_CascadeTools->tau(moms[1],cascadeVertices[1],cascadeVertices[2]) + << " tau_D " << m_CascadeTools->tau(moms[1],cascadeVertices[1],cascadeVertices[2],mass_d0) + << " tau_k " << m_CascadeTools->tau(moms[0],cascadeVertices[0],cascadeVertices[2]) + << " tau_K " << m_CascadeTools->tau(moms[0],cascadeVertices[0],cascadeVertices[2],mass_k0)); + ATH_MSG_DEBUG("tauErr_b " << m_CascadeTools->tauError(moms[2],x->getCovariance()[2],cascadeVertices[2],primaryVertex) + << " tauErr_d " << m_CascadeTools->tauError(moms[1],x->getCovariance()[1],cascadeVertices[1],cascadeVertices[2]) + << " tauErr_d0 " << m_V0Tools->tauError(cascadeVertices[1],cascadeVertices[2],massesD0) + << " tauErr_k " << m_CascadeTools->tauError(moms[0],x->getCovariance()[0],cascadeVertices[0],cascadeVertices[2]) + << " tauErr_k0 " << m_V0Tools->tauError(cascadeVertices[0],cascadeVertices[2],massesK0)); + ATH_MSG_DEBUG("TauErr_b " << m_CascadeTools->tauError(moms[2],x->getCovariance()[2],cascadeVertices[2],primaryVertex,mass_b) + << " TauErr_d " << m_CascadeTools->tauError(moms[1],x->getCovariance()[1],cascadeVertices[1],cascadeVertices[2],mass_d0) + << " TauErr_d0 " << 
m_V0Tools->tauError(cascadeVertices[1],cascadeVertices[2],massesD0,mass_d0) + << " TauErr_k " << m_CascadeTools->tauError(moms[0],x->getCovariance()[0],cascadeVertices[0],cascadeVertices[2],mass_k0) + << " TauErr_k0 " << m_V0Tools->tauError(cascadeVertices[0],cascadeVertices[2],massesD0,mass_k0)); + + ATH_MSG_DEBUG("CascadeTools main vert wrt PV " << " CascadeTools SV " << " V0Tools SV"); + ATH_MSG_DEBUG("a0z " << m_CascadeTools->a0z(moms[2],cascadeVertices[2],primaryVertex) + << ", " << m_CascadeTools->a0z(moms[1],cascadeVertices[1],cascadeVertices[2]) + << ", " << m_CascadeTools->a0z(moms[0],cascadeVertices[0],cascadeVertices[2]) + << ", " << m_V0Tools->a0z(cascadeVertices[1],cascadeVertices[2]) + << ", " << m_V0Tools->a0z(cascadeVertices[0],cascadeVertices[2])); + ATH_MSG_DEBUG("a0zErr " << m_CascadeTools->a0zError(moms[2],x->getCovariance()[2],cascadeVertices[2],primaryVertex) + << ", " << m_CascadeTools->a0zError(moms[1],x->getCovariance()[1],cascadeVertices[1],cascadeVertices[2]) + << ", " << m_CascadeTools->a0zError(moms[0],x->getCovariance()[0],cascadeVertices[0],cascadeVertices[2]) + << ", " << m_V0Tools->a0zError(cascadeVertices[1],cascadeVertices[2]) + << ", " << m_V0Tools->a0zError(cascadeVertices[0],cascadeVertices[2])); + ATH_MSG_DEBUG("a0xy " << m_CascadeTools->a0xy(moms[2],cascadeVertices[2],primaryVertex) + << ", " << m_CascadeTools->a0xy(moms[1],cascadeVertices[1],cascadeVertices[2]) + << ", " << m_CascadeTools->a0xy(moms[0],cascadeVertices[0],cascadeVertices[2]) + << ", " << m_V0Tools->a0xy(cascadeVertices[1],cascadeVertices[2]) + << ", " << m_V0Tools->a0xy(cascadeVertices[0],cascadeVertices[2])); + ATH_MSG_DEBUG("a0xyErr " << m_CascadeTools->a0xyError(moms[2],x->getCovariance()[2],cascadeVertices[2],primaryVertex) + << ", " << m_CascadeTools->a0xyError(moms[1],x->getCovariance()[1],cascadeVertices[1],cascadeVertices[2]) + << ", " << m_CascadeTools->a0xyError(moms[0],x->getCovariance()[0],cascadeVertices[0],cascadeVertices[2]) + << ", " << 
m_V0Tools->a0xyError(cascadeVertices[1],cascadeVertices[2]) + << ", " << m_V0Tools->a0xyError(cascadeVertices[0],cascadeVertices[2])); + ATH_MSG_DEBUG("a0 " << m_CascadeTools->a0(moms[2],cascadeVertices[2],primaryVertex) + << ", " << m_CascadeTools->a0(moms[1],cascadeVertices[1],cascadeVertices[2]) + << ", " << m_CascadeTools->a0(moms[0],cascadeVertices[0],cascadeVertices[2]) + << ", " << m_V0Tools->a0(cascadeVertices[1],cascadeVertices[2]) + << ", " << m_V0Tools->a0(cascadeVertices[0],cascadeVertices[2])); + ATH_MSG_DEBUG("a0Err " << m_CascadeTools->a0Error(moms[2],x->getCovariance()[2],cascadeVertices[2],primaryVertex) + << ", " << m_CascadeTools->a0Error(moms[1],x->getCovariance()[1],cascadeVertices[1],cascadeVertices[2]) + << ", " << m_CascadeTools->a0Error(moms[0],x->getCovariance()[0],cascadeVertices[0],cascadeVertices[2]) + << ", " << m_V0Tools->a0Error(cascadeVertices[1],cascadeVertices[2]) + << ", " << m_V0Tools->a0Error(cascadeVertices[0],cascadeVertices[2])); + ATH_MSG_DEBUG("x0 " << m_V0Tools->vtx(cascadeVertices[0]).x() << " y0 " << m_V0Tools->vtx(cascadeVertices[0]).y() << " z0 " << m_V0Tools->vtx(cascadeVertices[0]).z()); + ATH_MSG_DEBUG("x1 " << m_V0Tools->vtx(cascadeVertices[1]).x() << " y1 " << m_V0Tools->vtx(cascadeVertices[1]).y() << " z1 " << m_V0Tools->vtx(cascadeVertices[1]).z()); + ATH_MSG_DEBUG("x2 " << m_V0Tools->vtx(cascadeVertices[2]).x() << " y2 " << m_V0Tools->vtx(cascadeVertices[2]).y() << " z2 " << m_V0Tools->vtx(cascadeVertices[2]).z()); + ATH_MSG_DEBUG("X0 " << primaryVertex->x() << " Y0 " << primaryVertex->y() << " Z0 " << primaryVertex->z()); + ATH_MSG_DEBUG("rxy0 " << m_V0Tools->rxy(cascadeVertices[0]) << " rxyErr0 " << m_V0Tools->rxyError(cascadeVertices[0])); + ATH_MSG_DEBUG("rxy1 " << m_V0Tools->rxy(cascadeVertices[1]) << " rxyErr1 " << m_V0Tools->rxyError(cascadeVertices[1])); + ATH_MSG_DEBUG("rxy2 " << m_V0Tools->rxy(cascadeVertices[2]) << " rxyErr2 " << m_V0Tools->rxyError(cascadeVertices[2])); + ATH_MSG_DEBUG("Rxy0 wrt PV 
// Constructor: set property defaults and register all configurable properties.
// Mass hypotheses default to -1, meaning "look the value up in the PDG table
// during initialize()"; mass windows default to wide-open ranges.
JpsiPlusDs1Cascade::JpsiPlusDs1Cascade(const std::string& t, const std::string& n, const IInterface* p) : AthAlgTool(t,n,p),
    m_vertexContainerKey(""),
    m_vertexD0ContainerKey(""),
    m_vertexK0ContainerKey(""),
    m_cascadeOutputsKeys{ "JpsiPlusDs1CascadeVtx1", "JpsiPlusDs1CascadeVtx2", "JpsiPlusDs1CascadeVtx3" },
    m_VxPrimaryCandidateName("PrimaryVertices"),
    m_jpsiMassLower(0.0),
    m_jpsiMassUpper(10000.0),
    m_jpsipiMassLower(0.0),
    m_jpsipiMassUpper(10000.0),
    m_D0MassLower(0.0),
    m_D0MassUpper(10000.0),
    m_K0MassLower(0.0),
    m_K0MassUpper(10000.0),
    m_DstMassLower(0.0),
    m_DstMassUpper(10000.0),
    m_MassLower(0.0),
    m_MassUpper(20000.0),
    // -1 means: take the PDG value in initialize()
    m_vtx0MassHypo(-1),
    m_vtx1MassHypo(-1),
    m_vtx2MassHypo(-1),
    m_vtx0Daug1MassHypo(-1),
    m_vtx0Daug2MassHypo(-1),
    m_vtx0Daug3MassHypo(-1),
    m_vtx1Daug1MassHypo(-1),
    m_vtx1Daug2MassHypo(-1),
    m_vtx2Daug1MassHypo(-1),
    m_vtx2Daug2MassHypo(-1),
    m_particleDataTable(nullptr),
    m_mass_jpsi(-1),
    m_Dx_pid(421),          // PDG code of the D meson hypothesis (421 = D0)
    m_constrD0(true),
    m_constrK0(true),
    m_constrJpsi(true),
    m_chi2cut(-1.0),        // <= 0 disables the chi2/DOF cut
    m_iVertexFitter("Trk::TrkVKalVrtFitter"),
    m_pvRefitter("Analysis::PrimaryVertexRefitter"),
    m_V0Tools("Trk::V0Tools"),
    m_CascadeTools("DerivationFramework::CascadeTools")
{
    // Input vertex collections
    declareProperty("JpsipiVertices", m_vertexContainerKey);
    declareProperty("D0Vertices", m_vertexD0ContainerKey);
    declareProperty("K0Vertices", m_vertexK0ContainerKey);
    declareProperty("VxPrimaryCandidateName", m_VxPrimaryCandidateName);
    declareProperty("RefPVContainerName", m_refPVContainerName = "RefittedPrimaryVertices");
    // Mass-window cuts applied before and after the cascade fit
    declareProperty("JpsiMassLowerCut", m_jpsiMassLower);
    declareProperty("JpsiMassUpperCut", m_jpsiMassUpper);
    declareProperty("JpsipiMassLowerCut", m_jpsipiMassLower);
    declareProperty("JpsipiMassUpperCut", m_jpsipiMassUpper);
    declareProperty("D0MassLowerCut", m_D0MassLower);
    declareProperty("D0MassUpperCut", m_D0MassUpper);
    declareProperty("K0MassLowerCut", m_K0MassLower);
    declareProperty("K0MassUpperCut", m_K0MassUpper);
    declareProperty("DstMassLowerCut", m_DstMassLower);
    declareProperty("DstMassUpperCut", m_DstMassUpper);
    declareProperty("MassLowerCut", m_MassLower);
    declareProperty("MassUpperCut", m_MassUpper);
    declareProperty("HypothesisName", m_hypoName = "Bc");
    // Mass hypotheses for the fitted vertices and their daughter tracks
    declareProperty("Vtx0MassHypo", m_vtx0MassHypo);
    declareProperty("Vtx1MassHypo", m_vtx1MassHypo);
    declareProperty("Vtx2MassHypo", m_vtx2MassHypo);
    declareProperty("Vtx0Daug1MassHypo", m_vtx0Daug1MassHypo);
    declareProperty("Vtx0Daug2MassHypo", m_vtx0Daug2MassHypo);
    declareProperty("Vtx0Daug3MassHypo", m_vtx0Daug3MassHypo);
    declareProperty("Vtx1Daug1MassHypo", m_vtx1Daug1MassHypo);
    declareProperty("Vtx1Daug2MassHypo", m_vtx1Daug2MassHypo);
    declareProperty("Vtx2Daug1MassHypo", m_vtx2Daug1MassHypo);
    declareProperty("Vtx2Daug2MassHypo", m_vtx2Daug2MassHypo);
    // Fit configuration
    declareProperty("JpsiMass", m_mass_jpsi);
    declareProperty("DxHypothesis", m_Dx_pid);
    declareProperty("ApplyD0MassConstraint", m_constrD0);
    declareProperty("ApplyK0MassConstraint", m_constrK0);
    declareProperty("ApplyJpsiMassConstraint", m_constrJpsi);
    declareProperty("Chi2Cut", m_chi2cut);
    // Primary-vertex handling
    declareProperty("RefitPV", m_refitPV = true);
    declareProperty("MaxnPV", m_PV_max = 999);
    declareProperty("MinNTracksInPV", m_PV_minNTracks = 0);
    declareProperty("DoVertexType", m_DoVertexType = 7);
    // Tool handles
    declareProperty("TrkVertexFitterTool", m_iVertexFitter);
    declareProperty("PVRefitter", m_pvRefitter);
    declareProperty("V0Tools", m_V0Tools);
    declareProperty("CascadeTools", m_CascadeTools);
    declareProperty("CascadeVertexCollections", m_cascadeOutputsKeys);
}
JpsiPlusDs1Cascade::~JpsiPlusDs1Cascade(){ }

// Build B_c candidates from the three input vertex collections
// (J/psi+pi, D0/D0bar, K_S0) by running the cascade vertex fit on every
// combination that survives the pre-fit selection.
//
// Fitted candidates passing the chi2/DOF and total-mass cuts are appended to
// *cascadeinfoContainer; ownership of the pushed VxCascadeInfo objects is
// transferred to the caller.  Returns SUCCESS even when nothing is found
// (empty output container); failure only on StoreGate retrieval errors.
StatusCode JpsiPlusDs1Cascade::performSearch(std::vector<Trk::VxCascadeInfo*> *cascadeinfoContainer) const
{
    ATH_MSG_DEBUG( "JpsiPlusDs1Cascade::performSearch" );
    assert(cascadeinfoContainer!=nullptr);

    // Get TrackParticle container (for setting links to the original tracks)
    const xAOD::TrackParticleContainer *trackContainer(nullptr);
    ATH_CHECK(evtStore()->retrieve(trackContainer , "InDetTrackParticles" ));

    // Get Jpsi+pi container
    const xAOD::VertexContainer *jpsipiContainer(nullptr);
    ATH_CHECK(evtStore()->retrieve(jpsipiContainer , m_vertexContainerKey ));

    // Get D0 container
    const xAOD::VertexContainer *d0Container(nullptr);
    ATH_CHECK(evtStore()->retrieve(d0Container , m_vertexD0ContainerKey ));

    // Get K_S0 container
    const xAOD::VertexContainer *k0Container(nullptr);
    ATH_CHECK(evtStore()->retrieve(k0Container , m_vertexK0ContainerKey ));

    // Mass-constraint targets for the D0 and K_S0 legs of the cascade
    double mass_d0 = m_vtx1MassHypo;
    double mass_k0 = m_vtx2MassHypo;
    std::vector<const xAOD::TrackParticle*> tracksJpsipi;
    std::vector<const xAOD::TrackParticle*> tracksJpsi;
    std::vector<const xAOD::TrackParticle*> tracksD0;
    std::vector<const xAOD::TrackParticle*> tracksK0;
    std::vector<const xAOD::TrackParticle*> tracksBc;
    // Per-leg daughter mass hypotheses, in track order
    std::vector<double> massesJpsipi;
    massesJpsipi.push_back(m_vtx0Daug1MassHypo);
    massesJpsipi.push_back(m_vtx0Daug2MassHypo);
    massesJpsipi.push_back(m_vtx0Daug3MassHypo);
    std::vector<double> massesD0;
    massesD0.push_back(m_vtx1Daug1MassHypo);
    massesD0.push_back(m_vtx1Daug2MassHypo);
    std::vector<double> massesD0b; // Change the order of masses for D*-->D0bar pi-, D0bar->K+pi-
    massesD0b.push_back(m_vtx1Daug2MassHypo);
    massesD0b.push_back(m_vtx1Daug1MassHypo);
    std::vector<double> massesK0;
    massesK0.push_back(m_vtx2Daug1MassHypo);
    massesK0.push_back(m_vtx2Daug2MassHypo);
    std::vector<double> Masses;
    Masses.push_back(m_vtx0Daug1MassHypo);
    Masses.push_back(m_vtx0Daug2MassHypo);
    Masses.push_back(m_vtx0Daug3MassHypo);
    Masses.push_back(m_vtx1MassHypo);
    Masses.push_back(m_vtx2MassHypo);

    // Select J/psi pi+ candidates before calling cascade fit
    std::vector<const xAOD::Vertex*> selectedJpsipiCandidates;
    for(auto vxcItr=jpsipiContainer->cbegin(); vxcItr!=jpsipiContainer->cend(); ++vxcItr) {

       // Check the passed flag first
       const xAOD::Vertex* vtx = *vxcItr;
       SG::AuxElement::Accessor<Char_t> flagAcc1("passed_Jpsipi");
       if(flagAcc1.isAvailable(*vtx)){
         if(!flagAcc1(*vtx)) continue;
       }

       // Check J/psi candidate invariant mass (first two tracks = muons) and skip if need be
       TLorentzVector p4Mup_in, p4Mum_in;
       p4Mup_in.SetPtEtaPhiM((*vxcItr)->trackParticle(0)->pt(),
                             (*vxcItr)->trackParticle(0)->eta(),
                             (*vxcItr)->trackParticle(0)->phi(), m_vtx0Daug1MassHypo);
       p4Mum_in.SetPtEtaPhiM((*vxcItr)->trackParticle(1)->pt(),
                             (*vxcItr)->trackParticle(1)->eta(),
                             (*vxcItr)->trackParticle(1)->phi(), m_vtx0Daug2MassHypo);
       double mass_Jpsi = (p4Mup_in + p4Mum_in).M();
       ATH_MSG_DEBUG("Jpsi mass " << mass_Jpsi);
       if (mass_Jpsi < m_jpsiMassLower || mass_Jpsi > m_jpsiMassUpper) {
         ATH_MSG_DEBUG(" Original Jpsi candidate rejected by the mass cut: mass = "
                       << mass_Jpsi << " != (" << m_jpsiMassLower << ", " << m_jpsiMassUpper << ")" );
         continue;
       }

       // Check J/psi pi+ candidate invariant mass and skip if need be
       double mass_Jpsipi = m_V0Tools->invariantMass(*vxcItr, massesJpsipi);
       ATH_MSG_DEBUG("Jpsipi mass " << mass_Jpsipi);
       if (mass_Jpsipi < m_jpsipiMassLower || mass_Jpsipi > m_jpsipiMassUpper) {
         ATH_MSG_DEBUG(" Original Jpsipi candidate rejected by the mass cut: mass = "
                       << mass_Jpsipi << " != (" << m_jpsipiMassLower << ", " << m_jpsipiMassUpper << ")" );
         continue;
       }

       selectedJpsipiCandidates.push_back(*vxcItr);
    }
    if(selectedJpsipiCandidates.size()<1) return StatusCode::SUCCESS;

    // Select the D0/D0b candidates before calling cascade fit
    std::vector<const xAOD::Vertex*> selectedD0Candidates;
    for(auto vxcItr=d0Container->cbegin(); vxcItr!=d0Container->cend(); ++vxcItr) {

       // Check the passed flag first; a vertex may qualify as D0, D0bar, or both
       const xAOD::Vertex* vtx = *vxcItr;
       SG::AuxElement::Accessor<Char_t> flagAcc1("passed_D0");
       SG::AuxElement::Accessor<Char_t> flagAcc2("passed_D0b");
       bool isD0(true);
       bool isD0b(true);
       if(flagAcc1.isAvailable(*vtx)){
         if(!flagAcc1(*vtx)) isD0 = false;
       }
       if(flagAcc2.isAvailable(*vtx)){
         if(!flagAcc2(*vtx)) isD0b = false;
       }
       if(!(isD0||isD0b)) continue;

       // Ensure the total charge is correct (track 0 positive, track 1 negative)
       if ((*vxcItr)->trackParticle(0)->charge() != 1 || (*vxcItr)->trackParticle(1)->charge() != -1) {
         ATH_MSG_DEBUG(" Original D0/D0-bar candidate rejected by the charge requirement: "
                       << (*vxcItr)->trackParticle(0)->charge() << ", " << (*vxcItr)->trackParticle(1)->charge() );
         continue;
       }

       // Check D0/D0bar candidate invariant mass; keep if EITHER hypothesis is in the window
       double mass_D0 = m_V0Tools->invariantMass(*vxcItr,massesD0);
       double mass_D0b = m_V0Tools->invariantMass(*vxcItr,massesD0b);
       ATH_MSG_DEBUG("D0 mass " << mass_D0 << ", D0b mass "<<mass_D0b);
       if ((mass_D0 < m_D0MassLower || mass_D0 > m_D0MassUpper) && (mass_D0b < m_D0MassLower || mass_D0b > m_D0MassUpper)) {
         ATH_MSG_DEBUG(" Original D0 candidate rejected by the mass cut: mass = "
                       << mass_D0 << " != (" << m_D0MassLower << ", " << m_D0MassUpper << ") "
                       << mass_D0b << " != (" << m_D0MassLower << ", " << m_D0MassUpper << ") " );
         continue;
       }

       selectedD0Candidates.push_back(*vxcItr);
    }
    if(selectedD0Candidates.size()<1) return StatusCode::SUCCESS;

    // Select the K_S0 candidates before calling cascade fit
    std::vector<const xAOD::Vertex*> selectedK0Candidates;
    for(auto vxcItr=k0Container->cbegin(); vxcItr!=k0Container->cend(); ++vxcItr) {

       // Check the passed flag first
       const xAOD::Vertex* vtx = *vxcItr;
       SG::AuxElement::Accessor<Char_t> flagAcc1("passed_K0");
       if(flagAcc1.isAvailable(*vtx)){
         if(!flagAcc1(*vtx)) continue;
       }

       // Check K_S0 candidate invariant mass and skip if need be
       double mass_K0 = m_V0Tools->invariantMass(*vxcItr, massesK0);
       ATH_MSG_DEBUG("K_S0 mass " << mass_K0);
       if (mass_K0 < m_K0MassLower || mass_K0 > m_K0MassUpper) {
         ATH_MSG_DEBUG(" Original K_S0 candidate rejected by the mass cut: mass = "
                       << mass_K0 << " != (" << m_K0MassLower << ", " << m_K0MassUpper << ")" );
         continue;
       }

       selectedK0Candidates.push_back(*vxcItr);
    }
    if(selectedK0Candidates.size()<1) return StatusCode::SUCCESS;

    // Select J/psi D*+ candidates
    // Iterate over Jpsi+pi vertices
    for(auto jpsipiItr=selectedJpsipiCandidates.cbegin(); jpsipiItr!=selectedJpsipiCandidates.cend(); ++jpsipiItr) {

       size_t jpsipiTrkNum = (*jpsipiItr)->nTrackParticles();
       tracksJpsipi.clear();
       tracksJpsi.clear();
       // tracksJpsi = the first two (muon) tracks only, used for the J/psi mass constraint
       for( unsigned int it=0; it<jpsipiTrkNum; it++) tracksJpsipi.push_back((*jpsipiItr)->trackParticle(it));
       for( unsigned int it=0; it<jpsipiTrkNum-1; it++) tracksJpsi.push_back((*jpsipiItr)->trackParticle(it));

       if (tracksJpsipi.size() != 3 || massesJpsipi.size() != 3 ) {
         ATH_MSG_INFO("problems with Jpsi+pi input");
       }

       // Soft-pion charge tags the candidate as D*+ (tagD0) or D*- (!tagD0)
       bool tagD0(true);
       if(abs(m_Dx_pid)==421 && (*jpsipiItr)->trackParticle(2)->charge()==-1) tagD0 = false;

       TLorentzVector p4_pi1; // Momentum of soft pion
       p4_pi1.SetPtEtaPhiM((*jpsipiItr)->trackParticle(2)->pt(),
                           (*jpsipiItr)->trackParticle(2)->eta(),
                           (*jpsipiItr)->trackParticle(2)->phi(), m_vtx0Daug3MassHypo);

       // Iterate over D0/D0bar vertices
       for(auto d0Itr=selectedD0Candidates.cbegin(); d0Itr!=selectedD0Candidates.cend(); ++d0Itr) {

          // Check identical tracks in input
          if(std::find(tracksJpsipi.cbegin(), tracksJpsipi.cend(), (*d0Itr)->trackParticle(0)) != tracksJpsipi.cend()) continue;
          if(std::find(tracksJpsipi.cbegin(), tracksJpsipi.cend(), (*d0Itr)->trackParticle(1)) != tracksJpsipi.cend()) continue;

          TLorentzVector p4_ka, p4_pi2;
          if(tagD0){ // for D*+
            p4_pi2.SetPtEtaPhiM((*d0Itr)->trackParticle(0)->pt(),
                                (*d0Itr)->trackParticle(0)->eta(),
                                (*d0Itr)->trackParticle(0)->phi(), m_vtx1Daug1MassHypo);
            p4_ka.SetPtEtaPhiM( (*d0Itr)->trackParticle(1)->pt(),
                                (*d0Itr)->trackParticle(1)->eta(),
                                (*d0Itr)->trackParticle(1)->phi(), m_vtx1Daug2MassHypo);
          }else{ // change the order in the case of D*-
            p4_pi2.SetPtEtaPhiM((*d0Itr)->trackParticle(1)->pt(),
                                (*d0Itr)->trackParticle(1)->eta(),
                                (*d0Itr)->trackParticle(1)->phi(), m_vtx1Daug1MassHypo);
            p4_ka.SetPtEtaPhiM( (*d0Itr)->trackParticle(0)->pt(),
                                (*d0Itr)->trackParticle(0)->eta(),
                                (*d0Itr)->trackParticle(0)->phi(), m_vtx1Daug2MassHypo);
          }
          // Check D*+/- candidate invariant mass (soft pi + K + pi) and skip if need be
          double mass_Dst= (p4_pi1 + p4_ka + p4_pi2).M();
          ATH_MSG_DEBUG("D*+/- mass " << mass_Dst);
          if (mass_Dst < m_DstMassLower || mass_Dst > m_DstMassUpper) {
            ATH_MSG_DEBUG(" Original D*+/- candidate rejected by the mass cut: mass = "
                          << mass_Dst << " != (" << m_DstMassLower << ", " << m_DstMassUpper << ")" );
            continue;
          }

          size_t d0TrkNum = (*d0Itr)->nTrackParticles();
          tracksD0.clear();
          for( unsigned int it=0; it<d0TrkNum; it++) tracksD0.push_back((*d0Itr)->trackParticle(it));
          if (tracksD0.size() != 2 || massesD0.size() != 2 ) {
            ATH_MSG_INFO("problems with D0 input");
          }

          // Iterate over K0 vertices
          for(auto k0Itr=selectedK0Candidates.cbegin(); k0Itr!=selectedK0Candidates.cend(); ++k0Itr) {

             // Check identical tracks in input
             if(std::find(tracksJpsipi.cbegin(), tracksJpsipi.cend(), (*k0Itr)->trackParticle(0)) != tracksJpsipi.cend()) continue;
             if(std::find(tracksJpsipi.cbegin(), tracksJpsipi.cend(), (*k0Itr)->trackParticle(1)) != tracksJpsipi.cend()) continue;
             if(std::find(tracksD0.cbegin(), tracksD0.cend(), (*k0Itr)->trackParticle(0)) != tracksD0.cend()) continue;
             if(std::find(tracksD0.cbegin(), tracksD0.cend(), (*k0Itr)->trackParticle(1)) != tracksD0.cend()) continue;

             size_t k0TrkNum = (*k0Itr)->nTrackParticles();
             tracksK0.clear();
             for( unsigned int it=0; it<k0TrkNum; it++) tracksK0.push_back((*k0Itr)->trackParticle(it));
             if (tracksK0.size() != 2 || massesK0.size() != 2 ) {
               ATH_MSG_INFO("problems with K0 input");
             }

             ATH_MSG_DEBUG("using tracks" << tracksJpsipi[0] << ", " << tracksJpsipi[1] << ", " << tracksJpsipi[2] << ", " << tracksD0[0] << ", " << tracksD0[1] << ", " << tracksK0[0] << ", " << tracksK0[1]);

             tracksBc.clear();
             for( unsigned int it=0; it<jpsipiTrkNum; it++) tracksBc.push_back((*jpsipiItr)->trackParticle(it));
             for( unsigned int it=0; it<d0TrkNum; it++) tracksBc.push_back((*d0Itr)->trackParticle(it));
             for( unsigned int it=0; it<k0TrkNum; it++) tracksBc.push_back((*k0Itr)->trackParticle(it));


             // Apply the user's settings to the fitter
             // Reset
             std::unique_ptr<Trk::IVKalState> state (m_iVertexFitter->makeState());
             // Robustness
             int robustness = 0;
             m_iVertexFitter->setRobustness(robustness, *state);
             // Build up the topology
             // Vertex list
             std::vector<Trk::VertexID> vrtList;
             // K_S0 vertex (optionally mass-constrained to mass_k0)
             Trk::VertexID vK0ID;
             if (m_constrK0) {
               vK0ID = m_iVertexFitter->startVertex(tracksK0,massesK0, *state, mass_k0);
             } else {
               vK0ID = m_iVertexFitter->startVertex(tracksK0,massesK0, *state);
             }
             vrtList.push_back(vK0ID);
             // D0 vertex (optionally mass-constrained; D0bar hypothesis swaps the track masses)
             Trk::VertexID vD0ID;
             if (m_constrD0) {
               if(tagD0) vD0ID = m_iVertexFitter->nextVertex(tracksD0,massesD0, *state, mass_d0);
               else vD0ID = m_iVertexFitter->nextVertex(tracksD0,massesD0b, *state, mass_d0);
             } else {
               if(tagD0) vD0ID = m_iVertexFitter->nextVertex(tracksD0,massesD0, *state);
               else vD0ID = m_iVertexFitter->nextVertex(tracksD0,massesD0b, *state);
             }
             vrtList.push_back(vD0ID);
             // B vertex including Jpsi+pi; K_S0 and D0 legs attach here via vrtList
             Trk::VertexID vBcID = m_iVertexFitter->nextVertex(tracksJpsipi,massesJpsipi,vrtList, *state);
             if (m_constrJpsi) {
               std::vector<Trk::VertexID> cnstV;
               cnstV.clear();
               if ( !m_iVertexFitter->addMassConstraint(vBcID,tracksJpsi,cnstV, *state, m_mass_jpsi).isSuccess() ) {
                 ATH_MSG_WARNING("addMassConstraint failed");
                 //return StatusCode::FAILURE;
               }
             }
             // Do the work
             std::unique_ptr<Trk::VxCascadeInfo> result(m_iVertexFitter->fitCascade(*state));

             if (result != nullptr) {

               // reset links to original tracks
               BPhysPVCascadeTools::PrepareVertexLinks(result.get(), trackContainer);
               ATH_MSG_DEBUG("storing tracks " << ((result->vertices())[0])->trackParticle(0) << ", "
                             << ((result->vertices())[0])->trackParticle(1) << ", "
                             << ((result->vertices())[1])->trackParticle(0) << ", "
                             << ((result->vertices())[1])->trackParticle(1) << ", "
                             << ((result->vertices())[2])->trackParticle(0) << ", "
                             << ((result->vertices())[2])->trackParticle(1) << ", "
                             << ((result->vertices())[2])->trackParticle(2));
               // necessary to prevent memory leak
               result->setSVOwnership(true);

               // Chi2/DOF cut (disabled when m_chi2cut <= 0)
               double bChi2DOF = result->fitChi2()/result->nDoF();
               ATH_MSG_DEBUG("Candidate chi2/DOF is " << bChi2DOF);
               bool chi2CutPassed = (m_chi2cut <= 0.0 || bChi2DOF < m_chi2cut);

               // moms[2] is the top (B_c) vertex; cut on its invariant mass
               const std::vector< std::vector<TLorentzVector> > &moms = result->getParticleMoms();
               double mass = m_CascadeTools->invariantMass(moms[2]);
               if(chi2CutPassed) {
                 if (mass >= m_MassLower && mass <= m_MassUpper) {
                   cascadeinfoContainer->push_back(result.release());
                 } else {
                   ATH_MSG_DEBUG("Candidate rejected by the mass cut: mass = "
                                 << mass << " != (" << m_MassLower << ", " << m_MassUpper << ")" );
                 }
               }
             }

          } //Iterate over K0 vertices

       } //Iterate over D0 vertices

    } //Iterate over Jpsi+pi vertices

    ATH_MSG_DEBUG("cascadeinfoContainer size " << cascadeinfoContainer->size());

    return StatusCode::SUCCESS;
}
m_beamSpotKey.initialize() ); + + IPartPropSvc* partPropSvc = nullptr; + ATH_CHECK( service("PartPropSvc", partPropSvc, true) ); + auto pdt = partPropSvc->PDT(); + + // retrieve particle masses + if(m_mass_jpsi < 0. ) m_mass_jpsi = BPhysPVCascadeTools::getParticleMass(pdt, PDG::J_psi); + if(m_vtx0MassHypo < 0.) + m_vtx0MassHypo = BPhysPVCascadeTools::getParticleMass(pdt, PDG::B_c_plus); + if(m_vtx1MassHypo < 0.) { + if(abs(m_Dx_pid) == 411) m_vtx1MassHypo = BPhysPVCascadeTools::getParticleMass(pdt, PDG::D_plus); + else m_vtx1MassHypo = BPhysPVCascadeTools::getParticleMass(pdt, PDG::D_s_plus); + } + + if(m_vtx0Daug1MassHypo < 0.) m_vtx0Daug1MassHypo = BPhysPVCascadeTools::getParticleMass(pdt, PDG::mu_minus); + if(m_vtx0Daug2MassHypo < 0.) m_vtx0Daug2MassHypo = BPhysPVCascadeTools::getParticleMass(pdt, PDG::mu_minus); + if(m_vtx1Daug1MassHypo < 0.) { + if(abs(m_Dx_pid) == 411) m_vtx1Daug1MassHypo = BPhysPVCascadeTools::getParticleMass(pdt, PDG::pi_plus); + else m_vtx1Daug1MassHypo = BPhysPVCascadeTools::getParticleMass(pdt, PDG::K_plus); + } + if(m_vtx1Daug2MassHypo < 0.) m_vtx1Daug2MassHypo = BPhysPVCascadeTools::getParticleMass(pdt, PDG::K_plus); + if(m_vtx1Daug3MassHypo < 0.) 
m_vtx1Daug3MassHypo = BPhysPVCascadeTools::getParticleMass(pdt, PDG::pi_plus); + + return StatusCode::SUCCESS; + } + + + StatusCode JpsiPlusDsCascade::addBranches() const + { + std::vector<Trk::VxCascadeInfo*> cascadeinfoContainer; + constexpr int topoN = 2; + std::array<xAOD::VertexContainer*, topoN> Vtxwritehandles; + std::array<xAOD::VertexAuxContainer*, topoN> Vtxwritehandlesaux; + if(m_cascadeOutputsKeys.size() !=topoN) { ATH_MSG_FATAL("Incorrect number of VtxContainers"); return StatusCode::FAILURE; } + + for(int i =0; i<topoN;i++){ + Vtxwritehandles[i] = new xAOD::VertexContainer(); + Vtxwritehandlesaux[i] = new xAOD::VertexAuxContainer(); + Vtxwritehandles[i]->setStore(Vtxwritehandlesaux[i]); + ATH_CHECK(evtStore()->record(Vtxwritehandles[i] , m_cascadeOutputsKeys[i] )); + ATH_CHECK(evtStore()->record(Vtxwritehandlesaux[i], m_cascadeOutputsKeys[i] + "Aux.")); + } + + //---------------------------------------------------- + // retrieve primary vertices + //---------------------------------------------------- + const xAOD::Vertex * primaryVertex(nullptr); + const xAOD::VertexContainer *pvContainer(nullptr); + ATH_CHECK(evtStore()->retrieve(pvContainer, m_VxPrimaryCandidateName)); + ATH_MSG_DEBUG("Found " << m_VxPrimaryCandidateName << " in StoreGate!"); + + if (pvContainer->size()==0){ + ATH_MSG_WARNING("You have no primary vertices: " << pvContainer->size()); + return StatusCode::RECOVERABLE; + } else { + primaryVertex = (*pvContainer)[0]; + } + + //---------------------------------------------------- + // Try to retrieve refitted primary vertices + //---------------------------------------------------- + xAOD::VertexContainer* refPvContainer = nullptr; + xAOD::VertexAuxContainer* refPvAuxContainer = nullptr; + if (m_refitPV) { + if (evtStore()->contains<xAOD::VertexContainer>(m_refPVContainerName)) { + // refitted PV container exists. 
Get it from the store gate + ATH_CHECK(evtStore()->retrieve(refPvContainer , m_refPVContainerName )); + ATH_CHECK(evtStore()->retrieve(refPvAuxContainer, m_refPVContainerName + "Aux.")); + } else { + // refitted PV container does not exist. Create a new one. + refPvContainer = new xAOD::VertexContainer; + refPvAuxContainer = new xAOD::VertexAuxContainer; + refPvContainer->setStore(refPvAuxContainer); + ATH_CHECK(evtStore()->record(refPvContainer , m_refPVContainerName)); + ATH_CHECK(evtStore()->record(refPvAuxContainer, m_refPVContainerName+"Aux.")); + } + } + + ATH_CHECK(performSearch(&cascadeinfoContainer)); + + SG::ReadCondHandle<InDet::BeamSpotData> beamSpotHandle { m_beamSpotKey }; + if(not beamSpotHandle.isValid()) ATH_MSG_ERROR("Cannot Retrieve " << m_beamSpotKey.key() ); + BPhysPVCascadeTools helper(&(*m_CascadeTools), beamSpotHandle.cptr()); + helper.SetMinNTracksInPV(m_PV_minNTracks); + + // Decorators for the main vertex: chi2, ndf, pt and pt error, plus the V0 vertex variables + SG::AuxElement::Decorator<VertexLinkVector> CascadeLinksDecor("CascadeVertexLinks"); + SG::AuxElement::Decorator<VertexLinkVector> JpsiLinksDecor("JpsiVertexLinks"); + SG::AuxElement::Decorator<VertexLinkVector> DxLinksDecor("DxVertexLinks"); + SG::AuxElement::Decorator<float> chi2_decor("ChiSquared"); + SG::AuxElement::Decorator<float> ndof_decor("NumberDoF"); + SG::AuxElement::Decorator<float> Pt_decor("Pt"); + SG::AuxElement::Decorator<float> PtErr_decor("PtErr"); + SG::AuxElement::Decorator<float> Mass_svdecor("Dx_mass"); + SG::AuxElement::Decorator<float> MassErr_svdecor("Dx_massErr"); + SG::AuxElement::Decorator<float> Pt_svdecor("Dx_Pt"); + SG::AuxElement::Decorator<float> PtErr_svdecor("Dx_PtErr"); + SG::AuxElement::Decorator<float> Lxy_svdecor("Dx_Lxy"); + SG::AuxElement::Decorator<float> LxyErr_svdecor("Dx_LxyErr"); + SG::AuxElement::Decorator<float> Tau_svdecor("Dx_Tau"); + SG::AuxElement::Decorator<float> TauErr_svdecor("Dx_TauErr"); + + 
SG::AuxElement::Decorator<float> MassMumu_decor("Mumu_mass"); + SG::AuxElement::Decorator<float> MassKX_svdecor("KX_mass"); + SG::AuxElement::Decorator<float> MassKXpi_svdecor("KXpi_mass"); + + ATH_MSG_DEBUG("cascadeinfoContainer size " << cascadeinfoContainer.size()); + + // Get Jpsi container and identify the input Jpsi + const xAOD::VertexContainer *jpsiContainer(nullptr); + ATH_CHECK(evtStore()->retrieve(jpsiContainer , m_vertexContainerKey )); + const xAOD::VertexContainer *dxContainer(nullptr); + ATH_CHECK(evtStore()->retrieve(dxContainer , m_vertexDxContainerKey )); + + for (Trk::VxCascadeInfo* x : cascadeinfoContainer) { + if(x==nullptr) ATH_MSG_ERROR("cascadeinfoContainer is null"); + + // the cascade fitter returns: + // std::vector<xAOD::Vertex*>, each xAOD::Vertex contains the refitted track parameters (perigee at the vertex position) + // vertices[iv] the links to the original TPs and a covariance of size 3+5*NTRK; the chi2 of the total fit + // is split between the cascade vertices as per track contribution + // std::vector< std::vector<TLorentzVector> >, each std::vector<TLorentzVector> contains the refitted momenta (TLorentzVector) + // momenta[iv][...] of all tracks in the corresponding vertex, including any pseudotracks (from cascade vertices) + // originating in this vertex; the masses are as assigned in the cascade fit + // std::vector<Amg::MatrixX>, the corresponding covariance matrices in momentum space + // covariance[iv] + // int nDoF, double Chi2 + // + // the invariant mass, pt, lifetime etc. errors should be calculated using the covariance matrices in momentum space as these + // take into account the full track-track and track-vertex correlations + // + // in the case of Jpsi+V0: vertices[0] is the V0 vertex, vertices[1] is the B/Lambda_b(bar) vertex, containing the 2 Jpsi tracks. + // The covariance terms between the two vertices are not stored. 
In momentum space momenta[0] contains the 2 V0 tracks, + // their momenta add up to the momentum of the 3rd track in momenta[1], the first two being the Jpsi tracks + + const std::vector<xAOD::Vertex*> &cascadeVertices = x->vertices(); + if(cascadeVertices.size()!=topoN) + ATH_MSG_ERROR("Incorrect number of vertices"); + if(cascadeVertices[0] == nullptr || cascadeVertices[1] == nullptr) ATH_MSG_ERROR("Error null vertex"); + // Keep vertices (bear in mind that they come in reverse order!) + for(int i =0;i<topoN;i++) Vtxwritehandles[i]->push_back(cascadeVertices[i]); + + x->setSVOwnership(false); // Prevent Container from deleting vertices + const auto mainVertex = cascadeVertices[1]; // this is the B_c+/- vertex + const std::vector< std::vector<TLorentzVector> > &moms = x->getParticleMoms(); + + // Set links to cascade vertices + std::vector<const xAOD::Vertex*> verticestoLink; + verticestoLink.push_back(cascadeVertices[0]); + if(Vtxwritehandles[1] == nullptr) ATH_MSG_ERROR("Vtxwritehandles[1] is null"); + if(!BPhysPVCascadeTools::LinkVertices(CascadeLinksDecor, verticestoLink, Vtxwritehandles[0], cascadeVertices[1])) + ATH_MSG_ERROR("Error decorating with cascade vertices"); + + // Identify the input Jpsi + const xAOD::Vertex* jpsiVertex = BPhysPVCascadeTools::FindVertex<2>(jpsiContainer, cascadeVertices[1]); + ATH_MSG_DEBUG("1 pt Jpsi tracks " << cascadeVertices[1]->trackParticle(0)->pt() << ", " << cascadeVertices[1]->trackParticle(1)->pt()); + if (jpsiVertex) ATH_MSG_DEBUG("2 pt Jpsi tracks " << jpsiVertex->trackParticle(0)->pt() << ", " << jpsiVertex->trackParticle(1)->pt()); + + // Identify the input D_(s)+ + const xAOD::Vertex* dxVertex = BPhysPVCascadeTools::FindVertex<3>(dxContainer, cascadeVertices[0]);; + ATH_MSG_DEBUG("1 pt D_(s)+ tracks " << cascadeVertices[0]->trackParticle(0)->pt() << ", " << cascadeVertices[0]->trackParticle(1)->pt() << ", " << cascadeVertices[0]->trackParticle(2)->pt()); + if (dxVertex) ATH_MSG_DEBUG("2 pt D_(s)+ tracks " << 
dxVertex->trackParticle(0)->pt() << ", " << dxVertex->trackParticle(1)->pt() << ", " << dxVertex->trackParticle(2)->pt()); + + // Set links to input vertices + std::vector<const xAOD::Vertex*> jpsiVerticestoLink; + if (jpsiVertex) jpsiVerticestoLink.push_back(jpsiVertex); + else ATH_MSG_WARNING("Could not find linking Jpsi"); + if(!BPhysPVCascadeTools::LinkVertices(JpsiLinksDecor, jpsiVerticestoLink, jpsiContainer, cascadeVertices[1])) + ATH_MSG_ERROR("Error decorating with Jpsi vertices"); + + std::vector<const xAOD::Vertex*> dxVerticestoLink; + if (dxVertex) dxVerticestoLink.push_back(dxVertex); + else ATH_MSG_WARNING("Could not find linking D_(s)+"); + if(!BPhysPVCascadeTools::LinkVertices(DxLinksDecor, dxVerticestoLink, dxContainer, cascadeVertices[1])) + ATH_MSG_ERROR("Error decorating with D_(s)+ vertices"); + + bool tagDp(true); + if (dxVertex) { + if(abs(m_Dx_pid)==411 && (dxVertex->trackParticle(2)->charge()==-1)) tagDp = false; + } + + double mass_b = m_vtx0MassHypo; + double mass_d = m_vtx1MassHypo; + std::vector<double> massesJpsi; + massesJpsi.push_back(m_vtx0Daug1MassHypo); + massesJpsi.push_back(m_vtx0Daug2MassHypo); + std::vector<double> massesDx; + if(tagDp){ + massesDx.push_back(m_vtx1Daug1MassHypo); + massesDx.push_back(m_vtx1Daug2MassHypo); + }else{ // Change the order for D- + massesDx.push_back(m_vtx1Daug2MassHypo); + massesDx.push_back(m_vtx1Daug1MassHypo); + } + massesDx.push_back(m_vtx1Daug3MassHypo); + std::vector<double> Masses; + Masses.push_back(m_vtx0Daug1MassHypo); + Masses.push_back(m_vtx0Daug2MassHypo); + Masses.push_back(m_vtx1MassHypo); + + // loop over candidates -- Don't apply PV_minNTracks requirement here + // because it may result in exclusion of the high-pt PV. 
+ // get good PVs + + + xAOD::BPhysHypoHelper vtx(m_hypoName, mainVertex); + + // Get refitted track momenta from all vertices, charged tracks only + BPhysPVCascadeTools::SetVectorInfo(vtx, x); + + // Decorate main vertex + // + // 1.a) mass, mass error + BPHYS_CHECK( vtx.setMass(m_CascadeTools->invariantMass(moms[1])) ); + BPHYS_CHECK( vtx.setMassErr(m_CascadeTools->invariantMassError(moms[1],x->getCovariance()[1])) ); + // 1.b) pt and pT error (the default pt of mainVertex is != the pt of the full cascade fit!) + Pt_decor(*mainVertex) = m_CascadeTools->pT(moms[1]); + PtErr_decor(*mainVertex) = m_CascadeTools->pTError(moms[1],x->getCovariance()[1]); + // 1.c) chi2 and ndof (the default chi2 of mainVertex is != the chi2 of the full cascade fit!) + chi2_decor(*mainVertex) = x->fitChi2(); + ndof_decor(*mainVertex) = x->nDoF(); + + float massMumu = 0.; + if (jpsiVertex) { + TLorentzVector p4_mu1, p4_mu2; + p4_mu1.SetPtEtaPhiM(jpsiVertex->trackParticle(0)->pt(), + jpsiVertex->trackParticle(0)->eta(), + jpsiVertex->trackParticle(0)->phi(), m_vtx0Daug1MassHypo); + p4_mu2.SetPtEtaPhiM(jpsiVertex->trackParticle(1)->pt(), + jpsiVertex->trackParticle(1)->eta(), + jpsiVertex->trackParticle(1)->phi(), m_vtx0Daug2MassHypo); + massMumu = (p4_mu1 + p4_mu2).M(); + } + MassMumu_decor(*mainVertex) = massMumu; + + float massKX = 0., massKXpi = 0.; + if (dxVertex) { + TLorentzVector p4_h1, p4_h2, p4_h3; + if(tagDp){ + p4_h1.SetPtEtaPhiM(dxVertex->trackParticle(0)->pt(), + dxVertex->trackParticle(0)->eta(), + dxVertex->trackParticle(0)->phi(), m_vtx1Daug1MassHypo); + p4_h2.SetPtEtaPhiM(dxVertex->trackParticle(1)->pt(), + dxVertex->trackParticle(1)->eta(), + dxVertex->trackParticle(1)->phi(), m_vtx1Daug2MassHypo); + }else{ // Change the order for D- + p4_h1.SetPtEtaPhiM(dxVertex->trackParticle(0)->pt(), + dxVertex->trackParticle(0)->eta(), + dxVertex->trackParticle(0)->phi(), m_vtx1Daug2MassHypo); + p4_h2.SetPtEtaPhiM(dxVertex->trackParticle(1)->pt(), + 
dxVertex->trackParticle(1)->eta(), + dxVertex->trackParticle(1)->phi(), m_vtx1Daug1MassHypo); + } + p4_h3.SetPtEtaPhiM(dxVertex->trackParticle(2)->pt(), + dxVertex->trackParticle(2)->eta(), + dxVertex->trackParticle(2)->phi(), m_vtx1Daug3MassHypo); + massKX = (p4_h1 + p4_h2).M(); + massKXpi = (p4_h1 + p4_h2 + p4_h3).M(); + } + MassKX_svdecor(*mainVertex) = massKX; + MassKXpi_svdecor(*mainVertex) = massKXpi; + + ATH_CHECK(helper.FillCandwithRefittedVertices(m_refitPV, pvContainer, + refPvContainer, &(*m_pvRefitter), m_PV_max, m_DoVertexType, x, 1, mass_b, vtx)); + + + // 4) decorate the main vertex with V0 vertex mass, pt, lifetime and lxy values (plus errors) + // V0 points to the main vertex, so lifetime and lxy are w.r.t the main vertex + Mass_svdecor(*mainVertex) = m_CascadeTools->invariantMass(moms[0]); + MassErr_svdecor(*mainVertex) = m_CascadeTools->invariantMassError(moms[0],x->getCovariance()[0]); + Pt_svdecor(*mainVertex) = m_CascadeTools->pT(moms[0]); + PtErr_svdecor(*mainVertex) = m_CascadeTools->pTError(moms[0],x->getCovariance()[0]); + Lxy_svdecor(*mainVertex) = m_CascadeTools->lxy(moms[0],cascadeVertices[0],cascadeVertices[1]); + LxyErr_svdecor(*mainVertex) = m_CascadeTools->lxyError(moms[0],x->getCovariance()[0],cascadeVertices[0],cascadeVertices[1]); + Tau_svdecor(*mainVertex) = m_CascadeTools->tau(moms[0],cascadeVertices[0],cascadeVertices[1]); + TauErr_svdecor(*mainVertex) = m_CascadeTools->tauError(moms[0],x->getCovariance()[0],cascadeVertices[0],cascadeVertices[1]); + + // Some checks in DEBUG mode + ATH_MSG_DEBUG("chi2 " << x->fitChi2() + << " chi2_1 " << m_V0Tools->chisq(cascadeVertices[0]) + << " chi2_2 " << m_V0Tools->chisq(cascadeVertices[1]) + << " vprob " << m_CascadeTools->vertexProbability(x->nDoF(),x->fitChi2())); + ATH_MSG_DEBUG("ndf " << x->nDoF() << " ndf_1 " << m_V0Tools->ndof(cascadeVertices[0]) << " ndf_2 " << m_V0Tools->ndof(cascadeVertices[1])); + ATH_MSG_DEBUG("V0Tools mass_d " << 
m_V0Tools->invariantMass(cascadeVertices[0],massesDx) + << " error " << m_V0Tools->invariantMassError(cascadeVertices[0],massesDx) + << " mass_J " << m_V0Tools->invariantMass(cascadeVertices[1],massesJpsi) + << " error " << m_V0Tools->invariantMassError(cascadeVertices[1],massesJpsi)); + // masses and errors, using track masses assigned in the fit + double Mass_B = m_CascadeTools->invariantMass(moms[1]); + double Mass_D = m_CascadeTools->invariantMass(moms[0]); + double Mass_B_err = m_CascadeTools->invariantMassError(moms[1],x->getCovariance()[1]); + double Mass_D_err = m_CascadeTools->invariantMassError(moms[0],x->getCovariance()[0]); + ATH_MSG_DEBUG("Mass_B " << Mass_B << " Mass_D " << Mass_D); + ATH_MSG_DEBUG("Mass_B_err " << Mass_B_err << " Mass_D_err " << Mass_D_err); + double mprob_B = m_CascadeTools->massProbability(mass_b,Mass_B,Mass_B_err); + double mprob_D = m_CascadeTools->massProbability(mass_d,Mass_D,Mass_D_err); + ATH_MSG_DEBUG("mprob_B " << mprob_B << " mprob_D " << mprob_D); + // masses and errors, assigning user defined track masses + ATH_MSG_DEBUG("Mass_b " << m_CascadeTools->invariantMass(moms[1],Masses) + << " Mass_d " << m_CascadeTools->invariantMass(moms[0],massesDx)); + ATH_MSG_DEBUG("Mass_b_err " << m_CascadeTools->invariantMassError(moms[1],x->getCovariance()[1],Masses) + << " Mass_d_err " << m_CascadeTools->invariantMassError(moms[0],x->getCovariance()[0],massesDx)); + ATH_MSG_DEBUG("pt_b " << m_CascadeTools->pT(moms[1]) + << " pt_d " << m_CascadeTools->pT(moms[0]) + << " pt_dp " << m_V0Tools->pT(cascadeVertices[0])); + ATH_MSG_DEBUG("ptErr_b " << m_CascadeTools->pTError(moms[1],x->getCovariance()[1]) + << " ptErr_d " << m_CascadeTools->pTError(moms[0],x->getCovariance()[0]) + << " ptErr_dp " << m_V0Tools->pTError(cascadeVertices[0])); + ATH_MSG_DEBUG("lxy_B " << m_V0Tools->lxy(cascadeVertices[1],primaryVertex) << " lxy_D " << m_V0Tools->lxy(cascadeVertices[0],cascadeVertices[1])); + ATH_MSG_DEBUG("lxy_b " << 
m_CascadeTools->lxy(moms[1],cascadeVertices[1],primaryVertex) << " lxy_d " << m_CascadeTools->lxy(moms[0],cascadeVertices[0],cascadeVertices[1])); + ATH_MSG_DEBUG("lxyErr_b " << m_CascadeTools->lxyError(moms[1],x->getCovariance()[1],cascadeVertices[1],primaryVertex) + << " lxyErr_d " << m_CascadeTools->lxyError(moms[0],x->getCovariance()[0],cascadeVertices[0],cascadeVertices[1]) + << " lxyErr_dp " << m_V0Tools->lxyError(cascadeVertices[0],cascadeVertices[1])); + ATH_MSG_DEBUG("tau_B " << m_CascadeTools->tau(moms[1],cascadeVertices[1],primaryVertex,mass_b) + << " tau_dp " << m_V0Tools->tau(cascadeVertices[0],cascadeVertices[1],massesDx)); + ATH_MSG_DEBUG("tau_b " << m_CascadeTools->tau(moms[1],cascadeVertices[1],primaryVertex) + << " tau_d " << m_CascadeTools->tau(moms[0],cascadeVertices[0],cascadeVertices[1]) + << " tau_D " << m_CascadeTools->tau(moms[0],cascadeVertices[0],cascadeVertices[1],mass_d)); + ATH_MSG_DEBUG("tauErr_b " << m_CascadeTools->tauError(moms[1],x->getCovariance()[1],cascadeVertices[1],primaryVertex) + << " tauErr_d " << m_CascadeTools->tauError(moms[0],x->getCovariance()[0],cascadeVertices[0],cascadeVertices[1]) + << " tauErr_dp " << m_V0Tools->tauError(cascadeVertices[0],cascadeVertices[1],massesDx)); + ATH_MSG_DEBUG("TauErr_b " << m_CascadeTools->tauError(moms[1],x->getCovariance()[1],cascadeVertices[1],primaryVertex,mass_b) + << " TauErr_d " << m_CascadeTools->tauError(moms[0],x->getCovariance()[0],cascadeVertices[0],cascadeVertices[1],mass_d) + << " TauErr_dp " << m_V0Tools->tauError(cascadeVertices[0],cascadeVertices[1],massesDx,mass_d)); + + ATH_MSG_DEBUG("CascadeTools main vert wrt PV " << " CascadeTools SV " << " V0Tools SV"); + ATH_MSG_DEBUG("a0z " << m_CascadeTools->a0z(moms[1],cascadeVertices[1],primaryVertex) + << ", " << m_CascadeTools->a0z(moms[0],cascadeVertices[0],cascadeVertices[1]) + << ", " << m_V0Tools->a0z(cascadeVertices[0],cascadeVertices[1])); + ATH_MSG_DEBUG("a0zErr " << 
m_CascadeTools->a0zError(moms[1],x->getCovariance()[1],cascadeVertices[1],primaryVertex) + << ", " << m_CascadeTools->a0zError(moms[0],x->getCovariance()[0],cascadeVertices[0],cascadeVertices[1]) + << ", " << m_V0Tools->a0zError(cascadeVertices[0],cascadeVertices[1])); + ATH_MSG_DEBUG("a0xy " << m_CascadeTools->a0xy(moms[1],cascadeVertices[1],primaryVertex) + << ", " << m_CascadeTools->a0xy(moms[0],cascadeVertices[0],cascadeVertices[1]) + << ", " << m_V0Tools->a0xy(cascadeVertices[0],cascadeVertices[1])); + ATH_MSG_DEBUG("a0xyErr " << m_CascadeTools->a0xyError(moms[1],x->getCovariance()[1],cascadeVertices[1],primaryVertex) + << ", " << m_CascadeTools->a0xyError(moms[0],x->getCovariance()[0],cascadeVertices[0],cascadeVertices[1]) + << ", " << m_V0Tools->a0xyError(cascadeVertices[0],cascadeVertices[1])); + ATH_MSG_DEBUG("a0 " << m_CascadeTools->a0(moms[1],cascadeVertices[1],primaryVertex) + << ", " << m_CascadeTools->a0(moms[0],cascadeVertices[0],cascadeVertices[1]) + << ", " << m_V0Tools->a0(cascadeVertices[0],cascadeVertices[1])); + ATH_MSG_DEBUG("a0Err " << m_CascadeTools->a0Error(moms[1],x->getCovariance()[1],cascadeVertices[1],primaryVertex) + << ", " << m_CascadeTools->a0Error(moms[0],x->getCovariance()[0],cascadeVertices[0],cascadeVertices[1]) + << ", " << m_V0Tools->a0Error(cascadeVertices[0],cascadeVertices[1])); + ATH_MSG_DEBUG("x0 " << m_V0Tools->vtx(cascadeVertices[0]).x() << " y0 " << m_V0Tools->vtx(cascadeVertices[0]).y() << " z0 " << m_V0Tools->vtx(cascadeVertices[0]).z()); + ATH_MSG_DEBUG("x1 " << m_V0Tools->vtx(cascadeVertices[1]).x() << " y1 " << m_V0Tools->vtx(cascadeVertices[1]).y() << " z1 " << m_V0Tools->vtx(cascadeVertices[1]).z()); + ATH_MSG_DEBUG("X0 " << primaryVertex->x() << " Y0 " << primaryVertex->y() << " Z0 " << primaryVertex->z()); + ATH_MSG_DEBUG("rxy0 " << m_V0Tools->rxy(cascadeVertices[0]) << " rxyErr0 " << m_V0Tools->rxyError(cascadeVertices[0])); + ATH_MSG_DEBUG("rxy1 " << m_V0Tools->rxy(cascadeVertices[1]) << " rxyErr1 " << 
m_V0Tools->rxyError(cascadeVertices[1])); + ATH_MSG_DEBUG("Rxy0 wrt PV " << m_V0Tools->rxy(cascadeVertices[0],primaryVertex) << " RxyErr0 wrt PV " << m_V0Tools->rxyError(cascadeVertices[0],primaryVertex)); + ATH_MSG_DEBUG("Rxy1 wrt PV " << m_V0Tools->rxy(cascadeVertices[1],primaryVertex) << " RxyErr1 wrt PV " << m_V0Tools->rxyError(cascadeVertices[1],primaryVertex)); + ATH_MSG_DEBUG("number of covariance matrices " << (x->getCovariance()).size()); + } // loop over cascadeinfoContainer + + // Deleting cascadeinfo since this won't be stored. + // Vertices have been kept in m_cascadeOutputs and should be owned by their container + for (auto x : cascadeinfoContainer) delete x; + + return StatusCode::SUCCESS; + } + + + JpsiPlusDsCascade::JpsiPlusDsCascade(const std::string& t, const std::string& n, const IInterface* p) : AthAlgTool(t,n,p), + m_vertexContainerKey(""), + m_vertexDxContainerKey(""), + m_cascadeOutputsKeys{ "JpsiPlusDsCascadeVtx1", "JpsiPlusDsCascadeVtx2" }, + m_VxPrimaryCandidateName("PrimaryVertices"), + m_jpsiMassLower(0.0), + m_jpsiMassUpper(10000.0), + m_DxMassLower(0.0), + m_DxMassUpper(10000.0), + m_MassLower(0.0), + m_MassUpper(20000.0), + m_vtx0MassHypo(-1), + m_vtx1MassHypo(-1), + m_vtx0Daug1MassHypo(-1), + m_vtx0Daug2MassHypo(-1), + m_vtx1Daug1MassHypo(-1), + m_vtx1Daug2MassHypo(-1), + m_vtx1Daug3MassHypo(-1), + m_mass_jpsi(-1), + m_Dx_pid(431), + m_constrDx(true), + m_constrJpsi(true), + m_chi2cut(-1.0), + m_iVertexFitter("Trk::TrkVKalVrtFitter"), + m_pvRefitter("Analysis::PrimaryVertexRefitter"), + m_V0Tools("Trk::V0Tools"), + m_CascadeTools("DerivationFramework::CascadeTools") + { + declareProperty("JpsiVertices", m_vertexContainerKey); + declareProperty("DxVertices", m_vertexDxContainerKey); + declareProperty("VxPrimaryCandidateName", m_VxPrimaryCandidateName); + declareProperty("RefPVContainerName", m_refPVContainerName = "RefittedPrimaryVertices"); + declareProperty("JpsiMassLowerCut", m_jpsiMassLower); + declareProperty("JpsiMassUpperCut", 
m_jpsiMassUpper); + declareProperty("DxMassLowerCut", m_DxMassLower); + declareProperty("DxMassUpperCut", m_DxMassUpper); + declareProperty("MassLowerCut", m_MassLower); + declareProperty("MassUpperCut", m_MassUpper); + declareProperty("HypothesisName", m_hypoName = "Bc"); + declareProperty("Vtx0MassHypo", m_vtx0MassHypo); + declareProperty("Vtx1MassHypo", m_vtx1MassHypo); + declareProperty("Vtx0Daug1MassHypo", m_vtx0Daug1MassHypo); + declareProperty("Vtx0Daug2MassHypo", m_vtx0Daug2MassHypo); + declareProperty("Vtx1Daug1MassHypo", m_vtx1Daug1MassHypo); + declareProperty("Vtx1Daug2MassHypo", m_vtx1Daug2MassHypo); + declareProperty("Vtx1Daug3MassHypo", m_vtx1Daug3MassHypo); + declareProperty("JpsiMass", m_mass_jpsi); + declareProperty("DxHypothesis", m_Dx_pid); + declareProperty("ApplyDxMassConstraint", m_constrDx); + declareProperty("ApplyJpsiMassConstraint", m_constrJpsi); + declareProperty("Chi2Cut", m_chi2cut); + declareProperty("RefitPV", m_refitPV = true); + declareProperty("MaxnPV", m_PV_max = 999); + declareProperty("MinNTracksInPV", m_PV_minNTracks = 0); + declareProperty("DoVertexType", m_DoVertexType = 7); + declareProperty("TrkVertexFitterTool", m_iVertexFitter); + declareProperty("PVRefitter", m_pvRefitter); + declareProperty("V0Tools", m_V0Tools); + declareProperty("CascadeTools", m_CascadeTools); + declareProperty("CascadeVertexCollections", m_cascadeOutputsKeys); + } + + JpsiPlusDsCascade::~JpsiPlusDsCascade(){ } + + StatusCode JpsiPlusDsCascade::performSearch(std::vector<Trk::VxCascadeInfo*> *cascadeinfoContainer) const + { + ATH_MSG_DEBUG( "JpsiPlusDsCascade::performSearch" ); + assert(cascadeinfoContainer!=nullptr); + + // Get TrackParticle container (for setting links to the original tracks) + const xAOD::TrackParticleContainer *trackContainer(nullptr); + ATH_CHECK(evtStore()->retrieve(trackContainer , "InDetTrackParticles" )); + + // Get Jpsi container + const xAOD::VertexContainer *jpsiContainer(nullptr); + 
ATH_CHECK(evtStore()->retrieve(jpsiContainer , m_vertexContainerKey )); + + // Get V0 container + const xAOD::VertexContainer *dxContainer(nullptr); + ATH_CHECK(evtStore()->retrieve(dxContainer , m_vertexDxContainerKey )); + + + + double mass_d = m_vtx1MassHypo; + std::vector<const xAOD::TrackParticle*> tracksJpsi; + std::vector<const xAOD::TrackParticle*> tracksDx; + std::vector<const xAOD::TrackParticle*> tracksBc; + std::vector<double> massesJpsi; + massesJpsi.push_back(m_vtx0Daug1MassHypo); + massesJpsi.push_back(m_vtx0Daug2MassHypo); + std::vector<double> massesDx; + massesDx.push_back(m_vtx1Daug1MassHypo); + massesDx.push_back(m_vtx1Daug2MassHypo); + massesDx.push_back(m_vtx1Daug3MassHypo); + std::vector<double> massesDm; // Alter the order of masses for D- + massesDm.push_back(m_vtx1Daug2MassHypo); + massesDm.push_back(m_vtx1Daug1MassHypo); + massesDm.push_back(m_vtx1Daug3MassHypo); + std::vector<double> Masses; + Masses.push_back(m_vtx0Daug1MassHypo); + Masses.push_back(m_vtx0Daug2MassHypo); + Masses.push_back(m_vtx1MassHypo); + + // Select the J/psi candidates before calling cascade fit + std::vector<const xAOD::Vertex*> selectedJpsiCandidates; + for(auto vxcItr=jpsiContainer->cbegin(); vxcItr!=jpsiContainer->cend(); ++vxcItr) { + + // Check the passed flag first + const xAOD::Vertex* vtx = *vxcItr; + SG::AuxElement::Accessor<Char_t> flagAcc1("passed_Jpsi"); + if(flagAcc1.isAvailable(*vtx)){ + if(!flagAcc1(*vtx)) continue; + } + + // Check J/psi candidate invariant mass and skip if need be + double mass_Jpsi = m_V0Tools->invariantMass(*vxcItr, massesJpsi); + if (mass_Jpsi < m_jpsiMassLower || mass_Jpsi > m_jpsiMassUpper) { + ATH_MSG_DEBUG(" Original Jpsi candidate rejected by the mass cut: mass = " + << mass_Jpsi << " != (" << m_jpsiMassLower << ", " << m_jpsiMassUpper << ")" ); + continue; + } + selectedJpsiCandidates.push_back(*vxcItr); + } + if(selectedJpsiCandidates.size()<1) return StatusCode::SUCCESS; + + // Select the D_s+/D+ candidates before 
calling cascade fit +    std::vector<const xAOD::Vertex*> selectedDxCandidates; +    for(auto vxcItr=dxContainer->cbegin(); vxcItr!=dxContainer->cend(); ++vxcItr) { + +       // Check the passed flag first +       const xAOD::Vertex* vtx = *vxcItr; +       if(abs(m_Dx_pid)==431) { // D_s+/- +          SG::AuxElement::Accessor<Char_t> flagAcc1("passed_Ds"); +          if(flagAcc1.isAvailable(*vtx)){ +             if(!flagAcc1(*vtx)) continue; +          } +       } + +       if(abs(m_Dx_pid)==411) { // D+/- +          SG::AuxElement::Accessor<Char_t> flagAcc1("passed_Dp"); +          SG::AuxElement::Accessor<Char_t> flagAcc2("passed_Dm"); +          bool isDp(true); +          bool isDm(true); +          if(flagAcc1.isAvailable(*vtx)){ +             if(!flagAcc1(*vtx)) isDp = false; +          } +          if(flagAcc2.isAvailable(*vtx)){ +             if(!flagAcc2(*vtx)) isDm = false; +          } +          if(!(isDp||isDm)) continue; +       } + + +       // Ensure the total charge is correct +       if(abs((*vxcItr)->trackParticle(0)->charge()+(*vxcItr)->trackParticle(1)->charge()+(*vxcItr)->trackParticle(2)->charge()) != 1){ +          ATH_MSG_DEBUG(" Original D+ candidate rejected by the charge requirement: " +                        << (*vxcItr)->trackParticle(0)->charge() << ", " << (*vxcItr)->trackParticle(1)->charge() << ", " << (*vxcItr)->trackParticle(2)->charge() ); +          continue; +       } + +       // Check D_(s)+/- candidate invariant mass and skip if need be +       double mass_D; +       if(abs(m_Dx_pid)==411 && (*vxcItr)->trackParticle(2)->charge()<0) // D- +          mass_D = m_V0Tools->invariantMass(*vxcItr,massesDm); +       else // D+, D_s+/- +          mass_D = m_V0Tools->invariantMass(*vxcItr,massesDx); +       ATH_MSG_DEBUG("D_(s) mass " << mass_D); +       if(mass_D < m_DxMassLower || mass_D > m_DxMassUpper) { +          ATH_MSG_DEBUG(" Original D_(s) candidate rejected by the mass cut: mass = " +                        << mass_D << " != (" << m_DxMassLower << ", " << m_DxMassUpper << ")" ); +          continue; +       } + +       // Add loose cut on K+k- mass for D_s->phi pi +       if(m_Dx_pid==431){ +          TLorentzVector p4Kp_in, p4Km_in; +          p4Kp_in.SetPtEtaPhiM( (*vxcItr)->trackParticle(0)->pt(), +                                (*vxcItr)->trackParticle(0)->eta(), +                                (*vxcItr)->trackParticle(0)->phi(), 
m_vtx1Daug1MassHypo); + p4Km_in.SetPtEtaPhiM( (*vxcItr)->trackParticle(1)->pt(), + (*vxcItr)->trackParticle(1)->eta(), + (*vxcItr)->trackParticle(1)->phi(), m_vtx1Daug2MassHypo); + double mass_phi = (p4Kp_in + p4Km_in).M(); + ATH_MSG_DEBUG("phi mass " << mass_phi); + if(mass_phi > 1200) { + ATH_MSG_DEBUG(" Original phi candidate rejected by the mass cut: mass = " << mass_phi ); + continue; + } + } + selectedDxCandidates.push_back(*vxcItr); + } + if(selectedDxCandidates.size()<1) return StatusCode::SUCCESS; + + // Select J/psi D_(s)+ candidates + // Iterate over Jpsi vertices + for(auto jpsiItr=selectedJpsiCandidates.cbegin(); jpsiItr!=selectedJpsiCandidates.cend(); ++jpsiItr) { + + size_t jpsiTrkNum = (*jpsiItr)->nTrackParticles(); + tracksJpsi.clear(); + for( unsigned int it=0; it<jpsiTrkNum; it++) tracksJpsi.push_back((*jpsiItr)->trackParticle(it)); + + if (tracksJpsi.size() != 2 || massesJpsi.size() != 2 ) { + ATH_MSG_INFO("problems with Jpsi input"); + } + + // Iterate over D_(s)+/- vertices + for(auto dxItr=selectedDxCandidates.cbegin(); dxItr!=selectedDxCandidates.cend(); ++dxItr) { + + // Check identical tracks in input + if(std::find(tracksJpsi.cbegin(), tracksJpsi.cend(), (*dxItr)->trackParticle(0)) != tracksJpsi.cend()) continue; + if(std::find(tracksJpsi.cbegin(), tracksJpsi.cend(), (*dxItr)->trackParticle(1)) != tracksJpsi.cend()) continue; + if(std::find(tracksJpsi.cbegin(), tracksJpsi.cend(), (*dxItr)->trackParticle(2)) != tracksJpsi.cend()) continue; + + size_t dxTrkNum = (*dxItr)->nTrackParticles(); + tracksDx.clear(); + for( unsigned int it=0; it<dxTrkNum; it++) tracksDx.push_back((*dxItr)->trackParticle(it)); + if (tracksDx.size() != 3 || massesDx.size() != 3 ) { + ATH_MSG_INFO("problems with D_(s) input"); + } + + ATH_MSG_DEBUG("using tracks" << tracksJpsi[0] << ", " << tracksJpsi[1] << ", " << tracksDx[0] << ", " << tracksDx[1] << ", " << tracksDx[2]); + tracksBc.clear(); + for( unsigned int it=0; it<jpsiTrkNum; it++) 
tracksBc.push_back((*jpsiItr)->trackParticle(it)); + for( unsigned int it=0; it<dxTrkNum; it++) tracksBc.push_back((*dxItr)->trackParticle(it)); + + // Apply the user's settings to the fitter + // Reset + std::unique_ptr<Trk::IVKalState> state (m_iVertexFitter->makeState()); + // Robustness + int robustness = 0; + m_iVertexFitter->setRobustness(robustness, *state); + // Build up the topology + // Vertex list + std::vector<Trk::VertexID> vrtList; + // D_(s)+/- vertex + Trk::VertexID vID; + if (m_constrDx) { + if(abs(m_Dx_pid)==411 && (*dxItr)->trackParticle(2)->charge()<0) // D- + vID = m_iVertexFitter->startVertex(tracksDx,massesDm,*state,mass_d); + else // D+, D_s+/- + vID = m_iVertexFitter->startVertex(tracksDx,massesDx,*state,mass_d); + } else { + if(abs(m_Dx_pid)==411 && (*dxItr)->trackParticle(2)->charge()<0) // D- + vID = m_iVertexFitter->startVertex(tracksDx,massesDm,*state); + else // D+, D_s+/- + vID = m_iVertexFitter->startVertex(tracksDx,massesDx,*state); + } + vrtList.push_back(vID); + // B vertex including Jpsi + Trk::VertexID vID2 = m_iVertexFitter->nextVertex(tracksJpsi,massesJpsi,vrtList,*state); + if (m_constrJpsi) { + std::vector<Trk::VertexID> cnstV; + cnstV.clear(); + if ( !m_iVertexFitter->addMassConstraint(vID2,tracksJpsi,cnstV,*state,m_mass_jpsi).isSuccess() ) { + ATH_MSG_WARNING("addMassConstraint failed"); + //return StatusCode::FAILURE; + } + } + // Do the work + std::unique_ptr<Trk::VxCascadeInfo> result(m_iVertexFitter->fitCascade(*state)); + + if (result != nullptr) { + // reset links to original tracks + BPhysPVCascadeTools::PrepareVertexLinks(result.get(), trackContainer); + ATH_MSG_DEBUG("storing tracks " << ((result->vertices())[0])->trackParticle(0) << ", " + << ((result->vertices())[0])->trackParticle(1) << ", " + << ((result->vertices())[0])->trackParticle(2) << ", " + << ((result->vertices())[1])->trackParticle(0) << ", " + << ((result->vertices())[1])->trackParticle(1)); + // necessary to prevent memory leak + 
result->setSVOwnership(true); + + // Chi2/DOF cut + double bChi2DOF = result->fitChi2()/result->nDoF(); + ATH_MSG_DEBUG("Candidate chi2/DOF is " << bChi2DOF); + bool chi2CutPassed = (m_chi2cut <= 0.0 || bChi2DOF < m_chi2cut); + + const std::vector< std::vector<TLorentzVector> > &moms = result->getParticleMoms(); + double mass = m_CascadeTools->invariantMass(moms[1]); + if(chi2CutPassed) { + if (mass >= m_MassLower && mass <= m_MassUpper) { + cascadeinfoContainer->push_back(result.release()); + } else { + ATH_MSG_DEBUG("Candidate rejected by the mass cut: mass = " + << mass << " != (" << m_MassLower << ", " << m_MassUpper << ")" ); + } + } + } + + } //Iterate over D_(s)+ vertices + + } //Iterate over Jpsi vertices + + ATH_MSG_DEBUG("cascadeinfoContainer size " << cascadeinfoContainer->size()); + + return StatusCode::SUCCESS; + } + +} + + diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/JpsiPlusV0Cascade.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/JpsiPlusV0Cascade.cxx new file mode 100644 index 0000000000000000000000000000000000000000..6f66478553aa12d370babd3fb57a0a59433ce65f --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/JpsiPlusV0Cascade.cxx @@ -0,0 +1,576 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ +///////////////////////////////////////////////////////////////// +// JpsiPlusV0Cascade.cxx, (c) ATLAS Detector software +///////////////////////////////////////////////////////////////// +#include "DerivationFrameworkBPhys/JpsiPlusV0Cascade.h" +#include "TrkVertexFitterInterfaces/IVertexFitter.h" +#include "TrkVKalVrtFitter/TrkVKalVrtFitter.h" +#include "TrkVertexAnalysisUtils/V0Tools.h" +#include "GaudiKernel/IPartPropSvc.h" +#include "DerivationFrameworkBPhys/CascadeTools.h" +#include "DerivationFrameworkBPhys/BPhysPVCascadeTools.h" +#include "xAODTracking/VertexAuxContainer.h" +#include "BeamSpotConditionsData/BeamSpotData.h" 
+#include "xAODBPhys/BPhysHypoHelper.h" +#include <algorithm> +#include "xAODTracking/VertexContainer.h" +#include "DerivationFrameworkBPhys/LocalVector.h" +#include "HepPDT/ParticleDataTable.hh" + +namespace DerivationFramework { + typedef ElementLink<xAOD::VertexContainer> VertexLink; + typedef std::vector<VertexLink> VertexLinkVector; + typedef std::vector<const xAOD::TrackParticle*> TrackBag; + + + StatusCode JpsiPlusV0Cascade::initialize() { + + // retrieving vertex Fitter + if ( m_iVertexFitter.retrieve().isFailure() ) { + ATH_MSG_FATAL("Failed to retrieve tool " << m_iVertexFitter); + return StatusCode::FAILURE; + } else { + ATH_MSG_DEBUG("Retrieved tool " << m_iVertexFitter); + } + + // retrieving the V0 tools + if ( m_V0Tools.retrieve().isFailure() ) { + ATH_MSG_FATAL("Failed to retrieve tool " << m_V0Tools); + return StatusCode::FAILURE; + } else { + ATH_MSG_INFO("Retrieved tool " << m_V0Tools); + } + + // retrieving the Cascade tools + if ( m_CascadeTools.retrieve().isFailure() ) { + ATH_MSG_FATAL("Failed to retrieve tool " << m_CascadeTools); + return StatusCode::FAILURE; + } else { + ATH_MSG_INFO("Retrieved tool " << m_CascadeTools); + } + + ATH_CHECK(m_beamSpotKey.initialize()); + + IPartPropSvc* partPropSvc = 0; + StatusCode sc = service("PartPropSvc", partPropSvc, true); + if (sc.isFailure()) { + msg(MSG::ERROR) << "Could not initialize Particle Properties Service" << endmsg; + return StatusCode::FAILURE; + } + const HepPDT::ParticleDataTable* pdt = partPropSvc->PDT(); + + // retrieve particle masses + m_mass_muon = BPhysPVCascadeTools::getParticleMass(pdt, PDG::mu_minus); + m_mass_pion = BPhysPVCascadeTools::getParticleMass(pdt, PDG::pi_plus); + m_mass_proton = BPhysPVCascadeTools::getParticleMass(pdt, PDG::p_plus); + m_mass_lambda = BPhysPVCascadeTools::getParticleMass(pdt, PDG::Lambda0); + m_mass_ks = BPhysPVCascadeTools::getParticleMass(pdt, PDG::K_S0); + m_mass_jpsi = BPhysPVCascadeTools::getParticleMass(pdt, PDG::J_psi); + m_mass_b0 = 
BPhysPVCascadeTools::getParticleMass(pdt, PDG::B0); + m_mass_lambdaB = BPhysPVCascadeTools::getParticleMass(pdt, PDG::Lambda_b0); + + return StatusCode::SUCCESS; + } + + + StatusCode JpsiPlusV0Cascade::addBranches() const + { + std::vector<Trk::VxCascadeInfo*> cascadeinfoContainer; + constexpr int topoN = 2; + std::array<xAOD::VertexContainer*, topoN> Vtxwritehandles; + std::array<xAOD::VertexAuxContainer*, topoN> Vtxwritehandlesaux; + if(m_cascadeOutputsKeys.size() !=topoN) { ATH_MSG_FATAL("Incorrect number of VtxContainers"); return StatusCode::FAILURE; } + + for(int i =0; i<topoN;i++){ + Vtxwritehandles[i] = new xAOD::VertexContainer(); + Vtxwritehandlesaux[i] = new xAOD::VertexAuxContainer(); + Vtxwritehandles[i]->setStore(Vtxwritehandlesaux[i]); + CHECK(evtStore()->record(Vtxwritehandles[i] , m_cascadeOutputsKeys[i] )); + CHECK(evtStore()->record(Vtxwritehandlesaux[i], m_cascadeOutputsKeys[i] + "Aux.")); + } + + //---------------------------------------------------- + // retrieve primary vertices + //---------------------------------------------------- + const xAOD::Vertex * primaryVertex(nullptr); + const xAOD::VertexContainer *pvContainer(nullptr); + CHECK(evtStore()->retrieve(pvContainer, m_VxPrimaryCandidateName)); + ATH_MSG_DEBUG("Found " << m_VxPrimaryCandidateName << " in StoreGate!"); + + if (pvContainer->size()==0){ + ATH_MSG_WARNING("You have no primary vertices: " << pvContainer->size()); + return StatusCode::RECOVERABLE; + } else { + primaryVertex = (*pvContainer)[0]; + } + + //---------------------------------------------------- + // Try to retrieve refitted primary vertices + //---------------------------------------------------- + xAOD::VertexContainer* refPvContainer = NULL; + xAOD::VertexAuxContainer* refPvAuxContainer = NULL; + if (m_refitPV) { + if (evtStore()->contains<xAOD::VertexContainer>(m_refPVContainerName)) { + // refitted PV container exists. 
Get it from the store gate + CHECK(evtStore()->retrieve(refPvContainer , m_refPVContainerName )); + CHECK(evtStore()->retrieve(refPvAuxContainer, m_refPVContainerName + "Aux.")); + } else { + // refitted PV container does not exist. Create a new one. + refPvContainer = new xAOD::VertexContainer; + refPvAuxContainer = new xAOD::VertexAuxContainer; + refPvContainer->setStore(refPvAuxContainer); + CHECK(evtStore()->record(refPvContainer , m_refPVContainerName)); + CHECK(evtStore()->record(refPvAuxContainer, m_refPVContainerName+"Aux.")); + } + } + + ATH_CHECK(performSearch(&cascadeinfoContainer)); + + SG::ReadCondHandle<InDet::BeamSpotData> beamSpotHandle { m_beamSpotKey }; + if(not beamSpotHandle.isValid()) ATH_MSG_ERROR("Cannot Retrieve " << m_beamSpotKey.key() ); + BPhysPVCascadeTools helper(&(*m_CascadeTools), beamSpotHandle.cptr()); + helper.SetMinNTracksInPV(m_PV_minNTracks); + + // Decorators for the main vertex: chi2, ndf, pt and pt error, plus the V0 vertex variables + SG::AuxElement::Decorator<VertexLinkVector> CascadeLinksDecor("CascadeVertexLinks"); + SG::AuxElement::Decorator<VertexLinkVector> JpsiLinksDecor("JpsiVertexLinks"); + SG::AuxElement::Decorator<VertexLinkVector> V0LinksDecor("V0VertexLinks"); + SG::AuxElement::Decorator<float> chi2_decor("ChiSquared"); + SG::AuxElement::Decorator<float> ndof_decor("NumberDoF"); + SG::AuxElement::Decorator<float> Pt_decor("Pt"); + SG::AuxElement::Decorator<float> PtErr_decor("PtErr"); + SG::AuxElement::Decorator<float> Mass_svdecor("V0_mass"); + SG::AuxElement::Decorator<float> MassErr_svdecor("V0_massErr"); + SG::AuxElement::Decorator<float> Pt_svdecor("V0_Pt"); + SG::AuxElement::Decorator<float> PtErr_svdecor("V0_PtErr"); + SG::AuxElement::Decorator<float> Lxy_svdecor("V0_Lxy"); + SG::AuxElement::Decorator<float> LxyErr_svdecor("V0_LxyErr"); + SG::AuxElement::Decorator<float> Tau_svdecor("V0_Tau"); + SG::AuxElement::Decorator<float> TauErr_svdecor("V0_TauErr"); + + ATH_MSG_DEBUG("cascadeinfoContainer size " << 
cascadeinfoContainer.size()); + + // Get Jpsi container and identify the input Jpsi + const xAOD::VertexContainer *jpsiContainer(nullptr); + CHECK(evtStore()->retrieve(jpsiContainer , m_vertexContainerKey )); + const xAOD::VertexContainer *v0Container(nullptr); + CHECK(evtStore()->retrieve(v0Container , m_vertexV0ContainerKey )); + + for (Trk::VxCascadeInfo* x : cascadeinfoContainer) { + if(x==nullptr) ATH_MSG_ERROR("cascadeinfoContainer is null"); + + // the cascade fitter returns: + // std::vector<xAOD::Vertex*>, each xAOD::Vertex contains the refitted track parameters (perigee at the vertex position) + // vertices[iv] the links to the original TPs and a covariance of size 3+5*NTRK; the chi2 of the total fit + // is split between the cascade vertices as per track contribution + // std::vector< std::vector<TLorentzVector> >, each std::vector<TLorentzVector> contains the refitted momenta (TLorentzVector) + // momenta[iv][...] of all tracks in the corresponding vertex, including any pseudotracks (from cascade vertices) + // originating in this vertex; the masses are as assigned in the cascade fit + // std::vector<Amg::MatrixX>, the corresponding covariance matrices in momentum space + // covariance[iv] + // int nDoF, double Chi2 + // + // the invariant mass, pt, lifetime etc. errors should be calculated using the covariance matrices in momentum space as these + // take into account the full track-track and track-vertex correlations + // + // in the case of Jpsi+V0: vertices[0] is the V0 vertex, vertices[1] is the B/Lambda_b(bar) vertex, containing the 2 Jpsi tracks. + // The covariance terms between the two vertices are not stored. 
In momentum space momenta[0] contains the 2 V0 tracks, + // their momenta add up to the momentum of the 3rd track in momenta[1], the first two being the Jpsi tracks + + const std::vector<xAOD::Vertex*> &cascadeVertices = x->vertices(); + if(cascadeVertices.size()!=topoN) + ATH_MSG_ERROR("Incorrect number of vertices"); + if(cascadeVertices[0] == nullptr || cascadeVertices[1] == nullptr) ATH_MSG_ERROR("Error null vertex"); + // Keep vertices (bear in mind that they come in reverse order!) + for(int i =0;i<topoN;i++) Vtxwritehandles[i]->push_back(cascadeVertices[i]); + + x->setSVOwnership(false); // Prevent Container from deleting vertices + const auto mainVertex = cascadeVertices[1]; // this is the Bd (Bd, Lambda_b, Lambda_bbar) vertex + //const auto v0Vertex = cascadeVertices[0]; // this is the V0 (Kshort, Lambda, Lambdabar) vertex + const std::vector< std::vector<TLorentzVector> > &moms = x->getParticleMoms(); + + // Set links to cascade vertices + std::vector<const xAOD::Vertex*> verticestoLink; + verticestoLink.push_back(cascadeVertices[0]); + if(Vtxwritehandles[1] == nullptr) ATH_MSG_ERROR("Vtxwritehandles[1] is null"); + if(!BPhysPVCascadeTools::LinkVertices(CascadeLinksDecor, verticestoLink, Vtxwritehandles[0], cascadeVertices[1])) + ATH_MSG_ERROR("Error decorating with cascade vertices"); + + // Identify the input Jpsi + const xAOD::Vertex* jpsiVertex = BPhysPVCascadeTools::FindVertex<2>(jpsiContainer, cascadeVertices[1]); + ATH_MSG_DEBUG("1 pt Jpsi tracks " << cascadeVertices[1]->trackParticle(0)->pt() << ", " << cascadeVertices[1]->trackParticle(1)->pt()); + if (jpsiVertex) ATH_MSG_DEBUG("2 pt Jpsi tracks " << jpsiVertex->trackParticle(0)->pt() << ", " << jpsiVertex->trackParticle(1)->pt()); + + // Identify the input V0 + const xAOD::Vertex* v0Vertex = BPhysPVCascadeTools::FindVertex<2>(v0Container, cascadeVertices[0]);; + ATH_MSG_DEBUG("1 pt V0 tracks " << cascadeVertices[0]->trackParticle(0)->pt() << ", " << cascadeVertices[0]->trackParticle(1)->pt()); + 
if (v0Vertex) ATH_MSG_DEBUG("2 pt V0 tracks " << v0Vertex->trackParticle(0)->pt() << ", " << v0Vertex->trackParticle(1)->pt()); + + // Set links to input vertices + std::vector<const xAOD::Vertex*> jpsiVerticestoLink; + if (jpsiVertex) jpsiVerticestoLink.push_back(jpsiVertex); + else ATH_MSG_WARNING("Could not find linking Jpsi"); + if(!BPhysPVCascadeTools::LinkVertices(JpsiLinksDecor, jpsiVerticestoLink, jpsiContainer, cascadeVertices[1])) + ATH_MSG_ERROR("Error decorating with Jpsi vertices"); + + std::vector<const xAOD::Vertex*> v0VerticestoLink; + if (v0Vertex) v0VerticestoLink.push_back(v0Vertex); + else ATH_MSG_WARNING("Could not find linking V0"); + if(!BPhysPVCascadeTools::LinkVertices(V0LinksDecor, v0VerticestoLink, v0Container, cascadeVertices[1])) + ATH_MSG_ERROR("Error decorating with V0 vertices"); + + double mass_v0 = m_mass_ks; + double mass_b = m_mass_b0; + std::vector<double> massesJpsi(2, m_mass_muon); + std::vector<double> massesV0; + std::vector<double> Masses(2, m_mass_muon); + if (m_v0_pid == 310) { + massesV0.push_back(m_mass_pion); + massesV0.push_back(m_mass_pion); + Masses.push_back(m_mass_ks); + } else if (m_v0_pid == 3122) { + massesV0.push_back(m_mass_proton); + massesV0.push_back(m_mass_pion); + Masses.push_back(m_mass_lambda); + mass_v0 = m_mass_lambda; + mass_b = m_mass_lambdaB; + } else if (m_v0_pid == -3122) { + massesV0.push_back(m_mass_pion); + massesV0.push_back(m_mass_proton); + Masses.push_back(m_mass_lambda); + mass_v0 = m_mass_lambda; + mass_b = m_mass_lambdaB; + } + + // loop over candidates -- Don't apply PV_minNTracks requirement here + // because it may result in exclusion of the high-pt PV. 
+ // get good PVs + + xAOD::BPhysHypoHelper vtx(m_hypoName, mainVertex); + + BPhysPVCascadeTools::SetVectorInfo(vtx, x); + + + // Decorate main vertex + // + // 1.a) mass, mass error + BPHYS_CHECK( vtx.setMass(m_CascadeTools->invariantMass(moms[1])) ); + BPHYS_CHECK( vtx.setMassErr(m_CascadeTools->invariantMassError(moms[1],x->getCovariance()[1])) ); + // 1.b) pt and pT error (the default pt of mainVertex is != the pt of the full cascade fit!) + Pt_decor(*mainVertex) = m_CascadeTools->pT(moms[1]); + PtErr_decor(*mainVertex) = m_CascadeTools->pTError(moms[1],x->getCovariance()[1]); + // 1.c) chi2 and ndof (the default chi2 of mainVertex is != the chi2 of the full cascade fit!) + chi2_decor(*mainVertex) = x->fitChi2(); + ndof_decor(*mainVertex) = x->nDoF(); + + ATH_CHECK(helper.FillCandwithRefittedVertices(m_refitPV, pvContainer, + refPvContainer, &(*m_pvRefitter), m_PV_max, m_DoVertexType, x, 1, mass_b, vtx)); + + // 4) decorate the main vertex with V0 vertex mass, pt, lifetime and lxy values (plus errors) + // V0 points to the main vertex, so lifetime and lxy are w.r.t the main vertex + Mass_svdecor(*mainVertex) = m_CascadeTools->invariantMass(moms[0]); + MassErr_svdecor(*mainVertex) = m_CascadeTools->invariantMassError(moms[0],x->getCovariance()[0]); + Pt_svdecor(*mainVertex) = m_CascadeTools->pT(moms[0]); + PtErr_svdecor(*mainVertex) = m_CascadeTools->pTError(moms[0],x->getCovariance()[0]); + Lxy_svdecor(*mainVertex) = m_CascadeTools->lxy(moms[0],cascadeVertices[0],cascadeVertices[1]); + LxyErr_svdecor(*mainVertex) = m_CascadeTools->lxyError(moms[0],x->getCovariance()[0],cascadeVertices[0],cascadeVertices[1]); + Tau_svdecor(*mainVertex) = m_CascadeTools->tau(moms[0],cascadeVertices[0],cascadeVertices[1]); + TauErr_svdecor(*mainVertex) = m_CascadeTools->tauError(moms[0],x->getCovariance()[0],cascadeVertices[0],cascadeVertices[1]); + + // Some checks in DEBUG mode + ATH_MSG_DEBUG("chi2 " << x->fitChi2() + << " chi2_1 " << m_V0Tools->chisq(cascadeVertices[0]) + << " 
chi2_2 " << m_V0Tools->chisq(cascadeVertices[1]) + << " vprob " << m_CascadeTools->vertexProbability(x->nDoF(),x->fitChi2())); + ATH_MSG_DEBUG("ndf " << x->nDoF() << " ndf_1 " << m_V0Tools->ndof(cascadeVertices[0]) << " ndf_2 " << m_V0Tools->ndof(cascadeVertices[1])); + ATH_MSG_DEBUG("V0Tools mass_v0 " << m_V0Tools->invariantMass(cascadeVertices[0],massesV0) + << " error " << m_V0Tools->invariantMassError(cascadeVertices[0],massesV0) + << " mass_J " << m_V0Tools->invariantMass(cascadeVertices[1],massesJpsi) + << " error " << m_V0Tools->invariantMassError(cascadeVertices[1],massesJpsi)); + // masses and errors, using track masses assigned in the fit + double Mass_B = m_CascadeTools->invariantMass(moms[1]); + double Mass_V0 = m_CascadeTools->invariantMass(moms[0]); + double Mass_B_err = m_CascadeTools->invariantMassError(moms[1],x->getCovariance()[1]); + double Mass_V0_err = m_CascadeTools->invariantMassError(moms[0],x->getCovariance()[0]); + ATH_MSG_DEBUG("Mass_B " << Mass_B << " Mass_V0 " << Mass_V0); + ATH_MSG_DEBUG("Mass_B_err " << Mass_B_err << " Mass_V0_err " << Mass_V0_err); + double mprob_B = m_CascadeTools->massProbability(mass_b,Mass_B,Mass_B_err); + double mprob_V0 = m_CascadeTools->massProbability(mass_v0,Mass_V0,Mass_V0_err); + ATH_MSG_DEBUG("mprob_B " << mprob_B << " mprob_V0 " << mprob_V0); + // masses and errors, assigning user defined track masses + ATH_MSG_DEBUG("Mass_b " << m_CascadeTools->invariantMass(moms[1],Masses) + << " Mass_v0 " << m_CascadeTools->invariantMass(moms[0],massesV0)); + ATH_MSG_DEBUG("Mass_b_err " << m_CascadeTools->invariantMassError(moms[1],x->getCovariance()[1],Masses) + << " Mass_v0_err " << m_CascadeTools->invariantMassError(moms[0],x->getCovariance()[0],massesV0)); + ATH_MSG_DEBUG("pt_b " << m_CascadeTools->pT(moms[1]) + << " pt_v " << m_CascadeTools->pT(moms[0]) + << " pt_v0 " << m_V0Tools->pT(cascadeVertices[0])); + ATH_MSG_DEBUG("ptErr_b " << m_CascadeTools->pTError(moms[1],x->getCovariance()[1]) + << " ptErr_v " << 
m_CascadeTools->pTError(moms[0],x->getCovariance()[0]) + << " ptErr_v0 " << m_V0Tools->pTError(cascadeVertices[0])); + ATH_MSG_DEBUG("lxy_B " << m_V0Tools->lxy(cascadeVertices[1],primaryVertex) << " lxy_V " << m_V0Tools->lxy(cascadeVertices[0],cascadeVertices[1])); + ATH_MSG_DEBUG("lxy_b " << m_CascadeTools->lxy(moms[1],cascadeVertices[1],primaryVertex) << " lxy_v " << m_CascadeTools->lxy(moms[0],cascadeVertices[0],cascadeVertices[1])); + ATH_MSG_DEBUG("lxyErr_b " << m_CascadeTools->lxyError(moms[1],x->getCovariance()[1],cascadeVertices[1],primaryVertex) + << " lxyErr_v " << m_CascadeTools->lxyError(moms[0],x->getCovariance()[0],cascadeVertices[0],cascadeVertices[1]) + << " lxyErr_v0 " << m_V0Tools->lxyError(cascadeVertices[0],cascadeVertices[1])); + ATH_MSG_DEBUG("tau_B " << m_CascadeTools->tau(moms[1],cascadeVertices[1],primaryVertex,mass_b) + << " tau_v0 " << m_V0Tools->tau(cascadeVertices[0],cascadeVertices[1],massesV0)); + ATH_MSG_DEBUG("tau_b " << m_CascadeTools->tau(moms[1],cascadeVertices[1],primaryVertex) + << " tau_v " << m_CascadeTools->tau(moms[0],cascadeVertices[0],cascadeVertices[1]) + << " tau_V " << m_CascadeTools->tau(moms[0],cascadeVertices[0],cascadeVertices[1],mass_v0)); + ATH_MSG_DEBUG("tauErr_b " << m_CascadeTools->tauError(moms[1],x->getCovariance()[1],cascadeVertices[1],primaryVertex) + << " tauErr_v " << m_CascadeTools->tauError(moms[0],x->getCovariance()[0],cascadeVertices[0],cascadeVertices[1]) + << " tauErr_v0 " << m_V0Tools->tauError(cascadeVertices[0],cascadeVertices[1],massesV0)); + ATH_MSG_DEBUG("TauErr_b " << m_CascadeTools->tauError(moms[1],x->getCovariance()[1],cascadeVertices[1],primaryVertex,mass_b) + << " TauErr_v " << m_CascadeTools->tauError(moms[0],x->getCovariance()[0],cascadeVertices[0],cascadeVertices[1],mass_v0) + << " TauErr_v0 " << m_V0Tools->tauError(cascadeVertices[0],cascadeVertices[1],massesV0,mass_v0)); + + ATH_MSG_DEBUG("CascadeTools main vert wrt PV " << " CascadeTools SV " << " V0Tools SV"); + 
ATH_MSG_DEBUG("a0z " << m_CascadeTools->a0z(moms[1],cascadeVertices[1],primaryVertex) + << ", " << m_CascadeTools->a0z(moms[0],cascadeVertices[0],cascadeVertices[1]) + << ", " << m_V0Tools->a0z(cascadeVertices[0],cascadeVertices[1])); + ATH_MSG_DEBUG("a0zErr " << m_CascadeTools->a0zError(moms[1],x->getCovariance()[1],cascadeVertices[1],primaryVertex) + << ", " << m_CascadeTools->a0zError(moms[0],x->getCovariance()[0],cascadeVertices[0],cascadeVertices[1]) + << ", " << m_V0Tools->a0zError(cascadeVertices[0],cascadeVertices[1])); + ATH_MSG_DEBUG("a0xy " << m_CascadeTools->a0xy(moms[1],cascadeVertices[1],primaryVertex) + << ", " << m_CascadeTools->a0xy(moms[0],cascadeVertices[0],cascadeVertices[1]) + << ", " << m_V0Tools->a0xy(cascadeVertices[0],cascadeVertices[1])); + ATH_MSG_DEBUG("a0xyErr " << m_CascadeTools->a0xyError(moms[1],x->getCovariance()[1],cascadeVertices[1],primaryVertex) + << ", " << m_CascadeTools->a0xyError(moms[0],x->getCovariance()[0],cascadeVertices[0],cascadeVertices[1]) + << ", " << m_V0Tools->a0xyError(cascadeVertices[0],cascadeVertices[1])); + ATH_MSG_DEBUG("a0 " << m_CascadeTools->a0(moms[1],cascadeVertices[1],primaryVertex) + << ", " << m_CascadeTools->a0(moms[0],cascadeVertices[0],cascadeVertices[1]) + << ", " << m_V0Tools->a0(cascadeVertices[0],cascadeVertices[1])); + ATH_MSG_DEBUG("a0Err " << m_CascadeTools->a0Error(moms[1],x->getCovariance()[1],cascadeVertices[1],primaryVertex) + << ", " << m_CascadeTools->a0Error(moms[0],x->getCovariance()[0],cascadeVertices[0],cascadeVertices[1]) + << ", " << m_V0Tools->a0Error(cascadeVertices[0],cascadeVertices[1])); + ATH_MSG_DEBUG("x0 " << m_V0Tools->vtx(cascadeVertices[0]).x() << " y0 " << m_V0Tools->vtx(cascadeVertices[0]).y() << " z0 " << m_V0Tools->vtx(cascadeVertices[0]).z()); + ATH_MSG_DEBUG("x1 " << m_V0Tools->vtx(cascadeVertices[1]).x() << " y1 " << m_V0Tools->vtx(cascadeVertices[1]).y() << " z1 " << m_V0Tools->vtx(cascadeVertices[1]).z()); + ATH_MSG_DEBUG("X0 " << primaryVertex->x() << " Y0 " 
<< primaryVertex->y() << " Z0 " << primaryVertex->z()); + ATH_MSG_DEBUG("rxy0 " << m_V0Tools->rxy(cascadeVertices[0]) << " rxyErr0 " << m_V0Tools->rxyError(cascadeVertices[0])); + ATH_MSG_DEBUG("rxy1 " << m_V0Tools->rxy(cascadeVertices[1]) << " rxyErr1 " << m_V0Tools->rxyError(cascadeVertices[1])); + ATH_MSG_DEBUG("Rxy0 wrt PV " << m_V0Tools->rxy(cascadeVertices[0],primaryVertex) << " RxyErr0 wrt PV " << m_V0Tools->rxyError(cascadeVertices[0],primaryVertex)); + ATH_MSG_DEBUG("Rxy1 wrt PV " << m_V0Tools->rxy(cascadeVertices[1],primaryVertex) << " RxyErr1 wrt PV " << m_V0Tools->rxyError(cascadeVertices[1],primaryVertex)); + ATH_MSG_DEBUG("number of covariance matrices " << (x->getCovariance()).size()); + //const Amg::MatrixX& cov30 = (cascadeVertices[0])->covariancePosition(); + //const Amg::MatrixX& cov31 = (cascadeVertices[1])->covariancePosition(); + //ATH_MSG_DEBUG("cov30 " << cov30); + //ATH_MSG_DEBUG("cov31 " << cov31); + + + } // loop over cascadeinfoContainer + + // Deleting cascadeinfo since this won't be stored. 
+ // Vertices have been kept in m_cascadeOutputs and should be owned by their container + for (auto x : cascadeinfoContainer) delete x; + + return StatusCode::SUCCESS; + } + + + JpsiPlusV0Cascade::JpsiPlusV0Cascade(const std::string& t, const std::string& n, const IInterface* p) : AthAlgTool(t,n,p), + m_vertexContainerKey(""), + m_vertexV0ContainerKey(""), + m_cascadeOutputsKeys{ "JpsiPlusV0CascadeVtx1", "JpsiPlusV0CascadeVtx2" }, + m_VxPrimaryCandidateName("PrimaryVertices"), + m_jpsiMassLower(0.0), + m_jpsiMassUpper(10000.0), + m_V0MassLower(0.0), + m_V0MassUpper(10000.0), + m_MassLower(0.0), + m_MassUpper(20000.0), + m_mass_muon ( 0 ), + m_mass_pion ( 0 ), + m_mass_proton ( 0 ), + m_mass_lambda ( 0 ), + m_mass_ks ( 0 ), + m_mass_jpsi ( 0 ), + m_mass_b0 ( 0 ), + m_mass_lambdaB( 0 ), + m_v0_pid(310), + m_constrV0(true), + m_constrJpsi(true), + m_iVertexFitter("Trk::TrkVKalVrtFitter"), + m_pvRefitter("Analysis::PrimaryVertexRefitter"), + m_V0Tools("Trk::V0Tools"), + m_CascadeTools("DerivationFramework::CascadeTools") + { + declareProperty("JpsiVertices", m_vertexContainerKey); + declareProperty("V0Vertices", m_vertexV0ContainerKey); + declareProperty("VxPrimaryCandidateName", m_VxPrimaryCandidateName); + declareProperty("RefPVContainerName", m_refPVContainerName = "RefittedPrimaryVertices"); + declareProperty("JpsiMassLowerCut", m_jpsiMassLower); + declareProperty("JpsiMassUpperCut", m_jpsiMassUpper); + declareProperty("V0MassLowerCut", m_V0MassLower); + declareProperty("V0MassUpperCut", m_V0MassUpper); + declareProperty("MassLowerCut", m_MassLower); + declareProperty("MassUpperCut", m_MassUpper); + declareProperty("HypothesisName", m_hypoName = "Bd"); + declareProperty("V0Hypothesis", m_v0_pid); + declareProperty("ApplyV0MassConstraint", m_constrV0); + declareProperty("ApplyJpsiMassConstraint", m_constrJpsi); + declareProperty("RefitPV", m_refitPV = true); + declareProperty("MaxnPV", m_PV_max = 999); + declareProperty("MinNTracksInPV", m_PV_minNTracks = 0); + 
declareProperty("DoVertexType", m_DoVertexType = 7); + declareProperty("TrkVertexFitterTool", m_iVertexFitter); + declareProperty("PVRefitter", m_pvRefitter); + declareProperty("V0Tools", m_V0Tools); + declareProperty("CascadeTools", m_CascadeTools); + declareProperty("CascadeVertexCollections", m_cascadeOutputsKeys); + } + + JpsiPlusV0Cascade::~JpsiPlusV0Cascade(){ } + + StatusCode JpsiPlusV0Cascade::performSearch(std::vector<Trk::VxCascadeInfo*> *cascadeinfoContainer) const + { + ATH_MSG_DEBUG( "JpsiPlusV0Cascade::performSearch" ); + assert(cascadeinfoContainer!=nullptr); + + // Get TrackParticle container (for setting links to the original tracks) + const xAOD::TrackParticleContainer *trackContainer(nullptr); + CHECK(evtStore()->retrieve(trackContainer , "InDetTrackParticles" )); + + // Get Jpsi container + const xAOD::VertexContainer *jpsiContainer(nullptr); + CHECK(evtStore()->retrieve(jpsiContainer , m_vertexContainerKey )); + + // Get V0 container + const xAOD::VertexContainer *v0Container(nullptr); + CHECK(evtStore()->retrieve(v0Container , m_vertexV0ContainerKey )); + + double mass_v0 = m_mass_ks; + std::vector<const xAOD::TrackParticle*> tracksJpsi; + std::vector<const xAOD::TrackParticle*> tracksV0; + std::vector<double> massesJpsi(2, m_mass_muon); + std::vector<double> massesV0; + std::vector<double> Masses(2, m_mass_muon); + if (m_v0_pid == 310) { + massesV0.push_back(m_mass_pion); + massesV0.push_back(m_mass_pion); + Masses.push_back(m_mass_ks); + } else if (m_v0_pid == 3122) { + massesV0.push_back(m_mass_proton); + massesV0.push_back(m_mass_pion); + mass_v0 = m_mass_lambda; + Masses.push_back(m_mass_lambda); + } else if (m_v0_pid == -3122) { + massesV0.push_back(m_mass_pion); + massesV0.push_back(m_mass_proton); + mass_v0 = m_mass_lambda; + Masses.push_back(m_mass_lambda); + } + + for(auto jpsi : *jpsiContainer) { //Iterate over Jpsi vertices + + size_t jpsiTrkNum = jpsi->nTrackParticles(); + tracksJpsi.clear(); + for( unsigned int it=0; 
it<jpsiTrkNum; it++) tracksJpsi.push_back(jpsi->trackParticle(it)); + + if (tracksJpsi.size() != 2 || massesJpsi.size() != 2 ) { + ATH_MSG_INFO("problems with Jpsi input"); + } + double mass_Jpsi = m_V0Tools->invariantMass(jpsi,massesJpsi); + ATH_MSG_DEBUG("Jpsi mass " << mass_Jpsi); + if (mass_Jpsi < m_jpsiMassLower || mass_Jpsi > m_jpsiMassUpper) { + ATH_MSG_DEBUG(" Original Jpsi candidate rejected by the mass cut: mass = " + << mass_Jpsi << " != (" << m_jpsiMassLower << ", " << m_jpsiMassUpper << ")" ); + continue; + } + + for(auto v0 : *v0Container) { //Iterate over V0 vertices + + size_t v0TrkNum = v0->nTrackParticles(); + tracksV0.clear(); + for( unsigned int it=0; it<v0TrkNum; it++) tracksV0.push_back(v0->trackParticle(it)); + if (tracksV0.size() != 2 || massesV0.size() != 2 ) { + ATH_MSG_INFO("problems with V0 input"); + } + double mass_V0 = m_V0Tools->invariantMass(v0,massesV0); + ATH_MSG_DEBUG("V0 mass " << mass_V0); + if (mass_V0 < m_V0MassLower || mass_V0 > m_V0MassUpper) { + ATH_MSG_DEBUG(" Original V0 candidate rejected by the mass cut: mass = " + << mass_V0 << " != (" << m_V0MassLower << ", " << m_V0MassUpper << ")" ); + continue; + } + ATH_MSG_DEBUG("using tracks" << tracksJpsi[0] << ", " << tracksJpsi[1] << ", " << tracksV0[0] << ", " << tracksV0[1]); + if(!BPhysPVCascadeTools::uniqueCollection(tracksJpsi, tracksV0)) continue; + + + //if (std::find(trackContainer->begin(), trackContainer->end(), tracksJpsi[0]) == trackContainer->end()) { + // ATH_MSG_ERROR("Track is not in standard container"); + //} else { + // ATH_MSG_DEBUG("Track " << tracksJpsi[0] << " is at position " << std::distance(trackContainer->begin(), std::find(trackContainer->begin(), trackContainer->end(), tracksJpsi[0])) ); + //} + //ATH_MSG_DEBUG("using tracks " << tracksJpsi[0] << ", " << tracksJpsi[1] << ", " << tracksV0[0] << ", " << tracksV0[1]); + + // Apply the user's settings to the fitter + // Reset + std::unique_ptr<Trk::IVKalState> state = m_iVertexFitter->makeState(); + 
// Robustness + int robustness = 0; + m_iVertexFitter->setRobustness(robustness, *state); + // Build up the topology + // Vertex list + std::vector<Trk::VertexID> vrtList; + // V0 vertex + Trk::VertexID vID; + if (m_constrV0) { + vID = m_iVertexFitter->startVertex(tracksV0,massesV0,*state, mass_v0); + } else { + vID = m_iVertexFitter->startVertex(tracksV0,massesV0, *state); + } + vrtList.push_back(vID); + // B vertex including Jpsi + Trk::VertexID vID2 = m_iVertexFitter->nextVertex(tracksJpsi,massesJpsi,vrtList, *state); + if (m_constrJpsi) { + std::vector<Trk::VertexID> cnstV; + cnstV.clear(); + if ( !m_iVertexFitter->addMassConstraint(vID2,tracksJpsi,cnstV,*state, m_mass_jpsi).isSuccess() ) { + ATH_MSG_WARNING("addMassConstraint failed"); + //return StatusCode::FAILURE; + } + } + // Do the work + std::unique_ptr<Trk::VxCascadeInfo> result(m_iVertexFitter->fitCascade(*state)); + + if (result != NULL) { + // reset links to original tracks + BPhysPVCascadeTools::PrepareVertexLinks(result.get(), trackContainer); + + ATH_MSG_DEBUG("storing tracks " << ((result->vertices())[0])->trackParticle(0) << ", " + << ((result->vertices())[0])->trackParticle(1) << ", " + << ((result->vertices())[1])->trackParticle(0) << ", " + << ((result->vertices())[1])->trackParticle(1)); + + // necessary to prevent memory leak + result->setSVOwnership(true); + const std::vector< std::vector<TLorentzVector> > &moms = result->getParticleMoms(); + if(moms.size() < 2){ + ATH_MSG_FATAL("Incorrect size " << __FILE__ << __LINE__ ); + return StatusCode::FAILURE; + } + double mass = m_CascadeTools->invariantMass(moms[1]); + if (mass >= m_MassLower && mass <= m_MassUpper) { + + cascadeinfoContainer->push_back(result.release()); + } else { + ATH_MSG_DEBUG("Candidate rejected by the mass cut: mass = " + << mass << " != (" << m_MassLower << ", " << m_MassUpper << ")" ); + } + } + + } //Iterate over V0 vertices + + } //Iterate over Jpsi vertices + + ATH_MSG_DEBUG("cascadeinfoContainer size " << 
cascadeinfoContainer->size()); + + return StatusCode::SUCCESS; + } + +} + + diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/MuonExtrapolationTool.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/MuonExtrapolationTool.cxx new file mode 100644 index 0000000000000000000000000000000000000000..4a55beb9601ee01983669f5be329bc0c78b89d9f --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/MuonExtrapolationTool.cxx @@ -0,0 +1,182 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ +// MuonExtrapolationTool.cxx +#include "DerivationFrameworkBPhys/MuonExtrapolationTool.h" +#include "xAODTruth/TruthParticleContainer.h" +#include "xAODEventInfo/EventInfo.h" +#include "TrkSurfaces/DiscSurface.h" +#include "TrkSurfaces/CylinderSurface.h" +#include "TrkExInterfaces/IExtrapolator.h" +#include "TVector2.h" +#include "xAODMuon/MuonContainer.h" +//********************************************************************** + +namespace DerivationFramework { + +MuonExtrapolationTool::MuonExtrapolationTool(const std::string &t, const std::string& n, const IInterface* p) + : + AthAlgTool(t, n, p), + m_extrapolator("Trk::Extrapolator/AtlasExtrapolator") +{ + declareInterface<DerivationFramework::IAugmentationTool>(this); + declareProperty("EndcapPivotPlaneZ", m_endcapPivotPlaneZ = 15525.);// z position of pivot plane in endcap region + declareProperty("EndcapPivotPlaneMinimumRadius", m_endcapPivotPlaneMinimumRadius = 0.);// minimum radius of pivot plane in endcap region + declareProperty("EndcapPivotPlaneMaximumRadius", m_endcapPivotPlaneMaximumRadius = 11977.); // maximum radius of pivot plane in endcap region + declareProperty("BarrelPivotPlaneRadius", m_barrelPivotPlaneRadius = 8000.);// radius of pivot plane in barrel region + declareProperty("BarrelPivotPlaneHalfLength", m_barrelPivotPlaneHalfLength = 9700.);// half length of pivot plane in barrel region + 
declareProperty("Extrapolator", m_extrapolator); + declareProperty("MuonCollection", m_muonContainerName = "Muons"); +} + +//********************************************************************** + + +StatusCode MuonExtrapolationTool::initialize() +{ + ATH_CHECK(m_extrapolator.retrieve()); + return StatusCode::SUCCESS; +} + + +//********************************************************************** + +bool MuonExtrapolationTool::extrapolateAndDecorateTrackParticle(const xAOD::TrackParticle* particle, float & eta, float & phi) const +{ + + // decorators used to access or store the information + static SG::AuxElement::Decorator< char > Decorated ("DecoratedPivotEtaPhi"); + static SG::AuxElement::Decorator< float > Eta ("EtaTriggerPivot"); + static SG::AuxElement::Decorator< float > Phi ("PhiTriggerPivot"); + + if (! Decorated.isAvailable(*particle) || !Decorated(*particle)){ + // in the athena release, we can run the extrapolation if needed + const Trk::TrackParameters* pTag = extrapolateToTriggerPivotPlane(*particle); + if(!pTag) { + Decorated(*particle) = false; + return false; + } + Eta(*particle) = pTag->position().eta(); + Phi(*particle) = pTag->position().phi(); + Decorated(*particle) = true; + delete pTag; + } + // if we get here, the decoration was either already present or just added by us + // so we can finally read the values + eta = Eta(*particle); + phi = Phi(*particle); + return true; +} + +//********************************************************************** + +const xAOD::TrackParticle* MuonExtrapolationTool::getPreferredTrackParticle (const xAOD::IParticle* muon) const +{ + if (dynamic_cast<const xAOD::TruthParticle*>(muon)){ + ATH_MSG_WARNING("Pivot plane extrapolation not supported for Truth muons!"); + return 0; + } + const xAOD::TrackParticle* muonTrack = dynamic_cast<const xAOD::TrackParticle*>(muon); + if(!muonTrack && dynamic_cast<const xAOD::Muon*>(muon)) { + const xAOD::Muon* theMuon = dynamic_cast<const xAOD::Muon*>(muon); + muonTrack = 
theMuon->trackParticle( xAOD::Muon::MuonSpectrometerTrackParticle ); + if(!muonTrack) { + muonTrack = theMuon->primaryTrackParticle(); + if(!muonTrack) { + muonTrack = theMuon->trackParticle( xAOD::Muon::InnerDetectorTrackParticle ); + } + } + } + if(!muonTrack){ + ATH_MSG_WARNING("no valid track found for extrapolating the muon to the pivot plane!"); + } + return muonTrack; + +} + +StatusCode MuonExtrapolationTool::addBranches() const +{ + const xAOD::MuonContainer* muons = NULL; + CHECK(evtStore()->retrieve(muons, m_muonContainerName)); + for(auto muon : *muons){ + const xAOD::TrackParticle* track = getPreferredTrackParticle(muon); + float eta, phi = 0; + if( !extrapolateAndDecorateTrackParticle( track, eta, phi )){ + if( muon->pt() > 3500.){ + //only complain if the muon has sufficient pT to actually reach the pivot plane + //extrapolation will often fail for muons with pT < 3500 MeV + ATH_MSG_WARNING("Failed to extrapolate+decorate muon with pivot plane coords - Muon params: pt "<<muon->pt()<<", eta "<< muon->eta()<<", phi "<< muon->phi()); + } + } + } + return StatusCode::SUCCESS; +} + +const Trk::TrackParameters* MuonExtrapolationTool::extrapolateToTriggerPivotPlane(const xAOD::TrackParticle& track) const +{ + // BARREL + const Trk::Perigee& perigee = track.perigeeParameters(); + + // create the barrel as a cylinder surface centered at 0,0,0 + Amg::Vector3D barrelCentre(0., 0., 0.); + Amg::Transform3D* matrix = new Amg::Transform3D(Amg::RotationMatrix3D::Identity(), barrelCentre); + + Trk::CylinderSurface* cylinder = + new Trk::CylinderSurface(matrix, + m_barrelPivotPlaneRadius, + m_barrelPivotPlaneHalfLength); + if (!cylinder) { + ATH_MSG_WARNING("extrapolateToTriggerPivotPlane :: new Trk::CylinderSurface failed."); + delete matrix; + matrix = 0; + return 0; + } + // and then attempt to extrapolate our track to this surface, checking for the boundaries of the barrel + bool boundaryCheck = true; + const Trk::Surface* surface = cylinder; + const 
Trk::TrackParameters* p = m_extrapolator->extrapolate(perigee, + *surface, + Trk::alongMomentum, + boundaryCheck, + Trk::muon); + delete cylinder; + // if the extrapolation worked out (so we are in the barrel) we are done and can return the + // track parameters at this surface. + if (p) return p; + + // if we get here, the muon did not cross the barrel surface + // so we assume it is going into the endcap. + // ENDCAP + + // After 2 years of using this code, we realised that ATLAS actually has endcaps on both sides ;-) + // So better make sure we place our endcap at the correct side of the detector! + // Hopefully no-one will ever read this comment... + float SignOfEta = track.eta() > 0 ? 1. : -1.; + + Amg::Vector3D endcapCentre(0., 0., m_endcapPivotPlaneZ); + // much better! + matrix = new Amg::Transform3D(Amg::RotationMatrix3D::Identity(), SignOfEta * endcapCentre); + + Trk::DiscSurface* disc = + new Trk::DiscSurface(matrix, + m_endcapPivotPlaneMinimumRadius, + m_endcapPivotPlaneMaximumRadius); + if (!disc) { + ATH_MSG_WARNING("extrapolateToTriggerPivotPlane :: new Trk::DiscSurface failed."); + delete matrix; + matrix = 0; + return 0; + } + + // for the endcap, we turn off the boundary check, extending the EC infinitely to catch stuff heading for the transition region + boundaryCheck = false; + surface = disc; + p = m_extrapolator->extrapolate(perigee, + *surface, + Trk::alongMomentum, + boundaryCheck, + Trk::muon); + delete disc; + return p; +} +} diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/ReVertex.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/ReVertex.cxx new file mode 100644 index 0000000000000000000000000000000000000000..9ca2524041251cd3abda4168c33f9f465c8dff9e --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/ReVertex.cxx @@ -0,0 +1,284 @@ +/* + Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration +*/ +// 
**************************************************************************** +// ---------------------------------------------------------------------------- +// ReVertex +// +// Konstantin Beloborodov <Konstantin.Beloborodov@cern.ch> +// +// ---------------------------------------------------------------------------- +// **************************************************************************** +#include "DerivationFrameworkBPhys/ReVertex.h" +#include "xAODTracking/VertexContainer.h" +#include "xAODTracking/VertexAuxContainer.h" +#include "JpsiUpsilonTools/PrimaryVertexRefitter.h" +#include "JpsiUpsilonTools/JpsiUpsilonCommon.h" + +#include "TrkVertexAnalysisUtils/V0Tools.h" +#include "BeamSpotConditionsData/BeamSpotData.h" +#include "DerivationFrameworkBPhys/BPhysPVTools.h" +#include "TrkVertexFitterInterfaces/IVertexFitter.h" +#include "TrkVKalVrtFitter/TrkVKalVrtFitter.h" +#include "InDetConversionFinderTools/VertexPointEstimator.h" +#include "xAODBPhys/BPhysHypoHelper.h" + +using namespace DerivationFramework; + +ReVertex::ReVertex(const std::string& t, + const std::string& n, + const IInterface* p) : + AthAlgTool(t,n,p), m_vertexEstimator("InDet::VertexPointEstimator"), m_iVertexFitter("Trk::TrkVKalVrtFitter"), + m_massConst(0.), + m_totalMassConst(0.), + m_v0Tools("Trk::V0Tools"), + m_pvRefitter("Analysis::PrimaryVertexRefitter"), + m_doMassConst(false), + m_vertexFittingWithPV(false), + m_chi2cut(-1.0), + m_trkDeltaZ(-1.0), + m_useAdditionalTrack(false) +{ + + declareInterface<DerivationFramework::IAugmentationTool>(this); + declareProperty("TrackIndices", m_TrackIndices); + declareProperty("TrkVertexFitterTool", m_iVertexFitter); + declareProperty("VertexPointEstimator",m_vertexEstimator); + + declareProperty("OutputVtxContainerName", m_OutputContainerName); + declareProperty("InputVtxContainerName", m_inputContainerName); + declareProperty("TrackContainerName", m_trackContainer = "InDetTrackParticles"); + declareProperty("UseVertexFittingWithPV", 
m_vertexFittingWithPV); + + declareProperty("HypothesisNames",m_hypoNames); + + declareProperty("V0Tools" , m_v0Tools); + declareProperty("PVRefitter" , m_pvRefitter); + declareProperty("PVContainerName" , m_pvContainerName = "PrimaryVertices"); + declareProperty("RefPVContainerName" , m_refPVContainerName = "RefittedPrimaryVertices"); + + declareProperty("UseMassConstraint", m_doMassConst); + declareProperty("VertexMass", m_totalMassConst); + declareProperty("SubVertexMass", m_massConst); + declareProperty("MassInputParticles", m_trkMasses); + declareProperty("SubVertexTrackIndices", m_indices); + + declareProperty("UseAdditionalTrack", m_useAdditionalTrack); + + declareProperty("RefitPV" , m_refitPV = false); + //This parameter will allow us to optimize the number of PVs under consideration as the probability + //of a useful primary vertex drops significantly the higher you go + declareProperty("MaxPVrefit" , m_PV_max = 1000); + declareProperty("DoVertexType" , m_DoVertexType = 7); + // minimum number of tracks for PV to be considered for PV association + declareProperty("MinNTracksInPV" , m_PV_minNTracks = 0); + declareProperty("Do3d" , m_do3d = false); + declareProperty("AddPVData" , m_AddPVData = true); + declareProperty("StartingPoint0" , m_startingpoint0 = false); + declareProperty("BMassUpper",m_BMassUpper = std::numeric_limits<double>::max() ); + declareProperty("BMassLower",m_BMassLower = std::numeric_limits<double>::min() ); + declareProperty("Chi2Cut",m_chi2cut = std::numeric_limits<double>::max() ); + declareProperty("TrkDeltaZ",m_trkDeltaZ); + + +} + +StatusCode ReVertex::initialize() { + ATH_MSG_DEBUG("in initialize()"); + if(m_TrackIndices.empty()) { + ATH_MSG_FATAL("No Indices provided"); + return StatusCode::FAILURE; + } + m_VKVFitter = dynamic_cast<Trk::TrkVKalVrtFitter*>(&(*m_iVertexFitter)); + if(m_VKVFitter==nullptr) return StatusCode::FAILURE; + ATH_CHECK(m_OutputContainerName.initialize()); + ATH_CHECK(m_inputContainerName.initialize()); + 
ATH_CHECK(m_trackContainer.initialize()); + ATH_CHECK(m_pvContainerName.initialize()); + ATH_CHECK(m_refPVContainerName.initialize()); + + return StatusCode::SUCCESS; +} + + +StatusCode ReVertex::addBranches() const { + + SG::WriteHandle<xAOD::VertexContainer> vtxContainer(m_OutputContainerName); + ATH_CHECK(vtxContainer.record(std::make_unique<xAOD::VertexContainer>(), std::make_unique<xAOD::VertexAuxContainer>())); + + const size_t Ntracks = m_TrackIndices.size(); + + SG::ReadHandle<xAOD::VertexContainer> InVtxContainer(m_inputContainerName); + SG::ReadHandle<xAOD::TrackParticleContainer> importedTrackCollection(m_trackContainer); + ATH_CHECK(InVtxContainer.isValid()); + ATH_CHECK(importedTrackCollection.isValid()); + //---------------------------------------------------- + // retrieve primary vertices + //---------------------------------------------------- + SG::ReadHandle<xAOD::VertexContainer> pvContainer(m_pvContainerName); + ATH_CHECK(pvContainer.isValid()); + + std::vector<const xAOD::TrackParticle*> fitpair(Ntracks + m_useAdditionalTrack); + for(const xAOD::Vertex* v : *InVtxContainer) + { + + bool passed = false; + for(size_t i=0;i<m_hypoNames.size();i++) { + xAOD::BPhysHypoHelper onia(m_hypoNames.at(i), v); + passed |= onia.pass(); + } + if (!passed && m_hypoNames.size()) continue; + + for(size_t i =0; i<Ntracks; i++) + { + size_t trackN = m_TrackIndices[i]; + if(trackN >= v->nTrackParticles()) + { + ATH_MSG_FATAL("Indices exceeds limit in particle"); + return StatusCode::FAILURE; + } + fitpair[i] = v->trackParticle(trackN); + } + + if (m_useAdditionalTrack) + { + // Loop over ID tracks, call vertexing + for (auto trkItr=importedTrackCollection->cbegin(); trkItr!=importedTrackCollection->cend(); ++trkItr) { + const xAOD::TrackParticle* tp (*trkItr); + fitpair.back() = nullptr; + if (Analysis::JpsiUpsilonCommon::isContainedIn(tp,fitpair)) continue; // remove tracks which were used to build J/psi+2Tracks + fitpair.back() = tp; + + // Daniel Scheirich: 
remove track too far from the Jpsi+2Tracks vertex (DeltaZ cut) + if(m_trkDeltaZ>0 && + std::abs((tp)->z0() + (tp)->vz() - v->z()) > m_trkDeltaZ ) + continue; + + fitAndStore(vtxContainer.ptr(),v,InVtxContainer.cptr(),fitpair,importedTrackCollection.cptr(),pvContainer.cptr()); + } + } + else + { + fitAndStore(vtxContainer.ptr(),v,InVtxContainer.cptr(),fitpair,importedTrackCollection.cptr(),pvContainer.cptr()); + } + } + + if(m_AddPVData){ + // Give the helper class the ptr to v0tools and beamSpotsSvc to use + SG::ReadCondHandle<InDet::BeamSpotData> beamSpotHandle { m_beamSpotKey }; + if(not beamSpotHandle.isValid()) ATH_MSG_ERROR("Cannot Retrieve " << m_beamSpotKey.key() ); + BPhysPVTools helper(&(*m_v0Tools), beamSpotHandle.cptr()); + helper.SetMinNTracksInPV(m_PV_minNTracks); + helper.SetSave3d(m_do3d); + + if(m_refitPV) { + //---------------------------------------------------- + // Try to retrieve refitted primary vertices + //---------------------------------------------------- + SG::WriteHandle<xAOD::VertexContainer> refPvContainer(m_refPVContainerName); + ATH_CHECK(refPvContainer.record(std::make_unique<xAOD::VertexContainer>(), std::make_unique<xAOD::VertexAuxContainer>())); + + if(vtxContainer->size() >0){ + ATH_CHECK(helper.FillCandwithRefittedVertices(vtxContainer.ptr(), pvContainer.cptr(), refPvContainer.ptr(), &(*m_pvRefitter) , m_PV_max, m_DoVertexType)); + } + }else{ + if(vtxContainer->size() >0) ATH_CHECK(helper.FillCandExistingVertices(vtxContainer.ptr(), pvContainer.cptr(), m_DoVertexType)); + } + } + + return StatusCode::SUCCESS; +} + +void ReVertex::fitAndStore(xAOD::VertexContainer* vtxContainer, + const xAOD::Vertex* v, + const xAOD::VertexContainer *InVtxContainer, + const std::vector<const xAOD::TrackParticle*> &inputTracks, + const xAOD::TrackParticleContainer* importedTrackCollection, + const xAOD::VertexContainer* pvContainer) const +{ + std::unique_ptr<xAOD::Vertex> ptr(fit(inputTracks, nullptr)); + if(!ptr)return; + + double chi2DOF = 
ptr->chiSquared()/ptr->numberDoF(); + ATH_MSG_DEBUG("Candidate chi2/DOF is " << chi2DOF); + bool chi2CutPassed = (m_chi2cut <= 0.0 || chi2DOF < m_chi2cut); + if(!chi2CutPassed) { ATH_MSG_DEBUG("Chi Cut failed!"); return; } + xAOD::BPhysHelper bHelper(ptr.get());//"get" does not "release" still automatically deleted + bHelper.setRefTrks(); + if (m_trkMasses.size()==inputTracks.size()) { + TLorentzVector bMomentum = bHelper.totalP(m_trkMasses); + double bMass = bMomentum.M(); + bool passesCuts = (m_BMassUpper > bMass && bMass > m_BMassLower); + if(!passesCuts)return; + } + + DerivationFramework::BPhysPVTools::PrepareVertexLinks( ptr.get(), importedTrackCollection ); + std::vector<const xAOD::Vertex*> thePreceding; + thePreceding.push_back(v); + if(m_vertexFittingWithPV){ + const xAOD::Vertex* closestPV = Analysis::JpsiUpsilonCommon::ClosestPV(bHelper, pvContainer); + if (!closestPV) return; + std::unique_ptr<xAOD::Vertex> ptrPV(fit(inputTracks, closestPV)); + if(!ptrPV) return; + + double chi2DOFPV = ptrPV->chiSquared()/ptrPV->numberDoF(); + ATH_MSG_DEBUG("CandidatePV chi2/DOF is " << chi2DOFPV); + bool chi2CutPassed = (m_chi2cut <= 0.0 || chi2DOFPV < m_chi2cut); + if(!chi2CutPassed) { ATH_MSG_DEBUG("Chi Cut failed!"); return; } + xAOD::BPhysHelper bHelperPV(ptrPV.get());//"get" does not "release" still automatically deleted + bHelperPV.setRefTrks(); + if (m_trkMasses.size()==inputTracks.size()) { + TLorentzVector bMomentumPV = bHelperPV.totalP(m_trkMasses); + double bMass = bMomentumPV.M(); + bool passesCuts = (m_BMassUpper > bMass && bMass > m_BMassLower); + if(!passesCuts)return; + } + + bHelperPV.setPrecedingVertices(thePreceding, InVtxContainer); + vtxContainer->push_back(ptrPV.release()); + return; //Don't store other vertex + } + bHelper.setPrecedingVertices(thePreceding, InVtxContainer); + vtxContainer->push_back(ptr.release()); +} + + // ********************************************************************************* + + // 
--------------------------------------------------------------------------------- + // fit - does the fit + // --------------------------------------------------------------------------------- + +xAOD::Vertex* ReVertex::fit(const std::vector<const xAOD::TrackParticle*> &inputTracks, + const xAOD::Vertex* pv) const +{ + std::unique_ptr<Trk::IVKalState> state = m_VKVFitter->makeState(); + if (m_doMassConst && (m_trkMasses.size()==inputTracks.size())) { + m_VKVFitter->setMassInputParticles(m_trkMasses, *state); + if (m_totalMassConst) m_VKVFitter->setMassForConstraint(m_totalMassConst, *state); + if (m_massConst) m_VKVFitter->setMassForConstraint(m_massConst, m_indices, *state); + } + if (pv) { + m_VKVFitter->setCnstType(8, *state); + m_VKVFitter->setVertexForConstraint(pv->position().x(), + pv->position().y(), + pv->position().z(), *state); + m_VKVFitter->setCovVrtForConstraint(pv->covariancePosition()(Trk::x,Trk::x), + pv->covariancePosition()(Trk::y,Trk::x), + pv->covariancePosition()(Trk::y,Trk::y), + pv->covariancePosition()(Trk::z,Trk::x), + pv->covariancePosition()(Trk::z,Trk::y), + pv->covariancePosition()(Trk::z,Trk::z), *state ); + } + + // Do the fit itself....... 
+ // Starting point (use the J/psi position) + const Trk::Perigee& aPerigee1 = inputTracks[0]->perigeeParameters(); + const Trk::Perigee& aPerigee2 = inputTracks[1]->perigeeParameters(); + int sflag = 0; + int errorcode = 0; + Amg::Vector3D startingPoint = m_vertexEstimator->getCirclesIntersectionPoint(&aPerigee1,&aPerigee2,sflag,errorcode); + if (errorcode != 0) {startingPoint(0) = 0.0; startingPoint(1) = 0.0; startingPoint(2) = 0.0;} + xAOD::Vertex* theResult = m_VKVFitter->fit(inputTracks, startingPoint, *state); + + return theResult; +} diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/Reco_4mu.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/Reco_4mu.cxx new file mode 100644 index 0000000000000000000000000000000000000000..4d4a7c80258d5c7a560170cab0635dd5246ef71a --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/Reco_4mu.cxx @@ -0,0 +1,269 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ + +///////////////////////////////////////////////////////////////// +// Reco_4mu.cxx +/////////////////////////////////////////////////////////////////// +// Author: James Catmore <james.catmore@cern.ch> + +#include "DerivationFrameworkBPhys/Reco_4mu.h" +#include "xAODTracking/VertexContainer.h" +#include "xAODTracking/VertexAuxContainer.h" +#include "TrkVertexAnalysisUtils/V0Tools.h" +#include "DerivationFrameworkBPhys/BPhysPVTools.h" +#include "xAODBPhys/BPhysHypoHelper.h" + +namespace DerivationFramework { + + Reco_4mu::Reco_4mu(const std::string& t, + const std::string& n, + const IInterface* p) : + AthAlgTool(t,n,p), + m_v0Tools("Trk::V0Tools"), + m_fourMuonTool("DerivationFramework::FourMuonTool"), + m_pvRefitter("Analysis::PrimaryVertexRefitter") + { + declareInterface<DerivationFramework::ISkimmingTool>(this); + + // Declare tools + declareProperty("V0Tools" , m_v0Tools); + declareProperty("FourMuonTool", m_fourMuonTool); + 
    declareProperty("PVRefitter", m_pvRefitter);

    // Declare user-defined properties
    declareProperty("PairContainerName"      , m_pairName           = "Pairs");
    declareProperty("QuadrupletContainerName", m_quadName           = "Quadruplets");
    declareProperty("PVContainerName"        , m_pvContainerName    = "PrimaryVertices");
    declareProperty("RefPVContainerName"     , m_refPVContainerName = "RefittedPrimaryVertices");
    declareProperty("RefitPV"                , m_refitPV            = false);
    declareProperty("MaxPVrefit"             , m_PV_max             = 1);
    declareProperty("DoVertexType"           , m_DoVertexType       = 1);
  }

  // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *

  // Retrieves the three helper tools; nothing else is configured here.
  StatusCode Reco_4mu::initialize()
  {

    ATH_MSG_DEBUG("in initialize()");

    // retrieve V0 tools
    CHECK( m_v0Tools.retrieve() );

    // get the JpsiFinder tool
    CHECK( m_fourMuonTool.retrieve() );

    // get the PrimaryVertexRefitter tool
    CHECK( m_pvRefitter.retrieve() );

    return StatusCode::SUCCESS;

  }

  // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *

  StatusCode Reco_4mu::finalize()
  {
    // everything all right
    return StatusCode::SUCCESS;
  }

  // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *

  // Per-event skimming decision: runs the four-muon search, decorates the
  // produced pair and quadruplet vertices with masses and (refitted-)PV
  // dependent lifetimes, records everything in StoreGate, and returns the
  // finder's accept decision.
  bool Reco_4mu::eventPassesFilter() const
  {
    // Output containers and its auxilliary store
    xAOD::VertexContainer*    pairContainer = NULL;
    xAOD::VertexAuxContainer* pairAuxContainer = NULL;
    xAOD::VertexContainer*    quadContainer = NULL;
    xAOD::VertexAuxContainer* quadAuxContainer = NULL;
    bool acceptEvent(false);
    //----------------------------------------------------
    // call finder
    //----------------------------------------------------
    if( !m_fourMuonTool->performSearch(pairContainer, pairAuxContainer, quadContainer, quadAuxContainer, acceptEvent).isSuccess() ) {
      ATH_MSG_FATAL("4mu tool (" << m_fourMuonTool << ") failed.");
      return(false);
    }

    //----------------------------------------------------
    // event selection
    //----------------------------------------------------
    /*if (quadContainer->size()==0) {
      if (pairContainer!=NULL) delete pairContainer;
      if (pairAuxContainer!=NULL) delete pairAuxContainer;
      if (quadContainer!=NULL) delete quadContainer;
      if (quadAuxContainer!=NULL) delete quadAuxContainer;
      return(acceptEvent);
      // acceptEvent based on muon selection only, not quads
    }*/

    //----------------------------------------------------
    // retrieve primary vertices
    //----------------------------------------------------
    const xAOD::VertexContainer* pvContainer = NULL;
    auto sc = evtStore()->retrieve(pvContainer, m_pvContainerName);
    if(sc.isFailure()){
      ATH_MSG_FATAL("Cannot find PV Container");
      return false;
    }
    //----------------------------------------------------
    // Refit primary vertices
    //----------------------------------------------------
    xAOD::VertexContainer*    refPvContainer = NULL;
    xAOD::VertexAuxContainer* refPvAuxContainer = NULL;

    if(m_refitPV) {
      refPvContainer = new xAOD::VertexContainer;
      refPvAuxContainer = new xAOD::VertexAuxContainer;
      refPvContainer->setStore(refPvAuxContainer);
    }

    BPhysPVTools helper(&(*m_v0Tools));//Give the helper class the ptr to v0tools to use

    if(m_refitPV){
      if(quadContainer->size() >0){
        StatusCode SC = helper.FillCandwithRefittedVertices(quadContainer, pvContainer, refPvContainer, &(*m_pvRefitter) , m_PV_max, m_DoVertexType);
        if(SC.isFailure()){
          ATH_MSG_FATAL("refitting failed - check the vertices you passed");
          return false;
        }
      }
      if(pairContainer->size()>0) {
        StatusCode SC = helper.FillCandwithRefittedVertices(pairContainer, pvContainer, refPvContainer, &(*m_pvRefitter) , m_PV_max, m_DoVertexType);
        if(SC.isFailure()){
          ATH_MSG_FATAL("refitting failed - check the vertices you passed");
          return false;
        }
      }
    }else{
      if(quadContainer->size() >0){
        auto sc = helper.FillCandExistingVertices(quadContainer, pvContainer, m_DoVertexType);
        sc.ignore();
      }
      if(pairContainer->size() >0)
      {
        auto sc = helper.FillCandExistingVertices(pairContainer, pvContainer, m_DoVertexType);
        sc.ignore();
      }
    }

    //----------------------------------------------------
    // Mass-hypothesis dependent quantities
    //----------------------------------------------------

    // Muon mass (MeV) assigned to every track of the pair/quad hypotheses.
    std::vector<double> muonPairMasses = std::vector<double>(2, 105.658);
    std::vector<double> muonQuadMasses = std::vector<double>(4, 105.658);

    // DoVertexType is a bitmask selecting which PV associations to process.
    bool doPt   = (m_DoVertexType & 1) != 0;
    bool doA0   = (m_DoVertexType & 2) != 0;
    bool doZ0   = (m_DoVertexType & 4) != 0;
    bool doZ0BA = (m_DoVertexType & 8) != 0;

    // loop over pairs
    xAOD::VertexContainer::iterator pairItr = pairContainer->begin();
    ATH_MSG_DEBUG("Indices/masses of pairs follows....");
    for(; pairItr!=pairContainer->end(); ++pairItr) {
      // create BPhysHypoHelper
      xAOD::BPhysHypoHelper pairHelper("PAIR", *pairItr);

      //----------------------------------------------------
      // decorate the vertex
      //----------------------------------------------------
      // a) invariant mass and error
      if( !pairHelper.setMass(muonPairMasses) ) ATH_MSG_WARNING("Decoration pair.setMass failed");

      double massErr = m_v0Tools->invariantMassError(pairHelper.vtx(), muonPairMasses);
      if( !pairHelper.setMassErr(massErr) ) ATH_MSG_WARNING("Decoration pair.setMassErr failed");

      // b) proper decay time and error:
      // retrieve the refitted PV (or the original one, if the PV refitting was turned off)
      if(doPt)   ProcessVertex(pairHelper, xAOD::BPhysHelper::PV_MAX_SUM_PT2, muonPairMasses);
      if(doA0)   ProcessVertex(pairHelper, xAOD::BPhysHelper::PV_MIN_A0, muonPairMasses);
      if(doZ0)   ProcessVertex(pairHelper, xAOD::BPhysHelper::PV_MIN_Z0, muonPairMasses);
      if(doZ0BA) ProcessVertex(pairHelper, xAOD::BPhysHelper::PV_MIN_Z0_BA, muonPairMasses);
      ATH_MSG_DEBUG((*pairItr)->auxdata<std::string>("CombinationCode") << " : " << pairHelper.mass() << " +/- " << pairHelper.massErr());
    }

    // loop over quadruplets
    xAOD::VertexContainer::iterator quadItr = quadContainer->begin();
    ATH_MSG_DEBUG("Indices/masses of quadruplets follows....");
    for(; quadItr!=quadContainer->end(); ++quadItr) {
      // create BPhysHypoHelper
      xAOD::BPhysHypoHelper quadHelper("QUAD", *quadItr);

      //----------------------------------------------------
      // decorate the vertex
      //----------------------------------------------------
      // a) invariant mass and error
      if( !quadHelper.setMass(muonQuadMasses) ) ATH_MSG_WARNING("Decoration quad.setMass failed");

      double massErr = m_v0Tools->invariantMassError(quadHelper.vtx(), muonQuadMasses);
      if( !quadHelper.setMassErr(massErr) ) ATH_MSG_WARNING("Decoration quad.setMassErr failed");

      // b) proper decay time and error:
      // retrieve the refitted PV (or the original one, if the PV refitting was turned off)
      if(doPt)   ProcessVertex(quadHelper, xAOD::BPhysHelper::PV_MAX_SUM_PT2, muonQuadMasses);
      if(doA0)   ProcessVertex(quadHelper, xAOD::BPhysHelper::PV_MIN_A0, muonQuadMasses);
      if(doZ0)   ProcessVertex(quadHelper, xAOD::BPhysHelper::PV_MIN_Z0, muonQuadMasses);
      if(doZ0BA) ProcessVertex(quadHelper, xAOD::BPhysHelper::PV_MIN_Z0_BA, muonQuadMasses);
      ATH_MSG_DEBUG((*quadItr)->auxdata<std::string>("CombinationCode") << " : " << quadHelper.mass() << " +/- " << quadHelper.massErr());
    }

    //----------------------------------------------------
    // save in the StoreGate
    //----------------------------------------------------
    // Pairs
    if (!evtStore()->contains<xAOD::VertexContainer>(m_pairName))
      evtStore()->record(pairContainer, m_pairName).ignore();
    if (!evtStore()->contains<xAOD::VertexAuxContainer>(m_pairName+"Aux."))
      evtStore()->record(pairAuxContainer, m_pairName+"Aux.").ignore();

    // Quads
    if (!evtStore()->contains<xAOD::VertexContainer>(m_quadName))
      evtStore()->record(quadContainer, m_quadName).ignore();
    if (!evtStore()->contains<xAOD::VertexAuxContainer>(m_quadName+"Aux."))
      evtStore()->record(quadAuxContainer, m_quadName+"Aux.").ignore();

    // Refitted PVs
    if(m_refitPV) {
      evtStore()->record(refPvContainer   , m_refPVContainerName).ignore();
      evtStore()->record(refPvAuxContainer, m_refPVContainerName+"Aux.").ignore();
    }

    return(acceptEvent);
  }


  // Decorates one candidate with proper decay time and its error for the given
  // PV association type, or with a sentinel value when no PV is available.
  void Reco_4mu::ProcessVertex(xAOD::BPhysHypoHelper &hypoHelper, xAOD::BPhysHelper::pv_type pv_t, std::vector<double> trackMasses) const{

    const xAOD::Vertex* pv = hypoHelper.pv(pv_t);
    if(pv) {
      // decorate the vertex.

      BPHYS_CHECK( hypoHelper.setTau( m_v0Tools->tau(hypoHelper.vtx(), pv, trackMasses),
                                      pv_t,
                                      xAOD::BPhysHypoHelper::TAU_INV_MASS) );

      BPHYS_CHECK( hypoHelper.setTauErr( m_v0Tools->tauError(hypoHelper.vtx(), pv, trackMasses),
                                         pv_t,
                                         xAOD::BPhysHypoHelper::TAU_INV_MASS) );

      //enum pv_type {PV_MAX_SUM_PT2, PV_MIN_A0, PV_MIN_Z0, PV_MIN_Z0BA};
    }else{

      // No PV association available: store an error sentinel instead.
      const float errConst = -9999999;
      BPHYS_CHECK( hypoHelper.setTau( errConst,
                                      pv_t,
                                      xAOD::BPhysHypoHelper::TAU_INV_MASS) );

      BPHYS_CHECK( hypoHelper.setTauErr( errConst,
                                         pv_t,
                                         xAOD::BPhysHypoHelper::TAU_INV_MASS) );
    }

    return;
  }

}
diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/Reco_V0Finder.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/Reco_V0Finder.cxx
new file mode 100644
index 0000000000000000000000000000000000000000..0b19d017b1365e75cfc01a0df9d424427d7df456
--- /dev/null
+++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/Reco_V0Finder.cxx
@@ -0,0 +1,307 @@
/*
  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
*/
/////////////////////////////////////////////////////////////////
// Reco_V0Finder.cxx, (c) ATLAS Detector software
///////////////////////////////////////////////////////////////////
// Author: Adam Barton
#include "DerivationFrameworkBPhys/Reco_V0Finder.h"
#include "xAODTracking/VertexContainer.h"
#include "xAODTracking/VertexAuxContainer.h"
#include "TrkVertexAnalysisUtils/V0Tools.h"
#include
"GaudiKernel/IPartPropSvc.h"
#include "EventKernel/PdtPdg.h"

namespace DerivationFramework {

  // Augmentation tool constructor: sets up the V0 finder and V0 tools and
  // declares the configurable masses and container names with their defaults.
  Reco_V0Finder::Reco_V0Finder(const std::string& t,
                               const std::string& n,
                               const IInterface* p) :
    AthAlgTool(t,n,p),
    m_v0FinderTool("InDet::V0FinderTool"),
    m_V0Tools("Trk::V0Tools"),
    m_particleDataTable(nullptr),
    m_masses(1),
    m_masspi(139.57),
    m_massp(938.272),
    m_masse(0.510999),
    m_massK0S(497.672),
    m_massLambda(1115.68),
    m_VxPrimaryCandidateName("PrimaryVertices"),
    m_v0ContainerName("RecoV0Candidates"),
    m_ksContainerName("RecoKshortCandidates"),
    m_laContainerName("RecoLambdaCandidates"),
    m_lbContainerName("RecoLambdabarCandidates")
  {
    declareInterface<DerivationFramework::IAugmentationTool>(this);

    // Declare user-defined properties
    declareProperty("CheckVertexContainers", m_CollectionsToCheck);
    declareProperty("V0FinderTool", m_v0FinderTool);
    declareProperty("V0Tools", m_V0Tools);
    // masses==1 means "take particle masses from the PDG table" (see initialize).
    declareProperty("masses", m_masses);
    declareProperty("masspi", m_masspi);
    declareProperty("massp", m_massp);
    declareProperty("masse", m_masse);
    declareProperty("massK0S", m_massK0S);
    declareProperty("massLambda", m_massLambda);
    declareProperty("VxPrimaryCandidateName", m_VxPrimaryCandidateName);
    declareProperty("V0ContainerName", m_v0ContainerName);
    declareProperty("KshortContainerName", m_ksContainerName);
    declareProperty("LambdaContainerName", m_laContainerName);
    declareProperty("LambdabarContainerName", m_lbContainerName);
  }

  // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *

  // Retrieves the finder and V0 tools, and (when masses==1) overrides the
  // configured particle masses with the values from the Particle Properties
  // Service PDG table.
  StatusCode Reco_V0Finder::initialize()
  {

    ATH_MSG_DEBUG("in initialize()");

    // get the V0Finder tool
    ATH_CHECK( m_v0FinderTool.retrieve());

    // uploading the V0 tools
    ATH_CHECK( m_V0Tools.retrieve());

    // get the Particle Properties Service
    IPartPropSvc* partPropSvc = nullptr;
    StatusCode sc = service("PartPropSvc", partPropSvc, true);
    if (sc.isFailure()) {
      msg(MSG::ERROR) << "Could not initialize Particle Properties Service" << endmsg;
      return StatusCode::FAILURE;
    }
    m_particleDataTable = partPropSvc->PDT();

    const HepPDT::ParticleData* pd_pi = m_particleDataTable->particle(PDG::pi_plus);
    const HepPDT::ParticleData* pd_p  = m_particleDataTable->particle(PDG::p_plus);
    const HepPDT::ParticleData* pd_e  = m_particleDataTable->particle(PDG::e_minus);
    const HepPDT::ParticleData* pd_K  = m_particleDataTable->particle(PDG::K_S0);
    const HepPDT::ParticleData* pd_L  = m_particleDataTable->particle(PDG::Lambda0);
    if (m_masses == 1) {
      m_masspi     = pd_pi->mass();
      m_massp      = pd_p->mass();
      m_masse      = pd_e->mass();
      m_massK0S    = pd_K->mass();
      m_massLambda = pd_L->mass();
    }


    return StatusCode::SUCCESS;

  }

  // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *

  StatusCode Reco_V0Finder::finalize()
  {
    // everything all right
    return StatusCode::SUCCESS;
  }

  // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *

  // Per-event entry point: runs the V0 finder only when at least one of the
  // configured trigger containers is non-empty, decorates every produced V0 /
  // Kshort / Lambda / Lambdabar vertex with kinematic quantities, and records
  // the four containers (empty ones when the finder was skipped) in StoreGate.
  StatusCode Reco_V0Finder::addBranches() const
  {
    bool callV0Finder = false;
    // Jpsi container and its auxilliary store
    for(const auto &str : m_CollectionsToCheck){
      const xAOD::VertexContainer* vertContainer = nullptr;
      ATH_CHECK( evtStore()->retrieve(vertContainer, str) );
      if(vertContainer->size() == 0) {
        ATH_MSG_DEBUG("Container VertexContainer (" << str << ") is empty");
      }else{
        callV0Finder = true;
        ATH_MSG_DEBUG("Container VertexContainer (" << str << ") has events N= " << vertContainer->size());
        break;//No point checking other containers
      }
    }

    // Call V0Finder


// InDetV0 container and its auxilliary store
    xAOD::VertexContainer*    v0Container(nullptr);
    xAOD::VertexAuxContainer* v0AuxContainer(nullptr);
    xAOD::VertexContainer*    ksContainer(nullptr);
    xAOD::VertexAuxContainer* ksAuxContainer(nullptr);
    xAOD::VertexContainer*    laContainer(nullptr);
    xAOD::VertexAuxContainer* laAuxContainer(nullptr);
    xAOD::VertexContainer*    lbContainer(nullptr);
    xAOD::VertexAuxContainer* lbAuxContainer(nullptr);

    if (callV0Finder) {

      // Get primary vertex from StoreGate
      const xAOD::Vertex * primaryVertex(0);
      const xAOD::VertexContainer * importedVxContainer(0);
      ATH_CHECK( evtStore()->retrieve(importedVxContainer, m_VxPrimaryCandidateName) );
      ATH_MSG_DEBUG("Found " << m_VxPrimaryCandidateName << " in StoreGate!");
      if (importedVxContainer->size()==0){
        ATH_MSG_WARNING("You have no primary vertices: " << importedVxContainer->size());
      } else {
        // First PV in the container is used as the reference vertex.
        primaryVertex = (*importedVxContainer)[0];
      }

      ATH_CHECK(m_v0FinderTool->performSearch(v0Container, v0AuxContainer, ksContainer, ksAuxContainer, laContainer, laAuxContainer, lbContainer, lbAuxContainer, primaryVertex, importedVxContainer));

      ATH_MSG_DEBUG("Reco_V0Finder v0Container->size() " << v0Container->size());
      ATH_MSG_DEBUG("Reco_V0Finder ksContainer->size() " << ksContainer->size());
      ATH_MSG_DEBUG("Reco_V0Finder laContainer->size() " << laContainer->size());
      ATH_MSG_DEBUG("Reco_V0Finder lbContainer->size() " << lbContainer->size());

      // Decorators: mass-hypothesis-specific masses for the unconstrained V0s,
      // plus common kinematic quantities for all four collections.
      SG::AuxElement::Decorator<float> mDecor_Ksmass("Kshort_mass");
      SG::AuxElement::Decorator<float> mDecor_Ksmasse("Kshort_massError");
      SG::AuxElement::Decorator<float> mDecor_Lamass("Lambda_mass");
      SG::AuxElement::Decorator<float> mDecor_Lamasse("Lambda_massError");
      SG::AuxElement::Decorator<float> mDecor_Lbmass("Lambdabar_mass");
      SG::AuxElement::Decorator<float> mDecor_Lbmasse("Lambdabar_massError");
      SG::AuxElement::Decorator<float> mDecor_mass("mass");
      SG::AuxElement::Decorator<float> mDecor_massError("massError");
      SG::AuxElement::Decorator<float> mDecor_pt("pT");
      SG::AuxElement::Decorator<float> mDecor_ptError("pTError");
      SG::AuxElement::Decorator<float> mDecor_rxy("Rxy");
      SG::AuxElement::Decorator<float> mDecor_rxyError("RxyError");
      SG::AuxElement::Decorator<float> mDecor_px("px");
      SG::AuxElement::Decorator<float> mDecor_py("py");
      SG::AuxElement::Decorator<float> mDecor_pz("pz");

      // Unconstrained V0s: decorate with all three mass hypotheses.
      xAOD::VertexContainer::const_iterator v0Itr = v0Container->begin();
      for ( v0Itr=v0Container->begin(); v0Itr!=v0Container->end(); ++v0Itr )
      {
        const xAOD::Vertex * unconstrV0 = (*v0Itr);
        double mass_ks = m_V0Tools->invariantMass(unconstrV0,m_masspi,m_masspi);
        double mass_error_ks = m_V0Tools->invariantMassError(unconstrV0,m_masspi,m_masspi);
        double mass_la = m_V0Tools->invariantMass(unconstrV0,m_massp,m_masspi);
        double mass_error_la = m_V0Tools->invariantMassError(unconstrV0,m_massp,m_masspi);
        double mass_lb = m_V0Tools->invariantMass(unconstrV0,m_masspi,m_massp);
        double mass_error_lb = m_V0Tools->invariantMassError(unconstrV0,m_masspi,m_massp);
        double pt = m_V0Tools->pT(unconstrV0);
        double ptError = m_V0Tools->pTError(unconstrV0);
        double rxy = m_V0Tools->rxy(unconstrV0);
        double rxyError = m_V0Tools->rxyError(unconstrV0);
        Amg::Vector3D momentum = m_V0Tools->V0Momentum(unconstrV0);
        mDecor_Ksmass( *unconstrV0 ) = mass_ks;
        mDecor_Ksmasse( *unconstrV0 ) = mass_error_ks;
        mDecor_Lamass( *unconstrV0 ) = mass_la;
        mDecor_Lamasse( *unconstrV0 ) = mass_error_la;
        mDecor_Lbmass( *unconstrV0 ) = mass_lb;
        mDecor_Lbmasse( *unconstrV0 ) = mass_error_lb;
        mDecor_pt( *unconstrV0 ) = pt;
        mDecor_ptError( *unconstrV0 ) = ptError;
        mDecor_rxy( *unconstrV0 ) = rxy;
        mDecor_rxyError( *unconstrV0 ) = rxyError;
        mDecor_px( *unconstrV0 ) = momentum.x();
        mDecor_py( *unconstrV0 ) = momentum.y();
        mDecor_pz( *unconstrV0 ) = momentum.z();
        ATH_MSG_DEBUG("Reco_V0Finder mass_ks " << mass_ks << " mass_la " << mass_la << " mass_lb " << mass_lb);
      }
      // Kshort candidates: pi+ pi- hypothesis.
      xAOD::VertexContainer::const_iterator ksItr = ksContainer->begin();
      for ( ksItr=ksContainer->begin(); ksItr!=ksContainer->end(); ++ksItr )
      {
        const xAOD::Vertex * ksV0 = (*ksItr);
        double mass_ks = m_V0Tools->invariantMass(ksV0,m_masspi,m_masspi);
        double mass_error_ks = m_V0Tools->invariantMassError(ksV0,m_masspi,m_masspi);
        double pt = m_V0Tools->pT(ksV0);
        double ptError = m_V0Tools->pTError(ksV0);
        double rxy = m_V0Tools->rxy(ksV0);
        double rxyError = m_V0Tools->rxyError(ksV0);
        Amg::Vector3D momentum = m_V0Tools->V0Momentum(ksV0);
        mDecor_mass( *ksV0 ) = mass_ks;
        mDecor_massError( *ksV0 ) = mass_error_ks;
        mDecor_pt( *ksV0 ) = pt;
        mDecor_ptError( *ksV0 ) = ptError;
        mDecor_rxy( *ksV0 ) = rxy;
        mDecor_rxyError( *ksV0 ) = rxyError;
        mDecor_px( *ksV0 ) = momentum.x();
        mDecor_py( *ksV0 ) = momentum.y();
        mDecor_pz( *ksV0 ) = momentum.z();
        ATH_MSG_DEBUG("Reco_V0Finder mass_ks " << mass_ks << " mass_error_ks " << mass_error_ks << " pt " << pt << " rxy " << rxy);
      }
      // Lambda candidates: p pi- hypothesis.
      xAOD::VertexContainer::const_iterator laItr = laContainer->begin();
      for ( laItr=laContainer->begin(); laItr!=laContainer->end(); ++laItr )
      {
        const xAOD::Vertex * laV0 = (*laItr);
        double mass_la = m_V0Tools->invariantMass(laV0,m_massp,m_masspi);
        double mass_error_la = m_V0Tools->invariantMassError(laV0,m_massp,m_masspi);
        double pt = m_V0Tools->pT(laV0);
        double ptError = m_V0Tools->pTError(laV0);
        double rxy = m_V0Tools->rxy(laV0);
        double rxyError = m_V0Tools->rxyError(laV0);
        Amg::Vector3D momentum = m_V0Tools->V0Momentum(laV0);
        mDecor_mass( *laV0 ) = mass_la;
        mDecor_massError( *laV0 ) = mass_error_la;
        mDecor_pt( *laV0 ) = pt;
        mDecor_ptError( *laV0 ) = ptError;
        mDecor_rxy( *laV0 ) = rxy;
        mDecor_rxyError( *laV0 ) = rxyError;
        mDecor_px( *laV0 ) = momentum.x();
        mDecor_py( *laV0 ) = momentum.y();
        mDecor_pz( *laV0 ) = momentum.z();
        ATH_MSG_DEBUG("Reco_V0Finder mass_la " << mass_la << " mass_error_la " << mass_error_la << " pt " << pt << " rxy " << rxy);
      }
      // Lambdabar candidates: pi+ pbar hypothesis (mass order swapped).
      xAOD::VertexContainer::const_iterator lbItr = lbContainer->begin();
      for ( lbItr=lbContainer->begin(); lbItr!=lbContainer->end(); ++lbItr )
      {
        const xAOD::Vertex * lbV0 = (*lbItr);
        double mass_lb = m_V0Tools->invariantMass(lbV0,m_masspi,m_massp);
        double mass_error_lb = m_V0Tools->invariantMassError(lbV0,m_masspi,m_massp);
        double pt = m_V0Tools->pT(lbV0);
        double ptError = m_V0Tools->pTError(lbV0);
        double rxy = m_V0Tools->rxy(lbV0);
        double rxyError = m_V0Tools->rxyError(lbV0);
        Amg::Vector3D momentum = m_V0Tools->V0Momentum(lbV0);
        mDecor_mass( *lbV0 ) = mass_lb;
        mDecor_massError( *lbV0 ) = mass_error_lb;
        mDecor_pt( *lbV0 ) = pt;
        mDecor_ptError( *lbV0 ) = ptError;
        mDecor_rxy( *lbV0 ) = rxy;
        mDecor_rxyError( *lbV0 ) = rxyError;
        mDecor_px( *lbV0 ) = momentum.x();
        mDecor_py( *lbV0 ) = momentum.y();
        mDecor_pz( *lbV0 ) = momentum.z();
        ATH_MSG_DEBUG("Reco_V0Finder mass_lb " << mass_lb << " mass_error_lb " << mass_error_lb << " pt " << pt << " rxy " << rxy);
      }
    }


    if(!callV0Finder){ //Fill with empty containers
      v0Container = new xAOD::VertexContainer;
      v0AuxContainer = new xAOD::VertexAuxContainer;
      v0Container->setStore(v0AuxContainer);
      ksContainer = new xAOD::VertexContainer;
      ksAuxContainer = new xAOD::VertexAuxContainer;
      ksContainer->setStore(ksAuxContainer);
      laContainer = new xAOD::VertexContainer;
      laAuxContainer = new xAOD::VertexAuxContainer;
      laContainer->setStore(laAuxContainer);
      lbContainer = new xAOD::VertexContainer;
      lbAuxContainer = new xAOD::VertexAuxContainer;
      lbContainer->setStore(lbAuxContainer);
    }

    //---- Recording section: write the results to StoreGate ---//
    CHECK(evtStore()->record(v0Container, m_v0ContainerName));

    CHECK(evtStore()->record(v0AuxContainer, m_v0ContainerName+"Aux."));

    CHECK(evtStore()->record(ksContainer, m_ksContainerName));

    CHECK(evtStore()->record(ksAuxContainer, m_ksContainerName+"Aux."));

    CHECK(evtStore()->record(laContainer, m_laContainerName));

    CHECK(evtStore()->record(laAuxContainer, m_laContainerName+"Aux."));

    CHECK(evtStore()->record(lbContainer, m_lbContainerName));

    CHECK(evtStore()->record(lbAuxContainer, m_lbContainerName+"Aux."));

    return StatusCode::SUCCESS;
  }
}




diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/Reco_Vertex.cxx
b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/Reco_Vertex.cxx new file mode 100644 index 0000000000000000000000000000000000000000..9d2b236c8415989bdbb4e2e5b7134d16fc72d567 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/Reco_Vertex.cxx @@ -0,0 +1,163 @@ +/* + Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration +*/ + +///////////////////////////////////////////////////////////////// +// Reco_Vertex.cxx +/////////////////////////////////////////////////////////////////// +// Author: Daniel Scheirich <daniel.scheirich@cern.ch> +// Based on the Integrated Simulation Framework +// +// Basic Jpsi->mu mu derivation example + +#include "DerivationFrameworkBPhys/Reco_Vertex.h" +#include "DerivationFrameworkBPhys/BPhysPVTools.h" +#include "xAODTracking/VertexContainer.h" +#include "xAODTracking/VertexAuxContainer.h" + +namespace DerivationFramework { + + Reco_Vertex::Reco_Vertex(const std::string& t, + const std::string& n, + const IInterface* p) : + AthAlgTool(t,n,p), + m_v0Tools("Trk::V0Tools"), + m_SearchTool(), + m_pvRefitter("Analysis::PrimaryVertexRefitter") + { + declareInterface<DerivationFramework::IAugmentationTool>(this); + + // Declare tools + declareProperty("V0Tools" , m_v0Tools); + declareProperty("VertexSearchTool", m_SearchTool); + declareProperty("PVRefitter", m_pvRefitter); + + // Declare user-defined properties + declareProperty("OutputVtxContainerName", m_outputVtxContainerName = "OniaCandidates"); + declareProperty("PVContainerName" , m_pvContainerName = "PrimaryVertices"); + declareProperty("RefPVContainerName" , m_refPVContainerName = "RefittedPrimaryVertices"); + declareProperty("RefitPV" , m_refitPV = false); + declareProperty("MaxPVrefit" , m_PV_max = 1000); + declareProperty("DoVertexType" , m_DoVertexType = 7); + // minimum number of tracks for PV to be considered for PV association + declareProperty("MinNTracksInPV" , m_PV_minNTracks = 0); + declareProperty("Do3d" , 
m_do3d = false); + declareProperty("CheckCollections" , m_checkCollections = false); + declareProperty("CheckVertexContainers" , m_CollectionsToCheck); + } + + // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + + StatusCode Reco_Vertex::initialize() + { + + ATH_MSG_DEBUG("in initialize()"); + + // retrieve V0 tools + CHECK( m_v0Tools.retrieve() ); + + // get the Search tool + CHECK( m_SearchTool.retrieve() ); + + // get the PrimaryVertexRefitter tool + CHECK( m_pvRefitter.retrieve() ); + + // Get the beam spot service + ATH_CHECK(m_beamSpotKey.initialize()); + + + ATH_CHECK(m_outputVtxContainerName.initialize()); + ATH_CHECK(m_pvContainerName.initialize()); + ATH_CHECK(m_refPVContainerName.initialize()); + if(m_checkCollections) ATH_CHECK(m_CollectionsToCheck.initialize()); + return StatusCode::SUCCESS; + + } + + // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + + StatusCode Reco_Vertex::addBranches() const + { + bool callTool = true; + if(m_checkCollections) { + for(const auto &str : m_CollectionsToCheck){ + SG::ReadHandle<xAOD::VertexContainer> handle(str); + ATH_CHECK(handle.isValid()); + if(handle->size() == 0) { + callTool = false; + ATH_MSG_DEBUG("Container VertexContainer (" << str << ") is empty"); + break;//No point checking other containers + } + } + } + + // Vertex container and its auxilliary store + xAOD::VertexContainer* vtxContainer = nullptr; + xAOD::VertexAuxContainer* vtxAuxContainer = nullptr; + + if(callTool) { + //---------------------------------------------------- + // call Tool + //---------------------------------------------------- + if( !m_SearchTool->performSearch(vtxContainer, vtxAuxContainer).isSuccess() ) { + ATH_MSG_FATAL("Tool (" << m_SearchTool << ") failed."); + return StatusCode::FAILURE; + } + + //---------------------------------------------------- + // retrieve primary vertices + //---------------------------------------------------- + 
SG::ReadHandle<xAOD::VertexContainer> pvContainer(m_pvContainerName); + + //---------------------------------------------------- + // Try to retrieve refitted primary vertices + //---------------------------------------------------- + xAOD::VertexContainer* refPvContainer = nullptr; + xAOD::VertexAuxContainer* refPvAuxContainer = nullptr; + if(m_refitPV) { + // refitted PV container does not exist. Create a new one. + refPvContainer = new xAOD::VertexContainer; + refPvAuxContainer = new xAOD::VertexAuxContainer; + refPvContainer->setStore(refPvAuxContainer); + } + + // Give the helper class the ptr to v0tools and beamSpotsSvc to use + SG::ReadCondHandle<InDet::BeamSpotData> beamSpotHandle { m_beamSpotKey }; + if(not beamSpotHandle.isValid()) ATH_MSG_ERROR("Cannot Retrieve " << m_beamSpotKey.key() ); + BPhysPVTools helper(&(*m_v0Tools), beamSpotHandle.cptr()); + helper.SetMinNTracksInPV(m_PV_minNTracks); + helper.SetSave3d(m_do3d); + + if(m_refitPV){ + if(vtxContainer->size() >0){ + StatusCode SC = helper.FillCandwithRefittedVertices(vtxContainer, pvContainer.cptr(), refPvContainer, &(*m_pvRefitter) , m_PV_max, m_DoVertexType); + if(SC.isFailure()){ + ATH_MSG_FATAL("refitting failed - check the vertices you passed"); + return SC; + } + } + }else{ + if(vtxContainer->size() >0)CHECK(helper.FillCandExistingVertices(vtxContainer, pvContainer.cptr(), m_DoVertexType)); + } + + //---------------------------------------------------- + // save in the StoreGate + //---------------------------------------------------- + SG::WriteHandle<xAOD::VertexContainer> handle(m_outputVtxContainerName); + ATH_CHECK(handle.record(std::unique_ptr<xAOD::VertexContainer>(vtxContainer ), std::unique_ptr<xAOD::VertexAuxContainer>(vtxAuxContainer ))); + + if(m_refitPV) { + SG::WriteHandle<xAOD::VertexContainer> handle(m_refPVContainerName); + ATH_CHECK(handle.record(std::unique_ptr<xAOD::VertexContainer>(refPvContainer ), std::unique_ptr<xAOD::VertexAuxContainer>(refPvAuxContainer ))); + } + } + 
+ if (!callTool) { //Fill with empty containers + SG::WriteHandle<xAOD::VertexContainer> handle(m_outputVtxContainerName); + ATH_CHECK(handle.record(std::unique_ptr<xAOD::VertexContainer>(new xAOD::VertexContainer ), + std::unique_ptr<xAOD::VertexAuxContainer>(new xAOD::VertexAuxContainer ))); + } + + return StatusCode::SUCCESS; + } +} diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/Select_Bmumu.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/Select_Bmumu.cxx new file mode 100644 index 0000000000000000000000000000000000000000..d1027b9b2c3559242498dc04d4ff8728fc7102dd --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/Select_Bmumu.cxx @@ -0,0 +1,563 @@ +/* + Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration +*/ + +//============================================================================ +// Select_Bmumu.cxx +//============================================================================ +// +// Author : Wolfgang Walkowiak <Wolfgang.Walkowiak@cern.ch.> +// Changes: +// +// Based on Select_onia2mumu.h. +// Original author: Daniel Scheirich <daniel.scheirich@cern.ch> +// +// Select B candidates for the B(s)mumu analysis including for +// the reference channels used. +// +// For an example see BPHY8.py . +// +// Job options provided by this class: +// - V0Tools -- ToolHandle for V0Tools (default: Trk::V0Tools) +// - HypothesisName -- name given to the hypothesis (passed flag) +// - InputVtxContainerName -- name of the input vertex container +// - TrkMasses" -- list of masses to be assigned to the tracks +// used for lifetime calculation +// (Make sure to give them in correct order!) 
+// - VtxMassHypo -- mass used in the calculation of lifetime +// - MassMin -- minimum of mass range +// - MassMax -- maximum of mass range +// - Chi2Max -- maximum chi2 cut +// - DoVertexType -- bits defining vertex association types +// to be used +// - Do3d -- add 3d proper time +// - BlindMassMin -- minimum of blinded mass range +// - BlindMassMax -- maximum blinded mass range +// - DoBlinding -- switch to enable blinding (default: false) +// - DoCutBlinded -- cut blinded vertices (default: false) +// - BlindOnlyAllMuonsTight -- only blind candidates with all tight muons +// - UseMuCalcMass -- use MUCALC mass in mass cuts (default: false) +// - SubDecVtxContNames -- names of containers with sub-decay candidates +// (in order of sub decays) +// - SubDecVtxHypoCondNames -- names of hypothesis required to be passed +// by sub-decay candidates +// - SubDecVtxHypoFlagNames -- names of hypothesis passed flags set by +// this algorithm on sub-decay candidates +// (taken as +// SupDecVtxHypoCondName+'_'+HypthesisName +// if not explicitely given) +// +//============================================================================ +// +#include "DerivationFrameworkBPhys/Select_Bmumu.h" + +#include <vector> +#include <string> +#include "TVector3.h" + +#include "TrkVertexAnalysisUtils/V0Tools.h" +#include "xAODTracking/VertexContainer.h" +#include "xAODTracking/VertexAuxContainer.h" +#include "xAODBPhys/BPhysHypoHelper.h" +#include "AthContainers/AuxElement.h" + +/* + * Some useful typedefs + */ +typedef ElementLink<xAOD::VertexContainer> VertexLink; +typedef std::vector<VertexLink> VertexLinkVector; + +namespace DerivationFramework { + + Select_Bmumu::Select_Bmumu(const std::string& t, + const std::string& n, + const IInterface* p) : + CfAthAlgTool(t,n,p), + m_v0Tools("Trk::V0Tools"), + m_muSelectionTool("CP::MuonSelectionTool/MuonSelectionTool") { + + declareInterface<DerivationFramework::IAugmentationTool>(this); + + // Declare tools + declareProperty("V0Tools", 
m_v0Tools); + declareProperty("MuonSelectionTool", m_muSelectionTool); + + // Declare user-defined properties + + declareProperty("HypothesisName" , m_hypoName = "A"); + declareProperty("InputVtxContainerName" , m_inputVtxContainerName = "JpsiCandidates"); + declareProperty("TrkMasses" , m_trkMasses = std::vector<double>(2, 105.658) ); + declareProperty("VtxMassHypo" , m_massHypo = 3096.916 ); + declareProperty("MassMax" , m_massMax = 6000); + declareProperty("MassMin" , m_massMin = 2000); + declareProperty("Chi2Max" , m_chi2Max = 200); + declareProperty("DoVertexType" , m_DoVertexType = 7); + declareProperty("Do3d" , m_do3d = false); + declareProperty("BlindMassMin" , m_blindMassMin = 0.); + declareProperty("BlindMassMax" , m_blindMassMax = 0.); + declareProperty("DoBlinding" , m_doBlinding = false); + declareProperty("DoCutBlinded" , m_doCutBlinded = false); + declareProperty("BlindOnlyAllMuonsTight", m_blindOnlyAllMuonsTight = false); + declareProperty("UseMuCalcMass" , m_useMuCalcMass = false); + declareProperty("SubDecVtxContNames" , m_subDecVtxContNames = {}); + declareProperty("SubDecVtxHypoCondNames", m_subDecVtxHypoCondNames = {}); + declareProperty("SubDecVtxHypoFlagNames", m_subDecVtxHypoFlagNames = {}); + } + //---------------------------------------------------------------------------- + StatusCode Select_Bmumu::initialize() { + + ATH_MSG_DEBUG("in initialize()"); + + // retrieve V0 tools + CHECK( m_v0Tools.retrieve() ); + + // retrieve MuonSelectionTool + if ( m_blindOnlyAllMuonsTight ) { + CHECK( m_muSelectionTool.retrieve() ); + } + + // check length of sub-decay vertex container and required hypo name + // vectors + if ( m_subDecVtxContNames.size() != m_subDecVtxHypoCondNames.size() ) { + ATH_MSG_ERROR("initialize(): number of elements for options " + << "SubDecVtxContNames and SubDecVtxHypoCondNames does not " + << "match : " << m_subDecVtxContNames.size() + << " != " << m_subDecVtxHypoCondNames << " !!"); + } + // check the length of condition 
and flag hypo name vectors and append + // to the later if necessary + if ( m_subDecVtxHypoCondNames.size() > m_subDecVtxHypoFlagNames.size() ) { + ATH_MSG_INFO("initialize(): SubDecVtxHypoFlagNames (" + << m_subDecVtxHypoFlagNames.size() + << ") < SubDecVtxHypoCondNames (" + << m_subDecVtxHypoCondNames.size() + << ") ... appending to the first."); + for ( unsigned int i = m_subDecVtxHypoFlagNames.size(); + i < m_subDecVtxHypoCondNames.size(); ++i) { + std::string flagname = m_hypoName+"_"+m_subDecVtxHypoCondNames[i]; + ATH_MSG_INFO("initialize(): SubDecVtxHypoFlagNames[" << i << "] = " + << flagname); + m_subDecVtxHypoFlagNames.push_back(flagname); + } + } else if ( m_subDecVtxHypoCondNames.size() + < m_subDecVtxHypoFlagNames.size() ) { + ATH_MSG_ERROR("initialize(): SubDecVtxHypoFlagNames (" + << m_subDecVtxHypoFlagNames.size() + << ") > SubDecVtxHypoCondNames (" + << m_subDecVtxHypoCondNames.size() + << ") ! Configuration error!"); + } + return StatusCode::SUCCESS; + } + //---------------------------------------------------------------------------- + StatusCode Select_Bmumu::finalize() { + + // everything all right + return StatusCode::SUCCESS; + } + //--------------------------------------------------------------------------- + void Select_Bmumu::ProcessVertex(xAOD::BPhysHypoHelper &bcand, + xAOD::BPhysHelper::pv_type pv_t) const { + + constexpr float errConst = -9999999; + const xAOD::Vertex* pv = bcand.pv(pv_t); + if (pv) { + // decorate the vertex. 
+ // Proper decay time assuming constant mass hypothesis m_massHypo + BPHYS_CHECK( bcand.setTau(m_v0Tools->tau(bcand.vtx(), pv, m_massHypo), + pv_t, + xAOD::BPhysHypoHelper::TAU_CONST_MASS) ); + // Proper decay time assuming error constant mass hypothesis m_massHypo + BPHYS_CHECK( bcand.setTauErr( m_v0Tools->tauError(bcand.vtx(), pv, + m_massHypo), + pv_t, + xAOD::BPhysHypoHelper::TAU_CONST_MASS) ); + + BPHYS_CHECK( bcand.setTau(m_v0Tools->tau(bcand.vtx(),pv, m_trkMasses), + pv_t, + xAOD::BPhysHypoHelper::TAU_INV_MASS) ); + + BPHYS_CHECK( bcand.setTauErr(m_v0Tools->tauError(bcand.vtx(), pv, + m_trkMasses), + pv_t, + xAOD::BPhysHypoHelper::TAU_INV_MASS) ); + //enum pv_type {PV_MAX_SUM_PT2, PV_MIN_A0, PV_MIN_Z0, PV_MIN_Z0_BA}; + } else { + + BPHYS_CHECK( bcand.setTau(errConst, pv_t, + xAOD::BPhysHypoHelper::TAU_CONST_MASS) ); + // Proper decay time assuming error constant mass hypothesis m_massHypo + BPHYS_CHECK( bcand.setTauErr( errConst, + pv_t, + xAOD::BPhysHypoHelper::TAU_CONST_MASS) ); + + BPHYS_CHECK( bcand.setTau( errConst, + pv_t, + xAOD::BPhysHypoHelper::TAU_INV_MASS) ); + + BPHYS_CHECK( bcand.setTauErr( errConst, + pv_t, + xAOD::BPhysHypoHelper::TAU_INV_MASS) ); + } + + if(m_do3d){ + + BPHYS_CHECK( bcand.setTau3d( pv ? + m_v0Tools->tau3D(bcand.vtx(), pv, + m_massHypo) + : errConst, pv_t, + xAOD::BPhysHypoHelper::TAU_CONST_MASS) ); + // Proper decay time assuming error constant mass hypothesis m_massHypo + BPHYS_CHECK( bcand.setTau3dErr( pv ? + m_v0Tools->tau3DError(bcand.vtx(), pv, + m_massHypo) + : errConst, pv_t, + xAOD::BPhysHypoHelper::TAU_CONST_MASS) + ); + + BPHYS_CHECK( bcand.setTau3d( pv ? + m_v0Tools->tau3D(bcand.vtx(), pv, + m_trkMasses) + : errConst, pv_t, + xAOD::BPhysHypoHelper::TAU_INV_MASS) ); + + BPHYS_CHECK( bcand.setTau3dErr( pv ? 
+ m_v0Tools->tau3DError(bcand.vtx(), pv, + m_trkMasses) + : errConst, pv_t, + xAOD::BPhysHypoHelper::TAU_INV_MASS) + ); + } + + } + //--------------------------------------------------------------------------- + StatusCode Select_Bmumu::addBranches() const { + + // Jpsi container and its auxilliary store + xAOD::VertexContainer* bcandContainer = NULL; + xAOD::VertexAuxContainer* bcandAuxContainer = NULL; + + // retrieve from the StoreGate + CHECK(evtStore()->retrieve(bcandContainer, m_inputVtxContainerName)); + CHECK(evtStore()->retrieve(bcandAuxContainer, + m_inputVtxContainerName+"Aux.")); + + // for sub-decays + std::vector<xAOD::VertexContainer*> subCandConts; + std::vector<xAOD::VertexAuxContainer*> subCandAuxConts; + + // retrieve from StoreGate + for (auto cname : m_subDecVtxContNames) { + xAOD::VertexContainer* subCandCont = NULL; + xAOD::VertexAuxContainer* subCandAuxCont = NULL; + CHECK(evtStore()->retrieve(subCandCont , cname)); + CHECK(evtStore()->retrieve(subCandAuxCont, cname+"Aux.")); + subCandConts.push_back(subCandCont); + subCandAuxConts.push_back(subCandAuxCont); + } + + // preset pass flag to false for subdecays + for (unsigned int isub=0; isub < subCandConts.size(); ++isub) { + xAOD::VertexContainer* subCandCont = subCandConts[isub]; + if ( subCandCont != NULL ) { + for (xAOD::VertexContainer::iterator it = subCandCont->begin(); + it != subCandCont->end(); ++it) { + if ( *it != NULL ) { + // only set subdecay passed flag to false if not yet set at all + setPassIfNotAvailable(**it, m_subDecVtxHypoFlagNames[isub], false); + // set subdecay blinding flag to true if not yet set at all + // and blinding is requested + if ( m_doBlinding ) { + setPassIfNotAvailable(**it, + m_subDecVtxHypoFlagNames[isub]+"_blinded", + true); + } + } else { + ATH_MSG_WARNING("addBranches(): NULL pointer elements in " + "xAOD::VertexContainer !!"); + } + } // for subCandCont + } // if subCandCont != NULL + } // for subCandConts + + bool doPt = (m_DoVertexType & 1) != 0; 
+ bool doA0 = (m_DoVertexType & 2) != 0; + bool doZ0 = (m_DoVertexType & 4) != 0; + bool doZ0BA = (m_DoVertexType & 8) != 0; + + // loop over B candidates and perform selection and augmentation + // counters + int nPassMassCuts = 0; + int nPassChi2Cut = 0; + int nPassPrecVtxCut = 0; + int nInBlindedRegion = 0; + int nInBlindedRegionAllMuonsTight = 0; + xAOD::VertexContainer::iterator bcandItr = bcandContainer->begin(); + for (; bcandItr!=bcandContainer->end(); ++bcandItr) { + // create BPhysHypoHelper + xAOD::BPhysHypoHelper bcand(m_hypoName, *bcandItr); + + //---------------------------------------------------- + // decorate the vertex - part 1 + //---------------------------------------------------- + // a) invariant mass and error + if ( !bcand.setMass(m_trkMasses) ) + ATH_MSG_WARNING("Decoration bcand.setMass failed"); + + double massErr = m_v0Tools->invariantMassError(bcand.vtx(), m_trkMasses); + if ( !bcand.setMassErr(massErr) ) + ATH_MSG_WARNING("Decoration bcand.setMassErr failed"); + + // b) proper decay time and error: + // retrieve the refitted PV (or the original one, + // if the PV refitting was turned off) + // -- deferred to after the selection -- + /* + if (doPt) ProcessVertex(bcand, xAOD::BPhysHelper::PV_MAX_SUM_PT2); + if (doA0) ProcessVertex(bcand, xAOD::BPhysHelper::PV_MIN_A0); + if (doZ0) ProcessVertex(bcand, xAOD::BPhysHelper::PV_MIN_Z0); + if (doZ0BA) ProcessVertex(bcand, xAOD::BPhysHelper::PV_MIN_Z0_BA); + */ + + //---------------------------------------------------- + // perform the selection (i.e. flag the vertex) + //---------------------------------------------------- + // flag the vertex indicating that it is selected by this selector + bcand.setPass(true); + if ( m_doBlinding ) { + setPass(*bcand.vtx(), + m_hypoName+"_blinded", false); + } + + // now we check other cuts. 
if one of them didn't pass, set the flag to 0 + // and continue to the next candidate: + + // 1) invariant mass cuts + bool passedMuCalcMassCut(m_useMuCalcMass); + bool blindedMuCalcMass(true); + if ( m_useMuCalcMass ) { + std::string bname = m_hypoName+"_MUCALC_mass"; + static SG::AuxElement::Accessor<float> mucalcAcc(bname); + if ( mucalcAcc.isAvailable(**bcandItr) ) { + passedMuCalcMassCut = massCuts(mucalcAcc(**bcandItr)); + blindedMuCalcMass = massInBlindedRegion(mucalcAcc(**bcandItr)); + } else { + passedMuCalcMassCut = false; + blindedMuCalcMass = false; + ATH_MSG_INFO("MUCALC mass not available: " << bname << " !"); + } + } + bool passedMassCut = massCuts(bcand.mass()); + bool blindedMass = massInBlindedRegion(bcand.mass()); + + // 1a) muon quality cuts + bool allMuonsTight = + !m_blindOnlyAllMuonsTight || checkAllMuonsTight(bcand.muons()); + + // 1b) mark candidates in blinded region + if ( blindedMass && blindedMuCalcMass ) { + if ( m_doBlinding ) { + nInBlindedRegion++; + if ( allMuonsTight ) { + nInBlindedRegionAllMuonsTight++; + setPass(*bcand.vtx(), + m_hypoName+"_blinded", true); + } + } + } + + // 1c) cut on the mass range + if ( !(passedMassCut || passedMuCalcMassCut) ) { + bcand.setPass(false); // flag as failed + continue; + } + nPassMassCuts++; + + // 2) chi2 cut + if ( bcand.vtx()->chiSquared() > m_chi2Max) { + bcand.setPass(false);; // flag as failed + continue; + } + nPassChi2Cut++; + + // 3) preceeding vertices: within their mass ranges? + int npVtx = bcand.nPrecedingVertices(); + if ( npVtx > (int)m_subDecVtxContNames.size() ) { + ATH_MSG_WARNING("addBranches(): npVtx > m_subDecVtxContNames.size() !" 
+ " (" << npVtx << " > " << m_subDecVtxContNames.size() + << ")"); + } + npVtx = std::min(npVtx, (int)m_subDecVtxContNames.size()); + // check preceeding vertices + bool pVtxOk = true; + for (int ipv=0; ipv<npVtx; ++ipv) { + const xAOD::Vertex* pVtx = bcand.precedingVertex(ipv); + if ( !pass(*pVtx, m_subDecVtxHypoCondNames[ipv]) ) { + pVtxOk = false; + continue; + } + } + if ( !pVtxOk ) { + bcand.setPass(false);; // flag as failed + continue; + } + // mark preceeding vertices + for (int ipv=0; ipv<npVtx; ++ipv) { + setPass(*bcand.precedingVertex(ipv), + m_subDecVtxHypoFlagNames[ipv], true); + if ( m_doBlinding && !(blindedMass && blindedMuCalcMass + && allMuonsTight) ) { + setPass(*bcand.precedingVertex(ipv), + m_subDecVtxHypoFlagNames[ipv]+"_blinded", false); + } + } + nPassPrecVtxCut++; + + //---------------------------------------------------- + // decorate the vertex - part 2 + //---------------------------------------------------- + // b) proper decay time and error: + // retrieve the refitted PV (or the original one, + // if the PV refitting was turned off) + if (doPt) ProcessVertex(bcand, xAOD::BPhysHelper::PV_MAX_SUM_PT2); + if (doA0) ProcessVertex(bcand, xAOD::BPhysHelper::PV_MIN_A0); + if (doZ0) ProcessVertex(bcand, xAOD::BPhysHelper::PV_MIN_Z0); + if (doZ0BA) ProcessVertex(bcand, xAOD::BPhysHelper::PV_MIN_Z0_BA); + + } // end of loop over bcand candidates + + // counters + // event level + addEvent("allEvents"); + if ( bcandContainer->size() > 0 ) addEvent("eventsWithCands"); + if ( nPassMassCuts > 0 ) addEvent("massCutEvents"); + if ( nPassChi2Cut > 0 ) addEvent("chi2CutEvents"); + if ( nPassPrecVtxCut > 0 ) addEvent("precVtxCutEvents"); + if ( m_doBlinding && nInBlindedRegion > 0 ) addEvent("blindedRegionEvents"); + // candidate level + addToCounter("allCandidates" , bcandContainer->size()); + addToCounter("massCutCandidates" , nPassMassCuts); + addToCounter("chi2CutCandidates" , nPassChi2Cut); + addToCounter("precVtxCutCandidates", nPassPrecVtxCut); + 
if ( m_doBlinding ) { + addToCounter("blindedRegionCandidates", nInBlindedRegion); + if ( m_blindOnlyAllMuonsTight ) { + addToCounter("blindedRegionCandidatesWithAllMuonsTight", + nInBlindedRegionAllMuonsTight); + } + } + + // all OK + return StatusCode::SUCCESS; + } + //--------------------------------------------------------------------------- + // Check whether mass cuts (including a possibly blinding region cut) + // are passed. + //--------------------------------------------------------------------------- + bool Select_Bmumu::massCuts(float mass) const { + + return (mass > m_massMin && mass < m_massMax) + && !(m_doBlinding && m_doCutBlinded && massInBlindedRegion(mass) ); + } + //--------------------------------------------------------------------------- + // Check whether mass cuts (including a possibly blinding region cut) + // are passed. + //--------------------------------------------------------------------------- + bool Select_Bmumu::massInBlindedRegion(float mass) const { + return ( mass > m_blindMassMin && mass < m_blindMassMax ); + } + //-------------------------------------------------------------------------- + // Check whether all muons are of quality tight. + //-------------------------------------------------------------------------- + bool Select_Bmumu::checkAllMuonsTight(const std::vector<const xAOD::Muon*>& + muons, int maxMuonsToCheck) const { + + bool allTight(true); + int ncheckMax = muons.size(); + if ( maxMuonsToCheck > -1 ) { + ncheckMax = std::min((int)muons.size(), maxMuonsToCheck); + } + for (int imu=0; imu < ncheckMax; ++imu) { + xAOD::Muon::Quality muQuality = + m_muSelectionTool->getQuality(*muons[imu]); + if ( !(muQuality <= xAOD::Muon::Tight) ) { + allTight = false; + break; + } + } + return allTight; + } + //--------------------------------------------------------------------------- + // Helper to check whether an element is marked as passing a specific + // hypothesis. 
+ //--------------------------------------------------------------------------- + bool Select_Bmumu::pass(const SG::AuxElement& em, std::string hypo) const { + + SG::AuxElement::Accessor<Char_t> flagAcc("passed_"+hypo); + return flagAcc.isAvailable(em) && flagAcc(em) != 0; + } + //--------------------------------------------------------------------------- + // Helper to set an element marked as passing a specific hypothesis. + //--------------------------------------------------------------------------- + bool Select_Bmumu::setPass(const SG::AuxElement& em, std::string hypo, + bool passVal) const { + + SG::AuxElement::Decorator<Char_t> flagDec("passed_"+hypo); + flagDec(em) = passVal; + return true; + } + //--------------------------------------------------------------------------- + // Helper to set an element marked as passing a specific hypothesis + // if the element doesn't have the specific flag yet. + // Returns true if action had to be taken. + //--------------------------------------------------------------------------- + bool Select_Bmumu::setPassIfNotAvailable(SG::AuxElement& em, std::string hypo, + bool passVal) const { + + SG::AuxElement::Accessor<Char_t> flagAcc("passed_"+hypo); + bool exists = flagAcc.isAvailable(em); + if ( !exists ) { + setPass(em, hypo, passVal); + } + return !exists; + } + //--------------------------------------------------------------------------- + // Fetch a vector of preceeding vertices for a specific vertex + //--------------------------------------------------------------------------- + /* + std::vector<xAOD::Vertex*> + Select_Bmumu::getPrecedingVertices(const xAOD::Vertex* vtx) { + + // new vector of vertices + std::vector<xAOD::Vertex*> vtxList; + + // Create auxiliary branches accessors + static SG::AuxElement::Accessor<VertexLinkVector> + precedingVertexLinksAcc("PrecedingVertexLinks"); + + // check if branch exists + if( precedingVertexLinksAcc.isAvailable(*vtx) ) { + + // retrieve the precedingVertex links... 
+ const VertexLinkVector& precedingVertexLinks = + precedingVertexLinksAcc(*vtx); + + // ... and check if they are all valid + for ( VertexLinkVector::const_iterator precedingVertexLinksItr = + precedingVertexLinks.begin(); + precedingVertexLinksItr!=precedingVertexLinks.end(); + ++precedingVertexLinksItr) { + // check if links are valid + if( (*precedingVertexLinksItr).isValid() ) { + // xAOD::Vertex* vtx2 = *precedingVertexLinkItr; + // vtxList.push_back(*(*precedingVertexLinksItr)); + } + } // for + } // if available + + return vtxList; + } + */ + //--------------------------------------------------------------------------- + +} // namespace DerivationFramework diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/Select_onia2mumu.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/Select_onia2mumu.cxx new file mode 100644 index 0000000000000000000000000000000000000000..33913419bf79bd17e54b7240420f7fe3ecafee46 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/Select_onia2mumu.cxx @@ -0,0 +1,197 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ + +///////////////////////////////////////////////////////////////// +// Select_onia2mumu.cxx +/////////////////////////////////////////////////////////////////// +// Author: Daniel Scheirich <daniel.scheirich@cern.ch> +// Based on the Integrated Simulation Framework +// +// Basic Jpsi->mu mu derivation example + +#include "DerivationFrameworkBPhys/Select_onia2mumu.h" + +#include "TrkVertexAnalysisUtils/V0Tools.h" +#include "xAODBPhys/BPhysHypoHelper.h" + +#include <vector> +#include <string> + +namespace DerivationFramework { + + Select_onia2mumu::Select_onia2mumu(const std::string& t, + const std::string& n, + const IInterface* p) : + AthAlgTool(t,n,p), + m_v0Tools("Trk::V0Tools") + { + declareInterface<DerivationFramework::IAugmentationTool>(this); + + // Declare tools + declareProperty("V0Tools", m_v0Tools); 
+ + // Declare user-defined properties + + declareProperty("HypothesisName" , m_hypoName = "A"); + declareProperty("InputVtxContainerName", m_inputVtxContainerName = "JpsiCandidates"); + declareProperty("TrkMasses" , m_trkMasses = std::vector<double>(2, 105.658) ); + declareProperty("VtxMassHypo" , m_massHypo = 3096.916 ); + declareProperty("MassMax" , m_massMax = 6000); + declareProperty("MassMin" , m_massMin = 2000); + declareProperty("Chi2Max" , m_chi2Max = 200); + declareProperty("DoVertexType" , m_DoVertexType = 7); + declareProperty("LxyMin" , m_lxyMin = std::numeric_limits<double>::lowest()); + declareProperty("Do3d" , m_do3d = false); + + } + + // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + + StatusCode Select_onia2mumu::initialize() + { + + ATH_MSG_DEBUG("in initialize()"); + + // retrieve V0 tools + CHECK( m_v0Tools.retrieve() ); + ATH_CHECK(m_inputVtxContainerName.initialize()); + + return StatusCode::SUCCESS; + + } + + // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + + void Select_onia2mumu::ProcessVertex(xAOD::BPhysHypoHelper &onia, xAOD::BPhysHelper::pv_type pv_t) const{ + constexpr float errConst = -9999999; + const xAOD::Vertex* pv = onia.pv(pv_t); + if(pv) { + // decorate the vertex. 
+ // Proper decay time assuming constant mass hypothesis m_massHypo + BPHYS_CHECK( onia.setTau( m_v0Tools->tau(onia.vtx(), pv, m_massHypo), + pv_t, + xAOD::BPhysHypoHelper::TAU_CONST_MASS) ); + // Proper decay time assuming error constant mass hypothesis m_massHypo + BPHYS_CHECK( onia.setTauErr( m_v0Tools->tauError(onia.vtx(), pv, m_massHypo), + pv_t, + xAOD::BPhysHypoHelper::TAU_CONST_MASS) ); + + BPHYS_CHECK( onia.setTau( m_v0Tools->tau(onia.vtx(), pv, m_trkMasses), + pv_t, + xAOD::BPhysHypoHelper::TAU_INV_MASS) ); + + BPHYS_CHECK( onia.setTauErr( m_v0Tools->tauError(onia.vtx(), pv, m_trkMasses), + pv_t, + xAOD::BPhysHypoHelper::TAU_INV_MASS) ); + + //enum pv_type {PV_MAX_SUM_PT2, PV_MIN_A0, PV_MIN_Z0, PV_MIN_Z0_BA}; + }else{ + + + BPHYS_CHECK( onia.setTau(errConst, pv_t, + xAOD::BPhysHypoHelper::TAU_CONST_MASS) ); + // Proper decay time assuming error constant mass hypothesis m_massHypo + BPHYS_CHECK( onia.setTauErr( errConst, + pv_t, + xAOD::BPhysHypoHelper::TAU_CONST_MASS) ); + + BPHYS_CHECK( onia.setTau( errConst, + pv_t, + xAOD::BPhysHypoHelper::TAU_INV_MASS) ); + + BPHYS_CHECK( onia.setTauErr( errConst, + pv_t, + xAOD::BPhysHypoHelper::TAU_INV_MASS) ); + } + + if(m_do3d){ + BPHYS_CHECK( onia.setTau3d( pv ? m_v0Tools->tau3D(onia.vtx(), pv, m_massHypo) : errConst, + pv_t, + xAOD::BPhysHypoHelper::TAU_CONST_MASS) ); + // Proper decay time assuming error constant mass hypothesis m_massHypo + BPHYS_CHECK( onia.setTau3dErr( pv ? m_v0Tools->tau3DError(onia.vtx(), pv, m_massHypo) : errConst, + pv_t, + xAOD::BPhysHypoHelper::TAU_CONST_MASS) ); + + BPHYS_CHECK( onia.setTau3d( pv ? m_v0Tools->tau3D(onia.vtx(), pv, m_trkMasses) : errConst, + pv_t, + xAOD::BPhysHypoHelper::TAU_INV_MASS) ); + + BPHYS_CHECK( onia.setTau3dErr( pv ? 
m_v0Tools->tau3DError(onia.vtx(), pv, m_trkMasses) : errConst, + pv_t, + xAOD::BPhysHypoHelper::TAU_INV_MASS) ); + + } + + } + + + StatusCode Select_onia2mumu::addBranches() const + { + + SG::ReadHandle<xAOD::VertexContainer> oniaContainer(m_inputVtxContainerName); + + bool doPt = (m_DoVertexType & 1) != 0; + bool doA0 = (m_DoVertexType & 2) != 0; + bool doZ0 = (m_DoVertexType & 4) != 0; + bool doZ0BA = (m_DoVertexType & 8) != 0; + // loop over onia candidates and perform selection and augmentation + xAOD::VertexContainer::const_iterator oniaItr = oniaContainer->begin(); + for(; oniaItr!=oniaContainer->end(); ++oniaItr) { + // create BPhysHypoHelper + xAOD::BPhysHypoHelper onia(m_hypoName, *oniaItr); + if((*oniaItr)->nTrackParticles() != m_trkMasses.size()) + ATH_MSG_WARNING("Vertex has " << (*oniaItr)->nTrackParticles() << " while provided masses " << m_trkMasses.size()); + //---------------------------------------------------- + // decorate the vertex + //---------------------------------------------------- + // a) invariant mass and error + if( !onia.setMass(m_trkMasses) ) ATH_MSG_WARNING("Decoration onia.setMass failed"); + + double massErr = m_v0Tools->invariantMassError(onia.vtx(), m_trkMasses); + if( !onia.setMassErr(massErr) ) ATH_MSG_WARNING("Decoration onia.setMassErr failed"); + + // b) proper decay time and error: + // retrieve the refitted PV (or the original one, if the PV refitting was turned off) + if(doPt) ProcessVertex(onia, xAOD::BPhysHelper::PV_MAX_SUM_PT2); + if(doA0) ProcessVertex(onia, xAOD::BPhysHelper::PV_MIN_A0); + if(doZ0) ProcessVertex(onia, xAOD::BPhysHelper::PV_MIN_Z0); + if(doZ0BA) ProcessVertex(onia, xAOD::BPhysHelper::PV_MIN_Z0_BA); + + //---------------------------------------------------- + // perform the selection (i.e. flag the vertex) + //---------------------------------------------------- + // flag the vertex indicating that it is selected by this selector + onia.setPass(true); + + // now we check othe cuts. 
if one of them didn't pass, set the flag to 0 + // and continue to the next candidate: + + // 1) invariant mass cut + if( onia.mass() < m_massMin || onia.mass() > m_massMax) { + onia.setPass(false); // flag as failed + continue; + } + + // 2) chi2 cut + if( onia.vtx()->chiSquared() > m_chi2Max) { + onia.setPass(false);; // flag as failed + continue; + } + // 3) lxy cut + if( onia.lxy(xAOD::BPhysHelper::PV_MAX_SUM_PT2) < m_lxyMin) { + onia.setPass(false);; // flag as failed + continue; + } + + } // end of loop over onia candidates + + // all OK + return StatusCode::SUCCESS; + } + + // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + + +} diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/Thin_vtxDuplicates.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/Thin_vtxDuplicates.cxx new file mode 100644 index 0000000000000000000000000000000000000000..abb34bcd436cf5c99517e5a0149911977e601b8b --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/Thin_vtxDuplicates.cxx @@ -0,0 +1,176 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ + +///////////////////////////////////////////////////////////////// +// Thin_vtxDuplicates.cxx +/////////////////////////////////////////////////////////////////// +// Matteo Bedognetti (matteo.bedognetti@cern.ch) +//Based on Thin_vtxTrk.cxx, by + + + +#include "DerivationFrameworkBPhys/Thin_vtxDuplicates.h" +#include "xAODTracking/TrackParticleContainer.h" +#include "xAODBPhys/BPhysHypoHelper.h" +#include <vector> +#include <string> +#include <algorithm> // for the sort function +#include <iomanip> +#include "StoreGate/ThinningHandle.h" +// Constructor +DerivationFramework::Thin_vtxDuplicates::Thin_vtxDuplicates(const std::string& t, const std::string& n, const IInterface* p ) : + AthAlgTool(t,n,p), + // m_acceptanceR(-1.), + m_noFlags(false), + m_nVtxTot(0), + m_nVtxPass(0) +{ + 
declareInterface<DerivationFramework::IThinningTool>(this); + + declareProperty("VertexContainerName" , m_vertexContainerNames); + declareProperty("PassFlags" , m_passFlags); + //declareProperty("AcceptanceRadius" , m_acceptanceR); + declareProperty("ApplyAnd" , m_and = true); //This will be applied depending on the order in which the thinning tools are added to the kernel + declareProperty("IgnoreFlags" , m_noFlags); + //declareProperty("ApplyAndForTracks" , m_trackAnd = false); + //declareProperty("ThinTracks" , m_thinTracks = true); +} + +// Destructor +DerivationFramework::Thin_vtxDuplicates::~Thin_vtxDuplicates() = default; + +// Athena initialize and finalize +StatusCode DerivationFramework::Thin_vtxDuplicates::initialize() +{ + // Decide which collections need to be checked for ID TrackParticles + ATH_MSG_VERBOSE("initialize() ..."); + ATH_CHECK(m_vertexContainerNames.initialize(m_streamName)); + + + if (m_passFlags.empty()) { + ATH_MSG_FATAL("No pass flags provided for thinning."); + return StatusCode::FAILURE; + } else { + for(auto itr = m_passFlags.cbegin(); itr!=m_passFlags.cend(); ++itr) { + ATH_MSG_INFO("Vertices must pass the \"" << itr->key() << "\" selection"); + } + } + + for(auto &key : m_passFlags){ + key = m_vertexContainerNames.key() + '.' 
+ key.key(); + } + ATH_CHECK(m_passFlags.initialize()); + return StatusCode::SUCCESS; +} + +StatusCode DerivationFramework::Thin_vtxDuplicates::finalize() +{ + ATH_MSG_VERBOSE("finalize() ..."); + ATH_MSG_INFO("Processed "<< m_nVtxTot <<" vertices, "<< m_nVtxPass<< " were retained "); + + return StatusCode::SUCCESS; +} + +// The thinning itself +StatusCode DerivationFramework::Thin_vtxDuplicates::doThinning() const +{ + // retieve vertex + SG::ThinningHandle< xAOD::VertexContainer > vertexContainer(m_vertexContainerNames); + std::vector<bool> vtxMask(vertexContainer->size(), true); // default: keep all vertices + int vtxTot = 0; + int nVtxPass = 0; + // loop over vertices + int k = 0; + std::vector<SG::ReadDecorHandle<xAOD::VertexContainer, Char_t>> handles; + handles.reserve(m_passFlags.size()); + for(const auto &key : m_passFlags){ + handles.emplace_back(key); + if(!handles.back().isPresent()) return StatusCode::FAILURE; + } + for(auto vtxItr = vertexContainer->cbegin(); vtxItr!=vertexContainer->cend(); ++vtxItr, ++k) { + const xAOD::Vertex* vtx = *vtxItr; + // check if the vertex passed the required selections criteria (is run when the vertex is already excluded, because the counter needs the info) + bool passed = false; + if(m_noFlags){passed = true; vtxTot++; } + else{ + for(auto &flagAcc : handles) { + if(flagAcc(*vtx) != 0) { + passed = true; + vtxTot++;//Have to count the ones which are accepted to start with + break; + } + } // end of loop over flags + } + + // Skip if it has already been identified as duplicate + if(vtxMask[k] == false)continue; //After the flag-check to have the total-passed work correctly + + if(!passed)vtxMask[k]= false; + + if(passed) { + // vertex passed the selection + nVtxPass++; + + // determine the sum of the tracks at vertex as centre for the cone + std::vector<const xAOD::TrackParticle*> presentVertex, compareVertex; + + //Fill in the present vertex, for later comparison against other vertices + presentVertex.clear(); + 
for(uint j=0; j<vtx->nTrackParticles(); ++j) { + presentVertex.push_back(vtx->trackParticle(j)); + } + sort( presentVertex.begin(), presentVertex.end() ); //Sort the trackparticles BY POINTER ADDRESS + + //Loop over the remaining vertices and remove them if needed + int loop_k = k+1; + for(auto vtxLoopItr = vtxItr+1; vtxLoopItr!=vertexContainer->cend(); vtxLoopItr++, loop_k++){ + + const xAOD::Vertex* loop_vtx = *vtxLoopItr; + + //Vertices are distinct if have different size + if(vtx->nTrackParticles() != loop_vtx->nTrackParticles())continue; + + //If the vertex is still active load and compare + if(vtxMask[loop_k]){ + + compareVertex.clear(); + for(uint j=0; j<loop_vtx->nTrackParticles(); ++j) { + compareVertex.push_back(loop_vtx->trackParticle(j)); + } + + std::sort( compareVertex.begin(), compareVertex.end()); + + vtxMask[loop_k] = false; + + ATH_MSG_DEBUG("Compared tracks: "); + ATH_MSG_DEBUG(std::setw(14)<<compareVertex[0]<<std::setw(14) << compareVertex[1]<<std::setw(14)<<compareVertex[2]); + ATH_MSG_DEBUG(std::setw(14)<<presentVertex[0]<<std::setw(14) << presentVertex[1]<<std::setw(14)<<presentVertex[2]); + + for(uint j=0; j<loop_vtx->nTrackParticles(); ++j) { + if( compareVertex[j] != presentVertex[j] ){vtxMask[loop_k] = true; break;} + } + ATH_MSG_DEBUG("Verdict:"<<(vtxMask[loop_k]? "keep": "erase") ); + } + + } // Endo of extra loop over remaining vertices + + } // if( passed ) + } // end of loop over vertices + + // Execute the thinning service based on the vtxMask. 
+ if (m_and) { + vertexContainer.keep(vtxMask, SG::ThinningHandleBase::Op::And); + } + if (!m_and) { + vertexContainer.keep(vtxMask, SG::ThinningHandleBase::Op::Or); + } + + m_nVtxTot.fetch_add( vtxTot, std::memory_order_relaxed); + m_nVtxPass.fetch_add( nVtxPass, std::memory_order_relaxed); + + + + return StatusCode::SUCCESS; +} + diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/Thin_vtxTrk.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/Thin_vtxTrk.cxx new file mode 100644 index 0000000000000000000000000000000000000000..14628f870cdcd411d3d034b359732a2d108c836c --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/Thin_vtxTrk.cxx @@ -0,0 +1,200 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ + +///////////////////////////////////////////////////////////////// +// Thin_vtxTrk.cxx +/////////////////////////////////////////////////////////////////// +// Author: James Catmore (James.Catmore@cern.ch) +// This is a trivial example of an implementation of a thinning tool +// which removes all ID tracks which do not pass a user-defined cut + +#include "DerivationFrameworkBPhys/Thin_vtxTrk.h" + +#include "xAODBPhys/BPhysHypoHelper.h" +#include "StoreGate/ThinningHandle.h" +#include <vector> +#include <string> +// Constructor +DerivationFramework::Thin_vtxTrk::Thin_vtxTrk(const std::string& t, const std::string& n, const IInterface* p ) : + AthAlgTool(t,n,p), + m_ntot(0), + m_npass(0), + m_acceptanceR(-1.), // Do not add tracks within a cone from the vertex by default + m_nVtxTot(0), + m_nVtxPass(0), + m_noFlags(false) +{ + declareInterface<DerivationFramework::IThinningTool>(this); + + declareProperty("TrackParticleContainerName", m_trackParticleContainerName = "InDetTrackParticles"); + declareProperty("VertexContainerNames" , m_vertexContainerName); + declareProperty("PassFlags" , m_passFlags); + declareProperty("AcceptanceRadius" , m_acceptanceR); + 
declareProperty("IgnoreFlags" , m_noFlags); + declareProperty("ApplyAnd" , m_and = false); + declareProperty("ApplyAndForTracks" , m_trackAnd = false); + declareProperty("ThinTracks" , m_thinTracks = true); +} + +// Destructor +DerivationFramework::Thin_vtxTrk::~Thin_vtxTrk() = default; + +// Athena initialize and finalize +StatusCode DerivationFramework::Thin_vtxTrk::initialize() +{ + // Decide which collections need to be checked for ID TrackParticles + ATH_MSG_VERBOSE("initialize() ..."); + ATH_CHECK(m_trackParticleContainerName.initialize(m_streamName)); + + + if( m_noFlags){ + ATH_MSG_INFO("IgnoreFlags is set, all vertices in the container will be kept"); + } + + if( ! m_noFlags){ + if (m_passFlags.empty()) { + ATH_MSG_FATAL("No pass flags provided for thinning."); + return StatusCode::FAILURE; + } else { + for(auto itr = m_passFlags.begin(); itr!=m_passFlags.end(); ++itr) { + ATH_MSG_INFO("Vertices must pass the \"" << *itr << "\" selection"); + } + } + } + + if (m_acceptanceR > 0.) { + ATH_MSG_INFO("Extra tracks must be within cone of "<<m_acceptanceR<<" from vertex candidate."); + } + + for(auto &handle : m_vertexContainerName){ + ATH_CHECK(handle.initialize(m_streamName)); + } + for(const auto &tracknames : m_vertexContainerName){ + for(const auto &str : m_passFlags){ + m_passArray.emplace_back(tracknames.key() + '.' 
+ str); + } + } + ATH_CHECK(m_passArray.initialize()); + return StatusCode::SUCCESS; +} + +StatusCode DerivationFramework::Thin_vtxTrk::finalize() +{ + ATH_MSG_VERBOSE("finalize() ..."); + ATH_MSG_INFO("Processed "<< m_ntot <<" tracks, "<< m_npass<< " were retained "); + ATH_MSG_INFO("Processed "<< m_nVtxTot <<" vertices, "<< m_nVtxPass<< " were retained "); + + return StatusCode::SUCCESS; +} + +// The thinning itself +StatusCode DerivationFramework::Thin_vtxTrk::doThinning() const +{ + // Retrieve main TrackParticle collection + SG::ThinningHandle<xAOD::TrackParticleContainer> importedTrackParticles(m_trackParticleContainerName); + + // Check the event contains tracks + unsigned int nTracks = importedTrackParticles->size(); + if (nTracks==0) return StatusCode::SUCCESS; + + // Set up a trackMask with the same entries as the full TrackParticle collection + std::vector<bool> trackMask(nTracks,false); // default: don't keep any tracks + m_ntot += nTracks; + int nVtxTot =0; + int nVtxPass=0; + + std::unordered_map<std::string, SG::ReadDecorHandle<xAOD::VertexContainer, Char_t>> handles; + handles.reserve(m_passArray.size()); + for(const auto &key : m_passArray){ + auto it = handles.emplace(key.key(), key); + if(!(*it.first).second.isPresent()) return StatusCode::FAILURE; + } + + // retieve vertex + for(const auto& name : m_vertexContainerName){ + SG::ThinningHandle<xAOD::VertexContainer> vertexContainer(name); + std::vector<bool> vtxMask(vertexContainer->size(), false); // default: don't keep any vertices + + // loop over vertices + int k = 0; + for(auto vtxItr = vertexContainer->begin(); vtxItr!=vertexContainer->end(); ++vtxItr, ++k) { + const xAOD::Vertex* vtx = *vtxItr; + nVtxTot++; + + // check if the vertex passed the required selections criteria + bool passed = false; + for(std::vector<std::string>::const_iterator flagItr = m_passFlags.begin(); flagItr!=m_passFlags.end(); ++flagItr) { + std::string lookupstr = name.key() + '.' 
+ (*flagItr); + const auto& handle = handles.at(lookupstr); + if(handle(*vtx) != 0) { + passed = true; + break; + } + } // end of loop over flags + + if(passed || m_noFlags) { + // vertex passed the selection + vtxMask[k] = true; + nVtxPass++; + + // Add tracks according to DR selection + if(m_acceptanceR > 0.){ + + // determine the sum of the tracks at vertex as centre for the cone + TLorentzVector centreCandidate; + for(uint j=0; j<vtx->nTrackParticles(); ++j) { + centreCandidate += vtx->trackParticle(j)->p4(); + } + + for(uint i=0; i<nTracks; ++i) { + if(!trackMask[i]) { // do this only for tracks that haven't been selected, yet + const xAOD::TrackParticle* track = (*importedTrackParticles)[i]; + if(centreCandidate.DeltaR(track->p4()) < m_acceptanceR) trackMask[i]= true; + } + } + }// end adding tracks according to DR selection + + if(m_thinTracks) { + // loop over all tracks + for(uint i=0; i<nTracks; ++i) { + if(!trackMask[i]) { // do this only for tracks that haven't been selected, yet + const xAOD::TrackParticle* track = (*importedTrackParticles)[i]; + // loop over tracks at vertex + for(uint j=0; j<vtx->nTrackParticles(); ++j) { + if(vtx->trackParticle(j) == track) { + trackMask[i] = true; // accept track + } + } // end of loop over tracks at vertex + } + } // end of loop over all tracks + } + } + } // end of loop over vertices + + // Execute the thinning service based on the vtxMask. + if (m_and) { + vertexContainer.keep(vtxMask, SG::ThinningHandleBase::Op::And); + } + if (!m_and) { + vertexContainer.keep(vtxMask, SG::ThinningHandleBase::Op::Or); + } + } + + // Count up the trackMask contents + m_npass += std::accumulate(trackMask.begin(), trackMask.end(), 0); + m_nVtxTot += nVtxTot; + m_nVtxPass+= nVtxPass; + if(m_thinTracks || m_acceptanceR > 0.) { + // Execute the thinning service based on the trackMask. Finish. 
+ if (m_trackAnd) { + importedTrackParticles.keep(trackMask, SG::ThinningHandleBase::Op::And); + } + if (!m_trackAnd) { + importedTrackParticles.keep(trackMask, SG::ThinningHandleBase::Op::Or); + } + } + + return StatusCode::SUCCESS; +} + diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/TriggerCountToMetadata.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/TriggerCountToMetadata.cxx new file mode 100644 index 0000000000000000000000000000000000000000..58af23444ecf73fe5e40188e304fac9b58a66ad9 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/TriggerCountToMetadata.cxx @@ -0,0 +1,62 @@ +/* +Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ +//============================================================================ +// +// Author : Matteo Bedognetti <matteo.bedognetti@cern.ch.> +// Changes: +// +// Store trigger counts for specific chains in the DAOD's MetaData. +// This allows it to store information about triggers upon which events are NOT selected during the derivation +// +// Job options: +// - TriggerList -- a vector containing all triggers to store as strings +// - FolderName -- Is supposed to be the derivation name (some convention I guess) +// - TrigDecisionTool -- if one wants to pass this a specific TrigDecisionTool +// +//============================================================================ +// + +#include "DerivationFrameworkBPhys/TriggerCountToMetadata.h" +#include "AthenaPoolUtilities/CondAttrListCollection.h" + +#include <memory> + +namespace DerivationFramework { + + //-------------------------------------------------------------------------- + TriggerCountToMetadata::TriggerCountToMetadata(const std::string& t, + const std::string& n, + const IInterface* p) + : CfAthAlgTool(t,n,p), m_trigDecisionTool( "Trig::TrigDecisionTool/TrigDecisionTool" ) + { + declareInterface<DerivationFramework::IAugmentationTool>(this); + + 
declareProperty("TrigDecisionTool", m_trigDecisionTool ); + declareProperty("FolderName", m_folderName = "DerivationLevel"); + declareProperty("TriggerList", m_triggerList); + + } + //-------------------------------------------------------------------------- + StatusCode TriggerCountToMetadata::initialize() { + ATH_CHECK(m_trigDecisionTool.retrieve()); + + return StatusCode::SUCCESS; + } + + //-------------------------------------------------------------------------- + StatusCode TriggerCountToMetadata::addBranches() const { + + ATH_MSG_DEBUG("Inside TriggerCountToMetadata::addBranches()"); + + // W.w. method + addEvent("AllEvents"); + + for( unsigned int i=0; i<m_triggerList.size(); i++){ + addEvent(m_triggerList[i] , m_trigDecisionTool->isPassed(m_triggerList[i]) ); + } + + return StatusCode::SUCCESS; + } + +} // End of namespace DerivationFramework diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/VertexCaloIsolation.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/VertexCaloIsolation.cxx new file mode 100644 index 0000000000000000000000000000000000000000..c47d71389e39718bf5666da94454221654b946ad --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/VertexCaloIsolation.cxx @@ -0,0 +1,583 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ + +// VertexCaloIsolation.cxx by Matteo Bedognetti +// +// This code is based on CaloIsolationTool of IsolationTools package +// +// Etcone is determined as a topoCluster-isolation value minus Energy Density (ED) correction and minus the energy depositions of the muons +// Muon's energy deposition is already stored in side the xAOD::Muon objects, but the muon-clusters are used to correct for the fact that they muons may have overlapping clusters +// The muon-clusters are stored as well in connection with the muons themselves +// +// The idea of comparing topoClusters with muon-clusters to decide what part of the 
muon's deposition is of +// importance had to be abandoned because topCluster cells are not present in xAOD +// +// It enforces the fact that for muons no core-surface is removed for the energy-density correction (thus the corrections are independent from each other) +// +// "isReliable" flag reports of each isolation value if all particles crossing the cone have been correctly corrected for. +// In the case of 2mu+ 1 track it mirrors the fact that the track does not extrapolate into the cone (as tracks have no muon-cluster from which to determine the core-correction) +// + + + + +#include "DerivationFrameworkBPhys/VertexCaloIsolation.h" + +#include <vector> +#include <string> +#include "TVector3.h" + +#include "xAODTracking/VertexContainer.h" +#include "xAODTracking/VertexAuxContainer.h" +#include "xAODBPhys/BPhysHelper.h" +#include "xAODBPhys/BPhysHypoHelper.h" +#include "TrkVertexAnalysisUtils/V0Tools.h" + +//#include "IsolationTool/CaloIsolationTool.h" +//#include "IsolationTool/CaloIsolationTool.h" + +#include "RecoToolInterfaces/ICaloTopoClusterIsolationTool.h" + +//#include "IsolationTool/IsolationHelper.h" +//#include "InDetTrackSelectionTool/InDetTrackSelectionTool.h" +#include "CaloEvent/CaloCell.h" //Is used (though shown as auto) +//#include "TrkParameters/TrackParameters.h" +#include "CaloInterface/ICaloNoiseTool.h" +#include "TrkCaloExtension/CaloExtension.h" +//#include "CaloUtils/CaloClusterStoreHelper.h" +//#include "CaloUtils/CaloCellList.h" +//#include "CaloEvent/CaloCellContainer.h" +#include "xAODTracking/TrackingPrimitives.h" +#include "xAODPrimitives/IsolationHelpers.h" +#include "TrackToCalo/CaloCellCollector.h" +#include <set> + +//#include "Identifier/Identifier32.h" +using namespace std; +namespace DerivationFramework { + + VertexCaloIsolation::VertexCaloIsolation(const std::string& t, + const std::string& n, + const IInterface* p) : + AthAlgTool(t,n,p), + m_caloIsoTool("xAOD::CaloIsolationTool/CaloIsolationTool"), + 
m_trackContainerName("InDetTrackParticles"), + m_vertexContainerName("NONE"), + m_caloClusterContainerName("CaloCalTopoClusters"), + m_muonContainerName("Muons"), + m_caloExtTool("Trk::ParticleCaloExtensionTool/ParticleCaloExtensionTool"), + //m_caloNoiseTool(""), + m_cones(), + m_sigmaCaloNoiseCut(3.4), + m_vertexType(7) + + + // m_cellCollector("") + + +// m_caloExtTool +// m_caloNoiseTool, m_applyCaloNoiseCut, m_sigmaCaloNoiseCut +// m_cellCollector + + { + ATH_MSG_DEBUG("in constructor"); + declareInterface<DerivationFramework::IAugmentationTool>(this); + + // Declare tools + declareProperty("CaloIsoTool" , m_caloIsoTool); + declareProperty("TrackContainer" , m_trackContainerName); + declareProperty("InputVertexContainer" , m_vertexContainerName); + declareProperty("CaloClusterContainer" , m_caloClusterContainerName); + declareProperty("ParticleCaloExtensionTool", m_caloExtTool); + declareProperty("MuonContainer", m_muonContainerName); + declareProperty("PassFlags" , m_passFlags); + declareProperty("IsolationTypes" , m_cones); + declareProperty("DoVertexTypes" , m_vertexType); + + + + } + + // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + + StatusCode VertexCaloIsolation::initialize() + { + + ATH_MSG_DEBUG("in initialize()"); + + // retrieve CaloIsolationTool + CHECK( m_caloIsoTool.retrieve() ); + + // retrieve CaloIsolationTool + CHECK( m_caloExtTool.retrieve() ); + + //Check that flags were given to tag the correct vertices + if(m_passFlags.empty()){ + ATH_MSG_WARNING("As no pass-flags are given, no vertices will be decorated"); + } + + // Control the IsolationType sequence + if(m_cones.empty()){ + m_cones.push_back(xAOD::Iso::etcone40); + m_cones.push_back(xAOD::Iso::etcone30); + m_cones.push_back(xAOD::Iso::etcone20); + } + + //if(m_applyCaloNoiseCut){ + //ATH_MSG_ERROR("No handle to a caloNoiseTool is kept in this tool, "); + //return StatusCode::FAILURE; + //} + + return StatusCode::SUCCESS; + + } + + // * * * * * * * * * * * 
* * * * * * * * * * * * * * * * * * * * * * * * * * + + StatusCode VertexCaloIsolation::finalize() + { + // everything all right + return StatusCode::SUCCESS; + } + + // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + + StatusCode VertexCaloIsolation::addBranches() const { + + + // There is also the "MuonClusterCollection" which may already contain all the muon's clusters + + const xAOD::TrackParticleContainer* idTrackParticleContainer = NULL; + const xAOD::VertexContainer* vertexContainer = NULL; + + Rec::CaloCellCollector cellCollector; //To keep private dependence for this package it is used here + + + const xAOD::MuonContainer* muons = NULL; + + + //Load InDetTrackParticles + if(evtStore()->contains<xAOD::TrackParticleContainer>(m_trackContainerName)) { + CHECK( evtStore()->retrieve(idTrackParticleContainer, m_trackContainerName) ); + } + else{ATH_MSG_ERROR("Failed loading IdTrackparticleContainer container"); + return StatusCode::FAILURE; + } + + // load vertices + if(evtStore()->contains<xAOD::VertexContainer>(m_vertexContainerName)) { + CHECK( evtStore()->retrieve(vertexContainer, m_vertexContainerName) ); + } + else{ATH_MSG_ERROR("Failed loading vertex container"); + return StatusCode::FAILURE; + } + + + const xAOD::CaloClusterContainer* caloClusterContainer = NULL; + // load CaloCalTopoClusters + if(evtStore()->contains<xAOD::CaloClusterContainer>(m_caloClusterContainerName)) { + CHECK( evtStore()->retrieve(caloClusterContainer, m_caloClusterContainerName) ); + } + else{ATH_MSG_ERROR("Failed loading vertex container"); + return StatusCode::FAILURE; + } + + + //Retrieve muon container + if(evtStore()->contains<xAOD::MuonContainer>(m_muonContainerName)) { + CHECK( evtStore()->retrieve(muons, m_muonContainerName) ); + } + else{ATH_MSG_ERROR("Failed loading muon contianer"); + return StatusCode::FAILURE; + } + + +//------------------------------------------------- + + std::vector<xAOD::Iso::IsolationType> cones; 
cones.resize(m_cones.size()); + +// for(unsigned int cone : m_cones) +// cones.push_back(xAOD::Iso::IsolationType(cone)); + + for (unsigned int i =0; i< m_cones.size(); i++) + cones[i] = xAOD::Iso::IsolationType(m_cones[i]); + + + + //Loop over vertices + for(auto vertex : *vertexContainer){ + + bool passed = false; + for(std::vector<std::string>::const_iterator flagItr = m_passFlags.begin(); flagItr!=m_passFlags.end(); ++flagItr) { + SG::AuxElement::Accessor<Char_t> flagAcc(*flagItr); + if(flagAcc.isAvailable(*vertex) && flagAcc(*vertex) != 0) { + passed = true; + break; + } + } // end of loop over flags + if(passed){ + ATH_MSG_DEBUG("Entered loop over vertices"); + if(vertex->trackParticleLinks().size() != 3)ATH_MSG_WARNING("Vertex without 3 tracks, it has "<< vertex->trackParticleLinks().size() <<" instead"); + + TLorentzVector candidate; + + std::set<const xAOD::TrackParticle*> exclusionset; + + for(auto part : vertex->trackParticleLinks()){ //Loop over tracks linked to vertex + candidate += (*part)->p4(); + exclusionset.insert( *part ); //If it crashes use the direct TP from the vertex + } + + //List of corrections: only the pileup correction is applied within the tool + xAOD::CaloCorrection corrlist; + corrlist.calobitset.set(static_cast<unsigned int>(xAOD::Iso::pileupCorrection)); + + + + std::vector<const xAOD::Muon*> vtxMuons; + std::vector<TLorentzVector> extrVtxMuons ; + std::vector<const xAOD::CaloCluster*> vtxMuonCluster; + + std::vector<const xAOD::TrackParticle*> usedVtxTracks; + //The information whether we are missing some core-corrections in the final isolation value + map<xAOD::Iso::IsolationType, bool> is_reliable; + + + TLorentzVector muonref; //Place holder for the extrapolated position + //Load the caloclusters of the various muons (which you need to load from here) + const xAOD::MuonContainer* muons = 0; + CHECK( evtStore()->retrieve( muons, "Muons" ) ); + for ( auto muon : *muons ) { + //I ask for all information to be fine before filling 
in an entry (so all containers will have the same -matching- objects) + if(muon->inDetTrackParticleLink().isValid() && exclusionset.find(*muon->inDetTrackParticleLink() ) != exclusionset.end() ){ + const xAOD::CaloCluster* clus = muon->cluster(); + if(clus && extrapolateMuon(muonref, clus)){ + // have a muon, an extrapolation and a cluster (hurray) + vtxMuonCluster.push_back(clus); + vtxMuons.push_back(muon); + usedVtxTracks.push_back( *muon->inDetTrackParticleLink() ); + extrVtxMuons.push_back(muonref); + }else{ + ATH_MSG_DEBUG("Cannot find clusters. Would need a consistent set of Trk::Tracks to run extrapolation."); + + // //If working with the cluster failed, try extrapolating the track + // if(extrapolateTrack(muonref, *muon)){ //This does not use the muonic cluster, but uses both its tracks to determine a precise position + // vtxMuonCluster.push_back(clus); //Note clus can also be NULL (for if it's not in the cone there is no point to fret) + // vtxMuons.push_back(muon); + // usedVtxTracks.push_back( *muon->inDetTrackParticleLink() ); + // extrVtxMuons.push_back(muonref); + //} + } + } + } + + + //What if there was a track and not a muon?? 
+ //Should be treated like the muon-without-cluster case + + if(vtxMuonCluster.size() !=3){ //remember that some of the ctxMuonCluster elements may be NULL + ATH_MSG_DEBUG( "Attempt at extrapolating the IDtrack" ); + + //Attempt extrapolating the IDtrack for the missing cases + for(const xAOD::TrackParticle* missingTrk : exclusionset){ + if(std::find(usedVtxTracks.begin(), usedVtxTracks.end(), missingTrk) == usedVtxTracks.end()){ + if(extrapolateTrack(muonref, *missingTrk)){ + vtxMuonCluster.push_back(NULL); //Null, for we didn't start from a muon + usedVtxTracks.push_back( missingTrk ); + extrVtxMuons.push_back(muonref); + } + } + } + + //If there are still missing ones values cannot be guaranteed to be reliable + if(vtxMuonCluster.size() !=3){ + ATH_MSG_DEBUG( "For this vertex there were less than 3 muons found (or extrapolated)" ); + for(xAOD::Iso::IsolationType isoCone : cones) is_reliable[isoCone] = false; + } + } + else{ + for(xAOD::Iso::IsolationType isoCone : cones) + is_reliable[isoCone] = true; + } + + + // Adapt this loop! + + for(unsigned int vertex_type = 0 ; vertex_type<= xAOD::BPhysHelper::PV_MIN_Z0 ; vertex_type++ ){ + + if((m_vertexType & (1 << vertex_type ) ) == 0)continue; //Stop if the type of vertex is not required + + //This can be in an inside loop + + xAOD::BPhysHelper::pv_type this_type = static_cast<xAOD::BPhysHelper::pv_type>( vertex_type ); + + xAOD::TrackParticle candidate_slyTrack; + makeSlyTrack(candidate_slyTrack, candidate, vertex, this_type); + + + xAOD::CaloIsolation result; + + ATH_MSG_DEBUG("Check if the caloclus container has to be given or not... see line from 755 on of CaloIsolationTool"); + + bool successful = m_caloIsoTool->caloTopoClusterIsolation(result, candidate_slyTrack, cones, corrlist, caloClusterContainer); + if( !successful ) { + ATH_MSG_DEBUG("Calculation of caloTopoClusterIsolation failed"); + return StatusCode::FAILURE; + } + + // Make the extension to the calorimeter, as it is done inside the other tools... 
+ TLorentzVector extr_candidate; + if( !extrapolateTrack(extr_candidate, candidate_slyTrack) ){ + ATH_MSG_WARNING("Failure extrapolating the slyTrack "<<"pt="<<candidate_slyTrack.pt()<<" eta="<<candidate_slyTrack.eta()<<" phi="<<candidate_slyTrack.phi()); + ATH_MSG_WARNING("Taking the original coordinates"); + } + + + std::map<xAOD::Iso::IsolationType,float> coreCorrections; + + //See if this is inside the cone, to determine the correct correction ^^ + for(xAOD::Iso::IsolationType isoType : cones){ + + double conesize = xAOD::Iso::coneSize(isoType); + //check what is inside the cone + std::vector<xAOD::CaloCluster> clustersInCone; + + for(unsigned int j=0; j < vtxMuonCluster.size(); j++){ + auto mucluster = vtxMuonCluster[j]; + // I should use the propagated values here, though the variation is very small, coming from the vertex position + float dr=extrVtxMuons[j].DeltaR(extr_candidate); + + + + ATH_MSG_DEBUG("Cone size: "<<conesize<<" dr="<<dr); + ATH_MSG_DEBUG(extrVtxMuons[j].Eta() <<" - "<<extr_candidate.Eta()<<" and "<<extrVtxMuons[j].Phi() <<" - "<<extr_candidate.Phi()); + + if( dr < conesize ){ //This makes a copy, such that I can remove some cells if needed + + + //here do the check for the cluster, if it should go in, then prevent and set the bad for this cone + if(mucluster != NULL) clustersInCone.push_back( xAOD::CaloCluster(*mucluster) ); + else is_reliable[isoType] = false; + + + + } + } + + // ATH_MSG_DEBUG("Muon clusters in cone "<<xAOD::Iso::toString(isoType)<<" "<< clustersInCone.size()); + // if( msgLvl(MSG::DEBUG) ){ + // for(auto muon : vtxMuons) + // if(muon->isAvailable<float>("ET_Core")) ATH_MSG_DEBUG("ET_core stored inside: "<< muon->auxdataConst<float>("ET_Core") ); + // } + //remove eventually doubles in cells + if(clustersInCone.size() == 2){ + for(auto cell : clustersInCone[0]){ + clustersInCone[1].removeCell(cell); + } + } + if(clustersInCone.size() == 3){ + for(auto cell : clustersInCone[0]){ + clustersInCone[1].removeCell(cell); + 
clustersInCone[2].removeCell(cell); + } + for(auto cell : clustersInCone[1]){ + clustersInCone[2].removeCell(cell); + } + } + + //Calculate the core-correction + std::vector<float> etcore(4, 0.); + float coreCorr=0.; + for(auto cl : clustersInCone){ + if(cl.size() != 0){ //Maybe two muons have a full cluster overlap?? + ATH_MSG_DEBUG("Cells in this cluster: "<< cl.size()); + + cellCollector.collectEtCore( cl, etcore, nullptr, m_sigmaCaloNoiseCut ); //Note an empty handle to ICaloNoiseTool is passed + coreCorr += etcore[Rec::CaloCellCollector::ET_Core]; + ATH_MSG_DEBUG("Their core-energy: "<< etcore[Rec::CaloCellCollector::ET_Core]); + + } + } + + //Store the core-correction + coreCorrections[isoType] = coreCorr; + + } + + //For a pion I have no such cone energy, do I? But then I should also see what the original vertex was + //If something is not a muon there is no way the calocluster was stored, I think + //Would need further study + + + //Collect all the required information + string ED("_EDcorr"); + string core("_COREcorr"); + string reliable("_isReliable"); + + + string vtx_type[3] = {"SumPt", "A0", "Z0"}; + + string vtx = vtx_type[ vertex_type ]; + + ATH_MSG_DEBUG("Detailed: "); + for(unsigned int i=0; i< cones.size(); i++){ + xAOD::Iso::IsolationType isoType = cones[i]; + result.etcones[i] -= coreCorrections[isoType]; //Finish correcting the energy + + + // if(fabs(result.etcones[i]) < 0.1){ + // + // ATH_MSG_INFO("Isolation: "<<xAOD::Iso::toString(isoType) ); // The name of the isolation + // ATH_MSG_ERROR(result.etcones[i]<<" + "<<(result.noncoreCorrections[xAOD::Iso::pileupCorrection])[i]<<" + "<<coreCorrections[isoType] ); + // } + + //Here do the decoration (store all, and as well if three muons are found) + + + string variableName = xAOD::Iso::toString(isoType) + vtx; //I corrected for the closest vertex in A0 + SG::AuxElement::Decorator<float> isolation(variableName); + isolation(*vertex) = result.etcones[i]; + + isolation = 
SG::AuxElement::Decorator<float>(variableName + ED); + isolation(*vertex) = (result.noncoreCorrections[xAOD::Iso::pileupCorrection])[i]; + + isolation = SG::AuxElement::Decorator<float>(variableName + core); + isolation(*vertex) = coreCorrections[isoType]; + + //This variable contains the info whether 3 caloclusters have been found in the muons + //Future would be to see if their extrapolations are of interest anyhow (if not missing them is no issue) + //Fore some reason these seem to become chars (instead of bools) in the output + SG::AuxElement::Decorator<bool> reliability(variableName + reliable); + reliability(*vertex) = is_reliable[isoType]; + + } + } //Loop over primaryVertex choice + + //Decorate the candidate with the new information + + +// return StatusCode::SUCCESS; +// +// /////////////////////////////////// + + } + +//END OF NEW PART + }//End of loop over vertices + return StatusCode::SUCCESS; + } + + //Note that the full version had a different method for muons!!!! Maybe I should use that one instead! 
+ + //This is almost a perfect copy of CaloIsolationTool::GetExtrapEtaPhi, but only for the part relative to tracks + bool VertexCaloIsolation::extrapolateTrack(TLorentzVector& extr_tp, const xAOD::IParticle& tp) const{ + extr_tp = tp.p4(); //Pre-set the output TLorentzVector to the input's 4-momentum + ATH_MSG_ERROR("VertexCaloIsolation::extrapolateTrack needs to be rewritten because of changes to the caloExtension"); + throw std::runtime_error("VertexCaloIsolation::extrapolateTrack needs to be rewritten because of changes to the caloExtension"); +/* + + + const Trk::CaloExtension* caloExtension = 0; + if(!m_caloExtTool->caloExtension(tp,caloExtension)){ + ATH_MSG_WARNING("Can not get caloExtension."); + return false; + } + + const std::vector<const Trk::CurvilinearParameters*>& intersections = caloExtension->caloLayerIntersections(); + if (intersections.size()>0) { + Amg::Vector3D avePoint(0,0,0); + for (unsigned int i = 0; i < intersections.size(); ++i){ + const Amg::Vector3D& point = intersections[i]->position(); + avePoint += point; + } + avePoint = (1./intersections.size())*avePoint; + + + extr_tp.SetPtEtaPhiE(1., avePoint.eta(), avePoint.phi(), 10.); //Using the three-vector constructor + //eta = avePoint.eta(); + //phi = avePoint.phi(); + ATH_MSG_DEBUG("Successfully extrapolated candidate eta/phi : "<<tp.eta()<<"/"<<tp.phi()<<" --> "<< extr_tp.Eta()<<"/"<<extr_tp.Phi()); + + } + else{ //This is very unlikely, it happens if a few cases in MC + ATH_MSG_WARNING("Candidate extrapolation failed. Keeping track's eta/phi values"); + return false; + + } + + return true; +*/ + + } + + //Version for the muons + bool VertexCaloIsolation::extrapolateMuon(TLorentzVector& extr_tp, const xAOD::CaloCluster* cluster) const + { + //auto cluster = mu->cluster(); //done outside + if(cluster){ + float etaT = 0, phiT = 0; + int nSample = 0; + for(unsigned int i=0; i<CaloSampling::Unknown; i++) // dangerous? 
+ { + auto s = static_cast<CaloSampling::CaloSample>(i); + if(!cluster->hasSampling(s)) continue; + //ATH_MSG_DEBUG("Sampling: " << i << "eta-phi (" << cluster->etaSample(s) << ", " << cluster->phiSample(s) << ")"); + etaT += cluster->etaSample(s); + phiT += cluster->phiSample(s); + nSample++; + } + if(nSample>0){ + + extr_tp.SetPtEtaPhiE(1., etaT/nSample, phiT/nSample, 10.); //Using the three-vector constructor + return true ; + + }else{ + ATH_MSG_WARNING("Muon calo cluster is empty????"); + return false; + } + }else{ + ATH_MSG_WARNING("Muon calo cluster not found. Calo extension can not be obtained!!!"); + return false; + } + } + + //Make a sly track to be fed to the CaloIsolationTool + xAOD::TrackParticle& VertexCaloIsolation::makeSlyTrack(xAOD::TrackParticle& candidate_slyTrack, const TLorentzVector& candidate, const xAOD::Vertex* vertex, xAOD::BPhysHelper::pv_type vertexType) const { + + candidate_slyTrack.makePrivateStore(); + candidate_slyTrack.setDefiningParameters(0, 0., candidate.Phi(), candidate.Theta(), 0. ); // avoided q/p = 1./candidate.P() + + //I should set the correct d0 and z0, while setting momentum to enormous, to obtain a straight line + //I fear that q/p == 0 might cause some divide by 0, though. + + //Somewhere this information will be checked, so I need to provide it + SG::AuxElement::Decorator<uint8_t> hypothesis("particleHypothesis"); + hypothesis(candidate_slyTrack) = xAOD::undefined; //Value 99 as none of the common types (muon, pion, kaon, etc.) 
+ SG::AuxElement::Decorator<std::vector<float> > covmat( "definingParametersCovMatrix" ); + covmat(candidate_slyTrack) = std::vector<float>(25, 0.); // I am saying that there are no errors on my parameters + //The precision goes down a bit, but it's a matter of 10e-7 with our values of interest + + xAOD::BPhysHelper vertex_h(vertex); //Use the BPhysHelper to access vertex quantities + + SG::AuxElement::Decorator<float> vx( "vx" ); + vx(candidate_slyTrack) = vertex_h.pv(vertexType)->x(); + + SG::AuxElement::Decorator<float> vy( "vy" ); + vy(candidate_slyTrack) = vertex_h.pv(vertexType)->y(); + + SG::AuxElement::Decorator<float> vz( "vz" ); + vz(candidate_slyTrack) = vertex_h.pv(vertexType)->z(); + //The precision goes down a bit, but it's a matter of 10e-7 with our values of interest + + return candidate_slyTrack; + + + } + + + + +}//End of namespace DerivationFramework + + diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/VertexPlus1TrackCascade.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/VertexPlus1TrackCascade.cxx new file mode 100644 index 0000000000000000000000000000000000000000..b3c4c221dd4af9c2cad11253937642c0ed63d660 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/VertexPlus1TrackCascade.cxx @@ -0,0 +1,215 @@ +/* + Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +*/ + + +#include "DerivationFrameworkBPhys/VertexPlus1TrackCascade.h" + +#include "TrkVertexFitterInterfaces/IVertexFitter.h" +#include "TrkVKalVrtFitter/TrkVKalVrtFitter.h" +#include "TrkToolInterfaces/ITrackSelectorTool.h" + +namespace DerivationFramework { + + + typedef std::vector<const xAOD::TrackParticle*> TrackBag; + + StatusCode VertexPlus1TrackCascade::initialize() { + + // retrieving vertex Fitter + if ( m_iVertexFitter.retrieve().isFailure() ) { + ATH_MSG_FATAL("Failed to retrieve tool " << m_iVertexFitter); + return StatusCode::FAILURE; + } else { + ATH_MSG_DEBUG("Retrieved 
tool " << m_iVertexFitter); + } + + // Get the track selector tool from ToolSvc + if ( m_trkSelector.retrieve().isFailure() ) { + ATH_MSG_FATAL("Failed to retrieve tool " << m_trkSelector); + return StatusCode::FAILURE; + } else { + ATH_MSG_DEBUG("Retrieved tool " << m_trkSelector); + } + if(!m_vertexContainerKey.key().empty()) ATH_CHECK(m_vertexContainerKey.initialize()); + if(!m_TrackPContainerKey.key().empty()) ATH_CHECK(m_TrackPContainerKey.initialize()); + if(!m_MuonsUsedInJpsiKey.key().empty()) ATH_CHECK(m_MuonsUsedInJpsiKey.initialize()); + + return StatusCode::SUCCESS; + } + + StatusCode VertexPlus1TrackCascade::finalize() { + + return StatusCode::SUCCESS; + + } + + VertexPlus1TrackCascade::VertexPlus1TrackCascade(const std::string& t, const std::string& n, const IInterface* p) : AthAlgTool(t,n,p), + m_vertexContainerKey(""), + m_TrackPContainerKey(""), + m_MuonsUsedInJpsiKey(""), + m_Vtx1MassConstraint(0.), + m_Vtx2MassConstraint(0.0), + m_trkThresholdPt(0.0), + m_trkMaxEta(102.5), +// m_BThresholdPt(0.0), +// m_BMassUpper(0.0), +// m_BMassLower(0.0), + m_roughMassLower(0.0), + m_roughMassUpper(0.0), + m_iVertexFitter("Trk::TrkVKalVrtFitter"), + m_trkSelector("InDet::TrackSelectorTool") + { + declareProperty("InitialVertices", m_vertexContainerKey); + declareProperty("TrackParticleCollection", m_TrackPContainerKey); + declareProperty("MuonCollection", m_MuonsUsedInJpsiKey); + declareProperty("MassHypthesis", m_massHypothesis); + declareProperty("MassContraintTracksVtx1", m_massConstraintTracksVtx1); + declareProperty("MassContraintTracksVtx2", m_massConstraintTracksVtx2); + + declareProperty("Vtx1MassConstraint", m_Vtx1MassConstraint); + declareProperty("Vtx2MassConstraint", m_Vtx2MassConstraint); + + declareProperty("trkThresholdPtCut", m_trkThresholdPt); + declareProperty("trkMassEtaCut", m_trkMaxEta); +// declareProperty("BThresholdPtCut", m_BThresholdPt); +// declareProperty("BMassUpperCut", m_BMassUpper); +// declareProperty("BMassLowerCut", 
m_BMassLower); + + declareProperty("RoughMassUpperCut", m_roughMassLower); + declareProperty("RoughMassLowerCut", m_roughMassUpper); + + } + + VertexPlus1TrackCascade::~VertexPlus1TrackCascade(){ } + + double VertexPlus1TrackCascade::getInvariantMass(const TrackBag &Tracks, const std::vector<double> &massHypotheses){ + + TLorentzVector total; + total.SetVectM(Tracks[0]->p4().Vect(), massHypotheses[0]); + TLorentzVector temp; + for(size_t i=1; i < Tracks.size(); i++){ + temp.SetVectM(Tracks[i]->p4().Vect(), massHypotheses[i]); + total += temp; + } + return total.M(); + } + + bool VertexPlus1TrackCascade::isContainedIn(const xAOD::TrackParticle* theTrack, const xAOD::MuonContainer* theColl) { + bool isContained(false); + for (auto muItr=theColl->cbegin(); muItr!=theColl->cend(); ++muItr) { + auto& link = ( *muItr )->inDetTrackParticleLink(); + if ( link.isValid() && ( *link == theTrack ) ) {isContained=true; break;} + } + return isContained; + } + + StatusCode VertexPlus1TrackCascade::performSearch(std::vector<Trk::VxCascadeInfo*> *cascadeinfoContainer) const + { + ATH_MSG_DEBUG( "VertexPlus1TrackCascade::performSearch" ); + assert(cascadeinfoContainer!=nullptr); + SG::ReadHandle<xAOD::VertexContainer> vertexContainer(m_vertexContainerKey); + if(!vertexContainer.isValid()){ + ATH_MSG_ERROR("No VertexContainer with key " << m_vertexContainerKey.key() << " found in StoreGate. 
BCandidates will be EMPTY!"); + return StatusCode::FAILURE; + } + + // Get tracks + SG::ReadHandle<xAOD::TrackParticleContainer> TrackPContainer(m_TrackPContainerKey); + if(!TrackPContainer.isValid()){ + ATH_MSG_ERROR("No track particle collection with name " << m_TrackPContainerKey.key() << " found in StoreGate!"); + return StatusCode::FAILURE; + } + + + // Get the muon collection used to build the J/psis + const xAOD::MuonContainer* importedMuonCollection = nullptr; + if (!m_MuonsUsedInJpsiKey.key().empty()) { + SG::ReadHandle<xAOD::MuonContainer> handle(m_MuonsUsedInJpsiKey); + if(handle.isValid()) importedMuonCollection = handle.cptr(); + else { + ATH_MSG_FATAL("problem retrieving MuonContainer " << m_MuonsUsedInJpsiKey.key()); + return StatusCode::FAILURE; + } + ATH_MSG_DEBUG("Muon container size "<< importedMuonCollection->size()); + } + + // Select the inner detector tracks + TrackBag theIDTracksAfterSelection; + for (auto tp : *TrackPContainer){ + if ( tp->pt()<m_trkThresholdPt ) continue; + if ( fabs(tp->eta())>m_trkMaxEta ) continue; + if (importedMuonCollection!=NULL) { + if (isContainedIn(tp, importedMuonCollection)) continue; + } + if ( m_trkSelector->decision(*tp, NULL) ) theIDTracksAfterSelection.push_back(tp); + } + + const std::vector<double> &fullMassHypoth = (m_massHypothesis); + const std::vector<double> initialVertexMassHypo(fullMassHypoth.begin(), fullMassHypoth.end()-1); + + TrackBag originalVertexTracks(initialVertexMassHypo.size()); + TrackBag secondVertexTracks(fullMassHypoth.size()); + + const std::vector< Trk::VertexID > emptyVtxList; + TrackBag ConstraintTracksVtx1(m_massConstraintTracksVtx1.size()); + TrackBag ConstraintTracksVtx2(m_massConstraintTracksVtx2.size()); + + assert(fullMassHypoth.size() == secondVertexTracks.size()); + + for(auto vertex : *vertexContainer){ //Iterate over previous vertices + + size_t OriginaltrackNum = vertex->nTrackParticles(); + if(initialVertexMassHypo.size() != OriginaltrackNum){ + ATH_MSG_FATAL("Mass 
hypothesis not correctly set"); + return StatusCode::FAILURE; + } + for(size_t i = 0;i<OriginaltrackNum;i++) + originalVertexTracks[i] = secondVertexTracks[i] = (vertex->trackParticle(i)); + + for(auto newtrack : theIDTracksAfterSelection){ + //Skip any track already used in vertex + if(std::find(originalVertexTracks.begin(), originalVertexTracks.end(), newtrack) != originalVertexTracks.end()) continue; + + secondVertexTracks.back() = newtrack; + + double roughmass = getInvariantMass(secondVertexTracks, fullMassHypoth); + + if(m_roughMassUpper > 0.0 && (roughmass < m_roughMassLower || roughmass > m_roughMassUpper)) continue; + + std::unique_ptr<Trk::IVKalState> state = m_iVertexFitter->makeState(); + m_iVertexFitter->setRobustness( 0, *state ); + + auto vID1 = m_iVertexFitter->startVertex( originalVertexTracks, initialVertexMassHypo, *state ); + auto vID2 = m_iVertexFitter->nextVertex( secondVertexTracks, fullMassHypoth, *state ); + + if(!m_massConstraintTracksVtx1.empty()){ + for(size_t i =0; i<m_massConstraintTracksVtx1.size(); i++) ConstraintTracksVtx1[i] = originalVertexTracks.at(m_massConstraintTracksVtx1[i]); + if( !m_iVertexFitter->addMassConstraint( vID1, ConstraintTracksVtx1, emptyVtxList, *state, m_Vtx1MassConstraint ).isSuccess() ) { + ATH_MSG_WARNING( "cascade fit: addMassConstraint failed" ); + } + } + + if(!m_massConstraintTracksVtx2.empty()){ + for(size_t i =0; i<m_massConstraintTracksVtx2.size(); i++) ConstraintTracksVtx2[i] = secondVertexTracks.at(m_massConstraintTracksVtx2[i]); + if( !m_iVertexFitter->addMassConstraint( vID2, ConstraintTracksVtx2, emptyVtxList,*state, m_Vtx2MassConstraint ).isSuccess() ) { + ATH_MSG_WARNING( "cascade fit: addMassConstraint failed" ); + } + } + + auto result = m_iVertexFitter->fitCascade(*state); + if(result ==nullptr ){ ATH_MSG_WARNING("Cascade Fit failed"); continue; } + assert(result->vertices().size()==2); + cascadeinfoContainer->push_back(result); + + } + + } + ATH_MSG_DEBUG("cascadeinfoContainer size " << 
cascadeinfoContainer->size()); + return StatusCode::SUCCESS; + } + + +} + + diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/VertexTrackIsolation.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/VertexTrackIsolation.cxx new file mode 100644 index 0000000000000000000000000000000000000000..7ce8df7972fe1816db5ea553564da41740fcc1a9 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/VertexTrackIsolation.cxx @@ -0,0 +1,274 @@ +/* + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +*/ + + +#include "DerivationFrameworkBPhys/VertexTrackIsolation.h" + +#include <vector> +#include <string> +#include "TVector3.h" + +#include "xAODTracking/VertexContainer.h" +#include "xAODTracking/VertexAuxContainer.h" +#include "xAODBPhys/BPhysHelper.h" +#include "xAODBPhys/BPhysHypoHelper.h" +#include "TrkVertexAnalysisUtils/V0Tools.h" + +//#include "IsolationTool/CaloIsolationTool.h" +//#include "IsolationTool/TrackIsolationTool.h" +#include "RecoToolInterfaces/ITrackIsolationTool.h" +#include "xAODPrimitives/IsolationHelpers.h" //For the definition of Iso::conesize + +//#include "IsolationTool/IsolationHelper.h" +//#include "InDetTrackSelectionTool/InDetTrackSelectionTool.h" + + +//#include "Identifier/Identifier32.h" +using namespace std; +namespace DerivationFramework { + + VertexTrackIsolation::VertexTrackIsolation(const std::string& t, + const std::string& n, + const IInterface* p) : + AthAlgTool(t,n,p), + m_trackIsoTool("xAOD::TrackIsolationTool"), + m_trackContainerName("InDetTrackParticles"), + m_vertexContainerName("NONE"), + m_cones(), + m_vertexType(7), + + m_doIsoPerTrk(false), + m_removeDuplicate(2) + { + ATH_MSG_DEBUG("in constructor"); + declareInterface<DerivationFramework::IAugmentationTool>(this); + + // Declare tools + declareProperty("TrackIsoTool" , m_trackIsoTool); + + declareProperty("TrackContainer" , m_trackContainerName); + 
declareProperty("InputVertexContainer" , m_vertexContainerName); + declareProperty("PassFlags" , m_passFlags); + declareProperty("IsolationTypes" , m_cones); + declareProperty("DoVertexTypes" , m_vertexType); + + declareProperty("DoIsoPerTrk" , m_doIsoPerTrk, "New property to deal with track isolation per track, the default option (m_doIsoPerTrk=false) preserves the old behavior"); + declareProperty("RemoveDuplicate" , m_removeDuplicate, "Used with DoIsoPerTrk"); + } + + // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + + StatusCode VertexTrackIsolation::initialize() + { + + ATH_MSG_DEBUG("in initialize()"); + + // retrieve TrackIsolationTool + CHECK( m_trackIsoTool.retrieve() ); + + //Check that flags were given to tag the correct vertices + if(m_passFlags.empty()){ + ATH_MSG_WARNING("As no pass-flags are given, no vertices will be decorated"); + } + + // Control the IsolationType sequence + if(m_cones.empty()){ + ATH_MSG_INFO("Setting ptcones to default"); + + if(m_doIsoPerTrk) m_cones.push_back(xAOD::Iso::ptcone50); + m_cones.push_back(xAOD::Iso::ptcone40); + m_cones.push_back(xAOD::Iso::ptcone30); + m_cones.push_back(xAOD::Iso::ptcone20); + + } + + return StatusCode::SUCCESS; + + } + + // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + + StatusCode VertexTrackIsolation::finalize() + { + // everything all right + return StatusCode::SUCCESS; + } + + // check if the two vertices are composed of the same set of tracks + bool VertexTrackIsolation::isSame(const xAOD::Vertex* theVtx1, const xAOD::Vertex* theVtx2) const { + if(!theVtx1 || !theVtx2) return false; + if(theVtx1==theVtx2) return true; + if(theVtx1->nTrackParticles() != theVtx2->nTrackParticles()) return false; + + if(m_removeDuplicate==2 && theVtx1->nTrackParticles()==4) { // a special case with sub-structure + bool firstTwoAreSame = std::set<const xAOD::TrackParticle*>( { theVtx1->trackParticle(0), theVtx1->trackParticle(1)} ) == std::set<const 
xAOD::TrackParticle*>( {theVtx2->trackParticle(0), theVtx2->trackParticle(1)} ); // the 1st pair of tracks + bool lastTwoAreSame = std::set<const xAOD::TrackParticle*>( { theVtx1->trackParticle(2), theVtx1->trackParticle(3)} ) == std::set<const xAOD::TrackParticle*>( {theVtx2->trackParticle(2), theVtx2->trackParticle(3)} ); // the 2nd pair of tracks + if(firstTwoAreSame && lastTwoAreSame) return true; + else return false; + } + else { // the general case + std::set<const xAOD::TrackParticle*> vtxset1; + std::set<const xAOD::TrackParticle*> vtxset2; + for(size_t i=0; i<theVtx1->nTrackParticles(); i++) vtxset1.insert(theVtx1->trackParticle(i)); + for(size_t i=0; i<theVtx2->nTrackParticles(); i++) vtxset2.insert(theVtx2->trackParticle(i)); + return vtxset1 == vtxset2; + } + } + + bool VertexTrackIsolation::isContainedIn(const xAOD::Vertex* theVtx, const std::vector<const xAOD::Vertex*> &theColl) const { + for ( const auto vtxPtr : theColl ) { + if ( isSame(vtxPtr, theVtx) ) return true; + } + return false; + } + + // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + + StatusCode VertexTrackIsolation::addBranches() const { + + const xAOD::TrackParticleContainer* idTrackParticleContainer = NULL; + const xAOD::VertexContainer* vertexContainer = NULL; + + if(evtStore()->contains<xAOD::TrackParticleContainer>(m_trackContainerName)) { + CHECK( evtStore()->retrieve(idTrackParticleContainer, m_trackContainerName) ); + } + else{ATH_MSG_ERROR("Failed loading IdTrackparticleContainer container"); + return StatusCode::FAILURE; + } + + // load vertices + if(evtStore()->contains<xAOD::VertexContainer>(m_vertexContainerName)) { + CHECK( evtStore()->retrieve(vertexContainer, m_vertexContainerName) ); + } + else{ATH_MSG_ERROR("Failed loading vertex container"); + return StatusCode::FAILURE; + } + + std::vector<const xAOD::Vertex*> outVtxContainer; + + //Convert m_cones (done per-event to avoid needing extra public dependency) + + 
std::vector<xAOD::Iso::IsolationType> cones; cones.resize(m_cones.size()); + + for (unsigned int i =0; i< m_cones.size(); i++) + cones[i] = xAOD::Iso::IsolationType(m_cones[i]); + //for(unsigned int cone : m_cones) + // cones.push_back(xAOD::Iso::IsolationType(cone)); + + ATH_MSG_DEBUG("The provided IsolationTypes are re-ordered internally"); + std::sort(cones.begin(),cones.end(),[](xAOD::Iso::IsolationType i, xAOD::Iso::IsolationType j) { return xAOD::Iso::coneSize(i) > xAOD::Iso::coneSize(j); } ); + + // loop over vertices + for(auto vertex : *vertexContainer){ + + bool passed = false; + for(std::vector<std::string>::const_iterator flagItr = m_passFlags.begin(); flagItr!=m_passFlags.end(); ++flagItr) { + SG::AuxElement::Accessor<Char_t> flagAcc(*flagItr); + if(flagAcc.isAvailable(*vertex) && flagAcc(*vertex) != 0) { + passed = true; + break; + } + } // end of loop over flags + if(passed){ + if(!m_doIsoPerTrk) { // for legacy + if(vertex->trackParticleLinks().size() != 3)ATH_MSG_WARNING("Vertex without 3 tracks, it has "<< vertex->trackParticleLinks().size() <<" instead"); + } + else { + if(m_removeDuplicate) { + if( isContainedIn(vertex, outVtxContainer) ) continue; + outVtxContainer.push_back(vertex); + } + } + + TLorentzVector candidate; + + std::set<const xAOD::TrackParticle*> exclusionset; + + for(auto part : vertex->trackParticleLinks()){ //Loop over tracks linked to vertex + candidate += (*part)->p4(); + exclusionset.insert( *part ); //If it crashes use the direct TP from the vertex + } + //No! 
the above candidate will fail acceptance of isolation() because it's neither a muon nor a TrackParticle + + //Make a dummy TrackParticle, otherwise TrackIsolationTool cannot deal with it + xAOD::TrackParticle candidate_slyTrack; + candidate_slyTrack.makePrivateStore(); + candidate_slyTrack.setDefiningParameters(0, 0., candidate.Phi(), candidate.Theta(), 0./*1./candidate.P()*/); + //The precision goes down a bit, but it's a matter of 10e-7 with our values of interest + + //Make a correctionlist such that the given exclusionset will be removed from the used tracks + //There is no danger that the input particle will be excluded, as it is not part of inDetTrackContainer + xAOD::TrackCorrection corrlist; + corrlist.trackbitset.set(static_cast<unsigned int>(xAOD::Iso::coreTrackPtr)); + + + string vtxType_name[3] = {"SumPt", "A0", "Z0"}; + + xAOD::BPhysHelper vertex_h(vertex); //Use the BPhysHelper to access vertex quantities + + + //Loop over refitted primary vertex choice + for(unsigned int vertex_type = 0 ; vertex_type<= xAOD::BPhysHelper::PV_MIN_Z0 ; vertex_type++ ){ + + if((m_vertexType & (1 << vertex_type ) ) == 0)continue; //Stop if the type of vertex is not required + + + //if(debug should go outside!!!) 
+ + ATH_MSG_DEBUG("List of cone types" ); + + + for(unsigned int i =0; i < cones.size(); i++){ + + ATH_MSG_DEBUG("cone type = "<< xAOD::Iso::toString(xAOD::Iso::IsolationType(cones[i])) ); + // ATH_MSG_DEBUG("isolation value "<< vtxType_name[vertex_type] << " = "<< result.ptcones[i] ); + // ATH_MSG_DEBUG("isolation value "<<vtxType_name[vertex_type] <<" = "<< result.ptcones[i] ); + } + + + + const xAOD::Vertex* refVtx = vertex_h.pv( static_cast<xAOD::BPhysHelper::pv_type>(vertex_type) ); //Fix the cast + + xAOD::TrackIsolation result; + + if(!m_doIsoPerTrk) { + m_trackIsoTool->trackIsolation(result, candidate_slyTrack, cones, corrlist, refVtx, &exclusionset, idTrackParticleContainer); + + + //Decorate the vertex with all the isolation values + for(unsigned int i =0; i < cones.size(); i++){ + + string variableName; + + variableName = xAOD::Iso::toString(xAOD::Iso::IsolationType(cones[i])); + variableName += vtxType_name[vertex_type]; + + SG::AuxElement::Decorator<float> isolation(variableName); + isolation(*vertex) = result.ptcones[i]; + + } + } + else { + for(size_t i=0; i<vertex->nTrackParticles(); i++) { + m_trackIsoTool->trackIsolation(result, *vertex->trackParticle(i), cones, corrlist, refVtx, &exclusionset, idTrackParticleContainer); + + for(unsigned int j =0; j < cones.size(); j++) { + string variableName; + variableName = xAOD::Iso::toString(xAOD::Iso::IsolationType(cones[j])); + variableName += vtxType_name[vertex_type] + "_trk" + std::to_string(i+1); + SG::AuxElement::Decorator<float> isolation(variableName); + isolation(*vertex) = result.ptcones[j]; + } + } + } + } + + }// End of if passed + }// end of loop over vertices + + return StatusCode::SUCCESS; + } + +}//End of namespace DerivationFramework + diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/components/DerivationFrameworkBPhys_entries.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/components/DerivationFrameworkBPhys_entries.cxx new file mode 
100644 index 0000000000000000000000000000000000000000..9bd3917c40d8d4b11d0b39277705fdd5a2e89d20 --- /dev/null +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkBPhys/src/components/DerivationFrameworkBPhys_entries.cxx @@ -0,0 +1,74 @@ +#include "DerivationFrameworkBPhys/Reco_Vertex.h" +#include "DerivationFrameworkBPhys/Reco_4mu.h" +#include "DerivationFrameworkBPhys/Select_onia2mumu.h" +#include "DerivationFrameworkBPhys/Thin_vtxTrk.h" +#include "DerivationFrameworkBPhys/Thin_vtxDuplicates.h" +#include "DerivationFrameworkBPhys/AugOriginalCounts.h" +#include "DerivationFrameworkBPhys/BPhysPVThinningTool.h" +#include "DerivationFrameworkBPhys/VertexCaloIsolation.h" +#include "DerivationFrameworkBPhys/VertexTrackIsolation.h" +#include "DerivationFrameworkBPhys/BPhysMetadataBase.h" +#include "DerivationFrameworkBPhys/Bmumu_metadata.h" +//#include "DerivationFrameworkBPhys/CfAthAlgTool.h" +#include "DerivationFrameworkBPhys/Bmumu_reco_mumu.h" +#include "DerivationFrameworkBPhys/FourMuonTool.h" +//#include "DerivationFrameworkBPhys/BPhysAddMuonBasedInvMass.h" +//#include "DerivationFrameworkBPhys/BPhysVertexTrackBase.h" +//#include "DerivationFrameworkBPhys/BVertexTrackIsoTool.h" +//#include "DerivationFrameworkBPhys/BMuonTrackIsoTool.h" +//#include "DerivationFrameworkBPhys/BVertexClosestTrackTool.h" +#include "DerivationFrameworkBPhys/BTrackVertexMapLogger.h" +//#include "DerivationFrameworkBPhys/Select_Bmumu.h" +//#include "DerivationFrameworkBPhys/BPhysVarBlinder.h" +//#include "DerivationFrameworkBPhys/BmumuThinningTool.h" +#include "DerivationFrameworkBPhys/VertexPlus1TrackCascade.h" +#include "DerivationFrameworkBPhys/TriggerCountToMetadata.h" +#include "DerivationFrameworkBPhys/MuonExtrapolationTool.h" +#include "DerivationFrameworkBPhys/CascadeTools.h" +#include "DerivationFrameworkBPhys/Reco_V0Finder.h" +#include "DerivationFrameworkBPhys/JpsiPlusV0Cascade.h" +#include "DerivationFrameworkBPhys/JpsiPlusDsCascade.h" +#include 
"DerivationFrameworkBPhys/JpsiPlusDpstCascade.h" +#include "DerivationFrameworkBPhys/JpsiPlusDs1Cascade.h" +#include "DerivationFrameworkBPhys/ReVertex.h" +#include "DerivationFrameworkBPhys/BPhysConversionFinder.h" +#include "DerivationFrameworkBPhys/Cascade3Plus1.h" + +using namespace DerivationFramework; + +DECLARE_COMPONENT( Reco_4mu ) +DECLARE_COMPONENT( Reco_Vertex ) +DECLARE_COMPONENT( Select_onia2mumu ) +DECLARE_COMPONENT( Thin_vtxTrk ) +DECLARE_COMPONENT( Thin_vtxDuplicates ) +DECLARE_COMPONENT( AugOriginalCounts ) +DECLARE_COMPONENT( BPhysPVThinningTool ) +DECLARE_COMPONENT( VertexCaloIsolation ) +DECLARE_COMPONENT( VertexTrackIsolation ) +DECLARE_COMPONENT( BPhysMetadataBase ) +DECLARE_COMPONENT( Bmumu_metadata ) +//DECLARE_COMPONENT( CfAthAlgTool ) +//DECLARE_COMPONENT( Bmumu_reco_mumu ) +DECLARE_COMPONENT( FourMuonTool ) +//DECLARE_COMPONENT( BPhysAddMuonBasedInvMass ) +//DECLARE_COMPONENT( BPhysVertexTrackBase ) +//DECLARE_COMPONENT( BVertexTrackIsoTool ) +//DECLARE_COMPONENT( BMuonTrackIsoTool ) +//DECLARE_COMPONENT( BVertexClosestTrackTool ) +DECLARE_COMPONENT( BTrackVertexMapLogger ) +//DECLARE_COMPONENT( Select_Bmumu ) +//DECLARE_COMPONENT( BPhysVarBlinder ) +//DECLARE_COMPONENT( BmumuThinningTool ) +DECLARE_COMPONENT( VertexPlus1TrackCascade ) +DECLARE_COMPONENT( TriggerCountToMetadata ) +DECLARE_COMPONENT( MuonExtrapolationTool ) +DECLARE_COMPONENT( CascadeTools ) +DECLARE_COMPONENT( Reco_V0Finder ) +DECLARE_COMPONENT( JpsiPlusV0Cascade ) +DECLARE_COMPONENT( JpsiPlusDsCascade ) +DECLARE_COMPONENT( JpsiPlusDpstCascade ) +DECLARE_COMPONENT( JpsiPlusDs1Cascade ) +DECLARE_COMPONENT( ReVertex ) +DECLARE_COMPONENT( BPhysConversionFinder ) +DECLARE_COMPONENT( Cascade3Plus1 ) + diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkEGamma/share/EGAM7.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkEGamma/share/EGAM7.py index 3b9755c3556efc976106adf84394d06f6965525f..f47fc4e7d1353f991ea952b9d7137e16869e9976 100644 --- 
a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkEGamma/share/EGAM7.py +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkEGamma/share/EGAM7.py @@ -384,3 +384,9 @@ EGAM7SlimmingHelper.ExtraVariables += PhotonsCPDetailedContent # This line must come after we have finished configuring EGAM7SlimmingHelper EGAM7SlimmingHelper.AppendContentToStream(EGAM7Stream) + +#Add full CellContainer +EGAM7Stream.AddItem("CaloCellContainer#AODCellContainer") +EGAM7Stream.AddItem("CaloClusterCellLinkContainer#egammaClusters_links") + + diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkJetEtMiss/python/ExtendedJetCommon.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkJetEtMiss/python/ExtendedJetCommon.py index b255d5cc77defe3f2b4dd7041b880f6fb4bed7ff..8d4942a7a588ed9821d241ccbf33cec705b07ad6 100644 --- a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkJetEtMiss/python/ExtendedJetCommon.py +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkJetEtMiss/python/ExtendedJetCommon.py @@ -265,7 +265,7 @@ def applyJetCalibration(jetalg,algname,sequence,largeRjetconfig = 'comb', suffix 'AntiKt4LCTopo':('JES_data2016_data2015_Recommendation_Dec2016_rel21.config', 'JetArea_Residual_EtaJES_GSC'), 'AntiKt4EMPFlow':('JES_MC16Recommendation_Consolidated_PFlow_Apr2019_Rel21.config', - 'JetArea_Residual_EtaJES_GSC_Smear'), + 'JetArea_Residual_EtaJES_GSC'), 'AntiKt10LCTopoTrimmedPtFrac5SmallR20':('JES_MC15recommendation_FatJet_Nov2016_QCDCombinationUncorrelatedWeights.config', 'EtaJES_JMS'), 'AntiKt2LCTopo':('JES_2015_2016_data_Rscan2LC_18Dec2018_R21.config', @@ -448,6 +448,79 @@ def addJetTruthLabel(jetalg,algname,labelname,sequence): extjetlog.info('ExtendedJetCommon: Applying JetTruthLabel augmentation to jet collection: ' + jetalg + 'Jets' + ' using ' + labelname +' definition') applyJetAugmentation(jetalg,algname,sequence,jetaugtool) +################################################################## + +def 
getPFlowfJVT(jetalg,algname,sequence,primaryVertexCont="PrimaryVertices",trackVertexAssociation="JetTrackVtxAssoc",overlapLabel="",outLabel="fJvt",includePV=False): + supportedJets = ['AntiKt4EMPFlow','AntiKt4PFlowCustomVtxHgg'] + if jetalg not in supportedJets: + extjetlog.error('*** PFlow fJvt augmentation requested for unsupported jet collection {}! ***'.format(jetalg)) + return + else: + from AthenaCommon.AppMgr import ToolSvc + jetaugtool = getJetAugmentationTool(jetalg,suffix=algname) + + #Check if the calibration and JVT tools exist already + jetcalibtoolname_default = 'DFJetCalib_'+jetalg + jetjvttoolname_default = 'DFJetJvt_'+jetalg + + if '_BTagging' in jetalg: + jetalg_basename = jetalg[:jetalg.find('_BTagging')] + elif 'PFlowCustomVtx' in jetalg: + jetalg_basename = 'AntiKt4EMPFlow' + else: + jetalg_basename = jetalg + + jvtefftoolname = getJvtEffToolName(jetalg_basename) + + #Jet calibration tool + if hasattr(ToolSvc, jetcalibtoolname_default): + jetaugtool.JetCalibTool = getattr(ToolSvc, jetcalibtoolname_default) + else: + applyJetCalibration(jetalg,algname,sequence,suffix=algname) + + #JVT tool + if hasattr(ToolSvc, jetjvttoolname_default) and hasattr(ToolSvc, jvtefftoolname): + jetaugtool.JetJvtTool = getattr(ToolSvc, jetjvttoolname_default) + jetaugtool.JetJvtEffTool = getattr(ToolSvc, jvtefftoolname) + else: + updateJVT(jetalg,algname,sequence,customVxColl=primaryVertexCont,suffix=algname) + + # Calibration tool specific for pFlow fJVT: without GSC and smearing + jetcalibtoolname = 'DFJetCalib_PFfJvt_'+jetalg + if hasattr(ToolSvc, jetcalibtoolname): + jetaugtool.JetCalibToolfJvt = getattr(ToolSvc,jetcalibtoolname) + else: + jetcalibrationtool = CfgMgr.JetCalibrationTool(jetcalibtoolname, + JetCollection=jetalg, + ConfigFile="JES_MC16Recommendation_Consolidated_PFlow_Apr2019_Rel21.config", + CalibSequence="JetArea_Residual_EtaJES", + CalibArea="00-04-82", + IsData=False) + + ToolSvc += jetcalibrationtool + + wpfotoolname = "DFwPFO_"+jetalg+algname 
+ wpfotool = CfgMgr.CP__WeightPFOTool(wpfotoolname) + + pffjvttoolname = 'DFJetPFfJvt_'+jetalg+algname + jetCont = jetalg+"Jets" + + if hasattr(ToolSvc,pffjvttoolname): + jetaugtool.JetForwardPFlowJvtTool = getattr(ToolSvc,pffjvttoolname) + jetaugtool.fJvtMomentKey = outLabel + else: + pffjvttool = CfgMgr.JetForwardPFlowJvtTool(pffjvttoolname, + verticesName=primaryVertexCont, JetContainer=jetCont, + TrackVertexAssociation=jtm.tvassoc.TrackVertexAssociation, + WeightPFOTool=wpfotool, JetCalibrationTool=jetcalibrationtool, + ORName=overlapLabel, FjvtRawName='DFCommonJets_'+outLabel, includePV=includePV) + ToolSvc += pffjvttool + jetaugtool.JetForwardPFlowJvtTool = pffjvttool + jetaugtool.fJvtMomentKey = outLabel + + extjetlog.info('ExtendedJetCommon: Applying PFlow fJvt augmentation to jet collection: '+jetalg+'Jets') + applyJetAugmentation(jetalg,algname,sequence,jetaugtool) + ################################################################## def applyBTaggingAugmentation(jetalg,algname='default',sequence=DerivationFrameworkJob,btagtooldict={}): @@ -787,6 +860,8 @@ def addCHSPFlowObjects(): ################################################################## applyJetCalibration_xAODColl("AntiKt4EMTopo") updateJVT_xAODColl("AntiKt4EMTopo") +applyJetCalibration_xAODColl("AntiKt4EMPFlow") +updateJVT_xAODColl("AntiKt4EMPFlow") applyOverlapRemoval() eventCleanLoose_xAODColl("AntiKt4EMTopo") diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkJetEtMiss/src/JetAugmentationTool.cxx b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkJetEtMiss/src/JetAugmentationTool.cxx index dba95878b6dfac5f9acd1d202a1e04cc57b32c11..ce917ddba396d1e21bfeff1b53e0838f3715f256 100644 --- a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkJetEtMiss/src/JetAugmentationTool.cxx +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkJetEtMiss/src/JetAugmentationTool.cxx @@ -23,6 +23,7 @@ namespace DerivationFramework { m_jvtTool(""), m_jetJvtEfficiencyTool(""), 
m_dojvt(false), + m_dofjvt(false), m_dobtag(false), m_jetTrackSumMomentsTool(""), m_decoratetracksum(false), @@ -44,6 +45,8 @@ namespace DerivationFramework { declareProperty("JvtMomentKey", m_jvtMomentKey = "Jvt"); declareProperty("JetJvtTool", m_jvtTool); declareProperty("JetJvtEffTool", m_jetJvtEfficiencyTool); + declareProperty("fJvtMomentKey", m_fjvtMomentKey = "fJvt"); + declareProperty("JetForwardPFlowJvtTool", m_fjvtTool); declareProperty("JetBtagTools", m_btagSelTools); declareProperty("JetBtagWPs", m_btagWP); declareProperty("JetTrackSumMomentsTool", m_jetTrackSumMomentsTool); @@ -58,6 +61,9 @@ namespace DerivationFramework { { ATH_MSG_INFO("Initialising JetAugmentationTool"); + m_container_key = m_containerName; + ATH_CHECK(m_container_key.initialize()); + if(!m_jetCalibTool.empty()) { CHECK(m_jetCalibTool.retrieve()); ATH_MSG_INFO("Augmenting jets with calibration \"" << m_momentPrefix+m_calibMomentKey << "\""); @@ -79,12 +85,27 @@ namespace DerivationFramework { ATH_MSG_INFO("Augmenting jets with updated JVT \"" << m_momentPrefix+m_jvtMomentKey << "\""); m_dojvt = true; + m_acc_JVT = std::make_unique< SG::AuxElement::ConstAccessor<float> >(m_momentPrefix+m_jvtMomentKey); + m_acc_passJVT = std::make_unique< SG::AuxElement::ConstAccessor<char> >(m_momentPrefix+"pass"+m_jvtMomentKey); + m_jvt_key = m_containerName + "." + m_momentPrefix + m_jvtMomentKey; m_passJvt_key = m_containerName + "." + m_momentPrefix + "pass" + m_jvtMomentKey; ATH_CHECK(m_jvt_key.initialize()); ATH_CHECK(m_passJvt_key.initialize()); + // PFlow fJVT tool + if(!m_fjvtTool.empty()) { + CHECK(m_fjvtTool.retrieve()); + ATH_MSG_INFO("Augmenting (PFlow) jets with fJVT \"" << m_momentPrefix+m_fjvtMomentKey << "\""); + m_dofjvt = true; + + m_acc_fJVT = std::make_unique< SG::AuxElement::ConstAccessor<float> >(m_momentPrefix+m_fjvtMomentKey); + + m_fjvt_key = m_containerName + "." 
+ m_momentPrefix + m_fjvtMomentKey; + ATH_CHECK(m_fjvt_key.initialize()); + } + if(!m_btagSelTools.empty()) { size_t ibtag(0); for(const auto& tool : m_btagSelTools) { @@ -106,6 +127,7 @@ namespace DerivationFramework { } if(!m_jetTrackSumMomentsTool.empty()) { + ATH_MSG_INFO("Augmenting jets with track sum moments \"" << m_momentPrefix << "TrackSumMass,Pt\""); CHECK(m_jetTrackSumMomentsTool.retrieve()); ATH_MSG_INFO("Augmenting jets with track sum moments \"" << m_momentPrefix << "TrackSumMass,Pt\""); m_decoratetracksum = true; @@ -168,7 +190,7 @@ namespace DerivationFramework { } } } - + if(!m_jetTruthLabelingTool.empty()) { CHECK(m_jetTruthLabelingTool.retrieve()); ATH_MSG_INFO("Augmenting jets with truthlabeling"); @@ -211,10 +233,11 @@ namespace DerivationFramework { StatusCode JetAugmentationTool::addBranches() const { + // retrieve container - const xAOD::JetContainer* jets(0); - if( evtStore()->retrieve( jets, m_containerName ).isFailure() ) { - ATH_MSG_WARNING ("Couldn't retrieve jets with key: " << m_containerName ); + SG::ReadHandle<xAOD::JetContainer> jets(m_container_key); + if( !jets.isValid() ) { + ATH_MSG_WARNING ("Couldn't retrieve jets with key: " << m_container_key.key() ); return StatusCode::FAILURE; } @@ -229,6 +252,31 @@ namespace DerivationFramework { ATH_MSG_WARNING("Problem applying jet calibration"); return StatusCode::FAILURE; } + + if(m_dojvt){ + + SG::WriteDecorHandle<xAOD::JetContainer, float> jvt_handle(m_jvt_key); + SG::WriteDecorHandle<xAOD::JetContainer, char> passJvt_handle(m_passJvt_key); + + //First update the Jvt criteria (needed for fJVT) + for(const xAOD::Jet *jet : *jets_copy) { + + float jvt_value = m_jvtTool->updateJvt(*jet); + jvt_handle(*jet)= jvt_value; + ATH_MSG_VERBOSE("Calibrated JVT: " << jvt_value); + + bool passJVT = m_jetJvtEfficiencyTool->passesJvtCut(*jet); + passJvt_handle(*jet) = passJVT; + } + + // pFlow fJVT + if(m_dofjvt){ + if((m_fjvtTool->modify(*jets_copy)).isFailure()){ + ATH_MSG_ERROR("Problem 
computing fJVT"); + return StatusCode::FAILURE; + } + } + } } if(m_decoratetracksum){ @@ -291,16 +339,28 @@ namespace DerivationFramework { if(m_dojvt) { - SG::WriteDecorHandle<xAOD::JetContainer, float> jvt_handle(m_jvt_key); - SG::WriteDecorHandle<xAOD::JetContainer, char> passJvt_handle(m_passJvt_key); + SG::WriteDecorHandle<xAOD::JetContainer, float> jvt_handle(m_jvt_key); + SG::WriteDecorHandle<xAOD::JetContainer, char> passJvt_handle(m_passJvt_key); + + if(m_acc_JVT->isAvailable(*jet)){ + jvt_handle(jet_orig) = (*m_acc_JVT)(*jet); + } + + bool passJVT = false; + + if(m_acc_passJVT->isAvailable(*jet)){ + passJVT = (*m_acc_passJVT)(*jet); + passJvt_handle(jet_orig) = passJVT; + } + + if(m_dofjvt){ + + SG::WriteDecorHandle<xAOD::JetContainer, float> fjvt_handle(m_fjvt_key); + if(m_acc_fJVT->isAvailable(*jet)){ + fjvt_handle(jet_orig) = (*m_acc_fJVT)(*jet); + } + } - float jvt_value = m_jvtTool->updateJvt(*jet); - jvt_handle(jet_orig)= jvt_value; - ATH_MSG_VERBOSE("Calibrated JVT: " << jvt_value); - - bool passJVT = m_jetJvtEfficiencyTool->passesJvtCut(jet_orig); - passJvt_handle(jet_orig) = passJVT; - if(m_dobtag) { size_t ibtag(0); for(const auto& tool : m_btagSelTools) { @@ -332,14 +392,14 @@ namespace DerivationFramework { if(m_acc_GhostTruthAssociationFraction->isAvailable(*jet)){ ghostTruthAssocFrac_handle(jet_orig) = (*m_acc_GhostTruthAssociationFraction)(*jet); - ATH_MSG_INFO("GhostTruthAssociationFraction: " << (*m_acc_GhostTruthAssociationFraction)(jet_orig) ); + ATH_MSG_VERBOSE("GhostTruthAssociationFraction: " << (*m_acc_GhostTruthAssociationFraction)(jet_orig) ); } if(m_acc_GhostTruthAssociationLink->isAvailable(*jet)){ ghostTruthAssocLink_handle(jet_orig) = (*m_acc_GhostTruthAssociationLink)(*jet); - ATH_MSG_INFO("GhostTruthAssociationLink: " << (*m_acc_GhostTruthAssociationLink)(jet_orig) ); + ATH_MSG_VERBOSE("GhostTruthAssociationLink: " << (*m_acc_GhostTruthAssociationLink)(jet_orig) ); } } - + if(m_decoratetruthlabel){ 
SG::WriteDecorHandle<xAOD::JetContainer, float> truthLabel_dRW_handle(m_truthLabel_dRW_key); @@ -376,9 +436,12 @@ namespace DerivationFramework { if(m_acc_Associated_truthjet_pt->isAvailable(*jet)) associated_truthjet_pt_handle(jet_orig) = (*m_acc_Associated_truthjet_pt)(*jet); if(m_acc_Associated_truthjet_eta->isAvailable(*jet)) associated_truthjet_eta_handle(jet_orig) = (*m_acc_Associated_truthjet_eta)(*jet); } - + } return StatusCode::SUCCESS; } + } + + diff --git a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkJetEtMiss/src/JetAugmentationTool.h b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkJetEtMiss/src/JetAugmentationTool.h index 9363efa601c402bd1d846b771ea68e48277aaa19..ff38519d9af77965cccec4eb6e4326bc64a44f7c 100644 --- a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkJetEtMiss/src/JetAugmentationTool.h +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkJetEtMiss/src/JetAugmentationTool.h @@ -43,6 +43,7 @@ namespace DerivationFramework { private: std::string m_momentPrefix; std::string m_containerName; + SG::ReadHandleKey<xAOD::JetContainer> m_container_key {this, "InputJetsKey", "", "Accessor for input JetContainer"}; // // implement augmentations explicitly to avoid need to parse lists of moments to copy // @@ -63,10 +64,15 @@ namespace DerivationFramework { ToolHandle<CP::IJetJvtEfficiency> m_jetJvtEfficiencyTool; std::string m_jvtMomentKey; bool m_dojvt; + std::unique_ptr< SG::AuxElement::ConstAccessor<float> > m_acc_JVT; + std::unique_ptr< SG::AuxElement::ConstAccessor<char> > m_acc_passJVT; //PFlow fJVT - std::unique_ptr< SG::AuxElement::ConstAccessor<float> > m_acc_fjvt; + SG::WriteDecorHandleKey<xAOD::JetContainer> m_fjvt_key {this, "fJVTKey", "", "Decoration for fJVT"}; + ToolHandle<IJetModifier> m_fjvtTool; std::string m_fjvtMomentKey; + bool m_dofjvt; + std::unique_ptr< SG::AuxElement::ConstAccessor<float> > m_acc_fJVT; // b-tagging @author tripiana@cern.ch std::vector<std::string> m_btagWP; diff --git 
a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkPhys/share/PHYS.py b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkPhys/share/PHYS.py index e9e483b64cb86705e744435bbcf275589041544c..64a386f315ec1ed2b8bd09390d8b8d5ffc3924a6 100644 --- a/PhysicsAnalysis/DerivationFramework/DerivationFrameworkPhys/share/PHYS.py +++ b/PhysicsAnalysis/DerivationFramework/DerivationFrameworkPhys/share/PHYS.py @@ -14,7 +14,7 @@ from DerivationFrameworkEGamma import EGammaCommon from DerivationFrameworkEGamma import ElectronsCPDetailedContent from DerivationFrameworkMuons import MuonsCommon from DerivationFrameworkJetEtMiss.JetCommon import OutputJets -from DerivationFrameworkJetEtMiss.ExtendedJetCommon import replaceAODReducedJets, addDefaultTrimmedJets, addJetTruthLabel, addQGTaggerTool +from DerivationFrameworkJetEtMiss.ExtendedJetCommon import replaceAODReducedJets, addDefaultTrimmedJets, addJetTruthLabel, addQGTaggerTool, getPFlowfJVT from DerivationFrameworkJetEtMiss import METCommon from TriggerMenu.api.TriggerAPI import TriggerAPI from TriggerMenu.api.TriggerEnums import TriggerPeriod, TriggerType @@ -189,7 +189,7 @@ addQGTaggerTool(jetalg="AntiKt4EMTopo",sequence=SeqPHYS,algname="QGTaggerToolAlg addQGTaggerTool(jetalg="AntiKt4EMPFlow",sequence=SeqPHYS,algname="QGTaggerToolPFAlg") # fJVT -# getPFlowfJVT(jetalg='AntiKt4EMPFlow',sequence=SeqPHYS, algname='PHYSJetForwardPFlowJvtToolAlg') +getPFlowfJVT(jetalg='AntiKt4EMPFlow',sequence=SeqPHYS, algname='PHYSJetForwardPFlowJvtToolAlg') #==================================================================== # EGAMMA diff --git a/PhysicsAnalysis/ElectronPhotonID/ElectronPhotonSelectorTools/Root/AsgElectronLikelihoodTool.cxx b/PhysicsAnalysis/ElectronPhotonID/ElectronPhotonSelectorTools/Root/AsgElectronLikelihoodTool.cxx index 47e1fd55c2e4a4c48a3af23a4e9065c786f8e0a1..65bd255b1ba0c4a9c9446cf931a1433852aae18e 100644 --- a/PhysicsAnalysis/ElectronPhotonID/ElectronPhotonSelectorTools/Root/AsgElectronLikelihoodTool.cxx +++ 
b/PhysicsAnalysis/ElectronPhotonID/ElectronPhotonSelectorTools/Root/AsgElectronLikelihoodTool.cxx @@ -96,7 +96,7 @@ AsgElectronLikelihoodTool::AsgElectronLikelihoodTool(const std::string& myname) declareProperty("useOneExtraHighETLHBin",m_rootTool->m_useOneExtraHighETLHBin,"Use one extra bin for high ET LH"); // cut on Wstot above HighETBinThreshold declareProperty("CutWstotAtHighET",m_rootTool->m_cutWstotAtHighET,"Cut on Wstot above HighETBinThreshold"); - // cut on EoverP above HighETBinThreshold + // cut on EoverP above HighETBinThreshold declareProperty("CutEoverPAtHighET",m_rootTool->m_cutEoverPAtHighET,"Cut on EoverP above HighETBinThreshold"); // ET threshold for using high ET cuts and bin declareProperty("HighETBinThreshold",m_rootTool->m_highETBinThreshold,"ET threshold for using high ET cuts and bin"); @@ -145,34 +145,34 @@ StatusCode AsgElectronLikelihoodTool::initialize() { ATH_MSG_INFO("initialize : WP " << m_WorkingPoint.size() << " " << m_configFile.size()); - + std::string PDFfilename(""); //Default if(!m_WorkingPoint.empty()){ m_configFile=AsgConfigHelper::findConfigFile(m_WorkingPoint,EgammaSelectors::LHPointToConfFile); ATH_MSG_INFO("operating point : " << this->getOperatingPointName()); } - + if(!m_configFile.empty()){ std::string configFile = PathResolverFindCalibFile( m_configFile); - if(configFile.empty()){ + if(configFile.empty()){ ATH_MSG_ERROR("Could not locate " << m_configFile ); return StatusCode::FAILURE; - } + } ATH_MSG_DEBUG("Configfile to use " << m_configFile ); TEnv env(configFile.c_str()); - + // Get the input PDFs in the tool. ATH_MSG_DEBUG("Get the input PDFs in the tool "); - + if(!m_pdfFileName.empty()) { //If the property was set by the user, take that. 
ATH_MSG_INFO("Setting user specified PDF file " << m_pdfFileName); PDFfilename = m_pdfFileName; } else { if (m_configFile.find("dev/") != std::string::npos) { - + std::string PDFdevval = env.GetValue("inputPDFFileName", "ElectronPhotonSelectorTools/v1/ElectronLikelihoodPdfs.root"); PDFfilename = ("dev/"+PDFdevval); ATH_MSG_DEBUG ( "Getting the input PDFs from: " << PDFfilename ); @@ -235,25 +235,25 @@ StatusCode AsgElectronLikelihoodTool::initialize() m_rootTool->m_discLooseForPileupTransform4GeV = AsgConfigHelper::HelperDouble("DiscLooseForPileupTransform4GeV",env); m_rootTool->m_discMaxForPileupTransform = env.GetValue("DiscMaxForPileupTransform", 2.0); m_rootTool->m_pileupMaxForPileupTransform = env.GetValue("PileupMaxForPileupTransform", 50); - + } else{ //Error if it cant find the conf ATH_MSG_ERROR("Could not find configuration file"); return StatusCode::FAILURE; } ///-----------End of text config---------------------------- - + // Setup primary vertex key handle ATH_CHECK( m_primVtxContKey.initialize(m_usePVCont) ); - // Setup HI container key handle (must come after init from env) + // Setup HI container key handle (must come after init from env) bool doCentralityTransform = m_rootTool->m_doCentralityTransform; ATH_CHECK(m_HIESContKey.initialize(doCentralityTransform&&m_useCaloSumsCont)); - + // Get the name of the current operating point, and massage the other strings accordingly ATH_MSG_VERBOSE( "Going to massage the labels based on the provided operating point..." ); // Get the message level and set the underlying ROOT tool message level accordingly m_rootTool->msg().setLevel(this->msg().level()); - + // We need to initialize the underlying ROOT TSelectorTool if ( m_rootTool->initialize().isFailure() ){ ATH_MSG_ERROR ( "ERROR! Could not initialize the TElectronLikelihoodTool!" 
); @@ -272,7 +272,7 @@ const asg::AcceptInfo& AsgElectronLikelihoodTool::getAcceptInfo() const } //============================================================================= -// The main accept method: the actual cuts are applied here +// The main accept method: the actual cuts are applied here //============================================================================= asg::AcceptData AsgElectronLikelihoodTool::accept(const xAOD::Electron* el, double mu ) const { @@ -290,7 +290,7 @@ asg::AcceptData AsgElectronLikelihoodTool::accept(const EventContext& ctx, const if ( !cluster ){ ATH_MSG_ERROR("exiting because cluster is NULL " << cluster); return m_rootTool->accept(); - } + } if( !cluster->hasSampling(CaloSampling::CaloSample::EMB2) && !cluster->hasSampling(CaloSampling::CaloSample::EME2) ){ ATH_MSG_ERROR("Failed, cluster is missing samplings EMB2 and EME2"); @@ -298,29 +298,29 @@ asg::AcceptData AsgElectronLikelihoodTool::accept(const EventContext& ctx, const } const double energy = cluster->e(); - const float eta = (cluster->etaBE(2)); + const float eta = (cluster->etaBE(2)); if( isForwardElectron(el,eta) ){ ATH_MSG_WARNING("Failed, this is a forward electron! The AsgElectronLikelihoodTool is only suitable for central electrons!"); - return m_rootTool->accept(); + return m_rootTool->accept(); } - + double et = 0.; if(el->trackParticle() && !m_caloOnly) { et = ( cosh(el->trackParticle()->eta()) != 0.) ? energy/cosh(el->trackParticle()->eta()) : 0.; - } else + } else et = ( cosh(eta) != 0.) ? 
energy/cosh(eta) : 0.; - + // number of track hits uint8_t nSiHitsPlusDeadSensors(0); uint8_t nPixHitsPlusDeadSensors(0); - bool passBLayerRequirement(false); + bool passBLayerRequirement(false); float d0(0.0); float deltaEta=0; float deltaPhiRescaled2=0; float wstot=0; float EoverP=0; - uint8_t ambiguityBit(0); + uint8_t ambiguityBit(0); double ip(0); bool allFound = true; @@ -335,17 +335,17 @@ asg::AcceptData AsgElectronLikelihoodTool::accept(const EventContext& ctx, const // get the ambiguity type from the decoration if ( !m_rootTool->m_cutAmbiguity.empty() ) { if ( el->isAvailable<uint8_t>("ambiguityType") ) { - static const SG::AuxElement::Accessor<uint8_t> acc("ambiguityType"); + static const SG::AuxElement::Accessor<uint8_t> acc("ambiguityType"); ambiguityBit = acc(*el); } else { allFound = false; notFoundList += "ambiguityType "; } } - + if(!m_caloOnly) { // retrieve associated track - const xAOD::TrackParticle* t = el->trackParticle(); + const xAOD::TrackParticle* t = el->trackParticle(); if (t) { nSiHitsPlusDeadSensors = ElectronSelectorHelpers::numberOfSiliconHitsAndDeadSensors(t); nPixHitsPlusDeadSensors = ElectronSelectorHelpers::numberOfPixelHitsAndDeadSensors(t); @@ -379,7 +379,7 @@ asg::AcceptData AsgElectronLikelihoodTool::accept(const EventContext& ctx, const ip = mu; } - // for now don't cache. + // for now don't cache. double likelihood = calculate(ctx, el, ip); ATH_MSG_VERBOSE(Form( @@ -395,7 +395,7 @@ asg::AcceptData AsgElectronLikelihoodTool::accept(const EventContext& ctx, const ATH_MSG_ERROR("Skipping LH rectangular cuts! 
The following variables are missing: " << notFoundList); return m_rootTool->accept(); } - + // Get the answer from the underlying ROOT tool return m_rootTool->accept( likelihood, eta, @@ -431,39 +431,39 @@ asg::AcceptData AsgElectronLikelihoodTool::accept(const EventContext& ctx, const if (!m_caloOnly) { if(eg->type() == xAOD::Type::Electron){ const xAOD::Electron* el = static_cast<const xAOD::Electron*>(eg); - return accept(el, mu); - } + return accept(ctx,el, mu); + } ATH_MSG_ERROR("Input is not an electron and not caloOnly is set"); return m_rootTool->accept(); - + } - + //Calo only LH const xAOD::CaloCluster* cluster = eg->caloCluster(); if ( !cluster ){ ATH_MSG_ERROR ("Failed, no cluster."); return m_rootTool->accept(); - } + } if( !cluster->hasSampling(CaloSampling::CaloSample::EMB2) && !cluster->hasSampling(CaloSampling::CaloSample::EME2) ){ ATH_MSG_ERROR("Failed, cluster is missing samplings EMB2 and EME2"); return m_rootTool->accept(); } - + const double energy = cluster->e(); - const float eta = (cluster->etaBE(2)); + const float eta = (cluster->etaBE(2)); if( isForwardElectron(eg,eta) ){ ATH_MSG_WARNING( "Failed, this is a forward electron! The AsgElectronLikelihoodTool is " "only suitable for central electrons!"); return m_rootTool->accept(); } - + const double et = ( cosh(eta) != 0.) ? energy/cosh(eta) : 0.; - + // Variables the EFCaloLH ignores uint8_t nSiHitsPlusDeadSensors(0); uint8_t nPixHitsPlusDeadSensors(0); - bool passBLayerRequirement(false); + bool passBLayerRequirement(false); uint8_t ambiguityBit(0); // Get the pileup or centrality information @@ -478,8 +478,8 @@ asg::AcceptData AsgElectronLikelihoodTool::accept(const EventContext& ctx, const else { ip = mu; } - // for now don't cache. - double likelihood = calculate(ctx, eg, ip); + // for now don't cache. 
+ double likelihood = calculate(ctx, eg, ip); double deltaEta=0; double deltaPhiRescaled2=0; @@ -492,7 +492,7 @@ asg::AcceptData AsgElectronLikelihoodTool::accept(const EventContext& ctx, const // Wstot for use when CutWstotAtHighET vector is filled if( !eg->showerShapeValue(wstot, xAOD::EgammaParameters::wtots1) ){ - allFound = false; + allFound = false; notFoundList += "wtots1 "; } @@ -524,7 +524,7 @@ asg::AcceptData AsgElectronLikelihoodTool::accept(const EventContext& ctx, const ip ); } - + //============================================================================= // The main result method: the actual likelihood is calculated here //============================================================================= @@ -543,14 +543,14 @@ double AsgElectronLikelihoodTool::calculate( const EventContext& ctx, const xAOD if ( !cluster ){ ATH_MSG_ERROR ("Failed, no cluster."); return -999; - } + } if( !cluster->hasSampling(CaloSampling::CaloSample::EMB2) && !cluster->hasSampling(CaloSampling::CaloSample::EME2) ){ ATH_MSG_ERROR("Failed, cluster is missing samplings EMB2 and EME2"); return -999; } const double energy = cluster->e(); - const float eta = cluster->etaBE(2); + const float eta = cluster->etaBE(2); if( isForwardElectron(el,eta) ){ ATH_MSG_WARNING("Failed, this is a forward electron! The AsgElectronLikelihoodTool is only suitable for central electrons!"); @@ -563,7 +563,7 @@ double AsgElectronLikelihoodTool::calculate( const EventContext& ctx, const xAOD } else { et = ( cosh(eta) != 0.) ? 
energy/cosh(eta) : 0.; } - + // number of track hits and other track quantities float trackqoverp(0.0); float d0(0.0); @@ -579,7 +579,7 @@ double AsgElectronLikelihoodTool::calculate( const EventContext& ctx, const xAOD if (!m_caloOnly){ // retrieve associated TrackParticle - const xAOD::TrackParticle* t = el->trackParticle(); + const xAOD::TrackParticle* t = el->trackParticle(); if (t) { trackqoverp = t->qOverP(); @@ -589,12 +589,12 @@ double AsgElectronLikelihoodTool::calculate( const EventContext& ctx, const xAOD d0sigma=sqrtf(vard0); } if( !t->summaryValue(TRT_PID, xAOD::eProbabilityHT) ){ - allFound = false; + allFound = false; notFoundList += "eProbabilityHT "; } //Transform the TRT PID output for use in the LH tool. - double tau = 15.0; + double tau = 15.0; double fEpsilon = 1.0e-30; // to avoid zero division double pid_tmp = TRT_PID; if (pid_tmp >= 1.0) pid_tmp = 1.0 - 1.0e-15; //this number comes from TMVA @@ -603,20 +603,20 @@ double AsgElectronLikelihoodTool::calculate( const EventContext& ctx, const xAOD unsigned int index; if( t->indexOfParameterAtPosition(index, xAOD::LastMeasurement) ) { - - double refittedTrack_LMqoverp = + + double refittedTrack_LMqoverp = t->charge() / sqrt(std::pow(t->parameterPX(index), 2) + std::pow(t->parameterPY(index), 2) + std::pow(t->parameterPZ(index), 2)); - + dpOverp = 1 - trackqoverp/(refittedTrack_LMqoverp); } else if (!m_skipDeltaPoverP){ - allFound = false; + allFound = false; notFoundList += "deltaPoverP "; } - - + + } else { @@ -636,7 +636,7 @@ double AsgElectronLikelihoodTool::calculate( const EventContext& ctx, const xAOD // reta = e237/e277 if( !el->showerShapeValue(Reta, xAOD::EgammaParameters::Reta) ){ - allFound = false; + allFound = false; notFoundList += "Reta "; } // rphi e233/e237 @@ -705,7 +705,7 @@ double AsgElectronLikelihoodTool::calculate( const EventContext& ctx, const xAOD eta, et, f3, Rhad, Rhad1, Reta, w2, f1, Eratio, deltaEta, d0, - d0sigma, + d0sigma, Rphi, dpOverp, deltaPhiRescaled2, TRT_PID, 
trans_TRT_PID, ip ) ); @@ -739,7 +739,7 @@ double AsgElectronLikelihoodTool::calculate( const EventContext& ctx, const xAOD //============================================================================= // Calculate method for EFCaloLH in the trigger; do full LH if !CaloCutsOnly //============================================================================= -double AsgElectronLikelihoodTool::calculate( const xAOD::Egamma* eg, double mu ) const +double AsgElectronLikelihoodTool::calculate( const xAOD::Egamma* eg, double mu ) const { //Backward compatibility return calculate(Gaudi::Hive::currentContext(), eg, mu); @@ -757,31 +757,31 @@ double AsgElectronLikelihoodTool::calculate( const EventContext& ctx, const xAOD const xAOD::Electron* el = static_cast<const xAOD::Electron*>(eg); return calculate(ctx, el); } - + ATH_MSG_ERROR("Input is not an electron and not Calo Only is required"); return -999; - + } const xAOD::CaloCluster* cluster = eg->caloCluster(); if ( !cluster ){ ATH_MSG_ERROR ("Failed, no cluster."); return -999; - } + } if( !cluster->hasSampling(CaloSampling::CaloSample::EMB2) && !cluster->hasSampling(CaloSampling::CaloSample::EME2) ){ ATH_MSG_ERROR("Failed, cluster is missing samplings EMB2 and EME2"); return -999; } - + const double energy = cluster->e(); - const float eta = cluster->etaBE(2); + const float eta = cluster->etaBE(2); if( isForwardElectron(eg,eta) ){ ATH_MSG_WARNING("Failed, this is a forward electron! The AsgElectronLikelihoodTool is only suitable for central electrons!"); return -999; } - + const double et = ( cosh(eta) != 0.) ? 
energy/cosh(eta) : 0.; // Track variables that the EFCaloLH will not use @@ -808,42 +808,42 @@ double AsgElectronLikelihoodTool::calculate( const EventContext& ctx, const xAOD // reta = e237/e277 if( !eg->showerShapeValue(Reta, xAOD::EgammaParameters::Reta) ){ - allFound = false; + allFound = false; notFoundList += "Reta "; } // rphi e233/e237 if( !eg->showerShapeValue(Rphi, xAOD::EgammaParameters::Rphi) ){ - allFound = false; + allFound = false; notFoundList += "Rphi "; } // rhad1 = ethad1/et if( !eg->showerShapeValue(Rhad1, xAOD::EgammaParameters::Rhad1) ){ - allFound = false; + allFound = false; notFoundList += "Rhad1 "; } // rhad = ethad/et if( !eg->showerShapeValue(Rhad, xAOD::EgammaParameters::Rhad) ){ - allFound = false; + allFound = false; notFoundList += "Rhad "; } // shower width in 2nd sampling if( !eg->showerShapeValue(w2, xAOD::EgammaParameters::weta2) ){ - allFound = false; + allFound = false; notFoundList += "weta2 "; } // fraction of energy reconstructed in the 1st sampling if( !eg->showerShapeValue(f1, xAOD::EgammaParameters::f1) ){ - allFound = false; + allFound = false; notFoundList += "f1 "; } // E of 2nd max between max and min in strips if( !eg->showerShapeValue(Eratio, xAOD::EgammaParameters::Eratio) ){ - allFound = false; + allFound = false; notFoundList += "Eratio "; } // fraction of energy reconstructed in the 3rd sampling if( !eg->showerShapeValue(f3, xAOD::EgammaParameters::f3) ){ - allFound = false; + allFound = false; notFoundList += "f3 "; } @@ -914,10 +914,10 @@ asg::AcceptData AsgElectronLikelihoodTool::accept(const EventContext& ctx, const const xAOD::Electron* el = static_cast<const xAOD::Electron*>(part); return accept(ctx, el); } - + ATH_MSG_ERROR("Input is not an electron"); return m_rootTool->accept(); - + } double AsgElectronLikelihoodTool::calculate(const xAOD::IParticle* part) const @@ -932,20 +932,20 @@ double AsgElectronLikelihoodTool::calculate(const EventContext& ctx, const xAOD: const xAOD::Electron* el = 
static_cast<const xAOD::Electron*>(part); return calculate(ctx, el); } - + ATH_MSG_ERROR ( "Input is not an electron" ); return -999; - + } //============================================================================= // Helper method to get the number of primary vertices -// We don't want to iterate over all vertices in the event for each electron!!! +// We don't want to iterate over all vertices in the event for each electron!!! //============================================================================= unsigned int AsgElectronLikelihoodTool::getNPrimVertices(const EventContext& ctx) const { unsigned int nVtx(0); - SG::ReadHandle<xAOD::VertexContainer> vtxCont (m_primVtxContKey, ctx); + SG::ReadHandle<xAOD::VertexContainer> vtxCont (m_primVtxContKey, ctx); for ( unsigned int i = 0; i < vtxCont->size(); i++ ) { const xAOD::Vertex* vxcand = vtxCont->at(i); if ( vxcand->nTrackParticles() >= 2 ) nVtx++; @@ -959,7 +959,7 @@ unsigned int AsgElectronLikelihoodTool::getNPrimVertices(const EventContext& ctx double AsgElectronLikelihoodTool::getFcalEt(const EventContext& ctx) const { double fcalEt(0.); - SG::ReadHandle<xAOD::HIEventShapeContainer> HIESCont (m_HIESContKey,ctx); + SG::ReadHandle<xAOD::HIEventShapeContainer> HIESCont (m_HIESContKey,ctx); xAOD::HIEventShapeContainer::const_iterator es_itr = HIESCont->begin(); xAOD::HIEventShapeContainer::const_iterator es_end = HIESCont->end(); for (; es_itr != es_end; es_itr++){ @@ -975,7 +975,7 @@ bool AsgElectronLikelihoodTool::isForwardElectron( const xAOD::Egamma* eg, const static const SG::AuxElement::ConstAccessor< uint16_t > accAuthor( "author" ); if( accAuthor.isAvailable(*eg) ){ - + // cannot just do eg->author() because it isn't always filled // at trigger level if( accAuthor(*eg) == xAOD::EgammaParameters::AuthorFwdElectron ){ @@ -984,7 +984,7 @@ bool AsgElectronLikelihoodTool::isForwardElectron( const xAOD::Egamma* eg, const } } else{ - //Check for fwd via eta range the old logic + //Check for fwd via eta 
range the old logic if ( fabs(eta) > 2.5 ) { ATH_MSG_WARNING("Failed, cluster->etaBE(2) range due to " << eta << " seems like a fwd electron" ); return true; diff --git a/PhysicsAnalysis/Interfaces/JetAnalysisInterfaces/JetAnalysisInterfaces/IJetQGTagger.h b/PhysicsAnalysis/Interfaces/JetAnalysisInterfaces/JetAnalysisInterfaces/IJetQGTagger.h new file mode 100644 index 0000000000000000000000000000000000000000..3b0178dd123b3df9e663c6d4a7e13f9da6212938 --- /dev/null +++ b/PhysicsAnalysis/Interfaces/JetAnalysisInterfaces/JetAnalysisInterfaces/IJetQGTagger.h @@ -0,0 +1,50 @@ +// this file is -*- C++ -*- + +/* + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +*/ + +#ifndef IJETQGTAGGER_H +#define IJETQGTAGGER_H + +#include "PATInterfaces/CorrectionCode.h" +#include "PATInterfaces/ISystematicsTool.h" + +#include "xAODJet/Jet.h" +#include "xAODTracking/Vertex.h" + +namespace CP { + + namespace QGntrackSyst { + const static SystematicVariation trackefficiency("JET_QG_trackEfficiency"); + const static SystematicVariation trackfakes("JET_QG_trackFakes"); + const static SystematicVariation nchargedtopo("JET_QG_nchargedTopo"); + const static SystematicVariation nchargedexp_up("JET_QG_nchargedExp__1up"); + const static SystematicVariation nchargedme_up("JET_QG_nchargedME__1up"); + const static SystematicVariation nchargedpdf_up("JET_QG_nchargedPDF__1up"); + const static SystematicVariation nchargedexp_down("JET_QG_nchargedExp__1down"); + const static SystematicVariation nchargedme_down("JET_QG_nchargedME__1down"); + const static SystematicVariation nchargedpdf_down("JET_QG_nchargedPDF__1down"); + const static SystematicVariation trackeff("JET_QG_trackeff"); + const static SystematicVariation fake("JET_QG_fake"); + + } //namespace QGntrackSyst + + class IJetQGTagger : public virtual CP::ISystematicsTool { + + ASG_TOOL_INTERFACE( CP::IJetQGTagger ) + + public: + + virtual ~IJetQGTagger() {} + + virtual StatusCode tag(const xAOD::Jet& jet) const = 0; + 
virtual StatusCode tag(const xAOD::Jet& jet, const xAOD::Vertex* pv) const = 0; + + virtual StatusCode sysApplySystematicVariation(const SystematicSet&) = 0; + + }; + +} // namespace CP + +#endif /* IJETQGTAGGER_H */ diff --git a/PhysicsAnalysis/Interfaces/JetAnalysisInterfaces/JetAnalysisInterfaces/IJetSelectorTool.h b/PhysicsAnalysis/Interfaces/JetAnalysisInterfaces/JetAnalysisInterfaces/IJetSelectorTool.h deleted file mode 100644 index 139f0bf2f2d557a4af7ab840feaad5f4a87987de..0000000000000000000000000000000000000000 --- a/PhysicsAnalysis/Interfaces/JetAnalysisInterfaces/JetAnalysisInterfaces/IJetSelectorTool.h +++ /dev/null @@ -1,27 +0,0 @@ -// for editors : this file is -*- C++ -*- - -/* - Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration -*/ - -#ifndef JetAnalysisInterfaces_IJetSelectorTool_H_ -#define JetAnalysisInterfaces_IJetSelectorTool_H_ - -#include "AsgTools/IAsgTool.h" - -#include "xAODJet/Jet.h" - -#include "PATCore/AcceptData.h" - - -class IJetSelectorTool : virtual public asg::IAsgTool { - ASG_TOOL_INTERFACE(IJetSelectorTool) - - public: - - virtual asg::AcceptData& tag(const xAOD::Jet& jet) const = 0; - - -}; - -#endif diff --git a/PhysicsAnalysis/Interfaces/JetAnalysisInterfaces/JetAnalysisInterfaces/JetAnalysisInterfacesDict.h b/PhysicsAnalysis/Interfaces/JetAnalysisInterfaces/JetAnalysisInterfaces/JetAnalysisInterfacesDict.h index 5bd4acf1a7653ea2707ad3c22acc88379d8abb28..60b8fb63a900e4c51ced456948033370905e5356 100644 --- a/PhysicsAnalysis/Interfaces/JetAnalysisInterfaces/JetAnalysisInterfaces/JetAnalysisInterfacesDict.h +++ b/PhysicsAnalysis/Interfaces/JetAnalysisInterfaces/JetAnalysisInterfaces/JetAnalysisInterfacesDict.h @@ -1,5 +1,5 @@ /* - Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration */ #ifndef JETANALYSISINTERFACES_JETANALYSISINTERFACESDICT_H @@ -10,7 +10,7 @@ #endif // __GCCXML__ // Includes for the dictionary 
generation: -#include "JetAnalysisInterfaces/IJetSelectorTool.h" +#include "JetAnalysisInterfaces/IJetQGTagger.h" #include "JetAnalysisInterfaces/IJetJvtEfficiency.h" #endif // JETANALYSISINTERFACES_JETANALYSISINTERFACESDICT_H diff --git a/PhysicsAnalysis/Interfaces/JetAnalysisInterfaces/JetAnalysisInterfaces/selection.xml b/PhysicsAnalysis/Interfaces/JetAnalysisInterfaces/JetAnalysisInterfaces/selection.xml index 0a98caef59b578a1c65329b83c1d6c35e3993f75..8e9a1f98c191d8cfdc620df2183d64d7c72f2d75 100644 --- a/PhysicsAnalysis/Interfaces/JetAnalysisInterfaces/JetAnalysisInterfaces/selection.xml +++ b/PhysicsAnalysis/Interfaces/JetAnalysisInterfaces/JetAnalysisInterfaces/selection.xml @@ -1,6 +1,6 @@ <lcgdict> <!-- Requested dictionary generation --> - <class name="IJetSelectorTool" /> + <class name="IJetQGTagger" /> <class name="IJetJvtEfficiency" /> <!-- Suppress unwanted dictionaries generated by ROOT 6 --> diff --git a/PhysicsAnalysis/JetTagging/JetTagAlgs/BTagging/src/HighLevelBTagAlg.cxx b/PhysicsAnalysis/JetTagging/JetTagAlgs/BTagging/src/HighLevelBTagAlg.cxx index 9aca1b66364752a76f85e1df2ea52d4522a6d4c3..4d35c7fc711f13a15b8b6017243b2f00e10288e9 100644 --- a/PhysicsAnalysis/JetTagging/JetTagAlgs/BTagging/src/HighLevelBTagAlg.cxx +++ b/PhysicsAnalysis/JetTagging/JetTagAlgs/BTagging/src/HighLevelBTagAlg.cxx @@ -62,7 +62,7 @@ namespace Analysis { CHECK( m_dec_track_mom.initialize() ); // create and initialize write handles - for (const std::string key: m_jetDecorator->getDecoratorKeys()) { + for (const std::string& key: m_jetDecorator->getDecoratorKeys()) { std::string full_key = m_BTagCollectionName.key() + "." 
+ key; ATH_MSG_DEBUG("Adding " << full_key); m_outputKeys.emplace_back(std::make_unique<SG::WriteDecorHandleKey<xAOD::BTaggingContainer>>(this, key, full_key, "")); diff --git a/PhysicsAnalysis/JetTagging/JetTagCalibration/src/CalibrationBroker.cxx b/PhysicsAnalysis/JetTagging/JetTagCalibration/src/CalibrationBroker.cxx index eddec0b2136dfda4631ccd5456573e71a7a168eb..72d8cbe0108c8a38e33ea1bf7a0f326f2a124b51 100755 --- a/PhysicsAnalysis/JetTagging/JetTagCalibration/src/CalibrationBroker.cxx +++ b/PhysicsAnalysis/JetTagging/JetTagCalibration/src/CalibrationBroker.cxx @@ -1,5 +1,5 @@ /* - Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration */ #include "JetTagCalibration/CalibrationBroker.h" @@ -173,8 +173,10 @@ namespace Analysis { if((*hI).second.first) { if(((*hI).second.first)->InheritsFrom("TH1")){ if( msgLvl(MSG::VERBOSE) ){ - msg(MSG::VERBOSE)<< " entries: " - << dynamic_cast<TH1*>(((*hI).second).first)->GetEntries(); + if (auto th1 = dynamic_cast<TH1*>(((*hI).second).first)) { + msg(MSG::VERBOSE)<< " entries: " + << th1->GetEntries(); + } } } } else { diff --git a/PhysicsAnalysis/JetTagging/JetTagPerformanceCalibration/xAODBTaggingEfficiency/CMakeLists.txt b/PhysicsAnalysis/JetTagging/JetTagPerformanceCalibration/xAODBTaggingEfficiency/CMakeLists.txt index 2d6618ffc57ea37daf080361679fe7a122eaa970..e375745d0b0a18a7db56de783d90d88092400bb4 100644 --- a/PhysicsAnalysis/JetTagging/JetTagPerformanceCalibration/xAODBTaggingEfficiency/CMakeLists.txt +++ b/PhysicsAnalysis/JetTagging/JetTagPerformanceCalibration/xAODBTaggingEfficiency/CMakeLists.txt @@ -3,7 +3,6 @@ # Declare the package name: atlas_subdir( xAODBTaggingEfficiency ) - # External dependencies: find_package( ROOT COMPONENTS Core Hist RIO ) @@ -31,22 +30,31 @@ atlas_add_dictionary( xAODBTaggingEfficiencyDict LINK_LIBRARIES xAODBTaggingEfficiencyLib ) # Executable(s) in the package (to be built only under AthAnalysis 
or in stand-alone mode): -if( XAOD_ANALYSIS OR XAOD_STANDALONE ) +if( XAOD_ANALYSIS ) atlas_add_executable( BTaggingEfficiencyToolTester util/BTaggingEfficiencyToolTester.cxx - LINK_LIBRARIES xAODBTaggingEfficiencyLib ) + LINK_LIBRARIES xAODRootAccess AsgTools FTagAnalysisInterfacesLib ) atlas_add_executable( BTaggingEigenVectorRecompositionToolTester util/BTaggingEigenVectorRecompositionToolTester.cxx - LINK_LIBRARIES xAODBTaggingEfficiencyLib ) + LINK_LIBRARIES AsgTools FTagAnalysisInterfacesLib ) - atlas_add_executable( BTaggingSelectionToolTester + atlas_add_executable( BTaggingSelectionToolTester util/BTaggingSelectionToolTester.cxx - LINK_LIBRARIES AsgMessagingLib xAODJet xAODBTagging xAODBTaggingEfficiencyLib ) + INCLUDE_DIRS ${ROOT_INCLUDE_DIRS} + LINK_LIBRARIES ${ROOT_LIBRARIES} AsgTools xAODJet xAODBTagging + PATInterfaces FTagAnalysisInterfacesLib ) + if( XAOD_STANDALONE ) + target_link_libraries( BTaggingSelectionToolTester + PRIVATE xAODRootAccess ) + else() + target_link_libraries( BTaggingSelectionToolTester + PRIVATE POOLRootAccessLib ) + endif() atlas_add_executable( BTaggingTruthTaggingTester util/BTaggingTruthTaggingTester.cxx - LINK_LIBRARIES AsgMessagingLib xAODJet xAODBTagging xAODBTaggingEfficiencyLib FTagAnalysisInterfacesLib ) + LINK_LIBRARIES AsgTools AsgMessagingLib FTagAnalysisInterfacesLib ) endif() # Install files from the package: diff --git a/PhysicsAnalysis/TauID/TauAnalysisTools/Root/CommonSmearingTool.cxx b/PhysicsAnalysis/TauID/TauAnalysisTools/Root/CommonSmearingTool.cxx index 9026acf04f44bbec94c01895dcf26d7960ec0796..1f37ff54499e8ee2be09bc141b852981440e893a 100644 --- a/PhysicsAnalysis/TauID/TauAnalysisTools/Root/CommonSmearingTool.cxx +++ b/PhysicsAnalysis/TauID/TauAnalysisTools/Root/CommonSmearingTool.cxx @@ -179,7 +179,10 @@ CP::CorrectionCode CommonSmearingTool::applyCorrection( xAOD::TauJet& xTau ) if (m_bApplyMVATES) { // veto MVA TES for unreasonably low resolution values - bool bVeto = 
dynamic_cast<CombinedP4FromRecoTaus*>(m_tCombinedP4FromRecoTaus.get())->getUseCaloPtFlag(xTau); + bool bVeto = false; + if (auto combp4 = dynamic_cast<CombinedP4FromRecoTaus*>(m_tCombinedP4FromRecoTaus.get())) { + bVeto = combp4->getUseCaloPtFlag(xTau); + } if (xTau.nTracks() > 0 and xTau.nTracks() < 6) { diff --git a/PhysicsAnalysis/TauID/TauAnalysisTools/test/ut_TauAnalysisTools_test.cxx b/PhysicsAnalysis/TauID/TauAnalysisTools/test/ut_TauAnalysisTools_test.cxx index 576cfbf840cc49fc0cafa8192dab5e46600cb865..a4f0a384c00445eb92a221c179933e10f0039d46 100644 --- a/PhysicsAnalysis/TauID/TauAnalysisTools/test/ut_TauAnalysisTools_test.cxx +++ b/PhysicsAnalysis/TauID/TauAnalysisTools/test/ut_TauAnalysisTools_test.cxx @@ -1,10 +1,7 @@ /* - Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration */ -#ifndef ROOTCORE -#ifdef XAOD_ANALYSIS - #ifndef TAUANALYSISTOOLS_UT_TAUANALYSISTOOLS_TEST_H #define TAUANALYSISTOOLS_UT_TAUANALYSISTOOLS_TEST_H 1 @@ -33,6 +30,7 @@ using namespace asg::msgUserCode; //messaging int main( int argc, char* argv[] ) { + ANA_CHECK_SET_TYPE (int); IAppMgrUI* app = POOL::Init(); //important to do this first! 
@@ -58,40 +56,40 @@ int main( int argc, char* argv[] ) // TauSelectionTool // =========================================================================== ToolHandle<TauAnalysisTools::ITauSelectionTool> TauSelTool("TauAnalysisTools::TauSelectionTool/TauSelectionTool"); - TauSelTool.retrieve(); //this will cause the tool to be created and initialized + ANA_CHECK(TauSelTool.retrieve()); //this will cause the tool to be created and initialized // =========================================================================== // TauSmearingTool // =========================================================================== ToolHandle<TauAnalysisTools::ITauSmearingTool> TauSmeTool("TauAnalysisTools::TauSmearingTool/TauSmearingTool"); - TauSmeTool.retrieve(); + ANA_CHECK(TauSmeTool.retrieve()); // =========================================================================== // TauEfficiencyCorrectionsTool // =========================================================================== ToolHandle<TauAnalysisTools::ITauEfficiencyCorrectionsTool> TauEffCorrTool( "TauAnalysisTools::TauEfficiencyCorrectionsTool/TauEfficiencyCorrectionsTool" ); - AthAnalysisHelper::setProperty(TauEffCorrTool, "TauSelectionTool", TauSelTool); - TauEffCorrTool.retrieve(); + ANA_CHECK(AthAnalysisHelper::setProperty(TauEffCorrTool, "TauSelectionTool", TauSelTool)); + ANA_CHECK(TauEffCorrTool.retrieve()); // =========================================================================== // TauTruthMatchingTool // =========================================================================== ToolHandle<TauAnalysisTools::ITauTruthMatchingTool> T2MT( "TauAnalysisTools::TauTruthMatchingTool/TauTruthMatchingTool"); - AthAnalysisHelper::setProperty(T2MT, "WriteTruthTaus", true); - T2MT.retrieve(); + ANA_CHECK(AthAnalysisHelper::setProperty(T2MT, "WriteTruthTaus", true)); + ANA_CHECK(T2MT.retrieve()); // =========================================================================== // TauTruthTrackMatchingTool // 
=========================================================================== ToolHandle<TauAnalysisTools::ITauTruthTrackMatchingTool> T3MT( "TauAnalysisTools::TauTruthTrackMatchingTool/TauTruthTrackMatchingTool"); - T3MT.retrieve(); + ANA_CHECK(T3MT.retrieve()); // defining needed Container const xAOD::TauJetContainer* xTauJetContainer = 0; //loop over input file with POOL POOL::TEvent evt; - evt.readFrom( fileName ); + ANA_CHECK(evt.readFrom( fileName )); // for(int i=0;i < evt.getEntries(); i++) { for(int i=0; i < 100; i++) @@ -102,7 +100,7 @@ int main( int argc, char* argv[] ) continue; } - evt.retrieve( xTauJetContainer, "TauJets" ); + ANA_CHECK(evt.retrieve( xTauJetContainer, "TauJets" )); for ( auto xTau : *xTauJetContainer ) { @@ -119,7 +117,7 @@ int main( int argc, char* argv[] ) << ", prong = " << int(xTau->nTracks()) << ", charge = " << int(xTau->charge())); - if ((bool)xTau->auxdata<char>("IsTruthMatched")) + if ((bool)xTau->auxdata<char>("IsTruthMatched") && (xTruthTau != nullptr)) { if (xTruthTau->isTau()) { @@ -164,21 +162,11 @@ int main( int argc, char* argv[] ) } } ServiceHandle<IProperty> toolSvc("ToolSvc",""); - toolSvc->setProperty("OutputLevel","1"); + ANA_CHECK(toolSvc->setProperty("OutputLevel","1")); asg::msgToolHandle::setMsgLevel(MSG::Level::DEBUG); - app->finalize(); //trigger finalization of all services and tools created by the Gaudi Application + ANA_CHECK(app->finalize()); //trigger finalization of all services and tools created by the Gaudi Application return 0; } #endif //> !TAUANALYSISTOOLS_UT_TAUANALYSISTOOLS_TEST_H - -#else -int main() -{ - return 0; -} - -#endif // XAOD_ANALYSIS - -#endif // not ROOTCORE diff --git a/PhysicsAnalysis/TopPhys/xAOD/TopAnalysis/Root/EventSaverFlatNtuple.cxx b/PhysicsAnalysis/TopPhys/xAOD/TopAnalysis/Root/EventSaverFlatNtuple.cxx index 604e408b09cce651635c3a5ceae244121d46186f..6ed286fe5a70f07c0d57259bf90d37f73cdf6b0d 100644 --- a/PhysicsAnalysis/TopPhys/xAOD/TopAnalysis/Root/EventSaverFlatNtuple.cxx 
+++ b/PhysicsAnalysis/TopPhys/xAOD/TopAnalysis/Root/EventSaverFlatNtuple.cxx @@ -12,6 +12,7 @@ #include "xAODMissingET/MissingETContainer.h" #include "xAODBTagging/BTaggingUtilities.h" +#include "AthContainers/tools/AtomicConstAccessor.h" #include "AthContainers/AuxTypeRegistry.h" #include "TFile.h" @@ -2209,7 +2210,8 @@ namespace top { // (non-collision-)background flags m_backgroundFlags = 0; - if (event.m_info->isAvailable<unsigned int>("backgroundFlags")) m_backgroundFlags = event.m_info->auxdataConst<unsigned int>("backgroundFlags"); + static const SG::AtomicConstAccessor<unsigned int> bkgFlagsAcc("backgroundFlags"); + if (bkgFlagsAcc.isAvailable(*(event.m_info))) m_backgroundFlags = bkgFlagsAcc(*(event.m_info)); // hasBadMuon flag m_hasBadMuon = 0; diff --git a/PhysicsAnalysis/TopPhys/xAOD/TopAnalysis/Root/Tools.cxx b/PhysicsAnalysis/TopPhys/xAOD/TopAnalysis/Root/Tools.cxx index 5fb00c304378facc651e5a2382ec410c6fc04c96..fe03601fd00278e8780d42c8bffd7c8bf753fbd1 100644 --- a/PhysicsAnalysis/TopPhys/xAOD/TopAnalysis/Root/Tools.cxx +++ b/PhysicsAnalysis/TopPhys/xAOD/TopAnalysis/Root/Tools.cxx @@ -165,7 +165,10 @@ namespace top { void parseCutBookkeepers(xAOD::TEvent& xaodEvent, const std::size_t size, std::vector<std::string> &names, std::vector<float>& sumW, const bool isHLLHC) { - for (std::size_t icbk = 0; icbk < size; ++icbk) { + // workaround for PMGTruthWeightTool returning ZERO weights, when sample has ONLY ONE weight... + const std::size_t modifiedSize = (size == 0) ? 1 : size; + + for (std::size_t icbk = 0; icbk < modifiedSize; ++icbk) { const std::string cbkName = (icbk == 0) ? 
"CutBookkeepers" : "CutBookkeepers_weight_" + std::to_string(icbk); const xAOD::CutBookkeeperContainer* cutBookKeepers = nullptr; top::check(xaodEvent.retrieveMetaInput(cutBookKeepers, cbkName), "Cannot retrieve CutBookkeepers: " + cbkName); diff --git a/PhysicsAnalysis/TopPhys/xAOD/TopCPTools/Root/TopFlavorTaggingCPTools.cxx b/PhysicsAnalysis/TopPhys/xAOD/TopCPTools/Root/TopFlavorTaggingCPTools.cxx index 5619ff6cd0542284918091a8393346bb9921c8b6..d11a483076c3ee98a9a1f43ecf2717697b1a9d38 100644 --- a/PhysicsAnalysis/TopPhys/xAOD/TopCPTools/Root/TopFlavorTaggingCPTools.cxx +++ b/PhysicsAnalysis/TopPhys/xAOD/TopCPTools/Root/TopFlavorTaggingCPTools.cxx @@ -40,7 +40,7 @@ namespace top { } static const std::string cdi_file_default = - "xAODBTaggingEfficiency/13TeV/2020-21-13TeV-MC16-CDI-2020-03-11_Sh228_v3.root"; + "xAODBTaggingEfficiency/13TeV/2020-21-13TeV-MC16-CDI-2020-12-02_v2.root"; m_tagger = ""; // Extract in the loop if (m_config->bTaggingCDIPath() != "Default") { @@ -85,7 +85,7 @@ namespace top { // Calibrated and uncalibrated working points for VR track jets for all algorithms top::check(setTaggerWorkingPoints("AntiKtVR30Rmax4Rmin02TrackJets", true, "MV2c10", {"FixedCutBEff_60", "FixedCutBEff_70", "FixedCutBEff_77", "FixedCutBEff_85", "Continuous"}), "Error setting AntiKtVR30Rmax4Rmin02TrackJets WP"); top::check(setTaggerWorkingPoints("AntiKtVR30Rmax4Rmin02TrackJets", true, "DL1", {"FixedCutBEff_60", "FixedCutBEff_70", "FixedCutBEff_77", "FixedCutBEff_85", "Continuous"}), "Error setting AntiKtVR30Rmax4Rmin02TrackJets WP"); - top::check(setTaggerWorkingPoints("AntiKtVR30Rmax4Rmin02TrackJets", false, "DL1r", {"FixedCutBEff_60", "FixedCutBEff_70", "FixedCutBEff_77", "FixedCutBEff_85", "Continuous"}), "Error setting AntiKtVR30Rmax4Rmin02TrackJets WP"); + top::check(setTaggerWorkingPoints("AntiKtVR30Rmax4Rmin02TrackJets", true, "DL1r", {"FixedCutBEff_60", "FixedCutBEff_70", "FixedCutBEff_77", "FixedCutBEff_85", "Continuous"}), "Error setting 
AntiKtVR30Rmax4Rmin02TrackJets WP"); top::check(setTaggerWorkingPoints("AntiKtVR30Rmax4Rmin02TrackJets", false, "DL1rmu", {"FixedCutBEff_60", "FixedCutBEff_70", "FixedCutBEff_77", "FixedCutBEff_85", "Continuous"}), "Error setting AntiKtVR30Rmax4Rmin02TrackJets WP"); diff --git a/PhysicsAnalysis/TopPhys/xAOD/TopCPTools/TopCPTools/TopBoostedTaggingCPTools.h b/PhysicsAnalysis/TopPhys/xAOD/TopCPTools/TopCPTools/TopBoostedTaggingCPTools.h index 523d678de379860ad134dd983b122db41fab3253..9cb78b2eef405caf305f5ce6fb94cbb54a16d41f 100644 --- a/PhysicsAnalysis/TopPhys/xAOD/TopCPTools/TopCPTools/TopBoostedTaggingCPTools.h +++ b/PhysicsAnalysis/TopPhys/xAOD/TopCPTools/TopCPTools/TopBoostedTaggingCPTools.h @@ -15,7 +15,7 @@ #include "AsgTools/ToolHandle.h" #include "AsgTools/ToolHandleArray.h" #include "AsgTools/AnaToolHandle.h" -#include "JetAnalysisInterfaces/IJetSelectorTool.h" +#include "JetInterface/IJetDecorator.h" #include "JetCPInterfaces/ICPJetUncertaintiesTool.h" namespace top { @@ -30,7 +30,7 @@ namespace top { private: std::shared_ptr<top::TopConfig> m_config; - std::unordered_map<std::string, asg::AnaToolHandle<IJetSelectorTool> > m_taggers; + std::unordered_map<std::string, asg::AnaToolHandle<IJetDecorator> > m_taggers; std::unordered_map<std::string, ToolHandle<ICPJetUncertaintiesTool> > m_tagSFuncertTool; }; } // namespace top diff --git a/PhysicsAnalysis/TopPhys/xAOD/TopEventSelectionTools/Root/JetFlavorPlots.cxx b/PhysicsAnalysis/TopPhys/xAOD/TopEventSelectionTools/Root/JetFlavorPlots.cxx index bd156abbb52f204dc007ab52393ad3f307e259b4..9e12ebdc80b9d10e38d20d52c92ccd8f8f22d3dc 100644 --- a/PhysicsAnalysis/TopPhys/xAOD/TopEventSelectionTools/Root/JetFlavorPlots.cxx +++ b/PhysicsAnalysis/TopPhys/xAOD/TopEventSelectionTools/Root/JetFlavorPlots.cxx @@ -34,6 +34,8 @@ namespace top { m_doNominal(false), m_doRadHigh(false), m_doRadLow(false), + m_doRadHighNoVar3c(false), + m_doRadLowNoVar3c(false), // default pT and eta binning, and default max number of Jets 
m_ptBins("15:20:30:45:60:80:110:160:210:260:310:400:500:600:800:1000:1200:1500:1800:2500"), m_etaBins("0.:0.3:0.8:1.2:2.1:2.8:3.6:4.5"), @@ -73,25 +75,30 @@ namespace top { s == "radiationHigh") m_doRadHigh = true; else if (s == "radiationlow" || s == "RADIATIONLOW" || s == "RadiationLow" || s == "radiationLow") m_doRadLow = true; + else if (s == "radiationhighnovar3c" || s == "RADIATIONHIGHNOVAR3C" || s == "RadiationHighNoVar3c" || + s == "radiationHighNoVar3c") m_doRadHighNoVar3c = true; + else if (s == "radiationlownovar3c" || s == "RADIATIONLOWNOVAR3C" || s == "RadiationLowNoVar3c" || + s == "radiationLowNoVar3c") m_doRadLowNoVar3c = true; else { throw std::runtime_error("ERROR: Can't understand argument " + s + "For JETFLAVORPLOTS"); } } //If neither nominal or radiation has been selected, assume it's nominal - if ((m_doNominal + m_doRadHigh + m_doRadLow) == false) m_doNominal = true; + if ((m_doNominal && m_doRadHigh && m_doRadLow && m_doRadHighNoVar3c && m_doRadLowNoVar3c) == false) m_doNominal = true; // create the JetFlavorPlots and JetFlavorPlots_Loose directories only if needed if (m_config->doTightEvents()) { if (m_doNominal) m_hists = std::make_shared<PlotManager>(name + "/JetFlavorPlots", outputFile, wk); - if (m_doRadHigh) m_hists_RadHigh = - std::make_shared<PlotManager>(name + "/JetFlavorPlots_RadHigh", outputFile, wk); + if (m_doRadHigh) m_hists_RadHigh = std::make_shared<PlotManager>(name + "/JetFlavorPlots_RadHigh", outputFile, wk); if (m_doRadLow) m_hists_RadLow = std::make_shared<PlotManager>(name + "/JetFlavorPlots_RadLow", outputFile, wk); + if (m_doRadHighNoVar3c) m_hists_RadHighNoVar3c = std::make_shared<PlotManager>(name + "/JetFlavorPlots_RadHighNoVar3c", outputFile, wk); + if (m_doRadLowNoVar3c) m_hists_RadLowNoVar3c = std::make_shared<PlotManager>(name + "/JetFlavorPlots_RadLowNoVar3c", outputFile, wk); } if (m_config->doLooseEvents()) { if (m_doNominal) m_hists_Loose = std::make_shared<PlotManager>(name + "/JetFlavorPlots_Loose", 
outputFile, wk); - if (m_doRadHigh) m_hists_RadHigh_Loose = std::make_shared<PlotManager>(name + "/JetFlavorPlots_Loose_RadHigh", - outputFile, wk); - if (m_doRadLow) m_hists_RadLow_Loose = std::make_shared<PlotManager>(name + "/JetFlavorPlots_Loose_RadLow", - outputFile, wk); + if (m_doRadHigh) m_hists_RadHigh_Loose = std::make_shared<PlotManager>(name + "/JetFlavorPlots_Loose_RadHigh", outputFile, wk); + if (m_doRadLow) m_hists_RadLow_Loose = std::make_shared<PlotManager>(name + "/JetFlavorPlots_Loose_RadLow", outputFile, wk); + if (m_doRadHighNoVar3c) m_hists_RadHighNoVar3c_Loose = std::make_shared<PlotManager>(name + "/JetFlavorPlots_Loose_RadHighNoVar3c", outputFile, wk); + if (m_doRadLowNoVar3c) m_hists_RadLowNoVar3c_Loose = std::make_shared<PlotManager>(name + "/JetFlavorPlots_Loose_RadLowNoVar3c", outputFile, wk); } //handle binning std::vector<double> ptBinning; @@ -111,11 +118,15 @@ namespace top { if (m_doNominal) BookHistograms(m_hists, ptBinning, etaBinning); if (m_doRadHigh) BookHistograms(m_hists_RadHigh, ptBinning, etaBinning); if (m_doRadLow) BookHistograms(m_hists_RadLow, ptBinning, etaBinning); + if (m_doRadHighNoVar3c) BookHistograms(m_hists_RadHighNoVar3c, ptBinning, etaBinning); + if (m_doRadLowNoVar3c) BookHistograms(m_hists_RadLowNoVar3c, ptBinning, etaBinning); } if (m_config->doLooseEvents()) { if (m_doNominal) BookHistograms(m_hists_Loose, ptBinning, etaBinning); if (m_doRadHigh) BookHistograms(m_hists_RadHigh_Loose, ptBinning, etaBinning); if (m_doRadLow) BookHistograms(m_hists_RadLow_Loose, ptBinning, etaBinning); + if (m_doRadHighNoVar3c) BookHistograms(m_hists_RadHighNoVar3c_Loose, ptBinning, etaBinning); + if (m_doRadLowNoVar3c) BookHistograms(m_hists_RadLowNoVar3c_Loose, ptBinning, etaBinning); } } @@ -165,28 +176,52 @@ namespace top { if (event.m_isLoose) FillHistograms(m_hists_Loose, eventWeight, event); else FillHistograms(m_hists, eventWeight, event); } - if (m_doRadHigh) { + if (m_doRadHigh || m_doRadHighNoVar3c) { // 2 
different names are acceptable double scaleWeight = 1.; if (m_PMGTruthWeights->hasWeight(" muR = 0.5, muF = 0.5 ")) scaleWeight = m_PMGTruthWeights->getWeight(" muR = 0.5, muF = 0.5 "); else if (m_PMGTruthWeights->hasWeight(" muR = 0.50, muF = 0.50 ")) scaleWeight = m_PMGTruthWeights->getWeight(" muR = 0.50, muF = 0.50 "); + else if (m_PMGTruthWeights->hasWeight("MUR0.5_MUF0.5_PDF261000")) scaleWeight = m_PMGTruthWeights->getWeight("MUR0.5_MUF0.5_PDF261000"); // for e.g. Sherpa Z+jets + else if (m_PMGTruthWeights->hasWeight(" muR=0.50000E+00 muF=0.50000E+00 ")) scaleWeight = m_PMGTruthWeights->getWeight(" muR=0.50000E+00 muF=0.50000E+00 "); // for e.g. ttZ DSID 410218 + else if (m_PMGTruthWeights->hasWeight(" dyn= 0 muR=0.50000E+00 muF=0.50000E+00 ")) scaleWeight = m_PMGTruthWeights->getWeight(" dyn= 0 muR=0.50000E+00 muF=0.50000E+00 "); // for e.g. tWZ 412118 + else if (m_PMGTruthWeights->hasWeight("1009")) scaleWeight = m_PMGTruthWeights->getWeight("1009"); // for e.g. tZ 412063 + else if (m_PMGTruthWeights->hasWeight("muR=05,muF=05")) scaleWeight = m_PMGTruthWeights->getWeight("muR=05,muF=05"); // for some other generator setups else top::check(m_PMGTruthWeights->hasWeight(" muR = 0.5, muF = 0.5 "), "JetFlavorPlots::apply(): Weight \" muR = 0.5, muF = 0.5 \" not found. Please report this message!"); - top::check(m_PMGTruthWeights->hasWeight("Var3cUp"), "JetFlavorPlots::apply(): Weight \"Var3cUp\" not found. Please report this message!"); - double eventWeight = scaleWeight * m_PMGTruthWeights->getWeight("Var3cUp") / nominalWeight; - if (event.m_isLoose) FillHistograms(m_hists_RadHigh_Loose, eventWeight, event); - else FillHistograms(m_hists_RadHigh, eventWeight, event); + double eventWeight = scaleWeight; + if (!m_doRadHighNoVar3c) { + top::check(m_PMGTruthWeights->hasWeight("Var3cUp"), "JetFlavorPlots::apply(): Weight \"Var3cUp\" not found. 
Please report this message!"); + eventWeight *= m_PMGTruthWeights->getWeight("Var3cUp") / nominalWeight; + if (event.m_isLoose) FillHistograms(m_hists_RadHigh_Loose, eventWeight, event); + else FillHistograms(m_hists_RadHigh, eventWeight, event); + } // finish if (!m_doRadHighNoVar3c) + else { // m_doRadHighVar3c is true + if (event.m_isLoose) FillHistograms(m_hists_RadHighNoVar3c_Loose, eventWeight, event); + else FillHistograms(m_hists_RadHighNoVar3c, eventWeight, event); + } // finish else } - if (m_doRadLow) { + if (m_doRadLow || m_doRadLowNoVar3c) { //2 different names are acceptable double scaleWeight = 1.; if (m_PMGTruthWeights->hasWeight(" muR = 2.0, muF = 2.0 ")) scaleWeight = m_PMGTruthWeights->getWeight(" muR = 2.0, muF = 2.0 "); else if (m_PMGTruthWeights->hasWeight(" muR = 2.00, muF = 2.00 ")) scaleWeight = m_PMGTruthWeights->getWeight(" muR = 2.00, muF = 2.00 "); + else if (m_PMGTruthWeights->hasWeight("MUR2_MUF2_PDF261000")) scaleWeight = m_PMGTruthWeights->getWeight("MUR2_MUF2_PDF261000"); // for e.g. Sherpa Z+jets + else if (m_PMGTruthWeights->hasWeight(" muR=0.20000E+01 muF=0.20000E+01 ")) scaleWeight = m_PMGTruthWeights->getWeight(" muR=0.20000E+01 muF=0.20000E+01 "); // for e.g. ttZ DSID 410218 + else if (m_PMGTruthWeights->hasWeight(" dyn= 0 muR=0.20000E+01 muF=0.20000E+01 ")) scaleWeight = m_PMGTruthWeights->getWeight(" dyn= 0 muR=0.20000E+01 muF=0.20000E+01 "); // for e.g. tWZ 412118 + else if (m_PMGTruthWeights->hasWeight("1005")) scaleWeight = m_PMGTruthWeights->getWeight("1005"); // for e.g. tZ 412063 + else if (m_PMGTruthWeights->hasWeight("muR=20,muF=20")) scaleWeight = m_PMGTruthWeights->getWeight("muR=20,muF=20"); // for some other generator setups else top::check(m_PMGTruthWeights->hasWeight(" muR = 2.0, muF = 2.0 "), "JetFlavorPlots::apply(): Weight \" muR = 2.0, muF = 2.0 \" not found. Please report this message!"); - top::check(m_PMGTruthWeights->hasWeight("Var3cUp"), "JetFlavorPlots::apply(): Weight \"Var3cUp\" not found. 
Please report this message!"); - top::check(m_PMGTruthWeights->hasWeight("Var3cDown"), "JetFlavorPlots::apply(): Weight \"Var3cDown\" not found. Please report this message!"); - double eventWeight = scaleWeight * m_PMGTruthWeights->getWeight("Var3cDown") / nominalWeight; - if (event.m_isLoose) FillHistograms(m_hists_RadLow_Loose, eventWeight, event); - else FillHistograms(m_hists_RadLow, eventWeight, event); + double eventWeight = scaleWeight; + if (!m_doRadLowNoVar3c) { + top::check(m_PMGTruthWeights->hasWeight("Var3cDown"), "JetFlavorPlots::apply(): Weight \"Var3cDown\" not found. Please report this message!"); + eventWeight *= m_PMGTruthWeights->getWeight("Var3cDown") / nominalWeight; + if (event.m_isLoose) FillHistograms(m_hists_RadLow_Loose, eventWeight, event); + else FillHistograms(m_hists_RadLow, eventWeight, event); + } // finish if (!m_doRadLowNoVar3c) { + else { // m_doRadLowNoVar3c is true + if (event.m_isLoose) FillHistograms(m_hists_RadLowNoVar3c_Loose, eventWeight, event); + else FillHistograms(m_hists_RadLowNoVar3c, eventWeight, event); + } // finish else + } return true; } diff --git a/PhysicsAnalysis/TopPhys/xAOD/TopEventSelectionTools/TopEventSelectionTools/JetFlavorPlots.h b/PhysicsAnalysis/TopPhys/xAOD/TopEventSelectionTools/TopEventSelectionTools/JetFlavorPlots.h index 383a2547715e537329c15046b3b32faf3d275002..2c29d5876940fd21b77f055cd3a690e164d4384d 100644 --- a/PhysicsAnalysis/TopPhys/xAOD/TopEventSelectionTools/TopEventSelectionTools/JetFlavorPlots.h +++ b/PhysicsAnalysis/TopPhys/xAOD/TopEventSelectionTools/TopEventSelectionTools/JetFlavorPlots.h @@ -67,6 +67,10 @@ namespace top { std::shared_ptr<PlotManager> m_hists_RadHigh_Loose = nullptr; std::shared_ptr<PlotManager> m_hists_RadLow = nullptr; std::shared_ptr<PlotManager> m_hists_RadLow_Loose = nullptr; + std::shared_ptr<PlotManager> m_hists_RadHighNoVar3c = nullptr; + std::shared_ptr<PlotManager> m_hists_RadHighNoVar3c_Loose = nullptr; + std::shared_ptr<PlotManager> m_hists_RadLowNoVar3c 
= nullptr; + std::shared_ptr<PlotManager> m_hists_RadLowNoVar3c_Loose = nullptr; // Nominal hash value std::size_t m_nominalHashValue; @@ -78,6 +82,8 @@ namespace top { bool m_doNominal; bool m_doRadHigh; bool m_doRadLow; + bool m_doRadHighNoVar3c; // doRadHigh but don't take Var3c into account + bool m_doRadLowNoVar3c; // doRadLow but don't take Var3c into account // pT and eta bin edges, separated by colons std::string m_ptBins; diff --git a/PhysicsAnalysis/TopPhys/xAOD/TopSystematicObjectMaker/Root/JetObjectCollectionMaker.cxx b/PhysicsAnalysis/TopPhys/xAOD/TopSystematicObjectMaker/Root/JetObjectCollectionMaker.cxx index 811c9eca48b7aa891ec06d6ea142b4bf452c736f..244d7303f5916148a2f54993846f4c1ad419f37c 100644 --- a/PhysicsAnalysis/TopPhys/xAOD/TopSystematicObjectMaker/Root/JetObjectCollectionMaker.cxx +++ b/PhysicsAnalysis/TopPhys/xAOD/TopSystematicObjectMaker/Root/JetObjectCollectionMaker.cxx @@ -290,7 +290,7 @@ namespace top { if (m_config->useLargeRJets()) { for (const std::pair<std::string, std::string>& name : m_config->boostedJetTaggers()) { std::string fullName = name.first + "_" + name.second; - m_boostedJetTaggers[fullName] = ToolHandle<IJetSelectorTool>(fullName); + m_boostedJetTaggers[fullName] = ToolHandle<IJetDecorator>(fullName); top::check(m_boostedJetTaggers[fullName].retrieve(), "Failed to retrieve " + fullName); } } @@ -764,6 +764,7 @@ namespace top { //decorate with boosted-tagging flags for (const std::pair<std::string, std::string>& name : m_config->boostedJetTaggers()) { std::string fullName = name.first + "_" + name.second; + // TODO: Rewrite this to use the new interface // const Root::TAccept& result = m_boostedJetTaggers[fullName]->tag(jet); // TAccept has bool operator overloaded, but let's be more explicit in the output to char // jet.auxdecor<char>("isTagged_" + fullName) = (result ? 
1 : 0); diff --git a/PhysicsAnalysis/TopPhys/xAOD/TopSystematicObjectMaker/TopSystematicObjectMaker/JetObjectCollectionMaker.h b/PhysicsAnalysis/TopPhys/xAOD/TopSystematicObjectMaker/TopSystematicObjectMaker/JetObjectCollectionMaker.h index 686540e520bb1c00ce8a4525f51e326b1db38fb2..57bfb1538fb427cdfafbadefea227684ba71c486 100644 --- a/PhysicsAnalysis/TopPhys/xAOD/TopSystematicObjectMaker/TopSystematicObjectMaker/JetObjectCollectionMaker.h +++ b/PhysicsAnalysis/TopPhys/xAOD/TopSystematicObjectMaker/TopSystematicObjectMaker/JetObjectCollectionMaker.h @@ -47,7 +47,7 @@ #include "TopJetSubstructure/TopJetSubstructure.h" #include "FTagAnalysisInterfaces/IBTaggingSelectionTool.h" -#include "JetAnalysisInterfaces/IJetSelectorTool.h" +#include "JetInterface/IJetDecorator.h" #include "ParticleJetTools/JetTruthLabelingTool.h" // Forward declaration(s): @@ -135,7 +135,7 @@ namespace top { // do decorate the large-R jets with the boosted-tagging flags // and decorate jets with TAccept object containing detailed tag result informaiton // https://twiki.cern.ch/twiki/bin/view/AtlasProtected/BoostedJetTaggingRecommendation2017#TAcceptUsageSection - std::unordered_map<std::string, ToolHandle<IJetSelectorTool> > m_boostedJetTaggers; + std::unordered_map<std::string, ToolHandle<IJetDecorator> > m_boostedJetTaggers; ToolHandle<IJetUpdateJvt> m_jetUpdateJvtTool; ToolHandle<IJetModifier> m_jetSelectfJvtTool; diff --git a/PhysicsAnalysis/TruthParticleID/McParticleUtils/CMakeLists.txt b/PhysicsAnalysis/TruthParticleID/McParticleUtils/CMakeLists.txt index 5457f0d1c86b6d3525ded34dcb0a5a439e546667..293767e7f0649a0cb0eae0618bcb310e41250bd5 100644 --- a/PhysicsAnalysis/TruthParticleID/McParticleUtils/CMakeLists.txt +++ b/PhysicsAnalysis/TruthParticleID/McParticleUtils/CMakeLists.txt @@ -7,6 +7,7 @@ atlas_subdir( McParticleUtils ) find_package( Boost ) find_package( HepPDT ) find_package( Python COMPONENTS Development ) +find_package( CLHEP ) # Component(s) in the package: atlas_add_library( 
McParticleUtils diff --git a/Projects/AnalysisBase/package_filters.txt b/Projects/AnalysisBase/package_filters.txt index 2f954cc6d377dea3cbee7a798c9cd2ec2e2bc497..b83b59bc01c3b407939963b18e3ff717469c9122 100644 --- a/Projects/AnalysisBase/package_filters.txt +++ b/Projects/AnalysisBase/package_filters.txt @@ -11,7 +11,6 @@ #+ PhysicsAnalysis/AnalysisCommon/FakeBkgTools #+ PhysicsAnalysis/BPhys/BPhysTools #+ PhysicsAnalysis/DerivationFramework/DerivationFrameworkAnalysisTests -#+ Reconstruction/Jet/BoostedJetTaggers #+ Trigger/TriggerSimulation/TrigBtagEmulationTool @@ -83,6 +82,7 @@ + PhysicsAnalysis/TauID/TauAnalysisTools + PhysicsAnalysis/TrackingID/.* + Reconstruction/EventShapes/EventShapeInterface ++ Reconstruction/Jet/BoostedJetTaggers - Reconstruction/Jet/JetAnalysisTools/JetAnalysisEDM - Reconstruction/Jet/JetEvent.* - Reconstruction/Jet/JetMonitoring diff --git a/Projects/AthAnalysis/CMakeGraphVizOptions.cmake b/Projects/AthAnalysis/CMakeGraphVizOptions.cmake new file mode 100644 index 0000000000000000000000000000000000000000..4f6d416f611d2a909e75120f4cedd261ebd7f504 --- /dev/null +++ b/Projects/AthAnalysis/CMakeGraphVizOptions.cmake @@ -0,0 +1,5 @@ +# Options used by cmake --graphviz +set( GRAPHVIZ_CUSTOM_TARGETS TRUE ) +set( GRAPHVIZ_GENERATE_DEPENDERS FALSE ) +set( GRAPHVIZ_GENERATE_PER_TARGET FALSE ) +set( GRAPHVIZ_IGNORE_TARGETS ".*Pkg$" ".*PkgPrivate$" "^__.*" ) diff --git a/Projects/AthAnalysis/CMakeLists.txt b/Projects/AthAnalysis/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..2750b1c54d0b346b0e6c93fd3e786c44cfb74d4f --- /dev/null +++ b/Projects/AthAnalysis/CMakeLists.txt @@ -0,0 +1,85 @@ +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration + +# Set up the project. 
+cmake_minimum_required( VERSION 3.6 ) +file( READ ${CMAKE_SOURCE_DIR}/version.txt _version ) +string( STRIP ${_version} _version ) +project( AthAnalysis VERSION ${_version} LANGUAGES C CXX Fortran ) +unset( _version ) + +# Find the ATLAS CMake code: +find_package( AtlasCMake QUIET ) + +# Find the base project(s): +find_package( AthAnalysisExternals ${_version} REQUIRED ) +find_package( Gaudi REQUIRED ) + +# Additional externals needed for the build/runtime: +find_package( Xrootd ) +find_package( GSL ) +find_package( Davix ) +find_package( PNG ) +find_package( BLAS ) +find_package( VDT ) +find_package( requests ) + +# Compile flag(s). +add_definitions( -DXAOD_ANALYSIS ) + +# CMake cache variable(s). +set( XAOD_ANALYSIS TRUE CACHE BOOL + "Flag specifying that this is an analysis release" ) + +# Set up where to find the AthenaPoolUtilitiesTest CMake code. +set( AthenaPoolUtilitiesTest_DIR + "${CMAKE_SOURCE_DIR}/../../Database/AthenaPOOL/AthenaPoolUtilities/cmake" + CACHE PATH "Directory holding the AthenaPoolUtilititesTest module" ) + +# Set up where to find the xAODUtilities CMake code. +set( xAODUtilities_DIR + "${CMAKE_SOURCE_DIR}/../../Event/xAOD/xAODCore/cmake" + CACHE PATH "Directory holding the xAODUtilities module" ) + +# Make the local CMake files visible to AtlasCMake. +list( INSERT CMAKE_MODULE_PATH 0 ${CMAKE_SOURCE_DIR}/cmake ) + +# Set up CTest. +atlas_ctest_setup() + +# Set up the ATLAS project. +atlas_project( USE AthAnalysisExternals ${AthAnalysisExternals_VERSION} + PROJECT_ROOT ${CMAKE_SOURCE_DIR}/../../ ) + +# Generate the environment setup for the externals, to be used during the build. +lcg_generate_env( SH_FILE ${CMAKE_BINARY_DIR}/${ATLAS_PLATFORM}/env_setup.sh ) + +# Generate replacement rules for the installed paths. +set( _replacements ) +if( NOT "$ENV{NICOS_PROJECT_HOME}" STREQUAL "" ) + get_filename_component( _buildDir $ENV{NICOS_PROJECT_HOME} PATH ) + list( APPEND _replacements ${_buildDir} "\${AthAnalysis_DIR}/../../../.." 
) +endif() + +# Now generate and install the installed setup files. +lcg_generate_env( + SH_FILE ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/env_setup_install.sh + REPLACE ${_replacements} ) +install( FILES ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/env_setup_install.sh + DESTINATION . RENAME env_setup.sh ) + +# Configure and install the project configuration file(s). +configure_file( ${CMAKE_SOURCE_DIR}/cmake/PreConfig.cmake.in + ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/PreConfig.cmake @ONLY ) +configure_file( ${CMAKE_SOURCE_DIR}/cmake/PostConfig.cmake.in + ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/PostConfig.cmake @ONLY ) +install( FILES + ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/PreConfig.cmake + ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/PostConfig.cmake + DESTINATION ${CMAKE_INSTALL_CMAKEDIR} ) + +# Install graphviz output if available. +install( FILES ${CMAKE_BINARY_DIR}/packages.dot + DESTINATION . OPTIONAL ) + +# Package up the release using CPack. +atlas_cpack_setup() diff --git a/Projects/AthAnalysis/README.md b/Projects/AthAnalysis/README.md new file mode 100644 index 0000000000000000000000000000000000000000..b9f9ea40885b030dfb36e0210b29904e656c0721 --- /dev/null +++ b/Projects/AthAnalysis/README.md @@ -0,0 +1,56 @@ +The ATLAS Analysis Software Project +=================================== + +This is the configuration for building the analysis software of ATLAS, +meant for performing final-stage analysis on DxAOD + +It shares much the same code with the Athena project, it just doesn't +build a lot of things that are not needed for analysis jobs. + +Setup Instructions +------------------ + +You need a recent version of cmake and gcc set up. You can get this from +setupATLAS with, for instance: + + asetup none,gcc8,cmakesetup --cmakeversion=3.18.3 + +Build Instructions +------------------ + +To build the externals necessary for building this project itself, use the + + ./build_externals.sh -fc + +script. 
It will build all the externals necessary for this project into a +subdirectory of the directory holding this repository, called `build`. + +The sources of the externals will be checked out under `build/src`, the +build of the projects will commence under `build/build`, and the results of +the build will be installed under `build/install`. + +RPMs created from the externals are copied under `build/` by the script. + +Once the externals have finished building, you can initiate the full build +of the project against these newly built externals by executing the + + ./build.sh -acmi + +script. It uses the same directory layout inside the `build` directory as +was used for the externals. + +Finally, if you want to then *use* the release, you need to set it up by +sourcing the setup script in the install area: + + source ../../../build/install/AthAnalysis/*/InstallArea/*/setup.sh + +If you do this in a new shell you will need to lsetup cmake and gcc again +though! + +In fact, instead of sourcing the setup script directly, you are advised to use +the `asetup` command if it is available to you (which will source it on your +behalf, as well as probably do some other things). E.g. you can do: + + asetup AthAnalysis,22.2.4 --releasesarea=../../../build/install + +of course substitute in the correct version number and path to the install area. diff --git a/Projects/AthAnalysis/build.sh b/Projects/AthAnalysis/build.sh new file mode 100755 index 0000000000000000000000000000000000000000..c112b94640df2e2bd24cba0a580a0d1d12a0659f --- /dev/null +++ b/Projects/AthAnalysis/build.sh @@ -0,0 +1,165 @@ +#!/bin/bash +# +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +# +# Script for building the release on top of externals built using one of the +# scripts in this directory. +# + +# Helper function for timing the various build steps. 
+_time_() { local c="time -p " ; while test "X$1" != "X" ; do c+=" \"$1\"" ; + shift; done; ( eval "$c" ) 2>&1 | + sed "s,^real[[:space:]],time::${c}:: real ," ; } + +# Function printing the usage information for the script +usage() { + echo "Usage: build.sh [-t build type] [-b build dir] [-c] [-m] [-i] [-p] [-a] [-x opt]" + echo " -c: Execute CMake step" + echo " -m: Execute make step" + echo " -i: Execute install step" + echo " -p: Execute CPack step" + echo " -a: Abort on error" + echo " -x: Extra configuration argument(s) for CMake" + echo "" + echo "If none of the c, m, i or p options are set then the script will do" + echo "*all* steps. Otherwise only the enabled steps are run - it's your" + echo "reponsibility to ensure that precusors are in good shape" +} + +# Parse the command line arguments: +BUILDDIR="" +BUILDTYPE="RelWithDebInfo" +EXE_CMAKE="" +EXE_MAKE="" +EXE_INSTALL="" +EXE_CPACK="" +NIGHTLY=true +EXTRACMAKE=() +while getopts ":t:b:hcmipax:" opt; do + case $opt in + t) + BUILDTYPE=$OPTARG + ;; + b) + BUILDDIR=$OPTARG + ;; + c) + EXE_CMAKE="1" + ;; + m) + EXE_MAKE="1" + ;; + i) + EXE_INSTALL="1" + ;; + p) + EXE_CPACK="1" + ;; + a) + NIGHTLY=false + ;; + x) + EXTRACMAKE+=($OPTARG) + ;; + h) + usage + exit 0 + ;; + :) + echo "Argument -$OPTARG requires a parameter!" + usage + exit 1 + ;; + ?) 
+ echo "Unknown argument: -$OPTARG" + usage + exit 1 + ;; + esac +done + +# If no step was explicitly specified, turn them all on: +if [ -z "$EXE_CMAKE" -a -z "$EXE_MAKE" -a -z "$EXE_INSTALL" \ + -a -z "$EXE_CPACK" ]; then + EXE_CMAKE="1" + EXE_MAKE="1" + EXE_INSTALL="1" + EXE_CPACK="1" +fi + +# Stop on errors from here on out: +set -e +# consider a pipe failed if ANY of the commands fails +set -o pipefail + +# Source in our environment +AthAnalysisSrcDir=$(dirname ${BASH_SOURCE[0]}) +if [ -z "$BUILDDIR" ]; then + BUILDDIR=${AthAnalysisSrcDir}/../../../build +fi +mkdir -p ${BUILDDIR} +BUILDDIR=$(\cd ${BUILDDIR} && \pwd) +source $AthAnalysisSrcDir/build_env.sh -b $BUILDDIR >& ${BUILDDIR}/build_env.log +cat ${BUILDDIR}/build_env.log + +# create the actual build directory +mkdir -p ${BUILDDIR}/build/AthAnalysis + +# CMake: +if [ -n "$EXE_CMAKE" ]; then + # Remove the CMakeCache.txt file, to force CMake to find externals + # from scratch in an incremental build. + rm -f ${BUILDDIR}/build/AthAnalysis/CMakeCache.txt + # Now run the actual CMake configuration: + { _time_ cmake --graphviz=${BUILDDIR}/build/AthAnalysis/packages.dot \ + -DCMAKE_BUILD_TYPE:STRING=${BUILDTYPE} \ + -DCTEST_USE_LAUNCHERS:BOOL=TRUE \ + ${EXTRACMAKE[@]} \ + -B ${BUILDDIR}/build/AthAnalysis \ + -S ${AthAnalysisSrcDir}; } \ + 2>&1 | tee ${BUILDDIR}/build/AthAnalysis/cmake_config.log +fi + +# for nightly builds we want to get as far as we can +if [ "$NIGHTLY" = true ]; then + # At this point stop worrying about errors: + set +e +fi + +# make: +if [ -n "$EXE_MAKE" ]; then + # Forcibly remove the merged CLID file from the previous build, to + # avoid issues with some library possibly changing the name/CLID + # of something during the build. + rm -f ${BUILDDIR}/build/AthAnalysis/*/share/clid.db + # Extra argument(s) for the build tool. 
+ BUILDARGS=() + if [ "$NIGHTLY" = true ]; then + if [[ "${EXTRACMAKE[@]}" == *"Ninja"* ]]; then + BUILDARGS+=(-k0) + else + BUILDARGS+=(-k) + fi + fi + # Build the project. + { _time_ cmake --build ${BUILDDIR}/build/AthAnalysis -- ${BUILDARGS[@]}; } \ + 2>&1 | tee ${BUILDDIR}/build/AthAnalysis/cmake_build.log +fi + +# Install the results: +if [ -n "$EXE_INSTALL" ]; then + { DESTDIR=${BUILDDIR}/install _time_ \ + cmake --install ${BUILDDIR}/build/AthAnalysis; } \ + 2>&1 | tee ${BUILDDIR}/build/AthAnalysis/cmake_install.log +fi + +# Build an RPM for the release: +if [ -n "$EXE_CPACK" ]; then + cd ${BUILDDIR}/build/AthAnalysis + { _time_ cpack; } 2>&1 | tee ${BUILDDIR}/build/AthAnalysis/cmake_cpack.log + if [ "$BUILDTYPE" = "RelWithDebInfo" ]; then + { _time_ cpack --config CPackDbgRPMConfig.cmake; } \ + 2>&1 | tee -a ${BUILDDIR}/build/AthAnalysis/cmake_cpack.log + fi + cp AthAnalysis*.rpm ${BUILDDIR}/ +fi diff --git a/Projects/AthAnalysis/build_env.sh b/Projects/AthAnalysis/build_env.sh new file mode 100644 index 0000000000000000000000000000000000000000..aede41bd025f72116c702564edaa6043728ac744 --- /dev/null +++ b/Projects/AthAnalysis/build_env.sh @@ -0,0 +1,98 @@ +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +# +# This script sets up the build enironment for an AthAnalysis +# build, on top of a built set of externals (including Gaudi) +# +# This script is kept separate from the build.sh +# wrapper so it can be sourced separately from it when +# clients want to manage their own build and just want +# to setup the build environment + +env_usage() { + echo "Usage: build_env.sh [-b build dir]" +} + +# This function actually sets up the environment for us +# (factorise it here in case it needs skipped) +env_setup() { + startdir=$(pwd) + # As this script can be sourced we need to support zsh and + # possibly other Bourne shells + if [ "x${BASH_SOURCE[0]}" = "x" ]; then + # This trick should do the right thing under ZSH: + thisdir=$(dirname 
`print -P %x`) + if [ $? != 0 ]; then + echo "ERROR: This script must be sourced from BASH or ZSH" + return 1 + fi + else + # The BASH solution is a bit more straight forward: + thisdir=$(dirname ${BASH_SOURCE[0]}) + fi + AthAnalysisSrcDir=$(cd ${thisdir};pwd) + + # The directory holding the helper scripts: + scriptsdir=${AthAnalysisSrcDir}/../../Build/AtlasBuildScripts + + # Go to the main directory of the repository: + cd ${AthAnalysisSrcDir}/../.. + + # Check if the user specified any source/build directories: + if [ "$BUILDDIR" = "" ]; then + BUILDDIR=${AthAnalysisSrcDir}/../../../build + fi + + # Get the version of AthAnalysis for the build. + version=`cat ${AthAnalysisSrcDir}/version.txt` + + # Set up the environment for the build: + export NICOS_PROJECT_HOME=$(cd ${BUILDDIR}/install;pwd)/AthAnalysis + + # Set up the environment variables for finding LCG externals: + source ${scriptsdir}/LCG_RELEASE_BASE.sh + + # Set up the AthAnalysisExternals project: + extDir=${BUILDDIR}/install/AthAnalysisExternals/${version}/InstallArea + if [ ! -d ${extDir} ]; then + echo "Didn't find the AthAnalysisExternals project under ${extDir}" + fi + echo "Setting up AthAnalysisExternals from: ${extDir}" + source ${extDir}/*/setup.sh + + cd $startdir +} + +# we need to reset the option index as we are sourcing this script +# http://stackoverflow.com/questions/23581368/bug-in-parsing-args-with-getopts-in-bash +OPTIND=1 + +# Parse the command line arguments: +BUILDDIR="" +while getopts "b:h" opt; do + case $opt in + b) + BUILDDIR=$OPTARG + ;; + h) + env_usage + ABORT=1 + ;; + :) + echo "Argument -$OPTARG requires a parameter!" + env_usage + ABORT=1 + ;; + ?) + echo "Unknown argument: -$OPTARG" + env_usage + ABORT=1 + ;; + esac +done + +# Put a big wrapper around bad argument case, because +# a sourced script should not call "exit". This is quite +# annoying... 
+if [ -z "$ABORT" ]; then + env_setup +fi diff --git a/Projects/AthAnalysis/build_externals.sh b/Projects/AthAnalysis/build_externals.sh new file mode 100755 index 0000000000000000000000000000000000000000..a204d1b32c07e8ccaadcfb2bf722cf822254d3b0 --- /dev/null +++ b/Projects/AthAnalysis/build_externals.sh @@ -0,0 +1,145 @@ +#!/bin/bash +# +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +# +# Script building all the externals necessary for AthAnalysis. +# + +# Function printing the usage information for the script +usage() { + echo "Usage: build_externals.sh [-t build_type] [-b build_dir] [-f] [-c]" + echo " -f: Force rebuild of externals from scratch, otherwise if script" + echo " finds an external build present it will only do an incremental" + echo " build" + echo " -c: Build the externals for the continuous integration (CI) system," + echo " skipping the build of the externals RPMs." + echo " -x: Extra cmake argument(s) to provide for the build(configuration)" + echo " of all externals needed by AthAnalysis." + echo "If a build_dir is not given the default is '../build'" + echo "relative to the athena checkout" +} + +# Parse the command line arguments: +BUILDDIR="" +BUILDTYPE="RelWithDebInfo" +FORCE="" +CI="" +EXTRACMAKE=(-DLCG_VERSION_NUMBER=98 -DLCG_VERSION_POSTFIX="python3_ATLAS_2" + -DATLAS_GAUDI_TAG="v35r0.001") +while getopts ":t:b:x:fch" opt; do + case $opt in + t) + BUILDTYPE=$OPTARG + ;; + b) + BUILDDIR=$OPTARG + ;; + f) + FORCE="1" + ;; + c) + CI="1" + ;; + x) + EXTRACMAKE+=($OPTARG) + ;; + h) + usage + exit 0 + ;; + :) + echo "Argument -$OPTARG requires a parameter!" + usage + exit 1 + ;; + ?) + echo "Unknown argument: -$OPTARG" + usage + exit 1 + ;; + esac +done + +# Only stop on errors if we are in the CI. Otherwise just count them. 
+if [ "$CI" = "1" ]; then + set -e + set -o pipefail +fi +ERROR_COUNT=0 + +# We are in BASH, get the path of this script in a simple way: +thisdir=$(dirname ${BASH_SOURCE[0]}) +thisdir=$(cd ${thisdir};pwd) + +# Go to the main directory of the repository: +cd ${thisdir}/../.. + +# Check if the user specified any source/build directories: +if [ "$BUILDDIR" = "" ]; then + BUILDDIR=${thisdir}/../../../build +fi +mkdir -p ${BUILDDIR} +BUILDDIR=$(cd $BUILDDIR; pwd) + +if [ "$FORCE" = "1" ]; then + echo "Force deleting existing build area..." + rm -fr ${BUILDDIR}/install/AthAnalysisExternals + rm -fr ${BUILDDIR}/src/AthAnalysisExternals + rm -fr ${BUILDDIR}/build/AthAnalysisExternals +fi + +# Get the version of AthAnalysis for the build. +version=`cat ${thisdir}/version.txt` +# Generate hash of any extra cmake arguments. +cmakehash=`echo -n "${EXTRACMAKE}" | openssl md5 | awk '{print $2}'` + +# Check if previous externals build can be reused: +externals_stamp=${BUILDDIR}/build/AthAnalysisExternals/externals-${version}-${cmakehash}.stamp +if [ -f ${externals_stamp} ]; then + if diff -q ${externals_stamp} ${thisdir}/externals.txt; then + echo "Correct version of externals already available in ${BUILDDIR}" + exit 0 + else + rm ${externals_stamp} + fi +fi + +# Create some directories: +mkdir -p ${BUILDDIR}/{src,install} + +# The directory holding the helper scripts: +scriptsdir=${thisdir}/../../Build/AtlasBuildScripts +scriptsdir=$(cd ${scriptsdir}; pwd) + +# Set the environment variable for finding LCG releases: +source ${scriptsdir}/LCG_RELEASE_BASE.sh + +# Flag for triggering the build of RPMs for the externals: +RPMOPTIONS="-r ${BUILDDIR}" +if [ "$CI" = "1" ]; then + RPMOPTIONS= +fi + +# Read in the tag/branch to use for AthAnalysisExternals: +AthAnalysisExternalsVersion=$(awk '/^AthAnalysisExternalsVersion/{print $3}' ${thisdir}/externals.txt) + +# Check out AthAnalysisExternals from the right branch/tag: +${scriptsdir}/checkout_atlasexternals.sh \ + -t 
${AthAnalysisExternalsVersion} \ + -s ${BUILDDIR}/src/AthAnalysisExternals 2>&1 | tee ${BUILDDIR}/src/checkout.AthAnalysisExternals.log + +# Build AthAnalysisExternals: +${scriptsdir}/build_atlasexternals.sh \ + -s ${BUILDDIR}/src/AthAnalysisExternals \ + -b ${BUILDDIR}/build/AthAnalysisExternals \ + -i ${BUILDDIR}/install \ + -p AthAnalysisExternals ${RPMOPTIONS} -t ${BUILDTYPE} \ + ${EXTRACMAKE[@]/#/-x } -v ${version} || ((ERROR_COUNT++)) + +# Exit with the error count taken into account. +if [ ${ERROR_COUNT} -ne 0 ]; then + echo "AthAnalysis externals build encountered ${ERROR_COUNT} error(s)" +else + cp ${thisdir}/externals.txt ${externals_stamp} +fi +exit ${ERROR_COUNT} diff --git a/Projects/AthAnalysis/cmake/PostConfig.cmake.in b/Projects/AthAnalysis/cmake/PostConfig.cmake.in new file mode 100644 index 0000000000000000000000000000000000000000..931da4b36edb1260e8e7af9c2277e4d41e08c060 --- /dev/null +++ b/Projects/AthAnalysis/cmake/PostConfig.cmake.in @@ -0,0 +1,17 @@ +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +# +# File taking care of pointing the downstream projects at the right +# version of the externals. +# + +# Set up a "quiet flag". +set( _quietFlag ) +if( AthAnalysis_FIND_QUIETLY ) + set( _quietFlag QUIET ) +endif() + +# CMake cache variable(s). +set( CLHEP_ROOT "${CLHEP_LCGROOT}" CACHE PATH "Path to pick up CLHEP from" ) + +# Find Gaudi. +find_package( Gaudi @Gaudi_VERSION@ EXACT REQUIRED ${_quietFlag} ) diff --git a/Projects/AthAnalysis/cmake/PreConfig.cmake.in b/Projects/AthAnalysis/cmake/PreConfig.cmake.in new file mode 100644 index 0000000000000000000000000000000000000000..5f56b636e732633cc202bd01206c423a7006dd40 --- /dev/null +++ b/Projects/AthAnalysis/cmake/PreConfig.cmake.in @@ -0,0 +1,17 @@ +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +# +# Pre-config script to propagate variables to downstream projects +# + +# Set up the project's flake8 usage. 
+set( ATLAS_FLAKE8 @ATLAS_FLAKE8@ + CACHE STRING "Default flake8 command" ) +set( ATLAS_PYTHON_CHECKER @ATLAS_PYTHON_CHECKER@ + CACHE STRING "Python checker command to run during Python module compilation" ) + +# Compilation flag(s). +add_definitions( -DXAOD_ANALYSIS ) + +# CMake cache variable(s). +set( XAOD_ANALYSIS TRUE CACHE BOOL + "Flag specifying that this is an analysis release" ) diff --git a/Projects/AthAnalysis/cmake/README.txt.in b/Projects/AthAnalysis/cmake/README.txt.in new file mode 100644 index 0000000000000000000000000000000000000000..5f5b373adc2905eaca231fe90eba46471c94f805 --- /dev/null +++ b/Projects/AthAnalysis/cmake/README.txt.in @@ -0,0 +1,5 @@ + + AthAnalysis - @CMAKE_PROJECT_VERSION@ + +This package provides version @CMAKE_PROJECT_VERSION@ of the ATLAS +analysis software. diff --git a/Projects/AthAnalysis/externals.txt b/Projects/AthAnalysis/externals.txt new file mode 100644 index 0000000000000000000000000000000000000000..3eab080a196206b8e710e1e7f958d51b271fb1ae --- /dev/null +++ b/Projects/AthAnalysis/externals.txt @@ -0,0 +1,9 @@ +# Versions of the various externals to build before starting the build of +# this project, when doing a full stack nightly build. +# +# Remember that when specifying the name of a branch, you *must* put +# an "origin/" prefix before it. For tags however this is explicitly +# forbidden. + +# The version of atlas/atlasexternals to use: +AthAnalysisExternalsVersion = 2.0.92 diff --git a/Projects/AthAnalysis/package_filters.txt b/Projects/AthAnalysis/package_filters.txt new file mode 100644 index 0000000000000000000000000000000000000000..dee4753b679ef9a44fa5a89f590cf68b7c7e31c7 --- /dev/null +++ b/Projects/AthAnalysis/package_filters.txt @@ -0,0 +1,213 @@ +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +# +# Package filtering rules for the AthAnalysis project build. 
+# + +# Testing package(s): ++ AsgExternal/Asg_Test ++ AtlasTest/TestTools + +# Core xAOD reading: ++ Control/AthContainersInterfaces ++ Control/AthContainersRoot ++ Control/AthContainers ++ Control/AthLinks ++ Control/AthToolSupport/.* ++ Control/CxxUtils ++ Control/xAODRootAccess.* + +# EDM: ++ Event/EventPrimitives ++ Event/FourMomUtils ++ Event/xAOD/xAODMetaDataCnv ++ Event/xAOD/xAODTriggerCnv ++ Event/xAOD/xAODTrackingCnv ++ Event/xAOD/xAODEventFormatCnv ++ Event/xAOD/xAODCoreCnv ++ Event/xAOD/xAODTruthCnv +#+ Event/xAOD/xAODEventInfoCnv +- Event/xAOD/.*Cnv ++ Event/xAOD/.* + +# Others: ++ Calorimeter/CaloGeoHelpers ++ DetectorDescription/GeoPrimitives ++ DetectorDescription/IRegionSelector ++ DetectorDescription/RoiDescriptor + +# Packages temporarily turned off: +- Reconstruction/Jet/JetAnalysisTools/.* +- Reconstruction/Jet/JetMomentTools + +# Analysis Tools: ++ DataQuality/GoodRunsLists ++ ForwardDetectors/ZDC/ZdcAnalysis ++ Generators/TruthUtils ++ InnerDetector/InDetRecTools/InDetTrackSelectionTool ++ InnerDetector/InDetRecTools/TrackVertexAssociationTool ++ MuonSpectrometer/MuonStationIndex +- PhysicsAnalysis/Algorithms/StandaloneAnalysisAlgorithms ++ PhysicsAnalysis/Algorithms/.* ++ PhysicsAnalysis/AnalysisCommon/AssociationUtils ++ PhysicsAnalysis/AnalysisCommon/EventUtils +#+ PhysicsAnalysis/AnalysisCommon/FakeBkgTools ++ PhysicsAnalysis/AnalysisCommon/FsrUtils ++ PhysicsAnalysis/AnalysisCommon/IsolationSelection ++ PhysicsAnalysis/AnalysisCommon/PATCore ++ PhysicsAnalysis/AnalysisCommon/PATInterfaces ++ PhysicsAnalysis/AnalysisCommon/PMGTools ++ PhysicsAnalysis/AnalysisCommon/ParticleJetTools ++ PhysicsAnalysis/AnalysisCommon/PileupReweighting ++ PhysicsAnalysis/AnalysisCommon/PMGOverlapRemovalTools/HFORTools ++ PhysicsAnalysis/AnalysisCommon/PMGOverlapRemovalTools/GammaORTools ++ PhysicsAnalysis/AnalysisCommon/ReweightUtils ++ PhysicsAnalysis/AnalysisCommon/HDF5Utils ++ PhysicsAnalysis/AnalysisCommon/CutBookkeeperUtils +#+ 
PhysicsAnalysis/AnalysisCommon/ThinningUtils ++ PhysicsAnalysis/AnalysisCommon/TruthClassification +#+ PhysicsAnalysis/BPhys/BPhysTools ++ PhysicsAnalysis/D3PDTools/RootCoreUtils ++ PhysicsAnalysis/D3PDTools/SampleHandler ++ PhysicsAnalysis/D3PDTools/AnaAlgorithm +- PhysicsAnalysis/D3PDTools/.* ++ PhysicsAnalysis/ElectronPhotonID/.* ++ PhysicsAnalysis/HeavyIonPhys/HIEventUtils ++ PhysicsAnalysis/HiggsPhys/Run2/HZZ/Tools/ZMassConstraint ++ PhysicsAnalysis/Interfaces/.* ++ PhysicsAnalysis/JetMissingEtID/JetSelectorTools ++ PhysicsAnalysis/JetTagging/JetTagPerformanceCalibration/CalibrationDataInterface ++ PhysicsAnalysis/JetTagging/JetTagPerformanceCalibration/xAODBTaggingEfficiency ++ PhysicsAnalysis/JetTagging/FlavorTagDiscriminants ++ PhysicsAnalysis/MCTruthClassifier ++ PhysicsAnalysis/MuonID/MuonIDAnalysis/.* ++ PhysicsAnalysis/MuonID/MuonSelectorTools +#+ PhysicsAnalysis/SUSYPhys/SUSYTools ++ PhysicsAnalysis/TauID/DiTauMassTools ++ PhysicsAnalysis/TauID/TauAnalysisTools ++ PhysicsAnalysis/TrackingID/.* ++ Reconstruction/EventShapes/EventShapeInterface +#- Reconstruction/Jet/JetAnalysisTools/JetAnalysisEDM +- Reconstruction/Jet/JetEvent.* +- Reconstruction/Jet/JetMonitoring ++ Reconstruction/Jet/JetReclustering +- Reconstruction/Jet/JetRec.+ +- Reconstruction/Jet/JetSimTools +- Reconstruction/Jet/JetValidation ++ Reconstruction/Jet/Jet.* +#+ Reconstruction/Jet/BoostedJetTaggers ++ Reconstruction/MET/METInterface ++ Reconstruction/MET/METUtilities ++ Reconstruction/MVAUtils ++ Reconstruction/PFlow/PFlowUtils ++ Reconstruction/egamma/egammaInterfaces ++ Reconstruction/egamma/egammaLayerRecalibTool ++ Reconstruction/egamma/egammaMVACalib ++ Reconstruction/egamma/egammaRecEvent ++ Reconstruction/egamma/egammaUtils ++ Reconstruction/tauRecTools ++ Tools/PathResolver ++ Tools/ART ++ Tools/DirectIOART ++ Trigger/TrigAnalysis/TrigAnalysisInterfaces ++ Trigger/TrigAnalysis/TrigBunchCrossingTool ++ Trigger/TrigAnalysis/TrigDecisionTool ++ 
Trigger/TrigAnalysis/TrigGlobalEfficiencyCorrection ++ Trigger/TrigAnalysis/TrigTauAnalysis/TrigTauMatching ++ Trigger/TrigAnalysis/TriggerMatchingTool ++ Trigger/TrigConfiguration/TrigConfBase ++ Trigger/TrigConfiguration/TrigConfData ++ Trigger/TrigConfiguration/TrigConfHLTData ++ Trigger/TrigConfiguration/TrigConfHLTUtils ++ Trigger/TrigConfiguration/TrigConfInterfaces ++ Trigger/TrigConfiguration/TrigConfL1Data ++ Trigger/TrigConfiguration/TrigConfxAOD +#+ Trigger/TrigCost/EnhancedBiasWeighter +#+ Trigger/TrigCost/RatesAnalysis ++ Trigger/TrigEvent/TrigDecisionInterface ++ Trigger/TrigEvent/TrigNavStructure ++ Trigger/TrigEvent/TrigRoiConversion ++ Trigger/TrigEvent/TrigSteeringEvent ++ Trigger/TrigSteer/TrigCompositeUtils ++ Trigger/TrigValidation/TrigAnalysisTest +#+ Trigger/TriggerSimulation/TrigBtagEmulationTool +#+ Reconstruction/RecoTools/IsolationTool ++ Reconstruction/RecoTools/RecoToolInterfaces + +### Here follows the Athena-specific parts of the analysis release + +# Core Athena (would like to reduce) : ++ Control/AthAllocators ++ Control/AthenaServices ++ Control/StoreGate ++ Control/SGComps ++ Control/SGTools ++ Control/StoreGateBindings ++ Control/SGMon/SGAudCore ++ Control/AthenaBaseComps ++ Control/AthAnalysisBaseComps ++ Control/AthenaCommon ++ Control/AthenaKernel ++ Control/AthenaPython ++ Control/CLID.* ++ Control/GaudiSequencer ++ Control/DataModelRoot ++ Control/RootUtils ++ Control/PerformanceMonitoring/PerfMonKernel ++ Control/PerformanceMonitoring/PerfMonComps ++ Control/PerformanceMonitoring/PerfMonEvent ++ Control/PerformanceMonitoring/PerfMonGPerfTools ++ Control/Navigation + +#scripts package contains useful commands like get_files ++ Tools/Scripts + ++ Tools/PyUtils +# Need IOVSvc for, at least, the file peeking (:-() ++ Control/IOVSvc + ++ PhysicsAnalysis/POOLRootAccess + +# POOL Support (would like to decouple from core) : ++ Database/APR/.* ++ Database/AthenaRoot/.* ++ Database/ConnectionManagement/.* +- 
Database/AthenaPOOL/AthenaPoolExample.* +- Database/AthenaPOOL/.*Tools ++ Database/AthenaPOOL/DBDataModel ++ Database/AthenaPOOL/.* ++ Database/IOVDb.* ++ Database/PersistentDataModel.* ++ Database/TPTools ++ Database/CoraCool ++ Database/AtlasSTLAddReflex ++ Control/DataModelAthenaPool + +# Basic DF setup : ++ PhysicsAnalysis/DerivationFramework/DerivationFrameworkCore ++ PhysicsAnalysis/DerivationFramework/DerivationFrameworkInterfaces ++ PhysicsAnalysis/CommonTools/ExpressionEvaluation + +# EVNT support : +#+ PhysicsAnalysis/DerivationFramework/DerivationFrameworkMCTruth ++ Generators/AtlasHepMC ++ Generators/GenInterfaces ++ Generators/GeneratorObjects.* ++ Generators/HepMCWeightSvc + +# Needed for CutFlowSvc when using filter algs ++ Event/EventBookkeeperTools ++ Event/EventBookkeeperMetaData + +# LumiBlockComps for the LumiBlockMetaDataTool ++ LumiBlock/LumiBlockComps + +# Other extras (would very much like to eliminate!) : ++ Event/EventInfo.* ++ Event/EventAthenaPool ++ Event/EventTPCnv ++ Event/EventPTCnv ++ DetectorDescription/Identifier ++ DetectorDescription/GeoModel/GeoModelInterfaces + +# Don't build anything else: +- .* diff --git a/Projects/AthAnalysis/version.txt b/Projects/AthAnalysis/version.txt new file mode 100644 index 0000000000000000000000000000000000000000..d4b369b4e5d8570b7a4ffd09e3e1336fcc9bc5e1 --- /dev/null +++ b/Projects/AthAnalysis/version.txt @@ -0,0 +1 @@ +22.2.4 diff --git a/Reconstruction/Jet/BoostedJetTaggers/BoostedJetTaggers/BoostedJetTaggersDict.h b/Reconstruction/Jet/BoostedJetTaggers/BoostedJetTaggers/BoostedJetTaggersDict.h new file mode 100644 index 0000000000000000000000000000000000000000..2dd7809e2995d84297cba348da6e4b57adf7b4f1 --- /dev/null +++ b/Reconstruction/Jet/BoostedJetTaggers/BoostedJetTaggers/BoostedJetTaggersDict.h @@ -0,0 +1,19 @@ +/* + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +*/ + +#ifndef BOOSTEDJETTAGGERS_BOOSTEDJETTAGGERSDICT_H +#define 
BOOSTEDJETTAGGERS_BOOSTEDJETTAGGERSDICT_H + +#if defined(__GCCXML__) and not defined(EIGEN_DONT_VECTORIZE) +#define EIGEN_DONT_VECTORIZE +#endif // __GCCXML__ + +// Includes for the dictionary generation: +#include "BoostedJetTaggers/SmoothedWZTagger.h" +#include "BoostedJetTaggers/JSSWTopTaggerDNN.h" +#include "BoostedJetTaggers/JSSWTopTaggerANN.h" +#include "BoostedJetTaggers/JetQGTagger.h" +#include "BoostedJetTaggers/JetQGTaggerBDT.h" + +#endif // BOOSTEDJETTAGGERS_BOOSTEDJETTAGGERSDICT_H diff --git a/Reconstruction/Jet/BoostedJetTaggers/BoostedJetTaggers/JSSTaggerBase.h b/Reconstruction/Jet/BoostedJetTaggers/BoostedJetTaggers/JSSTaggerBase.h new file mode 100644 index 0000000000000000000000000000000000000000..4988e50eda322c48b930e08f030d619be5a0638c --- /dev/null +++ b/Reconstruction/Jet/BoostedJetTaggers/BoostedJetTaggers/JSSTaggerBase.h @@ -0,0 +1,239 @@ +/* + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +*/ + +#ifndef BOOSTEDJETSTAGGERS_JSSTAGGERBASE_H +#define BOOSTEDJETSTAGGERS_JSSTAGGERBASE_H + +#include "AsgTools/AsgTool.h" +#include "AsgTools/AnaToolHandle.h" + +#include "JetInterface/IJetDecorator.h" + +#include "xAODJet/Jet.h" +#include "xAODJet/JetContainer.h" +#include "xAODTruth/TruthParticleContainer.h" +#include "xAODEventInfo/EventInfo.h" + +#include "BoostedJetTaggers/TagResultEnum.h" + +#include "PATCore/AcceptData.h" + +#include "PathResolver/PathResolver.h" + +#include "AsgDataHandles/WriteDecorHandle.h" +#include "AsgDataHandles/ReadDecorHandle.h" + +#include <TFile.h> +#include <TEnv.h> +#include <TH2.h> +#include <TF1.h> + +#include <atomic> + +class JSSTaggerBase : public asg::AsgTool , + virtual public IJetDecorator +{ + ASG_TOOL_CLASS1(JSSTaggerBase, IJetDecorator ) + + protected: + + /// Default constructor - to be used in all derived classes + JSSTaggerBase(const std::string &name); + + /// Default destructor - to be used in all derived classes + ~JSSTaggerBase() {}; + + /// Initialize the tool + virtual 
StatusCode initialize() override; + + /// Decorate jet collection with tagging info + virtual StatusCode decorate( const xAOD::JetContainer& jets ) const override; + + /// Decorate single jet with tagging info + virtual StatusCode tag( const xAOD::Jet& jet ) const = 0; + + /// TEnv instance to read config files + TEnv m_configReader; + + /// Object that stores the results for a jet + asg::AcceptInfo m_acceptInfo; + + /// WriteDecorHandle keys for tagging bools + SG::WriteDecorHandleKey<xAOD::JetContainer> m_decTaggedKey{this, "TaggedName", "Tagged", "SG key for Tagged"}; + SG::WriteDecorHandleKey<xAOD::JetContainer> m_decValidPtRangeHighKey{this, "ValidPtRangeHighName", "ValidPtRangeHigh", "SG key for ValidPtRangeHigh"}; + SG::WriteDecorHandleKey<xAOD::JetContainer> m_decValidPtRangeLowKey{this, "ValidPtRangeLowName", "ValidPtRangeLow", "SG key for ValidPtRangeLow"}; + SG::WriteDecorHandleKey<xAOD::JetContainer> m_decValidEtaRangeKey{this, "ValidEtaRangeName", "ValidEtaRange", "SG key for ValidEtaRange"}; + SG::WriteDecorHandleKey<xAOD::JetContainer> m_decValidJetContentKey{this, "ValidJetContentName", "ValidJetContent", "SG key for ValidJetContent"}; + SG::WriteDecorHandleKey<xAOD::JetContainer> m_decValidEventContentKey{this, "ValidEventContentName", "ValidEventContent", "SG key for ValidEventContent"}; + + SG::WriteDecorHandleKey<xAOD::JetContainer> m_decPassMassKey{this, "PassMassName", "PassMass", "SG key for PassMass"}; + SG::WriteDecorHandleKey<xAOD::JetContainer> m_decPassScoreKey{this, "PassScoreName", "PassScore", "SG key for PassScore"}; + + /// Maximum number of warnings + const int m_nWarnMax = 10; + + /// Warning counters + mutable std::atomic<int> m_nWarnKin; + mutable std::atomic<int> m_nWarnVar; + + /// Flag to indicate units pT is defined in + /// Set to false by default + bool m_ptGeV = false; + + /// Flag to indicate if mass window cut is used + bool m_useMassCut = false; + + /// Flag to indicate if a discriminant score is used + bool 
m_useScoreCut = false; + + /// TAGTYPE enum + enum TAGCLASS{Unknown, WBoson, ZBoson, TopQuark}; + TAGCLASS m_tagClass; + + /// Configurable members + + /// Jet container name + std::string m_containerName; + + /// Path to the SF configuration root file + std::string m_weightConfigPath; + + /// Configuration file name + std::string m_configFile; + + /// Location where config files live on cvmfs + std::string m_calibArea; + + /// Keras configurations for ML taggers + std::string m_kerasConfigFileName; + std::string m_kerasConfigFilePath; + std::string m_kerasConfigOutputName; + std::string m_kerasCalibArea; + + /// TMVA configurations for BDT taggers + std::string m_tmvaConfigFileName; + std::string m_tmvaConfigFilePath; + std::string m_tmvaCalibArea; + + /// Tagger information + std::string m_wkpt; + std::string m_tagType; + + /// Kinematic bounds for the jet - the units are controlled by m_ptGeV + float m_jetPtMin; + float m_jetPtMax; + float m_jetEtaMax; + + /// Flags controlling whether generalized ECF moments or L-series ratios are needed + /// TODO: Implement the functionality controlled by these + bool m_useECFG = false; + bool m_useLSeries = false; + + /// WriteDecorHandle keys for JSS moments + SG::WriteDecorHandleKey<xAOD::JetContainer> m_decTau21WTAKey{this, "Tau21WTAName", "Tau21_wta", "SG key for Tau21_wta"}; + SG::WriteDecorHandleKey<xAOD::JetContainer> m_decTau32WTAKey{this, "Tau32WTAName", "Tau32_wta", "SG key for Tau32_wta"}; + + SG::WriteDecorHandleKey<xAOD::JetContainer> m_decC2Key{this, "C2Name", "C2", "SG key for C2"}; + SG::WriteDecorHandleKey<xAOD::JetContainer> m_decD2Key{this, "D2Name", "D2", "SG key for D2"}; + SG::WriteDecorHandleKey<xAOD::JetContainer> m_decE3Key{this, "e3Name", "e3", "SG key for e3"}; + + /// ReadDecorHandle keys for JSS moments + SG::ReadDecorHandleKey<xAOD::JetContainer> m_readTau1WTAKey{this, "Tau1WTAName", "Tau1_wta", "SG key for Tau1_wta"}; + SG::ReadDecorHandleKey<xAOD::JetContainer> m_readTau2WTAKey{this, 
"Tau2WTAName", "Tau2_wta", "SG key for Tau2_wta"}; + SG::ReadDecorHandleKey<xAOD::JetContainer> m_readTau3WTAKey{this, "Tau3WTAName", "Tau3_wta", "SG key for Tau3_wta"}; + + SG::ReadDecorHandleKey<xAOD::JetContainer> m_readTau21WTAKey{this, "Tau21WTAName", "Tau21_wta", "SG key for Tau21_wta"}; + SG::ReadDecorHandleKey<xAOD::JetContainer> m_readTau32WTAKey{this, "Tau32WTAName", "Tau32_wta", "SG key for Tau32_wta"}; + + SG::ReadDecorHandleKey<xAOD::JetContainer> m_readECF1Key{this, "ECF1Name", "ECF1", "SG key for ECF1"}; + SG::ReadDecorHandleKey<xAOD::JetContainer> m_readECF2Key{this, "ECF2Name", "ECF2", "SG key for ECF2"}; + SG::ReadDecorHandleKey<xAOD::JetContainer> m_readECF3Key{this, "ECF3Name", "ECF3", "SG key for ECF3"}; + + SG::ReadDecorHandleKey<xAOD::JetContainer> m_readC2Key{this, "C2Name", "C2", "SG key for C2"}; + SG::ReadDecorHandleKey<xAOD::JetContainer> m_readD2Key{this, "D2Name", "D2", "SG key for D2"}; + SG::ReadDecorHandleKey<xAOD::JetContainer> m_readE3Key{this, "e3Name", "e3", "SG key for e3"}; + + SG::ReadDecorHandleKey<xAOD::JetContainer> m_readSplit12Key{this, "Split12Name", "Split12", "SG key for Split12"}; + SG::ReadDecorHandleKey<xAOD::JetContainer> m_readSplit23Key{this, "Split23Name", "Split23", "SG key for Split23"}; + + SG::ReadDecorHandleKey<xAOD::JetContainer> m_readQwKey{this, "QwName", "Qw", "SG key for Qw"}; + + /// Strings for cut functions + std::string m_strMassCutLow; + std::string m_strMassCutHigh; + std::string m_strScoreCut; + + /// TF1 for cut functions + std::unique_ptr<TF1> m_funcMassCutLow; + std::unique_ptr<TF1> m_funcMassCutHigh; + std::unique_ptr<TF1> m_funcScoreCut; + + /// Decoration name + std::string m_decorationName; + + /// Flag to calculate scale factor + bool m_calcSF; + bool m_isMC; + + /// String for scale factor decoration names + std::string m_weightDecorationName; + std::string m_weightFileName; + std::string m_weightHistogramName; + std::string m_efficiencyHistogramName; + std::string m_weightFlavors; + + 
/// Histograms for scale factors + std::unique_ptr<TFile> m_weightConfig; + std::map<std::string, std::unique_ptr<TH2D>> m_weightHistograms; + std::map<std::string, std::unique_ptr<TH2D>> m_efficiencyHistograms; + + /// Truth label options + bool m_truthLabelUseTRUTH3; + std::string m_truthParticleContainerName; + std::string m_truthBosonContainerName; + std::string m_truthTopQuarkContainerName; + std::string m_truthLabelName; + + /// Truth label ReadDecorHandle key + SG::ReadDecorHandleKey<xAOD::JetContainer> m_readTruthLabelKey{this, "truthLabelName", "truthLabel", "SG key for truthLabel"}; + + /// WriteDecorHandle keys for cut values + SG::WriteDecorHandleKey<xAOD::JetContainer> m_decCutMLowKey{this, "CutMLowName", "Cut_mlow", "SG key for Cut_mlow"}; + SG::WriteDecorHandleKey<xAOD::JetContainer> m_decCutMHighKey{this, "CutMHighName", "Cut_mhigh", "SG key for Cut_mhigh"}; + SG::WriteDecorHandleKey<xAOD::JetContainer> m_decScoreCutKey{this, "CutScoreName", "Cut_score", "SG key for Cut_score"}; + SG::WriteDecorHandleKey<xAOD::JetContainer> m_decScoreValueKey{this, "ScoreName", "Score", "SG key for Score"}; + + /// WriteDecorHandle keys for SF + SG::WriteDecorHandleKey<xAOD::JetContainer> m_decWeightKey{this, "weightName", "weight", "SG key for weight"}; + SG::WriteDecorHandleKey<xAOD::JetContainer> m_decEfficiencyKey{this, "efficiencyName", "efficiency", "SG key for efficiency"}; + SG::WriteDecorHandleKey<xAOD::JetContainer> m_decEffSFKey{this, "effSFName", "effSF", "SG key for effSF"}; + + /// Get configReader StatusCode + StatusCode getConfigReader(); + + /// Reset cuts + StatusCode resetCuts( asg::AcceptData &acceptData ) const; + + /// Check if jet passes kinematic constraints + bool passKinRange( const xAOD::Jet &jet ) const; + + /// Check and record if jet passes kinematic constraints + StatusCode checkKinRange( const xAOD::Jet &jet, asg::AcceptData &acceptData ) const; + + /// Calculate JSS moment ratios in case they are not already saved + /// TODO: Remove 
this once JSSMomentTools is modified to take const jets + int calculateJSSRatios( const xAOD::Jet &jet ) const; + + /// Get SF weight + StatusCode getWeight( const xAOD::Jet& jet, bool passSel, asg::AcceptData &acceptData ) const; + + /// Get scale factor and efficiency + std::pair<double,double> getSF( const xAOD::Jet& jet, asg::AcceptData &acceptData ) const; + + /// Print configured cuts + void printCuts() const; + +}; + +#endif diff --git a/Reconstruction/Jet/BoostedJetTaggers/BoostedJetTaggers/JSSWTopTaggerANN.h b/Reconstruction/Jet/BoostedJetTaggers/BoostedJetTaggers/JSSWTopTaggerANN.h new file mode 100644 index 0000000000000000000000000000000000000000..41c67b32441d37bbbd3e38c12a05ba7e0af2a8d7 --- /dev/null +++ b/Reconstruction/Jet/BoostedJetTaggers/BoostedJetTaggers/JSSWTopTaggerANN.h @@ -0,0 +1,47 @@ +/* + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +*/ + +#ifndef JSSWTOPTAGGERANN_H_ +#define JSSWTOPTAGGERANN_H_ + +#include "BoostedJetTaggers/JSSTaggerBase.h" + +#include "lwtnn/LightweightGraph.hh" +#include "lwtnn/parse_json.hh" +#include "lwtnn/Exceptions.hh" +#include "lwtnn/lightweight_nn_streamers.hh" + +class JSSWTopTaggerANN : + public JSSTaggerBase { + ASG_TOOL_CLASS0(JSSWTopTaggerANN) + + public: + + /// Constructor + JSSWTopTaggerANN(const std::string& name); + + /// Run once at the start of the job to setup everything + virtual StatusCode initialize() override; + + /// Decorate single jet with tagging info + virtual StatusCode tag(const xAOD::Jet& jet) const override; + + private: + + /// ANN tools + std::unique_ptr<lwt::LightweightGraph> m_lwnn; + std::map<std::string, std::map<std::string,double>> m_ANN_inputValues; // variables for ANN + + /// Internal stuff to keep track of the output node for the NN + std::vector<std::string> m_out_names; + + /// Retrieve score for a given ANN type (top/W) + double getScore(const xAOD::Jet& jet) const; + + /// Update the jet substructure variables for each jet to use in ANN + 
std::map<std::string, std::map<std::string,double>> getJetProperties(const xAOD::Jet& jet) const; + + }; + +#endif diff --git a/Reconstruction/Jet/BoostedJetTaggers/BoostedJetTaggers/JSSWTopTaggerDNN.h b/Reconstruction/Jet/BoostedJetTaggers/BoostedJetTaggers/JSSWTopTaggerDNN.h new file mode 100644 index 0000000000000000000000000000000000000000..6ad76c08a588124f9121a1319ba5d11f0c7c74e0 --- /dev/null +++ b/Reconstruction/Jet/BoostedJetTaggers/BoostedJetTaggers/JSSWTopTaggerDNN.h @@ -0,0 +1,43 @@ +/* + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +*/ + +#ifndef JSSWTOPTAGGERDNN_H_ +#define JSSWTOPTAGGERDNN_H_ + +#include "BoostedJetTaggers/JSSTaggerBase.h" + +#include "lwtnn/LightweightNeuralNetwork.hh" +#include "lwtnn/parse_json.hh" + +class JSSWTopTaggerDNN : + public JSSTaggerBase { + ASG_TOOL_CLASS0(JSSWTopTaggerDNN) + + public: + + /// Constructor + JSSWTopTaggerDNN( const std::string& name ); + + /// Run once at the start of the job to setup everything + virtual StatusCode initialize() override; + + /// Decorate single jet with tagging info + virtual StatusCode tag( const xAOD::Jet& jet ) const override; + + private: + + /// DNN tools + std::unique_ptr<lwt::LightweightNeuralNetwork> m_lwnn; + /// Variables for DNN + std::map<std::string, double> m_DNN_inputValues; + + /// Retrieve score for a given DNN type (top/W) + double getScore( const xAOD::Jet& jet ) const; + + /// Update the jet substructure variables for each jet to use in DNN + std::map<std::string,double> getJetProperties( const xAOD::Jet& jet ) const; + + }; + +#endif diff --git a/Reconstruction/Jet/BoostedJetTaggers/BoostedJetTaggers/JetQGTagger.h b/Reconstruction/Jet/BoostedJetTaggers/BoostedJetTaggers/JetQGTagger.h new file mode 100644 index 0000000000000000000000000000000000000000..53f73796fd0604fbce57147febd4d25f7ff8d96b --- /dev/null +++ b/Reconstruction/Jet/BoostedJetTaggers/BoostedJetTaggers/JetQGTagger.h @@ -0,0 +1,126 @@ +/* + Copyright (C) 2002-2020 CERN for the 
benefit of the ATLAS collaboration +*/ + +#ifndef JETQGTAGGER_H +#define JETQGTAGGER_H + +#include "JetAnalysisInterfaces/IJetQGTagger.h" +#include "BoostedJetTaggers/JSSTaggerBase.h" + +#include "PATInterfaces/SystematicsTool.h" + +#include <TH2D.h> + +namespace InDet { class IInDetTrackSelectionTool; } +namespace InDet { class IInDetTrackTruthFilterTool; } +namespace InDet { class IJetTrackFilterTool; } +namespace InDet { class IInDetTrackTruthOriginTool; } + +namespace CP { + + enum QGSystApplied { + QG_NONE, + QG_TRACKEFFICIENCY, + QG_TRACKFAKES, + QG_NCHARGEDTOPO, + QG_NCHARGEDEXP_UP, + QG_NCHARGEDME_UP, + QG_NCHARGEDPDF_UP, + QG_NCHARGEDEXP_DOWN, + QG_NCHARGEDME_DOWN, + QG_NCHARGEDPDF_DOWN + }; + + + class JetQGTagger: public IJetQGTagger, public JSSTaggerBase, public SystematicsTool{ + ASG_TOOL_CLASS( JetQGTagger, IJetQGTagger ) + + public: + + JetQGTagger( const std::string& name); + virtual ~JetQGTagger(); // destructor + + virtual StatusCode initialize() override; + + // Implement IBoostedJetTagger interface + virtual StatusCode tag(const xAOD::Jet& jet, const xAOD::Vertex *pv) const override; + virtual StatusCode tag(const xAOD::Jet& jet) const override { return tag(jet, nullptr); } + + // functions for systematic variations + virtual bool isAffectedBySystematic(const SystematicVariation& var) const override {return SystematicsTool::isAffectedBySystematic(var);} + virtual SystematicSet affectingSystematics() const override {return SystematicsTool::affectingSystematics();} + virtual SystematicSet recommendedSystematics() const override {return SystematicsTool::recommendedSystematics();} + virtual StatusCode applySystematicVariation(const SystematicSet& set) override {return SystematicsTool::applySystematicVariation(set);} + virtual StatusCode sysApplySystematicVariation(const SystematicSet&) override; + + private: + JetQGTagger(); + StatusCode getNTrack(const xAOD::Jet * jet, /*const xAOD::Vertex * pv,*/ int &ntracks) const ; + StatusCode 
getNTrackWeight(const xAOD::Jet * jet, double &weight) const ; + StatusCode simplegetNTrackWeight(const xAOD::Jet * jet, double &weight) const ; + + QGSystApplied m_appliedSystEnum; + + TH2D* m_hquark; + TH2D* m_hgluon; + + TH2D* m_topo_hquark; + + TH2D* m_exp_hquark_up; + TH2D* m_exp_hquark_down; + TH2D* m_exp_hgluon_up; + TH2D* m_exp_hgluon_down; + + TH2D* m_me_hquark_up; + TH2D* m_me_hquark_down; + TH2D* m_me_hgluon_up; + TH2D* m_me_hgluon_down; + + TH2D* m_pdf_hquark_up; + TH2D* m_pdf_hquark_down; + TH2D* m_pdf_hgluon_up; + TH2D* m_pdf_hgluon_down; + + TH2D* m_trackeff_hquark; + TH2D* m_trackeff_hgluon; + TH2D* m_fake_hquark; + TH2D* m_fake_hgluon; + + StatusCode loadHist(TH2D *&hist,std::string filename,std::string histname); + + std::string m_taggername; + std::string m_topofile; + std::string m_expfile; + std::string m_mefile; + std::string m_pdffile; + std::string m_trackefffile; + std::string m_fakefile; + std::string m_weight_decoration_name; + std::string m_tagger_decoration_name; + + int m_NTrackCut; + double m_slope; + double m_intercept; + std::string m_cuttype; + int m_mode; + + asg::AnaToolHandle<InDet::IInDetTrackSelectionTool> m_trkSelectionTool; + asg::AnaToolHandle<InDet::IInDetTrackTruthFilterTool> m_trkTruthFilterTool; + asg::AnaToolHandle<InDet::IInDetTrackTruthFilterTool> m_trkFakeTool; + asg::AnaToolHandle<InDet::IJetTrackFilterTool> m_jetTrackFilterTool; + asg::AnaToolHandle<InDet::IInDetTrackTruthOriginTool> m_originTool; + + /// ReadDecorHandle keys + SG::ReadDecorHandleKey<xAOD::JetContainer> m_readNumTrkPt500PVKey{this, "NumTrkPt500PVName", "NumTrkPt500PV", "SG key for NumTrkPt500PV"}; + SG::ReadDecorHandleKey<xAOD::JetContainer> m_readNtrkKey{this, "NtrkName", "DFCommonJets_QGTagger_NTracks", "SG key for Ntrk"}; + + /// WriteDecorHandle keys + SG::WriteDecorHandleKey<xAOD::JetContainer> m_decTagKey{this, "TagName", "Tag", "SG key for Tag"}; + SG::WriteDecorHandleKey<xAOD::JetContainer> m_decWeightKey{this, "WeightName", "Weight", "SG 
key for Weight"}; + + }; + +} /* namespace CP */ + +#endif /* JETQGTAGGER_H */ diff --git a/Reconstruction/Jet/BoostedJetTaggers/BoostedJetTaggers/JetQGTaggerBDT.h b/Reconstruction/Jet/BoostedJetTaggers/BoostedJetTaggers/JetQGTaggerBDT.h new file mode 100644 index 0000000000000000000000000000000000000000..8404a5c5ec2cb85864ef0459104f792032daa737 --- /dev/null +++ b/Reconstruction/Jet/BoostedJetTaggers/BoostedJetTaggers/JetQGTaggerBDT.h @@ -0,0 +1,75 @@ +/* + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +*/ + +#ifndef JETQGTAGGERBDT_H_ +#define JETQGTAGGERBDT_H_ + +#include "BoostedJetTaggers/JSSTaggerBase.h" + +#include "TMVA/Tools.h" +#include "TMVA/Reader.h" +#include "TMVA/MethodCuts.h" + +namespace InDet { + class IInDetTrackSelectionTool; +} + +namespace CP { + + class JetQGTaggerBDT : + public JSSTaggerBase { + ASG_TOOL_CLASS0(JetQGTaggerBDT) + + public: + + /// Constructor + JetQGTaggerBDT(const std::string& name); + + /// Run once at the start of the job to setup everything + virtual StatusCode initialize() override; + + /// IBoostedJetTagger interface + virtual StatusCode tag(const xAOD::Jet& jet) const override; + + private: + + /// Retrieve BDT score + float getScore( const xAOD::Jet& jet, asg::AcceptData &acceptData ) const; + + /// Update the jet substructure variables for each jet to use in BDT + bool getJetProperties( const xAOD::Jet& jet, asg::AcceptData &acceptData ) const; + + bool getPrecomputedVariables( const xAOD::Jet& jet, asg::AcceptData &acceptData ) const; + + bool calculateVariables( const xAOD::Jet& jet, asg::AcceptData &acceptData ) const; + + bool isCorrectNumberOfTracks( int expectedNTracks, int nTracksFromGhostTracks ) const; + + /// TMVA tools + std::unique_ptr<TMVA::Reader> m_bdtTagger; + std::string m_BDTmethod; + + asg::AnaToolHandle<InDet::IInDetTrackSelectionTool> m_trkSelectionTool; + + // inclusive config file + std::string m_tmvaConfigFileName; + std::string m_tmvaConfigFilePath; + + //string for 
cut function + std::string m_strScoreCut; + + // variables for TMVA + mutable float m_pt; + mutable float m_eta; + mutable float m_ntracks; + mutable float m_trackwidth; + mutable float m_trackC1; + + int m_mode; + + }; + +} /* namespace CP */ + +#endif diff --git a/Reconstruction/Jet/BoostedJetTaggers/BoostedJetTaggers/SmoothedWZTagger.h b/Reconstruction/Jet/BoostedJetTaggers/BoostedJetTaggers/SmoothedWZTagger.h new file mode 100644 index 0000000000000000000000000000000000000000..3f9e00307f340a8064e83577321e3551225298a2 --- /dev/null +++ b/Reconstruction/Jet/BoostedJetTaggers/BoostedJetTaggers/SmoothedWZTagger.h @@ -0,0 +1,51 @@ +/* + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +*/ + +#ifndef BOOSTEDJETSTAGGERS_SMOOTHEDWZTAGGER_H_ +#define BOOSTEDJETSTAGGERS_SMOOTHEDWZTAGGER_H_ + +#include "BoostedJetTaggers/JSSTaggerBase.h" + +class SmoothedWZTagger : + public JSSTaggerBase { + ASG_TOOL_CLASS0(SmoothedWZTagger) + + public: + + /// Constructor + SmoothedWZTagger( const std::string& name ); + + /// Run once at the start of the job to setup everything + virtual StatusCode initialize() override; + + /// Decorate single jet with tagging info + virtual StatusCode tag( const xAOD::Jet& jet ) const override; + + private: + + /// Flag to indicate if Ntrk is used + bool m_useNtrk; + + /// Store functional form of cuts + std::string m_strD2Cut; + std::string m_strNtrkCut; + + /// Functions that are configurable for specific cut values + std::unique_ptr<TF1> m_funcD2Cut; + std::unique_ptr<TF1> m_funcNtrkCut; + + /// WriteDecorHandle keys + SG::WriteDecorHandleKey<xAOD::JetContainer> m_decPassD2Key{this, "PassD2Name", "PassD2", "SG key for PassD2"}; + SG::WriteDecorHandleKey<xAOD::JetContainer> m_decPassNtrkKey{this, "PassNtrkName", "PassNtrk", "SG key for PassNtrk"}; + + SG::WriteDecorHandleKey<xAOD::JetContainer> m_decCutD2Key{this, "CutD2Name", "Cut_D2", "SG key for Cut_D2"}; + SG::WriteDecorHandleKey<xAOD::JetContainer> m_decCutNtrkKey{this, 
"CutNtrkName", "Cut_Ntrk", "SG key for Cut_Ntrk"}; + SG::WriteDecorHandleKey<xAOD::JetContainer> m_decAcceptKey{this, "acceptName", "accept", "SG key for accept"}; + + /// ReadDecorHandle keys + SG::ReadDecorHandleKey<xAOD::JetContainer> m_readUngroomedLinkKey{this, "ParentName", "Parent", "SG key for Parent"}; + + }; + +#endif diff --git a/Reconstruction/Jet/BoostedJetTaggers/BoostedJetTaggers/TagResultEnum.h b/Reconstruction/Jet/BoostedJetTaggers/BoostedJetTaggers/TagResultEnum.h new file mode 100644 index 0000000000000000000000000000000000000000..2273d4cb5dd3a3c600774c8ed1fa24185997a587 --- /dev/null +++ b/Reconstruction/Jet/BoostedJetTaggers/BoostedJetTaggers/TagResultEnum.h @@ -0,0 +1,45 @@ +/* + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +*/ + +#ifndef TAGRESULTENUM_H +#define TAGRESULTENUM_H + +namespace TagResult +{ + enum TypeEnum + { + UNKNOWN=0, // not tagged yet + passMpassD2_2Var, + passMfailD2_2Var, + failMpassD2_2Var, + failMfailD2_2Var + }; + inline int enumToInt(const TypeEnum type) + { + switch (type) + { + case passMpassD2_2Var: return 1; + case passMfailD2_2Var: return 2; + case failMpassD2_2Var: return 3; + case failMfailD2_2Var: return 4; + default: return 0; + } + } + inline TypeEnum intToEnum(const int type) + { + if ( type==1 ){ + return passMpassD2_2Var; + }else if ( type==2 ){ + return passMfailD2_2Var; + }else if ( type==3 ){ + return failMpassD2_2Var; + }else if ( type==4 ){ + return failMfailD2_2Var; + } + + return UNKNOWN; + } +} + +#endif diff --git a/Reconstruction/Jet/BoostedJetTaggers/BoostedJetTaggers/selection.xml b/Reconstruction/Jet/BoostedJetTaggers/BoostedJetTaggers/selection.xml new file mode 100644 index 0000000000000000000000000000000000000000..2c1ca9c8c170969c5730222cc596c7323d9b4e18 --- /dev/null +++ b/Reconstruction/Jet/BoostedJetTaggers/BoostedJetTaggers/selection.xml @@ -0,0 +1,17 @@ +<lcgdict> + <!-- Requested dictionary generation --> + <class name="SmoothedWZTagger" /> + <class 
name="JSSWTopTaggerDNN" /> + <class name="JSSWTopTaggerANN" /> + <class name="JetQGTagger" /> + <class name="CP::JetQGTaggerBDT" /> + + <!-- Suppress unwanted dictionaries generated by ROOT 6 --> + <exclusion> + <class name="SG::IConstAuxStore"/> + <class name="DataLink<SG::IConstAuxStore>"/> + <class name="xAOD::IParticle"/> + <class name="DataVector<xAOD::IParticle>"/> + <class name="ElementLink<DataVector<xAOD::IParticle> >"/> + </exclusion> +</lcgdict> diff --git a/Reconstruction/Jet/BoostedJetTaggers/CMakeLists.txt b/Reconstruction/Jet/BoostedJetTaggers/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..b70403f2e74f49e7d4adc950a8d3d5c4b462163f --- /dev/null +++ b/Reconstruction/Jet/BoostedJetTaggers/CMakeLists.txt @@ -0,0 +1,92 @@ +################################################################################ +# Build configuration for BoostedJetTaggers +################################################################################ + +# Declare the name of the package: +atlas_subdir( BoostedJetTaggers ) + +# Extra dependencies, based on the environment: +set( extra_libs ) +if( XAOD_STANDALONE ) + set( extra_deps Control/xAODRootAccess ) + set( extra_libs xAODRootAccess ) +else() + set( extra_deps PRIVATE GaudiKernel ) +endif() + +# External dependencies: +find_package( FastJet ) +find_package( ROOT COMPONENTS Matrix TMVA ) +find_package( Boost ) +find_package( lwtnn ) + +# Build a shared library: +atlas_add_library( BoostedJetTaggersLib + BoostedJetTaggers/*.h + Root/JSSTaggerBase.cxx + Root/JetQGTagger.cxx + Root/JetQGTaggerBDT.cxx + Root/JSSWTopTaggerDNN.cxx + Root/JSSWTopTaggerANN.cxx + Root/SmoothedWZTagger.cxx + PUBLIC_HEADERS BoostedJetTaggers + INCLUDE_DIRS ${Boost_INCLUDE_DIRS} ${ROOT_INCLUDE_DIRS} ${LWTNN_INCLUDE_DIRS} + LINK_LIBRARIES ${Boost_LIBRARIES} ${ROOT_LIBRARIES} ${FASTJET_LIBRARIES} + ${LWTNN_LIBRARIES} + AsgTools + xAODBase xAODJet xAODTruth xAODTracking JetInterface JetRecLib JetEDM JetSelectorToolsLib 
JetCalibToolsLib JetUncertaintiesLib ParticleJetToolsLib PathResolver + MuonSelectorToolsLib MuonMomentumCorrectionsLib PATCoreLib PATInterfaces FlavorTagDiscriminants + JetAnalysisInterfacesLib InDetTrackSelectionToolLib InDetTrackSystematicsToolsLib + MuonAnalysisInterfacesLib ${extra_libs} + PRIVATE_LINK_LIBRARIES JetEDM ) + +if( NOT XAOD_STANDALONE ) + atlas_add_component( BoostedJetTaggers + src/components/*.cxx + LINK_LIBRARIES BoostedJetTaggersLib JetCalibToolsLib) +endif() + +atlas_add_dictionary( BoostedJetTaggersDict + BoostedJetTaggers/BoostedJetTaggersDict.h + BoostedJetTaggers/selection.xml + LINK_LIBRARIES BoostedJetTaggersLib ) + +# Executable(s) in the package: +if( XAOD_STANDALONE ) + atlas_add_executable( test_SmoothedWZTagger + util/test_SmoothedWZTagger.cxx + INCLUDE_DIRS ${ROOT_INCLUDE_DIRS} + LINK_LIBRARIES ${ROOT_LIBRARIES} xAODRootAccess + xAODEventInfo xAODJet xAODTruth xAODCore PATInterfaces xAODCore AsgTools + BoostedJetTaggersLib + ) + atlas_add_executable( test_JetQGTagger + util/test_JetQGTagger.cxx + INCLUDE_DIRS ${ROOT_INCLUDE_DIRS} + LINK_LIBRARIES ${ROOT_LIBRARIES} xAODRootAccess + xAODEventInfo xAODJet xAODCore PATInterfaces xAODCore AsgTools + BoostedJetTaggersLib + ) + atlas_add_executable( test_JSSWTopTaggerDNN + util/test_JSSWTopTaggerDNN.cxx + INCLUDE_DIRS ${ROOT_INCLUDE_DIRS} + LINK_LIBRARIES ${ROOT_LIBRARIES} xAODRootAccess + xAODEventInfo xAODJet xAODTruth xAODCore PATInterfaces xAODCore AsgTools + BoostedJetTaggersLib + ) + atlas_add_executable( test_JSSWTopTaggerANN + util/test_JSSWTopTaggerANN.cxx + INCLUDE_DIRS ${ROOT_INCLUDE_DIRS} + LINK_LIBRARIES ${ROOT_LIBRARIES} xAODRootAccess + xAODEventInfo xAODJet xAODTruth xAODCore PATInterfaces xAODCore AsgTools + BoostedJetTaggersLib + ) + atlas_add_executable( test_JetQGTaggerBDT + util/test_JetQGTaggerBDT.cxx + INCLUDE_DIRS ${ROOT_INCLUDE_DIRS} + LINK_LIBRARIES ${ROOT_LIBRARIES} xAODRootAccess + xAODEventInfo xAODJet xAODCore PATInterfaces xAODCore AsgTools + 
BoostedJetTaggersLib + ) +endif() + diff --git a/Reconstruction/Jet/BoostedJetTaggers/README.md b/Reconstruction/Jet/BoostedJetTaggers/README.md new file mode 100644 index 0000000000000000000000000000000000000000..9927df88e9e6583568afb19020a9b6e6bc0f764d --- /dev/null +++ b/Reconstruction/Jet/BoostedJetTaggers/README.md @@ -0,0 +1,137 @@ +Boosted Jet Taggers +=================== + +This package contains the tagging code for boosted Top/W/Z/Higgs tagging + + +How to add a tagger +=================== + +We assume you have access to `/cvmfs` + +Step 1: Set up a release +------------------------ + +First you should pick a relatively recent version of AnalysisBase. To +see the versions, look here: + +``` +/cvmfs/atlas.cern.ch/repo/sw/software/21.2/AnalysisBase +``` + +You should probably pick the highest number. + +Next move to an empty directory and make a setup script. Something +like this should work: + +``` +export ATLAS_LOCAL_ROOT_BASE=/cvmfs/atlas.cern.ch/repo/ATLASLocalRootBase +alias setupATLAS='source ${ATLAS_LOCAL_ROOT_BASE}/user/atlasLocalSetup.sh' + +setupATLAS +asetup AnalysisBase,21.2.XX +``` + +where you can replace the `XX` on the last line with the most recent +release you found above. I recommend putting this into a `setup.sh` +script in the empty directory, so you can source it every time you log +in. + +Once you have this, run `source setup.sh`. + +Step 2: Check out BoostedJetTaggers +----------------------------------- + +We follow the "sparse checkout" instructions on the +[Atlas docs][1]. Assuming you've already created your fork of Athena, +you clone Athena and check out BoostedJetTaggers with + +``` +lsetup git +git atlas init-workdir https://:@gitlab.cern.ch:8443/atlas/athena.git +cd athena +git checkout -t upstream/21.2 +git atlas addpkg BoostedJetTaggers +``` + +When this finishes you'll see `Reconstruction` in your local +directory. The boosted taggers package is inside. 
+ +[1]: https://atlassoftwaredocs.web.cern.ch/gittutorial/ + +Step 3: Build +------------- + +Go back to the top level directory (with your `setup.sh` script) and +run the following + +``` +mkdir build +cd build +cmake ../athena/Projects/WorkDir/ +make -j 4 +``` + +(Note that the `-j 4` argument in the last line is just to tell make +to run 4 processes in parallel. If you have limited cores on your +machine you can omit this.) + +Step 4: Run a test job +---------------------- + +You want to make sure something works before you start hacking +away. To run a simple test job, you can use the built in unit tests +before doing anything though, you need to set up the local +environment: + +``` +source $AnalysisBase_PLATFORM/setup.sh +``` + +In general you have to do this every time you start working (after +setting up the release). + +Now run + +``` +test_SmoothedWZTagger +``` + +it should start printing lines that indicate it's processing events. + +Step 5: Start a New Tagger +-------------------------- + +We recommend reading through one of the existing taggers to get an +idea of how they work. There are a few caveats to keep in mind when +creating one: + + - You should add the `.cxx` file under the `atlas_add_library` list + in the `CMakeLists.txt` file. This will ensure that CMake doesn't + get confused when you change something. + + - To ensure usability with ROOT (both inside and outside Athena), + you'll have to add your tagger to both `selection.xml` and + `BoostedJetTaggersDict.h`. Both of these files are in + `BoostedJetTaggers/BoostedJetTaggers/`. + + - You'll also have to add your tagger under + `src/components/BoostedJetTaggers_entries.cxx`. + + - If you have large configuration files that need to be read by your + tagger (BDT or NN weights, for example), you should store them in + [cvmfs][2]. 
To create these files you'll have to file a JIRA ticket + similar to this one: https://its.cern.ch/jira/browse/ATLINFR-1354 + +We also encourage you to copy one of the `test_*` executables in +`util` so that you can test your tagger before running it. + +Once you're reasonably happy with your tagger, you should create a +[merge request][3] with the 21.2 branch. We encourage you to do this +even if the tagger isn't completely final and to mark the request as +"WIP" (work in progress) so that people can see and discuss your +changes. + + +[2]: http://atlas.web.cern.ch/Atlas/GROUPS/DATABASE/GroupData/BoostedJetTaggers/ +[3]: https://atlassoftwaredocs.web.cern.ch/gittutorial/merge-request/ diff --git a/Reconstruction/Jet/BoostedJetTaggers/README_SFprovider.md b/Reconstruction/Jet/BoostedJetTaggers/README_SFprovider.md new file mode 100644 index 0000000000000000000000000000000000000000..5fc904f65ac62575097f0075c7c4b7152fa1f7d2 --- /dev/null +++ b/Reconstruction/Jet/BoostedJetTaggers/README_SFprovider.md @@ -0,0 +1,134 @@ +Tagging efficiency SF provider +=================== + +Tagging efficiency SF is decorated to the jet. +[Note] Currently, development is ongoing only for DNN top tagger. + + +Step 1: Decprate the truth labeling +=================== + +SF is provided according to the jet truth labels. 
+Truth labels are defined as enum in BoostedJetTaggers/FatjetLabelEnum.h: +``` + enum TypeEnum + { + UNKNOWN=0, // Not tagged yet + tqqb, // full-contained top->qqb + Wqq, // full-contained W->qq + Zqq, // full-contained Z->qq + Wqq_From_t,// full-contained W->qq (also mathced to top) + other_From_t, // matched to top + other_From_V, // matched to W/Z + notruth, // failed to truth-jet matching (pileup) + qcd, // not matched to top or W/Z (background jet) + Hbb, // full-contained H->bb + other_From_H, // matched to Higgs + }; +``` + +I is decorated to the given jet as integer by decorateTruthLabel( ) function in BoostedJetTaggers/JSSTaggerBase.h, which is called inside the tag() function, with the following convention: +``` + inline int enumToInt(const TypeEnum type) + { + switch (type) + { + case tqqb: return 1; + case Wqq: return 2; + case Zqq: return 3; + case Wqq_From_t: return 4; + case other_From_t: return 5; + case other_From_V: return 6; + case notruth: return 7; + case qcd: return 8; + case Hbb: return 9; + case other_From_H: return 10; + default: return 0; + } + } +``` +* First of all, DecorateMatchedTruthJet( ) function, defined in BoostedJetTaggers/JSSTaggerBase.h, is called to decorate trimmed truth jet associated with the given jet by dR<0.75. The function automatically identifies the format of the truth particle container (TRUTH1 or TRUTH3). +* If the matching to truth jet is failed, FatjetTruthLabel::notruth is docorated as the truth label. +* Then getWTopContainment( ) function is called to decorate truth labeling according to the definitions below. + +Details of truth definitions +----------------------------------- +* FatjetTruthLabel::tqqb +1. Associated trimmed truth jet is matched to truth top quark by dR<0.75 +2. GhostBHadronsFinalCount is greater than 0 +3. Trimmed truth jet mass satisfies 140 < mJ < 200GeV + +* FatjetTruthLabel::Wqq_From_t +1. Associated trimmed truth jet is matched to both truth top and truth W boson by dR<0.75 +2. 
GhostBHadronsFinalCount is equal to 0 +3. Trimmed truth jet mass satisfies 50 < mJ < 100GeV + +* FatjetTruthLabel::Wqq +1. Associated trimmed truth jet is matched to truth W boson by dR<0.75 but not matched to truth top +2. GhostBHadronsFinalCount is equal to 0 +3. Trimmed truth jet mass satisfies 50 < mJ < 100GeV + +* FatjetTruthLabel::Zqq +1. Associated trimmed truth jet is matched to truth Z boson by dR<0.75 +2. Trimmed truth jet mass satisfies 60 < mJ < 110GeV + +* FatjetTruthLabel::Hbb +1. Associated trimmed truth jet is matched to truth H boson by dR<0.75 +2. GhostBHadronsFinalCount is greater than 1 + +* FatjetTruthLabel::other_From_t +If trimmed truth jet is matched to truth top quark but does not satisfy the additional requirements above, FatjetTruthLabel::other_From_t is decorated. + +* FatjetTruthLabel::other_From_V +If trimmed truth jet is matched to truth W or Z but does not satisfy the additional requirements above, FatjetTruthLabel::other_From_V is decorated. + +* FatjetTruthLabel::other_From_H +If trimmed truth jet is matched to truth H but does not satisfy the additional requirements above, FatjetTruthLabel::other_From_H is decorated. + +* FatjetTruthLabel::unknown +All jets not satisfying the above are defined as FatjetTruthLabel::unknown. + +It is implemented in Root/JSSTaggerBase.cxx. + + +Sherpa V+jets +----------------------------------- +Sherpa 2.2.1 V+jets samples do not contain the intermediate truth W/Z boson information in the TruthParticles container. +We can look at the pair of truth particles with status==3, check the flavors of them, and reconstruct the mass of truth W/Z to define the truth W/Z boson. +The function matchToWZ_Sherpa( ) is called only when the flag isSherpa is turned on. The flag is turned on based on dataset ID defined in getIsSherpa( ) function in BoostedJetTaggers/JSSTaggerBase.h. +The DSID is needed to determine if the isSherpa flag should be switched on. 
This is done now automatically in the code and does not need to be provided anymore + + + + + + +Step 2: Decorate the SF +=================== + +SF is decorated to the given jet according to the truth definitions above. +getWeight( ) function is called inside the tag( ) function. + +SF values, as functions of jet pT and m/pT, are provided by TH2 histograms in ROOT file. +Path of the ROOT file, as well as some parameters for SF provider, are specified in the config file of BoostedJetTaggers tool. +An example is found in share/JSSWTopTaggingDNN/JSSDNNTagger_AntiKt10LCTopoTrimmed_TopQuarkContained_MC15c_20170824_BOOSTSetup80Eff.dat. + + + + +Systematic uncertainties +=================== + +We plan to provide the systematic uncertainties using JetUncertainties tool. +[Not ready yet] + + + + +Test the tool +=================== +To run the script to apply SF to tagged jets, +``` +$ test_JSSWTopTaggerDNN -f your.test.DAOD.root +``` +you can find output_JSSWTopTaggerDNN.root in your directory. diff --git a/Reconstruction/Jet/BoostedJetTaggers/Root/JSSTaggerBase.cxx b/Reconstruction/Jet/BoostedJetTaggers/Root/JSSTaggerBase.cxx new file mode 100644 index 0000000000000000000000000000000000000000..8c55ce8b62508d387583202b1361f286818121e3 --- /dev/null +++ b/Reconstruction/Jet/BoostedJetTaggers/Root/JSSTaggerBase.cxx @@ -0,0 +1,574 @@ +/* + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +*/ + +#include "BoostedJetTaggers/JSSTaggerBase.h" + +#include "ParticleJetTools/LargeRJetLabelEnum.h" + +#include <TSystem.h> + +JSSTaggerBase::JSSTaggerBase(const std::string &name) : + asg::AsgTool(name), + m_calibArea(""), + m_jetPtMin(200000.), + m_jetPtMax(3000000.), + m_jetEtaMax(2.0), + m_strMassCutLow(""), + m_strMassCutHigh(""), + m_strScoreCut("") +{ + + /// Tagger configuration properties + declareProperty( "ContainerName", m_containerName = "", "Name of jet container" ); + declareProperty( "ConfigFile", m_configFile = "", "Name of config file" ); + 
declareProperty( "CalibArea", m_calibArea = "", "Path to config file" ); + declareProperty( "CalcSF", m_calcSF = false, "Flag to calculate the efficiency SF" ); + declareProperty( "WorkingPoint", m_wkpt = "", "If specified, name of working point is added to the variable names" ); + declareProperty( "TaggerType", m_tagType = "XXX", "Tagger type (e.g. SmoothedWZTagger, JSSWTopTaggerDNN, etc.)" ); + declareProperty( "Decoration", m_decorationName = "XX", "Prefix for the variables decorated to xAOD::Jet" ); + declareProperty( "IsMC", m_isMC = true, "Flag to identify data or MC" ); + + /// Jet kinematics properties + declareProperty( "JetEtaMax", m_jetEtaMax = 2.0, "Eta cut to define fiducial phase space for the tagger"); + + /// Truth labeling properties + declareProperty( "UseTRUTH3", m_truthLabelUseTRUTH3 = true, + "Flag to use TRUTH3 containers. If false, TRUTH1 format is used." ); + declareProperty( "TruthParticleContainerName", m_truthParticleContainerName = "TruthParticles", + "Name of truth-particle container (with UseTRUTH3=false). TruthParticles by default" ); + declareProperty( "TruthBosonContainerName", m_truthBosonContainerName = "TruthBosonsWithDecayParticles", + "Name of truth-boson container (with UseTRUTH3=true). TruthBosonWithDecayParticles by default" ); + declareProperty( "TruthTopQuarkContainerName", m_truthTopQuarkContainerName = "TruthTopQuarkWithDecayParticles", + "Name of truth-top container (with UseTRUTH3=true). 
TruthTopQuarkWithDecayParticles by default" ); + + /// Keras properties + declareProperty( "CalibAreaKeras", m_kerasCalibArea = "BoostedJetTaggers/TopoclusterTopTagger/Boost2017/", + "Path to json file to configure ML-taggers (Keras)" ); + declareProperty( "KerasConfigFile", m_kerasConfigFileName = "XXX", + "Name of json file to configure ML-taggers (Keras)" ); + declareProperty( "KerasOutput", m_kerasConfigOutputName = "XXX", + "Name of output variable by the ML-tagger (Keras)" ); + + /// TMVA properties + declareProperty( "CalibAreaTMVA", m_tmvaCalibArea = "BoostedJetTaggers/JSSWTopTaggerBDT/Boost2017/", + "Path to xml file to configure ML-taggers (TMVA)" ); + declareProperty( "TMVAConfigFile", m_tmvaConfigFileName = "XXX", + "Name of xml file to configure ML-taggers (TMVA)" ); + + /// Tagging scale factors + declareProperty( "WeightDecorationName", m_weightDecorationName = "SF", + "Name of SF variable decorated to xAOD::Jet" ); + declareProperty( "WeightFile", m_weightFileName = "", + "Name of config ROOT file for SF calculation" ); + declareProperty( "WeightHistogramName", m_weightHistogramName = "", + "Name of SF histograms in the ROOT file" ); + declareProperty( "EfficiencyHistogramName", m_efficiencyHistogramName = "", + "Name of efficiency histograms in the ROOT file" ); + declareProperty( "WeightFlavors", m_weightFlavors = "", + "List of jet flavours for which the SF is available. Divided by comma" ); +} + +StatusCode JSSTaggerBase::initialize() { + + /// Make sure jet container is set + if ( m_containerName.empty() ) { + ATH_MSG_ERROR( "ContainerName has not been set. 
Exiting" ); + return StatusCode::FAILURE; + } + + /// Initialize warning counters + m_nWarnKin = 0; + m_nWarnVar = 0; + + /// Define common tagger states + m_acceptInfo.addCut( "ValidPtRangeHigh" , "True if the jet is not too high pT" ); + m_acceptInfo.addCut( "ValidPtRangeLow" , "True if the jet is not too low pT" ); + m_acceptInfo.addCut( "ValidEtaRange" , "True if the jet is not too forward" ); + + m_acceptInfo.addCut( "ValidJetContent" , "True if the jet is alright technically (e.g. all attributes necessary for tag)" ); + m_acceptInfo.addCut( "ValidEventContent" , "True if the event is alright technically (e.g. primary vertices)" ); + + /// Initialize decorators + ATH_MSG_INFO( "Decorators that will be attached to jet :" ); + + m_decTaggedKey = m_containerName + "." + m_decorationName + "_" + m_decTaggedKey.key(); + m_decValidPtRangeHighKey = m_containerName + "." + m_decorationName + "_" + m_decValidPtRangeHighKey.key(); + m_decValidPtRangeLowKey = m_containerName + "." + m_decorationName + "_" + m_decValidPtRangeLowKey.key(); + m_decValidEtaRangeKey = m_containerName + "." + m_decorationName + "_" + m_decValidEtaRangeKey.key(); + m_decValidJetContentKey = m_containerName + "." + m_decorationName + "_" + m_decValidJetContentKey.key(); + m_decValidEventContentKey = m_containerName + "." 
+ m_decorationName + "_" + m_decValidEventContentKey.key(); + + ATH_CHECK( m_decTaggedKey.initialize() ); + ATH_CHECK( m_decValidPtRangeHighKey.initialize() ); + ATH_CHECK( m_decValidPtRangeLowKey.initialize() ); + ATH_CHECK( m_decValidEtaRangeKey.initialize() ); + ATH_CHECK( m_decValidJetContentKey.initialize() ); + ATH_CHECK( m_decValidEventContentKey.initialize() ); + + ATH_MSG_INFO( " " << m_decTaggedKey.key() << " : pass tagging criteria" ); + ATH_MSG_INFO( " " << m_decValidPtRangeHighKey.key() << " : pass upper pt range" ); + ATH_MSG_INFO( " " << m_decValidPtRangeLowKey.key() << " : pass lower pt range" ); + ATH_MSG_INFO( " " << m_decValidEtaRangeKey.key() << " : pass eta range" ); + ATH_MSG_INFO( " " << m_decValidJetContentKey.key() << " : has valid jet content" ); + ATH_MSG_INFO( " " << m_decValidEventContentKey.key() << " : has valid event content" ); + + if ( m_useMassCut ) { + + m_decPassMassKey = m_containerName + "." + m_decorationName + "_" + m_decPassMassKey.key(); + m_decCutMLowKey = m_containerName + "." + m_decorationName + "_" + m_decCutMLowKey.key(); + m_decCutMHighKey = m_containerName + "." + m_decorationName + "_" + m_decCutMHighKey.key(); + + ATH_CHECK( m_decPassMassKey.initialize() ); + ATH_CHECK( m_decCutMLowKey.initialize() ); + ATH_CHECK( m_decCutMHighKey.initialize() ); + + ATH_MSG_INFO( " " << m_decPassMassKey.key() << " : pass mass cut" ); + ATH_MSG_INFO( " " << m_decCutMLowKey.key() << " : lower mass cut" ); + ATH_MSG_INFO( " " << m_decCutMHighKey.key() << " : upper mass cut" ); + + } + + if ( m_useScoreCut ) { + + m_decPassScoreKey = m_containerName + "." + m_decorationName + "_" + m_decPassScoreKey.key(); + m_decScoreCutKey = m_containerName + "." + m_decorationName + "_" + m_decScoreCutKey.key(); + m_decScoreValueKey = m_containerName + "." 
+ m_decorationName + "_" + m_decScoreValueKey.key(); + + ATH_CHECK( m_decPassScoreKey.initialize() ); + ATH_CHECK( m_decScoreCutKey.initialize() ); + ATH_CHECK( m_decScoreValueKey.initialize() ); + + ATH_MSG_INFO( " " << m_decPassScoreKey.key() << " : pass MVA score cut" ); + ATH_MSG_INFO( " " << m_decScoreCutKey.key() << " : MVA score cut" ); + ATH_MSG_INFO( " " << m_decScoreValueKey.key() << " : evaluated MVA score" ); + + } + + if ( m_calcSF ) { + + m_decWeightKey = m_containerName + "." + m_decorationName + "_" + m_weightDecorationName; + m_decEfficiencyKey = m_containerName + "." + m_decorationName + "_" + m_decEfficiencyKey.key(); + m_decEffSFKey = m_containerName + "." +m_decorationName + "_" + m_decEffSFKey.key(); + + ATH_CHECK( m_decWeightKey.initialize() ); + ATH_CHECK( m_decEfficiencyKey.initialize() ); + ATH_CHECK( m_decEffSFKey.initialize() ); + + ATH_MSG_INFO( " " << m_decWeightKey.key() << " : tagging SF" ); + + m_readTruthLabelKey = m_containerName + "." + m_truthLabelName; + ATH_CHECK( m_readTruthLabelKey.initialize() ); + + } + + /// Initialize SFs if they are needed + if ( m_calcSF ) { + + /// Get weight config file + m_weightConfig = std::make_unique<TFile>( m_weightConfigPath.c_str() ); + if( !m_weightConfig ) { + ATH_MSG_ERROR( "SmoothedWZTagger: Error openning config file : " << m_weightConfigPath ); + return StatusCode::FAILURE; + } + + /// Install histograms for tagging SFs + std::stringstream ss{m_weightFlavors}; + std::string flavor; + while ( std::getline(ss, flavor, ',') ) { + m_weightHistograms.insert( std::make_pair( flavor, (TH2D*)m_weightConfig->Get((m_weightHistogramName+"_"+flavor).c_str()) ) ); + if ( !m_efficiencyHistogramName.empty() ) { + m_efficiencyHistograms.insert( std::make_pair( flavor, (TH2D*)m_weightConfig->Get((m_efficiencyHistogramName+"_"+flavor).c_str()) ) ); + } + ATH_MSG_INFO( "Tagging SF histogram for " << flavor << " is installed." 
); + } + + } + + return StatusCode::SUCCESS; + +} + +/// Loop over jet collection and decorate each jet +StatusCode JSSTaggerBase::decorate( const xAOD::JetContainer& jets ) const { + + for ( auto jet : jets ) { + ATH_CHECK( tag(*jet) ); + } + + return StatusCode::SUCCESS; + +} + +/// Get configReader TEnv +StatusCode JSSTaggerBase::getConfigReader() { + + ATH_MSG_INFO( "Using config file : " << m_configFile ); + + /// Check for the existence of the configuration file + std::string configPath; + + if ( m_calibArea.compare("Local") == 0 ) { + configPath = PathResolverFindCalibFile(("$WorkDir_DIR/data/BoostedJetTaggers/"+m_configFile).c_str()); + } + else if ( m_calibArea.find("eos") != std::string::npos) { + configPath = PathResolverFindCalibFile((m_calibArea+"/"+m_configFile).c_str()); + } + else { + configPath = PathResolverFindCalibFile(("BoostedJetTaggers/"+m_calibArea+"/"+m_configFile).c_str()); + } + + /// https://root.cern.ch/root/roottalk/roottalk02/5332.html + FileStat_t fStats; + int fSuccess = gSystem->GetPathInfo(configPath.c_str(), fStats); + if ( fSuccess ) { + ATH_MSG_ERROR( "Recommendations file " << m_configFile << " could not be found" ); + return StatusCode::FAILURE; + } + else { + ATH_MSG_DEBUG( "Recommendations file was found : " << configPath ); + } + + if ( m_configReader.ReadFile( configPath.c_str(), EEnvLevel(0) ) ) { + ATH_MSG_ERROR( "Error while reading config file : "<< configPath ); + return StatusCode::FAILURE; + } + + return StatusCode::SUCCESS; + +} + +/// Reset cuts +StatusCode JSSTaggerBase::resetCuts( asg::AcceptData &acceptData ) const { + + /// Reset the AcceptData cut results to false + acceptData.clear(); + + /// Initialize common cuts to true by default + acceptData.setCutResult( "ValidJetContent", true ); + acceptData.setCutResult( "ValidEventContent", true ); + + acceptData.setCutResult( "ValidPtRangeHigh", true ); + acceptData.setCutResult( "ValidPtRangeLow" , true ); + acceptData.setCutResult( "ValidEtaRange" , true ); + + 
return StatusCode::SUCCESS; + +} + +/// Check if jet passes kinematic constraints +bool JSSTaggerBase::passKinRange( const xAOD::Jet &jet ) const { + + float scale = 1.0; + if ( m_ptGeV ) scale = 1.e3; + + if ( jet.pt() < m_jetPtMin * scale ) return false; + if ( jet.pt() > m_jetPtMax * scale ) return false; + if ( std::abs( jet.eta() ) > m_jetEtaMax ) return false; + + return true; + +} + +/// Check and record if jet passes kinematic constraints +StatusCode JSSTaggerBase::checkKinRange( const xAOD::Jet &jet, asg::AcceptData &acceptData ) const { + + float scale = 1.0; + if ( m_ptGeV ) scale = 1.e3; + + /// Check each kinematic constraint + /// Print warnings using counters + if ( std::abs(jet.eta()) > m_jetEtaMax ) { + if ( m_nWarnKin++ < m_nWarnMax ) ATH_MSG_WARNING( "Jet does not pass basic kinematic selection (|eta| < " << m_jetEtaMax << "). Jet eta = " << jet.eta() ); + else ATH_MSG_DEBUG( "Jet does not pass basic kinematic selection (|eta| < " << m_jetEtaMax << "). Jet eta = " << jet.eta() ); + acceptData.setCutResult( "ValidEtaRange", false ); + } + + if ( jet.pt() < m_jetPtMin * scale ) { + if ( m_nWarnKin++ < m_nWarnMax ) ATH_MSG_WARNING("Jet does not pass basic kinematic selection (pT > " << m_jetPtMin * scale / 1.e3 << "). Jet pT = " << jet.pt() / 1.e3 << " GeV" ); + else ATH_MSG_DEBUG( "Jet does not pass basic kinematic selection (pT > " << m_jetPtMin * scale / 1.e3 << "). Jet pT = " << jet.pt() / 1.e3 << " GeV" ); + acceptData.setCutResult( "ValidPtRangeLow", false ); + } + + if ( jet.pt() > m_jetPtMax * scale ) { + if( m_nWarnKin++ < m_nWarnMax ) ATH_MSG_WARNING( "Jet does not pass basic kinematic selection (pT < " << m_jetPtMax * scale / 1.e3 << "). Jet pT = " << jet.pt() / 1.e3 << " GeV" ); + else ATH_MSG_DEBUG( "Jet does not pass basic kinematic selection (pT < " << m_jetPtMax * scale / 1.e3 << "). 
Jet pT = " << jet.pt() / 1.e3 << " GeV" ); + acceptData.setCutResult( "ValidPtRangeHigh", false ); + } + + /// Create write decor handles + SG::WriteDecorHandle<xAOD::JetContainer, bool> decValidPtRangeHigh(m_decValidPtRangeHighKey); + SG::WriteDecorHandle<xAOD::JetContainer, bool> decValidPtRangeLow(m_decValidPtRangeLowKey); + SG::WriteDecorHandle<xAOD::JetContainer, bool> decValidEtaRange(m_decValidEtaRangeKey); + + /// Decorate kinematic pass information + decValidPtRangeHigh(jet) = acceptData.getCutResult( "ValidPtRangeHigh" ); + decValidPtRangeLow(jet) = acceptData.getCutResult( "ValidPtRangeLow" ); + decValidEtaRange(jet) = acceptData.getCutResult( "ValidEtaRange" ); + + return StatusCode::SUCCESS; + +} + +/// Calculate JSS moment ratios in case they are not already saved +/// These are calculated by hand here because JetSubStructureMomentTools +/// does not operate on const jets. This should be changed in the future +int JSSTaggerBase::calculateJSSRatios( const xAOD::Jet &jet ) const { + + int result = 0; + + /// Create write decor handles + SG::WriteDecorHandle<xAOD::JetContainer, float> decTau21WTA(m_decTau21WTAKey); + SG::WriteDecorHandle<xAOD::JetContainer, float> decTau32WTA(m_decTau32WTAKey); + SG::WriteDecorHandle<xAOD::JetContainer, float> decC2(m_decC2Key); + SG::WriteDecorHandle<xAOD::JetContainer, float> decD2(m_decD2Key); + SG::WriteDecorHandle<xAOD::JetContainer, float> decE3(m_decE3Key); + + /// Create read decor handles + SG::ReadDecorHandle<xAOD::JetContainer, float> readTau1WTA(m_readTau1WTAKey); + SG::ReadDecorHandle<xAOD::JetContainer, float> readTau2WTA(m_readTau2WTAKey); + SG::ReadDecorHandle<xAOD::JetContainer, float> readTau3WTA(m_readTau3WTAKey); + + SG::ReadDecorHandle<xAOD::JetContainer, float> readECF1(m_readECF1Key); + SG::ReadDecorHandle<xAOD::JetContainer, float> readECF2(m_readECF2Key); + SG::ReadDecorHandle<xAOD::JetContainer, float> readECF3(m_readECF3Key); + + /// WTA N-subjettiness ratios + float tau21_wta = -999.0; + float 
tau32_wta = -999.0; + + float tau1_wta = readTau1WTA(jet); + float tau2_wta = readTau2WTA(jet); + float tau3_wta = readTau3WTA(jet); + + if ( tau1_wta > 1e-8 ) { + tau21_wta = tau2_wta / tau1_wta; + } + else result = 1; + + if ( tau2_wta > 1e-8 ) { + tau32_wta = tau3_wta / tau2_wta; + } + else result = 1; + + decTau21WTA(jet) = tau21_wta; + decTau32WTA(jet) = tau32_wta; + + /// ECF ratios + float C2 = -999.0; + float D2 = -999.0; + float e3 = -999.0; + + float ECF1 = readECF1(jet); + float ECF2 = readECF2(jet); + float ECF3 = readECF3(jet); + + if ( ECF2 > 1e-8 ) { + C2 = ECF3 * ECF1 / std::pow( ECF2, 2.0 ); + D2 = ECF3 * std::pow( ECF1, 3.0 ) / std::pow( ECF2, 3.0 ); + } + else result = 1; + + e3 = ECF3 / std::pow( ECF1, 3.0 ); + + decC2(jet) = C2; + decD2(jet) = D2; + decE3(jet) = e3; + + // TODO: Add L-series for UFO taggers + // TODO: Add ECFG for ANN tagger whenever it is defined + + return result; + +} + +/// Get SF weight +StatusCode JSSTaggerBase::getWeight( const xAOD::Jet& jet, bool passSel, asg::AcceptData &acceptData ) const { + + if ( !m_calcSF ) return StatusCode::SUCCESS; + + float weight = 1.0; + float effSF = 1.0; + float efficiency = 1.0; + + if ( m_isMC ) { + + std::tie(effSF, efficiency) = getSF( jet, acceptData ); + + /// Inefficiency SF is directly used + if ( m_weightFlavors.find("fail") != std::string::npos ) { + weight = effSF; + } + + else { + + /// Efficiency SF + if ( passSel ) { + weight = effSF; + } + + /// Calculate inefficiency SF + else { + /// If inefficiency SF is not available, SF is always 1.0 + if ( m_efficiencyHistogramName.empty() ) { + weight = 1.0; + } + else if ( efficiency < 1.0 ) { + weight = ( 1. - effSF * efficiency ) / ( 1. 
- efficiency ); + } + else { + weight = 1.0; + } + } + } + + } + + else { + weight = 1.0; + } + + /// Create write decor handles + SG::WriteDecorHandle<xAOD::JetContainer, float> decWeight(m_decWeightKey); + SG::WriteDecorHandle<xAOD::JetContainer, float> decEfficiency(m_decEfficiencyKey); + SG::WriteDecorHandle<xAOD::JetContainer, float> decEffSF(m_decEffSFKey); + + /// Decorate values + decWeight(jet) = weight; + decEfficiency(jet) = efficiency; + decEffSF(jet) = effSF; + + return StatusCode::SUCCESS; + +} + +/// Get scale factor and efficiency +std::pair<double, double> JSSTaggerBase::getSF( const xAOD::Jet& jet, asg::AcceptData &acceptData ) const { + + if ( !passKinRange(jet) ) return std::make_pair( 1.0, 1.0 ); + + /// Truth label string + std::string truthLabelStr; + + /// Truth label value + SG::ReadDecorHandle<xAOD::JetContainer, float> readTruthLabel(m_readTruthLabelKey); + LargeRJetTruthLabel::TypeEnum jetContainment = LargeRJetTruthLabel::intToEnum(readTruthLabel(jet)); + + /// Contained top tagger + if ( m_weightHistograms.count("t_qqb") ) { + + /// Contained top + if ( jetContainment==LargeRJetTruthLabel::tqqb ) { + truthLabelStr = "t_qqb"; + } + /// QCD + else if ( jetContainment==LargeRJetTruthLabel::notruth || jetContainment==LargeRJetTruthLabel::qcd ) { + truthLabelStr = "q"; + } + + } + /// TCC W/Z 2-var tagger + else if ( m_weightHistograms.count("V_qq_passMpassD2") ) { + + /// Top + if ( jetContainment==LargeRJetTruthLabel::tqqb || jetContainment==LargeRJetTruthLabel::other_From_t ) { + truthLabelStr = "t_"; + } + /// W/Z + else if ( jetContainment==LargeRJetTruthLabel::Wqq || jetContainment==LargeRJetTruthLabel::Zqq || jetContainment==LargeRJetTruthLabel::Wqq_From_t ) { + truthLabelStr = "V_qq_"; + } + /// QCD + else if ( jetContainment==LargeRJetTruthLabel::notruth || jetContainment==LargeRJetTruthLabel::qcd ) { + truthLabelStr = "q_"; + } + + /// Pass mass and D2 + if ( acceptData.getCutResult("PassMassLow") && 
acceptData.getCutResult("PassMassHigh") && acceptData.getCutResult("PassD2") ) { + truthLabelStr += "passMpassD2"; + } + /// Fail mass, pass D2 + else if ( !(acceptData.getCutResult("PassMassLow") && acceptData.getCutResult("PassMassHigh")) && acceptData.getCutResult("PassD2") ) { + truthLabelStr += "failMpassD2"; + } + /// Pass mass, fail D2 + else if ( acceptData.getCutResult("PassMassLow") && acceptData.getCutResult("PassMassHigh") && !acceptData.getCutResult("PassD2") ) { + truthLabelStr += "passMfailD2"; + } + /// Fail mass and D2 + else{ + truthLabelStr += "failMfailD2"; + } + + } + + /// W/Z or inclusive top tagger + else { + + /// Top + if ( jetContainment==LargeRJetTruthLabel::tqqb || jetContainment==LargeRJetTruthLabel::other_From_t ) { + truthLabelStr = "t"; + } + /// W/Z + else if ( jetContainment==LargeRJetTruthLabel::Wqq || jetContainment==LargeRJetTruthLabel::Zqq || jetContainment==LargeRJetTruthLabel::Wqq_From_t ) { + truthLabelStr = "V_qq"; + } + /// QCD + else if ( jetContainment==LargeRJetTruthLabel::notruth || jetContainment==LargeRJetTruthLabel::qcd ) { + truthLabelStr = "q"; + } + + } + + double logmOverPt = std::log(jet.m()/jet.pt()); + if ( m_decorationName.find("SmoothZ") != std::string::npos ) { + /// To apply W-tagging efficiency SF to Z-tagger, jet mass is shifted by 10.803 GeV + const double WtoZmassShift = 10803; + logmOverPt = std::log((jet.m()-WtoZmassShift)/jet.pt()); + } + + if ( logmOverPt > 0 ) logmOverPt = 0; + + double SF = 1.0; + double eff = 1.0; + + if ( m_weightHistograms.count(truthLabelStr.c_str()) ) { + + int pt_mPt_bin = (m_weightHistograms.find(truthLabelStr.c_str())->second)->FindBin(jet.pt()*0.001, logmOverPt); + SF = (m_weightHistograms.find(truthLabelStr.c_str())->second)->GetBinContent(pt_mPt_bin); + + if ( !m_efficiencyHistogramName.empty() ) { + eff = (m_efficiencyHistograms.find(truthLabelStr.c_str())->second)->GetBinContent(pt_mPt_bin); + } + + } + else { + ATH_MSG_DEBUG( "SF for truth label for " << 
truthLabelStr << " is not available. Returning 1.0" ); + return std::make_pair( 1.0, 1.0 ); + } + + if ( SF < 1e-3 ) { + ATH_MSG_DEBUG( "(pt, m/pt) (" << jet.pt()/1.e3 << ", " << jet.m()/jet.pt() << ") is out of range for SF calculation. Returning 1.0" ); + return std::make_pair( 1.0, 1.0 ); + } + else { + return std::make_pair( SF, eff ); + } + +} + +/// Print configured cuts +void JSSTaggerBase::printCuts() const { + ATH_MSG_INFO( "After tagging, you will have access to the following cuts as an asg::AcceptData : (<NCut>) <cut> : <description>)" ); + int nCuts = m_acceptInfo.getNCuts(); + for ( int iCut=0; iCut < nCuts; iCut++ ) { + std::string cut_string = ""; + cut_string += " ("; + cut_string += std::to_string(iCut); + cut_string += ") "; + cut_string += m_acceptInfo.getCutName(iCut).data(); + cut_string += " : "; + cut_string += m_acceptInfo.getCutDescription(iCut).data(); + ATH_MSG_INFO( cut_string ); + } +} diff --git a/Reconstruction/Jet/BoostedJetTaggers/Root/JSSWTopTaggerANN.cxx b/Reconstruction/Jet/BoostedJetTaggers/Root/JSSWTopTaggerANN.cxx new file mode 100644 index 0000000000000000000000000000000000000000..66785276ca0b3b511e45720a9310540aafadb65f --- /dev/null +++ b/Reconstruction/Jet/BoostedJetTaggers/Root/JSSWTopTaggerANN.cxx @@ -0,0 +1,415 @@ +/* + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +*/ + +#include "BoostedJetTaggers/JSSWTopTaggerANN.h" + +#include <fstream> + +JSSWTopTaggerANN::JSSWTopTaggerANN( const std::string& name ) : + JSSTaggerBase( name ), + m_lwnn(nullptr) +{ + +} + +/// Initialize the tagger +StatusCode JSSWTopTaggerANN::initialize() { + + ATH_MSG_INFO( "Initializing JSSWTopTaggerANN tool" ); + + /// Pt values are defined in GeV + m_ptGeV = true; + + /// Use mass cut + m_useMassCut = true; + + /// Use discriminant score cut + m_useScoreCut = true; + + if( ! 
m_configFile.empty() ) { + + /// Get configReader + ATH_CHECK( getConfigReader() ); + + /// Get tagger type + m_tagType = m_configReader.GetValue("TaggerType" ,""); + + /// Get the CVMFS calib area where stuff is stored + /// If this is set to "Local" then it will look for the config file in the share space + m_kerasCalibArea = m_configReader.GetValue("CalibAreaKeras" ,""); + + /// Get the name/path of the JSON config + m_kerasConfigFileName = m_configReader.GetValue("KerasConfigFile" ,""); + + /// Get the name of the Keras output node + m_kerasConfigOutputName = m_configReader.GetValue("KerasOutput" ,""); + + /// Get min and max jet mass. The unit is GeV now. Need to be consistent with ATLAS convention in the future + m_strMassCutLow = m_configReader.GetValue("MassCutLow_in_GeV" ,""); + m_strMassCutHigh = m_configReader.GetValue("MassCutHigh_in_GeV" ,""); + + /// Get min and max jet pt. The unit is GeV now. Need to be consistent with ATLAS convention in the future + m_jetPtMin = m_configReader.GetValue("pTCutLow_in_GeV", 200.0); + m_jetPtMax = m_configReader.GetValue("pTCutHigh_in_GeV", 2000.0); + + /// Get cut for ANN score + m_strScoreCut = m_configReader.GetValue("ScoreCut" ,""); + + /// Get the decoration name + m_decorationName = m_configReader.GetValue("DecorationName" ,""); + + /// Get the scale factor configuration + m_calcSF = m_configReader.GetValue("CalcSF", false); + if ( m_calcSF ) { + m_weightDecorationName = m_configReader.GetValue("WeightDecorationName", ""); + m_weightFileName = m_configReader.GetValue("WeightFile", ""); + m_weightHistogramName = m_configReader.GetValue("WeightHistogramName", ""); + m_efficiencyHistogramName = m_configReader.GetValue("EfficiencyHistogramName", ""); + m_weightFlavors = m_configReader.GetValue("WeightFlavors", ""); + + /// Get truth label name information + m_truthLabelName = m_configReader.GetValue("TruthLabelName" , "R10TruthLabel_R21Consolidated"); + } + + /// Print out the configuration parameters for viewing + 
ATH_MSG_INFO( "Configurations Loaded :"); + ATH_MSG_INFO( "tagType : " << m_tagType ); + ATH_MSG_INFO( "calibarea_keras : " << m_kerasCalibArea ); + ATH_MSG_INFO( "kerasConfigFileName : " << m_kerasConfigFileName ); + ATH_MSG_INFO( "kerasConfigOutputName : " << m_kerasConfigOutputName ); + ATH_MSG_INFO( "strMassCutLow : " << m_strMassCutLow ); + ATH_MSG_INFO( "strMassCutHigh : " << m_strMassCutHigh ); + ATH_MSG_INFO( "pTCutLow : " << m_jetPtMin ); + ATH_MSG_INFO( "pTCutHigh : " << m_jetPtMax ); + ATH_MSG_INFO( "strScoreCut : " << m_strScoreCut ); + ATH_MSG_INFO( "decorationName : " << m_decorationName ); + if ( m_calcSF ) { + ATH_MSG_INFO( "weightDecorationName : " << m_weightDecorationName ); + ATH_MSG_INFO( "weightFile : " << m_weightFileName ); + ATH_MSG_INFO( "weightHistogramName : " << m_weightHistogramName ); + ATH_MSG_INFO( "efficiencyHistogramName : "<<m_efficiencyHistogramName ); + ATH_MSG_INFO( "weightFlavors : " << m_weightFlavors ); + ATH_MSG_INFO( "TruthLabelName : " << m_truthLabelName ); + } + } + else { /// No config file + /// Assume the cut functions have been set through properties. 
+ /// Check they are non empty + if ( (m_kerasConfigFileName.empty() || + m_kerasConfigOutputName.empty() || + m_strScoreCut.empty() || + m_strMassCutLow.empty() || + m_strMassCutHigh.empty() || + m_decorationName.empty() || + m_weightFileName.empty()) || + ((m_weightDecorationName.empty() || + m_weightHistogramName.empty() || + m_weightFlavors.empty()) && m_calcSF) ) { + ATH_MSG_ERROR( "No config file provided OR you haven't manually specified all needed parameters" ) ; + ATH_MSG_ERROR( "Please read the TWiki for this tool" ); + return StatusCode::FAILURE; + } + + } + + ATH_MSG_INFO( "Mass cut low : " << m_strMassCutLow ); + ATH_MSG_INFO( "Mass cut High : " << m_strMassCutHigh ); + ATH_MSG_INFO( "Score cut low : " << m_strScoreCut ); + + /// If the calibarea is specified to be "Local" then it looks in the same place as the top level configs + if ( m_kerasCalibArea.empty() ) { + ATH_MSG_ERROR( "You need to specify where the calibarea is as either being Local or on CVMFS" ); + return StatusCode::FAILURE; + } + else if( !m_kerasCalibArea.compare("Local") ) { + std::string localCalibArea = "BoostedJetTaggers/JSSWTopTaggerANN/"; + ATH_MSG_INFO( "Using Local calibarea " << localCalibArea ); + /// Convert the JSON config file name to the full path + m_kerasConfigFilePath = PathResolverFindCalibFile(localCalibArea+m_kerasConfigFileName); + if ( m_calcSF ) + m_weightConfigPath = PathResolverFindCalibFile(localCalibArea+m_weightFileName); + } + else { + ATH_MSG_INFO( "Using CVMFS calibarea" ); + /// Get the config file from CVMFS + /// Necessary because xml files are too large to house on the data space + m_kerasConfigFilePath = PathResolverFindCalibFile( (m_kerasCalibArea+m_kerasConfigFileName).c_str() ); + if ( m_calcSF ) + m_weightConfigPath = PathResolverFindCalibFile( (m_kerasCalibArea+m_weightFileName).c_str()); + } + + /// Read json file for ANN weights + ATH_MSG_INFO( "ANN Tagger configured with: " << m_kerasConfigFilePath ); + + std::ifstream input_cfg( 
m_kerasConfigFilePath.c_str() ); + + if ( !input_cfg.is_open() ) { + ATH_MSG_ERROR( "Error openning config file: " << m_kerasConfigFilePath ); + ATH_MSG_ERROR( "Are you sure that the file exists at this path?" ); + return StatusCode::FAILURE; + } + + lwt::GraphConfig config = lwt::parse_json_graph( input_cfg ); + + for ( auto& input_node: config.inputs ) { + ATH_MSG_INFO( " input node: " << input_node.name ); + for ( auto& input: input_node.variables ) { + ATH_MSG_INFO( " " << input ); + } + } + + auto output_node_name = config.outputs.begin()->first; + m_out_names = config.outputs.at(output_node_name).labels; + + ATH_MSG_INFO( "Keras Network NLayers: " << config.layers.size() ); + + m_lwnn = std::make_unique< lwt::LightweightGraph >(config, output_node_name); + + /// Build the network + try { + m_lwnn.reset(new lwt::LightweightGraph(config, output_node_name)); + } catch (lwt::NNConfigurationException& exc) { + ATH_MSG_ERROR( "NN configuration problem: " << exc.what() ); + return StatusCode::FAILURE; + } + + /// Set internal tagger type + if ( !m_tagType.compare("TopQuark") ) { + ATH_MSG_DEBUG( "This is a top quark tagger" ); + m_tagClass = TAGCLASS::TopQuark; + } + else if ( !m_tagType.compare("WBoson") ) { + ATH_MSG_DEBUG( "This is a W boson tagger" ); + m_tagClass = TAGCLASS::WBoson; + } + else if ( !m_tagType.compare("ZBoson") ) { + ATH_MSG_DEBUG( "This is a Z boson tagger" ); + m_tagClass = TAGCLASS::ZBoson; + } + else { + ATH_MSG_ERROR( "I can't tell what kind of tagger your configuration is for." 
); + return StatusCode::FAILURE; + } + + /// Set the possible states that the tagger can be left in after the JSSTaggerBase::tag() function is called + m_acceptInfo.addCut( "PassMassLow" , "mJet > mCutLow" ); + m_acceptInfo.addCut( "PassScore" , "ScoreJet > ScoreCut" ); + if ( m_tagClass == TAGCLASS::WBoson || m_tagClass == TAGCLASS::ZBoson ) { + m_acceptInfo.addCut( "PassMassHigh", "mJet < mCutHigh" ); + } + + /// Loop over and print out the cuts that have been configured + printCuts(); + + /// Call base class initialize + ATH_CHECK( JSSTaggerBase::initialize() ); + + ATH_MSG_INFO( "ANN Tagger tool initialized" ); + + return StatusCode::SUCCESS; + +} + +StatusCode JSSWTopTaggerANN::tag( const xAOD::Jet& jet ) const { + + ATH_MSG_DEBUG( "Obtaining ANN result" ); + + /// Create asg::AcceptData object + asg::AcceptData acceptData( &m_acceptInfo ); + + /// Reset the AcceptData cut results + ATH_CHECK( resetCuts( acceptData ) ); + + /// Check basic kinematic selection + ATH_CHECK( checkKinRange( jet, acceptData ) ); + + /// Get the relevant attributes of the jet + /// Mass and pt - note that this will depend on the configuration of the calibration used + float jet_pt = jet.pt()/1000.0; + float jet_mass = jet.m()/1000.0; + + /// Get ANN score for the jet + float jet_score = getScore(jet); + + /// Evaluate the values of the upper and lower mass bounds and the d2 cut + float cut_mass_low = m_funcMassCutLow ->Eval(jet_pt); + float cut_mass_high = m_funcMassCutHigh->Eval(jet_pt); + float cut_score = m_funcScoreCut ->Eval(jet_pt); + + /// Print cut criteria and jet values + ATH_MSG_VERBOSE( "Cut values : Mass window = [" << cut_mass_low << "," << cut_mass_high << "], score cut = " << cut_score ); + ATH_MSG_VERBOSE( "Jet values : Mass = " << jet_mass << ", score = " << jet_score ); + + /// Get SF weight + ATH_CHECK( getWeight( jet, jet_score > cut_score, acceptData ) ); + + /// Decorate cut information if needed + ATH_MSG_DEBUG( "Decorating with score" ); + + /// Create 
WriteDecorHandles + SG::WriteDecorHandle<xAOD::JetContainer, bool> decPassMass(m_decPassMassKey); + SG::WriteDecorHandle<xAOD::JetContainer, bool> decPassScore(m_decPassScoreKey); + SG::WriteDecorHandle<xAOD::JetContainer, bool> decTagged(m_decTaggedKey); + SG::WriteDecorHandle<xAOD::JetContainer, float> decCutMLow(m_decCutMLowKey); + SG::WriteDecorHandle<xAOD::JetContainer, float> decCutMHigh(m_decCutMHighKey); + SG::WriteDecorHandle<xAOD::JetContainer, float> decScoreCut(m_decScoreCutKey); + SG::WriteDecorHandle<xAOD::JetContainer, float> decScoreValue(m_decScoreValueKey); + + /// Decorate values + decCutMLow(jet) = cut_mass_low; + decCutMHigh(jet) = cut_mass_high; + decScoreCut(jet) = cut_score; + decScoreValue(jet) = jet_score; + + /// Cut summary + bool passCuts = true; + + /// Set the AcceptData depending on whether it is a W/Z or a top tagger + if ( m_tagClass == TAGCLASS::WBoson || m_tagClass == TAGCLASS::ZBoson ) { + ATH_MSG_VERBOSE( "Determining WZ tag return" ); + if ( jet_mass > cut_mass_low ) acceptData.setCutResult( "PassMassLow", true ); + if ( jet_mass < cut_mass_high ) acceptData.setCutResult( "PassMassHigh", true ); + if ( jet_score > cut_score ) acceptData.setCutResult( "PassScore", true ); + decPassMass(jet) = acceptData.getCutResult( "PassMassLow" ) && acceptData.getCutResult( "PassMassHigh" ); + passCuts = passCuts && acceptData.getCutResult( "PassMassLow" ) && acceptData.getCutResult( "PassMassHigh" ); + } + else if ( m_tagClass == TAGCLASS::TopQuark ) { + ATH_MSG_VERBOSE( "Determining TopQuark tag return" ); + if( jet_mass > cut_mass_low ) acceptData.setCutResult( "PassMassLow", true ); + if( jet_score > cut_score ) acceptData.setCutResult( "PassScore", true ); + decPassMass(jet) = acceptData.getCutResult( "PassMassLow" ); + passCuts = passCuts && acceptData.getCutResult( "PassMassLow" ); + } + + decPassScore(jet) = acceptData.getCutResult( "PassScore" ); + + passCuts = passCuts && acceptData.getCutResult( "PassScore" ); + + decTagged(jet) = 
+ passCuts;
+
+   return StatusCode::SUCCESS;
+
+ }
+
+ /// Evaluate the lwtnn graph on this jet and return the tagger score.
+ /// Returns the sentinel -666 when any input variable is out of range.
+ double JSSWTopTaggerANN::getScore( const xAOD::Jet& jet ) const {
+
+   /// Create input dictionary map<string,double> for argument to lwtnn
+   std::map<std::string, std::map<std::string,double>> ANN_inputs = getJetProperties(jet);
+
+   /// Evaluate the network response
+   auto discriminant = m_lwnn->compute(ANN_inputs);
+
+   /// Obtain the output associated with the single output node
+   double ANNscore = -666.;
+
+   /// Check that input variables are valid
+   bool validVars = true;
+   SG::ReadDecorHandle<xAOD::JetContainer, float> readTau21WTA(m_readTau21WTAKey);
+   if ( readTau21WTA(jet) < 0.0 ) validVars = false;
+   if ( m_tagClass == TAGCLASS::TopQuark ) {
+     SG::ReadDecorHandle<xAOD::JetContainer, float> readTau32WTA(m_readTau32WTAKey);
+     if ( readTau32WTA(jet) < 0.0 ) validVars = false;
+   }
+
+   if ( !validVars ) {
+
+     /// Warn only for the first m_nWarnMax occurrences, then demote to DEBUG.
+     /// (The else branch previously repeated ATH_MSG_WARNING, so the throttle
+     /// had no effect; ATH_MSG_DEBUG matches JSSWTopTaggerDNN::getScore.)
+     if ( m_nWarnVar++ < m_nWarnMax ) ATH_MSG_WARNING( "One (or more) tagger input variable has an out-of-range value, setting score to -666" );
+     else ATH_MSG_DEBUG( "One (or more) tagger input variable has an out-of-range value, setting score to -666" );
+
+     return ANNscore;
+
+   }
+
+   ANNscore = discriminant.at(m_out_names.at(0));
+
+   return ANNscore;
+ }
+
+ /// Collect the jet substructure variables used as network inputs,
+ /// keyed by the lwtnn input node name ("node_0").
+ std::map<std::string, std::map<std::string, double>> JSSWTopTaggerANN::getJetProperties( const xAOD::Jet& jet ) const {
+
+   /// Map to store inputs
+   std::map< std::string, std::map<std::string, double> > ANN_inputs;
+   std::map< std::string, double > ANN_inputValues;
+
+   /// Calculate NSubjettiness and ECF ratios
+   calculateJSSRatios(jet);
+
+   ATH_MSG_DEBUG( "Loading variables for common ANN tagger" );
+
+   /// Create common read decor handles
+   SG::ReadDecorHandle<xAOD::JetContainer, float> readTau21WTA(m_readTau21WTAKey);
+   SG::ReadDecorHandle<xAOD::JetContainer, float> readC2(m_readC2Key);
+   SG::ReadDecorHandle<xAOD::JetContainer, float> readD2(m_readD2Key);
+   SG::ReadDecorHandle<xAOD::JetContainer, float> readSplit12(m_readSplit12Key);
+
+   /// Mass and
pT + /// It is assumed that these are the combined and calibrated mass and pT + ANN_inputValues["CaloTACombinedMassUncorrelated"] = jet.m(); + ANN_inputValues["JetpTCorrByCombinedMass"] = jet.pt(); + + /// Splitting scales + ANN_inputValues["Split12"] = readSplit12(jet); + + /// Energy Correlation Functions + ANN_inputValues["C2"] = readC2(jet); + ANN_inputValues["D2"] = readD2(jet); + + /// Tau21 WTA + ANN_inputValues["Tau21_wta"] = readTau21WTA(jet); + + if ( m_tagClass == TAGCLASS::WBoson ) { + + ATH_MSG_DEBUG( "Loading variables for W boson tagger" ); + + /// Other moments + ANN_inputValues["FoxWolfram20"] = jet.getAttribute<float>("FoxWolfram2") / jet.getAttribute<float>("FoxWolfram0"); + ANN_inputValues["PlanarFlow"] = jet.getAttribute<float>("PlanarFlow"); + ANN_inputValues["Angularity"] = jet.getAttribute<float>("Angularity"); + ANN_inputValues["Aplanarity"] = jet.getAttribute<float>("Aplanarity"); + ANN_inputValues["ZCut12"] = jet.getAttribute<float>("ZCut12"); + ANN_inputValues["KtDR"] = jet.getAttribute<float>("KtDR"); + + } + + else if ( m_tagClass == TAGCLASS::TopQuark ) { + + ATH_MSG_DEBUG( "Loading variables for top quark tagger" ); + + /// Create top quark read decor handles + SG::ReadDecorHandle<xAOD::JetContainer, float> readTau1WTA(m_readTau1WTAKey); + SG::ReadDecorHandle<xAOD::JetContainer, float> readTau2WTA(m_readTau2WTAKey); + SG::ReadDecorHandle<xAOD::JetContainer, float> readTau3WTA(m_readTau3WTAKey); + SG::ReadDecorHandle<xAOD::JetContainer, float> readTau32WTA(m_readTau32WTAKey); + SG::ReadDecorHandle<xAOD::JetContainer, float> readSplit23(m_readSplit23Key); + SG::ReadDecorHandle<xAOD::JetContainer, float> readQw(m_readQwKey); + SG::ReadDecorHandle<xAOD::JetContainer, float> readE3(m_readE3Key); + + /// Additional splitting Scales + ANN_inputValues["Split23"] = readSplit23(jet); + + /// e3 := normalized ECF3/ECF1**3 + ANN_inputValues["e3"] = readE3(jet); + + /// N-subjettiness + ANN_inputValues["Tau1_wta"] = readTau1WTA(jet); + 
ANN_inputValues["Tau2_wta"] = readTau2WTA(jet); + ANN_inputValues["Tau3_wta"] = readTau3WTA(jet); + + ANN_inputValues["Tau32_wta"] = readTau32WTA(jet); + + /// Qw observable for top tagging + ANN_inputValues["Qw"] = readQw(jet); + + } + + else { + ATH_MSG_ERROR( "Loading variables failed because the tagger type is not supported" ); + } + + ANN_inputs["node_0"] = ANN_inputValues; + + return ANN_inputs; + +} + diff --git a/Reconstruction/Jet/BoostedJetTaggers/Root/JSSWTopTaggerDNN.cxx b/Reconstruction/Jet/BoostedJetTaggers/Root/JSSWTopTaggerDNN.cxx new file mode 100644 index 0000000000000000000000000000000000000000..af30c48b8b05119bdfe3261db47432ec61d94edc --- /dev/null +++ b/Reconstruction/Jet/BoostedJetTaggers/Root/JSSWTopTaggerDNN.cxx @@ -0,0 +1,399 @@ +/* + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +*/ + +#include "BoostedJetTaggers/JSSWTopTaggerDNN.h" + +#include <fstream> + +JSSWTopTaggerDNN::JSSWTopTaggerDNN( const std::string& name ) : + JSSTaggerBase( name ), + m_lwnn(nullptr) +{ + +} + +/// Initialize the tagger +StatusCode JSSWTopTaggerDNN::initialize() { + + ATH_MSG_INFO( "Initializing JSSWTopTaggerDNN tool" ); + + /// Pt values are defined in GeV + m_ptGeV = true; + + /// Use mass cut + m_useMassCut = true; + + /// Use discriminant score cut + m_useScoreCut = true; + + if ( ! 
m_configFile.empty() ) { + + /// Get configReader + ATH_CHECK( getConfigReader() ); + + /// Get tagger type + m_tagType = m_configReader.GetValue("TaggerType" ,""); + + /// Get the CVMFS calib area where stuff is stored + /// If this is set to "Local" then it will look for the config file in the share space + m_kerasCalibArea = m_configReader.GetValue("CalibAreaKeras" ,""); + + /// Get the name/path of the JSON config + m_kerasConfigFileName = m_configReader.GetValue("KerasConfigFile" ,""); + + /// Get the name of the Keras output node + m_kerasConfigOutputName = m_configReader.GetValue("KerasOutput" ,""); + + /// Get the configured cut values + m_strMassCutLow = m_configReader.GetValue("MassCutLow" ,""); + m_strMassCutHigh = m_configReader.GetValue("MassCutHigh" ,""); + m_strScoreCut = m_configReader.GetValue("ScoreCut" ,""); + + /// Get min and max jet pt + m_jetPtMin = m_configReader.GetValue("pTCutLow", 350.0); + m_jetPtMax = m_configReader.GetValue("pTCutHigh", 4000.0); + + /// Get the decoration name + m_decorationName = m_configReader.GetValue("DecorationName" ,""); + + /// Get the scale factor configuration + m_calcSF = m_configReader.GetValue("CalcSF", false); + if ( m_calcSF ) { + m_weightDecorationName = m_configReader.GetValue("WeightDecorationName", ""); + m_weightFileName = m_configReader.GetValue("WeightFile", ""); + m_weightHistogramName = m_configReader.GetValue("WeightHistogramName", ""); + m_efficiencyHistogramName = m_configReader.GetValue("EfficiencyHistogramName", ""); + m_weightFlavors = m_configReader.GetValue("WeightFlavors", ""); + + /// Get truth label name information + m_truthLabelName = m_configReader.GetValue("TruthLabelName" , "R10TruthLabel_R21Consolidated"); + } + + /// print out the configuration parameters for viewing + ATH_MSG_INFO( "Configurations Loaded :"); + ATH_MSG_INFO( "tagType : " << m_tagType ); + ATH_MSG_INFO( "calibarea_keras : " << m_kerasCalibArea ); + ATH_MSG_INFO( "kerasConfigFileName : " << m_kerasConfigFileName 
); + ATH_MSG_INFO( "kerasConfigOutputName : " << m_kerasConfigOutputName ); + ATH_MSG_INFO( "strMassCutLow : " << m_strMassCutLow ); + ATH_MSG_INFO( "strMassCutHigh : " << m_strMassCutHigh ); + ATH_MSG_INFO( "pTCutLow : " << m_jetPtMin ); + ATH_MSG_INFO( "pTCutHigh : " << m_jetPtMax ); + ATH_MSG_INFO( "strScoreCut : " << m_strScoreCut ); + ATH_MSG_INFO( "decorationName : " << m_decorationName ); + if ( m_calcSF ) { + ATH_MSG_INFO( "weightDecorationName : " << m_weightDecorationName ); + ATH_MSG_INFO( "weightFile : " << m_weightFileName ); + ATH_MSG_INFO( "weightHistogramName : " << m_weightHistogramName ); + ATH_MSG_INFO( "efficiencyHistogramName : "<<m_efficiencyHistogramName ); + ATH_MSG_INFO( "weightFlavors : " << m_weightFlavors ); + ATH_MSG_INFO( "TruthLabelName : " << m_truthLabelName ); + } + } + else { /// No config file + /// Assume the cut functions have been set through properties. + /// Check they are non empty + if ( (m_kerasConfigFileName.empty() || + m_kerasConfigOutputName.empty() || + m_strScoreCut.empty() || + m_strMassCutLow.empty() || + m_strMassCutHigh.empty() || + m_decorationName.empty() || + m_weightFileName.empty()) || + ((m_weightDecorationName.empty() || + m_weightHistogramName.empty() || + m_weightFlavors.empty()) && m_calcSF) + ) + { + ATH_MSG_ERROR( "No config file provided OR you haven't manually specified all needed parameters" ) ; + ATH_MSG_ERROR( "Please read the TWiki for this tool" ); + return StatusCode::FAILURE; + } + + } + + ATH_MSG_INFO( "Mass cut low : " << m_strMassCutLow ); + ATH_MSG_INFO( "Mass cut High : " << m_strMassCutHigh ); + ATH_MSG_INFO( "Score cut low : " << m_strScoreCut ); + + /// If the calibarea is specified to be "Local" then it looks in the same place as the top level configs + if ( m_kerasCalibArea.empty() ) { + ATH_MSG_ERROR( "You need to specify where the calibarea is as either being Local or on CVMFS" ); + return StatusCode::FAILURE; + } + else if ( !m_kerasCalibArea.compare("Local") ){ + std::string 
localCalibArea = "BoostedJetTaggers/JSSWTopTaggerDNN/"; + ATH_MSG_INFO( "Using Local calibarea " << localCalibArea ); + /// Convert the JSON config file name to the full path + m_kerasConfigFilePath = PathResolverFindCalibFile(localCalibArea+m_kerasConfigFileName); + if ( m_calcSF ) + m_weightConfigPath = PathResolverFindCalibFile(localCalibArea+m_weightFileName); + } + else { + ATH_MSG_INFO( "Using CVMFS calibarea" ); + /// Get the config file from CVMFS + /// Necessary because xml files are too large to house on the data space + m_kerasConfigFilePath = PathResolverFindCalibFile( (m_kerasCalibArea+m_kerasConfigFileName).c_str() ); + if ( m_calcSF ) + m_weightConfigPath = PathResolverFindCalibFile( (m_kerasCalibArea+m_weightFileName).c_str()); + } + + /// Read json file for DNN weights + ATH_MSG_INFO( "DNN Tagger configured with: " << m_kerasConfigFilePath ); + + std::ifstream input_cfg( m_kerasConfigFilePath.c_str() ); + + if ( !input_cfg.is_open() ) { + ATH_MSG_ERROR( "Error openning config file: " << m_kerasConfigFilePath ); + ATH_MSG_ERROR( "Are you sure that the file exists at this path?" ); + return StatusCode::FAILURE; + } + + lwt::JSONConfig cfg = lwt::parse_json( input_cfg ); + + ATH_MSG_INFO( "Keras Network NLayers: " << cfg.layers.size() ); + + m_lwnn = std::make_unique<lwt::LightweightNeuralNetwork>(cfg.inputs, cfg.layers, cfg.outputs); + + /// Set internal tagger type + if ( !m_tagType.compare("TopQuark") ) { + ATH_MSG_DEBUG( "This is a top quark tagger" ); + m_tagClass = TAGCLASS::TopQuark; + } + else if ( !m_tagType.compare("WBoson") ) { + ATH_MSG_DEBUG( "This is a W boson tagger" ); + m_tagClass = TAGCLASS::WBoson; + } + else if ( !m_tagType.compare("ZBoson") ) { + ATH_MSG_DEBUG( "This is a Z boson tagger" ); + m_tagClass = TAGCLASS::ZBoson; + } + else { + ATH_MSG_ERROR( "I can't tell what kind of tagger your configuration is for." 
); + return StatusCode::FAILURE; + } + + /// Set the possible states that the tagger can be left in after the JSSTaggerBase::tag() function is called + m_acceptInfo.addCut( "PassMassLow" , "mJet > mCutLow" ); + m_acceptInfo.addCut( "PassScore" , "ScoreJet > ScoreCut" ); + if ( m_tagClass == TAGCLASS::WBoson || m_tagClass == TAGCLASS::ZBoson ) { + m_acceptInfo.addCut( "PassMassHigh", "mJet < mCutHigh" ); + } + + /// Loop over and print out the cuts that have been configured + printCuts(); + + /// Call base class initialize + ATH_CHECK( JSSTaggerBase::initialize() ); + + ATH_MSG_INFO( "DNN Tagger tool initialized" ); + + return StatusCode::SUCCESS; + +} + +StatusCode JSSWTopTaggerDNN::tag( const xAOD::Jet& jet ) const { + + ATH_MSG_DEBUG( "Obtaining DNN result" ); + + /// Create asg::AcceptData object + asg::AcceptData acceptData( &m_acceptInfo ); + + /// Reset the AcceptData cut results + ATH_CHECK( resetCuts( acceptData ) ); + + /// Check basic kinematic selection + ATH_CHECK( checkKinRange( jet, acceptData ) ); + + /// Get the relevant attributes of the jet + /// Mass and pt - note that this will depend on the configuration of the calibration used + float jet_pt = jet.pt()/1000.0; + float jet_mass = jet.m()/1000.0; + + /// Get DNN score for the jet + float jet_score = getScore(jet); + + /// Evaluate the values of the upper and lower mass bounds and the d2 cut + float cut_mass_low = m_funcMassCutLow ->Eval(jet_pt); + float cut_mass_high = m_funcMassCutHigh->Eval(jet_pt); + float cut_score = m_funcScoreCut ->Eval(jet_pt); + + /// Print cut criteria and jet values + ATH_MSG_VERBOSE( "Cut values : Mass window = [" << cut_mass_low << "," << cut_mass_high << "], score cut = " << cut_score ); + ATH_MSG_VERBOSE( "Jet values : Mass = " << jet_mass << ", score = " << jet_score ); + + /// Get SF weight + ATH_CHECK( getWeight( jet, jet_score > cut_score, acceptData ) ); + + /// Decorate cut information if needed + ATH_MSG_DEBUG( "Decorating with score" ); + + /// Create 
WriteDecorHandles + SG::WriteDecorHandle<xAOD::JetContainer, bool> decPassMass(m_decPassMassKey); + SG::WriteDecorHandle<xAOD::JetContainer, bool> decPassScore(m_decPassScoreKey); + SG::WriteDecorHandle<xAOD::JetContainer, bool> decTagged(m_decTaggedKey); + SG::WriteDecorHandle<xAOD::JetContainer, float> decCutMLow(m_decCutMLowKey); + SG::WriteDecorHandle<xAOD::JetContainer, float> decCutMHigh(m_decCutMHighKey); + SG::WriteDecorHandle<xAOD::JetContainer, float> decScoreCut(m_decScoreCutKey); + SG::WriteDecorHandle<xAOD::JetContainer, float> decScoreValue(m_decScoreValueKey); + + /// Decorate values + decCutMLow(jet) = cut_mass_low; + decCutMHigh(jet) = cut_mass_high; + decScoreCut(jet) = cut_score; + decScoreValue(jet) = jet_score; + + /// Cut summary + bool passCuts = true; + + /// Set the AcceptData depending on whether it is a W/Z or a top tagger + if ( m_tagClass == TAGCLASS::WBoson || m_tagClass == TAGCLASS::ZBoson ) { + ATH_MSG_VERBOSE( "Determining WZ tag return" ); + if ( jet_mass > cut_mass_low ) acceptData.setCutResult( "PassMassLow", true ); + if ( jet_mass < cut_mass_high ) acceptData.setCutResult( "PassMassHigh", true ); + if ( jet_score > cut_score ) acceptData.setCutResult( "PassScore", true ); + decPassMass(jet) = acceptData.getCutResult( "PassMassLow" ) && acceptData.getCutResult( "PassMassHigh" ); + passCuts = passCuts && acceptData.getCutResult( "PassMassLow" ) && acceptData.getCutResult( "PassMassHigh" ); + } + else if ( m_tagClass == TAGCLASS::TopQuark ) { + ATH_MSG_VERBOSE( "Determining TopQuark tag return" ); + if ( jet_mass > cut_mass_low ) acceptData.setCutResult( "PassMassLow", true ); + if ( jet_score > cut_score ) acceptData.setCutResult( "PassScore", true ); + decPassMass(jet) = acceptData.getCutResult( "PassMassLow" ); + passCuts = passCuts && acceptData.getCutResult( "PassMassLow" ); + } + + decPassScore(jet) = acceptData.getCutResult( "PassScore" ); + + passCuts = passCuts && acceptData.getCutResult( "PassScore" ); + + decTagged(jet) 
= passCuts; + + return StatusCode::SUCCESS; + +} + +double JSSWTopTaggerDNN::getScore( const xAOD::Jet& jet ) const { + + /// Create input dictionary map<string,double> for argument to lwtnn + std::map<std::string,double> DNN_inputValues = getJetProperties(jet); + + /// Evaluate the network response + lwt::ValueMap discriminant = m_lwnn->compute(DNN_inputValues); + + /// Obtain the output associated with the single output node + double DNNscore = -666.; + + /// Check that input variables are valid + bool validVars = true; + SG::ReadDecorHandle<xAOD::JetContainer, float> readTau21WTA(m_readTau21WTAKey); + if ( readTau21WTA(jet) < 0.0 ) validVars = false; + if ( m_tagClass == TAGCLASS::TopQuark ) { + SG::ReadDecorHandle<xAOD::JetContainer, float> readTau32WTA(m_readTau32WTAKey); + if ( readTau32WTA(jet) < 0.0 ) validVars = false; + } + + if ( !validVars ) { + + if ( m_nWarnVar++ < m_nWarnMax ) ATH_MSG_WARNING( "One (or more) tagger input variable has an out-of-range value, setting score to -666" ); + else ATH_MSG_DEBUG( "One (or more) tagger input variable has an out-of-range value, setting score to -666" ); + + return DNNscore; + + } + + DNNscore = discriminant[m_kerasConfigOutputName]; + + return DNNscore; + +} + +std::map<std::string,double> JSSWTopTaggerDNN::getJetProperties( const xAOD::Jet& jet ) const { + + /// Map to store inputs + std::map<std::string,double> DNN_inputValues; + + /// Calculate NSubjettiness and ECF ratios + calculateJSSRatios(jet); + + ATH_MSG_DEBUG( "Loading variables for common DNN tagger" ); + + /// Create common read decor handles + SG::ReadDecorHandle<xAOD::JetContainer, float> readTau21WTA(m_readTau21WTAKey); + SG::ReadDecorHandle<xAOD::JetContainer, float> readC2(m_readC2Key); + SG::ReadDecorHandle<xAOD::JetContainer, float> readD2(m_readD2Key); + SG::ReadDecorHandle<xAOD::JetContainer, float> readSplit12(m_readSplit12Key); + + /// Mass and pT + /// It is assumed that these are the combined and calibrated mass and pT + 
DNN_inputValues["CaloTACombinedMassUncorrelated"] = jet.m(); + DNN_inputValues["JetpTCorrByCombinedMass"] = jet.pt(); + + /// Splitting scales + DNN_inputValues["Split12"] = readSplit12(jet); + + /// Energy Correlation Functions + DNN_inputValues["C2"] = readC2(jet); + DNN_inputValues["D2"] = readD2(jet); + + /// Tau21 WTA + DNN_inputValues["Tau21_wta"] = readTau21WTA(jet); + + if ( m_tagClass == TAGCLASS::WBoson ) { + + ATH_MSG_DEBUG( "Loading variables for W boson tagger" ); + + /// Other moments + DNN_inputValues["FoxWolfram20"] = jet.getAttribute<float>("FoxWolfram2") / jet.getAttribute<float>("FoxWolfram0"); + DNN_inputValues["PlanarFlow"] = jet.getAttribute<float>("PlanarFlow"); + DNN_inputValues["Angularity"] = jet.getAttribute<float>("Angularity"); + DNN_inputValues["Aplanarity"] = jet.getAttribute<float>("Aplanarity"); + DNN_inputValues["ZCut12"] = jet.getAttribute<float>("ZCut12"); + DNN_inputValues["KtDR"] = jet.getAttribute<float>("KtDR"); + + } + + else if ( m_tagClass == TAGCLASS::TopQuark ) { + + ATH_MSG_DEBUG("Loading variables for top quark tagger"); + + /// Create top quark read decor handles + SG::ReadDecorHandle<xAOD::JetContainer, float> readTau1WTA(m_readTau1WTAKey); + SG::ReadDecorHandle<xAOD::JetContainer, float> readTau2WTA(m_readTau2WTAKey); + SG::ReadDecorHandle<xAOD::JetContainer, float> readTau3WTA(m_readTau3WTAKey); + SG::ReadDecorHandle<xAOD::JetContainer, float> readTau32WTA(m_readTau32WTAKey); + SG::ReadDecorHandle<xAOD::JetContainer, float> readSplit23(m_readSplit23Key); + SG::ReadDecorHandle<xAOD::JetContainer, float> readQw(m_readQwKey); + SG::ReadDecorHandle<xAOD::JetContainer, float> readE3(m_readE3Key); + + /// Mass and pT again + DNN_inputValues["m"] = jet.m(); + DNN_inputValues["pt"] = jet.pt(); + + /// Additional splitting scales + DNN_inputValues["Split23"] = readSplit23(jet); + + /// e3 := normalized ECF3/ECF1**3 + DNN_inputValues["e3"] = readE3(jet); + + /// N-subjettiness + DNN_inputValues["Tau1_wta"] = 
readTau1WTA(jet); + DNN_inputValues["Tau2_wta"] = readTau2WTA(jet); + DNN_inputValues["Tau3_wta"] = readTau3WTA(jet); + + DNN_inputValues["Tau32_wta"] = readTau32WTA(jet); + + /// Qw observable for top tagging + DNN_inputValues["Qw"] = readQw(jet); + + } + + else { + ATH_MSG_ERROR( "Loading variables failed because the tagger type is not supported" ); + } + + return DNN_inputValues; + +} + diff --git a/Reconstruction/Jet/BoostedJetTaggers/Root/JetQGTagger.cxx b/Reconstruction/Jet/BoostedJetTaggers/Root/JetQGTagger.cxx new file mode 100644 index 0000000000000000000000000000000000000000..df139d452d567e163986448dd0da886913c39362 --- /dev/null +++ b/Reconstruction/Jet/BoostedJetTaggers/Root/JetQGTagger.cxx @@ -0,0 +1,734 @@ +/* + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +*/ + +#include "BoostedJetTaggers/JetQGTagger.h" + +#include <TRandom3.h> +#include <TSystem.h> + +#include "InDetTrackSelectionTool/InDetTrackSelectionTool.h" +#include "InDetTrackSystematicsTools/InDetTrackTruthFilterTool.h" +#include "InDetTrackSystematicsTools/InDetTrackTruthOriginTool.h" +#include "InDetTrackSystematicsTools/JetTrackFilterTool.h" + +#include "xAODTracking/VertexContainer.h" + +namespace CP { + + JetQGTagger::JetQGTagger( const std::string& name): JSSTaggerBase( name ), + m_appliedSystEnum(QG_NONE), + m_hquark(nullptr), + m_hgluon(nullptr), + m_topo_hquark(nullptr), + m_exp_hquark_up(nullptr), + m_exp_hquark_down(nullptr), + m_exp_hgluon_up(nullptr), + m_exp_hgluon_down(nullptr), + m_me_hquark_up(nullptr), + m_me_hquark_down(nullptr), + m_me_hgluon_up(nullptr), + m_me_hgluon_down(nullptr), + m_pdf_hquark_up(nullptr), + m_pdf_hquark_down(nullptr), + m_pdf_hgluon_up(nullptr), + m_pdf_hgluon_down(nullptr), + m_trackeff_hquark(nullptr), + m_trackeff_hgluon(nullptr), + m_fake_hquark(nullptr), + m_fake_hgluon(nullptr), + m_trkSelectionTool(name+"_trackselectiontool", this), + m_trkTruthFilterTool(name+"_trackfiltertool",this), + 
m_trkFakeTool(name+"_trackfaketool",this), + m_jetTrackFilterTool(name+"_jettrackfiltertool",this), + m_originTool(name+"_origintool",this) + { + + declareProperty( "NTrackCut", m_NTrackCut=-1); + declareProperty( "cuttype", m_cuttype="log_pt"); + declareProperty( "slope", m_slope=9.779); + declareProperty( "intercept", m_intercept=-32.28); + declareProperty( "UseJetVars", m_mode = 0); // 0 uses the tracks. 1 uses variables from the jets + + declareProperty( "Tagger", m_taggername = "ntrack"); + m_calibArea = "BoostedJetTaggers/QGTagger/May2019/"; // Overwrite base class default + declareProperty( "TopoWeightFile", m_topofile = ""); + declareProperty( "ExpWeightFile", m_expfile = "qgsyst_exp.root"); + declareProperty( "MEWeightFile", m_mefile = "qgsyst_me.root"); + declareProperty( "PDFWeightFile", m_pdffile = "qgsyst_pdf.root"); + declareProperty( "TrackEffFile", m_trackefffile = "track_systs.root");//REPLACE when file available + declareProperty( "FakeFile", m_fakefile = "track_systs.root");//REPLACE when file available + declareProperty( "MinPt", m_jetPtMin = 50e3); + declareProperty( "MaxEta", m_jetEtaMax = 2.1); + declareProperty( "WeightDecorationName", m_weight_decoration_name = "qgTaggerWeight"); + declareProperty( "TaggerDecorationName", m_tagger_decoration_name = "qgTagger"); + + + applySystematicVariation(SystematicSet()).ignore(); + + } + + StatusCode JetQGTagger::initialize() { + + ATH_MSG_INFO( "Initializing QuarkGluonTagger tool" ); + + if( ! 
m_configFile.empty() ) { + ATH_MSG_INFO( "Using config file : "<< m_configFile ); + // check for the existence of the configuration file + std::string configPath; + configPath = PathResolverFindDataFile(("BoostedJetTaggers/"+m_configFile).c_str()); + FileStat_t fStats; + int fSuccess = gSystem->GetPathInfo(configPath.c_str(), fStats); + + if ( fSuccess ){ + ATH_MSG_ERROR( "Recommendations file " << m_configFile << " could not be found"); + return StatusCode::FAILURE; + } + else { + ATH_MSG_DEBUG( "Recommendations file was found : " << configPath ); + } + + TEnv configReader; + if(configReader.ReadFile( configPath.c_str(), EEnvLevel(0) ) != 0 ) { + ATH_MSG_ERROR( "Error while reading config file : "<< configPath ); + return StatusCode::FAILURE; + } + + // read in the specified track cut in the config file + m_NTrackCut=configReader.GetValue("NTrackCut" ,-1); + + ATH_MSG_VERBOSE( "NTrackCut by config file : "<<m_NTrackCut ); + + } + else { + // no config file + // Assume the cut functions have been set through properties. + // check they are non empty + if( m_NTrackCut!=-1){ + ATH_MSG_VERBOSE( "NTrackCut by manual setting of property : "<<m_NTrackCut ); + } + else { + ATH_MSG_WARNING( "No config file provided AND no NTrackCut specified." ) ; + } + } + if(m_cuttype != "linear_pt" && m_cuttype != "threshold" && m_cuttype != "log_pt"){ + ATH_MSG_ERROR("Cuttype set to: " << m_cuttype ); + ATH_MSG_ERROR("Cuttype invalid. Must use 'linear_pt', 'log_pt', or 'threshold'"); + return StatusCode::FAILURE; + } + + // decorators used to store + // 1) ntracks + // 2) tagger weight + ATH_MSG_INFO( "Decorators that will be attached to jet :" ); + ATH_MSG_INFO( " " << m_tagger_decoration_name << " : Number of tracks for tagging decision" ); + ATH_MSG_INFO( " " << m_weight_decoration_name << " : Scale factor weight given the number of tracks" ); + + m_decTagKey = m_containerName + "." + m_tagger_decoration_name; + m_decWeightKey = m_containerName + "." 
+ m_weight_decoration_name; + + ATH_CHECK( m_decTagKey.initialize() ); + ATH_CHECK( m_decWeightKey.initialize() ); + + /// ReadDecorHandles for Ntrk variables + m_readNumTrkPt500PVKey = m_containerName + "." + m_readNumTrkPt500PVKey.key(); + m_readNtrkKey = m_containerName + "." + m_readNtrkKey.key(); + + ATH_CHECK( m_readNumTrkPt500PVKey.initialize() ); + ATH_CHECK( m_readNtrkKey.initialize() ); + + // set up InDet selection tool + ANA_CHECK( ASG_MAKE_ANA_TOOL( m_trkSelectionTool, InDet::InDetTrackSelectionTool ) ); + ANA_CHECK( m_trkSelectionTool.setProperty( "CutLevel", "Loose" ) ); + ANA_CHECK( m_trkSelectionTool.retrieve() ); + + // set up InDet truth track selection tools + ANA_CHECK( ASG_MAKE_ANA_TOOL( m_trkTruthFilterTool, InDet::InDetTrackTruthFilterTool ) ); + ANA_CHECK( ASG_MAKE_ANA_TOOL( m_trkFakeTool, InDet::InDetTrackTruthFilterTool ) ); + + ANA_CHECK( ASG_MAKE_ANA_TOOL( m_originTool, InDet::InDetTrackTruthOriginTool ) ); + ANA_CHECK( m_originTool.retrieve() ); + + ANA_CHECK( m_trkTruthFilterTool.setProperty( "Seed", 1234 ) ); + ANA_CHECK( m_trkTruthFilterTool.setProperty( "trackOriginTool", m_originTool ) ); + ANA_CHECK( m_trkTruthFilterTool.retrieve() ); + CP::SystematicSet systSetTrk = { + InDet::TrackSystematicMap[InDet::TRK_EFF_LOOSE_GLOBAL], + InDet::TrackSystematicMap[InDet::TRK_EFF_LOOSE_IBL], + InDet::TrackSystematicMap[InDet::TRK_EFF_LOOSE_PP0], + InDet::TrackSystematicMap[InDet::TRK_EFF_LOOSE_PHYSMODEL] + }; + ANA_CHECK( m_trkTruthFilterTool->applySystematicVariation(systSetTrk) ); + + // set up tools used for systematic variations of tracks + ANA_CHECK( m_trkFakeTool.setProperty( "Seed", 1234 ) ); + ANA_CHECK( m_trkFakeTool.setProperty( "trackOriginTool", m_originTool ) ); + ANA_CHECK( m_trkFakeTool.retrieve() ); + CP::SystematicSet systSetTrkFake = { + InDet::TrackSystematicMap[InDet::TRK_FAKE_RATE_LOOSE] + }; + ANA_CHECK( m_trkFakeTool->applySystematicVariation(systSetTrkFake) ); + + ANA_CHECK( ASG_MAKE_ANA_TOOL( m_jetTrackFilterTool, 
InDet::JetTrackFilterTool ) ); + ANA_CHECK( m_jetTrackFilterTool.setProperty( "Seed", 1234 ) ); + ANA_CHECK( m_jetTrackFilterTool.setProperty( "trackOriginTool", m_originTool ) ); + ANA_CHECK( m_jetTrackFilterTool.retrieve() ); + CP::SystematicSet systSetJet = { + InDet::TrackSystematicMap[InDet::TRK_EFF_LOOSE_TIDE] + }; + ANA_CHECK( m_jetTrackFilterTool->applySystematicVariation(systSetJet) ); + + // specify systematic variations relevant for this tool + if (!addAffectingSystematic(QGntrackSyst::trackfakes,true) || + !addAffectingSystematic(QGntrackSyst::trackefficiency,true) || + !addAffectingSystematic(QGntrackSyst::nchargedtopo,false /*for topology differences */) || + !addAffectingSystematic(QGntrackSyst::nchargedexp_up,true) || + !addAffectingSystematic(QGntrackSyst::nchargedme_up,true) || + !addAffectingSystematic(QGntrackSyst::nchargedpdf_up,true) || + !addAffectingSystematic(QGntrackSyst::nchargedexp_down,true) || + !addAffectingSystematic(QGntrackSyst::nchargedme_down,true) || + !addAffectingSystematic(QGntrackSyst::nchargedpdf_down,true) || + !addAffectingSystematic(QGntrackSyst::trackeff,true)|| + !addAffectingSystematic(QGntrackSyst::fake,true) + ) + { + ATH_MSG_ERROR("failed to set up JetQGTagger systematics"); + return StatusCode::FAILURE; + } + + // load in the histograms that store the ntrack systematics + if(m_topofile!="")//load topology file only if explicitly configured (default is "") + ANA_CHECK( this->loadHist(m_topo_hquark, m_topofile,"h2dquark") ); + ANA_CHECK( this->loadHist(m_exp_hquark_up, m_expfile,"h2dquark_up") ); + ANA_CHECK( this->loadHist(m_exp_hquark_down,m_expfile,"h2dquark_down")); + ANA_CHECK( this->loadHist(m_exp_hgluon_up, m_expfile,"h2dgluon_up") ); + ANA_CHECK( this->loadHist(m_exp_hgluon_down,m_expfile,"h2dgluon_down")); + ANA_CHECK( this->loadHist(m_me_hquark_up, m_mefile, "h2dquark_up") ); + ANA_CHECK( this->loadHist(m_me_hquark_down, m_mefile, "h2dquark_down")); + ANA_CHECK( this->loadHist(m_me_hgluon_up, m_mefile, 
"h2dgluon_up") ); + ANA_CHECK( this->loadHist(m_me_hgluon_down, m_mefile, "h2dgluon_down")); + ANA_CHECK( this->loadHist(m_pdf_hquark_up, m_pdffile,"h2dquark_up") ); + ANA_CHECK( this->loadHist(m_pdf_hquark_down,m_pdffile,"h2dquark_down")); + ANA_CHECK( this->loadHist(m_pdf_hgluon_up, m_pdffile,"h2dgluon_up") ); + ANA_CHECK( this->loadHist(m_pdf_hgluon_down,m_pdffile,"h2dgluon_down")); + ATH_MSG_INFO("about to load track syst histos"); + ATH_MSG_INFO("trackeff file: " << m_trackefffile); + ANA_CHECK( this->loadHist(m_trackeff_hquark,m_trackefffile,"track_syste_quark"));//REPLACE w/ right histo + ANA_CHECK( this->loadHist(m_trackeff_hgluon,m_trackefffile,"track_syste_gluon"));//REPLACE w/ right histo + ANA_CHECK( this->loadHist(m_fake_hquark,m_fakefile,"track_systf_quark"));//REPLACE w/ right histo + ANA_CHECK( this->loadHist(m_fake_hgluon,m_fakefile,"track_systf_gluon"));//REPLACE w/ right histo + + ATH_MSG_INFO( ": JetQGTagger tool initialized" ); + ATH_MSG_INFO( " NTrackCut : "<< m_NTrackCut ); + + /// Initialize the tagger states + m_acceptInfo.addCut( "QuarkJetTag", "True if the jet is deemed a quark jet because NTrack<NCut, False if jet deemed gluon jet because NTrack<NCut" ); + + /// Call base class initialize + ATH_CHECK( JSSTaggerBase::initialize() ); + + /// Loop over and print out the cuts that have been configured + printCuts(); + + return StatusCode::SUCCESS; + + } + + JetQGTagger::~JetQGTagger(){ + + delete m_topo_hquark; + delete m_exp_hquark_up; + delete m_exp_hquark_down; + delete m_exp_hgluon_up; + delete m_exp_hgluon_down; + delete m_me_hquark_up; + delete m_me_hquark_down; + delete m_me_hgluon_up; + delete m_me_hgluon_down; + delete m_pdf_hquark_up; + delete m_pdf_hquark_down; + delete m_pdf_hgluon_up; + delete m_pdf_hgluon_down; + delete m_trackeff_hquark; + delete m_trackeff_hgluon; + delete m_fake_hquark; + delete m_fake_hgluon; + + } + + StatusCode JetQGTagger::tag( const xAOD::Jet& jet, const xAOD::Vertex * pv ) const { + + ATH_MSG_DEBUG( 
"Obtaining QG result" ); + + double jetWeight = -1; + int jetNTrack = -1; + + /// Create asg::AcceptData object + asg::AcceptData acceptData( &m_acceptInfo ); + + /// Reset the AcceptData cut results + ATH_CHECK( resetCuts( acceptData ) ); + + /// Check basic kinematic selection + ATH_CHECK( checkKinRange( jet, acceptData ) ); + + /// Create WriteDecorHandles + SG::WriteDecorHandle<xAOD::JetContainer, bool> decTagged(m_decTaggedKey); + SG::WriteDecorHandle<xAOD::JetContainer, bool> decValidEventContent(m_decValidEventContentKey); + + /// If the jet isn't valid there's no point applying the remaining cuts + /// TODO: Is this actually needed? + if ( !passKinRange(jet) ) { + decTagged(jet) = false; + return StatusCode::SUCCESS; + } + + if ( m_mode == 0 ) { //do tagging assuming relevant track particle, PV, etc containers exist + bool isValid = true; + if ( pv ) ATH_MSG_DEBUG( "Obtaining JetQGTagger decision with user specific primary vertex" ); + else ATH_MSG_DEBUG( "Obtaining JetQGTagger decision default" ); + + // if no primary vertex is specified, then the 0th primary vertex is used + if ( !pv ) { + const xAOD::VertexContainer* vxCont = nullptr; + if ( evtStore()->retrieve( vxCont, "PrimaryVertices" ).isFailure() ) { + ATH_MSG_WARNING("Unable to retrieve primary vertex container PrimaryVertices"); + acceptData.setCutResult("ValidEventContent", false); + isValid = false; + } + else if ( vxCont->empty() ) { + ATH_MSG_WARNING("Event has no primary vertices!"); + acceptData.setCutResult("ValidEventContent", false); + isValid = false; + } + else { + for ( const auto& vx : *vxCont ) { + // take the first vertex in the list that is a primary vertex + if ( vx->vertexType()==xAOD::VxType::PriVtx ) { + pv = vx; + break; + } + } + } + // Now we have to make sure that we did ID one as PV + // I think this can happen in physics events (though they've got to be removed in order to perform a lot of calibrations) + // so I've elected to not spit out a warning message here + if ( 
!pv ) { + acceptData.setCutResult("ValidEventContent", false); + isValid = false; + } + } + + // If the object isn't valid there's no point applying the remaining cuts + if ( !isValid ) return StatusCode::SUCCESS; + + // obtain the relevant information for tagging + // 1) the number of tracks + // 2) jet-by-jet event weight + ATH_CHECK( getNTrack(&jet, /*pv,*/ jetNTrack) ); + ATH_CHECK( getNTrackWeight(&jet, jetWeight) ); + + } + + if ( m_mode == 1 ) { //only calculating uncertainty using given jet info (nTrk already calculated, etc) + ATH_CHECK( simplegetNTrackWeight(&jet, jetWeight) ); + SG::ReadDecorHandle<xAOD::JetContainer, int> readNumTrkPt500PV(m_readNumTrkPt500PVKey); + SG::ReadDecorHandle<xAOD::JetContainer, int> readNtrk(m_readNtrkKey); + if ( readNtrk.isAvailable() ) jetNTrack = readNtrk(jet); + else if ( readNumTrkPt500PV.isAvailable() ) jetNTrack = readNumTrkPt500PV(jet); + else { + ATH_MSG_ERROR("Neither NumTrkPt500PV nor DFCommonJets_QGTagger_NTracks is available for your jet. 
Please add it before running in mode 1 of the JetQGTagger."); + return StatusCode::FAILURE; + } + + // decorate the cut value if specified + SG::WriteDecorHandle<xAOD::JetContainer, float> decWeight(m_decWeightKey); + decWeight(jet) = jetWeight; + } + + decValidEventContent(jet) = acceptData.getCutResult( "ValidEventContent" ); + + // decorate the cut value if specified + SG::WriteDecorHandle<xAOD::JetContainer, float> decTag(m_decTagKey); + SG::WriteDecorHandle<xAOD::JetContainer, float> decWeight(m_decWeightKey); + + decTag(jet) = jetNTrack; + decWeight(jet) = jetWeight; + + // fill the AcceptData + ATH_MSG_DEBUG("NTrack = "<<jetNTrack); + ATH_MSG_DEBUG("NTrackWeight = "<<jetWeight); + double variable_nTrk = -999.0; + if (m_cuttype=="linear_pt"){ + variable_nTrk=(m_slope*jet.pt())+m_intercept; + if(jetNTrack<variable_nTrk) acceptData.setCutResult("QuarkJetTag", true); + } + else if (m_cuttype=="log_pt"){ + variable_nTrk=(m_slope*TMath::Log10(jet.pt()))+m_intercept; + if(jetNTrack<variable_nTrk) acceptData.setCutResult("QuarkJetTag", true); + } + else if(m_cuttype=="threshold" && jetNTrack<m_NTrackCut) acceptData.setCutResult("QuarkJetTag", true); + + decTagged(jet) = acceptData.getCutResult( "QuarkJetTag" ); + + return StatusCode::SUCCESS; + + } + + StatusCode JetQGTagger::simplegetNTrackWeight(const xAOD::Jet * jet, double &weight) const { + + ATH_MSG_DEBUG( "Getting the jet weight for systematic variation " << m_appliedSystEnum ); + + // initially set the weight to unity + // this is the weight returned if you are *not* dealing with a systematic variation + weight = 1.0; + ATH_MSG_DEBUG("Getting the jet weight for systematic variation " << m_appliedSystEnum); + ATH_MSG_DEBUG("made it into simplegetntrk"); + + // if you are not dealing with a systematic variation, then exit + if ( m_appliedSystEnum!=QG_NCHARGEDEXP_UP && + m_appliedSystEnum!=QG_NCHARGEDME_UP && + m_appliedSystEnum!=QG_NCHARGEDPDF_UP && + m_appliedSystEnum!=QG_NCHARGEDEXP_DOWN && + 
m_appliedSystEnum!=QG_NCHARGEDME_DOWN && + m_appliedSystEnum!=QG_NCHARGEDPDF_DOWN && + m_appliedSystEnum!=QG_TRACKEFFICIENCY && + m_appliedSystEnum!=QG_TRACKFAKES + ) + { + return StatusCode::SUCCESS; + } + + // use the lookup tables loaded in initialize() to find the systematically shifted weights + bool truthsyst = m_appliedSystEnum==QG_NCHARGEDEXP_UP || m_appliedSystEnum==QG_NCHARGEDME_UP || m_appliedSystEnum==QG_NCHARGEDPDF_UP || m_appliedSystEnum == QG_NCHARGEDEXP_DOWN || m_appliedSystEnum== QG_NCHARGEDME_DOWN || m_appliedSystEnum == QG_NCHARGEDPDF_DOWN; + bool recosyst = m_appliedSystEnum==QG_TRACKEFFICIENCY || m_appliedSystEnum == QG_TRACKFAKES; + + int ptbin, ntrkbin; + int pdgid = jet->getAttribute<int>("PartonTruthLabelID"); + if (truthsyst){ + int tntrk = jet->getAttribute<int>("DFCommonJets_QGTagger_truthjet_nCharged"); + float tjetpt = jet->getAttribute<float>("DFCommonJets_QGTagger_truthjet_pt")*0.001; + float tjeteta = jet->getAttribute<float>("DFCommonJets_QGTagger_truthjet_eta"); + ATH_MSG_DEBUG("truth jet pdgid: " << pdgid << " pt: " << tjetpt); + if ( pdgid<0 ) { + ATH_MSG_DEBUG("Undefined pdg ID: setting weight to 1"); + return StatusCode::SUCCESS; + } + + // if the jet is outside of the measurement fiducial region + // the systematic uncertainty is set to 0 + if ( tjetpt < m_jetPtMin*1e-3 || std::abs(tjeteta) > m_jetEtaMax ) { + ATH_MSG_DEBUG( "Outside of fiducial region: setting weight to 1" ); + return StatusCode::SUCCESS; + } + + if ( pdgid==21 && m_appliedSystEnum!=QG_NCHARGEDTOPO ) { + ptbin = m_hgluon->GetXaxis()->FindBin(tjetpt); + ntrkbin = m_hgluon->GetYaxis()->FindBin(tntrk); + weight = m_hgluon->GetBinContent(ptbin,ntrkbin); + }// gluon + else if ( pdgid < 5 && m_appliedSystEnum != QG_NCHARGEDTOPO && m_appliedSystEnum != QG_TRACKEFFICIENCY && m_appliedSystEnum != QG_TRACKFAKES ) { + ptbin = m_hquark->GetXaxis()->FindBin(tjetpt); + ntrkbin = m_hquark->GetYaxis()->FindBin(tntrk); + weight = m_hquark->GetBinContent(ptbin,ntrkbin); + 
}//quarks + else { + ATH_MSG_INFO( "Neither quark nor gluon jet: setting weight to 1" ); + } + } + + // check if jet contains at least one NTracks variables + // prefer to use DFCommonJets* version + int ntrk = -1; + if ( recosyst) { + SG::ReadDecorHandle<xAOD::JetContainer, int> readNumTrkPt500PV(m_readNumTrkPt500PVKey); + SG::ReadDecorHandle<xAOD::JetContainer, int> readNtrk(m_readNtrkKey); + if ( readNtrk.isAvailable() ) ntrk = readNtrk(*jet); + else if ( readNumTrkPt500PV.isAvailable() ) ntrk = readNumTrkPt500PV(*jet); + else ATH_MSG_ERROR("Neither NumTrkPt500PV nor DFCommonJets_QGTagger_NTracks is available for your jet. Please add it before running mode 1 JetQGTagger."); + //float rjetpt = jet->getAttribute<float>("truthjet_pt")*0.001; + float rjetpt = jet->pt()*1e-3; + float rjeteta = jet->eta(); + + ATH_MSG_DEBUG("reco jet Pt: " << rjetpt << " eta: " << rjeteta); + if( rjetpt<m_jetPtMin*1e-3 || std::abs(rjeteta)>m_jetEtaMax){ + ATH_MSG_DEBUG("Outside of fiducial region: setting weight to 1"); + return StatusCode::SUCCESS; + } + + if ( pdgid < 5 ) { + ptbin = m_hquark->GetXaxis()->FindBin(rjetpt); + ntrkbin = m_hquark->GetYaxis()->FindBin(ntrk); + weight = m_hquark->GetBinContent(ptbin,ntrkbin); + } + if ( pdgid == 21 ) { + ptbin = m_hgluon->GetXaxis()->FindBin(rjetpt); + ntrkbin = m_hgluon->GetYaxis()->FindBin(ntrk); + weight = m_hgluon->GetBinContent(ptbin,ntrkbin); + } + } + + ATH_MSG_DEBUG("weight: " << weight); + + return StatusCode::SUCCESS; + + } + + StatusCode JetQGTagger::getNTrack(const xAOD::Jet * jet, /*const xAOD::Vertex * pv,*/ int &ntracks) const { + + ATH_MSG_DEBUG( "Counting the number of tracks in the jet" ); + + ntracks = 0; + // loop over the tracks associated to the jet of interest + std::vector<const xAOD::IParticle*> jettracks; + + if(!jet->getAssociatedObjects<xAOD::IParticle>(xAOD::JetAttribute::GhostTrack,jettracks)){ + ATH_MSG_ERROR("This jet has no associated objects, so it will not be tagged. 
Please check the jet collection you are using."); + ntracks=999; + //Returning failure as this jet has no associated objects and we do not want to wrongly classify it as a gluon or quark using tag(). + //Physics should be independent of skimming, which may have removed tracks. + //So we are returning a failure, and throwing an exception. + return StatusCode::FAILURE; + } + + for (size_t i = 0; i < jettracks.size(); i++) { + + const xAOD::TrackParticle* trk = static_cast<const xAOD::TrackParticle*>(jettracks[i]); + + if(!trk){ + ATH_MSG_ERROR("This jet has null tracks, so it will not be tagged. Please check the jet collection you are using."); + ntracks=998; + //Returning failure as this jet has null tracks and we do not want to wrongly classify it as a gluon or quark using tag(). + //Physics should be independent of skimming, which may have introduced null tracks. + //So we are returning a failure, and throwing an exception. + return StatusCode::FAILURE; + } + + // if you are applying a systematic variation then + // FRANCESCO ADD COMMENT + + bool acceptSyst = true; + + if ( m_appliedSystEnum==QG_TRACKEFFICIENCY ) + acceptSyst = ( m_trkTruthFilterTool->accept(trk) && m_jetTrackFilterTool->accept(trk,jet) ); + else if ( m_appliedSystEnum==QG_TRACKFAKES ) + acceptSyst = m_trkFakeTool->accept(trk); + + if (!acceptSyst) + continue; + + // only count tracks with selections + // 1) pt>500 MeV + // 2) accepted track from InDetTrackSelectionTool with CutLevel==Loose + // 3) associated to primary vertex OR within 3mm of the primary vertex + bool accept = (trk->pt()>500 && + m_trkSelectionTool->accept(*trk) + // TODO: Implement alternative to TrackParticle::vertex() + //&& (trk->vertex()==pv || (!trk->vertex() && std::abs((trk->z0()+trk->vz()-pv->z())*sin(trk->theta()))<3.)) + ); + if (!accept) + continue; + + ntracks++; + } + + return StatusCode::SUCCESS; + } + + + + StatusCode JetQGTagger::getNTrackWeight(const xAOD::Jet * jet, double &weight) const { + + ATH_MSG_DEBUG( 
"Getting the jet weight for systematic variation " << m_appliedSystEnum ); + + // initially set the weight to unity + // this is the weight returned if you are *not* dealing with a systematic variation + weight = 1.0; + + // if you are not dealing with a systematic variation, then exit + if ( m_appliedSystEnum!=QG_NCHARGEDTOPO && + m_appliedSystEnum!=QG_NCHARGEDEXP_UP && + m_appliedSystEnum!=QG_NCHARGEDME_UP && + m_appliedSystEnum!=QG_NCHARGEDPDF_UP && + m_appliedSystEnum!=QG_NCHARGEDEXP_DOWN && + m_appliedSystEnum!=QG_NCHARGEDME_DOWN && + m_appliedSystEnum!=QG_NCHARGEDPDF_DOWN + ) + return StatusCode::SUCCESS; + + int pdgid = jet->getAttribute<int>("PartonTruthLabelID"); + if ( pdgid<0 ) { + ATH_MSG_DEBUG("Undefined pdg ID: setting weight to 1"); + return StatusCode::SUCCESS; + } + + // getting the associated truth jet + // FRANCESCO COMMENT + const xAOD::Jet* tjet; + if(jet->isAvailable< ElementLink<xAOD::JetContainer> >("GhostTruthAssociationLink") ){ + ATH_MSG_DEBUG("Accessing GhostTruthAssociationLink: is available"); + if(jet->auxdata< ElementLink<xAOD::JetContainer> >("GhostTruthAssociationLink").isValid() ){ + ATH_MSG_DEBUG("Accessing GhostTruthAssociationLink: is valid"); + ElementLink<xAOD::JetContainer> truthlink = jet->auxdata< ElementLink<xAOD::JetContainer> >("GhostTruthAssociationLink"); + if(truthlink) + tjet = * truthlink; + else{ + ATH_MSG_WARNING("Cannot access truth: setting weight to 1"); + return StatusCode::SUCCESS; + }//endelse NULL pointer + } + else { + ATH_MSG_WARNING("Cannot access truth: setting weight to 1"); + return StatusCode::SUCCESS; + } //endelse isValid + } //endif isAvailable + else { + ATH_MSG_WARNING("Cannot access truth: setting weight to 1"); + return StatusCode::SUCCESS; + }//endelse isAvailable + + // if the jet is outside of the measurement fiducial region + // the systematic uncertainty is set to 0 + double tjetpt = tjet->pt()*0.001; + double tjeteta = tjet->eta(); + if( tjetpt<m_jetPtMin*1.0e-3 || 
std::abs(tjeteta)>m_jetEtaMax){ + ATH_MSG_DEBUG("Outside of fiducial region: setting weight to 1"); + return StatusCode::SUCCESS; + } + + // compute truth ntrk + int tntrk = 0; + for (size_t ind = 0; ind < tjet->numConstituents(); ind++) { + const xAOD::TruthParticle *part = static_cast<const xAOD::TruthParticle*>(tjet->rawConstituent(ind)); + + // dont count invalid truth particles + if (!part) continue; + // require the particle in the final state + if( ! (part->status() == 1) ) continue; + // require that the particle type (e.g. production type) be valid (e.g. not primaries) + if ((part->barcode())>2e5) continue; + // pt>500 MeV + if( ! (part->pt()>500.) ) continue; + // charged + if( !(part->isCharged()) ) continue; + // this seems redundant + // FRANCESCO COMMENT + double pt = part->pt(); + if( pt>500 ) tntrk++; + + } + + // use the lookup tables loaded in initialize() to find the systematically shifted weights + if ( pdgid==21 && m_appliedSystEnum!=QG_NCHARGEDTOPO){ + int ptbin = m_hgluon->GetXaxis()->FindBin(tjetpt); + int ntrkbin = m_hgluon->GetYaxis()->FindBin(tntrk); + weight = m_hgluon->GetBinContent(ptbin,ntrkbin); + }// gluon + else if ( pdgid<5 ){ + int ptbin = m_hquark->GetXaxis()->FindBin(tjetpt); + int ntrkbin = m_hquark->GetYaxis()->FindBin(tntrk); + weight = m_hquark->GetBinContent(ptbin,ntrkbin); + }//quarks + else{ + ATH_MSG_DEBUG("Neither quark nor gluon jet: setting weight to 1"); + } + + return StatusCode::SUCCESS; + } + + StatusCode JetQGTagger::sysApplySystematicVariation(const SystematicSet& systSet){ + + // FRANCESCO COMMENT + ATH_MSG_DEBUG( "Applying systematic variation by weight" ); + + // by default no systematics are applied + m_appliedSystEnum = QG_NONE; + + if (systSet.size()==0) { + ATH_MSG_DEBUG("No affecting systematics received."); + return StatusCode::SUCCESS; + } + else if (systSet.size()>1) { + ATH_MSG_WARNING("Tool does not support multiple systematics, returning unsupported" ); + return StatusCode::FAILURE; + } + 
SystematicVariation systVar = *systSet.begin(); + if (systVar == SystematicVariation("")) + m_appliedSystEnum = QG_NONE; + else if (systVar == QGntrackSyst::nchargedtopo){ + m_appliedSystEnum = QG_NCHARGEDTOPO; + m_hquark=m_topo_hquark; + } + else if (systVar == QGntrackSyst::trackefficiency) + m_appliedSystEnum = QG_TRACKEFFICIENCY; + else if (systVar == QGntrackSyst::trackfakes) + m_appliedSystEnum = QG_TRACKFAKES; + else if (systVar == QGntrackSyst::nchargedexp_up){ + m_appliedSystEnum = QG_NCHARGEDEXP_UP; + m_hquark=m_exp_hquark_up; + m_hgluon=m_exp_hgluon_up; + } + else if (systVar == QGntrackSyst::nchargedme_up){ + m_appliedSystEnum = QG_NCHARGEDME_UP; + m_hquark=m_me_hquark_up; + m_hgluon=m_me_hgluon_up; + } + else if (systVar == QGntrackSyst::nchargedpdf_up){ + m_appliedSystEnum = QG_NCHARGEDPDF_UP; + m_hquark=m_pdf_hquark_up; + m_hgluon=m_pdf_hgluon_up; + } + else if (systVar == QGntrackSyst::nchargedexp_down){ + m_appliedSystEnum = QG_NCHARGEDEXP_DOWN; + m_hquark=m_exp_hquark_down; + m_hgluon=m_exp_hgluon_down; + } + else if (systVar == QGntrackSyst::nchargedme_down){ + m_appliedSystEnum = QG_NCHARGEDME_DOWN; + m_hquark=m_me_hquark_down; + m_hgluon=m_me_hgluon_down; + } + else if (systVar == QGntrackSyst::nchargedpdf_down){ + m_appliedSystEnum = QG_NCHARGEDPDF_DOWN; + m_hquark=m_pdf_hquark_down; + m_hgluon=m_pdf_hgluon_down; + } + else if (systVar == QGntrackSyst::trackeff){ + m_appliedSystEnum = QG_TRACKEFFICIENCY; + m_hquark = m_trackeff_hquark; + m_hgluon = m_trackeff_hgluon; + } + else if (systVar == QGntrackSyst::fake){ + m_appliedSystEnum = QG_TRACKFAKES; + m_hquark = m_fake_hquark; + m_hgluon = m_fake_hgluon; + } + + else { + ATH_MSG_WARNING("unsupported systematic applied"); + return StatusCode::FAILURE; + } + + ATH_MSG_DEBUG("applied systematic is " << m_appliedSystEnum); + return StatusCode::SUCCESS; + } + + StatusCode JetQGTagger::loadHist(TH2D *&hist,std::string fname,std::string histname){ + + std::string filename = PathResolverFindCalibFile( 
(m_calibArea+fname).c_str() ); + ATH_MSG_INFO("CALIB FILE: " << filename << " histo: " << histname); + if (filename.empty()){ + ATH_MSG_ERROR( "Could NOT resolve file name " << fname ); + return StatusCode::FAILURE; + } + else{ + ATH_MSG_DEBUG( "Path found = " << filename ); + } + TFile* infile = TFile::Open(filename.c_str()); + hist = dynamic_cast<TH2D*>(infile->Get(histname.c_str())); + hist->SetDirectory(0); + return StatusCode::SUCCESS; + } + + +} /* namespace CP */ diff --git a/Reconstruction/Jet/BoostedJetTaggers/Root/JetQGTaggerBDT.cxx b/Reconstruction/Jet/BoostedJetTaggers/Root/JetQGTaggerBDT.cxx new file mode 100644 index 0000000000000000000000000000000000000000..b0f98b9312756794a8094ce32ce00bb7b69c3e52 --- /dev/null +++ b/Reconstruction/Jet/BoostedJetTaggers/Root/JetQGTaggerBDT.cxx @@ -0,0 +1,380 @@ +/* + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +*/ + +#include "BoostedJetTaggers/JetQGTaggerBDT.h" + +#include <TSystem.h> + +#include "InDetTrackSelectionTool/InDetTrackSelectionTool.h" + +#include "xAODTracking/VertexContainer.h" + +namespace CP { + + JetQGTaggerBDT::JetQGTaggerBDT( const std::string& name ) : + JSSTaggerBase( name ), + m_BDTmethod("BDT_method"), + m_trkSelectionTool(name+"_trackselectiontool", this) + { + + /// Jet kinematics + declareProperty( "JetPtMin", m_jetPtMin = 20000.0); + declareProperty( "JetPtMax", m_jetPtMax = 1500000.0); + m_jetEtaMax = 2.5; /// Replace base class default value for JetEtaMax + + /// Tagger configuration + m_calibArea = "BoostedJetTaggers/JetQGTaggerBDT/Oct18/"; /// Overwrite base class default + declareProperty( "TMVAConfigFile", m_tmvaConfigFileName="TMVAClassification_BDTQGTagger_Oct18_BDT.weights.xml"); + declareProperty( "UseJetVars", m_mode = 1); /// 0 uses the tracks. 1 uses variables from the jets (default) + + } + + StatusCode JetQGTaggerBDT::initialize() { + + ATH_MSG_INFO( "Initializing JetQGTaggerBDT tool" ); + + if( ! 
m_configFile.empty() ) { + ATH_MSG_INFO( "Using config file : "<< m_configFile ); + // check for the existence of the configuration file + std::string configPath; + configPath = PathResolverFindDataFile(("BoostedJetTaggers/"+m_configFile).c_str()); + + /* https://root.cern.ch/root/roottalk/roottalk02/5332.html */ + FileStat_t fStats; + int fSuccess = gSystem->GetPathInfo(configPath.c_str(), fStats); + if(fSuccess != 0){ + ATH_MSG_ERROR("Recommendations file could not be found : " << configPath); + return StatusCode::FAILURE; + } + else { + ATH_MSG_DEBUG("Recommendations file was found : "<<configPath); + } + + TEnv configReader; + if(configReader.ReadFile( configPath.c_str(), EEnvLevel(0) ) != 0 ) { + ATH_MSG_ERROR( "Error while reading config file : "<< configPath ); + return StatusCode::FAILURE; + } + + // get the CVMFS calib area where stuff is stored + m_calibArea = configReader.GetValue("CalibArea" ,""); + + // get the name/path of the JSON config + m_tmvaConfigFileName = configReader.GetValue("TMVAConfigFile" ,""); + + m_strScoreCut = configReader.GetValue("ScoreCut" ,""); + + ATH_MSG_INFO( "scoreCut: "<<m_strScoreCut ); + + } + // if the calibArea is specified to be "Local" then it looks in the same place as the top level configs + if( m_calibArea.empty() ){ + ATH_MSG_ERROR( "You need to specify where the calibArea is as either being Local or on CVMFS" ); + return StatusCode::FAILURE; + } + else if(m_calibArea.compare("Local")==0){ + std::string localCalibArea = "BoostedJetTaggers/share/JetQGTaggerBDT/"; + ATH_MSG_INFO( "Using Local calibArea " << localCalibArea ); + // convert the JSON config file name to the full path + m_tmvaConfigFilePath = PathResolverFindCalibFile(localCalibArea+m_tmvaConfigFileName); + } + else{ + ATH_MSG_INFO( "Using CVMFS calibArea" ); + // get the config file from CVMFS + // necessary because xml files are too large to house on the data space + m_tmvaConfigFilePath = PathResolverFindCalibFile( 
(m_calibArea+m_tmvaConfigFileName).c_str() ); + } + + /// Make sure score cut string is not empty + if(m_strScoreCut.empty()){ + ATH_MSG_ERROR( "Score cut function is empty!" ); + return StatusCode::FAILURE; + } + // set up InDet selection tool + ANA_CHECK( ASG_MAKE_ANA_TOOL( m_trkSelectionTool, InDet::InDetTrackSelectionTool ) ); + ANA_CHECK( m_trkSelectionTool.setProperty( "CutLevel", "Loose" ) ); + ANA_CHECK( m_trkSelectionTool.retrieve() ); + + // read json file for DNN weights + ATH_MSG_INFO( "BDT Tagger configured with: " << m_tmvaConfigFilePath ); + + // -- Initialize TMVA for BDTs + TMVA::Tools::Instance(); + m_bdtTagger = std::make_unique<TMVA::Reader>( "!Color:!Silent" ); + + m_bdtTagger->AddVariable( "NTracks", &m_ntracks); + m_bdtTagger->AddVariable( "TrackWidth", &m_trackwidth ); + m_bdtTagger->AddVariable( "JetPt", &m_pt ); + m_bdtTagger->AddVariable( "JetEta", &m_eta ); + m_bdtTagger->AddVariable( "TrackC1", &m_trackC1 ); + + // configure the bdt + m_bdtTagger->BookMVA( m_BDTmethod.c_str(), m_tmvaConfigFilePath.c_str() ); + + /// Call base class initialize + ATH_CHECK( JSSTaggerBase::initialize() ); + + return StatusCode::SUCCESS; + + } + + StatusCode JetQGTaggerBDT::tag( const xAOD::Jet& jet ) const { + + ATH_MSG_DEBUG( "Obtaining BDT QG result" ); + + /// Create asg::AcceptData object + asg::AcceptData acceptData( &m_acceptInfo ); + + /// Reset the AcceptData cut results + ATH_CHECK( resetCuts( acceptData ) ); + + /// Check basic kinematic selection + ATH_CHECK( checkKinRange( jet, acceptData ) ); + + /// Create WriteDecorHandles + SG::WriteDecorHandle<xAOD::JetContainer, bool> decTagged(m_decTaggedKey); + + /// TODO: Is this actually needed? 
+ if ( !passKinRange(jet) ) { + decTagged(jet) = false; + return StatusCode::SUCCESS; + } + + // get BDT score + float jet_score = getScore( jet, acceptData ); + ATH_MSG_DEBUG(TString::Format("jet score %g",jet_score) ); + + //get cut from cut function + float cut = m_funcScoreCut->Eval(jet.pt()/1000.); + + if ( jet_score < cut ) acceptData.setCutResult("QuarkJetTag", true); + decTagged(jet) = acceptData.getCutResult( "QuarkJetTag" ); + + // return the AcceptData object that you created and filled + return StatusCode::SUCCESS; + + } + + float JetQGTaggerBDT::getScore( const xAOD::Jet& jet, asg::AcceptData &acceptData ) const { + + /// Load the new values of the variables for this jet + bool validVars = getJetProperties( jet, acceptData ); + + /// evaluate bdt + float bdt_score(-666.); + if ( !validVars ) { + ATH_MSG_WARNING( "One (or more) tagger input variable has an undefined value (NaN), setting score to -666" ); + return bdt_score; + } + bdt_score = m_bdtTagger->EvaluateMVA( m_BDTmethod.c_str() ); + + return bdt_score; + } + + bool JetQGTaggerBDT::getJetProperties( const xAOD::Jet& jet, asg::AcceptData &acceptData ) const { + /* Update the jet substructure variables for this jet */ + + m_pt = jet.pt()/1000.0; + m_eta = jet.eta(); + + ATH_MSG_DEBUG( TString::Format("pT: %g, eta: %g",m_pt,m_eta) ); + + m_ntracks = -1.; + m_trackwidth = -1.; + m_trackC1 = -1.; + + bool validVars = true; + + if ( m_mode == 1 ) { + validVars = getPrecomputedVariables( jet, acceptData ); + } + else if( m_mode == 0 ) { + validVars = calculateVariables( jet, acceptData ); + } + + if ( !validVars ) { + ATH_MSG_ERROR( "Can't determine QG tagging variables! Try different mode." 
); + } + + return validVars; + + } + + bool JetQGTaggerBDT::getPrecomputedVariables( const xAOD::Jet& jet, asg::AcceptData &acceptData ) const { + + bool validVars = true; + + int ntrk = -1; + float trkWidth = -1.; + float trkC1 = -1.; + + if ( !jet.getAttribute<int>("DFCommonJets_QGTagger_NTracks", ntrk) ) { + if ( m_nWarnVar++ < m_nWarnMax ) ATH_MSG_WARNING( "Unable to retrieve DFCommonJets_QGTagger_NTracks" ); + else ATH_MSG_DEBUG( "Unable to retrieve DFCommonJets_QGTagger_NTracks" ); + acceptData.setCutResult("ValidEventContent", false); + validVars = false; + } + if ( !jet.getAttribute<float>("DFCommonJets_QGTagger_TracksWidth", trkWidth) ) { + if ( m_nWarnVar++ < m_nWarnMax )ATH_MSG_WARNING( "Unable to retrieve DFCommonJets_QGTagger_TracksWidth" ); + else ATH_MSG_DEBUG( "Unable to retrieve DFCommonJets_QGTagger_TracksWidth" ); + acceptData.setCutResult("ValidEventContent", false); + validVars = false; + } + if ( !jet.getAttribute<float>("DFCommonJets_QGTagger_TracksC1", trkC1) ) { + if ( m_nWarnVar++ < m_nWarnMax ) ATH_MSG_WARNING( "Unable to retrieve DFCommonJets_QGTagger_TracksC1" ); + else ATH_MSG_DEBUG( "Unable to retrieve DFCommonJets_QGTagger_TracksC1" ); + acceptData.setCutResult("ValidEventContent", false); + validVars = false; + } + + m_ntracks = (float) ntrk; + m_trackwidth = trkWidth; + m_trackC1 = trkC1; + + return validVars; + + } + + bool JetQGTaggerBDT::calculateVariables( const xAOD::Jet& jet, asg::AcceptData &acceptData ) const { + //calculate q/g tagging variables from GhostTracks associated to jet + //some derivations apply slimming to these tracks, which would lead to wrong values. + //so we compare the number of GhostTracks to NumTrkPt500 (i.e. 
nTracks) + // if they are "close enough" we can proceed + + bool validVars = true; + bool isValid = true; + const xAOD::Vertex* primvertex {nullptr}; + + const xAOD::VertexContainer* vxCont = nullptr; + if ( evtStore()->retrieve( vxCont, "PrimaryVertices" ).isFailure() ) { + if ( m_nWarnVar++ < m_nWarnMax ) ATH_MSG_WARNING( "Unable to retrieve primary vertex container PrimaryVertices" ); + else ATH_MSG_DEBUG( "Unable to retrieve primary vertex container PrimaryVertices" ); + acceptData.setCutResult("ValidEventContent", false); + isValid = false; + } + else if ( vxCont->empty() ) { + if ( m_nWarnVar++ < m_nWarnMax ) ATH_MSG_WARNING( "Event has no primary vertices!" ); + ATH_MSG_DEBUG( "Event has no primary vertices!" ); + acceptData.setCutResult("ValidEventContent", false); + isValid = false; + } + else { + for ( const auto& vx : *vxCont ) { + // take the first vertex in the list that is a primary vertex + if ( vx->vertexType()==xAOD::VxType::PriVtx ) { + primvertex = vx; + break; + } + } + } + if ( !primvertex ) isValid = false; + + if ( !isValid ) { + validVars = false; + return validVars; + } + + //NTracks + std::vector<int> nTrkVec; + if(jet.getAttribute(xAOD::JetAttribute::NumTrkPt500, nTrkVec)){ + ATH_MSG_DEBUG(nTrkVec.size()); + m_ntracks = (float) nTrkVec[primvertex->index()]; + } + else + //if NumTrkPt500 is not available, I can't confirm that the number of GhostTracks is correct (i.e. 
unslimmed) + validVars = false; + + //TrackWidth + bool undefTrackWidth = false; + std::vector<float> trkWidthVec; + if(jet.getAttribute(xAOD::JetAttribute::TrackWidthPt500, trkWidthVec)){ + ATH_MSG_DEBUG(trkWidthVec.size()); + m_trackwidth = trkWidthVec[primvertex->index()]; + } + else + //if TrackWidthPt500 is not available, we can maybe calculate it from tracks + undefTrackWidth = true; + float weightedwidth = 0.; + + //TrackC1 + float beta = 0.2; + float weightedwidth2 = 0.; + float sumPt = 0.; + + std::vector<const xAOD::TrackParticle*> trackParttmp; + if(!jet.getAssociatedObjects("GhostTrack",trackParttmp)){ + ATH_MSG_ERROR("This jet has no associated objects"); + validVars = false; + } + //track selection + for(unsigned i=trackParttmp.size();i>0; i--){ + if(!trackParttmp[i-1]){ + trackParttmp.erase(trackParttmp.begin()+i-1); + continue; + } + const xAOD::TrackParticle* trk = static_cast<const xAOD::TrackParticle*>(trackParttmp[i-1]); + bool accept = (trk->pt()>500 && + m_trkSelectionTool->accept(*trk) + // TODO: Implement alternative to TrackParticle::vertex() + //&& (trk->vertex()==primvertex || (!trk->vertex() && std::abs((trk->z0()+trk->vz()-primvertex->z())*sin(trk->theta()))<3.)) + ); + if (!accept){ + trackParttmp.erase(trackParttmp.begin()+i-1); + } + } + + if(! 
isCorrectNumberOfTracks(m_ntracks,trackParttmp.size())){ + ATH_MSG_ERROR("Number of ghost associated tracks wrong!"); + validVars = false; + } + + //calculate TrackC1 (and TrackWidth if necessary) + for(unsigned i=0; i<trackParttmp.size(); i++){ + double ipt = trackParttmp.at(i)->pt(); + double ieta = trackParttmp.at(i)->eta(); + double iphi = trackParttmp.at(i)->phi(); + sumPt += ipt; + if(undefTrackWidth){ + double deta_i = trackParttmp.at(i)->eta() - jet.eta(); + double dphi_i = TVector2::Phi_mpi_pi(trackParttmp.at(i)->phi() - jet.phi()); + double dR_i = sqrt( deta_i*deta_i + dphi_i*dphi_i ); + weightedwidth += ipt * dR_i; + } + + for(unsigned j=i+1; j<trackParttmp.size(); j++){ + double deta = ieta - trackParttmp.at(j)->eta(); + double dphi = TVector2::Phi_mpi_pi(iphi - trackParttmp.at(j)->phi()); + double dR = sqrt( deta*deta + dphi*dphi ); + weightedwidth2 += ipt * trackParttmp.at(j)->pt() * pow(dR,beta); + } + } + + if(undefTrackWidth) + m_trackwidth = sumPt>0 ? weightedwidth/sumPt : -0.1; + m_trackC1 = sumPt>0 ? weightedwidth2/(sumPt*sumPt) : -0.1; + + return validVars; + } + + bool JetQGTaggerBDT::isCorrectNumberOfTracks(int expectedNTracks, int nTracksFromGhostTracks) const{ + //some derivations do not store all tracks associated to the jet. + //In this case the calculation of the tagging variables will be wrong. + //The requirements are fairly loose, because a few tracks may get lost in the derivation production. + //But it will fail quickly if the too many tracks were slimmed away. 
+ if(nTracksFromGhostTracks == 0){ + if(expectedNTracks == 0) + return true; + if(abs(expectedNTracks-nTracksFromGhostTracks) < 3) + return true; + else + return false; + }else if(expectedNTracks/nTracksFromGhostTracks < 0.5 && abs(expectedNTracks-nTracksFromGhostTracks) > 5){ + return false; + } + return true; + } + +} /* namespace CP */ + +// the end diff --git a/Reconstruction/Jet/BoostedJetTaggers/Root/SmoothedWZTagger.cxx b/Reconstruction/Jet/BoostedJetTaggers/Root/SmoothedWZTagger.cxx new file mode 100644 index 0000000000000000000000000000000000000000..9be6b5c68269904ed50f8d980a42f667035c7033 --- /dev/null +++ b/Reconstruction/Jet/BoostedJetTaggers/Root/SmoothedWZTagger.cxx @@ -0,0 +1,365 @@ +/* + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +*/ + +#include "BoostedJetTaggers/SmoothedWZTagger.h" + +#include "xAODTracking/VertexContainer.h" + +SmoothedWZTagger::SmoothedWZTagger( const std::string& name ) : + JSSTaggerBase( name ) +{ + + /// Functional forms for cuts + declareProperty( "MassCutLowFunc", m_strMassCutLow = "", "Lower mass cut"); + declareProperty( "MassCutHighFunc", m_strMassCutHigh = "", "Higher mass cut"); + declareProperty( "D2CutFunc", m_strD2Cut = "", "Upper cut on D2"); + declareProperty( "NtrkCutFunc", m_strNtrkCut = "", "Upper cut on Ntrk"); + +} + +StatusCode SmoothedWZTagger::initialize() { + + ATH_MSG_INFO( "Initializing SmoothedWZTagger tool" ); + + /// Pt values are defined in GeV + m_ptGeV = true; + + /// Use mass cut + m_useMassCut = true; + + if ( ! 
m_configFile.empty() ) { + + /// Get configReader + ATH_CHECK( getConfigReader() ); + + if ( m_wkpt.empty() ) { + m_strMassCutLow = m_configReader.GetValue("MassCutLow", ""); + m_strMassCutHigh = m_configReader.GetValue("MassCutHigh", ""); + m_strD2Cut = m_configReader.GetValue("D2Cut", ""); + m_strNtrkCut = m_configReader.GetValue("NtrkCut", ""); + } + else { + m_strMassCutLow = m_configReader.GetValue((m_wkpt+".MassCutLow").c_str(), ""); + m_strMassCutHigh = m_configReader.GetValue((m_wkpt+".MassCutHigh").c_str(), ""); + m_strD2Cut = m_configReader.GetValue((m_wkpt+".D2Cut").c_str(), ""); + m_strNtrkCut = m_configReader.GetValue((m_wkpt+".NtrkCut").c_str(), ""); + } + + /// Get min and max jet pt + m_jetPtMin = m_configReader.GetValue("pTCutLow", 200.0); + m_jetPtMax = m_configReader.GetValue("pTCutHigh", 4000.0); + + /// Get the decoration name + m_decorationName = m_configReader.GetValue("DecorationName", ""); + + /// Get the scale factor configuration + m_calcSF = m_configReader.GetValue("CalcSF", false); + if ( m_calcSF ) { + m_weightDecorationName = m_configReader.GetValue("WeightDecorationName", ""); + m_weightFileName = m_configReader.GetValue("WeightFile", ""); + m_weightHistogramName = m_configReader.GetValue("WeightHistogramName", ""); + m_efficiencyHistogramName = m_configReader.GetValue("EfficiencyHistogramName", ""); + m_weightFlavors = m_configReader.GetValue("WeightFlavors", ""); + + /// Get truth label name information + m_truthLabelName = m_configReader.GetValue("TruthLabelName", "R10TruthLabel_R21Consolidated"); + + if ( m_calibArea.compare("Local") == 0 ) { + m_weightConfigPath = PathResolverFindCalibFile(("$WorkDir_DIR/data/BoostedJetTaggers/SmoothedWZTaggers/"+m_weightFileName).c_str()); + } + else if ( m_calibArea.find("eos") != std::string::npos ) { + m_weightConfigPath = PathResolverFindCalibFile((m_calibArea+"/"+m_weightFileName).c_str()); + } + else { + m_weightConfigPath = 
PathResolverFindCalibFile(("BoostedJetTaggers/"+m_calibArea+"/"+m_weightFileName).c_str()); + } + } + + } + else { /// No config file + /// Assume the cut functions have been set through properties. + /// Check they are non empty + if( m_strD2Cut.empty() || m_strMassCutLow.empty() || m_strMassCutHigh.empty() || + ((m_weightDecorationName.empty() || + m_weightHistogramName.empty() || + m_weightFlavors.empty()) && m_calcSF) ) { + ATH_MSG_ERROR( "No config file provided AND no parameters specified." ) ; + return StatusCode::FAILURE; + } + } + + /// Set flag to indicate if Ntrk cut is used + m_useNtrk = !m_strNtrkCut.empty(); + + /// Transform these strings into functions + m_funcD2Cut = std::make_unique<TF1>("strD2Cut", m_strD2Cut.c_str(), 0, 14000); + if ( m_useNtrk ) m_funcNtrkCut = std::make_unique<TF1>("strNtrkCut", m_strNtrkCut.c_str(), 0, 14000); + + ATH_MSG_INFO( "Smoothed WZ Tagger tool initialized" ); + ATH_MSG_INFO( " Mass cut low : " << m_strMassCutLow ); + ATH_MSG_INFO( " Mass cut High : " << m_strMassCutHigh ); + ATH_MSG_INFO( " D2 cut low : " << m_strD2Cut ); + if ( m_useNtrk ) + ATH_MSG_INFO( " Ntrk cut low : " << m_strNtrkCut ); + ATH_MSG_INFO( " DecorationName : " << m_decorationName ); + if ( m_calcSF ) { + ATH_MSG_INFO( "weightDecorationName : " << m_weightDecorationName ); + ATH_MSG_INFO( "weightFile : " << m_weightFileName ); + ATH_MSG_INFO( "weightHistogramName : " << m_weightHistogramName ); + ATH_MSG_INFO( "efficiencyHistogramName : " << m_efficiencyHistogramName ); + ATH_MSG_INFO( "weightFlavors : " << m_weightFlavors ); + ATH_MSG_INFO( "TruthLabelName : " << m_truthLabelName ); + } + ATH_MSG_INFO( " Pt cut low : " << m_jetPtMin ); + ATH_MSG_INFO( " Pt cut high : " << m_jetPtMax ); + + /// Set the possible states that the tagger can be left in after the JSSTaggerBase::tag() function is called + m_acceptInfo.addCut( "PassMassLow", "mJet > mCutLow" ); + m_acceptInfo.addCut( "PassMassHigh", "mJet < mCutHigh" ); + m_acceptInfo.addCut( "PassD2", 
"D2Jet < D2Cut" ); + if ( m_useNtrk ) { + m_acceptInfo.addCut( "PassNtrk", "NtrkJet < NtrkCut" ); + } + + /// Loop over and print out the cuts that have been configured + printCuts(); + + /// Call base class initialize + ATH_CHECK( JSSTaggerBase::initialize() ); + + /// Initialize additional decorators + ATH_MSG_INFO( "Additional decorators that will be attached to jet :" ); + + m_decPassD2Key = m_containerName + "." + m_decorationName + "_" + m_decPassD2Key.key(); + m_decCutD2Key = m_containerName + "." + m_decorationName + "_" + m_decCutD2Key.key(); + + ATH_CHECK( m_decPassD2Key.initialize() ); + ATH_CHECK( m_decCutD2Key.initialize() ); + + ATH_MSG_INFO( " " << m_decPassD2Key.key() << " : pass D2 cut" ); + ATH_MSG_INFO( " " << m_decCutD2Key.key() << " : D2 cut" ); + + if ( m_useNtrk ) { + + m_decPassNtrkKey = m_containerName + "." + m_decorationName + "_" + m_decPassNtrkKey.key(); + m_decCutNtrkKey = m_containerName + "." + m_decorationName + "_" + m_decCutNtrkKey.key(); + + ATH_CHECK( m_decPassNtrkKey.initialize() ); + ATH_CHECK( m_decCutNtrkKey.initialize() ); + + ATH_MSG_INFO( " " << m_decPassNtrkKey.key() << " : pass Ntrk cut" ); + ATH_MSG_INFO( " " << m_decCutNtrkKey.key() << " : Ntrk cut" ); + + } + + if ( m_calcSF ) { + + m_decAcceptKey = m_containerName + "." 
+ m_decorationName + "_" + m_decAcceptKey.key(); + ATH_CHECK( m_decAcceptKey.initialize() ); + + } + + return StatusCode::SUCCESS; + +} + +StatusCode SmoothedWZTagger::tag( const xAOD::Jet& jet ) const { + + ATH_MSG_DEBUG( "Obtaining Smooth WZ result" ); + + /// Create asg::AcceptData object + asg::AcceptData acceptData( &m_acceptInfo ); + + /// Reset the AcceptData cut results + ATH_CHECK( resetCuts( acceptData ) ); + + /// Check basic kinematic selection + ATH_CHECK( checkKinRange( jet, acceptData ) ); + + /// Get the relevant attributes of the jet + /// Mass and pt - note that this will depend on the configuration of the calibration used + float jet_pt = jet.pt()/1000.0; + float jet_mass = jet.m()/1000.0; + + /// Initialize d2 to 0. This probably gets used when the jet has one constituent, so it will fail the mass cut anyways + float jet_d2 = 0; + + /// Calculate NSubjettiness and ECF ratios + calculateJSSRatios(jet); + + /// Create D2 read decor handle + SG::ReadDecorHandle<xAOD::JetContainer, float> readD2(m_readD2Key); + + /// Get D2 value + jet_d2 = readD2(jet); + + /// Evaluate the values of the upper and lower mass bounds and the d2 cut + float cut_mass_low = m_funcMassCutLow ->Eval(jet_pt); + float cut_mass_high = m_funcMassCutHigh->Eval(jet_pt); + float cut_d2 = m_funcD2Cut ->Eval(jet_pt); + + /// Decorate the cut values + + /// Create WriteDecorHandles + SG::WriteDecorHandle<xAOD::JetContainer, bool> decPassMass(m_decPassMassKey); + SG::WriteDecorHandle<xAOD::JetContainer, bool> decPassD2(m_decPassD2Key); + SG::WriteDecorHandle<xAOD::JetContainer, bool> decTagged(m_decTaggedKey); + SG::WriteDecorHandle<xAOD::JetContainer, float> decCutMLow(m_decCutMLowKey); + SG::WriteDecorHandle<xAOD::JetContainer, float> decCutMHigh(m_decCutMHighKey); + SG::WriteDecorHandle<xAOD::JetContainer, float> decCutD2(m_decCutD2Key); + + /// Decorate values + decCutMLow(jet) = cut_mass_low; + decCutMHigh(jet) = cut_mass_high; + decCutD2(jet) = cut_d2; + + /// Evaluate the cut 
criteria on mass and d2 + ATH_MSG_VERBOSE( "Cut Values : MassWindow = [" << cut_mass_low << "," << cut_mass_high << "], D2Cut = " << cut_d2 ); + ATH_MSG_VERBOSE( "Cut Values : JetMass = " << jet_mass << ", D2 = " << jet_d2 ); + + if ( jet_mass >= cut_mass_low ) acceptData.setCutResult( "PassMassLow", true ); + + if ( jet_mass <= cut_mass_high ) acceptData.setCutResult( "PassMassHigh", true ); + + if ( jet_d2 < cut_d2 ) acceptData.setCutResult( "PassD2", true ); + + decPassMass(jet) = acceptData.getCutResult( "PassMassLow" ) && acceptData.getCutResult( "PassMassHigh" ); + decPassD2(jet) = acceptData.getCutResult( "PassD2" ); + + bool passCuts = acceptData.getCutResult( "PassMassLow" ) && acceptData.getCutResult( "PassMassHigh" ); + passCuts = passCuts && acceptData.getCutResult( "PassD2" ); + + /// Check if it's a smooth three-variable tagger (ntrk) + if ( m_useNtrk ) { + + float cut_ntrk = m_funcNtrkCut->Eval(jet_pt); + + /// Decorate Ntrk cut value + + /// Create WriteDecorHandles + SG::WriteDecorHandle<xAOD::JetContainer, bool> decValidJetContent(m_decValidJetContentKey); + SG::WriteDecorHandle<xAOD::JetContainer, bool> decValidEventContent(m_decValidEventContentKey); + SG::WriteDecorHandle<xAOD::JetContainer, bool> decPassNtrk(m_decPassNtrkKey); + SG::WriteDecorHandle<xAOD::JetContainer, float> decCutNtrk(m_decCutNtrkKey); + + /// Decorate values + decCutNtrk(jet) = cut_ntrk; + + /// Get the primary vertex + bool validVtx = false; + const xAOD::Vertex* primaryVertex = 0; + + const xAOD::VertexContainer* vxCont = 0; + if ( evtStore()->retrieve( vxCont, "PrimaryVertices" ).isFailure() ) { + ATH_MSG_WARNING( "Unable to retrieve primary vertex container PrimaryVertices" ); + validVtx = false; + } + else { + for ( const auto& vx : *vxCont ) { + if ( vx->vertexType()==xAOD::VxType::PriVtx ) { + primaryVertex = vx; + break; + } + } + + if ( primaryVertex ) validVtx = true; + + } + + if ( validVtx ) { + static SG::AuxElement::Accessor<ElementLink<xAOD::JetContainer> > 
ungroomedLink("Parent"); + const xAOD::Jet * ungroomedJet = 0; + + if ( ungroomedLink.isAvailable(jet) ) { + ElementLink<xAOD::JetContainer> linkToUngroomed = ungroomedLink(jet); + if ( linkToUngroomed.isValid() ) { + ungroomedJet = *linkToUngroomed; + + static SG::AuxElement::ConstAccessor< std::vector<int> >acc_Ntrk("NumTrkPt500"); + + if ( acc_Ntrk.isAvailable(*ungroomedJet) ) { + + const std::vector<int> NTrkPt500 = acc_Ntrk(*ungroomedJet); + + int jet_ntrk = NTrkPt500.at(primaryVertex->index()); + jet.auxdecor<int>("ParentJetNTrkPt500") = jet_ntrk; + + if ( jet_ntrk < cut_ntrk ) acceptData.setCutResult( "PassNtrk", true ); + decPassNtrk(jet) = acceptData.getCutResult( "PassNtrk" ); + passCuts = passCuts && acceptData.getCutResult( "PassNtrk" ); + + } + else { + acceptData.setCutResult( "ValidJetContent", false ); + decValidJetContent(jet) = false; + ATH_MSG_ERROR( "Unable to retrieve Ntrk of the ungroomed parent jet. Please make sure this variable is in your derivations!!!" ); + return StatusCode::FAILURE; + } + } + else { + acceptData.setCutResult( "ValidJetContent", false ); + decValidJetContent(jet) = false; + ATH_MSG_ERROR( "Unable to retrieve the parent ungroomed jet. Please make sure this variable is in your derivations!!!" ); + return StatusCode::FAILURE; + } + } + else { + acceptData.setCutResult( "ValidJetContent", false ); + decValidJetContent(jet) = false; + ATH_MSG_ERROR( "Unable to retrieve the link to the parent ungroomed jet. Please make sure this variable is in your derivations!!!" 
); + return StatusCode::FAILURE; + } + } + else { + acceptData.setCutResult( "ValidEventContent", false ); + } + + decValidJetContent(jet) = acceptData.getCutResult( "ValidJetContent" ); + decValidEventContent(jet) = acceptData.getCutResult( "ValidEventContent" ); + + } + + /// Decorate jet with tagging summary + decTagged(jet) = passCuts; + + /// Get enum to decorate acceptData state if only using 2-var tagger + TagResult::TypeEnum myCutResultForSF = TagResult::UNKNOWN; + if ( !m_useNtrk ) { + /// Pass mass cut + if ( acceptData.getCutResult("PassMassLow") && acceptData.getCutResult("PassMassHigh") ) { + if ( acceptData.getCutResult("PassD2") ) { + myCutResultForSF = TagResult::passMpassD2_2Var; + } + else { + myCutResultForSF = TagResult::passMfailD2_2Var; + } + } + /// Fail mass cut + else { + if ( acceptData.getCutResult("PassD2") ) { + myCutResultForSF = TagResult::failMpassD2_2Var; + } + else { + myCutResultForSF = TagResult::failMfailD2_2Var; + } + } + } + + /// Get SF weight + ATH_CHECK( getWeight( jet, (bool)acceptData, acceptData ) ); + + if ( m_calcSF ) { + + /// Create WriteDecorHandles + SG::WriteDecorHandle<xAOD::JetContainer, float> decAccept(m_decAcceptKey); + + /// Decorate values + decAccept(jet) = myCutResultForSF; + + } + + return StatusCode::SUCCESS; + +} + diff --git a/Reconstruction/Jet/BoostedJetTaggers/src/components/BoostedJetTaggers_entries.cxx b/Reconstruction/Jet/BoostedJetTaggers/src/components/BoostedJetTaggers_entries.cxx new file mode 100644 index 0000000000000000000000000000000000000000..46588c6f64d250edfafb95a13bd8650eaead7361 --- /dev/null +++ b/Reconstruction/Jet/BoostedJetTaggers/src/components/BoostedJetTaggers_entries.cxx @@ -0,0 +1,12 @@ +#include "BoostedJetTaggers/SmoothedWZTagger.h" +#include "BoostedJetTaggers/JetQGTagger.h" +#include "BoostedJetTaggers/JetQGTaggerBDT.h" +#include "BoostedJetTaggers/JSSWTopTaggerDNN.h" +#include "BoostedJetTaggers/JSSWTopTaggerANN.h" + +DECLARE_COMPONENT(SmoothedWZTagger) 
+DECLARE_COMPONENT(JSSWTopTaggerDNN) +DECLARE_COMPONENT(JSSWTopTaggerANN) +DECLARE_COMPONENT(CP::JetQGTagger) +DECLARE_COMPONENT(CP::JetQGTaggerBDT) + diff --git a/Reconstruction/Jet/BoostedJetTaggers/util/test_JSSWTopTaggerANN.cxx b/Reconstruction/Jet/BoostedJetTaggers/util/test_JSSWTopTaggerANN.cxx new file mode 100644 index 0000000000000000000000000000000000000000..c83665f097027549fe816229f3cd2160a55409be --- /dev/null +++ b/Reconstruction/Jet/BoostedJetTaggers/util/test_JSSWTopTaggerANN.cxx @@ -0,0 +1,289 @@ +/* + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +*/ + +// System include(s): +#include <string> + +// ROOT include(s): +#include <TFile.h> +#include <TString.h> +#include <TTree.h> +#include <TChain.h> + +// Infrastructure include(s): +#ifdef ROOTCORE +# include "xAODRootAccess/Init.h" +# include "xAODRootAccess/TEvent.h" +#endif // ROOTCORE + +// EDM include(s): +#include "xAODCore/ShallowAuxContainer.h" +#include "xAODCore/ShallowCopy.h" +#include "xAODCore/tools/IOStats.h" + +// Tool testing include(s): +#include "BoostedJetTaggers/JSSWTopTaggerANN.h" +#include "JetUncertainties/JetUncertaintiesTool.h" + +#include "AsgMessaging/MessageCheck.h" + +// messaging +ANA_MSG_HEADER(Test) +ANA_MSG_SOURCE(Test, "BoostedJetTaggers") +using namespace Test; + +int main( int argc, char* argv[] ) { + + ANA_CHECK_SET_TYPE (int); // makes ANA_CHECK return ints if exiting function + + // The application's name: + char* APP_NAME = argv[ 0 ]; + + // arguments + TString fileName = "/eos/atlas/atlascerngroupdisk/perf-jets/ReferenceFiles/mc16_13TeV.361028.Pythia8EvtGen_A14NNPDF23LO_jetjet_JZ8W.deriv.DAOD_FTAG1.e3569_s3126_r9364_r9315_p3260/DAOD_FTAG1.12133096._000074.pool.root.1"; + int ievent=-1; + int nevents=-1; + bool m_isMC=true; + bool verbose=false; + + + Info( APP_NAME, "==============================================" ); + Info( APP_NAME, "Usage: $> %s [xAOD file name]", APP_NAME ); + Info( APP_NAME, " $> %s | Run on default file", 
APP_NAME ); + Info( APP_NAME, " $> %s -f X | Run on xAOD file X", APP_NAME ); + Info( APP_NAME, " $> %s -n X | X = number of events you want to run on", APP_NAME ); + Info( APP_NAME, " $> %s -e X | X = specific number of the event to run on - for debugging", APP_NAME ); + Info( APP_NAME, " $> %s -d X | X = dataset ID", APP_NAME ); + Info( APP_NAME, " $> %s -m X | X = isMC", APP_NAME ); + Info( APP_NAME, " $> %s -v | run in verbose mode ", APP_NAME ); + Info( APP_NAME, "==============================================" ); + + // Check if we received a file name: + if( argc < 2 ) { + Info( APP_NAME, "No arguments - using default file" ); + Info( APP_NAME, "Executing on : %s", fileName.Data() ); + } + + //////////////////////////////////////////////////// + //::: parse the options + //////////////////////////////////////////////////// + std::string options; + for( int i=0; i<argc; i++){ + options+=(argv[i]); + } + + if(options.find("-f")!=std::string::npos){ + for( int ipos=0; ipos<argc ; ipos++ ) { + if(std::string(argv[ipos]).compare("-f")==0){ + fileName = argv[ipos+1]; + Info( APP_NAME, "Argument (-f) : Running on file # %s", fileName.Data() ); + break; + } + } + } + + if(options.find("-event")!=std::string::npos){ + for( int ipos=0; ipos<argc ; ipos++ ) { + if(std::string(argv[ipos]).compare("-event")==0){ + ievent = atoi(argv[ipos+1]); + Info( APP_NAME, "Argument (-event) : Running only on event # %i", ievent ); + break; + } + } + } + + if(options.find("-m")!=std::string::npos){ + for( int ipos=0; ipos<argc ; ipos++ ) { + if(std::string(argv[ipos]).compare("-m")==0){ + m_isMC = atoi(argv[ipos+1]); + Info( APP_NAME, "Argument (-m) : IsMC = %i", m_isMC ); + break; + } + } + } + + if(options.find("-n")!=std::string::npos){ + for( int ipos=0; ipos<argc ; ipos++ ) { + if(std::string(argv[ipos]).compare("-n")==0){ + nevents = atoi(argv[ipos+1]); + Info( APP_NAME, "Argument (-n) : Running on NEvents = %i", nevents ); + break; + } + } + } + + 
if(options.find("-v")!=std::string::npos){ + verbose=true; + Info( APP_NAME, "Argument (-v) : Setting verbose"); + } + + + //////////////////////////////////////////////////// + //::: initialize the application and get the event + //////////////////////////////////////////////////// + ANA_CHECK( xAOD::Init( APP_NAME ) ); + StatusCode::enableFailure(); + + // Open the input file: + TFile* ifile( TFile::Open( fileName, "READ" ) ); + if( !ifile ) Error( APP_NAME, "Cannot find file %s",fileName.Data() ); + + TChain *chain = new TChain ("CollectionTree","CollectionTree"); + chain->Add(fileName); + + // Create a TEvent object: + xAOD::TEvent event( (TTree*)chain, xAOD::TEvent::kAthenaAccess ); + Info( APP_NAME, "Number of events in the file: %i", static_cast< int >( event.getEntries() ) ); + + // Create a transient object store. Needed for the tools. + xAOD::TStore store; + + // Decide how many events to run over: + Long64_t entries = event.getEntries(); + + // Fill a validation true with the tag return value + std::unique_ptr<TFile> outputFile(TFile::Open("output_JSSWTopTaggerANN.root", "recreate")); + int pass,truthLabel,idx; + float sf,pt,eta,m; + TTree* Tree = new TTree( "tree", "test_tree" ); + Tree->Branch( "pass", &pass, "pass/I" ); + Tree->Branch( "sf", &sf, "sf/F" ); + Tree->Branch( "pt", &pt, "pt/F" ); + Tree->Branch( "m", &m, "m/F" ); + Tree->Branch( "eta", &eta, "eta/F" ); + Tree->Branch( "idx", &idx, "idx/I" ); + Tree->Branch( "truthLabel", &truthLabel, "truthLabel/I" ); + + std::unique_ptr<JetUncertaintiesTool> m_jetUncToolSF(new JetUncertaintiesTool(("JetUncProvider_SF"))); + ANA_CHECK( m_jetUncToolSF->setProperty("JetDefinition", "AntiKt10LCTopoTrimmedPtFrac5SmallR20") ); + ANA_CHECK( m_jetUncToolSF->setProperty("Path", "/eos/user/g/gang/public/BoostedJetTaggers/JSSWTopTaggerANN/") ); + ANA_CHECK( m_jetUncToolSF->setProperty("ConfigFile", "TagSFUncert_JSSANNTagger_AntiKt10LCTopoTrimmed.config") ); + ANA_CHECK( m_jetUncToolSF->setProperty("MCType", 
"MC16a") ); + ANA_CHECK( m_jetUncToolSF->initialize() ); + + std::vector<std::string> pulls = {"__1down", "__1up"}; + CP::SystematicSet jetUnc_sysSet = m_jetUncToolSF->recommendedSystematics(); + const std::set<std::string> sysNames = jetUnc_sysSet.getBaseNames(); + std::vector<CP::SystematicSet> m_jetUnc_sysSets; + for (std::string sysName: sysNames) { + for (std::string pull : pulls) { + std::string sysPulled = sysName + pull; + m_jetUnc_sysSets.push_back(CP::SystematicSet(sysPulled)); + } + } + + //////////////////////////////////////////// + /////////// START TOOL SPECIFIC //////////// + //////////////////////////////////////////// + + //////////////////////////////////////////////////// + //::: Tool setup + // setup the tool handle as per the + // recommendation by ASG - https://twiki.cern.ch/twiki/bin/view/AtlasProtected/AthAnalysisBase#How_to_use_AnaToolHandle + //////////////////////////////////////////////////// + std::cout<<"Initializing JSSWTopTaggerANN Tagger"<<std::endl; + asg::AnaToolHandle<JSSWTopTaggerANN> m_Tagger; //! 
+ ASG_SET_ANA_TOOL_TYPE( m_Tagger, JSSWTopTaggerANN); + m_Tagger.setName("MyTagger"); + if(verbose) ANA_CHECK( m_Tagger.setProperty("OutputLevel", MSG::DEBUG) ); + ANA_CHECK( m_Tagger.setProperty( "CalibArea", "/eos/user/g/gang/public/BoostedJetTaggers/JSSWTopTaggerANN/") ); + ANA_CHECK( m_Tagger.setProperty( "ConfigFile", "JSSANNTagger_test.dat") ); + ANA_CHECK( m_Tagger.setProperty("TruthJetContainerName", "AntiKt10TruthTrimmedPtFrac5SmallR20Jets") ); + ANA_CHECK( m_Tagger.setProperty("IsMC", m_isMC) ); + ANA_CHECK( m_Tagger.retrieve() ); + + + std::cout << "Total Events in File : " << entries << std::endl; + + //////////////////////////////////////////////////// + // Loop over the events + //////////////////////////////////////////////////// + for( Long64_t entry = 0; entry < entries; ++entry ) { + + if( nevents!=-1 && entry > nevents ) break; + // Tell the object which entry to look at: + event.getEntry( entry ); + + // Print some event information + const xAOD::EventInfo* evtInfo = 0; + if(event.retrieve( evtInfo, "EventInfo" ) != StatusCode::SUCCESS){ + continue; + } + if(ievent!=-1 && static_cast <int> (evtInfo->eventNumber())!=ievent) { + continue; + } + + // Get the jets + const xAOD::JetContainer* myJets = 0; + if( event.retrieve( myJets, "AntiKt10LCTopoTrimmedPtFrac5SmallR20Jets" ) != StatusCode::SUCCESS) + continue ; + + // Loop over jet container + std::pair< xAOD::JetContainer*, xAOD::ShallowAuxContainer* > jets_shallowCopy = xAOD::shallowCopyContainer( *myJets ); + std::unique_ptr<xAOD::JetContainer> shallowJets(jets_shallowCopy.first); + std::unique_ptr<xAOD::ShallowAuxContainer> shallowAux(jets_shallowCopy.second); + idx=0; + for( xAOD::Jet* jetSC : *shallowJets ){ + + ANA_CHECK( m_Tagger->tag( *jetSC ) ); + if(verbose) { + std::cout << "Testing ANN Tagger " << std::endl; + std::cout << "jet pt = " << jetSC->pt() << std::endl; + std::cout << "RunningTag : " << jetSC->getAttribute<bool>("ANNTagger_Tagged") << std::endl; + std::cout << "Printing jet 
score : " << jetSC->auxdata<float>("ANNTagger_Score") << std::endl; + std::cout << "result masspass = " << jetSC->getAttribute<bool>("ANNTagger_PassMass") << std::endl; + } + truthLabel = jetSC->auxdata<int>("R10TruthLabel_R21Consolidated"); + + pass = jetSC->getAttribute<bool>("ANNTagger_Tagged"); + sf = jetSC->auxdata<float>("ANNTagger_SF"); + pt = jetSC->pt(); + m = jetSC->m(); + eta = jetSC->eta(); + + Tree->Fill(); + idx++; + if ( m_isMC ){ + if ( pt/1.e3 > 350 && std::abs(eta) < 2.0 && pass ) { + bool validForUncTool = ( pt/1.e3 >= 150 && pt/1.e3 < 2500 ); + validForUncTool &= ( m/pt >= 0 && m/pt <= 1 ); + validForUncTool &= ( std::abs(eta) < 2 ); + std::cout << "Nominal SF=" << sf << " truthLabel=" << truthLabel << " (1: t->qqb)" << std::endl; + if( validForUncTool ){ + for ( CP::SystematicSet sysSet : m_jetUnc_sysSets ){ + ANA_CHECK( m_Tagger->tag( *jetSC ) ); + ANA_CHECK( m_jetUncToolSF->applySystematicVariation(sysSet) ); + ANA_CHECK( m_jetUncToolSF->applyCorrection(*jetSC) ); + std::cout << sysSet.name() << " " << jetSC->auxdata<float>("ANNTagger_SF") << std::endl; + } + } + } + } + } + + Info( APP_NAME, "===>>> done processing event #%i, run #%i %i events processed so far <<<===", static_cast< int >( evtInfo->eventNumber() ), static_cast< int >( evtInfo->runNumber() ), static_cast< int >( entry + 1 ) ); + } + + //////////////////////////////////////////// + /////////// END TOOL SPECIFIC ////////////// + //////////////////////////////////////////// + + // write the tree to the output file + outputFile->cd(); + Tree->Write(); + outputFile->Close(); + + // cleanup + delete chain; + + // print the branches that were used for help with smart slimming + std::cout<<std::endl<<std::endl; + std::cout<<"Smart Slimming Checker :"<<std::endl; + xAOD::IOStats::instance().stats().printSmartSlimmingBranchList(); + std::cout<<std::endl<<std::endl; + + return 0; + +} + diff --git a/Reconstruction/Jet/BoostedJetTaggers/util/test_JSSWTopTaggerDNN.cxx 
b/Reconstruction/Jet/BoostedJetTaggers/util/test_JSSWTopTaggerDNN.cxx new file mode 100644 index 0000000000000000000000000000000000000000..5b53f927b3e2e08797271bb8e6270de5729b87c5 --- /dev/null +++ b/Reconstruction/Jet/BoostedJetTaggers/util/test_JSSWTopTaggerDNN.cxx @@ -0,0 +1,296 @@ +/* + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +*/ + +// System include(s): +#include <string> + +// ROOT include(s): +#include <TFile.h> +#include <TString.h> +#include <TTree.h> +#include <TChain.h> + +// Infrastructure include(s): +#ifdef ROOTCORE +# include "xAODRootAccess/Init.h" +# include "xAODRootAccess/TEvent.h" +#endif // ROOTCORE + +// EDM include(s): +#include "xAODCore/ShallowAuxContainer.h" +#include "xAODCore/ShallowCopy.h" +#include "xAODCore/tools/IOStats.h" + +// Tool testing include(s): +#include "BoostedJetTaggers/JSSWTopTaggerDNN.h" +#include "JetUncertainties/JetUncertaintiesTool.h" + +#include "AsgMessaging/MessageCheck.h" + +// messaging +ANA_MSG_HEADER(Test) +ANA_MSG_SOURCE(Test, "BoostedJetTaggers") +using namespace Test; + +int main( int argc, char* argv[] ) { + + ANA_CHECK_SET_TYPE (int); // makes ANA_CHECK return ints if exiting function + + // The application's name: + char* APP_NAME = argv[ 0 ]; + + // arguments + TString fileName = "/eos/atlas/atlascerngroupdisk/perf-jets/ReferenceFiles/mc16_13TeV.361028.Pythia8EvtGen_A14NNPDF23LO_jetjet_JZ8W.deriv.DAOD_FTAG1.e3569_s3126_r9364_r9315_p3260/DAOD_FTAG1.12133096._000074.pool.root.1"; + int ievent=-1; + int nevents=-1; + bool m_isMC=true; + bool verbose=false; + + + Info( APP_NAME, "==============================================" ); + Info( APP_NAME, "Usage: $> %s [xAOD file name]", APP_NAME ); + Info( APP_NAME, " $> %s | Run on default file", APP_NAME ); + Info( APP_NAME, " $> %s -f X | Run on xAOD file X", APP_NAME ); + Info( APP_NAME, " $> %s -n X | X = number of events you want to run on", APP_NAME ); + Info( APP_NAME, " $> %s -e X | X = specific number of the event to run 
on - for debugging", APP_NAME ); + Info( APP_NAME, " $> %s -d X | X = dataset ID", APP_NAME ); + Info( APP_NAME, " $> %s -m X | X = isMC", APP_NAME ); + Info( APP_NAME, " $> %s -v | run in verbose mode ", APP_NAME ); + Info( APP_NAME, "==============================================" ); + + // Check if we received a file name: + if( argc < 2 ) { + Info( APP_NAME, "No arguments - using default file" ); + Info( APP_NAME, "Executing on : %s", fileName.Data() ); + } + + //////////////////////////////////////////////////// + //::: parse the options + //////////////////////////////////////////////////// + std::string options; + for( int i=0; i<argc; i++){ + options+=(argv[i]); + } + + if(options.find("-f")!=std::string::npos){ + for( int ipos=0; ipos<argc ; ipos++ ) { + if(std::string(argv[ipos]).compare("-f")==0){ + fileName = argv[ipos+1]; + Info( APP_NAME, "Argument (-f) : Running on file # %s", fileName.Data() ); + break; + } + } + } + + if(options.find("-event")!=std::string::npos){ + for( int ipos=0; ipos<argc ; ipos++ ) { + if(std::string(argv[ipos]).compare("-event")==0){ + ievent = atoi(argv[ipos+1]); + Info( APP_NAME, "Argument (-event) : Running only on event # %i", ievent ); + break; + } + } + } + + if(options.find("-m")!=std::string::npos){ + for( int ipos=0; ipos<argc ; ipos++ ) { + if(std::string(argv[ipos]).compare("-m")==0){ + m_isMC = atoi(argv[ipos+1]); + Info( APP_NAME, "Argument (-m) : IsMC = %i", m_isMC ); + break; + } + } + } + + if(options.find("-n")!=std::string::npos){ + for( int ipos=0; ipos<argc ; ipos++ ) { + if(std::string(argv[ipos]).compare("-n")==0){ + nevents = atoi(argv[ipos+1]); + Info( APP_NAME, "Argument (-n) : Running on NEvents = %i", nevents ); + break; + } + } + } + + if(options.find("-v")!=std::string::npos){ + verbose=true; + Info( APP_NAME, "Argument (-v) : Setting verbose"); + } + + + //////////////////////////////////////////////////// + //::: initialize the application and get the event + 
//////////////////////////////////////////////////// + ANA_CHECK( xAOD::Init( APP_NAME ) ); + StatusCode::enableFailure(); + + // Open the input file: + TFile* ifile( TFile::Open( fileName, "READ" ) ); + if( !ifile ) Error( APP_NAME, "Cannot find file %s",fileName.Data() ); + + TChain *chain = new TChain ("CollectionTree","CollectionTree"); + chain->Add(fileName); + + // Create a TEvent object: + xAOD::TEvent event( (TTree*)chain, xAOD::TEvent::kAthenaAccess ); + Info( APP_NAME, "Number of events in the file: %i", static_cast< int >( event.getEntries() ) ); + + // Create a transient object store. Needed for the tools. + xAOD::TStore store; + + // Decide how many events to run over: + Long64_t entries = event.getEntries(); + + // Fill a validation true with the tag return value + std::unique_ptr<TFile> outputFile(TFile::Open("output_JSSWTopTaggerDNN.root", "recreate")); + int pass,truthLabel,idx; + float sf,pt,eta,m; + TTree* Tree = new TTree( "tree", "test_tree" ); + Tree->Branch( "pass", &pass, "pass/I" ); + Tree->Branch( "sf", &sf, "sf/F" ); + Tree->Branch( "pt", &pt, "pt/F" ); + Tree->Branch( "m", &m, "m/F" ); + Tree->Branch( "eta", &eta, "eta/F" ); + Tree->Branch( "idx", &idx, "idx/I" ); + Tree->Branch( "truthLabel", &truthLabel, "truthLabel/I" ); + + std::unique_ptr<JetUncertaintiesTool> m_jetUncToolSF(new JetUncertaintiesTool(("JetUncProvider_SF"))); + ANA_CHECK( m_jetUncToolSF->setProperty("JetDefinition", "AntiKt10LCTopoTrimmedPtFrac5SmallR20") ); + //ANA_CHECK( m_jetUncToolSF->setProperty("ConfigFile", "rel21/Summer2019/R10_SF_LC_DNNContained80_TopTag.config") ); + ANA_CHECK( m_jetUncToolSF->setProperty("ConfigFile", "/afs/cern.ch/user/t/tnobe/workDir/makeConfig/makebjtconfigs/outputs/temp_R10_SF_DNNTaggerTopQuarkContained80_SF.config") ); + ANA_CHECK( m_jetUncToolSF->setProperty("MCType", "MC16") ); + ANA_CHECK( m_jetUncToolSF->initialize() ); + + std::vector<std::string> pulls = {"__1down", "__1up"}; + CP::SystematicSet jetUnc_sysSet = 
m_jetUncToolSF->recommendedSystematics(); + const std::set<std::string> sysNames = jetUnc_sysSet.getBaseNames(); + std::vector<CP::SystematicSet> m_jetUnc_sysSets; + for (std::string sysName: sysNames) { + for (std::string pull : pulls) { + std::string sysPulled = sysName + pull; + m_jetUnc_sysSets.push_back(CP::SystematicSet(sysPulled)); + } + } + + //////////////////////////////////////////// + /////////// START TOOL SPECIFIC //////////// + //////////////////////////////////////////// + + //////////////////////////////////////////////////// + //::: Tool setup + // setup the tool handle as per the + // recommendation by ASG - https://twiki.cern.ch/twiki/bin/view/AtlasProtected/AthAnalysisBase#How_to_use_AnaToolHandle + //////////////////////////////////////////////////// + std::cout<<"Initializing JSSWTopTaggerDNN Tagger"<<std::endl; + asg::AnaToolHandle<JSSWTopTaggerDNN> m_Tagger; //! + ASG_SET_ANA_TOOL_TYPE( m_Tagger, JSSWTopTaggerDNN); + m_Tagger.setName("MyTagger"); + if(verbose) ANA_CHECK( m_Tagger.setProperty("OutputLevel", MSG::DEBUG) ); + ANA_CHECK( m_Tagger.setProperty( "CalibArea", "Local") ); + ANA_CHECK( m_Tagger.setProperty( "ConfigFile", "JSSWTopTaggerDNN/temp_JSSDNNTagger_AntiKt10LCTopoTrimmed_TopQuarkContained_MC16d_80Eff.dat") ); + ANA_CHECK( m_Tagger.setProperty( "UseTRUTH3", false) ); + //ANA_CHECK( m_Tagger.setProperty( "CalibArea", "JSSWTopTaggerDNN/Rel21") ); + //ANA_CHECK( m_Tagger.setProperty( "ConfigFile", "JSSDNNTagger_AntiKt10LCTopoTrimmed_TopQuarkContained_MC16d_20190405_80Eff.dat") ); + ANA_CHECK( m_Tagger.setProperty("IsMC", m_isMC) ); + ANA_CHECK( m_Tagger.retrieve() ); + + + std::cout << "Total Events in File : " << entries << std::endl; + + //////////////////////////////////////////////////// + // Loop over the events + //////////////////////////////////////////////////// + for( Long64_t entry = 0; entry < entries; ++entry ) { + + if( nevents!=-1 && entry > nevents ) break; + // Tell the object which entry to look at: + 
event.getEntry( entry ); + + // Print some event information + const xAOD::EventInfo* evtInfo = 0; + if(event.retrieve( evtInfo, "EventInfo" ) != StatusCode::SUCCESS){ + continue; + } + if(ievent!=-1 && static_cast <int> (evtInfo->eventNumber())!=ievent) { + continue; + } + + // Get the jets + const xAOD::JetContainer* myJets = 0; + if( event.retrieve( myJets, "AntiKt10LCTopoTrimmedPtFrac5SmallR20Jets" ) != StatusCode::SUCCESS) + continue ; + + // Loop over jet container + std::pair< xAOD::JetContainer*, xAOD::ShallowAuxContainer* > jets_shallowCopy = xAOD::shallowCopyContainer( *myJets ); + std::unique_ptr<xAOD::JetContainer> shallowJets(jets_shallowCopy.first); + std::unique_ptr<xAOD::ShallowAuxContainer> shallowAux(jets_shallowCopy.second); + idx=0; + for( xAOD::Jet* jetSC : *shallowJets ){ + + ANA_CHECK( m_Tagger->tag( *jetSC ) ); + if(verbose) { + std::cout << "Testing top Tagger " << std::endl; + std::cout << "jet pt = " << jetSC->pt() << std::endl; + std::cout << "RunningTag : " << jetSC->auxdata<bool>("DNNTaggerTopQuarkContained80_Tagged") << std::endl; + std::cout << "Printing jet score : " << jetSC->auxdata<float>("DNNTaggerTopQuarkContained80_Score") << std::endl; + std::cout << "result masspass = " << jetSC->auxdata<bool>("DNNTaggerTopQuarkContained80_PassMass") << std::endl; + } + truthLabel = jetSC->auxdata<int>("R10TruthLabel_R21Consolidated"); + + pass = jetSC->getAttribute<bool>("DNNTaggerTopQuarkContained80_Tagged"); + sf = jetSC->auxdata<float>("DNNTaggerTopQuarkContained80_SF"); + pt = jetSC->pt(); + m = jetSC->m(); + eta = jetSC->eta(); + + Tree->Fill(); + idx++; + if ( m_isMC ){ + if ( pt/1.e3 > 350 && std::abs(jetSC->eta()) < 2.0 ) { + bool validForUncTool = ( pt/1.e3 >= 150 && pt/1.e3 < 2500 ); + validForUncTool &= ( m/pt >= 0 && m/pt <= 1 ); + validForUncTool &= ( std::abs(eta) < 2 ); + std::cout << "Pass: " << pass << std::endl; + std::cout << "Nominal SF=" << sf << " truthLabel=" << truthLabel << " (1: t->qqb) " + << 
jetSC->auxdata<float>("DNNTaggerTopQuarkContained80_effSF") + << " " + << jetSC->auxdata<float>("DNNTaggerTopQuarkContained80_efficiency") + << std::endl; + if( validForUncTool ){ + for ( CP::SystematicSet sysSet : m_jetUnc_sysSets ){ + ANA_CHECK( m_Tagger->tag( *jetSC ) ); + ANA_CHECK( m_jetUncToolSF->applySystematicVariation(sysSet) ); + ANA_CHECK( m_jetUncToolSF->applyCorrection(*jetSC) ); + std::cout << sysSet.name() << " " << jetSC->auxdata<float>("DNNTaggerTopQuarkContained80_SF") << std::endl; + } + } + } + } + } + + Info( APP_NAME, "===>>> done processing event #%i, run #%i %i events processed so far <<<===", static_cast< int >( evtInfo->eventNumber() ), static_cast< int >( evtInfo->runNumber() ), static_cast< int >( entry + 1 ) ); + } + + //////////////////////////////////////////// + /////////// END TOOL SPECIFIC ////////////// + //////////////////////////////////////////// + + // write the tree to the output file + outputFile->cd(); + Tree->Write(); + outputFile->Close(); + + // cleanup + delete chain; + + // print the branches that were used for help with smart slimming + std::cout<<std::endl<<std::endl; + std::cout<<"Smart Slimming Checker :"<<std::endl; + xAOD::IOStats::instance().stats().printSmartSlimmingBranchList(); + std::cout<<std::endl<<std::endl; + + return 0; + +} + diff --git a/Reconstruction/Jet/BoostedJetTaggers/util/test_JetQGTagger.cxx b/Reconstruction/Jet/BoostedJetTaggers/util/test_JetQGTagger.cxx new file mode 100644 index 0000000000000000000000000000000000000000..9a44ab129a3b06ad4ba538b3a76bb0b7a847fbe9 --- /dev/null +++ b/Reconstruction/Jet/BoostedJetTaggers/util/test_JetQGTagger.cxx @@ -0,0 +1,213 @@ +/* + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +*/ + +// System include(s): +#include <string> + +// ROOT include(s): +#include <TFile.h> +#include <TString.h> +#include <TTree.h> +#include <TChain.h> + +// Infrastructure include(s): +#ifdef ROOTCORE +# include "xAODRootAccess/Init.h" +# include 
"xAODRootAccess/TEvent.h" +#endif // ROOTCORE + +// EDM include(s): +#include "xAODCore/ShallowAuxContainer.h" +#include "xAODCore/ShallowCopy.h" +#include "xAODCore/tools/IOStats.h" + +// Tool testing include(s): +#include "BoostedJetTaggers/JetQGTagger.h" + +// messaging +ANA_MSG_HEADER(Test) +ANA_MSG_SOURCE(Test, "BoostedJetTaggers") +using namespace Test; + +int main( int argc, char* argv[] ) { + + ANA_CHECK_SET_TYPE (int); // makes ANA_CHECK return ints if exiting function + + // The application's name: + char* APP_NAME = argv[ 0 ]; + + // arguments + TString fileName = "/eos/atlas/atlascerngroupdisk/perf-jets/ReferenceFiles/mc16_13TeV.361028.Pythia8EvtGen_A14NNPDF23LO_jetjet_JZ8W.deriv.DAOD_FTAG1.e3569_s3126_r9364_r9315_p3260/DAOD_FTAG1.12133096._000074.pool.root.1"; + int ievent=-1; + int nevents=-1; + bool verbose=false; + + + Info( APP_NAME, "==============================================" ); + Info( APP_NAME, "Usage: $> %s [xAOD file name]", APP_NAME ); + Info( APP_NAME, " $> %s | Run on default file", APP_NAME ); + Info( APP_NAME, " $> %s -f X | Run on xAOD file X", APP_NAME ); + Info( APP_NAME, " $> %s -n X | X = number of events you want to run on", APP_NAME ); + Info( APP_NAME, " $> %s -e X | X = specific number of the event to run on - for debugging", APP_NAME ); + Info( APP_NAME, " $> %s -v | run in verbose mode ", APP_NAME ); + Info( APP_NAME, "==============================================" ); + + // Check if we received a file name: + if( argc < 2 ) { + Info( APP_NAME, "No arguments - using default file" ); + Info( APP_NAME, "Executing on : %s", fileName.Data() ); + } + + //////////////////////////////////////////////////// + //::: parse the options + //////////////////////////////////////////////////// + std::string options; + for( int i=0; i<argc; i++){ + options+=(argv[i]); + } + + if(options.find("-f")!=std::string::npos){ + for( int ipos=0; ipos<argc ; ipos++ ) { + if(std::string(argv[ipos]).compare("-f")==0){ + fileName = argv[ipos+1]; + 
Info( APP_NAME, "Argument (-f) : Running on file # %s", fileName.Data() ); + break; + } + } + } + + if(options.find("-event")!=std::string::npos){ + for( int ipos=0; ipos<argc ; ipos++ ) { + if(std::string(argv[ipos]).compare("-event")==0){ + ievent = atoi(argv[ipos+1]); + Info( APP_NAME, "Argument (-event) : Running only on event # %i", ievent ); + break; + } + } + } + + if(options.find("-n")!=std::string::npos){ + for( int ipos=0; ipos<argc ; ipos++ ) { + if(std::string(argv[ipos]).compare("-n")==0){ + nevents = atoi(argv[ipos+1]); + Info( APP_NAME, "Argument (-n) : Running on NEvents = %i", nevents ); + break; + } + } + } + + if(options.find("-v")!=std::string::npos){ + verbose=true; + Info( APP_NAME, "Argument (-v) : Setting verbose"); + } + + + //////////////////////////////////////////////////// + //::: initialize the application and get the event + //////////////////////////////////////////////////// + ANA_CHECK( xAOD::Init( APP_NAME ) ); + StatusCode::enableFailure(); + + // Open the input file: + TFile* ifile( TFile::Open( fileName, "READ" ) ); + if( !ifile ) Error( APP_NAME, "Cannot find file %s",fileName.Data() ); + + TChain *chain = new TChain ("CollectionTree","CollectionTree"); + chain->Add(fileName); + + // Create a TEvent object: + xAOD::TEvent event( (TTree*)chain, xAOD::TEvent::kAthenaAccess ); + Info( APP_NAME, "Number of events in the file: %i", static_cast< int >( event.getEntries() ) ); + + // Create a transient object store. Needed for the tools. 
+ xAOD::TStore store; + + // Decide how many events to run over: + Long64_t entries = event.getEntries(); + + // Fill a validation true with the tag return value + TFile* outputFile = TFile::Open( "output_JetQGTagger.root", "recreate" ); + int pass; + TTree* Tree = new TTree( "tree", "test_tree" ); + Tree->Branch( "pass", &pass, "pass/I" ); + + //////////////////////////////////////////// + /////////// START TOOL SPECIFIC //////////// + //////////////////////////////////////////// + + //////////////////////////////////////////////////// + //::: Tool setup + // setup the tool handle as per the + // recommendation by ASG - https://twiki.cern.ch/twiki/bin/view/AtlasProtected/AthAnalysisBase#How_to_use_AnaToolHandle + //////////////////////////////////////////////////// + std::cout<<"Initializing QG Tagger"<<std::endl; + asg::AnaToolHandle<CP::JetQGTagger> m_Tagger; //! + ASG_SET_ANA_TOOL_TYPE( m_Tagger, CP::JetQGTagger); + m_Tagger.setName("MyTagger"); + if(verbose) ANA_CHECK( m_Tagger.setProperty("OutputLevel", MSG::DEBUG) ); + ANA_CHECK( m_Tagger.setProperty( "ConfigFile", "SmoothedWZTaggers/SmoothedContainedWTagger_AntiKt10LCTopoTrimmed_FixedSignalEfficiency50_MC15c_20161215.dat") ); + ANA_CHECK( m_Tagger.retrieve() ); + + //////////////////////////////////////////////////// + // Loop over the events + //////////////////////////////////////////////////// + for( Long64_t entry = 0; entry < entries; ++entry ) { + + if( nevents!=-1 && entry > nevents ) break; + // Tell the object which entry to look at: + event.getEntry( entry ); + + // Print some event information + const xAOD::EventInfo* evtInfo = 0; + if(event.retrieve( evtInfo, "EventInfo" ) != StatusCode::SUCCESS){ + continue; + } + if(ievent!=-1 && static_cast <int> (evtInfo->eventNumber())!=ievent) { + continue; + } + + // Get the jets + const xAOD::JetContainer* myJets = 0; + if( event.retrieve( myJets, "AntiKt10LCTopoTrimmedPtFrac5SmallR20Jets" ) != StatusCode::SUCCESS) + continue ; + + // Loop over jet 
container + for(const xAOD::Jet* jet : * myJets ){ + + ANA_CHECK( m_Tagger->tag( *jet ) ); + if(verbose) { + std::cout << "Testing W Tagger " << std::endl; + std::cout << "RunningTag : " << jet->auxdata<bool>("Tagged") << std::endl; + } + + pass = jet->auxdata<bool>("Tagged"); + + Tree->Fill(); + } + + Info( APP_NAME, "===>>> done processing event #%i, run #%i %i events processed so far <<<===", static_cast< int >( evtInfo->eventNumber() ), static_cast< int >( evtInfo->runNumber() ), static_cast< int >( entry + 1 ) ); + } + + //////////////////////////////////////////// + /////////// END TOOL SPECIFIC ////////////// + //////////////////////////////////////////// + + // write the tree to the output file + outputFile->cd(); + Tree->Write(); + outputFile->Close(); + + // cleanup + delete chain; + + // print the branches that were used for help with smart slimming + std::cout<<std::endl<<std::endl; + std::cout<<"Smart Slimming Checker :"<<std::endl; + xAOD::IOStats::instance().stats().printSmartSlimmingBranchList(); + std::cout<<std::endl<<std::endl; + + return 0; + +} + diff --git a/Reconstruction/Jet/BoostedJetTaggers/util/test_JetQGTaggerBDT.cxx b/Reconstruction/Jet/BoostedJetTaggers/util/test_JetQGTaggerBDT.cxx new file mode 100644 index 0000000000000000000000000000000000000000..bc84f4459d66e6198549b3c0008085f85b3c7e38 --- /dev/null +++ b/Reconstruction/Jet/BoostedJetTaggers/util/test_JetQGTaggerBDT.cxx @@ -0,0 +1,272 @@ +/* + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +*/ + +// System include(s): +#include <string> + +// ROOT include(s): +#include <TFile.h> +#include <TString.h> +#include <TTree.h> +#include <TChain.h> + +// Infrastructure include(s): +#ifdef ROOTCORE +# include "xAODRootAccess/Init.h" +# include "xAODRootAccess/TEvent.h" +#endif // ROOTCORE + +// EDM include(s): +#include "xAODCore/ShallowAuxContainer.h" +#include "xAODCore/ShallowCopy.h" +#include "xAODCore/tools/IOStats.h" + +// Tool testing include(s): +#include 
"BoostedJetTaggers/JetQGTaggerBDT.h" + +// messaging +ANA_MSG_HEADER(Test) +ANA_MSG_SOURCE(Test, "BoostedJetTaggers") +using namespace Test; + +int main( int argc, char* argv[] ) { + + ANA_CHECK_SET_TYPE (int); // makes ANA_CHECK return ints if exiting function + + // The application's name: + char* APP_NAME = argv[ 0 ]; + + // arguments + TString fileName = "/eos/atlas/atlascerngroupdisk/perf-jets/ReferenceFiles/mc16_13TeV.361028.Pythia8EvtGen_A14NNPDF23LO_jetjet_JZ8W.deriv.DAOD_FTAG1.e3569_s3126_r9364_r9315_p3260/DAOD_FTAG1.12133096._000074.pool.root.1"; + int ievent=-1; + int nevents=-1; + bool verbose=false; + + + Info( APP_NAME, "==============================================" ); + Info( APP_NAME, "Usage: $> %s [xAOD file name]", APP_NAME ); + Info( APP_NAME, " $> %s | Run on default file", APP_NAME ); + Info( APP_NAME, " $> %s -f X | Run on xAOD file X", APP_NAME ); + Info( APP_NAME, " $> %s -n X | X = number of events you want to run on", APP_NAME ); + Info( APP_NAME, " $> %s -e X | X = specific number of the event to run on - for debugging", APP_NAME ); + Info( APP_NAME, " $> %s -v | run in verbose mode ", APP_NAME ); + Info( APP_NAME, "==============================================" ); + + // Check if we received a file name: + if( argc < 2 ) { + Info( APP_NAME, "No arguments - using default file" ); + Info( APP_NAME, "Executing on : %s", fileName.Data() ); + } + + //////////////////////////////////////////////////// + //::: parse the options + //////////////////////////////////////////////////// + std::string options; + for( int i=0; i<argc; i++){ + options+=(argv[i]); + } + + if(options.find("-f")!=std::string::npos){ + for( int ipos=0; ipos<argc ; ipos++ ) { + if(std::string(argv[ipos]).compare("-f")==0){ + fileName = argv[ipos+1]; + Info( APP_NAME, "Argument (-f) : Running on file # %s", fileName.Data() ); + break; + } + } + } + + if(options.find("-event")!=std::string::npos){ + for( int ipos=0; ipos<argc ; ipos++ ) { + 
if(std::string(argv[ipos]).compare("-event")==0){ + ievent = atoi(argv[ipos+1]); + Info( APP_NAME, "Argument (-event) : Running only on event # %i", ievent ); + break; + } + } + } + + if(options.find("-n")!=std::string::npos){ + for( int ipos=0; ipos<argc ; ipos++ ) { + if(std::string(argv[ipos]).compare("-n")==0){ + nevents = atoi(argv[ipos+1]); + Info( APP_NAME, "Argument (-n) : Running on NEvents = %i", nevents ); + break; + } + } + } + + if(options.find("-v")!=std::string::npos){ + verbose=true; + Info( APP_NAME, "Argument (-v) : Setting verbose"); + } + + + //////////////////////////////////////////////////// + //::: initialize the application and get the event + //////////////////////////////////////////////////// + if(! xAOD::Init( APP_NAME ) ) + return 1; + StatusCode::enableFailure(); + + // Open the input file: + std::unique_ptr<TFile> ifile( TFile::Open( fileName, "READ" ) ); + if( !ifile ){ + Error( APP_NAME, "Cannot find file %s",fileName.Data() ); + return 1; + } + ifile->Close(); + ifile.reset( TFile::Open( fileName, "READ" ) ); + + // Create a TEvent object: + xAOD::TEvent event( ifile.get(), xAOD::TEvent::kAthenaAccess ); + Info( APP_NAME, "Number of events in the file: %i", static_cast< int >( event.getEntries() ) ); + + // Create a transient object store. Needed for the tools. 
+ xAOD::TStore store; + + // Decide how many events to run over: + Long64_t entries = event.getEntries(); + + // Fill a validation true with the tag return value + std::unique_ptr<TFile> outputFile(TFile::Open( "output_JetQGTaggerBDT.root", "recreate" )); + int pass; + TTree* Tree = new TTree( "tree", "test_tree" ); + Tree->SetDirectory( outputFile.get() ); + Tree->Branch( "pass", &pass, "pass/I" ); + + //////////////////////////////////////////// + /////////// START TOOL SPECIFIC //////////// + //////////////////////////////////////////// + + //////////////////////////////////////////////////// + //::: Tool setup + // setup the tool handle as per the + // recommendation by ASG - https://twiki.cern.ch/twiki/bin/view/AtlasProtected/AthAnalysisBase#How_to_use_AnaToolHandle + //////////////////////////////////////////////////// + std::cout<<"Initializing QG BDT Tagger"<<std::endl; + asg::AnaToolHandle<CP::JetQGTaggerBDT> m_Tagger; //! + m_Tagger.setName("MyTagger"); + if(verbose) ANA_CHECK( m_Tagger.setProperty("OutputLevel", MSG::DEBUG) ); + if(! m_Tagger.setProperty( "ConfigFile", "JetQGTaggerBDT/JetQGTaggerBDT50Gluon.dat") ) return 1; + if(! m_Tagger.setProperty( "UseJetVars", 0) ) return 1; + if(! m_Tagger.retrieve() ) return 1; + + //////////////////////////////////////////////////// + // Loop over the events + //////////////////////////////////////////////////// + + //weighted number of quark, gluon jets and tags + float w_nGluon = 0.; + float w_nQuark = 0.; + float w_nGluonTaggedANDisGluon = 0.; + float w_nQuarkTaggedANDisQuark = 0.; + float w_nGluonTaggedANDisQuark = 0.; + float w_nQuarkTaggedANDisGluon = 0.; + float eventweight = 0.; + + for( Long64_t entry = 0; entry < entries; ++entry ) { + + if( nevents!=-1 && entry > nevents ) break; + // Tell the object which entry to look at: + event.getEntry( entry ); + + // Print some event information + const xAOD::EventInfo* evtInfo = 0; + if( ! 
event.retrieve( evtInfo, "EventInfo" ).isSuccess() ) { + continue; + } + if(ievent!=-1 && static_cast <int> (evtInfo->eventNumber())!=ievent) { + continue; + } + eventweight = evtInfo->mcEventWeight(); + + // Get the jets + const xAOD::JetContainer* myJets = 0; + if( ! event.retrieve( myJets, "AntiKt4EMTopoJets" ).isSuccess() ) { + continue ; + } + + // Loop over jet container + for(const xAOD::Jet* jet : * myJets ){ + int truthlabel = jet->getAttribute<int>("PartonTruthLabelID"); + if(jet->pt()<20000 || TMath::Abs(jet->eta())>2.5 || truthlabel==-1 || truthlabel==5) + continue; + if(verbose) std::cout<<std::endl; + + ANA_CHECK( m_Tagger->tag( *jet ) ); + if(verbose) { + std::cout << "Testing QG BDT Tagger " << std::endl; + std::cout << "RunningTag : " << jet->auxdata<bool>("Tagged") << " jet truth label: " << truthlabel << std::endl; + } + + //-------------------------------------------------------------------------------- + switch (truthlabel) { + case -1: + break; + case 1: + case 2: + case 3: + case 4: + w_nQuark += eventweight; + if(jet->auxdata<bool>("Tagged")==1){ + w_nQuarkTaggedANDisQuark += eventweight; + } + else{ + w_nGluonTaggedANDisQuark += eventweight; + } + break; + case 5: + break; + case 21: + w_nGluon += eventweight; + if(jet->auxdata<bool>("Tagged")==1){ + w_nQuarkTaggedANDisGluon += eventweight; + } + else{ + w_nGluonTaggedANDisGluon += eventweight; + } + break; + default: + break; + } + //-------------------------------------------------------------------------------- + + pass = jet->auxdata<bool>("Tagged"); + + Tree->Fill(); + } + + if( nevents!=-1 && entry%100 == 0) Info( APP_NAME, "===>>> done processing event #%i, run #%i %i events processed so far <<<===", static_cast< int >( evtInfo->eventNumber() ), static_cast< int >( evtInfo->runNumber() ), static_cast< int >( entry + 1 ) ); + } + //////////////////////////////////////////// + /////////// END TOOL SPECIFIC ////////////// + //////////////////////////////////////////// + + if(verbose){ + 
std::cout<<"gluons: "<<w_nGluon<<"\n"; + std::cout<<"lights: "<<w_nQuark<<"\n"; + std::cout<<"efficiencies for gluon WP:"<<"\n"; + std::cout<<"gluon efficiency: "<<w_nGluonTaggedANDisGluon/w_nGluon<<"\n"; + std::cout<<"quark efficiency: "<<w_nGluonTaggedANDisQuark/w_nQuark<<"\n"; + std::cout<<"efficiencies for quark WP:"<<"\n"; + std::cout<<"quark efficiency: "<<w_nQuarkTaggedANDisQuark/w_nQuark<<"\n"; + std::cout<<"gluon efficiency: "<<w_nQuarkTaggedANDisGluon/w_nGluon<<"\n"; + } + + // write the tree to the output file + outputFile->cd(); + Tree->Write(); + outputFile->Close(); + + // print the branches that were used for help with smart slimming + std::cout<<std::endl<<std::endl; + std::cout<<"Smart Slimming Checker :"<<std::endl; + xAOD::IOStats::instance().stats().printSmartSlimmingBranchList(); + std::cout<<std::endl<<std::endl; + + return 0; + +} + diff --git a/Reconstruction/Jet/BoostedJetTaggers/util/test_SmoothedWZTagger.cxx b/Reconstruction/Jet/BoostedJetTaggers/util/test_SmoothedWZTagger.cxx new file mode 100644 index 0000000000000000000000000000000000000000..11c5f2866a5acc1abdec5887ed12af7d7fae076c --- /dev/null +++ b/Reconstruction/Jet/BoostedJetTaggers/util/test_SmoothedWZTagger.cxx @@ -0,0 +1,258 @@ +/* + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +*/ + +// System include(s): +#include <string> + +// ROOT include(s): +#include <TFile.h> +#include <TString.h> +#include <TTree.h> +#include <TChain.h> + +// Infrastructure include(s): +#ifdef ROOTCORE +# include "xAODRootAccess/Init.h" +# include "xAODRootAccess/TEvent.h" +#endif // ROOTCORE + +// EDM include(s): +#include "xAODCore/ShallowAuxContainer.h" +#include "xAODCore/ShallowCopy.h" +#include "xAODCore/tools/IOStats.h" + +// Tool testing include(s): +#include "BoostedJetTaggers/SmoothedWZTagger.h" + +#include "AsgMessaging/MessageCheck.h" + +// messaging +ANA_MSG_HEADER(Test) +ANA_MSG_SOURCE(Test, "BoostedJetTaggers") +using namespace Test; + +int main( int argc, 
char* argv[] ) { + + ANA_CHECK_SET_TYPE (int); // makes ANA_CHECK return ints if exiting function + + // The application's name: + char* APP_NAME = argv[ 0 ]; + + // arguments + TString fileName = "/eos/atlas/atlascerngroupdisk/perf-jets/ReferenceFiles/mc16_13TeV.361028.Pythia8EvtGen_A14NNPDF23LO_jetjet_JZ8W.deriv.DAOD_FTAG1.e3569_s3126_r9364_r9315_p3260/DAOD_FTAG1.12133096._000074.pool.root.1"; + int ievent=-1; + int nevents=-1; + bool m_isMC=true; + bool verbose=false; + + + Info( APP_NAME, "==============================================" ); + Info( APP_NAME, "Usage: $> %s [xAOD file name]", APP_NAME ); + Info( APP_NAME, " $> %s | Run on default file", APP_NAME ); + Info( APP_NAME, " $> %s -f X | Run on xAOD file X", APP_NAME ); + Info( APP_NAME, " $> %s -n X | X = number of events you want to run on", APP_NAME ); + Info( APP_NAME, " $> %s -e X | X = specific number of the event to run on - for debugging", APP_NAME ); + Info( APP_NAME, " $> %s -d X | X = dataset ID", APP_NAME ); + Info( APP_NAME, " $> %s -m X | X = isMC", APP_NAME ); + Info( APP_NAME, " $> %s -v | run in verbose mode ", APP_NAME ); + Info( APP_NAME, "==============================================" ); + + // Check if we received a file name: + if( argc < 2 ) { + Info( APP_NAME, "No arguments - using default file" ); + Info( APP_NAME, "Executing on : %s", fileName.Data() ); + } + + //////////////////////////////////////////////////// + //::: parse the options + //////////////////////////////////////////////////// + std::string options; + for( int i=0; i<argc; i++){ + options+=(argv[i]); + } + + if(options.find("-f")!=std::string::npos){ + for( int ipos=0; ipos<argc ; ipos++ ) { + if(std::string(argv[ipos]).compare("-f")==0){ + fileName = argv[ipos+1]; + Info( APP_NAME, "Argument (-f) : Running on file # %s", fileName.Data() ); + break; + } + } + } + + if(options.find("-event")!=std::string::npos){ + for( int ipos=0; ipos<argc ; ipos++ ) { + if(std::string(argv[ipos]).compare("-event")==0){ + ievent 
= atoi(argv[ipos+1]); + Info( APP_NAME, "Argument (-event) : Running only on event # %i", ievent ); + break; + } + } + } + + if(options.find("-m")!=std::string::npos){ + for( int ipos=0; ipos<argc ; ipos++ ) { + if(std::string(argv[ipos]).compare("-m")==0){ + m_isMC = atoi(argv[ipos+1]); + Info( APP_NAME, "Argument (-m) : IsMC = %i", m_isMC ); + break; + } + } + } + + if(options.find("-n")!=std::string::npos){ + for( int ipos=0; ipos<argc ; ipos++ ) { + if(std::string(argv[ipos]).compare("-n")==0){ + nevents = atoi(argv[ipos+1]); + Info( APP_NAME, "Argument (-n) : Running on NEvents = %i", nevents ); + break; + } + } + } + + if(options.find("-v")!=std::string::npos){ + verbose=true; + Info( APP_NAME, "Argument (-v) : Setting verbose"); + } + + + //////////////////////////////////////////////////// + //::: initialize the application and get the event + //////////////////////////////////////////////////// + ANA_CHECK( xAOD::Init( APP_NAME ) ); + StatusCode::enableFailure(); + + // Open the input file: + TFile* ifile( TFile::Open( fileName, "READ" ) ); + if( !ifile ) Error( APP_NAME, "Cannot find file %s",fileName.Data() ); + + TChain *chain = new TChain ("CollectionTree","CollectionTree"); + chain->Add(fileName); + + // Create a TEvent object: + xAOD::TEvent event( (TTree*)chain, xAOD::TEvent::kAthenaAccess ); + Info( APP_NAME, "Number of events in the file: %i", static_cast< int >( event.getEntries() ) ); + + // Create a transient object store. Needed for the tools. 
+ xAOD::TStore store; + + // Decide how many events to run over: + Long64_t entries = event.getEntries(); + + // Fill a validation true with the tag return value + std::unique_ptr<TFile> outputFile(TFile::Open("output_SmoothedWZTagger.root", "recreate")); + int pass,truthLabel,ntrk; + float sf,pt,eta,m; + TTree* Tree = new TTree( "tree", "test_tree" ); + Tree->Branch( "pass", &pass, "pass/I" ); + Tree->Branch( "sf", &sf, "sf/F" ); + Tree->Branch( "pt", &pt, "pt/F" ); + Tree->Branch( "m", &m, "m/F" ); + Tree->Branch( "eta", &eta, "eta/F" ); + Tree->Branch( "ntrk", &ntrk, "ntrk/I" ); + Tree->Branch( "truthLabel", &truthLabel, "truthLabel/I" ); + + //////////////////////////////////////////// + /////////// START TOOL SPECIFIC //////////// + //////////////////////////////////////////// + + //////////////////////////////////////////////////// + //::: Tool setup + // setup the tool handle as per the + // recommendation by ASG - https://twiki.cern.ch/twiki/bin/view/AtlasProtected/AthAnalysisBase#How_to_use_AnaToolHandle + //////////////////////////////////////////////////// + std::cout<<"Initializing WZ Tagger"<<std::endl; + asg::AnaToolHandle<SmoothedWZTagger> m_Tagger; //! 
+ ASG_SET_ANA_TOOL_TYPE( m_Tagger, SmoothedWZTagger); + m_Tagger.setName("MyTagger"); + if(verbose) ANA_CHECK( m_Tagger.setProperty("OutputLevel", MSG::DEBUG) ); + //ANA_CHECK( m_Tagger.setProperty( "CalibArea", "SmoothedWZTaggers/Rel21/") ); + //ANA_CHECK( m_Tagger.setProperty( "ConfigFile", "SmoothedContainedWTagger_AntiKt10LCTopoTrimmed_FixedSignalEfficiency50_MC16d_20190410.dat") ); + ANA_CHECK( m_Tagger.setProperty( "CalibArea", "Local") ); + ANA_CHECK( m_Tagger.setProperty( "ConfigFile", "SmoothedWZTaggers/temp_SmoothedContainedWTagger_AntiKt10LCTopoTrimmed_FixedSignalEfficiency50_MC16d.dat") ); + ANA_CHECK( m_Tagger.setProperty( "IsMC", m_isMC ) ); + ANA_CHECK( m_Tagger.retrieve() ); + + //////////////////////////////////////////////////// + // Loop over the events + //////////////////////////////////////////////////// + for( Long64_t entry = 0; entry < entries; ++entry ) { + + if( nevents!=-1 && entry > nevents ) break; + // Tell the object which entry to look at: + event.getEntry( entry ); + + // Print some event information + const xAOD::EventInfo* evtInfo = 0; + if(event.retrieve( evtInfo, "EventInfo" ) != StatusCode::SUCCESS){ + continue; + } + if(ievent!=-1 && static_cast <int> (evtInfo->eventNumber())!=ievent) { + continue; + } + + // Get the jets + const xAOD::JetContainer* myJets = 0; + if( event.retrieve( myJets, "AntiKt10LCTopoTrimmedPtFrac5SmallR20Jets" ) != StatusCode::SUCCESS) + continue ; + + // Loop over jet container + std::pair< xAOD::JetContainer*, xAOD::ShallowAuxContainer* > jets_shallowCopy = xAOD::shallowCopyContainer( *myJets ); + std::unique_ptr<xAOD::JetContainer> shallowJets(jets_shallowCopy.first); + std::unique_ptr<xAOD::ShallowAuxContainer> shallowAux(jets_shallowCopy.second); + for( xAOD::Jet* jetSC : *shallowJets ){ + + ANA_CHECK( m_Tagger->tag( *jetSC ) ); + if(verbose) { + std::cout << "Testing W Tagger " << std::endl; + std::cout << "jet pt = " << jetSC->pt() << std::endl; + std::cout << "jet ntrk = " << 
jetSC->auxdata<int>("SmoothWContained50_ParentJetNTrkPt500") << std::endl; + std::cout << "RunningTag : " << jetSC->auxdata<bool>("SmoothWContained50_Tagged") << std::endl; + std::cout << "result d2pass = " << jetSC->auxdata<bool>("SmoothWContained50_PassD2") << std::endl; + std::cout << "result ntrkpass = " << jetSC->auxdata<bool>("SmoothWContained50_PassNtrk") << std::endl; + std::cout << "result masspass = " << jetSC->auxdata<bool>("SmoothWContained50_PassMass") << std::endl; + } + truthLabel = jetSC->auxdata<int>("R10TruthLabel_R21Consolidated"); + + pass = jetSC->auxdata<bool>("SmoothWContained50_Tagged"); + pt = jetSC->pt(); + m = jetSC->m(); + eta = jetSC->eta(); + ntrk = jetSC->auxdata<int>("ParentJetNTrkPt500"); + sf = jetSC->auxdata<float>("SmoothWContained50_SF"); + std::cout << "pass " << pass + << " truthLabel " << truthLabel + << " sf " << sf + << " eff " << jetSC->auxdata<float>("SmoothWContained50_effMC") + << std::endl; + + Tree->Fill(); + } + + Info( APP_NAME, "===>>> done processing event #%i, run #%i %i events processed so far <<<===", static_cast< int >( evtInfo->eventNumber() ), static_cast< int >( evtInfo->runNumber() ), static_cast< int >( entry + 1 ) ); + } + + //////////////////////////////////////////// + /////////// END TOOL SPECIFIC ////////////// + //////////////////////////////////////////// + + // write the tree to the output file + outputFile->cd(); + Tree->Write(); + outputFile->Close(); + + // cleanup + delete chain; + + // print the branches that were used for help with smart slimming + std::cout<<std::endl<<std::endl; + std::cout<<"Smart Slimming Checker :"<<std::endl; + xAOD::IOStats::instance().stats().printSmartSlimmingBranchList(); + std::cout<<std::endl<<std::endl; + + return 0; + +} + diff --git a/Reconstruction/Jet/JetMomentTools/CMakeLists.txt b/Reconstruction/Jet/JetMomentTools/CMakeLists.txt index 3380b03d56bbe875b6b5c67ece4ba8a3b4f41d62..832412f363214f985b1eec465fbb37088995a27f 100644 --- 
a/Reconstruction/Jet/JetMomentTools/CMakeLists.txt +++ b/Reconstruction/Jet/JetMomentTools/CMakeLists.txt @@ -13,7 +13,7 @@ atlas_add_library( JetMomentToolsLib JetMomentTools/*.h Root/*.cxx PUBLIC_HEADERS JetMomentTools INCLUDE_DIRS ${Boost_INCLUDE_DIRS} ${FASTJET_INCLUDE_DIRS} ${ROOT_INCLUDE_DIRS} - LINK_LIBRARIES ${Boost_LIBRARIES} ${FASTJET_LIBRARIES} ${ROOT_LIBRARIES} AsgDataHandlesLib AsgTools InDetTrackSelectionToolLib JetEDM JetInterface JetRecLib JetUtils PFlowUtilsLib TrackVertexAssociationToolLib xAODCaloEvent xAODEventInfo xAODJet xAODMissingET xAODTracking xAODTruth + LINK_LIBRARIES ${Boost_LIBRARIES} ${FASTJET_LIBRARIES} ${ROOT_LIBRARIES} AsgDataHandlesLib AsgTools InDetTrackSelectionToolLib JetCalibToolsLib JetEDM JetInterface JetRecLib JetUtils PFlowUtilsLib TrackVertexAssociationToolLib xAODCaloEvent xAODEventInfo xAODJet xAODMissingET xAODTracking xAODTruth PRIVATE_LINK_LIBRARIES CaloGeoHelpers xAODMetaData xAODPFlow PathResolver ) if( NOT XAOD_STANDALONE ) @@ -24,7 +24,7 @@ if( NOT XAOD_STANDALONE ) atlas_add_component( JetMomentTools src/*.h src/*.cxx src/components/*.cxx INCLUDE_DIRS ${Boost_INCLUDE_DIRS} ${FASTJET_INCLUDE_DIRS} ${ROOT_INCLUDE_DIRS} - LINK_LIBRARIES ${Boost_LIBRARIES} ${FASTJET_LIBRARIES} ${ROOT_LIBRARIES} AsgTools CaloIdentifier xAODCaloEvent xAODJet GaudiKernel JetEDM JetInterface JetRecLib JetUtils PFlowUtilsLib PathResolver JetMomentToolsLib ${extra_libs} ) + LINK_LIBRARIES ${Boost_LIBRARIES} ${FASTJET_LIBRARIES} ${ROOT_LIBRARIES} AsgTools CaloIdentifier xAODCaloEvent xAODJet GaudiKernel JetCalibToolsLib JetEDM JetInterface JetRecLib JetUtils PFlowUtilsLib PathResolver JetMomentToolsLib ${extra_libs} ) endif() #if( XAOD_STANDALONE ) diff --git a/Reconstruction/Jet/JetMomentTools/JetMomentTools/JetForwardPFlowJvtTool.h b/Reconstruction/Jet/JetMomentTools/JetMomentTools/JetForwardPFlowJvtTool.h new file mode 100644 index 0000000000000000000000000000000000000000..7936f747f7bd0635d9a42fa0ec7c324f4d0f5f53 --- /dev/null +++ 
b/Reconstruction/Jet/JetMomentTools/JetMomentTools/JetForwardPFlowJvtTool.h @@ -0,0 +1,170 @@ +///////////////////////// -*- C++ -*- ///////////////////////////// + +/* + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +*/ + +// JetForwardPFlowJvtTool.h +// Header file for class JetForwardPFlowJvtTool +// Author: Anastasia Kotsokechagia <anastasia.kotsokechagia@cern.ch> + +// Tool for calculating fjvt values for pflow jets. +// Short describtion of the tool; +// First central PU jets are built per vertex. +// Reconstructed calibrated jets are then used to calculate the per vertex missing momentum (miss-mom). +// The per vertex missing momentum is defined as: The vector some of the calibrated jet momenta (for jets with pt>20GeV && Rpt>0.1 wrt to the vertex) + tracks assosiated to the vertex (otherwise). +// PU Jets closeby (dR<0.3) to a HS jet are not considered. +// The fJVT value for every forward jet (fj) of the event is then calculated choosing the vertex with the largest negative miss-mom projection on the fj. +// User action: After initializing the tool the user has to call the modify(xAOD::JetContainer& jetCont) function. Argument in this fuction is the PFlow jet container of the event. +// The fjvt value for every forward jet of the container is then calculated and can be retrieved. +/////////////////////////////////////////////////////////////////// + //Parameters + // m_orLabel: "" + // m_jetsName : "Container name for the output reconstructed PU jets " + // m_tightOP: "If true a tight fjvt threshold value is applied" + // m_outLabelFjvt: "Decorator for passing fJVT threshold (tight or loose)" + // m_jetchargedp4: "Name of the jet charged momentum 4-vector" + // m_etaThresh: "Maximum eta value for considering a jet as central" + // m_forwardMinPt: "Minimum forward jet pt" + // m_forwardMaxPt: "Maximum forward jet pt. 
If -1 no threshold is applied" + // m_centerMinPt: "Minimum central jet pt" + // m_centerMaxPt: "Maximum central jet pt. If -1 no threshold is applied" + // m_pvind: "Hard-Scatter primary vertex index of the event. If -1 it's automatically retrieved from the event" + // m_rptCut: "Rpt cut value for central PU jets contributing in the missing momentum calculation" + // m_jvtCut: "JVT threshold value for considering a central PU jet as HS" + // m_dzCut: "Dz=z-z0 cut value for pfo objects participating in the HS vertex jet reco" + // m_vertices: "Number of vertices for which the missing momentum is calculated" + // m_maxRap: "Maximum rapidity value in fastjet::AreaDefinition" + // m_neutMaxRap: "Maximum rapidity value for neutral pfos participating in jet reco" + // m_weight: "PFO weight value" + // m_pfoToolName: "Name of PFO retriever tool" + // m_wpfoToolName: "Name of PFO weighting tool" + // m_pfoJESName: "Name of jet calibration tool" + // m_jetAlgo: "Jet calibration collection name" + // m_calibconfig: "Calibration config for PFlow jets, need to be updated with latest one" + // m_calibSeq: "Calibration sequence to be applied" + // m_calibArea: "Calibration area" + // m_isdata: "True if data" + + +#ifndef FORWARDPFLOWJVTTOOL_JVT_FORWARDPFLOWJVTTOOL_H +#define FORWARDPFLOWJVTTOOL_JVT_FORWARDPFLOWJVTTOOL_H 1 + +// STL includes +#include <string> + +// FrameWork includes +#include "AsgTools/ToolHandle.h" +#include "AsgTools/AsgTool.h" +#include "AsgTools/PropertyWrapper.h" +#include "JetInterface/IJetDecorator.h" +#include "JetEDM/TrackVertexAssociation.h" +#include "xAODJet/JetContainer.h" +#include "xAODJet/JetAuxContainer.h" + +#include "AsgDataHandles/ReadDecorHandleKey.h" +#include "AsgDataHandles/ReadDecorHandle.h" +#include "AsgDataHandles/WriteDecorHandleKey.h" +#include "AsgDataHandles/WriteDecorHandle.h" + +// Pflow tools +#include "PFlowUtils/IWeightPFOTool.h" +#include "PFlowUtils/WeightPFOTool.h" +#include "JetCalibTools/IJetCalibrationTool.h" + 
+#include "AsgTools/ToolHandle.h" +#include "JetCalibTools/IJetCalibrationTool.h" + +namespace pflow { + struct puJets { + std::shared_ptr<xAOD::JetContainer> jetCont; + std::shared_ptr<xAOD::JetAuxContainer> jetAuxCont; + }; +} + + class JetForwardPFlowJvtTool + : public asg::AsgTool, + virtual public IJetDecorator{ + ASG_TOOL_CLASS(JetForwardPFlowJvtTool,IJetDecorator) + + /////////////////////////////////////////////////////////////////// + // Public methods: + /////////////////////////////////////////////////////////////////// + public: + + /// Constructor with parameters: + JetForwardPFlowJvtTool(const std::string& name); + + /// Destructor: + virtual ~JetForwardPFlowJvtTool(); + + virtual StatusCode initialize() override; + + + virtual StatusCode decorate(const xAOD::JetContainer& jetCont) const override; + + float getFJVT(const xAOD::Jet *jet,std::vector<TVector2> pileupMomenta) const; + bool isForwardJet(const xAOD::Jet *jet) const; + bool isCentralJet(const xAOD::Jet *jet) const; + + StatusCode tagTruth(const xAOD::JetContainer *jets,const xAOD::JetContainer *truthJets); + std::vector<TVector2> calculateVertexMomenta(const xAOD::JetContainer *jets,int pvind, int vertices) const; + pflow::puJets buildPFlowPUjets(const xAOD::Vertex &vx) const; + bool hasCloseByHSjet(const xAOD::Jet *jet, const xAOD::JetContainer *pjets ) const; + double getRpt(const xAOD::Jet *jet) const; + fastjet::PseudoJet pfoToPseudoJet(const xAOD::PFO* pfo, const CP::PFO_JetMETConfig_charge& theCharge, const xAOD::Vertex *vx) const; + + private: + + SG::ReadHandleKey<jet::TrackVertexAssociation> m_tvaKey{this, "TrackVertexAssociation", "", "Input track-vertex association"}; + Gaudi::Property<std::string> m_jetContainerName{this, "JetContainer", "", "SG key for the input jet container"}; + Gaudi::Property<std::string> m_jetsName{this, "jetsName", "AntiKt4PUPFlowJets", "Container name for the output reconstructed PU jets"}; + Gaudi::Property<std::string> m_jetchargedp4{this, 
"jetchargedp4", "JetChargedScaleMomentum", "Name of the jet charged momentum 4-vector"}; + Gaudi::Property<std::string> m_pfoToolName{this, "pfoToolName", "PFOTool", "Name of PFO retriever tool"}; + Gaudi::Property<std::string> m_wpfoToolName{this, "wpfoToolName", "WPFOTool", "Name of PFO weighting tool"}; + Gaudi::Property<std::string> m_pfoJESName{this, "pfoJESName", "pfoJES", "Name of jet claibration tool"}; + Gaudi::Property<std::string> m_jetAlgo{this, "jetAlgo", "AntiKt4EMPFlow", "Jet calibration collection name"}; + Gaudi::Property<std::string> m_calibconfig{this, "calibconfig", "JES_MC16Recommendation_Consolidated_PFlow_Apr2019_Rel21.config", "Calibration config for PFlow jets, need to be updated with latest one"}; + Gaudi::Property<std::string> m_calibSeq{this, "calibSeq", "JetArea_Residual_EtaJES", "Calibration sequence to be applied"}; + Gaudi::Property<std::string> m_calibArea{this, "calibArea", "00-04-82", "Calibration area"}; + + Gaudi::Property<bool> m_isdata{this, "isdata", false, "True if data"}; + Gaudi::Property<int> m_pvind{this, "pvind", -1, "Hard-Scatter primary vertex index of the event. If -1 it will be automatically retrieved from the event"}; + Gaudi::Property<int> m_vertices{this, "vertices", 10, "Number of vertices for which the missing momentum is calculated"}; + Gaudi::Property<bool> m_includePV{this, "includePV", false, "Flag to include jets and tracks associated to PV in the calculation"}; + Gaudi::Property<double> m_etaThresh{this, "etaThresh", 2.5, "Maximum eta value for considering a jet as central"}; + Gaudi::Property<double> m_forwardMinPt{this, "forwardMinPt", 20e3, "Minimum forward jet pt"}; + Gaudi::Property<double> m_forwardMaxPt{this, "forwardMaxPt", -1, "Maximum forward jet pt. If -1 no threshold is applied"}; + Gaudi::Property<double> m_centerMinPt{this, "centralMinPt", 20e3, "Minimum central jet pt"}; + Gaudi::Property<double> m_centerMaxPt{this, "centralMaxPt", -1, "Maximum central jet pt. 
If -1 no threshold is applied"}; + Gaudi::Property<double> m_fjvtThresh{this, "fjvtThresh", 15e3, "fjvt threshold value"}; + Gaudi::Property<double> m_rptCut{this, "rptCut", 0.1, "Rpt cut value for central PU jets contributing in the missing momentum calculation"}; + Gaudi::Property<double> m_jvtCut{this, "jvtCut", 0.2, "JVT threshold value for considering a central PU jet as HS"}; + Gaudi::Property<double> m_dzCut{this, "dzCut", 2.0, "Dz=z=-z0 cut for pfo objects participating in the HS vertex jet reco"}; + Gaudi::Property<double> m_maxRap{this, "maxRap", 2.5, "Maximum rapidity value in fastjet::AreaDefinition"}; + Gaudi::Property<double> m_neutMaxRap{this, "neutMaxRap", 2.5, "Maximum rapidity value for neutral pfos participating in jet reco"}; + Gaudi::Property<float> m_weight{this, "weight", 0, "PFO weight value"}; + Gaudi::Property<bool> m_tightOP{this, "tightOP", false, "If true a tight fjvt threshold value is applied"}; + + // not used? + //Gaudi::Property<std::string> m_jvtMomentName{"jvtMomentName", "", ""}; + //Gaudi::Property<double> m_centerJvtThresh{"", 0, ""}; + + SG::ReadHandleKey<xAOD::VertexContainer> m_vxContKey{this, "verticesName", "PrimaryVertices", "Container name of vertices to be retrieved"}; + SG::ReadHandleKey<xAOD::PFOContainer> m_PFOKey{this, "PFOName", "CHSParticleFlowObjects", "SG Key for CHS PFO Container"}; + + SG::ReadDecorHandleKey<xAOD::JetContainer> m_jvtKey{this, "jvtName", "Jvt", "SG key for the Jvt decoration"}; + SG::ReadDecorHandleKey<xAOD::PFO> m_orKey{this, "ORName", "", "OR label"}; + + SG::WriteDecorHandleKey<xAOD::JetContainer> m_fjvtKey{this, "FjvtName", "passOnlyFJVT", "Decorator for passing fJVT threshold (tight or loose)"}; + SG::WriteDecorHandleKey<xAOD::JetContainer> m_fjvtRawKey{this, "FjvtRawName", "fJvt", "Decorator for raw fJVT variable"}; + SG::WriteDecorHandleKey<xAOD::JetContainer> m_isHSKey{this, "isHSName", "isJVTHS", "SG key for output isJVTHS decoration"}; + SG::WriteDecorHandleKey<xAOD::JetContainer> 
m_isPUKey{this, "isPUName", "isJvtPU", "SG key for output isJVTPU decoration"}; + + ToolHandle<CP::WeightPFOTool> m_wpfotool{this,"WeightPFOTool", "", "Weight PFO tool name"}; + ToolHandle<IJetCalibrationTool> m_pfoJES{this,"JetCalibrationTool", "", "Jet calibration tool name"}; + + std::size_t getPV() const; + + }; +#endif //> !FORWARDJVTTOOL_JVT_FORWARDJVTTOOL_H diff --git a/Reconstruction/Jet/JetMomentTools/Root/JetForwardPFlowJvtTool.cxx b/Reconstruction/Jet/JetMomentTools/Root/JetForwardPFlowJvtTool.cxx new file mode 100644 index 0000000000000000000000000000000000000000..18d25c6833317959d6ce707eb8b247d023e34f43 --- /dev/null +++ b/Reconstruction/Jet/JetMomentTools/Root/JetForwardPFlowJvtTool.cxx @@ -0,0 +1,328 @@ +///////////////////////// -*- C++ -*- ///////////////////////////// + +/* + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +*/ + +// JetForwardPFlowJvtTool.cxx +// Implementation file for class JetForwardPFlowJvtTool +// Author: Anastasia Kotsokechagia <anastasia.kotsokechagia@cern.ch> +/////////////////////////////////////////////////////////////////// + +// JetForwardPFlowJvtTool includes +#include "JetMomentTools/JetForwardPFlowJvtTool.h" + +// Jet EDM +#include "xAODJet/JetAttributes.h" + +// FastJet +#include "fastjet/ClusterSequence.hh" +#include "fastjet/ClusterSequenceArea.hh" +#include <fastjet/AreaDefinition.hh> + +// Jet +#include "JetRec/JetFromPseudojet.h" + + /////////////////////////////////////////////////////////////////// + // Public methods: + /////////////////////////////////////////////////////////////////// + + // Constructors + //////////////// + JetForwardPFlowJvtTool::JetForwardPFlowJvtTool(const std::string& name) : + AsgTool(name) { + } + + // Destructor + /////////////// + JetForwardPFlowJvtTool::~JetForwardPFlowJvtTool() + {} + + // Athena algtool's Hooks + //////////////////////////// + StatusCode JetForwardPFlowJvtTool::initialize() + { + ATH_MSG_INFO ("Initializing " << name() << "..."); + if 
(m_tightOP) m_fjvtThresh = 0.53; + else m_fjvtThresh = 0.72; + + ATH_CHECK( m_tvaKey.initialize() ); + + if(m_jetContainerName.empty()){ + ATH_MSG_ERROR("JetForwardPFlowJvtTool needs to have its input jet container configured!"); + return StatusCode::FAILURE; + } + + if(!m_orKey.key().empty()){ + m_orKey = m_jetContainerName + "." + m_orKey.key(); + ATH_CHECK(m_orKey.initialize()); + } + m_fjvtKey = m_jetContainerName + "." + m_fjvtKey.key(); + m_fjvtRawKey = m_jetContainerName + "." + m_fjvtRawKey.key(); + m_isHSKey = m_jetContainerName + "." + m_isHSKey.key(); + m_isPUKey = m_jetContainerName + "." + m_isPUKey.key(); + m_jvtKey = m_jetContainerName + "." + m_jvtKey.key(); + + ATH_CHECK(m_fjvtKey.initialize()); + ATH_CHECK(m_fjvtRawKey.initialize()); + ATH_CHECK(m_isHSKey.initialize()); + ATH_CHECK(m_isPUKey.initialize()); + ATH_CHECK(m_jvtKey.initialize()); + + ATH_CHECK(m_vxContKey.initialize()); + ATH_CHECK(m_PFOKey.initialize()); + + return StatusCode::SUCCESS; + } + + StatusCode JetForwardPFlowJvtTool::decorate(const xAOD::JetContainer& jetCont) const { + std::vector<TVector2> pileupMomenta; + + pileupMomenta=calculateVertexMomenta(&jetCont,m_pvind, m_vertices); + + SG::WriteDecorHandle<xAOD::JetContainer, char> fjvtHandle(m_fjvtKey); + SG::WriteDecorHandle<xAOD::JetContainer, float> fjvtRawHandle(m_fjvtRawKey); + if(pileupMomenta.size()==0) { + ATH_MSG_DEBUG( "pileupMomenta is empty, this can happen for events with no PU vertices." + <<" fJVT won't be computed for this event and will be set to 0 instead." 
); + for(const xAOD::Jet* jetF : jetCont) { + fjvtHandle(*jetF) = 1; + fjvtRawHandle(*jetF) = 0; + } + return StatusCode::SUCCESS; + } + + for(const xAOD::Jet* jetF : jetCont) { + fjvtHandle(*jetF) = 1; + fjvtRawHandle(*jetF) = 0; + + if (isForwardJet(jetF)){ + double fjvt = getFJVT(jetF,pileupMomenta); + if (fjvt>m_fjvtThresh) fjvtHandle(*jetF) = 0; + fjvtRawHandle(*jetF) = fjvt; + } + } + return StatusCode::SUCCESS; + } + + float JetForwardPFlowJvtTool::getFJVT(const xAOD::Jet *jet, std::vector<TVector2> pileupMomenta) const { + TVector2 fjet(jet->pt()*cos(jet->phi()),jet->pt()*sin(jet->phi())); + double fjvt = 0; + for (const TVector2& pu : pileupMomenta) { + double projection = pu*fjet/fjet.Mod(); + if (projection<fjvt) fjvt = projection; + } + return -1*fjvt/fjet.Mod(); + } + + std::vector<TVector2> JetForwardPFlowJvtTool::calculateVertexMomenta(const xAOD::JetContainer *pjets, + int pvind, int vertices) const { + std::vector<TVector2> pileupMomenta; + // -- Retrieve PV index if not provided by user + const std::size_t pv_index = (pvind==-1) ? getPV() : std::size_t(pvind); + + SG::ReadHandle<xAOD::VertexContainer> vxContHandle(m_vxContKey); + + for(const xAOD::Vertex* vx: *vxContHandle) { + if(vx->vertexType()!=xAOD::VxType::PriVtx && vx->vertexType()!=xAOD::VxType::PileUp) continue; + if(vx->index()==(size_t)pv_index) continue; + + TString jname = m_jetsName.value(); + jname += vx->index(); + + pflow::puJets vertjets = buildPFlowPUjets(*vx); + if( !vertjets.jetCont || !vertjets.jetAuxCont ){ + ATH_MSG_WARNING(" Some issue appeared while building the pflow pileup jets for vertex " + << vx->index() << " (vxType = " << vx->vertexType()<<" )!" 
); + return pileupMomenta; + } + + TVector2 vertex_met; + for( const xAOD::Jet *jet : *(vertjets.jetCont) ) { + + // Remove jets which are close to hs + if (!m_includePV && hasCloseByHSjet(jet,pjets)) continue; + + // Calculate vertex missing momentum + if (isCentralJet(jet) && getRpt(jet)> m_rptCut) + { + vertex_met += TVector2(jet->pt()*cos(jet->phi()),jet->pt()*sin(jet->phi()) ) ; + } + else{ + vertex_met += TVector2(jet->jetP4(m_jetchargedp4).Pt()*cos(jet->jetP4(m_jetchargedp4).Phi()), + jet->jetP4(m_jetchargedp4).Pt()*sin(jet->jetP4(m_jetchargedp4).Phi()) ); + } + } + + pileupMomenta.push_back(vertex_met); + if(vertices!=-1 && int(vx->index())==vertices) break; + } + return pileupMomenta; + } + + bool JetForwardPFlowJvtTool::hasCloseByHSjet(const xAOD::Jet *jet, const xAOD::JetContainer *pjets ) const { + for (const xAOD::Jet* pjet : *pjets) { + float jet_jvt=0; + SG::ReadDecorHandle<xAOD::JetContainer, float> jvtHandle(m_jvtKey); + jet_jvt = jvtHandle(*pjet); + if (pjet->p4().DeltaR(jet->p4())<0.3 && jet_jvt>m_jvtCut && isCentralJet(pjet) ) return true; + } + return false; + } + + pflow::puJets JetForwardPFlowJvtTool::buildPFlowPUjets(const xAOD::Vertex &vx) const { + pflow::puJets pu_jets; + const std::size_t pv_index = (m_pvind==-1) ? 
getPV() : std::size_t (m_pvind); + + std::vector<fastjet::PseudoJet> input_pfo; + std::set<int> charged_pfo; + + SG::ReadHandle<jet::TrackVertexAssociation> tvaHandle(m_tvaKey); + SG::ReadHandle<xAOD::PFOContainer> PFOHandle(m_PFOKey); + + if (!tvaHandle.isValid()){ + ATH_MSG_ERROR("Could not retrieve the TrackVertexAssociation: " + << m_tvaKey.key()); + return pu_jets; + } + for(const xAOD::PFO* pfo : *PFOHandle){ + if (m_orKey.key().empty()) continue; + SG::ReadDecorHandle<xAOD::PFO, char> orHandle(m_orKey); + if (!orHandle(*pfo)) continue; + if (pfo->isCharged()) { + if (vx.index()==pv_index && std::abs((vx.z()-pfo->track(0)->z0())*sin(pfo->track(0)->theta()))>m_dzCut) + continue; + if (vx.index()!=pv_index + && (!tvaHandle->associatedVertex(pfo->track(0)) + || vx.index()!=tvaHandle->associatedVertex(pfo->track(0))->index()) + ) continue; + input_pfo.push_back(pfoToPseudoJet(pfo, CP::charged, &vx) ); + charged_pfo.insert(pfo->index()); + } + else if (std::abs(pfo->eta())<m_neutMaxRap && !pfo->isCharged() && pfo->eEM()>0) + { + input_pfo.push_back(pfoToPseudoJet(pfo, CP::neutral, &vx) ); + } + } + std::shared_ptr<xAOD::JetContainer> vertjets = std::make_shared<xAOD::JetContainer>(); + std::shared_ptr<xAOD::JetAuxContainer> vertjetsAux = std::make_shared<xAOD::JetAuxContainer>(); + + vertjets->setStore(vertjetsAux.get()); + TString newname = m_jetsName.value(); + newname += vx.index(); + + fastjet::JetDefinition jet_def(fastjet::antikt_algorithm,0.4); + fastjet::AreaDefinition area_def(fastjet::active_area_explicit_ghosts, + fastjet::GhostedAreaSpec(fastjet::SelectorAbsRapMax(m_maxRap))); + fastjet::ClusterSequenceArea clust_pfo(input_pfo,jet_def,area_def); + std::vector<fastjet::PseudoJet> inclusive_jets = sorted_by_pt(clust_pfo.inclusive_jets(5000.)); + + for (size_t i = 0; i < inclusive_jets.size(); i++) { + xAOD::Jet* jet= new xAOD::Jet(); + xAOD::JetFourMom_t tempjetp4(inclusive_jets[i].pt(), + inclusive_jets[i].eta(), + inclusive_jets[i].phi(), + 
inclusive_jets[i].m()); + xAOD::JetFourMom_t newArea(inclusive_jets[i].area_4vector().perp(), + inclusive_jets[i].area_4vector().eta(), + inclusive_jets[i].area_4vector().phi(), + inclusive_jets[i].area_4vector().m()); + vertjets->push_back(jet); + jet->setJetP4(tempjetp4); + jet->setJetP4("JetConstitScaleMomentum",tempjetp4); + jet->setJetP4("JetPileupScaleMomentum",tempjetp4); + jet->setAttribute("ActiveArea4vec",newArea); + jet->setAttribute("DetectorEta",jet->eta()); + std::vector<fastjet::PseudoJet> constituents = inclusive_jets[i].constituents(); + float chargedpart = 0; + for (size_t j = 0; j < constituents.size(); j++) { + if (charged_pfo.count(constituents[j].user_index())>=1) { + chargedpart += constituents[j].perp(); + } + } + xAOD::JetFourMom_t chargejetp4(chargedpart,inclusive_jets[i].eta(),inclusive_jets[i].phi(),inclusive_jets[i].m()); + jet->setJetP4(m_jetchargedp4,chargejetp4); + } + + if((m_pfoJES->modify(*vertjets)).isFailure()){ + ATH_MSG_ERROR(" Failed to calibrate PU jet container "); + return pu_jets; + } + + pu_jets.jetCont = vertjets; + pu_jets.jetAuxCont = vertjetsAux; + return pu_jets; + } + + fastjet::PseudoJet JetForwardPFlowJvtTool::pfoToPseudoJet(const xAOD::PFO* pfo, const CP::PFO_JetMETConfig_charge& theCharge, const xAOD::Vertex *vx) const { + TLorentzVector pfo_p4; + if (CP::charged == theCharge){ + float pweight = m_weight; + if( (m_wpfotool->fillWeight(*pfo,pweight)).isSuccess() ){ + // Create a PSeudojet with the momentum of the selected IParticle + pfo_p4= TLorentzVector(pfo->p4().Px()*pweight,pfo->p4().Py()*pweight,pfo->p4().Pz()*pweight,pfo->e()*pweight); + } + } else if (CP::neutral == theCharge){ + pfo_p4= pfo->GetVertexCorrectedEMFourVec(*vx); + } + fastjet::PseudoJet psj(pfo_p4); + // User index is used to identify the xAOD object used for the PSeudoJet + if (CP::charged == theCharge){ + psj.set_user_index(pfo->index()); + }else{ + psj.set_user_index(-1); + } + + return psj; + } + + bool 
JetForwardPFlowJvtTool::isForwardJet(const xAOD::Jet *jet) const { + if (std::abs(jet->eta())<m_etaThresh) return false; + if (jet->pt()<m_forwardMinPt || (m_forwardMaxPt>0 && jet->pt()>m_forwardMaxPt) ) return false; + return true; + } + + bool JetForwardPFlowJvtTool::isCentralJet(const xAOD::Jet *jet) const { + if (std::abs(jet->eta())>m_etaThresh) return false; + if (jet->pt()<m_centerMinPt || (m_centerMaxPt>0 && jet->pt()>m_centerMaxPt)) return false; + return true; + } + + double JetForwardPFlowJvtTool::getRpt(const xAOD::Jet *jet) const { + double Rpt; + Rpt= jet->jetP4(m_jetchargedp4).Pt()/ jet->pt(); + return Rpt; + } + + std::size_t JetForwardPFlowJvtTool::getPV() const{ + if (m_includePV) return -1; + + //const xAOD::VertexContainer *vxCont = 0; + SG::ReadHandle<xAOD::VertexContainer> vxContHandle(m_vxContKey); + ATH_MSG_DEBUG("Successfully retrieved primary vertex container"); + for(const xAOD::Vertex *vx : *vxContHandle) { + if(vx->vertexType()==xAOD::VxType::PriVtx) return vx->index(); + } + ATH_MSG_WARNING("Couldn't identify the hard-scatter primary vertex (no vertex with \"vx->vertexType()==xAOD::VxType::PriVtx\" in the container)!"); + // this almost certainly isn't what we should do here, the + // caller doesn't check this for errors + return 0; + } + + StatusCode JetForwardPFlowJvtTool::tagTruth(const xAOD::JetContainer *jets,const xAOD::JetContainer *truthJets) { + SG::WriteDecorHandle<xAOD::JetContainer, bool> isHSHandle(m_isHSKey); + SG::WriteDecorHandle<xAOD::JetContainer, bool> isPUHandle(m_isPUKey); + + for(const xAOD::Jet *jet : *jets) { + bool ishs = false; + bool ispu = true; + for(const xAOD::Jet *tjet : *truthJets) { + if (tjet->p4().DeltaR(jet->p4())<0.3 && tjet->pt()>10e3) ishs = true; + if (tjet->p4().DeltaR(jet->p4())<0.6) ispu = false; + } + isHSHandle(*jet)=ishs; + isPUHandle(*jet)=ispu; + } + return StatusCode::SUCCESS; + } + diff --git a/Reconstruction/Jet/JetMomentTools/src/components/JetMomentTools_entries.cxx 
b/Reconstruction/Jet/JetMomentTools/src/components/JetMomentTools_entries.cxx index e9286347a60537436517cceb7cabfe81abe004fc..674e1747036892b52be33ae7661bf7aafc257992 100644 --- a/Reconstruction/Jet/JetMomentTools/src/components/JetMomentTools_entries.cxx +++ b/Reconstruction/Jet/JetMomentTools/src/components/JetMomentTools_entries.cxx @@ -5,6 +5,7 @@ #include "JetMomentTools/JetVertexFractionTool.h" #include "JetMomentTools/JetVertexTaggerTool.h" #include "JetMomentTools/JetForwardJvtTool.h" +#include "JetMomentTools/JetForwardPFlowJvtTool.h" #include "JetMomentTools/JetTrackMomentsTool.h" #include "JetMomentTools/JetTrackSumMomentsTool.h" #include "JetMomentTools/JetClusterMomentsTool.h" @@ -29,6 +30,7 @@ DECLARE_COMPONENT( JetWidthTool ) DECLARE_COMPONENT( JetVertexFractionTool ) DECLARE_COMPONENT( JetVertexTaggerTool ) DECLARE_COMPONENT( JetForwardJvtTool ) +DECLARE_COMPONENT(JetForwardPFlowJvtTool) DECLARE_COMPONENT( JetTrackMomentsTool ) DECLARE_COMPONENT( JetTrackSumMomentsTool ) DECLARE_COMPONENT( JetClusterMomentsTool ) diff --git a/Reconstruction/Jet/JetRec/CMakeLists.txt b/Reconstruction/Jet/JetRec/CMakeLists.txt index 767c7545bc1890ba5453b871b06cc801277ea2b2..c37361923ea85d8f008a887adafc6276e62abed3 100644 --- a/Reconstruction/Jet/JetRec/CMakeLists.txt +++ b/Reconstruction/Jet/JetRec/CMakeLists.txt @@ -7,8 +7,11 @@ atlas_subdir( JetRec ) set( extra_libs ) if( NOT GENERATIONBASE ) list( APPEND extra_libs xAODPFlow ) + if( NOT XAOD_ANALYSIS ) + list( APPEND extra_libs AthenaMonitoringKernelLib ) + endif() if( NOT XAOD_STANDALONE ) - list( APPEND extra_libs AthenaMonitoringKernelLib StoreGateLib ) + list( APPEND extra_libs StoreGateLib ) endif() endif() diff --git a/Reconstruction/Jet/JetRec/JetRec/JetCopier.h b/Reconstruction/Jet/JetRec/JetRec/JetCopier.h index 5d6f7f9855c4ce7ab7dad90e8cf15b36bfc16916..d6929efeb96d577afc4bc17fbeeb26c5404a7ec1 100644 --- a/Reconstruction/Jet/JetRec/JetRec/JetCopier.h +++ b/Reconstruction/Jet/JetRec/JetRec/JetCopier.h @@ 
-26,7 +26,7 @@ // This class doesn't (yet) exist for AnalysisBase, so in that release // we will simply have to rerun modifiers if we need them. -#ifndef XAOD_ANALYSIS +#ifndef XAOD_STANDALONE #include "StoreGate/ShallowCopyDecorDeps.h" #endif @@ -42,7 +42,7 @@ class JetCopier // Called in parent initialize() virtual StatusCode initialize() override; -#ifndef XAOD_ANALYSIS +#ifndef XAOD_STANDALONE // Needed to initialise the ShallowCopyDecorDeps object, which propagates // decorations on the original into the copy in StoreGate. // Override interface implementation in Athena only @@ -64,7 +64,7 @@ class JetCopier Gaudi::Property<bool> m_shallowCopy {this, "ShallowCopy", true, "True for shallow copy, false for deep copy"}; Gaudi::Property<bool> m_shallowIO {this, "ShallowIO", false, "True for storing only modified data"}; -#ifndef XAOD_ANALYSIS +#ifndef XAOD_STANDALONE SG::ShallowCopyDecorDeps<xAOD::JetContainer> m_decorDeps { this, "DecorDeps", {}, "List of decorations to propagate through the shallow copy." }; #endif diff --git a/Reconstruction/Jet/JetRec/JetRec/JetRecTool.h b/Reconstruction/Jet/JetRec/JetRec/JetRecTool.h index 00063abd483a6c585782d3f566c99d4f18de3b4b..129c80e58daa9002ebf3f89aee068abb28704d2d 100644 --- a/Reconstruction/Jet/JetRec/JetRec/JetRecTool.h +++ b/Reconstruction/Jet/JetRec/JetRec/JetRecTool.h @@ -62,7 +62,7 @@ #include "AsgDataHandles/WriteHandleKey.h" #include "JetEDM/PseudoJetVector.h" #include "JetRec/PseudoJetContainer.h" -#if !defined(GENERATIONBASE) && !defined(XAOD_STANDALONE) +#if !defined(GENERATIONBASE) && !defined(XAOD_ANALYSIS) #include "AthenaMonitoringKernel/GenericMonitoringTool.h" #endif @@ -101,7 +101,7 @@ public: /// Default returns 0 and adds no names. int outputContainerNames(std::vector<std::string>& connames) override; - + /// For trigger usage in grooming mode only : give the input ungroomed jet container. 
void setInputJetContainer(const xAOD::JetContainer* cont); @@ -117,7 +117,7 @@ private: std::unique_ptr<xAOD::JetContainer> groomJets() const; std::unique_ptr<xAOD::JetContainer> copyJets() const; - + // Properties. SG::WriteHandleKey<xAOD::JetContainer> m_outcoll {this, "OutputContainer", ""}; SG::ReadHandleKey<xAOD::JetContainer> m_incoll {this, "InputContainer", ""}; @@ -158,7 +158,7 @@ private: mutable std::vector<TStopwatch> m_modclocks; mutable std::vector<TStopwatch> m_conclocks; -#if !defined (GENERATIONBASE) && !defined (XAOD_STANDALONE) +#if !defined (GENERATIONBASE) && !defined (XAOD_ANALYSIS) ToolHandle<GenericMonitoringTool> m_monTool{this,"MonTool","","Monitoring tool"}; #endif diff --git a/Reconstruction/Jet/JetRec/Root/JetCopier.cxx b/Reconstruction/Jet/JetRec/Root/JetCopier.cxx index 07297e9e3787ebbdc43af43af0827671695a6e59..f198e1396a7ce1279d91ec36ef6040034f782ff2 100644 --- a/Reconstruction/Jet/JetRec/Root/JetCopier.cxx +++ b/Reconstruction/Jet/JetRec/Root/JetCopier.cxx @@ -36,7 +36,7 @@ StatusCode JetCopier::initialize() { } -#ifndef XAOD_ANALYSIS +#ifndef XAOD_STANDALONE // Setup helper to propagate decorations from original to copy StatusCode JetCopier::initWithOutput(const SG::WriteHandleKey<xAOD::JetContainer>& outputJets) { return m_decorDeps.initialize(m_inputJets, outputJets) ; @@ -54,14 +54,14 @@ StatusCode JetCopier::getAndRecordJets(SG::WriteHandle<xAOD::JetContainer>& jetH if(m_shallowCopy){ std::unique_ptr<xAOD::ShallowAuxContainer> auxCont_derived(static_cast<xAOD::ShallowAuxContainer*>(auxCont.release())); ATH_CHECK( jetHandle.record(std::move(jets), std::move(auxCont_derived)) ); -#ifndef XAOD_ANALYSIS +#ifndef XAOD_STANDALONE ATH_CHECK( m_decorDeps.linkDecors (m_inputJets) ); #endif } else{ std::unique_ptr<xAOD::JetAuxContainer> auxCont_derived(static_cast<xAOD::JetAuxContainer*>(auxCont.release())); ATH_CHECK( jetHandle.record(std::move(jets), std::move(auxCont_derived)) ); -#ifndef XAOD_ANALYSIS +#ifndef XAOD_STANDALONE 
ATH_CHECK( m_decorDeps.linkDecors (m_inputJets) ); #endif } diff --git a/Reconstruction/Jet/JetRec/Root/JetRecTool.cxx b/Reconstruction/Jet/JetRec/Root/JetRecTool.cxx index 6a338e63035ac6c8fb951b7e051fc4827da5c6bb..bde191552fd91758a2108e4e77432b89aa970d8a 100644 --- a/Reconstruction/Jet/JetRec/Root/JetRecTool.cxx +++ b/Reconstruction/Jet/JetRec/Root/JetRecTool.cxx @@ -23,7 +23,7 @@ #include "AsgDataHandles/ReadHandle.h" #include "AsgDataHandles/WriteHandle.h" -#if !defined (GENERATIONBASE) && !defined (XAOD_STANDALONE) +#if !defined (GENERATIONBASE) && !defined (XAOD_ANALYSIS) #include "AthenaMonitoringKernel/Monitored.h" #endif @@ -42,7 +42,7 @@ using xAOD::Jet; JetRecTool::JetRecTool(std::string myname) : AsgTool(myname), m_intool("",this), -#ifdef XAOD_STANDALONE +#ifdef XAOD_ANALYSIS m_hpjr("",this), #else m_hpjr("JetPseudojetRetriever/jpjr",this), @@ -114,7 +114,7 @@ StatusCode JetRecTool::initialize() { m_ppjr = new JetPseudojetRetriever(name()+"_retriever"); #else m_ppjr = nullptr; -#endif +#endif } ATH_MSG_INFO("Jet reconstruction mode: " << mode); // Check/set the input jet collection name. 
@@ -207,7 +207,7 @@ StatusCode JetRecTool::initialize() { for ( auto& clk : m_conclocks) { clk.Reset(); } - + ATH_MSG_INFO(prefix << "Input collection names:"); for (const auto& name : m_incolls) ATH_MSG_INFO(prefix << " " << name); ATH_MSG_INFO(prefix << "Output collection names:"); @@ -219,7 +219,7 @@ StatusCode JetRecTool::initialize() { m_conclock.Reset(); m_nevt = 0; -#if !defined (GENERATIONBASE) && !defined (XAOD_STANDALONE) +#if !defined (GENERATIONBASE) && !defined (XAOD_ANALYSIS) if (!m_monTool.empty()) ATH_CHECK(m_monTool.retrieve()); #endif @@ -318,7 +318,7 @@ const JetContainer* JetRecTool::build() const { } -#if !defined (GENERATIONBASE) && !defined (XAOD_STANDALONE) +#if !defined (GENERATIONBASE) && !defined (XAOD_ANALYSIS) // monitor jet multiplicity and basic jet kinematics auto njets = Monitored::Scalar<int>("nJets"); auto pt = Monitored::Collection("pt", *jetsHandle, [c=m_mevtogev]( const xAOD::Jet* jet ) { return jet->pt()*c; }); @@ -419,22 +419,22 @@ int JetRecTool::outputContainerNames(std::vector<std::string>& connames) { } //********************************************************************** - + void JetRecTool::setInputJetContainer(const xAOD::JetContainer* cont) { - m_trigInputJetsForGrooming = cont; + m_trigInputJetsForGrooming = cont; } //********************************************************************** std::unique_ptr<PseudoJetContainer> JetRecTool::collectPseudoJets() const{ - // PseudoJetContainer used for jet finding - + // PseudoJetContainer used for jet finding + m_inpclock.Start(false); - + auto allPseudoJets = std::make_unique<PseudoJetContainer>(); ATH_MSG_DEBUG("Fetching pseudojet inputs."); - + for (const auto& pjcontkey : m_psjsin) { SG::ReadHandle<PseudoJetContainer> h_newpsjs( pjcontkey ); ATH_MSG_DEBUG("Adding PseudoJetContainers for: " << h_newpsjs.key()); @@ -452,9 +452,9 @@ std::unique_ptr<PseudoJetContainer> JetRecTool::collectPseudoJets() const{ } 
//********************************************************************** - + std::unique_ptr<xAOD::JetContainer> JetRecTool::fillOutputContainer() const{ - + if (!m_finder.empty()) {return findJets();} if (!m_groomer.empty()) {return groomJets();} return copyJets(); @@ -480,7 +480,7 @@ const xAOD::JetContainer* JetRecTool::getOldJets() const{ if ( pjetsin == 0 ) { ATH_MSG_ERROR("Unable to retrieve input jet container: " << m_incoll.key()); } else { - ATH_MSG_DEBUG("Input collection " << m_incoll.key() + ATH_MSG_DEBUG("Input collection " << m_incoll.key() << " jet multiplicity is "<< pjetsin->size()); } m_totclock.Stop(); @@ -512,14 +512,14 @@ std::unique_ptr<xAOD::JetContainer> JetRecTool::makeOutputContainer() const{ //********************************************************************** std::unique_ptr<xAOD::JetContainer> JetRecTool::findJets() const { - + m_actclock.Start(false); ATH_MSG_DEBUG("Finding jets."); - + // The new jet collection. auto jets = makeOutputContainer(); - - // PseudoJetContainer used for jet finding + + // PseudoJetContainer used for jet finding auto pseudoJets = collectPseudoJets(); m_finder->find(*pseudoJets, *jets, m_inputtype); @@ -531,9 +531,9 @@ std::unique_ptr<xAOD::JetContainer> JetRecTool::findJets() const { //********************************************************************** std::unique_ptr<xAOD::JetContainer> JetRecTool::groomJets() const{ - + m_actclock.Start(false); - + // The new jet collection. 
auto jets = makeOutputContainer(); @@ -544,16 +544,16 @@ std::unique_ptr<xAOD::JetContainer> JetRecTool::groomJets() const{ ATH_MSG_WARNING("Grooming: but input jets not found "); return jets; } - + ATH_MSG_DEBUG("Grooming " << jetsIn->size() << " jets."); - // PseudoJetContainer used for jet finding + // PseudoJetContainer used for jet finding auto pseudoJets = collectPseudoJets(); - + for (const auto ijet : *jetsIn){ m_groomer->groom(*ijet, *pseudoJets, *jets);} - + m_actclock.Stop(); return jets; } @@ -561,10 +561,10 @@ std::unique_ptr<xAOD::JetContainer> JetRecTool::groomJets() const{ //********************************************************************** std::unique_ptr<xAOD::JetContainer> JetRecTool::copyJets() const{ - + // The new jet collection. auto jets = makeOutputContainer(); - + // Retrieve the old jet collection. auto jetsIn = getOldJets(); @@ -581,7 +581,7 @@ std::unique_ptr<xAOD::JetContainer> JetRecTool::copyJets() const{ jets->push_back(pnewjet); *pnewjet = *poldjet; } - + m_actclock.Stop(); return jets; } diff --git a/Reconstruction/Jet/JetRec/src/JetRecAlg.cxx b/Reconstruction/Jet/JetRec/src/JetRecAlg.cxx index 5368dde708bc6f9a7bf1b9fb5219a3ddca67fb23..fba500de2eb5c157e39df4339e16425d75a1fdb3 100644 --- a/Reconstruction/Jet/JetRec/src/JetRecAlg.cxx +++ b/Reconstruction/Jet/JetRec/src/JetRecAlg.cxx @@ -10,6 +10,9 @@ #include "JetInterface/IJetExecuteTool.h" #include "xAODJet/JetAuxContainer.h" +#if !defined (GENERATIONBASE) && !defined (XAOD_STANDALONE) + #include "AthenaMonitoringKernel/Monitored.h" +#endif using std::string; @@ -32,6 +35,12 @@ StatusCode JetRecAlg::initialize() { ATH_MSG_INFO(" --> : "<< t->name()); } + ATH_CHECK(m_output.initialize()); + +#if !defined (GENERATIONBASE) && !defined (XAOD_STANDALONE) + if (!m_monTool.empty()) ATH_CHECK(m_monTool.retrieve()); +#endif + return StatusCode::SUCCESS; } @@ -50,9 +59,47 @@ StatusCode JetRecAlg::execute(const EventContext& ctx) const { // needn't know the type of the jet aux container 
// We can subsequently access the jets from the handle and don't have to // worry about memory management. + +#if !defined (GENERATIONBASE) && !defined (XAOD_STANDALONE) + auto t_total = Monitored::Timer<std::chrono::milliseconds>( "TIME_total" ); + SG::WriteHandle<xAOD::JetContainer> jetContHandle(m_output,ctx); + + auto t_jpv = Monitored::Timer<std::chrono::microseconds>( "TIME_jetprovider" ); ATH_CHECK( m_jetprovider->getAndRecordJets(jetContHandle) ); + auto mon_jpv = Monitored::Group(m_monTool, t_jpv); + + ATH_MSG_DEBUG("Created jet container of size "<< jetContHandle->size() << " | writing to "<< m_output.key() ); + + ATH_MSG_DEBUG("Applying jet modifiers to " << m_output.key()); + + // Calculate moments, calibrate, sort, filter... ----------- + auto t_mod = Monitored::Timer<std::chrono::milliseconds>( "TIME_modifiers_total" ); + for(const ToolHandle<IJetModifier>& t : m_modifiers){ + std::string modname = t.name(); + auto t_mods = Monitored::Timer<std::chrono::microseconds>( Form("TIME_modifier_%s",modname.c_str()) ); + ATH_MSG_DEBUG("Running " << modname); + ATH_CHECK(t->modify(*jetContHandle)); + auto mon_mods = Monitored::Group(m_monTool, t_mods); + } + auto mon_mod_total = Monitored::Group(m_monTool, t_mod); + + // monitor jet multiplicity and basic jet kinematics + auto njets = Monitored::Scalar<int>("nJets"); + auto pt = Monitored::Collection("pt", *jetContHandle, [c=0.001]( const xAOD::Jet* jet ) { return jet->pt()*c; }); + auto et = Monitored::Collection("et", *jetContHandle, [c=0.001]( const xAOD::Jet* jet ) { return jet->p4().Et()*c; }); + auto eta = Monitored::Collection("eta", *jetContHandle, []( const xAOD::Jet* jet ) { return jet->eta(); }); + auto phi = Monitored::Collection("phi", *jetContHandle, []( const xAOD::Jet* jet ) { return jet->phi(); }); + auto mon = Monitored::Group(m_monTool,njets,pt,et,eta,phi); + njets = jetContHandle->size(); + + auto mon_total = Monitored::Group(m_monTool, t_total); +#else + + 
SG::WriteHandle<xAOD::JetContainer> jetContHandle(m_output,ctx); + + ATH_CHECK( m_jetprovider->getAndRecordJets(jetContHandle) ); ATH_MSG_DEBUG("Created jet container of size "<< jetContHandle->size() << " | writing to "<< m_output.key() ); ATH_MSG_DEBUG("Applying jet modifiers to " << m_output.key()); @@ -63,6 +110,8 @@ StatusCode JetRecAlg::execute(const EventContext& ctx) const { ATH_CHECK(t->modify(*jetContHandle)); } +#endif + return StatusCode::SUCCESS; } diff --git a/Reconstruction/Jet/JetRec/src/JetRecAlg.h b/Reconstruction/Jet/JetRec/src/JetRecAlg.h index 1ccd79b6e5e43c8861eebb22d32c00b5886ef642..80e5229ba37b336b841c47224f0ba016c9a33aa4 100644 --- a/Reconstruction/Jet/JetRec/src/JetRecAlg.h +++ b/Reconstruction/Jet/JetRec/src/JetRecAlg.h @@ -1,6 +1,6 @@ // this is a -*- C++ -*- file /* - Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration */ //////////////////////////////////////////////////// @@ -23,6 +23,9 @@ #include "JetInterface/IJetProvider.h" #include "JetInterface/IJetModifier.h" +#if !defined(GENERATIONBASE) && !defined(XAOD_ANALYSIS) + #include "AthenaMonitoringKernel/GenericMonitoringTool.h" +#endif class IJetExecuteTool; @@ -43,7 +46,10 @@ private: ToolHandle<IJetProvider> m_jetprovider ={this , "Provider" , {} , "Tool providing the jets (fastjet, copy, grooming...)"}; ToolHandleArray<IJetModifier> m_modifiers = {this , "Modifiers", {}, "moment calculators" }; SG::WriteHandleKey<xAOD::JetContainer> m_output= {this, "OutputContainer", "AntiKt4LCtopoJets", "The output jet container name"}; - +#if !defined (GENERATIONBASE) && !defined (XAOD_STANDALONE) + ToolHandle<GenericMonitoringTool> m_monTool{this,"MonTool","","Monitoring tool"}; +#endif + }; #endif diff --git a/Reconstruction/Jet/JetRecConfig/python/JetRecConfig.py b/Reconstruction/Jet/JetRecConfig/python/JetRecConfig.py index 
beeaf5bfbcbfdbdd42620d4f734887b199eeb358..d807163ac317f470a4a3c1f2ff29cf87ca46940f 100644 --- a/Reconstruction/Jet/JetRecConfig/python/JetRecConfig.py +++ b/Reconstruction/Jet/JetRecConfig/python/JetRecConfig.py @@ -292,7 +292,7 @@ def getJetAlgorithm(jetname, jetdef, pjContNames, monTool = None): # New JetRecAlgorithm to replace JetRecTool # This call is for a JRA that runs jet-finding # -def getJetRecAlg( jetdef): +def getJetRecAlg( jetdef, monTool = None): """ """ pjContNames = jetdef._internalAtt['finalPJContainer'] jclust = CompFactory.JetClusterer( @@ -312,7 +312,8 @@ def getJetRecAlg( jetdef): "jetrecalg_"+jetname, Provider = jclust, Modifiers = mods, - OutputContainer = jetname) + OutputContainer = jetname, + MonTool = monTool) autoconfigureModifiers(jra.Modifiers, jetname) diff --git a/Reconstruction/MuonIdentification/MuidTrackBuilder/MuidTrackBuilder/CombinedMuonTrackBuilder.h b/Reconstruction/MuonIdentification/MuidTrackBuilder/MuidTrackBuilder/CombinedMuonTrackBuilder.h index 81dd5e2324b7142acfe23925d7de50a79669f7e8..3255fef13fa397548388bfef3873ff86b836bce7 100755 --- a/Reconstruction/MuonIdentification/MuidTrackBuilder/MuidTrackBuilder/CombinedMuonTrackBuilder.h +++ b/Reconstruction/MuonIdentification/MuidTrackBuilder/MuidTrackBuilder/CombinedMuonTrackBuilder.h @@ -62,37 +62,42 @@ namespace Rec { class CombinedMuonTrackBuilder : public AthAlgTool, virtual public ICombinedMuonTrackBuilder { public: CombinedMuonTrackBuilder(const std::string& type, const std::string& name, const IInterface* parent); - ~CombinedMuonTrackBuilder(); + virtual ~CombinedMuonTrackBuilder(); - StatusCode initialize(); - StatusCode finalize(); + virtual StatusCode initialize() override; + virtual StatusCode finalize() override; /** ICombinedMuonTrackBuilder interface: build and fit combined ID/Calo/MS track */ + virtual Trk::Track* combinedFit(const Trk::Track& indetTrack, const Trk::Track& extrapolatedTrack, - const Trk::Track& spectrometerTrack) const; + const Trk::Track& 
spectrometerTrack) const override; /** ICombinedMuonTrackBuilder interface: build and fit indet track extended to include MS Measurement set. Adds material effects as appropriate plus calo energy-loss treatment */ + virtual Trk::Track* indetExtension(const Trk::Track& indetTrack, const Trk::MeasurementSet& spectrometerMeas, const Trk::TrackParameters* innerParameters, const Trk::TrackParameters* middleParameters, - const Trk::TrackParameters* outerParameters) const; + const Trk::TrackParameters* outerParameters) const override; /** ICombinedMuonTrackBuilder interface: propagate to perigee adding calo energy-loss and material to MS track */ + virtual Trk::Track* standaloneFit(const Trk::Track& spectrometerTrack, const Trk::Vertex* vertex, float bs_x, float bs_y, - float bs_z) const; + float bs_z) const override; /** ICombinedMuonTrackBuilder interface: refit a track removing any indet measurements with optional addition of pseudoMeasurements */ - Trk::Track* standaloneRefit(const Trk::Track& combinedTrack, float bs_x, float bs_y, float bs_z) const; + virtual + Trk::Track* standaloneRefit(const Trk::Track& combinedTrack, float bs_x, float bs_y, float bs_z) const override; using ICombinedMuonTrackBuilder::fit; /*refit a track */ + virtual Trk::Track* fit(Trk::Track& track, const Trk::RunOutlierRemoval runOutlier = false, - const Trk::ParticleHypothesis particleHypothesis = Trk::muon) const; + const Trk::ParticleHypothesis particleHypothesis = Trk::muon) const override; /** fit a set of MeasurementBase objects with starting value for perigeeParameters */ diff --git a/Reconstruction/MuonIdentification/MuidTrackBuilder/MuidTrackBuilder/OutwardsCombinedMuonTrackBuilder.h b/Reconstruction/MuonIdentification/MuidTrackBuilder/MuidTrackBuilder/OutwardsCombinedMuonTrackBuilder.h index 6dca3a9b27c2cc5771656e9cbfff84b0a66da91f..9596475e391125c0119e7f43ed33927d42b089d6 100644 --- 
a/Reconstruction/MuonIdentification/MuidTrackBuilder/MuidTrackBuilder/OutwardsCombinedMuonTrackBuilder.h +++ b/Reconstruction/MuonIdentification/MuidTrackBuilder/MuidTrackBuilder/OutwardsCombinedMuonTrackBuilder.h @@ -42,36 +42,41 @@ namespace Rec { class OutwardsCombinedMuonTrackBuilder : public AthAlgTool, virtual public ICombinedMuonTrackBuilder { public: OutwardsCombinedMuonTrackBuilder(const std::string& type, const std::string& name, const IInterface* parent); - ~OutwardsCombinedMuonTrackBuilder()=default; + virtual ~OutwardsCombinedMuonTrackBuilder()=default; - StatusCode initialize(); + virtual StatusCode initialize() override; /** ICombinedMuonTrackBuilder interface: build and fit combined ID/Calo/MS track */ + virtual Trk::Track* combinedFit(const Trk::Track& indetTrack, const Trk::Track& extrapolatedTrack, - const Trk::Track& spectrometerTrack) const; + const Trk::Track& spectrometerTrack) const override; /** ICombinedMuonTrackBuilder interface: build and fit indet track extended to include MS Measurement set. 
Adds material effects as appropriate plus calo energy-loss treatment */ + virtual Trk::Track* indetExtension(const Trk::Track& indetTrack, const Trk::MeasurementSet& spectrometerMeas, const Trk::TrackParameters* innerParameters, const Trk::TrackParameters* middleParameters, - const Trk::TrackParameters* outerParameters) const; + const Trk::TrackParameters* outerParameters) const override; /** ICombinedMuonTrackBuilder interface: propagate to perigee adding calo energy-loss and material to MS track */ + virtual Trk::Track* standaloneFit(const Trk::Track& spectrometerTrack, const Trk::Vertex* vertex, float bs_x, float bs_y, - float bs_z) const; + float bs_z) const override; /** ICombinedMuonTrackBuilder interface: refit a track removing any indet measurements with optional addition of pseudoMeasurements according to original extrapolation */ - Trk::Track* standaloneRefit(const Trk::Track& combinedTrack, float bs_x, float bs_y, float bs_z) const; + virtual + Trk::Track* standaloneRefit(const Trk::Track& combinedTrack, float bs_x, float bs_y, float bs_z) const override; using ICombinedMuonTrackBuilder::fit; /** refit a track */ + virtual Trk::Track* fit(Trk::Track& track, const Trk::RunOutlierRemoval runOutlier = false, - const Trk::ParticleHypothesis particleHypothesis = Trk::muon) const; + const Trk::ParticleHypothesis particleHypothesis = Trk::muon) const override; /** @@ -104,7 +109,7 @@ class OutwardsCombinedMuonTrackBuilder : public AthAlgTool, virtual public IComb const Trk::RunOutlierRemoval runOutlier = false, const Trk::ParticleHypothesis particleHypothesis = Trk::muon) const; - void cleanUp() const override; + virtual void cleanUp() const override; private: Trk::Track* addIDMSerrors(Trk::Track* track) const; diff --git a/Reconstruction/MuonIdentification/MuonCombinedBaseTools/src/MuonCombinedTool.h b/Reconstruction/MuonIdentification/MuonCombinedBaseTools/src/MuonCombinedTool.h index 
848754504049281078c163bfe77bd90ab8f28a82..133b339d8867f1189caba0a293a5e6a441ccd1ea 100644 --- a/Reconstruction/MuonIdentification/MuonCombinedBaseTools/src/MuonCombinedTool.h +++ b/Reconstruction/MuonIdentification/MuonCombinedBaseTools/src/MuonCombinedTool.h @@ -23,14 +23,15 @@ namespace MuonCombined { public: MuonCombinedTool(const std::string& type, const std::string& name, const IInterface* parent); - ~MuonCombinedTool()=default; + virtual ~MuonCombinedTool()=default; - StatusCode initialize(); + virtual StatusCode initialize() override; + virtual void combine(const MuonCandidateCollection& muonCandidates,const InDetCandidateCollection& inDetCandidates, std::vector<InDetCandidateToTagMap*> tagMaps, - TrackCollection* combinedTracks, TrackCollection* METracks) const; + TrackCollection* combinedTracks, TrackCollection* METracks) const override; - void cleanUp() const override; + virtual void cleanUp() const override; private: diff --git a/Reconstruction/MuonIdentification/MuonCombinedTrackFindingTools/src/MuonLayerAmbiguitySolverTool.h b/Reconstruction/MuonIdentification/MuonCombinedTrackFindingTools/src/MuonLayerAmbiguitySolverTool.h index d746543df4807fd9d593eec4aea474705e6517be..bce590e43a3cf52807ae14c85ea458c4a4951723 100644 --- a/Reconstruction/MuonIdentification/MuonCombinedTrackFindingTools/src/MuonLayerAmbiguitySolverTool.h +++ b/Reconstruction/MuonIdentification/MuonCombinedTrackFindingTools/src/MuonLayerAmbiguitySolverTool.h @@ -29,13 +29,13 @@ namespace Muon { /** Default AlgTool functions */ MuonLayerAmbiguitySolverTool(const std::string& type, const std::string& name, const IInterface* parent); virtual ~MuonLayerAmbiguitySolverTool()=default; - StatusCode initialize(); + virtual StatusCode initialize() override; /**IMuonLayerAmbiguitySolverTool interface: find */ - void resolveOverlaps( const std::vector<Muon::MuonLayerRecoData>& allLayers, std::vector< MuonCandidate >& resolvedCandidates ) const; + virtual void resolveOverlaps( const 
std::vector<Muon::MuonLayerRecoData>& allLayers, std::vector< MuonCandidate >& resolvedCandidates ) const override; - void cleanUp() const override; + virtual void cleanUp() const override; private: void buildLayerVec( const std::vector<MuonLayerRecoData>& allLayers, diff --git a/Reconstruction/PFlow/PFlowUtils/CMakeLists.txt b/Reconstruction/PFlow/PFlowUtils/CMakeLists.txt index 75fb320d21bddaaedf21ceb196b0d0ce46bf1f1a..eb531fa72de07df9b45a9b6fe73b95f84c4c11cf 100644 --- a/Reconstruction/PFlow/PFlowUtils/CMakeLists.txt +++ b/Reconstruction/PFlow/PFlowUtils/CMakeLists.txt @@ -9,7 +9,7 @@ atlas_add_library( PFlowUtilsLib PUBLIC_HEADERS PFlowUtils LINK_LIBRARIES AsgTools xAODPFlow ) -if( NOT XAOD_STANDALONE ) +if( NOT XAOD_ANALYSIS ) atlas_add_component( PFlowUtils src/*.h src/*.cxx src/components/*.cxx LINK_LIBRARIES AthContainers AthenaBaseComps xAODJet xAODPFlow xAODTruth diff --git a/Reconstruction/RecExample/RecExCommon/share/RecExCommon_topOptions.py b/Reconstruction/RecExample/RecExCommon/share/RecExCommon_topOptions.py index 6529e23a8e2e760288c56f4d5cfcd484d49eee0c..9988b612aecf6cb103f2aabad48141cd2792cfb8 100644 --- a/Reconstruction/RecExample/RecExCommon/share/RecExCommon_topOptions.py +++ b/Reconstruction/RecExample/RecExCommon/share/RecExCommon_topOptions.py @@ -554,42 +554,57 @@ if rec.readESD() and rec.doESD(): rec.doTrigger=False recAlgs.doTrigger=False logRecExCommon_topOptions.info("detected re-reconstruction from ESD, will switch trigger OFF !") -#try: + +# Disable Trigger output reading in MC if there is none, unless running Trigger selection algorithms +if not globalflags.InputFormat.is_bytestream() and not recAlgs.doTrigger: + try: + from RecExConfig.ObjKeyStore import cfgKeyStore + from PyUtils.MetaReaderPeeker import convert_itemList + cfgKeyStore.addManyTypesInputFile(convert_itemList(layout='#join')) + # Check for Run-1, Run-2 or Run-3 Trigger content in the input file + if not cfgKeyStore.isInInputFile("HLT::HLTResult", "HLTResult_EF") \ + 
and not cfgKeyStore.isInInputFile("xAOD::TrigNavigation", "TrigNavigation") \ + and not cfgKeyStore.isInInputFile("xAOD::TrigCompositeContainer", "HLTNav_Summary"): + logRecExCommon_topOptions.info('Disabled rec.doTrigger because recAlgs.doTrigger=False and there is no Trigger content in the input file') + rec.doTrigger = False + except Exception: + logRecExCommon_topOptions.warning('Failed to check input file for Trigger content, leaving rec.doTrigger value unchanged (%s)', rec.doTrigger) + if rec.doTrigger: - if globalflags.DataSource() == 'data'and globalflags.InputFormat == 'bytestream': + if globalflags.DataSource() == 'data' and globalflags.InputFormat == 'bytestream': try: include("TriggerJobOpts/BStoESD_Tier0_HLTConfig_jobOptions.py") except Exception: treatException("Could not import TriggerJobOpts/BStoESD_Tier0_HLTConfig_jobOptions.py . Switching trigger off !" ) - recAlgs.doTrigger=False + rec.doTrigger = recAlgs.doTrigger = False else: try: from TriggerJobOpts.TriggerGetter import TriggerGetter triggerGetter = TriggerGetter() except Exception: treatException("Could not import TriggerJobOpts.TriggerGetter . Switched off !" 
) - recAlgs.doTrigger=False - -# Run-3 Trigger Outputs: Don't run any trigger - only pass the HLT contents from ESD to AOD -if rec.readESD() and rec.doAOD(): - from AthenaConfiguration.AllConfigFlags import ConfigFlags - # The simplest protection in case ConfigFlags.Input.Files is not set, doesn't cover all cases: - if ConfigFlags.Input.Files == ['_ATHENA_GENERIC_INPUTFILE_NAME_'] and athenaCommonFlags.FilesInput(): - ConfigFlags.Input.Files = athenaCommonFlags.FilesInput() - - if ConfigFlags.Trigger.EDMVersion == 3: - # Add HLT output - from TriggerJobOpts.HLTTriggerResultGetter import HLTTriggerResultGetter - hltOutput = HLTTriggerResultGetter() - # Add Trigger menu metadata - if rec.doFileMetaData(): - from RecExConfig.ObjKeyStore import objKeyStore - metadataItems = [ "xAOD::TriggerMenuContainer#TriggerMenu", - "xAOD::TriggerMenuAuxContainer#TriggerMenuAux." ] - objKeyStore.addManyTypesMetaData( metadataItems ) - # Add L1 output (to be consistent with R2) - from TrigEDMConfig.TriggerEDM import getLvl1AODList - objKeyStore.addManyTypesStreamAOD(getLvl1AODList()) + rec.doTrigger = recAlgs.doTrigger = False + + # ESDtoAOD Run-3 Trigger Outputs: Don't run any trigger - only pass the HLT contents from ESD to AOD + if rec.readESD() and rec.doAOD(): + from AthenaConfiguration.AllConfigFlags import ConfigFlags + # The simplest protection in case ConfigFlags.Input.Files is not set, doesn't cover all cases: + if ConfigFlags.Input.Files == ['_ATHENA_GENERIC_INPUTFILE_NAME_'] and athenaCommonFlags.FilesInput(): + ConfigFlags.Input.Files = athenaCommonFlags.FilesInput() + + if ConfigFlags.Trigger.EDMVersion == 3: + # Add HLT output + from TriggerJobOpts.HLTTriggerResultGetter import HLTTriggerResultGetter + hltOutput = HLTTriggerResultGetter() + # Add Trigger menu metadata + if rec.doFileMetaData(): + from RecExConfig.ObjKeyStore import objKeyStore + metadataItems = [ "xAOD::TriggerMenuContainer#TriggerMenu", + "xAOD::TriggerMenuAuxContainer#TriggerMenuAux." 
] + objKeyStore.addManyTypesMetaData( metadataItems ) + # Add L1 output (to be consistent with R2) + from TrigEDMConfig.TriggerEDM import getLvl1AODList + objKeyStore.addManyTypesStreamAOD(getLvl1AODList()) AODFix_postTrigger() diff --git a/Reconstruction/RecExample/RecExConfig/python/RecAlgsFlags.py b/Reconstruction/RecExample/RecExConfig/python/RecAlgsFlags.py index 720352aba5d741d5d4e75267513ca4c50c28cfa6..77ae0617c99778f5eecd80b765b95d91736b2718 100755 --- a/Reconstruction/RecExample/RecExConfig/python/RecAlgsFlags.py +++ b/Reconstruction/RecExample/RecExConfig/python/RecAlgsFlags.py @@ -102,9 +102,11 @@ class doEgammaBremReco(JobProperty): StoredValue=False class doTrigger(JobProperty): """ Switch for trigger""" + # TODO: Remove this flag and assume False in all client configuration + # once the Run-2 trigger execution is removed from release 22 statusOn=True allowedTypes=['bool'] - StoredValue=True + StoredValue=False class doAtlfast(JobProperty): """ Switch for fast simulation (but normally run at ESD->AOD stage)""" statusOn=True diff --git a/Reconstruction/RecExample/RecExOnline/CMakeLists.txt b/Reconstruction/RecExample/RecExOnline/CMakeLists.txt index 453eee1c0cd76f2f7a1d94cc9100248547392057..b77e0edbf06f8d4ecaf858d8de9aab8e2a4b1c06 100644 --- a/Reconstruction/RecExample/RecExOnline/CMakeLists.txt +++ b/Reconstruction/RecExample/RecExOnline/CMakeLists.txt @@ -1,13 +1,11 @@ -################################################################################ -# Package: RecExOnline -################################################################################ +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration # Declare the package name: atlas_subdir( RecExOnline ) # Install files from the package: -atlas_install_python_modules( python/*.py ) +atlas_install_python_modules( python/*.py POST_BUILD_CMD ${ATLAS_FLAKE8} ) atlas_install_joboptions( share/*.py ) -atlas_install_runtime( test/RecExOnline_TestConfiguration.xml share/RootHis*.C 
share/RecExOnline_Test*.C ) +atlas_install_runtime( share/RecExOnline_Test*.C ) atlas_install_scripts( scripts/*.sh ) diff --git a/Reconstruction/RecExample/RecExOnline/python/OnlineISConfiguration.py b/Reconstruction/RecExample/RecExOnline/python/OnlineISConfiguration.py index fbda833b60f61ad6cb9b99936c99e48464327dc5..bc2856ea6311ebb98189d645f2771960ca43d2fd 100644 --- a/Reconstruction/RecExample/RecExOnline/python/OnlineISConfiguration.py +++ b/Reconstruction/RecExample/RecExOnline/python/OnlineISConfiguration.py @@ -1,10 +1,5 @@ # Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration -from __future__ import print_function -from future import standard_library -standard_library.install_aliases() -import subprocess - import ispy @@ -14,7 +9,7 @@ def GetAtlasReady(): r4p = ISObject(IPCPartition("ATLAS"), 'RunParams.Ready4Physics', 'RunParams') r4p.checkout() return r4p.ready4physics - except: + except Exception: print ("#### Failed to determine if we are ready for physics") raise @@ -33,15 +28,15 @@ def GetRunType(): partition = "ATLAS" except KeyError: partition = "ATLAS" - mlog.warning("TDAQ_PARTITION not defined in environment, using %s as default" % partition) + mlog.warning("TDAQ_PARTITION not defined in environment, using %s as default", partition) #now try and read the information from IS try: from ipc import IPCPartition from ispy import ISObject - ipcPart = IPCPartition(partition); + ipcPart = IPCPartition(partition) if not ipcPart.isValid(): - raise UserWarning("Partition %s invalid - cannot access run type settings" % partition); + raise UserWarning("Partition %s invalid - cannot access run type settings" % partition) runparams = ISObject(ipcPart, 'RunParams.RunParams', 'RunParams') runparams.checkout() beamEnergy = runparams.beam_energy @@ -51,7 +46,7 @@ def GetRunType(): beamEnergy = None projectTag = None - mlog.info("Setting project tag to %s" % projectTag) + mlog.info("Setting project tag to %s", projectTag) return (None, 
beamEnergy, projectTag) # the BeamType in the IS RunParams is not useful for auto-configuration def GetBFields(): @@ -61,16 +56,16 @@ def GetBFields(): #BFields are read from initial partition partition = 'initial' - mlog.debug("Trying to read magnetic field configuration from partition %s" % partition) + mlog.debug("Trying to read magnetic field configuration from partition %s", partition) #now try and read the information from IS try: from ipc import IPCPartition - from ispy import ISObject - ipcPart = IPCPartition(partition); + ipcPart = IPCPartition(partition) if not ipcPart.isValid(): - raise UserWarning("Partition %s invalid - cannot access magnetic field setting" % partition); + raise UserWarning("Partition %s invalid - cannot access magnetic field setting" % partition) #Get the current and valid status + # from ispy import ISObject # torCurrent = ISObject(ipcPart, 'DCS_GENERAL.MagnetToroidsCurrent.value', 'DdcFloatInfo') # solCurrent = ISObject(ipcPart, 'DCS_GENERAL.MagnetSolenoidCurrent.value', 'DdcFloatInfo') # torInvalid = ISObject(ipcPart, 'DCS_GENERAL.MagnetToroidsCurrent.invalid', 'DdcIntInfo') @@ -107,8 +102,8 @@ def GetBFields(): sys.exit(1) #print the result - mlog.info("Magnetic field in solenoid is %s" % ((solOn and "ON") or "OFF")) - mlog.info("Magnetic field in toroid is %s" % ((torOn and "ON") or "OFF")) + mlog.info("Magnetic field in solenoid is %s", ((solOn and "ON") or "OFF")) + mlog.info("Magnetic field in toroid is %s", ((torOn and "ON") or "OFF")) #finally return our values return (solCurrent, torCurrent) diff --git a/Reconstruction/RecExample/RecExOnline/python/comparison_plot.py b/Reconstruction/RecExample/RecExOnline/python/comparison_plot.py deleted file mode 100644 index 998b759b7ce1ab1651d07f91164380ce84e5446f..0000000000000000000000000000000000000000 --- a/Reconstruction/RecExample/RecExOnline/python/comparison_plot.py +++ /dev/null @@ -1,228 +0,0 @@ -# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration - -""" 
-Draw comparison plots and statistical tables for each 2 histograms inside 2 data sets. -""" - -import rootpy.ROOT as ROOT -from rootpy.plotting.hist import Hist -from rootpy.plotting.style import get_style, set_style -import rootpy.plotting.root2matplotlib as rplt -import matplotlib -import matplotlib.pyplot as plt -import matplotlib.colors as colors -import matplotlib.ticker -import itertools -import numpy as np -import pandas as pd -import re -from collections import defaultdict -import HistCompare.test_statistics as TS -import copy -from UnbinnedHist import UnbinnedHist -from hash_tool import HashTool -matplotlib.rcParams["text.usetex"]=True -matplotlib.rcParams['text.latex.preamble']=['\\usepackage{booktabs}'] -def tree(): return defaultdict(tree) -#set_style('ATLAS',mpl=True) -def set_yaxis(ax, tick=None): - if tick == "%": - def to_percent(y, position): - s = format(100*y, '.1f') - if matplotlib.rcParams['text.usetex'] is True: - return s + r'$\%$' - else: - return s + '%' - formatter = matplotlib.ticker.FuncFormatter(to_percent) - ax.yaxis.set_major_formatter(formatter) -def identity(*x): - if len(x)==1: - return x[0] - return x -def formatted_plt_dict(plt_kw, length, **kwarg): - for i in range(length): - yield dict((key, val[i]) if np.iterable(val) else (key, val) for key, val in plt_kw.iteritems()) - -# keyword arguments for matplotlib.pyplot.text -TEXT_KW = { - "x": 0.94, - "y": 0.25, - "ha": "right", - "va": "center", - 'zorder': 2} - -# keyword arguments for auto_statistic_table -STATISTIC_TABLE_KW = { - "iterfunc": itertools.combinations, - "iterkey": {"r": 2}, - "row_title_func": identity, - "col_title_func": identity, - "table_fformat": ".2e", - "text_kw": TEXT_KW} - -# keyword arguments for matplotlib.pyplot.setp -AXES_KW = { - "ylim":(0,None)} - -# keyword arguments for matplotlib.pyplot.plot -PLT_KW = { - "linewidth": 2, - "color": None, - 'zorder': 1 - } - -def statistic_table_kw_func(ax): - statistic_table_kw = copy.deepcopy(STATISTIC_TABLE_KW) 
- statistic_table_kw["text_kw"].update({"axes": ax, "transform": ax.transAxes}) - return statistic_table_kw - -def auto_draw(hist_dict, cumulative = False, normalized = False, table = True, axes = None, frameon = True, cmap = plt.cm.brg, add_statistic_table_kw = {}, add_axes_kw = {}, add_plt_kw = {}): - """ - Draw comparison plots and statistical tables for each 2 histograms inside 2 data sets. - - Parameters - ---------- - hist_dict : pandas dataframe - Input data. *hist_dict* Should have 2 *rootpy.plotting.Hist*/*UnbinnedHist* instances - in each row. - cumulative : bool, optional - If True, then a histogram is computed where each bin gives the counts in that bin plus all bins for smaller - values. The last bin gives the total number of datapoints. If normed is also True then the histogram is - normalized such that the last bin equals 1. If cumulative evaluates to less than 0 (e.g., -1), the direction - of accumulation is reversed. In this case, if normed is also True, then the histogram is normalized such that - the first bin equals 1. - normalized : bool, optional - If False, the result will contain the number of samples in each bin. If True, the result is the value of the - probability density function at the bin, normalized such that the integral over the range is 1. - table : bool, optional - If True, places a statistical table on each axes. - axes : list of matplotlib axis, optional, default : None - if provided, plot on these axes. Note *axes* should have the same number as *hist_dict*. i.e.:: - >>> len(axes) = len(hist_dict) - frameon : bool, optional, default : True - If False, suppress drawing the figure frame - cmap : matplotlib.colors.Colormap instance or list of str, optional, default : plt.cm.brg - list of any matplotlib colors. 
- add_statistic_table_kw : dict, optional - Additional Keyword Arguments for ``statistic_table`` - add_axes_kw : dict, optional - Additional Keyword Arguments for ``matplotlib.pyplot.setp`` - add_plt_kw : dict, optional - dditional Keyword Arguments for ``matplotlib.pyplot.plot`` - - Returns - ------- - [ax1, ax2, ...] : list - list of matplotlib axes - """ - if axes == None: - axes = [plt.axes() for _ in hist_dict if plt.figure(frameon=frameon)] - for ax, kind in zip(axes, hist_dict): - statistic_table_kw = statistic_table_kw_func(ax) - statistic_table_kw.update(add_statistic_table_kw) - axes_kw = AXES_KW - axes_kw.update(add_axes_kw) - plt_kw = PLT_KW - plt_kw.update(add_plt_kw) - if cmap: - plt_kw["color"] = cmap(np.linspace(0, 1, len(hist_dict))) - plt_dicts = formatted_plt_dict(plt_kw, len(hist_dict)) - - for (i, h), d in zip(hist_dict[kind].iteritems(), plt_dicts): - entries = h.entries - h_ = h.clone("New1") - if normalized: - h_.Scale(1./h_.Integral()) - if cumulative: - arr = np.cumsum([bin.value for bin in h_ ]) - h_.set_content(arr) - for key, val in d.iteritems(): - setattr(h_,key,val) - rplt.hist(h_, label = "%s (%i)" % (statistic_table_kw["col_title_func"](i),entries), axes = ax) - - ######################################################################## - if np.all([type(v)==UnbinnedHist for v in hist_dict[kind].values]) and cumulative: - data_all = np.concatenate(map(UnbinnedHist.get_raw_data, hist_dict[kind].values)) - if normalized: - cdfs = [sorted(np.searchsorted(data.get_raw_data(), data_all, side='right')*1.0 / data.get_raw_data().shape[0]) for data in hist_dict[kind].values] - else: - cdfs = [sorted(np.searchsorted(data.get_raw_data(), data_all, side='right')) for data in hist_dict[kind].values] - for i, cdf, d in zip(hist_dict[kind].keys(), cdfs, plt_dicts): - plt.plot(sorted(data_all), cdf, label = "%s (%i)" % (statistic_table_kw["col_title_func"](i),entries), linestyle="--", axes = ax, **d) - 
######################################################################## - - ax.legend() - if table: - df = auto_statistic_table(hist_dict[kind], ax=ax, ret_table=False, ret_text=False, **statistic_table_kw) - if HASH.objs: - textkw = statistic_table_kw["text_kw"].copy() - text = ax.text(textkw.pop("x"), textkw.pop("y"), df[0].to_latex(escape=False, sparsify=False, index_names=False).replace("\n",""), **textkw) - ax.set_xlabel(kind, fontsize=25) - - plt.setp(ax, **axes_kw) - if normalized: - ax.set_ylim(0,1) - set_yaxis(ax,tick='%') - if not cumulative: - ax.set_ylabel("Events", fontsize=25) - else: - ax.set_ylabel("Cumulative Events", fontsize=25) - return axes - -HASH = HashTool('hist_dict', ret_table=True) -@HASH -def auto_statistic_table(hist_dict, ax=None, ret_table=True, ret_text=True, **statistic_table_kw): - """Compute statistic tables for the 2 histograms in each row of *hist_dict*. - - Parameters - ---------- - hist_dict : pandas dataframe - Input data. *hist_dict* Should have 2 *rootpy.plotting.Hist*/*UnbinnedHist* instances - in each row. - ax : matplotlib axis, optional, default : None - if provided, plot on this axis - statistic_table_kw : dict, optional - Other keyword arguments - ret_table : bool, optional, default : True - If True, returns the formatted table in *list* form. - ret_text : bool, optional, default : True - If True, returns the formatted table in *latex* form. - - Returns - ------- - [text[, table]] : list - depending on *ret_table* and *ret_text*, returns a list with required inside. 
- """ - header = ["Type of Test", "Bin", "T","P","NDF","Description"] - df_ = pd.DataFrame() - textkw = statistic_table_kw["text_kw"].copy() - for (nA,A),(nB,B) in statistic_table_kw["iterfunc"](hist_dict.iteritems(), **statistic_table_kw["iterkey"]): - A_Arr_Binned = np.array([bin.value for bin in A.bins()]) - B_Arr_Binned = np.array([bin.value for bin in B.bins()]) - bin_str = A_Arr_Binned.shape[0] - Table=[#["$\chi^2$ Absolute Comparison", bin_str] + list(TS.chi2_2samp(A_Arr_Binned, B_Arr_Binned, normed=False, binned=True)) + ["Scipy + Modificatoin"], - ["$\chi^2$ Shape Comparison",bin_str] + list(TS.chi2_2samp(A_Arr_Binned, B_Arr_Binned, normed=True, binned=True)) + ["Scipy + Modificatoin"], - ["",bin_str, A.Chi2Test(B,"WW CHI2"), A.Chi2Test(B,"WW"),'',"ROOT"], - ["Likelihood Ratio Shape Comparison",bin_str]+list(TS.likelihoodratio_ksamp(A_Arr_Binned, B_Arr_Binned))+["Scipy + Modification"], - #["Likelihood Value Shape Comparison",bin_str]+list(TS.likelihoodvalue_ksamp(A_Arr_Binned, B_Arr_Binned))+["Scipy + Modification"], - #["BDM Shape Comparison",bin_str] + list(TS.BDM_2samp(A_Arr_Binned, B_Arr_Binned))+["Scipy + Modification"], - ["K-S Shape Comparison",bin_str] + list(TS.ks_2samp(A_Arr_Binned,B_Arr_Binned,binned=True))+ ["SciPy + Modification"], - ["",bin_str,A.KolmogorovTest(B,"M"), A.KolmogorovTest(B), '',"ROOT"], - ["A-D Shape Comparison",bin_str] + list(TS.anderson_ksamp(A_Arr_Binned, B_Arr_Binned, binned=True)) + ["Scipy + Modification"], - ["CVM Shape Comparison",bin_str] + list(TS.CVM_2samp(A_Arr_Binned, B_Arr_Binned, binned=True)) + ["Matlab"]] - - if type(A)==UnbinnedHist and type(B)==UnbinnedHist: - Table.insert(8, ["","Unbinned"] + list(TS.ks_2samp(A.get_raw_data(), B.get_raw_data(), binned=False)) + ["SciPy + Modification"]) - Table.insert(10, ["","Unbinned"] + list(TS.anderson_ksamp(A.get_raw_data(), B.get_raw_data(), binned=False)) + ["Scipy"]) - Table.insert(12, ["","Unbinned"] + list(TS.CVM_2samp(A.get_raw_data(), B.get_raw_data(), 
binned=False)) + ["Matlab"]) - - df=pd.DataFrame(Table, columns=header) - df["T/P"] = map(lambda tp: "/".join(["%"+statistic_table_kw["table_fformat"]]*2)%(tp[0],tp[1]), zip(df["T"],df.P)) - df_ = df[header] - text = ax.text(textkw.pop("x"), textkw.pop("y"), df_.to_latex(escape=False, sparsify=False, index_names=False).replace("\n",""), **textkw) - result = [] - if ret_text: - result.append(text) - if ret_table: - result.append(df_) - return result diff --git a/Reconstruction/RecExample/RecExOnline/python/hash_tool.py b/Reconstruction/RecExample/RecExOnline/python/hash_tool.py index fc00b999ddcbcf277405b3520b5b7932b4727783..272c5621d41f9df97d68708d4a666beeba831684 100644 --- a/Reconstruction/RecExample/RecExOnline/python/hash_tool.py +++ b/Reconstruction/RecExample/RecExOnline/python/hash_tool.py @@ -1,9 +1,7 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration import inspect import warnings -import numpy as np -import sys class HashTool: def __init__(self, *args, **parse): diff --git a/Reconstruction/RecExample/RecExOnline/python/power_of_test.py b/Reconstruction/RecExample/RecExOnline/python/power_of_test.py index d63c9bcb31d5b58b2fc4b99bc344419783bf57af..f4d83c8e9a5f81687ebe977065d1dfd282f237d8 100644 --- a/Reconstruction/RecExample/RecExOnline/python/power_of_test.py +++ b/Reconstruction/RecExample/RecExOnline/python/power_of_test.py @@ -1,9 +1,8 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration from scipy.stats import rv_discrete import test_statistics as TS import numpy as np -import warnings # If True, import IPyParallel package to do the parallel computation. # And if IPyParallel is not installed, automatically turns it False. @@ -121,7 +120,7 @@ def power_of_test(data1, data2, rvs_func = 'rvs_pairs', tests = ['chi2_2samp'], [p1, p2, ...] 
: 1-D array The corresponding p-values for each histogram pairs. """ - if parallel == None: parallel = PARALLEL + if parallel is None: parallel = PARALLEL if parallel: try: global client @@ -131,7 +130,7 @@ def power_of_test(data1, data2, rvs_func = 'rvs_pairs', tests = ['chi2_2samp'], jobs = [] for i in range(N): rvs_key['size'] = (size//N + 1) if (i < size % N) else size//N - jobs.append(client[client.ids[i]].apply_async(power_of_test, data1, data2, rvs_func, test, rvs_key, test_key, False)) + jobs.append(client[client.ids[i]].apply_async(power_of_test, data1, data2, rvs_func, tests, rvs_key, test_key, False)) ars = client._asyncresult_from_jobs(jobs) if sync: ars.wait_interactive() diff --git a/Reconstruction/RecExample/RecExOnline/python/test_statistics.py b/Reconstruction/RecExample/RecExOnline/python/test_statistics.py index 3e956af00ed1f794665be69dddd5b0ae7cff02f6..1d949bc8f10dfbd43f03ee3924ea1866dc86ebef 100644 --- a/Reconstruction/RecExample/RecExOnline/python/test_statistics.py +++ b/Reconstruction/RecExample/RecExOnline/python/test_statistics.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration """ Test statistic functions. 
""" @@ -8,6 +8,7 @@ import numpy as np import scipy.stats import scipy.interpolate import scipy.special +import statsmodels.api as sm import warnings import distributions as distr from sympy import binomial, log @@ -94,7 +95,7 @@ def ks_2samp(data1, data2, binned=False): (0.07999999999999996, 0.41126949729859719) """ - if binned == True: + if binned is True: cdf1 = np.cumsum(data1) cdf2 = np.cumsum(data2) n1 = cdf1[-1] @@ -106,7 +107,7 @@ def ks_2samp(data1, data2, binned=False): n1 = data1.shape[0] n2 = data2.shape[0] ndf = float("nan") - if binned == False: + if binned is False: data1 = np.sort(data1) data2 = np.sort(data2) data_all = np.concatenate([data1, data2]) @@ -129,7 +130,7 @@ def ks_2samp(data1, data2, binned=False): en = np.sqrt(n1 * n2 / float(n1 + n2)) try: prob = scipy.stats.distributions.kstwobign.sf((en + 0.12 + 0.11 / en) * d) - except: + except Exception: prob = 1.0 if DEBUG: global statistic_seq @@ -169,7 +170,7 @@ def chi2_2samp(data1, data2, normed=True, binned=True): This code is modified from scipy.stats.chisquare and extended with supporting on 2 sample cases and shape comparison test. """ - if binned == True: + if binned is True: filter = ~((data1 == 0.) & (data2 == 0.)) data1 = data1[filter] data2 = data2[filter] @@ -385,7 +386,7 @@ def _anderson_ksamp_right(samples, Z, Zstar, k, n, N): A2kN = 0. 
lj = Z.searchsorted(Zstar[:-1], 'right') - Z.searchsorted(Zstar[:-1], 'left') Bj = lj.cumsum() - for i in arange(0, k): + for i in range(0, k): s = np.sort(samples[i]) Mij = s.searchsorted(Zstar[:-1], side='right') inner = lj / float(N) * (N * Mij - Bj * n[i])**2 / (Bj * (N - Bj)) diff --git a/Reconstruction/RecExample/RecExOnline/python/to_root.py b/Reconstruction/RecExample/RecExOnline/python/to_root.py deleted file mode 100644 index c355b1cf8d3b25c8f9e7ea5416b3280a2f7e7f9b..0000000000000000000000000000000000000000 --- a/Reconstruction/RecExample/RecExOnline/python/to_root.py +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -import ROOT -import numpy as np -import warnings -def fill_branch_by_arr(filename ,treename, list_of_branchname, list_of_entrylist): - of = ROOT.TFile(filename, 'update') - if of.GetListOfKeys().Contains(treename): - t = of.Get(treename) - warnings.warn('%r exsists.' % t, Warning) - else: - t = ROOT.TTree(treename, treename) - for branchname, entrylist in zip(list_of_branchname, list_of_entrylist): - t.SetEntries(entrylist.size) - address = np.zeros(1, 'float32') - br = t.Branch(branchname, address, branchname+'/F') - for en in entrylist: - address[0] = en - br.Fill() - of.Write("", 2) - of.Close() \ No newline at end of file diff --git a/Reconstruction/RecExample/RecExOnline/python/utils.py b/Reconstruction/RecExample/RecExOnline/python/utils.py index 205690c7d8d2e23e883caa0ab5ee228d6406b727..64568d05abd3cb4f6677bd1508ca156d2c36959a 100644 --- a/Reconstruction/RecExample/RecExOnline/python/utils.py +++ b/Reconstruction/RecExample/RecExOnline/python/utils.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration from collections import defaultdict import numpy as np def tree(key_wrapper = None): @@ -15,10 +15,10 @@ def get_array(hist): 
arr.SetSize(hist.GetNbinsX() + 2) return np.fromiter(arr, np.float) -TAIL = lambda s: u' └── {} '.format(s) -BRANCH = lambda s: u' ├── {} '.format(s) -LINE = lambda s: u' │ {} '.format(s) -SPACE = lambda s: u' {} '.format(s) +TAIL = lambda s: u' └── {} '.format(s) # noqa: E731 +BRANCH = lambda s: u' ├── {} '.format(s) # noqa: E731 +LINE = lambda s: u' │ {} '.format(s) # noqa: E731 +SPACE = lambda s: u' {} '.format(s) # noqa: E731 class TreeDict(defaultdict): def __init__(self, dic = {}, key_wrapper=None): super(defaultdict,self).__init__(dic) diff --git a/Reconstruction/RecExample/RecExOnline/share/RootHis_RTT.C b/Reconstruction/RecExample/RecExOnline/share/RootHis_RTT.C deleted file mode 100644 index cd21069e99e5a329662e71f3e096652f2db4d725..0000000000000000000000000000000000000000 --- a/Reconstruction/RecExample/RecExOnline/share/RootHis_RTT.C +++ /dev/null @@ -1,26 +0,0 @@ -#include <iostream> -#include <string> -#include <fstream> - -#include "TH1F.h" -#include "TFile.h" -#include "TCanvas.h" -#include "TSystem.h" -#include "TKey.h" - -void RootHis_RTT() -{ - - TFile *file = new TFile("Monitor.root"); - -// TH1F *h1_AK4LCTTopoJetsPt= (TH1F*)gDirectory->FindObjectAny("Jets/AntiKt4LCTTopoJets/pt"); - TH1F *h1_AK4LCTTopoJetsPt= (TH1F*)gDirectory->FindObjectAny("pt"); - - TCanvas *c1 = new TCanvas("c1", "GIF Canvas", 400, 300); - h1_AK4LCTTopoJetsPt->Draw("HistE"); - c1->Print("AK4LCTTopoJetsPt.png"); - - - -} - diff --git a/Reconstruction/RecExample/RecExOnline/share/SimpleJobOptions_ForRTT.py b/Reconstruction/RecExample/RecExOnline/share/SimpleJobOptions_ForRTT.py deleted file mode 100644 index b9728ff2d1e955d32d27fd8c2f4992374d981c0e..0000000000000000000000000000000000000000 --- a/Reconstruction/RecExample/RecExOnline/share/SimpleJobOptions_ForRTT.py +++ /dev/null @@ -1 +0,0 @@ -theApp.EvtMax = 5 diff --git a/Reconstruction/RecExample/RecExOnline/test/RecExOnline_TestConfiguration.xml b/Reconstruction/RecExample/RecExOnline/test/RecExOnline_TestConfiguration.xml 
deleted file mode 100755 index da82fc248d93be0230de70604b8a4272066d2a33..0000000000000000000000000000000000000000 --- a/Reconstruction/RecExample/RecExOnline/test/RecExOnline_TestConfiguration.xml +++ /dev/null @@ -1,386 +0,0 @@ -<?xml version="1.0"?> -<!-- MB: Search for RUNNUMBER to replace the run input collections and setting --> -<!-- MB: xml file validation: http://www.hep.ucl.ac.uk/atlas/AtlasTesting/DTD/onlineValidation/validator.html --> -<!-- <!DOCTYPE unifiedTestConfiguration SYSTEM "https://test-rtt.web.cern.ch/test-rtt/brinick/Results/unifiedTestConfiguration.dtd"> --> <!-- xmltest --> -<!DOCTYPE unifiedTestConfiguration SYSTEM "http://www.hep.ucl.ac.uk/atlas/AtlasTesting/DTD/unifiedTestConfiguration.dtd"> <!-- official --> -<!-- A test version --> - -<unifiedTestConfiguration> - - <atn/> - <kv/> - <rtt xmlns="http://www.hep.ucl.ac.uk/atlas/AtlasTesting/rtt"> - <rttContactPerson>Yun-Ju Lu</rttContactPerson> - <mailto>yunju@cern.ch</mailto> - <refRelease>20.1.X.Y</refRelease> - <jobList> - - <chain> - <chainName>OfflineChainJob</chainName> - <sequential> - <chainElement> - <jobTransform userJobId="RecExOnline_User_Offline_test_v1"> - <doc>Recexonline test</doc> - <jobTransformJobName>First_RecexOnline_test</jobTransformJobName> - <jobTransformCmd> - athena.py -c "inpuevtMax=850" RecExOnline/RecExOnline_User_Offline.py - </jobTransformCmd> - <group>RecExOnline_Test</group> - <queue>medium</queue> - - -<!-- - - <test> - <modulename>RttLibraryTools</modulename> - <testname>FileGrepper</testname> - - - <arg> - <fileGrepperArgs> - <fileGrepperInputFile>*v1_log</fileGrepperInputFile> - - <fileGrepperSearchPattern> - <fileGrepperPattern>Number of events processed</fileGrepperPattern> - </fileGrepperSearchPattern> - </fileGrepperArgs> - </arg> - </test> - - <test> - <modulename>RttLibraryTools</modulename> - <testname>ROOTMacroRunner</testname> - <arg> - <argname>macro</argname> - <argvalue>RootHis_RTT.C</argvalue> - </arg> - </test> - - <test> - 
<modulename>RttLibraryTools</modulename> - <testname>ROOTMacroRunner</testname> - <arg> - <argname>macro</argname> - <argvalue>RecExOnline_Test_Obj_His_RTT.C</argvalue> - </arg> - </test> ---> - <test position="1"> - <modulename>RttLibraryTools</modulename> - <testname>ROOTMacroRunner</testname> - <arg> - <argname>macro</argname> - <argvalue>RecExOnline_Test_Obj_His_RTT_Cpp_rename.C</argvalue> - </arg> - </test> - - <alwaysRunPostProc /> - </jobTransform> - <chainfileout>Monitor.root</chainfileout> - </chainElement> - - <chainElement> - <jobTransform userJobId="RTTAlgs"> - <doc>RTTAlgs</doc> - <jobTransformJobName>RTTAlgs</jobTransformJobName> - <jobTransformCmd> - athena.py RecExOnline/RTTAlgmain.py - </jobTransformCmd> - <group>RecExOnline_Test</group> - - <chaindataset_info> - <jobTransformData /> - <chaindatasetName>Monitor.root</chaindatasetName> - <!-- If the file on the previous line is not in the chain store, then use the following fallback file. --> - <dataset_info> - <jobTransformData /> - <datasetName>/afs/cern.ch/user/y/yunju/working/yunju/public/GM_ref_plots/Monitor.root</datasetName> - </dataset_info> - </chaindataset_info> - <testToRemove> - <jobGroupName>AthenaRecExOnline_Test</jobGroupName> - <testidentifier>FileGrepperprocessed</testidentifier> - </testToRemove> - - <alwaysRunPostProc /> - </jobTransform> - </chainElement> - - <chainElement> - <jobTransform userJobId="ohp_hispathcheck"> - <doc>ohp_hispathcheck</doc> - <jobTransformJobName>ohp_hispathcheck</jobTransformJobName> - <jobTransformCmd> - athena.py RecExOnline/Rtt_histogram.py - </jobTransformCmd> - <group>RecExOnline_Test</group> - - <testToRemove> - <jobGroupName>AthenaRecExOnline_Test</jobGroupName> - <testidentifier>FileGrepperprocessed</testidentifier> - </testToRemove> - - <alwaysRunPostProc /> - </jobTransform> - </chainElement> - - </sequential> - </chain> - - - <jobTransform userJobId="Online_reconstruction_autoconfiguration_test_v1"> - <doc>Recexonline test 
Online_reconstruction_autoconfiguration</doc> - <jobTransformJobName>Online_reconstruction_autoconfiguration_test_v1</jobTransformJobName> - <jobTransformCmd> - athena.py -c "REO_doAutoConfiguration=True" RecExOnline/RecExOnline_User_Offline_isOnlineTrue.py - </jobTransformCmd> - <group>RecExOnline_Test</group> - <queue>medium</queue> - <test position="1"> - <modulename>RttLibraryTools</modulename> - <testname>ROOTMacroRunner</testname> - <arg> - <argname>macro</argname> - <argvalue>RecExOnline_Test_Obj_His_RTT_Cpp_Online.C</argvalue> - </arg> - </test> - - - - - <alwaysRunPostProc /> - </jobTransform> - - <jobTransform userJobId="Online_reconstruction_autoconfiguration_False_test_v1"> - <doc>Recexonline test Online_reconstruction_autoconfiguration_False</doc> - <jobTransformJobName>Online_reconstruction_autoconfiguration_False_test_v1</jobTransformJobName> - <jobTransformCmd> - athena.py -c "REO_doAutoConfiguration=False" RecExOnline/RecExOnline_User_Offline_isOnlineTrue.py - </jobTransformCmd> - <group>RecExOnline_Test</group> - <queue>medium</queue> - <test position="1"> - <modulename>RttLibraryTools</modulename> - <testname>ROOTMacroRunner</testname> - <arg> - <argname>macro</argname> - <argvalue>RecExOnline_Test_Obj_His_RTT_Cpp_Online.C</argvalue> - </arg> - </test> - - - - - - - <alwaysRunPostProc /> - </jobTransform> - - <jobTransform userJobId="Online_reconstruction_autoconfiguration_False_cosmics_test_v1"> - <doc>Recexonline test Online_reconstruction_autoconfiguration_False_cosmics</doc> - <jobTransformJobName>Online_reconstruction_autoconfiguration_False_cosmics_test_v1</jobTransformJobName> - <jobTransformCmd> - athena.py -c "REO_doAutoConfiguration=False; beamType='cosmics'" RecExOnline/RecExOnline_User_Offline_isOnlineTrue.py - </jobTransformCmd> - <group>RecExOnline_Test</group> - <queue>medium</queue> - <test position="1"> - <modulename>RttLibraryTools</modulename> - <testname>ROOTMacroRunner</testname> - <arg> - <argname>macro</argname> - 
<argvalue>RecExOnline_Test_Obj_His_RTT_Cpp_Online.C</argvalue> - </arg> - </test> - - <alwaysRunPostProc /> - </jobTransform> - - <jobTransform userJobId="RTTTool_test_v1"> - <doc>RTTTool_test_v1</doc> - <jobTransformJobName>RTTTool_test_v1</jobTransformJobName> - <jobTransformCmd> - athena.py RecExOnline/SimpleJobOptions_ForRTT.py - </jobTransformCmd> - <group>RecExOnline_Test</group> - - <testToRemove> - <jobGroupName>AthenaRecExOnline_Test</jobGroupName> - <testidentifier>FileGrepperprocessed</testidentifier> - </testToRemove> - - <alwaysRunPostProc /> - </jobTransform> - - - - - </jobList> - <jobGroups> - <jobGroup name="AthenaRecExOnline_Test" parent="Athena"> - <keepFilePattern>*.root</keepFilePattern> - <keepFilePattern>*.png</keepFilePattern> - <keepFilePattern>testlog*.log</keepFilePattern> - <keepFilePattern>RTTAlg.html</keepFilePattern> - <keepFilePattern>P1MON_RTT_result_template_v1.html</keepFilePattern> - <auxFilePattern>RootHis*.C</auxFilePattern> - <auxFilePattern>RecExOnline_Test*.C</auxFilePattern> - <auxFilePattern>RecExOnline_Test_Obj_His_RTT.C</auxFilePattern> - <auxFilePattern>RecExOnline_Test_Obj_His_RTT_CINT.C</auxFilePattern> - -<!-- - <action> - <modulename>RttLibraryTools</modulename> - <testname>ROOTMacroRunner</testname> - <arg> - <argname>macro</argname> - <argvalue>RecExOnline_Test_Obj_His_RTT.C</argvalue> - </arg> - </action> - <action> - <modulename>RttLibraryTools</modulename> - <testname>ROOTMacroRunner</testname> - <arg> - <argname>macro</argname> - <argvalue>RecExOnline_Test_Obj_His_RTT_CINT.C</argvalue> - </arg> - </action> - <action> - <modulename>RttLibraryTools</modulename> - <testname>ROOTMacroRunner</testname> - <arg> - <argname>macro</argname> - <argvalue>RecExOnline_Test_Obj_His_RTT_CINT.C</argvalue> - </arg> - </action> ---> - - - <test position="2"> - - <modulename>RttLibraryTools</modulename> - <testname>FileGrepper</testname> - <testidentifier>FileGrepperprocessed</testidentifier> - - <arg> - 
<argname>inputFile</argname> - <argvalue>*v1_log</argvalue> <!-- put this one if you use job log --> - </arg> - <arg> - <argname>outputFile</argname> - <argvalue>testlogprocessed.log</argvalue> - </arg> - <arg> - <argname>searchList</argname> - <argvalue>Number of events processed</argvalue> <!-- put here the string you should find to mean all ok --> - </arg> - </test> - - <test position="3"> - - <modulename>RttLibraryTools</modulename> - <testname>FileGrepper</testname> - <testnegate /> - <testidentifier>FileGrepperERROR3</testidentifier> - <arg> - <argname>inputFile</argname> - <argvalue>*v1_log</argvalue> <!-- put this one if you use job log --> - </arg> - <arg> - <argname>outputFile</argname> - <argvalue>testlogError.log</argvalue> - </arg> - <arg> - <argname>searchList</argname> - <argvalue>ERROR</argvalue> <!-- put here the string you should find to mean all ok --> - - </arg> - <noalarm /> - </test> - <test position="4"> - - <modulename>RttLibraryTools</modulename> - <testname>FileGrepper</testname> - <testidentifier>FileGrepperERROR4</testidentifier> - - <arg> - <argname>inputFile</argname> - <argvalue>*v1_log</argvalue> <!-- put this one if you use job log --> - </arg> - <arg> - <argname>outputFile</argname> - <argvalue>testlogError2.log</argvalue> - </arg> - <arg> - <argname>searchList</argname> - <argvalue>ERROR</argvalue> <!-- put here the string you should find to mean all ok --> - - </arg> - <noalarm /> - </test> - - - <test position="5"> - - <modulename>RttLibraryTools</modulename> - <testname>FileGrepper</testname> - <testnegate /> - - <testidentifier>FileGrepperFATAL</testidentifier> - - <arg> - <argname>inputFile</argname> - <argvalue>*v1_log</argvalue> <!-- put this one if you use job log --> - </arg> - <arg> - <argname>outputFile</argname> - <argvalue>testlogFATAL.log</argvalue> - </arg> - <arg> - <argname>searchList</argname> - <argvalue>FATAL</argvalue> <!-- put here the string you should find to mean all ok --> - - </arg> - - </test> - - <test 
position="6"> - - <modulename>RttLibraryTools</modulename> - <testname>FileGrepper</testname> - <testnegate /> - <testidentifier>FileGrepperTraceback</testidentifier> - - <arg> - <argname>inputFile</argname> - <argvalue>*v1_log</argvalue> <!-- put this one if you use job log --> - </arg> - <arg> - <argname>outputFile</argname> - <argvalue>testlogTraceback.log</argvalue> - </arg> - <arg> - <argname>searchList</argname> - <argvalue>Traceback</argvalue> <!-- put here the string you should find to mean all ok --> - </arg> - <noalarm /> - </test> - - <testToRemove> - <jobGroupName>RTT:Top</jobGroupName> - <testidentifier>CheckFileRunner0</testidentifier> - </testToRemove> - <testToRemove> - <jobGroupName>RTT:Athena</jobGroupName> - <testidentifier>Athena_FileGrepper</testidentifier> - </testToRemove> - - </jobGroup> - </jobGroups> - - - - - - </rtt> -</unifiedTestConfiguration> diff --git a/Reconstruction/RecJobTransforms/share/skeleton.ESDtoAOD_tf.py b/Reconstruction/RecJobTransforms/share/skeleton.ESDtoAOD_tf.py index 7014c91ffcc1b92e8c803971027a8ac2a66a9e4c..443fd878689762e1a7844e9f06cea9d76f21869c 100644 --- a/Reconstruction/RecJobTransforms/share/skeleton.ESDtoAOD_tf.py +++ b/Reconstruction/RecJobTransforms/share/skeleton.ESDtoAOD_tf.py @@ -52,21 +52,11 @@ if hasattr(runArgs,"outputAODFile"): rec.doAOD.set_Value_and_Lock( True ) rec.doWriteAOD.set_Value_and_Lock( True ) athenaCommonFlags.PoolAODOutput.set_Value_and_Lock( runArgs.outputAODFile ) - # Begin temporary block for Run-3 Trigger outputs - if ConfigFlags.Trigger.EDMVersion == 3: - # Lock DQ configuration to prevent downstream override - from AthenaMonitoring.DQMonFlags import DQMonFlags - print('DQMonFlags override') - if not rec.doTrigger(): - DQMonFlags.useTrigger.set_Value_and_Lock(False) - if DQMonFlags.useTrigger() and rec.doTrigger(): - DQMonFlags.useTrigger.set_Value_and_Lock(True) - # Don't run any trigger - only pass the HLT contents from ESD to AOD - # Configure here, and extract HLT content in 
RecExCommon_topOptions - # after the rest of the job is configured - from RecExConfig.RecAlgsFlags import recAlgs - recAlgs.doTrigger.set_Value_and_Lock( False ) - rec.doTrigger.set_Value_and_Lock( False ) + # Lock DQ configuration to prevent downstream override + # RB 15/12/2020: This logic was added in !36737, not sure if still needed + from AthenaMonitoring.DQMonFlags import DQMonFlags + print('DQMonFlags.useTrigger override') + DQMonFlags.useTrigger.set_Value_and_Lock(rec.doTrigger() and DQMonFlags.useTrigger()) if hasattr(runArgs,"outputTAGFile"): # should be used as outputTAGFile_e2a=myTAG.root so that it does not trigger AODtoTAG diff --git a/Reconstruction/RecoTools/ITrackToVertex/ITrackToVertex/ITrackToVertex.h b/Reconstruction/RecoTools/ITrackToVertex/ITrackToVertex/ITrackToVertex.h index 15c890bc05acc6127fbb6676c8b745d84094d79a..6fd4f65f9655ca3c13077389e5befd986a2d817c 100755 --- a/Reconstruction/RecoTools/ITrackToVertex/ITrackToVertex/ITrackToVertex.h +++ b/Reconstruction/RecoTools/ITrackToVertex/ITrackToVertex/ITrackToVertex.h @@ -53,51 +53,90 @@ namespace Reco { /** AlgTool interface methods */ static const InterfaceID& interfaceID() { return IID_ITrackToVertex; }; - /** Use this for MT Coding */ - virtual std::unique_ptr<Trk::StraightLineSurface> GetBeamLine(const InDet::BeamSpotData*) const = 0; //In C++17 make this [[nodiscard]] - - virtual const InDet::BeamSpotData* GetBeamSpotData(const EventContext &ctx) const = 0; //In C++17 make this [[nodiscard]] - - /** Interface method for use with TrackParticle and given vertex position - AOD */ - virtual const Trk::Perigee* perigeeAtVertex(const Rec::TrackParticle& tp, const Amg::Vector3D& gp) const = 0; - - /** Interface method for use with xAOD::Trackparticle and given vertex position - xAOD */ - virtual const Trk::Perigee* perigeeAtVertex(const xAOD::TrackParticle& tp, const Amg::Vector3D& gp) const = 0; - - /** Interface method for use with TrackParticle and default primary vertex from TrackParticle - 
AOD */ - virtual const Trk::Perigee* perigeeAtVertex(const Rec::TrackParticle& tp) const = 0; - - /** Interface method for use with TrackParticle and default primary vertex from TrackParticle - xAOD */ - virtual const Trk::Perigee* perigeeAtVertex(const xAOD::TrackParticle& tp) const = 0; - - /** Interface method for use with Track and given vertex position - ESD */ - virtual const Trk::Perigee* perigeeAtVertex(const Trk::Track& trk, const Amg::Vector3D& gp) const = 0; - - /** Interface method for use with TrackParticle and the beamspot from the BeamSpotSvc - AOD*/ - virtual const Trk::Perigee* perigeeAtBeamspot(const Rec::TrackParticle& tp, const InDet::BeamSpotData*) const = 0; - - /** Interface method for use with TrackParticle and the beamspot from the BeamSpotSvc - xAOD*/ - virtual const Trk::Perigee* perigeeAtBeamspot(const xAOD::TrackParticle& tp, const InDet::BeamSpotData*) const = 0; - - /** Interface method for use with Track and the beamspot from the BeamSpotSvc - ESD */ - virtual const Trk::Perigee* perigeeAtBeamspot(const Trk::Track& trk, const InDet::BeamSpotData*) const = 0; - - /** Interface method for use with Track and the beamline from the BeamSpotSvc - ESD */ - virtual const Trk::Perigee* perigeeAtBeamline(const Trk::Track& trk, const InDet::BeamSpotData*) const = 0; - - /** Interface method for use with TrackParticle and the beamline from the BeamSpotSvc - AOD*/ - virtual const Trk::TrackParameters* trackAtBeamline(const Rec::TrackParticle& tp) const = 0; - - /** Interface method for use with TrackParticle and the beamline from the BeamSpotSvc - xAOD*/ - virtual const Trk::TrackParameters* trackAtBeamline(const xAOD::TrackParticle& tp, const InDet::BeamSpotData*) const = 0; - - /** Interface method for use with Track and the beamline from the BeamSpotSvc - ESD */ - virtual const Trk::TrackParameters* trackAtBeamline(const Trk::Track& trk, const Trk::StraightLineSurface*) const = 0; - - /** Interface method for use with Track and the beamline 
from the BeamSpotSvc - TrackParameters */ - virtual const Trk::TrackParameters* trackAtBeamline(const Trk::TrackParameters& tpars, const Trk::StraightLineSurface* ) const = 0; + /** Use this for MT Coding */ + virtual std::unique_ptr<Trk::StraightLineSurface> GetBeamLine( + const InDet::BeamSpotData*) + const = 0; // In C++17 make this [[nodiscard]] + + virtual const InDet::BeamSpotData* GetBeamSpotData( + const EventContext& ctx) const = 0; // In C++17 make this [[nodiscard]] + + /** Interface method for use with TrackParticle and given vertex position + * - AOD */ + virtual const Trk::Perigee* perigeeAtVertex( + const Rec::TrackParticle& tp, + const Amg::Vector3D& gp) const = 0; + + /** Interface method for use with xAOD::Trackparticle and given vertex + * position - xAOD */ + virtual const Trk::Perigee* perigeeAtVertex( + const xAOD::TrackParticle& tp, + const Amg::Vector3D& gp) const = 0; + + /** Interface method for use with TrackParticle and default primary + * vertex from TrackParticle - AOD */ + virtual const Trk::Perigee* perigeeAtVertex( + const Rec::TrackParticle& tp) const = 0; + + /** Interface method for use with TrackParticle and default primary + * vertex from TrackParticle - xAOD */ + virtual const Trk::Perigee* perigeeAtVertex( + const xAOD::TrackParticle& tp) const = 0; + + /** Interface method for use with Track and given vertex position - ESD + */ + virtual const Trk::Perigee* perigeeAtVertex( + const Trk::Track& trk, + const Amg::Vector3D& gp) const = 0; + + /** Interface method for use with TrackParticle and the beamspot from the + * BeamSpotSvc - AOD*/ + virtual const Trk::Perigee* perigeeAtBeamspot( + const Rec::TrackParticle& tp, + const InDet::BeamSpotData*) const = 0; + + /** Interface method for use with TrackParticle and the beamspot from the + * BeamSpotSvc - xAOD*/ + virtual const Trk::Perigee* perigeeAtBeamspot( + const xAOD::TrackParticle& tp, + const InDet::BeamSpotData*) const = 0; + + /** Interface method for use with Track and 
the beamspot from the + * BeamSpotSvc - ESD */ + virtual const Trk::Perigee* perigeeAtBeamspot( + const Trk::Track& trk, + const InDet::BeamSpotData*) const = 0; + + /** Interface method for use with Track and the beamline from the + * BeamSpotSvc - ESD */ + virtual const Trk::Perigee* perigeeAtBeamline( + const EventContext& ctx, + const Trk::Track& trk, + const InDet::BeamSpotData* beamSpotData) const = 0; + + /** Interface method for use with TrackParticle and the beamline from the + * BeamSpotSvc - AOD*/ + virtual const Trk::TrackParameters* trackAtBeamline( + const Rec::TrackParticle& tp) const = 0; + + /** Interface method for use with TrackParticle and the beamline from the + * BeamSpotSvc - xAOD*/ + virtual const Trk::TrackParameters* trackAtBeamline( + const xAOD::TrackParticle& tp, + const InDet::BeamSpotData*) const = 0; + + /** Interface method for use with Track and the beamline from the + * BeamSpotSvc - ESD */ + virtual const Trk::TrackParameters* trackAtBeamline( + const Trk::Track& trk, + const Trk::StraightLineSurface*) const = 0; + + /** Interface method for use with Track and the beamline from the + * BeamSpotSvc - TrackParameters */ + virtual const Trk::TrackParameters* trackAtBeamline( + const Trk::TrackParameters& tpars, + const Trk::StraightLineSurface*) const = 0; }; - } #endif // TRACKTOVERTEX_ITRACKTOVERTEX_H diff --git a/Reconstruction/RecoTools/TrackToVertex/TrackToVertex/TrackToVertex.h b/Reconstruction/RecoTools/TrackToVertex/TrackToVertex/TrackToVertex.h index 53a2313c005c3e7c01c7317223cebb248644c8e1..ad83644f4e5f341de0862e9e914daa8d7ea2305e 100755 --- a/Reconstruction/RecoTools/TrackToVertex/TrackToVertex/TrackToVertex.h +++ b/Reconstruction/RecoTools/TrackToVertex/TrackToVertex/TrackToVertex.h @@ -50,63 +50,108 @@ namespace Reco { /**Virtual destructor*/ virtual ~TrackToVertex(); - + /** AlgTool initailize method.*/ - virtual StatusCode initialize() override; + virtual StatusCode initialize() override final; /** AlgTool finalize 
method */ - virtual StatusCode finalize() override; - + virtual StatusCode finalize() override final; + /** Use this for MT Coding */ - virtual std::unique_ptr<Trk::StraightLineSurface> GetBeamLine(const InDet::BeamSpotData*) const override; //In C++17 make this [[nodiscard]] - - virtual const InDet::BeamSpotData* GetBeamSpotData(const EventContext &ctx) const override; //In C++17 make this [[nodiscard]] - - /** Interface method for use with TrackParticle and given vertex position - AOD */ - virtual const Trk::Perigee* perigeeAtVertex(const Rec::TrackParticle& tp, const Amg::Vector3D& gp) const override; - - /** Interface method for use with xAOD::Trackparticle and given vertex position - xAOD */ - virtual const Trk::Perigee* perigeeAtVertex(const xAOD::TrackParticle& tp, const Amg::Vector3D& gp) const override; - - /** Interface method for use with TrackParticle and default primary vertex from TrackParticle - AOD */ - virtual const Trk::Perigee* perigeeAtVertex(const Rec::TrackParticle& tp) const override; - - /** Interface method for use with TrackParticle and default primary vertex from TrackParticle - xAOD */ - virtual const Trk::Perigee* perigeeAtVertex(const xAOD::TrackParticle& tp) const override; - + virtual std::unique_ptr<Trk::StraightLineSurface> GetBeamLine( + const InDet::BeamSpotData*) + const override final; // In C++17 make this [[nodiscard]] + + virtual const InDet::BeamSpotData* GetBeamSpotData( + const EventContext& ctx) + const override final; // In C++17 make this [[nodiscard]] + + /** Interface method for use with TrackParticle and given vertex position + * - AOD */ + virtual const Trk::Perigee* perigeeAtVertex( + const Rec::TrackParticle& tp, + const Amg::Vector3D& gp) const override final; + + /** Interface method for use with xAOD::Trackparticle and given vertex + * position - xAOD */ + virtual const Trk::Perigee* perigeeAtVertex( + const xAOD::TrackParticle& tp, + const Amg::Vector3D& gp) const override final; + + /** Interface method for 
use with TrackParticle and default primary vertex + * from TrackParticle - AOD */ + virtual const Trk::Perigee* perigeeAtVertex( + const Rec::TrackParticle& tp) const override final; + + /** Interface method for use with TrackParticle and default primary vertex + * from TrackParticle - xAOD */ + virtual const Trk::Perigee* perigeeAtVertex( + const xAOD::TrackParticle& tp) const override final; + /** Interface method for use with Track and given vertex position - ESD */ - virtual const Trk::Perigee* perigeeAtVertex(const Trk::Track& trk, const Amg::Vector3D& gp) const override; - - /** Interface method for use with TrackParticle and the beamspot from the BeamSpotSvc - AOD*/ - virtual const Trk::Perigee* perigeeAtBeamspot(const Rec::TrackParticle& tp, const InDet::BeamSpotData*) const override; - - /** Interface method for use with TrackParticle and the beamspot from the BeamSpotSvc - xAOD*/ - virtual const Trk::Perigee* perigeeAtBeamspot(const xAOD::TrackParticle& tp, const InDet::BeamSpotData*) const override; - - /** Interface method for use with Track and the beamspot from the BeamSpotSvc - ESD */ - virtual const Trk::Perigee* perigeeAtBeamspot(const Trk::Track& trk, const InDet::BeamSpotData*) const override; - - /** Interface method for use with Track and the beamline from the BeamSpotSvc - ESD */ - virtual const Trk::Perigee* perigeeAtBeamline(const Trk::Track& trk, const InDet::BeamSpotData*) const override; - - /** Interface method for use with TrackParticle and the beamline from the BeamSpotSvc - AOD*/ - virtual const Trk::TrackParameters* trackAtBeamline(const Rec::TrackParticle& tp) const override; - - /** Interface method for use with TrackParticle and the beamline from the BeamSpotSvc - xAOD*/ - virtual const Trk::TrackParameters* trackAtBeamline(const xAOD::TrackParticle& tp, const InDet::BeamSpotData*) const override; - - /** Interface method for use with Track and the beamline from the BeamSpotSvc - ESD */ - virtual const Trk::TrackParameters* 
trackAtBeamline(const Trk::Track& trk, const Trk::StraightLineSurface* beamline) const override; - - /** Interface method for use with Track and the beamline from the BeamSpotSvc - TrackParameters */ - virtual const Trk::TrackParameters* trackAtBeamline(const Trk::TrackParameters& tpars, const Trk::StraightLineSurface* beamline) const override; - - - private: - ToolHandle< Trk::IExtrapolator > m_extrapolator; //!< ToolHandle for Extrapolator - - SG::ReadCondHandleKey<InDet::BeamSpotData> m_beamSpotKey { this, "BeamSpotKey", "BeamSpotData", "SG key for beam spot" }; - bool m_ForceBeamSpotZero = false; - const static Amg::Vector3D s_origin; //!< static origin + virtual const Trk::Perigee* perigeeAtVertex( + const Trk::Track& trk, + const Amg::Vector3D& gp) const override final; + + /** Interface method for use with TrackParticle and the beamspot from the + * BeamSpotSvc - AOD*/ + virtual const Trk::Perigee* perigeeAtBeamspot( + const Rec::TrackParticle& tp, + const InDet::BeamSpotData*) const override final; + + /** Interface method for use with TrackParticle and the beamspot from the + * BeamSpotSvc - xAOD*/ + virtual const Trk::Perigee* perigeeAtBeamspot( + const xAOD::TrackParticle& tp, + const InDet::BeamSpotData*) const override final; + + /** Interface method for use with Track and the beamspot from the + * BeamSpotSvc - ESD */ + virtual const Trk::Perigee* perigeeAtBeamspot( + const Trk::Track& trk, + const InDet::BeamSpotData*) const override final; + + /** Interface method for use with Track and the beamline from the + * BeamSpotSvc - ESD */ + virtual const Trk::Perigee* perigeeAtBeamline( + const EventContext& ctx, + const Trk::Track& trk, + const InDet::BeamSpotData*) const override final; + + /** Interface method for use with TrackParticle and the beamline from the + * BeamSpotSvc - AOD*/ + virtual const Trk::TrackParameters* trackAtBeamline( + const Rec::TrackParticle& tp) const override final; + + /** Interface method for use with TrackParticle and the 
beamline from the + * BeamSpotSvc - xAOD*/ + virtual const Trk::TrackParameters* trackAtBeamline( + const xAOD::TrackParticle& tp, + const InDet::BeamSpotData*) const override final; + + /** Interface method for use with Track and the beamline from the + * BeamSpotSvc - ESD */ + virtual const Trk::TrackParameters* trackAtBeamline( + const Trk::Track& trk, + const Trk::StraightLineSurface* beamline) const override final; + + /** Interface method for use with Track and the beamline from the + * BeamSpotSvc - TrackParameters */ + virtual const Trk::TrackParameters* trackAtBeamline( + const Trk::TrackParameters& tpars, + const Trk::StraightLineSurface* beamline) const override final; + + private: + ToolHandle<Trk::IExtrapolator> + m_extrapolator; //!< ToolHandle for Extrapolator + + SG::ReadCondHandleKey<InDet::BeamSpotData> m_beamSpotKey{ + this, + "BeamSpotKey", + "BeamSpotData", + "SG key for beam spot" + }; + bool m_ForceBeamSpotZero = false; + const static Amg::Vector3D s_origin; //!< static origin }; } // end of namespace diff --git a/Reconstruction/RecoTools/TrackToVertex/src/TrackToVertex.cxx b/Reconstruction/RecoTools/TrackToVertex/src/TrackToVertex.cxx index e67f1c6b775e20b7d7231f21d8404df10c9d4ca5..788ad1626c6d45084214963f75f0caab004c9253 100755 --- a/Reconstruction/RecoTools/TrackToVertex/src/TrackToVertex.cxx +++ b/Reconstruction/RecoTools/TrackToVertex/src/TrackToVertex.cxx @@ -196,8 +196,11 @@ const Trk::Perigee* Reco::TrackToVertex::perigeeAtBeamspot(const Trk::Track& tra return perigeeAtVertex(track, beamspot ? 
beamspot->beamVtx().position() : s_origin); } - -const Trk::Perigee* Reco::TrackToVertex::perigeeAtBeamline(const Trk::Track& track, const InDet::BeamSpotData* beamspotptr) const +const Trk::Perigee* +Reco::TrackToVertex::perigeeAtBeamline( + const EventContext& ctx, + const Trk::Track& track, + const InDet::BeamSpotData* beamspotptr) const { Amg::Vector3D beamspot(s_origin); @@ -218,7 +221,7 @@ const Trk::Perigee* Reco::TrackToVertex::perigeeAtBeamline(const Trk::Track& tra const Trk::Perigee* vertexPerigee = nullptr; const Trk::TrackParameters* extrapResult = - m_extrapolator->extrapolate(track, persf); + m_extrapolator->extrapolate(ctx,track, persf); if (extrapResult && extrapResult->surfaceType() == Trk::Surface::Perigee) { vertexPerigee = static_cast<const Trk::Perigee*>(extrapResult); } @@ -227,14 +230,13 @@ const Trk::Perigee* Reco::TrackToVertex::perigeeAtBeamline(const Trk::Track& tra // try again using the first track parameter set, since the current extrapolator will // use "the closest" track parameterset which is not necessarily the mostuseful one to // start the extrapolation with. - // @TODO should try to improve the extrapolator to pick the correct start parameters. 
const DataVector<const Trk::TrackParameters> *track_parameter_list= track.trackParameters(); if (track_parameter_list) { for(const Trk::TrackParameters *trk_params: *track_parameter_list) { if (!trk_params) { continue; } - extrapResult = m_extrapolator->extrapolate(*trk_params, persf); + extrapResult = m_extrapolator->extrapolate(ctx,*trk_params, persf); if (extrapResult && extrapResult->surfaceType() == Trk::Surface::Perigee) { vertexPerigee = static_cast<const Trk::Perigee*>(extrapResult); diff --git a/Reconstruction/egamma/egammaAlgs/src/EMBremCollectionBuilder.cxx b/Reconstruction/egamma/egammaAlgs/src/EMBremCollectionBuilder.cxx index dc1bc79b266a1b507351a98722a4db4cda237752..ead0ba6aaaac59db0c66ca840ad9e4ebd5c9ee35 100644 --- a/Reconstruction/egamma/egammaAlgs/src/EMBremCollectionBuilder.cxx +++ b/Reconstruction/egamma/egammaAlgs/src/EMBremCollectionBuilder.cxx @@ -253,7 +253,7 @@ EMBremCollectionBuilder::createNew( ElementLink<xAOD::TrackParticleContainer>> tP("originalTrackParticle"); ElementLink<xAOD::TrackParticleContainer> linkToOriginal(*AllTracks, - origIndex); + origIndex,ctx); tP(*aParticle) = linkToOriginal; if (m_doTruth) { @@ -303,7 +303,7 @@ EMBremCollectionBuilder::createNew( // Now Slim the Trk::Track for writing to disk m_slimTool->slimTrack(*(Info.track)); finalTracks->push_back(std::move(Info.track)); - ElementLink<TrackCollection> trackLink(*finalTracks,finalTracks->size()-1); + ElementLink<TrackCollection> trackLink(*finalTracks,finalTracks->size()-1,ctx); aParticle->setTrackLink( trackLink ); return StatusCode::SUCCESS; } diff --git a/Reconstruction/egamma/egammaAlgs/src/electronSuperClusterBuilder.cxx b/Reconstruction/egamma/egammaAlgs/src/electronSuperClusterBuilder.cxx index 0ba40bd73d30ca2b4144827772ac348a0b14c1a8..80dee18d0c823483456d68912929bf78b930fbaf 100644 --- a/Reconstruction/egamma/egammaAlgs/src/electronSuperClusterBuilder.cxx +++ b/Reconstruction/egamma/egammaAlgs/src/electronSuperClusterBuilder.cxx @@ -173,7 +173,7 @@ 
electronSuperClusterBuilder::execute(const EventContext& ctx) const // Push back the new cluster into the output container. outputClusterContainer->push_back(std::move(newClus)); ElementLink<xAOD::CaloClusterContainer> clusterLink(*outputClusterContainer, - outputClusterContainer->size() - 1); + outputClusterContainer->size() - 1,ctx); std::vector<ElementLink<xAOD::CaloClusterContainer>> elClusters{ clusterLink }; // Make egammaRec object, and push it back into output container. diff --git a/Reconstruction/egamma/egammaAlgs/src/photonSuperClusterBuilder.cxx b/Reconstruction/egamma/egammaAlgs/src/photonSuperClusterBuilder.cxx index 4863129319604757ab000f0c5f0448975a37bf95..69acee220f30e611636234e635f13f22ab45a79e 100644 --- a/Reconstruction/egamma/egammaAlgs/src/photonSuperClusterBuilder.cxx +++ b/Reconstruction/egamma/egammaAlgs/src/photonSuperClusterBuilder.cxx @@ -162,7 +162,7 @@ photonSuperClusterBuilder::execute(const EventContext& ctx) const // Add the cluster link to the super cluster ElementLink<xAOD::CaloClusterContainer> clusterLink(*outputClusterContainer, - outputClusterContainer->size() - 1); + outputClusterContainer->size() - 1,ctx); std::vector<ElementLink<xAOD::CaloClusterContainer>> phCluster{ clusterLink }; /////////////////////////////////////////////////////// diff --git a/Reconstruction/egamma/egammaAlgs/src/topoEgammaBuilder.cxx b/Reconstruction/egamma/egammaAlgs/src/topoEgammaBuilder.cxx index 1351748fb9d8418f0ff95488f03c51c8db96b769..eb7ffa5166fd172e16fb3aae723965a483869289 100644 --- a/Reconstruction/egamma/egammaAlgs/src/topoEgammaBuilder.cxx +++ b/Reconstruction/egamma/egammaAlgs/src/topoEgammaBuilder.cxx @@ -60,7 +60,7 @@ topoEgammaBuilder::initialize() m_egammaOQTool.disable(); } - //do we actually do ambiguity + //do we actually do ambiguity m_doAmbiguity = !m_ambiguityTool.empty(); if (m_doElectrons && m_doPhotons && m_doAmbiguity) { ATH_CHECK(m_ambiguityTool.retrieve()); @@ -100,15 +100,15 @@ topoEgammaBuilder::execute(const 
EventContext& ctx) const } SG::WriteHandle<xAOD::ElectronContainer> electronContainer(m_electronOutputKey, ctx); - + ATH_CHECK(electronContainer.record(std::make_unique<xAOD::ElectronContainer>(), std::make_unique<xAOD::ElectronAuxContainer>())); SG::WriteHandle<xAOD::PhotonContainer> photonContainer(m_photonOutputKey,ctx); - + ATH_CHECK(photonContainer.record(std::make_unique<xAOD::PhotonContainer>(), std::make_unique<xAOD::PhotonAuxContainer>())); - + electrons = electronContainer.ptr(); photons = photonContainer.ptr(); @@ -248,7 +248,7 @@ topoEgammaBuilder::execute(const EventContext& ctx) const } // Do the ambiguity Links if (m_doElectrons && m_doPhotons) { - ATH_CHECK(doAmbiguityLinks(electrons, photons)); + ATH_CHECK(doAmbiguityLinks(ctx,electrons, photons)); } return StatusCode::SUCCESS; @@ -256,6 +256,7 @@ topoEgammaBuilder::execute(const EventContext& ctx) const StatusCode topoEgammaBuilder::doAmbiguityLinks( + const EventContext& ctx, xAOD::ElectronContainer* electronContainer, xAOD::PhotonContainer* photonContainer) const { @@ -287,7 +288,7 @@ topoEgammaBuilder::doAmbiguityLinks( if (caloClusterLinks(*(electron->caloCluster())).at(0) == caloClusterLinks(*(photon->caloCluster())).at(0)) { ElementLink<xAOD::EgammaContainer> link(*electronContainer, - electronIndex); + electronIndex,ctx); ELink(*photon) = link; break; } @@ -309,7 +310,8 @@ topoEgammaBuilder::doAmbiguityLinks( if (caloClusterLinks(*(electron->caloCluster())).at(0) == caloClusterLinks(*(photon->caloCluster())).at(0)) { - ElementLink<xAOD::EgammaContainer> link(*photonContainer, photonIndex); + ElementLink<xAOD::EgammaContainer> link( + *photonContainer, photonIndex, ctx); ELink(*electron) = link; break; } diff --git a/Reconstruction/egamma/egammaAlgs/src/topoEgammaBuilder.h b/Reconstruction/egamma/egammaAlgs/src/topoEgammaBuilder.h index 658b1ae97e1b2ad736ff4b70badb30d4caa87b59..e086726d2326fca018dc4349e563670ecd5af95a 100644 --- a/Reconstruction/egamma/egammaAlgs/src/topoEgammaBuilder.h +++ 
b/Reconstruction/egamma/egammaAlgs/src/topoEgammaBuilder.h @@ -65,17 +65,17 @@ private: bool getPhoton(const egammaRec* egRec, xAOD::PhotonContainer *photonContainer, const unsigned int author, uint8_t type) const; + /** @brief Do the final ambiguity **/ + StatusCode doAmbiguityLinks(const EventContext& ctx, + xAOD::ElectronContainer* electronContainer, + xAOD::PhotonContainer* photonContainer) const; - /** @brief Do the final ambiguity **/ - StatusCode doAmbiguityLinks(xAOD::ElectronContainer *electronContainer, - xAOD::PhotonContainer *photonContainer) const ; - - /** @brief Call a tool using contExecute and electrons, photon containers if given **/ + /** @brief Call a tool using contExecute and electrons, photon containers if + * given **/ StatusCode CallTool(const EventContext& ctx, - const ToolHandle<IegammaBaseTool>& tool, - xAOD::ElectronContainer *electronContainer = nullptr, - xAOD::PhotonContainer *photonContainer = nullptr) const; - + const ToolHandle<IegammaBaseTool>& tool, + xAOD::ElectronContainer* electronContainer = nullptr, + xAOD::PhotonContainer* photonContainer = nullptr) const; /** @brief Vector of tools for dressing electrons and photons **/ ToolHandleArray<IegammaBaseTool> m_egammaTools {this, diff --git a/Reconstruction/egamma/egammaInterfaces/CMakeLists.txt b/Reconstruction/egamma/egammaInterfaces/CMakeLists.txt index 8f109fdeabb79adba673a971fb22a4b6870d96ee..34bdc8b8dace5186dc108f50f9982d932ac1e63d 100644 --- a/Reconstruction/egamma/egammaInterfaces/CMakeLists.txt +++ b/Reconstruction/egamma/egammaInterfaces/CMakeLists.txt @@ -1,20 +1,24 @@ -################################################################################ -# Package: egammaInterfaces -################################################################################ +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration # Declare the package name: atlas_subdir( egammaInterfaces ) +# Extra libraries used by the interface library. 
+set( extra_libs ) +if( NOT XAOD_ANALYSIS ) + set( extra_libs TrkParameters TrkTrack TrkEventPrimitives + TrkNeutralParameters TrkCaloExtension egammaRecEvent ) +endif() # Component(s) in the package: atlas_add_library( egammaInterfacesLib - egammaInterfaces/*.h - INTERFACE - PUBLIC_HEADERS egammaInterfaces - LINK_LIBRARIES GaudiKernel TrkCaloExtension TrkEventPrimitives TrkNeutralParameters TrkParameters - TrkTrack egammaRecEvent xAODCaloEvent xAODEgamma xAODTracking) + egammaInterfaces/*.h + INTERFACE + PUBLIC_HEADERS egammaInterfaces + LINK_LIBRARIES GaudiKernel xAODCaloEvent xAODEgamma xAODTracking + ${extra_libs} ) atlas_add_dictionary( egammaInterfacesDict - egammaInterfaces/egammaInterfacesDict.h - egammaInterfaces/selection.xml - LINK_LIBRARIES egammaInterfacesLib ) + egammaInterfaces/egammaInterfacesDict.h + egammaInterfaces/selection.xml + LINK_LIBRARIES egammaInterfacesLib ) diff --git a/Reconstruction/egamma/egammaInterfaces/egammaInterfaces/egammaInterfacesDict.h b/Reconstruction/egamma/egammaInterfaces/egammaInterfaces/egammaInterfacesDict.h index 8a46b57f925ecb76dc94eec1fbdf73609c22faee..5edb3297dbc582b82ea737bdd10a7715b6d27e29 100644 --- a/Reconstruction/egamma/egammaInterfaces/egammaInterfaces/egammaInterfacesDict.h +++ b/Reconstruction/egamma/egammaInterfaces/egammaInterfaces/egammaInterfacesDict.h @@ -18,10 +18,8 @@ #include "egammaInterfaces/ICaloCluster_OnTrackBuilder.h" #include "egammaInterfaces/IEMClusterTool.h" #include "egammaInterfaces/IEMConversionBuilder.h" -#include "egammaInterfaces/IEMExtrapolationTools.h" #include "egammaInterfaces/IEMFourMomBuilder.h" #include "egammaInterfaces/IEMShowerBuilder.h" -#include "egammaInterfaces/IEMTrackMatchBuilder.h" #include "egammaInterfaces/IegammaBackShape.h" #include "egammaInterfaces/IegammaBaseTool.h" #include "egammaInterfaces/IegammaCheckEnergyDepositTool.h" @@ -31,7 +29,12 @@ #include "egammaInterfaces/IegammaShowerShape.h" #include "egammaInterfaces/IegammaStripsShape.h" #include 
"egammaInterfaces/IegammaSwTool.h" -#include "egammaInterfaces/IegammaTrkRefitterTool.h" #include "egammaInterfaces/IegammaMVASvc.h" #include "egammaInterfaces/IegammaOQFlagsBuilder.h" +#ifndef XAOD_ANALYSIS +# include "egammaInterfaces/IEMExtrapolationTools.h" +# include "egammaInterfaces/IEMTrackMatchBuilder.h" +# include "egammaInterfaces/IegammaTrkRefitterTool.h" +#endif // not XAOD_ANALYSIS + #endif // EGAMMAINTERFACES_EGAMMAINTERFACESDICT_H diff --git a/Reconstruction/egamma/egammaMVACalib/CMakeLists.txt b/Reconstruction/egamma/egammaMVACalib/CMakeLists.txt index b072f20a3f31552677154f83f30b7c6151ea9e79..4897240610632899c0dce3e77319f8e229c6f2a4 100644 --- a/Reconstruction/egamma/egammaMVACalib/CMakeLists.txt +++ b/Reconstruction/egamma/egammaMVACalib/CMakeLists.txt @@ -5,10 +5,8 @@ atlas_subdir( egammaMVACalib ) # Extra dependencies for Athena capable builds: set( extra_libs ) -if( XAOD_STANDALONE ) - set( extra_libs xAODRootAccess) -else() - set( extra_libs GaudiKernel egammaInterfacesLib ) +if( NOT XAOD_STANDALONE ) + set( extra_libs PRIVATE_LINK_LIBRARIES GaudiKernel ) endif() # External dependencies: @@ -19,7 +17,8 @@ atlas_add_library( egammaMVACalibLib egammaMVACalib/*.h Root/*.cxx PUBLIC_HEADERS egammaMVACalib INCLUDE_DIRS ${ROOT_INCLUDE_DIRS} - LINK_LIBRARIES ${ROOT_LIBRARIES} AsgMessagingLib xAODCaloEvent xAODEgamma ${extra_libs} ) + LINK_LIBRARIES ${ROOT_LIBRARIES} AsgMessagingLib xAODCaloEvent xAODEgamma + ${extra_libs} ) if( NOT XAOD_STANDALONE ) atlas_add_component( egammaMVACalib diff --git a/Reconstruction/egamma/egammaMVACalib/Root/egammaMVAFunctions.cxx b/Reconstruction/egamma/egammaMVACalib/Root/egammaMVAFunctions.cxx index ba163a48625d79286de0d7beb99e425f6285cc82..a81f9ab8fac963677c2e33a0c60e5929e8492884 100644 --- a/Reconstruction/egamma/egammaMVACalib/Root/egammaMVAFunctions.cxx +++ b/Reconstruction/egamma/egammaMVACalib/Root/egammaMVAFunctions.cxx @@ -5,7 +5,7 @@ #include "egammaMVACalib/egammaMVAFunctions.h" -#ifndef XAOD_ANALYSIS 
+#ifndef XAOD_STANDALONE #include "GaudiKernel/SystemOfUnits.h" using Gaudi::Units::GeV; #else @@ -20,12 +20,12 @@ namespace egammaMVAFunctions { // forward declarations - void initializeClusterFuncs(funcMap_t& funcLibrary, - const std::string& prefix, + void initializeClusterFuncs(funcMap_t& funcLibrary, + const std::string& prefix, bool useLayerCorrected); void initializeEgammaFuncs(funcMap_t& funcLibrary, - const std::string& prefix, + const std::string& prefix, bool useLayerCorrected); /// A function to build the map for electrons @@ -47,7 +47,7 @@ namespace egammaMVAFunctions { return compute_el_trackz0(*(static_cast<const xAOD::Electron*>(eg))); }; funcLibrary["el_refittedTrack_qoverp"] = [](const xAOD::Egamma* eg, const xAOD::CaloCluster*) { return compute_el_refittedTrack_qoverp(*(static_cast<const xAOD::Electron*>(eg))); }; - + return funcLibraryPtr; } @@ -56,7 +56,7 @@ namespace egammaMVAFunctions { auto funcLibraryPtr = std::make_unique<funcMap_t>(); funcMap_t& funcLibrary = *funcLibraryPtr; - + initializeClusterFuncs(funcLibrary, "ph", useLayerCorrected); initializeEgammaFuncs(funcLibrary, "ph", useLayerCorrected); @@ -69,32 +69,32 @@ namespace egammaMVAFunctions { auto funcLibraryPtr = std::make_unique<funcMap_t>(); funcMap_t& funcLibrary = *funcLibraryPtr; - + initializeClusterFuncs(funcLibrary, "ph", useLayerCorrected); initializeEgammaFuncs(funcLibrary, "ph", useLayerCorrected); - + funcLibrary["ph_Rconv"] = [](const xAOD::Egamma* eg, const xAOD::CaloCluster*)->float { return xAOD::EgammaHelpers::conversionRadius(static_cast<const xAOD::Photon*>(eg)); }; - + funcLibrary["convR"] = [](const xAOD::Egamma* eg, const xAOD::CaloCluster*)->float - { + { auto ph = static_cast<const xAOD::Photon*>(eg); - if (compute_ptconv(ph) > 3*GeV) { + if (compute_ptconv(ph) > 3*GeV) { return xAOD::EgammaHelpers::conversionRadius(ph); - } + } return 799.0; - + }; funcLibrary["ph_zconv"] = [](const xAOD::Egamma* eg, const xAOD::CaloCluster*) { return static_cast<const 
xAOD::Photon*>(eg)->vertex()->position().z(); }; funcLibrary["ph_pt1conv"] = [](const xAOD::Egamma* eg, const xAOD::CaloCluster*)->float - { return compute_pt1conv(static_cast<const xAOD::Photon*>(eg)); }; + { return compute_pt1conv(static_cast<const xAOD::Photon*>(eg)); }; funcLibrary["ph_pt2conv"] = [](const xAOD::Egamma* eg, const xAOD::CaloCluster*)->float - { return compute_pt2conv(static_cast<const xAOD::Photon*>(eg)); }; + { return compute_pt2conv(static_cast<const xAOD::Photon*>(eg)); }; funcLibrary["ph_ptconv"] = [](const xAOD::Egamma* eg, const xAOD::CaloCluster*) { return compute_ptconv(static_cast<const xAOD::Photon*>(eg)); }; - + funcLibrary["convPtRatio"] = [](const xAOD::Egamma* eg, const xAOD::CaloCluster*)->float { auto ph = static_cast<const xAOD::Photon*>(eg); @@ -102,43 +102,43 @@ namespace egammaMVAFunctions auto pt1 = compute_pt1conv(ph); auto pt2 = compute_pt2conv(ph); return std::max(pt1, pt2)/(pt1+pt2); - } + } return 1.0f; - + }; - + if (useLayerCorrected) { funcLibrary["convEtOverPt"] = [](const xAOD::Egamma* eg, const xAOD::CaloCluster*cl)->float { auto ph = static_cast<const xAOD::Photon*>(eg); - + float rv = 0.0; if (xAOD::EgammaHelpers::numberOfSiTracks(ph) == 2) { rv = std::max(0.0f, compute_correctedcl_Eacc(*cl)/(std::cosh(compute_cl_eta(*cl))*compute_ptconv(ph))); - } + } return std::min(rv, 2.0f); }; } else { funcLibrary["convEtOverPt"] = [](const xAOD::Egamma* eg, const xAOD::CaloCluster*cl)->float { auto ph = static_cast<const xAOD::Photon*>(eg); - + float rv = 0.0; if (xAOD::EgammaHelpers::numberOfSiTracks(ph) == 2) { rv = std::max(0.0f, compute_rawcl_Eacc(*cl)/(std::cosh(compute_cl_eta(*cl))*compute_ptconv(ph))); - } + } return std::min(rv, 2.0f); }; } - + return funcLibraryPtr; } // Initialize the functions that just depend on the cluster. // This helper function is not meant for external use. 
- void initializeClusterFuncs(funcMap_t& funcLibrary, - const std::string& prefix, + void initializeClusterFuncs(funcMap_t& funcLibrary, + const std::string& prefix, bool useLayerCorrected) { @@ -154,19 +154,19 @@ namespace egammaMVAFunctions { return compute_cl_phiCalo(*cl); }; funcLibrary[prefix + "_cl_E_TileGap3"] = [](const xAOD::Egamma*, const xAOD::CaloCluster* cl) { return cl->eSample(CaloSampling::TileGap3); }; - + funcLibrary["cellIndexCalo"] = [](const xAOD::Egamma*, const xAOD::CaloCluster* cl) { return std::floor(std::abs(compute_cl_etaCalo(*cl))/0.025); }; funcLibrary["phiModCalo"] = [](const xAOD::Egamma*, const xAOD::CaloCluster* cl) - { return ((abs(compute_cl_eta(*cl)) < 1.425) ? + { return ((abs(compute_cl_eta(*cl)) < 1.425) ? std::fmod(compute_cl_phiCalo(*cl), TMath::Pi()/512) : - std::fmod(compute_cl_phiCalo(*cl), TMath::Pi()/384)); + std::fmod(compute_cl_phiCalo(*cl), TMath::Pi()/384)); }; funcLibrary["etaModCalo"] = [](const xAOD::Egamma*, const xAOD::CaloCluster* cl) { return std::fmod(std::abs(compute_cl_etaCalo(*cl)), 0.025); }; funcLibrary["dPhiTG3"] = [](const xAOD::Egamma*, const xAOD::CaloCluster* cl) { return std::fmod(2.*TMath::Pi()+compute_cl_phi(*cl),TMath::Pi()/32.)-TMath::Pi()/64.0; }; - + if (useLayerCorrected) { funcLibrary[prefix + "_rawcl_Es0"] = [](const xAOD::Egamma*, const xAOD::CaloCluster* cl) { return compute_correctedcl_Es0(*cl); }; @@ -210,8 +210,8 @@ namespace egammaMVAFunctions // Initialize the variables that just depend on egamma. // This helper function is not meant for external use. 
- void initializeEgammaFuncs(funcMap_t& funcLibrary, - const std::string& prefix, + void initializeEgammaFuncs(funcMap_t& funcLibrary, + const std::string& prefix, bool /* useLayerCorrected */) { funcLibrary[prefix + "_e011"] = [](const xAOD::Egamma* eg, const xAOD::CaloCluster*) @@ -300,6 +300,6 @@ namespace egammaMVAFunctions { return eg->showerShapeValue(xAOD::EgammaParameters::Rhad1); }; funcLibrary[prefix + "_DeltaE"] = [](const xAOD::Egamma* eg, const xAOD::CaloCluster*) { return eg->showerShapeValue(xAOD::EgammaParameters::DeltaE); }; - + } } diff --git a/Reconstruction/egamma/egammaUtils/CMakeLists.txt b/Reconstruction/egamma/egammaUtils/CMakeLists.txt index ae76cabe806c99c6e5df52553a5f69c6b08e7ff3..4cd1cdaafba46bdaa61cc09d85a2b33061f49d68 100644 --- a/Reconstruction/egamma/egammaUtils/CMakeLists.txt +++ b/Reconstruction/egamma/egammaUtils/CMakeLists.txt @@ -1,23 +1,20 @@ -################################################################################ -# Package: egammaUtils -################################################################################ +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration # Declare the package name: atlas_subdir( egammaUtils ) # External dependencies: -find_package( ROOT COMPONENTS Tree Core Hist) +find_package( ROOT COMPONENTS Core Hist RIO ) # Component(s in tne package: atlas_add_library( egammaUtils - Root/*.cxx - PUBLIC_HEADERS egammaUtils - INCLUDE_DIRS ${ROOT_INCLUDE_DIRS} - PRIVATE_INCLUDE_DIRS - LINK_LIBRARIES ${ROOT_LIBRARIES} AsgMessagingLib xAODCaloEvent xAODTracking xAODEgamma GeoPrimitives - PRIVATE_LINK_LIBRARIES FourMomUtils PathResolver AnalysisUtilsLib) + egammaUtils/*.h Root/*.cxx + PUBLIC_HEADERS egammaUtils + INCLUDE_DIRS ${ROOT_INCLUDE_DIRS} + LINK_LIBRARIES ${ROOT_LIBRARIES} xAODCaloEvent xAODEgamma GeoPrimitives + PRIVATE_LINK_LIBRARIES PathResolver ) atlas_add_dictionary( egammaUtilsDict - egammaUtils/egammaUtilsDict.h - egammaUtils/selection.xml - LINK_LIBRARIES egammaUtils 
) + egammaUtils/egammaUtilsDict.h + egammaUtils/selection.xml + LINK_LIBRARIES egammaUtils ) diff --git a/Reconstruction/egamma/egammaValidation/CMakeLists.txt b/Reconstruction/egamma/egammaValidation/CMakeLists.txt index a47daabbec5f68f45f0c03d196988f152968b7d6..6f704d797d16c1f56921a99e4be2fb2819d4c61d 100644 --- a/Reconstruction/egamma/egammaValidation/CMakeLists.txt +++ b/Reconstruction/egamma/egammaValidation/CMakeLists.txt @@ -19,5 +19,5 @@ atlas_install_scripts( test/*.sh scripts/*py ) atlas_add_test( ut_egammaARTJob_test SCRIPT test/ut_egammaARTJob_test.sh - PROPERTIES TIMEOUT 1000 + PROPERTIES TIMEOUT 1200 ) diff --git a/Reconstruction/egamma/egammaValidation/python/egammaOnlyPreExec.py b/Reconstruction/egamma/egammaValidation/python/egammaOnlyPreExec.py index a30d147a65a5afa5f77bddfd9f7d2267ad3d40d5..bd245f4a87d58e65b7ffdeb67652577072ab0bc4 100644 --- a/Reconstruction/egamma/egammaValidation/python/egammaOnlyPreExec.py +++ b/Reconstruction/egamma/egammaValidation/python/egammaOnlyPreExec.py @@ -1,8 +1,8 @@ # Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration -from ParticleBuilderOptions.AODFlags import AODFlags -from RecExConfig.RecAlgsFlags import recAlgs from RecExConfig.RecFlags import rec +from RecExConfig.RecAlgsFlags import recAlgs +from ParticleBuilderOptions.AODFlags import AODFlags from JetRec.JetRecFlags import jetFlags @@ -11,16 +11,20 @@ def setRunEgammaOnlyRecoFlags(): rec.doTau.set_Value_and_Lock(False) rec.doMuon.set_Value_and_Lock(False) rec.doBTagging.set_Value_and_Lock(False) + rec.doCaloRinger.set_Value_and_Lock(False) + recAlgs.doMuonSpShower.set_Value_and_Lock(False) recAlgs.doEFlow.set_Value_and_Lock(False) recAlgs.doEFlowJet.set_Value_and_Lock(False) recAlgs.doMissingET.set_Value_and_Lock(False) recAlgs.doMissingETSig.set_Value_and_Lock(False) recAlgs.doTrigger.set_Value_and_Lock(False) + AODFlags.ThinGeantTruth.set_Value_and_Lock(False) AODFlags.egammaTrackSlimmer.set_Value_and_Lock(False) 
AODFlags.ThinInDetForwardTrackParticles.set_Value_and_Lock(False) AODFlags.ThinNegativeEnergyNeutralPFOs.set_Value_and_Lock(False) + jetFlags.Enabled = False diff --git a/Reconstruction/tauRecTools/CMakeLists.txt b/Reconstruction/tauRecTools/CMakeLists.txt index ae8f23cedb7fae0703fdc373f37d088e40d313e0..ce207aee3150dc1566c4081be09667603d82e2ba 100644 --- a/Reconstruction/tauRecTools/CMakeLists.txt +++ b/Reconstruction/tauRecTools/CMakeLists.txt @@ -54,7 +54,7 @@ else() LINK_LIBRARIES ${ROOT_LIBRARIES} ${FASTJET_LIBRARIES} ${Boost_LIBRARIES} AthLinks AsgTools CxxUtils xAODCaloEvent xAODEventInfo xAODPFlow xAODTau xAODTracking xAODParticleEvent CaloUtilsLib Particle AsgDataHandlesLib MVAUtils - PRIVATE_LINK_LIBRARIES ${FASTJETCONTRIB_LIBRARIES} ${LWTNN_LIBRARIES} FourMomUtils xAODJet BeamSpotConditionsData + PRIVATE_LINK_LIBRARIES ${FASTJETCONTRIB_LIBRARIES} ${LWTNN_LIBRARIES} FourMomUtils xAODJet BeamSpotConditionsData PathResolver ) endif() @@ -64,8 +64,8 @@ if( NOT XAOD_STANDALONE ) src/*.h src/*.cxx src/components/*.cxx INCLUDE_DIRS ${ROOT_INCLUDE_DIRS} ${Boost_INCLUDE_DIRS} ${LWTNN_INCLUDE_DIRS} LINK_LIBRARIES ${ROOT_LIBRARIES} ${Boost_LIBRARIES} ${LWTNN_LIBRARIES} xAODTau - xAODTracking AthContainers FourMomUtils xAODCaloEvent xAODJet - xAODPFlow xAODParticleEvent MVAUtils BeamSpotConditionsData GaudiKernel tauRecToolsLib ) + xAODTracking AthContainers FourMomUtils xAODCaloEvent xAODJet + xAODPFlow xAODParticleEvent MVAUtils GaudiKernel tauRecToolsLib ) else() atlas_add_component( tauRecTools src/*.h src/*.cxx src/components/*.cxx @@ -80,4 +80,3 @@ if( NOT XAOD_STANDALONE ) tauRecToolsLib ) endif() endif() - diff --git a/Reconstruction/tauRecTools/Root/BDTHelper.cxx b/Reconstruction/tauRecTools/Root/BDTHelper.cxx index a5f3261489a1372696a1de3802b13d4afb82ff97..55ff2f163facdb24fd00c43fc1f104a0f5027257 100644 --- a/Reconstruction/tauRecTools/Root/BDTHelper.cxx +++ b/Reconstruction/tauRecTools/Root/BDTHelper.cxx @@ -63,13 +63,15 @@ std::vector<TString> 
BDTHelper::parseString(const TString& str, const TString& d // split the string with ",", and put them into a vector for(size_t i = 0; i < arraySize; ++i) { - TString var = dynamic_cast<TObjString*> (objList->At(i))->String(); - var.ReplaceAll(" ", ""); - if(var.Contains(":=")) { - var=var(var.Index(":=")+2, var.Length()-var.Index(":=")-2); + if (auto str = dynamic_cast<TObjString*> (objList->At(i))) { + TString var = str->String(); + var.ReplaceAll(" ", ""); + if(var.Contains(":=")) { + var=var(var.Index(":=")+2, var.Length()-var.Index(":=")-2); + } + if(0==var.Length()) continue; + parsedString.push_back(var); } - if(0==var.Length()) continue; - parsedString.push_back(var); } delete objList; diff --git a/Reconstruction/tauRecTools/Root/HelperFunctions.cxx b/Reconstruction/tauRecTools/Root/HelperFunctions.cxx index 1e9088f13b26e5794d8d2ef4cec83b71f6cd6612..d487e29d8cfe13fffc7e3a53b1ee4a4e2db7b9f7 100644 --- a/Reconstruction/tauRecTools/Root/HelperFunctions.cxx +++ b/Reconstruction/tauRecTools/Root/HelperFunctions.cxx @@ -92,9 +92,11 @@ std::vector<TString> tauRecTools::parseString(const TString& str, const TString& std::vector<TString> parsed_strings; TObjArray* varList_ar = str.Tokenize(delim); for(int i = 0; i != varList_ar->GetEntries(); ++i){ - TString var = dynamic_cast<TObjString*> (varList_ar->At(i))->String(); - if(var.Length()==0) continue; - parsed_strings.push_back(var); + if (auto tos = dynamic_cast<TObjString*> (varList_ar->At(i))) { + TString var = tos->String(); + if(var.Length()==0) continue; + parsed_strings.push_back(var); + } } delete varList_ar; return parsed_strings; diff --git a/Tools/Tier0ChainTests/test/test_q220_mp.sh b/Tools/Tier0ChainTests/test/test_q220_mp.sh index 0ab3fea359cd1dcb31cc309d6db6c2c463033af2..b404fc6606513a21f8472b8a39426dec9c5c0b1a 100755 --- a/Tools/Tier0ChainTests/test/test_q220_mp.sh +++ b/Tools/Tier0ChainTests/test/test_q220_mp.sh @@ -7,6 +7,7 @@ # art-include: master/Athena # art-include: 21.3/Athena # art-include: 
21.9/Athena +# art-athena-mt: 8 Reco_tf.py --AMI=q220 --athenaopts='--nprocs=2' --outputAODFile=myAOD.pool.root --outputESDFile=myESD.pool.root --outputHISTFile=myHIST.root --imf False echo "art-result: $?" diff --git a/Tools/Tier0ChainTests/test/test_q220_mt.sh b/Tools/Tier0ChainTests/test/test_q220_mt.sh index 8d00ae6d8a2ccf86a5d8ea6da5cc69f1cdfcc6b5..78d87f9cb3f1cde6c079f77dbcd86f9ec1f78ae1 100755 --- a/Tools/Tier0ChainTests/test/test_q220_mt.sh +++ b/Tools/Tier0ChainTests/test/test_q220_mt.sh @@ -2,11 +2,8 @@ # # art-description: RecoTrf # art-type: grid -# art-include: 21.0/Athena -# art-include: 21.0-TrigMC/Athena # art-include: master/Athena -# art-include: 21.3/Athena -# art-include: 21.9/Athena +# art-athena-mt: 8 Reco_tf.py --AMI=q220 --athenaopts='--threads=1' --outputAODFile=myAOD.pool.root --outputESDFile=myESD.pool.root --imf False echo "art-result: $?" diff --git a/Tools/Tier0ChainTests/test/test_q221.sh b/Tools/Tier0ChainTests/test/test_q221.sh index e40936e53fb8827390a56f112f26316a8760b255..75bbcdd31b67ad501fc0d6533223275d5eb7b722 100755 --- a/Tools/Tier0ChainTests/test/test_q221.sh +++ b/Tools/Tier0ChainTests/test/test_q221.sh @@ -8,7 +8,7 @@ # art-include: 21.3/Athena # art-include: 21.9/Athena -Reco_tf.py --AMI=q221 --outputRDOFile=myRDO.pool.root --outputAODFile=myAOD.pool.root --outputESDFile=myESD.pool.root --outputHISTFile=myHIST.root --imf False --preExec="all:from IOVDbSvc.CondDB import conddb; conddb.addOverride('/PIXEL/PixMapOverlay','PixMapOverlay-SIM-MC16-000-03');" +Reco_tf.py --AMI=q221 --outputRDOFile=myRDO.pool.root --outputAODFile=myAOD.pool.root --outputESDFile=myESD.pool.root --outputHISTFile=myHIST.root --imf False --preExec="all:from IOVDbSvc.CondDB import conddb; conddb.addOverride('/PIXEL/PixMapOverlay','PixMapOverlay-SIM-MC16-000-03');" --maxEvents=100 echo "art-result: $? 
Reco" Reco_tf.py --validationFlags 'doExample,doMET,doPFlow,doTau,doEgamma,doBtag,doZee,doJet,doTopoCluster,doMuon,doTrigMinBias,doTrigIDtrk,doTrigBphys,doTrigMET,doTrigJet,doTrigTau, doTrigEgamma,doTrigMuon,doTrigBjet,doTrigHLTResult' --inputAODFile=myAOD.pool.root --outputNTUP_PHYSVALFile=myNTUP_PHYSVAL.root @@ -16,5 +16,5 @@ echo "art-result: $? PhysVal" ArtPackage=$1 ArtJobName=$2 -art.py compare grid --entries 10 ${ArtPackage} ${ArtJobName} +art.py compare grid --entries 30 ${ArtPackage} ${ArtJobName} echo "art-result: $? Diff" diff --git a/Tools/Tier0ChainTests/test/test_q221_mp.sh b/Tools/Tier0ChainTests/test/test_q221_mp.sh index 679fcf87c8f0b6dd713c23196fffd12cf8c6a961..a1c8acb8defbc58794d71e56b127ad4d55f4f794 100755 --- a/Tools/Tier0ChainTests/test/test_q221_mp.sh +++ b/Tools/Tier0ChainTests/test/test_q221_mp.sh @@ -7,8 +7,9 @@ # art-include: master/Athena # art-include: 21.3/Athena # art-include: 21.9/Athena +# art-athena-mt: 8 -Reco_tf.py --AMI=q221 --athenaopts='--nprocs=2' --outputRDOFile=myRDO.pool.root --outputAODFile=myAOD.pool.root --outputESDFile=myESD.pool.root --outputHISTFile=myHIST.root --imf False --preExec="all:from IOVDbSvc.CondDB import conddb; conddb.addOverride('/PIXEL/PixMapOverlay','PixMapOverlay-SIM-MC16-000-03');" +Reco_tf.py --AMI=q221 --athenaopts='--nprocs=8' --outputRDOFile=myRDO.pool.root --outputAODFile=myAOD.pool.root --outputESDFile=myESD.pool.root --outputHISTFile=myHIST.root --imf False --preExec="all:from IOVDbSvc.CondDB import conddb; conddb.addOverride('/PIXEL/PixMapOverlay','PixMapOverlay-SIM-MC16-000-03');" --maxEvents=100 echo "art-result: $? Reco" Reco_tf.py --validationFlags 'doExample,doMET,doPFlow,doTau,doEgamma,doBtag,doZee,doJet,doTopoCluster,doMuon,doTrigMinBias,doTrigIDtrk,doTrigBphys,doTrigMET,doTrigJet,doTrigTau, doTrigEgamma,doTrigMuon,doTrigBjet,doTrigHLTResult' --inputAODFile=myAOD.pool.root --outputNTUP_PHYSVALFile=myNTUP_PHYSVAL.root @@ -16,6 +17,6 @@ echo "art-result: $? 
PhysVal" ArtPackage=$1 ArtJobName=$2 -art.py compare grid --entries 10 ${ArtPackage} ${ArtJobName} +art.py compare grid --entries 30 ${ArtPackage} ${ArtJobName} echo "art-result: $? Diff" diff --git a/Tools/Tier0ChainTests/test/test_q221_mt.sh b/Tools/Tier0ChainTests/test/test_q221_mt.sh index 76b3d511b6dae22f310c887606d050f8a79330a2..ed20b26e137848ed2afd257d43edd741dcb084b1 100755 --- a/Tools/Tier0ChainTests/test/test_q221_mt.sh +++ b/Tools/Tier0ChainTests/test/test_q221_mt.sh @@ -2,13 +2,10 @@ # # art-description: RecoTrf # art-type: grid -# art-include: 21.0/Athena -# art-include: 21.0-TrigMC/Athena # art-include: master/Athena -# art-include: 21.3/Athena -# art-include: 21.9/Athena +# art-athena-mt: 8 -Reco_tf.py --AMI=q221 --athenaopts='--threads=1' --outputRDOFile=myRDO.pool.root --outputAODFile=myAOD.pool.root --outputESDFile=myESD.pool.root --imf False --preExec="all:from IOVDbSvc.CondDB import conddb; conddb.addOverride('/PIXEL/PixMapOverlay','PixMapOverlay-SIM-MC16-000-03');" +Reco_tf.py --AMI=q221 --athenaopts='--threads=8' --outputRDOFile=myRDO.pool.root --outputAODFile=myAOD.pool.root --outputESDFile=myESD.pool.root --imf False --preExec="all:from IOVDbSvc.CondDB import conddb; conddb.addOverride('/PIXEL/PixMapOverlay','PixMapOverlay-SIM-MC16-000-03');" --maxEvents=100 echo "art-result: $? Reco" Reco_tf.py --validationFlags 'doExample,doMET,doPFlow,doTau,doEgamma,doBtag,doZee,doJet,doTopoCluster,doMuon,doTrigMinBias,doTrigIDtrk,doTrigBphys,doTrigMET,doTrigJet,doTrigTau, doTrigEgamma,doTrigMuon,doTrigBjet,doTrigHLTResult' --inputAODFile=myAOD.pool.root --outputNTUP_PHYSVALFile=myNTUP_PHYSVAL.root @@ -16,5 +13,5 @@ echo "art-result: $? PhysVal" ArtPackage=$1 ArtJobName=$2 -art.py compare grid --entries 10 ${ArtPackage} ${ArtJobName} +art.py compare grid --entries 30 ${ArtPackage} ${ArtJobName} echo "art-result: $? 
Diff" diff --git a/Tools/Tier0ChainTests/test/test_q223_mp.sh b/Tools/Tier0ChainTests/test/test_q223_mp.sh index b61face0f6eb90fc105beee095e5a99cd96c922d..74cf91e960f0607abb20638b75d214fb7c029e14 100755 --- a/Tools/Tier0ChainTests/test/test_q223_mp.sh +++ b/Tools/Tier0ChainTests/test/test_q223_mp.sh @@ -7,6 +7,7 @@ # art-include: master/Athena # art-include: 21.3/Athena # art-include: 21.9/Athena +# art-athena-mt: 8 Reco_tf.py --AMI=q223 --athenaopts='--nprocs=2' --outputAODFile=myAOD.pool.root --outputESDFile=myESD.pool.root --outputHISTFile=myHIST.root --imf False echo "art-result: $? Reco" diff --git a/Tools/Tier0ChainTests/test/test_q223_mt.sh b/Tools/Tier0ChainTests/test/test_q223_mt.sh index 15d69d1b0f57c33ced8a67c9c795ddf20dddcea5..43dbbda69573eca8b3b10618c39b60102e911f46 100755 --- a/Tools/Tier0ChainTests/test/test_q223_mt.sh +++ b/Tools/Tier0ChainTests/test/test_q223_mt.sh @@ -2,11 +2,8 @@ # # art-description: RecoTrf # art-type: grid -# art-include: 21.0/Athena -# art-include: 21.0-TrigMC/Athena # art-include: master/Athena -# art-include: 21.3/Athena -# art-include: 21.9/Athena +# art-athena-mt: 8 Reco_tf.py --AMI=q223 --athenaopts='--threads=1' --outputAODFile=myAOD.pool.root --outputESDFile=myESD.pool.root --imf False echo "art-result: $? Reco" diff --git a/Tools/Tier0ChainTests/test/test_q431.sh b/Tools/Tier0ChainTests/test/test_q431.sh index 47d12d3c523e56673b7b9b938351530d8ae685ee..89e28b361c3d837da59a59bc35f9c5b62dcd72f0 100755 --- a/Tools/Tier0ChainTests/test/test_q431.sh +++ b/Tools/Tier0ChainTests/test/test_q431.sh @@ -8,10 +8,10 @@ # art-include: 21.3/Athena # art-include: 21.9/Athena -Reco_tf.py --AMI=q431 --outputAODFile=myAOD.pool.root --outputESDFile=myESD.pool.root --outputHISTFile=myHIST.root --imf False --maxEvents=1000 +Reco_tf.py --AMI=q431 --outputAODFile=myAOD.pool.root --outputESDFile=myESD.pool.root --outputHISTFile=myHIST.root --imf False --maxEvents=100 echo "art-result: $? 
Reco" ArtPackage=$1 ArtJobName=$2 -art.py compare grid --entries 10 ${ArtPackage} ${ArtJobName} +art.py compare grid --entries 30 ${ArtPackage} ${ArtJobName} echo "art-result: $? Diff" diff --git a/Tools/Tier0ChainTests/test/test_q431_mp.sh b/Tools/Tier0ChainTests/test/test_q431_mp.sh index c3682da02683f638dfe30715b740bf2ea620f30e..4172e62e27df3e4ca3193540c43eb34339ca686d 100755 --- a/Tools/Tier0ChainTests/test/test_q431_mp.sh +++ b/Tools/Tier0ChainTests/test/test_q431_mp.sh @@ -7,11 +7,12 @@ # art-include: master/Athena # art-include: 21.3/Athena # art-include: 21.9/Athena +# art-athena-mt: 8 -Reco_tf.py --AMI=q431 --athenaopts='--nprocs=2' --outputAODFile=myAOD.pool.root --outputESDFile=myESD.pool.root --outputHISTFile=myHIST.root --imf False --maxEvents=1000 +Reco_tf.py --AMI=q431 --athenaopts='--nprocs=8' --outputAODFile=myAOD.pool.root --outputESDFile=myESD.pool.root --outputHISTFile=myHIST.root --imf False --maxEvents=100 echo "art-result: $? Reco" ArtPackage=$1 ArtJobName=$2 -art.py compare grid --entries 10 ${ArtPackage} ${ArtJobName} +art.py compare grid --entries 30 ${ArtPackage} ${ArtJobName} echo "art-result: $? Diff" diff --git a/Tools/Tier0ChainTests/test/test_q431_mt.sh b/Tools/Tier0ChainTests/test/test_q431_mt.sh index a755a1d7b1f1f7f0a78e99d775e795c1e6c564c6..8dbd6048c606589b2d108c9df94892c2826c5041 100755 --- a/Tools/Tier0ChainTests/test/test_q431_mt.sh +++ b/Tools/Tier0ChainTests/test/test_q431_mt.sh @@ -2,16 +2,13 @@ # # art-description: RecoTrf # art-type: grid -# art-include: 21.0/Athena -# art-include: 21.0-TrigMC/Athena # art-include: master/Athena -# art-include: 21.3/Athena -# art-include: 21.9/Athena +# art-athena-mt: 8 -Reco_tf.py --AMI=q431 --athenaopts='--threads=2' --outputAODFile=myAOD.pool.root --outputESDFile=myESD.pool.root --imf False --maxEvents=1000 +Reco_tf.py --AMI=q431 --athenaopts='--threads=8' --outputAODFile=myAOD.pool.root --outputESDFile=myESD.pool.root --imf False --maxEvents=100 echo "art-result: $? 
Reco" ArtPackage=$1 ArtJobName=$2 -art.py compare grid --entries 10 ${ArtPackage} ${ArtJobName} +art.py compare grid --entries 30 ${ArtPackage} ${ArtJobName} echo "art-result: $? Diff" diff --git a/Tools/Tier0ChainTests/test/test_q441.sh b/Tools/Tier0ChainTests/test/test_q441.sh index c6969fcdea9df5044a85a59a266a99d16f0fda69..79fb1af1e196f0dd7c6c997ef80ba1d629617603 100755 --- a/Tools/Tier0ChainTests/test/test_q441.sh +++ b/Tools/Tier0ChainTests/test/test_q441.sh @@ -4,7 +4,7 @@ # art-type: grid # art-include: master/Athena -Reco_tf.py --AMI=q441 --inputHITSFile=/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/Tier0ChainTests/q441/22.0/HITS.12560240._000299.pool.root.1 --inputRDO_BKGFile=/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/Tier0ChainTests/q441/22.0/RDO.17190395._000013.pool.root.1 --maxEvents=25 --outputAODFile=q441.AOD.pool.root --outputESDFile=q441.ESD.pool.root +Reco_tf.py --AMI=q441 --athenaopts "RDOtoRDOTrigger:--imf --threads=1 --concurrent-events=1" --inputHITSFile=/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/Tier0ChainTests/q441/22.0/HITS.12560240._000299.pool.root.1 --inputRDO_BKGFile=/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/Tier0ChainTests/q441/22.0/RDO.17190395._000013.pool.root.1 --maxEvents=100 --outputAODFile=q441.AOD.pool.root --outputESDFile=q441.ESD.pool.root #This test currently has the muon isolation reconstruction switched off. It should be switched back on at a later date. echo "art-result: $?" 
diff --git a/Tools/Tier0ChainTests/test/test_q441_mp.sh b/Tools/Tier0ChainTests/test/test_q441_mp.sh index 10a25f938818df436a3b71692385adc714055164..a3083fb141d0c7da00d9c2cd6ce8f9ed957d5d2d 100755 --- a/Tools/Tier0ChainTests/test/test_q441_mp.sh +++ b/Tools/Tier0ChainTests/test/test_q441_mp.sh @@ -3,6 +3,7 @@ # art-description: RecoTrf # art-type: grid # art-include: master/Athena +# art-athena-mt: 8 Reco_tf.py --AMI=q441 --athenaopts='--nprocs=2' --inputHITSFile=/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/Tier0ChainTests/q441/22.0/HITS.12560240._000299.pool.root.1 --inputRDO_BKGFile=/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/Tier0ChainTests/q441/22.0/RDO.17190395._000013.pool.root.1 --maxEvents=25 --outputAODFile=q441.AOD.pool.root --outputESDFile=q441.ESD.pool.root diff --git a/Tools/Tier0ChainTests/test/test_q441_mt.sh b/Tools/Tier0ChainTests/test/test_q441_mt.sh index d78498e6688966ecd08cad576c19041f1aa66525..b1fb8d2752f8ba4777081fa25f064c5c9e5702bf 100755 --- a/Tools/Tier0ChainTests/test/test_q441_mt.sh +++ b/Tools/Tier0ChainTests/test/test_q441_mt.sh @@ -3,8 +3,9 @@ # art-description: RecoTrf # art-type: grid # art-include: master/Athena +# art-athena-mt: 8 -Reco_tf.py --AMI=q441 --athenaopts='--threads=1' --inputHITSFile=/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/Tier0ChainTests/q441/22.0/HITS.12560240._000299.pool.root.1 --inputRDO_BKGFile=/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/Tier0ChainTests/q441/22.0/RDO.17190395._000013.pool.root.1 --maxEvents=25 --outputAODFile=q441.AOD.pool.root --outputESDFile=q441.ESD.pool.root +Reco_tf.py --AMI=q441 --athenaopts='--threads=8' --inputHITSFile=/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/Tier0ChainTests/q441/22.0/HITS.12560240._000299.pool.root.1 --inputRDO_BKGFile=/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art/Tier0ChainTests/q441/22.0/RDO.17190395._000013.pool.root.1 --maxEvents=100 --outputAODFile=q441.AOD.pool.root --outputESDFile=q441.ESD.pool.root #This test currently has 
the muon isolation reconstruction switched off. It should be switched back on at a later date. echo "art-result: $?" diff --git a/Tracking/TrkFitter/TrkKalmanFitter/src/KalmanFitter.cxx b/Tracking/TrkFitter/TrkKalmanFitter/src/KalmanFitter.cxx index 90c908422b12e30964cc4f3eb1bae98e6c30ddd1..c1293717fd595848baf7787242576c98530c5c1c 100755 --- a/Tracking/TrkFitter/TrkKalmanFitter/src/KalmanFitter.cxx +++ b/Tracking/TrkFitter/TrkKalmanFitter/src/KalmanFitter.cxx @@ -429,12 +429,13 @@ Trk::KalmanFitter::fit(const EventContext& ctx, ATH_MSG_VERBOSE( "list of parameters as they are on the input track:"); DataVector<const TrackParameters>::const_iterator it = inputTrack.trackParameters()->begin(); - for(int i=0 ; it!=inputTrack.trackParameters()->end(); ++it, ++i) + for(int i=0 ; it!=inputTrack.trackParameters()->end(); ++it, ++i) { ATH_MSG_VERBOSE( "TrackPar" << (i<10 ? " " : " ") << i << " position mag : " << (*it)->position().mag() << ", to ref is " << ((*it)->position()-m_sortingRefPoint).mag()); - ATH_MSG_VERBOSE( "Now getting track parameters near origin " - << (m_option_enforceSorting? "via STL sort" : "as first TP (convention)")); + } + ATH_MSG_VERBOSE( "Now getting track parameters near origin " + << (m_option_enforceSorting? 
"via STL sort" : "as first TP (convention)")); } // fill internal trajectory through external preparator class const TrackParameters* minPar = nullptr; @@ -1182,13 +1183,13 @@ bool Trk::KalmanFitter::invokeAnnealingFilter(const Trk::TrackParameters*& star dafStatus = m_smoother->fitWithReference(m_trajectory, newFitQuality, kalMec); else dafStatus = m_smoother->fit(m_trajectory, newFitQuality, kalMec); - ATH_MSG_INFO( "Internal DAF returned with chi2 chain:"); - for (Trk::Trajectory::const_iterator it=m_trajectory.begin();it!=m_trajectory.end(); it++) { - if (!it->isOutlier()) { - if (it->fitQuality()) ATH_MSG_INFO( it->fitQuality()->chiSquared() << " % "); - else ATH_MSG_INFO( "Problem - no FitQ % "); - } + ATH_MSG_INFO( "Internal DAF returned with chi2 chain:"); + for (Trk::Trajectory::const_iterator it=m_trajectory.begin();it!=m_trajectory.end(); it++) { + if (!it->isOutlier()) { + if (it->fitQuality()) ATH_MSG_INFO( it->fitQuality()->chiSquared() << " % "); + else ATH_MSG_INFO( "Problem - no FitQ % "); } + } } bool successfulRecovery = newFitQuality!= nullptr diff --git a/Tracking/TrkFitter/TrkiPatFitter/TrkiPatFitter/MaterialAllocator.h b/Tracking/TrkFitter/TrkiPatFitter/TrkiPatFitter/MaterialAllocator.h index ee94bd319f7cea59d4ee6f8cf88b3374925ce845..a5505690b4780265615aa53ef06b6036d78c293f 100755 --- a/Tracking/TrkFitter/TrkiPatFitter/TrkiPatFitter/MaterialAllocator.h +++ b/Tracking/TrkFitter/TrkiPatFitter/TrkiPatFitter/MaterialAllocator.h @@ -62,12 +62,12 @@ public: FitParameters& fitParameters, Garbage_t& garbage) const; - // allocate material - void allocateMaterial (std::vector<FitMeasurement*>& measurements, - ParticleHypothesis particleHypothesis, - const FitParameters& fitParameters, - const TrackParameters& startParameters, - Garbage_t& garbage) const; + // allocate material + void allocateMaterial(std::vector<FitMeasurement*>& measurements, + ParticleHypothesis particleHypothesis, + FitParameters& fitParameters, + const TrackParameters& 
startParameters, + Garbage_t& garbage) const; // initialize scattering (needs to know X0 integral) void initializeScattering (std::vector<FitMeasurement*>& measurements) const; @@ -81,13 +81,13 @@ public: void orderMeasurements(std::vector<FitMeasurement*>& measurements, Amg::Vector3D startDirection, Amg::Vector3D startPosition) const; - - // has material been reallocated? - bool reallocateMaterial (std::vector<FitMeasurement*>& measurements, - const FitParameters& fitParameters, - Garbage_t& garbage) const; -private: + // has material been reallocated? + bool reallocateMaterial(std::vector<FitMeasurement*>& measurements, + FitParameters& fitParameters, + Garbage_t& garbage) const; + + private: // add material delimiters to control aggregation void addSpectrometerDelimiters (std::vector<FitMeasurement*>& measurements) const; @@ -106,11 +106,11 @@ private: Garbage_t& garbage) const; // allocate material in inner detector - void indetMaterial (std::vector<FitMeasurement*>& measurements, - ParticleHypothesis particleHypothesis, - const TrackParameters& startParameters, - Garbage_t& garbage) const; - + void indetMaterial(std::vector<FitMeasurement*>& measurements, + ParticleHypothesis particleHypothesis, + const TrackParameters& startParameters, + Garbage_t& garbage) const; + // material aggregation std::pair<FitMeasurement*,FitMeasurement*> materialAggregation ( const std::vector<const TrackStateOnSurface*>& material, @@ -129,11 +129,11 @@ private: void printMeasurements(std::vector<FitMeasurement*>& measurements) const; // allocate material in spectrometer - void spectrometerMaterial (std::vector<FitMeasurement*>& measurements, - ParticleHypothesis particleHypothesis, - const FitParameters& fitParameters, - const TrackParameters& startParameters, - Garbage_t& garbage) const; + void spectrometerMaterial(std::vector<FitMeasurement*>& measurements, + ParticleHypothesis particleHypothesis, + FitParameters& fitParameters, + const TrackParameters& startParameters, + 
Garbage_t& garbage) const; // Makes sure m_spectrometerEntrance is created, once only, and thread-safe void createSpectrometerEntranceOnce() const; diff --git a/Tracking/TrkFitter/TrkiPatFitter/src/MaterialAllocator.cxx b/Tracking/TrkFitter/TrkiPatFitter/src/MaterialAllocator.cxx index ce3c21a6eed06743426943cd89bb29e6b01152a9..44b907fe99bb27a6ed47de342d0597ca703d6559 100755 --- a/Tracking/TrkFitter/TrkiPatFitter/src/MaterialAllocator.cxx +++ b/Tracking/TrkFitter/TrkiPatFitter/src/MaterialAllocator.cxx @@ -99,14 +99,14 @@ namespace Trk // retrieve the necessary Extrapolators (muon tracking geometry is very picky!) ATH_CHECK( m_extrapolator.retrieve() ); ATH_MSG_DEBUG("Retrieved tool " << m_extrapolator); - + ATH_CHECK( m_intersector.retrieve() ); ATH_MSG_DEBUG("Retrieved tool " << m_intersector); // retrieve services ATH_CHECK( m_trackingGeometrySvc.retrieve() ); ATH_MSG_DEBUG("Retrieved Svc " << m_trackingGeometrySvc); - + // need to create the IndetExit and MuonEntrance TrackingVolumes ATH_CHECK( m_trackingVolumesSvc.retrieve() ); ATH_MSG_DEBUG("Retrieved Svc " << m_trackingVolumesSvc); @@ -605,7 +605,7 @@ namespace Trk void MaterialAllocator::allocateMaterial(std::vector<FitMeasurement*>& measurements, ParticleHypothesis particleHypothesis, - const FitParameters& fitParameters, + FitParameters& fitParameters, const TrackParameters& startParameters, Garbage_t& garbage) const { // different strategies used for indet and muon spectrometer @@ -705,9 +705,9 @@ namespace Trk // missing TrackingGeometrySvc - no leading material will be added m_messageHelper->printWarning(0); return nullptr; - } + } createSpectrometerEntranceOnce(); - + } // check input parameters are really in the spectrometer @@ -860,7 +860,7 @@ namespace Trk bool MaterialAllocator::reallocateMaterial(std::vector<FitMeasurement*>& measurements, - const FitParameters& parameters, + FitParameters& parameters, Garbage_t& garbage) const { ATH_MSG_DEBUG(" reallocateSpectrometerMaterial "); @@ -2189,7 
+2189,7 @@ namespace Trk void MaterialAllocator::spectrometerMaterial(std::vector<FitMeasurement*>& measurements, ParticleHypothesis particleHypothesis, - const FitParameters& fitParameters, + FitParameters& fitParameters, const TrackParameters& startParameters, Garbage_t& garbage) const { // return if no MS measurement @@ -2302,9 +2302,9 @@ namespace Trk // missing TrackingGeometrySvc - no spectrometer material added m_messageHelper->printWarning(2); return; - } + } createSpectrometerEntranceOnce(); - + } // entranceParameters are at the MS entrance surface (0 if perigee downstream) diff --git a/Tracking/TrkFitter/TrkiPatFitterUtils/TrkiPatFitterUtils/FitParameters.h b/Tracking/TrkFitter/TrkiPatFitterUtils/TrkiPatFitterUtils/FitParameters.h index 2ab47c78b88b99986b20ff29c1111e33f99e4d9b..70698a484e2cfdb489a18cb0aa71c376d58743c7 100755 --- a/Tracking/TrkFitter/TrkiPatFitterUtils/TrkiPatFitterUtils/FitParameters.h +++ b/Tracking/TrkFitter/TrkiPatFitterUtils/TrkiPatFitterUtils/FitParameters.h @@ -105,19 +105,20 @@ public: void setPhiInstability (void); double sinPhi (void) const; double sinTheta (void) const; - Perigee* startingPerigee (void) const; - const TrackParameters* trackParameters (MsgStream& log, - const FitMeasurement& measurement, - bool withCovariance=false) const; - void update (const Amg::VectorX& differences); - void update (Amg::Vector3D position, - Amg::Vector3D direction, - double qOverP, - const Amg::MatrixX& leadingCovariance); - const Amg::Vector3D& vertex (void) const; - double z0 (void) const; - -private: + Perigee* startingPerigee(void) const; + //The following can update parameters + const TrackParameters* trackParameters(MsgStream& log, + const FitMeasurement& measurement, + bool withCovariance = false); + void update(const Amg::VectorX& differences); + void update(Amg::Vector3D position, + Amg::Vector3D direction, + double qOverP, + const Amg::MatrixX& leadingCovariance); + const Amg::Vector3D& vertex(void) const; + double z0(void) const; 
+ + private: // assignment: no semantics, no implementation FitParameters &operator= (const FitParameters&); @@ -126,9 +127,9 @@ private: std::vector<double> m_alignmentOffset; std::vector<double> m_alignmentOffsetConstraint; double m_cosPhi; - mutable double m_cosPhi1; + double m_cosPhi1; double m_cosTheta; - mutable double m_cosTheta1; + double m_cosTheta1; double m_cotTheta; double m_d0; Amg::VectorX* m_differences; @@ -150,7 +151,7 @@ private: bool m_phiInstability; Amg::Vector3D m_position; double m_qOverP; - mutable double m_qOverP1; + double m_qOverP1; std::vector<double> m_scattererPhi; std::vector<double> m_scattererTheta; double m_sinPhi; diff --git a/Tracking/TrkFitter/TrkiPatFitterUtils/TrkiPatFitterUtils/FitProcedure.h b/Tracking/TrkFitter/TrkiPatFitterUtils/TrkiPatFitterUtils/FitProcedure.h index bd9a0c9b1e87cd231f5edd1535321fafbb803c7c..521dab0007785bdfbc45c94367f4c7118e853c21 100755 --- a/Tracking/TrkFitter/TrkiPatFitterUtils/TrkiPatFitterUtils/FitProcedure.h +++ b/Tracking/TrkFitter/TrkiPatFitterUtils/TrkiPatFitterUtils/FitProcedure.h @@ -66,7 +66,7 @@ public: // retrieve result Track* constructTrack (const std::vector<FitMeasurement*>& measurements, - const FitParameters& parameters, + FitParameters& parameters, const TrackInfo& trackInfo, const DataVector<const TrackStateOnSurface>* leadingTSOS = 0); diff --git a/Tracking/TrkFitter/TrkiPatFitterUtils/TrkiPatFitterUtils/IMaterialAllocator.h b/Tracking/TrkFitter/TrkiPatFitterUtils/TrkiPatFitterUtils/IMaterialAllocator.h index 2949072a0db5ef04d98616d85484a17a0918f727..e4320ade450d2034ed268284287e9b7b80d3f9b7 100755 --- a/Tracking/TrkFitter/TrkiPatFitterUtils/TrkiPatFitterUtils/IMaterialAllocator.h +++ b/Tracking/TrkFitter/TrkiPatFitterUtils/TrkiPatFitterUtils/IMaterialAllocator.h @@ -51,13 +51,13 @@ public: ParticleHypothesis particleHypothesis, FitParameters& fitParameters, Garbage_t& garbage) const = 0; - + /**IMaterialAllocator interface: allocate material */ - virtual void allocateMaterial 
(std::vector<FitMeasurement*>& measurements, - ParticleHypothesis particleHypothesis, - const FitParameters& fitParameters, - const TrackParameters& startParameters, - Garbage_t& garbage) const = 0; + virtual void allocateMaterial(std::vector<FitMeasurement*>& measurements, + ParticleHypothesis particleHypothesis, + FitParameters& fitParameters, + const TrackParameters& startParameters, + Garbage_t& garbage) const = 0; /**IMaterialAllocator interface: initialize scattering (needs to know X0 integral) */ virtual void initializeScattering (std::vector<FitMeasurement*>& measurements) const = 0; @@ -72,12 +72,11 @@ public: virtual void orderMeasurements (std::vector<FitMeasurement*>& measurements, Amg::Vector3D startDirection, Amg::Vector3D startPosition) const = 0; - - /**IMaterialAllocator interface: has material been reallocated? */ - virtual bool reallocateMaterial (std::vector<FitMeasurement*>& measurements, - const FitParameters& fitParameters, - Garbage_t& garbage) const = 0; + /**IMaterialAllocator interface: has material been reallocated? 
*/ + virtual bool reallocateMaterial(std::vector<FitMeasurement*>& measurements, + FitParameters& fitParameters, + Garbage_t& garbage) const = 0; }; } // end of namespace diff --git a/Tracking/TrkFitter/TrkiPatFitterUtils/src/FitParameters.cxx b/Tracking/TrkFitter/TrkiPatFitterUtils/src/FitParameters.cxx index 68bcf57ca450bcf6afe73a6aaf5f697818c09d6e..626a509d8c02ba46ba7f980f7d7f4138536a7c3a 100755 --- a/Tracking/TrkFitter/TrkiPatFitterUtils/src/FitParameters.cxx +++ b/Tracking/TrkFitter/TrkiPatFitterUtils/src/FitParameters.cxx @@ -21,7 +21,7 @@ #include "TrkiPatFitterUtils/ParameterType.h" namespace Trk{ - + FitParameters::FitParameters (const Perigee& perigee) : m_cosPhi1 (0.), m_cosTheta1 (0.), @@ -49,7 +49,7 @@ FitParameters::FitParameters (const Perigee& perigee) m_vertex (perigee.associatedSurface().center()), m_z0 (perigee.position().z()) { - Amg::Vector3D momentum = perigee.momentum(); + Amg::Vector3D momentum = perigee.momentum(); double ptInv0 = 1./momentum.perp(); m_cosPhi = ptInv0*momentum.x(); m_sinPhi = ptInv0*momentum.y(); @@ -164,7 +164,7 @@ FitParameters::addAlignment (bool constrained, double angle, double offset) { m_alignmentAngleConstraint[m_numberAlignments] = angle; m_alignmentOffsetConstraint[m_numberAlignments] = offset; - } + } ++m_numberAlignments; } @@ -270,7 +270,7 @@ FitParameters::parameterDifference (const Amg::VectorX& parameters) const difference(0,4) = (parameters(5) - m_qOverP)*Gaudi::Units::TeV; return difference; } - + void FitParameters::performCutStep (double cutStep) { @@ -344,7 +344,7 @@ FitParameters::print (MsgStream& log) const << std::setw(11) << std::setprecision(6) << std::atan2(m_sinPhi,m_cosPhi) << " phi" << std::setw(11) << std::setprecision(6) << std::acos(m_cosTheta) - << " theta" + << " theta" << std::setw(13) << std::setprecision(3) << m_sinTheta/(m_qOverP*Gaudi::Units::GeV) << " pT (GeV)"; } @@ -394,11 +394,11 @@ FitParameters::printVerbose (MsgStream& log) const << std::setw(10) << std::setprecision(4) << 
differences(0) << " (0) " << std::setw(10) << std::setprecision(4) << differences(1) - << " (1) " + << " (1) " << std::setw(10) << std::setprecision(5) << differences(2) - << " (2) " + << " (2) " << std::setw(10) << std::setprecision(5) << differences(3) - << " (3) " + << " (3) " << std::setw(13) << std::setprecision(9) << differences(4)*Gaudi::Units::GeV/Gaudi::Units::TeV << " (4) "; if (m_fitEnergyDeposit) @@ -406,7 +406,7 @@ FitParameters::printVerbose (MsgStream& log) const << std::setw(13) << std::setprecision(9) << differences(5)*Gaudi::Units::GeV/Gaudi::Units::TeV << " (5) "; log << std::endl; - + if (m_numberAlignments) { log << " dAlign ==== "; @@ -453,17 +453,17 @@ FitParameters::printVerbose (MsgStream& log) const << std::setw(12) << std::setprecision(4) << m_d0 << " transverse impact " << std::setw(10) << std::setprecision(4) << m_position.z() - << " z0 " + << " z0 " << std::setw(10) << std::setprecision(6) << std::atan2(m_sinPhi,m_cosPhi) << std::setw(10) << std::setprecision(6) << m_cotTheta - << " phi,cotTheta " + << " phi,cotTheta " << std::setw(13) << std::setprecision(9) << m_qOverP/m_sinTheta << " inverse pT " << std::setw(12) << std::setprecision(6) << m_sinTheta/(m_qOverP*Gaudi::Units::GeV) << " signed pT "; if (m_fitEnergyDeposit) { - // TODO: should give fitted energy loss + // TODO: should give fitted energy loss log << std::endl << " E before/after energy deposit" << std::setw(12) << std::setprecision(3) << 1./std::abs(m_qOverP*Gaudi::Units::GeV) @@ -519,7 +519,7 @@ FitParameters::reset (const FitParameters& parameters) { // method is needed to complement copy in places where design uses // a reference to a FitParameter pointer - // essentially a copy, with history of previous iteration removed + // essentially a copy, with history of previous iteration removed m_cosPhi = parameters.m_cosPhi; m_cosPhi1 = parameters.m_cosPhi1; m_cosTheta = parameters.m_cosTheta; @@ -562,8 +562,8 @@ FitParameters::reset (const FitParameters& parameters) 
m_scattererPhi[s] = parameters.m_scattererPhi[s]; m_scattererTheta[s] = parameters.m_scattererTheta[s]; } - - // restore difference history + + // restore difference history delete m_differences; if (parameters.m_differences) { @@ -601,7 +601,7 @@ FitParameters::scatteringAngles (const FitMeasurement& fitMeasurement, int scatt scattererSigmaTheta); } } - + void FitParameters::setPhiInstability (void) { m_phiInstability = true; } @@ -614,7 +614,7 @@ FitParameters::startingPerigee (void) const double charge = 1.; if (m_qOverP < 0.) charge = -1.; Amg::Vector3D momentum(pT*m_cosPhi,pT*m_sinPhi,pT*m_cotTheta); - + return new Perigee(m_position, momentum, charge, @@ -624,7 +624,7 @@ FitParameters::startingPerigee (void) const const TrackParameters* FitParameters::trackParameters (MsgStream& log, const FitMeasurement& measurement, - bool withCovariance) const + bool withCovariance) { // make checks necessary for the TrackParameters to be computed // 1) a Surface is required @@ -640,7 +640,7 @@ FitParameters::trackParameters (MsgStream& log, log << MSG::WARNING << "FitParameters::trackParameters - invalid measurement" << endmsg; return nullptr; } - + // 3) the intersection position has to lie sufficiently close to the Surface const TrackSurfaceIntersection& intersection = measurement.intersection(FittedTrajectory); Amg::Vector2D localPos; @@ -651,7 +651,7 @@ FitParameters::trackParameters (MsgStream& log, log << MSG::WARNING << "FitParameters::trackParameters - globalToLocal failure" << endmsg; return nullptr; } - + // cache parameters at EnergyDeposit if (measurement.isEnergyDeposit()) { @@ -661,7 +661,7 @@ FitParameters::trackParameters (MsgStream& log, m_cosTheta1 = intersection.direction().z(); m_qOverP1 = measurement.qOverP(); } - + // propagate full covariance to form localCovariance AmgSymMatrix(5)* covMatrix = nullptr; if (withCovariance @@ -739,7 +739,7 @@ FitParameters::trackParameters (MsgStream& log, } theta = M_PI - theta; } - + // finally can create the 
appropriate 'concrete' TrackParameters const TrackParameters* parameters = nullptr; const StraightLineSurface* line = dynamic_cast<const StraightLineSurface*>(measurement.surface()); @@ -767,7 +767,7 @@ FitParameters::trackParameters (MsgStream& log, covMatrix); return parameters; } - + const CylinderSurface* cylinder = dynamic_cast<const CylinderSurface*>(measurement.surface()); if (cylinder) { @@ -793,7 +793,7 @@ FitParameters::trackParameters (MsgStream& log, covMatrix); return parameters; } - + const PerigeeSurface* peri = dynamic_cast<const PerigeeSurface*>(measurement.surface()); if (peri) { @@ -806,7 +806,7 @@ FitParameters::trackParameters (MsgStream& log, covMatrix); return parameters; } - + log << MSG::WARNING << "FitParameters::trackParameters - unrecognized surface" << endmsg; delete covMatrix; return nullptr; @@ -837,7 +837,7 @@ FitParameters::update (const Amg::VectorX& differences) (*a++) += differences(++align); (*o++) += differences(++align); } - + // scattering angles std::vector<double>::iterator p = m_scattererPhi.begin(); std::vector<double>::iterator t = m_scattererTheta.begin(); @@ -847,14 +847,14 @@ FitParameters::update (const Amg::VectorX& differences) (*p++) += differences(++scat); (*t++) += differences(++scat); } - + // qOverP, cotTheta if (m_fitMomentum) m_qOverP += differences(4)/Gaudi::Units::TeV; m_cotTheta -= differences(3)/(m_sinTheta*m_sinTheta); - + // impose charge conservation and decreasing energy if (m_fitEnergyDeposit) - { + { m_qOverP1 += differences(5)/Gaudi::Units::TeV; double deposit = 1./std::abs(m_qOverP) - 1./std::abs(m_qOverP1); if (std::abs(deposit) < std::abs(m_minEnergyDeposit) @@ -865,7 +865,7 @@ FitParameters::update (const Amg::VectorX& differences) if (m_qOverP1 < 0.) 
m_qOverP = -m_qOverP; } } - + // protect phi against some rounding instabilities double sinDPhi = differences(2); double cosDPhi = 0.; @@ -896,7 +896,7 @@ FitParameters::update (const Amg::VectorX& differences) m_vertex.y() + m_d0*m_cosPhi, m_z0); } - + void FitParameters::update (Amg::Vector3D position, Amg::Vector3D direction, diff --git a/Tracking/TrkFitter/TrkiPatFitterUtils/src/FitProcedure.cxx b/Tracking/TrkFitter/TrkiPatFitterUtils/src/FitProcedure.cxx index 7adacb872eb5e0fc2d9669d3c3a6955de46d106b..1c8f3332d7c3705f980948a0c7949ed2b9b2999d 100755 --- a/Tracking/TrkFitter/TrkiPatFitterUtils/src/FitProcedure.cxx +++ b/Tracking/TrkFitter/TrkiPatFitterUtils/src/FitProcedure.cxx @@ -41,7 +41,7 @@ #include "TrkiPatFitterUtils/MeasurementProcessor.h" namespace Trk{ - + // constructor FitProcedure::FitProcedure (bool constrainedAlignmentEffects, bool extendedDebug, @@ -109,7 +109,7 @@ FitProcedure::clear (void) Track* FitProcedure::constructTrack (const std::vector<FitMeasurement*>& measurements, - const FitParameters& parameters, + FitParameters& parameters, const TrackInfo& trackInfo, const DataVector<const TrackStateOnSurface>* leadingTSOS) { @@ -118,7 +118,7 @@ FitProcedure::constructTrack (const std::vector<FitMeasurement*>& measurements, // NB keep first and last measurements distinct i.e. separate TSOS (no scatterers etc) // NB trackParameters outwards from TSOS i.e. 
always last FitMeas on surface - + // create vector of TSOS - reserve upper limit for size (+1 as starts with perigee) DataVector<const TrackStateOnSurface>* trackStateOnSurfaces = new DataVector<const TrackStateOnSurface>; @@ -162,12 +162,12 @@ FitProcedure::constructTrack (const std::vector<FitMeasurement*>& measurements, } } } - + // then append the fitted TSOS for (auto *m : measurements) { if (m->isMaterialDelimiter()) continue; - + // push back previous TSOS when fresh surface reached if (m->surface() != surface || alignmentEffects || m->alignmentEffects()) { @@ -257,7 +257,7 @@ FitProcedure::constructTrack (const std::vector<FitMeasurement*>& measurements, typePattern = defaultPattern; alignmentEffects = nullptr; } - + measurementBase = m->measurementBase()->clone(); typePattern.set(TrackStateOnSurface::Measurement); if (m->isOutlier()) typePattern.set(TrackStateOnSurface::Outlier); @@ -285,11 +285,11 @@ FitProcedure::constructTrack (const std::vector<FitMeasurement*>& measurements, { const EnergyLoss* energyLoss = meot->energyLoss()->clone(); typeMaterial.set(Trk::MaterialEffectsBase::EnergyLossEffects); - if (m->numberDoF()) // fitted scatterer + if (m->numberDoF()) // fitted scatterer { materialEffects = new MaterialEffectsOnTrack(m->materialEffects()->thicknessInX0(), - parameters.scatteringAngles(*m,scatter), + parameters.scatteringAngles(*m,scatter), energyLoss, m->materialEffects()->associatedSurface(), typeMaterial); @@ -299,7 +299,7 @@ FitProcedure::constructTrack (const std::vector<FitMeasurement*>& measurements, { materialEffects = new MaterialEffectsOnTrack(m->materialEffects()->thicknessInX0(), - parameters.scatteringAngles(*m), + parameters.scatteringAngles(*m), energyLoss, m->materialEffects()->associatedSurface(), typeMaterial); @@ -311,7 +311,7 @@ FitProcedure::constructTrack (const std::vector<FitMeasurement*>& measurements, { materialEffects = new MaterialEffectsOnTrack(m->materialEffects()->thicknessInX0(), - 
parameters.scatteringAngles(*m,scatter), + parameters.scatteringAngles(*m,scatter), m->materialEffects()->associatedSurface(), typeMaterial); ++scatter; @@ -320,12 +320,12 @@ FitProcedure::constructTrack (const std::vector<FitMeasurement*>& measurements, { materialEffects = new MaterialEffectsOnTrack(m->materialEffects()->thicknessInX0(), - parameters.scatteringAngles(*m), + parameters.scatteringAngles(*m), m->materialEffects()->associatedSurface(), typeMaterial); } } - + typePattern.set(TrackStateOnSurface::Scatterer); } else @@ -341,14 +341,14 @@ FitProcedure::constructTrack (const std::vector<FitMeasurement*>& measurements, { typePattern.set(TrackStateOnSurface::Perigee); } - + // or alignment effects else if (m->alignmentEffects()) { const AlignmentEffectsOnTrack& AEOT = *m->alignmentEffects(); unsigned align = m->alignmentParameter() - 1; - *m_log << MSG::VERBOSE <<" Fitprocedure AEOT input deltaTranslation " << AEOT.deltaTranslation() << " deltaAngle " << AEOT.deltaAngle() << " output Trans " << parameters.alignmentOffset(align) << " deltaAngle " << parameters.alignmentAngle(align) << endmsg; + *m_log << MSG::VERBOSE <<" Fitprocedure AEOT input deltaTranslation " << AEOT.deltaTranslation() << " deltaAngle " << AEOT.deltaAngle() << " output Trans " << parameters.alignmentOffset(align) << " deltaAngle " << parameters.alignmentAngle(align) << endmsg; alignmentEffects = new Trk::AlignmentEffectsOnTrack(parameters.alignmentOffset(align), AEOT.sigmaDeltaTranslation(), @@ -358,7 +358,7 @@ FitProcedure::constructTrack (const std::vector<FitMeasurement*>& measurements, m->surface()); typePattern.set(TrackStateOnSurface::Alignment); } - + // passive types: hole for now else if (m->isPassive()) { @@ -419,7 +419,7 @@ FitProcedure::execute(bool asymmetricCaloEnergy, parameters->print(*m_log); *m_log << endmsg; } - + // choose appropriate intersector ToolHandle<IIntersector>& intersector = chooseIntersector(measurements,*parameters); @@ -457,13 +457,13 @@ 
FitProcedure::execute(bool asymmetricCaloEnergy, { m_fitMatrices->usePerigee(*measurements.front()); } - + // set requested options and initial values - double ptInvCut = 1./m_minPt; // protection against trapped particles + double ptInvCut = 1./m_minPt; // protection against trapped particles m_cutStep = true; m_convergence = false; m_nearConvergence = false; - + // keep best (original if not reasonable quality) results double bestChiSq = m_chiSqCut; FitParameters* bestParameters = nullptr; @@ -489,7 +489,7 @@ FitProcedure::execute(bool asymmetricCaloEnergy, if (m_extendedDebug) m_fitMatrices->checkPointers(*m_log); if (m_verbose) m_fitMatrices->printDerivativeMatrix(); } - + if (! m_fitMatrices->solveEquations()) { fitCode = 11; // fails matrix inversion @@ -514,8 +514,8 @@ FitProcedure::execute(bool asymmetricCaloEnergy, if (m_verbose && ! m_iteration) m_fitMatrices->printWeightMatrix(); } ++m_iteration; - - // report parameters + + // report parameters if (m_verbose) { *m_log << MSG::VERBOSE << " ===== start iteration " << m_iteration; @@ -529,7 +529,7 @@ FitProcedure::execute(bool asymmetricCaloEnergy, } parameters->printVerbose(*m_log); } - + // check for some error conditions (if none found yet) if (fitCode) { @@ -577,7 +577,7 @@ FitProcedure::execute(bool asymmetricCaloEnergy, m_fitMatrices->numberDoF(), parameters->numberScatterers(), m_worstMeasurement); - + if (m_debug) { if (m_verbose) *m_log << endmsg; @@ -588,7 +588,7 @@ FitProcedure::execute(bool asymmetricCaloEnergy, // have extrapolation and derivatives, calculate residual measurementProcessor.calculateResiduals(); - + // check for remaining error conditions. If OK then compute chisquared. if (m_iteration > m_maxIter && ! m_cutStep && for_iPatTrack) { @@ -596,7 +596,7 @@ FitProcedure::execute(bool asymmetricCaloEnergy, } else if (m_iteration == 4 && m_chiSq > 1000. && for_iPatTrack) { - fitCode = 7; // fail with too high chisquared + fitCode = 7; // fail with too high chisquared } else if (! 
fitCode) { @@ -629,7 +629,7 @@ FitProcedure::execute(bool asymmetricCaloEnergy, << m_iteration << ", numberOscillations " << parameters->numberOscillations() << endmsg; } - + // perform cutstep if (m_cutStep) { @@ -657,7 +657,7 @@ FitProcedure::execute(bool asymmetricCaloEnergy, bestParameters = new FitParameters(*parameters); parameters->resetOscillations(); } - + if (bestParameters && ((m_convergence && m_chiSq > bestChiSq + 0.5) || (parameters->phiInstability() && m_iteration == m_maxIter))) @@ -721,9 +721,9 @@ FitProcedure::execute(bool asymmetricCaloEnergy, m_chiSq = perigeeQuality->chiSquared() + m_chiSq * static_cast<double>(m_numberDoF); m_numberDoF += perigeeQuality->numberDoF(); - m_chiSq /= static_cast<double>(m_numberDoF); + m_chiSq /= static_cast<double>(m_numberDoF); } - + // probability of chisquared m_fitProbability = 1.; if (m_numberDoF > 0 && m_chiSq > 0.) @@ -768,7 +768,7 @@ FitProcedure::execute(bool asymmetricCaloEnergy, return *m_fitQuality; } - + Amg::MatrixX* FitProcedure::fullCovariance () const { @@ -776,7 +776,7 @@ FitProcedure::fullCovariance () const // return const_cast<Amg::MatrixX*>(m_fitMatrices->fullCovariance()); return nullptr; // NOT mig5 } - + void FitProcedure::setMinIterations (int minIter) { @@ -789,7 +789,7 @@ FitProcedure::calculateChiSq(std::vector<FitMeasurement*>& measurements) { // convergence criterion const double dChisqConv = 0.025; - + // compute total chisquared and sum of hit differences // flag hit with highest chisquared contribution (on entry if RoadFit) m_chiSq = 0.; @@ -803,10 +803,10 @@ FitProcedure::calculateChiSq(std::vector<FitMeasurement*>& measurements) // m_chiSq += m_fitMatrices->perigeeChiSquared(); // continue; // } - + double residual = m->residual(); double DiffSq = residual*residual; - m_chiSq += DiffSq; + m_chiSq += DiffSq; if (m->isPositionMeasurement()) { if (m->isDrift()) driftResidual += residual; @@ -843,7 +843,7 @@ FitProcedure::calculateChiSq(std::vector<FitMeasurement*>& measurements) 
m_chRatio2 = 0.; m_chiSqMin = m_chiSq; } - + m_chiSqOld = m_chiSqMin; double DChiSq = m_chiSqOld - m_chiSq; if (DChiSq > -dChisqConv) @@ -867,7 +867,7 @@ FitProcedure::calculateChiSq(std::vector<FitMeasurement*>& measurements) // fitlocal.dParMin[n] = fitlocal.dParam[n]; // } // } - + // if (m_cutTaken) // { // *m_log << " Cut ??? Chi2,DChiSq " << m_chiSq @@ -878,9 +878,9 @@ FitProcedure::calculateChiSq(std::vector<FitMeasurement*>& measurements) // << std::setiosflags(std::ios::fixed) // << " transverse impact parameter" // << std::setw(10) << std::setprecision(4) << parameters->d0() -// << " Z0" +// << " Z0" // << std::setw(10) << std::setprecision(4) << parameters->z0() -// << " 1/Pt" +// << " 1/Pt" // << std::setw(12) << std::setprecision(6) << parameters->ptInv0(); // // for (int n = 0; n < m_numberParameters; ++n) // // *m_log << " " << fitlocal.dParam[n]; @@ -889,19 +889,19 @@ FitProcedure::calculateChiSq(std::vector<FitMeasurement*>& measurements) // // *m_log << " " << fitlocal.dParMin[n]; // *m_log << std::endl; // } - + if (m_iteration > 0) { m_chRatio2 = m_chRatio1; - m_chRatio1 = m_chiSq/m_chiSqOld; + m_chRatio1 = m_chiSq/m_chiSqOld; } if (m_fitMatrices->numberDriftCircles()) { m_driftSumLast = m_driftSum; m_driftSum = driftResidual/static_cast<double>(m_fitMatrices->numberDriftCircles()); } - - // + + // // debugging info if (m_verbose) { @@ -915,7 +915,7 @@ FitProcedure::calculateChiSq(std::vector<FitMeasurement*>& measurements) << " ChSq Ratio1/2 " << std::setw(9) << std::setprecision(3) << m_chRatio1 << std::setw(10) << std::setprecision(3) << m_chRatio2 << std::endl << " driftResidual " << std::setw(9) << std::setprecision(3) << m_driftSum - << " #driftCircles " << m_fitMatrices->numberDriftCircles() << std::endl + << " #driftCircles " << m_fitMatrices->numberDriftCircles() << std::endl << " CutTaken " << m_cutTaken << std::endl << "----------------------------------" << std::endl << " "; @@ -940,9 +940,9 @@ 
FitProcedure::calculateChiSq(std::vector<FitMeasurement*>& measurements) } } } - - // - // check for possible convergence (nearConvergence forces extra iteration) + + // + // check for possible convergence (nearConvergence forces extra iteration) if (! m_cutStep && ! m_nCuts && ( m_chiSq < 0.1 @@ -955,7 +955,7 @@ FitProcedure::calculateChiSq(std::vector<FitMeasurement*>& measurements) m_convergence = true; } else - { + { m_nearConvergence = true; if (m_verbose) *m_log << MSG::VERBOSE << " near convergence " << endmsg; } @@ -964,7 +964,7 @@ FitProcedure::calculateChiSq(std::vector<FitMeasurement*>& measurements) { m_nearConvergence = false; } - + // else take cutstep if divergent or oscillating m_cutStep = false; // else if (m_nCuts < 2 || m_nCuts > 4) @@ -987,10 +987,10 @@ FitProcedure::calculateChiSq(std::vector<FitMeasurement*>& measurements) // if (m_verbose) // { -// *m_log << "----------------------------------" -// << " Debugging Info in ChiSquare method" -// << " cutstep - nCuts, -ve chisquared change " << m_nCuts -// << "----------------------------------" << std::endl; +// *m_log << "----------------------------------" +// << " Debugging Info in ChiSquare method" +// << " cutstep - nCuts, -ve chisquared change " << m_nCuts +// << "----------------------------------" << std::endl; // } // m_convergence = false; // m_chiSq = m_chiSqMin; @@ -1002,7 +1002,7 @@ FitProcedure::chooseIntersector (std::vector<FitMeasurement*>& measurements, const FitParameters& parameters) const { if (m_lineFit) return m_straightLineIntersector; - + // decide which intersector to use for curved tracks (default RungeKutta) // ToolHandle<IIntersector>& intersector = m_rungeKuttaIntersector; @@ -1012,7 +1012,7 @@ FitProcedure::chooseIntersector (std::vector<FitMeasurement*>& measurements, ++m) { if (! (**m).isPositionMeasurement()) continue; - if (! m_solenoidalIntersector->isValid(parameters.position(),(**m).position())) break; + if (! 
m_solenoidalIntersector->isValid(parameters.position(),(**m).position())) break; return m_solenoidalIntersector; } @@ -1034,7 +1034,7 @@ FitProcedure::reportQuality(const std::vector<FitMeasurement*>& measurements, { case 1: *m_log << " missing Trk::Surface "; - break; + break; case 2: *m_log << " too many measurements for fit matrix size: " << measurements.size(); @@ -1072,7 +1072,7 @@ FitProcedure::reportQuality(const std::vector<FitMeasurement*>& measurements, *m_log << " ill-defined cotTheta " << parameters.cotTheta() << " with difference " << parameters.difference(3) << " at iteration# "<< m_fitQuality->iterations(); - break; + break; case 11: *m_log << " singular matrix fails inversion:" << " at iteration# "<< m_fitQuality->iterations(); diff --git a/Tracking/TrkTools/TrkParticleCreator/src/TrackParticleCreatorTool.cxx b/Tracking/TrkTools/TrkParticleCreator/src/TrackParticleCreatorTool.cxx index 075e2dbbd6c0bc12b1ce901a40acb9a4ddfdc6d4..1d0a57bf43b2dee6885ea9bd9665582ed597213f 100644 --- a/Tracking/TrkTools/TrkParticleCreator/src/TrackParticleCreatorTool.cxx +++ b/Tracking/TrkTools/TrkParticleCreator/src/TrackParticleCreatorTool.cxx @@ -346,7 +346,7 @@ TrackParticleCreatorTool::TrackParticleCreatorTool(const std::string& t, } else if (m_perigeeExpression == "BeamLine"){ const Trk::Perigee* result = - m_trackToVertex->perigeeAtBeamline(*track, CacheBeamSpotData(ctx)); + m_trackToVertex->perigeeAtBeamline(ctx,*track, CacheBeamSpotData(ctx)); if (!result){ ATH_MSG_WARNING("Failed to extrapolate to Beamline"); @@ -532,7 +532,7 @@ TrackParticleCreatorTool::TrackParticleCreatorTool(const std::string& t, } else if (m_perigeeExpression == "BeamLine"){ const Trk::Perigee* result = - m_trackToVertex->perigeeAtBeamline(track, CacheBeamSpotData(ctx)); + m_trackToVertex->perigeeAtBeamline(ctx,track, CacheBeamSpotData(ctx)); if (!result){ ATH_MSG_WARNING("Failed to extrapolate to Beamline - No TrackParticle created."); return nullptr; diff --git 
a/Tracking/TrkTools/TrkTruthToTrack/TrkTruthToTrack/TruthTrackRecordToTrack.h b/Tracking/TrkTools/TrkTruthToTrack/TrkTruthToTrack/TruthTrackRecordToTrack.h index 877e50de410a2d8b35afc6c7fcbe8aa39d318211..7390c0e7640c1af5ee3d46cc14e185c2e9172365 100755 --- a/Tracking/TrkTools/TrkTruthToTrack/TrkTruthToTrack/TruthTrackRecordToTrack.h +++ b/Tracking/TrkTools/TrkTruthToTrack/TrkTruthToTrack/TruthTrackRecordToTrack.h @@ -49,7 +49,7 @@ namespace Trk { * ownership!) */ using ITruthToTrack::makeProdVertexParameters; - virtual const Trk::TrackParameters* makeProdVertexParameters(const HepMC::GenParticle* part) const; + virtual const Trk::TrackParameters* makeProdVertexParameters(HepMC::ConstGenParticlePtr part) const; virtual const Trk::TrackParameters* makeProdVertexParameters(const xAOD::TruthParticle* part) const; /** This function produces Trk::TrackParameters corresponding to @@ -59,7 +59,7 @@ namespace Trk { of deleting the returned parameters object. */ using ITruthToTrack::makePerigeeParameters; - virtual const Trk::TrackParameters* makePerigeeParameters(const HepMC::GenParticle* part) const; + virtual const Trk::TrackParameters* makePerigeeParameters(HepMC::ConstGenParticlePtr part) const; virtual const Trk::TrackParameters* makePerigeeParameters(const xAOD::TruthParticle* part) const; private: diff --git a/Tracking/TrkTools/TrkTruthToTrack/src/TruthTrackRecordToTrack.cxx b/Tracking/TrkTools/TrkTruthToTrack/src/TruthTrackRecordToTrack.cxx index d1e8e7209f594771efd3ba05160e20915471c05a..bf977e3d2207bc31c944bd7bea70856f2a3a093d 100755 --- a/Tracking/TrkTools/TrkTruthToTrack/src/TruthTrackRecordToTrack.cxx +++ b/Tracking/TrkTools/TrkTruthToTrack/src/TruthTrackRecordToTrack.cxx @@ -65,7 +65,7 @@ StatusCode Trk::TruthTrackRecordToTrack::initialize() { } //================================================================ -const Trk::TrackParameters* Trk::TruthTrackRecordToTrack::makeProdVertexParameters(const HepMC::GenParticle* part) const { +const Trk::TrackParameters* 
Trk::TruthTrackRecordToTrack::makeProdVertexParameters(HepMC::ConstGenParticlePtr part) const { if (part == nullptr || m_particleDataTable==nullptr) return nullptr; @@ -207,7 +207,7 @@ const Trk::TrackParameters* Trk::TruthTrackRecordToTrack::makeProdVertexParamete //================================================================ -const Trk::TrackParameters* Trk::TruthTrackRecordToTrack::makePerigeeParameters(const HepMC::GenParticle* part) const { +const Trk::TrackParameters* Trk::TruthTrackRecordToTrack::makePerigeeParameters(HepMC::ConstGenParticlePtr part) const { const Trk::TrackParameters* generatedTrackPerigee = nullptr; if(part && part->production_vertex() && m_particleDataTable && m_extrapolator) { diff --git a/Tracking/TrkTruthTracks/TrkTruthTrackAlgs/src/TruthTrackCreation.cxx b/Tracking/TrkTruthTracks/TrkTruthTrackAlgs/src/TruthTrackCreation.cxx index ad70752b16db18aa81ad9a4d254ac51de12a3e0c..37ad7e7a1c3f0e02ad6cebafbc734924310db28c 100644 --- a/Tracking/TrkTruthTracks/TrkTruthTrackAlgs/src/TruthTrackCreation.cxx +++ b/Tracking/TrkTruthTracks/TrkTruthTrackAlgs/src/TruthTrackCreation.cxx @@ -110,7 +110,7 @@ StatusCode Trk::TruthTrackCreation::execute() // ----------------------------------- main loop ------------------------------------------------------------------ // get the PRD truth trajectories - const std::map< const HepMC::GenParticle*, PRD_TruthTrajectory >& truthTraj = + const std::map< HepMC::ConstGenParticlePtr, PRD_TruthTrajectory >& truthTraj = m_prdTruthTrajectoryBuilder->truthTrajectories(); // some screen output ATH_MSG_VERBOSE("PRD_TruthTrajectoryBuilder delivered " << truthTraj.size() << " PRD truth trajectories, starting track creation."); diff --git a/Tracking/TrkValidation/TrkValAlgs/src/TrkEDMTestAlg.cxx b/Tracking/TrkValidation/TrkValAlgs/src/TrkEDMTestAlg.cxx index af7488febc00c35a9efbbef52ea081b82aca1c88..0ec1e2fe2ef72cce40031bb0706e6507dc025bb5 100644 --- a/Tracking/TrkValidation/TrkValAlgs/src/TrkEDMTestAlg.cxx +++ 
b/Tracking/TrkValidation/TrkValAlgs/src/TrkEDMTestAlg.cxx @@ -1,5 +1,5 @@ /* - Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration */ //<<<<<< INCLUDES >>>>>> @@ -27,7 +27,10 @@ StatusCode TrkEDMTestAlg::initialize() { // summary is special (normally want to write it) - if (m_dumpSummaryToFile) m_summaryFileOutput.open(m_summaryDumpFileName.c_str()); ATH_MSG_VERBOSE("SUM dump="<<m_summaryDumpFileName); + if (m_dumpSummaryToFile) { + m_summaryFileOutput.open(m_summaryDumpFileName.c_str()); + ATH_MSG_VERBOSE("SUM dump="<<m_summaryDumpFileName); + } return AthAlgorithm::initialize(); } diff --git a/Tracking/TrkValidation/TrkValTools/src/InDetPrimaryConversionSelector.cxx b/Tracking/TrkValidation/TrkValTools/src/InDetPrimaryConversionSelector.cxx index 26e8b3a1552558fc7751a711ca8e6d207bc82371..768c58a4fc206996182bb356d0e63b2a750b7a2d 100644 --- a/Tracking/TrkValidation/TrkValTools/src/InDetPrimaryConversionSelector.cxx +++ b/Tracking/TrkValidation/TrkValTools/src/InDetPrimaryConversionSelector.cxx @@ -59,61 +59,61 @@ StatusCode Trk::InDetPrimaryConversionSelector::finalize() { return StatusCode::SUCCESS; } -std::vector<const HepMC::GenParticle*>* +std::vector<HepMC::ConstGenParticlePtr>* Trk::InDetPrimaryConversionSelector::selectGenSignal (const McEventCollection* SimTracks) const { if (! 
SimTracks) return NULL; - std::vector<const HepMC::GenParticle *>* genSignal = - new std::vector<const HepMC::GenParticle *>; - + std::vector<HepMC::ConstGenParticlePtr>* genSignal = + new std::vector<HepMC::ConstGenParticlePtr>; // pile-up: vector of MCEC has more than one entry DataVector<HepMC::GenEvent>::const_iterator itCollision = SimTracks->begin(); for( ; itCollision != SimTracks->end(); ++itCollision ) { const HepMC::GenEvent* genEvent = *itCollision; - HepMC::GenParticle * particle = NULL; - for (HepMC::GenEvent::particle_const_iterator it = genEvent->particles_begin(); - it != genEvent->particles_end(); ++it) { - - particle = *it; + for (auto particle: *genEvent) { // 1) require stable particle from generation or simulation if ((particle->status()%1000) != 1 ) continue; - if(particle->production_vertex() == NULL) { + if(!particle->production_vertex()) { ATH_MSG_WARNING ("GenParticle without production vertex - simulation corrupt? "); - ATH_MSG_DEBUG ("It's this one: " << *particle); + ATH_MSG_DEBUG ("It's this one: " << particle); continue; } else { // 2) require track inside ID - relaxed definition including decays of neutrals (secondaries) - if ( fabs(particle->production_vertex()->position().perp()) > m_maxRStartAll || - fabs(particle->production_vertex()->position().z()) > m_maxZStartAll ) continue; + if ( std::fabs(particle->production_vertex()->position().perp()) > m_maxRStartAll || + std::fabs(particle->production_vertex()->position().z()) > m_maxZStartAll ) continue; int pdgCode = particle->pdg_id(); - if (abs(pdgCode) > 1000000000 ) continue; // ignore nuclei from hadronic interactions + if (std::abs(pdgCode) > 1000000000 ) continue; // ignore nuclei from hadronic interactions const HepPDT::ParticleData* pd = m_particleDataTable->particle(abs(pdgCode)); if (!pd) { // nuclei excluded, still problems with a given type? 
- ATH_MSG_INFO ("Could not get particle data for particle with pdgCode="<<pdgCode<< ", status=" << particle->status() << ", barcode=" << particle->barcode()); - ATH_MSG_INFO ("GenParticle= " << *particle); + ATH_MSG_INFO ("Could not get particle data for particle with pdgCode="<<pdgCode<< ", status=" << particle->status() << ", barcode=" << HepMC::barcode(particle)); + ATH_MSG_INFO ("GenParticle= " << particle); continue; } - ATH_MSG_DEBUG ("found particle = " << *particle); + ATH_MSG_DEBUG ("found particle = " << particle); // assume for the moment we're only running over single gamma MC files ... - HepMC::GenVertex* prodVertex( particle->production_vertex()); - if ( abs(pdgCode) == 11 ) { + auto prodVertex = particle->production_vertex(); + if ( std::abs(pdgCode) == 11 ) { ATH_MSG_DEBUG ("Electron/Positron detected -- checking for production process ..."); - HepMC::GenVertex::particles_in_const_iterator inParticle = prodVertex->particles_in_const_begin(); - HepMC::GenVertex::particles_out_const_iterator inParticleEnd = prodVertex->particles_in_const_end(); - for ( ; inParticle != inParticleEnd; ++inParticle) { - ATH_MSG_DEBUG(" --> checking morther: " << *(*inParticle) ); - if ( abs((*inParticle)->pdg_id()) == 22 || abs((*inParticle)->pdg_id()) == 11 ){ - if (fabs(particle->momentum().perp()) > m_minPt && fabs(particle->momentum().pseudoRapidity()) < m_maxEta ) { +#ifdef HEPMC3 + for ( auto inParticle: prodVertex->particles_in()) { +#else + HepMC::GenVertex::particles_in_const_iterator ItinParticle = prodVertex->particles_in_const_begin(); + HepMC::GenVertex::particles_out_const_iterator ItinParticleEnd = prodVertex->particles_in_const_end(); + for ( ; ItinParticle != ItinParticleEnd; ++ItinParticle) { + auto inParticle=*ItinParticle; +#endif + ATH_MSG_DEBUG(" --> checking morther: " << inParticle ); + if ( std::abs(inParticle->pdg_id()) == 22 || std::abs(inParticle->pdg_id()) == 11 ){ + if (std::fabs(particle->momentum().perp()) > m_minPt && 
std::fabs(particle->momentum().pseudoRapidity()) < m_maxEta ) { genSignal->push_back(particle); ATH_MSG_DEBUG ("Selected this electron/positron!"); break; diff --git a/Tracking/TrkValidation/TrkVertexFitterValidationTools/src/McEventNtupleTool.cxx b/Tracking/TrkValidation/TrkVertexFitterValidationTools/src/McEventNtupleTool.cxx index 5b27ed4b5f887a769d102f68f1682a86ee655160..faa703e2502be9a7f6c30f74c72a589a7cd6cfae 100755 --- a/Tracking/TrkValidation/TrkVertexFitterValidationTools/src/McEventNtupleTool.cxx +++ b/Tracking/TrkValidation/TrkVertexFitterValidationTools/src/McEventNtupleTool.cxx @@ -123,8 +123,13 @@ StatusCode Trk::McEventNtupleTool::finalize() { StatusCode Trk::McEventNtupleTool::fillMcEventData(const HepMC::GenEvent& myEvent) const { +#ifdef HEPMC3 + auto Vert = myEvent.vertices().begin(); + auto Vert_end = myEvent.vertices().end(); +#else HepMC::GenEvent::vertex_const_iterator Vert = myEvent.vertices_begin(); HepMC::GenEvent::vertex_const_iterator Vert_end = myEvent.vertices_end(); +#endif //store primary vertex CLHEP::HepLorentzVector pv_pos((*Vert)->position().x(), (*Vert)->position().y(), @@ -132,16 +137,23 @@ StatusCode Trk::McEventNtupleTool::fillMcEventData(const HepMC::GenEvent& myEven (*Vert)->position().t()); double pv_r = pv_pos.perp(); double pv_z = pv_pos.z(); - std::map<int,HepMC::GenVertex *> pv_vtx_ids; - std::vector<std::map<int,HepMC::GenVertex *> > sec_vtx_ids_vec; + std::map<int,HepMC::ConstGenVertexPtr> pv_vtx_ids; + std::vector<std::map<int,HepMC::ConstGenVertexPtr> > sec_vtx_ids_vec; +#ifdef HEPMC3 + auto Part = myEvent.particles().begin(); + auto Part_end = myEvent.particles().end(); +#else HepMC::GenEvent::particle_const_iterator Part = myEvent.particles_begin(); - for (;Part!=myEvent.particles_end(); ++Part){ + HepMC::GenEvent::particle_const_iterator Part_end = myEvent.particles_end(); +#endif + + for (;Part!=Part_end; ++Part){ //information about incomming and outgoing particles CLHEP::HepLorentzVector 
par_mom((*Part)->momentum().px(), (*Part)->momentum().py(), (*Part)->momentum().pz(), (*Part)->momentum().e()); - HepMC::GenVertex* par_vert = (*Part)->production_vertex(); + auto par_vert = (*Part)->production_vertex(); if (par_vert) { CLHEP::HepLorentzVector lv_pos(par_vert->position().x(), @@ -149,7 +161,7 @@ StatusCode Trk::McEventNtupleTool::fillMcEventData(const HepMC::GenEvent& myEven par_vert->position().z(), par_vert->position().t()); - if (par_mom.perp() > m_ptCut && fabs(par_mom.eta()) <= m_etaCut && fabs(lv_pos.perp()) <= m_radiusCut && fabs(lv_pos.z()) <= m_zPosCut){ + if (par_mom.perp() > m_ptCut && fabs(par_mom.eta()) <= m_etaCut && std::fabs(lv_pos.perp()) <= m_radiusCut && std::fabs(lv_pos.z()) <= m_zPosCut){ if(fabs(lv_pos.perp() - pv_r)<m_radiusRes && fabs(lv_pos.z() - pv_z)<m_zPosRes) { @@ -159,8 +171,8 @@ StatusCode Trk::McEventNtupleTool::fillMcEventData(const HepMC::GenEvent& myEven //loop over all entries in sec_vtx_ids_vec = vector of sec_vtx_map's for (unsigned int sec_vec_itr = 0; sec_vec_itr < sec_vtx_ids_vec.size(); ++sec_vec_itr) { - std::map<int,HepMC::GenVertex *> sec_vtx_map = sec_vtx_ids_vec[sec_vec_itr]; - std::map<int,HepMC::GenVertex *>::iterator map_itr = sec_vtx_map.begin(); + std::map<int,HepMC::ConstGenVertexPtr> sec_vtx_map = sec_vtx_ids_vec[sec_vec_itr]; + std::map<int,HepMC::ConstGenVertexPtr>::iterator map_itr = sec_vtx_map.begin(); for (; map_itr!= sec_vtx_map.end(); ++map_itr) { CLHEP::HepLorentzVector sec_pos((*map_itr).second->position().x(), @@ -180,22 +192,22 @@ StatusCode Trk::McEventNtupleTool::fillMcEventData(const HepMC::GenEvent& myEven } if(new_sec_vtx) //store new entry in sec_vtx_ids_vec { - std::map<int,HepMC::GenVertex *> new_map; - new_map.insert(std::make_pair(par_vert->barcode(),par_vert)); + std::map<int,HepMC::ConstGenVertexPtr> new_map; + new_map.insert(std::make_pair(HepMC::barcode(par_vert),par_vert)); sec_vtx_ids_vec.push_back(new_map); } }//if not primary }//eta && mom cut }//if production 
vertex }//end loop over particles - std::map<int,HepMC::GenVertex *>::iterator pv_map_itr = pv_vtx_ids.begin(); + std::map<int,HepMC::ConstGenVertexPtr>::iterator pv_map_itr = pv_vtx_ids.begin(); m_true_pri_x = (*pv_map_itr).second->position().x(); m_true_pri_y = (*pv_map_itr).second->position().y(); m_true_pri_z = (*pv_map_itr).second->position().z(); for (unsigned int sec_vtx_itr = 0; sec_vtx_itr < sec_vtx_ids_vec.size(); ++sec_vtx_itr) { - std::map<int,HepMC::GenVertex *> sec_map = sec_vtx_ids_vec[sec_vtx_itr]; - std::map<int,HepMC::GenVertex *>::iterator map_itr = sec_map.begin(); + std::map<int,HepMC::ConstGenVertexPtr> sec_map = sec_vtx_ids_vec[sec_vtx_itr]; + std::map<int,HepMC::ConstGenVertexPtr>::iterator map_itr = sec_map.begin(); CLHEP::HepLorentzVector sec_vtx_pos; for (; map_itr != sec_map.end(); ++map_itr){ sec_vtx_pos.setX(sec_vtx_pos.x() + (*map_itr).second->position().x()); diff --git a/Trigger/TrigAlgorithms/TrigCaloRec/python/TrigCaloRecConfig.py b/Trigger/TrigAlgorithms/TrigCaloRec/python/TrigCaloRecConfig.py index 484a9ac0bd534e62450343c8f20ff05942267147..e5e8f3b313a8b11f8985ef29758ab66117c1c1d7 100755 --- a/Trigger/TrigAlgorithms/TrigCaloRec/python/TrigCaloRecConfig.py +++ b/Trigger/TrigAlgorithms/TrigCaloRec/python/TrigCaloRecConfig.py @@ -1707,3 +1707,85 @@ class TrigCaloClusterCalibratorMT_LC(TrigCaloClusterCalibratorMT): self.MonTool.defineHistogram('Eta', path='EXPERT', type='TH1F', title="Cluster #eta; #eta ; Number of Clusters", xbins=100, xmin=-2.5, xmax=2.5) self.MonTool.defineHistogram('Phi', path='EXPERT', type='TH1F', title="Cluster #phi; #phi ; Number of Clusters", xbins=64, xmin=-3.2, xmax=3.2) self.MonTool.defineHistogram('Eta,Phi', path='EXPERT', type='TH2F', title="Number of Clusters; #eta ; #phi ; Number of Clusters", xbins=100, xmin=-2.5, xmax=2.5, ybins=128, ymin=-3.2, ymax=3.2) + + +from AthenaConfiguration.ComponentAccumulator import ComponentAccumulator +from AthenaConfiguration.ComponentFactory import CompFactory + + 
+def hltCaloCellMakerCfg(flags, FS=False, roisKey='UNSPECIFIED'): + acc = ComponentAccumulator() + from TrigT2CaloCommon.TrigCaloDataAccessConfig import trigCaloDataAccessSvcCfg + acc.merge(trigCaloDataAccessSvcCfg(flags)) + + cellMaker = CompFactory.HLTCaloCellMaker('HLTCaloCellMaker'+ ('FS' if FS else 'RoI'), + CellsName='CaloCells', + TrigDataAccessMT=acc.getService('TrigCaloDataAccessSvc'), + monitorCells=True, + ExtraInputs=[('TileEMScale', 'ConditionStore+TileEMScale'), + ('TileBadChannels', 'ConditionStore+TileBadChannels'), + ('LArOnOffIdMapping', 'ConditionStore+LArOnOffIdMap')], # TODO check if this depends on data/MC + RoIs=roisKey) + + acc.addEventAlgo(cellMaker) + return acc + +def hltTopoClusterMakerCfg(flags, FS=False): + acc = ComponentAccumulator() + from CaloRec.CaloTopoClusterConfig import CaloTopoClusterToolCfg, CaloTopoClusterSplitterToolCfg + topoMaker = acc.popToolsAndMerge(CaloTopoClusterToolCfg(flags, cellsname='CaloCells')) + topoSplitter = acc.popToolsAndMerge(CaloTopoClusterSplitterToolCfg(flags)) + + + topoMoments = CompFactory.CaloClusterMomentsMaker ('TrigTopoMoments') + topoMoments.MaxAxisAngle = 20*deg + topoMoments.TwoGaussianNoise = flags.Calo.TopoCluster.doTwoGaussianNoise + topoMoments.MinBadLArQuality = 4000 + topoMoments.MomentsNames = ['FIRST_PHI', + 'FIRST_ETA', + 'SECOND_R' , + 'SECOND_LAMBDA', + 'DELTA_PHI', + 'DELTA_THETA', + 'DELTA_ALPHA' , + 'CENTER_X', + 'CENTER_Y', + 'CENTER_Z', + 'CENTER_MAG', + 'CENTER_LAMBDA', + 'LATERAL', + 'LONGITUDINAL', + 'FIRST_ENG_DENS', + 'ENG_FRAC_EM', + 'ENG_FRAC_MAX', + 'ENG_FRAC_CORE' , + 'FIRST_ENG_DENS', + 'SECOND_ENG_DENS', + 'ISOLATION', + 'ENG_BAD_CELLS', + 'N_BAD_CELLS', + 'N_BAD_CELLS_CORR', + 'BAD_CELLS_CORR_E', + 'BADLARQ_FRAC', + 'ENG_POS', + 'SIGNIFICANCE', + 'CELL_SIGNIFICANCE', + 'CELL_SIG_SAMPLING', + 'AVG_LAR_Q', + 'AVG_TILE_Q' + ] + from TrigEDMConfig.TriggerEDMRun3 import recordable + alg = CompFactory.TrigCaloClusterMakerMT('TrigCaloClusterMaker_topo'+('FS' if FS else 
'RoI'), + Cells = 'CaloCells', + CaloClusters=recordable('HLT_TopoCaloClustersRoI'), + ClusterMakerTools = [ topoMaker, topoSplitter, topoMoments] # moments are missing yet + ) + acc.addEventAlgo(alg) + return acc + + +def hltCaloTopoClusteringCfg(flags, FS=False, roisKey='UNSPECIFIED'): + acc = ComponentAccumulator() + acc.merge(hltCaloCellMakerCfg(flags, FS=FS, roisKey=roisKey)) + acc.merge(hltTopoClusterMakerCfg(flags, FS=FS)) + return acc diff --git a/Trigger/TrigAlgorithms/TrigT2CaloCommon/python/CaloDef.py b/Trigger/TrigAlgorithms/TrigT2CaloCommon/python/CaloDef.py index b606da07f6fe43bde71ed3c175a97d35222d1091..26ad25afc2dfbdb6c66caad88903eb950f65758a 100644 --- a/Trigger/TrigAlgorithms/TrigT2CaloCommon/python/CaloDef.py +++ b/Trigger/TrigAlgorithms/TrigT2CaloCommon/python/CaloDef.py @@ -11,11 +11,6 @@ def setMinimalCaloSetup() : from TrigT2CaloCommon.TrigT2CaloCommonConfig import TrigCaloDataAccessSvc svcMgr+=TrigCaloDataAccessSvc() svcMgr.TrigCaloDataAccessSvc.OutputLevel=ERROR - if not hasattr(svcMgr,'RegSelSvcDefault'): - from RegionSelector.RegSelSvcDefault import RegSelSvcDefault - svcMgr += RegSelSvcDefault() - - ######################## ## ALGORITHMS diff --git a/Trigger/TrigAnalysis/TrigDecisionTool/CMakeLists.txt b/Trigger/TrigAnalysis/TrigDecisionTool/CMakeLists.txt index cf38f59877df49be4223dfbf1440cb4703cbee32..d4520741f6d2405ff4f54950e863838cccc6b5d6 100644 --- a/Trigger/TrigAnalysis/TrigDecisionTool/CMakeLists.txt +++ b/Trigger/TrigAnalysis/TrigDecisionTool/CMakeLists.txt @@ -15,8 +15,8 @@ if( XAOD_STANDALONE ) INCLUDE_DIRS ${Boost_INCLUDE_DIRS} PRIVATE_INCLUDE_DIRS ${ROOT_INCLUDE_DIRS} LINK_LIBRARIES ${Boost_LIBRARIES} AsgTools xAODBase xAODTrigger - TrigConfHLTData TrigConfL1Data TrigNavStructure TrigRoiConversionLib - TrigConfInterfaces TrigDecisionInterface AsgDataHandlesLib + TrigConfHLTData TrigConfL1Data TrigNavStructure TrigRoiConversionLib + TrigConfInterfaces TrigDecisionInterface AsgDataHandlesLib TrigCompositeUtilsLib 
PRIVATE_LINK_LIBRARIES ${ROOT_LIBRARIES} TrigSteeringEvent ) else() @@ -28,9 +28,9 @@ else() PRIVATE_INCLUDE_DIRS ${ROOT_INCLUDE_DIRS} LINK_LIBRARIES ${Boost_LIBRARIES} AsgTools EventInfo xAODBase xAODTrigger GaudiKernel TrigConfHLTData TrigConfL1Data - TrigNavStructure StoreGateLib TrigRoiConversionLib TrigCompositeUtilsLib - TrigConfInterfaces TrigDecisionEvent TrigDecisionInterface - PRIVATE_LINK_LIBRARIES ${ROOT_LIBRARIES} AthenaBaseComps AsgDataHandlesLib + TrigNavStructure StoreGateLib TrigRoiConversionLib TrigCompositeUtilsLib + TrigConfInterfaces TrigDecisionInterface + PRIVATE_LINK_LIBRARIES ${ROOT_LIBRARIES} AthenaBaseComps AsgDataHandlesLib TrigSteeringEvent AthenaKernel ) else() atlas_add_library( TrigDecisionToolLib @@ -62,7 +62,7 @@ if( NOT XAOD_STANDALONE AND NOT XAOD_ANALYSIS ) atlas_add_test( Templates_test SOURCES test/Templates_test.cxx LINK_LIBRARIES TrigNavigationLib AthenaKernel GaudiKernel AthContainers - AnalysisTriggerEvent TrigSteeringEvent TrigMuonEvent + AnalysisTriggerEvent TrigSteeringEvent TrigMuonEvent TrigDecisionToolLib POST_EXEC_SCRIPT nopost.sh ) endif() @@ -72,4 +72,3 @@ atlas_install_python_modules( python/*.py POST_BUILD_CMD ${ATLAS_FLAKE8} ) atlas_install_scripts( share/checkTriggerEDM.py share/checkTriggerProcessingErrors.py ) atlas_install_joboptions( share/checkR3Trigger.py ) - diff --git a/Trigger/TrigAnalysis/TrigDecisionTool/Root/CacheGlobalMemory.cxx b/Trigger/TrigAnalysis/TrigDecisionTool/Root/CacheGlobalMemory.cxx index 6ab34b2b6a4bf3195c97c4d6a462fb118d4c2bcb..b0a9af102f73bb46f6f26c39a133dfd1d14ffb03 100644 --- a/Trigger/TrigAnalysis/TrigDecisionTool/Root/CacheGlobalMemory.cxx +++ b/Trigger/TrigAnalysis/TrigDecisionTool/Root/CacheGlobalMemory.cxx @@ -11,7 +11,7 @@ * * @author Michael Begel <michael.begel@cern.ch> - Brookhaven National Laboratory * @author Lukas Heinrich <lukas.heinrich@cern.ch> - NYU - * + * ***********************************************************************************/ #include <string> @@ 
-57,7 +57,7 @@ Trig::CacheGlobalMemory::CacheGlobalMemory() : m_confChains(nullptr), m_expressStreamContainer(nullptr), m_decisionKeyPtr(nullptr), -#if !defined(XAOD_STANDALONE) && !defined(XAOD_ANALYSIS) // Full Athena +#ifndef XAOD_ANALYSIS // Full Athena m_oldDecisionKeyPtr(nullptr), m_oldEventInfoKeyPtr(nullptr), #endif @@ -105,7 +105,7 @@ const Trig::ChainGroup* Trig::CacheGlobalMemory::createChainGroup(const std::vec if (m_chainGroupsRef.find(key_alias)==m_chainGroupsRef.end()) { m_chainGroupsRef[key_alias]=m_chainGroups[key]; } - else { + else { if (m_chainGroupsRef[key_alias]!=m_chainGroupsRef[key]) { throw std::runtime_error("TrigDecisionTool: The alias "+alias+" already exists and cannot be overwritten."); } @@ -147,19 +147,19 @@ void Trig::CacheGlobalMemory::update(const TrigConf::HLTChainList* confChains, 0, 0, 0, prescales[ctpid]); ATH_MSG_DEBUG( " new configuration for item" << item->name() ); } - ATH_MSG_DEBUG( "Updating configuration, done with L1" ); - + ATH_MSG_DEBUG( "Updating configuration, done with L1" ); + //clear cache completely becuase underlying config objects might have changed for(auto& c : m_l2chainsCache){delete c.second;} - m_l2chainsCache.clear(); + m_l2chainsCache.clear(); for(auto& c : m_efchainsCache){delete c.second;} - m_efchainsCache.clear(); + m_efchainsCache.clear(); m_mConfChains.clear(); if ( ! confChains ) { ATH_MSG_WARNING( "No chains in configuration, probably run w/o HLT" ); } else { - ATH_MSG_DEBUG("Updating Configuration chains. Number of conf chains: " << m_confChains->size()); + ATH_MSG_DEBUG("Updating Configuration chains. 
Number of conf chains: " << m_confChains->size()); // updating internal map of conf chains (this map is only used for fast lookup) for(auto ch : *m_confChains) { @@ -170,7 +170,7 @@ void Trig::CacheGlobalMemory::update(const TrigConf::HLTChainList* confChains, for(auto ch : *m_confChains) { // std::cerr << "CacheGlobalMemory::update updating chain" << (*cChIt)->chain_name() << std::endl; int cntr = ch->chain_counter(); - if( ch->level()=="L2" ) { + if( ch->level()=="L2" ) { m_l2chainsCache[cntr] = new HLT::Chain(ch); } else {//merged chains are stored in efchains m_efchainsCache[cntr] = new HLT::Chain(ch); @@ -222,11 +222,11 @@ void Trig::CacheGlobalMemory::update(const TrigConf::HLTChainList* confChains, << alias ); // cg already exists (from previous config, we need to update it) preIt->second->m_patterns = mstIt->second; - updateChainGroup(preIt->second); + updateChainGroup(preIt->second); } else { createChainGroup(mstIt->second,alias); } - + } for (mstIt=m_groups.begin(); mstIt != m_groups.end(); ++mstIt) { const std::string alias("GROUP_"+mstIt->first); @@ -251,7 +251,7 @@ void Trig::CacheGlobalMemory::update(const TrigConf::HLTChainList* confChains, it!=m_chainGroups.end(); it++) { updateChainGroup(it->second); - } + } ATH_MSG_DEBUG( "Updating configuration, done with ChainGroups defined so " "far" ); ATH_MSG_DEBUG( "Updating configuration done" ); @@ -340,15 +340,15 @@ bool Trig::CacheGlobalMemory::assert_decision() { // here we unpack the decision. Note: the navigation will be unpacked only on demand (see navigation()) bool contains_xAOD_decision = false; -#if !defined(XAOD_STANDALONE) && !defined(XAOD_ANALYSIS) // Full Athena +#ifndef XAOD_ANALYSIS // Full Athena bool is_l1result_configured = false; bool contains_decision = false; bool contains_old_event_info = false; #endif - + if(!m_unpacker){ ATH_MSG_INFO("decision not set on first (?) assert. 
deciding how to unpack"); - + //Lukas 26-06-2015: we're hard coding the configuration for now //but we have setters and getters for m_trigDecisionKey (as CGM datamemer) //so we could in the future use the ones set by the python configuration @@ -360,7 +360,7 @@ bool Trig::CacheGlobalMemory::assert_decision() { contains_xAOD_decision = decisionReadHandle.isValid(); } -#if !defined(XAOD_STANDALONE) && !defined(XAOD_ANALYSIS) // Full Athena +#ifndef XAOD_ANALYSIS // Full Athena if (!m_oldDecisionKeyPtr->empty()) { SG::ReadHandle<TrigDec::TrigDecision> oldDecisionReadHandle = SG::makeHandle(*m_oldDecisionKeyPtr, context); @@ -395,27 +395,27 @@ bool Trig::CacheGlobalMemory::assert_decision() { setUnpacker(new DecisionUnpackerStandalone(m_decisionKeyPtr, m_run2NavigationKeyPtr)); } #endif - + }//if(!m_unpacker) if(!m_unpacker){ std::stringstream extra; -#if !defined(XAOD_STANDALONE) && !defined(XAOD_ANALYSIS) // Full Athena +#ifndef XAOD_ANALYSIS // Full Athena extra << ". Looked for old TrigDec::TrigDecision? " << (m_oldDecisionKeyPtr->empty() ? "NO" : "YES") - << ", has TrigDec::TrigDecision? " + << ", has TrigDec::TrigDecision? " << (contains_decision ? "YES" : "NO") - << ", TrigDec::TrigDecision has L1? " + << ", TrigDec::TrigDecision has L1? " << (is_l1result_configured ? "YES" : "NO") << ". Looked for old EventInfo? " << (m_oldEventInfoKeyPtr->empty() ? "NO" : "YES") - << ", has old EventInto? " + << ", has old EventInto? " << (contains_old_event_info ? "YES" : "NO"); #endif ATH_MSG_ERROR("No source of Trigger Decision in file. " << "(Looked for xAOD::TrigDecision? " << (m_decisionKeyPtr->empty() ? "NO" : "YES") - << ", has xAOD::TrigDecision? " + << ", has xAOD::TrigDecision? " << (contains_xAOD_decision ? "YES" : "NO") << extra.str() << ". 
Check UseRun1DecisionFormat and UseOldEventInfoDecisionFormat flags if reading pre-xAOD or BS input)."); @@ -440,7 +440,7 @@ StatusCode Trig::CacheGlobalMemory::unpackDecision() { ATH_MSG_DEBUG("Unpacking TrigDecision "); ATH_MSG_DEBUG("clearing the delete-end-of-event store"); m_deleteAtEndOfEvent.clear(); - + bool unpackHLT = ( m_confChains != 0 ); ATH_CHECK( m_unpacker->unpackDecision( m_itemsByName, m_itemsCache, m_l2chainsByName, m_l2chainsCache, @@ -472,9 +472,7 @@ StatusCode Trig::CacheGlobalMemory::unpackNavigation() { return StatusCode::SUCCESS; } -Trig::CacheGlobalMemory::AnyTypeDeleter::~AnyTypeDeleter() +Trig::CacheGlobalMemory::AnyTypeDeleter::~AnyTypeDeleter() { clear(); } - - diff --git a/Trigger/TrigAnalysis/TrigDecisionTool/TrigDecisionTool/CacheGlobalMemory.h b/Trigger/TrigAnalysis/TrigDecisionTool/TrigDecisionTool/CacheGlobalMemory.h index 9220505554ff14bdba7e84618ce0d4ce2f128cf4..fa4cbb104855812cec8790d0e663f02b6e9722be 100644 --- a/Trigger/TrigAnalysis/TrigDecisionTool/TrigDecisionTool/CacheGlobalMemory.h +++ b/Trigger/TrigAnalysis/TrigDecisionTool/TrigDecisionTool/CacheGlobalMemory.h @@ -1,5 +1,5 @@ /* - Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration */ #ifndef TRIGGER_DECISION_TOOL_CACHE_GLOBAL_MEMORY_H @@ -43,7 +43,7 @@ #include "xAODTrigger/TrigDecision.h" #include "xAODTrigger/TrigNavigation.h" -#ifndef XAOD_STANDALONE +#ifndef XAOD_ANALYSIS #include "EventInfo/EventInfo.h" #include "TrigDecisionEvent/TrigDecision.h" #endif @@ -68,7 +68,7 @@ namespace Trig { class CacheGlobalMemory : public virtual Logger { using Logger::msgLvl;//resolve ambiguity from also inheriting from Logger - + public: // constructors, destructor CacheGlobalMemory(); @@ -81,16 +81,16 @@ namespace Trig { /** * @brief creates new chain group * @param patterns list of patterns i.e. 
regexes, if chain groups already exists it is just given back - * The chain group is considered to exist if the patterns are the same. + * The chain group is considered to exist if the patterns are the same. * Patterns are stored and made unique i.e. "EF_mu4", "EF_mu6" is the same as "EF_mu6", "EF_mu4". * It is not the same though as "EF_mu.*" even if in particular case that would mean the same 2 chains. - * + * * @param alias is the short human readable name for the triggers which are in the group i.e. myMuons **/ const Trig::ChainGroup* createChainGroup(const std::vector< std::string >& patterns, const std::string& alias=""); /** - * @brief Updates configuration of the chain groups + * @brief Updates configuration of the chain groups * (i.e. regexes are reapplied to new set of chains) **/ void update(const TrigConf::HLTChainList* confChains, @@ -100,7 +100,7 @@ namespace Trig { const LVL1CTP::Lvl1Item* item(const TrigConf::TriggerItem& i) const; //!< CTP item from given config item const TrigConf::TriggerItem* config_item(const std::string& name) const; //!< CTP config item from given name float item_prescale(int ctpid) const; //!< Prescale for CPT item - + const HLT::Chain* chain(const std::string& name) const; //!< HLT chain object from given name (0 pointer returned if no match) const HLT::Chain* chain(const TrigConf::HLTChain& chIt) const; //!< HLT chain object from given config chain const TrigConf::HLTChain* config_chain(const std::string& name) const; //!< HLT config chain from given name @@ -111,10 +111,10 @@ namespace Trig { ATH_MSG_WARNING("unpack Navigation failed"); } } - return m_navigation; + return m_navigation; } void navigation(HLT::TrigNavStructure* nav) { m_navigation = nav; } //!< sets navigation object pointer - + std::map< std::vector< std::string >, Trig::ChainGroup* >& getChainGroups() {return m_chainGroupsRef;}; // std::map<unsigned, const LVL1CTP::Lvl1Item*> getItems() {return m_items;}; // std::map<unsigned, const LVL1CTP::Lvl1Item*> 
getItems() const {return m_items;}; @@ -130,13 +130,13 @@ namespace Trig { /** * @brief cheks if new event arrived with the decision * Need tu use before any call to CacheGlobalMemory. - * @return true if all went fine about decision, false otherwise + * @return true if all went fine about decision, false otherwise **/ bool assert_decision(); void setUnpacker( Trig::IDecisionUnpacker* up ){ m_unpacker = up; } Trig::IDecisionUnpacker* unpacker(){ return m_unpacker; } - + /// Set the event store to be used by the object void setStore( EventPtr_t store ) { m_store = store; } @@ -148,19 +148,19 @@ namespace Trig { void setRun3NavigationKeyPtr(SG::ReadHandleKey<TrigCompositeUtils::DecisionContainer>* k) { m_run3NavigationKeyPtr = k; } SG::ReadHandleKey<TrigCompositeUtils::DecisionContainer>& getRun3NavigationKeyPtr() { return *m_run3NavigationKeyPtr; } -#if !defined(XAOD_STANDALONE) && !defined(XAOD_ANALYSIS) // Full Athena +#ifndef XAOD_ANALYSIS // Full Athena void setOldDecisionKeyPtr(SG::ReadHandleKey<TrigDec::TrigDecision>* k) { m_oldDecisionKeyPtr = k; } void setOldEventInfoKeyPtr(SG::ReadHandleKey<EventInfo>* k) { m_oldEventInfoKeyPtr = k; } #endif SG::ReadHandleKey<xAOD::TrigDecision>* xAODTrigDecisionKey() { return m_decisionKeyPtr; } - // + // template<class T> void deleteAtTheEndOfEvent(T t) const { m_deleteAtEndOfEvent.insert(t); } - + private: friend class DecisionAccess; @@ -185,14 +185,14 @@ namespace Trig { /// Pointer to the event store in use EventPtr_t m_store; - + /// Trigger decision unpacker helper IDecisionUnpacker* m_unpacker; // Navigation owned by CGM HLT::TrigNavStructure* m_navigation; - - // chain groups + + // chain groups typedef std::map< std::vector< std::string >, Trig::ChainGroup* >::iterator ChGrIt; std::map< std::vector< std::string >, Trig::ChainGroup* > m_chainGroups; //!< primary storage for chain groups std::map< std::vector< std::string >, Trig::ChainGroup* > m_chainGroupsRef; //!< this map keeps the chain group more than once 
i.e. when alias is given @@ -200,11 +200,11 @@ namespace Trig { // std::map<CTPID, const LVL1CTP::Lvl1Item*> m_items; //!< items keyed by id (changing every event) // std::map<CHAIN_COUNTER, const HLT::Chain*> m_l2chains; //!< chains keyed by chain counter (chainging every event) // std::map<CHAIN_COUNTER, const HLT::Chain*> m_efchains; - + std::unordered_map<std::string, const LVL1CTP::Lvl1Item*> m_itemsByName; //!< items keyed by configuration name (chainging every event) std::unordered_map<std::string, const HLT::Chain*> m_l2chainsByName; //!< L2 chains keyed by chain name (chainging every event) std::unordered_map<std::string, const HLT::Chain*> m_efchainsByName; //!< L2 chains keyed by chain name (chainging every event) - + typedef unsigned CTPID; typedef unsigned CHAIN_COUNTER; std::map<CTPID, LVL1CTP::Lvl1Item*> m_itemsCache; //!< cache of all CTP items possible (given configuration) @@ -213,14 +213,14 @@ namespace Trig { std::map<std::string, std::vector<std::string> > m_groups; //!< mapping from group to list of chains std::map<std::string, std::vector<std::string> > m_streams; //!< mapping from stream to list of chains - + const TrigConf::ItemContainer* m_confItems; //!< items configuration const TrigConf::HLTChainList* m_confChains; //!< all chains configuration mutable const xAOD::TrigCompositeContainer* m_expressStreamContainer; SG::ReadHandleKey<xAOD::TrigDecision>* m_decisionKeyPtr; //!< Parent TDT's read handle key -#if !defined(XAOD_STANDALONE) && !defined(XAOD_ANALYSIS) // Full Athena +#ifndef XAOD_ANALYSIS // Full Athena SG::ReadHandleKey<TrigDec::TrigDecision>* m_oldDecisionKeyPtr; //!< Parent TDT's read handle key SG::ReadHandleKey<EventInfo>* m_oldEventInfoKeyPtr; //!< Parent TDT's read handle key #endif @@ -230,29 +230,29 @@ namespace Trig { typedef std::unordered_map<std::string, const TrigConf::HLTChain*> ChainHashMap_t; ChainHashMap_t m_mConfChains; //!< map of conf chains - + char m_bgCode; //!< the encoded bunchgroup information - - + + 
class AnyTypeDeleter { - + struct iholder { virtual ~iholder() {} virtual void* ptr() const { return 0;} }; struct holder_comp { - bool operator()(iholder* lhs, iholder* rhs) const { - return lhs->ptr() < rhs->ptr(); } + bool operator()(iholder* lhs, iholder* rhs) const { + return lhs->ptr() < rhs->ptr(); } }; template<class T> - class holder : public iholder { + class holder : public iholder { public: holder(T& t): m_held(t) {} - virtual ~holder() { + virtual ~holder() { delete m_held; - m_held = 0; } + m_held = 0; } virtual void* ptr() const { return (void*)m_held;} private: T m_held; @@ -266,14 +266,14 @@ namespace Trig { void clear() { for(iholder* i : m_todel) { delete i; - } + } m_todel.clear(); } ~AnyTypeDeleter(); - private: + private: std::set< iholder*, holder_comp > m_todel; }; // end of deleter - + mutable AnyTypeDeleter m_deleteAtEndOfEvent; mutable std::recursive_mutex m_cgmMutex; //!< R3 MT protection only against --threads > 1. Needs refacotring... diff --git a/Trigger/TrigAnalysis/TrigInDetAnalysisExample/src/AnalysisConfigMT_Ntuple.cxx b/Trigger/TrigAnalysis/TrigInDetAnalysisExample/src/AnalysisConfigMT_Ntuple.cxx index 01f4bf4d7d72a30a56977dbf4a507a2fb426d0db..6def4017a1b3e99bf5d3fadff7021202df6cd384 100644 --- a/Trigger/TrigAnalysis/TrigInDetAnalysisExample/src/AnalysisConfigMT_Ntuple.cxx +++ b/Trigger/TrigAnalysis/TrigInDetAnalysisExample/src/AnalysisConfigMT_Ntuple.cxx @@ -23,7 +23,7 @@ std::string date(); //function to find true taus -const HepMC::GenParticle* fromParent( int pdg_id, const HepMC::GenParticle* p, bool printout=false ); +HepMC::ConstGenParticlePtr fromParent( int pdg_id, HepMC::ConstGenParticlePtr p, bool printout=false ); diff --git a/Trigger/TrigAnalysis/TrigInDetAnalysisExample/src/AnalysisConfig_Ntuple.cxx b/Trigger/TrigAnalysis/TrigInDetAnalysisExample/src/AnalysisConfig_Ntuple.cxx index 062f385fd6ec8899788a00485e73440b276b6d4d..38fc80268bb9db5f3be2278b47fcda368a828b18 100644 --- 
a/Trigger/TrigAnalysis/TrigInDetAnalysisExample/src/AnalysisConfig_Ntuple.cxx +++ b/Trigger/TrigAnalysis/TrigInDetAnalysisExample/src/AnalysisConfig_Ntuple.cxx @@ -78,7 +78,7 @@ std::string date() { //function to find true taus -const HepMC::GenParticle* fromParent( int pdg_id, const HepMC::GenParticle* p, bool printout=false ) { +HepMC::ConstGenParticlePtr fromParent( int pdg_id, HepMC::ConstGenParticlePtr p, bool printout=false ) { if ( p==0 ) return 0; if (std::abs(p->pdg_id())==11 || std::abs(p->pdg_id())==13 ) return 0; //don't want light leptons from tau decays @@ -733,34 +733,20 @@ void AnalysisConfig_Ntuple::loop() { while ( evitr!=evend ) { int _ip = 0; /// count of particles in this interaction + int pid = HepMC::signal_process_id((*evitr)); - int pid = (*evitr)->signal_process_id(); - // if ( (*evitr)->particles_size()>0 ) std::cout << "process " << "\tpid " << pid << std::endl; + if ( pid!=0 ) { /// hooray! actually found a sensible event - if ( pid!=0 && (*evitr)->particles_size()>0 ) { /// hooray! 
actually found a sensible event - /// go through the particles - HepMC::GenEvent::particle_const_iterator pitr((*evitr)->particles_begin()); - HepMC::GenEvent::particle_const_iterator pend((*evitr)->particles_end()); - - while ( pitr!=pend ) { - - // int pdg_id = (*pitr)->pdg_id(); - // std::cout << ip++ << "\tparticle " << pdg_id << "\t" << "(*pitr)->pT()" << std::endl; - + for (auto pitr: **evitr ) { //if tau job, only select if have a tau somewhere along chain, otherwise just add - if ( (m_TruthPdgId==15 && fromParent(m_TruthPdgId, *pitr)!=0) || m_TruthPdgId!=15 ) { + if ( (m_TruthPdgId==15 && fromParent(m_TruthPdgId, pitr)!=0) || m_TruthPdgId!=15 ) { /// select the ones of interest - selectorTruth.selectTrack( *pitr ); - } - - + selectorTruth.selectTrack( pitr ); + } ++_ip; - - ++pitr; - } diff --git a/Trigger/TrigAnalysis/TrigInDetAnalysisUser/share/TIDAhisto-panel-vtx.dat b/Trigger/TrigAnalysis/TrigInDetAnalysisUser/share/TIDAhisto-panel-vtx.dat index d6933c847c57146bbb673a6411ae78de94a10604..bb9c224cc634416a398c9d8b8f6a9134ef8ffa92 100644 --- a/Trigger/TrigAnalysis/TrigInDetAnalysisUser/share/TIDAhisto-panel-vtx.dat +++ b/Trigger/TrigAnalysis/TrigInDetAnalysisUser/share/TIDAhisto-panel-vtx.dat @@ -24,12 +24,12 @@ vtx_res_panel = { "rdz_vs_ntrax/mean", "Offset z vs N_{tracks}", "xaxis:lin:auto", "Offline track multiplicity", "yaxis:auto:lin:-1:1.", "z_{0} mean [mm]" , "rdz_vs_zed/mean", "Offset z vs z", "xaxis:lin:-175:175", "Offline vertex z [mm]", "yaxis:auto:lin:-1:1", "z_{0} mean [mm]" , - "rdz_vs_zed/1d+rebin10", "Residual z", "xaxis:lin:-5:5", "#Delta z [mm]", "yaxis:log:auton", "(1/N)dN/dz [mm^{-1}]" , + "rdz_vs_zed/1d+rebin10", "Residual z", "xaxis:lin:-5:5", "#Delta z [mm]", "yaxis:log:auton", "(1/N)dN/dz [mm^{-1}]" , - "rdz_vs_ntrax/sigma", "Offset z vs N_{tracks}", "xaxis:lin:auto", "Offline track multiplicity", "yaxis:auto:lin:0:2.", "z_{0} resolution [mm]" , - "rdz_vs_zed/sigma", "Resolution z vs z", "xaxis:lin:-175:175", "Offline vertex z [mm]", 
"yaxis:lin:auto:0:2", "z_{0} resolution [mm]" , - "rdz_vs_zed/1d+lin+rebin10", "Residual z", "xaxis:lin:-5:5", "#Delta z [mm]", "yaxis:lin:auton", "(1/N)dN/dz [mm^{-1}]" + "rdz_vs_ntrax/sigma", "Offset z vs N_{tracks}", "xaxis:lin:auto", "Offline track multiplicity", "yaxis:log:auto:0.003:2.", "z_{0} resolution [mm]" , + "rdz_vs_zed/sigma", "Resolution z vs z", "xaxis:lin:-175:175", "Offline vertex z [mm]", "yaxis:log:auto:0.003:2", "z_{0} resolution [mm]" , + "rdz_vs_zed/1d+lin+rebin10", "Residual z", "xaxis:lin:-5:5", "#Delta z [mm]", "yaxis:lin:auton", "(1/N)dN/dz [mm^{-1}]" }; diff --git a/Trigger/TrigAnalysis/TrigInDetAnalysisUtils/TrigInDetAnalysisUtils/TrigTrackSelector.h b/Trigger/TrigAnalysis/TrigInDetAnalysisUtils/TrigInDetAnalysisUtils/TrigTrackSelector.h index 09a6a6f20557963f5d378a0e1746aadf5a11addf..422a582703ea0e734c3124b71f1a145500a88f72 100644 --- a/Trigger/TrigAnalysis/TrigInDetAnalysisUtils/TrigInDetAnalysisUtils/TrigTrackSelector.h +++ b/Trigger/TrigAnalysis/TrigInDetAnalysisUtils/TrigInDetAnalysisUtils/TrigTrackSelector.h @@ -104,7 +104,7 @@ public: void selectTracks( const xAOD::TruthParticleContainer* truthtracks ); - bool selectTrack( const HepMC::GenParticle* track ); + bool selectTrack( HepMC::ConstGenParticlePtr track ); // add a TruthParticle @@ -117,7 +117,7 @@ public: // make a TIDA::Track from a GenParticle - TIDA::Track* makeTrack( const HepMC::GenParticle* track ); + TIDA::Track* makeTrack( HepMC::ConstGenParticlePtr track ); // make a TIDA::Track from a TruthParticle TIDA::Track* makeTrack( const TruthParticle* track, unsigned long tid=0 ); diff --git a/Trigger/TrigAnalysis/TrigInDetAnalysisUtils/src/TrigTrackSelector.cxx b/Trigger/TrigAnalysis/TrigInDetAnalysisUtils/src/TrigTrackSelector.cxx index 66c02cad6d9fd84ba62811330911ba4cd2db9db4..450b0d6cd62b1f12d34065cc527f8825780d3dd1 100644 --- a/Trigger/TrigAnalysis/TrigInDetAnalysisUtils/src/TrigTrackSelector.cxx +++ 
b/Trigger/TrigAnalysis/TrigInDetAnalysisUtils/src/TrigTrackSelector.cxx @@ -298,13 +298,18 @@ void TrigTrackSelector::selectTracks( const xAOD::TruthParticleContainer* trutht // add a TruthParticle from a GenParticle - easy, bet it doesn't work -bool TrigTrackSelector::selectTrack( const HepMC::GenParticle* track ) { +bool TrigTrackSelector::selectTrack( HepMC::ConstGenParticlePtr track ) { /// not a "final state" particle if ( track->status() != 1 ) return false; /// set this so can use it as the identifier - don't forget to reset!! +//AV Using memory to get some value is not a good idea. This is not a repruducible/portable way, but I leave it as is. +#ifdef HEPMC3 + m_id = (unsigned long)(track.get()); +#else m_id = (unsigned long)track; +#endif bool sel; sel = selectTrack( TruthParticle(track) ); m_id = 0; @@ -476,8 +481,13 @@ bool TrigTrackSelector::selectTrack( const xAOD::TruthParticle* track ) { // make a TIDA::Track from a GenParticle -TIDA::Track* TrigTrackSelector::makeTrack( const HepMC::GenParticle* track ) { +TIDA::Track* TrigTrackSelector::makeTrack(HepMC::ConstGenParticlePtr track ) { +//AV Using memory to get some value is not a good idea. This is not a repruducible/portable way, but I leave it as is. 
+#ifdef HEPMC3 + unsigned long id = (unsigned long)(track.get()); +#else unsigned long id = (unsigned long)track; +#endif TruthParticle t = TruthParticle(track); return makeTrack( &t, id ); } diff --git a/Trigger/TrigConfiguration/TrigConfStorage/CMakeLists.txt b/Trigger/TrigConfiguration/TrigConfStorage/CMakeLists.txt index 2caee70e4ef7b85a8dec1f934e00f462dd8df444..69876d4daa39c76df316af4df6fc7a624c7938b4 100644 --- a/Trigger/TrigConfiguration/TrigConfStorage/CMakeLists.txt +++ b/Trigger/TrigConfiguration/TrigConfStorage/CMakeLists.txt @@ -15,7 +15,7 @@ atlas_add_library( TrigConfStorage PUBLIC_HEADERS TrigConfStorage INCLUDE_DIRS ${Boost_INCLUDE_DIRS} ${COOL_INCLUDE_DIRS} ${CORAL_INCLUDE_DIRS} PRIVATE_INCLUDE_DIRS ${TDAQ-COMMON_INCLUDE_DIRS} - LINK_LIBRARIES ${Boost_LIBRARIES} ${COOL_LIBRARIES} TrigConfBase TrigConfHLTData TrigConfL1Data + LINK_LIBRARIES ${Boost_LIBRARIES} ${COOL_LIBRARIES} TrigConfBase TrigConfHLTData TrigConfL1Data TrigConfData PRIVATE_LINK_LIBRARIES ${CORAL_LIBRARIES} ${TDAQ-COMMON_LIBRARIES} TrigConfJobOptData L1TopoConfig ) atlas_add_executable( TrigConf2COOLApp @@ -27,8 +27,8 @@ atlas_add_executable( TrigConfConsistencyChecker LINK_LIBRARIES TrigConfStorage ) atlas_add_executable( TrigConfReadWrite - src/test/ReadWrite.cxx - LINK_LIBRARIES L1TopoConfig TrigConfJobOptData TrigConfStorage ) + src/test/ReadWrite.cxx src/test/Run2toRun3Converters.cxx + LINK_LIBRARIES L1TopoConfig TrigConfJobOptData TrigConfStorage TrigConfData TrigConfIO TrigCompositeUtilsLib ) atlas_add_executable( TrigConfCoolFix src/test/CoolFix.cxx diff --git a/Trigger/TrigConfiguration/TrigConfStorage/src/test/ReadWrite.cxx b/Trigger/TrigConfiguration/TrigConfStorage/src/test/ReadWrite.cxx index 1d5a1f24dd2a805a08d016ba5cf1302b263f6ff1..b439ea320db6830470f36b66b0e77ee5ff2f3692 100644 --- a/Trigger/TrigConfiguration/TrigConfStorage/src/test/ReadWrite.cxx +++ b/Trigger/TrigConfiguration/TrigConfStorage/src/test/ReadWrite.cxx @@ -3,14 +3,14 @@ */ 
///////////////////////////////////////////////////////////////////// -// -// NAME: TrigConfReadWrite.cxx +// +// NAME: TrigConfReadWrite.cxx // PACKAGE: TrigConfStorage -// -// AUTHOR: J.Stelzer (CERN) Joerg.Stelzer@cern.ch +// +// AUTHOR: J.Stelzer (CERN) Joerg.Stelzer@cern.ch // CREATED: 17 Mar 2013 -// -// PURPOSE: +// +// PURPOSE: // // This standalone application is designed to read and write the // trigger configuration (L1+HLT) from DB,XML,COOL and to XML or COOL @@ -42,6 +42,8 @@ #include "TrigConfHLTData/HLTPrescaleSet.h" #include "TrigConfJobOptData/JobOptionTable.h" +#include "Run2toRun3Converters.h" + #include "CoolKernel/DatabaseId.h" #include "CoolKernel/Exception.h" #include "CoolKernel/IDatabaseSvc.h" @@ -60,10 +62,11 @@ #include <ctime> #include <map> #include <vector> -#include <sys/stat.h> +#include <sys/stat.h> using namespace std; using namespace TrigConf; +HLTMenu convertRun2HLTtoRun3(const HLTFrame* frame); void printhelp(std::ostream & o, std::ostream& (*lineend) ( std::ostream& os )) { o << "================================================================================\n"; @@ -73,7 +76,7 @@ void printhelp(std::ostream & o, std::ostream& (*lineend) ( std::ostream& os )) o << "[Global options]\n"; o << " -i|--input input [input [input]] ... source of configuration, format see below (mandatory)\n"; o << " -2|--comp input [input [input]] ... source of a second configuration for comparison\n"; - o << " -o|--output xml|cool [output[;cooldb]] [run] ... output format, name (for cool optional run number)\n"; + o << " -o|--output xml|r3json|cool [output[;cooldb]] [run] ... output format, name (for cool optional run number)\n"; o << " ... absolute output file name must contain '/', cooldb can be appended COMP200|OFLP200\n"; o << " -v|--loglevel <string> ... log level [NIL, VERBOSE, DEBUG, INFO, WARNING, ERROR, FATAL, ALWAYS]\n"; o << " -l|--log <string> ... 
name of a log file\n"; @@ -100,6 +103,7 @@ void printhelp(std::ostream & o, std::ostream& (*lineend) ( std::ostream& os )) o << " -o xml test ... will produce LVL1config_test.xml and/or HLTconfig_test.xml. When\n"; o << " comparing two menus this will produce Diff_test.xml. In this case the\n"; o << " specification of '-o test' is sufficient\n"; + o << " -o r3json test ... will produce Run3 JSON LVL1Menu_test.json and HLTMenu_test.json\n"; o << " -o cool ... will produce trig_cool.db with cool db instance CONDBR2 and infinite IOV\n"; o << " -o cool 200000 ... will produce trig_cool.db with cool db instance CONDBR2 and run number 200000\n"; o << " -o cool test [200000] ... will produce trig_cool_test.db with cool db instance CONDBR2 [and run number 200000]\n"; @@ -111,7 +115,7 @@ void printhelp(std::ostream & o, std::ostream& (*lineend) ( std::ostream& os )) class JobConfig { public: - enum Format { UNDEF=0x00, DB=0x01, COOL=0x02, XML=0x04 }; + enum Format { UNDEF=0x00, DB=0x01, COOL=0x02, XML=0x04, JSON=0x08 }; enum ETriggerLevel { LVL1 = 0, HLT = 1, NONE = 2 }; ~JobConfig(){} JobConfig() {} @@ -127,6 +131,7 @@ public: string l1xmlOutFile { "LVL1Config.xml" }; string l1topoOutFile { "L1TopoConfig.xml" }; string hltxmlOutFile { "HLTConfig.xml" }; + string hltJsonOutFile { "HLTMenu.json" }; string coolInputConnection { "" }; string coolOutputConnection { "" }; unsigned int coolOutputRunNr { 0 }; @@ -210,9 +215,9 @@ JobConfig::parseProgramOptions(int argc, char* argv[]) { } else { if(currentPar == "i" || currentPar == "input") { inpar.push_back(stripped); continue; } if(currentPar == "2" || currentPar == "comp") { inpar2.push_back(stripped); continue; } - if(currentPar == "o" || currentPar == "output") { - if(outpar.size()==0 && stripped != "xml" && stripped != "cool") { - error.push_back("Unknown output type: " + stripped + ". 
Must be either xml or cool, optionally followed by a base string for the output file name"); + if(currentPar == "o" || currentPar == "output") { + if(outpar.size()==0 && stripped != "xml" && stripped != "r3json" && stripped != "cool") { + error.push_back("Unknown output type: " + stripped + ". Must be either xml, json or cool, optionally followed by a base string for the output file name"); } else { outpar.push_back(stripped); } @@ -228,7 +233,7 @@ JobConfig::parseProgramOptions(int argc, char* argv[]) { else if("ERROR" == stripped ) { outputlevel = MSGTC::ERROR; } else if("FATAL" == stripped ) { outputlevel = MSGTC::FATAL; } else if("ALWAYS" == stripped ) { outputlevel = MSGTC::ALWAYS; } - else { + else { error.push_back("Unknown output level: " + stripped + ". Must be one of NIL, VERBOSE, DEBUG, INFO, WARNING, ERROR, FATAL, ALWAYS"); } continue; @@ -242,8 +247,8 @@ JobConfig::parseProgramOptions(int argc, char* argv[]) { error.push_back("No input specified, use '-i' option"); - // parse the input - if( (inpar.size()==1 && endswith(inpar[0],".xml")) || + // parse the input + if( (inpar.size()==1 && endswith(inpar[0],".xml")) || (inpar.size()==2 && endswith(inpar[0],".xml") && endswith(inpar[1],".xml")) ) { input = XML; } else if( inpar.size()>=1 && inpar[0].find(".db") != string::npos ) { @@ -276,15 +281,17 @@ JobConfig::parseProgramOptions(int argc, char* argv[]) { }; } - if( (inpar2.size()==1 && endswith(inpar2[0],".xml")) || + if( (inpar2.size()==1 && endswith(inpar2[0],".xml")) || (inpar2.size()==2 && endswith(inpar2[1],".xml") && endswith(inpar2[1],".xml")) ) { input2 = XML; } // parse the output for(const string& o: outpar) { - if ( o=="xml") { + if ( o=="xml") { output |= XML; + } else if ( o=="r3json" ) { + output |= JSON; } else if ( o=="cool" ) { output |= COOL; } else if ( isUnsignedInteger(o) ) { @@ -312,6 +319,7 @@ JobConfig::parseProgramOptions(int argc, char* argv[]) { l1xmlOutFile = "LVL1config_" + outBase + ".xml"; l1topoOutFile = "L1TopoConfig_" 
+ outBase + ".xml"; hltxmlOutFile = "HLTconfig_" + outBase + ".xml"; + hltJsonOutFile = "HLTMenu_" + outBase + ".json"; } } @@ -319,17 +327,17 @@ JobConfig::parseProgramOptions(int argc, char* argv[]) { string JobConfig::CheckForCompleteSetup() { - if(input==UNDEF) + if(input==UNDEF) return "Use argument '-i' to specify input source and check that the input is specified correctly"; if( input == DB ) { - if( db == "" ) - return "No TriggerDB connection string specified"; + if( db == "" ) + return "No TriggerDB connection string specified"; if( keys.size()==0 ) return "No configuration key(s) specified"; } if( input2 == DB ) { - if( db2 == "" ) - return "No TriggerDB connection string specified for comparison, use option '--db2'"; + if( db2 == "" ) + return "No TriggerDB connection string specified for comparison, use option '--db2'"; if( keys2.size()==0 ) return "No smk specified for comparison, use option '--dbsmk2'"; } @@ -356,7 +364,7 @@ JobConfig::PrintSetup(std::ostream & log, std::ostream& (*lineend) ( std::ostrea if( output != UNDEF ) { log << " Output : "; if( output&XML ) log << l1xmlOutFile << ", " << l1topoOutFile << ", " << hltxmlOutFile; - if( output&COOL ) { + if( output&COOL ) { log << coolOutputConnection; if(coolOutputRunNr==0) { log << ", infinite IOV"; } else { log << ", run nr " << coolOutputRunNr; } } @@ -370,7 +378,7 @@ JobConfig::PrintSetup(std::ostream & log, std::ostream& (*lineend) ( std::ostrea int main( int argc, char* argv[] ) { - + /*************************************** * * Getting the program parameters @@ -407,9 +415,9 @@ int main( int argc, char* argv[] ) { if(errf) errf->close(); return 1; } - + gConfig.PrintSetup(log, lineend); - + /*************************************** * @@ -421,7 +429,7 @@ int main( int argc, char* argv[] ) { HLTFrame* hltFrame(0); TXC::L1TopoMenu* l1tm = nullptr; uint smk(0),l1psk(0),hltpsk(0),bgsk(0), mck{0}; - string release; + string release; /*------------------ @@ -430,7 +438,7 @@ int main( int argc, 
char* argv[] ) { if (gConfig.input == JobConfig::DB) { unique_ptr<StorageMgr> sm(new StorageMgr(gConfig.db, "", "", log)); - // Loading L1 topo + // Loading L1 topo //log << "Retrieving Lvl1 Topo configuration" << lineend; l1tm = new TXC::L1TopoMenu(); l1tm->setSMK(gConfig.getKey(0)); @@ -469,9 +477,9 @@ int main( int argc, char* argv[] ) { mckloader->loadReleaseLinkedToMCK(mck,release); log << "Loaded MCK " << mck << " (active for SMK " << smk << " and release " << release << ")" << endl; } else { - log << "Did not load MCK from DB as MCK is 0 or no MCK is linked"; + log << "Did not load MCK from DB as MCK is 0 or no MCK is linked"; } - + } /*------------------ @@ -522,7 +530,7 @@ int main( int argc, char* argv[] ) { PrescaleSet ps; coolWriter->readL1PrescalePayload( runnumber, lb, l1psk, ps); ctpc->setPrescaleSet( ps ); - + // log << "L1 PSK 0 " << ps.id() << lineend; // log << "L1 PSK 1 " << l1psk << lineend; // log << "L1 PSK 2 " << ctpc->prescaleSet().id() << lineend; @@ -561,7 +569,7 @@ int main( int argc, char* argv[] ) { if (gConfig.input2 != JobConfig::UNDEF) { CTPConfig* ctpc2(0); HLTFrame* hltFrame2(0); - + /*------------------ * from DB *-----------------*/ @@ -578,7 +586,7 @@ int main( int argc, char* argv[] ) { sm->masterTableLoader().load(*ctpc2); ctpc2->muCTPi().setSMK( gConfig.getKey2(0) ); sm->masterTableLoader().load(ctpc2->muCTPi()); - + log << "Retrieving HLT menu configuration and prescale set from the TriggerDB for comparison" << lineend; hltFrame2 = new HLTFrame(); hltFrame2->setSMK( gConfig.getKey2(0) ); @@ -594,7 +602,7 @@ int main( int argc, char* argv[] ) { unique_ptr<XMLStorageMgr> sm( gConfig.inpar2.size()==1 ? 
new XMLStorageMgr( { xmlpathresolve(gConfig.inpar2[0]) } ) : new XMLStorageMgr( { xmlpathresolve(gConfig.inpar2[0]),xmlpathresolve(gConfig.inpar2[1]) } ) ); - + if(sm->hasLVL1()) { ctpc2 = new CTPConfig(); log << "Retrieving Lvl1 CTP configuration from " << sm->m_xmlL1File << lineend; @@ -611,7 +619,7 @@ int main( int argc, char* argv[] ) { sm->hltFrameLoader().load( *hltFrame2 ); log << "Done reading " << sm->m_xmlHLTFile << lineend; } - + } else if (gConfig.input2 == JobConfig::COOL) { /*------------------ * from COOL @@ -665,6 +673,17 @@ int main( int argc, char* argv[] ) { } } + if ( (gConfig.output & JobConfig::JSON) != 0 ) { + /*------------------ + * to JSON + *-----------------*/ + // TODO add L1 menu + if(hltFrame) { + convertRun2HLTtoRun3(hltFrame, gConfig.hltJsonOutFile); + } + + } + if ( (gConfig.output & JobConfig::COOL) != 0 ) { /*------------------ * to COOL @@ -677,16 +696,16 @@ int main( int argc, char* argv[] ) { string info(""); unsigned int runNr = gConfig.coolOutputRunNr; if(runNr == 0) { runNr = 0x80000000; } // infinite range - + if(ctpc) coolWriter->writeL1Payload(runNr, *ctpc); try{ if(hltFrame) coolWriter->writeHLTPayload(runNr, *hltFrame, configSource); - } + } catch(const cool::StorageTypeStringTooLong& e){ - log << "FATAL: Unable to write data to COOL"; - exit(1); + log << "FATAL: Unable to write data to COOL"; + exit(1); } if(mck) coolWriter->writeMCKPayload(runNr, mck, release, info); @@ -696,12 +715,12 @@ int main( int argc, char* argv[] ) { delete hltFrame; if(l1tm!=nullptr) delete l1tm; - + if( gConfig.jo ) { JobOptionTable jot; unique_ptr<IStorageMgr> sm( new StorageMgr(gConfig.db,"","",log) ); - + log << "TrigConfReadWrite: Retrieving JO from the TriggerDB" << lineend; jot.setSMK( gConfig.getKey(0) ); jot.setTriggerLevel(0); // L2 @@ -715,4 +734,3 @@ int main( int argc, char* argv[] ) { } } - diff --git a/Trigger/TrigConfiguration/TrigConfStorage/src/test/Run2toRun3Converters.cxx 
b/Trigger/TrigConfiguration/TrigConfStorage/src/test/Run2toRun3Converters.cxx new file mode 100644 index 0000000000000000000000000000000000000000..869d42f9659c449af4cfa8eb8697ae02a9d14700 --- /dev/null +++ b/Trigger/TrigConfiguration/TrigConfStorage/src/test/Run2toRun3Converters.cxx @@ -0,0 +1,120 @@ +#include "TrigConfData/HLTMenu.h" +#include "TrigConfData/DataStructure.h" +#include "TrigCompositeUtils/HLTIdentifier.h" +#include <boost/property_tree/json_parser.hpp> +#include "TrigConfHLTData/HLTFrame.h" +#include "TrigConfHLTData/HLTChain.h" +#include "TrigConfHLTData/HLTStreamTag.h" +#include "TrigConfHLTData/HLTSignature.h" +#include "TrigConfHLTData/HLTTriggerElement.h" +#include "TrigConfIO/JsonFileWriterHLT.h" + +template<typename COLL> +boost::property_tree::ptree asArray( const COLL& data) { + using ptree = boost::property_tree::ptree; + ptree array; + for ( const auto& el: data ) { + ptree one; + one.put("", el); + array.push_back(std::make_pair("", one)); + } + return array; +} + +std::vector<int> legMult(const TrigConf::HLTChain* cptr) { + std::vector<int> maxMult; + for ( const auto sig: cptr->signatures() ) { + std::vector<std::string> names; + std::vector<int> mult; + for ( const auto te: sig->outputTEs() ) { + auto found = std::find(names.begin(), names.end(), te->name()); + if ( found == names.end() ) { + names.push_back(te->name()); + mult.push_back(1); + } else { + int index = std::find(names.begin(), names.end(), te->name()) - names.begin(); + mult[index]++; + } + } + if ( maxMult.size() < mult.size() ) { + maxMult = mult; + } + } + return maxMult; +} + +std::vector<std::string> l1thresholds(const TrigConf::HLTFrame* frame, const TrigConf::HLTChain* cptr) { + std::set<std::string> names; + for ( const auto sig: cptr->signatures() ) { + for ( const auto te: sig->outputTEs() ) { + auto sequence = frame->sequences().getSequence(te->name()); + for ( const auto inTE: sequence->inputTEs() ) { + std::cout << "IN TE" << inTE->name() << " \n"; + if ( 
not ( inTE->name().find("L2_") == 0 or inTE->name().find("EF_") == 0 or inTE->name().find("HLT_") == 0 ) ) { + names.insert(inTE->name()); + std::cout << "L1 is " << inTE->name() << " \n"; + } + } + } + } + return std::vector<std::string>( names.begin(), names.end() ); +} + + +void convertRun2HLTtoRun3(const TrigConf::HLTFrame* frame, const std::string& filename) { + using ptree = boost::property_tree::ptree; + ptree top; + top.put("filetype", "hltmenu"); + top.put("name", frame->name()); + ptree pChains; + + std::map<std::string, const TrigConf::HLTStreamTag*> allStreams; + + for ( auto cptr : frame->chains() ) { + ptree pChain; + pChain.put("counter", cptr->chain_counter()); + pChain.put("nameHash", cptr->chain_hash_id()); + pChain.put("l1item", cptr->lower_chain_name()); + pChain.add_child("l1thresholds", asArray(l1thresholds(frame, cptr))); + pChain.add_child("legMultiplicities", asArray(legMult(cptr)) ); + pChain.add_child("sequencers", asArray(std::vector<std::string>({"missing"}))); + + std::vector<std::string> strNames; + for ( const auto st: cptr->streams()) { + strNames.push_back(st->stream()); + allStreams[st->stream()] = st; + } + pChain.add_child("streams", asArray(strNames)); + + pChain.add_child("groups", asArray(cptr->groups())); + + pChains.push_back(std::make_pair(cptr->chain_name(), pChain)); + } + ptree pStreams; + for ( auto [sname, stream]: allStreams ) { + ptree pStream; + pStream.put("name", sname); + pStream.put("type", stream->type()); + pStream.put("obeyLB", stream->obeyLB()); + pStream.put("forceFullEventBuilding", true); // TODO understand how to get this information from old menu + pStreams.push_back(std::make_pair(sname, pStream)); + } + + top.add_child("chains", pChains); + + top.add_child("streams", pStreams); + ptree pSequencers; + pSequencers.add_child("missing", asArray(std::vector<std::string>({""}))); + top.add_child("sequencers", pSequencers); + + std::stringstream ss; + boost::property_tree::json_parser::write_json(ss, top); 
+// std::cout << ss.str() << std::endl; + + + TrigConf::HLTMenu menu(std::move(top)); + TrigConf::JsonFileWriterHLT writer; + std::cout << "Saving file: " << filename << std::endl; + writer.writeJsonFile(filename, menu); + +} \ No newline at end of file diff --git a/Trigger/TrigConfiguration/TrigConfStorage/src/test/Run2toRun3Converters.h b/Trigger/TrigConfiguration/TrigConfStorage/src/test/Run2toRun3Converters.h new file mode 100644 index 0000000000000000000000000000000000000000..4a7946faecee7fa3545caeb588c34af7ab01e62c --- /dev/null +++ b/Trigger/TrigConfiguration/TrigConfStorage/src/test/Run2toRun3Converters.h @@ -0,0 +1,7 @@ +/* + Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration +*/ + +#include "TrigConfData/HLTMenu.h" +#include "TrigConfHLTData/HLTFrame.h" +void convertRun2HLTtoRun3(const TrigConf::HLTFrame* frame, const std::string& filename); \ No newline at end of file diff --git a/Trigger/TrigEvent/TrigSteeringEvent/CMakeLists.txt b/Trigger/TrigEvent/TrigSteeringEvent/CMakeLists.txt index fbfdd079e2709b873728290a0b112a2858012c70..a8fcf7cffe3d27f20281fbebca911b0b1073d093 100644 --- a/Trigger/TrigEvent/TrigSteeringEvent/CMakeLists.txt +++ b/Trigger/TrigEvent/TrigSteeringEvent/CMakeLists.txt @@ -12,7 +12,8 @@ if( XAOD_STANDALONE ) TrigConfHLTData ) elseif( XAOD_ANALYSIS ) atlas_add_library( TrigSteeringEvent - TrigSteeringEvent/*.h Root/*.cxx src/*.cxx + TrigSteeringEvent/*.h Root/*.cxx src/HLTExtraData.cxx src/*Result.cxx + src/StringSerializer.cxx src/*Info.cxx src/TrigSuperRoi.cxx PUBLIC_HEADERS TrigSteeringEvent LINK_LIBRARIES AthContainers AsgTools AthenaKernel CxxUtils RoiDescriptor xAODCore GaudiKernel TrigConfHLTData ) @@ -40,7 +41,7 @@ else() atlas_add_sercnv_library( TrigSteeringEventSerCnv FILES TrigSteeringEvent/TrigRoiDescriptor.h - TrigSteeringEvent/TrigSuperRoi.h + TrigSteeringEvent/TrigSuperRoi.h TrigSteeringEvent/TrigPassFlags.h TrigSteeringEvent/TrigPassFlagsCollection.h TrigSteeringEvent/TrigRoiDescriptorCollection.h 
diff --git a/Trigger/TrigHypothesis/TrigEgammaHypo/python/TrigEgammaPrecisionCaloHypoTool.py b/Trigger/TrigHypothesis/TrigEgammaHypo/python/TrigEgammaPrecisionCaloHypoTool.py index 2d4e405e1a51ca0d614cc6d8c3ae4300c260ca76..a2d0687ba5f77c6d8d3a93c8ced30250586b0312 100644 --- a/Trigger/TrigHypothesis/TrigEgammaHypo/python/TrigEgammaPrecisionCaloHypoTool.py +++ b/Trigger/TrigHypothesis/TrigEgammaHypo/python/TrigEgammaPrecisionCaloHypoTool.py @@ -4,9 +4,8 @@ from AthenaCommon.SystemOfUnits import GeV def _IncTool(name, threshold, sel): - from TrigEgammaHypo.TrigEgammaHypoConf import TrigEgammaPrecisionCaloHypoToolInc - - tool = TrigEgammaPrecisionCaloHypoToolInc( name ) + from AthenaConfiguration.ComponentFactory import CompFactory + tool = CompFactory.TrigEgammaPrecisionCaloHypoToolInc(name) from AthenaMonitoringKernel.GenericMonitoringTool import GenericMonitoringTool, defineHistogram monTool = GenericMonitoringTool("MonTool_"+name) @@ -22,7 +21,7 @@ def _IncTool(name, threshold, sel): monTool.Histograms += [ defineHistogram('CutCounter', type='TH1I', path='EXPERT', title="PrecisionCalo Hypo Passed Cuts;Cut", xbins=13, xmin=-1.5, xmax=12.5, opt="kCumulative", xlabels=cuts) ] - monTool.HistPath = 'PrecisionCaloHypo/'+tool.name() + monTool.HistPath = 'PrecisionCaloHypo/'+tool.getName() tool.MonTool = monTool diff --git a/Trigger/TrigHypothesis/TrigEgammaHypo/src/TrigEgammaPrecisionCaloHypoAlgMT.cxx b/Trigger/TrigHypothesis/TrigEgammaHypo/src/TrigEgammaPrecisionCaloHypoAlgMT.cxx index 55fc86417b115a99f5c634bcecf0e44c7130f4a8..6bf6246d71330d8ad4419acf86f55288848abeb1 100644 --- a/Trigger/TrigHypothesis/TrigEgammaHypo/src/TrigEgammaPrecisionCaloHypoAlgMT.cxx +++ b/Trigger/TrigHypothesis/TrigEgammaHypo/src/TrigEgammaPrecisionCaloHypoAlgMT.cxx @@ -43,9 +43,10 @@ StatusCode TrigEgammaPrecisionCaloHypoAlgMT::execute( const EventContext& contex // loop over previous decisions size_t counter=0; for ( auto previousDecision: *previousDecisionsHandle ) { - //get RoI - auto 
roiELInfo = findLink<TrigRoiDescriptorCollection>( previousDecision, initialRoIString() ); - + + //get updated RoI + auto roiELInfo = findLink<TrigRoiDescriptorCollection>( previousDecision, roiString() ); + ATH_CHECK( roiELInfo.isValid() ); const TrigRoiDescriptor* roi = *(roiELInfo.link); @@ -70,7 +71,6 @@ StatusCode TrigEgammaPrecisionCaloHypoAlgMT::execute( const EventContext& contex auto d = newDecisionIn( decisions, name() ); d->setObjectLink( featureString(), el ); TrigCompositeUtils::linkToPrevious( d, decisionInput().key(), counter ); - d->setObjectLink( roiString(), roiELInfo.link ); toolInput.emplace_back( d, roi, clusterHandle.cptr()->at(cl), previousDecision ); validclusters++; diff --git a/Trigger/TrigHypothesis/TrigEgammaHypo/src/TrigEgammaPrecisionElectronHypoAlgMT.cxx b/Trigger/TrigHypothesis/TrigEgammaHypo/src/TrigEgammaPrecisionElectronHypoAlgMT.cxx index 9dbb604bce4556c243291e954cc5bf4929d428ff..3f5bb5c462b81bdbd7e2fec2b21fd4c2eeadbd22 100644 --- a/Trigger/TrigHypothesis/TrigEgammaHypo/src/TrigEgammaPrecisionElectronHypoAlgMT.cxx +++ b/Trigger/TrigHypothesis/TrigEgammaHypo/src/TrigEgammaPrecisionElectronHypoAlgMT.cxx @@ -52,9 +52,10 @@ StatusCode TrigEgammaPrecisionElectronHypoAlgMT::execute( const EventContext& co // loop over previous decisions size_t counter=0; for ( auto previousDecision: *previousDecisionsHandle ) { - //get RoI - auto roiELInfo = findLink<TrigRoiDescriptorCollection>( previousDecision, initialRoIString() ); - + + //get updated RoI + auto roiELInfo = findLink<TrigRoiDescriptorCollection>( previousDecision, roiString() ); + ATH_CHECK( roiELInfo.isValid() ); const TrigRoiDescriptor* roi = *(roiELInfo.link); const auto viewEL = previousDecision->objectLink<ViewContainer>( viewString() ); @@ -77,7 +78,6 @@ StatusCode TrigEgammaPrecisionElectronHypoAlgMT::execute( const EventContext& co auto d = newDecisionIn( decisions, name() ); d->setObjectLink( "feature", ph ); TrigCompositeUtils::linkToPrevious( d, decisionInput().key(), 
counter ); - d->setObjectLink( roiString(), roiELInfo.link ); toolInput.emplace_back( d, roi, electronHandle.cptr()->at(cl), previousDecision ); validelectrons++; diff --git a/Trigger/TrigHypothesis/TrigEgammaHypo/src/TrigEgammaPrecisionPhotonHypoAlgMT.cxx b/Trigger/TrigHypothesis/TrigEgammaHypo/src/TrigEgammaPrecisionPhotonHypoAlgMT.cxx index 6d59b33f34d26405d000e79ec5bac50a7f776865..82ff5a1128462361e69b0f1a4de56ffd1c139ccf 100644 --- a/Trigger/TrigHypothesis/TrigEgammaHypo/src/TrigEgammaPrecisionPhotonHypoAlgMT.cxx +++ b/Trigger/TrigHypothesis/TrigEgammaHypo/src/TrigEgammaPrecisionPhotonHypoAlgMT.cxx @@ -52,9 +52,10 @@ StatusCode TrigEgammaPrecisionPhotonHypoAlgMT::execute( const EventContext& cont // loop over previous decisions size_t counter=0; for ( auto previousDecision: *previousDecisionsHandle ) { - //get RoI - auto roiELInfo = findLink<TrigRoiDescriptorCollection>( previousDecision, initialRoIString() ); - + + //get updated RoI + auto roiELInfo = findLink<TrigRoiDescriptorCollection>( previousDecision, roiString() ); + ATH_CHECK( roiELInfo.isValid() ); const TrigRoiDescriptor* roi = *(roiELInfo.link); @@ -79,7 +80,6 @@ StatusCode TrigEgammaPrecisionPhotonHypoAlgMT::execute( const EventContext& cont auto d = newDecisionIn( decisions, name() ); d->setObjectLink( "feature", ph ); TrigCompositeUtils::linkToPrevious( d, decisionInput().key(), counter ); - d->setObjectLink( roiString(), roiELInfo.link ); toolInput.emplace_back( d, roi, photonHandle.cptr()->at(cl), previousDecision ); validphotons++; diff --git a/Trigger/TrigHypothesis/TrigMuonHypoMT/src/TrigMufastHypoTool.cxx b/Trigger/TrigHypothesis/TrigMuonHypoMT/src/TrigMufastHypoTool.cxx index 19ebae5bcf9dc46d4597a73a66003122afbb809b..d975a6c2cfd41ac389d3779d84e28da055980d44 100644 --- a/Trigger/TrigHypothesis/TrigMuonHypoMT/src/TrigMufastHypoTool.cxx +++ b/Trigger/TrigHypothesis/TrigMuonHypoMT/src/TrigMufastHypoTool.cxx @@ -322,9 +322,9 @@ StatusCode 
TrigMufastHypoTool::inclusiveSelection(std::vector<TrigMufastHypoTool StatusCode TrigMufastHypoTool::multiplicitySelection(std::vector<TrigMufastHypoTool::MuonClusterInfo>& toolInput) const{ HLT::Index2DVec passingSelection( m_ptBins.size() ); + size_t elementIndex{ 0 }; for ( size_t cutIndex=0; cutIndex < m_ptBins.size(); ++cutIndex ) { - size_t elementIndex{ 0 }; for ( auto& i: toolInput ) { if(!m_acceptAll && m_applyOR && !i.passOR) { @@ -332,6 +332,8 @@ StatusCode TrigMufastHypoTool::multiplicitySelection(std::vector<TrigMufastHypoT continue; } + elementIndex = &i - &toolInput.front(); + // If muon event has difference DecisionID, it shouldn't apply. if ( TrigCompositeUtils::passed( m_decisionId.numeric(), i.previousDecisionIDs ) ) { if ( decideOnSingleObject( i, cutIndex ) == true ) { @@ -343,7 +345,6 @@ StatusCode TrigMufastHypoTool::multiplicitySelection(std::vector<TrigMufastHypoT } else { ATH_MSG_DEBUG("Not match DecisionID " << m_decisionId ); } - elementIndex++; } // If no object passes the selection, multipul selection should stop. 
diff --git a/Trigger/TrigHypothesis/TrigMuonHypoMT/src/TrigmuCombHypoTool.cxx b/Trigger/TrigHypothesis/TrigMuonHypoMT/src/TrigmuCombHypoTool.cxx index 106859d6ca42b8f530d31db25ed023f7151a0c63..dc4323fef1c0fed9a47ec5d35e2d9a774edfe370 100644 --- a/Trigger/TrigHypothesis/TrigMuonHypoMT/src/TrigmuCombHypoTool.cxx +++ b/Trigger/TrigHypothesis/TrigMuonHypoMT/src/TrigmuCombHypoTool.cxx @@ -275,9 +275,9 @@ StatusCode TrigmuCombHypoTool::inclusiveSelection(std::vector<TrigmuCombHypoTool StatusCode TrigmuCombHypoTool::multiplicitySelection(std::vector<TrigmuCombHypoTool::CombinedMuonInfo>& input) const { HLT::Index2DVec passingSelection( m_ptBins.size() ); + size_t elementIndex{ 0 }; for ( size_t cutIndex=0; cutIndex < m_ptBins.size(); ++cutIndex ) { - size_t elementIndex{ 0 }; for ( auto& i: input ) { if(!m_acceptAll && m_applyOR && !i.passOR) { @@ -285,6 +285,8 @@ StatusCode TrigmuCombHypoTool::multiplicitySelection(std::vector<TrigmuCombHypoT continue; } + elementIndex = &i - &input.front(); + // If muon event has difference DecisionID, it shouldn't apply. if ( TrigCompositeUtils::passed( m_decisionId.numeric(), i.previousDecisionIDs ) ) { if ( decideOnSingleObject( i, cutIndex ) == true ) { @@ -296,7 +298,6 @@ StatusCode TrigmuCombHypoTool::multiplicitySelection(std::vector<TrigmuCombHypoT } else { ATH_MSG_DEBUG("Not match DecisionID " << m_decisionId ); } - elementIndex++; } // If no object passes the selection, multipul selection should stop. 
diff --git a/Trigger/TrigMonitoring/TrigCaloMonitoring/python/TrigCaloMonitoringConfig.py b/Trigger/TrigMonitoring/TrigCaloMonitoring/python/TrigCaloMonitoringConfig.py index 2351ac41e3eca7bfe6254888d2221f47e5efdb1c..248fad1bf0282eb39bbb68479a2ad0759015c013 100644 --- a/Trigger/TrigMonitoring/TrigCaloMonitoring/python/TrigCaloMonitoringConfig.py +++ b/Trigger/TrigMonitoring/TrigCaloMonitoring/python/TrigCaloMonitoringConfig.py @@ -63,7 +63,9 @@ def HLTCaloMonitoringTool(): from AthenaCommon.AppMgr import ServiceMgr if not hasattr(ServiceMgr,"RegSelSvcDefault"): from RegionSelector.RegSelSvcDefault import RegSelSvcDefault - ServiceMgr += RegSelSvcDefault() + regsel = RegSelSvcDefault() + regsel.enableCalo = True + ServiceMgr += regsel #HLTFullCalo = HLTCaloTool(name = 'HLTFullCalo', # histoPathBase = "/Trigger/HLT") diff --git a/Trigger/TrigMonitoring/TrigJetMonitoring/python/TrigJetMonitorAlgorithm.py b/Trigger/TrigMonitoring/TrigJetMonitoring/python/TrigJetMonitorAlgorithm.py index f118878e94cc0a0c11ff95a6c83c121f904b6262..1519f1a4e54ea744c4f49c0c93b7b0074da933be 100644 --- a/Trigger/TrigMonitoring/TrigJetMonitoring/python/TrigJetMonitorAlgorithm.py +++ b/Trigger/TrigMonitoring/TrigJetMonitoring/python/TrigJetMonitorAlgorithm.py @@ -61,20 +61,21 @@ Chain2JetCollDict['MT'] = { 'HLT_j460_a10_lcw_subjes_L1J100' : 'HLT_AntiKt10LCTopoJets_subjes', 'HLT_j460_a10t_lcw_jes_L1J100' : 'HLT_AntiKt10LCTopoTrimmedPtFrac4SmallR20Jets_jes', 'HLT_2j330_a10t_lcw_jes_35smcINF_L1J100' : 'HLT_AntiKt10LCTopoTrimmedPtFrac4SmallR20Jets_jes', - 'HLT_j45_pf_ftf_L1J20' : 'HLT_AntiKt4EMPFlowJets_subjesIS_ftf', - 'HLT_j45_pf_subjesgscIS_ftf_L1J20' : 'HLT_AntiKt4EMPFlowJets_subjesgscIS_ftf', + 'HLT_j45_pf_ftf_L1J15' : 'HLT_AntiKt4EMPFlowJets_subjesIS_ftf', + 'HLT_j45_pf_subjesgscIS_ftf_L1J15' : 'HLT_AntiKt4EMPFlowJets_subjesgscIS_ftf', 'HLT_j85_pf_ftf_L1J20' : 'HLT_AntiKt4EMPFlowJets_subjesIS_ftf', - 'HLT_j45_pf_nojcalib_ftf_L1J20' : 'HLT_AntiKt4EMPFlowJets_nojcalib_ftf', - 
'HLT_j45_csskpf_nojcalib_ftf_L1J20' : 'HLT_AntiKt4EMPFlowCSSKJets_nojcalib_ftf', + 'HLT_j45_pf_nojcalib_ftf_L1J15' : 'HLT_AntiKt4EMPFlowJets_nojcalib_ftf', + 'HLT_j45_csskpf_nojcalib_ftf_L1J15' : 'HLT_AntiKt4EMPFlowCSSKJets_nojcalib_ftf', + 'HLT_10j40_pf_subresjesgscIS_ftf_L14J15' : 'HLT_AntiKt4EMPFlowJets_subresjesgscIS_ftf', } TurnOnCurves['MT'] = { # ref chain, offline jet coll - 'HLT_j420_L1J100' : ['HLT_j80_L1J15','AntiKt4EMTopoJets'], - 'HLT_3j200_L1J100' : ['HLT_j80_L1J15','AntiKt4EMTopoJets'], - 'HLT_j460_a10r_L1J100' : ['HLT_j80_L1J15','AntiKt4EMTopoJets'], - 'HLT_j460_a10_lcw_subjes_L1J100' : ['HLT_j80_L1J15','AntiKt4EMTopoJets'], - 'HLT_j460_a10t_lcw_jes_L1J100' : ['HLT_j80_L1J15','AntiKt4EMTopoJets'], - 'HLT_2j330_a10t_lcw_jes_35smcINF_L1J100' : ['HLT_j80_L1J15','AntiKt4EMTopoJets'], - 'HLT_j85_pf_ftf_L1J20' : ['HLT_j45_pf_ftf_L1J20','AntiKt4EMPFlowJets'], + 'HLT_j420_L1J100' : ['HLT_j85_L1J20','AntiKt4EMTopoJets'], + 'HLT_3j200_L1J100' : ['HLT_j85_L1J20','AntiKt4EMTopoJets'], + 'HLT_j460_a10r_L1J100' : ['HLT_j85_L1J20','AntiKt4EMTopoJets'], + 'HLT_j460_a10_lcw_subjes_L1J100' : ['HLT_j85_L1J20','AntiKt4EMTopoJets'], + 'HLT_j460_a10t_lcw_jes_L1J100' : ['HLT_j85_L1J20','AntiKt4EMTopoJets'], + 'HLT_2j330_a10t_lcw_jes_35smcINF_L1J100' : ['HLT_j85_L1J20','AntiKt4EMTopoJets'], + 'HLT_j85_pf_ftf_L1J20' : ['HLT_j45_pf_ftf_L1J15','AntiKt4EMPFlowJets'], } JetColls2Match['MT'] = { 'HLT_AntiKt4EMTopoJets_subjesIS' : 'AntiKt4EMPFlowJets', diff --git a/Trigger/TrigSteer/DecisionHandling/DecisionHandling/ComboHypo.h b/Trigger/TrigSteer/DecisionHandling/DecisionHandling/ComboHypo.h index ef7151cc847694c70ebffcfd65392689ca82b7c5..91fa2f8ca7669a474ab688bce59f7bde014532bc 100644 --- a/Trigger/TrigSteer/DecisionHandling/DecisionHandling/ComboHypo.h +++ b/Trigger/TrigSteer/DecisionHandling/DecisionHandling/ComboHypo.h @@ -74,7 +74,7 @@ class ComboHypo : public ::AthReentrantAlgorithm { * @param[out] roiIndex Index inside the roiKey collection. 
**/ StatusCode extractFeatureAndRoI(const ElementLink<TrigCompositeUtils::DecisionContainer>& EL, - uint32_t& featureKey, uint16_t& featureIndex, uint32_t& roiKey, uint16_t& roiIndex) const; + uint32_t& featureKey, uint16_t& featureIndex, uint32_t& roiKey, uint16_t& roiIndex, bool& roiFullscan) const; /** diff --git a/Trigger/TrigSteer/DecisionHandling/share/emu_step_menu_processing.ref b/Trigger/TrigSteer/DecisionHandling/share/emu_step_menu_processing.ref index 5b79e11c73ac5acd93621007f9483ca7e4bf63c9..ce9c480634a2eebe85d10871351ec8e4e6ee3a51 100644 --- a/Trigger/TrigSteer/DecisionHandling/share/emu_step_menu_processing.ref +++ b/Trigger/TrigSteer/DecisionHandling/share/emu_step_menu_processing.ref @@ -144,7 +144,7 @@ TrigSignatureMoniMT INFO -- #2511524900 Events TrigSignatureMoniMT INFO -- #2511524900 Features 3 - 3 3 TrigSignatureMoniMT INFO HLT_TestChain6_muv1_TestChain10_ev1_L1MU6_EM5 #64374772 TrigSignatureMoniMT INFO -- #64374772 Events 2 2 1 1 1 1 1 -TrigSignatureMoniMT INFO -- #64374772 Features 3 3 3 3 +TrigSignatureMoniMT INFO -- #64374772 Features 5 5 5 5 TrigSignatureMoniMT INFO HLT_TestChain6_muv1_TestChain10_muv1_L12MU6 #3196402061 TrigSignatureMoniMT INFO -- #3196402061 Events 2 2 1 0 0 0 0 TrigSignatureMoniMT INFO -- #3196402061 Features 3 0 0 0 @@ -153,7 +153,7 @@ TrigSignatureMoniMT INFO -- #1237112870 Events TrigSignatureMoniMT INFO -- #1237112870 Features 0 0 0 0 TrigSignatureMoniMT INFO HLT_TestChain6_muv2_TestChain8_ev2_L1MU6_EM5 #3476793373 TrigSignatureMoniMT INFO -- #3476793373 Events 2 2 2 2 2 - 2 -TrigSignatureMoniMT INFO -- #3476793373 Features 4 4 4 - +TrigSignatureMoniMT INFO -- #3476793373 Features 7 7 7 - TrigSignatureMoniMT INFO HLT_TestChain8_ev1_L1EM5 #1677577445 TrigSignatureMoniMT INFO -- #1677577445 Events 4 4 3 3 3 - 3 TrigSignatureMoniMT INFO -- #1677577445 Features 4 4 4 - diff --git a/Trigger/TrigSteer/DecisionHandling/share/emu_step_processing.ref b/Trigger/TrigSteer/DecisionHandling/share/emu_step_processing.ref index 
c561eca08f8b5de841062724d55121405d1e3e8a..0df617cc3d1bc5813024717d70ddadbcb83e586c 100644 --- a/Trigger/TrigSteer/DecisionHandling/share/emu_step_processing.ref +++ b/Trigger/TrigSteer/DecisionHandling/share/emu_step_processing.ref @@ -131,7 +131,7 @@ TrigSignatureMoniMT INFO -- #1756953305 Events TrigSignatureMoniMT INFO -- #1756953305 Features 4 4 4 - TrigSignatureMoniMT INFO HLT_TestChain5_ev1_TestChain8_ev1_2TestChain6_muv1_L1EM3_L1EM5_L12MU6 #1820214917 TrigSignatureMoniMT INFO -- #1820214917 Events 1 1 1 1 - - 1 -TrigSignatureMoniMT INFO -- #1820214917 Features 2 2 - - +TrigSignatureMoniMT INFO -- #1820214917 Features 5 5 - - TrigSignatureMoniMT INFO HLT_TestChain5_ev1_TestChain8_ev1_L12EM3 #2709794009 TrigSignatureMoniMT INFO -- #2709794009 Events 0 0 0 0 - - 0 TrigSignatureMoniMT INFO -- #2709794009 Features 0 0 - - @@ -149,7 +149,7 @@ TrigSignatureMoniMT INFO -- #2511524900 Events TrigSignatureMoniMT INFO -- #2511524900 Features 3 - 3 3 TrigSignatureMoniMT INFO HLT_TestChain6_muv1_TestChain10_ev1_L1MU6_EM5 #64374772 TrigSignatureMoniMT INFO -- #64374772 Events 2 2 2 1 - 1 1 -TrigSignatureMoniMT INFO -- #64374772 Features 4 3 - 3 +TrigSignatureMoniMT INFO -- #64374772 Features 7 5 - 5 TrigSignatureMoniMT INFO HLT_TestChain6_muv1_TestChain10_muv1_L12MU6 #3196402061 TrigSignatureMoniMT INFO -- #3196402061 Events 2 2 1 0 - - 0 TrigSignatureMoniMT INFO -- #3196402061 Features 3 0 - - @@ -158,7 +158,7 @@ TrigSignatureMoniMT INFO -- #3205587050 Events TrigSignatureMoniMT INFO -- #3205587050 Features 0 0 - - TrigSignatureMoniMT INFO HLT_TestChain6_muv2_TestChain8_ev2_L1MU6_EM5 #3476793373 TrigSignatureMoniMT INFO -- #3476793373 Events 2 2 2 2 - - 2 -TrigSignatureMoniMT INFO -- #3476793373 Features 4 4 - - +TrigSignatureMoniMT INFO -- #3476793373 Features 7 7 - - TrigSignatureMoniMT INFO HLT_TestChain8_ev1_L1EM5 #1677577445 TrigSignatureMoniMT INFO -- #1677577445 Events 4 4 3 3 3 - 3 TrigSignatureMoniMT INFO -- #1677577445 Features 4 4 4 - diff --git 
a/Trigger/TrigSteer/DecisionHandling/src/ComboHypo.cxx b/Trigger/TrigSteer/DecisionHandling/src/ComboHypo.cxx index 23db05d587b0c539e6cae98a896cde34a5ed67d9..d8a254693e95f82d08684da59033d56cfa2389d5 100644 --- a/Trigger/TrigSteer/DecisionHandling/src/ComboHypo.cxx +++ b/Trigger/TrigSteer/DecisionHandling/src/ComboHypo.cxx @@ -93,7 +93,7 @@ StatusCode ComboHypo::copyDecisions( const LegDecisionsMap & passingLegs, const if ( inputHandle.isValid() ) { for (const Decision* inputDecision : *inputHandle) { - auto thisEL = TrigCompositeUtils::decisionToElementLink(inputDecision, context); + auto thisEL = TrigCompositeUtils::decisionToElementLink(inputDecision, context); DecisionIDContainer inputDecisionIDs; decisionIDs( inputDecision, inputDecisionIDs ); @@ -102,28 +102,29 @@ StatusCode ComboHypo::copyDecisions( const LegDecisionsMap & passingLegs, const std::set_intersection( inputDecisionIDs.begin(), inputDecisionIDs.end(), passing.begin(), passing.end(), std::inserter( common, common.end() ) ); - // check if this EL is in the combination map for the passing decIDs: - ATH_MSG_DEBUG("Searching this element in the map: ("<<thisEL.dataID() << " , " << thisEL.index()<<")"); + // check if this EL is in the combination map for the passing decIDs: + ATH_MSG_DEBUG("Searching this element in the map: ("<<thisEL.dataID() << " , " << thisEL.index()<<")"); DecisionIDContainer finalIds; for (const DecisionID c : common){ const HLT::Identifier cID = HLT::Identifier(c); - // add teh decID only if this candidate passed the combination selection - const ElementLinkVector<DecisionContainer>& Comb=passingLegs.at(c); - if(std::find(Comb.begin(), Comb.end(), thisEL) == Comb.end()) continue; - - ATH_MSG_DEBUG(" Adding "<< cID <<" because EL is found in the passingLegs map"); + // add the decID only if this candidate passed the combination selection + const ElementLinkVector<DecisionContainer>& Comb=passingLegs.at(c); + if(std::find(Comb.begin(), Comb.end(), thisEL) == Comb.end()) { + 
continue; + } + ATH_MSG_DEBUG(" Adding "<< cID <<" because EL is found in the passingLegs map"); finalIds.insert( cID.numeric() ); // all Ids used by the Filter, including legs if (TrigCompositeUtils::isLegId ( cID )){ const HLT::Identifier mainChain = TrigCompositeUtils::getIDFromLeg( cID ); finalIds.insert( mainChain.numeric() ); - ATH_MSG_DEBUG(" Adding "<< mainChain <<" consequently"); + ATH_MSG_DEBUG(" Adding "<< mainChain <<" consequently"); } } Decision* newDec = newDecisionIn( outDecisions, inputDecision, "CH", context ); ATH_MSG_DEBUG("New decision (Container Index:" << input_counter << ", Element Index:"<< newDec->index() <<") has " - << (TrigCompositeUtils::findLink<TrigRoiDescriptorCollection>(newDec, initialRoIString())).isValid() - << " valid initialRoI, "<< TrigCompositeUtils::getLinkToPrevious(newDec).size() <<" previous decisions and "<<finalIds.size()<<" decision IDs") ; + << (TrigCompositeUtils::findLink<TrigRoiDescriptorCollection>(newDec, initialRoIString())).isValid() + << " valid initialRoI, "<< TrigCompositeUtils::getLinkToPrevious(newDec).size() <<" previous decisions and "<<finalIds.size()<<" decision IDs") ; insertDecisionIDs( finalIds, newDec ); } @@ -159,7 +160,6 @@ StatusCode ComboHypo::execute(const EventContext& context ) const { // loop over all chains in the mult-map for ( const auto& m : m_multiplicitiesReqMap ) { - uint32_t nRequiredUnique = 0; const HLT::Identifier chainId = HLT::Identifier(m.first); const std::vector<int>& multiplicityPerLeg = m.second; const DecisionID requiredDecisionID = chainId.numeric(); @@ -169,14 +169,15 @@ StatusCode ComboHypo::execute(const EventContext& context ) const { bool overallDecision = true; - std::set<uint32_t> uniqueDecisionFeatures; - LegDecisionsMap thisChainCombMap; + std::vector< std::set<uint32_t> > legFeatureHashes; //!< Keeps track per leg of the hash of the objects passing the leg + legFeatureHashes.resize( multiplicityPerLeg.size() ); + size_t fsCount = 0; //!< We allow the FullScan 
ROI to pass any multiplicity. So we may have to magic up some unique hashes. Counting integers work fine. + LegDecisionsMap thisChainCombMap; // Check multiplicity of each leg of this chain for ( size_t legIndex = 0; legIndex < multiplicityPerLeg.size(); ++legIndex ) { const size_t requiredMultiplicity = multiplicityPerLeg.at( legIndex ); - nRequiredUnique += requiredMultiplicity; HLT::Identifier legId = TrigCompositeUtils::createLegName(chainId, legIndex); // If there is only one leg, then we just use the chain's name. @@ -194,49 +195,102 @@ StatusCode ComboHypo::execute(const EventContext& context ) const { break; } - //check this leg of the chain passes with required multiplicity - const size_t observedMultiplicity = it->second.size(); + // Check the total number of decision objects we have available on this leg from which to satisfy its multiplicity requirement + const size_t nLegDecisionObjects = it->second.size(); - ATH_MSG_DEBUG( "Required multiplicity " << requiredMultiplicity << " for leg " << legId - << ": observed multiplicity " << observedMultiplicity << " in leg " << legIndex ); + ATH_MSG_DEBUG( "Will attempt to meet the required multiplicity of " << requiredMultiplicity << " for leg " << legId + << " with " << nLegDecisionObjects << " Decision Objects in leg " << legIndex << " of " << legId); - if ( observedMultiplicity < requiredMultiplicity ) { - overallDecision = false; - break; - } - - //keep track of the number of unique features/rois + // We extract unique passing features on the leg, if there are no features yet - then the L1 ROI is used as a fall-back feature + // A special behaviour is that if this fall-back triggers, and the full-scan ROI, then the leg is assumed valid. + // This is regardless of whether or not other legs also use the same FS ROI. Regardless of if the leg's required multiplicity. + // This keeps the leg alive until the actual FS reconstruction may occur. 
At which point, the following ComboHypo will begin + // to cut on the actual reconstructed physics objects. for (const ElementLink<DecisionContainer>& dEL : it->second){ uint32_t featureKey = 0, roiKey = 0; uint16_t featureIndex = 0, roiIndex = 0; + bool roiFullscan = false; // NOTE: roiKey, roiIndex are only currently used in the discrimination for L1 Decision objects (which don't have a 'feature' link) // NOTE: We should make it configurable to choose either the feature or the ROI here, as done in the InputMaker base class when merging. - ATH_CHECK( extractFeatureAndRoI(dEL, featureKey, featureIndex, roiKey, roiIndex) ); - const uint32_t uniquenessHash = (featureKey != 0 ? (featureKey + featureIndex) : (roiKey + roiIndex)); - if (uniquenessHash == 0) { - ATH_MSG_ERROR("Object has no feature, and no initialRoI. Cannot get obtain unique element to avoid double-counting."); - return StatusCode::FAILURE; + ATH_CHECK( extractFeatureAndRoI(dEL, featureKey, featureIndex, roiKey, roiIndex, roiFullscan) ); + if (roiKey != 0 && roiFullscan) { + // This fsCount integer is being to generate unique "hash" values to allow the FS ROI to meet the multiplicity requirements of this leg + for (size_t i = 0; i < requiredMultiplicity; ++i) { + legFeatureHashes.at( legIndex ).insert( ++fsCount ); + ATH_MSG_DEBUG(" -- Add feature hash '" << fsCount << "' to leg " << legIndex << ". (Note: passing hash generated from FullScan ROI)"); + } + } else { + const uint32_t uniquenessHash = (featureKey != 0 ? (featureKey + featureIndex) : (roiKey + roiIndex)); + legFeatureHashes.at( legIndex ).insert( uniquenessHash ); + ATH_MSG_DEBUG(" -- Add feature hash '" << uniquenessHash << "' to leg " << legIndex << "."); } - uniqueDecisionFeatures.insert( uniquenessHash ); } // save combinations of all legs for the tools thisChainCombMap.insert (*it); allDecisionIds.insert(requiredDecisionIDLeg); + + } // end loop over legIndex + + + // Remove any duplicated features which are shared between legs. 
+ // Keep the feature only in the leg which can afford to loose the least number of object, given its multiplicity requirement. + std::set<uint32_t> allFeatureHashes; + for (const std::set<uint32_t>& legHashes : legFeatureHashes) { + allFeatureHashes.insert(legHashes.begin(), legHashes.end()); + } + for (const uint32_t featureHash : allFeatureHashes) { + size_t legsWithHash = 0; //!< If this grows greater than one then we have to start culling features from legs + size_t keepLegIndex = 0; //!< If the hash is used in multiple legs, which leg can least afford to loose it? + int32_t keepLegMargin = std::numeric_limits<int32_t>::max(); //!< How many features the leg at keepLegIndex can afford to loose before it starts to fail its multiplicity requirement. + for (size_t legIndex = 0; legIndex < multiplicityPerLeg.size(); ++legIndex) { + if (legFeatureHashes.at(legIndex).count(featureHash) == 0) { + continue; + } + ++legsWithHash; + const int32_t requiredMultiplicity = multiplicityPerLeg.at(legIndex); + const int32_t currentMultiplicity = legFeatureHashes.at(legIndex).size(); + const int32_t safetyMargin = currentMultiplicity - requiredMultiplicity; // Signed, if -ve then the chain has already been failed by the leg at legIndex + if (safetyMargin < keepLegMargin) { + keepLegMargin = safetyMargin; + keepLegIndex = legIndex; + } + } + if (legsWithHash == 1) { + continue; + } + // If a feature is found on multiple legs, then remove it from all but the leg which can afford to loose it the least + for (size_t legIndex = 0; legIndex < multiplicityPerLeg.size(); ++legIndex) { + if (legIndex == keepLegIndex) { + ATH_MSG_DEBUG("Keeping feature hash '" << featureHash << "', on leg " << legIndex << ". This leg can least afford to loose it. 
" + << "Leg has " << legFeatureHashes.at(legIndex).size() + << " features, and a multiplicity requirement of " << multiplicityPerLeg.at(legIndex)); + continue; + } + if (legFeatureHashes.at(legIndex).erase(featureHash)) { + ATH_MSG_DEBUG("Removed duplicate feature hash '" << featureHash << "', from leg " << legIndex << ". Leg now has " << legFeatureHashes.at(legIndex).size() + << " remaining features, and a multiplicity requirement of " << multiplicityPerLeg.at(legIndex)); + } + } } //check that the multiplicity of unique features is high enough - ATH_MSG_DEBUG("Number of unique features: " << uniqueDecisionFeatures.size() << ", number of required unique decisions: " << nRequiredUnique); - if ( uniqueDecisionFeatures.size() < nRequiredUnique ) { - overallDecision = false; + for (size_t legIndex = 0; legIndex < multiplicityPerLeg.size(); ++legIndex) { + const size_t requiredMultiplicity = multiplicityPerLeg.at(legIndex); + const size_t currentMultiplicity = legFeatureHashes.at(legIndex).size(); + if (currentMultiplicity < requiredMultiplicity) { + ATH_MSG_DEBUG("Leg " << legIndex << " fails multiplicity check. Required unique features:" << requiredMultiplicity << ", found unique features: " << currentMultiplicity); + overallDecision = false; + break; + } } //Overall chain decision ATH_MSG_DEBUG( "Chain " << chainId << ( overallDecision ? 
" is accepted" : " is rejected") <<" after multiplicity requirements" ); if ( overallDecision == true ) { for (auto decID: allDecisionIds) { - // saving the good combiantions - passingLegs.insert (thisChainCombMap.begin(), thisChainCombMap.end()); + // saving the good combinations + passingLegs.insert (thisChainCombMap.begin(), thisChainCombMap.end()); ATH_MSG_DEBUG(" Passing " << HLT::Identifier(decID)<<" after multiplicity test"); } } @@ -247,21 +301,21 @@ StatusCode ComboHypo::execute(const EventContext& context ) const { /////////////////////// if (m_hypoTools.size()>0){ for ( auto& tool: m_hypoTools ) { - ATH_MSG_DEBUG( "Calling tool "<<tool->name()); - ATH_CHECK( tool->decide( passingLegs, context ) ); + ATH_MSG_DEBUG( "Calling tool "<<tool->name()); + ATH_CHECK( tool->decide( passingLegs, context ) ); } } } - // this is only for debug: - if (msgLvl(MSG::DEBUG)){ - DecisionIDContainer passing; - for (auto const& element : passingLegs) { - passing.insert(element.first); - } - for (auto p: passing) - ATH_MSG_DEBUG("Passing "<<HLT::Identifier(p)); + // this is only for debug: + if (msgLvl(MSG::DEBUG)){ + DecisionIDContainer passing; + for (auto const& element : passingLegs) { + passing.insert(element.first); } + for (auto p: passing) + ATH_MSG_DEBUG("Passing "<<HLT::Identifier(p)); + } // need to pass all combinations, since not all element pass the decID ATH_CHECK( copyDecisions( passingLegs, context ) ); @@ -271,7 +325,7 @@ StatusCode ComboHypo::execute(const EventContext& context ) const { StatusCode ComboHypo::extractFeatureAndRoI(const ElementLink<DecisionContainer>& dEL, - uint32_t& featureKey, uint16_t& featureIndex, uint32_t& roiKey, uint16_t& roiIndex) const + uint32_t& featureKey, uint16_t& featureIndex, uint32_t& roiKey, uint16_t& roiIndex, bool& roiFullscan) const { uint32_t featureClid = 0; // Note: Unused. 
We don't care what the type of the feature is here const bool foundFeature = typelessFindLink((*dEL), featureString(), featureKey, featureClid, featureIndex); @@ -280,6 +334,7 @@ StatusCode ComboHypo::extractFeatureAndRoI(const ElementLink<DecisionContainer>& if (roiSeedLI.isValid()) { roiKey = roiSeedLI.link.key(); roiIndex = roiSeedLI.link.index(); + roiFullscan = (*(roiSeedLI.link))->isFullscan(); } if (!foundFeature && !roiSeedLI.isValid()) { ATH_MSG_WARNING("Did not find the feature or initialRoI for " << dEL.dataID() << " index " << dEL.index()); diff --git a/Trigger/TrigT1/TrigT1CaloMonitoring/python/PprMonitorAlgorithm.py b/Trigger/TrigT1/TrigT1CaloMonitoring/python/PprMonitorAlgorithm.py index b08a4acf744dc81a7b6e65f1f08e4b81eff6a670..4aa674810cc2738105926dc23dbaddcca9dde58f 100644 --- a/Trigger/TrigT1/TrigT1CaloMonitoring/python/PprMonitorAlgorithm.py +++ b/Trigger/TrigT1/TrigT1CaloMonitoring/python/PprMonitorAlgorithm.py @@ -30,6 +30,9 @@ def PprMonitoringConfig(inputFlags): threshADC = 50 PprMonAlg.TT_ADC_HitMap_Thresh = threshADC # ADC cut for hit maps + sliceNo = 15 + PprMonAlg.SliceNo = sliceNo # Max number of timeslices in the readout + # Histogram paths mainDir = 'L1Calo' trigPath = 'PPM/' @@ -60,25 +63,25 @@ def PprMonitoringConfig(inputFlags): histPath = trigPath+'/LUT-CP/Distributions' # EM distributions - myGroup.defineHistogram('etaTT_EM;h_ppm_em_1d_tt_lutcp_Eta', title='EM LUT-CP: Distribution of peak in #eta; #eta', type='TH1F', path=histPath, xbins=etabins, cutmask='mask_EM_cpET_0_noPhi') + myGroup.defineHistogram('etaTT_EM;ppm_em_1d_tt_lutcp_Eta', title='EM LUT-CP: Distribution of peak in #eta; #eta', type='TH1F', path=histPath, xbins=etabins, cutmask='mask_EM_cpET_0_noPhi') - myGroup.defineHistogram('phiTT_1d_EM;h_ppm_em_1d_tt_lutcp_Phi', title='EM LUT-CP: Distribution of peak in #phi; #phi', type='TH1F', path=histPath, xbins=phibins, xmin=phimin, xmax=phimax_1d, cutmask='mask_EM_cpET_0_phiBins') + 
myGroup.defineHistogram('phiTT_1d_EM;ppm_em_1d_tt_lutcp_Phi', title='EM LUT-CP: Distribution of peak in #phi; #phi', type='TH1F', path=histPath, xbins=phibins, xmin=phimin, xmax=phimax_1d, cutmask='mask_EM_cpET_0_phiBins') - myGroup.defineHistogram('cpET_EM;h_ppm_em_1d_tt_lutcp_Et', title='EM LUT-CP: Distribution of peak; EM LUT peak [GeV/2]', type='TH1F', path=histPath, xbins=maxEnergyRange-1, xmin=1, xmax=maxEnergyRange, cutmask='mask_EM_cpET_0_noPhi') + myGroup.defineHistogram('cpET_EM;ppm_em_1d_tt_lutcp_Et', title='EM LUT-CP: Distribution of peak; EM LUT peak [GeV/2]', type='TH1F', path=histPath, xbins=maxEnergyRange-1, xmin=1, xmax=maxEnergyRange, cutmask='mask_EM_cpET_0_noPhi') # HAD distributions - myGroup.defineHistogram('etaTT_HAD;h_ppm_had_1d_tt_lutcp_Eta', title='HAD LUT-CP: Distribution of peak in #eta; #eta', type='TH1F', path=histPath, xbins=etabins, cutmask='mask_HAD_cpET_0_noPhi') + myGroup.defineHistogram('etaTT_HAD;ppm_had_1d_tt_lutcp_Eta', title='HAD LUT-CP: Distribution of peak in #eta; #eta', type='TH1F', path=histPath, xbins=etabins, cutmask='mask_HAD_cpET_0_noPhi') - myGroup.defineHistogram('phiTT_1d_HAD;h_ppm_had_1d_tt_lutcp_Phi', title='HAD LUT-CP: Distribution of peak in #phi; #phi', type='TH1F', path=histPath, xbins=phibins, xmin=phimin, xmax=phimax_1d, cutmask='mask_HAD_cpET_0_phiBins') + myGroup.defineHistogram('phiTT_1d_HAD;ppm_had_1d_tt_lutcp_Phi', title='HAD LUT-CP: Distribution of peak in #phi; #phi', type='TH1F', path=histPath, xbins=phibins, xmin=phimin, xmax=phimax_1d, cutmask='mask_HAD_cpET_0_phiBins') - myGroup.defineHistogram('cpET_HAD;h_ppm_had_1d_tt_lutcp_Et', title='HAD LUT-CP: Distribution of peak; HAD LUT peak [GeV/2]', type='TH1F', path=histPath, xbins=maxEnergyRange-1, xmin=1, xmax=maxEnergyRange, cutmask='mask_HAD_cpET_0_noPhi') + myGroup.defineHistogram('cpET_HAD;ppm_had_1d_tt_lutcp_Et', title='HAD LUT-CP: Distribution of peak; HAD LUT peak [GeV/2]', type='TH1F', path=histPath, xbins=maxEnergyRange-1, xmin=1, 
xmax=maxEnergyRange, cutmask='mask_HAD_cpET_0_noPhi') # Eta-phi maps histPath = trigPath+'/LUT-CP/EtaPhiMaps' - myGroup.defineHistogram('etaTT_EM,phiTT_2d_EM,cpET_EM;h_ppm_em_2d_etaPhi_tt_lutcp_AverageEt', title='EM Average LUT-CP Et for Et > 5 GeV/2', type='TProfile2D', path=histPath, xbins=etabins, ybins=phibins, ymin=phimin, ymax=phimax_2d, cutmask='mask_EM_cpET_5_phiBins') + myGroup.defineHistogram('etaTT_EM,phiTT_2d_EM,cpET_EM;ppm_em_2d_etaPhi_tt_lutcp_AverageEt', title='EM Average LUT-CP Et for Et > 5 GeV/2', type='TProfile2D', path=histPath, xbins=etabins, ybins=phibins, ymin=phimin, ymax=phimax_2d, cutmask='mask_EM_cpET_5_phiBins') - myGroup.defineHistogram('etaTT_HAD,phiTT_2d_HAD,cpET_HAD;h_ppm_had_2d_etaPhi_tt_lutcp_AverageEt', title='HAD Average LUT-CP Et for Et > 5 GeV/2', type='TProfile2D', path=histPath, xbins=etabins, ybins=phibins, ymin=phimin, ymax=phimax_2d, cutmask='mask_HAD_cpET_5_phiBins') + myGroup.defineHistogram('etaTT_HAD,phiTT_2d_HAD,cpET_HAD;ppm_had_2d_etaPhi_tt_lutcp_AverageEt', title='HAD Average LUT-CP Et for Et > 5 GeV/2', type='TProfile2D', path=histPath, xbins=etabins, ybins=phibins, ymin=phimin, ymax=phimax_2d, cutmask='mask_HAD_cpET_5_phiBins') ######################## @@ -87,25 +90,25 @@ def PprMonitoringConfig(inputFlags): histPath = trigPath+'/LUT-JEP/Distributions' # EM distributions - myGroup.defineHistogram('etaTT_EM;h_ppm_em_1d_tt_lutjep_Eta', title='EM LUT-JEP: Distribution of peak in #eta', type='TH1F', path=histPath, xbins=etabins, cutmask='mask_EM_jepET_0_noPhi') + myGroup.defineHistogram('etaTT_EM;ppm_em_1d_tt_lutjep_Eta', title='EM LUT-JEP: Distribution of peak in #eta', type='TH1F', path=histPath, xbins=etabins, cutmask='mask_EM_jepET_0_noPhi') - myGroup.defineHistogram('phiTT_1d_EM;h_ppm_em_1d_tt_lutjep_Phi', title='EM LUT-JEP: Distribution of peak in #phi; #phi', type='TH1F', path=histPath, xbins=phibins, xmin=phimin, xmax=phimax_1d, cutmask='mask_EM_jepET_0_phiBins') + 
myGroup.defineHistogram('phiTT_1d_EM;ppm_em_1d_tt_lutjep_Phi', title='EM LUT-JEP: Distribution of peak in #phi; #phi', type='TH1F', path=histPath, xbins=phibins, xmin=phimin, xmax=phimax_1d, cutmask='mask_EM_jepET_0_phiBins') - myGroup.defineHistogram('jepET_EM;h_ppm_em_1d_tt_lutjep_Et', title='EM LUT-JEP: Distribution of peak; EM LUT peak [GeV]', type='TH1F', path=histPath, xbins=maxEnergyRange-1, xmin=1, xmax=maxEnergyRange, cutmask='mask_EM_jepET_0_noPhi') + myGroup.defineHistogram('jepET_EM;ppm_em_1d_tt_lutjep_Et', title='EM LUT-JEP: Distribution of peak; EM LUT peak [GeV]', type='TH1F', path=histPath, xbins=maxEnergyRange-1, xmin=1, xmax=maxEnergyRange, cutmask='mask_EM_jepET_0_noPhi') # HAD distributions - myGroup.defineHistogram('etaTT_HAD;h_ppm_had_1d_tt_lutjep_Eta', title='HAD LUT-JEP: Distribution of peak in #eta', type='TH1F', path=histPath, xbins=etabins, cutmask='mask_HAD_jepET_0_noPhi') + myGroup.defineHistogram('etaTT_HAD;ppm_had_1d_tt_lutjep_Eta', title='HAD LUT-JEP: Distribution of peak in #eta', type='TH1F', path=histPath, xbins=etabins, cutmask='mask_HAD_jepET_0_noPhi') - myGroup.defineHistogram('phiTT_1d_HAD;h_ppm_had_1d_tt_lutjep_Phi', title='HAD LUT-JEP: Distribution of peak in #phi; #phi', type='TH1F', path=histPath, xbins=phibins, xmin=phimin, xmax=phimax_1d, cutmask='mask_HAD_jepET_0_phiBins') + myGroup.defineHistogram('phiTT_1d_HAD;ppm_had_1d_tt_lutjep_Phi', title='HAD LUT-JEP: Distribution of peak in #phi; #phi', type='TH1F', path=histPath, xbins=phibins, xmin=phimin, xmax=phimax_1d, cutmask='mask_HAD_jepET_0_phiBins') - myGroup.defineHistogram('jepET_HAD;h_ppm_had_1d_tt_lutjep_Et', title='HAD LUT-JEP: Distribution of peak; HAD LUT peak [GeV]', type='TH1F', path=histPath, xbins=maxEnergyRange-1, xmin=1, xmax=maxEnergyRange, cutmask='mask_HAD_jepET_0_noPhi') + myGroup.defineHistogram('jepET_HAD;ppm_had_1d_tt_lutjep_Et', title='HAD LUT-JEP: Distribution of peak; HAD LUT peak [GeV]', type='TH1F', path=histPath, xbins=maxEnergyRange-1, 
xmin=1, xmax=maxEnergyRange, cutmask='mask_HAD_jepET_0_noPhi') # Eta-phi maps histPath = trigPath+'/LUT-JEP/EtaPhiMaps' - myGroup.defineHistogram('etaTT_EM,phiTT_2d_EM,jepET_EM;h_ppm_em_2d_etaPhi_tt_lutjep_AverageEt', title='EM Average LUT-JEP Et for Et > 5 GeV', type='TProfile2D', path=histPath, xbins=etabins, ybins=phibins, ymin=phimin, ymax=phimax_2d, cutmask='mask_EM_jepET_5_phiBins') + myGroup.defineHistogram('etaTT_EM,phiTT_2d_EM,jepET_EM;ppm_em_2d_etaPhi_tt_lutjep_AverageEt', title='EM Average LUT-JEP Et for Et > 5 GeV', type='TProfile2D', path=histPath, xbins=etabins, ybins=phibins, ymin=phimin, ymax=phimax_2d, cutmask='mask_EM_jepET_5_phiBins') - myGroup.defineHistogram('etaTT_HAD,phiTT_2d_HAD,jepET_HAD;h_ppm_had_2d_etaPhi_tt_lutjep_AverageEt', title='HAD Average LUT-JEP Et for Et > 5 GeV', type='TProfile2D', path=histPath, xbins=etabins, ybins=phibins, ymin=phimin, ymax=phimax_2d, cutmask='mask_HAD_jepET_5_phiBins') + myGroup.defineHistogram('etaTT_HAD,phiTT_2d_HAD,jepET_HAD;ppm_had_2d_etaPhi_tt_lutjep_AverageEt', title='HAD Average LUT-JEP Et for Et > 5 GeV', type='TProfile2D', path=histPath, xbins=etabins, ybins=phibins, ymin=phimin, ymax=phimax_2d, cutmask='mask_HAD_jepET_5_phiBins') #################### @@ -114,18 +117,36 @@ def PprMonitoringConfig(inputFlags): histPath = trigPath+'/ADC/EtaPhiMaps' # EM tower maps - myGroup.defineHistogram('etaTT_EM,phiTT_2d_EM;h_ppm_em_2d_etaPhi_tt_adc_HitMap', title='#eta - #phi map of EM FADC > ' +str(threshADC)+ ' for triggered timeslice; Tower #eta; Tower #phi', type='TH2F', path=histPath, xbins=etabins, ybins=phibins, ymin=phimin, ymax=phimax_2d, cutmask='mask_EM_timeslice') + myGroup.defineHistogram('etaTT_EM,phiTT_2d_EM;ppm_em_2d_etaPhi_tt_adc_HitMap', title='#eta - #phi map of EM FADC > ' +str(threshADC)+ ' for triggered timeslice; Tower #eta; Tower #phi', type='TH2F', path=histPath, xbins=etabins, ybins=phibins, ymin=phimin, ymax=phimax_2d, cutmask='mask_EM_timeslice') - 
myGroup.defineHistogram('etaTT_EM,phiTT_2d_EM,emTT_ADC;h_ppm_em_2d_etaPhi_tt_adc_ProfileHitMap', title='#eta - #phi profile map of EM FADC > ' +str(threshADC)+ ' for triggered timeslice; Tower #eta; Tower #phi', type='TProfile2D', path=histPath, xbins=etabins, ybins=phibins, ymin=phimin, ymax=phimax_2d, cutmask='mask_EM_timeslice') + myGroup.defineHistogram('etaTT_EM,phiTT_2d_EM,emTT_ADC;ppm_em_2d_etaPhi_tt_adc_ProfileHitMap', title='#eta - #phi profile map of EM FADC > ' +str(threshADC)+ ' for triggered timeslice; Tower #eta; Tower #phi', type='TProfile2D', path=histPath, xbins=etabins, ybins=phibins, ymin=phimin, ymax=phimax_2d, cutmask='mask_EM_timeslice') # HAD tower maps - myGroup.defineHistogram('etaTT_HAD,phiTT_2d_HAD;h_ppm_had_2d_etaPhi_tt_adc_HitMap', title='#eta - #phi map of HAD FADC > ' +str(threshADC)+ ' for triggered timeslice; Tower #eta; Tower #phi', type='TH2F', path=histPath, xbins=etabins, ybins=phibins, ymin=phimin, ymax=phimax_2d, cutmask='mask_HAD_timeslice') + myGroup.defineHistogram('etaTT_HAD,phiTT_2d_HAD;ppm_had_2d_etaPhi_tt_adc_HitMap', title='#eta - #phi map of HAD FADC > ' +str(threshADC)+ ' for triggered timeslice; Tower #eta; Tower #phi', type='TH2F', path=histPath, xbins=etabins, ybins=phibins, ymin=phimin, ymax=phimax_2d, cutmask='mask_HAD_timeslice') + + myGroup.defineHistogram('etaTT_HAD,phiTT_2d_HAD,hadTT_ADC;ppm_had_2d_etaPhi_tt_adc_ProfileHitMap', title='#eta - #phi profile map of HAD FADC > ' +str(threshADC)+ ' for triggered timeslice; Tower #eta; Tower #phi', type='TProfile2D', path=histPath, xbins=etabins, ybins=phibins, ymin=phimin, ymax=phimax_2d, cutmask='mask_HAD_timeslice') + + + # Triggered time-slice + histPath = trigPath+'/ADC/Timeslices' + + myGroup.defineHistogram('adcPeak_EM;ppm_em_1d_tt_adc_TriggeredSlice', title='Number of the EM triggered slice; # Slice', type='TH1F', path=histPath, xbins=sliceNo, xmin=0, xmax=sliceNo, cutmask='mask_EM_noDuplicates') - 
myGroup.defineHistogram('etaTT_HAD,phiTT_2d_HAD,hadTT_ADC;h_ppm_had_2d_etaPhi_tt_adc_ProfileHitMap', title='#eta - #phi profile map of HAD FADC > ' +str(threshADC)+ ' for triggered timeslice; Tower #eta; Tower #phi', type='TProfile2D', path=histPath, xbins=etabins, ybins=phibins, ymin=phimin, ymax=phimax_2d, cutmask='mask_HAD_timeslice') + myGroup.defineHistogram('adcPeak_HAD;ppm_had_1d_tt_adc_TriggeredSlice', title='Number of the HAD triggered slice; # Slice', type='TH1F', path=histPath, xbins=sliceNo, xmin=0, xmax=sliceNo, cutmask='mask_HAD_noDuplicates') - + myGroup.defineHistogram('maxADC_EM;ppm_em_1d_tt_adc_MaxTimeslice', title='EM distribution of maximum timeslice; slice', type='TH1D', path=histPath, xbins=sliceNo, xmin=0, xmax=sliceNo, cutmask='mask_EM_maxSlice_noPhi') + + myGroup.defineHistogram('maxADC_HAD;ppm_had_1d_tt_adc_MaxTimeslice', title='HAD distribution of maximum timeslice; slice', type='TH1D', path=histPath, xbins=sliceNo, xmin=0, xmax=sliceNo, cutmask='mask_HAD_maxSlice_noPhi') + myGroup.defineHistogram('etaTT_EM,phiTT_2d_EM,maxADCPlus1_EM;ppm_em_2d_etaPhi_tt_adc_MaxTimeslice', title='Average maximum timeslice for EM signal (TS:1-15); Tower #eta; Tower #phi', type='TProfile2D', path=histPath, xbins=etabins, ybins=phibins, ymin=phimin, ymax=phimax_2d, cutmask='mask_EM_maxSlice') + + myGroup.defineHistogram('etaTT_HAD,phiTT_2d_HAD,maxADCPlus1_HAD;ppm_had_2d_etaPhi_tt_adc_MaxTimeslice', title='Average maximum timeslice for HAD signal (TS:1-15); Tower #eta; Tower #phi', type='TProfile2D', path=histPath, xbins=etabins, ybins=phibins, ymin=phimin, ymax=phimax_2d, cutmask='mask_HAD_maxSlice') + + + + # Finish up + acc = helper.result() result.merge(acc) return result diff --git a/Trigger/TrigT1/TrigT1CaloMonitoring/src/PprMonitorAlgorithm.cxx b/Trigger/TrigT1/TrigT1CaloMonitoring/src/PprMonitorAlgorithm.cxx index 831506ae818541562945776ef81ac53c7d874a4d..93028892344ec56c13c44e422970b4d62680d1f4 100644 --- 
a/Trigger/TrigT1/TrigT1CaloMonitoring/src/PprMonitorAlgorithm.cxx +++ b/Trigger/TrigT1/TrigT1CaloMonitoring/src/PprMonitorAlgorithm.cxx @@ -88,6 +88,12 @@ StatusCode PprMonitorAlgorithm::fillHistograms( const EventContext& ctx ) const auto jepET_EM = Monitored::Collection("jepET_EM", vecMonTT_EM, []( const auto &emTower ){return emTower.jepET;}); variables.push_back(jepET_EM); + auto maxADC_EM = Monitored::Collection("maxADC_EM", vecMonTT_EM, []( const auto &emTower ){return emTower.maxADC;}); + variables.push_back(maxADC_EM); + + auto adcPeak_EM = Monitored::Collection("adcPeak_EM", vecMonTT_EM, []( const auto &emTower ){return emTower.tower->adcPeak();}); + variables.push_back(adcPeak_EM); + // HAD towers auto etaTT_HAD = Monitored::Collection("etaTT_HAD", vecMonTT_HAD, []( const auto &hadTower ){return hadTower.tower->eta();}); variables.push_back(etaTT_HAD); @@ -103,9 +109,19 @@ StatusCode PprMonitorAlgorithm::fillHistograms( const EventContext& ctx ) const auto jepET_HAD = Monitored::Collection("jepET_HAD", vecMonTT_HAD, []( const auto &hadTower ){return hadTower.jepET;}); variables.push_back(jepET_HAD); + + auto maxADC_HAD = Monitored::Collection("maxADC_HAD", vecMonTT_HAD, []( const auto &hadTower ){return hadTower.maxADC;}); + variables.push_back(maxADC_HAD); + + auto adcPeak_HAD = Monitored::Collection("adcPeak_HAD", vecMonTT_HAD, []( const auto &hadTower ){return hadTower.tower->adcPeak();}); + variables.push_back(adcPeak_HAD); + // Cutmasks (EM) + std::vector<int> vec_EM_noDuplicates = {}; std::vector<int> vec_EM_timeslice = {}; + std::vector<int> vec_EM_maxSlice = {}; + std::vector<int> vec_EM_maxSlice_noDuplicates = {}; std::vector<int> vec_EM_cpET_0 = {}; // includes "duplicate" towers for phi maps std::vector<int> vec_EM_cpET_0_noDuplicates = {}; // no duplicates: for plots not binned in phi std::vector<int> vec_EM_cpET_5 = {}; @@ -118,6 +134,9 @@ StatusCode PprMonitorAlgorithm::fillHistograms( const EventContext& ctx ) const // Weights 
std::vector<int> vec_EM_ADC = {}; + // For average ADC plots + std::vector<double> vec_EM_maxADCPlus1 = {}; + for (auto& emTower : vecMonTT_EM) { @@ -129,6 +148,7 @@ StatusCode PprMonitorAlgorithm::fillHistograms( const EventContext& ctx ) const ATH_MSG_DEBUG("cpET: " << cpET << " jepET: " << jepET); // Fill the cutmasks for EM LUT-CP and LUT-JEP energy distributions + vec_EM_noDuplicates.push_back(!isDuplicate); vec_EM_cpET_0.push_back(cpET > 0); // For phi distributions / maps vec_EM_cpET_0_noDuplicates.push_back((cpET > 0) && !isDuplicate); // For plots not binned in phi vec_EM_cpET_5.push_back(cpET > 5); @@ -155,12 +175,21 @@ StatusCode PprMonitorAlgorithm::fillHistograms( const EventContext& ctx ) const vec_EM_timeslice.push_back(0); vec_EM_ADC.push_back(0); } - ATH_MSG_DEBUG(" mask: " << vec_EM_timeslice.back()); - + + // -------- Timing of FADC signal -------- + + double max = emTower.maxADC; + vec_EM_maxSlice.push_back(max >= 0.); + vec_EM_maxSlice_noDuplicates.push_back((max >= 0.) 
&& !isDuplicate); + vec_EM_maxADCPlus1.push_back(max + 1.); + } // End loop over vector of EM towers // Cutmasks (HAD) + std::vector<int> vec_HAD_noDuplicates = {}; std::vector<int> vec_HAD_timeslice = {}; + std::vector<int> vec_HAD_maxSlice = {}; + std::vector<int> vec_HAD_maxSlice_noDuplicates = {}; std::vector<int> vec_HAD_cpET_0 = {}; // includes "duplicate" towers for phi maps std::vector<int> vec_HAD_cpET_0_noDuplicates = {}; // no duplicates: for plots not binned in phi std::vector<int> vec_HAD_cpET_5 = {}; @@ -174,6 +203,9 @@ StatusCode PprMonitorAlgorithm::fillHistograms( const EventContext& ctx ) const // HAD weights std::vector<int> vec_HAD_ADC = {}; + // For average ADC plots + std::vector<double> vec_HAD_maxADCPlus1 = {}; + for (auto& hadTower : vecMonTT_HAD) { // -------- LUT -------- @@ -184,6 +216,7 @@ StatusCode PprMonitorAlgorithm::fillHistograms( const EventContext& ctx ) const if (cpET > 0) ATH_MSG_DEBUG("HAD tower cpET: " << cpET << " jepET: " << jepET); // Fill the cutmasks for HAD LUT-CP and LUT-JEP energy distributions + vec_HAD_noDuplicates.push_back(!isDuplicate); vec_HAD_cpET_0.push_back(cpET > 0); // For phi distributions / maps vec_HAD_cpET_0_noDuplicates.push_back((cpET > 0) && !isDuplicate); // For plots not binned in phi vec_HAD_cpET_5.push_back(cpET > 5); @@ -211,13 +244,37 @@ StatusCode PprMonitorAlgorithm::fillHistograms( const EventContext& ctx ) const } ATH_MSG_DEBUG(" mask: " << vec_HAD_timeslice.back()); + // -------- Timing of FADC signal -------- + + double max = hadTower.maxADC; + vec_HAD_maxSlice.push_back(max >= 0.); + vec_HAD_maxSlice_noDuplicates.push_back((max >= 0.) 
&& !isDuplicate); + vec_HAD_maxADCPlus1.push_back(max + 1.); + } // End loop over vector of HAD towers + + // Define additional monitored variables (EM) + auto maxADCPlus1_EM = Monitored::Collection("maxADCPlus1_EM", vec_EM_maxADCPlus1); + variables.push_back(maxADCPlus1_EM); + + // Define additional monitored variables (HAD) + auto maxADCPlus1_HAD = Monitored::Collection("maxADCPlus1_HAD", vec_HAD_maxADCPlus1); + variables.push_back(maxADCPlus1_HAD); // Define the cutmasks (EM) + auto mask_EM_noDuplicates = Monitored::Collection("mask_EM_noDuplicates", vec_EM_noDuplicates); + variables.push_back(mask_EM_noDuplicates); + auto mask_EM_timeslice = Monitored::Collection("mask_EM_timeslice", vec_EM_timeslice); variables.push_back(mask_EM_timeslice); - + + auto mask_EM_maxSlice = Monitored::Collection("mask_EM_maxSlice", vec_EM_maxSlice); + variables.push_back(mask_EM_maxSlice); + + auto mask_EM_maxSlice_noPhi = Monitored::Collection("mask_EM_maxSlice_noPhi", vec_EM_maxSlice_noDuplicates); + variables.push_back(mask_EM_maxSlice_noPhi); + auto emTT_ADC = Monitored::Collection("emTT_ADC", vec_EM_ADC); variables.push_back(emTT_ADC); @@ -247,9 +304,18 @@ StatusCode PprMonitorAlgorithm::fillHistograms( const EventContext& ctx ) const // Define the cutmasks (HAD) + auto mask_HAD_noDuplicates = Monitored::Collection("mask_HAD_noDuplicates", vec_HAD_noDuplicates); + variables.push_back(mask_HAD_noDuplicates); + auto mask_HAD_timeslice = Monitored::Collection("mask_HAD_timeslice", vec_HAD_timeslice); variables.push_back(mask_HAD_timeslice); + auto mask_HAD_maxSlice = Monitored::Collection("mask_HAD_maxSlice", vec_HAD_maxSlice); + variables.push_back(mask_HAD_maxSlice); + + auto mask_HAD_maxSlice_noPhi = Monitored::Collection("mask_HAD_maxSlice_noPhi", vec_HAD_maxSlice_noDuplicates); + variables.push_back(mask_HAD_maxSlice_noPhi); + auto hadTT_ADC = Monitored::Collection("hadTT_ADC", vec_HAD_ADC); variables.push_back(hadTT_ADC); @@ -288,17 +354,25 @@ StatusCode 
PprMonitorAlgorithm::fillPPMTowerEtaPhi( const xAOD::TriggerTower_v2* std::vector<MonitorTT> &vecMonTT_HAD, std::vector<MonitorTT> &vecMonTT) const { - + // Geometry const int layer = tt->layer(); // 0 = EM, 1 = HAD const double eta = tt->eta(); const double absEta = std::fabs(eta); const double phi = tt->phi(); double phiMod = phi * m_phiScaleTT; + + // LUT JEP int jepET = 0; const std::vector<uint_least8_t> jepETvec = tt->lut_jep(); if (jepETvec.size() > 0) jepET = tt->jepET(); + + // To remove duplicates when filling multiple bins in phi bool isDuplicate = false; + // ADC timeslice + const std::vector<short unsigned int> &ADC( tt->adc() ); + double max = recTime(ADC, m_EMFADCCut); + // Offsets for filling multiple phi bins depending on TT granularity in eta std::vector<double> offset32 = {1.5, 0.5, -0.5, -1.5}; std::vector<double> offset25 = {0.5, -0.5}; @@ -318,12 +392,13 @@ StatusCode PprMonitorAlgorithm::fillPPMTowerEtaPhi( const xAOD::TriggerTower_v2* monTT.jepET = jepET; if(i != 0) isDuplicate = true; monTT.isDuplicate = isDuplicate; - + monTT.maxADC = max; + vecMonTT.push_back(monTT); if (layer == 0) vecMonTT_EM.push_back(monTT); if (layer == 1) vecMonTT_HAD.push_back(monTT); - ATH_MSG_DEBUG("layer: " << layer << " eta: " << eta << " phi: " << phi << " scaled phi: " << monTT.phiScaled << " 1d phi: " << phi1d << " duplicate: " << monTT.isDuplicate); + ATH_MSG_DEBUG("layer: " << layer << " eta: " << eta << " phi: " << phi << " scaled phi: " << monTT.phiScaled << " 1d phi: " << phi1d << " max: " << max << " duplicate: " << monTT.isDuplicate); } } else if (absEta > 2.5) { @@ -337,11 +412,12 @@ StatusCode PprMonitorAlgorithm::fillPPMTowerEtaPhi( const xAOD::TriggerTower_v2* monTT.jepET = jepET; if(i != 0) isDuplicate = true; monTT.isDuplicate = isDuplicate; + monTT.maxADC = max; vecMonTT.push_back(monTT); if (layer == 0) vecMonTT_EM.push_back(monTT); if (layer == 1) vecMonTT_HAD.push_back(monTT); - ATH_MSG_DEBUG("layer: " << layer << " eta: " << eta << " phi: " 
<< phi << " scaled phi: " << monTT.phiScaled << " 1d phi: " << phi1d << " duplicate: " << monTT.isDuplicate); + ATH_MSG_DEBUG("layer: " << layer << " eta: " << eta << " phi: " << phi << " scaled phi: " << monTT.phiScaled << " 1d phi: " << phi1d << " max: " << max << " duplicate: " << monTT.isDuplicate); } } else { @@ -352,11 +428,69 @@ StatusCode PprMonitorAlgorithm::fillPPMTowerEtaPhi( const xAOD::TriggerTower_v2* monTT.phi1d = phi; monTT.jepET = jepET; monTT.isDuplicate = false; + monTT.maxADC = max; + vecMonTT.push_back(monTT); if (layer == 0) vecMonTT_EM.push_back(monTT); if (layer == 1) vecMonTT_HAD.push_back(monTT); - ATH_MSG_DEBUG("layer: " << layer << " eta: " << eta << " phi: " << phi << " scaled phi: " << monTT.phiScaled << " 1d phi: " << phi1d << " duplicate: " << monTT.isDuplicate); + ATH_MSG_DEBUG("layer: " << layer << " eta: " << eta << " phi: " << phi << " scaled phi: " << monTT.phiScaled << " 1d phi: " << phi1d << " max: " << max << " duplicate: " << monTT.isDuplicate); } return StatusCode::SUCCESS; } + +double PprMonitorAlgorithm::recTime(const std::vector<short unsigned int> &vFADC, int cut) const { + + int max = -1; + const int slices = vFADC.size(); + if (slices > 0) { + max = 0.; + int maxAdc = vFADC[0]; + for (int sl = 1; sl < slices; ++sl) { + if (vFADC[sl] > maxAdc) { + maxAdc = vFADC[sl]; + max = sl; + } else if (vFADC[sl] == maxAdc) + max = -1; + } + if (maxAdc == 0) + max = -1; + } + if (max >= 0) { + int slbeg = max - 2; + if (slbeg < 0) + slbeg = 0; + int slend = max + 3; + if (slend > slices) + slend = slices; + int sum = 0; + int min = 999999; + for (int sl = slbeg; sl < slend; ++sl) { + int val = vFADC[sl]; + if (val < m_TT_ADC_Pedestal) + val = m_TT_ADC_Pedestal; + sum += val; + if (val < min) + min = val; + } + sum -= (slend - slbeg) * min; + if (sum <= cut) + max = -1; + } + + return double(max); +} + + + + + + + + + + + + + + diff --git a/Trigger/TrigT1/TrigT1CaloMonitoring/src/PprMonitorAlgorithm.h 
b/Trigger/TrigT1/TrigT1CaloMonitoring/src/PprMonitorAlgorithm.h index 8e2d10fc6acc76a1e17b6911a71fe901e71e092b..1e387d67de8057b185f9f8f88f5f3092253c4277 100644 --- a/Trigger/TrigT1/TrigT1CaloMonitoring/src/PprMonitorAlgorithm.h +++ b/Trigger/TrigT1/TrigT1CaloMonitoring/src/PprMonitorAlgorithm.h @@ -21,6 +21,7 @@ public:PprMonitorAlgorithm( const std::string& name, ISvcLocator* pSvcLocator ); double phi1d; /// phi for 1d phi distributions (taking into account granularity in eta) int jepET; bool isDuplicate; /// Bookkeeping of multiple bins in phi for a given eta bin in the forward region + double maxADC; /// max ADC timeslice }; @@ -35,7 +36,9 @@ private: /// Properties Gaudi::Property<double> m_phiScaleTT{this, "phiScaleTT", 32./M_PI, "Scale factor to convert trigger tower phi to integer binning"}; Gaudi::Property<int> m_TT_ADC_HitMap_Thresh{this, "TT_ADC_HitMap_Thresh", 50, "ADC cut for hit maps"}; - + Gaudi::Property<int> m_SliceNo{this, "SliceNo", 15, "Number of possible time slices in the readout"}; + Gaudi::Property<int> m_EMFADCCut{this, "EMFADCCut", 40, "EM FADC cut for signal"}; + Gaudi::Property<int> m_TT_ADC_Pedestal{this, "ADCPedestal", 32, "Nominal pedestal value"}; /// Helper functions StatusCode fillPPMTowerEtaPhi( const xAOD::TriggerTower_v2* tt, @@ -43,5 +46,7 @@ private: std::vector<MonitorTT> &vecMonTT_HAD, std::vector<MonitorTT> &vecMonTT) const; + double recTime(const std::vector<short unsigned int> &vFADC, int cut) const; + }; #endif diff --git a/Trigger/TrigValidation/TrigAnalysisTest/share/ref_RDOtoRDOTrig_v1Dev_build.ref b/Trigger/TrigValidation/TrigAnalysisTest/share/ref_RDOtoRDOTrig_v1Dev_build.ref index b2f511800687305fbde82827a33b81c865806870..11c66571dfa97c2a8bae106e755da735b8cdd587 100644 --- a/Trigger/TrigValidation/TrigAnalysisTest/share/ref_RDOtoRDOTrig_v1Dev_build.ref +++ b/Trigger/TrigValidation/TrigAnalysisTest/share/ref_RDOtoRDOTrig_v1Dev_build.ref @@ -71,7 +71,7 @@ HLT_2g10_loose_mu20_L1MU20: stepFeatures: 0: 3 1: 3 - 2: 5 + 2: 
4 HLT_2g15_tight_dPhi15_L1DPHI-M70-2EM12I: eventCount: 0 HLT_2g20_tight_L12EM15VH: @@ -585,7 +585,7 @@ HLT_e17_lhloose_mu14_L1EM15VH_MU10: stepFeatures: 0: 4 1: 5 - 2: 6 + 2: 4 3: 4 4: 4 5: 4 @@ -603,7 +603,7 @@ HLT_e17_lhvloose_nod0_L1EM15VH: stepFeatures: 0: 6 1: 7 - 2: 7 + 2: 5 3: 5 4: 5 HLT_e17_lhvloose_nod0_L1EM15VHI: @@ -617,7 +617,7 @@ HLT_e17_lhvloose_nod0_L1EM15VHI: stepFeatures: 0: 5 1: 6 - 2: 6 + 2: 4 3: 4 4: 4 HLT_e20_lhmedium_e15_lhmedium_Zee_L12EM3: @@ -649,7 +649,7 @@ HLT_e24_lhvloose_L1EM20VH: stepFeatures: 0: 6 1: 7 - 2: 7 + 2: 5 3: 5 4: 5 HLT_e26_etcut_L1EM22VHI: @@ -675,7 +675,7 @@ HLT_e26_lhloose_L1EM15VH: stepFeatures: 0: 6 1: 7 - 2: 7 + 2: 5 3: 5 4: 5 HLT_e26_lhloose_L1EM22VHI: @@ -689,7 +689,7 @@ HLT_e26_lhloose_L1EM22VHI: stepFeatures: 0: 5 1: 6 - 2: 6 + 2: 4 3: 4 4: 4 HLT_e26_lhmedium_L1EM15VH: @@ -703,7 +703,7 @@ HLT_e26_lhmedium_L1EM15VH: stepFeatures: 0: 5 1: 6 - 2: 6 + 2: 4 3: 4 4: 4 HLT_e26_lhmedium_L1EM22VHI: @@ -717,7 +717,7 @@ HLT_e26_lhmedium_L1EM22VHI: stepFeatures: 0: 5 1: 6 - 2: 6 + 2: 4 3: 4 4: 4 HLT_e26_lhmedium_mu8noL1_L1EM22VHI: @@ -733,7 +733,7 @@ HLT_e26_lhmedium_mu8noL1_L1EM22VHI: stepFeatures: 0: 10 1: 10 - 2: 10 + 2: 8 3: 8 4: 8 5: 4 @@ -749,7 +749,7 @@ HLT_e26_lhtight_L1EM15VH: stepFeatures: 0: 5 1: 6 - 2: 6 + 2: 4 3: 4 4: 4 HLT_e26_lhtight_L1EM22VHI: @@ -763,7 +763,7 @@ HLT_e26_lhtight_L1EM22VHI: stepFeatures: 0: 5 1: 6 - 2: 6 + 2: 4 3: 4 4: 4 HLT_e26_lhtight_gsf_L1EM22VHI: @@ -777,7 +777,7 @@ HLT_e26_lhtight_gsf_L1EM22VHI: stepFeatures: 0: 5 1: 6 - 2: 6 + 2: 4 3: 4 4: 4 HLT_e26_lhtight_ivarloose_L1EM22VHI: @@ -791,7 +791,7 @@ HLT_e26_lhtight_ivarloose_L1EM22VHI: stepFeatures: 0: 5 1: 6 - 2: 6 + 2: 4 3: 4 4: 3 HLT_e26_lhtight_ivarmedium_L1EM22VHI: @@ -805,7 +805,7 @@ HLT_e26_lhtight_ivarmedium_L1EM22VHI: stepFeatures: 0: 5 1: 6 - 2: 6 + 2: 4 3: 4 4: 3 HLT_e26_lhtight_ivartight_L1EM22VHI: @@ -819,7 +819,7 @@ HLT_e26_lhtight_ivartight_L1EM22VHI: stepFeatures: 0: 5 1: 6 - 2: 6 + 2: 4 3: 4 4: 3 
HLT_e26_lhtight_nod0_L1EM22VHI: @@ -833,7 +833,7 @@ HLT_e26_lhtight_nod0_L1EM22VHI: stepFeatures: 0: 5 1: 6 - 2: 6 + 2: 4 3: 4 4: 4 HLT_e26_lhtight_nod0_L1EM24VHI: @@ -847,7 +847,7 @@ HLT_e26_lhtight_nod0_L1EM24VHI: stepFeatures: 0: 5 1: 6 - 2: 6 + 2: 4 3: 4 4: 4 HLT_e300_etcut_L1EM22VHI: @@ -896,9 +896,9 @@ HLT_e5_lhloose_L1EM3: 4: 6 stepFeatures: 0: 60 - 1: 146 - 2: 78 - 3: 44 + 1: 145 + 2: 81 + 3: 46 4: 7 HLT_e5_lhloose_noringer_L1EM3: eventCount: 6 @@ -910,8 +910,8 @@ HLT_e5_lhloose_noringer_L1EM3: 4: 6 stepFeatures: 0: 56 - 1: 130 - 2: 88 + 1: 128 + 2: 84 3: 46 4: 7 HLT_e5_lhmedium_L1EM3: @@ -926,7 +926,7 @@ HLT_e5_lhmedium_L1EM3: 0: 58 1: 132 2: 76 - 3: 42 + 3: 44 4: 5 HLT_e5_lhmedium_noringer_L1EM3: eventCount: 4 @@ -954,7 +954,7 @@ HLT_e5_lhtight_L1EM3: 0: 57 1: 129 2: 74 - 3: 41 + 3: 43 4: 5 HLT_e5_lhtight_nod0_L1EM3: eventCount: 4 @@ -968,7 +968,7 @@ HLT_e5_lhtight_nod0_L1EM3: 0: 57 1: 129 2: 74 - 3: 41 + 3: 43 4: 5 HLT_e5_lhtight_noringer_L1EM3: eventCount: 4 @@ -980,8 +980,8 @@ HLT_e5_lhtight_noringer_L1EM3: 4: 4 stepFeatures: 0: 45 - 1: 94 - 2: 60 + 1: 92 + 2: 61 3: 36 4: 5 HLT_e5_lhtight_noringer_nod0_L1EM3: @@ -994,8 +994,8 @@ HLT_e5_lhtight_noringer_nod0_L1EM3: 4: 4 stepFeatures: 0: 45 - 1: 94 - 2: 60 + 1: 92 + 2: 61 3: 36 4: 5 HLT_e60_lhmedium_L1EM22VHI: @@ -1009,7 +1009,7 @@ HLT_e60_lhmedium_L1EM22VHI: stepFeatures: 0: 2 1: 2 - 2: 4 + 2: 2 3: 2 4: 2 HLT_e60_lhmedium_nod0_L1EM22VHI: @@ -1023,7 +1023,7 @@ HLT_e60_lhmedium_nod0_L1EM22VHI: stepFeatures: 0: 2 1: 2 - 2: 4 + 2: 2 3: 2 4: 2 HLT_e7_etcut_L1EM3: @@ -1053,7 +1053,7 @@ HLT_e7_lhmedium_mu24_L1MU20: stepFeatures: 0: 18 1: 17 - 2: 17 + 2: 15 3: 14 4: 4 5: 4 @@ -1085,7 +1085,7 @@ HLT_g12_loose_LArPEB_L1EM10VH: stepFeatures: 0: 11 1: 11 - 2: 24 + 2: 22 3: 6 4: 6 HLT_g140_etcut_L1EM22VHI: @@ -1104,7 +1104,7 @@ HLT_g20_loose_L1EM15VH: stepFeatures: 0: 9 1: 9 - 2: 17 + 2: 15 3: 6 HLT_g20_loose_L1EM15VHI: eventCount: 5 @@ -1116,7 +1116,7 @@ HLT_g20_loose_L1EM15VHI: stepFeatures: 0: 6 1: 6 - 2: 11 + 2: 
9 3: 5 HLT_g20_loose_LArPEB_L1EM15: eventCount: 6 @@ -1129,7 +1129,7 @@ HLT_g20_loose_LArPEB_L1EM15: stepFeatures: 0: 9 1: 9 - 2: 17 + 2: 15 3: 6 4: 6 HLT_g20_medium_L1EM15VH: @@ -1142,7 +1142,7 @@ HLT_g20_medium_L1EM15VH: stepFeatures: 0: 8 1: 8 - 2: 15 + 2: 14 3: 6 HLT_g20_medium_L1EM15VHI: eventCount: 5 @@ -1154,7 +1154,7 @@ HLT_g20_medium_L1EM15VHI: stepFeatures: 0: 6 1: 6 - 2: 11 + 2: 9 3: 5 HLT_g20_tight_L1EM15VH: eventCount: 5 @@ -1166,7 +1166,7 @@ HLT_g20_tight_L1EM15VH: stepFeatures: 0: 8 1: 8 - 2: 15 + 2: 14 3: 5 HLT_g20_tight_L1EM15VHI: eventCount: 5 @@ -1178,7 +1178,7 @@ HLT_g20_tight_L1EM15VHI: stepFeatures: 0: 6 1: 6 - 2: 11 + 2: 9 3: 5 HLT_g20_tight_icaloloose_L1EM15VH: eventCount: 5 @@ -1190,7 +1190,7 @@ HLT_g20_tight_icaloloose_L1EM15VH: stepFeatures: 0: 8 1: 8 - 2: 15 + 2: 14 3: 5 HLT_g20_tight_icaloloose_L1EM15VHI: eventCount: 5 @@ -1202,7 +1202,7 @@ HLT_g20_tight_icaloloose_L1EM15VHI: stepFeatures: 0: 6 1: 6 - 2: 11 + 2: 9 3: 5 HLT_g20_tight_icalomedium_L1EM15VH: eventCount: 5 @@ -1214,7 +1214,7 @@ HLT_g20_tight_icalomedium_L1EM15VH: stepFeatures: 0: 8 1: 8 - 2: 15 + 2: 14 3: 5 HLT_g20_tight_icalomedium_L1EM15VHI: eventCount: 5 @@ -1226,7 +1226,7 @@ HLT_g20_tight_icalomedium_L1EM15VHI: stepFeatures: 0: 6 1: 6 - 2: 11 + 2: 9 3: 5 HLT_g20_tight_icalotight_L1EM15VH: eventCount: 0 @@ -1237,7 +1237,7 @@ HLT_g20_tight_icalotight_L1EM15VH: stepFeatures: 0: 8 1: 8 - 2: 15 + 2: 14 HLT_g20_tight_icalotight_L1EM15VHI: eventCount: 0 stepCounts: @@ -1247,7 +1247,7 @@ HLT_g20_tight_icalotight_L1EM15VHI: stepFeatures: 0: 6 1: 6 - 2: 11 + 2: 9 HLT_g22_tight_L1EM15VH: eventCount: 5 stepCounts: @@ -1258,7 +1258,7 @@ HLT_g22_tight_L1EM15VH: stepFeatures: 0: 7 1: 7 - 2: 12 + 2: 11 3: 5 HLT_g25_etcut_L1EM20VH: eventCount: 7 @@ -1280,7 +1280,7 @@ HLT_g25_loose_L1EM20VH: stepFeatures: 0: 9 1: 9 - 2: 17 + 2: 15 3: 7 HLT_g25_medium_L1EM20VH: eventCount: 6 @@ -1292,7 +1292,7 @@ HLT_g25_medium_L1EM20VH: stepFeatures: 0: 7 1: 7 - 2: 12 + 2: 11 3: 6 
HLT_g25_medium_mu24_ivarmedium_L1MU20: eventCount: 0 @@ -1308,7 +1308,7 @@ HLT_g25_medium_mu24_ivarmedium_L1MU20: stepFeatures: 0: 4 1: 4 - 2: 6 + 2: 4 3: 4 4: 4 5: 2 @@ -1324,7 +1324,7 @@ HLT_g25_tight_L1EM20VH: stepFeatures: 0: 7 1: 7 - 2: 12 + 2: 11 3: 5 HLT_g300_etcut_L1EM22VHI: eventCount: 0 @@ -1342,7 +1342,7 @@ HLT_g35_loose_mu18_L1EM24VHI: stepFeatures: 0: 4 1: 4 - 2: 6 + 2: 4 3: 4 4: 4 5: 2 @@ -1359,7 +1359,7 @@ HLT_g35_tight_icalotight_mu18noL1_L1EM22VHI: stepFeatures: 0: 10 1: 10 - 2: 13 + 2: 11 HLT_g3_loose_LArPEB_L1EM3: eventCount: 9 stepCounts: @@ -1385,7 +1385,7 @@ HLT_g40_loose_LArPEB_L1EM20VHI: stepFeatures: 0: 5 1: 5 - 2: 10 + 2: 8 3: 5 4: 5 HLT_g5_etcut_L1EM3: @@ -1408,7 +1408,7 @@ HLT_g5_loose_L1EM3: stepFeatures: 0: 56 1: 56 - 2: 103 + 2: 101 3: 14 HLT_g5_medium_L1EM3: eventCount: 9 @@ -1420,7 +1420,7 @@ HLT_g5_medium_L1EM3: stepFeatures: 0: 48 1: 48 - 2: 81 + 2: 83 3: 12 HLT_g5_tight_L1EM3: eventCount: 8 @@ -1432,7 +1432,7 @@ HLT_g5_tight_L1EM3: stepFeatures: 0: 45 1: 45 - 2: 73 + 2: 76 3: 8 HLT_g60_loose_LArPEB_L1EM20VHI: eventCount: 3 @@ -1445,7 +1445,7 @@ HLT_g60_loose_LArPEB_L1EM20VHI: stepFeatures: 0: 3 1: 3 - 2: 7 + 2: 5 3: 3 4: 3 HLT_g80_loose_LArPEB_L1EM20VHI: @@ -1570,15 +1570,15 @@ HLT_j420_subresjesgscIS_ftf_L1J100: 0: 3 HLT_j45_L1J15: eventCount: 0 -HLT_j45_csskpf_nojcalib_ftf_L1J20: +HLT_j45_csskpf_nojcalib_ftf_L1J15: eventCount: 17 stepCounts: - 0: 19 + 0: 20 1: 17 stepFeatures: - 0: 19 + 0: 20 1: 37 -HLT_j45_cssktc_nojcalib_L1J20: +HLT_j45_cssktc_nojcalib_L1J15: eventCount: 15 stepCounts: 0: 15 @@ -1592,75 +1592,75 @@ HLT_j45_ftf_L1J15: stepFeatures: 0: 20 1: 55 -HLT_j45_ftf_preselj20_L1J20: +HLT_j45_ftf_preselj20_L1J15: eventCount: 19 stepCounts: - 0: 19 + 0: 20 1: 19 stepFeatures: - 0: 19 + 0: 20 1: 55 -HLT_j45_nojcalib_L1J20: +HLT_j45_nojcalib_L1J15: eventCount: 17 stepCounts: 0: 17 stepFeatures: 0: 39 -HLT_j45_pf_ftf_010jvt_L1J20: +HLT_j45_pf_ftf_010jvt_L1J15: eventCount: 19 stepCounts: - 0: 19 + 0: 20 1: 19 stepFeatures: - 
0: 19 + 0: 20 1: 55 -HLT_j45_pf_ftf_020jvt_L1J20: +HLT_j45_pf_ftf_020jvt_L1J15: eventCount: 19 stepCounts: - 0: 19 + 0: 20 1: 19 stepFeatures: - 0: 19 + 0: 20 1: 54 -HLT_j45_pf_ftf_050jvt_L1J20: +HLT_j45_pf_ftf_050jvt_L1J15: eventCount: 19 stepCounts: - 0: 19 + 0: 20 1: 19 stepFeatures: - 0: 19 + 0: 20 1: 54 -HLT_j45_pf_ftf_L1J20: +HLT_j45_pf_ftf_L1J15: eventCount: 19 stepCounts: - 0: 19 + 0: 20 1: 19 stepFeatures: - 0: 19 + 0: 20 1: 56 -HLT_j45_pf_ftf_preselj20_L1J20: +HLT_j45_pf_ftf_preselj20_L1J15: eventCount: 19 stepCounts: - 0: 19 + 0: 20 1: 19 stepFeatures: - 0: 19 + 0: 20 1: 56 -HLT_j45_pf_nojcalib_ftf_L1J20: +HLT_j45_pf_nojcalib_ftf_L1J15: eventCount: 18 stepCounts: - 0: 19 + 0: 20 1: 18 stepFeatures: - 0: 19 + 0: 20 1: 49 -HLT_j45_pf_subjesgscIS_ftf_L1J20: +HLT_j45_pf_subjesgscIS_ftf_L1J15: eventCount: 19 stepCounts: - 0: 19 + 0: 20 1: 19 stepFeatures: - 0: 19 + 0: 20 1: 52 HLT_j45_pf_subjesgscIS_ftf_bdl1r70_split_L1J20: eventCount: 14 @@ -1682,19 +1682,19 @@ HLT_j45_pf_subjesgscIS_ftf_boffperf_split_L1J20: 0: 19 1: 49 2: 49 -HLT_j45_sktc_nojcalib_L1J20: +HLT_j45_sktc_nojcalib_L1J15: eventCount: 15 stepCounts: 0: 15 stepFeatures: 0: 26 -HLT_j45_subjesIS_ftf_preselj20_L1J20: +HLT_j45_subjesIS_ftf_preselj20_L1J15: eventCount: 19 stepCounts: - 0: 19 + 0: 20 1: 19 stepFeatures: - 0: 19 + 0: 20 1: 50 HLT_j45_subjesgscIS_ftf_011jvt_L1J15: eventCount: 18 @@ -2222,7 +2222,7 @@ HLT_mu6_L1MU6: 1: 12 2: 13 3: 13 -HLT_mu6_idperfLRT_l2lrt_L1MU6: +HLT_mu6_LRT_idperf_l2lrt_L1MU6: eventCount: 10 stepCounts: 0: 10 diff --git a/Trigger/TrigValidation/TrigInDetValidation/python/TrigInDetNewArtSteps.py b/Trigger/TrigValidation/TrigInDetValidation/python/TrigInDetNewArtSteps.py index f36db2004b19a85072b1e23f66e649aaf443b880..9be14b5c66a8980c05f5a861ae2cb02aca59ce53 100644 --- a/Trigger/TrigValidation/TrigInDetValidation/python/TrigInDetNewArtSteps.py +++ b/Trigger/TrigValidation/TrigInDetValidation/python/TrigInDetNewArtSteps.py @@ -35,6 +35,7 @@ class TrigInDetReco(ExecStep): 
self.slices = [] self.preexec_trig = ' ' self.postinclude_trig = postinclude_file + self.release = 'latest' self.preexec_reco = ';'.join([ 'from RecExConfig.RecFlags import rec', 'rec.doForwardDet=False', @@ -60,21 +61,8 @@ class TrigInDetReco(ExecStep): self.postexec_trig = "from AthenaCommon.AppMgr import ServiceMgr; ServiceMgr.AthenaPoolCnvSvc.MaxFileSizes=['tmp.RDO_TRIG=100000000000']" - # get the cuttent atlas base release, and the previous base release - import os - DVERSION=os.getenv('Athena_VERSION') - if ( DVERSION is None ) : - AVERSION = "22.0.20" - else : - BASE=DVERSION[:5] - SUB=int(DVERSION[5:]) - SUB -= 1 - AVERSION=BASE+str(SUB) - - print( "remapping athena base release version: ", DVERSION, " -> ", AVERSION ) - self.postexec_reco = "from AthenaCommon.AppMgr import ServiceMgr; ServiceMgr.AthenaPoolCnvSvc.MaxFileSizes=['tmp.ESD=100000000000']" - self.args = '--outputAODFile=AOD.pool.root --steering="doRDO_TRIG" --asetup "RAWtoESD:Athena,'+AVERSION+'" "ESDtoAOD:Athena,'+AVERSION+'" ' + self.args = '--outputAODFile=AOD.pool.root --steering="doRDO_TRIG"' def configure(self, test): @@ -114,6 +102,25 @@ class TrigInDetReco(ExecStep): chains += ']' self.preexec_trig = 'doEmptyMenu=True;'+flags+'selectChains='+chains + if (self.release == 'current'): + print( "Using current release for offline Reco steps " ) + else: + # get the current atlas base release, and the previous base release + import os + DVERSION=os.getenv('Athena_VERSION') + if (self.release == 'latest'): + if ( DVERSION is None ) : + AVERSION = "22.0.20" + else: + BASE=DVERSION[:5] + SUB=int(DVERSION[5:]) + SUB -= 1 + AVERSION=BASE+str(SUB) + else: + AVERSION = self.release + self.args += ' --asetup "RAWtoESD:Athena,'+AVERSION+'" "ESDtoAOD:Athena,'+AVERSION+'" ' + print( "remapping athena base release version for offline Reco steps: ", DVERSION, " -> ", AVERSION ) + self.args += ' --preExec "RDOtoRDOTrigger:{:s};" "all:{:s};" "RAWtoESD:{:s};" "ESDtoAOD:{:s};"'.format( self.preexec_trig, 
self.preexec_all, self.preexec_reco, self.preexec_aod) diff --git a/Trigger/TrigValidation/TrigInDetValidation/share/TrigInDetValidation_AODtoTrkNtuple.py b/Trigger/TrigValidation/TrigInDetValidation/share/TrigInDetValidation_AODtoTrkNtuple.py index 5b204531fcb59da973f193cf859a11e9fe1aec87..36bc74755080b0dd35d6fae1580bc808c744f612 100644 --- a/Trigger/TrigValidation/TrigInDetValidation/share/TrigInDetValidation_AODtoTrkNtuple.py +++ b/Trigger/TrigValidation/TrigInDetValidation/share/TrigInDetValidation_AODtoTrkNtuple.py @@ -134,22 +134,10 @@ if ( True ) : "Taus:Medium:1Prong", "Taus:Tight:1Prong", - - # "HLT_e.*idperf.*:InDetTrigTrackingxAODCnv_Electron_FTF", - # "HLT_e.*idperf.*:InDetTrigTrackingxAODCnv_Electron_IDTrig", - # "HLT_mu.*_idperf.*:InDetTrigTrackingxAODCnv_Muon_IDTrig;DTE", - # "HLT_mu.*idperf.*:InDetTrigTrackingxAODCnv_Muon_IDTrig", - # "HLT_tau.*idperf.*track:key=InDetTrigTrackingxAODCnv_Tau_IDTrig", - # "HLT_tau.*idperf.*track:key=InDetTrigTrackingxAODCnv_Tau_FTF", - # "HLT_tau.*idperf.*tracktwo:key=InDetTrigTrackingxAODCnv_TauCore_FTF:roi=forID1", - # "HLT_tau.*idperf.*tracktwo:key=InDetTrigTrackingxAODCnv_TauIso_FTF:roi=forID3", - # "HLT_tau.*idperf.*tracktwo:key=InDetTrigTrackingxAODCnv_Tau_IDTrig:roi=forID3", - - # "HLT_tau.*idperf.*tracktwo:key=InDetTrigTrackingxAODCnv_TauCore_FTF:roi=forID1", - # "HLT_tau.*idperf.*tracktwo:key=InDetTrigTrackingxAODCnv_TauIso_FTF:roi=forID3", - # "HLT_tau.*idperf.*tracktwo:key=InDetTrigTrackingxAODCnv_Tau_IDTrig:roi=forID3", - + "Electron:Tight", + "Electron:Medium", + # ":HLT_IDTrack_FS_FTF", # ":HLT_IDTrack_FS_FTF:roi=HLT_FSRoI:vtx=HLT_IDVertex_FS", @@ -172,13 +160,14 @@ if ( True ) : # "HLT_e.*_etcut.*:HLT_IDTrack_Electron_FTF", # "HLT_e.*_etcut.*:HLT_IDTrack_Electron_IDTrig", - "HLT_e.*:HLT_IDTrack_Electron_FTF", + "HLT_e.*:HLT_IDTrack_Electron_FTF:roi=HLT_Roi_FastElectron", "HLT_e.*:HLT_IDTrack_Electron_IDTrig", # two stage tau FTF "HLT_tau.*_idperf.*tracktwo.*:HLT_IDTrack_TauCore_FTF:roi=HLT_Roi_TauCore", 
"HLT_tau.*_idperf.*tracktwo.*:HLT_IDTrack_TauIso_FTF:roi=HLT_Roi_TauIso", + "HLT_tau.*_idperf.*tracktwo.*:HLT_IDTrack_TauIso_FTF:roi=HLT_Roi_TauIsoBDT", # two stage tau precision tracking - empty ??? "HLT_tau.*_idperf.*tracktwo.*:HLT_IDTrack_Tau_IDTrig:roi=HLT_Roi_TauIso", @@ -189,7 +178,6 @@ if ( True ) : "HLT_tau.*_idperf.*_track_.*:HLT_IDTrack_Tau_IDTrig:roi=HLT_Roi_Tau", - # none of these will work "HLT_tau.*_idperf.*:HLT_IDTrack_Tau_IDTrig", diff --git a/Trigger/TrigValidation/TrigInDetValidation/share/TrigInDetValidation_NewBase.py b/Trigger/TrigValidation/TrigInDetValidation/share/TrigInDetValidation_NewBase.py index afbbee8fe1517ac2b4ab9d19595a00183dc151a5..d2de1c9dfbb707bef57e9bf83a1eac32a52970af 100755 --- a/Trigger/TrigValidation/TrigInDetValidation/share/TrigInDetValidation_NewBase.py +++ b/Trigger/TrigValidation/TrigInDetValidation/share/TrigInDetValidation_NewBase.py @@ -32,7 +32,7 @@ except getopt.GetoptError: print("-n N run only on N events per job") print("-c(--config) run with config_only and print to a pkl file") print("") - + sys.exit(1) Events_local = 0 local = False @@ -73,7 +73,8 @@ rdo2aod.slices = Slices rdo2aod.threads = Threads rdo2aod.concurrent_events = Slots rdo2aod.config_only = testconfig - +if 'Release' in dir(): + rdo2aod.release = Release if "Args" not in locals() : diff --git a/Trigger/TrigValidation/TrigInDetValidation/test/test_trigID_all_ttbar_pu40_new.py b/Trigger/TrigValidation/TrigInDetValidation/test/test_trigID_all_ttbar_pu40_new.py index 65623557b83d2a53f8c2e41c9d2d65e89af9f97d..a510619a43950a9c541882e14d09734cfa890819 100755 --- a/Trigger/TrigValidation/TrigInDetValidation/test/test_trigID_all_ttbar_pu40_new.py +++ b/Trigger/TrigValidation/TrigInDetValidation/test/test_trigID_all_ttbar_pu40_new.py @@ -32,20 +32,21 @@ Threads = 8 Slots = 8 Input = 'ttbar_ID' # defined in TrigValTools/share/TrigValInputs.json -Jobs = [ ( "Offline", " TIDAdata-run3-offline.dat -o data-hists.root" ) ] +Jobs = [ ( "Offline", " 
TIDAdata-run3-offline.dat -o data-hists-offline.root" ), + ( "OfflineVtx", " TIDAdata-run3-offline-vtx.dat -o data-hists-offline-vtx.root" ) ] +Comp = [ ( "L2muon", "L2muon", "data-hists-offline.root", " -c TIDAhisto-panel.dat -d HLTL2-plots-muon " ), + ( "L2electron", "L2electron", "data-hists-offline.root", " -c TIDAhisto-panel.dat -d HLTL2-plots-electron " ), + ( "L2tau", "L2tau", "data-hists-offline.root", " -c TIDAhisto-panel.dat -d HLTL2-plots-tau " ), + ( "L2bjet", "L2bjet", "data-hists-offline.root", " -c TIDAhisto-panel.dat -d HLTL2-plots-bjet " ), + ( "FSjetoffline", "L2fsjet", "data-hists-offline.root", " -c TIDAhisto-panel.dat -d HLTL2-plots-FS " ), + ( "FSvtx", "L2fsjetvtx", "data-hists-offline-vtx.root", " -c TIDAhisto-panel-vtx.dat -d HLTL2-plots-vtx --ncols 3" ), + ( "FSvtxall", "L2fsjetvtx", "data-hists-offline.root", " -c TIDAhisto-panel-vtx.dat -d HLTL2-plots-vtxall --ncols 3" ), -Comp = [ ( "L2muon", "L2muon", "data-hists.root", " -c TIDAhisto-panel.dat -d HLTL2-plots-muon " ), - ( "L2electron", "L2electron", "data-hists.root", " -c TIDAhisto-panel.dat -d HLTL2-plots-electron " ), - ( "L2tau", "L2tau", "data-hists.root", " -c TIDAhisto-panel.dat -d HLTL2-plots-tau " ), - ( "L2bjet", "L2bjet", "data-hists.root", " -c TIDAhisto-panel.dat -d HLTL2-plots-bjet " ), - ( "FSjetoffline", "L2fsjet", "data-hists.root", " -c TIDAhisto-panel.dat -d HLTL2-plots-FS " ), - ( "FSvtx", "L2fsjetvtx", "data-hists.root", " -c TIDAhistos-vtx.dat -d HLTL2-plots-vtx " ), - - ( "EFmuon", "EFmuon", "data-hists.root", " -c TIDAhisto-panel.dat -d HLTEF-plots-muon " ), - ( "EFelectron", "EFelectron", "data-hists.root", " -c TIDAhisto-panel.dat -d HLTEF-plots-electron " ), - ( "EFtau", "EFtau", "data-hists.root", " -c TIDAhisto-panel.dat -d HLTEF-plots-tau " ), - ( "EFbjet", "EFmuon", "data-hists.root", " -c TIDAhisto-panel.dat -d HLTEF-plots-bjet " ) ] + ( "EFmuon", "EFmuon", "data-hists-offline.root", " -c TIDAhisto-panel.dat -d HLTEF-plots-muon " ), + ( "EFelectron", 
"EFelectron", "data-hists-offline.root", " -c TIDAhisto-panel.dat -d HLTEF-plots-electron " ), + ( "EFtau", "EFtau", "data-hists-offline.root", " -c TIDAhisto-panel.dat -d HLTEF-plots-tau " ), + ( "EFbjet", "EFmuon", "data-hists-offline.root", " -c TIDAhisto-panel.dat -d HLTEF-plots-bjet " ) ] from AthenaCommon.Include import include include("TrigInDetValidation/TrigInDetValidation_NewBase.py") diff --git a/Trigger/TrigValidation/TriggerTest/share/ref_data_v1Dev_build.ref b/Trigger/TrigValidation/TriggerTest/share/ref_data_v1Dev_build.ref index f55a38a8f6e371abf458eda87e21727c9b4a5484..571575bba8b21bcd209ca1dee45c2257cb1f3f7f 100644 --- a/Trigger/TrigValidation/TriggerTest/share/ref_data_v1Dev_build.ref +++ b/Trigger/TrigValidation/TriggerTest/share/ref_data_v1Dev_build.ref @@ -429,7 +429,7 @@ HLT_e5_lhloose_L1EM3: 0: 19 1: 22 2: 13 - 3: 9 + 3: 10 HLT_e5_lhloose_noringer_L1EM3: eventCount: 0 stepCounts: @@ -440,7 +440,7 @@ HLT_e5_lhloose_noringer_L1EM3: stepFeatures: 0: 15 1: 22 - 2: 23 + 2: 22 3: 12 HLT_e5_lhmedium_L1EM3: eventCount: 0 @@ -452,8 +452,8 @@ HLT_e5_lhmedium_L1EM3: stepFeatures: 0: 21 1: 24 - 2: 15 - 3: 11 + 2: 16 + 3: 12 HLT_e5_lhmedium_noringer_L1EM3: eventCount: 0 stepCounts: @@ -476,8 +476,8 @@ HLT_e5_lhtight_L1EM3: stepFeatures: 0: 17 1: 17 - 2: 10 - 3: 7 + 2: 11 + 3: 8 HLT_e5_lhtight_nod0_L1EM3: eventCount: 0 stepCounts: @@ -488,8 +488,8 @@ HLT_e5_lhtight_nod0_L1EM3: stepFeatures: 0: 17 1: 17 - 2: 10 - 3: 7 + 2: 11 + 3: 8 HLT_e5_lhtight_noringer_L1EM3: eventCount: 0 stepCounts: @@ -652,7 +652,7 @@ HLT_g5_loose_L1EM3: stepFeatures: 0: 15 1: 15 - 2: 28 + 2: 27 3: 3 HLT_g5_medium_L1EM3: eventCount: 3 @@ -738,15 +738,15 @@ HLT_j45_L1J15: 0: 6 stepFeatures: 0: 6 -HLT_j45_csskpf_nojcalib_ftf_L1J20: +HLT_j45_csskpf_nojcalib_ftf_L1J15: eventCount: 3 stepCounts: - 0: 5 + 0: 7 1: 3 stepFeatures: - 0: 5 + 0: 7 1: 3 -HLT_j45_cssktc_nojcalib_L1J20: +HLT_j45_cssktc_nojcalib_L1J15: eventCount: 3 stepCounts: 0: 3 @@ -760,76 +760,76 @@ HLT_j45_ftf_L1J15: 
stepFeatures: 0: 7 1: 7 -HLT_j45_ftf_preselj20_L1J20: - eventCount: 5 +HLT_j45_ftf_preselj20_L1J15: + eventCount: 7 stepCounts: - 0: 5 - 1: 5 + 0: 7 + 1: 7 stepFeatures: - 0: 5 - 1: 5 -HLT_j45_nojcalib_L1J20: + 0: 7 + 1: 7 +HLT_j45_nojcalib_L1J15: eventCount: 3 stepCounts: 0: 3 stepFeatures: 0: 3 -HLT_j45_pf_ftf_010jvt_L1J20: - eventCount: 5 +HLT_j45_pf_ftf_010jvt_L1J15: + eventCount: 7 stepCounts: - 0: 5 - 1: 5 + 0: 7 + 1: 7 stepFeatures: - 0: 5 - 1: 5 -HLT_j45_pf_ftf_020jvt_L1J20: - eventCount: 5 + 0: 7 + 1: 7 +HLT_j45_pf_ftf_020jvt_L1J15: + eventCount: 7 stepCounts: - 0: 5 - 1: 5 + 0: 7 + 1: 7 stepFeatures: - 0: 5 - 1: 5 -HLT_j45_pf_ftf_050jvt_L1J20: - eventCount: 5 + 0: 7 + 1: 7 +HLT_j45_pf_ftf_050jvt_L1J15: + eventCount: 7 stepCounts: - 0: 5 - 1: 5 + 0: 7 + 1: 7 stepFeatures: - 0: 5 - 1: 5 -HLT_j45_pf_ftf_L1J20: - eventCount: 5 + 0: 7 + 1: 7 +HLT_j45_pf_ftf_L1J15: + eventCount: 7 stepCounts: - 0: 5 - 1: 5 + 0: 7 + 1: 7 stepFeatures: - 0: 5 - 1: 5 -HLT_j45_pf_ftf_preselj20_L1J20: - eventCount: 5 + 0: 7 + 1: 7 +HLT_j45_pf_ftf_preselj20_L1J15: + eventCount: 7 stepCounts: - 0: 5 - 1: 5 + 0: 7 + 1: 7 stepFeatures: - 0: 5 - 1: 5 -HLT_j45_pf_nojcalib_ftf_L1J20: - eventCount: 3 + 0: 7 + 1: 7 +HLT_j45_pf_nojcalib_ftf_L1J15: + eventCount: 4 stepCounts: - 0: 5 - 1: 3 + 0: 7 + 1: 4 stepFeatures: - 0: 5 - 1: 3 -HLT_j45_pf_subjesgscIS_ftf_L1J20: - eventCount: 5 + 0: 7 + 1: 4 +HLT_j45_pf_subjesgscIS_ftf_L1J15: + eventCount: 6 stepCounts: - 0: 5 - 1: 5 + 0: 7 + 1: 6 stepFeatures: - 0: 5 - 1: 5 + 0: 7 + 1: 6 HLT_j45_pf_subjesgscIS_ftf_bdl1r70_split_L1J20: eventCount: 0 stepCounts: @@ -848,20 +848,20 @@ HLT_j45_pf_subjesgscIS_ftf_boffperf_split_L1J20: 0: 5 1: 5 2: 5 -HLT_j45_sktc_nojcalib_L1J20: +HLT_j45_sktc_nojcalib_L1J15: eventCount: 3 stepCounts: 0: 3 stepFeatures: 0: 3 -HLT_j45_subjesIS_ftf_preselj20_L1J20: - eventCount: 5 +HLT_j45_subjesIS_ftf_preselj20_L1J15: + eventCount: 6 stepCounts: - 0: 5 - 1: 5 + 0: 7 + 1: 6 stepFeatures: - 0: 5 - 1: 5 + 0: 7 + 1: 6 
HLT_j45_subjesgscIS_ftf_011jvt_L1J15: eventCount: 6 stepCounts: @@ -1188,7 +1188,7 @@ HLT_mu6_L1MU6: stepFeatures: 0: 1 1: 1 -HLT_mu6_idperfLRT_l2lrt_L1MU6: +HLT_mu6_LRT_idperf_l2lrt_L1MU6: eventCount: 1 stepCounts: 0: 1 diff --git a/Trigger/TriggerCommon/TrigEDMConfig/python/TriggerEDMRun3.py b/Trigger/TriggerCommon/TrigEDMConfig/python/TriggerEDMRun3.py index 36f1a86f5704216cd2e5e2fcdf1faa320bde3c66..2b2c6f4cb2d3f994443a7c30bcd88eb6f2132bd4 100644 --- a/Trigger/TriggerCommon/TrigEDMConfig/python/TriggerEDMRun3.py +++ b/Trigger/TriggerCommon/TrigEDMConfig/python/TriggerEDMRun3.py @@ -291,13 +291,13 @@ TriggerHLTListRun3 = [ ('xAOD::TrackParticleContainer#HLT_IDTrack_TauCore_FTF', 'BS ESD AODFULL', 'Tau', 'inViews:TAUFTFCoreViews'), ('xAOD::TrackParticleAuxContainer#HLT_IDTrack_TauCore_FTFAux.', 'BS ESD AODFULL', 'Tau'), - ('xAOD::TrackParticleContainer#HLT_IDTrack_TauIso_FTF', 'BS ESD AODFULL', 'Tau', 'inViews:TAUFTFIsoViews,TAUEFViews'), + ('xAOD::TrackParticleContainer#HLT_IDTrack_TauIso_FTF', 'BS ESD AODFULL', 'Tau', 'inViews:TAUFTFIsoViews,TAUEFViews,TAUFTFIsoBDTViews'), ('xAOD::TrackParticleAuxContainer#HLT_IDTrack_TauIso_FTFAux.', 'BS ESD AODFULL', 'Tau'), ('xAOD::TrackParticleContainer#HLT_IDTrack_Tau_FTF', 'BS ESD AODFULL', 'Tau', 'inViews:TAUFTFIdViews'), ('xAOD::TrackParticleAuxContainer#HLT_IDTrack_Tau_FTFAux.', 'BS ESD AODFULL', 'Tau'), - ('xAOD::TrackParticleContainer#HLT_IDTrack_Tau_IDTrig', 'BS ESD AODFULL', 'Tau', 'inViews:TAUFTFIdViews'), + ('xAOD::TrackParticleContainer#HLT_IDTrack_Tau_IDTrig', 'BS ESD AODFULL', 'Tau', 'inViews:TAUFTFIdViews,TAUPrecIsoViews'), ('xAOD::TrackParticleAuxContainer#HLT_IDTrack_Tau_IDTrigAux.', 'BS ESD AODFULL', 'Tau'), ('TrigRoiDescriptorCollection#HLT_Roi_Tau', 'BS ESD AODFULL AODSLIM', 'Steer'), diff --git a/Trigger/TriggerCommon/TriggerJobOpts/python/HLTTriggerGetter.py b/Trigger/TriggerCommon/TriggerJobOpts/python/HLTTriggerGetter.py index 
a5b2806068a0f2f277175cade788e023c61f592c..3ff97c437a6bd13b55c14dc39e40014d84da6094 100644 --- a/Trigger/TriggerCommon/TriggerJobOpts/python/HLTTriggerGetter.py +++ b/Trigger/TriggerCommon/TriggerJobOpts/python/HLTTriggerGetter.py @@ -145,7 +145,9 @@ class HLTSimulationGetter(Configured): log.info("Loading RegionSelector") from AthenaCommon.AppMgr import ServiceMgr from RegionSelector.RegSelSvcDefault import RegSelSvcDefault - ServiceMgr += RegSelSvcDefault() + regsel = RegSelSvcDefault() + regsel.enableCalo = TriggerFlags.doCalo() + ServiceMgr += regsel # Configure the Data Preparation for Calo if TriggerFlags.doCalo(): diff --git a/Trigger/TriggerCommon/TriggerJobOpts/python/HLTTriggerResultGetter.py b/Trigger/TriggerCommon/TriggerJobOpts/python/HLTTriggerResultGetter.py index a7378fe507517ac42094f83b9f23ccc823687df2..1fbbeb989f33fafe686cd277adc82637a5f22dbf 100644 --- a/Trigger/TriggerCommon/TriggerJobOpts/python/HLTTriggerResultGetter.py +++ b/Trigger/TriggerCommon/TriggerJobOpts/python/HLTTriggerResultGetter.py @@ -8,7 +8,6 @@ from AthenaCommon.GlobalFlags import globalflags from AthenaCommon.AppMgr import ServiceMgr from RecExConfig.Configured import Configured -from RecExConfig.RecAlgsFlags import recAlgs from RecExConfig.RecFlags import rec from TrigRoiConversion.TrigRoiConversionConf import RoiWriter @@ -305,11 +304,10 @@ class HLTTriggerResultGetter(Configured): xaodcnvrt = xAODConversionGetter() xAODContainers = xaodcnvrt.xaodlist - if recAlgs.doTrigger() or TriggerFlags.doTriggerConfigOnly(): - if ConfigFlags.Trigger.EDMVersion <= 2: - tdt = TrigDecisionGetterRun2() # noqa: F841 - else: - tdt = TrigDecisionGetter() # noqa: F841 + if ConfigFlags.Trigger.EDMVersion <= 2 and (rec.doTrigger() or TriggerFlags.doTriggerConfigOnly()): + tdt = TrigDecisionGetterRun2() # noqa: F841 + elif ConfigFlags.Trigger.EDMVersion >= 3 and TriggerFlags.readBS(): + tdt = TrigDecisionGetter() # noqa: F841 # Temporary hack to add Run-3 navigation to ESD and AOD if (rec.doESD() 
or rec.doAOD()) and ConfigFlags.Trigger.EDMVersion == 3: diff --git a/Trigger/TriggerCommon/TriggerJobOpts/python/Lvl1ResultBuilderGetter.py b/Trigger/TriggerCommon/TriggerJobOpts/python/Lvl1ResultBuilderGetter.py index 510671ab213a699f80ba77e6aaba454e1e0fb0f0..f5a3b362b8bb6086447ac733b1e2bfdd5f3dfe51 100644 --- a/Trigger/TriggerCommon/TriggerJobOpts/python/Lvl1ResultBuilderGetter.py +++ b/Trigger/TriggerCommon/TriggerJobOpts/python/Lvl1ResultBuilderGetter.py @@ -8,7 +8,6 @@ from AthenaCommon.Include import include # to include old style job options from AthenaCommon.AppMgr import theApp from RecExConfig.RecFlags import rec -from RecExConfig.RecAlgsFlags import recAlgs from RecExConfig.Configured import Configured @@ -41,7 +40,7 @@ class Lvl1ResultBuilderGetter(Configured): topSequence = AlgSequence() - if recAlgs.doTrigger(): + if rec.doTrigger(): if (rec.doESD() or rec.doAOD()) and (not(rec.readAOD() or \ rec.readESD())): if jobproperties.Global.InputFormat() == 'bytestream': diff --git a/Trigger/TriggerCommon/TriggerJobOpts/share/BStoESD_Tier0_HLTConfig_jobOptions.py b/Trigger/TriggerCommon/TriggerJobOpts/share/BStoESD_Tier0_HLTConfig_jobOptions.py index f7245c3d1b15d6721dc297123a6a3b583aa792e7..1b0a6dbd1e827c4592817d7d96e69be9906f0d32 100644 --- a/Trigger/TriggerCommon/TriggerJobOpts/share/BStoESD_Tier0_HLTConfig_jobOptions.py +++ b/Trigger/TriggerCommon/TriggerJobOpts/share/BStoESD_Tier0_HLTConfig_jobOptions.py @@ -15,6 +15,11 @@ from TriggerJobOpts.TriggerFlags import TriggerFlags as tf from AthenaCommon.AppMgr import ServiceMgr, ToolSvc from AthenaCommon.Include import include +assertMsg = 'This file is meant for Trigger configuration in RAWtoESD/RAWtoALL data reconstruction.' +assert rec.doTrigger(), assertMsg + ' Since rec.doTrigger is disabled, this file should not be included.' 
+assert not recAlgs.doTrigger(), assertMsg + \ + ' Trigger selection should not run in offline reconstruction, so recAlgs.doTrigger should be False' + # First check is HLT psk is ok, if not, turn trigger off. if tf.configForStartup() != 'HLToffline': include( "TriggerJobOpts/TriggerConfigCheckHLTpsk.py" ) @@ -130,14 +135,13 @@ if rec.doTrigger(): getattr(ToolSvc, toolName).LVL1ConfigSvc="TrigConf::TrigConfigSvc/TrigConfigSvc" #--------------------------------------------------------------------------- - if recAlgs.doTrigger(): - try: - from TriggerJobOpts.T0TriggerGetter import T0TriggerGetter - triggerGetter = T0TriggerGetter() - except Exception: - from AthenaCommon.Resilience import treatException - treatException("Could not import TriggerJobOpts.TriggerGetter . Switched off !" ) - recAlgs.doTrigger=False - elif rec.doWriteBS(): + try: + from TriggerJobOpts.T0TriggerGetter import T0TriggerGetter + triggerGetter = T0TriggerGetter() + except Exception: + from AthenaCommon.Resilience import treatException + treatException("Could not import TriggerJobOpts.TriggerGetter . Switched off !" 
) + recAlgs.doTrigger=False + if rec.doWriteBS(): include( "ByteStreamCnvSvc/RDP_ByteStream_jobOptions.py" ) ## end of configure the HLT config diff --git a/Trigger/TriggerCommon/TriggerJobOpts/share/runHLT_standalone_newJO.py b/Trigger/TriggerCommon/TriggerJobOpts/share/runHLT_standalone_newJO.py index f824ab4af1ed5377953ddf9f0c5e55eb12c968e2..b524cd9622b49b5fa48baa5602963cb029ac25c1 100644 --- a/Trigger/TriggerCommon/TriggerJobOpts/share/runHLT_standalone_newJO.py +++ b/Trigger/TriggerCommon/TriggerJobOpts/share/runHLT_standalone_newJO.py @@ -106,6 +106,8 @@ acc.foreach_component("*/L1Decoder/*Tool").OutputLevel = DEBUG # tools acc.foreach_component("*HLTTop/*Hypo*").OutputLevel = DEBUG # hypo algs acc.foreach_component("*HLTTop/*Hypo*/*Tool*").OutputLevel = INFO # hypo tools acc.foreach_component("*HLTTop/RoRSeqFilter/*").OutputLevel = INFO# filters +acc.foreach_component("*/FPrecisionCalo").OutputLevel = DEBUG# filters +acc.foreach_component("*/CHElectronFTF").OutputLevel = DEBUG# filters acc.foreach_component("*HLTTop/*Input*").OutputLevel = DEBUG # input makers acc.foreach_component("*HLTTop/*HLTEDMCreator*").OutputLevel = WARNING # messaging from the EDM creators acc.foreach_component("*HLTTop/*GenericMonitoringTool*").OutputLevel = WARNING # silcence mon tools (addressing by type) diff --git a/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Egamma/ElectronDef.py b/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Egamma/ElectronDef.py index f1087e5e0a93893e49bc55e507266a1b67ae23c2..7441ed68d8381fe498722cd67bbf68c90d27f715 100644 --- a/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Egamma/ElectronDef.py +++ b/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Egamma/ElectronDef.py @@ -86,9 +86,9 @@ class ElectronChainConfiguration(ChainConfigurationBase): 'lhlooseivarloose' : ['getFastCalo', 'getFastElectron', 'getPrecisionCaloElectron', 'getPrecisionTracking', 'getPrecisionElectron'], 'lhlooseivarmedium' : ['getFastCalo', 
'getFastElectron', 'getPrecisionCaloElectron', 'getPrecisionTracking', 'getPrecisionElectron'], 'lhlooseivartight' : ['getFastCalo', 'getFastElectron', 'getPrecisionCaloElectron', 'getPrecisionTracking', 'getPrecisionElectron'], - 'lhlmediumivarloose' : ['getFastCalo', 'getFastElectron', 'getPrecisionCaloElectron', 'getPrecisionTracking', 'getPrecisionElectron'], - 'lhlmediumivarmedium': ['getFastCalo', 'getFastElectron', 'getPrecisionCaloElectron', 'getPrecisionTracking', 'getPrecisionElectron'], - 'lhlmediumivartight' : ['getFastCalo', 'getFastElectron', 'getPrecisionCaloElectron', 'getPrecisionTracking', 'getPrecisionElectron'], + 'lhmediumivarloose' : ['getFastCalo', 'getFastElectron', 'getPrecisionCaloElectron', 'getPrecisionTracking', 'getPrecisionElectron'], + 'lhmediumivarmedium': ['getFastCalo', 'getFastElectron', 'getPrecisionCaloElectron', 'getPrecisionTracking', 'getPrecisionElectron'], + 'lhmediumivartight' : ['getFastCalo', 'getFastElectron', 'getPrecisionCaloElectron', 'getPrecisionTracking', 'getPrecisionElectron'], 'lhtightivarloose' : ['getFastCalo', 'getFastElectron', 'getPrecisionCaloElectron', 'getPrecisionTracking', 'getPrecisionElectron'], 'lhtightivarmedium' : ['getFastCalo', 'getFastElectron', 'getPrecisionCaloElectron', 'getPrecisionTracking', 'getPrecisionElectron'], 'lhtightivartight' : ['getFastCalo', 'getFastElectron', 'getPrecisionCaloElectron', 'getPrecisionTracking', 'getPrecisionElectron'], @@ -100,9 +100,9 @@ class ElectronChainConfiguration(ChainConfigurationBase): 'lhloosegsfivarloose' : ['getFastCalo', 'getFastElectron', 'getPrecisionCaloElectron', 'getPrecisionTracking', 'getPrecisionGSFElectron'], 'lhloosegsfivarmedium' : ['getFastCalo', 'getFastElectron', 'getPrecisionCaloElectron', 'getPrecisionTracking', 'getPrecisionGSFElectron'], 'lhloosegsfivartight' : ['getFastCalo', 'getFastElectron', 'getPrecisionCaloElectron', 'getPrecisionTracking', 'getPrecisionGSFElectron'], - 'lhlmediumgsfivarloose' : ['getFastCalo', 
'getFastElectron', 'getPrecisionCaloElectron', 'getPrecisionTracking', 'getPrecisionGSFElectron'], - 'lhlmediumgsfivarmedium': ['getFastCalo', 'getFastElectron', 'getPrecisionCaloElectron', 'getPrecisionTracking', 'getPrecisionGSFElectron'], - 'lhlmediumgsfivartight' : ['getFastCalo', 'getFastElectron', 'getPrecisionCaloElectron', 'getPrecisionTracking', 'getPrecisionGSFElectron'], + 'lhmediumgsfivarloose' : ['getFastCalo', 'getFastElectron', 'getPrecisionCaloElectron', 'getPrecisionTracking', 'getPrecisionGSFElectron'], + 'lhmediumgsfivarmedium': ['getFastCalo', 'getFastElectron', 'getPrecisionCaloElectron', 'getPrecisionTracking', 'getPrecisionGSFElectron'], + 'lhmediumgsfivartight' : ['getFastCalo', 'getFastElectron', 'getPrecisionCaloElectron', 'getPrecisionTracking', 'getPrecisionGSFElectron'], 'lhtightgsfivarloose' : ['getFastCalo', 'getFastElectron', 'getPrecisionCaloElectron', 'getPrecisionTracking', 'getPrecisionGSFElectron'], 'lhtightgsfivarmedium' : ['getFastCalo', 'getFastElectron', 'getPrecisionCaloElectron', 'getPrecisionTracking', 'getPrecisionGSFElectron'], 'lhtightgsfivartight' : ['getFastCalo', 'getFastElectron', 'getPrecisionCaloElectron', 'getPrecisionTracking', 'getPrecisionGSFElectron'], diff --git a/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Electron/ElectronRecoSequences.py b/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Electron/ElectronRecoSequences.py index fe7decdc9385acfa6c4b76c83fcb5716812c75ca..0d8f6ad75f625453ebcb2112aea651becb8f1257 100644 --- a/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Electron/ElectronRecoSequences.py +++ b/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Electron/ElectronRecoSequences.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2002-2010 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration # from AthenaConfiguration.ComponentFactory import CompFactory diff --git 
a/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Electron/generateElectron.py b/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Electron/generateElectron.py index a509b32e047afeb161eba7f4a199734c84f21e23..031a5b0a752d3ea0106e0f86787ae4bbff66575b 100644 --- a/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Electron/generateElectron.py +++ b/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Electron/generateElectron.py @@ -2,101 +2,129 @@ from TriggerMenuMT.HLTMenuConfig.Electron.ElectronRecoSequences import l2CaloRecoCfg, l2CaloHypoCfg from TriggerMenuMT.HLTMenuConfig.Menu.MenuComponents import CAMenuSequence, \ - ChainStep, Chain, createStepView, EmptyMenuSequence, InViewReco + ChainStep, Chain, EmptyMenuSequence, InViewReco, SelectionCA from TrigEgammaHypo.TrigEgammaFastCaloHypoTool import TrigEgammaFastCaloHypoToolFromDict -from AthenaConfiguration.ComponentAccumulator import ComponentAccumulator from TrigEDMConfig.TriggerEDMRun3 import recordable from AthenaConfiguration.ComponentFactory import CompFactory from TriggerMenuMT.HLTMenuConfig.Menu.DictFromChainName import getChainMultFromDict -def generateChains( flags, chainDict ): - import pprint - pprint.pprint( chainDict ) +def generateChains(flags, chainDict): - firstStepName = 'FastCaloElectron' - stepReco, stepView = createStepView(firstStepName) + def __fastCalo(): + selAcc=SelectionCA('FastCaloElectron') + selAcc.mergeReco(l2CaloRecoCfg(flags)) - accCalo = ComponentAccumulator() - accCalo.addSequence(stepView) + # this alg needs EventInfo decorated with the pileup info + from LumiBlockComps.LumiBlockMuWriterConfig import LumiBlockMuWriterCfg + selAcc.merge(LumiBlockMuWriterCfg(flags)) - l2CaloReco = l2CaloRecoCfg(flags) - accCalo.merge(l2CaloReco, sequenceName=stepReco.getName()) + l2CaloHypo = l2CaloHypoCfg(flags, + name='L2ElectronCaloHypo', + CaloClusters=recordable('HLT_FastCaloEMClusters')) - # this alg needs EventInfo decorated with the pileup info - from 
LumiBlockComps.LumiBlockMuWriterConfig import LumiBlockMuWriterCfg - accCalo.merge( LumiBlockMuWriterCfg(flags) ) + selAcc.addHypoAlgo(l2CaloHypo) - l2CaloHypo = l2CaloHypoCfg( flags, name = 'L2ElectronCaloHypo', - CaloClusters = recordable('HLT_FastCaloEMClusters')) + fastCaloSequence = CAMenuSequence(selAcc, + HypoToolGen=TrigEgammaFastCaloHypoToolFromDict) - accCalo.addEventAlgo(l2CaloHypo, sequenceName=stepView.getName()) + # this cannot work for asymmetric combined chains....FP + return ChainStep(name=selAcc.name, Sequences=[fastCaloSequence], chainDicts=[chainDict], multiplicity=getChainMultFromDict(chainDict)) - fastCaloSequence = CAMenuSequence(accCalo, - HypoToolGen = TrigEgammaFastCaloHypoToolFromDict) + def __ftf(): + selAcc=SelectionCA('ElectronFTF') - accCalo.printConfig() - # this cannot work for asymmetric combined chains....FP - fastCaloStep = ChainStep(name=firstStepName, Sequences=[fastCaloSequence], chainDicts=[chainDict], multiplicity=getChainMultFromDict(chainDict)) - + # # # fast ID (need to be customised because require secialised configuration of the views maker - i.e. 
parent has to be linked) + name = "IMFastElectron" + evtViewMaker = CompFactory.EventViewCreatorAlgorithm(name, + ViewFallThrough = True, + RoIsLink = 'initialRoI', + RoITool = CompFactory.ViewCreatorInitialROITool(), + InViewRoIs = name+'RoIs', + Views = name+'Views', + ViewNodeName = name+"InView", + RequireParentView = True) + del name + from TrigInDetConfig.TrigInDetConfig import trigInDetFastTrackingCfg + idTracking = trigInDetFastTrackingCfg(flags, roisKey=evtViewMaker.InViewRoIs, signatureName="Electron") - secondStepName = 'ElectronFTF' - stepReco, stepView = createStepView(secondStepName) + fastInDetReco = InViewReco('FastElectron', viewMaker=evtViewMaker) + fastInDetReco.mergeReco(idTracking) + fastInDetReco.addRecoAlgo(CompFactory.AthViews.ViewDataVerifier(name='VDVElectronFastCalo', + DataObjects=[('xAOD::TrigEMClusterContainer', 'StoreGateSvc+HLT_FastCaloEMClusters')]) ) - accTrk = ComponentAccumulator() - accTrk.addSequence(stepView) + from TrigEgammaHypo.TrigEgammaFastElectronFexMTConfig import fastElectronFexAlgCfg + fastInDetReco.mergeReco(fastElectronFexAlgCfg(flags, rois=evtViewMaker.InViewRoIs)) + selAcc.mergeReco(fastInDetReco) - # # # fast ID (need to be customised because require secialised configuration of the views maker - i.e. 
parent has to be linked) - name = "IMFastElectron" - evtViewMaker = CompFactory.EventViewCreatorAlgorithm(name, - ViewFallThrough = True, - RoIsLink = 'initialRoI', - RoITool = CompFactory.ViewCreatorInitialROITool(), - InViewRoIs = name+'RoIs', - Views = name+'Views', - ViewNodeName = name+"InView", - RequireParentView = True) - del name + fastElectronHypoAlg = CompFactory.TrigEgammaFastElectronHypoAlgMT() + fastElectronHypoAlg.Electrons = 'HLT_FastElectrons' + fastElectronHypoAlg.RunInView = True + selAcc.addHypoAlgo(fastElectronHypoAlg) - from TrigInDetConfig.TrigInDetConfig import trigInDetFastTrackingCfg - idTracking = trigInDetFastTrackingCfg(flags, roisKey=evtViewMaker.InViewRoIs, signatureName="Electron") + from TrigEgammaHypo.TrigEgammaFastElectronHypoTool import TrigEgammaFastElectronHypoToolFromDict + fastInDetSequence = CAMenuSequence(selAcc, + HypoToolGen=TrigEgammaFastElectronHypoToolFromDict) - fastInDetReco = InViewReco("FastElectron", viewMaker=evtViewMaker) - fastInDetReco.mergeReco(idTracking) - fastInDetReco.addRecoAlgo(CompFactory.AthViews.ViewDataVerifier(name='VDVElectronFastCalo', - DataObjects=[('xAOD::TrigEMClusterContainer', 'StoreGateSvc+HLT_FastCaloEMClusters')])) - - from TrigEgammaHypo.TrigEgammaFastElectronFexMTConfig import fastElectronFexAlgCfg - fastInDetReco.mergeReco(fastElectronFexAlgCfg(flags, rois=evtViewMaker.InViewRoIs)) - - accTrk.merge(fastInDetReco, sequenceName=stepReco.getName()) - - - fastElectronHypoAlg = CompFactory.TrigEgammaFastElectronHypoAlgMT() - fastElectronHypoAlg.Electrons = "HLT_FastElectrons" - fastElectronHypoAlg.RunInView = True - accTrk.addEventAlgo(fastElectronHypoAlg, sequenceName=stepView.getName()) - - from TrigEgammaHypo.TrigEgammaFastElectronHypoTool import TrigEgammaFastElectronHypoToolFromDict - fastInDetSequence = CAMenuSequence(accTrk, - HypoToolGen = TrigEgammaFastElectronHypoToolFromDict) - - fastInDetStep = ChainStep( name=secondStepName, Sequences=[fastInDetSequence], 
chainDicts=[chainDict], multiplicity=getChainMultFromDict(chainDict)) + return ChainStep( name=selAcc.name, Sequences=[fastInDetSequence], chainDicts=[chainDict], multiplicity=getChainMultFromDict(chainDict)) l1Thresholds=[] for part in chainDict['chainParts']: l1Thresholds.append(part['L1threshold']) - - # # # EF calo + + # # # Precision calo + def __precisonCalo(): + recoAcc = InViewReco('ElectronRoITopoClusterReco') + recoAcc.addRecoAlgo(CompFactory.AthViews.ViewDataVerifier(name='VDV'+recoAcc.name, + DataObjects=[('TrigRoiDescriptorCollection', recoAcc.inputMaker().InViewRoIs), + ('CaloBCIDAverage', 'StoreGateSvc+CaloBCIDAverage')])) + + from TrigCaloRec.TrigCaloRecConfig import hltCaloTopoClusteringCfg + recoAcc.mergeReco(hltCaloTopoClusteringCfg(flags, + FS=False, + roisKey=recoAcc.inputMaker().InViewRoIs)) # RoI + + copier = CompFactory.egammaTopoClusterCopier('TrigEgammaTopoClusterCopierPrecisionCaloRoIs', + InputTopoCollection='HLT_TopoCaloClustersRoI', + OutputTopoCollection='HLT_CaloEMClusters', + OutputTopoCollectionShallow='tmp_HLT_CaloEMClusters') + recoAcc.addRecoAlgo(copier) + + selAcc = SelectionCA('PrecisionCalo') + selAcc.mergeReco(recoAcc) + hypoAlg = CompFactory.TrigEgammaPrecisionCaloHypoAlgMT(name='ElectronPrecisionCaloHypo', + CaloClusters=recordable('HLT_CaloEMClusters')) + selAcc.addHypoAlgo(hypoAlg) + from TrigEgammaHypo.TrigEgammaPrecisionCaloHypoTool import TrigEgammaPrecisionCaloHypoToolFromDict + menuSequence = CAMenuSequence(selAcc, + HypoToolGen=TrigEgammaPrecisionCaloHypoToolFromDict) + return ChainStep(name=selAcc.name, Sequences=[menuSequence], chainDicts=[chainDict], multiplicity=getChainMultFromDict(chainDict)) # # # Precison tracking - + # # # offline egamma emptyStep = ChainStep(name="EmptyElStep", Sequences=[EmptyMenuSequence("EmptyElStep")], chainDicts=[chainDict]) - chain = Chain(chainDict['chainName'], L1Thresholds=l1Thresholds, ChainSteps=[fastCaloStep, fastInDetStep, emptyStep, emptyStep]) - + chain = 
Chain(chainDict['chainName'], L1Thresholds=l1Thresholds, + ChainSteps=[__fastCalo(), __ftf(), __precisonCalo(), emptyStep, emptyStep,]) + return chain + +if __name__ == "__main__": + # run with: python -m TriggerMenuMT.HLTMenuConfig.Electron.generateElectron + from AthenaCommon.Configurable import Configurable + Configurable.configurableRun3Behavior=1 + from AthenaConfiguration.AllConfigFlags import ConfigFlags + from AthenaConfiguration.TestDefaults import defaultTestFiles + ConfigFlags.Input.Files = defaultTestFiles.RAW + ConfigFlags.lock() + from ..Menu.DictFromChainName import dictFromChainName + chain = generateChains(ConfigFlags, dictFromChainName('HLT_e26_L1EM15')) + for step in chain.steps: + for s in step.sequences: + if not isinstance(s, EmptyMenuSequence): + s.ca.printConfig(withDetails=True, summariseProps=False) # flip the last arg to see all settings + s.ca.wasMerged() # to silence check for orphanted CAs diff --git a/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Menu/LS2_v1.py b/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Menu/LS2_v1.py index 11510d8b227a8e1bf91d7004abf587ec002967e8..1821f9279060881a01c797833c7af6d69d52d729 100644 --- a/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Menu/LS2_v1.py +++ b/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Menu/LS2_v1.py @@ -41,7 +41,7 @@ def setupMenu(): #test chains ChainProp(name='HLT_mu6_L1MU6', groups=SingleMuonGroup), - ChainProp(name='HLT_mu6_idperfLRT_l2lrt_L1MU6', groups=SingleMuonGroup), + ChainProp(name='HLT_mu6_LRT_idperf_l2lrt_L1MU6', groups=SingleMuonGroup), ChainProp(name='HLT_mu6_ivarmedium_L1MU6', groups=SingleMuonGroup), @@ -236,21 +236,21 @@ def setupMenu(): ChainProp(name='HLT_j45_ftf_L1J15', groups=SingleJetGroup), ChainProp(name='HLT_j85_ftf_L1J20', groups=SingleJetGroup), - ChainProp(name='HLT_j45_pf_ftf_L1J20', groups=SingleJetGroup), - ChainProp(name='HLT_j45_pf_ftf_010jvt_L1J20', groups=SingleJetGroup), - 
ChainProp(name='HLT_j45_pf_ftf_020jvt_L1J20', groups=SingleJetGroup), - ChainProp(name='HLT_j45_pf_ftf_050jvt_L1J20', groups=SingleJetGroup), - ChainProp(name='HLT_j45_pf_ftf_preselj20_L1J20', groups=SingleJetGroup), - ChainProp(name='HLT_j45_ftf_preselj20_L1J20', groups=SingleJetGroup), - ChainProp(name='HLT_j45_subjesIS_ftf_preselj20_L1J20', groups=SingleJetGroup), - ChainProp(name='HLT_j45_pf_subjesgscIS_ftf_L1J20', groups=SingleJetGroup), + ChainProp(name='HLT_j45_pf_ftf_L1J15', groups=SingleJetGroup), + ChainProp(name='HLT_j45_pf_ftf_010jvt_L1J15', groups=SingleJetGroup), + ChainProp(name='HLT_j45_pf_ftf_020jvt_L1J15', groups=SingleJetGroup), + ChainProp(name='HLT_j45_pf_ftf_050jvt_L1J15', groups=SingleJetGroup), + ChainProp(name='HLT_j45_pf_ftf_preselj20_L1J15', groups=SingleJetGroup), + ChainProp(name='HLT_j45_ftf_preselj20_L1J15', groups=SingleJetGroup), + ChainProp(name='HLT_j45_subjesIS_ftf_preselj20_L1J15', groups=SingleJetGroup), + ChainProp(name='HLT_j45_pf_subjesgscIS_ftf_L1J15', groups=SingleJetGroup), ChainProp(name='HLT_j85_pf_ftf_L1J20', groups=SingleJetGroup), - ChainProp(name='HLT_j45_nojcalib_L1J20', groups=SingleJetGroup), - ChainProp(name='HLT_j45_sktc_nojcalib_L1J20', groups=SingleJetGroup), - ChainProp(name='HLT_j45_cssktc_nojcalib_L1J20', groups=SingleJetGroup), - ChainProp(name='HLT_j45_pf_nojcalib_ftf_L1J20', groups=SingleJetGroup), - ChainProp(name='HLT_j45_csskpf_nojcalib_ftf_L1J20', groups=SingleJetGroup), + ChainProp(name='HLT_j45_nojcalib_L1J15', groups=SingleJetGroup), + ChainProp(name='HLT_j45_sktc_nojcalib_L1J15', groups=SingleJetGroup), + ChainProp(name='HLT_j45_cssktc_nojcalib_L1J15', groups=SingleJetGroup), + ChainProp(name='HLT_j45_pf_nojcalib_ftf_L1J15', groups=SingleJetGroup), + ChainProp(name='HLT_j45_csskpf_nojcalib_ftf_L1J15', groups=SingleJetGroup), ChainProp(name='HLT_j260_320eta490_L1J20', groups=['Online',SingleJetGroup]), diff --git a/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Menu/MenuComponents.py 
b/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Menu/MenuComponents.py index 394f04c9a13fd178dc364a8f6d7cfe2f783693e7..023c5dcc0011f223216a66b2f2da763cb4965a92 100644 --- a/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Menu/MenuComponents.py +++ b/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Menu/MenuComponents.py @@ -927,6 +927,21 @@ class InViewReco(ComponentAccumulator): def inputMaker( self ): return self.viewMakerAlg +class SelectionCA(ComponentAccumulator): + def __init__(self, name): + self.name = name + super( SelectionCA, self ).__init__() + self.stepRecoSequence, self.stepViewSequence = createStepView(name) + self.addSequence(self.stepViewSequence) + + def mergeReco(self, other): + self.merge(other, sequenceName=self.stepRecoSequence.name) + + def mergeHypo(self, other): + self.merge(other, sequenceName=self.stepViewSequence.name) + + def addHypoAlgo(self, algo): + self.addEventAlgo(algo, sequenceName=self.stepViewSequence.name) class RecoFragmentsPool(object): """ Class to host all the reco fragments that need to be reused """ diff --git a/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Menu/SignatureDicts.py b/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Menu/SignatureDicts.py index 34bce8ae95447afaa80a913068fa3302603c4a03..7d176fb0b6ef75e627953e6d3ba528048f2e2a55 100644 --- a/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Menu/SignatureDicts.py +++ b/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Menu/SignatureDicts.py @@ -226,7 +226,7 @@ MuonChainParts = { 'IDinfo' : [], 'isoInfo' : ['ivarmedium'], 'invMassInfo' : ['10invm70'], - 'addInfo' : ['1step','idperf','idperfLRT','3layersEC','cosmic',"muonqual"], + 'addInfo' : ['1step','idperf','LRT','3layersEC','cosmic',"muonqual"], 'topo' : AllowedTopos_mu, 'flavour' : [], 'sigFolder' : 'Muon', diff --git a/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Muon/MuonDef.py 
b/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Muon/MuonDef.py index f68f6cdfa5410ddfd713680a8025623737a675b5..a63267ae05398ac6ec8537acc682c055799e80d7 100755 --- a/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Muon/MuonDef.py +++ b/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Muon/MuonDef.py @@ -157,7 +157,7 @@ class MuonChainConfiguration(ChainConfigurationBase): if doOvlpRm: return self.getStep(2, 'muComb', [muCombOvlpRmSequenceCfg] ) - elif "idperfLRT" in self.chainName: + elif "LRT" in self.chainName: return self.getStep(2, 'muCombLRT', [muCombLRTSequenceCfg] ) else: return self.getStep(2, 'muComb', [muCombSequenceCfg] ) diff --git a/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Muon/generateMuon.py b/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Muon/generateMuon.py index ab0236cf8252ff24fbcc90b4da4282c058b16e37..cce859909024bc5102ff9787458ebe59bbb5ebe6 100644 --- a/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Muon/generateMuon.py +++ b/Trigger/TriggerCommon/TriggerMenuMT/python/HLTMenuConfig/Muon/generateMuon.py @@ -400,11 +400,11 @@ def generateChains( flags, chainDict ): l1Thresholds.append(part['L1threshold']) log.debug('dictionary is: %s\n', pprint.pformat(chainDict)) - + def _empty(name): + return ChainStep(name="EmptyNoL2MuComb", Sequences=[EmptyMenuSequence("EmptyNoL2MuComb")], chainDicts=[chainDict]) if 'msonly' in chainDict['chainName']: - emptyStep = ChainStep(name="EmptyNoL2MuComb", Sequences=[EmptyMenuSequence("EmptyNoL2MuComb")], chainDicts=[chainDict]) - chain = Chain( name=chainDict['chainName'], L1Thresholds=l1Thresholds, ChainSteps=[ l2muFastStep, emptyStep, efmuMSStep, emptyStep ] ) + chain = Chain( name=chainDict['chainName'], L1Thresholds=l1Thresholds, ChainSteps=[ l2muFastStep, _empty("EmptyNoL2MuComb"), efmuMSStep, _empty("EmptyNoEFCB"), _empty("JustEmpty") ] ) else: - chain = Chain( name=chainDict['chainName'], L1Thresholds=l1Thresholds, ChainSteps=[ l2muFastStep, 
l2muCombStep, efmuMSStep, efmuCBStep ] ) + chain = Chain( name=chainDict['chainName'], L1Thresholds=l1Thresholds, ChainSteps=[ l2muFastStep, l2muCombStep, efmuMSStep, efmuCBStep, _empty("JustEmpty") ] ) return chain diff --git a/graphics/VP1/VP1Systems/VP1TrackSystems/VP1TrackSystems/AscObj_TruthPoint.h b/graphics/VP1/VP1Systems/VP1TrackSystems/VP1TrackSystems/AscObj_TruthPoint.h index c18c7be39ab4dc00bc6f296b97e8499dbd191031..7d75aa7e58dd6e08fa89abc91c152d8ebc97a803 100644 --- a/graphics/VP1/VP1Systems/VP1TrackSystems/VP1TrackSystems/AscObj_TruthPoint.h +++ b/graphics/VP1/VP1Systems/VP1TrackSystems/VP1TrackSystems/AscObj_TruthPoint.h @@ -27,7 +27,7 @@ class SimHitHandleBase; class AscObj_TruthPoint : public AssociatedObjectHandleBase { public: - AscObj_TruthPoint( TrackHandleBase*, const HepMC::GenVertex * v, const HepMC::GenParticle * p ); + AscObj_TruthPoint( TrackHandleBase*, HepMC::ConstGenVertexPtr v, HepMC::ConstGenParticlePtr p ); AscObj_TruthPoint( TrackHandleBase*, SimHitHandleBase* ); virtual ~AscObj_TruthPoint(); diff --git a/graphics/VP1/VP1Systems/VP1TrackSystems/src/AscObj_TruthPoint.cxx b/graphics/VP1/VP1Systems/VP1TrackSystems/src/AscObj_TruthPoint.cxx index 9372689c3f0e830631a87c6b71c1c861fe5f06e6..04d581cbed4c2063f00211779d2684cd9e581787 100644 --- a/graphics/VP1/VP1Systems/VP1TrackSystems/src/AscObj_TruthPoint.cxx +++ b/graphics/VP1/VP1Systems/VP1TrackSystems/src/AscObj_TruthPoint.cxx @@ -36,18 +36,18 @@ //____________________________________________________________________ class AscObj_TruthPoint::Imp { public: - Imp(const HepMC::GenVertex * v, const HepMC::GenParticle * p) - : genVertex(v), genParticle(p), simhit(0) {} + Imp(HepMC::ConstGenVertexPtr v, HepMC::ConstGenParticlePtr p) + : genVertex(v), genParticle(p), simhit(nullptr) {} Imp(SimHitHandleBase*s) - : genVertex(0), genParticle(0), simhit(s) {} - const HepMC::GenVertex * genVertex; - const HepMC::GenParticle * genParticle; + : genVertex(nullptr), genParticle(nullptr), simhit(s) {} + 
HepMC::ConstGenVertexPtr genVertex; + HepMC::ConstGenParticlePtr genParticle; SimHitHandleBase * simhit; }; //____________________________________________________________________ -AscObj_TruthPoint::AscObj_TruthPoint(TrackHandleBase*th, const HepMC::GenVertex * v, const HepMC::GenParticle * p) +AscObj_TruthPoint::AscObj_TruthPoint(TrackHandleBase*th, HepMC::ConstGenVertexPtr v, HepMC::ConstGenParticlePtr p) : AssociatedObjectHandleBase(th), m_d(new Imp(v,p)) { } diff --git a/graphics/VP1/VP1Systems/VP1TrackSystems/src/TrackHandle_TruthTrack.cxx b/graphics/VP1/VP1Systems/VP1TrackSystems/src/TrackHandle_TruthTrack.cxx index 1416ea225bfa6a2666bdbfa46b18d5c0e4cc0b3d..0ba886b7445136788bd8b261d8de251e8bb1b238 100644 --- a/graphics/VP1/VP1Systems/VP1TrackSystems/src/TrackHandle_TruthTrack.cxx +++ b/graphics/VP1/VP1Systems/VP1TrackSystems/src/TrackHandle_TruthTrack.cxx @@ -28,18 +28,18 @@ class TrackHandle_TruthTrack::Imp { public: Imp(TrackHandle_TruthTrack * tc, - const SimBarCode& sbc,const SimHitList& shl,const HepMC::GenParticle* p) + const SimBarCode& sbc,const SimHitList& shl,HepMC::ConstGenParticlePtr p) : theclass(tc), simBarCode(sbc), simHitList(shl), genParticle(p), ascObjVis(false), ascObjs(0), - trkTrack(0) {} + trkTrack(nullptr) {} TrackHandle_TruthTrack * theclass; SimBarCode simBarCode; SimHitList simHitList; - const HepMC::GenParticle* genParticle; + HepMC::ConstGenParticlePtr genParticle; bool ascObjVis; std::vector<AscObj_TruthPoint*> * ascObjs; @@ -47,11 +47,11 @@ public: const Trk::Track * trkTrack; void ensureInitTrkTracks(); - static Trk::Perigee * createTrkPerigeeFromProdVertex(const HepMC::GenParticle * p, const double& charge ) + static Trk::Perigee * createTrkPerigeeFromProdVertex(HepMC::ConstGenParticlePtr p, const double& charge ) { if (!p) return 0;//Fixme: message! - const HepMC::GenVertex * v = p->production_vertex(); + auto v = p->production_vertex(); if (!v) return 0;//Fixme: message! 
Amg::Vector3D mom(p->momentum().px(),p->momentum().py(),p->momentum().pz()); @@ -62,11 +62,11 @@ public: return new Trk::Perigee(0.,0.,mom.phi(), mom.theta(), charge/absmom, pos); } - static Trk::TrackParameters * createTrkParamFromDecayVertex(const HepMC::GenParticle * p, const double& charge ) + static Trk::TrackParameters * createTrkParamFromDecayVertex(HepMC::ConstGenParticlePtr p, const double& charge ) { if (!p) return 0;//Fixme: message! - const HepMC::GenVertex * v = p->end_vertex(); + auto v = p->end_vertex(); if (!v) return 0;//Fixme: message! Amg::Vector3D mom(p->momentum().px(),p->momentum().py(),p->momentum().pz()); @@ -125,7 +125,7 @@ public: TrackHandle_TruthTrack::TrackHandle_TruthTrack( TrackCollHandleBase* ch, const SimBarCode& simBarCode, const SimHitList& simHitList, - const HepMC::GenParticle* genPart ) + HepMC::ConstGenParticlePtr genPart ) : TrackHandleBase(ch), m_d(new Imp(this,simBarCode,simHitList,genPart)) { if (VP1Msg::verbose()) { @@ -262,7 +262,7 @@ bool TrackHandle_TruthTrack::hasVertexAtIR(const double& rmaxsq, const double& z { if (!m_d->genParticle) return false; - const HepMC::GenVertex * v = m_d->genParticle->production_vertex(); + auto v = m_d->genParticle->production_vertex(); if (!v) return false; @@ -308,8 +308,12 @@ void TrackHandle_TruthTrack::Imp::ensureInitAscObjs() if (ascObjs) return; ascObjs = new std::vector<AscObj_TruthPoint*>; - const HepMC::GenVertex * vprod = genParticle ? genParticle->production_vertex() : 0; - const HepMC::GenVertex * vend = genParticle ? genParticle->end_vertex() : 0; + HepMC::ConstGenVertexPtr vprod{nullptr}; + HepMC::ConstGenVertexPtr vend{nullptr}; + if (genParticle) { + vprod=genParticle->production_vertex(); + vend=genParticle->end_vertex(); + } ascObjs->reserve((vprod?1:0)+(vend?1:simHitList.size())); if (vprod) ascObjs->push_back(new AscObj_TruthPoint(theclass,vprod,genParticle));