diff --git a/DaVinciExamples/example_data/spruce_b2jpsik_opt.yaml b/DaVinciExamples/example_data/spruce_b2jpsik_opt.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..02ea23597dc8d8ab5906bdb46cfb9b03d97b5c08
--- /dev/null
+++ b/DaVinciExamples/example_data/spruce_b2jpsik_opt.yaml
@@ -0,0 +1,23 @@
+###############################################################################
+# (c) Copyright 2021-2022 CERN for the benefit of the LHCb Collaboration      #
+#                                                                             #
+# This software is distributed under the terms of the GNU General Public      #
+# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING".   #
+#                                                                             #
+# In applying this licence, CERN does not waive the privileges and immunities #
+# granted to it by virtue of its status as an Intergovernmental Organization  #
+# or submit itself to any jurisdiction.                                       #
+###############################################################################
+
+input_files:
+- root://eoslhcb.cern.ch//eos/lhcb/wg/dpa/wp3/tests/B2JpsiK_spruce.dst
+annsvc_config: root://eoslhcb.cern.ch//eos/lhcb/wg/dpa/wp3/tests/B2JpsiK_spruce.tck.json
+input_type: ROOT
+evt_max: 100
+ntuple_file: davinci_ntuple_ft.root
+print_freq: 1
+data_type: Upgrade
+simulation: true
+conddb_tag: sim-20201218-vc-md100
+dddb_tag: dddb-20201211
+
diff --git a/DaVinciExamples/python/DaVinciExamples/tupling/option_davinci_tupling_from_collections.py b/DaVinciExamples/python/DaVinciExamples/tupling/option_davinci_tupling_from_collections.py
index e5e8e15098e0894792496e19be9c06c7066518ea..0142384dbde6b8734ae2f33400babed6d8504e2b 100644
--- a/DaVinciExamples/python/DaVinciExamples/tupling/option_davinci_tupling_from_collections.py
+++ b/DaVinciExamples/python/DaVinciExamples/tupling/option_davinci_tupling_from_collections.py
@@ -12,6 +12,7 @@ Read an HLT2 file and create an ntuple using pre-defined Functor collections.
 """
+import Functors as F
 from PyConf.components import force_location
 from FunTuple import FunctorCollection, functorcollections
 from FunTuple import FunTuple_Particles as Funtuple
@@ -19,6 +20,8 @@ from DaVinci.algorithms import add_filter
 from DaVinci import make_config
 from DaVinci.truth_matching import configured_MCTruthAndBkgCatAlg
 from DaVinci.algorithms import get_odin, get_decreports
+from PyConf.Algorithms import WeightedRelTableAlg
+from Gaudi.Configuration import INFO
 
 
 def main(options):
@@ -29,6 +32,13 @@ def main(options):
     mctruth = configured_MCTruthAndBkgCatAlg(
         inputs=d02kpi_data, process=options.process)
 
+    #configure "WeightedRelTableAlg" algorithm for HLT2 output
+    iso_rel_table = WeightedRelTableAlg(
+        ReferenceParticles=d02kpi_data,
+        InputCandidates=d02kpi_data,
+        Cut=(F.DR2() < 0.4),
+        OutputLevel=INFO)
+
     #get location to odin
     odin = get_odin(options)
 
@@ -40,7 +50,8 @@ def main(options):
         functorcollections.Kinematics(),
         functorcollections.MCHierarchy(mctruth),
         functorcollections.MCKinematics(mctruth),
-        functorcollections.MCVertexInfo(mctruth)
+        functorcollections.MCVertexInfo(mctruth),
+        functorcollections.TrackIsolation(iso_rel_table)
     ]
 
     evt_collections = [
diff --git a/DaVinciExamples/python/DaVinciExamples/tupling/option_davinci_tupling_weightedrelation_trackvariables.py b/DaVinciExamples/python/DaVinciExamples/tupling/option_davinci_tupling_weightedrelation_trackvariables.py
new file mode 100644
index 0000000000000000000000000000000000000000..9327ff16d796d3421154def5b62fd70ee141f65e
--- /dev/null
+++ b/DaVinciExamples/python/DaVinciExamples/tupling/option_davinci_tupling_weightedrelation_trackvariables.py
@@ -0,0 +1,106 @@
+###############################################################################
+# (c) Copyright 2022 CERN for the benefit of the LHCb Collaboration           #
+#                                                                             #
+# This software is distributed under the terms of the GNU General Public      #
+# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING".   #
+#                                                                             #
+# In applying this licence, CERN does not waive the privileges and immunities #
+# granted to it by virtue of its status as an Intergovernmental Organization  #
+# or submit itself to any jurisdiction.                                       #
+###############################################################################
+"""
+Option file for testing the WeightedRelTableAlg algorithm.
+The job runs over a spruced sample and retrieves a set of B+ -> J/psi K+ candidates.
+For each candidate the algorithm looks at the TES location containing the tagged particles
+and creates a 'one-to-many' relation map relating all the available tracks to the B candidate of the event.
+
+Important: set DVPATH properly before running.
+
+To run the example: $DVPATH/run lbexec option_davinci_tupling_weightedrelation_trackvariables:main $DVPATH/DaVinciExamples/example_data/spruce_b2jpsik_opt.yaml
+"""
+
+import Functors as F
+from PyConf.components import force_location
+from PyConf.Algorithms import WeightedRelTableAlg
+from FunTuple import FunctorCollection, FunTuple_Particles as Funtuple
+from FunTuple.functorcollections import TrackIsolation
+from DaVinci.reco_objects import make_pvs_v2
+from DaVinci.algorithms import add_filter, unpack_locations
+from DaVinci import make_config
+
+b2jpsik_data = force_location("/Event/HLT2/Hlt2B2JpsiKLine/Particles")
+
+branches = {
+    'B': "[B+ -> (J/psi(1S) -> mu+ mu- ) K+]CC",
+    'Jpsi': "[B+ -> ^(J/psi(1S) -> mu+ mu- ) K+]CC",
+    'Kp': "[B+ -> (J/psi(1S) -> mu+ mu- ) ^K+]CC"
+}
+
+
+def main(options):
+    # Hack used to unpack the tagged data for now
+    # follows discussion on Mattermost channel DPA WP3 Offline Analysis
+    # [https://mattermost.web.cern.ch/lhcb/pl/p6tbr8inetf99jgin5ihce63ic]
+    unpackers = unpack_locations(options, False)
+    tagged_data = None
+    for alg in unpackers:
+        if "OutputName" in alg.outputs.keys():
+            if (alg.OutputName.location ==
+                    "/Event/HLT2/Hlt2B2JpsiKLine/LongTaggingParticles/Particles"
+                ):
+                tagged_data = alg.OutputName
+
+    pvs = make_pvs_v2(process=options.process)
+
+    ftAlg = WeightedRelTableAlg(
+        ReferenceParticles=b2jpsik_data,
+        InputCandidates=tagged_data,
+        Cut=F.SHARE_BPV(pvs))
+
+    ftAlg_Rels = ftAlg.OutputRelations
+    #Set the variables
+    extra_variables = FunctorCollection({
+        'THOR_MASS':
+        F.MASS,
+        "First_P":
+        F.MAP_INPUT(Functor=F.P, Relations=ftAlg_Rels),
+        "First_PT":
+        F.MAP_INPUT(Functor=F.PT, Relations=ftAlg_Rels),
+        "Sum_P":
+        F.SUMCONE(Functor=F.P, Relations=ftAlg_Rels),
+        "Sum_PT":
+        F.SUMCONE(Functor=F.PT, Relations=ftAlg_Rels),
+        "Max_P":
+        F.MAXCONE(Functor=F.P, Relations=ftAlg_Rels),
+        "Max_PT":
+        F.MAXCONE(Functor=F.PT, Relations=ftAlg_Rels),
+        "Min_P":
+        F.MINCONE(Functor=F.P, Relations=ftAlg_Rels),
+        "Min_PT":
+        F.MINCONE(Functor=F.PT, Relations=ftAlg_Rels),
+        "Asym_P":
+        F.ASYM(Functor=F.P, Relations=ftAlg_Rels),
+        "Asym_PT":
+        F.ASYM(Functor=F.PT, Relations=ftAlg_Rels),
+        "Num_tracks":
+        F.VALUE_OR(0) @ F.NINCONE(Relations=ftAlg_Rels),
+    })
+
+    variables_all = FunctorCollection({'THOR_P': F.P, 'THOR_PT': F.PT})
+
+    track_iso_variables = TrackIsolation(ftAlg)
+
+    variables_jpsik = {
+        'B': variables_all + extra_variables,
+        'Jpsi': variables_all,
+        'Kp': variables_all + track_iso_variables,
+    }
+
+    my_filter = add_filter(options, "HDRFilter_B2JpsiK",
+                           "HLT_PASS('Hlt2B2JpsiKLineDecision')")
+    my_tuple = Funtuple(
+        name="Tuple",
+        tuple_name="DecayTree",
+        fields=branches,
+        variables=variables_jpsik,
+        inputs=b2jpsik_data)
+    return make_config(options, [my_filter, my_tuple])
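For readers less familiar with the relation-table approach used above, the short standalone sketch below mimics, in plain Python, the arithmetic behind the cone variables booked from the WeightedRelTableAlg output (sum, max, min, count and an asymmetry over the related tracks). It is an illustration only, not the ThOr implementation: the `Track` class and the toy numbers are made up, and the asymmetry convention `(candidate - sum) / (candidate + sum)` is an assumption.

```python
# Illustration only: NOT the ThOr/DaVinci implementation. It mirrors the
# arithmetic of the branches booked above, assuming a one-to-many map that
# associates each candidate with a list of related (tagging) tracks.
from dataclasses import dataclass


@dataclass
class Track:
    p: float   # momentum, toy value
    pt: float  # transverse momentum, toy value


def cone_variables(cand: Track, related: list[Track]) -> dict:
    """Analogues of Sum/Max/Min/Asym/Num_tracks for one candidate."""
    sum_pt = sum(t.pt for t in related)
    return {
        "Sum_PT": sum_pt,
        "Max_PT": max((t.pt for t in related), default=0.0),
        "Min_PT": min((t.pt for t in related), default=0.0),
        # Asymmetry convention assumed here: (candidate - sum) / (candidate + sum)
        "Asym_PT": (cand.pt - sum_pt) / (cand.pt + sum_pt) if (cand.pt + sum_pt) else 0.0,
        "Num_tracks": len(related),  # analogue of VALUE_OR(0) @ NINCONE
    }


# Toy usage with made-up numbers
b_cand = Track(p=55000.0, pt=8000.0)
tagging_tracks = [Track(p=12000.0, pt=1500.0), Track(p=9000.0, pt=700.0)]
print(cone_variables(b_cand, tagging_tracks))
```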
diff --git a/DaVinciExamples/tests/qmtest/tupling.qms/test_davinci_tupling_weightedrelation_trackvariables.qmt b/DaVinciExamples/tests/qmtest/tupling.qms/test_davinci_tupling_weightedrelation_trackvariables.qmt
new file mode 100644
index 0000000000000000000000000000000000000000..dfe02b6eb552706d75a7d3c843c07494faf2efa9
--- /dev/null
+++ b/DaVinciExamples/tests/qmtest/tupling.qms/test_davinci_tupling_weightedrelation_trackvariables.qmt
@@ -0,0 +1,35 @@
+<?xml version="1.0" ?>
+<!--
+###############################################################################
+# (c) Copyright 2021-2022 CERN for the benefit of the LHCb Collaboration      #
+#                                                                             #
+# This software is distributed under the terms of the GNU General Public      #
+# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING".   #
+#                                                                             #
+# In applying this licence, CERN does not waive the privileges and immunities #
+# granted to it by virtue of its status as an Intergovernmental Organization  #
+# or submit itself to any jurisdiction.                                       #
+###############################################################################
+-->
+<!DOCTYPE extension PUBLIC '-//QM/2.3/Extension//EN' 'http://www.codesourcery.com/qm/dtds/2.3/-//qm/2.3/extension//en.dtd'>
+<extension class="GaudiTest.GaudiExeTest" kind="test">
+  <argument name="program"><text>lbexec</text></argument>
+  <argument name="args"><set>
+    <text>DaVinciExamples.tupling.option_davinci_tupling_weightedrelation_trackvariables:main</text>
+  </set></argument>
+  <argument name="options_yaml_fn"><text>$DAVINCIEXAMPLESROOT/example_data/spruce_b2jpsik_opt.yaml</text></argument>
+  <argument name="extra_options_yaml"><text>
+    input_type: ROOT
+    evt_max: 100
+    ntuple_file: davinci_ntuple_ft.root
+    print_freq: 1
+    data_type: Upgrade
+    simulation: true
+  </text></argument>
+  <argument name="timeout"><integer>3600</integer></argument>
+  <argument name="reference"><text>$DAVINCIEXAMPLESROOT/tests/refs/test_davinci_tupling_weightedrelation_trackvariables.ref</text></argument>
+  <argument name="validator"><text>
+findReferenceBlock("""Tuple SUCCESS ID=DecayTree Title="DecayTree" #items=32 {B_THOR_P,B_THOR_PT,B_THOR_MASS,B_First_P,B_First_PT,B_Sum_P,B_Sum_PT,B_Max_P,B_Ma}""")
+countErrorLines({"FATAL":0, "ERROR":0})
+  </text></argument>
+</extension>
diff --git a/DaVinciExamples/tests/refs/test_davinci_tupling_weightedrelation_trackvariables.ref b/DaVinciExamples/tests/refs/test_davinci_tupling_weightedrelation_trackvariables.ref
new file mode 100644
index 0000000000000000000000000000000000000000..d3adbafbe6bc00f4c8d514d53d7313daeec9bfe6
--- /dev/null
+++ b/DaVinciExamples/tests/refs/test_davinci_tupling_weightedrelation_trackvariables.ref
@@ -0,0 +1,412 @@
+ApplicationMgr SUCCESS
+====================================================================================================================================
+ Welcome to DaVinci version 61.0
+ running on lxplus757.cern.ch on Tue Aug 2 13:45:16 2022
+====================================================================================================================================
+ApplicationMgr INFO Application Manager Configured successfully
+ToolSvc.GitDDDB INFO opening Git repository '/cvmfs/lhcb.cern.ch/lib/lhcb/git-conddb/DDDB.git'
+ToolSvc.GitDDDB INFO using commit 'upgrade/dddb-20201211' corresponding to 13a10affe2d4682b4d183eadd6fe86303a885663
+ToolSvc.GitSIMCOND INFO opening Git repository '/cvmfs/lhcb.cern.ch/lib/lhcb/git-conddb/SIMCOND.git'
+ToolSvc.GitSIMCOND INFO using commit 'upgrade/sim-20201218-vc-md100' corresponding to 9f30e42bc0e707524b922f0eeb1055551744544f
+DetectorPersistencySvc INFO Added successfully Conversion service:XmlCnvSvc
+DetectorDataSvc SUCCESS Detector description database: git:/lhcb.xml
+EventClockSvc.FakeEventTime INFO Event times generated from 0 with steps of 0
+EventClockSvc.FakeEventTime INFO Run numbers generated from 0 every 0 events
+MagneticFieldGridReader INFO Opened magnetic field file: /cvmfs/lhcb.cern.ch/lib/lhcb/DBASE/FieldMap/v5r7/cdf//field.v5r0.c1.down.cdf
+MagneticFieldGridReader INFO Opened magnetic field file: /cvmfs/lhcb.cern.ch/lib/lhcb/DBASE/FieldMap/v5r7/cdf//field.v5r0.c2.down.cdf
+MagneticFieldGridReader INFO Opened magnetic field file: /cvmfs/lhcb.cern.ch/lib/lhcb/DBASE/FieldMap/v5r7/cdf//field.v5r0.c3.down.cdf
+MagneticFieldGridReader INFO Opened magnetic field file: /cvmfs/lhcb.cern.ch/lib/lhcb/DBASE/FieldMap/v5r7/cdf//field.v5r0.c4.down.cdf +MagneticFieldSvc INFO Map scaled by factor 1 with polarity internally used: -1 signed relative current: -1 +NTupleSvc INFO Added stream file:davinci_ntuple_ft.root as FILE1 +HLTControlFlowMgr INFO Start initialization +RootHistSvc INFO Writing ROOT histograms to: davinci_ntuple_ft.root +HistogramPersistencySvc INFO Added successfully Conversion service:RootHistSvc +FSROutputStreamDstWriter INFO Data source: EventDataSvc output: SVC='Gaudi::RootCnvSvc' +HiveDataBrokerSvc WARNING non-reentrant algorithm: RecordStream/FSROutputStreamDstWriter +HLTControlFlowMgr INFO Concurrency level information: +HLTControlFlowMgr INFO o Number of events slots: 1 +HLTControlFlowMgr INFO o TBB thread pool size: 'ThreadPoolSize':1 +HLTControlFlowMgr INFO ---> End of Initialization. This took 11377 ms +ApplicationMgr INFO Application Manager Initialized successfully +JobOptionsSvc INFO Properties are dumped into "tupling.test_davinci_tupling_weightedrelation_trackvariables.joboptsdump" +FunctorFactory INFO Reusing functor library: "/run/user/134901/FunctorJitLib_0xdfe05902db3347cc_0x8c576899dae8203c.so" +DeFTDetector INFO Current FT geometry version = 64 +ApplicationMgr INFO Application Manager Started successfully +EventPersistencySvc INFO Added successfully Conversion service:RootCnvSvc +EventSelector INFO Stream:EventSelector.DataStreamTool_1 Def:DATAFILE='root://eoslhcb.cern.ch//eos/lhcb/wg/dpa/wp3/tests/B2JpsiK_spruce.dst' SVC='Gaudi::RootEvtSelector' OPT='READ' IgnoreChecksum='YES' +HLTControlFlowMgr INFO Will measure time between events 10 and 90 (stop might be some events later) +HLTControlFlowMgr INFO Starting loop on events +EventSelector SUCCESS Reading Event record 1. Record number within stream 1: 1 +Hlt2 WARNING TCK obtained from rawbank seems to be 0 -- blindly ASSUMING that the current HltANNSvc somehow has the same configuration as when the input data was written. Proceed at your own risk, good luck... +RFileCnv INFO opening Root file "davinci_ntuple_ft.root" for writing +RCWNTupleCnv INFO Booked TTree with ID: DecayTree "DecayTree" in directory davinci_ntuple_ft.root:/Tuple +EventSelector SUCCESS Reading Event record 2. Record number within stream 1: 2 +EventSelector SUCCESS Reading Event record 3. Record number within stream 1: 3 +EventSelector SUCCESS Reading Event record 4. Record number within stream 1: 4 +EventSelector SUCCESS Reading Event record 5. Record number within stream 1: 5 +EventSelector SUCCESS Reading Event record 6. Record number within stream 1: 6 +EventSelector SUCCESS Reading Event record 7. Record number within stream 1: 7 +EventSelector SUCCESS Reading Event record 8. Record number within stream 1: 8 +EventSelector SUCCESS Reading Event record 9. Record number within stream 1: 9 +EventSelector SUCCESS Reading Event record 10. Record number within stream 1: 10 +HLTControlFlowMgr INFO Timing started at: 13:46:45 +EventSelector SUCCESS Reading Event record 11. Record number within stream 1: 11 +EventSelector SUCCESS Reading Event record 12. Record number within stream 1: 12 +EventSelector SUCCESS Reading Event record 13. Record number within stream 1: 13 +EventSelector SUCCESS Reading Event record 14. Record number within stream 1: 14 +EventSelector SUCCESS Reading Event record 15. Record number within stream 1: 15 +EventSelector SUCCESS Reading Event record 16. 
Record number within stream 1: 16 +EventSelector SUCCESS Reading Event record 17. Record number within stream 1: 17 +EventSelector SUCCESS Reading Event record 18. Record number within stream 1: 18 +EventSelector SUCCESS Reading Event record 19. Record number within stream 1: 19 +EventSelector SUCCESS Reading Event record 20. Record number within stream 1: 20 +EventSelector SUCCESS Reading Event record 21. Record number within stream 1: 21 +EventSelector SUCCESS Reading Event record 22. Record number within stream 1: 22 +EventSelector SUCCESS Reading Event record 23. Record number within stream 1: 23 +EventSelector SUCCESS Reading Event record 24. Record number within stream 1: 24 +EventSelector SUCCESS Reading Event record 25. Record number within stream 1: 25 +EventSelector SUCCESS Reading Event record 26. Record number within stream 1: 26 +EventSelector SUCCESS Reading Event record 27. Record number within stream 1: 27 +EventSelector SUCCESS Reading Event record 28. Record number within stream 1: 28 +EventSelector SUCCESS Reading Event record 29. Record number within stream 1: 29 +EventSelector SUCCESS Reading Event record 30. Record number within stream 1: 30 +EventSelector SUCCESS Reading Event record 31. Record number within stream 1: 31 +EventSelector SUCCESS Reading Event record 32. Record number within stream 1: 32 +EventSelector SUCCESS Reading Event record 33. Record number within stream 1: 33 +EventSelector SUCCESS Reading Event record 34. Record number within stream 1: 34 +EventSelector SUCCESS Reading Event record 35. Record number within stream 1: 35 +EventSelector SUCCESS Reading Event record 36. Record number within stream 1: 36 +EventSelector SUCCESS Reading Event record 37. Record number within stream 1: 37 +EventSelector SUCCESS Reading Event record 38. Record number within stream 1: 38 +EventSelector SUCCESS Reading Event record 39. Record number within stream 1: 39 +EventSelector SUCCESS Reading Event record 40. Record number within stream 1: 40 +EventSelector SUCCESS Reading Event record 41. Record number within stream 1: 41 +EventSelector SUCCESS Reading Event record 42. Record number within stream 1: 42 +EventSelector SUCCESS Reading Event record 43. Record number within stream 1: 43 +EventSelector SUCCESS Reading Event record 44. Record number within stream 1: 44 +EventSelector SUCCESS Reading Event record 45. Record number within stream 1: 45 +EventSelector SUCCESS Reading Event record 46. Record number within stream 1: 46 +EventSelector SUCCESS Reading Event record 47. Record number within stream 1: 47 +EventSelector SUCCESS Reading Event record 48. Record number within stream 1: 48 +EventSelector SUCCESS Reading Event record 49. Record number within stream 1: 49 +EventSelector SUCCESS Reading Event record 50. Record number within stream 1: 50 +EventSelector SUCCESS Reading Event record 51. Record number within stream 1: 51 +EventSelector SUCCESS Reading Event record 52. Record number within stream 1: 52 +EventSelector SUCCESS Reading Event record 53. Record number within stream 1: 53 +EventSelector SUCCESS Reading Event record 54. Record number within stream 1: 54 +EventSelector SUCCESS Reading Event record 55. Record number within stream 1: 55 +EventSelector SUCCESS Reading Event record 56. Record number within stream 1: 56 +EventSelector SUCCESS Reading Event record 57. Record number within stream 1: 57 +EventSelector SUCCESS Reading Event record 58. Record number within stream 1: 58 +EventSelector SUCCESS Reading Event record 59. 
Record number within stream 1: 59 +EventSelector SUCCESS Reading Event record 60. Record number within stream 1: 60 +EventSelector SUCCESS Reading Event record 61. Record number within stream 1: 61 +EventSelector SUCCESS Reading Event record 62. Record number within stream 1: 62 +EventSelector SUCCESS Reading Event record 63. Record number within stream 1: 63 +EventSelector SUCCESS Reading Event record 64. Record number within stream 1: 64 +EventSelector SUCCESS Reading Event record 65. Record number within stream 1: 65 +EventSelector SUCCESS Reading Event record 66. Record number within stream 1: 66 +EventSelector SUCCESS Reading Event record 67. Record number within stream 1: 67 +EventSelector SUCCESS Reading Event record 68. Record number within stream 1: 68 +EventSelector SUCCESS Reading Event record 69. Record number within stream 1: 69 +EventSelector SUCCESS Reading Event record 70. Record number within stream 1: 70 +EventSelector SUCCESS Reading Event record 71. Record number within stream 1: 71 +EventSelector SUCCESS Reading Event record 72. Record number within stream 1: 72 +EventSelector SUCCESS Reading Event record 73. Record number within stream 1: 73 +EventSelector SUCCESS Reading Event record 74. Record number within stream 1: 74 +EventSelector SUCCESS Reading Event record 75. Record number within stream 1: 75 +EventSelector SUCCESS Reading Event record 76. Record number within stream 1: 76 +EventSelector SUCCESS Reading Event record 77. Record number within stream 1: 77 +EventSelector SUCCESS Reading Event record 78. Record number within stream 1: 78 +EventSelector SUCCESS Reading Event record 79. Record number within stream 1: 79 +EventSelector SUCCESS Reading Event record 80. Record number within stream 1: 80 +EventSelector SUCCESS Reading Event record 81. Record number within stream 1: 81 +EventSelector SUCCESS Reading Event record 82. Record number within stream 1: 82 +EventSelector SUCCESS Reading Event record 83. Record number within stream 1: 83 +EventSelector SUCCESS Reading Event record 84. Record number within stream 1: 84 +EventSelector SUCCESS Reading Event record 85. Record number within stream 1: 85 +EventSelector SUCCESS Reading Event record 86. Record number within stream 1: 86 +EventSelector SUCCESS Reading Event record 87. Record number within stream 1: 87 +EventSelector SUCCESS Reading Event record 88. Record number within stream 1: 88 +EventSelector SUCCESS Reading Event record 89. Record number within stream 1: 89 +EventSelector SUCCESS Reading Event record 90. Record number within stream 1: 90 +HLTControlFlowMgr INFO Timing stopped at: 13:46:46 +EventSelector SUCCESS Reading Event record 91. Record number within stream 1: 91 +EventSelector SUCCESS Reading Event record 92. Record number within stream 1: 92 +EventSelector SUCCESS Reading Event record 93. Record number within stream 1: 93 +EventSelector SUCCESS Reading Event record 94. Record number within stream 1: 94 +EventSelector SUCCESS Reading Event record 95. Record number within stream 1: 95 +EventSelector SUCCESS Reading Event record 96. Record number within stream 1: 96 +EventSelector SUCCESS Reading Event record 97. Record number within stream 1: 97 +EventSelector SUCCESS Reading Event record 98. Record number within stream 1: 98 +EventSelector SUCCESS Reading Event record 99. Record number within stream 1: 99 +EventSelector SUCCESS Reading Event record 100. 
Record number within stream 1: 100 +HLTControlFlowMgr INFO ---> Loop over 100 Events Finished - WSS 3201.05, timed 80 Events: 794 ms, Evts/s = 100.756 +HDRFilter_B2JpsiK INFO Number of counters : 1 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + |*"#passed" | 100 | 100 |( 100.0000 +- 0.000000)% | +ToolSvc.HltFactory INFO Number of counters : 1 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + | "# loaded from PYTHON" | 1 | +Tuple INFO Number of counters : 7 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + | "# events with multiple candidates for field B" | 2 | + | "# events with multiple candidates for field Jpsi"| 2 | + | "# events with multiple candidates for field Kp"| 2 | + | "# non-empty events for field B" | 100 | + | "# non-empty events for field Jpsi" | 100 | + | "# non-empty events for field Kp" | 100 | + | "# processed events" | 100 | +WeightedRelTableAlg#1 INFO Number of counters : 3 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + | "#InputCandidates" | 100 | 8127 | 81.270 | + | "#InputParticles" | 100 | 102 | 1.0200 | + | "#OutputParticles" | 2729 | 0 | 0.0000 | +ApplicationMgr INFO Application Manager Stopped successfully +FSROutputStreamDstWriter INFO Set up File Summary Record +FSROutputStreamDstWriter INFO Events output: 1 +Tuple SUCCESS Booked 1 N-Tuples and 0 Event Tag Collections +Tuple SUCCESS List of booked N-Tuples in directory "FILE1/Tuple" +Tuple SUCCESS ID=DecayTree Title="DecayTree" #items=32 {B_THOR_P,B_THOR_PT,B_THOR_MASS,B_First_P,B_First_PT,B_Sum_P,B_Sum_PT,B_Max_P,B_Ma} +HLTControlFlowMgr INFO Memory pool: used 0.00341431 +/- 4.01611e-05 MiB (min: 0, max: 0) in 1 +/- 0 blocks (allocated >once in 0 +/- 0% events). Allocated capacity was 10 +/- 0 MiB (min: 10, max: 10) and 36.62 +/- 0.434 (min: 36, max: 67) requests were served +HLTControlFlowMgr INFO Timing table: +HLTControlFlowMgr INFO Average ticks per millisecond: 1000000 +HLTControlFlowMgr INFO + | Name of Algorithm | Execution Count | Total Time / s | Avg. 
Time / us | + | "Gaudi__Hive__FetchDataFromFile#5" | 100 | 0.922 | 9227.083 | + | "LHCb__UnpackRawEvent" | 100 | 0.049 | 494.677 | + | "HltPackedBufferDecoder" | 100 | 0.173 | 1731.571 | + | "CaloHypoUnpacker" | 100 | 0.002 | 21.215 | + | "CaloHypoUnpacker#1" | 100 | 0.000 | 2.599 | + | "CaloHypoUnpacker#2" | 100 | 0.001 | 10.229 | + | "CaloHypoUnpacker#3" | 100 | 0.000 | 2.393 | + | "ParticleUnpacker#4" | 100 | 0.000 | 7.916 | + | "DummyEventTime" | 100 | 0.263 | 2634.782 | + | "reserveIOV" | 100 | 0.039 | 396.743 | + | "ParticleUnpacker#3" | 100 | 0.003 | 32.495 | + | "RecVertexUnpacker#3" | 100 | 0.017 | 171.344 | + | "RecV1ToPVConverter" | 100 | 0.305 | 3053.432 | + | "WeightedRelTableAlg#1" | 100 | 0.051 | 518.775 | + | "Tuple" | 100 | 0.676 | 6764.783 | + | "Hlt2" | 100 | 0.032 | 327.096 | + | "HDRFilter_B2JpsiK" | 100 | 0.104 | 1044.627 | + | "MuonPIDUnpacker" | 100 | 0.014 | 143.943 | + | "P2VRelationUnpacker" | 100 | 0.000 | 3.132 | + | "P2VRelationUnpacker#1" | 100 | 0.000 | 2.004 | + | "P2VRelationUnpacker#2" | 100 | 0.000 | 1.845 | + | "P2VRelationUnpacker#3" | 100 | 0.004 | 40.595 | + | "P2VRelationUnpacker#4" | 100 | 0.000 | 8.899 | + | "ParticleUnpacker" | 100 | 0.001 | 11.987 | + | "ParticleUnpacker#1" | 100 | 0.000 | 2.296 | + | "ParticleUnpacker#2" | 100 | 0.000 | 6.569 | + | "ParticleUnpacker#5" | 100 | 0.000 | 2.413 | + | "ParticleUnpacker#6" | 100 | 0.000 | 6.074 | + | "ProtoParticleUnpacker" | 100 | 0.290 | 2901.331 | + | "ProtoParticleUnpacker#1" | 100 | 0.000 | 3.396 | + | "RecSummaryUnpacker" | 100 | 0.000 | 3.281 | + | "RecVertexUnpacker" | 100 | 0.050 | 504.626 | + | "RecVertexUnpacker#1" | 100 | 0.000 | 3.070 | + | "RecVertexUnpacker#2" | 100 | 0.000 | 3.187 | + | "FSROutputStreamDstWriter" | 100 | 0.014 | 144.513 | + | "RichPIDUnpacker" | 100 | 0.002 | 25.252 | + | "TrackUnpacker" | 100 | 0.031 | 317.595 | + | "Gaudi__Hive__FetchDataFromFile#6" | 100 | 0.000 | 8.289 | + | "UnpackMCParticle" | 100 | 0.052 | 523.529 | + | "Gaudi__Hive__FetchDataFromFile#7" | 100 | 0.000 | 5.657 | + | "UnpackMCVertex" | 100 | 0.000 | 6.053 | + | "VertexUnpacker" | 100 | 0.000 | 3.526 | + | "VertexUnpacker#1" | 100 | 0.000 | 2.222 | + | "VertexUnpacker#2" | 100 | 0.000 | 1.897 | + | "VertexUnpacker#3" | 100 | 0.000 | 4.227 | + | "VertexUnpacker#4" | 100 | 0.038 | 384.413 | + | "VertexUnpacker#5" | 100 | 0.000 | 3.202 | + | "VertexUnpacker#6" | 100 | 0.000 | 7.346 | + +HLTControlFlowMgr INFO StateTree: CFNode #executed #passed +LAZY_AND: DaVinci #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + NONLAZY_OR: FileSummaryRecords #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + LAZY_AND: GenFSR #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + RecordStream/FSROutputStreamDstWriter #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + NONLAZY_OR: UserAnalysis #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + LAZY_AND: default #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + LHCb__UnpackRawEvent/LHCb__UnpackRawEvent #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + HltPackedBufferDecoder/HltPackedBufferDecoder #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackMCParticle/UnpackMCParticle #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackMCVertex/UnpackMCVertex #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + RecVertexUnpacker/RecVertexUnpacker #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + RecVertexUnpacker/RecVertexUnpacker#1 #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + RecVertexUnpacker/RecVertexUnpacker#2 #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + RecVertexUnpacker/RecVertexUnpacker#3 #=100 Sum=100 
Eff=|( 100.0000 +- 0.00000 )%| + VertexUnpacker/VertexUnpacker #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + VertexUnpacker/VertexUnpacker#1 #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + VertexUnpacker/VertexUnpacker#2 #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + VertexUnpacker/VertexUnpacker#3 #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + VertexUnpacker/VertexUnpacker#4 #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + VertexUnpacker/VertexUnpacker#5 #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + VertexUnpacker/VertexUnpacker#6 #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + TrackUnpacker/TrackUnpacker #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + RichPIDUnpacker/RichPIDUnpacker #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + MuonPIDUnpacker/MuonPIDUnpacker #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + CaloHypoUnpacker/CaloHypoUnpacker #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + CaloHypoUnpacker/CaloHypoUnpacker#1 #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + CaloHypoUnpacker/CaloHypoUnpacker#2 #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + CaloHypoUnpacker/CaloHypoUnpacker#3 #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + ProtoParticleUnpacker/ProtoParticleUnpacker #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + ProtoParticleUnpacker/ProtoParticleUnpacker#1 #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + ParticleUnpacker/ParticleUnpacker #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + ParticleUnpacker/ParticleUnpacker#1 #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + ParticleUnpacker/ParticleUnpacker#2 #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + ParticleUnpacker/ParticleUnpacker#3 #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + ParticleUnpacker/ParticleUnpacker#4 #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + ParticleUnpacker/ParticleUnpacker#5 #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + ParticleUnpacker/ParticleUnpacker#6 #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + RecSummaryUnpacker/RecSummaryUnpacker #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + P2VRelationUnpacker/P2VRelationUnpacker #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + P2VRelationUnpacker/P2VRelationUnpacker#1 #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + P2VRelationUnpacker/P2VRelationUnpacker#2 #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + P2VRelationUnpacker/P2VRelationUnpacker#3 #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + P2VRelationUnpacker/P2VRelationUnpacker#4 #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + LoKi__HDRFilter/HDRFilter_B2JpsiK #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + FunTupleBase_Particles/Tuple #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + +HLTControlFlowMgr INFO Histograms converted successfully according to request. 
+ToolSvc INFO Removing all tools created by ToolSvc +RootCnvSvc INFO Disconnected data IO:79F21D74-0775-11ED-AC44-54AB3A714112 [root://eoslhcb.cern.ch//eos/lhcb/wg/dpa/wp3/tests/B2JpsiK_spruce.dst] +RFileCnv INFO dumping contents of /NTUPLES/FILE1 +TFile: name=davinci_ntuple_ft.root, title=Gaudi Trees, option=CREATE +****************************************************************************** +*Tree :DecayTree : DecayTree * +*Entries : 102 : Total = 33904 bytes File Size = 12973 * +* : : Tree compression factor = 1.56 * +****************************************************************************** +*Br 0 :B_THOR_P : B_THOR_P/F * +*Entries : 102 : Total Size= 993 bytes File Size = 488 * +*Baskets : 1 : Basket Size= 32000 bytes Compression= 1.00 * +*............................................................................* +*Br 1 :B_THOR_PT : B_THOR_PT/F * +*Entries : 102 : Total Size= 998 bytes File Size = 489 * +*Baskets : 1 : Basket Size= 32000 bytes Compression= 1.00 * +*............................................................................* +*Br 2 :B_THOR_MASS : B_THOR_MASS/D * +*Entries : 102 : Total Size= 1424 bytes File Size = 869 * +*Baskets : 1 : Basket Size= 32000 bytes Compression= 1.03 * +*............................................................................* +*Br 3 :B_First_P : B_First_P/F * +*Entries : 102 : Total Size= 998 bytes File Size = 489 * +*Baskets : 1 : Basket Size= 32000 bytes Compression= 1.00 * +*............................................................................* +*Br 4 :B_First_PT : B_First_PT/F * +*Entries : 102 : Total Size= 1003 bytes File Size = 490 * +*Baskets : 1 : Basket Size= 32000 bytes Compression= 1.00 * +*............................................................................* +*Br 5 :B_Sum_P : B_Sum_P/F * +*Entries : 102 : Total Size= 988 bytes File Size = 487 * +*Baskets : 1 : Basket Size= 32000 bytes Compression= 1.00 * +*............................................................................* +*Br 6 :B_Sum_PT : B_Sum_PT/F * +*Entries : 102 : Total Size= 993 bytes File Size = 488 * +*Baskets : 1 : Basket Size= 32000 bytes Compression= 1.00 * +*............................................................................* +*Br 7 :B_Max_P : B_Max_P/F * +*Entries : 102 : Total Size= 988 bytes File Size = 487 * +*Baskets : 1 : Basket Size= 32000 bytes Compression= 1.00 * +*............................................................................* +*Br 8 :B_Max_PT : B_Max_PT/F * +*Entries : 102 : Total Size= 993 bytes File Size = 488 * +*Baskets : 1 : Basket Size= 32000 bytes Compression= 1.00 * +*............................................................................* +*Br 9 :B_Min_P : B_Min_P/F * +*Entries : 102 : Total Size= 988 bytes File Size = 487 * +*Baskets : 1 : Basket Size= 32000 bytes Compression= 1.00 * +*............................................................................* +*Br 10 :B_Min_PT : B_Min_PT/F * +*Entries : 102 : Total Size= 993 bytes File Size = 488 * +*Baskets : 1 : Basket Size= 32000 bytes Compression= 1.00 * +*............................................................................* +*Br 11 :B_Asym_P : B_Asym_P/F * +*Entries : 102 : Total Size= 993 bytes File Size = 488 * +*Baskets : 1 : Basket Size= 32000 bytes Compression= 1.00 * +*............................................................................* +*Br 12 :B_Asym_PT : B_Asym_PT/F * +*Entries : 102 : Total Size= 998 bytes File Size = 489 * +*Baskets : 1 : Basket Size= 32000 bytes Compression= 1.00 * 
+*............................................................................* +*Br 13 :B_Num_tracks : B_Num_tracks/I * +*Entries : 102 : Total Size= 1013 bytes File Size = 267 * +*Baskets : 1 : Basket Size= 32000 bytes Compression= 1.84 * +*............................................................................* +*Br 14 :Jpsi_THOR_P : Jpsi_THOR_P/F * +*Entries : 102 : Total Size= 1008 bytes File Size = 491 * +*Baskets : 1 : Basket Size= 32000 bytes Compression= 1.00 * +*............................................................................* +*Br 15 :Jpsi_THOR_PT : Jpsi_THOR_PT/F * +*Entries : 102 : Total Size= 1013 bytes File Size = 492 * +*Baskets : 1 : Basket Size= 32000 bytes Compression= 1.00 * +*............................................................................* +*Br 16 :Kp_THOR_P : Kp_THOR_P/F * +*Entries : 102 : Total Size= 998 bytes File Size = 489 * +*Baskets : 1 : Basket Size= 32000 bytes Compression= 1.00 * +*............................................................................* +*Br 17 :Kp_THOR_PT : Kp_THOR_PT/F * +*Entries : 102 : Total Size= 1003 bytes File Size = 490 * +*Baskets : 1 : Basket Size= 32000 bytes Compression= 1.00 * +*............................................................................* +*Br 18 :Kp_HEAD_CMULT : Kp_HEAD_CMULT/I * +*Entries : 102 : Total Size= 1018 bytes File Size = 108 * +*Baskets : 1 : Basket Size= 32000 bytes Compression= 4.56 * +*............................................................................* +*Br 19 :Kp_HEAD_CP : Kp_HEAD_CP/F * +*Entries : 102 : Total Size= 1003 bytes File Size = 109 * +*Baskets : 1 : Basket Size= 32000 bytes Compression= 4.50 * +*............................................................................* +*Br 20 :Kp_HEAD_CPT : Kp_HEAD_CPT/F * +*Entries : 102 : Total Size= 1008 bytes File Size = 110 * +*Baskets : 1 : Basket Size= 32000 bytes Compression= 4.46 * +*............................................................................* +*Br 21 :Kp_HEAD_CPX : Kp_HEAD_CPX/F * +*Entries : 102 : Total Size= 1008 bytes File Size = 110 * +*Baskets : 1 : Basket Size= 32000 bytes Compression= 4.46 * +*............................................................................* +*Br 22 :Kp_HEAD_CPY : Kp_HEAD_CPY/F * +*Entries : 102 : Total Size= 1008 bytes File Size = 110 * +*Baskets : 1 : Basket Size= 32000 bytes Compression= 4.46 * +*............................................................................* +*Br 23 :Kp_HEAD_CPZ : Kp_HEAD_CPZ/F * +*Entries : 102 : Total Size= 1008 bytes File Size = 110 * +*Baskets : 1 : Basket Size= 32000 bytes Compression= 4.46 * +*............................................................................* +*Br 24 :Kp_HEAD_PASY : Kp_HEAD_PASY/F * +*Entries : 102 : Total Size= 1013 bytes File Size = 110 * +*Baskets : 1 : Basket Size= 32000 bytes Compression= 4.47 * +*............................................................................* +*Br 25 :Kp_HEAD_PTASY : Kp_HEAD_PTASY/F * +*Entries : 102 : Total Size= 1018 bytes File Size = 111 * +*Baskets : 1 : Basket Size= 32000 bytes Compression= 4.44 * +*............................................................................* +*Br 26 :Kp_HEAD_PXASY : Kp_HEAD_PXASY/F * +*Entries : 102 : Total Size= 1018 bytes File Size = 111 * +*Baskets : 1 : Basket Size= 32000 bytes Compression= 4.44 * +*............................................................................* +*Br 27 :Kp_HEAD_PYASY : Kp_HEAD_PYASY/F * +*Entries : 102 : Total Size= 1018 bytes File Size = 111 * +*Baskets : 1 : 
Basket Size= 32000 bytes Compression= 4.44 * +*............................................................................* +*Br 28 :Kp_HEAD_PZASY : Kp_HEAD_PZASY/F * +*Entries : 102 : Total Size= 1018 bytes File Size = 111 * +*Baskets : 1 : Basket Size= 32000 bytes Compression= 4.44 * +*............................................................................* +*Br 29 :indx : indx/I * +*Entries : 102 : Total Size= 973 bytes File Size = 102 * +*Baskets : 1 : Basket Size= 32000 bytes Compression= 4.75 * +*............................................................................* +*Br 30 :Kp_HEAD_DETA : Kp_HEAD_DETA[indx]/F * +*Entries : 102 : Total Size= 1516 bytes File Size = 303 * +*Baskets : 1 : Basket Size= 32000 bytes Compression= 3.00 * +*............................................................................* +*Br 31 :Kp_HEAD_DPHI : Kp_HEAD_DPHI[indx]/F * +*Entries : 102 : Total Size= 1516 bytes File Size = 303 * +*Baskets : 1 : Basket Size= 32000 bytes Compression= 3.00 * +*............................................................................* +NTupleSvc INFO NTuples saved successfully +ApplicationMgr INFO Application Manager Finalized successfully +ApplicationMgr INFO Application Manager Terminated successfully
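As a quick cross-check of the new reference output, the branch content dumped above (Tuple/DecayTree with 102 entries and branches such as B_Sum_PT, B_Num_tracks, Kp_HEAD_CPT) can be inspected directly from the produced ntuple. The snippet below is only a suggested check: uproot is not used by the example itself and is assumed to be available in the analysis environment.

```python
# Inspect the ntuple written by the example (davinci_ntuple_ft.root).
# Assumes uproot is installed; values quoted in comments come from the
# reference log above.
import uproot

with uproot.open("davinci_ntuple_ft.root") as f:
    tree = f["Tuple/DecayTree"]
    print(tree.num_entries)     # expect 102 candidates, as in the TTree dump
    print(sorted(tree.keys()))  # 32 branches: B_THOR_P, B_Sum_PT, Kp_HEAD_CPT, ...
    arrays = tree.arrays(
        ["B_Sum_PT", "B_Num_tracks", "Kp_HEAD_CPT"], library="np")
    print(arrays["B_Num_tracks"][:10])  # number of related tagging tracks per candidate
```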