diff --git a/DaVinciTests/python/DaVinciTests/dv_run_large_job_prtest.py b/DaVinciTests/python/DaVinciTests/dv_run_large_job_prtest.py new file mode 100644 index 0000000000000000000000000000000000000000..1f122bb1d7e9bf5610a6ae147fec04ddd8632337 --- /dev/null +++ b/DaVinciTests/python/DaVinciTests/dv_run_large_job_prtest.py @@ -0,0 +1,437 @@ +############################################################################### +# (c) Copyright 2021-2024 CERN for the benefit of the LHCb Collaboration # +# # +# This software is distributed under the terms of the GNU General Public # +# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # +# # +# In applying this licence, CERN does not waive the privileges and immunities # +# granted to it by virtue of its status as an Intergovernmental Organization # +# or submit itself to any jurisdiction. # +############################################################################### +""" +Example of a DaVinci job filling a large number of branches. For checking memory usage in a prtest. + +rst_title: DV run large job +rst_description: This runs a significant job to check that memory usage does not get out of hand. +Adapted from the AllFunctors test. + +""" +__author__ = "P. Koppenburg, M. Smith" +__date__ = "2025-02-05" + +import Functors as F +import FunTuple.functorcollections as FC +from FunTuple import FunctorCollection +from FunTuple import FunTuple_Particles as Funtuple +from PyConf.reading import get_particles, get_pvs +from DaVinci.algorithms import create_lines_filter +from PyConf.reading import get_odin # get_decreports, +from DecayTreeFitter import DecayTreeFitter + +from DaVinci import Options, make_config + +_basic = "basic" +_composite = "composite" +_toplevel = "toplevel" +spruce_line_bu2kmumu = "SpruceRD_BuToKpMuMu" +spruce_line_bd2kstmumu = "SpruceRD_B0ToKpPimMuMu" + +hlt1_lines = ["Hlt1TrackMVADecision", "Hlt1TwoTrackMVADecision"] + +hlt2_lines = [ + "Hlt2RD_BuToKpEE", + "Hlt2RD_B0ToKpKmEE", + "Hlt2RD_B0ToKpKmMuMu", + "Hlt2RD_B0ToKpPimMuMu", + "Hlt2RD_B0ToPipPimMuMu", + "Hlt2RD_B0ToKpPimEE", + "Hlt2RD_BuToHpMuMu_Incl", + # b_to_ll_hlt2 + "Hlt2RD_BToHH_Incl", + "Hlt2RD_BToMuMu", + "Hlt2RD_BToEE", + # b_to_hemu_control_modes + "Hlt2RD_BuToKpJpsi_JpsiToMuMu", + "Hlt2RD_BuToKpJpsi_JpsiToEE", + "Hlt2RD_LbToPKJpsi_JpsiToMuMu", + "Hlt2RD_BdToKstJpsi_KstToKpPim_JpsiToMuMu", + "Hlt2RD_BdToKstJpsi_KstToKpPim_JpsiToEE", + # qqbar_to_ll_tupling + "Hlt2RD_UpsilonToMuMu", + "Hlt2RD_UpsilonToEE", + "Hlt2RD_PhiToEE", + "Hlt2RD_Upsilon2ToUpsilon1PiPi_Upsilon1ToMuMu", + "Hlt2RD_Upsilon2ToUpsilon1PiPi_Upsilon1ToEE", + # charmonium_to_dimuon_tupling + "Hlt2_JpsiToMuMu", + "Hlt2_Psi2SToMuMu", + # b_to_v0_ll + "Hlt2RD_BdToKSEE_LL", + "Hlt2RD_BdToKSMuMu_LL", + "Hlt2RD_LbToLEE_LL", + "Hlt2RD_LbToLMuMu_LL", + # strange + "Hlt2RD_KpToPiPiPi", + "Hlt2RD_KS0ToPiMu", + "Hlt2RD_KS0ToPiPi", + "Hlt2RD_KS0ToMuMu", + "Hlt2RD_KS0ToPiPiEE_Loose", + "Hlt2RD_Lambda0ToPPi_Tight", + "Hlt2RD_Lambda0ToPPi", + "Hlt2RD_Xi0ToPPi_Tight", + "Hlt2RD_XiMinusToPPiPi_Tight", + "Hlt2RD_XiMinusToLambdaPi_Tight", + "Hlt2RD_SigmaPlusToPPi0Resolved_Tight", + "Hlt2RD_SigmaPlusToPPi0Merged_Tight", + "Hlt2RD_SigmaPlusToPMuMu", + "Hlt2RD_SigmaPlusToPEE", + "Hlt2RD_OmegaMinusToLambdaK", + "Hlt2RD_OmegaMinusToLambdaPi", + # b to multilepton + "Hlt2RD_Displaced4Mu_Incl", + # rare tau decay lines + "Hlt2RD_TauToMuMuMu", + "Hlt2RD_DsToPhiPi_PhiToEE", + "Hlt2RD_DsToPhiPi_PhiToMuMu", +] + + +def all_variables(pvs, dtf, ptype, comp_name=None, candidates=None, ftAlg=None): + """ + 
Function that returns a dictionary of functors that work.
+
+    Functors are listed in the order of https://lhcbdoc.web.cern.ch/lhcbdoc/moore/master/selection/thor_functors_reference.html#module-Functors
+    """
+    if ptype not in [_basic, _composite, _toplevel]:
+        raise Exception(f"Expected {_basic}, {_composite} or {_toplevel}. Got {ptype}")
+    all_vars = FunctorCollection({})
+
+    comp = _composite == ptype or _toplevel == ptype  # is composite
+    basic = _basic == ptype  # is not composite
+    top = _toplevel == ptype  # the B
+
+    # First import everything that comes in functorcollections
+    all_vars += FC.Kinematics()
+    if basic:
+        all_vars += FC.ParticleID(extra_info=True)
+    if candidates:
+        all_vars += FC.HltTisTos(
+            selection_type="Hlt1", trigger_lines=hlt1_lines, data=candidates
+        )
+        all_vars += FC.HltTisTos(
+            selection_type="Hlt2", trigger_lines=hlt2_lines, data=candidates
+        )
+
+    if comp:
+        all_vars.update({"ALV": F.ALV(Child1=1, Child2=2)})
+
+    if comp:  # all these require a vertex
+        all_vars.update({"BPVCORRM": F.BPVCORRM(pvs)})
+        all_vars.update({"BPVCORRMERR": F.BPVCORRMERR(pvs)})
+        all_vars.update({"BPVDIRA": F.BPVDIRA(pvs)})
+        all_vars.update({"BPVDLS": F.BPVDLS(pvs)})
+        all_vars.update({"BPVETA": F.BPVETA(pvs)})
+        all_vars.update({"BPVFD": F.BPVFD(pvs)})
+        all_vars.update({"BPVFDCHI2": F.BPVFDCHI2(pvs)})
+        all_vars.update({"BPVFDIR": F.BPVFDIR(pvs)})
+        all_vars.update({"BPVFDVEC": F.BPVFDVEC(pvs)})
+
+    all_vars.update({"BPVIP": F.BPVIP(pvs)})
+    all_vars.update({"BPVIPCHI2": F.BPVIPCHI2(pvs)})
+    all_vars.update({"BPVX": F.BPVX(pvs)})
+    all_vars.update({"BPVY": F.BPVY(pvs)})
+    all_vars.update({"BPVZ": F.BPVZ(pvs)})
+
+    all_vars.update({"ALLPVX[nPVs]": F.ALLPVX(pvs)})
+    all_vars.update({"ALLPVY[nPVs]": F.ALLPVY(pvs)})
+    all_vars.update({"ALLPVZ[nPVs]": F.ALLPVZ(pvs)})
+
+    if comp:  # all these require a vertex
+        all_vars.update({"ALLPV_FD[nPVs]": F.ALLPV_FD(pvs)})
+        all_vars.update({"ALLPV_IP[nPVs]": F.ALLPV_IP(pvs)})
+        all_vars.update({"BPVLTIME": F.BPVLTIME(pvs)})
+        all_vars.update({"BPVVDRHO": F.BPVVDRHO(pvs)})
+        all_vars.update({"BPVVDX": F.BPVVDX(pvs)})
+        all_vars.update({"BPVVDY": F.BPVVDY(pvs)})
+        all_vars.update({"BPVVDZ": F.BPVVDZ(pvs)})
+
+    all_vars.update({"CHARGE": F.CHARGE})
+    all_vars.update({"CHI2": F.CHI2})
+    all_vars.update({"CHI2DOF": F.CHI2DOF})
+    if top:  # apply this only to B
+        all_vars.update({"CHILD1_PT": F.CHILD(1, F.PT)})  # example of CHILD
+        all_vars.update({"Ds_END_VZ": F.CHILD(1, F.END_VZ)})
+        all_vars.update({"Delta_END_VZ_DsB0": F.CHILD(1, F.END_VZ) - F.END_VZ})
+
+    if comp:
+        all_vars.update({"DOCA": F.SDOCA(Child1=1, Child2=2)})
+        all_vars.update({"DOCACHI2": F.SDOCACHI2(Child1=1, Child2=2)})
+        all_vars.update({"END_VRHO": F.END_VRHO})
+        all_vars.update({"END_VX": F.END_VX})
+        all_vars.update({"END_VY": F.END_VY})
+        all_vars.update({"END_VZ": F.END_VZ})
+
+    all_vars.update({"ETA": F.ETA})
+    all_vars.update({"FOURMOMENTUM": F.FOURMOMENTUM})
+    all_vars.update({"ISBASIC": F.ISBASICPARTICLE})
+
+    if basic:
+        all_vars.update({"GHOSTPROB": F.GHOSTPROB})
+        all_vars.update({"ISMUON": F.ISMUON})
+        all_vars.update({"INMUON": F.INMUON})
+        all_vars.update({"INECAL": F.INECAL})
+        all_vars.update({"INHCAL": F.INHCAL})
+        all_vars.update({"HASBREM": F.HASBREM})
+        all_vars.update({"HASBREMADDED": F.HASBREMADDED})
+        all_vars.update({"BREMENERGY": F.BREMENERGY})
+        all_vars.update({"BREMBENDCORR": F.BREMBENDCORR})
+        all_vars.update({"BREMPIDE": F.BREMPIDE})
+        all_vars.update({"ECALPIDE": F.ECALPIDE})
+        all_vars.update({"ECALPIDMU": F.ECALPIDMU})
+        all_vars.update({"HCALPIDE": F.HCALPIDE})
+        
all_vars.update({"HCALPIDMU": F.HCALPIDMU}) + all_vars.update({"ELECTRONSHOWEREOP": F.ELECTRONSHOWEREOP}) + all_vars.update({"ELECTRONSHOWERDLL": F.ELECTRONSHOWERDLL}) + all_vars.update({"CLUSTERID": F.CLUSTERID}) + all_vars.update({"CLUSTERMATCH_CHI2": F.CLUSTERMATCH_CHI2}) + all_vars.update({"ELECTRONMATCH_CHI2": F.ELECTRONMATCH_CHI2}) + all_vars.update({"BREMHYPOMATCH_CHI2": F.BREMHYPOMATCH_CHI2}) + all_vars.update({"ELECTRONENERGY": F.ELECTRONENERGY}) + all_vars.update({"BREMHYPOENERGY": F.BREMHYPOENERGY}) + all_vars.update({"BREMHYPODELTAX": F.BREMHYPODELTAX}) + all_vars.update({"BREMTRACKBASEDENERGY": F.BREMTRACKBASEDENERGY}) + all_vars.update({"ELECTRONID": F.ELECTRONID}) + all_vars.update({"HCALEOP": F.HCALEOP}) + all_vars.update({"TRACK_MOM_": F.TRACK_MOMVEC}) + all_vars.update({"TRACK_POS_CLOSESTTOBEAM_": F.TRACK_POSVEC_CLOSESTTOBEAM}) + + all_vars.update({"IS_ABS_ID_pi": F.IS_ABS_ID("pi+")}) + all_vars.update({"IS_ID_pi": F.IS_ID("pi-")}) + all_vars.update({"PDG_MASS_pi": F.PDG_MASS("pi+")}) + all_vars.update({"SIGNED_DELTA_MASS_pi": F.SIGNED_DELTA_MASS("pi+")}) + all_vars.update({"ABS_DELTA_MASS_pi": F.ABS_DELTA_MASS("pi+")}) + all_vars.update({"IS_NOT_H": F.IS_NOT_H}) + all_vars.update({"IS_PHOTON": F.IS_PHOTON}) + all_vars.update({"THREE_MOM_COV_MATRIX": F.THREE_MOM_COV_MATRIX}) + all_vars.update({"POS_COV_MATRIX": F.POS_COV_MATRIX}) + all_vars.update({"MOM_POS_COV_MATRIX": F.MOM_POS_COV_MATRIX}) + all_vars.update({"THREE_MOM_POS_COV_MATRIX": F.THREE_MOM_POS_COV_MATRIX}) + + all_vars.update({"DTF_PT": dtf(F.PT)}) + all_vars.update({"DTF_BPVIPCHI2": dtf(F.BPVIPCHI2(pvs))}) + + all_vars.update( + { + "DTF_MASS_SmallestDELTAPT": F.MASS + @ F.TO + @ F.ENTRY_WITH_MIN_REL_VALUE_OF( + F.PT @ F.TO @ F.FORWARDARG0 - F.PT @ F.FORWARDARG1 + ).bind( + F.RELATIONS.bind(F.TES(dtf.OutputRelations), F.FORWARDARGS), + F.FORWARDARGS, + ) + } + ) + all_vars.update( + { + "DTF_MASS_BiggestDELTAPT": F.MASS + @ F.TO + @ F.ENTRY_WITH_MAX_REL_VALUE_OF( + F.PT @ F.TO @ F.FORWARDARG0 - F.PT @ F.FORWARDARG1 + ).bind( + F.RELATIONS.bind(F.TES(dtf.OutputRelations), F.FORWARDARGS), + F.FORWARDARGS, + ) + } + ) + + if top: + all_vars.update({"DTF_NITER": dtf.NITER}) + all_vars.update({"DTF_CHI2": dtf.CHI2}) + all_vars.update({"DTF_NDOF": dtf.NDOF}) + all_vars.update({"DTF_CHI2DOF": dtf.CHI2DOF}) + + if comp: + all_vars.update({"DTF_MASS": dtf.MASS}) + all_vars.update({"DTF_MASSERR": dtf.MASSERR}) + all_vars.update({"DTF_P": dtf.P}) + all_vars.update({"DTF_PERR": dtf.PERR}) + all_vars.update({"DTF_CTAU": dtf.CTAU}) + all_vars.update({"DTF_CTAUERR": dtf.CTAUERR}) + all_vars.update({"DTF_FD": dtf.FD}) + all_vars.update({"DTF_FDERR": dtf.FDERR}) + + all_vars.update({"MASS": F.MASS}) + if top and comp_name == "Jpsi": # B + all_vars.update({"MASSWITHHYPOTHESES": F.MASSWITHHYPOTHESES((3096.9, 493.7))}) + elif top and comp_name == "Kst": + all_vars.update({"MASSWITHHYPOTHESES": F.MASSWITHHYPOTHESES((3096.9, 892.0))}) + elif comp and comp_name == "Jpsi": + all_vars.update({"MASSWITHHYPOTHESES": F.MASSWITHHYPOTHESES((105.66, 105.66))}) + elif comp and comp_name == "Kst": + all_vars.update({"MASSWITHHYPOTHESES": F.MASSWITHHYPOTHESES((493.7, 139.57))}) + if comp: + all_vars.update({"MAXPT": F.MAX(F.PT)}) + all_vars.update({"MAXDOCA": F.MAXSDOCA}) + all_vars.update({"MAXDOCACHI2": F.MAXSDOCACHI2}) + + if comp: + all_vars.update({"MINPT": F.MIN(F.PT)}) + all_vars.update({"MINIP": F.MINIP(pvs)}) + all_vars.update({"MINIPCHI2": F.MINIPCHI2(pvs)}) + + if basic: + all_vars.update({"TRACKPT": F.TRACK_PT}) + 
all_vars.update({"TRACKHISTORY": F.VALUE_OR(-1) @ F.TRACKHISTORY @ F.TRACK}) + all_vars.update({"QOVERP": F.QOVERP @ F.TRACK}) + all_vars.update({"NDOF": F.VALUE_OR(-1) @ F.NDOF @ F.TRACK}) + all_vars.update({"NFTHITS": F.VALUE_OR(-1) @ F.NFTHITS @ F.TRACK}) + all_vars.update({"NHITS": F.VALUE_OR(-1) @ F.NHITS @ F.TRACK}) + all_vars.update({"NUTHITS": F.VALUE_OR(-1) @ F.NUTHITS @ F.TRACK}) + all_vars.update({"NVPHITS": F.VALUE_OR(-1) @ F.NVPHITS @ F.TRACK}) + all_vars.update({"TRACKHASVELO": F.VALUE_OR(-1) @ F.TRACKHASVELO @ F.TRACK}) + all_vars.update({"TRACKHASUT": F.VALUE_OR(-1) @ F.TRACKHASUT @ F.TRACK}) + all_vars.update({"STATE_AT_T1": F.EXTRAPOLATE_TRACK(7931.0) @ F.TRACK}) + + all_vars.update({"OBJECT_KEY": F.OBJECT_KEY}) + + all_vars.update({"PHI": F.PHI}) + + all_vars.update({"ABS_PX": F.ABS @ F.PX}) + + all_vars.update({"REFERENCEPOINT_X": F.REFERENCEPOINT_X}) + all_vars.update({"REFERENCEPOINT_Y": F.REFERENCEPOINT_Y}) + all_vars.update({"REFERENCEPOINT_Z": F.REFERENCEPOINT_Z}) + + if comp: + all_vars.update({"SDOCA": F.SDOCA(1, 2)}) + all_vars.update({"SDOCACHI2": F.SDOCACHI2(1, 2)}) + if basic: + all_vars.update({"SHOWER_SHAPE": F.CALO_NEUTRAL_SHOWER_SHAPE}) + + if comp: + all_vars.update({"SUBCOMB12_MM": F.SUBCOMB(Functor=F.MASS, Indices=(1, 2))}) + all_vars.update({"SUMPT": F.SUM(F.PT)}) + + if basic: + all_vars.update({"TX": F.TX}) + all_vars.update({"TY": F.TY}) + + return all_vars + + +def event_variables(PVs, ODIN, decreports, lines, spruce_line): + """ + event variables + """ + + evt_vars = FunctorCollection({}) + evt_vars += FC.EventInfo() + + evt_vars += FC.SelectionInfo(selection_type="Spruce", trigger_lines=lines) + evt_vars += FC.SelectionInfo(selection_type="Hlt1", trigger_lines=hlt1_lines) + evt_vars += FC.SelectionInfo(selection_type="Hlt2", trigger_lines=hlt2_lines) + + if ODIN: + evt_vars.update({"EVENTTYPE": F.EVENTTYPE(ODIN)}) + + evt_vars.update({"PV_SIZE": F.SIZE(PVs)}) + + if decreports: + evt_vars.update({"TCK": F.TCK(decreports)}) + + return evt_vars + + +def main(options: Options): + # get the particles from two lines + bu2kmumu_data = get_particles(f"/Event/Spruce/{spruce_line_bu2kmumu}/Particles") + bd2kstmumu_data = get_particles(f"/Event/Spruce/{spruce_line_bd2kstmumu}/Particles") + + # DecayTreeFitter Algorithm + v2_pvs = get_pvs() + # DecayTreeFitter Algorithm with "name" and "input_particles" as arguments + DTF_bu2kmumu = DecayTreeFitter(name="DTF_Bu2KMuMu", input_particles=bu2kmumu_data) + DTF_bd2kstmumu = DecayTreeFitter( + name="DTF_Bd2KstMuMu", input_particles=bd2kstmumu_data + ) + + # Definition of fields (branches) and functors + fields_bu2kmumu = { + "B": "[B+ -> (J/psi(1S) -> mu+ mu-) K+]CC", + "Jpsi": "[B+ -> ^(J/psi(1S) -> mu+ mu-) K+]CC", + "L1": "[B+ -> (J/psi(1S) -> ^mu+ mu-) K+]CC", + "L2": "[B+ -> (J/psi(1S) -> mu+ ^mu-) K+]CC", + "K": "[B+ -> (J/psi(1S) -> mu+ mu-) ^K+]CC", + } + variables_bu2kmumu = { + "B": all_variables(v2_pvs, DTF_bu2kmumu, _toplevel, "Jpsi"), + "Jpsi": all_variables(v2_pvs, DTF_bu2kmumu, _composite, "Jpsi"), + "L1": all_variables(v2_pvs, DTF_bu2kmumu, _basic), + "L2": all_variables(v2_pvs, DTF_bu2kmumu, _basic), + "K": all_variables(v2_pvs, DTF_bu2kmumu, _basic), + } + + fields_bd2kstmumu = { + "B": "[B0 -> (J/psi(1S) -> mu+ mu-) (K*(892)0 -> K+ pi-)]CC", + "Jpsi": "[B0-> ^(J/psi(1S) -> mu+ mu-) (K*(892)0 -> K+ pi-)]CC", + "L1": "[B0 -> (J/psi(1S) -> ^mu+ mu-) (K*(892)0 -> K+ pi-)]CC", + "L2": "[B0 -> (J/psi(1S) -> mu+ ^mu-) (K*(892)0 -> K+ pi-)]CC", + "Kst": "[B0 -> (J/psi(1S) -> mu+ mu-) ^(K*(892)0 -> K+ 
pi-)]CC", + "K": "[B0 -> (J/psi(1S) -> mu+ mu-) (K*(892)0 -> ^K+ pi-)]CC", + "Pi": "[B0 -> (J/psi(1S) -> mu+ mu-) (K*(892)0 -> K+ ^pi-)]CC", + } + variables_bd2kstmumu = { + "B": all_variables(v2_pvs, DTF_bd2kstmumu, _toplevel, "Kst"), + "Jpsi": all_variables(v2_pvs, DTF_bd2kstmumu, _composite, "Jpsi"), + "L1": all_variables(v2_pvs, DTF_bd2kstmumu, _basic), + "L2": all_variables(v2_pvs, DTF_bd2kstmumu, _basic), + "Kst": all_variables(v2_pvs, DTF_bd2kstmumu, _composite, "Kst"), + "K": all_variables(v2_pvs, DTF_bd2kstmumu, _basic), + "Pi": all_variables(v2_pvs, DTF_bd2kstmumu, _basic), + } + + # event variables + odin = get_odin() + decreports = None + evt_vars_bu2kmumu = event_variables( + v2_pvs, + odin, + decreports, + [spruce_line_bu2kmumu + "Decision"], + spruce_line_bu2kmumu, + ) + evt_vars_bd2kstmumu = event_variables( + v2_pvs, + odin, + decreports, + [spruce_line_bd2kstmumu + "Decision"], + spruce_line_bd2kstmumu, + ) + + # Sprucing filter + my_filter = create_lines_filter( + name="HDRFilter_B2Xmumu", + lines=[f"{spruce_line_bu2kmumu}", f"{spruce_line_bd2kstmumu}"], + ) + + # FunTuple + my_tuple_bu2kmumu = Funtuple( + name="Bu2KMuMu_Tuple", + tuple_name="DecayTree", + fields=fields_bu2kmumu, + variables=variables_bu2kmumu, + event_variables=evt_vars_bu2kmumu, + inputs=bu2kmumu_data, + ) + + my_tuple_bd2kstmumu = Funtuple( + name="Bd2KstMuMu_Tuple", + tuple_name="DecayTree", + fields=fields_bd2kstmumu, + variables=variables_bd2kstmumu, + event_variables=evt_vars_bd2kstmumu, + inputs=bd2kstmumu_data, + ) + # Algorithms to be run + return make_config(options, [my_filter, my_tuple_bu2kmumu, my_tuple_bd2kstmumu]) diff --git a/DaVinciTests/tests/qmtest/davinci.qms/dv_run_large_job.qmt b/DaVinciTests/tests/qmtest/davinci.qms/dv_run_large_job.qmt new file mode 100755 index 0000000000000000000000000000000000000000..5865038cd16247874d0fc1214301ef10849e7f52 --- /dev/null +++ b/DaVinciTests/tests/qmtest/davinci.qms/dv_run_large_job.qmt @@ -0,0 +1,59 @@ +<?xml version="1.0" ?> +<!-- +############################################################################### +# (c) Copyright 2021-2025 CERN for the benefit of the LHCb Collaboration # +# # +# This software is distributed under the terms of the GNU General Public # +# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # +# # +# In applying this licence, CERN does not waive the privileges and immunities # +# granted to it by virtue of its status as an Intergovernmental Organization # +# or submit itself to any jurisdiction. # +############################################################################### +--> +<!DOCTYPE extension PUBLIC '-//QM/2.3/Extension//EN' 'http://www.codesourcery.com/qm/dtds/2.3/-//qm/2.3/extension//en.dtd'> +<!-- +####################################################### +# SUMMARY OF THIS TEST +# ................... +# Author: masmith +# Purpose: Test to run a large DaVinci job for memory usage checks. For PrMon - not to be run regularly. 
+# Prerequisites: None +####################################################### +--> + +<extension class="GaudiTest.GaudiExeTest" kind="test"> + <argument name="program"><text>lbexec</text></argument> + <argument name="test_file_db_options_yaml"><text>collision_24_magup_spruce_24c4_data_rd_stream</text></argument> + <argument name="extra_options_yaml"><text> + testfiledb_key: collision_24_magup_spruce_24c4_data_rd_stream + input_process: Spruce + lumi: true + input_stream: rd + input_type: ROOT + ntuple_file: 'dv_run_large_job.root' + print_freq: 10000 + simulation: false + dddb_tag: default + conddb_tag: default + evt_max: 100 + </text></argument> + <argument name="args"><set> + <text>DaVinciTests.dv_run_large_job_prtest:main</text> + <text>--override-option-class=DaVinci.LbExec:TestOptions</text> + </set></argument> + <argument name="reference"><text>../refs/test_dv_run_large_job.ref</text></argument> + <argument name="error_reference"><text>../refs/empty.ref</text></argument> + <argument name="validator"><text> +from DaVinciTests.QMTest.DaVinciExclusions import preprocessor, counter_preprocessor +validateWithReference(preproc = preprocessor, counter_preproc = counter_preprocessor) +countErrorLines({"FATAL":0, "ERROR":0}) + +from pathlib import Path +ntuple = Path('./dv_run_large_job.root') + +if not ntuple.is_file(): raise Exception(f"File {ntuple} does not exist!") +ntuple.unlink() +print('Test successfully completed') + </text></argument> +</extension> diff --git a/DaVinciTests/tests/refs/test_dv_run_large_job.ref b/DaVinciTests/tests/refs/test_dv_run_large_job.ref new file mode 100644 index 0000000000000000000000000000000000000000..8bbbfb3aeef29ff45767641c49dc7e39d1d06c35 --- /dev/null +++ b/DaVinciTests/tests/refs/test_dv_run_large_job.ref @@ -0,0 +1,94 @@ +ApplicationMgr SUCCESS +==================================================================================================================================== +==================================================================================================================================== +ApplicationMgr INFO Application Manager Configured successfully +NTupleSvc INFO Added stream file:dv_run_large_job.root as FILE1 +HistogramPersistencySvc INFO Added successfully Conversion service RootHistSvc +RootIOAlg INFO DATAFILE='root://eoslhcb.cern.ch//eos/lhcb//lhcb/data/2024/RD_DST/00235370/0000/00235370_00002996_1.rd.dst' +Bu2KMuMu_Tuple INFO User specified descriptor: [B+ -> (J/psi(1S) -> mu+ mu-) K+]CC +Bu2KMuMu_Tuple INFO Number of decay possibilities with specified descriptor: 2 +Bu2KMuMu_Tuple INFO Possibility #0: B+ -> (J/psi(1S) -> mu+ mu-) K+ +Bu2KMuMu_Tuple INFO Possibility #1: B- -> (J/psi(1S) -> mu- mu+) K- +Bd2KstMuMu_Tuple INFO User specified descriptor: [B0 -> (J/psi(1S) -> mu+ mu-) (K*(892)0 -> K+ pi-)]CC +Bd2KstMuMu_Tuple INFO Number of decay possibilities with specified descriptor: 2 +Bd2KstMuMu_Tuple INFO Possibility #0: B0 -> (J/psi(1S) -> mu+ mu-) (K*(892)0 -> K+ pi-) +Bd2KstMuMu_Tuple INFO Possibility #1: B~0 -> (J/psi(1S) -> mu- mu+) (K*(892)~0 -> K- pi+) +ApplicationMgr INFO Application Manager Initialized successfully +ApplicationMgr INFO Application Manager Started successfully +RootIOAlg SUCCESS Reading Event record 1 within root://eoslhcb.cern.ch//eos/lhcb//lhcb/data/2024/RD_DST/00235370/0000/00235370_00002996_1.rd.dst +RCWNTupleCnv INFO Booked TTree with ID: DecayTree "DecayTree" in directory dv_run_large_job.root:/Bd2KstMuMu_Tuple +ApplicationMgr INFO Application Manager Stopped successfully 
+Bd2KstMuMu_Tuple SUCCESS Booked 1 N-Tuples and 0 Event Tag Collections +Bd2KstMuMu_Tuple SUCCESS List of booked N-Tuples in directory "FILE1/Bd2KstMuMu_Tuple" +Bd2KstMuMu_Tuple SUCCESS ID=DecayTree Title="DecayTree" #items=751{BUNCHCROSSING_ID,BUNCHCROSSING_TYPE,EVENTNUMBER,EVENTTYPE,GPSTIME,Hlt1TrackMVADec} +LAZY_AND: DaVinci #=100 Sum=2 Eff=|( 2.000000 +- 1.40000 )%| + NONLAZY_OR: FileSummaryRecords #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + LAZY_AND: Lumi #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + EventAccounting/EventAccount #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + NONLAZY_OR: UserAnalysis #=100 Sum=2 Eff=|( 2.000000 +- 1.40000 )%| + LAZY_AND: default #=100 Sum=2 Eff=|( 2.000000 +- 1.40000 )%| + RawBankSizeFilter/FilterDstDataSize #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + VoidFilter/HDRFilter_B2Xmumu #=100 Sum=2 Eff=|( 2.000000 +- 1.40000 )%| + FunTupleBase_Particles/Bu2KMuMu_Tuple #=2 Sum=2 Eff=|( 100.0000 +- 0.00000 )%| + FunTupleBase_Particles/Bd2KstMuMu_Tuple #=2 Sum=2 Eff=|( 100.0000 +- 0.00000 )%| +NTupleSvc INFO NTuples saved successfully +ApplicationMgr INFO Application Manager Finalized successfully +ApplicationMgr INFO Application Manager Terminated successfully +Bd2KstMuMu_Tuple INFO Number of counters : 15 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + | "# events with multiple candidates for field B" | 1 | + | "# events with multiple candidates for field Jpsi"| 1 | + | "# events with multiple candidates for field K" | 1 | + | "# events with multiple candidates for field Kst"| 1 | + | "# events with multiple candidates for field L1"| 1 | + | "# events with multiple candidates for field L2"| 1 | + | "# events with multiple candidates for field Pi"| 1 | + | "# non-empty events for field B" | 2 | + | "# non-empty events for field Jpsi" | 2 | + | "# non-empty events for field K" | 2 | + | "# non-empty events for field Kst" | 2 | + | "# non-empty events for field L1" | 2 | + | "# non-empty events for field L2" | 2 | + | "# non-empty events for field Pi" | 2 | + | "# processed events" | 2 | +Bu2KMuMu_Tuple INFO Number of counters : 6 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + | "# events without candidate for field B" | 2 | + | "# events without candidate for field Jpsi" | 2 | + | "# events without candidate for field K" | 2 | + | "# events without candidate for field L1" | 2 | + | "# events without candidate for field L2" | 2 | + | "# processed events" | 2 | +DTF_Bd2KstMuMu INFO Number of counters : 5 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + | "Events" | 2 | + | "Fitted Particles" | 7 | 0 | 0.0000 | 0.0000 | 4.2950e+09 | 0.0000 | + | "Input Particles" | 2 | 7 | 3.5000 | 2.5000 | 1.0000 | 6.0000 | + | "saved Particles" | 2 | 49 | 24.500 | 17.500 | 7.0000 | 42.000 | + | "saved Vertices" | 2 | 21 | 10.500 | 7.5000 | 3.0000 | 18.000 | +EventAccount INFO Number of counters : 1 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + | "Number of events seen" | 100 | +FilterDstDataSize INFO Number of counters : 1 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + |*"Number of too small raw banks size" | 100 | 0 |( 0.000000 +- 0.000000)% | +HDRFilter_B2Xmumu INFO Number of counters : 1 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + |*"Cut selection efficiency" | 100 | 2 |( 2.000000 +- 1.400000)% | +HLTControlFlowMgr INFO Number of counters : 1 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + | "Processed events" | 100 | +Unpack_Spruce__Event_Spruce_HLT2... 
INFO Number of counters : 3 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + | "# UnpackedData" | 2 | 10 | 5.0000 | + | "Buffer size" | 2 | 648 | 324.00 | + |*"Target data already present" | 2 | 0 |( 0.000000 +- 0.000000)% | +Unpack_Spruce__Event_Spruce_Spru... INFO Number of counters : 4 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + | "# UnpackedData" | 2 | 7 | 3.5000 | + | "Buffer size" | 2 | 114 | 57.000 | + |*"Target data already present" | 2 | 0 |( 0.000000 +- 0.000000)% | +Unpack_Spruce__Event_Spruce_Spru... INFO Number of counters : 2 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + | "DstData buffer for configured output not available - unable to unpack"| 2 | + |*"Target data already present" | 2 | 0 |( 0.000000 +- 0.000000)% |