Commit f49202c2 authored by Patrick Koppenburg

Test of all functors

parent f15f128d
Related merge requests: !1103 (Draft: Add AnalysisHelpers to DaVinci Stack), !594 (Test of all functors)
###############################################################################
# (c) Copyright 2021 CERN for the benefit of the LHCb Collaboration #
# #
# This software is distributed under the terms of the GNU General Public #
# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". #
# #
# In applying this licence, CERN does not waive the privileges and immunities #
# granted to it by virtue of its status as an Intergovernmental Organization #
# or submit itself to any jurisdiction. #
###############################################################################
"""
Example of a DaVinci job filling all available functors. This is obviously a stress test and not realistic.
This example is meant to be run with
$ ./run davinci run-mc --inputfiledb FEST_October_2021_dst Phys/DaVinci/options/DaVinciDB-Example.yaml --joboptfile DaVinciTests/tests/options/option_davinci_sprucing.yaml --user_algorithms DaVinciExamples/python/DaVinciExamples/tupling/example-tupling-AllFunctors:alg_config --evt_max 100 |& cat | tee log
"""
__author__ = "P. Koppenburg"
__date__ = "2021-11-23"
import Functors as F
from FunTuple import FunctorCollection
from FunTuple import FunTuple_Particles as Funtuple
from PyConf.application import make_data_with_FetchDataFromFile
from DaVinci.reco_objects import make_pvs_for
from DaVinci.algorithms import set_hlt_config, add_filter
from DaVinci import options
#
# Definition of sprucing line
#
bd2dsk_line = "SpruceB2OC_BdToDsmK_DsmToHHH_FEST_Line"
bd2dsk_data = make_data_with_FetchDataFromFile(
f"/Event/Spruce/{bd2dsk_line}/Particles")
_basic = 'basic'
_composite = 'composite'


def all_variables(pvs, DTFR, ptype):
    """
    Function that returns a dictionary of functors that work.
    """
    if ptype not in [_basic, _composite]:
        raise Exception("I want {0} or {1}. Got {2}".format(
            _basic, _composite, ptype))
    vars = {}
    vars['P'] = F.P
    vars['PT'] = F.PT
    vars['PHI'] = F.PHI
    vars['ETA'] = F.ETA
    vars['PX'] = F.PX
    vars['PY'] = F.PY
    vars['PZ'] = F.PZ
    vars['ENERGY'] = F.ENERGY
    vars['FOURMOMENTUM'] = F.FOURMOMENTUM
    if (_basic == ptype):
        vars['MINIP'] = F.MINIP(pvs)  # crashes on composites
        vars['MINIPCHI2'] = F.MINIPCHI2(pvs)  # crashes on composites
    # if (_composite == ptype):
    #     vars['CHILD'] = F.CHILD(1, F.PT)  # Needs ParticleCombination
    #     vars['SUBCOMB'] = F.SUBCOMB
    #     vars['MASSWITHHYPOTHESES'] = F.MASSWITHHYPOTHESES([939., 939.])  # relies on numChildren. Not in LHCb::Particle
    vars['MASS'] = F.MASS
    if (_composite == ptype):
        vars['VX'] = F.VX
        vars['VY'] = F.VY
        vars['VZ'] = F.VZ
        vars['VRho'] = F.VRho
        # vars['DOCA'] = F.DOCA(Child1=1, Child2=2)  # no member named 'doca' in 'LHCb::Particle' (?)
        # vars['DOCACHI2'] = F.DOCACHI2(Child1=1, Child2=2)  # same here
        # vars['ALV'] = F.ALV(Child1=1, Child2=2)  # wants cos_angle_prod
        # vars['MAXDOCA'] = F.MAXDOCA  # does not work
        # vars['MAXDOCACHI2'] = F.MAXDOCACHI2
    vars['CHARGE'] = F.CHARGE
    # vars['SIZE'] = F.SIZE(DTFParts)  # doesn't work
    vars['BPVIPCHI2'] = F.BPVIPCHI2(pvs)
    if (_composite == ptype):  # all these require a vertex
        vars['BPVETA'] = F.BPVETA(pvs)
        vars['BPVCORRM'] = F.BPVCORRM(pvs)
        vars['BPVDIRA'] = F.BPVDIRA(pvs)
        vars['BPVFDCHI2'] = F.BPVFDCHI2(pvs)
        vars['BPVVDZ'] = F.BPVVDZ(pvs)
        vars['BPVVDRHO'] = F.BPVVDRHO(pvs)
        vars['BPVLTIME'] = F.BPVLTIME(pvs)
        vars['BPVDLS'] = F.BPVDLS(pvs)
    # vars['RUNNUMBER'] = F.RUNNUMBER(ODINLocation)  # doesn't work
    # vars['EVENTNUMBER'] = F.EVENTNUMBER('/Event/DAQ/RawBanks/ODIN')
    # vars['EVENTTYPE'] = F.EVENTTYPE('/Event/DAQ/RawBanks/ODIN')
    # vars['MVA'] = F.MVA  # needs an MVA
    # vars['COMB'] = F.COMB  # starts from combination
    # vars['POD'] = F.POD  # starts from combination
    vars['DTF_PT'] = F.MAP_INPUT(Functor=F.PT, Relations=DTFR)
    vars['DTF_BPVIPCHI2'] = F.MAP_INPUT(
        Functor=F.BPVIPCHI2(pvs), Relations=DTFR)
    if (_basic == ptype):
        vars['PID_MU'] = F.PID_MU
        vars['PID_PI'] = F.PID_PI
        vars['PID_K'] = F.PID_K
        vars['PID_P'] = F.PID_P
        vars['PID_E'] = F.PID_E
        vars['PROBNN_D'] = F.PROBNN_D
        vars['PROBNN_E'] = F.PROBNN_E
        vars['PROBNN_GHOST'] = F.PROBNN_GHOST
        vars['PROBNN_K'] = F.PROBNN_K
        vars['PROBNN_MU'] = F.PROBNN_MU
        vars['PROBNN_P'] = F.PROBNN_P
        vars['PROBNN_PI'] = F.PROBNN_PI
    print("For {0} returning variables {1}".format(ptype, vars.keys()))
    return vars

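
# Illustrative usage of all_variables (a sketch only, not executed here): the
# returned dictionary is wrapped in a FunctorCollection per tuple branch, e.g.
# for a hypothetical composite branch 'B0':
#
#   variables = {'B0': FunctorCollection(all_variables(pvs, relations, _composite))}
#
# where pvs is the PV container and relations the DecayTreeFitter relations table;
# alg_config below builds exactly these objects and applies this pattern.
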
def alg_config():
"""
Algorithm configuration function called from the comad line
"""
#
# DecayTreeFitter Algorithm
#
    stream_pvs = f"{options.unpack_stream}/HLT2"
    v2_pvs = make_pvs_for(stream=stream_pvs, data_type=options.data_type)
    #
    # DecayTreeFitter Algorithm
    #
    from PyConf.Algorithms import DecayTreeFitterAlg
    DTF = DecayTreeFitterAlg(Input=bd2dsk_data, PrintTree=True)
    # DTFParts = DTF.Output  # Particles (not needed)
    DTFRelations = DTF.OutputRelations  # Relations
    #
    # Definition of branches and functors
    #
    # Branches are defined via decay descriptors; '^' marks the daughter the
    # branch refers to and '[...]CC' includes the charge-conjugate decay.
    branches_dsk = {
        'B0': "[B0 -> D_s- K+]CC",
        'Kaon': "[B0 -> D_s- ^K+]CC",
    }
    variables_dsk = {
        'B0': FunctorCollection(
            all_variables(v2_pvs, DTFRelations, _composite)),
        'Kaon': FunctorCollection(all_variables(v2_pvs, DTFRelations, _basic)),
    }
    #
    # Sprucing filter
    #
    hlt_dec_reports, spruce_dec_reports = set_hlt_config(options)
    my_filter = add_filter("HDRFilter_B0DsK",
                           f"HLT_PASS('{bd2dsk_line}Decision')",
                           hlt_dec_reports, spruce_dec_reports)
    #
    # FunTuple
    #
    my_tuple = Funtuple(
        name="B0DsK_Tuple",
        tree_name="DecayTree",
        branches=branches_dsk,
        variables=variables_dsk,
        loki_preamble=[],
        inputs=bd2dsk_data)
    #
    # Algorithms to be run
    #
    return {"UserAlgs": [v2_pvs, my_filter, DTF, my_tuple]}, []
@@ -15,7 +15,8 @@ Example of a typical DaVinci job:
- runs DecayTreeFitterAlg and stores some output
This example is meant to be run with
$ ./run davinci run-mc --simplejob --inputfiledb Upgrade_Bd2KstarMuMu --joboptfile example-tupling-basic.yaml --user_algorithms example-tupling-DTF-run-mc:main
$ ./run davinci run-mc --inputfiledb Upgrade_Bd2KstarMuMu_ldst Phys/DaVinci/options/DaVinciDB-Example.yaml --joboptfile DaVinciExamples/python/DaVinciExamples/tupling/example-tupling-DTF-run-mc.yaml --user_algorithms DaVinciExamples/python/DaVinciExamples/tupling/example-tupling-DTF-run-mc:main --evt_max -1 |& cat | tee log
"""
__author__ = "P. Koppenburg"
......
<?xml version="1.0" ?>
<!--
###############################################################################
# (c) Copyright 2021 CERN for the benefit of the LHCb Collaboration #
# #
# This software is distributed under the terms of the GNU General Public #
# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". #
# #
# In applying this licence, CERN does not waive the privileges and immunities #
# granted to it by virtue of its status as an Intergovernmental Organization #
# or submit itself to any jurisdiction. #
###############################################################################
-->
<!DOCTYPE extension PUBLIC '-//QM/2.3/Extension//EN' 'http://www.codesourcery.com/qm/dtds/2.3/-//qm/2.3/extension//en.dtd'>
<!--
#######################################################
# SUMMARY OF THIS TEST
# ...................
# Author: pkoppenb
# Purpose: Test of all functors
# Prerequisites: None
#######################################################
-->
<extension class="GaudiTest.GaudiExeTest" kind="test">
<argument name="program"><text>davinci</text></argument>
<argument name="args"><set>
<text>run-mc</text>
<text>--inputfiledb</text>
<text>FEST_October_2021_dst</text>
<text>../../../Phys/DaVinci/options/DaVinciDB-Example.yaml</text>
<text>--joboptfile</text>
<text>../../../DaVinciTests/tests/options/option_davinci_sprucing.yaml</text>
<text>--user_algorithms</text>
<text>../../../DaVinciExamples/python/DaVinciExamples/tupling/example-tupling-AllFunctors:alg_config</text>
</set></argument>
<argument name="validator"><text>
findReferenceBlock("""B0DsK_Tuple SUCCESS Booked 1 N-Tuples and 0 Event Tag Collections""")
</text></argument>
</extension>