diff --git a/DaVinciExamples/python/DaVinciExamples/tupling/example-tupling-advanced-run-mc.py b/DaVinciExamples/python/DaVinciExamples/tupling/example-tupling-advanced-run-mc.py new file mode 100644 index 0000000000000000000000000000000000000000..4e6e9d6204f707e00a8ecbaf388b95985cfad6ca --- /dev/null +++ b/DaVinciExamples/python/DaVinciExamples/tupling/example-tupling-advanced-run-mc.py @@ -0,0 +1,118 @@ +############################################################################### +# (c) Copyright 2021 CERN for the benefit of the LHCb Collaboration # +# # +# This software is distributed under the terms of the GNU General Public # +# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # +# # +# In applying this licence, CERN does not waive the privileges and immunities # +# granted to it by virtue of its status as an Intergovernmental Organization # +# or submit itself to any jurisdiction. # +############################################################################### +""" +Example of a typical DaVinci job: + - selection of two detached opposite-charge muons + - tuple of the selected candidates + + This example is meant to be run with + $ ./run davinci run-mc --inputfiledb Upgrade_Bd2KstarMuMu_ldst $DAVINCIROOT/options/DaVinciDB-Example.yaml --user_algorithms example-tupling-advanced-run-mc:main +""" + +__author__ = "Davide Fazzini" +__date__ = "2021-06-18" + +import Functors as F +from PyConf.Algorithms import FunTuple_Particles as FunTuple +from DaVinci.standard_particles import make_detached_mumu, make_KsDD +from DaVinci.reco_objects import upfront_reconstruction_from_file as upfront_reconstruction +from FunTuple import ParticleTupleProp, convert_to_parsable_objs + + +# Define a helper function for creating the tuple +def CreateSimpleTuple(name, tree_name, parsable_objs, loki_preamble, inputs): + ftup = FunTuple( + name=name, + tree_name=tree_name, + branch_names_prefix=parsable_objs[0], + decay_descriptors=parsable_objs[1], + loki_functors=parsable_objs[2][0], + loki_functor_branch_names=parsable_objs[2][1], + thor_functors=parsable_objs[3][0], + thor_functor_branch_names=parsable_objs[3][1], + loki_preamble=loki_preamble, + input_location=inputs) + return ftup + + +# Prepare the node with the selection +dimuons = make_detached_mumu() +kshorts = make_KsDD() + +#FunTuple: define list of preambles for loki +loki_preamble = ['TRACK_MAX_PT = MAXTREE(ISBASIC & HASTRACK, PT, -1)'] +#FunTuple: Jpsi info +ParticleJpsi = ParticleTupleProp( + branch_name_prefix="Jpsi", + decay_descriptor="J/psi(1S) -> mu+ mu-", + #Dict -> {name:functor} + particle_code_loki={ + 'LOKI_P': 'P', + 'LOKI_PT': 'PT', + 'LOKI_Muonp_PT': 'CHILD(PT, 1)', + 'LOKI_Muonm_PT': 'CHILD(PT, 2)', + 'LOKI_MAXPT': 'TRACK_MAX_PT', + 'LOKI_N_HIHGPT_TRCKS': 'NINTREE(ISBASIC & HASTRACK & (PT > 1500*MeV))' + }, + particle_code_thor={ + 'THOR_P': F.P, + 'THOR_PT': F.PT + }) + +#FunTuple: Mu plus info: one can also pick a single muon and make a separate ParticleTupleProp object +ParticleMuPlus = ParticleTupleProp( + branch_name_prefix="MuPlus", + decay_descriptor="J/psi(1S) -> ^mu+ mu-", + particle_code_loki={'LOKI_P': 'P'}, + particle_code_thor={'THOR_P': F.P}) + +#FunTuple: this is not needed once a custom gaudi property for ParticleTupleProp is implemented +parsable_objs_dimuons = convert_to_parsable_objs( + [ParticleJpsi, ParticleMuPlus]) +ftup_dimuons = CreateSimpleTuple("DimuonsTuple", "DecayTree", + parsable_objs_dimuons, loki_preamble, dimuons) + +ParticleKS = ParticleTupleProp( + branch_name_prefix="Ks", + 
decay_descriptor="KS0 -> pi+ pi-", + #Dict -> {name:functor} + particle_code_loki={ + 'LOKI_P': 'P', + 'LOKI_PT': 'PT', + 'LOKI_Muonp_PT': 'CHILD(PT, 1)', + 'LOKI_Muonm_PT': 'CHILD(PT, 2)', + 'LOKI_MAXPT': 'TRACK_MAX_PT', + 'LOKI_N_HIHGPT_TRCKS': 'NINTREE(ISBASIC & HASTRACK & (PT > 1500*MeV))' + }, + particle_code_thor={ + 'THOR_P': F.P, + 'THOR_PT': F.PT + }) +parsable_objs_kshorts = convert_to_parsable_objs([ParticleKS]) +ftup_kshorts = CreateSimpleTuple("KsTuple", "DecayTree", parsable_objs_kshorts, + loki_preamble, kshorts) + + +def main(): + from DaVinci import options + options.evt_max = 10 + options.ntuple_file = 'DV-example-tupling-advanced-ntp.root' + options.histo_file = 'DV-example-tupling-advanced-his.root' + options.input_raw_format = 4.3 + options.lumi = False + + tools = [] + algs = { + "Reco": upfront_reconstruction(), + "DiMuons": upfront_reconstruction() + [dimuons, ftup_dimuons], + "KShorts": upfront_reconstruction() + [kshorts, ftup_kshorts] + } + return algs, tools diff --git a/DaVinciExamples/python/DaVinciExamples/tupling/example-tupling-basic-run-mc.py b/DaVinciExamples/python/DaVinciExamples/tupling/example-tupling-basic-run-mc.py new file mode 100644 index 0000000000000000000000000000000000000000..76307737188c13c7e6acdf9ddfc1ee7f3311448a --- /dev/null +++ b/DaVinciExamples/python/DaVinciExamples/tupling/example-tupling-basic-run-mc.py @@ -0,0 +1,86 @@ +############################################################################### +# (c) Copyright 2021 CERN for the benefit of the LHCb Collaboration # +# # +# This software is distributed under the terms of the GNU General Public # +# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # +# # +# In applying this licence, CERN does not waive the privileges and immunities # +# granted to it by virtue of its status as an Intergovernmental Organization # +# or submit itself to any jurisdiction. 
# +############################################################################### +""" +Example of a typical DaVinci job: + - selection of two detached opposite-charge muons + - tuple of the selected candidates + + This example is meant to be run with + $ ./run davinci run-mc --inputfiledb Upgrade_Bd2KstarMuMu_ldst $DAVINCIROOT/options/DaVinciDB-Example.yaml --joboptfile example-tupling-basic-run-mc.yaml --user_algorithms example-tupling-basic-run-mc:main +""" + +__author__ = "Maurizio Martinelli" +__date__ = "2021-05-03" + +import Functors as F +from PyConf.Algorithms import FunTuple_Particles as FunTuple +from DaVinci.standard_particles import make_detached_mumu +from DaVinci.reco_objects import upfront_reconstruction_from_file as upfront_reconstruction +from FunTuple import ParticleTupleProp, convert_to_parsable_objs + + +# Define a helper function to create the FunTuple instance +def CreateSimpleTuple(name, tree_name, parsable_objs, loki_preamble, inputs): + ftup = FunTuple( + name=name, + tree_name=tree_name, + branch_names_prefix=parsable_objs[0], + decay_descriptors=parsable_objs[1], + loki_functors=parsable_objs[2][0], + loki_functor_branch_names=parsable_objs[2][1], + thor_functors=parsable_objs[3][0], + thor_functor_branch_names=parsable_objs[3][1], + loki_preamble=loki_preamble, + input_location=inputs) + return ftup + + +# Prepare the node with the selection +dimuons = make_detached_mumu() + +#FunTuple: define list of preambles for loki +loki_preamble = ['TRACK_MAX_PT = MAXTREE(ISBASIC & HASTRACK, PT, -1)'] +#FunTuple: Jpsi info +ParticleJpsi = ParticleTupleProp( + branch_name_prefix="Jpsi", + decay_descriptor="J/psi(1S) -> mu+ mu-", + #Dict -> {name:functor} + particle_code_loki={ + 'LOKI_P': 'P', + 'LOKI_PT': 'PT', + 'LOKI_Muonp_PT': 'CHILD(PT, 1)', + 'LOKI_Muonm_PT': 'CHILD(PT, 2)', + 'LOKI_MAXPT': 'TRACK_MAX_PT', + 'LOKI_N_HIHGPT_TRCKS': 'NINTREE(ISBASIC & HASTRACK & (PT > 1500*MeV))' + }, + particle_code_thor={ + 'THOR_P': F.P, + 'THOR_PT': F.PT + }) + +#FunTuple: Mu plus info: one can also pick a single muon and make a separate ParticleTupleProp object +ParticleMuPlus = ParticleTupleProp( + branch_name_prefix="MuPlus", + decay_descriptor="J/psi(1S) -> ^mu+ mu-", + particle_code_loki={'LOKI_P': 'P'}, + particle_code_thor={'THOR_P': F.P}) + +#FunTuple: this is not needed once a custom gaudi property for ParticleTupleProp is implemented +parsable_objs_dimuons = convert_to_parsable_objs( + [ParticleJpsi, ParticleMuPlus]) +ftup_dimuons = CreateSimpleTuple("DimuonsTuple", "DecayTree", + parsable_objs_dimuons, loki_preamble, dimuons) + + +def main(): + tools = [] + algs = upfront_reconstruction() + [dimuons, ftup_dimuons] + return algs, tools diff --git a/DaVinciExamples/python/DaVinciExamples/tupling/example-tupling-basic-run-mc.yaml b/DaVinciExamples/python/DaVinciExamples/tupling/example-tupling-basic-run-mc.yaml new file mode 100644 index 0000000000000000000000000000000000000000..0067d7c5004e91f6ba194e7dff528bdc99afca73 --- /dev/null +++ b/DaVinciExamples/python/DaVinciExamples/tupling/example-tupling-basic-run-mc.yaml @@ -0,0 +1,16 @@ +############################################################################### +# (c) Copyright 2020-2021 CERN for the benefit of the LHCb Collaboration # +# # +# This software is distributed under the terms of the GNU General Public # +# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". 
# +# # +# In applying this licence, CERN does not waive the privileges and immunities # +# granted to it by virtue of its status as an Intergovernmental Organization # +# or submit itself to any jurisdiction. # +############################################################################### + +evt_max: 10 +ntuple_file: 'DV-example-tupling-basic-ntp.root' +histo_file: 'DV-example-tupling-basic-his.root' +input_raw_format: 4.3 +lumi: False diff --git a/DaVinciSys/scripts/davinci b/DaVinciSys/scripts/davinci index e19520743f572aecd8dda8fe1f9add5855250bac..8f9aee6fd1fc07c6d88c881f6ef6f5050fedb09c 100755 --- a/DaVinciSys/scripts/davinci +++ b/DaVinciSys/scripts/davinci @@ -26,8 +26,10 @@ A DaVinci job using simulated data can be run using the command line: """ import os, sys, click -from DaVinci.utilities_script import dump_call, get_configurable_opts +from DaVinci.utilities_script import dump_call, get_configurable_opts, set_testfiledb from DaVinci.ConfigurationUpgrade import run_davinci +from DaVinci.config import options +from DaVinci.optionChecker import log_click if "GAUDIAPPVERSION" in os.environ: APP_VERSION = str(os.environ["GAUDIAPPVERSION"]) @@ -77,16 +79,41 @@ def run_job(configurables, export=None, with_defaults=False, dry_run=None): "ApplicationMgr.AppVersion": '"{}"'.format(APP_VERSION), } + # Using applyConfigurableUsers_old until the new version will not be ready + # Marco Clemencic found a bug in applyConfigurableUsers, but the fix caused troubles in existing + # configurations as somehow they were relying on the bug, so he had to resurrect the old implementation + from GaudiKernel.Proxy.Configurable import Configurable, applyConfigurableUsers_old + applyConfigurableUsers_old() + + dict_opts_old = get_configurable_opts( + Configurable.allConfigurables.values(), with_defaults) dict_opts = get_configurable_opts(configurables, with_defaults) + + conflicts = [ + n for n in set(dict_opts).intersection(dict_opts_old) + if dict_opts[n] != dict_opts_old[n] + ] + if conflicts: + conflicts.sort() + log_click( + "ERROR", + "Some properties are set in old and new style configuration") + log_click("WARNING", "name: old -> new") + for n in conflicts: + log_click("WARNING", + "%s: %s -> %s" % (n, dict_opts_old[n], dict_opts[n])) + exit(10) + + opts.update(dict_opts_old) opts.update(dict_opts) if export: - click.echo("writing configuration to {}".format(export)) + log_click("INFO", "writing configuration to {}".format(export)) with open(export, "w") as f: f.writelines("{} = {};\n".format(*item) for item in opts.items()) if dry_run: - click.echo("dry-run: not starting the application") + log_click("INFO", "dry-run: not starting the application") else: import Gaudi opts["ApplicationMgr.JobOptionsType"] = '"NONE"' @@ -100,12 +127,12 @@ def run_job(configurables, export=None, with_defaults=False, dry_run=None): allow_extra_args=True, )) @click.option( - "--testfiledb", - default=("$DAVINCIROOT/options/DaVinciDB-Example.yaml", - "Upgrade_Bd2KstarMuMu_ldst"), + "--inputfiledb", + default=("", "TestFileDB"), nargs=2, help= - "TestFileDB-like file containing job input and conditions information (.yaml). Takes the pair of values 'filedb-path', 'filedb-key'" + "TestFileDB-like file containing job input and conditions information (.yaml). Takes the pair of values 'filedb-key', 'filedb-path'."\ + "If you want to use the standard TestFileDB set 'filedb-path' = '-'." 
) @click.option( "--joboptfile", @@ -119,34 +146,36 @@ def run_job(configurables, export=None, with_defaults=False, dry_run=None): "Option for running a simple DaVinci jobs without any specific configuration (.py)." ) @click.pass_context -def run_mc(ctx, testfiledb, joboptfile, simplejob): +def run_mc(ctx, inputfiledb, joboptfile, simplejob): """ - DaVinci function for running jobs on simulated samples on the command line, using Click. - Ctx: click.core.Context class (dict). Predefined click object storing information about the invoked command. + DaVinci function for running jobs on simulated samples. + Ctx: click.core.Context class (dict). + Predefined click object storing information about the invoked command. All the options passed by command line which are not recognised by click are stored into the ctx.args element. Ctx.args is a simple array and each extra option is stored using two values: the first one is the key and the second one is the corresponding value. Eg: --evt_max 100 will be stored as: ctx.args[0] = --evt_max, ctx.args[1] = 100 - Click automatically converts "_" in "-", so this function can be invoked calling run-mc as shown in the help. + Note: + Click automatically converts "_" in "-", so this function can be invoked calling run-mc as shown in the help. """ - assert len( - testfiledb - ) == 2, "--testfiledb takes two arguments: filedb filename and the relevant key." - testfiledb_file = testfiledb[0] - testfiledb_key = testfiledb[1] + + # Run on MC sample + options.simulation = True + # Test file DB key request overrides inputfiledb + inputfiledb_key, inputfiledb_file = set_testfiledb(inputfiledb) ctx_args = (ctx.args if (len(ctx.args) > 1) else []) - dump_call(testfiledb_file, testfiledb_key, joboptfile, ctx_args) + dump_call(inputfiledb_file, inputfiledb_key, joboptfile, ctx_args) config = run_davinci( - testfiledb_file, - testfiledb_key, #file and key for job input and conditions + inputfiledb_key, + inputfiledb_file, #file and key for job input and conditions joboptfile, # file for job options - True, # flag for MC job ctx_args, # list of extra options to be set in the job simplejob # flag for running a simplejob ) + return config @@ -156,49 +185,52 @@ def run_mc(ctx, testfiledb, joboptfile, simplejob): allow_extra_args=True, )) @click.option( - "--testfiledb", - default=("$DAVINCIROOT/options/DaVinciDB-Example.yaml", - "Upgrade_Bd2KstarMuMu_ldst"), + "--inputfiledb", + default=("", "TestFileDB"), nargs=2, help= - "TestFileDB-like file containing job input and conditions information (.yaml). Takes the pair of values 'filedb-path', 'filedb-key'" + "TestFileDB-like file containing job input and conditions information (.yaml). Takes the pair of values 'filedb-key', 'filedb-path'."\ + "If you want to use the standard TestFileDB set 'filedb-path' = '-'." ) @click.option( "--joboption-file", default="$DAVINCIROOT/options/jobOptions-Example.yaml", help="Option file containing the job information (.yaml, .py)") @click.pass_context -def run_data(ctx, testfiledb, optfile): +def run_data(ctx, inputfiledb, optfile): """ - DaVinci function for running jobs on real data samples on the command line, using Click. + DaVinci function for running jobs on real data samples. Ctx: click.core.Context class (dict). Predefined click object storing information about the invoked command. All options passed by the command line that are not recognised by click are stored into the ctx.args element. 
Ctx.args is a simple array and each extra option is stored using two values: the first one is the key and the second one is the corresponding value. Eg: --evt_max 100 will be stored as: ctx.args[0] = --evt_max, ctx.args[1] = 100 - Click automatically converts "_" in "-", so this function can be invoked calling run-data as shown in the help. + Note: + Click automatically converts "_" in "-", so this function can be invoked calling run-data as shown in the help. """ + raise ValueError( 'Data file with upgrade conditions are not yet available. Please use :mc function instead.' ) - assert len( - testfiledb - ) == 2, "--testfiledb takes two arguments: filedb filename and the relevant key." - testfiledb_file = testfiledb[0] - testfiledb_key = testfiledb[1] + # Run on data sample + options.simulation = False + + # Test file DB key request overrides inputfiledb + inputfiledb_key, inputfiledb_file = set_testfiledb(inputfiledb) + ctx_args = (ctx.args if (len(ctx.args) > 1) else []) - dump_call(testfiledb_file, testfiledb_key, joboptfile, ctx_args) + dump_call(inputfiledb_file, inputfiledb_key, joboptfile, ctx_args) config = run_davinci( - testfiledb_file, - testfiledb_key, #file and key for job input and conditions + inputfiledb_key, + inputfiledb_file, #file and key for job input and conditions joboptfile, # file for job options - False, # flag for MC job ctx_args, # list of extra options to be set in the job simplejob # flag for running a simplejob ) + return config diff --git a/DaVinciTests/tests/options/option_davinci_user_algs.py b/DaVinciTests/tests/options/option_davinci_user_algs.py index c60a0f119971d4fa3fe57a0796a3cf7c16f31c55..0f5be2b1ca1fdd9879c7d4da57d02b2b189502e3 100644 --- a/DaVinciTests/tests/options/option_davinci_user_algs.py +++ b/DaVinciTests/tests/options/option_davinci_user_algs.py @@ -15,17 +15,13 @@ Example of a DaVinci job printing run and event numbers on every read event. __author__ = "Davide Fazzini" __date__ = "2021-03-31" -from PyConf.control_flow import CompositeNode, NodeLogic -from DaVinci import options from DaVinci.reco_objects import upfront_reconstruction_from_file as upfront_reconstruction def main(): # the "upfront_reconstruction" is what unpacks reconstruction objects, particles and primary vertices # from file and creates protoparticles. 
+ tools = [] algs = upfront_reconstruction() - node = CompositeNode( - "PrintJpsiNode", children=algs, combine_logic=NodeLogic.NONLAZY_AND) - - return node + return algs, tools diff --git a/DaVinciTests/tests/qmtest/davinci.qms/test_davinci_control_flow.qmt b/DaVinciTests/tests/qmtest/davinci.qms/test_davinci_control_flow.qmt index d13d0eff511a96dc59950b0a0384a673802eb6d2..8230297cd59a83cb4e2052c7b184f42013565dbb 100755 --- a/DaVinciTests/tests/qmtest/davinci.qms/test_davinci_control_flow.qmt +++ b/DaVinciTests/tests/qmtest/davinci.qms/test_davinci_control_flow.qmt @@ -19,13 +19,18 @@ # Author: dfazzini # Purpose: Very simple test of DaVinci configurable for testing the click feature # Prerequisites: None +# inputfiledb Upgrade_Bd2KstarMuMu_ldst $DAVINCIROOT/options/DaVinciDB-Example.yaml # joboptfile $DAVINCITESTSROOT/tests/options/option_davinci_initialise_upgrade.yaml +# evt_max 150 ####################################################### --> <extension class="GaudiTest.GaudiExeTest" kind="test"> <argument name="program"><text>davinci</text></argument> <argument name="args"><set> <text>run-mc</text> + <text>--inputfiledb</text> + <text>Upgrade_Bd2KstarMuMu_ldst</text> + <text>$DAVINCIROOT/options/DaVinciDB-Example.yaml</text> <text>--joboptfile</text> <text>$DAVINCITESTSROOT/tests/options/option_davinci_initialise_upgrade.yaml</text> <text>--evt_max</text> diff --git a/DaVinciTests/tests/qmtest/davinci.qms/test_davinci_initialise_upgrade.qmt b/DaVinciTests/tests/qmtest/davinci.qms/test_davinci_initialise_upgrade.qmt index ffb98043a761a933a3cd5b99cb124d0fc9bb9bec..0012319391a3888dcde5df94522df1ab67418807 100755 --- a/DaVinciTests/tests/qmtest/davinci.qms/test_davinci_initialise_upgrade.qmt +++ b/DaVinciTests/tests/qmtest/davinci.qms/test_davinci_initialise_upgrade.qmt @@ -19,6 +19,8 @@ # Author: dfazzini # Purpose: Very simple test of DaVinci configurable for testing the click feature # Prerequisites: None +# dry-run +# inputfiledb Upgrade_Bd2KstarMuMu_ldst $DAVINCIROOT/options/DaVinciDB-Example.yaml # joboptfile $DAVINCITESTSROOT/tests/options/option_davinci_initialise_upgrade.yaml ####################################################### --> @@ -27,6 +29,9 @@ <argument name="args"><set> <text>--dry-run</text> <text>run-mc</text> + <text>--inputfiledb</text> + <text>Upgrade_Bd2KstarMuMu_ldst</text> + <text>$DAVINCIROOT/options/DaVinciDB-Example.yaml</text> <text>--joboptfile</text> <text>$DAVINCITESTSROOT/tests/options/option_davinci_initialise_upgrade.yaml</text> </set></argument> diff --git a/DaVinciTests/tests/qmtest/davinci.qms/test_davinci_simplejob.qmt b/DaVinciTests/tests/qmtest/davinci.qms/test_davinci_simplejob.qmt index a7b61fcec817627f8da57894b74f7ae12a058370..c8a550ca847c2f8675f669b839f980de026dda47 100755 --- a/DaVinciTests/tests/qmtest/davinci.qms/test_davinci_simplejob.qmt +++ b/DaVinciTests/tests/qmtest/davinci.qms/test_davinci_simplejob.qmt @@ -19,7 +19,8 @@ # Author: dfazzini # Purpose: Very simple test of DaVinci configurable for testing the click feature # Prerequisites: None -# user_algorithms $DAVINCITESTSROOT/tests/options/option_davinci_simplejob:main +# simplejob +# inputfiledb Upgrade_Bd2KstarMuMu_ldst $DAVINCIROOT/options/DaVinciDB-Example.yaml ####################################################### --> <extension class="GaudiTest.GaudiExeTest" kind="test"> @@ -27,6 +28,9 @@ <argument name="args"><set> <text>run-mc</text> <text>--simplejob</text> + <text>--inputfiledb</text> + <text>Upgrade_Bd2KstarMuMu_ldst</text> + 
<text>$DAVINCIROOT/options/DaVinciDB-Example.yaml</text> </set></argument> <argument name="reference"><text>$DAVINCITESTSROOT/tests/refs/test_davinci_simplejob.ref</text></argument> <argument name="error_reference"><text>$DAVINCITESTSROOT/tests/refs/empty.ref</text></argument> diff --git a/DaVinciTests/tests/qmtest/davinci.qms/test_davinci_testfiledb.qmt b/DaVinciTests/tests/qmtest/davinci.qms/test_davinci_testfiledb.qmt new file mode 100755 index 0000000000000000000000000000000000000000..a56311a70be1bc097069c0f48954097c17eabe4d --- /dev/null +++ b/DaVinciTests/tests/qmtest/davinci.qms/test_davinci_testfiledb.qmt @@ -0,0 +1,40 @@ +<?xml version="1.0" ?> +<!-- +############################################################################### +# (c) Copyright 2020-2021 CERN for the benefit of the LHCb Collaboration # +# # +# This software is distributed under the terms of the GNU General Public # +# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # +# # +# In applying this licence, CERN does not waive the privileges and immunities # +# granted to it by virtue of its status as an Intergovernmental Organization # +# or submit itself to any jurisdiction. # +############################################################################### +--> +<!DOCTYPE extension PUBLIC '-//QM/2.3/Extension//EN' 'http://www.codesourcery.com/qm/dtds/2.3/-//qm/2.3/extension//en.dtd'> +<!-- +####################################################### +# SUMMARY OF THIS TEST +# ................... +# Author: dfazzini +# Purpose: Very simple test for the new DaVinci configuration checking the correct access to the TestFileDB +# Prerequisites: None +# inputfiledb Upgrade_Bd2KstarMuMu - +####################################################### +--> +<extension class="GaudiTest.GaudiExeTest" kind="test"> + <argument name="program"><text>davinci</text></argument> + <argument name="args"><set> + <text>run-mc</text> + <text>--inputfiledb</text> + <text>Upgrade_Bd2KstarMuMu</text> + <text>-</text> + </set></argument> + <argument name="reference"><text>$DAVINCITESTSROOT/tests/refs/test_davinci_testfiledb.ref</text></argument> + <argument name="error_reference"><text>$DAVINCITESTSROOT/tests/refs/empty.ref</text></argument> + <argument name="validator"><text> +from DaVinciTests.QMTest.DaVinciExclusions import preprocessor +validateWithReference(preproc = preprocessor) +countErrorLines({"FATAL":0}) + </text></argument> +</extension> diff --git a/DaVinciTests/tests/qmtest/davinci.qms/test_davinci_tupling.qmt b/DaVinciTests/tests/qmtest/davinci.qms/test_davinci_tupling.qmt new file mode 100755 index 0000000000000000000000000000000000000000..4790aa3d8faf4d023c4f1b1025092b01c7648eba --- /dev/null +++ b/DaVinciTests/tests/qmtest/davinci.qms/test_davinci_tupling.qmt @@ -0,0 +1,46 @@ +<?xml version="1.0" ?> +<!-- +############################################################################### +# (c) Copyright 2020-2021 CERN for the benefit of the LHCb Collaboration # +# # +# This software is distributed under the terms of the GNU General Public # +# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # +# # +# In applying this licence, CERN does not waive the privileges and immunities # +# granted to it by virtue of its status as an Intergovernmental Organization # +# or submit itself to any jurisdiction. 
# +############################################################################### +--> +<!DOCTYPE extension PUBLIC '-//QM/2.3/Extension//EN' 'http://www.codesourcery.com/qm/dtds/2.3/-//qm/2.3/extension//en.dtd'> +<!-- +####################################################### +# SUMMARY OF THIS TEST +# ................... +# Author: dfazzini +# Purpose: Very simple test of DaVinci configurable for testing the click feature +# Prerequisites: None +# inputfiledb Upgrade_Bd2KstarMuMu_ldst $DAVINCIROOT/options/DaVinciDB-Example.yaml +# joboptfile $DAVINCIEXAMPLESROOT/python/DaVinciExamples/tupling/example-tupling-basic-run-mc.yaml +# user_algorithms $DAVINCIEXAMPLESROOT/python/DaVinciExamples/tupling/example-tupling-basic-run-mc:main +####################################################### +--> +<extension class="GaudiTest.GaudiExeTest" kind="test"> + <argument name="program"><text>davinci</text></argument> + <argument name="args"><set> + <text>run-mc</text> + <text>--inputfiledb</text> + <text>Upgrade_Bd2KstarMuMu_ldst</text> + <text>$DAVINCIROOT/options/DaVinciDB-Example.yaml</text> + <text>--joboptfile</text> + <text>$DAVINCIEXAMPLESROOT/python/DaVinciExamples/tupling/example-tupling-basic-run-mc.yaml</text> + <text>--user_algorithms</text> + <text>$DAVINCIEXAMPLESROOT/python/DaVinciExamples/tupling/example-tupling-basic-run-mc:main</text> + </set></argument> + <argument name="reference"><text>$DAVINCITESTSROOT/tests/refs/test_davinci_tupling.ref</text></argument> + <argument name="error_reference"><text>$DAVINCITESTSROOT/tests/refs/empty.ref</text></argument> + <argument name="validator"><text> +from DaVinciTests.QMTest.DaVinciExclusions import preprocessor +validateWithReference(preproc = preprocessor) +countErrorLines({"FATAL":0}) + </text></argument> +</extension> diff --git a/DaVinciTests/tests/qmtest/davinci.qms/test_davinci_tupling_advanced.qmt b/DaVinciTests/tests/qmtest/davinci.qms/test_davinci_tupling_advanced.qmt new file mode 100755 index 0000000000000000000000000000000000000000..632f9f23615a890df4337db80878890c08dae71e --- /dev/null +++ b/DaVinciTests/tests/qmtest/davinci.qms/test_davinci_tupling_advanced.qmt @@ -0,0 +1,43 @@ +<?xml version="1.0" ?> +<!-- +############################################################################### +# (c) Copyright 2020-2021 CERN for the benefit of the LHCb Collaboration # +# # +# This software is distributed under the terms of the GNU General Public # +# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # +# # +# In applying this licence, CERN does not waive the privileges and immunities # +# granted to it by virtue of its status as an Intergovernmental Organization # +# or submit itself to any jurisdiction. # +############################################################################### +--> +<!DOCTYPE extension PUBLIC '-//QM/2.3/Extension//EN' 'http://www.codesourcery.com/qm/dtds/2.3/-//qm/2.3/extension//en.dtd'> +<!-- +####################################################### +# SUMMARY OF THIS TEST +# ................... 
+# Author: dfazzini +# Purpose: Test for the new DaVinci configurable cheking the correct advanced tupling behaviour +# Prerequisites: None +# inputfiledb Upgrade_Bd2KstarMuMu_ldst $DAVINCIROOT/options/DaVinciDB-Example.yaml +# user_algorithms $DAVINCIEXAMPLESROOT/python/DaVinciExamples/tupling/example-tupling-advanced-run-mc:main +####################################################### +--> +<extension class="GaudiTest.GaudiExeTest" kind="test"> + <argument name="program"><text>davinci</text></argument> + <argument name="args"><set> + <text>run-mc</text> + <text>--inputfiledb</text> + <text>Upgrade_Bd2KstarMuMu_ldst</text> + <text>$DAVINCIROOT/options/DaVinciDB-Example.yaml</text> + <text>--user_algorithms</text> + <text>$DAVINCIEXAMPLESROOT/python/DaVinciExamples/tupling/example-tupling-advanced-run-mc:main</text> + </set></argument> + <argument name="reference"><text>$DAVINCITESTSROOT/tests/refs/test_davinci_tupling_advanced.ref</text></argument> + <argument name="error_reference"><text>$DAVINCITESTSROOT/tests/refs/empty.ref</text></argument> + <argument name="validator"><text> +from DaVinciTests.QMTest.DaVinciExclusions import preprocessor +validateWithReference(preproc = preprocessor) +countErrorLines({"FATAL":0}) + </text></argument> +</extension> diff --git a/DaVinciTests/tests/qmtest/davinci.qms/test_davinci_user_algs.qmt b/DaVinciTests/tests/qmtest/davinci.qms/test_davinci_user_algs.qmt index 49e53045ee9ec888c028b7899e909d629e083e43..4afa7a35774d46b264f716f226293e5a3e4734a5 100755 --- a/DaVinciTests/tests/qmtest/davinci.qms/test_davinci_user_algs.qmt +++ b/DaVinciTests/tests/qmtest/davinci.qms/test_davinci_user_algs.qmt @@ -19,6 +19,7 @@ # Author: dfazzini # Purpose: Very simple test of DaVinci configurable for testing the click feature # Prerequisites: None +# inputfiledb Upgrade_Bd2KstarMuMu_ldst $DAVINCIROOT/options/DaVinciDB-Example.yaml # user_algorithms $DAVINCITESTSROOT/tests/options/option_davinci_simplejob:main ####################################################### --> @@ -26,6 +27,9 @@ <argument name="program"><text>davinci</text></argument> <argument name="args"><set> <text>run-mc</text> + <text>--inputfiledb</text> + <text>Upgrade_Bd2KstarMuMu_ldst</text> + <text>$DAVINCIROOT/options/DaVinciDB-Example.yaml</text> <text>--user_algorithms</text> <text>$DAVINCITESTSROOT/tests/options/option_davinci_user_algs:main</text> </set></argument> diff --git a/DaVinciTests/tests/refs/test_davinci_control_flow.ref b/DaVinciTests/tests/refs/test_davinci_control_flow.ref index 4d2f870189ff6727a5d14bbb1e911c4a9b884cea..d6472a1b14f185eef96dbce980d6e9682eebcc3e 100644 --- a/DaVinciTests/tests/refs/test_davinci_control_flow.ref +++ b/DaVinciTests/tests/refs/test_davinci_control_flow.ref @@ -2,11 +2,11 @@ |-auditors = [] (default: []) |-buffer_events = 20000 (default: 20000) |-callgrind_profile = False (default: False) -|-conddb_tag = 'HEAD' (default: '') +|-conddb_tag = 'sim-20171127-vc-md100' (default: '') |-control_flow_file = '' (default: '') |-data_flow_file = '' (default: '') |-data_type = 'Upgrade' (default: '') -|-dddb_tag = 'dddb-20200424-2' (default: '') +|-dddb_tag = 'dddb-20171126' (default: '') |-detectors = ['VP', 'UT', 'FT', 'Rich1Pmt', 'Rich2Pmt', 'Ecal', 'Hcal', 'Muon', 'Magnet', 'Tr'] | (default: ['VP', 'UT', 'FT', 'Rich1Pmt', 'Rich2Pmt', 'Ecal', 'Hcal', 'Muon', 'Magnet', 'Tr']) |-dqflags_tag = '' (default: '') @@ -45,19 +45,23 @@ |-user_algorithms = '' (default: '') |-write_fsr = True (default: True) \----- (End of User DVAppOptions/DVAppOptions) 
----------------------------------------------------- +INFO No MainOptions specified. DaVinci() will import no options file! +WARNING DV option file or main function not defined. No user algorithms will be used. ApplicationMgr SUCCESS ==================================================================================================================================== ==================================================================================================================================== ApplicationMgr INFO Application Manager Configured successfully +DetectorPersistencySvc INFO Added successfully Conversion service:XmlCnvSvc +DetectorDataSvc SUCCESS Detector description database: git:/lhcb.xml NTupleSvc INFO Added stream file:DVNtuple.root as FILE1 RootHistSvc INFO Writing ROOT histograms to: DVHistos.root HistogramPersistencySvc INFO Added successfully Conversion service:RootHistSvc FSROutputStreamDstWriter INFO Data source: EventDataSvc output: SVC='Gaudi::RootCnvSvc' -DetectorDataSvc INFO Detector description not requested to be loaded EventClockSvc.FakeEventTime INFO Event times generated from 0 with steps of 0 -HiveDataBrokerSvc WARNING non-reentrant algorithm: RecordStream/FSROutputStreamDstWriter HiveDataBrokerSvc WARNING non-reentrant algorithm: GaudiHistoAlgorithm/SimpleHistos +HiveDataBrokerSvc WARNING non-reentrant algorithm: RecordStream/FSROutputStreamDstWriter ApplicationMgr INFO Application Manager Initialized successfully +DeFTDetector INFO Current FT geometry version = 63 ApplicationMgr INFO Application Manager Started successfully EventPersistencySvc INFO Added successfully Conversion service:RootCnvSvc EventSelector INFO Stream:EventSelector.DataStreamTool_1 Def:DATAFILE='root://eoslhcb.cern.ch//eos/lhcb/grid/prod/lhcb/MC/Upgrade/LDST/00076720/0000/00076720_00000002_1.ldst' SVC='Gaudi::RootEvtSelector' OPT='READ' IgnoreChecksum='YES' @@ -69,13 +73,13 @@ SimpleHistos INFO GaudiHistoAlgorithm:: Filling Histog ApplicationMgr INFO Application Manager Stopped successfully FSROutputStreamDstWriter INFO Set up File Summary Record FSROutputStreamDstWriter INFO Events output: 1 -NONLAZY_OR: DaVinci #=150 Sum=150 Eff=|( 100.0000 +- 0.00000 )%| - NONLAZY_OR: WriteFSR #=150 Sum=150 Eff=|( 100.0000 +- 0.00000 )%| - RecordStream/FSROutputStreamDstWriter #=150 Sum=150 Eff=|( 100.0000 +- 0.00000 )%| - NONLAZY_AND: DVStdAlgs #=150 Sum=150 Eff=|( 100.0000 +- 0.00000 )%| - GaudiHistoAlgorithm/SimpleHistos #=150 Sum=150 Eff=|( 100.0000 +- 0.00000 )%| +LAZY_AND: DaVinci #=150 Sum=150 Eff=|( 100.0000 +- 0.00000 )%| + NONLAZY_OR: UserAnalysis #=150 Sum=150 Eff=|( 100.0000 +- 0.00000 )%| + LAZY_AND: StandardAlgs #=150 Sum=150 Eff=|( 100.0000 +- 0.00000 )%| + GaudiHistoAlgorithm/SimpleHistos #=150 Sum=150 Eff=|( 100.0000 +- 0.00000 )%| + LAZY_AND: WriteFSR #=150 Sum=150 Eff=|( 100.0000 +- 0.00000 )%| + RecordStream/FSROutputStreamDstWriter #=150 Sum=150 Eff=|( 100.0000 +- 0.00000 )%| ToolSvc INFO Removing all tools created by ToolSvc -*****Chrono***** INFO The Final CPU consumption ( Chrono ) Table (ordered) ChronoStatSvc.finalize() INFO Service finalized successfully ApplicationMgr INFO Application Manager Finalized successfully ApplicationMgr INFO Application Manager Terminated successfully diff --git a/DaVinciTests/tests/refs/test_davinci_initialise_upgrade.ref b/DaVinciTests/tests/refs/test_davinci_initialise_upgrade.ref index 909e122b9275c6f4b99b014cc62ed14eacd785c4..4b3c835c77545ed2655eb298db41eaf2d0a4494a 100644 --- a/DaVinciTests/tests/refs/test_davinci_initialise_upgrade.ref 
+++ b/DaVinciTests/tests/refs/test_davinci_initialise_upgrade.ref @@ -2,11 +2,11 @@ |-auditors = [] (default: []) |-buffer_events = 20000 (default: 20000) |-callgrind_profile = False (default: False) -|-conddb_tag = 'HEAD' (default: '') +|-conddb_tag = 'sim-20171127-vc-md100' (default: '') |-control_flow_file = '' (default: '') |-data_flow_file = '' (default: '') |-data_type = 'Upgrade' (default: '') -|-dddb_tag = 'dddb-20200424-2' (default: '') +|-dddb_tag = 'dddb-20171126' (default: '') |-detectors = ['VP', 'UT', 'FT', 'Rich1Pmt', 'Rich2Pmt', 'Ecal', 'Hcal', 'Muon', 'Magnet', 'Tr'] | (default: ['VP', 'UT', 'FT', 'Rich1Pmt', 'Rich2Pmt', 'Ecal', 'Hcal', 'Muon', 'Magnet', 'Tr']) |-dqflags_tag = '' (default: '') @@ -45,4 +45,6 @@ |-user_algorithms = '' (default: '') |-write_fsr = True (default: True) \----- (End of User DVAppOptions/DVAppOptions) ----------------------------------------------------- -dry-run: not starting the application +INFO No MainOptions specified. DaVinci() will import no options file! +WARNING DV option file or main function not defined. No user algorithms will be used. +INFO dry-run: not starting the application diff --git a/DaVinciTests/tests/refs/test_davinci_simplejob.ref b/DaVinciTests/tests/refs/test_davinci_simplejob.ref index a52703c24fee46a5c723905d816a3f69d27f8fa9..30d2056269c1249bab962a5c7bd2d3ff30dc31f5 100644 --- a/DaVinciTests/tests/refs/test_davinci_simplejob.ref +++ b/DaVinciTests/tests/refs/test_davinci_simplejob.ref @@ -2,11 +2,11 @@ |-auditors = [] (default: []) |-buffer_events = 20000 (default: 20000) |-callgrind_profile = False (default: False) -|-conddb_tag = 'HEAD' (default: '') +|-conddb_tag = 'sim-20171127-vc-md100' (default: '') |-control_flow_file = '' (default: '') |-data_flow_file = '' (default: '') |-data_type = 'Upgrade' (default: '') -|-dddb_tag = 'dddb-20200424-2' (default: '') +|-dddb_tag = 'dddb-20171126' (default: '') |-detectors = ['VP', 'UT', 'FT', 'Rich1Pmt', 'Rich2Pmt', 'Ecal', 'Hcal', 'Muon', 'Magnet', 'Tr'] | (default: ['VP', 'UT', 'FT', 'Rich1Pmt', 'Rich2Pmt', 'Ecal', 'Hcal', 'Muon', 'Magnet', 'Tr']) |-dqflags_tag = '' (default: '') @@ -45,16 +45,19 @@ |-user_algorithms = '' (default: '') |-write_fsr = True (default: True) \----- (End of User DVAppOptions/DVAppOptions) ----------------------------------------------------- +WARNING DV option file or main function not defined. No user algorithms will be used. 
ApplicationMgr SUCCESS ==================================================================================================================================== ==================================================================================================================================== ApplicationMgr INFO Application Manager Configured successfully +DetectorPersistencySvc INFO Added successfully Conversion service:XmlCnvSvc +DetectorDataSvc SUCCESS Detector description database: git:/lhcb.xml NTupleSvc INFO Added stream file:ExampleTuple.root as FILE1 RootHistSvc INFO Writing ROOT histograms to: ExampleHistos.root HistogramPersistencySvc INFO Added successfully Conversion service:RootHistSvc -DetectorDataSvc INFO Detector description not requested to be loaded EventClockSvc.FakeEventTime INFO Event times generated from 0 with steps of 0 ApplicationMgr INFO Application Manager Initialized successfully +DeFTDetector INFO Current FT geometry version = 63 ApplicationMgr INFO Application Manager Started successfully EventPersistencySvc INFO Added successfully Conversion service:RootCnvSvc EventSelector INFO Stream:EventSelector.DataStreamTool_1 Def:DATAFILE='root://eoslhcb.cern.ch//eos/lhcb/grid/prod/lhcb/MC/Upgrade/LDST/00076720/0000/00076720_00000002_1.ldst' SVC='Gaudi::RootEvtSelector' OPT='READ' IgnoreChecksum='YES' @@ -160,8 +163,10 @@ Gaudi__Examples__VoidConsumer INFO executing VoidConsumer Gaudi__Examples__VoidConsumer INFO executing VoidConsumer Gaudi__Examples__VoidConsumer INFO executing VoidConsumer ApplicationMgr INFO Application Manager Stopped successfully -LAZY_AND: DummyNode #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| - Gaudi__Examples__VoidConsumer/Gaudi__Examples__VoidConsumer #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| +LAZY_AND: DaVinci #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + NONLAZY_OR: UserAnalysis #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + LAZY_AND: UserAlgorithms #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + Gaudi__Examples__VoidConsumer/Gaudi__Examples__VoidConsumer #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| ToolSvc INFO Removing all tools created by ToolSvc ApplicationMgr INFO Application Manager Finalized successfully ApplicationMgr INFO Application Manager Terminated successfully diff --git a/DaVinciTests/tests/refs/test_davinci_testfiledb.ref b/DaVinciTests/tests/refs/test_davinci_testfiledb.ref new file mode 100644 index 0000000000000000000000000000000000000000..eef7a27a249090c05546400972430372c09fb808 --- /dev/null +++ b/DaVinciTests/tests/refs/test_davinci_testfiledb.ref @@ -0,0 +1,108 @@ +/***** User DVAppOptions/DVAppOptions ************************************************************** +|-auditors = [] (default: []) +|-buffer_events = 20000 (default: 20000) +|-callgrind_profile = False (default: False) +|-conddb_tag = 'sim-20171127-vc-md100' (default: '') +|-control_flow_file = '' (default: '') +|-data_flow_file = '' (default: '') +|-data_type = 'Upgrade' (default: '') +|-dddb_tag = 'dddb-20171126' (default: '') +|-detectors = ['VP', 'UT', 'FT', 'Rich1Pmt', 'Rich2Pmt', 'Ecal', 'Hcal', 'Muon', 'Magnet', 'Tr'] +| (default: ['VP', 'UT', 'FT', 'Rich1Pmt', 'Rich2Pmt', 'Ecal', 'Hcal', 'Muon', 'Magnet', 'Tr']) +|-dqflags_tag = '' (default: '') +|-enable_unpack = None +|-event_store = 'HiveWhiteBoard' (default: 'HiveWhiteBoard') +|-evt_max = 100 (default: -1) +|-first_evt = 0 (default: 0) +|-histo_file = 'ExampleHistos.root' (default: '') +|-ignore_dq_flags = False (default: False) +|-input_files = 
['root://eoslhcb.cern.ch//eos/lhcb/grid/prod/lhcb/MC/Upgrade/LDST/00076720/0000/00076720_00000002_1.ldst', 'root://eoslhcb.cern.ch//eos/lhcb/grid/prod/lhcb/MC/Upgrade/LDST/00076720/0000/00076720_00000004_1.ldst', 'root://eoslhcb.cern.ch//eos/lhcb/grid/prod/lhcb/MC/Upgrade/LDST/00076720/0000/00076720_00000043_1.ldst', 'root://eoslhcb.cern.ch//eos/lhcb/grid/prod/lhcb/MC/Upgrade/LDST/00076720/0000/00076720_00000068_1.ldst'] +| (default: []) +|-input_raw_format = 0.3 (default: 0.3) +|-input_type = 'ROOT' (default: 'DST') +|-lines_maker = None +|-lumi = False (default: False) +|-main_options = '' (default: '') +|-memory_pool_size = 10485760 (default: 10485760) +|-merge_genfsr = False (default: False) +|-msg_svc_format = '% F%35W%S %7W%R%T %0W%M' (default: '% F%35W%S %7W%R%T %0W%M') +|-msg_svc_time_format = '%Y-%m-%d %H:%M:%S UTC' (default: '%Y-%m-%d %H:%M:%S UTC') +|-n_event_slots = 1 (default: -1) +|-n_threads = 1 (default: 1) +|-ntuple_file = 'ExampleTuple.root' (default: '') +|-output_file = '' (default: '') +|-output_level = 3 (default: 3) +|-output_type = '' (default: '') +|-overwrite_data_options = False (default: False) +|-print_freq = 1000 (default: 1000) +|-python_logging_level = 30 (default: 30) +|-root_compression_level = 'LZMA:6' (default: 'LZMA:6') +|-scheduler_legacy_mode = True (default: True) +|-simulation = True (default: False) +|-skip_events = 2 (default: 0) +|-tck = 0 (default: 0) +|-use_iosvc = False (default: False) +|-user_algorithms = '' (default: '') +|-write_fsr = True (default: True) +\----- (End of User DVAppOptions/DVAppOptions) ----------------------------------------------------- +INFO No MainOptions specified. DaVinci() will import no options file! +WARNING DV option file or main function not defined. No user algorithms will be used. +ApplicationMgr SUCCESS +==================================================================================================================================== +==================================================================================================================================== +ApplicationMgr INFO Application Manager Configured successfully +DetectorPersistencySvc INFO Added successfully Conversion service:XmlCnvSvc +DetectorDataSvc SUCCESS Detector description database: git:/lhcb.xml +NTupleSvc INFO Added stream file:ExampleTuple.root as FILE1 +RootHistSvc INFO Writing ROOT histograms to: ExampleHistos.root +HistogramPersistencySvc INFO Added successfully Conversion service:RootHistSvc +FSROutputStreamDstWriter INFO Data source: EventDataSvc output: SVC='Gaudi::RootCnvSvc' +EventClockSvc.FakeEventTime INFO Event times generated from 0 with steps of 0 +HiveDataBrokerSvc WARNING non-reentrant algorithm: GaudiHistoAlgorithm/SimpleHistos +HiveDataBrokerSvc WARNING non-reentrant algorithm: RecordStream/FSROutputStreamDstWriter +ApplicationMgr INFO Application Manager Initialized successfully +DeFTDetector INFO Current FT geometry version = 63 +ApplicationMgr INFO Application Manager Started successfully +EventPersistencySvc INFO Added successfully Conversion service:RootCnvSvc +EventSelector INFO Stream:EventSelector.DataStreamTool_1 Def:DATAFILE='root://eoslhcb.cern.ch//eos/lhcb/grid/prod/lhcb/MC/Upgrade/LDST/00076720/0000/00076720_00000002_1.ldst' SVC='Gaudi::RootEvtSelector' OPT='READ' IgnoreChecksum='YES' +EventSelector SUCCESS Reading Event record 1. 
Record number within stream 1: 1 +RndmGenSvc.Engine INFO Generator engine type:CLHEP::RanluxEngine +RndmGenSvc.Engine INFO Current Seed:1234567 Luxury:3 +RndmGenSvc INFO Using Random engine:HepRndm::Engine<CLHEP::RanluxEngine> +SimpleHistos INFO GaudiHistoAlgorithm:: Filling Histograms...... Please be patient ! +ApplicationMgr INFO Application Manager Stopped successfully +FSROutputStreamDstWriter INFO Set up File Summary Record +FSROutputStreamDstWriter INFO Events output: 1 +LAZY_AND: DaVinci #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + NONLAZY_OR: UserAnalysis #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + LAZY_AND: StandardAlgs #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + GaudiHistoAlgorithm/SimpleHistos #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + LAZY_AND: WriteFSR #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + RecordStream/FSROutputStreamDstWriter #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| +ToolSvc INFO Removing all tools created by ToolSvc +ChronoStatSvc.finalize() INFO Service finalized successfully +ApplicationMgr INFO Application Manager Finalized successfully +ApplicationMgr INFO Application Manager Terminated successfully +SimpleHistos SUCCESS 1D histograms in directory "SimpleHistos" : 10 + | ID | Title | # | Mean | RMS | Skewness | Kurtosis | + | 101 | "Exponential" | 100 | 1.141 | 1.0488 | 1.1417 | 0.71718 | + | 102 | "Breit" | 100 | -0.15117 | 1.1275 | -0.32351 | 2.0377 | + | 1111 | "Forced Numeric ID time test" | 100 | -0.047428 | 0.89797 | -0.25222 | 0.14476 | + | AutoID time test | "AutoID time test" | 100 | -0.047428 | 0.89797 | -0.25222 | 0.14476 | + | Gaussian mean=0, sigma=1 | "Gaussian mean=0, sigma=1" | 100 | -0.047428 | 0.89797 | -0.25222 | 0.14476 | + | poisson | "Poisson" | 100 | 1.8947 | 1.1376 | 0.42462 | -0.74327 | + | subdir1/bino | "Binominal" | 100 | 1.8788 | 1.0567 | 0.38496 | -0.44547 | + | subdir2/bino | "Binominal" | 100 | 1.8788 | 1.0567 | 0.38496 | -0.44547 | + | test1 | "Forced Alpha ID time test" | 100 | -0.047428 | 0.89797 | -0.25222 | 0.14476 | + | varBinning/x | "1D Variable Binning" | 100 | -0.22236 | 2.6506 | 0.026186 | -0.97273 | +SimpleHistos SUCCESS 1D profile histograms in directory "SimpleHistos" : 9 + | ID | Title | # | Mean | RMS | Skewness | Kurtosis | + | Expo V Gauss 1DProf | "Expo V Gauss 1DProf" | 100 | -0.047428 | 0.89797 | -2.3629 | 9.4136 | + | Expo V Gauss 1DProf s | "Expo V Gauss 1DProf s" | 100 | -0.047428 | 0.89797 | -2.3629 | 9.4136 | + | Gauss V Flat 1DProf | "Gauss V Flat 1DProf" | 100 | 0.18402 | 5.688 | 0 | -3 | + | Gauss V Flat 1DProf S | "Gauss V Flat 1DProf S" | 100 | 0.18402 | 5.688 | 0 | -3 | + | Gauss V Flat 1DProf, with | "Gauss V Flat 1DProf, with limits-I" | 45 | 0.24192 | 5.5416 | -0.26447 | -1.0149 | + | Gauss V Flat 1DProf, with | "Gauss V Flat 1DProf, with limits-I s" | 45 | 0.24192 | 5.5416 | -0.26447 | -1.0149 | + | Gauss V Flat 1DProf, with | "Gauss V Flat 1DProf, with limits-II" | 55 | 0.13664 | 5.8046 | 0 | -3 | + | Gauss V Flat 1DProf, with | "Gauss V Flat 1DProf, with limits-II s" | 55 | 0.13664 | 5.8046 | 0 | -3 | + | varBinning/a | "1D Profile Variable Binning" | 100 | -0.22236 | 2.6506 | 7.4264 | 29.421 | diff --git a/DaVinciTests/tests/refs/test_davinci_tupling.ref b/DaVinciTests/tests/refs/test_davinci_tupling.ref new file mode 100644 index 0000000000000000000000000000000000000000..5bae535fc4c7129c84fc4b65da821a0e40ccfa14 --- /dev/null +++ b/DaVinciTests/tests/refs/test_davinci_tupling.ref @@ -0,0 +1,193 @@ +/***** User DVAppOptions/DVAppOptions 
************************************************************** +|-auditors = [] (default: []) +|-buffer_events = 20000 (default: 20000) +|-callgrind_profile = False (default: False) +|-conddb_tag = 'sim-20171127-vc-md100' (default: '') +|-control_flow_file = '' (default: '') +|-data_flow_file = '' (default: '') +|-data_type = 'Upgrade' (default: '') +|-dddb_tag = 'dddb-20171126' (default: '') +|-detectors = ['VP', 'UT', 'FT', 'Rich1Pmt', 'Rich2Pmt', 'Ecal', 'Hcal', 'Muon', 'Magnet', 'Tr'] +| (default: ['VP', 'UT', 'FT', 'Rich1Pmt', 'Rich2Pmt', 'Ecal', 'Hcal', 'Muon', 'Magnet', 'Tr']) +|-dqflags_tag = '' (default: '') +|-enable_unpack = None +|-event_store = 'HiveWhiteBoard' (default: 'HiveWhiteBoard') +|-evt_max = 10 (default: -1) +|-first_evt = 0 (default: 0) +|-histo_file = 'DV-example-tupling-basic-his.root' (default: '') +|-ignore_dq_flags = False (default: False) +|-input_files = ['root://eoslhcb.cern.ch//eos/lhcb/grid/prod/lhcb/MC/Upgrade/LDST/00076720/0000/00076720_00000002_1.ldst', 'root://eoslhcb.cern.ch//eos/lhcb/grid/prod/lhcb/MC/Upgrade/LDST/00076720/0000/00076720_00000004_1.ldst', 'root://eoslhcb.cern.ch//eos/lhcb/grid/prod/lhcb/MC/Upgrade/LDST/00076720/0000/00076720_00000043_1.ldst', 'root://eoslhcb.cern.ch//eos/lhcb/grid/prod/lhcb/MC/Upgrade/LDST/00076720/0000/00076720_00000068_1.ldst'] +| (default: []) +|-input_raw_format = 4.3 (default: 0.3) +|-input_type = 'ROOT' (default: 'DST') +|-lines_maker = None +|-lumi = False (default: False) +|-main_options = '' (default: '') +|-memory_pool_size = 10485760 (default: 10485760) +|-merge_genfsr = False (default: False) +|-msg_svc_format = '% F%35W%S %7W%R%T %0W%M' (default: '% F%35W%S %7W%R%T %0W%M') +|-msg_svc_time_format = '%Y-%m-%d %H:%M:%S UTC' (default: '%Y-%m-%d %H:%M:%S UTC') +|-n_event_slots = 1 (default: -1) +|-n_threads = 1 (default: 1) +|-ntuple_file = 'DV-example-tupling-basic-ntp.root' (default: '') +|-output_file = '' (default: '') +|-output_level = 3 (default: 3) +|-output_type = '' (default: '') +|-overwrite_data_options = False (default: False) +|-print_freq = 1000 (default: 1000) +|-python_logging_level = 30 (default: 30) +|-root_compression_level = 'LZMA:6' (default: 'LZMA:6') +|-scheduler_legacy_mode = True (default: True) +|-simulation = True (default: False) +|-skip_events = 0 (default: 0) +|-tck = 0 (default: 0) +|-use_iosvc = False (default: False) +|-user_algorithms = '$DAVINCIEXAMPLESROOT/python/DaVinciExamples/tupling/example-tupling-basic-run-mc:main' +| (default: '') +|-write_fsr = True (default: True) +\----- (End of User DVAppOptions/DVAppOptions) ----------------------------------------------------- +INFO No MainOptions specified. DaVinci() will import no options file! +INFO User algorithm example-tupling-basic-run-mc.main imported successfully! 
+ApplicationMgr SUCCESS +==================================================================================================================================== +==================================================================================================================================== +ApplicationMgr INFO Application Manager Configured successfully +DetectorPersistencySvc INFO Added successfully Conversion service:XmlCnvSvc +DetectorDataSvc SUCCESS Detector description database: git:/lhcb.xml +NTupleSvc INFO Added stream file:DV-example-tupling-basic-ntp.root as FILE1 +RootHistSvc INFO Writing ROOT histograms to: DV-example-tupling-basic-his.root +HistogramPersistencySvc INFO Added successfully Conversion service:RootHistSvc +UnpackChargedProtos.ChargedProto... INFO Using retuned RICH el and mu DLL values in combined DLLs +FunctionalParticleMaker.LoKi::Hy... INFO CUT: ' ( (TrTYPE==3) &TrALL) ' +DimuonsTuple INFO Initialising the FunTuple algorithm +DimuonsTuple INFO Conducting checks with LoKi +DimuonsTuple INFO Conducting checks with ThOr +DimuonsTuple INFO Setting the properties of ParticleTupleProp objects! +DimuonsTuple INFO Instatiating LoKi functors! +DimuonsTuple INFO Instatiating ThOr functors! +FunctorFactory INFO Cache miss for functor: ::Functors::Track::Momentum{}, now trying cling with headers [Event/Particle.h, Functors/TrackLike.h] +FunctorFactory INFO Cache miss for functor: ::Functors::Track::TransverseMomentum{}, now trying cling with headers [Event/Particle.h, Functors/TrackLike.h] +FunctorFactory INFO Reusing cling compiled factory for functor: ::Functors::Track::Momentum{} +DimuonsTuple INFO Finished initialisation: +FSROutputStreamDstWriter INFO Data source: EventDataSvc output: SVC='Gaudi::RootCnvSvc' +EventClockSvc.FakeEventTime INFO Event times generated from 0 with steps of 0 +HiveDataBrokerSvc WARNING non-reentrant algorithm: GaudiHistoAlgorithm/SimpleHistos +HiveDataBrokerSvc WARNING non-reentrant algorithm: UnpackRecVertex/UnpackRecVertices +HiveDataBrokerSvc WARNING non-reentrant algorithm: DataPacking::Unpack<LHCb::MuonPIDPacker>/UnpackMuonPIDs +HiveDataBrokerSvc WARNING non-reentrant algorithm: DataPacking::Unpack<LHCb::RichPIDPacker>/UnpackRichPIDs +HiveDataBrokerSvc WARNING non-reentrant algorithm: UnpackProtoParticle/UnpackNeutralProtos +HiveDataBrokerSvc WARNING non-reentrant algorithm: UnpackProtoParticle/UnpackChargedProtos +HiveDataBrokerSvc WARNING non-reentrant algorithm: CombineParticles +HiveDataBrokerSvc WARNING non-reentrant algorithm: RecordStream/FSROutputStreamDstWriter +ApplicationMgr INFO Application Manager Initialized successfully +DeFTDetector INFO Current FT geometry version = 63 +ApplicationMgr INFO Application Manager Started successfully +EventPersistencySvc INFO Added successfully Conversion service:RootCnvSvc +EventSelector INFO Stream:EventSelector.DataStreamTool_1 Def:DATAFILE='root://eoslhcb.cern.ch//eos/lhcb/grid/prod/lhcb/MC/Upgrade/LDST/00076720/0000/00076720_00000002_1.ldst' SVC='Gaudi::RootEvtSelector' OPT='READ' IgnoreChecksum='YES' +EventSelector SUCCESS Reading Event record 1. Record number within stream 1: 1 +RndmGenSvc.Engine INFO Generator engine type:CLHEP::RanluxEngine +RndmGenSvc.Engine INFO Current Seed:1234567 Luxury:3 +RndmGenSvc INFO Using Random engine:HepRndm::Engine<CLHEP::RanluxEngine> +SimpleHistos INFO GaudiHistoAlgorithm:: Filling Histograms...... Please be patient ! 
+MagneticFieldSvc INFO Map scaled by factor 1 with polarity internally used: -1 signed relative current: -1 +ToolSvc.LoKi::VertexFitter INFO Option for Optimised Kalman Filter fit is activated +RFileCnv INFO opening Root file "DV-example-tupling-basic-ntp.root" for writing +RCWNTupleCnv INFO Booked TTree with ID: DecayTree "DecayTree" in directory DV-example-tupling-basic-ntp.root:/DimuonsTuple +ApplicationMgr INFO Application Manager Stopped successfully +UnpackRichPIDs SUCCESS #WARNINGS = 10 Message = 'Incorrect data version 0 for packing version > 3. Correcting data to version 2.' +DimuonsTuple SUCCESS Booked 1 N-Tuples and 0 Event Tag Collections +DimuonsTuple SUCCESS List of booked N-Tuples in directory "FILE1/DimuonsTuple" +DimuonsTuple SUCCESS ID=DecayTree Title="DecayTree" #items=10 {Jpsi_LOKI_P,Jpsi_LOKI_PT,Jpsi_LOKI_Muonp_PT,Jpsi_LOKI_Muonm_PT,Jpsi_LOKI_MAXPT,Jp} +FSROutputStreamDstWriter INFO Set up File Summary Record +FSROutputStreamDstWriter INFO Events output: 1 +LAZY_AND: DaVinci #=10 Sum=10 Eff=|( 100.0000 +- 0.00000 )%| + NONLAZY_OR: UserAnalysis #=10 Sum=10 Eff=|( 100.0000 +- 0.00000 )%| + LAZY_AND: StandardAlgs #=10 Sum=10 Eff=|( 100.0000 +- 0.00000 )%| + GaudiHistoAlgorithm/SimpleHistos #=10 Sum=10 Eff=|( 100.0000 +- 0.00000 )%| + LAZY_AND: UserAlgorithms #=10 Sum=3 Eff=|( 30.00000 +- 14.4914 )%| + UnpackRecVertex/UnpackRecVertices #=10 Sum=10 Eff=|( 100.0000 +- 0.00000 )%| + UnpackCaloHypo/UnpackCaloElectrons #=10 Sum=10 Eff=|( 100.0000 +- 0.00000 )%| + UnpackCaloHypo/UnpackCaloPhotons #=10 Sum=10 Eff=|( 100.0000 +- 0.00000 )%| + UnpackCaloHypo/UnpackCaloMergedPi0s #=10 Sum=10 Eff=|( 100.0000 +- 0.00000 )%| + UnpackCaloHypo/UnpackCaloSplitPhotons #=10 Sum=10 Eff=|( 100.0000 +- 0.00000 )%| + DataPacking__Unpack<LHCb__MuonPIDPacker>/UnpackMuonPIDs #=10 Sum=10 Eff=|( 100.0000 +- 0.00000 )%| + DataPacking__Unpack<LHCb__RichPIDPacker>/UnpackRichPIDs #=10 Sum=10 Eff=|( 100.0000 +- 0.00000 )%| + UnpackTrackFunctional/UnpackBestTracks #=10 Sum=10 Eff=|( 100.0000 +- 0.00000 )%| + UnpackTrackFunctional/UnpackMuonTracks #=10 Sum=10 Eff=|( 100.0000 +- 0.00000 )%| + UnpackProtoParticle/UnpackNeutralProtos #=10 Sum=10 Eff=|( 100.0000 +- 0.00000 )%| + UnpackProtoParticle/UnpackChargedProtos #=10 Sum=10 Eff=|( 100.0000 +- 0.00000 )%| + CombineParticles/CombineParticles #=10 Sum=3 Eff=|( 30.00000 +- 14.4914 )%| + FunTuple_Particles/DimuonsTuple #=3 Sum=3 Eff=|( 100.0000 +- 0.00000 )%| + LAZY_AND: WriteFSR #=10 Sum=10 Eff=|( 100.0000 +- 0.00000 )%| + RecordStream/FSROutputStreamDstWriter #=10 Sum=10 Eff=|( 100.0000 +- 0.00000 )%| +ToolSvc INFO Removing all tools created by ToolSvc +RFileCnv INFO dumping contents of /NTUPLES/FILE1 +TFile: name=DV-example-tupling-basic-ntp.root, title=Gaudi Trees, option=CREATE +NTupleSvc INFO NTuples saved successfully +ChronoStatSvc.finalize() INFO Service finalized successfully +ApplicationMgr INFO Application Manager Finalized successfully +ApplicationMgr INFO Application Manager Terminated successfully +CombineParticles INFO Number of counters : 11 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + | "# FunctionalParticleMaker/Particles" | 10 | 1106 | 110.60 | 43.140 | 33.000 | 158.00 | + | "# J/psi(1S) -> mu+ mu+ " | 10 | 0 | 0.0000 | 0.0000 | 0.0000 | 0.0000 | + | "# J/psi(1S) -> mu+ mu- " | 10 | 3 | 0.30000 | 0.45826 | 0.0000 | 1.0000 | + | "# J/psi(1S) -> mu- mu- " | 10 | 0 | 0.0000 | 0.0000 | 0.0000 | 0.0000 | + | "# input particles" | 10 | 1106 | 110.60 | 43.140 | 33.000 | 158.00 | + | "# mu+" | 10 | 5 | 0.50000 | 0.50000 | 0.0000 
| 1.0000 | + | "# mu-" | 10 | 6 | 0.60000 | 0.48990 | 0.0000 | 1.0000 | + | "# selected" | 10 | 3 | 0.30000 | + |*"#accept" | 10 | 3 |( 30.00000 +- 14.49138)% | + | "#pass combcut" | 3 | 3 | 1.0000 | + | "#pass mother cut" | 3 | 3 | 1.0000 | +FunctionalParticleMaker INFO Number of counters : 4 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + |*"# passed ProtoParticle filter" | 1106 | 1106 |( 100.0000 +- 0.000000)% | + |*"# passed Track filter" | 1667 | 1106 |( 66.34673 +- 1.157326)% | + | "Nb created anti-particles" | 10 | 570 | 57.000 | 25.072 | 11.000 | 93.000 | + | "Nb created particles" | 10 | 536 | 53.600 | 19.643 | 22.000 | 77.000 | +ToolSvc.HybridFactory INFO Number of counters : 1 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + | "# loaded from PYTHON" | 11 | +ToolSvc.LoKi::VertexFitter INFO Number of counters : 2 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + | "#iterations/1" | 3 | 3 | 1.0000 | 0.0000 | 1.0000 | 1.0000 | + | "#iterations/Opt" | 3 | 0 | 0.0000 | 0.0000 | 0.0000 | 0.0000 | +ToolSvc.PPFactoryHybridFactory INFO Number of counters : 1 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + | "# loaded from PYTHON" | 1 | +ToolSvc.TrackFunctorFactory INFO Number of counters : 1 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + | "# loaded from PYTHON" | 1 | +UnpackBestTracks INFO Number of counters : 1 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + | "# Unpacked Tracks" | 10 | 4332 | 433.20 | +UnpackMuonPIDs INFO Number of counters : 1 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + | "# UnPackedData" | 10 | 1088 | 108.80 | 43.990 | 33.000 | 159.00 | +UnpackMuonTracks INFO Number of counters : 1 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + | "# Unpacked Tracks" | 10 | 156 | 15.600 | +SimpleHistos SUCCESS 1D histograms in directory "SimpleHistos" : 10 + | ID | Title | # | Mean | RMS | Skewness | Kurtosis | + | 101 | "Exponential" | 10 | 1.4996 | 1.259 | 0.98706 | -0.35506 | + | 102 | "Breit" | 10 | 0.057899 | 1.2104 | -1.7162 | 1.96 | + | 1111 | "Forced Numeric ID time test" | 10 | 0.62385 | 0.83435 | 0.29994 | -1.437 | + | AutoID time test | "AutoID time test" | 10 | 0.62385 | 0.83435 | 0.29994 | -1.437 | + | Gaussian mean=0, sigma=1 | "Gaussian mean=0, sigma=1" | 10 | 0.62385 | 0.83435 | 0.29994 | -1.437 | + | poisson | "Poisson" | 10 | 1.3333 | 0.8165 | 0.72827 | -0.010819 | + | subdir1/bino | "Binominal" | 10 | 2.2222 | 1.3147 | -0.0065855 | -1.1662 | + | subdir2/bino | "Binominal" | 10 | 2.2222 | 1.3147 | -0.0065855 | -1.1662 | + | test1 | "Forced Alpha ID time test" | 10 | 0.62385 | 0.83435 | 0.29994 | -1.437 | + | varBinning/x | "1D Variable Binning" | 10 | 0.74391 | 2.9037 | -0.54505 | -0.53568 | +SimpleHistos SUCCESS 1D profile histograms in directory "SimpleHistos" : 9 + | ID | Title | # | Mean | RMS | Skewness | Kurtosis | + | Expo V Gauss 1DProf | "Expo V Gauss 1DProf" | 10 | 0.62385 | 0.83435 | 0.078851 | -2.1359 | + | Expo V Gauss 1DProf s | "Expo V Gauss 1DProf s" | 10 | 0.62385 | 0.83435 | 0.078851 | -2.1359 | + | Gauss V Flat 1DProf | "Gauss V Flat 1DProf" | 10 | -2.7485 | 4.9549 | 1.8445 | -0.57073 | + | Gauss V Flat 1DProf S | "Gauss V Flat 1DProf S" | 10 | -2.7485 | 4.9549 | 1.8445 | -0.57073 | + | Gauss V Flat 1DProf, with | "Gauss V Flat 1DProf, with limits-I" | 7 | -0.10679 | 3.4013 | 0.032668 | 0.72593 | + | Gauss V Flat 1DProf, with | "Gauss V Flat 1DProf, with limits-I s" | 7 | -0.10679 | 3.4013 | 0.032668 | 0.72593 | + | Gauss V 
Flat 1DProf, with | "Gauss V Flat 1DProf, with limits-II" | 3 | -8.9125 | 0.75156 | 0 | -3 | + | Gauss V Flat 1DProf, with | "Gauss V Flat 1DProf, with limits-II s" | 3 | -8.9125 | 0.75156 | 0 | -3 | + | varBinning/a | "1D Profile Variable Binning" | 10 | 0.74391 | 2.9037 | -1.4356 | 2.3329 | diff --git a/DaVinciTests/tests/refs/test_davinci_tupling_advanced.ref b/DaVinciTests/tests/refs/test_davinci_tupling_advanced.ref new file mode 100644 index 0000000000000000000000000000000000000000..38b34a0b668de7006d5d8d96c368a020c52296a5 --- /dev/null +++ b/DaVinciTests/tests/refs/test_davinci_tupling_advanced.ref @@ -0,0 +1,294 @@ +/***** User DVAppOptions/DVAppOptions ************************************************************** +|-auditors = [] (default: []) +|-buffer_events = 20000 (default: 20000) +|-callgrind_profile = False (default: False) +|-conddb_tag = 'sim-20171127-vc-md100' (default: '') +|-control_flow_file = '' (default: '') +|-data_flow_file = '' (default: '') +|-data_type = 'Upgrade' (default: '') +|-dddb_tag = 'dddb-20171126' (default: '') +|-detectors = ['VP', 'UT', 'FT', 'Rich1Pmt', 'Rich2Pmt', 'Ecal', 'Hcal', 'Muon', 'Magnet', 'Tr'] +| (default: ['VP', 'UT', 'FT', 'Rich1Pmt', 'Rich2Pmt', 'Ecal', 'Hcal', 'Muon', 'Magnet', 'Tr']) +|-dqflags_tag = '' (default: '') +|-enable_unpack = None +|-event_store = 'HiveWhiteBoard' (default: 'HiveWhiteBoard') +|-evt_max = 100 (default: -1) +|-first_evt = 0 (default: 0) +|-histo_file = 'ExampleHistos.root' (default: '') +|-ignore_dq_flags = False (default: False) +|-input_files = ['root://eoslhcb.cern.ch//eos/lhcb/grid/prod/lhcb/MC/Upgrade/LDST/00076720/0000/00076720_00000002_1.ldst', 'root://eoslhcb.cern.ch//eos/lhcb/grid/prod/lhcb/MC/Upgrade/LDST/00076720/0000/00076720_00000004_1.ldst', 'root://eoslhcb.cern.ch//eos/lhcb/grid/prod/lhcb/MC/Upgrade/LDST/00076720/0000/00076720_00000043_1.ldst', 'root://eoslhcb.cern.ch//eos/lhcb/grid/prod/lhcb/MC/Upgrade/LDST/00076720/0000/00076720_00000068_1.ldst'] +| (default: []) +|-input_raw_format = 0.3 (default: 0.3) +|-input_type = 'ROOT' (default: 'DST') +|-lines_maker = None +|-lumi = False (default: False) +|-main_options = '' (default: '') +|-memory_pool_size = 10485760 (default: 10485760) +|-merge_genfsr = False (default: False) +|-msg_svc_format = '% F%35W%S %7W%R%T %0W%M' (default: '% F%35W%S %7W%R%T %0W%M') +|-msg_svc_time_format = '%Y-%m-%d %H:%M:%S UTC' (default: '%Y-%m-%d %H:%M:%S UTC') +|-n_event_slots = 1 (default: -1) +|-n_threads = 1 (default: 1) +|-ntuple_file = 'ExampleTuple.root' (default: '') +|-output_file = '' (default: '') +|-output_level = 3 (default: 3) +|-output_type = '' (default: '') +|-overwrite_data_options = False (default: False) +|-print_freq = 1000 (default: 1000) +|-python_logging_level = 30 (default: 30) +|-root_compression_level = 'LZMA:6' (default: 'LZMA:6') +|-scheduler_legacy_mode = True (default: True) +|-simulation = True (default: False) +|-skip_events = 2 (default: 0) +|-tck = 0 (default: 0) +|-use_iosvc = False (default: False) +|-user_algorithms = '$DAVINCIEXAMPLESROOT/python/DaVinciExamples/tupling/example-tupling-advanced-run-mc:main' +| (default: '') +|-write_fsr = True (default: True) +\----- (End of User DVAppOptions/DVAppOptions) ----------------------------------------------------- +INFO No MainOptions specified. DaVinci() will import no options file! +INFO User algorithm example-tupling-advanced-run-mc.main imported successfully! 
+ApplicationMgr SUCCESS +==================================================================================================================================== +==================================================================================================================================== +ApplicationMgr INFO Application Manager Configured successfully +DetectorPersistencySvc INFO Added successfully Conversion service:XmlCnvSvc +DetectorDataSvc SUCCESS Detector description database: git:/lhcb.xml +NTupleSvc INFO Added stream file:DV-example-tupling-advanced-ntp.root as FILE1 +RootHistSvc INFO Writing ROOT histograms to: DV-example-tupling-advanced-his.root +HistogramPersistencySvc INFO Added successfully Conversion service:RootHistSvc +UnpackChargedProtos.ChargedProto... INFO Using retuned RICH el and mu DLL values in combined DLLs +FunctionalParticleMaker.LoKi::Hy... INFO CUT: ' ( (TrTYPE==3) &TrALL) ' +DimuonsTuple INFO Initialising the FunTuple algorithm +DimuonsTuple INFO Conducting checks with LoKi +DimuonsTuple INFO Conducting checks with ThOr +DimuonsTuple INFO Setting the properties of ParticleTupleProp objects! +DimuonsTuple INFO Instatiating LoKi functors! +DimuonsTuple INFO Instatiating ThOr functors! +FunctorFactory INFO Cache miss for functor: ::Functors::Track::Momentum{}, now trying cling with headers [Event/Particle.h, Functors/TrackLike.h] +FunctorFactory INFO Cache miss for functor: ::Functors::Track::TransverseMomentum{}, now trying cling with headers [Event/Particle.h, Functors/TrackLike.h] +FunctorFactory INFO Reusing cling compiled factory for functor: ::Functors::Track::Momentum{} +DimuonsTuple INFO Finished initialisation: +FunctionalParticleMaker#1.LoKi::... INFO CUT: ' ( (TrTYPE==5) &TrALL) ' +KsTuple INFO Initialising the FunTuple algorithm +KsTuple INFO Conducting checks with LoKi +KsTuple INFO Conducting checks with ThOr +KsTuple INFO Setting the properties of ParticleTupleProp objects! +KsTuple INFO Instatiating LoKi functors! +KsTuple INFO Instatiating ThOr functors! 
+FunctorFactory INFO Reusing cling compiled factory for functor: ::Functors::Track::Momentum{} +FunctorFactory INFO Reusing cling compiled factory for functor: ::Functors::Track::TransverseMomentum{} +KsTuple INFO Finished initialisation: +FSROutputStreamDstWriter INFO Data source: EventDataSvc output: SVC='Gaudi::RootCnvSvc' +EventClockSvc.FakeEventTime INFO Event times generated from 0 with steps of 0 +HiveDataBrokerSvc WARNING non-reentrant algorithm: GaudiHistoAlgorithm/SimpleHistos +HiveDataBrokerSvc WARNING non-reentrant algorithm: UnpackRecVertex/UnpackRecVertices +HiveDataBrokerSvc WARNING non-reentrant algorithm: DataPacking::Unpack<LHCb::MuonPIDPacker>/UnpackMuonPIDs +HiveDataBrokerSvc WARNING non-reentrant algorithm: DataPacking::Unpack<LHCb::RichPIDPacker>/UnpackRichPIDs +HiveDataBrokerSvc WARNING non-reentrant algorithm: UnpackProtoParticle/UnpackNeutralProtos +HiveDataBrokerSvc WARNING non-reentrant algorithm: UnpackProtoParticle/UnpackChargedProtos +HiveDataBrokerSvc WARNING non-reentrant algorithm: CombineParticles +HiveDataBrokerSvc WARNING non-reentrant algorithm: FilterDesktop +HiveDataBrokerSvc WARNING non-reentrant algorithm: CombineParticles/CombineParticles#1 +HiveDataBrokerSvc WARNING non-reentrant algorithm: RecordStream/FSROutputStreamDstWriter +ApplicationMgr INFO Application Manager Initialized successfully +DeFTDetector INFO Current FT geometry version = 63 +ApplicationMgr INFO Application Manager Started successfully +EventPersistencySvc INFO Added successfully Conversion service:RootCnvSvc +EventSelector INFO Stream:EventSelector.DataStreamTool_1 Def:DATAFILE='root://eoslhcb.cern.ch//eos/lhcb/grid/prod/lhcb/MC/Upgrade/LDST/00076720/0000/00076720_00000002_1.ldst' SVC='Gaudi::RootEvtSelector' OPT='READ' IgnoreChecksum='YES' +EventSelector SUCCESS Reading Event record 1. Record number within stream 1: 1 +RndmGenSvc.Engine INFO Generator engine type:CLHEP::RanluxEngine +RndmGenSvc.Engine INFO Current Seed:1234567 Luxury:3 +RndmGenSvc INFO Using Random engine:HepRndm::Engine<CLHEP::RanluxEngine> +SimpleHistos INFO GaudiHistoAlgorithm:: Filling Histograms...... Please be patient ! +MagneticFieldSvc INFO Map scaled by factor 1 with polarity internally used: -1 signed relative current: -1 +ToolSvc.LoKi::VertexFitter INFO Option for Optimised Kalman Filter fit is activated +TransportSvc INFO Initialize the static pointer to DetDesc::IGeometryErrorSvc +TransportSvc INFO Recovery of geometry errors is ENABLED +RFileCnv INFO opening Root file "DV-example-tupling-advanced-ntp.root" for writing +RCWNTupleCnv INFO Booked TTree with ID: DecayTree "DecayTree" in directory DV-example-tupling-advanced-ntp.root:/DimuonsTuple +KsTuple INFO Multiple particles match the decay descriptor. All candidates info will be stored with a suffix. +RCWNTupleCnv INFO Booked TTree with ID: DecayTree "DecayTree" in directory DV-example-tupling-advanced-ntp.root:/KsTuple +KsTuple INFO Multiple particles match the decay descriptor. All candidates info will be stored with a suffix. +KsTuple INFO Multiple particles match the decay descriptor. All candidates info will be stored with a suffix. +KsTuple INFO Multiple particles match the decay descriptor. All candidates info will be stored with a suffix. +KsTuple INFO Multiple particles match the decay descriptor. All candidates info will be stored with a suffix. +KsTuple INFO Multiple particles match the decay descriptor. All candidates info will be stored with a suffix. +KsTuple INFO Multiple particles match the decay descriptor. 
All candidates info will be stored with a suffix. +KsTuple INFO Multiple particles match the decay descriptor. All candidates info will be stored with a suffix. +KsTuple INFO Multiple particles match the decay descriptor. All candidates info will be stored with a suffix. +KsTuple INFO Multiple particles match the decay descriptor. All candidates info will be stored with a suffix. +KsTuple INFO Multiple particles match the decay descriptor. All candidates info will be stored with a suffix. +KsTuple INFO Multiple particles match the decay descriptor. All candidates info will be stored with a suffix. +KsTuple INFO Multiple particles match the decay descriptor. All candidates info will be stored with a suffix. +KsTuple INFO Multiple particles match the decay descriptor. All candidates info will be stored with a suffix. +KsTuple INFO Multiple particles match the decay descriptor. All candidates info will be stored with a suffix. +KsTuple INFO Multiple particles match the decay descriptor. All candidates info will be stored with a suffix. +KsTuple INFO Multiple particles match the decay descriptor. All candidates info will be stored with a suffix. +DimuonsTuple INFO No candidates found in this event. Skipping this event +DimuonsTuple INFO No candidates found in this event. Skipping this event +ApplicationMgr INFO Application Manager Stopped successfully +UnpackRichPIDs SUCCESS #WARNINGS = 100 Message = 'Incorrect data version 0 for packing version > 3. Correcting data to version 2.' +DimuonsTuple SUCCESS Booked 1 N-Tuples and 0 Event Tag Collections +DimuonsTuple SUCCESS List of booked N-Tuples in directory "FILE1/DimuonsTuple" +DimuonsTuple SUCCESS ID=DecayTree Title="DecayTree" #items=10 {Jpsi_LOKI_P,Jpsi_LOKI_PT,Jpsi_LOKI_Muonp_PT,Jpsi_LOKI_Muonm_PT,Jpsi_LOKI_MAXPT,Jp} +KsTuple SUCCESS Booked 1 N-Tuples and 0 Event Tag Collections +KsTuple SUCCESS List of booked N-Tuples in directory "FILE1/KsTuple" +KsTuple SUCCESS ID=DecayTree Title="DecayTree" #items=8 {Ks_LOKI_P,Ks_LOKI_PT,Ks_LOKI_Muonp_PT,Ks_LOKI_Muonm_PT,Ks_LOKI_MAXPT,Ks_LOKI_N_HI} +FSROutputStreamDstWriter INFO Set up File Summary Record +FSROutputStreamDstWriter INFO Events output: 1 +LAZY_AND: DaVinci #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + NONLAZY_OR: UserAnalysis #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + LAZY_AND: StandardAlgs #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + GaudiHistoAlgorithm/SimpleHistos #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + LAZY_AND: Reco #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackRecVertex/UnpackRecVertices #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackCaloHypo/UnpackCaloElectrons #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackCaloHypo/UnpackCaloPhotons #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackCaloHypo/UnpackCaloMergedPi0s #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackCaloHypo/UnpackCaloSplitPhotons #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + DataPacking__Unpack<LHCb__MuonPIDPacker>/UnpackMuonPIDs #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + DataPacking__Unpack<LHCb__RichPIDPacker>/UnpackRichPIDs #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackTrackFunctional/UnpackBestTracks #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackTrackFunctional/UnpackMuonTracks #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackProtoParticle/UnpackNeutralProtos #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackProtoParticle/UnpackChargedProtos #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + LAZY_AND: DiMuons #=100 Sum=36 Eff=|( 36.00000 +- 4.80000 )%| + 
UnpackRecVertex/UnpackRecVertices #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackCaloHypo/UnpackCaloElectrons #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackCaloHypo/UnpackCaloPhotons #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackCaloHypo/UnpackCaloMergedPi0s #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackCaloHypo/UnpackCaloSplitPhotons #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + DataPacking__Unpack<LHCb__MuonPIDPacker>/UnpackMuonPIDs #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + DataPacking__Unpack<LHCb__RichPIDPacker>/UnpackRichPIDs #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackTrackFunctional/UnpackBestTracks #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackTrackFunctional/UnpackMuonTracks #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackProtoParticle/UnpackNeutralProtos #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackProtoParticle/UnpackChargedProtos #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + CombineParticles/CombineParticles #=100 Sum=36 Eff=|( 36.00000 +- 4.80000 )%| + FunTuple_Particles/DimuonsTuple #=36 Sum=36 Eff=|( 100.0000 +- 0.00000 )%| + LAZY_AND: KShorts #=100 Sum=45 Eff=|( 45.00000 +- 4.97494 )%| + UnpackRecVertex/UnpackRecVertices #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackCaloHypo/UnpackCaloElectrons #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackCaloHypo/UnpackCaloPhotons #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackCaloHypo/UnpackCaloMergedPi0s #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackCaloHypo/UnpackCaloSplitPhotons #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + DataPacking__Unpack<LHCb__MuonPIDPacker>/UnpackMuonPIDs #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + DataPacking__Unpack<LHCb__RichPIDPacker>/UnpackRichPIDs #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackTrackFunctional/UnpackBestTracks #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackTrackFunctional/UnpackMuonTracks #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackProtoParticle/UnpackNeutralProtos #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackProtoParticle/UnpackChargedProtos #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + CombineParticles/CombineParticles#1 #=100 Sum=45 Eff=|( 45.00000 +- 4.97494 )%| + FunTuple_Particles/KsTuple #=45 Sum=45 Eff=|( 100.0000 +- 0.00000 )%| + LAZY_AND: WriteFSR #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + RecordStream/FSROutputStreamDstWriter #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| +TransportSvc SUCCESS GEOMETRY ERRORS: 'Skip' map has the size 2 + | Logical Volume | | # mean RMS min max | +TransportSvc SUCCESS GEOMETRY ERRORS: 'Recover' map has the size 11 + | Logical Volume | | # mean RMS min max | +TransportSvc SUCCESS GEOMETRY ERRORS: 'Codes' map has the size 0 +TransportSvc INFO Reset the static pointer to DetDesc::IGeometyrErrorSvc +ToolSvc INFO Removing all tools created by ToolSvc +ToolSvc.LoKi::VertexFitter SUCCESS #WARNINGS = 17 Message = 'fit(): Vertex is outside of fiducial volume [CombineParticles#1]' +ToolSvc.LoKi::VertexFitter SUCCESS #WARNINGS = 23 Message = 'No convergency has been reached [CombineParticles#1]' +ToolSvc.LoKi::VertexFitter SUCCESS #WARNINGS = 23 Message = 'fit(): failure from _iterate() [CombineParticles#1]' +ToolSvc.LoKi::VertexFitter SUCCESS #WARNINGS = 23 Message = 'fit(): failure from fit [CombineParticles#1]' +RFileCnv INFO dumping contents of /NTUPLES/FILE1 +TFile: name=DV-example-tupling-advanced-ntp.root, title=Gaudi Trees, option=CREATE +NTupleSvc INFO NTuples saved successfully +ChronoStatSvc.finalize() INFO Service 
finalized successfully +ApplicationMgr INFO Application Manager Finalized successfully +ApplicationMgr INFO Application Manager Terminated successfully +CombineParticles INFO Number of counters : 11 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + | "# FunctionalParticleMaker/Particles" | 100 | 9988 | 99.880 | 47.095 | 26.000 | 252.00 | + | "# J/psi(1S) -> mu+ mu+ " | 100 | 1 | 0.010000 | 0.099499 | 0.0000 | 1.0000 | + | "# J/psi(1S) -> mu+ mu- " | 100 | 35 | 0.35000 | 0.47697 | 0.0000 | 1.0000 | + | "# J/psi(1S) -> mu- mu- " | 100 | 1 | 0.010000 | 0.099499 | 0.0000 | 1.0000 | + | "# input particles" | 100 | 9988 | 99.880 | 47.095 | 26.000 | 252.00 | + | "# mu+" | 100 | 70 | 0.70000 | 0.60828 | 0.0000 | 2.0000 | + | "# mu-" | 100 | 68 | 0.68000 | 0.64622 | 0.0000 | 4.0000 | + | "# selected" | 100 | 37 | 0.37000 | + |*"#accept" | 100 | 36 |( 36.00000 +- 4.800000)% | + | "#pass combcut" | 67 | 43 | 0.64179 | + | "#pass mother cut" | 43 | 37 | 0.86047 | +CombineParticles#1 INFO Number of counters : 10 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + |*"# FilterDesktop/Particles" | 100 | 100 |( 100.0000 +- 0.000000)% | + | "# KS0 -> pi+ pi- " | 100 | 73 | 0.73000 | 1.1212 | 0.0000 | 7.0000 | + | "# input particles" | 100 | 1129 | 11.290 | 7.4730 | 1.0000 | 43.000 | + | "# pi+" | 100 | 594 | 5.9400 | 4.1202 | 0.0000 | 22.000 | + | "# pi-" | 100 | 535 | 5.3500 | 3.8481 | 0.0000 | 21.000 | + | "# selected" | 100 | 73 | 0.73000 | + |*"#accept" | 100 | 45 |( 45.00000 +- 4.974937)% | + | "#pass combcut" | 4381 | 799 | 0.18238 | + | "#pass mother cut" | 799 | 73 | 0.091364 | + | "Error from IParticleCombiner, skip the combination"| 23 | 23 | 1.0000 | 0.0000 | 1.0000 | 1.0000 | +FilterDesktop INFO Number of counters : 5 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + | "# FunctionalParticleMaker#1/Particles" | 100 | 2205 | 22.050 | 12.530 | 3.0000 | 70.000 | + | "# input particles" | 100 | 2205 | 22.050 | 12.530 | 3.0000 | 70.000 | + |*"#accept" | 100 | 100 |( 100.0000 +- 0.000000)% | + | "#passed" | 100 | 1129 | 11.290 | 7.4730 | 1.0000 | 43.000 | + |*"efficiency" | 2205 | 1129 |( 51.20181 +- 1.064487)% | +FunctionalParticleMaker INFO Number of counters : 4 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + |*"# passed ProtoParticle filter" | 9988 | 9988 |( 100.0000 +- 0.000000)% | + |*"# passed Track filter" | 15303 | 9988 |( 65.26825 +- 0.3848806)% | + | "Nb created anti-particles" | 100 | 4948 | 49.480 | 23.364 | 11.000 | 119.00 | + | "Nb created particles" | 100 | 5040 | 50.400 | 24.526 | 14.000 | 133.00 | +FunctionalParticleMaker#1 INFO Number of counters : 4 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + |*"# passed ProtoParticle filter" | 2205 | 2205 |( 100.0000 +- 0.000000)% | + |*"# passed Track filter" | 15303 | 2205 |( 14.40894 +- 0.2838847)% | + | "Nb created anti-particles" | 100 | 1073 | 10.730 | 6.3951 | 0.0000 | 38.000 | + | "Nb created particles" | 100 | 1132 | 11.320 | 6.8204 | 0.0000 | 35.000 | +ToolSvc.HybridFactory INFO Number of counters : 1 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + | "# loaded from PYTHON" | 22 | +ToolSvc.LoKi::VertexFitter INFO Number of counters : 5 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + | "#iterations/1" | 54 | 83 | 1.5370 | 0.83251 | 1.0000 | 3.0000 | + | "#iterations/2" | 782 | 2417 | 3.0908 | 1.2483 | 2.0000 | 10.000 | + | "#iterations/Gen" | 776 | 2405 | 3.0992 | + | "#iterations/Opt" | 43 | 6 | 0.13953 | 0.34650 | 0.0000 | 1.0000 | + | 
"Seed:case(1)" | 799 | +ToolSvc.PPFactoryHybridFactory INFO Number of counters : 1 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + | "# loaded from PYTHON" | 2 | +ToolSvc.TrackFunctorFactory INFO Number of counters : 1 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + | "# loaded from PYTHON" | 2 | +UnpackBestTracks INFO Number of counters : 1 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + | "# Unpacked Tracks" | 100 | 41403 | 414.03 | +UnpackMuonPIDs INFO Number of counters : 1 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + | "# UnPackedData" | 100 | 9896 | 98.960 | 47.355 | 25.000 | 264.00 | +UnpackMuonTracks INFO Number of counters : 1 + | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + | "# Unpacked Tracks" | 100 | 1684 | 16.840 | +SimpleHistos SUCCESS 1D histograms in directory "SimpleHistos" : 10 + | ID | Title | # | Mean | RMS | Skewness | Kurtosis | + | 101 | "Exponential" | 100 | 1.141 | 1.0488 | 1.1417 | 0.71718 | + | 102 | "Breit" | 100 | -0.15117 | 1.1275 | -0.32351 | 2.0377 | + | 1111 | "Forced Numeric ID time test" | 100 | -0.047428 | 0.89797 | -0.25222 | 0.14476 | + | AutoID time test | "AutoID time test" | 100 | -0.047428 | 0.89797 | -0.25222 | 0.14476 | + | Gaussian mean=0, sigma=1 | "Gaussian mean=0, sigma=1" | 100 | -0.047428 | 0.89797 | -0.25222 | 0.14476 | + | poisson | "Poisson" | 100 | 1.8947 | 1.1376 | 0.42462 | -0.74327 | + | subdir1/bino | "Binominal" | 100 | 1.8788 | 1.0567 | 0.38496 | -0.44547 | + | subdir2/bino | "Binominal" | 100 | 1.8788 | 1.0567 | 0.38496 | -0.44547 | + | test1 | "Forced Alpha ID time test" | 100 | -0.047428 | 0.89797 | -0.25222 | 0.14476 | + | varBinning/x | "1D Variable Binning" | 100 | -0.22236 | 2.6506 | 0.026186 | -0.97273 | +SimpleHistos SUCCESS 1D profile histograms in directory "SimpleHistos" : 9 + | ID | Title | # | Mean | RMS | Skewness | Kurtosis | + | Expo V Gauss 1DProf | "Expo V Gauss 1DProf" | 100 | -0.047428 | 0.89797 | -2.3629 | 9.4136 | + | Expo V Gauss 1DProf s | "Expo V Gauss 1DProf s" | 100 | -0.047428 | 0.89797 | -2.3629 | 9.4136 | + | Gauss V Flat 1DProf | "Gauss V Flat 1DProf" | 100 | 0.18402 | 5.688 | 0 | -3 | + | Gauss V Flat 1DProf S | "Gauss V Flat 1DProf S" | 100 | 0.18402 | 5.688 | 0 | -3 | + | Gauss V Flat 1DProf, with | "Gauss V Flat 1DProf, with limits-I" | 45 | 0.24192 | 5.5416 | -0.26447 | -1.0149 | + | Gauss V Flat 1DProf, with | "Gauss V Flat 1DProf, with limits-I s" | 45 | 0.24192 | 5.5416 | -0.26447 | -1.0149 | + | Gauss V Flat 1DProf, with | "Gauss V Flat 1DProf, with limits-II" | 55 | 0.13664 | 5.8046 | 0 | -3 | + | Gauss V Flat 1DProf, with | "Gauss V Flat 1DProf, with limits-II s" | 55 | 0.13664 | 5.8046 | 0 | -3 | + | varBinning/a | "1D Profile Variable Binning" | 100 | -0.22236 | 2.6506 | 7.4264 | 29.421 | diff --git a/DaVinciTests/tests/refs/test_davinci_user_algs.ref b/DaVinciTests/tests/refs/test_davinci_user_algs.ref index 0d9061710afda896a70787aa61bd85851d8c0958..0604020b311fedcb13bae751fc66dbb7b85a2867 100644 --- a/DaVinciTests/tests/refs/test_davinci_user_algs.ref +++ b/DaVinciTests/tests/refs/test_davinci_user_algs.ref @@ -2,11 +2,11 @@ |-auditors = [] (default: []) |-buffer_events = 20000 (default: 20000) |-callgrind_profile = False (default: False) -|-conddb_tag = 'HEAD' (default: '') +|-conddb_tag = 'sim-20171127-vc-md100' (default: '') |-control_flow_file = '' (default: '') |-data_flow_file = '' (default: '') |-data_type = 'Upgrade' (default: '') -|-dddb_tag = 'dddb-20200424-2' (default: '') +|-dddb_tag = 
'dddb-20171126' (default: '') |-detectors = ['VP', 'UT', 'FT', 'Rich1Pmt', 'Rich2Pmt', 'Ecal', 'Hcal', 'Muon', 'Magnet', 'Tr'] | (default: ['VP', 'UT', 'FT', 'Rich1Pmt', 'Rich2Pmt', 'Ecal', 'Hcal', 'Muon', 'Magnet', 'Tr']) |-dqflags_tag = '' (default: '') @@ -45,25 +45,29 @@ |-user_algorithms = '$DAVINCITESTSROOT/tests/options/option_davinci_user_algs:main' (default: '') |-write_fsr = True (default: True) \----- (End of User DVAppOptions/DVAppOptions) ----------------------------------------------------- +INFO No MainOptions specified. DaVinci() will import no options file! +INFO User algorithm option_davinci_user_algs.main imported successfully! ApplicationMgr SUCCESS ==================================================================================================================================== ==================================================================================================================================== ApplicationMgr INFO Application Manager Configured successfully +DetectorPersistencySvc INFO Added successfully Conversion service:XmlCnvSvc +DetectorDataSvc SUCCESS Detector description database: git:/lhcb.xml NTupleSvc INFO Added stream file:ExampleTuple.root as FILE1 RootHistSvc INFO Writing ROOT histograms to: ExampleHistos.root HistogramPersistencySvc INFO Added successfully Conversion service:RootHistSvc -FSROutputStreamDstWriter INFO Data source: EventDataSvc output: SVC='Gaudi::RootCnvSvc' UnpackChargedProtos.ChargedProto... INFO Using retuned RICH el and mu DLL values in combined DLLs -DetectorDataSvc INFO Detector description not requested to be loaded +FSROutputStreamDstWriter INFO Data source: EventDataSvc output: SVC='Gaudi::RootCnvSvc' EventClockSvc.FakeEventTime INFO Event times generated from 0 with steps of 0 -HiveDataBrokerSvc WARNING non-reentrant algorithm: RecordStream/FSROutputStreamDstWriter HiveDataBrokerSvc WARNING non-reentrant algorithm: GaudiHistoAlgorithm/SimpleHistos HiveDataBrokerSvc WARNING non-reentrant algorithm: UnpackRecVertex/UnpackRecVertices HiveDataBrokerSvc WARNING non-reentrant algorithm: DataPacking::Unpack<LHCb::MuonPIDPacker>/UnpackMuonPIDs HiveDataBrokerSvc WARNING non-reentrant algorithm: DataPacking::Unpack<LHCb::RichPIDPacker>/UnpackRichPIDs HiveDataBrokerSvc WARNING non-reentrant algorithm: UnpackProtoParticle/UnpackNeutralProtos HiveDataBrokerSvc WARNING non-reentrant algorithm: UnpackProtoParticle/UnpackChargedProtos +HiveDataBrokerSvc WARNING non-reentrant algorithm: RecordStream/FSROutputStreamDstWriter ApplicationMgr INFO Application Manager Initialized successfully +DeFTDetector INFO Current FT geometry version = 63 ApplicationMgr INFO Application Manager Started successfully EventPersistencySvc INFO Added successfully Conversion service:RootCnvSvc EventSelector INFO Stream:EventSelector.DataStreamTool_1 Def:DATAFILE='root://eoslhcb.cern.ch//eos/lhcb/grid/prod/lhcb/MC/Upgrade/LDST/00076720/0000/00076720_00000002_1.ldst' SVC='Gaudi::RootEvtSelector' OPT='READ' IgnoreChecksum='YES' @@ -73,28 +77,28 @@ RndmGenSvc.Engine INFO Current Seed:1234567 Luxury:3 RndmGenSvc INFO Using Random engine:HepRndm::Engine<CLHEP::RanluxEngine> SimpleHistos INFO GaudiHistoAlgorithm:: Filling Histograms...... Please be patient ! ApplicationMgr INFO Application Manager Stopped successfully +UnpackRichPIDs SUCCESS #WARNINGS = 100 Message = 'Incorrect data version 0 for packing version > 3. Correcting data to version 2.' 
FSROutputStreamDstWriter INFO Set up File Summary Record FSROutputStreamDstWriter INFO Events output: 1 -UnpackRichPIDs SUCCESS #WARNINGS = 100 Message = 'Incorrect data version 0 for packing version > 3. Correcting data to version 2.' -NONLAZY_OR: DaVinci #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| - NONLAZY_OR: WriteFSR #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| - RecordStream/FSROutputStreamDstWriter #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| - NONLAZY_AND: DVStdAlgs #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| - GaudiHistoAlgorithm/SimpleHistos #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| - NONLAZY_AND: PrintJpsiNode #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| - UnpackRecVertex/UnpackRecVertices #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| - UnpackCaloHypo/UnpackCaloElectrons #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| - UnpackCaloHypo/UnpackCaloPhotons #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| - UnpackCaloHypo/UnpackCaloMergedPi0s #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| - UnpackCaloHypo/UnpackCaloSplitPhotons #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| - DataPacking__Unpack<LHCb__MuonPIDPacker>/UnpackMuonPIDs #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| - DataPacking__Unpack<LHCb__RichPIDPacker>/UnpackRichPIDs #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| - UnpackTrackFunctional/UnpackBestTracks #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| - UnpackTrackFunctional/UnpackMuonTracks #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| - UnpackProtoParticle/UnpackNeutralProtos #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| - UnpackProtoParticle/UnpackChargedProtos #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| +LAZY_AND: DaVinci #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + NONLAZY_OR: UserAnalysis #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + LAZY_AND: StandardAlgs #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + GaudiHistoAlgorithm/SimpleHistos #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + LAZY_AND: UserAlgorithms #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackRecVertex/UnpackRecVertices #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackCaloHypo/UnpackCaloElectrons #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackCaloHypo/UnpackCaloPhotons #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackCaloHypo/UnpackCaloMergedPi0s #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackCaloHypo/UnpackCaloSplitPhotons #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + DataPacking__Unpack<LHCb__MuonPIDPacker>/UnpackMuonPIDs #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + DataPacking__Unpack<LHCb__RichPIDPacker>/UnpackRichPIDs #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackTrackFunctional/UnpackBestTracks #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackTrackFunctional/UnpackMuonTracks #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackProtoParticle/UnpackNeutralProtos #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + UnpackProtoParticle/UnpackChargedProtos #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + LAZY_AND: WriteFSR #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| + RecordStream/FSROutputStreamDstWriter #=100 Sum=100 Eff=|( 100.0000 +- 0.00000 )%| ToolSvc INFO Removing all tools created by ToolSvc -*****Chrono***** INFO The Final CPU consumption ( Chrono ) Table (ordered) ChronoStatSvc.finalize() INFO Service finalized successfully ApplicationMgr INFO Application Manager Finalized successfully ApplicationMgr INFO Application Manager Terminated successfully diff --git a/Phys/DaVinci/options/DaVinciDB-Example.yaml b/Phys/DaVinci/options/DaVinciDB-Example.yaml index 
0c64652f196a2d79f28d717093c461a99e6f2e03..bb7b2a813b80ac200a385411c77dab0aaeb36be3 100644 --- a/Phys/DaVinci/options/DaVinciDB-Example.yaml +++ b/Phys/DaVinci/options/DaVinciDB-Example.yaml @@ -36,8 +36,8 @@ Upgrade_Bd2KstarMuMu_ldst: data_type: Upgrade input_type: LDST simulation: true - conddb_tag: HEAD - dddb_tag: dddb-20200424-2 + conddb_tag: sim-20171127-vc-md100 + dddb_tag: dddb-20171126 metadata: Author: 'Patrick Koppenburg' Date: '2020-05-28 11:12:55' diff --git a/Phys/DaVinci/python/DaVinci/ConfigurationUpgrade.py b/Phys/DaVinci/python/DaVinci/ConfigurationUpgrade.py index 817f93626044d41497d21db666b78cd9258ca3cf..9267eb1634847d60a443f95aa27c858ad8b3ef22 100644 --- a/Phys/DaVinci/python/DaVinci/ConfigurationUpgrade.py +++ b/Phys/DaVinci/python/DaVinci/ConfigurationUpgrade.py @@ -12,62 +12,90 @@ High level configuration tools for DaVinci. """ from PyConf.application import ComponentConfig, configure, configure_input -from PyConf.control_flow import CompositeNode, NodeLogic -from DaVinci.configOptions import (check_options, set_file_options, - set_option_value, set_job_options, - set_args_options) -from DaVinci.algorithms import (define_log, setup_algorithms, - define_fsr_writer, setup_user_algorithms, - add_missing_configurables) +from DaVinci.configOptions import (check_options, set_job_options, + get_option_value, set_args_options, + set_input_file_options) +from DaVinci.algorithms import setup_algorithms, define_fsr_writer, setup_user_algorithms +from .config import davinci_control_flow, prepare_davinci_nodes def add_davinci_configurables(options): """ - Add dedicated Davinci configurables to the main job configuration. + Run the job adding the specific Davinci configurables to the standard PyConf ones. + Algorithms developed by users are also included. + + Args: + - options: list of DV options. + + Returns: + - ComponentConfig: Dict of configured Gaudi and DaVinci Configurable instances and user algorithms. """ - dvNodes = [] + dvAlgs = {} + userAlgs, publicTools = setup_algorithms(options) + dvAlgs.update(userAlgs) + if options.simulation and options.write_fsr: - fsrNode = define_fsr_writer(options) - dvNodes.append(fsrNode) - algsNodes = setup_algorithms(options) - dvNodes.extend(algsNodes) + fsrAlgs = define_fsr_writer(options) + dvAlgs["WriteFSR"] = fsrAlgs - topNode = CompositeNode( - "DaVinci", - combine_logic=NodeLogic.NONLAZY_OR, - children=dvNodes, - force_order=True) + dvNodes = prepare_davinci_nodes(dvAlgs) + + top_dv_node = davinci_control_flow(options, dvNodes) config = ComponentConfig() - config.update(configure(options, topNode)) - config.update(add_missing_configurables(options, topNode)) + config.update(configure(options, top_dv_node, public_tools=publicTools)) return config -def run_davinci(fileDB_file, fileDB_key, jobOptFile, flagMC, ctx_args, - simplejob): +def add_simple_conf(options): + """ + Run the job using only the standard PyConf configuration with any user algorithm. + + Args: + - options: list of DV options. + + Returns: + - ComponentConfig: Dict of configured Gaudi Configurable instances and user algorithms. + """ + userAlgName = get_option_value(options, 'user_algorithms') + userAlgs, publicTools = setup_user_algorithms(userAlgName) + + dvNodes = prepare_davinci_nodes(userAlgs) + top_dv_node = davinci_control_flow(options, dvNodes) + + config = configure(options, top_dv_node, public_tools=publicTools) + + return config + + +def run_davinci(fileDB_key, fileDB_file, jobOptFile, ctx_args, simplejob): """ Run DaVinci application. 
+ + Args: + - fileDB_key: key in the testfileDB. + - fileDB_file: file with the testfileDB. + - jobOptFile: file containing the options to be used for running the job. + - ctx_args: click context containing all the extra options. + - simplejob: flag for running job using only the PyConf configurations. + + Returns: + - ComponentConfig: Dict of configured Configurable instances. """ from DaVinci import options - define_log(options) - set_file_options(options, fileDB_key, fileDB_file) + set_input_file_options(options, fileDB_key, fileDB_file) set_job_options(options, jobOptFile, fileDB_key, fileDB_file) if ctx_args: set_args_options(options, ctx_args, fileDB_key, fileDB_file) - set_option_value(options, "input_type", "ROOT") check_options(options) config = configure_input(options) - if simplejob: - node = setup_user_algorithms(options) - config.update(configure(options, node)) - config.update(add_missing_configurables(options, node)) + config.update(add_simple_conf(options)) else: config.update(add_davinci_configurables(options)) diff --git a/Phys/DaVinci/python/DaVinci/algorithms.py b/Phys/DaVinci/python/DaVinci/algorithms.py index fdaaa12cf584e8c981f7c994256bd416a8addbb2..c46e8a0d9ece59712dce5756f1f8e24141961858 100644 --- a/Phys/DaVinci/python/DaVinci/algorithms.py +++ b/Phys/DaVinci/python/DaVinci/algorithms.py @@ -15,22 +15,31 @@ from PyConf.application import ComponentConfig, all_nodes_and_algs from PyConf.dataflow import dataflow_config from PyConf.control_flow import CompositeNode, NodeLogic from DaVinci.configOptions import get_option_value, set_option_value -from DaVinci.optionChecker import DVOptionError -from DaVinci.configurations import * +from DaVinci.optionChecker import DVOptionError, log_click def setup_algorithms(options): """ - Set DaVinci algorithms + Set DaVinci algorithms. + + Args: + - options: list of DaVinci options. + + Returns: + - Dict of the algorithm instances to be run in the job. + - List of public tool instances to configure. """ from PyConf.Algorithms import GaudiHistoAlgorithm - log = get_option_value(options, "log_file") opts = get_option_value(options, "main_options") + publicTools = [] + dvAlgs = {} + if not opts == "": importOptions(opts) else: - log.info( + log_click( + "INFO", "No MainOptions specified. DaVinci() will import no options file!") stdAlgs = [ @@ -39,34 +48,33 @@ def setup_algorithms(options): HistoPrint=True, OutputLevel=get_option_value(options, "output_level")) ] + dvAlgs["StandardAlgs"] = stdAlgs - stdAlgsNode = CompositeNode( - "DVStdAlgs", children=stdAlgs, combine_logic=NodeLogic.NONLAZY_AND) - - userAlgs = get_option_value(options, 'user_algorithms') - if not userAlgs: - log.warning( + userAlgName = get_option_value(options, 'user_algorithms') + if not userAlgName: + log_click( + "WARNING", "DV option file or main function not defined. No user algorithms will be used." ) - return [stdAlgsNode] else: - userAlgsNode = setup_user_algorithms(options) - return [stdAlgsNode, userAlgsNode] - - -def define_log(options): - """ - Define the logger. - """ - from AnalysisPython.Logger import getLogger + userAlgs, publicTools = setup_user_algorithms(userAlgName) + if type(userAlgs) == list: + dvAlgs["UserAlgorithms"] = userAlgs + else: + dvAlgs.update(userAlgs) - log = getLogger("DaVinci") - set_option_value(options, "log_file", log) + return dvAlgs, publicTools def define_fsr_writer(options): """ - Define Generator FSR writer + Define Generator FSR writer. + + Args: + - options: list of DaVinci options. 
+ + Returns: + - list of FSR algorithm instances to be configured. """ from PyConf.Algorithms import GenFSRMerge, RecordStream @@ -84,34 +92,37 @@ def define_fsr_writer(options): OutputLevel=outputLevel, Output="SVC='Gaudi::RootCnvSvc'") algs.append(recStream) - fsrNode = CompositeNode( - "WriteFSR", - combine_logic=NodeLogic.NONLAZY_OR, - children=algs, - force_order=True) - return fsrNode + return algs -def setup_user_algorithms(options): - """ - Set user algorithms and return a PyConf node +def setup_user_algorithms(userAlgPath): """ - userAlgs = get_option_value(options, 'user_algorithms') - log = get_option_value(options, "log_file") + Set user algorithms and return a PyConf node. + + Args: + - userAlgPath: path to the file containing the algorithms and tools defined by the user. - if userAlgs == "": - log.warning( + Returns: + - Dict of user algorithm instances to be run in the job. + - List of user public tool instances to configure. + """ + if userAlgPath == "": + log_click( + "WARNING", "DV option file or main function not defined. No user algorithms will be used." ) + # Add a dummy algorithm in order to avoid errors from empty nodes from PyConf.Algorithms import Gaudi__Examples__VoidConsumer as VoidConsumer - alg = VoidConsumer() - userAlgsNode = CompositeNode("DummyNode", children=[alg]) + userAlgs = {"UserAlgorithms": [VoidConsumer()]} + publicTools = [] else: - modulePath = userAlgs.rsplit('/', 1)[0] - algName = userAlgs.rsplit('/', 1)[1] + # Identify the module path and the algorithm name from the user defined string + modulePath = userAlgPath.rsplit('/', 1)[0] + algName = userAlgPath.rsplit('/', 1)[1] + # Append module path to sys modulePath = os.path.expandvars(modulePath) sys.path.append(modulePath) @@ -126,6 +137,7 @@ def setup_user_algorithms(options): moduleName = algName funcName = "" + # Import the module containing the user algorithms try: module = importlib.import_module(moduleName) except: @@ -134,97 +146,18 @@ def setup_user_algorithms(options): "Importing user algorithms failed. Check if the user python module %s defined in %s exists!" % (moduleName, modulePath)) else: - print(module) try: - userAlgsNode = eval("module%s()" % funcName) + userAlgs, publicTools = eval("module%s()" % funcName) except: raise DVOptionError( "user_algorithms", "Run time error when calling the user algorithm. User algorithm %s can not be imported!" % funcName) else: - log.info("User algorithm %s%s imported successfully!" % - (moduleName, funcName)) + log_click( + "INFO", "User algorithm %s%s imported successfully!" % + (moduleName, funcName)) + if type(userAlgs) == list: + userAlgs = {"UserAlgorithms": userAlgs} - return userAlgsNode - - -def add_missing_configurables(options, - topNode, - public_tools=[], - barrier_algorithms=[]): - """ - Temporary function used for including missing configurables in the DaVinci job. - The reason of these missing configurables is probably due to the different Gaudi call - ('./run davinci' instead of './run gaudirun.py') and from the 'configure' method - implemented in PyConf: indeed the method doesn't return all the configurables stored - in 'configurable_algs' and 'configurable_tools'. - TO BE FIXED. 
- """ - from Configurables import ( - Gaudi__RootCnvSvc as RootCnvSvc, XmlCnvSvc, FileRecordDataSvc, - XmlParserSvc, Gaudi__MultiFileCatalog as MultiFileCatalog, - LHCb__DetDesc__ReserveDetDescForEvent as reserveIOV, - LHCb__Tests__FakeEventTimeProducer as DummyEventTime) - from DDDB.CheckDD4Hep import UseDD4Hep - if UseDD4Hep: - from Configurables import LHCb__Det__LbDD4hep__IOVProducer as IOVProducer - - options.finalize() - config = ComponentConfig() - - nodes, algs = all_nodes_and_algs(topNode) - configuration = dataflow_config() - for alg in algs: - configuration.update(alg.configuration()) - for tool in public_tools: - configuration.update(tool.configuration()) - configurable_algs, configurable_tools = configuration.apply() - - INITIAL_TIME = 1433509200 - odin_loc = "/Event/DAQ/DummyODIN" - configurable_algs += [ - setup_component( - DummyEventTime, - "DummyEventTime", - Start=INITIAL_TIME, - Step=0, - ODIN=odin_loc, - require_IOVLock=False), - setup_component( - reserveIOV, "reserveIOV", require_IOVLock=False, ODIN=odin_loc) - ] - if UseDD4Hep: - configurable_algs += [ - setup_component( - IOVProducer, - "ReserveIOVDD4hep", - require_IOVLock=False, - SliceLocation="IOVLockDD4hep", - ODIN=odin_loc) - ] - - rootSvc = RootCnvSvc("RootCnvSvc") - fileDataSvc = FileRecordDataSvc() - configure_file_record_data(rootSvc, fileDataSvc) - - xmlSvc = XmlCnvSvc() - xmlParser = XmlParserSvc() - configure_xml(xmlSvc, xmlParser) - - config.add(setup_component("ToolSvc")) - config.add( - setup_component( - "EventPersistencySvc", - CnvServices=["Gaudi::RootCnvSvc/RootCnvSvc"])) - config.add(fileDataSvc) - config.add(rootSvc) - config.add(xmlSvc) - config.add(xmlParser) - - for alg in configurable_algs: - config.add(alg) - for tool in configurable_tools: - config.add(tool) - - return config + return userAlgs, publicTools diff --git a/Phys/DaVinci/python/DaVinci/configOptions.py b/Phys/DaVinci/python/DaVinci/configOptions.py index 90a02b3f9a1b9dbc60159c740dbc09c1e9cf3206..0f53b100e139fb42132795e7d9e3fc56351a87bc 100644 --- a/Phys/DaVinci/python/DaVinci/configOptions.py +++ b/Phys/DaVinci/python/DaVinci/configOptions.py @@ -17,52 +17,86 @@ from DaVinci.optionChecker import DVOptionError, DVRuntimeError, option_checker def get_option_value(options, name): + """ + Get option value for a given name. + """ return options.getProp(name) def set_option_value(options, name, value): + """ + Set option value for a given name. + """ options.setProp(name, value) -def set_file_options(options, fileDB_key, fileDB_file): +def set_input_file_options(options, fileDB_key, fileDB_file): """ Set the dataset properties required by the user. + + This helper function is similar to what `set_input_and_conds_from_testfiledb` in + `https://gitlab.cern.ch/lhcb/LHCb/-/blob/master/PyConf/python/PyConf/application.py` + does, but the latter acts on a TestFileDB entry. + + Args: + - options: set of DV options. + - fileDB_key: key in the testfileDB. + - fileDB_file: file containing the testfileDB. 
""" - import yaml - with open(os.path.expandvars(fileDB_file)) as dbFile: - dataDV = yaml.safe_load(dbFile) - - idxFile = -1 - if ":" in fileDB_key: - idxFile = int(fileDB_key.split(":")[1]) - fileDB_key = fileDB_key.split(":")[0] - - for key, config in dataDV.items(): - if key == fileDB_key: - for item, obj in config.items(): - if item == 'qualifiers': - for prop, value in obj.items(): + if fileDB_file == "TestFileDB": + options.set_input_and_conds_from_testfiledb(fileDB_key) + else: + import yaml + with open(os.path.expandvars(fileDB_file)) as fileDB: + test_file_db = yaml.safe_load(fileDB) + + # File index for running job only on a specific file in the testfileDB list. + # File number specified with ":" (e.g. key:2). Dafault -1 for running over all the files. + idxFile = -1 + if ":" in fileDB_key: + idxFile = int(fileDB_key.split(":")[1]) + fileDB_key = fileDB_key.split(":")[0] + + # Given a specific key, loop over the elements of the testfileDB + for item, obj in test_file_db[fileDB_key].items(): + # Set all the file quelifiers: + if item == 'qualifiers': + for prop, value in obj.items(): + if prop == "input_type" and value != "MDF": + set_option_value(options, prop, "ROOT") + else: set_option_value(options, prop, value) - elif item == 'filenames': - if idxFile > -1: - if idxFile < len(obj): - set_option_value(options, "input_files", - [obj[idxFile]]) - else: - raise ValueError( - 'Index file exceeds the number of available files related to the given key!' - ) + # set the input files: + elif item == 'filenames': + # if file index is not -1, select only the corresponding file from the list in testfileDB. + if idxFile > -1: + # Check that the index element exists + if idxFile < len(obj): + set_option_value(options, "input_files", + [obj[idxFile]]) else: - set_option_value(options, "input_files", obj) + raise ValueError( + 'Index file exceeds the number of available files related to the given key!' + ) + else: + set_option_value(options, "input_files", obj) def set_job_options(options, jobOptFile, fileDB_key, fileDB_file): """ - Set the job properties required by the user. + Set the job properties required by the user. The method checks if there + are options related to the input files selected with key and file of the + testfileDB, if yes it checks if the option is settable. + + Args: + - options: set of DV options. + - jobOptFile: file containing the job options chosen by the user. + - fileDB_key: key in the testfileDB. + - fileDB_file: file containing the testfileDB. """ - log = get_option_value(options, "log_file") if jobOptFile == '': - log.warning('No jobOption file selected, the default values are used.') + log_click("WARNING", + "No jobOption file selected, the default values are used.") else: dataOptions = list_data_options(fileDB_key.split(":")[0], fileDB_file) @@ -87,6 +121,12 @@ def set_job_options(options, jobOptFile, fileDB_key, fileDB_file): def set_args_options(options, ctx_args, fileDB_key, fileDB_file): """ Set the extra arguments required by the user. + + Args: + - options: set of DV options. + - ctx_args: click context. + - fileDB_key: key in the testfileDB. + - fileDB_file: file containing the testfileDB. 
""" dataOptions = list_data_options(fileDB_key.split(":")[0], fileDB_file) for i in range(0, len(ctx_args), 2): @@ -97,22 +137,33 @@ def set_args_options(options, ctx_args, fileDB_key, fileDB_file): set_option_value(options, key, value) -def is_option_settable(options, key, dataOptions): +def is_option_settable(options, name, dataOptions): """ - Check if the current option 'key' can be set corretly + Check if the option identified by name is settable. + This is true for all the job options while for data options + the "overwrite_data_options" flag is required. + Indeed, by default all the data options are already set + using the information found in the testfileDB. + + Args: + - options: set of DV options. + - name: name of the option to be checked. + - dataOptions: list of options related to the input data. """ - log = get_option_value(options, "log_file") - if key in dataOptions: + if name in dataOptions: if get_option_value(options, "overwrite_data_options"): - log.info( - "New value found for the option %s in the job option file. 'OverwriteDataOptions' is active so the default value will be overwritten." - % key) + log_click( + "INFO", + "New value found for the option %s in the job option file. "\ + "'OverwriteDataOptions' is active so the default value will be overwritten." + % name) return True else: raise DVRuntimeError( - key, "overwrite_data_options", - "Default value for option %s is taken from DaVinciDB.\nA new value is found in the job option file but 'OverwriteDataOptions' is not active so the default value can't be overwritten!" - % key) + name, "overwrite_data_options", + "Default value for option %s is taken from DaVinciDB.\nA new value is found in the job "\ + "option file but 'OverwriteDataOptions' is not active so the default value can't be overwritten!" + % name) return False else: return True @@ -120,27 +171,36 @@ def is_option_settable(options, key, dataOptions): def list_data_options(fileDB_key, fileDB_file): """ - List of the properties that are set automatically given a dataset. + Get the list of the properties that are set automatically given a dataset. + + Args: + - fileDB_key: key in the testfileDB. + - fileDB_file: file containing the testfileDB. """ import yaml - with open(os.path.expandvars(fileDB_file)) as dbFile: - dataDV = yaml.safe_load(dbFile) - optionList = [] - for key, config in dataDV.items(): - if key == fileDB_key: - for item, obj in config.items(): - if item == 'qualifiers': - for prop, value in obj.items(): - optionList.append(prop) - break + qualifiers = [] + if fileDB_file == 'TestFileDB': + from PRConfig.TestFileDB import test_file_db + qualifiers = test_file_db[fileDB_key].qualifiers + else: + with open(os.path.expandvars(fileDB_file)) as fileDB: + test_file_db = yaml.safe_load(fileDB) + qualifiers = test_file_db[fileDB_key]["qualifiers"] + + for prop, value in qualifiers.items(): + optionList.append(prop) + return optionList def check_options(options): """ Check the options. Applies changes if needed. + + Args: + - options: list of DaVinci options. 
""" dataType = get_option_value(options, "data_type") option_checker("data_type", dataType) diff --git a/Phys/DaVinci/python/DaVinci/configurations.py b/Phys/DaVinci/python/DaVinci/configurations.py deleted file mode 100644 index 8c59a5d177251ed51e1c48080a201ed906f954c5..0000000000000000000000000000000000000000 --- a/Phys/DaVinci/python/DaVinci/configurations.py +++ /dev/null @@ -1,42 +0,0 @@ -############################################################################### -# (c) Copyright 2020-2021 CERN for the benefit of the LHCb Collaboration # -# # -# This software is distributed under the terms of the GNU General Public # -# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # -# # -# In applying this licence, CERN does not waive the privileges and immunities # -# granted to it by virtue of its status as an Intergovernmental Organization # -# or submit itself to any jurisdiction. # -############################################################################### -""" -Define configurations for applications, algorithms and external services. -""" - -################################################################################ -# Define configurations for external services -# - - -def configure_file_record_data(rootSvc, fileDataSvc): - """ - Configure File Record Data service. - """ - rootSvc.CacheBranches = [] - rootSvc.EnableIncident = True - rootSvc.VetoBranches = ["*"] - - fileDataSvc.EnableFaultHandler = True - fileDataSvc.ForceLeaves = True - fileDataSvc.PersistencySvc = "PersistencySvc/FileRecordPersistencySvc" - fileDataSvc.RootCLID = 1 - - -def configure_xml(xmlSvc, xmlParser): - """ - Configure xml services. - """ - xmlSvc.AllowGenericConversion = True - - xmlParser.CacheBehavior = 3 - xmlParser.EntityResolver = "EntityResolverDispatcher/EntityResolverDispatcher" - xmlParser.MaxDocNbInCache = 15 diff --git a/Phys/DaVinci/python/DaVinci/optionChecker.py b/Phys/DaVinci/python/DaVinci/optionChecker.py index 6ebd36c1d9ae87280edcf33f9f7b53d9f9454804..8a5858e1c9bdcdffa44c855ee1af85feafe650a0 100644 --- a/Phys/DaVinci/python/DaVinci/optionChecker.py +++ b/Phys/DaVinci/python/DaVinci/optionChecker.py @@ -104,3 +104,11 @@ def set_color(key): } return colors[key] + + +def log_click(mode, message): + import click + colors = {"INFO": "blue", "WARNING": "yellow", "ERROR": "red"} + + click.echo( + "%s %s" % (click.style(mode, bold=True, fg=colors[mode]), message)) diff --git a/Phys/DaVinci/python/DaVinci/utilities_script.py b/Phys/DaVinci/python/DaVinci/utilities_script.py index d5d8b4c8678bf8332c5d73b094450336f060535e..9929342510cb2587cb2828ef7e8cd0d5104187e2 100644 --- a/Phys/DaVinci/python/DaVinci/utilities_script.py +++ b/Phys/DaVinci/python/DaVinci/utilities_script.py @@ -15,14 +15,13 @@ This module contains some functions useful while running the davinci script. def dump_call(testfiledb_file, testfiledb_key, joboptfile, ctx_args): """ - Print out all the davinci options used to run the job by means of the - 'davinci' script. + Print out all the davinci options used to run the job by means of the 'davinci' script. 
Args: - - testfiledb_file: TestFileDB-like file with job input information (1st davinci argument, 1st slot) - - testfiledb_key: relevant key to be looked for in the TestFileDB-like file (1st davinci argument, 2nd slot) - - joboptfile: file containing the job option information (2nd davinci argument) - - ctx_args: array with the remnant options passed to davinci by command line + - testfiledb_file: TestFileDB-like file with job input information (1st davinci argument, 1st slot). + - testfiledb_key: relevant key to be looked for in the TestFileDB-like file (1st davinci argument, 2nd slot). + - joboptfile: file containing the job option information (2nd davinci argument). + - ctx_args: array with the remnant options passed to davinci by command line. """ import os, click @@ -42,16 +41,15 @@ def dump_call(testfiledb_file, testfiledb_key, joboptfile, ctx_args): def get_configurable_opts(configurables, with_defaults): """ - Temporary method to be used until GaudiConfig2 will be not implemented. Loop over the list of configurables to be passed to Gaudi and convert them in a dictionary. Args: - - configurables: list of configurables to be passed to Gaudi - - with_defaults: flag to set the default values + - configurables: list of configurables to be passed to Gaudi. + - with_defaults: flag to set the default values. - Ouput: - - opts: dictionary containing all the configurables + Returns: + - opts: dictionary containing all the configurables. """ import sys from itertools import chain @@ -59,21 +57,47 @@ def get_configurable_opts(configurables, with_defaults): opts = {} for c in configurables: - items = (chain(c.getDefaultProperties().items(), - c.getValuedProperties().items()) - if with_defaults else c.getValuedProperties().items()) - - for p, v in items: - if hasattr(Configurable, "PropertyReference") and isinstance( - v, Configurable.PropertyReference): - v = v.__resolve__() - if isinstance(v, str): - v = '"%s"' % v.replace('"', '\\"') - elif sys.version_info < (3, ) and isinstance(v, long): - v = '%d' % v - elif hasattr(v, '__opt_value__'): - v = v.__opt_value__() - - opts['.'.join((c.name(), p))] = str(v) + if hasattr(c, "__opt_properties__"): + opts.update(c.__opt_properties__(with_defaults)) + else: + items = (chain(c.getDefaultProperties().items(), + c.getValuedProperties().items()) + if with_defaults else c.getValuedProperties().items()) + for p, v in items: + if hasattr(Configurable, "PropertyReference") and isinstance( + v, Configurable.PropertyReference): + v = v.__resolve__() + if isinstance(v, str): + v = '"%s"' % v.replace('"', '\\"') + elif sys.version_info < (3, ) and isinstance(v, long): + v = '%d' % v + elif hasattr(v, '__opt_value__'): + v = v.__opt_value__() + + opts['.'.join((c.name(), p))] = str(v) return opts + + +def set_testfiledb(inputfiledb): + """ + Set the testfileDB file and the related key to be used in the DV job. + + Args: + - testfiledb: pair of values containing information on the testfileDB and the key chosen by the user. + + Returns: + - key for the testfileDB. + - file containing the testfileDB. + """ + import os + assert len( + inputfiledb + ) in (1, 2), "--inputfiledb takes two arguments: filedb key and the related filename."\ + "If the filename is not specified the TestFileDB will be used as default. %d" %len(inputfiledb) + + inputfiledb_key = inputfiledb[0] + inputfiledb_file = inputfiledb[ + 1] if not inputfiledb[1] == "-" else "TestFileDB" + + return inputfiledb_key, inputfiledb_file
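For readers following the refactoring above: the new setup_user_algorithms contract (see Phys/DaVinci/python/DaVinci/algorithms.py in this diff) expects the function named in the user_algorithms option (a "path/to/module:function" string, as in the reference logs) to return two values: the algorithms to run, either as a plain list or as a dict mapping control-flow node names to lists of algorithms, and a list of public tools to configure. A minimal sketch of a compliant module follows; the module and node names are illustrative assumptions, while the VoidConsumer placeholder and the return shape are taken directly from the code in this diff.

# my_user_algs.py -- hypothetical minimal user-algorithms module (sketch only, not part of this diff)
from PyConf.Algorithms import Gaudi__Examples__VoidConsumer as VoidConsumer


def main():
    # Algorithms grouped by control-flow node name; a bare list would also be
    # accepted and wrapped into a "UserAlgorithms" node by setup_user_algorithms.
    algs = {"UserAlgorithms": [VoidConsumer()]}
    # Public tools that need explicit configuration (none in this sketch).
    tools = []
    return algs, tools

Passing a value such as some/path/my_user_algs:main through the user_algorithms option would then route algs through prepare_davinci_nodes and davinci_control_flow, and hand tools to configure(..., public_tools=...), matching the flow implemented in add_davinci_configurables and add_simple_conf above.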