From deb86ded347d3c6e573bc4746fce06ce8dda0b8d Mon Sep 17 00:00:00 2001 From: Chris Burr <christopher.burr@cern.ch> Date: Fri, 15 Jul 2022 15:13:53 +0200 Subject: [PATCH] Remove davinci command --- .../DaVinciTests/QMTest/DaVinciExclusions.py | 10 +- Phys/DaVinci/CMakeLists.txt | 1 - .../options/prod_conf/options-DaVinciConf.py | 29 -- .../options/prod_conf/options-ProdConf.py | 33 --- Phys/DaVinci/python/DaVinci/Configuration.py | 85 +----- Phys/DaVinci/python/DaVinci/__init__.py | 4 +- Phys/DaVinci/python/DaVinci/algorithms.py | 167 ++--------- Phys/DaVinci/python/DaVinci/application.py | 37 --- .../python/DaVinci/compat/DaVinciConf.py | 138 --------- .../DaVinci/python/DaVinci/compat/__init__.py | 0 .../python/DaVinci/compat/tests/__init__.py | 0 .../python/DaVinci/compat/tests/params.py | 28 -- Phys/DaVinci/python/DaVinci/config.py | 26 -- Phys/DaVinci/python/DaVinci/configOptions.py | 264 ----------------- Phys/DaVinci/python/DaVinci/optionChecker.py | 163 ----------- .../DaVinci/python/DaVinci/options_default.py | 272 ------------------ .../python/DaVinci/utilities_script.py | 219 -------------- .../DaVinci/tests/config/test_DVAppOptions.py | 60 ---- Phys/DaVinci/tests/config/test_algorithms.py | 209 +++++--------- .../tests/config/test_optionChecker.py | 45 --- 20 files changed, 90 insertions(+), 1700 deletions(-) delete mode 100644 Phys/DaVinci/options/prod_conf/options-DaVinciConf.py delete mode 100644 Phys/DaVinci/options/prod_conf/options-ProdConf.py delete mode 100644 Phys/DaVinci/python/DaVinci/application.py delete mode 100644 Phys/DaVinci/python/DaVinci/compat/DaVinciConf.py delete mode 100644 Phys/DaVinci/python/DaVinci/compat/__init__.py delete mode 100644 Phys/DaVinci/python/DaVinci/compat/tests/__init__.py delete mode 100644 Phys/DaVinci/python/DaVinci/compat/tests/params.py delete mode 100644 Phys/DaVinci/python/DaVinci/configOptions.py delete mode 100644 Phys/DaVinci/python/DaVinci/optionChecker.py delete mode 100644 Phys/DaVinci/python/DaVinci/options_default.py delete mode 100644 Phys/DaVinci/python/DaVinci/utilities_script.py delete mode 100644 Phys/DaVinci/tests/config/test_DVAppOptions.py delete mode 100644 Phys/DaVinci/tests/config/test_optionChecker.py diff --git a/DaVinciTests/python/DaVinciTests/QMTest/DaVinciExclusions.py b/DaVinciTests/python/DaVinciTests/QMTest/DaVinciExclusions.py index d29bac9c4..56b0c6834 100755 --- a/DaVinciTests/python/DaVinciTests/QMTest/DaVinciExclusions.py +++ b/DaVinciTests/python/DaVinciTests/QMTest/DaVinciExclusions.py @@ -9,7 +9,6 @@ # or submit itself to any jurisdiction. # ############################################################################### from GaudiTest import LineSkipper -from GaudiConf.QMTest.LHCbTest import BlockSkipper from RecConf.QMTest.exclusions import preprocessor as RecPreprocessor remove_uninteresting_lines = LineSkipper(regexps=[ @@ -45,11 +44,4 @@ remove_known_warnings = LineSkipper(regexps=[ r"[a-zA-Z0-9]* +WARNING FunTupleBase<KeyedContainer<LHCb::MCParticle,Containers::KeyedObjectManager<Containers::hashmap> > >:: .*", ]) -# Remove all DV options to avoid test failures due to LHCb changes. -# A dedicated python unit test is added in DaVinciTests. 
-#remove_options = LineSkipper(regexps=[r" |- .*" ]) -remove_list_options = BlockSkipper( - "/***** User DVAppOptions/DVAppOptions", - "\\----- (End of User DVAppOptions/DVAppOptions)") - -preprocessor = RecPreprocessor + remove_uninteresting_lines + remove_list_options + remove_known_warnings +preprocessor = RecPreprocessor + remove_uninteresting_lines + remove_known_warnings diff --git a/Phys/DaVinci/CMakeLists.txt b/Phys/DaVinci/CMakeLists.txt index f31372559..afc896f4d 100644 --- a/Phys/DaVinci/CMakeLists.txt +++ b/Phys/DaVinci/CMakeLists.txt @@ -14,7 +14,6 @@ Phys/DaVinci #]=======================================================================] gaudi_install(PYTHON) -gaudi_generate_confuserdb(DaVinci.compat.DaVinciConf) lhcb_env(SET DAVINCIOPTS ${CMAKE_CURRENT_SOURCE_DIR}/options) diff --git a/Phys/DaVinci/options/prod_conf/options-DaVinciConf.py b/Phys/DaVinci/options/prod_conf/options-DaVinciConf.py deleted file mode 100644 index 56a776ffb..000000000 --- a/Phys/DaVinci/options/prod_conf/options-DaVinciConf.py +++ /dev/null @@ -1,29 +0,0 @@ -############################################################################### -# (c) Copyright 2021-2022 CERN for the benefit of the LHCb Collaboration # -# # -# This software is distributed under the terms of the GNU General Public # -# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # -# # -# In applying this licence, CERN does not waive the privileges and immunities # -# granted to it by virtue of its status as an Intergovernmental Organization # -# or submit itself to any jurisdiction. # -############################################################################### -""" -Example option file that can be passed to ProdConf for the configuration of the DaVinci options. -To be used along with Phys/DaVinci/options/prod_conf/options-ProdConf.py. - -After having installed the ProdConf application locally the job can be launched using the following command: - xenv --xml ProdConf/ProdConf.xenv ./run gaudirun.py Phys/DaVinci/options/prod_conf/options-DaVinciConf.py Phys/DaVinci/options/prod_conf/options-ProdConf.py -""" -from DaVinci import options - -options.data_type = 'Upgrade' -options.enable_unpack = False -options.histo_file = 'DaVinci_histos.root' -options.input_raw_format = 0.3 -options.input_type = 'ROOT' -options.lumi = False -options.ntuple_file = 'DaVinci_tuple.root' -options.print_freq = 1 -options.simulation = True -options.user_algorithms = 'DaVinciExamples/python/DaVinciExamples/tupling/example-tupling-basic-run-mc:main' diff --git a/Phys/DaVinci/options/prod_conf/options-ProdConf.py b/Phys/DaVinci/options/prod_conf/options-ProdConf.py deleted file mode 100644 index 9358bd9c0..000000000 --- a/Phys/DaVinci/options/prod_conf/options-ProdConf.py +++ /dev/null @@ -1,33 +0,0 @@ -############################################################################### -# (c) Copyright 2021 CERN for the benefit of the LHCb Collaboration # -# # -# This software is distributed under the terms of the GNU General Public # -# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # -# # -# In applying this licence, CERN does not waive the privileges and immunities # -# granted to it by virtue of its status as an Intergovernmental Organization # -# or submit itself to any jurisdiction. # -############################################################################### -""" -Example option file that can be passed to ProdConf for the configuration of the DaVinci options. 
-To be used along with Phys/DaVinci/options/prod_conf/options-DaVinciConf.py. - -After having installed the ProdConf application locally the job can be launched using the following command: - xenv --xml ProdConf/ProdConf.xenv ./run gaudirun.py Phys/DaVinci/options/prod_conf/options-DaVinciConf.py Phys/DaVinci/options/prod_conf/options-ProdConf.py -""" -from ProdConf import ProdConf - -ProdConf( - Application='DaVinci', - AppVersion='v39r1', - CondDBTag="sim-20171127-vc-md100", - DDDBTag="dddb-20171126", - InputFiles=[ - 'root://eoslhcb.cern.ch//eos/lhcb/grid/prod/lhcb/MC/Upgrade/LDST/00076720/0000/00076720_00000002_1.ldst' - ], - NOfEvents=10, - OptionFormat='', - OutputFilePrefix='00076720_00000002_1', - OutputFileTypes=['root'], - XMLFileCatalog="pool_xml_catalog.xml", - XMLSummaryFile='DaVinci_summary.xml') diff --git a/Phys/DaVinci/python/DaVinci/Configuration.py b/Phys/DaVinci/python/DaVinci/Configuration.py index 607382fa4..4b56c2af3 100644 --- a/Phys/DaVinci/python/DaVinci/Configuration.py +++ b/Phys/DaVinci/python/DaVinci/Configuration.py @@ -11,82 +11,11 @@ """ High level configuration tools for DaVinci. """ -from GaudiKernel.ProcessJobOptions import importOptions -from PyConf.application import ComponentConfig, configure, configure_input +from PyConf.application import configure, configure_input from PyConf.Algorithms import EventAccounting from GaudiConf import reading -from DaVinci.configOptions import ( - check_options, set_job_options, set_properties, set_args_options, - set_input_file_options, list_data_options, check_user_option_values) -from DaVinci.algorithms import (setup_algorithms, define_fsr_writer, - apply_filters_and_unpacking, - expand_input_files) -from DaVinci.config import davinci_control_flow, prepare_davinci_nodes, DVAppOptions - - -def run_davinci_app(fileDB_key="", - fileDB_file="", - jobOpt_file="", - ctx_args=[], - override_data_options=False, - prod_conf=False): - """ - Run the DaVinci application. - - This is the "master function" called - - - by the `davinci` executable in DaVinciSys/scripts. - - by the `DaVinci` legacy configurable in `DaVinci/compat/DaVinciConf.py` - for running DaVinci in production via `ProdConf`. - - directly by the user, typically for simple jobs, when invoking `./run gaudirun.py`. - - Args: - fileDB_key (str, optional): key in the testfileDB. Defaults to "". - fileDB_file (str, optional): file with the testfileDB. Defaults to "". - jobOpt_file (str, optional): file containing the options to be used for running the job. - Defaults to "". - ctx_args (iterable, optional): click context containing all the extra options. - Defaults to []. - simple_job (bool, optional): flag for running job using only the PyConf configurations. - Defaults to False. - override_data_options (bool, optional): boolean for enabling data options override. - Defaults to False. - prod_conf (bool, optional): flag for running DaVinci with ProdConf. - Defaults to False. - - Returns: - ComponentConfig instance, a dict of configured Configurable instances. - """ - from DaVinci import options - - # Workaround ConfigurableUser limitation: options.<name> cannot be called if a value is not - # explicitly assigned and the related property is not set. - # Initialization in DVAppOptions class seems to be not sufficient. 
- set_properties(options) - - data_options = list_data_options(fileDB_key, fileDB_file) - - if fileDB_key and not prod_conf: - set_input_file_options(options, fileDB_key, fileDB_file) - if jobOpt_file: - set_job_options(options, jobOpt_file, data_options, - override_data_options) - if ctx_args: - set_args_options(options, ctx_args, data_options, - override_data_options) - - config = ComponentConfig() - init_properties = options.properties() - user_algorithms, public_tools = setup_algorithms(options.user_algorithms) - - if not override_data_options: - check_user_option_values(options, init_properties, data_options) - - config.update( - add_davinci_configurables(options, user_algorithms, public_tools)) - - if not prod_conf: - return config +from DaVinci.algorithms import (define_fsr_writer, apply_filters_and_unpacking) +from DaVinci.config import davinci_control_flow, prepare_davinci_nodes def add_davinci_configurables(options, user_algorithms, public_tools): @@ -96,7 +25,7 @@ def add_davinci_configurables(options, user_algorithms, public_tools): Algorithms developed by users are also included. Args: - options (DVAppOptions instance): list of DV options. + options (DaVinci.Options): lbexec provided options object Returns: ComponentConfig instance, a dict of configured Gaudi and DaVinci Configurable instances and user algorithms. @@ -104,14 +33,8 @@ def add_davinci_configurables(options, user_algorithms, public_tools): if not public_tools: public_tools = [] - if isinstance(options, DVAppOptions) and options.main_options: - importOptions(options.main_options) - unpack_only_mc = options.unpack_only_mc - if isinstance(options, DVAppOptions): - expand_input_files(options) - check_options(options) config = configure_input(options) if options.annsvc_config: diff --git a/Phys/DaVinci/python/DaVinci/__init__.py b/Phys/DaVinci/python/DaVinci/__init__.py index d6ca40e25..abc4c0b2b 100644 --- a/Phys/DaVinci/python/DaVinci/__init__.py +++ b/Phys/DaVinci/python/DaVinci/__init__.py @@ -8,7 +8,7 @@ # granted to it by virtue of its status as an Intergovernmental Organization # # or submit itself to any jurisdiction. # ############################################################################### -from .config import options, run_davinci, DVNode, make_config +from .config import make_config from .LbExec import Options -__all__ = ('options', 'run_davinci', 'DVNode', 'Options', 'make_config') +__all__ = ('Options', 'make_config') diff --git a/Phys/DaVinci/python/DaVinci/algorithms.py b/Phys/DaVinci/python/DaVinci/algorithms.py index 0ed57500b..d500ed423 100644 --- a/Phys/DaVinci/python/DaVinci/algorithms.py +++ b/Phys/DaVinci/python/DaVinci/algorithms.py @@ -9,55 +9,24 @@ # or submit itself to any jurisdiction. # ############################################################################### import re -import os, sys, importlib + +import click + from PyConf.Algorithms import ( FilterDecays, LoKi__HDRFilter as HDRFilter, LoKi__VoidFilter as VoidFilter, ) -from DaVinci.optionChecker import DVImportError, log_click -from DaVinci.configOptions import set_properties from PyConf.application import ( default_raw_event, ComponentConfig, make_odin, ) from DaVinci.algorithms_pyconf import make_dvalgorithm -from DaVinci.config import DVAppOptions from PyConf.components import force_location from Gaudi.Configuration import WARNING -def setup_algorithms(user_algs_name): - """ - Set DaVinci algorithms. - - Args: - user_algs_name (str): user algorithm name. - - Returns: - Dict of the algorithm instances to be run in the job. 
- List of public tool instances to configure. - """ - public_tools = [] - dv_algs = {} - - if not user_algs_name: - log_click( - "WARNING", - "DV option file or main function not defined. No user algorithms will be used." - ) - # Add a dummy algorithm in order to avoid errors from empty nodes - from Gaudi.Configuration import ERROR - from PyConf.Algorithms import Gaudi__Examples__VoidConsumer as VoidConsumer - dv_algs = {"UserAlgorithms": [VoidConsumer(OutputLevel=ERROR)]} - else: - user_algs, public_tools = setup_user_algorithms(user_algs_name) - dv_algs.update(user_algs) - - return dv_algs, public_tools - - def set_filter(name, code, dec_reports): """ Adding an HDR filter to FunTuple algorithms checking if the corresponding HLT/Sprucing line fired. @@ -130,7 +99,7 @@ def apply_filters_and_unpacking(options, algs_dict, unpack_only_mc): Adding filter and unpacking algorithms. Args: - options (DVAppOptions instance): list of DaVinci options. + options (DaVinci.Options): lbexec provided options object algs_dict (dict): dict of the user algorithms. unpack_only_mc (bool): flag to unpack only mc particles and vertices. (TO BE REMOVED WHEN THE UNPACKING WILL BECOME FUNCTIONAL) @@ -163,7 +132,7 @@ def define_fsr_writer(options): Define Generator FSR writer. Args: - options (DVAppOptions instance): list of DaVinci options. + options (DaVinci.Options): lbexec provided options object Returns: List of FSR algorithm instances to be configured. @@ -185,82 +154,13 @@ def define_fsr_writer(options): return algs -def setup_user_algorithms(userAlgPath): - """ - Set user algorithms and return a PyConf node. - - Args: - userAlgPath (str): path to the file containing the algorithms and tools defined by the user. - - Returns: - Dict of user algorithm instances to be run in the job. - List of user public tool instances to configure. - """ - userAlgs = {} - publicTools = [] - - if userAlgPath == "": - log_click( - "WARNING", - "DV option file or main function not defined. No user algorithms will be used." - ) - else: - # Identify the module path and the algorithm name from the user defined string - if '/' in userAlgPath: - modulePath = userAlgPath.rsplit('/', 1)[0] - algName = userAlgPath.rsplit('/', 1)[1] - else: - modulePath = "./" - algName = userAlgPath - - # Append module path to sys - modulePath = os.path.expandvars(modulePath) - sys.path.append(modulePath) - - if ":" in algName: - moduleName = algName.rsplit(":", 1)[0] - funcName = ".%s" % algName.rsplit(":", 1)[1] - else: - moduleName = algName - funcName = "" - - _, ext = os.path.splitext(moduleName) - if ext == ".py": - moduleName = moduleName.rsplit('.', 1)[0] - - # Import the module containing the user algorithms - try: - module = importlib.import_module(moduleName) # noqa: F841 - except ModuleNotFoundError: - raise DVImportError( - funcName, - f"Check why the user Python module '{moduleName}' defined in '{modulePath}' can't be imported!" - ) from None - else: - try: - userAlgs, publicTools = eval("module%s()" % funcName) - except (AttributeError, ImportError): - raise DVImportError( - funcName, - f"Runtime error when calling the user algorithm file. User algorithm {funcName} can not be run!" - ) from None - else: - log_click( - "INFO", "User algorithm %s%s imported successfully!" % - (moduleName, funcName)) - if type(userAlgs) == list: - userAlgs = {"UserAlgorithms": userAlgs} - - return userAlgs, publicTools - - def unpack_locations(options, unpack_only_mc): """ Configures algorithms for reading HLT2/Spruce output. 
Location are found using stream as prefix. (TO BE REMOVED WHEN THE UNPACKING WILL BECOME FUNCTIONAL) Args: - - options (DVAppOptions instance): list of DaVinci options. + - options (DaVinci.Options): lbexec provided options object - unpack_only_mc (bool): flag to unpack only mc particles and vertices. Returns: @@ -271,8 +171,6 @@ def unpack_locations(options, unpack_only_mc): process = options.process stream = options.stream reading_algs = [] - if isinstance(options, DVAppOptions): - set_properties(options) if process == "Spruce": TES_ROOT = '/Event/Spruce' @@ -284,9 +182,9 @@ def unpack_locations(options, unpack_only_mc): reading_algs += reading.mc_unpackers( process=process, filtered_mc=False, configurables=False) else: - log_click( - "WARNING", - "Requested unpacking MC but simulation is set False. Check your DV options." + message = "Requested unpacking MC but simulation is set False. Check your DV options." + click.echo( + f"{click.style('WARNING', bold=True, fg='yellow')} {click.style(message, fg='white')}" ) elif options.annsvc_config: config = ComponentConfig() @@ -337,7 +235,7 @@ def get_hlt_reports(options, source=''): Set the Hlt service and algorithms. Args: - options (list): list of DaVinci options. + options (DaVinci.Options): lbexec provided options object source (str): source ID required by the user. Returns: @@ -413,37 +311,37 @@ def configured_FunTuple(options, config): return dictAlgs -def get_odin(dv_options): +def get_odin(options): """ Function to get the LHCb::ODIN location Args: - dv_options (DVAppOptions instance): Configured DaVinci.options object + options (DaVinci.Options): lbexec provided options object Returns: odin_loc: Location of the LHCb::ODIN """ - if dv_options.process == 'Hlt2': + if options.process == 'Hlt2': stream = "" else: - stream = dv_options.stream - with default_raw_event.bind(raw_event_format=dv_options.input_raw_format): + stream = options.stream + with default_raw_event.bind(raw_event_format=options.input_raw_format): odin_loc = make_odin(stream=stream) return odin_loc -def get_decreports(sel_stage, dv_options): +def get_decreports(sel_stage, options): """ Function to get the LHCb::DecReports for HLT1, HLT2 or Sprucing. Args: sel_state (str): Selection stage can be "Hlt1" or "Hlt2" or "Spruce" - dv_options: Configured DaVinci.options object + options (DaVinci.Options): lbexec provided options object Returns: dec_loc: Location of the LHCb::DecReports for HLT1 or Hlt2 or Spruce """ dec_loc = get_hlt_reports( - dv_options, source=sel_stage).OutputHltDecReportsLocation + options, source=sel_stage).OutputHltDecReportsLocation return dec_loc @@ -481,32 +379,3 @@ def filter_on(location, decay_descriptor=None, bank_type=None): if decay_descriptor: data = apply_algorithm([data], FilterDecays, Code=decay_descriptor) return data - - -def expand_input_files(options): - """ - Function to expand input files list including all the files matching - a specified pattern according to the rules used by Unix (uses `glob`). - - Args: - options (DVAppOptions instance): list of DaVinci options. 
- """ - expanded_files = [] - for file_name in options.input_files: - if any(f"root://{x}.cern.ch//" in file_name - for x in ["eoslhcb", "eosuser"]): - import XRootD.client as c - # Workaround since file with a prefix before 'root://' - # as ('mdf:', 'PFN:') are not expanded correctly with .glob - if ":root" in file_name: - prefix, file_name = file_name.split(":", 1) - expanded_files.extend( - [f"{prefix}:{f}" for f in c.glob(file_name)]) - else: - expanded_files.extend(c.glob(file_name)) - else: - import glob - expanded_files.extend(glob.glob(file_name, recursive=True)) - - expanded_files = sorted(expanded_files) - options.input_files = expanded_files diff --git a/Phys/DaVinci/python/DaVinci/application.py b/Phys/DaVinci/python/DaVinci/application.py deleted file mode 100644 index ec6cc4e7e..000000000 --- a/Phys/DaVinci/python/DaVinci/application.py +++ /dev/null @@ -1,37 +0,0 @@ -############################################################################### -# (c) Copyright 2021 CERN for the benefit of the LHCb Collaboration # -# # -# This software is distributed under the terms of the GNU General Public # -# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # -# # -# In applying this licence, CERN does not waive the privileges and immunities # -# granted to it by virtue of its status as an Intergovernmental Organization # -# or submit itself to any jurisdiction. # -############################################################################### - -from PyConf.application import ApplicationOptions -from DaVinci.options_default import __optsDict__ as optsDict - - -def define_app_option_defaults(): - """ - Define/digest the default values for the application options as described in YAML. - """ - slots, doc = {}, {} - - for key, args in optsDict.items(): - for name, value in args.items(): - if name == 'value': - slots[key] = value - else: - doc[key] = value - - return slots, doc - - -class DVAppOptions(ApplicationOptions): - """ - Enhanced PyConf.application.ApplicationOptions class - with slots defined via a .py file. - """ - __slots__, _propertyDocDct = define_app_option_defaults() diff --git a/Phys/DaVinci/python/DaVinci/compat/DaVinciConf.py b/Phys/DaVinci/python/DaVinci/compat/DaVinciConf.py deleted file mode 100644 index 922081b15..000000000 --- a/Phys/DaVinci/python/DaVinci/compat/DaVinciConf.py +++ /dev/null @@ -1,138 +0,0 @@ -############################################################################### -# (c) Copyright 2021-2022 CERN for the benefit of the LHCb Collaboration # -# # -# This software is distributed under the terms of the GNU General Public # -# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # -# # -# In applying this licence, CERN does not waive the privileges and immunities # -# granted to it by virtue of its status as an Intergovernmental Organization # -# or submit itself to any jurisdiction. # -############################################################################### -""" -DaVinci configuration for ProdConf. 
-""" -import re -from Gaudi.Configuration import appendPostConfigAction -from Configurables import (LHCbApp, EventSelector, DDDBConf, CondDB, - LHCbConfigurableUser, HistogramPersistencySvc) -from DaVinci.Configuration import run_davinci_app -from DaVinci.configOptions import set_option_value -from DaVinci import options - -from AnalysisPython.Logger import getLogger -log = getLogger('DaVinci') - - -class DaVinciConfigurationError(Exception): - """Exception type raised when DaVinci is not configured consistently.""" - pass - - -class DaVinci(LHCbConfigurableUser): - - __used_configurables__ = [ - LHCbApp, - DDDBConf, - CondDB, - ] - - __slots__ = { - "CondDBtag": "", - "DDDBtag": "", - "EvtMax": -1, - "HistogramFile": "", - "OutputFile": "", - } - - _propertyDocDct = { - "CondDBtag": - """ Tag for the CondDB. """, - "DDDBtag": - """ Data type, can be ["Upgrade"] Forwarded to PhysConf, AnalysisConf, DstConf and LHCbApp. """, - "EvtMax": - """ Number of events to analyse. Default = -1 to run over all events. """, - "HistogramFile": - """Name of output histogram file. Default = ''.""", - "OutputFile": - """ Name of output file. Default = ''.""", - } - - _optNameMap = { - "CondDBtag": "conddb_tag", - "DDDBtag": "dddb_tag", - "EvtMax": "evt_max", - "HistogramFile": "histo_file", - "Input": "input_files", - "OutputFile": "ntuple_file", - } - - def undressInputFileName(self, selector): - m = re.match(r"DATAFILE='([^']+)'", selector) - return m.group(1) - - def remove_duplicates(self): - """Remove duplicates introduced by using both PyConf and LHCbApp""" - from Gaudi.Configuration import allConfigurables - for name, prop in (("ApplicationMgr", - "ExtSvc"), ("EventPersistencySvc", "CnvServices"), - ("FileRecordPersistencySvc", "CnvServices")): - try: - c = allConfigurables[name] - # we have a mixture of configurables and strings, - # so we count as duplicates the entries with the same name - new_items = {} - for element in getattr(c, prop): - el_name = element.name() if hasattr( - element, "name") else element.rsplit("/", 1)[-1] - new_items.setdefault(el_name, element) - setattr(c, prop, list(new_items.values())) - except (AttributeError, KeyError): - pass - - def retrieve_options(self): - """ - Retrieve option values from LHCbApp(). - """ - for name in self._optNameMap: - if name in ["CondDBtag", "DDDBtag", "EvtMax"]: - value = LHCbApp().getProp(name) - self.setProp(name, value) - elif name == "HistogramFile": - if HistogramPersistencySvc().isPropertySet("OutputFile"): - value = HistogramPersistencySvc().OutputFile - self.setProp(name, value) - - def propagate_options(self, options): - """ - Propagate DaVinci properties from __slots__ to DaVinci.options. - """ - for name in self._optNameMap: - if name == "Input": - inputs = [ - self.undressInputFileName(s) for s in EventSelector().Input - ] - set_option_value(options, self._optNameMap[name], inputs) - else: - if self.getProp(name): - set_option_value(options, self._optNameMap[name], - self.getProp(name)) - - def run_davinci_conf(self): - """ - Run the DaVinci configuration. - """ - run_davinci_app(prod_conf=True) - - def __apply_configuration__(self): - """ - DaVinci configuration. 
- """ - log.info("Applying DaVinci configuration") - - self.retrieve_options() - log.info(self) - - self.propagate_options(options) - self.run_davinci_conf() - - appendPostConfigAction(self.remove_duplicates) diff --git a/Phys/DaVinci/python/DaVinci/compat/__init__.py b/Phys/DaVinci/python/DaVinci/compat/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/Phys/DaVinci/python/DaVinci/compat/tests/__init__.py b/Phys/DaVinci/python/DaVinci/compat/tests/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/Phys/DaVinci/python/DaVinci/compat/tests/params.py b/Phys/DaVinci/python/DaVinci/compat/tests/params.py deleted file mode 100644 index c20f242ea..000000000 --- a/Phys/DaVinci/python/DaVinci/compat/tests/params.py +++ /dev/null @@ -1,28 +0,0 @@ -############################################################################### -# (c) Copyright 2021 CERN for the benefit of the LHCb Collaboration # -# # -# This software is distributed under the terms of the GNU General Public # -# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # -# # -# In applying this licence, CERN does not waive the privileges and immunities # -# granted to it by virtue of its status as an Intergovernmental Organization # -# or submit itself to any jurisdiction. # -############################################################################### -from PRConfig.TestFileDB import test_file_db - -test_entry = test_file_db["Upgrade_Bd2KstarMuMu"] - -prod_conf_params = { - "Application": "DaVinci", - "AppVersion": "v39r1", - "CondDBTag": test_entry.qualifiers["CondDB"], - "DDDBTag": test_entry.qualifiers["DDDB"], - "HistogramFile": "histo.root", - "InputFiles": test_entry.filenames, - "NOfEvents": 10, - "OutputFilePrefix": "DaVinci", - "OutputFileTypes": ["ROOT"], - "OptionFormat": "", - "XMLFileCatalog": "pool_xml_catalog.xml", - "XMLSummaryFile": "summaryDaVinci.xml" -} diff --git a/Phys/DaVinci/python/DaVinci/config.py b/Phys/DaVinci/python/DaVinci/config.py index f9d2f67a9..6c0878071 100644 --- a/Phys/DaVinci/python/DaVinci/config.py +++ b/Phys/DaVinci/python/DaVinci/config.py @@ -14,15 +14,10 @@ from __future__ import absolute_import import logging from collections import namedtuple, OrderedDict -from PyConf.application import configure_input, configure from PyConf.control_flow import CompositeNode, NodeLogic -from DaVinci.application import DVAppOptions - log = logging.getLogger(__name__) -options = DVAppOptions(_enabled=False) - class DVNode(namedtuple('DVNode', ['node', 'extra_outputs'])): # noqa """Immutable object fully qualifying a DaVinci node. @@ -133,27 +128,6 @@ def prepare_davinci_nodes(user_algs): return dv_nodes -def run_davinci(options, user_algs, public_tools=[]): - """ - DaVinci application control flow. - - Args: - options (ApplicationOptions): holder of application options. - user_algs (dict): list of user algorithms 'catalogued' with the dict key, - e.g. {'MyAlgs': [<list of algs>]}. - public_tools (list): list of public `Tool` instances to configure. 
- """ - config = configure_input(options) - - dv_nodes = prepare_davinci_nodes(user_algs) - - top_dv_node = davinci_control_flow(options, dv_nodes) - - config.update(configure(options, top_dv_node, public_tools=public_tools)) - - return config - - def make_config(options, user_algorithms, *, public_tools=None): from DaVinci.Configuration import add_davinci_configurables diff --git a/Phys/DaVinci/python/DaVinci/configOptions.py b/Phys/DaVinci/python/DaVinci/configOptions.py deleted file mode 100644 index cc6f794aa..000000000 --- a/Phys/DaVinci/python/DaVinci/configOptions.py +++ /dev/null @@ -1,264 +0,0 @@ -############################################################################### -# (c) Copyright 2020-2022 CERN for the benefit of the LHCb Collaboration # -# # -# This software is distributed under the terms of the GNU General Public # -# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # -# # -# In applying this licence, CERN does not waive the privileges and immunities # -# granted to it by virtue of its status as an Intergovernmental Organization # -# or submit itself to any jurisdiction. # -############################################################################### -""" -Set and retrieve the value of a specific option property. -""" - -import os -from DaVinci.optionChecker import DVOptionError, DVRuntimeError -from DaVinci.optionChecker import option_checker, log_click -from DaVinci.application import DVAppOptions - - -def set_option_value(options, name, value): - """ - Set option value for a given name. - - Args: - options (DVAppOptions instance): list of DaVinci options. - name (str): name of the option of interest. - value (str): value to be set. - """ - # Properties value passed by command line are always interpreted as string - # Numerical values are converted automatically but this is not true for boolean - # So a conversion of the values from string to boolean is applied when needed - if isinstance(options.getProp(name), bool) and isinstance(value, str): - if value in {"True", "False"}: - value = (value == "True") - else: - raise DVOptionError( - name, - f"'{name}' is boolean and can accept only 'True' or 'False' as value" - ) - - options.setProp(name, value) - - -def set_input_file_options(options, fileDB_key, fileDB_file): - """ - Set the dataset properties required by the user. - - This helper function is similar to what `set_input_and_conds_from_testfiledb` in - `https://gitlab.cern.ch/lhcb/LHCb/-/blob/master/PyConf/python/PyConf/application.py` - does, but the latter acts on a TestFileDB entry. - - Args: - options (DVAppOptions instance): list of DaVinci options. - fileDB_key (str): key in the testfileDB. - fileDB_file (str): file containing the testfileDB. - """ - if fileDB_file == "TestFileDB": - options.set_input_and_conds_from_testfiledb(fileDB_key) - else: - import yaml - with open(os.path.expandvars(fileDB_file)) as fileDB: - test_file_db = yaml.safe_load(fileDB) - - valid_properties = ['filenames', 'qualifiers', 'metadata'] - - # Given a specific key, loop over the elements of the testfileDB - for item, obj in test_file_db[fileDB_key].items(): - # Check if there are no typo in the fileDB properties - if item not in valid_properties: - message = f"Property {item} not recognised! 
Please check the key {fileDB_key} in {fileDB_file}\n" - message += f"Valid property names: {valid_properties}" - raise DVOptionError(item, message) - - # Set all the file quelifiers: - if item == 'qualifiers': - for prop, value in obj.items(): - set_option_value(options, prop, value) - # set the input files: - elif item == 'filenames': - set_option_value(options, "input_files", obj) - - -def set_job_options(options, jobOptFile, dataOptions, override_data_options): - """ - Set the job properties required by the user. The method checks if there - are options related to the input files selected with key and file of the - testfileDB, if yes it checks if the option is settable. - - Args: - options (DVAppOptions instance): list of DaVinci options. - jobOptFile (str): file containing the job options chosen by the user. - fileDB_key (str): key in the testfileDB. - fileDB_file (str): file containing the testfileDB. - override_data_options (bool): boolean for enabling data options override. - """ - with open(os.path.expandvars(jobOptFile)) as config_file: - _, ext = os.path.splitext(jobOptFile) - if ext in (".yaml", ".yml", ".json"): - import yaml - config = yaml.safe_load(config_file) - elif ext == '.py': - import ast - config = ast.literal_eval(config_file.read()) - else: - raise ValueError( - 'JobOption file extension not known! Please use only a .py, .json or .yaml (.yml) file!' - ) - - for key, value in config.items(): - if is_option_settable(options, key, dataOptions, - override_data_options): - set_option_value(options, key, value) - - -def set_args_options(options, ctx_args, dataOptions, override_data_options): - """ - Set the extra arguments required by the user. - - Args: - options (DVAppOptions instance): list of DaVinci options. - ctx_args (list): click context. - fileDB_key (str): key in the testfileDB. - fileDB_file (str): file containing the testfileDB. - override_data_options (bool): boolean for enabling data options override. - """ - # Loop over the properties and set their values - for i in range(0, len(ctx_args), 2): - key = ctx_args[i][2:] - value = ctx_args[i + 1] - - if is_option_settable(options, key, dataOptions, - override_data_options): - set_option_value(options, key, value) - - -def is_option_settable(options, name, dataOptions, override_data_options): - """ - Check if the option identified by name is settable. - This is true for all the job options while for data options - the 'override_data_options' flag is required. - Indeed, by default all the data options are already set - using the information found in the testfileDB. - - Args: - options (DVAppOptions instance): list of DaVinci options. - name (str): name of the option to be checked. - dataOptions (list): list of options related to the input data. - override_data_options (bool): boolean for enabling data options override. - Raises: - DVRuntimeError: if 'override_data_options' is set and input 'name' is a data option. - """ - if name in dataOptions: - if override_data_options: - log_click( - "INFO", - "New value found for the option %s in the job option file. "\ - "'override_data_options' is active so the default value will be overridden." - % name) - return True - else: - raise DVRuntimeError( - name, "override_data_options", - "Default value for option %s is taken from DaVinciDB.\nA new value is found in the job "\ - "option file but 'override_data_options' is not active so the default value can't be overridden!" 
- % name) - else: - return True - - -def list_data_options(fileDB_key, fileDB_file): - """ - Get the list of the properties that are set automatically given a dataset. - - Args: - fileDB_key (str): key in the testfileDB. - fileDB_file (str): file containing the testfileDB. - """ - import yaml - - optionList = [] - qualifiers = [] - if fileDB_key: - if fileDB_file == 'TestFileDB': - from PRConfig.TestFileDB import test_file_db - qualifiers = test_file_db[fileDB_key].qualifiers - else: - with open(os.path.expandvars(fileDB_file)) as fileDB: - test_file_db = yaml.safe_load(fileDB) - qualifiers = test_file_db[fileDB_key]["qualifiers"] - - for prop, value in qualifiers.items(): - optionList.append(prop) - - return optionList - - -def check_options(options): - """ - Check the options. Applies changes if needed. - - Args: - options (DVAppOptions instance): list of DaVinci options. - """ - # Avoid running these checks if we're using lbexec - if isinstance(options, DVAppOptions): - dataType = options.data_type - option_checker("data_type", dataType) - - inputType = (options.input_type).upper() - option_checker("input_type", inputType) - if inputType != "MDF": - set_option_value(options, "input_type", "ROOT") - - flagMC = options.simulation - if not flagMC: - if options.merge_genfsr: - raise DVRuntimeError( - "merge_genfsr", "simulation", - "GenFSR are not available in real data so merge_genfsr cannot be run! Set merge_genfsr = False." - ) - else: - if options.lumi: - raise DVRuntimeError( - "lumi", "simulation", - "lumi not valid for simulation! Set lumi = False.") - - ## for simulation, it is very important to specify proper DB-tags: - if not options.dddb_tag: - raise DVOptionError( - "dddb_tag", "'dddb_tag' is not specified for simulated data!") - if not options.conddb_tag: - raise DVOptionError( - "conddb_tag", - "'conddb_tag' is not specified for simulated data!") - - -def set_properties(options): - """ - Function to initialise the DaVinci options. - - Args: - options (DVAppOptions instance): list of DaVinci options. - """ - for name, default in options.getDefaultProperties().items(): - if not options.isPropertySet(name): - options.setProp(name, default) - - -def check_user_option_values(options, init_properties, data_options): - """ - Method checking if options related to the input files have been changed in the - user algorithms .py file and if these options are settable. - - Args: - options (DVAppOptions instance): list of DaVinci options. - init_properties (dict): DaVinci options values before importing the user algorithms. - data_options (list): list of input data properties. - """ - properties = options.properties() - for key, value in properties.items(): - init_value = init_properties[key] - if init_value != value: - assert is_option_settable(options, key, data_options, False) diff --git a/Phys/DaVinci/python/DaVinci/optionChecker.py b/Phys/DaVinci/python/DaVinci/optionChecker.py deleted file mode 100644 index 557116696..000000000 --- a/Phys/DaVinci/python/DaVinci/optionChecker.py +++ /dev/null @@ -1,163 +0,0 @@ -############################################################################### -# (c) Copyright 2020-2022 CERN for the benefit of the LHCb Collaboration # -# # -# This software is distributed under the terms of the GNU General Public # -# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". 
# -# # -# In applying this licence, CERN does not waive the privileges and immunities # -# granted to it by virtue of its status as an Intergovernmental Organization # -# or submit itself to any jurisdiction. # -############################################################################### -""" -Define value and run-time errors used in DaVinci application. -""" -import click - - -class DVOptionError(ValueError): - """ - DaVinci exception class for invalid option configurations. - """ - - def __init__(self, name, value): - """ - Constructor. - """ - self.message = f"invalid DaVinci '{name}' option value specified: '{value}'" - - def __str__(self): - """ - String representation. - """ - return f"{click.style('DVOptionError', bold=True, fg='red')} {click.style(self.message, fg='red')}" - - -class DVRuntimeError(RuntimeError): - """ - DaVinci exception class for inconsistent option configurations. - """ - - def __init__(self, option1, option2, value): - """ - Constructor. - """ - self.message = f"Option {option1} and {option2} are inconsistent! \n {value}" - - def __str__(self): - """ - String representation. - """ - return f"{click.style('DVRuntimeError', bold=True, fg='red')} {click.style(self.message, fg='red')}" - - -class DVImportError(ImportError): - """ - DaVinci exception class for an invalid Python module import. - """ - - def __init__(self, algName, value): - """ - Constructor. - """ - self.message = f"Importing user algorithm '{algName}' failed! \n {value}" - - def __str__(self): - """ - String representation. - """ - return f"{click.style('DVImportError', bold=True, fg='red')} {click.style(self.message, fg='red')}" - - -class DVKeyError(KeyError): - """ - DaVinci exception class for accessing to an invalid key option. - """ - - def __init__(self, optName): - """ - Constructor. - """ - self.message = f"There is no option named '{optName}' in DaVinci!" - - def __str__(self): - """ - String representation. - """ - return f"{click.style('DVKeyError', bold=True, fg='red')} {click.style(self.message, fg='red')}" - - -def option_checker(name, value): - """ - Check the correctness of a DaVinci configurable option. - - Args: - name (str): DaVinci option name - value: value to be checked - """ - from DaVinci.options_default import __optsDict__ as dv_default - - allowedValues = get_allowed_option_values() - if name in dv_default.keys() and name not in allowedValues.keys(): - log_click( - "INFO", - f"The option {name} has no restrinctions on the allowed values.") - else: - try: - if value not in allowedValues[name]: - print_allowed_option_values(allowedValues, name) - raise DVOptionError(name, value) - except KeyError: - log_click("ERROR", - f"You are trying to check the unknown option {name}!") - print_allowed_option_values(allowedValues) - raise DVKeyError(name) from None - - -def get_allowed_option_values(): - """ - Return the allowed values for the DaVinci options accepting - specific inputs. - - Returns: - dict containing the allowed values. - """ - allowedValues = { - "data_type": ["Upgrade"], - "detectors": [ - 'VP', 'UT', 'FT', 'Rich1Pmt', 'Rich2Pmt', 'Ecal', 'Hcal', 'Muon', - 'Magnet', 'Tr' - ], - "input_type": [ - "MDF", "DST", "DIGI", "RDST", "MDST", "SDST", "XDST", "LDST", - "ROOT", "XGEN" - ] - } - return allowedValues - - -def print_allowed_option_values(allowedValues, name=None): - """ - Print the allowed values for the DaVinci option configurations. 
- """ - if name is None: - log_click("", "Known job option configurations and allowed values:", - "green") - for name, values in allowedValues.items(): - log_click("", f"{name} \t : {values}") - else: - log_click("", f"Allowed values for DaVinci option {name}:", "green") - log_click("", f"{allowedValues[name]}") - - -def log_click(mode, message, color_text="white"): - import click - config_colors = { - "INFO": "blue", - "WARNING": "yellow", - "ERROR": "red", - } - - color_mode = config_colors[mode] if mode else "white" - click.echo( - f"{click.style(mode, bold=True, fg=color_mode)} {click.style(message, fg=color_text)}" - ) diff --git a/Phys/DaVinci/python/DaVinci/options_default.py b/Phys/DaVinci/python/DaVinci/options_default.py deleted file mode 100644 index 01e4dbcef..000000000 --- a/Phys/DaVinci/python/DaVinci/options_default.py +++ /dev/null @@ -1,272 +0,0 @@ -############################################################################### -# (c) Copyright 2021 CERN for the benefit of the LHCb Collaboration # -# # -# This software is distributed under the terms of the GNU General Public # -# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # -# # -# In applying this licence, CERN does not waive the privileges and immunities # -# granted to it by virtue of its status as an Intergovernmental Organization # -# or submit itself to any jurisdiction. # -############################################################################### - -# Definitions of, and defaults for, the DaVinci application options. - -__optsDict__ = { - "annsvc_config": { - "text": - '"""Path to .json file containing HLT2 and Spruce configuration generated by ANNSvc when the .dst input file was created. Default = ."""', - "value": - '' - }, - "auditors": { - "text": - '"""List of auditors to run. Possible common choices include "NameAuditor", "MemoryAuditor", "ChronoAuditor". See Gaudi manual for a full list. Default = []."""', - "value": [] - }, - "buffer_events": { - "text": - '"""Number of events to pre-fetch if use_iosvc=True. Default = 20000 is reasonable for most machines; it might need to be increased for more modern/powerful machines."""', - "value": - 20000 - }, - "callgrind_profile": { - "text": '"""Enable callgrind profiling. Default = False."""', - "value": False - }, - "conddb_tag": { - "text": '"""Tag for the CondDB. Default = ."""', - "value": '' - }, - "control_flow_file": { - "text": - '"""Control flow file name (.gv extension since default output format for the DOT language). Default = ' - ' for no file generation."""', - "value": - '' - }, - "data_flow_file": { - "text": - '"""Data flow file name (.gv extension since default output format for the DOT language). Default = ' - ' for no file generation."""', - "value": - '' - }, - "data_type": { - "text": '"""Data type, can be ["Upgrade"]. Default = ."""', - "value": '' - }, - "dddb_tag": { - "text": - '""" Data type, can be ["Upgrade"] Forwarded to PhysConf, AnalysisConf, DstConf and LHCbApp. Default = ."""', - "value": - '' - }, - "detectors": { - "text": - '"""List of detectors. Default = [VP, UT, FT, Rich1Pmt, Rich2Pmt, Ecal, Hcal, Muon, Magnet, Tr]."""', - "value": [ - 'VP', 'UT', 'FT', 'Rich1Pmt', 'Rich2Pmt', 'Ecal', 'Hcal', 'Muon', - 'Magnet', 'Tr' - ] - }, - "dqflags_tag": { - "text": - '"""Tag for DQFLAGS. Default as set in DDDBConf for DataType. Default = ."""', - "value": - '' - }, - "enable_unpack": { - "text": - '"""Explicitly enable/disable unpacking for input data (if specified). 
Default = True."""', - "value": - True - }, - "event_store": { - "text": - '"""Event store service to be used in the job, can be HiveWhiteBoard or EvtStoreSvc (faster). Default = HiveWhiteBoard."""', - "value": - 'HiveWhiteBoard' - }, - "evt_pre_filters": { - "text": - '"""Set of event filtering algorithms to be run before DaVinci initializaton sequence. Only events passing these filters will be processed. Default = {}."""', - "value": {} - }, - "evt_max": { - "text": - '"""Number of events to analyse. Default = -1 to run over all events."""', - "value": - -1 - }, - "first_evt": { - "text": '"""First event to be processed in the job. Default = 0. """', - "value": 0 - }, - "histo_file": { - "text": '"""Name of output histogram file. Default = ' - '."""', - "value": '' - }, - "ignore_dq_flags": { - "text": - '"""If False, process only events with good DQ. Default = False."""', - "value": - False - }, - "input_files": { - "text": '"""Input data. Default = []."""', - "value": [] - }, - "input_raw_format": { - "text": - '"""Sets the expected raw input format. Default = 0.3.' - ' (definitions at https://gitlab.cern.ch/lhcb-datapkg/RawEventFormat/blob/master/python/RawEventFormat/__init__.py)"""', - "value": - 0.3 - }, - "input_type": { - "text": - '"""Type of input files, e.g. "DST", "DIGI", "RDST", "MDST", "XDST" or "LDST". Default = DST."""', - "value": - 'DST' - }, - "log_file": { - "text": - '"""Logger used to print warning and error messages. Default = ."""', - "value": - '' - }, - "lumi": { - "text": '"""Luminosity accounting. Default = False."""', - "value": False - }, - "main_options": { - "text": '"""Main option file to execute. Default = ."""', - "value": '' - }, - "memory_pool_size": { - "text": - '"""Estimated size of the per-event memory pool. Set to 0 to disable the pool. Default = 10 * 1024 * 1024."""', - "value": - 10 * 1024 * 1024 - }, - "merge_genfsr": { - "text": - '"""Flags whether to merge the generator-level FSRs. Default = False."""', - "value": - False - }, - "monitoring_file": { - "text": '"""Name of output monitoring file. Default = ."""', - "value": '' - }, - "msg_svc_format": { - "text": - '"""MessageSvc output format. Default = "% F%35W%S %7W%R%T %0W%M"."""', - "value": - '% F%35W%S %7W%R%T %0W%M' - }, - "msg_svc_time_format": { - "text": - '"""MessageSvc time format. Default = "%Y-%m-%d %H:%M:%S UTC"."""', - "value": - '%Y-%m-%d %H:%M:%S UTC' - }, - "n_event_slots": { - "text": - '"""Number of event slots. Default = -1, i.e. 1.2 * n_threads. """"', - "value": - -1 - }, - "n_threads": { - "text": '"""Multithreading processing. Default = 1."""', - "value": 1 - }, - "ntuple_file": { - "text": '"""Name of output ntuple file. Default = ' - '."""', - "value": '' - }, - "output_file": { - "text": '"""Name of the output file. Default = ."""', - "value": '' - }, - "output_level": { - "text": - '"""Set the output level used in the job. Default = INFO=3."""', - "value": 3 - }, - "output_type": { - "text": - '"""Type of the output file, e.g. ROOT, DST. Default = ROOT """', - "value": "ROOT" - }, - "override_data_options": { - "text": - '"""Allow overriding default data options defined in DaVinci Database. Default = False."""', - "value": - False - }, - "phoenix_filename": { - "text": - '"""Define the file where phoenix event data are writen. Defaults =, i.e. no phoenix output', - "value": - '' - }, - "print_freq": { - "text": - '"""Frequency at which to print event numbers. 
Default = 1000."""', - "value": - 1000 - }, - "process": { - "text": - '"""Process is Turbo or Spruce (and temporarily Hlt2). Default = Spruce."""', - "value": - 'Spruce' - }, - "python_logging_level": { - "text": '"""Python logger level. Default = logging.WARNING=30."""', - "value": 30 - }, - "root_compression_level": { - "text": '"""ROOT Compression level for ntuples. Default = LZMA:6."""', - "value": 'LZMA:6' - }, - "scheduler_legacy_mode": { - "text": - '"""If false scheduler calls Algorithm::execute instead of Algorithm::sysExecute, which breaks some non-functional algorithms. Default = True."""', - "value": - True - }, - "skip_events": { - "text": - '"""Number of events to skip at the beginning. Default = 0."""', - "value": 0 - }, - "simulation": { - "text": '"""Boolean to specify simulated samples. Default = False."""', - "value": False - }, - "stream": { - "text": - '"""Stream selection line belongs to - drives location of RawEvent only. Default = default."""', - "value": - 'default' - }, - "user_algorithms": { - "text": '"""User algorithm to run. Default = ' - '."""', - "value": '' - }, - "use_iosvc": { - "text": - '"""Use an alternative, faster, IIOSvc implementation for MDFs. Default = False."""', - "value": - False - }, - "write_fsr": { - "text": '"""Flags whether to write out an FSR. Default = True."""', - "value": True - } -} diff --git a/Phys/DaVinci/python/DaVinci/utilities_script.py b/Phys/DaVinci/python/DaVinci/utilities_script.py deleted file mode 100644 index 4d0108f42..000000000 --- a/Phys/DaVinci/python/DaVinci/utilities_script.py +++ /dev/null @@ -1,219 +0,0 @@ -############################################################################### -# (c) Copyright 2021-2022 CERN for the benefit of the LHCb Collaboration # -# # -# This software is distributed under the terms of the GNU General Public # -# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # -# # -# In applying this licence, CERN does not waive the privileges and immunities # -# granted to it by virtue of its status as an Intergovernmental Organization # -# or submit itself to any jurisdiction. # -############################################################################### -""" -This module contains some functions useful while running the davinci script. -""" - - -def dump_call(testfiledb_file, testfiledb_key, joboptfile, ctx_args): - """ - Print out all the davinci options used to run the job by means of the 'davinci' script. - - Args: - - testfiledb_file (str): TestFileDB-like file with job input information (1st davinci argument, 1st slot). - - testfiledb_key (str) relevant key to be looked for in the TestFileDB-like file (1st davinci argument, 2nd slot). - - joboptfile (str): file containing the job option information (2nd davinci argument). - - ctx_args (list): array with the remnant options passed to davinci by command line. - """ - import click - - indentStr = "# " - click.echo("%sDaVinci: running using the following arguments:" % indentStr) - click.echo( - "%s - TestFileDB file: %s" % (indentStr, testfiledb_file)) - click.echo( - "%s - TestFileDB key: %s" % (indentStr, testfiledb_key)) - click.echo("%s - joboptfile: %s" % (indentStr, joboptfile)) - - if len(ctx_args) > 1: - for i in range(0, len(ctx_args), 2): - click.echo("%s - %s: %s" % (indentStr, ctx_args[i][2:], - ctx_args[i + 1])) - - -def get_configurable_opts(configurables, with_defaults): - """ - Loop over the list of configurables to be passed to Gaudi and convert them - in a dictionary. 
- - Args: - - configurables (dict): list of configurables to be passed to Gaudi. - - with_defaults (bool): flag to set the default values. - - Returns: - - opts: dictionary containing all the configurables. - """ - from itertools import chain - from GaudiKernel.Proxy.Configurable import Configurable - - opts = {} - for c in configurables: - if hasattr(c, "__opt_properties__"): # pragma: no cover - # Basic structure for including GaudiConfig2 configurables - # Removed from testing since it is not fully integrated with PyConf. - opts.update(c.__opt_properties__(with_defaults)) - else: - items = (chain(c.getDefaultProperties().items(), - c.getValuedProperties().items()) - if with_defaults else c.getValuedProperties().items()) - - for p, v in items: - if hasattr(Configurable, "PropertyReference") and isinstance( - v, Configurable.PropertyReference): # pragma: no cover - # Taken from the Gaudi.Main code. - #This feature is useful for synchronizing property values set in different configurables - # with old configurable files, when read via .opts files. Standard jobs import - # options via Python files and don't use this feature. It's kept in the code just - # for completeness but removed for testing for the moment, as suggested by Marco Clemencic. - v = v.__resolve__() - if isinstance(v, str): - v = '"%s"' % v.replace('"', '\\"') - elif hasattr(v, '__opt_value__'): - v = v.__opt_value__() - - opts['.'.join((c.name(), p))] = str(v) - return opts - - -def set_testfiledb(inputfiledb): - """ - Set the TestFileDB file and the related key to be used in the job. - - Args: - inputfiledb (str): pair of values containing information on the TestFileDB and the key chosen by the user. - - Returns: - key for the TestFileDB. - file containing the TestFileDB. - """ - assert len( - inputfiledb - ) in (1, 2), "--inputfiledb takes two arguments: filedb key and the related filename."\ - "If the filename is not specified the TestFileDB will be used as default. %d" %len(inputfiledb) - - inputfiledb_key = inputfiledb[0] - inputfiledb_file = inputfiledb[ - 1] if not inputfiledb[1] == "-" else "TestFileDB" - - return inputfiledb_key, inputfiledb_file - - -def get_dummy_config(): - """ - Simple function returning a dummy instance of the configurables to be passed to DaVinci. - """ - from PyConf.application import ComponentConfig - from Configurables import ApplicationMgr - - config = ComponentConfig() - config.add(ApplicationMgr(AppName="DaVinci")) - - return config - - -def create_jobopt_template(file_name="jobopt_template.yaml", - interactive=False): - """ - Function to create a simple template file with job options - as specified by the --joboptfile davinci script option. - - Args: - file_name (str): name for the output template file. Default = 'jobopt_template.yaml'. - interactive (bool, optional): flag to create the template interactively. - - Output: - the template file with job options and the name defined by the user. - """ - if not interactive: - template_str = "# Template job option YAML file.\n"\ - "# Best guesses are provided below for various options. Please adapt as per your needs.\n\n"\ - "annsvc_config: '<path_to_annsvc_json>'\n"\ - "evt_max: -1\n"\ - "ntuple_file: 'ntuple_file.root'\n"\ - "histo_file: 'histos_file.root'\n"\ - "enable_unpack: True\n"\ - "process: 'Spruce'\n" - - with open(file_name, 'w') as jobopt_file_file: - jobopt_file_file.write(template_str) - else: - import yaml, click - # Get the DaVinci dict containing the default values. 
- from DaVinci.options_default import __optsDict__ as opts_dict - - dict_template = {} - # These are the data options that are set via the inputfiledb, so we remove them from the joboptfile. - list_data_qualifiers = [ - "data_type", "input_type", "simulation", "conddb_tag", "dddb_tag", - "input_files" - ] - - for name, config in opts_dict.items(): - if name not in list_data_qualifiers: - value = click.prompt( - f"Set the value for the option {name}:\n[{config['text']}]\n", - default=config["value"], - type=type(config["value"])) - # Adding to the dict only if the value is not the default one. - if not value == config["value"]: - dict_template.update({name: value}) - - if len(dict_template): - with open(file_name, 'w') as jobopt_file: - yaml.safe_dump( - dict_template, jobopt_file, default_flow_style=False) - else: - click.echo( - "All option values match the default ones, no job option file is needed!" - ) - - -def create_inputdb_template(file_name="inputdb_template.yaml"): - """ - Function to create a simple template file for the TestFileDB-like inputdb file. - - Args: - file_name (str): name for the output template file. Default = 'inputdb_template.yaml'. - - Output: - the template TestFileDB-like file with input data options and the name defined by the user. - """ - - template_str = "# Template input data YAML file.\n"\ - "# Best guesses are provided below for various qualifiers. Please adapt as per your needs.\n\n"\ - "MyInputData:\n"\ - " filenames:\n"\ - " - '<a_file_path>'\n"\ - " - '<another_file_path>'\n"\ - " qualifiers:\n"\ - " data_type: Upgrade\n"\ - " input_type: DST\n"\ - " simulation: true\n"\ - " conddb_tag: <e.g. sim-20171127-vc-md100>\n"\ - " dddb_tag: <e.g. dddb-20171126>\n"\ - " metadata:\n"\ - " Author: '<me :-)>'\n"\ - " Date: '<date_of_creation>'\n"\ - " Comment: '<E.g. how the files were created or for what purpose>'" - - with open(file_name, 'w') as inputdb_file: - inputdb_file.write(template_str) - - -def check_for_deprecated_command(ctx_args): - """ - Temporary check raising an error message if the deprecated 'run-mc' command is used. - """ - import click - if len(ctx_args) > 1: - if "run-mc" in ctx_args: - message = "The command 'run-mc' is now deprecated. DaVinci can be run with './run davinci'" - raise Exception(f"{click.style(message, bold=True, fg='red')}") diff --git a/Phys/DaVinci/tests/config/test_DVAppOptions.py b/Phys/DaVinci/tests/config/test_DVAppOptions.py deleted file mode 100644 index d3e321b31..000000000 --- a/Phys/DaVinci/tests/config/test_DVAppOptions.py +++ /dev/null @@ -1,60 +0,0 @@ -############################################################################### -# (c) Copyright 2021-2022 CERN for the benefit of the LHCb Collaboration # -# # -# This software is distributed under the terms of the GNU General Public # -# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # -# # -# In applying this licence, CERN does not waive the privileges and immunities # -# granted to it by virtue of its status as an Intergovernmental Organization # -# or submit itself to any jurisdiction. # -############################################################################### -import pytest -from DaVinci import options - - -def test_option_set(): - """ - Check if it's possible to access directly to an option if set. - """ - print(options.detectors) - - -def test_option_unset(): - """ - Check that it's impossible to access to an option if it has not been set. 
- """ - with pytest.raises(AttributeError): - print(options.evt_max) - - -def test_option_get_after_set(): - """ - Check that after assigning a value the option can be accessed correctly. - """ - options.evt_max = 1 - print(options.evt_max) - - -def test_option_unknown(): - """ - Check that it's impossible to access to an unknow option. - """ - with pytest.raises(AttributeError): - print(options.unknown) - - -def test_ApplicationOptions_consistency_with_PyConf(): - """ - Check for any difference between the options available in ApplicationOptions in PyConf and the DV options. - """ - from PyConf.application import ApplicationOptions - from DaVinci import options - from DaVinci.options_default import __optsDict__ as dv_defaults - lhcb_options = ApplicationOptions.getDefaultProperties() - dv_options = options.getDefaultProperties() - for name, _ in lhcb_options.items(): - # Exclude Moore-specific options since they will be moved into a - # dedicated Moore-specific ApplicationOptions class. - if name not in ["lines_maker", "tck"]: - assert name in dv_options, f"Option {name} not available in DaVinci" - assert name in dv_defaults, f"Option {name} doesn't have a default value in DaVinci" diff --git a/Phys/DaVinci/tests/config/test_algorithms.py b/Phys/DaVinci/tests/config/test_algorithms.py index b51475d47..61f80f063 100644 --- a/Phys/DaVinci/tests/config/test_algorithms.py +++ b/Phys/DaVinci/tests/config/test_algorithms.py @@ -8,146 +8,25 @@ # granted to it by virtue of its status as an Intergovernmental Organization # # or submit itself to any jurisdiction. # ############################################################################### - -import pytest, os - from PyConf.Algorithms import Gaudi__Examples__VoidConsumer as VoidConsumer -from DaVinci import options -from DaVinci.optionChecker import DVImportError -from DaVinci.algorithms import (setup_algorithms, define_fsr_writer, filter_on, - add_filter, apply_filters_and_unpacking, - setup_user_algorithms, unpack_locations, +from DaVinci import Options +from DaVinci.algorithms import (define_fsr_writer, filter_on, add_filter, + apply_filters_and_unpacking, unpack_locations, configured_FunTuple, get_odin, get_decreports) -def test_set_user_algorithms(): - """ - Check if DaVinci can import correctly an external user algorithm implemented in a given Python module. - """ - filename = "test_useralgs" - with open(f"{filename}.py", "w") as f: - f.write(""" -from PyConf.Algorithms import Gaudi__Examples__VoidConsumer as VoidConsumer -from DaVinci import options - -options.data_type = 'Upgrade' -options.enable_unpack = False -options.evt_max = 10 - -def main(): - algs = {'testAlg': [VoidConsumer()]} - return algs, [] -""") - - options.user_algorithms = f"{filename}:main" - test_algs, _ = setup_algorithms(options.user_algorithms) - assert "testAlg" in test_algs - - os.remove(f"{filename}.py") - - -def test_no_user_algorithms(): - """ - Check that no algorithms are imported if user doens't provide a python module. - """ - test_algs, _ = setup_user_algorithms("") - assert len(test_algs) == 0 - - -def test_user_algorithms_with_ext(): - """ - Check that a Python module can be imported correctly - even if the '.py' extension is passed by mistake by the user. 
- """ - filename = "./test_useralg.py" - with open(filename, "w") as f: - f.write(""" -from PyConf.Algorithms import Gaudi__Examples__VoidConsumer as VoidConsumer -from DaVinci import options - -options.data_type = 'Upgrade' -options.enable_unpack = False -options.evt_max = 10 - -def main(): - algs = [VoidConsumer()] - return algs, [] -""") - - test_algs, _ = setup_user_algorithms(f"{filename}:main") - assert "UserAlgorithms" in test_algs - - os.remove(filename) - - -def test_user_algorithms_no_module(): - """ - Check DaVinci returns the correct error if the Python module defined by the user doesn't exist. - """ - filename = "non_existent_test_useralg" - with pytest.raises(DVImportError) as excinfo: - test_algs, _ = setup_user_algorithms(f"{filename}.py") - - assert "Importing user algorithm '' failed!" in str(excinfo.value) - assert f"Check why the user Python module '{filename}' defined in './' can't be imported!" in str( - excinfo.value) - - -def test_user_algorithms_no_function(): - """ - Check DaVinci returns the correct error if the user provides only the module - and not the name of the function to be run. - """ - filename = "test_useralg" - with open(f"{filename}.py", "w") as f: - f.write(""" -rom PyConf.Algorithms import Gaudi__Examples__VoidConsumer as VoidConsumer -from DaVinci import options - -options.data_type = 'Upgrade' -options.enable_unpack = False -options.evt_max = 10 -""") - - with pytest.raises(TypeError): - test_algs, _ = setup_user_algorithms(filename) - - os.remove(f"{filename}.py") - - -def test_user_algorithms_wrong_function(): - """ - Check DaVinci returns the correct error if the function specified by the user doesn't exist. - """ - filename = "test_useralg" - with open(f"{filename}.py", "w") as f: - f.write(""" -from PyConf.Algorithms import Gaudi__Examples__VoidConsumer as VoidConsumer -from DaVinci import options - -options.data_type = 'Upgrade' -options.enable_unpack = False -options.evt_max = 10 - -def main(): - algs = {'testAlg': [VoidConsumer()]} - return algs, [] -""") - - with pytest.raises(DVImportError): - test_algs, _ = setup_user_algorithms(f"{filename}:main2") - - os.remove(f"{filename}.py") - - def test_define_write_fsr(): """ Check if DaVinci imports correctly the algorithm to merge and write FSRs. """ - options.output_level = 3 - options.merge_genfsr = True - options.simulation = True + options = Options( + data_type="Upgrade", + evt_max=1, + output_level=3, + merge_genfsr=True, + simulation=True, + ) test_algs = define_fsr_writer(options) assert any("GenFSRMerge" == x.name for x in test_algs) @@ -156,8 +35,13 @@ def test_add_hlt2_filter(): """ Check if DaVinci is able to implement correctly a filter on an HLT2 line. """ - options.process = "Hlt2" - options.stream = "default" + options = Options( + data_type="Upgrade", + evt_max=1, + simulation=True, + process="Hlt2", + stream="default", + ) test_filter = add_filter(options, "test_filter", "HLT_PASS('Hlt2TESTLineDecision')") assert "HDRFilter" in test_filter.fullname @@ -167,8 +51,13 @@ def test_add_spruce_filter(): """ Check if DaVinci is able to implement correctly a filter on a Sprucing line. 
""" - options.process = "Spruce" - options.stream = "default" + options = Options( + data_type="Upgrade", + evt_max=1, + simulation=True, + process="Spruce", + stream="default", + ) test_filter = add_filter(options, "test_filter", "HLT_PASS('SpruceTESTLineDecision')") assert "HDRFilter" in test_filter.fullname @@ -179,6 +68,11 @@ def test_add_void_filter(): Check if DaVinci is able to implement correcty a Void filter if 'HLT_PASS' string is not found in the filter code." """ + options = Options( + data_type="Upgrade", + evt_max=1, + simulation=True, + ) test_filter = add_filter(options, "test_filter", "VOIDTEST_Filter") assert "VoidFilter" in test_filter.fullname @@ -188,7 +82,11 @@ def test_unpack_locations(): Check if the unpacking algorithms are retrieved correctly from GaudiConf. TO BE REMOVED WHEN THE UNPACKING WILL BECOME FULLY FUNCTIONAL. """ - options.simulation = True + options = Options( + data_type="Upgrade", + evt_max=1, + simulation=True, + ) test_algs = unpack_locations(options, False) assert isinstance(test_algs, list) @@ -201,7 +99,11 @@ def test_unpack_locations_xgen(): (i.e. 'unpack_only_mc' is set to True.) TO BE REMOVED WHEN THE UNPACKING WILL BECOME FULLY FUNCTIONAL. """ - options.simulation = True + options = Options( + data_type="Upgrade", + evt_max=1, + simulation=True, + ) unpack_only_mc = True test_algs = unpack_locations(options, unpack_only_mc) @@ -216,9 +118,13 @@ def test_apply_filters_and_unpack(): and instantiates correctly the main unpacking algorithms. TO BE UPDATED WHEN THE UNPACKING WILL BECOME FULLY FUNCTIONAL. """ - options.evt_pre_filters = {"test_filter": "EVT_PREFILTER"} - options.enable_unpack = True - options.simulation = True + options = Options( + data_type="Upgrade", + evt_max=1, + evt_pre_filters={"test_filter": "EVT_PREFILTER"}, + enable_unpack=True, + simulation=True, + ) alg_dict = {"test_alg": [VoidConsumer()]} test_alg_dict = apply_filters_and_unpacking(options, alg_dict, False) list_of_main_expected_algs = ["LoKi__VoidFilter"] @@ -246,7 +152,12 @@ def test_configured_funtuple(): "variables": variables, } } - + options = Options( + data_type="Upgrade", + evt_max=1, + input_raw_format=0.3, + simulation=True, + ) test_dict = configured_FunTuple(options, config) assert any("FunTupleBase_Particles/Tuple_TestTuple" in alg.fullname for alg in test_dict["TestTuple"]) @@ -256,7 +167,12 @@ def test_get_odin(): """ Check if get_odin provides a correct instance of ODIN. """ - options.input_raw_format = 0.3 + options = Options( + data_type="Upgrade", + evt_max=1, + input_raw_format=0.3, + simulation=True, + ) odin = get_odin(options) assert odin.location == "/Event/createODIN/ODIN" @@ -265,8 +181,13 @@ def test_get_decreports(): """ Check if get_decreports provide a correct instance of HltDecReportsDecoder. 
""" - options.process = "Turbo" - options.stream = "TurboSP" + options = Options( + data_type="Upgrade", + evt_max=1, + simulation=True, + process="Turbo", + stream="TurboSP", + ) decreports = get_decreports("Hlt2", options) assert decreports.location == "/Event/Hlt2/DecReports" diff --git a/Phys/DaVinci/tests/config/test_optionChecker.py b/Phys/DaVinci/tests/config/test_optionChecker.py deleted file mode 100644 index 730002402..000000000 --- a/Phys/DaVinci/tests/config/test_optionChecker.py +++ /dev/null @@ -1,45 +0,0 @@ -############################################################################### -# (c) Copyright 2022 CERN for the benefit of the LHCb Collaboration # -# # -# This software is distributed under the terms of the GNU General Public # -# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # -# # -# In applying this licence, CERN does not waive the privileges and immunities # -# granted to it by virtue of its status as an Intergovernmental Organization # -# or submit itself to any jurisdiction. # -############################################################################### -import pytest -from DaVinci.optionChecker import option_checker, DVOptionError, DVKeyError - - -def test_get_allowed_values_for_known_option(): - """ - Check if DaVinci raises an error if the data_type is not allowed. - """ - opt_name = "data_type" - opt_value = "2018" - with pytest.raises(DVOptionError): - option_checker(opt_name, opt_value) - - -def test_get_allowed_values_for_unknown_option(): - """ - Check that DaVinci raise an error if checking allowed values on an unknown option. - """ - opt_name = "data" - opt_value = "2018" - - with pytest.raises(DVKeyError) as excinfo: - option_checker(opt_name, opt_value) - - assert f"There is no option named '{opt_name}' in DaVinci!" in str( - excinfo.value) - - -def test_get_allowed_values_for_unrestricted_option(): - """ - Check that DaVinci does't raise an error if not allowed values are expected for a given option. - """ - opt_name = "evt_max" - opt_value = "200" - option_checker(opt_name, opt_value) -- GitLab