diff --git a/PhysicsAnalysis/PATJobTransforms/share/skeleton.DAODtoD2AOD_tf.py b/PhysicsAnalysis/PATJobTransforms/share/skeleton.DAODtoD2AOD_tf.py
new file mode 100644
index 0000000000000000000000000000000000000000..65e234a9662f11a34e3e8b5ac3696047eff2c16a
--- /dev/null
+++ b/PhysicsAnalysis/PATJobTransforms/share/skeleton.DAODtoD2AOD_tf.py
@@ -0,0 +1,85 @@
+# Skeleton file for DAOD_PHYS to D2AOD (derivation framework) job
+#
+from AthenaCommon.AlgSequence import AlgSequence
+from AthenaCommon.Logging import logging
+msg = logging.getLogger('DAODtoD2AOD')
+msg.info( '****************** STARTING DAOD_PHYS->D2AOD MAKING *****************' )
+
+def getSubSequences(sequence,sequenceList):
+    sequenceList.append(sequence)
+    for item in sequence:
+        if type(item).__name__ == 'AthSequencer':
+            getSubSequences(item,sequenceList)
+    return
+
+if hasattr(runArgs, "reductionConf"):
+    msg.info('Will attempt to make the following reduced formats: {0}'.format(runArgs.reductionConf))
+else:
+    msg.error('DAOD_PHYS skimming job started, but with no "reductionConf" array - aborting')
+    raise RuntimeError("No reductions configured")
+
+include("RecJobTransforms/CommonRecoSkeletonJobOptions.py")
+
+try:
+    import os
+    project = os.environ ['AtlasProject']
+    version = os.environ ['AtlasVersion']
+    release = project + '-' + version
+    rec.AtlasReleaseVersion = release
+except:
+    print("WARNING: Unable to construct AtlasReleaseVersion from environment")
+
+if hasattr(runArgs,"inputDAOD_PHYSFile"):
+    globalflags.InputFormat.set_Value_and_Lock('pool')
+    rec.readAOD.set_Value_and_Lock( True )
+    rec.readRDO.set_Value_and_Lock( False )
+    rec.doTrigger.set_Value_and_Lock( False )
+    rec.doTruth.set_Value_and_Lock( False )
+    rec.doDPD.set_Value_and_Lock(True)
+    rec.OutputFileNameForRecoStep.set_Value_and_Lock("DAODtoD2AOD")
+    athenaCommonFlags.PoolAODInput.set_Value_and_Lock( runArgs.inputDAOD_PHYSFile )
+else:
+    msg.error('DAOD_PHYS skimming job started, but with no DAOD_PHYS inputs - aborting')
+    raise RuntimeError("No DAOD_PHYS input")
+
+listOfFlags=[]
+
+try:
+    from DerivationFrameworkCore.DerivationFrameworkProdFlags import derivationFlags
+    listOfFlags.append(derivationFlags)
+except ImportError:
+    print("WARNING DerivationFrameworkProdFlags not available.")
+
+from PATJobTransforms.DPDUtils import SetupOutputDPDs
+rec.DPDMakerScripts.append(SetupOutputDPDs(runArgs,listOfFlags))
+
+## Pre-exec
+if hasattr(runArgs,"preExec"):
+    msg.info("transform pre-exec")
+    for cmd in runArgs.preExec:
+        msg.info(cmd)
+        exec(cmd)
+
+## Pre-include
+if hasattr(runArgs,"preInclude"):
+    for fragment in runArgs.preInclude:
+        include(fragment)
+
+#========================================================
+# Central topOptions (this is one is a string not a list)
+#========================================================
+if hasattr(runArgs,"topOptions"): include(runArgs.topOptions)
+else: include( "RecExCommon/RecExCommon_topOptions.py" )
+
+## Post-include
+if hasattr(runArgs,"postInclude"):
+    for fragment in runArgs.postInclude:
+        include(fragment)
+
+## Post-exec
+if hasattr(runArgs,"postExec"):
+    msg.info("transform post-exec")
+    for cmd in runArgs.postExec:
+        msg.info(cmd)
+        exec(cmd)
+
diff --git a/Reconstruction/RecJobTransforms/python/recTransformUtils.py b/Reconstruction/RecJobTransforms/python/recTransformUtils.py
index 33f6013cb5f0df3c9979bacbce42d0da52161445..1b318fd958f928ca4e17cf54311d4fcdbe49d97f 100644
--- a/Reconstruction/RecJobTransforms/python/recTransformUtils.py
+++ b/Reconstruction/RecJobTransforms/python/recTransformUtils.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2021 CERN for the benefit of the ATLAS collaboration
 
 ## @brief Module with standard reconstruction transform options and substeps
 
@@ -126,6 +126,8 @@ def addRecoSubsteps(executorSet):
                                          substep = 'n2n', inData = ['NTUP_COMMON'], outData = ['DNTUP']))
     executorSet.add(reductionFrameworkExecutor(name = 'EVNTtoDAOD',
                                                skeletonFile = 'PATJobTransforms/skeleton.AODtoDAOD_tf.py',
                                                substep = 'ev2da', inData = ['EVNT'], outData = ['DAOD']))
+    executorSet.add(reductionFrameworkExecutor(name = 'DAODtoD2AOD', skeletonFile = 'PATJobTransforms/skeleton.DAODtoD2AOD_tf.py',
+                                               substep = 'da2d2a', inData = ['DAOD_PHYS'], outData = ['D2AOD']))
diff --git a/Tools/PyJobTransforms/python/trfArgs.py b/Tools/PyJobTransforms/python/trfArgs.py
index 18b1feafd12a59239cfc185fa60a04d71ad14fa3..f3756915350e5c5e680fdea5e5e3eb63a4f136a2 100644
--- a/Tools/PyJobTransforms/python/trfArgs.py
+++ b/Tools/PyJobTransforms/python/trfArgs.py
@@ -292,8 +292,8 @@ def addD3PDArguments(parser, pick = None, transform = None, multipleOK=False, ad
             msg.warning('D3PDProdFlags not available - cannot add D3PD arguments')
 
 
-## @brief Add reduction framework output file argument
-# @details This is the base name for the NTUP output streams. It will get appended
+## @brief Add derivation framework output file argument
+# @details This is the base name for the D(2)AOD output streams. It will get appended
 # to it the names of the specific reduced types.
 # @param parser Argument parser object to add arguments to
 # @param transform Transform object, to add reduced object to (maybe don't need that here?)
@@ -302,12 +302,18 @@ def addReductionArguments(parser, transform = None):
     parser.add_argument('--inputNTUP_COMMONFile',
                         type=argFactory(trfArgClasses.argNTUPFile, io='input', treeNames=['physics']),
                         group='Reduction', help='NTUP common input file', nargs='+')
+    parser.add_argument('--inputDAOD_PHYSFile',
+                        type=argFactory(trfArgClasses.argPOOLFile, io='input', type='aod'),
+                        group='Reduction', help='Input DAOD_PHYS for skimming')
     parser.add_argument('--outputDNTUPFile',
                         type=argFactory(trfArgClasses.argNTUPFile, treeNames=None),
                         group='Reduction', help='Reduced NTUP output filename base')
     parser.add_argument('--outputDAODFile',
                         type=argFactory(trfArgClasses.argPOOLFile, io='output', type='aod'),
                         help='Output reduced AOD filename base', group='Reduction')
+    parser.add_argument('--outputD2AODFile',
+                        type=argFactory(trfArgClasses.argPOOLFile, io='output', type='aod'),
+                        help='Output skimmed D2AOD filename base', group='Reduction')
     parser.add_argument('--reductionConf',
                         type=argFactory(trfArgClasses.argList), nargs='+',
                         help='Configuration of reduced stream outputs', group='Reduction')
diff --git a/Tools/PyJobTransforms/python/trfExe.py b/Tools/PyJobTransforms/python/trfExe.py
index 57bc5854936155ad1391c3aff7ced91b16870d15..1051be1aac635a2bb96a8771b69c288cd6ff7453 100755
--- a/Tools/PyJobTransforms/python/trfExe.py
+++ b/Tools/PyJobTransforms/python/trfExe.py
@@ -1672,25 +1672,28 @@ class hybridPOOLMergeExecutor(athenaExecutor):
 
 ## @brief Specialist executor to manage the handling of multiple implicit input
 # and output files within the reduction framework.
 class reductionFrameworkExecutor(athenaExecutor):
-
     ## @brief Take inputDAODFile and setup the actual outputs needed
     # in this job.
     def preExecute(self, input=set(), output=set()):
         self.setPreExeStart()
         msg.debug('Preparing for execution of {0} with inputs {1} and outputs {2}'.format(self.name, input, output))
-
         if 'NTUP_PILEUP' not in output:
             if 'reductionConf' not in self.conf.argdict:
                 raise trfExceptions.TransformExecutionException(trfExit.nameToCode('TRF_REDUCTION_CONFIG_ERROR'),
                                                                 'No reduction configuration specified')
-            if 'DAOD' not in output:
+            if ('DAOD' not in output) and ('D2AOD' not in output):
                 raise trfExceptions.TransformExecutionException(trfExit.nameToCode('TRF_REDUCTION_CONFIG_ERROR'),
                                                                 'No base name for DAOD reduction')
 
             for reduction in self.conf.argdict['reductionConf'].value:
-                dataType = 'DAOD_' + reduction
-                outputName = 'DAOD_' + reduction + '.' + self.conf.argdict['outputDAODFile'].value[0]
+                if ('DAOD' in output):
+                    dataType = 'DAOD_' + reduction
+                    outputName = 'DAOD_' + reduction + '.' + self.conf.argdict['outputDAODFile'].value[0]
+                if ('D2AOD' in output):
+                    dataType = 'D2AOD_' + reduction
+                    outputName = 'D2AOD_' + reduction + '.' + self.conf.argdict['outputD2AODFile'].value[0]
+
                 msg.info('Adding reduction output type {0}'.format(dataType))
                 output.add(dataType)
                 newReduction = trfArgClasses.argPOOLFile(outputName, io='output', runarg=True, type='AOD',
@@ -1700,10 +1703,15 @@ class reductionFrameworkExecutor(athenaExecutor):
 
             # Clean up the stub file from the executor input and the transform's data dictionary
             # (we don't remove the actual argFile instance)
-            output.remove('DAOD')
-            del self.conf.dataDictionary['DAOD']
-            del self.conf.argdict['outputDAODFile']
-
+            if ('DAOD' in output):
+                output.remove('DAOD')
+                del self.conf.dataDictionary['DAOD']
+                del self.conf.argdict['outputDAODFile']
+            if ('D2AOD' in output):
+                output.remove('D2AOD')
+                del self.conf.dataDictionary['D2AOD']
+                del self.conf.argdict['outputD2AODFile']
+
             msg.info('Data dictionary is now: {0}'.format(self.conf.dataDictionary))
             msg.info('Input/Output: {0}/{1}'.format(input, output))