Skip to content
Snippets Groups Projects
Commit 6e64f2ae authored by Zach Marshall's avatar Zach Marshall Committed by Frank Winklmeier
Browse files

Adding NTUP_PILEUP back into master

It looks like this (DF_DataPrep) was missed in the migration, so
NTUP_PILEUP wasn't working. This adds it back in.
parent 2512f2da
No related branches found
No related tags found
No related merge requests found
################################################################################
# Package: DerivationFrameworkDataPrep
################################################################################
# Declare the package name:
atlas_subdir( DerivationFrameworkDataPrep )
# Install files from the package:
# Python modules are linted with flake8 at build time via POST_BUILD_CMD.
atlas_install_python_modules( python/*.py POST_BUILD_CMD ${ATLAS_FLAKE8} )
# Job-option fragments under share/ are installed for runtime include().
atlas_install_joboptions( share/*.py )
source diff could not be displayed: it is too large. Options to address this: view the blob.
# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
# From Marcelo Vogel to allow setting the name of the output ROOT file for DAPR0
# See https://its.cern.ch/jira/browse/PRODSYS-601 for more info
from AthenaCommon.JobProperties import JobProperty, JobPropertyContainer
from AthenaCommon.JobProperties import jobproperties
class outputFile(JobProperty):
    """Job property holding the name of the ROOT file written by the
    DAPR0 pile-up reweighting configuration dump (default: PILEUP.root).
    """
    statusOn = True
    allowedTypes = ['str']
    StoredValue = 'PILEUP.root'
## Definition of the flag container
class DataPrepJobProperties(JobPropertyContainer):
    """Container collecting the DerivationFrameworkDataPrep job flags."""
## Register the container with Athena's global job-property store so the
## flags become reachable as jobproperties.DataPrepJobProperties.*
jobproperties.add_Container(DataPrepJobProperties)
## Attach the outputFile flag to the freshly registered container
jobproperties.DataPrepJobProperties.add_JobProperty( outputFile )
## Short alias used by client job options, e.g. DataPrepFlags.outputFile()
DataPrepFlags = jobproperties.DataPrepJobProperties
# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
#====================================================================
# DAPR0.py
# reductionConf flag DAPR0 in Reco_tf.py
#====================================================================
from DerivationFrameworkCore.DerivationFrameworkMaster import *

# Output stream name and file name come from the standard derivation flags.
streamName = derivationFlags.WriteDAOD_DAPR0Stream.StreamName
fileName = buildFileName( derivationFlags.WriteDAOD_DAPR0Stream )

# The pile-up ntuple file name is configurable through DataPrepFlags
# (see PRODSYS-601); it defaults to PILEUP.root.
from DerivationFrameworkDataPrep.DataPrepJobProperties import DataPrepFlags
pileupFileName = DataPrepFlags.outputFile()

DAPR0Stream = MSMgr.NewPoolRootStream( streamName, fileName )

# Use the pile-up reweighting provider to generate a df.metadata.root PRW
# config file. Only run when the input file is post-digitization, i.e. it
# carries /Digitization/Parameters metadata (never run this on EVNT).
from RecExConfig.InputFilePeeker import inputFileSummary
if 'metadata' in inputFileSummary and '/Digitization/Parameters' in inputFileSummary['metadata']:
    ToolSvc += CfgMgr.CP__PileupReweightingTool("auto", ConfigFiles=[], LumiCalcFiles=[])
    DerivationFrameworkJob += CfgMgr.CP__PileupReweightingProvider(ConfigOutputStream="DFMETADATA",
                                                                   Tool=ToolSvc.auto,
                                                                   RunSystematics=False)
    # Make sure a THistSvc exists before registering the output stream on it.
    if not hasattr(svcMgr, 'THistSvc'):
        svcMgr += CfgMgr.THistSvc()
    svcMgr.THistSvc.Output += ["DFMETADATA DATAFILE='" + pileupFileName + "' OPT='RECREATE'"]
#====================================================================
# DAPR1.py
# reductionConf flag DAPR1 in Reco_tf.py
#====================================================================
from DerivationFrameworkCore.DerivationFrameworkMaster import *
from DerivationFrameworkJetEtMiss.JetCommon import *
#from DerivationFrameworkJetEtMiss.ExtendedJetCommon import *
#from DerivationFrameworkJetEtMiss.METCommon import *
#====================================================================
# SKIMMING TOOL
#====================================================================
# NOTE: need to be able to OR isSimulated as an OR with the trigger
# Select on a high-pT jet trigger (run-1 EF and run-2 HLT spellings) OR any
# of the listed BCM / L1_J12 background-enriched items.
trigger = '(EF_j360_a4tchad || HLT_j360_a4tchad || L1_BCM_AC_CA_BGRP0 || L1_BCM_Wide_EMPTY || L1_BCM_Wide_UNPAIRED_ISO || L1_BCM_Wide_UNPAIRED_NONISO || L1_BCM_Wide_CALIB || L1_BCM_Wide_ABORTGAPNOTCALIB || L1_BCM_AC_UNPAIRED_ISO || L1_BCM_CA_UNPAIRED_ISO || L1_BCM_AC_UNPAIRED_NONISO || L1_BCM_CA_UNPAIRED_NONISO || L1_BCM_AC_ABORTGAPNOTCALIB || L1_BCM_CA_ABORTGAPNOTCALIB || L1_BCM_AC_CALIB || L1_BCM_CA_CALIB || L1_J12_UNPAIRED_ISO || L1_J12_UNPAIRED_NONISO || L1_J12_ABORTGAPNOTCALIB || L1_J12 || L1_J12_EMPTY)'
# presumably eventTypeBitmask==1 tags simulated events so MC always passes
# the skim regardless of trigger — TODO confirm against xAOD::EventInfo docs
expression = trigger+' || (EventInfo.eventTypeBitmask==1)'
from DerivationFrameworkTools.DerivationFrameworkToolsConf import DerivationFramework__xAODStringSkimmingTool
DAPR1SkimmingTool = DerivationFramework__xAODStringSkimmingTool(name = "DAPR1SkimmingTool1",
expression = expression)
# The tool must live in ToolSvc before the kernel below can reference it.
ToolSvc += DAPR1SkimmingTool
#=======================================
# CREATE THE DERIVATION KERNEL ALGORITHM
#=======================================
from DerivationFrameworkCore.DerivationFrameworkCoreConf import DerivationFramework__DerivationKernel
DerivationFrameworkJob += CfgMgr.DerivationFramework__DerivationKernel("DAPR1Kernel",
SkimmingTools = [DAPR1SkimmingTool])
#====================================================================
# SET UP STREAM
#====================================================================
streamName = derivationFlags.WriteDAOD_DAPR1Stream.StreamName
fileName = buildFileName( derivationFlags.WriteDAOD_DAPR1Stream )
DAPR1Stream = MSMgr.NewPoolRootStream( streamName, fileName )
# Only events accepted by the skimming kernel are written to the stream.
DAPR1Stream.AcceptAlgs(["DAPR1Kernel"])
#====================================================================
# Add the containers to the output stream - slimming done here
#====================================================================
from DerivationFrameworkCore.SlimmingHelper import SlimmingHelper
DAPR1SlimmingHelper = SlimmingHelper("DAPR1SlimmingHelper")
# Smart collections get the curated variable lists; AllVariables containers
# are written out in full.
DAPR1SlimmingHelper.SmartCollections = ["Muons", "PrimaryVertices"]
DAPR1SlimmingHelper.AllVariables = ["AntiKt4LCTopoJets",
"AntiKt4EMTopoJets",
"CaloCalTopoClusters",
"MuonSegments" ]
DAPR1SlimmingHelper.IncludeMuonTriggerContent = True
DAPR1SlimmingHelper.IncludeEGammaTriggerContent = True
DAPR1SlimmingHelper.IncludeBPhysTriggerContent = True
DAPR1SlimmingHelper.IncludeJetTauEtMissTriggerContent = True
DAPR1SlimmingHelper.AppendContentToStream(DAPR1Stream)
#====================================================================
# DAPR2.py
# reductionConf flag DAPR2 in Reco_tf.py
# Special event picking format, to be used in case of event index
# failure
#====================================================================
from DerivationFrameworkCore.DerivationFrameworkMaster import *
#====================================================================
# EVENT PICKING
#====================================================================
# EventList is a static (run, event) list shipped with the package; the
# PyEvtFilter below accepts only the listed events.
from DerivationFrameworkDataPrep.DAPR2EventList import EventList
seq = CfgMgr.AthSequencer("DFEventPickingSequence")
from GaudiSequencer.PyComps import PyEvtFilter
seq += PyEvtFilter(
'DFEventPickingAlg',
# the store-gate key. leave as an empty string to take any eventinfo instance
evt_info='',
# NOTE(review): 'Lvl' is not imported here — presumably provided by the
# DerivationFrameworkMaster wildcard import; verify before reuse elsewhere.
OutputLevel=Lvl.WARNING)
seq.DFEventPickingAlg.evt_list = EventList
DerivationFrameworkJob += seq
#====================================================================
# SET UP STREAM
#====================================================================
streamName = derivationFlags.WriteDAOD_DAPR2Stream.StreamName
fileName = buildFileName( derivationFlags.WriteDAOD_DAPR2Stream )
DAPR2Stream = MSMgr.NewPoolRootStream( streamName, fileName )
# Only events passing the picking filter are written out.
DAPR2Stream.AcceptAlgs(["DFEventPickingAlg"])
#====================================================================
# Store all containers
#====================================================================
# Copy every item from the input to the output (empty exclude list), so the
# picked events keep their full content.
from PrimaryDPDMaker import PrimaryDPD_OutputDefinitions as dpdOutput
excludeList = []
dpdOutput.addAllItemsFromInputExceptExcludeList( streamName, excludeList )
......@@ -15,11 +15,12 @@ def getSubSequences(sequence,sequenceList):
getSubSequences(item,sequenceList)
return
if hasattr(runArgs, "reductionConf"):
msg.info('Will attempt to make the following reduced formats: {0}'.format(runArgs.reductionConf))
else:
msg.error('AOD Reduction job started, but with no "reductionConf" array - aborting')
raise RuntimeError("No reductions configured")
if not hasattr(runArgs, "outputNTUP_PILEUPFile"):
if hasattr(runArgs, "reductionConf"):
msg.info('Will attempt to make the following reduced formats: {0}'.format(runArgs.reductionConf))
else:
msg.error('AOD Reduction job started, but with no "reductionConf" array - aborting')
raise RuntimeError("No reductions configured")
include("RecJobTransforms/CommonRecoSkeletonJobOptions.py")
......@@ -66,6 +67,13 @@ else:
msg.error('AOD Reduction job started, but with no AOD inputs - aborting')
raise RuntimeError("No AOD input")
# Deal appropriately with NTUP_PILEUP
# When an NTUP_PILEUP output is requested, force the DAPR0 reduction and
# route the pile-up ntuple to the requested file name.
if hasattr(runArgs, "outputNTUP_PILEUPFile"):
from DerivationFrameworkDataPrep.DataPrepJobProperties import DataPrepFlags
# NOTE(review): direct attribute assignment — confirm that
# JobPropertyContainer.__setattr__ routes this to set_Value(), since
# DAPR0.py later calls DataPrepFlags.outputFile() as a property.
DataPrepFlags.outputFile = runArgs.outputNTUP_PILEUPFile
# The DAOD_DAPR0 file itself is a by-product here; give it a fixed name.
runArgs.outputDAOD_DAPR0File = "DAOD_DAPR0.root"
runArgs.reductionConf = "DAPR0"
listOfFlags=[]
try:
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment