Skip to content
Snippets Groups Projects
Commit e0d350ac authored by Graeme Stewart's avatar Graeme Stewart
Browse files

* AFII_tf.py

	- Removed (see ATLASJT-105)
    * Tag FullChainTransforms-00-01-01 (FullChainTransforms-00-01-01)

2015-02-03 Graeme Stewart <graeme.andrew.stewart@cern.ch>
	* AFII_tf.py
	- Removed (see ATLASJT-105)
    * Tag FullChainTransforms-00-01-01

2014-12-19 Richard Hawkings <richard.hawkings@cern.ch>
    * Tag FullChainTransforms-00-01-00
    * FastChain_tf.py and skeleton.EVGENtoRDO.py
	- add first version of FastChain_tf that does sim+digi in one
	  athena step, and can then add reconstruction.
	  Actual simulation and digi are done with full G4/full digi for now;
	  no pileup, no trigger.

    * FullChain_tf.py add LODPK
parent f1f1fe30
No related branches found
No related tags found
No related merge requests found
#! /usr/bin/env python
# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
## FullChain_tf.py
# One step transform that will start from EVNT and run ISF right through
# to AOD output
# @version $Id: AFII_tf.py 568684 2013-11-05 13:03:06Z graemes $
import sys
import time
import logging
# Setup core logging here
from PyJobTransforms.trfLogger import msg
msg.info('logging set in %s' % sys.argv[0])
from PyJobTransforms.transform import transform
from PyJobTransforms.trfExe import athenaExecutor, DQMergeExecutor, reductionFrameworkExecutor, reductionFrameworkExecutorNTUP
from PyJobTransforms.trfArgs import addAthenaArguments, addD3PDArguments, addPrimaryDPDArguments, addExtraDPDTypes, addDetectorArguments, addReductionArguments
from PyJobTransforms.trfDecorators import stdTrfExceptionHandler, sigUsrStackTrace
from RecJobTransforms.recTrfArgs import addCommonRecTrfArgs, addStandardRecoFiles
from SimuJobTransforms.simTrfArgs import addForwardDetTrfArgs, addCosmicsTrfArgs, addForwardDetTrfArgs, addCommonSimTrfArgs, addBasicDigiArgs, addPileUpTrfArgs, addCommonSimDigTrfArgs
import PyJobTransforms.trfArgClasses as trfArgClasses
@stdTrfExceptionHandler
@sigUsrStackTrace
def main():
    """Command-line entry point: build the transform, run it, write the
    job report, and exit with the transform's exit code."""
    msg.info('This is %s' % sys.argv[0])
    fullChainTrf = getTransform()
    fullChainTrf.parseCmdLineArgs(sys.argv[1:])
    fullChainTrf.execute()
    fullChainTrf.generateReport()
    msg.info("%s stopped at %s, trf exit code %d" % (sys.argv[0], time.asctime(), fullChainTrf.exitCode))
    sys.exit(fullChainTrf.exitCode)
def getTransform():
    """Build and return the configured FullChain transform.

    Wires together the executor substeps (ISF simulation, HITS->RDO
    digitisation, reconstruction to ESD/AOD, DPD making, TAG production and
    the reduction framework) and attaches all needed argument groups to the
    transform's parser before returning it.
    """
    executorSet = set()
    # EVNT -> HITS with ISF simulation
    executorSet.add(athenaExecutor(name = 'Sim_tf', skeletonFile = 'SimuJobTransforms/skeleton.EVGENtoHIT_ISF.py',
        substep = 'evnt2hits', tryDropAndReload = False, perfMonFile = 'ntuple.pmon.gz', inData=['NULL','Evgen'], outData=['HITS','TrackRecord','NULL'] )) #may have to add evgen to outData if cosmics/cavern background jobs don't work.
    # HITS -> RDO digitisation; minbias hits files are resolved from runArgs at runtime
    executorSet.add(athenaExecutor(name = 'HITtoRDO', skeletonFile = 'SimuJobTransforms/skeleton.HITtoRDO.py',
        substep = 'h2r', tryDropAndReload = False, perfMonFile = 'ntuple.pmon.gz',
        inData = ['HITS'], outData = ['RDO'],
        runtimeRunargs = {'LowPtMinbiasHitsFile' : 'runArgs.inputLowPtMinbiasHitsFile',
            'HighPtMinbiasHitsFile' : 'runArgs.inputHighPtMinbiasHitsFile',}))
    # Reconstruction: RAW/RDO -> ESD, then ESD -> AOD
    executorSet.add(athenaExecutor(name = 'RAWtoESD', skeletonFile = 'RecJobTransforms/skeleton.RAWtoESD_tf.py',
        substep = 'r2e', inData = ['BS', 'RDO'], outData = ['ESD', 'HIST_ESD_INT'],
        perfMonFile = 'ntuple_RAWtoESD.pmon.gz'))
    executorSet.add(athenaExecutor(name = 'ESDtoAOD', skeletonFile = 'RecJobTransforms/skeleton.ESDtoAOD_tf.py',
        substep = 'e2a', inData = ['ESD'], outData = ['AOD', 'HIST_AOD_INT'],
        perfMonFile = 'ntuple_ESDtoAOD.pmon.gz'))
    # Merge the intermediate monitoring histograms into the final HIST output
    executorSet.add(DQMergeExecutor(name = 'DQHistogramMerge', inData = [('HIST_ESD_INT', 'HIST_AOD_INT')], outData = ['HIST']))
    # DPD making from ESD and from AOD (no declared outputs here; added via parser args)
    executorSet.add(athenaExecutor(name = 'ESDtoDPD', skeletonFile = 'PATJobTransforms/skeleton.ESDtoDPD_tf.py',
        substep = 'e2d', inData = ['ESD'], outData = [],
        perfMonFile = 'ntuple_ESDtoDPD.pmon.gz'))
    executorSet.add(athenaExecutor(name = 'AODtoDPD', skeletonFile = 'PATJobTransforms/skeleton.AODtoDPD_tf.py',
        substep = 'a2d', inData = ['AOD', 'EVNT'], outData = [],
        perfMonFile = 'ntuple_AODtoDPD.pmon.gz'))
    # TAG production from AOD
    executorSet.add(athenaExecutor(name = 'AODtoTAG', skeletonFile = 'RecJobTransforms/skeleton.AODtoTAG_tf.py',
        inData = ['AOD'], outData = ['TAG'],))
    # executorSet.add(athenaExecutor(name = 'AODtoHIST', skeletonFile = 'RecJobTransforms/skeleton.FROM_PETER.py',
    #     inData = ['AOD'], outData = ['HIST_AOD'],))
    # Reduction framework substeps (derivations from AOD and from NTUP_COMMON)
    executorSet.add(reductionFrameworkExecutor(name = 'AODtoRED', skeletonFile = 'PATJobTransforms/skeleton.AODtoRED_tf.py',
        substep = 'a2r', inData = ['AOD'], outData = ['DAOD_RED']))
    executorSet.add(reductionFrameworkExecutorNTUP(name = 'NTUPtoRED', skeletonFile = 'PATJobTransforms/skeleton.NTUPtoRED_tf.py',
        substep = 'n2n', inData = ['NTUP_COMMON'], outData = ['NTUP_RED']))
    trf = transform(executor = executorSet, description = 'General purpose ATLAS reconstruction transform, which also supports'
        ' digitisation. Inputs can be HITS, RDO, BS, ESD or AOD, with outputs of RDO, ESD, AOD or DPDs.'
        ' See https://twiki.cern.ch/twiki/bin/viewauth/Atlas/RecoTf for more details.')
    # Attach the common/reco/DPD argument groups used by the substeps above
    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    addCommonRecTrfArgs(trf.parser)
    addStandardRecoFiles(trf.parser)
    addPrimaryDPDArguments(trf.parser, transform = trf)
    addD3PDArguments(trf.parser, transform = trf)
    addExtraDPDTypes(trf.parser, transform = trf)
    addReductionArguments(trf.parser, transform = trf)
    # Simulation and digitisation argument groups
    addCommonSimTrfArgs(trf.parser)
    addCommonSimDigTrfArgs(trf.parser)
    addCosmicsTrfArgs(trf.parser)
    addBasicDigiArgs(trf.parser)
    addForwardDetTrfArgs(trf.parser)
    addPileUpTrfArgs(trf.parser)
    return trf
# Standard entry point when invoked as a script
if __name__ == '__main__':
    main()
#! /usr/bin/env python
# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
# FastChain_tf.py
# One step transform to run SIM+DIGI as one job, then reco
# to ESD/AOD output
# Richard Hawkings, adapted from FullChain_tf.py by Graeme Stewart
import sys
import time
import logging
# Setup core logging here
from PyJobTransforms.trfLogger import msg
msg.info('logging set in %s' % sys.argv[0])
from PyJobTransforms.transform import transform
from PyJobTransforms.trfExe import athenaExecutor
from PyJobTransforms.trfArgs import addAthenaArguments, addDetectorArguments, addTriggerArguments
from PyJobTransforms.trfDecorators import stdTrfExceptionHandler, sigUsrStackTrace
from RecJobTransforms.recTransformUtils import addRecoSubsteps, addAllRecoArgs
from SimuJobTransforms.simTrfArgs import addForwardDetTrfArgs, addForwardDetTrfArgs, addCommonSimTrfArgs, addBasicDigiArgs, addCommonSimDigTrfArgs, addTrackRecordArgs, addSim_tfArgs
from PyJobTransforms.trfArgClasses import argFactory,argList
@stdTrfExceptionHandler
@sigUsrStackTrace
def main():
    """Command-line entry point: run the fast-chain transform, write its
    report, and exit with its exit code."""
    msg.info('This is %s' % sys.argv[0])
    fastChainTrf = getTransform()
    fastChainTrf.parseCmdLineArgs(sys.argv[1:])
    fastChainTrf.execute()
    fastChainTrf.generateReport()
    msg.info("%s stopped at %s, trf exit code %d" % (sys.argv[0], time.asctime(), fastChainTrf.exitCode))
    sys.exit(fastChainTrf.exitCode)
def getTransform():
    """Build and return the configured FastChain transform.

    Adds the standard reconstruction substeps plus a single EVNTtoRDO
    executor that runs ISF simulation and digitisation in one athena step,
    then attaches the common, trigger, reco, sim/digi and fast-chain
    argument groups to the transform's parser.
    """
    executorSet = set()
    addRecoSubsteps(executorSet)
    # Sim + Digi - factor these out into an importable function in time
    executorSet.add(athenaExecutor(name = 'EVNTtoRDO', skeletonFile = 'FullChainTransforms/skeleton.EVGENtoRDO.py',
        substep = 'simdigi', tryDropAndReload = False, perfMonFile = 'ntuple.pmon.gz',
        inData = ['NULL', 'EVNT'],
        outData = ['RDO', 'NULL']))
    trf = transform(executor = executorSet, description = 'Fast chain ATLAS transform with ISF simulation, digitisation'
        ' and reconstruction. Inputs can be EVNT, with outputs of RDO, ESD, AOD or DPDs.'
        ' See https://twiki.cern.ch/twiki/bin/viewauth/AtlasComputing/FastChainTf for more details.')
    # Common arguments
    addAthenaArguments(trf.parser)
    addDetectorArguments(trf.parser)
    addTriggerArguments(trf.parser)
    # Reconstruction arguments and outputs (use the factorised 'do it all' function)
    addAllRecoArgs(trf)
    # Simulation and digitisation options.
    # Fix: addCommonSimDigTrfArgs was previously called a second time further
    # down, registering the same argument group twice; one call suffices.
    addCommonSimTrfArgs(trf.parser)
    addCommonSimDigTrfArgs(trf.parser)
    addBasicDigiArgs(trf.parser)
    addSim_tfArgs(trf.parser)
    # addForwardDetTrfArgs(trf.parser)
    addTrackRecordArgs(trf.parser)
    addFastChainTrfArgs(trf.parser)
    return trf
def addFastChainTrfArgs(parser):
    """Register the fast-chain-specific pre/post exec and include options
    on *parser*, all grouped under 'FastChain'."""
    parser.defineArgGroup('FastChain','Fast chain options')
    # Each option is a list-valued argument attached to the FastChain group
    optionSpecs = (
        ('--preSimExec', 'preExec before simulation step'),
        ('--postSimExec', 'postExec after simulation step'),
        ('--preDigiExec', 'preExec before digitisation step'),
        ('--preSimInclude', 'preInclude before simulation step'),
        ('--postSimInclude', 'postInclude after simulation step'),
        ('--preDigiInclude', 'preInclude before digitisation step'),
    )
    for optionName, helpText in optionSpecs:
        parser.add_argument(optionName, type=argFactory(argList), nargs='+',
                            help=helpText, group='FastChain')
# Standard entry point when invoked as a script
if __name__ == '__main__':
    main()
# skeleton.EVGENtoRDO.py
# skeleton file for running simulation+digi in one job for FastChain
# currently using full simulation and digi, will swap in fast components later
# Richard Hawkings, Dec 2014, based on work by Robert Harrington
# started by merging simulation and digitisaton skeletons, then modifying
# removed cosmics, beam gas/halo and pileup configuration
# trigger will not be run
### Start of Sim
## Include common skeleton
include("SimuJobTransforms/skeleton.EVGENtoHIT.py")
# This skeleton only supports ISF; plain-G4 jobs must use AtlasG4_tf.py instead
if hasattr(runArgs, 'useISF') and not runArgs.useISF:
    raise RuntimeError("Unsupported configuration! If you want to run with useISF=False, please use AtlasG4_tf.py!")
## Get the logger
from AthenaCommon.Logging import *
fast_chain_log = logging.getLogger('ISF')
fast_chain_log.info('****************** STARTING ISF ******************')
### Force trigger to be off
from RecExConfig.RecFlags import rec
rec.doTrigger.set_Value_and_Lock(False)
## Simulation flags need to be imported first
from G4AtlasApps.SimFlags import simFlags
simFlags.load_atlas_flags()
simFlags.ISFRun=True
from ISF_Config.ISF_jobProperties import ISF_Flags
## Set simulation geometry tag
# NOTE(review): globalflags is not imported in this file before this point --
# presumably brought into scope by the skeleton include above; confirm.
if hasattr(runArgs, 'geometryVersion'):
    simFlags.SimLayout.set_Value_and_Lock(runArgs.geometryVersion)
    globalflags.DetDescrVersion = simFlags.SimLayout.get_Value()
    fast_chain_log.debug('SimLayout set to %s' % simFlags.SimLayout)
else:
    # geometryVersion is mandatory for this job
    raise RuntimeError("No geometryVersion provided.")
## AthenaCommon flags
from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
# Jobs should stop if an include fails.
if hasattr(runArgs, "IgnoreConfigError"):
    athenaCommonFlags.AllowIgnoreConfigError = runArgs.IgnoreConfigError
else:
    athenaCommonFlags.AllowIgnoreConfigError = False
# presumably signals that sim and digi run in one athena job -- confirm
athenaCommonFlags.DoFullChain=True
from AthenaCommon.BeamFlags import jobproperties
## Input Files
def setInputEvgenFileJobProperties(InputEvgenFile):
    """Point the job at InputEvgenFile: lock the input format to 'pool' and
    register the file with both PoolEvgenInput and FilesInput."""
    from AthenaCommon.GlobalFlags import globalflags
    from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
    globalflags.InputFormat.set_Value_and_Lock('pool')
    for inputFlag in (athenaCommonFlags.PoolEvgenInput, athenaCommonFlags.FilesInput):
        inputFlag.set_Value_and_Lock(InputEvgenFile)
if hasattr(runArgs, "inputFile"):
    athenaCommonFlags.FilesInput.set_Value_and_Lock( runArgs.inputFile )
# We don't expect both inputFile and inputEVNT*File to be specified
# Pick the first matching EVNT-type input argument; with no input at all,
# fall back to cosmics (if the beam type says so) or generator-on-the-fly.
if hasattr(runArgs, "inputEVNTFile"):
    setInputEvgenFileJobProperties( runArgs.inputEVNTFile )
elif hasattr(runArgs, "inputEVNT_COSMICSFile"):
    setInputEvgenFileJobProperties( runArgs.inputEVNT_COSMICSFile )
elif hasattr(runArgs, "inputEVNT_CAVERNFile"):
    setInputEvgenFileJobProperties( runArgs.inputEVNT_CAVERNFile )
elif hasattr(runArgs, "inputEVNT_STOPPEDFile"):
    setInputEvgenFileJobProperties( runArgs.inputEVNT_STOPPEDFile )
elif jobproperties.Beam.beamType.get_Value() == 'cosmics':
    fast_chain_log.debug('No inputEVNTFile provided. OK, as performing cosmics simulation.')
    athenaCommonFlags.PoolEvgenInput.set_Off()
else:
    fast_chain_log.info('No inputEVNTFile provided. Assuming that you are running a generator on the fly.')
    athenaCommonFlags.PoolEvgenInput.set_Off()
## Output hits file config
if hasattr(runArgs, "outputHITSFile"):
    athenaCommonFlags.PoolHitsOutput.set_Value_and_Lock( runArgs.outputHITSFile )
else:
    # No HITS output requested; only write the stopped-particle file if asked
    if hasattr(runArgs, "outputEVNT_STOPPEDFile"):
        simFlags.StoppedParticleFile.set_Value_and_Lock( runArgs.outputEVNT_STOPPEDFile )
    #raise RuntimeError("No outputHITSFile provided.")
    fast_chain_log.info('No outputHITSFile provided. This simulation job will not write out any HITS file.')
    athenaCommonFlags.PoolHitsOutput = ""
    athenaCommonFlags.PoolHitsOutput.statusOn = False
#==============================================================
# Job Configuration parameters:
#==============================================================
## Pre-exec
if hasattr(runArgs, "preSimExec"):
    fast_chain_log.info("transform pre-sim exec")
    for cmd in runArgs.preSimExec:
        fast_chain_log.info(cmd)
        # exec of operator-supplied snippets is standard transform practice;
        # these come from the job configuration, not untrusted user input
        exec(cmd)
## Pre-include
if hasattr(runArgs, "preSimInclude"):
    for fragment in runArgs.preSimInclude:
        include(fragment)
# Avoid command line preInclude for stopped particles
if hasattr(runArgs, "inputEVNT_STOPPEDFile"):
    include('SimulationJobOptions/preInclude.ReadStoppedParticles.py')
# Avoid command line preInclude for cavern background
if hasattr(runArgs, "inputEVNT_CAVERNFile"):
    include('SimulationJobOptions/preInclude.G4ReadCavern.py')
if hasattr(runArgs, "outputEVNT_CAVERNTRFile"):
    include('SimulationJobOptions/preInclude.G4WriteCavern.py')
## Select detectors
# NOTE(review): indentation was lost in this view; the default DetFlags
# configuration below is reconstructed as living entirely inside the guard
# (applied only when the user has not configured DetFlags) -- confirm
# against the original file.
if 'DetFlags' not in dir():
    ## If you configure one det flag, you're responsible for configuring them all!
    from AthenaCommon.DetFlags import DetFlags
    DetFlags.all_setOn()
    DetFlags.LVL1_setOff() # LVL1 is not part of G4 sim
    DetFlags.Truth_setOn()
    DetFlags.digitize.LVL1_setOff()
    # note this makeRIO enables forward detectors, so have to set them off after
    DetFlags.makeRIO.all_setOn()
    DetFlags.Forward_setOff()
    DetFlags.ZDC_setOff()
    DetFlags.digitize.ZDC_setOff()
    DetFlags.digitize.Micromegas_setOff()
    DetFlags.digitize.sTGC_setOff()
# removed configuration of forward detectors from standard simulation config
# corresponding code block removed
## Set the PhysicsList
if hasattr(runArgs, 'physicsList'):
    simFlags.PhysicsList = runArgs.physicsList
## Random seed
if hasattr(runArgs, "randomSeed"):
    simFlags.RandomSeedOffset = int(runArgs.randomSeed)
else:
    fast_chain_log.warning('randomSeed not set')
## Don't use the SeedsG4 override
simFlags.SeedsG4.set_Off()
## Set the Run Number (if required)
# DataRunNumber takes precedence; otherwise derive a run number from jobNumber
if hasattr(runArgs,"DataRunNumber"):
    if runArgs.DataRunNumber>0:
        fast_chain_log.info( 'Overriding run number to be: %s ', runArgs.DataRunNumber )
        simFlags.RunNumber=runArgs.DataRunNumber
elif hasattr(runArgs,'jobNumber'):
    if runArgs.jobNumber>=0:
        fast_chain_log.info( 'Using job number '+str(runArgs.jobNumber)+' to derive run number.' )
        simFlags.RunNumber = simFlags.RunDict.GetRunNumber( runArgs.jobNumber )
        fast_chain_log.info( 'Set run number based on dictionary to '+str(simFlags.RunNumber) )
## removed code block for handling cosmics track record
# get top sequence
from AthenaCommon.AlgSequence import AlgSequence
topSeq = AlgSequence()
## Set Overall per-Algorithm time-limit on the AlgSequence
# NOTE(review): Units is not imported in this file -- presumably provided by
# an earlier include (e.g. AthenaCommon.SystemOfUnits); confirm.
topSeq.TimeOut = 43200 * Units.s
# Best-effort timing algorithm; missing RecAlgs only costs the timing ntuple
try:
    from RecAlgs.RecAlgsConf import TimingAlg
    topSeq+=TimingAlg("SimTimerBegin", TimingObjOutputName = "EVNTtoHITS_timings")
except:
    fast_chain_log.warning('Could not add TimingAlg, no timing info will be written out.')
from ISF_Config.ISF_jobProperties import ISF_Flags
if hasattr(runArgs, 'simulator'):
    ISF_Flags.Simulator = runArgs.simulator
else:
    # default simulator flavour when none is requested
    ISF_Flags.Simulator = 'MC12G4'
#### *********** import ISF_Example code here **************** ####
include("ISF_Config/ISF_ConfigJobInclude.py")
## Add AMITag MetaData to TagInfoMgr
if hasattr(runArgs, 'AMITag'):
    if runArgs.AMITag != "NONE":
        from AthenaCommon.AppMgr import ServiceMgr as svcMgr
        svcMgr.TagInfoMgr.ExtraTagValuePairs += ["AMITag", runArgs.AMITag]
## Increase max RDO output file size to 10 GB
## NB. We use 10GB since Athena complains that 15GB files are not supported
from AthenaCommon.AppMgr import ServiceMgr as svcMgr
svcMgr.AthenaPoolCnvSvc.MaxFileSizes = [ "10000000000" ]
### Changing to post-sim include/exec
## Post-include
if hasattr(runArgs, "postSimInclude"):
    for fragment in runArgs.postSimInclude:
        include(fragment)
# Avoid command line postInclude for stopped particles
if hasattr(runArgs, "outputEVNT_STOPPEDFile"):
    include('SimulationJobOptions/postInclude.StoppedParticleWrite.py')
## Post-exec
if hasattr(runArgs, "postSimExec"):
    fast_chain_log.info("transform post-sim exec")
    for cmd in runArgs.postSimExec:
        fast_chain_log.info(cmd)
        exec(cmd)
## Always enable the looper killer, unless it's been disabled
if not hasattr(runArgs, "enableLooperKiller") or runArgs.enableLooperKiller:
    # Registered as a post-init hook so it runs after G4 is set up
    def use_looperkiller():
        from G4AtlasApps import PyG4Atlas, AtlasG4Eng
        lkAction = PyG4Atlas.UserAction('G4UserActions', 'LooperKiller', ['BeginOfRun', 'EndOfRun', 'BeginOfEvent', 'EndOfEvent', 'Step'])
        AtlasG4Eng.G4Eng.menu_UserActions.add_UserAction(lkAction)
    simFlags.InitFunctions.add_function("postInit", use_looperkiller)
else:
    fast_chain_log.warning("The looper killer will NOT be run in this job.")
### End of Sim
### Start of Digi
include("SimuJobTransforms/CommonSkeletonJobOptions.py")
if hasattr(runArgs, "jobNumber"):
if runArgs.jobNumber < 1:
raise ValueError('jobNumber must be a postive integer. %s lies outside this range', str(runArgs.jobNumber))
from AthenaCommon.GlobalFlags import globalflags
if hasattr(runArgs,"geometryVersion"):
# strip _VALIDATION
print "stripping _VALIDATION"
if runArgs.geometryVersion.endswith("_VALIDATION"):
pos=runArgs.geometryVersion.find("_VALIDATION")
globalflags.DetDescrVersion.set_Value_and_Lock( runArgs.geometryVersion[:pos] )
else:
globalflags.DetDescrVersion.set_Value_and_Lock( runArgs.geometryVersion )
### Do not invoke another logger
# get the logger
#from AthenaCommon.Logging import logging
digilog = logging.getLogger('Digi_trf')
fast_chain_log.info( '****************** STARTING DIGITIZATION *****************' )
fast_chain_log.info( '**** Transformation run arguments' )
fast_chain_log.info( str(runArgs) )
#==============================================================
# Job Configuration parameters:
#==============================================================
### Changing pre-exec to pre-digi exec
## Pre-exec
if hasattr(runArgs,"preDigiExec"):
fast_chain_log.info("transform pre-digi exec")
for cmd in runArgs.preDigiExec:
fast_chain_log.info(cmd)
exec(cmd)
## Pre-include
if hasattr(runArgs,"preDigiInclude"):
for fragment in runArgs.preDigiInclude:
include(fragment)
#--------------------------------------------------------------
# Override pile-up configuration on the command-line
#--------------------------------------------------------------
from Digitization.DigitizationFlags import digitizationFlags
#--------------------------------------------------------------
# Get the flags
#--------------------------------------------------------------
# Random seed offsets for digitisation; fall back to fixed defaults (1, 2)
# with a warning when not given on the command line
if hasattr(runArgs,"digiSeedOffset1"):
    digitizationFlags.rndmSeedOffset1=int(runArgs.digiSeedOffset1)
else:
    fast_chain_log.warning( 'digiSeedOffset1 not set' )
    digitizationFlags.rndmSeedOffset1=1
if hasattr(runArgs,"digiSeedOffset2"):
    digitizationFlags.rndmSeedOffset2=int(runArgs.digiSeedOffset2)
else:
    fast_chain_log.warning( 'digiSeedOffset2 not set' )
    digitizationFlags.rndmSeedOffset2=2
if hasattr(runArgs,"samplingFractionDbTag"): #FIXME change this to PhysicsList?
    digitizationFlags.physicsList=runArgs.samplingFractionDbTag
if hasattr(runArgs,"digiRndmSvc"):
    digitizationFlags.rndmSvc=runArgs.digiRndmSvc
if hasattr(runArgs,"conditionsTag"):
    if(runArgs.conditionsTag!='NONE'):
        digitizationFlags.IOVDbGlobalTag = runArgs.conditionsTag
### Avoid meta data reading
digitizationFlags.overrideMetadata=['ALL']
#--------------------------------------------------------------
# Pileup configuration - removed as pileup will be handled on-the-fly
#--------------------------------------------------------------
#--------------------------------------------------------------
# Other configuration: LVL1, turn off sub detectors, calo noise
#--------------------------------------------------------------
# doAllNoise overrides the three per-system noise flags together.
# NOTE(review): the comparison is against the *string* "True" -- presumably
# the run argument arrives as a string from the command line; confirm.
if hasattr(runArgs,"doAllNoise"):
    if runArgs.doAllNoise!="NONE":
        fast_chain_log.info('doAllNoise = %s: Overriding doInDetNoise, doCaloNoise and doMuonNoise', runArgs.doAllNoise)
        if runArgs.doAllNoise=="True":
            digitizationFlags.doInDetNoise=True
            digitizationFlags.doCaloNoise=True
            digitizationFlags.doMuonNoise=True
        else:
            digitizationFlags.doInDetNoise=False
            digitizationFlags.doCaloNoise=False
            digitizationFlags.doMuonNoise=False
### No RDO output
## Output RDO File
# NOTE(review): indentation was lost in this view; the nesting below is
# reconstructed (AddCaloDigi handled only when an RDO output is configured;
# writeRDOPool forced on unconditionally) -- confirm against the original.
if hasattr(runArgs,"outputRDOFile") or hasattr(runArgs,"tmpRDO"):
    if hasattr(runArgs,"outputRDOFile"):
        if hasattr(runArgs,"tmpRDO"):
            # outputRDOFile and tmpRDO are mutually exclusive
            fast_chain_log.fatal("Both outputRDOFile and tmpRDO specified - this configuration should not be used!")
            raise SystemError
        athenaCommonFlags.PoolRDOOutput.set_Value_and_Lock( runArgs.outputRDOFile )
    if hasattr(runArgs,"tmpRDO"):
        athenaCommonFlags.PoolRDOOutput.set_Value_and_Lock( runArgs.tmpRDO )
    if hasattr(runArgs, "AddCaloDigi"):
        AddCaloDigi = runArgs.AddCaloDigi
        if AddCaloDigi:
            fast_chain_log.info("Will write out all LArDigitContainers and TileDigitsContainers to RDO file.")
            digitizationFlags.experimentalDigi+=["AddCaloDigi"]
else:
    fast_chain_log.info("no output file (outputRDOFile or tmpRDO) specified - switching off output StreamRDO")
# force writing of RDO file
fast_chain_log.info('Forcing writeRDOPool all on')
DetFlags.writeRDOPool.all_setOn()
#--------------------------------------------------------------
# Go for it
#--------------------------------------------------------------
if hasattr(runArgs,"DataRunNumber"):
    if runArgs.DataRunNumber>0:
        fast_chain_log.info( 'Overriding run number to be: %s ', runArgs.DataRunNumber )
        digitizationFlags.dataRunNumber=runArgs.DataRunNumber
# debug leftover (Python 2 print statement) -- candidate for removal
print "lvl1: -14... " + str(DetFlags.digitize.LVL1_on())
### Set digitize all except forward detectors
DetFlags.digitize.all_setOn()
DetFlags.digitize.LVL1_setOff()
DetFlags.digitize.ZDC_setOff()
DetFlags.digitize.Micromegas_setOff()
DetFlags.digitize.sTGC_setOff()
DetFlags.digitize.Forward_setOff()
DetFlags.digitize.Lucid_setOff()
DetFlags.digitize.AFP_setOff()
DetFlags.digitize.ALFA_setOff()
from AthenaCommon.AlgSequence import AlgSequence
topSeq = AlgSequence()
## Set Overall per-Algorithm time-limit on the AlgSequence
# NOTE(review): Units is not imported in this file -- presumably provided by
# an earlier include; confirm.
topSeq.TimeOut = 43200 * Units.s
# Best-effort timing algorithm for the digi step, mirroring the sim step
try:
    from RecAlgs.RecAlgsConf import TimingAlg
    topSeq+=TimingAlg("DigiTimerBegin", TimingObjOutputName = "HITStoRDO_timings")
except:
    fast_chain_log.warning('Could not add TimingAlg, no timing info will be written out.')
include ("Digitization/Digitization.py")
if hasattr(runArgs,"AMITag"):
    from AthenaCommon.AppMgr import ServiceMgr as svcMgr
    svcMgr.TagInfoMgr.ExtraTagValuePairs += ["AMITag", runArgs.AMITag ]
### No RDO output to increase file size of
# Increase max RDO output file size to 10 GB
#from AthenaCommon.AppMgr import ServiceMgr as svcMgr
#svcMgr.AthenaPoolCnvSvc.MaxFileSizes = [ "10000000000" ] #[ "15000000000" ] #Athena complains that 15GB files are not supported
## Post-include
if hasattr(runArgs,"postInclude"):
    for fragment in runArgs.postInclude:
        include(fragment)
## Post-exec
if hasattr(runArgs,"postExec"):
    fast_chain_log.info("transform post-exec")
    for cmd in runArgs.postExec:
        fast_chain_log.info(cmd)
        exec(cmd)
### End of Digi
#from AthenaCommon.ConfigurationShelve import saveToAscii
#saveToAscii('config.txt')
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment