Skip to content
Snippets Groups Projects
Commit e6db879d authored by Atlas-Software Librarian's avatar Atlas-Software Librarian Committed by Graeme Stewart
Browse files

'CMakeLists.txt' (EventDisplaysOnline-00-02-19)

parent 83e07316
No related branches found
No related tags found
No related merge requests found
Showing
with 968 additions and 0 deletions
################################################################################
# Package: EventDisplaysOnline
#
# CMake configuration: installs the package's python modules, jobOptions
# and shell scripts into the release.
################################################################################

# Declare the package name:
atlas_subdir( EventDisplaysOnline )

# Install files from the package:
atlas_install_python_modules( python/*.py )
atlas_install_joboptions( share/*.py )
atlas_install_scripts( scripts/*.sh )
# Legacy CMT requirements file (pre-CMake build system); declares the same
# installation patterns as the CMakeLists.txt above.
package EventDisplaysOnline
author Sebastian Boeser <sboeser@hep.ucl.ac.uk>

use AtlasPolicy AtlasPolicy-*

# Install jobOptions, python modules and scripts
apply_pattern declare_joboptions files="*.py"
apply_pattern declare_python_modules files="*.py"
apply_pattern declare_scripts files="../scripts/*.sh"
# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
def GetRunType():
    """Get the run type by reading the run-type setting in the partition from IS.

    Falls back to the partition name "EventDisplays" if TDAQ_PARTITION is not
    set, and to the run type "collisions" if IS cannot be read. Aborts the job
    (sys.exit) if the run type obtained is not one of the known values.

    Returns:
        str: one of 'collisions', 'singlebeam' or 'cosmics'.
    """
    # Get a logger so we can show formatted messages
    from AthenaCommon.Logging import logging
    mlog = logging.getLogger('EventDisplays')

    # Try to get the partition name from the TDAQ environment
    try:
        import os
        partition = os.environ['TDAQ_PARTITION']
    except KeyError:
        partition = "EventDisplays"
        mlog.warning("TDAQ_PARTITION not defined in environment, using %s as default" % partition)
    mlog.debug('Probing partition %s for RunType' % partition)

    # Now try and read the information from IS
    try:
        from ipc import IPCPartition
        from ispy import ISObject
        runparams = ISObject(IPCPartition(partition), 'RunParams.RunParams', 'RunParams')
        runparams.checkout()
        runType = runparams.run_type
    except UserWarning as err:
        # NOTE: 'except X, err' was Python-2-only syntax; 'as err' works in 2.6+ and 3.
        mlog.error(err)
        # Set the default runtype
        runType = "collisions"
        mlog.warning("Failed to read run type from IS, using %s as default" % runType)

    # Validate after the try/except. The original did this in a 'finally'
    # block, where 'runType' could be unbound (raising a NameError that masks
    # the real problem) if any exception other than UserWarning escaped the
    # try block above.
    if runType not in ['collisions', 'singlebeam', 'cosmics']:
        mlog.fatal("Invalid run type: %s" % runType)
        import sys
        sys.exit(1)

    mlog.info("Setting run type to: %s" % runType)
    return runType
def GetBFields():
    """Read the solenoid/toroid magnet status from the 'initial' partition in IS.

    A magnet is considered ON when its current is above 1000 A and the DCS
    reading is flagged valid. Aborts the job (sys.exit) if the information
    cannot be read, since the initial partition should always be accessible.

    Returns:
        tuple(bool, bool): (solenoidOn, toroidOn)
    """
    # Get a logger so we can show formatted messages
    from AthenaCommon.Logging import logging
    mlog = logging.getLogger('EventDisplays')

    # BFields are read from the initial partition
    partition = 'initial'
    mlog.debug("Trying to read magnetic field configuration from partition %s" % partition)

    # Now try and read the information from IS
    try:
        from ipc import IPCPartition
        from ispy import ISObject
        # Get hold of the initial partition
        ipcPart = IPCPartition(partition)
        if not ipcPart.isValid():
            raise UserWarning("Partition %s invalid - cannot access magnetic field setting" % partition)
        # Get the current and valid status
        toroidCurrent = ISObject(ipcPart, 'DCS_GENERAL.MagnetToroidsCurrent.value', 'DdcFloatInfo')
        solenoidCurrent = ISObject(ipcPart, 'DCS_GENERAL.MagnetSolenoidCurrent.value', 'DdcFloatInfo')
        toroidInvalid = ISObject(ipcPart, 'DCS_GENERAL.MagnetToroidsCurrent.invalid', 'DdcIntInfo')
        solenoidInvalid = ISObject(ipcPart, 'DCS_GENERAL.MagnetSolenoidCurrent.invalid', 'DdcIntInfo')
        toroidCurrent.checkout()
        solenoidCurrent.checkout()
        toroidInvalid.checkout()
        solenoidInvalid.checkout()
        # And calculate the flags: on = current above threshold AND reading valid
        solenoidOn = ((solenoidCurrent.value > 1000.) and (solenoidInvalid.value == 0))
        toroidOn = ((toroidCurrent.value > 1000.) and (toroidInvalid.value == 0))
    except UserWarning as err:
        # NOTE: 'except X, err' was Python-2-only syntax; 'as err' works in 2.6+ and 3.
        mlog.error(err)
        # Should always be able to access the initial partition
        mlog.fatal("Failed to read magnetic field configuration from IS, aborting")
        import sys
        sys.exit(1)

    # Print the result
    mlog.info("Magnetic field in solenoid is %s" % ((solenoidOn and "ON") or "OFF"))
    mlog.info("Magnetic field in toroid is %s" % ((toroidOn and "ON") or "OFF"))
    # Finally return our values
    return (solenoidOn, toroidOn)
if __name__ == "__main__":
    # Simple self-test: print the run type and field configuration.
    # The print statements were Python-2-only; the parenthesized single-string
    # form below produces identical output under both Python 2 and 3.
    runType = GetRunType()
    print("RunType: %s" % runType)
    bFields = GetBFields()
    print("BFields (Sol,Tor): %s" % (bFields,))
# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
#!/bin/bash
##########################################################################
#
# Loops over all directories given as first parameter, looking for files
# atlantis and VP1 output files and keeping their number limited to the
# paramter given as second argument. This is repeated in the interval given
# as third argument
#
# author: Sebastian Boeser <sboeser@hep.ucl.ac.uk>
#
###########################################################################
## Dump the parsed configuration (globals set by get_args) to stdout
print_setup(){
    printf 'DATAPATH=%s\n' "${DATAPATH}"
    printf 'KEEPFILES=%s\n' "${KEEPFILES}"
    printf 'UPDATEINT=%s\n' "${UPDATEINT}"
    printf '%s\n' "================================================================================"
}
## Print a short usage statement
## (note: "intervall" is a typo kept as-is since it is user-visible output)
print_usage(){
    echo "Usage: $0 <data-path> <keep-files> <update-intervall>"
    echo " <data-path>: directories to search for Atlantis and VP1 files"
    echo " <keep-files>: number of files to keep in each directory"
    echo " <update-intervall>: repeat removal at this interval in seconds"
}
## get the commandline args
## $1: colon-separated list of directories to prune
## $2: number of files to keep per directory (1-99999)
## $3: update interval in seconds (1-9999)
## Sets the globals DATAPATH, KEEPFILES, UPDATEINT; exits on invalid input.
get_args(){
    #Check we have the proper number
    if test $# != 3; then
        print_usage
        exit 1
    fi
    DATAPATH=""
    #First is pathes to look at (colon-separated, split on ':')
    # NOTE(review): $dir is unquoted below, so directory names containing
    # spaces would be split - confirm callers never pass such paths
    for dir in `echo $1 | sed 's/:/ /g'`; do
        if [ ! -d $dir ]; then
            echo "Directory '$dir' does not exist - removed from list!"
        else
            DATAPATH="${DATAPATH} $dir"
        fi
    done
    #Check there is at least one dir left
    if [[ $DATAPATH == "" ]]; then
        echo "No directories to prune - exiting"
        exit 1
    fi
    KEEPFILES=0
    #now check keepfiles: only values in 1..99999 are accepted,
    #anything else leaves KEEPFILES=0 and triggers the error below
    if (( $2 > 0 )); then
        if (($2 < 100000 )) ; then
            KEEPFILES=$2
        fi
    fi
    if (( $KEEPFILES == 0 )); then
        echo "Invalid number of files to keep: $2 (allowed range 1-99999) - exiting!"
        exit 1;
    fi
    UPDATEINT=0
    #now check update interval: only values in 1..9999 seconds are accepted
    if (( $3 > 0 )); then
        if (($3 < 10000 )) ; then
            UPDATEINT=$3
        fi
    fi
    if (( $UPDATEINT == 0 )); then
        echo "Invalid update intervall: $3 (allowed range 1-9999) - exiting!"
        exit 1;
    fi
}
## Remove the oldest files matching a pattern, keeping at most a given number
## $1: directory to prune
## $2: number of files to keep
## $3: file name glob (e.g. "JiveXML_*.xml")
prune_dir(){
    if [ ! -z "${EVENTDISPLAY_VERBOSE}" ]; then
        echo "Entering $1 looking for $3 ..."
    fi
    #Get list of Atlantis files
    #Find file matching pattern, print last change time (unix secs) and name,
    #sort by time, then only keep filename (oldest first)
    FILES=$( find $1 -name "$3" -printf "%C@ %f \n" | sort -n | awk '{print $2}' )
    #Calculate total number of files found
    NTOTAL=`echo $FILES | wc -w`
    #Calculate number of files to delete
    NDELETE=$(( $NTOTAL-$2 ))
    if (( $NDELETE <= 0 )); then
        echo "Only $NTOTAL files total - not deleting any!"
        return
    fi
    #Be verbose
    echo "Removing $NDELETE files of $NTOTAL total"
    if [ ! -z "${EVENTDISPLAY_VERBOSE}" ]; then
        VERBOSE=" -v"
    fi
    #Now loop and delete, oldest first; stop once NDELETE successful removals
    for file in $FILES; do
        #Delete files
        if rm $VERBOSE -f $1/$file; then
            #Count successfully deleted
            NDELETE=$(( $NDELETE-1 ))
        fi
        #Break if we have deleted enough
        if (( $NDELETE == 0)); then
            break
        fi
    done
}
## Sleep for the requested number of seconds
## $1: seconds to wait; progress is printed when EVENTDISPLAY_VERBOSE is set
wait_for(){
    if [ -n "${EVENTDISPLAY_VERBOSE}" ]; then
        date "+%Y-%m-%d %H:%M:%S"
        echo -n "Now sleeping for $1 seconds ..."
    fi
    sleep $1
    if [ -n "${EVENTDISPLAY_VERBOSE}" ]; then
        echo " - done"
    fi
}
## Write a file list consisting of run number, event number and associated data files
## $1: directory holding JiveXML_*.xml (Atlantis) and vp1_* (VP1) event files
## Only events that have BOTH an atlantis and a vp1 file are listed.
file_list(){
    if [ ! -z "${EVENTDISPLAY_VERBOSE}" ]; then
        echo "Creating file list for directory $1 ..."
    fi
    #Now generate the listing by
    #finding all files
    find $1 -name "*[0-9]*_[0-9]*" -printf "%f\n" |
    #printing run_event;filename and sorting it
    sed 's/vp1_\([0-9]*\)_\([0-9]*\).*/\1_\2;\0/' |
    sed 's/JiveXML_\([0-9]*\)_0*\([0-9]*\)\.xml/\1_\2;\0/' |
    sort -n |
    #Using awk to build an event table and print it
    awk -F ";" '/JiveXML/{ events[$1]=(events[$1] ",atlantis:" $2) }
    /vp1/{ events[$1]=(events[$1] ",vp1:" $2) }
    END{ for (evt in events) {
        if ( (index(events[evt],"vp1") > 0) && (index(events[evt],"atlantis") > 0)){
            split(evt,nums,"_");
            printf "run:%012i,event:%012i%s\n",nums[1],nums[2],events[evt]
        }
    }}' |
    #And sorting again as awk does not provide sorted output
    sort -n >| $1/events.list.new
    #Now move the new list on the old one
    # NOTE(review): the temp file is "events.list.new" but the target is
    # "event.list" (no trailing 's') - confirm which name consumers expect
    mv -f $1/events.list.new $1/event.list
}
##### This is the start of the actual script ####
#First of all setup the path
PATH=$PATH:$EVENTDISPLAY_SW_EXTRA_PATH
echo "EVENTDISPLAY_SW_EXTRA_PATH=$EVENTDISPLAY_SW_EXTRA_PATH"
echo "EVENTDISPLAY_VERBOSE=$EVENTDISPLAY_VERBOSE"
echo "PATH=$PATH"
echo "================== Start trimming of EventDisplay data ========================="
# NOTE(review): $@ is unquoted; arguments containing spaces would be re-split
get_args $@
print_setup
#enter endless loop (terminated only by killing the process)
while true; do
    #Now enter loop over the configured directories
    echo "==================== Entering loop over directories ============================"
    for dir in $DATAPATH; do
        prune_dir $dir $KEEPFILES "JiveXML_*.xml"
        prune_dir $dir $KEEPFILES "vp1_*.pool.root"
        file_list $dir
    done
    #then sleep
    wait_for $UPDATEINT
done
# NOTE(review): this line is unreachable - the while-loop above never exits
echo "===================== Finished pruning event display data ======================"
###
# Atlantis specific jOs for P1
# --> Do not rely on RecExCommission setup (doJiveXML),
#     but setup things ourselves
#
# Expects the including jobOptions to have defined: OutputDirectory,
# StreamName, ServiceMgr, and optionally AtlantisMsgLvl and Port.
###

### Enable Atlantis-flag (fake as this happens after we include RecExCommon)
from RecExConfig.RecFlags import rec
rec.doJiveXML.set_Value_and_Lock(True)

### Setup output level (default WARNING unless the caller defined it)
if not 'AtlantisMsgLvl' in dir():
    AtlantisMsgLvl=WARNING

### import and create top sequence
from AthenaCommon.AlgSequence import AlgSequence
topSequence = AlgSequence()

### Create a new algorithm
from JiveXML.JiveXMLConf import JiveXML__AlgoJiveXML
AlgoJiveXML = JiveXML__AlgoJiveXML(name="AlgoJiveXML",OutputLevel=AtlantisMsgLvl)

### Set the atlas release version
import os
AlgoJiveXML.AtlasRelease = os.environ["AtlasVersion"]

### Write the event data to disk and to a server
### - this is the same as AlgoJiveXML.WriteToFile = True
###   but allows to set the StreamName as prefix
from JiveXML.JiveXMLConf import JiveXML__StreamToFileTool
AlgoJiveXML.StreamTools += [
    JiveXML__StreamToFileTool(FileNamePrefix=OutputDirectory+"/JiveXML",
                              OutputLevel=AtlantisMsgLvl)
]
#Disable default tool
AlgoJiveXML.WriteToFile = False

### Online mode allows you to send events to
### AtlantiJava clients over a network connection
### - this is the same as AlgoJiveXML.OnlineMode = True
###   but allows to the set server port number
if not 'Port' in dir():
    Port=48965
from JiveXML.JiveXMLConf import JiveXML__XMLRPCStreamTool
AlgoJiveXML.StreamTools += [
    JiveXML__XMLRPCStreamTool(ServerPort=Port,OutputLevel=AtlantisMsgLvl)
]
#Disable default tool
AlgoJiveXML.OnlineMode = False

### Also stream events to the new external ONCRPC server
### If you test this from the command line, after setting up
### AltasHLT, the server can be started with 'jivexmlserver.exe'

#configure external ONCRCP server service to stream to the machine running our
#server instance and adopt the message level
if not 'ServerHost' in dir():
    ServerHost="pc-tdq-mon-31"
from JiveXML.JiveXMLConf import JiveXML__ExternalONCRPCServerSvc
ServiceMgr += JiveXML__ExternalONCRPCServerSvc("ExternalONCRPCServerSvc",
                                               Hostname=ServerHost,
                                               OutputLevel=AtlantisMsgLvl)
#And add a stream tool that stream using the current stream tag
from JiveXML.JiveXMLConf import JiveXML__StreamToServerTool
AlgoJiveXML.StreamTools += [
    JiveXML__StreamToServerTool(ServerService=ServiceMgr.ExternalONCRPCServerSvc,
                                StreamName=StreamName)
]

### Enable this to recreate the geometry XML files for Atlantis
AlgoJiveXML.WriteGeometry = False

### add the AlgoJiveXML
topSequence += AlgoJiveXML

### add a dummy flag so that AlgoJiveXML won't be reinstantiated
theEventData2XML=AlgoJiveXML
### Now add the data retrievers for sub-detectors
### (DetFlags, ToolSvc, Stream, Splash and include() are provided by the
### Athena jobOptions environment)
if DetFlags.detdescr.ID_on() :
    include ( "JiveXML/DataTypes_InDet.py" )
if DetFlags.detdescr.Calo_on():
    include ( "JiveXML/DataTypes_Calos.py" )
    #Switch on pulse shapes for Tiles
    ToolSvc.CaloTileRetriever.DoTileDigit = True
    ToolSvc.CaloTileRetriever.DoTileCellDetails = True
    #Switch on pulse shapes for MBTS
    ToolSvc.CaloMBTSRetriever.DoMBTSDigits = True
    #Also switch on LAR digits, but only for Calo streams
    if (Stream in ['L1Calo','L1CaloEM','MinBias']) and not Splash :
        include("EventDisplaysOnline/LArDigits_jobOptions.py")
if DetFlags.detdescr.Muon_on() :
    include ( "JiveXML/DataTypes_Muons.py" )
#Switch on trigger if trig data is there
if rec.doTrigger():
    include ( "JiveXML/DataTypes_Trig.py" )
    #Special options for ACR
    from TrigJiveXML.TrigJiveXMLConf import JiveXML__EmTauROIRetriever
    ToolSvc += JiveXML__EmTauROIRetriever (name = "EmTauROIRetriever",readCPM=True)
    from TrigJiveXML.TrigJiveXMLConf import JiveXML__JetROIRetriever
    ToolSvc += JiveXML__JetROIRetriever (name = "JetROIRetriever", readJEM = True)
### Always use this one
include ( "JiveXML/DataTypes_Reco.py" )
### Finally, print setup if in debug mode
if AtlantisMsgLvl <= DEBUG:
    print "\n\n\t Atlantis setup\n",AlgoJiveXML,"\n\n"
###
# Finally, set up the ByteStreamEmonController
###
#First get the partition to which the controller shall publish
try :
import os
publishToPartition = os.environ['TDAQ_PARTITION']
except KeyError :
publishToPartition = "EventDisplays"
mlog.warning("TDAQ_PARTITION not defined in environment, using %s as default"%publishToPartition)
#Setup and configure ISProperty
from ISProperty.ISPropertyConf import ISPropertySvc
ISPropertySvc = ISPropertySvc(PartitionName=publishToPartition,ForcePublish=True)
ServiceMgr += ISPropertySvc
### NOTE: make sure ISProperty is created before BSEmonCtrlSvc,
### otherwise the shutdown sequence will fail
theApp.CreateSvc += ['ISPropertySvc']
#Now create and configure the ByteStreamEmonController
from ByteStreamEmonController.ByteStreamEmonControllerConf import BSEmonCtrlSvc
#Create an instance with a stream-specific name
ByteStreamEmonCtrlSvc = BSEmonCtrlSvc(Stream+"-BSEmonCtrlSvc")
ServiceMgr += ByteStreamEmonCtrlSvc
#Add to list of services created right-away (otherwise not referenced)
theApp.CreateSvc += ["BSEmonCtrlSvc/"+Stream+"-BSEmonCtrlSvc"]
###
# Switch output level if requested
###
if not 'BSEmonMsgLvl' in dir():
BSEmonMsgLvl=WARNING
ByteStreamEmonCtrlSvc.OutputLevel=BSEmonMsgLvl
ISPropertySvc.OutputLevel=BSEmonMsgLvl
if BSEmonMsgLvl <= DEBUG:
print "\n\n\t ISPropertySvc setup:\n",ISPropertySvc,"\n\n"
print "\n\n\t ByteStreamEmonControl setup:\n",ByteStreamEmonCtrlSvc,"\n\n"
#########################################
#
# Example setup for ByteStreamEmonSvc
# (samples events from the online TDAQ system; all 'if not X in dir()'
# guards allow the including jobOptions to override the defaults)
#
#########################################

# Common part, copy from here
from AthenaCommon.AppMgr import ServiceMgr as svcMgr
from AthenaCommon.Constants import *
include("ByteStreamEmonSvc/ByteStreamEmonInputSvc_jobOptions.py")

# #####################################################
# If using the ERSBootstrap.py file, enable the output
# via ERS
# ####################################################
#MessageSvc.useErs = True

# Define the input
ByteStreamEmonInputSvc = svcMgr.ByteStreamInputSvc

# ############################################################
# The name of the partition you want to connect to is taken
# from 'TDAQ_PARTITION' if it exists, otherwise from here.
# ############################################################
if not 'Partition' in dir():
    Partition = 'ATLAS'
print "ByteStreamEmonInputSvc: Setting partition to : %s" % (Partition)
ByteStreamEmonInputSvc.Partition = Partition

# #########################################
# The source of events, SFI for full events
# #########################################
if not 'SamplingLevel' in dir():
    SamplingLevel="SFI"
ByteStreamEmonInputSvc.Key = SamplingLevel

# ############################################################
# A list of of key values, e.g. a list of SFIs to contact.
# If not defined, one event provider of this type (i.e. any SFI)
# ############################################################
#ByteStreamEmonInputSvc.KeyValue = ["SFI-1", "SFI-2", "SFI-3" ]

# #######################################
# Alternative: N providers of type 'SFI'
# KeyValue is ignored if this is set.
# ######################################
if not 'Nodes' in dir():
    Nodes=10
ByteStreamEmonInputSvc.KeyCount = Nodes
# #######################################
# Set this to the IS server where you want
# to publish histograms, too. If unset, no
# histograms are published.
# #######################################
#ByteStreamEmonInputSvc.ISServer = 'Histogramming'
# ########################################
# The provider name under which your histograms
# appear in OH.
# ########################################
#ByteStreamEmonInputSvc.PublishName = 'TRT_PT'
# ###################################################
# Should histograms be cleared at new run ? default: yes
# ###################################################
#ByteStreamEmonInputSvc.ClearHistograms = True
# ####################################################
# A regular expression to restrict which histograms are published.
# ####################################################
# ByteStreamEmonInputSvc.Include = '.*'
# ByteStreamEmonInputSvc.Exclude = ''
# ###############################################
# Frequency of updates (in number of events, not secs...)
# ###############################################
# ByteStreamEmonInputSvc.Frequency = 5
# #################################
# Set dispersion flag to false (so other can sample the same event
# #################################
# #################################
# Set dispersion flag to false (so others can sample the same event)
# #################################
ByteStreamEmonInputSvc.Dispersion=False

# #################################
# Set timeout (in ms?)
# #################################
ByteStreamEmonInputSvc.Timeout=600000

# ##############################
# one of 'Ignore', 'Or', 'And'
# ##############################
ByteStreamEmonInputSvc.LVL1Logic = "Ignore"

# #########################################
# One of 'TAP', 'TBP' or 'TAV' (default)
# #########################################
#ByteStreamEmonInputSvc.LVL1Origin = "TAV"

# ###########################################################
# A list of L1 bit names to select on. This requires the
# L1CT.TrigConfL1Items to be published in IS.
#
# This can be used instead of or in addition to LVL1Bits.
# ###########################################################
#ByteStreamEmonInputSvc.LVL1Names = ['L1_MU2', 'L1_MU3', 'L1_EM10' ]

# A list of numerical trigger bits instead of names. This is
# Or'ed with LVL1Names
#ByteStreamEmonInputSvc.LVL1Items = [ 10, 20, 72, 245 ]

# ###########################################
# Selection by stream tag:
# One of 'Or', 'And', 'Ignore' (default)
# ###########################################
## Set stream logic ignore for 'Random' stream
## ('Stream' is injected by the including jobOptions; Default/Test streams
## sample any event rather than requiring a matching stream tag)
if ( not 'Stream' in dir()) or (Stream == 'Default') or (Stream.startswith("Test")):
    ByteStreamEmonInputSvc.StreamLogic = "Ignore"
else:
    ByteStreamEmonInputSvc.StreamLogic = "And"

# ###########################################
# One of 'physics' or 'calibration'
# ############################################
#Make sure it is defined
if not 'StreamType' in dir():
    StreamType="physics"
ByteStreamEmonInputSvc.StreamType = StreamType

# ############################################
# A list of stream tag names
# ############################################
ByteStreamEmonInputSvc.StreamNames = [ Stream ]

# #################################################
# The event buffer size
# #################################################
ByteStreamEmonInputSvc.BufferSize = 30

# #################################################
# Shall athena exit if the partition is shutdown ?
# For offline athena tasks mainly.
# #################################################
#ByteStreamEmonInputSvc.ExitOnPartitionShutdown = False

#Register the bytestream conversion service
ByteStreamCnvSvc = Service( "ByteStreamCnvSvc" )
theApp.ExtSvc += [ "ByteStreamCnvSvc"]

###
# Switch output level if requested
###
if not 'BSEmonMsgLvl' in dir():
    BSEmonMsgLvl=WARNING
ByteStreamEmonInputSvc.OutputLevel=BSEmonMsgLvl

#Dump the configuration in debug mode
if BSEmonMsgLvl <= DEBUG:
    print "\n\n\t ByteStreamEmonInput setup:\n",ByteStreamEmonInputSvc,"\n\n"
#########################################
#
# Setup for running event displays at point1
# (top-level jobOptions; 'OutputDirectory', 'EvtMax', 'Stream', 'Splash'
# and 'StreamName' may be pre-set on the athena command line)
#
#########################################

#Define the general output directory for VP1 and Atlantis
#All events will be stored in subdirectories of that directory
if not 'OutputDirectory' in dir():
    OutputDirectory="/EventDisplayEvents/"
#Make sure it ends with a "/"
if not OutputDirectory.endswith("/"):
    OutputDirectory += "/"

#Make sure we run infinitely if not otherwise specified on the command line
from AthenaCommon.AthenaCommonFlags import jobproperties,athenaCommonFlags
if 'EvtMax' in dir():
    athenaCommonFlags.EvtMax.set_Value_and_Lock(EvtMax)
else:
    athenaCommonFlags.EvtMax.set_Value_and_Lock(-1)

#Get a logger so we can show formatted messages
from AthenaCommon.Logging import logging
mlog = logging.getLogger( 'EventDisplays' )

#Make sure we have a stream name defined
if not 'Stream' in dir():
    Stream="Default"

#Make sure the Splash flag exists
if not 'Splash' in dir():
    Splash=False

#Create phony stream name ("<Stream>-Splash" for splash events)
if not 'StreamName' in dir():
    StreamName = Stream+(Splash and '-Splash' or '')

#Assemble final output directory
OutputDirectory += StreamName
#Make sure the output directory exists and is writable
import os, stat
if os.access(OutputDirectory, os.F_OK):
    #Directory exists: it must be a directory we can write to
    if os.path.isdir(OutputDirectory) and os.access(OutputDirectory, os.W_OK):
        mlog.info("using existing output directory \'%s\' for stream \'%s\'" % (OutputDirectory, StreamName))
    else:
        mlog.fatal("cannot write to directory \'%s\'" % OutputDirectory)
        raise OSError("cannot write to directory \'%s\'" % OutputDirectory)
else:
    #Create the directory user-writable, group/other-traversable
    try:
        os.mkdir(OutputDirectory)
        os.chmod(OutputDirectory, stat.S_IRWXU | stat.S_IRWXG | stat.S_IROTH | stat.S_IXOTH)
        mlog.info("created output directory \'%s\' for stream \'%s\'" % (OutputDirectory, StreamName))
    except OSError as err:
        #The original passed a single tuple as the lazy-format argument
        #("msg", (a, b, c)), which makes the logging call itself raise a
        #formatting error ("not enough arguments") instead of logging;
        #format the message explicitly instead.
        #(Also: 'except OSError, err' was Python-2-only syntax.)
        mlog.fatal("failed to create output directory \'%s\' for stream \'%s\': %s" % (OutputDirectory, StreamName, err.strerror))
        raise err
#Bytestream input is configured here
include('EventDisplaysOnline/ByteStreamInput_jobOptions.py')

#Add reconstruction on top of that:
#full reco for normal runs, reduced reco for splash events
if not Splash:
    include('EventDisplaysOnline/RecoCommon_jobOptions.py')
else :
    include('EventDisplaysOnline/SplashEvent_jobOptions.py')

#Add Atlantis-specific jobOptions
include('EventDisplaysOnline/Atlantis_jobOptions.py')

#Add VP1-specific jobOptions
include('EventDisplaysOnline/VP1_jobOptions.py')

#Finally configure the ByteStreamController (after RecoCommon!)
#include('EventDisplaysOnline/ByteStreamController_jobOptions.py')
###
# Configure LAr retriever to write out digits
# (expects AlgoJiveXML and ToolSvc from the including Atlantis jobOptions)
###

#Add the single LAr digit retriever
AlgoJiveXML.DataTypes += ["JiveXML::LArDigitRetriever/LArDigitRetriever"]
#Remove the individual LAr non-digit retrievers
#(the digit retriever replaces them)
AlgoJiveXML.DataTypes.remove("JiveXML::CaloLArRetriever/CaloLArRetriever")
AlgoJiveXML.DataTypes.remove("JiveXML::CaloHECRetriever/CaloHECRetriever")
AlgoJiveXML.DataTypes.remove("JiveXML::CaloFCalRetriever/CaloFCalRetriever")
#Enable all digits
ToolSvc.LArDigitRetriever.DoLArDigit=True
ToolSvc.LArDigitRetriever.DoHECDigit=True
ToolSvc.LArDigitRetriever.DoFCalDigit=True
#----------------------------------------------- Get the run type from IS
from AthenaCommon.BeamFlags import jobproperties
from EventDisplaysOnline.EventDisplaysConfig import GetRunType
jobproperties.Beam.beamType.set_Value_and_Lock(GetRunType())

#----------------------------------------------- Set the magnetic field
from AthenaCommon.BFieldFlags import jobproperties
from EventDisplaysOnline.EventDisplaysConfig import GetBFields
(solenoidOn,toroidOn)=GetBFields();
jobproperties.BField.barrelToroidOn.set_Value_and_Lock(toroidOn);
jobproperties.BField.endcapToroidOn.set_Value_and_Lock(toroidOn);
jobproperties.BField.solenoidOn.set_Value_and_Lock(solenoidOn);

# ----------------------------------------------- Run configuration
from AthenaCommon.GlobalFlags import globalflags
globalflags.DataSource.set_Value_and_Lock('data')
globalflags.InputFormat.set_Value_and_Lock("bytestream")
globalflags.DetDescrVersion.set_Value_and_Lock('ATLAS-GEO-08-00-02')

# ----------------------------------------------- Use conditions according to beamType
conditionTags = { 'collisions' : "COMCOND-MONP-001-00",
                  'singlebeam' : "COMCOND-MONS-001-00",
                  'cosmics' : "COMCOND-MONC-003-00" }
globalflags.ConditionsTag.set_Value_and_Lock(conditionTags[jobproperties.Beam.beamType()])

# ----------------------------------------------- Online flag
from AthenaCommon.AthenaCommonFlags import jobproperties,athenaCommonFlags
athenaCommonFlags.isOnline = True # configures the job for online running
athenaCommonFlags.BSRDOInput.set_Value_and_Lock([]) # set proper bytestream input (for trigger)

#make sure ESD's and PoolFileCatalog from multiple jobs don't overwrite each other
import os
from random import randrange
randomString=str(randrange(0,100000))
athenaCommonFlags.PoolESDOutput = 'ESD_' + randomString +'.pool.root'
os.environ['POOL_CATALOG'] = 'xmlcatalog_file:PoolFileCatalog_'+randomString+'.xml'
athenaCommonFlags.BSRDOInput = []

# ----------------------------------------------- Output flags
from RecExConfig.RecFlags import rec
rec.doESD.set_Value_and_Lock(True)
rec.doAOD.set_Value_and_Lock(False)
rec.doDPD.set_Value_and_Lock(False)
rec.doWriteESD.set_Value_and_Lock(True)
rec.doWriteAOD.set_Value_and_Lock(False)
rec.doCBNT.set_Value_and_Lock(False)
rec.doWriteTAG.set_Value_and_Lock(False)
# ----------------------------------------------- Robust tracking for 30 Mar 2010
#from InDetRecExample.InDetJobProperties import InDetFlags
#InDetFlags.doRobustReco.set_Value_and_Lock(True)

# ----------------------------------------------- Reco flags
rec.doInDet.set_Value_and_Lock(True)
rec.doMuon.set_Value_and_Lock(True) # if True and doTrigger=True, set doID, doLAr, doTile to True
rec.doLArg.set_Value_and_Lock(True) # if True, set doID, doTile to True
rec.doTile.set_Value_and_Lock(True) # if True, set doID, doLAr to True
# --- Temporarily disable ZDC as it crashes ( savannah bug #59762 )
rec.UserFlags=["DetFlags.ZDC_setOff()"]
rec.doMonitoring.set_Value_and_Lock(False)
rec.doTrigger.set_Value_and_Lock(True)
rec.doHist.set_Value_and_Lock(False)
rec.doEgamma.set_Value_and_Lock(False)
rec.doTau.set_Value_and_Lock(False)

# ---------------------------------------------- Debug flags
rec.doPerfMon.set_Value_and_Lock(False) # optional for performance check
rec.doDetailedPerfMon.set_Value_and_Lock(False) # optional for performance check
rec.doNameAuditor.set_Value_and_Lock(False) # optional for debugging
rec.doDetStatus.set_Value_and_Lock(False) #

#------------------------------ Cosmics running -> do not use beam-spot
rec.Commissioning.set_Value_and_Lock(True)

# ----------------------------- LAr Online fix
include("RecExOnline/SimpleLarCondFlags.py")

####
# If LAr is running in transparent mode, these lines need to be commented
# Otherwise, they should be uncommented.
####
#from LArROD.LArRODFlags import larRODFlags
#larRODFlags.readDigits.set_Value_and_Lock(False)

# ----------------------------- Main jobOpt
include("RecExCommon/RecExCommon_topOptions.py")

# ----------------------------- Catalog file
Service("PoolSvc").SortReplicas = False
PoolSvc = Service( "PoolSvc" )
PoolSvc.ReadCatalog += ["xmlcatalog_file:/sw/DbData/poolcond/PoolCat_comcond.xml"]
PoolSvc.ReadCatalog += ["xmlcatalog_file:/det/dqm/AthenaMonitoring/AtlasTier0-15.4.0.2/DbData/poolcond/PoolCat_comcond.xml"]

# ------------------------------ Force run number for test partition
# Otherwise can't use TrigDecisionTool
if 'ForceRunNumber' in dir() :
    Service("IOVDbSvc").forceRunNumber = ForceRunNumber
    Service("IOVDbSvc").forceLumiblockNumber = 1

# --------------------------------- Printout
globalflags.print_JobProperties()

# ----------------------- Over-writes come at the end
MessageSvc = Service("MessageSvc")
if 'MsgLvl' in dir():
    MessageSvc.OutputLevel = MsgLvl
else:
    MessageSvc.OutputLevel = WARNING
# Load all the useful flags we might want to switch
# (RecoUsefulFlags provides rec, globalflags, jobproperties, muon flags etc.)
include ("RecExCommon/RecoUsefulFlags.py")

# ----------------------------------------------- Run configuration
globalflags.DataSource.set_Value_and_Lock('data')
globalflags.InputFormat.set_Value_and_Lock("bytestream")
globalflags.ConditionsTag.set_Value_and_Lock('COMCOND-ES1CT-002-00')
globalflags.DetDescrVersion.set_Value_and_Lock('ATLAS-GEO-08-00-02')

# ----------------------------------------------- Beam type fixed to singlebeam for splash
jobproperties.Beam.beamType.set_Value_and_Lock("singlebeam")

# ----------------------------------------------- Online flag
from AthenaCommon.AthenaCommonFlags import jobproperties,athenaCommonFlags
athenaCommonFlags.isOnline = True # configures the job for online running

# ----------------------------------------------- Input flags
#make sure ESD's and PoolFileCatalog from multiple jobs don't overwrite each other
import os
from random import randrange
randomString=str(randrange(0,100000))
athenaCommonFlags.PoolESDOutput = 'ESD_' + randomString +'.pool.root'
os.environ['POOL_CATALOG'] = 'xmlcatalog_file:PoolFileCatalog_'+randomString+'.xml'
athenaCommonFlags.BSRDOInput.set_Value_and_Lock([]) # set proper bytestream input (for trigger)

# ----------------------------------------------- Output flags
rec.doESD.set_Value_and_Lock(True)
rec.doAOD.set_Value_and_Lock(True)
rec.doWriteESD.set_Value_and_Lock(True)
rec.doWriteAOD.set_Value_and_Lock(False)
rec.doCBNT.set_Value_and_Lock(False)
rec.doWriteTAG.set_Value_and_Lock(False)

# ----------------------------------------------- Reco flags
# ------------ Inner Detector
rec.doInDet.set_Value_and_Lock(True)
# --- Minimal reco
from InDetRecExample.InDetJobProperties import InDetFlags
InDetFlags.doMinimalReco = True
InDetFlags.useDCS.set_Value_and_Lock(False)

# ------------ LAr
rec.doLArg.set_Value_and_Lock(True) # if True, set doID, doTile to True
# --- LAr Online fix
include("RecExOnline/SimpleLarCondFlags.py")

####
# If LAr is running in transparent mode, these lines need to be commented
# Otherwise, they should be uncommented.
####
#from LArROD.LArRODFlags import larRODFlags
#larRODFlags.readDigits.set_Value_and_Lock(False)

# ------------ Tile
rec.doTile.set_Value_and_Lock(True) # if True, set doID, doLAr to True

# ------------ Calo reco: disable clustering for splash events
from CaloRec.CaloRecFlags import jobproperties
jobproperties.CaloRecFlags.doEmCluster.set_Value_and_Lock(False)
jobproperties.CaloRecFlags.doCaloTopoCluster.set_Value_and_Lock(False)
jobproperties.CaloRecFlags.doCaloEMTopoCluster.set_Value_and_Lock(False)
# ------------ Muon Detector
rec.doMuon.set_Value_and_Lock(True) # if True and doTrigger=True, set doID, doLAr, doTile to True
# ---- muon settings from Rosy
# NOTE(review): muonRecFlags/muonCombinedRecFlags/mooreFlags/muonboyFlags are
# presumably provided by RecoUsefulFlags.py included above - verify
muonRecFlags.doMoore=False
muonCombinedRecFlags.doMuidMuonCollection=False
#muonRecFlags.doMuonboy=False
#muonCombinedRecFlags.doStacoMuonCollection=False
mooreFlags.doSegmentsOnly=True
muonboyFlags.doSegmentsOnly=True
muonRecFlags.doSegmentT0Fit = False
muonCombinedRecFlags.Enabled = False

# ------------ Trigger
rec.doTrigger.set_Value_and_Lock(True)

# ------------ AOD objects
rec.doEgamma.set_Value_and_Lock(False)
rec.doTau.set_Value_and_Lock(False)
rec.doJetMissingETTag.set_Value_and_Lock(False)
BTaggingFlags.Active=False

# ------------ Monitoring
rec.doMonitoring.set_Value_and_Lock(False)
rec.doHist.set_Value_and_Lock(False)

# ---------------------------------------------- Debug flags
rec.doPerfMon.set_Value_and_Lock(False) # optional for performance check
rec.doDetailedPerfMon.set_Value_and_Lock(False) # optional for performance check
rec.doNameAuditor.set_Value_and_Lock(False) # optional for debugging
rec.doDetStatus.set_Value_and_Lock(False) #

#---------------------------------------------- Commissioning settings
rec.Commissioning.set_Value_and_Lock(True)
rec.abortOnUncheckedStatusCode.set_Value_and_Lock(False)

# ----------------------------- Main jobOpt
include("RecExCommon/RecExCommon_topOptions.py")

# ----------------------------- Catalog file
Service("PoolSvc").SortReplicas = False
PoolSvc = Service( "PoolSvc" )
PoolSvc.ReadCatalog += ["xmlcatalog_file:/sw/DbData/poolcond/PoolCat_comcond.xml"]
PoolSvc.ReadCatalog += ["xmlcatalog_file:/det/dqm/AthenaMonitoring/AtlasTier0-15.4.0.2/DbData/poolcond/PoolCat_comcond.xml"]

# ------------------------------ Force run number for test partition
# Otherwise can't use TrigDecisionTool
if 'ForceRunNumber' in dir() :
    Service("IOVDbSvc").forceRunNumber = ForceRunNumber
    Service("IOVDbSvc").forceLumiblockNumber = 1

# --------------------------------- Printout
globalflags.print_JobProperties()

# ----------------------- Over-writes come at the end
MessageSvc = Service("MessageSvc")
if 'MsgLvl' in dir():
    MessageSvc.OutputLevel = MsgLvl
else:
    MessageSvc.OutputLevel = WARNING
####
# Setup VP1 jobOptions for running at P1
# --> do not rely on RecExCommon options (doVP1),
#     but setup things ourselves
# (expects StreamESD and OutputDirectory from the including jobOptions)
#####

### Make sure a StreamESD stream has been setup before
if not 'StreamESD' in dir():
    print "\n\n"
    print "\t WARNING: StreamESD is not available - can not run VP1 event producers!"
    print "\t Please use \"rec.doWriteESD\" to activate StreamESD.\n\n"
    import sys
    sys.exit(3);

### Add the algorithm producing VP1 events from the written ESD file
from VP1Algs.VP1AlgsConf import VP1EventProd
VP1EventProducer = VP1EventProd(InputPoolFile = StreamESD.OutputFile)

#Write out files in the directory given by the stream name
VP1EventProducer.DestinationDirectory = OutputDirectory
#Set number of files large so deleting is done by prune script
VP1EventProducer.MaxNumberOfFiles = 10000

#Set the output level (default WARNING unless the caller defined it)
if not 'VP1MsgLvl' in dir():
    VP1MsgLvl=WARNING
VP1EventProducer.OutputLevel=VP1MsgLvl

### Finally add this event producer to the main sequencer
from AthenaCommon.AlgSequence import AlgSequence
topSequence = AlgSequence()
topSequence += VP1EventProducer

### Finally print setup in debug mode
if VP1MsgLvl <= DEBUG:
    print "\n\n\t VP1 setup\n",VP1EventProducer,"\n\n"
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment