diff --git a/graphics/EventDisplaysOnline/python/EventDisplaysConfig.py b/graphics/EventDisplaysOnline/python/EventDisplaysConfig.py
index 0ec09b662af2bd44c5245e38ea83aefc4c4d38fc..ea384c35243c8c1bb98a01fad94f061de0d8ac1a 100644
--- a/graphics/EventDisplaysOnline/python/EventDisplaysConfig.py
+++ b/graphics/EventDisplaysOnline/python/EventDisplaysConfig.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
 
 def GetRunType():
 
@@ -13,7 +13,7 @@ def GetRunType():
     import os
     partition = os.environ['TDAQ_PARTITION']
   except KeyError :
-    partition = "EventDisplays"
+    partition = "ATLAS"
     mlog.warning("TDAQ_PARTITION not defined in environment, using %s as default"%partition)
 
   mlog.debug('Probing partition %s for RunType'%partition)
diff --git a/graphics/EventDisplaysOnline/python/EventUtils.py b/graphics/EventDisplaysOnline/python/EventUtils.py
new file mode 100644
index 0000000000000000000000000000000000000000..9ca59221a4ac7c91525599df877f9b7b63445d0c
--- /dev/null
+++ b/graphics/EventDisplaysOnline/python/EventUtils.py
@@ -0,0 +1,108 @@
+# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
+
+import os, re, time
+
+# This method reads the files in the given directory, sorts them by run/event number,
+# finds atlantis and vp1 files belonging to the same event and returns a list of events
+# and their corresponding files: (run, event, atlantis, vp1)
+def getEventlist(msg, directory):
+    filelist = []
+    files = os.listdir(directory)
+
+    # Build a list of files ordered by run/event number
+    for file in files:
+        matches = re.search('(?:JiveXML|vp1)_(\d+)_(\d+)(?:\.xml|_.+\.pool\.root)', file)
+
+        # Event file, add to the list
+        if matches:
+            run = "%012d" % long(matches.group(1))
+            event = "%012d" % long(matches.group(2))
+
+            fileentry = run, event, file
+            filelist.append(fileentry)
+
+        # Something else, check if it is one of our bookkeeping files, otherwise try to remove it
+        elif file not in ['latest_vp1event', 'event.list']:
+
+            # If the file is recent, it might be from another thread... delete after 5 minutes
+            try:
+                if time.time() - os.path.getmtime("%s/%s" % (directory, file)) > 300:
+                    msg.info("File '%s' does not belong in the output directory, removing it." % file)
+                    try:
+                        os.unlink("%s/%s" % (directory, file))
+                    except OSError, err:
+                        msg.warning("Could not remove '%s': %s" % (file, err))
+            except OSError:
+                # File was probably a temp file from another thread that already disappeared
+                pass
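[Editor's note — illustrative examples, not part of the patch. Filenames the
pattern above is written to match (run/event numbers are hypothetical):

    JiveXML_349533_1234567.xml          # Atlantis file: run 349533, event 1234567
    vp1_349533_1234567_0.pool.root      # VP1 file for the same run/event

Zero-padding run and event to 12 digits makes the plain string sort below
equivalent to a numeric sort, so files of the same event end up adjacent.]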
+    i = 0
+    eventlist = []
+    filelist.sort()
+    numfiles = len(filelist)
+
+    # Now loop through the files to form pairs
+    while i < numfiles-1:
+        #if filelist[i][0] != filelist[i+1][0] or filelist[i][1] != filelist[i+1][1]:
+
+            # Make sure that files without a partner (atlantis-vp1) are also removed
+            # if i == 0:
+            #     msg.warning("One of the files is missing for run %s, event %s, removing the other as well." % (filelist[i][0], filelist[i][1]))
+            #     try:
+            #         os.unlink("%s/%s" % (directory, filelist[i][2]))
+            #     except OSError, err:
+            #         msg.warning("Could not remove '%s': %s" % (filelist[i][2], err))
+
+            # Do not include such files in the list for atlas-live.cern.ch
+            #     i = i + 1
+        #else:
+        # Build list for atlas-live.cern.ch
+        evententry = filelist[i][0], filelist[i][1], filelist[i][2], filelist[i+1][2]
+        eventlist.append(evententry)
+        i = i + 1
+
+    return eventlist
+
+# Prune events in the given directory if the number exceeds the specified number
+def pruneEvents(msg, directory, maxevents, eventlist):
+    i = 0
+    numevents = len(eventlist)
+
+    # Check if there are more events than allowed and prune a number of files equal to the excess
+    if numevents > maxevents:
+        for i in range(numevents-maxevents):
+            run, event, atlantis, vp1 = eventlist.pop(0)
+            msg.debug("Going to prune files %s and %s for run %s and event %s." % (atlantis, vp1, run, event))
+            try:
+                os.unlink("%s/%s" % (directory, atlantis))
+                os.unlink("%s/%s" % (directory, vp1))
+            except OSError, err:
+                msg.warning("Could not remove files for run %s, event %s: %s" % (run, event, err))
+    else:
+        msg.debug("Nothing to prune (%d <= %d)." % (numevents, maxevents))
+
+# Build the event.list file that is used by atlas-live.cern.ch for synchronizing events
+def writeEventlist(msg, directory, eventlist):
+    pid = os.getpid()
+    try:
+        file = open("%s/event.%d" % (directory, pid), 'w')
+        for run, event, atlantis, vp1 in eventlist:
+            file.write("run:%s,event:%s,atlantis:%s,vp1:%s\n" % (run, event, atlantis, vp1))
+        file.close()
+    except IOError, err:
+        msg.warning("Could not write event list: %s" % err)
+
+    # Rename for an atomic overwrite operation
+    try:
+        os.rename("%s/event.%d" % (directory, pid), "%s/event.list" % directory)
+    except OSError, err:
+        msg.warning("Could not rename event.%d to event.list: %s" % (pid, err))
+
+# Perform all of these in one command
+def cleanDirectory(msg, directory, maxevents):
+    eventlist = getEventlist(msg, directory)
+    if maxevents:
+        pruneEvents(msg, directory, maxevents, eventlist)
+    writeEventlist(msg, directory, eventlist)
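[Editor's note — illustrative, not part of the patch. A hypothetical event.list
written by writeEventlist contains one line per paired event, e.g.:

    run:000000349533,event:000001234567,atlantis:JiveXML_349533_1234567.xml,vp1:vp1_349533_1234567_0.pool.root

Writing to event.<pid> first and renaming afterwards matters because
os.rename() within one filesystem is atomic: readers of event.list never see
a half-written file.]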
diff --git a/graphics/EventDisplaysOnline/python/OnlineEventDisplaysSvc.py b/graphics/EventDisplaysOnline/python/OnlineEventDisplaysSvc.py
new file mode 100644
index 0000000000000000000000000000000000000000..6fb683f45aabe51b49a840814f4b81e64a396536
--- /dev/null
+++ b/graphics/EventDisplaysOnline/python/OnlineEventDisplaysSvc.py
@@ -0,0 +1,232 @@
+# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
+
+__doc__ = """This service runs in the online Athena event display threads. It
+manages the distribution of incoming events to the right event display streams.
+In order to do that it connects to the different Atlantis and VP1 event output
+algorithms/tools and manipulates their job options during the run.
+"""
+__version__ = "0.1"
+__author__ = "Eric Jansen <eric.jansen@cern.ch>"
+
+import os, grp, stat, random
+from ipc import IPCPartition
+from ispy import ISInfoDictionary, ISInfoAny, ISObject
+from AthenaPython import PyAthena
+from AthenaPython.PyAthena import StatusCode
+from PyAnalysisCore import PyEventTools
+from EventDisplaysOnline import EventUtils
+
+class OnlineEventDisplaysSvc( PyAthena.Svc ):
+
+    def __init__(self, name = 'OnlineEventDisplaysSvc', **kw):
+        kw['name'] = name
+        self.output = kw.get('OutputDirectory')
+        self.maxevents = kw.get('MaxEvents')
+        self.public = kw.get('Public')
+        self.projecttags = kw.get('ProjectTags')
+
+        self.DQMgid = None
+        self.partition = None
+
+        self.StreamToFileTool = None
+        self.StreamToServerTool = None
+        self.VP1EventProducer = None
+        self.TrigConfDataIOVChanger = None
+        self.TrigBSExtraction = None
+        self.TrigDecMaker = None
+        self.TrigDecTool = None
+
+        self.run = 0
+        self.event = 0
+        self.stream = ''
+        self.directory = ''
+
+        super(OnlineEventDisplaysSvc,self).__init__(**kw)
+
+    def initialize(self):
+        incidentSvc = PyAthena.py_svc('IncidentSvc', iface='IIncidentSvc')
+        incidentSvc.addListener(self, 'BeginEvent')
+        incidentSvc.addListener(self, 'StoreCleared')
+
+        self.partition = IPCPartition('ATLAS')
+        self.dict = ISInfoDictionary(self.partition)
+        self.DQMgid = grp.getgrnam("DQM").gr_gid
+        return StatusCode.Success
+
+    def finalize(self):
+        return StatusCode.Success
+
+    def getJobOptions(self):
+        self.StreamToFileTool = PyAthena.py_tool('JiveXML::StreamToFileTool', iface='IProperty')
+        self.StreamToServerTool = PyAthena.py_tool('JiveXML::StreamToServerTool', iface='IProperty')
+
+        # Some casting magic is needed to access algorithm properties
+        from GaudiPython.Bindings import gbl, InterfaceCast
+        vp1alg = PyAthena.py_alg('VP1EventProd')
+        self.VP1EventProducer = InterfaceCast(gbl.IProperty).cast(vp1alg)
+
+        self.msg.info("StreamToFileTool: %s" % self.StreamToFileTool)
+        self.msg.info("StreamToServerTool: %s" % self.StreamToServerTool)
+        self.msg.info("VP1EventProducer: %s" % self.VP1EventProducer)
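[Editor's note — a minimal sketch, not part of the patch, of the IProperty
access that the casts above enable; the value shown is hypothetical:

    self.VP1EventProducer.getProperty('DestinationDirectory').setValue('/atlas/EventDisplayEvents/physics_Main')

beginEvent() below uses exactly this pattern to retarget the Atlantis and VP1
output tools for every event.]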
self.msg.debug("Event %d/%d has the corresponding streamTags: %s" % (self.run, self.event, tag.type())) + ################################################################################## + if tag.type() == 'express' and tag.name(): + streams += [tag.type()+'_'+tag.name()] + + if tag.type() == 'physics' and tag.name(): + streams += [tag.type()+'_'+tag.name()] + + # Add special streams to the list (JetTriggers, Public) + try: + if self.TrigDecTool: + self.TrigConfDataIOVChanger.execute() + self.TrigBSExtraction.execute() + self.TrigDecMaker.execute() + + if self.TrigDecTool.isPassed('L1_J5') or self.TrigDecTool.isPassed('L1_J10') or self.TrigDecTool.isPassed('L1_J15'): + streams += ['JetTriggers'] + #if self.TrigDecTool.isPassed('L1_MBTS_1_1'): + # streams += ['MinBias'] + #if self.TrigDecTool.isPassed('L1_MU0') or self.TrigDecTool.isPassed('L1_MU6') or self.TrigDecTool.isPassed('L1_MU10'): + # streams += ['Muons'] + except Exception, err: + self.msg.error("Exception occured while using TrigDecTool: %s" % err) + + try: + for stream in streams: + if stream in self.public: + ready4physics = ISInfoAny() + self.dict.getValue('RunParams.Ready4Physics', ready4physics) + print "Ready for physics: %s " % ready4physics.get() + runparams = ISObject(self.partition, 'RunParams.RunParams','RunParams') + runparams.checkout() + physicsReady = ISObject(self.partition, 'RunParams.Ready4Physics','Ready4PhysicsInfo') + physicsReady.checkout() + print "Ready for physics: %r" % (physicsReady.ready4physics) + #if ready4physics.get() and physicsReady.ready4physics and runparams.T0_project_tag in self.projecttags: + if physicsReady.ready4physics and runparams.T0_project_tag in self.projecttags: + streams += ['Public'] + else: + self.msg.debug("RunParams.Ready4Physics is not set, run number is not set, or T0_project_tag is not set to any of %s" % ", ".join(self.projecttags)) + break + except Exception, err: + self.msg.error("Exception occured while reading RunParams.Ready4Physics: %s" % err) + + # Randomize list of streams + random.shuffle(streams) + self.msg.debug("Event %d/%d has event display stream tags: %s" % (self.run, self.event, ", ".join(streams))) + + # Start from the beginning and send the event to the first stream that passes our directory checks + self.directory = '' + for self.stream in streams: + self.directory = "%s/%s" % (self.output, self.stream) + if os.access(self.directory, os.F_OK): + if os.path.isdir(self.directory) and os.access(self.directory, os.W_OK): + self.msg.debug("Going to write file to existing directory: %s" % self.directory) + if os.stat(self.directory).st_gid != self.DQMgid: + self.msg.debug("Setting group to 'DQM' for directory: %s" % self.directory) + os.chown(self.directory, -1, self.DQMgid) + break + else: + self.msg.warning("Directory \'%s\' is not usable, trying next alternative" % self.directory) + self.directory = '' + else: + try: + os.mkdir(self.directory) + os.chmod(self.directory, stat.S_IRWXU | stat.S_IRWXG | stat.S_IROTH | stat.S_IXOTH) + os.chown(self.directory, -1, self.DQMgid) + self.msg.info("Created output directory \'%s\' for stream \'%s\'" % (self.directory, self.stream)) + break + except OSError, err: + self.msg.warning("Failed to create output directory \'%s\' for stream \'%s\': %s", (self.directory, self.stream, err.strerror)) + self.directory = '' + + # Check if a suitable directory was found + if self.directory: + self.msg.debug("Event %d/%d will be streamed to: %s" % (self.run, self.event, self.stream)) + else: + # This event is hopelessly lost, send 
+        # Check if a suitable directory was found
+        if self.directory:
+            self.msg.debug("Event %d/%d will be streamed to: %s" % (self.run, self.event, self.stream))
+        else:
+            # This event is hopelessly lost, send StatusCode.Recoverable in an attempt to abort.
+            # But if Athena chooses to ignore that, set the output to the "Unknown" trashcan stream.
+            self.stream = 'Unknown'
+            self.directory = "%s/.Unknown" % self.output
+
+        try:
+            # Set output stream for JiveXML event streaming (file and server)
+            self.StreamToServerTool.getProperty('StreamName').setValue("%s" % self.stream)
+            self.StreamToFileTool.getProperty('FileNamePrefix').setValue("%s/JiveXML" % self.directory)
+
+            # And also for the VP1 event producer algorithm
+            self.VP1EventProducer.getProperty('DestinationDirectory').setValue(self.directory)
+        except Exception, err:
+            self.msg.error("Exception occurred while setting job options: %s" % err)
+            return StatusCode.Failure
+
+        if not self.directory:
+            return StatusCode.Recoverable
+
+    def endEvent(self):
+        # Prune events and make index file for atlas-live.cern.ch
+        if self.directory:
+            # Hack for missing VP1 files, create an empty file to make cleanup/sync work
+            #open("%s/vp1_%d_%d_0.pool.root" % (self.directory, self.run, self.event), 'a').close()
+            EventUtils.cleanDirectory(self.msg, self.directory, self.maxevents)
+
+        # And clean up the variables
+        self.run = 0
+        self.event = 0
+        self.stream = ''
+        self.directory = ''
+
+    def handle(self, incident):
+        self.msg.verbose("Received incident %s from %s" % (incident.type(), incident.source()))
+
+        # Event and trigger info present, decide stream
+        if incident.type() == 'BeginEvent' and incident.source() == 'AthenaEventLoopMgr':
+            self.beginEvent()
+
+        # VP1 writes its file at EndEvent, so we can do cleanup at StoreCleared
+        if incident.type() == 'StoreCleared' and incident.source() == 'StoreGateSvc':
+            self.endEvent()
diff --git a/graphics/EventDisplaysOnline/python/PruneAlg.py b/graphics/EventDisplaysOnline/python/PruneAlg.py
new file mode 100755
index 0000000000000000000000000000000000000000..539c275cdae2ab25edd27a7dabdc0b3230670eb4
--- /dev/null
+++ b/graphics/EventDisplaysOnline/python/PruneAlg.py
@@ -0,0 +1,31 @@
+# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
+
+__doc__ = """Algorithm for pruning event display files
+"""
+__version__ = "0.2"
+__author__ = "Eric Jansen <eric.jansen@cern.ch>"
+
+from AthenaPython import PyAthena
+from AthenaPython.PyAthena import StatusCode
+from EventDisplaysOnline import EventUtils
+
+class PruneAlg( PyAthena.Alg ):
+
+    def __init__(self, name = "PruneAlg", **kw):
+        kw['name'] = name
+        super(PruneAlg,self).__init__(**kw)
+
+        self.directory = kw.get("OutputDirectory", "/EventDisplayEvents/Default")
+        self.numevents = kw.get("NumberOfEvents", 250)
+
+    def initialize(self):
+        self.msg.debug("Going to prune %s, keeping %d events" % (self.directory, self.numevents))
+        return StatusCode.Success
+
+    def execute(self):
+        EventUtils.cleanDirectory(self.msg, self.directory, self.numevents)
+        return StatusCode.Success
+
+    def finalize(self):
+        return StatusCode.Success
diff --git a/graphics/EventDisplaysOnline/scripts/athena_script.sh b/graphics/EventDisplaysOnline/scripts/athena_script.sh
new file mode 100755
index 0000000000000000000000000000000000000000..7cd662e90f78d7a5d69449d66d67c597aa1229e2
--- /dev/null
+++ b/graphics/EventDisplaysOnline/scripts/athena_script.sh
@@ -0,0 +1,2 @@
+#!/bin/sh
+exec athena.py "$@"
\ No newline at end of file
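[Editor's note — a minimal usage sketch, not part of the patch, assuming
PruneAlg is scheduled like any other Athena algorithm; the values shown are
the defaults from __init__ above:

    from AthenaCommon.AlgSequence import AlgSequence
    from EventDisplaysOnline.PruneAlg import PruneAlg

    topSequence = AlgSequence()
    topSequence += PruneAlg("PruneAlg",
                            OutputDirectory = "/EventDisplayEvents/Default",
                            NumberOfEvents  = 250)

Each execute() call then re-indexes and prunes that directory via
EventUtils.cleanDirectory.]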
diff --git a/graphics/EventDisplaysOnline/share/Atlantis_jobOptions.py b/graphics/EventDisplaysOnline/share/Atlantis_jobOptions.py
index f44c95ce84674a9815b5f51375fe9062e2131606..29be2b16e03dac7f9b5ef73fec085349c8d32933 100644
--- a/graphics/EventDisplaysOnline/share/Atlantis_jobOptions.py
+++ b/graphics/EventDisplaysOnline/share/Atlantis_jobOptions.py
@@ -6,7 +6,7 @@
 
 ### Enable Atlantis-flag (fake as this happens after we include RecExCommon)
 from RecExConfig.RecFlags import rec
-rec.doJiveXML.set_Value_and_Lock(True)
+rec.doJiveXML.set_Value_and_Lock(True) ## Steered by graphics/JiveXML/share/JiveXML_RecEx_Config.py
 
 ### Setup output level
 if not 'AtlantisMsgLvl' in dir():
@@ -28,23 +28,23 @@ AlgoJiveXML.AtlasRelease = os.environ["AtlasVersion"]
 ### - this is the same as AlgoJiveXML.WriteToFile = True
 ### but allows to set the StreamName as prefix
 from JiveXML.JiveXMLConf import JiveXML__StreamToFileTool
-AlgoJiveXML.StreamTools += [
-  JiveXML__StreamToFileTool(FileNamePrefix=OutputDirectory+"/JiveXML",
-                            OutputLevel=AtlantisMsgLvl)
-]
+StreamToFileTool = JiveXML__StreamToFileTool(
+  FileNamePrefix="%s/.Unknown/JiveXML" % OutputDirectory,
+  OutputLevel=AtlantisMsgLvl)
+ToolSvc += StreamToFileTool
+AlgoJiveXML.StreamTools += [ StreamToFileTool ]
 #Disable default tool
 AlgoJiveXML.WriteToFile = False
 
 ### Online mode allows you to send events to
-### AtlantiJava clients over a network connection
+### AtlantisJava clients over a network connection
 ### - this is the same as AlgoJiveXML.OnlineMode = True
-### but allows to set the server port number
-if not 'Port' in dir():
-  Port=48965
-from JiveXML.JiveXMLConf import JiveXML__XMLRPCStreamTool
-AlgoJiveXML.StreamTools += [
-  JiveXML__XMLRPCStreamTool(ServerPort=Port,OutputLevel=AtlantisMsgLvl)
-]
+### but allows to set the server port number
+#if 'Port' in dir():
+#  from JiveXML.JiveXMLConf import JiveXML__XMLRPCStreamTool
+#  AlgoJiveXML.StreamTools += [
+#    JiveXML__XMLRPCStreamTool(ServerPort=Port,OutputLevel=AtlantisMsgLvl)
+#  ]
 #Disable default tool
 AlgoJiveXML.OnlineMode = False
 
@@ -55,22 +55,23 @@ AlgoJiveXML.OnlineMode = False
 #configure external ONCRPC server service to stream to the machine running our
 #server instance and adopt the message level
 if not 'ServerHost' in dir():
-  ServerHost="pc-tdq-mon-31"
+  ServerHost="pc-tdq-mon-29"
 from JiveXML.JiveXMLConf import JiveXML__ExternalONCRPCServerSvc
 ServiceMgr += JiveXML__ExternalONCRPCServerSvc("ExternalONCRPCServerSvc",
                                                Hostname=ServerHost,
                                                OutputLevel=AtlantisMsgLvl)
 #And add a stream tool that streams using the current stream tag
 from JiveXML.JiveXMLConf import JiveXML__StreamToServerTool
-AlgoJiveXML.StreamTools += [
-  JiveXML__StreamToServerTool(ServerService=ServiceMgr.ExternalONCRPCServerSvc,
-                              StreamName=StreamName)
-]
+StreamToServerTool = JiveXML__StreamToServerTool(
+  OutputLevel=AtlantisMsgLvl,
+  ServerService=ServiceMgr.ExternalONCRPCServerSvc,
+  StreamName="Unknown")
+ToolSvc += StreamToServerTool
+AlgoJiveXML.StreamTools += [ StreamToServerTool ]
 
 ### Enable this to recreate the geometry XML files for Atlantis
 AlgoJiveXML.WriteGeometry = False
-
 ### add the AlgoJiveXML
 topSequence += AlgoJiveXML
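[Editor's note — illustrative, not part of the patch: with this setup the file
tool starts out writing

    <OutputDirectory>/.Unknown/JiveXML_<run>_<event>.xml

and OnlineEventDisplaysSvc retargets FileNamePrefix and StreamName per event
once a stream directory is chosen; these JiveXML_* names are exactly what
EventUtils.getEventlist later pairs with the VP1 output.]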
"JiveXML/DataTypes_Muons.py" ) -#Switch on trigger if trig data is there +#Switch on trigger if trig data is there if rec.doTrigger(): include ( "JiveXML/DataTypes_Trig.py" ) #Special options for ACR @@ -107,6 +107,53 @@ if rec.doTrigger(): ### Always use this one include ( "JiveXML/DataTypes_Reco.py" ) +theJetRecJetRetriever.OtherJetCollections = [] +#theMissingETRetriever.OtherMissingETCollections = [] + +## ================== Edit: 25-02-2015 by sjiggins - Do not limit additional Track Collection ======================== +include ( "JiveXML/DataTypes_Trig.py" ) # 25-02-2015 Change made by sjiggins to allow doTriggers to float +#include ("xAODJiveXML/xAODJiveXML_DataTypes.py") # 04-04/15 by sjiggins +#### jpthomas 4Jun15: Added correct muon track collection, but still also retrieve MS-only too: +ToolSvc.TrackRetriever.OtherTrackCollections = ["CombinedMuonTracks","MuonSpectrometerTracks"] +## =================================================================================================================== + +## ========================== Edit: Beam Splash reduction flags ==================================== +#topSequence.theEventData2XML.DataTypes.remove("JiveXML::SiSpacePointRetriever/SiSpacePointRetriever"); +##topSequence.theEventData2XML.DataTypes.remove("JiveXML::SiClusterRetrieverSiClusterRetriever"); +##topSequence.theEventData2XML.DataTypes.remove("JiveXML::PixelClusterRetriever/PixelClusterRetriever"); #Solved by TrigJiveXML checkout +#topSequence.theEventData2XML.DataTypes.remove("JiveXML::TRTRetriever/TRTRetriever"); +##topSequence.theEventData2XML.DataTypes.remove("JiveXML::TrigSiSpacePointRetrieverTrigSiSpacePointRetriever"); +#topSequence.theEventData2XML.DataTypes.remove("JiveXML::SCTRDORetriever/SCTRDORetriever"); +#topSequence.theEventData2XML.DataTypes.remove("JiveXML::MdtPrepDataRetriever/MdtPrepDataRetriever"); + +#topSequence.CaloLArRetriever.LArlCellThreshold = 500.; +#topSequence.CaloHECRetriever.HEClCellThreshold = 500.; + +#ToolSvc.CaloLArRetriever.LArlCellThreshold = 500.; +#ToolSvc.CaloHECRetriever.HEClCellThreshold = 500.; + + +#ToolSvc.TrackRetriever.OtherTrackCollections = []; +#ToolSvc.xAODTrackParticleRetriever.OtherTrackCollections = [] +## ======================================================================================= + +if rec.doHeavyIon: + #topSequence.AlgoJiveXML.DataTypes.remove("JiveXML::V0CandidateRetriever/V0CandidateRetriever") + #topSequence.AlgoJiveXML.DataTypes.remove("JiveXML::TruthTrackRetriever/TruthTrackRetriever") + #topSequence.AlgoJiveXML.DataTypes.remove("JiveXML::TruthMuonTrackRetriever/TruthMuonTrackRetriever") + topSequence.AlgoJiveXML.DataTypes.remove("JiveXML::SiSpacePointRetriever/SiSpacePointRetriever") + topSequence.AlgoJiveXML.DataTypes.remove("JiveXML::SiClusterRetriever/SiClusterRetriever") + topSequence.AlgoJiveXML.DataTypes.remove("JiveXML::PixelClusterRetriever/PixelClusterRetriever") + topSequence.AlgoJiveXML.DataTypes.remove("JiveXML::TRTRetriever/TRTRetriever") + topSequence.AlgoJiveXML.DataTypes.remove("JiveXML::CaloClusterRetriever/CaloClusterRetriever") + topSequence.AlgoJiveXML.DataTypes.remove("JiveXML::PixelRDORetriever/PixelRDORetriever") + topSequence.AlgoJiveXML.DataTypes.remove("JiveXML::SCTRDORetriever/SCTRDORetriever") + + ToolSvc.TrackRetriever.PriorityTrackCollection = "Tracks" + ToolSvc.TrackRetriever.OtherTrackCollections = ["ConvertedStacoTracks","ConvertedMuIdCBTracks"] + + ToolSvc.TrackRetriever.DoHitsDetails = False + ### Finally, print setup if in debug mode if AtlantisMsgLvl <= DEBUG: 
print "\n\n\t Atlantis setup\n",AlgoJiveXML,"\n\n" diff --git a/graphics/EventDisplaysOnline/share/ByteStreamController_jobOptions.py b/graphics/EventDisplaysOnline/share/ByteStreamController_jobOptions.py index 2c796c9825545528ee78c281523ef3ac3cf94dca..13b8fbb78849d2842e709608098f83eb392007c4 100644 --- a/graphics/EventDisplaysOnline/share/ByteStreamController_jobOptions.py +++ b/graphics/EventDisplaysOnline/share/ByteStreamController_jobOptions.py @@ -22,11 +22,11 @@ theApp.CreateSvc += ['ISPropertySvc'] from ByteStreamEmonController.ByteStreamEmonControllerConf import BSEmonCtrlSvc #Create an instance with a stream-specific name -ByteStreamEmonCtrlSvc = BSEmonCtrlSvc(Stream+"-BSEmonCtrlSvc") +ByteStreamEmonCtrlSvc = BSEmonCtrlSvc(AppName+"-BSEmonCtrlSvc") ServiceMgr += ByteStreamEmonCtrlSvc #Add to list of services created right-away (otherwise not referenced) -theApp.CreateSvc += ["BSEmonCtrlSvc/"+Stream+"-BSEmonCtrlSvc"] +theApp.CreateSvc += ["BSEmonCtrlSvc/"+AppName+"-BSEmonCtrlSvc"] ### # Switch output level if requested diff --git a/graphics/EventDisplaysOnline/share/ByteStreamInput_jobOptions.py b/graphics/EventDisplaysOnline/share/ByteStreamInput_jobOptions.py index 978f51afab7d22585239a35a0cff919fbe15fb7e..30ecaea6f66618ddafe55a076f11bdefeedeae21 100644 --- a/graphics/EventDisplaysOnline/share/ByteStreamInput_jobOptions.py +++ b/graphics/EventDisplaysOnline/share/ByteStreamInput_jobOptions.py @@ -33,7 +33,7 @@ ByteStreamEmonInputSvc.Partition = Partition # The source of events, SFI for full events # ######################################### if not 'SamplingLevel' in dir(): - SamplingLevel="SFI" + SamplingLevel="efd" ByteStreamEmonInputSvc.Key = SamplingLevel # ############################################################ @@ -82,7 +82,9 @@ ByteStreamEmonInputSvc.KeyCount = Nodes # ################################# # Set dispersion flag to false (so other can sample the same event # ################################# -ByteStreamEmonInputSvc.Dispersion=False +if not 'Dispersion' in dir(): + Dispersion=False +ByteStreamEmonInputSvc.Dispersion=Dispersion # ################################# # Set timeout (in ms?) @@ -105,7 +107,9 @@ ByteStreamEmonInputSvc.LVL1Logic = "Ignore" # # This can be used instead of or in addition to LVL1Bits. # ########################################################### -#ByteStreamEmonInputSvc.LVL1Names = ['L1_MU2', 'L1_MU3', 'L1_EM10' ] +if 'LVL1Names' in dir(): + ByteStreamEmonInputSvc.LVL1Logic = "Or" + ByteStreamEmonInputSvc.LVL1Names = LVL1Names.split(' ') # A list of numerical trigger bits instead of names. 
 # A list of numerical trigger bits instead of names. This is
 # Or'ed with LVL1Names
@@ -119,7 +123,7 @@ ByteStreamEmonInputSvc.LVL1Logic = "Ignore"
 if ( not 'Stream' in dir()) or (Stream == 'Default') or (Stream.startswith("Test")):
   ByteStreamEmonInputSvc.StreamLogic = "Ignore"
 else:
-  ByteStreamEmonInputSvc.StreamLogic = "And"
+  ByteStreamEmonInputSvc.StreamLogic = "Or"
 
 # ###########################################
 # One of 'physics' or 'calibration'
@@ -132,7 +136,7 @@ ByteStreamEmonInputSvc.StreamType = StreamType
 # ############################################
 # A list of stream tag names
 # ############################################
-ByteStreamEmonInputSvc.StreamNames = [ Stream ]
+ByteStreamEmonInputSvc.StreamNames = Stream.split(' ')
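[Editor's note — illustrative, not part of the patch: Stream may now carry
several space-separated tags, e.g. Stream = 'physics_Main physics_MinBias'
yields StreamNames = ['physics_Main', 'physics_MinBias'], matched with the
"Or" logic set above; a single tag behaves exactly as before.]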
 # #################################################
 # The event buffer size
diff --git a/graphics/EventDisplaysOnline/share/EventDisplay_jobOptions.py b/graphics/EventDisplaysOnline/share/EventDisplay_jobOptions.py
index 18640415689dd1965f3a8cd2a183a0dc476f96e1..dadaecbb078a9c224698a507f8c262168062d798 100644
--- a/graphics/EventDisplaysOnline/share/EventDisplay_jobOptions.py
+++ b/graphics/EventDisplaysOnline/share/EventDisplay_jobOptions.py
@@ -1,74 +1,166 @@
-#########################################
-#
-# Setup for running event displays at point1
-#
-#########################################
-
-#Define the general output directory for VP1 and Atlantis
-#All events will be stored in subdirectories of that directory
+## -- Overview of all default local settings that one can change
+## -- The default values are also shown.
+
+## ------------------------------------------- flags set in: RecExOnline_jobOptions.py
+isOnline = True
+isOnlineStateless = True
+
+#Crashes online if you do not set this....
+isOfflineTest = False
+
+#For the time being, running over a file is not possible ONLINE (please see RecExOnline_File_Offline.py)
+useEmon = True
+#The number of machines we run with, helpfully labelled "keycount"
+keycount = 10 #10 #1000
+buffersize = 10
+updateperiod = 200
+timeout = 600000
+
+keyname = 'dcm'
+
+#Blank to read all
+streamName = ''
+
+#Read Physics
+streamType = 'physics' #Progonal Does not specify these
+
+streamLogic = 'Or' #HAS TO BE OR AT ALL TIMES WHEN SPECIFYING A CERTAIN STREAM
+
+useAtlantisEmon = False
+
+## ------------------------------------------- flags set in: RecExOnline_emonsvc.py (from RecExOnline_jobOptions.py)
+partitionName = 'ATLAS'
+#Current test partition looping through 2015 data if you want to test when no run is ongoing.
+#partitionName = 'GMTestPartition_lshi_tdaq6'
+publishName = 'EventDisplays'
+
+if (partitionName == 'ATLAS'):
+  evtMax = -1
+
+#Don't flood if you are running on a test loop
+if (partitionName != 'ATLAS'):
+  evtMax = 200
+
+## ------------------------------------------- flags set in: RecExOnline_globalconfig.py (from RecExOnline_jobOptions.py)
+#Read the pickle file if you want to use the AMI tag info
+#stored in ami_recotrf.pickle (produced by 'tct_getAmiTag.py f140 ami_recotrf.cmdargs ami_recotrf.pickle')
+usePickleConfig = False
+pickleconfigfile = './ami_recotrf.pickle'
+DataSource = 'data'
+InputFormat = 'bytestream'
+fileName = './0.data'
+#beamType = 'cosmics'
+#beamType = 'collisions'
+
+#COND tag and GEO are needed for running over a test partition online
+#Previous COND tag
+ConditionsTag = 'CONDBR2-HLTP-2017-03' #Removed 07/04
+#Swapped to this following AMI tag for current reco. Swap back if not in release
+#ConditionsTag = 'CONDBR2-ES1PA-2016-01' #Different
+#Current DetDesc
+DetDescrVersion = 'ATLAS-R2-2016-01-00-01'
+
+doESD = True
+writeESD = True # False - Jiggins_12Feb_v2 working version switch
+doAOD = False #True # False - Jiggins_12Feb_v2 working version switch
+writeAOD = False # True # False - Jiggins_12Feb_v2 working version switch
+IOVDbSvcMessage = False
+
+## ------------------------------------------ flags set in: RecExOnline_recoflags.py (from RecExOnline_jobOptions.py)
+doAllReco = True
+doInDet = doAllReco
+doMuon = True
+doLArg = doAllReco
+doTile = doAllReco
+doTrigger = doAllReco
+doHist = False
+doJiveXML = False
+doEgammaTau = False
+
+## ------------------------------------------ flags set in: RecExOnline_monitoring.py (from RecExOnline_jobOptions.py)
+doAllMon = False
+doCaloMon = doAllMon
+doPhysMon = doAllMon
+doTrigMon = False
+doIDMon = doAllMon
+doTRTMon = doAllMon
+doMuonMon = False
+
+## ------------------------------------------ flags set in: RecExOnline_postconfig.py (called from RecExOnline_jobOptions.py)
+
+## Define the general output directory for VP1 and Atlantis
 if not 'OutputDirectory' in dir():
-  OutputDirectory="/EventDisplayEvents/"
-#Make sure it ends with a "/"
-if not OutputDirectory.endswith("/"):
-  OutputDirectory += "/"
-
-#Make sure we run infintly if not otherwise specified on the command line
-from AthenaCommon.AthenaCommonFlags import jobproperties,athenaCommonFlags
-if 'EvtMax' in dir():
-  athenaCommonFlags.EvtMax.set_Value_and_Lock(EvtMax)
-else:
-  athenaCommonFlags.EvtMax.set_Value_and_Lock(-1)
-
-#Get a logger so we can show formated messages
-from AthenaCommon.Logging import logging
-mlog = logging.getLogger( 'EventDisplays' )
-
-#Make sure we have a stream name defined
-if not 'Stream' in dir():
-  Stream="Default"
-
-#Make sure the Splash flag exists
-if not 'Splash' in dir():
-  Splash=False
-
-#Create phony stream name
-if not 'StreamName' in dir():
-  StreamName = Stream+(Splash and '-Splash' or '')
-
-#Assemble final output directory
-OutputDirectory += StreamName
-
-#Make sure the output directory exists and is writable
-import os, stat
-if os.access(OutputDirectory, os.F_OK):
-  if os.path.isdir(OutputDirectory) and os.access(OutputDirectory, os.W_OK):
-    mlog.info("using existing output directory \'%s\' for stream \'%s\'" % (OutputDirectory, StreamName))
-  else:
-    mlog.fatal("cannot write to directory \'%s\'" % OutputDirectory)
-    raise OSError("cannot write to directory \'%s\'" % OutputDirectory)
-else:
-  try:
-    os.mkdir(OutputDirectory);
-    os.chmod(OutputDirectory, stat.S_IRWXU | stat.S_IRWXG | stat.S_IROTH | stat.S_IXOTH)
-    mlog.info("created output directory \'%s\' for stream \'%s\'" % (OutputDirectory, StreamName))
-  except OSError, err:
-    mlog.fatal("failed to create output directory \'%s\' for stream \'%s\': %s", (OutputDirectory, StreamName, err.strerror))
-    raise err
-
-#Bytestream input is configured here
-include('EventDisplaysOnline/ByteStreamInput_jobOptions.py')
-
-#Add reconstruction on top of that
-if not Splash:
-  include('EventDisplaysOnline/RecoCommon_jobOptions.py')
-else :
-  include('EventDisplaysOnline/SplashEvent_jobOptions.py')
-
-#Add Atlantis-specific jobOptions
-include('EventDisplaysOnline/Atlantis_jobOptions.py')
-
-#Add VP1-specific jobOptions
-include('EventDisplaysOnline/VP1_jobOptions.py')
-
-#Finally configure the ByteStreamController (after RecoCommon!)
-#include('EventDisplaysOnline/ByteStreamController_jobOptions.py')
+  OutputDirectory="/atlas/EventDisplayEvents"
+
+## Pause this thread until the ATLAS partition is up
+include ("EventDisplaysOnline/WaitForAtlas_jobOptions.py")
+
+from AthenaCommon.GlobalFlags import globalflags
+globalflags.ConditionsTag.set_Value_and_Lock(ConditionsTag)
+
+## Setup unique output files (so that multiple Athenas on the same machine don't interfere)
+jobId = os.environ.get('TDAQ_APPLICATION_NAME', '').split(':')
+if not len(jobId) == 5:
+  from random import randint
+  jobId = ['Athena-EventProcessor', 'Athena-EventDisplays-Segment', 'EventDisplays-Rack', 'tmp', '%d' % randint(0, 999)]
+
+IPC_timeout = int(os.environ['TDAQ_IPC_TIMEOUT'])
+print " IPC_timeout Environment Variable = %d" % IPC_timeout
+#################################################################################
+
+#from random import randrange
+from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
+
+# #################### From JiveXML server python script ####################
+athenaCommonFlags.PoolESDOutput = "ESD-%s-%s.pool.root" % (jobId[3], jobId[4])
+## Additional flags from: GetTfCommand.py --AMI=x392
+from CaloRec.CaloCellFlags import jobproperties
+jobproperties.CaloCellFlags.doLArHVCorr=False
+jobproperties.CaloCellFlags.doPileupOffsetBCIDCorr.set_Value_and_Lock(False)
+jobproperties.CaloCellFlags.doLArCreateMissingCells=False
+
+#Workaround to stop a crash in pixel cluster splitting
+from InDetRecExample.InDetJobProperties import InDetFlags #All OK
+InDetFlags.doInnerDetectorCommissioning.set_Value_and_Lock(True)
+
+from JetRec.JetRecFlags import jetFlags
+jetFlags.useTracks.set_Value_and_Lock(False)
+
+#from MuonRecExample.MuonRecFlags import muonRecFlags;
+#Crashes claiming it does not exist
+#muonRecFlags.writeRDO.set_Value_and_Lock(True);
+
+from RecExConfig.RecFlags import rec
+#rec.projectName.set_Value_and_Lock('data16_comm') # CHECK THIS NAME WITH RUN CONTROL
+
+from RecExConfig.RecAlgsFlags import recAlgs
+recAlgs.doEFlow.set_Value_and_Lock(False)
+recAlgs.doMissingET.set_Value_and_Lock(False)
+
+## Main online reco scripts
+include ("RecExOnline/RecExOnline_jobOptions.py")
+
+ToolSvc.InDetPixelRodDecoder.OutputLevel = ERROR
+
+rec.abortOnUncheckedStatusCode = False
+rec.abortOnErrorMessage = False
+
+include ("EventDisplaysOnline/JiveXMLServer_jobOptions.py")
+include ("EventDisplaysOnline/Atlantis_jobOptions.py")
+#include ("EventDisplaysOnline/VP1_jobOptions.py")
+
+## Disable histogramming
+svcMgr.ByteStreamInputSvc.ISServer=''
+
+################### Added by sjiggins 10/03/15 as given by Peter van Gemmeren for naming PoolFileCatalogs
+svcMgr.PoolSvc.WriteCatalog = "xmlcatalog_file:PoolFileCatalog_%s_%s.xml" % (jobId[3], jobId[4])
+####################################################################################################
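[Editor's note — illustrative, not part of the patch: TDAQ_APPLICATION_NAME is
assumed here to be a five-field colon-separated identifier. A hypothetical
value such as

    'EventDisplays:Athena:Segment:Rack-Y.07:3'

gives jobId[3] = 'Rack-Y.07' and jobId[4] = '3', hence per-process outputs
ESD-Rack-Y.07-3.pool.root and PoolFileCatalog_Rack-Y.07_3.xml, so several
Athena instances on one node do not clobber each other's files.]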
+svcMgr.MessageSvc.OutputLevel = WARNING #INFO
+svcMgr.MessageSvc.Format = "% F%t %18W%S%7W%R%T %0W%M"
diff --git a/graphics/EventDisplaysOnline/share/JiveXMLServer_jobOptions.py b/graphics/EventDisplaysOnline/share/JiveXMLServer_jobOptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..9cbb84b77f4d3644abbe9e3dcd1e2938f5a90951
--- /dev/null
+++ b/graphics/EventDisplaysOnline/share/JiveXMLServer_jobOptions.py
@@ -0,0 +1,48 @@
+#########################################
+#
+# Setup for running event displays at point1
+#
+#########################################
+
+import os
+from random import randrange
+from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
+
+######## Commented out by sjiggins - 19-02-2015 because of a ###############
+######## writeESD crash with PoolFileCatalog read-only error ###############
+
+#randomString=str(randrange(0,100000))
+#athenaCommonFlags.PoolESDOutput = 'ESD_' + randomString + '.pool.root'
+#os.environ['POOL_CATALOG'] = 'xmlcatalog_file:PoolFileCatalog_'+randomString+'.xml'
+
+############################################################################
+
+#Add the online event display service
+from AthenaCommon.AppMgr import theApp
+if not hasattr(svcMgr, 'OnlineEventDisplaysSvc'):
+  if rec.doHeavyIon:
+    maxEvents=250
+    projectTags=['data18_hi']
+    publicStreams=['HardProbes', 'MinBias']
+  else:
+    maxEvents=250 #1000
+    ############## Original - changed by sjiggins @ 02-06-15 #################
+    #projectTags=['data12_8TeV']
+    #publicStreams=['Egamma', 'JetTauEtmiss', 'MinBias']
+    ##########################################################################
+    projectTags=['data18_13TeV']
+    publicStreams=['physics_Main']
+
+  from EventDisplaysOnline.OnlineEventDisplaysSvc import OnlineEventDisplaysSvc
+  svcMgr += OnlineEventDisplaysSvc(
+    "OnlineEventDisplaysSvc",
+    OutputLevel = DEBUG,               # Verbosity
+    MaxEvents = maxEvents,             # Number of events to keep per stream
+    OutputDirectory = OutputDirectory, # Base directory for streams
+    ProjectTags = projectTags,         # Project tags that are allowed to be made public
+    Public = publicStreams,            # These streams go into the public stream when Ready4Physics
+  )
+  theApp.CreateSvc += [svcMgr.OnlineEventDisplaysSvc.getFullJobOptName()]
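[Editor's note — hypothetical resulting layout, not part of the patch: with
OutputDirectory = '/atlas/EventDisplayEvents' and an event routed to
physics_Main, the service maintains per-stream directories like

    /atlas/EventDisplayEvents/physics_Main/JiveXML_<run>_<event>.xml
    /atlas/EventDisplayEvents/physics_Main/vp1_<run>_<event>_0.pool.root
    /atlas/EventDisplayEvents/physics_Main/event.list

capped at MaxEvents event pairs by the pruning in EventUtils.]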
- print "\t Please use \"rec.doWriteESD\" to activate StreamESD.\n\n" - import sys - sys.exit(3); +from AthenaCommon.AppMgr import ServiceMgr as svcMgr +from AthenaServices.AthenaServicesConf import OutputStreamSequencerSvc +outputStreamSequencerSvc = OutputStreamSequencerSvc() +outputStreamSequencerSvc.SequenceIncidentName = "EndEvent" +outputStreamSequencerSvc.IgnoreInputFileBoundary = True +svcMgr += outputStreamSequencerSvc ### Add the algorithm producing VP1 events -from VP1Algs.VP1AlgsConf import VP1EventProd +from VP1AlgsEventProd.VP1AlgsEventProdConf import VP1EventProd VP1EventProducer = VP1EventProd(InputPoolFile = StreamESD.OutputFile) +## =================== Added 09/03/15 by sjiggins ================= +print "<<<<<<< VP1 Output File >>>>>>>" +print "OutputFile: %s" % StreamESD.OutputFile +## ================================================================ #Write out files in the directory given by the stream name -VP1EventProducer.DestinationDirectory = OutputDirectory +VP1EventProducer.DestinationDirectory = "%s/.Unknown/" % OutputDirectory #Set number of files large so deleting is doen by prune script -VP1EventProducer.MaxNumberOfFiles = 10000 +VP1EventProducer.MaxNumberOfFiles = 250 #Set the output level if not 'VP1MsgLvl' in dir(): diff --git a/graphics/EventDisplaysOnline/share/WaitForAtlas_jobOptions.py b/graphics/EventDisplaysOnline/share/WaitForAtlas_jobOptions.py new file mode 100644 index 0000000000000000000000000000000000000000..d507d394a2b1b5471c8de796c29012d86a344b28 --- /dev/null +++ b/graphics/EventDisplaysOnline/share/WaitForAtlas_jobOptions.py @@ -0,0 +1,15 @@ +from ispy import * +import time + +partitionUp=False +while not partitionUp: + try: + p = IPCPartition(partitionName) + runparams = ISObject(p, "RunParams.RunParams", "RunParams") + runparams.checkout() + partitionUp=True + + except Exception, e: + print "%s partition is not up, sleeping for 30 seconds" % partitionName + time.sleep(30) + diff --git a/graphics/JiveXML/src/ONCRPCServerSvc.cxx b/graphics/JiveXML/src/ONCRPCServerSvc.cxx index d14a0abf10a4241356e202b9580948640ee267e7..42218c929b6167b65979a8cdeb4950f7516768cf 100644 --- a/graphics/JiveXML/src/ONCRPCServerSvc.cxx +++ b/graphics/JiveXML/src/ONCRPCServerSvc.cxx @@ -1,5 +1,5 @@ /* - Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration + Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration */ #include "GaudiKernel/ServiceHandle.h" @@ -342,8 +342,8 @@ namespace JiveXML { //obtain a lock. If the lock can not be obtained after a certain time, an //error is reported - //Timeout of 1 second and 0 nanoseconds - struct timespec timeout = { 1, 0 }; + //Timeout of 5 second and 0 nanoseconds + struct timespec timeout = { 5, 0 }; //Try to obtain the lock #ifndef __APPLE__ int retVal = pthread_mutex_timedlock(&m_accessLock, &timeout); diff --git a/graphics/JiveXML/src/ONCRPCServerThreads.cxx b/graphics/JiveXML/src/ONCRPCServerThreads.cxx index 6b8914c52ab82b4394e99326a77efbb61ade3c4f..77a1bdcd680bae6d35c7bbe35cd5618873ef0a4d 100644 --- a/graphics/JiveXML/src/ONCRPCServerThreads.cxx +++ b/graphics/JiveXML/src/ONCRPCServerThreads.cxx @@ -199,8 +199,8 @@ namespace JiveXML { pthread_attr_t attr; retVal = pthread_attr_init (&attr); if ( ! checkResult(retVal,"request handler initializing thread attributes",ServerSvc)) return ; - //Set the stack size to 2*minumum - retVal = pthread_attr_setstacksize(&attr,2*PTHREAD_STACK_MIN); + //Removing the limit on the thread memory usage as a test. 
diff --git a/graphics/JiveXML/src/ONCRPCServerThreads.cxx b/graphics/JiveXML/src/ONCRPCServerThreads.cxx
index 6b8914c52ab82b4394e99326a77efbb61ade3c4f..77a1bdcd680bae6d35c7bbe35cd5618873ef0a4d 100644
--- a/graphics/JiveXML/src/ONCRPCServerThreads.cxx
+++ b/graphics/JiveXML/src/ONCRPCServerThreads.cxx
@@ -199,8 +199,8 @@ namespace JiveXML {
   pthread_attr_t attr;
   retVal = pthread_attr_init (&attr);
   if ( ! checkResult(retVal,"request handler initializing thread attributes",ServerSvc)) return ;
-  //Set the stack size to 2*minumum
-  retVal = pthread_attr_setstacksize(&attr,2*PTHREAD_STACK_MIN);
+  //Removing the limit on the thread memory usage as a test. Suspect that some threads do not have enough memory to finish and therefore eat up all the memory.
+  //retVal = pthread_attr_setstacksize(&attr,10*PTHREAD_STACK_MIN);
   if ( ! checkResult(retVal,"request handler setting thread stacksize",ServerSvc)) return ;
   //NOTE: All threads are first created joinable, so we can wait for the to