diff --git a/Trigger/TrigConfiguration/TrigConfigSvc/CMakeLists.txt b/Trigger/TrigConfiguration/TrigConfigSvc/CMakeLists.txt index ce3981f774e2469bf29421ad55794296c31cdd2f..b293aa67ce530eff2bdec0ec28f26cef31242320 100644 --- a/Trigger/TrigConfiguration/TrigConfigSvc/CMakeLists.txt +++ b/Trigger/TrigConfiguration/TrigConfigSvc/CMakeLists.txt @@ -44,15 +44,10 @@ atlas_add_component( TrigConfigSvc LINK_LIBRARIES AthAnalysisBaseCompsLib AthenaBaseComps TrigConfIOLib TrigConfData TrigConfStorage TrigConfL1Data L1TopoConfig EventInfo AthenaMonitoringLib nlohmann_json::nlohmann_json ) # Install files from the package: -atlas_install_python_modules( python/*.py ) -atlas_install_joboptions( share/hltConfigSvc_standalone_test_configurables.py - share/hltConfigSvc_standalone_test.py - share/jobOptions_commonSetup.py +atlas_install_python_modules( python/*.py POST_BUILD_CMD ${ATLAS_FLAKE8} ) +atlas_install_joboptions( share/jobOptions_commonSetup.py share/jobOptions_setupHLTSvc.py share/jobOptions_setupLVL1Svc.py - share/L1config-example.py - share/lvl1ConfigSvc_standalone_test.py - share/testKeyByNameLoader.py share/testTriggerFrontierQuery.py ) atlas_install_scripts( share/checkTrigger.py share/checkTriggerConfigOld.py share/trigconf_property.py ) atlas_install_xmls( data/*.dtd ) diff --git a/Trigger/TrigConfiguration/TrigConfigSvc/python/DoDBConfig.py b/Trigger/TrigConfiguration/TrigConfigSvc/python/DoDBConfig.py deleted file mode 100644 index 0f435b8d08720511b32ed235d89e4d952b438b04..0000000000000000000000000000000000000000 --- a/Trigger/TrigConfiguration/TrigConfigSvc/python/DoDBConfig.py +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -## @file DoDBConfig.py -## @brief Configure services to extract the trigger configuration for DB upload -## $Id: DoDBConfig.py,v 1.4 2009-03-05 14:52:41 pbell Exp $ - -def _doDBConfig(): - from AthenaCommon.AppMgr import ServiceMgr as svcMgr - from AthenaCommon.AppMgr import theApp - from TriggerJobOpts.TriggerFlags import TriggerFlags - from AthenaCommon.Logging import logging - - log = logging.getLogger( 'TrigConfigSvc::DoDBConfig:' ) - - if TriggerFlags.doLVL2(): SetupOutput = "l2" - else: SetupOutput = "ef" - SetupOutput += "_" + TriggerFlags.Online.doDBConfigBaseName() + "_setup.txt" - - if hasattr(svcMgr,'LVL1ConfigSvc'): - log.info("DBCONFIG LVL1XML %s" % svcMgr.LVL1ConfigSvc.XMLFile) - if hasattr(svcMgr,'HLTConfigSvc'): - log.info("DBCONFIG HLTXML %s" % svcMgr.HLTConfigSvc.XMLMenuFile) - - log.info("DBCONFIG SETUP %s" % SetupOutput) - - # Replace the common HistorySvc by the TrigHistorySvc - from TrigConfOffline.TrigHistorySvc import TrigHistorySvc - if not svcMgr.__dict__.has_key('HistorySvc') or not isinstance( svcMgr.HistorySvc, TrigHistorySvc) : - if svcMgr.__dict__.has_key('HistorySvc'): del svcMgr.HistorySvc - svcMgr += TrigHistorySvc('HistorySvc') - - svcMgr.HistorySvc.OutputFile = SetupOutput - svcMgr.HistorySvc.Dump = True - svcMgr.HistorySvc.Activate = True - theApp.ActivateHistory = True - - # Fill the meta data from the PSC configuration if available - try: - from TrigPSC import PscConfig - except: - log.warning('Cannot import TrigPSC.PscConfig. 
TrigConfMetaData will not be filled.') - pass - - if 'PscConfig' in dir(): - from TrigConfigSvc.TrigConfMetaData import TrigConfMetaData - meta = TrigConfMetaData() - meta.PreCommand = PscConfig.optmap['PRECOMMAND'] - meta.JobOptions = PscConfig.optmap['JOBOPTIONSPATH'] - - return - -# Create meta data configurable (needs to be at module-level) -from TrigConfigSvc.TrigConfMetaData import TrigConfMetaData -meta = TrigConfMetaData() - -# configure -_doDBConfig() - -# clean-up -del _doDBConfig - diff --git a/Trigger/TrigConfiguration/TrigConfigSvc/python/TrigConf2COOL.py b/Trigger/TrigConfiguration/TrigConfigSvc/python/TrigConf2COOL.py index 0daeef5bfb6b02f023a58c5c8d1b7a04a56e8680..56b229d9a6c55b2fe155290e464fa97916e8593c 100644 --- a/Trigger/TrigConfiguration/TrigConfigSvc/python/TrigConf2COOL.py +++ b/Trigger/TrigConfiguration/TrigConfigSvc/python/TrigConf2COOL.py @@ -1,7 +1,6 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration ## @file TrigCond2COOL.py -## $Id: TrigConf2COOL.py,v 1.14 2009-05-06 13:09:27 stelzer Exp $ ############################################################### # # The jobOptions to enable reading and writing of configuration @@ -11,9 +10,6 @@ #============================================================== # # Required libs: -import os.path -import threading - from . import TrigConf2COOLLib as _impl from AthenaCommon.AppMgr import theApp diff --git a/Trigger/TrigConfiguration/TrigConfigSvc/python/TrigConf2COOLLib.py b/Trigger/TrigConfiguration/TrigConfigSvc/python/TrigConf2COOLLib.py index 50ded42a6a0795b4cead71719965feba770deed7..b9e28db12d0fa4ee2c71bb0819f4d7c9f61c36a7 100644 --- a/Trigger/TrigConfiguration/TrigConfigSvc/python/TrigConf2COOLLib.py +++ b/Trigger/TrigConfiguration/TrigConfigSvc/python/TrigConf2COOLLib.py @@ -1,7 +1,6 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration ## @file TrigCond2COOLLib.py -## $Id: TrigConf2COOL.py,v 1.14 2009-05-06 13:09:27 stelzer Exp $ ############################################################### # # The jobOptions to enable reading and writing of configuration @@ -11,11 +10,10 @@ #============================================================== # # Required libs: -import os.path +import os import threading import AthenaCommon.Logging as L -from AthenaCommon.JobProperties import jobproperties as jp from AthenaPython import PyAthena @@ -31,7 +29,7 @@ class TmpThr(threading.Thread): line = line.lower() if ' warning ' in line: maxlevel = max(1,maxlevel) - if ' error ' in line and not 'connection refused' in line: + if ' error ' in line and 'connection refused' not in line: maxlevel = max(2,maxlevel) elif ' fatal ' in line.lower() or 'exception ' in line.lower(): maxlevel = max(3,maxlevel) @@ -85,7 +83,6 @@ class ConfToCoolSQlite: self.dbConnection = "<dbConnection>sqlite://;schema=%s;dbname=%s</dbConnection>" % (self.dbfilename,self.dbname) self.isWritingNeeded = False from RecExConfig.RecFlags import jobproperties as jp - from TriggerJobOpts.TriggerFlags import jobproperties as jp from TriggerJobOpts.TriggerFlags import TriggerFlags as tf if jp.Rec.Trigger.readLVL1configFromXML(): self.lvl1menu = jp.Rec.Trigger.inputLVL1configFile() @@ -127,12 +124,12 @@ class ConfToCoolSQlite: return # we write COOL sqlite file from given HLT and LVL1 menu xml if self.menusource == 'xml': - msg.info("Writing menu %s and %s to COOL (%s)" % 
(self.lvl1menu, self.hltmenu, self.dbfilename) ) + msg.info("Writing menu %s and %s to COOL (%s)", self.lvl1menu, self.hltmenu, self.dbfilename) syscmd = "rm -f %s; TrigConfReadWrite -i %s %s -o cool '%s;%s'" % (self.dbfilename, self.hltmenu, self.lvl1menu, self.dbfilename, self.dbname) else: # db if self.smk==0 or self.l1psk==0 or self.hltpsk==0: - raise RuntimeError, "Source of trigger menu configuration is the TriggerDB, but no keys are specified: %i/%i/%i" % (self.smk, self.l1psk, self.hltpsk) - msg.info("Writing menu (keys: %i/%i/%i/%i) from triggerDB (%s) to COOL (%s)" % (self.smk, self.l1psk, self.hltpsk, self.bgsk, self.trigdb, self.dbfilename) ) + raise RuntimeError("Source of trigger menu configuration is the TriggerDB, but no keys are specified: %i/%i/%i" % (self.smk, self.l1psk, self.hltpsk)) + msg.info("Writing menu (keys: %i/%i/%i/%i) from triggerDB (%s) to COOL (%s)", self.smk, self.l1psk, self.hltpsk, self.bgsk, self.trigdb, self.dbfilename) syscmd = "rm -f %s; TrigConf2COOLApp -e createwrite" % self.dbfilename syscmd += " --cooldb 'sqlite://;schema=%s;dbname=%s'" % (self.dbfilename,self.dbname) syscmd += " --trigdb '%s' --configkey %i --prescalekeylvl1 %i --prescalekeyhlt %i --bgkey %i" % (self.trigdb, self.smk, self.l1psk, self.hltpsk, self.bgsk) @@ -141,7 +138,7 @@ class ConfToCoolSQlite: syscmd += " --infiov" - msg.info('executing system command to create COOL SQlite file %s with trigger configuration' % self.dbfilename) + msg.info('executing system command to create COOL SQlite file %s with trigger configuration', self.dbfilename) msg.info("> " + syscmd) tmpThr = TmpThr(syscmd) diff --git a/Trigger/TrigConfiguration/TrigConfigSvc/python/TrigConf2XML.py b/Trigger/TrigConfiguration/TrigConfigSvc/python/TrigConf2XML.py deleted file mode 100644 index e18a9ab921c0dfd41968297cff367431a61a7536..0000000000000000000000000000000000000000 --- a/Trigger/TrigConfiguration/TrigConfigSvc/python/TrigConf2XML.py +++ /dev/null @@ -1,73 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - -## @file TrigCond2XML.py -## @brief Core job python configurable to setup IOVDbSvc -## @author RD Schaffer <R.D.Schaffer@cern.ch> -## $Id: TrigConf2XML.py,v 1.3 2008-03-07 22:40:23 stelzer Exp $ -############################################################### -# -# The jobOptions to enable writing of configuration -# data from the DB to XML# -# -#============================================================== -# -# Required libs: -import os.path - -from AthenaCommon.JobProperties import jobproperties as jp -from AthenaCommon.Logging import logging - -class ConfToXML: - """Writes the trigger configuration information to XML""" - - __instance = None - - class __single: - - def __init__(self): - - self.trigdb = "" - self.dbuser = "" - self.dbpass = "" - self.dbkey = 1 - self.lvl1pskey = 1 - self.hltpskey = 1 - self.outputfile = "dbOutput" - - def writeConf2XML(self): - msg = logging.getLogger( 'TriggerConfig(2XML)' ) - msg.info("Writing HLT XML file %s" % (self.outputfile) ) - - syscmd = "TrigConf2XMLApp" - syscmd += " --trigdb %s" % (self.trigdb) - syscmd += " --u %s" % self.dbuser - syscmd += " --p %s " % self.dbpass - syscmd += " --configkey %i" % self.dbkey - syscmd += " --prescalekeylvl1 %i" % self.lvl1pskey - syscmd += " --prescalekeyhlt %i" % self.hltpskey - syscmd += " --outputfile %s" % self.outputfile - - msg.info('executing system command to create XML file') - msg.info("> " + syscmd) - - output = os.popen(syscmd).readlines() - for line in output: - 
msg.info(line.strip()) - - def __init__(self): - """ Create singleton instance """ - if self.__instance is None: - ConfToXML.__instance = ConfToXML.__single() - - # Store instance reference as the only member in the handle, not sure if this is needed - self.__dict__['_ConfToXML__instance'] = ConfToXML.__instance - - def __getattr__(self, attr): - """ Delegate access to implementation """ - return getattr(self.__instance, attr) - - def __setattr__(self, attr, value): - """ Delegate access to implementation """ - return setattr(self.__instance, attr, value) - -theConfXMLWriter = ConfToXML() diff --git a/Trigger/TrigConfiguration/TrigConfigSvc/python/TrigConfFrontier.py b/Trigger/TrigConfiguration/TrigConfigSvc/python/TrigConfFrontier.py index 8b0a35c98851fb59cfd4059e207d65515949a8b6..683b01b80e41a297e3232eaea887e80d60a3240b 100755 --- a/Trigger/TrigConfiguration/TrigConfigSvc/python/TrigConfFrontier.py +++ b/Trigger/TrigConfiguration/TrigConfigSvc/python/TrigConfFrontier.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration from AthenaCommon.Logging import logging import time @@ -8,9 +8,9 @@ def getFrontierCursor(url, schema, loglevel = logging.INFO): log = logging.getLogger( "TrigConfFrontier.py" ) log.setLevel(loglevel) try: - from TrigConfDBConnection import frontier_client + from TrigConfDBConnection import frontier_client # noqa: F401 return FrontierCursor2( url = url, schema = schema) - except: + except Exception: log.warning("Couldn't import frontier_client from TrigConfDBConnection, falling back to pure python implementation without proper url resolution") return FrontierCursor( url = url, schema = schema) @@ -23,9 +23,9 @@ class FrontierCursor2: self.refreshFlag = refreshFlag from TrigConfDBConnection import frontier_client as fc fc.init("PyFrontier","debug") - log.debug("Frontier URL : %s" % self.url) - log.debug("Schema : %s" % self.schema) - log.debug("Refresh cache : %s" % self.refreshFlag) + log.debug("Frontier URL : %s", self.url) + log.debug("Schema : %s", self.schema) + log.debug("Refresh cache : %s", self.refreshFlag) @classmethod def resolvebindvars(cls, query, bindvars): @@ -35,7 +35,7 @@ class FrontierCursor2: import re varsextract = re.findall(':([A-z0-9]*)',query) values = map(bindvars.get, varsextract) - log.debug("Resolving bound variable %r with %r" % (varsextract,values)) + log.debug("Resolving bound variable %r with %r", varsextract,values) appendix = ":".join([str(v) for v in values]) queryWithQuestionMarks = re.sub(':[A-z0-9]*','?', query) query = queryWithQuestionMarks + ':' + appendix @@ -54,8 +54,8 @@ class FrontierCursor2: query = query.replace(":%s" % var,"%s" % val) else: query = query.replace(":%s" % var,"%r" % val) - log.debug("Resolving bound variable '%s' with %r" % (var,val)) - log.debug("Resolved query: %s" % query) + log.debug("Resolving bound variable '%s' with %r", var,val) + log.debug("Resolved query: %s", query) return query def execute(self, query, bindvars={}): @@ -64,7 +64,7 @@ class FrontierCursor2: from TrigConfDBConnection import frontier_client as fc log = logging.getLogger( "TrigConfFrontier.py" ) - log.debug("Executing query : %s" % query) + log.debug("Executing query : %s", query) conn = fc.Connection(self.url) session = fc.Session(conn) @@ -73,7 +73,7 @@ class FrontierCursor2: conn.setReload(doReload) queryStart = time.localtime() - log.debug("Query started: %s" % time.strftime("%m/%d/%y %H:%M:%S 
%Z", queryStart)) + log.debug("Query started: %s", time.strftime("%m/%d/%y %H:%M:%S %Z", queryStart)) t1 = time.time() req = fc.Request("frontier_request:1:DEFAULT", fc.encoding_t.BLOB) @@ -95,9 +95,9 @@ class FrontierCursor2: queryEnd = time.localtime() self.result = [r for r in session.getRecords2()] - log.debug("Query ended: %s" % time.strftime("%m/%d/%y %H:%M:%S %Z", queryEnd)) - log.debug("Query time: %s seconds" % (t2-t1)) - log.debug("Result size: %i entries" % len(self.result)) + log.debug("Query ended: %s", time.strftime("%m/%d/%y %H:%M:%S %Z", queryEnd)) + log.debug("Query time: %s seconds", (t2-t1)) + log.debug("Result size: %i entries", len(self.result)) def fetchall(self): return self.result @@ -113,7 +113,6 @@ Refresh cache: %s""" % (self.url, self.schema, self.refreshFlag) class FrontierCursor: def __init__(self, url, schema, refreshFlag=False, doDecode=True, retrieveZiplevel="zip"): - log = logging.getLogger( "TrigConfFrontier.py" ) if url.startswith('('): self.servertype, self.url = FrontierCursor.getServerUrls(url)[0] self.url += "/Frontier" @@ -133,7 +132,7 @@ Refresh cache: %s""" % (self.url, self.refreshFlag) @classmethod def getServerUrls(cls, frontier_servers): from re import findall - return findall('\((serverurl)=(.*?)\)',frontier_servers) + return findall(r'\((serverurl)=(.*?)\)',frontier_servers) @classmethod def testUrl(cls, url): @@ -148,9 +147,9 @@ Refresh cache: %s""" % (self.url, self.refreshFlag) query = FrontierCursor2.replacebindvars(query,bindvars) log = logging.getLogger( "TrigConfFrontier.py" ) - log.debug("Using Frontier URL: %s" % self.url) - log.debug("Refresh cache : %s" % self.refreshFlag) - log.debug("Query : %s" % query) + log.debug("Using Frontier URL: %s", self.url) + log.debug("Refresh cache : %s", self.refreshFlag) + log.debug("Query : %s", query) import base64, zlib, urllib2, time @@ -168,16 +167,16 @@ Refresh cache: %s""" % (self.url, self.refreshFlag) request.add_header("X-Frontier-Id", frontierId) queryStart = time.localtime() - log.debug("Query started: %s" % time.strftime("%m/%d/%y %H:%M:%S %Z", queryStart)) + log.debug("Query started: %s", time.strftime("%m/%d/%y %H:%M:%S %Z", queryStart)) t1 = time.time() result = urllib2.urlopen(request,None,10).read() t2 = time.time() queryEnd = time.localtime() - log.debug("Query ended: %s" % time.strftime("%m/%d/%y %H:%M:%S %Z", queryEnd)) - log.debug("Query time: %s [seconds]" % (t2-t1)) - log.debug("Result size: %i [seconds]" % len(result)) + log.debug("Query ended: %s", time.strftime("%m/%d/%y %H:%M:%S %Z", queryEnd)) + log.debug("Query time: %s [seconds]", (t2-t1)) + log.debug("Result size: %i [seconds]", len(result)) self.result = result def fetchall(self): @@ -188,7 +187,7 @@ Refresh cache: %s""" % (self.url, self.refreshFlag) def decodeResult(self): log = logging.getLogger( "TrigConfFrontier.py" ) from xml.dom.minidom import parseString - import base64,zlib, curses.ascii + import base64, zlib, curses.ascii #print "Query result:\n", self.result dom = parseString(self.result) dataList = dom.getElementsByTagName("data") @@ -220,7 +219,6 @@ Refresh cache: %s""" % (self.url, self.refreshFlag) firstRow = firstRow.replace(c, ' ') fields = [x for i,x in enumerate(firstRow.split()) if i%2==0] types = [x for i,x in enumerate(firstRow.split()) if i%2==1] - Nfields = len(fields) ptypes = [] for t in types: if t.startswith("NUMBER"): @@ -232,9 +230,9 @@ Refresh cache: %s""" % (self.url, self.refreshFlag) ptypes.append(str) - log.debug("Fields : %r" % fields) - log.debug("DB Types : %r" % types) - 
log.debug("Python Types: %r" % ptypes) + log.debug("Fields : %r", fields) + log.debug("DB Types : %r", types) + log.debug("Python Types: %r", ptypes) row = str(row[endFirstRow+1:]) @@ -264,7 +262,6 @@ Refresh cache: %s""" % (self.url, self.refreshFlag) def testConnection(): - import os log = logging.getLogger( "TrigConfFrontier.py::testConnection()" ) log.setLevel(logging.DEBUG) @@ -328,10 +325,10 @@ TE2CP.HTE2CP_ALGORITHM_COUNTER DESC""" def testBindVarResolution(): query = "SELECT :bar WHERE :foo = :bar sort by :ups asc, :foo" bindvars = {"foo": 500, "bar": 8, "ups": 42 } - print "Query" - print query - print "is translated to" - print FrontierCursor2.resolvebindvars(query, bindvars) + print("Query") + print(query) + print("is translated to") + print(FrontierCursor2.resolvebindvars(query, bindvars)) if __name__=="__main__": diff --git a/Trigger/TrigConfiguration/TrigConfigSvc/python/TrigConfMetaData.py b/Trigger/TrigConfiguration/TrigConfigSvc/python/TrigConfMetaData.py index 1bca4f187a010b2b4e6b7a5550142af9694932c7..6f3c157cb1f17e631f7a6d9ebdd3cb6b6f72ee33 100644 --- a/Trigger/TrigConfiguration/TrigConfigSvc/python/TrigConfMetaData.py +++ b/Trigger/TrigConfiguration/TrigConfigSvc/python/TrigConfMetaData.py @@ -1,11 +1,9 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration ## @file TrigConfMetaData.py ## @brief Meta data for the trigger configuration -## $Id: $ -from GaudiKernel.GaudiHandles import * -from GaudiKernel.Proxy.Configurable import * +from GaudiKernel.Proxy.Configurable import Configurable, ConfigurableUser class TrigConfMetaData( ConfigurableUser ) : """Configurable to store aribitray meta data in the job options diff --git a/Trigger/TrigConfiguration/TrigConfigSvc/python/TrigConfigCheckInPool.py b/Trigger/TrigConfiguration/TrigConfigSvc/python/TrigConfigCheckInPool.py index d96b3d1e8450145756b95f5fd499273f93b79c02..5943955b7f273e6f64ddc42986834e772811496d 100644 --- a/Trigger/TrigConfiguration/TrigConfigSvc/python/TrigConfigCheckInPool.py +++ b/Trigger/TrigConfiguration/TrigConfigSvc/python/TrigConfigCheckInPool.py @@ -1,4 +1,4 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration import ROOT, cppyy import AthenaROOTAccess.transientTree @@ -48,7 +48,6 @@ def checkPoolFileForRunLevel(poolfilename): for payload in plc.iter(): for i in xrange(payload.size()): chanNum = int(payload.chanNum(i)) - iovr = payload.iovRange(chanNum) l1keys += [ payload.attributeList(chanNum)["Lvl1PrescaleConfigurationKey"].data("unsigned int")() ] if hasattr(mdt,folderName["HLTK"]): @@ -60,14 +59,13 @@ def checkPoolFileForRunLevel(poolfilename): for payload in plc.iter(): for i in xrange(payload.size()): chanNum = int(payload.chanNum(i)) - iovr = payload.iovRange(chanNum) hltkeys += [ payload.attributeList(chanNum)["HltPrescaleConfigurationKey"].data("unsigned int")() ] - mlog.info("File contained these LVL1 prescale keys: %r" % l1keys) - mlog.info("File contained these HLT prescale keys: %r" % hltkeys) + mlog.info("File contained these LVL1 prescale keys: %r", l1keys) + mlog.info("File contained these HLT prescale keys: %r", hltkeys) - hasL1 = len(l1keys)>0 and not 0 in l1keys - hasHLT = len(hltkeys)>0 and not 0 in hltkeys + hasL1 = len(l1keys)>0 and 0 not in l1keys + hasHLT = len(hltkeys)>0 and 0 not in hltkeys from TriggerJobOpts.TriggerFlags import TriggerFlags if hasL1 or hasHLT: @@ -77,8 +75,4 @@ 
def checkPoolFileForRunLevel(poolfilename): TriggerFlags.dataTakingConditions='Lvl1Only' else: TriggerFlags.dataTakingConditions='HltOnly' - mlog.info("Set TriggerFlags.dataTakingConditions to '%s'" % TriggerFlags.dataTakingConditions()) - - - - + mlog.info("Set TriggerFlags.dataTakingConditions to '%s'", TriggerFlags.dataTakingConditions()) diff --git a/Trigger/TrigConfiguration/TrigConfigSvc/python/TrigConfigSvcUtils.py b/Trigger/TrigConfiguration/TrigConfigSvc/python/TrigConfigSvcUtils.py index 4bf8e2d37a7a6b6b41bc06f76fadb6a01e159006..1f8f5a34690baee44f1cc8b27a3eaa9a2ba1676d 100644 --- a/Trigger/TrigConfiguration/TrigConfigSvc/python/TrigConfigSvcUtils.py +++ b/Trigger/TrigConfiguration/TrigConfigSvc/python/TrigConfigSvcUtils.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration from xml.dom import minidom import re import os @@ -28,7 +28,7 @@ def _getFileLocalOrPath(filename, pathenv): returns path/filename if existing, otherwise None """ if os.path.exists(filename): - log.info( "Using local file %s" % filename) + log.info( "Using local file %s", filename) return filename pathlist = os.getenv(pathenv,'').split(os.pathsep) @@ -45,7 +45,7 @@ def _getConnectionServicesForAlias(alias): connectionServices = None # list of services dblookupfilename = _getFileLocalOrPath('dblookup.xml','CORAL_DBLOOKUP_PATH') - if dblookupfilename == None: return None + if dblookupfilename is None: return None doc = minidom.parse(dblookupfilename) for ls in doc.getElementsByTagName('logicalservice'): @@ -53,8 +53,8 @@ def _getConnectionServicesForAlias(alias): connectionServices = [str(s.attributes['name'].value) for s in ls.getElementsByTagName('service')] doc.unlink() - log.info( "For alias '%s' found list of connections %r" % (alias,connectionServices) ) - if connectionServices == None: + log.info( "For alias '%s' found list of connections %r", alias,connectionServices ) + if connectionServices is None: log.fatal("Trigger connection alias '%s' is not defined in %s" % (alias,dblookupfilename)) return connectionServices @@ -68,13 +68,12 @@ def _readAuthentication(): authDict = {} dbauthfilename = _getFileLocalOrPath('authentication.xml','CORAL_AUTH_PATH') - if dbauthfilename == None: return authDict + if dbauthfilename is None: return authDict doc = minidom.parse(dbauthfilename) for cn in doc.getElementsByTagName('connection'): user = "" pw = "" - svc = cn.attributes['name'].value for p in cn.getElementsByTagName('parameter'): if p.attributes['name'].value == 'user': user = p.attributes['value'].value if p.attributes['name'].value == 'password': pw = p.attributes['value'].value @@ -133,7 +132,7 @@ def _getConnectionParameters(connection): connectionParameters["passwd"] = passwd elif connection.startswith("frontier://"): - pattern = "frontier://ATLF/\(\)/(.*)" + pattern = r"frontier://ATLF/\(\)/(.*)" m = re.match(pattern,connection) if not m: log.fatal("connection string '%s' doesn't match the pattern '%s'?" 
% (connection,pattern) ) @@ -145,14 +144,15 @@ def _getConnectionParameters(connection): def interpretConnection(connection, debug=False, resolveAlias=True): - # connection needs to be of the following format (this is also the order of checking) - # <ALIAS> -- any string without a colon ':' will be checked for in the dblookup.xml file - # type:<detail> -- no dblookup will be used, type has to be oracle, mysql, or sqlite_file - # sqlite_file:filename.db -- an sqlite file, no authentication needed, will be opened in read-only mode - # oracle://ATLR/ATLAS_CONF_TRIGGER_V2 -- a service description without user and password, requires lookup in authentication.xml - # oracle://ATLR/ATLAS_CONF_TRIGGER_V2;username=ATLAS_CONF_TRIGGER_V2_R;password=<...> -- a service description with user and password + """connection needs to be of the following format (this is also the order of checking) + <ALIAS> -- any string without a colon ':' will be checked for in the dblookup.xml file + type:<detail> -- no dblookup will be used, type has to be oracle, mysql, or sqlite_file + sqlite_file:filename.db -- an sqlite file, no authentication needed, will be opened in read-only mode + oracle://ATLR/ATLAS_CONF_TRIGGER_V2 -- a service description without user and password, requires lookup in authentication.xml + oracle://ATLR/ATLAS_CONF_TRIGGER_V2;username=ATLAS_CONF_TRIGGER_V2_R;password=<...> -- a service description with user and password + """ - log.info("Specified connection string '%s'" % connection) + log.info("Specified connection string '%s'", connection) # not needed any longer # connection = connection.lstrip("dblookup://") @@ -176,27 +176,18 @@ def interpretConnection(connection, debug=False, resolveAlias=True): return connectionParameters connectionServices = _getConnectionServicesForAlias( connection ) # alias resolution via dblookup - if connectionServices == None: + if connectionServices is None: return connectionParameters - - # If TriggerFlags.triggerUseFrontier=true then we remove sqlite files - from TriggerJobOpts.TriggerFlags import TriggerFlags as tf - #if tf.triggerUseFrontier() or os.getenv('TRIGGER_USE_FRONTIER',False): - # connectionServices = filter(lambda conn: not conn.startswith("sqlite_file"), connectionServices) - # if 'ATLAS_TRIGGERDB_FORCESQLITE' in os.environ: - # log.fatal("Inconsistent setup: environment variable ATLAS_TRIGGERDB_FORCESQLITE is defined and use of Frontier is requested" ) - - # SQLite sqliteconnections = [conn for conn in connectionServices if conn.startswith("sqlite_file")] if len(sqliteconnections)>0: for conn in sqliteconnections: connectionParameters = _getConnectionParameters( conn ) - if connectionParameters["filename"] != None: + if connectionParameters["filename"] is not None: break # stop at the first sqlite file that exists - if connectionParameters["filename"] != None: - log.info("Using sqlite connection %s" % connectionParameters) + if connectionParameters["filename"] is not None: + log.info("Using sqlite connection %s", connectionParameters) return connectionParameters else: if 'ATLAS_TRIGGERDB_FORCESQLITE' in os.environ: @@ -209,34 +200,34 @@ def interpretConnection(connection, debug=False, resolveAlias=True): from CoolConvUtilities.AtlCoolLib import replicaList serverlist=['ATLAS_CONFIG' if s=='ATLAS_COOLPROD' else s for s in replicaList()] # replicaList is for COOL, I need ATLAS_CONFIG instead of ATLAS_COOLPROD #serverlist=['ATLF'] - log.info("Trying these servers in order %r" % serverlist) + log.info("Trying these servers in order %r", serverlist) for 
server in serverlist: - log.info("Trying server %s" % server) + log.info("Trying server %s", server) if server=='ATLF': #if not tf.triggerUseFrontier() and not os.getenv('TRIGGER_USE_FRONTIER',False): continue frontierconnections = [conn for conn in connectionServices if conn.startswith("frontier")] if len(frontierconnections) == 0: - log.debug("FroNTier connection not defined for alias %s in dblookup" % connection ) + log.debug("FroNTier connection not defined for alias %s in dblookup", connection ) continue - log.info("Environment FRONTIER_SERVER: %s" % os.getenv('FRONTIER_SERVER','not defined')) + log.info("Environment FRONTIER_SERVER: %s", os.getenv('FRONTIER_SERVER','not defined')) frontierServer = os.getenv('FRONTIER_SERVER',None) if not frontierServer: log.debug("No environment variable FRONTIER_SERVER" ) continue connectionParameters = _getConnectionParameters( frontierconnections[0] ) connectionParameters['url'] = frontierServer - log.info("Using frontier connection %s" % frontierconnections[0]) + log.info("Using frontier connection %s", frontierconnections[0]) #connstr='frontier://ATLF/%s;schema=%s;dbname=TRIGCONF' % (connectionParameters['url'],connectionParameters["schema"]) break elif server=='atlas_dd': continue else: oracleconnections = [conn for conn in connectionServices if conn.lower().startswith("oracle://%s/" % server.lower())] if len(oracleconnections) == 0: - log.debug("Oracle connection not defined for server %s in dblookup" % server ) + log.debug("Oracle connection not defined for server %s in dblookup", server ) continue connectionParameters = _getConnectionParameters( oracleconnections[0] ) - log.info("Using oracle connection %s" % oracleconnections[0]) + log.info("Using oracle connection %s", oracleconnections[0]) #connstr='oracle://%s;schema=ATLAS_%s;dbname=TRIGCONF' % (connectionParameters["server"],connectionParameters["schema"]) break @@ -302,7 +293,7 @@ def getUsedTables(output, condition, schemaname, tables): usedtables.add(o.split('.')[0]) for c in condition: for p in c.split(): - if '.' in p and not '\'' in p: usedtables.add(p.split('.')[0].lstrip('(')) + if '.' 
in p and '\'' not in p: usedtables.add(p.split('.')[0].lstrip('(')) return ["%s%s %s" % (schemaname,tables[t],t) for t in usedtables] @@ -326,10 +317,10 @@ def executeQuery(cursor, output, condition, schemaname, tables, bindvars=()): query += ' where ' + ' and '.join(condition) if len(bindvars)==0: - log.debug("Executing query %s" % query) + log.debug("Executing query %s", query) cursor.execute(str(query)) else: - log.debug("Executing query %s with bound variables %r" % (query, bindvars)) + log.debug("Executing query %s with bound variables %r", query, bindvars) cursor.execute(str(query),bindvars) return cursor.fetchall() @@ -465,7 +456,7 @@ def getMenuNameFromDB(connection, hltprescalekey): # now we need to do some logic, related to the - print res + print(res) hltpsName = str(res[0][0]) @@ -473,7 +464,7 @@ m = re.match( "(.*)_default_prescale", hltpsName) menuName = m.group(1) if m else hltpsName - log.info("Interpreting menu name from HLT prescale key %i: %s" % (hltprescalekey,menuName)) + log.info("Interpreting menu name from HLT prescale key %i: %s", hltprescalekey, menuName) return menuName @@ -495,9 +486,9 @@ def getKeysFromNameRelease(connection, name, release, l1only): cursor,schemaname = getTriggerDBCursor(connection) smname = name.split('__')[0] - print 'SM name ', smname - print 'PS name ', name - print 'release ', release + print('SM name %s' % smname) + print('PS name %s' % name) + print('release %s' % release) keys = [] #Find the Release id @@ -572,7 +563,7 @@ def getKeysFromNameRelease(connection, name, release, l1only): hltk = executeQuery(cursor, output, condition, schemaname, tables) hltid = (str(hltk[-1])).lstrip('(').rstrip(')').split(',')[0] - print 'HLT PS gotten ', hltid + print('HLT PS gotten %s' % hltid) keys = [int(smid), int(l1id), int(hltid)] @@ -657,7 +648,7 @@ def getChainsWithLowerChainNames(connection, smk): if isrun2: output = ['TC.HTC_ID', 'TC.HTC_CHAIN_COUNTER', 'TC.HTC_NAME', 'TC.HTC_LOWER_CHAIN_NAME'] else: - print "ERROR: This method is compatibly with Run2 only" + log.error("This method is compatible with Run2 only") return chainshlt tables = {} @@ -781,7 +772,7 @@ def getPrescaleFromCut(cut): """Convert (run-2) prescale cuts into prescale value""" sign = -1 if cut<0 else 1 ucut = abs(cut) - return (sign*0xFFFFFF ) / float( 0x1000000 - ucut ); + return (sign*0xFFFFFF ) / float( 0x1000000 - ucut ) def queryHLTPrescaleTable(connection,psk): """returns content of prescale set table and prescale table for a @@ -908,7 +899,7 @@ def queryHLTPrescaleTableRun2(connection,psk,smk): valid = True if not valid: - print "WARNING: Selected HLT Prescale Key not associated with Supermaster key" + log.warning("Selected HLT Prescale Key not associated with Supermaster key") return 0 cursor,schemaname = getTriggerDBCursor(connection) @@ -964,38 +955,38 @@ def test(): log.setLevel(logging.DEBUG) ### oracle - print """#################################### + print("""#################################### ## ## Testing ORACLE ## -####################################""" +####################################""") l2, ef = getChains("TRIGGERDBREPR", 539) strm_oracle = getStreams("TRIGGERDBREPR", 539) - print "\nList of Streams in SMK 539",strm_oracle,"\n" + print("\nList of Streams in SMK 539 %s\n" % strm_oracle) ### frontier - print """#################################### + print("""#################################### ## ## Testing FRONTIER ## -####################################""" +####################################""") 
from TriggerJobOpts.TriggerFlags import TriggerFlags as tf tf.triggerUseFrontier = True strm_frontier = getStreams("TRIGGERDBREPR", 539) - print "\nList of Streams in SMK 539",strm_frontier,"\n" + print("\nList of Streams in SMK 539 %s\n" % strm_frontier) if strm_oracle == strm_frontier: - print """#################################### + print("""#################################### ## ## ORACLE and FRONTIER give IDENTICAL results ## -####################################""" +####################################""") else: - print """#################################### + print("""#################################### ## ## ERROR: ORACLE and FRONTIER give DIFFERENT results ## -####################################""" +####################################""") @@ -1017,12 +1008,12 @@ def test2(): for c in connections: - print "\nConnecting to alias ",c + print("\nConnecting to alias %s" % c) cursor, schema = getTriggerDBCursor(c) if cursor: - print "SUCCESS : %s" % schema + print("SUCCESS : %s" % schema) else: - print "FAILURE" + print("FAILURE") diff --git a/Trigger/TrigConfiguration/TrigConfigSvc/python/TrigLVL1configExtented.py b/Trigger/TrigConfiguration/TrigConfigSvc/python/TrigLVL1configExtented.py deleted file mode 100755 index 2e126a8ba58a1a0c31596abd85c27a8d87870837..0000000000000000000000000000000000000000 --- a/Trigger/TrigConfiguration/TrigConfigSvc/python/TrigLVL1configExtented.py +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - - -#import TrigConfigSvc.TriggerPythonConfig - - -def extendLVL1config(triggerPythonConfig): - triggerPythonConfig.CTPInfo().setBunchGroupSet("MC") - triggerPythonConfig.CTPInfo().addBunchGroup( 'BCRVeto', 0, [1] ) - triggerPythonConfig.CTPInfo().addBunchGroup( 'Filled', 1, [1] ) - triggerPythonConfig.CTPInfo().addBunchGroup( 'EmptyCalib', 2, [] ) - triggerPythonConfig.CTPInfo().addBunchGroup( 'Empty', 3, [] ) - triggerPythonConfig.CTPInfo().addBunchGroup( 'UnpairedBeam1', 4, [] ) - triggerPythonConfig.CTPInfo().addBunchGroup( 'UnpairedBeam2', 5, [] ) - triggerPythonConfig.CTPInfo().addBunchGroup( 'EmptyAfterFilled', 6, [] ) - triggerPythonConfig.CTPInfo().addBunchGroup( 'InTrain', 7, [1] ) - triggerPythonConfig.CTPInfo().setDeadtime('Commissioning', 4, 0, 0, 0, 0) - triggerPythonConfig.CTPInfo().setRandom('rand01', 5, 0, 1, 1) - triggerPythonConfig.CTPInfo().setPrescaledClock('psc01', 10, 100) - triggerPythonConfig.Lvl1CaloInfo().setName('standard') - diff --git a/Trigger/TrigConfiguration/TrigConfigSvc/python/TriggerConfig.py b/Trigger/TrigConfiguration/TrigConfigSvc/python/TriggerConfig.py deleted file mode 100644 index 16be1649ecebef1fc17cf11e9897fc83a880eb9a..0000000000000000000000000000000000000000 --- a/Trigger/TrigConfiguration/TrigConfigSvc/python/TriggerConfig.py +++ /dev/null @@ -1,268 +0,0 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration - - - -## @file TriggerConfig.py -## @brief Core job python configurable to setup the trigger configuration source -## @author Till Eifert <eifert@cern.ch> -## @author Joerg Stelzer <stelzer@cern.ch> -## $Id: TriggerConfig.py,v 1.4 2008-03-10 13:03:47 stelzer Exp $ - -############################################################### -# -# The jobOptions to enable reading and writing of configuration -# data from COOL into AOD -# -# -#============================================================== - - - -# Required libs: -import os.path - -## all we need is the standard job to -import AthenaCommon.AtlasUnixStandardJob -from 
AthenaCommon.AppMgr import theApp - -## the environment for this module -from AthenaCommon.JobProperties import jobproperties as jp -import RecExConfig.RecFlags -import TriggerJobOpts.TriggerFlags - -from AthenaCommon.Logging import logging as log - -def _setupConfig(): - rf = {} - rf['readTAG'] = jp.Rec.readTAG() - rf['readESD'] = jp.Rec.readESD() - rf['readAOD'] = jp.Rec.readAOD() - rf['readBS'] = jp.Rec.Trigger.readBS() - rf['readRDO'] = jp.Rec.readRDO() - rf['DataSource'] = jp.Global.DataSource() - rf['readRDO'] = jp.Rec.readRDO() - rf['doWriteESD'] = jp.Rec.doWriteESD() - rf['doWriteAOD'] = jp.Rec.doWriteAOD() - rf['doWriteTAG'] = jp.Rec.doWriteTAG() - rf['TrigCoolSQLite'] = False - - global readRDO - print "global readRDO=", readRDO - varInit = globals().keys() - print varInit - print rf.keys() - for o in [o for o in rf.keys() if o in varInit]: - rf[o] = eval(o) - - #print "JJJJ DEBUG Print jobproperties" - #jp.print_JobProperties('tree&value') - #print "JJJJ DEBUG Print rec.jobproperties" - #jp.Rec.print_JobProperties('tree&value') - #print "JJJJ DEBUG Print rec.Trigger.jobproperties aka TriggerFlags" - #jp.Rec.Trigger.print_JobProperties('tree&value') - - ## logging for this module - msg = log.getLogger( 'TriggerConfig.py' ) - - ## the generic trigger configuration service needs to be set up - from TrigConfigSvc.TrigConfigSvcConfig import SetupTrigConfigSvc - msg.info('Creating the trigger configuration service wrapper') - svc = SetupTrigConfigSvc() - - ## ================================================================================ - ## the service depends on the condition we are running in - ## - ## environment ConfigSvc Data input Config input Flags - ## --------------- ------------ ---------- ------------ ----- - ## (1) data taking HLTConfigSvc BS TriggerDB DataSource='data',... - ## - ## (2) MC simulation HTLConfigSvc BS, RDO XML DataSource='geant3/4' and (readBS=True or readRDO=True) and doFEX=True and doHypo=True - ## (to be replaced by DSConfigSvc later) - ## - ## (3) data processing DSConfigSvc BS, RDO COOL/Oracle readBS=True or readRDO=True - ## - ## (4) AOD processing DSConfigSvc ESD/AOD ESD/AOD(run) (readESD=True or readAOD=True) and doFEX=False and doHypo=False - ## AODConfigSvc ESD/AOD(ev) - ## - ## (5) Trigger ESD/AOD HLTConfigSvc ESD/AOD XML (readESD or readAOD)=True and doFEX=True and doHypo=True - ## - ## (6) Trigger rerun HLTConfigSvc ESD/AOD XML (readBS or readRDO or readESD or readAOD)=True and doFEX=False and doHypo=True - ## - ## (7) TAG reading HLTConfigSvc TAG XML readTAG=True - ## - ## - ## ================================================================================ - ## In cases (2), (3) and (5) the configuration information needs to be written - ## into the header of the AOD file. 
This is completely independent of the configuration - ## service, but will depend on the flags readESD, readAOD, doWriteESD, doWriteAOD, doWriteTAG - ## - ## ================================================================================ - ## two flags are steering the execution of the trigger, if neither - ## is True, no trigger is used - ## - ## - doTrigger: if True, "TriggerRelease/jobOfragment_forRecExCommon.py" - ## is included - ## - ## - doTriggerConfigOnly: if True, no TrigDecision is build/used - ## - ## ================================================================================ - ## flags that control the setup of the trigger are - ## - ## boolean: - ## readBS, readAOD, readESD, readRDO, readTAG - ## writeBS, doWriteAOD, doWriteESD, doWriteRDO, doWriteTAG - ## readLVL1configFromXML, readHLTconfigFromXML - ## - ## string: - ## inputHLTconfigFile, inputLVL1configFile, outputHLTconfigFile, outputLVL1configFile - ## - ## on/offline: - ## DataSource: ['data','geant3','geant4'] - ## ================================================================================ - - svc.hltXmlFile = lv1_menu_file_name = jp.Rec.Trigger.inputHLTconfigFile() - svc.l1XmlFile = jp.Rec.Trigger.inputLVL1configFile() - - myList = [] - scenario = 0; - if len( jp.Rec.Trigger.configurationSourceList() ) > 0: - # take the user-defined list of trigger configuration sources: - myList = jp.Rec.Trigger.configurationSourceList() - - else: - print jp.Rec - if rf['readTAG']: # TAG - myList = ['xml'] - - elif rf['readESD'] or rf['readAOD']: # AOD/ESD - myList = ['ds','aod','xml'] - - elif rf['readBS'] or rf['readRDO']: # RDO/BS - if rf['DataSource'] == 'data': - myList = ['ds','xml'] - else: - myList = ['xml'] - - else: - msg.fatal('no reading of BS, RDO, AOD, ESD, or TAG specified') - - message = 'setup the following services in order: '; - for s in myList: message += s + " " - msg.info(message) - - try: - svc.SetStates( myList ) - except: - msg.error( 'failed to set state of TrigConfigSvc ...') - - try: - svc.InitialiseSvc() - except: - msg.error( 'failed to activate TrigConfigSvc ...') - - - if rf['doWriteESD'] or rf['doWriteAOD'] or rf['doWriteTAG'] or ('ds' in myList): - - ## setup the copying of configuration data from input to output metadatastore - if rf['doWriteESD'] or rf['doWriteAOD'] or rf['doWriteTAG']: - msg.info( 'writing of ESD, AOD, or TAG requested, will setup IOVDbSvc to access configuration meta data') - else: - msg.info( 'DSConfigSvc enabled, will setup IOVDbSvc to access configuration meta data') - - # first we need a new algorithm that triggers the copying - from AthenaCommon.AlgSequence import AlgSequence - from TrigConfigSvc.TrigConfigSvcConf import TrigConf__TrigConfDataIOVChanger as TrigConfDataIOVChanger - topAlgs = AlgSequence() - TrigConfDataIOVChanger = TrigConfDataIOVChanger('TrigConfDataIOVChanger') - topAlgs += TrigConfDataIOVChanger - - - from AthenaCommon.AppMgr import ServiceMgr as svcMgr - from AthenaCommon.AppMgr import ToolSvc - - if not hasattr( ToolSvc, 'IOVDbMetaDataTool' ): - from IOVDbMetaDataTools.IOVDbMetaDataToolsConf import IOVDbMetaDataTool - ToolSvc += IOVDbMetaDataTool( "IOVDbMetaDataTool" ) - - - if not hasattr( svcMgr, 'MetaDataSvc' ): - from AthenaServices.AthenaServicesConf import MetaDataSvc - svcMgr += MetaDataSvc( "MetaDataSvc" ) - svcMgr.MetaDataSvc.MetaDataContainer = "MetaDataHdr" - svcMgr.MetaDataSvc.MetaDataTools += [ "IOVDbMetaDataTool" ] - - # for debugging uncomment the following three lines - from AthenaCommon.Constants import VERBOSE - 
ToolSvc.IOVDbMetaDataTool.OutputLevel = VERBOSE - svcMgr.MetaDataSvc.OutputLevel = VERBOSE - - dbConnection = "<dbConnection>impl=cool;techno=oracle;schema=ATLAS_COOLONL_READER;devdb10:COOLTEST:atlas_trig_stelzer</dbConnection>" - - - ### when moving to configurable IOVDbSvc use the ### lines and remove those with ## at the end - ### import IOVDbSvc.IOVDb - ### IOVDbSvc = svcMgr.IOVDbSvc - from AthenaCommon.Include import include ## - include( "IOVDbSvc/IOVDbSvc_jobOptions.py" ) ## - IOVDbSvc = theApp.service( "IOVDbSvc" ) ## - #from AthenaCommon.Constants import VERBOSE - #IOVDbSvc.OutputLevel = VERBOSE - - ## if we process MC from an XML file the dbConnection needs to - ## be set to a local SQlite file - - if (rf['readRDO'] or rf['readBS']) and (rf['DataSource']=='geant3' or rf['DataSource']=='geant4' or rf['TrigCoolSQLite']): - from TrigConfigSvc.TrigConf2COOL import theConfCOOLWriter - dbConnection = theConfCOOLWriter.dbConnection - theConfCOOLWriter.isWritingNeeded = True - TrigConfDataIOVChanger.AdjustIOV = True - - msg.info("COOL DBConnection: " + dbConnection ) - - ### svcMgr.IOVDbSvc.Folders+=[dbConnection + "/TRIGGER/HLT/Menu <tag>HEAD</tag>"] - ### svcMgr.IOVDbSvc.Folders+=[dbConnection + "/TRIGGER/HLT/HltConfigKeys <tag>HEAD</tag>"] - ### svcMgr.IOVDbSvc.Folders+=[dbConnection + "/TRIGGER/LVL1/Lvl1ConfigKey <tag>HEAD</tag>"] - ### svcMgr.IOVDbSvc.Folders+=[dbConnection + "/TRIGGER/LVL1/Menu <tag>HEAD</tag>"] - ### svcMgr.IOVDbSvc.Folders+=[dbConnection + "/TRIGGER/LVL1/Prescales <tag>HEAD</tag>"] - ### svcMgr.IOVDbSvc.Folders+=[dbConnection + "/TRIGGER/LVL1/ItemDef <tag>HEAD</tag>"] - ### svcMgr.IOVDbSvc.Folders+=[dbConnection + "/TRIGGER/HLT/Prescales <tag>HEAD</tag>"] - ### svcMgr.IOVDbSvc.Folders+=[dbConnection + "/TRIGGER/HLT/PrescaleKey <tag>HEAD</tag>"] - - IOVDbSvc.Folders+=[dbConnection + "/TRIGGER/HLT/Menu <tag>HEAD</tag>"] ## - IOVDbSvc.Folders+=[dbConnection + "/TRIGGER/HLT/HltConfigKeys <tag>HEAD</tag>"] ## - IOVDbSvc.Folders+=[dbConnection + "/TRIGGER/LVL1/Lvl1ConfigKey <tag>HEAD</tag>"] ## - IOVDbSvc.Folders+=[dbConnection + "/TRIGGER/LVL1/Menu <tag>HEAD</tag>"] ## - IOVDbSvc.Folders+=[dbConnection + "/TRIGGER/LVL1/Prescales <tag>HEAD</tag>"] ## - IOVDbSvc.Folders+=[dbConnection + "/TRIGGER/LVL1/ItemDef <tag>HEAD</tag>"] ## - IOVDbSvc.Folders+=[dbConnection + "/TRIGGER/HLT/Prescales <tag>HEAD</tag>"] ## - IOVDbSvc.Folders+=[dbConnection + "/TRIGGER/HLT/PrescaleKey <tag>HEAD</tag>"] ## - - - # only in jobs that write AOD or ESD - if rf['doWriteAOD'] or rf['doWriteESD']: - msg.info( 'writing of ESD or AOD enabled, will setup IOVDbSvc to write configuration meta data') - ### svcMgr.IOVDbSvc.FoldersToMetaData+=["/TRIGGER/HLT/Menu"] - ### svcMgr.IOVDbSvc.FoldersToMetaData+=["/TRIGGER/HLT/HltConfigKeys"] - ### svcMgr.IOVDbSvc.FoldersToMetaData+=["/TRIGGER/LVL1/Lvl1ConfigKey"] - ### svcMgr.IOVDbSvc.FoldersToMetaData+=["/TRIGGER/LVL1/Menu"] - ### svcMgr.IOVDbSvc.FoldersToMetaData+=["/TRIGGER/LVL1/Prescales"] - ### svcMgr.IOVDbSvc.FoldersToMetaData+=["/TRIGGER/LVL1/ItemDef"] - ### svcMgr.IOVDbSvc.FoldersToMetaData+=["/TRIGGER/HLT/Prescales"] - ### svcMgr.IOVDbSvc.FoldersToMetaData+=["/TRIGGER/HLT/PrescaleKey"] - - IOVDbSvc.FoldersToMetaData+=["/TRIGGER/HLT/Menu"] ## - IOVDbSvc.FoldersToMetaData+=["/TRIGGER/HLT/HltConfigKeys"] ## - IOVDbSvc.FoldersToMetaData+=["/TRIGGER/LVL1/Lvl1ConfigKey"] ## - IOVDbSvc.FoldersToMetaData+=["/TRIGGER/LVL1/Menu"] ## - IOVDbSvc.FoldersToMetaData+=["/TRIGGER/LVL1/Prescales"] ## - IOVDbSvc.FoldersToMetaData+=["/TRIGGER/LVL1/ItemDef"] ## - 
IOVDbSvc.FoldersToMetaData+=["/TRIGGER/HLT/Prescales"] ## - IOVDbSvc.FoldersToMetaData+=["/TRIGGER/HLT/PrescaleKey"] ## - - -## load basic services configuration at module import -_setupConfig() - -## clean-up: avoid running multiple times this method -del _setupConfig - diff --git a/Trigger/TrigConfiguration/TrigConfigSvc/python/TriggerConfigARA.py b/Trigger/TrigConfiguration/TrigConfigSvc/python/TriggerConfigARA.py index ea0e010e7b08abbc53debb63cca8656dbeb95f53..83d6513b5ae4df2a9d79e475ee18dff140cb9112 100644 --- a/Trigger/TrigConfiguration/TrigConfigSvc/python/TriggerConfigARA.py +++ b/Trigger/TrigConfiguration/TrigConfigSvc/python/TriggerConfigARA.py @@ -1,15 +1,14 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration -## @file TriggerConfig.py +## @file TriggerConfigARA.py ## @brief Configuration object to be used in python ARA ## @author Joerg Stelzer <stelzer@cern.ch> -## $Id: TriggerConfigARA.py,v 1.12 2009-03-11 15:13:27 stelzer Exp $ + +from __future__ import print_function import sys -import copy import ROOT import cppyy -from PyCool import coral import AthenaROOTAccess.transientTree def _iter(self) : @@ -95,7 +94,7 @@ class TriggerConfigARA(object): ROOT.IOVRange.__str__ = lambda x: "%s - %s" % (x.start(),x.stop()) ROOT.IOVRange.isInRange = lambda x,e: x.start()<=e and e<x.stop() - print "Opening file(s) and creating transient metadata ..." + print("Opening file(s) and creating transient metadata ...") if type(poolfile) == list: self._collection = ROOT.AthenaROOTAccess.TChainROOTAccess('MetaData') for file in poolfile: @@ -117,7 +116,7 @@ class TriggerConfigARA(object): for key in self.__class__.__keysInUpdateOrder: if not hasattr(self.mdt,self.__class__.__folderName[key]): - print "No key %s in file %s, use checkFile.py to verify" % (self.__class__.__folderName[key],poolfile) + print("No key %s in file %s, use checkFile.py to verify" % (self.__class__.__folderName[key],poolfile)) sys.exit(0) @@ -125,12 +124,12 @@ class TriggerConfigARA(object): def printfnc(self,chaindetails=False): - print self + print(self) if not chaindetails: return for ef in self.HLTChains.values(): if ef.level()!='EF': continue - if ef.prescale()<0: continue; - fc = self.printFullChain(ef.chain_name()) + if ef.prescale()<0: continue + self.printFullChain(ef.chain_name()) def printFullChain(self, name): fc = self.getFullChain(name) @@ -150,12 +149,12 @@ class TriggerConfigARA(object): if fc[1]: l2str = "%s (%1.2f)" % (fc[1].chain_name(), fc[1].prescale()) if fc[0]: l1str = "" - if type(fc[0]) == type([]): + if isinstance(fc[0], list): for i in fc[0]: l1str += "%s (%i) " % (i.name(), self.L1PS[i.ctpId()]) else: l1str = "%s (%i)" % (fc[0].name(), self.L1PS[fc[0].ctpId()]) - print "EF: %s, L2: %s, L1: %s [streams: %s" % (efstr, l2str, l1str, ststr ) + print("EF: %s, L2: %s, L1: %s [streams: %s" % (efstr, l2str, l1str, ststr )) def __str__(self): @@ -169,7 +168,7 @@ class TriggerConfigARA(object): """Forwards getting of unknown attributes to the configuration holder""" if key in self.__dict__["_TriggerConfigARA__curConf"].__dict__: return self.__dict__["_TriggerConfigARA__curConf"].__dict__.__getitem__(key) - raise AttributeError, "No configuration parameter %s" % key + raise AttributeError("No configuration parameter %s" % key) def __setattr__(self, key, val): """Forwards setting of unknown attributes to the configuration holder""" @@ -191,18 +190,18 @@ class TriggerConfigARA(object): iovr = 
payload.iovRange(chanNum) iovs += [iovr] - print "-----------------------------------------------------------------------" + print("-----------------------------------------------------------------------") for iov in iovs: end = iov.stop().event()-1 if end==-1: end="MAX" else: end="%3i" % end - print "Run: %6i LBs: %3i - %s" % (iov.start().run(),iov.start().event(),end) - print "---------------------------" + print("Run: %6i LBs: %3i - %s" % (iov.start().run(),iov.start().event(),end)) + print("---------------------------") currentEvent = ROOT.IOVTime(iov.start().run(), iov.start().event()) for key in self.__class__.__keysInUpdateOrder: - success = self.__loadData(key, currentEvent) + self.__loadData(key, currentEvent) self.printfnc(chaindetails) - print "-----------------------------------------------------------------------" + print("-----------------------------------------------------------------------") self.__dict__.__setitem__("_TriggerConfigARA__curConf", _TrigConfHolder()) self.__currentIOV = {} @@ -219,15 +218,15 @@ class TriggerConfigARA(object): success = self.__loadData(key, currentEvent) if not success: overallsucc = False - print "Did not find valid IOV for %s" % self.__class__.__folderName[key] + print("Did not find valid IOV for %s" % self.__class__.__folderName[key]) updated = True if updated: if overallsucc: - print "Loaded new trigger configuration for run/lb = %i/%i" % (run,lb) + print("Loaded new trigger configuration for run/lb = %i/%i" % (run,lb)) else: - print "ERROR: Loading of new trigger configuration for run/lb = %i/%i failed" % (run,lb) + print("ERROR: Loading of new trigger configuration for run/lb = %i/%i failed" % (run,lb)) if self.verbose: - print self + print(self) return updated def isConfigured(self, name): @@ -256,7 +255,7 @@ class TriggerConfigARA(object): l2name = hltchain.lower_chain_name() if l2name!="": if not self.isConfigured(l2name): - raise RuntimeError, "Lower chain %s as seed of %s not configured" % (l2name,name) + raise RuntimeError("Lower chain %s as seed of %s not configured" % (l2name,name)) fullchain[1] = self.HLTChains[l2name] else: fullchain[1] = hltchain @@ -271,11 +270,11 @@ class TriggerConfigARA(object): itemlist = l1name.replace(' ','').split(',') for item in itemlist: if not self.isConfigured(item): - raise RuntimeError, "L1 item %s as seed of %s not configured" % (str(item),l2name) + raise RuntimeError("L1 item %s as seed of %s not configured" % (str(item),l2name)) fullchain[0].append(self.L1Items[item]) else: if not self.isConfigured(l1name): - raise RuntimeError, "L1 item %s as seed of %s not configured" % (str(l1name),l2name) + raise RuntimeError("L1 item %s as seed of %s not configured" % (str(l1name),l2name)) fullchain[0] = self.L1Items[l1name] return fullchain @@ -285,14 +284,14 @@ class TriggerConfigARA(object): if not self.isConfigured(name): return 0 # if name is a L1Item if name in self.L1Items: - return self.L1PS[self.L1Items[lowname].ctpId()] + return self.L1PS[self.L1Items[name].ctpId()] hltchain = self.HLTChains[name] prescale = hltchain.prescale() lowname = hltchain.lower_chain_name() if lowname=="": return prescale # unseeded if not self.isConfigured(lowname): - raise RuntimeError, "Lower chain %s of %s not configured" % (lowname,name) + raise RuntimeError("Lower chain %s of %s not configured" % (lowname,name)) if hltchain.level()=='EF': hltchain = self.HLTChains[lowname] @@ -300,7 +299,7 @@ class TriggerConfigARA(object): lowname = hltchain.lower_chain_name() if lowname=="": return prescale # unseeded if not 
self.isConfigured(lowname): - raise RuntimeError, "Lower item %s of %s not configured" % (lowname,name) + raise RuntimeError("Lower item %s of %s not configured" % (lowname,name)) # hltchain should be of L2 prescale *= self.L1PS[self.L1Items[lowname].ctpId()] @@ -319,9 +318,9 @@ class TriggerConfigARA(object): br = self.mdt.GetBranch(self.__class__.__folderName[key]) validIOV = getattr(self,"_load%s" % key)(br, currentEvent) if self.verbose: - print "Loaded %s with iov %s" % (key, validIOV) + print("Loaded %s with iov %s" % (key, validIOV)) self.__currentIOV[key] = validIOV - return validIOV != None + return validIOV is not None def _loadHLTK(self, br, currentEvent): validIOV = None diff --git a/Trigger/TrigConfiguration/TrigConfigSvc/python/TriggerSummary.py b/Trigger/TrigConfiguration/TrigConfigSvc/python/TriggerSummary.py index 6a02715af52c4f4eefc0b135ae9bdf3937ee4388..6ebdce176b0ad8d61d0482fc9482634388c966bf 100644 --- a/Trigger/TrigConfiguration/TrigConfigSvc/python/TriggerSummary.py +++ b/Trigger/TrigConfiguration/TrigConfigSvc/python/TriggerSummary.py @@ -1,4 +1,5 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +from __future__ import print_function +# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration # @author: Till Eifert <Till.Eifert@cern.ch> # @date: October 2008 @@ -101,10 +102,10 @@ class PoolFile(object): self.poolFile = None dbFileName = whichdb.whichdb( fileName ) - if not dbFileName in ( None, '' ): - print "## opening file [%s]..." % str(fileName) + if dbFileName not in ( None, '' ): + print("## opening file [%s]..." % str(fileName)) db = shelve.open( fileName, 'r' ) - print "## opening file [OK]" + print("## opening file [OK]") report = db['report'] self._fileInfos = report['fileInfos'] @@ -115,9 +116,9 @@ class PoolFile(object): self.l2_global = report['l2_global'] self.ef_global = report['ef_global'] else: - print "## opening file [%s]..." % str(fileName) + print("## opening file [%s]..." % str(fileName)) self.__openPoolFile( fileName ) - print "## opening file [OK]" + print("## opening file [OK]") self.__processFile() return @@ -127,34 +128,29 @@ class PoolFile(object): # our fellow Mac users oldArgs = sys.argv sys.argv = sys.argv[:1] + ['-b'] + sys.argv[1:] - print "## importing ROOT..." + print("## importing ROOT...") import ROOT - print "## importing ROOT... [DONE]" - # prevent ROOT from being too verbose - #rootMsg = ShutUp() - #rootMsg.mute() + print("## importing ROOT... [DONE]") ROOT.gErrorIgnoreLevel = ROOT.kFatal - #rootMsg.unMute() sys.argv = oldArgs poolFile = None try: #ROOT.TFile.SetReadStreamerInfo(False) poolFile = ROOT.TFile.Open( fileName, PoolOpts.READ_MODE ) - except Exception, e: - rootMsg.unMute() - print "## Failed to open file [%s] !!" % fileName - print "## Reason:" - print e - print "## Bailing out..." - raise IOError, "Could not open file [%s]" % fileName + except Exception as e: + print("## Failed to open file [%s] !!" % fileName) + print("## Reason:") + print(e) + print("## Bailing out...") + raise IOError("Could not open file [%s]" % fileName) #rootMsg.unMute() - if poolFile == None: - print "## Failed to open file [%s] !!" % fileName + if poolFile is None: + print("## Failed to open file [%s] !!" 
% fileName) msg = "Could not open file [%s]" % fileName - raise IOError, msg + raise IOError(msg) self.poolFile = poolFile assert self.poolFile.IsOpen() and not self.poolFile.IsZombie(), "Invalid POOL file or a Zombie one" @@ -188,8 +184,8 @@ class PoolFile(object): self.ef_chains += [PoolRecord(name=name, counter=chain.chain_counter(), evtsBeforePS=0, evtsAfterPS=0, \ evtsAfterPT=0, level=PoolOpts.level[3]) ] else: - print "will ignore chain (%s, %i) because the trigger level (=%s) is not set to either 'L2' or 'EF'" \ - % ( name, chain.chain_counter(), chain.level()) + print("will ignore chain (%s, %i) because the trigger level (=%s) is not set to either 'L2' or 'EF'" \ + % ( name, chain.chain_counter(), chain.level())) return @@ -207,24 +203,24 @@ class PoolFile(object): self._fileInfos['entries'] = nEntries ## next, try to get the trigger configuration from the MetaData TTree - print "## Reading trigger configuration from MetaData ..." + print("## Reading trigger configuration from MetaData ...") from TrigConfigSvc.TriggerConfigARA import TriggerConfigARA try: self.trigConf = TriggerConfigARA(self.poolFile) - except Exception, e: - print "## Caught exception [%s] !!" % str(e.__class__) - print "## What:",e - print sys.exc_info()[0] - print sys.exc_info()[1] + except Exception as e: + print("## Caught exception [%s] !!" % str(e.__class__)) + print("## What:",e) + print(sys.exc_info()[0]) + print(sys.exc_info()[1]) pass - print "## Reading trigger configuration from MetaData ... [DONE]" + print("## Reading trigger configuration from MetaData ... [DONE]") ## get the transient tree - print "## Creating transient collection tree ..." + print("## Creating transient collection tree ...") import AthenaROOTAccess.transientTree def filter(dhe): if 'HLTResult' in dhe.token() or 'TrigDecision' in dhe.token() or 'EventInfo' in dhe.token(): - print '+', dhe.token() + print('+', dhe.token()) return dhe return False @@ -233,7 +229,7 @@ class PoolFile(object): branchNames = {} transientTree = AthenaROOTAccess.transientTree.makeTree(self.poolFile, branchNames = branchNames, dhfilt=filter) - print "## Creating transient collection tree ... [DONE]" + print("## Creating transient collection tree ... [DONE]") # figure out the name of the EventInfo branch (this may change from MC to MC and DATA) eventInfoBranchName = 'McEventInfo' @@ -243,17 +239,18 @@ class PoolFile(object): del possible # from TrigSteering.Chain import Chain #MN + import ROOT Chain = ROOT.HLT.Chain ## Branches that we're interested in: EventInfo_br = transientTree.GetBranch(eventInfoBranchName) TrigDecision_br = transientTree.GetBranch("TrigDecision") - print "## Looping over all events now ..." + print("## Looping over all events now ...") isFirstEvent = True for i in xrange(transientTree.GetEntries()): if EventInfo_br.GetEntry(i) <= 0 or TrigDecision_br.GetEntry(i) <= 0 : - print ":: could not get entry #%i" % i + print(":: could not get entry #%i" % i) break # update the trig conf. 
with these numbers: @@ -290,7 +287,7 @@ class PoolFile(object): l2Chains[aChain.getChainCounter()] = aChain for chain in self.l2_chains: - if not l2Chains.has_key(chain.counter): + if chain.counter not in l2Chains: continue if l2Chains[ chain.counter ].chainPassedRaw(): chain.evtsBeforePS += 1 @@ -307,7 +304,7 @@ class PoolFile(object): efChains[aChain.getChainCounter()] = aChain for chain in self.ef_chains: - if not efChains.has_key(chain.counter): + if chain.counter not in efChains: continue if efChains[ chain.counter ].chainPassedRaw(): chain.evtsBeforePS += 1 @@ -319,7 +316,7 @@ class PoolFile(object): - print "## Looping over all events ... [DONE]" + print("## Looping over all events ... [DONE]") return @@ -332,10 +329,10 @@ class PoolFile(object): ] ) def checkFile(self, sorting = PoolRecord.Sorter.name): - print self.fileInfos() - print "" - print "Trigger configuration summary:" - print self.trigConf + print(self.fileInfos()) + print("") + print("Trigger configuration summary:") + print(self.trigConf) ## sorting data if sorting in PoolRecord.Sorter.allowedValues(): @@ -347,48 +344,48 @@ class PoolFile(object): data3 = self.ef_chains data3.sort(key = operator.attrgetter(sorting) ) - print "" - print "="*80 - print PoolOpts.HDR_FORMAT % ( "ID", + print("") + print("="*80) + print(PoolOpts.HDR_FORMAT % ( "ID", "level", "Trigger name", - "Passed events: raw, after PS, after PT/Veto") - print "="*80 + "Passed events: raw, after PS, after PT/Veto")) + print("="*80) for item in [self.l1_global, self.l2_global, self.ef_global]: - print PoolOpts.ROW_FORMAT_GLOBAL % ( + print(PoolOpts.ROW_FORMAT_GLOBAL % ( item.level, item.name, item.evtsAfterPT - ) + )) - print "-"*80 + print("-"*80) for item in self.l1_items: - print PoolOpts.ROW_FORMAT % ( + print(PoolOpts.ROW_FORMAT % ( item.counter, item.level, item.name, item.evtsBeforePS, item.evtsAfterPS, item.evtsAfterPT - ) - print "-"*80 + )) + print("-"*80) for chain in self.l2_chains: - print PoolOpts.ROW_FORMAT % ( + print(PoolOpts.ROW_FORMAT % ( chain.counter, chain.level, chain.name, chain.evtsBeforePS, chain.evtsAfterPS, chain.evtsAfterPT - ) - print "-"*80 + )) + print("-"*80) for chain in self.ef_chains: - print PoolOpts.ROW_FORMAT % ( + print(PoolOpts.ROW_FORMAT % ( chain.counter, chain.level, chain.name, chain.evtsBeforePS, chain.evtsAfterPS, chain.evtsAfterPT - ) - print "="*80 + )) + print("="*80) return @@ -438,7 +435,6 @@ class PoolFile(object): if os.path.exists (fileName): os.unlink (fileName) o = csv.writer (open (fileName, 'w')) - nentries = self._fileInfos['entries'] map (o.writerow, [ ['file name', self._fileInfos['name']], ['file size', self._fileInfos['size']], @@ -467,8 +463,8 @@ class PoolFile(object): try: self.poolFile.Close() self.poolFile = None - except Exception,err: - print "WARNING:",err + except Exception as err: + print("WARNING:",err) pass pass # class PoolFile diff --git a/Trigger/TrigConfiguration/TrigConfigSvc/python/__init__.py b/Trigger/TrigConfiguration/TrigConfigSvc/python/__init__.py index 7c2ce6d15052d441ff82c306f2f72e9f6c76b5ec..44a8fab620510d99903b9366e032f5c7a2357e1b 100755 --- a/Trigger/TrigConfiguration/TrigConfigSvc/python/__init__.py +++ b/Trigger/TrigConfiguration/TrigConfigSvc/python/__init__.py @@ -1,7 +1,7 @@ -# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration +# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration # File: TrigConfigSvc/__init__.py # Author: Tomasz.Bold@cern.ch __author__ = 'Paul Bell, Tomasz Bold, Joerg Stelzer' -__all__ = [ 
'TrigConfigSvcConfig', 'TriggerConfigARA', 'TrigConfigSvcUtils.py', 'DoDBConfig.py' ] +__all__ = [ 'TrigConfigSvcConfig', 'TriggerConfigARA', 'TrigConfigSvcUtils.py' ] diff --git a/Trigger/TrigConfiguration/TrigConfigSvc/share/L1config-example.py b/Trigger/TrigConfiguration/TrigConfigSvc/share/L1config-example.py deleted file mode 100755 index 071add86a0c79abe12294a08e5fe77e3dc855375..0000000000000000000000000000000000000000 --- a/Trigger/TrigConfiguration/TrigConfigSvc/share/L1config-example.py +++ /dev/null @@ -1,368 +0,0 @@ -#!/usr/bin/env python - -import os -import sys -from TriggerMenuConfig.TriggerPythonConfig import * - -tm = TriggerPythonConfig('hlt.xml', 'LVL1Config-CSC05.xml') - -#=========================================================================== -# TriggerThresholds -#------------------ -# arguments to addLvl1Threshold() -# addLvl1Threshold(name, type, bitnum, range_begin, range_end, -# slot, connector) -# -# To Switch off the threshold set following constants for each type of -# threshold -# ClusterOff (= 255) -# IsolationOff (= 63) -# JetOff (= 1023) -# EtSumOff (= 2047) -# EtMissOff (= 2895) -# JetEtOff (= 13286) -# -#----- MUON thresholds ----- -mu6 = tm.addLvl1Threshold('MU6', 'MUON', 0,'SLOT9','CON0')\ - .addMuonThresholdValue(6, -49, 49, 0, 64) -mu8 = tm.addLvl1Threshold('MU8', 'MUON', 1,'SLOT9','CON0')\ - .addMuonThresholdValue(8, -49, 49, 0, 64) -mu10 = tm.addLvl1Threshold('MU10', 'MUON', 2,'SLOT9','CON0')\ - .addMuonThresholdValue(10, -49, 49, 0, 64) -mu11 = tm.addLvl1Threshold('MU11', 'MUON', 3,'SLOT9','CON0')\ - .addMuonThresholdValue(12, -49, 49, 0, 64) -mu20 = tm.addLvl1Threshold('MU20','MUON', 4,'SLOT9','CON0')\ - .addMuonThresholdValue(20, -49, 49, 0, 64) -mu40 = tm.addLvl1Threshold('MU40','MUON', 5,'SLOT9','CON0')\ - .addMuonThresholdValue(40, -49, 49, 0, 64) -#----- EM thresholds ----- -em1 = tm.addLvl1Threshold('EM1', 'EM', 0, 'SLOT7', 'CON1')\ - .addEMThresholdValue(7, -49, 49, 0, 64,\ - IsolationOff,IsolationOff,IsolationOff) -em11 = tm.addLvl1Threshold('EM11', 'EM', 1, 'SLOT7', 'CON1')\ - .addEMThresholdValue(11, -49, 49, 0, 64, \ - IsolationOff,IsolationOff,IsolationOff) -em11i = tm.addLvl1Threshold('EM11I', 'EM', 2, 'SLOT7', 'CON1')\ - .addEMThresholdValue(11, -49, 49, 0, 64, 3, 2, 2) -em18 = tm.addLvl1Threshold('EM18', 'EM', 3, 'SLOT7', 'CON1')\ - .addEMThresholdValue(18, -49, 49, 0, 64, \ - IsolationOff,IsolationOff,IsolationOff) -em18i = tm.addLvl1Threshold('EM18I', 'EM', 4, 'SLOT7', 'CON1')\ - .addEMThresholdValue(18, -49, 49, 0, 64, 3, 2, 2) -em50 = tm.addLvl1Threshold('EM50', 'EM', 5, 'SLOT7', 'CON1')\ - .addEMThresholdValue(50, -49, 49, 0, 64, \ - IsolationOff,IsolationOff,IsolationOff, 1) -#----- TAU thresholds ----- -tau5 = tm.addLvl1Threshold('TAU5', 'TAU', 0, 'SLOT7', 'CON2')\ - .addTauThresholdValue(5, -49, 49, 0, 64, \ - IsolationOff, IsolationOff, IsolationOff) -tau8 = tm.addLvl1Threshold('TAU8', 'TAU', 1, 'SLOT7', 'CON2')\ - .addTauThresholdValue(8, -49, 49, 0, 64, \ - IsolationOff, IsolationOff, IsolationOff) -tau8i = tm.addLvl1Threshold('TAU8I', 'TAU', 2, 'SLOT7', 'CON2')\ - .addTauThresholdValue(8, -49, 49, 0, 64, \ - 5, IsolationOff, IsolationOff) -tau13 = tm.addLvl1Threshold('TAU13', 'TAU', 3, 'SLOT7', 'CON2')\ - .addTauThresholdValue(13, -49, 49, 0, 64, \ - IsolationOff, IsolationOff, IsolationOff) -tau13i = tm.addLvl1Threshold('TAU13I', 'TAU', 4, 'SLOT7', 'CON2')\ - .addTauThresholdValue(13, -49, 49, 0, 64, \ - 5, IsolationOff, IsolationOff) -tau17i = tm.addLvl1Threshold('TAU17I', 'TAU', 5, 'SLOT7', 'CON2')\ - 
.addTauThresholdValue(17, -49, 49, 0, 64, \ - 6, IsolationOff, IsolationOff) -tau22i = tm.addLvl1Threshold('TAU22I', 'TAU', 6, 'SLOT7', 'CON2')\ - .addTauThresholdValue(17, -49, 49, 0, 64, - 6, IsolationOff, IsolationOff) -tau30i = tm.addLvl1Threshold('TAU30I', 'TAU', 7, 'SLOT7', 'CON2')\ - .addTauThresholdValue(30, -49, 49, 0, 64, \ - 6, IsolationOff, IsolationOff) - -#----- JET thresholds ----- -jet15 = tm.addLvl1Threshold('JET15', 'JET', 0, 'SLOT8', 'CON0')\ - .addJetThresholdValue(15, -49, 49, 0, 64, 8) -jet20 = tm.addLvl1Threshold('JET20', 'JET', 1, 'SLOT8', 'CON0')\ - .addJetThresholdValue(20, -49, 49, 0, 64, 8) -jet25 = tm.addLvl1Threshold('JET25', 'JET', 2, 'SLOT8', 'CON0')\ - .addJetThresholdValue(25, -49, 49, 0, 64, 8) -jet35 = tm.addLvl1Threshold('JET35', 'JET', 3, 'SLOT8', 'CON0')\ - .addJetThresholdValue(35, -49, 49, 0, 64, 8) -jet55 = tm.addLvl1Threshold('JET55', 'JET', 4, 'SLOT8', 'CON0')\ - .addJetThresholdValue(55, -49, 49, 0, 64, 8) -jet4 = tm.addLvl1Threshold('JET4', 'JET', 5, 'SLOT8', 'CON0')\ - .addJetThresholdValue(4, -49, 49, 0, 64, 4) -#----- FJET thresholds ----- -jb10 = tm.addLvl1Threshold('JB10', 'JB', 0,'SLOT8','CON1')\ - .addJetThresholdValue(JetOff, -49, 49, 0, 64, 8, 1)\ - .addJetThresholdValue(10, -49, 0, 0, 64, 8, 2) -jf10 = tm.addLvl1Threshold('JF10','JF',0,'SLOT8','CON1')\ - .addJetThresholdValue(JetOff, -49, 49, 0, 64, 8, 1)\ - .addJetThresholdValue(10, 0, 49, 0, 64, 8, 2) -jb20 = tm.addLvl1Threshold('JB20', 'JB',1,'SLOT8','CON1')\ - .addJetThresholdValue(JetOff, -49, 49, 0, 64, 8, 1)\ - .addJetThresholdValue(20, -49, 0, 0, 64, 8, 2) -jf20 = tm.addLvl1Threshold('JF20','JF',1,'SLOT8','CON1')\ - .addJetThresholdValue(JetOff, -49, 49, 0, 64, 8, 1)\ - .addJetThresholdValue(20, 0, 49, 0, 64, 8, 2) -#----- TE thresholds ----- -xe20 = tm.addLvl1Threshold('XE20', 'XE', 0, 'SLOT8', 'CON2')\ - .addEnergyThresholdValue(20, -49, 49, 0, 64) -xe30 = tm.addLvl1Threshold('XE30', 'XE', 1, 'SLOT8', 'CON2')\ - .addEnergyThresholdValue(30, -49, 49, 0, 64) -xe40 = tm.addLvl1Threshold('XE40', 'XE', 2, 'SLOT8', 'CON2')\ - .addEnergyThresholdValue(40, -49, 49, 0, 64) -xe50 = tm.addLvl1Threshold('XE50', 'XE', 3, 'SLOT8', 'CON2')\ - .addEnergyThresholdValue(50, -49, 49, 0, 64) -xe100 = tm.addLvl1Threshold('XE100', 'XE', 4, 'SLOT8', 'CON2')\ - .addEnergyThresholdValue(100, -49, 49, 0, 64) -xe200 = tm.addLvl1Threshold('XE200', 'XE', 5, 'SLOT8', 'CON2')\ - .addEnergyThresholdValue(200, -49, 49, 0, 64) -#----- XE thresholds ----- -xe30 = tm.addLvl1Threshold('XE30', 'XE', 6, 'SLOT8', 'CON2')\ - .addEnergyThresholdValue(30, -49, 49, 0, 64) - -#=========================================================================== -# TriggerConditions -#------------------ -cond_1mu6 = mu6.condition(1) -cond_1mu8 = mu8.condition(1) -cond_1mu10 = mu10.condition(1) -cond_1mu11 = mu11.condition(1) -cond_1mu20 = mu20.condition(1) -cond_1mu40 = mu40.condition(1) -cond_2mu6 = mu6.condition(2) -cond_2mu8 = mu8.condition(2) -cond_2mu10 = mu10.condition(2) -cond_2mu11 = mu11.condition(2) -cond_2mu20 = mu20.condition(2) -cond_2mu40 = mu40.condition(2) - -cond_1j15 = jet15.condition(1) -cond_1j20 = jet20.condition(1) -cond_2j20 = jet20.condition(2) -cond_3j20 = jet20.condition(3) -cond_4j20 = jet20.condition(4) -cond_1fj10_left = jb10.condition(1) -cond_1fj10_right = jf10.condition(1) -cond_1bj15 = jet4.condition(1) - -cond_1em1 = em1.condition(1) -cond_2em11 = em11.condition(2) -cond_2em11i = em11i.condition(2) -cond_1em18 = em18.condition(1) -cond_1em18i = em18i.condition(1) -cond_1em50 = em50.condition(1) - 
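For reference, the file being deleted in this hunk (L1config-example.py) exercises one pattern throughout: a threshold is declared on the menu, turned into multiplicity conditions with condition(n), and the conditions are wired into LVL1MenuItem logic before the menu is written out. Below is a minimal sketch of that pattern, condensed from the deleted file itself; it assumes TriggerPythonConfig and LVL1MenuItem are exported by TriggerMenuConfig.TriggerPythonConfig (the example relies on a star import from that module).

# Sketch only -- condensed from the deleted L1config-example.py.
from TriggerMenuConfig.TriggerPythonConfig import TriggerPythonConfig, LVL1MenuItem

tm = TriggerPythonConfig('hlt.xml', 'LVL1Config-CSC05.xml')

# 1) declare a LVL1 threshold on its CTP input slot/connector
mu6 = tm.addLvl1Threshold('MU6', 'MUON', 0, 'SLOT9', 'CON0') \
        .addMuonThresholdValue(6, -49, 49, 0, 64)

# 2) derive multiplicity conditions from the threshold
cond_1mu6 = mu6.condition(1)   # at least one muon above MU6
cond_2mu6 = mu6.condition(2)   # at least two

# 3) wire conditions into menu items (0x21 is the item's CTP bit;
#    muon items occupy bits 32-63 in the example) and register them
item_2mu6 = LVL1MenuItem('L1_2MU6', 0x21, '1', 1, 'LOW') \
              .setLogic(cond_2mu6).setTriggerType(2)
tm.addLVL1Item(item_2mu6)

# finally write out the XML files named in the constructor
tm.writeConfigFiles()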
-cond_1tau17i = tau17i.condition(1) -cond_xe30 = xe30.condition() -cond_1jet55 = jet55.condition(5) - -cond_xe30 = xe30.condition(1) -cond_xe40 = xe40.condition(1) -cond_xe50 = xe50.condition(1) -cond_xe100 = xe100.condition(1) -cond_xe200 = xe200.condition(1) - -#=========================================================================== -# TriggerItems -#------------- -# EM Items (bits [0:31]) -item_1em1 = LVL1MenuItem('L1_1EM1', 0x0, '1', 100, 'LOW')\ - .setLogic(cond_1em1).setTriggerType(1) -item_2em15 = LVL1MenuItem('L1_2EM15', 0x1, '1', 1, 'HIGH')\ - .setLogic(cond_2em11).setTriggerType(1) -item_2em15i = LVL1MenuItem('L1_2EM15i', 0x2, '1', 1, 'LOW')\ - .setLogic(cond_2em11i).setTriggerType(1) -item_1em25 = LVL1MenuItem('L1_2EM25', 0x3, '1', 1, 'LOW')\ - .setLogic(cond_1em18).setTriggerType(1) -item_1em25i = LVL1MenuItem('L1_2EM25i', 0x4, '1', 1, 'LOW')\ - .setLogic(cond_1em18i).setTriggerType(1) -item_1em60 = LVL1MenuItem('L1_1EM60', 0x5, '1', 1, 'LOW')\ - .setLogic(cond_1em50).setTriggerType(1) - -item_xe20 = LVL1MenuItem('L1_XE20', 0x6, '1', 1, 'LOW')\ - .setLogic(xe20.condition(1)).setTriggerType(8) - -# Muon Items (bits [32:63]) -item_1mu6 = LVL1MenuItem('L1_1MU6', 0x20, '1', 1, 'LOW')\ - .setLogic(cond_1mu6).setTriggerType(2) -item_2mu6 = LVL1MenuItem('L1_2MU6', 0x21, '1', 1, 'LOW')\ - .setLogic(cond_2mu6).setTriggerType(2) -item_1mu8 = LVL1MenuItem('L1_1MU8', 0x22, '1', 1, 'LOW')\ - .setLogic(cond_1mu8).setTriggerType(2) -item_1mu10 = LVL1MenuItem('L1_1MU10', 0x23, '1', 1, 'LOW')\ - .setLogic(cond_1mu10).setTriggerType(2) -item_1mu11 = LVL1MenuItem('L1_1MU11', 0x24, '1', 1, 'LOW')\ - .setLogic(cond_1mu11).setTriggerType(2) -item_1mu20 = LVL1MenuItem('L1_1MU20', 0x25, '1', 1, 'LOW')\ - .setLogic(cond_1mu20).setTriggerType(2) -item_1mu40 = LVL1MenuItem('L1_1MU40', 0x26, '1', 1, 'LOW')\ - .setLogic(cond_1mu40).setTriggerType(2) -#item_1mu6 = LVL1MenuItem('L1_1MU6', 0x20, '1', 1, 'LOW')\ -# .setLogic(cond_1mu6|cond_1mu8|cond_1mu10|cond_1mu11|cond_1mu20| -# cond_1mu40) - -## item_1mu6 = LVL1MenuItem('L1_1MU6', 0x20, '1', 1, 'LOW')\ -## .addOredCondition('L1_1MU6', 1, 'MU6') - -## item_2mu6 = LVL1MenuItem('L1_2MU6', 0x21, '1', 1, 'LOW')\ -## .setLogic( (cond_2mu6|cond_2mu8|cond_2mu10|cond_2mu11| -## cond_2mu20|cond_2mu40) | -## (cond_1mu6 & (cond_1mu8|cond_1mu10|cond_1mu11| -## cond_1mu20|cond_1mu40)) | -## (cond_1mu8 & (cond_1mu10|cond_1mu11|cond_1mu20| -## cond_1mu40)) | -## (cond_1mu10 & (cond_1mu11|cond_1mu20|cond_1mu40)) | -## (cond_1mu11 & (cond_1mu20|cond_1mu40)) | -## (cond_1mu20 & cond_1mu40)) -## item_1mu8 = LVL1MenuItem('L1_1MU8', 0x22, '1', 1, 'LOW')\ -## .setLogic(cond_1mu8|cond_1mu10|cond_1mu11|cond_1mu20|cond_1mu40) -## item_1mu10 = LVL1MenuItem('L1_1MU10', 0x23, '1', 1, 'LOW')\ -## .setLogic(cond_1mu10|cond_1mu11|cond_1mu20|cond_1mu40) -## item_1mu11 = LVL1MenuItem('L1_1MU11', 0x24, '1', 1, 'LOW')\ -## .setLogic(cond_1mu11|cond_1mu20|cond_1mu40) -## item_1mu20 = LVL1MenuItem('L1_1MU20', 0x25, '1', 1, 'LOW')\ -## .setLogic(cond_1mu20|cond_1mu40) -## item_1mu40 = LVL1MenuItem('L1_1MU40', 0x26, '1', 1, 'LOW').setLogic(cond_1mu40) - -# Tau Items (bits [64:95]) -item_1tau5 = LVL1MenuItem('L1_1TAU5', 0x40, '1', 1, 'LOW')\ - .setLogic(tau5.condition(1)).setTriggerType(4) -item_1tau10 = LVL1MenuItem('L1_1TAU10', 0x41, '1', 1, 'LOW')\ - .setLogic(tau8.condition(1)).setTriggerType(4) -item_1tau10i = LVL1MenuItem('L1_1TAU10i', 0x42, '1', 1, 'LOW')\ - .setLogic(tau8i.condition(1)).setTriggerType(4) -item_1tau15 = LVL1MenuItem('L1_1TAU15', 0x43, '1', 1, 'LOW')\ - 
.setLogic(tau13.condition(1)).setTriggerType(4) -item_1tau15i = LVL1MenuItem('L1_1TAU15i', 0x44, '1', 1, 'LOW')\ - .setLogic(tau13i.condition(1)).setTriggerType(4) -item_1tau20i = LVL1MenuItem('L1_1TAU20i', 0x45, '1', 1, 'LOW')\ - .setLogic(tau17i.condition(1)).setTriggerType(4) -item_1tau25i = LVL1MenuItem('L1_1TAU25i', 0x46, '1', 1, 'LOW')\ - .setLogic(tau22i.condition(1)).setTriggerType(4) -item_1tau35i = LVL1MenuItem('L1_1TAU35i', 0x47, '1', 1, 'LOW')\ - .setLogic(tau30i.condition(1)).setTriggerType(4) - - -# Jet items (bits [96:127]) -item_1j35 = LVL1MenuItem('L1_1J35', 0x60, '1', 1, 'LOW')\ - .setLogic(cond_1j15).setTriggerType(0x10) -item_1j45 = LVL1MenuItem('L1_1J45', 0x61, '1', 1, 'LOW')\ - .setLogic(cond_1j20).setTriggerType(0x10) -item_2j45 = LVL1MenuItem('L1_2J45', 0x62, '1', 1, 'LOW')\ - .setLogic(cond_2j20).setTriggerType(0x10) -item_3j45 = LVL1MenuItem('L1_3J45', 0x63, '1', 1, 'LOW')\ - .setLogic(cond_3j20).setTriggerType(0x10) -item_4j45 = LVL1MenuItem('L1_4J45', 0x64, '1', 1, 'LOW')\ - .setLogic(cond_4j20).setTriggerType(0x10) -item_1fj30 = LVL1MenuItem('L1_1fj30', 0x65, '1', 1, 'LOW')\ - .setLogic(cond_1fj10_right & cond_1fj10_left).setTriggerType(0x10) -item_1bj15 = LVL1MenuItem('L1_1bj15', 0x66, '1', 1, 'LOW')\ - .setLogic(cond_1bj15).setTriggerType(0x10) - - - -# Etmiss items (bits [128:159]) -item_xe30 = LVL1MenuItem('L1_XE30', 0x80, '1', 1, 'LOW')\ - .setLogic(cond_xe30).setTriggerType(0x20) -item_xe40 = LVL1MenuItem('L1_XE40', 0x81, '1', 1, 'LOW')\ - .setLogic(cond_xe40).setTriggerType(0x20) -item_xe50 = LVL1MenuItem('L1_XE50', 0x82, '1', 1, 'LOW')\ - .setLogic(cond_xe50).setTriggerType(0x20) -item_xe100 = LVL1MenuItem('L1_XE100', 0x83, '1', 1, 'LOW')\ - .setLogic(cond_xe100).setTriggerType(0x20) -item_xe200 = LVL1MenuItem('L1_XE200', 0x84, '1', 1, 'LOW')\ - .setLogic(cond_xe200).setTriggerType(0x20) - -item_1tau17i_xe30 = LVL1MenuItem('L1_1TAU17I_XE30', 0x8f, '1', 1, 'LOW')\ - .setLogic(cond_1tau17i&cond_xe30&Not(cond_1jet55))\ - .setTriggerType(0x24) - - -item_bgrp0 = LVL1MenuItem('L1_BGRP0', 0xa0, '1', 1, 'LOW').setLogic(bgrp0) -item_bgrp1 = LVL1MenuItem('L1_BGRP1', 0xa1, '1', 1, 'LOW').setLogic(bgrp1) -item_bgrp2 = LVL1MenuItem('L1_BGRP2', 0xa2, '1', 1, 'LOW').setLogic(bgrp2) -item_bgrp3 = LVL1MenuItem('L1_BGRP3', 0xa3, '1', 1, 'LOW').setLogic(bgrp3) -item_bgrp4 = LVL1MenuItem('L1_BGRP4', 0xa4, '1', 1, 'LOW').setLogic(bgrp4) -item_bgrp5 = LVL1MenuItem('L1_BGRP5', 0xa5, '1', 1, 'LOW').setLogic(bgrp5) -item_bgrp6 = LVL1MenuItem('L1_BGRP6', 0xa6, '1', 1, 'LOW').setLogic(bgrp6) -item_bgrp7 = LVL1MenuItem('L1_BGRP7', 0xa7, '1', 1, 'LOW').setLogic(bgrp7) -item_mu10rndm0 = LVL1MenuItem('L1_MU6RNDM0', 0xb0, '1', 1, 'LOW')\ - .setLogic(cond_1mu6&rndm0) - -tm.addLVL1Item(item_bgrp0) -tm.addLVL1Item(item_bgrp1) -tm.addLVL1Item(item_bgrp2) -tm.addLVL1Item(item_bgrp3) -tm.addLVL1Item(item_bgrp4) -tm.addLVL1Item(item_bgrp5) -tm.addLVL1Item(item_bgrp6) -tm.addLVL1Item(item_bgrp7) -tm.addLVL1Item(item_mu10rndm0) - -#--------------------------------------------------------------------------- -# Setting items to the menu -tm.addLVL1Item(item_1em1) -tm.addLVL1Item(item_2em15) -tm.addLVL1Item(item_2em15i) -tm.addLVL1Item(item_1em25) -tm.addLVL1Item(item_1em25i) -tm.addLVL1Item(item_1em60) -tm.addLVL1Item(item_xe20) -# -tm.addLVL1Item(item_1mu6) -tm.addLVL1Item(item_2mu6) -tm.addLVL1Item(item_1mu8) -tm.addLVL1Item(item_1mu10) -tm.addLVL1Item(item_1mu11) -tm.addLVL1Item(item_1mu20) -tm.addLVL1Item(item_1mu40) -# -tm.addLVL1Item(item_1tau5) -tm.addLVL1Item(item_1tau10) 
-tm.addLVL1Item(item_1tau10i) -tm.addLVL1Item(item_1tau15) -tm.addLVL1Item(item_1tau15i) -tm.addLVL1Item(item_1tau20i) -tm.addLVL1Item(item_1tau25i) -tm.addLVL1Item(item_1tau35i) -# -tm.addLVL1Item(item_1tau17i_xe30) - -#=========================================================================== -# Other CTP configuration -#------------------------ -tm.CTPInfo().setRandom('rand1', rate1=1000, rate2=100000, seed1=1, seed2=1) -tm.CTPInfo().setPrescaledClock('psc1', clock1=10, clock2=100) -tm.CTPInfo().setDeadtime(name='dt1', simple=0, - complex1_rate=10, complex1_level=0, - complex2_rate=20, complex2_level=1) -bgs = tm.CTPInfo().setBunchGroupSet('bg_set1')\ - .addBunchGroup('bg_p1', 1, [1, 2])\ - .addBunchGroup('bg_p2', 2, [1001, 2002, 3003])\ - .addBunchGroup('bg_empty', 2, [101, 202, 302]) -#tm.CTPInfo().setTriggerType(0, 0x1234) -#tm.CTPInfo().setTriggerType(1, 0x0) -#tm.CTPInfo().setTriggerType(2, 0x0) -#tm.CTPInfo().setTriggerType(3, 0x0) -#tm.CTPInfo().setTriggerType(4, 0x0) -#tm.CTPInfo().setTriggerType(5, 0x0) -#tm.CTPInfo().setTriggerType(6, 0x124<<128) -#tm.CTPInfo().setTriggerType(7, 0x0) -#=========================================================================== -# Muctpi -#------- -# tm.setMuctpiInfo(low_pt=2, high_pt=6, max_cand=13) - -#=========================================================================== -# CaloInfo -#--------- -tm.Lvl1CaloInfo().setName('standard') -tm.Lvl1CaloInfo().setGlobalScale(1) -#--- ALGORITHM FOR CALCULATING JET WEIGHTS IS NOT WORKING YET!!!!! -#jet_weights = tm.Lvl1CaloInfo().calcJetWeights(tm.Lvl1Thresholds(), \ -# Lvl1CaloInfo.AverageOfThr) -#tm.Lvl1CaloInfo().setJetWeights(jet_weights) -tm.Lvl1CaloInfo().setJetWeights([4, 15, 20, 25, 35, 55, 0, 0, 10, 20, 30, 40]) - -tm.writeConfigFiles() diff --git a/Trigger/TrigConfiguration/TrigConfigSvc/share/hltConfigSvc_standalone_test.py b/Trigger/TrigConfiguration/TrigConfigSvc/share/hltConfigSvc_standalone_test.py deleted file mode 100755 index a4fd03b0cf4cbaa08adc5bab077723035d9e872a..0000000000000000000000000000000000000000 --- a/Trigger/TrigConfiguration/TrigConfigSvc/share/hltConfigSvc_standalone_test.py +++ /dev/null @@ -1,27 +0,0 @@ -######################################################################### -# # -# Standalone jobO for testing the HLT Config service # -# # - -MessageSvc = Service( "MessageSvc" ) -MessageSvc.OutputLevel = INFO -MessageSvc.defaultLimit = 10000000 - -theApp.Dlls += [ "TrigConfigSvc" ] - -theApp.CreateSvc += [ "TrigConf::HLTConfigSvc/HLTConfigSvc" ] - -HLTConfigSvc = Service( "HLTConfigSvc" ) -HLTConfigSvc.ConfigSource = "XML" -# HLTConfigSvc.DBServer = "devdb" -#HLTConfigSvc.XMLSignatureFile = "siglistnew_newconfig.xml" -#HLTConfigSvc.XMLSequenceFile = "seqlistnew_newconfig.xml" -HLTConfigSvc.XMLMenuFile = "hlt_menu.xml" -HLTConfigSvc.OutputLevel = VERBOSE - -theApp.EvtMax = 0 - -# # -# End of standalone jobO for testing the HLTConfig service # -# # -######################################################################### diff --git a/Trigger/TrigConfiguration/TrigConfigSvc/share/hltConfigSvc_standalone_test_configurables.py b/Trigger/TrigConfiguration/TrigConfigSvc/share/hltConfigSvc_standalone_test_configurables.py deleted file mode 100755 index 33ad2f827359ddf2ff8750687e302c89dc7279af..0000000000000000000000000000000000000000 --- a/Trigger/TrigConfiguration/TrigConfigSvc/share/hltConfigSvc_standalone_test_configurables.py +++ /dev/null @@ -1,39 +0,0 @@ -######################################################################### -# # -# Standalone jobO for 
testing the HLT Config service # -# # - - -MessageSvc = Service( "MessageSvc" ) -MessageSvc.OutputLevel = INFO -MessageSvc.defaultLimit = 10000000 - -#from TrigConfigSvc.TrigConfigSvcConfig import HLTConfigSvc -from TrigConfigSvc.TrigConfigSvcConf import TrigConf__HLTConfigSvc -from AthenaCommon.AlgSequence import AlgSequence - -job = AlgSequence() - -#job += HLTConfigSvc( "HLTConfigSvc-Test" ) -ServiceMgr += TrigConf__HLTConfigSvc( "HLTConfigSvc" ) - - -#theApp.Dlls += [ "TrigConfigSvc" ] - -#theApp.CreateSvc += [ "TrigConf::HLTConfigSvc/HLTConfigSvc" ] - -#HLTConfigSvc = Service( "HLTConfigSvc" ) -#HLTConfigSvc.ConfigSource = "XML" -# HLTConfigSvc.DBServer = "devdb" -#HLTConfigSvc.XMLSignatureFile = "siglistnew_newconfig.xml" -#HLTConfigSvc.XMLSequenceFile = "seqlistnew_newconfig.xml" - -#job.HLTConfigSvc.XMLMenuFile = "hlt_menu.xml" -#HLTConfigSvc.OutputLevel = VERBOSE - -theApp.EvtMax = 1 - -# # -# End of standalone jobO for testing the HLTConfig service # -# # -######################################################################### diff --git a/Trigger/TrigConfiguration/TrigConfigSvc/share/lvl1ConfigSvc_standalone_test.py b/Trigger/TrigConfiguration/TrigConfigSvc/share/lvl1ConfigSvc_standalone_test.py deleted file mode 100755 index d048e9e6bd58bc61a874d1b7354dde3f144bdf7e..0000000000000000000000000000000000000000 --- a/Trigger/TrigConfiguration/TrigConfigSvc/share/lvl1ConfigSvc_standalone_test.py +++ /dev/null @@ -1,36 +0,0 @@ -######################################################################### -# # -# Standalone jobO for testing the LVL1Config service # -# # - -MessageSvc = Service( "MessageSvc" ) -MessageSvc.OutputLevel = INFO -MessageSvc.defaultLimit = 10000000 - -theApp.Dlls += [ "TrigConfigSvc" ] - -theApp.CreateSvc += [ "TrigConf::LVL1ConfigSvc/LVL1ConfigSvc" ] -from TrigConfigSvc.TrigConfigSvcConfig import LVL1ConfigSvc -from AthenaCommon.AppMgr import ServiceMgr - -theLVL1ConfigSvc = LVL1ConfigSvc('LVL1ConfigSvc') -ServiceMgr += theLVL1ConfigSvc - -theLVL1ConfigSvc.ConfigSource = "XML" -#theLVL1ConfigSvc.DBServer = "devdb" -theLVL1ConfigSvc.XMLFile = "LVL1Config.xml" -theLVL1ConfigSvc.CreateLegacyObjects = TRUE -theLVL1ConfigSvc.DumpTTVmap = False -theLVL1ConfigSvc.OutputLevel = VERBOSE - -#include( "TrigT1Muctpi/TrigT1Muctpi_jobOptions.py" ) -#include( "TrigT1Calo/TrigT1CaloJobOptions.py" ) -#include( "TrigT1TGC/TrigT1TGCJobOptions.py" ) -#include( "TrigT1CTP/TrigT1CTP_jobOptions.py" ) - -theApp.EvtMax = 0 - -# # -# End of standalone jobO for testing the LVL1Config service # -# # -######################################################################### diff --git a/Trigger/TrigConfiguration/TrigConfigSvc/share/testKeyByNameLoader.py b/Trigger/TrigConfiguration/TrigConfigSvc/share/testKeyByNameLoader.py deleted file mode 100755 index ba8a77cb4a4904471288715658d11803f93cf4a0..0000000000000000000000000000000000000000 --- a/Trigger/TrigConfiguration/TrigConfigSvc/share/testKeyByNameLoader.py +++ /dev/null @@ -1,13 +0,0 @@ -#!/usr/bin/env python2.5 - -__author__ = "Joerg Stelzer stelzer@cern.ch" - -if __name__ == '__main__': - - from TrigConfigSvc.TrigConfigSvcUtils import getKeysFromName - - #configkeys = getKeysFromName('sqlite_file:TriggerMenuSQLiteFile_rel_2.sqlite','%') - configkeys = getKeysFromName('TRIGGERDB','Cosmic%', MCOnly=True) - print configkeys - -
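The probe script removed in this final hunk, testKeyByNameLoader.py, still carried a python2.5 shebang and a bare Python 2 print statement. A minimal sketch of the same check, rewritten with the idioms applied throughout the rest of this patch, is shown below; getKeysFromName, the 'TRIGGERDB' alias, the 'Cosmic%' pattern and MCOnly=True are all taken from the deleted script.

#!/usr/bin/env python
# Sketch only: the deleted testKeyByNameLoader.py re-expressed with the
# Python 2/3 compatible idioms used elsewhere in this patch.
from __future__ import print_function

from TrigConfigSvc.TrigConfigSvcUtils import getKeysFromName

if __name__ == '__main__':
    configkeys = getKeysFromName('TRIGGERDB', 'Cosmic%', MCOnly=True)
    print(configkeys)

# The same handful of substitutions recurs throughout the patch:
#   print x              ->  print(x)
#   except E, e          ->  except E as e
#   raise E, "msg"       ->  raise E("msg")
#   d.has_key(k)         ->  k in d
#   x == None / != None  ->  x is None / x is not None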