Commit d6c72e62 authored by Frank Winklmeier

TrigConfigSvc: Python fixes and cleanup

Enable the `flake8` build checker and make code compliant. Also delete
files that are obsolete.
parent 863a39e6
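
The flake8 fixes in the per-file diffs below follow a handful of recurring patterns. As a reference, a minimal sketch of those patterns (illustrative only, not part of the commit):

import logging
log = logging.getLogger("TrigConfigSvc")
value = 42

# %-formatting inside the call formats eagerly; passing the arguments lets
# the logger format lazily and satisfies flake8's logging checks:
log.info("key is %i" % value)   # before
log.info("key is %i", value)    # after

# a bare 'except:' (E722) also swallows SystemExit/KeyboardInterrupt:
try:
    import TrigPSC  # stand-in for an optional import
except Exception:   # after: explicit exception class
    TrigPSC = None

# identity and membership tests (E711/E713):
x = None
ok = (x is None)            # instead of: x == None
ok = (0 not in [1, 2, 3])   # instead of: not 0 in [1, 2, 3]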
@@ -44,15 +44,10 @@ atlas_add_component( TrigConfigSvc
                      LINK_LIBRARIES AthAnalysisBaseCompsLib AthenaBaseComps TrigConfIOLib TrigConfData TrigConfStorage TrigConfL1Data L1TopoConfig EventInfo AthenaMonitoringLib nlohmann_json::nlohmann_json )

 # Install files from the package:
-atlas_install_python_modules( python/*.py )
-atlas_install_joboptions( share/hltConfigSvc_standalone_test_configurables.py
-                          share/hltConfigSvc_standalone_test.py
-                          share/jobOptions_commonSetup.py
+atlas_install_python_modules( python/*.py POST_BUILD_CMD ${ATLAS_FLAKE8} )
+atlas_install_joboptions( share/jobOptions_commonSetup.py
                           share/jobOptions_setupHLTSvc.py
                           share/jobOptions_setupLVL1Svc.py
-                          share/L1config-example.py
-                          share/lvl1ConfigSvc_standalone_test.py
-                          share/testKeyByNameLoader.py
-                          share/testTriggerFrontierQuery.py )
 atlas_install_scripts( share/checkTrigger.py share/checkTriggerConfigOld.py share/trigconf_property.py )
 atlas_install_xmls( data/*.dtd )
...
# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration

## @file DoDBConfig.py
## @brief Configure services to extract the trigger configuration for DB upload
## $Id: DoDBConfig.py,v 1.4 2009-03-05 14:52:41 pbell Exp $

def _doDBConfig():
    from AthenaCommon.AppMgr import ServiceMgr as svcMgr
    from AthenaCommon.AppMgr import theApp
    from TriggerJobOpts.TriggerFlags import TriggerFlags
    from AthenaCommon.Logging import logging

    log = logging.getLogger( 'TrigConfigSvc::DoDBConfig:' )

    if TriggerFlags.doLVL2(): SetupOutput = "l2"
    else: SetupOutput = "ef"
    SetupOutput += "_" + TriggerFlags.Online.doDBConfigBaseName() + "_setup.txt"

    if hasattr(svcMgr,'LVL1ConfigSvc'):
        log.info("DBCONFIG LVL1XML %s" % svcMgr.LVL1ConfigSvc.XMLFile)
    if hasattr(svcMgr,'HLTConfigSvc'):
        log.info("DBCONFIG HLTXML %s" % svcMgr.HLTConfigSvc.XMLMenuFile)
    log.info("DBCONFIG SETUP %s" % SetupOutput)

    # Replace the common HistorySvc by the TrigHistorySvc
    from TrigConfOffline.TrigHistorySvc import TrigHistorySvc
    if not svcMgr.__dict__.has_key('HistorySvc') or not isinstance( svcMgr.HistorySvc, TrigHistorySvc ):
        if svcMgr.__dict__.has_key('HistorySvc'): del svcMgr.HistorySvc
        svcMgr += TrigHistorySvc('HistorySvc')

    svcMgr.HistorySvc.OutputFile = SetupOutput
    svcMgr.HistorySvc.Dump = True
    svcMgr.HistorySvc.Activate = True
    theApp.ActivateHistory = True

    # Fill the meta data from the PSC configuration if available
    try:
        from TrigPSC import PscConfig
    except:
        log.warning('Cannot import TrigPSC.PscConfig. TrigConfMetaData will not be filled.')
        pass

    if 'PscConfig' in dir():
        from TrigConfigSvc.TrigConfMetaData import TrigConfMetaData
        meta = TrigConfMetaData()
        meta.PreCommand = PscConfig.optmap['PRECOMMAND']
        meta.JobOptions = PscConfig.optmap['JOBOPTIONSPATH']
    return

# Create meta data configurable (needs to be at module-level)
from TrigConfigSvc.TrigConfMetaData import TrigConfMetaData
meta = TrigConfMetaData()

# configure
_doDBConfig()

# clean-up
del _doDBConfig
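
Note that DoDBConfig.py still relies on the Python-2-only dict.has_key(), which flake8 (and Python 3) reject; that is consistent with the file being dropped as obsolete rather than fixed. For reference, a sketch of the Python 3 equivalent of the checks above (hypothetical stand-in dict, not the commit's code):

services = {}

# not services.has_key('HistorySvc')  becomes:
if 'HistorySvc' not in services:
    services['HistorySvc'] = object()

# services.has_key('HistorySvc')  becomes:
if 'HistorySvc' in services:
    del services['HistorySvc']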
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration

## @file TrigCond2COOL.py
## $Id: TrigConf2COOL.py,v 1.14 2009-05-06 13:09:27 stelzer Exp $
###############################################################
#
# The jobOptions to enable reading and writing of configuration
@@ -11,9 +10,6 @@
#==============================================================
#
# Required libs:
-import os.path
-import threading
 from . import TrigConf2COOLLib as _impl
 from AthenaCommon.AppMgr import theApp
...
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration

## @file TrigCond2COOLLib.py
## $Id: TrigConf2COOL.py,v 1.14 2009-05-06 13:09:27 stelzer Exp $
###############################################################
#
# The jobOptions to enable reading and writing of configuration
@@ -11,11 +10,10 @@
#==============================================================
#
# Required libs:
-import os.path
+import os
 import threading
 import AthenaCommon.Logging as L
 from AthenaCommon.JobProperties import jobproperties as jp
 from AthenaPython import PyAthena
@@ -31,7 +29,7 @@ class TmpThr(threading.Thread):
            line = line.lower()
            if ' warning ' in line:
                maxlevel = max(1,maxlevel)
-            if ' error ' in line and not 'connection refused' in line:
+            if ' error ' in line and 'connection refused' not in line:
                maxlevel = max(2,maxlevel)
            elif ' fatal ' in line.lower() or 'exception ' in line.lower():
                maxlevel = max(3,maxlevel)
@@ -85,7 +83,6 @@ class ConfToCoolSQlite:
        self.dbConnection = "<dbConnection>sqlite://;schema=%s;dbname=%s</dbConnection>" % (self.dbfilename,self.dbname)
        self.isWritingNeeded = False
-        from RecExConfig.RecFlags import jobproperties as jp
        from TriggerJobOpts.TriggerFlags import jobproperties as jp
        from TriggerJobOpts.TriggerFlags import TriggerFlags as tf
        if jp.Rec.Trigger.readLVL1configFromXML():
            self.lvl1menu = jp.Rec.Trigger.inputLVL1configFile()
@@ -127,12 +124,12 @@ class ConfToCoolSQlite:
            return

        # we write COOL sqlite file from given HLT and LVL1 menu xml
        if self.menusource == 'xml':
-            msg.info("Writing menu %s and %s to COOL (%s)" % (self.lvl1menu, self.hltmenu, self.dbfilename) )
+            msg.info("Writing menu %s and %s to COOL (%s)", self.lvl1menu, self.hltmenu, self.dbfilename)
            syscmd = "rm -f %s; TrigConfReadWrite -i %s %s -o cool '%s;%s'" % (self.dbfilename, self.hltmenu, self.lvl1menu, self.dbfilename, self.dbname)
        else: # db
            if self.smk==0 or self.l1psk==0 or self.hltpsk==0:
-                raise RuntimeError, "Source of trigger menu configuration is the TriggerDB, but no keys are specified: %i/%i/%i" % (self.smk, self.l1psk, self.hltpsk)
-            msg.info("Writing menu (keys: %i/%i/%i/%i) from triggerDB (%s) to COOL (%s)" % (self.smk, self.l1psk, self.hltpsk, self.bgsk, self.trigdb, self.dbfilename) )
+                raise RuntimeError("Source of trigger menu configuration is the TriggerDB, but no keys are specified: %i/%i/%i" % (self.smk, self.l1psk, self.hltpsk))
+            msg.info("Writing menu (keys: %i/%i/%i/%i) from triggerDB (%s) to COOL (%s)", self.smk, self.l1psk, self.hltpsk, self.bgsk, self.trigdb, self.dbfilename)
            syscmd = "rm -f %s; TrigConf2COOLApp -e createwrite" % self.dbfilename
            syscmd += " --cooldb 'sqlite://;schema=%s;dbname=%s'" % (self.dbfilename,self.dbname)
            syscmd += " --trigdb '%s' --configkey %i --prescalekeylvl1 %i --prescalekeyhlt %i --bgkey %i" % (self.trigdb, self.smk, self.l1psk, self.hltpsk, self.bgsk)
@@ -141,7 +138,7 @@ class ConfToCoolSQlite:
            syscmd += " --infiov"

-        msg.info('executing system command to create COOL SQlite file %s with trigger configuration' % self.dbfilename)
+        msg.info('executing system command to create COOL SQlite file %s with trigger configuration', self.dbfilename)
        msg.info("> " + syscmd)
        tmpThr = TmpThr(syscmd)
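
TmpThr above runs the conversion command in a thread and grades its output by scanning for severity markers. A self-contained sketch of that grading logic (assumed helper names, using subprocess rather than the package's thread class):

import subprocess

def max_severity(output):
    """0=ok, 1=warning, 2=error, 3=fatal/exception, mirroring TmpThr above."""
    maxlevel = 0
    for line in output.splitlines():
        low = line.lower()
        if ' warning ' in low:
            maxlevel = max(1, maxlevel)
        if ' error ' in low and 'connection refused' not in low:
            maxlevel = max(2, maxlevel)
        elif ' fatal ' in low or 'exception ' in low:
            maxlevel = max(3, maxlevel)
    return maxlevel

proc = subprocess.run("echo ' ERROR while writing COOL file '", shell=True,
                      capture_output=True, text=True)
print(max_severity(proc.stdout))  # -> 2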
...
# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration

## @file TrigCond2XML.py
## @brief Core job python configurable to setup IOVDbSvc
## @author RD Schaffer <R.D.Schaffer@cern.ch>
## $Id: TrigConf2XML.py,v 1.3 2008-03-07 22:40:23 stelzer Exp $
###############################################################
#
# The jobOptions to enable writing of configuration
# data from the DB to XML
#
#==============================================================
#
# Required libs:
import os.path

from AthenaCommon.JobProperties import jobproperties as jp
from AthenaCommon.Logging import logging

class ConfToXML:
    """Writes the trigger configuration information to XML"""
    __instance = None

    class __single:
        def __init__(self):
            self.trigdb = ""
            self.dbuser = ""
            self.dbpass = ""
            self.dbkey = 1
            self.lvl1pskey = 1
            self.hltpskey = 1
            self.outputfile = "dbOutput"

        def writeConf2XML(self):
            msg = logging.getLogger( 'TriggerConfig(2XML)' )
            msg.info("Writing HLT XML file %s" % (self.outputfile) )

            syscmd = "TrigConf2XMLApp"
            syscmd += " --trigdb %s" % (self.trigdb)
            syscmd += " --u %s" % self.dbuser
            syscmd += " --p %s " % self.dbpass
            syscmd += " --configkey %i" % self.dbkey
            syscmd += " --prescalekeylvl1 %i" % self.lvl1pskey
            syscmd += " --prescalekeyhlt %i" % self.hltpskey
            syscmd += " --outputfile %s" % self.outputfile

            msg.info('executing system command to create XML file')
            msg.info("> " + syscmd)
            output = os.popen(syscmd).readlines()
            for line in output:
                msg.info(line.strip())

    def __init__(self):
        """ Create singleton instance """
        if self.__instance is None:
            ConfToXML.__instance = ConfToXML.__single()
        # Store instance reference as the only member in the handle, not sure if this is needed
        self.__dict__['_ConfToXML__instance'] = ConfToXML.__instance

    def __getattr__(self, attr):
        """ Delegate access to implementation """
        return getattr(self.__instance, attr)

    def __setattr__(self, attr, value):
        """ Delegate access to implementation """
        return setattr(self.__instance, attr, value)

theConfXMLWriter = ConfToXML()
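
ConfToXML wraps its state in a private __single class and delegates attribute access, so every handle shares one implementation object. The idiom in generic form (a sketch with illustrative names, not the package's code):

class Handle:
    __instance = None          # shared implementation, created on first use

    class __Impl:
        def __init__(self):
            self.value = 0

    def __init__(self):
        if Handle.__instance is None:
            Handle.__instance = Handle.__Impl()
        # write to __dict__ directly to bypass our own __setattr__
        self.__dict__['_Handle__instance'] = Handle.__instance

    def __getattr__(self, attr):
        return getattr(self.__instance, attr)

    def __setattr__(self, attr, value):
        return setattr(self.__instance, attr, value)

a, b = Handle(), Handle()
a.value = 7
assert b.value == 7   # both handles see the same shared state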
#!/usr/bin/env python
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration

from AthenaCommon.Logging import logging
import time
@@ -8,9 +8,9 @@ def getFrontierCursor(url, schema, loglevel = logging.INFO):
    log = logging.getLogger( "TrigConfFrontier.py" )
    log.setLevel(loglevel)
    try:
-        from TrigConfDBConnection import frontier_client
+        from TrigConfDBConnection import frontier_client  # noqa: F401
        return FrontierCursor2( url = url, schema = schema)
-    except:
+    except Exception:
        log.warning("Couldn't import frontier_client from TrigConfDBConnection, falling back to pure python implementation without proper url resolution")
        return FrontierCursor( url = url, schema = schema)
@@ -23,9 +23,9 @@ class FrontierCursor2:
        self.refreshFlag = refreshFlag
        from TrigConfDBConnection import frontier_client as fc
        fc.init("PyFrontier","debug")
-        log.debug("Frontier URL : %s" % self.url)
-        log.debug("Schema : %s" % self.schema)
-        log.debug("Refresh cache : %s" % self.refreshFlag)
+        log.debug("Frontier URL : %s", self.url)
+        log.debug("Schema : %s", self.schema)
+        log.debug("Refresh cache : %s", self.refreshFlag)

    @classmethod
    def resolvebindvars(cls, query, bindvars):
@@ -35,7 +35,7 @@ class FrontierCursor2:
        import re
        varsextract = re.findall(':([A-z0-9]*)',query)
        values = map(bindvars.get, varsextract)
-        log.debug("Resolving bound variable %r with %r" % (varsextract,values))
+        log.debug("Resolving bound variable %r with %r", varsextract,values)
        appendix = ":".join([str(v) for v in values])
        queryWithQuestionMarks = re.sub(':[A-z0-9]*','?', query)
        query = queryWithQuestionMarks + ':' + appendix
@@ -54,8 +54,8 @@ class FrontierCursor2:
                query = query.replace(":%s" % var,"%s" % val)
            else:
                query = query.replace(":%s" % var,"%r" % val)
-            log.debug("Resolving bound variable '%s' with %r" % (var,val))
-        log.debug("Resolved query: %s" % query)
+            log.debug("Resolving bound variable '%s' with %r", var,val)
+        log.debug("Resolved query: %s", query)
        return query

    def execute(self, query, bindvars={}):
@@ -64,7 +64,7 @@ class FrontierCursor2:
        from TrigConfDBConnection import frontier_client as fc
        log = logging.getLogger( "TrigConfFrontier.py" )
-        log.debug("Executing query : %s" % query)
+        log.debug("Executing query : %s", query)
        conn = fc.Connection(self.url)
        session = fc.Session(conn)
@@ -73,7 +73,7 @@ class FrontierCursor2:
        conn.setReload(doReload)

        queryStart = time.localtime()
-        log.debug("Query started: %s" % time.strftime("%m/%d/%y %H:%M:%S %Z", queryStart))
+        log.debug("Query started: %s", time.strftime("%m/%d/%y %H:%M:%S %Z", queryStart))
        t1 = time.time()
        req = fc.Request("frontier_request:1:DEFAULT", fc.encoding_t.BLOB)
@@ -95,9 +95,9 @@ class FrontierCursor2:
        queryEnd = time.localtime()
        self.result = [r for r in session.getRecords2()]
-        log.debug("Query ended: %s" % time.strftime("%m/%d/%y %H:%M:%S %Z", queryEnd))
-        log.debug("Query time: %s seconds" % (t2-t1))
-        log.debug("Result size: %i entries" % len(self.result))
+        log.debug("Query ended: %s", time.strftime("%m/%d/%y %H:%M:%S %Z", queryEnd))
+        log.debug("Query time: %s seconds", (t2-t1))
+        log.debug("Result size: %i entries", len(self.result))

    def fetchall(self):
        return self.result
@@ -113,7 +113,6 @@ Refresh cache: %s""" % (self.url, self.schema, self.refreshFlag)

class FrontierCursor:
    def __init__(self, url, schema, refreshFlag=False, doDecode=True, retrieveZiplevel="zip"):
        log = logging.getLogger( "TrigConfFrontier.py" )
        if url.startswith('('):
            self.servertype, self.url = FrontierCursor.getServerUrls(url)[0]
            self.url += "/Frontier"
@@ -133,7 +132,7 @@ Refresh cache: %s""" % (self.url, self.refreshFlag)
    @classmethod
    def getServerUrls(cls, frontier_servers):
        from re import findall
-        return findall('\((serverurl)=(.*?)\)',frontier_servers)
+        return findall(r'\((serverurl)=(.*?)\)',frontier_servers)

    @classmethod
    def testUrl(cls, url):
@@ -148,9 +147,9 @@ Refresh cache: %s""" % (self.url, self.refreshFlag)
        query = FrontierCursor2.replacebindvars(query,bindvars)

        log = logging.getLogger( "TrigConfFrontier.py" )
-        log.debug("Using Frontier URL: %s" % self.url)
-        log.debug("Refresh cache : %s" % self.refreshFlag)
-        log.debug("Query : %s" % query)
+        log.debug("Using Frontier URL: %s", self.url)
+        log.debug("Refresh cache : %s", self.refreshFlag)
+        log.debug("Query : %s", query)

        import base64, zlib, urllib2, time
@@ -168,16 +167,16 @@ Refresh cache: %s""" % (self.url, self.refreshFlag)
        request.add_header("X-Frontier-Id", frontierId)

        queryStart = time.localtime()
-        log.debug("Query started: %s" % time.strftime("%m/%d/%y %H:%M:%S %Z", queryStart))
+        log.debug("Query started: %s", time.strftime("%m/%d/%y %H:%M:%S %Z", queryStart))

        t1 = time.time()
        result = urllib2.urlopen(request,None,10).read()
        t2 = time.time()

        queryEnd = time.localtime()
-        log.debug("Query ended: %s" % time.strftime("%m/%d/%y %H:%M:%S %Z", queryEnd))
-        log.debug("Query time: %s [seconds]" % (t2-t1))
-        log.debug("Result size: %i [seconds]" % len(result))
+        log.debug("Query ended: %s", time.strftime("%m/%d/%y %H:%M:%S %Z", queryEnd))
+        log.debug("Query time: %s [seconds]", (t2-t1))
+        log.debug("Result size: %i [seconds]", len(result))
        self.result = result

    def fetchall(self):
@@ -188,7 +187,7 @@ Refresh cache: %s""" % (self.url, self.refreshFlag)
    def decodeResult(self):
        log = logging.getLogger( "TrigConfFrontier.py" )
        from xml.dom.minidom import parseString
-        import base64,zlib, curses.ascii
+        import base64, zlib, curses.ascii
        #print "Query result:\n", self.result
        dom = parseString(self.result)
        dataList = dom.getElementsByTagName("data")
@@ -220,7 +219,6 @@ Refresh cache: %s""" % (self.url, self.refreshFlag)
                firstRow = firstRow.replace(c, ' ')

        fields = [x for i,x in enumerate(firstRow.split()) if i%2==0]
        types = [x for i,x in enumerate(firstRow.split()) if i%2==1]
-        Nfields = len(fields)
        ptypes = []
        for t in types:
            if t.startswith("NUMBER"):
@@ -232,9 +230,9 @@ Refresh cache: %s""" % (self.url, self.refreshFlag)
                ptypes.append(str)

-        log.debug("Fields : %r" % fields)
-        log.debug("DB Types : %r" % types)
-        log.debug("Python Types: %r" % ptypes)
+        log.debug("Fields : %r", fields)
+        log.debug("DB Types : %r", types)
+        log.debug("Python Types: %r", ptypes)

        row = str(row[endFirstRow+1:])
@@ -264,7 +262,6 @@ Refresh cache: %s""" % (self.url, self.refreshFlag)

def testConnection():
    import os
    log = logging.getLogger( "TrigConfFrontier.py::testConnection()" )
    log.setLevel(logging.DEBUG)
@@ -328,10 +325,10 @@ TE2CP.HTE2CP_ALGORITHM_COUNTER DESC"""

def testBindVarResolution():
    query = "SELECT :bar WHERE :foo = :bar sort by :ups asc, :foo"
    bindvars = {"foo": 500, "bar": 8, "ups": 42 }
-    print "Query"
-    print query
-    print "is translated to"
-    print FrontierCursor2.resolvebindvars(query, bindvars)
+    print("Query")
+    print(query)
+    print("is translated to")
+    print(FrontierCursor2.resolvebindvars(query, bindvars))

if __name__=="__main__":
...
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration

## @file TrigConfMetaData.py
## @brief Meta data for the trigger configuration
## $Id: $

-from GaudiKernel.GaudiHandles import *
-from GaudiKernel.Proxy.Configurable import *
+from GaudiKernel.Proxy.Configurable import Configurable, ConfigurableUser

class TrigConfMetaData( ConfigurableUser ) :
    """Configurable to store aribitray meta data in the job options
...
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration

import ROOT, cppyy
import AthenaROOTAccess.transientTree
@@ -48,7 +48,6 @@ def checkPoolFileForRunLevel(poolfilename):
        for payload in plc.iter():
            for i in xrange(payload.size()):
                chanNum = int(payload.chanNum(i))
-                iovr = payload.iovRange(chanNum)
                l1keys += [ payload.attributeList(chanNum)["Lvl1PrescaleConfigurationKey"].data("unsigned int")() ]

    if hasattr(mdt,folderName["HLTK"]):
@@ -60,14 +59,13 @@ def checkPoolFileForRunLevel(poolfilename):
        for payload in plc.iter():
            for i in xrange(payload.size()):
                chanNum = int(payload.chanNum(i))
-                iovr = payload.iovRange(chanNum)
                hltkeys += [ payload.attributeList(chanNum)["HltPrescaleConfigurationKey"].data("unsigned int")() ]

-    mlog.info("File contained these LVL1 prescale keys: %r" % l1keys)
-    mlog.info("File contained these HLT prescale keys: %r" % hltkeys)
+    mlog.info("File contained these LVL1 prescale keys: %r", l1keys)
+    mlog.info("File contained these HLT prescale keys: %r", hltkeys)

-    hasL1 = len(l1keys)>0 and not 0 in l1keys
-    hasHLT = len(hltkeys)>0 and not 0 in hltkeys
+    hasL1 = len(l1keys)>0 and 0 not in l1keys
+    hasHLT = len(hltkeys)>0 and 0 not in hltkeys

    from TriggerJobOpts.TriggerFlags import TriggerFlags
    if hasL1 or hasHLT:
@@ -77,8 +75,4 @@ def checkPoolFileForRunLevel(poolfilename):
            TriggerFlags.dataTakingConditions='Lvl1Only'
        else:
            TriggerFlags.dataTakingConditions='HltOnly'
-    mlog.info("Set TriggerFlags.dataTakingConditions to '%s'" % TriggerFlags.dataTakingConditions())
+    mlog.info("Set TriggerFlags.dataTakingConditions to '%s'", TriggerFlags.dataTakingConditions())
#!/usr/bin/env python
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration

from xml.dom import minidom
import re
import os
@@ -28,7 +28,7 @@ def _getFileLocalOrPath(filename, pathenv):
    returns path/filename if existing, otherwise None
    """
    if os.path.exists(filename):
-        log.info( "Using local file %s" % filename)
+        log.info( "Using local file %s", filename)
        return filename

    pathlist = os.getenv(pathenv,'').split(os.pathsep)
@@ -45,7 +45,7 @@ def _getConnectionServicesForAlias(alias):
    connectionServices = None # list of services

    dblookupfilename = _getFileLocalOrPath('dblookup.xml','CORAL_DBLOOKUP_PATH')
-    if dblookupfilename == None: return None
+    if dblookupfilename is None: return None

    doc = minidom.parse(dblookupfilename)
    for ls in doc.getElementsByTagName('logicalservice'):
@@ -53,8 +53,8 @@ def _getConnectionServicesForAlias(alias):
        connectionServices = [str(s.attributes['name'].value) for s in ls.getElementsByTagName('service')]
    doc.unlink()

-    log.info( "For alias '%s' found list of connections %r" % (alias,connectionServices) )
-    if connectionServices == None:
+    log.info( "For alias '%s' found list of connections %r", alias,connectionServices )
+    if connectionServices is None:
        log.fatal("Trigger connection alias '%s' is not defined in %s" % (alias,dblookupfilename))
    return connectionServices
@@ -68,13 +68,12 @@ def _readAuthentication():
    authDict = {}

    dbauthfilename = _getFileLocalOrPath('authentication.xml','CORAL_AUTH_PATH')
-    if dbauthfilename == None: return authDict
+    if dbauthfilename is None: return authDict

    doc = minidom.parse(dbauthfilename)
    for cn in doc.getElementsByTagName('connection'):
        user = ""
        pw = ""
        svc = cn.attributes['name'].value
        for p in cn.getElementsByTagName('parameter'):
            if p.attributes['name'].value == 'user': user = p.attributes['value'].value
            if p.attributes['name'].value == 'password': pw = p.attributes['value'].value
@@ -133,7 +132,7 @@ def _getConnectionParameters(connection):
        connectionParameters["passwd"] = passwd

    elif connection.startswith("frontier://"):
-        pattern = "frontier://ATLF/\(\)/(.*)"
+        pattern = r"frontier://ATLF/\(\)/(.*)"
        m = re.match(pattern,connection)
        if not m:
            log.fatal("connection string '%s' doesn't match the pattern '%s'?" % (connection,pattern) )
@@ -145,14 +144,15 @@ def interpretConnection(connection, debug=False, resolveAlias=True):
def interpretConnection(connection, debug=False, resolveAlias=True):
-    # connection needs to be of the following format (this is also the order of checking)
-    # <ALIAS> -- any string without a colon ':' will be checked for in the dblookup.xml file
-    # type:<detail> -- no dblookup will be used, type has to be oracle, mysql, or sqlite_file
-    # sqlite_file:filename.db -- an sqlite file, no authentication needed, will be opened in read-only mode
-    # oracle://ATLR/ATLAS_CONF_TRIGGER_V2 -- a service description without user and password, requires lookup in authentication.xml
-    # oracle://ATLR/ATLAS_CONF_TRIGGER_V2;username=ATLAS_CONF_TRIGGER_V2_R;password=<...> -- a service description with user and password
+    """connection needs to be of the following format (this is also the order of checking)
+    <ALIAS> -- any string without a colon ':' will be checked for in the dblookup.xml file
+    type:<detail> -- no dblookup will be used, type has to be oracle, mysql, or sqlite_file
+    sqlite_file:filename.db -- an sqlite file, no authentication needed, will be opened in read-only mode
+    oracle://ATLR/ATLAS_CONF_TRIGGER_V2 -- a service description without user and password, requires lookup in authentication.xml
+    oracle://ATLR/ATLAS_CONF_TRIGGER_V2;username=ATLAS_CONF_TRIGGER_V2_R;password=<...> -- a service description with user and password
+    """
-    log.info("Specified connection string '%s'" % connection)
+    log.info("Specified connection string '%s'", connection)

    # not needed any longer
    # connection = connection.lstrip("dblookup://")
@@ -176,27 +176,18 @@ def interpretConnection(connection, debug=False, resolveAlias=True):
        return connectionParameters

    connectionServices = _getConnectionServicesForAlias( connection ) # alias resolution via dblookup
-    if connectionServices == None:
+    if connectionServices is None:
        return connectionParameters

-    # If TriggerFlags.triggerUseFrontier=true then we remove sqlite files
-    from TriggerJobOpts.TriggerFlags import TriggerFlags as tf
-    #if tf.triggerUseFrontier() or os.getenv('TRIGGER_USE_FRONTIER',False):
-    #    connectionServices = filter(lambda conn: not conn.startswith("sqlite_file"), connectionServices)
-    #    if 'ATLAS_TRIGGERDB_FORCESQLITE' in os.environ:
-    #        log.fatal("Inconsistent setup: environment variable ATLAS_TRIGGERDB_FORCESQLITE is defined and use of Frontier is requested" )

    # SQLite
    sqliteconnections = [conn for conn in connectionServices if conn.startswith("sqlite_file")]
    if len(sqliteconnections)>0:
        for conn in sqliteconnections:
            connectionParameters = _getConnectionParameters( conn )
-            if connectionParameters["filename"] != None:
+            if connectionParameters["filename"] is not None:
                break # stop at the first sqlite file that exists
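
interpretConnection's docstring defines the dispatch order: a colon-free string is a dblookup.xml alias, anything else is dispatched on its technology prefix. A minimal sketch of that first dispatch step (simplified, hypothetical helper; the real function also resolves aliases and credentials):

def classify_connection(connection):
    if ':' not in connection:
        return 'alias'  # to be resolved via dblookup.xml
    technology = connection.split(':', 1)[0]
    if technology in ('oracle', 'mysql', 'sqlite_file', 'frontier'):
        return technology
    raise ValueError("unknown connection technology: %r" % technology)

print(classify_connection('TRIGGERDB'))                            # -> alias
print(classify_connection('sqlite_file:TriggerMenu.db'))           # -> sqlite_file
print(classify_connection('oracle://ATLR/ATLAS_CONF_TRIGGER_V2'))  # -> oracle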