From c24428c0029390d9555b87dbfbf9dd014624805e Mon Sep 17 00:00:00 2001
From: Aleksandra Poreba <aleksandra.poreba@cern.ch>
Date: Mon, 14 Dec 2020 15:18:17 +0000
Subject: [PATCH] Refactor of debug stream scripts

* retrieve Atlas Project name from DB
* move to ROOT 6
* move to cppyy 1.6.1
* whitespace changes to unify document style
---
 .../TrigTransform/python/dbgAnalysis.py       | 107 +++++++++---------
 .../TrigTransform/python/dbgEventInfo.py      |  56 +++++----
 .../TrigTransform/python/trigRecoExe.py       |  57 +++++-----
 .../TrigTransform/python/trigTranslate.py     |   7 +-
 .../TrigTransform/scripts/Trig_reco_tf.py     |   2 -
 5 files changed, 113 insertions(+), 116 deletions(-)

diff --git a/HLT/Trigger/TrigTransforms/TrigTransform/python/dbgAnalysis.py b/HLT/Trigger/TrigTransforms/TrigTransform/python/dbgAnalysis.py
index 0efcaefe2ec..38e3db2fee1 100644
--- a/HLT/Trigger/TrigTransforms/TrigTransform/python/dbgAnalysis.py
+++ b/HLT/Trigger/TrigTransforms/TrigTransform/python/dbgAnalysis.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 
-# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 # @brief: Pre and Post debug_stream analysis operations for trigger transform
 # @details: Code to carry out operations that are needed for running the trigger debug_stream analysis in transform
@@ -18,8 +18,7 @@ from TrigTransform.dbgEventInfo import dbgEventInfo
 from TrigConfStorage.TriggerCoolUtil import TriggerCoolUtil
 from PyCool import cool
 
-from PyUtils import RootUtils
-ROOT = RootUtils.import_root()
+import ROOT
 from ROOT import TFile
 
 def dbgPreRun(inputFileList,outputFileList):
@@ -36,7 +35,7 @@ def dbgPreRun(inputFileList,outputFileList):
     data = []
     l1Info = []
     hltInfo = []
-    relInfo = str()
+    relInfo = []
     runInfo = 0
     for inputFile in inputFileList.value:
         
@@ -63,17 +62,20 @@ def dbgPreRun(inputFileList,outputFileList):
             eventInfo.event_count(event)
             eventInfo.event_info(event, l1Info, hltInfo)
             eventInfo.fillTree()
+
     #close output TFile
     hfile.Write()
     hfile.Close() 
+
     #Release format should be good, if relInfo is 'uknown' then print this error 
-    if not re.match(r'(\d+\.{0,1})+$',relInfo):
-        msg.error('Not able to find release from DB (or it was badly formatted), release : %s' % relInfo )
+    release = relInfo[0] if relInfo and isinstance(relInfo, list) else str(relInfo)
+    if not re.match(r'(\d+\.{0,1})+$', release):
+        msg.error('Not able to find release from DB (or it was badly formatted), release : %s' % release )
         msg.error('Problem with DB configuration in COOL DB, most likely during data-taking' )
         
     msg.info('Finished running debug_stream analysis PreRun operations')     
     #returns the local asetupString from runs in input files and to be used by asetup 
-    return getAsetupString(relInfo)
+    return getAsetupString(*relInfo[:2]) if isinstance(relInfo, list) and len(relInfo) >= 2 else None
     
 def dbgPostRun(inputFileList,outputFileList):
     msg.info('Running debug_stream analysis PostRun operations on files :{0} '.format(inputFileList))
@@ -88,7 +90,7 @@ def dbgPostRun(inputFileList,outputFileList):
     data = []
     l1Info = []
     hltInfo = []
-    relInfo = str()
+    relInfo = []
     for inputFile in inputFileList.value:
     
         if not os.path.isfile(inputFile):
@@ -122,16 +124,16 @@ def dbgPostRun(inputFileList,outputFileList):
     
 def TriggerDBInfo(run):
     #Get the connection to CONDBR2  
-    dbconn  = TriggerCoolUtil.GetConnection("CONDBR2")
+    dbconn = TriggerCoolUtil.GetConnection("CONDBR2")
     #dbconn  = TriggerCoolUtil.GetConnection("COMP")
     l1Info = []
     
-    limmin=run<<32
-    limmax=(run+1)<<32
+    limmin = run<<32
+    limmax = (run+1)<<32
 
     ## Get L1 Info from DB
-    l1Conn= dbconn.getFolder( "/TRIGGER/LVL1/Menu" )
-    l1Chansel=cool.ChannelSelection.all()
+    l1Conn = dbconn.getFolder( "/TRIGGER/LVL1/Menu" )
+    l1Chansel = cool.ChannelSelection.all()
     l1Objs = l1Conn.browseObjects( limmin,limmax,l1Chansel)
     itemName = {}
     l1Counts = 0
@@ -160,17 +162,17 @@ def TriggerDBInfo(run):
 
     ## Get HLT Info
     f = dbconn.getFolder( "/TRIGGER/HLT/Menu" )
-    chansel=cool.ChannelSelection.all()
+    chansel = cool.ChannelSelection.all()
     objs = f.browseObjects( limmin,limmax,chansel)
     hltInfo = []
     chainCount = 0
     chainNamesHLT = {}
 
     while objs.goToNext():
-        hltObj=objs.currentRef()
-        hltPayload=hltObj.payload()
-        hltName     = hltPayload['ChainName']
-        hltCounter  = hltPayload['ChainCounter']
+        hltObj = objs.currentRef()
+        hltPayload = hltObj.payload()
+        hltName = hltPayload['ChainName']
+        hltCounter = hltPayload['ChainCounter']
         chainNamesHLT[int(hltCounter)] = hltName
         hltInfo = (max(chainNamesHLT.keys())+1) * [0]
 
@@ -178,17 +180,21 @@ def TriggerDBInfo(run):
         hltInfo.pop(channel)
         hltInfo.insert(channel,chainNamesHLT[channel])
 
-    # Get HLT Release number
+    # Get Atlas Project and HLT Release number
     f = dbconn.getFolder( "/TRIGGER/HLT/HltConfigKeys" )
-    chansel=cool.ChannelSelection.all()
+    chansel = cool.ChannelSelection.all()
     objs = f.browseObjects( limmin,limmax,chansel)
     relInfo = 'unknown'
         
     while objs.goToNext():
-        relObj=objs.currentRef()
-        relPayload=relObj.payload()
-        confsrc     = relPayload['ConfigSource'].split(',')
-        if len(confsrc)>1: relInfo = confsrc[1]
+        relObj = objs.currentRef()
+        relPayload = relObj.payload()
+        confsrc = relPayload['ConfigSource'].split(',')
+
+        if len(confsrc) > 2:
+            # Skip database name
+            relInfo = confsrc[1:]
+
         msg.info("release: %s", relInfo)
 
     return (l1Info, hltInfo, relInfo) 
@@ -242,37 +248,28 @@ def getL1InfoXML():
     return l1Info
 
     
-def getAsetupString(release):
-    #From release and os.environ, sets asetupString for runwrapper.BSRDOtoRAW.sh
-    asetupString = None
-    AtlasProject = str()
-    AtlasPatchArea = str()
-    TestArea = str()
-    userEnvironment = str()
-    #Environment variables list for dictionary to be filled from os.environ
-    eVarList = ['AtlasProject','AtlasPatchArea','CORAL_AUTH_PATH','CORAL_DBLOOKUP_PATH','TestArea']
-    eVarDic = {}
-    for eVar in eVarList :
-        if eVar in os.environ:
-            eVarDic[eVar] = os.environ[eVar].rstrip() 
-
-    #Sets AtlasProject to AtlasP1HLT by default otherwise it gest is from the enviroment.
-    if eVarDic['AtlasProject'] :
-        AtlasProject = eVarDic['AtlasProject']
-    else:
-        msg.info('failed to find env variable : $'+eVar)
-        AtlasProject='AtlasP1HLT'
-
-    #If TestArea is for tzero (tzero/software/patches/AtlasP1HLT-RELEASE), then returns tzero/software/patches/AtlasP1HLT-release where release is the parameter given to this function getAsetupString(release)    
-    if eVarDic.get('TestArea') :
-        TestArea = eVarDic['TestArea']
-        if  TestArea.find("tzero/software/patches/AthenaP1-") > 0 :
+def getAsetupString(release, AtlasProject):
+    # From release and os.environ, sets asetupString for runwrapper.BSRDOtoRAW.sh
+
+    if not AtlasProject:
+        msg.warn("Atlas Project not available in TRIGGERDB - reading env variable")
+
+        if os.environ.get('AtlasProject'):
+            AtlasProject = os.environ['AtlasProject'].rstrip()
+            msg.info("Found Atlas Project %s" % AtlasProject)
+        else:
+            msg.error("Couldn't find Atlas Project!")
+
+    asetupString = AtlasProject + ',' + release
+
+    # If TestArea is for tzero (tzero/software/patches/AthenaP1-RELEASE),
+    #   then return tzero/software/patches/AthenaP1-release, where release is
+    #   the parameter given to this function getAsetupString(release)
+    if os.environ.get('TestArea'):
+        TestArea = os.environ['TestArea']
+        if TestArea.find("tzero/software/patches/AthenaP1-") > 0 :
             testarea = TestArea.split('-')
-            TestArea = testarea[0]+'-'+release
-        asetupString = AtlasProject + ',' + release + ',gcc62 --testarea '+ TestArea
-        return asetupString
+            TestArea = testarea[0] + '-' + release
+        asetupString += ' --testarea '+ TestArea
 
-    #else, there is no TestArea,  then use the local directory    
-    else :
-        asetupString = AtlasProject + ',' + release + ',gcc62,here'
     return asetupString
diff --git a/HLT/Trigger/TrigTransforms/TrigTransform/python/dbgEventInfo.py b/HLT/Trigger/TrigTransforms/TrigTransform/python/dbgEventInfo.py
index 4e55b7d0290..ea6535afa28 100644
--- a/HLT/Trigger/TrigTransforms/TrigTransform/python/dbgEventInfo.py
+++ b/HLT/Trigger/TrigTransforms/TrigTransform/python/dbgEventInfo.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 
-# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 #### This is the dbgEventInfo class for the Debug Stream event analysis
 
 from __future__ import print_function
@@ -12,13 +12,10 @@ import eformat
 import sys
 import os
 from TrigTransform.dbgHltResult import * 
-#hltResult
 import cppyy
 
-from PyUtils import RootUtils
-ROOT = RootUtils.import_root()
 import ROOT
-from ROOT import gStyle, gROOT
+from ROOT import gStyle, gROOT, addressof
 
 
 class dbgEventInfo:
@@ -218,7 +215,8 @@ class dbgEventInfo:
         
     def get_chain(self,counter, s):
         #Prints chains and their information
-        ch = cppyy.makeClass('HLT::Chain')(s)
+        from cppyy.gbl import HLT
+        ch = HLT.Chain(s)
         #ch.deserialize(s)
         print (".... chain %-3d : %s Counter:%-4d Passed: %d (Raw:%d Prescaled: %d PassThrough:%d) Rerun: %d LastStep: %d Err: %s"\
             % ( counter, self.HLT_Chain_Names[ch.getChainCounter()], ch.getChainCounter(), ch.chainPassed(), ch.chainPassedRaw(), ch.isPrescaled(), ch.isPassedThrough(), ch.isResurrected(), ch.getChainStep(), ch.getErrorCode().str()))
@@ -446,7 +444,7 @@ class dbgEventInfo:
   
         from ROOT import EventInfoTree
         self.Event_Info = EventInfoTree()
-        self.event_info_tree = TTree('Event_Info'+dbgStep, inputFile)
+        self.event_info_tree = ROOT.TTree('Event_Info'+dbgStep, inputFile)
 
         self.event_info_tree._L1_Triggered_BP     = self.L1_Triggered_BP
         self.event_info_tree._L1_Triggered_AV     = self.L1_Triggered_AV
@@ -454,28 +452,28 @@ class dbgEventInfo:
         self.event_info_tree._HLT_Triggered_Names = self.HLT_Triggered_Names
         self.event_info_tree._HLT_Triggered_IDs   = self.HLT_Triggered_IDs
   
-        self.event_info_tree.Branch('Run_Number',       AddressOf(self.Event_Info,'Run_Number'),       'run_Number/I')
-        self.event_info_tree.Branch('Stream_Tag_Name',  AddressOf(self.Event_Info,'Stream_Tag_Name'),  'stream_Tag_Name/C')
-        self.event_info_tree.Branch('Stream_Tag_Type',  AddressOf(self.Event_Info,'Stream_Tag_Type'),  'stream_Tag_Type/C')
-        self.event_info_tree.Branch('Lvl1_ID',                             AddressOf(self.Event_Info,'Lvl1_ID'),                               'lvl1_ID/I')
-        self.event_info_tree.Branch('Event_Counter_Lvl1_ID',               AddressOf(self.Event_Info,'Event_Counter_Lvl1_ID'),                 'event_Counter_Lvl1_ID/I')
-        self.event_info_tree.Branch('Event_Counter_Reset_Counter_Lvl1_ID', AddressOf(self.Event_Info,'Event_Counter_Reset_Counter_Lvl1_ID'),   'event_Counter_Reset_Counter_Lvl1_ID/I')
-        self.event_info_tree.Branch('Global_ID',        AddressOf(self.Event_Info,'Global_ID'),  'global_ID/I')
-        self.event_info_tree.Branch('Lumiblock',        AddressOf(self.Event_Info,'Lumiblock'),  'lumiblock/I')
-        self.event_info_tree.Branch('Node_ID',          AddressOf(self.Event_Info,'Node_ID'),  'node_ID/I')
-        self.event_info_tree.Branch('SuperMasterKey',   AddressOf(self.Event_Info,'SuperMasterKey'),    'supperMasterKey/I')
-        self.event_info_tree.Branch('HLT_PSC_Key',      AddressOf(self.Event_Info,'HLT_PSC_Key'),       'hLT_PSC_Key/I')
-        self.event_info_tree.Branch('HLT_Action',       AddressOf(self.Event_Info,'HLT_Action'),     'hLT_Action/C')
-        self.event_info_tree.Branch('HLT_Reason',       AddressOf(self.Event_Info,'HLT_Reason'),     'hLT_Reason/C')
-        self.event_info_tree.Branch('HLT_Internal',     AddressOf(self.Event_Info,'HLT_Internal'),   'hLT_Internal/C')
-        self.event_info_tree.Branch('HLT_Decision',     AddressOf(self.Event_Info,'HLT_Decision'),    'hLT_Decision/B')
-        self.event_info_tree.Branch('HLT_Application',  AddressOf(self.Event_Info,'HLT_Application'),   'HLT_Application/C')
-        self.event_info_tree.Branch('EF_Overall',       AddressOf(self.Event_Info,'EF_Overall'),    'eF_Overall/C')
-        self.event_info_tree.Branch('EF_PSC_Result',    AddressOf(self.Event_Info,'EF_PSC_Result'), 'eF_PSC_Result/C')
-        self.event_info_tree.Branch('EF_Status_Result', AddressOf(self.Event_Info,'EF_Status_Result'),  'eF_Status_Result/C')
-        self.event_info_tree.Branch('Chain_Name_EF',    AddressOf(self.Event_Info,'Chain_Name_EF'),     'chain_Name_EF/C')
-        self.event_info_tree.Branch('Chain_Step_EF',    AddressOf(self.Event_Info,'Chain_Step_EF'),     'chain_Step_EF/I')  
-        self.event_info_tree.Branch('EventStatusNames', AddressOf(self.Event_Info,'EventStatusNames'),  'eventStatusNames/C')
+        self.event_info_tree.Branch('Run_Number',       addressof(self.Event_Info, 'Run_Number'),       'run_Number/I')
+        self.event_info_tree.Branch('Stream_Tag_Name',  addressof(self.Event_Info, 'Stream_Tag_Name'),  'stream_Tag_Name/C')
+        self.event_info_tree.Branch('Stream_Tag_Type',  addressof(self.Event_Info, 'Stream_Tag_Type'),  'stream_Tag_Type/C')
+        self.event_info_tree.Branch('Lvl1_ID',                             addressof(self.Event_Info, 'Lvl1_ID'),                               'lvl1_ID/I')
+        self.event_info_tree.Branch('Event_Counter_Lvl1_ID',               addressof(self.Event_Info, 'Event_Counter_Lvl1_ID'),                 'event_Counter_Lvl1_ID/I')
+        self.event_info_tree.Branch('Event_Counter_Reset_Counter_Lvl1_ID', addressof(self.Event_Info, 'Event_Counter_Reset_Counter_Lvl1_ID'),   'event_Counter_Reset_Counter_Lvl1_ID/I')
+        self.event_info_tree.Branch('Global_ID',        addressof(self.Event_Info, 'Global_ID'),  'global_ID/I')
+        self.event_info_tree.Branch('Lumiblock',        addressof(self.Event_Info, 'Lumiblock'),  'lumiblock/I')
+        self.event_info_tree.Branch('Node_ID',          addressof(self.Event_Info, 'Node_ID'),  'node_ID/I')
+        self.event_info_tree.Branch('SuperMasterKey',   addressof(self.Event_Info, 'SuperMasterKey'),    'supperMasterKey/I')
+        self.event_info_tree.Branch('HLT_PSC_Key',      addressof(self.Event_Info, 'HLT_PSC_Key'),       'hLT_PSC_Key/I')
+        self.event_info_tree.Branch('HLT_Action',       addressof(self.Event_Info, 'HLT_Action'),     'hLT_Action/C')
+        self.event_info_tree.Branch('HLT_Reason',       addressof(self.Event_Info, 'HLT_Reason'),     'hLT_Reason/C')
+        self.event_info_tree.Branch('HLT_Internal',     addressof(self.Event_Info, 'HLT_Internal'),   'hLT_Internal/C')
+        self.event_info_tree.Branch('HLT_Decision',     addressof(self.Event_Info, 'HLT_Decision'),    'hLT_Decision/B')
+        self.event_info_tree.Branch('HLT_Application',  addressof(self.Event_Info, 'HLT_Application'),   'HLT_Application/C')
+        self.event_info_tree.Branch('EF_Overall',       addressof(self.Event_Info, 'EF_Overall'),    'eF_Overall/C')
+        self.event_info_tree.Branch('EF_PSC_Result',    addressof(self.Event_Info, 'EF_PSC_Result'), 'eF_PSC_Result/C')
+        self.event_info_tree.Branch('EF_Status_Result', addressof(self.Event_Info, 'EF_Status_Result'),  'eF_Status_Result/C')
+        self.event_info_tree.Branch('Chain_Name_EF',    addressof(self.Event_Info, 'Chain_Name_EF'),     'chain_Name_EF/C')
+        self.event_info_tree.Branch('Chain_Step_EF',    addressof(self.Event_Info, 'Chain_Step_EF'),     'chain_Step_EF/I')  
+        self.event_info_tree.Branch('EventStatusNames', addressof(self.Event_Info, 'EventStatusNames'),  'eventStatusNames/C')
 
         self.event_info_tree.Branch('L1_Triggered_BP',  self.L1_Triggered_BP)
         self.event_info_tree.Branch('L1_Triggered_AV',  self.L1_Triggered_AV)
diff --git a/HLT/Trigger/TrigTransforms/TrigTransform/python/trigRecoExe.py b/HLT/Trigger/TrigTransforms/TrigTransform/python/trigRecoExe.py
index 8092b546b68..0cdd0e39dfc 100644
--- a/HLT/Trigger/TrigTransforms/TrigTransform/python/trigRecoExe.py
+++ b/HLT/Trigger/TrigTransforms/TrigTransform/python/trigRecoExe.py
@@ -132,8 +132,8 @@ class trigRecoExecutor(athenaExecutor):
             optionList = getTranslated(self.conf.argdict, name=self._name, substep=self._substep, first=self.conf.firstExecutor, output = outputFiles)
             self._cmd.extend(optionList)
 
-            #Run preRun step debug_stream analysis if debug_stream=True
-            if 'debug_stream' in self.conf.argdict:
+            # Run preRun step debug stream analysis if output BS file and output histogram are set
+            if "outputHIST_DEBUGSTREAMMONFile" in self.conf.argdict and 'BS' in self.conf.dataDictionary:
                 inputFiles = dict()
                 for dataType in input:
                     inputFiles[dataType] = self.conf.dataDictionary[dataType]
@@ -141,23 +141,19 @@ class trigRecoExecutor(athenaExecutor):
                 for dataType in output:
                     outputFiles[dataType] = self.conf.dataDictionary[dataType]
                 
-                #set default file name for debug_stream analysis output
-                fileNameDbg = ['debug-stream-monitoring.root']
-                if 'HIST_DEBUGSTREAMMON' in output:
-                    fileNameDbg = outputFiles['HIST_DEBUGSTREAMMON'].value
-                
-                #if file exist then rename file to -old.root to keep as backup
-                if(os.path.isfile(fileNameDbg[0])):
-                    oldOutputFileNameDbg = fileNameDbg[0].replace(".root","_old.root")
-                    msg.info('Renaming %s to %s' % (fileNameDbg[0], oldOutputFileNameDbg) )                    
-                    os.rename(fileNameDbg[0], oldOutputFileNameDbg)
+                # Set file name for debug stream analysis output
+                fileNameDbg = outputFiles['HIST_DEBUGSTREAMMON'].value
 
-                #do debug_stream preRun step and get asetup string from debug_stream input files
+                # Do debug stream preRun step and get asetup string from debug stream input files
                 dbgAsetupString  = dbgStream.dbgPreRun(inputFiles['BS_RDO'],fileNameDbg)
-                # setup asetup from debug_stream if no --asetup r2b:string was given and is not running with tzero/software/patches as TestArea
+                # Setup asetup from debug stream 
+                # if no --asetup r2b:string was given and is not running with tzero/software/patches as TestArea
                 if asetupString == None and dbgAsetupString != None : 
                     asetupString = dbgAsetupString
-                    msg.info('Will use asetup string for debug_stream analsys %s' % dbgAsetupString)
+                    msg.info('Will use asetup string for debug stream analysis %s' % dbgAsetupString)
+            else:
+                msg.warn("Flag outputHIST_DEBUGSTREAMMONFile or outputBSFile not defined - debug stream analysis will not run.")
+                    
                     
         #call athenaExecutor parent as the above overrides what athenaExecutor would have done 
         super(athenaExecutor, self).preExecute(input, output)
@@ -259,6 +255,7 @@ class trigRecoExecutor(athenaExecutor):
         log = self._logFileName
         msg.debug('Now scanning logfile {0} for HLTMPPU Child Issues'.format(log))
         # Using the generator so that lines can be grabbed by subroutines if needed for more reporting
+
         try:
             myGen = lineByLine(log, substepName=self._substep)
         except IOError as e:
@@ -341,6 +338,8 @@ class trigRecoExecutor(athenaExecutor):
         msg.info("Search for created BS files, and rename if single file found")
         #The following is needed to handle the BS file being written with a different name (or names)
         #base is from either the tmp value created by the transform or the value entered by the user
+
+        argInDict = {}
         if self._rc != 0:
             msg.error('HLT step failed (with status %s) so skip BS filename check' % self._rc)
         elif 'BS' in self.conf.dataDictionary:
@@ -361,7 +360,7 @@ class trigRecoExecutor(athenaExecutor):
             elif(len(matchedOutputFileNames)):
                 msg.info('Single BS file found: will split (if requested) and rename file')
 
-                #First check if we want to produce the COST DRAW output
+                # First check if we want to produce the COST DRAW output
                 if 'DRAW_TRIGCOST' in self.conf.dataDictionary:
                     splitFailed = self._splitBSfile('CostMonitoring', matchedOutputFileNames[0],self.conf.dataDictionary['DRAW_TRIGCOST'].value[0])
                     if(splitFailed):
@@ -382,30 +381,30 @@ class trigRecoExecutor(athenaExecutor):
         else:
             msg.info('BS output filetype not defined so skip BS filename check')
 
-        #Run PostRun step debug_stream analysis if debug_stream=True
-        if 'debug_stream' in self.conf.argdict:
-            msg.info("debug_stream analysis in postExecute")
+
+        # Run postRun step debug stream analysis if output BS file and output histogram are set
+        if "outputHIST_DEBUGSTREAMMONFile" in self.conf.argdict and 'BS' in self.conf.dataDictionary:
+            msg.info("debug stream analysis in postExecute")
     
-            #set default file name for debug_stream analysis output
-            fileNameDbg = ['debug-stream-monitoring.root']
-            if "outputHIST_DEBUGSTREAMMONFile" in self.conf.argdict:
-                fileNameDbg= self.conf.argdict["outputHIST_DEBUGSTREAMMONFile"].value                
-                msg.info('outputHIST_DEBUGSTREAMMONFile argument is {0}'.format(fileNameDbg) )
+            # Set file name for debug stream analysis output
+            fileNameDbg = self.conf.argdict["outputHIST_DEBUGSTREAMMONFile"].value
+            msg.info('outputHIST_DEBUGSTREAMMONFile argument is {0}'.format(fileNameDbg) )
 
             #TODO add merging of mother and child debug files
 
             if(os.path.isfile(fileNameDbg[0])):
-                #keep filename if not defined
-                msg.info('Will use file created  in PreRun step {0}'.format(fileNameDbg) )
+                # File already exists - use the one created in the PreRun step
+                msg.info('Will use file created in PreRun step {0}'.format(fileNameDbg) )
             else :
                 msg.info('No file created  in PreRun step {0}'.format(fileNameDbg) )
 
-            #do debug_stream postRun step
-            dbgStream.dbgPostRun(argInDict,fileNameDbg)
+            # Do debug stream postRun step
+            dbgStream.dbgPostRun(argInDict, fileNameDbg)
+
             #
             # TODO is the reset now redundant? (not needed for reprocessing)
             #
-            #now reset metadata for outputBSFile needed for trf file validation
+            # Now reset metadata for outputBSFile needed for trf file validation
             self.conf.dataDictionary['BS']._resetMetadata()
 
         msg.info('Now run athenaExecutor:postExecute')
diff --git a/HLT/Trigger/TrigTransforms/TrigTransform/python/trigTranslate.py b/HLT/Trigger/TrigTransforms/TrigTransform/python/trigTranslate.py
index 7c9793c62cf..1825f3496c6 100644
--- a/HLT/Trigger/TrigTransforms/TrigTransform/python/trigTranslate.py
+++ b/HLT/Trigger/TrigTransforms/TrigTransform/python/trigTranslate.py
@@ -17,7 +17,7 @@ import PyJobTransforms.trfExceptions as trfExceptions
 from PyJobTransforms.trfExitCodes import trfExit
 
 #create option dict needed by athenaHLT from runargs
-def getOption(runArgs,name, substep, first, output):
+def getOption(runArgs, name, substep, first, output):
 
     # Dictionary to be filled to run athenaHLT from
     option = {}
@@ -98,5 +98,10 @@ def getTranslated(runArgs,name,substep,first,output):
             v=''.join(v) 
         optionList.append(item.format(k,v))
 
+    # Replace --use-database=True with no argument version
+    if '--use-database=True' in optionList:
+        optionList.remove('--use-database=True')
+        optionList.append('--use-database')
+        
     return optionList
 
diff --git a/HLT/Trigger/TrigTransforms/TrigTransform/scripts/Trig_reco_tf.py b/HLT/Trigger/TrigTransforms/TrigTransform/scripts/Trig_reco_tf.py
index 24448b4622a..466cd5f749c 100755
--- a/HLT/Trigger/TrigTransforms/TrigTransform/scripts/Trig_reco_tf.py
+++ b/HLT/Trigger/TrigTransforms/TrigTransform/scripts/Trig_reco_tf.py
@@ -237,8 +237,6 @@ def addDebugArgs(parser):
     # Use arggroup to get these arguments in their own sub-section (of --help)
     parser.defineArgGroup('Debug', 'Specific options related to the trigger debug recovery')
 
-    parser.add_argument('--debug_stream', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
-                        help='Runs debug_stream analysis', group='Debug')
     parser.add_argument('--outputHIST_DEBUGSTREAMMONFile', nargs='+', 
                         type=trfArgClasses.argFactory(trfArgClasses.argHISTFile, io='output', runarg=True, countable=False),
                         help='Output DEBUGSTREAMMON file', group='Debug')    
-- 
GitLab