diff --git a/DataQuality/DataQualityUtils/python/DBInfo.py b/DataQuality/DataQualityUtils/python/DBInfo.py
deleted file mode 100755
index 25264c14610f1a0392fe5683c1ff4849876be14c..0000000000000000000000000000000000000000
--- a/DataQuality/DataQualityUtils/python/DBInfo.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
-
-
-# +---------------------------------------------+
-# | Vassilis D. Kazazakis - DBInfo.py v1.0	|
-# | Returns COOL DB connection information	|
-# +---------------------------------------------+
-
-# user & password information should be handled by each individual program
-
-def getReadServer():
-    return "ATLAS_COOLPROD"
-
-def getWriteServer():
-    #return "ATLAS_COOLWRITE"
-    return ""
-
-def getFolder():
-    #return "/GLOBAL/DQM/HIST"
-    return "/GLOBAL/DETSTATUS/DQMFOFL" #/GLOBAL/DETSTATUS/DQMFOFLH"
-
-def getDbName():
-    return "CONDBR2"
-    #	return "STRMP200"
-
-def getSchema():
-    #return "ATLAS_COOLOFL_GLOBAL"
-    return "MyCOOL.db"
-
-def getChannelDict():
-    import DBInfo_Histo
-    #########################################
-
-    return DBInfo_Histo.getChannelDict()
diff --git a/DataQuality/DataQualityUtils/python/DBInfo_Histo.py b/DataQuality/DataQualityUtils/python/DBInfo_Histo.py
deleted file mode 100755
index 2b8fe5f36dff0ede008088ff80de00ffa58e7af2..0000000000000000000000000000000000000000
--- a/DataQuality/DataQualityUtils/python/DBInfo_Histo.py
+++ /dev/null
@@ -1,240 +0,0 @@
-# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
-from __future__ import print_function
-
-# +---------------------------------------------+
-# | Vassilis D. Kazazakis - DBInfo.py v1.0	|
-# | Returns COOL DB connection information      |
-# | Updated by M.D'Onofrio: new folder added    |
-# | for history plots 				|
-# +---------------------------------------------+
-
-# user & password information should be handled by each individual program
-
-def getReadServer():
-    return "ATLAS_COOLPROD"
-
-def getWriteServer():
-    #return "ATLAS_COOLWRITE"
-    return ""
-
-def getFolder():
-    #return "/GLOBAL/DQM/HIST"
-    return "/GLOBAL/DETSTATUS/DQMFOFL"
-
-def getFolders():
-    #return "/GLOBAL/DQM/HIST"
-    return ["/GLOBAL/DETSTATUS/DQMFOFL", "/GLOBAL/DETSTATUS/SHIFTOFL"]
-
-def getFolderH():
-    #return "/GLOBAL/DQM/HIST"
-    return "/GLOBAL/DETSTATUS/DQMFOFLH"
-
-def getDbName():
-    #	return "OFLP200"
-    return "CONDBR2"
-
-def getSchema():
-    #return "ATLAS_COOLOFL_GLOBAL"
-    return "MyCOOL_histo.db"
-
-def getChannelDict():
-    from DetectorStatus.DetStatusLib import DetStatusNames
-    dsn = DetStatusNames()
-    channelDict = dsn.namedict.copy()
-##     channelDict = dict([])
-    
-##     # the following channels are for the M4 #
-##     #channelDict["default"] = 0
-##     #channelDict["default_dqmf"] = 1
-    
-##     # DB Structure as in DBQuery (from DetStatusLib.py)
-    
-##     # Inner Detector 
-##     channelDict["PIXB"] = 101
-##     channelDict["PIX0"] = 102 #new: Pixel B-layer 
-##     channelDict["PIXEA"] = 104
-##     channelDict["PIXEC"] = 105
-    
-##     channelDict["SCTB"] = 111
-##     channelDict["SCTEA"] = 114
-##     channelDict["SCTEC"] = 115
-
-##     channelDict["TRTB"] = 121
-##     channelDict["TRTEA"] = 124
-##     channelDict["TRTEC"] = 125
-
-##     # obsolete 
-##     #channelDict["IDGB"] = 131
-##     #channelDict["IDGEA"] = 134
-##     #channelDict["IDGEC"] = 135
-##     #channelDict["IDAB"] = 141
-##     #channelDict["IDAEA"] = 144
-##     #channelDict["IDAEC"] = 145
-##     # new ID Alignement and Global
-##     channelDict["IDGL"] = 130
-##     channelDict["IDAL"] = 140
-##     channelDict["IDBS"] = 150
-##     channelDict["IDPF"] = 160
-##     channelDict["IDVX"] = 161
-##     channelDict["IDBCM"] = 170
-
-##     # LAr 
-##     channelDict["EMBA"] = 202
-##     channelDict["EMBC"] = 203
-##     channelDict["EMECA"] = 204
-##     channelDict["EMECC"] = 205
-##     channelDict["HECA"] = 214
-##     channelDict["HECC"] = 215
-##     channelDict["FCALA"] = 224
-##     channelDict["FCALC"] = 225
-    
-##     # TileCal
-##     channelDict["TIGB"] = 230
-##     channelDict["TILBA"] = 232
-##     channelDict["TILBC"] = 233
-##     channelDict["TIEBA"] = 234
-##     channelDict["TIEBC"] = 235
-    
-##     # MBTS
-##     channelDict["MBTSA"] = 244
-##     channelDict["MBTSC"] = 245
-    
-##     # TileCal+LAr
-##     channelDict["CALBA"] = 251
-##     channelDict["CALEA"] = 254
-##     channelDict["CALEC"] = 255
-    
-##     # Muon Detectors 
-##     channelDict["MDTBA"] = 302
-##     channelDict["MDTBC"] = 303
-##     channelDict["MDTEA"] = 304
-##     channelDict["MDTEC"] = 305
-##     channelDict["RPCBA"] = 312
-##     channelDict["RPCBC"] = 313
-##     channelDict["TGCEA"] = 324
-##     channelDict["TGCEC"] = 325
-##     channelDict["CSCEA"] = 334
-##     channelDict["CSCEC"] = 335
-    
-##     # Lumi
-##     channelDict["LCD"] = 350
-##     channelDict["LCDA"] = 353
-##     channelDict["LCDC"] = 354
-##     channelDict["ALFA"] = 360
-##     channelDict["ZDC"] = 370
-    
-##     # Trigger
-##     channelDict["L1CAL"] = 401
-##     channelDict["L1MUB"] = 402
-##     channelDict["L1MUE"] = 403
-##     channelDict["L1CTP"] = 404
-##     channelDict["TRCAL"] = 411
-##     channelDict["HLTEF"] = 412
-##     channelDict["TRBJT"] = 421
-##     channelDict["TRBPH"] = 422
-##     channelDict["TRCOS"] = 423
-##     channelDict["TRELE"] = 424
-##     channelDict["TRGAM"] = 425
-##     channelDict["TRJET"] = 426
-##     channelDict["TRMET"] = 427
-##     channelDict["TRMBI"] = 428
-##     channelDict["TRMUO"] = 429
-##     channelDict["TRTAU"] = 430
-##     channelDict["TRIDT"] = 431
-    
-##     channelDict["LUMI"] = 450
-##     channelDict["RUNCLT"] = 460
-##     channelDict["RCOPS"] = 461
-
-##     channelDict["ATLGL"] = 480
-##     channelDict["ATLSOL"] = 481
-##     channelDict["ATLTOR"] = 482
-    
-##     # Physics Objects 
-##     channelDict["EIDB"] = 501
-##     channelDict["EIDCR"] = 502
-##     channelDict["EIDE"] = 503
-##     channelDict["PIDB"] = 505
-##     channelDict["PIDCR"] = 506
-##     channelDict["PIDE"] = 507
-##     channelDict["EIDF"] = 508
-##     channelDict["EIDSOFT"] = 509
-##     channelDict["MSTACO"] = 510
-##     channelDict["MMUIDCB"] = 511
-##     channelDict["MMUIDVX"] = 512
-##     channelDict["MMUGIRL"] = 513
-##     channelDict["MMUBOY"] = 514
-##     channelDict["MMUIDSA"] = 515
-##     channelDict["MMUTAG"] = 516
-##     channelDict["MMTIMO"] = 517
-##     channelDict["MCMUTAG"] = 518
-##     channelDict["MCALLHR"] = 519
-##     channelDict["JETB"] = 521
-##     channelDict["JETEA"] = 524
-##     channelDict["JETEC"] = 525
-##     channelDict["JETFA"] = 526
-##     channelDict["JETFC"] = 527
-##     channelDict["MET"] = 530
-##     channelDict["METCALO"] = 531
-##     channelDict["METMUON"] = 532
-##     channelDict["BTGLIFE"] = 541
-##     channelDict["BTGSOFTE"] = 544
-##     channelDict["BTGSOFTM"] = 545
-##     channelDict["TAUB"] = 551
-##     channelDict["TAUCR"] = 552
-##     channelDict["TAUE"] = 553
-
-    #########################################
-    
-    return channelDict
-    
-def getChannelDictH():
-    # This is all deprecated of course
-    print('Why are you calling this function? This is very very deprecated ... - ponyisi, 17/6/10')
-    channelDict = dict([])
-
-
-    channelDict["/InnerDetector/IDAlignment/ExtendedTracks_NoTriggerSelection/GenericTracks/Npixhits_per_track"]=10039
-    channelDict["/InnerDetector/IDAlignment/ExtendedTracks_NoTriggerSelection/GenericTracks/Nscthits_per_track"]=10040
-    channelDict["/InnerDetector/IDAlignment/ExtendedTracks_NoTriggerSelection/GenericTracks/Ntrthits_per_track"]=10041
-    
-    channelDict["/InnerDetector/IDAlignment/Tracks_NoTriggerSelection/HitEfficiencies/measurements_eff_vs_layer_barrel"]=10042
-    
-    channelDict["/InnerDetector/IDAlignment/Tracks_NoTriggerSelection/GenericTracks_Detail/Npixhits_per_track_barrel"]=10043  
-    channelDict["/InnerDetector/IDAlignment/Tracks_NoTriggerSelection/GenericTracks_Detail/Npixhits_per_track_eca"]=10044  
-    channelDict["/InnerDetector/IDAlignment/Tracks_NoTriggerSelection/GenericTracks_Detail/Npixhits_per_track_ecc"]=10045  
-    channelDict["/InnerDetector/IDAlignment/Tracks_NoTriggerSelection/GenericTracks_Detail/Nscthits_per_track_barrel"]=10046  
-    channelDict["/InnerDetector/IDAlignment/Tracks_NoTriggerSelection/GenericTracks_Detail/Nscthits_per_track_eca"]=10047  
-    channelDict["/InnerDetector/IDAlignment/Tracks_NoTriggerSelection/GenericTracks_Detail/Nscthits_per_track_ecc"]=10048  
-    channelDict["/InnerDetector/IDAlignment/Tracks_NoTriggerSelection/GenericTracks_Detail/Ntrthits_per_track_barrel"]=10049  
-    channelDict["/InnerDetector/IDAlignment/Tracks_NoTriggerSelection/GenericTracks_Detail/Ntrthits_per_track_eca"]=10050 
-    channelDict["/InnerDetector/IDAlignment/Tracks_NoTriggerSelection/GenericTracks_Detail/Ntrthits_per_track_ecc"]=10051  
-
-
-    channelDict["/InnerDetector/IDAlignment/PrimVtx/NoTriggerSelection/pvChiSqDof"]=10082
-    channelDict["/InnerDetector/IDAlignment/PrimVtx/NoTriggerSelection/pvX"]=10083
-    channelDict["/InnerDetector/IDAlignment/PrimVtx/NoTriggerSelection/pvY"]=10084
-    channelDict["/InnerDetector/IDAlignment/PrimVtx/NoTriggerSelection/pvZ"]=10085
-
-    channelDict["/InnerDetector/IDAlignment_Perf/Jpsi-->mumu/NoTriggerSelection/Jpsi_invmass"]=100102
-    channelDict["/InnerDetector/IDAlignment_Perf/Kshort-->pipi/NoTriggerSelection/ks_mass"]=100117
-
-        
-    channelDict["/InnerDetector/IDAlignment_Perf/Z-->ee/NoTriggerSelection/Zee_Eopasym_perevent"]=100198
-    channelDict["/InnerDetector/IDAlignment_Perf/Z-->ee/NoTriggerSelection/Zee_Eop_incl"]=100199
-    channelDict["/InnerDetector/IDAlignment_Perf/W-->enu/NoTriggerSelection/Wenu_Eop_incl"]=100200
-
-
-    channelDict["/InnerDetector/IDAlignment_Perf/Z-->mumu/NoTriggerSelection/z-mass_trks"]=100215
-
-        
-    channelDict["/InnerDetector/Pixel/PixelExpert/Timing/m_Pixel_track_Lvl1A"]=100220
-    channelDict["/InnerDetector/Pixel/PixelExpert/ToT/m_Pixel_track_cluster_ToT"]=100221 
-    channelDict["/InnerDetector/Pixel/PixelExpert/General/m_Pixel_track_clus_groupsize"]=100222
-
-
-
-    #########################################
-        
-    return channelDict
-
diff --git a/DataQuality/DataQualityUtils/python/DQGetHistogramMod.py b/DataQuality/DataQualityUtils/python/DQGetHistogramMod.py
deleted file mode 100644
index 3882bb9a9fd65f8b57ffc5c3e046fb0843c08221..0000000000000000000000000000000000000000
--- a/DataQuality/DataQualityUtils/python/DQGetHistogramMod.py
+++ /dev/null
@@ -1,158 +0,0 @@
-# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
-
-
-# +---------------------------------------------+
-# | Vassilis D. Kazazakis, GetHistogram.py v1.0	|
-# | Extracts histograms from ROOT files		|
-# |  Interface used by QuerryDB.py application	|
-# +---------------------------------------------+
-
-import sys
-import os
-from six import print_
-
-startDir = os.getcwd() # noqa
-
-from ROOT import TFile ## This breaks up argument parsing and current directory
-os.chdir(startDir)
-
-def getFullName(dir):
-    motherDir = dir.GetMotherDir()
-    if not motherDir:
-        return ""       # it means that dir = TFile and we don't want the name of the file in the full name
-    else:
-        return getFullName(motherDir) + dir.GetName() + "/"
-
-
-# getMetaDict(TDirectory dir)
-def getMetadataDict(dir):
-    retDict = dict([])
-
-    metaKey = dir.FindKey("metadata")
-    if metaKey:
-        retDict[getFullName(dir)] = metaKey.ReadObj()
-        
-    iter = dir.GetListOfKeys().MakeIterator()
-    keyNext = iter.Next()
-    while keyNext:
-        if keyNext.GetClassName() == "TDirectory":
-            retDict.update(getMetadataDict(keyNext.ReadObj()))
-        keyNext = iter.Next()
-
-    return retDict
-
-def GetObjectList(tFile, path):
-    dirList = path.split("/")
-    ret = [tFile]
-    for dir in dirList:
-        if dir == '':
-            continue
-        key = ret[0].GetKey(dir)
-        if not key:
-            return []
-        ret[0] = key.ReadObj()
-
-    # if path ends with a '/' that means we must return a list of
-    # all histograms (class name begins with TH) in the last directory
-    if path.endswith('/'):
-        iter = ret[0].GetListOfKeys().MakeIterator()
-        ret = []
-        keyNext = iter.Next()
-        while keyNext:
-            if keyNext.GetClassName()[:2] == "TH":
-                objNext = keyNext.ReadObj()
-                ret.append(objNext)
-            keyNext = iter.Next()
-    return ret
-
-def SetObjectDir(tFile, path, obj, clearDict = False, dirDict = dict([])):
-    if clearDict:
-        for key,elem in dict(dirDict).iteritems():
-            del dirDict[key]
-
-    if not obj:
-        return
-
-
-    dirList = path.split("/")
-    if not path.endswith('/'):
-        dirList.pop()
-    tmpf = tFile
-    partialPath = ''
-
-    for dir in dirList:
-        if dir =='':
-            continue
-        partialPath += dir + '/'
-        try:
-            tmpf = dirDict[partialPath]
-        except KeyError:
-            #            print "Creating directory '" + dir + "' for directory list:", dirList
-            tmpf = tmpf.mkdir(dir)
-            dirDict[partialPath] = tmpf
-    obj.SetDirectory(tmpf)
-
-# GetHistogram(string:ROOT_InputFileName, string:ROOT_OutputFileName, list:NameOfHistogramsToExtract)
-def GetHistogram(inp, out, nameOptionList):
-    # clear the new dir dictionary (necessary if GetHistogram is to be used more than once)
-    SetObjectDir("", "", None, True)
-
-    changeCount = 0
-    inpFile = TFile(inp)
-
-    ################### check for shift/expert/etc flags and construct the approprate list of histograms to get ####################
-    nameList = []
-    metaDict = getMetadataDict(inpFile)
-    for option in nameOptionList:
-        optionList = option.split(":")            # optionList[0] = histogram name, optionList[1] = flag
-        if len(optionList) == 1:
-            if optionList[0] not in nameList:
-                nameList.append(optionList[0])
-        else:    # we have a flag
-            if (not optionList[0].endswith("/")) and (not optionList[0] == ""):
-                # flags go only with directories (i.e. name ends with "/")
-                print_("Warning: Directory names should always end with \"/\" OR flag defined for histogram.", file=sys.stderr)
-            if optionList[0] == "":                    # we have to get all flagged histograms
-                for key in metaDict.keys():
-                    for event in metaDict[key]:
-                        if event.LevelOfDetail == optionList[1]:
-                            histName = key+event.Name
-                            if histName not in nameList:
-                                changeCount += 1
-                                nameList.append(histName)
-                if changeCount == 0:
-                    print_("Warning: No histogram flagged", optionList[1], "found.", file=sys.stderr)
-
-            else:
-                # we have to get flagged histograms only from the specified directory
-                try:
-                    for event in metaDict[optionList[0]]:
-                        if event.LevelOfDetail == optionList[1]:
-                            histName = optionList[0]+event.Name
-                            if histName not in nameList:
-                                changeCount += 1
-                                nameList.append(histName)
-                    if changeCount == 0:
-                        print_("Warning: No histogram flagged", optionList[1], "found in", optionList[0],".", file=sys.stderr)
-                except KeyError:
-                    print_("Warning:", optionList[0], "directory/metadata object does not exist.", file=sys.stderr)
-    ##################################################################################################################################
-
-    outFile = TFile(out, "RECREATE")
-    changeCount = 0
-    for path in nameList:
-        objList = GetObjectList(inpFile, path)
-        for obj in objList:
-            #    print "Setting path for '" + obj.GetName() + "' to '" + path + "'"
-            changeCount = changeCount + 1
-            SetObjectDir(outFile, path, obj)
-    outFile.Write()
-    outFile.Flush()
-    inpFile.Close()
-    outFile.Close()
-    if changeCount == 0:
-        os.remove(out)
-        return False
-    else:
-        return True
-
diff --git a/DataQuality/DataQualityUtils/python/DQHistogramMergeMod.py b/DataQuality/DataQualityUtils/python/DQHistogramMergeMod.py
index f764a21d5753206c7fd2600f255a1fef1275a840..07212e69aff551b7d7ebcee8e046ac9648391814 100644
--- a/DataQuality/DataQualityUtils/python/DQHistogramMergeMod.py
+++ b/DataQuality/DataQualityUtils/python/DQHistogramMergeMod.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 import os
 ## Needed to correct ROOT behavior; see below
@@ -23,7 +23,7 @@ MODVERSION = '$Id: DQHistogramMergeMod.py,v 1.8 2009-05-12 11:38:35 ponyisi Exp
 
 DoProcMon=False
 if DoProcMon:
-    import DQProcMonitor
+    from . import DQProcMonitor
     DQProcMonitor.startProcMonThread()
   
 def DQHistogramMerge( listFileName, outFileName, runPostProcessing, directoryRegularExpression=".*", histogramRegularExpression=".*", isIncremental=False, compressionLevel=1,debugLevel=0 ):
@@ -38,5 +38,5 @@ def DQHistogramMerge( listFileName, outFileName, runPostProcessing, directoryReg
     mf.mergeLBintervals( outFileName )
   
     if runPostProcessing:
-        import DQPostProcessMod
+        from . import DQPostProcessMod
         DQPostProcessMod.DQPostProcess( outFileName, isIncremental )
diff --git a/DataQuality/DataQualityUtils/python/DQPostProcessMod.py b/DataQuality/DataQualityUtils/python/DQPostProcessMod.py
index 7364c5256caea11e89887d13fb50640080a6fafd..17d173638ca1088d6c945a7d093713019c00a60e 100644
--- a/DataQuality/DataQualityUtils/python/DQPostProcessMod.py
+++ b/DataQuality/DataQualityUtils/python/DQPostProcessMod.py
@@ -3,7 +3,7 @@ from __future__ import print_function
 
 import shutil, re
 
-from dqu_subprocess import apply as _local_apply
+from .dqu_subprocess import apply as _local_apply
 
 def _dolsrwrapper(fname):
     import ROOT
diff --git a/DataQuality/DataQualityUtils/python/DQProcMonitor.py b/DataQuality/DataQualityUtils/python/DQProcMonitor.py
index 897a1e9144d9d1d15b39b33feea0c8b5902c46af..806e0d6da2eb38677bd1f0651edb5e414e9252dd 100644
--- a/DataQuality/DataQualityUtils/python/DQProcMonitor.py
+++ b/DataQuality/DataQualityUtils/python/DQProcMonitor.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 from __future__ import print_function
 
 #python 2.6 is buggy, it can't import modules in deamon threads one has to import them in to global 
@@ -11,7 +11,7 @@ def formatRusage(rusage):
     fieldnames=["utime","stime","maxrss","shrmem","ushrmem","ushrstk","minflt",
                  "majflt","nswap","inblock","oublock","msgsnd","msgrcv",
                  "nsignals","nvcontsw","nivcontsw"]
-    return ", ".join(["=".join([n,"%s"%v] if type(v) is int else [n,"%f"%v]) for n,v in zip(fieldnames,rusage)])
+    return ", ".join(["=".join([n,"%s"%v] if isinstance(v, int) else [n,"%f"%v]) for n,v in zip(fieldnames,rusage)])
 
 def DQProcMon(*args,**kwargs):
     #    from resource import getrusage, RUSAGE_SELF, RUSAGE_CHILDREN
diff --git a/DataQuality/DataQualityUtils/python/DQWebDisplayMod.py b/DataQuality/DataQualityUtils/python/DQWebDisplayMod.py
index 51b1fd0d66e4dab0654796d9b46e18726c468795..33d77d0155b6b9a0e9bb0b054d5b42c85f521c82 100755
--- a/DataQuality/DataQualityUtils/python/DQWebDisplayMod.py
+++ b/DataQuality/DataQualityUtils/python/DQWebDisplayMod.py
@@ -58,7 +58,7 @@ def DQWebDisplay( inputFilePath, runAccumulating, c ):
     # we use the multiprocessing module to isolate code that possibly
     # has memory leaks, so they don't build up over time
 
-    if type(c.server) == str:
+    if isinstance(c.server, str):
         if c.server == '':
             c.server = []
         else:
@@ -601,13 +601,14 @@ def findCacheFile( inputFilePath, run, stream, cache ):
 
 
 def transferDirectoryToHandoffDir( dirName, localDir, targetDir, config ):
-    import time, ConfigParser, shutil, glob
+    import time, shutil, glob
+    import six.moves.configparser as configparser
     targetfname = repr(int(time.time())) + '-' + repr(os.getpid()) \
                   + '-' + os.uname()[1]  + '.tgz'
     targetfile = os.path.join(targetDir, targetfname)
 
     print('Creating tarball', targetfname, '...')
-    parser = ConfigParser.ConfigParser()
+    parser = configparser.ConfigParser()
     parser.set('DEFAULT', 'target', config.htmlDir)
     parser.add_section('execute')
     parser.set('execute', 'command', config.htmlDir + '/generateDQIndexFiles.py')
@@ -762,7 +763,7 @@ def importConfiguration(modname):
 def email(msg, subject, whofrom, addressees):
     import smtplib
     from email.mime.text import MIMEText
-    if type(addressees) == str:
+    if isinstance(addressees, str):
         addressees = [addressees]
     email = MIMEText(msg)
     email['Subject'] = subject
diff --git a/DataQuality/DataQualityUtils/python/TestCases.py b/DataQuality/DataQualityUtils/python/TestCases.py
index dd7d251bcd67e6e2842273075b19bb1ab2d01286..0bc117b418601923dcd53f9446ab11a123a70417 100644
--- a/DataQuality/DataQualityUtils/python/TestCases.py
+++ b/DataQuality/DataQualityUtils/python/TestCases.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 # None of this works, but keep it around in case someone wants to resurrect it later...
 # - PO 20180419
@@ -23,7 +23,7 @@ class DQUTestCase(unittest.TestCase):
         inlist = os.path.join(TESTING_DIR, 'test_merging')
         self.outfile = os.path.join(outdir, 'data09_calophys.00128005.physics_CosmicMuons.root')
         rv = os.system('cd %s ; DQHistogramMerge.py %s %s True' % (outdir, inlist, self.outfile))
-        self.failUnless(rv==0, "DQHistogramMerge.py return code is nonzero")
+        self.assertTrue(rv==0, "DQHistogramMerge.py return code is nonzero")
         
     def test02_WebDisplay(self):
         '''Test that a terminal web display job works'''
@@ -34,7 +34,7 @@ class DQUTestCase(unittest.TestCase):
         outdir = os.environ.get('TMPDIR', '.')
         infile = os.path.join(TESTING_DIR, 'data09_calophys.00128005.physics_CosmicMuons.root')
         rv = os.system('cd %s ; DQWebDisplay.py %s TestDisplay 123' % (outdir, infile))
-        self.failUnless(rv==0, "DQWebDisplay.py return code is nonzero")
+        self.assertTrue(rv==0, "DQWebDisplay.py return code is nonzero")
         
     def test_03_WebDisplay(self):
         '''Test that a terminal web display job works in temporary accumulation mode'''
@@ -55,7 +55,7 @@ class DQUTestCase(unittest.TestCase):
         shutil.copy(infile, os.path.join(cachedir, cachefilename))
 
         rv = os.system('cd %s ; DQWebDisplay.py %s TestDisplay 123 True' % (outdir, infile))
-        self.failUnless(rv==0, "DQWebDisplay.py return code is nonzero")
+        self.assertTrue(rv==0, "DQWebDisplay.py return code is nonzero")
 
     def tearDown(self):
         try:
diff --git a/DataQuality/DataQualityUtils/python/detmaskmod.py b/DataQuality/DataQualityUtils/python/detmaskmod.py
index 3dacb9a9c20700c0af1194a2f1e50d65985381b2..e3a6095728f25d84b0d4044743d0ad01363a7202 100644
--- a/DataQuality/DataQualityUtils/python/detmaskmod.py
+++ b/DataQuality/DataQualityUtils/python/detmaskmod.py
@@ -88,12 +88,12 @@ def decode(mask):
     dm = eformat.helper.DetectorMask(mask)
     rv = []
     for keys, value in detmaskmap.items():
-        if type(keys) == str:
+        if isinstance(keys, str):
             keys = [keys]
         if reduce(operator.or_,
                   [dm.is_set(getSubDetectorObj(key)) for key in keys]):
             flags = value
-            if type(flags) == str:
+            if isinstance(flags, str):
                 flags = [flags]
             rv += list(flags)
 
@@ -104,12 +104,12 @@ def decodeBlack(mask, defects=False):
     rv = []
     dmap = detmaskmap if not defects else detmaskmap_defects
     for keys, value in sorted(dmap.items()):
-        if type(keys) == str:
+        if isinstance(keys, str):
             keys = [keys]
         if reduce(operator.and_,
                   [not dm.is_set(getSubDetectorObj(key)) for key in keys]):
             flags = value
-            if type(flags) == str:
+            if isinstance(flags, str):
                 flags = [flags]
             rv += list(flags)
 
diff --git a/DataQuality/DataQualityUtils/python/doZLumi.py b/DataQuality/DataQualityUtils/python/doZLumi.py
index 605c4a763fe700e4df20dc5abdbbf59f207ee576..617008858c91bba12e7c363bfdd305eb3b748eba 100644
--- a/DataQuality/DataQualityUtils/python/doZLumi.py
+++ b/DataQuality/DataQualityUtils/python/doZLumi.py
@@ -45,7 +45,7 @@ def makeGRL(run, defect, fname):
     print('done')
     print('Query defects...', end='')
     ddb = DQDefects.DefectsDB('COOLOFL_GLOBAL/%s' % dbinstance, tag=tag)
-    ignores = set([_ for _ in ddb.defect_names if 'UNCHECKED' in _])
+    ignores = {_ for _ in ddb.defect_names if 'UNCHECKED' in _}
     try:
         defectiovs = ddb.retrieve(since = min(runs) << 32 | 1,
                                   until = max(runs) << 32 | 0xffffffff,
diff --git a/DataQuality/DataQualityUtils/python/dqu_subprocess.py b/DataQuality/DataQualityUtils/python/dqu_subprocess.py
index 08b0d793483494322945c369657da92a73ee0ac1..6b3e2eecabffc0de7a190f58b61b1cce243d56f1 100644
--- a/DataQuality/DataQualityUtils/python/dqu_subprocess.py
+++ b/DataQuality/DataQualityUtils/python/dqu_subprocess.py
@@ -10,7 +10,7 @@ def _local_apply_core(func, args, q):
         os._exit(1)
 
 def apply(func, args):
-    from Queue import Empty
+    from six.moves.queue import Empty
     from multiprocessing import Process
     from multiprocessing.managers import SyncManager
     import tempfile
@@ -34,7 +34,7 @@ def apply(func, args):
     try:
         rv = q.get(False)
     except Empty:
-        raise RuntimeError('daughter died while trying to execute %s%s' % (func.func_name, args))
+        raise RuntimeError('daughter died while trying to execute %s%s' % (func.__name__, args))
     if isinstance(rv, BaseException):
         if isinstance(rv, SystemExit):
             print('SystemExit raised by daughter; ignoring')
diff --git a/DataQuality/DataQualityUtils/python/hancool_histo_mod.py b/DataQuality/DataQualityUtils/python/hancool_histo_mod.py
index 572064dc18f945efbad6c0ed5c0df535fca48d33..5d3aa1db40163c0257c96020a95b69796859864a 100644
--- a/DataQuality/DataQualityUtils/python/hancool_histo_mod.py
+++ b/DataQuality/DataQualityUtils/python/hancool_histo_mod.py
@@ -82,7 +82,7 @@ def stringGetInfo(file, rootFolder):
 def ListHistoAssessments(xml, channel):
     the_result_histo = "Undefined"
     g = open(xml, 'r')
-    while 1:
+    while True:
         line = g.readline()
         if not line:
             break
@@ -160,7 +160,7 @@ def hancool_histo(inputFilePath="", input_run=-1, dbConnectionHisto="", dqmfOflH
     filename = "run_"+str(run)+"_han.root"
 
     # filling OFLH DB for histos (global run)
-    for pair in folderMapHisto.iteritems():
+    for pair in folderMapHisto.items():
         i = 0
         number = 0
         print(pair[0], pair[1])
diff --git a/DataQuality/DataQualityUtils/python/hancoolmod.py b/DataQuality/DataQualityUtils/python/hancoolmod.py
index 02073e0d605164a1fd02817173ad6e88f0eac728..d53a947a4e559399558206b8295bf0bb6cbd1cbc 100644
--- a/DataQuality/DataQualityUtils/python/hancoolmod.py
+++ b/DataQuality/DataQualityUtils/python/hancoolmod.py
@@ -136,7 +136,7 @@ intervalType = {
 def getLimits(name):
     try:
         import re
-        import detmaskmod
+        from . import detmaskmod
         runNumber = re.match(r'run_(\d+)_.*han.root', name).group(1)
         max_hi_limit = detmaskmod.getNumLumiBlocks(int(runNumber))+1
         if (name.find('minutes10_') > -1):
@@ -287,7 +287,7 @@ def hancool(runNumber=3070,
 
 
 def detmask_defects(runNumber, ddb):
-    import detmaskmod
+    from . import detmaskmod
     blacks = detmaskmod.decodeBlack(detmaskmod.getRunMask(runNumber),
                                     defects=True)
     nlbs = detmaskmod.getNumLumiBlocks(runNumber)
@@ -340,7 +340,7 @@ def ctp_defects(d, i, runNumber):
             rv.append(defect_iov(mapping[defect], message, False, lb, lb+1))
         if overflow_bad_lbs[defect]:
             message += '; defect occurred past end of monitoring histogram, marking end of run as bad'
-            import detmaskmod  # ugly: could happen for more than one defect - should be cheap though
+            from . import detmaskmod  # ugly: could happen for more than one defect - should be cheap though
             nlbs = detmaskmod.getNumLumiBlocks(runNumber)
             rv.append(defect_iov(defect, message,
                                  False, when.GetNbinsX(), nlbs+1))
@@ -413,7 +413,7 @@ def sct_perlb_defects(d, i, runNumber):
             rv.append(defect_iov(dname, message, False, lb, lb+1))
         if overflow_bad_lbs[dname]:
             message += '; defect occurred past end of monitoring histogram, marking end of run as bad'
-            import detmaskmod  # ugly: could happen for more than one defect - should be cheap though
+            from . import detmaskmod  # ugly: could happen for more than one defect - should be cheap though
             nlbs = detmaskmod.getNumLumiBlocks(runNumber)
             rv.append(defect_iov(dname, message,
                                  False, when.GetNbinsX(), nlbs+1))
@@ -452,7 +452,7 @@ def sct_eff_defect(d, i, runNumber):
     h2 = d.Get('InnerDetector/SCT/Summary/SctTotalEff_/Results/Status')
     if not h1 or not h2:
         return None
-    badstatuses = set(['Yellow', 'Red'])
+    badstatuses = {'Yellow', 'Red'}
     statuscheck = []
     for h in h1, h2:
         status = set(x.GetName() for x in h.GetListOfKeys())
@@ -486,7 +486,7 @@ def dqmf_node_defect(node, defect, badstatuses=['Red']):
 
 
 def hancool_defects(runNumber, filePath="./", dbConnection="", db_tag='HEAD', isESn=True):
-    import pix_defect
+    from . import pix_defect
     analyzers = []
     if isESn:
         # CTP
diff --git a/DataQuality/DataQualityUtils/python/handimod.py b/DataQuality/DataQualityUtils/python/handimod.py
index 355354073132ca7ca68d0d6014f78419ea9ff205..94189b976b7197a4f533320d51563787e5ad4eab 100644
--- a/DataQuality/DataQualityUtils/python/handimod.py
+++ b/DataQuality/DataQualityUtils/python/handimod.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 from __future__ import print_function
 
 import os
@@ -606,7 +606,7 @@ def makeOneHistFile(htmlDir, name, subname, sp, runlistLoc, compare, jsRoot):
                 else:
                     name = ' '.join([namecache[-1]])
                     namecache = []
-                import urllib
+                from six.moves import urllib
                 resultname = name.rsplit(':', 1)[0]
                 resultval = sp[cc-1]
                 if algorithm == 'RepeatAlgorithm' and resultname.endswith('|Status'):
@@ -615,15 +615,15 @@ def makeOneHistFile(htmlDir, name, subname, sp, runlistLoc, compare, jsRoot):
                 if compare and run is not None:
                     if period_type == 'run':
                         queryurl = 'http://atlasdqm.cern.ch:8080/dqmfquery/query?histogram=%s&result=%s&error=&stream=%s&period_type=%s&source=tier0&proc_ver=%s&low_run=%s&high_run=&low_y=&high_y=&outputtype=png' % (
-                            urllib.quote_plus(subname+'/'+sp[0]), urllib.quote_plus(resultname), stream.strip(), period_type, proc_ver, int(run)-1000)
+                            urllib.parse.quote_plus(subname+'/'+sp[0]), urllib.parse.quote_plus(resultname), stream.strip(), period_type, proc_ver, int(run)-1000)
                         k.write(
                             '<tr><td align="right"><b><a href="%s">%s</a>:</b></td>' % (queryurl, resultname))
                         k.write('<td>'+resultval+'</td></tr>\n')
                     else:
                         queryurl1 = 'http://atlasdqm.cern.ch:8080/dqmfquery/query?histogram=%s&result=%s&error=&stream=%s&period_type=%s&source=tier0&proc_ver=%s&low_run=%s&high_run=&low_y=&high_y=&outputtype=png' % (
-                            urllib.quote_plus(subname+'/'+sp[0]), urllib.quote_plus(resultname), stream.strip(), period_type, proc_ver, int(run)-1000)
+                            urllib.parse.quote_plus(subname+'/'+sp[0]), urllib.parse.quote_plus(resultname), stream.strip(), period_type, proc_ver, int(run)-1000)
                         queryurl2 = 'http://atlasdqm.cern.ch:8080/dqmfquery/query?histogram=%s&result=%s&error=&stream=%s&period_type=%s&source=tier0&proc_ver=%s&low_run=%s&high_run=%s&low_y=&high_y=&outputtype=png' % (
-                            urllib.quote_plus(subname+'/'+sp[0]), urllib.quote_plus(resultname), stream.strip(), period_type, proc_ver, run, run)
+                            urllib.parse.quote_plus(subname+'/'+sp[0]), urllib.parse.quote_plus(resultname), stream.strip(), period_type, proc_ver, run, run)
                         k.write(
                             '<tr><td align="right"><b><a href="%s">%s</a>:</b></td>' % (queryurl1, resultname))
                         k.write(
diff --git a/DataQuality/DataQualityUtils/python/hanwriter.py b/DataQuality/DataQualityUtils/python/hanwriter.py
index 3995982cf990dccc17dd01c43d072881e633bdc0..66d2677c1efdb371fee99d45296fee0b40757251 100755
--- a/DataQuality/DataQualityUtils/python/hanwriter.py
+++ b/DataQuality/DataQualityUtils/python/hanwriter.py
@@ -114,7 +114,7 @@ class Node(DQHanConfMaker.Node):
         if self.nodeType != Node.DOCUMENT:
             writer.write(" { %s" % (newl))
         if self.attributes:
-            for key, attribute in self.attributes.iteritems():
+            for key, attribute in self.attributes.items():
                 writer.write("%s %s = %s%s" % (indent, key, attribute, newl))
         if self.subnodes:
             for node in self.subnodes:
@@ -707,7 +707,7 @@ def _findAllDQBaseObjects(rootlist):
         if not isinstance(dqbase, DQBase):
             raise ValueError(
                 '%s is not a valid DQBase object; this should never happen' % dqbase)
-        retset = set([dqbase])
+        retset = {dqbase}
         for rel in dqbase.getAllRelatedObjects():
             retset |= recurse(rel)
         return retset
diff --git a/DataQuality/DataQualityUtils/python/messaging_listen.py b/DataQuality/DataQualityUtils/python/messaging_listen.py
index 5137d0fd2938ff09b69b3d35ec047aca2741184b..6b1080ef9052fca73a53058b4814f28e09dc6369 100644
--- a/DataQuality/DataQualityUtils/python/messaging_listen.py
+++ b/DataQuality/DataQualityUtils/python/messaging_listen.py
@@ -28,7 +28,7 @@ class ATLASDQMListener(object):
                                                      socket.AF_INET, 
                                                      socket.SOCK_STREAM)]
         
-        import stompconfig
+        from . import stompconfig
         self.conns = []
         if hasattr(self.listener, 'conn'):
             self.listener.conn=[]
@@ -66,7 +66,7 @@ class ATLASDQMListener(object):
                                                      socket.AF_INET, 
                                                      socket.SOCK_STREAM)]
         
-        import stompconfig
+        from . import stompconfig
         self.conns = []
         if hasattr(self.listener, 'conn'):
             self.listener.conn=[]
diff --git a/DataQuality/DataQualityUtils/python/panic.py b/DataQuality/DataQualityUtils/python/panic.py
index b92e312d49bd4b143e497646be932fb18af9a725..b0efb85a371b79651c3c7844a25989382c5aa03f 100644
--- a/DataQuality/DataQualityUtils/python/panic.py
+++ b/DataQuality/DataQualityUtils/python/panic.py
@@ -1,11 +1,11 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 DEST='/queue/atlas.dqm.panic'
 
 def panic(msg):
     import stomp
     import json
-    import stompconfig
+    from . import stompconfig
     import sys, os
     import traceback
     import time
diff --git a/DataQuality/DataQualityUtils/scripts/BrowseDB.py b/DataQuality/DataQualityUtils/scripts/BrowseDB.py
deleted file mode 100755
index 3ef767525e7d6980e6236bbdd1ef011b6a73f899..0000000000000000000000000000000000000000
--- a/DataQuality/DataQualityUtils/scripts/BrowseDB.py
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-# +-----------------------------------------------------+
-# | Vassilis D. Kazazakis - BrowseDB.py v1.0            |
-# |  Browses the COOL database and prints all elements  |
-# |  For testing purposes only                          |
-# +-----------------------------------------------------+
-
-import sys
-
-from PyCool import cool, coral
-
-import DataQualityUtils.DBInfo as DBInfo
-
-# user & password information should be handled by each individual program
-# if you have authentication file, this works
-
-home = os.environ.get('HOME')
-os.environ['CORAL_AUTH_PATH'] = home+"/private"
-
-#userName = "ATLAS_COOL_READER"
-#password = "*******" no need for a pwd here 
-
-connectString  = "oracle://" +  DBInfo.getWriteServer()
-connectString += ";schema=" +   DBInfo.getSchema()
-#connectString += ";user=" +     userName
-connectString += ";dbname=" +   DBInfo.getDbName()
-#connectString += ";password=" + password
-
-folderName = DBInfo.getFolder()
-
-dbSvc = cool.DatabaseSvcFactory.databaseService()
-
-try:
-        db = dbSvc.openDatabase( connectString )
-except:
-        print >>sys.stderr, "Database does not exist"
-        sys.exit(-1)
-
-folder = db.getFolder( folderName)
-metadata = DBInfo.getChannelDict()
-for key in metadata:
-	channel = metadata[key]
-	objIter = folder.browseObjects(0, cool.ValidityKeyMax, cool.ChannelSelection(channel))
-
-	print "Folder:", folderName
-	print "Channel:", channel, "Stream:", key
-
-	while(objIter.hasNext()):
-		objPayload = objIter.next().payload()
-		print " ", objPayload["filename"], "GUID:", objPayload["guid"]
-
-	print
diff --git a/DataQuality/DataQualityUtils/scripts/CreateDB.py b/DataQuality/DataQualityUtils/scripts/CreateDB.py
deleted file mode 100755
index 55a368f1bfe605819768c7f3c1b675fcac43b11d..0000000000000000000000000000000000000000
--- a/DataQuality/DataQualityUtils/scripts/CreateDB.py
+++ /dev/null
@@ -1,86 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-# +-----------------------------------------------------+
-# | Vassilis D. Kazazakis, CreateDB.py v1.0		|
-# | Creates the appropriate folder structure in COOL db	|
-# +-----------------------------------------------------+
-
-import sys, os
-import DataQualityUtils.DBInfo_Histo as DBInfo
-
-from PyCool import cool, coral
-
-# user & password information should be handled by each individual program
-# if you have authentication file, this works
-
-home = os.environ.get('HOME')
-os.environ['CORAL_AUTH_PATH'] = home+"/private"
-
-#userName = "ATLAS_COOLOFL_GLOBAL"
-#password = "*********" # no need for a pwd 
-
-dropExistingDB = False
-
-connectString  = "sqlite://" +	DBInfo.getWriteServer()
-connectString += ";schema=" + 	DBInfo.getSchema()
-#connectString += ";user=" + 	userName
-connectString += ";dbname=" + 	DBInfo.getDbName()
-#connectString += ";password=" + password
-
-if len(sys.argv) > 1 and sys.argv[1] == "createdb":
-  dbSvc = cool.DatabaseSvcFactory.databaseService()
-  db = dbSvc.createDatabase( connectString )
-
-if len(sys.argv) > 1 and sys.argv[1] == "drop":
-	dropExistingDB = True
-
-folderNames = DBInfo.getFolders()
-
-dbSvc = cool.DatabaseSvcFactory.databaseService()
-print connectString
-db = dbSvc.openDatabase( connectString, False) 
-
-for folderName in folderNames:
-  folderList = folderName.split("/")[1:]
-  folder = ""
-  for name in folderList[:-1]:
-          folder = folder + "/" + name
-          if db.existsFolderSet(folder):
-                  print "Folderset", folder, "already exists."
-                  continue
-          print "Creating folderset", folder,
-          db.createFolderSet(folder)
-          print "... Done"
-
-  if dropExistingDB:
-          if db.existsFolder(folderName):
-                  print "Droping", folderName,
-                  folder = db.dropNode(folderName)
-                  print "... Done"
-  if db.existsFolder(folderName):
-          folder = db.getFolder(folderName)
-  else:
-          spec = cool.RecordSpecification()
-          #spec.extend("filename", cool.StorageType.String255)
-          #spec.extend("guid", cool.StorageType.Int32)
-          spec.extend("Code", cool.StorageType.Int32)
-          spec.extend("deadFrac", cool.StorageType.Float)
-          spec.extend("Thrust",cool.StorageType.Float)
-          # heaven forgive me ...
-          if 'SHIFT' in folderName:
-            spec.extend("Comment",cool.StorageType.String255)
-          print "Creating folder", folderName, 
-          # Deprecated/dropped:  folder = db.createFolder(folderName, spec, "", cool.FolderVersioning.MULTI_VERSION, False)
-          folderSpec=cool.FolderSpecification(cool.FolderVersioning.MULTI_VERSION, spec)
-          folder = db.createFolder(folderName, folderSpec, "", False)
-          print "... Done"
-
-  print "Creating channels."
-  channelDict = DBInfo.getChannelDict()
-  for channelName in channelDict.keys():
-          try:
-                  folder.createChannel(channelDict[channelName], channelName)
-                  print "Created channel:", channelName
-          except:
-                  print "Channel", channelName, "already exists."
diff --git a/DataQuality/DataQualityUtils/scripts/CreateDB_Histo.py b/DataQuality/DataQualityUtils/scripts/CreateDB_Histo.py
deleted file mode 100755
index 4ad0dfb2ee64b4d115d3692d8b194a45cf6a29e9..0000000000000000000000000000000000000000
--- a/DataQuality/DataQualityUtils/scripts/CreateDB_Histo.py
+++ /dev/null
@@ -1,96 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-# +-----------------------------------------------------+
-# | Vassilis D. Kazazakis, CreateDB.py v1.0		|
-# | Creates the appropriate folder structure in COOL db	|
-# | Update by M.D'Onofrio to include new folder         |
-# | for history plots             	                |
-# +-----------------------------------------------------+
-
-import sys, os
-from DataQualityUtils.DBInfo_Histo import *
-
-from PyCool import cool, coral
-
-# user & password information should be handled by each individual program
-# if you have authentication file, this works
-
-home = os.environ.get('HOME')
-os.environ['CORAL_AUTH_PATH'] = home+"/private"
-
-#userName = "ATLAS_COOLOFL_GLOBAL"
-#password = "*******" #no need for a pwd
-
-dropExistingDB = False
-
-connectString  = "sqlite://" +	getWriteServer()
-connectString += ";schema=" + 	getSchema()
-#connectString += ";user=" + 	userName
-connectString += ";dbname=" + 	getDbName()
-#connectString += ";password=" + password
-
-if len(sys.argv) > 1 and sys.argv[1] == "createdb":
-  dbSvc = cool.DatabaseSvcFactory.databaseService()
-  db = dbSvc.createDatabase( connectString )
-
-if len(sys.argv) > 1 and sys.argv[1] == "drop":
-	dropExistingDB = True
-
-folderName = getFolder()
-folderNameH = getFolderH()
-
-dbSvc = cool.DatabaseSvcFactory.databaseService()
-print connectString
-db = dbSvc.openDatabase( connectString, False) 
-
-# DQMF folder
-
-folderList = folderName.split("/")[1:]
-folder = ""
-for name in folderList[:-1]:
-	folder = folder + "/" + name
-	if db.existsFolderSet(folder):
-		print "Folderset", folder, "already exists."
-		continue
-	print "Creating folderset", folder,
-	db.createFolderSet(folder)
-	print "... Done"
-
-if dropExistingDB:
-	if db.existsFolder(folderName):
-		print "Droping", folderName,
-		folder = db.dropNode(folderName)
-		print "... Done"
-if db.existsFolder(folderName):
-	folder = db.getFolder(folderName)
-else:
-	spec = cool.RecordSpecification()
-	#spec.extend("filename", cool.StorageType.String255)
-	#spec.extend("guid", cool.StorageType.Int32)
-	spec.extend("Code", cool.StorageType.Int32)
-	spec.extend("deadFrac", cool.StorageType.Float)
-	spec.extend("Thrust",cool.StorageType.Float)
-	print "Creating folder", folderName, 
-	# Deprecated/dropped:  folder = db.createFolder(folderName, spec, "", cool.FolderVersioning.MULTI_VERSION, False)
-	folderSpec=cool.FolderSpecification(cool.FolderVersioning.MULTI_VERSION, spec)
-	folder = db.createFolder(folderName, folderSpec, "", False)
-	print "... Done"
-
-print "Creating channels."
-channelDict = getChannelDict()
-for channelName in channelDict.keys():
-	try:
-		folder.createChannel(channelDict[channelName], channelName)
-		print "Created channel:", channelName
-	except:
-		print "Channel", channelName, "already exists."
-
-db.closeDatabase()
-
-# create defects
-from DQDefects import DefectsDB
-ddb = DefectsDB(connectString, create=True, read_only=False)
-chandb = DefectsDB('COOLOFL_GLOBAL/CONDBR2')
-for chan in chandb.defect_names:
-    ddb.create_defect(chan, chandb.get_channel_descriptions([chan]))
diff --git a/DataQuality/DataQualityUtils/scripts/DQFileMove.py b/DataQuality/DataQualityUtils/scripts/DQFileMove.py
index 7de8a88c4d2be5a7501f30fe9c0007dd822090b9..ce27ecd0b2c27d22cf27a3113863c57b805601c6 100755
--- a/DataQuality/DataQualityUtils/scripts/DQFileMove.py
+++ b/DataQuality/DataQualityUtils/scripts/DQFileMove.py
@@ -1,23 +1,23 @@
 #!/usr/bin/env python
 
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
+
+from __future__ import print_function
 
-## *****************************************************************************
-VERSION = '$Id $'
 ## *****************************************************************************
 def importConfiguration(modname):
     from DataQualityConfigurations import getmodule
-    print 'getting configuration', modname
+    print('getting configuration', modname)
     return getmodule(modname)
 
 def usage():
   cmdi = sys.argv[0].rfind("/")
   cmd = sys.argv[0][cmdi+1:]
-  print ""
-  print "Usage: ", cmd, "<config> <prefix>"
-  print ""
-  print "This is a production utility; use TEST config for development and testing."
-  print ""
+  print("")
+  print("Usage: ", cmd, "<config> <prefix>")
+  print("")
+  print("This is a production utility; use TEST config for development and testing.")
+  print("")
 
 if __name__ == '__main__':
     import sys
@@ -31,14 +31,14 @@ if __name__ == '__main__':
     
     try:
         cmod = importConfiguration(configModule)
-    except Exception, e:
-        print "Could not import configuration module \'" + configModule + "\'"
+    except Exception:
+        print("Could not import configuration module \'" + configModule + "\'")
         sys.exit(1)
 
     try:
         config = cmod.dqconfig
-    except Exception, e:
-        print "Configuration object 'dqconfig' not defined in module \'" + configModule + "\'"
+    except Exception:
+        print("Configuration object 'dqconfig' not defined in module \'" + configModule + "\'")
         sys.exit(1)
 
     filemovemod.move_files(sys.argv[2], config)
diff --git a/DataQuality/DataQualityUtils/scripts/DQHistogramMerge.py b/DataQuality/DataQualityUtils/scripts/DQHistogramMerge.py
index 895717d9dd12046fd621e9173ffa3f26f2bb1032..ea34c7f01eb8e9ab9bd6392f3ac3ddd96c8e0760 100755
--- a/DataQuality/DataQualityUtils/scripts/DQHistogramMerge.py
+++ b/DataQuality/DataQualityUtils/scripts/DQHistogramMerge.py
@@ -4,10 +4,6 @@
 
 from __future__ import print_function
 
-## *****************************************************************************
-VERSION = '$Id: DQHistogramMerge.py 509709 2012-07-10 16:03:00Z vogel $'
-## *****************************************************************************
-
 import DataQualityUtils.DQHistogramMergeMod as mod
 import sys, os
 
diff --git a/DataQuality/DataQualityUtils/scripts/DQHistogramMergeRegExp.py b/DataQuality/DataQualityUtils/scripts/DQHistogramMergeRegExp.py
index 2386a188d48813556a6ee27aab17a2d1a0212961..46a3ac9e69d61c2ecaaca462ad1bafd50c1b912b 100755
--- a/DataQuality/DataQualityUtils/scripts/DQHistogramMergeRegExp.py
+++ b/DataQuality/DataQualityUtils/scripts/DQHistogramMergeRegExp.py
@@ -1,11 +1,8 @@
 #!/usr/bin/env python
 
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-## *****************************************************************************
-VERSION = '$Id: DQHistogramMergeRegExp.py 509709 2012-07-10 16:03:00Z vogel $'
-## *****************************************************************************
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
+from __future__ import print_function
 import DataQualityUtils.DQHistogramMergeMod as mod
 import sys, os
 
@@ -14,8 +11,8 @@ os.environ['TDAQ_ERS_NO_SIGNAL_HANDLERS'] = '1'
 
 def usage():
   cmd = sys.argv[0].split("/")[-1]
-  print "Usage: ", cmd, "<input_list_file_name> <merged_file_name> [directory_regexp] [histogram_regexp] [run_post_processing [is_incremental_merge]]"
-  print "If you don't give any regular expressions, this script will act like DQHistogramMerge.py <infilelist> <outfile> False"
+  print("Usage: ", cmd, "<input_list_file_name> <merged_file_name> [directory_regexp] [histogram_regexp] [run_post_processing [is_incremental_merge]]")
+  print("If you don't give any regular expressions, this script will act like DQHistogramMerge.py <infilelist> <outfile> False")
 
 ########################################
 
diff --git a/DataQuality/DataQualityUtils/scripts/DQHistogramPrintStatistics.py b/DataQuality/DataQualityUtils/scripts/DQHistogramPrintStatistics.py
index 30c251ffdf79c115f83735ba83da9d34e0135102..ce7510378d111d53a7794f52d9e8346851e30085 100755
--- a/DataQuality/DataQualityUtils/scripts/DQHistogramPrintStatistics.py
+++ b/DataQuality/DataQualityUtils/scripts/DQHistogramPrintStatistics.py
@@ -1,18 +1,15 @@
 #!/bin/env python
 
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-## *****************************************************************************
-VERSION = '$Id: DQHistogramPrintStatistics.py 354890 2011-03-28 16:30:59Z kama $'
-## *****************************************************************************
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
+from __future__ import print_function
 import os
 ## Needed to correct ROOT behavior; see below
-CWD = os.getcwd()
+CWD = os.getcwd()  # noqa: E402
 
 import sys
 
-import ROOT
+import ROOT  # noqa: F401
 ## Importing gSystem may change the current directory to one of the
 ## command-line arguments; chdir to original directory to have
 ## predictable behavior
@@ -33,7 +30,7 @@ def DQHistogramPrintStatistics( inputFileName ):
 def usage():
   cmdi = sys.argv[0].rfind("/")
   cmd = sys.argv[0][cmdi+1:]
-  print "Usage: ", cmd, "<input_file_name>"
+  print("Usage: ", cmd, "<input_file_name>")
 
 
 
@@ -48,4 +45,3 @@ if __name__ == "__main__":
     sys.exit(0)
   
   DQHistogramPrintStatistics( sys.argv[1] )
-  
diff --git a/DataQuality/DataQualityUtils/scripts/DQM_Tier0Wrapper_tf.py b/DataQuality/DataQualityUtils/scripts/DQM_Tier0Wrapper_tf.py
index fe089d5f5bf16558005441de9acf25a307726f0c..19d85c4b01fc33ca79aea1cfb488dc18ae55e899 100755
--- a/DataQuality/DataQualityUtils/scripts/DQM_Tier0Wrapper_tf.py
+++ b/DataQuality/DataQualityUtils/scripts/DQM_Tier0Wrapper_tf.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 
-# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 #########################################################################
 ##
@@ -55,7 +55,14 @@
 ##    J. Guenther (February 2017)
 #########################################################################
 
-import sys, string, commands, os.path, os, json, time, pprint, xmlrpclib, traceback
+from __future__ import print_function
+import sys, os.path, os, json, time, pprint, traceback
+from six.moves import xmlrpc_client as xmlrpclib
+import six
+if six.PY2:
+  from commands import getstatusoutput
+else:
+  from subprocess import getstatusoutput
 #sami
 import hashlib
 
@@ -75,7 +82,7 @@ def getSubFileMap(fname, nevts=0) :
     return map
 
 def publish_success_to_mq(run, ptag, stream, incr, ami, procpass, hcfg, isprod):
-  import stomp, json, os, ssl
+  import stomp, json, ssl
   from DataQualityUtils import stompconfig
   dest='/topic/atlas.dqm.progress'
   conn=stomp.Connection([('atlas-mb.cern.ch', 61013)], **stompconfig.config())
@@ -115,25 +122,25 @@ def genmd5sum(filename):
         md5summer.update(fs)
     finally:
         infil.close()
-  print "md5 sum of the \"%s\" is %s"%(filename,md5summer.hexdigest())
+  print("md5 sum of the \"%s\" is %s"%(filename,md5summer.hexdigest()))
   return
       
 def dq_combined_trf(jsonfile, outmap):
 
-  print "\n##################################################################"
-  print   "## STEP 1: creating file with list of root files ..."
-  print   "##################################################################\n"
+  print("\n##################################################################")
+  print("## STEP 1: creating file with list of root files ...")
+  print("##################################################################\n")
   
   nfiles=0
 
   try:
     # extract parameters from json file
-    print "Using json file ", jsonfile, " for input parameters"
+    print("Using json file ", jsonfile, " for input parameters")
     f = open(jsonfile, 'r')
     parmap = json.load(f)
     f.close()
 
-    print "\nFull Tier-0 run options:\n"
+    print("\nFull Tier-0 run options:\n")
     pprint.pprint(parmap)
 
     inputfilelist = parmap.get('inputHistFiles', [])
@@ -157,7 +164,7 @@ def dq_combined_trf(jsonfile, outmap):
   nevts = 0
 
   try:
-    if isinstance(inputfilelist[0], unicode) :  
+    if isinstance(inputfilelist[0], six.text_type) :  
       histtmpdsname = (inputfilelist[0]).split('#')[0]
       for val in inputfilelist :
         histtmpflist.append(val.split('#')[1])
@@ -169,7 +176,7 @@ def dq_combined_trf(jsonfile, outmap):
         nevt = fdict.get('events', 0)
         if nevt is None:
           nevt=0
-          print "WARNING Can't get number of events from input json file"
+          print("WARNING Can't get number of events from input json file")
         nevts+=nevt
   
     f = open('hist_merge.list', 'w')
@@ -180,9 +187,9 @@ def dq_combined_trf(jsonfile, outmap):
     f.close()
 
     cmd = "cat hist_merge.list"
-    (s,o) = commands.getstatusoutput(cmd)
-    print "\nContents of file hist_merge.list:\n"
-    print o
+    (s,o) = getstatusoutput(cmd)
+    print("\nContents of file hist_merge.list:\n")
+    print(o)
   except:
     outmap['exitCode'] = 103
     outmap['exitAcronym'] = 'TRF_INPUTINFO'
@@ -191,9 +198,9 @@ def dq_combined_trf(jsonfile, outmap):
     return
 
   try:
-    print "\n##################################################################"
-    print   "## STEP 2: determining job parameters..."
-    print   "##################################################################\n"
+    print("\n##################################################################")
+    print("## STEP 2: determining job parameters...")
+    print("##################################################################\n")
 
     # output file
     histdsname = (parmap['outputHistFile']).split('#')[0]
@@ -249,7 +256,7 @@ def dq_combined_trf(jsonfile, outmap):
     dqproject = parmap.get('projectTag', dqproject)
     
     # run number
-    if parmap.has_key('runNumber') : 
+    if 'runNumber' in parmap : 
       runnr = parmap['runNumber']
     else :
       try :
@@ -258,7 +265,7 @@ def dq_combined_trf(jsonfile, outmap):
         runnr = 1234567890
 
     # stream name  
-    if parmap.has_key('streamName') : 
+    if 'streamName' in parmap : 
       stream = parmap['streamName']
     else :
       try :
@@ -268,7 +275,7 @@ def dq_combined_trf(jsonfile, outmap):
     
     # processing pass number  
     MAX_XMLRPC_TRIES = 5 
-    if parmap.has_key('procNumber') : 
+    if 'procNumber' in parmap : 
       procnumber = parmap['procNumber']
     else :
       n_xmlrpc_tries = 1
@@ -279,19 +286,19 @@ def dq_combined_trf(jsonfile, outmap):
           procnumber = xmlrpcserver.get_next_proc_pass(runnr, stream, 'tier0')
           break 
         except :
-          print 'Web service connection failed, attempt', n_xmlrpc_tries, 'of', MAX_XMLRPC_TRIES
+          print('Web service connection failed, attempt', n_xmlrpc_tries, 'of', MAX_XMLRPC_TRIES)
           n_xmlrpc_tries += 1
           if n_xmlrpc_tries <= MAX_XMLRPC_TRIES:
             time.sleep(20*2**n_xmlrpc_tries)
 
-    print "Job parameters:\n"
-    print "  Run number:      ", runnr
-    print "  Stream name:     ", stream
-    print "  Processing pass: ", procnumber
-    print "  Incremental mode:", incr
-    print "  Post-processing: ", postproc
-    print "  COOL uploads:    ", allowCOOLUpload
-    print "  Production mode: ", productionMode
+    print("Job parameters:\n")
+    print("  Run number:      ", runnr)
+    print("  Stream name:     ", stream)
+    print("  Processing pass: ", procnumber)
+    print("  Incremental mode:", incr)
+    print("  Post-processing: ", postproc)
+    print("  COOL uploads:    ", allowCOOLUpload)
+    print("  Production mode: ", productionMode)
 
   except:
     outmap['exitCode'] = 104
@@ -301,17 +308,17 @@ def dq_combined_trf(jsonfile, outmap):
     return
 
   try:
-    print "\n##################################################################"
-    print   "## STEP 3: running histogram merging procedure ..."
-    print   "##################################################################\n"
+    print("\n##################################################################")
+    print("## STEP 3: running histogram merging procedure ...")
+    print("##################################################################\n")
 
     # environment setting
     os.environ['DQPRODUCTION'] = '1' if productionMode == 'True' else '0'
-    print "Setting env variable DQPRODUCTION to %s\n" % os.environ['DQPRODUCTION']
+    print("Setting env variable DQPRODUCTION to %s\n" % os.environ['DQPRODUCTION'])
     os.environ['DQ_STREAM'] = stream
-    print "Setting env variable DQ_STREAM to %s\n" % os.environ['DQ_STREAM']
+    print("Setting env variable DQ_STREAM to %s\n" % os.environ['DQ_STREAM'])
     os.environ['COOLUPLOADS'] = '1' if allowCOOLUpload == 'True' and productionMode == 'True' else '0'
-    print "Setting env variable COOLUPLOADS to %s\n" % os.environ['COOLUPLOADS']
+    print("Setting env variable COOLUPLOADS to %s\n" % os.environ['COOLUPLOADS'])
 
     if postproc == 'True' :
       if incr == 'True':
@@ -321,27 +328,27 @@ def dq_combined_trf(jsonfile, outmap):
     else :  
       cmd = "python -u `which DQHistogramMerge.py` hist_merge.list %s 0 0 %d %d"    % (histfile,histMergeCompressionLevel,histMergeDebugLevel)
     
-    print "Histogram merging command:\n"
-    print cmd
-    print "\n##################################################################\n"
+    print("Histogram merging command:\n")
+    print(cmd)
+    print("\n##################################################################\n")
     
-    print "## ... logfile from DQHistogramMerge.py: "
-    print "--------------------------------------------------------------------------------"
+    print("## ... logfile from DQHistogramMerge.py: ")
+    print("--------------------------------------------------------------------------------")
     tstart = time.time()
     # execute command
     retcode1 = os.system(cmd)
-    print "--------------------------------------------------------------------------------"
+    print("--------------------------------------------------------------------------------")
     t1 = time.time()
     dt1 = int(t1 - tstart)
     
-    print "\n## DQHistogramMerge.py finished with retcode = %s" % retcode1
-    print   "## ... elapsed time: ", dt1, " sec"
+    print("\n## DQHistogramMerge.py finished with retcode = %s" % retcode1)
+    print("## ... elapsed time: ", dt1, " sec")
 
     if retcode1 != 0 :
       outmap['exitCode'] = retcode1
       outmap['exitAcronym'] = 'TRF_DQMHISTMERGE_EXE'
       outmap['exitMsg'] = 'ERROR: DQHistogramMerge.py execution problem! (STEP 3).'
-      print "ERROR: DQHistogramMerge.py execution problem!"
+      print("ERROR: DQHistogramMerge.py execution problem!")
       retcode = retcode1
       txt = 'DQHistogramMerge.py execution problem'
       try:
@@ -354,38 +361,38 @@ def dq_combined_trf(jsonfile, outmap):
         genmd5sum(histfile)
         DQResFile="DQResourceUtilization.txt"
         if os.path.exists(DQResFile):
-          print "dumping resource utilization log"
+          print("dumping resource utilization log")
           with open(DQResFile) as resfile:
             for resline in resfile:
-              print resline,
+              print(resline, end=' ')
       except:
         outmap['exitMsg'] = 'ERROR: DQHistogramMerge.py execution problem + problem dumping DQResourceUtilization! (STEP 3).'
         traceback.print_exc()
-        print "ERROR: DQHistogramMerge.py execution problem + problem dumping DQResourceUtilization!"
+        print("ERROR: DQHistogramMerge.py execution problem + problem dumping DQResourceUtilization!")
       return
 
     if postproc == 'True' and incr == 'False':
-      print "\n##################################################################"
-      print "## STEP 3b: copying postprocessing output to AFS ..."
-      print "##################################################################\n"
+      print("\n##################################################################")
+      print("## STEP 3b: copying postprocessing output to AFS ...")
+      print("##################################################################\n")
 
       cmd = "python -u `which DQFileMove.py` %s %s_%s_%s" % (dqproject, runnr, stream, procnumber)
 
-      print "File move command:\n"
-      print cmd
-      print "\n##################################################################\n"
+      print("File move command:\n")
+      print(cmd)
+      print("\n##################################################################\n")
 
-      print "## ... logfile from DQFileMove.py: "
-      print "--------------------------------------------------------------------------------"
+      print("## ... logfile from DQFileMove.py: ")
+      print("--------------------------------------------------------------------------------")
       # execute command
       retcode1b = os.system(cmd)
-      print "--------------------------------------------------------------------------------"
+      print("--------------------------------------------------------------------------------")
       t1b = time.time()
       dt1b = int(t1b - t1)
       t1 = t1b
 
-      print "\n## DQFileMove.py finished with retcode = %s" % retcode1b
-      print   "## ... elapsed time: ", dt1b, " sec"
+      print("\n## DQFileMove.py finished with retcode = %s" % retcode1b)
+      print("## ... elapsed time: ", dt1b, " sec")
   except:
       outmap['exitCode'] = 105
       outmap['exitAcronym'] = 'TRF_DQMHISTMERGE_EXE'
@@ -397,29 +404,29 @@ def dq_combined_trf(jsonfile, outmap):
     retcode2 = 0
     dt2 = 0
     if doWebDisplay == 'True':
-      print "\n##################################################################"
-      print   "## STEP 4: running web-display creation procedure ..."
-      print   "##################################################################\n"
+      print("\n##################################################################")
+      print("## STEP 4: running web-display creation procedure ...")
+      print("##################################################################\n")
 
       cmd = "python -u `which DQWebDisplay.py` %s %s %s %s stream=%s" % (histfile, dqproject, procnumber, incr, stream)
 
-      print "Web display creation command:\n"
-      print cmd
-      print "\n##################################################################\n"
+      print("Web display creation command:\n")
+      print(cmd)
+      print("\n##################################################################\n")
 
-      print "## ... logfile from DQWebDisplay.py: "
-      print "--------------------------------------------------------------------------------"
+      print("## ... logfile from DQWebDisplay.py: ")
+      print("--------------------------------------------------------------------------------")
       # execute command
       retcode2 = os.system(cmd)
-      print 'DO NOT REPORT "Error in TH1: cannot merge histograms" ERRORS! THESE ARE IRRELEVANT!'
-      print "--------------------------------------------------------------------------------"
+      print('DO NOT REPORT "Error in TH1: cannot merge histograms" ERRORS! THESE ARE IRRELEVANT!')
+      print("--------------------------------------------------------------------------------")
       t2 = time.time()
       dt2 = int(t2 - t1)
 
-      print "\n## DQWebDisplay.py finished with retcode = %s" % retcode2
-      print   "## ... elapsed time: ", dt2, " sec"
+      print("\n## DQWebDisplay.py finished with retcode = %s" % retcode2)
+      print("## ... elapsed time: ", dt2, " sec")
       if not (retcode2 >> 8) in (0, 5) :
-        print "ERROR: DQWebDisplay.py execution problem!"
+        print("ERROR: DQWebDisplay.py execution problem!")
         outmap['exitCode'] = retcode2
         outmap['exitAcronym'] = 'TRF_DQMDISPLAY_EXE'
         outmap['exitMsg'] = 'ERROR: DQWebDisplay.py execution problem! (STEP 4).'
@@ -433,7 +440,7 @@ def dq_combined_trf(jsonfile, outmap):
         return
       if productionMode == 'True': 
           try:
-              print 'Publishing to message service'
+              print('Publishing to message service')
               publish_success_to_mq(runnr, dqproject, stream, incr=(incr=='True'), ami=amitag, procpass=procnumber, hcfg=filepaths, isprod=(productionMode=='True'))
           except:
               outmap['exitCode'] = 106
@@ -442,21 +449,21 @@ def dq_combined_trf(jsonfile, outmap):
               traceback.print_exc()
               return
     else:
-      print "\n##################################################################"
-      print   "## WEB DISPLAY CREATION SKIPPED BY USER REQUEST"
-      print   "##################################################################\n"
-      print 'Web display off, not publishing to message service'
+      print("\n##################################################################")
+      print("## WEB DISPLAY CREATION SKIPPED BY USER REQUEST")
+      print("##################################################################\n")
+      print('Web display off, not publishing to message service')
   except: 
     outmap['exitCode'] = 106
     outmap['exitAcronym'] = 'TRF_DQMDISPLAY_EXE'
     outmap['exitMsg'] = 'ERROR: Failure in web-display creation procedure (STEP 4).'
-    print 'ERROR: Failure in web-display creation procedure (STEP 4).'
+    print('ERROR: Failure in web-display creation procedure (STEP 4).')
     traceback.print_exc()
     return
   
-  print "\n##################################################################"
-  print   "## STEP 5: finishing the job ..."
-  print   "##################################################################\n"
+  print("\n##################################################################")
+  print("## STEP 5: finishing the job ...")
+  print("##################################################################\n")
         
   # get info for report json file
   try:
@@ -464,20 +471,20 @@ def dq_combined_trf(jsonfile, outmap):
     # assemble job report map
     outmap['files']['output'][0]['dataset'] = histdsname
     outmap['files']['output'][0]['subFiles'] = outfiles
-    outmap['resource']['transform']['processedEvents'] = long(nevts)
+    outmap['resource']['transform']['processedEvents'] = int(nevts)
     return
   except: 
     outmap['exitCode'] = 107
     outmap['exitAcronym'] = 'TRF_JOBREPORT'
     outmap['exitMsg'] = 'ERROR: in job report creation (STEP 5)'
-    print "ERROR: in job report creation (STEP 5) !"
+    print("ERROR: in job report creation (STEP 5) !")
     traceback.print_exc()
     return
 
 def dq_trf_wrapper(jsonfile):
-  print "\n##################################################################"
-  print   "##              ATLAS Tier-0 Offline DQM Processing             ##"
-  print   "##################################################################\n"
+  print("\n##################################################################")
+  print("##              ATLAS Tier-0 Offline DQM Processing             ##")
+  print("##################################################################\n")
 
   outmap = { 'exitAcronym' : 'OK',
                'exitCode' : 0,
@@ -486,7 +493,7 @@ def dq_trf_wrapper(jsonfile):
                                          'subFiles' : [ {},
                                                       ]}
                                     ] },
-                'resource' : { 'transform' : { 'processedEvents' : 0L } }
+                'resource' : { 'transform' : { 'processedEvents' : 0 } }
                  }
 
   # dq_combined_trf will update outmap
@@ -500,14 +507,14 @@ def dq_trf_wrapper(jsonfile):
   f.close()
 
   # summarize status
-  print "\n## ... job finished with retcode : %s" % outmap['exitCode']
-  print   "## ... error acronym: ", outmap['exitAcronym']
-  print   "## ... job status message: ", outmap['exitMsg']
-  print   "## ... elapsed time: ", outmap['resource']['transform']['wallTime'], "sec"
-  print   "##"
-  print   "##################################################################"
-  print   "## End of job."
-  print   "##################################################################\n"
+  print("\n## ... job finished with retcode : %s" % outmap['exitCode'])
+  print("## ... error acronym: ", outmap['exitAcronym'])
+  print("## ... job status message: ", outmap['exitMsg'])
+  print("## ... elapsed time: ", outmap['resource']['transform']['wallTime'], "sec")
+  print("##")
+  print("##################################################################")
+  print("## End of job.")
+  print("##################################################################\n")
 
 
 ########################################
@@ -517,33 +524,32 @@ def dq_trf_wrapper(jsonfile):
 if __name__ == "__main__":
 
   if (len(sys.argv) != 2) and (not sys.argv[1].startswith('--argJSON=')) :
-    print "Input format wrong --- use "
-    print "   --argJSON=<json-dictionary containing input info> "
-    print "   with key/value pairs: "
-    print "     1) 'inputHistFiles': python list "
-    print "          ['datasetname#filename1', 'datasetname#filename2',...] (input dataset + file names) "
-    print "        or list of file dictionaries "
-    print "          [{'lfn':'fname1', 'checksum':'cks1', 'dsn':'dsn1', 'size':sz1, 'guid':'guid1', 'events':nevts1, ...}, " 
-    print "           {'lfn':'fname2', 'checksum':'cks2', 'dsn':'dsn2', 'size':sz2, 'guid':'guid2', 'events':nevts2, ...}, ...] "
-    print "     2) 'outputHistFile': string 'datasetname#filename' "
-    print "        (HIST output dataset name + file) "
-    print "     optional parameters: "
-    print "     3) 'incrementalMode': string ('True'/'False') "
-    print "        ('True': do incremental update of DQM webpages on top of existing statistics; "
-    print "         'False': create final DQM webpages, replace temporary ones) "
-    print "     4) 'postProcessing': string ('True'/'False', default: 'True') "
-    print "        ('False': run histogram merging and DQ assessment only; "
-    print "         'True': run additional post-processing step (fitting, etc.)) "
-    print "     5) 'procNumber': int (number of processing pass, e.g. 1,2, ...) "
-    print "     6) 'runNumber': int "  
-    print "     7) 'streamName': string (e.g., physics_IDCosmic, physics_Express, ...) "  
-    print "     8) 'projectTag': string (e.g., data10_7TeV, TrigDisplay)"
-    print "     9) 'allowCOOLUpload': string ('True'/'False', default: 'True')"
-    print "        ('True': allow upload of defects to database; "
-    print "         'False': do not upload defects to database)"
+    print("Input format wrong --- use ")
+    print("   --argJSON=<json-dictionary containing input info> ")
+    print("   with key/value pairs: ")
+    print("     1) 'inputHistFiles': python list ")
+    print("          ['datasetname#filename1', 'datasetname#filename2',...] (input dataset + file names) ")
+    print("        or list of file dictionaries ")
+    print("          [{'lfn':'fname1', 'checksum':'cks1', 'dsn':'dsn1', 'size':sz1, 'guid':'guid1', 'events':nevts1, ...}, ") 
+    print("           {'lfn':'fname2', 'checksum':'cks2', 'dsn':'dsn2', 'size':sz2, 'guid':'guid2', 'events':nevts2, ...}, ...] ")
+    print("     2) 'outputHistFile': string 'datasetname#filename' ")
+    print("        (HIST output dataset name + file) ")
+    print("     optional parameters: ")
+    print("     3) 'incrementalMode': string ('True'/'False') ")
+    print("        ('True': do incremental update of DQM webpages on top of existing statistics; ")
+    print("         'False': create final DQM webpages, replace temporary ones) ")
+    print("     4) 'postProcessing': string ('True'/'False', default: 'True') ")
+    print("        ('False': run histogram merging and DQ assessment only; ")
+    print("         'True': run additional post-processing step (fitting, etc.)) ")
+    print("     5) 'procNumber': int (number of processing pass, e.g. 1,2, ...) ")
+    print("     6) 'runNumber': int ")  
+    print("     7) 'streamName': string (e.g., physics_IDCosmic, physics_Express, ...) ")  
+    print("     8) 'projectTag': string (e.g., data10_7TeV, TrigDisplay)")
+    print("     9) 'allowCOOLUpload': string ('True'/'False', default: 'True')")
+    print("        ('True': allow upload of defects to database; ")
+    print("         'False': do not upload defects to database)")
     sys.exit(-1)
   
   else :
     jsonfile = sys.argv[1][len('--argJSON='):]
     dq_trf_wrapper(jsonfile)
-  
diff --git a/DataQuality/DataQualityUtils/scripts/DQM_Tier0Wrapper_trf.py b/DataQuality/DataQualityUtils/scripts/DQM_Tier0Wrapper_trf.py
index 173f2f5a4c16282668500610df01f3af701aa1df..b10788862cac2a058b985e8d3cc54f5cb1f1ddb7 100755
--- a/DataQuality/DataQualityUtils/scripts/DQM_Tier0Wrapper_trf.py
+++ b/DataQuality/DataQualityUtils/scripts/DQM_Tier0Wrapper_trf.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 #########################################################################
 ##
@@ -52,9 +52,16 @@
 ##    S. Kama (March 2011)
 #########################################################################
 
-import sys, string, commands, os.path, os, pickle, time, pprint, xmlrpclib
+from __future__ import print_function
+import sys, string, os.path, os, pickle, time, pprint
+from six.moves import xmlrpc_client as xmlrpclib
 #sami
 import hashlib
+import six
+if six.PY2:
+  from commands import getstatusoutput
+else:
+  from subprocess import getstatusoutput
 
 #########################################################################
 
@@ -73,7 +80,7 @@ def getFileMap(fname, dsname, nevts=0) :
   return map
 
 def publish_success_to_mq(run, ptag, stream, incr, ami, procpass, hcfg, isprod):
-  import stomp, json, os, ssl
+  import stomp, json, ssl
   from DataQualityUtils import stompconfig
   dest='/topic/atlas.dqm.progress'
   #conn=stomp.Connection([('atlas-mb.cern.ch', 61023)], use_ssl=True,
@@ -116,28 +123,28 @@ def genmd5sum(filename):
         md5summer.update(fs)
     finally:
         infil.close()
-  print "md5 sum of the \"%s\" is %s"%(filename,md5summer.hexdigest())
+  print("md5 sum of the \"%s\" is %s"%(filename,md5summer.hexdigest()))
   return
       
 def dq_combined_trf(picklefile):
   
   tstart = time.time()
 
-  print "\n##################################################################"
-  print   "##              ATLAS Tier-0 Offline DQM Processing             ##"
-  print   "##################################################################\n"
+  print("\n##################################################################")
+  print("##              ATLAS Tier-0 Offline DQM Processing             ##")
+  print("##################################################################\n")
 
-  print "\n##################################################################"
-  print   "## STEP 1: creating file with list of root files ..."
-  print   "##################################################################\n"
+  print("\n##################################################################")
+  print("## STEP 1: creating file with list of root files ...")
+  print("##################################################################\n")
 
   # extract parameters from pickle file
-  print "Using pickled file ", picklefile, " for input parameters"
+  print("Using pickled file ", picklefile, " for input parameters")
   f = open(picklefile, 'r')
   parmap = pickle.load(f)
   f.close()
 
-  print "\nFull Tier-0 run options:\n"
+  print("\nFull Tier-0 run options:\n")
   pprint.pprint(parmap)
 
   inputfilelist = parmap.get('inputHistFiles', [])
@@ -175,7 +182,7 @@ def dq_combined_trf(picklefile):
         nevt = fdict.get('events', 0)
         if nevt is None:
           nevt=0
-          print "WARNING Can't get number of events from input pickle file"
+          print("WARNING Can't get number of events from input pickle file")
         nevts+=nevt
   
     f = open('hist_merge.list', 'w')
@@ -186,14 +193,14 @@ def dq_combined_trf(picklefile):
     f.close()
 
     cmd = "cat hist_merge.list"
-    (s,o) = commands.getstatusoutput(cmd)
-    print "\nContents of file hist_merge.list:\n"
-    print o
+    (s,o) = getstatusoutput(cmd)
+    print("\nContents of file hist_merge.list:\n")
+    print(o)
     
 
-    print "\n##################################################################"
-    print   "## STEP 2: determining job parameters..."
-    print   "##################################################################\n"
+    print("\n##################################################################")
+    print("## STEP 2: determining job parameters...")
+    print("##################################################################\n")
 
     # output file
     histdsname = (parmap['outputHistFile']).split('#')[0]
@@ -249,7 +256,7 @@ def dq_combined_trf(picklefile):
     dqproject = parmap.get('projectTag', dqproject)
     
     # run number
-    if parmap.has_key('runNumber') : 
+    if 'runNumber' in parmap : 
       runnr = parmap['runNumber']
     else :
       try :
@@ -258,7 +265,7 @@ def dq_combined_trf(picklefile):
         runnr = 1234567890
 
     # stream name  
-    if parmap.has_key('streamName') : 
+    if 'streamName' in parmap : 
       stream = parmap['streamName']
     else :
       try :
@@ -268,7 +275,7 @@ def dq_combined_trf(picklefile):
     
     # processing pass number  
     MAX_XMLRPC_TRIES = 5 
-    if parmap.has_key('procNumber') : 
+    if 'procNumber' in parmap : 
       procnumber = parmap['procNumber']
     else :
       n_xmlrpc_tries = 1
@@ -279,31 +286,31 @@ def dq_combined_trf(picklefile):
           procnumber = xmlrpcserver.get_next_proc_pass(runnr, stream, 'tier0')
           break 
         except :
-          print 'Web service connection failed, attempt', n_xmlrpc_tries, 'of', MAX_XMLRPC_TRIES
+          print('Web service connection failed, attempt', n_xmlrpc_tries, 'of', MAX_XMLRPC_TRIES)
           n_xmlrpc_tries += 1
           if n_xmlrpc_tries <= MAX_XMLRPC_TRIES:
             time.sleep(20*2**n_xmlrpc_tries)
 
-    print "Job parameters:\n"
-    print "  Run number:      ", runnr
-    print "  Stream name:     ", stream
-    print "  Processing pass: ", procnumber
-    print "  Incremental mode:", incr
-    print "  Post-processing: ", postproc
-    print "  COOL uploads:    ", allowCOOLUpload
-    print "  Production mode: ", productionMode
+    print("Job parameters:\n")
+    print("  Run number:      ", runnr)
+    print("  Stream name:     ", stream)
+    print("  Processing pass: ", procnumber)
+    print("  Incremental mode:", incr)
+    print("  Post-processing: ", postproc)
+    print("  COOL uploads:    ", allowCOOLUpload)
+    print("  Production mode: ", productionMode)
     
 
-    print "\n##################################################################"
-    print   "## STEP 3: running histogram merging procedure ..."
-    print   "##################################################################\n"
+    print("\n##################################################################")
+    print("## STEP 3: running histogram merging procedure ...")
+    print("##################################################################\n")
 
     # environment setting
     os.environ['DQPRODUCTION'] = '1' if productionMode == 'True' else '0'
     os.environ['DQ_STREAM'] = stream
-    print "Setting env variable DQPRODUCTION to %s\n" % os.environ['DQPRODUCTION']
+    print("Setting env variable DQPRODUCTION to %s\n" % os.environ['DQPRODUCTION'])
     os.environ['COOLUPLOADS'] = '1' if allowCOOLUpload == 'True' and productionMode == 'True' else '0'
-    print "Setting env variable COOLUPLOADS to %s\n" % os.environ['COOLUPLOADS']
+    print("Setting env variable COOLUPLOADS to %s\n" % os.environ['COOLUPLOADS'])
     
     if postproc == 'True' :
       if incr == 'True':
@@ -313,77 +320,77 @@ def dq_combined_trf(picklefile):
     else :  
       cmd = "python -u `which DQHistogramMerge.py` hist_merge.list %s 0 0 %d %d"    % (histfile,histMergeCompressionLevel,histMergeDebugLevel)
     
-    print "Histogram merging command:\n"
-    print cmd
-    print "\n##################################################################\n"
+    print("Histogram merging command:\n")
+    print(cmd)
+    print("\n##################################################################\n")
     
-    print "## ... logfile from DQHistogramMerge.py: "
-    print "--------------------------------------------------------------------------------"
+    print("## ... logfile from DQHistogramMerge.py: ")
+    print("--------------------------------------------------------------------------------")
     # execute command
     retcode1 = os.system(cmd)
-    print "--------------------------------------------------------------------------------"
+    print("--------------------------------------------------------------------------------")
     t1 = time.time()
     dt1 = int(t1 - tstart)
     
-    print "\n## DQHistogramMerge.py finished with retcode = %s" % retcode1
-    print   "## ... elapsed time: ", dt1, " sec"
+    print("\n## DQHistogramMerge.py finished with retcode = %s" % retcode1)
+    print("## ... elapsed time: ", dt1, " sec")
 
     if retcode1 == 0 :
       if postproc == 'True' and incr == 'False':
-        print "\n##################################################################"
-        print "## STEP 3b: copying postprocessing output to AFS ..."
-        print "##################################################################\n"
+        print("\n##################################################################")
+        print("## STEP 3b: copying postprocessing output to AFS ...")
+        print("##################################################################\n")
 
         cmd = "python -u `which DQFileMove.py` %s %s_%s_%s" % (dqproject, runnr, stream, procnumber)
 
-        print "File move command:\n"
-        print cmd
-        print "\n##################################################################\n"
+        print("File move command:\n")
+        print(cmd)
+        print("\n##################################################################\n")
 
-        print "## ... logfile from DQFileMove.py: "
-        print "--------------------------------------------------------------------------------"
+        print("## ... logfile from DQFileMove.py: ")
+        print("--------------------------------------------------------------------------------")
         # execute command
         retcode1b = os.system(cmd)
-        print "--------------------------------------------------------------------------------"
+        print("--------------------------------------------------------------------------------")
         t1b = time.time()
         dt1b = int(t1b - t1)
         t1 = t1b
 
-        print "\n## DQFileMove.py finished with retcode = %s" % retcode1b
-        print   "## ... elapsed time: ", dt1b, " sec"
+        print("\n## DQFileMove.py finished with retcode = %s" % retcode1b)
+        print("## ... elapsed time: ", dt1b, " sec")
 
       if doWebDisplay == 'True':
-        print "\n##################################################################"
-        print   "## STEP 4: running web-display creation procedure ..."
-        print   "##################################################################\n"
+        print("\n##################################################################")
+        print("## STEP 4: running web-display creation procedure ...")
+        print("##################################################################\n")
 
         cmd = "python -u `which DQWebDisplay.py` %s %s %s %s stream=%s" % (histfile, dqproject, procnumber, incr, stream)
 
-        print "Web display creation command:\n"
-        print cmd
-        print "\n##################################################################\n"
+        print("Web display creation command:\n")
+        print(cmd)
+        print("\n##################################################################\n")
 
-        print "## ... logfile from DQWebDisplay.py: "
-        print "--------------------------------------------------------------------------------"
+        print("## ... logfile from DQWebDisplay.py: ")
+        print("--------------------------------------------------------------------------------")
         # execute command
         retcode2 = os.system(cmd)
-        print 'DO NOT REPORT "Error in TH1: cannot merge histograms" ERRORS! THESE ARE IRRELEVANT!'
-        print "--------------------------------------------------------------------------------"
+        print('DO NOT REPORT "Error in TH1: cannot merge histograms" ERRORS! THESE ARE IRRELEVANT!')
+        print("--------------------------------------------------------------------------------")
         t2 = time.time()
         dt2 = int(t2 - t1)
 
-        print "\n## DQWebDisplay.py finished with retcode = %s" % retcode2
-        print   "## ... elapsed time: ", dt2, " sec"
+        print("\n## DQWebDisplay.py finished with retcode = %s" % retcode2)
+        print("## ... elapsed time: ", dt2, " sec")
       else:
-        print "\n##################################################################"
-        print   "## WEB DISPLAY CREATION SKIPPED BY USER REQUEST"
-        print   "##################################################################\n"
+        print("\n##################################################################")
+        print("## WEB DISPLAY CREATION SKIPPED BY USER REQUEST")
+        print("##################################################################\n")
         retcode2 = 0
         dt2 = 0
 
-    print "\n##################################################################"
-    print   "## STEP 5: finishing the job ..."
-    print   "##################################################################\n"
+    print("\n##################################################################")
+    print("## STEP 5: finishing the job ...")
+    print("##################################################################\n")
 
     # assemble report gpickle file
     outfiles = []
@@ -402,13 +409,13 @@ def dq_combined_trf(picklefile):
         outfiles = [histmap]
         dt += dt2
         if doWebDisplay == 'True':
-          print 'Publishing to message service'
+          print('Publishing to message service')
           publish_success_to_mq(runnr, dqproject, stream, incr=(incr=='True'), ami=amitag, procpass=procnumber, hcfg=filepaths, isprod=(productionMode=='True'))
         else:
-          print 'Web display off, not publishing to message service'
+          print('Web display off, not publishing to message service')
       else :
         txt = 'DQWebDisplay.py execution problem'  
-        print "ERROR: DQWebDisplay.py execution problem!"
+        print("ERROR: DQWebDisplay.py execution problem!")
         retcode = retcode2
         acronym = 'TRF_DQMDISPLAY_EXE'
         try:
@@ -419,7 +426,7 @@ def dq_combined_trf(picklefile):
           infilelist.close()
         genmd5sum(histfile)
     else :
-      print "ERROR: DQHistogramMerge.py execution problem!"
+      print("ERROR: DQHistogramMerge.py execution problem!")
       retcode = retcode1
       acronym = 'TRF_DQMHISTMERGE_EXE'
       dt = 0
@@ -433,10 +440,10 @@ def dq_combined_trf(picklefile):
       genmd5sum(histfile)
       DQResFile="DQResourceUtilization.txt"
       if os.path.exists(DQResFile):
-        print "dumping resource utilization log"
+        print("dumping resource utilization log")
         with open(DQResFile) as resfile:
           for resline in resfile:
-            print resline,
+            print(resline, end=' ')
                   
     # assemble job report map
     reportmap = { 'prodsys': { 'trfCode': retcode,
@@ -453,13 +460,13 @@ def dq_combined_trf(picklefile):
   pickle.dump(reportmap, f)
   f.close()
 
-  print "\n## ... job finished with retcode : %s" % reportmap['prodsys']['trfCode']
-  print   "## ... error acronym: ", reportmap['prodsys']['trfAcronym']
-  print   "## ... elapsed time: ", reportmap['prodsys']['more']['num2'], "sec"
-  print   "##"
-  print   "##################################################################"
-  print   "## End of job."
-  print   "##################################################################\n"
+  print("\n## ... job finished with retcode : %s" % reportmap['prodsys']['trfCode'])
+  print("## ... error acronym: ", reportmap['prodsys']['trfAcronym'])
+  print("## ... elapsed time: ", reportmap['prodsys']['more']['num2'], "sec")
+  print("##")
+  print("##################################################################")
+  print("## End of job.")
+  print("##################################################################\n")
 
 
 ########################################
@@ -469,33 +476,32 @@ def dq_combined_trf(picklefile):
 if __name__ == "__main__":
 
   if (len(sys.argv) != 2) and (not sys.argv[1].startswith('--argdict=')) :
-    print "Input format wrong --- use "
-    print "   --argdict=<pickled-dictionary containing input info> "
-    print "   with key/value pairs: "
-    print "     1) 'inputHistFiles': python list "
-    print "          ['datasetname#filename1', 'datasetname#filename2',...] (input dataset + file names) "
-    print "        or list of file dictionaries "
-    print "          [{'lfn':'fname1', 'checksum':'cks1', 'dsn':'dsn1', 'size':sz1, 'guid':'guid1', 'events':nevts1, ...}, " 
-    print "           {'lfn':'fname2', 'checksum':'cks2', 'dsn':'dsn2', 'size':sz2, 'guid':'guid2', 'events':nevts2, ...}, ...] "
-    print "     2) 'outputHistFile': string 'datasetname#filename' "
-    print "        (HIST output dataset name + file) "
-    print "     optional parameters: "
-    print "     3) 'incrementalMode': string ('True'/'False') "
-    print "        ('True': do incremental update of DQM webpages on top of existing statistics; "
-    print "         'False': create final DQM webpages, replace temporary ones) "
-    print "     4) 'postProcessing': string ('True'/'False', default: 'True') "
-    print "        ('False': run histogram merging and DQ assessment only; "
-    print "         'True': run additional post-processing step (fitting, etc.)) "
-    print "     5) 'procNumber': int (number of processing pass, e.g. 1,2, ...) "
-    print "     6) 'runNumber': int "  
-    print "     7) 'streamName': string (e.g., physics_IDCosmic, physics_Express, ...) "  
-    print "     8) 'projectTag': string (e.g., data10_7TeV, TrigDisplay)"
-    print "     9) 'allowCOOLUpload': string ('True'/'False', default: 'True')"
-    print "        ('True': allow upload of defects to database; "
-    print "         'False': do not upload defects to database)"
+    print("Input format wrong --- use ")
+    print("   --argdict=<pickled-dictionary containing input info> ")
+    print("   with key/value pairs: ")
+    print("     1) 'inputHistFiles': python list ")
+    print("          ['datasetname#filename1', 'datasetname#filename2',...] (input dataset + file names) ")
+    print("        or list of file dictionaries ")
+    print("          [{'lfn':'fname1', 'checksum':'cks1', 'dsn':'dsn1', 'size':sz1, 'guid':'guid1', 'events':nevts1, ...}, ") 
+    print("           {'lfn':'fname2', 'checksum':'cks2', 'dsn':'dsn2', 'size':sz2, 'guid':'guid2', 'events':nevts2, ...}, ...] ")
+    print("     2) 'outputHistFile': string 'datasetname#filename' ")
+    print("        (HIST output dataset name + file) ")
+    print("     optional parameters: ")
+    print("     3) 'incrementalMode': string ('True'/'False') ")
+    print("        ('True': do incremental update of DQM webpages on top of existing statistics; ")
+    print("         'False': create final DQM webpages, replace temporary ones) ")
+    print("     4) 'postProcessing': string ('True'/'False', default: 'True') ")
+    print("        ('False': run histogram merging and DQ assessment only; ")
+    print("         'True': run additional post-processing step (fitting, etc.)) ")
+    print("     5) 'procNumber': int (number of processing pass, e.g. 1,2, ...) ")
+    print("     6) 'runNumber': int ")  
+    print("     7) 'streamName': string (e.g., physics_IDCosmic, physics_Express, ...) ")  
+    print("     8) 'projectTag': string (e.g., data10_7TeV, TrigDisplay)")
+    print("     9) 'allowCOOLUpload': string ('True'/'False', default: 'True')")
+    print("        ('True': allow upload of defects to database; ")
+    print("         'False': do not upload defects to database)")
     sys.exit(-1)
   
   else :
     picklefile = sys.argv[1][len('--argdict='):]
     dq_combined_trf(picklefile)
-  
diff --git a/DataQuality/DataQualityUtils/scripts/DQPostProcessTest.py b/DataQuality/DataQualityUtils/scripts/DQPostProcessTest.py
index 7ce2ec2cb3ab318415d687484c99216087fdf961..0098b62330ddd2f12ba85068a9db4903ac1c8e8e 100644
--- a/DataQuality/DataQualityUtils/scripts/DQPostProcessTest.py
+++ b/DataQuality/DataQualityUtils/scripts/DQPostProcessTest.py
@@ -1,16 +1,11 @@
 #!/usr/bin/env python
 
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
-## *****************************************************************************
-VERSION = '$Id: DQPostProcessTest.py 696014 2015-09-21 16:35:54Z tuna $'
-## *****************************************************************************
-
-#import DataQualityUtils.DQHistogramMergeMod as mod
+from __future__ import print_function
 
 import sys
 
-import ROOT
 from ROOT import gROOT
 gROOT.SetBatch(True)
 
@@ -46,7 +41,7 @@ if len(sys.argv) == 3:
   if sys.argv[2] == "True" or sys.argv[2] == "1":
     isIncremental = True
 
-print '==================== Starting first round of checks ====================\n\n'
+print('==================== Starting first round of checks ====================\n\n')
 
 mf.fitMergedFile_IDPerfMonManager(outFile, isIncremental)
 mf.fitMergedFile_DiMuMonManager(outFile, isIncremental)
@@ -69,8 +64,8 @@ mf.L1CaloPostProcess(outFile, isIncremental)
 mf.SCTPostProcess(outFile, isIncremental)
 mf.VxMon_move(outFile, isIncremental) # may print a harmless error message about write access to EOS
 
-print '\n\n====================== First round of checks are completed============='
-print '==================== Starting second round ====================\n\n'
+print('\n\n====================== First round of checks are completed=============')
+print('==================== Starting second round ====================\n\n')
 
 mf.fitMergedFile_IDPerfMonManager(outFile, isIncremental)
 mf.fitMergedFile_DiMuMonManager(outFile, isIncremental)
@@ -93,5 +88,5 @@ mf.L1CaloPostProcess(outFile, isIncremental)
 mf.SCTPostProcess(outFile, isIncremental)
 mf.VxMon_move(outFile, isIncremental) # may print a harmless error message about write access to EOS
 
-print '\n\n====================== Second round of checks are completed============='
-print 'Postprocessing code should run in T0 without crashes '
+print('\n\n====================== Second round of checks are completed=============')
+print('Postprocessing code should run in T0 without crashes ')
diff --git a/DataQuality/DataQualityUtils/scripts/DQWebDisplay.py b/DataQuality/DataQualityUtils/scripts/DQWebDisplay.py
index b6f79c0f2856ddbcaf66007f4ffb7fe9fea1a544..b0fa551233988dc1d97bcfb4a95194e430e9290a 100755
--- a/DataQuality/DataQualityUtils/scripts/DQWebDisplay.py
+++ b/DataQuality/DataQualityUtils/scripts/DQWebDisplay.py
@@ -1,11 +1,9 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
-## *****************************************************************************
-VERSION = '$Id: DQWebDisplay.py 690278 2015-08-19 22:18:53Z ponyisi $'
-## *****************************************************************************
+from __future__ import print_function
 
 import os
 
@@ -25,29 +23,29 @@ import ROOT
 ## Importing gSystem may change the current directory to one of the
 ## command-line arguments; chdir to original directory to have
 ## predictable behavior
-from ROOT import gSystem
+from ROOT import gSystem  # noqa: F401
 os.chdir(CWD)
 
 from DataQualityUtils.DQWebDisplayMod import DQWebDisplay
 
 def importConfiguration(modname):
     from DataQualityConfigurations import getmodule
-    print 'getting configuration', modname
+    print('getting configuration', modname)
     return getmodule(modname)
 
 def usage():
   cmdi = sys.argv[0].rfind("/")
   cmd = sys.argv[0][cmdi+1:]
-  print ""
-  print "Usage: ", cmd, "<data_file> <config> <processing_version> [run_accumulating [conditions_string]]"
-  print ""
-  print "This is a production utility; use TEST config for development and testing."
-  print ""
-  print "Processing version is an integer, starting from 1 (not 0)"
-  print ""
+  print("")
+  print("Usage: ", cmd, "<data_file> <config> <processing_version> [run_accumulating [conditions_string]]")
+  print("")
+  print("This is a production utility; use TEST config for development and testing.")
+  print("")
+  print("Processing version is an integer, starting from 1 (not 0)")
+  print("")
 
 if __name__ == "__main__":
-  print len(sys.argv)
+  print(len(sys.argv))
   if len(sys.argv) < 5 or len(sys.argv) > 7:
     usage()
     sys.exit(64)
@@ -59,7 +57,7 @@ if __name__ == "__main__":
       runAccumulating = True
   
   if len(sys.argv) == 7:
-      print 'Setting condition', sys.argv[5]
+      print('Setting condition', sys.argv[5])
       ROOT.gSystem.Load('libDataQualityInterfaces')
       ROOT.dqi.ConditionsSingleton.getInstance().setCondition(sys.argv[5])
 
@@ -84,16 +82,15 @@ if __name__ == "__main__":
   
   try:
     cmod = importConfiguration(configModule)
-  except Exception, e:
-    print "Could not import configuration module \'" + configModule + "\'"
+  except Exception as e:
+    print("Could not import configuration module \'" + configModule + "\'")
     sys.exit(1)
 
   try:
     config = cmod.dqconfig
-  except Exception, e:
-    print "Configuration object 'dqconfig' not defined in module \'" + configModule + "\'"
+  except Exception as e:
+    print("Configuration object 'dqconfig' not defined in module \'" + configModule + "\'")
     sys.exit(1)
 
   
   DQWebDisplay( inputFile, runAccumulating, config )
-
diff --git a/DataQuality/DataQualityUtils/scripts/DeMoLib.py b/DataQuality/DataQualityUtils/scripts/DeMoLib.py
index 5efa69354aadd20a9db26ed338260551c407de18..fe292d2b82061b3c1038f9978ac3eace9baf2e58 100644
--- a/DataQuality/DataQualityUtils/scripts/DeMoLib.py
+++ b/DataQuality/DataQualityUtils/scripts/DeMoLib.py
@@ -1,10 +1,10 @@
-# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 # Author : Benjamin Trocme (LPSC - Grenoble) - 2017
 # Auxiliary libraries used DemoUpdate, DeMoStatus and DemoScan
 ##################################################################
 
 from ROOT import THStack
-from ROOT import TCanvas,TLegend
+from ROOT import TCanvas
 from ROOT import kYellow,kOrange,kRed,kBlue,kPink,kMagenta,kGreen,kSpring,kViolet,kAzure,kCyan,kTeal,kBlack
 
 #import gb
@@ -234,7 +234,7 @@ def initialize(system,yearTag,partitions,defects0,defectVeto,veto,signOff,year =
 #################################### Pixel defects
   if system == "Pixel":
     partitions["color"] = {'IBL':kYellow-9,'LAYER0':kYellow,'BARREL':kOrange,'ENDCAPC':kOrange-3,'ENDCAPA':kRed-3}
-    partitions["list"] = partitions["color"].keys()
+    partitions["list"] = list(partitions["color"])
 
     defects0["prefix"] = ["PIXEL"]
     # Partition intolerable and tolerable defects - Order determines what defect is proeminent
@@ -268,7 +268,7 @@ def initialize(system,yearTag,partitions,defects0,defectVeto,veto,signOff,year =
 # https://twiki.cern.ch/twiki/bin/view/Atlas/SCTOfflineMonitoringShifts#List_of_Defects
   if system == "SCT":
     partitions["color"] = {}
-    partitions["list"] = partitions["color"].keys()
+    partitions["list"] = list(partitions["color"])
 
     defects0["prefix"] = ["SCT"]
     # Partition intolerable and tolerable defects - Order determines what defect is proeminent
@@ -322,10 +322,10 @@ def initialize(system,yearTag,partitions,defects0,defectVeto,veto,signOff,year =
 # https://twiki.cern.ch/twiki/bin/view/Atlas/TRTDQDefects
   if system == "TRT":
     partitions["color"] = {}
-    partitions["list"] = partitions["color"].keys()
+    partitions["list"] = list(partitions["color"].keys())
 
     defects0["prefix"] = ["TRT"]
-    # Partition intolerable and tolerable defects - Order determines what defect is proeminent
+    # Partition intolerable and tolerable defects - Order determines what defect is proeminent
     defects0["partIntol"] = []
     defects0["partTol"] = []
     # Global intolerable and tolerable defects
@@ -377,7 +377,7 @@ def initialize(system,yearTag,partitions,defects0,defectVeto,veto,signOff,year =
     similarTags["Reproc_2018.roughVeto"]="/2018 Reproc. (rough veto)"
 
     partitions["color"] = { 'EMBA':kYellow-9,'EMBC':kYellow,'EMECA':kOrange,'EMECC':kOrange-3,'HECA':kRed-3,'HECC':kRed+2,'FCALA':kBlue-3,'FCALC':kBlue+2}
-    partitions["list"] = partitions["color"].keys()
+    partitions["list"] = list(partitions["color"])
 
     defects0["prefix"] = ["LAR","CALO_ONLINEDB"]
     # Partition intolerable and tolerable defects - Order determines what defect is proeminent
@@ -441,7 +441,7 @@ def initialize(system,yearTag,partitions,defects0,defectVeto,veto,signOff,year =
 # https://twiki.cern.ch/twiki/bin/view/Atlas/TileDQLeaderManual#Global_Tile_Defects
   if system == "Tile":
     partitions["color"] = { 'EBA':kYellow-9,'EBC':kYellow,'LBA':kOrange,'LBC':kOrange-3}
-    partitions["list"] = partitions["color"].keys()
+    partitions["list"] = list(partitions["color"])
 
     defects0["prefix"] = ["TILE"]
     # Partition intolerable and tolerable defects - Order determines what defect is proeminent
@@ -476,7 +476,7 @@ def initialize(system,yearTag,partitions,defects0,defectVeto,veto,signOff,year =
   if system == "CSC":
     partitions["color"] = {"EA":kYellow-9,'EC':kRed-3}
                            
-    partitions["list"] = partitions["color"].keys()
+    partitions["list"] = list(partitions["color"])
 
     defects0["prefix"] = ["MS_CSC"]
     # Partition intolerable and tolerable defects - Order determines what defect is proeminent
@@ -506,7 +506,7 @@ def initialize(system,yearTag,partitions,defects0,defectVeto,veto,signOff,year =
   if system == "MDT":
     partitions["color"] = {"EA":kYellow-9,'EC':kRed-3,'BA':kBlue-3,'BC':kOrange-3}
                            
-    partitions["list"] = partitions["color"].keys()
+    partitions["list"] = list(partitions["color"])
 
     defects0["prefix"] = ["MS_MDT"]
     # Partition intolerable and tolerable defects - Order determines what defect is proeminent
@@ -536,7 +536,7 @@ def initialize(system,yearTag,partitions,defects0,defectVeto,veto,signOff,year =
   if system == "RPC":
     partitions["color"] = {'BA':kBlue-3,'BC':kOrange-3}
                            
-    partitions["list"] = partitions["color"].keys()
+    partitions["list"] = list(partitions["color"])
 
     defects0["prefix"] = ["MS_RPC"]
     # Partition intolerable and tolerable defects - Order determines what defect is proeminent
@@ -572,7 +572,7 @@ def initialize(system,yearTag,partitions,defects0,defectVeto,veto,signOff,year =
   if system == "TGC":
     partitions["color"] = {"EA":kYellow-9,'EC':kRed-3}
 
-    partitions["list"] = partitions["color"].keys()
+    partitions["list"] = list(partitions["color"])
 
     defects0["prefix"] = ["MS_TGC"]
     # Partition intolerable and tolerable defects - Order determines what defect is proeminent
@@ -627,7 +627,7 @@ def initialize(system,yearTag,partitions,defects0,defectVeto,veto,signOff,year =
 #################################### ID defects
   if system == "IDGlobal":
     partitions["color"] = {}
-    partitions["list"] = partitions["color"].keys()
+    partitions["list"] = list(partitions["color"])
 
     defects0["prefix"] = ["ID"]
     # Partition intolerable and tolerable defects - Order determines what defect is proeminent
@@ -664,7 +664,7 @@ def initialize(system,yearTag,partitions,defects0,defectVeto,veto,signOff,year =
                             'B':kYellow-9,'CR':kRed-3,'E':kBlue-3, # Tau partitions
                             'CALB':kYellow-9,'CALEA':kRed-3,'CALC':kBlue-3} # CaloGlobal partitions
                             
-    partitions["list"] = partitions["color"].keys()
+    partitions["list"] = list(partitions["color"])
 
     defects0["prefix"] = ["JET","EGAMMA","MET","TAU","CALO_"]
     # Partition intolerable and tolerable defects - Order determines what defect is proeminent
@@ -708,7 +708,7 @@ def initialize(system,yearTag,partitions,defects0,defectVeto,veto,signOff,year =
   if system == "BTag":
     partitions["color"] = { } # No partition needed
                             
-    partitions["list"] = partitions["color"].keys()
+    partitions["list"] = list(partitions["color"])
 
     defects0["prefix"] = ["BTAG"]
     # Partition intolerable and tolerable defects - Order determines what defect is proeminent
@@ -747,7 +747,7 @@ def initialize(system,yearTag,partitions,defects0,defectVeto,veto,signOff,year =
 # https://twiki.cern.ch/twiki/bin/view/Atlas/DataQualityTriggerDefects
   if system == "Trig_L1":
     partitions["color"] = {}
-    partitions["list"] = partitions["color"].keys()
+    partitions["list"] = list(partitions["color"])
 
     defects0["prefix"] = ["TRIG_L1"]
     # Partition intolerable and tolerable defects - Order determines what defect is proeminent
@@ -785,7 +785,7 @@ def initialize(system,yearTag,partitions,defects0,defectVeto,veto,signOff,year =
 #################################### Trig_HLT defects
   if system == "Trig_HLT":
     partitions["color"] = {}
-    partitions["list"] = partitions["color"].keys()
+    partitions["list"] = list(partitions["color"])
 
     defects0["prefix"] = ["TRIG_HLT"]
     # Partition intolerable and tolerable defects - Order determines what defect is proeminent
@@ -827,7 +827,7 @@ def initialize(system,yearTag,partitions,defects0,defectVeto,veto,signOff,year =
       defectVeto["description"][iDef] = iDef
 
 # Define color if not yet done
-  if not (defectVeto.has_key("color")):
+  if not ("color" in defectVeto):
     colors = [kBlue-4,kOrange-7,kTeal+1,kRed+1,kMagenta+2,kPink-3,kYellow+1,kGreen-2,kSpring-6,kViolet-4,kAzure-8,kCyan+1,
               kBlue-2,kOrange+1,kTeal+7,kRed+3,kMagenta-2,kPink+1,kYellow-1,kGreen+4,kSpring-2,kViolet+1,kAzure-2,kCyan-5,
               kBlue+2,kOrange+5,kTeal-4,kRed-5,kMagenta-6,kPink+6,kYellow-5,kGreen-6,kSpring+4,kViolet+6,kAzure+4,kCyan+4,]
@@ -843,7 +843,7 @@ def initialize(system,yearTag,partitions,defects0,defectVeto,veto,signOff,year =
     baseTag = iSimilar.split(".")[0]
     yearTag["description"][iSimilar] = similarTags[iSimilar]
     yearTag["defect"][iSimilar] = yearTag["defect"][baseTag] 
-    if (yearTag["veto"].has_key(baseTag)):
+    if (baseTag in yearTag["veto"]):
       yearTag["veto"][iSimilar] = yearTag["veto"][baseTag] 
 
   return True
diff --git a/DataQuality/DataQualityUtils/scripts/DeMoScan.py b/DataQuality/DataQualityUtils/scripts/DeMoScan.py
index ed40dabc3608415018f1be28320d7b78719fe801..279aa5a7a3ba72963d28ddd1d4c89f9a93b58054 100644
--- a/DataQuality/DataQualityUtils/scripts/DeMoScan.py
+++ b/DataQuality/DataQualityUtils/scripts/DeMoScan.py
@@ -1,20 +1,20 @@
 #! /usr/bin/env python
-# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 # Author : Benjamin Trocme (LPSC - Grenoble) - 2017
 # Displays the run affected per defect type
 # Perform run by run differences for difference tags
 ##################################################################
 
+from __future__ import print_function
 import os,sys  
 from math import fabs
 from re import match
 from time import strftime,gmtime
 
 from ROOT import TFile
-from ROOT import TH1F
 from ROOT import TCanvas
 from ROOT import kTeal
-from ROOT import gStyle,gROOT,gPad
+from ROOT import gStyle,gPad
 
 
 sys.path.append("/afs/cern.ch/user/l/larmon/public/prod/Misc")
@@ -30,7 +30,6 @@ debug = False
 ########################################################################
 ########################################################################
 # Main script
-import os,sys  
 
 from argparse import RawTextHelpFormatter,ArgumentParser
 
@@ -89,21 +88,21 @@ for iYear in args.parser_year:
           runGRL[yearTag].append(int(iRun)) # used only to determine if a run belongs to GRL in recap defects - Data in loss*.txt file NOT reliable
         fRunList.close()
       else:
-        print "No GRL list found... Please create it"
+        print("No GRL list found... Please create it")
         sys.exit()
 
 if len(args.parser_year) == 1:
   singleYear = True
 else:
   singleYear = False
-  if (options['plotDiff2tags']):
-    print "To compare two tags, you must choose only one year. Exiting..."
+  if args.parser_diff2tags:
+    print("To compare two tags, you must choose only one year. Exiting...")
     sys.exit()
 
 yearTagList.sort()
 
 if len(yearTagList) == 0:
-  print "No year / tag matching - Please check YearStats directory"
+  print("No year / tag matching - Please check YearStats directory")
   sys.exit()
 
 options = {}
@@ -114,9 +113,9 @@ if options['defect'] == [""] and options['veto'] == [""]:
   options['veto'] = veto["all"]
 else:
   if options['defect'][0] not in grlDef["intol"] and options['veto'][0] not in veto["all"]:
-    print "Defect/veto not found. Please check..."
-    print "Defect: ",grlDef["intol"] 
-    print "Veto: ",veto["all"]
+    print("Defect/veto not found. Please check...")
+    print("Defect: ",grlDef["intol"]) 
+    print("Veto: ",veto["all"])
     sys.exit()
   if options['defect'] == [""]:
     options['defect'] = []
@@ -148,8 +147,8 @@ if (options['plotDiff2tags'] and options['restrictTagRuns'] in yearTagProperties
   for iline in fRuns.readlines():
     runsFilter.append(int(iline))
   fRuns.close()
-  print "I am considering only the %d runs of %s"%(len(runsFilter),options['restrictTagRuns'])
-  print runsFilter
+  print("I am considering only the %d runs of %s"%(len(runsFilter),options['restrictTagRuns']))
+  print(runsFilter)
 
 options['minLumiYearStatsDefect'] = args.parser_minLumiLPR
 options['retrieveComments'] = args.parser_retrieveComments
@@ -213,7 +212,7 @@ h1_loss_rLPR = {}
 atlasReady = {}
 
 for iYT in yearTagList:
-  print "I am treating the following year/tag:%s"%iYT
+  print("I am treating the following year/tag:%s"%iYT)
 
   canvasResults[iYT] = {}
   legendResults[iYT] = {}
@@ -223,7 +222,7 @@ for iYT in yearTagList:
   if options['plotDiff2tags']:
     yearStatsArchiveFilename = '%s/TProfiles.root'%(yearTagDir[iYT])
     if not (os.path.exists(yearStatsArchiveFilename)):
-      print "No %s found - > Skipping"%yearStatsArchiveFilename
+      print("No %s found - > Skipping"%yearStatsArchiveFilename)
       continue
     
     file[iYT] = TFile(yearStatsArchiveFilename)
@@ -254,18 +253,17 @@ for iYT in yearTagList:
         
       # Check if a dat loss file is associated to this veto/defect
       if os.path.exists(lossFileName):
-        print "I am reading the %s file"%lossFileName
+        print("I am reading the %s file"%lossFileName)
         runsLPR[iYT][iDefVeto] = []
         lossLPR[iYT][iDefVeto] = []
         loss_rLPR[iYT][iDefVeto] = []
         f2 = open(lossFileName,'r')
-        tmpLines = f2.readlines()
-        tmpLines.sort()
+        tmpLines = sorted(f2.readlines())
         for iline in tmpLines: # Loop on all lines of the loss-[defect/veto].dat files
           if defVetoType[iDefVeto] == "Intolerable defect":
-            read = match("(\d+) \((\d+) ub-1.*\) -> (\d+.\d+) pb-1 \D+(\d+.\d+)\D+",iline)
+            read = match(r"(\d+) \((\d+) ub-1.*\) -> (\d+.\d+) pb-1 \D+(\d+.\d+)\D+",iline)
           else:# Veto loss is never recoverable (not tolerable defects)
-            read = match("(\d+) \((\d+) ub-1.*\) -> (\d+.\d+) pb-1",iline)
+            read = match(r"(\d+) \((\d+) ub-1.*\) -> (\d+.\d+) pb-1",iline)
           # retrieve the run number
           runnumber = int(read.group(1))
           # If the runs filter is activated (i.e. runsFilter != 0), check if the runs must be filtered
@@ -291,7 +289,7 @@ for iYT in yearTagList:
             lossLPR[iYT][iDefVeto].append(lostLumi)
             loss_rLPR[iYT][iDefVeto].append(recovLumi)
             if options['retrieveComments'] and "defect" in defVetoType[iDefVeto]: # retrieve comments for defects
-              print "@%d"%(runnumber)
+              print("@%d"%(runnumber))
               db = DefectsDB(tag=yearTagProperties["defect"][yearTagTag[iYT]])
               system_defects = []
               for iPrefix in grlDef["prefix"]:
@@ -311,7 +309,7 @@ for iYT in yearTagList:
                         defectUntilLumiAtlasReady = iLumiBlock+1
                         if defectSinceLumiAtlasReady == -1:
                           defectSinceLumiAtlasReady = iLumiBlock
-                    print defectSinceLumiAtlasReady,defectUntilLumiAtlasReady
+                    print(defectSinceLumiAtlasReady,defectUntilLumiAtlasReady)
                     if defectSinceLumiAtlasReady == -1: # Whole defect was outside ATLAS ready - Skip it
                       continue
                     
@@ -403,7 +401,7 @@ if options['plotLossPerRun'] and options['retrieveComments']:
   for iDef in options['defect']:
     if (iDef in h1_lossLPR[iYT].keys()): # This protection is needed as defRecap may have duplication in some rare cases. See Muon system with "MDT_ROD_PROBLEM_1" and "RPC_PROBLEM_1"
       if ("b-1" in defRecap[iDef]):# At least one data loss in the whole YearStats for this defect 
-        print defRecap[iDef]
+        print(defRecap[iDef])
         f.write(defRecap[iDef])
         fHtml.write("%s</tr>"%defRecapHtml[iDef].replace("LUMILOSTTOBEREPLACED",strLumi(h1_lossLPR[iYT][iDef].Integral(),"pb^{-1}")))
         if options['savePage1']:
@@ -422,17 +420,17 @@ if options['plotLossPerRun'] and options['retrieveComments']:
 
 # Compare defects/veto run by run (if the year is the same for both) 
 if (len(yearTagList) == 2 and options['plotDiff2tags'] and singleYear):
-  print "I am now comparing run by run the defects and their affected luminosity"
+  print("I am now comparing run by run the defects and their affected luminosity")
   
   # First basic check about the datasets used in both tags
   YT0 = yearTagList[0]
   YT1 = yearTagList[1]
   if (subperiodNb[YT0] != subperiodNb[YT1]):
-    print "Warning : different number of subperiods..."
+    print("Warning : different number of subperiods...")
   else:
     for iBin in range(subperiodNb[YT0]):
       if (h1Period_IntLuminosity[YT0].GetBinContent(iBin) != h1Period_IntLuminosity[YT1].GetBinContent(iBin)):
-        print "Warning : different luminosities in bin %s/%s: %f vs %f"%(h1Period_IntLuminosity[YT0].GetXaxis().GetBinLabel(iBin),h1Period_IntLuminosity[YT1].GetXaxis().GetBinLabel(iBin),h1Period_IntLuminosity[YT0].GetBinContent(iBin),h1Period_IntLuminosity[YT1].GetBinContent(iBin))
+        print("Warning : different luminosities in bin %s/%s: %f vs %f"%(h1Period_IntLuminosity[YT0].GetXaxis().GetBinLabel(iBin),h1Period_IntLuminosity[YT1].GetXaxis().GetBinLabel(iBin),h1Period_IntLuminosity[YT0].GetBinContent(iBin),h1Period_IntLuminosity[YT1].GetBinContent(iBin)))
   
   runs_diff2tags = {}
   lumi_diff2tags = {}
@@ -460,9 +458,9 @@ if (len(yearTagList) == 2 and options['plotDiff2tags'] and singleYear):
       else: 
         iYT2=yearTagList[0]
         sign = -1      
-      if (runsLPR[iYT].has_key(iDefVeto) and not runsLPR[iYT2].has_key(iDefVeto)):
+      if (iDefVeto in runsLPR[iYT] and iDefVeto not in runsLPR[iYT2]):
         for irun in range(len(runsLPR[iYT][iDefVeto])):
-          print "%s contains %s %s (%.6f pb-1) for run %d but %s does not!"%(iYT,defOrVeto,iDefVeto,lossLPR[iYT][iDefVeto][irun],runsLPR[iYT][iDefVeto][irun],iYT2)
+          print("%s contains %s %s (%.6f pb-1) for run %d but %s does not!"%(iYT,defOrVeto,iDefVeto,lossLPR[iYT][iDefVeto][irun],runsLPR[iYT][iDefVeto][irun],iYT2))
           defVeto_type = "%s_miss_%s"%(iDefVeto,iYT2)
           defOrVeto_type = "%s_miss_%s"%(defOrVeto,iYT2)
 
@@ -470,10 +468,10 @@ if (len(yearTagList) == 2 and options['plotDiff2tags'] and singleYear):
           if runsLPR[iYT][iDefVeto][irun] not in runs_diff2tags[defOrVeto_type]:
             runs_diff2tags[defOrVeto_type].append(runsLPR[iYT][iDefVeto][irun])
             
-      if (runsLPR[iYT].has_key(iDefVeto) and runsLPR[iYT2].has_key(iDefVeto)):
+      if (iDefVeto in runsLPR[iYT] and iDefVeto in runsLPR[iYT2]):
         for irun in range(len(runsLPR[iYT][iDefVeto])):
           if runsLPR[iYT][iDefVeto][irun] not in runsLPR[iYT2][iDefVeto]:
-            print "%s contains %s %s (%.6f pb-1) for run %d but %s does not!"%(iYT,defOrVeto,iDefVeto,lossLPR[iYT][iDefVeto][irun],runsLPR[iYT][iDefVeto][irun],iYT2)
+            print("%s contains %s %s (%.6f pb-1) for run %d but %s does not!"%(iYT,defOrVeto,iDefVeto,lossLPR[iYT][iDefVeto][irun],runsLPR[iYT][iDefVeto][irun],iYT2))
             defVeto_type = "%s_miss_%s"%(iDefVeto,iYT2)
             defOrVeto_type = "%s_miss_%s"%(defOrVeto,iYT2)
 
@@ -483,7 +481,7 @@ if (len(yearTagList) == 2 and options['plotDiff2tags'] and singleYear):
           else:
             irun2 = runsLPR[iYT2][iDefVeto].index(runsLPR[iYT][iDefVeto][irun])
             if (lossLPR[iYT][iDefVeto][irun] != lossLPR[iYT2][iDefVeto][irun2] and firstYT):
-              print "%s contains %s %s (%.6f pb-1) for run %d; %s also but with a different luminosity %.6f pb-1!"%(iYT,defOrVeto,iDefVeto,lossLPR[iYT][iDefVeto][irun],runsLPR[iYT][iDefVeto][irun],iYT2,lossLPR[iYT2][iDefVeto][irun2])
+              print("%s contains %s %s (%.6f pb-1) for run %d; %s also but with a different luminosity %.6f pb-1!"%(iYT,defOrVeto,iDefVeto,lossLPR[iYT][iDefVeto][irun],runsLPR[iYT][iDefVeto][irun],iYT2,lossLPR[iYT2][iDefVeto][irun2]))
               defVeto_type = "%s_diff"%(iDefVeto)
               defOrVeto_type = "%s_diff"%(defOrVeto)
 
@@ -569,4 +567,3 @@ if (len(yearTagList) == 2 and options['plotDiff2tags'] and singleYear):
         c_diffTwoYT[defOrVeto_type].cd(2)
         leg_diffTwoYT[defOrVeto_type].SetHeader(suffixTitle[iSuffix])
         leg_diffTwoYT[defOrVeto_type].Draw()
-    
diff --git a/DataQuality/DataQualityUtils/scripts/DeMoSetup.py b/DataQuality/DataQualityUtils/scripts/DeMoSetup.py
index 80a7b567f91aa249e4da5f84d75cf7397afffebc..6a76277f72386fa44702589e97f3536417e74658 100644
--- a/DataQuality/DataQualityUtils/scripts/DeMoSetup.py
+++ b/DataQuality/DataQualityUtils/scripts/DeMoSetup.py
@@ -1,10 +1,11 @@
 #! /usr/bin/env python
-# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 # Author : Benjamin Trocme (LPSC - Grenoble)- 2017
 # Creates directories for new year, tag, system... Only for experts
 ##################################################################
 
-import os,sys  
+from __future__ import print_function
+import os
 import argparse
 
 parser = argparse.ArgumentParser(description='')
@@ -17,18 +18,17 @@ parser.print_help()
 
 direct = "YearStats-%s"%args.parser_system
 if not os.path.exists(direct):
-  print "%s system directory does not exists. Creating it"%direct
+  print("%s system directory does not exists. Creating it"%direct)
   os.system("mkdir %s"%direct)
 
 direct = "YearStats-%s/%s"%(args.parser_system,args.parser_year)
 if not os.path.exists(direct):
-  print "%s year directory does not exists. Creating it"%direct
+  print("%s year directory does not exists. Creating it"%direct)
   os.system("mkdir %s"%direct)
 
 direct = "YearStats-%s/%s/%s"%(args.parser_system,args.parser_year,args.parser_tag)
 if not os.path.exists(direct):
-  print "%s tag directory does not exists. Creating it"%direct
+  print("%s tag directory does not exists. Creating it"%direct)
   os.system("mkdir %s"%direct)
   os.system("mkdir %s/Run"%direct)
   os.system("mkdir %s/Weekly"%direct)
-
diff --git a/DataQuality/DataQualityUtils/scripts/DeMoStatus.py b/DataQuality/DataQualityUtils/scripts/DeMoStatus.py
index b60b2a756e322ba4c66565676f063f9539f8ee9c..fe06c41ecf5649ae9ae5781a3748b604b0acdb9e 100644
--- a/DataQuality/DataQualityUtils/scripts/DeMoStatus.py
+++ b/DataQuality/DataQualityUtils/scripts/DeMoStatus.py
@@ -1,23 +1,22 @@
 #! /usr/bin/env python
-# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 # Author : Benjamin Trocme (LPSC - Grenoble) - 2017
 # Displays the year cumulated stats (GRL runs)
 ##################################################################
 
-import os,sys  
-from time import localtime, strftime
+from __future__ import print_function
+import os,sys
 
 from ROOT import TFile
-from ROOT import TProfile,TH1F
 from ROOT import TCanvas
 from ROOT import gStyle,gPad
-from ROOT import kBlack,kOrange,kGreen
+from ROOT import kBlack
 
 sys.path.append("/afs/cern.ch/user/l/larmon/public/prod/Misc")
 
 from gb import MakeTH1,SetXLabel,MakeTProfile
 
-from DeMoLib import strLumi, plotStack, initialize
+from DeMoLib import plotStack, initialize
 
 global debug
 debug = False
@@ -65,7 +64,6 @@ def ATLASLabel(x,y,text=""):
 ########################################################################
 # Main script
 from argparse import RawTextHelpFormatter,ArgumentParser
-from ROOT import gROOT
 
 parser = ArgumentParser(description='',formatter_class=RawTextHelpFormatter)
 parser.add_argument('-y','--year',dest='parser_year',default = ["2018"],nargs='*',help='Year [Default: 2018]',action='store')
@@ -105,7 +103,7 @@ for iYear in args.parser_year:
 yearTagList.sort()
 
 if len(yearTagList) == 0:
-  print "No year / tag matching - Please check YearStats directory"
+  print("No year / tag matching - Please check YearStats directory")
   sys.exit()
 
 options = {}
@@ -147,7 +145,7 @@ subperiodNb = {}
 runsCharact = {}
 
 for iYT in yearTagList:
-  print "I am treating the following year/tag:%s"%iYT
+  print("I am treating the following year/tag:%s"%iYT)
 
   canvasResults[iYT] = {}
   legendResults[iYT] = {}
@@ -168,7 +166,7 @@ for iYT in yearTagList:
     runsCharact[iYT]['Number'] += 1
 
   runsCharact[iYT]['Range']="%d->%d / GRL only"%(runsCharact[iYT]['Low'],runsCharact[iYT]['High'])
-  print "I found %d runs in this year/tag (%s)"%(runsCharact[iYT]['Number'],runsCharact[iYT]['Range'])
+  print("I found %d runs in this year/tag (%s)"%(runsCharact[iYT]['Number'],runsCharact[iYT]['Range']))
 
   if (options['plotYearStats'] or options['plotYearStatsLarge']):
     if options['approvedPlots']:
@@ -255,12 +253,12 @@ for iYT in yearTagList:
       gPad.SetGrid(1)
       h1PeriodLett_IntLuminosity[iYT].Draw("P HIST")
       for iBin in range(1,h1PeriodLett_IntLuminosity[iYT].GetNbinsX()):
-        print "Period %s: %.3f pb-1"%(h1PeriodLett_IntLuminosity[iYT].GetXaxis().GetBinLabel(iBin),h1PeriodLett_IntLuminosity[iYT].GetBinContent(iBin))
+        print("Period %s: %.3f pb-1"%(h1PeriodLett_IntLuminosity[iYT].GetXaxis().GetBinLabel(iBin),h1PeriodLett_IntLuminosity[iYT].GetBinContent(iBin)))
     else:
       canvasResults[iYT]['intLumi']= TCanvas( "c_intLumi_%s"%(iYT),"Integrated luminosity per period", 200, 10, 1000, 500)
       h1Period_IntLuminosity[iYT].Draw("P HIST")
       for iBin in range(1,h1Period_IntLuminosity[iYT].GetNbinsX()):
-        print "Period %s: %.3f pb-1"%(h1Period_IntLuminosity[iYT].GetXaxis().GetBinLabel(iBin),h1Period_IntLuminosity[iYT].GetBinContent(iBin))
+        print("Period %s: %.3f pb-1"%(h1Period_IntLuminosity[iYT].GetXaxis().GetBinLabel(iBin),h1Period_IntLuminosity[iYT].GetBinContent(iBin)))
 
     canvasResults[iYT]['intLumi'].SetGridy(1)
 
@@ -316,4 +314,3 @@ if (yearTagNb >= 2 and (options['plotYearStats'] or options['plotYearStatsLarge'
     plotStack("veto--Year--%s"%legendHeader,h1YearTag_Veto,veto["all"],defectVeto["description"],h1YearTag_IntLuminosity,options['lumiNotPercent'],stackResults,canvasResults,legendResults,False,True,options['approvedPlots'])
     if options['approvedPlots']:
       ATLASLabel(0.1,0.81,"Internal")
-  
diff --git a/DataQuality/DataQualityUtils/scripts/DeMoUpdate.py b/DataQuality/DataQualityUtils/scripts/DeMoUpdate.py
index 92c25863c9723366efd21a36d08ffc7106e6ed2d..ea8e39fb96290b4a2856b0c71b807c392da5be4a 100644
--- a/DataQuality/DataQualityUtils/scripts/DeMoUpdate.py
+++ b/DataQuality/DataQualityUtils/scripts/DeMoUpdate.py
@@ -1,5 +1,5 @@
 #! /usr/bin/env python
-# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 # Author : Benjamin Trocme (LPSC - Grenoble) - 2017
 # Udpates the year stats
 ##################################################################
@@ -10,15 +10,14 @@ from string import ljust
 import time
 
 from ROOT import TFile
-from ROOT import TH1F,TProfile
 from ROOT import TCanvas,TPaveText
 from ROOT import kBlack,kOrange,kGreen
 from ROOT import gStyle
 
-import xmlrpclib
+import six.moves.xmlrpc_client as xmlrpclib
 
 sys.path.append("/afs/cern.ch/user/l/larmon/public/prod/Misc")
-from LArMonCoolLib import GetLBTimeStamps,GetOnlineLumiFromCOOL,GetOfflineLumiFromCOOL,GetLBDuration,GetReadyFlag,GetNumberOfCollidingBunches
+from LArMonCoolLib import GetLBTimeStamps,GetOnlineLumiFromCOOL,GetOfflineLumiFromCOOL,GetReadyFlag,GetNumberOfCollidingBunches
 from gb import MakeTH1,SetXLabel,MakeTProfile
 
 from DeMoLib import strLumi,plotStack,initialize
@@ -57,7 +56,7 @@ def listify(l):
 
 ################################################################################################################################################
 def printBoth(string0,boolean,f):
-  print string0
+  print(string0)
   if boolean:# Also write on txt file
     f.write(string0+'\n')
   return
@@ -72,8 +71,6 @@ def findLB(lbts,startOfVetoPeriod): # Find the lumiblock where a veto period sta
 ################################################################################################################################################
 # print single run report. Only printing, no computation
 def singleRunReport(runNumber,dict1,dict2,directory,defects,veto,exactVetoComput): 
-  import string
-
   if dict1['signoff'] == "DONE" or dict1['signoff'] == "FINAL OK":
     repOnDisk = True
     f = open('%s/Run/%s.txt' % (directory,runNumber), 'w')
@@ -139,16 +136,16 @@ args = parser.parse_args()
 parser.print_help()
 
 # Token to avoid having multiple update in the same time
-print "Current time: %s"%(time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime()))
+print("Current time: %s"%(time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime())))
 
 options = {}
 options['system'] = args.parser_system
 tokenName = "DeMo-%s-%s.token"%(options['system'],args.parser_tag)
 if os.path.exists(tokenName):
-  print "A different DeMoUpdate is apparently running (or it was not properly ended recently). This may cause trouble when updating yearStats"
+  print("A different DeMoUpdate is apparently running (or it was not properly ended recently). This may cause trouble when updating yearStats")
   os.system("ls -ltr %s"%tokenName)
-  print "If you are sure that it is not the case, you can remove the %s..."%tokenName
-  print "If you are not the owner, contact the DQ coordinator"
+  print("If you are sure that it is not the case, you can remove the %s..."%tokenName)
+  print("If you are not the owner, contact the DQ coordinator")
   sys.exit()
 else:
   os.system("touch %s"%tokenName)
@@ -203,14 +200,14 @@ if args.parser_runListUpdate:
 
     for iRecentRun in sorted(recentRuns.keys()):
       if (recentRuns[iRecentRun][2]): # ATLAS ready
-        print "I am adding the new run with ATLAS ready: %s"%iRecentRun
+        print("I am adding the new run with ATLAS ready: %s"%iRecentRun)
         fRunList.write("%s\n"%iRecentRun)
     fRunList.close()
   else:
-    print "No %s..."%allRunListDat
+    print("No %s..."%allRunListDat)
 
   os.system("rm -f %s"%tokenName)
-  print "I am exiting..."
+  print("I am exiting...")
   sys.exit()
 
 ############ Fill runlist variable and change some options if single run
@@ -226,37 +223,37 @@ for iRunList in runlist.keys():
 
 if len(veto["all"]) == 0:
   options['noVeto'] = True
-  print "No veto information provided in DeMoLib.py"
+  print("No veto information provided in DeMoLib.py")
 else:
   if options['noVeto']:
-    print "WARNING: I do not consider time veto information..."
+    print("WARNING: I do not consider time veto information...")
 
 if options['updateYearStats']:
   yearStatsArchiveFilename = '%s/TProfiles.root'%options['yearStatsDir']
   if not os.path.exists(yearStatsArchiveFilename):
-    print "No archive file found in %s"%options['yearStatsDir']
-    print "I am forcing the year stats reset..."
+    print("No archive file found in %s"%options['yearStatsDir'])
+    print("I am forcing the year stats reset...")
     options['resetYearStats'] = True
   elif os.path.getsize("%s/runs-ALL.dat"%options['yearStatsDir']) == 0.:
     # runs-ALL.data and runs-[period].dat updated only for runs in GRL
     # Test here relevant at the beginning of the year when some runs have been reviewed at EXPR/BULK level (but not FINAL hence no year stats)
     # In such a case a TProfiles.root file may exist even if no update was made
     # April 18: I am not sure that this situation is still relevant... 
-    print "No run found in %s"%options['yearStatsDir']
-    print "I am forcing the year stats reset..."
+    print("No run found in %s"%options['yearStatsDir'])
+    print("I am forcing the year stats reset...")
     options['resetYearStats'] = True
 
 errorLogFile = open("%s/errors.log"%options['yearStatsDir'],'a')
 
 if (options['updateYearStats'] and options['resetYearStats']):
-  print "WARNING: I am going to reset the %s stats..."%options['yearStatsDir']
+  print("WARNING: I am going to reset the %s stats..."%options['yearStatsDir'])
   if (options['batchMode']): # In batch mode, no confirmation requested
     confirm = "y"
   else:
-    confirm = raw_input("Please confirm by typing y: ")
+    confirm = input("Please confirm by typing y: ")
 
   if ("y" in confirm):
-    print "I reseted the %s stats"%options['yearStatsDir']
+    print("I reset the %s stats"%options['yearStatsDir'])
     # Delete the dat files that contains the runs updated and the associated lumi
     os.system("rm -f %s/lumi*.dat"%options['yearStatsDir'])
     os.system("rm -f %s/runs*.dat"%options['yearStatsDir'])
@@ -265,12 +262,12 @@ if (options['updateYearStats'] and options['resetYearStats']):
       os.system("rm -f %s/loss*.dat"%options['yearStatsDir'])
       os.system("rm -f %s/Run/*.txt"%options['yearStatsDir'])
     else:
-      print "However, I did NOT delete the loss files to preserve defects set in non-GRL runs"
+      print("However, I did NOT delete the loss files to preserve defects set in non-GRL runs")
 
     # Delete the root file that contains the TProfiles
     os.system("rm -f %s"%(yearStatsArchiveFilename))
   else:
-    print "I did NOT reset the %s stats"%options['yearStatsDir']
+    print("I did NOT reset the %s stats"%options['yearStatsDir'])
     options['resetYearStats'] = False
 
 
@@ -280,7 +277,7 @@ runSpec = {} # Characteristics of each run: start, stop, data period, luminosity
 if args.parser_allRuns: # all year runs
   runlist['toprocess'] = runlist['all']
 elif args.parser_weekly: # Weekly report - Look for the last 7-days runs + unsigned off
-  print "I am looking for all runs signed off in the past week and the older ones not yet signed off..."
+  print("I am looking for all runs signed off in the past week and the older ones not yet signed off...")
   options['savePlots'] = True
   runlist['toprocess'] = []
   oneWeek = 7*24*3600 # Nb of seconds in one week
@@ -315,14 +312,14 @@ elif args.parser_weekly: # Weekly report - Look for the last 7-days runs + unsig
     runlist['toprocess'].remove(iRun)
 
   runlist['toprocess'].reverse()
-  print "I will process these runs :",runlist['toprocess']
+  print("I will process these runs :",runlist['toprocess'])
 elif args.parser_grlUpdate: # Reprocess all grl runs skipping the ones already updated
   runlist['toprocess'] = runlist['grl']
   options['skipAlreadyUpdated'] = True
 elif len(args.parser_run) == 1: # Single run 
   runNb = args.parser_run[0]
   if (runNb not in (runlist['all'])):
-    print "------------>Please first add the run in the run list"
+    print("------------>Please first add the run in the run list")
     os.system("rm -f %s"%tokenName)
     sys.exit()
   runlist['toprocess'] = [runNb]
@@ -337,14 +334,14 @@ elif len(args.parser_run) == 2: # Run range
     if (runNb>=startrun and runNb<=endrun):
       runlist['toprocess'].append(runNb)
 else:
-  print "Please specify run number or run range with -r option"
+  print("Please specify run number or run range with -r option")
   os.system("rm -f %s"%tokenName)
   sys.exit()
 
 
 if len(runlist['toprocess']) == 0 and len(args.parser_run)>0:
-  print "No run found in this run range..."
-  print "Please double check or update the runlist file..."
+  print("No run found in this run range...")
+  print("Please double check or update the runlist file...")
   os.system("rm -f %s"%tokenName)
   sys.exit()
 
@@ -365,7 +362,7 @@ for runNb in runlist['toprocess']:
   else: # Did not find the data period
     runSpec[runNb]['period'] = "???"
     runSpec[runNb]['newInYearStats'] = False
-    print "I did not find the data period for run %d"%(runNb)
+    print("I did not find the data period for run %d"%(runNb))
 
 for iper in periodListCurrent.keys(): # Loop on all periods found and look for new periods/runs
   periodFileName = "%s/runs-%s.dat"%(options['yearStatsDir'],iper)
@@ -378,7 +375,7 @@ for iper in periodListCurrent.keys(): # Loop on all periods found and look for n
           runSpec[irun]['newInYearStats'] = True
         else:
           runSpec[irun]['newInYearStats'] = False
-          print "Run %d not in GRL run list -> Ignored for YearStats"%irun
+          print("Run %d not in GRL run list -> Ignored for YearStats"%irun)
       else:
         runSpec[irun]['newInYearStats'] = False
       if "%d\n"%(irun) in existingRuns:
@@ -393,24 +390,24 @@ for iper in periodListCurrent.keys(): # Loop on all periods found and look for n
           periodToBeAdded = True
         else:
           runSpec[irun]['newInYearStats'] = False
-          print "Run %d not in GRL run list -> Ignored for YearStats"%irun
+          print("Run %d not in GRL run list -> Ignored for YearStats"%irun)
       else:
         runSpec[irun]['newInYearStats'] = False
     if options['updateYearStats'] and periodToBeAdded:
-      print "I am going to add period %s in year stats!"%(iper)
+      print("I am going to add period %s in year stats!"%(iper))
       newPeriodInYearStats.append(iper)
 
 
 for iper in periodListCurrent.keys(): # Loop on all periods founds and print the runs to be updated
   for irun in periodListCurrent[iper]:
     if runSpec[irun]['newInYearStats']:
-      print "I am going to add run %d (period %s) in %s stats (provided that it is fully signed off - Not yet known...)!"%(irun,runSpec[irun]['period'],options['year'])
+      print("I am going to add run %d (period %s) in %s stats (provided that it is fully signed off - Not yet known...)!"%(irun,runSpec[irun]['period'],options['year']))
       bool_newRunsInYearStats = True
     else:
       if (options['skipAlreadyUpdated']):
         runSpec.pop(irun)
         runlist['toprocess'].pop(runlist['toprocess'].index(irun))
-        print "%d was already processed in yearStats - I am complety ignoring it..."%(irun)
+        print("%d was already processed in yearStats - I am completely ignoring it..."%(irun))
 
 if (not bool_newRunsInYearStats):
   options['updateYearStats'] = False # No new run -> no update needed
@@ -424,7 +421,7 @@ runSpec['AllRuns']['period'] = "-"
 runSpec['AllRuns']['signoff'] = "-"
 
 if debug:
-  print grlDef
+  print(grlDef)
 
 ################################################################
 # Book Histograms for general plot with intolerable defects/veto
@@ -455,8 +452,7 @@ for idef in grlDef["intol"]+grlDef["intol_recov"]: #Intolerable defects only
       
     if len(periodListYear) != 0 or len(periodListCurrent) != 0: # At least one period found in current or past runs, otherwise no way to plot year stats
       # Collect all periods (archived ones + new ones)
-      periodListYear = periodListYear + newPeriodInYearStats 
-      periodListYear.sort() # The list of periods is now sorted
+      periodListYear = sorted(periodListYear + newPeriodInYearStats) 
       periodNbYear = len(periodListYear) # Number of periods      
       # Create the empty year stats TProfile histograms for the updated period list
       hProfPeriod_IntolDefect[idef] = MakeTProfile(profPeriodName,"%s"%(defectVeto["description"][idefName]),"Lost luminosity (%)", -0.5,+0.5+periodNbYear,periodNbYear+1,defectVeto["color"][idefName])
@@ -505,7 +501,7 @@ SetXLabel(h1Run_IntLuminosity,runlist['toprocess'])
 h1Run_IntLuminosity.GetXaxis().SetBinLabel(len(runlist['toprocess'])+1,"All")
 
 if debug:
-  print "1",grlDef
+  print("1",grlDef)
 
 ### TO BE MODIFIED WHEN TH1 IS SAVED IN TPROFILE.ROOT. Can be filled in a more logical way
 if (options['updateYearStats'] and periodNbYear>0): # If update is required, it is now sure that some periods exist. Create a TH1 to store the integrated luminosity
@@ -541,15 +537,15 @@ runSpec['AllRuns']['ineffVetos'] = 0.
 for iVeto in veto["all"]:
   runSpec['AllRuns']['lumiVeto_%s'%iVeto] = 0. # Total luminosity rejected by each time veto
 
-if (len(runSpec.keys()) == 1):
-  print "I did not find any run in runList."
-  print "Please check the run range/options"
+if (len(runSpec) == 1):
+  print("I did not find any run in runList.")
+  print("Please check the run range/options")
 
 #######################################################################################
 #### Main loop over selected runs
 for irun,runNb in enumerate(runlist['toprocess']):
-  print "=================================================================" 
-  print "=============================Run %d (%d/%d)======================"%(runNb,irun+1,len(runlist['toprocess'])) 
+  print("=================================================================") 
+  print("=============================Run %d (%d/%d)======================"%(runNb,irun+1,len(runlist['toprocess']))) 
   # Init variables - List (indexed by partition) of tables of lumi blocks affected by defects
   lbAffected = {}
   for idef in grlDef["part"]+grlDef["partIntol_recov"]: # All partition defects
@@ -599,7 +595,7 @@ for irun,runNb in enumerate(runlist['toprocess']):
   lumiacct=fetch_iovs('COOLOFL_TRIGGER::/TRIGGER/OFLLUMI/LumiAccounting', tag='OflLumiAcct-001', since=v_lbTimeSt[1][0]*1000000000, until=v_lbTimeSt[len(v_lbTimeSt)][1]*1000000000) 
   #thisRunPerLB['liveFraction'] = dict()
   thisRunPerLB['duration'] = dict()
-  for iLumiAcct in xrange(len(lumiacct)):
+  for iLumiAcct in range(len(lumiacct)):
     #thisRunPerLB['liveFraction'][lumiacct[iLumiAcct].LumiBlock] = lumiacct[iLumiAcct].LiveFraction
     if options['recordedLumiNorm']: # The LB duration is corrected by the live fraction 
       thisRunPerLB['duration'][lumiacct[iLumiAcct].LumiBlock] = lumiacct[iLumiAcct].LBTime*lumiacct[iLumiAcct].LiveFraction
@@ -611,12 +607,12 @@ for irun,runNb in enumerate(runlist['toprocess']):
       if lb not in thisRunPerLB["deliveredLumi"].keys():
         thisRunPerLB["deliveredLumi"][lb] = 0.
         errorMsg = "Missing lumi for Run %d - LB %d\n"%(runNb,lb)
-        print errorMsg
+        print(errorMsg)
         errorLogFile.write(errorMsg)
       if lb not in thisRunPerLB["duration"].keys():
         thisRunPerLB["duration"][lb] = 0.
         errorMsg = "Missing duration/LiveFraction for Run %d - LB %d\n"%(runNb,lb)
-        print errorMsg
+        print(errorMsg)
         errorLogFile.write(errorMsg)
     else:
       if lb not in thisRunPerLB["deliveredLumi"].keys():
@@ -644,7 +640,7 @@ for irun,runNb in enumerate(runlist['toprocess']):
   # Consider only LB in runSpec[runNb]["readyLB"]
   for iRetrievedDefects in retrievedDefects:
     if debug:
-      print iRetrievedDefects
+      print(iRetrievedDefects)
     # keep track of runs with missing sign-off - Store the earliest stage of the sign off procedure
     for iSignOff in signOff["EXPR."]:
       if iRetrievedDefects.channel == iSignOff:
@@ -748,7 +744,7 @@ for irun,runNb in enumerate(runlist['toprocess']):
       for lb in range(iRetrievedDefects.since.lumi,iRetrievedDefects.until.lumi):
         if((lb in runSpec[runNb]['readyLB']) or runSpec[runNb]['nLBready']==0):# The LB is with ATLAS ready
 
-          if not lbAffected[defectFound].has_key(partAffected): # Store the affected partitions
+          if partAffected not in lbAffected[defectFound]: # Store the affected partitions
             lbAffected[defectFound][partAffected]=[]
           lbAffected[defectFound][partAffected].append(lb)
 
@@ -778,7 +774,7 @@ for irun,runNb in enumerate(runlist['toprocess']):
   # request, they can be also ignored.
   # NB: in any way, a non signed off run is never considered in year stats
   if options['skipUnsignedOff'] and runSpec[runNb]['signoff'] != 'FINAL OK':
-    print "Run %d is not yet signed off. Skipping it..."%runNb
+    print("Run %d is not yet signed off. Skipping it..."%runNb)
     runSpec.pop(runNb)
     continue
 
@@ -822,7 +818,7 @@ for irun,runNb in enumerate(runlist['toprocess']):
   if runSpec[runNb]['nLBready']>0:
     lbsToConsider=runSpec[runNb]["readyLB"]
   else:
-    lbsToConsider=range(1,runSpec[runNb]['nLB'])
+    lbsToConsider=list(range(1,runSpec[runNb]['nLB']))
 
   for lb in lbsToConsider:
     runSpec[runNb]['Lumi'] = runSpec[runNb]['Lumi'] +thisRunPerLB["deliveredLumi"][lb]*thisRunPerLB['duration'][lb]
@@ -872,7 +868,7 @@ for irun,runNb in enumerate(runlist['toprocess']):
     if (boolExactVetoComput_run):
       totalVeto = showEventVeto.showEventVetoFolder(db2,folderName,options['vetoTag'],runNb,runNb,0) 
     else:
-      print "WARNING: you use the rough event veto loss. To be used only if default is too slow..."
+      print("WARNING: you use the rough event veto loss. To be used only if default is too slow...")
       totalVeto = showEventVetoNoLumi.showEventVetoFolder(db2,folderName,options['vetoTag'],runNb,runNb,0) 
   else:
     totalVeto = None
@@ -889,7 +885,7 @@ for irun,runNb in enumerate(runlist['toprocess']):
     if (boolExactVetoComput_run):# Computation of veto rejection weighting by inst. lumi and ignoring LB already in intolerable defect list
       for iVeto in veto["all"]:
         runSpec[runNb]["lumiVeto_%s"%iVeto] = 0.
-        for iVetoedLB in xrange(len(totalVeto[veto["COOL"][iVeto]])): # Loop on all veto periods
+        for iVetoedLB in range(len(totalVeto[veto["COOL"][iVeto]])): # Loop on all veto periods
           lb0 = findLB(v_lbTimeSt,totalVeto[veto["COOL"][iVeto]][iVetoedLB][0]/1e9) # Start of veto period
           lb1 = findLB(v_lbTimeSt,totalVeto[veto["COOL"][iVeto]][iVetoedLB][0]/1e9) # End of veto period
           if options['vetoLumiEvolution']:
@@ -927,7 +923,7 @@ if options['vetoLumiEvolution']:
     h1_vetoInstLumiEvol[iVeto].Divide(h1_vetoInstLumiEvol[iVeto],h1_vetoInstLumiEvol['NoVeto'],100.,1.)
     
 ######################### Treatment when a run range was considered (weekly report)
-if (len(runSpec.keys())>2 and runSpec['AllRuns']['Lumi']!=0):
+if (len(runSpec)>2 and runSpec['AllRuns']['Lumi']!=0):
   # Compute inefficiencies for the whole period
   
   # Defect inefficencies first
@@ -966,7 +962,7 @@ if (len(runSpec.keys())>2 and runSpec['AllRuns']['Lumi']!=0):
       labels_xlow = [0.01,0.13,0.44,0.51,0.59,0.65,0.72,0.855,0.925,0.99]
       labels_xlow = [0.01,0.08,0.41,0.49,0.575,0.655,0.74,0.835,0.9,0.99]
       
-      for i in xrange(len(labels_col)):
+      for i in range(len(labels_col)):
 #        column[canvasIndex].append(TPaveText(labels_xlow[i],max(.99-0.08*len(runlist['toprocess']),0.01),labels_xlow[i+1],0.99))
         column[canvasIndex].append(TPaveText(labels_xlow[i],0.01,labels_xlow[i+1],0.99))
         column[canvasIndex][i].AddText(labels_col[i])
@@ -989,7 +985,7 @@ if (len(runSpec.keys())>2 and runSpec['AllRuns']['Lumi']!=0):
     column[canvasIndex][8].AddText("%10s"%(runSpec[runNb]["signoff"]))
     lineNb[canvasIndex] += 1
     if (lineNb[canvasIndex]==50 or runNb == "AllRuns"):
-      for i in xrange(len(column[canvasIndex])):
+      for i in range(len(column[canvasIndex])):
         if i == 1:
           column[canvasIndex][i].AddText("Completed at %s"%(time.strftime("%H:%M (%d %b)", time.localtime())))
         else:
@@ -1001,7 +997,7 @@ if (len(runSpec.keys())>2 and runSpec['AllRuns']['Lumi']!=0):
       canvasIndex += 1
       
     if runSpec[runNb]["signoff"] != "FINAL OK" and runNb != "AllRuns":
-      print "Run %d not fully signed off -> no year stats update. Current status: %s"%(runNb,runSpec[runNb]["signoff"])
+      print("Run %d not fully signed off -> no year stats update. Current status: %s"%(runNb,runSpec[runNb]["signoff"]))
 
   if options['savePlots']:
     for iCanvas in range(len(c1)):
@@ -1046,22 +1042,22 @@ if (options['saveHistos']):
     if options['vetoLumiEvolution']:
       h1_vetoInstLumiEvol[iVeto].Write()
   f.Close()
-  print "Histos saved in %s"%(filename)
+  print("Histos saved in %s"%(filename))
 
 # yearStats update
 # If new runs were added to period plots, save them
 if (options['updateYearStats'] and bool_newRunsInYearStats):
-  print "WARNING: I am going to update the %s stats with the following runs:"%(options['year'])
-  print "NB: only runs fully signed off are considered"
+  print("WARNING: I am going to update the %s stats with the following runs:"%(options['year']))
+  print("NB: only runs fully signed off are considered")
   for irun in runSpec.keys():
     if (irun != "AllRuns"):
       if runSpec[irun]['newInYearStats']:
-        print irun
+        print(irun)
         
   if (options['batchMode']): # In batch mode, no confirmation requested
     confirm = "y"
   else:
-    confirm = raw_input("Are you sure ([y]/n)?: ")
+    confirm = input("Are you sure ([y]/n)?: ")
     
   if ("n" not in confirm):
     f = TFile(yearStatsArchiveFilename,"recreate")
@@ -1092,7 +1088,7 @@ if (options['updateYearStats'] and bool_newRunsInYearStats):
           fAll.write("%d\n"%(irun))
       f.close()
     fAll.close()
-    print "I have updated year stats"
+    print("I have updated year stats")
 
 # The update of the defect dat files is now decoupled from the yearStatsUpdate to allows to also monitor runs (special runs notably)
 # that are not in the GRL.  
@@ -1140,4 +1136,4 @@ errorLogFile.close()
 
 os.system("rm -f %s"%tokenName)
 if not options['batchMode']:
-  raw_input("I am done. Type <return> to exit...")
+  input("I am done. Type <return> to exit...")
diff --git a/DataQuality/DataQualityUtils/scripts/GetHistogram.py b/DataQuality/DataQualityUtils/scripts/GetHistogram.py
deleted file mode 100755
index f53d6d7c4bc776b61fb7ef45937f9d86b06ec22a..0000000000000000000000000000000000000000
--- a/DataQuality/DataQualityUtils/scripts/GetHistogram.py
+++ /dev/null
@@ -1,69 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-# +---------------------------------------------+
-# | Vassilis D. Kazazakis, GetHistogram.py v1.0	|
-# | Extracts histograms from ROOT files		|
-# |  Interface used by QuerryDB.py application	|
-# +---------------------------------------------+
-
-import DataQualityUtils.DQGetHistogramMod as mod
-
-
-def usage():
-	print "Usage:", sys.argv[0] , "[options]"
-	print """
-	-i/--input filename		 Input ROOT file
-	-o/--output filename		 Output ROOT file
-	-n/--name histogram		 Name of the histogram to extract
-	-d/--directory directory[:flag]	 Name of the directory from which to extract all[or flagged] histograms
-	-f/--flag flag			 Flag of the histograms to extract (shift/expert/etc)
-	"""
-
-
-if __name__ == "__main__":
-	input = output = None
-	nameList = []
-	try:
-		optlist, unparsed = getopt.getopt(sys.argv[1:],
-					"i:o:n:d:f:",
-					["input=", "output=", "name=", "directory=", "flag="])
-	except getopt.GetoptError:
-		usage()
-		sys.exit(-1)
-
-	if len(optlist) == 0:
-		usage()
-		sys.exit(0)
-
-	if len(unparsed) != 0:
-		print "Extra arguments", unparsed, "not used."
-
-	for opt, arg in optlist:
-		if opt in ("-i", "--input"):
-			input = arg
-		elif opt in ("-o", "--output"):
-			output = arg
-		elif opt in ("-n", "--name"):
-			nameList.append(arg)
-		elif opt in ("-d", "--directory"):
-			tmpList = arg.split(":")
-			if len(tmpList) < 2:
-				tmpList.append("")
-			if not tmpList[0].endswith('/'):
-				if tmpList[1] != "":
-					tmpList[1] = ":" + tmpList[1]
-				arg = tmpList[0] + "/" + tmpList[1]
-			nameList.append(arg)
-		elif opt in ("-f", "--flag"):
-			nameList.append(":" + arg)
-
-	if None in (input, output):
-		print >>sys.stderr, "Error: Must define input/output files."
-		usage()
-		sys.exit(-1)
-	if len(nameList) == 0:
-		print >>sys.stderr, "Error: Must give at least one histogram or directory name."
-		usage()
-		sys.exit(-1)
-	mod.GetHistogram(input, output, nameList)
diff --git a/DataQuality/DataQualityUtils/scripts/ScanHistFile.py b/DataQuality/DataQualityUtils/scripts/ScanHistFile.py
index c6b1efacbae53dc770fbd72969b563416b6f88aa..f6bf0c5a9c06dbd71f73027b05ad537d285e3b8c 100755
--- a/DataQuality/DataQualityUtils/scripts/ScanHistFile.py
+++ b/DataQuality/DataQualityUtils/scripts/ScanHistFile.py
@@ -1,21 +1,22 @@
 #!/bin/env python
 
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 #copied from DQPostProcessi.py and modified
 # Sami Kama
+from __future__ import print_function
 import sys
 def _dolsrwrapper(fname):
     import ROOT
     rf = ROOT.TFile.Open(fname, 'READ')
     if not rf or not rf.IsOpen():
-        print '   %s is empty or not accessible' % fname
+        print('   %s is empty or not accessible' % fname)
         return 3
     if rf.TestBit(ROOT.TFile.kRecovered):
-        print " %s is recovered. It means it was corrupt" % fname
+        print(" %s is recovered. It means it was corrupt" % fname)
         return 7
     cleancache = ROOT.gROOT.MustClean(); ROOT.gROOT.SetMustClean(False)
     RV=_dolsr(rf)
-    print "dolsr returned %s "%(RV)
+    print("dolsr returned %s "%(RV))
     rf.Close()
     ROOT.gROOT.SetMustClean(cleancache)
     return RV
@@ -56,10 +57,10 @@ def _dolsr(dir):
             del dirobj
         elif keyClass.InheritsFrom("TTree"):
             currObj=key.ReadObj()
-            if currObj == None:
-                print "WARNING TTree Object \"%s\" in file:directory \"%s\" is corrupt "\
+            if currObj is None:
+                print("WARNING TTree Object \"%s\" in file:directory \"%s\" is corrupt "\
                 "keylen=%s numbytes=%s objlen=%s fseekkey=%s"%(name,dir.GetPath(),key.GetKeylen(),
-                                                              key.GetNbytes(),key.GetObjlen(),key.GetSeekKey())
+                                                              key.GetNbytes(),key.GetObjlen(),key.GetSeekKey()))
                 return 9
             else:
                 nentries=currObj.GetEntriesFast()
@@ -67,7 +68,7 @@ def _dolsr(dir):
                 #"keylen=%s numbytes=%s objlen=%s fseekkey=%s "%(name,dir.GetPath(),key.GetKeylen(),
                 #                                              key.GetNbytes(),key.GetObjlen(),key.GetSeekKey()),
                 #print "Scanning tree %s"%name,
-                for j in xrange(nentries):
+                for j in range(nentries):
                     ientry=currObj.LoadTree(j)
                     if ientry<0:
                         break
@@ -79,10 +80,10 @@ def _dolsr(dir):
             del currObj
         else:
             currObj=key.ReadObj()
-            if currObj == None:
-                print "WARNING Object \"%s\" in file:directory \"%s\" is corrupt "\
+            if currObj is None:
+                print("WARNING Object \"%s\" in file:directory \"%s\" is corrupt "\
                 "keylen=%s numbytes=%s objlen=%s fseekkey=%s"%(name,dir.GetPath(),key.GetKeylen(),
-                                                              key.GetNbytes(),key.GetObjlen(),key.GetSeekKey())
+                                                              key.GetNbytes(),key.GetObjlen(),key.GetSeekKey()))
                 return 5
             currObj.Delete()
             del currObj
diff --git a/DataQuality/DataQualityUtils/scripts/StandAloneDisplay.py b/DataQuality/DataQualityUtils/scripts/StandAloneDisplay.py
index abf6138c6c2dddabab75c476dc6630dc873cf1da..5b032e1fe3ece15da55f267300132fc50e0c4d29 100755
--- a/DataQuality/DataQualityUtils/scripts/StandAloneDisplay.py
+++ b/DataQuality/DataQualityUtils/scripts/StandAloneDisplay.py
@@ -1,6 +1,7 @@
 #!/bin/env python
 
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
+from __future__ import print_function
 
 import os
 ## Needed to correct ROOT behavior; see below
@@ -8,7 +9,7 @@ CWD = os.getcwd()
 
 import sys
 
-import ROOT
+import ROOT  # noqa: F401
 ## Importing gSystem may change the current directory to one of the
 ## command-line arguments; chdir to original directory to have
 ## predictable behavior
@@ -34,7 +35,7 @@ def handi( name, resultsFile, htmlDir ):
       try:
         os.makedirs(subHtmlDir)
       except os.error:
-        print 'Cannot create directory "' + subHtmlDir + '"; exiting.'
+        print('Cannot create directory "' + subHtmlDir + '"; exiting.')
         sys.exit(-1)
   
   total=of.stringAllHistograms()
@@ -58,7 +59,7 @@ def handi( name, resultsFile, htmlDir ):
 def usage():
   cmdi = sys.argv[0].rfind("/")
   cmd = sys.argv[0][cmdi+1:]
-  print "Usage: ", cmd, "<imput_file> <html_output_directory>"
+  print("Usage: ", cmd, "<input_file> <html_output_directory>")
 
 def makeAllDirsFile( htmlDir, name, s, number, resultsFile ):
   g=open(htmlDir+'index.html','w')
@@ -267,4 +268,3 @@ if __name__ == "__main__":
   html_dir=sys.argv[2] # destination directory for html files
   name=resultsFile
   handi( name, resultsFile, html_dir )
-
diff --git a/DataQuality/DataQualityUtils/scripts/StoreDB.py b/DataQuality/DataQualityUtils/scripts/StoreDB.py
deleted file mode 100755
index b5a3a17108de9eb94f0c86bf9bcf94b232f3cb3b..0000000000000000000000000000000000000000
--- a/DataQuality/DataQualityUtils/scripts/StoreDB.py
+++ /dev/null
@@ -1,135 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-# +-----------------------------------------------------+
-# | Vassilis D. Kazazakis, StoreDB.py v1.1		|
-# | Stores ROOT file information in the COOL database	|
-# +-----------------------------------------------------+
-
-import sys
-import getopt
-import DataQualityUtils.DBInfo as DBInfo
-
-from PyCool import cool, coral
-
-# user & password information should be handled by each individual program
-# if you have authentication file, this works 
-
-home = os.environ.get('HOME')
-os.environ['CORAL_AUTH_PATH'] = home+"/private"
-
-##userName = "ATLAS_COOLOFL_GLOBAL_W"
-##password = "*******" ### 
-
-connectString  = "oracle://" +  DBInfo.getWriteServer()
-connectString += ";schema=" +   DBInfo.getSchema()
-#connectString += ";user=" +     userName
-connectString += ";dbname=" +   DBInfo.getDbName()
-#connectString += ";password=" + password
-
-folderName = DBInfo.getFolder()
-
-def usage():
-	print "Usage:", sys.argv[0], "[options]"
-	print """
-	-f/--filename 	ROOT filename to enter to the database
-	-g/--guid	GUID of the file
-	-s/--start	Starting run:lumiBlock number
-	-e/--end	Ending run:lumiBlock (defaults to valid until next insertion) 
-	-c/--channel	Channel name
-	-l/--list	List all available channels
-	"""
-
-def addToDatabase(filename, guid, channelName, runStart, runEnd = None):
-	dbSvc = cool.DatabaseSvcFactory.databaseService()
-
-	try:
-		channel_id = DBInfo.getChannelDict()[channelName]
-	except KeyError:
-		print >>sys.stderr, "Channel", channelName, "does not exist."
-		sys.exit(-1)
-
-	if runEnd == None:
-		timeEnd = cool.ValidityKeyMax
-	else:
-		timeEnd = runEnd[0]  << 32
-		timeEnd += runEnd[1]
-	timeStart = runStart[0] << 32	## Since the 32 low bits are reserved for LumiBlocks...
-	timeStart += runStart[1]	## add lumiblock
-	
-	if timeStart >= timeEnd:
-		print >>sys.stderr, "Starting timestamp must be LESS than ending timestamp."
-		sys.exit(-1)
-
-	try:
-		db = dbSvc.openDatabase( connectString, False )
-	except:
-		print >>sys.stderr, "Error opening database."
-		sys.exit(-1)
-	
-	if db.existsFolder(folderName):
-		folder = db.getFolder(folderName)
-		spec = folder.payloadSpecification()
-	else:
-		print >>sys.stderr, "Error: Folder", folderName,"does not exist in database."
-		sys.exit(-1)
-
-	data = cool.Record(spec)
-	data["filename"] = filename
-	data["guid"] = guid
-	folder.storeObject( timeStart, timeEnd, data, channel_id)
-
-if __name__ == "__main__":
-	
-	try:
-		optlist, unparsed = getopt.getopt(sys.argv[1:], 
-							"f:s:e:c:lg:",
-							["filename=", "start=", "end=", "channel=", "list", "guid="])
-	except getopt.GetoptError:
-		print >>sys.stderr, "Error parsing arguments."
-		usage()
-		sys.exit(-1)
-
-	filename = None
-	channelName = "default"
-	guid = 0
-	runStart = (0, 0)
-	runEnd = None
-
-	for opt, arg in optlist:
-		if opt in ("-f", "--filename"):
-			filename = arg
-		elif opt in ("-g", "--guid"):
-			guid = int(arg)
-		elif opt in ("-s", "--start"):
-			startList = arg.split(":")
-			if len(startList) == 1:
-				startList.append("0")
-			for i in range(2):
-				if startList[i] == "":
-					startList[i] = "0"
-			runStart = ( int(startList[0]), int(startList[1]) )
-		elif opt in ("-e", "--end"):
-			endList = arg.split(":")
-			if len(endList) == 1:
-				endList.append("0")
-			for i in range(2):
-				if endList[i] == "":
-					endList[i] = "0"
-			runEnd = ( int(endList[0]), int(endList[1]) )
-		elif opt in ("-c", "--channel"):
-			channelName = arg
-		elif opt in ("-l", "--list"):
-			print "Available channels:"
-			for key in DBInfo.getChannelDict().keys():
-				print " ",key
-			sys.exit(0)
-
-	if filename == None:
-		print >>sys.stderr, "Must define ROOT file."
-		usage()
-		sys.exit(-1)
-
-	#guid = generateGUID(filename)
-
-	addToDatabase(filename, guid, channelName, runStart, runEnd)
diff --git a/DataQuality/DataQualityUtils/scripts/checkCorrelInHIST.py b/DataQuality/DataQualityUtils/scripts/checkCorrelInHIST.py
index 4f56fa1f26ee5d346779bbecd266424baecd67f6..c891d11c372e47297fbc9fbf1fa15fa96ce64041 100644
--- a/DataQuality/DataQualityUtils/scripts/checkCorrelInHIST.py
+++ b/DataQuality/DataQualityUtils/scripts/checkCorrelInHIST.py
@@ -1,5 +1,5 @@
 #!/usr/bin env python
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 # Script to browse the unmerged HIST files and correlate the number of entries in a region defined by (x;y,delta) arguments
 # Uses the pathExtract library to extract the EOS path
 # See the twiki: https://twiki.cern.ch/twiki/bin/viewauth/Atlas/UsingDQInframerge
@@ -34,15 +34,15 @@
 #                        [delta] (if not provided use global)
 # Author : Benjamin Trocme (LPSC Grenoble) / 2017
 
-import os, sys  
-import string
+from __future__ import print_function
+import os, sys
 import argparse
-import xmlrpclib
+from six.moves import xmlrpc_client as xmlrpclib
 
 from DataQualityUtils import pathExtract         
 
-from ROOT import TFile,TCanvas,TBox,TPaveText,TColor
-from ROOT import TH1,TH2,TH1I,TH1D,TH2D
+from ROOT import TFile,TCanvas,TBox,TPaveText
+from ROOT import TH1D,TH2D
 from ROOT import kBlue,kGreen,kRed
 from ROOT import gStyle
 
@@ -88,9 +88,9 @@ if args.arg5 != "":
   tag = args.arg5
 else: # Try to retrieve the data project tag via atlasdqm
   if (not os.path.isfile("atlasdqmpass.txt")):
-    print "To retrieve the data project tag, you need to generate an atlasdqm key and store it in this directory as atlasdqmpass.txt (yourname:key)"
-    print "To generate a kay, go here : https://atlasdqm.cern.ch/dqauth/"
-    print "You can also define by hand the data project tag with the option -t"
+    print("To retrieve the data project tag, you need to generate an atlasdqm key and store it in this directory as atlasdqmpass.txt (yourname:key)")
+    print("To generate a key, go here : https://atlasdqm.cern.ch/dqauth/")
+    print("You can also define by hand the data project tag with the option -t")
     sys.exit()
   passfile = open("atlasdqmpass.txt")
   passwd = passfile.read().strip(); passfile.close()
@@ -98,8 +98,8 @@ else: # Try to retrieve the data project tag via atlasdqm
   s = xmlrpclib.ServerProxy(passurl)
   run_spec = {'stream': 'physics_CosmicCalo', 'proc_ver': 1,'source': 'tier0', 'low_run': runNumber, 'high_run':runNumber}
   run_info= s.get_run_information(run_spec)
-  if '%d'%runNumber not in run_info.keys() or len(run_info['%d'%runNumber])<2:
-    print "Unable to retrieve the data project tag via atlasdqm... Please double check your atlasdqmpass.txt or define it by hand with -t option"
+  if '%d'%runNumber not in list(run_info.keys()) or len(run_info['%d'%runNumber])<2:
+    print("Unable to retrieve the data project tag via atlasdqm... Please double check your atlasdqmpass.txt or define it by hand with -t option")
     sys.exit()
   tag = run_info['%d'%runNumber][1]
 
@@ -113,7 +113,7 @@ b_WebdisplayPath = False
 if len(args.arg11): # The histograms ROOT file paths are directly provided 
   hArgs = args.arg11
 elif len(args.arg12): # The histograms paths are provided as webdisplay paths
-  print "I will have to retrieve the ROOT file path of histograms"
+  print("I will have to retrieve the ROOT file path of histograms")
   b_WebdisplayPath = True
   hArgs = args.arg12
   passfile = open("/afs/cern.ch/user/l/larmon/public/atlasdqmpass.txt")
@@ -122,7 +122,7 @@ elif len(args.arg12): # The histograms paths are provided as webdisplay paths
   prefix = {'express':'express_','Egamma':'physics_','CosmicCalo':'physics_','JetTauEtmiss':'physics_','Main':'physics_','ZeroBias':'physics_','MinBias':'physics_'}
   run_spec = {'run_list':[runNumber],'stream':"%s%s"%(prefix[stream],stream)}
 else:
-  print "You need to define at least 1 histogram..."
+  print("You need to define at least 1 histogram...")
   sys.exit()
 
 histos = {}
@@ -132,7 +132,7 @@ histoTypes = ["1d","2d"]
 
 runFilePath = "root://eosatlas.cern.ch/%s"%(pathExtract.returnEosHistPath(runNumber,stream,amiTag,tag)).rstrip()
 if ("FILE NOT FOUND" in runFilePath):
-  print "No merged file found..."
+  print("No merged file found...")
   sys.exit()
 
 f = TFile.Open(runFilePath)
@@ -147,7 +147,7 @@ histoMerged = {}
 nLB=2500
 nbHitInHot = {}
 
-for iArg in xrange(len(hArgs)): # Loop on histogram arguments
+for iArg in range(len(hArgs)): # Loop on histogram arguments
   if hArgs[iArg] in histoTypes: # I found a new histogram - Process the next arguments
     if hArgs[iArg] == "1d": 
       regionBins = []
@@ -246,15 +246,15 @@ for iArg in xrange(len(hArgs)): # Loop on histogram arguments
       nbHitInHot[tmp_path] = [0.] * nLB
 
 for iHisto in histos.keys():
-  print iHisto,histos[iHisto]
+  print(iHisto,histos[iHisto])
 
 # Extract all the unmerged files available with the LB range
 lbFilePathList = pathExtract.returnEosHistPathLB(runNumber,lowerLumiBlock,upperLumiBlock,stream,amiTag,tag)
 
-print "I have found the merged HIST file %s"%(runFilePath)
-print "I have found %d unmerged HIST files"%(len(lbFilePathList))
-print "The first one is root://eosatlas.cern.ch/%s"%(lbFilePathList[0])
-print "The last one is root://eosatlas.cern.ch/%s"%(lbFilePathList[-1])
+print("I have found the merged HIST file %s"%(runFilePath))
+print("I have found %d unmerged HIST files"%(len(lbFilePathList)))
+print("The first one is root://eosatlas.cern.ch/%s"%(lbFilePathList[0]))
+print("The last one is root://eosatlas.cern.ch/%s"%(lbFilePathList[-1]))
 
 # Loop on all unmerged files
 # and store number of hits per histogram
@@ -297,7 +297,7 @@ for iPath in histos.keys():
     corr = "%s_%s"%(iPath,iPath2)
     corr2 = "%s_%s"%(iPath2,iPath)
     if (iPath != iPath2 and corr2 not in hCorrel.keys()): # Correlation plots
-      print "====== I am checking correlation between %s and %s"%(iPath.split("/")[-1],iPath2.split("/")[-1])
+      print("====== I am checking correlation between %s and %s"%(iPath.split("/")[-1],iPath2.split("/")[-1]))
       
       hCorrel[corr] = TH2D("Correlation_%s"%corr,"Correlation_%s"%corr,50,min(nbHitInHot[iPath])-1,max(nbHitInHot[iPath])+1,50,min(nbHitInHot[iPath2])-1,max(nbHitInHot[iPath2])+1)
       hCorrel[corr].SetXTitle(iPath.split("/")[-1])
@@ -323,7 +323,7 @@ for iPath in histos.keys():
       for iLB in listLB:
         if (nbHitInHot[iPath][iLB] !=0 or nbHitInHot[iPath2][iLB] != 0.):
           hCorrel[corr].Fill(nbHitInHot[iPath][iLB],nbHitInHot[iPath2][iLB])
-          print "LB: %d -> %.2f / %.2f"%(iLB,nbHitInHot[iPath][iLB],nbHitInHot[iPath2][iLB])
+          print("LB: %d -> %.2f / %.2f"%(iLB,nbHitInHot[iPath][iLB],nbHitInHot[iPath2][iLB]))
         if nbHitRatio[corr][iLB]!= -999:
           hRatio[corr].Fill(nbHitRatio[corr][iLB])
         if nbHitRatio[corr2][iLB]!= -999:
@@ -368,30 +368,30 @@ for iPath in histos.keys():
       hEvol[iPath].Draw("P HIST")
 
   
-print "====== Summary data"
+print("====== Summary data")
 already = []
 for iPath in histos.keys():
   for iPath2 in histos.keys():
     corr = "%s_%s"%(iPath,iPath2)
     corr2 = "%s_%s"%(iPath2,iPath)
     if (iPath != iPath2 and corr2 not in already): # Correlation plots
-      print "====== %s vs %s"%(iPath.split("/")[-1],iPath2.split("/")[-1])
-      print "Correlation factor: %.3f"%(hCorrel[corr].GetCorrelationFactor())
+      print("====== %s vs %s"%(iPath.split("/")[-1],iPath2.split("/")[-1]))
+      print("Correlation factor: %.3f"%(hCorrel[corr].GetCorrelationFactor()))
       
       fractionNonZero = hRatio[corr].Integral(2,100)/hRatio[corr].Integral(1,100)
       if fractionNonZero != 0.:
         meanNonZero = hRatio[corr].GetMean()/fractionNonZero
       else:
         meanNonZero = 0.
-      print "When there is at least one entry in %s (%d LBs), there are %.1f %% of events with an entry in %s - Mean ratio: %.2f"%(iPath2.split("/")[-1],hRatio[corr].Integral(1,100),fractionNonZero*100.,iPath.split("/")[-1],meanNonZero)
+      print("When there is at least one entry in %s (%d LBs), there are %.1f %% of events with an entry in %s - Mean ratio: %.2f"%(iPath2.split("/")[-1],hRatio[corr].Integral(1,100),fractionNonZero*100.,iPath.split("/")[-1],meanNonZero))
       
       fractionNonZero = hRatio[corr2].Integral(2,100)/hRatio[corr2].Integral(1,100)
       if fractionNonZero != 0.:
         meanNonZero = hRatio[corr2].GetMean()/fractionNonZero
       else:
         meanNonZero = 0.
-      print "When there is at least one entry in %s (%d LBs), there are %.1f %% of events with an entry in %s - Mean ratio: %.2f"%(iPath.split("/")[-1],hRatio[corr2].Integral(1,100),fractionNonZero*100.,iPath2.split("/")[-1],meanNonZero)
+      print("When there is at least one entry in %s (%d LBs), there are %.1f %% of events with an entry in %s - Mean ratio: %.2f"%(iPath.split("/")[-1],hRatio[corr2].Integral(1,100),fractionNonZero*100.,iPath2.split("/")[-1],meanNonZero))
       
       already.append(corr)
 
-raw_input("I am done...")
+input("I am done...")
diff --git a/DataQuality/DataQualityUtils/scripts/dq_make_web_display.py b/DataQuality/DataQualityUtils/scripts/dq_make_web_display.py
index 7c38dc2b8c3212fbed1c054b21ecfa04416a11a7..50366f2df8bb395dbdbb9c9a295bec306ea8849c 100755
--- a/DataQuality/DataQualityUtils/scripts/dq_make_web_display.py
+++ b/DataQuality/DataQualityUtils/scripts/dq_make_web_display.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 """
 Transate arbitrary root file into a han config file with the "GatherData" algorithm
@@ -8,11 +8,11 @@ Transate arbitrary root file into a han config file with the "GatherData" algori
 9 Oct 2008
 Adapted for fast physics monitoring 14 April 2011
 """
+from __future__ import print_function
 
 #HANDIR='/afs/cern.ch/user/a/atlasdqm/dqmdisk/han_results/fastphysmon/1'
 
-from DQConfMakerBase.DQElements import *
-from DQConfMakerBase.Helpers import IDHelper, make_thresholds
+from DQConfMakerBase.DQElements import DQReference, DQRegion, DQAlgorithm
 from DQHanConfMaker.hanwriter import writeHanConfiguration
 import ROOT
 
@@ -26,14 +26,14 @@ def recurse(rdir, dqregion, ignorepath, reffile=None):
         cl = key.GetClassName(); rcl = ROOT.TClass.GetClass(cl)
         #print key.GetName(), cl
         if ' ' in key.GetName():
-            print 'WARNING: cannot have spaces in histogram names for han config; not including %s %s' % (cl, key.GetName())
+            print('WARNING: cannot have spaces in histogram names for han config; not including %s %s' % (cl, key.GetName()))
             continue
         if rcl.InheritsFrom('TH1'):
             if '/' in key.GetName():
-                print 'WARNING: cannot have slashes in histogram names, encountered in directory %s, histogram %s' % (rdir.GetPath(), key.GetName())
+                print('WARNING: cannot have slashes in histogram names, encountered in directory %s, histogram %s' % (rdir.GetPath(), key.GetName()))
                 continue
             if key.GetName() == 'summary':
-                print 'WARNING: cannot have histogram named summary, encountered in %s' % rdir.GetPath()
+                print('WARNING: cannot have histogram named summary, encountered in %s' % rdir.GetPath())
                 continue
             name = rdir.GetPath().replace(ignorepath, '') + '/' + key.GetName()
             dqpargs = { 'id' :name,
@@ -56,21 +56,21 @@ def prune(dqregion):
     False if we should not
     """
     params = dqregion.getDQParameters()
-    if params == None:
+    if params is None:
         params = []
     subregions = dqregion.getSubRegions()
-    if subregions == None:
+    if subregions is None:
         subregions = []
     else:
         subregions = subregions[:]
     # kill subregions
     for sr in subregions:
-        if sr == None:
+        if sr is None:
             continue
         if prune(sr):
             dqregion.delRelation('DQRegions', sr)
     subregions = dqregion.getSubRegions()
-    if subregions == None:
+    if subregions is None:
         subregions = []
     if len(subregions) + len(params) == 0:
         return True
@@ -79,10 +79,10 @@ def prune(dqregion):
 
 def paramcount(dqregion):
     params = dqregion.getDQParameters()
-    if params == None:
+    if params is None:
         params = []
     subregions = dqregion.getSubRegions()
-    if subregions == None:
+    if subregions is None:
         subregions = []
     
     return len(params) + sum([paramcount(region) for region in subregions])
@@ -90,13 +90,13 @@ def paramcount(dqregion):
 def process(infname, confname, reffile=None):
     f = ROOT.TFile(infname, 'READ')
     if not f.IsOpen():
-        print 'ERROR: cannot open %s' % infname
+        print('ERROR: cannot open %s' % infname)
         return
     
     top_level = DQRegion(id='topRegion',algorithm=worst)
-    print 'Building tree...'
+    print('Building tree...')
     recurse(f, top_level, f.GetPath(), reffile)
-    print 'Pruning dead branches...'
+    print('Pruning dead branches...')
     prune(top_level)
     pc = paramcount(top_level)
  
@@ -104,7 +104,7 @@ def process(infname, confname, reffile=None):
     for x in sublevel:
         top_level.delRelation('DQRegions', x)
         
-    print 'Writing output'
+    print('Writing output')
     writeHanConfiguration( filename = confname , roots = sublevel)
     return pc
 
@@ -113,7 +113,7 @@ def super_process(fname, options):
     import ROOT
     han_is_found = (ROOT.gSystem.Load('libDataQualityInterfaces') == 0)
     if not han_is_found:
-        print 'ERROR: unable to load offline DQMF; unable to proceed'
+        print('ERROR: unable to load offline DQMF; unable to proceed')
         sys.exit(1)
     bname = os.path.basename(fname)
 
@@ -123,6 +123,7 @@ def super_process(fname, options):
     hanoutput = None
 
     failed = False
+    prebuilt_hcfg = False
 
     @contextlib.contextmanager
     def tmpdir():
@@ -133,8 +134,8 @@ def super_process(fname, options):
 
     with tmpdir() as hantmpdir:
         try:
-            print '====> Processing file %s' % (fname)
-            print '====> Generating han configuration file'
+            print('====> Processing file %s' % (fname))
+            print('====> Generating han configuration file')
             hantmpinput = os.path.join(hantmpdir, bname)
             shutil.copyfile(fname, hantmpinput)
             haninput = hantmpinput
@@ -142,15 +143,15 @@ def super_process(fname, options):
             rv = process(hantmpinput, hanconfig, options.reffile)
             # bad hack. rv = number of histogram nodes
             if rv == 0:
-                print 'No histograms to display; exiting with code 0'
+                print('No histograms to display; exiting with code 0')
                 sys.exit(0)
 
-            print '====> Compiling han configuration'
+            print('====> Compiling han configuration')
             hanhcfg = os.path.join(hantmpdir, 'han.hcfg')
     ##         os.system('han-config-gen.exe %s' % hanconfig)
             ROOT.dqi.HanConfig().AssembleAndSave( hanconfig, hanhcfg )
 
-            print '====> Executing han'
+            print('====> Executing han')
             import resource
             memlimit = resource.getrlimit(resource.RLIMIT_AS)
             resource.setrlimit(resource.RLIMIT_AS, (memlimit[1], memlimit[1]))
@@ -162,18 +163,18 @@ def super_process(fname, options):
                 raise Exception('failure in han')
             hantargetdir = os.path.join(options.webdir, str(options.iteration),
                                         options.dispname, 'run_%s' % run)
-            print '====> Copying to', hantargetdir
+            print('====> Copying to', hantargetdir)
             hantargetfile = os.path.join(hantargetdir, 'run_%s_han.root' % run)
             if not os.access(hantargetdir, os.W_OK):
                 try:
                     os.makedirs(hantargetdir)
-                except Exception, e:
-                    print 'Unable to create %s for some reason: %s' % (hantargetdir, e)
+                except Exception as e:
+                    print('Unable to create %s for some reason: %s' % (hantargetdir, e))
                     raise Exception('Error during execute')
             shutil.copy2(hanoutput, hantargetfile)
-            print '====> Cleaning up'
-        except Exception, e:
-            print e
+            print('====> Cleaning up')
+        except Exception as e:
+            print(e)
             if 'canonical format' not in str(e):
                 failed = True
         finally:
@@ -183,14 +184,14 @@ def super_process(fname, options):
                     os.unlink(hanconfig)
                     os.unlink(hanhcfg)
                 os.unlink(hanoutput)
-            except:
+            except Exception:
                 pass
         
     return not failed
 
         
 if __name__=="__main__":
-    import sys, optparse, shutil
+    import sys, optparse
     parser = optparse.OptionParser(usage='usage: %prog [options] inputfile run')
     parser.add_option('--webdir', default='/afs/cern.ch/user/a/atlasdqm/dqmdisk/han_results/fastphysmon',
                       help='Change directory to store web display files')
@@ -212,11 +213,11 @@ if __name__=="__main__":
         options.run = run
     except ValueError:
         parser.print_help()
-        print 'Specified run', args[1], 'doesn\'t seem to be an integer'
+        print('Specified run', args[1], 'doesn\'t seem to be an integer')
         sys.exit(1)
 
     rv = super_process(fname, options)
-    if rv == True:
+    if rv:
         sys.exit(0)
     else:
         sys.exit(1)    
diff --git a/DataQuality/DataQualityUtils/scripts/hancool.py b/DataQuality/DataQualityUtils/scripts/hancool.py
index 091394ff290b30dcc6b5cdc6cd17f831ac508399..5f7e77af8a30be014f067d9bc10de8c6e8028d5a 100755
--- a/DataQuality/DataQualityUtils/scripts/hancool.py
+++ b/DataQuality/DataQualityUtils/scripts/hancool.py
@@ -1,7 +1,8 @@
 #!/usr/bin/env python
 
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
+from __future__ import print_function
 import DataQualityUtils.hancoolmod as mod
 
 import sys
@@ -10,11 +11,11 @@ import sys
 def usage():
   cmdi = sys.argv[0].rfind("/")
   cmd = sys.argv[0][cmdi+1:]
-  print ""
-  print "Usage: ", cmd, "<runnumber> <path> <dbConnection> <detstatus> <dbtag>"
-  print "or"
-  print "Usage: ", cmd, "(picking up default settings)"
-  print ""
+  print("")
+  print("Usage: ", cmd, "<runnumber> <path> <dbConnection> <detstatus> <dbtag>")
+  print("or")
+  print("Usage: ", cmd, "(picking up default settings)")
+  print("")
 
 
 if __name__ == "__main__":
@@ -51,4 +52,3 @@ if __name__ == "__main__":
       db_tag = "HEAD"
 
   mod.hancool(runNumber,filePath,dbConnection,dqmfOfl, db_tag)
-
diff --git a/DataQuality/DataQualityUtils/scripts/hancool_histo.py b/DataQuality/DataQualityUtils/scripts/hancool_histo.py
index 4145936a6c2a3fff624cca8ce3ec8d064637debf..a28967c4b556930058f2971070cf41a93f257cad 100755
--- a/DataQuality/DataQualityUtils/scripts/hancool_histo.py
+++ b/DataQuality/DataQualityUtils/scripts/hancool_histo.py
@@ -1,8 +1,8 @@
 #!/usr/bin/env python
 
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
+from __future__ import print_function
 import DataQualityUtils.hancool_histo_mod as mod
 
 import sys
@@ -13,11 +13,11 @@ import sys
 def usage():
   cmdi = sys.argv[0].rfind("/")
   cmd = sys.argv[0][cmdi+1:]
-  print ""
-  print "Usage: ", cmd, "<runnumber> <path> <dbConnection> <detstatus> <dbtag>"
-  print "or"
-  print "Usage: ", cmd, "(picking up default settings)"
-  print ""
+  print("")
+  print("Usage: ", cmd, "<runnumber> <path> <dbConnection> <detstatus> <dbtag>")
+  print("or")
+  print("Usage: ", cmd, "(picking up default settings)")
+  print("")
 
 
 ########################################
@@ -62,4 +62,3 @@ if __name__ == "__main__":
       dbTagName          = "DetStatusDQMFOFLH-FDR2-02"
       
   mod.hancool_histo( inputFilePath, input_run, dbConnectionHisto,dqmfOflHisto, dbTagName)
-
diff --git a/DataQuality/DataQualityUtils/scripts/handi.py b/DataQuality/DataQualityUtils/scripts/handi.py
index eeb41cacaebdba459fa52446d21d920c9ebf6a53..ce12ee49cf4d9db7a1ca27a05ece809e813dc5c7 100755
--- a/DataQuality/DataQualityUtils/scripts/handi.py
+++ b/DataQuality/DataQualityUtils/scripts/handi.py
@@ -1,10 +1,8 @@
 #!/usr/bin/env python
 
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
-## *****************************************************************************
-VERSION = '$Id: handi.py 598803 2014-05-24 10:52:51Z ponyisi $'
-## *****************************************************************************
+from __future__ import print_function
 
 import DataQualityUtils.handimod as mod
 
@@ -34,7 +32,7 @@ def handi( name, resultsFile, htmlDir, browserMenu=False, allDirsScriptLoc="http
 def usage():
   cmdi = sys.argv[0].rfind("/")
   cmd = sys.argv[0][cmdi+1:]
-  print "Usage: ", cmd, "[-m|--browser-menu] [-s|--script-loc=<URL>] <name_of_system> <results_file_name> <html_output_directory>"
+  print("Usage: ", cmd, "[-m|--browser-menu] [-s|--script-loc=<URL>] <name_of_system> <results_file_name> <html_output_directory>")
 
 
 ########################################
diff --git a/DataQuality/DataQualityUtils/scripts/hotSpotInHIST.py b/DataQuality/DataQualityUtils/scripts/hotSpotInHIST.py
index 78ea304591cba970f39f46db770fd770be7757de..d11dbb687dc8b30af4cd6a80a9109936bc30bec5 100644
--- a/DataQuality/DataQualityUtils/scripts/hotSpotInHIST.py
+++ b/DataQuality/DataQualityUtils/scripts/hotSpotInHIST.py
@@ -1,5 +1,5 @@
 #!/usr/bin env python
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 # Script to browse the unmerged HIST files and extract LBs for which at least N occurences of an object is found
 # at a position foundas noisy
 # Uses the pathExtract library to extract the EOS path
@@ -32,14 +32,14 @@
 #  -g, --grl             Look for Calo/LAr/Tile defects set in suspicious LBs
 # Author : Benjamin Trocme (LPSC Grenoble) / 2015-2016
 
-import os, sys  
-import string
-import argparse,xmlrpclib
+import os, sys
+import argparse
+from six.moves import xmlrpc_client as xmlrpclib
 
 from DataQualityUtils import pathExtract         
 
-from ROOT import TFile,TCanvas,TBox,TColor,TLegend
-from ROOT import TH1,TH2,TH1I
+from ROOT import TFile,TCanvas,TBox,TLegend,TLine,TArrow
+from ROOT import TH1I
 from ROOT import kBlue,kGreen,kOrange,kMagenta,kCyan,kRed
 from ROOT import gStyle
 
@@ -89,9 +89,9 @@ if args.arg5 != "":
   tag = args.arg5
 else: # Try to retrieve the data project tag via atlasdqm
   if (not os.path.isfile("atlasdqmpass.txt")):
-    print "To retrieve the data project tag, you need to generate an atlasdqm key and store it in this directory as atlasdqmpass.txt (yourname:key)"
-    print "To generate a kay, go here : https://atlasdqm.cern.ch/dqauth/"
-    print "You can also define by hand the data project tag wit hthe option -t"
+    print("To retrieve the data project tag, you need to generate an atlasdqm key and store it in this directory as atlasdqmpass.txt (yourname:key)")
+    print("To generate a key, go here : https://atlasdqm.cern.ch/dqauth/")
+    print("You can also define by hand the data project tag with the option -t")
     sys.exit()
   passfile = open("atlasdqmpass.txt")
   passwd = passfile.read().strip(); passfile.close()
@@ -100,7 +100,7 @@ else: # Try to retrieve the data project tag via atlasdqm
   run_spec = {'stream': 'physics_CosmicCalo', 'proc_ver': 1,'source': 'tier0', 'low_run': runNumber, 'high_run':runNumber}
   run_info= s.get_run_information(run_spec)
   if '%d'%runNumber not in run_info.keys() or len(run_info['%d'%runNumber])<2:
-    print "Unable to retrieve the data project tag via atlasdqm... Please double check your atlasdqmpass.txt or define it by hand with -t option"
+    print("Unable to retrieve the data project tag via atlasdqm... Please double check your atlasdqmpass.txt or define it by hand with -t option")
     sys.exit()
   tag = run_info['%d'%runNumber][1]
   
@@ -311,35 +311,35 @@ if histoType == "2d_etaPhiHotSpot":
   summaryTitle = "Nb of hits in a region of %.2f around the position (%.2f,%.2f) - %s"%(deltaSpot,etaSpot,phiSpot,histoName)
   statement = "I have looked for LBs with at least %.0f entries at position (%.2f,%.2f) in %s histogram"%(minInLB,etaSpot,phiSpot,histoName)
   if (etaSpot==-999. or phiSpot==-999.):
-    print "No eta/phi defined -> whole histogram considered!"
+    print("No eta/phi defined -> whole histogram considered!")
     b_wholeHisto = True
 if histoType == "2d_xyHotSpot":
   b_ValueNotEntries = True
   if (deltaSpot != 0):
-    print "Warning: you have been summing over several bins a variable that may be not summable (different from summing hits!)"
+    print("Warning: you have been summing over several bins a variable that may be not summable (different from summing hits!)")
   summaryTitle = "Value in a region of %.2f around the position (%.2f,%.2f) - %s"%(deltaSpot,xSpot,ySpot,histoName)
   statement = "I have looked for LBs with at least variable > %.2f at position (%.2f,%.2f) in %s histogram"%(minInLB,xSpot,ySpot,histoName)
   if (xSpot==-999. or ySpot==-999.):
-    print "No x/y defined -> whole histogram considered!"
-    print "Warning: you have been summing over several bins a variable that may be not summable (different from summing hits!)"
+    print("No x/y defined -> whole histogram considered!")
+    print("Warning: you have been summing over several bins a variable that may be not summable (different from summing hits!)")
     b_wholeHisto = True
 elif histoType == "1d_etaHotSpot":
   summaryTitle = "Nb of hits in a region of %.2f around the eta position %.2f - %s"%(deltaSpot,etaSpot,histoName)
   statement = "I have looked for LBs with at least %.0f entries at eta position %.2f in %s histogram"%(minInLB,etaSpot,histoName)
   if (etaSpot==-999.):
-    print "No eta/phi -> whole histogram considered!"
+    print("No eta/phi -> whole histogram considered!")
     b_wholeHisto = True
 elif histoType == "1d_phiHotSpot":
   summaryTitle = "Nb of hits in a region of %.2f around the phi position %.2f - %s"%(deltaSpot,phiSpot,histoName)
   statement = "I have looked for LBs with at least %.0f entries at phi position %.2f in %s histogram"%(minInLB,phiSpot,histoName)
   if (phiSpot==-999.):
-    print "No eta/phi defined -> whole histogram considered!"
+    print("No eta/phi defined -> whole histogram considered!")
     b_wholeHisto = True
 elif histoType == "1d_integralAbove":
   summaryTitle = "Nb of hits in the band above %.2f - %s"%(integralAbove,histoName)
   statement = "I have looked for LBs with at least %.0f entries in band above %.2f in %s histogram"%(minInLB,integralAbove,histoName)
   if (integralAbove==-999.):
-    print "No lwoer bound defined -> whole histogram considered!"
+    print("No lower bound defined -> whole histogram considered!")
     b_wholeHisto = True
 #    print "You must define the lower bound of your integral"
 #    sys.exit()
@@ -354,7 +354,7 @@ else:
 # and plot the histogram
 runFilePath = "root://eosatlas.cern.ch/%s"%(pathExtract.returnEosHistPath(runNumber,stream,amiTag,tag)).rstrip()
 if ("FILE NOT FOUND" in runFilePath):
-  print "No merged file found..."
+  print("No merged file found...")
   sys.exit()
 
 f = TFile.Open(runFilePath)
@@ -498,10 +498,10 @@ lbCanvas = []
 histoLBNoisy = []
 fLB = {}
 
-print "I have found the merged HIST file %s"%(runFilePath)
-print "I have found %d unmerged HIST files"%(len(lbFilePathList))
-print "The first one is root://eosatlas.cern.ch/%s"%(lbFilePathList[0])
-print "The last one is root://eosatlas.cern.ch/%s"%(lbFilePathList[len(lbFilePathList)-1])
+print("I have found the merged HIST file %s"%(runFilePath))
+print("I have found %d unmerged HIST files"%(len(lbFilePathList)))
+print("The first one is root://eosatlas.cern.ch/%s"%(lbFilePathList[0]))
+print("The last one is root://eosatlas.cern.ch/%s"%(lbFilePathList[len(lbFilePathList)-1]))
 
 # Loop on all unmerged files
 
@@ -535,8 +535,8 @@ if (lowerLB == upperLB):
   lowerLB = lowerLB - 1
   upperLB = upperLB + 4
 
-print ""
-print statement
+print("")
+print(statement)
 
 maxNbInHot = 0
 totalInRegionRecomp = {} 
@@ -553,7 +553,7 @@ for iHisto in histoKeys:
 sortedLB = {}
 
 for iHisto in histoKeys:
-  print "======= ",histoLegend[iHisto]
+  print("======= ",histoLegend[iHisto])
   for iBin in regionBins[iHisto]:
     totalInRegion[iHisto] = totalInRegion[iHisto] + histo[iHisto].GetBinContent(iBin)
   
@@ -571,21 +571,21 @@ for iHisto in histoKeys:
   for i in range(nLB):
     if nbHitInHot[iHisto][sortedLB[iHisto][i]]>=minInLB:
       if not b_ValueNotEntries:
-        print "%d-LB: %d -> %d hits"%(i,sortedLB[iHisto][i],nbHitInHot[iHisto][sortedLB[iHisto][i]])
+        print("%d-LB: %d -> %d hits"%(i,sortedLB[iHisto][i],nbHitInHot[iHisto][sortedLB[iHisto][i]]))
       else:
-        print "%d-LB: %d -> %.2f"%(i,sortedLB[iHisto][i],nbHitInHot[iHisto][sortedLB[iHisto][i]])
+        print("%d-LB: %d -> %.2f"%(i,sortedLB[iHisto][i],nbHitInHot[iHisto][sortedLB[iHisto][i]]))
 
   if not b_ValueNotEntries:
-    print "In the whole run, there are %d entries"%(totalInRegion[iHisto])
+    print("In the whole run, there are %d entries"%(totalInRegion[iHisto]))
     if (totalInRegionRecomp[iHisto] != totalInRegion[iHisto]):
-      print "To be compared with %d entries cumulated from unmerged files"%(totalInRegionRecomp[iHisto])
+      print("To be compared with %d entries cumulated from unmerged files"%(totalInRegionRecomp[iHisto]))
       if (totalInRegionRecomp[iHisto] < totalInRegion[iHisto]):
-        print "This is normal only if you restricted the LB range..."
+        print("This is normal only if you restricted the LB range...")
       if (totalInRegionRecomp[iHisto] > totalInRegion[iHisto]):
-        print "This can be also caused by multiple processing, try to filter with -a option"
-        print "File path of the first file:",lbFilePathList[0]
+        print("This can be also caused by multiple processing, try to filter with -a option")
+        print("File path of the first file:",lbFilePathList[0])
   else:
-    print "In the whole run, the value is %.2f"%(totalInRegion[iHisto])
+    print("In the whole run, the value is %.2f"%(totalInRegion[iHisto]))
 
 #########################################################################
 ## Plot evolution vs LB
@@ -620,7 +620,7 @@ if (upperLB>=lowerLB): # check that at least one noisy LB was found
   c0.Update()
 
 if defectQuery:
-  print "I am looking for LAr/Tile/Calo defects defined for the suspicious LB"
+  print("I am looking for LAr/Tile/Calo defects defined for the suspicious LB")
   from DQDefects import DefectsDB
   db = DefectsDB()
   defectList = [d for d in (db.defect_names | db.virtual_defect_names) if ((d.startswith("LAR") and "SEV" in d) or (d.startswith("TILE")) or (d.startswith("CALO")))]
@@ -632,8 +632,8 @@ if defectQuery:
         associatedSuspicious = True
     if associatedSuspicious:
       if (iDef.since.lumi == iDef.until.lumi-1):
-        print "%s: %d set by %s - %s"%(iDef.channel,iDef.since.lumi,iDef.user,iDef.comment)
+        print("%s: %d set by %s - %s"%(iDef.channel,iDef.since.lumi,iDef.user,iDef.comment))
       else:
-        print "%s: %d->%d set by %s - %s"%(iDef.channel,iDef.since.lumi,iDef.until.lumi-1,iDef.user,iDef.comment)
+        print("%s: %d->%d set by %s - %s"%(iDef.channel,iDef.since.lumi,iDef.until.lumi-1,iDef.user,iDef.comment))
 
-raw_input("I am done...")
+input("I am done...")
diff --git a/DataQuality/DataQualityUtils/scripts/hotSpotInTAG.py b/DataQuality/DataQualityUtils/scripts/hotSpotInTAG.py
index 4c3860cbed85c87038b0085bd3bc4e22dde01a1d..e8cfb30f07508212e67f8b53cfa6cc4824715738 100644
--- a/DataQuality/DataQualityUtils/scripts/hotSpotInTAG.py
+++ b/DataQuality/DataQualityUtils/scripts/hotSpotInTAG.py
@@ -1,6 +1,6 @@
 #!/usr/bin env python
 
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 # Script to browse a TAG file and extract LBs for which at least N occurences of an object is found
 # in a region defined as noisy.
 # Uses the pathExtract library to extract the EOS path
@@ -18,20 +18,16 @@
 #  -n, --noplot          Do not plot LB map
 # Author : Benjamin Trocme (LPSC Grenoble) / Summer 2012, updated in 2015
 
+from __future__ import print_function
 
-import os, sys  
-import string,math
+import sys
 from math import fabs
 import argparse
-from DataQualityUtils import pathExtract
+from DataQualityUtils import pathExtract, returnFilesPath
 
-import ROOT
-from ROOT import *
-from ROOT import gROOT, gDirectory
-from ROOT import gStyle, TCanvas, TString
-from ROOT import TFile, TTree
-from ROOT import TH1F,TH2F,TBrowser
-from ROOT import TPaveText
+from ROOT import gStyle, TCanvas
+from ROOT import TChain
+from ROOT import TH1D, TH2D, TH1I
 
 # Analysis functions===========================================================================================================
 def analyzeTree():
@@ -121,9 +117,9 @@ gStyle.SetOptStat("em")
 if ("MET" in objectType):
   etaSpot=0
 
-print '\n'
-print '---------------------------------'
-print "Investigation on run "+str(run)+"/"+stream+" stream with ami TAG "+amiTag
+print('\n')
+print('---------------------------------')
+print("Investigation on run "+str(run)+"/"+stream+" stream with ami TAG "+amiTag)
 
 tree = TChain("POOLCollectionTree")
 if tagDirectory=="": # TAG files stored on EOS
@@ -131,18 +127,18 @@ if tagDirectory=="": # TAG files stored on EOS
   if len(listOfFiles)>0:
     for files in listOfFiles:
       tree.AddFile("root://eosatlas/%s"%(files))
-      print "I chained the file %s"%(files)
+      print("I chained the file %s"%(files))
   else:
-    print "No file found on EOS.Exiting..."
+    print("No file found on EOS. Exiting...")
     sys.exit()
 else: # TAG files on user account
   listOfFiles = returnFilesPath(tagDirectory,"TAG")
   if len(listOfFiles)>0:
     for files in listOfFiles:
       tree.AddFile("%s"%(files))
-      print "I chained the file %s"%(files)
+      print("I chained the file %s"%(files))
     else:
-      print "No TAG file found in directory %s.Exiting..."%(tagDirectory)
+      print("No TAG file found in directory %s. Exiting..."%(tagDirectory))
   
 
 entries = tree.GetEntries()
@@ -161,26 +157,26 @@ else:
   h0map = TH2D("map","General map of %s with Et/Pt > %d MeV"%(objectType,thresholdE),90,-4.5,4.5,64,-3.14,3.14)
   h0mapClean = TH2D("mapClean","General map of %s with Et/Pt > %d MeV - LArFlags != ERROR"%(objectType,thresholdE),90,-4.5,4.5,64,-3.14,3.14)
 
-print "I am looking for LBs with at least %d %s in a region of %.2f around (%.2f,%.2f) and Et/Pt > %d MeV"%(minInLB,objectType,deltaSpot,etaSpot,phiSpot,thresholdE)
-for jentry in xrange( entries ): # Loop on all events
+print("I am looking for LBs with at least %d %s in a region of %.2f around (%.2f,%.2f) and Et/Pt > %d MeV"%(minInLB,objectType,deltaSpot,etaSpot,phiSpot,thresholdE))
+for jentry in range( entries ): # Loop on all events
   if (jentry % 100000 == 0):
-    print "%d / %d evnt processed"%(jentry,entries)
+    print("%d / %d events processed"%(jentry,entries))
   nb = tree.GetEntry( jentry )
   if (tree.LumiBlockN>lowerLumiBlock and tree.LumiBlockN<upperLumiBlock):
     analyzeTree()     
 
-print "I have looked for LBs with at least %d %s in a region of %.2f around (%.2f,%.2f) and Et/Pt > %d MeV"%(minInLB,objectType,deltaSpot,etaSpot,phiSpot,thresholdE)
+print("I have looked for LBs with at least %d %s in a region of %.2f around (%.2f,%.2f) and Et/Pt > %d MeV"%(minInLB,objectType,deltaSpot,etaSpot,phiSpot,thresholdE))
 if (args.larcleaning):
-  print "WARNING : The LArCleaning for noise bursts (LArEventInfo != ERROR) has been DEACTIVATED!!!"
+  print("WARNING : The LArCleaning for noise bursts (LArEventInfo != ERROR) has been DEACTIVATED!!!")
 else:
-  print "The LArCleaning (LArEventInfo != ERROR) for noise bursts has been activated"
+  print("The LArCleaning (LArEventInfo != ERROR) for noise bursts has been activated")
 
 nLB_offending = []
 lowerLB = 2500
 upperLB = 0
 for i in range(nLB):
   if nbHitInHot[i]>=minInLB:
-    print "LB: %d -> %d hits (LAr flag in this LB : %d veto / In these events : %d Std / %d SatTight)"%(i,nbHitInHot[i],nbNoiseBurstVeto[i],nbLArNoisyRO_Std[i],nbLArNoisyRO_SatTight[i])
+    print("LB: %d -> %d hits (LAr flag in this LB : %d veto / In these events : %d Std / %d SatTight)"%(i,nbHitInHot[i],nbNoiseBurstVeto[i],nbLArNoisyRO_Std[i],nbLArNoisyRO_SatTight[i]))
     nLB_offending.append(i)
     if i<lowerLB : lowerLB = i
     if i>upperLB : upperLB = i
@@ -281,6 +277,6 @@ if (not args.noplot):
       tree.Draw("TauJetPt2 >> +h1Pt_%d"%(nLB_offending[i]),"abs(TauJetEta2-%.3f) < %.3f && abs(TauJetPhi2-%.3f) < %.3f  && LumiBlockN==%d && %s"%(etaSpot,deltaSpot,phiSpot,deltaSpot,nLB_offending[i],cutC))
     
 if ("Tau" in objectType):     
-  print 'WARNING : in recent TAGs, the TauJet were not filled - A double check is welcome: tree.Draw(\"TauJetEta1\")'
+  print('WARNING : in recent TAGs, the TauJet were not filled - A double check is welcome: tree.Draw(\"TauJetEta1\")')
 
-raw_input("I am done...")
+input("I am done...")
diff --git a/DataQuality/DataQualityUtils/scripts/mergePhysValFiles.py b/DataQuality/DataQualityUtils/scripts/mergePhysValFiles.py
index 1765bb368b697ab9233811a2684c6380c0a7b90f..e246ea68b85b06f4eeba1f60ec6941c5472dd557 100755
--- a/DataQuality/DataQualityUtils/scripts/mergePhysValFiles.py
+++ b/DataQuality/DataQualityUtils/scripts/mergePhysValFiles.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 #----------------------------------------------------------------------
 #stand-alone script to merge specific directories of NTUP_PHYSVAL files 
@@ -8,7 +8,8 @@
 #16 May 2016
 #----------------------------------------------------------------------
 
-import getopt,os,sys,glob,argparse,ROOT,time
+from __future__ import print_function
+import os,glob,argparse,ROOT,time
 
 start = time.clock()
 
@@ -32,39 +33,39 @@ f = ROOT.TFile(output_file, "recreate")
 folder = os.getcwd()
 f2 = ROOT.TFile(files[1])
 
-print "Target file: " + output_file
+print("Target file: " + output_file)
 for infile in files:
-    print "Found input file: " + infile
+    print("Found input file: " + infile)
     if os.path.samefile(output_file, infile):
-        print "Please make sure that the output file is not part of the input files! Stopping."
+        print("Please make sure that the output file is not part of the input files! Stopping.")
         quit()
 
 errors = []
 
 def mergeFolder(path) :
-    print "Merging folder " + path
+    print("Merging folder " + path)
     d = f2.Get(path)
     if not d:
         error = "ERROR: Cannot find directory " + path + ". Omitting."
-        print error
+        print(error)
         errors.append(error)
         return
     dirlist = d.GetListOfKeys()
     for subdir in dirlist:
         obj = subdir.ReadObj()
         if obj.IsA().InheritsFrom(ROOT.TH1.Class()):
-            print "Now merging "+obj.GetName()
+            print("Now merging "+obj.GetName())
             h1 = obj
             hpath = d.GetPath()
             hname = hpath[hpath.find(":")+2:]+"/"+obj.GetName()
-            print "Path: "+hname
+            print("Path: "+hname)
             for tup in files:
                 if tup==files[1]: continue
                 nextfile = ROOT.TFile(tup)
                 h2 = nextfile.Get(hname)
                 if not h2:
                     error = "ERROR: Cannot find " + hname + " in file " + tup + ". Omitting."
-                    print error
+                    print(error)
                     errors.append(error)
                     continue
                 h1.Add(h2)
@@ -72,7 +73,7 @@ def mergeFolder(path) :
             subfolder.cd()
             h1.Write()
         if obj.IsA().InheritsFrom(ROOT.TDirectory.Class()):
-            print "Found subdirectory "+obj.GetName()
+            print("Found subdirectory "+obj.GetName())
             hpath = obj.GetPath()
             subfolder = f.mkdir(hpath[hpath.find(":")+2:],obj.GetTitle())
             subfolder.cd()
@@ -85,9 +86,9 @@ for mergeDir in mergeDirs:
 
 f.Close()
 if len(errors)>0:
-    print "Summary of all errors:"
+    print("Summary of all errors:")
     for phrase in errors:
-        print phrase
+        print(phrase)
 
 end = time.clock()
-print "Wall time used: %s sec" % (end - start)
+print("Wall time used: %s sec" % (end - start))
diff --git a/DataQuality/DataQualityUtils/scripts/physval_make_web_display.py b/DataQuality/DataQualityUtils/scripts/physval_make_web_display.py
index c9dcd60282f9df6c93ff7ece7fd8190ea73f0afe..8b29374ed54233b9fcc67fb71f08cf6146eda01d 100755
--- a/DataQuality/DataQualityUtils/scripts/physval_make_web_display.py
+++ b/DataQuality/DataQualityUtils/scripts/physval_make_web_display.py
@@ -9,8 +9,10 @@ Transate arbitrary root file into a han config file
 Adapted for physics validation 14 May 2014
 """
 
-from DQConfMakerBase.DQElements import *
-from DQConfMakerBase.Helpers import IDHelper, make_thresholds
+from __future__ import print_function
+
+from DQConfMakerBase.DQElements import DQRegion, DQReference, DQAlgorithm, DQAlgorithmParameter
+from DQConfMakerBase.Helpers import make_thresholds
 from DataQualityUtils.hanwriter import writeHanConfiguration
 import ROOT
 
@@ -28,21 +30,21 @@ algorithmparameters = [DQAlgorithmParameter('AuxAlgName--Chi2Test_Chi2_per_NDF',
 # Edit this to change thresholds
 thresh = make_thresholds('Chi2_per_NDF', 1.0, 1.50, 'Chi2Thresholds')
 
+
 def recurse(rdir, dqregion, ignorepath, modelrefs=[], displaystring='Draw=PE', displaystring2D='Draw=COLZ', regex=None, startpath=None, hists=None, manglefunc=None):
-    import re
     if manglefunc is None:
-        manglefunc = lambda a, b: a
+        manglefunc = lambda a, b: a  # noqa: E731
     for key in rdir.GetListOfKeys():
         cl = key.GetClassName(); rcl = ROOT.TClass.GetClass(cl)
         if ' ' in key.GetName():
-            print 'WARNING: cannot have spaces in histogram names for han config; not including %s %s' % (cl, key.GetName())
+            print('WARNING: cannot have spaces in histogram names for han config; not including %s %s' % (cl, key.GetName()))
             continue
         if rcl.InheritsFrom('TH1') or rcl.InheritsFrom('TGraph') or rcl.InheritsFrom('TEfficiency'):
             if '/' in key.GetName():
-                print 'WARNING: cannot have slashes in histogram names, encountered in directory %s, histogram %s' % (rdir.GetPath(), key.GetName())
+                print('WARNING: cannot have slashes in histogram names, encountered in directory %s, histogram %s' % (rdir.GetPath(), key.GetName()))
                 continue
             if key.GetName() == 'summary':
-                print 'WARNING: cannot have histogram named summary, encountered in %s' % rdir.GetPath()
+                print('WARNING: cannot have histogram named summary, encountered in %s' % rdir.GetPath())
                 continue
             fpath = rdir.GetPath().replace(ignorepath, '')
             name = (fpath + '/' + key.GetName()).lstrip('/')
@@ -97,21 +99,21 @@ def prune(dqregion):
     False if we should not
     """
     params = dqregion.getDQParameters()
-    if params == None:
+    if params is None:
         params = []
     subregions = dqregion.getSubRegions()
-    if subregions == None:
+    if subregions is None:
         subregions = []
     else:
         subregions = subregions[:]
     # kill subregions
     for sr in subregions:
-        if sr == None:
+        if sr is None:
             continue
         if prune(sr):
             dqregion.delRelation('DQRegions', sr)
     subregions = dqregion.getSubRegions()
-    if subregions == None:
+    if subregions is None:
         subregions = []
     if len(subregions) + len(params) == 0:
         return True
@@ -120,10 +122,10 @@ def prune(dqregion):
 
 def paramcount(dqregion):
     params = dqregion.getDQParameters()
-    if params == None:
+    if params is None:
         params = []
     subregions = dqregion.getSubRegions()
-    if subregions == None:
+    if subregions is None:
         subregions = []
     
     return len(params) + sum([paramcount(region) for region in subregions])
@@ -132,19 +134,19 @@ def process(infname, confname, options, refs=None):
     import re
     f = ROOT.TFile.Open(infname, 'READ')
     if not f.IsOpen():
-        print 'ERROR: cannot open %s' % infname
+        print('ERROR: cannot open %s' % infname)
         return
     
     top_level = DQRegion(id='topRegion',algorithm=worst)
-    print 'Building tree...'
+    print('Building tree...')
     refpairs = refs.split(',')
     try:
         refdict = dict(_.split(':') for _ in refpairs)
-    except Exception, e:
-        print e
+    except Exception as e:
+        print(e)
     # "Model" references
     dqrs = [DQReference(reference='%s:same_name' % v, id=k)
-            for k, v in refdict.items()]
+            for k, v in list(refdict.items())]
     displaystring = options.drawopt
     if options.refdrawopt:
         displaystring += ',' + (','.join('DrawRef=%s' % _ for _ in options.refdrawopt.split(',')))
@@ -165,7 +167,7 @@ def process(infname, confname, options, refs=None):
     hists = []
     if options.histlistfile:
         hists = [re.compile(line.rstrip('\n')) for line in open(options.histlistfile)]
-        if options.pathregex: print "histlistfile given, pathregex is ignored"
+        if options.pathregex: print("histlistfile given, pathregex is ignored")
     if options.refmangle:
         import sys
         sys.path.append(os.getcwd())
@@ -175,7 +177,7 @@ def process(infname, confname, options, refs=None):
         manglefunc = None
     recurse(topindir, top_level, topindirname, dqrs, displaystring, displaystring2D,
             re.compile(options.pathregex), startpath, hists, manglefunc=manglefunc)
-    print 'Pruning dead branches...'
+    print('Pruning dead branches...')
     prune(top_level)
     pc = paramcount(top_level)
  
@@ -183,7 +185,7 @@ def process(infname, confname, options, refs=None):
     for x in sublevel:
         top_level.delRelation('DQRegions', x)
         
-    print 'Writing output'
+    print('Writing output')
     writeHanConfiguration( filename = confname , roots = sublevel)
     return pc
 
@@ -192,7 +194,7 @@ def super_process(fname, options):
     import ROOT
     han_is_found = (ROOT.gSystem.Load('libDataQualityInterfaces') != 1)
     if not han_is_found:
-        print 'ERROR: unable to load offline DQMF; unable to proceed'
+        print('ERROR: unable to load offline DQMF; unable to proceed')
         sys.exit(1)
     bname = os.path.basename(fname)
 
@@ -201,6 +203,7 @@ def super_process(fname, options):
     hanoutput = None
 
     failed = False
+    prebuilt_hcfg = False
 
     @contextlib.contextmanager
     def tmpdir():
@@ -211,8 +214,8 @@ def super_process(fname, options):
 
     with tmpdir() as hantmpdir:
         try:
-            print '====> Processing file %s' % (fname)
-            print '====> Generating han configuration file'
+            print('====> Processing file %s' % (fname))
+            print('====> Generating han configuration file')
             hantmpinput = os.path.join(hantmpdir, bname)
             shutil.copyfile(fname, hantmpinput)
             haninput = hantmpinput
@@ -222,13 +225,13 @@ def super_process(fname, options):
             
             # bad hack. rv = number of histogram nodes
             if rv == 0:
-                print 'No histograms to display; exiting with code 0'
+                print('No histograms to display; exiting with code 0')
                 sys.exit(0)
 
-            print '====> Compiling han configuration'
+            print('====> Compiling han configuration')
             hanhcfg = os.path.join(hantmpdir, 'han.hcfg')
             ROOT.dqi.HanConfig().AssembleAndSave( hanconfig, hanhcfg )
-            print '====> Executing han'
+            print('====> Executing han')
             import resource
             memlimit = resource.getrlimit(resource.RLIMIT_AS)
             resource.setrlimit(resource.RLIMIT_AS, (memlimit[1], memlimit[1]))
@@ -237,7 +240,7 @@ def super_process(fname, options):
             rv = ROOT.dqi.HanApp().Analyze( hanhcfg, haninput, hanoutput )
             if rv != 0:
                 raise Exception('failure in han')
-            print '====> Dumping web display output'
+            print('====> Dumping web display output')
             from DataQualityUtils import handimod
             handimod.handiWithComparisons( options.title,
                                            hanoutput,
@@ -256,8 +259,8 @@ def super_process(fname, options):
 ##            shutil.copy2(hanoutput, hantargetfile)
 ##            print '====> Cleaning up'
             os.unlink(hanoutput)
-        except Exception, e:
-            print e
+        except Exception as e:
+            print(e)
             import traceback
             traceback.print_exc()
             if 'canonical format' not in str(e):
@@ -269,14 +272,14 @@ def super_process(fname, options):
                     os.unlink(hanconfig)
                     os.unlink(hanhcfg)
                 os.unlink(hanoutput)
-            except:
+            except Exception:
                 pass
         
     return not failed
 
         
 if __name__=="__main__":
-    import sys, optparse, shutil, os
+    import sys, optparse, os
     os.environ['TDAQ_ERS_NO_SIGNAL_HANDLERS']='1'
     parser = optparse.OptionParser(usage='usage: %prog [options] inputfile')
     parser.add_option('--reffile', default=None,
@@ -330,7 +333,7 @@ if __name__=="__main__":
         thresh = make_thresholds('P', 0.05, 0.01, 'pThresholds')
 
     rv = super_process(fname, options)
-    if rv == True:
+    if rv:
         sys.exit(0)
     else:
         sys.exit(1)    
diff --git a/DataQuality/DataQualityUtils/scripts/readTier0HIST.py b/DataQuality/DataQualityUtils/scripts/readTier0HIST.py
index c871a9b7dadb45b3bfc0c0c75595dd2cfe712168..a4153e707db9325ae4987f1a30523320f8b6cfde 100644
--- a/DataQuality/DataQualityUtils/scripts/readTier0HIST.py
+++ b/DataQuality/DataQualityUtils/scripts/readTier0HIST.py
@@ -1,6 +1,6 @@
 #!/usr/bin env python
 
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 # Simple script to extract the path of the HIST output of Tier0 monitoring, 
 # open it and open a TBrowser
 # Uses the pathExtract library to extract the EOS path
@@ -16,7 +16,7 @@
 
 import os, sys  
 import argparse
-import xmlrpclib
+from six.moves import xmlrpc_client as xmlrpclib
 
 from DataQualityUtils import pathExtract
 
@@ -45,9 +45,9 @@ if args.tag != "":
   tag = args.tag
 else: # Try to retrieve the data project tag via atlasdqm
   if (not os.path.isfile("atlasdqmpass.txt")):
-    print "To retrieve the data project tag, you need to generate an atlasdqm key and store it in this directory as atlasdqmpass.txt (yourname:key)"
-    print "To generate a kay, go here : https://atlasdqm.cern.ch/dqauth/"
-    print "You can also define by hand the data project tag wit hthe option -t"
+    print("To retrieve the data project tag, you need to generate an atlasdqm key and store it in this directory as atlasdqmpass.txt (yourname:key)")
+    print("To generate a key, go here : https://atlasdqm.cern.ch/dqauth/")
+    print("You can also define by hand the data project tag with the option -t")
     sys.exit()
   passfile = open("atlasdqmpass.txt")
   passwd = passfile.read().strip(); passfile.close()
@@ -56,7 +56,7 @@ else: # Try to retrieve the data project tag via atlasdqm
   run_spec = {'stream': 'physics_CosmicCalo', 'proc_ver': 1,'source': 'tier0', 'low_run': runNumber, 'high_run':runNumber}
   run_info= s.get_run_information(run_spec)
   if '%d'%runNumber not in run_info.keys() or len(run_info['%d'%runNumber])<2:
-    print "Unable to retrieve the data project tag via atlasdqm... Please double check your atlasdqmpass.txt or define it by hand with -t option"
+    print("Unable to retrieve the data project tag via atlasdqm... Please double check your atlasdqmpass.txt or define it by hand with -t option")
     sys.exit()
   tag = run_info['%d'%runNumber][1]
 
@@ -77,7 +77,7 @@ else:
 file = []
 for iPath in path:
    if ("NO FILE" not in iPath):
-      print "I am opening %s"%(iPath)
+      print("I am opening %s"%(iPath))
       file.append( TFile.Open(iPath))
 
 gStyle.SetPalette(1)
diff --git a/DataQuality/DataQualityUtils/scripts/readTier0LARNOISE.py b/DataQuality/DataQualityUtils/scripts/readTier0LARNOISE.py
index 31dbeefa551b0325c7c483277f5dd01e0f2adffd..fd2744a7378dc022e2137ede27a2568e44cabee6 100644
--- a/DataQuality/DataQualityUtils/scripts/readTier0LARNOISE.py
+++ b/DataQuality/DataQualityUtils/scripts/readTier0LARNOISE.py
@@ -1,6 +1,6 @@
 #!/usr/bin env python
 
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 # Simple script to extract the path of the TAG outputs of Tier0 monitoring, 
 # open them and chain them in a single TChain
 # Uses the pathExtract library to extract the EOS path
@@ -15,9 +15,9 @@ import os, sys
 import argparse
 
 from DataQualityUtils import pathExtract         
-import xmlrpclib
+from six.moves import xmlrpc_client as xmlrpclib
 
-from ROOT import TFile,TBrowser,TChain
+from ROOT import TChain
 from ROOT import gStyle
 
 gStyle.SetPalette(1)
@@ -41,9 +41,9 @@ if args.tag != "":
   tag = args.tag
 else: # Try to retrieve the data project tag via atlasdqm
   if (not os.path.isfile("atlasdqmpass.txt")):
-    print "To retrieve the data project tag, you need to generate an atlasdqm key and store it in this directory as atlasdqmpass.txt (yourname:key)"
-    print "To generate a kay, go here : https://atlasdqm.cern.ch/dqauth/"
-    print "You can also define by hand the data project tag wit hthe option -t"
+    print("To retrieve the data project tag, you need to generate an atlasdqm key and store it in this directory as atlasdqmpass.txt (yourname:key)")
+    print("To generate a key, go here : https://atlasdqm.cern.ch/dqauth/")
+    print("You can also define by hand the data project tag with the option -t")
     sys.exit()
   passfile = open("atlasdqmpass.txt")
   passwd = passfile.read().strip(); passfile.close()
@@ -52,7 +52,7 @@ else: # Try to retrieve the data project tag via atlasdqm
   run_spec = {'stream': 'physics_CosmicCalo', 'proc_ver': 1,'source': 'tier0', 'low_run': runNumber, 'high_run':runNumber}
   run_info= s.get_run_information(run_spec)
   if '%d'%runNumber not in run_info.keys() or len(run_info['%d'%runNumber])<2:
-    print "Unable to retrieve the data project tag via atlasdqm... Please double check your atlasdqmpass.txt or define it by hand with -t option"
+    print("Unable to retrieve the data project tag via atlasdqm... Please double check your atlasdqmpass.txt or define it by hand with -t option")
     sys.exit()
   tag = run_info['%d'%runNumber][1]
 
@@ -62,14 +62,13 @@ listOfFiles = pathExtract.returnEosLArNoisePath(runNumber,stream,amiTag,tag)
 
 tree = TChain("CollectionTree")
 
-print listOfFiles
+print(listOfFiles)
 for fileNames in listOfFiles:
-  print "Adding %s"%(fileNames)
+  print("Adding %s"%(fileNames))
   tree.AddFile("root://eosatlas/%s"%(fileNames))
 
 entries = tree.GetEntries()
 if entries != 0:
-  print "The chained tree contains %d entries"%(entries)
+  print("The chained tree contains %d entries"%(entries))
 else:
-  print "Empty chain..."
-
+  print("Empty chain...")
diff --git a/DataQuality/DataQualityUtils/scripts/readTier0TAGs.py b/DataQuality/DataQualityUtils/scripts/readTier0TAGs.py
index bd31946cc4a452512456545d593de45ce828648f..60e2a4173b3a154d4bc60072bfc62206356907e7 100644
--- a/DataQuality/DataQualityUtils/scripts/readTier0TAGs.py
+++ b/DataQuality/DataQualityUtils/scripts/readTier0TAGs.py
@@ -1,6 +1,6 @@
 #!/usr/bin env python
 
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 # Simple script to extract the path of the TAG outputs of Tier0 monitoring, 
 # open them and chain them in a single TChain
 # Uses the pathExtract library to extract the EOS path
@@ -15,9 +15,9 @@ import os, sys
 import argparse
 
 from DataQualityUtils import pathExtract         
-import xmlrpclib
+from six.moves import xmlrpc_client as xmlrpclib
 
-from ROOT import TFile,TChain
+from ROOT import TChain
 from ROOT import gStyle
 
 gStyle.SetPalette(1)
@@ -41,9 +41,9 @@ if args.tag != "":
   tag = args.tag
 else: # Try to retrieve the data project tag via atlasdqm
   if (not os.path.isfile("atlasdqmpass.txt")):
-    print "To retrieve the data project tag, you need to generate an atlasdqm key and store it in this directory as atlasdqmpass.txt (yourname:key)"
-    print "To generate a kay, go here : https://atlasdqm.cern.ch/dqauth/"
-    print "You can also define by hand the data project tag wit hthe option -t"
+    print("To retrieve the data project tag, you need to generate an atlasdqm key and store it in this directory as atlasdqmpass.txt (yourname:key)")
+    print("To generate a key, go here : https://atlasdqm.cern.ch/dqauth/")
+    print("You can also define by hand the data project tag with the option -t")
     sys.exit()
   passfile = open("atlasdqmpass.txt")
   passwd = passfile.read().strip(); passfile.close()
@@ -52,7 +52,7 @@ else: # Try to retrieve the data project tag via atlasdqm
   run_spec = {'stream': 'physics_CosmicCalo', 'proc_ver': 1,'source': 'tier0', 'low_run': runNumber, 'high_run':runNumber}
   run_info= s.get_run_information(run_spec)
   if '%d'%runNumber not in run_info.keys() or len(run_info['%d'%runNumber])<2:
-    print "Unable to retrieve the data project tag via atlasdqm... Please double check your atlasdqmpass.txt or define it by hand with -t option"
+    print("Unable to retrieve the data project tag via atlasdqm... Please double check your atlasdqmpass.txt or define it by hand with -t option")
     sys.exit()
   tag = run_info['%d'%runNumber][1]
 
@@ -64,12 +64,11 @@ tree = TChain("POOLCollectionTree")
 
 file = {}
 for fileNames in listOfFiles:
-  print "Adding %s"%(fileNames)
+  print("Adding %s"%(fileNames))
   tree.AddFile("root://eosatlas/%s"%(fileNames))
 
 entries = tree.GetEntries()
 if entries != 0:
-  print "The chained tree contains %d entries"%(entries)
+  print("The chained tree contains %d entries"%(entries))
 else:
-  print "Empty chain..."
-
+  print("Empty chain...")