diff --git a/Tools/PyUtils/CMakeLists.txt b/Tools/PyUtils/CMakeLists.txt
index 54be94b7b7d1527a7675f505fa33ea174e94c8b6..c5dfb66028520778b86a1abb22a2d73a66ddfa28 100644
--- a/Tools/PyUtils/CMakeLists.txt
+++ b/Tools/PyUtils/CMakeLists.txt
@@ -19,7 +19,8 @@ atlas_install_scripts( bin/acmd.py bin/checkFile.py bin/checkPlugins.py
    bin/gprof2dot bin/issues bin/magnifyPoolFile.py bin/merge-poolfiles.py
    bin/pool_extractFileIdentifier.py
    bin/pool_insertFileToCatalog.py bin/print_auditor_callgraph.py bin/pyroot.py
-   bin/vmem-sz.py bin/meta-reader.py bin/meta-diff.py bin/tree-orderer.py )
+   bin/vmem-sz.py bin/meta-reader.py bin/meta-diff.py bin/tree-orderer.py
+   POST_BUILD_CMD ${ATLAS_FLAKE8} )
 
 # Aliases:
 atlas_add_alias( checkFile "checkFile.py" )
diff --git a/Tools/PyUtils/bin/checkMetaSG.py b/Tools/PyUtils/bin/checkMetaSG.py
index 8b0ef140184746c1050f7f2729901279398e7d33..26211f16f569ec56499ab172afd3b7e0da0e1698 100755
--- a/Tools/PyUtils/bin/checkMetaSG.py
+++ b/Tools/PyUtils/bin/checkMetaSG.py
@@ -44,7 +44,7 @@ if __name__ == "__main__":
         fileNames = [ arg for arg in args if arg[0] != "-" ]
         pass
 
-    if options.fileName == None and len(fileNames) == 0:
+    if options.fileName is None and len(fileNames) == 0:
         str(parser.print_help() or "")
         sys.exit(1)
 
@@ -114,7 +114,7 @@ if __name__ == "__main__":
             sc = 1
             pass
 
-        except :
+        except Exception:
             print ("## Caught something !! (don't know what)")
             print (sys.exc_info()[0])
             print (sys.exc_info()[1])
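
The checkMetaSG.py hunks above replace "== None" with the identity test "is None" and turn the bare "except:" into "except Exception:". A minimal, self-contained sketch of both idioms, assuming plain Python 3 and a hypothetical open_first() helper (no ATLAS dependencies):

import sys

def open_first(file_name, file_names):
    # "is None" tests identity; "== None" calls __eq__, which option-holder
    # classes may override with surprising results.
    if file_name is None and len(file_names) == 0:
        sys.exit(1)
    try:
        return open(file_name or file_names[0])
    except OSError as err:
        print("## Caught OSError:", err)
        return None
    except Exception:
        # Unlike a bare "except:", this still lets KeyboardInterrupt and
        # SystemExit propagate while logging anything unexpected.
        print("## Caught something !! (don't know what)")
        print(sys.exc_info()[0])
        print(sys.exc_info()[1])
        return None

print(open_first("does-not-exist.txt", []))   # prints the caught error, then None
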
diff --git a/Tools/PyUtils/bin/checkPlugins.py b/Tools/PyUtils/bin/checkPlugins.py
index 33da4cb8cf9be318b720f54e302c718353845364..d4935cbcda7fcc52c2dd47abcf1387d16c3ccf29 100755
--- a/Tools/PyUtils/bin/checkPlugins.py
+++ b/Tools/PyUtils/bin/checkPlugins.py
@@ -9,7 +9,6 @@
 
 from __future__ import print_function
 
-__version__ = "$Revision: 1.3 $"
 __author__  = "Sebastien Binet"
 
 import os
@@ -146,7 +145,7 @@ if __name__ == "__main__":
             capabilities = dsoDb.capabilities(libName)
             print ("::: capabilities of [%s]" % libName)
             print (os.linesep.join( [ "  "+str(c) for c in capabilities ] ))
-        except ValueError as err:
+        except ValueError:
             sc = 1
             pass
 
@@ -159,7 +158,7 @@ if __name__ == "__main__":
                 print (" -",k)
                 print (os.linesep.join( [ "  "+str(v) for v in dups[k] ] ))
             if len(dups.keys())>0: sc = 1
-        except ValueError as err:
+        except ValueError:
             sc = 1
             pass
         
diff --git a/Tools/PyUtils/bin/checkxAOD.py b/Tools/PyUtils/bin/checkxAOD.py
index 520ca8113d2749733872e7c1f1c582bcd039c855..cd09a7b2865313846301b1913c62e00230eaa58f 100755
--- a/Tools/PyUtils/bin/checkxAOD.py
+++ b/Tools/PyUtils/bin/checkxAOD.py
@@ -2,7 +2,6 @@
 
 # Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 #
-# $Id: checkxAOD.py 776263 2016-10-03 14:46:39Z wlampl $
 #
 # This is a modified version of PyUtils/bin/checkFile.py. It has been taught
 # how to sum up the sizes of all the branches belonging to a single xAOD
@@ -20,6 +19,7 @@ import sys
 import os
 import re
 import six
+import operator
 
 from optparse import OptionParser
 
@@ -62,11 +62,11 @@ if __name__ == "__main__":
         fileNames = [ arg for arg in args if arg[ 0 ] != "-" ]
         pass
 
-    if options.fileName == None and len( fileNames ) == 0:
+    if options.fileName is None and len( fileNames ) == 0:
         str( parser.print_help() or "" )
         sys.exit( 1 )
 
-    if options.fileName != None:
+    if options.fileName is not None:
         fileName = os.path.expandvars( os.path.expanduser( options.fileName ) )
         fileNames.append( fileName )
         pass
@@ -96,13 +96,13 @@ if __name__ == "__main__":
             # The name of this branch:
             brName = d.name
             # Check if this is a static auxiliary store:
-            m = re.match( "(.*)Aux\..*", d.name )
+            m = re.match( r"(.*)Aux\..*", d.name )
             if m:
                 # Yes, it is. And the name of the main object/container is:
                 brName = m.group( 1 )
                 pass
             # Check if this is a dynamic auxiliary variable:
-            m = re.match( "(.*)AuxDyn\..*", d.name )
+            m = re.match( r"(.*)AuxDyn\..*", d.name )
             if m:
                 # Oh yes, it is. Let's construct the name of the main
                 # object/container:
@@ -129,7 +129,6 @@ if __name__ == "__main__":
             orderedData += [ summedData[ br ] ]
             pass
         sorter = PF.PoolRecord.Sorter.DiskSize
-        import operator
         orderedData.sort( key = operator.attrgetter( sorter ) )
 
         # Access the CollectionTree directly:
@@ -262,7 +261,6 @@ if __name__ == "__main__":
             categorizedData += [ categData[ br ] ]
             pass
         sorter = PF.PoolRecord.Sorter.DiskSize
-        import operator
         categorizedData.sort( key = operator.attrgetter( sorter ) )
 
         print( "=" * 80 )
diff --git a/Tools/PyUtils/bin/diff-athfile b/Tools/PyUtils/bin/diff-athfile
index 40521f50e5f6726c817eae62628b768b0aba5db4..d67358616c9472bb1785c6979d9985b465f4b562 100755
--- a/Tools/PyUtils/bin/diff-athfile
+++ b/Tools/PyUtils/bin/diff-athfile
@@ -1,11 +1,11 @@
 #!/usr/bin/env python
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 # @file:    diff-athfile.py
 # @purpose: simple command-line utility to diff metadata in two files.
 #           Uses PyUtils.AthFile.fopen. Based on dump-athfile.py.
 # @author:  Graeme Stewart <graeme.andrew.stewart@cern.ch>
 # @date:    Jan 2012
-# @version: $Id: diff-athfile 493697 2012-04-02 17:30:56Z binet $
 #
 # @example:
 # @code
@@ -16,9 +16,6 @@
 import sys
 import os
 
-try:                import cPickle as pickle
-except ImportError: import pickle
-    
 from optparse import OptionParser
 
 if __name__ == "__main__":
@@ -65,7 +62,7 @@ if __name__ == "__main__":
         for fname in fnames:
             fhandles[fname] = af.fopen(fname, evtmax=options.evtmax)
             msg.info(':'*80)
-            msg.info('Opened file %s.' % fname)
+            msg.info('Opened file %s.', fname)
             
 
         # Ignore the following keys, which are bound to be different:
@@ -77,10 +74,10 @@ if __name__ == "__main__":
     
         for k in simpleCompKeys:
             if fhandles[fnames[0]].infos[k] == fhandles[fnames[1]].infos[k]:
-                msg.info('%s equal in %s and %s: %s' % (k, fnames[0], fnames[1], fhandles[fnames[0]].infos[k]))
+                msg.info('%s equal in %s and %s: %s', k, fnames[0], fnames[1], fhandles[fnames[0]].infos[k])
             else:
-                msg.warning('%s not equal in %s and %s: %s != %s' % 
-                            (k, fnames[0], fnames[1], fhandles[fnames[0]].infos[k], fhandles[fnames[1]].infos[k]))
+                msg.warning('%s not equal in %s and %s: %s != %s',
+                            k, fnames[0], fnames[1], fhandles[fnames[0]].infos[k], fhandles[fnames[1]].infos[k])
                 sc = 1
 
         for k in bitByBitKeys:
@@ -90,21 +87,21 @@ if __name__ == "__main__":
             skeys.sort()
             skeys1.sort()
             if skeys != skeys1:
-                msg.warning('%s keys not equal for %s and %s: %s != %s' % 
-                            (k, fnames[0], fnames[1], skeys, skeys1))
+                msg.warning('%s keys not equal for %s and %s: %s != %s',
+                            k, fnames[0], fnames[1], skeys, skeys1)
                 sc = 1
             else:
-                msg.info('%s keys are equal for %s and %s: %s' % 
-                         (k, fnames[0], fnames[1], skeys))
+                msg.info('%s keys are equal for %s and %s: %s',
+                         k, fnames[0], fnames[1], skeys)
                 for subk in skeys:
                     if fhandles[fnames[0]].infos[k][subk] == fhandles[fnames[1]].infos[k][subk]:
                         # Here suppress the very long value output
-                        msg.info('%s element %s values are equal for %s and %s: (value suppressed)' % 
-                                 (k, subk, fnames[0], fnames[1]))
+                        msg.info('%s element %s values are equal for %s and %s: (value suppressed)',
+                                 k, subk, fnames[0], fnames[1])
                     else:
-                        msg.warning('%s element %s values are not equal for %s and %s: %s != %s' % 
-                                 (k, subk, fnames[0], fnames[1], fhandles[fnames[0]].infos[k][subk], 
-                                  fhandles[fnames[1]].infos[k][subk]))
+                        msg.warning('%s element %s values are not equal for %s and %s: %s != %s',
+                                    k, subk, fnames[0], fnames[1], fhandles[fnames[0]].infos[k][subk],
+                                    fhandles[fnames[1]].infos[k][subk])
                         sc = 1
                         
                 
@@ -119,7 +116,7 @@ if __name__ == "__main__":
         sc = 2
         pass
 
-    except :
+    except Exception:
         msg.error("Caught something !! (don't know what)")
         msg.error("\n%s\n%s",sys.exc_info()[0], sys.exc_info()[1])
         sc = 2
@@ -128,7 +125,7 @@ if __name__ == "__main__":
     
     if options.oname:
         oname = options.oname
-        msg.info("saving report into [%s]..." % oname)
+        msg.info("saving report into [%s]...", oname)
         if os.path.exists(oname):
             os.rename(oname, oname+'.bak')
         af.server.save_cache(oname)
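
The diff-athfile changes above stop pre-formatting messages with % and instead pass the arguments to the logger, so interpolation only happens if the record is actually emitted. A sketch with the standard-library logging module standing in for the AthFile msg object and a hypothetical file name:

import logging

logging.basicConfig(level=logging.INFO)
msg = logging.getLogger("diff-athfile")

fname = "aod.pool.root"                       # hypothetical file name
msg.info("Opened file %s.", fname)            # deferred interpolation
msg.warning("%s not equal in %s and %s", "run_number", "a.root", "b.root")
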
diff --git a/Tools/PyUtils/bin/diff-jobo-cfg.py b/Tools/PyUtils/bin/diff-jobo-cfg.py
index 1a26d0905d3913d46a584cfba83160b8ab7f5377..bd2d7009a02a955e2826fadc87c174dd06c71bec 100755
--- a/Tools/PyUtils/bin/diff-jobo-cfg.py
+++ b/Tools/PyUtils/bin/diff-jobo-cfg.py
@@ -52,7 +52,7 @@ def load_cfg_file(fname):
         import shelve
         comps_db = shelve.open(fname, 'r')
         return comps_db['all-cfgs']
-    except Exception as err:
+    except Exception:
         from past.builtins import execfile
         execfile(fname, comps_db)
         return comps_db['d']
@@ -105,17 +105,12 @@ def cmp_component_db(ref, chk, verbose=True):
 
     diff = []
     for comp_name in common_keys:
-        is_diff = False
         comp_ref = ref[comp_name]
         comp_chk = chk[comp_name]
 
-        for k in ('comp_type', 'cxx_type',):
-            if comp_ref[k] != comp_chk[k]:
-                is_diff = True
         ref_props = sorted([(k,v) for k,v in comp_ref['props'].iteritems()])
         chk_props = sorted([(k,v) for k,v in comp_chk['props'].iteritems()])
         if ref_props != chk_props:
-            is_diff = True
             diff.append((comp_name, ref_props, chk_props,
                          dict_diff(ref=comp_ref['props'],
                                    chk=comp_chk['props'])))
@@ -140,7 +135,7 @@ def cmp_component_db(ref, chk, verbose=True):
             print ("-%s: %r" %(prop_name, ref_value,))
             print ("+%s: %r" %(prop_name, chk_value,))
     
-        
+
     if (len(ref_only_keys) > 0 or
         len(chk_only_keys) > 0 or
         len(diff) > 0):
@@ -186,8 +181,7 @@ if __name__ == "__main__":
         options.chk_fname = args[1]
         pass
 
-    if (options.chk_fname == None or 
-        options.ref_fname == None) :
+    if (options.chk_fname is None or options.ref_fname is None) :
         str(parser.print_help() or "")
         sys.exit(1)
         pass
diff --git a/Tools/PyUtils/bin/diffConfigs.py b/Tools/PyUtils/bin/diffConfigs.py
index e443ad7eb79220393c9d033f4def69124cb9b208..03580a06122c52e3ea0c32723fe760f25138f882 100755
--- a/Tools/PyUtils/bin/diffConfigs.py
+++ b/Tools/PyUtils/bin/diffConfigs.py
@@ -15,7 +15,6 @@
 
 from __future__ import print_function
 
-__version__ = "$Revision: 1.1 $"
 __author__  = "Sebastien Binet"
 
 import sys
@@ -51,8 +50,7 @@ if __name__ == "__main__":
         options.fileName = args[1]
         pass
 
-    if options.fileName    == None or \
-       options.refFileName == None :
+    if options.fileName    is None or options.refFileName is None :
         str(parser.print_help() or "")
         sys.exit(1)
         pass
diff --git a/Tools/PyUtils/bin/diffPoolFiles.py b/Tools/PyUtils/bin/diffPoolFiles.py
index 0591497957ade3e6df9b0859625a7580278329d3..d4ab854c26fa7664e4272399c389f4c691c5283c 100755
--- a/Tools/PyUtils/bin/diffPoolFiles.py
+++ b/Tools/PyUtils/bin/diffPoolFiles.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 # @file:    diffPoolFiles.py
 # @purpose: check that 2 POOL files have same content (containers and sizes)
@@ -12,7 +12,6 @@
 # diffPoolFiles aod.pool ref.aod.pool
 #
 
-__version__ = "$Revision: 1.3 $"
 __author__  = "Sebastien Binet"
 
 import sys
@@ -54,8 +53,7 @@ if __name__ == "__main__":
         options.fileName = args[1]
         pass
 
-    if options.fileName    == None or \
-       options.refFileName == None :
+    if options.fileName    is None or options.refFileName is None :
         str(parser.print_help() or "")
         sys.exit(1)
         pass
diff --git a/Tools/PyUtils/bin/dlldep.py b/Tools/PyUtils/bin/dlldep.py
index fb967379d5e057a2199c51df7d6db300b5f48972..865d5532601a6ca590f22f5c72da4adb2f677867 100755
--- a/Tools/PyUtils/bin/dlldep.py
+++ b/Tools/PyUtils/bin/dlldep.py
@@ -11,12 +11,10 @@
 #           based on Dominik Seichter's 'dependencies.sh':
 #           http://domseichter.blogspot.com/2008/02/visualize-dependencies-of-binaries-and.html
 #
-# $Id: dlldep.py,v 1.1 2009-02-09 17:56:35 fwinkl Exp $
-#
 
 from __future__ import print_function
 
-import sys, os
+import sys
 from os.path import basename
 import subprocess as sp
 import re
diff --git a/Tools/PyUtils/bin/dump-athfile.py b/Tools/PyUtils/bin/dump-athfile.py
index 3fa741c2e22e94eb92a28e22dc4b95a9bdb573da..96250916850656734bad1f7ae068e378e0a3d650 100755
--- a/Tools/PyUtils/bin/dump-athfile.py
+++ b/Tools/PyUtils/bin/dump-athfile.py
@@ -18,15 +18,11 @@
 
 from __future__ import print_function
 
-__version__ = "$Revision: 1.4 $"
 __author__  = "Sebastien Binet <binet@cern.ch>"
 
 import sys
 import os
 
-try:                import cPickle as pickle
-except ImportError: import pickle
-    
 from optparse import OptionParser
 
 if __name__ == "__main__":
@@ -65,11 +61,11 @@ if __name__ == "__main__":
         fnames = [ arg for arg in args if arg[0] != "-" ]
         pass
 
-    if options.fname == None and len(fnames) == 0:
+    if options.fname is None and len(fnames) == 0:
         str(parser.print_help() or "")
         sys.exit(1)
 
-    if options.fname != None:
+    if options.fname is not None:
         fname = os.path.expandvars(os.path.expanduser(options.fname))
         fnames.append(fname)
 
@@ -88,7 +84,7 @@ if __name__ == "__main__":
         sc = 1
         pass
 
-    except :
+    except Exception:
         msg.error("Caught something !! (don't know what)")
         msg.error("\n%s\n%s",sys.exc_info()[0], sys.exc_info()[1])
         sc = 10
@@ -154,7 +150,7 @@ if __name__ == "__main__":
     
     if options.oname:
         oname = options.oname
-        msg.info("saving report into [%s]..." % oname)
+        msg.info("saving report into [%s]...", oname)
         if os.path.exists(oname):
             os.rename(oname, oname+'.bak')
         af.server.save_cache(oname)
diff --git a/Tools/PyUtils/bin/dumpAthfilelite.py b/Tools/PyUtils/bin/dumpAthfilelite.py
index ea500d3394b9d046a808cc1bd382388713556ef4..99d661dcb8ba5a0c7fcac29a39f7528ada6b843a 100755
--- a/Tools/PyUtils/bin/dumpAthfilelite.py
+++ b/Tools/PyUtils/bin/dumpAthfilelite.py
@@ -5,12 +5,10 @@
 ## Simple wrapper to invoke AthFileLite metadata grabber and
 #  produce AthFile-like text output
 #
-# $Id: dumpAthfilelite.py 618684 2014-09-26 11:46:14Z graemes $
 
 from __future__ import print_function
 
 import argparse
-import os
 import pprint
 import sys
 
diff --git a/Tools/PyUtils/bin/filter-and-merge-d3pd.py b/Tools/PyUtils/bin/filter-and-merge-d3pd.py
index 8e7f916d6afe289e8042337b2cacddfc1d39ec13..f53ff2638255d37b2d047f9a559b48df7f8a9c74 100755
--- a/Tools/PyUtils/bin/filter-and-merge-d3pd.py
+++ b/Tools/PyUtils/bin/filter-and-merge-d3pd.py
@@ -34,7 +34,7 @@ def _fnmatch(fname, patterns):
     support for a list of patterns to match against
     """
     from fnmatch import fnmatch
-    if isinstance(patterns, basestring):
+    if isinstance(patterns, str):
         patterns = [patterns]
     for pattern in patterns:
         if fnmatch(fname, pattern):
@@ -78,7 +78,7 @@ def _interpret_grl(fname):
 
 def interpret_grl(fname="GRL.dat"):
     fnames = []
-    if isinstance(fname, basestring):
+    if isinstance(fname, str):
         fnames = [fname]
     elif isinstance(fname, (list,tuple)):
         fnames = fname[:]
@@ -101,12 +101,6 @@ def pass_grl(run, lb, good_lbs):
 
     return False
 
-def warm_up(fname):
-    assert os.path.exists(fname)
-    import commands
-    rc,_ = commands.getstatusoutput("/bin/dd if=%s of=/dev/null" % (fname,))
-    return rc
-
 def apply_filters(branches, patterns):
     """extract the branches which match the patterns.
     a pattern can add or remove a branch.
@@ -415,7 +409,6 @@ def order(m, chain_name, fnames, workdir):
         timer = ROOT.TStopwatch()
         timer.Start()
         print ("::: optimizing   [%s]..." % (fn,))
-        #warm_up(fn)
 
         timer.Start()
         fin = ROOT.TFile.Open(fn, "read")
@@ -484,7 +477,7 @@ def _load_filter_fct(selection):
     if selection is None:
         return filter_fct
     
-    if not isinstance(selection, basestring):
+    if not isinstance(selection, str):
         print ("** invalid filter-fct type (%r)" % (type(selection),))
         return filter_fct
     
@@ -772,7 +765,7 @@ def import_etree():
     except ImportError:
         pass
     # do it by hook or by crook...
-    import sys, os, imp
+    import os, imp
     xml_site_package = os.path.join(os.path.dirname(os.__file__), 'xml')
     m = imp.find_module('etree', [xml_site_package])
 
@@ -863,9 +856,9 @@ try:
         for child in node:
             # recursively add the element's children
             newitem = _xml2dict_recurse (child, dictclass)
-            if nodedict.has_key(child.tag):
+            if child.tag in nodedict:
                 # found duplicate tag, force a list
-                if type(nodedict[child.tag]) is type([]):
+                if isinstance(nodedict[child.tag], list):
                     # append to existing list
                     nodedict[child.tag].append(newitem)
                 else:
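
The filter-and-merge-d3pd.py hunks above replace the Python 2-only basestring, dict.has_key and type(...) is type([]) constructs. A compact Python 3 sketch of the replacements, using a toy nodedict rather than the real XML tree:

def normalise(patterns):
    # accept a single string or a list/tuple of strings
    if isinstance(patterns, str):              # was: isinstance(patterns, basestring)
        patterns = [patterns]
    return list(patterns)

nodedict = {}
for tag, newitem in [("lb", 1), ("lb", 2), ("run", 3)]:
    if tag in nodedict:                        # was: nodedict.has_key(tag)
        if isinstance(nodedict[tag], list):    # was: type(...) is type([])
            nodedict[tag].append(newitem)
        else:
            nodedict[tag] = [nodedict[tag], newitem]
    else:
        nodedict[tag] = newitem

print(normalise("px"), nodedict)               # ['px'] {'lb': [1, 2], 'run': 3}
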
diff --git a/Tools/PyUtils/bin/getMetadata.py b/Tools/PyUtils/bin/getMetadata.py
index b4d8f474bc609974953ed047946228d005158df0..dff53ddec704dadfff9cf85c78933691569fe98c 100755
--- a/Tools/PyUtils/bin/getMetadata.py
+++ b/Tools/PyUtils/bin/getMetadata.py
@@ -6,14 +6,17 @@ from __future__ import print_function
 __author__ = "Will Buttinger"
 __doc__ = """Extract dataset parameters from AMI, and write them to a text file.\nExamples:\n\n\ngetMetadata.py --inDS="mc15_13TeV.361103%DAOD_TRUTH%" --fields=dataset_number,ldn,nfiles,events,crossSection,genFiltEff,generator_name"""
 
-
-
 import logging
+import sys
 
 from future import standard_library
 standard_library.install_aliases()
 import subprocess
 
+# Python 2.x/3.x compatibility
+if sys.version_info[0] >= 3:
+    unicode = str   # strings are unicode in Python3
+
 #pinched from pandatools!
 def readDsFromFile(txtName):
     import re
@@ -33,9 +36,9 @@ def readDsFromFile(txtName):
             dsList += [tmpLine]
         # close file    
         txt.close()
-    except:
+    except Exception:
         errType,errValue = sys.exc_info()[:2]
-        logging.error("cannot read datasets from %s due to %s:%s" % (txtName,errType,errValue))
+        logging.error("cannot read datasets from %s due to %s:%s",txtName,errType,errValue)
         sys.exit(-1)    
     return dsList
 
@@ -43,7 +46,7 @@ def readDsFromFile(txtName):
 
 def isfloat(x):
     try:
-        a = float(x)
+        float(x)
     except ValueError:
         return False
     else:
@@ -91,13 +94,13 @@ def main():
     #check the voms proxy 
     status,out = subprocess.getstatusoutput("voms-proxy-info -fqan -exists")
     if status!=0:
-        logging.error("Please renew your certificate with this command: voms-proxy-init -voms atlas");
+        logging.error("Please renew your certificate with this command: voms-proxy-init -voms atlas")
         return -1
 
     try:
         client = pyAMI.client.Client('atlas')
         AtlasAPI.init()
-    except:
+    except Exception:
         logging.error("Could not establish pyAMI session. Are you sure you have a valid certificate? Do: voms-proxy-init -voms atlas")
         return -1
 
@@ -110,7 +113,7 @@ def main():
 
     res = client.execute('ListPhysicsParameterDefs',format='dom_object')
     for r in res.get_rows() : #r is OrderedDict
-        explainString = "%s: %s" % (r[u'PARAMNAME'],r[u'DESCRIPTION']);
+        explainString = "%s: %s" % (r[u'PARAMNAME'],r[u'DESCRIPTION'])
         if r[u'UNITS']!=u'NULL': 
             explainString += " (units: %s)" % r[u'UNITS']
             paramUnits[r[u'PARAMNAME']] = r[u'UNITS']
@@ -205,7 +208,7 @@ def main():
 
     if len(paramFields)>0 and args.physicsGroups==[""]:
         logging.error("You must specify at least one physics group. See -h for allowed groups")
-        return -1;
+        return -1
 
     #combine paramDefaults with fieldDefaults
     fieldDefaults.update(paramDefaults)
@@ -214,9 +217,9 @@ def main():
     
     for field in args.fields:
         if field not in fieldDefaults:
-            logging.error("%s is not a recognised field. Allowed fields are:" % field)
+            logging.error("%s is not a recognised field. Allowed fields are:", field)
             logging.error(fieldDefaults.keys())
-            return -1;
+            return -1
         
 
     if args.oldTimestamp!="":
@@ -231,7 +234,7 @@ def main():
   
     if len(args.inDS)==0 or (len(args.inDS)==1 and args.inDS[0]==""):
         logging.error("No datasets provided. Please specify datasets with the --inDS or --inDsTxt options")
-        return -1;
+        return -1
 
     logging.info("Fetching list of datasets from AMI (this may take a few minutes)...")
 
@@ -239,10 +242,10 @@ def main():
     #obtain list of datasets 
     res = AtlasAPI.list_datasets(client,patterns=args.inDS,fields=dsFields+['ldn'],ami_status="VALID") #changed status from %, to only catch valid now: wb 08/2015
 
-    logging.info("...Found %d datasets matching your selection" % len(res))
+    logging.info("...Found %d datasets matching your selection", len(res))
 
     if len(res)==0:
-        return 0;
+        return 0
     
     #NOTE: Should we allow retrieval of the extra information: keyword, genfiltereff, approx crossection, .. these all come from GetDatasetInfo ami command
 
@@ -258,7 +261,9 @@ def main():
         if len(extraFields)>0 or len(args.keywords)>0:
             info_res = AtlasAPI.get_dataset_info(client,str(r['ldn']))
             #print(info_res)
-            if len(info_res)==0: logging.error("Unable to retrieve dataset info for %s" % str(r['ldn']));return -1
+            if len(info_res)==0:
+                logging.error("Unable to retrieve dataset info for %s", r['ldn'])
+                return -1
             for field in extraFields:
                 #ignore the keyword_ fields 
                 if field.startswith("keyword_"): continue
@@ -284,14 +289,14 @@ def main():
 
     for ds in args.inDS:
         if '%' not in ds and ds not in dataset_values.keys():
-            logging.warning("Unknown dataset: %s" % ds)
+            logging.warning("Unknown dataset: %s", ds)
 
     datasetsToQuery = ",".join(dataset_values.keys())
 
     #if using inDsTxt, retain any comment or blank lines in structure of output
     complete_values = OrderedDict()
     if args.inDsTxt != "":
-         # read lines
+        # read lines
         commentcount=0
         import re
         txt = open(args.inDsTxt)
@@ -315,7 +320,8 @@ def main():
         txt.close()
         dataset_values = complete_values
 
-    logging.info("Obtaining %s for selected datasets at timestamp=%s... (please be patient)" % (args.fields,args.timestamp))
+    logging.info("Obtaining %s for selected datasets at timestamp=%s... (please be patient)",
+                 args.fields, args.timestamp)
 
     #do as one query, to be efficient
     if(args.timestamp==current_time):
@@ -332,7 +338,8 @@ def main():
 
 
     if args.oldTimestamp!="" :
-        logging.info("Obtaining %s for selected datasets at timestamp=%s... (please be patient)" % (args.fields,args.oldTimestamp))
+        logging.info("Obtaining %s for selected datasets at timestamp=%s... (please be patient)",
+                     args.fields,args.oldTimestamp)
         res2 = client.execute(['GetPhysicsParamsForDataset',"--logicalDatasetName=%s"% datasetsToQuery,"--timestamp='%s'"%args.oldTimestamp,"--history=true"], format='dom_object')
         old_parameterQueryResults = dict()
         for r in res2.get_rows():
@@ -391,14 +398,14 @@ def main():
                         groupsWithVals[param] += [(str(r[u'physicsGroup']),str(r[u'paramValue']))]
                         continue
                     if args.physicsGroups.index(str(r[u'physicsGroup'])) > bestGroupIndex : continue
-                    if args.physicsGroups.index(str(r[u'physicsGroup'])) == bestGroupIndex : logging.warning("Duplicate parameter %s for group %s in dataset %s (subprocess %d). Please report this!" % (param,str(r[u'physicsGroup']),ds,sp))
+                    if args.physicsGroups.index(str(r[u'physicsGroup'])) == bestGroupIndex : logging.warning("Duplicate parameter %s for group %s in dataset %s (subprocess %d). Please report this!", param, r[u'physicsGroup'], ds, sp)
                     paramVals[param] = str(r[u'paramValue'])
                     if param=="crossSection_pb": paramVals[param] = str(float(paramVals[param])*1000.0)
                     bestGroupIndex=args.physicsGroups.index(str(r[u'physicsGroup']))
                     #keep the explanation info 
                     for e in args.explainInfo: 
                         if unicode(e) not in r:
-                            logging.error("Unrecognised explainInfo field: %s" % e)
+                            logging.error("Unrecognised explainInfo field: %s", e)
                             return -1
                         explainInfo[param][e]=str(r[unicode(e)])
                 if args.oldTimestamp!="":
@@ -409,7 +416,7 @@ def main():
                         if str(r[u'paramName']) != param  and not (param=="crossSection_pb" and str(r[u'paramName'])=="crossSection"): continue
                         if str(r[u'physicsGroup']) not in args.physicsGroups: continue
                         if args.physicsGroups.index(str(r[u'physicsGroup'])) > bestGroupIndex : continue
-                        if args.physicsGroups.index(str(r[u'physicsGroup'])) == bestGroupIndex : logging.warning("Duplicate parameter %s for group %s in dataset %s (subprocess %d). Please report this!" % (param,str(r[u'physicsGroup']),ds,sp))
+                        if args.physicsGroups.index(str(r[u'physicsGroup'])) == bestGroupIndex : logging.warning("Duplicate parameter %s for group %s in dataset %s (subprocess %d). Please report this!", param, r[u'physicsGroup'], ds, sp)
                         paramVals2[param] = str(r[u'paramValue'])
                         if param=="crossSection_pb": paramVals2[param] = str(float(paramVals2[param])*1000.0)
                         bestGroupIndex=args.physicsGroups.index(str(r[u'physicsGroup']))
@@ -424,8 +431,8 @@ def main():
                 elif param == "subprocessID": val = sp
                 elif param in dataset_values[ds].keys(): val = dataset_values[ds][param]
                 else: val = paramVals.get(param,None)
-                if val == None:
-                    if args.outFile != sys.stdout: logging.warning("dataset %s (subprocess %d) does not have parameter %s, which has no default." % (ds,sp,param))
+                if val is None:
+                    if args.outFile != sys.stdout: logging.warning("dataset %s (subprocess %d) does not have parameter %s, which has no default.",ds,sp,param)
                     if len(groupsWithVals.get(param,[]))>0:
                         logging.warning("The follow physicsGroups have defined that parameter though:")
                         logging.warning(groupsWithVals[param])
@@ -439,7 +446,7 @@ def main():
                     elif param == "subprocessID": val2 = sp
                     elif param in dataset_values[ds].keys(): val2 = dataset_values[ds][param]
                     else: val2 = paramVals2.get(param,None)
-                    if val2 == None: val2 = "#UNKNOWN#"
+                    if val2 is None: val2 = "#UNKNOWN#"
                     #if isfloat(str(val2)): val2 = "%.6g" % float(val)
                     if(str(val)!=str(val2)):
                         if not firstPrint: print("%s:" % ds)
@@ -478,7 +485,8 @@ def main():
             if commentCount > 0:
                 if args.outFile!=sys.stdout and args.delim!="": print(commentCache,file=args.outFile)
                 outputTable += [["COMMENT",commentCache]]
-                commentCache = ''; commentCount = 0
+                commentCache = ''
+                commentCount = 0
             if args.outFile != sys.stdout and args.delim!="": print(rowString,file=args.outFile)
             outputTable += [rowList]
             #also print the required explanations
@@ -487,7 +495,7 @@ def main():
                 doneFirst=False
                 for eField in args.explainInfo:
                     if doneFirst: outString += " , "
-                    if not eField in expl.keys(): outString += " %s: <NONE .. value is default>"%eField
+                    if eField not in expl.keys(): outString += " %s: <NONE .. value is default>"%eField
                     else: outString += "%s: %s" % (eField,expl[eField])
                     doneFirst=True
                 outString += " }"
@@ -528,12 +536,10 @@ def main():
         print("",file=args.outFile)
         print("#lsetup  \"asetup %s,%s\" pyAMI" % (os.environ.get('AtlasProject','UNKNOWN!'),os.environ.get('AtlasVersion','UNKNOWN!')),file=args.outFile)
         print("#getMetadata.py --timestamp=\"%s\" --physicsGroups=\"%s\" --fields=\"%s\" --inDS=\"%s\"" % (args.timestamp,",".join(args.physicsGroups),",".join(args.fields),",".join(datasetss)),file=args.outFile )
-        logging.info("Results written to: %s" % args.outFile.name)
+        logging.info("Results written to: %s", args.outFile.name)
 
     args.outFile.close()
 
 
 if __name__ == "__main__":
-    import sys
     sys.exit(main())
-
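
The getMetadata.py hunks above add a small compatibility shim so the existing r[unicode(e)] lookups keep working: under Python 3 there is no separate unicode type, so the name is bound to str. A sketch with a toy dictionary standing in for an AMI result row:

import sys

if sys.version_info[0] >= 3:
    unicode = str   # strings are unicode in Python 3

row = {u"PARAMNAME": "crossSection", u"DESCRIPTION": "in nb"}   # toy AMI row
key = "PARAMNAME"
print(row[unicode(key)])   # works identically on Python 2 and 3
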
diff --git a/Tools/PyUtils/bin/gprof2dot b/Tools/PyUtils/bin/gprof2dot
index 7de8f54ea77f95f3488466814b12c307aad21468..c27e0d0cbc3e5949ee3489cbd56d9234e127efa9 100755
--- a/Tools/PyUtils/bin/gprof2dot
+++ b/Tools/PyUtils/bin/gprof2dot
@@ -17,6 +17,8 @@
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
 #
 
+# flake8: noqa
+
 """Generate a dot graph from the output of several profilers."""
 
 __author__ = "Jose Fonseca et al"
diff --git a/Tools/PyUtils/bin/issues b/Tools/PyUtils/bin/issues
index 5602cc1f9aaffeed6f408bb62900805b1124949d..27241c479907f0da6e393b3c1c64aaaf3e1340fc 100755
--- a/Tools/PyUtils/bin/issues
+++ b/Tools/PyUtils/bin/issues
@@ -1,4 +1,5 @@
 #!/usr/bin/env python
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
 from __future__ import print_function
 
@@ -25,7 +26,7 @@ def getDefects(pkg):
   request=Request(url,textData)
   try:
     u=urlopen(request, timeout=2)
-  except URLError as e:
+  except URLError:
     return "I'm sorry, the server timed out"
   textString = u.read().decode()
   return textString
diff --git a/Tools/PyUtils/bin/magnifyPoolFile.py b/Tools/PyUtils/bin/magnifyPoolFile.py
index cf3bcd80f7b8f639ffb24c509f4a182633fcb79b..4041a859a12dfc589dddef3ad28a1cf20b83ea13 100755
--- a/Tools/PyUtils/bin/magnifyPoolFile.py
+++ b/Tools/PyUtils/bin/magnifyPoolFile.py
@@ -2,7 +2,6 @@
 
 # Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
-# $Id: magnifyPoolFile.py,v 1.5 2008-06-27 17:24:13 binet Exp $
 # @file:    magnifyPoolFile.py
 # @purpose: produce a new POOL file with N times the content of an input one.
 # @author:  Sebastien Binet <binet@cern.ch>
@@ -16,7 +15,6 @@
 
 from __future__ import print_function
 
-__version__ = "$Revision: 1.5 $"
 __author__  = "Sebastien Binet <binet@cern.ch>"
 
 import sys
@@ -99,7 +97,7 @@ if __name__ == "__main__":
     print ("## importing ROOT...")
     import ROOT
     print ("## importing ROOT... [DONE]")
-    import RootUtils.PyROOTFixes
+    import RootUtils.PyROOTFixes  # noqa: F401
 
     sys.argv = oldArgs
     
diff --git a/Tools/PyUtils/bin/merge-poolfiles.py b/Tools/PyUtils/bin/merge-poolfiles.py
index a67fab07b6cd75bfeb7feeda03ef8bb4135e1ae7..e38b71384011a08a923fbce9a640c4976f6956e6 100755
--- a/Tools/PyUtils/bin/merge-poolfiles.py
+++ b/Tools/PyUtils/bin/merge-poolfiles.py
@@ -1,19 +1,17 @@
 #!/usr/bin/env python
 
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 ## @file PyUtils/bin/merge-poolfiles.py
 ## @brief take a bunch of input pool files and produce a single one
 ##          autoconfiguration is (attempted to be) performed
 ## @author Sebastien Binet <binet@cern.ch>
 
-__version__ = "$Revision$"
 __author__  = "Sebastien Binet <binet@cern.ch>"
 __doc__ = """take a bunch of input pool files and produce a single one.
 autoconfiguration is (attempted to be) performed
 """
 
 import sys
-import os
 
 if __name__ == "__main__":
    
@@ -42,7 +40,7 @@ if __name__ == "__main__":
     
     
     msg.info(':'*40)
-    msg.info('welcome to poolfiles merger version %s', __version__)
+    msg.info('welcome to poolfiles merger')
     
     (options, args) = parser.parse_args()
 
diff --git a/Tools/PyUtils/bin/meta-reader.py b/Tools/PyUtils/bin/meta-reader.py
index 4759da2811356c19f70a671fdf5bb62ef854cc7f..24e62e348776bad6a98ca4ce11cd8df5285286b4 100755
--- a/Tools/PyUtils/bin/meta-reader.py
+++ b/Tools/PyUtils/bin/meta-reader.py
@@ -115,9 +115,9 @@ def _main():
 						type=str,
 						choices=['tiny', 'lite', 'full', 'peeker'],
 						help="This flag provides the user capability to select the amount of metadata retrieved. There three options: "
-							 "tiny (only those values used in PyJobTransforms), "
-							 "lite (same output as dump-athfile) "
-							 "and full ( all  available data found) ")
+							"tiny (only those values used in PyJobTransforms), "
+							"lite (same output as dump-athfile) "
+							"and full ( all  available data found) ")
 	parser.add_argument('-t',
 						'--type',
 						default= None,
@@ -129,7 +129,7 @@ def _main():
 						'--filter',
 						default= [],
 						metavar='FILTER',
-	                    nargs = '+',
+						nargs = '+',
 						type=str,
 						help="The metadata keys to filter. ")
 	parser.add_argument('--promote',
@@ -168,7 +168,7 @@ def _main():
 		else:
 			enc = sys.stdout.encoding.lower()
 			ascii = not sys.stdout.isatty() or enc.find('ansi') >= 0 or enc.find('ascii') >= 0
-			pp=_tree_print(metadata, indent= indent, pad= 18, dict_sort='key', list_max_items = 8, ascii = True)
+			_tree_print(metadata, indent= indent, pad= 18, dict_sort='key', list_max_items = 8, ascii = True)
 			print(_tree_print(metadata, indent= indent, pad= 18, dict_sort='key', list_max_items = 8, ascii = ascii))
 
 	else:
diff --git a/Tools/PyUtils/bin/pool_extractFileIdentifier.py b/Tools/PyUtils/bin/pool_extractFileIdentifier.py
index c65df9a765725535fd6656b89aa0b81bbaeab577..8375f6ba6c3fda6bb3c633b56e108640aa37963a 100755
--- a/Tools/PyUtils/bin/pool_extractFileIdentifier.py
+++ b/Tools/PyUtils/bin/pool_extractFileIdentifier.py
@@ -22,7 +22,7 @@ standard_library.install_aliases()
 
 def pool_extract(files):
     print (":: extracting GUID for [%i] files... "% len(files))
-    import os, sys
+    import os
     import subprocess
     sc,exe = subprocess.getstatusoutput('which pool_extractFileIdentifier')
     if sc != 0:
diff --git a/Tools/PyUtils/bin/pool_insertFileToCatalog.py b/Tools/PyUtils/bin/pool_insertFileToCatalog.py
index 89427f32cb9abdf427929bb9a23062848eff313c..f6c371cf0c1d925e72f43eb7e84fb5e1c9a7c96f 100755
--- a/Tools/PyUtils/bin/pool_insertFileToCatalog.py
+++ b/Tools/PyUtils/bin/pool_insertFileToCatalog.py
@@ -25,7 +25,7 @@ def pool_insert(files, catalog_name="xmlcatalog_file:PoolFileCatalog.xml"):
         len (files),
         catalog_name
         ))
-    import os, sys
+    import os
     import subprocess
     sc,exe = subprocess.getstatusoutput ('which pool_insertFileToCatalog')
     if sc != 0:
diff --git a/Tools/PyUtils/bin/print_auditor_callgraph.py b/Tools/PyUtils/bin/print_auditor_callgraph.py
index 9ce124d7d201dbd8922192289f57ff725cd0b342..e3bf1cc952c81286fe11e5750ef14723ac845b99 100755
--- a/Tools/PyUtils/bin/print_auditor_callgraph.py
+++ b/Tools/PyUtils/bin/print_auditor_callgraph.py
@@ -73,7 +73,7 @@ if __name__ == '__main__':
     step = Steps.ini
     if len(sys.argv) > 2:
         step = sys.argv[2].lower()
-        if not step in Steps.ALLOWED:
+        if step not in Steps.ALLOWED:
             raise SystemExit(
                 2, "Invalid step name [%s] allowed=%r"%(step, Steps.ALLOWED))
 
diff --git a/Tools/PyUtils/bin/pyroot.py b/Tools/PyUtils/bin/pyroot.py
index ab1b151fe564d66cae30005ce6529f78968e8240..06cd9abb896aa4edca4baf57e188ab8be8b20f0a 100755
--- a/Tools/PyUtils/bin/pyroot.py
+++ b/Tools/PyUtils/bin/pyroot.py
@@ -1,6 +1,6 @@
 # Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
-"exec" "`which python`" "-tt" "$0" "$@";
+"exec" "`which python`" "-tt" "$0" "$@"
 
 # File: pyroot.py
 # Author: Sebastien Binet (binet@cern.ch)
@@ -10,7 +10,6 @@
 
 from __future__ import print_function
 
-__version__ = '$Revision$'
 __author__  = 'Sebastien Binet (binet@cern.ch)'
 __doc__     = 'For details about pyroot.py, run "less `which pyroot.py`"'
 
@@ -32,8 +31,7 @@ def _help_and_exit( reason = None ):
      --no-display                           prompt, but no graphics display
  -c, --command                        ...  one-liner, runs before any scripts
  -h, --help                           ...  print this help message
- -v, --version                        ...  print version number
- -,-- [arg1,...]                      ...  additional arguments passed directly 
+ -,-- [arg1,...]                      ...  additional arguments passed directly
                                            to user scripts (left untouched)
  [<file1>.py [<file2>.py [...]]]      ...  scripts to run""")
 
@@ -75,23 +73,20 @@ for opt, arg in optlist:
    elif opt in ("-i", "--interactive"):
       runBatch = 0
       defOptions = ""
-      if display == None: display = 1
+      if display is None: display = 1
    elif opt in ("--no-display",):
       display = 0
    elif opt in ("-c", "--command"):
       command = string.strip( arg )
    elif opt in ("-h", "--help"):
       _help_and_exit()
-   elif opt in ("-v", "--version"):
-      print (__version__)
-      sys.exit(0)
 
 if optlist: del opt, arg
 del args, optlist, opts
 del _useropts, _userlongopts, string, getopt
 
 ## for the benefit of PyROOT
-if not display and not '-b' in sys.argv:
+if not display and '-b' not in sys.argv:
    sys.argv = sys.argv[:1] + ['-b'] + sys.argv[1:]
 del display
 
@@ -100,7 +95,7 @@ del display
 if not os.getcwd() in sys.path:
    sys.path = [ os.getcwd() ] + sys.path
 
-if not '' in sys.path:
+if '' not in sys.path:
    sys.path = [ '' ] + sys.path
 
 sys.ps1 = 'pyroot> '
@@ -116,7 +111,7 @@ if runBatch:
 else:
    os.environ['PYTHONINSPECT'] = '1'
  # readline support
-   import rlcompleter, readline
+   import rlcompleter, readline  # noqa: F401
 
    readline.parse_and_bind( 'tab: complete' )
    readline.parse_and_bind( 'set show-all-if-ambiguous On' )
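
Several files above keep imports purely for their side effects (RootUtils.PyROOTFixes, rlcompleter) and mark them with "# noqa: F401" so the new flake8 post-build check does not flag them as unused. A sketch of the pattern, assuming a Unix-like Python where readline is available:

import rlcompleter, readline  # noqa: F401  (rlcompleter installs the default completer on import)

readline.parse_and_bind('tab: complete')
print("tab completion configured")
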
diff --git a/Tools/PyUtils/bin/vmem-sz.py b/Tools/PyUtils/bin/vmem-sz.py
index 795f9a9e0ae80192a2d05fa52dba3128dbb85a86..056372e8b9088858b7e2d8a9024266dc96264643 100755
--- a/Tools/PyUtils/bin/vmem-sz.py
+++ b/Tools/PyUtils/bin/vmem-sz.py
@@ -2,19 +2,15 @@
 
 # Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
 
-#@purpose: get the inclusive and exclusive vmem sizes of a library
-
 from __future__ import print_function
 
 __author__ = "Sebastien Binet <binet@cern.ch>"
 __doc__    = "get the inclusive and exclusive vmem sizes of a library"
-__version__= "$Revision: 1.2 $"
 
 ## std imports
 import argparse
 import ctypes
 import os
-import sys
 
 ## 3rd-party imports
 from PyUtils.Decorators import forking as forking
@@ -31,7 +27,7 @@ def lib_loader(libname):
 @forking
 def load_lib (libname):
     _,vmem0,_ = pymon()
-    lib = lib_loader (libname)
+    lib_loader (libname)
     _,vmem1,_  = pymon()
     libs = [l for l in loaded_libs()
             if not os.path.basename(l) in _veto_libs and
@@ -139,8 +135,6 @@ def save_stats (lib_stats, fname=None):
     print (":: saving vmem statistics in [%s]... [done]"%fname)
     
 def main():
-    import sys
-    import os
 
     parser = argparse.ArgumentParser(
         description='get the inclusive and exclusive vmem sizes of a library'
diff --git a/Tools/PyUtils/python/scripts/check_file.py b/Tools/PyUtils/python/scripts/check_file.py
index 1ec1321bc5f5121b6af72fee54035189d1b94087..d33c7f45cbcc90f75324b7398ccaba4c47fd4a99 100644
--- a/Tools/PyUtils/python/scripts/check_file.py
+++ b/Tools/PyUtils/python/scripts/check_file.py
@@ -30,7 +30,7 @@ import PyUtils.acmdlib as acmdlib
                   action='store_true',
                   default=False,
                   help="""Enable fast mode.
-                  Memory szie will not be accurate AT ALL""")
+                  Memory size will not be accurate AT ALL""")
 @acmdlib.argument('-o', '--output',
                   default=None,
                   help="""name of the output file which will contain the