diff --git a/Tools/PyJobTransforms/python/transform.py b/Tools/PyJobTransforms/python/transform.py
index fbb29d25244adee6bbaee5aa1aac1451ee4fc0b5..acdee418de05993fc227e3a2212b677495a615f9 100644
--- a/Tools/PyJobTransforms/python/transform.py
+++ b/Tools/PyJobTransforms/python/transform.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2002-2022 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration
 
 ## @package PyJobTransforms.transform
 #
@@ -44,8 +44,7 @@ class transform(object):
     #  @param trfName Name of the transform. Default is executable name with .py rstripped.
     #  @param executor Executor list
     def __init__(self, standardSignalHandlers = True, standardTrfArgs = True, standardValidationArgs=True, 
-                 trfName = path.basename(sys.argv[0]).rsplit('.py', 1)[0], 
-                 executor = set([transformExecutor(),]), exeArgs = None, description = ''):
+                 trfName = None, executor = None, exeArgs = None, description = ''):
         '''Transform class initialiser'''
         msg.debug('Welcome to ATLAS job transforms')
         
@@ -62,12 +61,12 @@ class transform(object):
         self._trfPredata = os.environ.get('TRF_PREDATA')
 
         ## Transform _name
-        self._name = trfName        
+        self._name = trfName or path.basename(sys.argv[0]).rsplit('.py', 1)[0]
         
         ## @note Holder for arguments this trf understands
         #  Use @c argparse.SUPPRESS to have non-given arguments unset, rather than None
         #  Support reading arguments from a file using the notation @c @file 
-        self.parser = trfArgParser(description='Transform {0}. {1}'.format(trfName, description),
+        self.parser = trfArgParser(description='Transform {0}. {1}'.format(self.name, description),
                                    argument_default=argparse.SUPPRESS,
                                    fromfile_prefix_chars='@')
 
@@ -90,9 +89,8 @@
         self._executors = set()
         self._executorDictionary = {}
         
-        # If we were passed executors at construction time then append them to the set:
-        if executor is not None:
-            self.appendToExecutorSet(executor)
+        # Append the given executors, or a default transformExecutor, to the set:
+        self.appendToExecutorSet(executor or {transformExecutor()})
         
         ## Transform exit code/message holders
         self._exitCode = None
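
Aside on the pattern used above (not part of the patch): Python evaluates argument defaults once, when the def statement runs, so a default such as set([transformExecutor(),]) builds a single shared object at import time, which is what flake8-bugbear's B008/B006 checks flag. The None-sentinel rewrite defers construction to call time. A minimal sketch with made-up names:

    def bad(items=[]):           # one list, created when the function is defined
        items.append(1)
        return items

    bad()    # [1]
    bad()    # [1, 1] -- the same default list is reused across calls

    def good(items=None):        # None sentinel, resolved on every call
        items = items or [1]     # mirrors 'executor or {transformExecutor()}' above
        return items
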
diff --git a/Tools/PyJobTransforms/python/trfUtils.py b/Tools/PyJobTransforms/python/trfUtils.py
index 7a8efaea715af4230a4e14bf40e8f4f4532639fa..58ac29f8361d559fb7dc014678dd8bfa36014266 100644
--- a/Tools/PyJobTransforms/python/trfUtils.py
+++ b/Tools/PyJobTransforms/python/trfUtils.py
@@ -106,7 +106,7 @@ def getAncestry(listMyOrphans = False):
 #  @param parent The parent process for which to return all the child PIDs
 #  @param listOrphans Parameter value to pass to getAncestry() if necessary
 #  @return @c children List of child PIDs
-def listChildren(psTree = None, parent = os.getpid(), listOrphans = False):
+def listChildren(psTree = None, parent = os.getpid(), listOrphans = False):  # noqa: B008 (PID is constant)
     '''Take a psTree dictionary and list all children'''
     if psTree is None:
         psTree = getAncestry(listMyOrphans = listOrphans)
@@ -873,7 +873,7 @@ class ParallelJobProcessor(object):
     def __init__(
         self,
         jobSubmission = None,
-        numberOfProcesses = multiprocessing.cpu_count(),
+        numberOfProcesses = multiprocessing.cpu_count(),  # noqa: B008 (cpu_count is constant)
         ):
         self.jobSubmission = jobSubmission
         self.numberOfProcesses = numberOfProcesses
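
For the two trfUtils.py defaults the warning is silenced rather than rewritten: the stated rationale is that os.getpid() and multiprocessing.cpu_count() return the same value for the life of the process, so evaluating them once at import time is considered harmless here. For comparison, the sentinel rewrite that the noqa comments avoid would look roughly like this (illustrative only, not part of the patch):

    import os

    def listChildren(psTree=None, parent=None, listOrphans=False):
        if parent is None:
            parent = os.getpid()   # resolved at call time rather than at import time
        ...                        # rest of the function unchanged
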