From c9e6a69ab269a27fe53ce8ae2ea02565fb9483e3 Mon Sep 17 00:00:00 2001
From: Frank Winklmeier <frank.winklmeier@cern.ch>
Date: Wed, 8 May 2024 18:29:06 +0200
Subject: [PATCH] PyJobTransforms: fix flake8-bugbear warnings

Fix flake8-bugbear warnings about calling functions for argument defaults.
---
 Tools/PyJobTransforms/python/transform.py | 13 ++++++-------
 Tools/PyJobTransforms/python/trfUtils.py  |  4 ++--
 2 files changed, 8 insertions(+), 9 deletions(-)

diff --git a/Tools/PyJobTransforms/python/transform.py b/Tools/PyJobTransforms/python/transform.py
index fbb29d25244a..acdee418de05 100644
--- a/Tools/PyJobTransforms/python/transform.py
+++ b/Tools/PyJobTransforms/python/transform.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2002-2022 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2024 CERN for the benefit of the ATLAS collaboration
 
 ## @package PyJobTransforms.transform
 #
@@ -44,8 +44,7 @@ class transform(object):
     # @param trfName Name of the transform. Default is executable name with .py rstripped.
     # @param executor Executor list
     def __init__(self, standardSignalHandlers = True, standardTrfArgs = True, standardValidationArgs=True,
-                 trfName = path.basename(sys.argv[0]).rsplit('.py', 1)[0],
-                 executor = set([transformExecutor(),]), exeArgs = None, description = ''):
+                 trfName = None, executor = None, exeArgs = None, description = ''):
         '''Transform class initialiser'''
         msg.debug('Welcome to ATLAS job transforms')
 
@@ -62,12 +61,12 @@ class transform(object):
         self._trfPredata = os.environ.get('TRF_PREDATA')
 
         ## Transform _name
-        self._name = trfName
+        self._name = trfName or path.basename(sys.argv[0]).rsplit('.py', 1)[0]
 
         ## @note Holder for arguments this trf understands
         # Use @c argparse.SUPPRESS to have non-given arguments unset, rather than None
         # Support reading arguments from a file using the notation @c @file
-        self.parser = trfArgParser(description='Transform {0}. {1}'.format(trfName, description),
+        self.parser = trfArgParser(description='Transform {0}. {1}'.format(self.name, description),
                                    argument_default=argparse.SUPPRESS,
                                    fromfile_prefix_chars='@')
 
@@ -90,9 +89,9 @@ class transform(object):
         self._executors = set()
         self._executorDictionary = {}
 
-        # If we were passed executors at construction time then append them to the set:
+        # Append the given executors or a default one to the set:
         if executor is not None:
-            self.appendToExecutorSet(executor)
+            self.appendToExecutorSet(executor or {transformExecutor()})
 
         ## Transform exit code/message holders
         self._exitCode = None
diff --git a/Tools/PyJobTransforms/python/trfUtils.py b/Tools/PyJobTransforms/python/trfUtils.py
index 7a8efaea715a..58ac29f8361d 100644
--- a/Tools/PyJobTransforms/python/trfUtils.py
+++ b/Tools/PyJobTransforms/python/trfUtils.py
@@ -106,7 +106,7 @@ def getAncestry(listMyOrphans = False):
 # @param parent The parent process for which to return all the child PIDs
 # @param listOrphans Parameter value to pass to getAncestry() if necessary
 # @return @c children List of child PIDs
-def listChildren(psTree = None, parent = os.getpid(), listOrphans = False):
+def listChildren(psTree = None, parent = os.getpid(), listOrphans = False): # noqa: B008 (PID is constant)
     '''Take a psTree dictionary and list all children'''
     if psTree is None:
         psTree = getAncestry(listMyOrphans = listOrphans)
@@ -873,7 +873,7 @@ class ParallelJobProcessor(object):
     def __init__(
         self,
         jobSubmission = None,
-        numberOfProcesses = multiprocessing.cpu_count(),
+        numberOfProcesses = multiprocessing.cpu_count(), # noqa: B008 (cpu_count is constant)
         ):
         self.jobSubmission = jobSubmission
         self.numberOfProcesses = numberOfProcesses
-- 
GitLab