Commit fb30b95f authored by Frank Winklmeier

Merge branch 'memoize' into 'master'

PyUtils: replace memoize by functools.cache

See merge request atlas/athena!48193
parents 8d4b8517 98723928
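
The change itself is a mechanical decorator swap: functools.cache (Python 3.9+, equivalent to functools.lru_cache(maxsize=None)) takes over the role of PyUtils.Decorators.memoize at each call site shown below. A minimal before/after sketch, using a hypothetical get_tool function purely for illustration:

    # before: dictionary-based memoization from PyUtils.Decorators
    #   from PyUtils.Decorators import memoize
    #   @memoize
    #   def get_tool(name): ...

    # after: the standard-library cache, same behaviour at the call site
    from functools import cache

    @cache
    def get_tool(name):
        # stand-in for an expensive configuration lookup; repeated calls with
        # the same (hashable) argument return the cached object without recomputing it
        print("configuring", name)
        return {"name": name}

    get_tool("MyFilterTool")   # prints "configuring MyFilterTool"
    get_tool("MyFilterTool")   # cached: no print, same object returned
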
# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
# Copyright (C) 2002-2021 CERN for the benefit of the ATLAS collaboration
from PyUtils.Decorators import memoize
from functools import reduce
from functools import reduce, cache
def _resolve_db_tag(origDbTag):
from AthenaCommon.GlobalFlags import globalflags
......@@ -32,7 +31,7 @@ def _InstanceFromProjectName():
# Set up the bad lb filter conditions algorithm
# Cache instance once already created
@memoize
@cache
def GetBadLBFilterAlg(name, defects, writekey, ignoreRecoverable=False, origDbTag=None):
"""
Configure an instance of the bad LB filter conditions algorithm. Not intended to be called directly by users.
......@@ -119,7 +118,7 @@ def GetBadLBFilterTool(name, defects, alwaysReturnTrue=False, ignoreRecoverable=
return monFilterTool
@memoize
@cache
def LArBadDefectList(origDbTag=None):
"""
Get the defects to configure for LAr - cache results to avoid lots of DB lookups
......
# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
# Copyright (C) 2002-2021 CERN for the benefit of the ATLAS collaboration
from AthenaConfiguration.ComponentFactory import CompFactory
from AthenaConfiguration.ComponentAccumulator import ComponentAccumulator
from AthenaConfiguration.AccumulatorCache import AccumulatorCache
......@@ -74,7 +74,6 @@ def BadLBFilterToolCfg(inputFlags,name, defects, alwaysReturnTrue=False, ignoreR
return result
#@memoize - hash function on flags is deprecated, use AccumulatorCache instead
@AccumulatorCache
def LArDefectList(inputFlags,origDbTag=None):
"""
......
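
The one configuration entry point above that does not move to functools.cache is the flags-based LArDefectList: cache keys its results on the call arguments, so every argument must be hashable, and hashing the flags container is deprecated (hence AccumulatorCache, as the retained comment notes). A small, hypothetical illustration of the hashability constraint:

    from functools import cache

    @cache
    def lookup(key):
        return len(key)

    lookup("LArBadChannel")   # fine: strings are hashable, the result is cached
    # lookup({"defect": 1})   # would raise TypeError: unhashable type: 'dict'
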
# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
# Copyright (C) 2002-2021 CERN for the benefit of the ATLAS collaboration
from PyUtils.Decorators import memoize
from functools import cache
# Set up the event cleaning filter tool
# Cache instances that are already created
@memoize
@cache
def GetEventFlagFilterTool(name, doLAr=True, doTile=True, doSCT=True, doCore=True, alwaysReturnTrue=False):
"""
Configure an instance of the bad LB filter tool. If called twice with the same options, will return the same instance.
......
# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
# Copyright (C) 2002-2021 CERN for the benefit of the ATLAS collaboration
# @file: AthenaPython/python/Bindings.py
# @author: Sebastien Binet <binet@cern.ch>
from __future__ import print_function
### data
__author__ = """
Sebastien Binet (binet@cern.ch)
"""
### imports
from PyUtils.Decorators import memoize
from functools import cache
from AthenaCommon.Logging import logging
@memoize
@cache
def _load_dict(lib):
"""Helper function to remember which libraries have been already loaded
"""
......@@ -23,7 +21,7 @@ def _load_dict(lib):
lib="lib"+lib
return cppyy.load_library(lib)
@memoize
@cache
def _import_ROOT():
import ROOT
ROOT.gROOT.SetBatch(True)
......@@ -79,7 +77,7 @@ class _PyAthenaBindingsCatalog(object):
return
@staticmethod
@memoize
@cache
def init(name):
"""Initialize the python binding with the callback previously registered
If no callback was registered, swallow the warning...
......@@ -100,7 +98,7 @@ class _PyAthenaBindingsCatalog(object):
return klass
### helper method to easily retrieve services by name -------------------------
@memoize
@cache
def py_svc(svcName, createIf=True, iface=None):
"""
Helper function to retrieve a service by name, using Gaudi python bindings.
......@@ -141,7 +139,7 @@ def py_svc(svcName, createIf=True, iface=None):
return svc
### helper method to easily retrieve tools from ToolSvc by name ---------------
@memoize
@cache
def py_tool(toolName, createIf=True, iface=None):
"""
Helper function to retrieve a tool (owned by the ToolSvc) by name, using
......@@ -193,7 +191,6 @@ def py_tool(toolName, createIf=True, iface=None):
return tool
### helper method to easily retrieve algorithms by name -----------------------
# @c memoize # don't memoize ?
def py_alg(algName, iface='IAlgorithm'):
"""
Helper function to retrieve an IAlgorithm (managed by the IAlgManager_) by
......@@ -238,7 +235,7 @@ def py_alg(algName, iface='IAlgorithm'):
return alg
### pythonizations for StoreGateSvc
@memoize
@cache
def _py_init_StoreGateSvc():
## most probably, user will want to interact with PyRoot objects
## => install the fixes for our user
......@@ -251,7 +248,7 @@ def _py_init_StoreGateSvc():
return StoreGateSvc
### pythonizations for IIncidentSvc
@memoize
@cache
def _py_init_IIncidentSvc():
import cppyy
# IIncidentSvc bindings from dictionary
......@@ -283,7 +280,7 @@ def _py_init_IIncidentSvc():
return IIncidentSvc
### pythonizations for ClassIDSvc
@memoize
@cache
def _py_init_ClassIDSvc():
import cppyy
# IClassIDSvc bindings from dictionary
......@@ -304,7 +301,7 @@ def _py_init_ClassIDSvc():
IClassIDSvc._clidgen = clidGenerator(db=None)
# add pythonized methods
@memoize
@cache
def _clid (self, name):
# handle special cases where CLID has been registered with a typedef
try: name = _clid_typename_aliases[name]
......@@ -316,7 +313,7 @@ def _py_init_ClassIDSvc():
IClassIDSvc.clid = _clid
del _clid
@memoize
@cache
def _typename (self, clid):
# handle special cases of missing clids
try:
......@@ -330,7 +327,7 @@ def _py_init_ClassIDSvc():
return IClassIDSvc
### pythonizations for ITHistSvc
@memoize
@cache
def _py_init_THistSvc():
import cppyy
# ITHistSvc bindings from dictionary
......@@ -588,7 +585,7 @@ del %s""" % (n,n,n,n,n)
return ITHistSvc
### pythonizations for EventStreamInfo
@memoize
@cache
def _py_init_EventStreamInfo():
import cppyy
# EventStreamInfo bindings from dictionary
......@@ -620,7 +617,7 @@ def _py_init_EventStreamInfo():
return ESI
### pythonizations for EventType
@memoize
@cache
def _py_init_EventType():
import cppyy
# EventStreamInfo bindings from dictionary
......@@ -652,27 +649,27 @@ def _py_init_EventType():
return cls
### pythonizations for DataLink
@memoize
@cache
def _py_init_DataLink():
return _gen_data_link
### pythonizations for ElementLink
@memoize
@cache
def _py_init_ElementLink():
return _gen_element_link
### pythonizations for ElementLinkVector
@memoize
@cache
def _py_init_ElementLinkVector():
return _gen_elv
### pythonizations for NavigationToken
@memoize
@cache
def _py_init_NavigationToken():
return _gen_navtok
### helper method to easily instantiate DataLink ------------------------------
@memoize
@cache
def _gen_data_link(klass, storage_policy=None):
"""helper method to easily instantiate a DataLink class.
Sensible default for the storage policy is chosen if none given (it usually
......@@ -691,7 +688,7 @@ def _gen_data_link(klass, storage_policy=None):
return ROOT.DataLink(klass, storage_policy)
### helper method to easily instantiate ElementLink ---------------------------
@memoize
@cache
def _gen_element_link(klass, storage_policy=None, indexing_policy=None):
"""helper method to easily instantiate an ElementLink class.
Sensible defaults for the storage and indexing policies are chosen if none
......@@ -714,7 +711,7 @@ def _gen_element_link(klass, storage_policy=None, indexing_policy=None):
return ROOT.ElementLink(klass)
### helper method to easily instantiate ElementLinkVector ---------------------
@memoize
@cache
def _gen_elv(klass, storage_policy=None, indexing_policy=None):
"""helper method to easily instantiate an ElementLinkVector class.
Sensible defaults for the storage and indexing policies are chosen if none
......@@ -734,7 +731,7 @@ def _gen_elv(klass, storage_policy=None, indexing_policy=None):
return ROOT.ElementLinkVector(klass, storage_policy, indexing_policy)
### helper method to easily instantiate NavigationToken -----------------------
@memoize
@cache
def _gen_navtok(klass, weight_cls=None, hash_cls=None):
"""helper method to easily instantiate a NavigationToken class.
Sensible default for the weight and hash parameters are chosen if none are
......
# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
# Copyright (C) 2002-2021 CERN for the benefit of the ATLAS collaboration
# @file: PyPerfMon.py
# @author: Sebastien Binet <binet@cern.ch>
......@@ -9,18 +9,15 @@ __doc__ = """python module holding a python service to monitor athena perfor
"""
import os,sys
from functools import cache
import AthenaCommon.Logging as L
_perfMonStates = ('ini','evt','fin')
from PerfMonComps.PyMonUtils import Units, pymon
from PyUtils.Decorators import forking
from PyUtils.Decorators import memoize, forking
import six
_perfMonStates = ('ini','evt','fin')
@memoize
@cache
def _import_ROOT():
# FIXME: work-around ROOT's silly behaviour wrt graphics libraries
# see: https://savannah.cern.ch/bugs/?35461
......@@ -430,7 +427,7 @@ class Svc(object):
## write out meta-data
import PyUtils.dbsqlite as dbs
meta = dbs.open(headerFile, 'n')
for k,v in six.iteritems (self.meta):
for k,v in self.meta.items():
meta[k] = v
meta['version_id'] = '0.4.0' # stream-format + header file
meta['pmon_tuple_files'] = map( os.path.basename, outFiles[1:] )
......
#!/usr/bin/env python
# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
# Copyright (C) 2002-2021 CERN for the benefit of the ATLAS collaboration
"""
Periods are assigned an ordinate by which they can be sorted
"""
from __future__ import print_function
from PyUtils.Decorators import memoize
from functools import cache
import re,sys
pat_last = re.compile(r"(?:l|la|las|last) (\d*)$") # l(ast) NN runs
pat_number = re.compile(r"\d{5,8}[+-]?$") # run number (5-8 digits), possibly followed by a + or -
pat_range = re.compile(r"\d{5,8}-\d{5,8}$") # range of run numbers (each 5-8 digits)
......@@ -42,7 +40,7 @@ def getRunsFromPeriods(list_of_periods):
return runlist
@memoize
@cache
def getSortedAvailablePeriods():
from CoolRunQuery.AtlRunQueryCOMA import ARQ_COMA
available_periods = ARQ_COMA.get_all_periods()
......
# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
# Copyright (C) 2002-2021 CERN for the benefit of the ATLAS collaboration
#
# Utilities used in athenaHLT.py
#
from PyUtils.Decorators import memoize
from AthenaCommon.Logging import logging
log = logging.getLogger('athenaHLT')
from functools import cache
class CondDB:
_run2 = 236108
def __init__(self, run):
......@@ -21,7 +22,7 @@ class CondDB:
else:
return '/TDAQ/RunCtrl/SOR_Params'
@memoize
@cache
def get_sor_params(run_number):
import pickle
cool_cache = 'AthHLT.sor.pkl'
......@@ -62,7 +63,7 @@ def get_sor_params(run_number):
return d
@memoize
@cache
def get_trigconf_keys(run_number):
"""Read HLT keys from COOL"""
......
# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
# Copyright (C) 2002-2021 CERN for the benefit of the ATLAS collaboration
# @author: Sebastien Binet <binet@cern.ch>
# @date: March 2008
# @purpose: a set of decorators. Most of them (if not all) have been stolen
# from here:
# http://www.phyast.pitt.edu/~micheles/python/documentation.html
#
from __future__ import with_statement, print_function
__author__ = "Sebastien Binet <binet@cern.ch>"
__all__ = [
'memoize',
'forking',
'async_decor',
]
"""
Some useful decorators.
"""
import sys
import itertools
from decorator import decorator
@decorator
def memoize(func, *args):
"""This decorator implements the memoize pattern, i.e. it caches the result
of a function in a dictionary, so that the next time the function is called
with the same input parameters the result is retrieved from the cache and
not recomputed.
"""
try:
mem_dict = getattr(func, "_mem_dict")
except AttributeError:
# look-up failed so we have to build the cache holder
mem_dict = {}
setattr(func, "_mem_dict", mem_dict)
try:
return mem_dict[args]
except KeyError:
# look-up failed so we have to build the result the first time around
# then we cache
mem_dict[args] = result = func(*args)
return result
# FIXME: does not work... func is an instance of FunctionMaker which cannot
# be pickled...
@decorator
def mp_forking(func, *args, **kwargs):
import multiprocessing as mp
## pool = mp.Pool (processes=1)
## return pool.apply (func, *args, **kwargs)
# create a local queue to fetch the results back
def wrapping(func):
q = mp.Queue()
def wrap_fct(*args, **kwargs):
try:
res = func(*args, **kwargs)
# catch *everything* and 're-raise'
except BaseException as err:
#import traceback; traceback.print_exc()
res = err
q.put(res)
wrap_fct.q = q
return wrap_fct
func = wrapping(func)
proc = mp.Process(target=func, args=args, kwargs=kwargs)
proc.start()
res = func.q.get()
proc.join()
proc.terminate()
if isinstance(res, BaseException):
#import traceback; traceback.print_exc()
raise res
#reraise_exception(exc,exc_info)
return res
def reraise_exception(new_exc, exc_info=None):
def _reraise_exception(new_exc, exc_info=None):
if exc_info is None:
exc_info = sys.exc_info()
_exc_class, _exc, tb = exc_info
......@@ -111,7 +43,7 @@ def forking(func, *args, **kwargs):
return result
else:
remote_exc = result[0]
reraise_exception(remote_exc)
_reraise_exception(remote_exc)
## child ##
else:
......@@ -135,73 +67,3 @@ def forking(func, *args, **kwargs):
os._exit(0)
pass # forking
### a decorator converting blocking functions into asynchronous functions
# stolen from http://pypi.python.org/pypi/decorator/3.0.0
def _async_on_success(result): # default implementation
"Called on the result of the function"
return result
def _async_on_failure(exc_info): # default implementation
"Called if the function fails"
_exc_class, _exc, tb = exc_info
raise _exc_class (_exc, tb)
pass
def _async_on_closing(): # default implementation
"Called at the end, both in case of success and failure"
pass
class Async(object):
"""
A decorator converting blocking functions into asynchronous
functions, by using threads or processes. Examples:
async_with_threads = Async(threading.Thread)
async_with_processes = Async(multiprocessing.Process)
"""
def __init__(self, threadfactory):
self.threadfactory = threadfactory
def __call__(self, func,
on_success=_async_on_success,
on_failure=_async_on_failure,
on_closing=_async_on_closing):
# every decorated function has its own independent thread counter
func.counter = itertools.count(1)
func.on_success = on_success
func.on_failure = on_failure
func.on_closing = on_closing
return decorator(self.call, func)
def call(self, func, *args, **kw):
def func_wrapper():
try:
result = func(*args, **kw)
except Exception:
func.on_failure(sys.exc_info())
else:
return func.on_success(result)
finally:
func.on_closing()
name = '%s-%s' % (func.__name__, next(func.counter))
thread = self.threadfactory(None, func_wrapper, name)
thread.start()
return thread
# default async decorator: using processes
def async_decor(async_type='mp'):
if async_type in ("mp", "multiprocessing"):
from multiprocessing import Process
factory = Process
elif async_type in ("th", "threading"):
from threading import Thread
factory = Thread
else:
raise ValueError ("async_type must be either 'multiprocessing' "
"or 'threading' (got: %s)"%async_type)
async_obj = Async (factory)
return async_obj
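
The memoize removed from Decorators.py above kept its results in a per-function _mem_dict keyed on positional arguments only; functools.cache keys on positional and keyword arguments and also exposes cache_info() and cache_clear() on the wrapped function. A short sketch with a hypothetical function f, illustration only:

    from functools import cache

    @cache
    def f(x):
        return x * x

    f(3)
    f(3)                    # second call is served from the cache
    print(f.cache_info())   # CacheInfo(hits=1, misses=1, maxsize=None, currsize=1)
    f.cache_clear()         # explicit reset of the cache
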
# Copyright (C) 2002-2020 CERN for the benefit of the ATLAS collaboration
# Copyright (C) 2002-2021 CERN for the benefit of the ATLAS collaboration
# @file PyUtils.RootUtils
# @author Sebastien Binet
# @purpose a few utils to ease the day-to-day work with ROOT
# @date November 2009
from __future__ import with_statement, print_function
__doc__ = "a few utils to ease the day-to-day work with ROOT"
__author__ = "Sebastien Binet"
......@@ -19,8 +17,7 @@ __all__ = [
import os
import re
import six
from .Decorators import memoize
from functools import cache
### functions -----------------------------------------------------------------
def import_root(batch=True):
......@@ -120,7 +117,7 @@ def _root_compile (src, fname, batch):
ROOT.gErrorIgnoreLevel = orig_root_lvl
return
@memoize
@cache
def _pythonize_tfile():
import cppyy
root = import_root()
......
......@@ -11,7 +11,7 @@ __author__ = "Sebastien Binet"
### imports -------------------------------------------------------------------
import PyUtils.acmdlib as acmdlib
import re
from PyUtils.Decorators import memoize
from functools import cache
from math import isnan
from numbers import Real
from os import environ
......@@ -330,7 +330,7 @@ def main(args):
else:
return [int(s) for s in entry[2] if s.isdigit()]
@memoize
@cache
def skip_leaf(name_from_dump, skip_leaves):
""" Here decide if the current leaf should be skipped.
Previously the matching was done based on the full or partial
......
......@@ -2,7 +2,7 @@
import xml.etree.cElementTree as ET
from PyUtils.Decorators import memoize
from functools import cache
class TrigXMLElement:
def __init__(self,element):
......@@ -60,7 +60,7 @@ class MioctGeometryXMLReader(TrigXMLDocumentReader):
def getMIOCTs(self):
return self.MuCTPiGeometry.MIOCTs
@memoize
@cache
def getMIOCT(self, id):
for mioct in self.MuCTPiGeometry.MIOCTs:
if int(mioct["id"]) == id:
......
#!/usr/bin/env python
# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
# Copyright (C) 2002-2021 CERN for the benefit of the ATLAS collaboration
from __future__ import print_function
import eformat
import argparse
import operator
from PyUtils.Decorators import memoize
from functools import cache
__doc__ = """\
Dump content of the HLT result and HLT related details from the event header.
......@@ -129,7 +127,7 @@ def CTP_Info(event, module_id=1):
print("ROB 0x%0x, %s: %s" % (rob.source_id(), w, printL1Items(items,smk)))
@memoize
@cache
def getL1Menu(smk):
from CoolRunQuery.utils.AtlRunQueryTriggerUtils import getL1Menu
return getL1Menu(str(smk))
......