diff --git a/Control/AthenaConfiguration/python/OldFlags2NewFlags.py b/Control/AthenaConfiguration/python/OldFlags2NewFlags.py
index c80d3365c4e7adb9cd91fd50f90f4bec17db1a90..3d20e46365602d2e682092fdc1a743714598fb98 100644
--- a/Control/AthenaConfiguration/python/OldFlags2NewFlags.py
+++ b/Control/AthenaConfiguration/python/OldFlags2NewFlags.py
@@ -16,9 +16,12 @@ def getNewConfigFlags():
     ConfigFlags.Input.Files = jobproperties.AthenaCommonFlags.FilesInput.get_Value()
     ConfigFlags.IOVDb.GlobalTag = jobproperties.Global.ConditionsTag()
     ConfigFlags.Beam.BunchSpacing = jobproperties.Beam.bunchSpacing()
+    ConfigFlags.Output.HISTFileName = jobproperties.DQMonFlagsCont.histogramFile()
     # Geometry - General
     ConfigFlags.GeoModel.AtlasVersion = jobproperties.Global.DetDescrVersion()
     ConfigFlags.GeoModel.Align.Dynamic = InDetGeometryFlags.useDynamicAlignFolders()
+    # Environment
+    ConfigFlags.Common.isOnline = jobproperties.AthenaCommonFlags.isOnline()
     # Concurrency
     from AthenaCommon.ConcurrencyFlags import jobproperties as jp
@@ -61,4 +64,8 @@ def getNewConfigFlags():
     for flag in reco_flag_map:
         ConfigFlags._set('Detector.Enable'+flag, getattr(DetFlags.haveRIO,reco_flag_map[flag]+'_on')() )
 
+    # miscellaneous settings
+    ConfigFlags.InDet.doTIDE_Ambi=jobproperties.InDetJobProperties.doTIDE_Ambi()
+    ConfigFlags.InDet.useDCS=jobproperties.InDetJobProperties.useDCS()
+
     return ConfigFlags
diff --git a/Control/AthenaMonitoring/python/AthMonitorCfgHelper.py b/Control/AthenaMonitoring/python/AthMonitorCfgHelper.py
index f39ce830d3c38a29fce498919d17ac5f807bbc7d..c684f11d51fc4c1df3609c6ec995494e955e96e8 100644
--- a/Control/AthenaMonitoring/python/AthMonitorCfgHelper.py
+++ b/Control/AthenaMonitoring/python/AthMonitorCfgHelper.py
@@ -123,6 +123,7 @@ class AthMonitorCfgHelper(object):
            tool -- a GenericMonitoringToolArray object. This is used to define histograms
                    associated with each group in the array.
        '''
+        # Generate the n-dimensional array
        from AthenaMonitoringKernel.GenericMonitoringTool import GenericMonitoringArray
        array = GenericMonitoringArray(baseName,dimensions)
@@ -134,6 +135,8 @@ class AthMonitorCfgHelper(object):
             self.resobj.merge(acc)
         pathToSet = self.inputFlags.DQ.FileKey+('/%s' % topPath if topPath else '')
+        if self.inputFlags.Output.HISTFileName:
+            pathToSet = '/' + pathToSet
         array.broadcast('HistPath',pathToSet)
         array.broadcast('UseCache',True)
         convention = 'ONLINE' if self.inputFlags.Common.isOnline else 'OFFLINE'
@@ -267,7 +270,9 @@ class AthMonitorCfgHelperOld(object):
             from GaudiSvc.GaudiSvcConf import THistSvc
             svcMgr += THistSvc()
         # Set the histogram path
-        pathToSet = self.dqflags.monManFileKey() + ('/%s' % topPath if topPath else '')
+        pathToSet = self.dqflags.monManFileKey()+('/%s' % topPath if topPath else '')
+        if self.dqflags.histogramFile():
+            pathToSet = '/' + pathToSet
         # Detect if online or offline
         from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
         conventionName = 'OFFLINE' if not athenaCommonFlags.isOnline() else 'ONLINE'
@@ -311,8 +316,9 @@ def getDQTHistSvc(inputFlags):
         return result
 
     histsvc = THistSvc()
-    histsvc.Output += ["%s DATAFILE='%s' OPT='RECREATE'" % (inputFlags.DQ.FileKey,
-                                                            inputFlags.Output.HISTFileName)]
+    if inputFlags.Output.HISTFileName:
+        histsvc.Output += ["%s DATAFILE='%s' OPT='RECREATE'" % (inputFlags.DQ.FileKey,
+                                                                inputFlags.Output.HISTFileName)]
     result.addService(histsvc)
     return result
diff --git a/Control/AthenaMonitoring/python/AthenaMonitoringCfg.py b/Control/AthenaMonitoring/python/AthenaMonitoringCfg.py
index 68602c3dfad07b97577cebe9c7a1ffc4044692ce..323d614a805fd2f4d377fea4f52cff7c00a113ca 100644
--- a/Control/AthenaMonitoring/python/AthenaMonitoringCfg.py
+++ b/Control/AthenaMonitoring/python/AthenaMonitoringCfg.py
@@ -123,7 +123,7 @@ def AthenaMonitoringCfg(flags):
         for t in algo.GMTools:
             for h in t.Histograms:
                 ho = json.loads(h)
-                fullpath = os.path.join(ho['convention'], t.HistPath, ho['path'], ho['alias'])
+                fullpath = os.path.join(t.HistPath, ho['path'], ho['alias']) + ':' + ho['convention']
                 if fullpath in definedhists:
                     previous = definedhists[fullpath]
                     error(f'Multiple definition of histogram {fullpath} by:\n\t{algo.getName()}/{t.getName()} ({ho}) and\n\t{previous[0]}/{previous[1]} ({previous[2]})')
diff --git a/Control/AthenaMonitoring/python/DQMonFlags.py b/Control/AthenaMonitoring/python/DQMonFlags.py
index b7edcfb4e15056f25b6eaa8eeea8acfb590e300e..771a0f9b94120f4d0a97f021c9ef1924edba0ee0 100644
--- a/Control/AthenaMonitoring/python/DQMonFlags.py
+++ b/Control/AthenaMonitoring/python/DQMonFlags.py
@@ -442,6 +442,20 @@ class triggerMixedMode(JobProperty):
     StoredValue=False
 list+=[triggerMixedMode]
 
+class doPostProcessing(JobProperty):
+    """ Run histogram postprocessing in Athena job """
+    statusOn=True
+    allowedTypes=['bool']
+    StoredValue=False
+list+=[doPostProcessing]
+
+class postProcessingInterval(JobProperty):
+    """ Set number of events between postprocessing steps """
+    statusOn=True
+    allowedTypes=['int']
+    StoredValue=100
+list+=[postProcessingInterval]
+
 ##-----------------------------------------------------------------------------
 ## 2nd step
 ## Definition of the DQMon flag container
diff --git a/Control/AthenaMonitoring/share/DQMonFlagsConfig_jobOptions.py b/Control/AthenaMonitoring/share/DQMonFlagsConfig_jobOptions.py
index 2c958d49fa283bf06896367087950b7de6cf7ee1..de1b70429fff4d708614173757137883d629424e 100644
--- a/Control/AthenaMonitoring/share/DQMonFlagsConfig_jobOptions.py
+++ b/Control/AthenaMonitoring/share/DQMonFlagsConfig_jobOptions.py
@@ -489,15 +489,10 @@
 from AthenaConfiguration.OldFlags2NewFlags import getNewConfigFlags
 # Translate all needed flags from old jobProperties to a new AthConfigFlag Container
 ConfigFlags = getNewConfigFlags()
-ConfigFlags.InDet.usePixelDCS=InDetFlags.usePixelDCS()
-ConfigFlags.InDet.doTIDE_Ambi=InDetFlags.doTIDE_Ambi()
-
-ConfigFlags.Output.HISTFileName=DQMonFlags.histogramFile()
 ConfigFlags.DQ.FileKey=DQMonFlags.monManFileKey()
 ConfigFlags.DQ.Environment=DQMonFlags.monManEnvironment()
 ConfigFlags.DQ.useTrigger=DQMonFlags.useTrigger()
 ConfigFlags.DQ.triggerDataAvailable=DQMonFlags.useTrigger()
-ConfigFlags.IOVDb.GlobalTag=globalflags.ConditionsTag()
 ConfigFlags.DQ.isReallyOldStyle=False
 
 from AthenaConfiguration import ComponentAccumulator
diff --git a/Control/AthenaMonitoring/share/DataQualitySteering_jobOptions.py b/Control/AthenaMonitoring/share/DataQualitySteering_jobOptions.py
index 781e45bed31b644a24cd27248710d8cf8386eea6..de48273c23d3ec2aa3d78286a29a1429d3699213 100644
--- a/Control/AthenaMonitoring/share/DataQualitySteering_jobOptions.py
+++ b/Control/AthenaMonitoring/share/DataQualitySteering_jobOptions.py
@@ -382,4 +382,16 @@ if DQMonFlags.doMonitoring():
         asq = AthSequencer("AthBeginSeq")
         asq += AthenaMonitoringConf.ForceIDConditionsAlg("ForceIDConditionsAlg")
 
+    if DQMonFlags.doPostProcessing():
+        from AthenaConfiguration.AllConfigFlags import ConfigFlags
+        asq = AthSequencer("AthEndSeq")
+        from DataQualityUtils.DQPostProcessingAlg import DQPostProcessingAlg
+        ppa = DQPostProcessingAlg("DQPostProcessingAlg")
+        ppa.ExtraInputs = [( 'xAOD::EventInfo' , 'StoreGateSvc+EventInfo' )]
+        ppa.Interval = DQMonFlags.postProcessingInterval()
+        if ConfigFlags.Common.isOnline:
+            ppa.FileKey = ((ConfigFlags.DQ.FileKey + '/') if not ConfigFlags.DQ.FileKey.endswith('/')
+                           else ConfigFlags.DQ.FileKey)
+        asq += ppa
+
     del local_logger
diff --git a/Control/AthenaMonitoringKernel/src/HistogramFiller/HistogramFactory.cxx b/Control/AthenaMonitoringKernel/src/HistogramFiller/HistogramFactory.cxx
index 29733fd0d283882d3dc6fdf15d279c0b20122f29..430fb5bfe04c9aa64f0dd9778c4f21970ef2ad4e 100644
--- a/Control/AthenaMonitoringKernel/src/HistogramFiller/HistogramFactory.cxx
+++ b/Control/AthenaMonitoringKernel/src/HistogramFiller/HistogramFactory.cxx
@@ -19,10 +19,14 @@ HistogramFactory::HistogramFactory(const ServiceHandle<ITHistSvc>& histSvc,
                                    std::string histoPath)
   : m_histSvc(histSvc)
 {
-  size_t split = histoPath.find('/');
-  m_streamName = histoPath.substr(0,split);
-  m_groupName = split!=std::string::npos ? histoPath.substr(split) : "";
-
+  size_t whereToStart = 0;
+  // do we have a leading slash? This distinguishes temporary streams in THistSvc
+  if (! histoPath.empty() && histoPath[0] == '/') {
+    whereToStart = 1;
+  }
+  size_t split = histoPath.find('/', whereToStart);
+  m_streamName = (whereToStart == 1 ? "/" : "") + histoPath.substr(0,split);
+  m_groupName = split!=std::string::npos ? histoPath.substr(split+1) : "";
   // Infrequently, loading a ROOT class in a MT context can fail.
   // So try to load the classes we'll need early.
   TClass::GetClass("TH1F");
@@ -260,14 +264,14 @@ namespace {
 }
 
 std::string HistogramFactory::getFullName(const HistogramDef& def) const {
-
+  // for online paths, always prepend a slash. Otherwise take it from provided stream name
   std::string path;
   if ( onlinePaths.count( def.path)!=0 ) {
     path = "/" + def.path + "/" + m_streamName + "/" + m_groupName;
   } else if ( def.path=="DEFAULT" ) {
     path = "/" + m_streamName + "/" + m_groupName;
   } else {
-    path = "/" + m_streamName + "/" + def.tld + "/" + m_groupName + "/" + def.path;
+    path = m_streamName + "/" + def.tld + "/" + m_groupName + "/" + def.path;
   }
 
   // remove duplicate slashes
diff --git a/Control/AthenaMonitoringKernel/test/HistogramFactoryTestSuite.cxx b/Control/AthenaMonitoringKernel/test/HistogramFactoryTestSuite.cxx
index 04b20227e15a16b3cf88adc1ea54ad7e3be0ff8c..ef58d00754242a0eb52e7e185a5bdcbcf1d20011 100644
--- a/Control/AthenaMonitoringKernel/test/HistogramFactoryTestSuite.cxx
+++ b/Control/AthenaMonitoringKernel/test/HistogramFactoryTestSuite.cxx
@@ -50,6 +50,7 @@ class HistogramFactoryTestSuite {
       REGISTER_TEST_CASE(test_shouldProperlyFormatPathForDefaultHistograms),
       REGISTER_TEST_CASE(test_shouldProperlyFormatPathForCustomHistograms),
       REGISTER_TEST_CASE(test_shouldProperlyFormatPathForOfflineHistograms),
+      REGISTER_TEST_CASE(test_shouldProperlyFormatPathForTempOfflineHistograms),
       REGISTER_TEST_CASE(test_shouldSetXAxisLabelsFor1DHistogram),
       REGISTER_TEST_CASE(test_shouldSetXAndYAxisLabelsFor2DHistogram),
       REGISTER_TEST_CASE(test_shouldSetExtendAxesWhenkCanRebinIsSet),
@@ -62,7 +63,7 @@ class HistogramFactoryTestSuite {
   // ==================== Test code ====================
   private:
     void beforeEach() {
-      m_testObj.reset(new HistogramFactory(m_histSvc, "HistogramFactoryTestSuite"));
+      m_testObj.reset(new HistogramFactory(m_histSvc, "/HistogramFactoryTestSuite"));
     }
 
     void afterEach() {
@@ -180,6 +181,16 @@ class HistogramFactoryTestSuite {
       VALUE(m_histSvc->exists("/HistogramFactoryTestSuite/run_XXXXXX/lbYYY/custom/path/for/histogram/offlineAlias")) EXPECTED(true);
     }
 
+    void test_shouldProperlyFormatPathForTempOfflineHistograms() {
+      m_testObj.reset(new HistogramFactory(m_histSvc, "HistogramFactoryTestSuite"));
+      HistogramDef histogramDef = defaultHistogramDef("TH1F");
+      histogramDef.path = "/custom/path/for/histogram";
+      histogramDef.alias = "offlineAlias";
+      histogramDef.tld = "/run_XXXXXX/lbYYY/";
+      m_testObj->create(histogramDef);
+      VALUE(m_histSvc->exists("HistogramFactoryTestSuite/run_XXXXXX/lbYYY/custom/path/for/histogram/offlineAlias")) EXPECTED(true);
+    }
+
     void test_shouldSetXAxisLabelsFor1DHistogram() {
       HistogramDef histogramDef = defaultHistogramDef("TH1F");
       histogramDef.alias = "labels1DTestAlias";
diff --git a/Control/AthenaPython/python/Bindings.py b/Control/AthenaPython/python/Bindings.py
index cb265b608d536535127d38db02d4272b373af7ff..a0f1c9c79f9a48597f2d4da7de64f72dded84cdb 100644
--- a/Control/AthenaPython/python/Bindings.py
+++ b/Control/AthenaPython/python/Bindings.py
@@ -353,7 +353,7 @@ def _py_init_THistSvc():
 
     # save original regXYZ methods: we'll use some modified ones
    # to improve look-up time from python
-    for n in ('Hist', 'Graph', 'Tree'):
+    for n in ('Hist', 'Graph', 'Efficiency', 'Tree'):
        code = "ITHistSvc._cpp_reg%s = ITHistSvc.reg%s" % (n,n)
        exec (code, globals(),locals())
 
@@ -395,6 +398,8 @@ def _py_init_THistSvc():
            meth = '_cpp_regHist'
        elif isinstance(obj, (ROOT.TGraph,)):
            meth = '_cpp_regGraph'
+       elif isinstance(obj, (ROOT.TEfficiency,)):
+           meth = '_cpp_regEfficiency'
        elif isinstance(obj, (ROOT.TTree,)):
            meth = '_cpp_regTree'
        else:
@@ -431,8 +433,7 @@ def _py_init_THistSvc():
        except KeyError:
            pass
    def _get_helper(klass, hsvc, meth, oid, update_cache=True):
-       import cppyy
-       makeNullPtr = cppyy.libPyROOT.MakeNullPointer
+       makeNullPtr = ROOT.MakeNullPointer
        o = makeNullPtr(klass)
        if meth(oid, o).isSuccess():
            if update_cache:
@@ -446,6 +447,8 @@ def _py_init_THistSvc():
            return _get_helper(klass, self, self.getHist, oid)
        if issubclass(klass, (ROOT.TGraph,)):
            return _get_helper(klass, self, self.getGraph, oid)
+       if issubclass(klass, (ROOT.TEfficiency,)):
+           return _get_helper(klass, self, self.getEfficiency, oid)
        if issubclass(klass, (ROOT.TTree,)):
            return _get_helper(klass, self, self.getTree, oid)
        raise RuntimeError('unsupported type [%r]'%klass)
@@ -464,11 +467,14 @@ def _py_init_THistSvc():
            obj = _get_helper(klass, self, self.getHist, name)
 
        # then graphs
-       ## FIXME: no 'ITHistSvc::getGraphs' method !!
-       ##  https://savannah.cern.ch/bugs/index.php?36379
-##      oids = [n for n in self.getGraphs() if not n in self._py_cache.keys()]
-##      for name in oids:
-##          _get_helper(ROOT.TGraph, self, self.getGraph, name)
+       oids = [n for n in self.getGraphs() if n not in self._py_cache.keys()]
+       for name in oids:
+           _get_helper(ROOT.TGraph, self, self.getGraph, name)
+
+       # then efficiencies
+       oids = [n for n in self.getEfficiencies() if n not in self._py_cache.keys()]
+       for name in oids:
+           _get_helper(ROOT.TEfficiency, self, self.getEfficiency, name)
 
        # finally try ttrees
        oids = [n for n in self.getTrees() if n not in self._py_cache.keys()]
@@ -501,7 +507,7 @@ def _py_init_THistSvc():
    del setitem
 
    ## ties some loose ends
-   for n in ('Hist', 'Graph', 'Tree'):
+   for n in ('Hist', 'Graph', 'Efficiency', 'Tree'):
        code = """\
 def reg%s(self, oid, oid_type=None):
    if not (oid_type is None):
@@ -521,9 +527,10 @@ del reg%s""" % (n,n,n,n,n)
        its type. `oid_type' is a string whose value is either:
          - 'hist', to load any THx and TProfiles
          - 'tree', to load TTrees
+         - 'efficiency', to load TEfficiency
          - 'graph', to load TGraph and TGraphErrors
        """
-       _allowed_values = ('hist','tree','graph')
+       _allowed_values = ('hist','tree','efficiency','graph')
        if oid_type not in _allowed_values:
            raise ValueError(
                'oid_type (=%r) MUST be one of %r'%(oid_type,
diff --git a/DataQuality/DataQualityUtils/python/DQPostProcessingAlg.py b/DataQuality/DataQualityUtils/python/DQPostProcessingAlg.py
new file mode 100644
index 0000000000000000000000000000000000000000..c757114b2a167d9a0cf3acba4bd0c9360ea5518a
--- /dev/null
+++ b/DataQuality/DataQualityUtils/python/DQPostProcessingAlg.py
@@ -0,0 +1,271 @@
+# Copyright (C) 2002-2021 CERN for the benefit of the ATLAS collaboration
+
+# from AthenaPython.PyAthenaComps import StatusCode
+from AthenaPython import PyAthena
+
+import histgrinder
+import histgrinder.interfaces
+from histgrinder.HistObject import HistObject
+from typing import (Union, Iterable, Mapping, Any, Collection,
+                    Pattern, Generator)
+
+StatusCode = PyAthena.StatusCode
+
+class DQPostProcessingAlg(PyAthena.Alg):
+    def __init__(self, name=None, **kw):
+        super(DQPostProcessingAlg, self).__init__(name, **kw)
+        self.Interval = 1
+        self.FileKey = '/CombinedMonitoring/run_%(run)s/'
+        self._ctr = 0
+        self._run = 0
+        self._transformermap = {}
+        self._timings = {}
+        self.DoTiming = True
+        self.DoEntryOptimization = True
+
+    def initialize(self):
+        from histgrinder.config import read_configuration
+        from histgrinder.transform import Transformer
+        import os, glob
+        from DataQualityUtils._resolve_data_path import resolve_data_path
+
+        self.hsvc = PyAthena.py_svc('THistSvc')
+
+        dpath = resolve_data_path("DataQualityUtils")
+        if dpath is None:
+            self.msg.error("Unable to resolve DataQualityUtils data path, not running new-style postprocessing")
+            return StatusCode.Failure
+
+        # read configuration & set up transformations
+        self._transformers = []
+        postprocfiles = glob.glob(os.path.join(dpath,'postprocessing/*.yaml'))
+        self.msg.info(f'The postprocessing config file list is {postprocfiles}')
+        for configfile in glob.glob(os.path.join(dpath,'postprocessing/*.yaml')):
+            config = read_configuration(configfile)
+            self._transformers += [Transformer(_) for _ in config]
+        selectors = set()
+        for transform in self._transformers:
+            selectors.update(transform.inregexes)
+            if self.DoTiming:
+                self._timings[transform] = 0.
+
+        # Configure input
+        self._im = AthInputModule()
+        in_configuration = {'source': self}
+        # if args.prefix:
+        in_configuration['prefix'] = f'{self.FileKey}'
+        self._im.configure(in_configuration)
+        self._im.setSelectors(selectors)
+
+        # Configure output
+        self._om = AthOutputModule()
+        out_configuration = {'target': self}
+        # if args.prefix:
+        out_configuration['prefix'] = f'{self.FileKey}'
+        self._om.configure(out_configuration)
+        return StatusCode.Success
+
+    def _process(self):
+        import time
+        for obj in self._im:
+            self.msg.debug(f'now processing for {obj.name}')
+            translist = self._transformermap.get(obj.name, self._transformers)
+            needtocache = (translist == self._transformers)
+            cached = []
+            for _ in translist:
+                self.msg.debug(f'consider transformer {_.tc.description}')
+                if self.DoTiming:
+                    t0 = time.perf_counter()
+                v = _.consider(obj)
+                if self.DoTiming:
+                    t = time.perf_counter()-t0
+                    self._timings[_] += t
+                if v:
+                    if needtocache:
+                        cached.append(_)
+                    self.msg.debug('Match made!')
+                    self._om.publish(v)
+            if needtocache:
+                self._transformermap[obj.name] = cached
+        self._om.finalize()
+
+    def execute(self):
+        self._ctr += 1
+        if ((self._ctr - 1) % self.Interval) != 0: return StatusCode.Success
+        self._run = self.evtStore['EventInfo'].runNumber()
+        try:
+            self._process()
+        except Exception as e:
+            import traceback
+            self.msg.info(f"Caught exception: {e}")
+            self.msg.info(traceback.format_exc())
+        self.msg.debug("I've got it made")
+        return StatusCode.Success
+
+    def finalize(self):
+        self.msg.debug("Finalizing")
+        if self.DoTiming:
+            self.msg.info('Timings')
+            for k, v in self._timings.items():
+                self.msg.info(f'{k.tc.description}, {v}')
+        try:
+            self._process()
+        except Exception as e:
+            import traceback
+            self.msg.info(f"Caught transformation exception: {e}")
+            self.msg.warning(traceback.format_exc())
+        return StatusCode.Success
+
+class AthInputModule(histgrinder.interfaces.InputModule):
+    def __init__(self):
+        self.source = None
+        self.classwarnings = set()
+        self.selectors = None
+        self.entries = {}
+
+    def configure(self, options: Mapping[str, Any]) -> None:
+        """
+        Configure this module. Potential elements of "options":
+        source: the parent DQPostProcessingAlg; histograms are read from its THistSvc.
+        prefix: directory path to search under. Returned histogram names
+                will not include this.
+        """
+        if 'source' not in options:
+            raise ValueError("Must specify 'source' as an "
+                             "option to AthInputModule")
+        self.source = options['source']
+        self.prefix = options.get('prefix', '/')
+        self.cachednames = set()
+        self.matchednames = {}
+
+    def setSelectors(self, selectors: Collection[Pattern]) -> None:
+        """ Set the regular expressions used to select histograms of interest """
+        self.selectors = selectors
+
+    def _getklass(self, k):
+        import ROOT
+        hsvc = self.source.hsvc
+        if hsvc.existsHist(k):
+            hptr = ROOT.MakeNullPointer(ROOT.TH1)
+            if hsvc.getHist(k, hptr).isSuccess():
+                klass = getattr(ROOT, hptr.ClassName())
+                return klass
+        return None
+
+
+    def iterate(self, dryrun) -> Generator[HistObject, None, None]:
+        """ Iterate over all histograms in THistSvc """
+        import ROOT
+        log = self.source.msg
+        specprefix = self.prefix % { 'run': self.source._run }
+        log.debug(f'Would like to match {specprefix}')
+        hsvc = self.source.hsvc
+
+        # check if we have new histograms; if so, check against selectors to see if we're interested
+        currenthists = set(str(_) for _ in hsvc.getHists())
+        for k in currenthists - self.cachednames:
+            # log.info(f'We have ... ? {k}')
+            if not k.startswith(specprefix):
+                continue
+            shortk = k.replace(specprefix, '', 1)
+            if self.selectors is not None:
+                if not any(_.match(shortk) for _ in self.selectors):
+                    continue
+            self.matchednames[k] = None
+        self.cachednames.update(currenthists)
+        log.debug(f'We now have {len(self.cachednames)} entries in our cache, of {len(currenthists)} total plots')
+        log.debug(f'There are {len(self.matchednames)} matches to be considered')
+
+        # postprocess only matched histograms
+        for k, klass in self.matchednames.items():
+            if dryrun:
+                yield HistObject(k.replace(specprefix, '', 1), None)
+
+            log.debug(f'ROOT input trying to read {k}')
+            if klass is None:
+                klass = self._getklass(k)
+                self.matchednames[k] = klass
+            hptr = ROOT.MakeNullPointer(klass)
+            if hsvc.getHist(k, hptr).isSuccess():
+                log.debug(f'ROOT input read {k} as {type(hptr)}')
+                # obj = hptr.Clone()
+                # obj.SetDirectory(0)
+                obj = hptr
+                if k in self.entries:
+                    if obj.GetEntries() == self.entries[k]:
+                        continue
+                self.entries[k] = obj.GetEntries()
+                yield HistObject(k.replace(specprefix, '', 1), obj)
+            else:
+                log.error(f'Cannot read {k}')
+
+        log.debug('Done on input side')
+
+    def __iter__(self) -> Iterable[HistObject]:
+        return self.iterate(dryrun=False)
+
+    def warmup(self) -> Iterable[HistObject]:
+        return self.iterate(dryrun=True)
+
+class AthOutputModule(histgrinder.interfaces.OutputModule):
+    def __init__(self):
+        self.target = None
+
+    def configure(self, options: Mapping[str, Any]) -> None:
+        """
+        Configure this module. Potential elements of "options":
+        target: the parent DQPostProcessingAlg; results are registered with its THistSvc.
+        prefix: directory path to place results under.
+        overwrite: boolean to indicate whether results should overwrite
+                   existing histograms in the file.
+        delay: only write histograms in finalize() (not during publish()).
+        """
+        if 'target' not in options:
+            raise ValueError("Must specify 'target' as an option "
+                             "to AthOutputModule")
+        self.target = options['target']
+        self.overwrite = bool(options.get('overwrite', True))
+        self.prefix = options.get('prefix', '/')
+        self.delay = bool(options.get('delay', True))
+        self.queue = {}
+
+    def publish(self, obj: Union[HistObject, Iterable[HistObject]]) -> None:
+        """ Accepts a HistObject containing a ROOT object to write to file """
+        if isinstance(obj, HistObject):
+            obj = [obj]
+        obj = { _.name: _ for _ in obj }
+        if self.delay:
+            self.queue.update(obj)
+        else:
+            self.queue = obj
+            self._write()
+            self.queue = None
+
+    def _write(self) -> None:
+        """ write obj to THistSvc """
+        import ROOT
+        import os.path
+        if not self.queue:
+            return  # Nothing to do
+        log = self.target.msg
+        hsvc = self.target.hsvc
+        for _, o in self.queue.items():
+            ROOT.SetOwnership(o.hist, False)
+            fulltargetname = os.path.join(self.prefix, o.name) % { 'run': self.target._run }
+            log.debug(f"Attempt to publish {fulltargetname}")
+            o.hist.SetName(os.path.basename(fulltargetname))
+            if hsvc.existsHist(fulltargetname):
+                # following kind of silly procedure is necessary to avoid memory leaks
+                hptr = ROOT.MakeNullPointer(ROOT.TH1)
+                if hsvc.getHist(fulltargetname, hptr).isSuccess():
+                    hsvc.deReg(hptr)
+            if not hsvc._cpp_regHist(fulltargetname, o.hist).isSuccess():
+                log.error(f"Unable to register {fulltargetname}")
+            else:
+                log.debug("Published")
+        self.queue.clear()
+
+    def finalize(self) -> None:
+        """ Writes outstanding HistObjects to file """
+        self._write()
\ No newline at end of file
diff --git a/MuonSpectrometer/MuonValidation/MuonDQA/MuonRawDataMonitoring/MdtRawDataMonitoring/python/MDTPostProcessing.py b/MuonSpectrometer/MuonValidation/MuonDQA/MuonRawDataMonitoring/MdtRawDataMonitoring/python/MDTPostProcessing.py
index c388529d0fab587ecbb0d0f7af02d6cd52e115e1..8e60e7d294583186bfdce89123f5d865adc71036 100644
--- a/MuonSpectrometer/MuonValidation/MuonDQA/MuonRawDataMonitoring/MdtRawDataMonitoring/python/MDTPostProcessing.py
+++ b/MuonSpectrometer/MuonValidation/MuonDQA/MuonRawDataMonitoring/MdtRawDataMonitoring/python/MDTPostProcessing.py
@@ -8,8 +8,10 @@ from math import sqrt
 import numpy as np
 
 def make_hits_per_evt(inputs):
-    EvtOccBCap = inputs[0][1][1].Clone()
-    EvtOccECap = inputs[0][1][2].Clone()
+    inputs=list(inputs)
+    i01 = inputs[0][1]
+    EvtOccBCap = i01[1].Clone()
+    EvtOccECap = i01[2].Clone()
     EvtOccBCap.Reset()
     EvtOccECap.Reset()
     EvtOccBCap.SetName("HitsPerEvtInBarrelPerChamber_ADCCut")
@@ -17,8 +19,8 @@ def make_hits_per_evt(inputs):
     EvtOccECap.SetName("HitsPerEvtInEndCapPerChamber_ADCCut")
     EvtOccECap.SetTitle("Avg # hits/evt Endcap, ADCCut")
 
-    VolumeMapBCap = inputs[0][1][1].Clone()
-    VolumeMapECap = inputs[0][1][2].Clone()
+    VolumeMapBCap = i01[1].Clone()
+    VolumeMapECap = i01[2].Clone()
     VolumeMapBCap.Reset()
     VolumeMapECap.Reset()
     VolumeMapBCap.SetName("VolumeMapBarrel")
@@ -27,10 +29,7 @@ def make_hits_per_evt(inputs):
     VolumeMapECap.SetTitle("Volume Map (#tubes*tubeVol in m^3) Endcap")
 
     size = len(inputs)
-    chamberHits_vec = []
-    for i in range(size):
-        hi = inputs[i][1][0].Clone()
-        chamberHits_vec.append(hi.GetEntries())
+    chamberHits_vec = [_[1][0].GetEntries() for _ in inputs]
 
     sorted_chamberHits_vec = np.sort(chamberHits_vec)
     medianChamberHits = 0
@@ -40,9 +39,11 @@ def make_hits_per_evt(inputs):
     elif(size > 0):
         medianChamberHits = sorted_chamberHits_vec[den]
 
+    h_trigger = inputs[0][1][3]
+    nTriggers = int(h_trigger.GetEntries())
     for i in range(size):
         hvOff = False
-        hi = inputs[i][1][0].Clone()
+        hi = inputs[i][1][0]
         name = hi.GetName()
         xAxis = name[0]+name[4]+name[3]
         yAxis = name[1]+name[5]+name[6]
@@ -66,9 +67,6 @@ def make_hits_per_evt(inputs):
         elif(nhits < 0.07 * medianChamberHits + 0.1):
             hvOff = True
 
-        h_trigger = inputs[0][1][3]
-        nTriggers = int(h_trigger.GetEntries())
-
         tubeRadiusScale = 1
         tubeLength = getTubeLength(name)
         if(name[0:3] == "BME" or name[0:3] == "BMG"):
@@ -107,12 +105,13 @@ def make_eff_histo(inputs, ec):
     ecap_fullStr_lower = "mdt"+ecap_str
 
     heff = TH1F(ecap_fullStr_lower+"_TUBE_eff",ecap_fullStr_lower+"_TUBE_eff",100,0,1)
 
-    size = len(inputs)
     dencut = 10
 
-    for i in range(size):
-        hi_num = inputs[i][1][0].Clone()
-        hi_den = inputs[i][1][1].Clone()
+    for itr in inputs:
+        if itr is None:
+            continue
+        hi_num = itr[1][0]
+        hi_den = itr[1][1]
         nbin=hi_den.GetNbinsX()
         for ibin in range(nbin):
             if( hi_den.At(ibin) > dencut ):
@@ -121,7 +120,8 @@ def make_eff_histo(inputs, ec):
     return [heff]
 
 def make_eff_histo_perML(inputs, ec):
-
+    if inputs[0] is None:
+        return []
     ecap = ["BA", "BC", "EA", "EC"]
     ecap_str= ecap[ec]
     heff_outer = inputs[0][1][2].Clone()
@@ -149,11 +149,12 @@ def make_eff_histo_perML(inputs, ec):
     heff_extra.SetTitle("effsIn"+ecap_str+"ExtraPerMultiLayer, ADCCut")
     heff_extra_N = heff_extra.Clone()
 
-    size = len(inputs)
-    for i in range(size):
-        hi_num = inputs[i][1][0].Clone()
+    for itr in inputs:
+        if itr is None:
+            continue
+        hi_num = itr[1][0]
         name_num = hi_num.GetName()
-        hi_den = inputs[i][1][1].Clone()
+        hi_den = itr[1][1]
         name=name_num[0:7]
         countsML1, countsML2, entriesML1, entriesML2 = MDTTubeEff(name,hi_num,hi_den)
         ch_name = name[0:7]
@@ -226,11 +227,10 @@ def drift_time_monitoring(inputs, ec):
     sumtdrift.SetAxisRange(0,1200,"y")
     sumtdrift.Reset()
 
-    h=TH1F()
     for i in range(size):
         currentbin=i+1
-        h = inputs[i][1][0].Clone()
+        h = inputs[i][1][0]
         t0, t0err, tmax, tmaxerr = MDTFitTDC(h)
 
         layer=""
diff --git a/MuonSpectrometer/MuonValidation/MuonDQA/MuonRawDataMonitoring/MdtRawDataMonitoring/python/MdtMonUtils.py b/MuonSpectrometer/MuonValidation/MuonDQA/MuonRawDataMonitoring/MdtRawDataMonitoring/python/MdtMonUtils.py
index cbf657d30bed14ee52658850b640900ffa2b362a..4a774b068ff28b6aa06a1fee962ec0fd7eaf7a23 100644
--- a/MuonSpectrometer/MuonValidation/MuonDQA/MuonRawDataMonitoring/MdtRawDataMonitoring/python/MdtMonUtils.py
+++ b/MuonSpectrometer/MuonValidation/MuonDQA/MuonRawDataMonitoring/MdtRawDataMonitoring/python/MdtMonUtils.py
@@ -196,17 +196,10 @@ def getTubeLength( name ):
 
     tubeLength = 4.9615
 
-    if name03 in tubeLenght_dict:
-        tubeLength = tubeLenght_dict[name03]
-
-    if name04 in tubeLenght_dict:
-        tubeLength = tubeLenght_dict[name04]
-
-    if name03+name56 in tubeLenght_dict:
-        tubeLength = tubeLenght_dict[name03+name56]
-
-    if name04+name56 in tubeLenght_dict:
-        tubeLength = tubeLenght_dict[name04+name56]
+    tubeLength = tubeLenght_dict.get(name03, tubeLength)
+    tubeLength = tubeLenght_dict.get(name04, tubeLength)
+    tubeLength = tubeLenght_dict.get(name03+name56, tubeLength)
+    tubeLength = tubeLenght_dict.get(name04+name56, tubeLength)
 
     return tubeLength
 
@@ -268,11 +261,15 @@ def fittzero(x, par):
     fitvaltzero = par[0] + ( par[3] / ( 1 + ( TMath.Exp((-x[0]+par[1])/par[2]) ) ) )
     return fitvaltzero
 
+tf1_fittzero = TF1("func1", fittzero, 0., 200., 4)
+
 def fittmax(x, par):
     fitvaltmax = par[0] + ( par[3] / ( 1 + ( TMath.Exp((x[0]-par[1])/par[2]) ) ) )
     return fitvaltmax
 
+tf1_fittmax = TF1("func2", fittmax, 0., 200., 4)
+
 def MDTFitTDC(h):
     t0 = 0
     tmax = 0
@@ -288,7 +285,8 @@ def MDTFitTDC(h):
     parESD1 = up
     parESD2 = 20
     parESD3 = h.GetBinContent(h.GetMaximumBin()) - h.GetBinContent(h.GetMinimumBin())
-    func1 = TF1("func1", fittzero, 0., up, 4)
+    func1 = tf1_fittzero
+    func1.SetRange(0., up)
     func1.SetParameters(parESD0, parESD1, parESD2, parESD3)
     func1.SetLineColor(kBlue+2)
     if(h.GetEntries()>100):
@@ -306,7 +304,8 @@ def MDTFitTDC(h):
     parESD1 = down
     parESD2 = 50
     parESD3 = (h.GetBinContent(h.GetMaximumBin())-h.GetBinContent(h.GetMinimumBin()))/10.
-    func2 = TF1("func2", fittmax, (down-135), (down+135), 4)
+    func2 = tf1_fittmax
+    func2.SetRange((down-135), (down+135))
     func2.SetParameters(parESD0,parESD1,parESD2,parESD3)
     func2.SetLineColor(kRed+1)
     if(h.GetEntries()>100):
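
Usage sketch (not part of the patch): the two JobProperties added to DQMonFlags.py are what an old-style job sets to turn the in-Athena postprocessing on; DataQualitySteering_jobOptions.py then schedules DQPostProcessingAlg in AthEndSeq. The import path is the standard DQMonFlags location; the interval value below is only an example.

    # Hedged example: enable the new in-job histogram postprocessing (old-style config).
    from AthenaMonitoring.DQMonFlags import DQMonFlags
    DQMonFlags.doPostProcessing.set_Value_and_Lock(True)      # schedules DQPostProcessingAlg in AthEndSeq
    DQMonFlags.postProcessingInterval.set_Value_and_Lock(20)  # run the transformations every 20 events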
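
For jobs that do not go through DataQualitySteering_jobOptions.py, the algorithm can also be scheduled by hand. The sketch below mirrors the steering fragment in this patch; the only assumption beyond the patch is the standard AthSequencer import, and the property values are illustrative.

    # Hedged sketch: schedule DQPostProcessingAlg directly at the end of each event-loop step.
    from AthenaCommon.AlgSequence import AthSequencer
    from DataQualityUtils.DQPostProcessingAlg import DQPostProcessingAlg

    ppa = DQPostProcessingAlg("DQPostProcessingAlg")
    ppa.ExtraInputs = [('xAOD::EventInfo', 'StoreGateSvc+EventInfo')]   # EventInfo is read in execute()
    ppa.Interval = 100                                # postprocess every 100 events
    ppa.FileKey = '/CombinedMonitoring/run_%(run)s/'  # default from the patch; %(run)s is filled per run
    asq = AthSequencer("AthEndSeq")
    asq += ppa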
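
The Bindings.py changes teach the pythonized THistSvc about TEfficiency. A minimal read-back sketch, assuming it runs inside a PyAthena algorithm of a configured job; all calls shown (py_svc, getEfficiencies, getEfficiency, MakeNullPointer) are the ones the patch itself uses.

    # Hedged sketch: list and read back TEfficiency objects through the pythonized THistSvc.
    import ROOT
    from AthenaPython import PyAthena

    hsvc = PyAthena.py_svc('THistSvc')
    for name in hsvc.getEfficiencies():              # accessor used by the patched cache refresh
        eff = ROOT.MakeNullPointer(ROOT.TEfficiency)
        if hsvc.getEfficiency(str(name), eff).isSuccess():
            print(name, eff.GetTotalHistogram().GetEntries())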