
Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Commits on Source (10)
Showing changes with 1335 additions and 866 deletions
variables:
  TARGET_BRANCH: master

stages:
  - check
  - test

default:
  image: cern/cc7-base:latest
  tags:
    - cvmfs
  before_script:
    - . /cvmfs/lhcb.cern.ch/lib/LbEnv.sh

check-formatting:
  stage: check
  script:
    - . /cvmfs/lhcb.cern.ch/lib/LbEnv.sh
    - curl -o lb-format "https://gitlab.cern.ch/lhcb-core/LbDevTools/raw/master/LbDevTools/SourceTools.py?inline=false"
    - python lb-format --format-patch apply-formatting.patch origin/${TARGET_BRANCH}
  artifacts:
    paths:
      - apply-formatting.patch
    when: on_failure
    expire_in: 1 week

# test-python2:
#   stage: test
#   script:
#     - python2 -m compileall -q .

test-python3:
  stage: test
  script:
    - python3 -m compileall -q .
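The test-python3 job above simply byte-compiles every Python file as a smoke test. For reference, a minimal in-process sketch of the same check (the exit-code handling is an illustrative assumption, not part of this repository):

import compileall
import sys

# compile_dir returns a truthy value only if every file compiled cleanly.
ok = compileall.compile_dir(".", quiet=1)
sys.exit(0 if ok else 1)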
@@ -4,6 +4,7 @@ import ROOT
from .BaseHandler import BaseHandler
def _TList__iter__(self):
next_item = ROOT.TIter(self)
while True:
@@ -13,8 +14,10 @@ def _TList__iter__(self):
else:
return
ROOT.TList.__iter__ = _TList__iter__
def make_key(prefix, obj):
the_type = type(obj)
return "{}_{}_{}".format(
@@ -23,11 +26,10 @@ def make_key(prefix, obj):
obj.GetName(),
)
def sanitize(string):
return (string
.replace("/", "_bs_")
.replace(".", "_ps_")
)
return string.replace("/", "_bs_").replace(".", "_ps_")
def all_plots_in_files(filenames):
def loop_over(key, tdirectory):
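The sanitize helper above makes histogram paths safe for use as flat key names; a quick sketch of what it produces (the histogram path is invented):

def sanitize(string):
    # "/" and "." are replaced with the reversible markers "_bs_" and "_ps_".
    return string.replace("/", "_bs_").replace(".", "_ps_")

assert sanitize("Track/Monitor/chi2.ndf") == "Track_bs_Monitor_bs_chi2_ps_ndf"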
@@ -73,20 +75,19 @@ class AllPlotsHandler(BaseHandler):
"""
def collectResults(self, directory):
filenames = [os.path.join(directory, i) for i in type(self).files_to_search]
filenames = [
os.path.join(directory, i) for i in type(self).files_to_search
]
allplots_keys = []
for key, obj in all_plots_in_files(filenames):
self.saveJSON(
key,
obj,
group="_allplots"
)
self.saveJSON(key, obj, group="_allplots")
allplots_keys.append(key)
self.saveJSON(
"allplots_keys",
allplots_keys,
description="A list of all names of JSON'd plottables recorded by AllPlotsHandler.",
description=
"A list of all names of JSON'd plottables recorded by AllPlotsHandler.",
group="_allplots_keys",
)
@@ -7,8 +7,8 @@ try:
except ImportError:
logger.warning("ROOT not imported. Skipping...")
class BaseHandler(object):
"""For using this class in order to build a handler check the documentation
to learn how to deploy a new handler"""
@@ -26,42 +26,42 @@ class BaseHandler(object):
want to group your attributes, specify a group eg "Timing"
"""
dataDict = {
'name': name,
'data': data,
'description': description,
'group': group,
"name": name,
"data": data,
"description": description,
"group": group,
}
return dataDict
def saveInt(self, name, data, description="", group=""):
if name == '' or data == '':
if name == "" or data == "":
return False
dataDict = self.__save(name, data, description, group)
dataDict['type'] = 'Integer'
dataDict["type"] = "Integer"
self.__results.append(dataDict)
def saveFloat(self, name, data, description="", group=""):
if name == '' or data == '':
if name == "" or data == "":
return False
dataDict = self.__save(name, data, description, group)
dataDict['type'] = 'Float'
dataDict["type"] = "Float"
self.__results.append(dataDict)
def saveString(self, name, data, description="", group=""):
if name == '' or data == '':
if name == "" or data == "":
return False
dataDict = self.__save(name, data, description, group)
dataDict['type'] = 'String'
dataDict["type"] = "String"
self.__results.append(dataDict)
def saveJSON(self, name, data, description="", group=""):
if name == '' or data == '':
if name == "" or data == "":
return False
# If the object is a ROOT object, use ROOT method to save JSON.
@@ -70,16 +70,18 @@ class BaseHandler(object):
if issubclass(type(data), ROOT.TObject):
if ROOT.gROOT.GetVersionInt() < 60800:
raise NotImplementedError(
"Converting ROOT objects to JSON is only supported with ROOT versions >=6.08.")
"Converting ROOT objects to JSON is only supported with ROOT versions >=6.08."
)
json_to_save = str(ROOT.TBufferJSON.ConvertToJSON(data))
else:
json_to_save = json.dumps(data)
except NameError:
logger.warning("Not using ROOT to save JSON as ROOT is not imported!")
logger.warning(
"Not using ROOT to save JSON as ROOT is not imported!")
json_to_save = json.dumps(data)
dataDict = self.__save(name, json_to_save, description, group)
dataDict['type'] = 'JSON'
dataDict["type"] = "JSON"
self.__results.append(dataDict)
@@ -90,15 +92,15 @@ class BaseHandler(object):
to the file you want to file eg saveFile("Gauss-histogram.root",
"/afs/cern.ch/user/.../tests/Gauss-30000000-100ev-20130425-histos.root")
"""
if name == '' or filename == '':
if name == "" or filename == "":
return False
dataDict = {
'name': name,
'filename': filename,
'description': description,
'group': group,
'type': 'File'
"name": name,
"filename": filename,
"description": description,
"group": group,
"type": "File",
}
self.__results.append(dataDict)
@@ -106,5 +108,5 @@
def getResults(self):
return self.__results
def collectResults(self, directory='.'):
def collectResults(self, directory="."):
return NotImplementedError()
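The save* methods above are the whole contract a concrete handler has to meet: subclass BaseHandler, implement collectResults, and record values through the base class. A minimal sketch, assuming the API shown in this diff (the handler name, log file, and metric are invented):

import os

from .BaseHandler import BaseHandler


class ExampleTimingHandler(BaseHandler):
    def collectResults(self, directory):
        # Hypothetical one-number log file, purely illustrative.
        with open(os.path.join(directory, "timing.txt")) as f:
            value = float(f.read().strip())
        # saveFloat(name, data, description, group), as defined above.
        self.saveFloat("EventLoopTime", value, "Event loop time [s]", "Timing")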
@@ -3,14 +3,12 @@ from .BaseHandler import BaseHandler
class BooleMoniROOTFileHandler(BaseHandler):
def __init__(self):
super(self.__class__, self).__init__()
def collectResults(self, directory):
files = [
'PR-UPG-SpillOver25ns-FT-1000ev-histos.root',
'Boole-histos.root'
"PR-UPG-SpillOver25ns-FT-1000ev-histos.root", "Boole-histos.root"
]
fileFound = False
@@ -19,7 +17,10 @@ class BooleMoniROOTFileHandler(BaseHandler):
print("Checking for ", f)
if os.path.isfile(os.path.join(directory, f)):
fileFound = True
self.saveFile('BooleROOTMoniOutput', os.path.join(directory, f))
self.saveFile("BooleROOTMoniOutput", os.path.join(
directory, f))
if not fileFound:
raise Exception('Could not locate any supported monitoring histograms ROOT files')
raise Exception(
"Could not locate any supported monitoring histograms ROOT files"
)
@@ -3,22 +3,23 @@ import os
import re
from .BaseHandler import BaseHandler
class BrunelMemHandler(BaseHandler):
def __init__(self):
super(self.__class__, self).__init__()
self.finished = False
self.results = []
def collectResults(self,directory):
def collectResults(self, directory):
l = self.findHistoFile(directory)
if len(l) != 1:
raise Exception("Could not locate just 1 histo file, found:" + str(l))
raise Exception("Could not locate just 1 histo file, found:" +
str(l))
f = ROOT.TFile(os.path.join(directory, l[0]))
b = f.Get("Brunel/MemoryTool/Total Memory [MB]")
self.saveFloat("TotalMemory", b.GetMean(), "Memory [MB]", "Memory");
self.saveFloat("TotalMemory", b.GetMean(), "Memory [MB]", "Memory")
def findHistoFile(self, dir):
return [f for f in os.listdir(dir) if re.match(".*histos.root", f)]
import os
from .BaseHandler import BaseHandler
from .parser.GaudiSequenceParser import GaudiSequenceParser
from .timing.CallgrindLogParser import CallgrindLogParser
"""
The CallGrindHandler module extracts callgrind metrics from a data structure
which is passed to it from the timing.CallgrindLogParser. The function numbers
@@ -12,15 +10,16 @@ to extract from the CallgrindLogParser are extracted by the GaudiSequenceParser
class CallgrindHandler(BaseHandler):
def __init__(self, directory=''):
def __init__(self, directory=""):
super(self.__class__, self).__init__()
self.directory = directory
if not self.directory:
self.directory = os.path.realpath(os.curdir)
# variables used for parsing the cachegrind annotated log file
self.algoselect = ['GaudiSequencer/RecoDecodingSeq',
'GaudiSequencer/RecoTrFastSeq']
self.algoselect = [
"GaudiSequencer/RecoDecodingSeq",
"GaudiSequencer/RecoTrFastSeq",
]
def collectResults(self, directory):
gsp = GaudiSequenceParser(dir=directory)
@@ -31,19 +30,19 @@ class CallgrindHandler(BaseHandler):
for alg in callgrindmetrics:
metr = callgrindmetrics[alg]
btot = metr['bc'] + metr['bi']
itot = metr['ir'] + metr['dr'] + metr['dw']
l1m = metr['i1mr'] + metr['d1mr'] + metr['d1mw']
llm = metr['ilmr'] + metr['dlmr'] + metr['dlmw']
bm = metr['bim'] + metr['bcm']
cest = metr['ir'] + 10 * bm + 10 * l1m + 100 * llm
fp32 = metr['ifp32x1'] + 2 * metr['ifp32x2'] + 4 * metr['ifp32x4']\
+ 8 * metr['ifp32x8']
fp64 = metr['ifp64x1'] + 2 * metr['ifp64x2'] + 4 * metr['ifp64x4']
vfp128 = 4 * metr['ifp32x4'] + 2 * metr['ifp64x4']
vfp256 = 8 * metr['ifp32x8'] + 4 * metr['ifp64x4']
vfp = 2 * metr['ifp32x2'] + vfp128 + vfp256
sfp = metr['ifp32x1'] + metr['ifp64x1']
btot = metr["bc"] + metr["bi"]
itot = metr["ir"] + metr["dr"] + metr["dw"]
l1m = metr["i1mr"] + metr["d1mr"] + metr["d1mw"]
llm = metr["ilmr"] + metr["dlmr"] + metr["dlmw"]
bm = metr["bim"] + metr["bcm"]
cest = metr["ir"] + 10 * bm + 10 * l1m + 100 * llm
fp32 = (metr["ifp32x1"] + 2 * metr["ifp32x2"] + 4 * metr["ifp32x4"]
+ 8 * metr["ifp32x8"])
fp64 = metr["ifp64x1"] + 2 * metr["ifp64x2"] + 4 * metr["ifp64x4"]
vfp128 = 4 * metr["ifp32x4"] + 2 * metr["ifp64x4"]
vfp256 = 8 * metr["ifp32x8"] + 4 * metr["ifp64x4"]
vfp = 2 * metr["ifp32x2"] + vfp128 + vfp256
sfp = metr["ifp32x1"] + metr["ifp64x1"]
flop = fp32 + fp64
rsimd = 0
@@ -61,41 +60,41 @@ class CallgrindHandler(BaseHandler):
for val in callgrindvalues:
valname = val[0]
valdesc = val[1]
metname = '%s_%s' % (valname, alg)
metname = "%s_%s" % (valname, alg)
self.saveInt(metname, metr[valname], valdesc,
'callgrind_metric')
self.saveInt('l1m_' + alg, l1m, 'L1 Miss Sum', 'callgrind_metric')
self.saveInt('llm_' + alg, llm, 'Last Levl Miss Sum',
'callgrind_metric')
self.saveInt('bm_' + alg, bm, 'Branch Missprediction',
'callgrind_metric')
self.saveInt('cest_' + alg, cest, 'Cycle Estimation',
'callgrind_metric')
self.saveInt('fp32_' + alg, fp32, 'fp 32 operations',
'callgrind_metric')
self.saveInt('fp64_' + alg, fp64, 'fp 64 operations',
'callgrind_metric')
self.saveInt('vfp128_' + alg, vfp128, 'simd 128 fp operations',
'callgrind_metric')
self.saveInt('vfp256_' + alg, vfp256, 'simd 256 fp operations',
'callgrind_metric')
self.saveInt('vfp_' + alg, vfp, 'simd fp operations',
'callgrind_metric')
self.saveInt('sfp_' + alg, sfp, 'scalar fp operations',
'callgrind_metric')
self.saveInt('flop_' + alg, flop, 'fp operations',
'callgrind_metric')
self.saveFloat('rsimd_' + alg, rsimd, 'ratio simd operations',
'callgrind_metric')
self.saveFloat('rbm_' + alg, rbm, 'ratio branch misspredictions',
'callgrind_metric')
self.saveFloat('rcm_' + alg, rcm, 'ratio cache misses',
'callgrind_metric')
"callgrind_metric")
self.saveInt("l1m_" + alg, l1m, "L1 Miss Sum", "callgrind_metric")
self.saveInt("llm_" + alg, llm, "Last Levl Miss Sum",
"callgrind_metric")
self.saveInt("bm_" + alg, bm, "Branch Missprediction",
"callgrind_metric")
self.saveInt("cest_" + alg, cest, "Cycle Estimation",
"callgrind_metric")
self.saveInt("fp32_" + alg, fp32, "fp 32 operations",
"callgrind_metric")
self.saveInt("fp64_" + alg, fp64, "fp 64 operations",
"callgrind_metric")
self.saveInt("vfp128_" + alg, vfp128, "simd 128 fp operations",
"callgrind_metric")
self.saveInt("vfp256_" + alg, vfp256, "simd 256 fp operations",
"callgrind_metric")
self.saveInt("vfp_" + alg, vfp, "simd fp operations",
"callgrind_metric")
self.saveInt("sfp_" + alg, sfp, "scalar fp operations",
"callgrind_metric")
self.saveInt("flop_" + alg, flop, "fp operations",
"callgrind_metric")
self.saveFloat("rsimd_" + alg, rsimd, "ratio simd operations",
"callgrind_metric")
self.saveFloat("rbm_" + alg, rbm, "ratio branch misspredictions",
"callgrind_metric")
self.saveFloat("rcm_" + alg, rcm, "ratio cache misses",
"callgrind_metric")
def run(self):
self.collectResults()
if __name__ == '__main__':
if __name__ == "__main__":
CallgrindHandler().run()
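The derived metrics in collectResults follow a fixed recipe: bm = bim + bcm, l1m = i1mr + d1mr + d1mw, llm = ilmr + dlmr + dlmw, and the cycle estimate cest = ir + 10*bm + 10*l1m + 100*llm. A worked sketch with invented counter values:

# Illustrative counters only; keys follow the callgrind event names
# parsed by CallgrindLogParser.
metr = {
    "ir": 1_000_000,             # instructions executed
    "bim": 2_000, "bcm": 3_000,  # indirect / conditional branch misses
    "i1mr": 500, "d1mr": 400, "d1mw": 100,  # L1 misses
    "ilmr": 50, "dlmr": 30, "dlmw": 20,     # last-level misses
}

bm = metr["bim"] + metr["bcm"]
l1m = metr["i1mr"] + metr["d1mr"] + metr["d1mw"]
llm = metr["ilmr"] + metr["dlmr"] + metr["dlmw"]
cest = metr["ir"] + 10 * bm + 10 * l1m + 100 * llm
print(cest)  # 1_000_000 + 50_000 + 10_000 + 10_000 = 1_070_000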
@@ -3,36 +3,43 @@ from .BaseHandler import BaseHandler
from xml.etree.ElementTree import ElementTree
from xml.parsers.expat import ExpatError
class CommentClassHandler(BaseHandler):
def __init__(self):
super(self.__class__, self).__init__()
self.finished = False
self.results = []
def collectResults(self, directory):
logfile = 'profile_info.txt'
logfile = "profile_info.txt"
run_path = os.path.join(directory, logfile)
regxp = "^comment\s*=\s*\"(.*)\s*/\s*(.*)\""
regxp = '^comment\s*=\s*"(.*)\s*/\s*(.*)"'
comment = ""
cclass = ""
try:
loglines = open(run_path, 'r')
for l in loglines.readlines():
m = re.match(regxp, l)
if m != None:
comment = m.group(1)
cclass = m.group(2)
break
loglines.close()
loglines = open(run_path, "r")
for l in loglines.readlines():
m = re.match(regxp, l)
if m != None:
comment = m.group(1)
cclass = m.group(2)
break
loglines.close()
except IOError:
raise Exception(str(self.__class__)+": File not found, this handler expects 'profile_info.txt' file in the result directory")
raise Exception(
str(self.__class__) +
": File not found, this handler expects 'profile_info.txt' file in the result directory"
)
self.saveString("Comment", comment, "Comment Results", "JobInfo")
self.saveString("Class", cclass, "Classify Results", "JobInfo")
print(comment, cclass)
if __name__ == "__main__":
cch = CommentClassHandler()
cch.collectResults('/afs/cern.ch/lhcb/software/profiling/releases/MOORE/MOORE_v20r1p1/x86_64-slc6-gcc46-opt/20130919_1659_time')
cch.collectResults(
"/afs/cern.ch/lhcb/software/profiling/releases/MOORE/MOORE_v20r1p1/x86_64-slc6-gcc46-opt/20130919_1659_time"
)
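The pattern above expects a profile_info.txt line of the form comment = "<comment> / <class>". A quick sketch of the match (the line content is invented):

import re

regxp = r'^comment\s*=\s*"(.*)\s*/\s*(.*)"'
m = re.match(regxp, 'comment = "nightly timing run / reference"')
# Greedy matching leaves the space before "/" inside group(1).
print(m.group(1), "|", m.group(2))  # nightly timing run  | reference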
@@ -3,19 +3,18 @@ from .BaseHandler import BaseHandler
class DataChallengesROOTFileHandler(BaseHandler):
def __init__(self):
super(self.__class__, self).__init__()
def collectResults(self, directory):
files = [
'hlt2_reco_baseline_DC.root',
'hlt1_allen_track_reconstruction.root',
'Hlt1SeedAndMatchTrackingResolutionAllen.root',
'HLT1HLT2Checker_fitted_profile.root',
'HLT1HLT2Checker_fitted_profile_new.root',
'histos_hlt2_light_reco_pr_kf_without_UT_on_data_with_monitoring.root',
'MCMatching_baseline_MiniBias.root',
"hlt2_reco_baseline_DC.root",
"hlt1_allen_track_reconstruction.root",
"Hlt1SeedAndMatchTrackingResolutionAllen.root",
"HLT1HLT2Checker_fitted_profile.root",
"HLT1HLT2Checker_fitted_profile_new.root",
"histos_hlt2_light_reco_pr_kf_without_UT_on_data_with_monitoring.root",
"MCMatching_baseline_MiniBias.root",
]
fileFound = False
@@ -27,4 +26,4 @@ class DataChallengesROOTFileHandler(BaseHandler):
self.saveFile(f, os.path.join(directory, f))
if not fileFound:
raise Exception('No ROOT files found')
raise Exception("No ROOT files found")
@@ -5,78 +5,129 @@ import re
import json
from .BaseHandler import BaseHandler
class DetailedTimingInVolumesHandler(BaseHandler):
def __init__(self):
super(self.__class__, self).__init__()
self.resulting_data = {}
# Read in the log file and return its content as a string
def read_logfile(self, log_file=None):
if not os.path.exists(log_file):
raise Exception("File %s does not exist" % log_file)
with open(log_file, mode="r") as log_file_handler:
log_data = ""
for line in log_file_handler:
log_data += line
return log_data
# Parse the log data and return an array of lists of tuples
def parse_log_data(self, log_data=None):
resulting_data = {}
# Match the blocks of data in the log
matched_log_blocks = re.finditer(
"\*(.+?)\n\n", log_data, flags=re.DOTALL)
if matched_log_blocks:
for match in matched_log_blocks:
data_block_title = ""
output_data_block = []
# Match the blocks of data in the log
matched_log_blocks = re.finditer("\*(.+?)\n\n", log_data, flags=re.DOTALL)
if matched_log_blocks:
for match in matched_log_blocks:
data_block_title = ""
output_data_block = []
imput_data_block = match.group(1).split("\n")
for block_line in imput_data_block:
# A new block with a title
if block_line.startswith("*"):
match_block_title = re.search(
"\*\s(.+?)\s\*", block_line, flags=re.IGNORECASE)
if match_block_title:
data_block_title = (
match_block_title.group(1).lower().replace(
" ", "_").split("_(", 1)[0])
# Data within a block
else:
# Volume or Process section
if block_line.lower().startswith(
"volume") or block_line.lower().startswith(
"process"):
block_contents_match = re.search(
".+?: (.+?) cumulated time (.+?) seconds",
block_line,
flags=re.IGNORECASE,
)
if block_contents_match:
output_data_block.append([
block_contents_match.group(1).lower(),
block_contents_match.group(2),
])
# Timing per particle in specific detectors section
elif data_block_title.startswith(
"timing_per_particle"):
block_contents_match = re.search(
"(.+?)\s+?: cumulated time (.+?) seconds \((.+?)\)",
block_line,
flags=re.IGNORECASE,
)
if block_contents_match:
output_data_block.append([
block_contents_match.group(1).lower(),
block_contents_match.group(2),
block_contents_match.group(3),
])
# Cumulative record of the section
elif block_line.lower().startswith("time in"):
block_contents_match = re.search(
"Time in (.+?): (.+?) seconds \((.+?)\s",
block_line,
flags=re.IGNORECASE,
)
if block_contents_match:
output_data_block.append([
block_contents_match.group(1).lower(),
block_contents_match.group(2),
block_contents_match.group(3),
])
# Summary section
elif data_block_title.startswith("summary"):
block_contents_match = re.search(
"Total time in (.+?): (.+?) seconds \((.+?)\s",
block_line,
flags=re.IGNORECASE,
)
if block_contents_match:
output_data_block.append([
block_contents_match.group(1).lower(),
block_contents_match.group(2),
block_contents_match.group(3),
])
# Other volumes section
elif data_block_title.startswith("other"):
block_contents_match = re.search(
"(.+?): (.+?)$",
block_line,
flags=re.IGNORECASE)
if block_contents_match:
output_data_block.append([
block_contents_match.group(1).lower(),
block_contents_match.group(2).lower(),
])
imput_data_block = match.group(1).split("\n")
for block_line in imput_data_block:
# A new block with a title
if block_line.startswith("*"):
match_block_title = re.search("\*\s(.+?)\s\*", block_line, flags=re.IGNORECASE)
if match_block_title:
data_block_title = match_block_title.group(1).lower().replace(" ", "_").split('_(', 1)[0]
# Data within a block
else:
# Volume or Process section
if block_line.lower().startswith("volume") or block_line.lower().startswith("process"):
block_contents_match = re.search(".+?: (.+?) cumulated time (.+?) seconds", block_line, flags=re.IGNORECASE)
if block_contents_match:
output_data_block.append([block_contents_match.group(1).lower(), block_contents_match.group(2)])
# Timing per particle in specific detectors section
elif data_block_title.startswith("timing_per_particle"):
block_contents_match = re.search("(.+?)\s+?: cumulated time (.+?) seconds \((.+?)\)", block_line, flags=re.IGNORECASE)
if block_contents_match:
output_data_block.append([block_contents_match.group(1).lower(), block_contents_match.group(2), block_contents_match.group(3)])
# Cumulative record of the section
elif block_line.lower().startswith("time in"):
block_contents_match = re.search("Time in (.+?): (.+?) seconds \((.+?)\s", block_line, flags=re.IGNORECASE)
if block_contents_match:
output_data_block.append([block_contents_match.group(1).lower(), block_contents_match.group(2), block_contents_match.group(3)])
# Summary section
elif data_block_title.startswith("summary"):
block_contents_match = re.search("Total time in (.+?): (.+?) seconds \((.+?)\s", block_line, flags=re.IGNORECASE)
if block_contents_match:
output_data_block.append([block_contents_match.group(1).lower(), block_contents_match.group(2), block_contents_match.group(3)])
# Other volumes section
elif data_block_title.startswith("other"):
block_contents_match = re.search("(.+?): (.+?)$", block_line, flags=re.IGNORECASE)
if block_contents_match:
output_data_block.append([block_contents_match.group(1).lower(), block_contents_match.group(2).lower()])
resulting_data[data_block_title] = output_data_block
return resulting_data
def collectResults(self, directory):
log_file = os.path.join(directory, "Timing.log")
resulting_data = self.parse_log_data(self.read_logfile(log_file))
self.saveJSON(
"detailed_timing_in_volumes",
resulting_data,
"Full information on detailed timing in volumes",
"detailed_timing_in_volumes",
)
def collectResults(self, directory):
log_file = os.path.join(directory, "Timing.log")
resulting_data = self.parse_log_data(self.read_logfile(log_file))
self.saveJSON("detailed_timing_in_volumes", resulting_data, "Full information on detailed timing in volumes", "detailed_timing_in_volumes")
if __name__ == "__main__":
dtvh = DetailedTimingInVolumesHandler()
# EOF
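parse_log_data above assumes each Timing.log section is a block that opens with an asterisk banner and ends at a blank line. A minimal sketch of the outer block match and title extraction, on an invented excerpt (the real Gauss log layout may differ):

import re

sample = (
    "********************************\n"
    "* Summary (total simulation) *\n"
    "Total time in tracker: 1.25 seconds (41.6 %)\n"
    "\n"
)
block = next(re.finditer(r"\*(.+?)\n\n", sample, flags=re.DOTALL)).group(1)
title = re.search(r"\*\s(.+?)\s\*", block.split("\n")[1]).group(1)
print(title.lower().replace(" ", "_").split("_(", 1)[0])  # summary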
@@ -2,15 +2,13 @@ import os, sys, re
from .BaseHandler import BaseHandler
import random
class DummyHandler(BaseHandler):
def __init__(self):
super(self.__class__, self).__init__()
self.finished = False
self.results = []
def collectResults(self,directory):
self.saveFloat("MyMeasure", random.gauss(42, 5), "Dummy timing value [ms]", "Timing")
def collectResults(self, directory):
self.saveFloat("MyMeasure", random.gauss(42, 5),
"Dummy timing value [ms]", "Timing")
@@ -14,6 +14,9 @@ class EMHandler(BaseHandler):
super(self.__class__, self).__init__()
def collectResults(self, directory):
rootFiles = glob(directory+"*.root" if directory.endswith("/") else directory+"/*.root")
rootFiles = glob(directory + "*.root" if directory.
endswith("/") else directory + "/*.root")
for files in rootFiles:
self.saveFile(re.sub(".root","",files.split("RootFile",1)[1]), files)
self.saveFile(
re.sub(".root", "",
files.split("RootFile", 1)[1]), files)
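The conditional in the glob above only normalises a possible trailing slash; a hypothetical, equivalent spelling (a style note, not a change made in this diff):

import os
from glob import glob

directory = "."  # illustrative
# os.path.join absorbs a trailing "/" on directory, so no conditional is needed.
rootFiles = glob(os.path.join(directory, "*.root"))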
@@ -3,37 +3,46 @@ from .BaseHandler import BaseHandler
from xml.etree.ElementTree import ElementTree
from xml.parsers.expat import ExpatError
class FilePathHandler(BaseHandler):
def __init__(self):
super(self.__class__, self).__init__()
self.finished = False
self.results = []
def collectResults(self,directory):
logfile = 'run.log'
def collectResults(self, directory):
logfile = "run.log"
run_path = os.path.join(directory, logfile)
regxp = ".*/afs/cern.ch/lhcb/software/profiling/releases(/[A-Z0-9]+/[A-Z0-9]+_[\w-]+.*)"
path_line = ""
try:
loglines = open(run_path, 'r')
for l in loglines.readlines():
m = re.match(regxp, l)
if m != None:
path_line = m.group(1)
continue
loglines.close()
loglines = open(run_path, "r")
for l in loglines.readlines():
m = re.match(regxp, l)
if m != None:
path_line = m.group(1)
continue
loglines.close()
except IOError:
raise Exception(str(self.__class__)+": File not found, this handler expects 'run.log' file in the result directory")
raise Exception(
str(self.__class__) +
": File not found, this handler expects 'run.log' file in the result directory"
)
if os.path.exists(run_path) :
path = "$AFS_PROF" + path_line
self.saveString("Path", path, "Results Location", "JobInfo")
print(path)
if os.path.exists(run_path):
path = "$AFS_PROF" + path_line
self.saveString("Path", path, "Results Location", "JobInfo")
print(path)
else:
print('File or path does not exist (file: ' + run_path + ')')
print("File or path does not exist (file: " + run_path + ")")
if __name__ == "__main__":
fh = FilePathHandler()
fh.collectResults('/afs/cern.ch/lhcb/software/profiling/releases/MOORE/MOORE_v14r11/x86_64-slc6-gcc46-opt/20131112_1712_time')
fh.collectResults('/afs/cern.ch/lhcb/software/profiling/releases/MOORE/MOORE_lhcb-head-131111/x86_64-slc6-gcc46-opt/20131111_1931_time')
fh.collectResults(
"/afs/cern.ch/lhcb/software/profiling/releases/MOORE/MOORE_v14r11/x86_64-slc6-gcc46-opt/20131112_1712_time"
)
fh.collectResults(
"/afs/cern.ch/lhcb/software/profiling/releases/MOORE/MOORE_lhcb-head-131111/x86_64-slc6-gcc46-opt/20131111_1931_time"
)
@@ -3,7 +3,7 @@ from .BaseHandler import BaseHandler
class G4RichTbSimHandler(BaseHandler):
def collectResults(self, directory):
mc_histos_file = os.path.join(directory, 'G4RichTbSimHTestOutput', 'RichTbSim_MC_Histograms.root')
self.saveFile('G4RichTbSim_MCHistograms', mc_histos_file)
mc_histos_file = os.path.join(directory, "G4RichTbSimHTestOutput",
"RichTbSim_MC_Histograms.root")
self.saveFile("G4RichTbSim_MCHistograms", mc_histos_file)
@@ -11,10 +11,18 @@ import json
from .BaseHandler import BaseHandler
import logging
class GammaConversionHandler(BaseHandler):
def __init__(self, debug='INFO'):
super(self.__class__,self).__init__()
class GammaConversionHandler(BaseHandler):
def __init__(self, debug="INFO"):
super(self.__class__, self).__init__()
def collectResults(self, directory):
_gamma_dilepton_file = ('GammaToDiLeptonConversionTest.root', os.path.join(directory, 'G4GammaCVTestROOTFiles', 'G4GammaToDiLeptonConversionTest.root'))
_gamma_dilepton_file = (
"GammaToDiLeptonConversionTest.root",
os.path.join(
directory,
"G4GammaCVTestROOTFiles",
"G4GammaToDiLeptonConversionTest.root",
),
)
self.saveFile(*_gamma_dilepton_file)
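The trailing saveFile(*_gamma_dilepton_file) call unpacks the (name, path) tuple built above into saveFile's two positional arguments, i.e. saveFile("GammaToDiLeptonConversionTest.root", <path>).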
@@ -3,33 +3,61 @@ import os
import re
from .BaseHandler import BaseHandler
class GaussMemHandler(BaseHandler):
def __init__(self):
super(self.__class__, self).__init__()
self.finished = False
self.results = []
def collectResults(self,directory):
def collectResults(self, directory):
l = self.findHistoFile(directory)
if len(l) != 1:
raise Exception("Could not locate just 1 histo file, found:" + str(l))
raise Exception("Could not locate just 1 histo file, found:" +
str(l))
f = ROOT.TFile(os.path.join(directory, l[0]))
gaussGenTotal = f.Get("GaussGen.GaussGenMemory/Total Memory [MB]")
gaussGenDelta = f.Get("GaussGen.GaussGenMemory/Delta Memory [MB]")
mainEventGaussSimTotal = f.Get("MainEventGaussSim.MainEventGaussSimMemory/Total Memory [MB]")
mainEventGaussSimDelta = f.Get("MainEventGaussSim.MainEventGaussSimMemory/Delta Memory [MB]")
mainEventGaussSimTotal = f.Get(
"MainEventGaussSim.MainEventGaussSimMemory/Total Memory [MB]")
mainEventGaussSimDelta = f.Get(
"MainEventGaussSim.MainEventGaussSimMemory/Delta Memory [MB]")
# mem vars for Lamarr have a different path
if l[0].startswith('Lamarr'):
mainEventGaussSimTotal = f.Get("InitLamarr.InitLamarrMemory/Total Memory [MB]")
mainEventGaussSimDelta = f.Get("InitLamarr.InitLamarrMemory/Delta Memory [MB]")
if l[0].startswith("Lamarr"):
mainEventGaussSimTotal = f.Get(
"InitLamarr.InitLamarrMemory/Total Memory [MB]")
mainEventGaussSimDelta = f.Get(
"InitLamarr.InitLamarrMemory/Delta Memory [MB]")
self.saveFloat("TotalMemoryGaussGen", gaussGenTotal.GetMean(), "Total Memory [MB]", "Memory");
self.saveFloat("DeltaMemoryGaussGen", gaussGenDelta.GetMean(), "TotalDelta Memory [MB]", "Memory");
self.saveFloat("TotalMemoryMainEventGaussSim", mainEventGaussSimTotal.GetMean(), "Total Memory [MB]", "Memory");
self.saveFloat("DeltaMemoryMainEventGaussSim", mainEventGaussSimDelta.GetMean(), "TotalDelta Memory [MB]", "Memory");
self.saveFloat(
"TotalMemoryGaussGen",
gaussGenTotal.GetMean(),
"Total Memory [MB]",
"Memory",
)
self.saveFloat(
"DeltaMemoryGaussGen",
gaussGenDelta.GetMean(),
"TotalDelta Memory [MB]",
"Memory",
)
self.saveFloat(
"TotalMemoryMainEventGaussSim",
mainEventGaussSimTotal.GetMean(),
"Total Memory [MB]",
"Memory",
)
self.saveFloat(
"DeltaMemoryMainEventGaussSim",
mainEventGaussSimDelta.GetMean(),
"TotalDelta Memory [MB]",
"Memory",
)
def findHistoFile(self, dir):
return [f for f in os.listdir(dir) if re.match("(Gauss|Lamarr).*histos.root", f)]
return [
f for f in os.listdir(dir)
if re.match("(Gauss|Lamarr).*histos.root", f)
]
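findHistoFile above accepts both Gauss and Lamarr histogram files; a quick sketch of the anchored match (the file names are invented):

import re

for name in ["Gauss-30000000-100ev-histos.root",
             "Lamarr-2024-histos.root",
             "Brunel-histos.root"]:
    # re.match anchors at the start, so only Gauss*/Lamarr* names pass.
    print(name, bool(re.match("(Gauss|Lamarr).*histos.root", name)))
# -> True, True, False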
@@ -2,34 +2,31 @@ import os
import fnmatch
import glob
import csv
#import random
# import random
from .BaseHandler import BaseHandler
class GeantStandaloneHandler(BaseHandler):
""" LHCbPR Handler for Geant standalone tests.
SetupProject --nightly lhcb-gauss-def Geant4 Head (--build-env)
getpack Geant/G4examples
make
hadronis_tests
"""LHCbPR Handler for Geant standalone tests.
SetupProject --nightly lhcb-gauss-def Geant4 Head (--build-env)
getpack Geant/G4examples
make
hadronis_tests
"""
def __init__(self):
super(self.__class__, self).__init__()
def collectResults(self, directory):
""" Collect results """
#self.saveInt("TestValue", random.randint(1, 10));
"""Collect results"""
# self.saveInt("TestValue", random.randint(1, 10));
# Files
exts = ['*.root']
base = os.path.join(directory,'root')
exts = ["*.root"]
base = os.path.join(directory, "root")
for file in os.listdir(base):
for ext in exts:
if fnmatch.fnmatch(file, ext):
self.saveFile(
os.path.basename(file),
os.path.join(base, file)
)
os.path.basename(file), os.path.join(base, file))
@@ -5,17 +5,17 @@ from .BaseHandler import BaseHandler
class GeantTestEm3Handler(BaseHandler):
def __init__(self):
super(self.__class__, self).__init__()
def collectResults(self, directory):
""" Collect results """
"""Collect results"""
file = 'Selectedresults.root'
txtfile = 'selectedresults.txt'
filename = os.path.join(directory, 'G4SamplingCaloTestOutput', file)
txtfilename = os.path.join(directory, 'G4SamplingCaloTestOutput', txtfile)
file = "Selectedresults.root"
txtfile = "selectedresults.txt"
filename = os.path.join(directory, "G4SamplingCaloTestOutput", file)
txtfilename = os.path.join(directory, "G4SamplingCaloTestOutput",
txtfile)
if not os.path.exists(filename):
raise Exception("File %s does not exist" % filename)
@@ -25,19 +25,24 @@ class GeantTestEm3Handler(BaseHandler):
self.saveFile(file, filename)
with open(txtfilename, mode='r') as f:
with open(txtfilename, mode="r") as f:
lines = f.readlines()
_, res_val, res_err = lines[1].split(',')
_, res_val, res_err = lines[1].split(",")
print((res_val, res_err))
res_val, res_err = float(res_val), float(res_err.split(';')[0])
res_val, res_err = float(res_val), float(res_err.split(";")[0])
_, const_val, const_err = lines[2].split(',')
const_val, const_err = float(const_val), float(const_err.split(';')[0])
_, const_val, const_err = lines[2].split(",")
const_val, const_err = float(const_val), float(
const_err.split(";")[0])
table = []
for line in lines[6:]:
e_en, e_val, e_err = line.split(',')
e_en, e_val, e_err = float(e_en), float(e_val), float(e_err.split(';')[0])
e_en, e_val, e_err = line.split(",")
e_en, e_val, e_err = (
float(e_en),
float(e_val),
float(e_err.split(";")[0]),
)
table.append((e_en, e_val, e_err))
self.saveFloat("TESTEM3_FIT_RESOLUTION_VALUE", res_val)
......
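For context, the parsing in GeantTestEm3Handler above implies selectedresults.txt lines shaped like "label, value, error; extra"; a hedged sketch on an invented line:

line = "resolution, 9.87, 0.12; chi2/ndf=1.1"
_, val, err = line.split(",")
val, err = float(val), float(err.split(";")[0])
print(val, err)  # 9.87 0.12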