Commit ef2c6fdf authored by Luke Grazette

Successful step 4 + some argparse changes

parent 13cdb61a
1 merge request: !330 [RTADPA BW Tests] Introducing an Hlt1-bandwidth test via Moore_in_Allen
......@@ -85,12 +85,18 @@ def main():
parser = argparse.ArgumentParser()
parser.add_argument(
'-p', '--process', type=str, required=True, choices=['hlt2', 'spruce'])
'-p',
'--process',
type=str,
help='Compute for Hlt1, Hlt2 or Sprucing lines',
choices=['hlt1', 'hlt2', 'spruce'],
required=True)
parser.add_argument(
'--stream-config',
type=str,
required=True,
choices=["wg", "production"])
help='Choose production, per-WG or streamless stream configuration',
choices=['streamless', 'production', 'wg'],
required=True)
parser.add_argument('--streams', nargs='+', type=str, required=True)
args = parser.parse_args()
fname_helper = FileNameHelper(args.process)
......
......@@ -163,14 +163,22 @@ def make_rate_table_row_per_stream(stream_config, fname_helper):
if __name__ == "__main__":
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'--process', type=str, required=True, choices=["hlt2", "spruce"])
'-p',
'--process',
type=str,
help='Compute for Hlt1, Hlt2 or Sprucing lines',
choices=['hlt1', 'hlt2', 'spruce'],
required=True)
args = parser.parse_args()
fname_helper = FileNameHelper(args.process)
stream_configs = ["production", "wg"] if args.process == "hlt2" else ["wg"]
stream_configs, all_lines = {
"hlt1": (["streamless"], "streamless"),
"hlt2": (["production", "wg"], "production"),
"spruce": (["wg"], "wg")
}[args.process]
rates_all_lines("production" if args.process == "hlt2" else "wg",
fname_helper)
rates_all_lines(all_lines, fname_helper)
for stream_config in stream_configs:
make_rate_table_row_per_stream(stream_config, fname_helper)
make_rate_table_row_per_line(stream_config, fname_helper)
......@@ -12,7 +12,7 @@
import GaudiPython as GP
from GaudiConf.reading import decoder, unpack_rawevent, hlt_decisions
from Configurables import (ApplicationMgr, LHCbApp, IODataManager,
EventSelector, createODIN)
EventSelector, createODIN, LHCb__UnpackRawEvent, HltDecReportsDecoder)
from GaudiConf import IOHelper
from PyConf.application import configured_ann_svc
import operator
......@@ -143,6 +143,68 @@ def processing_events_per_line_and_stream(evt_max, lines, process):
# First three variables per stream/file, last four for lines
return events_file, raw_size_all, dst_size_all, event_stats, exclusive, raw, dst
def processing_events_per_line_and_stream_hlt1(lines):
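    """Per-line rate/bandwidth inputs for Hlt1 from the (streamless) MDF output.

    Decodes the Hlt1 DecReports from the MDF file written by the bandwidth job,
    records for each line the events on which it fired (event_stats) and the
    number of fired lines per event (exclusive), and attributes raw size per
    line proportionally to its fired-event count. DST sizes are returned as -1,
    since they are not defined for Hlt1.
    """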
inputfile = fname_helper.mdf_fname_for_reading(args.stream_config, args.stream)
# Configure basic application with inputs
LHCbApp(DataType="Upgrade", Simulation=True)
IOHelper("MDF").inputFiles([inputfile])
# Disable warning about not being able to navigate ancestors
IODataManager(DisablePFNWarning=True)
# Decode Hlt DecReports
unpacker = LHCb__UnpackRawEvent(
"UnpackRawEvent",
RawBankLocations=["DAQ/RawBanks/HltDecReports"],
BankTypes=["HltDecReports"])
decDec = HltDecReportsDecoder(
"HltDecReportsDecoder/Hlt1DecReportsDecoder",
SourceID="Hlt1",
DecoderMapping="TCKANNSvc",
RawBanks=unpacker.RawBankLocations[0])
app = ApplicationMgr(TopAlg=[unpacker, decDec])
# Configure TCKANNSvc
app.ExtSvc += [configured_ann_svc(name='TCKANNSvc')]
gaudi = GP.AppMgr()
TES = gaudi.evtSvc()
gaudi.run(1)
mdf_size = os.stat(inputfile).st_size
    if mdf_size <= 0:
        raise RuntimeError(f"Size of {inputfile} is <= 0")
events_file = 0
raw_size_all = mdf_size
dst_size_all = -1 # Not a valid question to ask for hlt1
    event_stats = {line + 'Decision': [] for line in lines}
    exclusive = {}
    raw = {line + 'Decision': -1 for line in lines}
    dst = {line + 'Decision': -1 for line in lines}
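    # Step through the file one event at a time; the loop ends once '/Event'
    # is no longer present, i.e. the input is exhausted.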
while TES["/Event"]:
decs = TES[str(decDec.OutputHltDecReportsLocation)]
if not decs:
print("DecReports TES location not found")
error = True
break
events_file += 1
exclusive.update({events_file: 0})
for line in decs.decisionNames():
report = decs.decReport(line)
if report.decision() == 1:
event_stats[line].append(events_file)
exclusive[events_file] += 1
gaudi.run(1)
    if events_file == 0:
        raise RuntimeError(f"No events found in {inputfile}")
for line, eventnumber_per_line in event_stats.items():
        # TODO: remove this assumption if possible.
        # Assume the event size is independent of which line fired.
        # This 'might' be valid as no per-event information is pruned out in hlt1.
raw[line] = mdf_size / events_file * len(eventnumber_per_line)
return events_file, raw_size_all, dst_size_all, event_stats, exclusive, raw, dst
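A minimal standalone sketch (illustrative numbers only, not part of the test) of the proportional attribution used above: with no per-event pruning in Hlt1, each line is charged the file's average event size times the number of events on which it fired.

# Illustrative: mirrors raw[line] = mdf_size / events_file * len(eventnumber_per_line)
mdf_size = 1_000_000                       # bytes in the MDF file (made-up number)
events_file = 100                          # events processed (made-up number)
fired = {"Hlt1TrackMVADecision": 40, "Hlt1TwoTrackMVADecision": 25}
avg_event_size = mdf_size / events_file    # bytes per event, assumed line-independent
raw = {line: avg_event_size * n for line, n in fired.items()}
print(raw)  # {'Hlt1TrackMVADecision': 400000.0, 'Hlt1TwoTrackMVADecision': 250000.0}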
def rates_per_line(event_stats, exclusive, raw, dst, input_rate,
output_file_path):
......@@ -247,14 +309,14 @@ if __name__ == '__main__':
'-p',
'--process',
type=str,
help='Compute for Hlt2 or Sprucing lines',
choices=['hlt2', 'spruce'],
help='Compute for Hlt1, Hlt2 or Sprucing lines',
choices=['hlt1', 'hlt2', 'spruce'],
required=True)
parser.add_argument(
'--stream-config',
type=str,
help='Choose production or per-WG stream configuration',
choices=['production', 'wg'],
help='Choose production, per-WG or streamless stream configuration',
choices=['streamless', 'production', 'wg'],
required=True)
args = parser.parse_args()
......@@ -264,11 +326,14 @@ if __name__ == '__main__':
input_config = parse_yaml(args.config)
if args.process == "spruce" and args.stream_config == "production":
if args.process == "spruce" and args.stream_config != "wg":
raise RuntimeError(
'"production" stream config not defined for sprucing. Please use "wg".'
'"production" and "streamless" stream configs are not defined for sprucing. Please use "wg".'
)
if args.process == "hlt1" and args.stream_config != "streamless":
raise RuntimeError(
'"production" and "wg" stream configs are not defined for hlt1. Please use "streamless".'
)
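For reference, the process-to-stream-config pairings used across this merge request reduce to a small table; a purely illustrative restatement of the checks above (names taken from this diff, helper function hypothetical):

# Allowed stream configs per process, as enforced/used in this MR
ALLOWED_STREAM_CONFIGS = {
    "hlt1": {"streamless"},
    "hlt2": {"production", "wg"},
    "spruce": {"wg"},
}

def check_stream_config(process, stream_config):
    # Illustrative equivalent of the RuntimeError checks in this script
    if stream_config not in ALLOWED_STREAM_CONFIGS[process]:
        raise RuntimeError(
            f'"{stream_config}" stream config is not defined for {process}.')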
LHCbApp(
DataType="Upgrade",
Simulation=True,
......@@ -282,37 +347,41 @@ if __name__ == '__main__':
# because we need to set `input_process='Hlt2'` in `unpack_rawevent`
# to read MDF output from Sprucing
algs = []
unpack = unpack_rawevent(
bank_types=['ODIN', 'HltDecReports', 'DstData', 'HltRoutingBits'],
configurables=True)
hlt2 = [hlt_decisions(source="Hlt2", output_loc="/Event/Hlt2/DecReports")]
if args.process == 'spruce':
spruce = [
hlt_decisions(
source="Spruce", output_loc="/Event/Spruce/DecReports")
]
else:
spruce = []
decoder = decoder(input_process=args.process.capitalize())
algs = [unpack] + hlt2 + spruce + [decoder] + [createODIN(ODIN='myODIN')]
with open(fname_helper.stream_config_json_path(args.stream_config)) as f:
lines = json.load(f)[args.stream]
if not args.process == "hlt1":
unpack = unpack_rawevent(
bank_types=['ODIN', 'HltDecReports', 'DstData', 'HltRoutingBits'],
configurables=True)
hlt2 = [hlt_decisions(source="Hlt2", output_loc="/Event/Hlt2/DecReports")]
if args.process == 'spruce':
spruce = [
hlt_decisions(
source="Spruce", output_loc="/Event/Spruce/DecReports")
]
else:
spruce = []
decoder = decoder(input_process=args.process.capitalize())
algs = [unpack] + hlt2 + spruce + [decoder] + [createODIN(ODIN='myODIN')]
appMgr = ApplicationMgr(TopAlg=algs)
appMgr.ExtSvc += [
configured_ann_svc(json_file=fname_helper.tck(args.stream_config))
]
appMgr = ApplicationMgr(TopAlg=algs)
appMgr.ExtSvc += [
configured_ann_svc(json_file=fname_helper.tck(args.stream_config))
]
IOHelper("MDF").inputFiles(
[fname_helper.mdf_fname_for_reading(args.stream_config, args.stream)])
IOHelper("MDF").inputFiles(
[fname_helper.mdf_fname_for_reading(args.stream_config, args.stream)])
with open(fname_helper.stream_config_json_path(args.stream_config)) as f:
lines = json.load(f)[args.stream]
appMgr = GP.AppMgr()
evt = appMgr.evtsvc()
appMgr = GP.AppMgr()
evt = appMgr.evtsvc()
# Calculates retention, rate and bandwidth per line and stream (file)
evts_all, rawbanks_all, dst_all, event_stats, exclusive, raw, dst = processing_events_per_line_and_stream(
LHCbApp().EvtMax, lines, args.process)
else:
# Calculates retention, rate and bandwidth per line and stream (file)
evts_all, rawbanks_all, dst_all, event_stats, exclusive, raw, dst = processing_events_per_line_and_stream(
LHCbApp().EvtMax, lines, args.process)
evts_all, rawbanks_all, dst_all, event_stats, exclusive, raw, dst = processing_events_per_line_and_stream_hlt1(lines)
rates_per_line(
event_stats, exclusive, raw, dst, input_config['input_rate'],
fname_helper.tmp_rate_table_per_line_path(args.stream_config,
......
......@@ -37,7 +37,7 @@ def main():
'--stream-config',
type=str,
required=True,
choices=["wg", "production"],
choices=["wg", "production", "streamless"],
help='Name of the stream config')
parser.add_argument(
'-s', '--stream', type=str, required=True, help='Name of the stream')
......
......@@ -105,7 +105,7 @@ case $PROCESS in
TEST_PATH_PREFIX='$HLT1CONFROOT/tests/options/bandwidth/'
EVENT_SIZE_UPPER_LIMIT=500
GAUDIRUN_INPUT_PROCESS="Hlt1"
STREAM_CONFIGS=( "production" )
STREAM_CONFIGS=( "streamless" )
case $INPUTDATA in
nominal)
CONFIG_FILE="${TEST_PATH_PREFIX}hlt1_bandwidth_input.yaml"
......@@ -167,7 +167,7 @@ esac
# -d downloads the input files locally for speed-up running Moore. Not helpful unless that download is fast for you (e.g. you're at CERN)
for STREAM_CONFIG in "${STREAM_CONFIGS[@]}"; do
echo "Running trigger to obtain MDF files with ${STREAM_CONFIG} streams for comparison over ${CONFIG_FILE}"
time python -m MooreTests.run_bandwidth_test_jobs -d -c=$CONFIG_FILE -n=$N_EVTS -t=$MOORE_THREADS -a=$EVENT_SIZE_UPPER_LIMIT $EXTRA_OPTS "${TEST_PATH_PREFIX}${PROCESS}_bandwidth_${STREAM_CONFIG}_streams.py"
time python -m MooreTests.run_bandwidth_test_jobs -c=$CONFIG_FILE -n=$N_EVTS -t=$MOORE_THREADS -a=$EVENT_SIZE_UPPER_LIMIT $EXTRA_OPTS "${TEST_PATH_PREFIX}${PROCESS}_bandwidth_${STREAM_CONFIG}_streams.py"
STORE_ERR_CODE
done
......