From 6c655653bdfc7112717b8905bb54bb1c5727a0dc Mon Sep 17 00:00:00 2001 From: Luke Grazette <l.grazette@warwick.ac.uk> Date: Mon, 22 Jul 2024 19:21:49 +0200 Subject: [PATCH 01/35] [TCK Infrastructure] Change default tck repository to /cvmfs/.../tcks --- AllenOnline/options/AllenConfig.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/AllenOnline/options/AllenConfig.py b/AllenOnline/options/AllenConfig.py index 47525b351..5fb5b99d1 100755 --- a/AllenOnline/options/AllenConfig.py +++ b/AllenOnline/options/AllenConfig.py @@ -96,7 +96,7 @@ if initial_tck != 0: else: # Otherwise allow the repository to be set in an special # environment variable, but use the file content metadata repo - repo = "/group/hlt/commissioning/tck.git" + repo = os.getenv('TCK_REPO', '/cvmfs/lhcb.cern.ch/lib/lhcb/tcks.git') if not _is_repo(repo): raise RuntimeError( "Failed to find filecontent metadata repo for TCKs at {}".format( -- GitLab From e71b4cb226ac45e1df5d4aa7d3710c57bb1da1a4 Mon Sep 17 00:00:00 2001 From: Bogdan Kutsenko <bogdan.kutsenko@cern.ch> Date: Wed, 24 Jul 2024 10:18:49 +0200 Subject: [PATCH 02/35] PV resolution monitor --- MooreOnlineConf/options/reco.py | 62 +++++++++++++++++++++++++-- MooreOnlineConf/options/vp_veloSP.py | 15 +++++++ MooreScripts/CMakeLists.txt | 14 ++++++ MooreScripts/job/runRecoMon.sh | 2 +- MooreScripts/tests/qmtest/recomon.qmt | 4 +- 5 files changed, 91 insertions(+), 6 deletions(-) create mode 100644 MooreOnlineConf/options/vp_veloSP.py diff --git a/MooreOnlineConf/options/reco.py b/MooreOnlineConf/options/reco.py index dcfc04c8b..872d86bb1 100644 --- a/MooreOnlineConf/options/reco.py +++ b/MooreOnlineConf/options/reco.py @@ -9,12 +9,66 @@ # or submit itself to any jurisdiction. # ############################################################################### import os -from Moore import options, run_reconstruction +from Moore import options +from Moore.config import run_allen_reconstruction from Moore.config import Reconstruction from RecoConf.standalone import reco_prefilters, standalone_hlt2_global_reco from Hlt2Conf.settings.hlt2_binds import config_pp_2024_with_monitoring +from RecoConf.hlt1_allen import allen_gaudi_config +from RecoConf.legacy_rec_hlt1_tracking import make_RetinaCluster_raw_bank, make_velo_full_clusters, make_RetinaClusters +from PyConf.Algorithms import VertexCompare + +from RecoConf.legacy_rec_hlt1_tracking import ( + make_reco_pvs, + make_PatPV3DFuture_pvs, + make_VeloClusterTrackingSIMD, +) +from RecoConf.hlt2_tracking import ( + make_PrKalmanFilter_noUT_tracks, + make_PrKalmanFilter_Seed_tracks, + make_PrKalmanFilter_Velo_tracks, + make_TrackBestTrackCreator_tracks, +) from MooreOnlineConf.utils import update_and_reset +from Moore import options, run_moore +from RecoConf.global_tools import stateProvider_with_simplified_geom, trackMasterExtrapolator_with_simplified_geom +from RecoConf.reconstruction_objects import reconstruction +from RecoConf.hlt2_global_reco import reconstruction as hlt2_reconstruction, make_light_reco_pr_kf_without_UT +from RecoConf.hlt2_tracking import ( + make_TrackBestTrackCreator_tracks, + make_PrKalmanFilter_noUT_tracks, + make_PrKalmanFilter_Velo_tracks, + make_PrKalmanFilter_Seed_tracks, +) +from RecoConf.decoders import default_VeloCluster_source +from RecoConf.protoparticles import make_charged_protoparticles +from RecoConf.event_filters import require_gec +from Hlt2Conf.settings.defaults import get_default_hlt1_filter_code_for_hlt2 +from Moore.streams import Stream, Streams +from 
Hlt2Conf.lines.semileptonic import all_lines as full_lines # all full-stream lines +import sys +from DDDB.CheckDD4Hep import UseDD4Hep + +from Moore import options, run_moore +from RecoConf.global_tools import stateProvider_with_simplified_geom, trackMasterExtrapolator_with_simplified_geom +from RecoConf.reconstruction_objects import reconstruction +from RecoConf.hlt2_global_reco import reconstruction as hlt2_reconstruction, make_light_reco_pr_kf_without_UT +from RecoConf.hlt2_tracking import ( + make_TrackBestTrackCreator_tracks, + make_PrKalmanFilter_noUT_tracks, + make_PrKalmanFilter_Velo_tracks, + make_PrKalmanFilter_Seed_tracks, +) +from RecoConf.decoders import default_VeloCluster_source +from RecoConf.protoparticles import make_charged_protoparticles +from RecoConf.event_filters import require_gec +from Hlt2Conf.settings.defaults import get_default_hlt1_filter_code_for_hlt2 +from Moore.streams import Stream, Streams +from Hlt2Conf.lines.semileptonic import all_lines as full_lines # all full-stream lines +import sys +from DDDB.CheckDD4Hep import UseDD4Hep + task_type = os.getenv("TASK_TYPE", "GenericTask") ## Global event cut specifications: @@ -70,6 +124,7 @@ def with_update_and_reset(): IOVReset(ODIN=make_odin()), update_and_reset(), odin_bb_filter, rb_filter ]) + return reco @@ -78,5 +133,6 @@ def with_update_and_reset(): with config_pp_2024_with_monitoring(), reco_prefilters.bind( gec=False, gec_cut=GEC_cut, -): - run_reconstruction(options, with_update_and_reset) +),\ + VertexCompare.bind(produceNtuple=False,produceHistogram=False,monitoring=True): + run_allen_reconstruction(options, with_update_and_reset) diff --git a/MooreOnlineConf/options/vp_veloSP.py b/MooreOnlineConf/options/vp_veloSP.py new file mode 100644 index 000000000..126765205 --- /dev/null +++ b/MooreOnlineConf/options/vp_veloSP.py @@ -0,0 +1,15 @@ +############################################################################### +# (c) Copyright 2024 CERN for the benefit of the LHCb Collaboration # +# # +# This software is distributed under the terms of the GNU General Public # +# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # +# # +# In applying this licence, CERN does not waive the privileges and immunities # +# granted to it by virtue of its status as an Intergovernmental Organization # +# or submit itself to any jurisdiction. # +############################################################################### +from RecoConf.decoders import default_VeloCluster_source +from AllenConf.velo_reconstruction import decode_velo + +default_VeloCluster_source.global_bind(bank_type="VP") +decode_velo.global_bind(retina_decoding=False) diff --git a/MooreScripts/CMakeLists.txt b/MooreScripts/CMakeLists.txt index db82a5e16..5537a4e85 100644 --- a/MooreScripts/CMakeLists.txt +++ b/MooreScripts/CMakeLists.txt @@ -26,6 +26,20 @@ if(BUILD_TESTING AND NOT USE_DD4HEP) ) endif() +if(BUILD_TESTING AND BINARY_TAG MATCHES ".*cuda.*") + # When compiling for GPU, vertex resolution monitoring call the device algorithms + # since these are incompatible with calling from Moore / Gaudi, disable the monitoring tests + set_property( + TEST + MooreScripts.beamspotmon + MooreScripts.calomon + MooreScripts.recomon + MooreScripts.velomon + PROPERTY + DISABLED TRUE + ) +endif() + # FIXME the following does not work as I don't know how to run cmsetup.py as part of the install # because xenv is not in available. Maybe find_package(xenv) would work? 
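The new vp_veloSP.py added above switches the VELO decoding to the SP bank type by binding the decoder configurables globally. A minimal sketch of an equivalent scoped usage, assuming the same PyConf bind API that reco.py already uses in its with-blocks (illustrative only, not part of this patch):

    from RecoConf.decoders import default_VeloCluster_source
    from AllenConf.velo_reconstruction import decode_velo

    # Scoped alternative to the global_bind calls in vp_veloSP.py: the
    # overrides only apply to configuration built inside the with-block.
    with default_VeloCluster_source.bind(bank_type="VP"), \
         decode_velo.bind(retina_decoding=False):
        pass  # build the reconstruction configuration here
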
diff --git a/MooreScripts/job/runRecoMon.sh b/MooreScripts/job/runRecoMon.sh index 96a323050..4b2cca1ab 100755 --- a/MooreScripts/job/runRecoMon.sh +++ b/MooreScripts/job/runRecoMon.sh @@ -19,7 +19,7 @@ settings=$(python -c 'import OnlineEnvBase; print(OnlineEnvBase.HLTType)') vp_options=$MOOREONLINECONFROOT/options/vp_retina_clusters.py if [[ $settings == *"veloSP" ]]; then echo "RecoMon: configuring VELO SP algorithms based on current trigger config $settings" - vp_options= + vp_options=$MOOREONLINECONFROOT/options/vp_veloSP.py fi exec_gaudirun \ diff --git a/MooreScripts/tests/qmtest/recomon.qmt b/MooreScripts/tests/qmtest/recomon.qmt index 05066b627..b92254d58 100644 --- a/MooreScripts/tests/qmtest/recomon.qmt +++ b/MooreScripts/tests/qmtest/recomon.qmt @@ -1,4 +1,4 @@ -<?xml version="1.0" ?><!DOCTYPE extension PUBLIC '-//QM/2.3/Extension//EN' 'http://www.codesourcery.com/qm/dtds/2.3/-//qm/2.3/extension//en.dtd'> +<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE extension PUBLIC '-//QM/2.3/Extension//EN' 'http://www.codesourcery.com/qm/dtds/2.3/-//qm/2.3/extension//en.dtd'> <!-- (c) Copyright 2021 CERN for the benefit of the LHCb Collaboration @@ -28,7 +28,7 @@ Run an RecoMon job in the Online testbench import glob workdir = self._common_tmpdir for fn in glob.glob(workdir + "/recomon/*.*"): - if not fn.endswith(".mdf"): + if not fn.endswith(".mdf") and not fn.endswith(".root"): result[os.path.basename(fn)] = open(fn).read() </text></argument> -- GitLab From 2cfdafdd02922db647777a2e7f63b88a416343a6 Mon Sep 17 00:00:00 2001 From: RefBot <lhcbsoft@cern.ch> Date: Thu, 25 Jul 2024 10:57:25 +0200 Subject: [PATCH 03/35] Update References for: Allen!1659, Moore!3521, MooreOnline!453 based on lhcb-2024-patches-mr/1112 [skip ci] --- AllenOnline/tests/refs/mep_lumi.ref | 5 +- .../tests/refs/mep_lumi.ref.x86_64_v3-opt | 5 +- AllenOnline/tests/refs/test_lumi.ref | 302 +++++++++--------- .../tests/refs/test_lumi.ref.x86_64_v3-opt | 300 ++++++++--------- 4 files changed, 307 insertions(+), 305 deletions(-) diff --git a/AllenOnline/tests/refs/mep_lumi.ref b/AllenOnline/tests/refs/mep_lumi.ref index fe86a32b2..2448d46e0 100644 --- a/AllenOnline/tests/refs/mep_lumi.ref +++ b/AllenOnline/tests/refs/mep_lumi.ref @@ -6,6 +6,7 @@ HLTControlFlowMgr INFO o TBB thread pool size: 'ThreadPoo ApplicationMgr INFO Application Manager Initialized successfully ApplicationMgr INFO Application Manager Started successfully DeviceFTGeometry INFO Conditions DB is compatible with FT bank version 7 and 8. +DeviceFTGeometry INFO Deactivated 4 links. Starting timer for throughput measurement Input complete ApplicationMgr INFO Application Manager Stopped successfully @@ -46,7 +47,7 @@ gather_selections INFO Number of counters : 10 | "Hlt1TAEPassthroughRate" | 6 | pv_beamline_cleanup INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | - | "n_PVs" | 30000 | 39595 | 1.3198 | + | "n_PVs" | 30000 | 59289 | 1.9763 | scifi_calculate_cluster_count_76... 
INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "n_misordered_cluster" | 2772 | @@ -55,4 +56,4 @@ scifi_raw_bank_decoder_aef54905 INFO Number of counters : 1 | "n_invalid_chanid" | 828 | velo_consolidate_tracks INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | - | "n_velo_tracks" | 30000 | 5092912 | 169.76 | + | "n_velo_tracks" | 30000 | 5096106 | 169.87 | diff --git a/AllenOnline/tests/refs/mep_lumi.ref.x86_64_v3-opt b/AllenOnline/tests/refs/mep_lumi.ref.x86_64_v3-opt index b42139bc8..a218da07b 100644 --- a/AllenOnline/tests/refs/mep_lumi.ref.x86_64_v3-opt +++ b/AllenOnline/tests/refs/mep_lumi.ref.x86_64_v3-opt @@ -6,6 +6,7 @@ HLTControlFlowMgr INFO o TBB thread pool size: 'ThreadPoo ApplicationMgr INFO Application Manager Initialized successfully ApplicationMgr INFO Application Manager Started successfully DeviceFTGeometry INFO Conditions DB is compatible with FT bank version 7 and 8. +DeviceFTGeometry INFO Deactivated 4 links. Starting timer for throughput measurement Input complete ApplicationMgr INFO Application Manager Stopped successfully @@ -46,7 +47,7 @@ gather_selections INFO Number of counters : 10 | "Hlt1TAEPassthroughRate" | 6 | pv_beamline_cleanup INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | - | "n_PVs" | 30000 | 39607 | 1.3202 | + | "n_PVs" | 30000 | 59306 | 1.9769 | scifi_calculate_cluster_count_76... INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "n_misordered_cluster" | 2772 | @@ -55,4 +56,4 @@ scifi_raw_bank_decoder_aef54905 INFO Number of counters : 1 | "n_invalid_chanid" | 828 | velo_consolidate_tracks INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | - | "n_velo_tracks" | 30000 | 5092937 | 169.76 | + | "n_velo_tracks" | 30000 | 5096105 | 169.87 | diff --git a/AllenOnline/tests/refs/test_lumi.ref b/AllenOnline/tests/refs/test_lumi.ref index 92e5125f6..99b023e72 100644 --- a/AllenOnline/tests/refs/test_lumi.ref +++ b/AllenOnline/tests/refs/test_lumi.ref @@ -13,7 +13,7 @@ ECalETMiddleTop: 6839.999898 ECalETOuterBottom: 12069.999820 ECalETOuterTop: 10009.999851 ECalEtot: 579400.329466 -FiducialVeloVertices: 3.000000 +FiducialVeloVertices: 6.000000 GEC: 1.000000 MuonHitsM2R1: 1023.000000 MuonHitsM2R2: 1023.000000 @@ -47,20 +47,20 @@ VeloClustersOuterBin00: 101.000000 VeloClustersOuterBin01: 339.000000 VeloClustersOuterBin02: 140.000000 VeloClustersOuterBin03: 79.000000 -VeloFiducialTracks: 231.000000 -VeloTracks: 240.000000 -VeloTracksEtaBin0: 7.000000 -VeloTracksEtaBin1: 45.000000 -VeloTracksEtaBin2: 35.000000 -VeloTracksEtaBin3: 24.000000 -VeloTracksEtaBin4: 53.000000 -VeloTracksEtaBin5: 34.000000 -VeloTracksEtaBin6: 29.000000 -VeloTracksEtaBin7: 13.000000 -VeloVertexX: -0.012207 -VeloVertexY: 0.018311 -VeloVertexZ: 0.061035 -VeloVertices: 3.000000 +VeloFiducialTracks: 238.000000 +VeloTracks: 247.000000 +VeloTracksEtaBin0: 9.000000 +VeloTracksEtaBin1: 46.000000 +VeloTracksEtaBin2: 39.000000 +VeloTracksEtaBin3: 32.000000 +VeloTracksEtaBin4: 52.000000 +VeloTracksEtaBin5: 30.000000 +VeloTracksEtaBin6: 27.000000 +VeloTracksEtaBin7: 12.000000 +VeloVertexX: 1.179810 +VeloVertexY: 0.183716 +VeloVertexZ: -1.159668 +VeloVertices: 6.000000 encodingKey: 3104473477.000000 -------------- Event 2579 -------------- BCIDHigh: 0.000000 @@ -108,19 +108,19 @@ VeloClustersOuterBin00: 110.000000 VeloClustersOuterBin01: 213.000000 VeloClustersOuterBin02: 167.000000 VeloClustersOuterBin03: 
202.000000 -VeloFiducialTracks: 313.000000 -VeloTracks: 343.000000 -VeloTracksEtaBin0: 4.000000 -VeloTracksEtaBin1: 42.000000 -VeloTracksEtaBin2: 34.000000 -VeloTracksEtaBin3: 39.000000 -VeloTracksEtaBin4: 51.000000 -VeloTracksEtaBin5: 100.000000 -VeloTracksEtaBin6: 53.000000 -VeloTracksEtaBin7: 20.000000 -VeloVertexX: 0.223999 -VeloVertexY: -0.017090 -VeloVertexZ: -19.470214 +VeloFiducialTracks: 297.000000 +VeloTracks: 328.000000 +VeloTracksEtaBin0: 7.000000 +VeloTracksEtaBin1: 40.000000 +VeloTracksEtaBin2: 36.000000 +VeloTracksEtaBin3: 43.000000 +VeloTracksEtaBin4: 47.000000 +VeloTracksEtaBin5: 90.000000 +VeloTracksEtaBin6: 46.000000 +VeloTracksEtaBin7: 19.000000 +VeloVertexX: 1.187744 +VeloVertexY: 0.166016 +VeloVertexZ: -19.348144 VeloVertices: 3.000000 encodingKey: 3104473477.000000 -------------- Event 3572 -------------- @@ -135,7 +135,7 @@ ECalETMiddleTop: 7824.999883 ECalETOuterBottom: 15739.999765 ECalETOuterTop: 12274.999817 ECalEtot: 813958.062595 -FiducialVeloVertices: 1.000000 +FiducialVeloVertices: 4.000000 GEC: 1.000000 MuonHitsM2R1: 1023.000000 MuonHitsM2R2: 1023.000000 @@ -169,20 +169,20 @@ VeloClustersOuterBin00: 75.000000 VeloClustersOuterBin01: 146.000000 VeloClustersOuterBin02: 141.000000 VeloClustersOuterBin03: 200.000000 -VeloFiducialTracks: 283.000000 -VeloTracks: 292.000000 -VeloTracksEtaBin0: 6.000000 -VeloTracksEtaBin1: 24.000000 -VeloTracksEtaBin2: 22.000000 -VeloTracksEtaBin3: 24.000000 -VeloTracksEtaBin4: 49.000000 -VeloTracksEtaBin5: 81.000000 -VeloTracksEtaBin6: 72.000000 -VeloTracksEtaBin7: 14.000000 -VeloVertexX: -0.208740 -VeloVertexY: 0.007935 -VeloVertexZ: 27.404784 -VeloVertices: 1.000000 +VeloFiducialTracks: 256.000000 +VeloTracks: 269.000000 +VeloTracksEtaBin0: 8.000000 +VeloTracksEtaBin1: 23.000000 +VeloTracksEtaBin2: 23.000000 +VeloTracksEtaBin3: 26.000000 +VeloTracksEtaBin4: 47.000000 +VeloTracksEtaBin5: 72.000000 +VeloTracksEtaBin6: 57.000000 +VeloTracksEtaBin7: 13.000000 +VeloVertexX: 1.099243 +VeloVertexY: 0.222778 +VeloVertexZ: -18.493651 +VeloVertices: 4.000000 encodingKey: 3104473477.000000 -------------- Event 5410 -------------- BCIDHigh: 0.000000 @@ -196,7 +196,7 @@ ECalETMiddleTop: 3519.999948 ECalETOuterBottom: 12609.999812 ECalETOuterTop: 6049.999910 ECalEtot: 520584.734256 -FiducialVeloVertices: 0.000000 +FiducialVeloVertices: 3.000000 GEC: 1.000000 MuonHitsM2R1: 1023.000000 MuonHitsM2R2: 1023.000000 @@ -230,20 +230,20 @@ VeloClustersOuterBin00: 62.000000 VeloClustersOuterBin01: 143.000000 VeloClustersOuterBin02: 96.000000 VeloClustersOuterBin03: 88.000000 -VeloFiducialTracks: 165.000000 -VeloTracks: 172.000000 -VeloTracksEtaBin0: 4.000000 -VeloTracksEtaBin1: 25.000000 -VeloTracksEtaBin2: 18.000000 -VeloTracksEtaBin3: 14.000000 -VeloTracksEtaBin4: 31.000000 -VeloTracksEtaBin5: 34.000000 -VeloTracksEtaBin6: 40.000000 +VeloFiducialTracks: 168.000000 +VeloTracks: 177.000000 +VeloTracksEtaBin0: 7.000000 +VeloTracksEtaBin1: 27.000000 +VeloTracksEtaBin2: 19.000000 +VeloTracksEtaBin3: 20.000000 +VeloTracksEtaBin4: 33.000000 +VeloTracksEtaBin5: 30.000000 +VeloTracksEtaBin6: 35.000000 VeloTracksEtaBin7: 6.000000 -VeloVertexX: 0.000000 -VeloVertexY: 0.000000 -VeloVertexZ: 0.000000 -VeloVertices: 0.000000 +VeloVertexX: 1.239014 +VeloVertexY: 0.122681 +VeloVertexZ: -16.052245 +VeloVertices: 3.000000 encodingKey: 3104473477.000000 -------------- Event 6541 -------------- BCIDHigh: 0.000000 @@ -257,7 +257,7 @@ ECalETMiddleTop: 8564.999872 ECalETOuterBottom: 16634.999752 ECalETOuterTop: 16179.999759 ECalEtot: 721529.272263 -FiducialVeloVertices: 
2.000000 +FiducialVeloVertices: 3.000000 GEC: 1.000000 MuonHitsM2R1: 1023.000000 MuonHitsM2R2: 1023.000000 @@ -441,20 +441,20 @@ VeloClustersOuterS22: 20.000000 VeloClustersOuterS23: 25.000000 VeloClustersOuterS24: 20.000000 VeloClustersOuterS25: 25.000000 -VeloFiducialTracks: 218.000000 -VeloTracks: 225.000000 +VeloFiducialTracks: 201.000000 +VeloTracks: 207.000000 VeloTracksEtaBin0: 5.000000 -VeloTracksEtaBin1: 12.000000 -VeloTracksEtaBin2: 16.000000 -VeloTracksEtaBin3: 13.000000 -VeloTracksEtaBin4: 52.000000 -VeloTracksEtaBin5: 48.000000 -VeloTracksEtaBin6: 67.000000 +VeloTracksEtaBin1: 13.000000 +VeloTracksEtaBin2: 17.000000 +VeloTracksEtaBin3: 17.000000 +VeloTracksEtaBin4: 55.000000 +VeloTracksEtaBin5: 40.000000 +VeloTracksEtaBin6: 48.000000 VeloTracksEtaBin7: 12.000000 -VeloVertexX: 0.007324 -VeloVertexY: 0.095825 -VeloVertexZ: -33.508299 -VeloVertices: 2.000000 +VeloVertexX: 0.964355 +VeloVertexY: 0.166016 +VeloVertexZ: -11.962890 +VeloVertices: 3.000000 encodingKey: 492876054.000000 -------------- Event 9516 -------------- BCIDHigh: 0.000000 @@ -590,7 +590,7 @@ ECalETMiddleTop: 5349.999920 ECalETOuterBottom: 9369.999860 ECalETOuterTop: 13969.999792 ECalEtot: 546746.654634 -FiducialVeloVertices: 2.000000 +FiducialVeloVertices: 3.000000 GEC: 1.000000 MuonHitsM2R1: 1023.000000 MuonHitsM2R2: 1023.000000 @@ -624,20 +624,20 @@ VeloClustersOuterBin00: 90.000000 VeloClustersOuterBin01: 151.000000 VeloClustersOuterBin02: 116.000000 VeloClustersOuterBin03: 133.000000 -VeloFiducialTracks: 211.000000 -VeloTracks: 228.000000 -VeloTracksEtaBin0: 9.000000 -VeloTracksEtaBin1: 34.000000 -VeloTracksEtaBin2: 22.000000 -VeloTracksEtaBin3: 25.000000 +VeloFiducialTracks: 202.000000 +VeloTracks: 225.000000 +VeloTracksEtaBin0: 10.000000 +VeloTracksEtaBin1: 36.000000 +VeloTracksEtaBin2: 21.000000 +VeloTracksEtaBin3: 39.000000 VeloTracksEtaBin4: 41.000000 -VeloTracksEtaBin5: 45.000000 -VeloTracksEtaBin6: 46.000000 +VeloTracksEtaBin5: 38.000000 +VeloTracksEtaBin6: 34.000000 VeloTracksEtaBin7: 6.000000 -VeloVertexX: -0.039673 -VeloVertexY: -0.032959 -VeloVertexZ: 40.893553 -VeloVertices: 2.000000 +VeloVertexX: 1.309204 +VeloVertexY: 0.125122 +VeloVertexZ: 10.925292 +VeloVertices: 3.000000 encodingKey: 3104473477.000000 -------------- Event 13890 -------------- BCIDHigh: 0.000000 @@ -651,7 +651,7 @@ ECalETMiddleTop: 11224.999833 ECalETOuterBottom: 15934.999763 ECalETOuterTop: 15349.999771 ECalEtot: 795532.274547 -FiducialVeloVertices: 4.000000 +FiducialVeloVertices: 2.000000 GEC: 1.000000 MuonHitsM2R1: 1023.000000 MuonHitsM2R2: 1023.000000 @@ -685,20 +685,20 @@ VeloClustersOuterBin00: 77.000000 VeloClustersOuterBin01: 173.000000 VeloClustersOuterBin02: 197.000000 VeloClustersOuterBin03: 127.000000 -VeloFiducialTracks: 263.000000 -VeloTracks: 275.000000 +VeloFiducialTracks: 259.000000 +VeloTracks: 271.000000 VeloTracksEtaBin0: 5.000000 -VeloTracksEtaBin1: 37.000000 -VeloTracksEtaBin2: 35.000000 -VeloTracksEtaBin3: 18.000000 -VeloTracksEtaBin4: 63.000000 -VeloTracksEtaBin5: 59.000000 -VeloTracksEtaBin6: 48.000000 -VeloTracksEtaBin7: 10.000000 -VeloVertexX: 0.024414 -VeloVertexY: -0.050049 -VeloVertexZ: -36.865233 -VeloVertices: 4.000000 +VeloTracksEtaBin1: 41.000000 +VeloTracksEtaBin2: 37.000000 +VeloTracksEtaBin3: 27.000000 +VeloTracksEtaBin4: 59.000000 +VeloTracksEtaBin5: 54.000000 +VeloTracksEtaBin6: 37.000000 +VeloTracksEtaBin7: 11.000000 +VeloVertexX: 1.113281 +VeloVertexY: 0.112915 +VeloVertexZ: -60.974118 +VeloVertices: 2.000000 encodingKey: 3104473477.000000 -------------- Event 15450 -------------- 
BCIDHigh: 0.000000 @@ -773,7 +773,7 @@ ECalETMiddleTop: 9054.999865 ECalETOuterBottom: 12889.999808 ECalETOuterTop: 25864.999615 ECalEtot: 778860.609527 -FiducialVeloVertices: 3.000000 +FiducialVeloVertices: 5.000000 GEC: 1.000000 MuonHitsM2R1: 1023.000000 MuonHitsM2R2: 1023.000000 @@ -807,20 +807,20 @@ VeloClustersOuterBin00: 98.000000 VeloClustersOuterBin01: 272.000000 VeloClustersOuterBin02: 233.000000 VeloClustersOuterBin03: 145.000000 -VeloFiducialTracks: 313.000000 -VeloTracks: 330.000000 -VeloTracksEtaBin0: 15.000000 -VeloTracksEtaBin1: 39.000000 -VeloTracksEtaBin2: 38.000000 -VeloTracksEtaBin3: 40.000000 -VeloTracksEtaBin4: 73.000000 -VeloTracksEtaBin5: 69.000000 -VeloTracksEtaBin6: 52.000000 +VeloFiducialTracks: 321.000000 +VeloTracks: 339.000000 +VeloTracksEtaBin0: 17.000000 +VeloTracksEtaBin1: 38.000000 +VeloTracksEtaBin2: 44.000000 +VeloTracksEtaBin3: 53.000000 +VeloTracksEtaBin4: 74.000000 +VeloTracksEtaBin5: 66.000000 +VeloTracksEtaBin6: 43.000000 VeloTracksEtaBin7: 4.000000 -VeloVertexX: 0.004272 -VeloVertexY: 0.064697 -VeloVertexZ: 32.165526 -VeloVertices: 3.000000 +VeloVertexX: 1.207886 +VeloVertexY: 0.264893 +VeloVertexZ: 30.700682 +VeloVertices: 5.000000 encodingKey: 3104473477.000000 -------------- Event 18514 -------------- BCIDHigh: 0.000000 @@ -834,7 +834,7 @@ ECalETMiddleTop: 3104.999954 ECalETOuterBottom: 9309.999861 ECalETOuterTop: 9649.999856 ECalEtot: 353973.031587 -FiducialVeloVertices: 1.000000 +FiducialVeloVertices: 4.000000 GEC: 1.000000 MuonHitsM2R1: 1023.000000 MuonHitsM2R2: 1023.000000 @@ -868,20 +868,20 @@ VeloClustersOuterBin00: 59.000000 VeloClustersOuterBin01: 111.000000 VeloClustersOuterBin02: 95.000000 VeloClustersOuterBin03: 99.000000 -VeloFiducialTracks: 146.000000 -VeloTracks: 155.000000 -VeloTracksEtaBin0: 9.000000 -VeloTracksEtaBin1: 23.000000 +VeloFiducialTracks: 152.000000 +VeloTracks: 166.000000 +VeloTracksEtaBin0: 10.000000 +VeloTracksEtaBin1: 25.000000 VeloTracksEtaBin2: 22.000000 -VeloTracksEtaBin3: 16.000000 -VeloTracksEtaBin4: 30.000000 -VeloTracksEtaBin5: 35.000000 -VeloTracksEtaBin6: 16.000000 +VeloTracksEtaBin3: 24.000000 +VeloTracksEtaBin4: 37.000000 +VeloTracksEtaBin5: 27.000000 +VeloTracksEtaBin6: 17.000000 VeloTracksEtaBin7: 4.000000 -VeloVertexX: -0.039673 -VeloVertexY: 0.023804 -VeloVertexZ: 71.716305 -VeloVertices: 1.000000 +VeloVertexX: 1.179810 +VeloVertexY: 0.062866 +VeloVertexZ: 58.349607 +VeloVertices: 4.000000 encodingKey: 3104473477.000000 -------------- Event 20355 -------------- BCIDHigh: 0.000000 @@ -1228,7 +1228,7 @@ ECalETMiddleTop: 20054.999701 ECalETOuterBottom: 32239.999520 ECalETOuterTop: 41504.999382 ECalEtot: 1832368.909254 -FiducialVeloVertices: 2.000000 +FiducialVeloVertices: 6.000000 GEC: 1.000000 MuonHitsM2R1: 1023.000000 MuonHitsM2R2: 1023.000000 @@ -1262,20 +1262,20 @@ VeloClustersOuterBin00: 139.000000 VeloClustersOuterBin01: 297.000000 VeloClustersOuterBin02: 289.000000 VeloClustersOuterBin03: 301.000000 -VeloFiducialTracks: 476.000000 -VeloTracks: 505.000000 -VeloTracksEtaBin0: 10.000000 -VeloTracksEtaBin1: 46.000000 -VeloTracksEtaBin2: 53.000000 -VeloTracksEtaBin3: 37.000000 -VeloTracksEtaBin4: 106.000000 -VeloTracksEtaBin5: 127.000000 -VeloTracksEtaBin6: 105.000000 -VeloTracksEtaBin7: 21.000000 -VeloVertexX: 0.103149 -VeloVertexY: 0.038452 -VeloVertexZ: -30.090331 -VeloVertices: 2.000000 +VeloFiducialTracks: 467.000000 +VeloTracks: 496.000000 +VeloTracksEtaBin0: 11.000000 +VeloTracksEtaBin1: 50.000000 +VeloTracksEtaBin2: 56.000000 +VeloTracksEtaBin3: 48.000000 +VeloTracksEtaBin4: 104.000000 
+VeloTracksEtaBin5: 117.000000 +VeloTracksEtaBin6: 88.000000 +VeloTracksEtaBin7: 22.000000 +VeloVertexX: 1.076660 +VeloVertexY: 0.236816 +VeloVertexZ: -52.307126 +VeloVertices: 6.000000 encodingKey: 3104473477.000000 -------------- Event 28880 -------------- BCIDHigh: 0.000000 @@ -1289,7 +1289,7 @@ ECalETMiddleTop: 2164.999968 ECalETOuterBottom: 1814.999973 ECalETOuterTop: 3459.999948 ECalEtot: 130524.744303 -FiducialVeloVertices: 0.000000 +FiducialVeloVertices: 1.000000 GEC: 1.000000 MuonHitsM2R1: 1023.000000 MuonHitsM2R2: 1023.000000 @@ -1323,20 +1323,20 @@ VeloClustersOuterBin00: 24.000000 VeloClustersOuterBin01: 56.000000 VeloClustersOuterBin02: 43.000000 VeloClustersOuterBin03: 58.000000 -VeloFiducialTracks: 81.000000 -VeloTracks: 84.000000 +VeloFiducialTracks: 80.000000 +VeloTracks: 83.000000 VeloTracksEtaBin0: 2.000000 VeloTracksEtaBin1: 10.000000 VeloTracksEtaBin2: 5.000000 -VeloTracksEtaBin3: 9.000000 -VeloTracksEtaBin4: 23.000000 -VeloTracksEtaBin5: 16.000000 -VeloTracksEtaBin6: 14.000000 -VeloTracksEtaBin7: 5.000000 -VeloVertexX: 0.000000 -VeloVertexY: 0.000000 -VeloVertexZ: 0.000000 -VeloVertices: 0.000000 +VeloTracksEtaBin3: 11.000000 +VeloTracksEtaBin4: 24.000000 +VeloTracksEtaBin5: 14.000000 +VeloTracksEtaBin6: 13.000000 +VeloTracksEtaBin7: 4.000000 +VeloVertexX: 1.198120 +VeloVertexY: 0.275269 +VeloVertexZ: 3.356933 +VeloVertices: 1.000000 encodingKey: 3104473477.000000 HltLumiWriter INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | diff --git a/AllenOnline/tests/refs/test_lumi.ref.x86_64_v3-opt b/AllenOnline/tests/refs/test_lumi.ref.x86_64_v3-opt index 25dea1d49..1f462f917 100644 --- a/AllenOnline/tests/refs/test_lumi.ref.x86_64_v3-opt +++ b/AllenOnline/tests/refs/test_lumi.ref.x86_64_v3-opt @@ -13,7 +13,7 @@ ECalETMiddleTop: 6839.999898 ECalETOuterBottom: 12069.999820 ECalETOuterTop: 10009.999851 ECalEtot: 579400.329466 -FiducialVeloVertices: 3.000000 +FiducialVeloVertices: 6.000000 GEC: 1.000000 MuonHitsM2R1: 1023.000000 MuonHitsM2R2: 1023.000000 @@ -47,20 +47,20 @@ VeloClustersOuterBin00: 101.000000 VeloClustersOuterBin01: 339.000000 VeloClustersOuterBin02: 140.000000 VeloClustersOuterBin03: 79.000000 -VeloFiducialTracks: 231.000000 -VeloTracks: 240.000000 -VeloTracksEtaBin0: 7.000000 -VeloTracksEtaBin1: 45.000000 -VeloTracksEtaBin2: 35.000000 -VeloTracksEtaBin3: 24.000000 -VeloTracksEtaBin4: 53.000000 -VeloTracksEtaBin5: 34.000000 -VeloTracksEtaBin6: 29.000000 -VeloTracksEtaBin7: 13.000000 -VeloVertexX: -0.012817 -VeloVertexY: 0.018311 -VeloVertexZ: 0.061035 -VeloVertices: 3.000000 +VeloFiducialTracks: 238.000000 +VeloTracks: 247.000000 +VeloTracksEtaBin0: 9.000000 +VeloTracksEtaBin1: 46.000000 +VeloTracksEtaBin2: 39.000000 +VeloTracksEtaBin3: 32.000000 +VeloTracksEtaBin4: 52.000000 +VeloTracksEtaBin5: 30.000000 +VeloTracksEtaBin6: 27.000000 +VeloTracksEtaBin7: 12.000000 +VeloVertexX: 1.179810 +VeloVertexY: 0.183716 +VeloVertexZ: -1.159668 +VeloVertices: 6.000000 encodingKey: 3104473477.000000 -------------- Event 2579 -------------- BCIDHigh: 0.000000 @@ -108,19 +108,19 @@ VeloClustersOuterBin00: 110.000000 VeloClustersOuterBin01: 213.000000 VeloClustersOuterBin02: 167.000000 VeloClustersOuterBin03: 202.000000 -VeloFiducialTracks: 313.000000 -VeloTracks: 343.000000 -VeloTracksEtaBin0: 4.000000 -VeloTracksEtaBin1: 42.000000 -VeloTracksEtaBin2: 34.000000 -VeloTracksEtaBin3: 39.000000 -VeloTracksEtaBin4: 51.000000 -VeloTracksEtaBin5: 100.000000 -VeloTracksEtaBin6: 53.000000 -VeloTracksEtaBin7: 20.000000 -VeloVertexX: 0.223999 
-VeloVertexY: -0.017090 -VeloVertexZ: -19.470214 +VeloFiducialTracks: 297.000000 +VeloTracks: 328.000000 +VeloTracksEtaBin0: 7.000000 +VeloTracksEtaBin1: 40.000000 +VeloTracksEtaBin2: 36.000000 +VeloTracksEtaBin3: 43.000000 +VeloTracksEtaBin4: 47.000000 +VeloTracksEtaBin5: 90.000000 +VeloTracksEtaBin6: 46.000000 +VeloTracksEtaBin7: 19.000000 +VeloVertexX: 1.187134 +VeloVertexY: 0.166016 +VeloVertexZ: -19.348144 VeloVertices: 3.000000 encodingKey: 3104473477.000000 -------------- Event 3572 -------------- @@ -135,7 +135,7 @@ ECalETMiddleTop: 7824.999883 ECalETOuterBottom: 15739.999765 ECalETOuterTop: 12274.999817 ECalEtot: 813958.062595 -FiducialVeloVertices: 1.000000 +FiducialVeloVertices: 4.000000 GEC: 1.000000 MuonHitsM2R1: 1023.000000 MuonHitsM2R2: 1023.000000 @@ -169,20 +169,20 @@ VeloClustersOuterBin00: 75.000000 VeloClustersOuterBin01: 146.000000 VeloClustersOuterBin02: 141.000000 VeloClustersOuterBin03: 200.000000 -VeloFiducialTracks: 283.000000 -VeloTracks: 292.000000 -VeloTracksEtaBin0: 6.000000 -VeloTracksEtaBin1: 24.000000 -VeloTracksEtaBin2: 22.000000 -VeloTracksEtaBin3: 24.000000 -VeloTracksEtaBin4: 49.000000 -VeloTracksEtaBin5: 81.000000 -VeloTracksEtaBin6: 72.000000 -VeloTracksEtaBin7: 14.000000 -VeloVertexX: -0.208740 -VeloVertexY: 0.007935 -VeloVertexZ: 27.404784 -VeloVertices: 1.000000 +VeloFiducialTracks: 256.000000 +VeloTracks: 269.000000 +VeloTracksEtaBin0: 8.000000 +VeloTracksEtaBin1: 23.000000 +VeloTracksEtaBin2: 23.000000 +VeloTracksEtaBin3: 26.000000 +VeloTracksEtaBin4: 47.000000 +VeloTracksEtaBin5: 72.000000 +VeloTracksEtaBin6: 57.000000 +VeloTracksEtaBin7: 13.000000 +VeloVertexX: 1.098633 +VeloVertexY: 0.222778 +VeloVertexZ: -18.493651 +VeloVertices: 4.000000 encodingKey: 3104473477.000000 -------------- Event 5410 -------------- BCIDHigh: 0.000000 @@ -196,7 +196,7 @@ ECalETMiddleTop: 3519.999948 ECalETOuterBottom: 12609.999812 ECalETOuterTop: 6049.999910 ECalEtot: 520584.734256 -FiducialVeloVertices: 0.000000 +FiducialVeloVertices: 3.000000 GEC: 1.000000 MuonHitsM2R1: 1023.000000 MuonHitsM2R2: 1023.000000 @@ -230,20 +230,20 @@ VeloClustersOuterBin00: 62.000000 VeloClustersOuterBin01: 143.000000 VeloClustersOuterBin02: 96.000000 VeloClustersOuterBin03: 88.000000 -VeloFiducialTracks: 165.000000 -VeloTracks: 172.000000 -VeloTracksEtaBin0: 4.000000 -VeloTracksEtaBin1: 25.000000 -VeloTracksEtaBin2: 18.000000 -VeloTracksEtaBin3: 14.000000 -VeloTracksEtaBin4: 31.000000 -VeloTracksEtaBin5: 34.000000 -VeloTracksEtaBin6: 40.000000 +VeloFiducialTracks: 168.000000 +VeloTracks: 177.000000 +VeloTracksEtaBin0: 7.000000 +VeloTracksEtaBin1: 27.000000 +VeloTracksEtaBin2: 19.000000 +VeloTracksEtaBin3: 20.000000 +VeloTracksEtaBin4: 33.000000 +VeloTracksEtaBin5: 30.000000 +VeloTracksEtaBin6: 35.000000 VeloTracksEtaBin7: 6.000000 -VeloVertexX: 0.000000 -VeloVertexY: 0.000000 -VeloVertexZ: 0.000000 -VeloVertices: 0.000000 +VeloVertexX: 1.239624 +VeloVertexY: 0.122681 +VeloVertexZ: -16.052245 +VeloVertices: 3.000000 encodingKey: 3104473477.000000 -------------- Event 6541 -------------- BCIDHigh: 0.000000 @@ -257,7 +257,7 @@ ECalETMiddleTop: 8564.999872 ECalETOuterBottom: 16634.999752 ECalETOuterTop: 16179.999759 ECalEtot: 721529.272263 -FiducialVeloVertices: 2.000000 +FiducialVeloVertices: 3.000000 GEC: 1.000000 MuonHitsM2R1: 1023.000000 MuonHitsM2R2: 1023.000000 @@ -441,20 +441,20 @@ VeloClustersOuterS22: 20.000000 VeloClustersOuterS23: 25.000000 VeloClustersOuterS24: 20.000000 VeloClustersOuterS25: 25.000000 -VeloFiducialTracks: 218.000000 -VeloTracks: 225.000000 
+VeloFiducialTracks: 201.000000 +VeloTracks: 207.000000 VeloTracksEtaBin0: 5.000000 -VeloTracksEtaBin1: 12.000000 -VeloTracksEtaBin2: 16.000000 -VeloTracksEtaBin3: 13.000000 -VeloTracksEtaBin4: 52.000000 -VeloTracksEtaBin5: 48.000000 -VeloTracksEtaBin6: 67.000000 +VeloTracksEtaBin1: 13.000000 +VeloTracksEtaBin2: 17.000000 +VeloTracksEtaBin3: 17.000000 +VeloTracksEtaBin4: 55.000000 +VeloTracksEtaBin5: 40.000000 +VeloTracksEtaBin6: 48.000000 VeloTracksEtaBin7: 12.000000 -VeloVertexX: 0.007324 -VeloVertexY: 0.096436 -VeloVertexZ: -33.508299 -VeloVertices: 2.000000 +VeloVertexX: 0.961304 +VeloVertexY: 0.169678 +VeloVertexZ: -11.962890 +VeloVertices: 3.000000 encodingKey: 492876054.000000 -------------- Event 9516 -------------- BCIDHigh: 0.000000 @@ -590,7 +590,7 @@ ECalETMiddleTop: 5349.999920 ECalETOuterBottom: 9369.999860 ECalETOuterTop: 13969.999792 ECalEtot: 546746.654634 -FiducialVeloVertices: 2.000000 +FiducialVeloVertices: 3.000000 GEC: 1.000000 MuonHitsM2R1: 1023.000000 MuonHitsM2R2: 1023.000000 @@ -624,20 +624,20 @@ VeloClustersOuterBin00: 90.000000 VeloClustersOuterBin01: 151.000000 VeloClustersOuterBin02: 116.000000 VeloClustersOuterBin03: 133.000000 -VeloFiducialTracks: 211.000000 -VeloTracks: 228.000000 -VeloTracksEtaBin0: 9.000000 -VeloTracksEtaBin1: 34.000000 -VeloTracksEtaBin2: 22.000000 -VeloTracksEtaBin3: 25.000000 +VeloFiducialTracks: 202.000000 +VeloTracks: 225.000000 +VeloTracksEtaBin0: 10.000000 +VeloTracksEtaBin1: 36.000000 +VeloTracksEtaBin2: 21.000000 +VeloTracksEtaBin3: 39.000000 VeloTracksEtaBin4: 41.000000 -VeloTracksEtaBin5: 45.000000 -VeloTracksEtaBin6: 46.000000 +VeloTracksEtaBin5: 38.000000 +VeloTracksEtaBin6: 34.000000 VeloTracksEtaBin7: 6.000000 -VeloVertexX: -0.040894 -VeloVertexY: -0.032959 -VeloVertexZ: 40.893553 -VeloVertices: 2.000000 +VeloVertexX: 1.308594 +VeloVertexY: 0.125122 +VeloVertexZ: 10.925292 +VeloVertices: 3.000000 encodingKey: 3104473477.000000 -------------- Event 13890 -------------- BCIDHigh: 0.000000 @@ -651,7 +651,7 @@ ECalETMiddleTop: 11224.999833 ECalETOuterBottom: 15934.999763 ECalETOuterTop: 15349.999771 ECalEtot: 795532.274547 -FiducialVeloVertices: 4.000000 +FiducialVeloVertices: 2.000000 GEC: 1.000000 MuonHitsM2R1: 1023.000000 MuonHitsM2R2: 1023.000000 @@ -685,20 +685,20 @@ VeloClustersOuterBin00: 77.000000 VeloClustersOuterBin01: 173.000000 VeloClustersOuterBin02: 197.000000 VeloClustersOuterBin03: 127.000000 -VeloFiducialTracks: 263.000000 -VeloTracks: 275.000000 +VeloFiducialTracks: 259.000000 +VeloTracks: 271.000000 VeloTracksEtaBin0: 5.000000 -VeloTracksEtaBin1: 37.000000 -VeloTracksEtaBin2: 35.000000 -VeloTracksEtaBin3: 18.000000 -VeloTracksEtaBin4: 63.000000 -VeloTracksEtaBin5: 59.000000 -VeloTracksEtaBin6: 47.000000 +VeloTracksEtaBin1: 41.000000 +VeloTracksEtaBin2: 37.000000 +VeloTracksEtaBin3: 27.000000 +VeloTracksEtaBin4: 59.000000 +VeloTracksEtaBin5: 54.000000 +VeloTracksEtaBin6: 37.000000 VeloTracksEtaBin7: 11.000000 -VeloVertexX: 0.024414 -VeloVertexY: -0.050659 -VeloVertexZ: -36.865233 -VeloVertices: 4.000000 +VeloVertexX: 1.112061 +VeloVertexY: 0.112305 +VeloVertexZ: -60.974118 +VeloVertices: 2.000000 encodingKey: 3104473477.000000 -------------- Event 15450 -------------- BCIDHigh: 0.000000 @@ -773,7 +773,7 @@ ECalETMiddleTop: 9054.999865 ECalETOuterBottom: 12889.999808 ECalETOuterTop: 25864.999615 ECalEtot: 778860.609527 -FiducialVeloVertices: 3.000000 +FiducialVeloVertices: 5.000000 GEC: 1.000000 MuonHitsM2R1: 1023.000000 MuonHitsM2R2: 1023.000000 @@ -807,20 +807,20 @@ VeloClustersOuterBin00: 98.000000 
VeloClustersOuterBin01: 272.000000 VeloClustersOuterBin02: 233.000000 VeloClustersOuterBin03: 145.000000 -VeloFiducialTracks: 313.000000 -VeloTracks: 330.000000 -VeloTracksEtaBin0: 15.000000 -VeloTracksEtaBin1: 39.000000 -VeloTracksEtaBin2: 38.000000 -VeloTracksEtaBin3: 40.000000 -VeloTracksEtaBin4: 73.000000 -VeloTracksEtaBin5: 69.000000 -VeloTracksEtaBin6: 52.000000 +VeloFiducialTracks: 321.000000 +VeloTracks: 339.000000 +VeloTracksEtaBin0: 17.000000 +VeloTracksEtaBin1: 38.000000 +VeloTracksEtaBin2: 44.000000 +VeloTracksEtaBin3: 53.000000 +VeloTracksEtaBin4: 74.000000 +VeloTracksEtaBin5: 66.000000 +VeloTracksEtaBin6: 43.000000 VeloTracksEtaBin7: 4.000000 -VeloVertexX: 0.004272 -VeloVertexY: 0.064697 -VeloVertexZ: 32.165526 -VeloVertices: 3.000000 +VeloVertexX: 1.207886 +VeloVertexY: 0.264282 +VeloVertexZ: 30.700682 +VeloVertices: 5.000000 encodingKey: 3104473477.000000 -------------- Event 18514 -------------- BCIDHigh: 0.000000 @@ -834,7 +834,7 @@ ECalETMiddleTop: 3104.999954 ECalETOuterBottom: 9309.999861 ECalETOuterTop: 9649.999856 ECalEtot: 353973.031587 -FiducialVeloVertices: 1.000000 +FiducialVeloVertices: 4.000000 GEC: 1.000000 MuonHitsM2R1: 1023.000000 MuonHitsM2R2: 1023.000000 @@ -868,20 +868,20 @@ VeloClustersOuterBin00: 59.000000 VeloClustersOuterBin01: 111.000000 VeloClustersOuterBin02: 95.000000 VeloClustersOuterBin03: 99.000000 -VeloFiducialTracks: 146.000000 -VeloTracks: 155.000000 -VeloTracksEtaBin0: 9.000000 -VeloTracksEtaBin1: 23.000000 +VeloFiducialTracks: 152.000000 +VeloTracks: 166.000000 +VeloTracksEtaBin0: 10.000000 +VeloTracksEtaBin1: 25.000000 VeloTracksEtaBin2: 22.000000 -VeloTracksEtaBin3: 16.000000 -VeloTracksEtaBin4: 30.000000 -VeloTracksEtaBin5: 35.000000 -VeloTracksEtaBin6: 16.000000 +VeloTracksEtaBin3: 24.000000 +VeloTracksEtaBin4: 37.000000 +VeloTracksEtaBin5: 27.000000 +VeloTracksEtaBin6: 17.000000 VeloTracksEtaBin7: 4.000000 -VeloVertexX: -0.039673 -VeloVertexY: 0.023193 -VeloVertexZ: 71.716305 -VeloVertices: 1.000000 +VeloVertexX: 1.179810 +VeloVertexY: 0.062866 +VeloVertexZ: 58.349607 +VeloVertices: 4.000000 encodingKey: 3104473477.000000 -------------- Event 20355 -------------- BCIDHigh: 0.000000 @@ -1228,7 +1228,7 @@ ECalETMiddleTop: 20054.999701 ECalETOuterBottom: 32239.999520 ECalETOuterTop: 41504.999382 ECalEtot: 1832368.909254 -FiducialVeloVertices: 2.000000 +FiducialVeloVertices: 6.000000 GEC: 1.000000 MuonHitsM2R1: 1023.000000 MuonHitsM2R2: 1023.000000 @@ -1262,20 +1262,20 @@ VeloClustersOuterBin00: 139.000000 VeloClustersOuterBin01: 297.000000 VeloClustersOuterBin02: 289.000000 VeloClustersOuterBin03: 301.000000 -VeloFiducialTracks: 475.000000 -VeloTracks: 504.000000 -VeloTracksEtaBin0: 10.000000 -VeloTracksEtaBin1: 46.000000 -VeloTracksEtaBin2: 53.000000 -VeloTracksEtaBin3: 37.000000 -VeloTracksEtaBin4: 106.000000 -VeloTracksEtaBin5: 126.000000 -VeloTracksEtaBin6: 105.000000 -VeloTracksEtaBin7: 21.000000 -VeloVertexX: 0.103149 -VeloVertexY: 0.038452 -VeloVertexZ: -30.090331 -VeloVertices: 2.000000 +VeloFiducialTracks: 467.000000 +VeloTracks: 496.000000 +VeloTracksEtaBin0: 11.000000 +VeloTracksEtaBin1: 50.000000 +VeloTracksEtaBin2: 56.000000 +VeloTracksEtaBin3: 48.000000 +VeloTracksEtaBin4: 104.000000 +VeloTracksEtaBin5: 117.000000 +VeloTracksEtaBin6: 88.000000 +VeloTracksEtaBin7: 22.000000 +VeloVertexX: 1.080322 +VeloVertexY: 0.236816 +VeloVertexZ: -52.307126 +VeloVertices: 6.000000 encodingKey: 3104473477.000000 -------------- Event 28880 -------------- BCIDHigh: 0.000000 @@ -1289,7 +1289,7 @@ ECalETMiddleTop: 2164.999968 
ECalETOuterBottom: 1814.999973 ECalETOuterTop: 3459.999948 ECalEtot: 130524.744303 -FiducialVeloVertices: 0.000000 +FiducialVeloVertices: 1.000000 GEC: 1.000000 MuonHitsM2R1: 1023.000000 MuonHitsM2R2: 1023.000000 @@ -1323,20 +1323,20 @@ VeloClustersOuterBin00: 24.000000 VeloClustersOuterBin01: 56.000000 VeloClustersOuterBin02: 43.000000 VeloClustersOuterBin03: 58.000000 -VeloFiducialTracks: 81.000000 -VeloTracks: 84.000000 +VeloFiducialTracks: 80.000000 +VeloTracks: 83.000000 VeloTracksEtaBin0: 2.000000 VeloTracksEtaBin1: 10.000000 VeloTracksEtaBin2: 5.000000 -VeloTracksEtaBin3: 9.000000 -VeloTracksEtaBin4: 23.000000 -VeloTracksEtaBin5: 16.000000 -VeloTracksEtaBin6: 14.000000 -VeloTracksEtaBin7: 5.000000 -VeloVertexX: 0.000000 -VeloVertexY: 0.000000 -VeloVertexZ: 0.000000 -VeloVertices: 0.000000 +VeloTracksEtaBin3: 11.000000 +VeloTracksEtaBin4: 24.000000 +VeloTracksEtaBin5: 14.000000 +VeloTracksEtaBin6: 13.000000 +VeloTracksEtaBin7: 4.000000 +VeloVertexX: 1.198730 +VeloVertexY: 0.277710 +VeloVertexZ: 3.295898 +VeloVertices: 1.000000 encodingKey: 3104473477.000000 HltLumiWriter INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | -- GitLab From 80dead6eb29ed212d8ad32ab3f331c5041076be2 Mon Sep 17 00:00:00 2001 From: Roel Aaij <roel.aaij@cern.ch> Date: Thu, 25 Jul 2024 11:01:57 +0200 Subject: [PATCH 04/35] Fast run change for HLT1 --- AllenOnline/CMakeLists.txt | 11 + AllenOnline/application/change_mep.cpp | 196 ++++++++++++++++++ AllenOnline/application/read_mep.cpp | 48 ++--- .../include/AllenOnline/TransposeMEP.h | 2 +- AllenOnline/options/AllenConfig.py | 10 +- AllenOnline/src/AllenApplication.cpp | 10 +- AllenOnline/src/AllenApplication.h | 2 +- AllenOnline/src/AllenConfiguration.cpp | 12 +- AllenOnline/src/AllenConfiguration.h | 15 +- AllenOnline/src/MEPProvider.cpp | 25 +-- AllenOnline/src/TransposeMEP.cpp | 11 +- AllenOnline/tests/options/check_duplicates.py | 4 +- AllenOnline/tests/qmtest/mep_lumi.qmt | 5 +- MooreScripts/options/HLT1MBMSlim.opts | 2 +- .../python/MooreScripts/testbench/emulator.py | 58 +++--- .../testbench/scenarios/default.py | 33 +-- MooreScripts/scripts/testbench.py | 11 + MooreScripts/tests/options/HLT1/Arch.xml | 1 + MooreScripts/tests/options/HLT1Slim/Arch.xml | 3 +- .../options/HLT1Slim/check_run_change.py | 140 +++++++++++++ .../update_tck_for_run_change_test.py | 29 +++ .../tests/qmtest/check_run_change.qmt | 27 +++ .../tests/qmtest/create_run_change_tck.qmt | 24 +++ MooreScripts/tests/qmtest/hlt1runchange.qmt | 41 ++++ 24 files changed, 600 insertions(+), 120 deletions(-) create mode 100644 AllenOnline/application/change_mep.cpp create mode 100644 MooreScripts/tests/options/HLT1Slim/check_run_change.py create mode 100644 MooreScripts/tests/options/HLT1Slim/update_tck_for_run_change_test.py create mode 100644 MooreScripts/tests/qmtest/check_run_change.qmt create mode 100644 MooreScripts/tests/qmtest/create_run_change_tck.qmt create mode 100644 MooreScripts/tests/qmtest/hlt1runchange.qmt diff --git a/AllenOnline/CMakeLists.txt b/AllenOnline/CMakeLists.txt index fa2ff3636..50dbf1e90 100644 --- a/AllenOnline/CMakeLists.txt +++ b/AllenOnline/CMakeLists.txt @@ -61,6 +61,7 @@ gaudi_add_executable(allen_read_mep AllenOnlineLib LHCb::MDFLib Boost::program_options + fmt::fmt ) gaudi_add_executable(allen_bench_mep_offsets @@ -93,6 +94,16 @@ gaudi_add_executable(allen_mpi_send Boost::program_options ) +gaudi_add_executable(allen_change_mep + SOURCES + application/change_mep.cpp + LINK + AllenOnlineLib + LHCb::MDFLib + 
Boost::program_options + fmt::fmt +) + # MPI if (TARGET MPI::MPI_CXX AND MPI_CXX_COMPILER AND TARGET PkgConfig::hwloc) message(STATUS "Found MPI ${MPI_CXX_VERSION}: ${MPI_CXX_COMPILER}") diff --git a/AllenOnline/application/change_mep.cpp b/AllenOnline/application/change_mep.cpp new file mode 100644 index 000000000..f18e2a54d --- /dev/null +++ b/AllenOnline/application/change_mep.cpp @@ -0,0 +1,196 @@ +/*****************************************************************************\ +* (c) Copyright 2018-2020 CERN for the benefit of the LHCb Collaboration * +\*****************************************************************************/ +#include <cassert> +#include <cstring> +#include <fstream> +#include <iomanip> +#include <iostream> +#include <map> +#include <string> +#include <unordered_set> + +#include <fcntl.h> +#include <sys/stat.h> +#include <sys/types.h> +#include <unistd.h> + +#include <fmt/format.h> + +#include <boost/lexical_cast.hpp> +#include <boost/program_options.hpp> + +#include <GaudiKernel/Bootstrap.h> +#include <GaudiKernel/IAppMgrUI.h> +#include <GaudiKernel/IProperty.h> +#include <GaudiKernel/IStateful.h> +#include <GaudiKernel/ISvcLocator.h> +#include <GaudiKernel/MsgStream.h> +#include <GaudiKernel/SmartIF.h> + +#include <Event/ODIN.h> +#include <Event/RawBank.h> + +#include <Allen/read_mdf.hpp> + +#include <MDF/StreamDescriptor.h> + +#include <EventBuilding/MEP_tools.hpp> +#include <EventBuilding/MFP_tools.hpp> + +#include <AllenOnline/ReadMEP.h> +#include <AllenOnline/TransposeMEP.h> + +using namespace std; +namespace po = boost::program_options; + +int main( int argc, char* argv[] ) { + + string filename, output_filename; + std::string tck_str; + ssize_t n_meps = 0; + unsigned run; + + // Declare the supported options. + po::options_description desc( "Allowed options" ); + // clang-format off + desc.add_options() + ( "help,h", "produce help message" ) + ( "filename", po::value<string>( &filename ), "filename pattern" ) + ( "output", po::value<string>( &output_filename ), "output filename" ) + ( "n_meps,n", po::value<ssize_t>( &n_meps ), "number of MEPs" ) + ( "tck", po::value<std::string>( &tck_str ), "new TCK" ) + ( "run", po::value<unsigned>( &run ), "new run number" ); + // clang-format on + + po::positional_options_description p; + p.add( "filename", 1 ); + p.add( "n_meps", 1 ); + p.add( "output", 1 ); + + po::variables_map vm; + po::store( po::command_line_parser( argc, argv ).options( desc ).positional( p ).run(), vm ); + po::notify( vm ); + + if ( vm.count( "help" ) ) { + std::cout << desc << "\n"; + return 1; + } + + unsigned tck = std::stoi( tck_str, nullptr, 16 ); + + SmartIF<IStateful> app = Gaudi::createApplicationMgr(); + auto prop = app.as<IProperty>(); + bool sc = prop->setProperty( "JobOptionsType", "\"NONE\"" ).isSuccess(); + sc &= app->configure(); + sc &= app->initialize(); + sc &= app->start(); + if ( !sc ) { return 1; } + SmartIF<ISvcLocator> sloc = app.as<ISvcLocator>(); + auto msgSvc = sloc->service<IMessageSvc>( "MessageSvc" ); + MsgStream info{msgSvc.get(), "allen_mep_read"}; + info.activate(); + + auto gaudi_exit = [&app]( int code ) { + bool sc = app->stop().isSuccess(); + sc &= app->finalize(); + return code & !sc; + }; + + // Some storage for reading the events into + bool eof = false, success = false; + + auto input = LHCb::StreamDescriptor::bind( filename ); + if ( input.ioDesc != 0 ) { + info << "Opened " << filename << endmsg; + } else { + info << "Failed to open file " << filename << " " << strerror( errno ) << endmsg; + return 
gaudi_exit( 1 ); + } + + vector<char> data; + + EventIDs event_ids; + + MEP::Slices mep_slices( 1 ); + auto& slice = mep_slices[0]; + + for ( ssize_t i_mep = 0; ( n_meps == -1 || i_mep < n_meps ) && !eof; ++i_mep ) { + + std::tie( eof, success, slice.mep, slice.packing_factor, slice.mep_data ) = MEP::read_mep( input, data, info ); + auto const* mep = slice.mep; + if ( !success ) { + return gaudi_exit( 1 ); + } else { + std::cout << "Read mep with packing factor " << slice.packing_factor << " #MFPs: " << mep->header.n_MFPs << "\n"; + } + + if ( i_mep == 0 ) { + slice.blocks.resize( mep->header.n_MFPs, MEP::Blocks::value_type{} ); + slice.offsets.resize( mep->header.n_MFPs ); + for ( auto& offsets : slice.offsets ) { offsets.resize( slice.packing_factor + 1 ); } + } + + std::function<void( size_t )> bad_mfp = []( size_t source_id ) { + auto const* sd = SourceId_sysstr( source_id ); + std::cout << "ERROR: bad MFP for " << sd << " with source ID " << source_id << "\n"; + }; + auto odin_block_index = MEP::find_blocks( mep, slice.blocks, bad_mfp ); + if ( !odin_block_index ) { + std::cout << "ERROR: No ODIN MFP" + << "\n"; + return gaudi_exit( 1 ); + } + + MEP::fragment_offsets( slice.blocks, slice.offsets ); + + auto const& odin_block = slice.blocks[*odin_block_index]; + std::cout << "MEP with packing: " << std::setw( 4 ) << odin_block.header->n_banks << " event_id: " << std::setw( 6 ) + << odin_block.header->ev_id << "\n"; + + // Decode first ODIN + auto const& odin_offsets = slice.offsets[*odin_block_index]; + auto decode_odin = [version = odin_block.header->block_version, &odin_block, + &odin_offsets]( unsigned event_number ) { + auto const* odin_data = reinterpret_cast<unsigned const*>( odin_block.payload + odin_offsets[event_number] ); + LHCb::ODIN odin; + if ( version == 7 ) { + odin = LHCb::ODIN{{odin_data, 10}}; + } else { + odin = LHCb::ODIN::from_version<6>( {odin_data, 10} ); + } + return odin; + }; + + auto first_odin = decode_odin( 0 ); + std::cout << "ODIN version: " << static_cast<unsigned>( first_odin.version() ) << " run: " << std::setw( 7 ) + << first_odin.runNumber() << " event: " << std::setw( 12 ) << first_odin.eventNumber() << "\n"; + if ( !tck_str.empty() ) { + std::cout << "Changing TCK from " << fmt::format( "{:#010x}", first_odin.triggerConfigurationKey() ) << " to " + << fmt::format( "{:#010x}", tck ) << "\n"; + } + if ( run != 0 ) { + std::cout << "Changing run from " << std::to_string( first_odin.runNumber() ) << " to " << std::to_string( run ) + << "\n"; + } + + char* payload = const_cast<char*>( odin_block.payload ); + for ( unsigned evt = 0; evt < slice.packing_factor; ++evt ) { + auto* odin_data = reinterpret_cast<unsigned*>( payload + odin_offsets[evt] ); + using namespace LHCb::ODINImplementation; + if ( !tck_str.empty() ) { + details::set_bits<LHCb::ODIN::TriggerConfigurationKeySize, LHCb::ODIN::TriggerConfigurationKeyOffset>( + {odin_data, 10}, tck ); + } + if ( run != 0 ) { + details::set_bits<LHCb::ODIN::RunNumberSize, LHCb::ODIN::RunNumberOffset>( {odin_data, 10}, run ); + } + } + } + + std::ofstream output_file{output_filename, std::ios::binary}; + output_file.write( slice.mep_data.data(), slice.mep_data.size() ); + output_file.close(); + + return gaudi_exit( 0 ); +} diff --git a/AllenOnline/application/read_mep.cpp b/AllenOnline/application/read_mep.cpp index 30638cb7f..ee4406056 100644 --- a/AllenOnline/application/read_mep.cpp +++ b/AllenOnline/application/read_mep.cpp @@ -15,6 +15,8 @@ #include <sys/types.h> #include <unistd.h> +#include 
<fmt/format.h> + #include <boost/algorithm/string.hpp> #include <boost/format.hpp> #include <boost/lexical_cast.hpp> @@ -80,19 +82,6 @@ namespace { {{'P', 0x02, 0}, {'N', 0x04, 1}, {'L', 0x08, 2}, {'1', 0x10, 3}, {'2', 0x20, 4}}}; } -std::vector<char> contiguous_mfps( Allen::Slice const& mep_data ) { - // To make direct use of the offsets, the MFPs need to be copied - // into temporary storage - auto const& mfps = mep_data.fragments; - vector<char> mep_fragments( mep_data.fragments_mem_size, '\0' ); - char* destination = &mep_fragments[0]; - for ( gsl::span<char const> mfp : mfps ) { - ::memcpy( destination, mfp.data(), mfp.size_bytes() ); - destination += mfp.size_bytes(); - } - return mep_fragments; -} - int main( int argc, char* argv[] ) { string filename; @@ -102,12 +91,15 @@ int main( int argc, char* argv[] ) { // Declare the supported options. po::options_description desc( "Allowed options" ); - desc.add_options()( "help,h", "produce help message" )( "filename", po::value<string>( &filename ), - "filename pattern" )( - "n_meps,n", po::value<ssize_t>( &n_meps ), - "number of events" )( "skip,s", po::value<size_t>( &n_skip )->default_value( 0 ), "number of events to skip" )( - "dump", po::value<string>( &dump ), "dump bank content (source_id_type,start_event[,end_event],bank_number" )( - "count-banks", "count raw banks by bank type" ); + // clang-format off + desc.add_options() + ( "help,h", "produce help message" ) + ( "filename", po::value<string>( &filename ), "filename pattern" ) + ( "n_meps,n", po::value<ssize_t>( &n_meps ), "number of MEPs" ) + ( "skip,s", po::value<size_t>( &n_skip )->default_value( 0 ), "number of events to skip" ) + ( "dump", po::value<string>( &dump ), "dump bank content (source_id_type,start_event[,end_event],bank_number" ) + ( "count-banks", "count raw banks by bank type" ); + // clang-format on po::positional_options_description p; p.add( "filename", 1 ); @@ -230,8 +222,6 @@ int main( int argc, char* argv[] ) { for ( auto& offsets : slice.offsets ) { offsets.resize( slice.packing_factor + 1 ); } } - if ( n_skip != 0 && n_skip-- > 0 ) continue; - std::function<void( size_t )> bad_mfp = []( size_t source_id ) { auto const* sd = SourceId_sysstr( source_id ); std::cout << "ERROR: bad MFP for " << sd << " with source ID " << source_id << "\n"; @@ -268,7 +258,8 @@ int main( int argc, char* argv[] ) { // Decode first ODIN auto const odin_index = to_integral( BankTypes::ODIN ); auto const& odin_slice = bank_slices[odin_index][0]; - auto odin_banks = contiguous_mfps( odin_slice ); + // There is always only a single ODIN MFP + auto odin_banks = odin_slice.fragments[0]; auto decode_odin = [& versions = banks_version, &odin_slice, &odin_banks]( unsigned event_number ) { auto const& odin_offsets = odin_slice.offsets; @@ -285,8 +276,10 @@ int main( int argc, char* argv[] ) { return odin; }; - auto first_odin = decode_odin( 0 ); - std::cout << "ODIN version: " << first_odin.version() << " run: " << std::setw( 7 ) << first_odin.runNumber() + auto first_odin = decode_odin( 0 ); + auto const tck = first_odin.triggerConfigurationKey(); + std::cout << "ODIN version: " << static_cast<unsigned>( first_odin.version() ) << " TCK " + << fmt::format( "{:#010x}", tck ) << " run: " << std::setw( 7 ) << first_odin.runNumber() << " event: " << std::setw( 12 ) << first_odin.eventNumber() << "\n"; // Print block information @@ -331,7 +324,10 @@ int main( int argc, char* argv[] ) { << padded_size << "\n"; } + size_t skip = n_skip; for ( unsigned evt = 0; evt < slice.packing_factor; ++evt 
) { + if ( skip != 0 && skip-- > 0 ) continue; + // Count bank types and DAQ errors per SD auto bank_type = block.bank_types[evt]; if ( daq_errors.count( static_cast<LHCb::RawBank::BankType>( bank_type ) ) ) { @@ -342,6 +338,8 @@ int main( int argc, char* argv[] ) { if ( dump_block ) { for ( unsigned evt = dump_start; evt < ( dump_end == -1 ? slice.packing_factor : dump_end ); ++evt ) { + if ( n_skip != 0 && n_skip-- > 0 ) continue; + if ( dump_type == "ODIN" ) { auto odin = decode_odin( evt ); @@ -370,6 +368,8 @@ int main( int argc, char* argv[] ) { MDF::dump_hex( block.payload + slice.offsets[i_block][evt], block.bank_sizes[evt] ); } } + } else if ( n_skip != 0 ) { + n_skip -= std::min( n_skip, size_t{slice.packing_factor} ); } } } diff --git a/AllenOnline/include/AllenOnline/TransposeMEP.h b/AllenOnline/include/AllenOnline/TransposeMEP.h index aa36f20ea..4027a126d 100644 --- a/AllenOnline/include/AllenOnline/TransposeMEP.h +++ b/AllenOnline/include/AllenOnline/TransposeMEP.h @@ -80,7 +80,7 @@ namespace MEP { */ std::tuple<bool, std::array<unsigned int, NBankTypes>, std::array<int, NBankTypes>> fill_counts( EB::MEP const* mep ); - bool find_blocks( EB::MEP const* mep, Blocks& blocks, std::function<void( size_t )> const& badMFP ); + std::optional<size_t> find_blocks( EB::MEP const* mep, Blocks& blocks, std::function<void( size_t )> const& badMFP ); void fragment_offsets( Blocks const& blocks, std::vector<std::vector<uint32_t>>& offsets ); diff --git a/AllenOnline/options/AllenConfig.py b/AllenOnline/options/AllenConfig.py index 5fb5b99d1..1e997efd8 100755 --- a/AllenOnline/options/AllenConfig.py +++ b/AllenOnline/options/AllenConfig.py @@ -43,6 +43,7 @@ try: dddb_tag = "run3/" + dddb_tag conddb_tag = OnlineEnv.CondDBTag initial_tck = getattr(OnlineEnv, "InitialTCK", 0) + ignore_odin_tck = int(os.getenv("IGNORE_ODIN_TCK", "0")) except ImportError: run_online = False output_level = 3 @@ -52,6 +53,7 @@ except ImportError: dddb_tag = 'run3/trunk' conddb_tag = 'master' initial_tck = 0 + ignore_odin_tck = 1 integration_test = False @@ -173,6 +175,7 @@ allen_conf.Partition = partition allen_conf.PartitionBuffers = True allen_conf.PartitionID = partition_id allen_conf.EnableRunChanges = UseDD4Hep +allen_conf.TCKFromODIN = not ignore_odin_tck if run_online: from Configurables import Allen__MBMOutput as MBMOutput @@ -256,10 +259,9 @@ messageSvc.OutputLevel = 3 # Add the services that will produce the non-event-data monSink = OnlMonitorSink( - CountersToPublish=[("Bursts", "IN"), ("Bursts", "OUT"), ("Events", "IN"), - ("Events", "OUT"), ("Events", "MB_IN"), - ("Events", "MB_OUT"), ("gather_selections", - "Hlt1.*Pass"), + CountersToPublish=[("Bursts", ".*"), ("Events", ".*"), ("MBMOutput", ".*"), + ("EventLoop", ".*"), ("gather_selections", + "Hlt1.*Pass"), ("gather_selections", "Hlt1.*Rate"), ("velo_consolidate_tracks_ca09ac3f", "n_velo_tracks"), ("scifi_consolidate_seeds_7b68a248", "n_seed_tracks"), diff --git a/AllenOnline/src/AllenApplication.cpp b/AllenOnline/src/AllenApplication.cpp index 672fc36ee..a78430856 100644 --- a/AllenOnline/src/AllenApplication.cpp +++ b/AllenOnline/src/AllenApplication.cpp @@ -198,7 +198,7 @@ int AllenApplication::configureApplication() { return Online::ONLINE_ERROR; } - std::string const& sequence = m_allenConfig->sequence(); + auto const& [sequence, source] = m_allenConfig->sequence(); if ( sequence.empty() ) { m_logger->error( "Failed to obtain sequence" ); return Online::ONLINE_ERROR; @@ -211,6 +211,7 @@ int AllenApplication::configureApplication() { {"params", 
m_allenConfig->paramDir}, {"device", m_allenConfig->device.value()}, {"s", std::to_string( m_nSlices )}, + {"tck-from-odin", std::to_string( m_allenConfig->tckFromODIN.value() )}, {"disable-run-changes", std::to_string( !m_allenConfig->runChanges.value() )}, {"monitoring-filename", ""}}; @@ -223,7 +224,7 @@ int AllenApplication::configureApplication() { m_allenControl = m_zmqSvc->socket( zmq::PAIR ); m_allenControl->bind( m_controlConnection.c_str() ); - m_allenThread = std::thread{&AllenApplication::allenLoop, this, std::move( sequence )}; + m_allenThread = std::thread{&AllenApplication::allenLoop, this, sequence, source}; zmq::pollitem_t items[] = {{*m_allenControl, 0, zmq::POLLIN, 0}}; m_zmqSvc->poll( &items[0], 1, -1 ); @@ -292,8 +293,9 @@ int AllenApplication::continueProcessing() { return OnlineApplication::continueProcessing(); } -void AllenApplication::allenLoop( std::string config ) { - auto status = allen( m_options, config, this, m_provider, m_output, m_zmqSvc.get(), m_controlConnection ); +void AllenApplication::allenLoop( std::string_view config, std::string_view config_source ) { + auto status = + allen( m_options, config, config_source, this, m_provider, m_output, m_zmqSvc.get(), m_controlConnection ); if ( status != 0 ) { m_logger->error( "Allen event loop exited with error" ); error(); diff --git a/AllenOnline/src/AllenApplication.h b/AllenOnline/src/AllenApplication.h index 7f070fb6d..4c0f6c3bb 100644 --- a/AllenOnline/src/AllenApplication.h +++ b/AllenOnline/src/AllenApplication.h @@ -61,7 +61,7 @@ public: int continueProcessing() override; // Main function running the Allen event loop - void allenLoop( std::string config ); + void allenLoop( std::string_view config, std::string_view config_source ); bool initMPI(); diff --git a/AllenOnline/src/AllenConfiguration.cpp b/AllenOnline/src/AllenConfiguration.cpp index b4e2cb214..ed133b80c 100644 --- a/AllenOnline/src/AllenConfiguration.cpp +++ b/AllenOnline/src/AllenConfiguration.cpp @@ -29,12 +29,15 @@ StatusCode AllenConfiguration::queryInterface( const InterfaceID& riid, void** p AllenConfiguration::AllenConfiguration( std::string name, ISvcLocator* svcloc ) : Service( name, svcloc ) {} -std::string AllenConfiguration::sequence() const { - if ( !m_sequence.empty() ) { return m_sequence; } +std::tuple<std::string, std::string> AllenConfiguration::sequence() const { + if ( !m_sequence.empty() ) { return {m_sequence, "json"}; } + + std::string source; if ( !m_tck.empty() ) { // Load from TCK - auto repo = m_json.value(); + auto repo = m_json.value(); + source = repo + ":" + m_tck; LHCb::TCK::Info tck_info{}; std::tie( m_sequence, tck_info ) = Allen::sequence_from_git( repo, m_tck ); @@ -57,9 +60,10 @@ std::string AllenConfiguration::sequence() const { } else { info() << "Configuring Allen from JSON file " << m_json << endmsg; m_sequence = std::string{std::istreambuf_iterator<char>{sequence_file}, std::istreambuf_iterator<char>{}}; + source = m_json.value(); } } - return m_sequence; + return {m_sequence, source}; } AllenConfiguration::~AllenConfiguration() {} diff --git a/AllenOnline/src/AllenConfiguration.h b/AllenOnline/src/AllenConfiguration.h index a88ed27de..74007e443 100644 --- a/AllenOnline/src/AllenConfiguration.h +++ b/AllenOnline/src/AllenConfiguration.h @@ -12,6 +12,7 @@ #include <filesystem> +#include <Allen/Provider.h> #include <GaudiKernel/Service.h> class AllenConfiguration : public Service { @@ -29,7 +30,7 @@ public: virtual ~AllenConfiguration(); - std::string sequence() const; + std::tuple<std::string, 
std::string> sequence() const; Gaudi::Property<float> stopTimeout{this, "StopTimeout", 5.}; Gaudi::Property<unsigned int> nThreads{this, "NThreads", 8}; @@ -51,6 +52,7 @@ public: Gaudi::Property<unsigned> partitionID{this, "PartitionID", 0}; Gaudi::Property<bool> partitionBuffers{this, "PartitionBuffers", true}; Gaudi::Property<std::string> partition{this, "Partition", ""}; + Gaudi::Property<bool> tckFromODIN{this, "TCKFromODIN", true}; private: static std::string resolveEnvVars( std::string s ) { @@ -70,12 +72,11 @@ private: return; } - auto const json_path = resolveEnvVars( m_json.value() ); - std::regex tck_option{"([^:]+):(0x[a-fA-F0-9]{8})"}; - std::smatch tck_match; - if ( std::regex_match( json_path, tck_match, tck_option ) ) { - std::filesystem::path j{tck_match.str( 1 )}; - m_tck = tck_match.str( 2 ); + auto const json_path = resolveEnvVars( m_json.value() ); + auto [from_tck, repo, tck] = Allen::config_from_tck( json_path ); + if ( from_tck ) { + std::filesystem::path j{repo}; + m_tck = tck; if ( !std::filesystem::exists( j ) || !std::filesystem::is_directory( j ) ) { throw GaudiException{"Git repository " + json_path + " does not exist or is not a directory", name(), StatusCode::FAILURE}; diff --git a/AllenOnline/src/MEPProvider.cpp b/AllenOnline/src/MEPProvider.cpp index 8711e47e6..8253a659b 100755 --- a/AllenOnline/src/MEPProvider.cpp +++ b/AllenOnline/src/MEPProvider.cpp @@ -291,15 +291,14 @@ StatusCode MEPProvider::initialize() { } m_allenConfig = config.get(); - std::string const& sequence = m_allenConfig->sequence(); - if ( sequence.empty() ) { - error() << "Failed to obtain sequence" << endmsg; - return StatusCode::FAILURE; - } - if ( m_all_bank_types.value() ) { m_bank_types = DataBankTypes; } else { + auto const& [sequence, source] = m_allenConfig->sequence(); + if ( sequence.empty() ) { + error() << "Failed to obtain sequence" << endmsg; + return StatusCode::FAILURE; + } const auto config_reader = ConfigurationReader( sequence ); m_bank_types = config_reader.configured_bank_types(); } @@ -528,20 +527,6 @@ bool MEPProvider::release_buffers() { if ( bmid != MBM_INV_DESC ) { ::mbm_exclude( bmid ); } m_bmIDs[b] = MBM_INV_DESC; } - - for ( auto& status : m_buffer_status ) { - status.writable = true; - status.work_counter = 0; - status.intervals.clear(); - } - m_buffer_reading = m_buffer_status.begin(); - - for ( size_t i = 0; i < m_slice_free.size(); ++i ) m_slice_free[i] = true; - for ( size_t i = 0; i < m_odins.size(); ++i ) m_odins[i].fill( 0 ); - for ( size_t i = 0; i < m_slice_to_buffer.size(); ++i ) m_slice_to_buffer[i] = {-1, 0, 0}; - - m_transposed.clear(); - return true; } diff --git a/AllenOnline/src/TransposeMEP.cpp b/AllenOnline/src/TransposeMEP.cpp index 744e949ab..cc65b14f9 100644 --- a/AllenOnline/src/TransposeMEP.cpp +++ b/AllenOnline/src/TransposeMEP.cpp @@ -63,18 +63,19 @@ LHCb::ODIN MEP::decode_odin( char const* odin_data, unsigned const offset, unsig } } -bool MEP::find_blocks( EB::MEP const* mep, Blocks& blocks, std::function<void( size_t )> const& bad_mfp ) { +std::optional<size_t> MEP::find_blocks( EB::MEP const* mep, Blocks& blocks, + std::function<void( size_t )> const& bad_mfp ) { // Fill blocks in temporary container - bool found_odin = false; - size_t n_good = 0; + std::optional<size_t> odin_block_index; + size_t n_good = 0; blocks.resize( mep->header.n_MFPs ); for ( size_t i_block = 0; i_block < blocks.size(); ++i_block ) { EB::MFP const* mfp = mep->at( i_block ); if ( mfp->is_header_valid() ) { blocks[n_good] = Block{mfp}; + if ( SourceId_sys( 
mfp->header.src_id ) == SourceIdSys::SourceIdSys_ODIN ) odin_block_index = n_good; ++n_good; - if ( SourceId_sys( mfp->header.src_id ) == SourceIdSys::SourceIdSys_ODIN ) found_odin = true; } else { bad_mfp( mep->header.src_ids()[i_block] ); } @@ -89,7 +90,7 @@ bool MEP::find_blocks( EB::MEP const* mep, Blocks& blocks, std::function<void( s []( size_t s, const MEP::Block& b ) { return s + b.header->bytes(); } ); assert( total_block_size <= mep->bytes() ); #endif - return found_odin; + return odin_block_index; } void MEP::fragment_offsets( MEP::Blocks const& blocks, MEP::SourceOffsets& offsets ) { diff --git a/AllenOnline/tests/options/check_duplicates.py b/AllenOnline/tests/options/check_duplicates.py index 4ef61695d..5d454b2d4 100644 --- a/AllenOnline/tests/options/check_duplicates.py +++ b/AllenOnline/tests/options/check_duplicates.py @@ -47,11 +47,9 @@ if not UseDD4Hep: app.DDDBtag = "upgrade/dddb-20221004" app.CondDBtag = "upgrade/mu_VP_SciFi_macromicrosurvey_from20220923" else: - app.DDDBtag = "trunk" + app.DDDBtag = "run3/trunk" app.CondDBtag = "master" -# Decode VP, UT, FT and muons - check_seq = GaudiSequencer("CheckODINSeq") unpack_raw = UnpackRawEvent( diff --git a/AllenOnline/tests/qmtest/mep_lumi.qmt b/AllenOnline/tests/qmtest/mep_lumi.qmt index 59f573f9a..7dcb45576 100644 --- a/AllenOnline/tests/qmtest/mep_lumi.qmt +++ b/AllenOnline/tests/qmtest/mep_lumi.qmt @@ -16,7 +16,7 @@ <argument name="args"><set> <text>${BINARYDUMPERSROOT}/options/allen.py</text> <text>--monitoring-filename</text><text>mep_lumi.root</text> - <text>-m</text><text>1000</text> + <text>-m</text><text>300</text> <text>-t</text><text>1</text> <text>--events-per-slice</text><text>1000</text> <text>--tags</text><text>detdesc:upgrade/dddb-20221004,upgrade/mu_VP_SciFi_macromicrosurvey_from20220923|dd4hep:run3/trunk,master</text> @@ -24,6 +24,7 @@ <text>--mep</text><text>mdf:root://eoslhcb.cern.ch///eos/lhcb/wg/rta/samples/data/289232-LHCb-MEP/bu_289232_LHCb_ECEB01_BU_0.mep</text> <text>--output-file</text><text>mep_lumi.mdf</text> <text>--register-monitoring-counters</text><text>0</text> + <text>--real-data</text> </set></argument> <argument name="prerequisites"><set> <tuple><text>generate_passthrough_sequence</text><enumeral>PASS</enumeral></tuple> @@ -33,7 +34,7 @@ </set></argument> <argument name="use_temp_dir"><enumeral>true</enumeral></argument> <argument name="reference"><text>../refs/mep_lumi.ref</text></argument> - <argument name="timeout"><integer>3000</integer></argument> + <argument name="timeout"><integer>100</integer></argument> <argument name="validator"><text> from Allen.qmtest.exclusions import preprocessor diff --git a/MooreScripts/options/HLT1MBMSlim.opts b/MooreScripts/options/HLT1MBMSlim.opts index a1df271c7..7167d60dc 100644 --- a/MooreScripts/options/HLT1MBMSlim.opts +++ b/MooreScripts/options/HLT1MBMSlim.opts @@ -6,7 +6,7 @@ #include "$INFO_OPTIONS" #include "$FARMCONFIGROOT/options/Logging.opts" // The minimum buffer size that will fit a 2022 MEP is 1200000 kB -OnlineEnv.MBM_setup = "-s=1200000 -e=50 -u=50 -b=18 -t=1 -y -i=Events -f -c -s=100000 -e=50 -u=15 -b=12 -t=1 -y -i=Output -f -c"; +OnlineEnv.MBM_setup = "-s=1600000 -e=50 -u=50 -b=18 -t=1 -y -i=Events -f -c -s=100000 -e=50 -u=15 -b=12 -t=1 -y -i=Output -f -c"; // Manager.Setup = {"Dataflow_MBMServer/MEPManager"}; // diff --git a/MooreScripts/python/MooreScripts/testbench/emulator.py b/MooreScripts/python/MooreScripts/testbench/emulator.py index 0bf2f38a9..62e29ee45 100644 --- a/MooreScripts/python/MooreScripts/testbench/emulator.py 
+++ b/MooreScripts/python/MooreScripts/testbench/emulator.py @@ -450,36 +450,38 @@ async def tasks_measure_throughput(tasks, return await measure_throughput(utgids, max_duration, print_throughput) -async def wait_for_output(utgids): - def diff(start, end): - return (end[1] - start[1]) - +async def hlt1_wait_for_output(tasks, prod_svc, proc_svc): async with AsyncExitStack() as stack: - services = [ - stack.enter_context( - asyncdim.DimService(u + "/Writer/EventsOut", "X")) - for u in utgids - ] - # get the first data point per task - meas = [[(await s.get())[1]] for s in services] - log.debug(str(meas)) - da = [] - while len(da) < 4 or da[-1] > 0 or da[-2] > 0: - diffs = [] - for s, m, utgid in zip(services, meas, utgids): - m.extend(await s.get_all()) - log.debug(str(meas)) - diffs.append(diff(m[-2], m[-1])) - log.info( - f"{utgid}: {diffs[-1]:d} events written ({m[-1][1]} events)" - ) - da.append(sum(diffs)) - + task_services = {} + for t in tasks: + task_services[t.utgid] = [ + stack.enter_context( + asyncdim.DimService(f"{t.utgid}/{s}", "X")) + for s in (prod_svc, proc_svc) + ] -async def tasks_wait_for_output(tasks, type_pattern=r".*Writer.*"): - utgids = [t.utgid for t in tasks] - utgids = [u for u in utgids if re.match(type_pattern, u.split("_")[2])] - return await wait_for_output(utgids) + processed = [] + meas = {u: [[] for s in svcs] for u, svcs in task_services.items()} + while True: + for u, svcs in task_services.items(): + for i, s in enumerate(svcs): + meas[u][i].extend( + x[1] for x in await s.get_all() if x[1] is not None) + + log.debug(str(meas)) + done = [] + diff = 0 + for u, (prod_counts, proc_counts) in meas.items(): + latest = min(len(prod_counts), len(proc_counts)) + if latest >= 2: + done.append((prod_counts[latest - 1] - + proc_counts[latest - 1]) <= 0) + diff += (proc_counts[latest - 1] - proc_counts[latest - 2]) + processed.append(diff) + + if done and all(done) and len(processed) > 2 and processed[ + -1] == 0 and processed[-2] == 0: + break async def tasks_get_counter(tasks, counter_name): diff --git a/MooreScripts/python/MooreScripts/testbench/scenarios/default.py b/MooreScripts/python/MooreScripts/testbench/scenarios/default.py index ca25b8a40..b69240d03 100644 --- a/MooreScripts/python/MooreScripts/testbench/scenarios/default.py +++ b/MooreScripts/python/MooreScripts/testbench/scenarios/default.py @@ -19,7 +19,7 @@ from MooreScripts.testbench.emulator import ( tasks_send_command, tasks_wait_for_exit, tasks_measure_throughput, - tasks_wait_for_output, + hlt1_wait_for_output, tasks_wait_for_value, async_input, ) @@ -105,20 +105,23 @@ async def run(tasks: List[emulator.Task], args, extra_argv): n_events_produced = next( v for ts, v in reversed(await dim_prod_out.get_all()) if v is not None) - if "HLT1" in main_tasks[0].utgid: - # if there is a writer, wait for the output rate to be 0 - if any("Writer" in task.utgid for task in tasks): - await tasks_wait_for_output(tasks) - elif "HLT2" in main_tasks[0].utgid and not args.measure_throughput > 0: - log.info(f"Waiting to process all {n_events_produced} events") - n_events_processed = sum(await tasks_wait_for_value( - main_tasks, - "Events/OUT", - lambda vs: sum(vs) >= n_events_produced, - )) - if n_events_processed > n_events_produced: - log.error(f"Produced {n_events_produced} but processed " + - f"more: {n_events_processed}") + if not args.measure_throughput > 0: + if "HLT1" in main_tasks[0].utgid: + log.info(f"Waiting until all events have been processed") + await hlt1_wait_for_output(main_tasks, "Events/OUT", + 
"MBMOutput/NProcessed") + elif "HLT2" in main_tasks[0].utgid: + log.info( + f"Waiting until all {n_events_produced} events have been processed" + ) + n_events_processed = sum(await tasks_wait_for_value( + main_tasks, + "Events/OUT", + lambda vs: sum(vs) >= n_events_produced, + )) + if n_events_processed > n_events_produced: + log.error(f"Produced {n_events_produced} but processed " + + f"more: {n_events_processed}") await tasks_send_command([t for t in tasks if t not in prod_tasks], "stop") await tasks_wait_for_status([t for t in tasks if t not in prod_tasks], diff --git a/MooreScripts/scripts/testbench.py b/MooreScripts/scripts/testbench.py index c42e18c5d..85a071b28 100755 --- a/MooreScripts/scripts/testbench.py +++ b/MooreScripts/scripts/testbench.py @@ -129,6 +129,12 @@ parser.add_argument( help= "Enables writing of the encoding keys by setting env WRITE_ENCODING_KEYS=1.", ) +parser.add_argument( + "--tck-from-odin", + action="store_true", + help= + "Enables writing of the encoding keys by setting env WRITE_ENCODING_KEYS=1.", +) args, unknown_argv = parser.parse_known_args() args.data_dir = args.working_dir / args.data_dir @@ -168,6 +174,11 @@ if args.write_encoding_keys: arch, {"WRITE_ENCODING_KEYS": "1"}, ) +if args.tck_from_odin: + arch = architecture.overwrite_dict_value( + arch, + {"IGNORE_ODIN_TCK": "0"}, + ) task_instance_args = architecture.instance_args(arch, replacements) emulator.check_for_orphans([a["args"][0] for a in task_instance_args]) diff --git a/MooreScripts/tests/options/HLT1/Arch.xml b/MooreScripts/tests/options/HLT1/Arch.xml index 68d34b980..2d7c5c6e3 100644 --- a/MooreScripts/tests/options/HLT1/Arch.xml +++ b/MooreScripts/tests/options/HLT1/Arch.xml @@ -42,6 +42,7 @@ <fmcparam name="define" value="WORKING_DIR=${WORKING_DIR}" /> <fmcparam name="define" value="MBM_SETUP_OPTIONS=${MOORESCRIPTSROOT}/tests/options/HLT1/MBM_setup.opts" /> <fmcparam name="define" value="BIND_NUMA=1" /> + <fmcparam name="define" value="IGNORE_ODIN_TCK=1" /> <timeout action="Any" value="20"/> <timeout action="configure" value="70"/> <timeout action="start" value="80"/> diff --git a/MooreScripts/tests/options/HLT1Slim/Arch.xml b/MooreScripts/tests/options/HLT1Slim/Arch.xml index 4df08473b..25830cece 100644 --- a/MooreScripts/tests/options/HLT1Slim/Arch.xml +++ b/MooreScripts/tests/options/HLT1Slim/Arch.xml @@ -37,11 +37,12 @@ <command>${MOORESCRIPTSROOT}/job/runHLT1.sh</command> <argument name="-type" value="${NAME}"/> <argument name="-runinfo" value="${RUNINFO}"/> - <argument name="-numthreads" value="2" /> + <argument name="-numthreads" value="4" /> <fmcparam name="utgid" value="${PARTITION}_${NODE}_${NAME}_${INSTANCE}"/> <fmcparam name="define" value="BINARY_TAG=${BINARY_TAG}" /> <fmcparam name="define" value="WORKING_DIR=${WORKING_DIR}" /> <fmcparam name="define" value="MBM_SETUP_OPTIONS=${MOORESCRIPTSROOT}/tests/options/HLT1Slim/MBM_setup.opts" /> + <fmcparam name="define" value="IGNORE_ODIN_TCK=1" /> <timeout action="Any" value="20"/> <timeout action="configure" value="70"/> <timeout action="start" value="80"/> diff --git a/MooreScripts/tests/options/HLT1Slim/check_run_change.py b/MooreScripts/tests/options/HLT1Slim/check_run_change.py new file mode 100644 index 000000000..60021818c --- /dev/null +++ b/MooreScripts/tests/options/HLT1Slim/check_run_change.py @@ -0,0 +1,140 @@ +############################################################################### +# (c) Copyright 2000-2018 CERN for the benefit of the LHCb Collaboration # 
+############################################################################### +import os +import sys +import argparse +import glob +from collections import defaultdict +from itertools import chain +from pathlib import Path +from Configurables import LHCbApp +from Configurables import GaudiSequencer +from Configurables import ApplicationMgr +from Configurables import (AuditorSvc, SequencerTimerTool) +from Configurables import IODataManager +from Configurables import createODIN, HltDecReportsDecoder +from Configurables import LHCb__UnpackRawEvent as UnpackRawEvent +from DDDB.CheckDD4Hep import UseDD4Hep +from GaudiConf import IOHelper +from GaudiPython.Bindings import AppMgr, gbl +from PyConf.application import configured_ann_svc +from Allen.tck import property_from_git +from PyConf.filecontent_metadata import metainfo_repos + +parser = argparse.ArgumentParser() +parser.add_argument("tck_repo", nargs=1) +parser.add_argument("hlt1_line", nargs=1) +parser.add_argument("mdfs", nargs='+') +args = parser.parse_args() + +mdfs = args.mdfs +if len(mdfs) == 1: + # Shell expansion of wildcards doesn't work when launching from a + # qmtest, so do the glob here + mdfs = glob.glob(mdfs[0]) + +app = LHCbApp() +app.DataType = "Upgrade" +app.EvtMax = -1 +app.Simulation = not UseDD4Hep + +if not UseDD4Hep: + app.DDDBtag = "upgrade/dddb-20221004" + app.CondDBtag = "upgrade/mu_VP_SciFi_macromicrosurvey_from20220923" +else: + app.DDDBtag = "run3/trunk" + app.CondDBtag = "master" + +check_seq = GaudiSequencer("CheckODINSeq") + +unpack_raw = UnpackRawEvent( + RawEventLocation='DAQ/RawEvent', + RawBankLocations=['DAQ/RawBanks/ODIN', 'DAQ/RawBanks/HltDecReports'], + BankTypes=['ODIN', 'HltDecReports']) + +dec_reports = HltDecReportsDecoder( + RawBanks='DAQ/RawBanks/HltDecReports', + SourceID='Hlt1', + DecoderMapping="TCKANNSvc", + OutputHltDecReportsLocation='Hlt1/DecReports') + +check_seq.Members = [unpack_raw, createODIN(), dec_reports] + +ApplicationMgr().TopAlg = [check_seq] + +IOHelper('MDF').inputFiles(mdfs, clear=True) + +# Use the metainfo repository created by the create_hlt1_tck test +metainfo_repos.global_bind(repos=[('lhcb-metainfo/.git', 'master')]) + +# Some extra stuff for timing table +ApplicationMgr().ExtSvc += ['ToolSvc', 'AuditorSvc'] +ApplicationMgr().ExtSvc += [configured_ann_svc(name='TCKANNSvc')] + +ApplicationMgr().AuditAlgorithms = True +AuditorSvc().Auditors += ['TimingAuditor'] +SequencerTimerTool().OutputLevel = 4 + +# Some extra stuff to save histograms +ApplicationMgr().HistogramPersistency = "NONE" + +# No error messages when reading MDF +IODataManager().DisablePFNWarning = True + +gaudi = AppMgr() +gaudi.initialize() +TES = gaudi.evtSvc() + +events = set() + +decs = {} +tcks = {} + +n_evt = 0 +while app.EvtMax == -1 or n_evt < app.EvtMax: + gaudi.run(1) + if not TES['/Event']: + break + + n_evt += 1 + + odin = TES['DAQ/ODIN'] + run = odin.runNumber() + reps = TES['Hlt1/DecReports'] + tck = reps.configuredTCK() + if run not in tcks: + tcks[run] = tck + elif tcks[run] != tck: + sys.exit(f"Found multiple TCKs per run: 0x{tcks[run]:08x} 0x{tck:08x}") + + if tck not in decs: + decs[tck] = defaultdict(int) + for n in reps.decisionNames(): + decs[tck][str(n)[:-8]] += reps.decReport(n).decision() + +# Get the prescales from the TCK +prescales = {} +for tck in tcks.values(): + lines = property_from_git( + Path(args.tck_repo[0]), f"0x{tck:08x}", f"{args.hlt1_line[0]}", + "pre_scaler") + if not lines: + sys.exit( + f"Failed to find the prescales for {args.hlt1_line[0]} in 0x{tck:08x}" + ) + 
prescale = float( + next(chain.from_iterable(props.values() for props in lines.values()))) + prescales[tck] = prescale + +tck_without_events, tck_with_events = [ + e[0] for e in sorted(prescales.items(), key=lambda e: e[1]) +] +if decs[tck_with_events][args.hlt1_line[0]] == 0: + sys.exit( + f"ERROR: Line {args.hlt1_line[0]} didn't fire in file with TCK 0x{tck_with_events:08x}" + ) +if decs[tck_without_events][args.hlt1_line[0]] != 0: + sys.exit( + f"ERROR: Change of prescale not working; found decisions of {args.hlt1_line[0]} in file with TCK 0x{tck_without_events:08x}" + ) diff --git a/MooreScripts/tests/options/HLT1Slim/update_tck_for_run_change_test.py b/MooreScripts/tests/options/HLT1Slim/update_tck_for_run_change_test.py new file mode 100644 index 000000000..f500be821 --- /dev/null +++ b/MooreScripts/tests/options/HLT1Slim/update_tck_for_run_change_test.py @@ -0,0 +1,29 @@ +############################################################################### +# (c) Copyright 2024 CERN for the benefit of the LHCb Collaboration # +# # +# This software is distributed under the terms of the GNU General Public # +# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # +# # +# In applying this licence, CERN does not waive the privileges and immunities # +# granted to it by virtue of its status as an Intergovernmental Organization # +# or submit itself to any jurisdiction. # +############################################################################### +import json +from Allen.tck import sequence_from_git, sequence_to_git + +seq_str, info = sequence_from_git('config.git', '0x10000001') + +# Get the JSON representaion of the sequence +seq = json.loads(seq_str) + +# Update a prescale +seq['Hlt1DiMuonHighMass']['pre_scaler'] = 0. +seq['dec_reporter']['tck'] = 0x10000002 + +try: + sequence_to_git('config.git', seq, info['type'], info['label'], 0x10000002, + info['metadata']['stack']['name'], + {'settings': info['metadata']['settings']}, True) +except RuntimeError as e: + print(e) + sys.exit(1) diff --git a/MooreScripts/tests/qmtest/check_run_change.qmt b/MooreScripts/tests/qmtest/check_run_change.qmt new file mode 100644 index 000000000..05305e520 --- /dev/null +++ b/MooreScripts/tests/qmtest/check_run_change.qmt @@ -0,0 +1,27 @@ +<?xml version="1.0" ?><!DOCTYPE extension PUBLIC '-//QM/2.3/Extension//EN' 'http://www.codesourcery.com/qm/dtds/2.3/-//qm/2.3/extension//en.dtd'> +<!-- + (c) Copyright 2023 CERN for the benefit of the LHCb Collaboration +--> +<!-- +####################################################### +# SUMMARY OF THIS TEST +# ................... 
+# Author: Roel Aaij +# Purpose: Use ROOT python bindings to obtain the geometry directly +# from the stack and run the Allen event loop +####################################################### +--> +<extension class="GaudiTest.GaudiExeTest" kind="test"> + <argument name="program"><text>python</text></argument> + <argument name="args"><set> + <text>$MOORESCRIPTSROOT/tests/options/HLT1Slim/check_run_change.py</text> + <text>config.git</text> + <text>Hlt1DiMuonHighMass</text> + <text>hlt1runchange/*.mdf</text> + </set></argument> + <argument name="prerequisites"><set> + <tuple><text>hlt1runchange</text><enumeral>PASS</enumeral></tuple> + </set></argument> + <argument name="timeout"><integer>200</integer></argument> + <argument name="use_temp_dir"><enumeral>true</enumeral></argument> +</extension> diff --git a/MooreScripts/tests/qmtest/create_run_change_tck.qmt b/MooreScripts/tests/qmtest/create_run_change_tck.qmt new file mode 100644 index 000000000..d446a5b7c --- /dev/null +++ b/MooreScripts/tests/qmtest/create_run_change_tck.qmt @@ -0,0 +1,24 @@ +<?xml version="1.0" ?><!DOCTYPE extension PUBLIC '-//QM/2.3/Extension//EN' 'http://www.codesourcery.com/qm/dtds/2.3/-//qm/2.3/extension//en.dtd'> +<!-- + (c) Copyright 2023 CERN for the benefit of the LHCb Collaboration +--> +<!-- +####################################################### +# SUMMARY OF THIS TEST +# ................... +# Author: Roel Aaij +# Purpose: Use ROOT python bindings to obtain the geometry directly +# from the stack and run the Allen event loop +####################################################### +--> +<extension class="GaudiTest.GaudiExeTest" kind="test"> + <argument name="program"><text>python</text></argument> + <argument name="args"><set> + <text>$MOORESCRIPTSROOT/tests/options/HLT1Slim/update_tck_for_run_change_test.py</text> + </set></argument> + <argument name="prerequisites"><set> + <tuple><text>create_hlt1_tck</text><enumeral>PASS</enumeral></tuple> + </set></argument> + <argument name="timeout"><integer>100</integer></argument> + <argument name="use_temp_dir"><enumeral>true</enumeral></argument> +</extension> diff --git a/MooreScripts/tests/qmtest/hlt1runchange.qmt b/MooreScripts/tests/qmtest/hlt1runchange.qmt new file mode 100644 index 000000000..4094e5b62 --- /dev/null +++ b/MooreScripts/tests/qmtest/hlt1runchange.qmt @@ -0,0 +1,41 @@ +<?xml version="1.0" ?><!DOCTYPE extension PUBLIC '-//QM/2.3/Extension//EN' 'http://www.codesourcery.com/qm/dtds/2.3/-//qm/2.3/extension//en.dtd'> +<!-- + (c) Copyright 2021 CERN for the benefit of the LHCb Collaboration + + This software is distributed under the terms of the GNU General Public + Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". + + In applying this licence, CERN does not waive the privileges and immunities + granted to it by virtue of its status as an Intergovernmental Organization + or submit itself to any jurisdiction. 
+--> +<!-- +Run an HLT1 job in the Online testbench +--> +<extension class="GaudiTest.GaudiExeTest" kind="test"> +<argument name="program"><text>$MOORESCRIPTSROOT/scripts/testbench.py</text></argument> +<argument name="args"><set> + <text>$MOORESCRIPTSROOT/tests/options/HLT1Slim/Arch.xml</text> + <text>--working-dir=hlt1runchange</text> + <text>--partition=TESTHLT1RUNCHANGE</text> + <text>--test-file-db-key=2024_mep_292860_run_change_test</text> + <text>--hlt-type=config.git:0x10000001</text> + <text>--tck-from-odin</text> + <text>--measure-throughput=0</text> +</set></argument> +<argument name="prerequisites"><set> + <tuple><text>create_run_change_tck</text><enumeral>PASS</enumeral></tuple> +</set></argument> +<argument name="use_temp_dir"><enumeral>true</enumeral></argument> +<argument name="validator"><text> + +# No validator for now: only check the exit code + +import glob +workdir = self._common_tmpdir +for fn in glob.glob(workdir + "/hlt1runchange/*.*"): + if not fn.endswith(".mdf"): + result[os.path.basename(fn)] = open(fn).read() + +</text></argument> +</extension> -- GitLab From b909e5692df94581a84645fd92f4263052efbf0c Mon Sep 17 00:00:00 2001 From: Roel Aaij <roel.aaij@cern.ch> Date: Fri, 26 Jul 2024 10:21:11 +0200 Subject: [PATCH 05/35] Fix wrong interval size when an interval is smaller than the number of events per slice --- AllenOnline/src/MEPProvider.cpp | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/AllenOnline/src/MEPProvider.cpp b/AllenOnline/src/MEPProvider.cpp index 8253a659b..c6cd7493a 100755 --- a/AllenOnline/src/MEPProvider.cpp +++ b/AllenOnline/src/MEPProvider.cpp @@ -1634,7 +1634,13 @@ void MEPProvider::transpose( int thread_id ) { // Update the interval to "cut off" the part at the end of the // interval with consecutive ODIN errors. If ODINs with invalid // bank types have been detected, cut off an extra event. - auto last_valid = std::find_if( event_mask.rbegin(), event_mask.rend(), []( auto e ) { return e != 0; } ); + + // Note that the event mask size is not guaranteed to be the same + // as the interval size, because intervals may be smaller if the + // number of events per size is not an integer divider of the + // packing factor. Correct for that if necessary. 
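+ // As an illustration (hypothetical numbers): if event_mask holds 300
+ // entries but the current interval spans only 100 events, the reverse
+ // search below must start 200 entries past rbegin(), i.e. at the last
+ // event of the interval instead of in the unused tail of the mask.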
+ auto rbegin = event_mask.rbegin() + ( event_mask.size() - ( interval_end - interval_start ) ); + auto last_valid = std::find_if( rbegin, event_mask.rend(), []( auto e ) { return e != 0; } ); size_t n_good = std::distance( event_mask.begin(), last_valid.base() ); if ( n_good > 0 ) n_good -= have_invalid_odin; std::get<1>( interval ) = interval_end = interval_start + n_good; -- GitLab From c37692ff7edd7dd96dae56feb221c313ad130be7 Mon Sep 17 00:00:00 2001 From: RefBot <lhcbsoft@cern.ch> Date: Thu, 1 Aug 2024 13:56:01 +0200 Subject: [PATCH 06/35] Update References for: MooreOnline!454 based on lhcb-2024-patches-mr/1181 [skip ci] --- AllenOnline/tests/refs/mep_lumi.ref | 2 +- AllenOnline/tests/refs/mep_lumi.ref.x86_64_v3-opt | 2 +- AllenOnline/tests/refs/mep_multiple_tae.ref | 3 +-- AllenOnline/tests/refs/mep_no_ut.ref | 2 +- AllenOnline/tests/refs/mep_no_ut.ref.detdesc | 2 +- AllenOnline/tests/refs/mep_no_ut.ref.x86_64_v3-detdesc-opt | 2 +- AllenOnline/tests/refs/mep_no_ut.ref.x86_64_v3-opt | 2 +- AllenOnline/tests/refs/mep_real_data.ref | 2 +- AllenOnline/tests/refs/mep_real_data.ref.x86_64_v3-opt | 2 +- AllenOnline/tests/refs/mep_tae.ref | 3 +-- AllenOnline/tests/refs/mep_tae.ref.detdesc | 3 +-- AllenOnline/tests/refs/mep_tae.ref.x86_64_v3-detdesc-opt | 2 +- AllenOnline/tests/refs/mep_tae.ref.x86_64_v3-opt | 2 +- 13 files changed, 13 insertions(+), 16 deletions(-) diff --git a/AllenOnline/tests/refs/mep_lumi.ref b/AllenOnline/tests/refs/mep_lumi.ref index 2448d46e0..555b300cf 100644 --- a/AllenOnline/tests/refs/mep_lumi.ref +++ b/AllenOnline/tests/refs/mep_lumi.ref @@ -29,7 +29,7 @@ Events INFO Number of counters : 3 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "IN" | 30000 | | "MB_IN" | 1186 | - | "OUT" | 30000 | + | "SLICED" | 30000 | HLTControlFlowMgr INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "Processed events" | 1 | diff --git a/AllenOnline/tests/refs/mep_lumi.ref.x86_64_v3-opt b/AllenOnline/tests/refs/mep_lumi.ref.x86_64_v3-opt index a218da07b..8b8d56f16 100644 --- a/AllenOnline/tests/refs/mep_lumi.ref.x86_64_v3-opt +++ b/AllenOnline/tests/refs/mep_lumi.ref.x86_64_v3-opt @@ -29,7 +29,7 @@ Events INFO Number of counters : 3 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "IN" | 30000 | | "MB_IN" | 1186 | - | "OUT" | 30000 | + | "SLICED" | 30000 | HLTControlFlowMgr INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "Processed events" | 1 | diff --git a/AllenOnline/tests/refs/mep_multiple_tae.ref b/AllenOnline/tests/refs/mep_multiple_tae.ref index e7cd113fd..243b8bb2f 100644 --- a/AllenOnline/tests/refs/mep_multiple_tae.ref +++ b/AllenOnline/tests/refs/mep_multiple_tae.ref @@ -4,7 +4,6 @@ HLTControlFlowMgr INFO Concurrency level information: HLTControlFlowMgr INFO o Number of events slots: 1 HLTControlFlowMgr INFO o TBB thread pool size: 'ThreadPoolSize':1 ApplicationMgr INFO Application Manager Initialized successfully -Setting number of slices to 4 Bursts INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "IN" | 1 | @@ -12,7 +11,7 @@ Events INFO Number of counters : 3 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "IN" | 30000 | | "MB_IN" | 55 | - | "OUT" | 30000 | + | "SLICED" | 30000 | HLTControlFlowMgr INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "Processed events" | 1 | diff --git a/AllenOnline/tests/refs/mep_no_ut.ref b/AllenOnline/tests/refs/mep_no_ut.ref index 
93087364a..f48fbc14d 100644 --- a/AllenOnline/tests/refs/mep_no_ut.ref +++ b/AllenOnline/tests/refs/mep_no_ut.ref @@ -36,7 +36,7 @@ Events INFO Number of counters : 3 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "IN" | 2000 | | "MB_IN" | 1055 | - | "OUT" | 2000 | + | "SLICED" | 2000 | HLTControlFlowMgr INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "Processed events" | 1 | diff --git a/AllenOnline/tests/refs/mep_no_ut.ref.detdesc b/AllenOnline/tests/refs/mep_no_ut.ref.detdesc index 66c63b93d..6cca6fbd5 100644 --- a/AllenOnline/tests/refs/mep_no_ut.ref.detdesc +++ b/AllenOnline/tests/refs/mep_no_ut.ref.detdesc @@ -36,7 +36,7 @@ Events INFO Number of counters : 3 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "IN" | 2000 | | "MB_IN" | 1055 | - | "OUT" | 2000 | + | "SLICED" | 2000 | HLTControlFlowMgr INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "Processed events" | 1 | diff --git a/AllenOnline/tests/refs/mep_no_ut.ref.x86_64_v3-detdesc-opt b/AllenOnline/tests/refs/mep_no_ut.ref.x86_64_v3-detdesc-opt index 57768e6a4..0ed4d67e0 100644 --- a/AllenOnline/tests/refs/mep_no_ut.ref.x86_64_v3-detdesc-opt +++ b/AllenOnline/tests/refs/mep_no_ut.ref.x86_64_v3-detdesc-opt @@ -36,7 +36,7 @@ Events INFO Number of counters : 3 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "IN" | 2000 | | "MB_IN" | 1055 | - | "OUT" | 2000 | + | "SLICED" | 2000 | HLTControlFlowMgr INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "Processed events" | 1 | diff --git a/AllenOnline/tests/refs/mep_no_ut.ref.x86_64_v3-opt b/AllenOnline/tests/refs/mep_no_ut.ref.x86_64_v3-opt index b741cacc4..fb5dfe670 100644 --- a/AllenOnline/tests/refs/mep_no_ut.ref.x86_64_v3-opt +++ b/AllenOnline/tests/refs/mep_no_ut.ref.x86_64_v3-opt @@ -36,7 +36,7 @@ Events INFO Number of counters : 3 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "IN" | 2000 | | "MB_IN" | 1055 | - | "OUT" | 2000 | + | "SLICED" | 2000 | HLTControlFlowMgr INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "Processed events" | 1 | diff --git a/AllenOnline/tests/refs/mep_real_data.ref b/AllenOnline/tests/refs/mep_real_data.ref index d1e94e255..ca070399d 100644 --- a/AllenOnline/tests/refs/mep_real_data.ref +++ b/AllenOnline/tests/refs/mep_real_data.ref @@ -34,7 +34,7 @@ Events INFO Number of counters : 3 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "IN" | 30000 | | "MB_IN" | 818 | - | "OUT" | 30000 | + | "SLICED" | 30000 | HLTControlFlowMgr INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "Processed events" | 1 | diff --git a/AllenOnline/tests/refs/mep_real_data.ref.x86_64_v3-opt b/AllenOnline/tests/refs/mep_real_data.ref.x86_64_v3-opt index 3b4c63c32..e8cad02de 100644 --- a/AllenOnline/tests/refs/mep_real_data.ref.x86_64_v3-opt +++ b/AllenOnline/tests/refs/mep_real_data.ref.x86_64_v3-opt @@ -34,7 +34,7 @@ Events INFO Number of counters : 3 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "IN" | 30000 | | "MB_IN" | 818 | - | "OUT" | 30000 | + | "SLICED" | 30000 | HLTControlFlowMgr INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "Processed events" | 1 | diff --git a/AllenOnline/tests/refs/mep_tae.ref b/AllenOnline/tests/refs/mep_tae.ref index 1688070cf..d9f355a97 100644 --- a/AllenOnline/tests/refs/mep_tae.ref +++ b/AllenOnline/tests/refs/mep_tae.ref @@ 
-4,7 +4,6 @@ HLTControlFlowMgr INFO Concurrency level information: HLTControlFlowMgr INFO o Number of events slots: 1 HLTControlFlowMgr INFO o TBB thread pool size: 'ThreadPoolSize':1 ApplicationMgr INFO Application Manager Initialized successfully -Setting number of slices to 4 ApplicationMgr INFO Application Manager Started successfully DeviceFTGeometry INFO Conditions DB is compatible with FT bank version 7 and 8. Starting timer for throughput measurement @@ -31,7 +30,7 @@ Events INFO Number of counters : 3 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "IN" | 60000 | | "MB_IN" | 2108 | - | "OUT" | 60000 | + | "SLICED" | 60000 | HLTControlFlowMgr INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "Processed events" | 1 | diff --git a/AllenOnline/tests/refs/mep_tae.ref.detdesc b/AllenOnline/tests/refs/mep_tae.ref.detdesc index 8678fb2bd..eeff080d7 100644 --- a/AllenOnline/tests/refs/mep_tae.ref.detdesc +++ b/AllenOnline/tests/refs/mep_tae.ref.detdesc @@ -4,7 +4,6 @@ HLTControlFlowMgr INFO Concurrency level information: HLTControlFlowMgr INFO o Number of events slots: 1 HLTControlFlowMgr INFO o TBB thread pool size: 'ThreadPoolSize':1 ApplicationMgr INFO Application Manager Initialized successfully -Setting number of slices to 4 ApplicationMgr INFO Application Manager Started successfully DeviceFTGeometry INFO Conditions DB is compatible with FT bank version 7 and 8. Starting timer for throughput measurement @@ -31,7 +30,7 @@ Events INFO Number of counters : 3 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "IN" | 60000 | | "MB_IN" | 2108 | - | "OUT" | 60000 | + | "SLICED" | 60000 | HLTControlFlowMgr INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "Processed events" | 1 | diff --git a/AllenOnline/tests/refs/mep_tae.ref.x86_64_v3-detdesc-opt b/AllenOnline/tests/refs/mep_tae.ref.x86_64_v3-detdesc-opt index 80992781a..7e33215fc 100644 --- a/AllenOnline/tests/refs/mep_tae.ref.x86_64_v3-detdesc-opt +++ b/AllenOnline/tests/refs/mep_tae.ref.x86_64_v3-detdesc-opt @@ -30,7 +30,7 @@ Events INFO Number of counters : 3 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "IN" | 60000 | | "MB_IN" | 2108 | - | "OUT" | 60000 | + | "SLICED" | 60000 | HLTControlFlowMgr INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "Processed events" | 1 | diff --git a/AllenOnline/tests/refs/mep_tae.ref.x86_64_v3-opt b/AllenOnline/tests/refs/mep_tae.ref.x86_64_v3-opt index ab748c96f..e7b3d4bc8 100644 --- a/AllenOnline/tests/refs/mep_tae.ref.x86_64_v3-opt +++ b/AllenOnline/tests/refs/mep_tae.ref.x86_64_v3-opt @@ -30,7 +30,7 @@ Events INFO Number of counters : 3 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "IN" | 60000 | | "MB_IN" | 2108 | - | "OUT" | 60000 | + | "SLICED" | 60000 | HLTControlFlowMgr INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "Processed events" | 1 | -- GitLab From c6502e7a908aeca8dfe6eb9633fd9e67631fa733 Mon Sep 17 00:00:00 2001 From: Roel Aaij <roel.aaij@cern.ch> Date: Thu, 1 Aug 2024 13:57:04 +0200 Subject: [PATCH 07/35] Change HLT1 Events counters to adhere to convention agreed with Online --- AllenOnline/options/AllenConfig.py | 13 +------------ AllenOnline/src/MBMOutput.cpp | 2 +- AllenOnline/src/MEPProvider.cpp | 2 +- .../python/MooreScripts/testbench/emulator.py | 11 +++++++---- .../MooreScripts/testbench/scenarios/default.py | 7 +++++-- 5 files changed, 15 insertions(+), 20 
deletions(-) diff --git a/AllenOnline/options/AllenConfig.py b/AllenOnline/options/AllenConfig.py index 1e997efd8..3030e7d70 100755 --- a/AllenOnline/options/AllenConfig.py +++ b/AllenOnline/options/AllenConfig.py @@ -259,18 +259,7 @@ messageSvc.OutputLevel = 3 # Add the services that will produce the non-event-data monSink = OnlMonitorSink( - CountersToPublish=[("Bursts", ".*"), ("Events", ".*"), ("MBMOutput", ".*"), - ("EventLoop", ".*"), ("gather_selections", - "Hlt1.*Pass"), - ("gather_selections", "Hlt1.*Rate"), - ("velo_consolidate_tracks_ca09ac3f", "n_velo_tracks"), - ("scifi_consolidate_seeds_7b68a248", "n_seed_tracks"), - ("scifi_consolidate_tracks_85f4cc16", "n_long_tracks"), - ("matching_consolidate_tracks_ie8943489", - "n_long_tracks"), - ("filter_clusters_ae8dd84a", "n_calo_clusters"), - ("pv_beamline_cleanup_d0beb1db", "total_pvs")], - HistogramsToPublish=[(".*", ".*")]) + CountersToPublish=[(".*", ".*")], HistogramsToPublish=[(".*", ".*")]) appMgr.ExtSvc = [monSvc, monSink] + appMgr.ExtSvc appMgr.EvtSel = "NONE" diff --git a/AllenOnline/src/MBMOutput.cpp b/AllenOnline/src/MBMOutput.cpp index 661efba2d..7049014a4 100755 --- a/AllenOnline/src/MBMOutput.cpp +++ b/AllenOnline/src/MBMOutput.cpp @@ -161,7 +161,7 @@ StatusCode Allen::MBMOutput::initialize() { } m_burstsOutput = std::make_unique<Gaudi::Accumulators::Counter<>>( burstsSvc, "OUT" ); - m_eventsOutput = std::make_unique<Gaudi::Accumulators::Counter<>>( eventsSvc, "STORED" ); + m_eventsOutput = std::make_unique<Gaudi::Accumulators::Counter<>>( eventsSvc, "OUT" ); m_mbOutput = std::make_unique<Gaudi::Accumulators::Counter<>>( eventsSvc, "MB_OUT" ); m_buffers.reserve( m_nThreads.value() ); diff --git a/AllenOnline/src/MEPProvider.cpp b/AllenOnline/src/MEPProvider.cpp index c6cd7493a..9e2fc96cf 100755 --- a/AllenOnline/src/MEPProvider.cpp +++ b/AllenOnline/src/MEPProvider.cpp @@ -328,7 +328,7 @@ StatusCode MEPProvider::initialize() { m_mepsInput = std::make_unique<Gaudi::Accumulators::Counter<>>( burstsSvc, "IN" ); m_eventsInput = std::make_unique<Gaudi::Accumulators::Counter<>>( eventsSvc, "IN" ); - m_eventsToBatches = std::make_unique<Gaudi::Accumulators::Counter<>>( eventsSvc, "OUT" ); + m_eventsToBatches = std::make_unique<Gaudi::Accumulators::Counter<>>( eventsSvc, "SLICED" ); m_mbInput = std::make_unique<Gaudi::Accumulators::Counter<>>( eventsSvc, "MB_IN" ); #ifdef HAVE_MPI diff --git a/MooreScripts/python/MooreScripts/testbench/emulator.py b/MooreScripts/python/MooreScripts/testbench/emulator.py index 62e29ee45..852738d0c 100644 --- a/MooreScripts/python/MooreScripts/testbench/emulator.py +++ b/MooreScripts/python/MooreScripts/testbench/emulator.py @@ -402,14 +402,15 @@ def check_for_orphans(names): "\n".join(matches)) -async def measure_throughput(utgids, max_duration, print_throughput): +async def measure_throughput(utgids, max_duration, print_throughput, counter): def throughput(start, end): return (end[1] - start[1]) / (end[0] - start[0]).total_seconds() ta = None async with AsyncExitStack() as stack: services = [ - stack.enter_context(asyncdim.DimService(u + "/Events/OUT", "X")) + stack.enter_context( + asyncdim.DimService(f"{u}/Events/{counter}", "X")) for u in utgids ] # get the first data point per task @@ -444,10 +445,12 @@ async def measure_throughput(utgids, max_duration, print_throughput): async def tasks_measure_throughput(tasks, max_duration, type_pattern=r".*(HLT|Mon).*", - print_throughput=True): + print_throughput=True, + counter="OUT"): utgids = [t.utgid for t in tasks] utgids = [u for u in utgids 
if re.match(type_pattern, u.split("_")[2])] - return await measure_throughput(utgids, max_duration, print_throughput) + return await measure_throughput(utgids, max_duration, print_throughput, + counter) async def hlt1_wait_for_output(tasks, prod_svc, proc_svc): diff --git a/MooreScripts/python/MooreScripts/testbench/scenarios/default.py b/MooreScripts/python/MooreScripts/testbench/scenarios/default.py index b69240d03..e55d9bd48 100644 --- a/MooreScripts/python/MooreScripts/testbench/scenarios/default.py +++ b/MooreScripts/python/MooreScripts/testbench/scenarios/default.py @@ -80,6 +80,8 @@ async def run(tasks: List[emulator.Task], args, extra_argv): await tasks_send_command(tasks, "start") await tasks_wait_for_status(tasks, "RUNNING") + prod_counter = "SLICED" if "HLT1" in main_tasks[0].utgid else "OUT" + if args.measure_throughput > 0: # wait a bit for things to settle and measure throughput # in case we are running the perf profile, we don't want to report the throughput @@ -88,7 +90,8 @@ async def run(tasks: List[emulator.Task], args, extra_argv): tasks, max_duration=args.measure_throughput, print_throughput=not (extra_args.use_perf - or extra_args.use_perf_control)) + or extra_args.use_perf_control), + counter=prod_counter) else: # wait for the reader task to get to a PAUSED state (no more input) await tasks_wait_for_status(prod_tasks, "PAUSED") @@ -108,7 +111,7 @@ async def run(tasks: List[emulator.Task], args, extra_argv): if not args.measure_throughput > 0: if "HLT1" in main_tasks[0].utgid: log.info(f"Waiting until all events have been processed") - await hlt1_wait_for_output(main_tasks, "Events/OUT", + await hlt1_wait_for_output(main_tasks, "Events/" + prod_counter, "MBMOutput/NProcessed") elif "HLT2" in main_tasks[0].utgid: log.info( -- GitLab From cb923129e0b03f37f90f633c45e9bcbc734a7eb4 Mon Sep 17 00:00:00 2001 From: RefBot <lhcbsoft@cern.ch> Date: Fri, 2 Aug 2024 10:43:09 +0200 Subject: [PATCH 08/35] Update References for: Allen!1700 based on lhcb-2024-patches-mr/1184 [skip ci] --- AllenOnline/tests/refs/mep_no_ut.ref | 6 ++++-- AllenOnline/tests/refs/mep_no_ut.ref.detdesc | 8 ++++++-- .../tests/refs/mep_no_ut.ref.x86_64_v3-detdesc-opt | 8 ++++++-- AllenOnline/tests/refs/mep_no_ut.ref.x86_64_v3-opt | 6 ++++-- AllenOnline/tests/refs/mep_tae.ref | 10 +++++----- AllenOnline/tests/refs/mep_tae.ref.detdesc | 10 +++++----- .../tests/refs/mep_tae.ref.x86_64_v3-detdesc-opt | 10 +++++----- AllenOnline/tests/refs/mep_tae.ref.x86_64_v3-opt | 10 +++++----- 8 files changed, 40 insertions(+), 28 deletions(-) diff --git a/AllenOnline/tests/refs/mep_no_ut.ref b/AllenOnline/tests/refs/mep_no_ut.ref index f48fbc14d..ae1092111 100644 --- a/AllenOnline/tests/refs/mep_no_ut.ref +++ b/AllenOnline/tests/refs/mep_no_ut.ref @@ -43,7 +43,7 @@ HLTControlFlowMgr INFO Number of counters : 1 calo_filter_clusters_5af2e675 INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "n_calo_clusters" | 41231 | -gather_selections INFO Number of counters : 15 +gather_selections INFO Number of counters : 17 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "Hlt1DiPhotonHighMassPass" | 4 | | "Hlt1DiPhotonHighMassRate" | 4 | @@ -58,6 +58,8 @@ gather_selections INFO Number of counters : 15 | "Hlt1SMOG2PassThroughLowMult5Rate" | 2 | | "Hlt1TAEPassthroughPass" | 1 | | "Hlt1TAEPassthroughRate" | 1 | + | "Hlt1TwoTrackMVAPass" | 6 | + | "Hlt1TwoTrackMVARate" | 6 | | "Hlt1VeloMicroBiasPass" | 1580 | | "Hlt1VeloMicroBiasRate" | 2 | pv_beamline_cleanup INFO Number of counters : 1 @@ 
-65,7 +67,7 @@ pv_beamline_cleanup INFO Number of counters : 1 | "n_PVs" | 2000 | 176 | 0.088000 | scifi_consolidate_tracks_forward INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | - | "n_long_tracks_forward" | 1643 | 271 | 0.16494 | + | "n_long_tracks_forward" | 1643 | 1467 | 0.89288 | velo_consolidate_tracks INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "n_velo_tracks" | 2000 | 177852 | 88.926 | diff --git a/AllenOnline/tests/refs/mep_no_ut.ref.detdesc b/AllenOnline/tests/refs/mep_no_ut.ref.detdesc index 6cca6fbd5..1da7b2263 100644 --- a/AllenOnline/tests/refs/mep_no_ut.ref.detdesc +++ b/AllenOnline/tests/refs/mep_no_ut.ref.detdesc @@ -43,8 +43,10 @@ HLTControlFlowMgr INFO Number of counters : 1 calo_filter_clusters_5af2e675 INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "n_calo_clusters" | 41231 | -gather_selections INFO Number of counters : 15 +gather_selections INFO Number of counters : 19 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + | "Hlt1DiMuonDisplacedPass" | 1 | + | "Hlt1DiMuonDisplacedRate" | 1 | | "Hlt1DiPhotonHighMassPass" | 4 | | "Hlt1DiPhotonHighMassRate" | 4 | | "Hlt1ODINCalibPass" | 2 | @@ -58,6 +60,8 @@ gather_selections INFO Number of counters : 15 | "Hlt1SMOG2PassThroughLowMult5Rate" | 3 | | "Hlt1TAEPassthroughPass" | 1 | | "Hlt1TAEPassthroughRate" | 1 | + | "Hlt1TwoTrackMVAPass" | 2 | + | "Hlt1TwoTrackMVARate" | 2 | | "Hlt1VeloMicroBiasPass" | 1581 | | "Hlt1VeloMicroBiasRate" | 2 | pv_beamline_cleanup INFO Number of counters : 1 @@ -65,7 +69,7 @@ pv_beamline_cleanup INFO Number of counters : 1 | "n_PVs" | 2000 | 3764 | 1.8820 | scifi_consolidate_tracks_forward INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | - | "n_long_tracks_forward" | 1643 | 110 | 0.066951 | + | "n_long_tracks_forward" | 1643 | 845 | 0.51430 | velo_consolidate_tracks INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "n_velo_tracks" | 2000 | 173884 | 86.942 | diff --git a/AllenOnline/tests/refs/mep_no_ut.ref.x86_64_v3-detdesc-opt b/AllenOnline/tests/refs/mep_no_ut.ref.x86_64_v3-detdesc-opt index 0ed4d67e0..5c76da533 100644 --- a/AllenOnline/tests/refs/mep_no_ut.ref.x86_64_v3-detdesc-opt +++ b/AllenOnline/tests/refs/mep_no_ut.ref.x86_64_v3-detdesc-opt @@ -43,8 +43,10 @@ HLTControlFlowMgr INFO Number of counters : 1 calo_filter_clusters_5af2e675 INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "n_calo_clusters" | 41231 | -gather_selections INFO Number of counters : 15 +gather_selections INFO Number of counters : 19 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | + | "Hlt1DiMuonDisplacedPass" | 1 | + | "Hlt1DiMuonDisplacedRate" | 1 | | "Hlt1DiPhotonHighMassPass" | 4 | | "Hlt1DiPhotonHighMassRate" | 4 | | "Hlt1ODINCalibPass" | 2 | @@ -58,6 +60,8 @@ gather_selections INFO Number of counters : 15 | "Hlt1SMOG2PassThroughLowMult5Rate" | 3 | | "Hlt1TAEPassthroughPass" | 1 | | "Hlt1TAEPassthroughRate" | 1 | + | "Hlt1TwoTrackMVAPass" | 3 | + | "Hlt1TwoTrackMVARate" | 3 | | "Hlt1VeloMicroBiasPass" | 1581 | | "Hlt1VeloMicroBiasRate" | 2 | pv_beamline_cleanup INFO Number of counters : 1 @@ -65,7 +69,7 @@ pv_beamline_cleanup INFO Number of counters : 1 | "n_PVs" | 2000 | 3771 | 1.8855 | scifi_consolidate_tracks_forward INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | - | "n_long_tracks_forward" | 1643 | 111 | 
0.067559 | + | "n_long_tracks_forward" | 1643 | 845 | 0.51430 | velo_consolidate_tracks INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "n_velo_tracks" | 2000 | 173884 | 86.942 | diff --git a/AllenOnline/tests/refs/mep_no_ut.ref.x86_64_v3-opt b/AllenOnline/tests/refs/mep_no_ut.ref.x86_64_v3-opt index fb5dfe670..5212629cd 100644 --- a/AllenOnline/tests/refs/mep_no_ut.ref.x86_64_v3-opt +++ b/AllenOnline/tests/refs/mep_no_ut.ref.x86_64_v3-opt @@ -43,7 +43,7 @@ HLTControlFlowMgr INFO Number of counters : 1 calo_filter_clusters_5af2e675 INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "n_calo_clusters" | 41231 | -gather_selections INFO Number of counters : 15 +gather_selections INFO Number of counters : 17 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "Hlt1DiPhotonHighMassPass" | 4 | | "Hlt1DiPhotonHighMassRate" | 4 | @@ -58,6 +58,8 @@ gather_selections INFO Number of counters : 15 | "Hlt1SMOG2PassThroughLowMult5Rate" | 2 | | "Hlt1TAEPassthroughPass" | 1 | | "Hlt1TAEPassthroughRate" | 1 | + | "Hlt1TwoTrackMVAPass" | 6 | + | "Hlt1TwoTrackMVARate" | 6 | | "Hlt1VeloMicroBiasPass" | 1580 | | "Hlt1VeloMicroBiasRate" | 2 | pv_beamline_cleanup INFO Number of counters : 1 @@ -65,7 +67,7 @@ pv_beamline_cleanup INFO Number of counters : 1 | "n_PVs" | 2000 | 172 | 0.086000 | scifi_consolidate_tracks_forward INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | - | "n_long_tracks_forward" | 1643 | 273 | 0.16616 | + | "n_long_tracks_forward" | 1643 | 1467 | 0.89288 | velo_consolidate_tracks INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "n_velo_tracks" | 2000 | 177861 | 88.930 | diff --git a/AllenOnline/tests/refs/mep_tae.ref b/AllenOnline/tests/refs/mep_tae.ref index d9f355a97..77a4bffd3 100644 --- a/AllenOnline/tests/refs/mep_tae.ref +++ b/AllenOnline/tests/refs/mep_tae.ref @@ -36,12 +36,12 @@ HLTControlFlowMgr INFO Number of counters : 1 | "Processed events" | 1 | gather_selections INFO Number of counters : 6 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | - | "Hlt1ActivityPassthroughPass" | 6454 | - | "Hlt1ActivityPassthroughRate" | 6454 | + | "Hlt1ActivityPassthroughPass" | 22715 | + | "Hlt1ActivityPassthroughRate" | 22715 | | "Hlt1ODINLumiPass" | 119 | | "Hlt1ODINLumiRate" | 119 | - | "Hlt1TAEPassthroughPass" | 4 | - | "Hlt1TAEPassthroughRate" | 4 | + | "Hlt1TAEPassthroughPass" | 14 | + | "Hlt1TAEPassthroughRate" | 14 | pv_beamline_cleanup INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "n_PVs" | 60000 | 5810 | 0.096833 | @@ -50,7 +50,7 @@ scifi_calculate_cluster_count_76... 
INFO Number of counters : 1 | "n_misordered_cluster" | 2 | scifi_consolidate_tracks_forward INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | - | "n_long_tracks_forward" | 60000 | 7571 | 0.12618 | + | "n_long_tracks_forward" | 60000 | 43305 | 0.72175 | velo_consolidate_tracks INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "n_velo_tracks" | 60000 | 5431268 | 90.521 | diff --git a/AllenOnline/tests/refs/mep_tae.ref.detdesc b/AllenOnline/tests/refs/mep_tae.ref.detdesc index eeff080d7..ad01015c5 100644 --- a/AllenOnline/tests/refs/mep_tae.ref.detdesc +++ b/AllenOnline/tests/refs/mep_tae.ref.detdesc @@ -36,12 +36,12 @@ HLTControlFlowMgr INFO Number of counters : 1 | "Processed events" | 1 | gather_selections INFO Number of counters : 6 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | - | "Hlt1ActivityPassthroughPass" | 2751 | - | "Hlt1ActivityPassthroughRate" | 2751 | + | "Hlt1ActivityPassthroughPass" | 16005 | + | "Hlt1ActivityPassthroughRate" | 16005 | | "Hlt1ODINLumiPass" | 119 | | "Hlt1ODINLumiRate" | 119 | - | "Hlt1TAEPassthroughPass" | 1 | - | "Hlt1TAEPassthroughRate" | 1 | + | "Hlt1TAEPassthroughPass" | 8 | + | "Hlt1TAEPassthroughRate" | 8 | pv_beamline_cleanup INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "n_PVs" | 60000 | 113607 | 1.8935 | @@ -50,7 +50,7 @@ scifi_calculate_cluster_count_76... INFO Number of counters : 1 | "n_misordered_cluster" | 2 | scifi_consolidate_tracks_forward INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | - | "n_long_tracks_forward" | 60000 | 2979 | 0.049650 | + | "n_long_tracks_forward" | 60000 | 24675 | 0.41125 | velo_consolidate_tracks INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "n_velo_tracks" | 60000 | 5309446 | 88.491 | diff --git a/AllenOnline/tests/refs/mep_tae.ref.x86_64_v3-detdesc-opt b/AllenOnline/tests/refs/mep_tae.ref.x86_64_v3-detdesc-opt index 7e33215fc..f80cd4b70 100644 --- a/AllenOnline/tests/refs/mep_tae.ref.x86_64_v3-detdesc-opt +++ b/AllenOnline/tests/refs/mep_tae.ref.x86_64_v3-detdesc-opt @@ -36,12 +36,12 @@ HLTControlFlowMgr INFO Number of counters : 1 | "Processed events" | 1 | gather_selections INFO Number of counters : 6 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | - | "Hlt1ActivityPassthroughPass" | 2758 | - | "Hlt1ActivityPassthroughRate" | 2758 | + | "Hlt1ActivityPassthroughPass" | 16007 | + | "Hlt1ActivityPassthroughRate" | 16007 | | "Hlt1ODINLumiPass" | 119 | | "Hlt1ODINLumiRate" | 119 | - | "Hlt1TAEPassthroughPass" | 1 | - | "Hlt1TAEPassthroughRate" | 1 | + | "Hlt1TAEPassthroughPass" | 8 | + | "Hlt1TAEPassthroughRate" | 8 | pv_beamline_cleanup INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "n_PVs" | 60000 | 113588 | 1.8931 | @@ -50,7 +50,7 @@ scifi_calculate_cluster_count_76... 
INFO Number of counters : 1 | "n_misordered_cluster" | 2 | scifi_consolidate_tracks_forward INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | - | "n_long_tracks_forward" | 60000 | 2984 | 0.049733 | + | "n_long_tracks_forward" | 60000 | 24677 | 0.41128 | velo_consolidate_tracks INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "n_velo_tracks" | 60000 | 5309443 | 88.491 | diff --git a/AllenOnline/tests/refs/mep_tae.ref.x86_64_v3-opt b/AllenOnline/tests/refs/mep_tae.ref.x86_64_v3-opt index e7b3d4bc8..45b58b526 100644 --- a/AllenOnline/tests/refs/mep_tae.ref.x86_64_v3-opt +++ b/AllenOnline/tests/refs/mep_tae.ref.x86_64_v3-opt @@ -36,12 +36,12 @@ HLTControlFlowMgr INFO Number of counters : 1 | "Processed events" | 1 | gather_selections INFO Number of counters : 6 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | - | "Hlt1ActivityPassthroughPass" | 6456 | - | "Hlt1ActivityPassthroughRate" | 6456 | + | "Hlt1ActivityPassthroughPass" | 22725 | + | "Hlt1ActivityPassthroughRate" | 22725 | | "Hlt1ODINLumiPass" | 119 | | "Hlt1ODINLumiRate" | 119 | - | "Hlt1TAEPassthroughPass" | 4 | - | "Hlt1TAEPassthroughRate" | 4 | + | "Hlt1TAEPassthroughPass" | 14 | + | "Hlt1TAEPassthroughRate" | 14 | pv_beamline_cleanup INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "n_PVs" | 60000 | 5794 | 0.096567 | @@ -50,7 +50,7 @@ scifi_calculate_cluster_count_76... INFO Number of counters : 1 | "n_misordered_cluster" | 2 | scifi_consolidate_tracks_forward INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | - | "n_long_tracks_forward" | 60000 | 7581 | 0.12635 | + | "n_long_tracks_forward" | 60000 | 43340 | 0.72233 | velo_consolidate_tracks INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "n_velo_tracks" | 60000 | 5431281 | 90.521 | -- GitLab From 4d8ddc2752a352d0e57842871a4a54d4b45cb31c Mon Sep 17 00:00:00 2001 From: Hendrik Jage <hendrik.jage@cern.ch> Date: Fri, 9 Aug 2024 11:01:04 +0200 Subject: [PATCH 09/35] Update scifi options to latest version --- MooreOnlineConf/options/ft.py | 114 ++++++++++++++++++++++++---------- 1 file changed, 81 insertions(+), 33 deletions(-) diff --git a/MooreOnlineConf/options/ft.py b/MooreOnlineConf/options/ft.py index 4e0f4e8a6..dd756ca7d 100644 --- a/MooreOnlineConf/options/ft.py +++ b/MooreOnlineConf/options/ft.py @@ -1,5 +1,5 @@ ############################################################################### -# (c) Copyright 2021 CERN for the benefit of the LHCb Collaboration # +# (c) Copyright 2000-2024 CERN for the benefit of the LHCb Collaboration # # # # This software is distributed under the terms of the GNU General Public # # Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # @@ -7,46 +7,94 @@ # In applying this licence, CERN does not waive the privileges and immunities # # granted to it by virtue of its status as an Intergovernmental Organization # # or submit itself to any jurisdiction. 
# -############################################################################# +############################################################################### +from PyConf.application import ( + configure_input, + configure, + default_raw_banks, + make_odin, +) +from PyConf.Algorithms import ( + FTRawBankDecoder, + FTLiteClusterMonitor, + FTLiteClusterTAEMonitor, +) +from MooreOnlineConf.utils import ( + common_monitors_node, + passes_rb, + RoutingBit, + decode_tae, + if_then, + run_all, +) +from Moore import options +import os -from PyConf.application import default_raw_event, default_raw_banks, make_odin -from Moore import options, run_reconstruction -from PyConf.Algorithms import FTDigitMonitor, FTNZSRawBankDecoder -from PyConf.Algorithms import FTClusterMonitor, FTRawBankDecoder -from Moore.config import Reconstruction -from RecoConf.standalone import reco_prefilters +try: + import OnlineEnvBase as OnlineEnv + TAE_HALF_WINDOW = OnlineEnv.TAE +except ImportError: + TAE_HALF_WINDOW = 3 -options.dddb_tag = 'upgrade/master' -options.conddb_tag = 'upgrade/master' +partition = os.environ.get("PARTITION", "LHCb") +isLocalFlag = bool(partition == "LHCb") -def ft_mon(): - raw_event = default_raw_event(["FTNZS"]) - raw_banks = default_raw_banks("FTGeneric") +def main(): odin = make_odin() - algs = [] - if options.input_type.lower() == 'online': - from MooreOnlineConf.utils import update_and_reset - algs.append(update_and_reset()) + def make_ft_clusters(name=""): + raw_banks = default_raw_banks("FTCluster") + ZS_decoder = FTRawBankDecoder( + name=f"FTRawBankDecoder{name}", Odin=odin, RawBanks=raw_banks) + return ZS_decoder.OutputLocation + + # the standard monitor + zs_monitor_lumi = FTLiteClusterMonitor( + name="FTLiteClusterMonitorLumi", + allow_duplicate_instances_with_distinct_names=True, + InputODIN=odin, + ClusterLocation=make_ft_clusters()) + + zs_monitor_physics = FTLiteClusterMonitor( + name="FTLiteClusterMonitor", + allow_duplicate_instances_with_distinct_names=True, + InputODIN=odin, + ClusterLocation=make_ft_clusters()) - ZS_decoder = FTRawBankDecoder(RawBanks=raw_banks) - ZS_monitor = FTClusterMonitor( - InputODIN=odin, ClusterLocation=ZS_decoder.OutputLocation) - algs += [ - ZS_decoder, - ZS_monitor, - ] + # the TAE monitor + is_tae, tae_decoding, tae_odins, tae_data = decode_tae( + make_ft_clusters, TAE_HALF_WINDOW) + tae_monitor = FTLiteClusterTAEMonitor( + name="FTLiteClusterTAEMonitor", + ODINVector=list(tae_odins.values()), + InputVector=list(tae_data.values()), + SuperTAEHalfWindow=TAE_HALF_WINDOW) - NZS_decoder = FTNZSRawBankDecoder(RawEventLocations=raw_event) - NZS_monitor = FTDigitMonitor( - InputODIN=odin, DigitLocation=NZS_decoder.OutputLocation) - algs += [ - NZS_decoder, - NZS_monitor, - ] + # assemble the control flow + if isLocalFlag: + top_node = run_all( + "top", + [ + common_monitors_node(), # common monitoring to all tasks + if_then("IfPHYSICS", passes_rb(RoutingBit.PHYSICS), + zs_monitor_physics), + if_then("IfLUMI", passes_rb(RoutingBit.LUMI), zs_monitor_lumi), + if_then("IfTAE", is_tae, + run_all("TAE", [tae_decoding, tae_monitor])), + ]) + else: + top_node = run_all( + "top", + [ + common_monitors_node(), # common monitoring to all tasks + zs_monitor_physics, + if_then("IfTAE", is_tae, + run_all("TAE", [tae_decoding, tae_monitor])), + ]) - return Reconstruction('ft_mon', algs, reco_prefilters(gec=False)) + return top_node -run_reconstruction(options, ft_mon) +configure_input(options) +configure(options, main()) -- GitLab From 
5abe363b0f0202155ca5749678b72234b35fa585 Mon Sep 17 00:00:00 2001 From: Rosen Matev <rosen.matev@cern.ch> Date: Wed, 5 Jun 2024 19:10:12 +0200 Subject: [PATCH 10/35] Fix odinmon test and disable some tests on detdesc --- .../MooreScripts/testbench/scenarios/default.py | 12 ++++++------ MooreScripts/scripts/testbench.py | 8 +++++++- MooreScripts/tests/qmtest/hlt2slim.qmt | 1 + MooreScripts/tests/qmtest/odinmon.qmt | 1 + MooreScripts/tests/qmtest/odinmon_offline.qmt | 1 + 5 files changed, 16 insertions(+), 7 deletions(-) diff --git a/MooreScripts/python/MooreScripts/testbench/scenarios/default.py b/MooreScripts/python/MooreScripts/testbench/scenarios/default.py index e55d9bd48..5cbba94f9 100644 --- a/MooreScripts/python/MooreScripts/testbench/scenarios/default.py +++ b/MooreScripts/python/MooreScripts/testbench/scenarios/default.py @@ -102,18 +102,18 @@ async def run(tasks: List[emulator.Task], args, extra_argv): await tasks_send_command(prod_tasks, "stop") await tasks_wait_for_status(prod_tasks, "READY") - # Get last published value - # TODO can we make it such that the number of events put in the buffer keeps being published after stop? - # Markus says MDFReader may need to be Class 1 but are there side effects? - n_events_produced = next( - v for ts, v in reversed(await dim_prod_out.get_all()) if v is not None) - if not args.measure_throughput > 0: if "HLT1" in main_tasks[0].utgid: log.info(f"Waiting until all events have been processed") await hlt1_wait_for_output(main_tasks, "Events/" + prod_counter, "MBMOutput/NProcessed") elif "HLT2" in main_tasks[0].utgid: + # Get last published value + # TODO can we make it such that the number of events put in the buffer keeps being published after stop? + # Markus says MDFReader may need to be Class 1 but are there side effects? + n_events_produced = next( + v for ts, v in reversed(await dim_prod_out.get_all()) + if v is not None) log.info( f"Waiting until all {n_events_produced} events have been processed" ) diff --git a/MooreScripts/scripts/testbench.py b/MooreScripts/scripts/testbench.py index 85a071b28..56ab99204 100755 --- a/MooreScripts/scripts/testbench.py +++ b/MooreScripts/scripts/testbench.py @@ -149,7 +149,13 @@ args.working_dir = args.working_dir.resolve() # Ideally, the nightlies would properly isolate test jobs. # Another approach would be to convince Online to be able to control # where the shared resources go (/tmp/... and /dev/shm/...). 
-args.partition = args.partition + os.getenv("BUILD_ID", "") +PARTITION_MAX_LEN = 16 +build_id = os.getenv("BUILD_ID", "") +args.partition = args.partition + build_id[-( + PARTITION_MAX_LEN - len(args.partition)):] +if len(args.partition) > PARTITION_MAX_LEN: + raise RuntimeError(f"Partition name is too long (len({args.partition!r})=" + + f"{len(args.partition)} > {PARTITION_MAX_LEN})") emulator.setup_logging( args.working_dir / args.log_file, console_level=args.log_level) diff --git a/MooreScripts/tests/qmtest/hlt2slim.qmt b/MooreScripts/tests/qmtest/hlt2slim.qmt index 8efcab556..429f878d5 100644 --- a/MooreScripts/tests/qmtest/hlt2slim.qmt +++ b/MooreScripts/tests/qmtest/hlt2slim.qmt @@ -22,6 +22,7 @@ Run an HLT2 job in the Online testbench <text>--test-file-db-key=2022_raw_hlt1_253597</text> </set></argument> <argument name="use_temp_dir"><enumeral>true</enumeral></argument> +<argument name="unsupported_platforms"><set><text>detdesc</text></set></argument> <argument name="validator"><text> # No validator for now: only check the exit code diff --git a/MooreScripts/tests/qmtest/odinmon.qmt b/MooreScripts/tests/qmtest/odinmon.qmt index c1d791c45..f3ba51c43 100644 --- a/MooreScripts/tests/qmtest/odinmon.qmt +++ b/MooreScripts/tests/qmtest/odinmon.qmt @@ -21,6 +21,7 @@ Run an ODINMon job in the Online testbench <text>--test-file-db-key=2024_raw_hlt1_288877_tae</text> </set></argument> <argument name="use_temp_dir"><enumeral>true</enumeral></argument> +<argument name="unsupported_platforms"><set><text>detdesc</text></set></argument> <argument name="validator"><text> # No validator for now: only check the exit code diff --git a/MooreScripts/tests/qmtest/odinmon_offline.qmt b/MooreScripts/tests/qmtest/odinmon_offline.qmt index 75221b725..367dde023 100644 --- a/MooreScripts/tests/qmtest/odinmon_offline.qmt +++ b/MooreScripts/tests/qmtest/odinmon_offline.qmt @@ -19,6 +19,7 @@ Run an ODINMon job in an offline way <text>$MOOREONLINECONFROOT/options/odin.py</text> </set></argument> <argument name="use_temp_dir"><enumeral>per-test</enumeral></argument> +<argument name="unsupported_platforms"><set><text>detdesc</text></set></argument> <argument name="validator"><text> # No validator for now: only check the exit code -- GitLab From a477286bebb0bbaaa7865b1564cdd52b8d286b47 Mon Sep 17 00:00:00 2001 From: Rosen Matev <rosen.matev@cern.ch> Date: Tue, 13 Aug 2024 15:40:14 +0200 Subject: [PATCH 11/35] Fix hlt1runchange test --- MooreScripts/tests/qmtest/create_hlt1_tck.qmt | 3 +++ .../tests/qmtest/create_hlt1_tck_prepare.qmt | 20 +++++++++++++++++++ MooreScripts/tests/qmtest/hlt1runchange.qmt | 2 +- 3 files changed, 24 insertions(+), 1 deletion(-) create mode 100644 MooreScripts/tests/qmtest/create_hlt1_tck_prepare.qmt diff --git a/MooreScripts/tests/qmtest/create_hlt1_tck.qmt b/MooreScripts/tests/qmtest/create_hlt1_tck.qmt index 33a1db543..6275c80c1 100644 --- a/MooreScripts/tests/qmtest/create_hlt1_tck.qmt +++ b/MooreScripts/tests/qmtest/create_hlt1_tck.qmt @@ -12,6 +12,9 @@ ####################################################### --> <extension class="GaudiTest.GaudiExeTest" kind="test"> + <argument name="prerequisites"><set> + <tuple><text>create_hlt1_tck_prepare</text><enumeral>PASS</enumeral></tuple> + </set></argument> <argument name="program"><text>python</text></argument> <argument name="args"><set> <text>$ALLENROOT/scripts/create_hlt1_tck.py</text> diff --git a/MooreScripts/tests/qmtest/create_hlt1_tck_prepare.qmt b/MooreScripts/tests/qmtest/create_hlt1_tck_prepare.qmt new file mode 100644 index 
000000000..daaf6dc16 --- /dev/null +++ b/MooreScripts/tests/qmtest/create_hlt1_tck_prepare.qmt @@ -0,0 +1,20 @@ +<?xml version="1.0" ?><!DOCTYPE extension PUBLIC '-//QM/2.3/Extension//EN' 'http://www.codesourcery.com/qm/dtds/2.3/-//qm/2.3/extension//en.dtd'> +<!-- + (c) Copyright 2024 CERN for the benefit of the LHCb Collaboration + + This software is distributed under the terms of the GNU General Public + Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". + + In applying this licence, CERN does not waive the privileges and immunities + granted to it by virtue of its status as an Intergovernmental Organization + or submit itself to any jurisdiction. +--> +<extension class="GaudiTest.GaudiExeTest" kind="test"> + <argument name="program"><text>rm</text></argument> + <argument name="args"><set> + <text>-rf</text> + <text>config.git</text> + </set></argument> + <argument name="timeout"><integer>100</integer></argument> + <argument name="use_temp_dir"><enumeral>true</enumeral></argument> +</extension> diff --git a/MooreScripts/tests/qmtest/hlt1runchange.qmt b/MooreScripts/tests/qmtest/hlt1runchange.qmt index 4094e5b62..0db592423 100644 --- a/MooreScripts/tests/qmtest/hlt1runchange.qmt +++ b/MooreScripts/tests/qmtest/hlt1runchange.qmt @@ -17,7 +17,7 @@ Run an HLT1 job in the Online testbench <argument name="args"><set> <text>$MOORESCRIPTSROOT/tests/options/HLT1Slim/Arch.xml</text> <text>--working-dir=hlt1runchange</text> - <text>--partition=TESTHLT1RUNCHANGE</text> + <text>--partition=TESTHLT1RC</text> <text>--test-file-db-key=2024_mep_292860_run_change_test</text> <text>--hlt-type=config.git:0x10000001</text> <text>--tck-from-odin</text> -- GitLab From 4b8ebd44c2ba19d1aaef09565157f0a16ba2a020 Mon Sep 17 00:00:00 2001 From: Rosen Matev <rosen.matev@cern.ch> Date: Tue, 13 Aug 2024 15:50:08 +0200 Subject: [PATCH 12/35] Remove environment variable from ref path --- AllenOnline/tests/qmtest/test_lumi.qmt | 2 +- AllenOnline/tests/qmtest/test_mep_banks.qmt | 2 +- AllenOnline/tests/qmtest/test_mep_banks_transpose.qmt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/AllenOnline/tests/qmtest/test_lumi.qmt b/AllenOnline/tests/qmtest/test_lumi.qmt index 5c508b7dd..5595597f8 100644 --- a/AllenOnline/tests/qmtest/test_lumi.qmt +++ b/AllenOnline/tests/qmtest/test_lumi.qmt @@ -23,7 +23,7 @@ Run lumi decoding and encoding on Allen MDF output created from MEP input <argument name="unsupported_platforms"><set> <text>detdesc</text> </set></argument> - <argument name="reference"><text>${ALLENONLINEROOT}/tests/refs/test_lumi.ref</text></argument> + <argument name="reference"><text>../refs/test_lumi.ref</text></argument> <argument name="use_temp_dir"><enumeral>true</enumeral></argument> <argument name="validator"><text> diff --git a/AllenOnline/tests/qmtest/test_mep_banks.qmt b/AllenOnline/tests/qmtest/test_mep_banks.qmt index 46061228c..fa7b9e116 100644 --- a/AllenOnline/tests/qmtest/test_mep_banks.qmt +++ b/AllenOnline/tests/qmtest/test_mep_banks.qmt @@ -24,7 +24,7 @@ <argument name="prerequisites"><set> <tuple><text>mep_passthrough_no_ut</text><enumeral>PASS</enumeral></tuple> </set></argument> - <argument name="reference"><text>${ALLENONLINEROOT}/tests/refs/test_mep_banks.ref</text></argument> + <argument name="reference"><text>../refs/test_mep_banks.ref</text></argument> <argument name="timeout"><integer>600</integer></argument> <argument name="validator"><text> diff --git a/AllenOnline/tests/qmtest/test_mep_banks_transpose.qmt 
b/AllenOnline/tests/qmtest/test_mep_banks_transpose.qmt index 703501e72..06badf12d 100644 --- a/AllenOnline/tests/qmtest/test_mep_banks_transpose.qmt +++ b/AllenOnline/tests/qmtest/test_mep_banks_transpose.qmt @@ -25,7 +25,7 @@ <argument name="prerequisites"><set> <tuple><text>mep_passthrough_no_ut</text><enumeral>PASS</enumeral></tuple> </set></argument> - <argument name="reference"><text>${ALLENONLINEROOT}/tests/refs/test_mep_banks_transpose-SYMLINK-DO_NOT_UPDATE_WITH_NEW.ref</text></argument> + <argument name="reference"><text>../refs/test_mep_banks_transpose-SYMLINK-DO_NOT_UPDATE_WITH_NEW.ref</text></argument> <argument name="timeout"><integer>600</integer></argument> <argument name="validator"><text> -- GitLab From 3019f5de02c7c6f93fb49189e97d2479b46a8621 Mon Sep 17 00:00:00 2001 From: Rosen Matev <rosen.matev@cern.ch> Date: Tue, 13 Aug 2024 23:57:42 +0200 Subject: [PATCH 13/35] Disable run change test on detdesc --- MooreScripts/tests/qmtest/check_run_change.qmt | 3 +++ MooreScripts/tests/qmtest/hlt1runchange.qmt | 3 +++ 2 files changed, 6 insertions(+) diff --git a/MooreScripts/tests/qmtest/check_run_change.qmt b/MooreScripts/tests/qmtest/check_run_change.qmt index 05305e520..64e27a680 100644 --- a/MooreScripts/tests/qmtest/check_run_change.qmt +++ b/MooreScripts/tests/qmtest/check_run_change.qmt @@ -24,4 +24,7 @@ </set></argument> <argument name="timeout"><integer>200</integer></argument> <argument name="use_temp_dir"><enumeral>true</enumeral></argument> + <argument name="unsupported_platforms"><set> + <text>detdesc</text> + </set></argument> </extension> diff --git a/MooreScripts/tests/qmtest/hlt1runchange.qmt b/MooreScripts/tests/qmtest/hlt1runchange.qmt index 0db592423..c1e1cb5c3 100644 --- a/MooreScripts/tests/qmtest/hlt1runchange.qmt +++ b/MooreScripts/tests/qmtest/hlt1runchange.qmt @@ -38,4 +38,7 @@ for fn in glob.glob(workdir + "/hlt1runchange/*.*"): result[os.path.basename(fn)] = open(fn).read() </text></argument> +<argument name="unsupported_platforms"><set> + <text>detdesc</text> +</set></argument> </extension> -- GitLab From 8bc87e47a3e70633ca41b590601fe534c50b9d48 Mon Sep 17 00:00:00 2001 From: Roel Aaij <raaij@nikhef.nl> Date: Thu, 15 Aug 2024 10:51:36 +0200 Subject: [PATCH 14/35] [AllenConfig] Update configuration to deal with 3 GPUs per server --- AllenOnline/options/AllenConfig.py | 29 +++++++++++++++++++++-------- 1 file changed, 21 insertions(+), 8 deletions(-) diff --git a/AllenOnline/options/AllenConfig.py b/AllenOnline/options/AllenConfig.py index 3030e7d70..752e40e45 100755 --- a/AllenOnline/options/AllenConfig.py +++ b/AllenOnline/options/AllenConfig.py @@ -10,6 +10,7 @@ # or submit itself to any jurisdiction. # ############################################################################### import os +import sys import socket from itertools import chain from pathlib import Path @@ -211,21 +212,33 @@ if run_online: # which is set by the SMI controller to the total number of Allen instances in the architecture, # *minus one*. n_instances = int(os.getenv("NBOFSLAVES")) + 1 + # The UTGID is set to {PARTITION}_{HOSTNAME}_Allen_{INSTANCE}, where {INSTANCE} is the instance + # number, starting from zero. + instance = int(os.getenv("UTGID").split("_")[3]) + + # Unsupported configuration + if n_instances > 3: + print( + f"ERROR: Unsupported HLT1 DAQ configuration with {n_instances} instances" + ) + sys.exit(1) + # The MBM options have a variable named Allen_Input{N} for each input buffer. 
input_buffers = [ getattr(mbm_setup, a) for a in sorted(dir(mbm_setup)) if a.startswith('Allen_Input') ] - # Only one Allen instance --> connect it to all BU buffers - # On sodin01 there is always just one BU (and one Allen) which only writes to Events_0, - # so make sure Allen does not subscribe to Events_1 and does not hang waiting for events - if n_instances == 1 and socket.gethostname() != 'sodin01': + # Special case for sodin01, where there is a single BU and all Allen instances must read only + # from that BU's output buffer + if socket.gethostname() == 'sodin01': + mep_provider.Connections = input_buffers[0] + # Two cases to connect to all BU output buffers: + # - There is only one Allen instance + # - This is the third Allen instance + elif (n_instances == 1 or instance == 2): mep_provider.Connections = input_buffers - # Multiple Allen instances --> connect this instance to only one BU buffer + # There are multiple Allen instances and this the first or second instance else: - # The UTGID is set to {PARTITION}_{HOSTNAME}_Allen_{INSTANCE}, where {INSTANCE} is the - # instance number, starting from zero. - instance = int(os.getenv("UTGID").split("_")[3]) mep_provider.Connections = [input_buffers[instance]] mep_provider.Requests = [ 'EvType=1;TriggerMask=0xFFFFFFFF,0xFFFFFFFF,0xFFFFFFFF,0xFFFFFFFF;VetoMask=0,0,0,0;MaskType=ANY;UserType=ONE;Frequency=PERC;Perc=100.0' -- GitLab From 12f3f2fb0ac433c85f87a0fe48a1f95dc41c90d9 Mon Sep 17 00:00:00 2001 From: Roel Aaij <raaij@nikhef.nl> Date: Thu, 15 Aug 2024 10:52:53 +0200 Subject: [PATCH 15/35] [testbench] Update the testbench to allow multiple instances of the main task to be specified as a command-line argument. --- MooreScripts/job/runHLT1.sh | 5 +++- MooreScripts/job/setupTask.sh | 7 ++++- MooreScripts/options/HLT1MEPProd.opts | 4 +-- .../MooreScripts/testbench/architecture.py | 29 ++++++++++++++----- .../testbench/scenarios/default.py | 11 +++---- MooreScripts/scripts/runDFTask.sh | 10 +++++++ MooreScripts/scripts/testbench.py | 9 +++++- 7 files changed, 58 insertions(+), 17 deletions(-) diff --git a/MooreScripts/job/runHLT1.sh b/MooreScripts/job/runHLT1.sh index eb854338c..e00ac44e2 100755 --- a/MooreScripts/job/runHLT1.sh +++ b/MooreScripts/job/runHLT1.sh @@ -15,7 +15,10 @@ source "$DIR/setupTask.sh" setup_options_path -MBM_SETUP_OPTIONS=${MOORESCRIPTSROOT}/tests/options/HLT1/MBM_setup.opts +unset CUDA_VISIBLE_DEVICES +if test -n "${BIND_NUMA-}"; then + export BIND_NUMA_GPU=1 +fi application=AllenApplication exec_gaudirun ${ALLENONLINEROOT}/options/AllenConfig.py diff --git a/MooreScripts/job/setupTask.sh b/MooreScripts/job/setupTask.sh index 8a70b2598..0f22f2b73 100755 --- a/MooreScripts/job/setupTask.sh +++ b/MooreScripts/job/setupTask.sh @@ -153,7 +153,12 @@ exec_gaudirun() { fi dump_environment cmd=() - if test -n "${BIND_NUMA-}"; then + if test -n "${BIND_NUMA_GPU-}"; then + gpu_num=$(echo $UTGID | grep -oP '[0-9]+$' ) + numa_domain=$(nvidia-smi topo -i ${gpu_num} -C | grep -oP "[0-9]+$") + cmd+=(numactl -N $numa_domain -m $numa_domain) + export CUDA_VISIBLE_DEVICES=${gpu_num} + elif test -n "${BIND_NUMA-}"; then numa_domains_num=$(lscpu -p=NODE | grep -oP '^[0-9]+$' | sort | uniq | wc -l) numa_domain=$(( $(echo $UTGID | grep -oP '[0-9]+$') % $numa_domains_num )) cmd+=(numactl -N $numa_domain -m $numa_domain) diff --git a/MooreScripts/options/HLT1MEPProd.opts b/MooreScripts/options/HLT1MEPProd.opts index cd08db012..961b42c8c 100644 --- a/MooreScripts/options/HLT1MEPProd.opts +++ b/MooreScripts/options/HLT1MEPProd.opts @@ -14,7 
+14,7 @@ Manager.Runable = "Wrap"; Wrap.Callable = "Reader"; Task.HavePause = true; -Reader.Buffer = "Events"; +Reader.Buffer = "$EVENTS_INSTANCE_BUFFER"; Reader.BrokenHosts = ""; Reader.Directories = @OnlineEnv.Reader_Directories; Reader.FilePrefix = @OnlineEnv.Reader_FilePrefix; @@ -34,4 +34,4 @@ Reader.ReuseFile = @OnlineEnv.Reader_Preload; MEPManager.PartitionBuffers = true; MEPManager.PartitionName = @OnlineEnv.PartitionName; MEPManager.PartitionID = @OnlineEnv.PartitionID; -MEPManager.Buffers = {"Events"}; +MEPManager.Buffers = {"$EVENTS_INSTANCE_BUFFER"}; diff --git a/MooreScripts/python/MooreScripts/testbench/architecture.py b/MooreScripts/python/MooreScripts/testbench/architecture.py index adde57d0e..3952e6404 100644 --- a/MooreScripts/python/MooreScripts/testbench/architecture.py +++ b/MooreScripts/python/MooreScripts/testbench/architecture.py @@ -9,6 +9,7 @@ # or submit itself to any jurisdiction. # ############################################################################### import os +import re import platform import xml.etree.ElementTree as ET from string import Template @@ -39,14 +40,28 @@ def _rinterp(obj, mapping): return obj -def parse_task_tree(task): +def parse_task_tree(task, instances): """Parse task tree into name, n_instances, subprocess.Popen arguments.""" name = task.attrib["name"] - try: - n_instances = int(task.attrib.get("instances", 1)) - except ValueError: # instances = "NUMBER_OF_INSTANCES" - n_instances = 1 + + instance_match = re.match(R"(\d+|(NUMBER_OF_INSTANCES)(?::(\d+))?)", + task.attrib.get("instances", "1")) + if instance_match.group(2) is None: + # Default number of instances or specified number + n_instances = int(instance_match.group(1)) + elif instance_match.groups()[1:] == ('NUMBER_OF_INSTANCES', None): + # NUMBER_OF_INSTANCES + n_instances = 1 if instances is None else instances + elif instances is not None and instances < int(instance_match.group(3)): + # NUMBER_OF_INSTANCES:N with instances != None + raise ValueError( + f"This architecture requires at least {int(instance_match.groups(3))} instances to run" + ) + else: + # NUMBER_OF_INSTANCES:N + n_instances = int( + instance_match.group(3)) if instances is None else instances params = {"args": [], "env": {}, "cwd": "/"} # The controller passes populates `-instances`` as the instances argument @@ -73,11 +88,11 @@ def parse_task_tree(task): return name, n_instances, params -def read_xml(path): +def read_xml(path, main_instances): """"Parse architecture file into a list of task specs.""" tree = ET.parse(path) tasks_inventory = tree.getroot() - return [parse_task_tree(task) for task in tasks_inventory] + return [parse_task_tree(task, main_instances) for task in tasks_inventory] def instance_args(tasks, replacements): diff --git a/MooreScripts/python/MooreScripts/testbench/scenarios/default.py b/MooreScripts/python/MooreScripts/testbench/scenarios/default.py index 5cbba94f9..626c34a78 100644 --- a/MooreScripts/python/MooreScripts/testbench/scenarios/default.py +++ b/MooreScripts/python/MooreScripts/testbench/scenarios/default.py @@ -49,9 +49,10 @@ async def run(tasks: List[emulator.Task], args, extra_argv): t for t in tasks if re.match(r".*(HLT|Mon).*", t.utgid.split("_")[2]) ] - if len(prod_tasks) != 1: - raise ValueError("There must be exactly one *Prod task") - prod_task = prod_tasks[0] + if args.measure_throughput == 0: + if len(prod_tasks) != 1: + raise ValueError("There must be exactly one *Prod task") + prod_task = prod_tasks[0] if extra_args.use_perf or extra_args.use_perf_control: for t 
in main_tasks: @@ -95,8 +96,8 @@ async def run(tasks: List[emulator.Task], args, extra_argv): else: # wait for the reader task to get to a PAUSED state (no more input) await tasks_wait_for_status(prod_tasks, "PAUSED") - - dim_prod_out = asyncdim.DimService(prod_task.utgid + "/Events/OUT", "X") + dim_prod_out = asyncdim.DimService(prod_task.utgid + "/Events/OUT", + "X") # stop producing new data await tasks_send_command(prod_tasks, "stop") diff --git a/MooreScripts/scripts/runDFTask.sh b/MooreScripts/scripts/runDFTask.sh index 4bd4ca0c7..a9711b1ad 100755 --- a/MooreScripts/scripts/runDFTask.sh +++ b/MooreScripts/scripts/runDFTask.sh @@ -28,6 +28,16 @@ setup_options_path cd_working_dir dump_environment + +# For HLT1 tests there is a separate Events buffer per NUMA +# domain. Setting this here allows a single .opts file to be used for both +if [[ "$NBOFSLAVES" == "0" ]]; then + export EVENTS_INSTANCE_BUFFER="Events" +else + instance=$(echo $UTGID | grep -oP '[0-9]+$' ) + export EVENTS_INSTANCE_BUFFER="Events_${instance}" +fi + if test -f "${OPTIONS}" -a -n "`echo ${OPTIONS} | grep .opts`"; then exec -a ${UTGID} genRunner.exe libDataflow.so dataflow_run_task \ -msg=Dataflow_OutputLogger -mon=Dataflow_DIMMonitoring -class=${CLASS} \ diff --git a/MooreScripts/scripts/testbench.py b/MooreScripts/scripts/testbench.py index 56ab99204..dceab88d9 100755 --- a/MooreScripts/scripts/testbench.py +++ b/MooreScripts/scripts/testbench.py @@ -135,6 +135,13 @@ parser.add_argument( help= "Enables writing of the encoding keys by setting env WRITE_ENCODING_KEYS=1.", ) +parser.add_argument( + "-n", + "--instances", + type=int, + default=None, + help="Number of instances of the main task to run", +) args, unknown_argv = parser.parse_known_args() args.data_dir = args.working_dir / args.data_dir @@ -174,7 +181,7 @@ replacements = { "WORKING_DIR": args.working_dir.resolve(), } -arch = architecture.read_xml(args.architecture) +arch = architecture.read_xml(args.architecture, args.instances) if args.write_encoding_keys: arch = architecture.overwrite_dict_value( arch, -- GitLab From bfe0b9dbb88739c73ee923283d00fbcdc253ef9e Mon Sep 17 00:00:00 2001 From: Roel Aaij <raaij@nikhef.nl> Date: Thu, 15 Aug 2024 11:57:55 +0200 Subject: [PATCH 16/35] [testbench] Allow NUMA bindings of other tasks --- MooreScripts/scripts/runDFTask.sh | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/MooreScripts/scripts/runDFTask.sh b/MooreScripts/scripts/runDFTask.sh index a9711b1ad..e75bb68ff 100755 --- a/MooreScripts/scripts/runDFTask.sh +++ b/MooreScripts/scripts/runDFTask.sh @@ -38,10 +38,23 @@ else export EVENTS_INSTANCE_BUFFER="Events_${instance}" fi +cmd=() +if test -n "${BIND_NUMA-}"; then + numa_domains_num=$(lscpu -p=NODE | grep -oP '^[0-9]+$' | sort | uniq | wc -l) + numa_domain=$(( $(echo $UTGID | grep -oP '[0-9]+$') % $numa_domains_num )) + cmd+=(numactl -N $numa_domain -m $numa_domain) +fi +cmd+=( + setarch x86_64 --addr-no-randomize bash -c 'exec -a "$0" "$@"' + ${UTGID} + genRunner.exe libDataflow.so dataflow_run_task + -msg=Dataflow_OutputLogger -mon=Dataflow_DIMMonitoring -class=${CLASS} + -opts=${OPTIONS} +) + if test -f "${OPTIONS}" -a -n "`echo ${OPTIONS} | grep .opts`"; then - exec -a ${UTGID} genRunner.exe libDataflow.so dataflow_run_task \ - -msg=Dataflow_OutputLogger -mon=Dataflow_DIMMonitoring -class=${CLASS} \ - -opts=${OPTIONS} + echo "${cmd[@]}" + exec "${cmd[@]}" else echo "'${OPTIONS}' does not exist does not end with .opts" exit 123 -- GitLab From 
ad973219b40021c7a1840d1d3545c63c181e4bcc Mon Sep 17 00:00:00 2001 From: RefBot <lhcbsoft@cern.ch> Date: Thu, 15 Aug 2024 14:22:04 +0200 Subject: [PATCH 17/35] Update References for: Allen!1669 based on lhcb-2024-patches-mr/1248 [skip ci] --- AllenOnline/tests/refs/mep_no_ut.ref | 4 ++-- AllenOnline/tests/refs/mep_no_ut.ref.detdesc | 4 ++-- AllenOnline/tests/refs/mep_no_ut.ref.x86_64_v3-detdesc-opt | 4 ++-- AllenOnline/tests/refs/mep_no_ut.ref.x86_64_v3-opt | 4 ++-- AllenOnline/tests/refs/mep_real_data.ref | 4 ++-- AllenOnline/tests/refs/mep_real_data.ref.x86_64_v3-opt | 4 ++-- 6 files changed, 12 insertions(+), 12 deletions(-) diff --git a/AllenOnline/tests/refs/mep_no_ut.ref b/AllenOnline/tests/refs/mep_no_ut.ref index ae1092111..a07eccacb 100644 --- a/AllenOnline/tests/refs/mep_no_ut.ref +++ b/AllenOnline/tests/refs/mep_no_ut.ref @@ -4,12 +4,12 @@ HLTControlFlowMgr INFO Concurrency level information: HLTControlFlowMgr INFO o Number of events slots: 1 HLTControlFlowMgr INFO o TBB thread pool size: 'ThreadPoolSize':1 ApplicationMgr INFO Application Manager Initialized successfully + rate_validator global_decision - host_routingbits_writer make_selected_object_lists make_subbanks make_selreps - rate_validator + host_routingbits_writer ApplicationMgr INFO Application Manager Started successfully DeviceFTGeometry INFO Conditions DB is compatible with FT bank version 7 and 8. Starting timer for throughput measurement diff --git a/AllenOnline/tests/refs/mep_no_ut.ref.detdesc b/AllenOnline/tests/refs/mep_no_ut.ref.detdesc index 1da7b2263..c95a06680 100644 --- a/AllenOnline/tests/refs/mep_no_ut.ref.detdesc +++ b/AllenOnline/tests/refs/mep_no_ut.ref.detdesc @@ -4,12 +4,12 @@ HLTControlFlowMgr INFO Concurrency level information: HLTControlFlowMgr INFO o Number of events slots: 1 HLTControlFlowMgr INFO o TBB thread pool size: 'ThreadPoolSize':1 ApplicationMgr INFO Application Manager Initialized successfully + rate_validator global_decision - host_routingbits_writer make_selected_object_lists make_subbanks make_selreps - rate_validator + host_routingbits_writer ApplicationMgr INFO Application Manager Started successfully DeviceFTGeometry INFO Conditions DB is compatible with FT bank version 7 and 8. Starting timer for throughput measurement diff --git a/AllenOnline/tests/refs/mep_no_ut.ref.x86_64_v3-detdesc-opt b/AllenOnline/tests/refs/mep_no_ut.ref.x86_64_v3-detdesc-opt index 5c76da533..a2b55b5d0 100644 --- a/AllenOnline/tests/refs/mep_no_ut.ref.x86_64_v3-detdesc-opt +++ b/AllenOnline/tests/refs/mep_no_ut.ref.x86_64_v3-detdesc-opt @@ -4,12 +4,12 @@ HLTControlFlowMgr INFO Concurrency level information: HLTControlFlowMgr INFO o Number of events slots: 1 HLTControlFlowMgr INFO o TBB thread pool size: 'ThreadPoolSize':1 ApplicationMgr INFO Application Manager Initialized successfully + rate_validator global_decision - host_routingbits_writer make_selected_object_lists make_subbanks make_selreps - rate_validator + host_routingbits_writer ApplicationMgr INFO Application Manager Started successfully DeviceFTGeometry INFO Conditions DB is compatible with FT bank version 7 and 8. 
Starting timer for throughput measurement diff --git a/AllenOnline/tests/refs/mep_no_ut.ref.x86_64_v3-opt b/AllenOnline/tests/refs/mep_no_ut.ref.x86_64_v3-opt index 5212629cd..5804ea567 100644 --- a/AllenOnline/tests/refs/mep_no_ut.ref.x86_64_v3-opt +++ b/AllenOnline/tests/refs/mep_no_ut.ref.x86_64_v3-opt @@ -4,12 +4,12 @@ HLTControlFlowMgr INFO Concurrency level information: HLTControlFlowMgr INFO o Number of events slots: 1 HLTControlFlowMgr INFO o TBB thread pool size: 'ThreadPoolSize':1 ApplicationMgr INFO Application Manager Initialized successfully + rate_validator global_decision - host_routingbits_writer make_selected_object_lists make_subbanks make_selreps - rate_validator + host_routingbits_writer ApplicationMgr INFO Application Manager Started successfully DeviceFTGeometry INFO Conditions DB is compatible with FT bank version 7 and 8. Starting timer for throughput measurement diff --git a/AllenOnline/tests/refs/mep_real_data.ref b/AllenOnline/tests/refs/mep_real_data.ref index ca070399d..665c9fe63 100644 --- a/AllenOnline/tests/refs/mep_real_data.ref +++ b/AllenOnline/tests/refs/mep_real_data.ref @@ -4,12 +4,12 @@ HLTControlFlowMgr INFO Concurrency level information: HLTControlFlowMgr INFO o Number of events slots: 1 HLTControlFlowMgr INFO o TBB thread pool size: 'ThreadPoolSize':1 ApplicationMgr INFO Application Manager Initialized successfully + rate_validator global_decision - host_routingbits_writer make_selected_object_lists make_subbanks make_selreps - rate_validator + host_routingbits_writer ApplicationMgr INFO Application Manager Started successfully DeviceFTGeometry INFO Conditions DB is compatible with FT bank version 7 and 8. Starting timer for throughput measurement diff --git a/AllenOnline/tests/refs/mep_real_data.ref.x86_64_v3-opt b/AllenOnline/tests/refs/mep_real_data.ref.x86_64_v3-opt index e8cad02de..5fd56e0bb 100644 --- a/AllenOnline/tests/refs/mep_real_data.ref.x86_64_v3-opt +++ b/AllenOnline/tests/refs/mep_real_data.ref.x86_64_v3-opt @@ -4,12 +4,12 @@ HLTControlFlowMgr INFO Concurrency level information: HLTControlFlowMgr INFO o Number of events slots: 1 HLTControlFlowMgr INFO o TBB thread pool size: 'ThreadPoolSize':1 ApplicationMgr INFO Application Manager Initialized successfully + rate_validator global_decision - host_routingbits_writer make_selected_object_lists make_subbanks make_selreps - rate_validator + host_routingbits_writer ApplicationMgr INFO Application Manager Started successfully DeviceFTGeometry INFO Conditions DB is compatible with FT bank version 7 and 8. 
Starting timer for throughput measurement -- GitLab From 888e66c15d7aec7a819a3a130bd270f98822c7f3 Mon Sep 17 00:00:00 2001 From: Christina Agapopoulou <christina.agapopoulou@cern.ch> Date: Tue, 20 Aug 2024 17:11:44 +0200 Subject: [PATCH 18/35] Update AllenConfig.py --- AllenOnline/options/AllenConfig.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/AllenOnline/options/AllenConfig.py b/AllenOnline/options/AllenConfig.py index 752e40e45..262ca6f58 100755 --- a/AllenOnline/options/AllenConfig.py +++ b/AllenOnline/options/AllenConfig.py @@ -231,7 +231,7 @@ if run_online: # Special case for sodin01, where there is a single BU and all Allen instances must read only # from that BU's output buffer if socket.gethostname() == 'sodin01': - mep_provider.Connections = input_buffers[0] + mep_provider.Connections = [input_buffers[0]] # Two cases to connect to all BU output buffers: # - There is only one Allen instance # - This is the third Allen instance -- GitLab From 9ca2d1b2b5b7684098ba9b3c732b06ba8d235aa3 Mon Sep 17 00:00:00 2001 From: Hlt Oper <hlt_oper@cern.ch> Date: Fri, 9 Aug 2024 17:06:38 +0200 Subject: [PATCH 19/35] NumMBMConnections increase to 20 to optimize throughput of the new hlt5 nodes. Memory tested for old 30 GB memory nodes --- MooreOnlineConf/options/online.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/MooreOnlineConf/options/online.py b/MooreOnlineConf/options/online.py index a424eafc0..68b2cf9f3 100644 --- a/MooreOnlineConf/options/online.py +++ b/MooreOnlineConf/options/online.py @@ -88,10 +88,10 @@ try: # NBOFTHREADS is populated from the "-numthreads" argument in Arch.xml except (KeyError, ValueError): n_threads = 1 - if task_type == "HLT2": - from multiprocessing import cpu_count - n_threads = cpu_count() - +if task_type == "HLT2": + from multiprocessing import cpu_count + n_threads = cpu_count() +""" if task_type == "HLT2": # TODO this should probably be moved to runHLT2.sh # TODO this should account for the baseline process memory usage @@ -106,7 +106,7 @@ if task_type == "HLT2": n_threads = n_threads_limit print(f"+++ Limiting number of threads to {n_threads_limit} " f"to have {mem_per_thread} GiB/thread") - +""" # Use execMode = 1 for multi-threaded (async_queued) mode and # use 0 (default) for single-threaded (sync) mode, i.e. debugging. application.config.execMode = 1 @@ -140,7 +140,7 @@ else: if task_type == "HLT2": application.config.events_LowMark = 1000 application.config.events_HighMark = 1200 - application.config.MBM_numConnections = 2 + application.config.MBM_numConnections = 20 # When processing PbPb data some events take a very long time to process. # Until all events for a given batch are processed, we cannot reuse the # corresponding connection. Therefore, we increase the number of connections -- GitLab From 34597df7cdf323b98a85af531ca18384420c819e Mon Sep 17 00:00:00 2001 From: Daniel Magdalinski <daniel.magdalinski@cern.ch> Date: Mon, 12 Aug 2024 14:32:05 +0200 Subject: [PATCH 20/35] Moving n_threads if back into the except. 
--- MooreOnlineConf/options/online.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/MooreOnlineConf/options/online.py b/MooreOnlineConf/options/online.py index 68b2cf9f3..65f0a982b 100644 --- a/MooreOnlineConf/options/online.py +++ b/MooreOnlineConf/options/online.py @@ -88,9 +88,9 @@ try: # NBOFTHREADS is populated from the "-numthreads" argument in Arch.xml except (KeyError, ValueError): n_threads = 1 -if task_type == "HLT2": - from multiprocessing import cpu_count - n_threads = cpu_count() + if task_type == "HLT2": + from multiprocessing import cpu_count + n_threads = cpu_count() """ if task_type == "HLT2": # TODO this should probably be moved to runHLT2.sh -- GitLab From 6952aa407bd816ea134f14554e1853fdae9e4307 Mon Sep 17 00:00:00 2001 From: Daniel Magdalinski <daniel.magdalinski@cern.ch> Date: Mon, 12 Aug 2024 14:33:08 +0200 Subject: [PATCH 21/35] Removing n_threads limit code. --- MooreOnlineConf/options/online.py | 17 +---------------- 1 file changed, 1 insertion(+), 16 deletions(-) diff --git a/MooreOnlineConf/options/online.py b/MooreOnlineConf/options/online.py index 65f0a982b..903caab3f 100644 --- a/MooreOnlineConf/options/online.py +++ b/MooreOnlineConf/options/online.py @@ -91,22 +91,7 @@ except (KeyError, ValueError): if task_type == "HLT2": from multiprocessing import cpu_count n_threads = cpu_count() -""" -if task_type == "HLT2": - # TODO this should probably be moved to runHLT2.sh - # TODO this should account for the baseline process memory usage - import subprocess - out = subprocess.check_output(["free", "-g"], encoding='utf8') - # 6 GiB are taken by reader + MBM - mem_avail_gb = int(out.splitlines()[1].split()[1]) - 6 - mem_per_thread = 1 - n_instances = int(os.getenv("NBOFSLAVES", 0)) + 1 - n_threads_limit = int(mem_avail_gb / n_instances / mem_per_thread) - if n_threads > n_threads_limit: - n_threads = n_threads_limit - print(f"+++ Limiting number of threads to {n_threads_limit} " - f"to have {mem_per_thread} GiB/thread") -""" + # Use execMode = 1 for multi-threaded (async_queued) mode and # use 0 (default) for single-threaded (sync) mode, i.e. debugging. application.config.execMode = 1 -- GitLab From 3c1a65d74b031287eb6e6ee246fdc373e03fd67b Mon Sep 17 00:00:00 2001 From: Rosen Matev <rosen.matev@cern.ch> Date: Mon, 19 Aug 2024 18:21:03 +0200 Subject: [PATCH 22/35] Explain MBM_numConnections setting --- MooreOnlineConf/options/online.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/MooreOnlineConf/options/online.py b/MooreOnlineConf/options/online.py index 903caab3f..c0c240e0d 100644 --- a/MooreOnlineConf/options/online.py +++ b/MooreOnlineConf/options/online.py @@ -125,12 +125,17 @@ else: if task_type == "HLT2": application.config.events_LowMark = 1000 application.config.events_HighMark = 1200 + # There are relatively frequent, relatively slow events which block + # the corresponding MBM connections. Until all events for a given + # burst are processed, we cannot reuse a connection, so if we don't + # have enough connections (relative to the CPU power available), + # this can stall processing. + # On the fastest Intel 2630-v4 we need 5 connections, and on the new + # Xeon 8592 we need 20. We cannot increase beyond what the MBM config + # allows (accounting for reader connections). application.config.MBM_numConnections = 20 - # When processing PbPb data some events take a very long time to process. - # Until all events for a given batch are processed, we cannot reuse the - # corresponding connection. 
Therefore, we increase the number of connections - # to a high value that is still less than the max allowed by the MBM config. - # application.config.MBM_numConnections = 24 + # When processing PbPb data, we may need to tune this further + # (in 2023 we needed 24), since some events take a very long time to process. else: application.config.MBM_numConnections = 1 -- GitLab From 149b7378317238be865aa81bf28bf8e95f767137 Mon Sep 17 00:00:00 2001 From: Rosen Matev <rosen.matev@cern.ch> Date: Tue, 20 Aug 2024 18:16:46 +0200 Subject: [PATCH 23/35] Remove unused HLT2 VdM test files --- MooreOnlineConf/options/hlt2_VdM2022.py | 32 --------- MooreScripts/job/runHLT2VdM.sh | 21 ------ MooreScripts/tests/options/HLT2VdM/Arch.xml | 65 ------------------- .../tests/options/HLT2VdM/OnlineEnv.opts | 12 ---- .../tests/options/HLT2VdM/OnlineEnvBase.py | 18 ----- 5 files changed, 148 deletions(-) delete mode 100644 MooreOnlineConf/options/hlt2_VdM2022.py delete mode 100755 MooreScripts/job/runHLT2VdM.sh delete mode 100644 MooreScripts/tests/options/HLT2VdM/Arch.xml delete mode 100644 MooreScripts/tests/options/HLT2VdM/OnlineEnv.opts delete mode 100644 MooreScripts/tests/options/HLT2VdM/OnlineEnvBase.py diff --git a/MooreOnlineConf/options/hlt2_VdM2022.py b/MooreOnlineConf/options/hlt2_VdM2022.py deleted file mode 100644 index a43ffe353..000000000 --- a/MooreOnlineConf/options/hlt2_VdM2022.py +++ /dev/null @@ -1,32 +0,0 @@ -############################################################################### -# (c) Copyright 2022 CERN for the benefit of the LHCb Collaboration # -# # -# This software is distributed under the terms of the GNU General Public # -# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # -# # -# In applying this licence, CERN does not waive the privileges and immunities # -# granted to it by virtue of its status as an Intergovernmental Organization # -# or submit itself to any jurisdiction. # -############################################################################### -"""Options for the van der Meer scan in Nov. 
2022 -""" -from Moore import options, run_moore -from Hlt2Conf.settings.hlt2_VdM import make_streams -from Moore.monitoring import run_default_monitoring -from PyConf.application import (metainfo_repos, retrieve_encoding_dictionary) - -options.lines_maker = make_streams - -# add the commissioning branch of https://gitlab.cern.ch/lhcb-conddb/file-content-metadata -# to be able to decode online Hlt1 locations (by default GitANNSvc will only look into 'master') -metainfo_repos.global_bind(extra_central_tags=['commissioning']) -# for running online, we need to make sure that the keys already exist on cvmfs -options.write_decoding_keys_to_git = False -metainfo_repos.global_bind(repos=[]) # only use repos on cvmfs -retrieve_encoding_dictionary.global_bind( - require_key_present=True) # require key is in repo -# optional -# options.require_specific_decoding_keys = ["b80bb34f", "cf39d86c"] - -with run_default_monitoring.bind(run=False): - run_moore(options) diff --git a/MooreScripts/job/runHLT2VdM.sh b/MooreScripts/job/runHLT2VdM.sh deleted file mode 100755 index 722d4aa04..000000000 --- a/MooreScripts/job/runHLT2VdM.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/bash -############################################################################### -# (c) Copyright 2000-2021 CERN for the benefit of the LHCb Collaboration # -# # -# This software is distributed under the terms of the GNU General Public # -# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # -# # -# In applying this licence, CERN does not waive the privileges and immunities # -# granted to it by virtue of its status as an Intergovernmental Organization # -# or submit itself to any jurisdiction. # -############################################################################### -set -euo pipefail -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" -source "$DIR/setupTask.sh" - -setup_options_path -exec_gaudirun \ - $MOOREONLINECONFROOT/options/verbosity.py \ - $MOOREONLINECONFROOT/options/tags-master.py \ - $MOOREONLINECONFROOT/options/hlt2_VdM2022.py \ - $MOOREONLINECONFROOT/options/online.py diff --git a/MooreScripts/tests/options/HLT2VdM/Arch.xml b/MooreScripts/tests/options/HLT2VdM/Arch.xml deleted file mode 100644 index 3090c8820..000000000 --- a/MooreScripts/tests/options/HLT2VdM/Arch.xml +++ /dev/null @@ -1,65 +0,0 @@ -<!-- - (c) Copyright 2021-2022 CERN for the benefit of the LHCb Collaboration - - This software is distributed under the terms of the GNU General Public - Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". - - In applying this licence, CERN does not waive the privileges and immunities - granted to it by virtue of its status as an Intergovernmental Organization - or submit itself to any jurisdiction. 
---> -<tasks_inventory> - - <task name="MBM" user="${USER}" group="${GROUP}"> - <command>${MOORESCRIPTSROOT}/scripts/runDFTask.sh</command> - <argument name="-type" value="${NAME}" /> - <argument name="-runinfo" value="${RUNINFO}" /> - <argument name="-options" value="${MOORESCRIPTSROOT}/options/HLT2MBM.opts" /> - <argument name="-class" value="Class0" /> - <fmcparam name="utgid" value="${PARTITION}_${NODE}_${NAME}_${INSTANCE}" /> - <fmcparam name="define" value="BINARY_TAG=${BINARY_TAG}" /> - <fmcparam name="define" value="WORKING_DIR=${WORKING_DIR}" /> - <timeout action="Any" value="20" /> - </task> - - <task name="MDFProd" user="${USER}" group="${GROUP}"> - <command>${MOORESCRIPTSROOT}/scripts/runDFTask.sh</command> - <argument name="-type" value="${NAME}" /> - <argument name="-runinfo" value="${RUNINFO}" /> - <argument name="-options" value="${MOORESCRIPTSROOT}/options/HLT2MDFProd.opts" /> - <argument name="-class" value="Class2" /> - <fmcparam name="utgid" value="${PARTITION}_${NODE}_${NAME}_${INSTANCE}" /> - <fmcparam name="define" value="BINARY_TAG=${BINARY_TAG}" /> - <fmcparam name="define" value="WORKING_DIR=${WORKING_DIR}" /> - <fmcparam name="define" value="DATA_DIR=${DATA_DIR}" /> - <timeout action="Any" value="30" /> - </task> - - <task name="HLT2" user="${USER}" group="${GROUP}" instances="2"> - <command>${MOORESCRIPTSROOT}/job/runHLT2VdM.sh</command> - <argument name="-type" value="${NAME}" /> - <argument name="-runinfo" value="${RUNINFO}" /> - <argument name="-class" value="Class1" /> - <argument name="-numthreads" value="20" /> - <fmcparam name="utgid" value="${PARTITION}_${NODE}_${NAME}_${INSTANCE}" /> - <fmcparam name="define" value="BINARY_TAG=${BINARY_TAG}" /> - <fmcparam name="define" value="WORKING_DIR=${WORKING_DIR}" /> - <fmcparam name="define" value="BIND_NUMA=1" /> - <timeout action="Any" value="120" /> - <timeout action="load" value="20" /> - </task> - - <task name="Writer" user="${USER}" group="${GROUP}"> - <command>${MOORESCRIPTSROOT}/scripts/runDFTask.sh</command> - <argument name="-type" value="${NAME}" /> - <argument name="-runinfo" value="${RUNINFO}" /> - <argument name="-options" value="${MOORESCRIPTSROOT}/options/TestWriter.opts" /> - <argument name="-class" value="Class1" /> - <fmcparam name="utgid" value="${PARTITION}_${NODE}_${NAME}_${INSTANCE}" /> - <fmcparam name="define" value="BINARY_TAG=${BINARY_TAG}" /> - <fmcparam name="define" value="WORKING_DIR=${WORKING_DIR}" /> - <timeout action="Any" value="20" /> - <timeout action="load" value="20" /> - </task> - -</tasks_inventory> diff --git a/MooreScripts/tests/options/HLT2VdM/OnlineEnv.opts b/MooreScripts/tests/options/HLT2VdM/OnlineEnv.opts deleted file mode 100644 index 5f131486a..000000000 --- a/MooreScripts/tests/options/HLT2VdM/OnlineEnv.opts +++ /dev/null @@ -1,12 +0,0 @@ -OnlineEnv.PartitionID = 65535; -OnlineEnv.PartitionName = "TEST"; -OnlineEnv.Activity = "PHYSICS"; -OnlineEnv.OutputLevel = 3; -// -OnlineEnv.Reader_Rescan = 0; -// OnlineEnv.Reader_Directories = {"/hlt2/objects/LHCb/0000252975/"}; -// OnlineEnv.Reader_FilePrefix = "Run_0000252975_"; -// 4.0 GB -// OnlineEnv.Reader_FilePrefix = "Run_0000252975_HLT24705_20221110-090425-628"; -OnlineEnv.Reader_Directories = {"/hlt2/objects/LHCb/0000255983"}; -OnlineEnv.Reader_FilePrefix = "Run_0000255983_HLT22610_20230204"; diff --git a/MooreScripts/tests/options/HLT2VdM/OnlineEnvBase.py b/MooreScripts/tests/options/HLT2VdM/OnlineEnvBase.py deleted file mode 100644 index 146de0e10..000000000 --- 
a/MooreScripts/tests/options/HLT2VdM/OnlineEnvBase.py +++ /dev/null @@ -1,18 +0,0 @@ -############################################################################### -# (c) Copyright 2022 CERN for the benefit of the LHCb Collaboration # -# # -# This software is distributed under the terms of the GNU General Public # -# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # -# # -# In applying this licence, CERN does not waive the privileges and immunities # -# granted to it by virtue of its status as an Intergovernmental Organization # -# or submit itself to any jurisdiction. # -############################################################################### -PartitionID = 65535 -PartitionName = "TEST" -Activity = "PHYSICS" -HltArchitecture = "dummy" -OnlineVersion = "v0" -MooreVersion = "v0" -MooreOnlineVersion = "v0" -OutputLevel = 3 -- GitLab From 4d4393b18e898979e2a81eb20bf8df3ce4a7a6ab Mon Sep 17 00:00:00 2001 From: Rosen Matev <rosen.matev@cern.ch> Date: Tue, 20 Aug 2024 18:17:37 +0200 Subject: [PATCH 24/35] Dynamically determine n_threads based on available cores --- MooreOnlineConf/options/online.py | 6 ++++-- MooreScripts/tests/options/HLT2/Arch.xml | 2 +- MooreScripts/tests/options/HLT2Perf/Arch.xml | 2 +- MooreScripts/tests/options/HLT2_pp_thor/Arch.xml | 2 +- 4 files changed, 7 insertions(+), 5 deletions(-) diff --git a/MooreOnlineConf/options/online.py b/MooreOnlineConf/options/online.py index c0c240e0d..f9f75d3f5 100644 --- a/MooreOnlineConf/options/online.py +++ b/MooreOnlineConf/options/online.py @@ -89,8 +89,10 @@ try: except (KeyError, ValueError): n_threads = 1 if task_type == "HLT2": - from multiprocessing import cpu_count - n_threads = cpu_count() + # run as many threads as we have cores available to this process + # (In case of binding to a numa domain, we have + # len(os.sched_getaffinity(0)) < multiprocessing.cpu_count() ) + n_threads = len(os.sched_getaffinity(0)) # Use execMode = 1 for multi-threaded (async_queued) mode and # use 0 (default) for single-threaded (sync) mode, i.e. debugging. 
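
Aside for readers of the hunk above (not part of the patch): the change makes HLT2 size its thread pool from the CPU set actually granted to the process instead of the whole machine. The short standalone Python sketch below illustrates that distinction; the helper name pick_n_threads is invented for the example and the behaviour assumes Linux, where a numactl binding (as set up via BIND_NUMA in runDFTask.sh) shrinks what os.sched_getaffinity(0) reports while multiprocessing.cpu_count() still returns every core in the box.

# Illustrative sketch only; mirrors the thread-count selection explained above.
import os
import multiprocessing


def pick_n_threads(task_type, env_threads):
    # An explicit NBOFTHREADS value (passed via -numthreads in Arch.xml) always wins.
    try:
        return int(env_threads)
    except (TypeError, ValueError):
        if task_type == "HLT2":
            # Cores this process may actually run on; smaller than cpu_count()
            # when the task was started under numactl -N <domain> -m <domain>.
            return len(os.sched_getaffinity(0))
        return 1


if __name__ == "__main__":
    print("cpu_count()       :", multiprocessing.cpu_count())
    print("sched_getaffinity :", len(os.sched_getaffinity(0)))
    print("threads for HLT2  :", pick_n_threads("HLT2", os.environ.get("NBOFTHREADS")))
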
diff --git a/MooreScripts/tests/options/HLT2/Arch.xml b/MooreScripts/tests/options/HLT2/Arch.xml index e7afe8663..7889a1331 100644 --- a/MooreScripts/tests/options/HLT2/Arch.xml +++ b/MooreScripts/tests/options/HLT2/Arch.xml @@ -40,7 +40,7 @@ <argument name="-type" value="${NAME}" /> <argument name="-runinfo" value="${RUNINFO}" /> <argument name="-class" value="Class1" /> - <argument name="-numthreads" value="20" /> + <!-- <argument name="-numthreads" value="20" /> --> <fmcparam name="utgid" value="${PARTITION}_${NODE}_${NAME}_${INSTANCE}" /> <fmcparam name="define" value="BINARY_TAG=${BINARY_TAG}" /> <fmcparam name="define" value="WORKING_DIR=${WORKING_DIR}" /> diff --git a/MooreScripts/tests/options/HLT2Perf/Arch.xml b/MooreScripts/tests/options/HLT2Perf/Arch.xml index ebeeea451..cdb0b07f2 100644 --- a/MooreScripts/tests/options/HLT2Perf/Arch.xml +++ b/MooreScripts/tests/options/HLT2Perf/Arch.xml @@ -40,7 +40,7 @@ <argument name="-type" value="${NAME}" /> <argument name="-runinfo" value="${RUNINFO}" /> <argument name="-class" value="Class1" /> - <argument name="-numthreads" value="20" /> + <!-- <argument name="-numthreads" value="20" /> --> <fmcparam name="utgid" value="${PARTITION}_${NODE}_${NAME}_${INSTANCE}" /> <fmcparam name="define" value="BINARY_TAG=${BINARY_TAG}" /> <fmcparam name="define" value="WORKING_DIR=${WORKING_DIR}" /> diff --git a/MooreScripts/tests/options/HLT2_pp_thor/Arch.xml b/MooreScripts/tests/options/HLT2_pp_thor/Arch.xml index ecdb1180c..66936e984 100644 --- a/MooreScripts/tests/options/HLT2_pp_thor/Arch.xml +++ b/MooreScripts/tests/options/HLT2_pp_thor/Arch.xml @@ -40,7 +40,7 @@ <argument name="-type" value="${NAME}" /> <argument name="-runinfo" value="${RUNINFO}" /> <argument name="-class" value="Class1" /> - <argument name="-numthreads" value="20" /> + <!-- <argument name="-numthreads" value="20" /> --> <fmcparam name="utgid" value="${PARTITION}_${NODE}_${NAME}_${INSTANCE}" /> <fmcparam name="define" value="BINARY_TAG=${BINARY_TAG}" /> <fmcparam name="define" value="WORKING_DIR=${WORKING_DIR}" /> -- GitLab From a2dd87683923d7f4f96d15579ef12d3014ea30ab Mon Sep 17 00:00:00 2001 From: Rosen Matev <rosen.matev@cern.ch> Date: Thu, 22 Aug 2024 15:05:36 +0200 Subject: [PATCH 25/35] Align HLT2 testbench to production configuration (reader config is still different) --- MooreScripts/options/HLT2MBM.opts | 2 +- MooreScripts/options/HLT2MDFProd.opts | 32 ++++++++++++++++-------- MooreScripts/tests/options/HLT2/Arch.xml | 3 +-- 3 files changed, 23 insertions(+), 14 deletions(-) diff --git a/MooreScripts/options/HLT2MBM.opts b/MooreScripts/options/HLT2MBM.opts index 37855bc5b..2a73f1edf 100644 --- a/MooreScripts/options/HLT2MBM.opts +++ b/MooreScripts/options/HLT2MBM.opts @@ -3,7 +3,7 @@ #pragma print off #include "$INFO_OPTIONS" #include "$FARMCONFIGROOT/options/Logging.opts" -OnlineEnv.MBM_setup = "-s=3000000 -e=50 -u=30 -b=18 -t=1 -y -i=Events -f -c -s=100000 -e=50 -u=15 -b=12 -t=1 -y -i=Output -f -c -s=100000 -e=50 -u=15 -b=12 -t=1 -y -i=Monitor -f -c"; +OnlineEnv.MBM_setup = "-s=1000000 -e=50 -u=30 -b=18 -t=1 -y -i=Events -f -c -s=100000 -e=50 -u=15 -b=12 -t=1 -y -i=Output -f -c"; // Manager.Setup = {"Dataflow_MBMServer/MEPManager"}; // diff --git a/MooreScripts/options/HLT2MDFProd.opts b/MooreScripts/options/HLT2MDFProd.opts index e28b15137..d1830341c 100644 --- a/MooreScripts/options/HLT2MDFProd.opts +++ b/MooreScripts/options/HLT2MDFProd.opts @@ -13,12 +13,17 @@ Manager.Services = {"Dataflow_MBMClient/MEPManager", Manager.Runable = "Wrap"; Wrap.Callable = "Reader"; 
Task.HavePause = true; +// +MEPManager.PartitionBuffers = true; +MEPManager.PartitionName = @OnlineEnv.PartitionName; +MEPManager.PartitionID = @OnlineEnv.PartitionID; +MEPManager.Buffers = {"Events"}; +MEPManager.InhibitCancel = true; + // Reader.Buffer = "Events"; Reader.BrokenHosts = ""; -Reader.Directories = @OnlineEnv.Reader_Directories; -Reader.FilePrefix = @OnlineEnv.Reader_FilePrefix; -Reader.AllowedRuns = {"*"}; +// Reader.MuDelay = 0; Reader.DeleteFiles = false; Reader.SaveRest = false; @@ -26,15 +31,20 @@ Reader.PauseSleep = 2; // Optional wait time until 'Output' event que Reader.InitialSleep = 0; Reader.MaxPauseWait = 1; Reader.GoService = ""; -Reader.Rescan = @OnlineEnv.Reader_Rescan; Reader.RequireConsumers = 0; +Reader.ConsumerWait = 400; // Needs to be at least the config time! + Reader.MMapFiles = 0; Reader.ReuseFile = 0; -Reader.PackingFactor = 6000; -Reader.AllocationSizekB = 600000; -Reader.PatchOdin = 5000000; +Reader.PackingFactor = 200; +Reader.AllocationSizekB = 20000; +Reader.PatchOdin = 0; // 0 = no patching // -MEPManager.PartitionBuffers = true; -MEPManager.PartitionName = @OnlineEnv.PartitionName; -MEPManager.PartitionID = @OnlineEnv.PartitionID; -MEPManager.Buffers = {"Events"}; +Reader.Rescan = @OnlineEnv.Reader_Rescan; +Reader.Directories = @OnlineEnv.Reader_Directories; +Reader.FilePrefix = @OnlineEnv.Reader_FilePrefix; +Reader.AllowedRuns = {"*"}; +// Only the StorageReader has the following properties and here +// we use BurstReader +//Reader.NumThreads = 2; +//Reader.NumBuffers = 2; diff --git a/MooreScripts/tests/options/HLT2/Arch.xml b/MooreScripts/tests/options/HLT2/Arch.xml index 7889a1331..1a2330ed4 100644 --- a/MooreScripts/tests/options/HLT2/Arch.xml +++ b/MooreScripts/tests/options/HLT2/Arch.xml @@ -35,7 +35,7 @@ <timeout action="Any" value="30" /> </task> - <task name="HLT2" user="${USER}" group="${GROUP}" instances="2"> + <task name="HLT2" user="${USER}" group="${GROUP}" instances="1"> <command>${MOORESCRIPTSROOT}/job/runHLT2.sh</command> <argument name="-type" value="${NAME}" /> <argument name="-runinfo" value="${RUNINFO}" /> @@ -44,7 +44,6 @@ <fmcparam name="utgid" value="${PARTITION}_${NODE}_${NAME}_${INSTANCE}" /> <fmcparam name="define" value="BINARY_TAG=${BINARY_TAG}" /> <fmcparam name="define" value="WORKING_DIR=${WORKING_DIR}" /> - <fmcparam name="define" value="BIND_NUMA=1" /> <fmcparam name="define" value="WRITE_ENCODING_KEYS=0" /> <timeout action="Any" value="120" /> <timeout action="load" value="20" /> -- GitLab From 5db217b2a71239a733b9126c55c867e9dc71e0d2 Mon Sep 17 00:00:00 2001 From: Rosen Matev <rosen.matev@cern.ch> Date: Thu, 22 Aug 2024 23:06:25 +0200 Subject: [PATCH 26/35] Distribute MBM connections among instances and limit --- MooreOnlineConf/options/online.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/MooreOnlineConf/options/online.py b/MooreOnlineConf/options/online.py index f9f75d3f5..057933345 100644 --- a/MooreOnlineConf/options/online.py +++ b/MooreOnlineConf/options/online.py @@ -83,6 +83,8 @@ if OnlineEnv.PartitionName.startswith("TEST"): application.updateAndReset.saveSetDir = "Savesets" # application.updateAndReset.saverCycle = 20 +n_instances = int(os.getenv("NBOFSLAVES", "0")) + 1 + try: n_threads = int(os.environ["NBOFTHREADS"]) # NBOFTHREADS is populated from the "-numthreads" argument in Arch.xml @@ -135,7 +137,9 @@ if task_type == "HLT2": # On the fastest Intel 2630-v4 we need 5 connections, and on the new # Xeon 8592 we need 20. 
We cannot increase beyond what the MBM config # allows (accounting for reader connections). - application.config.MBM_numConnections = 20 + # The connections are distributed equally among all instances and we + # limit them to the number of threads (useful for e.g. HLT2Slim). + application.config.MBM_numConnections = min(20 // n_instances, n_threads) # When processing PbPb data, we may need to tune this further # (in 2023 we needed 24), since some events take a very long time to process. else: -- GitLab From 69598add9db9c0d7c6fa6dbb1e2bcf6b08f05c87 Mon Sep 17 00:00:00 2001 From: Rosen Matev <rosen.matev@cern.ch> Date: Tue, 18 Jun 2024 15:15:08 +0200 Subject: [PATCH 27/35] ECAL Pi0 task Co-authored-by: Juan Leite <juan.baptista.leite@cern.ch> Co-authored-by: lesantor <leon.santoro@cern.ch> Co-authored-by: Rosen Matev <rosen.matev@cern.ch> Co-authored-by: Yang Gao <gaoyang1945@mails.ccnu.edu.cn> --- .../Pi0_Run3_KaliPackage/Kali_Analyzer.py | 127 +++++++++ .../Kali_doCalibration.py | 178 +++++++++++++ .../Pi0_Run3_KaliPackage/Kali_produceHist.py | 71 +++++ .../Kali_produceHist2D.py | 136 ++++++++++ .../Pi0_Run3_KaliPackage/Kali_produceMDF.py | 116 +++++++++ .../Pi0_Run3_KaliPackage/Kali_produceMap.py | 27 ++ .../Kali_produceNtuple.py | 104 ++++++++ MooreOnlineConf/options/CaloPi0/analyzer.py | 242 ++++++++++++++++++ MooreOnlineConf/options/CaloPi0/iterator.py | 172 +++++++++++++ .../python/MooreOnlineConf/Communicator.py | 86 +++++++ MooreScripts/job/CaloPi0/runAnalyzer.sh | 30 +++ MooreScripts/job/CaloPi0/runIterator.sh | 30 +++ .../testbench/scenarios/CalibrationPi0.py | 94 +++++++ .../testbench/scenarios/alignment.py | 3 + MooreScripts/tests/options/CaloPi0/Arch.xml | 39 +++ .../tests/options/CaloPi0/OnlineEnv.opts | 8 + .../tests/options/CaloPi0/OnlineEnvBase.py | 20 ++ README.md | 2 +- 18 files changed, 1484 insertions(+), 1 deletion(-) create mode 100755 MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_Analyzer.py create mode 100644 MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_doCalibration.py create mode 100644 MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_produceHist.py create mode 100644 MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_produceHist2D.py create mode 100755 MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_produceMDF.py create mode 100644 MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_produceMap.py create mode 100755 MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_produceNtuple.py create mode 100644 MooreOnlineConf/options/CaloPi0/analyzer.py create mode 100644 MooreOnlineConf/options/CaloPi0/iterator.py create mode 100644 MooreOnlineConf/python/MooreOnlineConf/Communicator.py create mode 100755 MooreScripts/job/CaloPi0/runAnalyzer.sh create mode 100755 MooreScripts/job/CaloPi0/runIterator.sh create mode 100644 MooreScripts/python/MooreScripts/testbench/scenarios/CalibrationPi0.py create mode 100644 MooreScripts/tests/options/CaloPi0/Arch.xml create mode 100644 MooreScripts/tests/options/CaloPi0/OnlineEnv.opts create mode 100644 MooreScripts/tests/options/CaloPi0/OnlineEnvBase.py diff --git a/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_Analyzer.py b/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_Analyzer.py new file mode 100755 index 000000000..cefbdfbc5 --- /dev/null +++ b/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_Analyzer.py @@ -0,0 +1,127 @@ +############################################################################### +# (c) Copyright 2024 CERN for the 
benefit of the LHCb Collaboration # +# # +# This software is distributed under the terms of the GNU General Public # +# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # +# # +# In applying this licence, CERN does not waive the privileges and immunities # +# granted to it by virtue of its status as an Intergovernmental Organization # +# or submit itself to any jurisdiction. # +############################################################################### +from ruamel.yaml import YAML +import multiprocessing +import os + + +def MDFprodOption(inputfiles, outputfile, n_instances): + filename = outputfile + option_file_name = filename.replace(".mdf", ".yaml") + output_manifest_file = filename.replace(".mdf", ".tck.json") + + threads_per_instance = int((multiprocessing.cpu_count() - 1) / n_instances) + if threads_per_instance == 0: + print( + "### WARNING: More instances than cpu_threads. Setting threads_per_instance = 1. \n" + ) + threads_per_instance = 1 + print(f"### threads_per_instance: {threads_per_instance}") + + f = open(option_file_name, "w") + write_lines = [ + "input_files:\n", + ] + ["- '%s'\n" % inputfile for inputfile in inputfiles] + [ + "input_type: 'RAW'\n", "evt_max: 10000\n", + "output_file: '%s'\n" % outputfile, + "output_manifest_file: '%s' \n" % output_manifest_file, + "input_process: 'Hlt2'\n", "input_raw_format: 0.5\n", + "data_type: Upgrade\n", "simulation: False\n", + f"n_threads: {threads_per_instance} \n", + "scheduler_legacy_mode: False\n", "output_type: 'RAW' \n", + "geometry_version : run3/trunk \n", "conditions_version : 'master' \n", + "print_freq: 1000 \n", "write_decoding_keys_to_git: False \n" + ] + f.writelines(write_lines) + f.close() + + return option_file_name + + +def NtupleprodOption(inputfiles, outputfile): + filename = inputfiles + option_file_name = filename.replace(".mdf", ".yaml") + json_file = filename.replace(".mdf", ".tck.json") + + f = open(option_file_name, "w") + write_lines = [ + "input_files:\n", + ] + [ + "- '%s'\n" % os.path.abspath(inputfile) for inputfile in [inputfiles] + ] + [ + "input_manifest_file: '%s' \n" % os.path.abspath(json_file), + "input_type: 'RAW'\n", + "evt_max: -1\n", + f"ntuple_file: '{os.path.join(os.getcwd(), outputfile)}'\n", + #"ntuple_file: '%s'\n" % outputfile, + "input_process: Hlt2\n", + "input_stream: default" + " \n", + "n_threads: 1 \n", + "input_raw_format: 0.5\n", + "lumi: False\n", + "data_type: Upgrade\n", + "simulation: False\n", + "conddb_tag: 'upgrade/master'\n", + "dddb_tag: 'upgrade/master'\n", + "geometry_version : run3/trunk \n", + "conditions_version : 'master' \n", + ] + f.writelines(write_lines) + f.close() + + return option_file_name + + +def merge_lambda_yaml_files(FirstPass_lambda: str, SecondPass_lambda: str, + output_file_path: str): + + yaml = YAML() + + with open(FirstPass_lambda, 'r') as stream: + calibFirst = yaml.load(stream) + + with open(SecondPass_lambda, 'r') as stream2: + calibSecond = yaml.load(stream2) + + Lines_1 = calibFirst['data'] + Lines_2 = calibSecond['data'] + + Lambda_1 = [None] * 11384 #number of histograms + Lambda_2 = [None] * 11384 + Lambda_3 = [None] * 11384 + Merged_lambda = [[None, None]] * 11384 + + output_merged_file = open( + os.path.join(output_file_path, "merged_lambda.yml"), 'w') + output_merged_file.write('data: [\n') + + for line1 in Lines_1: + Lambda_1[line1[0] - 32768] = float(line1[1]) + + for line2 in Lines_2: + Lambda_2[line2[0] - 32768] = float(line2[1]) + + for i in range(0, len(Lambda_1)): + + if (not Lambda_1[i] is None 
or not Lambda_2[i] is None): + Lambda_3[i] = Lambda_1[i] * Lambda_2[i] + else: + continue + + if (Lambda_3[i] < 1.4 or Lambda_3[i] > 0.6): + Merged_lambda.insert(i, [i + 32768, Lambda_3[i]]) + else: + Merged_lambda.insert(i, [i + 32768, 1.0]) + + output_merged_file.write(f"{Merged_lambda[i]}, \n") + + output_merged_file.write(']') diff --git a/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_doCalibration.py b/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_doCalibration.py new file mode 100644 index 000000000..7dcd4b801 --- /dev/null +++ b/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_doCalibration.py @@ -0,0 +1,178 @@ +############################################################################### +# (c) Copyright 2018 CERN for the benefit of the LHCb Collaboration # +# # +# This software is distributed under the terms of the GNU General Public # +# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # +# # +# In applying this licence, CERN does not waive the privileges and immunities # +# granted to it by virtue of its status as an Intergovernmental Organization # +# or submit itself to any jurisdiction. # +############################################################################### +#--> perform an iteration of the pi0 calibration: fill the histogram, perform the fit and extract the calibration constants. +#--> in default, only bad fits and cells with low statistics are saved; if VERBOSE invoked, all the fits are saved on disk. +#--> three types of input files are allowed: ROOT (TTree), MMap, ROOT (TH2D: names hists & hists_bg) +#--> any further questions, please contact Zhirui at zhirui@cern.ch + +import os +import argparse +from Configurables import LHCbApp +from Configurables import LHCb__Det__LbDD4hep__IOVProducer as IOVProducer, LHCb__Tests__FakeRunNumberProducer as FakeRunNumberProducer +from GaudiPython import AppMgr +from Gaudi.Configuration import ApplicationMgr, INFO + +parser = argparse.ArgumentParser(description="do_calibration") +parser.add_argument("--nIt", type=str, dest="nIt", default=None) +parser.add_argument("--passname", type=str, dest="passname", default=None) +parser.add_argument("--inputfile", type=str, dest="inputfile", default=None) +parser.add_argument("--outputfile", type=str, dest="outputfile", default=None) +parser.add_argument( + "--output_lambda_name", type=str, dest="output_lambda_name", default=None) + +args = parser.parse_args() +inputfile = args.inputfile +outputfile = args.outputfile +output_lambda_name = args.output_lambda_name +nIt = args.nIt +passname = args.passname + +args = parser.parse_args() +tuplename = "Tuple/DecayTree" +year = 'Upgrade' +filetype = 'TH2D' + +LHCbApp().DataType = 'Upgrade' +LHCbApp().CondDBtag = 'master' +LHCbApp().DDDBtag = 'master' +LHCbApp().Simulation = False +LHCbApp().GeometryVersion = 'run3/trunk' + +from Configurables import Pi0CalibrationAlg +pi0Calib = Pi0CalibrationAlg("Pi0Calibration") +pi0Calib.OutputLevel = 1 +pi0Calib.tupleFileName = inputfile +pi0Calib.tupleName = tuplename +pi0Calib.filetype = filetype +pi0Calib.outputDir = os.path.dirname(outputfile) +pi0Calib.lambdaFileName = output_lambda_name +pi0Calib.saveLambdaFile = output_lambda_name +pi0Calib.saveLambdaYMLFile = outputfile +pi0Calib.nIt = nIt +pi0Calib.passname = passname + +from Configurables import Pi0CalibrationMonitor +pi0Moni = Pi0CalibrationMonitor("Pi0CalibrationMonitor") +pi0Moni.tupleFileName = inputfile +pi0Moni.tupleName = tuplename +pi0Moni.outputDir = os.path.dirname(outputfile) 
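+# The monitor takes its input from the same directory that Pi0Calibration
+# writes its results to (both paths are derived from the --outputfile argument).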
+pi0Moni.inputDir = os.path.dirname(outputfile) +pi0Moni.OutputLevel = 1 +pi0Moni.nIt = nIt +pi0Moni.passname = passname + +from Configurables import GaudiSequencer + +mainSeq = GaudiSequencer("MainSeq") + +pi0Moni.outputDir = pi0Moni.outputDir + f"/Iter{nIt}Mon_{passname}" +os.mkdir(pi0Moni.outputDir) +mainSeq.Members = [ + FakeRunNumberProducer(Start=256126, Step=0), + IOVProducer(), pi0Calib, pi0Moni +] + +#if '1_histos' in inputfile: +# if 'FirstPass' in inputfile: +# os.mkdir(pi0Moni.outputDir + "/Iter1Mon_FirstPass") +# pi0Moni.outputDir = pi0Moni.outputDir + "/Iter1Mon_FirstPass" +# else: +# os.mkdir(pi0Moni.outputDir + "/Iter1Mon_SecondPass") +# pi0Moni.outputDir = pi0Moni.outputDir + "/Iter1Mon_SecondPass" +# +# mainSeq.Members = [ +# FakeRunNumberProducer(Start=256126, Step=0), +# IOVProducer(), pi0Calib, pi0Moni +# ] +#elif '2_histos' in inputfile: +# if 'FirstPass' in inputfile: +# os.mkdir(pi0Moni.outputDir + "/Iter2Mon_FirstPass") +# pi0Moni.outputDir = pi0Moni.outputDir + "/Iter2Mon_FirstPass" +# else: +# os.mkdir(pi0Moni.outputDir + "/Iter2Mon_SecondPass") +# pi0Moni.outputDir = pi0Moni.outputDir + "/Iter2Mon_SecondPass" +# mainSeq.Members = [ +# FakeRunNumberProducer(Start=256126, Step=0), +# IOVProducer(), pi0Calib, pi0Moni +# ] +#elif '3_histos' in inputfile: +# if 'FirstPass' in inputfile: +# os.mkdir(pi0Moni.outputDir + "/Iter3Mon_FirstPass") +# pi0Moni.outputDir = pi0Moni.outputDir + "/Iter3Mon_FirstPass" +# else: +# os.mkdir(pi0Moni.outputDir + "/Iter3Mon_SecondPass") +# pi0Moni.outputDir = pi0Moni.outputDir + "/Iter3Mon_SecondPass" +# mainSeq.Members = [ +# FakeRunNumberProducer(Start=256126, Step=0), +# IOVProducer(), pi0Calib, pi0Moni +# ] +#elif '4_histos' in inputfile: +# if 'FirstPass' in inputfile: +# os.mkdir(pi0Moni.outputDir + "/Iter4Mon_FirstPass") +# pi0Moni.outputDir = pi0Moni.outputDir + "/Iter4Mon_FirstPass" +# else: +# os.mkdir(pi0Moni.outputDir + "/Iter4Mon_SecondPass") +# pi0Moni.outputDir = pi0Moni.outputDir + "/Iter4Mon_SecondPass" +# mainSeq.Members = [ +# FakeRunNumberProducer(Start=256126, Step=0), +# IOVProducer(), pi0Calib, pi0Moni +# ] +#elif '5_histos' in inputfile: +# if 'FirstPass' in inputfile: +# os.mkdir(pi0Moni.outputDir + "/Iter5Mon_FirstPass") +# pi0Moni.outputDir = pi0Moni.outputDir + "/Iter5Mon_FirstPass" +# else: +# os.mkdir(pi0Moni.outputDir + "/Iter5Mon_SecondPass") +# pi0Moni.outputDir = pi0Moni.outputDir + "/Iter5Mon_SecondPass" +# mainSeq.Members = [ +# FakeRunNumberProducer(Start=256126, Step=0), +# IOVProducer(), pi0Calib, pi0Moni +# ] +#elif '6_histos' in inputfile: +# if 'FirstPass' in inputfile: +# os.mkdir(pi0Moni.outputDir + "/Iter6Mon_FirstPass") +# pi0Moni.outputDir = pi0Moni.outputDir + "/Iter6Mon_FirstPass" +# else: +# os.mkdir(pi0Moni.outputDir + "/Iter6Mon_SecondPass") +# pi0Moni.outputDir = pi0Moni.outputDir + "/Iter6Mon_SecondPass" +# mainSeq.Members = [ +# FakeRunNumberProducer(Start=256126, Step=0), +# IOVProducer(), pi0Calib, pi0Moni +# ] +#elif '7_histos' in inputfile: +# if 'FirstPass' in inputfile: +# os.mkdir(pi0Moni.outputDir + "/Iter7Mon_FirstPass") +# pi0Moni.outputDir = pi0Moni.outputDir + "/Iter7Mon_FirstPass" +# else: +# os.mkdir(pi0Moni.outputDir + "/Iter7Mon_SecondPass") +# pi0Moni.outputDir = pi0Moni.outputDir + "/Iter7Mon_SecondPass" +# mainSeq.Members = [ +# FakeRunNumberProducer(Start=256126, Step=0), +# IOVProducer(), pi0Calib, pi0Moni +# ] +#else: +# mainSeq.Members = [ +# FakeRunNumberProducer(Start=256126, Step=0), +# IOVProducer(), pi0Calib +# ] + +ApplicationMgr( + OutputLevel=INFO, 
AppName="Pi0Calibration", EvtMax=-1, EvtSel='NONE') +ApplicationMgr().TopAlg.append(mainSeq) +AppMgr().run(1) + +import shutil + +for file in os.listdir(pi0Calib.outputDir): + if not "txt" in file: continue + shutil.copyfile( + os.path.join(pi0Calib.outputDir, file), + os.path.join(pi0Moni.outputDir, file)) diff --git a/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_produceHist.py b/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_produceHist.py new file mode 100644 index 000000000..d54900dcf --- /dev/null +++ b/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_produceHist.py @@ -0,0 +1,71 @@ +#!/bin/python +############################################################################### +# (c) Copyright 2000-2018 CERN for the benefit of the LHCb Collaboration # +# # +# This software is distributed under the terms of the GNU General Public # +# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # +# # +# In applying this licence, CERN does not waive the privileges and immunities # +# granted to it by virtue of its status as an Intergovernmental Organization # +# or submit itself to any jurisdiction. # +############################################################################### +## ============= the basic import ==================== +#from Gaudi.Configuration import * +from Gaudi.Configuration import INFO +from Configurables import LHCb__Det__LbDD4hep__IOVProducer as IOVProducer, LHCb__Tests__FakeRunNumberProducer as FakeRunNumberProducer +from GaudiPython.Bindings import AppMgr +from Gaudi.Configuration import ApplicationMgr +from Configurables import Pi0MMap2Histo +from Configurables import GaudiSequencer +from Configurables import LHCbApp + +import os + +MSG_VERBOSE = 3 +MSG_DEBUG = 3 +MSG_INFO = 3 +MSG_WARNING = 4 +MSG_ERROR = 5 +MSG_FATAL = 6 +MSG_ALWAYS = 7 + +import argparse +parser = argparse.ArgumentParser(description="Input file delivery") +parser.add_argument("--inputfile", type=str, dest="inputfile", default=None) +parser.add_argument("--outputfile", type=str, dest="outputfile", default=None) +parser.add_argument( + "--input_lambda_name", type=str, dest="input_lambda_name", default=None) +args = parser.parse_args() + +inputfile = args.inputfile +outputfile = args.outputfile +input_lambda_name = args.input_lambda_name +tuplename = "Tuple/DecayTree" +year = 'Upgrade' + +if '__main__' == __name__: + + LHCbApp().DataType = 'Upgrade' + LHCbApp().CondDBtag = 'master' + LHCbApp().DDDBtag = 'master' + LHCbApp().Simulation = False + LHCbApp().GeometryVersion = 'run3/trunk' + # LHCbApp().OnlineMode = True + + pi02Histo = Pi0MMap2Histo("Pi0MMap2Histo") + pi02Histo.nworker = 1 + pi02Histo.filenames = [inputfile] + pi02Histo.outputDir = os.path.dirname(outputfile) + pi02Histo.outputName = os.path.basename(outputfile) + pi02Histo.OutputLevel = MSG_INFO + pi02Histo.lambdaFileName = input_lambda_name + + mainSeq = GaudiSequencer("MainSeq") + mainSeq.Members = [ + FakeRunNumberProducer(Start=268295, Step=0), + IOVProducer(), pi02Histo + ] + ApplicationMgr( + OutputLevel=INFO, AppName="Pi0MMap2Histo", EvtMax=1, EvtSel='NONE') + ApplicationMgr().TopAlg.append(mainSeq) + AppMgr().run(1) diff --git a/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_produceHist2D.py b/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_produceHist2D.py new file mode 100644 index 000000000..b6179d7c4 --- /dev/null +++ b/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_produceHist2D.py @@ -0,0 +1,136 @@ +#!/bin/python 
+############################################################################### +# (c) Copyright 2000-2018 CERN for the benefit of the LHCb Collaboration # +# # +# This software is distributed under the terms of the GNU General Public # +# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # +# # +# In applying this licence, CERN does not waive the privileges and immunities # +# granted to it by virtue of its status as an Intergovernmental Organization # +# or submit itself to any jurisdiction. # +############################################################################### +## ============= the basic import ==================== +#from Gaudi.Configuration import * +from Gaudi.Configuration import INFO +from Configurables import LHCb__Det__LbDD4hep__IOVProducer as IOVProducer, LHCb__Tests__FakeRunNumberProducer as FakeRunNumberProducer +from GaudiPython.Bindings import AppMgr +from Gaudi.Configuration import ApplicationMgr +from Configurables import Pi0MMap2Histo +from Configurables import GaudiSequencer +from Configurables import LHCbApp +from ROOT import RDataFrame, TChain, TFile, TF1, ROOT, TH2D, gROOT, TH1D, gPad, kRed, kGreen, kBlack +import ROOT as ROOT26, math +#from funcs import cellid2COLROW +from multiprocessing import Pool +import os + +MSG_VERBOSE = 3 +MSG_DEBUG = 3 +MSG_INFO = 3 +MSG_WARNING = 4 +MSG_ERROR = 5 +MSG_FATAL = 6 +MSG_ALWAYS = 7 +from tqdm import trange, tqdm +import argparse +parser = argparse.ArgumentParser(description="Input file delivery") +parser.add_argument("--inputfile", nargs='+', dest="inputfile", default=None) +parser.add_argument("--outputfile", type=str, dest="outputfile", default=None) +parser.add_argument( + "--input_lambda_name", type=str, dest="input_lambda_name", default=None) +parser.add_argument("--nworker", type=int, dest="nworker", default=None) +args = parser.parse_args() + +inputfile = args.inputfile +outputfile = args.outputfile +input_lambda_name = args.input_lambda_name +nworker = args.nworker +tuplename = "Tuple/DecayTree" + +if '__main__' == __name__: + import ROOT + ROOT.ROOT.EnableImplicitMT(nworker) + ch = ROOT.TChain(tuplename) + for f in inputfile: + ch.Add(f) + + df = ROOT.RDataFrame(ch) + + # fill histograme with the different cuts for the different areas + ROOT.gInterpreter.Declare(''' + bool in_tight_cut_region(int cellid){ + int col = ( (cellid & 63) >> 0 ); + int row = ( (cellid & 4032) >> 6 ); + int area = ( (cellid & 12288) >> 12 ); + + double ds; + if (area == 0) ds = 121.2; + if (area == 1) ds = 121.2 / 2; + if (area == 2) return false; + + double x = ds * ( col - 32 + .5 ); + double y = ds * ( 32 - row + .5 ); + + if (abs(y / x) < 0.3) return true; + else return false; + } + ''') + + df = df.Define( + "in_tight_cut_regions", + "in_tight_cut_region(g1_CellID)||in_tight_cut_region(g2_CellID)") + df = df.Filter( + "nPVs <= 1 && (in_tight_cut_regions&&pi0_PT>1700)||(!(in_tight_cut_regions))" + ) + + # scale candidate's mass. 
The func can be used in RDataFrame + m = "pi0_m12" + if os.path.exists(input_lambda_name): + lambdas = [] + with open(input_lambda_name, "r") as f: + for line in f.readlines(): + line = line.strip('\n').split(' ') + lambdas.append((int(line[0]), float(line[1]))) + lambdas = f"{lambdas}".replace('(', '{').replace(')', '}').replace( + '[', '{').replace(']', '}') + + ROOT.gInterpreter.Declare(f''' + std::map <int, float> lambdas = {lambdas}; + + float m_scale(int g1_cellid, int g2_cellid, float pi0_m){{ + float g1_lambda = 1; + float g2_lambda = 1; + if (lambdas.count(g1_cellid)) g1_lambda = lambdas[g1_cellid]; + if (lambdas.count(g2_cellid)) g2_lambda = lambdas[g2_cellid]; + return pi0_m * sqrt( g1_lambda * g2_lambda ); + }} + ''') + m = "pi0_m12_scaled" + df = df.Define("pi0_m12_scaled", + "m_scale(g1_CellID, g2_CellID, pi0_m12)") + else: + print("Lambda file not found. Create one to start") + with open(input_lambda_name, "w") as f: + f.write("") + print(f"File {input_lambda_name} created !") + h = TH2D("hists", ";cell id;#pi^{0} mass [MeV]", 11384, 1, 11385, 100, 0, + 250) + h1 = df.Histo2D( + ("h1", ";cell id;#pi^{0} mass [MeV]", 11384, 1, 11385, 100, 0, 250), + "g1_CellID", m) + + h2 = df.Histo2D( + ("h2", ";cell id;#pi^{0} mass [MeV]", 11384, 1, 11385, 100, 0, 250), + "g2_CellID", m) + + h.Add(h1.GetValue()) + h.Add(h2.GetValue()) + print(f"number of entries {h.GetEntries()}") + outf = ROOT.TFile( + os.path.join( + os.path.dirname(outputfile), os.path.basename(outputfile)), + "recreate") + h.Write("hists") + outf.Close() + +os.path.join(os.path.dirname(outputfile), os.path.basename(input_lambda_name)) diff --git a/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_produceMDF.py b/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_produceMDF.py new file mode 100755 index 000000000..edb33e881 --- /dev/null +++ b/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_produceMDF.py @@ -0,0 +1,116 @@ +############################################################################### +# (c) Copyright 2024 CERN for the benefit of the LHCb Collaboration # +# # +# This software is distributed under the terms of the GNU General Public # +# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # +# # +# In applying this licence, CERN does not waive the privileges and immunities # +# granted to it by virtue of its status as an Intergovernmental Organization # +# or submit itself to any jurisdiction. 
# +############################################################################### +from Gaudi.Configuration import INFO +from PyConf.Algorithms import CaloFutureRawToDigits +from RecoConf.muonid import make_muon_hits +from GaudiPython.Bindings import AppMgr +from PyConf.application import metainfo_repos +from Configurables import LHCb__Det__LbDD4hep__DD4hepSvc as DD4hepSvc +from RecoConf.calorimeter_reconstruction import make_digits +from RecoConf.reconstruction_objects import reconstruction +from RecoConf.global_tools import stateProvider_with_simplified_geom + +from RecoConf.decoders import default_ft_decoding_version, default_VeloCluster_source +from Hlt2Conf.algorithms_thor import ParticleFilter +import Functors as F +from Functors.math import in_range +from GaudiKernel.SystemOfUnits import MeV +from Hlt2Conf.standard_particles import make_photons +from Hlt2Conf.algorithms_thor import ParticleCombiner +from Moore.lines import Hlt2Line +from Moore.config import register_line_builder +from RecoConf.reconstruction_objects import make_pvs, upfront_reconstruction +from PyConf import configurable + +from Moore import Options, run_moore +from Configurables import LHCb__Det__LbDD4hep__DD4hepSvc as DD4hepSvc + +all_lines = {} + + +@configurable +def pi0_filter(particles): + cut = F.require_all(F.PT > 200 * (7 - F.ETA) * MeV) + return ParticleFilter(particles, F.FILTER(cut)) + + +@configurable +def photons_filter(particles): + cut = F.require_all(F.CALO_NEUTRAL_1TO9_ENERGY_RATIO > 0.7, + F.IS_NOT_H > .7) + return ParticleFilter(particles, F.FILTER(cut)) + + +@configurable +def make_own_resolved_pi0s(particles=make_photons(), + MaxMass=350 * MeV, + PtCut=0. * MeV, + **kwargs): + + comb_code = F.require_all(in_range(0., F.MASS, MaxMass)) + mother_code = F.require_all(F.PT > PtCut) + + return ParticleCombiner( + Inputs=[particles, particles], + DecayDescriptor="pi0 -> gamma gamma", + CombinationCut=comb_code, + CompositeCut=mother_code, + ParticleCombiner="ParticleAdder") + + +@register_line_builder(all_lines) +def All_pi0_line(name="Hlt2Allpi0", prescale=1.0): + photons = photons_filter(make_photons(PtCut=300 * MeV, pv_maker=make_pvs)) + resolved_pi0 = make_own_resolved_pi0s(particles=photons, MaxMass=300 * MeV) + pi0 = pi0_filter(resolved_pi0) + + return Hlt2Line( + name=name, algs=upfront_reconstruction() + [pi0], prescale=prescale) + + +def main(options: Options, secondPass: str, overlay_path: str): + + metainfo_repos.global_bind(extra_central_tags=['commissioning']) + dd4hepSvc = DD4hepSvc() + + if secondPass: + dd4hepSvc.ConditionsLocation = overlay_path + + dd4hepSvc.DetectorList = [ + '/world', 'VP', 'FT', 'Magnet', 'Rich1', 'Rich2', 'Ecal', 'Hcal', + 'Muon' + ] + + def resolved_pi0(): + return [builder() for builder in all_lines.values()] + + from RecoConf.global_tools import ( + trackMasterExtrapolator_with_simplified_geom, ) + + from RecoConf.hlt2_global_reco import ( + make_light_reco_pr_kf_without_UT, ) + + from RecoConf.hlt2_global_reco import reconstruction as hlt2_reconstruction + + from RecoConf.ttrack_selections_reco import make_ttrack_reco + + public_tools = [ + trackMasterExtrapolator_with_simplified_geom(), + stateProvider_with_simplified_geom(), + ] + + with reconstruction.bind(from_file=False), default_VeloCluster_source.bind( + bank_type="VP"), make_digits.bind( + calo_raw_bank=True), make_muon_hits.bind( + geometry_version=3), hlt2_reconstruction.bind( + make_reconstruction=make_light_reco_pr_kf_without_UT): + return run_moore( + options, resolved_pi0, public_tools, 
exclude_incompatible=False) diff --git a/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_produceMap.py b/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_produceMap.py new file mode 100644 index 000000000..449c867ff --- /dev/null +++ b/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_produceMap.py @@ -0,0 +1,27 @@ +#!/bin/python +############################################################################### +# (c) Copyright 2000-2018 CERN for the benefit of the LHCb Collaboration # +# # +# This software is distributed under the terms of the GNU General Public # +# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # +# # +# In applying this licence, CERN does not waive the privileges and immunities # +# granted to it by virtue of its status as an Intergovernmental Organization # +# or submit itself to any jurisdiction. # +############################################################################### +#--> convert the ROOT (TTree) to MMap file + +from GaudiPython import gbl +pi0Calib = gbl.Calibration.Pi0Calibration +import argparse + +parser = argparse.ArgumentParser(description="Input file delivery") +parser.add_argument("--inputfile", type=str, dest="inputfile", default=None) +parser.add_argument("--outputfile", type=str, dest="outputfile", default=None) +args = parser.parse_args() + +inputfile = args.inputfile +outputfile = args.outputfile +tuplename = "Tuple/DecayTree" +if '__main__' == __name__: + pi0Calib.Pi0CalibrationFile(inputfile, tuplename, outputfile) diff --git a/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_produceNtuple.py b/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_produceNtuple.py new file mode 100755 index 000000000..fec410a18 --- /dev/null +++ b/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_produceNtuple.py @@ -0,0 +1,104 @@ +############################################################################### +# (c) Copyright 2024 CERN for the benefit of the LHCb Collaboration # +# # +# This software is distributed under the terms of the GNU General Public # +# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # +# # +# In applying this licence, CERN does not waive the privileges and immunities # +# granted to it by virtue of its status as an Intergovernmental Organization # +# or submit itself to any jurisdiction. 
# +############################################################################### +import Functors as F +from FunTuple import FunctorCollection +from FunTuple import FunTuple_Particles as Funtuple +from PyConf.reading import get_particles, get_pvs, get_odin +from DaVinci.algorithms import create_lines_filter +from DaVinci import make_config, Options +import FunTuple.functorcollections as FCs +from PyConf.reading import get_rec_summary + +from Configurables import LHCb__Det__LbDD4hep__DD4hepSvc as DD4hepSvc + + +def main(options: Options, secondPass: str, overlay_path: str): + + dd4hepSvc = DD4hepSvc() + + if secondPass: + dd4hepSvc.ConditionsLocation = overlay_path + + dd4hepSvc.DetectorList = [ + '/world', 'VP', 'FT', 'Magnet', 'Rich1', 'Rich2', 'Ecal', 'Hcal', + 'Muon' + ] + + fields = { + 'pi0': 'pi0 -> gamma gamma', + 'g1': 'pi0 -> ^gamma gamma', + 'g2': 'pi0 -> gamma ^gamma', + } + + pi0_variables = FunctorCollection({ + "ID": F.PARTICLE_ID, + "m12": F.MASS, + "PT": F.PT, + "Eta": F.ETA + }) + + dau_variables = FunctorCollection({ + "ID": + F.PARTICLE_ID, + "PT": + F.PT, + "PE": + F.ENERGY, + "CellID": + F.CALO_NEUTRAL_ID - 32768, + "CaloNeutralE19": + F.CALO_NEUTRAL_1TO9_ENERGY_RATIO, + "Is_NotH": + F.IS_NOT_H, + }) + + variables = { + "pi0": pi0_variables, + "g1": dau_variables, + "g2": dau_variables, + } + + pi02ggLine = "Hlt2Allpi0" + pi02gg_data = get_particles(f"/Event/HLT2/{pi02ggLine}/Particles") + my_filter = create_lines_filter( + name="HDRFilter_pi02gg", lines=[pi02ggLine]) + + odin = get_odin() + + # HLT1 decision + Hlt1_dec = [ + "Hlt1Pi02GammaGammaDecision", + "Hlt1TrackMVADecision", + ] + + sel_info = FCs.SelectionInfo(selection_type="Hlt1", trigger_lines=Hlt1_dec) + + rec_summary = get_rec_summary() + + evt_variables = FunctorCollection({ + "nPVs": + F.VALUE_OR(-1) @ F.RECSUMMARY_INFO(rec_summary, "nPVs") + }) + + evt_variables += FCs.EventInfo() + + my_tuple = Funtuple( + name="Tuple", + tuple_name='DecayTree', + fields=fields, + variables=variables, + event_variables=evt_variables, + inputs=pi02gg_data, + ) + + config = make_config(options, [my_filter, my_tuple]) + + return config diff --git a/MooreOnlineConf/options/CaloPi0/analyzer.py b/MooreOnlineConf/options/CaloPi0/analyzer.py new file mode 100644 index 000000000..d299d5a1c --- /dev/null +++ b/MooreOnlineConf/options/CaloPi0/analyzer.py @@ -0,0 +1,242 @@ +############################################################################### +# (c) Copyright 2000-2023 CERN for the benefit of the LHCb Collaboration # +# # +# This software is distributed under the terms of the GNU General Public # +# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # +# # +# In applying this licence, CERN does not waive the privileges and immunities # +# granted to it by virtue of its status as an Intergovernmental Organization # +# or submit itself to any jurisdiction. 
# +############################################################################### +from MooreOnlineConf.Communicator import Communicator, State +import os, time, random, sys +import shlex +import subprocess +import glob, shutil + +sys.path.append("Pi0_Run3_KaliPackage") + +from MooreOnlineConf.utils import ( + alignment_options, + ensure_output_dir, + distribute_files, +) + +from Pi0_Run3_KaliPackage.Kali_Analyzer import NtupleprodOption, MDFprodOption +from pathlib import Path + +NIteration = 7 + + +def MDFprod(input_files: list, output_file: str, secondPass: str, + overlay_path: str, n_instances: int): + print(f"MDFprod: {input_files} -> {output_file}") + + options_file = MDFprodOption(input_files, str(output_file), + int(n_instances)) + print(options_file) + kali_path = os.path.dirname( + __file__) + "/Pi0_Run3_KaliPackage/Kali_produceMDF.py:main" + subprocess.run( + ["lbexec", kali_path, options_file, secondPass, overlay_path], + check=True) + + +def Ntupleprod(input_file: str, output_file: str, secondPass: str, + work_dir: str, overlay_path: str): + print(f"Ntupleprod: {input_file}, {overlay_path} -> {output_file}") + + options_file = os.path.abspath( + NtupleprodOption(str(input_file), str(output_file))) + print(options_file) + kali_path = os.path.dirname( + __file__) + "/Pi0_Run3_KaliPackage/Kali_produceNtuple.py:main" + if not os.path.exists(work_dir): os.mkdir(work_dir) + #subprocess.run( + # #["lb-run", "DaVinci/v64r4", "lbexec", kali_path, options_file, secondPass, overlay_path], + # [ + # "/swdev/calo/stack/DaVinci/run", "lbexec", kali_path, options_file, + # #"/scratch/lesantor/stack/DaVinci/run", "lbexec", kali_path, options_file, + # secondPass, overlay_path + # ], + # check=True) + subprocess.run( + f"cd {work_dir};\ + /swdev/calo/stack/DaVinci/run lbexec {kali_path} {options_file} {secondPass} {overlay_path};\ + ", + shell=True, + check=True) + shutil.rmtree(work_dir) + + +def MMapprod(input_file: str, output_file: str): + print(f"MMapprod: {input_file} -> {output_file}") + cmd = "python " + os.path.dirname( + __file__ + ) + f"/Pi0_Run3_KaliPackage/Kali_produceMap.py --inputfile {input_file} --outputfile {output_file}" + subprocess.run(shlex.split(cmd)) + + +def Histoprod(input_file: str, output_file: str, input_lambda_name: str): + print(f"Histoprod: {input_file} -> {output_file}") + cmd = "python " + os.path.dirname( + __file__ + ) + f"/Pi0_Run3_KaliPackage/Kali_produceHist.py --inputfile {input_file} --outputfile {output_file} --input_lambda_name {input_lambda_name}" + subprocess.run(shlex.split(cmd)) + + +def Hist2Doprod(input_file: str, output_file: str, input_lambda_name: str, + nworker: int): + print(f"Histoprod: {input_file} -> {output_file}") + #for input_file in input_file: + cmd = "python " + os.path.dirname( + __file__ + ) + f"/Pi0_Run3_KaliPackage/Kali_produceHist2D.py --inputfile {input_file} --outputfile {output_file} --input_lambda_name {input_lambda_name} --nworker {nworker}" + subprocess.run(shlex.split(cmd)) + + +def run(node, input_files, work_directory, iterator_output_path, n_instances): + com = Communicator(f"AligWrk_{node}") + + # FSM loop + state = State.NOT_READY + com.set_status(state) + n_it = 0 + p_it = 0 + + while True: + + command = com.get_command() + if command.startswith('configure') and state == State.NOT_READY: + if p_it == 0: + MDFprod( + input_files=input_files, + output_file=work_directory / + f"FirstPass_selected_{node}.mdf", + secondPass="FirstPass", + overlay_path=os.path.join( + 
"file:///group/online/alignment/Pi0CalibrationConditions", + "lhcb-conditions-database") if p_it == 2 else + "git:/cvmfs/lhcb.cern.ch/lib/lhcb/git-conddb/lhcb-conditions-database.git", + n_instances=n_instances) + p_it += 1 + state = State.READY + elif command.startswith('start') and state == State.READY: + state = State.RUNNING + com.set_status(state) + n_it += 1 + pass_name = "FirstPass" if p_it == 1 else "SecondPass" + print(f"Analyzer: {pass_name}, iteration: {n_it}") + + if p_it >= 3: + break + + if n_it == 1: + # First pass: Produces ntuples with default DB (lambdas == 1) + print("## Running MDFprod! \n") + + if p_it == 2: + MDFprod( + input_files=input_files, + output_file=work_directory / + f"{pass_name}_selected_{node}.mdf", + secondPass=pass_name, + overlay_path=os.path.join( + "file:///group/online/alignment/Pi0CalibrationConditions", + "lhcb-conditions-database") if p_it == 2 else + "git:/cvmfs/lhcb.cern.ch/lib/lhcb/git-conddb/lhcb-conditions-database.git", + n_instances=n_instances) + + print("## Running Ntupleprod! \n") + Ntupleprod( + input_file=work_directory / + f"{pass_name}_selected_{node}.mdf", + output_file=work_directory / f"{node}_{pass_name}.root", + work_dir=work_directory / f"{node}_{pass_name}", + secondPass=pass_name, + overlay_path=os.path.join( + "file:///group/online/alignment/Pi0CalibrationConditions", + "lhcb-conditions-database") if p_it == 2 else + "git:/cvmfs/lhcb.cern.ch/lib/lhcb/git-conddb/lhcb-conditions-database.git" + ) + + print(f"## Running Histo2Dprod {n_it}! \n") + Hist2Doprod( + input_file=work_directory / f"{node}_{pass_name}.root", + #input_file= + #"/scratch/lesantor/stack/backup80M/output/CaloPi0/analyzer/0000290068/TEST_N8190604_AlignWrk_1_SecondPass.root", + output_file=work_directory / + f"{node}_{pass_name}_{n_it}_histos.root", + input_lambda_name=os.path.join(iterator_output_path, + f"{pass_name}_lambda.txt"), + nworker=n_instances + ) #nworker is number of threads that RDataFrame used + + print( + f"## The lambda file in the interation {n_it} from {pass_name} comes from {pass_name}_lambda \n" + ) + + if n_it == NIteration: + n_it = 0 + p_it += 1 + + state = State.PAUSED + elif command.startswith('stop') and state == State.PAUSED: + state = State.READY + elif command.startswith('reset'): + print("Hello from reset!", flush=True) + state = State.NOT_READY + break + else: + print('analyzer: bad transition from %s to %s' % (state, command)) + state = State.ERROR + break + + time.sleep(random.uniform(0.5, 1.5)) + # Set the status + com.set_status(state) + + time.sleep(random.uniform(0.5, 1.5)) + # Set the status one last time. 
+ com.set_status(state) + + +if __name__ == '__main__': + import OnlineEnvBase as OnlineEnv + online_options = alignment_options(OnlineEnv) + + ensure_output_dir(online_options.analyzer_output_path, online_options.tag) + print() + ensure_output_dir(online_options.iterator_output_path, online_options.tag) + print( + f"Will write analyzer output to {online_options.analyzer_output_path}") + + if not OnlineEnv.PartitionName.startswith("TEST"): + INPUT_DATA_PATH = Path("input_data") + files = [ + sorted((INPUT_DATA_PATH / run).iterdir()) + for run in online_options.runs + ] + else: + INPUT_DATA_PATH = Path(f"/calib/align/LHCb/Calo/{OnlineEnv.RunNumber}") + files = [sorted(INPUT_DATA_PATH.iterdir())] + + files_per_node = distribute_files(online_options.nodes, files) + + utgid = os.environ["UTGID"] + worker_id = utgid.split("_")[1] + + try: + input_files = files_per_node[worker_id] + except KeyError: + # When testing we run multiple instances on the same node + # TODO this should probably be done based on the partition name + # and also "nodes" should be renamed to workers everywhere. + worker_id = utgid + input_files = files_per_node[worker_id] + + run(worker_id, + input_files=input_files, + work_directory=online_options.analyzer_output_path, + iterator_output_path=online_options.iterator_output_path, + n_instances=len(online_options.nodes)) diff --git a/MooreOnlineConf/options/CaloPi0/iterator.py b/MooreOnlineConf/options/CaloPi0/iterator.py new file mode 100644 index 000000000..3cbdb7454 --- /dev/null +++ b/MooreOnlineConf/options/CaloPi0/iterator.py @@ -0,0 +1,172 @@ +############################################################################### +# (c) Copyright 2000-2018 CERN for the benefit of the LHCb Collaboration # +# # +# This software is distributed under the terms of the GNU General Public # +# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # +# # +# In applying this licence, CERN does not waive the privileges and immunities # +# granted to it by virtue of its status as an Intergovernmental Organization # +# or submit itself to any jurisdiction. 
# +############################################################################### +from MooreOnlineConf.Communicator import Communicator, State +from MooreOnlineConf.utils import ( + alignment_options, + ensure_output_dir, +) +import subprocess +import shlex, os +from ruamel.yaml import YAML +from Pi0_Run3_KaliPackage.Kali_Analyzer import merge_lambda_yaml_files + +NIteration = 7 + + +def MergeHistograms(input_files: list, output_file: str): + print(f"MergeHistograms: {input_files} -> {output_file}") + cmd = f"hadd -f {output_file} {' '.join(input_files)}" + subprocess.run(shlex.split(cmd)) + + +def DoCalibration(nIt: int, passname: str, input_file: str, output_file: str, + output_lambda_name: str): + print(f"DoCalibration: {input_file} -> {output_file}") + cmd = "python " + os.path.dirname( + __file__ + ) + f"/Pi0_Run3_KaliPackage/Kali_doCalibration.py --nIt {nIt} --passname {passname} --passname {passname} --inputfile {input_file} --outputfile {output_file} --output_lambda_name {output_lambda_name} " + subprocess.run(shlex.split(cmd)) + + +def WriteOverlay(input_file: str, output_dir: str): + + print(f"WriteOverlay: {input_file} -> {output_dir}") + + if not os.path.exists( + os.path.join(output_dir, "lhcb-conditions-database")): + raise RuntimeError() + + yaml = YAML() + print("copy calibration constants to lhcb-conditions-database") + + with open( + os.path.join( + output_dir, + "lhcb-conditions-database/Conditions/Ecal/Calibration.yml/0"), + 'r') as stream: + cond = yaml.load(stream) + if os.path.exists(input_file): + with open(input_file, 'r') as stream2: + calib = yaml.load(stream2) + else: + print("yml file not found! \n") + + cond['Calibration']['data'] = calib['data'] + + with open( + os.path.join( + output_dir, + "lhcb-conditions-database/Conditions/Ecal/Calibration.yml/0"), + 'w', + encoding='utf8') as outfile: + yaml.dump(cond, outfile) + + print(f"calibration constants dumped to {output_dir}") + + +def run(online_options, work_directory): + # Start the communicator: + com = Communicator('AligDrv_0') + # FSM loop + state = State.NOT_READY + com.set_status(state) + n_it = 0 + p_it = 1 + + while True: + command = com.get_command() + if command == 'configure' and state == State.NOT_READY: + state = State.READY + elif command == 'start' and state == State.READY: + state = State.RUNNING + elif command == 'pause' and state == State.RUNNING: + state = State.PAUSED + com.set_status(state) + n_it += 1 + pass_name = "FirstPass" if p_it == 1 else "SecondPass" + print(f"Itertator: {pass_name}, iteration: {n_it}") + + if p_it >= 3: + print("ERROR iterator should not be running a third pass") + raise RuntimeError() + + hist_files = [ + str(online_options.analyzer_output_path / + f"{node}_{pass_name}_{n_it}_histos.root") + for node in online_options.nodes + ] + + print(f"## Running MergeHistograms {n_it}! \n") + MergeHistograms( + input_files=hist_files, + output_file=work_directory / f"{pass_name}_{n_it}_histos.root") + #Do fits and produce new lambdas + print(f"## Running DoCalibration {n_it}! \n") + DoCalibration( + nIt=str(n_it), + passname=pass_name, + input_file=work_directory / f"{pass_name}_{n_it}_histos.root", + output_file=work_directory / f"{pass_name}_{n_it}.yml", + output_lambda_name=work_directory / f"{pass_name}_lambda.txt") + + if n_it == NIteration: + print(f"## Running WriteOverlay {n_it}! 
\n") + WriteOverlay( + input_file=work_directory / f"{pass_name}_{n_it}.yml", + output_dir= + "/group/online/alignment/Pi0CalibrationConditions") + if p_it == 1: + n_it = 0 + p_it += 1 + + if p_it == 2 and n_it == NIteration: + print("## Merge the first and the second pass lambdas ! \n") + merge_lambda_yaml_files( + os.path.join(work_directory, "FirstPass_7.yml"), + os.path.join(work_directory, "SecondPass_7.yml"), + work_directory) + print(f"## Running WriteOverlay over merged files! \n") + WriteOverlay( + input_file=work_directory / "merged_lambda.yml", + output_dir= + "/group/online/alignment/Pi0CalibrationConditions") + + print('iterator done') + state = State.READY + else: + state = State.RUNNING + + elif command == 'stop' and state in (State.RUNNING, State.READY): + state = State.READY + elif command == 'reset': + state = State.NOT_READY + break + else: + print('iterator: bad transition from %s to %s' % (state, command)) + state = State.ERROR + break + # Set the status + com.set_status(state) + + # Set our status one last time + com.set_status(state) + + +if __name__ == '__main__': + import OnlineEnvBase as OnlineEnv + online_options = alignment_options(OnlineEnv) + + ensure_output_dir(online_options.iterator_output_path, online_options.tag) + + print( + f"Will write iterator output to {online_options.iterator_output_path}") + + run(online_options, work_directory=online_options.iterator_output_path) diff --git a/MooreOnlineConf/python/MooreOnlineConf/Communicator.py b/MooreOnlineConf/python/MooreOnlineConf/Communicator.py new file mode 100644 index 000000000..63d6067e5 --- /dev/null +++ b/MooreOnlineConf/python/MooreOnlineConf/Communicator.py @@ -0,0 +1,86 @@ +############################################################################### +# (c) Copyright 2000-2018 CERN for the benefit of the LHCb Collaboration # +# # +# This software is distributed under the terms of the GNU General Public # +# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # +# # +# In applying this licence, CERN does not waive the privileges and immunities # +# granted to it by virtue of its status as an Intergovernmental Organization # +# or submit itself to any jurisdiction. 
# +############################################################################### +import os, pydim + + +class State(object): + UNKNOWN = "UNKNOWN" + ERROR = "ERROR" + NOT_READY = "NOT_READY" + READY = "READY" + RUNNING = "RUNNING" + PAUSED = "PAUSED" + + +class Communicator(object): + def __init__(self, utgid_suffix): + self.__status = State.UNKNOWN + from multiprocessing import Pipe, Condition, Lock + self.__master_end, self.__process_end = Pipe() + self.__callback_lock = Lock() + utgid = self.utgid(utgid_suffix) + + pydim.dis_add_cmnd(utgid, "C", self.__command_callback, 1) + self.__info_svc = pydim.dis_add_service(utgid + "/status", "C", + self.__status_callback, 0) + pydim.dis_start_serving(utgid) + + pydim.dis_update_service(self.__info_svc) + + def partition(self): + return os.environ.get('PARTITION_NAME', 'TEST') + + def hostname(self): + import socket + return socket.gethostname().split('.')[0] + + def utgid(self, suffix): + utgid = os.environ.get('UTGID', '') + if utgid: + return utgid + else: + return '_'.join((self.partition(), self.hostname().upper(), + suffix)) + + def has_command(self, *args): + if len(args) == 0: + return self.__process_end.poll() + elif len(args) == 1: + return self.__process_end.poll(args[0]) + else: + raise ValueError("wrong number of arguments, must be 0 or 1.") + + def status(self): + return self.__status + + def get_command(self): + return self.__process_end.recv() + + def set_status(self, status): + self.__status = status + pydim.dis_update_service(self.__info_svc) + + def __put_command(self, data): + self.__master_end.send(data) + + def __close_command(self): + self.__master_end.close() + + def __close_status(self): + self.__process_end.close() + + def __command_callback(self, args, tag): + self.__callback_lock.acquire() + self.__put_command(args[0].rstrip("\0")) + self.__callback_lock.release() + + def __status_callback(self, *args): + return (self.__status, ) diff --git a/MooreScripts/job/CaloPi0/runAnalyzer.sh b/MooreScripts/job/CaloPi0/runAnalyzer.sh new file mode 100755 index 000000000..ac774a553 --- /dev/null +++ b/MooreScripts/job/CaloPi0/runAnalyzer.sh @@ -0,0 +1,30 @@ +#!/bin/bash +############################################################################### +# (c) Copyright 2000-2021 CERN for the benefit of the LHCb Collaboration # +# # +# This software is distributed under the terms of the GNU General Public # +# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # +# # +# In applying this licence, CERN does not waive the privileges and immunities # +# granted to it by virtue of its status as an Intergovernmental Organization # +# or submit itself to any jurisdiction. # +############################################################################### +set -euo pipefail +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" +source "$DIR/../setupTask.sh" + +setup_options_path MONITORING + + +cd_working_dir +dump_environment + +executable=genPython.exe +if [[ ${PARTITION} == "TEST"* ]]; then + # FIXME does the following still hold, i.e. can we debug fine when using genPython.exe? + # For some reason when we use genPython.exe gdb does not load the symbols from the shared libraries. + # Instead, use python directly, which works except that messages in the log viewer are not properly + # tagged by the process name. 
+ executable=python +fi +exec -a ${UTGID} ${executable} $MOOREONLINECONFROOT/options/CaloPi0/analyzer.py diff --git a/MooreScripts/job/CaloPi0/runIterator.sh b/MooreScripts/job/CaloPi0/runIterator.sh new file mode 100755 index 000000000..3c94523ba --- /dev/null +++ b/MooreScripts/job/CaloPi0/runIterator.sh @@ -0,0 +1,30 @@ +#!/bin/bash +############################################################################### +# (c) Copyright 2000-2021 CERN for the benefit of the LHCb Collaboration # +# # +# This software is distributed under the terms of the GNU General Public # +# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # +# # +# In applying this licence, CERN does not waive the privileges and immunities # +# granted to it by virtue of its status as an Intergovernmental Organization # +# or submit itself to any jurisdiction. # +############################################################################### +set -euo pipefail +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" +source "$DIR/../setupTask.sh" + +setup_options_path MONITORING + + +cd_working_dir +dump_environment + +executable=genPython.exe +if [[ ${PARTITION} == "TEST"* ]]; then + # FIXME does the following still hold, i.e. can we debug fine when using genPython.exe? + # For some reason when we use genPython.exe gdb does not load the symbols from the shared libraries. + # Instead, use python directly, which works except that messages in the log viewer are not properly + # tagged by the process name. + executable=python +fi +exec -a ${UTGID} ${executable} $MOOREONLINECONFROOT/options/CaloPi0/iterator.py diff --git a/MooreScripts/python/MooreScripts/testbench/scenarios/CalibrationPi0.py b/MooreScripts/python/MooreScripts/testbench/scenarios/CalibrationPi0.py new file mode 100644 index 000000000..3ec7978b0 --- /dev/null +++ b/MooreScripts/python/MooreScripts/testbench/scenarios/CalibrationPi0.py @@ -0,0 +1,94 @@ +############################################################################### +# (c) Copyright 2022-2023 CERN for the benefit of the LHCb Collaboration # +# # +# This software is distributed under the terms of the GNU General Public # +# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # +# # +# In applying this licence, CERN does not waive the privileges and immunities # +# granted to it by virtue of its status as an Intergovernmental Organization # +# or submit itself to any jurisdiction. # +############################################################################### +import logging +from MooreScripts.testbench.emulator import ( + tasks_load, + tasks_wait_for_status, + tasks_send_command, + tasks_wait_for_exit, + tasks_measure_throughput, + async_input, + dump_opts, +) + +log = logging.getLogger(__name__) + + +async def run(tasks, args, exta_argv): + analyzers = [t for t in tasks if "Wrk" in t.utgid] + iterator, = [t for t in tasks if "Drv" in t.utgid] + assert len(tasks) == len(analyzers) + 1 + + # Write the alignment-specific options + with open("RunList.opts", "w") as f: + dump_opts({"DeferredRuns": {"0000290068"}}, f) + #dump_opts({"DeferredRuns": {"0000255623"}}, f) + with open("NodeList.opts", "w") as f: + # usually the NodeList would contain only the node name + # since we run one instance per node. 
When testing locally + # however we use multiple instances + dump_opts({"NodeList": {t.utgid for t in analyzers}}, f) + + await tasks_load(tasks) + # TODO for some reason HLT2 publishes OFFLINE before NOT_READY, but only sometimes + await tasks_wait_for_status(tasks, "NOT_READY", skip=["OFFLINE"]) + + await tasks_send_command(tasks, "configure") + await tasks_wait_for_status(tasks, "READY") + + # await async_input("Press enter to continue...") + + await tasks_send_command(tasks, "start") + await tasks_wait_for_status(tasks, "RUNNING") + + for iteration in range(14): + log.info(f"Running analyzer for iteration {iteration} ...") + await tasks_wait_for_status(analyzers, "PAUSED") + + await tasks_send_command(analyzers, "stop") + await tasks_wait_for_status(analyzers, "READY") + + await tasks_send_command([iterator], "pause") + await tasks_wait_for_status([iterator], "PAUSED") + + status = await iterator.status() + if status == "READY": # we have converged + log.info("Iterator converged") + break + elif status == "RUNNING": # we haven't converged + log.info("Iterator did not converge") + await tasks_send_command(analyzers, "start") + await tasks_wait_for_status(analyzers, "RUNNING") + continue + else: + message = f"Unexpected status for iterator: {status}" + log.error(message) + raise RuntimeError(message) + + await tasks_send_command(tasks, "reset") + await tasks_wait_for_status(tasks, "NOT_READY") + + # # FIXME: the following is hack + for task in tasks: + task._exit_task.cancel() + + await tasks_send_command(tasks, "unload") + await tasks_wait_for_status(tasks, "OFFLINE") + + # Wait for the tasks to close + exit_codes = await tasks_wait_for_exit(tasks) + if set(exit_codes) != {0}: + for t, ec in zip(tasks, exit_codes): + if ec != 0: + log.error(f"{t.utgid} exited with non-zero code {ec}") + return 102 + + return 0 diff --git a/MooreScripts/python/MooreScripts/testbench/scenarios/alignment.py b/MooreScripts/python/MooreScripts/testbench/scenarios/alignment.py index 5936a6298..d8b16e86c 100644 --- a/MooreScripts/python/MooreScripts/testbench/scenarios/alignment.py +++ b/MooreScripts/python/MooreScripts/testbench/scenarios/alignment.py @@ -75,6 +75,9 @@ async def run(tasks, args, extra_argv): log.error(message) raise RuntimeError(message) + await tasks_send_command(analyzers, "stop") + await tasks_wait_for_status(analyzers, "READY") + await tasks_send_command(tasks, "reset") await tasks_wait_for_status(tasks, "NOT_READY") diff --git a/MooreScripts/tests/options/CaloPi0/Arch.xml b/MooreScripts/tests/options/CaloPi0/Arch.xml new file mode 100644 index 000000000..a3d1bee4f --- /dev/null +++ b/MooreScripts/tests/options/CaloPi0/Arch.xml @@ -0,0 +1,39 @@ +<!-- + (c) Copyright 2021-2022 CERN for the benefit of the LHCb Collaboration + + This software is distributed under the terms of the GNU General Public + Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". + + In applying this licence, CERN does not waive the privileges and immunities + granted to it by virtue of its status as an Intergovernmental Organization + or submit itself to any jurisdiction. 
+--> +<tasks_inventory> + + <task name="AlignWrk" user="${USER}" group="${GROUP}" instances="2"> + <command>${MOORESCRIPTSROOT}/job/CaloPi0/runAnalyzer.sh</command> + <argument name="-type" value="${NAME}" /> + <argument name="-runinfo" value="${RUNINFO}" /> + <argument name="-class" value="Class1" /> + <fmcparam name="utgid" value="${PARTITION}_${NODE}_${NAME}_${INSTANCE}" /> + <fmcparam name="define" value="BINARY_TAG=${BINARY_TAG}" /> + <fmcparam name="define" value="PYTHONUNBUFFERED=1" /> + <fmcparam name="define" value="WORKING_DIR=${WORKING_DIR}" /> + <timeout action="Any" value="120" /> + <timeout action="load" value="20" /> + </task> + + <task name="AlignDrv" user="${USER}" group="${GROUP}"> + <command>${MOORESCRIPTSROOT}/job/CaloPi0/runIterator.sh</command> + <argument name="-type" value="${NAME}" /> + <argument name="-runinfo" value="${RUNINFO}" /> + <argument name="-class" value="Class1" /> + <fmcparam name="utgid" value="${PARTITION}_${NODE}_${NAME}_${INSTANCE}" /> + <fmcparam name="define" value="BINARY_TAG=${BINARY_TAG}" /> + <fmcparam name="define" value="PYTHONUNBUFFERED=1" /> + <fmcparam name="define" value="WORKING_DIR=${WORKING_DIR}" /> + <timeout action="Any" value="120" /> + <timeout action="load" value="20" /> + </task> + +</tasks_inventory> diff --git a/MooreScripts/tests/options/CaloPi0/OnlineEnv.opts b/MooreScripts/tests/options/CaloPi0/OnlineEnv.opts new file mode 100644 index 000000000..1d378aa5b --- /dev/null +++ b/MooreScripts/tests/options/CaloPi0/OnlineEnv.opts @@ -0,0 +1,8 @@ +OnlineEnv.PartitionID = 65535; +OnlineEnv.PartitionName = "TESTBEAMGUI"; +OnlineEnv.Activity = "Alignment|CaloPi0"; +OnlineEnv.OutputLevel = 3; +OnlineEnv.Reader_Rescan = 1; +OnlineEnv.Reader_Directories = {"/scratch/rmatev"}; +OnlineEnv.Reader_FilePrefix = "Run_0000290068_"; +OnlineEnv.RunNumber = "0000290068 "; diff --git a/MooreScripts/tests/options/CaloPi0/OnlineEnvBase.py b/MooreScripts/tests/options/CaloPi0/OnlineEnvBase.py new file mode 100644 index 000000000..4599a0d51 --- /dev/null +++ b/MooreScripts/tests/options/CaloPi0/OnlineEnvBase.py @@ -0,0 +1,20 @@ +############################################################################### +# (c) Copyright 2022 CERN for the benefit of the LHCb Collaboration # +# # +# This software is distributed under the terms of the GNU General Public # +# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # +# # +# In applying this licence, CERN does not waive the privileges and immunities # +# granted to it by virtue of its status as an Intergovernmental Organization # +# or submit itself to any jurisdiction. # +############################################################################### +PartitionID = 65535 +PartitionName = "TESTBEAMGUI" +Activity = "Alignment|CaloPi0" +HltArchitecture = "dummy" +OnlineVersion = "v0" +MooreVersion = "v0" +MooreOnlineVersion = "v0" +OutputLevel = 3 +#RunNumber = "0000268295" #Only ECAL +RunNumber = "0000290068" diff --git a/README.md b/README.md index 2c01d21d4..940f881d8 100644 --- a/README.md +++ b/README.md @@ -251,7 +251,7 @@ As of May 2023, at most 5 MDFs from the beginning of each run are stored in ``` /calib/online/tmpHlt1Dumps/LHCb/<run-number> ``` -Those files will become available only after the run is stopped, and deleted after one week. +Those files will become available only after the run is stopped, and deleted after one week. If you need different or (slightly) more files from a run, follow the instructions below. 
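For orientation, the run-control handshake that the analyzer workers added in this commit implement on top of `MooreOnlineConf.Communicator` reduces to the sketch below. It uses only the `Communicator`/`State` API introduced here; the per-state work (MDF, ntuple and histogram production, iteration counting) is elided and the state names follow `analyzer.py` above.

```
from MooreOnlineConf.Communicator import Communicator, State

def worker_loop(utgid_suffix):
    com = Communicator(utgid_suffix)   # registers the DIM command and status services
    state = State.NOT_READY
    com.set_status(state)
    while True:
        command = com.get_command()    # blocks until run control sends a command
        if command.startswith('configure') and state == State.NOT_READY:
            state = State.READY        # one-off setup (e.g. first MDF production)
        elif command.startswith('start') and state == State.READY:
            state = State.RUNNING
            # ... one iteration of work happens here ...
            state = State.PAUSED       # tell the iterator this pass is done
        elif command.startswith('stop') and state == State.PAUSED:
            state = State.READY
        elif command.startswith('reset'):
            state = State.NOT_READY
            break
        else:
            state = State.ERROR        # unexpected transition
            break
        com.set_status(state)
    com.set_status(state)              # publish the final state before exiting
```

The iterator implements the complementary side with an extra `pause` transition, and the testbench scenario `CalibrationPi0.py` drives both: configure and start once, then repeated PAUSED→stop→start cycles for the analyzers while the iterator merges histograms and runs the fits on each pause.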
-- GitLab From 9cf2c093b75b44f3fa114d99c3b50b570062e65e Mon Sep 17 00:00:00 2001 From: RTAAlign <noreply@cern.ch> Date: Tue, 18 Jun 2024 15:07:15 +0200 Subject: [PATCH 28/35] Online interaction and condition propagation --- .../Pi0_Run3_KaliPackage/Kali_Analyzer.py | 2 +- .../Pi0_Run3_KaliPackage/Kali_produceMDF.py | 7 +- .../Kali_produceNtuple.py | 7 +- MooreOnlineConf/options/CaloPi0/analyzer.py | 92 ++++++++++--------- MooreOnlineConf/options/CaloPi0/iterator.py | 81 ++++++++-------- .../python/MooreOnlineConf/Communicator.py | 5 +- .../python/MooreOnlineConf/utils.py | 7 +- MooreScripts/job/CaloPi0/runAnalyzer.sh | 14 +-- MooreScripts/job/CaloPi0/runIterator.sh | 14 +-- MooreScripts/job/setupTask.sh | 17 ++++ 10 files changed, 126 insertions(+), 120 deletions(-) diff --git a/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_Analyzer.py b/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_Analyzer.py index cefbdfbc5..6bdbc284f 100755 --- a/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_Analyzer.py +++ b/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_Analyzer.py @@ -30,7 +30,7 @@ def MDFprodOption(inputfiles, outputfile, n_instances): write_lines = [ "input_files:\n", ] + ["- '%s'\n" % inputfile for inputfile in inputfiles] + [ - "input_type: 'RAW'\n", "evt_max: 10000\n", + "input_type: 'RAW'\n", "evt_max: -1\n", "output_file: '%s'\n" % outputfile, "output_manifest_file: '%s' \n" % output_manifest_file, "input_process: 'Hlt2'\n", "input_raw_format: 0.5\n", diff --git a/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_produceMDF.py b/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_produceMDF.py index edb33e881..d4f0a50cf 100755 --- a/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_produceMDF.py +++ b/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_produceMDF.py @@ -76,13 +76,14 @@ def All_pi0_line(name="Hlt2Allpi0", prescale=1.0): name=name, algs=upfront_reconstruction() + [pi0], prescale=prescale) -def main(options: Options, secondPass: str, overlay_path: str): +def main(options: Options, pass_name: str, overlay_path: str): metainfo_repos.global_bind(extra_central_tags=['commissioning']) dd4hepSvc = DD4hepSvc() - if secondPass: - dd4hepSvc.ConditionsLocation = overlay_path + if pass_name == "SecondPass": + dd4hepSvc.UseConditionsOverlay = True + dd4hepSvc.ConditionsOverlayInitPath = overlay_path dd4hepSvc.DetectorList = [ '/world', 'VP', 'FT', 'Magnet', 'Rich1', 'Rich2', 'Ecal', 'Hcal', diff --git a/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_produceNtuple.py b/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_produceNtuple.py index fec410a18..3fac25e05 100755 --- a/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_produceNtuple.py +++ b/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_produceNtuple.py @@ -20,12 +20,13 @@ from PyConf.reading import get_rec_summary from Configurables import LHCb__Det__LbDD4hep__DD4hepSvc as DD4hepSvc -def main(options: Options, secondPass: str, overlay_path: str): +def main(options: Options, pass_name: str, overlay_path: str): dd4hepSvc = DD4hepSvc() - if secondPass: - dd4hepSvc.ConditionsLocation = overlay_path + if pass_name == "SecondPass": + dd4hepSvc.UseConditionsOverlay = True + dd4hepSvc.ConditionsOverlayInitPath = overlay_path dd4hepSvc.DetectorList = [ '/world', 'VP', 'FT', 'Magnet', 'Rich1', 'Rich2', 'Ecal', 'Hcal', diff --git a/MooreOnlineConf/options/CaloPi0/analyzer.py b/MooreOnlineConf/options/CaloPi0/analyzer.py index 
d299d5a1c..b19d8af99 100644 --- a/MooreOnlineConf/options/CaloPi0/analyzer.py +++ b/MooreOnlineConf/options/CaloPi0/analyzer.py @@ -14,7 +14,7 @@ import shlex import subprocess import glob, shutil -sys.path.append("Pi0_Run3_KaliPackage") +sys.path.append("Pi0_Run3_KaliPackage") # FIXME remove from MooreOnlineConf.utils import ( alignment_options, @@ -28,6 +28,11 @@ from pathlib import Path NIteration = 7 +def subprocess_run(*args, **kwargs): + print(f"subprocess.run(): {args} {kwargs}") + return subprocess.run(*args, **kwargs) + + def MDFprod(input_files: list, output_file: str, secondPass: str, overlay_path: str, n_instances: int): print(f"MDFprod: {input_files} -> {output_file}") @@ -37,7 +42,7 @@ def MDFprod(input_files: list, output_file: str, secondPass: str, print(options_file) kali_path = os.path.dirname( __file__) + "/Pi0_Run3_KaliPackage/Kali_produceMDF.py:main" - subprocess.run( + subprocess_run( ["lbexec", kali_path, options_file, secondPass, overlay_path], check=True) @@ -52,7 +57,7 @@ def Ntupleprod(input_file: str, output_file: str, secondPass: str, kali_path = os.path.dirname( __file__) + "/Pi0_Run3_KaliPackage/Kali_produceNtuple.py:main" if not os.path.exists(work_dir): os.mkdir(work_dir) - #subprocess.run( + #subprocess_run( # #["lb-run", "DaVinci/v64r4", "lbexec", kali_path, options_file, secondPass, overlay_path], # [ # "/swdev/calo/stack/DaVinci/run", "lbexec", kali_path, options_file, @@ -60,9 +65,9 @@ def Ntupleprod(input_file: str, output_file: str, secondPass: str, # secondPass, overlay_path # ], # check=True) - subprocess.run( + subprocess_run( f"cd {work_dir};\ - /swdev/calo/stack/DaVinci/run lbexec {kali_path} {options_file} {secondPass} {overlay_path};\ + env -i /cvmfs/lhcb.cern.ch/lib/var/lib/LbEnv/3149/stable/linux-64/bin/lb-run -c x86_64_v3-el9-gcc13-opt+g DaVinci/v64r5 lbexec {kali_path} {options_file} {secondPass} {overlay_path};\ ", shell=True, check=True) @@ -74,7 +79,7 @@ def MMapprod(input_file: str, output_file: str): cmd = "python " + os.path.dirname( __file__ ) + f"/Pi0_Run3_KaliPackage/Kali_produceMap.py --inputfile {input_file} --outputfile {output_file}" - subprocess.run(shlex.split(cmd)) + subprocess_run(shlex.split(cmd)) def Histoprod(input_file: str, output_file: str, input_lambda_name: str): @@ -82,7 +87,7 @@ def Histoprod(input_file: str, output_file: str, input_lambda_name: str): cmd = "python " + os.path.dirname( __file__ ) + f"/Pi0_Run3_KaliPackage/Kali_produceHist.py --inputfile {input_file} --outputfile {output_file} --input_lambda_name {input_lambda_name}" - subprocess.run(shlex.split(cmd)) + subprocess_run(shlex.split(cmd)) def Hist2Doprod(input_file: str, output_file: str, input_lambda_name: str, @@ -92,7 +97,7 @@ def Hist2Doprod(input_file: str, output_file: str, input_lambda_name: str, cmd = "python " + os.path.dirname( __file__ ) + f"/Pi0_Run3_KaliPackage/Kali_produceHist2D.py --inputfile {input_file} --outputfile {output_file} --input_lambda_name {input_lambda_name} --nworker {nworker}" - subprocess.run(shlex.split(cmd)) + subprocess_run(shlex.split(cmd)) def run(node, input_files, work_directory, iterator_output_path, n_instances): @@ -102,24 +107,24 @@ def run(node, input_files, work_directory, iterator_output_path, n_instances): state = State.NOT_READY com.set_status(state) n_it = 0 - p_it = 0 + p_it = 1 while True: command = com.get_command() if command.startswith('configure') and state == State.NOT_READY: - if p_it == 0: - MDFprod( - input_files=input_files, - output_file=work_directory / - f"FirstPass_selected_{node}.mdf", - 
secondPass="FirstPass", - overlay_path=os.path.join( - "file:///group/online/alignment/Pi0CalibrationConditions", - "lhcb-conditions-database") if p_it == 2 else - "git:/cvmfs/lhcb.cern.ch/lib/lhcb/git-conddb/lhcb-conditions-database.git", - n_instances=n_instances) - p_it += 1 + # if p_it == 0: + # MDFprod( + # input_files=input_files, + # output_file=work_directory / + # f"FirstPass_selected_{node}.mdf", + # secondPass="FirstPass", + # overlay_path=os.path.join( + # "file:///group/online/alignment/Pi0CalibrationConditions", + # "lhcb-conditions-database") if p_it == 2 else + # "git:/cvmfs/lhcb.cern.ch/lib/lhcb/git-conddb/lhcb-conditions-database.git", + # n_instances=n_instances) + # p_it += 1 state = State.READY elif command.startswith('start') and state == State.READY: state = State.RUNNING @@ -135,17 +140,17 @@ def run(node, input_files, work_directory, iterator_output_path, n_instances): # First pass: Produces ntuples with default DB (lambdas == 1) print("## Running MDFprod! \n") - if p_it == 2: - MDFprod( - input_files=input_files, - output_file=work_directory / - f"{pass_name}_selected_{node}.mdf", - secondPass=pass_name, - overlay_path=os.path.join( - "file:///group/online/alignment/Pi0CalibrationConditions", - "lhcb-conditions-database") if p_it == 2 else - "git:/cvmfs/lhcb.cern.ch/lib/lhcb/git-conddb/lhcb-conditions-database.git", - n_instances=n_instances) + MDFprod( + input_files=input_files, + output_file=work_directory / + f"{pass_name}_selected_{node}.mdf", + secondPass=pass_name, + overlay_path=os.path.abspath( + os.path.join(iterator_output_path, + "new-constants-FirstPass")) + if p_it == 2 else + "git:/cvmfs/lhcb.cern.ch/lib/lhcb/git-conddb/lhcb-conditions-database.git", + n_instances=n_instances) print("## Running Ntupleprod! \n") Ntupleprod( @@ -154,9 +159,10 @@ def run(node, input_files, work_directory, iterator_output_path, n_instances): output_file=work_directory / f"{node}_{pass_name}.root", work_dir=work_directory / f"{node}_{pass_name}", secondPass=pass_name, - overlay_path=os.path.join( - "file:///group/online/alignment/Pi0CalibrationConditions", - "lhcb-conditions-database") if p_it == 2 else + overlay_path=os.path.abspath( + os.path.join(iterator_output_path, + "new-constants-FirstPass")) + if p_it == 2 else "git:/cvmfs/lhcb.cern.ch/lib/lhcb/git-conddb/lhcb-conditions-database.git" ) @@ -184,22 +190,21 @@ def run(node, input_files, work_directory, iterator_output_path, n_instances): elif command.startswith('stop') and state == State.PAUSED: state = State.READY elif command.startswith('reset'): - print("Hello from reset!", flush=True) state = State.NOT_READY + elif command.startswith('unload'): + state = State.OFFLINE + com.set_status(state) break else: print('analyzer: bad transition from %s to %s' % (state, command)) state = State.ERROR + com.set_status(state) break time.sleep(random.uniform(0.5, 1.5)) # Set the status com.set_status(state) - time.sleep(random.uniform(0.5, 1.5)) - # Set the status one last time. 
- com.set_status(state) - if __name__ == '__main__': import OnlineEnvBase as OnlineEnv @@ -211,14 +216,15 @@ if __name__ == '__main__': print( f"Will write analyzer output to {online_options.analyzer_output_path}") - if not OnlineEnv.PartitionName.startswith("TEST"): - INPUT_DATA_PATH = Path("input_data") + if OnlineEnv.PartitionName == "LHCbA": + INPUT_DATA_PATH = Path("/calib/align/LHCb/Calo") + files = [ sorted((INPUT_DATA_PATH / run).iterdir()) for run in online_options.runs ] else: - INPUT_DATA_PATH = Path(f"/calib/align/LHCb/Calo/{OnlineEnv.RunNumber}") + INPUT_DATA_PATH = Path("input_data") files = [sorted(INPUT_DATA_PATH.iterdir())] files_per_node = distribute_files(online_options.nodes, files) diff --git a/MooreOnlineConf/options/CaloPi0/iterator.py b/MooreOnlineConf/options/CaloPi0/iterator.py index 3cbdb7454..454120dd5 100644 --- a/MooreOnlineConf/options/CaloPi0/iterator.py +++ b/MooreOnlineConf/options/CaloPi0/iterator.py @@ -14,10 +14,16 @@ from MooreOnlineConf.utils import ( ensure_output_dir, ) import subprocess -import shlex, os +import shlex +import os +import shutil from ruamel.yaml import YAML from Pi0_Run3_KaliPackage.Kali_Analyzer import merge_lambda_yaml_files +# TODO this will not work in nightly tests.... +ONLINE_CONDDB_PATH = "/group/online/hlt/conditions.run3/lhcb-conditions-database" +CONDITION_PATH = "Conditions/Ecal/Calibration.yml" + NIteration = 7 @@ -36,40 +42,27 @@ def DoCalibration(nIt: int, passname: str, input_file: str, output_file: str, subprocess.run(shlex.split(cmd)) -def WriteOverlay(input_file: str, output_dir: str): - - print(f"WriteOverlay: {input_file} -> {output_dir}") - - if not os.path.exists( - os.path.join(output_dir, "lhcb-conditions-database")): - raise RuntimeError() +def WriteOverlay(input_file: str, output_dir: str, initial_dir: str): + initial_cond_file = os.path.join(initial_dir, CONDITION_PATH) + output_cond_file = os.path.join(output_dir, CONDITION_PATH) + print( + f"WriteOverlay: {initial_cond_file} + {input_file} -> {output_cond_file}" + ) yaml = YAML() - print("copy calibration constants to lhcb-conditions-database") - - with open( - os.path.join( - output_dir, - "lhcb-conditions-database/Conditions/Ecal/Calibration.yml/0"), - 'r') as stream: - cond = yaml.load(stream) - if os.path.exists(input_file): - with open(input_file, 'r') as stream2: - calib = yaml.load(stream2) - else: - print("yml file not found! 
\n") + + with open(initial_cond_file) as f: + cond = yaml.load(f) + with open(input_file) as f: + calib = yaml.load(f) cond['Calibration']['data'] = calib['data'] - with open( - os.path.join( - output_dir, - "lhcb-conditions-database/Conditions/Ecal/Calibration.yml/0"), - 'w', - encoding='utf8') as outfile: + os.makedirs(os.path.dirname(output_cond_file)) + with open(output_cond_file, 'w', encoding='utf8') as outfile: yaml.dump(cond, outfile) - print(f"calibration constants dumped to {output_dir}") + print(f"calibration constants written to {output_dir}") def run(online_options, work_directory): @@ -84,6 +77,13 @@ def run(online_options, work_directory): while True: command = com.get_command() if command == 'configure' and state == State.NOT_READY: + initial_constants_src = os.path.join( + ONLINE_CONDDB_PATH, CONDITION_PATH, + "0") # FIXME the "0" needs to be determined dynamically + initial_constants_dst = work_directory / "initial-constants" / CONDITION_PATH + initial_constants_dst.parent.mkdir(parents=True) + shutil.copy(initial_constants_src, initial_constants_dst) + state = State.READY elif command == 'start' and state == State.READY: state = State.RUNNING @@ -117,27 +117,27 @@ def run(online_options, work_directory): output_file=work_directory / f"{pass_name}_{n_it}.yml", output_lambda_name=work_directory / f"{pass_name}_lambda.txt") - if n_it == NIteration: - print(f"## Running WriteOverlay {n_it}! \n") + if p_it == 1 and n_it == NIteration: + print(f"## Running WriteOverlay {pass_name}/{n_it}\n") WriteOverlay( input_file=work_directory / f"{pass_name}_{n_it}.yml", - output_dir= - "/group/online/alignment/Pi0CalibrationConditions") + output_dir=work_directory / f"new-constants-{pass_name}", + initial_dir=work_directory / "initial-constants") if p_it == 1: n_it = 0 p_it += 1 - - if p_it == 2 and n_it == NIteration: + state = State.RUNNING + elif p_it == 2 and n_it == NIteration: print("## Merge the first and the second pass lambdas ! \n") merge_lambda_yaml_files( os.path.join(work_directory, "FirstPass_7.yml"), os.path.join(work_directory, "SecondPass_7.yml"), work_directory) - print(f"## Running WriteOverlay over merged files! \n") + print("## Running WriteOverlay over merged files! 
\n") WriteOverlay( input_file=work_directory / "merged_lambda.yml", - output_dir= - "/group/online/alignment/Pi0CalibrationConditions") + output_dir=work_directory / f"new-constants-{pass_name}", + initial_dir=work_directory / "initial-constants") print('iterator done') state = State.READY @@ -148,17 +148,18 @@ def run(online_options, work_directory): state = State.READY elif command == 'reset': state = State.NOT_READY + elif command == 'unload': + state = State.OFFLINE + com.set_status(state) break else: print('iterator: bad transition from %s to %s' % (state, command)) state = State.ERROR + com.set_status(state) break # Set the status com.set_status(state) - # Set our status one last time - com.set_status(state) - if __name__ == '__main__': import OnlineEnvBase as OnlineEnv diff --git a/MooreOnlineConf/python/MooreOnlineConf/Communicator.py b/MooreOnlineConf/python/MooreOnlineConf/Communicator.py index 63d6067e5..12450335a 100644 --- a/MooreOnlineConf/python/MooreOnlineConf/Communicator.py +++ b/MooreOnlineConf/python/MooreOnlineConf/Communicator.py @@ -18,6 +18,7 @@ class State(object): READY = "READY" RUNNING = "RUNNING" PAUSED = "PAUSED" + OFFLINE = "OFFLINE" class Communicator(object): @@ -79,7 +80,9 @@ class Communicator(object): def __command_callback(self, args, tag): self.__callback_lock.acquire() - self.__put_command(args[0].rstrip("\0")) + command = args[0].rstrip("\0") + if command != "!state": + self.__put_command(command) self.__callback_lock.release() def __status_callback(self, *args): diff --git a/MooreOnlineConf/python/MooreOnlineConf/utils.py b/MooreOnlineConf/python/MooreOnlineConf/utils.py index 9da1e3425..1cc98202c 100644 --- a/MooreOnlineConf/python/MooreOnlineConf/utils.py +++ b/MooreOnlineConf/python/MooreOnlineConf/utils.py @@ -189,7 +189,6 @@ def distribute_files(nodes, files): chunks(files_per_run, n_files_per_run_per_node) for files_per_run in files ])) - print(x) files_per_node = [sum(lists, []) for lists in x] return dict( zip(nodes, files_per_node + [[]] * (len(nodes) - len(files_per_node)))) @@ -227,7 +226,8 @@ def alignment_options(OnlineEnv) -> OnlineAlignmentOpts: if not nodes: raise ValueError("NodeList is empty") - activity = OnlineEnv.Activity.removeprefix("Alignment|") + activity = OnlineEnv.Activity.removeprefix("Alignment|").removeprefix( + "Calibration|") #make sure leading zeroes are in run numbers runs = [f'{int(run):010d}' for run in runs] data_id = runs[0] if len(runs) == 1 else f"{runs[0]}-{runs[-1]}" @@ -265,7 +265,8 @@ def ensure_output_dir(output_path, tag): import errno if e.errno == errno.ENOTEMPTY: pass # another worker already renamed the directory - raise + else: + raise output_path.mkdir(parents=True, exist_ok=True) utgid = os.environ["UTGID"] diff --git a/MooreScripts/job/CaloPi0/runAnalyzer.sh b/MooreScripts/job/CaloPi0/runAnalyzer.sh index ac774a553..8ba8041a6 100755 --- a/MooreScripts/job/CaloPi0/runAnalyzer.sh +++ b/MooreScripts/job/CaloPi0/runAnalyzer.sh @@ -15,16 +15,4 @@ source "$DIR/../setupTask.sh" setup_options_path MONITORING - -cd_working_dir -dump_environment - -executable=genPython.exe -if [[ ${PARTITION} == "TEST"* ]]; then - # FIXME does the following still hold, i.e. can we debug fine when using genPython.exe? - # For some reason when we use genPython.exe gdb does not load the symbols from the shared libraries. - # Instead, use python directly, which works except that messages in the log viewer are not properly - # tagged by the process name. 
- executable=python -fi -exec -a ${UTGID} ${executable} $MOOREONLINECONFROOT/options/CaloPi0/analyzer.py +exec_python $MOOREONLINECONFROOT/options/CaloPi0/analyzer.py diff --git a/MooreScripts/job/CaloPi0/runIterator.sh b/MooreScripts/job/CaloPi0/runIterator.sh index 3c94523ba..a6448e827 100755 --- a/MooreScripts/job/CaloPi0/runIterator.sh +++ b/MooreScripts/job/CaloPi0/runIterator.sh @@ -15,16 +15,4 @@ source "$DIR/../setupTask.sh" setup_options_path MONITORING - -cd_working_dir -dump_environment - -executable=genPython.exe -if [[ ${PARTITION} == "TEST"* ]]; then - # FIXME does the following still hold, i.e. can we debug fine when using genPython.exe? - # For some reason when we use genPython.exe gdb does not load the symbols from the shared libraries. - # Instead, use python directly, which works except that messages in the log viewer are not properly - # tagged by the process name. - executable=python -fi -exec -a ${UTGID} ${executable} $MOOREONLINECONFROOT/options/CaloPi0/iterator.py +exec_python $MOOREONLINECONFROOT/options/CaloPi0/iterator.py diff --git a/MooreScripts/job/setupTask.sh b/MooreScripts/job/setupTask.sh index 0f22f2b73..e3a180c4a 100755 --- a/MooreScripts/job/setupTask.sh +++ b/MooreScripts/job/setupTask.sh @@ -139,9 +139,26 @@ dump_environment() { fi } +exec_python() { + cd_working_dir + export PYTHONUNBUFFERED=1 # immediate python output + export ENABLE_BACKTRACE=1 # print stack traces for GaudiException-s + dump_environment + executable=genPython.exe + if [[ ${PARTITION} == "TEST"* || ! -z ${PRELOAD_SANITIZER_LIB-} ]]; then + # FIXME does the following still hold, i.e. can we debug fine when using genPython.exe? + # For some reason when we use genPython.exe gdb does not load the symbols from the shared libraries. + # Instead, use python directly, which works except that messages in the log viewer are not properly + # tagged by the process name. + executable=python + fi + exec -a ${UTGID} ${executable} "$@" +} + exec_gaudirun() { application=${application:-Online::OnlineEventApp} cd_working_dir + export PYTHONUNBUFFERED=1 # immediate python output export ENABLE_BACKTRACE=1 # print stack traces for GaudiException-s LD_PRELOAD= if [[ ! 
-z ${PRELOAD_SANITIZER_LIB-} ]]; then -- GitLab From 933de0084ecc1c47438f13220ee07e24d3de145d Mon Sep 17 00:00:00 2001 From: lesantor <leon.santoro@cern.ch> Date: Thu, 20 Jun 2024 20:48:30 +0200 Subject: [PATCH 29/35] Geometry changes to latest version --- .../Pi0_Run3_KaliPackage/Kali_Analyzer.py | 9 +++++---- MooreOnlineConf/options/CaloPi0/analyzer.py | 17 +++-------------- MooreScripts/tests/options/CaloPi0/Arch.xml | 2 +- 3 files changed, 9 insertions(+), 19 deletions(-) diff --git a/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_Analyzer.py b/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_Analyzer.py index 6bdbc284f..326d31572 100755 --- a/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_Analyzer.py +++ b/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_Analyzer.py @@ -30,15 +30,16 @@ def MDFprodOption(inputfiles, outputfile, n_instances): write_lines = [ "input_files:\n", ] + ["- '%s'\n" % inputfile for inputfile in inputfiles] + [ - "input_type: 'RAW'\n", "evt_max: -1\n", + "input_type: 'RAW'\n", "evt_max: -1 \n", "output_file: '%s'\n" % outputfile, "output_manifest_file: '%s' \n" % output_manifest_file, "input_process: 'Hlt2'\n", "input_raw_format: 0.5\n", "data_type: Upgrade\n", "simulation: False\n", f"n_threads: {threads_per_instance} \n", "scheduler_legacy_mode: False\n", "output_type: 'RAW' \n", - "geometry_version : run3/trunk \n", "conditions_version : 'master' \n", - "print_freq: 1000 \n", "write_decoding_keys_to_git: False \n" + "geometry_version :run3/2024.Q1.2-v00.00 \n", + "conditions_version : 'master' \n", "print_freq: 1000 \n", + "write_decoding_keys_to_git: False \n" ] f.writelines(write_lines) f.close() @@ -72,7 +73,7 @@ def NtupleprodOption(inputfiles, outputfile): "simulation: False\n", "conddb_tag: 'upgrade/master'\n", "dddb_tag: 'upgrade/master'\n", - "geometry_version : run3/trunk \n", + "geometry_version : run3/2024.Q1.2-v00.00 \n", "conditions_version : 'master' \n", ] f.writelines(write_lines) diff --git a/MooreOnlineConf/options/CaloPi0/analyzer.py b/MooreOnlineConf/options/CaloPi0/analyzer.py index b19d8af99..6536e2f8a 100644 --- a/MooreOnlineConf/options/CaloPi0/analyzer.py +++ b/MooreOnlineConf/options/CaloPi0/analyzer.py @@ -25,7 +25,7 @@ from MooreOnlineConf.utils import ( from Pi0_Run3_KaliPackage.Kali_Analyzer import NtupleprodOption, MDFprodOption from pathlib import Path -NIteration = 7 +NIteration = 7 #Difined the number of interaction in each loop def subprocess_run(*args, **kwargs): @@ -113,18 +113,6 @@ def run(node, input_files, work_directory, iterator_output_path, n_instances): command = com.get_command() if command.startswith('configure') and state == State.NOT_READY: - # if p_it == 0: - # MDFprod( - # input_files=input_files, - # output_file=work_directory / - # f"FirstPass_selected_{node}.mdf", - # secondPass="FirstPass", - # overlay_path=os.path.join( - # "file:///group/online/alignment/Pi0CalibrationConditions", - # "lhcb-conditions-database") if p_it == 2 else - # "git:/cvmfs/lhcb.cern.ch/lib/lhcb/git-conddb/lhcb-conditions-database.git", - # n_instances=n_instances) - # p_it += 1 state = State.READY elif command.startswith('start') and state == State.READY: state = State.RUNNING @@ -224,8 +212,9 @@ if __name__ == '__main__': for run in online_options.runs ] else: + INPUT_DATA_PATH = Path(f"/calib/align/LHCb/Calo/{OnlineEnv.RunNumber}") + INPUT_DATA_PATH = Path("input_data") - files = [sorted(INPUT_DATA_PATH.iterdir())] files_per_node = distribute_files(online_options.nodes, files) diff --git 
a/MooreScripts/tests/options/CaloPi0/Arch.xml b/MooreScripts/tests/options/CaloPi0/Arch.xml index a3d1bee4f..cf2d7e53b 100644 --- a/MooreScripts/tests/options/CaloPi0/Arch.xml +++ b/MooreScripts/tests/options/CaloPi0/Arch.xml @@ -10,7 +10,7 @@ --> <tasks_inventory> - <task name="AlignWrk" user="${USER}" group="${GROUP}" instances="2"> + <task name="AlignWrk" user="${USER}" group="${GROUP}" instances="1"> <command>${MOORESCRIPTSROOT}/job/CaloPi0/runAnalyzer.sh</command> <argument name="-type" value="${NAME}" /> <argument name="-runinfo" value="${RUNINFO}" /> -- GitLab From b75b4328e176c29b913dd8910168c733135967a8 Mon Sep 17 00:00:00 2001 From: lesantor <leon.santoro@cern.ch> Date: Wed, 17 Jul 2024 19:53:53 +0200 Subject: [PATCH 30/35] change to VPRetinaCluster --- .../options/CaloPi0/Pi0_Run3_KaliPackage/Kali_produceMDF.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_produceMDF.py b/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_produceMDF.py index d4f0a50cf..e2b5bcfd7 100755 --- a/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_produceMDF.py +++ b/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_produceMDF.py @@ -109,7 +109,7 @@ def main(options: Options, pass_name: str, overlay_path: str): ] with reconstruction.bind(from_file=False), default_VeloCluster_source.bind( - bank_type="VP"), make_digits.bind( + bank_type="VPRetinaCluster"), make_digits.bind( calo_raw_bank=True), make_muon_hits.bind( geometry_version=3), hlt2_reconstruction.bind( make_reconstruction=make_light_reco_pr_kf_without_UT): -- GitLab From 4bc36c98b0c3f392d9e7a546a1465e99fdc1653b Mon Sep 17 00:00:00 2001 From: lesantor <leon.santoro@cern.ch> Date: Fri, 2 Aug 2024 15:25:03 +0200 Subject: [PATCH 31/35] Create variables to correction of the cell index and from the number of cell on calorimeter --- .../Pi0_Run3_KaliPackage/Kali_Analyzer.py | 21 ++++++++++++------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_Analyzer.py b/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_Analyzer.py index 326d31572..6a0864cbf 100755 --- a/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_Analyzer.py +++ b/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_Analyzer.py @@ -30,14 +30,14 @@ def MDFprodOption(inputfiles, outputfile, n_instances): write_lines = [ "input_files:\n", ] + ["- '%s'\n" % inputfile for inputfile in inputfiles] + [ - "input_type: 'RAW'\n", "evt_max: -1 \n", + "input_type: 'RAW'\n", "evt_max: 100000 \n", "output_file: '%s'\n" % outputfile, "output_manifest_file: '%s' \n" % output_manifest_file, "input_process: 'Hlt2'\n", "input_raw_format: 0.5\n", "data_type: Upgrade\n", "simulation: False\n", f"n_threads: {threads_per_instance} \n", "scheduler_legacy_mode: False\n", "output_type: 'RAW' \n", - "geometry_version :run3/2024.Q1.2-v00.00 \n", + "geometry_version : run3/2024.Q1.2-v00.00 \n", "conditions_version : 'master' \n", "print_freq: 1000 \n", "write_decoding_keys_to_git: False \n" ] @@ -96,20 +96,25 @@ def merge_lambda_yaml_files(FirstPass_lambda: str, SecondPass_lambda: str, Lines_1 = calibFirst['data'] Lines_2 = calibSecond['data'] - Lambda_1 = [None] * 11384 #number of histograms - Lambda_2 = [None] * 11384 - Lambda_3 = [None] * 11384 - Merged_lambda = [[None, None]] * 11384 + Num_of_hist = 11384 #number of histograms + + corr = 32768 + #Correction over the cell index + + Lambda_1 = [None] * Num_of_hist + Lambda_2 
= [None] * Num_of_hist + Lambda_3 = [None] * Num_of_hist + Merged_lambda = [[None, None]] * Num_of_hist output_merged_file = open( os.path.join(output_file_path, "merged_lambda.yml"), 'w') output_merged_file.write('data: [\n') for line1 in Lines_1: - Lambda_1[line1[0] - 32768] = float(line1[1]) + Lambda_1[line1[0] - corr] = float(line1[1]) for line2 in Lines_2: - Lambda_2[line2[0] - 32768] = float(line2[1]) + Lambda_2[line2[0] - corr] = float(line2[1]) for i in range(0, len(Lambda_1)): -- GitLab From 5424bffd7ade8483cf77cdf2a28b25462ef929dd Mon Sep 17 00:00:00 2001 From: lesantor <leon.santoro@cern.ch> Date: Fri, 2 Aug 2024 15:27:48 +0200 Subject: [PATCH 32/35] small fixes --- .../Kali_doCalibration.py | 86 +------------------ 1 file changed, 1 insertion(+), 85 deletions(-) diff --git a/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_doCalibration.py b/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_doCalibration.py index 7dcd4b801..ea96afe15 100644 --- a/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_doCalibration.py +++ b/MooreOnlineConf/options/CaloPi0/Pi0_Run3_KaliPackage/Kali_doCalibration.py @@ -44,7 +44,7 @@ LHCbApp().DataType = 'Upgrade' LHCbApp().CondDBtag = 'master' LHCbApp().DDDBtag = 'master' LHCbApp().Simulation = False -LHCbApp().GeometryVersion = 'run3/trunk' +LHCbApp().GeometryVersion = 'run3/2024.Q1.2-v00.00' from Configurables import Pi0CalibrationAlg pi0Calib = Pi0CalibrationAlg("Pi0Calibration") @@ -80,90 +80,6 @@ mainSeq.Members = [ IOVProducer(), pi0Calib, pi0Moni ] -#if '1_histos' in inputfile: -# if 'FirstPass' in inputfile: -# os.mkdir(pi0Moni.outputDir + "/Iter1Mon_FirstPass") -# pi0Moni.outputDir = pi0Moni.outputDir + "/Iter1Mon_FirstPass" -# else: -# os.mkdir(pi0Moni.outputDir + "/Iter1Mon_SecondPass") -# pi0Moni.outputDir = pi0Moni.outputDir + "/Iter1Mon_SecondPass" -# -# mainSeq.Members = [ -# FakeRunNumberProducer(Start=256126, Step=0), -# IOVProducer(), pi0Calib, pi0Moni -# ] -#elif '2_histos' in inputfile: -# if 'FirstPass' in inputfile: -# os.mkdir(pi0Moni.outputDir + "/Iter2Mon_FirstPass") -# pi0Moni.outputDir = pi0Moni.outputDir + "/Iter2Mon_FirstPass" -# else: -# os.mkdir(pi0Moni.outputDir + "/Iter2Mon_SecondPass") -# pi0Moni.outputDir = pi0Moni.outputDir + "/Iter2Mon_SecondPass" -# mainSeq.Members = [ -# FakeRunNumberProducer(Start=256126, Step=0), -# IOVProducer(), pi0Calib, pi0Moni -# ] -#elif '3_histos' in inputfile: -# if 'FirstPass' in inputfile: -# os.mkdir(pi0Moni.outputDir + "/Iter3Mon_FirstPass") -# pi0Moni.outputDir = pi0Moni.outputDir + "/Iter3Mon_FirstPass" -# else: -# os.mkdir(pi0Moni.outputDir + "/Iter3Mon_SecondPass") -# pi0Moni.outputDir = pi0Moni.outputDir + "/Iter3Mon_SecondPass" -# mainSeq.Members = [ -# FakeRunNumberProducer(Start=256126, Step=0), -# IOVProducer(), pi0Calib, pi0Moni -# ] -#elif '4_histos' in inputfile: -# if 'FirstPass' in inputfile: -# os.mkdir(pi0Moni.outputDir + "/Iter4Mon_FirstPass") -# pi0Moni.outputDir = pi0Moni.outputDir + "/Iter4Mon_FirstPass" -# else: -# os.mkdir(pi0Moni.outputDir + "/Iter4Mon_SecondPass") -# pi0Moni.outputDir = pi0Moni.outputDir + "/Iter4Mon_SecondPass" -# mainSeq.Members = [ -# FakeRunNumberProducer(Start=256126, Step=0), -# IOVProducer(), pi0Calib, pi0Moni -# ] -#elif '5_histos' in inputfile: -# if 'FirstPass' in inputfile: -# os.mkdir(pi0Moni.outputDir + "/Iter5Mon_FirstPass") -# pi0Moni.outputDir = pi0Moni.outputDir + "/Iter5Mon_FirstPass" -# else: -# os.mkdir(pi0Moni.outputDir + "/Iter5Mon_SecondPass") -# pi0Moni.outputDir = pi0Moni.outputDir + 
"/Iter5Mon_SecondPass" -# mainSeq.Members = [ -# FakeRunNumberProducer(Start=256126, Step=0), -# IOVProducer(), pi0Calib, pi0Moni -# ] -#elif '6_histos' in inputfile: -# if 'FirstPass' in inputfile: -# os.mkdir(pi0Moni.outputDir + "/Iter6Mon_FirstPass") -# pi0Moni.outputDir = pi0Moni.outputDir + "/Iter6Mon_FirstPass" -# else: -# os.mkdir(pi0Moni.outputDir + "/Iter6Mon_SecondPass") -# pi0Moni.outputDir = pi0Moni.outputDir + "/Iter6Mon_SecondPass" -# mainSeq.Members = [ -# FakeRunNumberProducer(Start=256126, Step=0), -# IOVProducer(), pi0Calib, pi0Moni -# ] -#elif '7_histos' in inputfile: -# if 'FirstPass' in inputfile: -# os.mkdir(pi0Moni.outputDir + "/Iter7Mon_FirstPass") -# pi0Moni.outputDir = pi0Moni.outputDir + "/Iter7Mon_FirstPass" -# else: -# os.mkdir(pi0Moni.outputDir + "/Iter7Mon_SecondPass") -# pi0Moni.outputDir = pi0Moni.outputDir + "/Iter7Mon_SecondPass" -# mainSeq.Members = [ -# FakeRunNumberProducer(Start=256126, Step=0), -# IOVProducer(), pi0Calib, pi0Moni -# ] -#else: -# mainSeq.Members = [ -# FakeRunNumberProducer(Start=256126, Step=0), -# IOVProducer(), pi0Calib -# ] - ApplicationMgr( OutputLevel=INFO, AppName="Pi0Calibration", EvtMax=-1, EvtSel='NONE') ApplicationMgr().TopAlg.append(mainSeq) -- GitLab From 1bb841aea7129c5504237bacb42f4245e274e0f7 Mon Sep 17 00:00:00 2001 From: Rosen Matev <rosen.matev@cern.ch> Date: Thu, 5 Sep 2024 13:58:43 +0200 Subject: [PATCH 33/35] Add --runs argument for alignment scenario --- MooreOnlineConf/options/CaloPi0/analyzer.py | 4 +- .../python/MooreOnlineConf/utils.py | 4 -- .../testbench/scenarios/alignment.py | 15 +++++-- .../tests/options/CaloPi0/OnlineEnv.opts | 4 -- .../tests/options/CaloPi0/OnlineEnvBase.py | 2 - MooreScripts/tests/qmtest/alignment.qmt | 1 + README.md | 41 ++++++++++++------- 7 files changed, 40 insertions(+), 31 deletions(-) diff --git a/MooreOnlineConf/options/CaloPi0/analyzer.py b/MooreOnlineConf/options/CaloPi0/analyzer.py index 6536e2f8a..f98725659 100644 --- a/MooreOnlineConf/options/CaloPi0/analyzer.py +++ b/MooreOnlineConf/options/CaloPi0/analyzer.py @@ -204,7 +204,7 @@ if __name__ == '__main__': print( f"Will write analyzer output to {online_options.analyzer_output_path}") - if OnlineEnv.PartitionName == "LHCbA": + if OnlineEnv.PartitionName == "LHCbA" or online_options.runs: INPUT_DATA_PATH = Path("/calib/align/LHCb/Calo") files = [ @@ -212,8 +212,6 @@ if __name__ == '__main__': for run in online_options.runs ] else: - INPUT_DATA_PATH = Path(f"/calib/align/LHCb/Calo/{OnlineEnv.RunNumber}") - INPUT_DATA_PATH = Path("input_data") files_per_node = distribute_files(online_options.nodes, files) diff --git a/MooreOnlineConf/python/MooreOnlineConf/utils.py b/MooreOnlineConf/python/MooreOnlineConf/utils.py index 1cc98202c..bef6fd562 100644 --- a/MooreOnlineConf/python/MooreOnlineConf/utils.py +++ b/MooreOnlineConf/python/MooreOnlineConf/utils.py @@ -239,10 +239,6 @@ def alignment_options(OnlineEnv) -> OnlineAlignmentOpts: analyzer_output_path = prefix / activity / "analyzer" / data_id iterator_output_path = prefix / activity / "iterator" / data_id - # overwrite some options for CI test - if OnlineEnv.PartitionName.startswith("TEST"): - runs = [253597] - return OnlineAlignmentOpts(tag, runs, nodes, analyzer_output_path, iterator_output_path) diff --git a/MooreScripts/python/MooreScripts/testbench/scenarios/alignment.py b/MooreScripts/python/MooreScripts/testbench/scenarios/alignment.py index d8b16e86c..592645e78 100644 --- a/MooreScripts/python/MooreScripts/testbench/scenarios/alignment.py +++ 
b/MooreScripts/python/MooreScripts/testbench/scenarios/alignment.py @@ -8,6 +8,7 @@ # granted to it by virtue of its status as an Intergovernmental Organization # # or submit itself to any jurisdiction. # ############################################################################### +import argparse import logging from MooreScripts.testbench.emulator import ( tasks_load, @@ -23,14 +24,22 @@ log = logging.getLogger(__name__) async def run(tasks, args, extra_argv): + parser = argparse.ArgumentParser() + parser.add_argument( + "--runs", nargs="*", type=int, help="Run numbers for the RunList.opts") + parser.add_argument( + "--numiter", type=int, default=14, help="Max number of iterations") + extra_args = parser.parse_args(extra_argv) + analyzers = [t for t in tasks if "Wrk" in t.type] iterator, = [t for t in tasks if "Drv" in t.type] assert len(tasks) == len(analyzers) + 1 # Write the alignment-specific options with open("RunList.opts", "w") as f: - # dump_opts({"DeferredRuns": {"0000230000"}}, f) - dump_opts({"DeferredRuns": {"0000264400"}}, f) + dump_opts({ + "DeferredRuns": set(f"{run:010}" for run in extra_args.runs) + }, f) with open("NodeList.opts", "w") as f: # usually the NodeList would contain only the node name # since we run one instance per node. When testing locally @@ -49,7 +58,7 @@ async def run(tasks, args, extra_argv): await tasks_send_command(tasks, "start") await tasks_wait_for_status(tasks, "RUNNING") - for iteration in range(10): + for iteration in range(extra_args.numiter): log.info(f"Running analyzer for iteration {iteration} ...") await tasks_wait_for_status(analyzers, "PAUSED") diff --git a/MooreScripts/tests/options/CaloPi0/OnlineEnv.opts b/MooreScripts/tests/options/CaloPi0/OnlineEnv.opts index 1d378aa5b..92f6aba6d 100644 --- a/MooreScripts/tests/options/CaloPi0/OnlineEnv.opts +++ b/MooreScripts/tests/options/CaloPi0/OnlineEnv.opts @@ -2,7 +2,3 @@ OnlineEnv.PartitionID = 65535; OnlineEnv.PartitionName = "TESTBEAMGUI"; OnlineEnv.Activity = "Alignment|CaloPi0"; OnlineEnv.OutputLevel = 3; -OnlineEnv.Reader_Rescan = 1; -OnlineEnv.Reader_Directories = {"/scratch/rmatev"}; -OnlineEnv.Reader_FilePrefix = "Run_0000290068_"; -OnlineEnv.RunNumber = "0000290068 "; diff --git a/MooreScripts/tests/options/CaloPi0/OnlineEnvBase.py b/MooreScripts/tests/options/CaloPi0/OnlineEnvBase.py index 4599a0d51..e7420eed3 100644 --- a/MooreScripts/tests/options/CaloPi0/OnlineEnvBase.py +++ b/MooreScripts/tests/options/CaloPi0/OnlineEnvBase.py @@ -16,5 +16,3 @@ OnlineVersion = "v0" MooreVersion = "v0" MooreOnlineVersion = "v0" OutputLevel = 3 -#RunNumber = "0000268295" #Only ECAL -RunNumber = "0000290068" diff --git a/MooreScripts/tests/qmtest/alignment.qmt b/MooreScripts/tests/qmtest/alignment.qmt index d73863d58..0fa0422b5 100644 --- a/MooreScripts/tests/qmtest/alignment.qmt +++ b/MooreScripts/tests/qmtest/alignment.qmt @@ -20,6 +20,7 @@ Run an HLT2 job in the Online testbench <text>--test-file-db-key=2022_raw_hlt1_253597</text> <text>$MOORESCRIPTSROOT/tests/options/VeloHalf/Arch.xml</text> <text>alignment</text> + <text>--runs=253597</text> </set></argument> <argument name="unsupported_platforms"><set> <text>detdesc</text> diff --git a/README.md b/README.md index 940f881d8..861515de3 100644 --- a/README.md +++ b/README.md @@ -187,7 +187,7 @@ MooreOnline/run bash -c '$PRCONFIGROOT/scripts/benchmark-scripts/MooreOnline_hlt ```sh MooreOnline/run MooreOnline/MooreScripts/scripts/testbench.py --help -MooreOnline/run MooreOnline/MooreScripts/scripts/testbench.py --working-dir=output 
--test-file-db-key=2022_raw_hlt1_253597 --partition=TESTALIGNMENT MooreOnline/MooreScripts/tests/options/VeloHalf/Arch.xml alignment +MooreOnline/run MooreOnline/MooreScripts/scripts/testbench.py --working-dir=output --test-file-db-key=2022_raw_hlt1_253597 --partition=TESTALIGNMENT MooreOnline/MooreScripts/tests/options/VeloHalf/Arch.xml alignment --runs 253597 ``` <details><summary> `tree output` (click to expand) </summary> @@ -202,11 +202,9 @@ output ├── AlignWrk_0.env ├── AlignWrk_0.log ├── AlignWrk_0.opts -├── AlignWrk_1.dump -├── AlignWrk_1.env -├── AlignWrk_1.log -├── AlignWrk_1.opts ├── emu.log +├── input_data +│  └── Run_0000253597_HLT24640_20221114-023831-878_06c6355a15f87f57bfdcb9cf5ef57754.mdf ├── NodeList.opts ├── OnlineEnvBase.py ├── OnlineEnv.opts @@ -216,28 +214,41 @@ output ├── setup.vars └── VeloHalf ├── analyzer - │  └── 0000255623 - │  ├── derivatives-TEST_N8190402_AlignWrk_0.out - │  ├── derivatives-TEST_N8190402_AlignWrk_1.out - │  ├── histograms-TEST_N8190402_AlignWrk_0_new.root - │  ├── histograms-TEST_N8190402_AlignWrk_1_new.root + │  └── 0000253597 + │  ├── derivatives-TESTALIGNMENT_N8190402_AlignWrk_0.out │  └── tag └── iterator - └── 0000255623 + └── 0000253597 ├── alignlog.txt + ├── iter0 + │  ├── histograms-TESTALIGNMENT_N8190402_AlignWrk_0_new.root + │  └── MergedHistos_Iter0.root + ├── iter1 + │  ├── histograms-TESTALIGNMENT_N8190402_AlignWrk_0_new.root + │  └── MergedHistos_Iter1.root + ├── iter2 + │  ├── histograms-TESTALIGNMENT_N8190402_AlignWrk_0_new.root + │  └── MergedHistos_Iter2.root ├── new-constants-0 │  └── Conditions │  └── VP │  └── Alignment - │  ├── Global.yml - │  └── Modules.yml + │  └── Global.yml ├── new-constants-1 + │  └── Conditions + │  └── VP + │  └── Alignment + │  └── Global.yml + ├── new-constants-2 + │  └── Conditions + │  └── VP + │  └── Alignment + │  └── Global.yml ├── OverlayRoot │  └── Conditions │  └── VP │  └── Alignment - │  ├── Global.yml - │  └── Modules.yml + │  └── Global.yml └── tag ``` -- GitLab From 520092033d8be31443c0eaecb4dd357ae6c50388 Mon Sep 17 00:00:00 2001 From: Rosen Matev <rosen.matev@cern.ch> Date: Thu, 5 Sep 2024 13:59:47 +0200 Subject: [PATCH 34/35] Remove extraneos STOP in alignment scenario and clean up --- MooreOnlineConf/options/CaloPi0/analyzer.py | 4 +- .../testbench/scenarios/CalibrationPi0.py | 94 ------------------- .../testbench/scenarios/alignment.py | 3 - 3 files changed, 2 insertions(+), 99 deletions(-) delete mode 100644 MooreScripts/python/MooreScripts/testbench/scenarios/CalibrationPi0.py diff --git a/MooreOnlineConf/options/CaloPi0/analyzer.py b/MooreOnlineConf/options/CaloPi0/analyzer.py index f98725659..8028bdf22 100644 --- a/MooreOnlineConf/options/CaloPi0/analyzer.py +++ b/MooreOnlineConf/options/CaloPi0/analyzer.py @@ -12,7 +12,7 @@ from MooreOnlineConf.Communicator import Communicator, State import os, time, random, sys import shlex import subprocess -import glob, shutil +import shutil sys.path.append("Pi0_Run3_KaliPackage") # FIXME remove @@ -25,7 +25,7 @@ from MooreOnlineConf.utils import ( from Pi0_Run3_KaliPackage.Kali_Analyzer import NtupleprodOption, MDFprodOption from pathlib import Path -NIteration = 7 #Difined the number of interaction in each loop +NIteration = 7 def subprocess_run(*args, **kwargs): diff --git a/MooreScripts/python/MooreScripts/testbench/scenarios/CalibrationPi0.py b/MooreScripts/python/MooreScripts/testbench/scenarios/CalibrationPi0.py deleted file mode 100644 index 3ec7978b0..000000000 --- 
a/MooreScripts/python/MooreScripts/testbench/scenarios/CalibrationPi0.py +++ /dev/null @@ -1,94 +0,0 @@ -############################################################################### -# (c) Copyright 2022-2023 CERN for the benefit of the LHCb Collaboration # -# # -# This software is distributed under the terms of the GNU General Public # -# Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". # -# # -# In applying this licence, CERN does not waive the privileges and immunities # -# granted to it by virtue of its status as an Intergovernmental Organization # -# or submit itself to any jurisdiction. # -############################################################################### -import logging -from MooreScripts.testbench.emulator import ( - tasks_load, - tasks_wait_for_status, - tasks_send_command, - tasks_wait_for_exit, - tasks_measure_throughput, - async_input, - dump_opts, -) - -log = logging.getLogger(__name__) - - -async def run(tasks, args, exta_argv): - analyzers = [t for t in tasks if "Wrk" in t.utgid] - iterator, = [t for t in tasks if "Drv" in t.utgid] - assert len(tasks) == len(analyzers) + 1 - - # Write the alignment-specific options - with open("RunList.opts", "w") as f: - dump_opts({"DeferredRuns": {"0000290068"}}, f) - #dump_opts({"DeferredRuns": {"0000255623"}}, f) - with open("NodeList.opts", "w") as f: - # usually the NodeList would contain only the node name - # since we run one instance per node. When testing locally - # however we use multiple instances - dump_opts({"NodeList": {t.utgid for t in analyzers}}, f) - - await tasks_load(tasks) - # TODO for some reason HLT2 publishes OFFLINE before NOT_READY, but only sometimes - await tasks_wait_for_status(tasks, "NOT_READY", skip=["OFFLINE"]) - - await tasks_send_command(tasks, "configure") - await tasks_wait_for_status(tasks, "READY") - - # await async_input("Press enter to continue...") - - await tasks_send_command(tasks, "start") - await tasks_wait_for_status(tasks, "RUNNING") - - for iteration in range(14): - log.info(f"Running analyzer for iteration {iteration} ...") - await tasks_wait_for_status(analyzers, "PAUSED") - - await tasks_send_command(analyzers, "stop") - await tasks_wait_for_status(analyzers, "READY") - - await tasks_send_command([iterator], "pause") - await tasks_wait_for_status([iterator], "PAUSED") - - status = await iterator.status() - if status == "READY": # we have converged - log.info("Iterator converged") - break - elif status == "RUNNING": # we haven't converged - log.info("Iterator did not converge") - await tasks_send_command(analyzers, "start") - await tasks_wait_for_status(analyzers, "RUNNING") - continue - else: - message = f"Unexpected status for iterator: {status}" - log.error(message) - raise RuntimeError(message) - - await tasks_send_command(tasks, "reset") - await tasks_wait_for_status(tasks, "NOT_READY") - - # # FIXME: the following is hack - for task in tasks: - task._exit_task.cancel() - - await tasks_send_command(tasks, "unload") - await tasks_wait_for_status(tasks, "OFFLINE") - - # Wait for the tasks to close - exit_codes = await tasks_wait_for_exit(tasks) - if set(exit_codes) != {0}: - for t, ec in zip(tasks, exit_codes): - if ec != 0: - log.error(f"{t.utgid} exited with non-zero code {ec}") - return 102 - - return 0 diff --git a/MooreScripts/python/MooreScripts/testbench/scenarios/alignment.py b/MooreScripts/python/MooreScripts/testbench/scenarios/alignment.py index 592645e78..5148b5c8c 100644 --- 
a/MooreScripts/python/MooreScripts/testbench/scenarios/alignment.py +++ b/MooreScripts/python/MooreScripts/testbench/scenarios/alignment.py @@ -84,9 +84,6 @@ async def run(tasks, args, extra_argv): log.error(message) raise RuntimeError(message) - await tasks_send_command(analyzers, "stop") - await tasks_wait_for_status(analyzers, "READY") - await tasks_send_command(tasks, "reset") await tasks_wait_for_status(tasks, "NOT_READY") -- GitLab From 20cb20d7076671e5474b17ebb67fcccd56f424b5 Mon Sep 17 00:00:00 2001 From: RefBot <lhcbsoft@cern.ch> Date: Tue, 17 Sep 2024 14:51:17 +0200 Subject: [PATCH 35/35] Update References for: Rec!4041, Rec!4053 based on lhcb-2024-patches-mr/1470 [skip ci] --- AllenOnline/tests/refs/test_lumi.ref | 2 +- AllenOnline/tests/refs/test_lumi.ref.x86_64_v3-opt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/AllenOnline/tests/refs/test_lumi.ref b/AllenOnline/tests/refs/test_lumi.ref index 99b023e72..bc703e03b 100644 --- a/AllenOnline/tests/refs/test_lumi.ref +++ b/AllenOnline/tests/refs/test_lumi.ref @@ -1341,6 +1341,6 @@ encodingKey: 3104473477.000000 HltLumiWriter INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "Average event size / 32-bit words" | 328 | 9208 | 28.073 | -VoidFilter_2e3c... INFO Number of counters : 1 +VoidFilter_2537... INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | |*"Cut selection efficiency" | 385 | 328 |( 85.19481 +- 1.810020)% | diff --git a/AllenOnline/tests/refs/test_lumi.ref.x86_64_v3-opt b/AllenOnline/tests/refs/test_lumi.ref.x86_64_v3-opt index 1f462f917..a354eae75 100644 --- a/AllenOnline/tests/refs/test_lumi.ref.x86_64_v3-opt +++ b/AllenOnline/tests/refs/test_lumi.ref.x86_64_v3-opt @@ -1341,6 +1341,6 @@ encodingKey: 3104473477.000000 HltLumiWriter INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | | "Average event size / 32-bit words" | 328 | 9208 | 28.073 | -VoidFilter_2e3c... INFO Number of counters : 1 +VoidFilter_2537... INFO Number of counters : 1 | Counter | # | sum | mean/eff^* | rms/err^* | min | max | |*"Cut selection efficiency" | 385 | 328 |( 85.19481 +- 1.810020)% | -- GitLab