From 78f855ff7a0b48a9fbbaf9cf3d33b360775a4d7e Mon Sep 17 00:00:00 2001 From: NicoleSkidmore <nicola.skidmore@cern.ch> Date: Tue, 2 Nov 2021 15:25:16 +0100 Subject: [PATCH 01/30] Hardcode path for rawbank view --- .../src/component/HltPackedDataDecoder.cpp | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/Hlt/HltDAQ/src/component/HltPackedDataDecoder.cpp b/Hlt/HltDAQ/src/component/HltPackedDataDecoder.cpp index 027aa75b682..015ddbe158c 100644 --- a/Hlt/HltDAQ/src/component/HltPackedDataDecoder.cpp +++ b/Hlt/HltDAQ/src/component/HltPackedDataDecoder.cpp @@ -118,14 +118,22 @@ namespace LHCb::Hlt::PackedData { StatusCode Decoder::execute() { - const auto& rawBanksConst = findFirstRawBank( RawBank::DstData ); - if ( rawBanksConst.empty() ) { + auto const* view = getIfExists<RawBank::View>( "/Event/DAQ/RawBanks/DstData" ); + if ( !view || view->empty() ) { ++m_no_packed_bank; return StatusCode::SUCCESS; } + auto rawBanks = std::vector<const RawBank*>{begin( *view ), end( *view )}; - auto rawBanks = std::vector<const RawBank*>{begin( rawBanksConst ), end( rawBanksConst )}; + /* + const auto& rawBanksConst = findFirstRawBank( RawBank::DstData ); + if ( rawBanksConst.empty() ) { + ++m_no_packed_bank; + return StatusCode::SUCCESS; + } + auto rawBanks = std::vector<const RawBank*>{begin( rawBanksConst ), end( rawBanksConst )}; + */ const auto* rawBank0 = rawBanks.front(); // Check we know how to decode this version @@ -187,7 +195,7 @@ namespace LHCb::Hlt::PackedData { } // Get the map of ids to locations (may differ between events) - auto* rawEvent = findFirstRawEvent(); + auto* rawEvent = getIfExists<LHCb::RawEvent>( *LHCb::RawEventLocation::Default ); if ( !rawEvent ) { ++m_missing_raw; return StatusCode::FAILURE; -- GitLab From 57dab4978826839e4de290989b7cd65eb45801c6 Mon Sep 17 00:00:00 2001 From: NicoleSkidmore <nicola.skidmore@cern.ch> Date: Tue, 2 Nov 2021 15:30:44 +0100 Subject: [PATCH 02/30] Add comments --- Hlt/HltDAQ/src/component/HltPackedDataDecoder.cpp | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/Hlt/HltDAQ/src/component/HltPackedDataDecoder.cpp b/Hlt/HltDAQ/src/component/HltPackedDataDecoder.cpp index 015ddbe158c..a10e45e28eb 100644 --- a/Hlt/HltDAQ/src/component/HltPackedDataDecoder.cpp +++ b/Hlt/HltDAQ/src/component/HltPackedDataDecoder.cpp @@ -118,6 +118,7 @@ namespace LHCb::Hlt::PackedData { StatusCode Decoder::execute() { + // Hard coded - HltPackedDataDecoder will be replaced for HltPackedBufferDecoder auto const* view = getIfExists<RawBank::View>( "/Event/DAQ/RawBanks/DstData" ); if ( !view || view->empty() ) { ++m_no_packed_bank; @@ -125,15 +126,6 @@ namespace LHCb::Hlt::PackedData { } auto rawBanks = std::vector<const RawBank*>{begin( *view ), end( *view )}; - /* - const auto& rawBanksConst = findFirstRawBank( RawBank::DstData ); - if ( rawBanksConst.empty() ) { - ++m_no_packed_bank; - return StatusCode::SUCCESS; - } - - auto rawBanks = std::vector<const RawBank*>{begin( rawBanksConst ), end( rawBanksConst )}; - */ const auto* rawBank0 = rawBanks.front(); // Check we know how to decode this version @@ -195,7 +187,7 @@ namespace LHCb::Hlt::PackedData { } // Get the map of ids to locations (may differ between events) - auto* rawEvent = getIfExists<LHCb::RawEvent>( *LHCb::RawEventLocation::Default ); + auto* rawEvent = findFirstRawEvent(); if ( !rawEvent ) { ++m_missing_raw; return StatusCode::FAILURE; -- GitLab From b088fdf65d24c519864b1f7f078365e4e11b92d0 Mon Sep 17 00:00:00 2001 From: NicoleSkidmore 
<nicola.skidmore@cern.ch> Date: Thu, 11 Nov 2021 16:54:43 +0100 Subject: [PATCH 03/30] COnsolidateViews Untested --- DAQ/DAQUtils/CMakeLists.txt | 1 + DAQ/DAQUtils/src/ConsolidateViews.cpp | 82 +++++++++++++++++++ .../src/component/HltPackedDataDecoder.cpp | 11 ++- 3 files changed, 90 insertions(+), 4 deletions(-) create mode 100644 DAQ/DAQUtils/src/ConsolidateViews.cpp diff --git a/DAQ/DAQUtils/CMakeLists.txt b/DAQ/DAQUtils/CMakeLists.txt index 5a486b92b6f..04778e819a5 100644 --- a/DAQ/DAQUtils/CMakeLists.txt +++ b/DAQ/DAQUtils/CMakeLists.txt @@ -35,6 +35,7 @@ gaudi_add_module(DAQUtils src/RawEventSimpleCombiner.cpp src/UnpackRawEvent.cpp src/bankKiller.cpp + src/ConsolidateViews.cpp LINK Gaudi::GaudiKernel LHCb::LHCbAlgsLib diff --git a/DAQ/DAQUtils/src/ConsolidateViews.cpp b/DAQ/DAQUtils/src/ConsolidateViews.cpp new file mode 100644 index 00000000000..8198d7690b2 --- /dev/null +++ b/DAQ/DAQUtils/src/ConsolidateViews.cpp @@ -0,0 +1,82 @@ +/*****************************************************************************\ +* (c) Copyright 2000-2018 CERN for the benefit of the LHCb Collaboration * +* * +* This software is distributed under the terms of the GNU General Public * +* Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". * +* * +* In applying this licence, CERN does not waive the privileges and immunities * +* granted to it by virtue of its status as an Intergovernmental Organization * +* or submit itself to any jurisdiction. * +\*****************************************************************************/ +#include "Event/RawBank.h" +#include "Event/RawEvent.h" + +#include "LHCbAlgs/MergingTransformer.h" + +#include "GaudiKernel/ISvcLocator.h" + +#include <map> +#include <string> + +//----------------------------------------------------------------------------- +// Implementation file for class : ConsolidateViews +// +// 2021-11-04 : Nicole Skidmore +//----------------------------------------------------------------------------- + +/** + * Combines vector of RawBank::View and returns new RawEvent + * + * Based on RawEventCombiner + * If the same raw bank exists in several raw events, the merge will fail, + * except in the two following cases. + * - HltDecReports banks from different raw events are merged as long as their + * source ID is different. + * - The HltRoutingBits bank is always taken from the first raw event where it + * appears. Subsequent appearances are silently ignored. + * + * @author Nicole Skidmore + * @date 2021-11-04 + */ + +template <typename T> +using VOC = Gaudi::Functional::vector_of_const_<T>; + +struct ConsolidateViews final : LHCb::Algorithm::MergingTransformer<LHCb::RawEvent( VOC<LHCb::RawBank::View> const& )> { + + ConsolidateViews( const std::string& name, ISvcLocator* pSvcLocator ) + : MergingTransformer( name, pSvcLocator, + // Inputs + KeyValues{"RawBankViews", {}}, + // Output + KeyValue{"RawEvent", "Event/DAQ/MergedEvent"} ) {} + + StatusCode initialize() override { + return MergingTransformer::initialize().andThen( [&] { + if ( msgLevel( MSG::INFO ) ) info() << inputLocationSize() << " views being copied to " << endmsg; + } ); + } + + LHCb::RawEvent operator()( VOC<LHCb::RawBank::View> const& views ) const override { + + LHCb::RawEvent outputRawEvent; + for ( auto const& view : views ) { + for ( const LHCb::RawBank* b : view ) { + if ( !b ) continue; + + // Do I need to check if another raw abnk of this type exists already? 
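+          //  Editorial aside, not part of this commit: probably not needed at this step --
+          //  createBank below copies the payload and adoptBank( ..., true ) hands ownership
+          //  of that copy to outputRawEvent; if a duplicate guard is wanted it would
+          //  presumably compare b->type() and b->sourceID() against the banks already
+          //  adopted, along the lines sketched for HltDecReports / HltRoutingBits in the
+          //  class comment above.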
+ // Add it to the outputRawEvent + outputRawEvent.adoptBank( + outputRawEvent.createBank( b->sourceID(), b->type(), b->version(), b->size(), b->data() ), true ); + if ( msgLevel( MSG::VERBOSE ) ) { + verbose() << " Copied RawBank type=" << b->type() << " version= " << b->version() + << " sourceID= " << b->sourceID() << " size (bytes) = " << b->size() << endmsg; + } + } + } + return outputRawEvent; + } +}; + +// Declaration of the Algorithm Factory +DECLARE_COMPONENT( ConsolidateViews ) \ No newline at end of file diff --git a/Hlt/HltDAQ/src/component/HltPackedDataDecoder.cpp b/Hlt/HltDAQ/src/component/HltPackedDataDecoder.cpp index a10e45e28eb..64e3211f67f 100644 --- a/Hlt/HltDAQ/src/component/HltPackedDataDecoder.cpp +++ b/Hlt/HltDAQ/src/component/HltPackedDataDecoder.cpp @@ -119,17 +119,20 @@ namespace LHCb::Hlt::PackedData { StatusCode Decoder::execute() { // Hard coded - HltPackedDataDecoder will be replaced for HltPackedBufferDecoder - auto const* view = getIfExists<RawBank::View>( "/Event/DAQ/RawBanks/DstData" ); + auto const* view = getIfExists<RawBank::View>( "/Event/DAQ/RawBanks/DstData" ); + auto rawBanks = std::vector<const RawBank*>{begin( *view ), end( *view )}; + const auto* rawBank0 = rawBanks.front(); + if ( !view || view->empty() ) { ++m_no_packed_bank; return StatusCode::SUCCESS; } - auto rawBanks = std::vector<const RawBank*>{begin( *view ), end( *view )}; - - const auto* rawBank0 = rawBanks.front(); // Check we know how to decode this version if ( rawBank0->version() < 2 || rawBank0->version() > kVersionNumber ) { + std::cout << "rawBank0->version() " << rawBank0->version() << std::endl; + std::cout << "rawBank0->type() " << rawBank0->type() << std::endl; + ++m_bad_version; return StatusCode::FAILURE; } -- GitLab From 435b227f1bbd4dac92e716e6ea9bc3846b8f0a3c Mon Sep 17 00:00:00 2001 From: NicoleSkidmore <nicola.skidmore@cern.ch> Date: Thu, 18 Nov 2021 15:39:57 +0100 Subject: [PATCH 04/30] Update WIP --- DAQ/DAQUtils/CMakeLists.txt | 1 + ...ckwardsCompatibleMergeViewIntoRawEvent.cpp | 95 +++++++++++++++++++ DAQ/DAQUtils/src/ConsolidateViews.cpp | 5 +- .../src/component/HltDecReportsWriter.cpp | 52 +++++----- 4 files changed, 124 insertions(+), 29 deletions(-) create mode 100644 DAQ/DAQUtils/src/BackwardsCompatibleMergeViewIntoRawEvent.cpp diff --git a/DAQ/DAQUtils/CMakeLists.txt b/DAQ/DAQUtils/CMakeLists.txt index 04778e819a5..04be129938f 100644 --- a/DAQ/DAQUtils/CMakeLists.txt +++ b/DAQ/DAQUtils/CMakeLists.txt @@ -36,6 +36,7 @@ gaudi_add_module(DAQUtils src/UnpackRawEvent.cpp src/bankKiller.cpp src/ConsolidateViews.cpp + src/BackwardsCompatibleMergeViewIntoRawEvent.cpp LINK Gaudi::GaudiKernel LHCb::LHCbAlgsLib diff --git a/DAQ/DAQUtils/src/BackwardsCompatibleMergeViewIntoRawEvent.cpp b/DAQ/DAQUtils/src/BackwardsCompatibleMergeViewIntoRawEvent.cpp new file mode 100644 index 00000000000..b6c383930f7 --- /dev/null +++ b/DAQ/DAQUtils/src/BackwardsCompatibleMergeViewIntoRawEvent.cpp @@ -0,0 +1,95 @@ +/*****************************************************************************\ +* (c) Copyright 2000-2018 CERN for the benefit of the LHCb Collaboration * +* * +* This software is distributed under the terms of the GNU General Public * +* Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". * +* * +* In applying this licence, CERN does not waive the privileges and immunities * +* granted to it by virtue of its status as an Intergovernmental Organization * +* or submit itself to any jurisdiction. 
* +\*****************************************************************************/ +#include "Event/RawBank.h" +#include "Event/RawEvent.h" + +#include "LHCbAlgs/MergingTransformer.h" + +#include "GaudiKernel/ISvcLocator.h" + +#include <map> +#include <string> + +//----------------------------------------------------------------------------- +// Implementation file for class : BackwardsCompatibleMergeViewIntoRawEvent +// +// 2021-11-04 : Nicole Skidmore +//----------------------------------------------------------------------------- + +/** + * TO BE REMOVED! + * + * For backwards compatibility - + * takes vector of RawBank::View and appends (copies) the banks the views + * point at into an existing RawEvent -- i.e. modifies a RawEvent that is already in the TES. + * This is needed to put the HltDecReportsin the 'default' RawEvent + * + * @author Nicole Skidmore + * @date 2021-11-04 + */ + +template <typename T> +using VOC = Gaudi::Functional::vector_of_const_<T>; + +struct BackwardsCompatibleMergeViewIntoRawEvent final + : LHCb::Algorithm::MergingTransformer<void( VOC<LHCb::RawBank::View> const& )> { + + // Get EXISTING default RawEvent + DataObjectReadHandle<LHCb::RawEvent> m_raw{this, "RawEvent", LHCb::RawEventLocation::Default}; + + BackwardsCompatibleMergeViewIntoRawEvent( const std::string& name, ISvcLocator* pSvcLocator ) + : MergingTransformer( name, pSvcLocator, + // Specify Inputs only + KeyValues{"RawBankViews", {}} ) {} + + StatusCode initialize() override { + return MergingTransformer::initialize().andThen( [&] { + if ( msgLevel( MSG::INFO ) ) info() << inputLocationSize() << " views being copied to " << m_raw << endmsg; + } ); + } + + void operator()( VOC<LHCb::RawBank::View> const& views ) const override { + + // get RawEvent + auto outputRawEvent = m_raw.getOrCreate(); + for ( auto const& view : views ) { + for ( const LHCb::RawBank* b : view ) { + if ( !b ) continue; + /* + // Need to check if another raw bank of this type exists already!!!! 
+ if ( b == LHCb::RawBank::BankType::HltDecReports ) { + for ( const LHCb::RawBank* oBank : output_banks ) { + for ( const LHCb::RawBank* iBank : input_banks ) { + if ( oBank->sourceID() == iBank->sourceID() ) { + throw GaudiException( "Raw bank type " + toString( ib ) + " with sourceID " + + std::to_string( oBank->sourceID() ) + " from input #" + + std::to_string( i_rawEvent ) + " already added to output raw event.", + name(), StatusCode::FAILURE ); + } + } + } + } + */ + + // Add it to the outputRawEvent + outputRawEvent->adoptBank( + outputRawEvent->createBank( b->sourceID(), b->type(), b->version(), b->size(), b->data() ), true ); + if ( msgLevel( MSG::VERBOSE ) ) { + verbose() << " Copied RawBank type=" << b->type() << " version= " << b->version() + << " sourceID= " << b->sourceID() << " size (bytes) = " << b->size() << endmsg; + } + } + } + } +}; + +// Declaration of the Algorithm Factory +DECLARE_COMPONENT( BackwardsCompatibleMergeViewIntoRawEvent ) \ No newline at end of file diff --git a/DAQ/DAQUtils/src/ConsolidateViews.cpp b/DAQ/DAQUtils/src/ConsolidateViews.cpp index 8198d7690b2..b58f5a3d86a 100644 --- a/DAQ/DAQUtils/src/ConsolidateViews.cpp +++ b/DAQ/DAQUtils/src/ConsolidateViews.cpp @@ -49,11 +49,12 @@ struct ConsolidateViews final : LHCb::Algorithm::MergingTransformer<LHCb::RawEve // Inputs KeyValues{"RawBankViews", {}}, // Output - KeyValue{"RawEvent", "Event/DAQ/MergedEvent"} ) {} + KeyValue{"RawEvent", "/Event/DAQ/MergedEvent"} ) {} StatusCode initialize() override { return MergingTransformer::initialize().andThen( [&] { - if ( msgLevel( MSG::INFO ) ) info() << inputLocationSize() << " views being copied to " << endmsg; + if ( msgLevel( MSG::INFO ) ) + info() << inputLocationSize() << " views being copied to " << outputLocation() << endmsg; } ); } diff --git a/Hlt/HltDAQ/src/component/HltDecReportsWriter.cpp b/Hlt/HltDAQ/src/component/HltDecReportsWriter.cpp index cfcb61e8470..ce94004fb65 100644 --- a/Hlt/HltDAQ/src/component/HltDecReportsWriter.cpp +++ b/Hlt/HltDAQ/src/component/HltDecReportsWriter.cpp @@ -11,7 +11,7 @@ #include "Event/HltDecReports.h" #include "Event/RawEvent.h" #include "HltSourceID.h" -#include "LHCbAlgs/Consumer.h" +#include "LHCbAlgs/Transformer.h" //----------------------------------------------------------------------------- // Implementation file for class : HltDecReportsWriter @@ -25,15 +25,14 @@ * @author Tomasz Skwarnicki * @date 2008-07-26 * - * Algorithm to convert HltDecReports container on TES to HLT Raw Bank + * Algorithm to convert HltDecReports container on TES to *new* RawEvent and Raw Bank View * */ namespace LHCb::Hlt::DAQ { - class DecReportsWriter : public Algorithm::Consumer<void( HltDecReports const& )> { - /// location of output -- as it (potentially) updated 'in-situ' we use an explicit datahandle... 
- DataObjectWriteHandle<RawEvent> m_outputRawEvent{this, "OutputRawEventLocation", RawEventLocation::Default}; + class DecReportsWriter : public Algorithm::MultiTransformer<std::tuple<LHCb::RawEvent, LHCb::RawBank::View>( + LHCb::HltDecReports const& )> { /// SourceID to insert in the bank header Gaudi::Property<SourceID> m_sourceID{this, "SourceID", SourceID::Dummy}; @@ -45,9 +44,19 @@ namespace LHCb::Hlt::DAQ { enum HeaderIDs { kVersionNumber = 2 }; DecReportsWriter( const std::string& name, ISvcLocator* pSvcLocator ) - : Consumer{name, pSvcLocator, KeyValue{"InputHltDecReportsLocation", HltDecReportsLocation::Default}} {} + : MultiTransformer{name, + pSvcLocator, + // Input + KeyValue{"InputHltDecReportsLocation", LHCb::HltDecReportsLocation::Default}, + // Outputs + {KeyValue{"OutputRawEvent", "/Event/DAQ/HltDecEvent"}, + KeyValue{"OutputView", "/Event/DAQ/HltDec/View"}}} {}; - void operator()( HltDecReports const& inputSummary ) const override { + std::tuple<LHCb::RawEvent, LHCb::RawBank::View> + operator()( LHCb::HltDecReports const& inputSummary ) const override { + + // Create *new* RawEvent + LHCb::RawEvent outputrawevent; assert( m_sourceID == SourceIDs::Hlt1 || m_sourceID == SourceIDs::Hlt2 || m_sourceID == SourceIDs::Spruce ); if ( msgLevel( MSG::VERBOSE ) ) { @@ -59,20 +68,6 @@ namespace LHCb::Hlt::DAQ { verbose() << endmsg; } - // get output - auto rawEvent = m_outputRawEvent.getOrCreate(); - - // delete any previously inserted dec reports - // note that we need to _copy_ the list of banks, as the original will be modified while - // we're looping over the list... - const auto& bnks = rawEvent->banks( RawBank::HltDecReports ); - for ( const RawBank* b : std::vector( bnks.begin(), bnks.end() ) ) { - auto sourceID = b->version() > 1 ? static_cast<SourceID>( b->sourceID() >> SourceID::BitShift ) : SourceID::Hlt; - if ( m_sourceID != sourceID ) continue; - rawEvent->removeBank( b ); - ++m_removed_old_banks; - } - // compose the bank body std::vector<unsigned int> bankBody; bankBody.reserve( inputSummary.size() + 2 ); @@ -87,7 +82,8 @@ namespace LHCb::Hlt::DAQ { std::sort( std::next( std::begin( bankBody ), 2 ), std::end( bankBody ) ); // shift bits in sourceID for the same convention as in HltSelReports - rawEvent->addBank( int( m_sourceID << SourceID::BitShift ), RawBank::HltDecReports, kVersionNumber, bankBody ); + outputrawevent.addBank( int( m_sourceID << SourceID::BitShift ), RawBank::HltDecReports, kVersionNumber, + bankBody ); if ( msgLevel( MSG::VERBOSE ) ) { verbose() << " Output: "; @@ -102,11 +98,13 @@ namespace LHCb::Hlt::DAQ { } verbose() << endmsg; } - // return rawEvent->banks( RawBank::HltDecReports ); // FIXME this is _not_ save when a _subsequent_ algorithm - // calls - // // addBank,adoptBank or removeBank on the underlying - // RawEvent... - } + + // FIXME this is _not_ save when a _subsequent_ algorithm calls + // addBank,adoptBank or removeBank on the underlying RawEvent... 
+ return std::tuple{std::move( outputrawevent ), outputrawevent.banks( RawBank::HltDecReports )}; + // without std::move here the RawEvent gets copied which would invalidate the view + // View creation must be after RawEvent is made + }; }; // Declaration of the Algorithm Factory -- GitLab From e2a5c3b7a917a838f400adfe581d7e5e1a48a873 Mon Sep 17 00:00:00 2001 From: NicoleSkidmore <nicola.skidmore@cern.ch> Date: Tue, 23 Nov 2021 16:36:39 +0100 Subject: [PATCH 05/30] HltPackedDataWriter outputs a View --- .../src/component/HltPackedDataWriter.cpp | 33 +++++++++++-------- 1 file changed, 20 insertions(+), 13 deletions(-) diff --git a/Hlt/HltDAQ/src/component/HltPackedDataWriter.cpp b/Hlt/HltDAQ/src/component/HltPackedDataWriter.cpp index 72d290f3db2..2d3f8879a89 100644 --- a/Hlt/HltDAQ/src/component/HltPackedDataWriter.cpp +++ b/Hlt/HltDAQ/src/component/HltPackedDataWriter.cpp @@ -26,8 +26,8 @@ #include "Event/PackedTrack.h" #include "Event/PackedVertex.h" #include "Event/RawEvent.h" -#include "GaudiAlg/MergingTransformer.h" #include "Kernel/IANNSvc.h" +#include "LHCbAlgs/MergingTransformer.h" #include "PackedDataChecksum.h" #include "RZip.h" #include <optional> @@ -40,6 +40,9 @@ * @date 2016-01-03 */ +template <typename T> +using VOC = Gaudi::Functional::vector_of_const_<T>; + namespace LHCb::Hlt::PackedData { namespace { const Gaudi::StringKey PackedObjectLocations{"PackedObjectLocations"}; @@ -74,14 +77,13 @@ namespace LHCb::Hlt::PackedData { PackedClusters, PackedCaloAdcs>(); } // namespace - class Writer - : public Gaudi::Functional::MergingConsumer<void( Gaudi::Functional::vector_of_const_<DataObject> const& )> { + class Writer : public LHCb::Algorithm::MergingTransformer<LHCb::RawBank::View( VOC<DataObject> const& )> { public: /// Standard constructor Writer( std::string const& name, ISvcLocator* isvc ); - StatusCode initialize() override; ///< Algorithm initialization - void operator()( Gaudi::Functional::vector_of_const_<DataObject> const& ) const override; + StatusCode initialize() override; ///< Algorithm initialization + LHCb::RawBank::View operator()( VOC<DataObject> const& ) const override; private: /// Put the (compressed) data buffer into raw banks and register them. @@ -118,11 +120,11 @@ namespace LHCb::Hlt::PackedData { DECLARE_COMPONENT_WITH_ID( Writer, "HltPackedDataWriter" ) Writer::Writer( std::string const& name, ISvcLocator* isvc ) - : Gaudi::Functional::MergingConsumer<void( Gaudi::Functional::vector_of_const_<DataObject> const& )>{ - name, isvc, {"PackedContainers", {}}} {} + : LHCb::Algorithm::MergingTransformer<LHCb::RawBank::View( VOC<DataObject> const& )>{ + name, isvc, {"PackedContainers", {}}, {"OutputView", {}}} {} StatusCode Writer::initialize() { - return GaudiAlgorithm::initialize().andThen( [&] { + return MergingTransformer::initialize().andThen( [&] { info() << "Configured to persist containers "; for ( size_t i = 0; i < inputLocationSize(); ++i ) { info() << " '" << inputLocation( i ) << "',"; } info() << endmsg; @@ -168,7 +170,10 @@ namespace LHCb::Hlt::PackedData { auto linkID = m_hltANNSvc->value( PackedObjectLocations, location ); if ( !linkID ) { - status = Error( "Requested to persist link to " + location + " but no ID is registered for it in the ANNSvc!" ); + /// !!! Had to change this + error() << "Requested to persist link to " << Gaudi::Utils::toString( location ) + << " but no ID is registered for it in the ANNSvc!" 
<< endmsg; + return StatusCode::FAILURE; continue; } @@ -187,14 +192,15 @@ namespace LHCb::Hlt::PackedData { if ( msgLevel( MSG::DEBUG ) ) { debug() << "Packed " << containerPath << " with ID " << locationID->second << " and CLID " << classID << " into " << objectSize << " bytes" << endmsg; - counter( containerPath ) += objectSize; + // !!!Hope next line is not important - part of a debug? Could not make counter work + // counter( containerPath ) += objectSize; } } return status; } - void Writer::operator()( Gaudi::Functional::vector_of_const_<DataObject> const& containers ) const { + LHCb::RawBank::View Writer::operator()( VOC<DataObject> const& containers ) const { // Get the raw event -- FIXME: we should be creating a dedicated RawEvent, whose contents get merged by // a dedicated algorithm... @@ -232,7 +238,7 @@ namespace LHCb::Hlt::PackedData { debug() << "Packed data checksum for '" << x.first << "' = " << x.second << endmsg; } } - + return rawEvent->banks( RawBank::DstData ); //++m_hBufferSize[buffer.buffer().size()]; //++m_hCompressedBufferSize[compressedBuffer.size()]; } @@ -245,7 +251,8 @@ namespace LHCb::Hlt::PackedData { auto chunks = range::chunk( data, MAX_PAYLOAD_SIZE ); if ( chunks.size() > extract<SourceIDMasks::PartID>( ~uint16_t{0} ) ) { - Error( "Packed objects too large to save", StatusCode::SUCCESS, 50 ).ignore(); + // !!! Modified following line to compile + error() << "Packed objects too large to save" << endmsg; return; } if ( msgLevel( MSG::DEBUG ) ) { debug() << "Writing " << chunks.size() << " banks" << endmsg; } -- GitLab From 28651339477e37680d14f57c6b93abd0e2cabd30 Mon Sep 17 00:00:00 2001 From: NicoleSkidmore <nicola.skidmore@cern.ch> Date: Wed, 24 Nov 2021 17:41:17 +0100 Subject: [PATCH 06/30] for reading --- GaudiConf/python/GaudiConf/reading.py | 46 +++++++++++++++++-- .../src/component/HltPackedDataWriter.cpp | 2 +- 2 files changed, 42 insertions(+), 6 deletions(-) diff --git a/GaudiConf/python/GaudiConf/reading.py b/GaudiConf/python/GaudiConf/reading.py index f8678e23d8a..37d4b914efc 100644 --- a/GaudiConf/python/GaudiConf/reading.py +++ b/GaudiConf/python/GaudiConf/reading.py @@ -16,7 +16,10 @@ from RawEventFormat import Raw_location_db from .PersistRecoConf import PersistRecoPacking from PyConf.dataflow import force_location -__all__ = ["mc_unpackers", "unpackers", "decoders"] +__all__ = [ + "mc_unpackers", "unpackers", "decoder", "hlt2_decisions", + "spruce_decisions" +] def mc_unpackers(stream='/Event/HLT2', filtered_mc=True, configurables=True): @@ -91,9 +94,42 @@ def decoder(stream='/Event/HLT2', from PyConf.Algorithms import HltPackedDataDecoder prpacking = PersistRecoPacking(stream=stream, data_type=data_type) container_map = prpacking.packedToOutputLocationMap() - bank_location = Raw_location_db[raw_event_format]["DstData"] + if "Spruce" in stream: + bank_location = "DAQ/MergedEvent" + else: + bank_location = Raw_location_db[raw_event_format]["DstData"] decoder = HltPackedDataDecoder( - RawEventLocations=[bank_location], - ContainerMap=container_map, - ) + RawEventLocations=[bank_location], ContainerMap=container_map) return decoder + + +def hlt2_decisions(configurables=True, OutputLevel=4): + """Return a HltDecReportsDecoder instance for HLT2 decisions. 
+ """ + if configurables: + from Configurables import HltDecReportsDecoder + else: + from PyConf.Algorithms import HltDecReportsDecoder + + decode_hlt2 = HltDecReportsDecoder( + "HLT2", SourceID='Hlt2', OutputLevel=OutputLevel) + + return decode_hlt2 + + +def spruce_decisions(configurables=True, OutputLevel=4): + """Return a HltDecReportsDecoder instance for Sprucing decisions. + """ + if configurables: + from Configurables import HltDecReportsDecoder + else: + from PyConf.Algorithms import HltDecReportsDecoder + + decode_spruce = HltDecReportsDecoder( + "Spruce", + SourceID='Spruce', + RawEventLocations="DAQ/MergedEvent", + OutputHltDecReportsLocation="/Event/Spruce/DecReports", + OutputLevel=OutputLevel) + + return decode_spruce \ No newline at end of file diff --git a/Hlt/HltDAQ/src/component/HltPackedDataWriter.cpp b/Hlt/HltDAQ/src/component/HltPackedDataWriter.cpp index 2d3f8879a89..069cd7cfb19 100644 --- a/Hlt/HltDAQ/src/component/HltPackedDataWriter.cpp +++ b/Hlt/HltDAQ/src/component/HltPackedDataWriter.cpp @@ -95,7 +95,7 @@ namespace LHCb::Hlt::PackedData { /// Property giving the mapping between containers and packed containers Gaudi::Property<std::map<std::string, std::string>> m_containerMap{this, "ContainerMap"}; /// Property giving the location of the raw event - DataObjectWriteHandle<RawEvent> m_outputRawEvent{this, "OutputRawEventLocation", RawEventLocation::Default}; + DataObjectWriteHandle<RawEvent> m_outputRawEvent{this, "OutputRawEvent", RawEventLocation::Default}; /// Property setting the compression algorithm Gaudi::Property<Compression> m_compression{this, "Compression", Compression::ZSTD}; /// Property setting the compression level -- GitLab From 53827f326fbbe0e743529d7845c3996e601259df Mon Sep 17 00:00:00 2001 From: NicoleSkidmore <nicola.skidmore@cern.ch> Date: Thu, 25 Nov 2021 11:49:40 +0100 Subject: [PATCH 07/30] it ain't pretty but HLT2 and Sprucing now works - now to un-hardcode locations --- GaudiConf/python/GaudiConf/reading.py | 11 +++++++---- Hlt/HltDAQ/src/component/HltPackedDataDecoder.cpp | 6 +++++- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/GaudiConf/python/GaudiConf/reading.py b/GaudiConf/python/GaudiConf/reading.py index 37d4b914efc..6b5e9384f90 100644 --- a/GaudiConf/python/GaudiConf/reading.py +++ b/GaudiConf/python/GaudiConf/reading.py @@ -85,7 +85,8 @@ def unpackers(stream='/Event/HLT2', def decoder(stream='/Event/HLT2', raw_event_format=0.3, data_type='Upgrade', - configurables=True): + configurables=True, + OutputLevel=4): """Return a DstData raw bank decoder configured for Turbo data. 
""" if configurables: @@ -98,9 +99,11 @@ def decoder(stream='/Event/HLT2', bank_location = "DAQ/MergedEvent" else: bank_location = Raw_location_db[raw_event_format]["DstData"] - decoder = HltPackedDataDecoder( - RawEventLocations=[bank_location], ContainerMap=container_map) - return decoder + decode_packeddata = HltPackedDataDecoder( + RawEventLocations=[bank_location], + ContainerMap=container_map, + OutputLevel=OutputLevel) + return decode_packeddata def hlt2_decisions(configurables=True, OutputLevel=4): diff --git a/Hlt/HltDAQ/src/component/HltPackedDataDecoder.cpp b/Hlt/HltDAQ/src/component/HltPackedDataDecoder.cpp index 64e3211f67f..17298fe9bb3 100644 --- a/Hlt/HltDAQ/src/component/HltPackedDataDecoder.cpp +++ b/Hlt/HltDAQ/src/component/HltPackedDataDecoder.cpp @@ -108,7 +108,11 @@ namespace LHCb::Hlt::PackedData { Decoder::Decoder( const std::string& name, ISvcLocator* pSvcLocator ) : HltRawBankDecoderBase( name, pSvcLocator ) { // new for decoders, initialize search path, and then call the base method - m_rawEventLocations = {RawEventLocation::Trigger, RawEventLocation::Copied, RawEventLocation::Default}; + m_rawEventLocations = { + RawEventLocation::Trigger, + RawEventLocation::Copied, + RawEventLocation::Default, + }; initRawEventSearch(); // The default m_sourceID=0 triggers a warning in HltRawBankDecoderBase::initialize // Since we only care about HLT2 persistence, set it explicitly: -- GitLab From 9c7a8c50ab900e177d0342da790170482a8f1a44 Mon Sep 17 00:00:00 2001 From: Eduardo Rodrigues <eduardo.rodrigues@cern.ch> Date: Thu, 25 Nov 2021 12:41:07 +0100 Subject: [PATCH 08/30] Apply 1 suggestion(s) to 1 file(s) --- Hlt/HltDAQ/src/component/HltPackedDataDecoder.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Hlt/HltDAQ/src/component/HltPackedDataDecoder.cpp b/Hlt/HltDAQ/src/component/HltPackedDataDecoder.cpp index 17298fe9bb3..ba87c1e9e60 100644 --- a/Hlt/HltDAQ/src/component/HltPackedDataDecoder.cpp +++ b/Hlt/HltDAQ/src/component/HltPackedDataDecoder.cpp @@ -1,5 +1,5 @@ /*****************************************************************************\ -* (c) Copyright 2000-2018 CERN for the benefit of the LHCb Collaboration * +* (c) Copyright 2000-2021 CERN for the benefit of the LHCb Collaboration * * * * This software is distributed under the terms of the GNU General Public * * Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". * -- GitLab From f083d55d897ef484ada7545fcdba9fca2fd06e7e Mon Sep 17 00:00:00 2001 From: Gerhard Raven <gerhard.raven@nikhef.nl> Date: Thu, 25 Nov 2021 12:41:27 +0100 Subject: [PATCH 09/30] Apply 1 suggestion(s) to 1 file(s) --- DAQ/DAQUtils/src/ConsolidateViews.cpp | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/DAQ/DAQUtils/src/ConsolidateViews.cpp b/DAQ/DAQUtils/src/ConsolidateViews.cpp index b58f5a3d86a..8081e8eeee4 100644 --- a/DAQ/DAQUtils/src/ConsolidateViews.cpp +++ b/DAQ/DAQUtils/src/ConsolidateViews.cpp @@ -67,8 +67,7 @@ struct ConsolidateViews final : LHCb::Algorithm::MergingTransformer<LHCb::RawEve // Do I need to check if another raw abnk of this type exists already? 
// Add it to the outputRawEvent - outputRawEvent.adoptBank( - outputRawEvent.createBank( b->sourceID(), b->type(), b->version(), b->size(), b->data() ), true ); + outputRawEvent.addBank( b->sourceID(), b->type(), b->version(), b->range<std::byte>() ); if ( msgLevel( MSG::VERBOSE ) ) { verbose() << " Copied RawBank type=" << b->type() << " version= " << b->version() << " sourceID= " << b->sourceID() << " size (bytes) = " << b->size() << endmsg; -- GitLab From 791b00d5e0efbb67c144c684307577a08f7d097d Mon Sep 17 00:00:00 2001 From: Gerhard Raven <gerhard.raven@nikhef.nl> Date: Thu, 25 Nov 2021 12:41:40 +0100 Subject: [PATCH 10/30] Apply 1 suggestion(s) to 1 file(s) --- DAQ/DAQUtils/src/BackwardsCompatibleMergeViewIntoRawEvent.cpp | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/DAQ/DAQUtils/src/BackwardsCompatibleMergeViewIntoRawEvent.cpp b/DAQ/DAQUtils/src/BackwardsCompatibleMergeViewIntoRawEvent.cpp index b6c383930f7..da6f984b397 100644 --- a/DAQ/DAQUtils/src/BackwardsCompatibleMergeViewIntoRawEvent.cpp +++ b/DAQ/DAQUtils/src/BackwardsCompatibleMergeViewIntoRawEvent.cpp @@ -30,7 +30,9 @@ * For backwards compatibility - * takes vector of RawBank::View and appends (copies) the banks the views * point at into an existing RawEvent -- i.e. modifies a RawEvent that is already in the TES. - * This is needed to put the HltDecReportsin the 'default' RawEvent + * This is needed to (also) put the HltDecReports in the 'default' RawEvent, so that + the combination of the modified HltDecReportsWriter + BackwardsCompatibleMergeViewIntoRawEvent is a + drop-in replacement for the old HltDecReportsWriter. * * @author Nicole Skidmore * @date 2021-11-04 -- GitLab From 00cb3ad582d9565056480253007b82dfecebd105 Mon Sep 17 00:00:00 2001 From: Gerhard Raven <gerhard.raven@nikhef.nl> Date: Thu, 25 Nov 2021 12:41:49 +0100 Subject: [PATCH 11/30] Apply 1 suggestion(s) to 1 file(s) --- .../BackwardsCompatibleMergeViewIntoRawEvent.cpp | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/DAQ/DAQUtils/src/BackwardsCompatibleMergeViewIntoRawEvent.cpp b/DAQ/DAQUtils/src/BackwardsCompatibleMergeViewIntoRawEvent.cpp index da6f984b397..5ad3d1c4db6 100644 --- a/DAQ/DAQUtils/src/BackwardsCompatibleMergeViewIntoRawEvent.cpp +++ b/DAQ/DAQUtils/src/BackwardsCompatibleMergeViewIntoRawEvent.cpp @@ -65,21 +65,6 @@ struct BackwardsCompatibleMergeViewIntoRawEvent final for ( auto const& view : views ) { for ( const LHCb::RawBank* b : view ) { if ( !b ) continue; - /* - // Need to check if another raw bank of this type exists already!!!! 
- if ( b == LHCb::RawBank::BankType::HltDecReports ) { - for ( const LHCb::RawBank* oBank : output_banks ) { - for ( const LHCb::RawBank* iBank : input_banks ) { - if ( oBank->sourceID() == iBank->sourceID() ) { - throw GaudiException( "Raw bank type " + toString( ib ) + " with sourceID " + - std::to_string( oBank->sourceID() ) + " from input #" + - std::to_string( i_rawEvent ) + " already added to output raw event.", - name(), StatusCode::FAILURE ); - } - } - } - } - */ // Add it to the outputRawEvent outputRawEvent->adoptBank( -- GitLab From 9046f6aaf1c149c62ce14a13fa5652324af80029 Mon Sep 17 00:00:00 2001 From: Gerhard Raven <gerhard.raven@nikhef.nl> Date: Thu, 25 Nov 2021 12:41:56 +0100 Subject: [PATCH 12/30] Apply 1 suggestion(s) to 1 file(s) --- DAQ/DAQUtils/src/BackwardsCompatibleMergeViewIntoRawEvent.cpp | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/DAQ/DAQUtils/src/BackwardsCompatibleMergeViewIntoRawEvent.cpp b/DAQ/DAQUtils/src/BackwardsCompatibleMergeViewIntoRawEvent.cpp index 5ad3d1c4db6..e2fccb0e03e 100644 --- a/DAQ/DAQUtils/src/BackwardsCompatibleMergeViewIntoRawEvent.cpp +++ b/DAQ/DAQUtils/src/BackwardsCompatibleMergeViewIntoRawEvent.cpp @@ -67,8 +67,7 @@ struct BackwardsCompatibleMergeViewIntoRawEvent final if ( !b ) continue; // Add it to the outputRawEvent - outputRawEvent->adoptBank( - outputRawEvent->createBank( b->sourceID(), b->type(), b->version(), b->size(), b->data() ), true ); + outputRawEvent->addBank( b->sourceID(), b->type(), b->version(), b->range<std::byte>() ); if ( msgLevel( MSG::VERBOSE ) ) { verbose() << " Copied RawBank type=" << b->type() << " version= " << b->version() << " sourceID= " << b->sourceID() << " size (bytes) = " << b->size() << endmsg; -- GitLab From 57261e759a72a99e491a3d5d362f19d016b5f037 Mon Sep 17 00:00:00 2001 From: Gerhard Raven <gerhard.raven@nikhef.nl> Date: Thu, 25 Nov 2021 12:42:06 +0100 Subject: [PATCH 13/30] Apply 1 suggestion(s) to 1 file(s) --- Hlt/HltDAQ/src/component/HltDecReportsWriter.cpp | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/Hlt/HltDAQ/src/component/HltDecReportsWriter.cpp b/Hlt/HltDAQ/src/component/HltDecReportsWriter.cpp index ce94004fb65..6220f318509 100644 --- a/Hlt/HltDAQ/src/component/HltDecReportsWriter.cpp +++ b/Hlt/HltDAQ/src/component/HltDecReportsWriter.cpp @@ -101,6 +101,11 @@ namespace LHCb::Hlt::DAQ { // FIXME this is _not_ save when a _subsequent_ algorithm calls // addBank,adoptBank or removeBank on the underlying RawEvent... 
+ // TODO: `writeViewFor` (see https://gitlab.cern.ch/gaudi/Gaudi/-/merge_requests/1151 ) + // should be extended to allow it to be used for this case as well, as that would + // make the underlying `RawEvent` inaccessible, and thus _ensure_ it will not be + // modified (not even by code that doesn't play by the rules, provided it is not + // explicitly/maliciously tailored to do this) return std::tuple{std::move( outputrawevent ), outputrawevent.banks( RawBank::HltDecReports )}; // without std::move here the RawEvent gets copied which would invalidate the view // View creation must be after RawEvent is made -- GitLab From aec0e44d473023f2c79fac1e233f33aad304cbe3 Mon Sep 17 00:00:00 2001 From: NicoleSkidmore <nicola.skidmore@cern.ch> Date: Wed, 1 Dec 2021 15:27:44 +0100 Subject: [PATCH 14/30] losing hard-coded locaitons --- GaudiConf/python/GaudiConf/reading.py | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/GaudiConf/python/GaudiConf/reading.py b/GaudiConf/python/GaudiConf/reading.py index 6b5e9384f90..59d47eace34 100644 --- a/GaudiConf/python/GaudiConf/reading.py +++ b/GaudiConf/python/GaudiConf/reading.py @@ -13,15 +13,30 @@ from __future__ import absolute_import import os from RawEventFormat import Raw_location_db +from PyConf.application import default_raw_event from .PersistRecoConf import PersistRecoPacking from PyConf.dataflow import force_location +from PyConf.Algorithms import LHCb__UnpackRawEvent +from pprint import pprint __all__ = [ "mc_unpackers", "unpackers", "decoder", "hlt2_decisions", - "spruce_decisions" + "spruce_decisions", "unpack_rawevent" ] +def unpack_rawevent(bank_types=[], OutputLevel=4): + pprint(bank_types) + unpackrawevent = LHCb__UnpackRawEvent( + BankTypes=bank_types, + RawBankLocations=[ + '/Event/DAQ/RawBanks/%s' % (rb) for rb in bank_types + ], + RawEventLocation=default_raw_event(["DstData"]), + OutputLevel=OutputLevel) + return unpackrawevent + + def mc_unpackers(stream='/Event/HLT2', filtered_mc=True, configurables=True): """Return a list of unpackers for reading Monte Carlo truth objects. 
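A usage sketch, added editorially and not part of any commit in this series: how these reading helpers are intended to chain in an options file. The keyword arguments still evolve in the commits that follow, and scheduling of `algs` is left to the host application (Moore/DaVinci), so treat this only as an illustration of the data flow:

    from GaudiConf import reading

    algs = []
    # expose the persisted raw banks as RawBank::Views under /Event/DAQ/RawBanks/<type>
    algs.append(reading.unpack_rawevent(bank_types=["DstData", "HltDecReports"]))
    # decode the DstData bank back into packed containers, then unpack them to TES objects
    algs.append(reading.decoder())
    algs.extend(reading.unpackers())
    # decode the HLT2 trigger decisions
    algs.append(reading.hlt2_decisions())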
-- GitLab From a142adcecba0e467ec9e52b0841ede7caeb9ca34 Mon Sep 17 00:00:00 2001 From: NicoleSkidmore <nicola.skidmore@cern.ch> Date: Wed, 1 Dec 2021 18:05:57 +0100 Subject: [PATCH 15/30] changes for reading --- GaudiConf/python/GaudiConf/reading.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/GaudiConf/python/GaudiConf/reading.py b/GaudiConf/python/GaudiConf/reading.py index 59d47eace34..6f682f615ad 100644 --- a/GaudiConf/python/GaudiConf/reading.py +++ b/GaudiConf/python/GaudiConf/reading.py @@ -17,22 +17,28 @@ from PyConf.application import default_raw_event from .PersistRecoConf import PersistRecoPacking from PyConf.dataflow import force_location -from PyConf.Algorithms import LHCb__UnpackRawEvent -from pprint import pprint __all__ = [ "mc_unpackers", "unpackers", "decoder", "hlt2_decisions", "spruce_decisions", "unpack_rawevent" ] -def unpack_rawevent(bank_types=[], OutputLevel=4): - pprint(bank_types) +def unpack_rawevent(bank_types=["DstData", "HltDecReports"], + raw_event_format=0.3, + configurables=True, + OutputLevel=4): + if configurables: + from Configurables import LHCb__UnpackRawEvent + else: + from PyConf.Algorithms import LHCb__UnpackRawEvent unpackrawevent = LHCb__UnpackRawEvent( BankTypes=bank_types, RawBankLocations=[ '/Event/DAQ/RawBanks/%s' % (rb) for rb in bank_types ], - RawEventLocation=default_raw_event(["DstData"]), + RawEventLocation=default_raw_event( + bank_types=["DstData"], + raw_event_format=raw_event_format).location, OutputLevel=OutputLevel) return unpackrawevent -- GitLab From 92d2ce60de918f40bcabde19df9d84851c7edbae Mon Sep 17 00:00:00 2001 From: NicoleSkidmore <nicola.skidmore@cern.ch> Date: Thu, 2 Dec 2021 16:24:14 +0100 Subject: [PATCH 16/30] working for sprucing and hlt2 --- GaudiConf/python/GaudiConf/reading.py | 155 +++++++++++++++----------- 1 file changed, 93 insertions(+), 62 deletions(-) diff --git a/GaudiConf/python/GaudiConf/reading.py b/GaudiConf/python/GaudiConf/reading.py index 6f682f615ad..51fbeccd560 100644 --- a/GaudiConf/python/GaudiConf/reading.py +++ b/GaudiConf/python/GaudiConf/reading.py @@ -13,9 +13,7 @@ from __future__ import absolute_import import os from RawEventFormat import Raw_location_db -from PyConf.application import default_raw_event from .PersistRecoConf import PersistRecoPacking -from PyConf.dataflow import force_location __all__ = [ "mc_unpackers", "unpackers", "decoder", "hlt2_decisions", @@ -24,102 +22,85 @@ __all__ = [ def unpack_rawevent(bank_types=["DstData", "HltDecReports"], + process='hlt2', raw_event_format=0.3, configurables=True, OutputLevel=4): + """Return RawBank:Views of banks in RawEvent. + + Args: + bank_types (list of str): RawBanks to create RawBank:Views of. + process (str): 'hlt2' or 'spruce' - serves to determine `RawEventLocation` only. 
+ """ + assert process == "spruce" or process == "hlt2", 'Unpacking helper only accepts hlt2 or spruce processes' if configurables: from Configurables import LHCb__UnpackRawEvent else: from PyConf.Algorithms import LHCb__UnpackRawEvent + if process == "spruce": + bank_location = "DAQ/MergedEvent" + else: + bank_location = Raw_location_db[raw_event_format]["DstData"] unpackrawevent = LHCb__UnpackRawEvent( BankTypes=bank_types, RawBankLocations=[ '/Event/DAQ/RawBanks/%s' % (rb) for rb in bank_types ], - RawEventLocation=default_raw_event( - bank_types=["DstData"], - raw_event_format=raw_event_format).location, + RawEventLocation=bank_location, OutputLevel=OutputLevel) return unpackrawevent -def mc_unpackers(stream='/Event/HLT2', filtered_mc=True, configurables=True): - """Return a list of unpackers for reading Monte Carlo truth objects. - - Args: - stream (str): The TES prefix of information saved in Moore. - filtered_mc (bool): If True, assume Moore saved only a filtered - subset of the input MC objects. - """ - mc_prefix = stream if filtered_mc else "/Event" - if configurables: - from Configurables import UnpackMCParticle, UnpackMCVertex - unpack_mcp = UnpackMCParticle( - InputName=os.path.join(mc_prefix, "pSim/MCParticles"), - OutputName=os.path.join(mc_prefix, "MC/Particles"), - ) - unpack_mcv = UnpackMCVertex( - InputName=os.path.join(mc_prefix, "pSim/MCVertices"), - OutputName=os.path.join(mc_prefix, "MC/Vertices"), - ) - else: - from PyConf.application import make_data_with_FetchDataFromFile - from PyConf.Algorithms import UnpackMCParticle, UnpackMCVertex - unpack_mcp = UnpackMCParticle( - InputName=make_data_with_FetchDataFromFile( - os.path.join(mc_prefix, "pSim/MCParticles")), - outputs={ - "OutputName": - force_location(os.path.join(mc_prefix, "MC/Particles")) - }) - unpack_mcv = UnpackMCVertex( - InputName=make_data_with_FetchDataFromFile( - os.path.join(mc_prefix, "pSim/MCVertices")), - outputs={ - "OutputName": - force_location(os.path.join(mc_prefix, "MC/Vertices")) - }) - - return [unpack_mcp, unpack_mcv] - - -def unpackers(stream='/Event/HLT2', - stream_psandvs='/Event/HLT2', +def unpackers(process='hlt2', data_type='Upgrade', - configurables=True): + configurables=True, + OutputLevel=4): """Return a list of unpackers for reading reconstructed objects. Args: - stream (str): The TES prefix of information saved in Moore. + process (str): 'hlt2' or 'spruce'. 
data_type (str): The data type to configure PersistRecoPacking """ + assert process == "spruce" or process == "hlt2", 'Unpacking helper only accepts hlt2 or spruce processes' if configurables: from Configurables import UnpackParticlesAndVertices else: from PyConf.Algorithms import UnpackParticlesAndVertices - prpacking = PersistRecoPacking(stream=stream, data_type=data_type) - unpack_persistreco = prpacking.unpackers(configurables=configurables) - unpack_psandvs = UnpackParticlesAndVertices(InputStream=stream_psandvs) + if process == "spruce": + PANDV_ROOT = '/Event/Spruce' + RECO_ROOT = '/Event/Spruce/HLT2' + else: + PANDV_ROOT = '/Event/HLT2' + RECO_ROOT = PANDV_ROOT + prpacking = PersistRecoPacking(stream=RECO_ROOT, data_type=data_type) + unpack_persistreco = prpacking.unpackers( + configurables=configurables, output_level=OutputLevel) + unpack_psandvs = UnpackParticlesAndVertices( + InputStream=PANDV_ROOT, OutputLevel=OutputLevel) return unpack_persistreco + [unpack_psandvs] -def decoder(stream='/Event/HLT2', +def decoder(process='hlt2', raw_event_format=0.3, data_type='Upgrade', configurables=True, OutputLevel=4): - """Return a DstData raw bank decoder configured for Turbo data. + """Return a DstData raw bank decoder. """ + assert process == "spruce" or process == "hlt2", 'Data decoder helper only accepts hlt2 or spruce processes' if configurables: from Configurables import HltPackedDataDecoder else: from PyConf.Algorithms import HltPackedDataDecoder - prpacking = PersistRecoPacking(stream=stream, data_type=data_type) - container_map = prpacking.packedToOutputLocationMap() - if "Spruce" in stream: + if process == "spruce": bank_location = "DAQ/MergedEvent" + RECO_ROOT = '/Event/Spruce/HLT2' else: bank_location = Raw_location_db[raw_event_format]["DstData"] + RECO_ROOT = '/Event/HLT2' + + prpacking = PersistRecoPacking(stream=RECO_ROOT, data_type=data_type) + container_map = prpacking.packedToOutputLocationMap() decode_packeddata = HltPackedDataDecoder( RawEventLocations=[bank_location], ContainerMap=container_map, @@ -127,23 +108,34 @@ def decoder(stream='/Event/HLT2', return decode_packeddata -def hlt2_decisions(configurables=True, OutputLevel=4): +def hlt2_decisions(process='hlt2', + raw_event_format=0.3, + configurables=True, + OutputLevel=4): """Return a HltDecReportsDecoder instance for HLT2 decisions. """ + assert process == "spruce" or process == "hlt2", 'Hlt2 decisions helper only accepts hlt2 or spruce processes' if configurables: from Configurables import HltDecReportsDecoder else: from PyConf.Algorithms import HltDecReportsDecoder - + if process == "spruce": + bank_location = "DAQ/MergedEvent" + else: + bank_location = Raw_location_db[raw_event_format]["HltDecReports"] decode_hlt2 = HltDecReportsDecoder( - "HLT2", SourceID='Hlt2', OutputLevel=OutputLevel) + "HLT2", + SourceID='Hlt2', + RawEventLocations=bank_location, + OutputLevel=OutputLevel) return decode_hlt2 -def spruce_decisions(configurables=True, OutputLevel=4): +def spruce_decisions(process="spruce", configurables=True, OutputLevel=4): """Return a HltDecReportsDecoder instance for Sprucing decisions. 
""" + assert process == "spruce", 'Can only ask for sprucing decisions if sprucing job' if configurables: from Configurables import HltDecReportsDecoder else: @@ -156,4 +148,43 @@ def spruce_decisions(configurables=True, OutputLevel=4): OutputHltDecReportsLocation="/Event/Spruce/DecReports", OutputLevel=OutputLevel) - return decode_spruce \ No newline at end of file + return decode_spruce + + +def mc_unpackers(process='hlt2', filtered_mc=True, configurables=True): + """Return a list of unpackers for reading Monte Carlo truth objects. + + Args: + process (str): 'hlt2' (or, in the future, 'spruce'). + filtered_mc (bool): If True, assume Moore saved only a filtered + subset of the input MC objects. + """ + assert process == "hlt2", 'MC tools not functional yet outside hlt2 :(' + mc_prefix = '/Event/HLT2' if filtered_mc else "/Event" + if configurables: + from Configurables import UnpackMCParticle, UnpackMCVertex + unpack_mcp = UnpackMCParticle( + InputName=os.path.join(mc_prefix, "pSim/MCParticles"), + OutputName=os.path.join(mc_prefix, "MC/Particles"), + ) + unpack_mcv = UnpackMCVertex( + InputName=os.path.join(mc_prefix, "pSim/MCVertices"), + OutputName=os.path.join(mc_prefix, "MC/Vertices"), + ) + else: + from PyConf.application import make_data_with_FetchDataFromFile + from PyConf.Algorithms import UnpackMCParticle, UnpackMCVertex + unpack_mcp = UnpackMCParticle( + InputName=make_data_with_FetchDataFromFile( + os.path.join(mc_prefix, "pSim/MCParticles")), + OutputName=make_data_with_FetchDataFromFile( + os.path.join(mc_prefix, "MC/Particles")), + ) + unpack_mcv = UnpackMCVertex( + InputName=make_data_with_FetchDataFromFile( + os.path.join(mc_prefix, "pSim/MCVertices")), + OutputName=make_data_with_FetchDataFromFile( + os.path.join(mc_prefix, "MC/Vertices")), + ) + + return [unpack_mcp, unpack_mcv] -- GitLab From 2332cf2b08b28c8c22265d98a31ac530528d893e Mon Sep 17 00:00:00 2001 From: NicoleSkidmore <nicola.skidmore@cern.ch> Date: Fri, 10 Dec 2021 11:45:34 +0100 Subject: [PATCH 17/30] reading takes pass --- GaudiConf/python/GaudiConf/reading.py | 76 +++++++++++++++++++-------- 1 file changed, 53 insertions(+), 23 deletions(-) diff --git a/GaudiConf/python/GaudiConf/reading.py b/GaudiConf/python/GaudiConf/reading.py index 51fbeccd560..a00d6248718 100644 --- a/GaudiConf/python/GaudiConf/reading.py +++ b/GaudiConf/python/GaudiConf/reading.py @@ -22,7 +22,8 @@ __all__ = [ def unpack_rawevent(bank_types=["DstData", "HltDecReports"], - process='hlt2', + process='Hlt2', + stream="default", raw_event_format=0.3, configurables=True, OutputLevel=4): @@ -30,15 +31,18 @@ def unpack_rawevent(bank_types=["DstData", "HltDecReports"], Args: bank_types (list of str): RawBanks to create RawBank:Views of. - process (str): 'hlt2' or 'spruce' - serves to determine `RawEventLocation` only. + process (str): 'Turbo' or 'Spruce' or 'Hlt2' - serves to determine `RawEventLocation` only. + stream (str): needed post-sprucing as RawEvent is then dependent on stream name + - drives `RawEventLocation` only. 
+ raw_event_format (float): """ - assert process == "spruce" or process == "hlt2", 'Unpacking helper only accepts hlt2 or spruce processes' + assert process == "Spruce" or process == "Turbo" or process == "Hlt2", 'Unpacking helper only accepts Turbo, Spruce or Hlt2 processes' if configurables: from Configurables import LHCb__UnpackRawEvent else: from PyConf.Algorithms import LHCb__UnpackRawEvent - if process == "spruce": - bank_location = "DAQ/MergedEvent" + if process == "Spruce" or process == "Turbo": + bank_location = stream else: bank_location = Raw_location_db[raw_event_format]["DstData"] unpackrawevent = LHCb__UnpackRawEvent( @@ -51,22 +55,22 @@ def unpack_rawevent(bank_types=["DstData", "HltDecReports"], return unpackrawevent -def unpackers(process='hlt2', +def unpackers(process='Hlt2', data_type='Upgrade', configurables=True, OutputLevel=4): """Return a list of unpackers for reading reconstructed objects. Args: - process (str): 'hlt2' or 'spruce'. + process (str): 'Turbo' or 'Spruce' or 'Hlt2'. data_type (str): The data type to configure PersistRecoPacking """ - assert process == "spruce" or process == "hlt2", 'Unpacking helper only accepts hlt2 or spruce processes' + assert process == "Spruce" or process == "Turbo" or process == "Hlt2", 'Unpacking helper only accepts Turbo, Spruce or Hlt2 processes' if configurables: from Configurables import UnpackParticlesAndVertices else: from PyConf.Algorithms import UnpackParticlesAndVertices - if process == "spruce": + if process == "Spruce": PANDV_ROOT = '/Event/Spruce' RECO_ROOT = '/Event/Spruce/HLT2' else: @@ -80,21 +84,32 @@ def unpackers(process='hlt2', return unpack_persistreco + [unpack_psandvs] -def decoder(process='hlt2', +def decoder(process='Hlt2', + stream="default", raw_event_format=0.3, data_type='Upgrade', configurables=True, OutputLevel=4): """Return a DstData raw bank decoder. + + Args: + process (str): 'Turbo' or 'Spruce' or 'Hlt2' - serves to determine `RawEventLocation` only. + stream (str): needed post-sprucing as RawEvent is then dependent on stream name + - drives `RawEventLocation` only. + raw_event_format (float): + data_type (str): The data type to configure PersistRecoPacking. """ - assert process == "spruce" or process == "hlt2", 'Data decoder helper only accepts hlt2 or spruce processes' + assert process == "Spruce" or process == "Turbo" or process == "Hlt2", 'Unpacking helper only accepts Turbo, Spruce or Hlt2 processes' if configurables: from Configurables import HltPackedDataDecoder else: from PyConf.Algorithms import HltPackedDataDecoder - if process == "spruce": - bank_location = "DAQ/MergedEvent" + if process == "Spruce": + bank_location = stream RECO_ROOT = '/Event/Spruce/HLT2' + elif process == "Turbo": + bank_location = stream + RECO_ROOT = '/Event/HLT2' else: bank_location = Raw_location_db[raw_event_format]["DstData"] RECO_ROOT = '/Event/HLT2' @@ -108,19 +123,26 @@ def decoder(process='hlt2', return decode_packeddata -def hlt2_decisions(process='hlt2', +def hlt2_decisions(process='Hlt2', + stream="default", raw_event_format=0.3, configurables=True, OutputLevel=4): """Return a HltDecReportsDecoder instance for HLT2 decisions. + + Args: + process (str): 'Turbo' or 'Spruce' or 'Hlt2' - serves to determine `RawEventLocation` only. + stream (str): needed post-sprucing as RawEvent is then dependent on stream name + - drives `RawEventLocation` only. 
+ raw_event_format (float): """ - assert process == "spruce" or process == "hlt2", 'Hlt2 decisions helper only accepts hlt2 or spruce processes' + assert process == "Spruce" or process == "Turbo" or process == "Hlt2", 'Unpacking helper only accepts Turbo, Spruce or Hlt2 processes' if configurables: from Configurables import HltDecReportsDecoder else: from PyConf.Algorithms import HltDecReportsDecoder - if process == "spruce": - bank_location = "DAQ/MergedEvent" + if process == "Spruce" or process == "Turbo": + bank_location = stream else: bank_location = Raw_location_db[raw_event_format]["HltDecReports"] decode_hlt2 = HltDecReportsDecoder( @@ -132,10 +154,18 @@ def hlt2_decisions(process='hlt2', return decode_hlt2 -def spruce_decisions(process="spruce", configurables=True, OutputLevel=4): +def spruce_decisions(process="Spruce", + stream="default", + configurables=True, + OutputLevel=4): """Return a HltDecReportsDecoder instance for Sprucing decisions. + + Args: + process (str): 'Turbo' or 'Spruce' - serves to determine `RawEventLocation` only. + stream (str): needed post-sprucing as RawEvent is then dependent on stream name + - drives `RawEventLocation` only. """ - assert process == "spruce", 'Can only ask for sprucing decisions if sprucing job' + assert process == "Spruce" or process == "Turbo", 'Can only ask for sprucing decisions if post - sprucing' if configurables: from Configurables import HltDecReportsDecoder else: @@ -144,22 +174,22 @@ def spruce_decisions(process="spruce", configurables=True, OutputLevel=4): decode_spruce = HltDecReportsDecoder( "Spruce", SourceID='Spruce', - RawEventLocations="DAQ/MergedEvent", + RawEventLocations=stream, OutputHltDecReportsLocation="/Event/Spruce/DecReports", OutputLevel=OutputLevel) return decode_spruce -def mc_unpackers(process='hlt2', filtered_mc=True, configurables=True): +def mc_unpackers(process='Hlt2', filtered_mc=True, configurables=True): """Return a list of unpackers for reading Monte Carlo truth objects. Args: - process (str): 'hlt2' (or, in the future, 'spruce'). + process (str): 'Hlt2' (or, in the future, 'Spruce'). filtered_mc (bool): If True, assume Moore saved only a filtered subset of the input MC objects. 
""" - assert process == "hlt2", 'MC tools not functional yet outside hlt2 :(' + assert process == "Hlt2", 'MC tools not functional yet outside Hlt2 :(' mc_prefix = '/Event/HLT2' if filtered_mc else "/Event" if configurables: from Configurables import UnpackMCParticle, UnpackMCVertex -- GitLab From 63d62b2206f16e01cfa5fb46c8a8dfa7aed1e2bb Mon Sep 17 00:00:00 2001 From: Gerhard Raven <gerhard.raven@nikhef.nl> Date: Mon, 13 Dec 2021 12:22:02 +0100 Subject: [PATCH 18/30] Apply 1 suggestion(s) to 1 file(s) --- Hlt/HltDAQ/src/component/HltPackedDataDecoder.cpp | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/Hlt/HltDAQ/src/component/HltPackedDataDecoder.cpp b/Hlt/HltDAQ/src/component/HltPackedDataDecoder.cpp index ba87c1e9e60..1e9665fd345 100644 --- a/Hlt/HltDAQ/src/component/HltPackedDataDecoder.cpp +++ b/Hlt/HltDAQ/src/component/HltPackedDataDecoder.cpp @@ -134,8 +134,10 @@ namespace LHCb::Hlt::PackedData { // Check we know how to decode this version if ( rawBank0->version() < 2 || rawBank0->version() > kVersionNumber ) { - std::cout << "rawBank0->version() " << rawBank0->version() << std::endl; - std::cout << "rawBank0->type() " << rawBank0->type() << std::endl; + if (msgLevel(MSG::VERBOSE)) { + debug() << "rawBank0->version() " << rawBank0->version() << endmsg; + debug() << "rawBank0->type() " << rawBank0->type() << endmsg; + } ++m_bad_version; return StatusCode::FAILURE; -- GitLab From f1112c71dfa8ea791c19256ec23ea2feb76cc01e Mon Sep 17 00:00:00 2001 From: NicoleSkidmore <nicola.skidmore@cern.ch> Date: Wed, 15 Dec 2021 11:19:54 +0100 Subject: [PATCH 19/30] fix failing HltDAQ.a_dumptolatestformat test --- GaudiConf/python/GaudiConf/reading.py | 2 ++ Hlt/HltDAQ/src/component/HltDecReportsWriter.cpp | 6 +++--- .../options/dump_decode_wipe_encode_split.py | 15 ++++++++++++++- 3 files changed, 19 insertions(+), 4 deletions(-) diff --git a/GaudiConf/python/GaudiConf/reading.py b/GaudiConf/python/GaudiConf/reading.py index a00d6248718..e708c406b35 100644 --- a/GaudiConf/python/GaudiConf/reading.py +++ b/GaudiConf/python/GaudiConf/reading.py @@ -125,6 +125,7 @@ def decoder(process='Hlt2', def hlt2_decisions(process='Hlt2', stream="default", + output_loc="/Event/Hlt/DecReports", raw_event_format=0.3, configurables=True, OutputLevel=4): @@ -148,6 +149,7 @@ def hlt2_decisions(process='Hlt2', decode_hlt2 = HltDecReportsDecoder( "HLT2", SourceID='Hlt2', + OutputHltDecReportsLocation=output_loc, RawEventLocations=bank_location, OutputLevel=OutputLevel) diff --git a/Hlt/HltDAQ/src/component/HltDecReportsWriter.cpp b/Hlt/HltDAQ/src/component/HltDecReportsWriter.cpp index 6220f318509..ab12dc39c7f 100644 --- a/Hlt/HltDAQ/src/component/HltDecReportsWriter.cpp +++ b/Hlt/HltDAQ/src/component/HltDecReportsWriter.cpp @@ -101,9 +101,9 @@ namespace LHCb::Hlt::DAQ { // FIXME this is _not_ save when a _subsequent_ algorithm calls // addBank,adoptBank or removeBank on the underlying RawEvent... 
- // TODO: `writeViewFor` (see https://gitlab.cern.ch/gaudi/Gaudi/-/merge_requests/1151 ) - // should be extended to allow it to be used for this case as well, as that would - // make the underlying `RawEvent` inaccessible, and thus _ensure_ it will not be + // TODO: `writeViewFor` (see https://gitlab.cern.ch/gaudi/Gaudi/-/merge_requests/1151 ) + // should be extended to allow it to be used for this case as well, as that would + // make the underlying `RawEvent` inaccessible, and thus _ensure_ it will not be // modified (not even by code that doesn't play by the rules, provided it is not // explicitly/maliciously tailored to do this) return std::tuple{std::move( outputrawevent ), outputrawevent.banks( RawBank::HltDecReports )}; diff --git a/Hlt/HltDAQ/tests/options/dump_decode_wipe_encode_split.py b/Hlt/HltDAQ/tests/options/dump_decode_wipe_encode_split.py index bddc21f8138..ac198cac0e7 100644 --- a/Hlt/HltDAQ/tests/options/dump_decode_wipe_encode_split.py +++ b/Hlt/HltDAQ/tests/options/dump_decode_wipe_encode_split.py @@ -10,7 +10,7 @@ from __future__ import print_function # or submit itself to any jurisdiction. # ############################################################################### from Gaudi.Configuration import * -from Configurables import bankKiller, RawEventDump, GaudiSequencer +from Configurables import bankKiller, RawEventDump, GaudiSequencer, BackwardsCompatibleMergeViewIntoRawEvent from Configurables import LHCbApp import os @@ -48,6 +48,7 @@ for stage, t, (seq, at) in product((1, 2), bankTypes, ((decoders, "Decoder"), algName = "Hlt{0}{1}Reports{2}".format(stage, t, at) conf = getattr(Configurables, algType)(algName) conf.SourceID = "Hlt{0}".format(stage) + conf.setProp( propertyName(locs[at], t), "Hlt{0}/{1}Reports".format(stage, t)) if (t == "Sel" and at == "Decoder"): @@ -57,8 +58,20 @@ for stage, t, (seq, at) in product((1, 2), bankTypes, ((decoders, "Decoder"), conf.setProp("DecReports", "Hlt{0}/DecReports".format(stage)) conf.setProp("ObjectSummaries", "Hlt{0}/{1}Reports/Candidates".format( stage, t)) + if (t == "Dec" and at == "Writer"): + #Want writers to use RawBank::Views and not alter input RawEvent - see !LHCb3303 MR description + #For now have to send the HLT1 and HLT2 banks to different *new* RawEvent/RawBank::View output locations + conf.setProp("OutputRawEvent", + "/Event/DAQ/Hlt{0}DecEvent".format(stage)) + conf.setProp("OutputView", "/Event/DAQ/Hlt{0}DecView".format(stage)) + seq.Members.append(conf.getFullName()) +##Temporary for backwards compatibility - merge the HLT1 and HLT2 RawBank::Views back into the default RawEvent +views_to_raw_event = BackwardsCompatibleMergeViewIntoRawEvent( + RawBankViews=["/Event/DAQ/Hlt{0}DecView".format(i) for i in [1, 2]]) +encoders.Members.append(views_to_raw_event) + for atype in bankTypes: bank = "Hlt" + atype + "Reports" dump.RawBanks.append(bank) -- GitLab From dbf543d36f04330643d341ed7d0c6fa4921f2de1 Mon Sep 17 00:00:00 2001 From: Gitlab CI <noreply@cern.ch> Date: Wed, 15 Dec 2021 10:20:30 +0000 Subject: [PATCH 20/30] Fixed formatting patch generated by https://gitlab.cern.ch/lhcb/LHCb/-/jobs/18342415 --- DAQ/DAQUtils/src/BackwardsCompatibleMergeViewIntoRawEvent.cpp | 2 +- Hlt/HltDAQ/src/component/HltPackedDataDecoder.cpp | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/DAQ/DAQUtils/src/BackwardsCompatibleMergeViewIntoRawEvent.cpp b/DAQ/DAQUtils/src/BackwardsCompatibleMergeViewIntoRawEvent.cpp index e2fccb0e03e..4b195cf555d 100644 --- 
a/DAQ/DAQUtils/src/BackwardsCompatibleMergeViewIntoRawEvent.cpp +++ b/DAQ/DAQUtils/src/BackwardsCompatibleMergeViewIntoRawEvent.cpp @@ -30,7 +30,7 @@ * For backwards compatibility - * takes vector of RawBank::View and appends (copies) the banks the views * point at into an existing RawEvent -- i.e. modifies a RawEvent that is already in the TES. - * This is needed to (also) put the HltDecReports in the 'default' RawEvent, so that + * This is needed to (also) put the HltDecReports in the 'default' RawEvent, so that the combination of the modified HltDecReportsWriter + BackwardsCompatibleMergeViewIntoRawEvent is a drop-in replacement for the old HltDecReportsWriter. * diff --git a/Hlt/HltDAQ/src/component/HltPackedDataDecoder.cpp b/Hlt/HltDAQ/src/component/HltPackedDataDecoder.cpp index 1e9665fd345..53efa824f50 100644 --- a/Hlt/HltDAQ/src/component/HltPackedDataDecoder.cpp +++ b/Hlt/HltDAQ/src/component/HltPackedDataDecoder.cpp @@ -134,7 +134,7 @@ namespace LHCb::Hlt::PackedData { // Check we know how to decode this version if ( rawBank0->version() < 2 || rawBank0->version() > kVersionNumber ) { - if (msgLevel(MSG::VERBOSE)) { + if ( msgLevel( MSG::VERBOSE ) ) { debug() << "rawBank0->version() " << rawBank0->version() << endmsg; debug() << "rawBank0->type() " << rawBank0->type() << endmsg; } -- GitLab From 98279d9f04eb57ac8b63e24919bfce71a6ff2231 Mon Sep 17 00:00:00 2001 From: NicoleSkidmore <nicola.skidmore@cern.ch> Date: Fri, 17 Dec 2021 16:35:55 +0100 Subject: [PATCH 21/30] reading to work with DV --- GaudiConf/python/GaudiConf/reading.py | 64 ++++++++++++++++++--------- 1 file changed, 42 insertions(+), 22 deletions(-) diff --git a/GaudiConf/python/GaudiConf/reading.py b/GaudiConf/python/GaudiConf/reading.py index e708c406b35..b315187c612 100644 --- a/GaudiConf/python/GaudiConf/reading.py +++ b/GaudiConf/python/GaudiConf/reading.py @@ -12,8 +12,9 @@ from __future__ import absolute_import import os -from RawEventFormat import Raw_location_db from .PersistRecoConf import PersistRecoPacking +from PyConf.application import default_raw_event, make_data_with_FetchDataFromFile +from PyConf.components import force_location __all__ = [ "mc_unpackers", "unpackers", "decoder", "hlt2_decisions", @@ -37,14 +38,16 @@ def unpack_rawevent(bank_types=["DstData", "HltDecReports"], raw_event_format (float): """ assert process == "Spruce" or process == "Turbo" or process == "Hlt2", 'Unpacking helper only accepts Turbo, Spruce or Hlt2 processes' + if process == "Spruce" or process == "Turbo": + bank_location = make_data_with_FetchDataFromFile(stream) + else: + bank_location = default_raw_event(["DstData"], + raw_event_format=raw_event_format) if configurables: from Configurables import LHCb__UnpackRawEvent + bank_location = bank_location.location else: from PyConf.Algorithms import LHCb__UnpackRawEvent - if process == "Spruce" or process == "Turbo": - bank_location = stream - else: - bank_location = Raw_location_db[raw_event_format]["DstData"] unpackrawevent = LHCb__UnpackRawEvent( BankTypes=bank_types, RawBankLocations=[ @@ -100,20 +103,23 @@ def decoder(process='Hlt2', data_type (str): The data type to configure PersistRecoPacking. 
""" assert process == "Spruce" or process == "Turbo" or process == "Hlt2", 'Unpacking helper only accepts Turbo, Spruce or Hlt2 processes' - if configurables: - from Configurables import HltPackedDataDecoder - else: - from PyConf.Algorithms import HltPackedDataDecoder if process == "Spruce": - bank_location = stream + bank_location = make_data_with_FetchDataFromFile(stream) RECO_ROOT = '/Event/Spruce/HLT2' elif process == "Turbo": - bank_location = stream + bank_location = make_data_with_FetchDataFromFile(stream) RECO_ROOT = '/Event/HLT2' else: - bank_location = Raw_location_db[raw_event_format]["DstData"] + bank_location = default_raw_event(["DstData"], + raw_event_format=raw_event_format) RECO_ROOT = '/Event/HLT2' + if configurables: + from Configurables import HltPackedDataDecoder + bank_location = bank_location.location + else: + from PyConf.Algorithms import HltPackedDataDecoder + prpacking = PersistRecoPacking(stream=RECO_ROOT, data_type=data_type) container_map = prpacking.packedToOutputLocationMap() decode_packeddata = HltPackedDataDecoder( @@ -138,19 +144,25 @@ def hlt2_decisions(process='Hlt2', raw_event_format (float): """ assert process == "Spruce" or process == "Turbo" or process == "Hlt2", 'Unpacking helper only accepts Turbo, Spruce or Hlt2 processes' + + if process == "Spruce" or process == "Turbo": + bank_location = make_data_with_FetchDataFromFile(stream) + else: + bank_location = default_raw_event(["HltDecReports"], + raw_event_format=raw_event_format) if configurables: from Configurables import HltDecReportsDecoder + bank_location = bank_location.location + else: from PyConf.Algorithms import HltDecReportsDecoder - if process == "Spruce" or process == "Turbo": - bank_location = stream - else: - bank_location = Raw_location_db[raw_event_format]["HltDecReports"] + output_loc = force_location(output_loc) + decode_hlt2 = HltDecReportsDecoder( - "HLT2", + name="HLT2", SourceID='Hlt2', - OutputHltDecReportsLocation=output_loc, RawEventLocations=bank_location, + OutputHltDecReportsLocation=output_loc, OutputLevel=OutputLevel) return decode_hlt2 @@ -168,16 +180,21 @@ def spruce_decisions(process="Spruce", - drives `RawEventLocation` only. """ assert process == "Spruce" or process == "Turbo", 'Can only ask for sprucing decisions if post - sprucing' + bank_location = make_data_with_FetchDataFromFile(stream) + output_loc = "/Event/Spruce/DecReports" if configurables: from Configurables import HltDecReportsDecoder + bank_location = bank_location.location + else: from PyConf.Algorithms import HltDecReportsDecoder + output_loc = force_location(output_loc) decode_spruce = HltDecReportsDecoder( - "Spruce", + name="Spruce", SourceID='Spruce', - RawEventLocations=stream, - OutputHltDecReportsLocation="/Event/Spruce/DecReports", + RawEventLocations=bank_location, + OutputHltDecReportsLocation=output_loc, OutputLevel=OutputLevel) return decode_spruce @@ -191,7 +208,10 @@ def mc_unpackers(process='Hlt2', filtered_mc=True, configurables=True): filtered_mc (bool): If True, assume Moore saved only a filtered subset of the input MC objects. 
""" - assert process == "Hlt2", 'MC tools not functional yet outside Hlt2 :(' + print( + "WARNING: MC tools not functional yet outside Hlt2 - they will run but the output is nonsense:(" + ) + assert process == "Hlt2" or process == "Turbo" or process == "Spruce", 'MC unpacker helper only accepts Turbo, Spruce or Hlt2 processes' mc_prefix = '/Event/HLT2' if filtered_mc else "/Event" if configurables: from Configurables import UnpackMCParticle, UnpackMCVertex -- GitLab From 0eb6a2b53846c6476f73e7eaa7dc5a2b625bd886 Mon Sep 17 00:00:00 2001 From: NicoleSkidmore <nicola.skidmore@cern.ch> Date: Fri, 17 Dec 2021 17:02:23 +0100 Subject: [PATCH 22/30] uff --- GaudiConf/python/GaudiConf/reading.py | 45 +++++++++++++++++---------- 1 file changed, 29 insertions(+), 16 deletions(-) diff --git a/GaudiConf/python/GaudiConf/reading.py b/GaudiConf/python/GaudiConf/reading.py index b315187c612..faca08b8d1d 100644 --- a/GaudiConf/python/GaudiConf/reading.py +++ b/GaudiConf/python/GaudiConf/reading.py @@ -152,18 +152,24 @@ def hlt2_decisions(process='Hlt2', raw_event_format=raw_event_format) if configurables: from Configurables import HltDecReportsDecoder - bank_location = bank_location.location + decode_hlt2 = HltDecReportsDecoder( + name="HLT2", + SourceID='Hlt2', + RawEventLocations=bank_location.location, + OutputHltDecReportsLocation=output_loc, + OutputLevel=OutputLevel) else: from PyConf.Algorithms import HltDecReportsDecoder - output_loc = force_location(output_loc) - decode_hlt2 = HltDecReportsDecoder( - name="HLT2", - SourceID='Hlt2', - RawEventLocations=bank_location, - OutputHltDecReportsLocation=output_loc, - OutputLevel=OutputLevel) + decode_hlt2 = HltDecReportsDecoder( + name="HLT2", + SourceID='Hlt2', + RawEventLocations=bank_location, + outputs={ + 'OutputHltDecReportsLocation': force_location(output_loc) + }, + OutputLevel=OutputLevel) return decode_hlt2 @@ -184,18 +190,25 @@ def spruce_decisions(process="Spruce", output_loc = "/Event/Spruce/DecReports" if configurables: from Configurables import HltDecReportsDecoder - bank_location = bank_location.location + + decode_spruce = HltDecReportsDecoder( + name="Spruce", + SourceID='Spruce', + RawEventLocations=bank_location.location, + OutputHltDecReportsLocation=output_loc, + OutputLevel=OutputLevel) else: from PyConf.Algorithms import HltDecReportsDecoder - output_loc = force_location(output_loc) - decode_spruce = HltDecReportsDecoder( - name="Spruce", - SourceID='Spruce', - RawEventLocations=bank_location, - OutputHltDecReportsLocation=output_loc, - OutputLevel=OutputLevel) + decode_spruce = HltDecReportsDecoder( + name="Spruce", + SourceID='Spruce', + RawEventLocations=bank_location, + outputs={ + 'OutputHltDecReportsLocation': force_location(output_loc) + }, + OutputLevel=OutputLevel) return decode_spruce -- GitLab From d80b08be716bd3207c587d6d013e8f42ba9b8b17 Mon Sep 17 00:00:00 2001 From: NicoleSkidmore <nicola.skidmore@cern.ch> Date: Sun, 19 Dec 2021 11:24:43 +0100 Subject: [PATCH 23/30] MC unpackers bug --- GaudiConf/python/GaudiConf/reading.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/GaudiConf/python/GaudiConf/reading.py b/GaudiConf/python/GaudiConf/reading.py index faca08b8d1d..e14b57da983 100644 --- a/GaudiConf/python/GaudiConf/reading.py +++ b/GaudiConf/python/GaudiConf/reading.py @@ -242,14 +242,17 @@ def mc_unpackers(process='Hlt2', filtered_mc=True, configurables=True): unpack_mcp = UnpackMCParticle( InputName=make_data_with_FetchDataFromFile( os.path.join(mc_prefix, "pSim/MCParticles")), - 
OutputName=make_data_with_FetchDataFromFile( - os.path.join(mc_prefix, "MC/Particles")), - ) + outputs={ + "OutputName": + force_location(os.path.join(mc_prefix, "MC/Particles")) + }) + unpack_mcv = UnpackMCVertex( InputName=make_data_with_FetchDataFromFile( os.path.join(mc_prefix, "pSim/MCVertices")), - OutputName=make_data_with_FetchDataFromFile( - os.path.join(mc_prefix, "MC/Vertices")), - ) + outputs={ + "OutputName": + force_location(os.path.join(mc_prefix, "MC/Vertices")) + }) return [unpack_mcp, unpack_mcv] -- GitLab From c43c4cb7ea4f1c07db6146d21aa3d57699c87909 Mon Sep 17 00:00:00 2001 From: NicoleSkidmore <nicola.skidmore@cern.ch> Date: Sun, 19 Dec 2021 11:28:01 +0100 Subject: [PATCH 24/30] reorder --- GaudiConf/python/GaudiConf/reading.py | 92 +++++++++++++-------------- 1 file changed, 46 insertions(+), 46 deletions(-) diff --git a/GaudiConf/python/GaudiConf/reading.py b/GaudiConf/python/GaudiConf/reading.py index e14b57da983..c7053f6fa60 100644 --- a/GaudiConf/python/GaudiConf/reading.py +++ b/GaudiConf/python/GaudiConf/reading.py @@ -22,6 +22,51 @@ __all__ = [ ] +def mc_unpackers(process='Hlt2', filtered_mc=True, configurables=True): + """Return a list of unpackers for reading Monte Carlo truth objects. + + Args: + process (str): 'Hlt2' (or, in the future, 'Spruce'). + filtered_mc (bool): If True, assume Moore saved only a filtered + subset of the input MC objects. + """ + print( + "WARNING: MC tools not functional yet outside Hlt2 - they will run but the output is nonsense:(" + ) + assert process == "Hlt2" or process == "Turbo" or process == "Spruce", 'MC unpacker helper only accepts Turbo, Spruce or Hlt2 processes' + mc_prefix = '/Event/HLT2' if filtered_mc else "/Event" + if configurables: + from Configurables import UnpackMCParticle, UnpackMCVertex + unpack_mcp = UnpackMCParticle( + InputName=os.path.join(mc_prefix, "pSim/MCParticles"), + OutputName=os.path.join(mc_prefix, "MC/Particles"), + ) + unpack_mcv = UnpackMCVertex( + InputName=os.path.join(mc_prefix, "pSim/MCVertices"), + OutputName=os.path.join(mc_prefix, "MC/Vertices"), + ) + else: + from PyConf.application import make_data_with_FetchDataFromFile + from PyConf.Algorithms import UnpackMCParticle, UnpackMCVertex + unpack_mcp = UnpackMCParticle( + InputName=make_data_with_FetchDataFromFile( + os.path.join(mc_prefix, "pSim/MCParticles")), + outputs={ + "OutputName": + force_location(os.path.join(mc_prefix, "MC/Particles")) + }) + + unpack_mcv = UnpackMCVertex( + InputName=make_data_with_FetchDataFromFile( + os.path.join(mc_prefix, "pSim/MCVertices")), + outputs={ + "OutputName": + force_location(os.path.join(mc_prefix, "MC/Vertices")) + }) + + return [unpack_mcp, unpack_mcv] + + def unpack_rawevent(bank_types=["DstData", "HltDecReports"], process='Hlt2', stream="default", @@ -210,49 +255,4 @@ def spruce_decisions(process="Spruce", }, OutputLevel=OutputLevel) - return decode_spruce - - -def mc_unpackers(process='Hlt2', filtered_mc=True, configurables=True): - """Return a list of unpackers for reading Monte Carlo truth objects. - - Args: - process (str): 'Hlt2' (or, in the future, 'Spruce'). - filtered_mc (bool): If True, assume Moore saved only a filtered - subset of the input MC objects. 
- """ - print( - "WARNING: MC tools not functional yet outside Hlt2 - they will run but the output is nonsense:(" - ) - assert process == "Hlt2" or process == "Turbo" or process == "Spruce", 'MC unpacker helper only accepts Turbo, Spruce or Hlt2 processes' - mc_prefix = '/Event/HLT2' if filtered_mc else "/Event" - if configurables: - from Configurables import UnpackMCParticle, UnpackMCVertex - unpack_mcp = UnpackMCParticle( - InputName=os.path.join(mc_prefix, "pSim/MCParticles"), - OutputName=os.path.join(mc_prefix, "MC/Particles"), - ) - unpack_mcv = UnpackMCVertex( - InputName=os.path.join(mc_prefix, "pSim/MCVertices"), - OutputName=os.path.join(mc_prefix, "MC/Vertices"), - ) - else: - from PyConf.application import make_data_with_FetchDataFromFile - from PyConf.Algorithms import UnpackMCParticle, UnpackMCVertex - unpack_mcp = UnpackMCParticle( - InputName=make_data_with_FetchDataFromFile( - os.path.join(mc_prefix, "pSim/MCParticles")), - outputs={ - "OutputName": - force_location(os.path.join(mc_prefix, "MC/Particles")) - }) - - unpack_mcv = UnpackMCVertex( - InputName=make_data_with_FetchDataFromFile( - os.path.join(mc_prefix, "pSim/MCVertices")), - outputs={ - "OutputName": - force_location(os.path.join(mc_prefix, "MC/Vertices")) - }) - - return [unpack_mcp, unpack_mcv] + return decode_spruce \ No newline at end of file -- GitLab From f04326f589837f76b6e8b5357e81c3cdc0097222 Mon Sep 17 00:00:00 2001 From: Gitlab CI <noreply@cern.ch> Date: Sun, 19 Dec 2021 10:28:37 +0000 Subject: [PATCH 25/30] Fixed formatting patch generated by https://gitlab.cern.ch/lhcb/LHCb/-/jobs/18432867 --- GaudiConf/python/GaudiConf/reading.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/GaudiConf/python/GaudiConf/reading.py b/GaudiConf/python/GaudiConf/reading.py index c7053f6fa60..28f3a224e30 100644 --- a/GaudiConf/python/GaudiConf/reading.py +++ b/GaudiConf/python/GaudiConf/reading.py @@ -255,4 +255,4 @@ def spruce_decisions(process="Spruce", }, OutputLevel=OutputLevel) - return decode_spruce \ No newline at end of file + return decode_spruce -- GitLab From 329e065e4bfda252a216b9b600960331f088378f Mon Sep 17 00:00:00 2001 From: NicoleSkidmore <nicola.skidmore@cern.ch> Date: Mon, 20 Dec 2021 10:39:07 +0100 Subject: [PATCH 26/30] print warning for unpacker order --- GaudiConf/python/GaudiConf/reading.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/GaudiConf/python/GaudiConf/reading.py b/GaudiConf/python/GaudiConf/reading.py index 28f3a224e30..fbc774a6839 100644 --- a/GaudiConf/python/GaudiConf/reading.py +++ b/GaudiConf/python/GaudiConf/reading.py @@ -23,13 +23,16 @@ __all__ = [ def mc_unpackers(process='Hlt2', filtered_mc=True, configurables=True): - """Return a list of unpackers for reading Monte Carlo truth objects. + """Return a list of unpackers for reading Monte Carlo truth objects + + This must run BEFORE unpackers!!!. Args: process (str): 'Hlt2' (or, in the future, 'Spruce'). filtered_mc (bool): If True, assume Moore saved only a filtered subset of the input MC objects. """ + print("'mc_unpackers' must run BEFORE 'unpackers'") print( "WARNING: MC tools not functional yet outside Hlt2 - they will run but the output is nonsense:(" ) @@ -109,6 +112,8 @@ def unpackers(process='Hlt2', OutputLevel=4): """Return a list of unpackers for reading reconstructed objects. + This must run AFTER mc_unpackers if MC data!!!. + Args: process (str): 'Turbo' or 'Spruce' or 'Hlt2'. 
data_type (str): The data type to configure PersistRecoPacking -- GitLab From f23b8b39102668af540574344855c22c20aad5da Mon Sep 17 00:00:00 2001 From: Rosen Matev <rosen.matev@cern.ch> Date: Thu, 6 Jan 2022 16:43:22 +0100 Subject: [PATCH 27/30] Apply 1 suggestion(s) to 1 file(s) --- Hlt/HltDAQ/src/component/HltPackedDataWriter.cpp | 2 -- 1 file changed, 2 deletions(-) diff --git a/Hlt/HltDAQ/src/component/HltPackedDataWriter.cpp b/Hlt/HltDAQ/src/component/HltPackedDataWriter.cpp index 069cd7cfb19..51f826e194c 100644 --- a/Hlt/HltDAQ/src/component/HltPackedDataWriter.cpp +++ b/Hlt/HltDAQ/src/component/HltPackedDataWriter.cpp @@ -170,11 +170,9 @@ namespace LHCb::Hlt::PackedData { auto linkID = m_hltANNSvc->value( PackedObjectLocations, location ); if ( !linkID ) { - /// !!! Had to change this error() << "Requested to persist link to " << Gaudi::Utils::toString( location ) << " but no ID is registered for it in the ANNSvc!" << endmsg; return StatusCode::FAILURE; - continue; } buffer.save<int32_t>( linkID->second ); -- GitLab From 7c047532ffbaccd04a1077b88279e9f7d3b6c644 Mon Sep 17 00:00:00 2001 From: Rosen Matev <rosen.matev@cern.ch> Date: Thu, 6 Jan 2022 16:44:06 +0100 Subject: [PATCH 28/30] Apply 1 suggestion(s) to 1 file(s) --- Hlt/HltDAQ/src/component/HltPackedDataWriter.cpp | 2 -- 1 file changed, 2 deletions(-) diff --git a/Hlt/HltDAQ/src/component/HltPackedDataWriter.cpp b/Hlt/HltDAQ/src/component/HltPackedDataWriter.cpp index 51f826e194c..9cf8d94760e 100644 --- a/Hlt/HltDAQ/src/component/HltPackedDataWriter.cpp +++ b/Hlt/HltDAQ/src/component/HltPackedDataWriter.cpp @@ -190,8 +190,6 @@ namespace LHCb::Hlt::PackedData { if ( msgLevel( MSG::DEBUG ) ) { debug() << "Packed " << containerPath << " with ID " << locationID->second << " and CLID " << classID << " into " << objectSize << " bytes" << endmsg; - // !!!Hope next line is not important - part of a debug? 
Could not make counter work - // counter( containerPath ) += objectSize; } } -- GitLab From d8a3c38ef9056c8d272b913fdcd7200d94fb52fd Mon Sep 17 00:00:00 2001 From: NicoleSkidmore <nicola.skidmore@cern.ch> Date: Thu, 6 Jan 2022 17:47:13 +0100 Subject: [PATCH 29/30] review comments --- DAQ/DAQUtils/CMakeLists.txt | 2 +- ....cpp => CombineRawBankViewsToRawEvent.cpp} | 24 +++++++------------ .../src/component/HltPackedDataWriter.cpp | 5 ++-- 3 files changed, 13 insertions(+), 18 deletions(-) rename DAQ/DAQUtils/src/{ConsolidateViews.cpp => CombineRawBankViewsToRawEvent.cpp} (74%) diff --git a/DAQ/DAQUtils/CMakeLists.txt b/DAQ/DAQUtils/CMakeLists.txt index 04be129938f..4ff1762dbae 100644 --- a/DAQ/DAQUtils/CMakeLists.txt +++ b/DAQ/DAQUtils/CMakeLists.txt @@ -35,7 +35,7 @@ gaudi_add_module(DAQUtils src/RawEventSimpleCombiner.cpp src/UnpackRawEvent.cpp src/bankKiller.cpp - src/ConsolidateViews.cpp + src/CombineRawBankViewsToRawEvent.cpp src/BackwardsCompatibleMergeViewIntoRawEvent.cpp LINK Gaudi::GaudiKernel diff --git a/DAQ/DAQUtils/src/ConsolidateViews.cpp b/DAQ/DAQUtils/src/CombineRawBankViewsToRawEvent.cpp similarity index 74% rename from DAQ/DAQUtils/src/ConsolidateViews.cpp rename to DAQ/DAQUtils/src/CombineRawBankViewsToRawEvent.cpp index 8081e8eeee4..546912f28db 100644 --- a/DAQ/DAQUtils/src/ConsolidateViews.cpp +++ b/DAQ/DAQUtils/src/CombineRawBankViewsToRawEvent.cpp @@ -19,21 +19,15 @@ #include <string> //----------------------------------------------------------------------------- -// Implementation file for class : ConsolidateViews +// Implementation file for class : CombineRawBankViewsToRawEvent // // 2021-11-04 : Nicole Skidmore //----------------------------------------------------------------------------- /** - * Combines vector of RawBank::View and returns new RawEvent - * - * Based on RawEventCombiner - * If the same raw bank exists in several raw events, the merge will fail, - * except in the two following cases. - * - HltDecReports banks from different raw events are merged as long as their - * source ID is different. - * - The HltRoutingBits bank is always taken from the first raw event where it - * appears. Subsequent appearances are silently ignored. + * Combines vector of RawBank::View and returns *new* RawEvent + * Note there is currently no check for if 2 RawBanks of the same type are being added to the new RawEvent. For the + * current implementation this is justified but should be revised if usage of this extends * * @author Nicole Skidmore * @date 2021-11-04 @@ -42,9 +36,10 @@ template <typename T> using VOC = Gaudi::Functional::vector_of_const_<T>; -struct ConsolidateViews final : LHCb::Algorithm::MergingTransformer<LHCb::RawEvent( VOC<LHCb::RawBank::View> const& )> { +struct CombineRawBankViewsToRawEvent final + : LHCb::Algorithm::MergingTransformer<LHCb::RawEvent( VOC<LHCb::RawBank::View> const& )> { - ConsolidateViews( const std::string& name, ISvcLocator* pSvcLocator ) + CombineRawBankViewsToRawEvent( const std::string& name, ISvcLocator* pSvcLocator ) : MergingTransformer( name, pSvcLocator, // Inputs KeyValues{"RawBankViews", {}}, @@ -65,8 +60,7 @@ struct ConsolidateViews final : LHCb::Algorithm::MergingTransformer<LHCb::RawEve for ( const LHCb::RawBank* b : view ) { if ( !b ) continue; - // Do I need to check if another raw abnk of this type exists already? 
- // Add it to the outputRawEvent + // Add it to the new outputRawEvent outputRawEvent.addBank( b->sourceID(), b->type(), b->version(), b->range<std::byte>() ); if ( msgLevel( MSG::VERBOSE ) ) { verbose() << " Copied RawBank type=" << b->type() << " version= " << b->version() @@ -79,4 +73,4 @@ struct ConsolidateViews final : LHCb::Algorithm::MergingTransformer<LHCb::RawEve }; // Declaration of the Algorithm Factory -DECLARE_COMPONENT( ConsolidateViews ) \ No newline at end of file +DECLARE_COMPONENT( CombineRawBankViewsToRawEvent ) \ No newline at end of file diff --git a/Hlt/HltDAQ/src/component/HltPackedDataWriter.cpp b/Hlt/HltDAQ/src/component/HltPackedDataWriter.cpp index 9cf8d94760e..1c4a19d3b53 100644 --- a/Hlt/HltDAQ/src/component/HltPackedDataWriter.cpp +++ b/Hlt/HltDAQ/src/component/HltPackedDataWriter.cpp @@ -106,6 +106,8 @@ namespace LHCb::Hlt::PackedData { mutable Gaudi::Accumulators::StatCounter<> m_serializedDataSize{this, "Size of serialized data"}; mutable Gaudi::Accumulators::StatCounter<> m_compressedDataSize{this, "Size of compressed data"}; + mutable Gaudi::Accumulators::MsgCounter<MSG::ERROR> m_packedobjectsize{this, "Packed objects too large to save", 50}; + /// HltANNSvc for making selection names to int selection ID ServiceHandle<IANNSvc> m_hltANNSvc{this, "ANNSvc", "HltANNSvc", "Service to retrieve DecReport IDs"}; @@ -247,8 +249,7 @@ namespace LHCb::Hlt::PackedData { auto chunks = range::chunk( data, MAX_PAYLOAD_SIZE ); if ( chunks.size() > extract<SourceIDMasks::PartID>( ~uint16_t{0} ) ) { - // !!! Modified following line to compile - error() << "Packed objects too large to save" << endmsg; + ++m_packedobjectsize; return; } if ( msgLevel( MSG::DEBUG ) ) { debug() << "Writing " << chunks.size() << " banks" << endmsg; } -- GitLab From ee07c4f8a933856bdb8b508537183c4d9df1bdf6 Mon Sep 17 00:00:00 2001 From: Gitlab CI <noreply@cern.ch> Date: Thu, 6 Jan 2022 16:47:48 +0000 Subject: [PATCH 30/30] Fixed formatting patch generated by https://gitlab.cern.ch/lhcb/LHCb/-/jobs/18636949 --- Hlt/HltDAQ/src/component/HltPackedDataWriter.cpp | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Hlt/HltDAQ/src/component/HltPackedDataWriter.cpp b/Hlt/HltDAQ/src/component/HltPackedDataWriter.cpp index 1c4a19d3b53..eda9a9246c2 100644 --- a/Hlt/HltDAQ/src/component/HltPackedDataWriter.cpp +++ b/Hlt/HltDAQ/src/component/HltPackedDataWriter.cpp @@ -106,7 +106,8 @@ namespace LHCb::Hlt::PackedData { mutable Gaudi::Accumulators::StatCounter<> m_serializedDataSize{this, "Size of serialized data"}; mutable Gaudi::Accumulators::StatCounter<> m_compressedDataSize{this, "Size of compressed data"}; - mutable Gaudi::Accumulators::MsgCounter<MSG::ERROR> m_packedobjectsize{this, "Packed objects too large to save", 50}; + mutable Gaudi::Accumulators::MsgCounter<MSG::ERROR> m_packedobjectsize{this, "Packed objects too large to save", + 50}; /// HltANNSvc for making selection names to int selection ID ServiceHandle<IANNSvc> m_hltANNSvc{this, "ANNSvc", "HltANNSvc", "Service to retrieve DecReport IDs"}; -- GitLab
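
For orientation, the sketch below shows how the reworked GaudiConf.reading helpers introduced in the patches above might be chained for a post-sprucing job in PyConf mode (configurables=False). This is an illustration only, not part of the patch series: the stream location is an assumption, the remaining keyword arguments are left at their defaults, and the resulting algorithm list would still need to be handed to the application configuration in the usual way.

# Hypothetical usage sketch of the reworked GaudiConf.reading helpers (PyConf mode).
# The stream path below is an illustrative assumption, not a value mandated by the patches.
from GaudiConf import reading

stream = "/Event/Spruce/RawEvent"   # assumed post-sprucing stream-dependent RawEvent location

algs = []
# MC truth unpackers must be scheduled before the reco unpackers (cf. PATCH 26/30).
algs += reading.mc_unpackers(process="Spruce", filtered_mc=True, configurables=False)
# Expose the DstData and HltDecReports banks from the stream-dependent RawEvent.
algs.append(
    reading.unpack_rawevent(
        bank_types=["DstData", "HltDecReports"],
        process="Spruce",
        stream=stream,
        configurables=False))
# Decode the Hlt2 and Sprucing decision reports; reading.decoder() and reading.unpackers()
# would be added here in the same way to recover the packed reconstruction objects.
algs.append(
    reading.hlt2_decisions(process="Spruce", stream=stream, configurables=False))
algs.append(
    reading.spruce_decisions(process="Spruce", stream=stream, configurables=False))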