/*****************************************************************************\
* (c) Copyright 2020 CERN for the benefit of the LHCb Collaboration *
* *
* This software is distributed under the terms of the GNU General Public *
* Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". *
* *
* In applying this licence, CERN does not waive the privileges and immunities *
* granted to it by virtue of its status as an Intergovernmental Organization *
* or submit itself to any jurisdiction. *
\*****************************************************************************/
#include "CaloDet/DeCalorimeter.h"
#include "CaloFutureUtils/CaloMomentum.h"
#include "DetDesc/ConditionAccessorHolder.h"
#include "Event/Particle.h"
#include "Event/Particle_v2.h"
#include "Event/ProtoParticle.h"
#include "Event/RecVertex.h"
#include "GaudiAlg/Transformer.h"
#include "GaudiKernel/SystemOfUnits.h"
#include "Kernel/IParticlePropertySvc.h"
#include "Kernel/ParticleProperty.h"
#include <algorithm>
#include <cmath>
#include <cstdlib>
#include <map>
#include <memory>
#include <optional>
#include <string>
#include <string_view>
#include <tuple>
#include <utility>
// File containing definition AND implementation of neutral makers (photons, pi0s (...))
/** @class PhotonMaker
*
* @brief LHCb::Particle creation from LHCb::ProtoParticle neutral objects and PVs
 *  It is based on the "old" PhotonMaker, but respecting thread-safety during execution
*
* The main purpose of the algorithm is to create Photon Particles from ProtoParticles.
 *  An extra input consisting of PVs is given in order to use it as the photon vertex,
* but it is only used if the "FirstPVasOrigin" Property is set to true
*
* List of Gaudi Properties:
 *  "FirstPVasOrigin": If True, first PV on the list will be used as photon origin, otherwise (0,0,0) is chosen
* "ClusterCodeMasks": std::map to use custom masks on the clusters. Default (none) is good for most applications.
* "Particle": ID for this particle. Default behaviour assigns photon ID and mass.
* "ConfLevelCut": Minimum CL required
* "PtCut": Minimum PT required
*
* Example: Create photon particles from neutral protoparticles and PVs,
* but not using the first PVs as photon origins
* @code {.py}
* myphotonparticles = PhotonMaker(
InputProtoParticles=my_neutral_protoparticles,
Particle="gamma",
InputPrimaryVertices=my_pvs,
FirstPVasOrigin=False)
* @endcode
*
*/
/** @class MergedPi0Maker
*
* @brief LHCb::Particle Merged Pi0 creation from LHCb::ProtoParticle neutral objects and PVs
* It is based on "old" MergedPi0Maker, but respecting thread-safety during execution
*
* The main purpose of the algorithm is to create Merged Pi0s Particles from ProtoParticles
 *  An extra input consisting of PVs is given in order to use it as the photon vertex,
* but it is only used if the "FirstPVasOrigin" Property is set to true
*
*
* List of Gaudi Properties:
 *  "FirstPVasOrigin": If True, first PV on the list will be used as photon origin, otherwise (0,0,0) is chosen
* "ClusterCodeMasks": std::map to use custom masks on the clusters. Default (none) is good for most applications.
* "Particle": ID for this particle. Default behaviour assigns pi0 ID and mass.
* "ConfLevelCut": Minimum CL required
* "PtCut": Minimum PT required for the pi0
* "GammaPtCut": Minimum PT of each photon
* "GammaGammaDistCut": Maximum photon distance
* "Chi2Cut": If positive, maximum Chi2
* "MassWindow": Mass range allowed, centered at the mass of the requested particle
*
* Example: Create merged pi0 particles from neutral protoparticles and PVs,
* but not using the first PVs as pi0 origins
* @code {.py}
* mymergedpi0particles = MergedPi0Maker(
InputProtoParticles=my_neutral_protoparticles,
Particle="pi0",
InputPrimaryVertices=my_pvs,
FirstPVasOrigin=False)
* @endcode
*
*/
// Anonymous namespace for shared functions, enums of NeutralMakers
namespace {
// Enumeration for different cases of ClusterCodes
enum class ClusterCodes { Size, Position2nd, Shape, Isolated, None };
// ====================
// Forward declarations of the helpers defined below.
// NOTE(review): the template arguments here were lost (angle-bracket contents
// stripped) and have been reconstructed from the call sites in this file
// (ClusterCode() is compared against integer windows; masks are keyed by string
// in the property and by ClusterCodes internally) -- confirm against the
// original revision.
std::optional<int>  ClusterCode( const LHCb::ProtoParticle& proto, ClusterCodes type );
Gaudi::XYZPoint     GetPoint( LHCb::RecVertices const& PVs, bool const setPV );
Gaudi::SymMatrix3x3 GetPointErr( LHCb::RecVertices const& PVs, bool const setPV );
ClusterCodes        to_ClusterCodes( std::string_view type );
std::map<ClusterCodes, std::pair<double, double>>
ClusterCodeToEnum( std::map<std::string, std::pair<double, double>> const& clusterMasks, std::string const& AlgName );
double         confLevelPhoton( const LHCb::ProtoParticle& proto );
double         confLevelMergedPi0( const LHCb::ProtoParticle& proto );
LHCb::Particle make_neutral( LHCb::Calo::Momentum& momentum );
// ====================
// Get first PV point
// Origin point for the neutral candidate: the first PV when requested and
// available, otherwise the nominal origin (0,0,0).
Gaudi::XYZPoint GetPoint( LHCb::RecVertices const& PVs, bool const setPV ) {
  if ( !setPV || PVs.size() == 0 ) return Gaudi::XYZPoint();
  return ( *PVs.begin() )->position();
}
// ====================
// Get first PV point error
// Covariance of the origin point: the first PV covariance when requested and
// available, otherwise a null matrix (exact origin).
Gaudi::SymMatrix3x3 GetPointErr( LHCb::RecVertices const& PVs, bool const setPV ) {
  if ( !setPV || PVs.size() == 0 ) return Gaudi::SymMatrix3x3();
  return ( *PVs.begin() )->covMatrix();
}
// ====================
// Get Cluster Code
// Decode one field of the packed CaloClusterCode word stored on the proto.
// Encoding (as unpacked here): |code| = mult*10 + pos; the sign of the code
// carries the isolation flag; the parity of pos carries the shape flag.
// Returns std::nullopt for an unknown field request (ClusterCodes::None).
// NOTE(review): return type reconstructed as std::optional<int> from the
// integer comparisons at the call sites -- confirm against the original.
std::optional<int> ClusterCode( const LHCb::ProtoParticle& proto, ClusterCodes type ) {
  const int code = static_cast<int>( proto.info( LHCb::ProtoParticle::additionalInfo::CaloClusterCode, 0. ) );
  const int mult = std::abs( code ) / 10;
  const int pos  = std::abs( code ) - mult * 10;
  const int isol = ( code > 0 ) ? 1 : 0;
  const int conf = pos % 2;
  switch ( type ) {
  case ClusterCodes::Size:
    return mult;
  case ClusterCodes::Position2nd:
    return pos;
  case ClusterCodes::Shape:
    return conf;
  case ClusterCodes::Isolated:
    return isol;
  default:
    return std::nullopt;
  }
}
// ====================
// Conversion from string to ClusterCode
// Map the property-level string onto the internal enumeration. Anything
// unrecognised maps to ClusterCodes::None (flagged later by ClusterCodeToEnum).
ClusterCodes to_ClusterCodes( std::string_view type ) {
  if ( type == "Isolated" ) return ClusterCodes::Isolated;
  if ( type == "Shape" ) return ClusterCodes::Shape;
  if ( type == "2ndPosition" ) return ClusterCodes::Position2nd;
  if ( type == "Size" ) return ClusterCodes::Size;
  return ClusterCodes::None;
}
// ====================
// Construct ClusterMasks using enum instead of strings
// It simply construct another std::map using now the enum as keys
std::map>
ClusterCodeToEnum( std::map> const& clusterMasks,
std::string const& AlgName ) {
// Define returning std::map variable
std::map> clusterMasks_enum;
// Go through the map and convert strings to ClusterCodes
for ( const auto& [type, window] : clusterMasks ) {
auto cc = to_ClusterCodes( type );
clusterMasks_enum.insert( {cc, window} );
if ( cc == ClusterCodes::None ) Warning( AlgName.c_str(), "Unknown ClusterCode mask '%s'", type.c_str() );
}
return clusterMasks_enum;
}
// ====================
// To evaluate CL
// Confidence level of a photon candidate: the IsNotH estimator clamped to
// [0,1], or -1 when the estimator is absent from the proto.
double confLevelPhoton( const LHCb::ProtoParticle& proto ) {
  if ( !proto.hasInfo( LHCb::ProtoParticle::additionalInfo::IsNotH ) ) return -1.0;
  const double raw = proto.info( LHCb::ProtoParticle::additionalInfo::IsNotH, 0. );
  return std::clamp( raw, 0., 1. );
}
// ====================
// To evaluate CL
// Confidence level of a merged-pi0 candidate: (1 - IsPhoton) clamped to [0,1],
// or -1 when the IsPhoton estimator is absent from the proto.
double confLevelMergedPi0( const LHCb::ProtoParticle& proto ) {
  if ( !proto.hasInfo( LHCb::ProtoParticle::additionalInfo::IsPhoton ) ) return -1.0;
  const double raw = proto.info( LHCb::ProtoParticle::additionalInfo::IsPhoton, +1. );
  return std::clamp( 1 - raw, 0., 1. );
}
// ====================
// Function to replace old CaloParticle class behaviour.
// Creates neutral LHCb::Particle from CaloMomentum
// The particle is set as if coming from first PV if requested ((0,0,0) otherwise)
// Only suitable for non-composite particles (photons, mergedpi0) which is the case here
// Build a neutral LHCb::Particle from a CaloMomentum (replaces the old
// CaloParticle class behaviour): reference point, 4-momentum and the
// corresponding covariances are copied over. Only suitable for non-composite
// neutrals (photons, merged pi0s), which is the case here.
LHCb::Particle make_neutral( LHCb::Calo::Momentum& momentum ) {
  LHCb::Particle neutral;
  neutral.setReferencePoint( momentum.referencePoint() );
  neutral.setPosCovMatrix( momentum.pointCovMatrix() );
  neutral.setMomentum( momentum.momentum() );
  neutral.setMomCovMatrix( momentum.momCovMatrix() );
  neutral.setPosMomCovMatrix( momentum.momPointCovMatrix() );
  return neutral;
}
} // namespace
// Classes are embedded in this namespace
// Other particle makers encouraged to do the same
namespace LHCb::Phys::ParticleMakers {
// ============================================================
// PhotonMaker definition
class PhotonMaker : public Gaudi::Functional::Transformer {
// ====================
// List of variables
private:
// ParticleProperty
ServiceHandle m_particlePropertySvc{
this, "ParticlePropertySvc", "LHCb::ParticlePropertySvc", "To get neutral particle properties"};
const LHCb::ParticleProperty* m_partProp;
// Map using enum
std::map> m_clusterMasks_enum;
// Gaudi properties
Gaudi::Property m_setPV{this, "FirstPVasOrigin", false};
// Map of cluster masks (no mask by default)
Gaudi::Property>> m_clusterMasks{
this,
"ClusterCodeMasks",
{},
[=]( const auto& ) { this->m_clusterMasks_enum = ClusterCodeToEnum( this->m_clusterMasks.value(), name() ); },
Gaudi::Details::Property::ImmediatelyInvokeHandler{true}};
// Id (string) of the particle
Gaudi::Property m_part{this, "Particle", "gamma"};
// Filters
Gaudi::Property m_clCut{this, "ConfLevelCut", -99};
Gaudi::Property m_ptCut{this, "PtCut", 150.0};
// Counters
// Total of CL
mutable Gaudi::Accumulators::StatCounter m_confidenceLevelCounter{this, "Confidence Level"};
// Total and selected photons
mutable Gaudi::Accumulators::Counter<> m_PhotonsCounter{this, "Created photons"};
mutable Gaudi::Accumulators::StatCounter<> m_SelPhotonsCounter{this, "Selected photons"};
mutable Gaudi::Accumulators::Counter<> m_SkipPhotonsCounter{this, "Skipped photons"};
// Count Errors
mutable Gaudi::Accumulators::MsgCounter m_invalid_calomom{this, "Invalid CaloMomentum status"};
mutable Gaudi::Accumulators::MsgCounter m_invalid_energy{this, "Negative energies are not allowed"};
mutable Gaudi::Accumulators::MsgCounter m_not_neutral{this, "Track(s) found. Particle must be neutral"};
mutable Gaudi::Accumulators::MsgCounter m_empty_hypotheses{this, "No hypotheses found"};
// ====================
// List of functions
public:
PhotonMaker( const std::string& name, ISvcLocator* pSvcLocator );
StatusCode initialize() override;
LHCb::Particles operator()( LHCb::ProtoParticles const& protos, LHCb::RecVertices const& PVs ) const override;
private:
// Transformer of proto photon into a photon
std::optional make_photon( const LHCb::ProtoParticle& proto, Gaudi::XYZPoint const& point,
Gaudi::SymMatrix3x3 const& pointErr ) const;
// Debug printing
void print_debug() const;
};
// PhotonMaker implementation
DECLARE_COMPONENT( LHCb::Phys::ParticleMakers::PhotonMaker )
// Constructor: declares the two inputs (neutral ProtoParticles and primary
// vertices, with their default TES locations) and the output Particles
// container (location left to the configuration).
PhotonMaker::PhotonMaker( const std::string& name, ISvcLocator* pSvcLocator )
    : Transformer( name, pSvcLocator,
                   {KeyValue{"InputProtoParticles", LHCb::ProtoParticleLocation::Neutrals},
                    KeyValue{"InputPrimaryVertices", LHCb::RecVertexLocation::Primary}},
                   {KeyValue{"Particles", ""}} ) {}
// ===============================
// Initialize algorithm
// Initialize: once the base class is up, cache the ParticleProperty matching
// the configured "Particle" name.
StatusCode PhotonMaker::initialize() {
  auto sc = Transformer::initialize();
  return sc.andThen( [this] { m_partProp = m_particlePropertySvc->find( m_part.value() ); } );
}
// ===============================
// Main execution
// Main execution: build one photon candidate per neutral proto that survives
// all cuts, count the selected ones and return the new container.
LHCb::Particles PhotonMaker::operator()( LHCb::ProtoParticles const& protos, LHCb::RecVertices const& PVs ) const {
  LHCb::Particles particles;
  // The photon origin is identical for every proto (first PV if requested,
  // (0,0,0) otherwise) -- compute it once instead of once per proto.
  const Gaudi::XYZPoint     point    = GetPoint( PVs, m_setPV.value() );
  const Gaudi::SymMatrix3x3 pointErr = GetPointErr( PVs, m_setPV.value() );
  // Loop over protos
  for ( const LHCb::ProtoParticle* proto : protos ) {
    auto particle = make_photon( *proto, point, pointErr );
    // Add particle if built correctly
    if ( !particle.has_value() ) { continue; }
    particles.add( new LHCb::Particle( std::move( particle ).value() ) );
  }
  // Add to stats of sel photons
  m_SelPhotonsCounter += particles.size();
  // === debug
  if ( msgLevel( MSG::DEBUG ) ) { print_debug(); }
  return particles;
}
// ====================
// Part to build the photon from its protoparticle
// It is separated from operator() to keep it as clean as possible
std::optional PhotonMaker::make_photon( const LHCb::ProtoParticle& proto,
Gaudi::XYZPoint const& point,
Gaudi::SymMatrix3x3 const& pointErr ) const {
// ---- skip invalid and charged
if ( proto.track() ) {
++m_not_neutral;
return {};
}
// ---- Check the hypothesis
const auto& hypos = proto.calo();
if ( hypos.empty() ) {
++m_empty_hypotheses;
return {};
}
// ---- Check the hypothesis. Nothing wrong if it's not matched to a photon, just skip
const auto& hypo = hypos.front();
if ( LHCb::CaloHypo::Hypothesis::Photon != hypo->hypothesis() ) { return {}; }
// ---- skip negative energy
if ( hypo->e() <= 0 ) {
++m_invalid_energy;
return {};
}
// Add to total photons
++m_PhotonsCounter;
// ---- apply mask on ClusterCode
if ( m_clusterMasks_enum.size() != 0 ) {
for ( const auto& [type, window] : m_clusterMasks_enum ) {
std::optional code = ClusterCode( proto, type );
if ( code.has_value() && ( code.value() < (int)window.first || code.value() > (int)window.second ) ) return {};
}
}
// == evaluate kinematical properties
LHCb::Calo::Momentum momentum = LHCb::Calo::Momentum( &proto, point, pointErr );
if ( momentum.status() ) {
++m_invalid_calomom;
return {};
}
double pT = momentum.pt();
double E = momentum.e();
double px = momentum.px();
double py = momentum.py();
double pz = momentum.pz();
double p = E;
if ( m_partProp->mass() > 0 ) {
p = std::sqrt( E * E - m_partProp->mass() * m_partProp->mass() );
px *= ( p / E );
py *= ( p / E );
pz *= ( p / E );
pT *= ( p / E );
}
if ( pT < m_ptCut ) { return {}; }
// ---- apply CL filter (must be after pT cut to match neutralID definition range)
double CL = confLevelPhoton( proto );
if ( CL < m_clCut ) return {};
m_confidenceLevelCounter += CL;
// === set photon parameters (4-momentum, vertex and correlations)
auto particle = std::make_optional( make_neutral( momentum ) );
// Warning : covariant matrix should be modified accordingly -> to be included in CaloMomentum ...
if ( m_partProp->mass() > 0 ) particle->setMomentum( Gaudi::LorentzVector( px, py, pz, E ) );
// ===== create new particle and fill it
particle->setParticleID( LHCb::ParticleID( m_partProp->pdgID().pid() ) );
particle->setProto( &proto );
// === set mass and mass uncertainties
particle->setMeasuredMass( m_partProp->mass() );
particle->setMeasuredMassErr( 0 ); // the mass error is EXACTLY zero!
// === set confidence level
particle->setConfLevel( CL );
// === printout
if ( msgLevel( MSG::VERBOSE ) ) {
verbose() << "----- Single " << m_part << " found" << endmsg;
verbose() << "Pt : " << momentum.pt() << endmsg;
verbose() << "CL : " << CL << endmsg;
verbose() << "Chi2 " << proto.info( LHCb::ProtoParticle::additionalInfo::CaloTrMatch, -999. ) << endmsg;
verbose() << "CaloDeposit : " << proto.info( LHCb::ProtoParticle::additionalInfo::CaloDepositID, -999. )
<< endmsg;
if ( m_partProp->mass() > 0. ) verbose() << " Mass : " << m_partProp->mass() << endmsg;
verbose() << " " << endmsg;
}
return particle;
}
// ====================
// Debug printing
// Summary of how many photons survived the selection (printed at DEBUG level).
void PhotonMaker::print_debug() const {
  auto& log = debug();
  log << " " << endmsg;
  log << "-----------------------" << endmsg;
  log << " Filtered and created :" << endmsg;
  log << " --> " << m_SelPhotonsCounter.nEntries() << " photons (among " << m_PhotonsCounter.nEntries() << ")"
      << endmsg;
  log << " Skipped " << m_part.value() << " : " << m_SkipPhotonsCounter.nEntries() << endmsg;
  log << "-----------------------" << endmsg;
}
// ============================================================
// MergedPi0Maker definition
class MergedPi0Maker
: public Gaudi::Functional::Transformer> {
// ====================
// List of variables
private:
// ParticleProperty
ServiceHandle m_particlePropertySvc{
this, "ParticlePropertySvc", "LHCb::ParticlePropertySvc", "To get neutral particle properties"};
const LHCb::ParticleProperty* m_partProp;
// Map using enum
std::map> m_clusterMasks_enum;
// Gaudi properties
Gaudi::Property m_setPV{this, "FirstPVasOrigin", false};
// Map of cluster masks (no mask by default)
Gaudi::Property>> m_clusterMasks{
this,
"ClusterCodeMasks",
{},
[=]( const auto& ) { this->m_clusterMasks_enum = ClusterCodeToEnum( this->m_clusterMasks.value(), name() ); },
Gaudi::Details::Property::ImmediatelyInvokeHandler{true}};
// Id (string) of the particle
Gaudi::Property m_part{this, "Particle", "pi0"};
// Filters
Gaudi::Property m_clCut{this, "ConfLevelCut", -99};
Gaudi::Property m_ptCut{this, "PtCut", 150.0};
Gaudi::Property m_gPtCut{this, "GammaPtCut", 0. * Gaudi::Units::MeV};
Gaudi::Property m_ggDistCut{this, "GammaGammaDistCut", 2.8};
Gaudi::Property m_chi2Cut{this, "Chi2Cut", 1.};
Gaudi::Property m_MassWin{this, "MassWindow", 30. * Gaudi::Units::MeV};
// Counters
// Total of CL
mutable Gaudi::Accumulators::StatCounter m_confidenceLevelCounter_pi0{this, "Confidence Level pi0s"};
// Total and selected pi0s
mutable Gaudi::Accumulators::Counter<> m_Pi0sCounter{this, "Created pi0s"};
mutable Gaudi::Accumulators::StatCounter<> m_SelPi0sCounter{this, "Selected pi0s"};
mutable Gaudi::Accumulators::Counter<> m_SkipPi0sCounter{this, "Skipped pi0s"};
// Count Errors
mutable Gaudi::Accumulators::MsgCounter m_invalid_calomom{this, "Invalid CaloMomentum status"};
mutable Gaudi::Accumulators::MsgCounter m_not_neutral{this, "Track(s) found. Particle must be neutral"};
mutable Gaudi::Accumulators::MsgCounter m_empty_hypotheses{this, "No hypotheses found"};
mutable Gaudi::Accumulators::MsgCounter m_null_caloposition{this, "CaloPosition point to null"};
// ====================
// List of functions
public:
MergedPi0Maker( const std::string& name, ISvcLocator* pSvcLocator );
StatusCode initialize() override;
LHCb::Particles operator()( LHCb::ProtoParticles const& protos, LHCb::RecVertices const& PVs,
DeCalorimeter const& DECalo ) const override;
private:
// Transformer of proto neutral into a pi0
std::optional make_pi0( const LHCb::ProtoParticle& proto, const DeCalorimeter& DECalo,
Gaudi::XYZPoint const& point, Gaudi::SymMatrix3x3 const& pointErr ) const;
// Debug printing
void print_debug() const;
}; // namespace LHCb::Phys::ParticleMakers
// Merged Pi0 implementation
DECLARE_COMPONENT( LHCb::Phys::ParticleMakers::MergedPi0Maker )
// Constructor: declares the three inputs (neutral ProtoParticles, primary
// vertices and the ECAL detector element) and the output Particles container
// (location left to the configuration).
MergedPi0Maker::MergedPi0Maker( const std::string& name, ISvcLocator* pSvcLocator )
    : Transformer( name, pSvcLocator,
                   {KeyValue{"InputProtoParticles", LHCb::ProtoParticleLocation::Neutrals},
                    KeyValue{"InputPrimaryVertices", LHCb::RecVertexLocation::Primary},
                    KeyValue{"DECalorimeter", DeCalorimeterLocation::Ecal}},
                   {KeyValue{"Particles", ""}} ) {}
// ====================
// Initialize algorithm
// Initialize: once the base class is up, cache the ParticleProperty matching
// the configured "Particle" name.
StatusCode MergedPi0Maker::initialize() {
  auto sc = Transformer::initialize();
  return sc.andThen( [this] { m_partProp = m_particlePropertySvc->find( m_part.value() ); } );
}
// ====================
// Main execution
// Main execution: build one merged-pi0 candidate per neutral proto that
// survives all cuts, count the selected ones and return the new container.
LHCb::Particles MergedPi0Maker::operator()( LHCb::ProtoParticles const& protos, LHCb::RecVertices const& PVs,
                                            DeCalorimeter const& DECalo ) const {
  LHCb::Particles particles;
  // The candidate origin is identical for every proto (first PV if requested,
  // (0,0,0) otherwise) -- compute it once instead of once per proto.
  const Gaudi::XYZPoint     point    = GetPoint( PVs, m_setPV.value() );
  const Gaudi::SymMatrix3x3 pointErr = GetPointErr( PVs, m_setPV.value() );
  // Loop over protos
  for ( const LHCb::ProtoParticle* proto : protos ) {
    auto particle = make_pi0( *proto, DECalo, point, pointErr );
    // Add particle if built correctly
    if ( !particle.has_value() ) { continue; }
    particles.add( new LHCb::Particle( std::move( particle ).value() ) );
  }
  // Add to stats of selected pi0s
  m_SelPi0sCounter += particles.size();
  // === debug
  if ( msgLevel( MSG::DEBUG ) ) { print_debug(); }
  return particles;
}
// ====================
// Part to build one pi0 from its protoparticle
std::optional MergedPi0Maker::make_pi0( const LHCb::ProtoParticle& proto, DeCalorimeter const& DECalo,
Gaudi::XYZPoint const& point,
Gaudi::SymMatrix3x3 const& pointErr ) const {
// ---- skip invalid and charged
if ( proto.track() ) {
++m_not_neutral;
return {};
}
// ---- Check the hypothesis
const auto& hypos = proto.calo();
if ( hypos.empty() ) {
++m_empty_hypotheses;
return {};
}
// ---- Check the hypothesis. Nothing wrong if it's not matched to a photon, just skip
const auto& hypo = hypos.front();
if ( LHCb::CaloHypo::Hypothesis::Pi0Merged != hypo->hypothesis() ) { return {}; }
// Add to total pi0s
++m_Pi0sCounter;
// Filters
LHCb::Calo::Momentum pi0Momentum( &proto, point, pointErr );
if ( pi0Momentum.status() ) {
++m_invalid_calomom;
return {};
}
// ---- apply mass window
double part_mass = pi0Momentum.mass();
if ( m_MassWin.value() < fabs( m_partProp->mass() - part_mass ) ) return {};
// ---- apply Pt(pi0) cut
if ( m_ptCut > pi0Momentum.pt() ) return {};
// ---- apply chi2(Tr,cluster) cut
const double chi2 = proto.info( LHCb::ProtoParticle::additionalInfo::CaloTrMatch, +1.e+06 );
if ( m_chi2Cut >= 0 && chi2 < m_chi2Cut ) return {};
// ---- apply mask on ClusterCode
if ( m_clusterMasks_enum.size() != 0 ) {
for ( const auto& [type, window] : m_clusterMasks_enum ) {
std::optional code = ClusterCode( proto, type );
if ( code.has_value() && ( code.value() < (int)window.first || code.value() > (int)window.second ) ) return {};
}
}
// == extract SplitPhotons hypos
const auto& ghypos = hypo->hypos();
const auto& g1 = ghypos.front();
const auto& g2 = ghypos.at( 1 );
LHCb::Calo::Momentum g1Momentum( g1, point, pointErr );
LHCb::Calo::Momentum g2Momentum( g2, point, pointErr );
// info() << hypos.size() << " -> " << g1Momentum.pt() << " " << g2Momentum.pt() << endmsg;
// ---- Apply SplitPhoton pT cut
if ( m_gPtCut > g1Momentum.pt() ) return {};
if ( m_gPtCut > g2Momentum.pt() ) return {};
// Gamma-Gamma Min distance
// retrieve cellID by position
// (WARNING USE g1 split photon 'position')
const LHCb::CaloPosition* hypoPos = g1->position();
if ( !hypoPos ) ++m_null_caloposition;
const Gaudi::XYZPoint hypoPoint( hypoPos->x(), hypoPos->y(), hypoPos->z() );
LHCb::Calo::CellID cellID = DECalo.Cell( hypoPoint );
double CellSize = DECalo.cellSize( cellID );
double zpos = DECalo.cellZ( cellID );
double epi0 = pi0Momentum.e();
double dmin = ( epi0 * CellSize > 0 ) ? zpos * 2. * m_partProp->mass() / epi0 / CellSize
: +9999.; // rare FPE ( hypo outside Calo acceptance ?)
if ( m_ggDistCut < dmin ) return {};
// ---- apply CL filter (must be after pT cut to match neutralID definition range)
double CL = confLevelMergedPi0( proto );
if ( m_clCut >= 0 && CL < m_clCut ) return {};
m_confidenceLevelCounter_pi0 += CL;
// === set MergedPi0 parameters in particle(4-momentum, vertex and correlations)
auto particle = std::make_optional( make_neutral( pi0Momentum ) );
particle->setParticleID( LHCb::ParticleID( m_partProp->pdgID().pid() ) );
particle->setProto( &proto );
// --- set confidence level
particle->setConfLevel( CL );
//-- set mass and mass uncertainties
particle->setMeasuredMass( part_mass );
particle->setMeasuredMassErr( pi0Momentum.emass() );
// === printout
if ( msgLevel( MSG::VERBOSE ) ) {
verbose() << " ---- Merged " << m_part << " found [" << m_SelPi0sCounter.nEntries() << "]" << endmsg;
verbose() << "Pt " << pi0Momentum.pt() << endmsg;
verbose() << "CL / Chi2 " << CL << " / " << chi2 << endmsg;
verbose() << "dist(gg)" << dmin << endmsg;
}
return particle;
}
// ====================
// Debug printing
// Summary of how many pi0 candidates survived the selection (DEBUG level).
void MergedPi0Maker::print_debug() const {
  debug() << " " << endmsg;
  debug() << "-----------------------" << endmsg;
  debug() << " Filtered and created :" << endmsg;
  // Use nEntries()/value() as in PhotonMaker::print_debug, so only the plain
  // counts (not the full counter summaries) are printed.
  debug() << " --> " << m_SelPi0sCounter.nEntries() << " Merged " << m_part.value() << " (among "
          << m_Pi0sCounter.nEntries() << ")" << endmsg;
  debug() << " Skipped : " << m_SkipPi0sCounter.nEntries() << endmsg;
  debug() << "--------------------" << endmsg;
}
// Output of NeutralBasicsMaker: a zip view over the produced container plus the
// owning container itself (so the view stays valid in the TES).
// NOTE(review): the template arguments of this alias were lost (angle-bracket
// contents stripped) -- restore them from the original revision; the exact
// element type cannot be inferred from this file alone.
using neutral_basics_maker_output_t =
std::tuple>;
/** @class NeutralBasicsMaker
*
* @brief Create a container of neutral objects from the output of the reconstruction sequence and a set of vertices
*
*
*/
// NOTE(review): the Transformer base-class signature and the std::make_unique
// template argument below were lost (angle-bracket contents stripped) --
// restore them from the original revision before compiling.
class NeutralBasicsMaker : public Gaudi::Functional::Transformer {
public:
// Inputs: the unique-ID generator, the v2 calo hypotheses and the v2 PVs.
// Output: a zipped container of neutral basics (location set in configuration).
NeutralBasicsMaker( const std::string& name, ISvcLocator* pSvcLocator )
: Transformer( name, pSvcLocator,
{KeyValue{"InputUniqueIDGenerator", LHCb::UniqueIDGeneratorLocation::Default},
KeyValue{"InputCaloObjects", LHCb::Event::Calo::v2::HypothesesLocation::Default},
KeyValue{"InputPrimaryVertices", LHCb::Event::v2::RecVertexLocation::Primary}},
{KeyValue{"OutputParticles", ""}} ) {}
neutral_basics_maker_output_t operator()( LHCb::UniqueIDGenerator const& unique_id_gen,
LHCb::Event::Calo::v2::Hypotheses const& calo_hypos,
LHCb::Event::v2::RecVertices const& primary_vertices ) const override {
auto zn = Zipping::generateZipIdentifier();
auto calo_hypos_with_direction = std::make_unique( unique_id_gen, zn );
// Pre-size for the cartesian product (hypotheses x PVs) built just below
calo_hypos_with_direction->reserve( calo_hypos.size() * primary_vertices.size() );
// here we are creating one hypothesis per primary vertex
for ( auto const& ch : calo_hypos )
for ( auto const& vtx : primary_vertices ) calo_hypos_with_direction->emplace_back( unique_id_gen, ch, vtx );
// Return both the non-owning zip view and the owning container it refers to
return {LHCb::Pr::make_zip( std::as_const( *calo_hypos_with_direction ) ),
std::move( calo_hypos_with_direction )};
}
};
DECLARE_COMPONENT( NeutralBasicsMaker )
} // namespace LHCb::Phys::ParticleMakers