Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • rrabadan/LHCb
  • talin/LHCb
  • imjelde/LHCb
  • mstahl/LHCb
  • padeken/LHCb
  • mimazure/LHCb
  • roiser/LHCb
  • conrad/LHCb
  • kklimasz/LHCb
  • rcurrie/LHCb
  • wkrzemie/LHCb
  • fkeizer/LHCb
  • valassi/LHCb
  • hschrein/LHCb
  • anstahll/LHCb
  • jonrob/LHCb
  • graven/LHCb
  • clemenci/LHCb
  • chaen/LHCb
  • sstahl/LHCb
  • lhcb/LHCb
21 results
Show changes
Commits on Source (16)
Showing
with 416 additions and 45 deletions
......@@ -11,7 +11,7 @@
cmake_minimum_required(VERSION 3.15)
option(CMAKE_EXPORT_COMPILE_COMMANDS "Enable/Disable output of compile_commands.json" ON)
project(LHCb VERSION 54.8
project(LHCb VERSION 54.9
LANGUAGES CXX)
# Enable testing with CTest/CDash
......@@ -195,6 +195,9 @@ gaudi_install(CMAKE
cmake/FindDataPackage.cmake
cmake/LHCbConfigUtils.cmake
cmake/ProjectConfig.cmake.in
# workaround until https://gitlab.cern.ch/gaudi/Gaudi/-/merge_requests/1465 is available
cmake/extract_qmtest_metadata.py
)
lhcb_finalize_configuration()
......@@ -8,20 +8,20 @@ This helps with the validation of code prior to making it available in the offic
- `master` branch: new developments and updates targeting run 3. Builds on current supported platforms against latest version of Gaudi
- `sim10-patches` branch: for patches to Gauss in Sim10 simulation workflows. Builds on current supported platforms against latest version of Gaudi
- `run2-patches` branch: new developments and updates targeting runs 1+2 analysis and/or reprocessing. Builds on current supported platforms against latest version of Gaudi
- `2018-patches` branch: for 2018 incremental stripping (`S34r0pX`, `S35r0pX`, `S35r1pX`), 2015 and 2016 restripping (`S24r2`, `S28r2`) and patches to Moore, Brunel (`Reco18`) and stripping in 2015, 2016 and 2018 simulation workflows. Builds with gcc62 on centos7.
- `2017-patches` branch: for 2017 incremental stripping (`S29r2pX` (pp), `S32r0pX` (pp 5 Tev)) and patches to Moore, Brunel (`Reco17`) and stripping (S29r2, S29r2p1, S29r2p2, S32) in 2017 simulation workflows. Builds with gcc62 on centos7
- `2016-patches` branch: for 2016 incremental stripping (`S28r1pX`, `S30r2pX`, `S30r3pX`) and patches to Brunel (`Reco16`) and stripping in 2016 simulation workflows. Builds with gcc49 on slc6
- `2016-patches` branch: for patches to Brunel (`Reco16`) and Tesla in 2016 simulation workflows (and Tesla for 2015 simulation). Builds with gcc49 on slc6
- `hlt2016-patches` branch: for patches to Moore in 2016 simulation workflows. Builds with gcc49 on slc6
- `reco15-patches` branch: for patches to Brunel (`Reco15`) in 2015 simulation workflows. Builds with gcc49 on slc6
- `stripping24-patches` branch: for 2015 incremental stripping and patches to stripping in 2015 simulation workflows. Builds with gcc49 on slc6
- `reco14-patches` branch: for patches to Brunel (`Reco14`) in run 1 simulation workflows. Builds with gcc46 on slc5. Requires CMT
- `stripping21-patches` branch: for run 1 incremental stripping (`S21r0pX`, `S21r1pX`) and patches to stripping in run 1 simulation workflows. Builds with gcc49 on slc6
......@@ -40,11 +40,8 @@ This helps with the validation of code prior to making it available in the offic
- Bug fixes specific to a given processing should be committed to the corresponding `XXX-patches` branch.
- Any changes or fixes for Run 1 and Run 2 analysis (or re-reconstruction, re-stripping) should go to the `run2-patches` branch.
They will then be propagated to `master` (if relevant also for Upgrade) by the applications managers.
Fixes also relevant to specific `XXX-patches` branches should be flagged as such, they will be propagated by the applications managers.
- Any changes specific to Upgrade should *only* go to `master`. When making a MR to `master`, please make it initially as WIP and add
a comment when it is ready to be tested. The release manager will then test it initially in one nightly slot and, if successful,
remove the WIP for general testing.
- Any changes specific to Run 3 should go to `master`.
In doubt, please get in touch before creating a MR.
......@@ -8,6 +8,7 @@
* granted to it by virtue of its status as an Intergovernmental Organization *
* or submit itself to any jurisdiction. *
\*****************************************************************************/
#pragma once
#include "CaloDet/DeCalorimeter.h"
#include "Event/CaloClusters_v2.h"
#include "Event/CaloDigits_v2.h"
......
......@@ -9,6 +9,7 @@
* or submit itself to any jurisdiction. *
\*****************************************************************************/
#pragma once
#include <cassert>
#include <ostream>
#include <type_traits>
......
......@@ -57,7 +57,7 @@ public:
if ( shift == 0.f && scale == 1.f ) {
writeCounter( cntr.offset, cntr.size, &bank.front(), val );
} else {
auto scaled_val = static_cast<unsigned>( shift + scale * val );
auto scaled_val = static_cast<unsigned>( std::round( shift + scale * val ) );
writeCounter( cntr.offset, cntr.size, &bank.front(), scaled_val );
}
} else {
......
......@@ -31,11 +31,18 @@ The content of FSRs fields from input files is reported to the output
FSR either by merging the data (if possible) or by copying it verbatim.
#]=======================================================================]
gaudi_add_header_only_library(FileSummaryRecord
LINK
Gaudi::GaudiKernel
nlohmann_json::nlohmann_json
)
gaudi_add_module(FileSummaryRecordModule
SOURCES
src/mod/FSRSink.cpp
src/mod/OpenFilesTracker.cpp
LINK
FileSummaryRecord
Boost::regex
fmt::fmt
Gaudi::GaudiKernel
......@@ -49,6 +56,7 @@ if(BUILD_TESTING)
SOURCES
tests/src/FSRTestAlgs.cpp
LINK
FileSummaryRecord
Gaudi::GaudiKernel
LHCb::DAQEventLib
LHCb::LHCbAlgsLib
......
/*****************************************************************************\
* (c) Copyright 2023 CERN for the benefit of the LHCb Collaboration *
* *
* This software is distributed under the terms of the GNU General Public *
* Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". *
* *
* In applying this licence, CERN does not waive the privileges and immunities *
* granted to it by virtue of its status as an Intergovernmental Organization *
* or submit itself to any jurisdiction. *
\*****************************************************************************/
#include <GaudiKernel/Incident.h>
#include <nlohmann/json.hpp>
namespace LHCb {
/// Simple incident to notify listeners that a new FSR has been read from an input file.
struct FileSummaryRecordIncident final : Incident {
static constexpr const char* Type = "FileSummaryRecordIncident";
FileSummaryRecordIncident( const std::string& source, const nlohmann::json& data )
: Incident{source, Type}, data{data} {}
~FileSummaryRecordIncident() = default;
const nlohmann::json& data;
};
} // namespace LHCb
/*****************************************************************************\
* (c) Copyright 2022 CERN for the benefit of the LHCb Collaboration *
* (c) Copyright 2022-2023 CERN for the benefit of the LHCb Collaboration *
* *
* This software is distributed under the terms of the GNU General Public *
* Licence version 3 (GPL Version 3), copied verbatim in the file "COPYING". *
......@@ -17,6 +17,7 @@ namespace Gaudi::Utils {
std::ostream& toStream( const boost::regex& obj, std::ostream& s ) { return s << std::quoted( obj.str() ); }
} // namespace Gaudi::Utils
#include <GAUDI_VERSION.h>
#include <Gaudi/Interfaces/IOptionsSvc.h>
#include <Gaudi/MonitoringHub.h>
#include <Gaudi/Property.h>
......@@ -24,11 +25,12 @@ namespace Gaudi::Utils {
#include <GaudiKernel/IIncidentListener.h>
#include <GaudiKernel/IIncidentSvc.h>
#include <GaudiKernel/Service.h>
#include <GaudiKernel/ServiceHandle.h>
#include <GaudiUtils/IIODataManager.h>
#include <LHCb/FileSummaryRecordIncident.h>
#include <TFile.h>
#include <algorithm>
#include <chrono>
#include <filesystem>
#include <fmt/format.h>
#include <fstream>
#include <future>
......@@ -44,6 +46,9 @@ namespace Gaudi::Utils {
#include <thread>
#include <tuple>
#include <utility>
#if GAUDI_VERSION < CALC_GAUDI_VERSION( 36, 13 )
# include <filesystem>
#endif
namespace {
std::string key_for( Gaudi::Monitoring::Hub::Entity const& ent ) {
......@@ -99,10 +104,17 @@ namespace LHCb::FSR {
return Service::initialize().andThen( [&] {
// declare ourself as a monitoring sink
serviceLocator()->monitoringHub().addSink( this );
// declare ourself as a incident linetener (to know when files are opened/closed)
if ( auto incSvc = service<IIncidentSvc>( "IncidentSvc" ) ) {
incSvc->addListener( this, "CONNECTED_OUTPUT" );
incSvc->addListener( this, IncidentType::BeginInputFile );
// declare ourselves as an incident listener (to know when files are opened/closed)
if ( m_incSvc ) {
m_incSvc->addListener( this, "CONNECTED_OUTPUT" );
#if GAUDI_VERSION < CALC_GAUDI_VERSION( 36, 13 )
m_incSvc->addListener( this, IncidentType::BeginInputFile );
#else
m_incSvc->addListener( this, "CONNECTED_INPUT" );
#endif
} else {
warning() << "cannot access IIncidentSvc " << m_incSvc.name() << ": no access to FSRs from input files"
<< endmsg;
}
// add some special data to the FSR
if ( m_includeJobOptions ) {
......@@ -137,6 +149,7 @@ namespace LHCb::FSR {
}
void handle( const Incident& inc ) override {
#if GAUDI_VERSION < CALC_GAUDI_VERSION( 36, 13 )
if ( inc.type() == IncidentType::BeginInputFile ) {
namespace fs = std::filesystem;
// BeginInputFile is actually fired before really opening the file, so the only way
......@@ -163,25 +176,31 @@ namespace LHCb::FSR {
}
}
nlohmann::json incoming_fsr;
if ( is_root_file ) {
std::unique_ptr<TFile> f( TFile::Open( inc.source().c_str() ) );
if ( f ) {
std::string* s{nullptr};
f->GetObject( m_recordName.value().c_str(), s );
if ( s ) {
incoming_fsr = nlohmann::json::parse( *s );
} else {
incoming_fsr = nlohmann::json{{"name", inc.source()}, {"error", m_recordName.value() + " not found"}};
m_inputsFSRs.emplace_back( nlohmann::json::parse( *s ) );
if ( m_incSvc ) m_incSvc->fireIncident( FileSummaryRecordIncident( name(), m_inputsFSRs.back() ) );
}
} else {
incoming_fsr = nlohmann::json{{"name", inc.source()}, {"error", "could not open file"}};
}
} else {
incoming_fsr = nlohmann::json{{"name", inc.source()}, {"info", "not a root file"}};
}
m_inputsFSRs.emplace_back( std::move( incoming_fsr ) );
} else if ( inc.type() == "CONNECTED_OUTPUT" ) {
} else
#else
if ( inc.type() == "CONNECTED_INPUT" ) {
if ( auto ci = dynamic_cast<const ContextIncident<TFile*>*>( &inc ) ) {
std::string* s{nullptr};
ci->tag()->GetObject( m_recordName.value().c_str(), s );
if ( s ) {
m_inputsFSRs.emplace_back( nlohmann::json::parse( *s ) );
if ( m_incSvc ) m_incSvc->fireIncident( FileSummaryRecordIncident( name(), m_inputsFSRs.back() ) );
}
}
} else
#endif
if ( inc.type() == "CONNECTED_OUTPUT" ) {
if ( auto ci = dynamic_cast<const ContextIncident<TFile*>*>( &inc ) ) {
std::string guid;
if ( auto datamgr = service<Gaudi::IIODataManager>( "IODataManager" ) ) {
......@@ -201,7 +220,10 @@ namespace LHCb::FSR {
}
StatusCode finalize() override {
if ( auto incSvc = service<IIncidentSvc>( "IncidentSvc" ) ) { incSvc->removeListener( this ); }
if ( m_incSvc ) {
m_incSvc->removeListener( this );
m_incSvc.release().ignore();
}
m_flushThreadStop.set_value(); // tell the flush thread we are stopping
if ( m_flushThread.joinable() ) m_flushThread.join(); // and wait that it exits
......@@ -265,6 +287,8 @@ namespace LHCb::FSR {
std::vector<OutputFile> m_outputFiles;
std::list<nlohmann::json> m_inputsFSRs;
ServiceHandle<IIncidentSvc> m_incSvc{this, "IncidentSvc", "IncidentSvc"};
std::thread m_flushThread;
std::promise<void> m_flushThreadStop;
};
......
......@@ -113,12 +113,6 @@ def check(causes, result):
"writes": 5
}
},
"inputs": [{
"info":
"not a root file",
"name":
"mdf:root://eoslhcb.cern.ch//eos/lhcb/cern-swtest/lhcb/data/2018/RAW/FULL/LHCb/COLLISION18/209291/209291_0000000745.raw"
}],
}
try:
......
......@@ -12,7 +12,9 @@ import os
import json
from traceback import format_exc
from unittest import TestCase
import itertools
from pprint import pformat
from .write import GUID
FILENAMEFSR = f"{__name__}.fsr.json"
FILENAME = f"{__name__}.root"
......@@ -31,10 +33,14 @@ def config():
from PyConf.control_flow import CompositeNode
from PyConf.components import setup_component
from PyConf.Algorithms import LHCb__Tests__EventCountAlg, Gaudi__Examples__IntDataProducer, ReadTES
from Configurables import ApplicationMgr
from Configurables import ApplicationMgr, Gaudi__MultiFileCatalog
Gaudi__MultiFileCatalog("FileCatalog").Catalogs = [
"xmlcatalog_file:FSRTests.catalog.xml"
]
options = ApplicationOptions(_enabled=False)
options.input_files = ["FSRTests.write.root"]
options.input_files = ["LFN:FSRTests-write"]
options.input_type = 'ROOT'
options.output_file = FILENAME
options.output_type = 'ROOT'
......@@ -59,6 +65,12 @@ def config():
AcceptRegex=r"^(Evt|Other)Counter\.count$",
OutputFile=FILENAMEFSR,
)))
app.ExtSvc.append(
config.add(
setup_component(
"LHCb__Tests__InputFSRSpy",
instance_name="InputFSRSpy",
)))
producer = Gaudi__Examples__IntDataProducer()
......@@ -84,7 +96,7 @@ def config():
os.remove(name)
def check(causes, result):
def check(causes, result, stdout):
result["root_output_file"] = FILENAME
missing_files = [
......@@ -134,7 +146,8 @@ def check(causes, result):
expected["guid"] = guid # GUID is random
# let's get the FSR of the input file from the output of the upstream test
expected["inputs"].append(json.load(open("FSRTests.write.fsr.json")))
input_fsr = json.load(open("FSRTests.write.fsr.json"))
expected["inputs"].append(input_fsr)
tester = TestCase()
checking = "JSON dump"
......@@ -142,6 +155,20 @@ def check(causes, result):
checking = "ROOT file"
tester.assertEqual(expected, fsr_root)
checking = "input incident"
assert "got FSR from input file" in stdout, "missing message from InputFSRSpy"
lines = list(
itertools.islice(
itertools.dropwhile(
lambda l: "got FSR from input file" not in l,
stdout.splitlines()), 1, 4))
assert len(lines) == 3, \
"invalid output from InputFSRSpy (wrong length)"
assert (lines[0] == "```" and lines[2] == "```"), \
"invalid output from InputFSRSpy (missing ``` markers)"
tester.assertEqual(input_fsr, json.loads(lines[1]))
except AssertionError as err:
causes.append("FSR content")
result[f"FSR problem ({checking})"] = result.Quote(str(err))
......
......@@ -17,6 +17,7 @@ from pprint import pformat
FILENAMEFSR = f"{__name__}.fsr.json"
FILENAME = f"{__name__}.root"
FILENAMEJSON = f"{__name__}.json"
GUID = "D80398A9-70BB-4EEE-9AB8-0BEC4D976C13"
def checkDiff(a, b):
......@@ -31,13 +32,31 @@ def config():
from PyConf.control_flow import CompositeNode
from PyConf.components import setup_component
from PyConf.Algorithms import LHCb__Tests__EventCountAlg, Gaudi__Examples__IntDataProducer
from Configurables import ApplicationMgr
from Configurables import ApplicationMgr, Gaudi__MultiFileCatalog
# use a specific XML catalog
Gaudi__MultiFileCatalog("FileCatalog").Catalogs = [
"xmlcatalog_file:FSRTests.catalog.xml"
]
with open("FSRTests.catalog.xml", "w") as f:
f.write(f"""<?xml version="1.0" encoding="UTF-8" standalone="no" ?>
<!DOCTYPE POOLFILECATALOG SYSTEM "InMemory">
<POOLFILECATALOG>
<File ID="{GUID}">
<physical>
<pfn filetype="ROOT" name="{FILENAME}"/>
</physical>
<logical>
<lfn name="FSRTests-write"/>
</logical>
</File>
</POOLFILECATALOG>""")
options = ApplicationOptions(_enabled=False)
# No data from the input is used, but something should be there for the configuration
options.input_files = ["dummy_input_file_name.dst"]
options.input_type = 'ROOT'
options.output_file = FILENAME
options.output_file = "LFN:FSRTests-write"
options.output_type = 'ROOT'
options.data_type = 'Upgrade'
options.dddb_tag = 'upgrade/dddb-20220705'
......@@ -95,7 +114,8 @@ def check(causes, result):
"empty": False,
"nEntries": 5,
"type": "counter:Counter:m"
}
},
"guid": GUID
}
try:
......@@ -115,11 +135,6 @@ def check(causes, result):
checking = "no check yet"
try:
guid = fsr_dump.get("guid")
assert guid, "missing or invalid GUID in FSR dump"
expected["guid"] = guid # GUID is random
tester = TestCase()
checking = "JSON dump"
tester.assertEqual(expected, fsr_dump)
......
......@@ -15,7 +15,7 @@
<argument name="use_temp_dir"><enumeral>true</enumeral></argument>
<argument name="validator"><text>
from FSRTests.read import check
check(causes, result)
check(causes, result, stdout)
</text></argument>
<argument name="prerequisites"><set>
<tuple><text>FileSummaryRecord.write</text><enumeral>PASS</enumeral></tuple>
......
......@@ -11,6 +11,10 @@
#include <Event/LumiEventCounter.h>
#include <Event/ODIN.h>
#include <Gaudi/Accumulators.h>
#include <GaudiKernel/IIncidentListener.h>
#include <GaudiKernel/IIncidentSvc.h>
#include <GaudiKernel/Service.h>
#include <LHCb/FileSummaryRecordIncident.h>
#include <LHCbAlgs/Consumer.h>
namespace LHCb ::Tests {
......@@ -32,4 +36,25 @@ namespace LHCb ::Tests {
mutable LHCb::LumiEventCounter m_lumiCount{this, "eventsByRun"};
};
DECLARE_COMPONENT( RunEventCountAlg )
/// Test helper service: listens for FileSummaryRecordIncident and dumps the
/// received FSR to the info log, fenced in ``` markers so that the QMTest
/// validator can extract and compare it.
struct InputFSRSpy final : extends<Service, IIncidentListener> {
  using extends::extends;

  /// Retrieve the incident service and register for FileSummaryRecordIncident.
  StatusCode initialize() override {
    return extends::initialize().andThen( [&] { return m_incSvc.retrieve(); } ).andThen( [&] {
      m_incSvc->addListener( this, FileSummaryRecordIncident::Type );
    } );
  }

  /// Deregister from the incident service and release the handle.
  StatusCode finalize() override {
    // guard against a failed initialize(): finalize() may still be invoked
    // before the handle was ever retrieved (same pattern as FSRSink::finalize)
    if ( m_incSvc ) {
      m_incSvc->removeListener( this );
      m_incSvc.release().ignore();
    }
    return extends::finalize();
  }

  /// Log the FSR payload carried by the incident (other incidents are ignored).
  void handle( const Incident& inc ) override {
    if ( auto fsrInc = dynamic_cast<const FileSummaryRecordIncident*>( &inc ) ) {
      info() << "got FSR from input file:\n```\n" << fsrInc->data << "\n```" << endmsg;
    }
  }

  ServiceHandle<IIncidentSvc> m_incSvc{this, "IncidentSvc", "IncidentSvc"};
};
DECLARE_COMPONENT( InputFSRSpy )
} // namespace LHCb::Tests
2023-06-12 LHCb v54r9
===
This version uses
Detector [v1r13](../../../../Detector/-/tags/v1r13),
Gaudi [v36r12](../../../../Gaudi/-/tags/v36r12) and
LCG [103](http://lcginfo.cern.ch/release/103/) with ROOT 6.28.00.
This version is released on the `master` branch.
Built relative to LHCb [v54r8](/../../tags/v54r8), with the following changes:
### New features ~"new feature"
### Fixes ~"bug fix" ~workaround
- ~UT | Fix out-of-bound access in UT GeomCache constructor, !4121 (@gunther) [#305]
- ~Build | Temporary port of gaudi/Gaudi!1465, !4154 (@rmatev)
- Avoid using &vector[0], prefer vector.data(), !4128 (@graven) [#310]
### Enhancements ~enhancement
- ~Configuration | Modify computation of component hash ID, !4116 (@graven)
- ~Luminosity | Round non-integer lumi counter values rather than truncating them, !4152 (@dcraik) [MooreOnline#32]
- Use more precise approximation of atan2, !4139 (@ahennequ)
### Code cleanups and changes to tests ~modernisation ~cleanup ~testing
- ~Persistency | Remove detector geometry from unpackers, !4137 (@sesen)
- Remove warning from FT readout map (follow !4129), !4145 (@rmatev)
- Remove broken link to doxygen, !4144 (@rmatev)
- Corrections in somes includes, !4142 (@obuon)
- Fix SourceID-based selection for empty input (follow !4119), !4141 (@rmatev)
- Small tweaks/simplifications to AddressKillerAlg, !4133 (@graven)
### Documentation ~Documentation
- Clean up CONTRIBUTING.md following obsoletion of stripping24-patches branch, !4148 (@cattanem)
### Other
- ~Decoding ~FT | Reshape a bit the readout map to allow for errors, !4129 (@lohenry)
- ~Decoding ~FT | Rewrite the FTReadoutMap in order to get deactivated links from Conditions, !4096 (@lohenry)
- ~Tuples | Update the Run3 DTF: Add extra output and v2 PVs support, !4007 (@jzhuo)
- Update References for: LHCb!4116, MooreOnline!251, DaVinci!904 based on lhcb-master-mr/8181, !4146 (@lhcbsoft)
- Remove redundant mutable from RelationsTable, !4131 (@graven)
- Add option to explicitly by-pass check for identically configured components with different names, !4126 (@graven)
- Various streamlining and simplifications for the UT code used for tracking, !4124 (@decianm)
- Add algorithm to select subview of RawBank view, !4119 (@sstahl)
- Change ReserveDetDescForEvent to use LHCb::Algorithm, !4082 (@clemenci)
- Make AddressKillerAlg functional, !4125 (@sponce)
......@@ -38,6 +38,12 @@ if(NOT COMMAND _gaudi_runtime_prepend)
endmacro()
endif()
# workaround until https://gitlab.cern.ch/gaudi/Gaudi/-/merge_requests/1465 is available
find_file(extract_qmtest_metadata extract_qmtest_metadata.py
PATHS ${LHCb_SOURCE_DIR}/cmake # When building LHCb
${LHCb_DIR} # When using an installed LHCb
NO_DEFAULT_PATH)
# make sure DD4hep does not dictate the CXX standard to use
if(NOT DEFINED CMAKE_CXX_STANDARD AND DEFINED GAUDI_CXX_STANDARD)
set(CMAKE_CXX_STANDARD ${GAUDI_CXX_STANDARD})
......
#!/usr/bin/env python3
#####################################################################################
# (c) Copyright 1998-2019 CERN for the benefit of the LHCb and ATLAS collaborations #
# #
# This software is distributed under the terms of the Apache version 2 licence, #
# copied verbatim in the file "LICENSE". #
# #
# In applying this licence, CERN does not waive the privileges and immunities #
# granted to it by virtue of its status as an Intergovernmental Organization #
# or submit itself to any jurisdiction. #
#####################################################################################
"""
Simple script to extract metadata (dependencies, labels) from QMTest tests (.qmt
files) and suites (.qms files), and report them as declaration of CTest test
properties.
"""
from __future__ import print_function
__author__ = "Marco Clemencic <marco.clemencic@cern.ch>"
try:
import collections
import os
import platform
import re
import xml.etree.ElementTree as ET
import six
except ImportError:
import sys
sys.exit(1)
def qmt_filename_to_name(path):
    """
    Convert the relative path of a .qmt/.qms file into the canonical QMTest
    test name: path separators become dots and the extensions are dropped.

    >>> qmt_filename_to_name('some_suite.qms/sub.qms/mytest.qmt')
    'some_suite.sub.mytest'
    """
    parts = []
    for component in path.split(os.path.sep):
        parts.append(re.sub(r"\.qm[st]$", "", component))
    return ".".join(parts)
def fix_test_name(name, pkg):
    """
    Map a QMTest test name onto the name used by CTest: make sure the name is
    prefixed with the package name, normalizing a lowercase package prefix.

    >>> fix_test_name('package.bug.123', 'Package')
    'Package.bug.123'
    >>> fix_test_name('Package.Bug.123', 'Package')
    'Package.Bug.123'
    >>> fix_test_name('simple', 'Package')
    'Package.simple'
    """
    # optional leading '<pkg>.' or '<pkg lowercased>.' is replaced by '<pkg>.'
    prefix = r"^((%s|%s)\.)?" % (pkg.lower(), pkg)
    return re.sub(prefix, pkg + ".", name)
def find_files(rootdir, ext):
    """
    Recursively yield the paths of all files under *rootdir* whose extension
    equals *ext* (e.g. '.qmt').
    """
    for dirpath, _dirnames, filenames in os.walk(rootdir):
        matching = (f for f in filenames if os.path.splitext(f)[1] == ext)
        for filename in matching:
            yield os.path.join(dirpath, filename)
def parse_xml(path):
    """
    Parse the XML file at *path* and return the ElementTree.

    On a parse error, report the problem on stderr and terminate the process
    with exit code 1 (this script is meant to be run standalone).
    """
    try:
        return ET.parse(path)
    except ET.ParseError as e:
        # 'sys' is only imported in the __main__ guard at module level, so
        # import it locally to avoid a NameError when used as a module.
        import sys
        sys.stderr.write("ERROR: could not parse {}\n{}\n".format(path, e))
        sys.stderr.flush()
        sys.exit(1)
def analyze_deps(pkg, rootdir):
    """
    Collect the prerequisites declared by the QMTest tests below *rootdir*
    and print them to stdout as CMake set_property() commands.

    @param pkg: name of the package (used to fix the name of the tests to match
                the CMake ones)
    @param rootdir: directory containing the QMTest tests (usually tests/qmtest)
    """
    # map CTest-style test name -> .qmt file path
    known_tests = {}
    for qmt_path in find_files(rootdir, ".qmt"):
        test_name = fix_test_name(
            qmt_filename_to_name(os.path.relpath(qmt_path, rootdir)), pkg)
        assert test_name not in known_tests
        known_tests[test_name] = qmt_path

    prereq_xpath = 'argument[@name="prerequisites"]/set/tuple/text'
    fixtures_setup = set()
    for test_name, qmt_path in known_tests.items():
        doc = parse_xml(qmt_path)
        prereqs = [
            fix_test_name(el.text, pkg) for el in doc.findall(prereq_xpath)
        ]
        # every prerequisite must refer to a known test
        missing = [p for p in prereqs if p not in known_tests]
        if missing:
            sys.stderr.write(
                "ERROR: prerequisite {0} from {1} not found.\n".format(
                    missing[0], qmt_path))
            sys.stderr.flush()
            exit(1)
        if prereqs:
            print((
                "set_property(TEST {0} APPEND PROPERTY DEPENDS {1})\n"
                "if(NOT QMTEST_DISABLE_FIXTURES_REQUIRED)\n"
                " set_property(TEST {0} APPEND PROPERTY FIXTURES_REQUIRED {1})\n"
                "endif()").format(test_name, " ".join(prereqs)))
            fixtures_setup.update(prereqs)
    # every test used as a prerequisite is also a CTest fixture
    for test_name in fixtures_setup:
        print("set_property(TEST {0} APPEND PROPERTY FIXTURES_SETUP {0})".
              format(test_name))
def analyze_suites(pkg, rootdir):
    """
    Find all the suites (.qms files) defined in a directory and use each suite
    name as a CTest label for the tests it contains.

    Prints one 'set_property(... LABELS ...)' command per labelled test.
    """
    labels = collections.defaultdict(list)
    tests_xpath = 'argument[@name="test_ids"]/set/text'
    suites_xpath = 'argument[@name="suite_ids"]/set/text'
    for path in find_files(rootdir, ".qms"):
        name = qmt_filename_to_name(os.path.relpath(path, rootdir))
        name = fix_test_name(name, pkg)
        tree = parse_xml(path)
        labels[name].extend(
            fix_test_name(el.text, pkg) for el in tree.findall(tests_xpath))
        if tree.findall(suites_xpath):
            sys.stderr.write(("WARNING: %s: suites of suites are "
                              "not supported yet\n") % path)
            sys.stderr.flush()
    # transpose the dictionary of lists (label -> tests becomes test -> labels)
    # NOTE: this script requires Python 3 (see shebang), so plain dict.items()
    # replaces the six.iteritems compatibility shim
    test_labels = collections.defaultdict(set)
    for label, tests in labels.items():
        for test in tests:
            test_labels[test].add(label)
    for test, labels in test_labels.items():
        print("set_property(TEST {0} APPEND PROPERTY LABELS {1})".format(
            test, " ".join(labels)))
def analyze_disabling(pkg, rootdir):
    """
    Add the CTest label 'disabled' to tests that declare the current platform
    in their 'unsupported_platforms' list.
    """
    platform_id = (os.environ.get("BINARY_TAG") or os.environ.get("CMTCONFIG")
                   or platform.platform())
    unsupp_xpath = 'argument[@name="unsupported_platforms"]/set/text'
    for path in find_files(rootdir, ".qmt"):
        name = fix_test_name(
            qmt_filename_to_name(os.path.relpath(path, rootdir)), pkg)
        tree = parse_xml(path)
        # the test is disabled as soon as at least one regex matches
        disabled = any(
            re.search(el.text, platform_id)
            for el in tree.findall(unsupp_xpath))
        if disabled:
            print("set_property(TEST {0} APPEND PROPERTY LABELS disabled)".
                  format(name))
if __name__ == "__main__":
    # command line: <pkg> <rootdir>, forwarded verbatim to each analysis step
    import sys

    for analyze in (analyze_deps, analyze_suites, analyze_disabling):
        analyze(*sys.argv[1:])