From b4520aedf16986f9e70f937482543fe4b599b148 Mon Sep 17 00:00:00 2001
From: Roel Aaij <roel.aaij@nikhef.nl>
Date: Sat, 11 Mar 2023 00:53:24 +0100
Subject: [PATCH 01/49] Add property default values to standalone generated
 algorithms

Move the implementation of the AlgorithmDB to a cpp file and add it to
the Stream library. This allows it to be called from different
compilation units.

Add a default_properties executable that is passed a ;-separated list
of algorithm namespace::type through stdin, instantiates each
algorithm and retrieves its properties. Properties are saved to a JSON
representation as strings with the algorithm namespace::type as key

This JSON representation is used to add the property default values to
their Python views when those are generated.
---
 CMakeLists.txt                              | 21 ++++++-----
 cmake/GenerateConfiguration.cmake           | 39 ++++++++++++---------
 configuration/parser/ParseAlgorithms.py     | 34 ++++++++++++++----
 configuration/parser/default_properties.cpp | 34 ++++++++++++++++++
 main/include/Configuration.h                |  1 +
 stream/CMakeLists.txt                       | 25 ++++++-------
 stream/gear/include/AlgorithmDB.h           | 16 +++++++++
 7 files changed, 124 insertions(+), 46 deletions(-)
 create mode 100644 configuration/parser/default_properties.cpp
 create mode 100644 stream/gear/include/AlgorithmDB.h

diff --git a/CMakeLists.txt b/CMakeLists.txt
index 5ce09795e2b..ea667f9ae48 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -580,6 +580,14 @@ if(BUILD_TESTING)
   add_subdirectory(test/unit_tests)
 endif()
 
+# Interface library that will be used for common functionality
+add_library(AllenCommon INTERFACE)
+target_include_directories(AllenCommon
+  INTERFACE
+  $<BUILD_INTERFACE:${PROJECT_SOURCE_DIR}/main/include>
+  $<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>
+  $<BUILD_INTERFACE:${PROJECT_SOURCE_DIR}/test/contracts/include>)
+
 add_subdirectory(configuration)
 add_subdirectory(backend)
 add_subdirectory(host)
@@ -590,14 +598,6 @@ add_subdirectory(integration)
 add_subdirectory(zmq)
 add_subdirectory(stream)
 
-# Interface library that will be used for common functionality
-add_library(AllenCommon INTERFACE)
-target_include_directories(AllenCommon
-  INTERFACE
-  $<BUILD_INTERFACE:${PROJECT_SOURCE_DIR}/main/include>
-  $<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>
-  $<BUILD_INTERFACE:${PROJECT_SOURCE_DIR}/test/contracts/include>)
-
 if (STANDALONE)
   target_compile_definitions(AllenCommon INTERFACE ALLEN_STANDALONE)
 endif()
@@ -683,21 +683,19 @@ target_compile_definitions(AllenLib PUBLIC ${TARGET_DEFINITION})
 target_link_libraries(AllenLib
   PRIVATE
     Backend
-    Stream
     HostCommon
     HostEventModel
-    HostRoutingBits
     NonEventData
     TrackChecking
     PVChecking
     CheckClustering
     SelChecking
     Monitoring
-    HostCombiners
     EventModel
     Gear
     mdf
     LHCbEvent
+    Stream
   PUBLIC
     AllenCommon
     AllenRuntime
@@ -714,6 +712,7 @@ if (NOT STANDALONE)
 endif()
 
 allen_add_executable(Allen main/src/main.cpp)
+add_dependencies(Allen Sequences)
 
 target_link_libraries(Allen PRIVATE AllenLib NonEventData Gear)
 
diff --git a/cmake/GenerateConfiguration.cmake b/cmake/GenerateConfiguration.cmake
index d6f0736b8e6..64da7961b0b 100644
--- a/cmake/GenerateConfiguration.cmake
+++ b/cmake/GenerateConfiguration.cmake
@@ -15,6 +15,7 @@ set(ALLEN_PARSER_DIR ${PROJECT_SEQUENCE_DIR}/parser)
 set(ALGORITHMS_OUTPUTFILE ${ALLEN_ALGORITHMS_DIR}/allen_standalone_algorithms.py)
 set(PARSED_ALGORITHMS_OUTPUTFILE ${CODE_GENERATION_DIR}/parsed_algorithms.pickle)
 set(ALGORITHMS_GENERATION_SCRIPT ${PROJECT_SOURCE_DIR}/configuration/parser/ParseAlgorithms.py)
+set(DEFAULT_PROPERTIES_SRC ${PROJECT_SOURCE_DIR}/configuration/parser/default_properties.cpp)
 
 include_guard(GLOBAL)
 
@@ -59,31 +60,35 @@ add_custom_command(
   DEPENDS "${PROJECT_SOURCE_DIR}/configuration/python/AllenConf" "${PROJECT_SOURCE_DIR}/configuration/python/AllenCore")
 add_custom_target(generate_conf_core DEPENDS "${SEQUENCE_DEFINITION_DIR}" "${ALLEN_CORE_DIR}")
 
+# Generate Allen AlgorithmDB
+add_custom_command(
+  OUTPUT "${CODE_GENERATION_DIR}/AlgorithmDB.cpp"
+  COMMENT "Generating AlgorithmDB"
+  COMMAND ${CMAKE_COMMAND} -E env ${PARSER_ENV} ${Python_EXECUTABLE} ${ALGORITHMS_GENERATION_SCRIPT} --generate db --filename "${CODE_GENERATION_DIR}/AlgorithmDB.cpp" --parsed_algorithms "${PARSED_ALGORITHMS_OUTPUTFILE}"
+  WORKING_DIRECTORY ${ALLEN_PARSER_DIR}
+  DEPENDS "${PARSED_ALGORITHMS_OUTPUTFILE}")
+add_custom_target(algorithm_db_generation DEPENDS "${CODE_GENERATION_DIR}/AlgorithmDB.cpp")
+add_library(algorithm_db STATIC "${CODE_GENERATION_DIR}/AlgorithmDB.cpp")
+add_dependencies(algorithm_db algorithm_db_generation)
+target_include_directories(algorithm_db PUBLIC $<BUILD_INTERFACE:${ALLEN_GENERATED_INCLUDE_FILES_DIR}>)
+target_link_libraries(algorithm_db PRIVATE AllenCommon Gear Backend HostEventModel EventModel)
+
+add_executable(default_properties ${DEFAULT_PROPERTIES_SRC})
+target_link_libraries(default_properties PRIVATE AllenLib AllenCommon Gear Backend HostEventModel EventModel)
+
 # Generate allen standalone algorithms file
 add_custom_command(
   OUTPUT "${ALGORITHMS_OUTPUTFILE}"
   COMMAND
-    ${CMAKE_COMMAND} -E env ${PARSER_ENV} ${Python_EXECUTABLE} ${ALGORITHMS_GENERATION_SCRIPT} --generate views --filename "${ALGORITHMS_OUTPUTFILE}" --parsed_algorithms "${PARSED_ALGORITHMS_OUTPUTFILE}" &&
+    ${CMAKE_COMMAND} -E env ${PARSER_ENV} ${Python_EXECUTABLE} ${ALGORITHMS_GENERATION_SCRIPT} --generate views --filename "${ALGORITHMS_OUTPUTFILE}" --parsed_algorithms "${PARSED_ALGORITHMS_OUTPUTFILE}" --default_properties $<TARGET_FILE:default_properties> &&
     ${CMAKE_COMMAND} -E touch ${ALLEN_ALGORITHMS_DIR}/__init__.py
   WORKING_DIRECTORY ${ALLEN_PARSER_DIR}
-  DEPENDS "${PARSED_ALGORITHMS_OUTPUTFILE}" "${SEQUENCE_DEFINITION_DIR}" "${ALLEN_CORE_DIR}")
+  DEPENDS "${PARSED_ALGORITHMS_OUTPUTFILE}" "${SEQUENCE_DEFINITION_DIR}" "${ALLEN_CORE_DIR}" default_properties)
 add_custom_target(generate_algorithms_view DEPENDS "${ALGORITHMS_OUTPUTFILE}")
 install(FILES "${ALGORITHMS_OUTPUTFILE}" DESTINATION python/AllenAlgorithms)
 
-# Generate Allen AlgorithmDB
-add_custom_command(
-  OUTPUT "${ALLEN_GENERATED_INCLUDE_FILES_DIR}/AlgorithmDB.h"
-  COMMENT "Generating AlgorithmDB"
-  COMMAND ${CMAKE_COMMAND} -E env ${PARSER_ENV} ${Python_EXECUTABLE} ${ALGORITHMS_GENERATION_SCRIPT} --generate db --filename "${ALLEN_GENERATED_INCLUDE_FILES_DIR}/AlgorithmDB.h" --parsed_algorithms "${PARSED_ALGORITHMS_OUTPUTFILE}"
-  WORKING_DIRECTORY ${ALLEN_PARSER_DIR}
-  DEPENDS "${PARSED_ALGORITHMS_OUTPUTFILE}")
-add_custom_target(algorithm_db_generation DEPENDS "${ALLEN_GENERATED_INCLUDE_FILES_DIR}/AlgorithmDB.h")
-add_library(algorithm_db INTERFACE)
-add_dependencies(algorithm_db algorithm_db_generation "${ALLEN_GENERATED_INCLUDE_FILES_DIR}/AlgorithmDB.h")
-target_include_directories(algorithm_db INTERFACE $<BUILD_INTERFACE:${ALLEN_GENERATED_INCLUDE_FILES_DIR}>)
-install(TARGETS algorithm_db
-      EXPORT Allen
-      LIBRARY DESTINATION lib)
+# Target that the generation of the sequences can depend on
+add_custom_target(Sequences DEPENDS generate_algorithms_view)
 
 if(SEPARABLE_COMPILATION)
   add_custom_command(
@@ -198,6 +203,6 @@ function(generate_sequence sequence)
       WORKING_DIRECTORY ${sequence_dir})
   endif()
   add_custom_target(sequence_${sequence} DEPENDS "${PROJECT_BINARY_DIR}/${sequence}.json")
-  add_dependencies(Stream sequence_${sequence})
+  add_dependencies(Sequences sequence_${sequence})
   install(FILES "${PROJECT_BINARY_DIR}/${sequence}.json" DESTINATION constants)
 endfunction()
diff --git a/configuration/parser/ParseAlgorithms.py b/configuration/parser/ParseAlgorithms.py
index 0470d92c799..071a68124b6 100755
--- a/configuration/parser/ParseAlgorithms.py
+++ b/configuration/parser/ParseAlgorithms.py
@@ -114,7 +114,7 @@ class AllenCore():
         return s
 
     @staticmethod
-    def write_algorithm_code(algorithm, i=0):
+    def write_algorithm_code(algorithm, default_properties, i=0):
         s = AllenCore.prefix(
             i) + "class " + algorithm.name + "(AllenAlgorithm):\n"
         i += 1
@@ -133,7 +133,8 @@ class AllenCore():
                 + AllenCore.create_var_type(param.kind) + \
                 "\", \"" + str(param.typedef) + "\"),\n"
         for prop in algorithm.properties:
-            s += AllenCore.prefix(i) + prop.name[1:-1] + " = \"\",\n"
+            pn = prop.name[1:-1]
+            s += f'{AllenCore.prefix(i)}{pn} = "{default_properties[pn]}",\n'
         s = s[:-2]
         i -= 1
         s += "\n" + AllenCore.prefix(i) + ")\n"
@@ -587,10 +588,25 @@ class AllenCore():
         return code
 
     @staticmethod
-    def write_algorithms_view(algorithms, filename):
+    def write_algorithms_view(algorithms, filename, default_properties):
+        from subprocess import (PIPE, run)
+        import json
+
+        p = run([default_properties], stdout=PIPE,
+                input=';'.join(["{}::{}".format(a.namespace, a.name) for a in parsed_algorithms]),
+                encoding='ascii')
+
+        default_properties = None
+        if p.returncode == 0:
+            default_properties = json.loads(p.stdout)
+        else:
+            print("Failed to obtain default property values")
+            sys.exit(-1)
+
         s = AllenCore.write_preamble()
         for algorithm in parsed_algorithms:
-            s += AllenCore.write_algorithm_code(algorithm)
+            tn = "{}::{}".format(algorithm.namespace, algorithm.name)
+            s += AllenCore.write_algorithm_code(algorithm, default_properties[tn])
         with open(filename, "w") as f:
             f.write(s)
 
@@ -624,7 +640,7 @@ class AllenCore():
 
     @staticmethod
     def write_algorithms_db(algorithms, filename):
-        code = "\n".join(("#pragma once", "", "#include <Configuration.h>",
+        code = "\n".join(("#include <AlgorithmDB.h>",
                           "\n"))
         for alg in algorithms:
             code += f"namespace {alg.namespace} {{ struct {alg.name}; }}\n"
@@ -712,6 +728,12 @@ if __name__ == '__main__':
         type=str,
         default="",
         help="location of parsed algorithms")
+    parser.add_argument(
+        "--default_properties",
+        nargs="?",
+        type=str,
+        default="",
+        help="location of default_properties executable")
     parser.add_argument(
         "--generate",
         nargs="?",
@@ -747,7 +769,7 @@ if __name__ == '__main__':
 
         if args.generate == "views":
             # Generate algorithm python views
-            AllenCore.write_algorithms_view(parsed_algorithms, args.filename)
+            AllenCore.write_algorithms_view(parsed_algorithms, args.filename, args.default_properties)
         elif args.generate == "wrapperlist":
             # Generate Gaudi wrapper filenames
             gaudi_wrapper_filenames = AllenCore.write_gaudi_algorithms(
diff --git a/configuration/parser/default_properties.cpp b/configuration/parser/default_properties.cpp
new file mode 100644
index 00000000000..cb9bd267892
--- /dev/null
+++ b/configuration/parser/default_properties.cpp
@@ -0,0 +1,34 @@
+#include <iostream>
+#include <iterator>
+#include <string>
+#include <algorithm>
+#include <boost/algorithm/string.hpp>
+#include <nlohmann/json.hpp>
+#include <AlgorithmDB.h>
+
+int main()
+{
+  std::istreambuf_iterator<char> begin(std::cin), end;
+  std::string input(begin, end);
+  if (!input.empty() && input[input.size() - 1] == '\n') {
+    input.erase(input.size() - 1);
+  }
+
+  std::vector<std::string> algorithms;
+  boost::split(algorithms, input, boost::is_any_of(";"));
+
+  using json_float = nlohmann::basic_json<std::map, std::vector, std::string, bool,
+                                          std::int64_t, std::uint64_t, float>;
+  json_float default_properties;
+
+  for (auto alg : algorithms) {
+    auto allen_alg = instantiate_allen_algorithm({alg, "algorithm", ""});
+    std::map<std::string, std::string> string_props;
+    for (auto [k, j] : allen_alg.get_properties()) {
+      json_float jf = j;
+      string_props[k] = jf.is_string() ? jf.get<std::string>() : to_string(jf);
+    }
+    default_properties[alg] = string_props;
+  }
+  std::cout << std::setw(4) << default_properties;
+}
diff --git a/main/include/Configuration.h b/main/include/Configuration.h
index 223e7d6497f..fe3486b2869 100644
--- a/main/include/Configuration.h
+++ b/main/include/Configuration.h
@@ -13,6 +13,7 @@
 #include <stdexcept>
 #include <vector>
 #include <string>
+#include <map>
 
 struct ConfiguredAlgorithm {
   std::string id;
diff --git a/stream/CMakeLists.txt b/stream/CMakeLists.txt
index 1aad5aa3829..acf2a3e5d69 100644
--- a/stream/CMakeLists.txt
+++ b/stream/CMakeLists.txt
@@ -1,8 +1,6 @@
 ###############################################################################
 # (c) Copyright 2018-2020 CERN for the benefit of the LHCb Collaboration      #
 ###############################################################################
-include(GenerateConfiguration)
-
 # Gear interface library
 add_library(Gear INTERFACE)
 target_include_directories(Gear INTERFACE
@@ -12,6 +10,8 @@ target_include_directories(Gear INTERFACE
 target_link_libraries(Gear INTERFACE Boost::boost)
 install(TARGETS Gear EXPORT Allen)
 
+include(GenerateConfiguration)
+
 file(GLOB stream_src "sequence/src/*cpp")
 
 allen_add_host_library(Stream STATIC ${stream_src})
@@ -19,14 +19,6 @@ allen_add_host_library(Stream STATIC ${stream_src})
 target_link_libraries(Stream
   PRIVATE
     HostClustering
-    HostDataProvider
-    HostDummyMaker
-    HostErrorBanks
-    HostGEC
-    HostInitEventList
-    HostPrefixSum
-    HostRoutingBits
-    HostTAEFilter
     AllenCommon
     Associate
     Backend
@@ -41,16 +33,25 @@ target_link_libraries(Stream
     Plume
     SciFi
     UT
-    Validators
     Velo
     VertexFitter
     algorithm_db
     track_matching
+    algorithm_db
   PUBLIC
+    HostCombiners
+    HostDataProvider
+    HostDummyMaker
+    HostErrorBanks
+    HostGEC
+    HostInitEventList
+    HostPrefixSum
+    HostRoutingBits
+    HostTAEFilter
+    Validators
     Utils
     Selections)
 
-add_dependencies(Stream generate_algorithms_view)
 if(STANDALONE)
   add_dependencies(Stream checkout_lhcb checkout_gaudi)
 endif()
diff --git a/stream/gear/include/AlgorithmDB.h b/stream/gear/include/AlgorithmDB.h
new file mode 100644
index 00000000000..3100dd02610
--- /dev/null
+++ b/stream/gear/include/AlgorithmDB.h
@@ -0,0 +1,16 @@
+/*****************************************************************************\
+* (c) Copyright 2023 CERN for the benefit of the LHCb Collaboration           *
+*                                                                             *
+* This software is distributed under the terms of the Apache License          *
+* version 2 (Apache-2.0), copied verbatim in the file "COPYING".              *
+*                                                                             *
+* In applying this licence, CERN does not waive the privileges and immunities *
+* granted to it by virtue of its status as an Intergovernmental Organization  *
+* or submit itself to any jurisdiction.                                       *
+\*****************************************************************************/
+#pragma once
+
+#include "Configuration.h"
+#include "Algorithm.cuh"
+
+Allen::TypeErasedAlgorithm instantiate_allen_algorithm(const ConfiguredAlgorithm& alg);
-- 
GitLab


From e91b2b4b939d75e4bc7b57455a4893317ebef1c6 Mon Sep 17 00:00:00 2001
From: Roel Aaij <roel.aaij@nikhef.nl>
Date: Sat, 11 Mar 2023 23:35:48 +0100
Subject: [PATCH 02/49] Always use the JSON representation for default values
 of properties

This allows the Python JSON parser to turn values into appropriate
Python objects.
---
 configuration/parser/ParseAlgorithms.py     | 23 +++++++++++++++++----
 configuration/parser/default_properties.cpp | 15 +++++++++++---
 2 files changed, 31 insertions(+), 7 deletions(-)

diff --git a/configuration/parser/ParseAlgorithms.py b/configuration/parser/ParseAlgorithms.py
index 071a68124b6..e8054ac2f8a 100755
--- a/configuration/parser/ParseAlgorithms.py
+++ b/configuration/parser/ParseAlgorithms.py
@@ -11,7 +11,7 @@ from collections import OrderedDict
 from AlgorithmTraversalLibClang import AlgorithmTraversal
 import argparse
 import pickle
-
+import json
 
 def get_clang_so_location():
     """Function that fetches location of detected clang so."""
@@ -132,10 +132,23 @@ class AllenCore():
             s += AllenCore.prefix(i) + param.typename + " = AllenDataHandle(\"" + param.scope + "\", " + dependencies + ", \"" + param.typename + "\", \"" \
                 + AllenCore.create_var_type(param.kind) + \
                 "\", \"" + str(param.typedef) + "\"),\n"
+
+        # Properties
         for prop in algorithm.properties:
+            # Use the python JSON parser to turn the JSON
+            # representation of default values into appropriate Python
+            # objects
             pn = prop.name[1:-1]
-            s += f'{AllenCore.prefix(i)}{pn} = "{default_properties[pn]}",\n'
-        s = s[:-2]
+            dv = json.loads(default_properties[pn])
+
+            # Quotes have to be added for properties that hold a string
+            if type(dv) is str:
+                dv = f'"{dv}"'
+
+            # Write the code for the property and include the C++ type
+            # as a comment
+            s += f'{AllenCore.prefix(i)}{pn} = {dv}, # {prop.typedef}\n'
+        s = s[:-1]
         i -= 1
         s += "\n" + AllenCore.prefix(i) + ")\n"
 
@@ -590,8 +603,10 @@ class AllenCore():
     @staticmethod
     def write_algorithms_view(algorithms, filename, default_properties):
         from subprocess import (PIPE, run)
-        import json
 
+        # Run the default_properties executable to get a JSON
+        # representation of the default values of all properties of
+        # all algorithms
         p = run([default_properties], stdout=PIPE,
                 input=';'.join(["{}::{}".format(a.namespace, a.name) for a in parsed_algorithms]),
                 encoding='ascii')
diff --git a/configuration/parser/default_properties.cpp b/configuration/parser/default_properties.cpp
index cb9bd267892..973f94d2592 100644
--- a/configuration/parser/default_properties.cpp
+++ b/configuration/parser/default_properties.cpp
@@ -1,3 +1,13 @@
+/*****************************************************************************\
+* (c) Copyright 2023 CERN for the benefit of the LHCb Collaboration           *
+*                                                                             *
+* This software is distributed under the terms of the Apache License          *
+* version 2 (Apache-2.0), copied verbatim in the file "COPYING".              *
+*                                                                             *
+* In applying this licence, CERN does not waive the privileges and immunities *
+* granted to it by virtue of its status as an Intergovernmental Organization  *
+* or submit itself to any jurisdiction.                                       *
+\*****************************************************************************/
 #include <iostream>
 #include <iterator>
 #include <string>
@@ -17,8 +27,7 @@ int main()
   std::vector<std::string> algorithms;
   boost::split(algorithms, input, boost::is_any_of(";"));
 
-  using json_float = nlohmann::basic_json<std::map, std::vector, std::string, bool,
-                                          std::int64_t, std::uint64_t, float>;
+  using json_float = nlohmann::basic_json<std::map, std::vector, std::string, bool, std::int64_t, std::uint64_t, float>;
   json_float default_properties;
 
   for (auto alg : algorithms) {
@@ -26,7 +35,7 @@ int main()
     std::map<std::string, std::string> string_props;
     for (auto [k, j] : allen_alg.get_properties()) {
       json_float jf = j;
-      string_props[k] = jf.is_string() ? jf.get<std::string>() : to_string(jf);
+      string_props[k] = to_string(jf);
     }
     default_properties[alg] = string_props;
   }
-- 
GitLab


From 92bb2ce2ebeed13dfb34983042b051d27a7dbf64 Mon Sep 17 00:00:00 2001
From: Roel Aaij <roel.aaij@nikhef.nl>
Date: Sat, 11 Mar 2023 23:55:32 +0100
Subject: [PATCH 03/49] Document default_properties

---
 configuration/parser/default_properties.cpp | 14 +++++++++++++-
 1 file changed, 13 insertions(+), 1 deletion(-)

diff --git a/configuration/parser/default_properties.cpp b/configuration/parser/default_properties.cpp
index 973f94d2592..01bf693ea34 100644
--- a/configuration/parser/default_properties.cpp
+++ b/configuration/parser/default_properties.cpp
@@ -9,6 +9,7 @@
 * or submit itself to any jurisdiction.                                       *
 \*****************************************************************************/
 #include <iostream>
+#include <iomanip>
 #include <iterator>
 #include <string>
 #include <algorithm>
@@ -18,25 +19,36 @@
 
 int main()
 {
+  // Read the semicolon-separated list of algorithms from stdin
   std::istreambuf_iterator<char> begin(std::cin), end;
   std::string input(begin, end);
   if (!input.empty() && input[input.size() - 1] == '\n') {
     input.erase(input.size() - 1);
   }
 
+  // Split the list into algorithm namespace::type
   std::vector<std::string> algorithms;
   boost::split(algorithms, input, boost::is_any_of(";"));
 
-  using json_float = nlohmann::basic_json<std::map, std::vector, std::string, bool, std::int64_t, std::uint64_t, float>;
+  // Use non-default JSON parser to parse all floating point numbers
+  // as floats and integers as 32 bits. This aligns with what is used
+  // in Allen
+  using json_float = nlohmann::basic_json<std::map, std::vector, std::string, bool, std::int32_t, std::uint32_t, float>;
   json_float default_properties;
 
+  // Loop over the algorithms, instantiate each algorithm and get its
+  // (default valued) properties.
   for (auto alg : algorithms) {
     auto allen_alg = instantiate_allen_algorithm({alg, "algorithm", ""});
     std::map<std::string, std::string> string_props;
     for (auto [k, j] : allen_alg.get_properties()) {
+      // Assign to our JSON parser type to get the wanted parsing
+      // behaviour and use to_string to allow the Python JSON parser
+      // to change the values into Python objects.
       json_float jf = j;
       string_props[k] = to_string(jf);
     }
+    // Save the representation in another JSON.
     default_properties[alg] = string_props;
   }
   std::cout << std::setw(4) << default_properties;
-- 
GitLab


From 7c3ad73e8f959647ad584c9cd073e60c267f6a8b Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Mon, 13 Mar 2023 14:49:18 +0100
Subject: [PATCH 04/49] Refactor CMakeLists to not install static libs and fix
 algorithm_db link libraries

---
 CMakeLists.txt                            | 125 +++++++++++-----------
 Dumpers/BinaryDumpers/CMakeLists.txt      |   2 +-
 Rec/Allen/CMakeLists.txt                  |  24 +++--
 backend/CMakeLists.txt                    |   5 +-
 cmake/GenerateConfiguration.cmake         |  38 ++++++-
 device/selections/CMakeLists.txt          |  14 ++-
 integration/non_event_data/CMakeLists.txt |   6 +-
 stream/CMakeLists.txt                     |  37 ++-----
 8 files changed, 134 insertions(+), 117 deletions(-)

diff --git a/CMakeLists.txt b/CMakeLists.txt
index ea667f9ae48..a6a120552ac 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -276,6 +276,7 @@ if(TARGET_DEVICE STREQUAL "CPU")
   endif()
 
   function(allen_add_host_library)
+    list(FIND ARGV STATIC is_static)
     foreach(arg IN LISTS ARGN)
       if(${arg} MATCHES "\\.cu$")
         set_source_files_properties(${arg} PROPERTIES LANGUAGE CXX)
@@ -283,17 +284,19 @@ if(TARGET_DEVICE STREQUAL "CPU")
     endforeach()
 
     add_library(${ARGV})
+
     add_library(Allen::${ARGV0} ALIAS ${ARGV0})
     target_compile_definitions(${ARGV0} PRIVATE ${TARGET_DEFINITION} ODIN_WITHOUT_GAUDI)
-    install(TARGETS ${ARGV0}
-      EXPORT Allen
-      LIBRARY DESTINATION lib)
+
+    if (${is_static} EQUAL -1)
+      install(TARGETS ${ARGV0}
+        EXPORT Allen
+        LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR})
+    endif()
   endfunction()
 
   function(allen_add_device_library)
-    set(destination lib)
-    list(FIND ARGV STREAM is_stream)
-    list(FILTER ARGV EXCLUDE REGEX "STREAM")
+    list(FIND ARGV STATIC is_static)
 
     foreach(arg IN LISTS ARGN)
       if(${arg} MATCHES "\\.cu$")
@@ -303,15 +306,14 @@ if(TARGET_DEVICE STREQUAL "CPU")
 
     add_library(${ARGV})
 
-    if (NOT ${is_stream} EQUAL -1)
-      set(destination lib/sequences)
-    endif()
-
     add_library(Allen::${ARGV0} ALIAS ${ARGV0})
     target_compile_definitions(${ARGV0} PRIVATE ${TARGET_DEFINITION} ODIN_WITHOUT_GAUDI)
-    install(TARGETS ${ARGV0}
-      EXPORT Allen
-      DESTINATION ${destination})
+
+    if (${is_static} EQUAL -1)
+      install(TARGETS ${ARGV0}
+        EXPORT Allen
+        DESTINATION ${CMAKE_INSTALL_LIBDIR})
+    endif()
   endfunction()
 
   function(allen_add_executable)
@@ -323,7 +325,7 @@ if(TARGET_DEVICE STREQUAL "CPU")
 
     add_executable(${ARGV})
     target_compile_definitions(${ARGV0} PRIVATE ${TARGET_DEFINITION} ODIN_WITHOUT_GAUDI)
-    install(TARGETS ${ARGV0} RUNTIME DESTINATION bin)
+    install(TARGETS ${ARGV0} RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
   endfunction()
 
   string(APPEND CMAKE_CXX_FLAGS " -Wall -Wextra -Wpedantic -Wnon-virtual-dtor -Wdouble-promotion")
@@ -361,23 +363,23 @@ elseif(TARGET_DEVICE STREQUAL "HIP")
   set(CMAKE_EXE_LINKER_FLAGS "-Wl,-rpath,./")
 
   function(allen_add_host_library)
+    list(FIND ARGV STATIC is_static)
+
     add_library(${ARGV})
-    add_library(Allen::${ARGV0} ALIAS ${ARGV0})
     target_include_directories(${ARGV0} PRIVATE ${HIP_PATH}/include ${ROCM_PATH}/hsa/include)
     target_compile_definitions(${ARGV0} PRIVATE ${TARGET_DEFINITION} ODIN_WITHOUT_GAUDI)
-    install(TARGETS ${ARGV0}
-      EXPORT Allen
-      LIBRARY DESTINATION lib)
-  endfunction()
 
-  function(allen_add_device_library)
-    set(destination lib)
-    list(FIND ARGV STREAM is_stream)
-    list(FILTER ARGV EXCLUDE REGEX "STREAM")
+    add_library(Allen::${ARGV0} ALIAS ${ARGV0})
 
-    if (NOT ${is_stream} EQUAL -1)
-      set(destination lib/sequences)
+    if (${is_static} EQUAL -1)
+      install(TARGETS ${ARGV0}
+        EXPORT Allen
+        LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR})
     endif()
+  endfunction()
+
+  function(allen_add_device_library)
+    list(FIND ARGV STATIC is_static)
 
     hip_add_library(${ARGV} HIPCC_OPTIONS ${HIPCC_OPTIONS})
 
@@ -385,16 +387,18 @@ elseif(TARGET_DEVICE STREQUAL "HIP")
     target_include_directories(${ARGV0} PRIVATE ${HIP_PATH}/include ${ROCM_PATH}/hsa/include)
     target_compile_definitions(${ARGV0} PRIVATE ${TARGET_DEFINITION} ODIN_WITHOUT_GAUDI)
 
-    install(TARGETS ${ARGV0}
-      EXPORT Allen
-      LIBRARY DESTINATION ${destination})
+    if (${is_static} EQUAL -1)
+      install(TARGETS ${ARGV0}
+        EXPORT Allen
+        LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR})
+    endif()
   endfunction()
 
   function(allen_add_executable)
     hip_add_executable(${ARGV} HIPCC_OPTIONS ${HIPCC_OPTIONS})
     target_include_directories(${ARGV0} PRIVATE ${HIP_PATH}/include ${ROCM_PATH}/hsa/include)
     target_compile_definitions(${ARGV0} PRIVATE ${TARGET_DEFINITION})
-    install(TARGETS ${ARGV0} RUNTIME DESTINATION bin)
+    install(TARGETS ${ARGV0} RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
   endfunction()
 elseif(TARGET_DEVICE STREQUAL "CUDA")
 
@@ -421,45 +425,41 @@ elseif(TARGET_DEVICE STREQUAL "CUDA")
   message(STATUS "Detected CUDA include directory: " ${CMAKE_CUDA_TOOLKIT_INCLUDE_DIRECTORIES})
 
   function(allen_add_host_library)
-    set(destination lib)
-    list(FIND ARGV STREAM is_stream)
-    list(FILTER ARGV EXCLUDE REGEX "STREAM")
-
-    if (NOT ${is_stream} EQUAL -1)
-      set(destination lib/sequences)
-    endif()
+    list(FIND ARGV STATIC is_static)
 
     add_library(${ARGV})
+    target_include_directories(${ARGV0} PRIVATE ${CMAKE_CUDA_TOOLKIT_INCLUDE_DIRECTORIES} ${PROJECT_BINARY_DIR}/code_generation)
+    target_compile_definitions(${ARGV0} PRIVATE ${TARGET_DEFINITION} ODIN_WITHOUT_GAUDI)
     if(SEPARABLE_COMPILATION)
       set_property(TARGET ${ARGV0} PROPERTY CUDA_SEPARABLE_COMPILATION ON)
     endif()
-    target_include_directories(${ARGV0} PRIVATE ${CMAKE_CUDA_TOOLKIT_INCLUDE_DIRECTORIES} ${PROJECT_BINARY_DIR}/code_generation)
-    target_compile_definitions(${ARGV0} PRIVATE ${TARGET_DEFINITION} ODIN_WITHOUT_GAUDI)
+
     add_library(Allen::${ARGV0} ALIAS ${ARGV0})
-    install(TARGETS ${ARGV0}
-      EXPORT Allen
-      LIBRARY DESTINATION ${destination})
+
+    if (${is_static} EQUAL -1)
+      install(TARGETS ${ARGV0}
+        EXPORT Allen
+        LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR})
+    endif()
   endfunction()
 
   function(allen_add_device_library)
-    set(destination lib)
-    list(FIND ARGV STREAM is_stream)
-    list(FILTER ARGV EXCLUDE REGEX "STREAM")
-
-    if (NOT ${is_stream} EQUAL -1)
-      set(destination lib/sequences)
-    endif()
+    list(FIND ARGV STATIC is_static)
 
     add_library(${ARGV})
+    target_include_directories(${ARGV0} PRIVATE ${CMAKE_CUDA_TOOLKIT_INCLUDE_DIRECTORIES})
+    target_compile_definitions(${ARGV0} PRIVATE ${TARGET_DEFINITION} ODIN_WITHOUT_GAUDI)
     if(SEPARABLE_COMPILATION)
       set_property(TARGET ${ARGV0} PROPERTY CUDA_SEPARABLE_COMPILATION ON)
     endif()
-    target_include_directories(${ARGV0} PRIVATE ${CMAKE_CUDA_TOOLKIT_INCLUDE_DIRECTORIES})
-    target_compile_definitions(${ARGV0} PRIVATE ${TARGET_DEFINITION} ODIN_WITHOUT_GAUDI)
+
     add_library(Allen::${ARGV0} ALIAS ${ARGV0})
-    install(TARGETS ${ARGV0}
-      EXPORT Allen
-      LIBRARY DESTINATION ${destination})
+
+    if (${is_static} EQUAL -1)
+      install(TARGETS ${ARGV0}
+        EXPORT Allen
+        LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR})
+    endif()
   endfunction()
 
   function(allen_add_executable)
@@ -469,7 +469,7 @@ elseif(TARGET_DEVICE STREQUAL "CUDA")
     endif()
     target_include_directories(${ARGV0} PRIVATE ${CMAKE_CUDA_TOOLKIT_INCLUDE_DIRECTORIES})
     target_compile_definitions(${ARGV0} PRIVATE ${TARGET_DEFINITION})
-    install(TARGETS ${ARGV0} RUNTIME DESTINATION bin)
+    install(TARGETS ${ARGV0} RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
   endfunction()
 
   string(APPEND CMAKE_CXX_FLAGS " -Wall -Wextra -Wpedantic -Wnon-virtual-dtor -Wdouble-promotion")
@@ -485,8 +485,6 @@ set(GROUPCOMP_DIR ${PROJECT_BINARY_DIR}/group_comp)
 file(MAKE_DIRECTORY ${GROUPCOMP_DIR})
 
 function(allen_add_device_library_unified)
-  set(destination lib)
-
   foreach(arg IN LISTS ARGN)
     if(${arg} MATCHES "\\.cu$")
       list(APPEND LIBRARY_SOURCES ${arg})
@@ -518,7 +516,7 @@ function(allen_add_device_library_unified)
 
   install(TARGETS ${ARGV0}
     EXPORT Allen
-    LIBRARY DESTINATION ${destination})
+    LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR})
 
   MATH(EXPR COUNTER "${COUNTER}+1")
 endfunction()
@@ -653,7 +651,7 @@ message(STATUS "CMAKE CXX FLAGS: " ${CMAKE_CXX_FLAGS})
 
 
 # Add host library
-allen_add_host_library(HostCommon STATIC
+allen_add_host_library(HostCommon SHARED
   main/src/BankTypes.cpp
   main/src/InputReader.cpp
   main/src/InputTools.cpp
@@ -668,7 +666,7 @@ allen_add_host_library(HostCommon STATIC
 target_link_libraries(AllenCommon INTERFACE
   LHCbEvent AllenFS nlohmann_json::nlohmann_json cppgsl::cppgsl)
 target_link_libraries(HostCommon PRIVATE
-  mdf EventModel Gear Backend mdf NonEventData AllenCommon LHCbEvent Boost::iostreams)
+  mdf EventModel Gear Backend mdf AllenCommon LHCbEvent Boost::iostreams)
 
 allen_add_host_library(AllenLib SHARED
   main/src/Allen.cpp
@@ -683,21 +681,24 @@ target_compile_definitions(AllenLib PUBLIC ${TARGET_DEFINITION})
 target_link_libraries(AllenLib
   PRIVATE
     Backend
-    HostCommon
-    HostEventModel
     NonEventData
     TrackChecking
     PVChecking
     CheckClustering
     SelChecking
     Monitoring
+    HostEventModel
     EventModel
-    Gear
     mdf
     LHCbEvent
+    Utils
+    HostClustering
+    HostRoutingBits
     Stream
+    algorithm_db
   PUBLIC
     AllenCommon
+    HostCommon
     AllenRuntime
     Threads::Threads
     AllenZMQ
diff --git a/Dumpers/BinaryDumpers/CMakeLists.txt b/Dumpers/BinaryDumpers/CMakeLists.txt
index 74e2bdb581e..ec3c0078e1f 100644
--- a/Dumpers/BinaryDumpers/CMakeLists.txt
+++ b/Dumpers/BinaryDumpers/CMakeLists.txt
@@ -13,7 +13,7 @@ gaudi_add_library(BinaryDumpers
                     AllenCommon
                     HostCommon
                     EventModel
-                    Backend
+                    AllenRuntime
                     NonEventData
                     Gaudi::GaudiKernel
                     LHCb::DAQEventLib
diff --git a/Rec/Allen/CMakeLists.txt b/Rec/Allen/CMakeLists.txt
index f3eb8a8fff9..a2ccd849ebb 100755
--- a/Rec/Allen/CMakeLists.txt
+++ b/Rec/Allen/CMakeLists.txt
@@ -78,24 +78,26 @@ gaudi_add_module(AllenAlgorithms
                  LINK
                    AllenLib
                    Backend
-                   Stream
+                   CheckClustering
+                   EventModel
+                   Gear
+                   HostCombiners
                    HostCommon
                    HostEventModel
                    HostRoutingBits
+                   LHCbEvent
+                   Monitoring
+                   MuonCommon
                    NonEventData
-                   TrackChecking
                    PVChecking
-                   CheckClustering
                    SelChecking
-                   Monitoring
-                   HostCombiners
-                   EventModel
-                   Gear
-                   mdf
-                   LHCbEvent
-                   EventModel
-                   MuonCommon
+                   SelectionsHeaders
+                   Stream
+                   TrackChecking
+                   UTCommon
+                   Utils
                    WrapperInterface
+                   mdf
                    Gaudi::GaudiAlgLib
                    LHCb::DAQEventLib
                    LHCb::DAQKernelLib
diff --git a/backend/CMakeLists.txt b/backend/CMakeLists.txt
index 0fc9807ed1d..9e993091da3 100644
--- a/backend/CMakeLists.txt
+++ b/backend/CMakeLists.txt
@@ -21,11 +21,12 @@ target_include_directories(AllenRuntime INTERFACE
   $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/include>
   $<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}/Backend>
   $<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>)
+target_compile_definitions(AllenRuntime INTERFACE ${TARGET_DEFINITION})
+target_link_libraries(AllenRuntime INTERFACE umesimd::umesimd)
 
 # Backend library
 allen_add_host_library(Backend STATIC ${backend_sources})
-target_link_libraries(Backend PUBLIC AllenRuntime Gear AllenCommon umesimd::umesimd)
-target_compile_definitions(Backend PUBLIC ${TARGET_DEFINITION})
+target_link_libraries(Backend PUBLIC AllenRuntime Gear AllenCommon)
 
 set(BackendHeaders)
 foreach(header
diff --git a/cmake/GenerateConfiguration.cmake b/cmake/GenerateConfiguration.cmake
index 64da7961b0b..cce915da968 100644
--- a/cmake/GenerateConfiguration.cmake
+++ b/cmake/GenerateConfiguration.cmake
@@ -71,10 +71,44 @@ add_custom_target(algorithm_db_generation DEPENDS "${CODE_GENERATION_DIR}/Algori
 add_library(algorithm_db STATIC "${CODE_GENERATION_DIR}/AlgorithmDB.cpp")
 add_dependencies(algorithm_db algorithm_db_generation)
 target_include_directories(algorithm_db PUBLIC $<BUILD_INTERFACE:${ALLEN_GENERATED_INCLUDE_FILES_DIR}>)
-target_link_libraries(algorithm_db PRIVATE AllenCommon Gear Backend HostEventModel EventModel)
+target_link_libraries(algorithm_db
+  PUBLIC
+  AllenCommon
+  Gear
+  Backend
+  HostEventModel
+  EventModel
+  Associate
+  Backend
+  Calo
+  Combiners
+  DeviceValidators
+  Examples
+  Kalman
+  Lumi
+  Muon
+  PV_beamline
+  Plume
+  SciFi
+  UT
+  Velo
+  VertexFitter
+  HostCombiners
+  HostDataProvider
+  HostDummyMaker
+  HostErrorBanks
+  HostGEC
+  HostInitEventList
+  HostPrefixSum
+  HostRoutingBits
+  HostTAEFilter
+  Validators
+  track_matching
+  SelectionsHost
+  Selections)
 
 add_executable(default_properties ${DEFAULT_PROPERTIES_SRC})
-target_link_libraries(default_properties PRIVATE AllenLib AllenCommon Gear Backend HostEventModel EventModel)
+target_link_libraries(default_properties PRIVATE algorithm_db AllenLib)
 
 # Generate allen standalone algorithms file
 add_custom_command(
diff --git a/device/selections/CMakeLists.txt b/device/selections/CMakeLists.txt
index f70a09fe118..eeeaea091da 100755
--- a/device/selections/CMakeLists.txt
+++ b/device/selections/CMakeLists.txt
@@ -19,21 +19,26 @@ else()
   )
 endif()
 
+add_library(SelectionsHeaders INTERFACE)
+target_include_directories(SelectionsHeaders INTERFACE
+  $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/Hlt1/include>
+  $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/line_types/include>)
+install(TARGETS SelectionsHeaders EXPORT Allen)
+
 target_link_libraries(Selections PRIVATE
   AllenCommon
   Backend
   HostEventModel
-  HostCommon
   EventModel
   Utils
   LHCbEvent
   Kalman
   VertexFitter
-  extern_lines)
+  extern_lines
+  SelectionsHeaders)
 
 target_include_directories(Selections PUBLIC
   $<BUILD_INTERFACE:${PROJECT_BINARY_DIR}/configuration/sequences>
-  $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/Hlt1/include>
   $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/filters/include>
   $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/lines/calibration/include>
   $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/lines/monitoring/include>
@@ -43,5 +48,4 @@ target_include_directories(Selections PUBLIC
   $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/lines/electron/include>
   $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/lines/photon/include>
   $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/lines/SMOG2/include>
-  $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/lines/heavy_ions/include>
-  $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/line_types/include>)
+  $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/lines/heavy_ions/include>)
diff --git a/integration/non_event_data/CMakeLists.txt b/integration/non_event_data/CMakeLists.txt
index ebdeca5cf5d..f795c8be812 100644
--- a/integration/non_event_data/CMakeLists.txt
+++ b/integration/non_event_data/CMakeLists.txt
@@ -3,15 +3,15 @@
 ###############################################################################
 file(GLOB SOURCES "src/*.cpp")
 
-allen_add_host_library(NonEventData STATIC ${SOURCES})
+allen_add_host_library(NonEventData SHARED ${SOURCES})
 
 target_link_libraries(NonEventData PRIVATE
   HostClustering
-  Backend
+  AllenRuntime
   AllenCommon
+  HostCommon
   EventModel
   Gear
-  Backend
   MuonCommon
   UTCommon
   nlohmann_json::nlohmann_json)
diff --git a/stream/CMakeLists.txt b/stream/CMakeLists.txt
index acf2a3e5d69..207994d4a48 100644
--- a/stream/CMakeLists.txt
+++ b/stream/CMakeLists.txt
@@ -18,39 +18,14 @@ allen_add_host_library(Stream STATIC ${stream_src})
 
 target_link_libraries(Stream
   PRIVATE
-    HostClustering
-    AllenCommon
-    Associate
+    HostEventModel
+    EventModel
     Backend
-    Calo
-    Combiners
-    DeviceValidators
-    Examples
-    Kalman
-    Lumi
-    Muon
-    PV_beamline
-    Plume
-    SciFi
-    UT
-    Velo
-    VertexFitter
-    algorithm_db
+    AllenCommon
+    Gear
     track_matching
-    algorithm_db
-  PUBLIC
-    HostCombiners
-    HostDataProvider
-    HostDummyMaker
-    HostErrorBanks
-    HostGEC
-    HostInitEventList
-    HostPrefixSum
-    HostRoutingBits
-    HostTAEFilter
-    Validators
-    Utils
-    Selections)
+    MuonCommon
+  )
 
 if(STANDALONE)
   add_dependencies(Stream checkout_lhcb checkout_gaudi)
-- 
GitLab


From c995ed8100e60e4d93987c89580408f3ad03a4fe Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Wed, 15 Mar 2023 14:55:30 +0100
Subject: [PATCH 05/49] Fix CMake to have both CUDA and CPU builds link

---
 CMakeLists.txt                    | 48 ++++++++++++++++++++++++-------
 cmake/GenerateConfiguration.cmake | 43 +++++----------------------
 2 files changed, 44 insertions(+), 47 deletions(-)

diff --git a/CMakeLists.txt b/CMakeLists.txt
index a6a120552ac..832cc674b45 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -680,23 +680,49 @@ target_compile_definitions(AllenLib PUBLIC ${TARGET_DEFINITION})
 
 target_link_libraries(AllenLib
   PRIVATE
+    algorithm_db
+    Associate
     Backend
-    NonEventData
-    TrackChecking
-    PVChecking
+    Calo
     CheckClustering
-    SelChecking
-    Monitoring
-    HostEventModel
+    Combiners
+    DeviceValidators
     EventModel
-    mdf
-    LHCbEvent
-    Utils
+    Examples
     HostClustering
+    HostCombiners
+    HostDataProvider
+    HostDummyMaker
+    HostErrorBanks
+    HostEventModel
+    HostGEC
+    HostInitEventList
+    HostPrefixSum
     HostRoutingBits
+    HostTAEFilter
+    Kalman
+    LHCbEvent
+    Lumi
+    Monitoring
+    Muon
+    PVChecking
+    PV_beamline
+    Plume
+    SciFi
+    SelChecking
+    Selections
     Stream
-    algorithm_db
+    TrackChecking
+    UT
+    Utils
+    Validators
+    Velo
+    VertexFitter
+    mdf
+    track_matching
   PUBLIC
+    Gear
+    NonEventData
     AllenCommon
     HostCommon
     AllenRuntime
@@ -715,7 +741,7 @@ endif()
 allen_add_executable(Allen main/src/main.cpp)
 add_dependencies(Allen Sequences)
 
-target_link_libraries(Allen PRIVATE AllenLib NonEventData Gear)
+target_link_libraries(Allen PRIVATE AllenLib NonEventData)
 
 
 if (NOT STANDALONE)
diff --git a/cmake/GenerateConfiguration.cmake b/cmake/GenerateConfiguration.cmake
index cce915da968..852b7394ad2 100644
--- a/cmake/GenerateConfiguration.cmake
+++ b/cmake/GenerateConfiguration.cmake
@@ -68,47 +68,18 @@ add_custom_command(
   WORKING_DIRECTORY ${ALLEN_PARSER_DIR}
   DEPENDS "${PARSED_ALGORITHMS_OUTPUTFILE}")
 add_custom_target(algorithm_db_generation DEPENDS "${CODE_GENERATION_DIR}/AlgorithmDB.cpp")
-add_library(algorithm_db STATIC "${CODE_GENERATION_DIR}/AlgorithmDB.cpp")
+add_library(algorithm_db OBJECT "${CODE_GENERATION_DIR}/AlgorithmDB.cpp")
 add_dependencies(algorithm_db algorithm_db_generation)
-target_include_directories(algorithm_db PUBLIC $<BUILD_INTERFACE:${ALLEN_GENERATED_INCLUDE_FILES_DIR}>)
 target_link_libraries(algorithm_db
   PUBLIC
-  AllenCommon
-  Gear
-  Backend
-  HostEventModel
-  EventModel
-  Associate
-  Backend
-  Calo
-  Combiners
-  DeviceValidators
-  Examples
-  Kalman
-  Lumi
-  Muon
-  PV_beamline
-  Plume
-  SciFi
-  UT
-  Velo
-  VertexFitter
-  HostCombiners
-  HostDataProvider
-  HostDummyMaker
-  HostErrorBanks
-  HostGEC
-  HostInitEventList
-  HostPrefixSum
-  HostRoutingBits
-  HostTAEFilter
-  Validators
-  track_matching
-  SelectionsHost
-  Selections)
+    EventModel
+    HostEventModel
+    Backend
+    AllenCommon
+    Gear)
 
 add_executable(default_properties ${DEFAULT_PROPERTIES_SRC})
-target_link_libraries(default_properties PRIVATE algorithm_db AllenLib)
+target_link_libraries(default_properties PRIVATE AllenLib HostEventModel EventModel)
 
 # Generate allen standalone algorithms file
 add_custom_command(
-- 
GitLab


From d0841a1bda786d81e9679feeefe8bfb64df17baa Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Wed, 15 Mar 2023 16:21:15 +0100
Subject: [PATCH 06/49] Include shared libraries in CI artifacts

---
 integration/non_event_data/CMakeLists.txt | 2 +-
 scripts/ci/config/common-build.yaml       | 8 +++++---
 zmq/CMakeLists.txt                        | 2 +-
 3 files changed, 7 insertions(+), 5 deletions(-)

diff --git a/integration/non_event_data/CMakeLists.txt b/integration/non_event_data/CMakeLists.txt
index f795c8be812..a77bdd7a913 100644
--- a/integration/non_event_data/CMakeLists.txt
+++ b/integration/non_event_data/CMakeLists.txt
@@ -3,7 +3,7 @@
 ###############################################################################
 file(GLOB SOURCES "src/*.cpp")
 
-allen_add_host_library(NonEventData SHARED ${SOURCES})
+allen_add_host_library(NonEventData STATIC ${SOURCES})
 
 target_link_libraries(NonEventData PRIVATE
   HostClustering
diff --git a/scripts/ci/config/common-build.yaml b/scripts/ci/config/common-build.yaml
index 03c3f550b3b..99576dde6f3 100644
--- a/scripts/ci/config/common-build.yaml
+++ b/scripts/ci/config/common-build.yaml
@@ -18,11 +18,11 @@
       # build jobs (with tests)
       - LCG_SYSTEM:
           - "x86_64_v3-el9-gcc12" # FIXME gcc12->clang12 (?)
-        LCG_OPTIMIZATION: 
+        LCG_OPTIMIZATION:
           - "opt+g"
         OPTIONS:
           - BUILD_TESTING+ENABLE_CONTRACTS+TREAT_WARNINGS_AS_ERRORS
-      
+
       - LCG_SYSTEM: "x86_64_v3-el9-gcc12"
         LCG_OPTIMIZATION: "dbg"
         LCG_QUALIFIER: "cuda12_1"
@@ -50,7 +50,9 @@
       - input
       - build*/external/ParamFiles/*
       - build*/*Allen*
-      - build*/sequences/libStream_*.so
+      - build*/libHostCommon.so
+      - build*/zmq/libAllenZMQ.so
+      - build*/integration/non_event_data/libNonEventData.so
       - build*/*.json
       - build*/CTestTestfile.cmake
       - build*/test/unit_tests/unit_tests
diff --git a/zmq/CMakeLists.txt b/zmq/CMakeLists.txt
index 6aeebc60768..a08ab07808c 100644
--- a/zmq/CMakeLists.txt
+++ b/zmq/CMakeLists.txt
@@ -22,7 +22,7 @@ if (NOT STANDALONE)
   install(TARGETS AllenZMQ EXPORT Allen)
   target_link_libraries(AllenZMQ INTERFACE ZMQSvc LHCb::ZMQLib)
 else()
-  allen_add_host_library(AllenZMQ src/functions.cpp src/svc.cpp)
+  allen_add_host_library(AllenZMQ SHARED src/functions.cpp src/svc.cpp)
   target_include_directories(AllenZMQ PUBLIC
     $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/include>
     $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/serialize>
-- 
GitLab


From b960685e76f589696ad369bc6acb038aaf97a25b Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Wed, 15 Mar 2023 16:26:33 +0100
Subject: [PATCH 07/49] Fix HIP includes

---
 backend/CMakeLists.txt        | 1 +
 cmake/AllenDependencies.cmake | 2 ++
 2 files changed, 3 insertions(+)

diff --git a/backend/CMakeLists.txt b/backend/CMakeLists.txt
index 9e993091da3..72815986820 100644
--- a/backend/CMakeLists.txt
+++ b/backend/CMakeLists.txt
@@ -15,6 +15,7 @@ elseif(TARGET_DEVICE STREQUAL "CUDA")
 elseif(TARGET_DEVICE STREQUAL "HIP")
   list(APPEND backend_sources src/HIPBackend.cpp)
   target_link_libraries(AllenRuntime INTERFACE ${HIP_RUNTIME_LIB})
+  target_include_directories(AllenRuntime INTERFACE ${HIP_PATH}/include ${ROCM_PATH}/hsa/include)
 endif()
 
 target_include_directories(AllenRuntime INTERFACE
diff --git a/cmake/AllenDependencies.cmake b/cmake/AllenDependencies.cmake
index 1254d49d554..c0115ee7ddd 100644
--- a/cmake/AllenDependencies.cmake
+++ b/cmake/AllenDependencies.cmake
@@ -151,6 +151,8 @@ if (STANDALONE)
   elseif($ENV{ROOTSYS}) # ROOT was compiled with configure/make
     set(ALLEN_ROOT_CMAKE $ENV{ROOTSYS}/etc)
   endif()
+else()
+  set(Allen_PERSISTENT_OPTIONS TARGET_DEVICE)
 endif()
 
 find_package(ROOT REQUIRED HINTS ${ALLEN_ROOT_CMAKE} COMPONENTS RIO Core Cling Hist Tree)
-- 
GitLab


From cde975cceffee84cd7fd5a9884d3f49f126ab2cd Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Wed, 15 Mar 2023 20:05:32 +0100
Subject: [PATCH 08/49] Fix zmq library linking

---
 zmq/CMakeLists.txt | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/zmq/CMakeLists.txt b/zmq/CMakeLists.txt
index a08ab07808c..71f6200ebf3 100644
--- a/zmq/CMakeLists.txt
+++ b/zmq/CMakeLists.txt
@@ -13,7 +13,6 @@ if (NOT STANDALONE)
     Gaudi::GaudiKernel
     LHCb::ZMQLib
     LHCbEvent
-    ${ALLEN_ROOT_LIBRARIES}
     PRIVATE
     EventModel
     AllenCommon)
@@ -30,8 +29,7 @@ else()
     $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/svc>
     ${PROJECT_SOURCE_DIR}/main/include)
   target_compile_definitions(AllenZMQ PUBLIC STANDALONE)
-  target_link_libraries(AllenZMQ PUBLIC PkgConfig::zmq PkgConfig::sodium Boost::headers LHCbEvent)
-  target_include_directories(AllenZMQ SYSTEM PUBLIC ${ROOT_INCLUDE_DIRS})
+  target_link_libraries(AllenZMQ PUBLIC AllenCommon PkgConfig::zmq PkgConfig::sodium Boost::headers LHCbEvent)
 endif()
 
 function(zmq_program)
-- 
GitLab


From ebfd0874f979755c82c08d3002d4ec317a87ee9f Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Sat, 25 Mar 2023 20:46:05 +0100
Subject: [PATCH 09/49] First working version for generation of TCK and
 configuration from TCKs

---
 CMakeLists.txt                                |  10 +-
 Dumpers/BinaryDumpers/CMakeLists.txt          |   3 +-
 Dumpers/BinaryDumpers/options/allen.py        |  26 ++-
 Rec/Allen/python/Allen/config.py              |  12 +-
 Rec/Allen/python/Allen/tck.py                 | 205 ++++++++++++++++++
 Rec/Allen/python/AllenAlgorithms/__init__.py  |  18 ++
 Rec/Allen/scripts/create_hlt1_tck.py          |  33 +++
 cmake/AllenDependencies.cmake                 |   3 +
 cmake/GenerateConfiguration.cmake             |   4 +-
 configuration/CMakeLists.txt                  |  24 +-
 configuration/include/TCK.h                   |  18 ++
 .../AllenCore/AllenSequenceGenerator.py       |  19 +-
 .../AllenCore/allen_standalone_generator.py   |  41 ++--
 configuration/src/TCK.cpp                     |  98 +++++++++
 .../{parser => src}/default_properties.cpp    |   0
 .../selections/Hlt1/include/DecReporter.cuh   |   2 +-
 main/include/Allen.h                          |   1 +
 main/include/InputReader.h                    |   2 +-
 main/include/Provider.h                       |   7 +-
 main/src/Allen.cpp                            |  10 +-
 main/src/InputReader.cpp                      |   9 +-
 main/src/ProgramOptions.cpp                   |   1 -
 main/src/Provider.cpp                         |  93 +++++---
 main/src/main.cpp                             |  11 +-
 mdf/CMakeLists.txt                            |  46 +++-
 mdf/test/test_mep_banks.cpp                   |   4 +-
 26 files changed, 587 insertions(+), 113 deletions(-)
 create mode 100644 Rec/Allen/python/Allen/tck.py
 create mode 100644 Rec/Allen/python/AllenAlgorithms/__init__.py
 create mode 100644 Rec/Allen/scripts/create_hlt1_tck.py
 create mode 100644 configuration/include/TCK.h
 create mode 100644 configuration/src/TCK.cpp
 rename configuration/{parser => src}/default_properties.cpp (100%)

diff --git a/CMakeLists.txt b/CMakeLists.txt
index 832cc674b45..da011f93b9e 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -705,6 +705,7 @@ target_link_libraries(AllenLib
     Lumi
     Monitoring
     Muon
+    NonEventData
     PVChecking
     PV_beamline
     Plume
@@ -722,7 +723,6 @@ target_link_libraries(AllenLib
     track_matching
   PUBLIC
     Gear
-    NonEventData
     AllenCommon
     HostCommon
     AllenRuntime
@@ -735,7 +735,13 @@ target_link_libraries(AllenLib PRIVATE AllenFS)
 target_link_libraries(AllenCommon INTERFACE ROOT::RIO ROOT::Core ROOT::Cling ROOT::Hist ROOT::Tree TBB::tbb)
 
 if (NOT STANDALONE)
-  target_link_libraries(AllenLib PRIVATE AIDA::aida)
+  target_link_libraries(AllenLib
+    PRIVATE
+      AIDA::aida
+      Configuration)
+  target_link_libraries(AllenLib
+    PUBLIC
+      LHCb::HltServicesLib)
 endif()
 
 allen_add_executable(Allen main/src/main.cpp)
diff --git a/Dumpers/BinaryDumpers/CMakeLists.txt b/Dumpers/BinaryDumpers/CMakeLists.txt
index ec3c0078e1f..9539a97c8a8 100644
--- a/Dumpers/BinaryDumpers/CMakeLists.txt
+++ b/Dumpers/BinaryDumpers/CMakeLists.txt
@@ -9,12 +9,13 @@ gaudi_add_library(BinaryDumpers
                   SOURCES
                     src/lib/TestUTBoards.cpp
                     src/lib/Utils.cpp
+                  LINK PRIVATE
+                    NonEventData
                   LINK PUBLIC
                     AllenCommon
                     HostCommon
                     EventModel
                     AllenRuntime
-                    NonEventData
                     Gaudi::GaudiKernel
                     LHCb::DAQEventLib
                     LHCb::FTDAQLib
diff --git a/Dumpers/BinaryDumpers/options/allen.py b/Dumpers/BinaryDumpers/options/allen.py
index 82dd6772041..fe3f1fc7bbe 100755
--- a/Dumpers/BinaryDumpers/options/allen.py
+++ b/Dumpers/BinaryDumpers/options/allen.py
@@ -5,6 +5,7 @@
 import os
 import sys
 import zmq
+import re
 from Configurables import ApplicationMgr
 from Configurables import Gaudi__RootCnvSvc as RootCnvSvc
 
@@ -36,6 +37,7 @@ allen_dir = os.environ["ALLEN_PROJECT_ROOT"]
 interpreter.Declare("#include <Dumpers/IUpdater.h>")
 interpreter.Declare("#include <Allen/Allen.h>")
 interpreter.Declare("#include <Allen/Provider.h>")
+interpreter.Declare("#include <Allen/TCK.h>")
 interpreter.Declare("#include <Dumpers/PyAllenHelper.h>")
 
 sequence_default = os.path.join(os.environ['ALLEN_INSTALL_DIR'], 'constants',
@@ -190,12 +192,27 @@ extSvc = ["ToolSvc", "AuditorSvc", "ZeroMQSvc"]
 rootSvc = RootCnvSvc("RootCnvSvc", EnableIncident=1)
 ApplicationMgr().ExtSvc += ["Gaudi::IODataManager/IODataManager", rootSvc]
 
+# Get Allen JSON configuration
+sequence = os.path.expandvars(args.sequence)
+sequence_json = ""
+tck_option = re.compile(r"([^:]+):(0x[a-fA-F0-9]{8})")
+if (m := tck_option.match(sequence)):
+    repo = m.group(1)
+    tck = m.group(2)
+    sequence_json = str(gbl.Allen.sequence_from_git(repo, tck, gbl.std.cerr))
+    if not sequence_json:
+        print(f"Failed to obtain configuration for TCK {tck} from repository {repo}")
+        sys.exit(1)
+else:
+    with open(sequence) as f:
+        sequence_json = f.read()
+
 if args.mep:
     extSvc += ["AllenConfiguration", "MEPProvider"]
     from Configurables import MEPProvider, AllenConfiguration
 
     allen_conf = AllenConfiguration("AllenConfiguration")
-    allen_conf.JSON = args.sequence
+    allen_conf.JSON = sequence_json
     allen_conf.OutputLevel = 3
 
     mep_provider = MEPProvider()
@@ -255,8 +272,7 @@ if not args.binary_geometry:
                     'SIMCOND': options.conddb_tag,
                 }))
 
-if not args.binary_geometry:
-    bank_types = configured_bank_types(args.sequence)
+    bank_types = configured_bank_types(sequence_json)
     cf_node = setup_allen_non_event_data_service(
         allen_event_loop=True, bank_types=bank_types)
     config.update(configure(options, cf_node, make_odin=make_odin))
@@ -279,13 +295,13 @@ for flag, value in [("g", args.det_folder), ("params", params),
                     ("output-batch-size", args.output_batch_size),
                     ("m", args.reserve), ("v", args.verbosity),
                     ("p", args.print_memory),
-                    ("sequence", os.path.expandvars(args.sequence)),
+                    ("sequence", sequence),
                     ("s", args.slices), ("mdf", os.path.expandvars(args.mdf)),
                     ("disable-run-changes", int(not args.enable_run_changes)),
                     ("monitoring-save-period", args.mon_save_period),
                     ("monitoring-filename", args.mon_filename),
                     ("events-per-slice", args.events_per_slice),
-                    ("device", args.device), ("run-from-json", "1"),
+                    ("device", args.device),
                     ("enable-monitoring-printing",
                      args.enable_monitoring_printing),
                     ("register-monitoring-counters",
diff --git a/Rec/Allen/python/Allen/config.py b/Rec/Allen/python/Allen/config.py
index 742a96b1a56..014868ab603 100755
--- a/Rec/Allen/python/Allen/config.py
+++ b/Rec/Allen/python/Allen/config.py
@@ -93,13 +93,11 @@ def allen_detectors(allen_node):
 
 def configured_bank_types(sequence_json):
     bank_types = set()
-    with open(sequence_json) as json_file:
-        j = json.load(json_file)
-        for t, n, c in j["sequence"]["configured_algorithms"]:
-            props = j.get(n, {})
-            if c == "ProviderAlgorithm" and not bool(
-                    props.get('empty', False)):
-                bank_types.add(props['bank_type'])
+    for t, n, c in sequence_json["sequence"]["configured_algorithms"]:
+        props = sequence_json.get(n, {})
+        if c == "ProviderAlgorithm" and not bool(
+                props.get('empty', False)):
+            bank_types.add(props['bank_type'])
     return bank_types
 
 
diff --git a/Rec/Allen/python/Allen/tck.py b/Rec/Allen/python/Allen/tck.py
new file mode 100644
index 00000000000..f910df03b7b
--- /dev/null
+++ b/Rec/Allen/python/Allen/tck.py
@@ -0,0 +1,205 @@
+import json
+import os
+import sys
+import re
+import importlib
+import importlib.util
+from pathlib import Path
+from lxml import etree
+from hashlib import md5
+from subprocess import PIPE, run
+
+
+def get_allen_version():
+    if "ALLEN_INSTALL_DIR" in os.environ:
+        manifest_tree = etree.parse(
+            os.path.expandvars("${ALLEN_INSTALL_DIR}/manifest.xml")
+        )
+        allen_version = manifest_tree.find("project").get("version")
+        vs = allen_version.split(".")
+        version = "ALLEN_"
+        for prefix, idx in (("v", 0), ("r", 1), ("p", 2)):
+            if idx < len(vs):
+                version += prefix + vs[idx]
+        return version
+    else:
+        return None
+
+
+def sequence_to_tck(config: dict):
+    tck_config = {"Scheduler/" + k: v for k, v in config["sequence"].items()}
+
+    for alg_type, alg_name, alg_kind in config["sequence"]["configured_algorithms"]:
+        properties = {
+            k: v if type(v) == str else json.dumps(v)
+            for k, v in config[alg_name].items()
+        }
+        tck_config[f"{alg_kind}/{alg_type}/{alg_name}"] = {
+            "Name": alg_name,
+            "Kind": alg_kind,
+            "Type": alg_type,
+            "Properties": properties,
+        }
+
+    return tck_config
+
+
+def tck_to_sequence(config: dict):
+    scheduler_entries = [
+        k.split("/")[1] for k in config.keys() if k.startswith("Scheduler/")
+    ]
+    sequence_config = {
+        "sequence": {e: config["Scheduler/" + e] for e in scheduler_entries}
+    }
+
+    for alg_type, alg_name, alg_kind in config["sequence"]["configured_algorithms"]:
+        tck_props = config[f"{alg_kind}/{alg_type}/{alg_name}"]["Properties"]
+        properties = {}
+        for k, v in tck_props.items():
+            try:
+                properties[k] = json.loads(v)
+            except json.JSONDecodeError:
+                properties[k] = v
+        sequence_config[alg_name] = properties
+
+    return sequence_config
+
+
+def json_tck_db(
+    configuration: dict, sequence_type: str, label: str, allen_version: str, tck: int
+):
+    if allen_version is None:
+        allen_version = get_allen_version()
+    if type(allen_version) == str:
+        version = allen_version
+    else:
+        raise ValueError("failed to determine allen version")
+
+    if len(hex(tck)) != 10 or hex(tck)[2] != "1":
+        raise ValueError(
+            "Badly formatted TCK, it must be a 32 bit hex number with most significant byte set to 1"
+        )
+
+    tck_config = sequence_to_tck(configuration)
+    digest = md5(json.dumps(tck_config).encode("utf-8")).hexdigest()
+    manifest = {
+        digest: {
+            "TCK": hex(tck),
+            "Release2Type": {version: sequence_type},
+            "label": label,
+        }
+    }
+    return {"manifest": manifest, digest: tck_config}
+
+
+def sequence_from_python(python_file: Path, node_name="hlt1_node") -> dict:
+    from AllenCore.allen_standalone_generator import generate, build_sequence
+    from AllenCore.AllenSequenceGenerator import generate_json_configuration
+
+    module_name = python_file.stem
+
+    node = None
+    with generate.bind(noop=True):
+        if python_file.suffix == "":
+            # Load sequence module from installed sequence
+            mod = importlib.import_module(f"AllenSequences.{module_name}")
+        else:
+            # Load sequence module from python file
+            spec = importlib.util.spec_from_file_location(module_name, python_file)
+            mod = importlib.util.module_from_spec(spec)
+            sys.modules[module_name] = mod
+            spec.loader.exec_module(mod)
+
+        node = getattr(mod, node_name)
+
+    if node is None:
+        print(f"Failed to get {node_name} from sequence file {str(python_file)}")
+        return None
+
+    algorithms = build_sequence(node, verbose=False)
+    return generate_json_configuration(algorithms)
+
+
+def sequence_to_git(
+    repository: Path,
+    sequence: dict,
+    sequence_type: dict,
+    label: str,
+    tck: int,
+    allen_version=None,
+    write_intermediate=False,
+):
+    db = json_tck_db(sequence, sequence_type, label, allen_version, tck)
+    if write_intermediate:
+        with open(hex(tck) + ".json", "w") as f:
+            json.dump(db, f, indent=4, sort_keys=True)
+
+    p = run(
+        ["hlttck_cdb_listkeys", "--convert-to-git", "-", f"{str(repository)}"],
+        stdout=PIPE,
+        input=json.dumps(db),
+        encoding="ascii",
+    )
+
+    if p.returncode != 0:
+        print("Failed to convert sequence to git repo")
+
+
+def sequence_from_git(repository: Path, tck: str, use_bindings=True) -> str:
+    if use_bindings:
+        from ROOT import gROOT
+
+        gROOT.SetBatch(True)
+        from ROOT import gInterpreter as interpreter
+        from cppyy import gbl
+
+        if gbl.gSystem.Load("libAllenLib") != 0:
+            print("Failed to load libAllenLib.so")
+            return None
+        if not interpreter.Declare("#include <Allen/TCK.h>"):
+            print("Failed to load Allen/TCK.h")
+            return None
+        # Do this in two steps so the pythonized object that owns the
+        # sequence and info objects is kept long enough
+        r = gbl.Allen.sequence_from_git(str(repository), tck)
+        sequence, info = r
+        return (str(sequence), {str(k): str(getattr(info, k)) for k in ("digest", "tck", "release", "type", "label")})
+    else:
+        p = run(
+            [
+                "hlttck_cdb_listkeys",
+                f"--tck={tck}",
+                "--convert-to-json",
+                f"{str(repository)}",
+                "-",
+            ],
+            stdout=PIPE,
+        )
+        if p.returncode != 0:
+            print("Failed to convert configuration in git repo to JSON")
+            return None
+        tck_db = json.loads(p.stdout)
+        digest, manifest_entry = next(((k, m) for k, m in tck_db.items() if m["TCK"] == tck), None)
+        return json.dumps(tck_to_sequence(tck_db[digest]))
+
+
+def property_from_git(repository: Path, tck: str, algorithm: str, property=None):
+    p = run(["git", "-C", str(repository), "ls-tree", "-r", tck], stdout=PIPE, stderr=PIPE)
+    if p.returncode != 0:
+        print(f"Failed to list files for TCK {tck}")
+        return None
+
+    space_expr = re.compile(r"[\s]+")
+    files = p.stdout.decode().split("\n")
+    alg_lines = [space_expr.sub(' ', l).split(' ') for l in files if re.search(algorithm, l)]
+    algs = [tuple(line[2:]) for line in alg_lines if line[1] == 'blob']
+
+    r = {}
+    for git_id, name in algs:
+        p = run(["git", "-C", str(repository), "show", git_id], stdout=PIPE)
+        if p.returncode != 0:
+            print(f"Failed to list properties for {name} {git_id}")
+        else:
+            props = json.loads(p.stdout)["Properties"]
+            r[name] = props if property is None else props[property]
+    return r
diff --git a/Rec/Allen/python/AllenAlgorithms/__init__.py b/Rec/Allen/python/AllenAlgorithms/__init__.py
new file mode 100644
index 00000000000..1d3fc694cf9
--- /dev/null
+++ b/Rec/Allen/python/AllenAlgorithms/__init__.py
@@ -0,0 +1,18 @@
+import os
+
+__path__ = [
+    d
+    for d in [
+        os.path.realpath(
+            os.path.join(
+                os.path.dirname(__file__),
+                "..",
+                "..",
+                "code_generation",
+                "sequences",
+                "AllenAlgorithms",
+            )
+        )
+    ]
+    if os.path.exists(d)
+]
diff --git a/Rec/Allen/scripts/create_hlt1_tck.py b/Rec/Allen/scripts/create_hlt1_tck.py
new file mode 100644
index 00000000000..d5735a70a63
--- /dev/null
+++ b/Rec/Allen/scripts/create_hlt1_tck.py
@@ -0,0 +1,33 @@
+import argparse
+import json
+from Allen.tck import sequence_to_git, sequence_from_python
+from pathlib import Path
+
+parser = argparse.ArgumentParser()
+parser.add_argument("sequence", nargs=1)
+parser.add_argument("repository", nargs=1)
+parser.add_argument("tck", nargs=1)
+parser.add_argument("--python-hlt1-node", type=str, default="hlt1_node", dest="hlt1_node")
+parser.add_argument("--label", default="test", type=str)
+
+args = parser.parse_args()
+
+sequence_arg = Path(args.sequence[0])
+repository = Path(args.repository[0])
+tck = int(args.tck[0], 16)
+
+sequence = None
+if sequence_arg.suffix in (".py", ''):
+    from AllenCore.configuration_options import is_allen_standalone
+    is_allen_standalone.global_bind(standalone=True)
+
+    from AllenConf.persistency import make_dec_reporter
+    with make_dec_reporter.bind(TCK=tck):
+        sequence = sequence_from_python(sequence_arg, node_name=args.hlt1_node)
+elif sequence_arg.suffix == '.json':
+    with open(sequence_arg, "r") as sequence_file:
+        sequence = json.load(sequence_file)
+        dec_reporter_name = next((n for t, n, _ in sequence['sequence']['configured_algorithms'] if t == "dec_reporter::dec_reporter_t"), None)
+        sequence[dec_reporter_name]['tck'] = tck
+
+sequence_to_git(repository, sequence, sequence_arg.stem, args.label, tck)
diff --git a/cmake/AllenDependencies.cmake b/cmake/AllenDependencies.cmake
index c0115ee7ddd..e70e64dae09 100644
--- a/cmake/AllenDependencies.cmake
+++ b/cmake/AllenDependencies.cmake
@@ -104,6 +104,9 @@ find_package(umesimd REQUIRED)
 find_package(PkgConfig)
 pkg_check_modules(zmq libzmq REQUIRED IMPORTED_TARGET)
 pkg_check_modules(sodium libsodium REQUIRED IMPORTED_TARGET)
+if(NOT STANDALONE)
+  pkg_check_modules(git2 libgit2 REQUIRED IMPORTED_TARGET)  # for GitEntityResolver
+endif()
 
 if(WITH_Allen_PRIVATE_DEPENDENCIES)
   # We need a Python 3 interpreter
diff --git a/cmake/GenerateConfiguration.cmake b/cmake/GenerateConfiguration.cmake
index 852b7394ad2..caf41bd2974 100644
--- a/cmake/GenerateConfiguration.cmake
+++ b/cmake/GenerateConfiguration.cmake
@@ -15,7 +15,7 @@ set(ALLEN_PARSER_DIR ${PROJECT_SEQUENCE_DIR}/parser)
 set(ALGORITHMS_OUTPUTFILE ${ALLEN_ALGORITHMS_DIR}/allen_standalone_algorithms.py)
 set(PARSED_ALGORITHMS_OUTPUTFILE ${CODE_GENERATION_DIR}/parsed_algorithms.pickle)
 set(ALGORITHMS_GENERATION_SCRIPT ${PROJECT_SOURCE_DIR}/configuration/parser/ParseAlgorithms.py)
-set(DEFAULT_PROPERTIES_SRC ${PROJECT_SOURCE_DIR}/configuration/parser/default_properties.cpp)
+set(DEFAULT_PROPERTIES_SRC ${PROJECT_SOURCE_DIR}/configuration/src/default_properties.cpp)
 
 include_guard(GLOBAL)
 
@@ -26,7 +26,7 @@ file(MAKE_DIRECTORY ${ALLEN_ALGORITHMS_DIR})
 
 # We will invoke the parser a few times, set its required environment in a variable
 # Add the scripts folder only if we are invoking with a CMAKE_TOOLCHAIN_FILE
-if(CMAKE_TOOLCHAIN_FILE) 
+if(CMAKE_TOOLCHAIN_FILE)
   set(PARSER_ENV PYTHONPATH=$ENV{PYTHONPATH}:${PROJECT_SOURCE_DIR}/scripts LD_LIBRARY_PATH=${LIBCLANG_LIBDIR}:$ENV{LD_LIBRARY_PATH})
 else()
   set(PARSER_ENV PYTHONPATH=$ENV{PYTHONPATH}:${LIBCLANG_LIBDIR}/python${Python_VERSION_MAJOR}.${Python_VERSION_MINOR}/site-packages LD_LIBRARY_PATH=${LIBCLANG_LIBDIR}:$ENV{LD_LIBRARY_PATH})
diff --git a/configuration/CMakeLists.txt b/configuration/CMakeLists.txt
index 5494f64a93b..8b2fe04c2d0 100644
--- a/configuration/CMakeLists.txt
+++ b/configuration/CMakeLists.txt
@@ -12,8 +12,24 @@ if (NOT STANDALONE)
     add_test(NAME Allen:pytest
       COMMAND ${CMAKE_BINARY_DIR}/run bash ${CMAKE_CURRENT_SOURCE_DIR}/tests/test_configuration.sh ${CMAKE_CURRENT_SOURCE_DIR})
     gaudi_install(PYTHON)
-endif()
 
-# Configuration interface library
-add_library(Configuration INTERFACE)
-target_include_directories(Configuration INTERFACE ${PROJECT_BINARY_DIR}/configuration/sequences)
+    # Configuration library
+    allen_add_host_library(Configuration
+      STATIC
+        src/TCK.cpp)
+    target_include_directories(Configuration
+      PUBLIC
+      $<BUILD_INTERFACE:${CMAKE_BINARY_DIR}/include>  # for ALLEN_VERSION.h
+      include)
+
+    # Link privately to HltServicesLib here, it will be propagated by
+    # linking AllenLib to it as well
+    target_link_libraries(Configuration
+      PUBLIC
+        Boost::headers
+        LHCb::HltServicesLib)
+
+    # Install the header so it can be used in AllenOnline
+    install(FILES include/TCK.h
+            DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/Allen)
+endif()
diff --git a/configuration/include/TCK.h b/configuration/include/TCK.h
new file mode 100644
index 00000000000..f4289ec13e9
--- /dev/null
+++ b/configuration/include/TCK.h
@@ -0,0 +1,18 @@
+#pragma once
+
+#include <string>
+#include <iostream>
+#include <HltServices/TCKUtils.h>
+#include <ALLEN_VERSION.h>
+
+namespace Allen {
+
+namespace TCK {
+  std::string current_release();
+}
+
+std::string tck_from_git(std::string const& repo, std::string const& tck );
+
+std::tuple<std::string, LHCb::TCK::Info> sequence_from_git(std::string const& repo, std::string const& tck );
+
+}
diff --git a/configuration/python/AllenCore/AllenSequenceGenerator.py b/configuration/python/AllenCore/AllenSequenceGenerator.py
index 58e89e94e5c..cabd0b6d78b 100644
--- a/configuration/python/AllenCore/AllenSequenceGenerator.py
+++ b/configuration/python/AllenCore/AllenSequenceGenerator.py
@@ -1,9 +1,7 @@
 ###############################################################################
 # (c) Copyright 2021 CERN for the benefit of the LHCb Collaboration           #
 ###############################################################################
-from collections import OrderedDict
 from PyConf.dataflow import GaudiDataHandle
-from json import dump
 
 
 def clean_prefix(s):
@@ -22,13 +20,13 @@ def add_deps_and_transitive_deps(dep, arg_deps, parameter_dependencies_set):
             parameter_dependencies_set.add(transitive_dep)
 
 
-def generate_json_configuration(algorithms, filename):
+def generate_json_configuration(algorithms):
     """Generates runtime configuration (JSON)."""
     sequence_json = {}
     # Add properties for each algorithm
     for algorithm in algorithms:
+        sequence_json[algorithm.name] = {str(k): v for k, v in algorithm.type.getDefaultProperties().items() if not isinstance(v, GaudiDataHandle)}
         if len(algorithm.properties):
-            sequence_json[algorithm.name] = {}
             for k, v in algorithm.properties.items():
                 sequence_json[algorithm.name][str(k)] = v
 
@@ -118,15 +116,4 @@ def generate_json_configuration(algorithms, filename):
         "configured_sequence_arguments": configured_sequence_arguments,
         "argument_dependencies": argument_dependencies
     }
-    with open(filename, 'w') as outfile:
-        dump(sequence_json, outfile, indent=4, sort_keys=True)
-
-
-def generate_allen_sequence(algorithms,
-                            json_configuration_filename="Sequence.json"):
-    """Generates an Allen valid sequence.
-
-    * json_configuration_filename: JSON configuration that can be changed at runtime to change
-                                   values of properties.
-    """
-    generate_json_configuration(algorithms, json_configuration_filename)
+    return sequence_json
diff --git a/configuration/python/AllenCore/allen_standalone_generator.py b/configuration/python/AllenCore/allen_standalone_generator.py
index fee5a3041bb..32059a23cd3 100644
--- a/configuration/python/AllenCore/allen_standalone_generator.py
+++ b/configuration/python/AllenCore/allen_standalone_generator.py
@@ -10,13 +10,13 @@
 ###############################################################################
 from AllenCore.cftree_ops import get_execution_list_for, BoolNode
 from AllenCore.event_list_utils import add_event_list_combiners
-from AllenCore.AllenSequenceGenerator import generate_allen_sequence
+from AllenCore.AllenSequenceGenerator import generate_json_configuration
 from AllenCore.allen_benchmarks import benchmark_weights, benchmark_efficiencies
 from AllenCore.algorithms import host_init_event_list_t
 from PyConf.components import Algorithm
 from PyConf.filecontent_metadata import flush_key_registry
-from os.path import exists
-import contextlib
+from PyConf.tonic import configurable
+from json import dump
 
 
 def make_algorithm(alg_type, name, **kwargs):
@@ -59,16 +59,16 @@ def initialize_event_lists(**kwargs):
     return initialize_lists
 
 
-def generate(node, json_configuration_filename="Sequence.json", verbose=True):
-    """Generates an Allen sequence out of a root node."""
-    if type(node) == dict:
-        node = node['control_flow_node']
-    with flush_key_registry() :
-        best_order, score = get_execution_list_for(node)
+def build_sequence(root, verbose=True):
+    if type(root) == dict:
+        root = root['control_flow_node']
+
+    with flush_key_registry():
+        best_order, score = get_execution_list_for(root)
         final_seq = add_event_list_combiners(best_order)
 
-        if verbose:
-            print("Generated sequence represented as algorithms with execution masks:")
+    if verbose:
+        print("Generated sequence represented as algorithms with execution masks:")
         for alg, mask_in in final_seq:
             if mask_in == None:
                 mask_in_str = ""
@@ -76,6 +76,19 @@ def generate(node, json_configuration_filename="Sequence.json", verbose=True):
                 mask_in_str = f" in:{str(mask_in).split('/')[1]}"
             elif isinstance(mask_in, BoolNode):
                 mask_in_str = f" in:{mask_in}"
-            if verbose:
-                print(f"  {alg}{mask_in_str}")
-        return generate_allen_sequence([alg for (alg, _) in final_seq], json_configuration_filename)
+            print(f"  {alg}{mask_in_str}")
+
+    return [alg for (alg, _) in final_seq]
+
+
+@configurable
+def generate(root, json_configuration_filename="Sequence.json", noop=False, verbose=True):
+    """Generates an Allen sequence out of a root node."""
+    if noop:
+        return
+
+    algorithms = build_sequence(root, verbose=verbose)
+
+    sequence_json = generate_json_configuration(algorithms)
+    with open(json_configuration_filename, 'w') as outfile:
+        dump(sequence_json, outfile, indent=4, sort_keys=True)
diff --git a/configuration/src/TCK.cpp b/configuration/src/TCK.cpp
new file mode 100644
index 00000000000..fb349e5b799
--- /dev/null
+++ b/configuration/src/TCK.cpp
@@ -0,0 +1,98 @@
+#include "boost/algorithm/string/classification.hpp"
+#include "boost/algorithm/string/predicate.hpp"
+#include "boost/algorithm/string/split.hpp"
+#include <git2.h>
+#include <string>
+#include <nlohmann/json.hpp>
+
+#include <HltServices/TCKUtils.h>
+#include <TCK.h>
+
+std::string Allen::TCK::current_release() {
+  return "ALLEN_v" + std::to_string(ALLEN_MAJOR_VERSION) + "r" + std::to_string(ALLEN_MINOR_VERSION);
+}
+
+std::string Allen::tck_from_git(std::string const& repo, std::string const& tck ) {
+
+  using LHCb::TCK::Git::check;
+
+  git_libgit2_init();
+  git_repository* git_repo = nullptr;
+  check( git_repository_open_bare( &git_repo, repo.c_str() ) );
+  try {
+    auto tck_config = LHCb::TCK::Git::extract_json( git_repo, tck );
+    git_libgit2_shutdown();
+    return tck_config;
+  } catch ( std::runtime_error const& e ) {
+    git_libgit2_shutdown();
+    throw std::runtime_error{"Failed to extract JSON configuration for TCK " + tck + " from " + repo + ":" + e.what()};
+  }
+}
+
+std::tuple<std::string, LHCb::TCK::Info> Allen::sequence_from_git(std::string const& repo, std::string const& tck ) {
+
+  auto tck_config = tck_from_git( repo, tck );
+  if (tck_config.empty()) {
+    return {tck_config, {}};
+  }
+
+  auto tck_db = nlohmann::json::parse(tck_config);
+  nlohmann::json manifest = tck_db["manifest"];
+
+  // The configuration JSON has a digest as key. Look at the
+  // "manifest" part to find the digest. The manifests are also
+  // indexed by digest, so loop over them until the one is found that
+  // has the right TCK entry.
+  auto items = tck_db.items();
+  auto json_tck = std::find_if( items.begin(), items.end(), [&manifest, tck] (auto const& e) {
+    return e.key() != "manifest" && manifest.count(e.key()) && manifest[e.key()]["TCK"] == tck;
+  });
+
+
+  nlohmann::json sequence;
+
+  std::vector<std::string> tokens;
+
+  for (auto const& [entry, config] : json_tck.value().items()) {
+    tokens.clear();
+    boost::algorithm::split( tokens, entry, boost::algorithm::is_any_of( "/" ) );
+    if (tokens[0] == "Scheduler") {
+      // Put special "sequence" items where they are expected
+      sequence["sequence"][tokens[1]] = config;
+    }
+    else if (tokens.size() == 3) {
+      // The rest is algorithm configuration. In the TCK all property
+      // values are stored as strings, but Allen expects parsed JSON,
+      // so convert between the two representations here. Some
+      // properties are strings and won't parse, so we have to check
+      // that.
+      auto props = config["Properties"];
+      nlohmann::json sequence_props;
+
+      for (auto const& [prop_key, prop_val] : props.items()) {
+        auto s = prop_val.get<std::string>();
+        // Disable exceptions when parsing and test is_discarded to
+        // check if the json is valid. If it's not valid, store as a
+        // string
+        auto j = nlohmann::json::parse(s, nullptr, false);
+        if (j.is_discarded()) {
+          sequence_props[prop_key] = s;
+        }
+        else {
+          sequence_props[prop_key] = j;
+        }
+      }
+
+      std::string const& alg_name = tokens[2];
+      sequence[alg_name] = sequence_props;
+    }
+  }
+
+  // Get the TCK metadata from the manifest, which is indexed by digest.
+  auto const& digest = json_tck.key();
+  auto const& m = manifest[digest];
+  auto [release, type] = m["Release2Type"].items().begin();
+  LHCb::TCK::Info info{digest, m["TCK"], release, type, m["label"]};
+
+  return {sequence.dump(), info};
+}
diff --git a/configuration/parser/default_properties.cpp b/configuration/src/default_properties.cpp
similarity index 100%
rename from configuration/parser/default_properties.cpp
rename to configuration/src/default_properties.cpp
diff --git a/device/selections/Hlt1/include/DecReporter.cuh b/device/selections/Hlt1/include/DecReporter.cuh
index 207e10c33f3..5eef43d9243 100644
--- a/device/selections/Hlt1/include/DecReporter.cuh
+++ b/device/selections/Hlt1/include/DecReporter.cuh
@@ -35,7 +35,7 @@ namespace dec_reporter {
   private:
     Property<block_dim_t> m_block_dim {this, {{64, 1, 1}}};
     Property<tck_t> m_tck {this, 0};
-    Property<encoding_key_t> m_key {this, 0xDEADBEEF}; // FIXME
+    Property<encoding_key_t> m_key {this, 0xDEADBEEF};
     Property<task_id_t> m_taskID {this, 1};
   };
 } // namespace dec_reporter
diff --git a/main/include/Allen.h b/main/include/Allen.h
index d4576d3fb21..234ba14adf3 100644
--- a/main/include/Allen.h
+++ b/main/include/Allen.h
@@ -15,6 +15,7 @@ struct Constants;
 
 int allen(
   std::map<std::string, std::string> options,
+  std::string_view configuration,
   Allen::NonEventData::IUpdater* updater,
   std::shared_ptr<IInputProvider> input_provider,
   OutputHandler* output_handler,
diff --git a/main/include/InputReader.h b/main/include/InputReader.h
index 1cfb3a53d5c..f52979a0e11 100644
--- a/main/include/InputReader.h
+++ b/main/include/InputReader.h
@@ -76,7 +76,7 @@ private:
 };
 
 struct ConfigurationReader {
-  ConfigurationReader(const std::string& file_name);
+  ConfigurationReader(std::string_view configuration);
   ConfigurationReader(const std::map<std::string, std::map<std::string, nlohmann::json>>& params) : m_params(params) {}
 
   std::map<std::string, nlohmann::json> params(std::string key) const
diff --git a/main/include/Provider.h b/main/include/Provider.h
index 62b800907de..94f8576532d 100644
--- a/main/include/Provider.h
+++ b/main/include/Provider.h
@@ -34,14 +34,15 @@ namespace Allen {
 
   std::tuple<bool, bool> velo_decoding_type(const ConfigurationReader& configuration_reader);
 
-  std::tuple<std::string, bool> sequence_conf(std::map<std::string, std::string> const& options);
+  std::string sequence_conf(std::map<std::string, std::string> const& options);
 
-  std::shared_ptr<IInputProvider> make_provider(std::map<std::string, std::string> const& options);
+  std::shared_ptr<IInputProvider> make_provider(std::map<std::string, std::string> const& options, std::string_view configuration);
 
   std::unique_ptr<OutputHandler> output_handler(
     IInputProvider* input_provider,
     IZeroMQSvc* zmq_svc,
-    std::map<std::string, std::string> const& options);
+    std::map<std::string, std::string> const& options,
+    std::string_view configuration);
 
   Allen::IOConf io_configuration(
     unsigned number_of_slices,
diff --git a/main/src/Allen.cpp b/main/src/Allen.cpp
index 700f4ff8294..1f1f87d0cc9 100644
--- a/main/src/Allen.cpp
+++ b/main/src/Allen.cpp
@@ -85,6 +85,7 @@ namespace {
  */
 int allen(
   std::map<std::string, std::string> options,
+  std::string_view config,
   Allen::NonEventData::IUpdater* updater,
   std::shared_ptr<IInputProvider> input_provider,
   OutputHandler* output_handler,
@@ -207,7 +208,6 @@ int allen(
   logger::setVerbosity(verbosity);
 
   auto io_conf = Allen::io_configuration(n_slices, n_repetitions, number_of_threads);
-  auto const [json_configuration_file, run_from_json] = Allen::sequence_conf(options);
 
   // Set device for main thread
   auto [device_set, device_name, device_memory_alignment] = Allen::set_device(device_id, 0);
@@ -240,7 +240,7 @@ int allen(
   }
   //
   // Load constant parameters from JSON
-  configuration_reader = std::make_unique<ConfigurationReader>(json_configuration_file);
+  configuration_reader = std::make_unique<ConfigurationReader>(config);
 
   // Get the path to the parameter folder: different for standalone and Gaudi build
   // Only in case of standalone gitlab CI pipepline the parameters folder path is passed as runtime argument
@@ -345,10 +345,8 @@ int allen(
     sequence->configure_algorithms(configuration);
   }
 
-  if (run_from_json) {
-    // Print configured sequence
-    streams.front()->print_configured_sequence();
-  }
+  // Print configured sequence
+  streams.front()->print_configured_sequence();
 
   // Interrogate stream configured sequence for validation algorithms
   const auto sequence_contains_validation_algorithms = streams.front()->contains_validation_algorithms();
diff --git a/main/src/InputReader.cpp b/main/src/InputReader.cpp
index d7d22700873..cdaa76d7f12 100644
--- a/main/src/InputReader.cpp
+++ b/main/src/InputReader.cpp
@@ -117,14 +117,9 @@ TwoTrackMVAModelReader::TwoTrackMVAModelReader(const std::string& file_name)
   m_n_layers = m_layer_sizes.size();
 }
 
-ConfigurationReader::ConfigurationReader(const std::string& file_name)
+ConfigurationReader::ConfigurationReader(std::string_view configuration)
 {
-  if (!exists_test(file_name)) {
-    throw StrException("Configuration JSON file " + file_name + " does not exist.");
-  }
-  std::ifstream i(file_name);
-  nlohmann::json j;
-  i >> j;
+  nlohmann::json j = nlohmann::json::parse(configuration);
   for (auto& el : j.items()) {
     std::string component = el.key();
     if (component == "sequence") {
diff --git a/main/src/ProgramOptions.cpp b/main/src/ProgramOptions.cpp
index b2327f67416..3e65b953585 100644
--- a/main/src/ProgramOptions.cpp
+++ b/main/src/ProgramOptions.cpp
@@ -53,7 +53,6 @@ std::vector<ProgramOption> allen_program_options()
     {{"v", "verbosity"}, "verbosity [0-5]", "3", "info"},
     {{"p", "print-memory"}, "print memory usage", "0"},
     {{"sequence"}, "sequence to run", ""},
-    {{"run-from-json"}, "run from json configuration file", "0"},
     {{"output-file"}, "Write selected event to output file", ""},
     {{"output-batch-size"}, "Write output in batches of N events", "10"},
     {{"device"}, "select device to use", "0"},
diff --git a/main/src/Provider.cpp b/main/src/Provider.cpp
index 52c5f5a61d2..3ace45679c4 100644
--- a/main/src/Provider.cpp
+++ b/main/src/Provider.cpp
@@ -2,6 +2,8 @@
  * (c) Copyright 2018-2020 CERN for the benefit of the LHCb Collaboration      *
 \*****************************************************************************/
 #include <string>
+#include <iostream>
+#include <regex>
 
 #include <MDFProvider.h>
 #include <Provider.h>
@@ -14,6 +16,10 @@
 #include <FileSystem.h>
 #include <InputReader.h>
 
+#ifndef ALLEN_STANDALONE
+#include <TCK.h>
+#endif
+
 std::tuple<bool, bool> Allen::velo_decoding_type(const ConfigurationReader& configuration_reader)
 {
   bool veloSP = false;
@@ -32,53 +38,79 @@ std::tuple<bool, bool> Allen::velo_decoding_type(const ConfigurationReader& conf
   return {veloSP, retina};
 }
 
-std::tuple<std::string, bool> Allen::sequence_conf(std::map<std::string, std::string> const& options)
+std::string Allen::sequence_conf(std::map<std::string, std::string> const& options)
 {
   static bool generated = false;
   std::string json_configuration_file = "Sequence.json";
   // Sequence to run
   std::string sequence = "hlt1_pp_default";
 
-  bool run_from_json = false;
-
   for (auto const& entry : options) {
     auto [flag, arg] = entry;
     if (flag_in(flag, {"sequence"})) {
       sequence = arg;
     }
-    else if (flag_in(flag, {"run-from-json"})) {
-      run_from_json = atoi(arg.c_str());
-    }
   }
 
-  // Determine configuration
-  if (run_from_json) {
-    if (fs::exists(sequence)) {
-      json_configuration_file = sequence;
+  std::regex tck_option{"([^:]+):(0x[a-fA-F0-9]{8})"};
+  std::smatch tck_match;
+  if ( std::regex_match(sequence, tck_match, tck_option) ) {
+#ifndef ALLEN_STANDALONE
+
+    // Load the configuration for the given TCK from the git repository and report its metadata below
+
+    auto repo = tck_match.str(1);
+    auto tck = tck_match.str(2);
+    std::string config;
+    LHCb::TCK::Info info;
+    try {
+      std::tie(config, info) = Allen::sequence_from_git( repo, tck );
+    } catch ( std::runtime_error const& e ) {
+      throw std::runtime_error{"Failed to obtain sequence for TCK " + tck + " from repository at " + repo + ":" + e.what() };
+    }
+    if ( config.empty() ) {
+      throw std::runtime_error{"Failed to obtain sequence for TCK " + tck + " from repository at " + repo };
     }
-    else {
-      json_configuration_file = sequence + ".json";
+    else if ( info.release != TCK::current_release() ) {
+      throw std::runtime_error{"TCK " + tck + " is compatible with release " + info.release + " and not current release " + TCK::current_release() };
     }
+    info_cout << "TCK " << tck << " loaded " << info.type << " sequence from git with label " << info.label << "\n";
+    return config;
+#else
+    throw std::runtime_error{"Loading configuration from TCK is not supported in standalone builds"};
+#endif
   }
-  else if (!generated) {
+  else {
+    // Determine configuration
+    if (sequence.size() > 5 && sequence.substr(sequence.size() - 5, std::string::npos) == ".json") {
+      json_configuration_file = sequence;
+    }
+    else if (!generated) {
 #ifdef ALLEN_STANDALONE
-    const std::string allen_configuration_options = "--no-register-keys";
+      const std::string allen_configuration_options = "--no-register-keys";
 #else
-    const std::string allen_configuration_options = "";
+      const std::string allen_configuration_options = "";
 #endif
 
-    int error = system(
-      ("PYTHONPATH=code_generation/sequences:$PYTHONPATH python3 ../configuration/python/AllenCore/gen_allen_json.py " +
-       allen_configuration_options + " --seqpath ../configuration/python/AllenSequences/" + sequence + ".py ")
-        .c_str());
-    if (error) {
-      throw std::runtime_error("sequence generation failed");
+      int error = system(
+                         ("PYTHONPATH=code_generation/sequences:$PYTHONPATH python3 ../configuration/python/AllenCore/gen_allen_json.py " +
+                          allen_configuration_options + " --seqpath ../configuration/python/AllenSequences/" + sequence + ".py > /dev/null")
+                         .c_str());
+      if (error) {
+        throw std::runtime_error{ "sequence generation failed" };
+      }
+      info_cout << "\n";
+      generated = true;
     }
-    info_cout << "\n";
-    generated = true;
-  }
 
-  return {json_configuration_file, run_from_json};
+    std::string config;
+    std::ifstream config_file{json_configuration_file};
+    if (!config_file.is_open()) {
+      throw std::runtime_error{ "failed to open sequence configuration file " + json_configuration_file };
+    }
+    config.assign(std::istreambuf_iterator<char> {config_file}, std::istreambuf_iterator<char> {});
+    return config;
+  }
 }
 
 Allen::IOConf Allen::io_configuration(
@@ -118,7 +150,7 @@ Allen::IOConf Allen::io_configuration(
   return io_conf;
 }
 
-std::shared_ptr<IInputProvider> Allen::make_provider(std::map<std::string, std::string> const& options)
+std::shared_ptr<IInputProvider> Allen::make_provider(std::map<std::string, std::string> const& options, std::string_view configuration)
 {
 
   unsigned number_of_slices = 0;
@@ -193,8 +225,7 @@ std::shared_ptr<IInputProvider> Allen::make_provider(std::map<std::string, std::
   setenv("CUDA_DEVICE_MAX_CONNECTIONS", std::to_string(cuda_device_max_connections).c_str(), 1);
 #endif
 
-  auto const [json_file, run_from_json] = Allen::sequence_conf(options);
-  ConfigurationReader configuration_reader {json_file};
+  ConfigurationReader configuration_reader {configuration};
 
   auto io_conf = io_configuration(number_of_slices, n_repetitions, number_of_threads, true);
 
@@ -254,11 +285,11 @@ std::shared_ptr<IInputProvider> Allen::make_provider(std::map<std::string, std::
 std::unique_ptr<OutputHandler> Allen::output_handler(
   IInputProvider* input_provider,
   IZeroMQSvc* zmq_svc,
-  std::map<std::string, std::string> const& options)
+  std::map<std::string, std::string> const& options,
+  std::string_view config)
 {
   std::string output_file;
   size_t output_batch_size = 10;
-  auto const [json_file, run_from_json] = Allen::sequence_conf(options);
 
   for (auto const& entry : options) {
     auto const [flag, arg] = entry;
@@ -277,7 +308,7 @@ std::unique_ptr<OutputHandler> Allen::output_handler(
 
   // Load constant parameters from JSON
   size_t n_lines = 0;
-  ConfigurationReader configuration_reader {json_file};
+  ConfigurationReader configuration_reader {config};
   auto const& configuration = configuration_reader.params();
   auto conf_it = configuration.find("gather_selections");
   if (conf_it != configuration.end()) {
diff --git a/main/src/main.cpp b/main/src/main.cpp
index dfb79e14640..be43237632c 100644
--- a/main/src/main.cpp
+++ b/main/src/main.cpp
@@ -122,9 +122,14 @@ int main(int argc, char* argv[])
 
   auto zmqSvc = makeZmqSvc();
 
+  auto configuration = Allen::sequence_conf(allen_options);
+
   Allen::NonEventData::Updater updater {allen_options};
-  auto input_provider = Allen::make_provider(allen_options);
-  auto output_handler = Allen::output_handler(input_provider.get(), zmqSvc, allen_options);
+
+  auto input_provider = Allen::make_provider(allen_options, configuration);
   if (!input_provider) return -1;
-  return allen(std::move(allen_options), &updater, std::move(input_provider), output_handler.get(), zmqSvc, "");
+
+  auto output_handler = Allen::output_handler(input_provider.get(), zmqSvc, allen_options, configuration);
+
+  return allen(std::move(allen_options), configuration, &updater, std::move(input_provider), output_handler.get(), zmqSvc, "");
 }
diff --git a/mdf/CMakeLists.txt b/mdf/CMakeLists.txt
index 69248dfd6fb..b387303b207 100644
--- a/mdf/CMakeLists.txt
+++ b/mdf/CMakeLists.txt
@@ -41,7 +41,11 @@ target_include_directories (mdf PUBLIC
   $<BUILD_INTERFACE:${PROJECT_SOURCE_DIR}/main/include>)
 target_link_libraries(mdf PUBLIC LHCbEvent)
 
-set(lhcb_public_headers include/write_mdf.hpp include/read_mdf.hpp include/mdf_header.hpp daq40/sourceid.h)
+set(lhcb_public_headers
+  include/write_mdf.hpp
+  include/read_mdf.hpp
+  include/mdf_header.hpp
+  daq40/sourceid.h)
 target_include_directories(LHCbEvent INTERFACE $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/daq40>)
 
 if (STANDALONE)
@@ -52,16 +56,28 @@ if (STANDALONE)
     ${PROJECT_BINARY_DIR}/sequences/Gaudi/GaudiKernel/include)
   target_compile_definitions(LHCbEvent INTERFACE ODIN_WITHOUT_GAUDI)
 else()
-  target_link_libraries(mdf PUBLIC Boost::filesystem Boost::thread Boost::regex)
-  target_link_libraries(LHCbEvent INTERFACE Gaudi::GaudiKernel LHCb::DAQEventLib LHCb::LumiEventLib)
+  target_link_libraries(mdf
+    PUBLIC
+      Boost::filesystem
+      Boost::thread
+      Boost::regex)
+  target_link_libraries(LHCbEvent
+    INTERFACE
+      Gaudi::GaudiKernel
+      LHCb::DAQEventLib
+      LHCb::LumiEventLib)
 endif()
 
 set_property(TARGET LHCbEvent PROPERTY PUBLIC_HEADER ${lhcb_public_headers})
 install(TARGETS LHCbEvent EXPORT Allen
         PUBLIC_HEADER DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/Allen)
 
-target_link_libraries(
-  mdf PRIVATE Gear Backend AllenCommon LHCbEvent)
+target_link_libraries(mdf
+  PRIVATE
+    Gear
+    Backend
+    AllenCommon
+    LHCbEvent)
 
 # These tests don't need CUDA
 remove_definitions(-DTARGET_DEVICE_CUDA)
@@ -74,7 +90,14 @@ function(test_program)
 
   find_package(Threads REQUIRED)
 
-  target_link_libraries(${test_name} PRIVATE Transpose Threads::Threads EventModel AllenZMQ Catch2::Catch2 Boost::program_options)
+  target_link_libraries(${test_name}
+    PRIVATE
+      Transpose
+      Threads::Threads
+      EventModel
+      AllenZMQ
+      Catch2::Catch2
+      Boost::program_options)
   target_compile_definitions(${test_name} PRIVATE ODIN_WITHOUT_GAUDI)
   if (NOT STANDALONE)
     find_package(fmt REQUIRED)
@@ -90,7 +113,15 @@ if (BUILD_TESTING)
     ${PROJECT_SOURCE_DIR}/main/src/Provider.cpp
     ${PROJECT_SOURCE_DIR}/main/src/ZMQOutputSender.cpp)
   target_compile_definitions(Transpose PRIVATE ODIN_WITHOUT_GAUDI)
-  target_link_libraries(Transpose PUBLIC HostCommon Backend mdf LHCbEvent AllenZMQ EventModel AllenFS)
+  target_link_libraries(Transpose
+    PUBLIC
+      HostCommon
+      Backend
+      mdf
+      LHCbEvent
+      AllenZMQ
+      EventModel
+      AllenFS)
 
   test_program(NAME mdf_test_read SOURCE test/test_read.cpp)
   test_program(NAME mdf_bench_read SOURCE test/bench_read.cpp)
@@ -100,6 +131,7 @@ if (BUILD_TESTING)
   if (NOT STANDALONE)
     test_program(NAME mep_test_banks SOURCE test/test_mep_banks.cpp)
     target_link_libraries(allen_mep_test_banks PRIVATE Gaudi::GaudiKernel)
+    target_link_libraries(Transpose PUBLIC Configuration)
   endif()
 
 endif()
diff --git a/mdf/test/test_mep_banks.cpp b/mdf/test/test_mep_banks.cpp
index 7905cd4aa47..13c061434d4 100644
--- a/mdf/test/test_mep_banks.cpp
+++ b/mdf/test/test_mep_banks.cpp
@@ -212,11 +212,11 @@ int main(int argc, char* argv[])
                                                   {"v", std::to_string(s_config.debug ? 4 : 3)},
                                                   {"mdf", s_config.mdf_files},
                                                   {"sequence", json_file.string()},
-                                                  {"run-from-json", "1"},
                                                   {"events-per-slice", std::to_string(s_config.eps)},
                                                   {"disable-run-changes", "1"}};
 
-    mdf = Allen::make_provider(options);
+    auto configuration = Allen::sequence_conf(options);
+    mdf = Allen::make_provider(options, configuration);
     if (!mdf) {
       std::cerr << "Failed to obtain MDFProvider\n";
       return 1;
-- 
GitLab


From e14f72596c1cc3f58989c36d8fba2241fcb2d625 Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Sat, 25 Mar 2023 23:23:54 +0100
Subject: [PATCH 10/49] Use pybind11 to get python bindings for TCK functions
 instead of PyROOT

---
 Rec/Allen/python/Allen/__init__.py    |  16 +++
 Rec/Allen/python/Allen/tck.py         |  51 +++----
 cmake/AllenDependencies.cmake         |   9 +-
 cmake/modules/FindPythonLibsNew.cmake | 195 --------------------------
 cmake/modules/Findpybind11.cmake      |  97 -------------
 configuration/CMakeLists.txt          |  15 +-
 configuration/src/TCK.cpp             |   2 +-
 configuration/src/bindings.cpp        |  48 +++++++
 8 files changed, 103 insertions(+), 330 deletions(-)
 delete mode 100644 cmake/modules/FindPythonLibsNew.cmake
 delete mode 100644 cmake/modules/Findpybind11.cmake
 create mode 100644 configuration/src/bindings.cpp

diff --git a/Rec/Allen/python/Allen/__init__.py b/Rec/Allen/python/Allen/__init__.py
index e69de29bb2d..ef23ea0ce50 100644
--- a/Rec/Allen/python/Allen/__init__.py
+++ b/Rec/Allen/python/Allen/__init__.py
@@ -0,0 +1,16 @@
+import os
+
+__path__ += [
+    d
+    for d in [
+        os.path.realpath(
+            os.path.join(
+                os.path.dirname(__file__),
+                "..",
+                "..",
+                "configuration",
+            )
+        )
+    ]
+    if os.path.exists(d)
+]
diff --git a/Rec/Allen/python/Allen/tck.py b/Rec/Allen/python/Allen/tck.py
index f910df03b7b..26c59f3ca57 100644
--- a/Rec/Allen/python/Allen/tck.py
+++ b/Rec/Allen/python/Allen/tck.py
@@ -147,23 +147,9 @@ def sequence_to_git(
 
 def sequence_from_git(repository: Path, tck: str, use_bindings=True) -> str:
     if use_bindings:
-        from ROOT import gROOT
-
-        gROOT.SetBatch(True)
-        from ROOT import gInterpreter as interpreter
-        from cppyy import gbl
-
-        if gbl.gSystem.Load("libAllenLib") != 0:
-            print("Failed to load libAllenLib.so")
-            return None
-        if not interpreter.Declare("#include <Allen/TCK.h>"):
-            print("Failed to load Allen/TCK.h")
-            return None
-        # Do this in two steps so the pythonized object that owns the
-        # sequence and info objects is kept long enough
-        r = gbl.Allen.sequence_from_git(str(repository), tck)
-        sequence, info = r
-        return (str(sequence), {str(k): str(getattr(info, k)) for k in ("digest", "tck", "release", "type", "label")})
+        from Allen import TCK
+        sequence, info = TCK.sequence_from_git(str(repository), tck)
+        return (sequence, {k: getattr(info, k) for k in ("digest", "tck", "release", "type", "label")})
     else:
         p = run(
             [
@@ -183,23 +169,18 @@ def sequence_from_git(repository: Path, tck: str, use_bindings=True) -> str:
         return json.dump(tck_to_sequence(tck_db[digest]))
 
 
-def property_from_git(repository: Path, tck: str, algorithm: str, property=None):
-    p = run(["git", "-C", str(repository), "ls-tree", "-r", tck], stdout=PIPE, stderr=PIPE)
-    if p.returncode != 0:
-        print(f"Failed to list files for TCK {tck}")
-        return None
+def property_from_git(repository: Path, tck: str, algorithm=".*", property=".*"):
+    alg_re = re.compile(algorithm)
+    prop_re = re.compile(property)
 
-    space_expr = re.compile(r"[\s]+")
-    files = p.stdout.decode().split("\n")
-    alg_lines = [space_expr.sub(' ', l).split(' ') for l in files if re.search(algorithm, l)]
-    algs = [tuple(line[2:]) for line in alg_lines if line[1] == 'blob']
+    sequence, _ = sequence_from_git(repository, tck)
 
-    r = {}
-    for git_id, name in algs:
-        p = run(["git", "-C", str(repository), "show", git_id], stdout=PIPE)
-        if p.returncode != 0:
-            print(f"Failed to list properties for {name} {git_id}")
-        else:
-            props = json.loads(p.stdout)["Properties"]
-            r[name] = props if property is None else props[property]
-    return r
+    result = {}
+    for alg, props in sequence.items():
+        if alg == "scheduler" or not alg_re.match(alg):
+            continue
+        prop_result = {k: v for k, v in props.items() if prop_re.match(k)}
+        if prop_result:
+            result[alg] = prop_result
+
+    return result
diff --git a/cmake/AllenDependencies.cmake b/cmake/AllenDependencies.cmake
index e70e64dae09..48ec7eebc52 100644
--- a/cmake/AllenDependencies.cmake
+++ b/cmake/AllenDependencies.cmake
@@ -110,7 +110,7 @@ endif()
 
 if(WITH_Allen_PRIVATE_DEPENDENCIES)
   # We need a Python 3 interpreter
-  find_package(Python 3 REQUIRED Interpreter)
+  find_package(Python 3 REQUIRED Interpreter Development.Module)
 
   # Catch2 for tests
   find_package(Catch2 REQUIRED)
@@ -142,6 +142,13 @@ if(WITH_Allen_PRIVATE_DEPENDENCIES)
   if(NOT STANDALONE)
     find_package(Rangev3 REQUIRED)
     find_package(yaml-cpp REQUIRED)
+
+    # pybind11 is available in LCG, but it's installed with setup.py,
+    # so the CMake files are in a non-standard location and we have to
+    # make sure we can find them
+    execute_process(COMMAND ${Python_EXECUTABLE} -c "import pybind11; print(pybind11.get_cmake_dir(), end=\"\");" OUTPUT_VARIABLE PYBIND11_CMAKE_DIR)
+    list(APPEND CMAKE_PREFIX_PATH ${PYBIND11_CMAKE_DIR})
+    find_package(pybind11 CONFIG REQUIRED)
   endif()
 endif()
 
diff --git a/cmake/modules/FindPythonLibsNew.cmake b/cmake/modules/FindPythonLibsNew.cmake
deleted file mode 100644
index b29b287de72..00000000000
--- a/cmake/modules/FindPythonLibsNew.cmake
+++ /dev/null
@@ -1,195 +0,0 @@
-# - Find python libraries
-# This module finds the libraries corresponding to the Python interpreter
-# FindPythonInterp provides.
-# This code sets the following variables:
-#
-#  PYTHONLIBS_FOUND           - have the Python libs been found
-#  PYTHON_PREFIX              - path to the Python installation
-#  PYTHON_LIBRARIES           - path to the python library
-#  PYTHON_INCLUDE_DIRS        - path to where Python.h is found
-#  PYTHON_MODULE_EXTENSION    - lib extension, e.g. '.so' or '.pyd'
-#  PYTHON_MODULE_PREFIX       - lib name prefix: usually an empty string
-#  PYTHON_SITE_PACKAGES       - path to installation site-packages
-#  PYTHON_IS_DEBUG            - whether the Python interpreter is a debug build
-#
-# Thanks to talljimbo for the patch adding the 'LDVERSION' config
-# variable usage.
-
-#=============================================================================
-# Copyright 2001-2009 Kitware, Inc.
-# Copyright 2012 Continuum Analytics, Inc.
-#
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions
-# are met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-#
-# * Redistributions in binary form must reproduce the above copyright
-# notice, this list of conditions and the following disclaimer in the
-# documentation and/or other materials provided with the distribution.
-#
-# * Neither the names of Kitware, Inc., the Insight Software Consortium,
-# nor the names of their contributors may be used to endorse or promote
-# products derived from this software without specific prior written
-# permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#=============================================================================
-
-# Checking for the extension makes sure that `LibsNew` was found and not just `Libs`.
-if(PYTHONLIBS_FOUND AND PYTHON_MODULE_EXTENSION)
-    return()
-endif()
-
-# Use the Python interpreter to find the libs.
-if(PythonLibsNew_FIND_REQUIRED)
-    find_package(PythonInterp ${PythonLibsNew_FIND_VERSION} REQUIRED)
-else()
-    find_package(PythonInterp ${PythonLibsNew_FIND_VERSION})
-endif()
-
-if(NOT PYTHONINTERP_FOUND)
-    set(PYTHONLIBS_FOUND FALSE)
-    return()
-endif()
-
-# According to http://stackoverflow.com/questions/646518/python-how-to-detect-debug-interpreter
-# testing whether sys has the gettotalrefcount function is a reliable, cross-platform
-# way to detect a CPython debug interpreter.
-#
-# The library suffix is from the config var LDVERSION sometimes, otherwise
-# VERSION. VERSION will typically be like "2.7" on unix, and "27" on windows.
-execute_process(COMMAND "${PYTHON_EXECUTABLE}" "-c"
-    "from distutils import sysconfig as s;import sys;import struct;
-print('.'.join(str(v) for v in sys.version_info));
-print(sys.prefix);
-print(s.get_python_inc(plat_specific=True));
-print(s.get_python_lib(plat_specific=True));
-print(s.get_config_var('SO'));
-print(hasattr(sys, 'gettotalrefcount')+0);
-print(struct.calcsize('@P'));
-print(s.get_config_var('LDVERSION') or s.get_config_var('VERSION'));
-print(s.get_config_var('LIBDIR') or '');
-print(s.get_config_var('MULTIARCH') or '');
-"
-    RESULT_VARIABLE _PYTHON_SUCCESS
-    OUTPUT_VARIABLE _PYTHON_VALUES
-    ERROR_VARIABLE _PYTHON_ERROR_VALUE)
-
-if(NOT _PYTHON_SUCCESS MATCHES 0)
-    if(PythonLibsNew_FIND_REQUIRED)
-        message(FATAL_ERROR
-            "Python config failure:\n${_PYTHON_ERROR_VALUE}")
-    endif()
-    set(PYTHONLIBS_FOUND FALSE)
-    return()
-endif()
-
-# Convert the process output into a list
-string(REGEX REPLACE ";" "\\\\;" _PYTHON_VALUES ${_PYTHON_VALUES})
-string(REGEX REPLACE "\n" ";" _PYTHON_VALUES ${_PYTHON_VALUES})
-list(GET _PYTHON_VALUES 0 _PYTHON_VERSION_LIST)
-list(GET _PYTHON_VALUES 1 PYTHON_PREFIX)
-list(GET _PYTHON_VALUES 2 PYTHON_INCLUDE_DIR)
-list(GET _PYTHON_VALUES 3 PYTHON_SITE_PACKAGES)
-list(GET _PYTHON_VALUES 4 PYTHON_MODULE_EXTENSION)
-list(GET _PYTHON_VALUES 5 PYTHON_IS_DEBUG)
-list(GET _PYTHON_VALUES 6 PYTHON_SIZEOF_VOID_P)
-list(GET _PYTHON_VALUES 7 PYTHON_LIBRARY_SUFFIX)
-list(GET _PYTHON_VALUES 8 PYTHON_LIBDIR)
-list(GET _PYTHON_VALUES 9 PYTHON_MULTIARCH)
-
-# Make sure the Python has the same pointer-size as the chosen compiler
-# Skip if CMAKE_SIZEOF_VOID_P is not defined
-if(CMAKE_SIZEOF_VOID_P AND (NOT "${PYTHON_SIZEOF_VOID_P}" STREQUAL "${CMAKE_SIZEOF_VOID_P}"))
-    if(PythonLibsNew_FIND_REQUIRED)
-        math(EXPR _PYTHON_BITS "${PYTHON_SIZEOF_VOID_P} * 8")
-        math(EXPR _CMAKE_BITS "${CMAKE_SIZEOF_VOID_P} * 8")
-        message(FATAL_ERROR
-            "Python config failure: Python is ${_PYTHON_BITS}-bit, "
-            "chosen compiler is  ${_CMAKE_BITS}-bit")
-    endif()
-    set(PYTHONLIBS_FOUND FALSE)
-    return()
-endif()
-
-# The built-in FindPython didn't always give the version numbers
-string(REGEX REPLACE "\\." ";" _PYTHON_VERSION_LIST ${_PYTHON_VERSION_LIST})
-list(GET _PYTHON_VERSION_LIST 0 PYTHON_VERSION_MAJOR)
-list(GET _PYTHON_VERSION_LIST 1 PYTHON_VERSION_MINOR)
-list(GET _PYTHON_VERSION_LIST 2 PYTHON_VERSION_PATCH)
-
-# Make sure all directory separators are '/'
-string(REGEX REPLACE "\\\\" "/" PYTHON_PREFIX ${PYTHON_PREFIX})
-string(REGEX REPLACE "\\\\" "/" PYTHON_INCLUDE_DIR ${PYTHON_INCLUDE_DIR})
-string(REGEX REPLACE "\\\\" "/" PYTHON_SITE_PACKAGES ${PYTHON_SITE_PACKAGES})
-
-if(CMAKE_HOST_WIN32)
-    set(PYTHON_LIBRARY
-        "${PYTHON_PREFIX}/libs/Python${PYTHON_LIBRARY_SUFFIX}.lib")
-
-    # when run in a venv, PYTHON_PREFIX points to it. But the libraries remain in the
-    # original python installation. They may be found relative to PYTHON_INCLUDE_DIR.
-    if(NOT EXISTS "${PYTHON_LIBRARY}")
-        get_filename_component(_PYTHON_ROOT ${PYTHON_INCLUDE_DIR} DIRECTORY)
-        set(PYTHON_LIBRARY
-            "${_PYTHON_ROOT}/libs/Python${PYTHON_LIBRARY_SUFFIX}.lib")
-    endif()
-
-    # raise an error if the python libs are still not found.
-    if(NOT EXISTS "${PYTHON_LIBRARY}")
-        message(FATAL_ERROR "Python libraries not found")
-    endif()
-
-else()
-    if(PYTHON_MULTIARCH)
-        set(_PYTHON_LIBS_SEARCH "${PYTHON_LIBDIR}/${PYTHON_MULTIARCH}" "${PYTHON_LIBDIR}")
-    else()
-        set(_PYTHON_LIBS_SEARCH "${PYTHON_LIBDIR}")
-    endif()
-    #message(STATUS "Searching for Python libs in ${_PYTHON_LIBS_SEARCH}")
-    # Probably this needs to be more involved. It would be nice if the config
-    # information the python interpreter itself gave us were more complete.
-    find_library(PYTHON_LIBRARY
-        NAMES "python${PYTHON_LIBRARY_SUFFIX}"
-        PATHS ${_PYTHON_LIBS_SEARCH}
-        NO_DEFAULT_PATH)
-
-    # If all else fails, just set the name/version and let the linker figure out the path.
-    if(NOT PYTHON_LIBRARY)
-        set(PYTHON_LIBRARY python${PYTHON_LIBRARY_SUFFIX})
-    endif()
-endif()
-
-MARK_AS_ADVANCED(
-  PYTHON_LIBRARY
-  PYTHON_INCLUDE_DIR
-)
-
-# We use PYTHON_INCLUDE_DIR, PYTHON_LIBRARY and PYTHON_DEBUG_LIBRARY for the
-# cache entries because they are meant to specify the location of a single
-# library. We now set the variables listed by the documentation for this
-# module.
-SET(PYTHON_INCLUDE_DIRS "${PYTHON_INCLUDE_DIR}")
-SET(PYTHON_LIBRARIES "${PYTHON_LIBRARY}")
-SET(PYTHON_DEBUG_LIBRARIES "${PYTHON_DEBUG_LIBRARY}")
-
-find_package_message(PYTHON
-    "Found PythonLibs: ${PYTHON_LIBRARY}"
-    "${PYTHON_EXECUTABLE}${PYTHON_VERSION}")
-
-set(PYTHONLIBS_FOUND TRUE)
diff --git a/cmake/modules/Findpybind11.cmake b/cmake/modules/Findpybind11.cmake
deleted file mode 100644
index 31afb224a1a..00000000000
--- a/cmake/modules/Findpybind11.cmake
+++ /dev/null
@@ -1,97 +0,0 @@
-# - Find the NumPy libraries
-
-# This module finds if Pybind11 is installed, and sets the following variables
-# indicating where it is.
-#
-# TODO: Update to provide the libraries and paths for linking npymath lib.
-#
-#  PYBIND11_FOUND               - was Pybind11 found
-#  PYBIND11_VERSION             - the version of Pybind11 found as a string
-#  PYBIND11_VERSION_MAJOR       - the major version number of Pybind11
-#  PYBIND11_VERSION_MINOR       - the minor version number of Pybind11
-#  PYBIND11_VERSION_PATCH       - the patch version number of Pybind11
-#  PYBIND11_VERSION_DECIMAL     - e.g. version 1.6.1 is 10601
-#  PYBIND11_INCLUDE_DIRS        - path to the Pybind11 include files
-
-#============================================================================
-# Copyright 2012 Continuum Analytics, Inc.
-#
-# MIT License
-#
-# Permission is hereby granted, free of charge, to any person obtaining
-# a copy of this software and associated documentation files
-# (the "Software"), to deal in the Software without restriction, including
-# without limitation the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the Software, and to permit
-# persons to whom the Software is furnished to do so, subject to
-# the following conditions:
-#
-# The above copyright notice and this permission notice shall be included
-# in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
-# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
-# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
-# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
-# OTHER DEALINGS IN THE SOFTWARE.
-#
-#============================================================================
-
-# Finding Pybind11 involves calling the Python interpreter
-find_package(pybind11 CONFIG QUIET)
-if (pybind11_FOUND)
-  set(PYBIND11_FOUND TRUE)
-else()
-  if(Pybind11_FIND_REQUIRED)
-    find_package(PythonInterp REQUIRED)
-  else()
-    find_package(PythonInterp)
-  endif()
-
-  if(NOT PYTHONINTERP_FOUND)
-    set(PYBIND11_FOUND FALSE)
-  endif()
-
-  execute_process(COMMAND "${PYTHON_EXECUTABLE}" "-c"
-    "import pybind11 as pb; print(pb.__version__); print(pb.get_include());"
-    RESULT_VARIABLE _PYBIND11_SEARCH_SUCCESS
-    OUTPUT_VARIABLE _PYBIND11_VALUES
-    ERROR_VARIABLE _PYBIND11_ERROR_VALUE
-    OUTPUT_STRIP_TRAILING_WHITESPACE)
-
-  if(NOT _PYBIND11_SEARCH_SUCCESS MATCHES 0)
-    if(Pybind11_FIND_REQUIRED)
-      message(FATAL_ERROR
-        "pybind11 import failure:\n${_PYBIND11_ERROR_VALUE}")
-    endif()
-    set(PYBIND11_FOUND FALSE)
-  else()
-    set(PYBIND11_FOUND TRUE)
-  endif()
-
-  if (PYBIND11_FOUND)
-    # Convert the process output into a list
-    string(REGEX REPLACE ";" "\\\\;" _PYBIND11_VALUES ${_PYBIND11_VALUES})
-    string(REGEX REPLACE "\n" ";" _PYBIND11_VALUES ${_PYBIND11_VALUES})
-    list(GET _PYBIND11_VALUES 0 PYBIND11_VERSION)
-    list(GET _PYBIND11_VALUES 1 PYBIND11_INCLUDE_DIRS)
-
-    # Make sure all directory separators are '/'
-    string(REGEX REPLACE "\\\\" "/" PYBIND11_INCLUDE_DIRS ${PYBIND11_INCLUDE_DIRS})
-
-    # Get the major and minor version numbers
-    string(REGEX REPLACE "\\." ";" _PYBIND11_VERSION_LIST ${PYBIND11_VERSION})
-    list(GET _PYBIND11_VERSION_LIST 0 PYBIND11_VERSION_MAJOR)
-    list(GET _PYBIND11_VERSION_LIST 1 PYBIND11_VERSION_MINOR)
-    list(GET _PYBIND11_VERSION_LIST 2 PYBIND11_VERSION_PATCH)
-    string(REGEX MATCH "[0-9]*" PYBIND11_VERSION_PATCH ${PYBIND11_VERSION_PATCH})
-    math(EXPR PYBIND11_VERSION_DECIMAL
-      "(${PYBIND11_VERSION_MAJOR} * 10000) + (${PYBIND11_VERSION_MINOR} * 100) + ${PYBIND11_VERSION_PATCH}")
-
-    find_package_message(PYBIND11
-      "Found Pybind11: version \"${PYBIND11_VERSION}\" ${PYBIND11_INCLUDE_DIRS}"
-      "${PYBIND11_INCLUDE_DIRS}${PYBIND11_VERSION}")
-  endif()
-endif()
diff --git a/configuration/CMakeLists.txt b/configuration/CMakeLists.txt
index 8b2fe04c2d0..0e1832f97b5 100644
--- a/configuration/CMakeLists.txt
+++ b/configuration/CMakeLists.txt
@@ -31,5 +31,18 @@ if (NOT STANDALONE)
 
     # Install the header so it can be used in AllenOnline
     install(FILES include/TCK.h
-            DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/Allen)
+      DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/Allen)
+
+    pybind11_add_module(TCK src/bindings.cpp)
+    target_link_libraries(TCK
+      PRIVATE
+        Configuration
+      PUBLIC
+        pybind11::pybind11
+        ${Python_LIBRARIES})
+    install(TARGETS TCK
+      EXPORT
+        Allen
+      LIBRARY DESTINATION
+        ${GAUDI_INSTALL_PYTHONDIR}/Allen)
 endif()
diff --git a/configuration/src/TCK.cpp b/configuration/src/TCK.cpp
index fb349e5b799..64a12f126fd 100644
--- a/configuration/src/TCK.cpp
+++ b/configuration/src/TCK.cpp
@@ -25,7 +25,7 @@ std::string Allen::tck_from_git(std::string const& repo, std::string const& tck
     return tck_config;
   } catch ( std::runtime_error const& e ) {
     git_libgit2_shutdown();
-    throw std::runtime_error{"Failed extract JSON configuration for TCK " + tck + " from " + repo + ":" + e.what()};
+    throw std::runtime_error{"Failed to extract JSON configuration for TCK " + tck + " from " + repo + ": " + e.what()};
   }
 }
 
diff --git a/configuration/src/bindings.cpp b/configuration/src/bindings.cpp
new file mode 100644
index 00000000000..2a93e1e027e
--- /dev/null
+++ b/configuration/src/bindings.cpp
@@ -0,0 +1,48 @@
+#include <pybind11/pybind11.h>
+#include <pybind11/stl.h>
+#include <pybind11/iostream.h>
+
+#include <iostream>
+#include <numeric>
+#include <cmath>
+
+#include <TCK.h>
+
+namespace {
+  namespace py = pybind11;
+}
+
+// Python Module and Docstrings
+PYBIND11_MODULE(TCK, m)
+{
+  py::class_<LHCb::TCK::Info>(m, "TCKInfo")
+    .def(py::init<>())
+    .def_readwrite("digest", &LHCb::TCK::Info::digest)
+    .def_readwrite("tck", &LHCb::TCK::Info::tck)
+    .def_readwrite("release", &LHCb::TCK::Info::release)
+    .def_readwrite("type", &LHCb::TCK::Info::type)
+    .def_readwrite("label", &LHCb::TCK::Info::label);
+
+  m.doc() = R"pbdoc(
+    Standalone Python bindings for Allen TCK functions
+
+    .. currentmodule:: TCK
+
+    .. autosummary::
+       :toctree: _generate
+
+    TCKInfo
+    current_release
+    tck_from_git
+    sequence_to_git
+    )pbdoc";
+
+  m.def("current_release", &Allen::TCK::current_release,
+        "Get the current Allen release string in the form of ALLEN_vXrYpZ, "
+        "where pZ is optional");
+  m.def("tck_from_git", &Allen::tck_from_git,
+        "Get the TCK as it is in the git repository");
+  m.def("sequence_from_git", &Allen::sequence_from_git,
+        "Get the TCK and TCK information in a format that can be used to "
+        "configure Allen");
+}
-- 
GitLab


From 24aaa27430947f6d1578ca0010119e07ac2d2be2 Mon Sep 17 00:00:00 2001
From: Gitlab CI <noreply@cern.ch>
Date: Sat, 25 Mar 2023 22:29:59 +0000
Subject: [PATCH 11/49] Fixed formatting

patch generated by https://gitlab.cern.ch/lhcb/Allen/-/jobs/28458746
---
 Rec/Allen/python/Allen/__init__.py           |  9 ++---
 Rec/Allen/python/Allen/config.py             |  3 +-
 Rec/Allen/python/AllenAlgorithms/__init__.py |  9 ++---
 Rec/Allen/scripts/create_hlt1_tck.py         |  7 +++-
 configuration/include/TCK.h                  | 12 +++---
 configuration/src/TCK.cpp                    | 27 +++++++------
 configuration/src/bindings.cpp               | 19 +++++----
 main/include/Provider.h                      |  4 +-
 main/src/Provider.cpp                        | 41 +++++++++++---------
 main/src/main.cpp                            |  3 +-
 10 files changed, 72 insertions(+), 62 deletions(-)

diff --git a/Rec/Allen/python/Allen/__init__.py b/Rec/Allen/python/Allen/__init__.py
index ef23ea0ce50..edb5f8b26b7 100644
--- a/Rec/Allen/python/Allen/__init__.py
+++ b/Rec/Allen/python/Allen/__init__.py
@@ -1,16 +1,13 @@
 import os
 
 __path__ += [
-    d
-    for d in [
+    d for d in [
         os.path.realpath(
             os.path.join(
                 os.path.dirname(__file__),
                 "..",
                 "..",
                 "configuration",
-            )
-        )
-    ]
-    if os.path.exists(d)
+            ))
+    ] if os.path.exists(d)
 ]
diff --git a/Rec/Allen/python/Allen/config.py b/Rec/Allen/python/Allen/config.py
index 014868ab603..307e28f25c9 100755
--- a/Rec/Allen/python/Allen/config.py
+++ b/Rec/Allen/python/Allen/config.py
@@ -95,8 +95,7 @@ def configured_bank_types(sequence_json):
     bank_types = set()
     for t, n, c in sequence_json["sequence"]["configured_algorithms"]:
         props = sequence_json.get(n, {})
-        if c == "ProviderAlgorithm" and not bool(
-                props.get('empty', False)):
+        if c == "ProviderAlgorithm" and not bool(props.get('empty', False)):
             bank_types.add(props['bank_type'])
     return bank_types
 
diff --git a/Rec/Allen/python/AllenAlgorithms/__init__.py b/Rec/Allen/python/AllenAlgorithms/__init__.py
index 1d3fc694cf9..ce95439a793 100644
--- a/Rec/Allen/python/AllenAlgorithms/__init__.py
+++ b/Rec/Allen/python/AllenAlgorithms/__init__.py
@@ -1,8 +1,7 @@
 import os
 
 __path__ = [
-    d
-    for d in [
+    d for d in [
         os.path.realpath(
             os.path.join(
                 os.path.dirname(__file__),
@@ -11,8 +10,6 @@ __path__ = [
                 "code_generation",
                 "sequences",
                 "AllenAlgorithms",
-            )
-        )
-    ]
-    if os.path.exists(d)
+            ))
+    ] if os.path.exists(d)
 ]
diff --git a/Rec/Allen/scripts/create_hlt1_tck.py b/Rec/Allen/scripts/create_hlt1_tck.py
index d5735a70a63..7ec540651ea 100644
--- a/Rec/Allen/scripts/create_hlt1_tck.py
+++ b/Rec/Allen/scripts/create_hlt1_tck.py
@@ -7,7 +7,8 @@ parser = argparse.ArgumentParser()
 parser.add_argument("sequence", nargs=1)
 parser.add_argument("repository", nargs=1)
 parser.add_argument("tck", nargs=1)
-parser.add_argument("--python-hlt1-node", type=str, default="hlt1_node", dest="hlt1_node")
+parser.add_argument(
+    "--python-hlt1-node", type=str, default="hlt1_node", dest="hlt1_node")
 parser.add_argument("--label", default="test", type=str)
 
 args = parser.parse_args()
@@ -27,7 +28,9 @@ if sequence_arg.suffix in (".py", ''):
 elif sequence_arg.suffix == '.json':
     with open(sequence_arg, "r") as sequence_file:
         sequence = json.load(sequence_file)
-        dec_reporter_name = next((n for t, n, _ in sequence['sequence']['configured_algorithms'] if t == "dec_reporter::dec_reporter_t"), None)
+        dec_reporter_name = next(
+            (n for t, n, _ in sequence['sequence']['configured_algorithms']
+             if t == "dec_reporter::dec_reporter_t"), None)
         sequence[dec_reporter_name]['tck'] = tck
 
 sequence_to_git(repository, sequence, sequence_arg.stem, args.label, tck)
diff --git a/configuration/include/TCK.h b/configuration/include/TCK.h
index f4289ec13e9..42221b2e39d 100644
--- a/configuration/include/TCK.h
+++ b/configuration/include/TCK.h
@@ -7,12 +7,12 @@
 
 namespace Allen {
 
-namespace TCK {
-  std::string current_release();
-}
+  namespace TCK {
+    std::string current_release();
+  }
 
-std::string tck_from_git(std::string const& repo, std::string const& tck );
+  std::string tck_from_git(std::string const& repo, std::string const& tck);
 
-std::tuple<std::string, LHCb::TCK::Info> sequence_from_git(std::string const& repo, std::string const& tck );
+  std::tuple<std::string, LHCb::TCK::Info> sequence_from_git(std::string const& repo, std::string const& tck);
 
-}
+} // namespace Allen
diff --git a/configuration/src/TCK.cpp b/configuration/src/TCK.cpp
index 64a12f126fd..6c9d4e4e13d 100644
--- a/configuration/src/TCK.cpp
+++ b/configuration/src/TCK.cpp
@@ -8,30 +8,34 @@
 #include <HltServices/TCKUtils.h>
 #include <TCK.h>
 
-std::string Allen::TCK::current_release() {
+std::string Allen::TCK::current_release()
+{
   return "ALLEN_v" + std::to_string(ALLEN_MAJOR_VERSION) + "r" + std::to_string(ALLEN_MINOR_VERSION);
 }
 
-std::string Allen::tck_from_git(std::string const& repo, std::string const& tck ) {
+std::string Allen::tck_from_git(std::string const& repo, std::string const& tck)
+{
 
   using LHCb::TCK::Git::check;
 
   git_libgit2_init();
   git_repository* git_repo = nullptr;
-  check( git_repository_open_bare( &git_repo, repo.c_str() ) );
+  check(git_repository_open_bare(&git_repo, repo.c_str()));
   try {
-    auto tck_config = LHCb::TCK::Git::extract_json( git_repo, tck );
+    auto tck_config = LHCb::TCK::Git::extract_json(git_repo, tck);
     git_libgit2_shutdown();
     return tck_config;
-  } catch ( std::runtime_error const& e ) {
+  } catch (std::runtime_error const& e) {
     git_libgit2_shutdown();
-    throw std::runtime_error{"Failed to extract JSON configuration for TCK " + tck + " from " + repo + ": " + e.what()};
+    throw std::runtime_error {"Failed to extract JSON configuration for TCK " + tck + " from " + repo + ": " +
+                              e.what()};
   }
 }
 
-std::tuple<std::string, LHCb::TCK::Info> Allen::sequence_from_git(std::string const& repo, std::string const& tck ) {
+std::tuple<std::string, LHCb::TCK::Info> Allen::sequence_from_git(std::string const& repo, std::string const& tck)
+{
 
-  auto tck_config = tck_from_git( repo, tck );
+  auto tck_config = tck_from_git(repo, tck);
   if (tck_config.empty()) {
     return {tck_config, {}};
   }
@@ -44,18 +48,17 @@ std::tuple<std::string, LHCb::TCK::Info> Allen::sequence_from_git(std::string co
   // indexed by digest, so loop over them until the one is found that
   // has the right TCK entry.
   auto items = tck_db.items();
-  auto json_tck = std::find_if( items.begin(), items.end(), [&manifest, tck] (auto const& e) {
+  auto json_tck = std::find_if(items.begin(), items.end(), [&manifest, tck](auto const& e) {
     return e.key() != "manifest" && manifest.count(e.key()) && manifest[e.key()]["TCK"] == tck;
   });
 
-
   nlohmann::json sequence;
 
   std::vector<std::string> tokens;
 
   for (auto const& [entry, config] : json_tck.value().items()) {
     tokens.clear();
-    boost::algorithm::split( tokens, entry, boost::algorithm::is_any_of( "/" ) );
+    boost::algorithm::split(tokens, entry, boost::algorithm::is_any_of("/"));
     if (tokens[0] == "Scheduler") {
       // Put special "sequence" items where they are expected
       sequence["sequence"][tokens[1]] = config;
@@ -92,7 +95,7 @@ std::tuple<std::string, LHCb::TCK::Info> Allen::sequence_from_git(std::string co
   auto const& digest = json_tck.key();
   auto const& m = manifest[digest];
   auto [release, type] = m["Release2Type"].items().begin();
-  LHCb::TCK::Info info{digest, m["TCK"], release, type, m["label"]};
+  LHCb::TCK::Info info {digest, m["TCK"], release, type, m["label"]};
 
   return {sequence.dump(), info};
 }
diff --git a/configuration/src/bindings.cpp b/configuration/src/bindings.cpp
index 2a93e1e027e..7e968cc1bc6 100644
--- a/configuration/src/bindings.cpp
+++ b/configuration/src/bindings.cpp
@@ -37,12 +37,15 @@ PYBIND11_MODULE(TCK, m)
     sequence_to_git
     )pbdoc";
 
-  m.def("current_release", &Allen::TCK::current_release,
-        "Get the current Allen release string in the form of ALLEN_vXrYpZ, "
-        "where pZ is optional");
-  m.def("tck_from_git", &Allen::tck_from_git,
-        "Get the TCK as it is in the git repository");
-  m.def("sequence_from_git", &Allen::sequence_from_git,
-        "Get the TCK and TCK information in a format that can be used to "
-        "configure Allen");
+  m.def(
+    "current_release",
+    &Allen::TCK::current_release,
+    "Get the current Allen release string in the form of ALLEN_vXrYpZ, "
+    "where pZ is optional");
+  m.def("tck_from_git", &Allen::tck_from_git, "Get the TCK as it is in the git repository");
+  m.def(
+    "sequence_from_git",
+    &Allen::sequence_from_git,
+    "Get the TCK and TCK information in a format that can be used to "
+    "configure Allen");
 }
diff --git a/main/include/Provider.h b/main/include/Provider.h
index 94f8576532d..97c791b3bf4 100644
--- a/main/include/Provider.h
+++ b/main/include/Provider.h
@@ -36,7 +36,9 @@ namespace Allen {
 
   std::string sequence_conf(std::map<std::string, std::string> const& options);
 
-  std::shared_ptr<IInputProvider> make_provider(std::map<std::string, std::string> const& options, std::string_view configuration);
+  std::shared_ptr<IInputProvider> make_provider(
+    std::map<std::string, std::string> const& options,
+    std::string_view configuration);
 
   std::unique_ptr<OutputHandler> output_handler(
     IInputProvider* input_provider,
diff --git a/main/src/Provider.cpp b/main/src/Provider.cpp
index 3ace45679c4..0cc8a50e5df 100644
--- a/main/src/Provider.cpp
+++ b/main/src/Provider.cpp
@@ -52,9 +52,9 @@ std::string Allen::sequence_conf(std::map<std::string, std::string> const& optio
     }
   }
 
-  std::regex tck_option{"([^:]+):(0x[a-fA-F0-9]{8})"};
+  std::regex tck_option {"([^:]+):(0x[a-fA-F0-9]{8})"};
   std::smatch tck_match;
-  if ( std::regex_match(sequence, tck_match, tck_option) ) {
+  if (std::regex_match(sequence, tck_match, tck_option)) {
 #ifndef ALLEN_STANDALONE
 
     // WORKING print some message using tck_info
@@ -64,20 +64,22 @@ std::string Allen::sequence_conf(std::map<std::string, std::string> const& optio
     std::string config;
     LHCb::TCK::Info info;
     try {
-      std::tie(config, info) = Allen::sequence_from_git( repo, tck );
-    } catch ( std::runtime_error const& e ) {
-      throw std::runtime_error{"Failed to obtain sequence for TCK " + tck + " from repository at " + repo + ":" + e.what() };
+      std::tie(config, info) = Allen::sequence_from_git(repo, tck);
+    } catch (std::runtime_error const& e) {
+      throw std::runtime_error {"Failed to obtain sequence for TCK " + tck + " from repository at " + repo + ":" +
+                                e.what()};
     }
-    if ( config.empty() ) {
-      throw std::runtime_error{"Failed to obtain sequence for TCK " + tck + " from repository at " + repo };
+    if (config.empty()) {
+      throw std::runtime_error {"Failed to obtain sequence for TCK " + tck + " from repository at " + repo};
     }
-    else if ( info.release != TCK::current_release() ) {
-      throw std::runtime_error{"TCK " + tck + " is compatible with release " + info.release + " and not current release " + TCK::current_release() };
+    else if (info.release != TCK::current_release()) {
+      throw std::runtime_error {"TCK " + tck + " is compatible with release " + info.release +
+                                " and not current release " + TCK::current_release()};
     }
     info_cout << "TCK " << tck << " loaded " << info.type << " sequence from git with label " << info.label << "\n";
     return config;
 #else
-    throw std::runtime_error{"Loading configuration from TCK is not supported in standalone builds"};
+    throw std::runtime_error {"Loading configuration from TCK is not supported in standalone builds"};
 #endif
   }
   else {
@@ -92,21 +94,22 @@ std::string Allen::sequence_conf(std::map<std::string, std::string> const& optio
       const std::string allen_configuration_options = "";
 #endif
 
-      int error = system(
-                         ("PYTHONPATH=code_generation/sequences:$PYTHONPATH python3 ../configuration/python/AllenCore/gen_allen_json.py " +
-                          allen_configuration_options + " --seqpath ../configuration/python/AllenSequences/" + sequence + ".py > /dev/null")
-                         .c_str());
+      int error = system(("PYTHONPATH=code_generation/sequences:$PYTHONPATH python3 "
+                          "../configuration/python/AllenCore/gen_allen_json.py " +
+                          allen_configuration_options + " --seqpath ../configuration/python/AllenSequences/" +
+                          sequence + ".py > /dev/null")
+                           .c_str());
       if (error) {
-        throw std::runtime_error{ "sequence generation failed" };
+        throw std::runtime_error {"sequence generation failed"};
       }
       info_cout << "\n";
       generated = true;
     }
 
     std::string config;
-    std::ifstream config_file{json_configuration_file};
+    std::ifstream config_file {json_configuration_file};
     if (!config_file.is_open()) {
-      throw std::runtime_error{ "failed to open sequence configuration file " + json_configuration_file };
+      throw std::runtime_error {"failed to open sequence configuration file " + json_configuration_file};
     }
     config_file >> config;
     return config;
@@ -150,7 +153,9 @@ Allen::IOConf Allen::io_configuration(
   return io_conf;
 }
 
-std::shared_ptr<IInputProvider> Allen::make_provider(std::map<std::string, std::string> const& options, std::string_view configuration)
+std::shared_ptr<IInputProvider> Allen::make_provider(
+  std::map<std::string, std::string> const& options,
+  std::string_view configuration)
 {
 
   unsigned number_of_slices = 0;
diff --git a/main/src/main.cpp b/main/src/main.cpp
index be43237632c..2ef1c26b8ca 100644
--- a/main/src/main.cpp
+++ b/main/src/main.cpp
@@ -131,5 +131,6 @@ int main(int argc, char* argv[])
 
   auto output_handler = Allen::output_handler(input_provider.get(), zmqSvc, allen_options, configuration);
 
-  return allen(std::move(allen_options), configuration, &updater, std::move(input_provider), output_handler.get(), zmqSvc, "");
+  return allen(
+    std::move(allen_options), configuration, &updater, std::move(input_provider), output_handler.get(), zmqSvc, "");
 }
-- 
GitLab


From 64c9ec6b2462b81fb139b6ef9e91ae61c49a25b4 Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Sat, 25 Mar 2023 23:59:59 +0100
Subject: [PATCH 12/49] Fix copyright

---
 Rec/Allen/python/Allen/__init__.py           | 10 ++++++++++
 Rec/Allen/python/Allen/tck.py                | 10 ++++++++++
 Rec/Allen/python/AllenAlgorithms/__init__.py | 10 ++++++++++
 Rec/Allen/scripts/create_hlt1_tck.py         | 10 ++++++++++
 configuration/include/TCK.h                  | 10 ++++++++++
 configuration/src/TCK.cpp                    | 10 ++++++++++
 configuration/src/bindings.cpp               | 10 ++++++++++
 7 files changed, 70 insertions(+)

diff --git a/Rec/Allen/python/Allen/__init__.py b/Rec/Allen/python/Allen/__init__.py
index edb5f8b26b7..b4612602b55 100644
--- a/Rec/Allen/python/Allen/__init__.py
+++ b/Rec/Allen/python/Allen/__init__.py
@@ -1,3 +1,13 @@
+###############################################################################
+# (c) Copyright 2023 CERN for the benefit of the LHCb Collaboration           #
+#                                                                             #
+# This software is distributed under the terms of the Apache License          #
+# version 2 (Apache-2.0), copied verbatim in the file "COPYING".              #
+#                                                                             #
+# In applying this licence, CERN does not waive the privileges and immunities #
+# granted to it by virtue of its status as an Intergovernmental Organization  #
+# or submit itself to any jurisdiction.                                       #
+###############################################################################
 import os
 
 __path__ += [
diff --git a/Rec/Allen/python/Allen/tck.py b/Rec/Allen/python/Allen/tck.py
index 26c59f3ca57..25b20ecc82e 100644
--- a/Rec/Allen/python/Allen/tck.py
+++ b/Rec/Allen/python/Allen/tck.py
@@ -1,3 +1,13 @@
+###############################################################################
+# (c) Copyright 2023 CERN for the benefit of the LHCb Collaboration           #
+#                                                                             #
+# This software is distributed under the terms of the Apache License          #
+# version 2 (Apache-2.0), copied verbatim in the file "COPYING".              #
+#                                                                             #
+# In applying this licence, CERN does not waive the privileges and immunities #
+# granted to it by virtue of its status as an Intergovernmental Organization  #
+# or submit itself to any jurisdiction.                                       #
+###############################################################################
 import json
 import os
 import sys
diff --git a/Rec/Allen/python/AllenAlgorithms/__init__.py b/Rec/Allen/python/AllenAlgorithms/__init__.py
index ce95439a793..f4ecabff7c3 100644
--- a/Rec/Allen/python/AllenAlgorithms/__init__.py
+++ b/Rec/Allen/python/AllenAlgorithms/__init__.py
@@ -1,3 +1,13 @@
+###############################################################################
+# (c) Copyright 2023 CERN for the benefit of the LHCb Collaboration           #
+#                                                                             #
+# This software is distributed under the terms of the Apache License          #
+# version 2 (Apache-2.0), copied verbatim in the file "COPYING".              #
+#                                                                             #
+# In applying this licence, CERN does not waive the privileges and immunities #
+# granted to it by virtue of its status as an Intergovernmental Organization  #
+# or submit itself to any jurisdiction.                                       #
+###############################################################################
 import os
 
 __path__ = [
diff --git a/Rec/Allen/scripts/create_hlt1_tck.py b/Rec/Allen/scripts/create_hlt1_tck.py
index 7ec540651ea..fb16943240c 100644
--- a/Rec/Allen/scripts/create_hlt1_tck.py
+++ b/Rec/Allen/scripts/create_hlt1_tck.py
@@ -1,3 +1,13 @@
+###############################################################################
+# (c) Copyright 2023 CERN for the benefit of the LHCb Collaboration           #
+#                                                                             #
+# This software is distributed under the terms of the Apache License          #
+# version 2 (Apache-2.0), copied verbatim in the file "COPYING".              #
+#                                                                             #
+# In applying this licence, CERN does not waive the privileges and immunities #
+# granted to it by virtue of its status as an Intergovernmental Organization  #
+# or submit itself to any jurisdiction.                                       #
+###############################################################################
 import argparse
 import json
 from Allen.tck import sequence_to_git, sequence_from_python
diff --git a/configuration/include/TCK.h b/configuration/include/TCK.h
index 42221b2e39d..17b69d333af 100644
--- a/configuration/include/TCK.h
+++ b/configuration/include/TCK.h
@@ -1,3 +1,13 @@
+/*****************************************************************************\
+* (c) Copyright 2023 CERN for the benefit of the LHCb Collaboration           *
+*                                                                             *
+* This software is distributed under the terms of the Apache License          *
+* version 2 (Apache-2.0), copied verbatim in the file "COPYING".              *
+*                                                                             *
+* In applying this licence, CERN does not waive the privileges and immunities *
+* granted to it by virtue of its status as an Intergovernmental Organization  *
+* or submit itself to any jurisdiction.                                       *
+\*****************************************************************************/
 #pragma once
 
 #include <string>
diff --git a/configuration/src/TCK.cpp b/configuration/src/TCK.cpp
index 6c9d4e4e13d..85636b08a1b 100644
--- a/configuration/src/TCK.cpp
+++ b/configuration/src/TCK.cpp
@@ -1,3 +1,13 @@
+/*****************************************************************************\
+* (c) Copyright 2023 CERN for the benefit of the LHCb Collaboration           *
+*                                                                             *
+* This software is distributed under the terms of the Apache License          *
+* version 2 (Apache-2.0), copied verbatim in the file "COPYING".              *
+*                                                                             *
+* In applying this licence, CERN does not waive the privileges and immunities *
+* granted to it by virtue of its status as an Intergovernmental Organization  *
+* or submit itself to any jurisdiction.                                       *
+\*****************************************************************************/
 #include "boost/algorithm/string/classification.hpp"
 #include "boost/algorithm/string/predicate.hpp"
 #include "boost/algorithm/string/split.hpp"
diff --git a/configuration/src/bindings.cpp b/configuration/src/bindings.cpp
index 7e968cc1bc6..030eaefc6b5 100644
--- a/configuration/src/bindings.cpp
+++ b/configuration/src/bindings.cpp
@@ -1,3 +1,13 @@
+/*****************************************************************************\
+* (c) Copyright 2023 CERN for the benefit of the LHCb Collaboration           *
+*                                                                             *
+* This software is distributed under the terms of the Apache License          *
+* version 2 (Apache-2.0), copied verbatim in the file "COPYING".              *
+*                                                                             *
+* In applying this licence, CERN does not waive the privileges and immunities *
+* granted to it by virtue of its status as an Intergovernmental Organization  *
+* or submit itself to any jurisdiction.                                       *
+\*****************************************************************************/
 #include <pybind11/pybind11.h>
 #include <pybind11/stl.h>
 #include <pybind11/iostream.h>
-- 
GitLab


From 7324066dc24e1c32af00d3030db6e9c13f74902c Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Sun, 26 Mar 2023 00:28:51 +0100
Subject: [PATCH 13/49] Fix allen.py

---
 Dumpers/BinaryDumpers/options/allen.py | 9 +++++++--
 Rec/Allen/python/Allen/tck.py          | 2 +-
 2 files changed, 8 insertions(+), 3 deletions(-)

diff --git a/Dumpers/BinaryDumpers/options/allen.py b/Dumpers/BinaryDumpers/options/allen.py
index fe3f1fc7bbe..0ef2401e50d 100755
--- a/Dumpers/BinaryDumpers/options/allen.py
+++ b/Dumpers/BinaryDumpers/options/allen.py
@@ -37,7 +37,6 @@ allen_dir = os.environ["ALLEN_PROJECT_ROOT"]
 interpreter.Declare("#include <Dumpers/IUpdater.h>")
 interpreter.Declare("#include <Allen/Allen.h>")
 interpreter.Declare("#include <Allen/Provider.h>")
-interpreter.Declare("#include <Allen/TCK.h>")
 interpreter.Declare("#include <Dumpers/PyAllenHelper.h>")
 
 sequence_default = os.path.join(os.environ['ALLEN_INSTALL_DIR'], 'constants',
@@ -197,12 +196,18 @@ sequence = os.path.expandvars(args.sequence)
 sequence_json = ""
 tck_option = re.compile(r"([^:]+):(0x[a-fA-F0-9]{8})")
 if (m := tck_option.match(sequence)):
+    from Allen.tck import sequence_from_git, get_allen_version
     repo = m.group(1)
     tck = m.group(2)
-    sequence_json = str(gbl.Allen.sequence_from_git(repo, tck, gbl.std.cerr))
+    sequence_json, tck_info = sequence_from_git(repo, tck)
     if not sequence_json:
         print(f"Failed to obtain configuration for TCK {tck} from repository {repo}")
         sys.exit(1)
+    elif (allen_release := get_allen_version()) != tck_info.release:
+        print(f"TCK {tck} is compatible with Allen release {allen_release}, not with {tck_info.release}.")
+        sys.exit(1)
+    else:
+        print(f"Loaded TCK {tck} with sequence type {tck_info.type} and label {tck_info.label}.")
 else:
     with open(sequence) as f:
         sequence_json = f.read()
diff --git a/Rec/Allen/python/Allen/tck.py b/Rec/Allen/python/Allen/tck.py
index 25b20ecc82e..49d0230d199 100644
--- a/Rec/Allen/python/Allen/tck.py
+++ b/Rec/Allen/python/Allen/tck.py
@@ -28,7 +28,7 @@ def get_allen_version():
         allen_version = manifest_tree.find("project").get("version")
         vs = allen_version.split(".")
         version = "ALLEN_"
-        for prefix, idx in (("v", 0), ("r", 1), ("p", 2)):
+        for idx, prefix in enumerate(("v", "r", "p")):
             if idx < len(vs):
                 version += prefix + vs[idx]
         return version
-- 
GitLab


From c2c313354bced409274b4470a1569b024f1128f4 Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Sun, 26 Mar 2023 00:29:29 +0100
Subject: [PATCH 14/49] Remove --run-from-json

---
 doc/setup/run_allen.rst                      |  3 +--
 scripts/ci/jobs/run_efficiency_throughput.sh | 18 +++++++++---------
 scripts/ci/jobs/run_physics_efficiency.sh    |  4 ++--
 scripts/ci/jobs/run_throughput.sh            | 10 +++++-----
 scripts/ci/jobs/run_toggle_run_changes.sh    |  4 ++--
 scripts/ci/test_config.yaml                  |  4 ++--
 6 files changed, 21 insertions(+), 22 deletions(-)

diff --git a/doc/setup/run_allen.rst b/doc/setup/run_allen.rst
index 5474aa95c96..0ace1700c38 100644
--- a/doc/setup/run_allen.rst
+++ b/doc/setup/run_allen.rst
@@ -33,7 +33,6 @@ A run of the Allen program with the help option `-h` will let you know the basic
      -v, --verbosity {verbosity [0-5]}=3 (info)
      -p, --print-memory {print memory usage}=0
      --sequence {sequence to run}
-     --run-from-json {run from json configuration file}=0
      --output-file {Write selected event to output file}
      --device {select device to use}=0
      --non-stop {Runs the program indefinitely}=0
@@ -94,7 +93,7 @@ Use Gaudi to update non-event data such as alignment and configuration constants
 When using MDF files as input, call from the Allen environment::
 
   ./Allen/build.${ARCHITECTURE}/run python Dumpers/BinaryDumpers/options/allen.py --mdf Allen/input/minbias/mdf/MiniBrunel_2018_MinBias_FTv4_DIGI_retinacluster_v1.mdf
- 
+
 When using MEP files as input, call from the MooreOnline environment, as MEP handling is implemented there::
 
   ./MooreOnline/build.${ARCHITECTURE}/run python Allen/Dumpers/BinaryDumpers/options/allen.py --sequence=Allen/InstallArea/${ARCHITECTURE}/constants/hlt1_pp_default.json --tags="dddb_tag,simcond_tag" --mep mep_file.mep
diff --git a/scripts/ci/jobs/run_efficiency_throughput.sh b/scripts/ci/jobs/run_efficiency_throughput.sh
index a0cd60d7002..c95528df309 100644
--- a/scripts/ci/jobs/run_efficiency_throughput.sh
+++ b/scripts/ci/jobs/run_efficiency_throughput.sh
@@ -26,14 +26,14 @@ if [ "${RUN_THROUGHPUT}" != "NO_THROUGHPUT" ]; then
     fi
     # overwrite GEOMETRY if RUN_THROUGHPUT_GEOMETRY defined
     if [ ! -z ${RUN_THROUGHPUT_GEOMETRY+x} ]; then
-        GEOMETRY="${RUN_THROUGHPUT_GEOMETRY}"   
+        GEOMETRY="${RUN_THROUGHPUT_GEOMETRY}"
     # else
     #     echo "RUN_THROUGHPUT_GEOMETRY not set - abort throughput test"
     #     exit 1
     fi
     # overwrite DATA_TAG if RUN_THROUGHPUT_DATA_TAG defined
     if [ ! -z ${RUN_THROUGHPUT_DATA_TAG+x} ]; then
-        DATA_TAG="${RUN_THROUGHPUT_DATA_TAG}"   
+        DATA_TAG="${RUN_THROUGHPUT_DATA_TAG}"
     else
         echo "RUN_THROUGHPUT_DATA_TAG not set - abort throughput test"
         exit 1
@@ -50,7 +50,7 @@ if [ "${RUN_THROUGHPUT}" != "NO_THROUGHPUT" ]; then
     RUN_OPTIONS="$RUN_OPTIONS -g /scratch/allen_geometries/${GEOMETRY}"
     fi
 
-    RUN_OPTIONS="--mdf ${ALLEN_DATA}/mdf_input/${DATA_TAG}.mdf --sequence ${SEQUENCE}  --run-from-json 1 --params external/ParamFiles/ ${RUN_OPTIONS}"
+    RUN_OPTIONS="--mdf ${ALLEN_DATA}/mdf_input/${DATA_TAG}.mdf --sequence ${SEQUENCE}.json --params external/ParamFiles/ ${RUN_OPTIONS}"
 
     set -euxo pipefail
     OUTPUT_FOLDER_REL="${TEST_NAME}_output_${SEQUENCE}_${DATA_TAG}${OPTIONS}/${DEVICE_ID}"
@@ -164,7 +164,7 @@ if [ "${RUN_THROUGHPUT}" != "NO_THROUGHPUT" ]; then
     # write metric to display on MR
     echo "throughput_kHz{device=\"${DEVICE_ID}\",sequence=\"${SEQUENCE}\",dataset=\"${DATA_TAG}\"} ${THROUGHPUT_KHZ}" >> "${OUTPUT_FOLDER}/metrics.txt"
 
-    if [ "${TPUT_REPORT}" = "NO_REPORT" ]; then 
+    if [ "${TPUT_REPORT}" = "NO_REPORT" ]; then
     echo "TPUT_REPORT is set to ${TPUT_REPORT} - throughput will not be reported."
 
     touch "${OUTPUT_FOLDER}/no_throughput_report.txt"
@@ -182,28 +182,28 @@ if [ "${RUN_EFFICIENCY}" != "NO_EFFICIENCY" ]; then
 
     check_build_exists
 
-    EFF_RUN_OPTIONS="-n 10000 -m 1100 --run-from-json 1"
+    EFF_RUN_OPTIONS="-n 10000 -m 1100"
 
     # Configure the input files (--mdf) and geometry (-g)
     set +x; set +u
 
     # overwrite SEQUENCE if RUN_EFFICIENCY_SEQUENCE defined
     if [ ! -z ${RUN_EFFICIENCY_SEQUENCE+x} ]; then
-        SEQUENCE="${RUN_EFFICIENCY_SEQUENCE}"   
+        SEQUENCE="${RUN_EFFICIENCY_SEQUENCE}"
     else
         echo "RUN_EFFICIENCY_SEQUENCE not set - abort efficiency test"
         exit 1
     fi
     # overwrite GEOMETRY if RUN_EFFICIENCY_GEOMETRY defined
     if [ ! -z ${RUN_EFFICIENCY_GEOMETRY+x} ]; then
-        GEOMETRY="${RUN_EFFICIENCY_GEOMETRY}"   
+        GEOMETRY="${RUN_EFFICIENCY_GEOMETRY}"
     # else
     #     echo "RUN_EFFICIENCY_GEOMETRY not set - abort efficiency test"
     #     exit 1
     fi
     # overwrite DATA_TAG if RUN_EFFICIENCY_DATA_TAG defined
     if [ ! -z ${RUN_EFFICIENCY_DATA_TAG+x} ]; then
-        DATA_TAG="${RUN_EFFICIENCY_DATA_TAG}"   
+        DATA_TAG="${RUN_EFFICIENCY_DATA_TAG}"
     else
         echo "RUN_EFFICIENCY_DATA_TAG not set - abort efficiency test"
         exit 1
@@ -215,7 +215,7 @@ if [ "${RUN_EFFICIENCY}" != "NO_EFFICIENCY" ]; then
 
     set -euxo pipefail
 
-    EFF_RUN_OPTIONS=" --mdf ${ALLEN_DATA}/mdf_input/${DATA_TAG}.mdf --sequence ${SEQUENCE} --params external/ParamFiles/ ${EFF_RUN_OPTIONS}"
+    EFF_RUN_OPTIONS=" --mdf ${ALLEN_DATA}/mdf_input/${DATA_TAG}.mdf --sequence ${SEQUENCE}.json --params external/ParamFiles/ ${EFF_RUN_OPTIONS}"
 
     OUTPUT_FOLDER="${TEST_NAME}_output_${SEQUENCE}"
 
diff --git a/scripts/ci/jobs/run_physics_efficiency.sh b/scripts/ci/jobs/run_physics_efficiency.sh
index 71c4f1bb077..566997637c3 100755
--- a/scripts/ci/jobs/run_physics_efficiency.sh
+++ b/scripts/ci/jobs/run_physics_efficiency.sh
@@ -11,7 +11,7 @@ fi
 check_build_exists
 
 
-RUN_OPTIONS="-n 10000 -m 1100 --run-from-json 1"
+RUN_OPTIONS="-n 10000 -m 1100"
 
 # Configure the input files (--mdf) and geometry (-g)
 set +x; set +u
@@ -21,7 +21,7 @@ fi
 
 set -euxo pipefail
 
-RUN_OPTIONS=" --mdf ${ALLEN_DATA}/mdf_input/${DATA_TAG}.mdf --sequence ${SEQUENCE} --params external/ParamFiles/ ${RUN_OPTIONS}"
+RUN_OPTIONS=" --mdf ${ALLEN_DATA}/mdf_input/${DATA_TAG}.mdf --sequence ${SEQUENCE}.json --params external/ParamFiles/ ${RUN_OPTIONS}"
 
 OUTPUT_FOLDER="${TEST_NAME}_output_${SEQUENCE}"
 
diff --git a/scripts/ci/jobs/run_throughput.sh b/scripts/ci/jobs/run_throughput.sh
index b1115bdab4a..a77ad609f76 100755
--- a/scripts/ci/jobs/run_throughput.sh
+++ b/scripts/ci/jobs/run_throughput.sh
@@ -17,10 +17,10 @@ if [ ! -z ${GEOMETRY+x} ]; then
   RUN_OPTIONS="$RUN_OPTIONS -g /scratch/allen_geometries/${GEOMETRY}"
 fi
 
-RUN_OPTIONS="--mdf ${ALLEN_DATA}/mdf_input/${DATA_TAG}.mdf --sequence ${SEQUENCE}  --run-from-json 1 --params external/ParamFiles/ ${RUN_OPTIONS}"
+RUN_OPTIONS="--mdf ${ALLEN_DATA}/mdf_input/${DATA_TAG}.mdf --sequence ${SEQUENCE}.json --params external/ParamFiles/ ${RUN_OPTIONS}"
 
 
-if [ "${AVOID_HIP}" = "1" ]; then 
+if [ "${AVOID_HIP}" = "1" ]; then
   if [ "${TARGET}" = "HIP" ]; then
     echo "***** Variable TARGET is set to HIP, and AVOID_HIP is set to 1 - quit."
     exit 0
@@ -87,7 +87,7 @@ else
     NUMA_NODE=${CI_RUNNER_DESCRIPTION_SPLIT[2]}
     THREADS=$((${TOTAL_THREADS} / ${TOTAL_NUMA_NODES}))
     RUN_OPTIONS="${RUN_OPTIONS} ${RUN_THROUGHPUT_OPTIONS_CPU} -t ${THREADS}"
-    
+
     ALLEN="numactl --cpunodebind=${NUMA_NODE} --membind=${NUMA_NODE} ./toolchain/wrapper ./Allen ${RUN_OPTIONS}"
 
   elif [ "${TARGET}" = "CUDA" ]; then
@@ -96,7 +96,7 @@ else
     GPU_NUMBER=`nvidia-smi -L | grep ${GPU_UUID} | awk '{ print $2; }' | sed -e 's/://'`
     NUMA_NODE=`nvidia-smi topo -m | grep GPU${GPU_NUMBER} | tail -1 | awk '{ print $NF; }'`
     RUN_OPTIONS="${RUN_OPTIONS} ${RUN_THROUGHPUT_OPTIONS_CUDA}"
-    
+
     ALLEN="CUDA_DEVICE_ORDER=PCI_BUS_ID CUDA_VISIBLE_DEVICES=${GPU_NUMBER} numactl --cpunodebind=${NUMA_NODE} --membind=${NUMA_NODE} ./toolchain/wrapper ./Allen ${RUN_OPTIONS}"
 
     nvidia-smi
@@ -141,7 +141,7 @@ echo "${CI_COMMIT_SHORT_SHA}" > "${OUTPUT_FOLDER}/revision.txt"
 echo "throughput_kHz{device=\"${DEVICE_ID}\",sequence=\"${SEQUENCE}\",dataset=\"${DATA_TAG}\"} ${THROUGHPUT_KHZ}" >> "${OUTPUT_FOLDER}/metrics.txt"
 
 
-if [ "${TPUT_REPORT}" = "NO_REPORT" ]; then 
+if [ "${TPUT_REPORT}" = "NO_REPORT" ]; then
   echo "TPUT_REPORT is set to ${TPUT_REPORT} - throughput will not be reported."
 
   touch "${OUTPUT_FOLDER}/no_throughput_report.txt"
diff --git a/scripts/ci/jobs/run_toggle_run_changes.sh b/scripts/ci/jobs/run_toggle_run_changes.sh
index 79f87b855ff..201303c2914 100755
--- a/scripts/ci/jobs/run_toggle_run_changes.sh
+++ b/scripts/ci/jobs/run_toggle_run_changes.sh
@@ -7,8 +7,8 @@ set -euxo pipefail
 
 check_build_exists
 
-RUN_OPTIONS="-n 1000 -m 1000 --run-from-json 1 --params external/ParamFiles/"
-JOB="./toolchain/wrapper ./Allen --mdf ${ALLEN_DATA}/mdf_input/${DATA_TAG}.mdf --sequence ${SEQUENCE} ${RUN_OPTIONS}"
+RUN_OPTIONS="-n 1000 -m 1000 --params external/ParamFiles/"
+JOB="./toolchain/wrapper ./Allen --mdf ${ALLEN_DATA}/mdf_input/${DATA_TAG}.mdf --sequence ${SEQUENCE}.json ${RUN_OPTIONS}"
 
 for RUN_CHANGES in ON OFF; do
   echo "RUN_CHANGES: $RUN_CHANGES"
diff --git a/scripts/ci/test_config.yaml b/scripts/ci/test_config.yaml
index 3e6907e68a7..cd788514b20 100644
--- a/scripts/ci/test_config.yaml
+++ b/scripts/ci/test_config.yaml
@@ -9,13 +9,13 @@ config:
   # args added for specific test keys
   args:
     # Added to Allen command always
-    base: "--run-from-json 1 --params external/ParamFiles/"
+    base: "--params external/ParamFiles/"
 
     # added if "dataset:" specified
     dataset: "--mdf /scratch/allen_data/mdf_input/{dataset}.mdf"
 
     # added if "sequence:" specified
-    sequence: "--sequence {sequence}"
+    sequence: "--sequence {sequence}.json"
 
     # added if "geometry:" specified
     geometry: "-g /scratch/allen_geometries/{geometry}"
-- 
GitLab


From 4112d67f20884769683fa9ca8041d49f33d7e667 Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Sun, 26 Mar 2023 21:51:41 +0200
Subject: [PATCH 15/49] Use ORIGIN in build rpath for Allen to find shared
 libs.

---
 CMakeLists.txt                            | 6 +++---
 Dumpers/BinaryDumpers/CMakeLists.txt      | 3 +--
 Rec/Allen/CMakeLists.txt                  | 1 -
 integration/non_event_data/CMakeLists.txt | 2 +-
 4 files changed, 5 insertions(+), 7 deletions(-)

diff --git a/CMakeLists.txt b/CMakeLists.txt
index da011f93b9e..98340634a1e 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -705,7 +705,6 @@ target_link_libraries(AllenLib
     Lumi
     Monitoring
     Muon
-    NonEventData
     PVChecking
     PV_beamline
     Plume
@@ -728,6 +727,7 @@ target_link_libraries(AllenLib
     AllenRuntime
     Threads::Threads
     AllenZMQ
+    NonEventData
     ${CMAKE_DL_LIBS})
 
 # To propagate filesystem includes/libs
@@ -747,8 +747,8 @@ endif()
 allen_add_executable(Allen main/src/main.cpp)
 add_dependencies(Allen Sequences)
 
-target_link_libraries(Allen PRIVATE AllenLib NonEventData)
-
+target_link_libraries(Allen PRIVATE AllenLib)
+set_target_properties(Allen PROPERTIES BUILD_RPATH_USE_ORIGIN TRUE)
 
 if (NOT STANDALONE)
     include(FileContentMetadataRepository)
diff --git a/Dumpers/BinaryDumpers/CMakeLists.txt b/Dumpers/BinaryDumpers/CMakeLists.txt
index 9539a97c8a8..71637c68db6 100644
--- a/Dumpers/BinaryDumpers/CMakeLists.txt
+++ b/Dumpers/BinaryDumpers/CMakeLists.txt
@@ -9,9 +9,8 @@ gaudi_add_library(BinaryDumpers
                   SOURCES
                     src/lib/TestUTBoards.cpp
                     src/lib/Utils.cpp
-                  LINK PRIVATE
-                    NonEventData
                   LINK PUBLIC
+                    NonEventData
                     AllenCommon
                     HostCommon
                     EventModel
diff --git a/Rec/Allen/CMakeLists.txt b/Rec/Allen/CMakeLists.txt
index a2ccd849ebb..66e26dd0657 100755
--- a/Rec/Allen/CMakeLists.txt
+++ b/Rec/Allen/CMakeLists.txt
@@ -36,7 +36,6 @@ gaudi_add_module(AllenWrapper
                    HostCommon
                    HostEventModel
                    HostRoutingBits
-                   NonEventData
                    TrackChecking
                    PVChecking
                    CheckClustering
diff --git a/integration/non_event_data/CMakeLists.txt b/integration/non_event_data/CMakeLists.txt
index a77bdd7a913..f795c8be812 100644
--- a/integration/non_event_data/CMakeLists.txt
+++ b/integration/non_event_data/CMakeLists.txt
@@ -3,7 +3,7 @@
 ###############################################################################
 file(GLOB SOURCES "src/*.cpp")
 
-allen_add_host_library(NonEventData STATIC ${SOURCES})
+allen_add_host_library(NonEventData SHARED ${SOURCES})
 
 target_link_libraries(NonEventData PRIVATE
   HostClustering
-- 
GitLab


From 98b9c9d29866ece780f2e1f4a223a3092a1d1bb5 Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Mon, 27 Mar 2023 08:48:39 +0200
Subject: [PATCH 16/49] Fix loading configuration from JSON file

---
 main/src/Provider.cpp | 5 +++--
 main/src/main.cpp     | 2 +-
 2 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/main/src/Provider.cpp b/main/src/Provider.cpp
index 0cc8a50e5df..63dc95b2384 100644
--- a/main/src/Provider.cpp
+++ b/main/src/Provider.cpp
@@ -3,6 +3,7 @@
 \*****************************************************************************/
 #include <string>
 #include <iostream>
+#include <fstream>
 #include <regex>
 
 #include <MDFProvider.h>
@@ -111,8 +112,8 @@ std::string Allen::sequence_conf(std::map<std::string, std::string> const& optio
     if (!config_file.is_open()) {
       throw std::runtime_error {"failed to open sequence configuration file " + json_configuration_file};
     }
-    config_file >> config;
-    return config;
+
+    return std::string {std::istreambuf_iterator<char> {config_file}, std::istreambuf_iterator<char> {}};
   }
 }
 
diff --git a/main/src/main.cpp b/main/src/main.cpp
index 2ef1c26b8ca..1e50b7c821e 100644
--- a/main/src/main.cpp
+++ b/main/src/main.cpp
@@ -2,7 +2,7 @@
 * (c) Copyright 2018-2020 CERN for the benefit of the LHCb Collaboration      *
 \*****************************************************************************/
 /**
- *      CUDA HLT1
+ *      LHCb GPU HLT1 Demonstrator
  *
  *      author  -  GPU working group
  *      e-mail  -  lhcb-parallelization@cern.ch
-- 
GitLab


From 4e8994d04364fc371c98012536a6c6c801fb9c7d Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Mon, 27 Mar 2023 14:30:39 +0200
Subject: [PATCH 17/49] Refactor build of NonEventData to avoid double device
 link

---
 CMakeLists.txt                            |  3 +-
 Dumpers/BinaryDumpers/CMakeLists.txt      |  2 +-
 Dumpers/BinaryDumpers/src/lib/Utils.cpp   | 10 -------
 integration/non_event_data/CMakeLists.txt | 35 +++++++++++++----------
 main/src/RegisterConsumers.cpp            | 14 +++++++--
 5 files changed, 35 insertions(+), 29 deletions(-)

diff --git a/CMakeLists.txt b/CMakeLists.txt
index 98340634a1e..6d01d3e494b 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -705,6 +705,7 @@ target_link_libraries(AllenLib
     Lumi
     Monitoring
     Muon
+    NonEventData
     PVChecking
     PV_beamline
     Plume
@@ -727,7 +728,7 @@ target_link_libraries(AllenLib
     AllenRuntime
     Threads::Threads
     AllenZMQ
-    NonEventData
+    NonEventDataHeaders
     ${CMAKE_DL_LIBS})
 
 # To propagate filesystem includes/libs
diff --git a/Dumpers/BinaryDumpers/CMakeLists.txt b/Dumpers/BinaryDumpers/CMakeLists.txt
index 71637c68db6..fd740e026e9 100644
--- a/Dumpers/BinaryDumpers/CMakeLists.txt
+++ b/Dumpers/BinaryDumpers/CMakeLists.txt
@@ -10,7 +10,7 @@ gaudi_add_library(BinaryDumpers
                     src/lib/TestUTBoards.cpp
                     src/lib/Utils.cpp
                   LINK PUBLIC
-                    NonEventData
+                    NonEventDataHeaders
                     AllenCommon
                     HostCommon
                     EventModel
diff --git a/Dumpers/BinaryDumpers/src/lib/Utils.cpp b/Dumpers/BinaryDumpers/src/lib/Utils.cpp
index 7c11567f1c4..642653e6cf7 100644
--- a/Dumpers/BinaryDumpers/src/lib/Utils.cpp
+++ b/Dumpers/BinaryDumpers/src/lib/Utils.cpp
@@ -6,7 +6,6 @@
 
 #include <Dumpers/Utils.h>
 #include <Dumpers/IUpdater.h>
-#include <Updater.h>
 
 namespace {
   namespace fs = boost::filesystem;
@@ -38,12 +37,3 @@ size_t MuonUtils::size_index(
     return index + 4 * tile.nY() - 2 * gridY[idx] + (2 * tile.nX() / gridX[idx]);
   }
 }
-
-Allen::NonEventData::IUpdater* binary_updater(std::map<std::string, std::string> const& options)
-{
-  static std::unique_ptr<Allen::NonEventData::IUpdater> updater;
-  if (!updater) {
-    updater = std::make_unique<Allen::NonEventData::Updater>(options);
-  }
-  return updater.get();
-}
diff --git a/integration/non_event_data/CMakeLists.txt b/integration/non_event_data/CMakeLists.txt
index f795c8be812..85c6642e2d5 100644
--- a/integration/non_event_data/CMakeLists.txt
+++ b/integration/non_event_data/CMakeLists.txt
@@ -3,19 +3,24 @@
 ###############################################################################
 file(GLOB SOURCES "src/*.cpp")
 
-allen_add_host_library(NonEventData SHARED ${SOURCES})
+allen_add_host_library(NonEventData STATIC ${SOURCES})
+add_library(NonEventDataHeaders INTERFACE)
+target_include_directories(NonEventDataHeaders INTERFACE
+  $<BUILD_INTERFACE:${PROJECT_SOURCE_DIR}/Dumpers/BinaryDumpers/include>
+  $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/include>)
+install(TARGETS NonEventDataHeaders EXPORT Allen)
 
-target_link_libraries(NonEventData PRIVATE
-  HostClustering
-  AllenRuntime
-  AllenCommon
-  HostCommon
-  EventModel
-  Gear
-  MuonCommon
-  UTCommon
-  nlohmann_json::nlohmann_json)
-
-target_include_directories(NonEventData PUBLIC
-  $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/include>
-  $<BUILD_INTERFACE:${PROJECT_SOURCE_DIR}/Dumpers/BinaryDumpers/include>)
+target_link_libraries(NonEventData
+  PRIVATE
+    HostClustering
+    AllenRuntime
+    AllenCommon
+    HostCommon
+    EventModel
+    Gear
+    MuonCommon
+    UTCommon
+    nlohmann_json::nlohmann_json
+  PUBLIC
+    NonEventDataHeaders
+  )
diff --git a/main/src/RegisterConsumers.cpp b/main/src/RegisterConsumers.cpp
index c3983fac8a2..265f5340fe0 100644
--- a/main/src/RegisterConsumers.cpp
+++ b/main/src/RegisterConsumers.cpp
@@ -1,8 +1,9 @@
 /*****************************************************************************\
 * (c) Copyright 2018-2020 CERN for the benefit of the LHCb Collaboration      *
 \*****************************************************************************/
-#include "RegisterConsumers.h"
-#include "Common.h"
+#include <RegisterConsumers.h>
+#include <Common.h>
+#include <Updater.h>
 
 /**
  * @brief      Register all consumers of non-event data
@@ -86,3 +87,12 @@ void register_consumers(
     updater->registerConsumer<id_t>(std::get<1>(c)());
   });
 }
+
+Allen::NonEventData::IUpdater* binary_updater(std::map<std::string, std::string> const& options)
+{
+  static std::unique_ptr<Allen::NonEventData::IUpdater> updater;
+  if (!updater) {
+    updater = std::make_unique<Allen::NonEventData::Updater>(options);
+  }
+  return updater.get();
+}
-- 
GitLab


From 91d1bae1bf79df53d1618dffc5411bd9fbce8b7c Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Mon, 27 Mar 2023 22:28:19 +0200
Subject: [PATCH 18/49] Fix configured_bank_types

---
 Rec/Allen/python/Allen/config.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/Rec/Allen/python/Allen/config.py b/Rec/Allen/python/Allen/config.py
index 307e28f25c9..4ebedce4dd2 100755
--- a/Rec/Allen/python/Allen/config.py
+++ b/Rec/Allen/python/Allen/config.py
@@ -92,6 +92,7 @@ def allen_detectors(allen_node):
 
 
 def configured_bank_types(sequence_json):
+    sequence_json = json.loads(sequence_json)
     bank_types = set()
     for t, n, c in sequence_json["sequence"]["configured_algorithms"]:
         props = sequence_json.get(n, {})
-- 
GitLab


From 5b03b6fd025fb92b66a215be906ee4d2f10c3352 Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Mon, 27 Mar 2023 23:47:16 +0200
Subject: [PATCH 19/49] Fix allen.py

---
 Dumpers/BinaryDumpers/options/allen.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/Dumpers/BinaryDumpers/options/allen.py b/Dumpers/BinaryDumpers/options/allen.py
index 0ef2401e50d..35582b8a5e6 100755
--- a/Dumpers/BinaryDumpers/options/allen.py
+++ b/Dumpers/BinaryDumpers/options/allen.py
@@ -327,8 +327,8 @@ if args.mep:
     mep_provider = gaudi.service("MEPProvider", interface=gbl.IService)
     provider = cast_service(gbl.IInputProvider, mep_provider)
 else:
-    provider = gbl.Allen.make_provider(options)
-output_handler = gbl.Allen.output_handler(provider, zmqSvc, options)
+    provider = gbl.Allen.make_provider(options, sequence_json)
+output_handler = gbl.Allen.output_handler(provider, zmqSvc, options, sequence_json)
 
 # run Allen
 gbl.allen.__release_gil__ = 1
@@ -357,7 +357,7 @@ def allen_thread():
     if args.profile == "CUDA":
         runtime_lib.cudaProfilerStart()
 
-    gbl.allen(options, updater, shared_wrap(gbl.IInputProvider, provider),
+    gbl.allen(options, sequence_json, updater, shared_wrap(gbl.IInputProvider, provider),
               output_handler, zmqSvc, con.c_str())
 
     if args.profile == "CUDA":
-- 
GitLab


From c05f53f0239dc7fb8150fb61cbd67ab20e11ab74 Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Tue, 28 Mar 2023 13:17:09 +0200
Subject: [PATCH 20/49] Add tests that create TCKs for a sizeable subsample of
 the available sequences from both JSON and python modules and compare them

---
 Rec/Allen/python/Allen/qmtest/utils.py       | 16 +++++
 Rec/Allen/python/Allen/tck.py                | 23 +++++-
 Rec/Allen/scripts/create_hlt1_tck.py         | 22 ++++--
 Rec/Allen/tests/options/compare_hlt1_tcks.py | 74 ++++++++++++++++++++
 Rec/Allen/tests/options/create_hlt1_tcks.py  | 42 +++++++++++
 Rec/Allen/tests/qmtest/compare_tcks.qmt      | 31 ++++++++
 Rec/Allen/tests/qmtest/create_tcks.qmt       | 21 ++++++
 7 files changed, 224 insertions(+), 5 deletions(-)
 create mode 100644 Rec/Allen/python/Allen/qmtest/utils.py
 create mode 100644 Rec/Allen/tests/options/compare_hlt1_tcks.py
 create mode 100644 Rec/Allen/tests/options/create_hlt1_tcks.py
 create mode 100644 Rec/Allen/tests/qmtest/compare_tcks.qmt
 create mode 100644 Rec/Allen/tests/qmtest/create_tcks.qmt

diff --git a/Rec/Allen/python/Allen/qmtest/utils.py b/Rec/Allen/python/Allen/qmtest/utils.py
new file mode 100644
index 00000000000..afb74d78220
--- /dev/null
+++ b/Rec/Allen/python/Allen/qmtest/utils.py
@@ -0,0 +1,16 @@
+###############################################################################
+# (c) Copyright 2023 CERN for the benefit of the LHCb Collaboration           #
+#                                                                             #
+# This software is distributed under the terms of the Apache License          #
+# version 2 (Apache-2.0), copied verbatim in the file "COPYING".              #
+#                                                                             #
+# In applying this licence, CERN does not waive the privileges and immunities #
+# granted to it by virtue of its status as an Intergovernmental Organization  #
+# or submit itself to any jurisdiction.                                       #
+###############################################################################
+
+
+def good_sequence(s):
+    physics = s.startswith('hlt1') and 'validation' not in s
+    extra = s in ('calo_prescaled_plus_lumi', 'passthrough')
+    return physics or extra
diff --git a/Rec/Allen/python/Allen/tck.py b/Rec/Allen/python/Allen/tck.py
index 49d0230d199..f9fc4db8767 100644
--- a/Rec/Allen/python/Allen/tck.py
+++ b/Rec/Allen/python/Allen/tck.py
@@ -176,7 +176,11 @@ def sequence_from_git(repository: Path, tck: str, use_bindings=True) -> str:
             return None
         tck_db = json.loads(p.stdout)
         digest, manifest_entry = next(((k, m) for k, m in tck_db.items() if m["TCK"] == tck), None)
-        return json.dump(tck_to_sequence(tck_db[digest]))
+        release, seq_type = next((k, v) for k, v in manifest_entry["Release2Type"].items())
+        tck = manifest_entry["TCK"]
+        label = manifest_entry["label"]
+        info = {"digest": digest, "tck": tck, "release": release, "type": seq_type, "label": label}
+        return (json.dump(tck_to_sequence(tck_db[digest])), info)
 
 
 def property_from_git(repository: Path, tck: str, algorithm=".*", property=".*"):
@@ -194,3 +198,20 @@ def property_from_git(repository: Path, tck: str, algorithm=".*", property=".*")
             result[alg] = prop_result
 
     return result
+
+
+def manifest_from_git(repository: Path):
+    p = run(
+        [
+            "hlttck_cdb_listkeys",
+            "--list-manifest-as-json",
+            f"{str(repository)}",
+            "-",
+        ],
+        stdout=PIPE,
+    )
+    if p.returncode != 0:
+        print("Failed to convert manifest from git repo to JSON")
+        return None
+    else:
+        return json.loads(p.stdout)
diff --git a/Rec/Allen/scripts/create_hlt1_tck.py b/Rec/Allen/scripts/create_hlt1_tck.py
index fb16943240c..4f69405bf75 100644
--- a/Rec/Allen/scripts/create_hlt1_tck.py
+++ b/Rec/Allen/scripts/create_hlt1_tck.py
@@ -10,6 +10,7 @@
 ###############################################################################
 import argparse
 import json
+import sys
 from Allen.tck import sequence_to_git, sequence_from_python
 from pathlib import Path
 
@@ -27,20 +28,33 @@ sequence_arg = Path(args.sequence[0])
 repository = Path(args.repository[0])
 tck = int(args.tck[0], 16)
 
+def dec_reporter_name(conf):
+    return next((n for t, n, _ in conf['sequence']['configured_algorithms']
+                 if t == "dec_reporter::dec_reporter_t"), None)
+
 sequence = None
 if sequence_arg.suffix in (".py", ''):
     from AllenCore.configuration_options import is_allen_standalone
     is_allen_standalone.global_bind(standalone=True)
 
     from AllenConf.persistency import make_dec_reporter
+    sequence, dn = {}, None
     with make_dec_reporter.bind(TCK=tck):
         sequence = sequence_from_python(sequence_arg, node_name=args.hlt1_node)
+    dn = dec_reporter_name(sequence)
+    if dn is None:
+        print(f"Cannot create TCK {hex(tck)} for sequence {sequence_arg.stem}, because it does not contain the dec_reporter")
+        sys.exit(1)
 elif sequence_arg.suffix == '.json':
+    sequence, dn = {}, None
     with open(sequence_arg, "r") as sequence_file:
         sequence = json.load(sequence_file)
-        dec_reporter_name = next(
-            (n for t, n, _ in sequence['sequence']['configured_algorithms']
-             if t == "dec_reporter::dec_reporter_t"), None)
-        sequence[dec_reporter_name]['tck'] = tck
+        dn = dec_reporter_name(sequence)
+    if dn is None:
+        print(f"Cannot create TCK {hex(tck)} for sequence {sequence_arg.stem}, because it does not contain the dec_reporter")
+        sys.exit(1)
+    else:
+        sequence[dn]['tck'] = tck
 
 sequence_to_git(repository, sequence, sequence_arg.stem, args.label, tck)
+print(f"Created TCK {hex(tck)} for sequence {sequence_arg.stem}")
diff --git a/Rec/Allen/tests/options/compare_hlt1_tcks.py b/Rec/Allen/tests/options/compare_hlt1_tcks.py
new file mode 100644
index 00000000000..18d5f17dcbe
--- /dev/null
+++ b/Rec/Allen/tests/options/compare_hlt1_tcks.py
@@ -0,0 +1,74 @@
+###############################################################################
+# (c) Copyright 2023 CERN for the benefit of the LHCb Collaboration           #
+#                                                                             #
+# This software is distributed under the terms of the Apache License          #
+# version 2 (Apache-2.0), copied verbatim in the file "COPYING".              #
+#                                                                             #
+# In applying this licence, CERN does not waive the privileges and immunities #
+# granted to it by virtue of its status as an Intergovernmental Organization  #
+# or submit itself to any jurisdiction.                                       #
+###############################################################################
+import os
+import sys
+import json
+from Allen.qmtest.utils import good_sequence
+from Allen.tck import manifest_from_git, sequence_from_git
+from pathlib import Path
+
+seq_dir = Path(os.path.expandvars("${ALLEN_INSTALL_DIR}/constants"))
+json_repo = Path("config_json.git")
+python_repo = Path("config_python.git")
+
+manifest_json = manifest_from_git(json_repo)
+manifest_python = manifest_from_git(python_repo)
+
+# Digests are not necessarily the same, but manifest values should be
+entries_json = sorted(manifest_json.values(), key=lambda v: v["TCK"])
+entries_python = sorted(manifest_python.values(), key=lambda v: v["TCK"])
+
+error = entries_json != entries_python
+if error:
+    print("ERROR: Manifests are not the same")
+
+for m, suf in ((manifest_json, "json"), (manifest_python, "python")):
+    with open(f"manifest_{suf}.json", "w") as f:
+        json.dump(m, f)
+
+for info in entries_json:
+    sequence_json = json.loads(sequence_from_git(json_repo, info["TCK"])[0])
+    sequence_python = json.loads(sequence_from_git(python_repo, info["TCK"])[0])
+    sequence_type = next(v for v in info["Release2Type"].values())
+    sequence_direct = None
+    tck = info["TCK"]
+
+    with open(str((seq_dir / f"{sequence_type}.json").resolve())) as js:
+        sequence_direct = json.load(js)
+        # Fixup the TCK here for comparison purposes because it's not
+        # set when running from the JSON file
+        sequence_direct['dec_reporter']['tck'] = int(tck, 16)
+
+    if sequence_json != sequence_python:
+        print(
+            f"ERROR: sequences loaded from JSON and python git repos for TCK {tck} are not the same"
+        )
+        error = True
+    if sequence_json != sequence_direct:
+        print(
+            f"ERROR: sequences loaded directly from JSON and from JSON git repo for {tck} are not the same"
+        )
+
+        diff_keys = set(sequence_direct.keys()).symmetric_difference(
+            set(sequence_json.keys()))
+        if diff_keys:
+            print("different keys")
+            print(diff_keys)
+        diff = {}
+        kj = [k for k in sequence_json.keys() if k not in diff_keys]
+        for k in kj:
+            if sequence_json[k] != sequence_direct[k]:
+                diff[k] = (sequence_json[k], sequence_direct[k])
+        print(diff)
+
+        error = True
+
+sys.exit(error)
diff --git a/Rec/Allen/tests/options/create_hlt1_tcks.py b/Rec/Allen/tests/options/create_hlt1_tcks.py
new file mode 100644
index 00000000000..caf417658e9
--- /dev/null
+++ b/Rec/Allen/tests/options/create_hlt1_tcks.py
@@ -0,0 +1,42 @@
+###############################################################################
+# (c) Copyright 2023 CERN for the benefit of the LHCb Collaboration           #
+#                                                                             #
+# This software is distributed under the terms of the Apache License          #
+# version 2 (Apache-2.0), copied verbatim in the file "COPYING".              #
+#                                                                             #
+# In applying this licence, CERN does not waive the privileges and immunities #
+# granted to it by virtue of its status as an Intergovernmental Organization  #
+# or submit itself to any jurisdiction.                                       #
+###############################################################################
+import os
+import sys
+import subprocess
+from pathlib import Path
+from Allen.qmtest.utils import good_sequence
+
+seq_dir = os.path.expandvars("${ALLEN_INSTALL_DIR}/constants")
+tck_script = os.path.expandvars("${ALLENROOT}/scripts/create_hlt1_tck.py")
+
+error = False
+for i, seq in enumerate(s for s in os.listdir(seq_dir) if good_sequence(s)):
+    seq = Path(seq_dir) / seq
+
+    # Create TCKs from JSON files
+    tck = hex(0x10000001 + i)
+    r = subprocess.run(
+        ["python", tck_script,
+         str(seq), "config_json.git", tck])
+    if r.returncode != 0:
+        error = True
+    else:
+        print(f"Created TCK {tck} from JSON configuration {str(seq)}")
+
+    # Create TCKs from python configurations
+    r = subprocess.run(
+        ["python", tck_script, seq.stem, "config_python.git", tck])
+    if r.returncode != 0:
+        error = True
+    else:
+        print(f"Created TCK {tck} from Python configuration {seq.stem}")
+
+sys.exit(error)
diff --git a/Rec/Allen/tests/qmtest/compare_tcks.qmt b/Rec/Allen/tests/qmtest/compare_tcks.qmt
new file mode 100644
index 00000000000..1e62fc3306b
--- /dev/null
+++ b/Rec/Allen/tests/qmtest/compare_tcks.qmt
@@ -0,0 +1,31 @@
+<?xml version="1.0" ?><!DOCTYPE extension  PUBLIC '-//QM/2.3/Extension//EN'  'http://www.codesourcery.com/qm/dtds/2.3/-//qm/2.3/extension//en.dtd'>
+<!--
+    (c) Copyright 2020 CERN for the benefit of the LHCb Collaboration
+-->
+<!--
+#######################################################
+# SUMMARY OF THIS TEST
+# ...................
+# Author: Roel Aaij
+# Purpose: Compare TCKs created from JSON and Python configurations
+#          and check that they are identical
+#######################################################
+-->
+<extension class="GaudiTest.GaudiExeTest" kind="test">
+  <argument name="program"><text>python</text></argument>
+  <argument name="args"><set>
+    <text>${ALLENROOT}/tests/options/compare_hlt1_tcks.py</text>
+  </set></argument>
+  <argument name="timeout"><integer>600</integer></argument>
+  <argument name="use_temp_dir"><enumeral>true</enumeral></argument>
+<argument name="validator"><text>
+
+# No validator for now: only check the exit code
+
+import glob
+workdir = self._common_tmpdir
+for fn in glob.glob(workdir + "/*.json"):
+    result[os.path.basename(fn)] = open(fn).read()
+
+</text></argument>
+</extension>
diff --git a/Rec/Allen/tests/qmtest/create_tcks.qmt b/Rec/Allen/tests/qmtest/create_tcks.qmt
new file mode 100644
index 00000000000..8ed501c2c30
--- /dev/null
+++ b/Rec/Allen/tests/qmtest/create_tcks.qmt
@@ -0,0 +1,21 @@
+<?xml version="1.0" ?><!DOCTYPE extension  PUBLIC '-//QM/2.3/Extension//EN'  'http://www.codesourcery.com/qm/dtds/2.3/-//qm/2.3/extension//en.dtd'>
+<!--
+    (c) Copyright 2020 CERN for the benefit of the LHCb Collaboration
+-->
+<!--
+#######################################################
+# SUMMARY OF THIS TEST
+# ...................
+# Author: Roel Aaij
+# Purpose: Create TCKs from JSON files and Python configurations
+#          for a subsample of the available sequences
+#######################################################
+-->
+<extension class="GaudiTest.GaudiExeTest" kind="test">
+  <argument name="program"><text>python</text></argument>
+  <argument name="args"><set>
+    <text>${ALLENROOT}/tests/options/create_hlt1_tcks.py</text>
+  </set></argument>
+  <argument name="timeout"><integer>600</integer></argument>
+  <argument name="use_temp_dir"><enumeral>true</enumeral></argument>
+</extension>
-- 
GitLab


From 2128525cc7bb72b0c9f9ed7de220fbb73e347f26 Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Tue, 28 Mar 2023 14:09:05 +0200
Subject: [PATCH 21/49] Fix sequence_from_git without bindings

---
 Rec/Allen/python/Allen/tck.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/Rec/Allen/python/Allen/tck.py b/Rec/Allen/python/Allen/tck.py
index f9fc4db8767..99df9ba93fe 100644
--- a/Rec/Allen/python/Allen/tck.py
+++ b/Rec/Allen/python/Allen/tck.py
@@ -62,7 +62,7 @@ def tck_to_sequence(config: dict):
         "sequence": {e: config["Scheduler/" + e] for e in scheduler_entries}
     }
 
-    for alg_type, alg_name, alg_kind in config["sequence"]["configured_algorithms"]:
+    for alg_type, alg_name, alg_kind in sequence_config["sequence"]["configured_algorithms"]:
         tck_props = config[f"{alg_kind}/{alg_type}/{alg_name}"]["Properties"]
         properties = {}
         for k, v in tck_props.items():
@@ -175,12 +175,12 @@ def sequence_from_git(repository: Path, tck: str, use_bindings=True) -> str:
             print("Failed to convert configuration in git repo to JSON")
             return None
         tck_db = json.loads(p.stdout)
-        digest, manifest_entry = next(((k, m) for k, m in tck_db.items() if m["TCK"] == tck), None)
+        digest, manifest_entry = next(((k, m) for k, m in tck_db["manifest"].items() if m["TCK"] == tck), None)
         release, seq_type = next((k, v) for k, v in manifest_entry["Release2Type"].items())
         tck = manifest_entry["TCK"]
         label = manifest_entry["label"]
         info = {"digest": digest, "tck": tck, "release": release, "type": seq_type, "label": label}
-        return (json.dump(tck_to_sequence(tck_db[digest])), info)
+        return (json.dumps(tck_to_sequence(tck_db[digest])), info)
 
 
 def property_from_git(repository: Path, tck: str, algorithm=".*", property=".*"):
-- 
GitLab


From 5964134257f06a4ade34110e8ca9ac0696bd7553 Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Wed, 29 Mar 2023 16:39:30 +0200
Subject: [PATCH 22/49] Don't use properties that are never configured to
 propagate numbers

---
 device/lumi/include/CaloLumiCounters.cuh  | 31 +++++++---------
 device/lumi/include/MakeLumiSummary.cuh   | 14 ++++----
 device/lumi/include/MuonLumiCounters.cuh  | 32 +++++++----------
 device/lumi/include/PVLumiCounters.cuh    | 27 ++++++--------
 device/lumi/include/PlumeLumiCounters.cuh | 26 ++++++--------
 device/lumi/include/SciFiLumiCounters.cuh | 30 +++++++---------
 device/lumi/include/VeloLumiCounters.cuh  | 31 +++++++---------
 device/lumi/src/CaloLumiCounters.cu       | 35 +++++++++---------
 device/lumi/src/MakeLumiSummary.cu        | 43 +++++++----------------
 device/lumi/src/MuonLumiCounters.cu       | 41 ++++++++++-----------
 device/lumi/src/PVLumiCounters.cu         | 30 ++++++++--------
 device/lumi/src/PlumeLumiCounters.cu      | 32 ++++++++---------
 device/lumi/src/SciFiLumiCounters.cu      | 34 +++++++++---------
 device/lumi/src/VeloLumiCounters.cu       | 31 ++++++++--------
 14 files changed, 186 insertions(+), 251 deletions(-)

diff --git a/device/lumi/include/CaloLumiCounters.cuh b/device/lumi/include/CaloLumiCounters.cuh
index 258e5143bbf..88aca92d28a 100644
--- a/device/lumi/include/CaloLumiCounters.cuh
+++ b/device/lumi/include/CaloLumiCounters.cuh
@@ -39,21 +39,17 @@ namespace calo_lumi_counters {
       "shifts and scales extracted from the schema for lumi counters",
       std::map<std::string, std::pair<float, float>>)
     lumi_counter_shifts_and_scales;
-    PROPERTY(
-      calo_offsets_and_sizes_t,
-      "calo_offsets_and_sizes",
-      "offsets and sizes in bits for the calo counters",
-      std::array<unsigned, 2 * Lumi::Constants::n_calo_counters>)
-    calo_offsets_and_sizes;
-    PROPERTY(
-      calo_shifts_and_scales_t,
-      "calo_shifts_and_scales",
-      "shifts and scales for the calo counters",
-      std::array<float, 2 * Lumi::Constants::n_calo_counters>)
-    calo_shifts_and_scales;
   }; // struct Parameters
 
-  __global__ void calo_lumi_counters(Parameters, const unsigned number_of_events, const char* raw_ecal_geometry);
+  using offsets_and_sizes_t = std::array<unsigned, 2 * Lumi::Constants::n_calo_counters>;
+  using shifts_and_scales_t = std::array<float, 2 * Lumi::Constants::n_calo_counters>;
+
+  __global__ void calo_lumi_counters(
+    Parameters,
+    const unsigned number_of_events,
+    const offsets_and_sizes_t offsets_and_sizes,
+    const shifts_and_scales_t shifts_and_scales,
+    const char* raw_ecal_geometry);
 
   struct calo_lumi_counters_t : public DeviceAlgorithm, Parameters {
 
@@ -71,12 +67,9 @@ namespace calo_lumi_counters {
     Property<block_dim_t> m_block_dim {this, {{64, 1, 1}}};
     Property<lumi_counter_schema_t> m_lumi_counter_schema {this, {}};
     Property<lumi_counter_shifts_and_scales_t> m_lumi_counter_shifts_and_scales {this, {}};
-    Property<calo_offsets_and_sizes_t> m_calo_offsets_and_sizes {
-      this,
-      {{0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u}}};
-    Property<calo_shifts_and_scales_t> m_calo_shifts_and_scales {
-      this,
-      {{0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f}}};
+
+    offsets_and_sizes_t m_offsets_and_sizes = {0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u};
+    shifts_and_scales_t m_shifts_and_scales = {0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f};
 
   }; // struct calo_lumi_counters_t
 } // namespace calo_lumi_counters
diff --git a/device/lumi/include/MakeLumiSummary.cuh b/device/lumi/include/MakeLumiSummary.cuh
index ae9689e22a3..54f35cbcfc8 100644
--- a/device/lumi/include/MakeLumiSummary.cuh
+++ b/device/lumi/include/MakeLumiSummary.cuh
@@ -43,18 +43,15 @@ namespace make_lumi_summary {
       "lumi_counter_schema",
       "schema for lumi counters",
       std::map<std::string, std::pair<unsigned, unsigned>>);
-    PROPERTY(
-      basic_offsets_and_sizes_t,
-      "basic_offsets_and_sizes",
-      "offsets and sizes in bits for the ODIN and GEC counters",
-      std::array<unsigned, 2 * Lumi::Constants::n_basic_counters>)
-    basic_offsets_and_sizes;
   }; // struct Parameters
 
+  using offsets_and_sizes_t = std::array<unsigned, 2 * Lumi::Constants::n_basic_counters>;
+
   __global__ void make_lumi_summary(
     Parameters,
     const unsigned number_of_events,
     const unsigned number_of_events_passed_gec,
+    const offsets_and_sizes_t offsets_and_sizes,
     std::array<const Lumi::LumiInfo*, Lumi::Constants::n_sub_infos> lumiInfos,
     std::array<unsigned, Lumi::Constants::n_sub_infos> spanSize,
     const unsigned size_of_aggregate);
@@ -78,7 +75,8 @@ namespace make_lumi_summary {
     Property<encoding_key_full_t> m_key_full {this, 0};
     Property<lumi_sum_length_t> m_lumi_sum_length {this, 0u};
     Property<lumi_counter_schema_t> m_lumi_counter_schema {this, {}};
-    Property<basic_offsets_and_sizes_t> m_basic_offsets_and_sizes {this,
-                                                                   {{0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u}}};
+
+    offsets_and_sizes_t m_offsets_and_sizes {0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u};
+
   }; // struct make_lumi_summary_t
 } // namespace make_lumi_summary
diff --git a/device/lumi/include/MuonLumiCounters.cuh b/device/lumi/include/MuonLumiCounters.cuh
index 459f5aa23f4..82ea4c36d6d 100644
--- a/device/lumi/include/MuonLumiCounters.cuh
+++ b/device/lumi/include/MuonLumiCounters.cuh
@@ -39,21 +39,16 @@ namespace muon_lumi_counters {
       "shifts and scales extracted from the schema for lumi counters",
       std::map<std::string, std::pair<float, float>>)
     lumi_counter_shifts_and_scales;
-    PROPERTY(
-      muon_offsets_and_sizes_t,
-      "muon_offsets_and_sizes",
-      "offsets and sizes in bits for the muon counters",
-      std::array<unsigned, 2 * Lumi::Constants::n_muon_counters>)
-    muon_offsets_and_sizes;
-    PROPERTY(
-      muon_shifts_and_scales_t,
-      "muon_shifts_and_scales",
-      "shifts and scales for the muon counters",
-      std::array<float, 2 * Lumi::Constants::n_muon_counters>)
-    muon_shifts_and_scales;
   }; // struct Parameters
 
-  __global__ void muon_lumi_counters(Parameters, const unsigned number_of_events);
+  using offsets_and_sizes_t = std::array<unsigned, 2 * Lumi::Constants::n_muon_counters>;
+  using shifts_and_scales_t = std::array<float, 2 * Lumi::Constants::n_muon_counters>;
+
+  __global__ void muon_lumi_counters(
+    Parameters,
+    const unsigned number_of_events,
+    const offsets_and_sizes_t offsets_and_sizes,
+    const shifts_and_scales_t shifts_and_scales);
 
   struct muon_lumi_counters_t : public DeviceAlgorithm, Parameters {
     void set_arguments_size(ArgumentReferences<Parameters> arguments, const RuntimeOptions&, const Constants&) const;
@@ -70,11 +65,10 @@ namespace muon_lumi_counters {
     Property<block_dim_t> m_block_dim {this, {{64, 1, 1}}};
     Property<lumi_counter_schema_t> m_lumi_counter_schema {this, {}};
     Property<lumi_counter_shifts_and_scales_t> m_lumi_counter_shifts_and_scales {this, {}};
-    Property<muon_offsets_and_sizes_t> m_muon_offsets_and_sizes {
-      this,
-      {{0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u}}};
-    Property<muon_shifts_and_scales_t> m_muon_shifts_and_scales {this, {{0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f,
-                                                                         0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f,
-                                                                         0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f}}};
+
+    offsets_and_sizes_t m_offsets_and_sizes = {0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u,
+                                               0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u};
+    shifts_and_scales_t m_shifts_and_scales = {0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f,
+                                               0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f};
   }; // struct muon_lumi_counters_t
 } // namespace muon_lumi_counters
diff --git a/device/lumi/include/PVLumiCounters.cuh b/device/lumi/include/PVLumiCounters.cuh
index ebc47a4a97b..1b240b84416 100644
--- a/device/lumi/include/PVLumiCounters.cuh
+++ b/device/lumi/include/PVLumiCounters.cuh
@@ -39,21 +39,16 @@ namespace pv_lumi_counters {
       "shifts and scales extracted from the schema for lumi counters",
       std::map<std::string, std::pair<float, float>>)
     lumi_counter_shifts_and_scales;
-    PROPERTY(
-      pv_offsets_and_sizes_t,
-      "pv_offsets_and_sizes",
-      "offsets and sizes in bits for the PV counters",
-      std::array<unsigned, 2 * Lumi::Constants::n_pv_counters>)
-    pv_offsets_and_sizes;
-    PROPERTY(
-      pv_shifts_and_scales_t,
-      "pv_shifts_and_scales",
-      "shifts and scales for the PV counters",
-      std::array<float, 2 * Lumi::Constants::n_pv_counters>)
-    pv_shifts_and_scales;
   }; // struct Parameters
 
-  __global__ void pv_lumi_counters(Parameters, const unsigned number_of_events);
+  using offsets_and_sizes_t = std::array<unsigned, 2 * Lumi::Constants::n_pv_counters>;
+  using shifts_and_scales_t = std::array<float, 2 * Lumi::Constants::n_pv_counters>;
+
+  __global__ void pv_lumi_counters(
+    Parameters,
+    const unsigned number_of_events,
+    const offsets_and_sizes_t offsets_and_sizes,
+    const shifts_and_scales_t shifts_and_scales);
 
   struct pv_lumi_counters_t : public DeviceAlgorithm, Parameters {
     void set_arguments_size(ArgumentReferences<Parameters> arguments, const RuntimeOptions&, const Constants&) const;
@@ -70,8 +65,8 @@ namespace pv_lumi_counters {
     Property<block_dim_t> m_block_dim {this, {{64, 1, 1}}};
     Property<lumi_counter_schema_t> m_lumi_counter_schema {this, {}};
     Property<lumi_counter_shifts_and_scales_t> m_lumi_counter_shifts_and_scales {this, {}};
-    Property<pv_offsets_and_sizes_t> m_pv_offsets_and_sizes {this, {{0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u}}};
-    Property<pv_shifts_and_scales_t> m_pv_shifts_and_scales {this,
-                                                             {{0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f}}};
+
+    offsets_and_sizes_t m_offsets_and_sizes = {0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u};
+    shifts_and_scales_t m_shifts_and_scales = {0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f};
   }; // struct pv_lumi_counters_t
 } // namespace pv_lumi_counters
diff --git a/device/lumi/include/PlumeLumiCounters.cuh b/device/lumi/include/PlumeLumiCounters.cuh
index d8794d0f60b..99f04ad5baf 100644
--- a/device/lumi/include/PlumeLumiCounters.cuh
+++ b/device/lumi/include/PlumeLumiCounters.cuh
@@ -38,21 +38,16 @@ namespace plume_lumi_counters {
       "shifts and scales extracted from the schema for lumi counters",
       std::map<std::string, std::pair<float, float>>)
     lumi_counter_shifts_and_scales;
-    PROPERTY(
-      plume_offsets_and_sizes_t,
-      "plume_offsets_and_sizes",
-      "offsets and sizes in bits for the PLUME counters",
-      std::array<unsigned, 2 * Lumi::Constants::n_plume_counters>)
-    plume_offsets_and_sizes;
-    PROPERTY(
-      plume_shifts_and_scales_t,
-      "plume_shifts_and_scales",
-      "shifts and scales for the PLUME counters",
-      std::array<float, 2 * Lumi::Constants::n_plume_counters>)
-    plume_shifts_and_scales;
   }; // struct Parameters
 
-  __global__ void plume_lumi_counters(Parameters, const unsigned number_of_events);
+  using offsets_and_sizes_t = std::array<unsigned, 2 * Lumi::Constants::n_plume_counters>;
+  using shifts_and_scales_t = std::array<float, 2 * Lumi::Constants::n_plume_counters>;
+
+  __global__ void plume_lumi_counters(
+    Parameters,
+    const unsigned number_of_events,
+    const offsets_and_sizes_t offsets_and_sizes,
+    const shifts_and_scales_t shifts_and_scales);
 
   struct plume_lumi_counters_t : public DeviceAlgorithm, Parameters {
     void set_arguments_size(ArgumentReferences<Parameters> arguments, const RuntimeOptions&, const Constants&) const;
@@ -69,7 +64,8 @@ namespace plume_lumi_counters {
     Property<block_dim_t> m_block_dim {this, {{64, 1, 1}}};
     Property<lumi_counter_schema_t> m_lumi_counter_schema {this, {}};
     Property<lumi_counter_shifts_and_scales_t> m_lumi_counter_shifts_and_scales {this, {}};
-    Property<plume_offsets_and_sizes_t> m_plume_offsets_and_sizes {this, {{0u, 0u, 0u, 0u, 0u, 0u}}};
-    Property<plume_shifts_and_scales_t> m_plume_shifts_and_scales {this, {{0.f, 1.f, 0.f, 1.f, 0.f, 1.f}}};
+
+    offsets_and_sizes_t m_offsets_and_sizes = {0u, 0u, 0u, 0u, 0u, 0u};
+    shifts_and_scales_t m_shifts_and_scales = {0.f, 1.f, 0.f, 1.f, 0.f, 1.f};
   }; // struct plume_lumi_counters_t
 } // namespace plume_lumi_counters
diff --git a/device/lumi/include/SciFiLumiCounters.cuh b/device/lumi/include/SciFiLumiCounters.cuh
index 65c58b903ae..b8d02eed630 100644
--- a/device/lumi/include/SciFiLumiCounters.cuh
+++ b/device/lumi/include/SciFiLumiCounters.cuh
@@ -37,21 +37,17 @@ namespace scifi_lumi_counters {
       "shifts and scales extracted from the schema for lumi counters",
       std::map<std::string, std::pair<float, float>>)
     lumi_counter_shifts_and_scales;
-    PROPERTY(
-      scifi_offsets_and_sizes_t,
-      "scifi_offsets_and_sizes",
-      "offsets and sizes in bits for the SciFi counters",
-      std::array<unsigned, 2 * Lumi::Constants::n_scifi_counters>)
-    scifi_offsets_and_sizes;
-    PROPERTY(
-      scifi_shifts_and_scales_t,
-      "scifi_shifts_and_scales",
-      "shifts and scales for the SciFi counters",
-      std::array<float, 2 * Lumi::Constants::n_scifi_counters>)
-    scifi_shifts_and_scales;
   }; // struct Parameters
 
-  __global__ void scifi_lumi_counters(Parameters, const unsigned number_of_events, const char* scifi_geometry);
+  using offsets_and_sizes_t = std::array<unsigned, 2 * Lumi::Constants::n_scifi_counters>;
+  using shifts_and_scales_t = std::array<float, 2 * Lumi::Constants::n_scifi_counters>;
+
+  __global__ void scifi_lumi_counters(
+    Parameters,
+    const unsigned number_of_events,
+    const offsets_and_sizes_t offsets_and_sizes,
+    const shifts_and_scales_t shifts_and_scales,
+    const char* scifi_geometry);
 
   struct scifi_lumi_counters_t : public DeviceAlgorithm, Parameters {
     void set_arguments_size(ArgumentReferences<Parameters> arguments, const RuntimeOptions&, const Constants&) const;
@@ -68,10 +64,8 @@ namespace scifi_lumi_counters {
     Property<block_dim_t> m_block_dim {this, {{64, 1, 1}}};
     Property<lumi_counter_schema_t> m_lumi_counter_schema {this, {}};
     Property<lumi_counter_shifts_and_scales_t> m_lumi_counter_shifts_and_scales {this, {}};
-    Property<scifi_offsets_and_sizes_t> m_scifi_offsets_and_sizes {this,
-                                                                   {{0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u}}};
-    Property<scifi_shifts_and_scales_t> m_scifi_shifts_and_scales {
-      this,
-      {{0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f}}};
+
+    offsets_and_sizes_t m_offsets_and_sizes = {0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u};
+    shifts_and_scales_t m_shifts_and_scales = {0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f};
   }; // struct scifi_lumi_counters_t
 } // namespace scifi_lumi_counters
diff --git a/device/lumi/include/VeloLumiCounters.cuh b/device/lumi/include/VeloLumiCounters.cuh
index 52998440634..d9205c66137 100644
--- a/device/lumi/include/VeloLumiCounters.cuh
+++ b/device/lumi/include/VeloLumiCounters.cuh
@@ -48,21 +48,16 @@ namespace velo_lumi_counters {
       "shifts and scales extracted from the schema for lumi counters",
       std::map<std::string, std::pair<float, float>>)
     lumi_counter_shifts_and_scales;
-    PROPERTY(
-      velo_offsets_and_sizes_t,
-      "velo_offsets_and_sizes",
-      "offsets and sizes in bits for the VELO counters",
-      std::array<unsigned, 2 * Lumi::Constants::n_velo_counters>)
-    velo_offsets_and_sizes;
-    PROPERTY(
-      velo_shifts_and_scales_t,
-      "velo_shifts_and_scales",
-      "shifts and scales for the VELO counters",
-      std::array<float, 2 * Lumi::Constants::n_velo_counters>)
-    velo_shifts_and_scales;
   }; // struct Parameters
 
-  __global__ void velo_lumi_counters(Parameters, const unsigned number_of_events);
+  using offsets_and_sizes_t = std::array<unsigned, 2 * Lumi::Constants::n_velo_counters>;
+  using shifts_and_scales_t = std::array<float, 2 * Lumi::Constants::n_velo_counters>;
+
+  __global__ void velo_lumi_counters(
+    Parameters,
+    const unsigned number_of_events,
+    const offsets_and_sizes_t offsets_and_sizes,
+    const shifts_and_scales_t shifts_and_scales);
 
   // doca and eta copied from device/event_model/common/include/CopyTrackParameters.cuh
   // to avoid extra header files requirements
@@ -99,10 +94,10 @@ namespace velo_lumi_counters {
     Property<block_dim_t> m_block_dim {this, {{64, 1, 1}}};
     Property<lumi_counter_schema_t> m_lumi_counter_schema {this, {}};
     Property<lumi_counter_shifts_and_scales_t> m_lumi_counter_shifts_and_scales {this, {}};
-    Property<velo_offsets_and_sizes_t> m_velo_offsets_and_sizes {this, {{0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u,
-                                                                         0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u}}};
-    Property<velo_shifts_and_scales_t> m_velo_shifts_and_scales {
-      this,
-      {{0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f}}};
+
+    offsets_and_sizes_t m_offsets_and_sizes = {0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u,
+                                               0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u, 0u};
+    shifts_and_scales_t m_shifts_and_scales = {0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f,
+                                               0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f, 0.f, 1.f};
   }; // struct velo_lumi_counters_t
 } // namespace velo_lumi_counters
diff --git a/device/lumi/src/CaloLumiCounters.cu b/device/lumi/src/CaloLumiCounters.cu
index d12805c9be0..5e4a828826a 100644
--- a/device/lumi/src/CaloLumiCounters.cu
+++ b/device/lumi/src/CaloLumiCounters.cu
@@ -29,31 +29,26 @@ void calo_lumi_counters::calo_lumi_counters_t::init()
 {
   std::map<std::string, std::pair<unsigned, unsigned>> schema = property<lumi_counter_schema_t>();
   std::map<std::string, std::pair<float, float>> shifts_and_scales = property<lumi_counter_shifts_and_scales_t>();
-  std::array<unsigned, 2 * Lumi::Constants::n_calo_counters> calo_offsets_and_sizes =
-    property<calo_offsets_and_sizes_t>();
-  std::array<float, 2 * Lumi::Constants::n_calo_counters> calo_shifts_and_scales = property<calo_shifts_and_scales_t>();
 
-  unsigned c_idx(0u);
+  unsigned c_idx = 0u;
   for (auto counter_name : Lumi::Constants::calo_counter_names) {
     if (schema.find(counter_name) == schema.end()) {
       std::cout << "LumiSummary schema does not use " << counter_name << std::endl;
     }
     else {
-      calo_offsets_and_sizes[2 * c_idx] = schema[counter_name].first;
-      calo_offsets_and_sizes[2 * c_idx + 1] = schema[counter_name].second;
+      m_offsets_and_sizes[2 * c_idx] = schema[counter_name].first;
+      m_offsets_and_sizes[2 * c_idx + 1] = schema[counter_name].second;
     }
     if (shifts_and_scales.find(counter_name) == shifts_and_scales.end()) {
-      calo_shifts_and_scales[2 * c_idx] = 0.f;
-      calo_shifts_and_scales[2 * c_idx + 1] = 1.f;
+      m_shifts_and_scales[2 * c_idx] = 0.f;
+      m_shifts_and_scales[2 * c_idx + 1] = 1.f;
     }
     else {
-      calo_shifts_and_scales[2 * c_idx] = shifts_and_scales[counter_name].first;
-      calo_shifts_and_scales[2 * c_idx + 1] = shifts_and_scales[counter_name].second;
+      m_shifts_and_scales[2 * c_idx] = shifts_and_scales[counter_name].first;
+      m_shifts_and_scales[2 * c_idx + 1] = shifts_and_scales[counter_name].second;
     }
     ++c_idx;
   }
-  set_property_value<calo_offsets_and_sizes_t>(calo_offsets_and_sizes);
-  set_property_value<calo_shifts_and_scales_t>(calo_shifts_and_scales);
 }
 
 void calo_lumi_counters::calo_lumi_counters_t::operator()(
@@ -66,12 +61,18 @@ void calo_lumi_counters::calo_lumi_counters_t::operator()(
   if (first<host_lumi_summaries_count_t>(arguments) == 0) return;
 
   global_function(calo_lumi_counters)(dim3(2), property<block_dim_t>(), context)(
-    arguments, first<host_number_of_events_t>(arguments), constants.dev_ecal_geometry);
+    arguments,
+    first<host_number_of_events_t>(arguments),
+    m_offsets_and_sizes,
+    m_shifts_and_scales,
+    constants.dev_ecal_geometry);
 }
 
 __global__ void calo_lumi_counters::calo_lumi_counters(
   calo_lumi_counters::Parameters parameters,
   const unsigned number_of_events,
+  const offsets_and_sizes_t offsets_and_sizes,
+  const shifts_and_scales_t shifts_and_scales,
   const char* raw_ecal_geometry)
 {
   for (unsigned event_number = blockIdx.x * blockDim.x + threadIdx.x; event_number < number_of_events;
@@ -115,11 +116,11 @@ __global__ void calo_lumi_counters::calo_lumi_counters(
     for (unsigned i = 0; i < Lumi::Constants::n_calo_counters; ++i) {
       fillLumiInfo(
         parameters.dev_lumi_infos[info_offset + i],
-        parameters.calo_offsets_and_sizes.get()[2 * i],
-        parameters.calo_offsets_and_sizes.get()[2 * i + 1],
+        offsets_and_sizes[2 * i],
+        offsets_and_sizes[2 * i + 1],
         E_vals[i],
-        parameters.calo_shifts_and_scales.get()[2 * i],
-        parameters.calo_shifts_and_scales.get()[2 * i + 1]);
+        shifts_and_scales[2 * i],
+        shifts_and_scales[2 * i + 1]);
     }
   }
 }
diff --git a/device/lumi/src/MakeLumiSummary.cu b/device/lumi/src/MakeLumiSummary.cu
index c96583970d4..67982287812 100644
--- a/device/lumi/src/MakeLumiSummary.cu
+++ b/device/lumi/src/MakeLumiSummary.cu
@@ -28,8 +28,6 @@ void make_lumi_summary::make_lumi_summary_t::set_arguments_size(
 void make_lumi_summary::make_lumi_summary_t::init()
 {
   std::map<std::string, std::pair<unsigned, unsigned>> schema = property<lumi_counter_schema_t>();
-  std::array<unsigned, 2 * Lumi::Constants::n_basic_counters> basic_offsets_and_sizes =
-    property<basic_offsets_and_sizes_t>();
 
   unsigned c_idx(0u);
   for (auto counter_name : Lumi::Constants::basic_counter_names) {
@@ -37,12 +35,11 @@ void make_lumi_summary::make_lumi_summary_t::init()
       std::cout << "LumiSummary schema does not use " << counter_name << std::endl;
     }
     else {
-      basic_offsets_and_sizes[2 * c_idx] = schema[counter_name].first;
-      basic_offsets_and_sizes[2 * c_idx + 1] = schema[counter_name].second;
+      m_offsets_and_sizes[2 * c_idx] = schema[counter_name].first;
+      m_offsets_and_sizes[2 * c_idx + 1] = schema[counter_name].second;
     }
     ++c_idx;
   }
-  set_property_value<basic_offsets_and_sizes_t>(basic_offsets_and_sizes);
 }
 
 void make_lumi_summary::make_lumi_summary_t::operator()(
@@ -87,6 +84,7 @@ void make_lumi_summary::make_lumi_summary_t::operator()(
     arguments,
     first<host_number_of_events_t>(arguments),
     size<dev_event_list_t>(arguments),
+    m_offsets_and_sizes,
     lumiInfos,
     infoSize,
     size_of_aggregate);
@@ -125,6 +123,7 @@ __global__ void make_lumi_summary::make_lumi_summary(
   make_lumi_summary::Parameters parameters,
   const unsigned number_of_events,
   const unsigned number_of_events_passed_gec,
+  const offsets_and_sizes_t offsets_and_sizes,
   std::array<const Lumi::LumiInfo*, Lumi::Constants::n_sub_infos> lumiInfos,
   std::array<unsigned, Lumi::Constants::n_sub_infos> infoSize,
   const unsigned size_of_aggregate)
@@ -152,36 +151,23 @@ __global__ void make_lumi_summary::make_lumi_summary(
     uint64_t t0 = static_cast<uint64_t>(odin.gpsTime()) - new_bcid * 1000 / 40078;
     // event time
     setField(
-      parameters.basic_offsets_and_sizes.get()[0],
-      parameters.basic_offsets_and_sizes.get()[1],
-      lumi_summary,
-      static_cast<unsigned>(t0 & 0xffffffff),
-      sum_length);
-    setField(
-      parameters.basic_offsets_and_sizes.get()[2],
-      parameters.basic_offsets_and_sizes.get()[3],
-      lumi_summary,
-      static_cast<unsigned>(t0 >> 32),
-      sum_length);
+      offsets_and_sizes[0], offsets_and_sizes[1], lumi_summary, static_cast<unsigned>(t0 & 0xffffffff), sum_length);
+    setField(offsets_and_sizes[2], offsets_and_sizes[3], lumi_summary, static_cast<unsigned>(t0 >> 32), sum_length);
 
     // gps time offset
     setField(
-      parameters.basic_offsets_and_sizes.get()[4],
-      parameters.basic_offsets_and_sizes.get()[5],
+      offsets_and_sizes[4],
+      offsets_and_sizes[5],
       lumi_summary,
       static_cast<unsigned>(new_bcid & 0xffffffff),
       sum_length);
     setField(
-      parameters.basic_offsets_and_sizes.get()[6],
-      parameters.basic_offsets_and_sizes.get()[7],
-      lumi_summary,
-      static_cast<unsigned>(new_bcid >> 32),
-      sum_length);
+      offsets_and_sizes[6], offsets_and_sizes[7], lumi_summary, static_cast<unsigned>(new_bcid >> 32), sum_length);
 
     // bunch crossing type
     setField(
-      parameters.basic_offsets_and_sizes.get()[8],
-      parameters.basic_offsets_and_sizes.get()[9],
+      offsets_and_sizes[8],
+      offsets_and_sizes[9],
       lumi_summary,
       static_cast<unsigned>(odin.bunchCrossingType()),
       sum_length);
@@ -194,12 +180,7 @@ __global__ void make_lumi_summary::make_lumi_summary(
         break;
       }
     }
-    setField(
-      parameters.basic_offsets_and_sizes.get()[10],
-      parameters.basic_offsets_and_sizes.get()[11],
-      lumi_summary,
-      passedGEC,
-      sum_length);
+    setField(offsets_and_sizes[10], offsets_and_sizes[11], lumi_summary, passedGEC, sum_length);
 
     /// write lumi infos to the summary
     for (unsigned i = 0; i < size_of_aggregate; ++i) {
diff --git a/device/lumi/src/MuonLumiCounters.cu b/device/lumi/src/MuonLumiCounters.cu
index 95cf4d10722..8ffa2563538 100644
--- a/device/lumi/src/MuonLumiCounters.cu
+++ b/device/lumi/src/MuonLumiCounters.cu
@@ -1,4 +1,4 @@
-/*****************************************************************************\
+/*****************************************************************************\
 * (c) Copyright 2022 CERN for the benefit of the LHCb Collaboration           *
 *                                                                             *
 * This software is distributed under the terms of the Apache License          *
@@ -27,9 +27,6 @@ void muon_lumi_counters::muon_lumi_counters_t::init()
 {
   std::map<std::string, std::pair<unsigned, unsigned>> schema = property<lumi_counter_schema_t>();
   std::map<std::string, std::pair<float, float>> shifts_and_scales = property<lumi_counter_shifts_and_scales_t>();
-  std::array<unsigned, 2 * Lumi::Constants::n_muon_counters> muon_offsets_and_sizes =
-    property<muon_offsets_and_sizes_t>();
-  std::array<float, 2 * Lumi::Constants::n_muon_counters> muon_shifts_and_scales = property<muon_shifts_and_scales_t>();
 
   unsigned c_idx(0u);
   for (auto counter_name : Lumi::Constants::muon_counter_names) {
@@ -37,21 +34,19 @@ void muon_lumi_counters::muon_lumi_counters_t::init()
       std::cout << "LumiSummary schema does not use " << counter_name << std::endl;
     }
     else {
-      muon_offsets_and_sizes[2 * c_idx] = schema[counter_name].first;
-      muon_offsets_and_sizes[2 * c_idx + 1] = schema[counter_name].second;
+      m_offsets_and_sizes[2 * c_idx] = schema[counter_name].first;
+      m_offsets_and_sizes[2 * c_idx + 1] = schema[counter_name].second;
     }
     if (shifts_and_scales.find(counter_name) == shifts_and_scales.end()) {
-      muon_shifts_and_scales[2 * c_idx] = 0.f;
-      muon_shifts_and_scales[2 * c_idx + 1] = 1.f;
+      m_shifts_and_scales[2 * c_idx] = 0.f;
+      m_shifts_and_scales[2 * c_idx + 1] = 1.f;
     }
     else {
-      muon_shifts_and_scales[2 * c_idx] = shifts_and_scales[counter_name].first;
-      muon_shifts_and_scales[2 * c_idx + 1] = shifts_and_scales[counter_name].second;
+      m_shifts_and_scales[2 * c_idx] = shifts_and_scales[counter_name].first;
+      m_shifts_and_scales[2 * c_idx + 1] = shifts_and_scales[counter_name].second;
     }
     ++c_idx;
   }
-  set_property_value<muon_offsets_and_sizes_t>(muon_offsets_and_sizes);
-  set_property_value<muon_shifts_and_scales_t>(muon_shifts_and_scales);
 }
 
 void muon_lumi_counters::muon_lumi_counters_t::operator()(
@@ -64,12 +59,14 @@ void muon_lumi_counters::muon_lumi_counters_t::operator()(
   if (first<host_lumi_summaries_count_t>(arguments) == 0) return;
 
   global_function(muon_lumi_counters)(dim3(4u), property<block_dim_t>(), context)(
-    arguments, first<host_number_of_events_t>(arguments));
+    arguments, first<host_number_of_events_t>(arguments), m_offsets_and_sizes, m_shifts_and_scales);
 }
 
 __global__ void muon_lumi_counters::muon_lumi_counters(
   muon_lumi_counters::Parameters parameters,
-  const unsigned number_of_events)
+  const unsigned number_of_events,
+  const offsets_and_sizes_t offsets_and_sizes,
+  const shifts_and_scales_t shifts_and_scales)
 {
   for (unsigned event_number = blockIdx.x * blockDim.x + threadIdx.x; event_number < number_of_events;
        event_number += blockDim.x * gridDim.x) {
@@ -100,19 +97,19 @@ __global__ void muon_lumi_counters::muon_lumi_counters(
     for (unsigned i = 0; i < Lumi::Constants::n_muon_station_regions; ++i) {
       fillLumiInfo(
         parameters.dev_lumi_infos[info_offset + i],
-        parameters.muon_offsets_and_sizes.get()[2 * i],
-        parameters.muon_offsets_and_sizes.get()[2 * i + 1],
+        offsets_and_sizes[2 * i],
+        offsets_and_sizes[2 * i + 1],
         muon_hits_offsets[muon_offsets[i + 1]] - muon_hits_offsets[muon_offsets[i]],
-        parameters.muon_shifts_and_scales.get()[2 * i],
-        parameters.muon_shifts_and_scales.get()[2 * i + 1]);
+        shifts_and_scales[2 * i],
+        shifts_and_scales[2 * i + 1]);
     }
 
     fillLumiInfo(
       parameters.dev_lumi_infos[info_offset + Lumi::Constants::n_muon_station_regions],
-      parameters.muon_offsets_and_sizes.get()[2 * Lumi::Constants::n_muon_station_regions],
-      parameters.muon_offsets_and_sizes.get()[2 * Lumi::Constants::n_muon_station_regions + 1],
+      offsets_and_sizes[2 * Lumi::Constants::n_muon_station_regions],
+      offsets_and_sizes[2 * Lumi::Constants::n_muon_station_regions + 1],
       parameters.dev_muon_number_of_tracks[event_number],
-      parameters.muon_shifts_and_scales.get()[2 * Lumi::Constants::n_muon_station_regions],
-      parameters.muon_shifts_and_scales.get()[2 * Lumi::Constants::n_muon_station_regions + 1]);
+      shifts_and_scales[2 * Lumi::Constants::n_muon_station_regions],
+      shifts_and_scales[2 * Lumi::Constants::n_muon_station_regions + 1]);
   }
 }
diff --git a/device/lumi/src/PVLumiCounters.cu b/device/lumi/src/PVLumiCounters.cu
index 156fc18a15a..c80f24c88ae 100644
--- a/device/lumi/src/PVLumiCounters.cu
+++ b/device/lumi/src/PVLumiCounters.cu
@@ -26,8 +26,6 @@ void pv_lumi_counters::pv_lumi_counters_t::init()
 {
   std::map<std::string, std::pair<unsigned, unsigned>> schema = property<lumi_counter_schema_t>();
   std::map<std::string, std::pair<float, float>> shifts_and_scales = property<lumi_counter_shifts_and_scales_t>();
-  std::array<unsigned, 2 * Lumi::Constants::n_pv_counters> pv_offsets_and_sizes = property<pv_offsets_and_sizes_t>();
-  std::array<float, 2 * Lumi::Constants::n_pv_counters> pv_shifts_and_scales = property<pv_shifts_and_scales_t>();
 
   unsigned c_idx(0u);
   for (auto counter_name : Lumi::Constants::pv_counter_names) {
@@ -35,21 +33,19 @@ void pv_lumi_counters::pv_lumi_counters_t::init()
       std::cout << "LumiSummary schema does not use " << counter_name << std::endl;
     }
     else {
-      pv_offsets_and_sizes[2 * c_idx] = schema[counter_name].first;
-      pv_offsets_and_sizes[2 * c_idx + 1] = schema[counter_name].second;
+      m_offsets_and_sizes[2 * c_idx] = schema[counter_name].first;
+      m_offsets_and_sizes[2 * c_idx + 1] = schema[counter_name].second;
     }
     if (shifts_and_scales.find(counter_name) == shifts_and_scales.end()) {
-      pv_shifts_and_scales[2 * c_idx] = 0.f;
-      pv_shifts_and_scales[2 * c_idx + 1] = 1.f;
+      m_shifts_and_scales[2 * c_idx] = 0.f;
+      m_shifts_and_scales[2 * c_idx + 1] = 1.f;
     }
     else {
-      pv_shifts_and_scales[2 * c_idx] = shifts_and_scales[counter_name].first;
-      pv_shifts_and_scales[2 * c_idx + 1] = shifts_and_scales[counter_name].second;
+      m_shifts_and_scales[2 * c_idx] = shifts_and_scales[counter_name].first;
+      m_shifts_and_scales[2 * c_idx + 1] = shifts_and_scales[counter_name].second;
     }
     ++c_idx;
   }
-  set_property_value<pv_offsets_and_sizes_t>(pv_offsets_and_sizes);
-  set_property_value<pv_shifts_and_scales_t>(pv_shifts_and_scales);
 }
 
 void pv_lumi_counters::pv_lumi_counters_t::operator()(
@@ -62,12 +58,14 @@ void pv_lumi_counters::pv_lumi_counters_t::operator()(
   if (first<host_lumi_summaries_count_t>(arguments) == 0) return;
 
   global_function(pv_lumi_counters)(dim3(4u), property<block_dim_t>(), context)(
-    arguments, first<host_number_of_events_t>(arguments));
+    arguments, first<host_number_of_events_t>(arguments), m_offsets_and_sizes, m_shifts_and_scales);
 }
 
 __global__ void pv_lumi_counters::pv_lumi_counters(
   pv_lumi_counters::Parameters parameters,
-  const unsigned number_of_events)
+  const unsigned number_of_events,
+  const offsets_and_sizes_t offsets_and_sizes,
+  const shifts_and_scales_t shifts_and_scales)
 {
   for (unsigned event_number = blockIdx.x * blockDim.x + threadIdx.x; event_number < number_of_events;
        event_number += blockDim.x * gridDim.x) {
@@ -102,11 +100,11 @@ __global__ void pv_lumi_counters::pv_lumi_counters(
     for (unsigned i = 0; i < Lumi::Constants::n_pv_counters; ++i) {
       fillLumiInfo(
         parameters.dev_lumi_infos[info_offset + i],
-        parameters.pv_offsets_and_sizes.get()[2 * i],
-        parameters.pv_offsets_and_sizes.get()[2 * i + 1],
+        offsets_and_sizes[2 * i],
+        offsets_and_sizes[2 * i + 1],
         pv_counters[i],
-        parameters.pv_shifts_and_scales.get()[2 * i],
-        parameters.pv_shifts_and_scales.get()[2 * i + 1]);
+        shifts_and_scales[2 * i],
+        shifts_and_scales[2 * i + 1]);
     }
   }
 }
diff --git a/device/lumi/src/PlumeLumiCounters.cu b/device/lumi/src/PlumeLumiCounters.cu
index d612a851e0f..3672969fca8 100644
--- a/device/lumi/src/PlumeLumiCounters.cu
+++ b/device/lumi/src/PlumeLumiCounters.cu
@@ -27,10 +27,6 @@ void plume_lumi_counters::plume_lumi_counters_t::init()
 {
   std::map<std::string, std::pair<unsigned, unsigned>> schema = property<lumi_counter_schema_t>();
   std::map<std::string, std::pair<float, float>> shifts_and_scales = property<lumi_counter_shifts_and_scales_t>();
-  std::array<unsigned, 2 * Lumi::Constants::n_plume_counters> plume_offsets_and_sizes =
-    property<plume_offsets_and_sizes_t>();
-  std::array<float, 2 * Lumi::Constants::n_plume_counters> plume_shifts_and_scales =
-    property<plume_shifts_and_scales_t>();
 
   unsigned c_idx(0u);
   for (auto counter_name : Lumi::Constants::plume_counter_names) {
@@ -38,21 +34,19 @@ void plume_lumi_counters::plume_lumi_counters_t::init()
       std::cout << "LumiSummary schema does not use " << counter_name << std::endl;
     }
     else {
-      plume_offsets_and_sizes[2 * c_idx] = schema[counter_name].first;
-      plume_offsets_and_sizes[2 * c_idx + 1] = schema[counter_name].second;
+      m_offsets_and_sizes[2 * c_idx] = schema[counter_name].first;
+      m_offsets_and_sizes[2 * c_idx + 1] = schema[counter_name].second;
     }
     if (shifts_and_scales.find(counter_name) == shifts_and_scales.end()) {
-      plume_shifts_and_scales[2 * c_idx] = 0.f;
-      plume_shifts_and_scales[2 * c_idx + 1] = 1.f;
+      m_shifts_and_scales[2 * c_idx] = 0.f;
+      m_shifts_and_scales[2 * c_idx + 1] = 1.f;
     }
     else {
-      plume_shifts_and_scales[2 * c_idx] = shifts_and_scales[counter_name].first;
-      plume_shifts_and_scales[2 * c_idx + 1] = shifts_and_scales[counter_name].second;
+      m_shifts_and_scales[2 * c_idx] = shifts_and_scales[counter_name].first;
+      m_shifts_and_scales[2 * c_idx + 1] = shifts_and_scales[counter_name].second;
     }
     ++c_idx;
   }
-  set_property_value<plume_offsets_and_sizes_t>(plume_offsets_and_sizes);
-  set_property_value<plume_shifts_and_scales_t>(plume_shifts_and_scales);
 }
 
 void plume_lumi_counters::plume_lumi_counters_t::operator()(
@@ -65,12 +59,14 @@ void plume_lumi_counters::plume_lumi_counters_t::operator()(
   if (first<host_lumi_summaries_count_t>(arguments) == 0) return;
 
   global_function(plume_lumi_counters)(dim3(4u), property<block_dim_t>(), context)(
-    arguments, first<host_number_of_events_t>(arguments));
+    arguments, first<host_number_of_events_t>(arguments), m_offsets_and_sizes, m_shifts_and_scales);
 }
 
 __global__ void plume_lumi_counters::plume_lumi_counters(
   plume_lumi_counters::Parameters parameters,
-  const unsigned number_of_events)
+  const unsigned number_of_events,
+  const offsets_and_sizes_t offsets_and_sizes,
+  const shifts_and_scales_t shifts_and_scales)
 {
   for (unsigned event_number = blockIdx.x * blockDim.x + threadIdx.x; event_number < number_of_events;
        event_number += blockDim.x * gridDim.x) {
@@ -101,11 +97,11 @@ __global__ void plume_lumi_counters::plume_lumi_counters(
     for (unsigned i = 0u; i < Lumi::Constants::n_plume_counters; ++i) {
       fillLumiInfo(
         parameters.dev_lumi_infos[info_offset + i],
-        parameters.plume_offsets_and_sizes.get()[2 * i],
-        parameters.plume_offsets_and_sizes.get()[2 * i + 1],
+        offsets_and_sizes[2 * i],
+        offsets_and_sizes[2 * i + 1],
         plume_counters[i],
-        parameters.plume_shifts_and_scales.get()[2 * i],
-        parameters.plume_shifts_and_scales.get()[2 * i + 1]);
+        shifts_and_scales[2 * i],
+        shifts_and_scales[2 * i + 1]);
     }
   }
 }
diff --git a/device/lumi/src/SciFiLumiCounters.cu b/device/lumi/src/SciFiLumiCounters.cu
index 2879ca650aa..17f67c4b5b8 100644
--- a/device/lumi/src/SciFiLumiCounters.cu
+++ b/device/lumi/src/SciFiLumiCounters.cu
@@ -30,10 +30,6 @@ void scifi_lumi_counters::scifi_lumi_counters_t::init()
 {
   std::map<std::string, std::pair<unsigned, unsigned>> schema = property<lumi_counter_schema_t>();
   std::map<std::string, std::pair<float, float>> shifts_and_scales = property<lumi_counter_shifts_and_scales_t>();
-  std::array<unsigned, 2 * Lumi::Constants::n_scifi_counters> scifi_offsets_and_sizes =
-    property<scifi_offsets_and_sizes_t>();
-  std::array<float, 2 * Lumi::Constants::n_scifi_counters> scifi_shifts_and_scales =
-    property<scifi_shifts_and_scales_t>();
 
   unsigned c_idx(0u);
   for (auto counter_name : Lumi::Constants::scifi_counter_names) {
@@ -41,21 +37,19 @@ void scifi_lumi_counters::scifi_lumi_counters_t::init()
       std::cout << "LumiSummary schema does not use " << counter_name << std::endl;
     }
     else {
-      scifi_offsets_and_sizes[2 * c_idx] = schema[counter_name].first;
-      scifi_offsets_and_sizes[2 * c_idx + 1] = schema[counter_name].second;
+      m_offsets_and_sizes[2 * c_idx] = schema[counter_name].first;
+      m_offsets_and_sizes[2 * c_idx + 1] = schema[counter_name].second;
     }
     if (shifts_and_scales.find(counter_name) == shifts_and_scales.end()) {
-      scifi_shifts_and_scales[2 * c_idx] = 0.f;
-      scifi_shifts_and_scales[2 * c_idx + 1] = 1.f;
+      m_shifts_and_scales[2 * c_idx] = 0.f;
+      m_shifts_and_scales[2 * c_idx + 1] = 1.f;
     }
     else {
-      scifi_shifts_and_scales[2 * c_idx] = shifts_and_scales[counter_name].first;
-      scifi_shifts_and_scales[2 * c_idx + 1] = shifts_and_scales[counter_name].second;
+      m_shifts_and_scales[2 * c_idx] = shifts_and_scales[counter_name].first;
+      m_shifts_and_scales[2 * c_idx + 1] = shifts_and_scales[counter_name].second;
     }
     ++c_idx;
   }
-  set_property_value<scifi_offsets_and_sizes_t>(scifi_offsets_and_sizes);
-  set_property_value<scifi_shifts_and_scales_t>(scifi_shifts_and_scales);
 }
 
 void scifi_lumi_counters::scifi_lumi_counters_t::operator()(
@@ -68,12 +62,18 @@ void scifi_lumi_counters::scifi_lumi_counters_t::operator()(
   if (first<host_lumi_summaries_count_t>(arguments) == 0) return;
 
   global_function(scifi_lumi_counters)(dim3(4u), property<block_dim_t>(), context)(
-    arguments, first<host_number_of_events_t>(arguments), constants.dev_scifi_geometry);
+    arguments,
+    first<host_number_of_events_t>(arguments),
+    m_offsets_and_sizes,
+    m_shifts_and_scales,
+    constants.dev_scifi_geometry);
 }
 
 __global__ void scifi_lumi_counters::scifi_lumi_counters(
   scifi_lumi_counters::Parameters parameters,
   const unsigned number_of_events,
+  const offsets_and_sizes_t offsets_and_sizes,
+  const shifts_and_scales_t shifts_and_scales,
   const char* scifi_geometry)
 {
   for (unsigned event_number = blockIdx.x * blockDim.x + threadIdx.x; event_number < number_of_events;
@@ -111,11 +111,11 @@ __global__ void scifi_lumi_counters::scifi_lumi_counters(
     for (unsigned i = 0; i < Lumi::Constants::n_scifi_counters; ++i) {
       fillLumiInfo(
         parameters.dev_lumi_infos[info_offset + i],
-        parameters.scifi_offsets_and_sizes.get()[2 * i],
-        parameters.scifi_offsets_and_sizes.get()[2 * i + 1],
+        offsets_and_sizes[2 * i],
+        offsets_and_sizes[2 * i + 1],
         SciFiCounters[i],
-        parameters.scifi_shifts_and_scales.get()[2 * i],
-        parameters.scifi_shifts_and_scales.get()[2 * i + 1]);
+        shifts_and_scales[2 * i],
+        shifts_and_scales[2 * i + 1]);
     }
   }
 }
diff --git a/device/lumi/src/VeloLumiCounters.cu b/device/lumi/src/VeloLumiCounters.cu
index a0cfffe9933..68ae22ce889 100644
--- a/device/lumi/src/VeloLumiCounters.cu
+++ b/device/lumi/src/VeloLumiCounters.cu
@@ -27,9 +27,6 @@ void velo_lumi_counters::velo_lumi_counters_t::init()
 {
   std::map<std::string, std::pair<unsigned, unsigned>> schema = property<lumi_counter_schema_t>();
   std::map<std::string, std::pair<float, float>> shifts_and_scales = property<lumi_counter_shifts_and_scales_t>();
-  std::array<unsigned, 2 * Lumi::Constants::n_velo_counters> velo_offsets_and_sizes =
-    property<velo_offsets_and_sizes_t>();
-  std::array<float, 2 * Lumi::Constants::n_velo_counters> velo_shifts_and_scales = property<velo_shifts_and_scales_t>();
 
   unsigned c_idx(0u);
   for (auto counter_name : Lumi::Constants::velo_counter_names) {
@@ -37,21 +34,19 @@ void velo_lumi_counters::velo_lumi_counters_t::init()
       std::cout << "LumiSummary schema does not use " << counter_name << std::endl;
     }
     else {
-      velo_offsets_and_sizes[2 * c_idx] = schema[counter_name].first;
-      velo_offsets_and_sizes[2 * c_idx + 1] = schema[counter_name].second;
+      m_offsets_and_sizes[2 * c_idx] = schema[counter_name].first;
+      m_offsets_and_sizes[2 * c_idx + 1] = schema[counter_name].second;
     }
     if (shifts_and_scales.find(counter_name) == shifts_and_scales.end()) {
-      velo_shifts_and_scales[2 * c_idx] = 0.f;
-      velo_shifts_and_scales[2 * c_idx + 1] = 1.f;
+      m_shifts_and_scales[2 * c_idx] = 0.f;
+      m_shifts_and_scales[2 * c_idx + 1] = 1.f;
     }
     else {
-      velo_shifts_and_scales[2 * c_idx] = shifts_and_scales[counter_name].first;
-      velo_shifts_and_scales[2 * c_idx + 1] = shifts_and_scales[counter_name].second;
+      m_shifts_and_scales[2 * c_idx] = shifts_and_scales[counter_name].first;
+      m_shifts_and_scales[2 * c_idx + 1] = shifts_and_scales[counter_name].second;
     }
     ++c_idx;
   }
-  set_property_value<velo_offsets_and_sizes_t>(velo_offsets_and_sizes);
-  set_property_value<velo_shifts_and_scales_t>(velo_shifts_and_scales);
 }
 
 void velo_lumi_counters::velo_lumi_counters_t::operator()(
@@ -64,12 +59,14 @@ void velo_lumi_counters::velo_lumi_counters_t::operator()(
   if (first<host_lumi_summaries_count_t>(arguments) == 0) return;
 
   global_function(velo_lumi_counters)(dim3(4u), property<block_dim_t>(), context)(
-    arguments, first<host_number_of_events_t>(arguments));
+    arguments, first<host_number_of_events_t>(arguments), m_offsets_and_sizes, m_shifts_and_scales);
 }
 
 __global__ void velo_lumi_counters::velo_lumi_counters(
   velo_lumi_counters::Parameters parameters,
-  const unsigned number_of_events)
+  const unsigned number_of_events,
+  const offsets_and_sizes_t offsets_and_sizes,
+  const shifts_and_scales_t shifts_and_scales)
 {
   for (unsigned event_number = blockIdx.x * blockDim.x + threadIdx.x; event_number < number_of_events;
        event_number += blockDim.x * gridDim.x) {
@@ -114,11 +111,11 @@ __global__ void velo_lumi_counters::velo_lumi_counters(
     for (unsigned info_index = 0u; info_index < Lumi::Constants::n_velo_counters; ++info_index) {
       fillLumiInfo(
         parameters.dev_lumi_infos[info_offset + info_index],
-        parameters.velo_offsets_and_sizes.get()[info_index * 2],
-        parameters.velo_offsets_and_sizes.get()[info_index * 2 + 1],
+        offsets_and_sizes[info_index * 2],
+        offsets_and_sizes[info_index * 2 + 1],
         velo_counters[info_index],
-        parameters.velo_shifts_and_scales.get()[2 * info_index],
-        parameters.velo_shifts_and_scales.get()[2 * info_index + 1]);
+        shifts_and_scales[2 * info_index],
+        shifts_and_scales[2 * info_index + 1]);
     }
   }
 }
-- 
GitLab


From d25bb1d3f31d64ca184ff548957931ce90e7ab26 Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Wed, 29 Mar 2023 16:41:19 +0200
Subject: [PATCH 23/49] fix formatting

---
 Dumpers/BinaryDumpers/options/allen.py        | 23 ++++++++++++-------
 .../BinaryDumpers/src/TransposeRawBanks.cpp   |  8 ++++++-
 2 files changed, 22 insertions(+), 9 deletions(-)

diff --git a/Dumpers/BinaryDumpers/options/allen.py b/Dumpers/BinaryDumpers/options/allen.py
index 35582b8a5e6..85be23f75b1 100755
--- a/Dumpers/BinaryDumpers/options/allen.py
+++ b/Dumpers/BinaryDumpers/options/allen.py
@@ -201,13 +201,19 @@ if (m := tck_option.match(sequence)):
     tck = m.group(2)
     sequence_json, tck_info = sequence_from_git(repo, tck)
     if not sequence_json:
-        print(f"Failed to obtain configuration for TCK {tck} from repository {repo}")
+        print(
+            f"Failed to obtain configuration for TCK {tck} from repository {repo}"
+        )
         sys.exit(1)
     elif (allen_release := get_allen_version()) != tck_info.release:
-        print(f"TCK {tck} is compatible with Allen release {allen_release}, not with {tck_info.release}.")
+        print(
+            f"TCK {tck} is compatible with Allen release {allen_release}, not with {tck_info.release}."
+        )
         sys.exit(1)
     else:
-        print(f"Loaded TCK {tck} with sequence type {tck_info.type} and label {tck_info.label}.")
+        print(
+            f"Loaded TCK {tck} with sequence type {tck_info.type} and label {tck_info.label}."
+        )
 else:
     with open(sequence) as f:
         sequence_json = f.read()
@@ -299,8 +305,7 @@ for flag, value in [("g", args.det_folder), ("params", params),
                     ("r", args.repetitions), ("output-file", args.output_file),
                     ("output-batch-size", args.output_batch_size),
                     ("m", args.reserve), ("v", args.verbosity),
-                    ("p", args.print_memory),
-                    ("sequence", sequence),
+                    ("p", args.print_memory), ("sequence", sequence),
                     ("s", args.slices), ("mdf", os.path.expandvars(args.mdf)),
                     ("disable-run-changes", int(not args.enable_run_changes)),
                     ("monitoring-save-period", args.mon_save_period),
@@ -328,7 +333,8 @@ if args.mep:
     provider = cast_service(gbl.IInputProvider, mep_provider)
 else:
     provider = gbl.Allen.make_provider(options, sequence_json)
-output_handler = gbl.Allen.output_handler(provider, zmqSvc, options, sequence_json)
+output_handler = gbl.Allen.output_handler(provider, zmqSvc, options,
+                                          sequence_json)
 
 # run Allen
 gbl.allen.__release_gil__ = 1
@@ -357,8 +363,9 @@ def allen_thread():
     if args.profile == "CUDA":
         runtime_lib.cudaProfilerStart()
 
-    gbl.allen(options, sequence_json, updater, shared_wrap(gbl.IInputProvider, provider),
-              output_handler, zmqSvc, con.c_str())
+    gbl.allen(options, sequence_json, updater,
+              shared_wrap(gbl.IInputProvider, provider), output_handler,
+              zmqSvc, con.c_str())
 
     if args.profile == "CUDA":
         runtime_lib.cudaProfilerStop()
diff --git a/Dumpers/BinaryDumpers/src/TransposeRawBanks.cpp b/Dumpers/BinaryDumpers/src/TransposeRawBanks.cpp
index 8b126f85b2b..a4b88eb1030 100644
--- a/Dumpers/BinaryDumpers/src/TransposeRawBanks.cpp
+++ b/Dumpers/BinaryDumpers/src/TransposeRawBanks.cpp
@@ -120,7 +120,13 @@ std::array<TransposedBanks, LHCb::RawBank::types().size()> TransposeRawBanks::op
 
   // We have to deal with the fact that calo banks can come in different types
   for (auto bt : m_bankTypes.value()) {
-    if (bt == LHCb::RawBank::EcalPacked || bt == LHCb::RawBank::HcalPacked) {
+    if (bt == LHCb::RawBank::VP || bt == LHCb::RawBank::VPRetinaCluster) {
+      if (rawBanks[LHCb::RawBank::VP].empty() && rawBanks[LHCb::RawBank::VPRetinaCluster].empty()) {
+        // Both VP and Retina banks are empty
+        throw GaudiException {"Cannot find " + toString(bt) + " raw bank.", "", StatusCode::FAILURE};
+      }
+    }
+    else if (bt == LHCb::RawBank::EcalPacked || bt == LHCb::RawBank::HcalPacked) {
       if (rawBanks[bt].empty() && rawBanks[LHCb::RawBank::Calo].empty()) {
         // Old-style calo banks empty and new-style calo banks also empty
         throw GaudiException {"Cannot find " + toString(bt) + " raw bank.", "", StatusCode::FAILURE};
-- 
GitLab


From c8cd21480eb280f6824de2a6916a9349a1638344 Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Wed, 29 Mar 2023 16:42:43 +0200
Subject: [PATCH 24/49] Remove VPRetinaClusters as a value that can configure a
 bank type, use VP instead

---
 Rec/Allen/python/Allen/config.py                        | 4 ----
 configuration/python/AllenConf/velo_reconstruction.py   | 2 +-
 configuration/python/AllenCore/gaudi_allen_generator.py | 2 ++
 main/src/BankTypes.cpp                                  | 1 -
 4 files changed, 3 insertions(+), 6 deletions(-)

diff --git a/Rec/Allen/python/Allen/config.py b/Rec/Allen/python/Allen/config.py
index 4ebedce4dd2..fef97e2ebcf 100755
--- a/Rec/Allen/python/Allen/config.py
+++ b/Rec/Allen/python/Allen/config.py
@@ -133,10 +133,6 @@ def setup_allen_non_event_data_service(allen_event_loop=False,
     elif bank_types is None:
         bank_types = set(converter_types.keys())
 
-    if 'VPRetinaCluster' in bank_types:
-        bank_types.remove('VPRetinaCluster')
-        bank_types.add('VP')
-
     # Always include the magnetic field polarity
     bank_types.add('Magnet')
 
diff --git a/configuration/python/AllenConf/velo_reconstruction.py b/configuration/python/AllenConf/velo_reconstruction.py
index 5388e365e7a..395e7f4e800 100644
--- a/configuration/python/AllenConf/velo_reconstruction.py
+++ b/configuration/python/AllenConf/velo_reconstruction.py
@@ -20,7 +20,7 @@ def decode_velo(retina_decoding=True):
 
     if retina_decoding:
         velo_banks = make_algorithm(
-            data_provider_t, name="velo_banks", bank_type="VPRetinaCluster")
+            data_provider_t, name="velo_banks", bank_type="VP")
 
         calculate_number_of_retinaclusters_each_sensor_pair = make_algorithm(
             calculate_number_of_retinaclusters_each_sensor_pair_t,
diff --git a/configuration/python/AllenCore/gaudi_allen_generator.py b/configuration/python/AllenCore/gaudi_allen_generator.py
index a0ee924611a..f5229c0e532 100644
--- a/configuration/python/AllenCore/gaudi_allen_generator.py
+++ b/configuration/python/AllenCore/gaudi_allen_generator.py
@@ -60,6 +60,8 @@ def make_algorithm(algorithm, name, *args, **kwargs):
         rawbank_list = ["ODIN"]
     elif bank_type == "ECal":
         rawbank_list = ["Calo", "EcalPacked"]
+    elif bank_type == "VP":
+        rawbank_list = ["VP", "VPRetinaCluster"]
     elif bank_type:
         rawbank_list = [bank_type]
 
diff --git a/main/src/BankTypes.cpp b/main/src/BankTypes.cpp
index 57690c30aa6..9b503915e81 100644
--- a/main/src/BankTypes.cpp
+++ b/main/src/BankTypes.cpp
@@ -10,7 +10,6 @@
 
 namespace {
   const std::map<std::string, BankTypes> BankNames = {{"VP", BankTypes::VP},
-                                                      {"VPRetinaCluster", BankTypes::VP},
                                                       {"UT", BankTypes::UT},
                                                       {"FTCluster", BankTypes::FT},
                                                       {"Muon", BankTypes::MUON},
-- 
GitLab


From 93292c5e675ed71ce377cf67eb1a6fa5c60d8c44 Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Wed, 29 Mar 2023 16:45:24 +0200
Subject: [PATCH 25/49] Add test that compares configurations written by Allen
 to those loaded from TCK

---
 Rec/Allen/python/Allen/qmtest/utils.py        | 19 +++++
 Rec/Allen/python/Allen/tck.py                 | 71 ++++++++++++-------
 Rec/Allen/scripts/create_hlt1_tck.py          | 10 ++-
 Rec/Allen/tests/options/compare_hlt1_tcks.py  | 13 +---
 .../options/test_tck_allen_write_config.py    | 67 +++++++++++++++++
 .../tests/qmtest/compare_tck_allen_config.qmt | 24 +++++++
 Rec/Allen/tests/qmtest/compare_tcks.qmt       |  3 +
 configuration/parser/ParseAlgorithms.py       | 38 ++++++----
 .../AllenCore/AllenSequenceGenerator.py       |  6 +-
 main/src/InputReader.cpp                      | 10 ++-
 10 files changed, 205 insertions(+), 56 deletions(-)
 create mode 100644 Rec/Allen/tests/options/test_tck_allen_write_config.py
 create mode 100644 Rec/Allen/tests/qmtest/compare_tck_allen_config.qmt

diff --git a/Rec/Allen/python/Allen/qmtest/utils.py b/Rec/Allen/python/Allen/qmtest/utils.py
index afb74d78220..49966f1dad7 100644
--- a/Rec/Allen/python/Allen/qmtest/utils.py
+++ b/Rec/Allen/python/Allen/qmtest/utils.py
@@ -8,9 +8,28 @@
 # granted to it by virtue of its status as an Intergovernmental Organization  #
 # or submit itself to any jurisdiction.                                       #
 ###############################################################################
+from pprint import pprint
+from collections import defaultdict
 
 
 def good_sequence(s):
     physics = s.startswith('hlt1') and 'validation' not in s
     extra = s in ('calo_prescaled_plus_lumi', 'passthrough')
     return physics or extra
+
+
+def print_sequence_differences(a, b):
+    diff_keys = set(a.keys()).symmetric_difference(set(b.keys()))
+    diff = defaultdict(dict)
+    ka = [k for k in a.keys() if k not in diff_keys]
+    for k in ka:
+        props_a = a[k]
+        props_b = b[k]
+        diff_prop_keys = set(props_a.keys()).symmetric_difference(
+            set(props_b.keys()))
+        pka = [k for k in props_a.keys() if k not in diff_prop_keys]
+        for prop_key in pka:
+            if props_a[prop_key] != props_b[prop_key]:
+                diff[k][prop_key] = (props_a[prop_key], props_b[prop_key])
+
+    return dict(diff)
diff --git a/Rec/Allen/python/Allen/tck.py b/Rec/Allen/python/Allen/tck.py
index 99df9ba93fe..d23b7090e63 100644
--- a/Rec/Allen/python/Allen/tck.py
+++ b/Rec/Allen/python/Allen/tck.py
@@ -23,8 +23,7 @@ from subprocess import PIPE, run
 def get_allen_version():
     if "ALLEN_INSTALL_DIR" in os.environ:
         manifest_tree = etree.parse(
-            os.path.expandvars("${ALLEN_INSTALL_DIR}/manifest.xml")
-        )
+            os.path.expandvars("${ALLEN_INSTALL_DIR}/manifest.xml"))
         allen_version = manifest_tree.find("project").get("version")
         vs = allen_version.split(".")
         version = "ALLEN_"
@@ -39,7 +38,8 @@ def get_allen_version():
 def sequence_to_tck(config: dict):
     tck_config = {"Scheduler/" + k: v for k, v in config["sequence"].items()}
 
-    for alg_type, alg_name, alg_kind in config["sequence"]["configured_algorithms"]:
+    for alg_type, alg_name, alg_kind in config["sequence"][
+            "configured_algorithms"]:
         properties = {
             k: v if type(v) == str else json.dumps(v)
             for k, v in config[alg_name].items()
@@ -59,10 +59,12 @@ def tck_to_sequence(config: dict):
         k.split("/")[1] for k in config.keys() if k.startswith("Scheduler/")
     ]
     sequence_config = {
-        "sequence": {e: config["Scheduler/" + e] for e in scheduler_entries}
+        "sequence": {e: config["Scheduler/" + e]
+                     for e in scheduler_entries}
     }
 
-    for alg_type, alg_name, alg_kind in sequence_config["sequence"]["configured_algorithms"]:
+    for alg_type, alg_name, alg_kind in sequence_config["sequence"][
+            "configured_algorithms"]:
         tck_props = config[f"{alg_kind}/{alg_type}/{alg_name}"]["Properties"]
         properties = {}
         for k, v in tck_props.items():
@@ -75,9 +77,8 @@ def tck_to_sequence(config: dict):
     return sequence_config
 
 
-def json_tck_db(
-    configuration: dict, sequence_type: str, label: str, allen_version: str, tck: int
-):
+def json_tck_db(configuration: dict, sequence_type: str, label: str,
+                allen_version: str, tck: int):
     if allen_version is None:
         allen_version = get_allen_version()
     if type(allen_version) == str:
@@ -95,7 +96,9 @@ def json_tck_db(
     manifest = {
         digest: {
             "TCK": hex(tck),
-            "Release2Type": {version: sequence_type},
+            "Release2Type": {
+                version: sequence_type
+            },
             "label": label,
         }
     }
@@ -115,7 +118,8 @@ def sequence_from_python(python_file: Path, node_name="hlt1_node") -> dict:
             mod = importlib.import_module(f"AllenSequences.{module_name}")
         else:
             # Load sequence module from python file
-            spec = importlib.util.spec_from_file_location(module_name, python_file)
+            spec = importlib.util.spec_from_file_location(
+                module_name, python_file)
             mod = importlib.util.module_from_spec(spec)
             sys.modules[module_name] = mod
             spec.loader.exec_module(mod)
@@ -123,7 +127,8 @@ def sequence_from_python(python_file: Path, node_name="hlt1_node") -> dict:
         node = getattr(mod, node_name)
 
     if node is None:
-        print(f"Failed to get {node_name} from sequence file {str(python_file)}")
+        print(
+            f"Failed to get {node_name} from sequence file {str(python_file)}")
         return None
 
     algorithms = build_sequence(node, verbose=False)
@@ -131,13 +136,13 @@ def sequence_from_python(python_file: Path, node_name="hlt1_node") -> dict:
 
 
 def sequence_to_git(
-    repository: Path,
-    sequence: dict,
-    sequence_type: dict,
-    label: str,
-    tck: int,
-    allen_version=None,
-    write_intermediate=False,
+        repository: Path,
+        sequence: dict,
+        sequence_type: dict,
+        label: str,
+        tck: int,
+        allen_version=None,
+        write_intermediate=False,
 ):
     db = json_tck_db(sequence, sequence_type, label, allen_version, tck)
     if write_intermediate:
@@ -159,7 +164,10 @@ def sequence_from_git(repository: Path, tck: str, use_bindings=True) -> str:
     if use_bindings:
         from Allen import TCK
         sequence, info = TCK.sequence_from_git(str(repository), tck)
-        return (sequence, {k: getattr(info, k) for k in ("digest", "tck", "release", "type", "label")})
+        return (sequence, {
+            k: getattr(info, k)
+            for k in ("digest", "tck", "release", "type", "label")
+        })
     else:
         p = run(
             [
@@ -175,15 +183,27 @@ def sequence_from_git(repository: Path, tck: str, use_bindings=True) -> str:
             print("Failed to convert configuration in git repo to JSON")
             return None
         tck_db = json.loads(p.stdout)
-        digest, manifest_entry = next(((k, m) for k, m in tck_db["manifest"].items() if m["TCK"] == tck), None)
-        release, seq_type = next((k, v) for k, v in manifest_entry["Release2Type"].items())
+        digest, manifest_entry = next(
+            ((k, m) for k, m in tck_db["manifest"].items() if m["TCK"] == tck),
+            None)
+        release, seq_type = next(
+            (k, v) for k, v in manifest_entry["Release2Type"].items())
         tck = manifest_entry["TCK"]
         label = manifest_entry["label"]
-        info = {"digest": digest, "tck": tck, "release": release, "type": seq_type, "label": label}
+        info = {
+            "digest": digest,
+            "tck": tck,
+            "release": release,
+            "type": seq_type,
+            "label": label
+        }
         return (json.dumps(tck_to_sequence(tck_db[digest])), info)
 
 
-def property_from_git(repository: Path, tck: str, algorithm=".*", property=".*"):
+def property_from_git(repository: Path,
+                      tck: str,
+                      algorithm=".*",
+                      property=".*"):
     alg_re = re.compile(algorithm)
     prop_re = re.compile(property)
 
@@ -204,14 +224,17 @@ def manifest_from_git(repository: Path):
     p = run(
         [
             "hlttck_cdb_listkeys",
-             "--list-manifest-as-json",
+            "--list-manifest-as-json",
             f"{str(repository)}",
             "-",
         ],
         stdout=PIPE,
+        stderr=PIPE,
     )
     if p.returncode != 0:
         print("Failed to convert manifest from git repo to JSON")
+        print(p.stdout)
+        print(p.stderr)
         return None
     else:
         return json.loads(p.stdout)
diff --git a/Rec/Allen/scripts/create_hlt1_tck.py b/Rec/Allen/scripts/create_hlt1_tck.py
index 4f69405bf75..95353273ad5 100644
--- a/Rec/Allen/scripts/create_hlt1_tck.py
+++ b/Rec/Allen/scripts/create_hlt1_tck.py
@@ -28,10 +28,12 @@ sequence_arg = Path(args.sequence[0])
 repository = Path(args.repository[0])
 tck = int(args.tck[0], 16)
 
+
 def dec_reporter_name(conf):
     return next((n for t, n, _ in conf['sequence']['configured_algorithms']
                  if t == "dec_reporter::dec_reporter_t"), None)
 
+
 sequence = None
 if sequence_arg.suffix in (".py", ''):
     from AllenCore.configuration_options import is_allen_standalone
@@ -43,7 +45,9 @@ if sequence_arg.suffix in (".py", ''):
         sequence = sequence_from_python(sequence_arg, node_name=args.hlt1_node)
     dn = dec_reporter_name(sequence)
     if dn is None:
-        print(f"Cannot create TCK {hex(tck)} for sequence {sequence_arg.stem}, because it does not contain the dec_reporter")
+        print(
+            f"Cannot create TCK {hex(tck)} for sequence {sequence_arg.stem}, because it does not contain the dec_reporter"
+        )
         sys.exit(1)
 elif sequence_arg.suffix == '.json':
     sequence, dn = {}, None
@@ -51,7 +55,9 @@ elif sequence_arg.suffix == '.json':
         sequence = json.load(sequence_file)
         dn = dec_reporter_name(sequence)
     if dn is None:
-        print(f"Cannot create TCK {hex(tck)} for sequence {sequence_arg.stem}, because it does not contain the dec_reporter")
+        print(
+            f"Cannot create TCK {hex(tck)} for sequence {sequence_arg.stem}, because it does not contain the dec_reporter"
+        )
         sys.exit(1)
     else:
         sequence[dn]['tck'] = tck
diff --git a/Rec/Allen/tests/options/compare_hlt1_tcks.py b/Rec/Allen/tests/options/compare_hlt1_tcks.py
index 18d5f17dcbe..5daff69d2c7 100644
--- a/Rec/Allen/tests/options/compare_hlt1_tcks.py
+++ b/Rec/Allen/tests/options/compare_hlt1_tcks.py
@@ -57,18 +57,7 @@ for info in entries_json:
             f"ERROR: sequences loaded directly from JSON and from JSON git repo for {tck} are not the same"
         )
 
-        diff_keys = set(sequence_direct.keys()).symmetric_difference(
-            set(sequence_json.keys()))
-        if diff_keys:
-            print("different keys")
-            print(diff_keys)
-        diff = {}
-        kj = [k for k in sequence_json.keys() if k not in diff_keys]
-        for k in kj:
-            if sequence_json[k] != sequence_direct[k]:
-                diff[k] = (sequence_json[k], sequence_direct[k])
-        print(diff)
-
+        print_sequence_differences(sequence_direct, sequence_json)
         error = True
 
 sys.exit(error)
diff --git a/Rec/Allen/tests/options/test_tck_allen_write_config.py b/Rec/Allen/tests/options/test_tck_allen_write_config.py
new file mode 100644
index 00000000000..bbaa55b4cad
--- /dev/null
+++ b/Rec/Allen/tests/options/test_tck_allen_write_config.py
@@ -0,0 +1,67 @@
+###############################################################################
+# (c) Copyright 2023 CERN for the benefit of the LHCb Collaboration           #
+#                                                                             #
+# This software is distributed under the terms of the Apache License          #
+# version 2 (Apache-2.0), copied verbatim in the file "COPYING".              #
+#                                                                             #
+# In applying this licence, CERN does not waive the privileges and immunities #
+# granted to it by virtue of its status as an Intergovernmental Organization  #
+# or submit itself to any jurisdiction.                                       #
+###############################################################################
+import os
+import sys
+import json
+from pathlib import Path
+from subprocess import PIPE, run
+from Allen.qmtest.utils import good_sequence, print_sequence_differences
+from Allen.tck import manifest_from_git, sequence_from_git
+
+tck_repo = Path("config_json.git")
+
+manifest = manifest_from_git(tck_repo)
+
+# Check all configurations against Allen, sorted by TCK
+manifest_entries = sorted(manifest.values(), key=lambda v: v["TCK"])
+
+for info in manifest_entries:
+    s, tck_info = sequence_from_git(tck_repo, info["TCK"])
+    tck_sequence = json.loads(s)
+    print('{release} {type:30s} {tck}'.format(**tck_info))
+    tck = info["TCK"]
+
+    cmd = [
+        "Allen",
+        "-g", # Use default binary geometry, irrelevant in this mode, but needs to be found
+        os.path.expandvars("${ALLEN_PROJECT_ROOT}/input/detector_configuration"),
+        "--param",
+        os.path.expandvars("${PARAMFILESROOT}"),
+        "--mdf", # No input file
+        '""',
+        "--sequence", # Load configuration from TCK
+        f"config_json.git:{tck}",
+        "--write-configuration", # Write configuration to config.json
+        "1",
+    ]
+
+    p = run(
+        cmd,
+        stdout=PIPE,
+        stderr=PIPE,
+    )
+    if p.returncode != 0:
+        print(f"Failed to write configuration from Allen for TCK {tck}")
+        print(" ".join(cmd))
+        print(p.stdout.decode())
+        print(p.stderr.decode())
+        error = True
+    else:
+        # Open configuration JSON written by Allen
+        allen_sequence = ""
+        with open("config.json") as f:
+            allen_sequence = json.load(f)
+
+        # Compare configurations
+        if allen_sequence != tck_sequence:
+            diffs = print_sequence_differences(tck_sequence, allen_sequence)
+            print("Differences between input configuration from TCK and written by Allen:")
+            print(diffs)
diff --git a/Rec/Allen/tests/qmtest/compare_tck_allen_config.qmt b/Rec/Allen/tests/qmtest/compare_tck_allen_config.qmt
new file mode 100644
index 00000000000..2d8d74a7e6a
--- /dev/null
+++ b/Rec/Allen/tests/qmtest/compare_tck_allen_config.qmt
@@ -0,0 +1,24 @@
+<?xml version="1.0" ?><!DOCTYPE extension  PUBLIC '-//QM/2.3/Extension//EN'  'http://www.codesourcery.com/qm/dtds/2.3/-//qm/2.3/extension//en.dtd'>
+<!--
+    (c) Copyright 2020 CERN for the benefit of the LHCb Collaboration
+-->
+<!--
+#######################################################
+# SUMMARY OF THIS TEST
+# ...................
+# Author: Roel Aaij
+# Purpose: Compare a number of TCKs to the output of Allen
+#          \-\-write-configuration 1
+#######################################################
+-->
+<extension class="GaudiTest.GaudiExeTest" kind="test">
+  <argument name="program"><text>python</text></argument>
+  <argument name="args"><set>
+    <text>${ALLENROOT}/tests/options/test_tck_allen_write_config.py</text>
+  </set></argument>
+  <argument name="prerequisites"><set>
+    <tuple><text>create_tcks</text><enumeral>PASS</enumeral></tuple>
+  </set></argument>
+  <argument name="timeout"><integer>600</integer></argument>
+  <argument name="use_temp_dir"><enumeral>true</enumeral></argument>
+</extension>
diff --git a/Rec/Allen/tests/qmtest/compare_tcks.qmt b/Rec/Allen/tests/qmtest/compare_tcks.qmt
index 1e62fc3306b..3c9aecea522 100644
--- a/Rec/Allen/tests/qmtest/compare_tcks.qmt
+++ b/Rec/Allen/tests/qmtest/compare_tcks.qmt
@@ -16,6 +16,9 @@
   <argument name="args"><set>
     <text>${ALLENROOT}/tests/options/compare_hlt1_tcks.py</text>
   </set></argument>
+  <argument name="prerequisites"><set>
+    <tuple><text>create_tcks</text><enumeral>PASS</enumeral></tuple>
+  </set></argument>
   <argument name="timeout"><integer>600</integer></argument>
   <argument name="use_temp_dir"><enumeral>true</enumeral></argument>
 <argument name="validator"><text>
diff --git a/configuration/parser/ParseAlgorithms.py b/configuration/parser/ParseAlgorithms.py
index e8054ac2f8a..5aaf2fb0fd7 100755
--- a/configuration/parser/ParseAlgorithms.py
+++ b/configuration/parser/ParseAlgorithms.py
@@ -13,6 +13,7 @@ import argparse
 import pickle
 import json
 
+
 def get_clang_so_location():
     """Function that fetches location of detected clang so."""
     import clang.cindex
@@ -30,7 +31,8 @@ class Parser():
 
     # Pattern sought in every file, prior to parsing the file for an algorithm
     __algorithm_pattern_compiled = re.compile(
-        "(?P<scope>Host|Device|Selection|Validation|Provider|Barrier)Algorithm")
+        "(?P<scope>Host|Device|Selection|Validation|Provider|Barrier)Algorithm"
+    )
 
     # File extensions considered
     __sought_extensions_compiled = [
@@ -72,9 +74,9 @@ class Parser():
 
         return algorithm_files
 
-
     @staticmethod
-    def parse_all(algorithm_files, prefix_project_folder,
+    def parse_all(algorithm_files,
+                  prefix_project_folder,
                   algorithm_parser=AlgorithmTraversal()):
         """Parses all files and traverses algorithm definitions."""
         algorithms = []
@@ -82,7 +84,7 @@ class Parser():
         for algorithm_file in algorithm_files:
             try:
                 parsed_algorithms = algorithm_parser.traverse(
-                            algorithm_file, prefix_project_folder)
+                    algorithm_file, prefix_project_folder)
                 if parsed_algorithms:
                     algorithms += parsed_algorithms
             except:
@@ -396,8 +398,7 @@ class AllenCore():
             "std::tuple<" + ",".join(output_types) + "> output_container {};",
             "// TES wrappers", f"{tes_wrappers}",
             "// Inputs to set_arguments_size and operator()",
-            f"{tes_wrappers_reference}",
-            f"Allen::Context context{{}};",
+            f"{tes_wrappers_reference}", f"Allen::Context context{{}};",
             f"const auto argument_references = ArgumentReferences<{algorithm.namespace}::Parameters>{{tes_wrappers_references, input_aggregates_tuple}};",
             f"// set arguments size invocation",
             f"m_algorithm.set_arguments_size(argument_references, runtime_options, *constants);",
@@ -607,9 +608,13 @@ class AllenCore():
         # Run the default_properties executable to get a JSON
         # representation of the default values of all properties of
         # all algorithms
-        p = run([default_properties], stdout=PIPE,
-                input=';'.join(["{}::{}".format(a.namespace, a.name) for a in parsed_algorithms]),
-                encoding='ascii')
+        p = run(
+            [default_properties],
+            stdout=PIPE,
+            input=';'.join([
+                "{}::{}".format(a.namespace, a.name) for a in parsed_algorithms
+            ]),
+            encoding='ascii')
 
         default_properties = None
         if p.returncode == 0:
@@ -621,7 +626,8 @@ class AllenCore():
         s = AllenCore.write_preamble()
         for algorithm in parsed_algorithms:
             tn = "{}::{}".format(algorithm.namespace, algorithm.name)
-            s += AllenCore.write_algorithm_code(algorithm, default_properties[tn])
+            s += AllenCore.write_algorithm_code(algorithm,
+                                                default_properties[tn])
         with open(filename, "w") as f:
             f.write(s)
 
@@ -655,8 +661,7 @@ class AllenCore():
 
     @staticmethod
     def write_algorithms_db(algorithms, filename):
-        code = "\n".join(("#include <AlgorithmDB.h>",
-                          "\n"))
+        code = "\n".join(("#include <AlgorithmDB.h>", "\n"))
         for alg in algorithms:
             code += f"namespace {alg.namespace} {{ struct {alg.name}; }}\n"
         code += "\nAllen::TypeErasedAlgorithm instantiate_allen_algorithm(const ConfiguredAlgorithm& alg) {\n"
@@ -770,7 +775,8 @@ if __name__ == '__main__':
     elif args.generate == "algorithm_headers_list":
         # Write list of files including algorithm definitions
         algorithm_headers_list = Parser().find_algorithm_files(prefix_folder)
-        AllenCore.write_algorithm_filename_list(algorithm_headers_list, args.filename)
+        AllenCore.write_algorithm_filename_list(algorithm_headers_list,
+                                                args.filename)
     else:
 
         if args.parsed_algorithms:
@@ -780,11 +786,13 @@ if __name__ == '__main__':
         else:
             # Otherwise generate parsed_algorithms on the fly
             algorithm_files = Parser().find_algorithm_files(prefix_folder)
-            parsed_algorithms = Parser().parse_all(algorithm_files, prefix_folder)
+            parsed_algorithms = Parser().parse_all(algorithm_files,
+                                                   prefix_folder)
 
         if args.generate == "views":
             # Generate algorithm python views
-            AllenCore.write_algorithms_view(parsed_algorithms, args.filename, args.default_properties)
+            AllenCore.write_algorithms_view(parsed_algorithms, args.filename,
+                                            args.default_properties)
         elif args.generate == "wrapperlist":
             # Generate Gaudi wrapper filenames
             gaudi_wrapper_filenames = AllenCore.write_gaudi_algorithms(
diff --git a/configuration/python/AllenCore/AllenSequenceGenerator.py b/configuration/python/AllenCore/AllenSequenceGenerator.py
index cabd0b6d78b..6510154af4d 100644
--- a/configuration/python/AllenCore/AllenSequenceGenerator.py
+++ b/configuration/python/AllenCore/AllenSequenceGenerator.py
@@ -25,7 +25,11 @@ def generate_json_configuration(algorithms):
     sequence_json = {}
     # Add properties for each algorithm
     for algorithm in algorithms:
-        sequence_json[algorithm.name] = {str(k): v for k, v in algorithm.type.getDefaultProperties().items() if not isinstance(v, GaudiDataHandle)}
+        sequence_json[algorithm.name] = {
+            str(k): v
+            for k, v in algorithm.type.getDefaultProperties().items()
+            if not isinstance(v, GaudiDataHandle)
+        }
         if len(algorithm.properties):
             for k, v in algorithm.properties.items():
                 sequence_json[algorithm.name][str(k)] = v
diff --git a/main/src/InputReader.cpp b/main/src/InputReader.cpp
index cdaa76d7f12..91487e6a847 100644
--- a/main/src/InputReader.cpp
+++ b/main/src/InputReader.cpp
@@ -169,9 +169,15 @@ std::map<std::string, nlohmann::json> ConfigurationReader::get_sequence() const
 
 void ConfigurationReader::save(std::string file_name)
 {
-  nlohmann::json j(m_params);
+  using json_float = nlohmann::basic_json<std::map, std::vector, std::string, bool, std::int32_t, std::uint32_t, float>;
+  json_float j;
+  for (auto [alg, props] : m_params) {
+    for (auto [k, v] : props) {
+      j[alg][k] = v;
+    }
+  }
   std::ofstream o(file_name);
-  o << j.dump(4);
+  o << std::setw(4) << j;
   o.close();
 }
 
-- 
GitLab


From 23b577a906987848d28ccd5422a4b4bd0bb02692 Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Wed, 29 Mar 2023 23:30:04 +0200
Subject: [PATCH 26/49] Add documentation and address comments

---
 Dumpers/BinaryDumpers/options/allen.py        |   4 +-
 Rec/Allen/python/Allen/tck.py                 | 119 +++++++++++++++++-
 Rec/Allen/scripts/create_hlt1_tck.py          |  41 ++++--
 Rec/Allen/tests/options/compare_hlt1_tcks.py  |  11 ++
 Rec/Allen/tests/options/create_hlt1_tcks.py   |   7 ++
 .../options/test_tck_allen_write_config.py    |  26 +++-
 configuration/src/bindings.cpp                |   3 +-
 7 files changed, 191 insertions(+), 20 deletions(-)

diff --git a/Dumpers/BinaryDumpers/options/allen.py b/Dumpers/BinaryDumpers/options/allen.py
index 85be23f75b1..3405b0766d9 100755
--- a/Dumpers/BinaryDumpers/options/allen.py
+++ b/Dumpers/BinaryDumpers/options/allen.py
@@ -196,7 +196,7 @@ sequence = os.path.expandvars(args.sequence)
 sequence_json = ""
 tck_option = re.compile(r"([^:]+):(0x[a-fA-F0-9]{8})")
 if (m := tck_option.match(sequence)):
-    from Allen.tck import sequence_from_git, get_allen_version
+    from Allen.tck import sequence_from_git, allen_version_from_build_manifest
     repo = m.group(1)
     tck = m.group(2)
     sequence_json, tck_info = sequence_from_git(repo, tck)
@@ -205,7 +205,7 @@ if (m := tck_option.match(sequence)):
             f"Failed to obtain configuration for TCK {tck} from repository {repo}"
         )
         sys.exit(1)
-    elif (allen_release := get_allen_version()) != tck_info.release:
+    elif (allen_release := allen_version_from_build_manifest()) != tck_info.release:
         print(
             f"TCK {tck} is compatible with Allen release {allen_release}, not with {tck_info.release}."
         )
diff --git a/Rec/Allen/python/Allen/tck.py b/Rec/Allen/python/Allen/tck.py
index d23b7090e63..76195e5a467 100644
--- a/Rec/Allen/python/Allen/tck.py
+++ b/Rec/Allen/python/Allen/tck.py
@@ -8,6 +8,70 @@
 # granted to it by virtue of its status as an Intergovernmental Organization  #
 # or submit itself to any jurisdiction.                                       #
 ###############################################################################
+"""Utilities to create and retrieve Allen configurations from the
+persistence format saved in a git repository.
+
+The persistent format is organised as a collection of JSON files, with
+each JSON file representing the configuration of a single
+component. In the case of Allen the only components that exist are
+algorithms.
+
+Each JSON file is stored in the repository at a path that is formatted as:
+
+"scope/namespace::type/instance_name",
+
+where scope is defined as part of the Allen configuration and may
+be DeviceAlgorithm, HostAlgorithm, BarrierAlgorithm, ProviderAlgorithm
+or SelectionAlgorithm. There is an additional "folder" under Scheduler
+where the more-or-less free-form (but still in JSON files)
+configuration of the scheduler/sequence is persisted.
+
+For example, the layout of the configuration of a single algorithm/component for
+"DeviceAlgorithm/velo_search_by_triplet::velo_search_by_triplet_t/velo_search_by_triplet":
+
+{
+    "Kind": "DeviceAlgorithm",
+    "Name": "velo_search_by_triplet",
+    "Properties": {
+        "block_dim_x": "64",
+        "max_scatter": "0.08",
+        "max_skipped_modules": "1",
+        "phi_tolerance": "0.045",
+        "verbosity": "3"
+    },
+    "Type": "velo_search_by_triplet::velo_search_by_triplet_t"
+}
+
+It should be noted that the JSON type of all the values in
+"Properties" are strings and not JSON types. This is a requirement of
+the persistent format.
+
+For Allen/HLT1 the Scheduler "folder" contains four files:
+argument_dependencies, configured_algorithms, configured_arguments and
+configured_sequence_arguments. These are the same as the entries that
+an Allen-layout configuration expects under "sequence".
+
+Some additional metadata is needed when a configuration is persisted, for example:
+
+{
+   "Release2Type": {
+        "ALLEN_v3r6": "hlt1_pp_only_matching"
+    },
+    "TCK": "0x10000016",
+    "label": "test"
+}
+
+Each of these entries is stored with a "digest" as key, whose value is
+not important, but is also used as a key for the corresponding
+configuration when it is extracted from the git repository.
+
+Some of the code that is needed to persist configurations is not
+available as python bindings, but instead through the
+"hlttck_cdb_listkeys" executable that resides in
+LHCb/Hlt/HltServices. It is also needed to create a JSON manifest that
+contains all configurations available in the repository.
+"""
+
 import json
 import os
 import sys
@@ -20,7 +84,12 @@ from hashlib import md5
 from subprocess import PIPE, run
 
 
-def get_allen_version():
+def allen_version_from_build_manifest():
+    """Get the built/installed version of Allen from the
+    build/install manifest in the format ALLEN_vXrYpZ where pZ is
+    optional.
+    """
+
     if "ALLEN_INSTALL_DIR" in os.environ:
         manifest_tree = etree.parse(
             os.path.expandvars("${ALLEN_INSTALL_DIR}/manifest.xml"))
@@ -36,6 +105,11 @@ def get_allen_version():
 
 
 def sequence_to_tck(config: dict):
+    """Convert an "Allen" configuration to the format required for
+    persistence. This includes in particular the (JSON) serialization
+    of all property values to strings.
+    """
+
     tck_config = {"Scheduler/" + k: v for k, v in config["sequence"].items()}
 
     for alg_type, alg_name, alg_kind in config["sequence"][
@@ -55,6 +129,9 @@ def sequence_to_tck(config: dict):
 
 
 def tck_to_sequence(config: dict):
+    """Convert a persisted configuration to an "Allen" configuration.
+    """
+
     scheduler_entries = [
         k.split("/")[1] for k in config.keys() if k.startswith("Scheduler/")
     ]
@@ -79,8 +156,18 @@ def tck_to_sequence(config: dict):
 
 def json_tck_db(configuration: dict, sequence_type: str, label: str,
                 allen_version: str, tck: int):
+    """Create a JSON-formatted string that hlttck_cdb_listkeys can
+    write to a git repository.
+
+    The hlttck_cdb_listkeys resides in LHCb/Hlt/HltServices. It is
+    passed a JSON-formatted string through stdin. The JSON contains
+    two entries: a single-entry manifest with the key "manifest" and
+    the respective configuration with its digest as key. The same
+    digest is used as the key for the entry containing the metadata in
+    the manifest.
+    """
     if allen_version is None:
-        allen_version = get_allen_version()
+        allen_version = allen_version_from_build_manifest()
     if type(allen_version) == str:
         version = allen_version
     else:
@@ -92,6 +179,10 @@ def json_tck_db(configuration: dict, sequence_type: str, label: str,
         )
 
     tck_config = sequence_to_tck(configuration)
+
+    # The value of the digest is not important as long as it matches
+    # between the manifest and the key of the configuration. Use MD5
+    # as that was used extensively and more meaningfully in Run 2.
     digest = md5(json.dumps(tck_config).encode("utf-8")).hexdigest()
     manifest = {
         digest: {
@@ -106,6 +197,9 @@ def json_tck_db(configuration: dict, sequence_type: str, label: str,
 
 
 def sequence_from_python(python_file: Path, node_name="hlt1_node") -> dict:
+    """Retrieve an Allen configuration in JSON format from a python module
+    """
+
     from AllenCore.allen_standalone_generator import generate, build_sequence
     from AllenCore.AllenSequenceGenerator import generate_json_configuration
 
@@ -144,6 +238,9 @@ def sequence_to_git(
         allen_version=None,
         write_intermediate=False,
 ):
+    """Write an Allen configuration to a git repository with metadata.
+    """
+
     db = json_tck_db(sequence, sequence_type, label, allen_version, tck)
     if write_intermediate:
         with open(hex(tck) + ".json", "w") as f:
@@ -161,6 +258,13 @@ def sequence_to_git(
 
 
 def sequence_from_git(repository: Path, tck: str, use_bindings=True) -> str:
+    """Retrieve the Allen configuration identified by the given TCK
+    from a git repository.
+
+    use_bindings determines wether a Python module (default) or
+    hlttck_cdb_listkeys is used to retrieve the JSON configuration.
+    """
+
     if use_bindings:
         from Allen import TCK
         sequence, info = TCK.sequence_from_git(str(repository), tck)
@@ -206,8 +310,13 @@ def property_from_git(repository: Path,
                       property=".*"):
+    """Retrieve an Allen configuration identified by TCK from a git
+    repository and extract specific properties from it using regexes
+    to match algorithm name and property key
+    """
     alg_re = re.compile(algorithm)
     prop_re = re.compile(property)
 
-    sequence, _ = sequence_from_git(repository, tck)
+    s, _ = sequence_from_git(repository, tck)
+    sequence = json.loads(s)
 
     result = {}
     for alg, props in sequence.items():
@@ -221,6 +330,10 @@ def property_from_git(repository: Path,
 
 
 def manifest_from_git(repository: Path):
+    """Use hlttck_cdb_listkeys to retrieve the manifest for a git
+    repositry
+    """
+
     p = run(
         [
             "hlttck_cdb_listkeys",
diff --git a/Rec/Allen/scripts/create_hlt1_tck.py b/Rec/Allen/scripts/create_hlt1_tck.py
index 95353273ad5..955a3ad89ea 100644
--- a/Rec/Allen/scripts/create_hlt1_tck.py
+++ b/Rec/Allen/scripts/create_hlt1_tck.py
@@ -14,13 +14,33 @@ import sys
 from Allen.tck import sequence_to_git, sequence_from_python
 from pathlib import Path
 
-parser = argparse.ArgumentParser()
+parser = argparse.ArgumentParser(description="""
+Persist an Allen configuration in a git repository identified by a TCK
+
+The configuration can be obtained from:
+- a JSON file
+- a python module that generates a configuration
+- a python file that generates a configuration
+
+Some metadata is also persisted.
+""")
 parser.add_argument("sequence", nargs=1)
 parser.add_argument("repository", nargs=1)
 parser.add_argument("tck", nargs=1)
 parser.add_argument(
-    "--python-hlt1-node", type=str, default="hlt1_node", dest="hlt1_node")
-parser.add_argument("--label", default="test", type=str)
+    "--python-hlt1-node",
+    type=str,
+    help=
+    "Name of the variable that stores the configuration in the python module or file",
+    default="hlt1_node",
+    dest="hlt1_node",
+)
+parser.add_argument(
+    "--label",
+    help="Label persisted as metadata together with the TCK",
+    default="test",
+    type=str,
+)
 
 args = parser.parse_args()
 
@@ -30,16 +50,21 @@ tck = int(args.tck[0], 16)
 
 
 def dec_reporter_name(conf):
-    return next((n for t, n, _ in conf['sequence']['configured_algorithms']
-                 if t == "dec_reporter::dec_reporter_t"), None)
+    return next(
+        (n for t, n, _ in conf["sequence"]["configured_algorithms"]
+         if t == "dec_reporter::dec_reporter_t"),
+        None,
+    )
 
 
 sequence = None
-if sequence_arg.suffix in (".py", ''):
+if sequence_arg.suffix in (".py", ""):
     from AllenCore.configuration_options import is_allen_standalone
+
     is_allen_standalone.global_bind(standalone=True)
 
     from AllenConf.persistency import make_dec_reporter
+
     sequence, dn = {}, None
     with make_dec_reporter.bind(TCK=tck):
         sequence = sequence_from_python(sequence_arg, node_name=args.hlt1_node)
@@ -49,7 +74,7 @@ if sequence_arg.suffix in (".py", ''):
             f"Cannot create TCK {hex(tck)} for sequence {sequence_arg.stem}, because it does not contain the dec_reporter"
         )
         sys.exit(1)
-elif sequence_arg.suffix == '.json':
+elif sequence_arg.suffix == ".json":
     sequence, dn = {}, None
     with open(sequence_arg, "r") as sequence_file:
         sequence = json.load(sequence_file)
@@ -60,7 +85,7 @@ elif sequence_arg.suffix == '.json':
         )
         sys.exit(1)
     else:
-        sequence[dn]['tck'] = tck
+        sequence[dn]["tck"] = tck
 
 sequence_to_git(repository, sequence, sequence_arg.stem, args.label, tck)
 print(f"Created TCK {hex(tck)} for sequence {sequence_arg.stem}")
diff --git a/Rec/Allen/tests/options/compare_hlt1_tcks.py b/Rec/Allen/tests/options/compare_hlt1_tcks.py
index 5daff69d2c7..e082bfb0bc6 100644
--- a/Rec/Allen/tests/options/compare_hlt1_tcks.py
+++ b/Rec/Allen/tests/options/compare_hlt1_tcks.py
@@ -8,6 +8,17 @@
 # granted to it by virtue of its status as an Intergovernmental Organization  #
 # or submit itself to any jurisdiction.                                       #
 ###############################################################################
+"""For all configurations persisted by the create_hlt1_tcks
+test/options file, load them from two repositories and from the JSON
+files generated at build time. The entries in the two git repositories
+were created from the JSON files generated at build time and directly
+from their respective python modules.
+
+If all configurations are identical then neither the persistence nor
+the generation of configurations alters the content of the
+configuration.
+"""
+
 import os
 import sys
 import json
diff --git a/Rec/Allen/tests/options/create_hlt1_tcks.py b/Rec/Allen/tests/options/create_hlt1_tcks.py
index caf417658e9..dd1826d33a3 100644
--- a/Rec/Allen/tests/options/create_hlt1_tcks.py
+++ b/Rec/Allen/tests/options/create_hlt1_tcks.py
@@ -8,6 +8,13 @@
 # granted to it by virtue of its status as an Intergovernmental Organization  #
 # or submit itself to any jurisdiction.                                       #
 ###############################################################################
+"""Create TCKs for a representative set of configurations. The
+configurations should at least contain the dec_reporter algorithm for
+this to make sense. Use both JSON files and the python modules they
+were generated from and store the resulting configurations in
+different git repositories with the same TCK.
+"""
+
 import os
 import sys
 import subprocess
diff --git a/Rec/Allen/tests/options/test_tck_allen_write_config.py b/Rec/Allen/tests/options/test_tck_allen_write_config.py
index bbaa55b4cad..cb50ed70729 100644
--- a/Rec/Allen/tests/options/test_tck_allen_write_config.py
+++ b/Rec/Allen/tests/options/test_tck_allen_write_config.py
@@ -8,6 +8,17 @@
 # granted to it by virtue of its status as an Intergovernmental Organization  #
 # or submit itself to any jurisdiction.                                       #
 ###############################################################################
+"""Load all available configuration from a test repository, use them
+to configure Allen and in turn dump Allen's configuration to a JSON
+file. Then compare the configuration dumped by Allen to the original
+TCK and check that they are identical.
+
+This ensures that no properties are changed after they are set and a
+round trip of load configuration, configure Allen, dump configuration
+does not alter any properties as a side effect of configuration or
+persistence.
+"""
+
 import os
 import sys
 import json
@@ -31,15 +42,16 @@ for info in manifest_entries:
 
     cmd = [
         "Allen",
-        "-g", # Use default binary geometry, irrelavant in this mode, but needs to be found
-        os.path.expandvars("${ALLEN_PROJECT_ROOT}/input/detector_configuration"),
+        "-g",  # Use default binary geometry, irrelevant in this mode, but needs to be found
+        os.path.expandvars(
+            "${ALLEN_PROJECT_ROOT}/input/detector_configuration"),
         "--param",
         os.path.expandvars("${PARAMFILESROOT}"),
-        "--mdf", # No input file
+        "--mdf",  # No input file
         '""',
-        "--sequence", # Load configuration from TCK
+        "--sequence",  # Load configuration from TCK
         f"config_json.git:{tck}",
-        "--write-configuration", # Write configuration to config.json
+        "--write-configuration",  # Write configuration to config.json
         "1",
     ]
 
@@ -63,5 +75,7 @@ for info in manifest_entries:
         # Compare configurations
         if allen_sequence != tck_sequence:
             diffs = print_sequence_differences(tck_sequence, allen_sequence)
-            print("Differences between input configuration from TCK and written by Allen:")
+            print(
+                "Differences between input configuration from TCK and written by Allen:"
+            )
             print(diffs)
diff --git a/configuration/src/bindings.cpp b/configuration/src/bindings.cpp
index 030eaefc6b5..822c5b5fc4c 100644
--- a/configuration/src/bindings.cpp
+++ b/configuration/src/bindings.cpp
@@ -34,7 +34,8 @@ PYBIND11_MODULE(TCK, m)
     .def_readwrite("label", &LHCb::TCK::Info::label);
 
   m.doc() = R"pbdoc(
-    standalong background generator for KM3NeT
+    Utility functions to interact with a git repository that contains
+    persisted configurations identified by so-called TCK
 
     .. currentmodule:: TCK
 
-- 
GitLab


From ea062052eab1cfea825f903539b354cc1c1591d0 Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Thu, 30 Mar 2023 23:29:00 +0200
Subject: [PATCH 27/49] Slight cleanup of tests

---
 .../tests/qmtest/lhcb_geometry_allen_event_loop.qmt              | 1 +
 Rec/Allen/tests/qmtest/mdf_input.qmt                             | 1 -
 2 files changed, 1 insertion(+), 1 deletion(-)

diff --git a/Dumpers/BinaryDumpers/tests/qmtest/lhcb_geometry_allen_event_loop.qmt b/Dumpers/BinaryDumpers/tests/qmtest/lhcb_geometry_allen_event_loop.qmt
index 9d37b026f7e..1d8e098e69e 100644
--- a/Dumpers/BinaryDumpers/tests/qmtest/lhcb_geometry_allen_event_loop.qmt
+++ b/Dumpers/BinaryDumpers/tests/qmtest/lhcb_geometry_allen_event_loop.qmt
@@ -18,6 +18,7 @@
     <text>--tags=dd4hep:trunk,master|detdesc:dddb-20220705,sim-20220705-vc-md100</text>
     <text>--mdf</text><text>root://eoslhcb.cern.ch///eos/lhcb/wg/rta/samples/mc/Jira_LHCBGAUSS-2635/Minbias_MagDown_Boole_lhcbhead3337_dddb-20220705_sim-20220705-vc-md100.mdf</text>
     <text>--sequence</text><text>$ALLEN_INSTALL_DIR/constants/hlt1_pp_no_ut.json</text>
+    <text>--monitoring-filename</text><text>allen_event_loop.root</text>
     <text>-n</text><text>10000</text>
   </set></argument>
   <arguement name="timeout"><integer>600</integer></arguement>
diff --git a/Rec/Allen/tests/qmtest/mdf_input.qmt b/Rec/Allen/tests/qmtest/mdf_input.qmt
index 09658166ffa..254dd70d22a 100644
--- a/Rec/Allen/tests/qmtest/mdf_input.qmt
+++ b/Rec/Allen/tests/qmtest/mdf_input.qmt
@@ -20,7 +20,6 @@
     <text>--sequence</text><text>${ALLEN_INSTALL_DIR}/constants/hlt1_pp_matching.json</text>
     <text>--tags=detdesc:dddb-20220705,sim-20220705-vc-md100|dd4hep:trunk,master</text>
     <text>--events-per-slice</text><text>500</text>
-    <text>--monitoring-filename</text><text>""</text>
     <text>-m</text><text>600</text>
     <text>-s</text><text>3</text>
     <text>-t</text><text>2</text>
-- 
GitLab


From 01d7cef79f76ada1b70eb2ba9d08f8d9d7982fcc Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Thu, 20 Apr 2023 21:56:50 +0200
Subject: [PATCH 28/49] Fix build after rebase

---
 device/utils/CMakeLists.txt                   | 33 ++++++++++++++-----
 .../scaler}/include/DeterministicScaler.cuh   |  0
 host/dummy_maker/CMakeLists.txt               |  4 +--
 3 files changed, 27 insertions(+), 10 deletions(-)
 rename device/{selections/Hlt1 => utils/scaler}/include/DeterministicScaler.cuh (100%)

diff --git a/device/utils/CMakeLists.txt b/device/utils/CMakeLists.txt
index ef109fab956..7405eea0c08 100644
--- a/device/utils/CMakeLists.txt
+++ b/device/utils/CMakeLists.txt
@@ -4,19 +4,36 @@
 file(GLOB prefix_sum "prefix_sum/src/*cu")
 file(GLOB get_type_id "get_type_id/src/*cu")
 
-allen_add_device_library(Utils STATIC
-  ${prefix_sum}
-  ${get_type_id}
-)
+add_library(UtilsHeaders INTERFACE)
 
-target_link_libraries(Utils PRIVATE Backend EventModel HostEventModel)
+target_link_libraries(UtilsHeaders
+  INTERFACE
+    Backend
+    EventModel
+    HostEventModel)
 
-target_include_directories(Utils PUBLIC
-  $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/prefix_sum/include>
+target_include_directories(UtilsHeaders INTERFACE
   $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/binary_search/include>
-  $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/get_type_id/include>
   $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/sorting/include>
   $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/float_operations/include>
   $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/memory/include>
+  $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/scaler/include>
   $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/tuple_operations/include>
   $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/warp/include>)
+
+allen_add_device_library(Utils STATIC
+  ${prefix_sum}
+  ${get_type_id}
+)
+
+target_link_libraries(Utils
+  PUBLIC
+    UtilsHeaders
+    PRIVATE
+      Backend
+      EventModel
+      HostEventModel)
+
+target_include_directories(Utils PUBLIC
+  $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/prefix_sum/include>
+  $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/get_type_id/include>)
diff --git a/device/selections/Hlt1/include/DeterministicScaler.cuh b/device/utils/scaler/include/DeterministicScaler.cuh
similarity index 100%
rename from device/selections/Hlt1/include/DeterministicScaler.cuh
rename to device/utils/scaler/include/DeterministicScaler.cuh
diff --git a/host/dummy_maker/CMakeLists.txt b/host/dummy_maker/CMakeLists.txt
index 1386ccdee59..2d2f6c1e9b3 100644
--- a/host/dummy_maker/CMakeLists.txt
+++ b/host/dummy_maker/CMakeLists.txt
@@ -7,6 +7,6 @@ allen_add_host_library(HostDummyMaker STATIC
   ${host_dummy_maker}
 )
 
-target_link_libraries(HostDummyMaker PUBLIC Selections Lumi HostEventModel EventModel Gear AllenCommon Backend)
+target_link_libraries(HostDummyMaker PRIVATE UtilsHeaders Selections Lumi HostEventModel EventModel Gear AllenCommon Backend)
 
-target_include_directories(HostDummyMaker PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/include>)
+target_include_directories(HostDummyMaker PRIVATE include)
-- 
GitLab


From ab5f777e76ccce80a0b2dcd8404b195e00d84d3f Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Fri, 21 Apr 2023 20:49:06 +0200
Subject: [PATCH 29/49] Follow rename of hlttck_cdb_listkeys to hlt_tck_tool

---
 Rec/Allen/python/Allen/tck.py | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/Rec/Allen/python/Allen/tck.py b/Rec/Allen/python/Allen/tck.py
index 76195e5a467..6dd069be1c2 100644
--- a/Rec/Allen/python/Allen/tck.py
+++ b/Rec/Allen/python/Allen/tck.py
@@ -67,7 +67,7 @@ configuration when it is extracted from the git repository.
 
 Some of the code that is needed to persist configurations is not
 available as python bindings, but instead through the
-"hlttck_cdb_listkeys" executable that resides in
+"hlt_tck_tool" executable that resides in
 LHCb/Hlt/HltServices. It is also needed to create a JSON manifest that
 contains all configurations available in the repository.
 """
@@ -156,10 +156,10 @@ def tck_to_sequence(config: dict):
 
 def json_tck_db(configuration: dict, sequence_type: str, label: str,
                 allen_version: str, tck: int):
-    """Create a JSON-formatted string that hlttck_cdb_listkeys can
+    """Create a JSON-formatted string that hlt_tck_tool can
     write to a git repository.
 
-    The hlttck_cdb_listkeys resides in LHCb/Hlt/HltServices. It is
+    The hlt_tck_tool resides in LHCb/Hlt/HltServices. It is
     passed a JSON-formatted string through stdin. The JSON contains
     two entries: a single-entry manifest with the key "manifest" and
     the respective configuration with its digest as key. The same
@@ -247,7 +247,7 @@ def sequence_to_git(
             json.dump(db, f, indent=4, sort_keys=True)
 
     p = run(
-        ["hlttck_cdb_listkeys", "--convert-to-git", "-", f"{str(repository)}"],
+        ["hlt_tck_tool", "--convert-to-git", "-", f"{str(repository)}"],
         stdout=PIPE,
         input=json.dumps(db),
         encoding="ascii",
@@ -262,7 +262,7 @@ def sequence_from_git(repository: Path, tck: str, use_bindings=True) -> str:
     from a git repository.
 
     use_bindings determines wether a Python module (default) or
-    hlttck_cdb_listkeys is used to retrieve the JSON configuration.
+    hlt_tck_tool is used to retrieve the JSON configuration.
     """
 
     if use_bindings:
@@ -275,7 +275,7 @@ def sequence_from_git(repository: Path, tck: str, use_bindings=True) -> str:
     else:
         p = run(
             [
-                "hlttck_cdb_listkeys",
+                "hlt_tck_tool",
                 f"--tck={tck}",
                 "--convert-to-json",
                 f"{str(repository)}",
@@ -330,13 +330,13 @@ def property_from_git(repository: Path,
 
 
 def manifest_from_git(repository: Path):
-    """Use hlttck_cdb_listkeys to retrieve the manifest for a git
+    """Use hlt_tck_tool to retrieve the manifest for a git
     repositry
     """
 
     p = run(
         [
-            "hlttck_cdb_listkeys",
+            "hlt_tck_tool",
             "--list-manifest-as-json",
             f"{str(repository)}",
             "-",
-- 
GitLab


From c89bc17cf481f36468effc8fb478638d632f0178 Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Fri, 21 Apr 2023 21:55:41 +0200
Subject: [PATCH 30/49] Fix failure of TCK creation

---
 Rec/Allen/python/Allen/tck.py        | 15 ++++++++-------
 Rec/Allen/scripts/create_hlt1_tck.py |  8 ++++++--
 2 files changed, 14 insertions(+), 9 deletions(-)

diff --git a/Rec/Allen/python/Allen/tck.py b/Rec/Allen/python/Allen/tck.py
index 6dd069be1c2..f1cf3ff3879 100644
--- a/Rec/Allen/python/Allen/tck.py
+++ b/Rec/Allen/python/Allen/tck.py
@@ -159,12 +159,13 @@ def json_tck_db(configuration: dict, sequence_type: str, label: str,
     """Create a JSON-formatted string that hlt_tck_tool can
     write to a git repository.
 
-    The hlt_tck_tool resides in LHCb/Hlt/HltServices. It is
-    passed a JSON-formatted string through stdin. The JSON contains
-    two entries: a single-entry manifest with the key "manifest" and
-    the respective configuration with its digest as key. The same
-    digest is used as the key for the entry containing the metadata in
-    the manifest.
+    The hlt_tck_tool resides in LHCb/Hlt/HltServices. It is passed a
+    JSON-formatted string through stdin. The JSON contains two
+    entries: a single-entry manifest with the key "manifest" and the
+    respective configuration with its digest as key. The same digest
+    is used as the key for the entry containing the metadata in the
+    manifest.
+
     """
     if allen_version is None:
         allen_version = allen_version_from_build_manifest()
@@ -254,7 +255,7 @@ def sequence_to_git(
     )
 
     if p.returncode != 0:
-        print("Failed to convert sequence to git repo")
+        raise RuntimeError("Failed to convert sequence to git repo")
 
 
 def sequence_from_git(repository: Path, tck: str, use_bindings=True) -> str:
diff --git a/Rec/Allen/scripts/create_hlt1_tck.py b/Rec/Allen/scripts/create_hlt1_tck.py
index 955a3ad89ea..9ffe89a989b 100644
--- a/Rec/Allen/scripts/create_hlt1_tck.py
+++ b/Rec/Allen/scripts/create_hlt1_tck.py
@@ -87,5 +87,9 @@ elif sequence_arg.suffix == ".json":
     else:
         sequence[dn]["tck"] = tck
 
-sequence_to_git(repository, sequence, sequence_arg.stem, args.label, tck)
-print(f"Created TCK {hex(tck)} for sequence {sequence_arg.stem}")
+try:
+    sequence_to_git(repository, sequence, sequence_arg.stem, args.label, tck)
+    print(f"Created TCK {hex(tck)} for sequence {sequence_arg.stem}")
+except RuntimeError as e:
+    print(e)
+    sys.exit(1)
-- 
GitLab


From 5c2a58b2d77af190e97a494f6f62a5c342eb25ba Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Fri, 21 Apr 2023 22:25:23 +0200
Subject: [PATCH 31/49] Fix MEPProvider configuration in allen.py

---
 Dumpers/BinaryDumpers/options/allen.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/Dumpers/BinaryDumpers/options/allen.py b/Dumpers/BinaryDumpers/options/allen.py
index 3405b0766d9..382241dbe51 100755
--- a/Dumpers/BinaryDumpers/options/allen.py
+++ b/Dumpers/BinaryDumpers/options/allen.py
@@ -205,7 +205,8 @@ if (m := tck_option.match(sequence)):
             f"Failed to obtain configuration for TCK {tck} from repository {repo}"
         )
         sys.exit(1)
-    elif (allen_release := allen_version_from_build_manifest()) != tck_info.release:
+    elif (allen_release :=
+          allen_version_from_build_manifest()) != tck_info.release:
         print(
             f"TCK {tck} is compatible with Allen release {allen_release}, not with {tck_info.release}."
         )
@@ -223,7 +224,7 @@ if args.mep:
     from Configurables import MEPProvider, AllenConfiguration
 
     allen_conf = AllenConfiguration("AllenConfiguration")
-    allen_conf.JSON = sequence_json
+    allen_conf.JSON = sequence
     allen_conf.OutputLevel = 3
 
     mep_provider = MEPProvider()
-- 
GitLab


From 8c5f59479ac900d375cde4ebf57c299ed0ad40da Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Sat, 22 Apr 2023 21:24:28 +0200
Subject: [PATCH 32/49] Fix AllenOnline tests

---
 Rec/Allen/python/AllenAlgorithms/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Rec/Allen/python/AllenAlgorithms/__init__.py b/Rec/Allen/python/AllenAlgorithms/__init__.py
index f4ecabff7c3..8044040360a 100644
--- a/Rec/Allen/python/AllenAlgorithms/__init__.py
+++ b/Rec/Allen/python/AllenAlgorithms/__init__.py
@@ -10,7 +10,7 @@
 ###############################################################################
 import os
 
-__path__ = [
+__path__ += [
     d for d in [
         os.path.realpath(
             os.path.join(
-- 
GitLab


From da4eb2c1234d0a16e6a9506ea3a5b5613d1cc10e Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Sat, 22 Apr 2023 22:10:18 +0200
Subject: [PATCH 33/49] [test_mep_banks] Different temporary config files to
 avoid a race condition in parallel tests

---
 mdf/test/test_mep_banks.cpp | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/mdf/test/test_mep_banks.cpp b/mdf/test/test_mep_banks.cpp
index 13c061434d4..dce64373fd6 100644
--- a/mdf/test/test_mep_banks.cpp
+++ b/mdf/test/test_mep_banks.cpp
@@ -77,7 +77,7 @@ namespace Allen {
   }
 } // namespace Allen
 
-fs::path write_json(std::unordered_set<BankTypes> const& bank_types, bool velo_sp)
+fs::path write_json(std::unordered_set<BankTypes> const& bank_types, bool velo_sp, bool transpose)
 {
 
   // Write a JSON file that can be fed to AllenConfiguration to
@@ -96,7 +96,7 @@ fs::path write_json(std::unordered_set<BankTypes> const& bank_types, bool velo_s
   }
   bank_types_json["sequence"]["configured_algorithms"] = configured_algorithms;
 
-  auto bt_filename = fs::canonical(fs::current_path()) / "bank_types.json";
+  auto bt_filename = fs::canonical(fs::current_path()) / ("bank_types"s + (transpose ? "_transpose" : "") + ".json");
   std::ofstream bt_json(bt_filename.string());
   if (!bt_json.is_open()) {
     std::cerr << "Failed to open json file for bank types configuration"
@@ -204,7 +204,7 @@ int main(int argc, char* argv[])
         s_config.sds.emplace(bt);
       }
     }
-    auto json_file = write_json(s_config.sds, velo_sp);
+    auto json_file = write_json(s_config.sds, velo_sp, s_config.transpose_mep);
 
     // Allocate providers and get slices
     std::map<std::string, std::string> options = {{"s", std::to_string(s_config.n_slices)},
-- 
GitLab


From 1b14795c6eb478a382f6d0b41523faaa5fa8b986 Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Sun, 23 Apr 2023 21:39:01 +0200
Subject: [PATCH 34/49] Reduce number of sequences in TCK tests for dbg
 platforms to avoid timeouts

---
 Rec/Allen/tests/options/create_hlt1_tcks.py            | 10 +++++++++-
 Rec/Allen/tests/options/test_tck_allen_write_config.py |  2 +-
 2 files changed, 10 insertions(+), 2 deletions(-)

diff --git a/Rec/Allen/tests/options/create_hlt1_tcks.py b/Rec/Allen/tests/options/create_hlt1_tcks.py
index dd1826d33a3..be794a8c4e1 100644
--- a/Rec/Allen/tests/options/create_hlt1_tcks.py
+++ b/Rec/Allen/tests/options/create_hlt1_tcks.py
@@ -25,7 +25,15 @@ seq_dir = os.path.expandvars("${ALLEN_INSTALL_DIR}/constants")
 tck_script = os.path.expandvars("${ALLENROOT}/scripts/create_hlt1_tck.py")
 
 error = False
-for i, seq in enumerate(s for s in os.listdir(seq_dir) if good_sequence(s)):
+sequences = [s for s in os.listdir(seq_dir) if good_sequence(s)]
+# Create TCKs for a few sequences in the DBG build to avoid timeouts
+if '-dbg' in os.environ['BINARY_TAG']:
+    import random
+    random.seed("HLT1TCKs-dbg")
+    random.shuffle(sequences)
+    sequences = sequences[:10]
+
+for i, seq in enumerate(sequences):
     seq = Path(seq_dir) / seq
 
     # Create TCKs from JSON files
diff --git a/Rec/Allen/tests/options/test_tck_allen_write_config.py b/Rec/Allen/tests/options/test_tck_allen_write_config.py
index cb50ed70729..6a4b2d4dc1a 100644
--- a/Rec/Allen/tests/options/test_tck_allen_write_config.py
+++ b/Rec/Allen/tests/options/test_tck_allen_write_config.py
@@ -24,7 +24,7 @@ import sys
 import json
 from pathlib import Path
 from subprocess import PIPE, run
-from Allen.qmtest.utils import good_sequence, print_sequence_differences
+from Allen.qmtest.utils import print_sequence_differences
 from Allen.tck import manifest_from_git, sequence_from_git
 
 tck_repo = Path("config_json.git")
-- 
GitLab


From 72b5a33cb069a2bc9347526a3fbd9fdc9b617701 Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Fri, 28 Apr 2023 21:16:45 +0200
Subject: [PATCH 35/49] Test only 5 TCKs in dbg builds

---
 Rec/Allen/tests/options/create_hlt1_tcks.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Rec/Allen/tests/options/create_hlt1_tcks.py b/Rec/Allen/tests/options/create_hlt1_tcks.py
index be794a8c4e1..e874bc24e7c 100644
--- a/Rec/Allen/tests/options/create_hlt1_tcks.py
+++ b/Rec/Allen/tests/options/create_hlt1_tcks.py
@@ -31,7 +31,7 @@ if '-dbg' in os.environ['BINARY_TAG']:
     import random
     random.seed("HLT1TCKs-dbg")
     random.shuffle(sequences)
-    sequences = sequences[:10]
+    sequences = sequences[:5]
 
 for i, seq in enumerate(sequences):
     seq = Path(seq_dir) / seq
-- 
GitLab


From 68a724e4933dd8e45e8d493e52e5edd2240919d1 Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Wed, 24 May 2023 15:35:55 +0200
Subject: [PATCH 36/49] Fix build and allow creation of suitable bare git
 repository from Python

---
 Dumpers/BinaryDumpers/CMakeLists.txt |  1 +
 Rec/Allen/python/Allen/tck.py        |  1 +
 Rec/Allen/scripts/create_hlt1_tck.py | 32 ++++++++++++++++++++++++++--
 configuration/include/TCK.h          |  6 ++++--
 configuration/src/TCK.cpp            | 15 +++++++++++--
 configuration/src/bindings.cpp       |  4 ++++
 6 files changed, 53 insertions(+), 6 deletions(-)

diff --git a/Dumpers/BinaryDumpers/CMakeLists.txt b/Dumpers/BinaryDumpers/CMakeLists.txt
index fd740e026e9..1098326057b 100644
--- a/Dumpers/BinaryDumpers/CMakeLists.txt
+++ b/Dumpers/BinaryDumpers/CMakeLists.txt
@@ -11,6 +11,7 @@ gaudi_add_library(BinaryDumpers
                     src/lib/Utils.cpp
                   LINK PUBLIC
                     NonEventDataHeaders
+                    Gear
                     AllenCommon
                     HostCommon
                     EventModel
diff --git a/Rec/Allen/python/Allen/tck.py b/Rec/Allen/python/Allen/tck.py
index f1cf3ff3879..3eb1d5bd1f4 100644
--- a/Rec/Allen/python/Allen/tck.py
+++ b/Rec/Allen/python/Allen/tck.py
@@ -82,6 +82,7 @@ from pathlib import Path
 from lxml import etree
 from hashlib import md5
 from subprocess import PIPE, run
+from Allen.TCK import create_git_repository # noqa
 
 
 def allen_version_from_build_manifest():
diff --git a/Rec/Allen/scripts/create_hlt1_tck.py b/Rec/Allen/scripts/create_hlt1_tck.py
index 9ffe89a989b..32f00366dbe 100644
--- a/Rec/Allen/scripts/create_hlt1_tck.py
+++ b/Rec/Allen/scripts/create_hlt1_tck.py
@@ -11,9 +11,14 @@
 import argparse
 import json
 import sys
-from Allen.tck import sequence_to_git, sequence_from_python
+from PyConf.filecontent_metadata import flush_key_registry, retrieve_encoding_dictionary, metainfo_repos, ConfigurationError, FILE_CONTENT_METADATA
+from Allen.tck import create_git_repository, sequence_to_git, sequence_from_python
 from pathlib import Path
 
+# Unset this environment variable to force generation of new encoding
+# keys in a local repo if they are not in the cvmfs one
+os.environ.pop('LHCbFileContentMetaDataRepo', None)
+
 parser = argparse.ArgumentParser(description="""
 Persist an Allen configuration in a git repository identified by a TCK
 
@@ -66,7 +71,7 @@ if sequence_arg.suffix in (".py", ""):
     from AllenConf.persistency import make_dec_reporter
 
     sequence, dn = {}, None
-    with make_dec_reporter.bind(TCK=tck):
+    with (make_dec_reporter.bind(TCK=tck), flush_key_registry()):
         sequence = sequence_from_python(sequence_arg, node_name=args.hlt1_node)
     dn = dec_reporter_name(sequence)
     if dn is None:
@@ -93,3 +98,26 @@ try:
 except RuntimeError as e:
     print(e)
     sys.exit(1)
+
+reports_key = sequence[dn]["encoding_key"]
+local_metainfo_repo = Path("./lhcb-metainfo/.git")
+if local_metainfo_repo.exists():
+    try:
+        with metainfo_repos.bind(repos=[(str(local_metainfo_repo.resolve()),
+                                         "master")]):
+            encoding = retrieve_encoding_dictionary(
+                reports_key, require_key_present=True)
+        print("Key created in local repo")
+    except ConfigurationError as e:
+        print(e)
+        print("Key was already in cvmfs repo")
+else:
+    try:
+        with metainfo_repos.bind(repos=[(FILE_CONTENT_METADATA, "master")]):
+            encoding = retrieve_encoding_dictionary(
+                reports_key, require_key_present=True)
+        print("Key was already in cvmfs repo")
+    except ConfigurationError as e:
+        print(e)
+        print("Key cannot be found!")
+        sys.exit(1)
diff --git a/configuration/include/TCK.h b/configuration/include/TCK.h
index 17b69d333af..c30d2b54a7c 100644
--- a/configuration/include/TCK.h
+++ b/configuration/include/TCK.h
@@ -21,8 +21,10 @@ namespace Allen {
     std::string current_release();
   }
 
-  std::string tck_from_git(std::string const& repo, std::string const& tck);
+  void create_git_repository(std::string repo);
 
-  std::tuple<std::string, LHCb::TCK::Info> sequence_from_git(std::string const& repo, std::string const& tck);
+  std::string tck_from_git(std::string repo, std::string tck);
+
+  std::tuple<std::string, LHCb::TCK::Info> sequence_from_git(std::string repo, std::string tck);
 
 } // namespace Allen
diff --git a/configuration/src/TCK.cpp b/configuration/src/TCK.cpp
index 85636b08a1b..0e5a13e1a5c 100644
--- a/configuration/src/TCK.cpp
+++ b/configuration/src/TCK.cpp
@@ -23,7 +23,18 @@ std::string Allen::TCK::current_release()
   return "ALLEN_v" + std::to_string(ALLEN_MAJOR_VERSION) + "r" + std::to_string(ALLEN_MINOR_VERSION);
 }
 
-std::string Allen::tck_from_git(std::string const& repo, std::string const& tck)
+void Allen::create_git_repository( std::string repo_name )
+{
+  git_libgit2_init();
+
+  auto [repo, sig] = LHCb::TCK::Git::create_git_repository( repo_name );
+
+  git_signature_free( sig );
+  git_repository_free( repo );
+  git_libgit2_shutdown();
+}
+
+std::string Allen::tck_from_git(std::string repo, std::string tck)
 {
 
   using LHCb::TCK::Git::check;
@@ -42,7 +53,7 @@ std::string Allen::tck_from_git(std::string const& repo, std::string const& tck)
   }
 }
 
-std::tuple<std::string, LHCb::TCK::Info> Allen::sequence_from_git(std::string const& repo, std::string const& tck)
+std::tuple<std::string, LHCb::TCK::Info> Allen::sequence_from_git(std::string repo, std::string tck)
 {
 
   auto tck_config = tck_from_git(repo, tck);
diff --git a/configuration/src/bindings.cpp b/configuration/src/bindings.cpp
index 822c5b5fc4c..37a3b71a506 100644
--- a/configuration/src/bindings.cpp
+++ b/configuration/src/bindings.cpp
@@ -53,6 +53,10 @@ PYBIND11_MODULE(TCK, m)
     &Allen::TCK::current_release,
     "Get the current Allen release string in the form of ALLEN_vXrYpZ, "
     "where pZ is optional");
+  m.def(
+    "create_git_repository",
+    &Allen::create_git_repository,
+    "Create a git repository that can store TCKs");
   m.def("tck_from_git", &Allen::tck_from_git, "Get the TCK as it is in the git repository");
   m.def(
     "sequence_from_git",
-- 
GitLab


From 267854e4a2bacbfcf366b617503c161d97b07026 Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Thu, 25 May 2023 13:25:13 +0200
Subject: [PATCH 37/49] Check where encoding keys are

---
 Rec/Allen/python/Allen/tck.py        | 5 ++++-
 Rec/Allen/scripts/create_hlt1_tck.py | 5 +++--
 2 files changed, 7 insertions(+), 3 deletions(-)

diff --git a/Rec/Allen/python/Allen/tck.py b/Rec/Allen/python/Allen/tck.py
index 3eb1d5bd1f4..37193586746 100644
--- a/Rec/Allen/python/Allen/tck.py
+++ b/Rec/Allen/python/Allen/tck.py
@@ -251,11 +251,14 @@ def sequence_to_git(
     p = run(
         ["hlt_tck_tool", "--convert-to-git", "-", f"{str(repository)}"],
         stdout=PIPE,
-        input=json.dumps(db),
+        stderr=PIPE,
+        input=json.dumps(db, sort_keys=True),
         encoding="ascii",
     )
 
     if p.returncode != 0:
+        print(p.stdout)
+        print(p.stderr)
         raise RuntimeError("Failed to convert sequence to git repo")
 
 
diff --git a/Rec/Allen/scripts/create_hlt1_tck.py b/Rec/Allen/scripts/create_hlt1_tck.py
index 32f00366dbe..b738ee1ef65 100644
--- a/Rec/Allen/scripts/create_hlt1_tck.py
+++ b/Rec/Allen/scripts/create_hlt1_tck.py
@@ -8,6 +8,7 @@
 # granted to it by virtue of its status as an Intergovernmental Organization  #
 # or submit itself to any jurisdiction.                                       #
 ###############################################################################
+import os
 import argparse
 import json
 import sys
@@ -93,7 +94,7 @@ elif sequence_arg.suffix == ".json":
         sequence[dn]["tck"] = tck
 
 try:
-    sequence_to_git(repository, sequence, sequence_arg.stem, args.label, tck)
+    sequence_to_git(repository, sequence, sequence_arg.stem, args.label, tck, write_intermediate=True)
     print(f"Created TCK {hex(tck)} for sequence {sequence_arg.stem}")
 except RuntimeError as e:
     print(e)
@@ -107,7 +108,7 @@ if local_metainfo_repo.exists():
                                          "master")]):
             encoding = retrieve_encoding_dictionary(
                 reports_key, require_key_present=True)
-        print("Key created in local repo")
+        print("Key present in local repo")
     except ConfigurationError as e:
         print(e)
         print("Key was already in cvmfs repo")
-- 
GitLab


From 5f72e23213e18e104e71253da8658f5da3d8aa13 Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Thu, 25 May 2023 16:29:33 +0200
Subject: [PATCH 38/49] Fix TCK tests

---
 Rec/Allen/scripts/create_hlt1_tck.py        | 68 +++++++++++++--------
 Rec/Allen/tests/options/create_hlt1_tcks.py | 22 ++++---
 2 files changed, 56 insertions(+), 34 deletions(-)

diff --git a/Rec/Allen/scripts/create_hlt1_tck.py b/Rec/Allen/scripts/create_hlt1_tck.py
index b738ee1ef65..fc157059daa 100644
--- a/Rec/Allen/scripts/create_hlt1_tck.py
+++ b/Rec/Allen/scripts/create_hlt1_tck.py
@@ -12,14 +12,11 @@ import os
 import argparse
 import json
 import sys
+import subprocess
 from PyConf.filecontent_metadata import flush_key_registry, retrieve_encoding_dictionary, metainfo_repos, ConfigurationError, FILE_CONTENT_METADATA
 from Allen.tck import create_git_repository, sequence_to_git, sequence_from_python
 from pathlib import Path
 
-# Unset this environment variable to force generation of new encoding
-# keys in a local repo if they are not in the cvmfs one
-os.environ.pop('LHCbFileContentMetaDataRepo', None)
-
 parser = argparse.ArgumentParser(description="""
 Persist an Allen configuration in a git repository identified by a TCK
 
@@ -54,6 +51,25 @@ sequence_arg = Path(args.sequence[0])
 repository = Path(args.repository[0])
 tck = int(args.tck[0], 16)
 
+local_metainfo_repo = Path("./lhcb-metainfo/.git")
+tck_metainfo_repos = [(str(local_metainfo_repo.resolve()), "master"),
+                      (FILE_CONTENT_METADATA, "master")]
+
+
+# Unset this environment variable to force generation of new encoding
+# keys in a local repo if they are not in the cvmfs one
+build_metainfo_repo = os.environ.pop('LHCbFileContentMetaDataRepo', None)
+if build_metainfo_repo is not None and not local_metainfo_repo.exists():
+    result = subprocess.run([
+        'git', 'clone', '-q', build_metainfo_repo,
+        str(local_metainfo_repo.resolve()).removesuffix('/.git')],
+                            capture_output=True,
+                            text=True,
+                            check=False)
+    if result.returncode != 0:
+        print(f"Failed to clone build metainfo repo {build_metainfo_repo} to local repo")
+        sys.exit(1)
+
 
 def dec_reporter_name(conf):
     return next(
@@ -72,7 +88,8 @@ if sequence_arg.suffix in (".py", ""):
     from AllenConf.persistency import make_dec_reporter
 
     sequence, dn = {}, None
-    with (make_dec_reporter.bind(TCK=tck), flush_key_registry()):
+    with (make_dec_reporter.bind(TCK=tck),
+          flush_key_registry()):
         sequence = sequence_from_python(sequence_arg, node_name=args.hlt1_node)
     dn = dec_reporter_name(sequence)
     if dn is None:
@@ -94,31 +111,32 @@ elif sequence_arg.suffix == ".json":
         sequence[dn]["tck"] = tck
 
 try:
-    sequence_to_git(repository, sequence, sequence_arg.stem, args.label, tck, write_intermediate=True)
+    sequence_to_git(repository, sequence, sequence_arg.stem, args.label, tck)
     print(f"Created TCK {hex(tck)} for sequence {sequence_arg.stem}")
 except RuntimeError as e:
     print(e)
     sys.exit(1)
 
-reports_key = sequence[dn]["encoding_key"]
-local_metainfo_repo = Path("./lhcb-metainfo/.git")
-if local_metainfo_repo.exists():
-    try:
-        with metainfo_repos.bind(repos=[(str(local_metainfo_repo.resolve()),
-                                         "master")]):
-            encoding = retrieve_encoding_dictionary(
-                reports_key, require_key_present=True)
-        print("Key present in local repo")
-    except ConfigurationError as e:
-        print(e)
-        print("Key was already in cvmfs repo")
-else:
+
+def get_encoding_key(repo):
     try:
-        with metainfo_repos.bind(repos=[(FILE_CONTENT_METADATA, "master")]):
-            encoding = retrieve_encoding_dictionary(
+        with metainfo_repos.bind(repos=[(repo, "master")]):
+            return retrieve_encoding_dictionary(
                 reports_key, require_key_present=True)
-        print("Key was already in cvmfs repo")
     except ConfigurationError as e:
-        print(e)
-        print("Key cannot be found!")
-        sys.exit(1)
+        return None
+
+
+reports_key = sequence[dn]["encoding_key"]
+
+local_key, key_present = (False, False)
+if local_metainfo_repo.exists():
+    encoding = get_encoding_key(str(local_metainfo_repo.resolve()))
+    key_present = local_key = encoding is not None
+if not local_key:
+    encoding = get_encoding_key(FILE_CONTENT_METADATA)
+    key_present = encoding is not None
+
+if not key_present:
+    print("Key {} cannot be found!".format(hex(reports_key)))
+    sys.exit(1)
diff --git a/Rec/Allen/tests/options/create_hlt1_tcks.py b/Rec/Allen/tests/options/create_hlt1_tcks.py
index e874bc24e7c..78c3c620a20 100644
--- a/Rec/Allen/tests/options/create_hlt1_tcks.py
+++ b/Rec/Allen/tests/options/create_hlt1_tcks.py
@@ -33,25 +33,29 @@ if '-dbg' in os.environ['BINARY_TAG']:
     random.shuffle(sequences)
     sequences = sequences[:5]
 
+
 for i, seq in enumerate(sequences):
     seq = Path(seq_dir) / seq
-
-    # Create TCKs from JSON files
     tck = hex(0x10000001 + i)
+
+    # Create TCKs from python configurations
+    # Note, these are created first such that missing encoding keys
+    # will be added to the test-local metainfo repository
     r = subprocess.run(
-        ["python", tck_script,
-         str(seq), "config_json.git", tck])
+        ["python", tck_script, seq.stem, "config_python.git", tck])
     if r.returncode != 0:
         error = True
     else:
-        print(f"Created TCK {tck} from JSON configuration {str(seq)}")
+        print(f"Created TCK {tck} from Python configuration {seq.stem}")
 
-    # Create TCKs from python configurations
+    # Create TCKs from JSON files
     r = subprocess.run(
-        ["python", tck_script, seq.stem, "config_python.git", tck])
+        ["python", tck_script,
+         str(seq), "config_json.git", tck])
     if r.returncode != 0:
         error = True
     else:
-        print(f"Created TCK {tck} from Python configuration {seq.stem}")
+        print(f"Created TCK {tck} from JSON configuration {str(seq)}")
 
-sys.exit(error)
+if error:
+    sys.exit(error)
-- 
GitLab


From 291f753e3ce02885fb43e415f77905ef4c01caa3 Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Mon, 12 Jun 2023 22:18:26 +0200
Subject: [PATCH 39/49] Store additional metadata in the git repo

---
 Dumpers/BinaryDumpers/options/allen.py      | 26 ++++--
 Rec/Allen/python/Allen/tck.py               | 71 +++++++++-------
 Rec/Allen/scripts/create_hlt1_tck.py        | 13 ++-
 Rec/Allen/tests/options/create_hlt1_tcks.py |  4 +-
 configuration/CMakeLists.txt                |  4 +
 configuration/include/TCK.h                 | 13 +--
 configuration/src/TCK.cpp                   | 92 ++++++++++++++++-----
 configuration/src/bindings.cpp              | 16 ++--
 main/src/Provider.cpp                       | 10 +--
 9 files changed, 168 insertions(+), 81 deletions(-)

diff --git a/Dumpers/BinaryDumpers/options/allen.py b/Dumpers/BinaryDumpers/options/allen.py
index 382241dbe51..9fd78ba4777 100755
--- a/Dumpers/BinaryDumpers/options/allen.py
+++ b/Dumpers/BinaryDumpers/options/allen.py
@@ -143,6 +143,13 @@ parser.add_argument(
     default=False,
     help="Use binary files as the geometry",
 )
+parser.add_argument(
+    "--tck-no-bindings",
+    help="Avoid using python bindings to TCK utils",
+    dest="bindings",
+    action="store_false",
+    default=True
+)
 
 args = parser.parse_args()
 
@@ -196,24 +203,27 @@ sequence = os.path.expandvars(args.sequence)
 sequence_json = ""
 tck_option = re.compile(r"([^:]+):(0x[a-fA-F0-9]{8})")
 if (m := tck_option.match(sequence)):
-    from Allen.tck import sequence_from_git, allen_version_from_build_manifest
+    from Allen.tck import sequence_from_git, dependencies_from_build_manifest
+    import json
+
     repo = m.group(1)
     tck = m.group(2)
-    sequence_json, tck_info = sequence_from_git(repo, tck)
-    if not sequence_json:
+    sequence_json, tck_info = sequence_from_git(repo, tck, use_bindings=args.bindings)
+    tck_deps = tck_info["metadata"]["dependencies"]
+    if not sequence_json or sequence_json == 'null':
         print(
             f"Failed to obtain configuration for TCK {tck} from repository {repo}"
         )
         sys.exit(1)
-    elif (allen_release :=
-          allen_version_from_build_manifest()) != tck_info.release:
+    elif (deps :=
+          dependencies_from_build_manifest()) != tck_deps:
         print(
-            f"TCK {tck} is compatible with Allen release {allen_release}, not with {tck_info.release}."
+            f"TCK {tck} is compatible with Allen release {deps}, not with {tck_deps}."
         )
         sys.exit(1)
     else:
         print(
-            f"Loaded TCK {tck} with sequence type {tck_info.type} and label {tck_info.label}."
+            f"Loaded TCK {tck} with sequence type {tck_info['type']} and label {tck_info['label']}."
         )
 else:
     with open(sequence) as f:
@@ -224,7 +234,7 @@ if args.mep:
     from Configurables import MEPProvider, AllenConfiguration
 
     allen_conf = AllenConfiguration("AllenConfiguration")
-    allen_conf.JSON = sequence
+    allen_conf.JSON = sequence_json
     allen_conf.OutputLevel = 3
 
     mep_provider = MEPProvider()
diff --git a/Rec/Allen/python/Allen/tck.py b/Rec/Allen/python/Allen/tck.py
index 37193586746..ff7671d4359 100644
--- a/Rec/Allen/python/Allen/tck.py
+++ b/Rec/Allen/python/Allen/tck.py
@@ -82,10 +82,9 @@ from pathlib import Path
 from lxml import etree
 from hashlib import md5
 from subprocess import PIPE, run
-from Allen.TCK import create_git_repository # noqa
 
 
-def allen_version_from_build_manifest():
+def dependencies_from_build_manifest():
     """Get the built/installed version of Allen from the
     build/install manifest in the format ALLEN_vXrYpZ where pZ is
     optional.
@@ -94,15 +93,13 @@ def allen_version_from_build_manifest():
     if "ALLEN_INSTALL_DIR" in os.environ:
         manifest_tree = etree.parse(
             os.path.expandvars("${ALLEN_INSTALL_DIR}/manifest.xml"))
-        allen_version = manifest_tree.find("project").get("version")
-        vs = allen_version.split(".")
-        version = "ALLEN_"
-        for idx, prefix in enumerate(("v", "r", "p")):
-            if idx < len(vs):
-                version += prefix + vs[idx]
-        return version
+        projects = ([manifest_tree.find("project")]
+                    + [p for p in manifest_tree.find("used_projects").iterchildren()])
+        deps = {p.get("name"): p.get("version") for p in projects}
+        deps["LCG"] = manifest_tree.find("heptools").find("version").text
+        return deps
     else:
-        return None
+        return {}
 
 
 def sequence_to_tck(config: dict):
@@ -156,7 +153,7 @@ def tck_to_sequence(config: dict):
 
 
 def json_tck_db(configuration: dict, sequence_type: str, label: str,
-                allen_version: str, tck: int):
+                metadata: dict, tck: int):
     """Create a JSON-formatted string that hlt_tck_tool can
     write to a git repository.
 
@@ -168,31 +165,38 @@ def json_tck_db(configuration: dict, sequence_type: str, label: str,
     manifest.
 
     """
-    if allen_version is None:
-        allen_version = allen_version_from_build_manifest()
-    if type(allen_version) == str:
-        version = allen_version
-    else:
-        raise ValueError("failed to determine allen version")
-
     if len(hex(tck)) != 10 or hex(tck)[2] != "1":
         raise ValueError(
             "Badly formatted TCK, it must be a 32 bit hex number with most significant byte set to 1"
         )
 
+    # Add the configuration to the TCK
     tck_config = sequence_to_tck(configuration)
 
+    # Add the metadata to the TCK as a file with the same name
+    tck_config['metadata'] = metadata
+
     # The value of the digest is not important as long as it matches
     # between the manifest and the key of the configuration. Use MD5
     # as that was used extensively and more meaninfully in Run 2.
     digest = md5(json.dumps(tck_config).encode("utf-8")).hexdigest()
+
+    stack, version = metadata["stack"]
+
+    vs = version.split(".")
+    release = stack.upper() + "_"
+    for idx, prefix in enumerate(("v", "r", "p")):
+        if idx < len(vs):
+            release += prefix + vs[idx]
+
     manifest = {
         digest: {
             "TCK": hex(tck),
             "Release2Type": {
-                version: sequence_type
+                release: sequence_type
             },
             "label": label,
+            "metadata": metadata
         }
     }
     return {"manifest": manifest, digest: tck_config}
@@ -237,13 +241,22 @@ def sequence_to_git(
         sequence_type: dict,
         label: str,
         tck: int,
-        allen_version=None,
+        stack: tuple,
+        extra_metadata={},
         write_intermediate=False,
 ):
     """Write an Allen configuration to a git repository with metadata.
     """
+    from Allen import TCK
 
-    db = json_tck_db(sequence, sequence_type, label, allen_version, tck)
+    # Collect metadata for TCK
+    metadata = extra_metadata
+    metadata["stack"] = stack
+    metadata["version"] = TCK.version
+    metadata["projects"] = dependencies_from_build_manifest()
+
+    # Create JSON TCK DB
+    db = json_tck_db(sequence, sequence_type, label, metadata, tck)
     if write_intermediate:
         with open(hex(tck) + ".json", "w") as f:
             json.dump(db, f, indent=4, sort_keys=True)
@@ -252,7 +265,7 @@ def sequence_to_git(
         ["hlt_tck_tool", "--convert-to-git", "-", f"{str(repository)}"],
         stdout=PIPE,
         stderr=PIPE,
-        input=json.dumps(db, sort_keys=True),
+        input=json.dumps(db, indent=4, sort_keys=True),
         encoding="ascii",
     )
 
@@ -273,10 +286,10 @@ def sequence_from_git(repository: Path, tck: str, use_bindings=True) -> str:
     if use_bindings:
         from Allen import TCK
         sequence, info = TCK.sequence_from_git(str(repository), tck)
-        return (sequence, {
-            k: getattr(info, k)
-            for k in ("digest", "tck", "release", "type", "label")
-        })
+        tck_info = {k: getattr(info, k)
+                    for k in ("digest", "tck", "release", "type", "label")}
+        tck_info["metadata"] = json.loads(info.metadata)
+        return (sequence,  tck_info)
     else:
         p = run(
             [
@@ -299,12 +312,14 @@ def sequence_from_git(repository: Path, tck: str, use_bindings=True) -> str:
             (k, v) for k, v in manifest_entry["Release2Type"].items())
         tck = manifest_entry["TCK"]
         label = manifest_entry["label"]
+        metadata = manifest_entry["metadata"]
         info = {
             "digest": digest,
             "tck": tck,
-            "release": release,
+            "metadata": metadata,
             "type": seq_type,
-            "label": label
+            "label": label,
+            "metadata": metadata
         }
         return (json.dumps(tck_to_sequence(tck_db[digest])), info)
 
diff --git a/Rec/Allen/scripts/create_hlt1_tck.py b/Rec/Allen/scripts/create_hlt1_tck.py
index fc157059daa..41585e8b1ab 100644
--- a/Rec/Allen/scripts/create_hlt1_tck.py
+++ b/Rec/Allen/scripts/create_hlt1_tck.py
@@ -9,12 +9,13 @@
 # or submit itself to any jurisdiction.                                       #
 ###############################################################################
 import os
+import re
 import argparse
 import json
 import sys
 import subprocess
 from PyConf.filecontent_metadata import flush_key_registry, retrieve_encoding_dictionary, metainfo_repos, ConfigurationError, FILE_CONTENT_METADATA
-from Allen.tck import create_git_repository, sequence_to_git, sequence_from_python
+from Allen.tck import sequence_to_git, sequence_from_python
 from pathlib import Path
 
 parser = argparse.ArgumentParser(description="""
@@ -27,9 +28,10 @@ The configuration can be obtained from:
 
 Some metadata is also persisted.
 """)
+parser.add_argument("stack", nargs=2, help="first argument should be a string and the second in the format N.M<.P>")
 parser.add_argument("sequence", nargs=1)
 parser.add_argument("repository", nargs=1)
-parser.add_argument("tck", nargs=1)
+parser.add_argument("tck", nargs=1, help="A 32-bit hexadecimal number")
 parser.add_argument(
     "--python-hlt1-node",
     type=str,
@@ -51,6 +53,11 @@ sequence_arg = Path(args.sequence[0])
 repository = Path(args.repository[0])
 tck = int(args.tck[0], 16)
 
+stack_version_expr = re.compile(r"^\d+\.\d+(?:\.\d+)?$")
+if not stack_version_expr.match(args.stack[1]):
+    print("Stack must be given as two arguments: string N.M<.P>, where N, M and P are integers.")
+    sys.exit(1)
+
 local_metainfo_repo = Path("./lhcb-metainfo/.git")
 tck_metainfo_repos = [(str(local_metainfo_repo.resolve()), "master"),
                       (FILE_CONTENT_METADATA, "master")]
@@ -111,7 +118,7 @@ elif sequence_arg.suffix == ".json":
         sequence[dn]["tck"] = tck
 
 try:
-    sequence_to_git(repository, sequence, sequence_arg.stem, args.label, tck)
+    sequence_to_git(repository, sequence, sequence_arg.stem, args.label, tck, tuple(args.stack), {}, True)
     print(f"Created TCK {hex(tck)} for sequence {sequence_arg.stem}")
 except RuntimeError as e:
     print(e)
diff --git a/Rec/Allen/tests/options/create_hlt1_tcks.py b/Rec/Allen/tests/options/create_hlt1_tcks.py
index 78c3c620a20..ea5e7731620 100644
--- a/Rec/Allen/tests/options/create_hlt1_tcks.py
+++ b/Rec/Allen/tests/options/create_hlt1_tcks.py
@@ -42,7 +42,7 @@ for i, seq in enumerate(sequences):
     # Note, these are created first such that missing encoding keys
     # will be added to the test-local metainfo repository
     r = subprocess.run(
-        ["python", tck_script, seq.stem, "config_python.git", tck])
+        ["python", tck_script, "HLT1", "1.0", seq.stem, "config_python.git", tck])
     if r.returncode != 0:
         error = True
     else:
@@ -50,7 +50,7 @@ for i, seq in enumerate(sequences):
 
     # Create TCKs from JSON files
     r = subprocess.run(
-        ["python", tck_script,
+        ["python", tck_script, "HLT1", "1.0",
          str(seq), "config_json.git", tck])
     if r.returncode != 0:
         error = True
diff --git a/configuration/CMakeLists.txt b/configuration/CMakeLists.txt
index 0e1832f97b5..e7d30416ad7 100644
--- a/configuration/CMakeLists.txt
+++ b/configuration/CMakeLists.txt
@@ -19,6 +19,9 @@ if (NOT STANDALONE)
         src/TCK.cpp)
     target_include_directories(Configuration
       PUBLIC
+      $<BUILD_INTERFACE:${DETECTOR_PREFIX_DIR}/include>  # for DETECTOR_VERSION.h
+      $<BUILD_INTERFACE:${LBCOM_PREFIX_DIR}/include>  # for LBCOM_VERSION.h
+      $<BUILD_INTERFACE:${REC_PREFIX_DIR}/include>  # for REC_VERSION.h
       $<BUILD_INTERFACE:${CMAKE_BINARY_DIR}/include>  # for ALLEN_VERSION.h
       include)
 
@@ -26,6 +29,7 @@ if (NOT STANDALONE)
     # linking AllenLib to it as well
     target_link_libraries(Configuration
       PUBLIC
+        AllenCommon
         Boost::headers
         LHCb::HltServicesLib)
 
diff --git a/configuration/include/TCK.h b/configuration/include/TCK.h
index c30d2b54a7c..2a87b35cf8f 100644
--- a/configuration/include/TCK.h
+++ b/configuration/include/TCK.h
@@ -13,17 +13,20 @@
 #include <string>
 #include <iostream>
 #include <HltServices/TCKUtils.h>
-#include <ALLEN_VERSION.h>
 
 namespace Allen {
 
   namespace TCK {
-    std::string current_release();
-  }
+    static constexpr unsigned version = 1u;
 
-  void create_git_repository(std::string repo);
+    std::map<std::string, std::string> project_dependencies();
 
-  std::string tck_from_git(std::string repo, std::string tck);
+    std::tuple<bool, std::string> check_projects(nlohmann::json metadata);
+
+    void create_git_repository(std::string repo);
+  } // namespace TCK
+
+  std::tuple<std::string, LHCb::TCK::Info> tck_from_git(std::string repo, std::string tck);
 
   std::tuple<std::string, LHCb::TCK::Info> sequence_from_git(std::string repo, std::string tck);
 
diff --git a/configuration/src/TCK.cpp b/configuration/src/TCK.cpp
index 0e5a13e1a5c..8f75ad92aac 100644
--- a/configuration/src/TCK.cpp
+++ b/configuration/src/TCK.cpp
@@ -8,9 +8,16 @@
 * granted to it by virtue of its status as an Intergovernmental Organization  *
 * or submit itself to any jurisdiction.                                       *
 \*****************************************************************************/
-#include "boost/algorithm/string/classification.hpp"
-#include "boost/algorithm/string/predicate.hpp"
-#include "boost/algorithm/string/split.hpp"
+#include <dlfcn.h>
+
+#include <boost/algorithm/string/classification.hpp>
+#include <boost/algorithm/string/predicate.hpp>
+#include <boost/algorithm/string/split.hpp>
+#include <boost/property_tree/ptree.hpp>
+#include <boost/property_tree/xml_parser.hpp>
+#include <boost/property_tree/detail/file_parser_error.hpp>
+#include <FileSystem.h>
+
 #include <git2.h>
 #include <string>
 #include <nlohmann/json.hpp>
@@ -18,23 +25,75 @@
 #include <HltServices/TCKUtils.h>
 #include <TCK.h>
 
-std::string Allen::TCK::current_release()
+// Version headers of dependent projects
+
+namespace {
+  namespace pt = boost::property_tree;
+}
+
+std::map<std::string, std::string> Allen::TCK::project_dependencies()
 {
-  return "ALLEN_v" + std::to_string(ALLEN_MAJOR_VERSION) + "r" + std::to_string(ALLEN_MINOR_VERSION);
+  Dl_info dl_info;
+  if (!dladdr(reinterpret_cast<void*>(Allen::TCK::create_git_repository), &dl_info)) {
+    throw std::runtime_error {"Failed to obtain path for this shared library"};
+  }
+  auto manifest_path = fs::absolute(fs::path {dl_info.dli_fname} / ".." / "manifest.xml");
+
+  std::cout << "manifest path " << manifest_path.string() << "\n";
+
+  // Create an empty property tree object
+  pt::ptree manifest;
+
+  try {
+    read_xml(manifest_path.string(), manifest);
+  } catch (pt::xml_parser_error& e) {
+    std::cout << "Failed to parse the xml string." << e.what();
+  } catch (...) {
+    std::cout << "Failed !!!";
+  }
+
+  std::map<std::string, std::string> deps;
+  deps["LCG"] = manifest.get<std::string>("manifest.heptools.version");
+  for (auto& [_, project] : manifest.get_child("manifest.used_projects")) {
+    deps[project.get<std::string>("<xmlattr>.name")] = project.get<std::string>("<xmlattr>.version");
+  }
+  return deps;
+}
+
+std::tuple<bool, std::string> Allen::TCK::check_projects(nlohmann::json metadata)
+{
+  auto projects_str = [](auto const& projects) {
+    std::stringstream ps;
+    for (auto [p, v] : projects) {
+      ps << p << " " << v << "\n";
+    }
+    return ps.str();
+  };
+
+  auto projects = metadata["projects"].get<std::map<std::string, std::string>>();
+  auto deps = Allen::TCK::project_dependencies();
+
+  auto check = projects == deps;
+  std::string error_msg;
+  if (!check) {
+    error_msg =
+      ("dependencies " + projects_str(deps) + " are incompatible with current dependencies " + projects_str(projects));
+  }
+  return std::tuple {check, error_msg};
 }
 
-void Allen::create_git_repository( std::string repo_name )
+void Allen::TCK::create_git_repository(std::string repo_name)
 {
   git_libgit2_init();
 
-  auto [repo, sig] = LHCb::TCK::Git::create_git_repository( repo_name );
+  auto [repo, sig] = LHCb::TCK::Git::create_git_repository(repo_name);
 
-  git_signature_free( sig );
-  git_repository_free( repo );
+  git_signature_free(sig);
+  git_repository_free(repo);
   git_libgit2_shutdown();
 }
 
-std::string Allen::tck_from_git(std::string repo, std::string tck)
+std::tuple<std::string, LHCb::TCK::Info> Allen::tck_from_git(std::string repo, std::string tck)
 {
 
   using LHCb::TCK::Git::check;
@@ -44,8 +103,9 @@ std::string Allen::tck_from_git(std::string repo, std::string tck)
   check(git_repository_open_bare(&git_repo, repo.c_str()));
   try {
     auto tck_config = LHCb::TCK::Git::extract_json(git_repo, tck);
+    auto tck_info = LHCb::TCK::Git::tck_info(git_repo, tck);
     git_libgit2_shutdown();
-    return tck_config;
+    return {std::move(tck_config), std::move(tck_info)};
   } catch (std::runtime_error const& e) {
     git_libgit2_shutdown();
     throw std::runtime_error {"Failed to extract JSON configuration for TCK " + tck + " from " + repo + ": " +
@@ -56,7 +116,7 @@ std::string Allen::tck_from_git(std::string repo, std::string tck)
 std::tuple<std::string, LHCb::TCK::Info> Allen::sequence_from_git(std::string repo, std::string tck)
 {
 
-  auto tck_config = tck_from_git(repo, tck);
+  auto [tck_config, tck_info] = tck_from_git(repo, tck);
   if (tck_config.empty()) {
     return {tck_config, {}};
   }
@@ -112,11 +172,5 @@ std::tuple<std::string, LHCb::TCK::Info> Allen::sequence_from_git(std::string re
     }
   }
 
-  // Get the TCK metadata from the manifest, which is indexed by digest.
-  auto const& digest = json_tck.key();
-  auto const& m = manifest[digest];
-  auto [release, type] = m["Release2Type"].items().begin();
-  LHCb::TCK::Info info {digest, m["TCK"], release, type, m["label"]};
-
-  return {sequence.dump(), info};
+  return {sequence.dump(), tck_info};
 }
diff --git a/configuration/src/bindings.cpp b/configuration/src/bindings.cpp
index 37a3b71a506..7107da5c283 100644
--- a/configuration/src/bindings.cpp
+++ b/configuration/src/bindings.cpp
@@ -31,7 +31,8 @@ PYBIND11_MODULE(TCK, m)
     .def_readwrite("tck", &LHCb::TCK::Info::tck)
     .def_readwrite("release", &LHCb::TCK::Info::release)
     .def_readwrite("type", &LHCb::TCK::Info::type)
-    .def_readwrite("label", &LHCb::TCK::Info::label);
+    .def_readwrite("label", &LHCb::TCK::Info::label)
+    .def_readwrite("metadata", &LHCb::TCK::Info::metadata);
 
   m.doc() = R"pbdoc(
     Utility functions to interact with a git repository that contains
@@ -43,20 +44,13 @@ PYBIND11_MODULE(TCK, m)
        :toctree: _generate
 
     TCKInfo
-    current_release
     tck_from_git
     sequence_to_git
     )pbdoc";
 
-  m.def(
-    "current_release",
-    &Allen::TCK::current_release,
-    "Get the current Allen release string in the form of ALLEN_vXrYpZ, "
-    "where pZ is optional");
-  m.def(
-    "create_git_repository",
-    &Allen::create_git_repository,
-    "Create a git repository that can store TCKs");
+  m.attr("version") = py::int_(Allen::TCK::version);
+
+  m.def("create_git_repository", &Allen::TCK::create_git_repository, "Create a git repository that can store TCKs");
   m.def("tck_from_git", &Allen::tck_from_git, "Get the TCK as it is in the git repository");
   m.def(
     "sequence_from_git",
diff --git a/main/src/Provider.cpp b/main/src/Provider.cpp
index 63dc95b2384..5e9cb433ca5 100644
--- a/main/src/Provider.cpp
+++ b/main/src/Provider.cpp
@@ -58,8 +58,6 @@ std::string Allen::sequence_conf(std::map<std::string, std::string> const& optio
   if (std::regex_match(sequence, tck_match, tck_option)) {
 #ifndef ALLEN_STANDALONE
 
-    // WORKING print some message using tck_info
-
     auto repo = tck_match.str(1);
     auto tck = tck_match.str(2);
     std::string config;
@@ -70,12 +68,14 @@ std::string Allen::sequence_conf(std::map<std::string, std::string> const& optio
       throw std::runtime_error {"Failed to obtain sequence for TCK " + tck + " from repository at " + repo + ":" +
                                 e.what()};
     }
+
+    auto [check, check_error] = Allen::TCK::check_projects(nlohmann::json::parse(info.metadata));
+
     if (config.empty()) {
       throw std::runtime_error {"Failed to obtain sequence for TCK " + tck + " from repository at " + repo};
     }
-    else if (info.release != TCK::current_release()) {
-      throw std::runtime_error {"TCK " + tck + " is compatible with release " + info.release +
-                                " and not current release " + TCK::current_release()};
+    else if (!check) {
+      throw std::runtime_error {std::string {"TCK "} + tck + ": " + check_error};
     }
     info_cout << "TCK " << tck << " loaded " << info.type << " sequence from git with label " << info.label << "\n";
     return config;
-- 
GitLab


From c7ed35c7685837269c7665436c30c0302769a77e Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Tue, 13 Jun 2023 17:46:17 +0200
Subject: [PATCH 40/49] Fix config for Retina clusters

---
 Rec/Allen/python/Allen/config.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/Rec/Allen/python/Allen/config.py b/Rec/Allen/python/Allen/config.py
index fef97e2ebcf..4ebedce4dd2 100755
--- a/Rec/Allen/python/Allen/config.py
+++ b/Rec/Allen/python/Allen/config.py
@@ -133,6 +133,10 @@ def setup_allen_non_event_data_service(allen_event_loop=False,
     elif bank_types is None:
         bank_types = set(converter_types.keys())
 
+    if 'VPRetinaCluster' in bank_types:
+        bank_types.remove('VPRetinaCluster')
+        bank_types.add('VP')
+
     # Always include the magnetic field polarity
     bank_types.add('Magnet')
 
-- 
GitLab


From b4332582ab8552403cf1a3b2383d692d613b565d Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Tue, 13 Jun 2023 22:05:18 +0200
Subject: [PATCH 41/49] Fix running allen.py from MEP with JSON sequence

---
 Dumpers/BinaryDumpers/options/allen.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/Dumpers/BinaryDumpers/options/allen.py b/Dumpers/BinaryDumpers/options/allen.py
index 9fd78ba4777..eead9ce4476 100755
--- a/Dumpers/BinaryDumpers/options/allen.py
+++ b/Dumpers/BinaryDumpers/options/allen.py
@@ -234,7 +234,8 @@ if args.mep:
     from Configurables import MEPProvider, AllenConfiguration
 
     allen_conf = AllenConfiguration("AllenConfiguration")
-    allen_conf.JSON = sequence_json
+    # Newlines in a string property cause issues
+    allen_conf.JSON = sequence_json.replace('\n', '')
     allen_conf.OutputLevel = 3
 
     mep_provider = MEPProvider()
-- 
GitLab


From ed56720e76c6bfd1140ece3dfb09149938e62607 Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Tue, 13 Jun 2023 22:06:56 +0200
Subject: [PATCH 42/49] Add optional argument to create_hlt1_tck to specify hlt1
 type

---
 Rec/Allen/python/Allen/tck.py        |  2 +-
 Rec/Allen/scripts/create_hlt1_tck.py | 31 +++++++++++++++++++++++-----
 2 files changed, 27 insertions(+), 6 deletions(-)

diff --git a/Rec/Allen/python/Allen/tck.py b/Rec/Allen/python/Allen/tck.py
index ff7671d4359..15f4f84a157 100644
--- a/Rec/Allen/python/Allen/tck.py
+++ b/Rec/Allen/python/Allen/tck.py
@@ -238,7 +238,7 @@ def sequence_from_python(python_file: Path, node_name="hlt1_node") -> dict:
 def sequence_to_git(
         repository: Path,
         sequence: dict,
-        sequence_type: dict,
+        sequence_type: str,
         label: str,
         tck: int,
         stack: tuple,
diff --git a/Rec/Allen/scripts/create_hlt1_tck.py b/Rec/Allen/scripts/create_hlt1_tck.py
index 41585e8b1ab..7f512c44809 100644
--- a/Rec/Allen/scripts/create_hlt1_tck.py
+++ b/Rec/Allen/scripts/create_hlt1_tck.py
@@ -32,6 +32,14 @@ parser.add_argument("stack", nargs=2, help="first argument should be a string an
 parser.add_argument("sequence", nargs=1)
 parser.add_argument("repository", nargs=1)
 parser.add_argument("tck", nargs=1, help="A 32-bit hexadecimal number")
+parser.add_argument(
+    "-t,--hlt1-type",
+    type=str,
+    help=
+    "Sequence type to use; also used as branch name in the Git repository.",
+    default='',
+    dest='sequence_type'
+)
 parser.add_argument(
     "--python-hlt1-node",
     type=str,
@@ -52,6 +60,7 @@ args = parser.parse_args()
 sequence_arg = Path(args.sequence[0])
 repository = Path(args.repository[0])
 tck = int(args.tck[0], 16)
+type_arg = args.sequence_type if args.sequence_type != '' else sequence_arg.stem
 
 stack_version_expr = re.compile(r"^\d+\.\d+(?:\.\d+)?$")
 if not stack_version_expr.match(args.stack[1]):
@@ -95,31 +104,41 @@ if sequence_arg.suffix in (".py", ""):
     from AllenConf.persistency import make_dec_reporter
 
     sequence, dn = {}, None
+    # Load the python module to get the sequence configuration; set
+    # the TCK to the right value and flush encoding keys
     with (make_dec_reporter.bind(TCK=tck),
           flush_key_registry()):
         sequence = sequence_from_python(sequence_arg, node_name=args.hlt1_node)
+
+    # Check that at least the dec_reporter is part of the sequence,
+    # otherwise it's meaningless to create a TCK for this sequence.
     dn = dec_reporter_name(sequence)
     if dn is None:
         print(
-            f"Cannot create TCK {hex(tck)} for sequence {sequence_arg.stem}, because it does not contain the dec_reporter"
+            f"Cannot create TCK {hex(tck)} for sequence {type_arg}, because it does not contain the dec_reporter"
         )
         sys.exit(1)
 elif sequence_arg.suffix == ".json":
+    # Load the sequence configuration from a JSON file
     sequence, dn = {}, None
     with open(sequence_arg, "r") as sequence_file:
         sequence = json.load(sequence_file)
-        dn = dec_reporter_name(sequence)
+
+    # Get the dec reporter and set its TCK property to the right value
+    # before creating the TCK from the configuration
+    dn = dec_reporter_name(sequence)
     if dn is None:
         print(
-            f"Cannot create TCK {hex(tck)} for sequence {sequence_arg.stem}, because it does not contain the dec_reporter"
+            f"Cannot create TCK {hex(tck)} for sequence {type_arg}, because it does not contain the dec_reporter"
         )
         sys.exit(1)
     else:
         sequence[dn]["tck"] = tck
 
+# Store the configuration in the Git repository and tag it with the TCK
 try:
-    sequence_to_git(repository, sequence, sequence_arg.stem, args.label, tck, tuple(args.stack), {}, True)
-    print(f"Created TCK {hex(tck)} for sequence {sequence_arg.stem}")
+    sequence_to_git(repository, sequence, type_arg, args.label, tck, tuple(args.stack), {}, True)
+    print(f"Created TCK {hex(tck)} for sequence {type_arg}")
 except RuntimeError as e:
     print(e)
     sys.exit(1)
@@ -134,6 +153,8 @@ def get_encoding_key(repo):
         return None
 
 
+# Check that the encoding key is either in CVMFS or in the local
+# metadata repository
 reports_key = sequence[dn]["encoding_key"]
 
 local_key, key_present = (False, False)
-- 
GitLab


From f41d7c40bbaa9bdd22a283110f1181d8590c0627 Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Wed, 14 Jun 2023 17:46:00 +0200
Subject: [PATCH 43/49] Fix cmake linking issue with BinaryDumpersLib

---
 Dumpers/BinaryDumpers/CMakeLists.txt | 1 +
 1 file changed, 1 insertion(+)

diff --git a/Dumpers/BinaryDumpers/CMakeLists.txt b/Dumpers/BinaryDumpers/CMakeLists.txt
index 1098326057b..d04214f9a97 100644
--- a/Dumpers/BinaryDumpers/CMakeLists.txt
+++ b/Dumpers/BinaryDumpers/CMakeLists.txt
@@ -16,6 +16,7 @@ gaudi_add_library(BinaryDumpers
                     HostCommon
                     EventModel
                     AllenRuntime
+                    AllenLib
                     Gaudi::GaudiKernel
                     LHCb::DAQEventLib
                     LHCb::FTDAQLib
-- 
GitLab


From fe804bf760933e3dfae58351d68a3f53cb4180e8 Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Wed, 14 Jun 2023 17:46:52 +0200
Subject: [PATCH 44/49] Include the project itself in dependencies when parsing
 manifest.xml

---
 configuration/src/TCK.cpp | 12 ++++++++----
 1 file changed, 8 insertions(+), 4 deletions(-)

diff --git a/configuration/src/TCK.cpp b/configuration/src/TCK.cpp
index 8f75ad92aac..213bcbc5cab 100644
--- a/configuration/src/TCK.cpp
+++ b/configuration/src/TCK.cpp
@@ -37,9 +37,7 @@ std::map<std::string, std::string> Allen::TCK::project_dependencies()
   if (!dladdr(reinterpret_cast<void*>(Allen::TCK::create_git_repository), &dl_info)) {
     throw std::runtime_error {"Failed to obtain path for this shared library"};
   }
-  auto manifest_path = fs::absolute(fs::path {dl_info.dli_fname} / ".." / "manifest.xml");
-
-  std::cout << "manifest path " << manifest_path.string() << "\n";
+  auto manifest_path = fs::absolute(fs::path {dl_info.dli_fname}.parent_path() / ".." / "manifest.xml");
 
   // Create an empty property tree object
   pt::ptree manifest;
@@ -53,10 +51,16 @@ std::map<std::string, std::string> Allen::TCK::project_dependencies()
   }
 
   std::map<std::string, std::string> deps;
+
+  auto add_project_dep = [&deps](auto const& project) {
+    deps[project.template get<std::string>("<xmlattr>.name")] = project.template get<std::string>("<xmlattr>.version");
+  };
+
   deps["LCG"] = manifest.get<std::string>("manifest.heptools.version");
   for (auto& [_, project] : manifest.get_child("manifest.used_projects")) {
-    deps[project.get<std::string>("<xmlattr>.name")] = project.get<std::string>("<xmlattr>.version");
+    add_project_dep(project);
   }
+  add_project_dep(manifest.get_child("manifest.project"));
   return deps;
 }
 
-- 
GitLab


From 530d7c479c80af1c73321d2fbb7b2afbbb988b3a Mon Sep 17 00:00:00 2001
From: Rosen Matev <rosen.matev@cern.ch>
Date: Wed, 28 Jun 2023 01:04:08 +0200
Subject: [PATCH 45/49] Fix pybind lto warning with GCC 12

---
 configuration/CMakeLists.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/configuration/CMakeLists.txt b/configuration/CMakeLists.txt
index e7d30416ad7..aab142e0bab 100644
--- a/configuration/CMakeLists.txt
+++ b/configuration/CMakeLists.txt
@@ -37,7 +37,7 @@ if (NOT STANDALONE)
     install(FILES include/TCK.h
       DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/Allen)
 
-    pybind11_add_module(TCK src/bindings.cpp)
+    pybind11_add_module(TCK NO_EXTRAS src/bindings.cpp)
     target_link_libraries(TCK
       PRIVATE
         Configuration
-- 
GitLab


From 6464bf3c150c3185a2ec63e16a9966e81b8329e8 Mon Sep 17 00:00:00 2001
From: Rosen Matev <rosen.matev@cern.ch>
Date: Wed, 28 Jun 2023 01:04:56 +0200
Subject: [PATCH 46/49] Remove unused cmake file

---
 cmake/pybind11Tools.cmake | 209 --------------------------------------
 1 file changed, 209 deletions(-)
 delete mode 100644 cmake/pybind11Tools.cmake

diff --git a/cmake/pybind11Tools.cmake b/cmake/pybind11Tools.cmake
deleted file mode 100644
index 52a70c23c40..00000000000
--- a/cmake/pybind11Tools.cmake
+++ /dev/null
@@ -1,209 +0,0 @@
-# tools/pybind11Tools.cmake -- Build system for the pybind11 modules
-#
-# Copyright (c) 2015 Wenzel Jakob <wenzel@inf.ethz.ch>
-#
-# All rights reserved. Use of this source code is governed by a
-# BSD-style license that can be found in the LICENSE file.
-
-cmake_minimum_required(VERSION 2.8.12)
-
-# Add a CMake parameter for choosing a desired Python version
-if(NOT PYBIND11_PYTHON_VERSION)
-  set(PYBIND11_PYTHON_VERSION "" CACHE STRING "Python version to use for compiling modules")
-endif()
-
-set(Python_ADDITIONAL_VERSIONS 3.7 3.6 3.5 3.4)
-find_package(PythonLibsNew ${PYBIND11_PYTHON_VERSION} REQUIRED)
-
-include(CheckCXXCompilerFlag)
-include(CMakeParseArguments)
-
-if(NOT PYBIND11_CPP_STANDARD AND NOT CMAKE_CXX_STANDARD)
-  if(NOT MSVC)
-    check_cxx_compiler_flag("-std=c++14" HAS_CPP14_FLAG)
-
-    if (HAS_CPP14_FLAG)
-      set(PYBIND11_CPP_STANDARD -std=c++14)
-    else()
-      check_cxx_compiler_flag("-std=c++11" HAS_CPP11_FLAG)
-      if (HAS_CPP11_FLAG)
-        set(PYBIND11_CPP_STANDARD -std=c++11)
-      else()
-        message(FATAL_ERROR "Unsupported compiler -- pybind11 requires C++11 support!")
-      endif()
-    endif()
-  elseif(MSVC)
-    set(PYBIND11_CPP_STANDARD /std:c++14)
-  endif()
-
-  set(PYBIND11_CPP_STANDARD ${PYBIND11_CPP_STANDARD} CACHE STRING
-      "C++ standard flag, e.g. -std=c++11, -std=c++14, /std:c++14.  Defaults to C++14 mode." FORCE)
-endif()
-
-# Checks whether the given CXX/linker flags can compile and link a cxx file.  cxxflags and
-# linkerflags are lists of flags to use.  The result variable is a unique variable name for each set
-# of flags: the compilation result will be cached base on the result variable.  If the flags work,
-# sets them in cxxflags_out/linkerflags_out internal cache variables (in addition to ${result}).
-function(_pybind11_return_if_cxx_and_linker_flags_work result cxxflags linkerflags cxxflags_out linkerflags_out)
-  set(CMAKE_REQUIRED_LIBRARIES ${linkerflags})
-  check_cxx_compiler_flag("${cxxflags}" ${result})
-  if (${result})
-    set(${cxxflags_out} "${cxxflags}" CACHE INTERNAL "" FORCE)
-    set(${linkerflags_out} "${linkerflags}" CACHE INTERNAL "" FORCE)
-  endif()
-endfunction()
-
-# Internal: find the appropriate link time optimization flags for this compiler
-function(_pybind11_add_lto_flags target_name prefer_thin_lto)
-  if (NOT DEFINED PYBIND11_LTO_CXX_FLAGS)
-    set(PYBIND11_LTO_CXX_FLAGS "" CACHE INTERNAL "")
-    set(PYBIND11_LTO_LINKER_FLAGS "" CACHE INTERNAL "")
-
-    if(CMAKE_CXX_COMPILER_ID MATCHES "GNU|Clang")
-      set(cxx_append "")
-      set(linker_append "")
-      if (CMAKE_CXX_COMPILER_ID MATCHES "Clang" AND NOT APPLE)
-        # Clang Gold plugin does not support -Os; append -O3 to MinSizeRel builds to override it
-        set(linker_append ";$<$<CONFIG:MinSizeRel>:-O3>")
-      elseif(CMAKE_CXX_COMPILER_ID MATCHES "GNU")
-        set(cxx_append ";-fno-fat-lto-objects")
-      endif()
-
-      if (CMAKE_CXX_COMPILER_ID MATCHES "Clang" AND prefer_thin_lto)
-        _pybind11_return_if_cxx_and_linker_flags_work(HAS_FLTO_THIN
-          "-flto=thin${cxx_append}" "-flto=thin${linker_append}"
-          PYBIND11_LTO_CXX_FLAGS PYBIND11_LTO_LINKER_FLAGS)
-      endif()
-
-      if (NOT HAS_FLTO_THIN)
-        _pybind11_return_if_cxx_and_linker_flags_work(HAS_FLTO
-          "-flto${cxx_append}" "-flto${linker_append}"
-          PYBIND11_LTO_CXX_FLAGS PYBIND11_LTO_LINKER_FLAGS)
-      endif()
-    elseif (CMAKE_CXX_COMPILER_ID MATCHES "Intel")
-      # Intel equivalent to LTO is called IPO
-      _pybind11_return_if_cxx_and_linker_flags_work(HAS_INTEL_IPO
-      "-ipo" "-ipo" PYBIND11_LTO_CXX_FLAGS PYBIND11_LTO_LINKER_FLAGS)
-    elseif(MSVC)
-      # cmake only interprets libraries as linker flags when they start with a - (otherwise it
-      # converts /LTCG to \LTCG as if it was a Windows path).  Luckily MSVC supports passing flags
-      # with - instead of /, even if it is a bit non-standard:
-      _pybind11_return_if_cxx_and_linker_flags_work(HAS_MSVC_GL_LTCG
-        "/GL" "-LTCG" PYBIND11_LTO_CXX_FLAGS PYBIND11_LTO_LINKER_FLAGS)
-    endif()
-
-    if (PYBIND11_LTO_CXX_FLAGS)
-      message(STATUS "LTO enabled")
-    else()
-      message(STATUS "LTO disabled (not supported by the compiler and/or linker)")
-    endif()
-  endif()
-
-  # Enable LTO flags if found, except for Debug builds
-  if (PYBIND11_LTO_CXX_FLAGS)
-    target_compile_options(${target_name} PRIVATE "$<$<NOT:$<CONFIG:Debug>>:${PYBIND11_LTO_CXX_FLAGS}>")
-  endif()
-  if (PYBIND11_LTO_LINKER_FLAGS)
-    target_link_libraries(${target_name} PRIVATE "$<$<NOT:$<CONFIG:Debug>>:${PYBIND11_LTO_LINKER_FLAGS}>")
-  endif()
-endfunction()
-
-# Build a Python extension module:
-# pybind11_add_module(<name> [MODULE | SHARED] [EXCLUDE_FROM_ALL]
-#                     [NO_EXTRAS] [THIN_LTO] source1 [source2 ...])
-#
-function(pybind11_add_module target_name)
-  set(options MODULE SHARED EXCLUDE_FROM_ALL NO_EXTRAS THIN_LTO)
-  cmake_parse_arguments(ARG "${options}" "" "" ${ARGN})
-
-  if(ARG_MODULE AND ARG_SHARED)
-    message(FATAL_ERROR "Can't be both MODULE and SHARED")
-  elseif(ARG_SHARED)
-    set(lib_type SHARED)
-  else()
-    set(lib_type MODULE)
-  endif()
-
-  if(ARG_EXCLUDE_FROM_ALL)
-    set(exclude_from_all EXCLUDE_FROM_ALL)
-  endif()
-
-  add_library(${target_name} ${lib_type} ${exclude_from_all} ${ARG_UNPARSED_ARGUMENTS})
-
-  target_include_directories(${target_name}
-    PRIVATE ${PYBIND11_INCLUDE_DIR}  # from project CMakeLists.txt
-    PRIVATE ${pybind11_INCLUDE_DIR}  # from pybind11Config
-    PRIVATE ${PYTHON_INCLUDE_DIRS})
-
-  # Python debug libraries expose slightly different objects
-  # https://docs.python.org/3.6/c-api/intro.html#debugging-builds
-  # https://stackoverflow.com/questions/39161202/how-to-work-around-missing-pymodule-create2-in-amd64-win-python35-d-lib
-  if(PYTHON_IS_DEBUG)
-    target_compile_definitions(${target_name} PRIVATE Py_DEBUG)
-  endif()
-
-  # The prefix and extension are provided by FindPythonLibsNew.cmake
-  set_target_properties(${target_name} PROPERTIES PREFIX "${PYTHON_MODULE_PREFIX}")
-  set_target_properties(${target_name} PROPERTIES SUFFIX "${PYTHON_MODULE_EXTENSION}")
-
-  # -fvisibility=hidden is required to allow multiple modules compiled against
-  # different pybind versions to work properly, and for some features (e.g.
-  # py::module_local).  We force it on everything inside the `pybind11`
-  # namespace; also turning it on for a pybind module compilation here avoids
-  # potential warnings or issues from having mixed hidden/non-hidden types.
-  set_target_properties(${target_name} PROPERTIES CXX_VISIBILITY_PRESET "hidden")
-
-  if(WIN32 OR CYGWIN)
-    # Link against the Python shared library on Windows
-    target_link_libraries(${target_name} PRIVATE ${PYTHON_LIBRARIES})
-  elseif(APPLE)
-    # It's quite common to have multiple copies of the same Python version
-    # installed on one's system. E.g.: one copy from the OS and another copy
-    # that's statically linked into an application like Blender or Maya.
-    # If we link our plugin library against the OS Python here and import it
-    # into Blender or Maya later on, this will cause segfaults when multiple
-    # conflicting Python instances are active at the same time (even when they
-    # are of the same version).
-
-    # Windows is not affected by this issue since it handles DLL imports
-    # differently. The solution for Linux and Mac OS is simple: we just don't
-    # link against the Python library. The resulting shared library will have
-    # missing symbols, but that's perfectly fine -- they will be resolved at
-    # import time.
-
-    target_link_libraries(${target_name} PRIVATE "-undefined dynamic_lookup")
-
-    if(ARG_SHARED)
-      # Suppress CMake >= 3.0 warning for shared libraries
-      set_target_properties(${target_name} PROPERTIES MACOSX_RPATH ON)
-    endif()
-  endif()
-
-  # Make sure C++11/14 are enabled
-  target_compile_options(${target_name} PUBLIC ${PYBIND11_CPP_STANDARD})
-
-  if(ARG_NO_EXTRAS)
-    return()
-  endif()
-
-  _pybind11_add_lto_flags(${target_name} ${ARG_THIN_LTO})
-
-  if (NOT MSVC AND NOT ${CMAKE_BUILD_TYPE} MATCHES Debug)
-    # Strip unnecessary sections of the binary on Linux/Mac OS
-    if(CMAKE_STRIP)
-      if(APPLE)
-        add_custom_command(TARGET ${target_name} POST_BUILD
-                           COMMAND ${CMAKE_STRIP} -x $<TARGET_FILE:${target_name}>)
-      else()
-        add_custom_command(TARGET ${target_name} POST_BUILD
-                           COMMAND ${CMAKE_STRIP} $<TARGET_FILE:${target_name}>)
-      endif()
-    endif()
-  endif()
-
-  if(MSVC)
-    # /MP enables multithreaded builds (relevant when there are many files), /bigobj is
-    # needed for bigger binding projects due to the limit to 64k addressable sections
-    target_compile_options(${target_name} PRIVATE /MP /bigobj)
-  endif()
-endfunction()
-- 
GitLab


From c0352eb437dfc6c31d96bd67bb8df45093a02ac4 Mon Sep 17 00:00:00 2001
From: Rosen Matev <rosen.matev@cern.ch>
Date: Wed, 28 Jun 2023 01:05:10 +0200
Subject: [PATCH 47/49] Move all metadata to metadata.json

---
 Rec/Allen/python/Allen/qmtest/utils.py        |  1 -
 Rec/Allen/python/Allen/tck.py                 | 84 ++++++++++---------
 Rec/Allen/scripts/create_hlt1_tck.py          | 24 +++---
 Rec/Allen/tests/options/compare_hlt1_tcks.py  |  6 +-
 Rec/Allen/tests/options/create_hlt1_tcks.py   | 14 ++--
 .../options/test_tck_allen_write_config.py    |  3 +-
 .../tests/qmtest/compare_tck_allen_config.qmt |  2 +-
 Rec/Allen/tests/qmtest/compare_tcks.qmt       |  2 +-
 Rec/Allen/tests/qmtest/create_tcks.qmt        |  2 +-
 configuration/include/TCK.h                   |  2 +-
 configuration/src/TCK.cpp                     |  2 +-
 configuration/src/bindings.cpp                |  2 +-
 12 files changed, 75 insertions(+), 69 deletions(-)

diff --git a/Rec/Allen/python/Allen/qmtest/utils.py b/Rec/Allen/python/Allen/qmtest/utils.py
index 49966f1dad7..9c77fa5d9b2 100644
--- a/Rec/Allen/python/Allen/qmtest/utils.py
+++ b/Rec/Allen/python/Allen/qmtest/utils.py
@@ -8,7 +8,6 @@
 # granted to it by virtue of its status as an Intergovernmental Organization  #
 # or submit itself to any jurisdiction.                                       #
 ###############################################################################
-from pprint import pprint
 from collections import defaultdict
 
 
diff --git a/Rec/Allen/python/Allen/tck.py b/Rec/Allen/python/Allen/tck.py
index 15f4f84a157..a854f536093 100644
--- a/Rec/Allen/python/Allen/tck.py
+++ b/Rec/Allen/python/Allen/tck.py
@@ -84,6 +84,10 @@ from hashlib import md5
 from subprocess import PIPE, run
 
 
+def format_tck(tck: int):
+    return f"0x{tck:08X}"
+
+
 def dependencies_from_build_manifest():
     """Get the built/installed version of Allen from the
     build/install manifest in the format ALLEN_vXrYpZ where pZ is
@@ -93,8 +97,9 @@ def dependencies_from_build_manifest():
     if "ALLEN_INSTALL_DIR" in os.environ:
         manifest_tree = etree.parse(
             os.path.expandvars("${ALLEN_INSTALL_DIR}/manifest.xml"))
-        projects = ([manifest_tree.find("project")]
-                    + [p for p in manifest_tree.find("used_projects").iterchildren()])
+        projects = (
+            [manifest_tree.find("project")] +
+            [p for p in manifest_tree.find("used_projects").iterchildren()])
         deps = {p.get("name"): p.get("version") for p in projects}
         deps["LCG"] = manifest_tree.find("heptools").find("version").text
         return deps
@@ -152,8 +157,8 @@ def tck_to_sequence(config: dict):
     return sequence_config
 
 
-def json_tck_db(configuration: dict, sequence_type: str, label: str,
-                metadata: dict, tck: int):
+def json_tck_db(configuration: dict, sequence_type: str, metadata: dict,
+                tck: int):
     """Create a JSON-formatted string that hlt_tck_tool can
     write to a git repository.
 
@@ -173,29 +178,24 @@ def json_tck_db(configuration: dict, sequence_type: str, label: str,
     # Add the configuration to the TCK
     tck_config = sequence_to_tck(configuration)
 
-    # Add the metadata to the TCK as a file with the same name
-    tck_config['metadata'] = metadata
-
     # The value of the digest is not important as long as it matches
     # between the manifest and the key of the configuration. Use MD5
-    # as that was used extensively and more meaninfully in Run 2.
+    # as that was used extensively and more meaningfully in Run 2.
+    # The digest is calculated without including metadata.json
+    # (which contains the digest itself!)
     digest = md5(json.dumps(tck_config).encode("utf-8")).hexdigest()
+    metadata = metadata.copy()
+    metadata["digest"] = digest
 
-    stack, version = metadata["stack"]
-
-    vs = version.split(".")
-    release = stack.upper() + "_"
-    for idx, prefix in enumerate(("v", "r", "p")):
-        if idx < len(vs):
-            release += prefix + vs[idx]
+    # Add the metadata to the TCK in a file called "metadata.json"
+    # This is a name we can "never" change!
+    tck_config['metadata.json'] = metadata
 
     manifest = {
+        # FIXME the digest, TCK and branch are redundant, they're all in metadata
         digest: {
             "TCK": hex(tck),
-            "Release2Type": {
-                release: sequence_type
-            },
-            "label": label,
+            "branch": sequence_type,
             "metadata": metadata
         }
     }
@@ -241,7 +241,7 @@ def sequence_to_git(
         sequence_type: str,
         label: str,
         tck: int,
-        stack: tuple,
+        stack: str,
         extra_metadata={},
         write_intermediate=False,
 ):
@@ -249,14 +249,26 @@ def sequence_to_git(
     """
     from Allen import TCK
 
+    if not re.match(r"^0x1[0-9A-F]{7}$", format_tck(tck)):
+        raise ValueError(
+            f"TCK {format_tck(tck)} does not match 0x1XXXXXXX pattern")
+
     # Collect metadata for TCK
-    metadata = extra_metadata
-    metadata["stack"] = stack
-    metadata["version"] = TCK.version
-    metadata["projects"] = dependencies_from_build_manifest()
+    metadata = extra_metadata.copy()
+    metadata["version"] = 1  # updating this must be synchronised with TCKUtils
+    metadata["TCK"] = format_tck(tck)
+    metadata["config_version"] = ["Allen", TCK.config_version]
+    metadata[
+        "application"] = "Hlt1"  # match the "SourceID" or the "process/stage"
+    metadata["label"] = label
+    metadata["type"] = sequence_type
+    metadata["stack"] = {
+        "name": stack,
+        "projects": dependencies_from_build_manifest()
+    }
 
     # Craete JSON TCK DB
-    db = json_tck_db(sequence, sequence_type, label, metadata, tck)
+    db = json_tck_db(sequence, sequence_type, metadata, tck)
     if write_intermediate:
         with open(hex(tck) + ".json", "w") as f:
             json.dump(db, f, indent=4, sort_keys=True)
@@ -286,10 +298,12 @@ def sequence_from_git(repository: Path, tck: str, use_bindings=True) -> str:
     if use_bindings:
         from Allen import TCK
         sequence, info = TCK.sequence_from_git(str(repository), tck)
-        tck_info = {k: getattr(info, k)
-                    for k in ("digest", "tck", "release", "type", "label")}
+        tck_info = {
+            k: getattr(info, k)
+            for k in ("digest", "tck", "release", "type", "label")
+        }
         tck_info["metadata"] = json.loads(info.metadata)
-        return (sequence,  tck_info)
+        return (sequence, tck_info)
     else:
         p = run(
             [
@@ -354,16 +368,10 @@ def manifest_from_git(repository: Path):
     repositry
     """
 
-    p = run(
-        [
-            "hlt_tck_tool",
-            "--list-manifest-as-json",
-            f"{str(repository)}",
-            "-",
-        ],
-        stdout=PIPE,
-        stderr=PIPE,
-    )
+    args = [
+        "hlt_tck_tool", "--list-manifest-as-json", f"{str(repository)}", "-"
+    ]
+    p = run(args, stdout=PIPE, stderr=PIPE)
     if p.returncode != 0:
         print("Failed to convert manifest from git repo to JSON")
         print(p.stdout)
diff --git a/Rec/Allen/scripts/create_hlt1_tck.py b/Rec/Allen/scripts/create_hlt1_tck.py
index 7f512c44809..010de74cd28 100644
--- a/Rec/Allen/scripts/create_hlt1_tck.py
+++ b/Rec/Allen/scripts/create_hlt1_tck.py
@@ -28,10 +28,10 @@ The configuration can be obtained from:
 
 Some metadata is also persisted.
 """)
-parser.add_argument("stack", nargs=2, help="first argument should be a string and the second in the format N.M<.P>")
-parser.add_argument("sequence", nargs=1)
-parser.add_argument("repository", nargs=1)
-parser.add_argument("tck", nargs=1, help="A 32-bit hexadecimal number")
+parser.add_argument("stack")
+parser.add_argument("sequence")
+parser.add_argument("repository")
+parser.add_argument("tck", help="A 32-bit hexadecimal number")
 parser.add_argument(
     "-t,--hlt1-type",
     type=str,
@@ -57,16 +57,11 @@ parser.add_argument(
 
 args = parser.parse_args()
 
-sequence_arg = Path(args.sequence[0])
-repository = Path(args.repository[0])
-tck = int(args.tck[0], 16)
+sequence_arg = Path(args.sequence)
+repository = Path(args.repository)
+tck = int(args.tck, 16)
 type_arg = args.sequence_type if args.sequence_type != '' else sequence_arg.stem
 
-stack_version_expr = re.compile(r"^\d+\.\d+(?:\.\d+)?$")
-if not stack_version_expr.match(args.stack[1]):
-    print("Stack must be given as two arguments: string N.M<.P>, where N, M and P are integers.")
-    sys.exit(1)
-
 local_metainfo_repo = Path("./lhcb-metainfo/.git")
 tck_metainfo_repos = [(str(local_metainfo_repo.resolve()), "master"),
                       (FILE_CONTENT_METADATA, "master")]
@@ -109,6 +104,7 @@ if sequence_arg.suffix in (".py", ""):
     with (make_dec_reporter.bind(TCK=tck),
           flush_key_registry()):
         sequence = sequence_from_python(sequence_arg, node_name=args.hlt1_node)
+        sequence = json.loads(json.dumps(sequence, sort_keys=True))
 
     # Check that at least the dec_reporter is part of the sequence,
     # otherwise it's meaningless to create a TCK for this sequence.
@@ -137,7 +133,7 @@ elif sequence_arg.suffix == ".json":
 
 # Store the configuration in the Git repository and tag it with the TCK
 try:
-    sequence_to_git(repository, sequence, type_arg, args.label, tck, tuple(args.stack), {}, True)
+    sequence_to_git(repository, sequence, type_arg, args.label, tck, args.stack, {"settings":sequence_arg.stem}, True)
     print(f"Created TCK {hex(tck)} for sequence {type_arg}")
 except RuntimeError as e:
     print(e)
@@ -149,7 +145,7 @@ def get_encoding_key(repo):
         with metainfo_repos.bind(repos=[(repo, "master")]):
             return retrieve_encoding_dictionary(
                 reports_key, require_key_present=True)
-    except ConfigurationError as e:
+    except ConfigurationError:
         return None
 
 
diff --git a/Rec/Allen/tests/options/compare_hlt1_tcks.py b/Rec/Allen/tests/options/compare_hlt1_tcks.py
index e082bfb0bc6..54828bece5f 100644
--- a/Rec/Allen/tests/options/compare_hlt1_tcks.py
+++ b/Rec/Allen/tests/options/compare_hlt1_tcks.py
@@ -22,13 +22,13 @@ configuration.
 import os
 import sys
 import json
-from Allen.qmtest.utils import good_sequence
+from Allen.qmtest.utils import print_sequence_differences
 from Allen.tck import manifest_from_git, sequence_from_git
 from pathlib import Path
 
 seq_dir = Path(os.path.expandvars("${ALLEN_INSTALL_DIR}/constants"))
-json_repo = Path("config_json.git")
-python_repo = Path("config_python.git")
+json_repo = Path(os.getenv("PREREQUISITE_0", "")) / "config_json.git"
+python_repo = Path(os.getenv("PREREQUISITE_0", "")) / "config_python.git"
 
 manifest_json = manifest_from_git(json_repo)
 manifest_python = manifest_from_git(python_repo)
diff --git a/Rec/Allen/tests/options/create_hlt1_tcks.py b/Rec/Allen/tests/options/create_hlt1_tcks.py
index ea5e7731620..47964a6ac64 100644
--- a/Rec/Allen/tests/options/create_hlt1_tcks.py
+++ b/Rec/Allen/tests/options/create_hlt1_tcks.py
@@ -41,17 +41,21 @@ for i, seq in enumerate(sequences):
     # Create TCKs from python configurations
     # Note, these are created first such that missing encoding keys
     # will be added to the test-local metainfo repository
-    r = subprocess.run(
-        ["python", tck_script, "HLT1", "1.0", seq.stem, "config_python.git", tck])
+    r = subprocess.run([
+        "python", tck_script, "RTA/2050.01.01", seq.stem, "config_python.git",
+        tck
+    ])
     if r.returncode != 0:
         error = True
     else:
         print(f"Created TCK {tck} from Python configuration {seq.stem}")
+    os.rename(f"{tck}.json", f"{tck}_python.json")
 
     # Create TCKs from JSON files
-    r = subprocess.run(
-        ["python", tck_script, "HLT1", "1.0",
-         str(seq), "config_json.git", tck])
+    r = subprocess.run([
+        "python", tck_script, "RTA/2050.01.01",
+        str(seq), "config_json.git", tck
+    ])
     if r.returncode != 0:
         error = True
     else:
diff --git a/Rec/Allen/tests/options/test_tck_allen_write_config.py b/Rec/Allen/tests/options/test_tck_allen_write_config.py
index 6a4b2d4dc1a..10ab50cd342 100644
--- a/Rec/Allen/tests/options/test_tck_allen_write_config.py
+++ b/Rec/Allen/tests/options/test_tck_allen_write_config.py
@@ -20,14 +20,13 @@ persistence.
 """
 
 import os
-import sys
 import json
 from pathlib import Path
 from subprocess import PIPE, run
 from Allen.qmtest.utils import print_sequence_differences
 from Allen.tck import manifest_from_git, sequence_from_git
 
-tck_repo = Path("config_json.git")
+tck_repo = Path(os.getenv("PREREQUISITE_0", "")) / "config_json.git"
 
 manifest = manifest_from_git(tck_repo)
 
diff --git a/Rec/Allen/tests/qmtest/compare_tck_allen_config.qmt b/Rec/Allen/tests/qmtest/compare_tck_allen_config.qmt
index 2d8d74a7e6a..f1ff2513396 100644
--- a/Rec/Allen/tests/qmtest/compare_tck_allen_config.qmt
+++ b/Rec/Allen/tests/qmtest/compare_tck_allen_config.qmt
@@ -20,5 +20,5 @@
     <tuple><text>create_tcks</text><enumeral>PASS</enumeral></tuple>
   </set></argument>
   <argument name="timeout"><integer>600</integer></argument>
-  <argument name="use_temp_dir"><enumeral>true</enumeral></argument>
+  <argument name="use_temp_dir"><enumeral>per-test</enumeral></argument>
 </extension>
diff --git a/Rec/Allen/tests/qmtest/compare_tcks.qmt b/Rec/Allen/tests/qmtest/compare_tcks.qmt
index 3c9aecea522..3ebd0969e64 100644
--- a/Rec/Allen/tests/qmtest/compare_tcks.qmt
+++ b/Rec/Allen/tests/qmtest/compare_tcks.qmt
@@ -20,7 +20,7 @@
     <tuple><text>create_tcks</text><enumeral>PASS</enumeral></tuple>
   </set></argument>
   <argument name="timeout"><integer>600</integer></argument>
-  <argument name="use_temp_dir"><enumeral>true</enumeral></argument>
+  <argument name="use_temp_dir"><enumeral>per-test</enumeral></argument>
 <argument name="validator"><text>
 
 # No validator for now: only check the exit code
diff --git a/Rec/Allen/tests/qmtest/create_tcks.qmt b/Rec/Allen/tests/qmtest/create_tcks.qmt
index 8ed501c2c30..7174b0a0a90 100644
--- a/Rec/Allen/tests/qmtest/create_tcks.qmt
+++ b/Rec/Allen/tests/qmtest/create_tcks.qmt
@@ -17,5 +17,5 @@
     <text>${ALLENROOT}/tests/options/create_hlt1_tcks.py</text>
   </set></argument>
   <argument name="timeout"><integer>600</integer></argument>
-  <argument name="use_temp_dir"><enumeral>true</enumeral></argument>
+  <argument name="use_temp_dir"><enumeral>per-test</enumeral></argument>
 </extension>
diff --git a/configuration/include/TCK.h b/configuration/include/TCK.h
index 2a87b35cf8f..da417ed03dd 100644
--- a/configuration/include/TCK.h
+++ b/configuration/include/TCK.h
@@ -17,7 +17,7 @@
 namespace Allen {
 
   namespace TCK {
-    static constexpr unsigned version = 1u;
+    static constexpr unsigned config_version = 1u;
 
     std::map<std::string, std::string> project_dependencies();
 
diff --git a/configuration/src/TCK.cpp b/configuration/src/TCK.cpp
index 213bcbc5cab..a9aa7291623 100644
--- a/configuration/src/TCK.cpp
+++ b/configuration/src/TCK.cpp
@@ -74,7 +74,7 @@ std::tuple<bool, std::string> Allen::TCK::check_projects(nlohmann::json metadata
     return ps.str();
   };
 
-  auto projects = metadata["projects"].get<std::map<std::string, std::string>>();
+  auto projects = metadata.at("stack").at("projects").get<std::map<std::string, std::string>>();
   auto deps = Allen::TCK::project_dependencies();
 
   auto check = projects == deps;
diff --git a/configuration/src/bindings.cpp b/configuration/src/bindings.cpp
index 7107da5c283..f6bd482ad90 100644
--- a/configuration/src/bindings.cpp
+++ b/configuration/src/bindings.cpp
@@ -48,7 +48,7 @@ PYBIND11_MODULE(TCK, m)
     sequence_to_git
     )pbdoc";
 
-  m.attr("version") = py::int_(Allen::TCK::version);
+  m.attr("config_version") = py::int_(Allen::TCK::config_version);
 
   m.def("create_git_repository", &Allen::TCK::create_git_repository, "Create a git repository that can store TCKs");
   m.def("tck_from_git", &Allen::tck_from_git, "Get the TCK as it is in the git repository");
-- 
GitLab


From b179bca6abaa65f19468be93ccbe24798e08a4af Mon Sep 17 00:00:00 2001
From: Roel Aaij <raaij@nikhef.nl>
Date: Thu, 29 Jun 2023 21:08:17 +0200
Subject: [PATCH 48/49] Add install include dir to AllenLib too

---
 CMakeLists.txt | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/CMakeLists.txt b/CMakeLists.txt
index 6d01d3e494b..e82392ce00e 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -677,6 +677,8 @@ allen_add_host_library(AllenLib SHARED
   main/src/ZMQOutputSender.cpp)
 
 target_compile_definitions(AllenLib PUBLIC ${TARGET_DEFINITION})
+target_include_directories(AllenLib PUBLIC
+  $<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>)
 
 target_link_libraries(AllenLib
   PRIVATE
-- 
GitLab


From 39cfcf93bee92b8172660a4a611bc4290a5c0a33 Mon Sep 17 00:00:00 2001
From: Rosen Matev <rosen.matev@cern.ch>
Date: Fri, 30 Jun 2023 23:57:07 +0200
Subject: [PATCH 49/49] Fix non-empty stderr in MooreOnline test

---
 Rec/Allen/scripts/create_hlt1_tck.py | 21 ++++++++++++---------
 1 file changed, 12 insertions(+), 9 deletions(-)

diff --git a/Rec/Allen/scripts/create_hlt1_tck.py b/Rec/Allen/scripts/create_hlt1_tck.py
index 010de74cd28..85cd7c9d0c8 100644
--- a/Rec/Allen/scripts/create_hlt1_tck.py
+++ b/Rec/Allen/scripts/create_hlt1_tck.py
@@ -9,15 +9,17 @@
 # or submit itself to any jurisdiction.                                       #
 ###############################################################################
 import os
-import re
 import argparse
 import json
 import sys
 import subprocess
+import logging
 from PyConf.filecontent_metadata import flush_key_registry, retrieve_encoding_dictionary, metainfo_repos, ConfigurationError, FILE_CONTENT_METADATA
 from Allen.tck import sequence_to_git, sequence_from_python
 from pathlib import Path
 
+logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
+
 parser = argparse.ArgumentParser(description="""
 Persist an Allen configuration in a git repository identified by a TCK
 
@@ -38,8 +40,7 @@ parser.add_argument(
     help=
     "Sequence type to use; also used as branch name in the Git repository.",
     default='',
-    dest='sequence_type'
-)
+    dest='sequence_type')
 parser.add_argument(
     "--python-hlt1-node",
     type=str,
@@ -66,19 +67,21 @@ local_metainfo_repo = Path("./lhcb-metainfo/.git")
 tck_metainfo_repos = [(str(local_metainfo_repo.resolve()), "master"),
                       (FILE_CONTENT_METADATA, "master")]
 
-
 # Unset this environment variable to force generation of new encoding
 # keys in a local repo if they are not in the cvmfs one
 build_metainfo_repo = os.environ.pop('LHCbFileContentMetaDataRepo', None)
 if build_metainfo_repo is not None and not local_metainfo_repo.exists():
     result = subprocess.run([
         'git', 'clone', '-q', build_metainfo_repo,
-        str(local_metainfo_repo.resolve()).removesuffix('/.git')],
+        str(local_metainfo_repo.resolve()).removesuffix('/.git')
+    ],
                             capture_output=True,
                             text=True,
                             check=False)
     if result.returncode != 0:
-        print(f"Failed to clone build metainfo repo {build_metainfo_repo} to local repo")
+        print(
+            f"Failed to clone build metainfo repo {build_metainfo_repo} to local repo"
+        )
         sys.exit(1)
 
 
@@ -101,8 +104,7 @@ if sequence_arg.suffix in (".py", ""):
     sequence, dn = {}, None
     # Load the python module to get the sequence configuration; set
     # the TCK to the right value and flush encoding keys
-    with (make_dec_reporter.bind(TCK=tck),
-          flush_key_registry()):
+    with (make_dec_reporter.bind(TCK=tck), flush_key_registry()):
         sequence = sequence_from_python(sequence_arg, node_name=args.hlt1_node)
         sequence = json.loads(json.dumps(sequence, sort_keys=True))
 
@@ -133,7 +135,8 @@ elif sequence_arg.suffix == ".json":
 
 # Store the configuration in the Git repository and tag it with the TCK
 try:
-    sequence_to_git(repository, sequence, type_arg, args.label, tck, args.stack, {"settings":sequence_arg.stem}, True)
+    sequence_to_git(repository, sequence, type_arg, args.label, tck,
+                    args.stack, {"settings": sequence_arg.stem}, True)
     print(f"Created TCK {hex(tck)} for sequence {type_arg}")
 except RuntimeError as e:
     print(e)
-- 
GitLab