diff --git a/.gitignore b/.gitignore
index 355164c126511e7d9f9896171414647f098dd30e..d7da82c2d8ee67d078ca09c214e939911185f4c5 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1 +1,5 @@
-*/
+/*/
+!/cmake/
+!/CMS/
+!/CMS/scripts/
+*.swp
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 374f3c72733dcf7e4029684e7bd880535adaadbf..474ee4759c73605f861caa4a66567cfda5fd9f48 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,17 +1,44 @@
-image: gitlab-registry.cern.ch/ci-tools/ci-worker:cc7
-
-compilation:
+LCG:
     stage: build
     tags:
         - cvmfs
     variables:
         TERM: xterm
     script:
-        - yum install -y cmake3 openssl-devel python3 zsh
-        - source ./setup
+        - yum install -y cmake python3 zsh which libtirpc libicu libglvnd-glx pcre2-utf16 libXrender libSM glibc-devel zlib-devel git-lfs
+        - source /cvmfs/sft.cern.ch/lcg/views/LCG_104/x86_64-el9-gcc13-opt/setup.sh
+        - ./install.sh
+        - source tools/setup.sh
+        - ctest --test-dir build/Darwin --output-junit Testing/Temporary/test_report.xml -j$(nproc)
+        - ctest --test-dir build/Core   --output-junit Testing/Temporary/test_report.xml -j$(nproc)
+    artifacts:
+      when: always
+      paths:
+        - build/Darwin/Testing/Temporary
+        - build/Core/Testing/Temporary
+      reports:
+        junit:
+        - build/Darwin/Testing/Temporary/test_report.xml
+        - build/Core/Testing/Temporary/test_report.xml
+
+micromamba:
+    stage: build
+    script:
+        - sh <(curl -L micro.mamba.pm/install.sh)
+        - source ~/.bashrc
+        - micromamba --version
+        - micromamba create -f prerequisites.yml -y -v
+        - micromamba activate DAS
         - ./install.sh
-        - cd CMSSW*/
-        - shopt -s expand_aliases  # allows aliases like cmsenv to be used
-        - cmsenv
-        - python -c "import CRABAPI"
-        - scram b -j$(nproc)
+        - source tools/setup.sh
+        - ctest --test-dir build/Darwin --output-junit Testing/Temporary/test_report.xml -j$(nproc)
+        - ctest --test-dir build/Core   --output-junit Testing/Temporary/test_report.xml -j$(nproc)
+    artifacts:
+      when: always
+      paths:
+        - build/Darwin/Testing/Temporary
+        - build/Core/Testing/Temporary
+      reports:
+        junit:
+        - build/Darwin/Testing/Temporary/test_report.xml
+        - build/Core/Testing/Temporary/test_report.xml
diff --git a/CMS/das.xml.in b/CMS/das.xml.in
new file mode 100644
index 0000000000000000000000000000000000000000..f346253b7b94e6afea63d0fde9df56ea615cce09
--- /dev/null
+++ b/CMS/das.xml.in
@@ -0,0 +1,5 @@
+<tool name="das" version="@DAS_VERSION@">
+  <info url="https://dasanalysissystem.docs.cern.ch"/>
+  <runtime name="CORE_BASE" value="$DAS_BASE/Core" type="path"/>
+  <runtime name="DARWIN_TABLES" value="$DAS_BASE/tables" type="path"/>
+</tool>
diff --git a/CMS/install_CMSSW.sh b/CMS/install_CMSSW.sh
new file mode 100755
index 0000000000000000000000000000000000000000..8c2e8599af00ed639f5a025577d051c41cafde37
--- /dev/null
+++ b/CMS/install_CMSSW.sh
@@ -0,0 +1,53 @@
+#!/usr/bin/env bash
+
+source /cvmfs/cms.cern.ch/cmsset_default.sh
+
+set -e
+
+if [[ $# -ne 2 ]]; then
+    echo "$0 CMSSW_X_Y_Z JetToolbox_branch" >&2
+    exit 2
+fi
+
+CMSSW_VERSION="$1"
+JETTOOLBOX_BRANCH="$2"
+
+# Get CMSSW if it doesn't exist
+[ ! -d $CMSSW_VERSION ] && scram p $CMSSW_VERSION
+
+# Silence perl warnings (from scram)
+export LANG=C
+export LC_ALL=C
+
+# Build!
+cd $CMSSW_VERSION/src
+eval $(scram runtime -sh) # cmsenv
+
+# And JetToolbox
+[ ! -d JMEAnalysis/JetToolbox ] && (
+    git clone https://github.com/cms-jet/JetToolbox.git JMEAnalysis/JetToolbox -b "$JETTOOLBOX_BRANCH"
+
+    if [ -f $DAS_BASE/CMS/$JETTOOLBOX_BRANCH.patch ]; then
+        cd JMEAnalysis/JetToolbox
+        patch -p1 <$DAS_BASE/CMS/$JETTOOLBOX_BRANCH.patch
+    fi
+)
+
+# Set up symlinks to the relevant parts of Core
+mkdir -p $CMSSW_BASE/src/Core
+cd $CMSSW_BASE/src/Core
+[ ! -d Objects   ] && ln -s $DAS_BASE/Core/Objects
+[ ! -d Ntupliser ] && ln -s $DAS_BASE/Core/Ntupliser
+
+
+
+# Set up symlinks to the relevant parts of Darwin
+cd Ntupliser/plugins
+[ ! -f UserInfo.h  ] && ln -s $DAS_BASE/Darwin/interface/UserInfo.h
+[ ! -f UserInfo.cc ] && ln -s $DAS_BASE/Darwin/src/UserInfo.cc
+[ ! -f colours.h   ] && ln -s $DAS_BASE/Darwin/interface/colours.h
+
+# Compile
+cd $CMSSW_BASE
+scram b -j$(nproc)
+scram setup $DAS_BASE/build/CMS/das.xml
diff --git a/jetToolbox_120X.patch b/CMS/jetToolbox_120X.patch
similarity index 100%
rename from jetToolbox_120X.patch
rename to CMS/jetToolbox_120X.patch
diff --git a/CMS/scripts/das-cmssw b/CMS/scripts/das-cmssw
new file mode 100755
index 0000000000000000000000000000000000000000..2aca6af8c25e73dbfd4bfe5320571ec993bef237
--- /dev/null
+++ b/CMS/scripts/das-cmssw
@@ -0,0 +1,39 @@
+#!/usr/bin/env bash
+
+if [[ $# -lt 2 ]]; then
+    echo "$0 image command [args]" >&2
+    exit 2
+fi
+
+img=$1
+shift # rm first element of $@
+
+case "$img" in
+    *cc7)
+        export CMSSW_VERSION=CMSSW_10_6_30
+        ;;
+    *el8)
+        export CMSSW_VERSION=CMSSW_12_4_0
+        ;;
+    *)
+        tput setaf 1
+        echo "Unknown Linux version. Abort."
+        tput op
+        exit 1
+        ;;
+esac
+
+$img <<'SCRIPT' -s $CMSSW_VERSION "$@"
+CMSSW_VERSION=$1
+shift
+function source_CMS_setup() { source $DAS_BASE/CMS/setup > /dev/null ; }
+source_CMS_setup
+if [ ! -d $DAS_BASE/$CMSSW_VERSION ]; then
+    echo "$CMSSW_VERSION could not be found. Are you sure that it has been installed?"
+    exit 1
+fi
+cd $DAS_BASE/$CMSSW_VERSION
+eval $(scram runtime -sh) # cmsenv
+cd - > /dev/null
+"$@"
+SCRIPT
diff --git a/CMS/scripts/el8 b/CMS/scripts/el8
new file mode 100755
index 0000000000000000000000000000000000000000..21713dc7d536c62f6baca820b9a9cdff433d696d
--- /dev/null
+++ b/CMS/scripts/el8
@@ -0,0 +1,36 @@
+#!/usr/bin/env bash
+
+# inspired from https://gitlab.cern.ch/cms-cat/cmssw-lxplus
+export APPTAINER_BINDPATH=/afs,/cvmfs,/cvmfs/grid.cern.ch/etc/grid-security:/etc/grid-security,/cvmfs/grid.cern.ch/etc/grid-security/vomses:/etc/vomses,/etc/pki/ca-trust,/run/user,/tmp,/var/run/user,/etc/sysconfig,/etc:/orig/etc,$DAS_BASE
+# note: apptainer will gently fail if any of the paths cannot be found
+
+# cluster-dependent paths
+case "$(hostname)" in
+    *desy.de)
+        export APPTAINER_BINDPATH=$APPTAINER_BINDPATH,/pnfs
+        ;;
+    *cern.ch)
+        export APPTAINER_BINDPATH=$APPTAINER_BINDPATH,/eos
+        ;;
+esac
+
+case "$0" in
+    *cc7)
+        export img=/cvmfs/unpacked.cern.ch/registry.hub.docker.com/cmssw/cc7:x86_64-latest
+        ;;
+    *el8)
+        export img=/cvmfs/unpacked.cern.ch/registry.hub.docker.com/cmssw/el8:x86_64
+        ;;
+    *)
+        tput setaf 1
+        echo "Unknown Linux version. Abort."
+        tput op
+        exit 1
+        ;;
+esac
+
+apptainer exec --env DAS_BASE=$DAS_BASE \
+               --env X509_USER_PROXY=$X509_USER_PROXY \
+               --cwd $PWD \
+               --containall \
+               $img bash "$@"
diff --git a/CMS/setup b/CMS/setup
new file mode 100755
index 0000000000000000000000000000000000000000..e94ec962dd2c48a751eb19ed16eb9f114ab23656
--- /dev/null
+++ b/CMS/setup
@@ -0,0 +1,9 @@
+source /cvmfs/cms.cern.ch/cmsset_default.sh
+source /cvmfs/cms.cern.ch/rucio/setup-py3.sh
+source /cvmfs/cms.cern.ch/crab3/crab.sh
+
+if [ -z "$RUCIO_ACCOUNT" ]
+then
+    echo "\$RUCIO_ACCOUNT not found in environment. Setting up with \$USER."
+    export RUCIO_ACCOUNT=$USER
+fi
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 8c5f143b074ea68c4a1a86f60060115a10056e8d..bbb05d9ebd24af6e0ddd96de0f1caa35bc5471cf 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -2,7 +2,7 @@
 #                                                                                                                     
 # SPDX-FileCopyrightText: Louis Moureaux <louis.moureaux@cern.ch>
 
-cmake_minimum_required(VERSION 3.17...3.28 FATAL_ERROR)                                                               
+cmake_minimum_required(VERSION 3.23...3.28 FATAL_ERROR)                                                               
 
 # Set a useful default install directory but let the user override it
 set(CMAKE_INSTALL_PREFIX "${CMAKE_SOURCE_DIR}/tools" CACHE PATH
@@ -12,55 +12,108 @@ set(CMAKE_INSTALL_PREFIX "${CMAKE_SOURCE_DIR}/tools" CACHE PATH
 set(CMAKE_BUILD_TYPE RelWithDebInfo CACHE STRING
     "Choose the type of build, options are: None Debug Release RelWithDebInfo MinSizeRel ...")
 
-project(Installer VERSION 1.0 LANGUAGES C)  # Need a language for GNUInstallDirs
+# Import vendored CMake modules
+list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake")
+
+# taken from https://stackoverflow.com/a/48555098
+get_cmake_property(vars CACHE_VARIABLES)
+foreach(var ${vars})
+    get_property(currentHelpString CACHE "${var}" PROPERTY HELPSTRING)
+    if("${currentHelpString}" MATCHES "No help, variable specified on the command line." OR "${currentHelpString}" STREQUAL "")
+        list(APPEND CL_ARGS "-D${var}=${${var}}")
+    endif()
+endforeach()
+
+project(DAS VERSION 3.0 LANGUAGES C)  ## \todo Need a language for GNUInstallDirs
 
 find_package(Git REQUIRED)
+find_package(Python 3 REQUIRED COMPONENTS Interpreter)
 
 include(ExternalProject)
 include(GNUInstallDirs)
 
-install(FILES libgit2.xml TYPE SYSCONF)
+# Detect the user's environment.
+execute_process(COMMAND "${Python_EXECUTABLE}" detect_environment.py
+                WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
+                OUTPUT_VARIABLE ENVIRONMENT_SETUP
+                OUTPUT_STRIP_TRAILING_WHITESPACE)
 
-set(TUNFOLD_HEADERS TUnfold.h
-                    TUnfoldBinning.h
-                    TUnfoldBinningXML.h
-                    TUnfoldDensity.h
-                    TUnfoldIterativeEM.h
-                    TUnfoldSys.h
-)
-ExternalProject_Add(TUnfold
-    URL                 https://www.desy.de/~sschmitt/TUnfold/TUnfold_V17.9.tgz
-    URL_HASH            SHA256=d7f66f6a0e007eb946180643b8879bb2b8918441106bc0305b82a97391a391dc
-    SOURCE_DIR          "${CMAKE_SOURCE_DIR}/TUnfold"
-    CONFIGURE_COMMAND   ""
-    BUILD_COMMAND       make lib TUNFOLDVERSION='V17'
-    BUILD_IN_SOURCE     TRUE
-    INSTALL_COMMAND     install -DT libunfold.so <INSTALL_DIR>/${CMAKE_INSTALL_LIBDIR}/libtunfold.so
-            COMMAND     install TUnfoldV17Dict_rdict.pcm <INSTALL_DIR>/${CMAKE_INSTALL_LIBDIR}/
-            COMMAND     install -d <INSTALL_DIR>/${CMAKE_INSTALL_INCLUDEDIR}/TUnfold
-            COMMAND     install ${TUNFOLD_HEADERS} <INSTALL_DIR>/${CMAKE_INSTALL_INCLUDEDIR}/TUnfold/
-    INSTALL_DIR         "${CMAKE_INSTALL_PREFIX}"
-)
-configure_file(tunfold.xml.in tunfold.xml)
-install(FILES "${CMAKE_BINARY_DIR}/tunfold.xml" TYPE SYSCONF)
+# Create setup.sh for users to source if they didn't provide their own install dir.
+# If they did, we expect them to know what they are doing.
+if ("${CMAKE_INSTALL_PREFIX}" STREQUAL "${CMAKE_SOURCE_DIR}/tools")
+    configure_file(setup.sh.in setup.sh)
+    install(FILES "${CMAKE_CURRENT_BINARY_DIR}/setup.sh" DESTINATION .)
+endif()
+
+configure_file(CMS/das.xml.in CMS/das.xml)
+
+find_package(TUnfold 17.9)
+if (TUNFOLD_FOUND)
+    set(TUNFOLD_DEPENDENCY)  # This is used to make Core depend on TUnfold if we need to install it
+else()
+    message(STATUS "Will install TUnfold")
+    set(TUNFOLD_HEADERS TUnfold.h
+                        TUnfoldBinning.h
+                        TUnfoldBinningXML.h
+                        TUnfoldDensity.h
+                        TUnfoldIterativeEM.h
+                        TUnfoldSys.h
+    )
+    ExternalProject_Add(TUnfold
+        URL                 https://www.desy.de/~sschmitt/TUnfold/TUnfold_V17.9.tgz
+        URL_HASH            SHA256=d7f66f6a0e007eb946180643b8879bb2b8918441106bc0305b82a97391a391dc
+        SOURCE_DIR          "${CMAKE_SOURCE_DIR}/TUnfold"
+        PATCH_COMMAND       "${CMAKE_COMMAND}" -E copy_if_different
+                                    "${CMAKE_SOURCE_DIR}/cmake/TUnfold/CMakeLists.txt"
+                                    "${CMAKE_SOURCE_DIR}/cmake/TUnfold/LinkDef.h.in"
+                                    "${CMAKE_SOURCE_DIR}/cmake/TUnfold/TUnfoldConfig.cmake.in"
+                                    <SOURCE_DIR>
+        CMAKE_ARGS          -DCMAKE_INSTALL_PREFIX=<INSTALL_DIR> -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}
+        BINARY_DIR          "${CMAKE_BINARY_DIR}/TUnfold"
+        INSTALL_DIR         "${CMAKE_INSTALL_PREFIX}"
+    )
+    set(TUNFOLD_DEPENDENCY TUnfold)  # Core needs to depend on TUnfold
+    configure_file(tunfold.xml.in tunfold.xml)
+    install(FILES "${CMAKE_BINARY_DIR}/tunfold.xml" TYPE SYSCONF)
+endif()
 
 ExternalProject_Add(Darwin
     GIT_REPOSITORY      https://gitlab.cern.ch/Proto/Darwin.git
     GIT_TAG             origin/master
+    UPDATE_COMMAND      ""
     SOURCE_DIR          "${CMAKE_SOURCE_DIR}/Darwin"
     CMAKE_ARGS          -DCMAKE_INSTALL_PREFIX=<INSTALL_DIR> -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}
-    BINARY_DIR          "${CMAKE_SOURCE_DIR}/Darwin.build"
+    BINARY_DIR          "${CMAKE_BINARY_DIR}/Darwin"
     INSTALL_DIR         "${CMAKE_INSTALL_PREFIX}"
-    TEST_AFTER_INSTALL  ON
 )
-configure_file(darwin.xml.in darwin.xml)
-install(FILES "${CMAKE_BINARY_DIR}/darwin.xml" TYPE SYSCONF)
 
 ExternalProject_Add(tables
     GIT_REPOSITORY      https://gitlab.cern.ch/cms-analysis/general/DasAnalysisSystem/tables.git
     GIT_TAG             origin/master
+    UPDATE_COMMAND      ""
     SOURCE_DIR          "${CMAKE_SOURCE_DIR}/tables"
     CONFIGURE_COMMAND   "${GIT_EXECUTABLE}" lfs install
     BUILD_COMMAND       ""
     INSTALL_COMMAND     ""
 )
+
+set(CORE_GIT_REPOSITORY https://gitlab.cern.ch/cms-analysis/general/DasAnalysisSystem/Core.git
+    CACHE STRING "Remote to use for Core")
+set(CORE_GIT_TAG migration # TODO: change once https://gitlab.cern.ch/cms-analysis/general/DasAnalysisSystem/Core/-/merge_requests/174 is merged
+    CACHE STRING "Core tag, branch, or commit to use")
+ExternalProject_Add(Core
+    GIT_REPOSITORY      "${CORE_GIT_REPOSITORY}"
+    GIT_TAG             "${CORE_GIT_TAG}"
+    UPDATE_COMMAND      ""
+    SOURCE_DIR          "${CMAKE_SOURCE_DIR}/Core"
+    CMAKE_ARGS          -DCMAKE_INSTALL_PREFIX=<INSTALL_DIR> -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}
+                        ${CL_ARGS}
+    DEPENDS             ${TUNFOLD_DEPENDENCY} Darwin
+    BINARY_DIR          "${CMAKE_BINARY_DIR}/Core"
+    INSTALL_DIR         "${CMAKE_INSTALL_PREFIX}"
+)
+
+set(PREFIX_COMMANDS CMS/scripts/el8 CMS/scripts/das-cmssw) # TODO: apply convention for the use of extensions
+file(COPY ${PREFIX_COMMANDS} DESTINATION "${CMAKE_BINARY_DIR}/bin")
+install(PROGRAMS ${PREFIX_COMMANDS} TYPE BIN COMPONENT ${CMAKE_PROJECT_NAME})
+INSTALL(CODE "execute_process(COMMAND ${CMAKE_COMMAND} -E create_symlink \"\${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_BINDIR}/el8\" \"\${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_BINDIR}/cc7\")")
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000000000000000000000000000000000000..e4d9522d44a5fcc4c621bdad59aa59818a84ce3b
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,21 @@
+# Contributing to Das Analysis System
+
+## Making a new merge request
+
+- In the description of the merge request, describe the issue or point to an existing issue, and describe the solution that you propose with the present merge request.
+- Use the labels:
+   - for the changes (patch by default; minor if n-tuples may need to be reproduced; major if the whole software needs to be reinstalled);
+   - for the status (a priori ongoing, but you may change when appropriate);
+   - for the type (software vs physics);
+   - miscellaneous (e.g. urgent).
+- Open points to be discussed with the reviewer should be described in subsequent threads in the discussion of the merge request.
+
+## Good practices
+
+Fight against the increase of entropy by
+- pulling regularly,
+- committing regularly,
+- documenting your code,
+- implementing tests for your developments!
+
+Happy analysis!
diff --git a/README.md b/README.md
index dc44490569382f3e5d6fdaf704b7d0320303b4ea..dafe77d999dc49a7b2b6464b76477d721440bb67 100644
--- a/README.md
+++ b/README.md
@@ -1,21 +1,27 @@
 # Das Analysis System
 
+[![pipeline status](https://gitlab.cern.ch/cms-analysis/general/DasAnalysisSystem/Core/badges/master/pipeline.svg)](https://gitlab.cern.ch/cms-analysis/general/DasAnalysisSystem/Core/-/commits/master)
+
+
+
 Das Analysis System is a general project involving modular tools for physics analysis with high-level objects such as jets, leptons, and photons. The principle is to perform physics directly from the shell in atomic steps, applying one correction at a time and treating all systematic uncertainties simultaneously.
 
 The GitLab group is divided in several repositories:
-- The `Installer` repo contains the necessary scripts to install the suite.
-- The `Core` repo corresponds to a CMSSW module and is where most of the code for the analysis of CMS data is to be found.
-- The `Tables` repo contains the calibration of the high-level objects.
-- The `Darwin` repo is a mirror of a [general toolkit](https://protodarwin.docs.cern.ch) for physics analysis.
+- The `Installer` repo contains the necessary scripts to install the suite [![pipeline status](https://gitlab.cern.ch/cms-analysis/general/DasAnalysisSystem/gitlab-profile/badges/master/pipeline.svg)](https://gitlab.cern.ch/cms-analysis/general/DasAnalysisSystem/Core).
+- The `Core` repo contains most of the code for the analysis of CMS data [![pipeline status](https://gitlab.cern.ch/cms-analysis/general/DasAnalysisSystem/Core/badges/main/pipeline.svg)](https://gitlab.cern.ch/cms-analysis/general/DasAnalysisSystem/Core).
+- The `Tables` repo contains the calibration of the high-level objects [![pipeline status](https://gitlab.cern.ch/cms-analysis/general/DasAnalysisSystem/Tables/badges/master/pipeline.svg)](https://gitlab.cern.ch/cms-analysis/general/DasAnalysisSystem/Core).
+- The `Darwin` repo is a mirror of a [general toolkit](https://protodarwin.docs.cern.ch) for physics analysis [![pipeline status](https://gitlab.cern.ch/Proto/Darwin/badges/main/pipeline.svg)](https://gitlab.cern.ch/cms-analysis/general/DasAnalysisSystem/Core).
 - `PlottingHelper` is a useful [library](https://github.com/zleba/PlottingHelper) originally made by Radek Zlebcík (Charles University) to help make plots with ROOT.
 
+*Note:* in the `Core` repo, the `Ntupliser` module requires a working container that can only be installed if `/cvmfs` is available. See the dedicated section on containers for more details.
+
 The `Campaigns` subgroup contains actual repos corresponding to different analyses with configs, plotting macros, and possibly CRAB outputs, or anything relevant to the reproducibility of an analysis. A template repo is provided with suggestions and guidelines.
 
 ## Installation
 
 In the following, we provide two alternative methods to install the framework.
 
-### Method #1: From scratch with the default installer
+### Method \#1: From scratch with the default installer
 
 In general, it is recommended that you install the software on a fast disk to ensure fast compilation (e.g. AFS), but that you process the heavy n-tuples on a dedicated area (e.g. NFS at DESY, EOS at CERN). Keeping the software neat and clean is important for reproducibility.
 
@@ -23,74 +29,167 @@ In general, it is recommended that you install the software on a fast disk to en
 ```
 git clone https://gitlab.cern.ch/cms-analysis/general/DasAnalysisSystem/gitlab-profile.git DasAnalysisSystem
 cd DasAnalysisSystem
-source ./setup
 ```
-It is currently working at CERN (CH), at DESY (DE), at IIHE (BE), and for GitLab CI. Feel free to make a merge request to include your favourite facility.
 2. Run the installation:
 ```
+source /cvmfs/sft.cern.ch/lcg/views/LCG_104/x86_64-el9-gcc13-opt/setup.sh
 ./install.sh
 ```
-*Remark*: if a CMSSW release has already been sourced, it will use that one instead of installing a new one.
-3. Then change to the directory of the CMSSW release (by default, it is created in the local directory) and compile as follows:
+3. After a few minutes, you get back to the prompt. You only have to set up the newly compiled environment:
 ```
-cmsenv
-scram b -j$(nproc)
+source tools/setup.sh
+```
+
+### Method \#2: By hand, step by step
+
+The instructions below assume that you have an environment with all the required dependencies or that you know how to install them if CMake complains that they are missing. Recent LCG environments do not require additional setup. They build the software without any special flags, so the compiler will use no optimization and include basic debugging information. You can pass additional settings using the `CMAKE_BUILD_TYPE` variable.
+ 
+We will install the packages in order, starting with Darwin then following with Core. First, we create two folders, one that will contain the build artifacts and one that will contain the installed software. For convenience, we store their location in variables:
+```sh
+# readlink makes the paths absolute
+BUILD_DIR=$(readlink -f ./build)
+INSTALL_DIR=$(readlink -f ./install)
+mkdir -p $BUILD_DIR $INSTALL_DIR
+```
+
+The first step is then to download, compile, and install Darwin:
+```sh
+git clone https://gitlab.cern.ch/Proto/Darwin.git                         # download
+cmake -B $BUILD_DIR/Darwin -S Darwin -DCMAKE_INSTALL_PREFIX=$INSTALL_DIR  # configure
+cmake --build $BUILD_DIR/Darwin --target install -j$(nproc)               # build and install
+```
+
+We then do the same for Core:
+```sh
+git clone https://gitlab.cern.ch/cms-analysis/general/DasAnalysisSystem/Core.git  # download
+cmake -B $BUILD_DIR/Core -S Core -DCMAKE_INSTALL_PREFIX=$INSTALL_DIR              # configure
+cmake --build $BUILD_DIR/Core --target install -j$(nproc)                         # build and install
+```
+
+Finally, we clone the respository containing scale factors and other corrections:
+```sh
+git clone --recursive https://gitlab.cern.ch/cms-analysis/general/DasAnalysisSystem/tables.git
 ```
-4. After a few minutes, you get back to the prompt. You have to rerun `cmsenv` to actually see the commands in the shell. Then you're all set.
 
-### Method #2: By hand, step by step
+This gives us an installation of both Darwin and Core in the install folder. You need to set a few environment variables before you can use them:
+
+| Variable | Value |
+|:-|:-|
+| `PATH` | `$BUILD_DIR/bin:$PATH` |
+| `LD_LIBRARY_PATH` | `$BUILD_DIR/lib64:$LD_LIBRARY_PATH` (note: can be different on some systems) |
+| `PYTHONPATH` | `$BUILD_DIR/python:$PYTHONPATH` |
+| `DARWIN_FIRE_AND_FORGET` | `$BUILD_DIR/lib64` (note: can be different on some systems) | 
+
+In addition, we recommend setting variables for the location of the various repositories:
+| Variable | Value |
+|:-|:-|
+| `DAS_BASE` | `$PWD` |
+| `DARWIN_BASE` | `$PWD/Darwin` |
+| `CORE_BASE` | `$PWD/Core` |
+| `DARWIN_TABLES` | `$PWD/tables` |
 
-In the following, we explain the installation of the framework step by step. A few packages external to CMSSW are necessary (e.g. TUnfold), which you will need to tell CMSSW how to find them with `scram setup`.
 
-First source your CMSSW release and close to a directory where you want to install the external packages.
-1. Install TUnfold:
+### With `micromamba`
+
+Follow this method to install the software on your private machine:
+1. If `micromamba` is not yet available on your machine, you should install it:
 ```
-wget https://www.desy.de/~sschmitt/TUnfold/TUnfold_V17.9.tgz
-mkdir -p TUnfold TUnfold/lib
-mv TUnfold_V17.9.tgz TUnfold
-cd TUnfold
-tar xvzf TUnfold_V17.9.tgz
-make lib TUNFOLDVERSION='V17' -j
-mv -f libunfold.so lib/libtunfold.so
-mv -f TUnfoldV17Dict_rdict.pcm lib/
-(cd $CMSSW_BASE && scram setup $OLDPWD/../tunfold.xml)
+"${SHELL}" <(curl -L micro.mamba.pm/install.sh)
 ```
-2. Tell CMSSW where to find libgit2:
+2. Then create a new environment with all prerequisites:
 ```
-(cd $CMSSW_BASE && scram setup $OLDPWD/libgit2.xml)
+micromamba create -f prerequisites.yml -y
+micromamba activate DAS
 ```
-3. Install Darwin:
+where `DAS` is here just a name, which you can adapt to anything.
+3. Run the installation:
 ```
-git clone https://gitlab.cern.ch/Proto/Darwin.git
-cmake3 -B build -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=$PWD/build/install
-cmake3 --build build -j`nproc`
-cmake3 --build build --target install
-(cd $CMSSW_BASE && scram setup $OLDPWD/../darwin.xml)
+./install.sh
 ```
-4. If you want to rerun jet clustering (e.g. to have AK8 jets at a low transverse momentum), you need to clone the branch `jetToolbox_120X` from the [JetToolbox](https://github.com/cms-jet/JetToolbox.git), and apply a minor patch with the following commands:
+4. After a few minutes, you get back to the prompt. You only have to set up the newly compiled environment:
 ```
-cd $CMSSW_BASE/src/JMEAnalysis/JetToolbox
-patch -p1 $OLDPWD/jetToolbox_120X.patch
+source tools/setup.sh
 ```
-5. Clone the `Tables` wherever you like and the `Core` in CMSSW, and compile as usual with `scram b -j`.
 
-## Setting up the environment
+## Loading the environment in a new session
+
+In both cases, first go to the root directory of `DasAnalysisSystem`.
 
-First source the minimal environment for CMSSW with `source ./setup`, then just source the CMSSW release as usual with `cmsenv`.
+### In a LCG environment
 
-To run CRAB jobs, you will also need to set up a valid [grid certificate](https://twiki.cern.ch/twiki/bin/view/CMSPublic/WorkBookStartingGrid#ObtainingCert). If you already have done so, this will enable it:
+Just run the following:
+```
+source tools/setup.sh
+```
+This will automatically activate the LCG environment that was used for installing.
+
+To run CRAB jobs (only in a LCG environment), you will also need to set up a valid [grid certificate](https://twiki.cern.ch/twiki/bin/view/CMSPublic/WorkBookStartingGrid#ObtainingCert). If you already have done so, this will enable it:
 ```
 voms-proxy-init --rfc --voms cms -valid 192:00
 ```
 
-To make [RUCIO](https://twiki.cern.ch/twiki/bin/viewauth/CMS/Rucio) requests, the `setup` file tried to guess your RUCIO username from your local username (unless was already set up). This is not guaranteed to work and you may have to define your RUCIO username ahead for sourcing the DAS environment.
+To make [RUCIO](https://twiki.cern.ch/twiki/bin/viewauth/CMS/Rucio) requests (only for LCG environment), `tools/setup.sh` tries to guess your RUCIO username from your local username (unless it was already set up). This is not guaranteed to work and you may have to define your RUCIO username before sourcing the DAS environment.
 
-### Good practices
+### With micromamba
+
+Just run the following:
+```
+micromamba activate DAS
+source tools/setup.sh
+```
 
-Fight against the increase of entropy by
-- pulling regularly,
-- committing regularly,
-- document your code,
-- making frequent merge requests.
+## Working with containers
 
-Happy analysis!
+CMSSW is necessary to produce n-tuples containing CMS data. If `/cvmfs` is available on your system, the installer should have installed a version of CMSSW on your system. Several commands are provided to make use of the CMSSW environment.
+
+The CMSSW environment is based on no longer maintained versions of Linux, such as Enterprise Linux 8, whereas most clusters use a more recent operating system, Alma Linux 9. A compatibility layer is thus needed to run CMS software, which is provided by means of "container images". For instance, CMSSW 10 requires CentOS7 (used for UL production), whereas CMSSW 12 requires EL8 (used to compile the n-tupliser). DAS provides commands to start containers, called `cc7` and `el8`, which take no arguments. After running `el8`, you start a shell in the image, then you can source the CMSSW environment as follows:
+```
+cd $DAS_BASE/CMSSW_12_4_0
+cmsenv
+```
+then you should be able to use CMSSW as usual. A similar command, `cc7`, is also available, in case you would need to install an earlier version of CMSSW.
+
+To run single commands, e.g. `scram b`, `dasgoclient`, `cmsRun`, the prefix command `das-cmssw` is also provided: like all prefix commands, it is added to the beginning of the command that you want to execute (e.g. `das-cmssw el8 scram b runtests`). The Darwin and DAS commands are not available in the image; only commands provided by a vanilla CMSSW and by the `Core/Ntupliser` module are available.
+
+The source code of all these commands may be found in `$DAS_BASE/CMS/scripts` (note that `cc7` is only a symlink of `el8`).
+
+*Note:* containers require a certain amount of memory (roughly 2GB). Make sure that your machine has the necessary resources, otherwise certain commands will take forever (or just never end).
+
+## Contributing
+
+### Applying local changes
+
+If you modify part of the source code of a module (e.g. `Core`), you can recompile it as follows:
+```
+cd $DAS_BASE
+cmake --build build/Core --target install
+```
+(This is analogous to `cd $CMSSW_BASE; scram b`.)
+It is good practice to check that your changes do not harm the rest of the code. For this, tests can be run as follows:
+```
+cd $DAS_BASE/build/Core
+ctest
+```
+(analogous to `cd $CMSSW_BASE; scram b runtests`.)
+
+For `cmake` and `ctest`, one can always specify `-j8` to compile with 8 cores (for instance), or more generally `-j$(nproc)` to compile with the number of cores on the present machine.
+
+Despite multitasking, the whole process of compiling and testing may take a few minutes. In case you would like to compile and/or test just one executable or library, do the following (where the `JEC` library is here just taken as an example):
+```
+cd $DAS_BASE
+cmake --build build/Core --target JEC
+```
+(analogous to `cd $CMSSW_BASE/src/Core/JEC; scram b`. Commands will differ if you specified a generator other than GNU Makefiles.)
+
+Whenever developing, it is perfectly fine to only compile subsets of the code to speed up the whole process. Instead, whenever running over large samples, it is advised to commit and to rerun the whole `Core` to ensure compatibility and reproducibility of the results.
+
+### Updating a module
+
+If you want to update the source code of a module (e.g. `Core`), assuming no local changes, you can proceed as follows:
+```
+cd $DAS_BASE/Core
+git pull
+cmake -B $DAS_BASE/build/Core
+cmake --build $DAS_BASE/build/Core --target install
+```
+In case of local changes, you should first commit them, or use `git stash` to pause the changes temporarily (after the update, it will be sufficient to run `git stash pop` to recover your changes).
diff --git a/cmake/ECMFindModuleHelpers.cmake b/cmake/ECMFindModuleHelpers.cmake
new file mode 100644
index 0000000000000000000000000000000000000000..461cc019876211244b25eecc75f1d48cc73828df
--- /dev/null
+++ b/cmake/ECMFindModuleHelpers.cmake
@@ -0,0 +1,278 @@
+# SPDX-FileCopyrightText: 2014 Alex Merry <alex.merry@kde.org>
+#
+# SPDX-License-Identifier: BSD-3-Clause
+
+#[=======================================================================[.rst:
+ECMFindModuleHelpers
+--------------------
+
+Helper macros for find modules: ``ecm_find_package_version_check()``,
+``ecm_find_package_parse_components()`` and
+``ecm_find_package_handle_library_components()``.
+
+::
+
+  ecm_find_package_version_check(<name>)
+
+Prints warnings if the CMake version or the project's required CMake version
+is older than that required by extra-cmake-modules.
+
+::
+
+  ecm_find_package_parse_components(<name>
+      RESULT_VAR <variable>
+      KNOWN_COMPONENTS <component1> [<component2> [...]]
+      [SKIP_DEPENDENCY_HANDLING])
+
+This macro will populate <variable> with a list of components found in
+<name>_FIND_COMPONENTS, after checking that all those components are in the
+list of ``KNOWN_COMPONENTS``; if there are any unknown components, it will print
+an error or warning (depending on the value of <name>_FIND_REQUIRED) and call
+``return()``.
+
+The order of components in <variable> is guaranteed to match the order they
+are listed in the ``KNOWN_COMPONENTS`` argument.
+
+If ``SKIP_DEPENDENCY_HANDLING`` is not set, for each component the variable
+<name>_<component>_component_deps will be checked for dependent components.
+If <component> is listed in <name>_FIND_COMPONENTS, then all its (transitive)
+dependencies will also be added to <variable>.
+
+::
+
+  ecm_find_package_handle_library_components(<name>
+      COMPONENTS <component> [<component> [...]]
+      [SKIP_DEPENDENCY_HANDLING]
+      [SKIP_PKG_CONFIG])
+
+Creates an imported library target for each component.  The operation of this
+macro depends on the presence of a number of CMake variables.
+
+The <name>_<component>_lib variable should contain the name of this library,
+and <name>_<component>_header variable should contain the name of a header
+file associated with it (whatever relative path is normally passed to
+'#include'). <name>_<component>_header_subdir variable can be used to specify
+which subdirectory of the include path the headers will be found in.
+``ecm_find_package_components()`` will then search for the library
+and include directory (creating appropriate cache variables) and create an
+imported library target named <name>::<component>.
+
+Additional variables can be used to provide additional information:
+
+If ``SKIP_PKG_CONFIG`` is not set, the <name>_<component>_pkg_config variable is set, and
+pkg-config is found, the pkg-config module given by
+<name>_<component>_pkg_config will be searched for and used to help locate the
+library and header file.  It will also be used to set
+<name>_<component>_VERSION.
+
+Note that if version information is found via pkg-config,
+<name>_<component>_FIND_VERSION can be set to require a particular version
+for each component.
+
+If ``SKIP_DEPENDENCY_HANDLING`` is not set, the ``INTERFACE_LINK_LIBRARIES`` property
+of the imported target for <component> will be set to contain the imported
+targets for the components listed in <name>_<component>_component_deps.
+<component>_FOUND will also be set to ``FALSE`` if any of the components in
+<name>_<component>_component_deps are not found.  This requires the components
+in <name>_<component>_component_deps to be listed before <component> in the
+``COMPONENTS`` argument.
+
+The following variables will be set:
+
+``<name>_TARGETS``
+  the imported targets
+``<name>_LIBRARIES``
+  the found libraries
+``<name>_INCLUDE_DIRS``
+  the combined required include directories for the components
+``<name>_DEFINITIONS``
+  the "other" CFLAGS provided by pkg-config, if any
+``<name>_VERSION``
+  the value of ``<name>_<component>_VERSION`` for the first component that
+  has this variable set (note that components are searched for in the order
+  they are passed to the macro), although if it is already set, it will not
+  be altered
+
+.. note::
+  These variables are never cleared, so if
+  ``ecm_find_package_handle_library_components()`` is called multiple times with
+  different components (typically because of multiple ``find_package()`` calls) then
+  ``<name>_TARGETS``, for example, will contain all the targets found in any
+  call (although no duplicates).
+
+Since pre-1.0.0.
+#]=======================================================================]
+
+macro(ecm_find_package_version_check module_name)
+    if(CMAKE_VERSION VERSION_LESS 3.16.0)
+        message(FATAL_ERROR "CMake 3.16.0 is required by Find${module_name}.cmake")
+    endif()
+    if(CMAKE_MINIMUM_REQUIRED_VERSION VERSION_LESS 3.16.0)
+        message(AUTHOR_WARNING "Your project should require at least CMake 3.16.0 to use Find${module_name}.cmake")
+    endif()
+endmacro()
+
+macro(ecm_find_package_parse_components module_name)
+    set(ecm_fppc_options SKIP_DEPENDENCY_HANDLING)
+    set(ecm_fppc_oneValueArgs RESULT_VAR)
+    set(ecm_fppc_multiValueArgs KNOWN_COMPONENTS DEFAULT_COMPONENTS)
+    cmake_parse_arguments(ECM_FPPC "${ecm_fppc_options}" "${ecm_fppc_oneValueArgs}" "${ecm_fppc_multiValueArgs}" ${ARGN})
+
+    if(ECM_FPPC_UNPARSED_ARGUMENTS)
+        message(FATAL_ERROR "Unexpected arguments to ecm_find_package_parse_components: ${ECM_FPPC_UNPARSED_ARGUMENTS}")
+    endif()
+    if(NOT ECM_FPPC_RESULT_VAR)
+        message(FATAL_ERROR "Missing RESULT_VAR argument to ecm_find_package_parse_components")
+    endif()
+    if(NOT ECM_FPPC_KNOWN_COMPONENTS)
+        message(FATAL_ERROR "Missing KNOWN_COMPONENTS argument to ecm_find_package_parse_components")
+    endif()
+    if(NOT ECM_FPPC_DEFAULT_COMPONENTS)
+        set(ECM_FPPC_DEFAULT_COMPONENTS ${ECM_FPPC_KNOWN_COMPONENTS})
+    endif()
+
+    if(${module_name}_FIND_COMPONENTS)
+        set(ecm_fppc_requestedComps ${${module_name}_FIND_COMPONENTS})
+
+        if(NOT ECM_FPPC_SKIP_DEPENDENCY_HANDLING)
+            # Make sure deps are included
+            foreach(ecm_fppc_comp ${ecm_fppc_requestedComps})
+                foreach(ecm_fppc_dep_comp ${${module_name}_${ecm_fppc_comp}_component_deps})
+                    list(FIND ecm_fppc_requestedComps "${ecm_fppc_dep_comp}" ecm_fppc_index)
+                    if("${ecm_fppc_index}" STREQUAL "-1")
+                        if(NOT ${module_name}_FIND_QUIETLY)
+                            message(STATUS "${module_name}: ${ecm_fppc_comp} requires ${${module_name}_${ecm_fppc_comp}_component_deps}")
+                        endif()
+                        list(APPEND ecm_fppc_requestedComps "${ecm_fppc_dep_comp}")
+                    endif()
+                endforeach()
+            endforeach()
+        else()
+            message(STATUS "Skipping dependency handling for ${module_name}")
+        endif()
+        list(REMOVE_DUPLICATES ecm_fppc_requestedComps)
+
+        # This makes sure components are listed in the same order as
+        # KNOWN_COMPONENTS (potentially important for inter-dependencies)
+        set(${ECM_FPPC_RESULT_VAR})
+        foreach(ecm_fppc_comp ${ECM_FPPC_KNOWN_COMPONENTS})
+            list(FIND ecm_fppc_requestedComps "${ecm_fppc_comp}" ecm_fppc_index)
+            if(NOT "${ecm_fppc_index}" STREQUAL "-1")
+                list(APPEND ${ECM_FPPC_RESULT_VAR} "${ecm_fppc_comp}")
+                list(REMOVE_AT ecm_fppc_requestedComps ${ecm_fppc_index})
+            endif()
+        endforeach()
+        # if there are any left, they are unknown components
+        if(ecm_fppc_requestedComps)
+            set(ecm_fppc_msgType STATUS)
+            if(${module_name}_FIND_REQUIRED)
+                set(ecm_fppc_msgType FATAL_ERROR)
+            endif()
+            if(NOT ${module_name}_FIND_QUIETLY)
+                message(${ecm_fppc_msgType} "${module_name}: requested unknown components ${ecm_fppc_requestedComps}")
+            endif()
+            return()
+        endif()
+    else()
+        set(${ECM_FPPC_RESULT_VAR} ${ECM_FPPC_DEFAULT_COMPONENTS})
+    endif()
+endmacro()
+
+macro(ecm_find_package_handle_library_components module_name)
+    set(ecm_fpwc_options SKIP_PKG_CONFIG SKIP_DEPENDENCY_HANDLING)
+    set(ecm_fpwc_oneValueArgs)
+    set(ecm_fpwc_multiValueArgs COMPONENTS)
+    cmake_parse_arguments(ECM_FPWC "${ecm_fpwc_options}" "${ecm_fpwc_oneValueArgs}" "${ecm_fpwc_multiValueArgs}" ${ARGN})
+
+    if(ECM_FPWC_UNPARSED_ARGUMENTS)
+        message(FATAL_ERROR "Unexpected arguments to ecm_find_package_handle_components: ${ECM_FPWC_UNPARSED_ARGUMENTS}")
+    endif()
+    if(NOT ECM_FPWC_COMPONENTS)
+        message(FATAL_ERROR "Missing COMPONENTS argument to ecm_find_package_handle_components")
+    endif()
+
+    include(FindPackageHandleStandardArgs)
+    find_package(PkgConfig QUIET)
+    foreach(ecm_fpwc_comp ${ECM_FPWC_COMPONENTS})
+        set(ecm_fpwc_dep_vars)
+        set(ecm_fpwc_dep_targets)
+        if(NOT ECM_FPWC_SKIP_DEPENDENCY_HANDLING)
+            foreach(ecm_fpwc_dep ${${module_name}_${ecm_fpwc_comp}_component_deps})
+                list(APPEND ecm_fpwc_dep_vars "${module_name}_${ecm_fpwc_dep}_FOUND")
+                list(APPEND ecm_fpwc_dep_targets "${module_name}::${ecm_fpwc_dep}")
+            endforeach()
+        endif()
+
+        if(NOT ECM_FPWC_SKIP_PKG_CONFIG AND ${module_name}_${ecm_fpwc_comp}_pkg_config)
+            pkg_check_modules(PKG_${module_name}_${ecm_fpwc_comp} QUIET
+                              ${${module_name}_${ecm_fpwc_comp}_pkg_config})
+        endif()
+
+        find_path(${module_name}_${ecm_fpwc_comp}_INCLUDE_DIR
+            NAMES ${${module_name}_${ecm_fpwc_comp}_header}
+            HINTS ${PKG_${module_name}_${ecm_fpwc_comp}_INCLUDE_DIRS}
+            PATH_SUFFIXES ${${module_name}_${ecm_fpwc_comp}_header_subdir}
+        )
+        find_library(${module_name}_${ecm_fpwc_comp}_LIBRARY
+            NAMES ${${module_name}_${ecm_fpwc_comp}_lib}
+            HINTS ${PKG_${module_name}_${ecm_fpwc_comp}_LIBRARY_DIRS}
+        )
+
+        set(${module_name}_${ecm_fpwc_comp}_VERSION "${PKG_${module_name}_${ecm_fpwc_comp}_VERSION}")
+        if(NOT ${module_name}_VERSION)
+            set(${module_name}_VERSION ${${module_name}_${ecm_fpwc_comp}_VERSION})
+        endif()
+
+        set(FPHSA_NAME_MISMATCHED 1)
+        find_package_handle_standard_args(${module_name}_${ecm_fpwc_comp}
+            FOUND_VAR
+                ${module_name}_${ecm_fpwc_comp}_FOUND
+            REQUIRED_VARS
+                ${module_name}_${ecm_fpwc_comp}_LIBRARY
+                ${module_name}_${ecm_fpwc_comp}_INCLUDE_DIR
+                ${ecm_fpwc_dep_vars}
+            VERSION_VAR
+                ${module_name}_${ecm_fpwc_comp}_VERSION
+            )
+        unset(FPHSA_NAME_MISMATCHED)
+
+        mark_as_advanced(
+            ${module_name}_${ecm_fpwc_comp}_LIBRARY
+            ${module_name}_${ecm_fpwc_comp}_INCLUDE_DIR
+        )
+
+        if(${module_name}_${ecm_fpwc_comp}_FOUND)
+            list(APPEND ${module_name}_LIBRARIES
+                        "${${module_name}_${ecm_fpwc_comp}_LIBRARY}")
+            list(APPEND ${module_name}_INCLUDE_DIRS
+                        "${${module_name}_${ecm_fpwc_comp}_INCLUDE_DIR}")
+            set(${module_name}_DEFINITIONS
+                    ${${module_name}_DEFINITIONS}
+                    ${PKG_${module_name}_${ecm_fpwc_comp}_DEFINITIONS})
+            if(NOT TARGET ${module_name}::${ecm_fpwc_comp})
+                add_library(${module_name}::${ecm_fpwc_comp} UNKNOWN IMPORTED)
+                set_target_properties(${module_name}::${ecm_fpwc_comp} PROPERTIES
+                    IMPORTED_LOCATION "${${module_name}_${ecm_fpwc_comp}_LIBRARY}"
+                    INTERFACE_COMPILE_OPTIONS "${PKG_${module_name}_${ecm_fpwc_comp}_DEFINITIONS}"
+                    INTERFACE_INCLUDE_DIRECTORIES "${${module_name}_${ecm_fpwc_comp}_INCLUDE_DIR}"
+                    INTERFACE_LINK_LIBRARIES "${ecm_fpwc_dep_targets}"
+                )
+            endif()
+            list(APPEND ${module_name}_TARGETS
+                        "${module_name}::${ecm_fpwc_comp}")
+        endif()
+    endforeach()
+    if(${module_name}_LIBRARIES)
+        list(REMOVE_DUPLICATES ${module_name}_LIBRARIES)
+    endif()
+    if(${module_name}_INCLUDE_DIRS)
+        list(REMOVE_DUPLICATES ${module_name}_INCLUDE_DIRS)
+    endif()
+    if(${module_name}_DEFINITIONS)
+        list(REMOVE_DUPLICATES ${module_name}_DEFINITIONS)
+    endif()
+    if(${module_name}_TARGETS)
+        list(REMOVE_DUPLICATES ${module_name}_TARGETS)
+    endif()
+endmacro()
+
diff --git a/cmake/FindTUnfold.cmake b/cmake/FindTUnfold.cmake
new file mode 100644
index 0000000000000000000000000000000000000000..dce15bac350200bee86c2a1981bcd16c08ecc432
--- /dev/null
+++ b/cmake/FindTUnfold.cmake
@@ -0,0 +1,94 @@
+# SPDX-FileCopyrightText: 2014 Alex Merry <alex.merry@kde.org>
+# SPDX-FileCopyrightText: 2014 Martin Gräßlin <mgraesslin@kde.org>
+# SPDX-FileCopyrightText: 2014 Christoph Cullmann <cullmann@kde.org>
+# SPDX-FileCopyrightText: 2024 Louis Moureaux <louis.moureaux@cern.ch>
+#
+# SPDX-License-Identifier: BSD-3-Clause
+
+#[=======================================================================[.rst:
+FindTUnfold
+-----------
+
+Try to find TUnfold on a Unix system.
+
+This will define the following variables:
+
+``TUNFOLD_FOUND``
+    True if (the requested version of) TUnfold is available
+``TUNFOLD_VERSION``
+    The version of TUnfold
+``TUNFOLD_LIBRARIES``
+    This can be passed to target_link_libraries() instead of the ``TUnfold::TUnfold``
+    target
+``TUNFOLD_INCLUDE_DIRS``
+    This should be passed to target_include_directories() if the target is not
+    used for linking
+``TUNFOLD_DEFINITIONS``
+    This should be passed to target_compile_options() if the target is not
+    used for linking
+
+If ``TUNFOLD_FOUND`` is TRUE, it will also define the following imported target:
+
+``TUnfold::TUnfold``
+    The TUnfold library
+
+In general we recommend using the imported target, as it is easier to use.
+Bear in mind, however, that if the target is in the link interface of an
+exported library, it must be made available by the package config file.
+#]=======================================================================]
+
+include(${CMAKE_CURRENT_LIST_DIR}/ECMFindModuleHelpers.cmake)
+
+ecm_find_package_version_check(TUnfold)
+
+find_path(TUNFOLD_INCLUDE_DIR
+    NAMES
+        TUnfold.h
+)
+find_library(TUNFOLD_LIBRARY NAMES TUnfold RooUnfold)
+
+# get version from header, should work on windows, too
+if(TUNFOLD_INCLUDE_DIR)
+    file(STRINGS "${TUNFOLD_INCLUDE_DIR}/TUnfold.h" TUNFOLD_H REGEX "^#define TUnfold_VERSION +\"V[^\"]*\"$")
+
+    string(REGEX REPLACE "^.*TUnfold_VERSION +\"V([0-9]+).*$" "\\1" TUNFOLD_VERSION_MAJOR "${TUNFOLD_H}")
+    string(REGEX REPLACE "^.*TUnfold_VERSION +\"V[0-9]+\\.([0-9]+).*$" "\\1" TUNFOLD_VERSION_MINOR  "${TUNFOLD_H}")
+    set(TUNFOLD_VERSION "${TUNFOLD_VERSION_MAJOR}.${TUNFOLD_VERSION_MINOR}")
+
+    set(TUNFOLD_MAJOR_VERSION "${TUNFOLD_VERSION_MAJOR}")
+    set(TUNFOLD_MINOR_VERSION "${TUNFOLD_VERSION_MINOR}")
+
+    unset(TUNFOLD_H)
+endif()
+
+include(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(TUnfold
+    FOUND_VAR
+        TUNFOLD_FOUND
+    REQUIRED_VARS
+        TUNFOLD_LIBRARY
+        TUNFOLD_INCLUDE_DIR
+    VERSION_VAR
+        TUNFOLD_VERSION
+)
+
+if(TUNFOLD_FOUND AND NOT TARGET TUnfold::TUnfold)
+    add_library(TUnfold::TUnfold UNKNOWN IMPORTED)
+    set_target_properties(TUnfold::TUnfold PROPERTIES
+        IMPORTED_LOCATION "${TUNFOLD_LIBRARY}"
+        INTERFACE_COMPILE_OPTIONS "${TUNFOLD_DEFINITIONS}"
+        INTERFACE_INCLUDE_DIRECTORIES "${TUNFOLD_INCLUDE_DIR}"
+    )
+endif()
+
+mark_as_advanced(TUNFOLD_LIBRARY TUNFOLD_INCLUDE_DIR)
+
+set(TUNFOLD_LIBRARIES ${TUNFOLD_LIBRARY})
+set(TUNFOLD_INCLUDE_DIRS ${TUNFOLD_INCLUDE_DIR})
+
+include(FeatureSummary)
+set_package_properties(TUnfold PROPERTIES
+    URL "https://www.desy.de/~sschmitt/tunfold.html"
+    DESCRIPTION "ROOT-based package to unfold measured distributions (regularized unfolding)."
+)
+
diff --git a/cmake/TUnfold/CMakeLists.txt b/cmake/TUnfold/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..f855ee035c05a984c566ee93447fe65a2bd62c52
--- /dev/null
+++ b/cmake/TUnfold/CMakeLists.txt
@@ -0,0 +1,68 @@
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# SPDX-FileCopyrightText: Louis Moureaux <louis.moureaux@cern.ch>
+
+cmake_minimum_required(VERSION 3.23...3.26 FATAL_ERROR)
+project(TUnfold VERSION 17.9 LANGUAGES CXX)
+
+find_package(ROOT 6.24 REQUIRED)
+
+set(classes
+    TUnfoldBinning
+    TUnfoldBinningXML
+    TUnfoldDensity
+    TUnfoldIterativeEM
+    TUnfoldSys
+    TUnfold)
+
+# Library
+set(sources ${classes})
+list(TRANSFORM sources APPEND V${TUnfold_VERSION_MAJOR}.cxx)
+
+set(headers ${classes})
+list(TRANSFORM headers APPEND .h)
+
+add_library(TUnfold SHARED ${sources})
+target_sources(TUnfold PUBLIC FILE_SET HEADERS FILES ${headers})
+target_link_libraries(TUnfold PUBLIC ROOT::Hist ROOT::XMLParser)
+
+# Install
+include(GNUInstallDirs)
+set(include_dir "${CMAKE_INSTALL_INCLUDEDIR}/TUnfold")
+install(TARGETS TUnfold
+        EXPORT TUnfoldTargets
+        COMPONENT TUnfold
+        FILE_SET HEADERS DESTINATION "${include_dir}"
+        RUNTIME  DESTINATION "${CMAKE_INSTALL_LIBDIR}"
+        INCLUDES DESTINATION "${include_dir}")
+
+# Dictionary
+configure_file(LinkDef.h.in LinkDef.h)
+# https://github.com/root-project/root/issues/8308#issuecomment-1143791946
+set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}")
+root_generate_dictionary(TUnfoldV${TUnfold_VERSION_MAJOR}Dict
+                         ${headers}
+                         MODULE TUnfold
+                         LINKDEF "${CMAKE_CURRENT_BINARY_DIR}/LinkDef.h")
+
+# Export
+include(CMakePackageConfigHelpers)
+set(CMAKECONFIG_INSTALL_DIR "${CMAKE_INSTALL_LIBDIR}/cmake/TUnfold")
+configure_package_config_file(
+    "${CMAKE_CURRENT_SOURCE_DIR}/TUnfoldConfig.cmake.in"
+    "${CMAKE_CURRENT_BINARY_DIR}/TUnfoldConfig.cmake"
+    INSTALL_DESTINATION "${CMAKECONFIG_INSTALL_DIR}"
+)
+write_basic_package_version_file(
+    TUnfoldConfigVersion.cmake
+    COMPATIBILITY SameMajorVersion
+)
+install(FILES
+        "${CMAKE_CURRENT_BINARY_DIR}/TUnfoldConfig.cmake"
+        "${CMAKE_CURRENT_BINARY_DIR}/TUnfoldConfigVersion.cmake"
+        DESTINATION "${CMAKECONFIG_INSTALL_DIR}"
+        COMPONENT Devel)
+install(EXPORT TUnfoldTargets
+        NAMESPACE TUnfold::
+        DESTINATION "${CMAKECONFIG_INSTALL_DIR}"
+        FILE TUnfoldTargets.cmake)
diff --git a/cmake/TUnfold/LinkDef.h.in b/cmake/TUnfold/LinkDef.h.in
new file mode 100644
index 0000000000000000000000000000000000000000..a5bf752dd542787d1c595ca778db7bb6c3b00f65
--- /dev/null
+++ b/cmake/TUnfold/LinkDef.h.in
@@ -0,0 +1,8 @@
+#ifdef __CLING__
+#pragma link C++ class TUnfoldV@TUnfold_VERSION_MAJOR@+;
+#pragma link C++ class TUnfoldIterativeEMV@TUnfold_VERSION_MAJOR@+;
+#pragma link C++ class TUnfoldSysV@TUnfold_VERSION_MAJOR@+;
+#pragma link C++ class TUnfoldDensityV@TUnfold_VERSION_MAJOR@+;
+#pragma link C++ class TUnfoldBinningV@TUnfold_VERSION_MAJOR@+;
+#pragma link C++ class TUnfoldBinningXMLV@TUnfold_VERSION_MAJOR@+;
+#endif
diff --git a/cmake/TUnfold/TUnfoldConfig.cmake.in b/cmake/TUnfold/TUnfoldConfig.cmake.in
new file mode 100644
index 0000000000000000000000000000000000000000..fe637809a5022308419bd94434365c48dbbbcb10
--- /dev/null
+++ b/cmake/TUnfold/TUnfoldConfig.cmake.in
@@ -0,0 +1,3 @@
+include(CMakeFindDependencyMacro)
+find_dependency(ROOT @ROOT_VERSION@)
+include("${CMAKE_CURRENT_LIST_DIR}/@PROJECT_NAME@Targets.cmake")
diff --git a/darwin.xml.in b/darwin.xml.in
deleted file mode 100644
index bd0ae76c02b6298a2055acb4ae0908a1410d151c..0000000000000000000000000000000000000000
--- a/darwin.xml.in
+++ /dev/null
@@ -1,20 +0,0 @@
-<tool name="darwin" version="1.0"> <!-- TODO -->
-  <lib name="DarwinUserInfo"/>
-  <lib name="DarwinMetaInfo"/>
-  <lib name="DarwinOptions"/>
-  <lib name="DarwinObjects"/>
-  <info url="https://protodarwin.docs.cern.ch"/>
-  <client>
-    <environment name="INCLUDE" default="${CMAKE_INSTALL_FULL_INCLUDEDIR}/Darwin"/>
-    <environment name="LIBDIR" default="${CMAKE_INSTALL_FULL_LIBDIR}"/>
-  </client>
-  <use name="boost" />
-  <use name="boost_program_options" />
-  <runtime name="DARWIN_BASE" value="${CMAKE_SOURCE_DIR}/Darwin" type="path"/>
-  <runtime name="DARWIN_FIRE_AND_FORGET" value="$CMSSW_BASE/lib/$SCRAM_ARCH" type="path"/><!-- TODO -->
-  <runtime name="PATH" value="${CMAKE_INSTALL_FULL_BINDIR}" type="path"/>
-  <runtime name="LD_LIBRARY_PATH" value="${CMAKE_INSTALL_FULL_LIBDIR}" type="path"/>
-  <runtime name="PYTHON3PATH" value="${CMAKE_INSTALL_PREFIX}/python" type="path"/>
-  <runtime name="DARWIN_GIT_REPO" value="$CMSSW_BASE/src/Core" type="path"/><!-- TODO -->
-  <runtime name="DARWIN_TABLES" value="${CMAKE_SOURCE_DIR}/tables" type="path"/>
-</tool>
diff --git a/detect_environment.py b/detect_environment.py
new file mode 100644
index 0000000000000000000000000000000000000000..83d864ebe12d58dfcc46dc5e44372f07a25b55f8
--- /dev/null
+++ b/detect_environment.py
@@ -0,0 +1,105 @@
+#! /usr/bin/env python3
+
+import os
+from os.path import abspath, isfile, join
+
+
+def lcg():
+    """
+    Generates the code to set up an LCG environment.
+    """
+
+    # The CMAKE_PREFIX_PATH seems to point to the root of the environment.
+    location = os.environ.get('CMAKE_PREFIX_PATH', '').split(os.pathsep)[0]
+    setup = join(location, 'setup.sh')
+    if not isfile(setup):
+        raise ValueError('Could not find LCG setup.sh')
+
+    return 'source ' + setup
+
+
+def conda_like():
+    r"""
+    Generates the code to set up a conda/mamba/micromamba environment.
+
+    \note Only micromamba is officially supported.
+    """
+
+    env_name = os.environ['CONDA_DEFAULT_ENV']
+
+    # Figure out which variant we're using
+    if 'MAMBA_EXE' in os.environ and 'MAMBA_ROOT_PREFIX' in os.environ:
+        ## \todo Is this also valid for environments created by mamba?
+        template = '''
+# Make sure {mamba_exe_name} is set up
+export MAMBA_EXE='{mamba_exe}';
+export MAMBA_ROOT_PREFIX='{mamba_root}';
+__mamba_setup="$("$MAMBA_EXE" shell hook --shell bash --root-prefix "$MAMBA_ROOT_PREFIX" 2> /dev/null)"
+if [ $? -eq 0 ]; then
+    eval "$__mamba_setup"
+else
+    alias {mamba_exe_name}="$MAMBA_EXE"  # Fallback on help from mamba activate
+fi
+unset __mamba_setup
+
+# Enable the environment
+{mamba_exe_name} activate '{env_name}'
+'''
+        mamba_exe = os.environ['MAMBA_EXE']
+        mamba_root = os.environ['MAMBA_ROOT_PREFIX']
+        mamba_exe_name = 'micromamba' if mamba_exe.endswith('micromamba') else 'mamba'
+        return template.format(mamba_exe=mamba_exe,
+                               mamba_root=mamba_root,
+                               mamba_exe_name=mamba_exe_name,
+                               env_name=env_name)
+    elif 'CONDA_EXE' in os.environ:
+        template = '''
+# Make sure conda is set up
+__conda_setup="$('{conda_exe}' 'shell.bash' 'hook' 2> /dev/null)"
+if [ $? -eq 0 ]; then
+    eval "$__conda_setup"
+else
+    if [ -f "{conda_base}/etc/profile.d/conda.sh" ]; then
+        . "{conda_base}/etc/profile.d/conda.sh"
+    else
+        export PATH="{conda_base}/bin:$PATH"
+    fi
+fi
+unset __conda_setup
+
+# Enable the environment
+conda activate '{env_name}'
+'''
+        conda_exe = os.environ['CONDA_EXE']
+        conda_root = abspath(join(conda_exe, os.pardir, os.pardir))
+        return template.format(conda_exe=conda_exe,
+                               conda_base=conda_root,
+                               env_name=env_name)
+    else:
+        raise ValueError('Unknown conda-like environment')
+
+
+def detect_environment():
+    """
+    Detect the environment in which the command is being run, and return code
+    to set it up again. Returns an empty string on failure.
+    """
+
+    # List of environment variables used to detect which environment is active.
+    detect = {
+        'LCG_VERSION': lcg,
+        'CONDA_DEFAULT_ENV': conda_like,
+    }
+
+    for env_var, function in detect.items():
+        if env_var in os.environ:
+            try:
+                return function()
+            except Exception:
+                pass  # Try other possibilities
+
+    return ''
+
+
+if __name__ == '__main__':
+    print(detect_environment())
diff --git a/install.sh b/install.sh
index efc82cbb49de4b547066c174fe1b12cdb28f9ea1..2e4218c50ef5664846174b9834247e42ef14a56c 100755
--- a/install.sh
+++ b/install.sh
@@ -1,4 +1,4 @@
-#!/bin/sh
+#!/usr/bin/env bash
 
 set -e
 
@@ -28,37 +28,25 @@ if [ ! -z "$(git config url.ssh://git@gitlab.cern.ch:7999/.insteadOf)" ]; then
     ssh-add
 fi
 
-# Get a CMSSW if we don't have one
-if [ -z $CMSSW_BASE ]; then
-    version=CMSSW_12_4_0
-    [ ! -d $version ] && scram p $version
-    cd $version
-    eval `scramv1 runtime -sh`
-    unset version
-fi
-
-# Set up libgit2
-cd $CMSSW_BASE
-scram setup $BASE_DIR/libgit2.xml
+set -x
 
 # Download and build supporting tools
-cd $BASE_DIR
-cmake3 -B .build
-cmake3 --build .build --target install -j$(nproc)
+cmake -B build $(correction config --cmake) "$@"
+cmake --build build --target install -j$(nproc)
 
-# Get Core
-cd $CMSSW_BASE/src
-[ ! -d Core ] && git clone https://gitlab.cern.ch/cms-analysis/general/DasAnalysisSystem/Core.git
+source tools/setup.sh
 
-# And JetToolbox
-[ ! -d JMEAnalysis/JetToolbox ] && (
-    git clone https://github.com/cms-jet/JetToolbox.git JMEAnalysis/JetToolbox -b jetToolbox_120X
-    cd JMEAnalysis/JetToolbox
-    patch -p1 <$BASE_DIR/jetToolbox_120X.patch
-)
+if [ ! -d /cvmfs ]; then
+    echo "Cannot install the Ntupliser without access to /cvmfs"
+    exit
+fi
+if ! which apptainer &>/dev/null ; then
+    echo "Cannot install the Ntupliser without apptainer"
+    exit
+fi
 
-# Set up scram tools in CMSSW -- must be done after fetching Core
-cd $CMSSW_BASE
-for xml in $BASE_DIR/tools/etc/*.xml; do
-    scram setup $xml
-done
+# install CMSSW in a container
+# - 1st arg: CMSSW version
+# - 2nd arg: corresponding jet toolbox branch
+el8 CMS/install_CMSSW.sh CMSSW_12_4_0 jetToolbox_120X
+# cc7 CMS/install_CMSSW.sh CMSSW_10_6_30 jetToolbox_102X
diff --git a/libgit2.xml b/libgit2.xml
deleted file mode 100644
index be0c723e88d3a64400aecc762c2a0c0b77edc99b..0000000000000000000000000000000000000000
--- a/libgit2.xml
+++ /dev/null
@@ -1,9 +0,0 @@
-<tool name="libgit2" version="1.6">
-  <lib name="git2"/>
-  <info url="https://libgit2.org/"/>
-  <client>
-    <environment name="INCLUDE" default="/cvmfs/sft.cern.ch/lcg/views/LCG_100/x86_64-centos7-gcc9-opt/include"/>
-    <environment name="LIBDIR"  default="/cvmfs/sft.cern.ch/lcg/releases/libgit2/1.0.1-eb69e/x86_64-centos7-gcc9-opt/lib64/"/>
-  </client>
-  <runtime name="LD_LIBRARY_PATH" value="/cvmfs/sft.cern.ch/lcg/releases/libgit2/1.0.1-eb69e/x86_64-centos7-gcc9-opt/lib64/" type="path"/>
-</tool>
diff --git a/prerequisites.yml b/prerequisites.yml
new file mode 100644
index 0000000000000000000000000000000000000000..d97bcc7ee912221104341773ecdb0576aeb299c0
--- /dev/null
+++ b/prerequisites.yml
@@ -0,0 +1,17 @@
+name: DAS
+channels:
+  - conda-forge
+dependencies:
+  - zsh
+  - python >= 3.9
+  - cmake >= 3.26
+  - boost >= 1.78
+  - libgit2
+  - root
+  - gxx
+  - correctionlib
+  - eigen
+  - git-lfs
+  - doxygen
+  - pip:
+      - git+https://gitlab.cern.ch/RooUnfold/RooUnfold@master
diff --git a/setup b/setup
deleted file mode 100644
index 9f22c47c24f83ed325e2baa9355ae741951e98d4..0000000000000000000000000000000000000000
--- a/setup
+++ /dev/null
@@ -1,35 +0,0 @@
-case "$(hostname)" in
-    *desy.de)
-        tput setaf 2
-        echo "Setting up environment for DESY (naf)."
-        tput op
-        module use -a /afs/desy.de/group/cms/modulefiles/
-        module load cmssw
-        source /cvmfs/grid.desy.de/etc/profile.d/grid-ui-env.sh
-        ;;
-    *cern.ch)
-        tput setaf 2
-        echo "Setting up environment for CERN (lxplus)."
-        tput op
-        ;;
-    *iihe.ac.be)
-        tput setaf 2
-        echo "Setting up environment for IIHE T2B cluster."
-        tput op
-        ;;
-    *)
-        tput setaf 3
-        echo "Unknown facility. Proceed at your own risks."
-        tput op
-        ;;
-esac
-
-source /cvmfs/cms.cern.ch/cmsset_default.sh
-source /cvmfs/cms.cern.ch/rucio/setup-py3.sh
-source /cvmfs/cms.cern.ch/crab3/crab.sh
-
-if [ -n "$RUCIO_ACCOUNT" ]
-then
-    echo "\$RUCIO_ACCOUNT not found in environment. Setting up with \$USER."
-    export RUCIO_ACCOUNT=$USER
-fi
diff --git a/setup.sh.in b/setup.sh.in
new file mode 100644
index 0000000000000000000000000000000000000000..db2d061e75508e5061faccda93adabdf99851d24
--- /dev/null
+++ b/setup.sh.in
@@ -0,0 +1,43 @@
+@ENVIRONMENT_SETUP@
+
+# Technically needed
+export PATH=@CMAKE_INSTALL_FULL_BINDIR@:$PATH
+export LD_LIBRARY_PATH=@CMAKE_INSTALL_FULL_LIBDIR@:$(correction config --libdir):$LD_LIBRARY_PATH
+export PYTHONPATH=@CMAKE_INSTALL_PREFIX@/python:$PYTHONPATH
+export DARWIN_FIRE_AND_FORGET=@CMAKE_INSTALL_FULL_LIBDIR@  # TODO use ldd or link statically
+
+# Convenience
+export DAS_BASE=@CMAKE_SOURCE_DIR@
+export DARWIN_BASE=@CMAKE_SOURCE_DIR@/Darwin
+export DARWIN_TABLES=@CMAKE_SOURCE_DIR@/tables
+export CORE_BASE=@CMAKE_SOURCE_DIR@/Core
+
+# Special rules for compute centers
+if [ ! "$CI" = "true" ] &&        # Do not run this on CI jobs
+   [ -d /cvmfs/grid.cern.ch ] &&  # Only run if /cvmfs/grid.cern.ch is mounted
+   [ -f /etc/redhat-release ]     # Only run on Red Hat and derivatives
+then
+    # Some sites need special instructions
+    case "$(hostname)" in
+        *desy.de)
+            tput setaf 2
+            echo "Setting up environment for DESY (naf)."
+            tput op
+            source /cvmfs/grid.desy.de/etc/profile.d/grid-ui-env.sh
+            ;;
+        *)
+            # Known to be sufficient at CERN (lxplus) and IIHE (T2B).
+            tput setaf 2
+            echo "Setting up environment for a generic facility."
+            tput op
+            ;;
+    esac
+
+    # Rucio for dataset transfers
+    source /cvmfs/cms.cern.ch/rucio/setup-py3.sh
+    if [ -z "$RUCIO_ACCOUNT" ]
+    then
+        echo "\$RUCIO_ACCOUNT not found in environment. Setting up with \$USER."
+        export RUCIO_ACCOUNT=$USER
+    fi
+fi
diff --git a/tunfold.xml.in b/tunfold.xml.in
deleted file mode 100644
index f19a1e205cc62b5a6fd23bf95ea65b468e5257fa..0000000000000000000000000000000000000000
--- a/tunfold.xml.in
+++ /dev/null
@@ -1,13 +0,0 @@
-<tool name="tunfold" version="17.9">
-  <lib name="tunfold"/>
-  <info url="https://www.desy.de/~sschmitt/tunfold.html"/>
-  <client>
-    <environment name="TUNFOLD_BASE" default="${CMAKE_SOURCE_DIR}/TUnfold"/><!-- FIXME Needed? -->
-    <environment name="INCLUDE" default="${CMAKE_INSTALL_FULL_INCLUDEDIR}/TUnfold"/>
-    <environment name="LIBDIR"  default="${CMAKE_INSTALL_FULL_LIBDIR}"/>
-  </client>
-  <use name="root_cxxdefaults"/>
-  <lib name="XMLIO" />
-  <lib name="XMLParser" />
-  <runtime name="LD_LIBRARY_PATH" value="$LIBDIR" type="path"/>
-</tool>
diff --git a/uninstall.sh b/uninstall.sh
new file mode 100755
index 0000000000000000000000000000000000000000..5bf4f5d0a68ed5945726a80bc7efb21d1146859a
--- /dev/null
+++ b/uninstall.sh
@@ -0,0 +1,11 @@
+#!/usr/bin/env bash
+
+set -e
+
+rm -rf CMSSW*/ build/ *.build/ tools/
+
+## \note This only works if the code has been installed in the present directory
+
+## \todo Also remove Core, Darwin, tables repos? (check in case of unpushed changes?)
+
+## \todo Unset the environment