From 43614079e58386b5dccdd9e54f502dcd642f8a40 Mon Sep 17 00:00:00 2001
From: Tulay Cuhadar Donszelmann <tcuhadar@cern.ch>
Date: Tue, 10 Sep 2019 14:41:04 +0200
Subject: [PATCH] Clean up ART package: remove the software and keep only the CMake configuration

---
 Tools/ART/ARTEnvironmentConfig.cmake      |    6 +-
 Tools/ART/CMakeLists.txt                  |   10 +-
 Tools/ART/LICENSE-MIT                     |   21 -
 Tools/ART/python/ART/__init__.py          |   12 -
 Tools/ART/python/ART/art_base.py          |  255 -----
 Tools/ART/python/ART/art_build.py         |  186 ----
 Tools/ART/python/ART/art_configuration.py |  125 ---
 Tools/ART/python/ART/art_grid.py          | 1089 ---------------------
 Tools/ART/python/ART/art_header.py        |  167 ----
 Tools/ART/python/ART/art_misc.py          |  281 ------
 Tools/ART/python/ART/art_rucio.py         |  268 -----
 Tools/ART/python/ART/docopt.py            |  590 -----------
 Tools/ART/python/ART/docopt_dispatch.py   |   53 -
 Tools/ART/scripts/art-clean.py            |  208 ----
 Tools/ART/scripts/art-diff.py             |  278 ------
 Tools/ART/scripts/art-download.sh         |   36 -
 Tools/ART/scripts/art-internal.py         |  106 --
 Tools/ART/scripts/art-share.py            |  178 ----
 Tools/ART/scripts/art-task-build.sh       |   76 --
 Tools/ART/scripts/art-task-grid.sh        |  183 ----
 Tools/ART/scripts/art.py                  |  259 -----
 Tools/ART/share/localSetupART.sh          |    4 -
 22 files changed, 4 insertions(+), 4387 deletions(-)
 delete mode 100644 Tools/ART/LICENSE-MIT
 delete mode 100644 Tools/ART/python/ART/__init__.py
 delete mode 100755 Tools/ART/python/ART/art_base.py
 delete mode 100644 Tools/ART/python/ART/art_build.py
 delete mode 100644 Tools/ART/python/ART/art_configuration.py
 delete mode 100644 Tools/ART/python/ART/art_grid.py
 delete mode 100644 Tools/ART/python/ART/art_header.py
 delete mode 100644 Tools/ART/python/ART/art_misc.py
 delete mode 100755 Tools/ART/python/ART/art_rucio.py
 delete mode 100644 Tools/ART/python/ART/docopt.py
 delete mode 100644 Tools/ART/python/ART/docopt_dispatch.py
 delete mode 100755 Tools/ART/scripts/art-clean.py
 delete mode 100755 Tools/ART/scripts/art-diff.py
 delete mode 100755 Tools/ART/scripts/art-download.sh
 delete mode 100755 Tools/ART/scripts/art-internal.py
 delete mode 100755 Tools/ART/scripts/art-share.py
 delete mode 100755 Tools/ART/scripts/art-task-build.sh
 delete mode 100755 Tools/ART/scripts/art-task-grid.sh
 delete mode 100755 Tools/ART/scripts/art.py
 delete mode 100644 Tools/ART/share/localSetupART.sh

diff --git a/Tools/ART/ARTEnvironmentConfig.cmake b/Tools/ART/ARTEnvironmentConfig.cmake
index f448229bf523..dca7bf8f2696 100644
--- a/Tools/ART/ARTEnvironmentConfig.cmake
+++ b/Tools/ART/ARTEnvironmentConfig.cmake
@@ -1,12 +1,12 @@
-# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
 #
 # This module is used to set up the environment for ART inputs
 #
-# author :  Attila Krasznahorkay <attila.krasznahorkay@cern.ch>, Tulay Cuhadar Donszelmann <tcuhadar@cern.ch> 
+# author :  Attila Krasznahorkay <attila.krasznahorkay@cern.ch>, Tulay Cuhadar Donszelmann <tcuhadar@cern.ch>
 
 # Set the environment variable(s):
 set( ARTENVIRONMENT_ENVIRONMENT
-   APPEND DATAPATH "/eos/atlas/atlascerngroupdisk/data-art/grid-input" )
+   APPEND DATAPATH "/cvmfs/atlas-nightlies.cern.ch/repo/data/data-art" )
 
 # Silently declare the module found:
 set( ARTENVIRONMENT_FOUND TRUE )
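
The module above appends a cvmfs location to DATAPATH, the colon-separated
search path that ATLAS tools walk to locate input data files. A minimal
sketch of such a lookup, assuming a plain os.environ-based resolver rather
than the real ATLAS PathResolver:

    import os

    def find_data_file(name):
        """Return the first match for 'name' on the DATAPATH search path, or None."""
        for directory in os.environ.get('DATAPATH', '').split(':'):
            candidate = os.path.join(directory, name)
            if os.path.isfile(candidate):
                return candidate
        return None

    # After this change, ART grid inputs resolve from
    # /cvmfs/atlas-nightlies.cern.ch/repo/data/data-art instead of eos.
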
diff --git a/Tools/ART/CMakeLists.txt b/Tools/ART/CMakeLists.txt
index ef6eee55e8c1..8715bdea4a6c 100644
--- a/Tools/ART/CMakeLists.txt
+++ b/Tools/ART/CMakeLists.txt
@@ -1,4 +1,4 @@
-# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
 #
 # CMake configuration file for the ART package.
 #
@@ -7,14 +7,6 @@
 # Declare the package name:
 atlas_subdir( ART )
 
-# Declare the package's dependencies: 
-atlas_depends_on_subdirs( PRIVATE
-                          TestPolicy )
-
-# Install files from the package:
-atlas_install_python_modules( python/ART/*.py )
-atlas_install_scripts( scripts/*.py scripts/*.sh )
-
 # Set up the general runtime environment (by Attila Krasznahorkay <attila.krasznahorkay@cern.ch>):
 set( ARTEnvironment_DIR ${CMAKE_CURRENT_SOURCE_DIR}
    CACHE PATH "Location of ARTEnvironmentConfig.cmake" )
diff --git a/Tools/ART/LICENSE-MIT b/Tools/ART/LICENSE-MIT
deleted file mode 100644
index 501495f7f14c..000000000000
--- a/Tools/ART/LICENSE-MIT
+++ /dev/null
@@ -1,21 +0,0 @@
-Permission is hereby granted, free of charge, to any person
-obtaining a copy of this software and associated
-documentation files (the "Software"), to deal in the Software
-without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense,
-and/or sell copies of the Software, and to permit persons to
-whom the Software is furnished to do so, subject to the
-following conditions:
-
-The above copyright notice and this permission notice shall
-be included in all copies or substantial portions of the
-Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
-KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
-PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/Tools/ART/python/ART/__init__.py b/Tools/ART/python/ART/__init__.py
deleted file mode 100644
index 4662af8a69f7..000000000000
--- a/Tools/ART/python/ART/__init__.py
+++ /dev/null
@@ -1,12 +0,0 @@
-# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
-"""
-Import default Classes.
-
-Allows one to do:
-
-from ART import ArtBase
-"""
-
-from art_base import ArtBase  # noqa: F401
-from art_build import ArtBuild  # noqa: F401
-from art_grid import ArtGrid  # noqa: F401
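
The re-exports in the deleted __init__.py use Python 2 implicit relative
imports, which fail under Python 3. A sketch of the explicit form the same
file would need there, assuming the package layout is unchanged:

    # Python 3 compatible form of the re-exports above (hypothetical; the
    # package itself was Python 2 only):
    from .art_base import ArtBase    # noqa: F401
    from .art_build import ArtBuild  # noqa: F401
    from .art_grid import ArtGrid    # noqa: F401
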
diff --git a/Tools/ART/python/ART/art_base.py b/Tools/ART/python/ART/art_base.py
deleted file mode 100755
index fb7d57d2ce3b..000000000000
--- a/Tools/ART/python/ART/art_base.py
+++ /dev/null
@@ -1,255 +0,0 @@
-#!/usr/bin/env python
-# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
-"""Base class for grid and (local) build submits."""
-
-__author__ = "Tulay Cuhadar Donszelmann <tcuhadar@cern.ch>"
-
-import fnmatch
-import inspect
-import json
-import logging
-import os
-import re
-
-try:
-    import scandir as scan
-except ImportError:
-    import os as scan
-
-from art_configuration import ArtConfiguration
-# from art_diff import ArtDiff
-from art_header import ArtHeader
-from art_misc import is_exe, run_command
-
-MODULE = "art.base"
-
-
-class ArtBase(object):
-    """Base class for grid and (local) build submits."""
-
-    def __init__(self, art_directory):
-        """Keep arguments."""
-        self.art_directory = art_directory
-
-    def task_list(self, job_type, sequence_tag):
-        """Default implementation."""
-        self.not_implemented()
-
-    def task(self, package, job_type, sequence_tag):
-        """Default implementation."""
-        self.not_implemented()
-
-    def job(self, package, job_type, sequence_tag, index, out):
-        """Default implementation."""
-        self.not_implemented()
-
-    def compare(self, package, test_name, days, file_names):
-        """Default implementation."""
-        self.not_implemented()
-
-    def list(self, package, job_type, json_format=False):
-        """Default implementation."""
-        self.not_implemented()
-
-    def log(self, package, test_name):
-        """Default implementation."""
-        self.not_implemented()
-
-    def output(self, package, test_name, file_name):
-        """Default implementation."""
-        self.not_implemented()
-
-    def validate(self, script_directory):
-        """Validate all tests in given script_directory."""
-        log = logging.getLogger(MODULE)
-        directories = self.get_test_directories(script_directory.rstrip("/"))
-
-        found_test = False
-        for directory in directories.itervalues():
-            files = self.get_files(directory)
-            for fname in files:
-                test_name = os.path.join(directory, fname)
-                found_test = True
-                log.debug(test_name)
-                if not is_exe(test_name):
-                    log.error("%s is not executable.", test_name)
-                ArtHeader(test_name).validate()
-
-        if not found_test:
-            log.warning('No scripts found in %s directory', script_directory)
-            return 0
-
-        log.info("Scripts in %s directory are validated", script_directory)
-        return 0
-
-    def included(self, script_directory, job_type, index_type, nightly_release, project, platform):
-        """Print all included tests for these arguments."""
-        log = logging.getLogger(MODULE)
-        directories = self.get_test_directories(script_directory.rstrip("/"))
-        for directory in directories.itervalues():
-            files = self.get_files(directory, job_type, index_type)
-            for fname in files:
-                test_name = os.path.join(directory, fname)
-                if self.is_included(test_name, nightly_release, project, platform):
-                    log.info("%s %s", test_name, ArtHeader(test_name).get(ArtHeader.ART_INCLUDE))
-        return 0
-
-    def config(self, package, nightly_release, project, platform, config):
-        """Show configuration."""
-        log = logging.getLogger(MODULE)
-        config = ArtConfiguration(config)
-        if package is None:
-            log.info("%s", config.packages())
-            return 0
-
-        keys = config.keys(nightly_release, project, platform, package)
-        for key in keys:
-            log.info("%s %s", key, config.get(nightly_release, project, platform, package, key))
-        return 0
-
-    #
-    # Default implementations
-    #
-    def compare_ref(self, path, ref_path, files, entries=-1, mode='detailed'):
-        """TBD."""
-        result = 0
-
-        (exit_code, out, err, command, start_time, end_time) = run_command(' '.join(("art-diff.py", "--diff-type=diff-pool", path, ref_path)))
-        if exit_code != 0:
-            result |= exit_code
-            print err
-        print out
-
-        (exit_code, out, err, command, start_time, end_time) = run_command(' '.join(("art-diff.py", "--diff-type=diff-root", "--mode=" + mode, "--entries=" + str(entries), (' '.join(('--file=' + s for s in files))), path, ref_path)))
-        if exit_code != 0:
-            result |= exit_code
-            print err
-        print out
-
-        return result
-
-    #
-    # Protected Methods
-    #
-    @staticmethod
-    def get_art_results(output):
-        """
-        Extract art-results.
-
-        find all
-        'art-result: x' or 'art-result: x name' or 'art-result: [x]'
-        and append them to result list
-        """
-        result = []
-        for line in output.splitlines():
-            match = re.search(r"art-result: (\d+)\s*(.*)", line)
-            if match:
-                item = json.loads(match.group(1))
-                name = match.group(2)
-                result.append({'name': name, 'result': item})
-            else:
-                match = re.search(r"art-result: (\[.*\])", line)
-                if match:
-                    array = json.loads(match.group(1))
-                    for item in array:
-                        result.append({'name': '', 'result': item})
-
-        return result
-
-    def get_files(self, directory, job_type=None, index_type="all", nightly_release=None, project=None, platform=None):
-        """
-        Return a list of all test files matching 'test_*.sh' of given 'job_type', 'index_type' and nightly/project/platform.
-
-        'job_type' can be 'grid' or 'build', given by the test
-
-        'index_type' can be 'all', 'batch' or 'single'.
-
-        Only the filenames are returned.
-        """
-        result = []
-        if directory is not None:
-            files = os.listdir(directory)
-            files.sort()
-            for fname in files:
-
-                # is not a test ?
-                if not fnmatch.fnmatch(fname, 'test_*.sh') and not fnmatch.fnmatch(fname, 'test_*.py'):
-                    continue
-
-                test_name = os.path.join(directory, fname)
-
-                has_art_input = ArtHeader(test_name).get(ArtHeader.ART_INPUT) is not None
-                has_art_athena_mt = ArtHeader(test_name).get(ArtHeader.ART_ATHENA_MT) > 0
-
-                # SKIP if is not of correct type
-                if job_type is not None and ArtHeader(test_name).get(ArtHeader.ART_TYPE) != job_type:
-                    continue
-
-                # SKIP if is not included in nightly_release, project, platform
-                if nightly_release is not None and not self.is_included(test_name, nightly_release, project, platform):
-                    continue
-
-                # SKIP if batch and the test specifies art-input or art-athena-mt
-                if index_type == "batch" and (has_art_input or has_art_athena_mt):
-                    continue
-
-                # SKIP if single and the test does NOT specify art-input or art-athena-mt
-                if index_type == "single" and not (has_art_input or has_art_athena_mt):
-                    continue
-
-                result.append(fname)
-
-        return result
-
-    def get_type(self, directory, test_name):
-        """Return the 'job_type' of a test."""
-        return ArtHeader(os.path.join(directory, test_name)).get(ArtHeader.ART_TYPE)
-
-    def get_test_directories(self, directory):
-        """
-        Search from '<directory>...' for '<package>/test' directories.
-
-        A dictionary key=<package>, value=<directory> is returned
-        """
-        result = {}
-        for root, dirs, files in scan.walk(directory):
-            # exclude some directories
-            dirs[:] = [d for d in dirs if not d.endswith('_test.dir')]
-            if root.endswith('/test'):
-                package = os.path.basename(os.path.dirname(root))
-                result[package] = root
-        return result
-
-    def get_list(self, directory, package, job_type, index_type):
-        """Return a list of tests for a particular package."""
-        test_directories = self.get_test_directories(directory)
-        test_dir = test_directories[package]
-        return self.get_files(test_dir, job_type, index_type)
-
-    def is_included(self, test_name, nightly_release, project, platform):
-        """Return true if a match is found for test_name in nightly_release, project, platform."""
-        patterns = ArtHeader(test_name).get(ArtHeader.ART_INCLUDE)
-
-        for pattern in patterns:
-            nightly_release_pattern = "*"
-            project_pattern = "*"
-            platform_pattern = "*-*-*-*"
-
-            count = pattern.count('/')
-            if count >= 2:
-                (nightly_release_pattern, project_pattern, platform_pattern) = pattern.split('/', 2)
-            elif count == 1:
-                (nightly_release_pattern, project_pattern) = pattern.split('/', 1)
-            else:
-                nightly_release_pattern = pattern
-
-            if fnmatch.fnmatch(nightly_release, nightly_release_pattern) and fnmatch.fnmatch(project, project_pattern) and fnmatch.fnmatch(platform, platform_pattern):
-                return True
-        return False
-
-    #
-    # Private Methods
-    #
-    def not_implemented(self):
-        """Default Not Implemented Method."""
-        raise NotImplementedError("Class %s doesn't implement method: %s(...)" % (self.__class__.__name__, inspect.stack()[1][3]))
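
ArtBase.get_art_results above accepts three shapes of 'art-result:' lines.
A self-contained, Python 3 runnable sketch of the same parsing:

    import json
    import re

    def get_art_results(output):
        """Collect 'art-result: x', 'art-result: x name' and 'art-result: [x, ...]' lines."""
        result = []
        for line in output.splitlines():
            match = re.search(r"art-result: (\d+)\s*(.*)", line)
            if match:
                result.append({'name': match.group(2), 'result': json.loads(match.group(1))})
                continue
            match = re.search(r"art-result: (\[.*\])", line)
            if match:
                for item in json.loads(match.group(1)):
                    result.append({'name': '', 'result': item})
        return result

    print(get_art_results("art-result: 0 checkFile\nart-result: [1, 2]"))
    # -> [{'name': 'checkFile', 'result': 0}, {'name': '', 'result': 1}, {'name': '', 'result': 2}]
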
diff --git a/Tools/ART/python/ART/art_build.py b/Tools/ART/python/ART/art_build.py
deleted file mode 100644
index 500b264600c3..000000000000
--- a/Tools/ART/python/ART/art_build.py
+++ /dev/null
@@ -1,186 +0,0 @@
-#!/usr/bin/env python
-# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
-"""Class for (local) build submits."""
-
-__author__ = "Tulay Cuhadar Donszelmann <tcuhadar@cern.ch>"
-
-import collections
-import concurrent.futures
-import fnmatch
-import json
-import logging
-import multiprocessing
-import os
-import socket
-
-from art_misc import memory, mkdir, run_command, GByte
-from art_base import ArtBase
-from art_header import ArtHeader
-
-MODULE = "art.build"
-
-
-def run_job(art_directory, sequence_tag, script_directory, package, job_type, job_index, test_name):
-    """
-    Job to be run by parallel or serial scheduler.
-
-    Needs to be defined outside a class.
-    Names of arguments are important, see call to scheduler.
-    """
-    # <script_directory> <sequence_tag> <package> <outfile> <job_type> <job_index>
-    log = logging.getLogger(MODULE)
-    log.info("job started %s %s %s %s %s %d %s", art_directory, sequence_tag, script_directory, package, job_type, job_index, test_name)
-    (exit_code, out, err, command, start_time, end_time) = run_command(' '.join((os.path.join(art_directory, './art-internal.py'), "build", "job", script_directory, sequence_tag, package, "out", job_type, str(job_index))))
-    log.info("job ended %s %s %s %s %s %d %s", art_directory, sequence_tag, script_directory, package, job_type, job_index, test_name)
-
-    return (package, test_name, exit_code, out, err, start_time, end_time)
-
-
-class ArtBuild(ArtBase):
-    """Class for (local) build submits."""
-
-    def __init__(self, art_directory, nightly_release, project, platform, nightly_tag, script_directory, max_jobs=0, ci=False):
-        """Keep arguments."""
-        super(ArtBuild, self).__init__(art_directory)
-        log = logging.getLogger(MODULE)
-        log.debug("ArtBuild %s %s %d", art_directory, script_directory, max_jobs)
-        self.art_directory = art_directory
-        self.script_directory = script_directory.rstrip("/")
-        self.nightly_release = nightly_release
-        self.project = project
-        self.platform = platform
-        self.nightly_tag = nightly_tag
-        mem = memory(GByte)
-        max_cores = min(mem / 4, multiprocessing.cpu_count())
-        max_cores = max_cores if max_cores >= 4 else 1
-        self.max_jobs = max_cores if max_jobs <= 0 else max_jobs
-        self.ci = ci
-
-    def task_list(self, job_type, sequence_tag):
-        """Run a list of packages for given job_type with sequence_tag."""
-        log = logging.getLogger(MODULE)
-        log.debug("task_list %s %s", job_type, sequence_tag)
-        test_directories = self.get_test_directories(self.script_directory)
-        if not test_directories:
-            log.warning('No tests found in directories ending in "test"')
-
-        log.info("Executor started with %d threads", self.max_jobs)
-        executor = concurrent.futures.ThreadPoolExecutor(max_workers=self.max_jobs)
-        future_set = []
-
-        for package, directory in test_directories.items():
-            future_set.extend(self.task(executor, package, job_type, sequence_tag))
-
-        # Create status of all packages
-        status = collections.defaultdict(lambda: collections.defaultdict(lambda: collections.defaultdict()))
-
-        # Some release information
-        status['release_info']['nightly_release'] = self.nightly_release
-        status['release_info']['nightly_tag'] = self.nightly_tag
-        status['release_info']['project'] = self.project
-        status['release_info']['platform'] = self.platform
-        status['release_info']['hostname'] = socket.gethostname()
-
-        # Package information with all tests in each package
-        for future in concurrent.futures.as_completed(future_set):
-            (package, test_name, exit_code, out, err, start_time, end_time) = future.result()
-            log.debug("Handling job for %s %s", package, test_name)
-            status[package][test_name]['exit_code'] = exit_code
-            status[package][test_name]['start_time'] = start_time.strftime('%Y-%m-%dT%H:%M:%S')
-            status[package][test_name]['end_time'] = end_time.strftime('%Y-%m-%dT%H:%M:%S')
-            status[package][test_name]['start_epoch'] = start_time.strftime('%s')
-            status[package][test_name]['end_epoch'] = end_time.strftime('%s')
-
-            test_directory = os.path.abspath(test_directories[package])
-            fname = os.path.join(test_directory, test_name)
-            if os.path.exists(fname):
-                status[package][test_name]['description'] = ArtHeader(fname).get(ArtHeader.ART_DESCRIPTION)
-            else:
-                log.warning("Test file cannot be opened to get description: %s", fname)
-                status[package][test_name]['description'] = ""
-            status[package][test_name]['test_directory'] = test_directory
-
-            # gather results
-            result = []
-            stdout_path = os.path.join(sequence_tag, package, os.path.splitext(test_name)[0], 'stdout.txt')
-            log.debug("Looking for results in %s", stdout_path)
-            if os.path.exists(stdout_path):
-                with open(stdout_path, 'r') as f:
-                    output = f.read()
-                    result = ArtBase.get_art_results(output)
-            else:
-                log.warning("Output file does not exist: %s", stdout_path)
-
-            status[package][test_name]['result'] = result
-
-        mkdir(sequence_tag)
-        with open(os.path.join(sequence_tag, "status.json"), 'w') as outfile:
-            json.dump(status, outfile, sort_keys=True, indent=4, ensure_ascii=False)
-
-        return 0
-
-    def task(self, executor, package, job_type, sequence_tag):
-        """Run tests of a single package."""
-        log = logging.getLogger(MODULE)
-        log.debug("task %s %s %s", package, job_type, sequence_tag)
-        test_directories = self.get_test_directories(self.script_directory)
-        test_directory = os.path.abspath(test_directories[package])
-        test_names = self.get_files(test_directory, job_type, "all", self.nightly_release, self.project, self.platform)
-        if not test_names:
-            log.debug("No tests found for package %s and job_type %s", package, job_type)
-
-        future_set = []
-        job_index = 0
-        for test_name in test_names:
-            schedule_test = False
-            fname = os.path.join(test_directory, test_name)
-            if self.ci:
-                branch_name = os.environ['AtlasBuildBranch']
-                cis = ArtHeader(fname).get(ArtHeader.ART_CI)
-                for ci in cis:
-                    if fnmatch.fnmatch(branch_name, ci):
-                        schedule_test = True
-                        break
-            else:
-                schedule_test = True
-
-            if not os.access(fname, os.X_OK):
-                schedule_test = False
-                log.warning("job skipped, file not executable: %s", fname)
-
-            if schedule_test:
-                future_set.append(executor.submit(run_job, self.art_directory, sequence_tag, self.script_directory, package, job_type, job_index, test_name))
-            job_index += 1
-
-        return future_set
-
-    def job(self, sequence_tag, package, out, job_type, job_index):
-        """Run a single test."""
-        log = logging.getLogger(MODULE)
-        log.debug("ArtBuild job %s %s %s %d %s", package, job_type, sequence_tag, job_index, out)
-        test_directories = self.get_test_directories(self.script_directory)
-        test_directory = os.path.abspath(test_directories[package])
-        test_name = self.get_files(test_directory, job_type, "all", self.nightly_release, self.project, self.platform)[int(job_index)]
-
-        work_directory = os.path.join(sequence_tag, package, os.path.splitext(test_name)[0])
-        mkdir(work_directory)
-        log.debug("Work dir %s", work_directory)
-
-        # Tests are called with arguments: PACKAGE TEST_NAME SCRIPT_DIRECTORY TYPE
-        script_directory = '.'
-        env = os.environ.copy()
-        env['ArtScriptDirectory'] = script_directory
-        env['ArtPackage'] = package
-        env['ArtJobType'] = job_type
-        env['ArtJobName'] = test_name
-        cmd = ' '.join((os.path.join(test_directory, test_name), package, test_name, script_directory, job_type))
-        (exit_code, output, err, command, start_time, end_time) = run_command(cmd, dir=work_directory, env=env)
-
-        with open(os.path.join(work_directory, "stdout.txt"), "w") as text_file:
-            log.debug("Copying stdout into %s", work_directory)
-            text_file.write(output)
-        with open(os.path.join(work_directory, "stderr.txt"), "w") as text_file:
-            log.debug("Copying stderr into %s", work_directory)
-            text_file.write(err)
-
-        return exit_code
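
ArtBuild.task_list above fans jobs out over a ThreadPoolExecutor sized by
memory and core count, then harvests results with as_completed and writes a
status.json. A minimal sketch of that scheduling pattern, with a stub
standing in for the real run_job (which shells out to art-internal.py):

    import concurrent.futures
    import json

    def run_job(package, test_name):
        # stand-in for the real run_job
        return (package, test_name, 0)

    tests = [('MyPackage', 'test_example_%d.sh' % i) for i in range(4)]

    with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:
        future_set = [executor.submit(run_job, pkg, name) for pkg, name in tests]
        status = {}
        for future in concurrent.futures.as_completed(future_set):
            package, test_name, exit_code = future.result()
            status.setdefault(package, {})[test_name] = {'exit_code': exit_code}

    print(json.dumps(status, sort_keys=True, indent=4))
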
diff --git a/Tools/ART/python/ART/art_configuration.py b/Tools/ART/python/ART/art_configuration.py
deleted file mode 100644
index d2d15ff94007..000000000000
--- a/Tools/ART/python/ART/art_configuration.py
+++ /dev/null
@@ -1,125 +0,0 @@
-#!/usr/bin/env python
-# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
-"""Interface to the general ART configuration."""
-
-__author__ = "Tulay Cuhadar Donszelmann <tcuhadar@cern.ch>"
-
-import fnmatch
-import logging
-import yaml
-
-MODULE = "art.configuration"
-
-
-class ArtConfiguration(object):
-    """Class to interface to the ART configuration."""
-
-    ALL = 'All'
-    SEPARATOR = '/'
-
-    def __init__(self, config_file=None):
-        """Init."""
-        log = logging.getLogger(MODULE)
-        if config_file is None:
-            config_file = 'art-configuration.yml'
-        try:
-            f = open(config_file, "r")
-            self.config = yaml.load(f)
-            f.close()
-        except IOError:
-            log.critical("Cannot read %s", config_file)
-            exit(2)
-
-    def release_key(self, nightly_release, project, platform):
-        """
-        Return release key.
-
-        Format is: /21.0/Athena/x86_64-slc6-gcc62-opt
-        """
-        return ArtConfiguration.SEPARATOR + ArtConfiguration.SEPARATOR.join((nightly_release, project, platform))
-
-    def release_key_compare(self, x, y):
-        """Compare two release keys."""
-        xa = x.split(ArtConfiguration.SEPARATOR)
-        ya = y.split(ArtConfiguration.SEPARATOR)
-
-        for index, item in sorted(enumerate(xa), None, None, True):
-            if xa[index] < ya[index]:
-                return -1
-            elif xa[index] > ya[index]:
-                return +1
-        return 0
-
-    def keys(self, nightly_release, project, platform, package=None):
-        """Return all keys for all matching patterns for one specific package."""
-        if self.config is None:
-            return []
-
-        if package is None:
-            package = ArtConfiguration.ALL
-
-        if package not in self.config:
-            return []
-
-        keys = []
-        for pattern in self.config[package]:
-            if fnmatch.fnmatch(self.release_key(nightly_release, project, platform), pattern):
-                for key in self.config[package][pattern].keys():
-                    if key not in keys:
-                        keys.append(key)
-
-        return keys
-
-    def packages(self):
-        """Return all packages, including 'All', defined in the configuration."""
-        if self.config is None:
-            return []
-
-        return self.config.keys()
-
-    def get(self, nightly_release, project, platform, package, key, default_value=None):
-        """Return most specific value for specified key and matching pattern.
-
-        By specifying more specific release_keys in the file [/21.0/*...]
-        one can override less specific keys [/*/*...]
-        (order in the file is not important):
-
-        Tier0ChainTests:
-            /*/*/*:
-                dst: /yourlocaldirectory
-            /21.0/*/*:
-                dst: /eos/atlas/atlascerngroupdisk/data-art/grid-output
-
-        """
-        log = logging.getLogger(MODULE)
-        log.debug("Looking for %s %s %s %s %s", nightly_release, project, platform, package, key)
-        if self.config is None:
-            log.debug("No configuration")
-            return default_value
-
-        if package is None:
-            log.debug("%s used for package", ArtConfiguration.ALL)
-            package = ArtConfiguration.ALL
-
-        if package not in self.config:
-            log.debug("%s not in config", package)
-            return default_value
-
-        value = default_value
-        for pattern in sorted(self.config[package], self.release_key_compare):
-            release_key = self.release_key(nightly_release, project, platform)
-            log.debug("release_key %s", release_key)
-            # print key, pattern
-            if fnmatch.fnmatch(release_key, pattern):
-                log.debug("matched %s", pattern)
-                release = self.config[package][pattern]
-                if key in release:
-                    value = release[key]
-
-        log.debug("Value %s", value)
-        return value
-
-    def get_option(self, nightly_release, project, platform, package, key, option_key):
-        """TBD."""
-        value = self.get(nightly_release, project, platform, package, key)
-        return option_key + value if value is not None else ''
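
ArtConfiguration.get above resolves the most specific value for a key by
fnmatch-ing the '/<release>/<project>/<platform>' release key against every
pattern under the package, with later (more specific) matches overriding
earlier ones. A self-contained sketch, with the YAML inlined as a dict and
plain lexicographic sorting standing in for release_key_compare:

    import fnmatch

    # inlined stand-in for the parsed art-configuration.yml
    config = {
        'Tier0ChainTests': {
            '/*/*/*': {'dst': '/yourlocaldirectory'},
            '/21.0/*/*': {'dst': '/eos/atlas/atlascerngroupdisk/data-art/grid-output'},
        }
    }

    def get(package, nightly_release, project, platform, key):
        release_key = '/' + '/'.join((nightly_release, project, platform))
        value = None
        # lexicographic order happens to visit generic patterns first here;
        # the real code sorts with release_key_compare so specific keys win
        for pattern in sorted(config.get(package, {})):
            if fnmatch.fnmatch(release_key, pattern):
                value = config[package][pattern].get(key, value)
        return value

    print(get('Tier0ChainTests', '21.0', 'Athena', 'x86_64-slc6-gcc62-opt', 'dst'))
    # -> /eos/atlas/atlascerngroupdisk/data-art/grid-output
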
diff --git a/Tools/ART/python/ART/art_grid.py b/Tools/ART/python/ART/art_grid.py
deleted file mode 100644
index 750a7f2dc4ba..000000000000
--- a/Tools/ART/python/ART/art_grid.py
+++ /dev/null
@@ -1,1089 +0,0 @@
-#!/usr/bin/env python
-# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
-"""Class for grid submission."""
-
-__author__ = "Tulay Cuhadar Donszelmann <tcuhadar@cern.ch>"
-
-import atexit
-import concurrent.futures
-import exceptions
-import glob
-import httplib
-import json
-import logging
-import multiprocessing
-import os
-import re
-# requests not available on lxplus, import only when needed
-# import requests
-import shutil
-import sys
-import tarfile
-import tempfile
-import time
-
-from datetime import datetime
-from datetime import timedelta
-
-from art_base import ArtBase
-from art_configuration import ArtConfiguration
-from art_header import ArtHeader
-from art_rucio import ArtRucio
-from art_misc import count_files, cp, ls, mkdir, make_executable, rm, run_command, run_command_parallel, touch
-
-MODULE = "art.grid"
-
-
-def copy_job(art_directory, indexed_package, dst, no_unpack, tmp, seq):
-    """
-    Copy job to be run by executor.
-
-    Needs to be defined outside a class.
-    """
-    log = logging.getLogger(MODULE)
-    log.debug("job started %s %s %s %s %d", art_directory, indexed_package, dst, no_unpack, tmp, seq)
-    (exit_code, out, err, command, start_time, end_time) = run_command(' '.join((os.path.join(art_directory, './art.py'), "copy", "--dst=" + dst, "--no-unpack" if no_unpack else "", "--tmp=" + tmp, "--seq=" + str(seq), indexed_package)))
-    log.debug("job ended %s %s %s %s %d", art_directory, indexed_package, dst, no_unpack, tmp, seq)
-
-    print "Copy job run with Exit Code:", exit_code
-    print out
-    print err
-    sys.stdout.flush()
-
-    return (indexed_package, exit_code, out, err, start_time, end_time)
-
-
-class ArtGrid(ArtBase):
-    """Class for grid submission."""
-
-    CVMFS_DIRECTORY = '/cvmfs/atlas-nightlies.cern.ch/repo/sw'
-    EOS_OUTPUT_DIR = '/eos/atlas/atlascerngroupdisk/data-art/grid-output'
-
-    ARTPROD = 'artprod'
-    JOB_REPORT = 'jobReport.json'
-    JOB_REPORT_ART_KEY = 'art'
-    INITIAL_RESULT_WAIT_INTERVAL = 30 * 60  # seconds, 30 mins
-    RESULT_WAIT_INTERVAL = 5 * 60  # seconds, 5 mins
-    KINIT_WAIT = 12  # 12 * RESULT_WAIT_INTERVAL, 1 hour
-
-    def __init__(self, art_directory, nightly_release, project, platform, nightly_tag, script_directory=None, skip_setup=False, submit_directory=None, max_jobs=0):
-        """Keep arguments."""
-        super(ArtGrid, self).__init__(art_directory)
-        self.nightly_release = nightly_release
-        self.nightly_release_short = re.sub(r"-VAL-.*", "-VAL", self.nightly_release)
-        self.project = project
-        self.platform = platform
-        self.nightly_tag = nightly_tag
-        self.script_directory = script_directory
-        self.skip_setup = skip_setup
-        self.submit_directory = submit_directory
-        self.max_jobs = multiprocessing.cpu_count() if max_jobs <= 0 else max_jobs
-
-        self.rucio = ArtRucio(self.art_directory, self.nightly_release_short, project, platform, nightly_tag)
-
-    def status(self, status):
-        """Print status for usage in gitlab-ci."""
-        print 'art-status:', status
-
-    def get_script_directory(self):
-        """Return calculated script directory, sometimes overriden by commandline."""
-        if self.script_directory is None:
-            self.script_directory = ArtGrid.CVMFS_DIRECTORY
-            self.script_directory = os.path.join(self.script_directory, self.nightly_release)  # e.g. 21.0
-            self.script_directory = os.path.join(self.script_directory, self.nightly_tag)  # e.g. 2017-10-25T2150
-            self.script_directory = os.path.join(self.script_directory, self.project)  # e.g. Athena
-            try:
-                self.script_directory = os.path.join(self.script_directory, os.listdir(self.script_directory)[0])  # e.g. 21.0.3
-                self.script_directory = os.path.join(self.script_directory, os.listdir(self.script_directory)[0])  # InstallArea
-            except OSError:
-                self.script_directory = os.path.join(self.script_directory, '*', '*')
-            self.script_directory = os.path.join(self.script_directory, self.platform)  # x86_64-slc6-gcc62-opt
-        return self.script_directory
-
-    def is_script_directory_in_cvmfs(self):
-        """Return true if the script directory is in cvmfs."""
-        return self.get_script_directory().startswith(ArtGrid.CVMFS_DIRECTORY)
-
-    def exit_if_no_script_directory(self):
-        """Exit with ERROR is script directory does not exist."""
-        log = logging.getLogger(MODULE)
-        if not os.path.isdir(self.get_script_directory()):
-            log.critical('Script directory does not exist: %s', self.get_script_directory())
-            self.status('error')
-            exit(1)
-
-    def exit_if_outfile_too_long(self, outfile_test):
-        """Exit with ERROR if outfile too long."""
-        log = logging.getLogger(MODULE)
-        MAX_OUTFILE_LEN = 132
-        if len(outfile_test) > MAX_OUTFILE_LEN:
-            log.error('OutFile string length > %d: %s', MAX_OUTFILE_LEN, outfile_test)
-            exit(1)
-
-    def copy_art(self, art_python, run_dir):
-        """Copy all art files to the the run directory. Returns final script directory to be used."""
-        log = logging.getLogger(MODULE)
-        ART = os.path.join(run_dir, "ART")
-        mkdir(ART)
-
-        # get the path of the python classes and support scripts
-        art_python_directory = os.path.join(self.art_directory, art_python, 'ART')
-
-        shutil.copy(os.path.join(self.art_directory, 'art.py'), run_dir)
-        shutil.copy(os.path.join(self.art_directory, 'art-diff.py'), run_dir)
-        shutil.copy(os.path.join(self.art_directory, 'art-internal.py'), run_dir)
-        shutil.copy(os.path.join(self.art_directory, 'art-task-grid.sh'), run_dir)
-        shutil.copy(os.path.join(self.art_directory, 'art-download.sh'), run_dir)
-        shutil.copy(os.path.join(art_python_directory, '__init__.py'), ART)
-        shutil.copy(os.path.join(art_python_directory, 'art_base.py'), ART)
-        shutil.copy(os.path.join(art_python_directory, 'art_build.py'), ART)
-        shutil.copy(os.path.join(art_python_directory, 'art_configuration.py'), ART)
-        shutil.copy(os.path.join(art_python_directory, 'art_grid.py'), ART)
-        shutil.copy(os.path.join(art_python_directory, 'art_header.py'), ART)
-        shutil.copy(os.path.join(art_python_directory, 'art_misc.py'), ART)
-        shutil.copy(os.path.join(art_python_directory, 'art_rucio.py'), ART)
-        shutil.copy(os.path.join(art_python_directory, 'docopt.py'), ART)
-        shutil.copy(os.path.join(art_python_directory, 'docopt_dispatch.py'), ART)
-
-        make_executable(os.path.join(run_dir, 'art.py'))
-        make_executable(os.path.join(run_dir, 'art-diff.py'))
-        make_executable(os.path.join(run_dir, 'art-internal.py'))
-        make_executable(os.path.join(run_dir, 'art-task-grid.sh'))
-        make_executable(os.path.join(run_dir, 'art-download.sh'))
-
-        script_directory = self.get_script_directory()
-
-        # copy a local test directory if needed (only for 'art grid')
-        if not self.is_script_directory_in_cvmfs():
-            script_directory = os.path.basename(os.path.normpath(self.get_script_directory()))
-            target_directory = os.path.join(run_dir, script_directory)
-            log.info("Copying script directory for grid submission to %s", target_directory)
-            shutil.copytree(self.get_script_directory(), target_directory)
-
-        return script_directory
-
-    def get_jedi_id(self, text):
-        """Return Jedi Task Id or 0."""
-        match = re.search(r"jediTaskID=(\d+)", text)
-        return match.group(1) if match else -1
-
-    def copy(self, indexed_package, dst=None, user=None, no_unpack=False, tmp=None, seq=0, keep_tmp=False):
-        """Copy output from scratch area to eos area."""
-        log = logging.getLogger(MODULE)
-        tmp = tempfile.mkdtemp(prefix=indexed_package + '-') if tmp is None else tmp
-        mkdir(tmp)
-
-        if indexed_package is not None:
-            return self.copy_package(indexed_package, dst, user, no_unpack, tmp, seq, keep_tmp)
-
-        # make sure script directory exist
-        self.exit_if_no_script_directory()
-
-        # get the test_*.sh from the test directory
-        test_directories = self.get_test_directories(self.get_script_directory())
-        if not test_directories:
-            log.warning('No tests found in directories ending in "test"')
-
-        # copy results for all packages
-        result = 0
-        for indexed_package, root in test_directories.items():
-            number_of_tests = len(self.get_files(root, "grid", "all", self.nightly_release, self.project, self.platform))
-            if number_of_tests > 0:
-                result |= self.copy_package(indexed_package, dst, user, no_unpack, tmp, seq, keep_tmp)
-        return result
-
-    def copy_package(self, indexed_package, dst, user, no_unpack, tmp, seq, keep_tmp):
-        """Copy package to dst."""
-        log = logging.getLogger(MODULE)
-        real_user = os.getenv('USER', ArtGrid.ARTPROD)
-        user = real_user if user is None else user
-        default_dst = ArtGrid.EOS_OUTPUT_DIR if real_user == ArtGrid.ARTPROD else '.'
-        dst = default_dst if dst is None else dst
-
-        result = 0
-
-        log.debug("Indexed Package %s", indexed_package)
-
-        package = indexed_package.split('.')[0]
-        nightly_tag = self.nightly_tag if seq == 0 else '-'.join((self.nightly_tag, str(seq)))
-        dst_dir = os.path.join(dst, self.nightly_release, self.project, self.platform, nightly_tag, package)
-        log.info("dst_dir %s", dst_dir)
-
-        table = self.rucio.get_table(user, indexed_package, tmp=tmp)
-        if not table:
-            log.warning("Nothing to be copied")
-            return result
-
-        for entry in table:
-            grid_index = entry['grid_index']
-            log.debug("Grid Index %d", grid_index)
-
-            # get the test name
-            test_name = entry['job_name']
-            if test_name is None:
-                log.warning("JSON TestName not found for test with grid_index %d", grid_index)
-                continue
-            log.debug("Test_name %s", test_name)
-
-            json_file = os.path.join(tmp, entry['outfile'] + "_EXT0", self.__get_rucio_name(user, entry, 'json'))
-            json_dst = dst_dir.replace('/', '.')
-            json_copying = json_file + ".copying_to" + json_dst
-            json_copied = json_file + ".copied_to" + json_dst
-
-            if os.path.isfile(json_copied):
-                log.debug("Already copied: %d %s", grid_index, test_name)
-            elif os.path.isfile(json_copying):
-                log.debug("Still copying:  %d %s", grid_index, test_name)
-            else:
-                touch(json_copying)
-
-                # create test directory
-                test_dir = os.path.join(tmp, test_name)
-                suffix = '-' + str(entry['grid_index'] - 1) if entry['single_index'] > 0 and entry['grid_index'] > 1 else ''
-                test_dir += suffix
-                mkdir(test_dir)
-
-                # copy art-job.json
-                result |= self.copy_json(json_file, test_dir)
-
-                # copy and unpack log
-                result |= self.copy_log(user, package, test_name, grid_index, test_dir, no_unpack, tmp)
-
-                # copy results and unpack
-                result |= self.copy_results(user, package, test_name, grid_index, test_dir, no_unpack, tmp)
-
-                # copy to eos
-                result |= self.copy_to_dst(test_name + suffix, test_dir, dst_dir)
-
-                if result == 0:
-                    rm(json_copying)
-                    touch(json_copied)
-
-                # cleanup
-                if not keep_tmp:
-                    shutil.rmtree(test_dir)
-
-        return result
-
-    def copy_json(self, json_file, test_dir):
-        """Copy json."""
-        log = logging.getLogger(MODULE)
-        log.info("Copying JSON: %s", json_file)
-        shutil.copyfile(json_file, os.path.join(test_dir, ArtRucio.ART_JOB))
-        return 0
-
-    def copy_log(self, user, package, test_name, grid_index, test_dir, no_unpack, tmp):
-        """Copy and unpack log file."""
-        log = logging.getLogger(MODULE)
-        log.info("Copying LOG: %s %s", package, test_name)
-
-        if no_unpack:
-            tmp_tar = self.__get_tar(user, package, test_name, grid_index=grid_index, tmp=tmp, tar=False)
-            cp(tmp_tar, test_dir)
-            os.remove(tmp_tar)
-        else:
-            tmp_tar = self.__get_tar(user, package, test_name, grid_index=grid_index, tmp=tmp, tar=False)
-            if tmp_tar is not None:
-                tar = tarfile.open(tmp_tar)
-                log.info("Unpacking LOG: %s", test_dir)
-                logdir = None
-                for member in tar.getmembers():
-                    # does not work: tar.extractall()
-                    tar.extract(member, path=test_dir)
-                    logdir = member.name.split('/', 2)[0]
-
-                tar.close()
-
-                # rename top-level log dir to tarball_logs
-                if logdir is not None:
-                    os.chdir(test_dir)
-                    os.rename(logdir, "tarball_logs")
-
-                os.remove(tmp_tar)
-        return 0
-
-    def copy_results(self, user, package, test_name, grid_index, test_dir, no_unpack, tmp):
-        """Copy results and unpack."""
-        log = logging.getLogger(MODULE)
-        log.info("Copying TAR: %s %s", package, test_name)
-
-        if no_unpack:
-            tmp_tar = self.__get_tar(user, package, test_name, grid_index=grid_index, tmp=tmp)
-            cp(tmp_tar, test_dir)
-            os.remove(tmp_tar)
-        else:
-            tmp_tar = self.__get_tar(user, package, test_name, grid_index=grid_index, tmp=tmp)
-            if tmp_tar is not None:
-                tar = tarfile.open(tmp_tar)
-                log.info("Unpacking TAR: %s", test_dir)
-                tar.extractall(path=test_dir)
-                tar.close()
-                os.remove(tmp_tar)
-
-        return 0
-
-    def copy_to_dst(self, test_name, test_dir, dst_dir):
-        """Copy to dst."""
-        log = logging.getLogger(MODULE)
-
-        # extra check if dst is already made
-        dst_target = os.path.join(dst_dir, test_name)
-
-        # create the directory
-        if mkdir(dst_target) != 0:
-            return 1
-
-        exit_code = cp(test_dir, dst_target)
-
-        # check number of source files
-        nSrc = count_files(test_dir)
-        nDst = count_files(dst_target)
-
-        if nDst == nSrc:
-            log.info("Number of files in Src (%d) and Dst (%d) are equal for %s", nSrc, nDst, test_name)
-        else:
-            log.warning("Number of files in Src (%d) and Dst (%d) differ for %s", nSrc, nDst, test_name)
-
-        return exit_code
-
-    def task_package(self, root, package, job_type, sequence_tag, inform_panda, no_action, config_file):
-        """Submit a single package."""
-        log = logging.getLogger(MODULE)
-        result = {}
-        number_of_tests = len(self.get_files(root, job_type, "all", self.nightly_release, self.project, self.platform))
-        if number_of_tests > 0:
-            print 'art-package:', package
-            self.status('included')
-            log.info('root %s with %d jobs', root, number_of_tests)
-            log.info('Handling %s for %s project %s on %s', package, self.nightly_release, self.project, self.platform)
-
-            run_dir = os.path.join(self.submit_directory, package, 'run')
-            script_directory = self.copy_art('../python', run_dir)
-
-            result = self.task(script_directory, package, job_type, sequence_tag, inform_panda, no_action, config_file)
-        return result
-
-    def task_list(self, job_type, sequence_tag, inform_panda, package=None, no_action=False, wait_and_copy=True, config_file=None):
-        """Submit a list of packages."""
-        log = logging.getLogger(MODULE)
-        log.info("Inform Panda %s", inform_panda)
-
-        # job will be submitted from tmp directory
-        self.submit_directory = tempfile.mkdtemp(dir='.')
-
-        # make sure tmp is removed afterwards
-        atexit.register(shutil.rmtree, self.submit_directory, ignore_errors=True)
-
-        # make sure script directory exist
-        self.exit_if_no_script_directory()
-
-        # get the test_*.sh from the test directory
-        test_directories = self.get_test_directories(self.get_script_directory())
-        if not test_directories:
-            log.warning('No tests found in directories ending in "test"')
-
-        configuration = None if self.skip_setup else ArtConfiguration(config_file)
-
-        all_results = {}
-
-        if package is None:
-            # submit tasks for all packages
-            for package, root in test_directories.items():
-                if configuration is not None and configuration.get(self.nightly_release, self.project, self.platform, package, 'exclude', False):
-                    log.warning("Package %s is excluded", package)
-                else:
-                    all_results.update(self.task_package(root, package, job_type, sequence_tag, inform_panda, no_action, config_file))
-        else:
-            # Submit single package
-            root = test_directories[package]
-            all_results.update(self.task_package(root, package, job_type, sequence_tag, inform_panda, no_action, config_file))
-
-        if no_action:
-            log.info("--no-action specified, so not waiting for results")
-            return 0
-
-        if len(all_results) == 0:
-            log.warning('No tests found, nothing to submit.')
-            return 0
-
-        if not wait_and_copy:
-            log.debug("No copying")
-            return 0
-
-        # wait for all results
-        configuration = ArtConfiguration(config_file)
-
-        executor = None
-        future_set = []
-        seq = None
-
-        kinit_interval = ArtGrid.KINIT_WAIT  # ArtGrid.KINIT_WAIT * ArtGrid.RESULT_WAIT_INTERVAL
-        result_wait_interval = ArtGrid.INITIAL_RESULT_WAIT_INTERVAL
-        final_states = ["done", "finished", "failed", "aborted", "broken"]
-        tmp = tempfile.mkdtemp(prefix=sequence_tag + '-')
-        while len(all_results) > 0:
-            log.debug("No of Results %d", len(all_results))
-            log.debug("Waiting...")
-            time.sleep(result_wait_interval)
-            log.debug("Done Waiting")
-            result_wait_interval = ArtGrid.RESULT_WAIT_INTERVAL
-            kinit_interval -= 1
-            if kinit_interval <= 0:
-                os.system("kinit -R")
-                kinit_interval = ArtGrid.KINIT_WAIT
-
-            # force a copy of all_results since we are modifying all_results
-            for jedi_id in list(all_results):
-                package = all_results[jedi_id][0]
-                # skip packages without copy
-                if not configuration.get(self.nightly_release, self.project, self.platform, package, "copy"):
-                    log.info("Copy not configured for %s - skipped", package)
-                    del all_results[jedi_id]
-                    continue
-
-                # figure out the destination for the copy based on if the directory already exists, keep seq
-                if seq is None:
-                    dst = configuration.get(self.nightly_release, self.project, self.platform, package, "dst", ArtGrid.EOS_OUTPUT_DIR)
-                    dst_dir = os.path.join(dst, self.nightly_release, self.project, self.platform, self.nightly_tag)
-                    final_target = dst_dir
-                    max_seq = 10
-                    seq = 0
-                    while ls(final_target) == 0 and seq < max_seq:
-                        seq += 1
-                        final_target = '-'.join((dst_dir, str(seq)))
-
-                    if seq >= max_seq:
-                        log.warning("Too many retries (>%d) to copy, removing job %d", max_seq, jedi_id)
-                        del all_results[jedi_id]
-                        continue
-
-                    # create the directory
-                    if mkdir(final_target) != 0:
-                        log.warning("Could not create output dir %s, retrying later", final_target)
-                        continue
-
-                log.debug("Checking package %s for %s", package, str(jedi_id))
-                status = self.task_status(jedi_id)
-                if status is not None:
-
-                    # job_name = all_results[jedi_id][1]
-                    # outfile = all_results[jedi_id][2]
-                    index = all_results[jedi_id][3]
-
-                    # skip single jobs if status is not final
-                    if (index > 0) and (status not in final_states):
-                        continue
-
-                    # create executor if not already done
-                    if executor is None:
-                        log.info("Executor started with %d threads", self.max_jobs)
-                        executor = concurrent.futures.ThreadPoolExecutor(max_workers=self.max_jobs)
-
-                    no_unpack = configuration.get(self.nightly_release, self.project, self.platform, package, "no_unpack", False)
-                    indexed_package = package + ('.' + str(index) if index > 0 else '')
-                    log.debug("Copy whatever ready from %s to %s using seq %d", indexed_package, dst, seq)
-                    future_set.append(executor.submit(copy_job, self.art_directory, indexed_package, dst, no_unpack, tmp, seq))
-
-                    # job in final state
-                    if status in final_states:
-                        # remove job from waiting queue
-                        log.info("JediID %s finished with status %s", str(jedi_id), status)
-                        del all_results[jedi_id]
-                        log.info("Still waiting for results of %d jobs %s", len(all_results), all_results.keys())
-
-                    log.debug("Still waiting for results of %d jobs %s", len(all_results), all_results.keys())
-
-        if len(future_set) <= 0:
-            log.info("No need to wait for any copy jobs")
-            return 0
-
-        # wait for all copy jobs to finish
-        number_of_copy_jobs = len(future_set)
-        log.info("Waiting for %d copy jobs to finish...", number_of_copy_jobs)
-        for future in concurrent.futures.as_completed(future_set):
-            (indexed_package, exit_code, out, err, start_time, end_time) = future.result()
-            if exit_code == 0:
-                log.debug("Copied %s exit_code: %d", indexed_package, exit_code)
-                log.debug("  starting %s until %s", start_time.strftime('%Y-%m-%dT%H:%M:%S'), end_time.strftime('%Y-%m-%dT%H:%M:%S'))
-            else:
-                log.error("Failed to copy: %s exit_code: %d", indexed_package, exit_code)
-                print err
-                print out
-            number_of_copy_jobs -= 1
-            log.info("Still waiting for %d copy jobs to finish...", number_of_copy_jobs)
-
-        log.info("All copy jobs finished.")
-        return 0
-
-    def task_status(self, jedi_id):
-        """
-        Wait for job to finish.
-
-        Return final status of a task, or None if not finished
-        """
-        import requests
-        import urllib3
-        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
-
-        log = logging.getLogger(MODULE)
-
-        # fake return for simulation
-        if jedi_id == 0:
-            return "done"
-
-        try:
-            payload = {'json': 'true'}
-            url = 'https://bigpanda.cern.ch/task/' + str(jedi_id)
-            r = requests.get(url, params=payload, verify=False)
-            if r.status_code == requests.codes.ok:
-                s = r.json()
-                if (s is not None) and ('task' in s):
-                    task = s['task']
-                    if (task is not None) and ('status' in task):
-                        status = task['status']
-                        # if status in ["done", "finished", "failed", "aborted", "broken"]:
-                        log.debug("Task: %s %s", str(jedi_id), str(status))
-                        return status
-        except requests.exceptions.RequestException, e:
-            log.error('%s for %s status: %s', e, str(jedi_id), url)
-        except httplib.IncompleteRead, e:
-            log.error('%s for %s status: %s', e, str(jedi_id), url)
-        return None
-
-    def task_job(self, grid_options, sub_cmd, script_directory, sequence_tag, package, outfile, inform_panda, job_type='', number_of_tests=0, split=0, job_name='', inds=None, n_files=0, in_file=False, ncores=1, athena_mt=0, no_action=False):
-        """
-        Submit a batch or single job.
-
-        Returns jedi_id or 0 if submission failed.
-
-        # art-task-grid.sh [--no-action] batch <submit_directory> <script_directory> <sequence_tag> <package> <outfile> <inform_panda> <job_type> <number_of_tests>
-        #
-        # art-task-grid.sh [--no-action] single [--inds <input_file> --n-files <number_of_files> --split <split> --in] <submit_directory> <script_directory> <sequence_tag> <package> <outfile> <inform_panda> <job_name>
-        """
-        log = logging.getLogger(MODULE)
-        cmd = ' '.join((os.path.join(self.art_directory, 'art-task-grid.sh'),
-                        '--no-action' if no_action else '',
-                        sub_cmd))
-
-        if sub_cmd == 'single':
-            cmd = ' '.join((cmd,
-                            '--inds ' + str(inds) if inds is not None else '',
-                            '--n-files ' + str(n_files) if inds is not None and n_files > 0 else '',
-                            '--split ' + str(split) if inds is not None and split > 0 else '',
-                            '--in' if inds is not None and in_file else '',
-                            '--ncore ' + str(ncores) if athena_mt == 0 and ncores > 1 else '',
-                            '--athena_mt ' + str(athena_mt) if ncores == 1 and athena_mt > 0 else ''))
-
-        cmd = ' '.join((cmd,
-                        self.submit_directory,
-                        script_directory,
-                        sequence_tag,
-                        package,
-                        outfile,
-                        str(inform_panda)))
-
-        if sub_cmd == 'batch':
-            cmd = ' '.join((cmd,
-                            job_type,
-                            str(number_of_tests)))
-        elif sub_cmd == 'single':
-            cmd = ' '.join((cmd,
-                            job_name))
-        else:
-            log.critical("Invalid sub_cmd %s", sub_cmd)
-            exit(1)
-
-        log.info("cmd: %s", cmd)
-
-        # run task from Bash Script as is needed in ATLAS setup
-        log.debug("Grid_options: %s", grid_options)
-        env = os.environ.copy()
-        env['PATH'] = '.:' + env['PATH']
-        env['ART_GRID_OPTIONS'] = grid_options
-
-        log.debug("ART_GRID_OPTIONS %s", env['ART_GRID_OPTIONS'])
-
-        jedi_id = -1
-        # run the command, no_action is forwarded and used inside the script
-        (exit_code, out, err, command, start_time, end_time) = run_command(cmd, env=env)
-        if exit_code != 0:
-            log.error("art-task-grid failed %d", exit_code)
-            print err
-        else:
-            jedi_id = 0 if no_action else self.get_jedi_id(err)
-        print out
-
-        log.info('jedi_id: %s', str(jedi_id))
-        return jedi_id
-
-    def get_grid_options(self, package, config_file):
-        """Return grid options for a package."""
-        log = logging.getLogger(MODULE)
-        if self.skip_setup:
-            return ''
-
-        configuration = ArtConfiguration(config_file)
-        grid_options = configuration.get_option(self.nightly_release, self.project, self.platform, package, 'exclude-sites', '--excludedSite=')
-        grid_options += ' ' + configuration.get_option(self.nightly_release, self.project, self.platform, package, 'sites', '--site=')
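-        # e.g. grid_options = '--excludedSite=ANALY_SITE_A --site=ANALY_SITE_B' (illustrative site names)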
-        log.info('grid_options: %s', grid_options)
-        return grid_options
-
-    def task(self, script_directory, package, job_type, sequence_tag, inform_panda, no_action=False, config_file=None):
-        """
-        Submit a task, consisting of multiple jobs.
-
-        For 'single' jobs each task contains exactly one job.
-        Returns a map of jedi_id to (package, test_name, out_file, seq, None)
-        """
-        log = logging.getLogger(MODULE)
-        log.info('Running art task')
-
-        grid_options = self.get_grid_options(package, config_file)
-
-        test_directories = self.get_test_directories(self.get_script_directory())
-        test_directory = test_directories[package]
-        number_of_batch_tests = len(self.get_files(test_directory, job_type, "batch", self.nightly_release, self.project, self.platform))
-
-        user = os.getenv('USER', 'artprod') if self.skip_setup else ArtGrid.ARTPROD
-        outfile = self.rucio.get_outfile_name(user, package, sequence_tag)
-
-        result = {}
-
-        # submit batch tests, index = 0
-        if number_of_batch_tests > 0:
-            self.exit_if_outfile_too_long(outfile)
-
-            # Batch
-            log.info("Batch")
-            jedi_id = self.task_job(grid_options, "batch", script_directory, sequence_tag, package, outfile, inform_panda, job_type=job_type, number_of_tests=number_of_batch_tests, no_action=no_action)
-            if jedi_id > 0:
-                result[jedi_id] = (package, "", outfile, 0, None)
-
-        # submit single tests, index >= 1
-        index = 1
-        for job_name in self.get_files(test_directory, job_type, "single", self.nightly_release, self.project, self.platform):
-            job = os.path.join(test_directory, job_name)
-            header = ArtHeader(job)
-            inds = header.get(ArtHeader.ART_INPUT)
-            n_files = header.get(ArtHeader.ART_INPUT_NFILES)
-            split = header.get(ArtHeader.ART_INPUT_SPLIT)
-            ncores = header.get(ArtHeader.ART_CORES)
-            athena_mt = header.get(ArtHeader.ART_ATHENA_MT)
-
-            outfile_test = self.rucio.get_outfile_name(user, package, sequence_tag, str(index))
-            self.exit_if_outfile_too_long(outfile_test)
-
-            # Single
-            log.info("Single")
-            jedi_id = self.task_job(grid_options, "single", script_directory, sequence_tag, package, outfile_test, inform_panda, split=split, job_name=job_name, inds=inds, n_files=n_files, in_file=True, ncores=ncores, athena_mt=athena_mt, no_action=no_action)
-
-            if jedi_id > 0:
-                result[jedi_id] = (package, job_name, outfile_test, index, None)
-
-            index += 1
-
-        return result
-
-    def batch(self, sequence_tag, package, out, inform_panda, job_type, job_index):
-        """Run a single job by job_index of a 'batch' submission."""
-        log = logging.getLogger(MODULE)
-        log.info('Running art grid batch')
-        log.info("%s %s %s %s %s %s %s %s %s", self.nightly_release, self.project, self.platform, self.nightly_tag, package, job_type, str(job_index), out, inform_panda)
-
-        test_directories = self.get_test_directories(self.get_script_directory())
-        test_directory = test_directories[package]
-
-        test_list = self.get_files(test_directory, job_type, "batch", self.nightly_release, self.project, self.platform)
-
-        # NOTE: grid counts from 1
-        index = int(job_index)
-        job_name = test_list[index - 1]
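-        # e.g. grid job_index 1 runs test_list[0]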
-
-        in_file = None
-
-        return self.job(test_directory, package, job_name, job_type, out, inform_panda, in_file)
-
-    def single(self, sequence_tag, package, out, inform_panda, job_name, in_file):
-        """Run a single job by name of a 'single' submission."""
-        log = logging.getLogger(MODULE)
-
-        log.info('Running art grid single')
-        log.info("%s %s %s %s %s %s %s %s %s", self.nightly_release, self.project, self.platform, self.nightly_tag, package, job_name, out, inform_panda, in_file)
-
-        test_directories = self.get_test_directories(self.get_script_directory())
-        test_directory = test_directories[package]
-
-        job_type = 'grid'
-        return self.job(test_directory, package, job_name, job_type, out, inform_panda, in_file)
-
-    def job(self, test_directory, package, job_name, job_type, out, inform_panda, in_file):
-        """Run a job."""
-        log = logging.getLogger(MODULE)
-
-        log.info("art-job-name: %s", job_name)
-        panda_id = os.getenv('PandaID', '0')
-        if inform_panda == 'True':
-            # informing panda, ignoring errors for now
-            self.inform_panda(panda_id, job_name, package)
-
-        test_file = os.path.join(test_directory, job_name)
-
-        # Tests are called with arguments: PACKAGE TEST_NAME SCRIPT_DIRECTORY TYPE [IN_FILE]
-        script_directory = self.get_script_directory()
-        command = ' '.join((test_file, package, job_name, script_directory, job_type, in_file if in_file is not None else ''))
-
-        log.debug(job_name)
-        log.debug(test_directory)
-        log.debug(command)
-
-        # run the test
-        env = os.environ.copy()
-        env['PATH'] = '.:' + env['PATH']
-        env['ArtScriptDirectory'] = script_directory
-        env['ArtPackage'] = package
-        env['ArtJobType'] = job_type
-        env['ArtJobName'] = job_name
-        if in_file is not None:
-            env['ArtInFile'] = in_file
-
-        header = ArtHeader(test_file)
-        athena_mt = header.get(ArtHeader.ART_ATHENA_MT)
-        ncores = header.get(ArtHeader.ART_CORES)
-        if athena_mt == 0 and ncores > 1:
-            nthreads = header.get(ArtHeader.ART_INPUT_NFILES)
-            (exit_code, output, error, command, start_time, end_time) = run_command_parallel(command, nthreads, ncores, env=env)
-        else:
-            (exit_code, output, error, command, start_time, end_time) = run_command(command, env=env)
-        print output
-        if exit_code != 0:
-            log.error("Test %s failed %d", job_name, exit_code)
-        # print the error stream once in all cases; the wrapper itself always returns 0
-        print error
-
-        # gather results
-        result = {}
-        result['name'] = job_name
-        result['exit_code'] = exit_code
-        result['test_directory'] = test_directory
-        result['result'] = ArtBase.get_art_results(output)
-        result['panda_id'] = panda_id
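-        # art-job.json then contains e.g. (illustrative):
-        #   {"name": "test_x.sh", "exit_code": 0, "result": [...], "panda_id": "0", "test_directory": "..."}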
-
-        # write out results
-        with open(ArtRucio.ART_JOB, 'w') as jobfile:
-            json.dump(result, jobfile, sort_keys=True, indent=4, ensure_ascii=False)
-            log.info("Wrote %s", ArtRucio.ART_JOB)
-
-        # grab the content of "jobReport.json", add the art dictionary and write it back
-        if os.path.isfile(ArtGrid.JOB_REPORT):
-            with open(ArtGrid.JOB_REPORT, 'r+') as json_file:
-                info = json.load(json_file)
-                info[ArtGrid.JOB_REPORT_ART_KEY] = result
-                # write out results
-                json_file.seek(0)
-                json.dump(info, json_file, sort_keys=True, indent=4, ensure_ascii=False)
-                json_file.truncate()
-                log.info("Updated %s", ArtGrid.JOB_REPORT)
-        else:
-            with open(ArtGrid.JOB_REPORT, 'w') as json_file:
-                info = {}
-                info[ArtGrid.JOB_REPORT_ART_KEY] = result
-                json.dump(info, json_file, sort_keys=True, indent=4, ensure_ascii=False)
-                log.info("Updated %s", ArtGrid.JOB_REPORT)
-
-        # pick up the outputs
-        files = set()
-
-        # pick up explicitly named output files
-        with open(test_file, "r") as f:
-            for line in f:
-                # remove comments
-                line = line.split('#', 1)[0]
-                out_names = re.findall(r"--output[^\s=]*[= ]*(\S*)", line)
-                log.debug(out_names)
-                for out_name in out_names:
-                    out_name = out_name.strip('\'"')
-                    if os.path.exists(out_name):
-                        files.add(out_name)
-
-        # pick up art-header named outputs
-        for path_name in ArtHeader(test_file).get(ArtHeader.ART_OUTPUT):
-            for out_name in glob.glob(path_name):
-                files.add(out_name)
-
-        tar_file = tarfile.open(out, mode='w')
-        for file in files:
-            log.info('Tar file contains: %s', file)
-            tar_file.add(file)
-
-        tar_file.close()
-        # Always return 0
-        return 0
-
-    def inform_panda(self, panda_id, job_name, package):
-        """Inform panda about the job we are running using panda ID."""
-        log = logging.getLogger(MODULE)
-        import requests
-
-        url = "http://bigpanda.cern.ch/art/registerarttest/?json"
-        n_attempts = 3
-        timeout = 10
-
-        payload = {}
-        payload['pandaid'] = panda_id
-        payload['testname'] = job_name
-        payload['nightly_release_short'] = self.nightly_release_short
-        payload['platform'] = self.platform
-        payload['project'] = self.project
-        payload['package'] = package
-        payload['nightly_tag'] = self.nightly_tag
-
-        headers = {'User-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36'}
-
-        for i in range(0, n_attempts):
-            try:
-                reply = requests.post(url, data=payload, headers=headers, timeout=timeout, verify=False)
-                log.info('Informed panda about %s %s %s', panda_id, job_name, package)
-            except requests.exceptions.RequestException:
-                log.warning('Exception occurred for %s %s %s', panda_id, job_name, package)
-                continue
-
-            if reply.status_code == 200:
-                try:
-                    reply = reply.json()
-                except ValueError:
-                    log.error('The panda inform response was corrupted for %s %s %s', panda_id, job_name, package)
-                    raise
-                if 'exit_code' in reply and reply['exit_code'] == 0:
-                    return True
-
-        log.error('Panda could not be informed about %s %s %s', panda_id, job_name, package)
-        return False
-
-    def list(self, package, job_type, index_type, json_format, user):
-        """List all jobs available."""
-        user = ArtGrid.ARTPROD if user is None else user
-
-        # make sure the script directory exists
-        self.exit_if_no_script_directory()
-
-        json_array = []
-        for entry in self.rucio.get_table(user, package):
-            # print entry
-            json_array.append({
-                'name': entry['job_name'],
-                'grid_index': entry['grid_index'],
-                'job_index': entry['job_index'],
-                'single_index': entry['single_index'],
-                'file_index': entry['file_index'],
-                'outfile': entry['outfile']
-            })
-
-        if json_format:
-            json.dump(json_array, sys.stdout, sort_keys=True, indent=4)
-            return 0
-
-        i = 0
-        print "Example FileName: user.artprod.atlas.21.0.Athena.x86_64-slc6-gcc62-opt.2018-02-25T2154.314889.TrigInDetValidation.<Single>"
-        print "Example OutputName: user.artprod.<Job>.EXT1._<Grid>.tar.<File>"
-        print
-        print '{:-^5}'.format('Index'), \
-              '{:-^60}'.format('Name'), \
-              '{:-^6}'.format('Grid'), \
-              '{:-^9}'.format('Job'), \
-              '{:-^6}'.format('Single'), \
-              '{:-^4}'.format('File'), \
-              '{:-^80}'.format('FileName')
-
-        for entry in json_array:
-            print '{:5d}'.format(i), \
-                  '{:60}'.format('None' if entry['name'] is None else entry['name']), \
-                  '{:06d}'.format(entry['grid_index']), \
-                  '{:9d}'.format(entry['job_index']), \
-                  '{:6d}'.format(entry['single_index']), \
-                  '{:4d}'.format(entry['file_index']), \
-                  '{:80}'.format(entry['outfile'])
-            i += 1
-
-        return 0
-
-    def log(self, package, test_name, user):
-        """Print the log of a job."""
-        log = logging.getLogger(MODULE)
-        user = ArtGrid.ARTPROD if user is None else user
-
-        # make sure the script directory exists
-        self.exit_if_no_script_directory()
-
-        tmp_tar = self.__get_tar(user, package, test_name, tar=False)
-        if tmp_tar is None:
-            log.error("No log tar file found")
-            return 1
-
-        tar = tarfile.open(tmp_tar)
-        for name in tar.getnames():
-            if ArtRucio.ATHENA_STDOUT in name:
-                f = tar.extractfile(name)
-                content = f.read()
-                print content
-                break
-        tar.close()
-        os.remove(tmp_tar)
-        return 0
-
-    def output(self, package, test_name, user):
-        """Download the output of a job."""
-        log = logging.getLogger(MODULE)
-        user = ArtGrid.ARTPROD if user is None else user
-
-        # make sure the script directory exists
-        self.exit_if_no_script_directory()
-
-        outfile = self.rucio.get_outfiles(user, package)[0]
-        if not outfile.endswith(package):
-            # strip the trailing single-test index extension (e.g. '.13')
-            outfile = os.path.splitext(outfile)[0]
-        job_name = os.path.splitext(test_name)[0]
-        tar_dir = os.path.join(tempfile.gettempdir(), outfile, job_name)
-        mkdir(tar_dir)
-
-        tmp_tar = self.__get_tar(user, package, test_name)
-        if tmp_tar is None:
-            log.error("No output tar file found")
-            return 1
-
-        tar = tarfile.open(tmp_tar)
-        tar.extractall(path=tar_dir)
-        tar.close()
-        os.remove(tmp_tar)
-
-        print "Output extracted in", tar_dir
-
-        return 0
-
-    def compare(self, package, test_name, days, user, files, entries=-1, mode='detailed', shell=False):
-        """Compare current output against a job of certain days ago."""
-        log = logging.getLogger(MODULE)
-        user = ArtGrid.ARTPROD if user is None else user
-
-        previous_nightly_tag = self.get_previous_nightly_tag(days)
-        log.info("LOG Previous Nightly Tag: %s", str(previous_nightly_tag))
-
-        if previous_nightly_tag is None:
-            log.error("No previous nightly tag found")
-            return 1
-
-        ref_dir = os.path.join('.', 'ref-' + previous_nightly_tag)
-        mkdir(ref_dir)
-
-        log.info("Shell = %s", shell)
-        tmp_tar = self.__get_tar(user, package, test_name, nightly_tag=previous_nightly_tag, shell=shell)
-        if tmp_tar is None:
-            log.error("No comparison tar file found")
-            return 1
-
-        tar = tarfile.open(tmp_tar)
-        tar.extractall(path=ref_dir)
-        tar.close()
-        os.remove(tmp_tar)
-
-        return self.compare_ref('.', ref_dir, files, entries, mode)
-
-    def __get_tar(self, user, package, test_name, grid_index=-1, tmp=None, tar=True, nightly_tag=None, shell=False):
-        """Open tar file for particular release."""
-        log = logging.getLogger(MODULE)
-        log.debug("Tar: %s", tar)
-        tmp = tempfile.gettempdir() if tmp is None else tmp
-        nightly_tag = self.nightly_tag if nightly_tag is None else nightly_tag
-        job_name = os.path.splitext(test_name)[0]
-
-        max_tries = 3
-        wait_time = 5  # mins
-
-        tries = max_tries
-        while tries > 0:
-            try:
-                for entry in self.rucio.get_table(user, package, nightly_tag, shell, tmp):
-                    if entry['job_name'] == job_name and (grid_index < 0 or entry['grid_index'] == grid_index):
-
-                        log.debug("index %d", entry['grid_index'])
-                        rucio_name = self.__get_rucio_name(user, entry, 'tar' if tar else 'log')
-
-                        log.debug("RUCIO: %s", rucio_name)
-
-                        tmp_dir = tempfile.mkdtemp()
-                        atexit.register(shutil.rmtree, tmp_dir, ignore_errors=True)
-
-                        log.debug("Shell = %s", shell)
-                        exit_code = self.rucio.download(rucio_name, tmp_dir, shell)
-                        if exit_code == 0:
-                            tmp_tar = os.path.join(tmp_dir, 'user.' + user, rucio_name)
-                            return tmp_tar
-
-            except exceptions.Exception, e:
-                log.warning('(Rucio) Exception: %s', str(e))
-                log.info("Waiting %d mins", wait_time)
-                tries -= 1
-                time.sleep(wait_time * 60)
-                continue
-
-            log.error("No log or tar found for package %s or test %s", package, test_name)
-            return None
-
-        log.error("Too many (%d) (Rucio) Exceptions", max_tries)
-        return None
-
-    def __get_rucio_name(self, user, entry, file_type):
-        rucio_name = None
-        if file_type == 'json':
-            rucio_name = '.'.join(('user', user, str(entry['job_index']), 'EXT0', '_{0:06d}'.format(entry['grid_index']), 'art-job', 'json'))
-        elif file_type == 'tar':
-            rucio_name = '.'.join(('user', user, str(entry['job_index']), 'EXT1', '_{0:06d}'.format(entry['grid_index']), 'tar'))
-        else:
-            rucio_name = '.'.join((entry['outfile'], 'log', str(entry['job_index']), '{0:06d}'.format(entry['grid_index']), 'log.tgz'))
-
-        if entry['file_index'] > 0:
-            rucio_name = '.'.join((rucio_name, str(entry['file_index'])))
-
-        return rucio_name
-
-    def get_previous_nightly_tag(self, days):
-        """
-        Return the nightly tag of given days ago.
-
-        21:00 is the cutoff time. Any submission before 21:00 counts as the previous day.
-        """
-        directory = os.path.join(ArtGrid.CVMFS_DIRECTORY, self.nightly_release)
-        tags = os.listdir(directory)
-        tags.sort(reverse=True)
-        tags = [x for x in tags if re.match(r'\d{4}-\d{2}-\d{2}T\d{2}\d{2}', x)]
-        found = False
-        for tag in tags:
-            if tag == self.nightly_tag:
-                found = True
-            elif found:
-                # check this is within days... (cutoff is 21:00, just move by 3 hours to get full days)
-                fmt = '%Y-%m-%dT%H%M'
-                offset = timedelta(hours=3)
-                nightly_tag_dt = datetime.strptime(self.nightly_tag, fmt) + offset
-                from_dt = nightly_tag_dt.replace(hour=0, minute=0, second=0, microsecond=0) - timedelta(days=days)
-                to_dt = from_dt + timedelta(days=1)
-                tag_dt = datetime.strptime(tag, fmt) + offset
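-                # e.g. a 2018-02-25T2101 tag shifts to 2018-02-26T0001 and counts as the next day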
-                within_days = from_dt <= tag_dt and tag_dt < to_dt
-                target_exists = len(glob.glob(os.path.join(directory, tag, self.project, '*', 'InstallArea', self.platform))) > 0
-                if within_days and target_exists:
-                    return tag
-        return None
-
-    def createpoolfile(self):
-        """Create 'empty' poolfile catalog."""
-        path = os.path.join('.', 'PoolFileCatalog.xml')
-        with open(path, 'w+') as pool_file:
-            pool_file.write('<!-- Edited By POOL -->\n')
-            pool_file.write('<!DOCTYPE POOLFILECATALOG SYSTEM "InMemory">\n')
-            pool_file.write('<POOLFILECATALOG>\n')
-            pool_file.write('</POOLFILECATALOG>\n')
-
-        return 0
diff --git a/Tools/ART/python/ART/art_header.py b/Tools/ART/python/ART/art_header.py
deleted file mode 100644
index f9e9f1aa0be7..000000000000
--- a/Tools/ART/python/ART/art_header.py
+++ /dev/null
@@ -1,167 +0,0 @@
-#!/usr/bin/env python
-# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
-"""Class to handle art-headers."""
-
-__author__ = "Tulay Cuhadar Donszelmann <tcuhadar@cern.ch>"
-
-import logging
-import re
-
-from types import IntType
-from types import ListType
-from types import StringType
-
-MODULE = "art.header"
-
-
-class ArtHeader(object):
-    """Class to handle art-headers."""
-
-    # headers in alphabetical order
-    ART_ATHENA_MT = 'art-athena-mt'
-    ART_CI = 'art-ci'
-    ART_CORES = 'art-cores'
-    ART_DESCRIPTION = 'art-description'
-    ART_INCLUDE = 'art-include'
-    ART_INPUT = 'art-input'
-    ART_INPUT_NFILES = 'art-input-nfiles'
-    ART_INPUT_SPLIT = 'art-input-split'
-    ART_OUTPUT = 'art-output'
-    ART_TYPE = 'art-type'
-
-    def __init__(self, filename):
-        """Keep arguments, setup patterns for re-use, define possible art-header definitions."""
-        self.header_format = re.compile(r'#\s(art-[\w-]+):\s+(.+)$')
-        self.header_format_error1 = re.compile(r'#(art-[\w-]*):\s*(.+)$')
-        self.header_format_error2 = re.compile(r'#\s\s+(art-[\w-]*):\s*(.+)$')
-        self.header_format_error3 = re.compile(r'#\s(art-[\w-]*):\S(.*)$')
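-        # header_format matches e.g. '# art-type: grid'; the error patterns catch
-        # '#art-type: grid', '#  art-type: grid' and '# art-type:grid' respectively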
-
-        self.filename = filename
-
-        self.header = {}
-
-        # general
-        self.add(ArtHeader.ART_DESCRIPTION, StringType, '')
-        self.add(ArtHeader.ART_TYPE, StringType, None, ['build', 'grid'])
-        self.add(ArtHeader.ART_INCLUDE, ListType, ['*'])
-
-        # "build" type only
-        self.add(ArtHeader.ART_CI, ListType, [])
-
-        # "grid" type only
-        self.add(ArtHeader.ART_CORES, IntType, 1)
-        self.add(ArtHeader.ART_OUTPUT, ListType, [])
-        self.add(ArtHeader.ART_INPUT, StringType, None)
-        self.add(ArtHeader.ART_INPUT_NFILES, IntType, 1)
-        self.add(ArtHeader.ART_INPUT_SPLIT, IntType, 0)
-        self.add(ArtHeader.ART_ATHENA_MT, IntType, 0)
-
-        self.read(filename)
-
-    def add(self, key, value_type, default_value=None, constraint=None):
-        """Add a single header definition."""
-        self.header[key] = {}
-        self.header[key]['type'] = value_type
-        self.header[key]['default'] = default_value
-        self.header[key]['constraint'] = constraint
-        self.header[key]['value'] = None    # i.e. the value was never set
-
-    def is_list(self, key):
-        """Return true if key exists and is of ListType."""
-        return self.header[key]['type'] is ListType if key in self.header else False
-
-    def read(self, filename):
-        """Read all headers from file."""
-        log = logging.getLogger(MODULE)
-        for line in open(filename, "r"):
-            line_match = self.header_format.match(line)
-            if line_match:
-                try:
-                    key = line_match.group(1)
-                    value = line_match.group(2).strip()
-                    if key in self.header and self.header[key]['type'] == IntType:
-                        value = int(value)
-
-                    if self.is_list(key):
-                        # handle list types
-                        if self.header[key]['value'] is None:
-                            self.header[key]['value'] = []
-                        self.header[key]['value'].append(value)
-                    else:
-                        # handle values
-                        if key not in self.header:
-                            log.warning("Unknown art-header %s: %s in file %s", key, value, filename)
-                            self.add(key, StringType)
-                        if self.header[key]['value'] is None:
-                            self.header[key]['value'] = value
-                        else:
-                            log.warning("key %s: already set to %s in file %s", key, self.header[key]['value'], filename)
-                except ValueError:
-                    log.error("Invalid value in art-header %s: %s in file %s", key, value, filename)
-
-    def get(self, key):
-        """
-        Get the value of a header by key.
-
-        Return default if header not specified.
-        Warn and return None if header is not defined.
-        """
-        log = logging.getLogger(MODULE)
-        if key not in self.header:
-            log.warning("Art seems to look for a header key %s which is not in the list of defined headers.", key)
-            return None
-
-        if self.header[key]['value'] is None:
-            return self.header[key]['default']
-
-        return self.header[key]['value']
-
-    def print_it(self):
-        """Print content of the headers for this file."""
-        log = logging.getLogger(MODULE)
-        for key in self.header:
-            log.info("%s: %s %s %s %s", key, self.header[key]['type'], self.header[key]['default'], self.header[key]['value'], self.header[key]['constraint'])
-
-    def validate(self):
-        """
-        Validate the '# art-*' headers in the file.
-
-        Validation fails if:
-        - a header is not spaced correctly (e.g. '#art-header: value')
-        - a value in a header is not spaced correctly (e.g. '# art-header:value')
-        - a key is found which is not defined
-        - a value is found of the wrong value_type
-        - a value is found outside the constraint
-        """
-        log = logging.getLogger(MODULE)
-        for line in open(self.filename, "r"):
-            if self.header_format_error1.match(line):
-                log.error("LINE: %s", line.rstrip())
-                log.error("Header Validation - invalid header format, use space between '# and art-xxx' in file %s", self.filename)
-                log.error("")
-            if self.header_format_error2.match(line):
-                log.error("LINE: %s", line.rstrip())
-                log.error("Header Validation - invalid header format, too many spaces between '# and art-xxx' in file %s", self.filename)
-                log.error("")
-            if self.header_format_error3.match(line):
-                log.error("LINE: %s", line.rstrip())
-                log.error("Header Validation - invalid header format, use at least one space between ': and value' in file %s", self.filename)
-                log.error("")
-
-        for key in self.header:
-            if 'type' not in self.header[key]:
-                log.error("Header Validation - Invalid key: %s in file %s", key, self.filename)
-                log.error("")
-                continue
-            if type(self.header[key]['value']) != self.header[key]['type']:
-                if not isinstance(self.header[key]['value'], type(None)):
-                    log.error("Header Validation - value_type: %s not valid for key: %s, expected value_type: %s in file %s", type(self.header[key]['value']), key, self.header[key]['type'], self.filename)
-                    log.error("")
-            if self.header[key]['constraint'] is not None and self.header[key]['value'] not in self.header[key]['constraint']:
-                if self.header[key]['value'] is None:
-                    log.error("Header Validation - missing key: %s in file %s", key, self.filename)
-                else:
-                    log.error("Header Validation - value: %s for key: %s not in constraints: %s in file %s", self.header[key]['value'], key, self.header[key]['constraint'], self.filename)
-                log.error("")
-
-        return 0
diff --git a/Tools/ART/python/ART/art_misc.py b/Tools/ART/python/ART/art_misc.py
deleted file mode 100644
index 0e187b8a9bae..000000000000
--- a/Tools/ART/python/ART/art_misc.py
+++ /dev/null
@@ -1,281 +0,0 @@
-#!/usr/bin/env python
-# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
-"""Miscellaneous functions."""
-
-__author__ = "Tulay Cuhadar Donszelmann <tcuhadar@cern.ch>"
-
-import concurrent.futures
-import logging
-import os
-import shlex
-import subprocess
-import sys
-
-from datetime import datetime
-
-MODULE = "art.misc"
-EOS_MGM_URL = 'root://eosatlas.cern.ch/'
-
-KByte = 1024
-MByte = KByte * 1024
-GByte = MByte * 1024
-
-
-def set_log(kwargs):
-    """Set the default log level and message format depending on --verbose or --quiet options."""
-    level = logging.DEBUG if kwargs['verbose'] else logging.WARN if kwargs['quiet'] else logging.INFO
-    log = logging.getLogger("art")
-    log.setLevel(level)
-
-    # create and attach new handler, disable propagation to root logger to avoid double messages
-    handler = logging.StreamHandler(sys.stdout)
-    format_string = "%(asctime)s %(name)15s.%(funcName)-15s %(levelname)8s %(message)s"
-    date_format_string = None
-    formatter = logging.Formatter(format_string, date_format_string)
-    handler.setFormatter(formatter)
-    log.addHandler(handler)
-    log.propagate = False
-
-
-def get_atlas_env():
-    """Get all environment variables."""
-    log = logging.getLogger(MODULE)
-    try:
-        nightly_release = os.environ['AtlasBuildBranch']
-        project = os.environ['AtlasProject']
-        platform = os.environ[project + '_PLATFORM']
-        nightly_tag = os.environ['AtlasBuildStamp']
-        return (nightly_release, project, platform, nightly_tag)
-    except KeyError, e:
-        log.critical("Environment variable not set %s", e)
-        sys.exit(1)
-
-
-def run_command(cmd, dir=None, shell=False, env=None, verbose=True):
-    """
-    Run the given command locally.
-
-    The command runs as separate subprocesses for every piped command.
-    Returns a tuple of (exit_code, output, err, cmd, start_time, end_time).
-    """
-    # leave at print for basic debugging, log sometimes lost
-    start_time = datetime.now()
-    if verbose:
-        print "Execute:", cmd
-    if "|" in cmd:
-        cmd_parts = cmd.split('|')
-    else:
-        cmd_parts = []
-        cmd_parts.append(cmd)
-    i = 0
-    p = {}
-    for cmd_part in cmd_parts:
-        cmd_part = cmd_part.strip()
-        if i == 0:
-            p[i] = subprocess.Popen(shlex.split(cmd_part), stdin=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=dir, shell=shell, env=env)
-        else:
-            p[i] = subprocess.Popen(shlex.split(cmd_part), stdin=p[i - 1].stdout, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=dir, shell=shell, env=env)
-        i = i + 1
-    (output, err) = p[i - 1].communicate()
-    exit_code = p[0].wait()
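-    # NOTE: the exit code is taken from the first process of the pipe, the output from the last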
-    end_time = datetime.now()
-
-    return (exit_code, str(output), str(err), cmd, start_time, end_time)
-
-
-def run_command_parallel(cmd, nthreads, ncores, dir=None, shell=False, env=None, verbose=True):
-    """
-    Run the given command locally in parallel.
-
-    The command runs as separate subprocesses for every piped command.
-    Returns a tuple of (exit_code, output, err, cmd, start_time, end_time).
-    """
-    start_time = datetime.now()
-    log = logging.getLogger(MODULE)
-    ncores = min(ncores, nthreads)
-
-    if env is None:
-        env = os.environ.copy()
-
-    env['ArtThreads'] = str(nthreads)
-    env['ArtCores'] = str(ncores)
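-    # the command is run once with ArtProcess=start, then nthreads times with
-    # ArtProcess=<index> on a pool of ncores threads, and once with ArtProcess=end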
-
-    # Results
-    full_exit_code = 0
-    full_out = ''
-    full_err = ''
-
-    # Start
-    env['ArtProcess'] = "start"
-    (exit_code, out, err, command, start_time_start, end_time_start) = run_command(cmd, dir=dir, shell=shell, env=env, verbose=verbose)
-    full_exit_code = full_exit_code if exit_code == 0 else exit_code
-    full_out += "-+-art-process start out " + start_time_start.strftime('%Y-%m-%dT%H:%M:%S') + "\n"
-    full_out += out
-    full_out += "---art-process start out " + end_time_start.strftime('%Y-%m-%dT%H:%M:%S') + "\n"
-
-    full_err += "-+-art-process start err " + start_time_start.strftime('%Y-%m-%dT%H:%M:%S') + "\n"
-    full_err += err
-    full_err += "---art-process start err " + end_time_start.strftime('%Y-%m-%dT%H:%M:%S') + "\n"
-
-    log.info("Creating executor with cores: %d", ncores)
-    executor = concurrent.futures.ThreadPoolExecutor(ncores)
-    future_set = []
-
-    # Processing
-    log.info("Running threads: %d", nthreads)
-    for index in range(nthreads):
-        process_env = env.copy()
-        process_env['ArtProcess'] = str(index)
-        future_set.append(executor.submit(run_command, cmd, dir=dir, shell=shell, env=process_env, verbose=verbose))
-
-    log.info("Waiting for threads to finish...")
-    concurrent.futures.wait(future_set)
-    for index, future in enumerate(future_set):
-        (exit_code, out, err, command, start_time_process, end_time_process) = future.result()
-        full_exit_code = full_exit_code if exit_code == 0 else exit_code
-        full_out += "-+-art-process " + str(index) + " out " + start_time_process.strftime('%Y-%m-%dT%H:%M:%S') + "\n"
-        full_out += out
-        full_out += "---art-process " + str(index) + " out " + end_time_process.strftime('%Y-%m-%dT%H:%M:%S') + "\n"
-
-        full_err += "-+-art-process " + str(index) + " err " + start_time_process.strftime('%Y-%m-%dT%H:%M:%S') + "\n"
-        full_err += err
-        full_err += "---art-process " + str(index) + " err " + end_time_process.strftime('%Y-%m-%dT%H:%M:%S') + "\n"
-
-    # End
-    env['ArtProcess'] = "end"
-    (exit_code, out, err, command, start_time_end, end_time_end) = run_command(cmd, dir=dir, shell=shell, env=env, verbose=verbose)
-    full_exit_code = full_exit_code if exit_code == 0 else exit_code
-    full_out += "-+-art-process end out " + start_time_end.strftime('%Y-%m-%dT%H:%M:%S') + "\n"
-    full_out += out
-    full_out += "---art-process end out " + end_time_end.strftime('%Y-%m-%dT%H:%M:%S') + "\n"
-
-    full_err += "-+-art-process end err " + start_time_end.strftime('%Y-%m-%dT%H:%M:%S') + "\n"
-    full_err += err
-    full_err += "---art-process end err " + end_time_end.strftime('%Y-%m-%dT%H:%M:%S') + "\n"
-
-    end_time = datetime.now()
-
-    return (full_exit_code, full_out, full_err, cmd, start_time, end_time)
-
-
-def is_exe(path):
-    """Return True if path is executable."""
-    return os.path.isfile(path) and os.access(path, os.X_OK)
-
-
-def make_executable(path):
-    """Make file executable (chmod +x)."""
-    mode = os.stat(path).st_mode
-    mode |= (mode & 0o444) >> 2    # copy R bits to X
-    os.chmod(path, mode)
-
-
-def mkdir(path):
-    """Make (missing) directories."""
-    log = logging.getLogger(MODULE)
-    if path.startswith('/eos'):
-        mkdir_cmd = 'eos ' + EOS_MGM_URL + ' mkdir -p'
-    else:
-        mkdir_cmd = 'mkdir -p'
-
-    if mkdir_cmd is not None:
-        (exit_code, out, err, command, start_time, end_time) = run_command(' '.join((mkdir_cmd, path)))
-        if exit_code != 0:
-            log.error("Mkdir Error: %d %s %s", exit_code, out, err)
-            return exit_code
-
-    return 0
-
-
-def ls(path):
-    """List files in directroy."""
-    if path.startswith('/eos'):
-        ls_cmd = 'eos ' + EOS_MGM_URL + ' ls ' + path + '/'
-    else:
-        ls_cmd = 'ls ' + path + '/'
-
-    (exit_code, out, err, command, start_time, end_time) = run_command(ls_cmd)
-    if exit_code == 0:
-        print out
-        print err
-
-    return exit_code
-
-
-def cp(src, dst):
-    """Copy files to directory."""
-    log = logging.getLogger(MODULE)
-    if dst.startswith('/eos'):
-        # check which xrdcp we are running
-        (exit_code, out, err, command, start_time, end_time) = run_command('which xrdcp')
-        print out
-        print err
-
-        # check which version of xrdcp we are running
-        (exit_code, out, err, command, start_time, end_time) = run_command('xrdcp --version')
-        print out
-        print err
-
-        cmd = ' '.join(('xrdcp -f -N -r -p -v', src, EOS_MGM_URL + dst + '/'))
-    else:
-        cmd = ' '.join(('xrdcp -f -N -r -p -v', src, dst + '/'))
-
-    # run the actual command
-    log.info("Using: %s", cmd)
-    (exit_code, exit_out, exit_err, command, start_time, end_time) = run_command(cmd)
-    if exit_code != 0:
-        log.error("COPY to DST Error: %d %s %s", exit_code, exit_out, exit_err)
-
-    return exit_code
-
-
-def count_files(path):
-    """Count number of files."""
-    log = logging.getLogger(MODULE)
-    if path.startswith('/eos'):
-        cmd = ' '.join(('eos', EOS_MGM_URL, 'find', path, '|', 'wc', '-l'))
-    else:
-        cmd = ' '.join(('find', path, '|', 'wc', '-l'))
-
-    (exit_code, out, err, command, start_time, end_time) = run_command(cmd)
-    if exit_code == 0:
-        nFiles = int(out)
-        return nFiles
-
-    log.error("Error retrieving number of files on %s, %s", path, err)
-    return -1
-
-
-def touch(fname, times=None):
-    """Touch a file."""
-    with open(fname, 'a'):
-        os.utime(fname, times)
-
-
-def rm(fname):
-    """Remove a file."""
-    try:
-        os.remove(fname)
-    except OSError:
-        pass
-
-
-def which(program):
-    """Show which program is actually found on the PATH."""
-    fpath, fname = os.path.split(program)
-    if fpath:
-        if is_exe(program):
-            return program
-    else:
-        for path in os.environ["PATH"].split(os.pathsep):
-            path = path.strip('"')
-            exe_file = os.path.join(path, program)
-            if is_exe(exe_file):
-                return exe_file
-
-    return None
-
-
-def memory(scale=1):
-    """Return free memory."""
-    return os.sysconf('SC_PHYS_PAGES') * os.sysconf('SC_PAGE_SIZE') / scale
diff --git a/Tools/ART/python/ART/art_rucio.py b/Tools/ART/python/ART/art_rucio.py
deleted file mode 100755
index f099d80921bf..000000000000
--- a/Tools/ART/python/ART/art_rucio.py
+++ /dev/null
@@ -1,268 +0,0 @@
-#!/usr/bin/env python
-# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
-"""Class to interact with RUCIO."""
-
-__author__ = "Tulay Cuhadar Donszelmann <tcuhadar@cern.ch>"
-
-import json
-import logging
-import os
-import re
-import tempfile
-
-try:
-    import rucio.client
-    RUCIO = True
-except ImportError:
-    RUCIO = False
-
-from art_misc import run_command
-
-MODULE = "art.rucio"
-
-
-class ArtRucio(object):
-    """Class to interact with RUCIO."""
-
-    ART_JOB = 'art-job.json'
-    ATHENA_STDOUT = 'athena_stdout.txt'
-    JSON = '_EXT0'
-
-    def __init__(self, art_directory, nightly_release, project, platform, nightly_tag):
-        """Keep arguments."""
-        self.art_directory = art_directory
-        self.nightly_release = nightly_release
-        self.project = project
-        self.platform = platform
-        self.nightly_tag = nightly_tag
-
-        self.table = None
-
-    def exit_if_no_rucio(self):
-        """Exit if RUCIO is not available."""
-        log = logging.getLogger(MODULE)
-        if not RUCIO:
-            log.critical("RUCIO not available")
-            exit(1)
-
-    def get_scope(self, user):
-        """Return scope."""
-        return '.'.join(('user', user))
-
-    def download(self, did, dst_dir, shell=False):
-        """Download did into temp directory."""
-        log = logging.getLogger(MODULE)
-        self.exit_if_no_rucio()
-
-        # rucio downloads cache properly
-        log.debug("DID = %s", did)
-        log.debug("Shell = %s", shell)
-        env = os.environ.copy()
-        if shell:
-            cmd = ' '.join((os.path.join(self.art_directory, 'art-download.sh'), did, dst_dir))
-            env['PATH'] = '.:' + env['PATH']
-        else:
-            cmd = ' '.join(('rucio', 'download', '--dir', dst_dir, did))
-
-        (exit_code, out, err, command, start_time, end_time) = run_command(cmd, env=env)
-        if (exit_code != 0):
-            log.error(err)
-        log.info(out)
-        return exit_code
-
-    def xrdcp(self, src, dst, force=False, recursive=False, verbose=False):
-        """Copy using xrdcp."""
-        if src is None or dst is None:
-            return 1
-        log = logging.getLogger(MODULE)
-        cmd = ' '.join(('xrdcp -N', '-f' if force else '', '-r' if recursive else '', '-v' if verbose else '', src, dst))
-        log.debug(cmd)
-        (exit_code, out, err, command, start_time, end_time) = run_command(cmd, verbose=False)
-        if exit_code != 0:
-            log.error(err)
-        # seems to write empty lines
-        # log.info(out)
-        return exit_code
-
-    def __parse_outfile(self, outfile):
-        """Parse outfile and return tuple (sequence_tag, single_index) or None."""
-        #
-        # Matching: user.artprod.atlas.master.Athena.x86_64-slc6-gcc62-opt.2018-01-21T2301.284099.MuonRecRTT.6.log.13062437.000001.log.tgz
-        #           user.artprod.atlas.master.Athena.x86_64-slc6-gcc62-opt.2018-01-21T2301.284099.MuonRecRTT.6
-        #           user.artprod.atlas.master.Athena.x86_64-slc6-gcc62-opt.2018-01-19T2301.283573.TrigAnalysisTest
-        #
-        PATTERN = r"user\.([^\.]+)\.([^\.]+)\." + self.nightly_release + "\." + self.project + "\." + self.platform + "\." + self.nightly_tag + "\.(.+)"
-        match = re.search(PATTERN, outfile)
-        if not match:
-            return None
-
-        (user, experiment, rest) = match.groups()
-
-        items = rest.split(".")
-        sequence_tag = items[0] if len(items) > 0 else -1
-        try:
-            single_index = int(items[2]) if len(items) > 2 else -1
-        except ValueError:
-            single_index = -1
-
-        if single_index < 0:
-            grid_index = int(items[4]) if len(items) > 4 else -1
-        else:
-            grid_index = int(items[5]) if len(items) > 5 else -1
-
-        # print outfile, sequence_tag, single_index, grid_index
-
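-        # e.g. the first example above yields ('284099', 6, 1)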
-        return (sequence_tag, single_index, grid_index)
-
-    def get_sequence_tag(self, outfile):
-        """Return sequence tag or None."""
-        result = self.__parse_outfile(outfile)
-        return result[0] if result is not None else None
-
-    def get_single_index(self, outfile):
-        """Return single index or -1."""
-        result = self.__parse_outfile(outfile)
-        return result[1] if result is not None else -1
-
-    def get_grid_index(self, outfile):
-        """Return frid index or -1."""
-        result = self.__parse_outfile(outfile)
-        return result[2] if result is not None else -1
-
-    def get_outfile_name(self, user, package, sequence_tag, test_name=None, nightly_tag=None):
-        """Create outfile name based on parameters."""
-        nightly_tag = self.nightly_tag if nightly_tag is None else nightly_tag
-        outfile = '.'.join(('user', user, 'atlas', self.nightly_release, self.project, self.platform, nightly_tag, sequence_tag, package))
-        return outfile if test_name is None else '.'.join((outfile, test_name))
-
-    def get_outfiles(self, user, package, nightly_tag=None):
-        """
-        Create list of outfiles from parameters.
-
-        example: ['user.artprod.atlas.master.Athena.x86_64-slc6-gcc62-opt.2018-01-21T2301.284099.MuonRecRTT.3']
-        """
-        log = logging.getLogger(MODULE)
-        nightly_tag = self.nightly_tag if nightly_tag is None else nightly_tag
-
-        self.exit_if_no_rucio()
-        rucio_client = rucio.client.Client()
-
-        result = []
-
-        # look for "batch" outfile, and take latest (by sequence tag)
-        pattern = self.get_outfile_name(user, package, '*', None, nightly_tag)
-        outfile = None
-        sequence = None
-        log.debug("Pattern 1 %s", pattern)
-        for out in rucio_client.list_dids(self.get_scope(user), {'name': '.'.join((pattern, 'log'))}):
-            sequence_tag = self.get_sequence_tag(out)
-            if sequence is None or sequence_tag > sequence:
-                outfile = os.path.splitext(out)[0]
-                sequence = sequence_tag
-
-        if outfile is not None:
-            result.append(outfile)
-
-        # look for "single" outfile, deduce sequence_tag
-        pattern = self.get_outfile_name(user, package, '*', '*', nightly_tag)
-        outfile = None
-        sequence = None
-        log.debug("Pattern 2 %s", pattern)
-        for out in rucio_client.list_dids(self.get_scope(user), {'name': '.'.join((pattern, 'log'))}):
-            sequence_tag = self.get_sequence_tag(out)
-            if sequence is None or sequence_tag > sequence:
-                outfile = os.path.splitext(out)[0]
-                sequence = sequence_tag
-
-        if outfile is not None:
-            log.debug("Found %s", outfile)
-            sequence_tag = self.get_sequence_tag(outfile)
-            if sequence_tag is not None:
-                # found sequence_tag, find all 'single' outfiles
-                pattern = self.get_outfile_name(user, package, sequence_tag, '*', nightly_tag)
-                log.debug("Pattern 3 %s", pattern)
-                for out in rucio_client.list_dids(self.get_scope(user), {'name': '.'.join((pattern, 'log'))}):
-                    outfile = os.path.splitext(out)[0]
-                    log.debug("Adding 'single': %s", outfile)
-                    result.append(outfile)
-
-        return result
-
-    def get_table(self, user, package, nightly_tag=None, shell=False, tmp=None):
-        """Get full table with grid_index, single_index and test_name for particular package and nightly_tag."""
-        log = logging.getLogger(MODULE)
-
-        if self.table is not None:
-            return self.table
-
-        self.exit_if_no_rucio()
-
-        tmp = tempfile.gettempdir() if tmp is None else tmp
-
-        table = []
-
-        nightly_tag = self.nightly_tag if nightly_tag is None else nightly_tag
-
-        outfiles = self.get_outfiles(user, package, nightly_tag)
-
-        outfiles_str = [x + ArtRucio.JSON for x in outfiles]
-        if outfiles_str:
-            outfiles_str = ' '.join(outfiles_str)
-
-            dst_dir = tmp
-
-            log.debug("Shell = %s", shell)
-            exit_code = self.download(outfiles_str, dst_dir, shell)
-            if exit_code != 0:
-                log.error("Failed to execute rucio download %d", exit_code)
-                return table
-
-            for outfile in outfiles:
-                single_index = self.get_single_index(outfile)
-
-                json_directory = os.path.join(dst_dir, outfile + ArtRucio.JSON)
-                if not os.path.isdir(json_directory):
-                    log.debug("Adding, single_index: %d, outfile: %s", single_index, outfile)
-                    table.append({
-                        'single_index': single_index,
-                        'grid_index': -1,
-                        'file_index': -1,
-                        'job_index': -1,
-                        'outfile': outfile,
-                        'job_name': None
-                    })
-                    continue
-
-                for json_file in os.listdir(json_directory):
-                    if json_file.endswith(".json"):
-                        json_path = os.path.join(json_directory, json_file)
-                        if os.path.isfile(json_path):
-                            with open(json_path) as json_fd:
-                                info = json.load(json_fd)
-                                job_name = os.path.splitext(info['name'])[0]
-
-                                # Match: user.artprod.13199077.EXT0._000002.art-job.json
-                                # Match: user.artprod.13199077.EXT0._000003.art-job.json.4
-                                # job_index = 13199077, grid_index = 3, file_index = 4
-                                match = re.search(r"user\.([^\.]+)\.(\d+)\.EXT0\._(\d+)\.art-job.json(?:\.(\d+))?", json_file)
-                                if match:
-                                    job_index = int(match.group(2))
-                                    grid_index = int(match.group(3))
-                                    file_index = -1 if match.group(4) is None else int(match.group(4))
-                                else:
-                                    job_index = -1
-                                    grid_index = -1
-                                    file_index = -1
-
-                                log.debug("Adding, single_index: %d, grid_index: %d, file_index: %d, job_index %d, outfile: %s, job_name: %s", single_index, grid_index, file_index, job_index, outfile, job_name)
-                                table.append({
-                                    'single_index': single_index,
-                                    'grid_index': grid_index,
-                                    'file_index': file_index,
-                                    'job_index': job_index,
-                                    'outfile': outfile,
-                                    'job_name': job_name
-                                })
-
-        self.table = table
-        return table
diff --git a/Tools/ART/python/ART/docopt.py b/Tools/ART/python/ART/docopt.py
deleted file mode 100644
index 4cb99026f86d..000000000000
--- a/Tools/ART/python/ART/docopt.py
+++ /dev/null
@@ -1,590 +0,0 @@
-"""
-Pythonic command-line interface parser that will make you smile.
-
- * http://docopt.org
- * Repository and issue-tracker: https://github.com/docopt/docopt
- * Licensed under terms of MIT license (see LICENSE-MIT)
- * Copyright (c) 2013 Vladimir Keleshev, vladimir@keleshev.com
-
-"""
-import sys
-import re
-
-
-__all__ = ('docopt',)
-__version__ = '0.6.2'
-
-
-class DocoptLanguageError(Exception):
-    """Error in construction of usage-message by developer."""
-
-
-class DocoptExit(SystemExit):
-    """Exit in case user invoked program with incorrect arguments."""
-
-    usage = ''
-
-    def __init__(self, message=''):
-        SystemExit.__init__(self, (message + '\n' + self.usage).strip())
-
-
-class Pattern(object):
-
-    def __eq__(self, other):
-        return repr(self) == repr(other)
-
-    def __hash__(self):
-        return hash(repr(self))
-
-    def fix(self):
-        self.fix_identities()
-        self.fix_repeating_arguments()
-        return self
-
-    def fix_identities(self, uniq=None):
-        """Make pattern-tree tips point to same object if they are equal."""
-        if not hasattr(self, 'children'):
-            return self
-        uniq = list(set(self.flat())) if uniq is None else uniq
-        for i, child in enumerate(self.children):
-            if not hasattr(child, 'children'):
-                assert child in uniq
-                self.children[i] = uniq[uniq.index(child)]
-            else:
-                child.fix_identities(uniq)
-
-    def fix_repeating_arguments(self):
-        """Fix elements that should accumulate/increment values."""
-        either = [list(child.children) for child in transform(self).children]
-        for case in either:
-            for e in [child for child in case if case.count(child) > 1]:
-                if type(e) is Argument or type(e) is Option and e.argcount:
-                    if e.value is None:
-                        e.value = []
-                    elif type(e.value) is not list:
-                        e.value = e.value.split()
-                if type(e) is Command or type(e) is Option and e.argcount == 0:
-                    e.value = 0
-        return self
-
-
-def transform(pattern):
-    """Expand pattern into an (almost) equivalent one, but with single Either.
-
-    Example: ((-a | -b) (-c | -d)) => (-a -c | -a -d | -b -c | -b -d)
-    Quirks: [-a] => (-a), (-a...) => (-a -a)
-
-    """
-    result = []
-    groups = [[pattern]]
-    while groups:
-        children = groups.pop(0)
-        parents = [Required, Optional, OptionsShortcut, Either, OneOrMore]
-        if any(t in map(type, children) for t in parents):
-            child = [c for c in children if type(c) in parents][0]
-            children.remove(child)
-            if type(child) is Either:
-                for c in child.children:
-                    groups.append([c] + children)
-            elif type(child) is OneOrMore:
-                groups.append(child.children * 2 + children)
-            else:
-                groups.append(child.children + children)
-        else:
-            result.append(children)
-    return Either(*[Required(*e) for e in result])
-
-
-class LeafPattern(Pattern):
-    """Leaf/terminal node of a pattern tree."""
-
-    def __init__(self, name, value=None):
-        self.name, self.value = name, value
-
-    def __repr__(self):
-        return '%s(%r, %r)' % (self.__class__.__name__, self.name, self.value)
-
-    def flat(self, *types):
-        return [self] if not types or type(self) in types else []
-
-    def match(self, left, collected=None):
-        collected = [] if collected is None else collected
-        pos, match = self.single_match(left)
-        if match is None:
-            return False, left, collected
-        left_ = left[:pos] + left[pos + 1:]
-        same_name = [a for a in collected if a.name == self.name]
-        if type(self.value) in (int, list):
-            if type(self.value) is int:
-                increment = 1
-            else:
-                increment = ([match.value] if type(match.value) is str
-                             else match.value)
-            if not same_name:
-                match.value = increment
-                return True, left_, collected + [match]
-            same_name[0].value += increment
-            return True, left_, collected
-        return True, left_, collected + [match]
-
-
-class BranchPattern(Pattern):
-    """Branch/inner node of a pattern tree."""
-
-    def __init__(self, *children):
-        self.children = list(children)
-
-    def __repr__(self):
-        return '%s(%s)' % (self.__class__.__name__,
-                           ', '.join(repr(a) for a in self.children))
-
-    def flat(self, *types):
-        if type(self) in types:
-            return [self]
-        return sum([child.flat(*types) for child in self.children], [])
-
-
-class Argument(LeafPattern):
-
-    def single_match(self, left):
-        for n, pattern in enumerate(left):
-            if type(pattern) is Argument:
-                return n, Argument(self.name, pattern.value)
-        return None, None
-
-    @classmethod
-    def parse(class_, source):
-        name = re.findall('(<\S*?>)', source)[0]
-        value = re.findall('\[default: (.*)\]', source, flags=re.I)
-        return class_(name, value[0] if value else None)
-
-
-class Command(Argument):
-
-    def __init__(self, name, value=False):
-        self.name, self.value = name, value
-
-    def single_match(self, left):
-        for n, pattern in enumerate(left):
-            if type(pattern) is Argument:
-                if pattern.value == self.name:
-                    return n, Command(self.name, True)
-                else:
-                    break
-        return None, None
-
-
-class Option(LeafPattern):
-
-    def __init__(self, short=None, long=None, argcount=0, value=False):
-        assert argcount in (0, 1)
-        self.short, self.long, self.argcount = short, long, argcount
-        self.value = None if value is False and argcount else value
-
-    @classmethod
-    def parse(class_, option_description):
-        short, long, argcount, value = None, None, 0, False
-        options, _, description = option_description.strip().partition('  ')
-        options = options.replace(',', ' ').replace('=', ' ')
-        for s in options.split():
-            if s.startswith('--'):
-                long = s
-            elif s.startswith('-'):
-                short = s
-            else:
-                argcount = 1
-        if argcount:
-            matched = re.findall('\[default: (.*)\]', description, flags=re.I)
-            value = matched[0] if matched else None
-        return class_(short, long, argcount, value)
-
-    def single_match(self, left):
-        for n, pattern in enumerate(left):
-            if self.name == pattern.name:
-                return n, pattern
-        return None, None
-
-    @property
-    def name(self):
-        return self.long or self.short
-
-    def __repr__(self):
-        return 'Option(%r, %r, %r, %r)' % (self.short, self.long,
-                                           self.argcount, self.value)
-
-
-class Required(BranchPattern):
-
-    def match(self, left, collected=None):
-        collected = [] if collected is None else collected
-        l = left
-        c = collected
-        for pattern in self.children:
-            matched, l, c = pattern.match(l, c)
-            if not matched:
-                return False, left, collected
-        return True, l, c
-
-
-class Optional(BranchPattern):
-
-    def match(self, left, collected=None):
-        collected = [] if collected is None else collected
-        for pattern in self.children:
-            m, left, collected = pattern.match(left, collected)
-        return True, left, collected
-
-
-class OptionsShortcut(Optional):
-    """Marker/placeholder for [options] shortcut."""
-
-
-class OneOrMore(BranchPattern):
-
-    def match(self, left, collected=None):
-        assert len(self.children) == 1
-        collected = [] if collected is None else collected
-        l = left
-        c = collected
-        l_ = None
-        matched = True
-        times = 0
-        while matched:
-            # could it be that something didn't match but changed l or c?
-            matched, l, c = self.children[0].match(l, c)
-            times += 1 if matched else 0
-            if l_ == l:
-                break
-            l_ = l
-        if times >= 1:
-            return True, l, c
-        return False, left, collected
-
-
-class Either(BranchPattern):
-
-    def match(self, left, collected=None):
-        collected = [] if collected is None else collected
-        outcomes = []
-        for pattern in self.children:
-            matched, _, _ = outcome = pattern.match(left, collected)
-            if matched:
-                outcomes.append(outcome)
-        if outcomes:
-            return min(outcomes, key=lambda outcome: len(outcome[1]))
-        return False, left, collected
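-
-# Doctest-style illustration (in the spirit of docopt's own tests, not from
-# the original module): Either returns the successful outcome that consumes
-# the most of `left`, i.e. the one with the shortest remainder:
-#
-#     >>> Either(Option('-a'), Argument('X')).match([Argument(None, 'val')])
-#     (True, [], [Argument('X', 'val')])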
-
-
-class Tokens(list):
-
-    def __init__(self, source, error=DocoptExit):
-        self += source.split() if hasattr(source, 'split') else source
-        self.error = error
-
-    @staticmethod
-    def from_pattern(source):
-        source = re.sub(r'([\[\]\(\)\|]|\.\.\.)', r' \1 ', source)
-        source = [s for s in re.split('\s+|(\S*<.*?>)', source) if s]
-        return Tokens(source, error=DocoptLanguageError)
-
-    def move(self):
-        return self.pop(0) if len(self) else None
-
-    def current(self):
-        return self[0] if len(self) else None
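-
-# For instance (an illustrative doctest, not from the original module),
-# brackets, pipes and ellipses become separate tokens while <...> names
-# stay whole:
-#
-#     >>> Tokens.from_pattern('(--foo | <bar>)...')
-#     ['(', '--foo', '|', '<bar>', ')', '...']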
-
-
-def parse_long(tokens, options):
-    """Parse long according to definition.
-
-    long ::= '--' chars [ ( ' ' | '=' ) chars ] ;
-    """
-    long, eq, value = tokens.move().partition('=')
-    assert long.startswith('--')
-    value = None if eq == value == '' else value
-    similar = [o for o in options if o.long == long]
-    if tokens.error is DocoptExit and similar == []:  # if no exact match
-        similar = [o for o in options if o.long and o.long.startswith(long)]
-    if len(similar) > 1:  # might be simply specified ambiguously 2+ times?
-        raise tokens.error('%s is not a unique prefix: %s?' %
-                           (long, ', '.join(o.long for o in similar)))
-    elif len(similar) < 1:
-        argcount = 1 if eq == '=' else 0
-        o = Option(None, long, argcount)
-        options.append(o)
-        if tokens.error is DocoptExit:
-            o = Option(None, long, argcount, value if argcount else True)
-    else:
-        o = Option(similar[0].short, similar[0].long,
-                   similar[0].argcount, similar[0].value)
-        if o.argcount == 0:
-            if value is not None:
-                raise tokens.error('%s must not have an argument' % o.long)
-        else:
-            if value is None:
-                if tokens.current() in [None, '--']:
-                    raise tokens.error('%s requires argument' % o.long)
-                value = tokens.move()
-        if tokens.error is DocoptExit:
-            o.value = value if value is not None else True
-    return [o]
-
-
-def parse_shorts(tokens, options):
-    """Parse shorts according to definition.
-
-    shorts ::= '-' ( chars )* [ [ ' ' ] chars ] ;
-    """
-    token = tokens.move()
-    assert token.startswith('-') and not token.startswith('--')
-    left = token.lstrip('-')
-    parsed = []
-    while left != '':
-        short, left = '-' + left[0], left[1:]
-        similar = [o for o in options if o.short == short]
-        if len(similar) > 1:
-            raise tokens.error('%s is specified ambiguously %d times' %
-                               (short, len(similar)))
-        elif len(similar) < 1:
-            o = Option(short, None, 0)
-            options.append(o)
-            if tokens.error is DocoptExit:
-                o = Option(short, None, 0, True)
-        else:  # why is copying necessary here?
-            o = Option(short, similar[0].long,
-                       similar[0].argcount, similar[0].value)
-            value = None
-            if o.argcount != 0:
-                if left == '':
-                    if tokens.current() in [None, '--']:
-                        raise tokens.error('%s requires argument' % short)
-                    value = tokens.move()
-                else:
-                    value = left
-                    left = ''
-            if tokens.error is DocoptExit:
-                o.value = value if value is not None else True
-        parsed.append(o)
-    return parsed
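-
-# For example, stacked shorts expand to one Option per letter (doctest-style
-# sketch; the options list stands in for the parsed Options: section):
-#
-#     >>> parse_shorts(Tokens('-ab'), [Option('-a'), Option('-b')])
-#     [Option('-a', None, 0, True), Option('-b', None, 0, True)]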
-
-
-def parse_pattern(source, options):
-    tokens = Tokens.from_pattern(source)
-    result = parse_expr(tokens, options)
-    if tokens.current() is not None:
-        raise tokens.error('unexpected ending: %r' % ' '.join(tokens))
-    return Required(*result)
-
-
-def parse_expr(tokens, options):
-    """Parse expr according to definition.
-
-    expr ::= seq ( '|' seq )* ;
-    """
-    seq = parse_seq(tokens, options)
-    if tokens.current() != '|':
-        return seq
-    result = [Required(*seq)] if len(seq) > 1 else seq
-    while tokens.current() == '|':
-        tokens.move()
-        seq = parse_seq(tokens, options)
-        result += [Required(*seq)] if len(seq) > 1 else seq
-    return [Either(*result)] if len(result) > 1 else result
-
-
-def parse_seq(tokens, options):
-    """Parse seq according to definition.
-
-    seq ::= ( atom [ '...' ] )* ;
-    """
-    result = []
-    while tokens.current() not in [None, ']', ')', '|']:
-        atom = parse_atom(tokens, options)
-        if tokens.current() == '...':
-            atom = [OneOrMore(*atom)]
-            tokens.move()
-        result += atom
-    return result
-
-
-def parse_atom(tokens, options):
-    """Parse atom according to definition.
-
-    atom ::= '(' expr ')' | '[' expr ']' | 'options'
-            | long | shorts | argument | command ;
-    """
-    token = tokens.current()
-    result = []
-    if token in '([':
-        tokens.move()
-        matching, pattern = {'(': [')', Required], '[': [']', Optional]}[token]
-        result = pattern(*parse_expr(tokens, options))
-        if tokens.move() != matching:
-            raise tokens.error("unmatched '%s'" % token)
-        return [result]
-    elif token == 'options':
-        tokens.move()
-        return [OptionsShortcut()]
-    elif token.startswith('--') and token != '--':
-        return parse_long(tokens, options)
-    elif token.startswith('-') and token not in ('-', '--'):
-        return parse_shorts(tokens, options)
-    elif token.startswith('<') and token.endswith('>') or token.isupper():
-        return [Argument(tokens.move())]
-    else:
-        return [Command(tokens.move())]
-
-
-def parse_argv(tokens, options, options_first=False):
-    """Parse argv argument vector.
-
-    If options_first:
-        argv ::= [ long | shorts ]* [ argument ]* [ '--' [ argument ]* ] ;
-    else:
-        argv ::= [ long | shorts | argument ]* [ '--' [ argument ]* ] ;
-    """
-    parsed = []
-    while tokens.current() is not None:
-        if tokens.current() == '--':
-            return parsed + [Argument(None, v) for v in tokens]
-        elif tokens.current().startswith('--'):
-            parsed += parse_long(tokens, options)
-        elif tokens.current().startswith('-') and tokens.current() != '-':
-            parsed += parse_shorts(tokens, options)
-        elif options_first:
-            return parsed + [Argument(None, v) for v in tokens]
-        else:
-            parsed.append(Argument(None, tokens.move()))
-    return parsed
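-
-# For example (doctest-style sketch with a known option '-v'; flags become
-# Options with value True, bare words become positional Arguments):
-#
-#     >>> parse_argv(Tokens('-v arg'), [Option('-v')])
-#     [Option('-v', None, 0, True), Argument(None, 'arg')]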
-
-
-def parse_defaults(doc):
-    defaults = []
-    for s in parse_section('options:', doc):
-        # FIXME corner case "bla: options: --foo"
-        _, _, s = s.partition(':')  # get rid of "options:"
-        split = re.split('\n[ \t]*(-\S+?)', '\n' + s)[1:]
-        split = [s1 + s2 for s1, s2 in zip(split[::2], split[1::2])]
-        options = [Option.parse(t) for t in split if t.startswith('-')]
-        defaults += options
-    return defaults
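-
-# For example (doctest-style sketch of a minimal Options: section):
-#
-#     >>> parse_defaults('Options:\n  -h, --help  Show help\n'
-#     ...                '  --speed=<kn>  Rate [default: 10]\n')
-#     [Option('-h', '--help', 0, False), Option(None, '--speed', 1, '10')]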
-
-
-def parse_section(name, source):
-    pattern = re.compile('^([^\n]*' + name + '[^\n]*\n?(?:[ \t].*?(?:\n|$))*)',
-                         re.IGNORECASE | re.MULTILINE)
-    return [s.strip() for s in pattern.findall(source)]
-
-
-def formal_usage(section):
-    _, _, section = section.partition(':')  # drop "usage:"
-    pu = section.split()
-    return '( ' + ' '.join(') | (' if s == pu[0] else s for s in pu[1:]) + ' )'
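-
-# For example, each usage line becomes one parenthesised alternative
-# (illustrative doctest, not from the original module):
-#
-#     >>> formal_usage('usage: prog go <x>\n       prog stop')
-#     '( go <x> ) | ( stop )'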
-
-
-def extras(help, version, options, doc):
-    if help and any((o.name in ('-h', '--help')) and o.value for o in options):
-        print(doc.strip("\n"))
-        sys.exit()
-    if version and any(o.name == '--version' and o.value for o in options):
-        print(version)
-        sys.exit()
-
-
-class Dict(dict):
-    def __repr__(self):
-        return '{%s}' % ',\n '.join('%r: %r' % i for i in sorted(self.items()))
-
-
-def docopt(doc, argv=None, help=True, version=None, options_first=False):
-    """Parse `argv` based on command-line interface described in `doc`.
-
-    `docopt` creates your command-line interface based on its
-    description that you pass as `doc`. Such description can contain
-    --options, <positional-argument>, commands, which could be
-    [optional], (required), (mutually | exclusive) or repeated...
-
-    Parameters
-    ----------
-    doc : str
-        Description of your command-line interface.
-    argv : list of str, optional
-        Argument vector to be parsed. sys.argv[1:] is used if not
-        provided.
-    help : bool (default: True)
-        Set to False to disable automatic help on -h or --help
-        options.
-    version : any object
-        If passed, the object will be printed if --version is in
-        `argv`.
-    options_first : bool (default: False)
-        Set to True to require options precede positional arguments,
-        i.e. to forbid options and positional arguments intermix.
-
-    Returns
-    -------
-    args : dict
-        A dictionary, where keys are names of command-line elements
-        such as e.g. "--verbose" and "<path>", and values are the
-        parsed values of those elements.
-
-    Example
-    -------
-    >>> from docopt import docopt
-    >>> doc = '''
-    ... Usage:
-    ...     my_program tcp <host> <port> [--timeout=<seconds>]
-    ...     my_program serial <port> [--baud=<n>] [--timeout=<seconds>]
-    ...     my_program (-h | --help | --version)
-    ...
-    ... Options:
-    ...     -h, --help  Show this screen and exit.
-    ...     --baud=<n>  Baudrate [default: 9600]
-    ... '''
-    >>> argv = ['tcp', '127.0.0.1', '80', '--timeout', '30']
-    >>> docopt(doc, argv)
-    {'--baud': '9600',
-     '--help': False,
-     '--timeout': '30',
-     '--version': False,
-     '<host>': '127.0.0.1',
-     '<port>': '80',
-     'serial': False,
-     'tcp': True}
-
-    See also
-    --------
-    * For video introduction see http://docopt.org
-    * Full documentation is available in README.rst as well as online
-      at https://github.com/docopt/docopt#readme
-
-    """
-    argv = sys.argv[1:] if argv is None else argv
-
-    usage_sections = parse_section('usage:', doc)
-    if len(usage_sections) == 0:
-        raise DocoptLanguageError('"usage:" (case-insensitive) not found.')
-    if len(usage_sections) > 1:
-        raise DocoptLanguageError('More than one "usage:" (case-insensitive).')
-    DocoptExit.usage = usage_sections[0]
-
-    options = parse_defaults(doc)
-    pattern = parse_pattern(formal_usage(DocoptExit.usage), options)
-    # [default] syntax for argument is disabled
-    # for a in pattern.flat(Argument):
-    #    same_name = [d for d in arguments if d.name == a.name]
-    #    if same_name:
-    #        a.value = same_name[0].value
-    argv = parse_argv(Tokens(argv), list(options), options_first)
-    pattern_options = set(pattern.flat(Option))
-    for options_shortcut in pattern.flat(OptionsShortcut):
-        doc_options = parse_defaults(doc)
-        options_shortcut.children = list(set(doc_options) - pattern_options)
-        # if any_options:
-        #    options_shortcut.children += [Option(o.short, o.long, o.argcount)
-        #                    for o in argv if type(o) is Option]
-    extras(help, version, argv, doc)
-    matched, left, collected = pattern.fix().match(argv)
-    if matched and left == []:  # better error message if left?
-        return Dict((a.name, a.value) for a in (pattern.flat() + collected))
-    raise DocoptExit()
diff --git a/Tools/ART/python/ART/docopt_dispatch.py b/Tools/ART/python/ART/docopt_dispatch.py
deleted file mode 100644
index 966386db60f5..000000000000
--- a/Tools/ART/python/ART/docopt_dispatch.py
+++ /dev/null
@@ -1,53 +0,0 @@
-"""Dispatch from command-line arguments to functions."""
-import re
-from collections import OrderedDict
-
-
-__all__ = ('dispatch', 'DispatchError')
-__author__ = 'Vladimir Keleshev <vladimir@keleshev.com>'
-__version__ = '0.0.2'
-__license__ = 'LICENSE-MIT'
-__keywords__ = 'docopt dispatch function adapter kwargs'
-__url__ = 'https://github.com/halst/docopt-dispatch'
-
-
-class DispatchError(Exception):
-    """TBD."""
-
-    pass
-
-
-class Dispatch(object):
-
-    def __init__(self):
-        self._functions = OrderedDict()
-
-    def on(self, *patterns):
-        def decorator(function):
-            self._functions[patterns] = function
-            return function
-        return decorator
-
-    def __call__(self, *args, **kwargs):
-        from docopt import docopt
-        arguments = docopt(*args, **kwargs)
-        for patterns, function in self._functions.items():
-            if all(arguments[pattern] for pattern in patterns):
-                function(**self._kwargify(arguments))
-                return
-        raise DispatchError('None of the dispatch conditions %s was triggered'
-                            % self._formatted_patterns)
-
-    @property
-    def _formatted_patterns(self):
-        return ', '.join(' '.join(pattern)
-                         for pattern in self._functions.keys())
-
-    @staticmethod
-    def _kwargify(arguments):
-        def kwargify(string):
-            return re.sub('\W', '_', string).strip('_')
-        return dict((kwargify(key), value) for key, value in arguments.items())
-
-
-dispatch = Dispatch()
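-
-# A minimal usage sketch (names and import path are illustrative, not from
-# ART itself): each handler is registered for one or more docopt keys and is
-# invoked with the parsed, kwargified arguments once all its keys are truthy.
-#
-#     from docopt_dispatch import dispatch
-#
-#     @dispatch.on('serve', '<port>')
-#     def serve(port, **kwargs):
-#         print('serving on port', port)
-#
-#     if __name__ == '__main__':
-#         dispatch('Usage: prog serve <port>')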
diff --git a/Tools/ART/scripts/art-clean.py b/Tools/ART/scripts/art-clean.py
deleted file mode 100755
index 3f714b72146e..000000000000
--- a/Tools/ART/scripts/art-clean.py
+++ /dev/null
@@ -1,208 +0,0 @@
-#!/usr/bin/env python
-# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
-"""
-ART  - ATLAS Release Tester - Clean.
-
-Usage:
-  art-clean.py [-v -q --base-dir=<base_dir> --delete --days=<days> --eos --config=<file> --release --package=<package>] [<nightly_release> <project> [<platform>]]
-
-Options:
-  --base-dir=<base_dir>      Start search from basedir [default: /eos/atlas/atlascerngroupdisk/data-art/grid-output]
-  --config=<file>            art-configuration to retrieve packages and days [default: art-configuration.yml]
-  --delete                   Actually delete the directories to be cleaned
-  --days=<days>              Number of nightlies to keep [default: 7]
-  --eos                      Use eos commands
-  -h --help                  Show this screen
-  --package=<package>        Package to be cleaned up, no configuration
-  --release                  Clean up full release, no configuration
-  -q --quiet                 Show less information, only warnings and errors
-  -v --verbose               Show more information, debug level
-  --version                  Show version
-
-Arguments:
-  nightly_release            Name of the nightly release (e.g. 21.0)
-  project                    Project to clean (e.g. Athena)
-  platform                   Platform to clean [default: x86_64-slc6-gcc62-opt]
-
-Environment:
-  AtlasBuildBranch          Name of the nightly release (e.g. 21.0)
-  AtlasProject              Name of the project (e.g. Athena)
-  <AtlasProject>_PLATFORM   Platform (e.g. x86_64-slc6-gcc62-opt)
-"""
-
-__author__ = "Tulay Cuhadar Donszelmann <tcuhadar@cern.ch>"
-
-import datetime
-import logging
-import os
-import re
-import shutil
-import sys
-
-from ART.docopt import docopt
-from ART.art_misc import get_atlas_env, run_command
-from ART.art_configuration import ArtConfiguration
-
-MODULE = "art.clean"
-
-
-class ArtClean(object):
-    """Class to cleanup eos area."""
-
-    EOS_MGM_URL = 'root://eosatlas.cern.ch'
-
-    def __init__(self, arguments):
-        """Clean when more than 'days' old."""
-        log = logging.getLogger(MODULE)
-        self.verbose = arguments['--verbose']
-        self.eos = arguments['--eos']
-        default_days = int(arguments['--days'])
-        self.delete = arguments['--delete']
-        nightly_release = arguments['<nightly_release>']
-        project = arguments['<project>']
-        platform = arguments['<platform>']
-        base_dir = arguments['--base-dir']
-
-        # Check for missing arguments
-        if nightly_release is None:
-            (nightly_release, project, platform, dummy) = get_atlas_env()
-
-        if platform is None:
-            platform = 'x86_64-slc6-gcc62-opt'
-            log.info("Defaulting to platform %s", platform)
-
-        if self.eos:
-            # Test if we have access to kerberos
-            (code, out, err, command, start_time, end_time) = run_command('klist', verbose=self.verbose)
-            if code != 0:
-                log.critical("%s", err)
-                exit(1)
-
-        package = arguments['--package']
-        if package is not None:
-            self.clean_release(os.path.join(base_dir, nightly_release, project, platform), default_days, package)
-            return
-
-        if arguments['--release']:
-            self.clean_release(os.path.join(base_dir, nightly_release, project, platform), default_days)
-            return
-
-        config_file = arguments['--config']
-        config = ArtConfiguration(config_file)
-        for package in config.packages():
-            copy = config.get(nightly_release, project, platform, package, 'copy', False)
-            if copy:
-                days = config.get(nightly_release, project, platform, package, 'days', default_days)
-                self.clean_release(os.path.join(base_dir, nightly_release, project, platform), days, package)
-        return
-
-    def clean_release(self, release, days, package=None):
-        """Clean a release dir."""
-        log = logging.getLogger(MODULE)
-        log.debug("Starting to clean up release, keeping %d days, for %s in %s", days, "All" if package is None else package, release)
-        now = datetime.datetime.now()
-        count = 0
-        date = '1970-01-01'
-        for entry in reversed(self.listdirs(release)):
-            # Matches 2018-12-05T0345
-            match = re.match(r"(\d{4}-\d{2}-\d{2})T\d{4}", entry)
-            if match:
-                tag = os.path.join(release, entry)
-                # package_dir = tag for full release cleanup
-                package_dir = tag if package is None else os.path.join(tag, package)
-
-                # only count entries on different dates and where the package actually exists
-                if self.isdir(package_dir) and match.group(1) != date:
-                    count += 1
-                    date = match.group(1)
-
-                if count > days:
-                    # compare times
-                    dir_time = datetime.datetime.strptime(match.group(0), '%Y-%m-%dT%H%M')
-                    time_diff = now - dir_time
-                    if time_diff.days > days:
-                        log.info("- Nightly tag %d days old, removing %s for package %s", time_diff.days, package_dir, package if package is not None else 'All')
-                        self.remove_dir(package_dir)
-                        if package is not None and self.isempty(tag):
-                            log.info("- Nightly tag contains no more packages, removing %s for package %s", tag, package if package is not None else 'All')
-                            self.remove_dir(tag)
-                    else:
-                        log.debug("- Tag within %d days, keeping %s for package %s", days, tag, package if package is not None else 'All')
-                else:
-                    log.debug("- Tag within %d entries, keeping %s for package %s", days, tag, package if package is not None else 'All')
-
-        if package is not None and count == 0:
-            log.debug("No package %s found in any available nightly tag for %s", package, release)
-
-    def listdirs(self, directory):
-        """Return list of directories in directory."""
-        dirs = []
-        if self.isdir(directory):
-            for entry in self.listdir(directory):
-                path = os.path.join(directory, entry)
-                if self.isdir(path):
-                    dirs.append(entry)
-        return dirs
-
-    def isempty(self, directory):
-        """Return true if directory exists and contains no entries."""
-        if not self.isdir(directory):
-            return False
-        return len(self.listdir(directory)) == 0
-
-    def listdir(self, directory):
-        """Return list of entries in directory."""
-        log = logging.getLogger(MODULE)
-        if self.eos:
-            (code, out, err, command, start_time, end_time) = run_command('eos ' + ArtClean.EOS_MGM_URL + ' ls ' + directory, verbose=self.verbose)
-            if code == 0:
-                return out.splitlines()
-            log.info("eos listdir %d %s", code, err)
-            return []
-        else:
-            return os.listdir(directory)
-
-    def isdir(self, path):
-        """Return true is path is directory."""
-        log = logging.getLogger(MODULE)
-        if self.eos:
-            (code, out, err, command, start_time, end_time) = run_command('eos ' + ArtClean.EOS_MGM_URL + ' stat -d ' + path, verbose=self.verbose)
-            if code == 0:
-                return True
-            log.debug("eos isdir(%s) %d %s", path, code, err)
-            return False
-        else:
-            return os.path.isdir(path)
-
-    def remove_dir(self, directory):
-        """Remove directory and all below."""
-        log = logging.getLogger(MODULE)
-        if not self.isdir(directory):
-            return
-
-        if self.delete:
-            if self.eos:
-                (code, out, err, command, start_time, end_time) = run_command('eos ' + ArtClean.EOS_MGM_URL + ' rm -r ' + directory, verbose=self.verbose)
-                if code == 0:
-                    return True
-                log.info("eos rm -r (%s) %d %s", directory, code, err)
-            else:
-                shutil.rmtree(directory)
-
-
-if __name__ == '__main__':
-    if sys.version_info < (2, 7, 0):
-        sys.stderr.write("You need python 2.7 or later to run this script\n")
-        exit(1)
-
-    # NOTE: import should be here, to keep the order of the decorators (module first, art last and unused)
-    from art import __version__
-
-    logging.basicConfig()
-    log = logging.getLogger('art')
-
-    arguments = docopt(__doc__, version=os.path.splitext(os.path.basename(__file__))[0] + ' ' + __version__)
-    level = logging.DEBUG if arguments['--verbose'] else logging.WARN if arguments['--quiet'] else logging.INFO
-    log.setLevel(level)
-    ArtClean(arguments)
diff --git a/Tools/ART/scripts/art-diff.py b/Tools/ART/scripts/art-diff.py
deleted file mode 100755
index 31281893548f..000000000000
--- a/Tools/ART/scripts/art-diff.py
+++ /dev/null
@@ -1,278 +0,0 @@
-#!/usr/bin/env python
-# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
-"""
-ART  - ATLAS Release Tester - Diff.
-
-Usage:
-  art-diff.py [--diff-type=<diff_type> --file=<pattern>... --exclude=<pattern>... --platform-ref=<platform> --entries=<entries> --mode=<mode>] <nightly_release_ref> <project_ref> <nightly_tag_ref> <package>
-  art-diff.py [--diff-type=<diff_type> --file=<pattern>... --exclude=<pattern>... --entries=<entries> --mode=<mode>] <path> <ref_path>
-
-Options:
-  --diff-type=<diff_type>    Type of diff (e.g. diff-pool or diff-root) [default: diff-pool]
-  --entries=<entries>        Only diff over number of entries [default: -1]
-  --exclude=<pattern>...     Exclude test files according to pattern
-  --file=<pattern>...        Compare the following file patterns for diff-root [default: *AOD*.pool.root *ESD*.pool.root *HITS*.pool.root *RDO*.pool.root *TAG*.root]
-  -h --help                  Show this screen
-  --mode=<mode>              Sets the mode for diff-root {summary, detailed} [default: detailed]
-  --platform-ref=<platform>  Reference Platform [default: x86_64-slc6-gcc62-opt]
-  --test-name=<test_name>    Test name to compare
-  --version                  Show version
-
-Arguments:
-  path                       Directory or File to compare
-  nightly_release_ref        Reference Name of the nightly release (e.g. 21.0)
-  nightly_tag_ref            Reference Nightly tag (e.g. 2017-02-26T2119)
-  package                    Package of the test (e.g. Tier0ChainTests)
-  project_ref                Reference Name of the project (e.g. Athena)
-  ref_path                   Directory or File to compare to
-
-Environment:
-  AtlasBuildBranch           Name of the nightly release (e.g. 21.0)
-  AtlasProject               Name of the project (e.g. Athena)
-  <AtlasProject>_PLATFORM    Platform (e.g. x86_64-slc6-gcc62-opt)
-  AtlasBuildStamp            Nightly tag (e.g. 2017-02-26T2119)
-"""
-
-__author__ = "Tulay Cuhadar Donszelmann <tcuhadar@cern.ch>"
-
-import fnmatch
-import glob
-import os
-import re
-import shlex
-import subprocess
-import sys
-
-from ART.docopt import docopt
-
-VERSION = "0.10.16"
-ATHENA_STDOUT = "athena_stdout.txt"
-DEFAULT_ENTRIES = -1
-DEFAULT_MODE = "detailed"
-
-
-class ArtDiff(object):
-    """Class for comparing output files."""
-
-    EOS_OUTPUT_DIR = '/eos/atlas/atlascerngroupdisk/data-art/grid-output'
-
-    def __init__(self):
-        """Constructor of ArtDiff."""
-        self.default_file_patterns = ['*AOD*.pool.root', '*ESD*.pool.root', '*HITS*.pool.root', '*RDO*.pool.root', '*TAG*.root']
-
-    def parse(self, arguments):
-        """Called from comandline."""
-        diff_type = arguments['--diff-type']
-        files = self.default_file_patterns if diff_type == 'diff-pool' else list(set(arguments['--file']))
-        entries = arguments['--entries']
-        mode = arguments['--mode']
-        excludes = arguments['--exclude']
-        if arguments['<nightly_release_ref>'] is not None:
-            try:
-                nightly_release = os.environ['AtlasBuildBranch']
-                project = os.environ['AtlasProject']
-                platform = os.environ[project + '_PLATFORM']
-                nightly_tag = os.environ['AtlasBuildStamp']
-            except KeyError, e:
-                print "Environment variable not set", e
-                sys.exit(1)
-
-            nightly_release_ref = arguments['<nightly_release_ref>']
-            project_ref = arguments['<project_ref>']
-            platform_ref = arguments['--platform-ref']
-            nightly_tag_ref = arguments['<nightly_tag_ref>']
-
-            package = arguments['<package>']
-            print nightly_release, project, platform, nightly_tag, nightly_release_ref, project_ref, platform_ref, nightly_tag_ref
-            exit(self.diff(nightly_release, project, platform, nightly_tag, nightly_release_ref, project_ref, platform_ref, nightly_tag_ref, package, diff_type, files, excludes, entries=entries, mode=mode))
-
-        # directory compare
-        path = arguments['<path>']
-        ref_path = arguments['<ref_path>']
-
-        if os.path.isfile(path):
-            # file compare
-            if not os.path.isfile(ref_path):
-                print "Error: <ref_path> should be a file, if <path> is a file."
-                sys.exit(1)
-
-            exit(self.diff_file(path, ref_path, diff_type, entries=entries, mode=mode))
-
-        if os.path.isfile(ref_path):
-            print "Error: <ref_path> should be a directory, if <path> is a directory."
-            sys.exit(1)
-
-        # check if path contains "test_" entries
-        if len(glob.glob(os.path.join(path, 'test_*'))) > 0:
-            # directory compare
-            exit(self.diff_dirs(path, ref_path, diff_type, files, excludes, entries=entries, mode=mode))
-
-        # single test compare
-        exit(self.diff_test(path, ref_path, diff_type, files, entries=entries, mode=mode))
-
-    def diff(self, nightly_release, project, platform, nightly_tag, nightly_release_ref, project_ref, platform_ref, nightly_tag_ref, package, diff_type, files, excludes=[], entries=DEFAULT_ENTRIES, mode=DEFAULT_MODE):
-        """Run difference between two results."""
-        path = os.path.join(ArtDiff.EOS_OUTPUT_DIR, nightly_release, project, platform, nightly_tag, package)
-        ref_path = os.path.join(ArtDiff.EOS_OUTPUT_DIR, nightly_release_ref, project_ref, platform_ref, nightly_tag_ref, package)
-        return self.diff_dirs(path, ref_path, diff_type, files, excludes, entries=entries, mode=mode)
-
-    def diff_dirs(self, path, ref_path, diff_type, files, excludes=[], entries=DEFAULT_ENTRIES, mode=DEFAULT_MODE):
-        """Run difference between two directories."""
-        print "    path: %s" % path
-        print "ref_path: %s" % ref_path
-
-        stat_per_chain = {}
-        for test_name in os.listdir(path):
-            # skip tests in pattern
-            exclude_test = False
-            for exclude in excludes:
-                if fnmatch.fnmatch(test_name, exclude):
-                    print "Excluding test %s according to pattern %s" % (test_name, exclude)
-                    exclude_test = True
-                    break
-            if exclude_test:
-                continue
-
-            print "******************************************"
-            print "Test: %s" % test_name
-            print "******************************************"
-            stat_per_chain[test_name] = self.diff_test(os.path.join(path, test_name), os.path.join(ref_path, test_name), diff_type, files, entries=entries, mode=mode)
-
-        result = 0
-        for test_name, status in stat_per_chain.iteritems():
-            if status:
-                print "%-70s CHANGED" % test_name
-                result = 1
-            else:
-                print "%-70s IDENTICAL" % test_name
-
-        return result
-
-    def diff_test(self, path, ref_path, diff_type, files, entries=DEFAULT_ENTRIES, mode=DEFAULT_MODE):
-        """Run differences between two directories."""
-        result = self.get_result(path)
-        ref_result = self.get_result(ref_path)
-        for key, value in result.iteritems():
-            if key in ref_result:
-                print "%-10s: ref: %d events, val: %d events" % (key, int(ref_result[key][1]), int(result[key][1]))
-
-        test_dir = path
-        # get files in all patterns
-        test_files = []
-        for test_pattern in files:
-            test_files.extend(glob.glob(os.path.join(test_dir, test_pattern)))
-        # run test over all files
-        result = 0
-        for test_file in test_files:
-            basename = os.path.basename(test_file)
-            val_file = os.path.join(path, basename)
-            ref_file = os.path.join(ref_path, basename)
-            print "val_file: %s" % val_file
-            print "ref_file: %s" % ref_file
-
-            result |= self.diff_file(val_file, ref_file, diff_type, entries=entries, mode=mode)
-
-        return result
-
-    def diff_file(self, path, ref_path, diff_type, entries=DEFAULT_ENTRIES, mode=DEFAULT_MODE):
-        """Compare two files."""
-        if not os.path.exists(ref_path):
-            print "no test found in ref_dir to compare: %s" % ref_path
-            return 0
-
-        if fnmatch.fnmatch(path, '*TAG*.root'):
-            return self.diff_tag(path, ref_path)
-
-        if diff_type == 'diff-pool':
-            return self.diff_pool(path, ref_path)
-
-        return self.diff_root(path, ref_path, entries, mode)
-
-    def get_result(self, directory):
-        """
-        Return map [ESD|AOD,...] -> (success, succeeded event count).
-
-        Find the event counts in the logfile, e.g. in lines like:
-        'Event count check for AOD to TAG passed: all processed events found (500 output events)'
-        'Event count check for BS to ESD failed: found 480 events, expected 500'
-        """
-        result = {}
-        for entry in os.listdir(directory):
-            if re.match(r"tarball_PandaJob_(\d+)_(\w+)", entry):
-                logfile = os.path.join(directory, entry, ATHENA_STDOUT)
-                with open(logfile, "r") as f:
-                    for line in f:
-                        match = re.search(r"Event count check for \w+ to (\w+) (passed|failed):[^\d]+(\d+)", line)
-                        if match:
-                            result[match.group(1)] = (match.group(2), match.group(3))
-        return result
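-
-    # For example, on the 'failed' line quoted in the docstring above:
-    #
-    #     >>> re.search(r"Event count check for \w+ to (\w+) (passed|failed):[^\d]+(\d+)",
-    #     ...           'Event count check for BS to ESD failed: found 480 events, expected 500').groups()
-    #     ('ESD', 'failed', '480')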
-
-    def diff_tag(self, file_name, ref_file):
-        """TBD."""
-        (code, out, err) = self.run_command("diffTAGTree.py " + file_name + " " + ref_file)
-        if code != 0:
-            print "Error: %d" % code
-            print err
-
-        print out
-        return code
-
-    def diff_pool(self, file_name, ref_file):
-        """TBD."""
-        import PyUtils.PoolFile as PF
-
-        # diff-pool
-        df = PF.DiffFiles(refFileName=ref_file, chkFileName=file_name, ignoreList=['RecoTimingObj_p1_RAWtoESD_timings', 'RecoTimingObj_p1_ESDtoAOD_timings'])
-        df.printSummary()
-        stat = df.status()
-        print stat
-        del df
-
-        return stat
-
-    def diff_root(self, file_name, ref_file, entries, mode):
-        """TBD."""
-        # diff-root
-        (code, out, err) = self.run_command("acmd.py diff-root " + file_name + " " + ref_file + " --error-mode resilient --ignore-leaves RecoTimingObj_p1_HITStoRDO_timings RecoTimingObj_p1_RAWtoESD_mems RecoTimingObj_p1_RAWtoESD_timings RAWtoESD_mems RAWtoESD_timings ESDtoAOD_mems ESDtoAOD_timings HITStoRDO_timings RAWtoALL_mems RAWtoALL_timings RecoTimingObj_p1_RAWtoALL_mems RecoTimingObj_p1_RAWtoALL_timings RecoTimingObj_p1_EVNTtoHITS_timings --entries " + str(entries) + " --mode " + mode)
-        if code != 0:
-            print "Error: %d" % code
-            print err
-
-        print out
-        return code
-
-    def run_command(self, cmd, dir=None, shell=False, env=None):
-        """
-        Run the given command locally.
-
-        The command runs as separate subprocesses for every piped command.
-        Returns tuple of exit_code, output and err.
-        """
-        print "Execute:", cmd
-        if "|" in cmd:
-            cmd_parts = cmd.split('|')
-        else:
-            cmd_parts = []
-            cmd_parts.append(cmd)
-        i = 0
-        p = {}
-        for cmd_part in cmd_parts:
-            cmd_part = cmd_part.strip()
-            if i == 0:
-                p[i] = subprocess.Popen(shlex.split(cmd_part), stdin=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=dir, shell=shell, env=env)
-            else:
-                p[i] = subprocess.Popen(shlex.split(cmd_part), stdin=p[i - 1].stdout, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=dir, shell=shell, env=env)
-            i = i + 1
-        (output, err) = p[i - 1].communicate()
-        exit_code = p[0].wait()
-
-        return exit_code, str(output), str(err)
-
-
-if __name__ == '__main__':
-    if sys.version_info < (2, 7, 0):
-        sys.stderr.write("You need python 2.7 or later to run this script\n")
-        exit(1)
-
-    arguments = docopt(__doc__, version=os.path.splitext(os.path.basename(__file__))[0] + ' ' + VERSION)
-    ArtDiff().parse(arguments)
diff --git a/Tools/ART/scripts/art-download.sh b/Tools/ART/scripts/art-download.sh
deleted file mode 100755
index 89ed645c4f6f..000000000000
--- a/Tools/ART/scripts/art-download.sh
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/bin/bash
-# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
-#
-# NOTE do NOT run with /bin/bash -x as the output is too big for gitlab-ci
-# arguments:  INPUTNAME
-#
-# author : Tulay Cuhadar Donszelmann <tcuhadar@cern.ch>
-#
-# example: art-download NAME DIRECTORY
-
-if [ $# -ne 2 ]; then
-    echo 'Usage: art-download.sh NAME DIRECTORY'
-    exit 1
-fi
-
-NAME=$1
-shift
-DIRECTORY=$1
-shift
-
-export ATLAS_LOCAL_ROOT_BASE="${ATLAS_LOCAL_ROOT_BASE:-/cvmfs/atlas.cern.ch/repo/ATLASLocalRootBase}"
-# shellcheck source=/dev/null
-source "${ATLAS_LOCAL_ROOT_BASE}"/user/atlasLocalSetup.sh --quiet
-
-unset ALRB_noGridMW
-
-lsetup -f rucio
-lsetup -f "xrootd 4.7.1"
-
-echo "Name: ${NAME}"
-echo "Directory: ${DIRECTORY}"
-
-# Do not use: rucio delivers warnings as exit code 127
-#set -e
-
-rucio download --dir "${DIRECTORY}" "${NAME}"
diff --git a/Tools/ART/scripts/art-internal.py b/Tools/ART/scripts/art-internal.py
deleted file mode 100755
index ec313bd86aac..000000000000
--- a/Tools/ART/scripts/art-internal.py
+++ /dev/null
@@ -1,106 +0,0 @@
-#!/usr/bin/env python
-# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
-"""
-ART-internal - ATLAS Release Tester (internal command).
-
-Usage:
-  art-internal.py build job   [-v -q]                 <script_directory> <sequence_tag> <package> <outfile> <job_type> <job_index>
-  art-internal.py grid batch  [-v -q --skip-setup -n] <script_directory> <sequence_tag> <package> <outfile> <inform_panda> <job_type> <job_index>
-  art-internal.py grid single [-v -q --skip-setup --in=<in_file> -n] <script_directory> <sequence_tag> <package> <outfile> <inform_panda> <job_name>
-
-Options:
-  -h --help         Show this screen.
-  --skip-setup      Do not run atlas setup or voms
-  --in=<in_file>    Normally percentage IN
-  -n --no-action    No real submit will be done
-  -q --quiet        Show less information, only warnings and errors
-  -v --verbose      Show more information, debug level
-  --version         Show version.
-
-Arguments:
-  inform_panda      Inform Big Panda about job
-  job_index         Index of the test inside the package
-  job_name          Index of the test (batch), or its name (single)
-  job_type          Type of job (e.g. grid, ci, build)
-  outfile           Tar filename used for the output of the job
-  package           Package of the test (e.g. Tier0ChainTests)
-  script_directory  Directory containing the package(s) with tests
-  sequence_tag      Sequence tag (e.g. 0 or PIPELINE_ID)
-  submit_directory  Temporary directory with all files for submission
-
-Environment:
-  AtlasBuildBranch          Name of the nightly release (e.g. 21.0)
-  AtlasProject              Name of the project (e.g. Athena)
-  <AtlasProject>_PLATFORM   Platform (e.g. x86_64-slc6-gcc62-opt)
-  AtlasBuildStamp           Nightly tag (e.g. 2017-02-26T2119)
-"""
-
-__author__ = "Tulay Cuhadar Donszelmann <tcuhadar@cern.ch>"
-
-import logging
-import os
-import sys
-
-from ART.docopt_dispatch import dispatch
-
-from ART import ArtGrid, ArtBuild
-
-from ART.art_misc import get_atlas_env, set_log
-
-MODULE = "art.internal"
-
-
-@dispatch.on('build', 'job')
-def build_job(script_directory, sequence_tag, package, outfile, job_type, job_index, **kwargs):
-    """Build a single job, given a particular index.
-
-    Tests are called with the following parameters:
-    SCRIPT_DIRECTORY, PACKAGE, TYPE, TEST_NAME
-    """
-    set_log(kwargs)
-    art_directory = os.path.dirname(os.path.realpath(sys.argv[0]))
-    (nightly_release, project, platform, nightly_tag) = get_atlas_env()
-    exit(ArtBuild(art_directory, nightly_release, project, platform, nightly_tag, script_directory).job(sequence_tag, package, outfile, job_type, job_index))
-
-
-@dispatch.on('grid', 'batch')
-def grid_batch(script_directory, sequence_tag, package, outfile, inform_panda, job_type, job_index, **kwargs):
-    """Run a batch job, given a particular index.
-
-    Tests are called with the following parameters:
-    SCRIPT_DIRECTORY, PACKAGE, TYPE, TEST_NAME, STAGE
-    """
-    set_log(kwargs)
-    art_directory = os.path.dirname(os.path.realpath(sys.argv[0]))
-    (nightly_release, project, platform, nightly_tag) = get_atlas_env()
-    skip_setup = kwargs['skip_setup']
-    exit(ArtGrid(art_directory, nightly_release, project, platform, nightly_tag, script_directory, skip_setup).batch(sequence_tag, package, outfile, inform_panda, job_type, job_index))
-
-
-@dispatch.on('grid', 'single')
-def grid_single(script_directory, sequence_tag, package, outfile, inform_panda, job_name, **kwargs):
-    """Run a single job, given a particular name.
-
-    Tests are called with the following parameters:
-    SCRIPT_DIRECTORY, PACKAGE, TYPE, TEST_NAME, STAGE, IN_FILE
-    """
-    set_log(kwargs)
-    art_directory = os.path.dirname(os.path.realpath(sys.argv[0]))
-    (nightly_release, project, platform, nightly_tag) = get_atlas_env()
-    skip_setup = kwargs['skip_setup']
-    in_file = kwargs['in']
-    exit(ArtGrid(art_directory, nightly_release, project, platform, nightly_tag, script_directory, skip_setup).single(sequence_tag, package, outfile, inform_panda, job_name, in_file))
-
-
-if __name__ == '__main__':
-    if sys.version_info < (2, 7, 0):
-        sys.stderr.write("You need python 2.7 or later to run this script\n")
-        exit(1)
-
-    # NOTE: import should be here, to keep the order of the decorators (module first, art last and unused)
-    from art import __version__
-    logging.basicConfig()
-    log = logging.getLogger(MODULE)
-    log.setLevel(logging.INFO)
-    log.info("ART_PATH %s", os.path.dirname(os.path.realpath(sys.argv[0])))
-    dispatch(__doc__, version=os.path.splitext(os.path.basename(__file__))[0] + ' ' + __version__)
diff --git a/Tools/ART/scripts/art-share.py b/Tools/ART/scripts/art-share.py
deleted file mode 100755
index 003fe7bfd6d6..000000000000
--- a/Tools/ART/scripts/art-share.py
+++ /dev/null
@@ -1,178 +0,0 @@
-#!/usr/bin/env python
-# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
-"""
-ART  - ATLAS Release Tester - Share.
-
-Usage:
-  art-share.py [options] <data_directory>
-
-Options:
-  -h --help               Show this screen
-  --version               Show version
-  -q --quiet              Show less information, only warnings and errors
-  --write                 Write to directory
-  -v --verbose            Show more information, debug level
-
-Arguments:
-  data_directory          directory to scan for shared files
-
-"""
-
-__author__ = "Tulay Cuhadar Donszelmann <tcuhadar@cern.ch>"
-
-import collections
-import hashlib
-import logging
-import os
-import sys
-
-try:
-    import scandir as scan
-except ImportError:
-    import os as scan
-
-from ART.docopt import docopt
-
-MODULE = "art.share"
-
-
-class ArtShare(object):
-    """Class for copying input files.
-
-    Files are copied to the .art area under their SHA1 sum. The original file is replaced
-    by a symbolic link. Any duplicates will result in the same SHA1 sum and thus just
-    be replaced by their symbolic link. Removing a file results in removing a link.
-    If the .art directory has files without links pointing to them, these files are also
-    removed.
-    """
-
-    def __init__(self, data_directory, write):
-        """Constructor of ArtShare."""
-        log = logging.getLogger(MODULE)
-        self.data_directory = data_directory
-        self.write = write
-
-        self.art_directory = '.art'
-        self.extension = '.art'
-        self.count = collections.defaultdict(int)   # key is sha1
-
-        if not os.path.isdir(self.data_directory):
-            log.critical("data_directory does not exist: %s", self.data_directory)
-            sys.exit(1)
-
-        if self.write:
-            log.warning("NOTE - Changing File System")
-        else:
-            log.warning("NOT Changing File System, use --write to change the File System")
-
-        self.share()
-
-    def create_sha1sum(self, path):
-        """Calculate SHA1 from file on path."""
-        BUF_SIZE = 65536
-        sha1 = hashlib.sha1()
-        with open(path, 'rb') as f:
-            while True:
-                data = f.read(BUF_SIZE)
-                if not data:
-                    break
-                sha1.update(data)
-        return sha1.hexdigest()
-
-    def sha1sum(self, artpath):
-        """Retrieve SHA1 from artpath specification (in the filename)."""
-        f = os.path.basename(artpath)
-        return os.path.splitext(f)[0]
-
-    def share(self):
-        """Share the files by copying."""
-        log = logging.getLogger(MODULE)
-        art_root = os.path.join(self.data_directory, self.art_directory)
-        if not os.path.isdir(art_root):
-            log.info("NOTE - art_directory does not exist.")
-            log.info("       creating... %s", art_root)
-            if self.write:
-                os.makedirs(art_root)
-
-        if os.path.isdir(art_root):
-            for f in os.listdir(art_root):
-                sha1art = os.path.join(art_root, f)
-                if os.path.isfile(sha1art):
-                    sha1 = self.sha1sum(sha1art)
-                    self.count[sha1] = 0
-
-        i = 0
-        for root, dirs, files in scan.walk(self.data_directory):
-            for f in files:
-                if os.path.basename(root) == self.art_directory:
-                    continue
-
-                path = os.path.join(root, f)
-                i += 1
-                if os.path.islink(path):
-                    # link
-                    if not os.path.exists(path):
-                        log.warning("WARNING - Stale link/file, skipping")
-                        log.warning("          path: %s", path)
-                        continue
-                    sha1 = self.sha1sum(os.path.realpath(path))
-                    log.debug("Link %d path %s", i, path)
-                    log.debug("SHA1 %s", sha1)
-                    self.count[sha1] += 1
-                else:
-                    # file
-                    byte_size = os.path.getsize(path)
-                    if byte_size <= 0:
-                        log.warning("WARNING - zero sized file, skipping")
-                        log.warning("          path: %s", path)
-                        continue
-
-                    megabyte_size = byte_size / 1024 / 1024
-                    log.debug("File %d %s", i, path)
-                    log.debug("File size %d", megabyte_size)
-                    sha1 = self.create_sha1sum(path) if self.write or megabyte_size < 100 else "????????????????????????????????????????"
-                    log.debug("SHA1 %s", sha1)
-
-                    art_path = os.path.join(art_root, sha1 + self.extension)
-                    art_relpath = os.path.relpath(art_path, os.path.dirname(path))
-
-                    if sha1 not in self.count.keys():
-                        log.info("    Moving file from %s", path)
-                        log.info("                  to %s", art_path)
-                        if self.write:
-                            os.rename(path, art_path)
-                        self.count[sha1] = 0
-                    else:
-                        log.info("    Removing file from %s", path)
-                        if self.write:
-                            os.remove(path)
-
-                    log.info("    Creating link from %s", path)
-                    log.info("                    to %s", art_relpath)
-                    if self.write:
-                        os.symlink(art_relpath, path)
-                    self.count[sha1] += 1
-
-        for sha1, count in self.count.iteritems():
-            if count <= 0:
-                art_path = os.path.join(art_root, sha1 + self.extension)
-                log.info("    Removing file  %s", art_path)
-                if self.write:
-                    os.remove(art_path)
-
-
-if __name__ == '__main__':
-    if sys.version_info < (2, 7, 0):
-        sys.stderr.write("You need python 2.7 or later to run this script\n")
-        exit(1)
-
-    # NOTE: import should be here, to keep the order of the decorators (module first, art last and unused)
-    from art import __version__
-
-    logging.basicConfig()
-    log = logging.getLogger('art')
-
-    arguments = docopt(__doc__, version=os.path.splitext(os.path.basename(__file__))[0] + ' ' + __version__)
-    level = logging.DEBUG if arguments['--verbose'] else logging.WARN if arguments['--quiet'] else logging.INFO
-    log.setLevel(level)
-    ArtShare(arguments['<data_directory>'], arguments['--write'])
diff --git a/Tools/ART/scripts/art-task-build.sh b/Tools/ART/scripts/art-task-build.sh
deleted file mode 100755
index d8b5c1d925ad..000000000000
--- a/Tools/ART/scripts/art-task-build.sh
+++ /dev/null
@@ -1,76 +0,0 @@
-#!/bin/bash
-# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
-# arguments: RELEASE_BASE, PROJECT, PLATFORM
-# author : Tulay Cuhadar Donszelmann <tcuhadar@cern.ch>, Emil Obreshkov <Emil.Obreshkov@cern.ch>
-
-echo "INFO: Script executed by $(whoami) on $(date)"
-
-RELEASE_BASE=$1
-PROJECT=$2
-PLATFORM=$3
-
-BRANCH="$(echo "${RELEASE_BASE}" | tr '/' ' ' | awk '{print $5}')"
-echo BRANCH "${BRANCH}"
-
-if [ -d /cvmfs/atlas.cern.ch/repo/sw/tdaq ]; then
-   echo "WARNING: Setting TDAQ_RELEASE_BASE to /cvmfs/atlas.cern.ch/repo/sw/tdaq"
-   export TDAQ_RELEASE_BASE=/cvmfs/atlas.cern.ch/repo/sw/tdaq
-else
-   echo "ERROR: Cannot find TDAQ software installation"
-   exit 1
-fi
-
-export ATLAS_LOCAL_ROOT_BASE="${ATLAS_LOCAL_ROOT_BASE:-/cvmfs/atlas.cern.ch/repo/ATLASLocalRootBase}"
-# shellcheck source=/dev/null
-source "${ATLAS_LOCAL_ROOT_BASE}"/user/atlasLocalSetup.sh --quiet
-if [ "${BRANCH}" == "master" ]; then
-   lsetup -a testing asetup
-   echo "INFO: setting up for master"
-else
-   lsetup -a current asetup
-   echo "INFO: setting up for ${BRANCH}"
-fi
-asetup "${PROJECT}" --platform "${PLATFORM}" --releasebase "${RELEASE_BASE}"/build/install --noLcgReleaseBase
-
-
-# set up AtlasBuildBranch since that is not set by the above asetup for the local build setup
-export AtlasBuildBranch=${BRANCH}
-# for nightly testing point AtlasVersion to AtlasBuildStamp
-export AtlasVersion="${AtlasBuildStamp}"
-
-echo "TDAQ_RELEASE_BASE = ${TDAQ_RELEASE_BASE}"
-echo "AtlasBuildBranch = ${AtlasBuildBranch}"
-echo "AtlasProject = ${AtlasProject}"
-echo "AtlasBuildStamp  = ${AtlasBuildStamp}"
-echo "AtlasVersion = ${AtlasVersion}"
-
-ART_DIRECTORY=$(command -v art.py)
-ART_VERSION=$(art.py --version)
-echo "INFO: Using ART version ${ART_VERSION} in ${ART_DIRECTORY} directory"
-
-# automatic clean-up of the build-output EOS area
-art-clean.py --eos --release --base-dir=/eos/atlas/atlascerngroupdisk/data-art/build-output --delete "${AtlasBuildBranch}" "${AtlasProject}" "${PLATFORM}" || true &
-
-# configure EOS_MGM_URL
-if [ -z "${EOS_MGM_URL}" ]; then
-  echo "WARNING: EOS_MGM_URL variable is empty, setting it to root://eosatlas.cern.ch"
-  export EOS_MGM_URL="root://eosatlas.cern.ch"
-else
-  echo "EOS_MGM_URL variable contains", ${EOS_MGM_URL}
-fi
-
-# run build tests
-SUBDIR=${AtlasBuildBranch}/${AtlasProject}/${PLATFORM}/${AtlasBuildStamp}
-OUTDIR="${RELEASE_BASE}/art-build/${SUBDIR}"
-CMD="art.py run ${RELEASE_BASE}/build/install/${AtlasProject}/*/InstallArea/${PLATFORM}/src ${OUTDIR}"
-echo "${CMD}"
-RESULT=$(eval "${CMD}")
-echo "${RESULT}"
-
-# copy the test results to EOS area
-TARGETDIR=/eos/atlas/atlascerngroupdisk/data-art/build-output/${SUBDIR}
-if [[ ! -e ${TARGETDIR} ]]; then
-  echo "Creating target directory ${TARGETDIR}"
-  eos mkdir -p "${TARGETDIR}"
-  xrdcp -vr "${OUTDIR}" "${TARGETDIR}"
-fi
diff --git a/Tools/ART/scripts/art-task-grid.sh b/Tools/ART/scripts/art-task-grid.sh
deleted file mode 100755
index e459d17a8f2f..000000000000
--- a/Tools/ART/scripts/art-task-grid.sh
+++ /dev/null
@@ -1,183 +0,0 @@
-#!/bin/bash
-# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
-#
-# NOTE do NOT run with /bin/bash -x as the output is too big for gitlab-ci
-#
-# Example command lines for three types:
-#
-# art-task-grid.sh [--no-action] batch <submit_directory> <script_directory> <sequence_tag> <package> <outfile> <inform_panda> <job_type> <number_of_tests>
-#
-# art-task-grid.sh [--no-action] single [--inds <input_file> --n-files <number_of_files> --split <split>] <submit_directory> <script_directory> <sequence_tag> <package> <outfile> <inform_panda> <job_name>
-#
-# env: ART_GRID_OPTIONS
-#
-# author : Tulay Cuhadar Donszelmann <tcuhadar@cern.ch>
-#
-# options have to be in-order, and at the correct place
-#
-# example: [--test-name TestName --inDS user.tcuhadar.SingleMuon... --nFiles 3 --in] tmp /cvmfs/atlas-nightlies.cern.ch/sw/... Tier0ChainTests grid 316236 3  user.${USER}.atlas.${NIGHTLY_RELEASE_SHORT}.${PROJECT}.${PLATFORM}.${NIGHTLY_TAG}.${SEQUENCE_TAG}.${PACKAGE}[.${TEST_NUMBER}]
-set -e
-
-echo "art-task-grid.sh executed by $(whoami) on $(date)"
-
-NO_ACTION=0
-if [ "$1" == "--no-action" ]; then
-    NO_ACTION=1
-    shift
-    echo "NO_ACTION=${NO_ACTION}"
-fi
-
-TYPE=$1
-shift
-echo "TYPE=${TYPE}"
-
-case ${TYPE} in
-
-    'batch')
-        echo "Running 'batch'"
-        SPLIT=""
-        ;;
-    'single')
-        echo "Running 'single'"
-        INDS=""
-        if [ "$1" == "--inds" ]; then
-            INDS="--inDS $2"
-            shift
-            shift
-        fi
-        NFILES=""
-        NFILES_PER_JOB=""
-        if [ "$1" == "--n-files" ]; then
-            NFILES="--nFiles $2"
-            NFILES_PER_JOB="--nFilesPerJob $2"
-            NCORE_NFILES_PER_JOB="--nFilesPerJob $2"
-            shift
-            shift
-        fi
-        SPLIT=""
-        LARGE_JOB="--long --memory 4096"
-        if [ "$1" == "--split" ]; then
-            SPLIT="--split $2"
-            NFILES_PER_JOB=""
-            LARGE_JOB=""
-            shift
-            shift
-        fi
-        IN_FILE=""
-        if [ "$1" == "--in" ]; then
-          IN_FILE="--in=%IN"
-          shift
-        fi
-        NCORES=""
-        if [ "$1" == "--ncore" ]; then
-            NCORES="--nCore $2"
-            NFILES_PER_JOB="${NCORE_NFILES_PER_JOB}"
-            LARGE_JOB=""
-            shift
-            shift
-        fi
-        ATHENA_MT=""
-        if [ "$1" == "--athena_mt" ]; then
-            NCORES="--nCore $2"
-            LARGE_JOB=""
-            shift
-            shift
-        fi
-        ;;
-    *)
-        echo "Unknown TYPE: ${TYPE}"
-        exit 1
-        ;;
-esac
-
-SUBMIT_DIRECTORY=$1
-shift
-echo "SUBMIT_DIRECTORY=${SUBMIT_DIRECTORY}"
-
-SCRIPT_DIRECTORY=$1
-shift
-echo "SCRIPT_DIRECTORY=${SCRIPT_DIRECTORY}"
-
-SEQUENCE_TAG=$1
-shift
-echo "SEQUENCE_TAG=${SEQUENCE_TAG}"
-
-PACKAGE=$1
-shift
-echo "PACKAGE=${PACKAGE}"
-
-OUTFILE=$1
-shift
-echo "OUTFILE=${OUTFILE}"
-
-INFORM_PANDA=$1
-shift
-echo "INFORM_PANDA=${INFORM_PANDA}"
-
-case ${TYPE} in
-
-    'batch')
-        JOB_TYPE=$1
-        shift
-        echo "JOB_TYPE=${JOB_TYPE}"
-
-        NUMBER_OF_TESTS=$1
-        SPLIT="--split ${NUMBER_OF_TESTS}"
-        shift
-        echo "NUMBER_OF_TESTS=${NUMBER_OF_TESTS}"
-        echo "SPLIT=${SPLIT}"
-        ;;
-
-    'single')
-        JOB_NAME=$1
-        shift
-        echo "JOB_NAME=${JOB_NAME}"
-        ;;
-esac
-
-# general options
-PATHENA_OPTIONS="--noBuild --expertOnly_skipScout --noEmail --maxAttempt 2"
-OUT="%OUT.tar"
-
-# the environment variable seemingly has to be copied into a local variable
-GRID_OPTIONS=$ART_GRID_OPTIONS
-echo "GRID_OPTIONS=${GRID_OPTIONS}"
-
-
-case ${TYPE} in
-
-    'batch')
-        # <script_directory> <sequence_tag> <package> <outfile> <job_type> <job_index>
-        INTERNAL_COMMAND="grid batch"
-        JOB_INDEX="%RNDM:0"
-        ARGS="${JOB_TYPE} ${JOB_INDEX}"
-        echo "JOB_INDEX=${JOB_INDEX}"
-        echo "ARGS=${ARGS}"
-        ;;
-    'single')
-        # <script_directory> <sequence_tag> <package> <outfile> <job_name>
-        INTERNAL_COMMAND="grid single"
-        PATHENA_TYPE_OPTIONS="${LARGE_JOB} ${INDS} ${NFILES} ${NFILES_PER_JOB} ${NCORES} ${ATHENA_MT}"
-        ARGS="${JOB_NAME}"
-        echo "PATHENA_TYPE_OPTIONS=${PATHENA_TYPE_OPTIONS}"
-        echo "ARGS=${ARGS}"
-        ;;
-esac
-
-
-# NOTE: art-internal.py can be run from the current dir, as it is copied there
-cd "${SUBMIT_DIRECTORY}"/"${PACKAGE}"/run
-SUBCOMMAND="./art-internal.py ${INTERNAL_COMMAND} ${IN_FILE} ${SCRIPT_DIRECTORY} ${SEQUENCE_TAG} ${PACKAGE} ${OUT} ${INFORM_PANDA} ${ARGS}"
-CMD="pathena ${GRID_OPTIONS} ${PATHENA_OPTIONS} ${PATHENA_TYPE_OPTIONS} --trf \"${SUBCOMMAND}\" ${SPLIT} --outDS ${OUTFILE} --extOutFile art-job.json"
-
-#--disableAutoRetry
-#--excludedSite=ANALY_TECHNION-HEP-CREAM
-#--site=ANALY_NIKHEF-ELPROD_SHORT,ANALY_NIKHEF-ELPROD"
-
-echo "Command: ${CMD}"
-
-if [ ${NO_ACTION} -ne 1 ]; then
-    echo "Submitting..."
-    RESULT=$(eval "${CMD}")
-    echo "${RESULT}"
-fi
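
The removed wrapper parsed its optional flags strictly in order: each recognised flag had to be the next argument and was consumed together with its value by a pair of shifts, everything remaining was taken as positional arguments, and the assembled command was only executed when --no-action was absent. A minimal sketch of that parsing-and-guarding pattern (illustrative only, not the removed script; the flag and variable names follow it, the echoed command is a stand-in for the real pathena call):

#!/bin/bash
# Sketch: in-order option parsing. A flag is only recognised if it is the
# next argument; the flag and its value are consumed with 'shift 2'.
set -e

INDS=""
if [ "$1" == "--inds" ]; then
    INDS="--inDS $2"
    shift 2
fi

SPLIT=""
if [ "$1" == "--split" ]; then
    SPLIT="--split $2"
    shift 2
fi

# Whatever remains is positional.
PACKAGE=$1

# Build the command as a string, print it, and only run it when allowed.
NO_ACTION=${NO_ACTION:-0}
CMD="echo INDS=${INDS} SPLIT=${SPLIT} PACKAGE=${PACKAGE}"
echo "Command: ${CMD}"
if [ "${NO_ACTION}" -ne 1 ]; then
    eval "${CMD}"
fi

Because a flag is matched against "$1" only at its expected position, reordering the flags silently skips them, which is why the header comment insists on in-order options.
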
diff --git a/Tools/ART/scripts/art.py b/Tools/ART/scripts/art.py
deleted file mode 100755
index b364ee9af212..000000000000
--- a/Tools/ART/scripts/art.py
+++ /dev/null
@@ -1,259 +0,0 @@
-#!/usr/bin/env python
-# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
-"""
-ART - ATLAS Release Tester.
-
-You need to setup for an ATLAS release before using ART.
-
-Usage:
-  art.py run             [-v -q --type=<T> --max-jobs=<N> --ci] <script_directory> <sequence_tag>
-  art.py grid            [-v -q --type=<T> --max-jobs=<N> -n] <script_directory> <sequence_tag>
-  art.py submit          [-v -q --type=<T> --max-jobs=<N> --config=<file> -n] <sequence_tag> [<package>]
-  art.py copy            [-v -q --user=<user> --dst=<dir> --no-unpack --tmp=<dir> --seq=<N> --keep-tmp] <indexed_package>
-  art.py validate        [-v -q] <script_directory>
-  art.py included        [-v -q --type=<T> --test-type=<TT>] <script_directory>
-  art.py compare grid    [-v -q --days=<D> --user=<user> --entries=<entries> --file=<pattern>... --mode=<mode>] <package> <test_name>
-  art.py compare ref     [-v -q --entries=<entries> --file=<pattern>... --mode=<mode>] <path> <ref_path>
-  art.py list grid       [-v -q --user=<user> --json --test-type=<TT>] <package>
-  art.py log grid        [-v -q --user=<user>] <package> <test_name>
-  art.py output grid     [-v -q --user=<user>] <package> <test_name>
-  art.py config          [-v -q --config=<file>] [<package>]
-  art.py createpoolfile  [-v -q]
-
-Options:
-  --ci                   Run Continuous Integration tests only (using env: AtlasBuildBranch)
-  --config=<file>        Use specific config file [default: art-configuration.yml]
-  --days=<D>             Number of days ago to pick up reference for compare [default: 1]
-  --dst=<dir>            Destination directory for downloaded files
-  --entries=<entries>    Number of entries to compare [default: 10]
-  --file=<pattern>...    Compare the following file patterns for diff-root [default: *AOD*.pool.root *ESD*.pool.root *HITS*.pool.root *RDO*.pool.root *TAG*.root]
-  -h --help              Show this screen.
-  --json                 Output in json format
-  --keep-tmp             Keep temporary directory while copying
-  --max-jobs=<N>         Maximum number of concurrent jobs to run [default: 0]
-  --mode=<mode>          Sets the mode for diff-root {summary, detailed} [default: detailed]
-  -n --no-action         No real submission will be done
-  --no-unpack            Do not unpack downloaded tar files
-  -q --quiet             Show less information, only warnings and errors
-  --seq=<N>              Use N as postfix on destination nightly-tag (for retries) [default: 0]
-  --test-type=<TT>       Type of test (e.g. all, batch or single) [default: all]
-  --tmp=<dir>            Temporary directory for downloaded files and caching of EXT0
-  --type=<T>             Type of job (e.g. grid, build)
-  --user=<user>          User to use for RUCIO
-  -v --verbose           Show more information, debug level
-  --version              Show version.
-
-Sub-commands:
-  run               Run jobs from a package in a local build (needs release and grid setup)
-  grid              Run jobs from a package on the grid (needs release and grid setup)
-  submit            Submit nightly jobs to the grid and inform big panda (NOT for users)
-  copy              Copy outputs and logs from RUCIO
-  validate          Check headers in tests
-  included          Show list of files which will be included for art submit/art grid
-  compare           Compare the output of a job
-  list              List the jobs of a package
-  log               Show the log of a job
-  output            Get the output of a job
-  config            Show configuration
-  createpoolfile    Create an 'empty' poolfile catalog
-
-Arguments:
-  indexed_package   Package of the test or indexed package (e.g. MooPerformance.4)
-  package           Package of the test (e.g. Tier0ChainTests)
-  path              Directory or File to compare
-  ref_path          Directory or File to compare to
-  script_directory  Directory containing the package(s) with tests
-  sequence_tag      Sequence tag (e.g. 0 or PIPELINE_ID)
-  test_name         Name of the test inside the package (e.g. test_q322.sh)
-
-Environment:
-  AtlasBuildBranch          Name of the nightly release (e.g. 21.0)
-  AtlasProject              Name of the project (e.g. Athena)
-  <AtlasProject>_PLATFORM   Platform (e.g. x86_64-slc6-gcc62-opt)
-  AtlasBuildStamp           Nightly tag (e.g. 2017-02-26T2119)
-
-Tests are called with:
-  arguments: PACKAGE TEST_NAME SCRIPT_DIRECTORY TYPE [IN_FILE]
-  environment: ArtScriptDirectory, ArtPackage, ArtJobType, ArtJobName, [ArtInFile]
-"""
-
-__author__ = "Tulay Cuhadar Donszelmann <tcuhadar@cern.ch>"
-__version__ = '0.10.21'
-
-import logging
-import os
-import sys
-
-from ART.docopt_dispatch import dispatch
-
-from ART import ArtBase, ArtGrid, ArtBuild
-
-from ART.art_misc import get_atlas_env, set_log
-
-MODULE = "art"
-
-#
-# First list the double commands
-#
-
-
-@dispatch.on('compare', 'ref')
-def compare_ref(path, ref_path, **kwargs):
-    """Compare the output of a job."""
-    set_log(kwargs)
-    art_directory = os.path.dirname(os.path.realpath(sys.argv[0]))
-    files = kwargs['file']
-    entries = kwargs['entries']
-    mode = kwargs['mode']
-    exit(ArtBase(art_directory).compare_ref(path, ref_path, files, entries, mode))
-
-
-@dispatch.on('compare', 'grid')
-def compare_grid(package, test_name, **kwargs):
-    """Compare the output of a job."""
-    set_log(kwargs)
-    art_directory = os.path.dirname(os.path.realpath(sys.argv[0]))
-    (nightly_release, project, platform, nightly_tag) = get_atlas_env()
-    days = int(kwargs['days'])
-    user = kwargs['user']
-    files = kwargs['file']
-    entries = kwargs['entries']
-    mode = kwargs['mode']
-    exit(ArtGrid(art_directory, nightly_release, project, platform, nightly_tag).compare(package, test_name, days, user, files, entries=entries, mode=mode, shell=True))
-
-
-@dispatch.on('list', 'grid')
-def list(package, **kwargs):
-    """List the jobs of a package."""
-    set_log(kwargs)
-    art_directory = os.path.dirname(os.path.realpath(sys.argv[0]))
-    (nightly_release, project, platform, nightly_tag) = get_atlas_env()
-    job_type = 'grid'
-    index_type = kwargs['test_type']
-    json_format = kwargs['json']
-    user = kwargs['user']
-    exit(ArtGrid(art_directory, nightly_release, project, platform, nightly_tag).list(package, job_type, index_type, json_format, user))
-
-
-@dispatch.on('log', 'grid')
-def log(package, test_name, **kwargs):
-    """Show the log of a job."""
-    set_log(kwargs)
-    art_directory = os.path.dirname(os.path.realpath(sys.argv[0]))
-    (nightly_release, project, platform, nightly_tag) = get_atlas_env()
-    user = kwargs['user']
-    exit(ArtGrid(art_directory, nightly_release, project, platform, nightly_tag).log(package, test_name, user))
-
-
-@dispatch.on('output', 'grid')
-def output(package, test_name, **kwargs):
-    """Get the output of a job."""
-    set_log(kwargs)
-    art_directory = os.path.dirname(os.path.realpath(sys.argv[0]))
-    (nightly_release, project, platform, nightly_tag) = get_atlas_env()
-    user = kwargs['user']
-    exit(ArtGrid(art_directory, nightly_release, project, platform, nightly_tag).output(package, test_name, user))
-
-
-@dispatch.on('submit')
-def submit(sequence_tag, **kwargs):
-    """Submit nightly jobs to the grid, NOT for users."""
-    set_log(kwargs)
-    art_directory = os.path.dirname(os.path.realpath(sys.argv[0]))
-    (nightly_release, project, platform, nightly_tag) = get_atlas_env()
-    job_type = 'grid' if kwargs['type'] is None else kwargs['type']
-    user = os.getenv('USER', 'artprod')
-    inform_panda = user == 'artprod'
-    package = kwargs['package']
-    config = kwargs['config']
-    no_action = kwargs['no_action']
-    wait_and_copy = True
-    exit(ArtGrid(art_directory, nightly_release, project, platform, nightly_tag, max_jobs=int(kwargs['max_jobs'])).task_list(job_type, sequence_tag, inform_panda, package, no_action, wait_and_copy, config))
-
-
-@dispatch.on('grid')
-def grid(script_directory, sequence_tag, **kwargs):
-    """Run jobs from a package on the grid, needs release and grid setup."""
-    set_log(kwargs)
-    art_directory = os.path.dirname(os.path.realpath(sys.argv[0]))
-    (nightly_release, project, platform, nightly_tag) = get_atlas_env()
-    job_type = 'grid' if kwargs['type'] is None else kwargs['type']
-    inform_panda = False
-    package = None
-    config = None
-    no_action = kwargs['no_action']
-    wait_and_copy = False
-    exit(ArtGrid(art_directory, nightly_release, project, platform, nightly_tag, script_directory=script_directory, skip_setup=True, max_jobs=int(kwargs['max_jobs'])).task_list(job_type, sequence_tag, inform_panda, package, no_action, wait_and_copy, config))
-
-
-@dispatch.on('run')
-def run(script_directory, sequence_tag, **kwargs):
-    """Run jobs from a package in a local build, needs release and grid setup."""
-    set_log(kwargs)
-    art_directory = os.path.dirname(os.path.realpath(sys.argv[0]))
-    (nightly_release, project, platform, nightly_tag) = get_atlas_env()
-    job_type = 'build' if kwargs['type'] is None else kwargs['type']
-    exit(ArtBuild(art_directory, nightly_release, project, platform, nightly_tag, script_directory, max_jobs=int(kwargs['max_jobs']), ci=kwargs['ci']).task_list(job_type, sequence_tag))
-
-
-@dispatch.on('copy')
-def copy(indexed_package, **kwargs):
-    """Copy outputs to eos area."""
-    set_log(kwargs)
-    art_directory = os.path.dirname(os.path.realpath(sys.argv[0]))
-    (nightly_release, project, platform, nightly_tag) = get_atlas_env()
-    # NOTE: default depends on USER; do not set it here but in ArtGrid.copy
-    dst = kwargs['dst']
-    user = kwargs['user']
-    no_unpack = kwargs['no_unpack']
-    tmp = kwargs['tmp']
-    seq = int(kwargs['seq'])
-    keep_tmp = kwargs['keep_tmp']
-    exit(ArtGrid(art_directory, nightly_release, project, platform, nightly_tag).copy(indexed_package, dst=dst, user=user, no_unpack=no_unpack, tmp=tmp, seq=seq, keep_tmp=keep_tmp))
-
-
-@dispatch.on('validate')
-def validate(script_directory, **kwargs):
-    """Check headers in tests."""
-    set_log(kwargs)
-    art_directory = os.path.dirname(os.path.realpath(sys.argv[0]))
-    exit(ArtBase(art_directory).validate(script_directory))
-
-
-@dispatch.on('included')
-def included(script_directory, **kwargs):
-    """Show list of files which will be included for art submit/art grid."""
-    set_log(kwargs)
-    art_directory = os.path.dirname(os.path.realpath(sys.argv[0]))
-    (nightly_release, project, platform, nightly_tag) = get_atlas_env()
-    job_type = kwargs['type']   # None will list all types
-    index_type = kwargs['test_type']
-    exit(ArtBase(art_directory).included(script_directory, job_type, index_type, nightly_release, project, platform))
-
-
-@dispatch.on('config')
-def config(package, **kwargs):
-    """Show configuration."""
-    set_log(kwargs)
-    art_directory = os.path.dirname(os.path.realpath(sys.argv[0]))
-    (nightly_release, project, platform, nightly_tag) = get_atlas_env()
-    config = kwargs['config']
-    exit(ArtBase(art_directory).config(package, nightly_release, project, platform, config))
-
-
-@dispatch.on('createpoolfile')
-def createpoolfile(package, **kwargs):
-    """Show configuration."""
-    set_log(kwargs)
-    art_directory = os.path.dirname(os.path.realpath(sys.argv[0]))
-    (nightly_release, project, platform, nightly_tag) = get_atlas_env()
-    exit(ArtGrid(art_directory, nightly_release, project, platform, nightly_tag).createpoolfile())
-
-
-if __name__ == '__main__':
-    if sys.version_info < (2, 7, 0):
-        sys.stderr.write("You need python 2.7 or later to run this script\n")
-        exit(1)
-
-    logging.basicConfig()
-    dispatch(__doc__, version=os.path.splitext(os.path.basename(__file__))[0] + ' ' + __version__)
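
For reference, the usage patterns in the docstring above correspond to invocations like the following (the paths and the sequence tag are illustrative values, not taken from a real nightly):

# Run the build-type tests of a local checkout (release already set up):
art.py run --type=build /path/to/script_directory 0

# Run the grid-type tests of the same checkout (release and grid set up):
art.py grid /path/to/script_directory 1234

# Compare a grid test against the reference from two days ago:
art.py compare grid --days=2 Tier0ChainTests test_q322.sh

# List the grid jobs of a package in JSON format:
art.py list grid --json Tier0ChainTests
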
diff --git a/Tools/ART/share/localSetupART.sh b/Tools/ART/share/localSetupART.sh
deleted file mode 100644
index fcfe53590f45..000000000000
--- a/Tools/ART/share/localSetupART.sh
+++ /dev/null
@@ -1,4 +0,0 @@
-SCRIPTPATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-export PATH=${SCRIPTPATH}/../scripts:${PATH}
-export PYTHONPATH=${SCRIPTPATH}/../python:${PYTHONPATH}
-
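
The setup script removed above resolved its own location and prepended the package's scripts and python directories to PATH and PYTHONPATH; it was meant to be sourced rather than executed. A usage sketch (the checkout path is an illustrative assumption):

# Source the setup script from wherever the package is checked out:
source /path/to/Tools/ART/share/localSetupART.sh
# Afterwards the scripts and the ART python package are on the search paths:
which art.py
python -c 'import ART'
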
-- 
GitLab