diff --git a/Tools/ART/ARTEnvironmentConfig.cmake b/Tools/ART/ARTEnvironmentConfig.cmake
new file mode 100644
index 0000000000000000000000000000000000000000..f448229bf52346b68723efa0c324e5cf4280a0f0
--- /dev/null
+++ b/Tools/ART/ARTEnvironmentConfig.cmake
@@ -0,0 +1,12 @@
+# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
+#
+# This module is used to set up the environment for ART inputs
+#
+# author :  Attila Krasznahorkay <attila.krasznahorkay@cern.ch>, Tulay Cuhadar Donszelmann <tcuhadar@cern.ch>
+
+# Set the environment variable(s):
+set( ARTENVIRONMENT_ENVIRONMENT
+   APPEND DATAPATH "/eos/atlas/atlascerngroupdisk/data-art/grid-input" )
+
+# Silently declare the module found:
+set( ARTENVIRONMENT_FOUND TRUE )
diff --git a/Tools/ART/CMakeLists.txt b/Tools/ART/CMakeLists.txt
index 402a3c7f39085085b23f300452cac5308a742d75..ef6eee55e8c1c3b5cb9f16c9a9e821a7f386bced 100644
--- a/Tools/ART/CMakeLists.txt
+++ b/Tools/ART/CMakeLists.txt
@@ -1,6 +1,8 @@
-################################################################################
-# Package: ART
-################################################################################
+# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
+#
+# CMake configuration file for the ART package.
+#
+# author :  Tulay Cuhadar Donszelmann <tcuhadar@cern.ch>, Attila Krasznahorkay <attila.krasznahorkay@cern.ch>
 
 # Declare the package name:
 atlas_subdir( ART )
@@ -13,4 +15,7 @@ atlas_depends_on_subdirs( PRIVATE
 atlas_install_python_modules( python/ART/*.py )
 atlas_install_scripts( scripts/*.py scripts/*.sh )
 
-
+# Set up the general runtime environment (by Attila Krasznahorkay <attila.krasznahorkay@cern.ch>):
+set( ARTEnvironment_DIR ${CMAKE_CURRENT_SOURCE_DIR}
+   CACHE PATH "Location of ARTEnvironmentConfig.cmake" )
+find_package( ARTEnvironment )
diff --git a/Tools/ART/python/ART/__init__.py b/Tools/ART/python/ART/__init__.py
index 83f3adc32ad22eff9c7d9f08203397dea1528c7d..4662af8a69f79e17e461f932fc356f1d0fca4c75 100644
--- a/Tools/ART/python/ART/__init__.py
+++ b/Tools/ART/python/ART/__init__.py
@@ -7,6 +7,6 @@ Allows one to do:
 from ART import ArtBase
 """
 
-from art_base import ArtBase
-from art_build import ArtBuild
-from art_grid import ArtGrid
+from art_base import ArtBase  # noqa: F401
+from art_build import ArtBuild  # noqa: F401
+from art_grid import ArtGrid  # noqa: F401
diff --git a/Tools/ART/python/ART/art_base.py b/Tools/ART/python/ART/art_base.py
index 1175dae2956dfc45f06f30522b18aa869938ce98..fb7d57d2ce3bdfef16c8dacd57c8cfbd7bf2a7f9 100755
--- a/Tools/ART/python/ART/art_base.py
+++ b/Tools/ART/python/ART/art_base.py
@@ -110,7 +110,7 @@ class ArtBase(object):
     #
     # Default implementations
     #
-    def compare_ref(self, path, ref_path, entries=-1):
+    def compare_ref(self, path, ref_path, files, entries=-1, mode='detailed'):
         """TBD."""
         result = 0
 
@@ -120,7 +120,7 @@ class ArtBase(object):
             print err
         print out
 
-        (exit_code, out, err, command, start_time, end_time) = run_command(' '.join(("art-diff.py", "--diff-type=diff-root", "--entries=" + str(entries), path, ref_path)))
+        (exit_code, out, err, command, start_time, end_time) = run_command(' '.join(("art-diff.py", "--diff-type=diff-root", "--mode=" + mode, "--entries=" + str(entries), (' '.join(('--file=' + s for s in files))), path, ref_path)))
         if exit_code != 0:
             result |= exit_code
             print err
@@ -171,26 +171,30 @@ class ArtBase(object):
             files = os.listdir(directory)
             files.sort()
             for fname in files:
+
                 # is not a test ?
                 if not fnmatch.fnmatch(fname, 'test_*.sh') and not fnmatch.fnmatch(fname, 'test_*.py'):
                     continue
 
                 test_name = os.path.join(directory, fname)
 
-                # is not of correct type
+                has_art_input = ArtHeader(test_name).get(ArtHeader.ART_INPUT) is not None
+                has_art_athena_mt = ArtHeader(test_name).get(ArtHeader.ART_ATHENA_MT) > 0
+
+                # SKIP if is not of correct type
                 if job_type is not None and ArtHeader(test_name).get(ArtHeader.ART_TYPE) != job_type:
                     continue
 
-                # is not included in nightly_release, project, platform
+                # SKIP if is not included in nightly_release, project, platform
                 if nightly_release is not None and not self.is_included(test_name, nightly_release, project, platform):
                     continue
 
-                # batch and does specify art-input
-                if index_type == "batch" and ArtHeader(test_name).get(ArtHeader.ART_INPUT) is not None:
+                # SKIP if batch and does specify art-input or art-athena-mt
+                if index_type == "batch" and (has_art_input or has_art_athena_mt):
                     continue
 
-                # single and does not specify art-input
-                if index_type == "single" and ArtHeader(test_name).get(ArtHeader.ART_INPUT) is None:
+                # SKIP if single and does NOT specify art-input or art-athena-mt
+                if index_type == "single" and not (has_art_input or has_art_athena_mt):
                     continue
 
                 result.append(fname)
diff --git a/Tools/ART/python/ART/art_build.py b/Tools/ART/python/ART/art_build.py
index 67f9aecdcb9c80408fc4b9438f0602a74c6b9faa..500b264600c3ac578dcba521eb1ed1425fb26c1f 100644
--- a/Tools/ART/python/ART/art_build.py
+++ b/Tools/ART/python/ART/art_build.py
@@ -11,8 +11,9 @@ import json
 import logging
 import multiprocessing
 import os
+import socket
 
-from art_misc import run_command, mkdir_p
+from art_misc import memory, mkdir, run_command, GByte
 from art_base import ArtBase
 from art_header import ArtHeader
 
@@ -49,7 +50,10 @@ class ArtBuild(ArtBase):
         self.project = project
         self.platform = platform
         self.nightly_tag = nightly_tag
-        self.max_jobs = multiprocessing.cpu_count() if max_jobs <= 0 else max_jobs
+        mem = memory(GByte)
+        max_cores = min(mem / 4, multiprocessing.cpu_count())
+        max_cores = max_cores if max_cores >= 4 else 1
+        self.max_jobs = max_cores if max_jobs <= 0 else max_jobs
         self.ci = ci
 
     def task_list(self, job_type, sequence_tag):
@@ -75,15 +79,13 @@ class ArtBuild(ArtBase):
         status['release_info']['nightly_tag'] = self.nightly_tag
         status['release_info']['project'] = self.project
         status['release_info']['platform'] = self.platform
+        status['release_info']['hostname'] = socket.gethostname()
 
         # Package information with all tests in each package
         for future in concurrent.futures.as_completed(future_set):
             (package, test_name, exit_code, out, err, start_time, end_time) = future.result()
             log.debug("Handling job for %s %s", package, test_name)
             status[package][test_name]['exit_code'] = exit_code
-            # Removed, seem to give empty lines
-            # status[package][test_name]['out'] = out
-            # status[package][test_name]['err'] = err
             status[package][test_name]['start_time'] = start_time.strftime('%Y-%m-%dT%H:%M:%S')
             status[package][test_name]['end_time'] = end_time.strftime('%Y-%m-%dT%H:%M:%S')
             status[package][test_name]['start_epoch'] = start_time.strftime('%s')
@@ -111,7 +113,7 @@ class ArtBuild(ArtBase):
 
             status[package][test_name]['result'] = result
 
-        mkdir_p(sequence_tag)
+        mkdir(sequence_tag)
         with open(os.path.join(sequence_tag, "status.json"), 'w') as outfile:
             json.dump(status, outfile, sort_keys=True, indent=4, ensure_ascii=False)
 
@@ -161,7 +163,7 @@ class ArtBuild(ArtBase):
         test_name = self.get_files(test_directory, job_type, "all", self.nightly_release, self.project, self.platform)[int(job_index)]
 
         work_directory = os.path.join(sequence_tag, package, os.path.splitext(test_name)[0])
-        mkdir_p(work_directory)
+        mkdir(work_directory)
         log.debug("Work dir %s", work_directory)
 
         # Tests are called with arguments: PACKAGE TEST_NAME SCRIPT_DIRECTORY TYPE
diff --git a/Tools/ART/python/ART/art_grid.py b/Tools/ART/python/ART/art_grid.py
index ea938ebe9e3bb5b3c8cd49cd7c99458417e6d356..750a7f2dc4ba41ba7f03f4ae8509025db1d84115 100644
--- a/Tools/ART/python/ART/art_grid.py
+++ b/Tools/ART/python/ART/art_grid.py
@@ -6,19 +6,21 @@ __author__ = "Tulay Cuhadar Donszelmann <tcuhadar@cern.ch>"
 
 import atexit
 import concurrent.futures
+import exceptions
 import glob
+import httplib
 import json
 import logging
 import multiprocessing
 import os
 import re
-import requests
+# requests not available on lxplus, import only when needed
+# import requests
 import shutil
 import sys
 import tarfile
 import tempfile
 import time
-import urllib2
 
 from datetime import datetime
 from datetime import timedelta
@@ -27,26 +29,26 @@ from art_base import ArtBase
 from art_configuration import ArtConfiguration
 from art_header import ArtHeader
 from art_rucio import ArtRucio
-from art_misc import mkdir_p, make_executable, run_command, run_command_parallel
+from art_misc import count_files, cp, ls, mkdir, make_executable, rm, run_command, run_command_parallel, touch
 
 MODULE = "art.grid"
 
 
-def copy_job(art_directory, indexed_package, dst):
+def copy_job(art_directory, indexed_package, dst, no_unpack, tmp, seq):
     """
     Copy job to be run by executor.
 
     Needs to be defined outside a class.
-    Names of arguments are important, see call to scheduler.
     """
     log = logging.getLogger(MODULE)
-    log.info("job started %s %s %s", art_directory, indexed_package, dst)
-    (exit_code, out, err, command, start_time, end_time) = run_command(' '.join((os.path.join(art_directory, './art.py'), "copy", "--dst=" + dst, indexed_package)))
-    log.info("job ended %s %s %s", art_directory, indexed_package, dst)
+    log.debug("job started %s %s %s %s %s %d", art_directory, indexed_package, dst, no_unpack, tmp, seq)
+    (exit_code, out, err, command, start_time, end_time) = run_command(' '.join((os.path.join(art_directory, './art.py'), "copy", "--dst=" + dst, "--no-unpack" if no_unpack else "", "--tmp=" + tmp, "--seq=" + str(seq), indexed_package)))
+    log.debug("job ended %s %s %s %s %s %d", art_directory, indexed_package, dst, no_unpack, tmp, seq)
 
-    print "Exit Code:", exit_code
-    print "Out: ", out
-    print "Err: ", err
+    print "Copy job run with Exit Code:", exit_code
+    print out
+    print err
+    sys.stdout.flush()
 
     return (indexed_package, exit_code, out, err, start_time, end_time)
 
@@ -55,13 +57,14 @@ class ArtGrid(ArtBase):
     """Class for grid submission."""
 
     CVMFS_DIRECTORY = '/cvmfs/atlas-nightlies.cern.ch/repo/sw'
-    EOS_MGM_URL = 'root://eosatlas.cern.ch/'
     EOS_OUTPUT_DIR = '/eos/atlas/atlascerngroupdisk/data-art/grid-output'
 
     ARTPROD = 'artprod'
     JOB_REPORT = 'jobReport.json'
     JOB_REPORT_ART_KEY = 'art'
-    RESULT_WAIT_INTERVAL = 5 * 60
+    INITIAL_RESULT_WAIT_INTERVAL = 30 * 60  # seconds, 30 mins
+    RESULT_WAIT_INTERVAL = 5 * 60  # seconds, 5 mins
+    KINIT_WAIT = 12  # 12 * RESULT_WAIT_INTERVAL, 1 hour
 
     def __init__(self, art_directory, nightly_release, project, platform, nightly_tag, script_directory=None, skip_setup=False, submit_directory=None, max_jobs=0):
         """Keep arguments."""
@@ -121,7 +124,7 @@ class ArtGrid(ArtBase):
         """Copy all art files to the the run directory. Returns final script directory to be used."""
         log = logging.getLogger(MODULE)
         ART = os.path.join(run_dir, "ART")
-        mkdir_p(ART)
+        mkdir(ART)
 
         # get the path of the python classes and support scripts
         art_python_directory = os.path.join(self.art_directory, art_python, 'ART')
@@ -164,12 +167,14 @@ class ArtGrid(ArtBase):
         match = re.search(r"jediTaskID=(\d+)", text)
         return match.group(1) if match else -1
 
-    def copy(self, indexed_package, dst=None, user=None):
+    def copy(self, indexed_package, dst=None, user=None, no_unpack=False, tmp=None, seq=0, keep_tmp=False):
         """Copy output from scratch area to eos area."""
         log = logging.getLogger(MODULE)
+        tmp = tempfile.mkdtemp(prefix=indexed_package + '-') if tmp is None else tmp
+        mkdir(tmp)
 
         if indexed_package is not None:
-            return self.copy_package(indexed_package, dst, user)
+            return self.copy_package(indexed_package, dst, user, no_unpack, tmp, seq, keep_tmp)
 
         # make sure script directory exist
         self.exit_if_no_script_directory()
@@ -184,10 +189,10 @@ class ArtGrid(ArtBase):
         for indexed_package, root in test_directories.items():
             number_of_tests = len(self.get_files(root, "grid", "all", self.nightly_release, self.project, self.platform))
             if number_of_tests > 0:
-                result |= self.copy_package(indexed_package, dst, user)
+                result |= self.copy_package(indexed_package, dst, user, no_unpack, tmp, seq, keep_tmp)
         return result
 
-    def copy_package(self, indexed_package, dst, user):
+    def copy_package(self, indexed_package, dst, user, no_unpack, tmp, seq, keep_tmp):
         """Copy package to dst."""
         log = logging.getLogger(MODULE)
         real_user = os.getenv('USER', ArtGrid.ARTPROD)
@@ -195,49 +200,68 @@ class ArtGrid(ArtBase):
         default_dst = ArtGrid.EOS_OUTPUT_DIR if real_user == ArtGrid.ARTPROD else '.'
         dst = default_dst if dst is None else dst
 
-        # for debugging
-        cleanup = True
-
         result = 0
 
+        log.debug("Indexed Package %s", indexed_package)
+
         package = indexed_package.split('.')[0]
-        dst_dir = os.path.join(dst, self.nightly_release, self.project, self.platform, self.nightly_tag, package)
+        nightly_tag = self.nightly_tag if seq == 0 else '-'.join((self.nightly_tag, str(seq)))
+        dst_dir = os.path.join(dst, self.nightly_release, self.project, self.platform, nightly_tag, package)
         log.info("dst_dir %s", dst_dir)
 
-        tmp_dir = tempfile.mkdtemp()
-        if cleanup:
-            atexit.register(shutil.rmtree, tmp_dir, ignore_errors=True)
+        table = self.rucio.get_table(user, indexed_package, tmp=tmp)
+        if not table:
+            log.warning("Nothing to be copied")
+            return result
+
+        for entry in table:
+            grid_index = entry['grid_index']
+            log.debug("Grid Index %d", grid_index)
 
-        for entry in self.rucio.get_table(user, indexed_package):
-            index = entry['grid_index']
-            log.debug("Index %d", index)
             # get the test name
             test_name = entry['job_name']
             if test_name is None:
-                log.error("JSON Lookup Error for test %d", index)
-                result = 1
+                log.warning("JSON TestName not found for test with grid_index %d", grid_index)
                 continue
             log.debug("Test_name %s", test_name)
 
-            # create tmp test directory
-            test_dir = os.path.join(tmp_dir, test_name)
-            mkdir_p(test_dir)
+            json_file = os.path.join(tmp, entry['outfile'] + "_EXT0", self.__get_rucio_name(user, entry, 'json'))
+            json_dst = dst_dir.replace('/', '.')
+            json_copying = json_file + ".copying_to" + json_dst
+            json_copied = json_file + ".copied_to" + json_dst
+
+            if os.path.isfile(json_copied):
+                log.debug("Already copied: %d %s", grid_index, test_name)
+            elif os.path.isfile(json_copying):
+                log.debug("Still copying:  %d %s", grid_index, test_name)
+            else:
+                touch(json_copying)
 
-            # copy art-job.json
-            result |= self.copy_json(os.path.join(tempfile.gettempdir(), entry['outfile'] + "_EXT0", self.__get_rucio_name(user, entry, 'json')), test_dir)
+                # create test directory
+                test_dir = os.path.join(tmp, test_name)
+                suffix = '-' + str(entry['grid_index'] - 1) if entry['single_index'] > 0 and entry['grid_index'] > 1 else ''
+                test_dir += suffix
+                mkdir(test_dir)
 
-            # copy and unpack log
-            result |= self.copy_log(user, package, test_name, test_dir)
+                # copy art-job.json
+                result |= self.copy_json(json_file, test_dir)
 
-            # copy results and unpack
-            result |= self.copy_results(user, package, test_name, test_dir)
+                # copy and unpack log
+                result |= self.copy_log(user, package, test_name, grid_index, test_dir, no_unpack, tmp)
 
-            # copy to eos
-            result |= self.copy_to_eos(index, test_name, test_dir, dst_dir)
+                # copy results and unpack
+                result |= self.copy_results(user, package, test_name, grid_index, test_dir, no_unpack, tmp)
 
-            # cleanup
-            if cleanup:
-                shutil.rmtree(test_dir)
+                # copy to eos
+                result |= self.copy_to_dst(test_name + suffix, test_dir, dst_dir)
+
+                if result == 0:
+                    rm(json_copying)
+                    touch(json_copied)
+
+                # cleanup
+                if not keep_tmp:
+                    shutil.rmtree(test_dir)
 
         return result
 
@@ -248,84 +272,81 @@ class ArtGrid(ArtBase):
         shutil.copyfile(json_file, os.path.join(test_dir, ArtRucio.ART_JOB))
         return 0
 
-    def copy_log(self, user, package, test_name, test_dir):
+    def copy_log(self, user, package, test_name, grid_index, test_dir, no_unpack, tmp):
         """Copy and unpack log file."""
         log = logging.getLogger(MODULE)
         log.info("Copying LOG: %s %s", package, test_name)
 
-        tar = self.__open_tar(user, package, test_name, tar=False)
-        if tar is not None:
-            log.info("Unpacking LOG: %s", test_dir)
-            logdir = None
-            for member in tar.getmembers():
-                # does not work: tar.extractall()
-                tar.extract(member, path=test_dir)
-                logdir = member.name.split('/', 2)[0]
-
-            tar.close()
-
-            # rename top level log dir to logs
-            if logdir is not None:
-                os.chdir(test_dir)
-                os.rename(logdir, "tarball_logs")
+        if no_unpack:
+            tmp_tar = self.__get_tar(user, package, test_name, grid_index=grid_index, tmp=tmp, tar=False)
+            cp(tmp_tar, test_dir)
+            os.remove(tmp_tar)
+        else:
+            tmp_tar = self.__get_tar(user, package, test_name, grid_index=grid_index, tmp=tmp, tar=False)
+            if tmp_tar is not None:
+                tar = tarfile.open(tmp_tar)
+                log.info("Unpacking LOG: %s", test_dir)
+                logdir = None
+                for member in tar.getmembers():
+                    # does not work: tar.extractall()
+                    tar.extract(member, path=test_dir)
+                    logdir = member.name.split('/', 2)[0]
+
+                tar.close()
+
+                # rename top level log dir to logs
+                if logdir is not None:
+                    os.chdir(test_dir)
+                    os.rename(logdir, "tarball_logs")
+
+                os.remove(tmp_tar)
         return 0
 
-    def copy_results(self, user, package, test_name, test_dir):
+    def copy_results(self, user, package, test_name, grid_index, test_dir, no_unpack, tmp):
         """Copy results and unpack."""
         log = logging.getLogger(MODULE)
         log.info("Copying TAR: %s %s", package, test_name)
 
-        tar = self.__open_tar(user, package, test_name)
-        if tar is not None:
-            log.info("Unpacking TAR: %s", test_dir)
-            tar.extractall(path=test_dir)
-            tar.close()
+        if no_unpack:
+            tmp_tar = self.__get_tar(user, package, test_name, grid_index=grid_index, tmp=tmp)
+            cp(tmp_tar, test_dir)
+            os.remove(tmp_tar)
+        else:
+            tmp_tar = self.__get_tar(user, package, test_name, grid_index=grid_index, tmp=tmp)
+            if tmp_tar is not None:
+                tar = tarfile.open(tmp_tar)
+                log.info("Unpacking TAR: %s", test_dir)
+                tar.extractall(path=test_dir)
+                tar.close()
+                os.remove(tmp_tar)
+
         return 0
 
-    def copy_to_eos(self, index, test_name, test_dir, dst_dir):
-        """Copy to eos."""
+    def copy_to_dst(self, test_name, test_dir, dst_dir):
+        """Copy to dst."""
         log = logging.getLogger(MODULE)
+
+        # extra check if dst is already made
         dst_target = os.path.join(dst_dir, test_name)
-        if dst_target.startswith('/eos'):
-            # mkdir_cmd = 'eos ' + ArtGrid.EOS_MGM_URL + ' mkdir -p'
-            mkdir_cmd = None
-            xrdcp_target = ArtGrid.EOS_MGM_URL + dst_target + '/'
+
+        # create the directory
+        if mkdir(dst_target) != 0:
+            return 1
+
+        exit_code = cp(test_dir, dst_target)
+
+        # check number of source files
+        nSrc = count_files(test_dir)
+        nDst = count_files(dst_target)
+
+        if nDst == nSrc:
+            log.info("Number of files in Src (%d) and Dst (%d) are equal for %s", nSrc, nDst, test_name)
         else:
-            mkdir_cmd = 'mkdir -p'
-            xrdcp_target = dst_target
-        log.info("Copying to DST: %d %s", index, xrdcp_target)
-
-        if mkdir_cmd is not None:
-            (exit_code, out, err, command, start_time, end_time) = run_command(' '.join((mkdir_cmd, dst_target)))
-            if exit_code != 0:
-                log.error("Mkdir Error: %d %s %s", exit_code, out, err)
-                return 1
-
-        cmd = ' '.join(('xrdcp -N -r -p -v', test_dir, xrdcp_target))
-        max_trials = 6
-        wait_time = 4 * 60  # seconds
-        trial = 1
-        while True:
-            log.info("Trial %d, using: %s", trial, cmd)
-            (exit_code, out, err, command, start_time, end_time) = run_command(cmd)
-            if exit_code in [0, 50, 51, 54]:
-                # 0 all is ok
-                # 50 File exists
-                # 51 File exists
-                # 54 is already copied
-                return 0
-
-            # 3010 connection problem
-            if exit_code != 3010 or trial >= max_trials:
-                log.error("XRDCP to EOS Error: %d %s %s", exit_code, out, err)
-                return 1
-
-            log.error("Possibly recoverable EOS Error: %d %s %s", exit_code, out, err)
-            log.info("Waiting for %d seconds", wait_time)
-            time.sleep(wait_time)
-            trial += 1
-
-    def task_package(self, root, package, job_type, sequence_tag, no_action, config_file):
+            log.warning("Number of files in Src (%d) and Dst (%d) differ for %s", nSrc, nDst, test_name)
+
+        return exit_code
+
+    def task_package(self, root, package, job_type, sequence_tag, inform_panda, no_action, config_file):
         """Submit a single package."""
         log = logging.getLogger(MODULE)
         result = {}
@@ -333,115 +354,165 @@ class ArtGrid(ArtBase):
         if number_of_tests > 0:
             print 'art-package:', package
             self.status('included')
-            log.info('root %s', root)
+            log.info('root %s with %d jobs', root, number_of_tests)
             log.info('Handling %s for %s project %s on %s', package, self.nightly_release, self.project, self.platform)
-            log.info("Number of tests: %d", number_of_tests)
 
             run_dir = os.path.join(self.submit_directory, package, 'run')
             script_directory = self.copy_art('../python', run_dir)
 
-            result = self.task(script_directory, package, job_type, sequence_tag, no_action, config_file)
+            result = self.task(script_directory, package, job_type, sequence_tag, inform_panda, no_action, config_file)
         return result
 
-    def task_list(self, job_type, sequence_tag, package=None, no_action=False, wait_and_copy=True, config_file=None):
+    def task_list(self, job_type, sequence_tag, inform_panda, package=None, no_action=False, wait_and_copy=True, config_file=None):
         """Submit a list of packages."""
         log = logging.getLogger(MODULE)
+        log.info("Inform Panda %s", inform_panda)
 
-        test_copy = False
+        # job will be submitted from tmp directory
+        self.submit_directory = tempfile.mkdtemp(dir='.')
 
-        if test_copy:
-            all_results = {}
-            all_results[0] = ('TrigAnalysisTest', "xxx", "yyy", 0)
+        # make sure tmp is removed afterwards
+        atexit.register(shutil.rmtree, self.submit_directory, ignore_errors=True)
 
-        else:
-            # job will be submitted from tmp directory
-            self.submit_directory = tempfile.mkdtemp(dir='.')
-
-            # make sure tmp is removed afterwards
-            atexit.register(shutil.rmtree, self.submit_directory, ignore_errors=True)
+        # make sure script directory exist
+        self.exit_if_no_script_directory()
 
-            # make sure script directory exist
-            self.exit_if_no_script_directory()
+        # get the test_*.sh from the test directory
+        test_directories = self.get_test_directories(self.get_script_directory())
+        if not test_directories:
+            log.warning('No tests found in directories ending in "test"')
 
-            # get the test_*.sh from the test directory
-            test_directories = self.get_test_directories(self.get_script_directory())
-            if not test_directories:
-                log.warning('No tests found in directories ending in "test"')
+        configuration = None if self.skip_setup else ArtConfiguration(config_file)
 
-            configuration = None if self.skip_setup else ArtConfiguration(config_file)
+        all_results = {}
 
-            all_results = {}
+        if package is None:
+            # submit tasks for all packages
+            for package, root in test_directories.items():
+                if configuration is not None and configuration.get(self.nightly_release, self.project, self.platform, package, 'exclude', False):
+                    log.warning("Package %s is excluded", package)
+                else:
+                    all_results.update(self.task_package(root, package, job_type, sequence_tag, inform_panda, no_action, config_file))
+        else:
+            # Submit single package
+            root = test_directories[package]
+            all_results.update(self.task_package(root, package, job_type, sequence_tag, inform_panda, no_action, config_file))
 
-            if package is None:
-                # submit tasks for all packages
-                for package, root in test_directories.items():
-                    if configuration is not None and configuration.get(self.nightly_release, self.project, self.platform, package, 'exclude', False):
-                        log.warning("Package %s is excluded", package)
-                    else:
-                        all_results.update(self.task_package(root, package, job_type, sequence_tag, no_action, config_file))
-            else:
-                # Submit single package
-                root = test_directories[package]
-                all_results.update(self.task_package(root, package, job_type, sequence_tag, no_action, config_file))
+        if no_action:
+            log.info("--no-action specified, so not waiting for results")
+            return 0
 
-            if no_action:
-                log.info("--no-action specified, so not waiting for results")
-                return 0
+        if len(all_results) == 0:
+            log.warning('No tests found, nothing to submit.')
+            return 0
 
-            if len(all_results) == 0:
-                log.warning('No tests found, nothing to submit.')
-                return 0
+        if not wait_and_copy:
+            log.debug("No copying")
+            return 0
 
         # wait for all results
-        if wait_and_copy:
-            configuration = ArtConfiguration(config_file)
-
-            log.info("Executor started with %d threads", self.max_jobs)
-            executor = concurrent.futures.ThreadPoolExecutor(max_workers=self.max_jobs)
-            future_set = []
-
-            while len(all_results) > 0:
-                log.debug("No of Results %d", len(all_results))
-                log.debug("Waiting...")
-                if not test_copy:
-                    time.sleep(ArtGrid.RESULT_WAIT_INTERVAL)
-                log.debug("Done Waiting")
-
-                # force a copy of all_results since we are modifying all_results
-                for jedi_id in list(all_results):
-                    package = all_results[jedi_id][0]
-                    # skip packages without copy
-                    if not configuration.get(self.nightly_release, self.project, self.platform, package, "copy"):
-                        log.info("Copy not configured for %s - skipped", package)
+        configuration = ArtConfiguration(config_file)
+
+        executor = None
+        future_set = []
+        seq = None
+
+        kinit_interval = ArtGrid.KINIT_WAIT  # ArtGrid.KINIT_WAIT * ArtGrid.RESULT_WAIT_INTERVAL
+        result_wait_interval = ArtGrid.INITIAL_RESULT_WAIT_INTERVAL
+        final_states = ["done", "finished", "failed", "aborted", "broken"]
+        tmp = tempfile.mkdtemp(prefix=sequence_tag + '-')
+        while len(all_results) > 0:
+            log.debug("No of Results %d", len(all_results))
+            log.debug("Waiting...")
+            time.sleep(result_wait_interval)
+            log.debug("Done Waiting")
+            result_wait_interval = ArtGrid.RESULT_WAIT_INTERVAL
+            kinit_interval -= 1
+            if kinit_interval <= 0:
+                os.system("kinit -R")
+                kinit_interval = ArtGrid.KINIT_WAIT
+
+            # force a copy of all_results since we are modifying all_results
+            for jedi_id in list(all_results):
+                package = all_results[jedi_id][0]
+                # skip packages without copy
+                if not configuration.get(self.nightly_release, self.project, self.platform, package, "copy"):
+                    log.info("Copy not configured for %s - skipped", package)
+                    del all_results[jedi_id]
+                    continue
+
+                # figure out the destination for the copy based on if the directory already exists, keep seq
+                if seq is None:
+                    dst = configuration.get(self.nightly_release, self.project, self.platform, package, "dst", ArtGrid.EOS_OUTPUT_DIR)
+                    dst_dir = os.path.join(dst, self.nightly_release, self.project, self.platform, self.nightly_tag)
+                    final_target = dst_dir
+                    max_seq = 10
+                    seq = 0
+                    while ls(final_target) == 0 and seq < max_seq:
+                        seq += 1
+                        final_target = '-'.join((dst_dir, str(seq)))
+
+                    if seq >= max_seq:
+                        log.warning("Too many retries (>%d) to copy, removing job %d", max_seq, jedi_id)
                         del all_results[jedi_id]
                         continue
 
-                    log.debug("Checking package %s for %s", package, str(jedi_id))
-                    status = self.task_status(jedi_id)
-                    if status is not None:
+                    # create the directory
+                    if mkdir(final_target) != 0:
+                        log.warning("Could not create output dir %s, retrying later", final_target)
+                        continue
+
+                log.debug("Checking package %s for %s", package, str(jedi_id))
+                status = self.task_status(jedi_id)
+                if status is not None:
+
+                    # job_name = all_results[jedi_id][1]
+                    # outfile = all_results[jedi_id][2]
+                    index = all_results[jedi_id][3]
+
+                    # skip single jobs if status is not final
+                    if (index > 0) and (status not in final_states):
+                        continue
+
+                    # create executor if not already done
+                    if executor is None:
+                        log.info("Executor started with %d threads", self.max_jobs)
+                        executor = concurrent.futures.ThreadPoolExecutor(max_workers=self.max_jobs)
+
+                    no_unpack = configuration.get(self.nightly_release, self.project, self.platform, package, "no_unpack", False)
+                    indexed_package = package + ('.' + str(index) if index > 0 else '')
+                    log.debug("Copy whatever ready from %s to %s using seq %d", indexed_package, dst, seq)
+                    future_set.append(executor.submit(copy_job, self.art_directory, indexed_package, dst, no_unpack, tmp, seq))
+
+                    # job in final state
+                    if status in final_states:
+                        # remove job from waiting queue
                         log.info("JediID %s finished with status %s", str(jedi_id), status)
-                        if status in ['finished', 'done']:
-                            # job_name = all_results[jedi_id][1]
-                            # outfile = all_results[jedi_id][2]
-                            index = all_results[jedi_id][3]
-                            dst = configuration.get(self.nightly_release, self.project, self.platform, package, "dst", ArtGrid.EOS_OUTPUT_DIR)
-                            indexed_package = package + ('.' + str(index) if index > 0 else '')
-                            log.info("Copy %s to %s", indexed_package, dst)
-                            future_set.append(executor.submit(copy_job, self.art_directory, indexed_package, dst))
                         del all_results[jedi_id]
+                        log.info("Still waiting for results of %d jobs %s", len(all_results), all_results.keys())
 
-            # wait for all copy jobs to finish
-            log.info("Waiting for copy jobs to finish...")
-            for future in concurrent.futures.as_completed(future_set):
-                (indexed_package, exit_code, out, err, start_time, end_time) = future.result()
-                if exit_code == 0:
-                    log.info("Copied %s exit_code: %d", indexed_package, exit_code)
-                    log.info("  starting %s until %s", start_time.strftime('%Y-%m-%dT%H:%M:%S'), end_time.strftime('%Y-%m-%dT%H:%M:%S'))
-                else:
-                    log.error("Failed to copy: %s exit_code: %d", indexed_package, exit_code)
-                    print err
-                    print out
+                    log.debug("Still waiting for results of %d jobs %s", len(all_results), all_results.keys())
+
+        if len(future_set) <= 0:
+            log.info("No need to wait for any copy jobs")
+            return 0
+
+        # wait for all copy jobs to finish
+        number_of_copy_jobs = len(future_set)
+        log.info("Waiting for %d copy jobs to finish...", number_of_copy_jobs)
+        for future in concurrent.futures.as_completed(future_set):
+            (indexed_package, exit_code, out, err, start_time, end_time) = future.result()
+            if exit_code == 0:
+                log.debug("Copied %s exit_code: %d", indexed_package, exit_code)
+                log.debug("  starting %s until %s", start_time.strftime('%Y-%m-%dT%H:%M:%S'), end_time.strftime('%Y-%m-%dT%H:%M:%S'))
+            else:
+                log.error("Failed to copy: %s exit_code: %d", indexed_package, exit_code)
+                print err
+                print out
+            number_of_copy_jobs -= 1
+            log.info("Still waiting for %d copy jobs to finish...", number_of_copy_jobs)
 
+        log.info("All copy jobs finished.")
         return 0
 
     def task_status(self, jedi_id):
@@ -450,6 +521,10 @@ class ArtGrid(ArtBase):
 
         Return final status of a task, or None if not finished
         """
+        import requests
+        import urllib3
+        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+
         log = logging.getLogger(MODULE)
 
         # fake return for simulation
@@ -457,29 +532,33 @@ class ArtGrid(ArtBase):
             return "done"
 
         try:
-            url = 'https://bigpanda.cern.ch/task/' + str(jedi_id) + '?json=true'
-            r = urllib2.urlopen(url)
-            s = json.load(r)
-            if (s is not None) and ('task' in s):
-                task = s['task']
-                if (task is not None) and ('status' in task):
-                    status = task['status']
-                    if status in ["done", "finished", "failed", "aborted", "broken"]:
-                        log.info("Task: %s %s", str(jedi_id), str(status))
+            payload = {'json': 'true'}
+            url = 'https://bigpanda.cern.ch/task/' + str(jedi_id)
+            r = requests.get(url, params=payload, verify=False)
+            if r.status_code == requests.codes.ok:
+                s = r.json()
+                if (s is not None) and ('task' in s):
+                    task = s['task']
+                    if (task is not None) and ('status' in task):
+                        status = task['status']
+                        # if status in ["done", "finished", "failed", "aborted", "broken"]:
+                        log.debug("Task: %s %s", str(jedi_id), str(status))
                         return status
-        except urllib2.HTTPError, e:
-            log.error('%s for %s status: %s', str(e.code), str(jedi_id), url)
+        except requests.exceptions.RequestException, e:
+            log.error('%s for %s status: %s', e, str(jedi_id), url)
+        except httplib.IncompleteRead, e:
+            log.error('%s for %s status: %s', e, str(jedi_id), url)
         return None
 
-    def task_job(self, grid_options, sub_cmd, script_directory, sequence_tag, package, outfile, job_type='', number_of_tests=0, split=0, job_name='', inds='', n_files=0, in_file=False, ncores=1, no_action=False):
+    def task_job(self, grid_options, sub_cmd, script_directory, sequence_tag, package, outfile, inform_panda, job_type='', number_of_tests=0, split=0, job_name='', inds=None, n_files=0, in_file=False, ncores=1, athena_mt=0, no_action=False):
         """
-        Submit a single job.
+        Submit a batch or single job.
 
         Returns jedi_id or 0 if submission failed.
 
-        # art-task-grid.sh [--no-action] batch <submit_directory> <script_directory> <sequence_tag> <package> <outfile> <job_type> <number_of_tests>
+        # art-task-grid.sh [--no-action] batch <submit_directory> <script_directory> <sequence_tag> <package> <outfile> <inform_panda> <job_type> <number_of_tests>
         #
-        # art-task-grid.sh [--no-action] single [--inds <input_file> --n-files <number_of_files> --split <split> --in] <submit_directory> <script_directory> <sequence_tag> <package> <outfile> <job_name>
+        # art-task-grid.sh [--no-action] single [--inds <input_file> --n-files <number_of_files> --split <split> --in] <submit_directory> <script_directory> <sequence_tag> <package> <outfile> <inform_panda> <job_name>
         """
         log = logging.getLogger(MODULE)
         cmd = ' '.join((os.path.join(self.art_directory, 'art-task-grid.sh'),
@@ -488,18 +567,20 @@ class ArtGrid(ArtBase):
 
         if sub_cmd == 'single':
             cmd = ' '.join((cmd,
-                            '--inds ' + inds if inds != '' else '',
-                            '--n-files ' + str(n_files) if n_files > 0 else '',
-                            '--split ' + str(split) if split > 0 else '',
-                            '--in' if in_file else '',
-                            '--ncore ' + str(ncores) if ncores > 1 else ''))
+                            '--inds ' + str(inds) if inds is not None else '',
+                            '--n-files ' + str(n_files) if inds is not None and n_files > 0 else '',
+                            '--split ' + str(split) if inds is not None and split > 0 else '',
+                            '--in' if inds is not None and str(in_file) else '',
+                            '--ncore ' + str(ncores) if athena_mt == 0 and ncores > 1 else '',
+                            '--athena_mt ' + str(athena_mt) if ncores == 1 and athena_mt > 0 else ''))
 
         cmd = ' '.join((cmd,
                         self.submit_directory,
                         script_directory,
                         sequence_tag,
                         package,
-                        outfile))
+                        outfile,
+                        str(inform_panda)))
 
         if sub_cmd == 'batch':
             cmd = ' '.join((cmd,
@@ -515,12 +596,12 @@ class ArtGrid(ArtBase):
         log.info("cmd: %s", cmd)
 
         # run task from Bash Script as is needed in ATLAS setup
-        log.info("Grid_options: %s", grid_options)
+        log.debug("Grid_options: %s", grid_options)
         env = os.environ.copy()
         env['PATH'] = '.:' + env['PATH']
         env['ART_GRID_OPTIONS'] = grid_options
 
-        log.info("ART_GRID_OPTIONS %s", env['ART_GRID_OPTIONS'])
+        log.debug("ART_GRID_OPTIONS %s", env['ART_GRID_OPTIONS'])
 
         jedi_id = -1
         # run the command, no_action is forwarded and used inside the script
@@ -547,12 +628,12 @@ class ArtGrid(ArtBase):
         log.info('grid_options: %s', grid_options)
         return grid_options
 
-    def task(self, script_directory, package, job_type, sequence_tag, no_action=False, config_file=None):
+    def task(self, script_directory, package, job_type, sequence_tag, inform_panda, no_action=False, config_file=None):
         """
         Submit a task, consisting of multiple jobs.
 
         For 'single' jobs each task contains exactly one job.
-        Returns a map of jedi_id to (package, test_name, out_file)
+        Returns a map of jedi_id to (package, test_name, out_file, index, seq)
         """
         log = logging.getLogger(MODULE)
         log.info('Running art task')
@@ -568,17 +649,17 @@ class ArtGrid(ArtBase):
 
         result = {}
 
-        # submit batch tests
+        # submit batch tests, index = 0
         if number_of_batch_tests > 0:
             self.exit_if_outfile_too_long(outfile)
 
             # Batch
             log.info("Batch")
-            jedi_id = self.task_job(grid_options, "batch", script_directory, sequence_tag, package, outfile, job_type=job_type, number_of_tests=number_of_batch_tests, no_action=no_action)
+            jedi_id = self.task_job(grid_options, "batch", script_directory, sequence_tag, package, outfile, inform_panda, job_type=job_type, number_of_tests=number_of_batch_tests, no_action=no_action)
             if jedi_id > 0:
-                result[jedi_id] = (package, "", outfile, 0)
+                result[jedi_id] = (package, "", outfile, 0, None)
 
-        # submit single tests
+        # submit single tests, index >= 1
         index = 1
         for job_name in self.get_files(test_directory, job_type, "single", self.nightly_release, self.project, self.platform):
             job = os.path.join(test_directory, job_name)
@@ -587,26 +668,27 @@ class ArtGrid(ArtBase):
             n_files = header.get(ArtHeader.ART_INPUT_NFILES)
             split = header.get(ArtHeader.ART_INPUT_SPLIT)
             ncores = header.get(ArtHeader.ART_CORES)
+            athena_mt = header.get(ArtHeader.ART_ATHENA_MT)
 
             outfile_test = self.rucio.get_outfile_name(user, package, sequence_tag, str(index))
             self.exit_if_outfile_too_long(outfile_test)
 
             # Single
             log.info("Single")
-            jedi_id = self.task_job(grid_options, "single", script_directory, sequence_tag, package, outfile_test, split=split, job_name=job_name, inds=inds, n_files=n_files, in_file=True, ncores=ncores, no_action=no_action)
+            jedi_id = self.task_job(grid_options, "single", script_directory, sequence_tag, package, outfile_test, inform_panda, split=split, job_name=job_name, inds=inds, n_files=n_files, in_file=True, ncores=ncores, athena_mt=athena_mt, no_action=no_action)
 
             if jedi_id > 0:
-                result[jedi_id] = (package, job_name, outfile_test, index)
+                result[jedi_id] = (package, job_name, outfile_test, index, None)
 
             index += 1
 
         return result
 
-    def batch(self, sequence_tag, package, out, job_type, job_index):
+    def batch(self, sequence_tag, package, out, inform_panda, job_type, job_index):
         """Run a single job by job_index of a 'batch' submission."""
         log = logging.getLogger(MODULE)
         log.info('Running art grid batch')
-        log.info("%s %s %s %s %s %s %s %s", self.nightly_release, self.project, self.platform, self.nightly_tag, package, job_type, str(job_index), out)
+        log.info("%s %s %s %s %s %s %s %s %s", self.nightly_release, self.project, self.platform, self.nightly_tag, package, job_type, str(job_index), out, inform_panda)
 
         test_directories = self.get_test_directories(self.get_script_directory())
         test_directory = test_directories[package]
@@ -619,29 +701,30 @@ class ArtGrid(ArtBase):
 
         in_file = None
 
-        return self.job(test_directory, package, job_name, job_type, out, in_file)
+        return self.job(test_directory, package, job_name, job_type, out, inform_panda, in_file)
 
-    def single(self, sequence_tag, package, out, job_name, in_file):
+    def single(self, sequence_tag, package, out, inform_panda, job_name, in_file):
         """Run a single job by name of a 'single' submission."""
         log = logging.getLogger(MODULE)
 
         log.info('Running art grid single')
-        log.info("%s %s %s %s %s %s %s %s", self.nightly_release, self.project, self.platform, self.nightly_tag, package, job_name, out, in_file)
+        log.info("%s %s %s %s %s %s %s %s %s", self.nightly_release, self.project, self.platform, self.nightly_tag, package, job_name, out, inform_panda, in_file)
 
         test_directories = self.get_test_directories(self.get_script_directory())
         test_directory = test_directories[package]
 
         job_type = 'grid'
-        return self.job(test_directory, package, job_name, job_type, out, in_file)
+        return self.job(test_directory, package, job_name, job_type, out, inform_panda, in_file)
 
-    def job(self, test_directory, package, job_name, job_type, out, in_file):
+    def job(self, test_directory, package, job_name, job_type, out, inform_panda, in_file):
         """Run a job."""
         log = logging.getLogger(MODULE)
 
-        # informing panda, ignoring errors for now
-        panda_id = os.getenv('PandaID', '0')
-
         log.info("art-job-name: %s", job_name)
+        panda_id = os.getenv('PandaID', '0')
+        if inform_panda == 'True':
+            # informing panda, ignoring errors for now
+            self.inform_panda(panda_id, job_name, package)
 
         test_file = os.path.join(test_directory, job_name)
 
@@ -664,8 +747,9 @@ class ArtGrid(ArtBase):
             env['ArtInFile'] = in_file
 
         header = ArtHeader(test_file)
+        athena_mt = header.get(ArtHeader.ART_ATHENA_MT)
         ncores = header.get(ArtHeader.ART_CORES)
-        if ncores > 1:
+        if athena_mt == 0 and ncores > 1:
             nthreads = header.get(ArtHeader.ART_INPUT_NFILES)
             (exit_code, output, error, command, start_time, end_time) = run_command_parallel(command, nthreads, ncores, env=env)
         else:
@@ -708,7 +792,7 @@ class ArtGrid(ArtBase):
                 log.info("Updated %s", ArtGrid.JOB_REPORT)
 
         # pick up the outputs
-        tar_file = tarfile.open(out, mode='w')
+        files = set()
 
         # pick up explicitly named output files
         with open(test_file, "r") as f:
@@ -720,19 +804,63 @@ class ArtGrid(ArtBase):
                 for out_name in out_names:
                     out_name = out_name.strip('\'"')
                     if os.path.exists(out_name):
-                        log.info('Tar file contain: %s', out_name)
-                        tar_file.add(out_name)
+                        files.add(out_name)
 
         # pick up art-header named outputs
         for path_name in ArtHeader(test_file).get(ArtHeader.ART_OUTPUT):
             for out_name in glob.glob(path_name):
-                log.info('Tar file contains: %s', out_name)
-                tar_file.add(out_name)
+                files.add(out_name)
+
+        tar_file = tarfile.open(out, mode='w')
+        for file in files:
+            log.info('Tar file contains: %s', file)
+            tar_file.add(file)
 
         tar_file.close()
         # Always return 0
         return 0
 
+    def inform_panda(self, panda_id, job_name, package):
+        """Inform panda about the job we are running using panda ID."""
+        log = logging.getLogger(MODULE)
+        import requests
+
+        url = "http://bigpanda.cern.ch/art/registerarttest/?json"
+        n_attempts = 3
+        timeout = 10
+
+        payload = {}
+        payload['pandaid'] = panda_id
+        payload['testname'] = job_name
+        payload['nightly_release_short'] = self.nightly_release_short
+        payload['platform'] = self.platform
+        payload['project'] = self.project
+        payload['package'] = package
+        payload['nightly_tag'] = self.nightly_tag
+
+        headers = {'User-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36'}
+
+        for i in range(0, n_attempts):
+            reply = requests.post(url, data=payload, headers=headers, timeout=timeout, verify=False)
+            try:
+                reply = requests.post(url, data=payload, timeout=timeout, verify=False)
+                log.info('Informed panda about %s %s %s', panda_id, job_name, package)
+            except:
+                log.warning('Exception occured for %s %s %s', panda_id, job_name, package)
+                continue
+
+            if reply.status_code == 200:
+                try:
+                    reply = reply.json()
+                except:
+                    log.error('The panda inform response was corrupted for %s %s %s', panda_id, job_name, package)
+                    raise
+                if 'exit_code' in reply and reply['exit_code'] == 0:
+                    return True
+
+        log.error('Panda could not be informed about %s %s %s', panda_id, job_name, package)
+        return False
+
     def list(self, package, job_type, index_type, json_format, user):
         """List all jobs available."""
         user = ArtGrid.ARTPROD if user is None else user
@@ -788,11 +916,12 @@ class ArtGrid(ArtBase):
         # make sure script directory exist
         self.exit_if_no_script_directory()
 
-        tar = self.__open_tar(user, package, test_name, tar=False)
-        if tar is None:
+        tmp_tar = self.__get_tar(user, package, test_name, tar=False)
+        if tmp_tar is None:
             log.error("No log tar file found")
             return 1
 
+        tar = tarfile.open(tmp_tar)
         for name in tar.getnames():
             if ArtRucio.ATHENA_STDOUT in name:
                 f = tar.extractfile(name)
@@ -800,6 +929,7 @@ class ArtGrid(ArtBase):
                 print content
                 break
         tar.close()
+        os.remove(tmp_tar)
         return 0
 
     def output(self, package, test_name, user):
@@ -816,20 +946,23 @@ class ArtGrid(ArtBase):
             outfile = os.path.splitext(outfile)[0]
         job_name = os.path.splitext(test_name)[0]
         tar_dir = os.path.join(tempfile.gettempdir(), outfile, job_name)
-        mkdir_p(tar_dir)
+        mkdir(tar_dir)
 
-        tar = self.__open_tar(user, package, test_name)
-        if tar is None:
+        tmp_tar = self.__get_tar(user, package, test_name)
+        if tmp_tar is None:
             log.error("No output tar file found")
             return 1
 
+        tar = tarfile.open(tmp_tar)
         tar.extractall(path=tar_dir)
         tar.close()
+        os.remove(tmp_tar)
+
         print "Output extracted in", tar_dir
 
         return 0
 
-    def compare(self, package, test_name, days, user, entries=-1, shell=False):
+    def compare(self, package, test_name, days, user, files, entries=-1, mode='detailed', shell=False):
         """Compare current output against a job of certain days ago."""
         log = logging.getLogger(MODULE)
         user = ArtGrid.ARTPROD if user is None else user
@@ -842,45 +975,64 @@ class ArtGrid(ArtBase):
             return 1
 
         ref_dir = os.path.join('.', 'ref-' + previous_nightly_tag)
-        mkdir_p(ref_dir)
+        mkdir(ref_dir)
 
         log.info("Shell = %s", shell)
-        tar = self.__open_tar(user, package, test_name, nightly_tag=previous_nightly_tag, shell=shell)
-        if tar is None:
+        tmp_tar = self.__get_tar(user, package, test_name, nightly_tag=previous_nightly_tag, shell=shell)
+        if tmp_tar is None:
             log.error("No comparison tar file found")
             return 1
 
+        tar = tarfile.open(tmp_tar)
         for member in tar.getmembers():
             tar.extractall(path=ref_dir, members=[member])
         tar.close()
+        os.remove(tmp_tar)
 
-        return self.compare_ref('.', ref_dir, entries)
+        return self.compare_ref('.', ref_dir, files, entries, mode)
 
-    def __open_tar(self, user, package, test_name, tar=True, nightly_tag=None, shell=False):
+    def __get_tar(self, user, package, test_name, grid_index=-1, tmp=None, tar=True, nightly_tag=None, shell=False):
         """Open tar file for particular release."""
         log = logging.getLogger(MODULE)
-        log.info("Tar: %s", tar)
+        log.debug("Tar: %s", tar)
+        tmp = tempfile.gettempdir() if tmp is None else tmp
         nightly_tag = self.nightly_tag if nightly_tag is None else nightly_tag
         job_name = os.path.splitext(test_name)[0]
 
-        for entry in self.rucio.get_table(user, package, nightly_tag, shell):
-            if entry['job_name'] == job_name:
+        max_tries = 3
+        wait_time = 5  # mins
+
+        tries = max_tries
+        while tries > 0:
+            try:
+                for entry in self.rucio.get_table(user, package, nightly_tag, shell, tmp):
+                    if entry['job_name'] == job_name and (grid_index < 0 or entry['grid_index'] == grid_index):
+
+                        log.debug("index %d", entry['grid_index'])
+                        rucio_name = self.__get_rucio_name(user, entry, 'tar' if tar else 'log')
 
-                rucio_name = self.__get_rucio_name(user, entry, 'tar' if tar else 'log')
+                        log.debug("RUCIO: %s", rucio_name)
 
-                log.info("RUCIO: %s", rucio_name)
+                        tmp_dir = tempfile.mkdtemp()
+                        atexit.register(shutil.rmtree, tmp_dir, ignore_errors=True)
 
-                # tmp_dir = tempfile.gettempdir()
-                tmp_dir = tempfile.mkdtemp()
-                atexit.register(shutil.rmtree, tmp_dir, ignore_errors=True)
+                        log.debug("Shell = %s", shell)
+                        exit_code = self.rucio.download(rucio_name, tmp_dir, shell)
+                        if exit_code == 0:
+                            tmp_tar = os.path.join(tmp_dir, 'user.' + user, rucio_name)
+                            return tmp_tar
+
+            except exceptions.Exception, e:
+                log.warning('(Rucio) Exception: %s in %s', str(e.code), str(e))
+                log.info("Waiting %d mins", wait_time)
+                tries -= 1
+                time.sleep(wait_time * 60)
+                continue
 
-                log.info("Shell = %s", shell)
-                exit_code = self.rucio.download(rucio_name, tmp_dir, shell)
-                if exit_code == 0:
-                    tmp_tar = os.path.join(tmp_dir, 'user.' + user, rucio_name)
-                    return tarfile.open(tmp_tar)
+            log.error("No log or tar found for package %s or test %s", package, test_name)
+            return None
 
-        log.error("No log or tar found for package %s or test %s", package, test_name)
+        log.error("Too many (%d) (Rucio) Exceptions", max_tries)
         return None
 
     def __get_rucio_name(self, user, entry, file_type):
diff --git a/Tools/ART/python/ART/art_header.py b/Tools/ART/python/ART/art_header.py
index 737c377f956e7db8595f667ba5e1aebd9df1e739..f9e9f1aa0be77b5c983f4b3d07fef885cfa3955b 100644
--- a/Tools/ART/python/ART/art_header.py
+++ b/Tools/ART/python/ART/art_header.py
@@ -17,6 +17,8 @@ MODULE = "art.header"
 class ArtHeader(object):
     """Class to handle art-headers."""
 
+    # headers in alphabetical order
+    ART_ATHENA_MT = 'art-athena-mt'
     ART_CI = 'art-ci'
     ART_CORES = 'art-cores'
     ART_DESCRIPTION = 'art-description'
@@ -52,6 +54,7 @@ class ArtHeader(object):
         self.add(ArtHeader.ART_INPUT, StringType, None)
         self.add(ArtHeader.ART_INPUT_NFILES, IntType, 1)
         self.add(ArtHeader.ART_INPUT_SPLIT, IntType, 0)
+        self.add(ArtHeader.ART_ATHENA_MT, IntType, 0)
 
         self.read(filename)
 
@@ -75,12 +78,9 @@ class ArtHeader(object):
             if line_match:
                 try:
                     key = line_match.group(1)
-                    value = line_match.group(2)
-                    if key in self.header:
-                        if self.header[key]['type'] == StringType:
-                            value = value.strip()
-                        elif self.header[key]['type'] == IntType:
-                            value = int(value)
+                    value = line_match.group(2).strip()
+                    if key in self.header and self.header[key]['type'] == IntType:
+                        value = int(value)
 
                     if self.is_list(key):
                         # handle list types
diff --git a/Tools/ART/python/ART/art_misc.py b/Tools/ART/python/ART/art_misc.py
index 41e4707f5309c49bc23d2e67e8356057084a1e13..0e187b8a9baebf2e513fe74dbde305125d9e68e2 100644
--- a/Tools/ART/python/ART/art_misc.py
+++ b/Tools/ART/python/ART/art_misc.py
@@ -5,7 +5,6 @@
 __author__ = "Tulay Cuhadar Donszelmann <tcuhadar@cern.ch>"
 
 import concurrent.futures
-import errno
 import logging
 import os
 import shlex
@@ -15,6 +14,11 @@ import sys
 from datetime import datetime
 
 MODULE = "art.misc"
+EOS_MGM_URL = 'root://eosatlas.cern.ch/'
+
+KByte = 1024
+MByte = KByte * 1024
+GByte = MByte * 1024
 
 
 def set_log(kwargs):
@@ -166,15 +170,94 @@ def make_executable(path):
     os.chmod(path, mode)
 
 
-def mkdir_p(path):
+def mkdir(path):
     """Make (missing) directories."""
+    log = logging.getLogger(MODULE)
+    if path.startswith('/eos'):
+        mkdir_cmd = 'eos ' + EOS_MGM_URL + ' mkdir -p'
+    else:
+        mkdir_cmd = 'mkdir -p'
+
+    if mkdir_cmd is not None:
+        (exit_code, out, err, command, start_time, end_time) = run_command(' '.join((mkdir_cmd, path)))
+        if exit_code != 0:
+            log.error("Mkdir Error: %d %s %s", exit_code, out, err)
+            return exit_code
+
+    return 0
+
+
+def ls(path):
+    """List files in directory."""
+    if path.startswith('/eos'):
+        ls_cmd = 'eos ' + EOS_MGM_URL + ' ls ' + path + '/'
+    else:
+        ls_cmd = 'ls ' + path + '/'
+
+    (exit_code, out, err, command, start_time, end_time) = run_command(ls_cmd)
+    if exit_code == 0:
+        print out
+        print err
+
+    return exit_code
+
+
+def cp(src, dst):
+    """Copy files to directory."""
+    log = logging.getLogger(MODULE)
+    if dst.startswith('/eos'):
+        # check which xrdcp we are running
+        (exit_code, out, err, command, start_time, end_time) = run_command('which xrdcp')
+        print out
+        print err
+
+        # check which version of xrdcp we are running
+        (exit_code, out, err, command, start_time, end_time) = run_command('xrdcp --version')
+        print out
+        print err
+
+        cmd = ' '.join(('xrdcp -f -N -r -p -v', src, EOS_MGM_URL + dst + '/'))
+    else:
+        cmd = ' '.join(('xrdcp -f -N -r -p -v', src, dst + '/'))
+
+    # run the actual command
+    log.info("Using: %s", cmd)
+    (exit_code, exit_out, exit_err, command, start_time, end_time) = run_command(cmd)
+    if exit_code != 0:
+        log.error("COPY to DST Error: %d %s %s", exit_code, exit_out, exit_err)
+
+    return exit_code
+
+
+def count_files(path):
+    """Count number of files."""
+    log = logging.getLogger(MODULE)
+    if path.startswith('/eos'):
+        cmd = ' '.join(('eos', EOS_MGM_URL, 'find', path, '|', 'wc', '-l'))
+    else:
+        cmd = ' '.join(('find', path, '|', 'wc', '-l'))
+
+    (exit_code, out, err, command, start_time, end_time) = run_command(cmd)
+    if exit_code == 0:
+        nFiles = int(out)
+        return nFiles
+
+    log.error("Error retrieving number of files on %s, %s", path, err)
+    return -1
+
+
+def touch(fname, times=None):
+    """Touch a file."""
+    with open(fname, 'a'):
+        os.utime(fname, times)
+
+
+def rm(fname):
+    """Remove a file."""
     try:
-        os.makedirs(path)
-    except OSError as exc:  # Python >2.5
-        if exc.errno == errno.EEXIST and os.path.isdir(path):
-            pass
-        else:
-            raise
+        os.remove(fname)
+    except OSError:
+        pass
 
 
 def which(program):
@@ -191,3 +274,8 @@ def which(program):
                 return exe_file
 
     return None
+
+
+def memory(scale=1):
+    """Return free memory."""
+    return os.sysconf('SC_PHYS_PAGES') * os.sysconf('SC_PAGE_SIZE') / scale
diff --git a/Tools/ART/python/ART/art_rucio.py b/Tools/ART/python/ART/art_rucio.py
index 037dae10d3b99f54d0bf22fdf50b932e04eafda8..f099d80921bf0068aa80067ebbd265708a381e74 100755
--- a/Tools/ART/python/ART/art_rucio.py
+++ b/Tools/ART/python/ART/art_rucio.py
@@ -55,7 +55,8 @@ class ArtRucio(object):
         self.exit_if_no_rucio()
 
         # rucio downloads cache properly
-        log.info("Shell = %s", shell)
+        log.debug("DID = %s", did)
+        log.debug("Shell = %s", shell)
         env = os.environ.copy()
         if shell:
             cmd = ' '.join((os.path.join(self.art_directory, 'art-download.sh'), did, dst_dir))
@@ -152,6 +153,7 @@ class ArtRucio(object):
         pattern = self.get_outfile_name(user, package, '*', None, nightly_tag)
         outfile = None
         sequence = None
+        log.debug("Pattern 1 %s", pattern)
         for out in rucio_client.list_dids(self.get_scope(user), {'name': '.'.join((pattern, 'log'))}):
             sequence_tag = self.get_sequence_tag(out)
             if sequence is None or sequence_tag > sequence:
@@ -159,14 +161,13 @@ class ArtRucio(object):
                 sequence = sequence_tag
 
         if outfile is not None:
-            log.debug("Adding 'batch': %s", outfile)
             result.append(outfile)
 
         # look for "single" outfile, deduce sequence_tag
         pattern = self.get_outfile_name(user, package, '*', '*', nightly_tag)
-        log.debug("Trying pattern %s", pattern)
         outfile = None
         sequence = None
+        log.debug("Pattern 2 %s", pattern)
         for out in rucio_client.list_dids(self.get_scope(user), {'name': '.'.join((pattern, 'log'))}):
             sequence_tag = self.get_sequence_tag(out)
             if sequence is None or sequence_tag > sequence:
@@ -179,6 +180,7 @@ class ArtRucio(object):
             if sequence_tag is not None:
                 # found sequence_tag, find all 'single' outfiles
                 pattern = self.get_outfile_name(user, package, sequence_tag, '*', nightly_tag)
+                log.debug("Pattern 3 %s", pattern)
                 for out in rucio_client.list_dids(self.get_scope(user), {'name': '.'.join((pattern, 'log'))}):
                     outfile = os.path.splitext(out)[0]
                     log.debug("Adding 'single': %s", outfile)
@@ -186,7 +188,7 @@ class ArtRucio(object):
 
         return result
 
-    def get_table(self, user, package, nightly_tag=None, shell=False):
+    def get_table(self, user, package, nightly_tag=None, shell=False, tmp=None):
         """Get full table with grid_index, single_index and test_name for particular package and nightly_tag."""
         log = logging.getLogger(MODULE)
 
@@ -195,6 +197,8 @@ class ArtRucio(object):
 
         self.exit_if_no_rucio()
 
+        tmp = tempfile.gettempdir() if tmp is None else tmp
+
         table = []
 
         nightly_tag = self.nightly_tag if nightly_tag is None else nightly_tag
@@ -202,61 +206,63 @@ class ArtRucio(object):
         outfiles = self.get_outfiles(user, package, nightly_tag)
 
         outfiles_str = [x + ArtRucio.JSON for x in outfiles]
-        outfiles_str = ' '.join(outfiles_str)
-
-        tmp_dir = tempfile.gettempdir()
-        dst_dir = tmp_dir
-
-        log.info("Shell = %s", shell)
-        exit_code = self.download(outfiles_str, dst_dir, shell)
-        if exit_code != 0:
-            log.error("Failed to execute rucio download %d", exit_code)
-            return table
-
-        for outfile in outfiles:
-            single_index = self.get_single_index(outfile)
-
-            json_directory = os.path.join(dst_dir, outfile + ArtRucio.JSON)
-            if not os.path.isdir(json_directory):
-                # print single_index, rucio_name
-                table.append({
-                    'single_index': single_index,
-                    'grid_index': -1,
-                    'file_index': -1,
-                    'job_index': -1,
-                    'outfile': outfile,
-                    'job_name': None
-                })
-                continue
-
-            for json_file in os.listdir(json_directory):
-                json_path = os.path.join(json_directory, json_file)
-                if os.path.isfile(json_path):
-                    with open(json_path) as json_fd:
-                        info = json.load(json_fd)
-                        job_name = os.path.splitext(info['name'])[0]
-
-                        # Match: user.artprod.13199077.EXT0._000002.art-job.json
-                        # Match: user.artprod.13199077.EXT0._000003.art-job.json.4
-                        # job_index = 13199077, grid_index = 3, file_index = 4
-                        match = re.search(r"user\.([^\.]+)\.(\d+)\.EXT0\._(\d+)\.art-job.json(?:\.(\d+))?", json_file)
-                        if match:
-                            job_index = int(match.group(2))
-                            grid_index = int(match.group(3))
-                            file_index = -1 if match.group(4) is None else int(match.group(4))
-                        else:
-                            job_index = -1
-                            grid_index = -1
-                            file_index = -1
-
-                        table.append({
-                            'single_index': single_index,
-                            'grid_index': grid_index,
-                            'file_index': file_index,
-                            'job_index': job_index,
-                            'outfile': outfile,
-                            'job_name': job_name
-                        })
+        if outfiles_str:
+            outfiles_str = ' '.join(outfiles_str)
+
+            dst_dir = tmp
+
+            log.debug("Shell = %s", shell)
+            exit_code = self.download(outfiles_str, dst_dir, shell)
+            if exit_code != 0:
+                log.error("Failed to execute rucio download %d", exit_code)
+                return table
+
+            for outfile in outfiles:
+                single_index = self.get_single_index(outfile)
+
+                json_directory = os.path.join(dst_dir, outfile + ArtRucio.JSON)
+                if not os.path.isdir(json_directory):
+                    log.debug("Adding, single_index: %d, outfile: %s", single_index, outfile)
+                    table.append({
+                        'single_index': single_index,
+                        'grid_index': -1,
+                        'file_index': -1,
+                        'job_index': -1,
+                        'outfile': outfile,
+                        'job_name': None
+                    })
+                    continue
+
+                for json_file in os.listdir(json_directory):
+                    if json_file.endswith(".json"):
+                        json_path = os.path.join(json_directory, json_file)
+                        if os.path.isfile(json_path):
+                            with open(json_path) as json_fd:
+                                info = json.load(json_fd)
+                                job_name = os.path.splitext(info['name'])[0]
+
+                                # Match: user.artprod.13199077.EXT0._000002.art-job.json
+                                # Match: user.artprod.13199077.EXT0._000003.art-job.json.4
+                                # job_index = 13199077, grid_index = 3, file_index = 4
+                                match = re.search(r"user\.([^\.]+)\.(\d+)\.EXT0\._(\d+)\.art-job.json(?:\.(\d+))?", json_file)
+                                if match:
+                                    job_index = int(match.group(2))
+                                    grid_index = int(match.group(3))
+                                    file_index = -1 if match.group(4) is None else int(match.group(4))
+                                else:
+                                    job_index = -1
+                                    grid_index = -1
+                                    file_index = -1
+
+                                log.debug("Adding, single_index: %d, grid_index: %d, file_index: %d, job_index %d, outfile: %s, job_name: %s", single_index, grid_index, file_index, job_index, outfile, job_name)
+                                table.append({
+                                    'single_index': single_index,
+                                    'grid_index': grid_index,
+                                    'file_index': file_index,
+                                    'job_index': job_index,
+                                    'outfile': outfile,
+                                    'job_name': job_name
+                                })
 
         self.table = table
         return table
diff --git a/Tools/ART/python/ART/docopt.py b/Tools/ART/python/ART/docopt.py
index 7c6a52df58a5561b0e491fda4499c548d262e36e..4cb99026f86d9fdade9d677621d7b6e53826f479 100644
--- a/Tools/ART/python/ART/docopt.py
+++ b/Tools/ART/python/ART/docopt.py
@@ -1,4 +1,5 @@
-"""Pythonic command-line interface parser that will make you smile.
+"""
+Pythonic command-line interface parser that will make you smile.
 
  * http://docopt.org
  * Repository and issue-tracker: https://github.com/docopt/docopt
@@ -10,17 +11,15 @@ import sys
 import re
 
 
-__all__ = ['docopt']
+__all__ = ('docopt',)
 __version__ = '0.6.2'
 
 
 class DocoptLanguageError(Exception):
-
     """Error in construction of usage-message by developer."""
 
 
 class DocoptExit(SystemExit):
-
     """Exit in case user invoked program with incorrect arguments."""
 
     usage = ''
@@ -97,7 +96,6 @@ def transform(pattern):
 
 
 class LeafPattern(Pattern):
-
     """Leaf/terminal node of a pattern tree."""
 
     def __init__(self, name, value=None):
@@ -131,7 +129,6 @@ class LeafPattern(Pattern):
 
 
 class BranchPattern(Pattern):
-
     """Branch/inner node of a pattern tree."""
 
     def __init__(self, *children):
@@ -239,7 +236,6 @@ class Optional(BranchPattern):
 
 
 class OptionsShortcut(Optional):
-
     """Marker/placeholder for [options] shortcut."""
 
 
@@ -299,7 +295,10 @@ class Tokens(list):
 
 
 def parse_long(tokens, options):
-    """long ::= '--' chars [ ( ' ' | '=' ) chars ] ;"""
+    """Parse long according to definition.
+
+    long ::= '--' chars [ ( ' ' | '=' ) chars ] ;
+    """
     long, eq, value = tokens.move().partition('=')
     assert long.startswith('--')
     value = None if eq == value == '' else value
@@ -332,7 +331,10 @@ def parse_long(tokens, options):
 
 
 def parse_shorts(tokens, options):
-    """shorts ::= '-' ( chars )* [ [ ' ' ] chars ] ;"""
+    """Parse shorts according to definition.
+
+    shorts ::= '-' ( chars )* [ [ ' ' ] chars ] ;
+    """
     token = tokens.move()
     assert token.startswith('-') and not token.startswith('--')
     left = token.lstrip('-')
@@ -375,7 +377,10 @@ def parse_pattern(source, options):
 
 
 def parse_expr(tokens, options):
-    """expr ::= seq ( '|' seq )* ;"""
+    """Parse expr according to definition.
+
+    expr ::= seq ( '|' seq )* ;
+    """
     seq = parse_seq(tokens, options)
     if tokens.current() != '|':
         return seq
@@ -388,7 +393,10 @@ def parse_expr(tokens, options):
 
 
 def parse_seq(tokens, options):
-    """seq ::= ( atom [ '...' ] )* ;"""
+    """Parse seq according to definition.
+
+    seq ::= ( atom [ '...' ] )* ;
+    """
     result = []
     while tokens.current() not in [None, ']', ')', '|']:
         atom = parse_atom(tokens, options)
@@ -400,8 +408,10 @@ def parse_seq(tokens, options):
 
 
 def parse_atom(tokens, options):
-    """atom ::= '(' expr ')' | '[' expr ']' | 'options'
-             | long | shorts | argument | command ;
+    """Parse atom according to definition.
+
+    atom ::= '(' expr ')' | '[' expr ']' | 'options'
+            | long | shorts | argument | command ;
     """
     token = tokens.current()
     result = []
@@ -426,13 +436,12 @@ def parse_atom(tokens, options):
 
 
 def parse_argv(tokens, options, options_first=False):
-    """Parse command-line argument vector.
+    """Parse argv argument vector.
 
     If options_first:
         argv ::= [ long | shorts ]* [ argument ]* [ '--' [ argument ]* ] ;
     else:
         argv ::= [ long | shorts | argument ]* [ '--' [ argument ]* ] ;
-
     """
     parsed = []
     while tokens.current() is not None:
@@ -456,7 +465,7 @@ def parse_defaults(doc):
         _, _, s = s.partition(':')  # get rid of "options:"
         split = re.split('\n[ \t]*(-\S+?)', '\n' + s)[1:]
         split = [s1 + s2 for s1, s2 in zip(split[::2], split[1::2])]
-        options = [Option.parse(s) for s in split if s.startswith('-')]
+        options = [Option.parse(t) for t in split if t.startswith('-')]
         defaults += options
     return defaults
 
@@ -562,7 +571,7 @@ def docopt(doc, argv=None, help=True, version=None, options_first=False):
     options = parse_defaults(doc)
     pattern = parse_pattern(formal_usage(DocoptExit.usage), options)
     # [default] syntax for argument is disabled
-    #for a in pattern.flat(Argument):
+    # for a in pattern.flat(Argument):
     #    same_name = [d for d in arguments if d.name == a.name]
     #    if same_name:
     #        a.value = same_name[0].value
@@ -571,7 +580,7 @@ def docopt(doc, argv=None, help=True, version=None, options_first=False):
     for options_shortcut in pattern.flat(OptionsShortcut):
         doc_options = parse_defaults(doc)
         options_shortcut.children = list(set(doc_options) - pattern_options)
-        #if any_options:
+        # if any_options:
         #    options_shortcut.children += [Option(o.short, o.long, o.argcount)
         #                    for o in argv if type(o) is Option]
     extras(help, version, argv, doc)
diff --git a/Tools/ART/scripts/art-clean.py b/Tools/ART/scripts/art-clean.py
new file mode 100755
index 0000000000000000000000000000000000000000..3f714b72146ec6b34eee51e12d52fa95f4366a5e
--- /dev/null
+++ b/Tools/ART/scripts/art-clean.py
@@ -0,0 +1,208 @@
+#!/usr/bin/env python
+# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
+"""
+ART  - ATLAS Release Tester - Clean.
+
+Usage:
+  art-clean.py [-v -q --base-dir=<base_dir> --delete --days=<days> --eos --config=<file> --release --package=<package>] [<nightly_release> <project> [<platform>]]
+
+Options:
+  --base-dir=<base_dir>      Start search from basedir [default: /eos/atlas/atlascerngroupdisk/data-art/grid-output]
+  --config=<file>            art-configuration to retrieve packages and days [default: art-configuration.yml]
+  --delete                   Actually delete the directories to be cleaned
+  --days=<days>              Number of nightlies to keep [default: 7]
+  --eos                      Use eos commands
+  -h --help                  Show this screen
+  --package=<package>        Package to be cleaned up, no configuration
+  --release                  Clean up full release, no configuration
+  -q --quiet                 Show less information, only warnings and errors
+  -v --verbose               Show more information, debug level
+  --version                  Show version
+
+Arguments:
+  nightly_release            Name of the nightly release (e.g. 21.0)
+  project                    Project to clean (e.g. Athena)
+  platform                   Platform to clean [default: x86_64-slc6-gcc62-opt]
+
+Environment:
+  AtlasBuildBranch          Name of the nightly release (e.g. 21.0)
+  AtlasProject              Name of the project (e.g. Athena)
+  <AtlasProject>_PLATFORM   Platform (e.g. x86_64-slc6-gcc62-opt)
+"""
+
+__author__ = "Tulay Cuhadar Donszelmann <tcuhadar@cern.ch>"
+
+import datetime
+import logging
+import os
+import re
+import shutil
+import sys
+
+from ART.docopt import docopt
+from ART.art_misc import get_atlas_env, run_command
+from ART.art_configuration import ArtConfiguration
+
+MODULE = "art.clean"
+
+
+class ArtClean(object):
+    """Class to cleanup eos area."""
+
+    EOS_MGM_URL = 'root://eosatlas.cern.ch'
+
+    def __init__(self, arguments):
+        """Clean when more than 'days' old."""
+        log = logging.getLogger(MODULE)
+        self.verbose = arguments['--verbose']
+        self.eos = arguments['--eos']
+        default_days = int(arguments['--days'])
+        self.delete = arguments['--delete']
+        nightly_release = arguments['<nightly_release>']
+        project = arguments['<project>']
+        platform = arguments['<platform>']
+        base_dir = arguments['--base-dir']
+
+        # Check for missing arguments
+        if nightly_release is None:
+            (nightly_release, project, platform, dummy) = get_atlas_env()
+
+        if platform is None:
+            platform = 'x86_64-slc6-gcc62-opt'
+            log.info("Defaulting to platform %s", platform)
+
+        if self.eos:
+            # Test if we have access to kerberos
+            (code, out, err, command, start_time, end_time) = run_command('klist', verbose=self.verbose)
+            if code != 0:
+                log.critical("%s", err)
+                exit(1)
+
+        package = arguments['--package']
+        if package is not None:
+            self.clean_release(os.path.join(base_dir, nightly_release, project, platform), default_days, package)
+            return
+
+        if arguments['--release']:
+            self.clean_release(os.path.join(base_dir, nightly_release, project, platform), default_days)
+            return
+
+        config_file = arguments['--config']
+        config = ArtConfiguration(config_file)
+        for package in config.packages():
+            copy = config.get(nightly_release, project, platform, package, 'copy', False)
+            if copy:
+                days = config.get(nightly_release, project, platform, package, 'days', default_days)
+                self.clean_release(os.path.join(base_dir, nightly_release, project, platform), days, package)
+        return
+
+    def clean_release(self, release, days, package=None):
+        """Clean a release dir."""
+        log = logging.getLogger(MODULE)
+        log.debug("Starting to clean up release, keeping %d days, for %s in %s", days, "All" if package is None else package, release)
+        now = datetime.datetime.now()
+        count = 0
+        date = '1970-01-01'
+        for entry in reversed(self.listdirs(release)):
+            # Matches 2018-12-05T0345
+            match = re.match(r"(\d{4}-\d{2}-\d{2})T\d{4}", entry)
+            if match:
+                tag = os.path.join(release, entry)
+                # package_dir = tag for full release cleanup
+                package_dir = tag if package is None else os.path.join(tag, package)
+
+                # only count entries on different dates and where package actually exist
+                if self.isdir(package_dir) and match.group(1) != date:
+                    count += 1
+                    date = match.group(1)
+
+                if count > days:
+                    # compare times
+                    dir_time = datetime.datetime.strptime(match.group(0), '%Y-%m-%dT%H%M')
+                    time_diff = now - dir_time
+                    if time_diff.days > days:
+                        log.info("- Nightly tag %d days old, removing %s for package %s", time_diff.days, package_dir, package if package is not None else 'All')
+                        self.remove_dir(package_dir)
+                        if package is not None and self.isempty(tag):
+                            log.info("- Nightly tag contains no more packages, removing %s for package %s", tag, package if package is not None else 'All')
+                            self.remove_dir(tag)
+                    else:
+                        log.debug("- Tag within %d days, keeping %s for package %s", days, tag, package if package is not None else 'All')
+                else:
+                    log.debug("- Tag within %d entries, keeping %s for package %s", days, tag, package if package is not None else 'All')
+
+        if package is not None and count == 0:
+            log.debug("No package %s found in any available nightly tag for %s", package, release)
+
+    def listdirs(self, directory):
+        """Return list of directories in directory."""
+        dirs = []
+        if self.isdir(directory):
+            for entry in self.listdir(directory):
+                path = os.path.join(directory, entry)
+                if self.isdir(path):
+                    dirs.append(entry)
+        return dirs
+
+    def isempty(self, directory):
+        """Return true if directory is empty."""
+        if self.isdir(directory):
+            for entry in self.listdir(directory):
+                return False
+            return True
+
+    def listdir(self, directory):
+        """Return list of entries in directory."""
+        log = logging.getLogger(MODULE)
+        if self.eos:
+            (code, out, err, command, start_time, end_time) = run_command('eos ' + ArtClean.EOS_MGM_URL + ' ls ' + directory, verbose=self.verbose)
+            if code == 0:
+                return out.splitlines()
+            log.info("eos listdir %d %s", code, err)
+            return []
+        else:
+            return os.listdir(directory)
+
+    def isdir(self, path):
+        """Return True if path is a directory."""
+        log = logging.getLogger(MODULE)
+        if self.eos:
+            (code, out, err, command, start_time, end_time) = run_command('eos ' + ArtClean.EOS_MGM_URL + ' stat -d ' + path, verbose=self.verbose)
+            if code == 0:
+                return True
+            log.debug("eos isdir(%s) %d %s", path, code, err)
+            return False
+        else:
+            return os.path.isdir(path)
+
+    def remove_dir(self, directory):
+        """Remove directory and all below."""
+        log = logging.getLogger(MODULE)
+        if not self.isdir(directory):
+            return
+
+        if self.delete:
+            if self.eos:
+                (code, out, err, command, start_time, end_time) = run_command('eos ' + ArtClean.EOS_MGM_URL + ' rm -r ' + directory, verbose=self.verbose)
+                if code == 0:
+                    return True
+                log.info("eos rm -r (%s) %d %s", directory, code, err)
+            else:
+                shutil.rmtree(directory)
+
+
+if __name__ == '__main__':
+    if sys.version_info < (2, 7, 0):
+        sys.stderr.write("You need python 2.7 or later to run this script\n")
+        exit(1)
+
+    # NOTE: import should be here, to keep the order of the decorators (module first, art last and unused)
+    from art import __version__
+
+    logging.basicConfig()
+    log = logging.getLogger('art')
+
+    arguments = docopt(__doc__, version=os.path.splitext(os.path.basename(__file__))[0] + ' ' + __version__)
+    level = logging.DEBUG if arguments['--verbose'] else logging.WARN if arguments['--quiet'] else logging.INFO
+    log.setLevel(level)
+    ArtClean(arguments)
diff --git a/Tools/ART/scripts/art-diff.py b/Tools/ART/scripts/art-diff.py
index 8a3b9340934a28e81ec7b40c754f377d20e45851..31281893548f1d4e56989d02186ee03a634773d9 100755
--- a/Tools/ART/scripts/art-diff.py
+++ b/Tools/ART/scripts/art-diff.py
@@ -4,14 +4,16 @@
 ART  - ATLAS Release Tester - Diff.
 
 Usage:
-  art-diff.py [--diff-type=<diff_type> --exclude=<pattern>... --platform-ref=<platform> --entries=<entries>] <nightly_release_ref> <project_ref> <nightly_tag_ref> <package>
-  art-diff.py [--diff-type=<diff_type> --exclude=<pattern>... --entries=<entries>] <path> <ref_path>
+  art-diff.py [--diff-type=<diff_type> --file=<pattern>... --exclude=<pattern>... --platform-ref=<platform> --entries=<entries> --mode=<mode>] <nightly_release_ref> <project_ref> <nightly_tag_ref> <package>
+  art-diff.py [--diff-type=<diff_type> --file=<pattern>... --exclude=<pattern>... --entries=<entries> --mode=<mode>] <path> <ref_path>
 
 Options:
   --diff-type=<diff_type>    Type of diff (e.g. diff-pool or diff-root) [default: diff-pool]
   --entries=<entries>        Only diff over number of entries [default: -1]
   --exclude=<pattern>...     Exclude test files according to pattern
+  --file=<pattern>...        Compare the following file patterns for diff-root [default: *AOD*.pool.root *ESD*.pool.root *HITS*.pool.root *RDO*.pool.root *TAG*.root]
   -h --help                  Show this screen
+  --mode=<mode>              Sets the mode for diff-root {summary, detailed} [default: detailed]
   --platform-ref=<platform>  Reference Platform [default: x86_64-slc6-gcc62-opt]
   --test-name=<test_name>    Test name to compare
   --version                  Show version
@@ -43,9 +45,10 @@ import sys
 
 from ART.docopt import docopt
 
-VERSION = "0.7.8"
+VERSION = "0.10.16"
 ATHENA_STDOUT = "athena_stdout.txt"
 DEFAULT_ENTRIES = -1
+DEFAULT_MODE = "detailed"
 
 
 class ArtDiff(object):
@@ -55,11 +58,14 @@ class ArtDiff(object):
 
     def __init__(self):
         """Constructor of ArtDiff."""
+        self.default_file_patterns = ['*AOD*.pool.root', '*ESD*.pool.root', '*HITS*.pool.root', '*RDO*.pool.root', '*TAG*.root']
 
     def parse(self, arguments):
         """Called from comandline."""
         diff_type = arguments['--diff-type']
+        files = self.default_file_patterns if diff_type == 'diff-pool' else list(set(arguments['--file']))
         entries = arguments['--entries']
+        mode = arguments['--mode']
         excludes = arguments['--exclude']
         if arguments['<nightly_release_ref>'] is not None:
             try:
@@ -67,7 +73,6 @@ class ArtDiff(object):
                 project = os.environ['AtlasProject']
                 platform = os.environ[project + '_PLATFORM']
                 nightly_tag = os.environ['AtlasBuildStamp']
-                return (nightly_release, project, platform, nightly_tag)
             except KeyError, e:
                 print "Environment variable not set", e
                 sys.exit(1)
@@ -78,8 +83,8 @@ class ArtDiff(object):
             nightly_tag_ref = arguments['<nightly_tag_ref>']
 
             package = arguments['<package>']
-            print nightly_release, project, platform, nightly_tag, nightly_release_ref, project_ref, platform_ref, nightly_tag_ref 
-            exit(self.diff(nightly_release, project, platform, nightly_tag, nightly_release_ref, project_ref, platform_ref, nightly_tag_ref, package, diff_type, excludes, entries=entries))
+            print nightly_release, project, platform, nightly_tag, nightly_release_ref, project_ref, platform_ref, nightly_tag_ref
+            exit(self.diff(nightly_release, project, platform, nightly_tag, nightly_release_ref, project_ref, platform_ref, nightly_tag_ref, package, diff_type, files, excludes, entries=entries, mode=mode))
 
         # directory compare
         path = arguments['<path>']
@@ -91,7 +96,7 @@ class ArtDiff(object):
                 print "Error: <ref_path> should be a file, if <path> is a file."
                 sys.exit(1)
 
-            exit(self.diff_file(path, ref_path, diff_type, entries=entries))
+            exit(self.diff_file(path, ref_path, diff_type, entries=entries, mode=mode))
 
         if os.path.isfile(ref_path):
             print "Error: <ref_path> should be a directory, if <path> is a directory."
@@ -100,18 +105,18 @@ class ArtDiff(object):
         # check if path contains "test_" entries
         if len(glob.glob(os.path.join(path, 'test_*'))) > 0:
             # directory compare
-            exit(self.diff_dirs(path, ref_path, diff_type, excludes, entries=entries))
+            exit(self.diff_dirs(path, ref_path, diff_type, files, excludes, entries=entries, mode=mode))
 
         # single test compare
-        exit(self.diff_test(path, ref_path, diff_type, entries=entries))
+        exit(self.diff_test(path, ref_path, diff_type, files, entries=entries, mode=mode))
 
-    def diff(self, nightly_release, project, platform, nightly_tag, nightly_release_ref, project_ref, platform_ref, nightly_tag_ref, package, diff_type, excludes=[], entries=DEFAULT_ENTRIES):
+    def diff(self, nightly_release, project, platform, nightly_tag, nightly_release_ref, project_ref, platform_ref, nightly_tag_ref, package, diff_type, files, excludes=[], entries=DEFAULT_ENTRIES, mode=DEFAULT_MODE):
         """Run difference between two results."""
         path = os.path.join(ArtDiff.EOS_OUTPUT_DIR, nightly_release, project, platform, nightly_tag, package)
         ref_path = os.path.join(ArtDiff.EOS_OUTPUT_DIR, nightly_release_ref, project_ref, platform_ref, nightly_tag_ref, package)
-        return self.diff_dirs(path, ref_path, diff_type, excludes, entries=entries)
+        return self.diff_dirs(path, ref_path, diff_type, files, excludes, entries=entries, mode=mode)
 
-    def diff_dirs(self, path, ref_path, diff_type, excludes=[], entries=DEFAULT_ENTRIES):
+    def diff_dirs(self, path, ref_path, diff_type, files, excludes=[], entries=DEFAULT_ENTRIES, mode=DEFAULT_MODE):
         """Run difference between two directories."""
         print "    path: %s" % path
         print "ref_path: %s" % ref_path
@@ -131,7 +136,7 @@ class ArtDiff(object):
             print "******************************************"
             print "Test: %s" % test_name
             print "******************************************"
-            stat_per_chain[test_name] = self.diff_test(os.path.join(path, test_name), os.path.join(ref_path, test_name), diff_type, entries=entries)
+            stat_per_chain[test_name] = self.diff_test(os.path.join(path, test_name), os.path.join(ref_path, test_name), diff_type, files, entries=entries, mode=mode)
 
         result = 0
         for test_name, status in stat_per_chain.iteritems():
@@ -143,7 +148,7 @@ class ArtDiff(object):
 
         return result
 
-    def diff_test(self, path, ref_path, diff_type, entries=DEFAULT_ENTRIES):
+    def diff_test(self, path, ref_path, diff_type, files, entries=DEFAULT_ENTRIES, mode=DEFAULT_MODE):
         """Run differences between two directories."""
         result = self.get_result(path)
         ref_result = self.get_result(ref_path)
@@ -152,10 +157,9 @@ class ArtDiff(object):
                 print "%-10s: ref: %d events, val: %d events" % (key, int(ref_result[key][1]), int(result[key][1]))
 
         test_dir = path
-        test_patterns = ['*AOD*.pool.root', '*ESD*.pool.root', '*HITS*.pool.root', '*RDO*.pool.root', '*TAG*.root']
         # get files in all patterns
         test_files = []
-        for test_pattern in test_patterns:
+        for test_pattern in files:
             test_files.extend(glob.glob(os.path.join(test_dir, test_pattern)))
         # run test over all files
         result = 0
@@ -166,11 +170,11 @@ class ArtDiff(object):
             print "val_file: %s" % val_file
             print "ref_file: %s" % ref_file
 
-            result |= self.diff_file(val_file, ref_file, diff_type, entries=entries)
+            result |= self.diff_file(val_file, ref_file, diff_type, entries=entries, mode=mode)
 
         return result
 
-    def diff_file(self, path, ref_path, diff_type, entries=DEFAULT_ENTRIES):
+    def diff_file(self, path, ref_path, diff_type, entries=DEFAULT_ENTRIES, mode=DEFAULT_MODE):
         """Compare two files."""
         if not os.path.exists(ref_path):
             print "no test found in ref_dir to compare: %s" % ref_path
@@ -182,7 +186,7 @@ class ArtDiff(object):
         if diff_type == 'diff-pool':
             return self.diff_pool(path, ref_path)
 
-        return self.diff_root(path, ref_path, entries)
+        return self.diff_root(path, ref_path, entries, mode)
 
     def get_result(self, directory):
         """
@@ -226,10 +230,10 @@ class ArtDiff(object):
 
         return stat
 
-    def diff_root(self, file_name, ref_file, entries):
+    def diff_root(self, file_name, ref_file, entries, mode):
         """TBD."""
         # diff-root
-        (code, out, err) = self.run_command("acmd.py diff-root " + file_name + " " + ref_file + " --error-mode resilient --ignore-leaves RecoTimingObj_p1_HITStoRDO_timings RecoTimingObj_p1_RAWtoESD_mems RecoTimingObj_p1_RAWtoESD_timings RAWtoESD_mems RAWtoESD_timings ESDtoAOD_mems ESDtoAOD_timings HITStoRDO_timings RAWtoALL_mems RAWtoALL_timings RecoTimingObj_p1_RAWtoALL_mems RecoTimingObj_p1_RAWtoALL_timings RecoTimingObj_p1_EVNTtoHITS_timings --entries " + str(entries))
+        (code, out, err) = self.run_command("acmd.py diff-root " + file_name + " " + ref_file + " --error-mode resilient --ignore-leaves RecoTimingObj_p1_HITStoRDO_timings RecoTimingObj_p1_RAWtoESD_mems RecoTimingObj_p1_RAWtoESD_timings RAWtoESD_mems RAWtoESD_timings ESDtoAOD_mems ESDtoAOD_timings HITStoRDO_timings RAWtoALL_mems RAWtoALL_timings RecoTimingObj_p1_RAWtoALL_mems RecoTimingObj_p1_RAWtoALL_timings RecoTimingObj_p1_EVNTtoHITS_timings --entries " + str(entries) + " --mode " + mode)
         if code != 0:
             print "Error: %d" % code
             print err
diff --git a/Tools/ART/scripts/art-download.sh b/Tools/ART/scripts/art-download.sh
index b852dc8d75342e6ce4cbfd2b652a220f1c5d2c24..89ed645c4f6f1afba45d983f56a3475cea7f84a6 100755
--- a/Tools/ART/scripts/art-download.sh
+++ b/Tools/ART/scripts/art-download.sh
@@ -25,6 +25,7 @@ source "${ATLAS_LOCAL_ROOT_BASE}"/user/atlasLocalSetup.sh --quiet
 unset ALRB_noGridMW
 
 lsetup -f rucio
+lsetup -f "xrootd 4.7.1"
 
 echo "Name: ${NAME}"
 echo "Directory: ${DIRECTORY}"
diff --git a/Tools/ART/scripts/art-internal.py b/Tools/ART/scripts/art-internal.py
index 7eb30f6791318a0397ab4512a401bbbd9c82114d..ec313bd86aac6c78f8cb8b82c17cc57d2a4287f0 100755
--- a/Tools/ART/scripts/art-internal.py
+++ b/Tools/ART/scripts/art-internal.py
@@ -5,8 +5,8 @@ ART-internal - ATLAS Release Tester (internal command).
 
 Usage:
   art-internal.py build job   [-v -q]                 <script_directory> <sequence_tag> <package> <outfile> <job_type> <job_index>
-  art-internal.py grid batch  [-v -q --skip-setup -n] <script_directory> <sequence_tag> <package> <outfile> <job_type> <job_index>
-  art-internal.py grid single [-v -q --skip-setup --in=<in_file> -n] <script_directory> <sequence_tag> <package> <outfile> <job_name>
+  art-internal.py grid batch  [-v -q --skip-setup -n] <script_directory> <sequence_tag> <package> <outfile> <inform_panda> <job_type> <job_index>
+  art-internal.py grid single [-v -q --skip-setup --in=<in_file> -n] <script_directory> <sequence_tag> <package> <outfile> <inform_panda> <job_name>
 
 Options:
   -h --help         Show this screen.
@@ -18,6 +18,7 @@ Options:
   --version         Show version.
 
 Arguments:
+  inform_panda      Inform Big Panda about job
   job_index         Index of the test inside the package
   job_name          Index of the test (batch), or its name (single)
   job_type          Type of job (e.g. grid, ci, build)
@@ -63,7 +64,7 @@ def build_job(script_directory, sequence_tag, package, outfile, job_type, job_in
 
 
 @dispatch.on('grid', 'batch')
-def grid_batch(script_directory, sequence_tag, package, outfile, job_type, job_index, **kwargs):
+def grid_batch(script_directory, sequence_tag, package, outfile, inform_panda, job_type, job_index, **kwargs):
     """Run a batch job, given a particular index.
 
     Tests are called with the following parameters:
@@ -73,11 +74,11 @@ def grid_batch(script_directory, sequence_tag, package, outfile, job_type, job_i
     art_directory = os.path.dirname(os.path.realpath(sys.argv[0]))
     (nightly_release, project, platform, nightly_tag) = get_atlas_env()
     skip_setup = kwargs['skip_setup']
-    exit(ArtGrid(art_directory, nightly_release, project, platform, nightly_tag, script_directory, skip_setup).batch(sequence_tag, package, outfile, job_type, job_index))
+    exit(ArtGrid(art_directory, nightly_release, project, platform, nightly_tag, script_directory, skip_setup).batch(sequence_tag, package, outfile, inform_panda, job_type, job_index))
 
 
 @dispatch.on('grid', 'single')
-def grid_single(script_directory, sequence_tag, package, outfile, job_name, **kwargs):
+def grid_single(script_directory, sequence_tag, package, outfile, inform_panda, job_name, **kwargs):
     """Run a single job, given a particular name.
 
     Tests are called with the following parameters:
@@ -88,7 +89,7 @@ def grid_single(script_directory, sequence_tag, package, outfile, job_name, **kw
     (nightly_release, project, platform, nightly_tag) = get_atlas_env()
     skip_setup = kwargs['skip_setup']
     in_file = kwargs['in']
-    exit(ArtGrid(art_directory, nightly_release, project, platform, nightly_tag, script_directory, skip_setup).single(sequence_tag, package, outfile, job_name, in_file))
+    exit(ArtGrid(art_directory, nightly_release, project, platform, nightly_tag, script_directory, skip_setup).single(sequence_tag, package, outfile, inform_panda, job_name, in_file))
 
 
 if __name__ == '__main__':
diff --git a/Tools/ART/scripts/art-share.py b/Tools/ART/scripts/art-share.py
index 46732ebc00f8a2393022bca5ae4e41c02cf5a32f..003fe7bfd6d63b5d49c960a4a5bfe2ff8db8baa3 100755
--- a/Tools/ART/scripts/art-share.py
+++ b/Tools/ART/scripts/art-share.py
@@ -170,7 +170,7 @@ if __name__ == '__main__':
     from art import __version__
 
     logging.basicConfig()
-    log = logging.getLogger(MODULE)
+    log = logging.getLogger('art')
 
     arguments = docopt(__doc__, version=os.path.splitext(os.path.basename(__file__))[0] + ' ' + __version__)
     level = logging.DEBUG if arguments['--verbose'] else logging.WARN if arguments['--quiet'] else logging.INFO
diff --git a/Tools/ART/scripts/art-task-build.sh b/Tools/ART/scripts/art-task-build.sh
index 80a4a7d08e8ff534a0df7e82edb1c99bac1393ec..d8b5c1d925ad73af32ef179c47dea604aedb6718 100755
--- a/Tools/ART/scripts/art-task-build.sh
+++ b/Tools/ART/scripts/art-task-build.sh
@@ -21,7 +21,7 @@ else
 fi
 
 export ATLAS_LOCAL_ROOT_BASE="${ATLAS_LOCAL_ROOT_BASE:-/cvmfs/atlas.cern.ch/repo/ATLASLocalRootBase}"
-# shellcheck source=/dev/null
+# shellcheck source=/dev/null
 source "${ATLAS_LOCAL_ROOT_BASE}"/user/atlasLocalSetup.sh --quiet
 if [ "${BRANCH}" == "master" ]; then
    lsetup -a testing asetup
@@ -48,6 +48,17 @@ ART_DIRECTORY=$(command -v art.py)
 ART_VERSION=$(art.py --version)
 echo "INFO: Using ART version ${ART_VERSION} in ${ART_DIRECTORY} directory"
 
+# automatic clean-up build-output EOS area
+art-clean.py --eos --release --base-dir=/eos/atlas/atlascerngroupdisk/data-art/build-output --delete "${AtlasBuildBranch}" "${AtlasProject}" "${PLATFORM}" || true &
+
+# configure EOS_MGM_URL
+if [ -z "${EOS_MGM_URL}" ]; then
+  echo "WARNING: EOS_MGM_URL variable is empty, setting it to root://eosatlas.cern.ch"
+  export EOS_MGM_URL="root://eosatlas.cern.ch"
+else
+  echo "EOS_MGM_URL variable contains ${EOS_MGM_URL}"
+fi
+
 # run build tests
 SUBDIR=${AtlasBuildBranch}/${AtlasProject}/${PLATFORM}/${AtlasBuildStamp}
 OUTDIR="${RELEASE_BASE}/art-build/${SUBDIR}"
@@ -57,13 +68,6 @@ RESULT=$(eval "${CMD}")
 echo "${RESULT}"
 
 # copy the test results to EOS area
-if [ -z "${EOS_MGM_URL}" ]; then
-  echo "WARNING: EOS_MGM_URL variable is empty, setting it to root://eosatlas.cern.ch"
-  export EOS_MGM_URL="root://eosatlas.cern.ch"
-else
-  echo "EOS_MGM_URL variable contains", ${EOS_MGM_URL}
-fi
-
 TARGETDIR=/eos/atlas/atlascerngroupdisk/data-art/build-output/${SUBDIR}
 if [[ ! -e ${TARGETDIR} ]]; then
   echo Target directory "${TARGETDIR}"
diff --git a/Tools/ART/scripts/art-task-grid.sh b/Tools/ART/scripts/art-task-grid.sh
index 0ad216bbc9b905e9f4067958dae4966ae1e3b530..e459d17a8f2f4d783519b614c22d9e8c12fa88dd 100755
--- a/Tools/ART/scripts/art-task-grid.sh
+++ b/Tools/ART/scripts/art-task-grid.sh
@@ -5,9 +5,9 @@
 #
 # Example command lines for three types:
 #
-# art-task-grid.sh [--no-action] batch <submit_directory> <script_directory> <sequence_tag> <package> <outfile> <job_type> <number_of_tests>
+# art-task-grid.sh [--no-action] batch <submit_directory> <script_directory> <sequence_tag> <package> <outfile> <inform_panda> <job_type> <number_of_tests>
 #
-# art-task-grid.sh [--no-action] single [--inds <input_file> --n-files <number_of_files> --split <split>] <submit_directory> <script_directory> <sequence_tag> <package> <outfile> <job_name>
+# art-task-grid.sh [--no-action] single [--inds <input_file> --n-files <number_of_files> --split <split>] <submit_directory> <script_directory> <sequence_tag> <package> <outfile> <inform_panda> <job_name>
 #
 # env: ART_GRID_OPTIONS
 #
@@ -50,6 +50,7 @@ case ${TYPE} in
         if [ "$1" == "--n-files" ]; then
             NFILES="--nFiles $2"
             NFILES_PER_JOB="--nFilesPerJob $2"
+            NCORE_NFILES_PER_JOB="--nFilesPerJob $2"
             shift
             shift
         fi
@@ -70,7 +71,15 @@ case ${TYPE} in
         NCORES=""
         if [ "$1" == "--ncore" ]; then
             NCORES="--nCore $2"
-            NFILES_PER_JOB=""
+            NFILES_PER_JOB="${NCORE_NFILES_PER_JOB}"
+            LARGE_JOB=""
+            shift
+            shift
+        fi
+        ATHENA_MT=""
+        if [ "$1" == "--athena_mt" ]; then
+            NCORES="--nCore $2"
+            LARGE_JOB=""
             shift
             shift
         fi
@@ -101,6 +110,10 @@ OUTFILE=$1
 shift
 echo "OUTFILE=${OUTFILE}"
 
+INFORM_PANDA=$1
+shift
+echo "INFORM_PANDA=${INFORM_PANDA}"
+
 case ${TYPE} in
 
     'batch')
@@ -123,7 +136,7 @@ case ${TYPE} in
 esac
 
 # general options
-PATHENA_OPTIONS="--destSE=CERN-PROD_SCRATCHDISK"
+PATHENA_OPTIONS="--noBuild --expertOnly_skipScout --noEmail --maxAttempt 2"
 OUT="%OUT.tar"
 
 # we seem to have to copy the env variables locally
@@ -144,7 +157,7 @@ case ${TYPE} in
     'single')
         # <script_directory> <sequence_tag> <package> <outfile> <job_name>
         INTERNAL_COMMAND="grid single"
-        PATHENA_TYPE_OPTIONS="${LARGE_JOB} ${INDS} ${NFILES} ${NFILES_PER_JOB} ${NCORES}"
+        PATHENA_TYPE_OPTIONS="${LARGE_JOB} ${INDS} ${NFILES} ${NFILES_PER_JOB} ${NCORES} ${ATHENA_MT}"
         ARGS="${JOB_NAME}"
         echo "PATHENA_TYPE_OPTIONS=${PATHENA_TYPE_OPTIONS}"
         echo "ARGS=${ARGS}"
@@ -154,13 +167,12 @@ esac
 
 # NOTE: for art-internal.py the current dir can be used as it is copied there
 cd "${SUBMIT_DIRECTORY}"/"${PACKAGE}"/run
-SUBCOMMAND="./art-internal.py ${INTERNAL_COMMAND} ${IN_FILE} ${SCRIPT_DIRECTORY} ${SEQUENCE_TAG} ${PACKAGE} ${OUT} ${ARGS}"
-CMD="pathena ${GRID_OPTIONS} ${PATHENA_OPTIONS} ${PATHENA_TYPE_OPTIONS} --noBuild --expertOnly_skipScout --trf \"${SUBCOMMAND}\" ${SPLIT} --outDS ${OUTFILE} --extOutFile art-job.json"
+SUBCOMMAND="./art-internal.py ${INTERNAL_COMMAND} ${IN_FILE} ${SCRIPT_DIRECTORY} ${SEQUENCE_TAG} ${PACKAGE} ${OUT} ${INFORM_PANDA} ${ARGS}"
+CMD="pathena ${GRID_OPTIONS} ${PATHENA_OPTIONS} ${PATHENA_TYPE_OPTIONS} --trf \"${SUBCOMMAND}\" ${SPLIT} --outDS ${OUTFILE} --extOutFile art-job.json"
 
 #--disableAutoRetry
 #--excludedSite=ANALY_TECHNION-HEP-CREAM
 #--site=ANALY_NIKHEF-ELPROD_SHORT,ANALY_NIKHEF-ELPROD"
-#--site=ANALY_FZK,ANALY_BNL,ANALY_RAL"
 
 echo "Command: ${CMD}"
 
diff --git a/Tools/ART/scripts/art.py b/Tools/ART/scripts/art.py
index 04b4c72f7598c1c330ce16863dda514709c04111..b364ee9af2123586875d2626059bca0f6d838d25 100755
--- a/Tools/ART/scripts/art.py
+++ b/Tools/ART/scripts/art.py
@@ -7,13 +7,13 @@ You need to setup for an ATLAS release before using ART.
 
 Usage:
   art.py run             [-v -q --type=<T> --max-jobs=<N> --ci] <script_directory> <sequence_tag>
-  art.py grid            [-v -q --type=<T> --max-jobs=<N> --config=<file> --copy -n] <script_directory> <sequence_tag>
+  art.py grid            [-v -q --type=<T> --max-jobs=<N> -n] <script_directory> <sequence_tag>
   art.py submit          [-v -q --type=<T> --max-jobs=<N> --config=<file> -n] <sequence_tag> [<package>]
-  art.py copy            [-v -q --user=<user> --dst=<dir>] <indexed_package>
+  art.py copy            [-v -q --user=<user> --dst=<dir> --no-unpack --tmp=<dir> --seq=<N> --keep-tmp] <indexed_package>
   art.py validate        [-v -q] <script_directory>
   art.py included        [-v -q --type=<T> --test-type=<TT>] <script_directory>
-  art.py compare grid    [-v -q --days=<D> --user=<user> --entries=<entries>] <package> <test_name>
-  art.py compare ref     [-v -q --entries=<entries>] <path> <ref_path>
+  art.py compare grid    [-v -q --days=<D> --user=<user> --entries=<entries> --file=<pattern>... --mode=<mode>] <package> <test_name>
+  art.py compare ref     [-v -q --entries=<entries> --file=<pattern>... --mode=<mode>] <path> <ref_path>
   art.py list grid       [-v -q --user=<user> --json --test-type=<TT>] <package>
   art.py log grid        [-v -q --user=<user>] <package> <test_name>
   art.py output grid     [-v -q --user=<user>] <package> <test_name>
@@ -23,16 +23,21 @@ Usage:
 Options:
   --ci                   Run Continuous Integration tests only (using env: AtlasBuildBranch)
   --config=<file>        Use specific config file [default: art-configuration.yml]
-  --copy                 Run the copy after running the jobs
   --days=<D>             Number of days ago to pick up reference for compare [default: 1]
   --dst=<dir>            Destination directory for downloaded files
   --entries=<entries>    Number of entries to compare [default: 10]
+  --file=<pattern>...    Compare the following file patterns for diff-root [default: *AOD*.pool.root *ESD*.pool.root *HITS*.pool.root *RDO*.pool.root *TAG*.root]
   -h --help              Show this screen.
   --json                 Output in json format
+  --keep-tmp             Keep temporary directory while copying
   --max-jobs=<N>         Maximum number of concurrent jobs to run [default: 0]
+  --mode=<mode>          Sets the mode for diff-root {summary, detailed} [default: detailed]
   -n --no-action         No real submit will be done
+  --no-unpack            Do not unpack downloaded tar files
   -q --quiet             Show less information, only warnings and errors
+  --seq=<N>              Use N as postfix on destination nightly-tag (for retries) [default: 0]
   --test-type=<TT>       Type of test (e.g. all, batch or single) [default: all]
+  --tmp=<dir>            Temporary directory for downloaded files and caching of EXT0
   --type=<T>             Type of job (e.g. grid, build)
   --user=<user>          User to use for RUCIO
   -v --verbose           Show more information, debug level
@@ -41,7 +46,7 @@ Options:
 Sub-commands:
   run               Run jobs from a package in a local build (needs release and grid setup)
   grid              Run jobs from a package on the grid (needs release and grid setup)
-  submit            Submit nightly jobs to the grid (NOT for users)
+  submit            Submit nightly jobs to the grid and inform Big Panda (NOT for users)
   copy              Copy outputs and logs from RUCIO
   validate          Check headers in tests
   included          Show list of files which will be included for art submit/art grid
@@ -73,7 +78,7 @@ Tests are called with:
 """
 
 __author__ = "Tulay Cuhadar Donszelmann <tcuhadar@cern.ch>"
-__version__ = '0.9.5'
+__version__ = '0.10.21'
 
 import logging
 import os
@@ -97,8 +102,10 @@ def compare_ref(path, ref_path, **kwargs):
     """Compare the output of a job."""
     set_log(kwargs)
     art_directory = os.path.dirname(os.path.realpath(sys.argv[0]))
+    files = kwargs['file']
     entries = kwargs['entries']
-    exit(ArtBase(art_directory).compare_ref(path, ref_path, entries))
+    mode = kwargs['mode']
+    exit(ArtBase(art_directory).compare_ref(path, ref_path, files, entries, mode))
 
 
 @dispatch.on('compare', 'grid')
@@ -108,9 +115,11 @@ def compare_grid(package, test_name, **kwargs):
     art_directory = os.path.dirname(os.path.realpath(sys.argv[0]))
     (nightly_release, project, platform, nightly_tag) = get_atlas_env()
     days = int(kwargs['days'])
-    entries = kwargs['entries']
     user = kwargs['user']
-    exit(ArtGrid(art_directory, nightly_release, project, platform, nightly_tag).compare(package, test_name, days, user, entries=entries, shell=True))
+    files = kwargs['file']
+    entries = kwargs['entries']
+    mode = kwargs['mode']
+    exit(ArtGrid(art_directory, nightly_release, project, platform, nightly_tag).compare(package, test_name, days, user, files, entries=entries, mode=mode, shell=True))
 
 
 @dispatch.on('list', 'grid')
@@ -153,11 +162,13 @@ def submit(sequence_tag, **kwargs):
     art_directory = os.path.dirname(os.path.realpath(sys.argv[0]))
     (nightly_release, project, platform, nightly_tag) = get_atlas_env()
     job_type = 'grid' if kwargs['type'] is None else kwargs['type']
+    user = os.getenv('USER', 'artprod')
+    inform_panda = user == 'artprod'
     package = kwargs['package']
     config = kwargs['config']
     no_action = kwargs['no_action']
     wait_and_copy = True
-    exit(ArtGrid(art_directory, nightly_release, project, platform, nightly_tag, max_jobs=int(kwargs['max_jobs'])).task_list(job_type, sequence_tag, package, no_action, wait_and_copy, config))
+    exit(ArtGrid(art_directory, nightly_release, project, platform, nightly_tag, max_jobs=int(kwargs['max_jobs'])).task_list(job_type, sequence_tag, inform_panda, package, no_action, wait_and_copy, config))
 
 
 @dispatch.on('grid')
@@ -167,11 +178,12 @@ def grid(script_directory, sequence_tag, **kwargs):
     art_directory = os.path.dirname(os.path.realpath(sys.argv[0]))
     (nightly_release, project, platform, nightly_tag) = get_atlas_env()
     job_type = 'grid' if kwargs['type'] is None else kwargs['type']
+    inform_panda = False
     package = None
-    config = kwargs['config']
+    config = None
     no_action = kwargs['no_action']
-    wait_and_copy = kwargs['copy']
-    exit(ArtGrid(art_directory, nightly_release, project, platform, nightly_tag, script_directory=script_directory, skip_setup=True, max_jobs=int(kwargs['max_jobs'])).task_list(job_type, sequence_tag, package, no_action, wait_and_copy, config))
+    wait_and_copy = False
+    exit(ArtGrid(art_directory, nightly_release, project, platform, nightly_tag, script_directory=script_directory, skip_setup=True, max_jobs=int(kwargs['max_jobs'])).task_list(job_type, sequence_tag, inform_panda, package, no_action, wait_and_copy, config))
 
 
 @dispatch.on('run')
@@ -193,7 +205,11 @@ def copy(indexed_package, **kwargs):
     # NOTE: default depends on USER, not set it here but in ArtGrid.copy
     dst = kwargs['dst']
     user = kwargs['user']
-    exit(ArtGrid(art_directory, nightly_release, project, platform, nightly_tag).copy(indexed_package, dst=dst, user=user))
+    no_unpack = kwargs['no_unpack']
+    tmp = kwargs['tmp']
+    seq = int(kwargs['seq'])
+    keep_tmp = kwargs['keep_tmp']
+    exit(ArtGrid(art_directory, nightly_release, project, platform, nightly_tag).copy(indexed_package, dst=dst, user=user, no_unpack=no_unpack, tmp=tmp, seq=seq, keep_tmp=keep_tmp))
 
 
 @dispatch.on('validate')