From 6c62766acefeacf64581ffb20964d88b04ad1e84 Mon Sep 17 00:00:00 2001
From: Scott Snyder <scott.snyder@cern.ch>
Date: Fri, 6 May 2016 02:33:33 +0200
Subject: [PATCH] 'Update tests for last changes.' (PyJobTransforms-03-00-05)

2016-05-06  scott snyder  <snyder@bnl.gov>
	* Tagging PyJobTransforms-03-00-05.
	* test/test_trfReports.py, test/test_trfArgs.py: Update for last
	changes.

2016-04-29 Graeme Stewart <graeme.andrew.stewart@cern.ch>
	* python/trfReports.py
	- Remove the ~useless "argValues" section from the job report
	- Remove the ~useless 'integrity' and 'file_type'sections from subfile reports
	- Update report number to 2.0.0

2016-04-28 Graeme Stewart <graeme.andrew.stewart@cern.ch>
	* python/trfUtils.py
	- Minor tweaks on reported environment variables for CMake builds

2016-04-28 Graeme Stewart <graeme.andrew.stewart@cern.ch>
	* python/trfUtils.py
	- Update asetupReport to use "lstags" (works for both cmt and cmake)

2016-04-27 Graeme Stewart <graeme.andrew.stewart@cern.ch>
...
(Long ChangeLog diff - truncated)


Former-commit-id: 00312877a3f91ecfa3d4bc19d289cf1416509530
---
 Tools/PyJobTransforms/CMakeLists.txt          |   2 +-
 Tools/PyJobTransforms/cmt/requirements        |   1 +
 Tools/PyJobTransforms/python/transform.py     |  24 +-
 Tools/PyJobTransforms/python/trfArgClasses.py |  24 +-
 Tools/PyJobTransforms/python/trfArgs.py       |  15 +-
 Tools/PyJobTransforms/python/trfExe.py        |   5 +-
 .../python/trfFileUtils-lite.py               | 389 +++++++++++++++++
 Tools/PyJobTransforms/python/trfFileUtils.py  | 143 +------
 .../python/trfFileValidationFunctions.py      |   6 +-
 Tools/PyJobTransforms/python/trfGraph.py      |   8 +-
 Tools/PyJobTransforms/python/trfReports.py    |  44 +-
 Tools/PyJobTransforms/python/trfUtils.py      |  28 +-
 Tools/PyJobTransforms/python/trfValidation.py |   2 +-
 .../test/test_trfArgClassesATLAS.py           |  19 +-
 Tools/PyJobTransforms/test/test_trfArgs.py    |  32 +-
 Tools/PyJobTransforms/test/test_trfReports.py |  14 +-
 .../test/test_trfUtilsDBRelease.py            |   2 +-
 .../test/test_trfUtilsParallelJobProcessor.py | 170 ++++----
 .../test_trfUtilsParallelJobProcessorData.py  | 397 +++++++++---------
 19 files changed, 775 insertions(+), 550 deletions(-)
 create mode 100644 Tools/PyJobTransforms/python/trfFileUtils-lite.py

diff --git a/Tools/PyJobTransforms/CMakeLists.txt b/Tools/PyJobTransforms/CMakeLists.txt
index df82eae883c..e741394ebc9 100644
--- a/Tools/PyJobTransforms/CMakeLists.txt
+++ b/Tools/PyJobTransforms/CMakeLists.txt
@@ -11,7 +11,7 @@ find_package( PythonLibs )
 # Install files from the package:
 atlas_install_python_modules( python/*.py )
 atlas_install_joboptions( share/*.py )
-atlas_install_runtime( scripts/*_tf.py )
+atlas_install_runtime( scripts/*.py )
 atlas_install_runtime( test/PyJobTransforms_TestConfiguration.xml )
 atlas_install_generic( share/*.db
                        DESTINATION share
diff --git a/Tools/PyJobTransforms/cmt/requirements b/Tools/PyJobTransforms/cmt/requirements
index fb6acf2e3d0..b22c79120e9 100755
--- a/Tools/PyJobTransforms/cmt/requirements
+++ b/Tools/PyJobTransforms/cmt/requirements
@@ -23,6 +23,7 @@ apply_pattern generic_declare_for_link kind=runtime files='-s=../share *.db' pre
 #      By default taken from ../share
 #      These will be installed in the Installarea/jobOptions/<package>
 #
+# 
 pattern declare_job_transforms \
         private ; \
     apply_pattern generic_declare_for_link kind=tfs_exe files='-s=${tfs_dir} <tfs>' prefix=share/bin ; \
diff --git a/Tools/PyJobTransforms/python/transform.py b/Tools/PyJobTransforms/python/transform.py
index 1a0e01c527d..ff05cf059c2 100644
--- a/Tools/PyJobTransforms/python/transform.py
+++ b/Tools/PyJobTransforms/python/transform.py
@@ -5,7 +5,7 @@
 # @brief Main package for new style ATLAS job transforms
 # @details Core class for ATLAS job transforms
 # @author atlas-comp-transforms-dev@cern.ch
-# @version $Id: transform.py 697388 2015-09-29 13:38:46Z graemes $
+# @version $Id: transform.py 743343 2016-04-27 15:47:21Z graemes $
 # 
 
 __version__ = '$Revision'
@@ -201,16 +201,7 @@ class transform(object):
                         continue
                     updateDict[k] = v
                 extraParameters.update(updateDict)
-            # Pickled arguments?
-            if 'argdict' in self._argdict:
-                try:
-                    import cPickle as pickle
-                    msg.debug('Given pickled arguments in {0}'.format(self._argdict['argdict']))
-                    argfile = open(self._argdict['argdict'], 'r')
-                    extraParameters.update(pickle.load(argfile))
-                    argfile.close()
-                except Exception, e:
-                    raise trfExceptions.TransformArgException(trfExit.nameToCode('TRF_ARG_ERROR'), 'Error when unpickling file {0}'.format(self._argdict['argdict']))
+
             # JSON arguments?
             if 'argJSON' in self._argdict:
                 try:
@@ -303,16 +294,17 @@ class transform(object):
                 self.parser.dumpArgs()
                 sys.exit(0)
                 
+            # Graph stuff!
+            msg.info('Resolving execution graph')
+            self._setupGraph()
+            
             if 'showSteps' in self._argdict:
                 for exe in self._executors:
                     print "Executor Step: {0} (alias {1})".format(exe.name, exe.substep)
+                    if msg.level <= logging.DEBUG:
+                        print " {0} -> {1}".format(exe.inData, exe.outData)
                 sys.exit(0)
                         
-            # Graph stuff!
-            msg.info('Starting to resolve execution graph')
-            self._setupGraph()
-            msg.info('Execution graph resolved')
-            
             if 'showGraph' in self._argdict:
                 print self._executorGraph
                 sys.exit(0)
diff --git a/Tools/PyJobTransforms/python/trfArgClasses.py b/Tools/PyJobTransforms/python/trfArgClasses.py
index 83cddeab46b..bfb00d33778 100644
--- a/Tools/PyJobTransforms/python/trfArgClasses.py
+++ b/Tools/PyJobTransforms/python/trfArgClasses.py
@@ -3,7 +3,7 @@
 ## @package PyJobTransforms.trfArgClasses
 # @brief Transform argument class definitions
 # @author atlas-comp-transforms-dev@cern.ch
-# @version $Id: trfArgClasses.py 740512 2016-04-15 10:13:14Z graemes $
+# @version $Id: trfArgClasses.py 743343 2016-04-27 15:47:21Z graemes $
 
 import argparse
 import bz2
@@ -21,7 +21,7 @@ msg = logging.getLogger(__name__)
 
 import PyJobTransforms.trfExceptions as trfExceptions
 
-from PyJobTransforms.trfFileUtils import athFileInterestingKeys, AthenaLiteFileInfo, NTUPEntries, HISTEntries, urlType, ROOTGetSize, inpFileInterestingKeys
+from PyJobTransforms.trfFileUtils import athFileInterestingKeys, AthenaLiteFileInfo, NTUPEntries, HISTEntries, urlType, ROOTGetSize
 from PyJobTransforms.trfUtils import call, cliToKey
 from PyJobTransforms.trfExitCodes import trfExit as trfExit
 from PyJobTransforms.trfDecorators import timelimited
@@ -1034,6 +1034,7 @@ class argFile(argList):
                             msg.debug('No cached value for {0}:{1}. Calling generator function {2} ({3})'.format(fname, key, self._metadataKeys[key].func_name, self._metadataKeys[key]))
                             try:
                                 # For efficiency call this routine with all files we have
+                                msg.info("Metadata generator called to obtain {0} for {1}".format(key, files))
                                 self._metadataKeys[key](files)
                             except trfExceptions.TransformMetadataException, e:
                                 msg.error('Calling {0!s} raised an exception: {1!s}'.format(self._metadataKeys[key].func_name, e))
@@ -1252,19 +1253,15 @@ class argAthenaFile(argFile):
         elif self._type.upper() in ('TAG'):
             aftype = 'TAG'
 
-        # retrieve GUID and nentries without runMiniAthena subprocess for input POOL files or temporary files
-        if aftype == 'POOL' and (self._io == 'input' or self._io == 'temporary'):
-            retrieveKeys = inpFileInterestingKeys
-
         # get G4Version for HITSFiles
-        if self._type.upper() in ('HITS'):
-            retrieveKeys.append('G4Version')
+#         if self._type.upper() in ('HITS'):
+#             retrieveKeys.append('G4Version')
 
         # N.B. Could parallelise here            
         for fname in myFiles:
             athFileMetadata = AthenaLiteFileInfo(fname, aftype, retrieveKeys=retrieveKeys)
             if athFileMetadata == None:
-                raise trfExceptions.TransformMetadataException(trfExit.nameToCode('TRF_METADATA_CALL_FAIL'), 'Call to AthenaFileInfo failed')
+                raise trfExceptions.TransformMetadataException(trfExit.nameToCode('TRF_METADATA_CALL_FAIL'), 'Call to AthenaLiteFileInfo failed')
             msg.debug('Setting metadata for file {0} to {1}'.format(fname, athFileMetadata[fname]))
             self._fileMetadata[fname].update(athFileMetadata[fname])
 
@@ -2080,10 +2077,17 @@ class argSubstepSteering(argSubstep):
     # "no" - a convenience null option for production managers, does nothing
     # "doRDO_TRIG" - run split trigger for Reco_tf and friends
     # "afterburn" - run the B decay afterburner for event generation
+    # "doRAWtoALL" - produce all DESDs and AODs directly from bytestream
     steeringAlises = {
                       'no': {},
                       'doRDO_TRIG': {'RAWtoESD': [('in', '-', 'RDO'), ('in', '+', 'RDO_TRIG'), ('in', '-', 'BS')]},
-                      'afterburn': {'generate': [('out', '-', 'EVNT')]}, 
+                      'afterburn': {'generate': [('out', '-', 'EVNT')]},
+                      'doRAWtoALL': {'RAWtoALL': [('in', '+', 'BS'), ('in', '+', 'RDO'), ('in', '+', 'RDO_FTK'),
+                                                  ('in', '+', 'DRAW_ZMUMU'), ('in', '+', 'DRAW_ZEE'), ('in', '+', 'DRAW_EMU'), ('in', '+', 'DRAW_RPVLL'), 
+                                                  ('out', '+', 'ESD'), ('out', '+', 'AOD'), ('out', '+', 'HIST_R2A')],
+                                     'RAWtoESD': [('in', '-', 'BS'), ('in', '-', 'RDO'), ('in', '-', 'RDO_FTK'),
+                                                  ('out', '-', 'ESD'),],
+                                     'ESDtoAOD': [('in', '-', 'ESD'), ('out', '-', 'AOD'),]}
                       }
     
     # Reset getter
diff --git a/Tools/PyJobTransforms/python/trfArgs.py b/Tools/PyJobTransforms/python/trfArgs.py
index 63468358c25..61c2f655d54 100644
--- a/Tools/PyJobTransforms/python/trfArgs.py
+++ b/Tools/PyJobTransforms/python/trfArgs.py
@@ -3,7 +3,7 @@
 ## @Package PyJobTransforms.trfArgs
 #  @brief Standard arguments supported by trf infrastructure
 #  @author atlas-comp-transforms-dev@cern.ch
-#  @version $Id: trfArgs.py 740532 2016-04-15 11:01:50Z graemes $
+#  @version $Id: trfArgs.py 743343 2016-04-27 15:47:21Z graemes $
 
 import logging
 msg = logging.getLogger(__name__)
@@ -17,7 +17,6 @@ from PyJobTransforms.trfLogger import stdLogLevels
 def addStandardTrfArgs(parser):
     parser.add_argument('--verbose', '--debug', action='store_true', help='Set transform loglevel to DEBUG')
     parser.add_argument('--loglevel', choices=stdLogLevels.keys(), help='Set transform logging level')
-    parser.add_argument('--argdict', metavar='FILE', help='File containing pickled argument dictionary')
     parser.add_argument('--argJSON', '--argjson', metavar='FILE', help='File containing JSON serialised argument dictionary')
     parser.add_argument('--dumpargs', action='store_true', help='Dump transform arguments and exit')
     parser.add_argument('--showGraph', action='store_true', help='Show multi-step transform graph, then exit')
@@ -184,13 +183,19 @@ def addMetadataArguments(parser):
 # @param pick Optional list of DPD types to add (use short names, e.g., @c DESDM_MUON)
 # @param transform Transform object. DPD data types will be added to the correct executor (by name or substep)
 # @param multipleOK If the @c multipleOK flag should be set for this argument
+# @param RAWtoALL Flag if DPDs should be made direct from bytestream, instead of 'classic' workflow
 #@silent
-def addPrimaryDPDArguments(parser, pick = None, transform = None, multipleOK=False):
+def addPrimaryDPDArguments(parser, pick = None, transform = None, multipleOK=False, RAWtoALL=False):
     parser.defineArgGroup('Primary DPDs', 'Primary DPD File Options')
     # list* really gives just a list of DPD names
     try:
         from PrimaryDPDMaker.PrimaryDPDFlags import listRAWtoDPD,listESDtoDPD,listAODtoDPD
-        for substep, dpdList in [(['r2e'], listRAWtoDPD), (['e2d'], listESDtoDPD), (['a2d'], listAODtoDPD)]:
+        if RAWtoALL:
+            listRAWtoDPD.extend(listESDtoDPD)
+            matchedOutputList = [(['r2a'], listRAWtoDPD), (['a2d'], listAODtoDPD)]
+        else:
+            matchedOutputList = [(['r2e'], listRAWtoDPD), (['e2d'], listESDtoDPD), (['a2d'], listAODtoDPD)]
+        for substep, dpdList in matchedOutputList:
             for dpdName in [ dpd.replace('Stream', '') for dpd in dpdList ]:
                 msg.debug('Handling {0}'.format(dpdName))
                 if pick == None or dpdName in pick:
@@ -416,6 +421,8 @@ def getExtraDPDList(NTUPOnly = False):
     extraDPDs.append(dpdType('NTUP_MCPTP', substeps=['a2d'], help="Ntuple file for MCP Tag and Probe"))
     extraDPDs.append(dpdType('NTUP_MCPScale', substeps=['a2d'], help="Ntuple file for MCP scale calibration"))
 
+    extraDPDs.append(dpdType('NTUP_FastCaloSim', substeps=['e2d']))
+
     # Trigger NTUPs (for merging only!)
     if NTUPOnly:
         extraDPDs.append(dpdType('NTUP_TRIGCOST', treeNames=['trig_cost']))
diff --git a/Tools/PyJobTransforms/python/trfExe.py b/Tools/PyJobTransforms/python/trfExe.py
index 2acc5cdb674..34bb2cb2d04 100755
--- a/Tools/PyJobTransforms/python/trfExe.py
+++ b/Tools/PyJobTransforms/python/trfExe.py
@@ -5,7 +5,7 @@
 # @brief Transform execution functions
 # @details Standard transform executors
 # @author atlas-comp-transforms-dev@cern.ch
-# @version $Id: trfExe.py 740532 2016-04-15 11:01:50Z graemes $
+# @version $Id: trfExe.py 740201 2016-04-14 09:50:40Z aalshehr $
 
 import copy
 import json
@@ -20,6 +20,7 @@ import sys
 import time
 
 import logging
+from fnmatch import fnmatch
 msg = logging.getLogger(__name__)
 
 from PyJobTransforms.trfJobOptions import JobOptionsTemplate
@@ -723,7 +724,7 @@ class athenaExecutor(scriptExecutor):
 
         # Setup JO templates
         if self._skeleton is not None:
-            self._jobOptionsTemplate = JobOptionsTemplate(exe = self, version = '$Id: trfExe.py 740532 2016-04-15 11:01:50Z graemes $')
+            self._jobOptionsTemplate = JobOptionsTemplate(exe = self, version = '$Id: trfExe.py 740201 2016-04-14 09:50:40Z aalshehr $')
         else:
             self._jobOptionsTemplate = None
 
diff --git a/Tools/PyJobTransforms/python/trfFileUtils-lite.py b/Tools/PyJobTransforms/python/trfFileUtils-lite.py
new file mode 100644
index 00000000000..80f7b0cff97
--- /dev/null
+++ b/Tools/PyJobTransforms/python/trfFileUtils-lite.py
@@ -0,0 +1,389 @@
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+## @package PyJobTransforms.trfFileUtils
+# @brief Transform utilities to deal with files.
+# @details Mainly used by argFile class.
+# @author atlas-comp-transforms-dev@cern.ch
+# @version $Id: trfFileUtils.py 696484 2015-09-23 17:20:28Z graemes $
+# @todo make functions timelimited
+
+import logging
+msg = logging.getLogger(__name__)
+
+#  @note Use the PyCmt forking decorator to ensure that ROOT is run completely within 
+#  a child process and will not 'pollute' the parent python process with unthread-safe
+#  bits of code (otherwise strange hangs are observed on subsequent uses of ROOT)
+import PyUtils.Decorators as _decos
+
+from PyUtils.RootUtils import import_root
+from PyJobTransforms.trfDecorators import timelimited
+
+## @note The 'AODFixVersion' is can appear for AOD or ESD files 
+# athFileInterestingKeys = ['beam_energy', 'beam_type', 'conditions_tag', 'file_size',
+#                           'file_guid', 'file_type', 'geometry', 'lumi_block', 'nentries', 'run_number', 
+#                           'AODFixVersion']
+# Stripped down key list for files which are inputs
+athFileInterestingKeys = inpFileInterestingKeys = ['file_size', 'file_guid', 'file_type', 'nentries']
+
+## @brief Determines metadata of BS, POOL or TAG file.
+#  @details Trivial wrapper around PyUtils.AthFile.
+#  @param fileName Path(s) to the file for which the metadata are determined
+#  @param retrieveKeys Keys to extract from the @c AthFile.infos dictionary
+#  @return 
+#  - Dictionary containing metadata of the file(s)
+#  - @c None if the determination failed.
+@timelimited()
+def AthenaFileInfo(fileNames, retrieveKeys = athFileInterestingKeys):
+    msg.debug('Calling AthenaFileInfo for {0}'.format(fileNames))
+
+    from PyUtils import AthFile
+    AthFile.server.flush_cache()
+    AthFile.server.disable_pers_cache()
+
+    if isinstance(fileNames, str):
+        fileNames = [fileNames,]
+
+    metaDict = {}
+    try:
+        ## @note This code is transitional, until all the versions of AthFile we
+        #  use support pfopen(). It should then be removed. Graeme, 2013-11-05.
+        #  Note to Future: Give it 6 months, then get rid of it!
+        if len(fileNames) > 1:
+            try:    
+                athFile = AthFile.pfopen(fileNames)
+            except AttributeError:  
+                msg.warning('This version of AthFile does not support "pfopen". Falling back to serial interface.')
+                athFile = AthFile.fopen(fileNames)
+        else:
+            athFile = AthFile.fopen(fileNames)
+        msg.debug('AthFile came back')
+        for fname, meta in zip(fileNames, athFile):
+            metaDict[fname] = {}
+            for key in retrieveKeys:
+                msg.debug('Looking for key {0}'.format(key))
+                try:
+                    # AODFix is tricky... it is absent in many files, but this is not an error
+                    if key is 'AODFixVersion':
+                        if 'tag_info' in meta.infos and isinstance('tag_info', dict) and 'AODFixVersion' in meta.infos['tag_info']:
+                            metaDict[fname][key] = meta.infos['tag_info'][key]
+                        else:
+                            metaDict[fname][key] = ''
+                    # beam_type seems odd for RAW - typical values seem to be [1] instead of 'collisions' or 'cosmics'.
+                    # So we use the same scheme as AutoConfiguration does, mapping project names to known values
+                    # It would be nice to import this all from AutoConfiguration, but there is no suitable method at the moment.
+                    # N.B. This is under discussion so this code is temporary fix (Captain's Log, Stardate 2012-11-28) 
+                    elif key is 'beam_type':
+                        try:
+                            if isinstance(meta.infos[key], list) and len(meta.infos[key]) > 0 and meta.infos[key][0] in ('cosmics' ,'singlebeam','collisions'):
+                                metaDict[fname][key] = meta.infos[key]
+                            else:
+                                from RecExConfig.AutoConfiguration import KnownCosmicsProjects, Known1BeamProjects, KnownCollisionsProjects, KnownHeavyIonProjects
+                                if 'bs_metadata' in meta.infos.keys() and isinstance(meta.infos['bs_metadata'], dict) and 'Project' in meta.infos['bs_metadata'].keys():
+                                    project = meta.infos['bs_metadata']['Project']
+                                elif 'tag_info' in meta.infos.keys() and isinstance(meta.infos['tag_info'], dict) and 'project_name' in meta.infos['tag_info'].keys():
+                                    project = meta.infos['tag_info']['project_name']
+                                else:
+                                    msg.info('AthFile beam_type was not a known value ({0}) and no project could be found for this file'.format(meta.infos[key]))
+                                    metaDict[fname][key] = meta.infos[key]
+                                    continue
+                                if project in KnownCollisionsProjects or project in KnownHeavyIonProjects:
+                                    metaDict[fname][key] = ['collisions']
+                                    continue
+                                if project in KnownCosmicsProjects:
+                                    metaDict[fname][key] = ['cosmics']
+                                    continue
+                                if project in Known1BeamProjects:
+                                    metaDict[fname][key] = ['singlebeam']
+                                    continue
+                                # Erm, so we don't know
+                                msg.info('AthFile beam_type was not a known value ({0}) and the file\'s project ({1}) did not map to a known beam type using AutoConfiguration'.format(meta.infos[key], project))
+                                metaDict[fname][key] = meta.infos[key]
+                        except Exception, e:
+                            msg.error('Got an exception while trying to determine beam_type: {0}'.format(e))
+                            metaDict[fname][key] = meta.infos[key]
+                    else:
+                        metaDict[fname][key] = meta.infos[key]
+                except KeyError:
+                    msg.warning('Missing key in athFile info: {0}'.format(key))
+            msg.debug('Found these metadata for {0}: {1}'.format(fname, metaDict[fname].keys()))
+        return metaDict
+    except ValueError, e:
+        msg.error('Problem in getting AthFile metadata for {0}'.format(fileNames))
+        return None
+
+## @brief New lightweight interface to getting a single file's metadata
+#  @note Use this for now, but expect further evolution...
+def AthenaLiteFileInfo(filename, filetype, retrieveKeys = athFileInterestingKeys):
+    msg.debug('Calling AthenaLiteFileInfo for {0} (type {1})'.format(filename, filetype))
+    from subprocess import CalledProcessError 
+
+    if filetype == 'POOL':
+        # retrieve GUID and nentries without runMiniAthena subprocess
+        if set(retrieveKeys) == set(inpFileInterestingKeys):
+            from PyUtils.AthFileLite import AthInpFile as AthFileLite
+        else:
+            from PyUtils.AthFileLite import AthPoolFile as AthFileLite
+    elif filetype == 'BS':
+        from PyUtils.AthFileLite import AthBSFile as AthFileLite
+    elif filetype == 'TAG':
+        from PyUtils.AthFileLite import AthTagFile as AthFileLite
+    else:
+        msg.error('Unknown filetype for {0} - no lightweight metadata interface for type {1}'.format(filename, filetype))
+        return None
+    
+    metaDict = {}
+    try:
+        meta = AthFileLite(filename).fileinfo
+        msg.debug('AthFileLite came back for {0}'.format(filename))
+        metaDict[filename] = {}
+        for key in retrieveKeys:
+            msg.debug('Looking for key {0}'.format(key))
+            try:
+                # AODFix is tricky... it is absent in many files, but this is not an error
+                if key is 'AODFixVersion':
+                    if 'tag_info' in meta and isinstance('tag_info', dict) and 'AODFixVersion' in meta['tag_info']:
+                        metaDict[filename][key] = meta['tag_info'][key]
+                    else:
+                        metaDict[filename][key] = ''
+                # beam_type seems odd for RAW - typical values seem to be [1] instead of 'collisions' or 'cosmics'.
+                # So we use the same scheme as AutoConfiguration does, mapping project names to known values
+                # It would be nice to import this all from AutoConfiguration, but there is no suitable method at the moment.
+                # N.B. This is under discussion so this code is temporary fix (Captain's Log, Stardate 2012.11.28) 
+                elif key is 'beam_type':
+                    try:
+                        if isinstance(meta[key], list) and len(meta[key]) > 0 and meta[key][0] in ('cosmics' ,'singlebeam','collisions'):
+                            metaDict[filename][key] = meta[key]
+                        else:
+                            from RecExConfig.AutoConfiguration import KnownCosmicsProjects, Known1BeamProjects, KnownCollisionsProjects, KnownHeavyIonProjects
+                            if 'bs_metadata' in meta.keys() and isinstance(meta['bs_metadata'], dict) and 'Project' in meta['bs_metadata'].keys():
+                                project = meta['bs_metadata']['Project']
+                            elif 'tag_info' in meta.keys() and isinstance(meta['tag_info'], dict) and 'project_name' in meta['tag_info'].keys():
+                                project = meta['tag_info']['project_name']
+                            else:
+                                msg.info('AthFile beam_type was not a known value ({0}) and no project could be found for this file'.format(meta[key]))
+                                metaDict[filename][key] = meta[key]
+                                continue
+                            if project in KnownCollisionsProjects or project in KnownHeavyIonProjects:
+                                metaDict[filename][key] = ['collisions']
+                                continue
+                            if project in KnownCosmicsProjects:
+                                metaDict[filename][key] = ['cosmics']
+                                continue
+                            if project in Known1BeamProjects:
+                                metaDict[filename][key] = ['singlebeam']
+                                continue
+                            # Erm, so we don't know
+                            msg.info('AthFile beam_type was not a known value ({0}) and the file\'s project ({1}) did not map to a known beam type using AutoConfiguration'.format(meta[key], project))
+                            metaDict[filename][key] = meta[key]
+                    except Exception, e:
+                        msg.error('Got an exception while trying to determine beam_type: {0}'.format(e))
+                        metaDict[filename][key] = meta[key]
+                elif key is 'G4Version':
+                    msg.debug('Searching for G4Version in metadata')
+                    try: 
+                        metaDict[filename][key] = meta['metadata']['/Simulation/Parameters']['G4Version']
+                        msg.debug('Setting G4Version to {0}'.format(meta['metadata']['/Simulation/Parameters']['G4Version']))
+                    except (KeyError, TypeError) as e:
+                        msg.debug('Could not find G4Version information in metadata for file {0}'.format(filename))
+                else:
+                    metaDict[filename][key] = meta[key]
+            except KeyError:
+                msg.warning('Missing key in athFile info: {0}'.format(key))
+    except (CalledProcessError, ValueError, AssertionError, ReferenceError) as e:
+        msg.error('Problem in getting AthFile metadata for {0}'.format(filename))
+        return None
+    msg.debug('Returning {0}'.format(metaDict))
+    return metaDict    
+
+## @brief Determines number of events in a HIST file.
+#  @details Basically taken from PyJobTransformsCore.trfutil.MonitorHistFile
+#  @param fileName Path to the HIST file.  
+#  @return 
+#  - Number of events.
+#  - @c None if the determination failed.
+#  @note Use the PyCmt forking decorator to ensure that ROOT is run completely within 
+#  a child process and will not 'pollute' the parent python process with unthread-safe
+#  bits of code (otherwise strange hangs are observed on subsequent uses of ROOT)
+@_decos.forking
+def HISTEntries(fileName):
+
+    root = import_root()
+
+    fname = root.TFile.Open(fileName, 'READ')
+    
+    if not (isinstance(fname, root.TFile) and fname.IsOpen()):
+        return None
+
+    rundir = None
+    keys = fname.GetListOfKeys()
+    
+    for key in keys:
+        
+        name=key.GetName()
+        
+        if name.startswith('run_') and name is not 'run_multiple':
+            
+            if rundir is not None:
+                msg.warning('Found two run_ directories in HIST file %s: %s and %s' % ( fileName, rundir, name) )
+                return None
+            else:
+                rundir = name
+                
+        del name
+       
+    if rundir is None:
+        msg.warning( 'Unable to find run directory in HIST file %s' % fileName )
+        fname.Close()
+        return None
+    
+    msg.info( 'Using run directory %s for event counting of HIST file %s. ' % ( rundir, fileName ) )
+    
+    hpath = '%s/GLOBAL/DQTDataFlow/events_lb' % rundir
+    possibleLBs = []
+    if 'tmp.HIST_' in fileName:
+        msg.info( 'Special case for temporary HIST file {0}. '.format( fileName ) )
+        h = fname.Get('{0}'.format(rundir))
+        for directories in h.GetListOfKeys() :
+            if 'lb' in directories.GetName():
+                msg.info( 'Using {0} in tmp HIST file {1}. '.format(directories.GetName(),  fileName ) )
+                hpath = rundir+'/'+str(directories.GetName())+'/GLOBAL/DQTDataFlow/events_lb' 
+                possibleLBs.append(hpath)
+    else:
+        msg.info( 'Classical case for HIST file {0}. '.format( fileName ) )
+        possibleLBs.append(hpath)
+    nev = 0
+    if len(possibleLBs) == 0:
+        msg.warning( 'Unable to find events_lb histogram in HIST file %s' % fileName )
+        fname.Close()
+        return None
+    for hpath in possibleLBs:
+        h = fname.Get(hpath)
+        
+        if not isinstance( h, root.TH1 ):
+            msg.warning( 'Unable to retrieve %s in HIST file %s.' % ( hpath, fileName ) )
+            fname.Close()
+            return None
+        
+        nBinsX = h.GetNbinsX()
+        nevLoc = 0
+        
+        for i in xrange(1, nBinsX):
+            
+            if h[i] < 0:
+                msg.warning( 'Negative number of events for step %s in HIST file %s.' %( h.GetXaxis().GetBinLabel(i), fileName ) )
+                fname.Close()
+                return None
+            
+            elif h[i] == 0:
+                continue
+            
+            if nevLoc == 0:
+                nevLoc = h[i]
+                
+            else:
+                if nevLoc != h[i]:
+                    msg.warning( 'Mismatch in events per step in HIST file %s; most recent step seen is %s.' % ( fileName, h.GetXaxis().GetBinLabel(i) ) )
+                    fname.Close()
+                    return None
+        nev += nevLoc        
+    fname.Close()
+    return nev
+
+
+
+## @brief Determines number of entries in NTUP file with given tree names.
+#  @details Basically taken from PyJobTransformsCore.trfutil.ntup_entries.
+#  @param fileName Path to the NTUP file.
+#  @param treeNames Tree name or list of tree names.
+#  In the latter case it is checked if all trees contain the same number of events  
+#  @return 
+#  - Number of entries.
+#  - @c None if the determination failed.
+#  @note Use the PyCmt forking decorator to ensure that ROOT is run completely within 
+#  a child process and will not 'pollute' the parent python process with unthread-safe
+#  bits of code (otherwise strange hangs are observed on subsequent uses of ROOT)
+@_decos.forking
+def NTUPEntries(fileName, treeNames):
+    
+    if not isinstance( treeNames, list ):
+        treeNames=[treeNames]
+        
+    root = import_root()
+    
+    fname = root.TFile.Open(fileName, 'READ')
+    
+    if not (isinstance(fname, root.TFile) and fname.IsOpen()):
+        return None
+    
+    prevNum=None
+    prevTree=None
+               
+    for treeName in treeNames:
+            
+        tree = fname.Get(treeName)
+            
+        if not isinstance(tree, root.TTree):
+            return None
+        
+        num = tree.GetEntriesFast()
+
+        if not num>=0:
+            msg.warning('GetEntriesFast returned non positive value for tree %s in NTUP file %s.' % ( treeName, fileName ))
+            return None
+                
+        if prevNum is not None and prevNum != num:
+            msg.warning( "Found diffferent number of entries in tree %s and tree %s of file %s." % ( treeName, prevTree, fileName  ))
+            return None
+        
+        numberOfEntries=num
+        prevTree=treeName
+        del num
+        del tree
+
+    fname.Close()
+
+    return numberOfEntries
+
+
+## @brief Get the size of a file via ROOT's TFile
+#  @details Use TFile.Open to retrieve a ROOT filehandle, which will
+#  deal with all non-posix filesystems. Return the GetSize() value.
+#  The option filetype=raw is added to ensure this works for non-ROOT files too (e.g. BS)
+#  @note Use the PyCmt forking decorator to ensure that ROOT is run completely within 
+#  a child process and will not 'pollute' the parent python process with unthread-safe
+#  bits of code (otherwise strange hangs are observed on subsequent uses of ROOT)
+#  @param filename Filename to get size of
+#  @return fileSize or None of there was a problem
+@_decos.forking
+def ROOTGetSize(filename):
+    root = import_root()
+    
+    try:
+        msg.debug('Calling TFile.Open for {0}'.format(filename))
+        fname = root.TFile.Open(filename + '?filetype=raw', 'READ')
+        fsize = fname.GetSize()
+        msg.debug('Got size {0} from TFile.GetSize'.format(fsize))
+    except ReferenceError:
+        msg.error('Failed to get size of {0}'.format(filename))
+        return None
+    
+    fname.Close()
+    del root
+    return fsize
+    
+
+## @brief Return the LAN access type for a file URL
+#  @param filename Name of file to examine
+#  @return
+#  - String with LAN protocol
+def urlType(filename):
+    if filename.startswith('dcap:'):
+        return 'dcap'
+    if filename.startswith('root:'):
+        return 'root'
+    if filename.startswith('rfio:'):
+        return 'rfio'
+    if filename.startswith('file:'):
+        return 'posix'
+    return 'posix'
+
diff --git a/Tools/PyJobTransforms/python/trfFileUtils.py b/Tools/PyJobTransforms/python/trfFileUtils.py
index 85ab20d2279..0566514903b 100644
--- a/Tools/PyJobTransforms/python/trfFileUtils.py
+++ b/Tools/PyJobTransforms/python/trfFileUtils.py
@@ -4,7 +4,7 @@
 # @brief Transform utilities to deal with files.
 # @details Mainly used by argFile class.
 # @author atlas-comp-transforms-dev@cern.ch
-# @version $Id: trfFileUtils.py 696484 2015-09-23 17:20:28Z graemes $
+# @version $Id: trfFileUtils.py 731518 2016-03-22 07:29:46Z graemes $
 # @todo make functions timelimited
 
 import logging
@@ -18,111 +18,17 @@ import PyUtils.Decorators as _decos
 from PyUtils.RootUtils import import_root
 from PyJobTransforms.trfDecorators import timelimited
 
-## @note The 'AODFixVersion' is can appear for AOD or ESD files 
-athFileInterestingKeys = ['beam_energy', 'beam_type', 'conditions_tag', 'file_size',
-                          'file_guid', 'file_type', 'geometry', 'lumi_block', 'nentries', 'run_number', 
-                          'AODFixVersion']
-# Stripped down key list for files which are inputs
-inpFileInterestingKeys = ['file_size', 'file_guid', 'file_type', 'nentries']
-
-## @brief Determines metadata of BS, POOL or TAG file.
-#  @details Trivial wrapper around PyUtils.AthFile.
-#  @param fileName Path(s) to the file for which the metadata are determined
-#  @param retrieveKeys Keys to extract from the @c AthFile.infos dictionary
-#  @return 
-#  - Dictionary containing metadata of the file(s)
-#  - @c None if the determination failed.
-@timelimited()
-def AthenaFileInfo(fileNames, retrieveKeys = athFileInterestingKeys):
-    msg.debug('Calling AthenaFileInfo for {0}'.format(fileNames))
-
-    from PyUtils import AthFile
-    AthFile.server.flush_cache()
-    AthFile.server.disable_pers_cache()
-
-    if isinstance(fileNames, str):
-        fileNames = [fileNames,]
-
-    metaDict = {}
-    try:
-        ## @note This code is transitional, until all the versions of AthFile we
-        #  use support pfopen(). It should then be removed. Graeme, 2013-11-05.
-        #  Note to Future: Give it 6 months, then get rid of it!
-        if len(fileNames) > 1:
-            try:    
-                athFile = AthFile.pfopen(fileNames)
-            except AttributeError:  
-                msg.warning('This version of AthFile does not support "pfopen". Falling back to serial interface.')
-                athFile = AthFile.fopen(fileNames)
-        else:
-            athFile = AthFile.fopen(fileNames)
-        msg.debug('AthFile came back')
-        for fname, meta in zip(fileNames, athFile):
-            metaDict[fname] = {}
-            for key in retrieveKeys:
-                msg.debug('Looking for key {0}'.format(key))
-                try:
-                    # AODFix is tricky... it is absent in many files, but this is not an error
-                    if key is 'AODFixVersion':
-                        if 'tag_info' in meta.infos and isinstance('tag_info', dict) and 'AODFixVersion' in meta.infos['tag_info']:
-                            metaDict[fname][key] = meta.infos['tag_info'][key]
-                        else:
-                            metaDict[fname][key] = ''
-                    # beam_type seems odd for RAW - typical values seem to be [1] instead of 'collisions' or 'cosmics'.
-                    # So we use the same scheme as AutoConfiguration does, mapping project names to known values
-                    # It would be nice to import this all from AutoConfiguration, but there is no suitable method at the moment.
-                    # N.B. This is under discussion so this code is temporary fix (Captain's Log, Stardate 2012-11-28) 
-                    elif key is 'beam_type':
-                        try:
-                            if isinstance(meta.infos[key], list) and len(meta.infos[key]) > 0 and meta.infos[key][0] in ('cosmics' ,'singlebeam','collisions'):
-                                metaDict[fname][key] = meta.infos[key]
-                            else:
-                                from RecExConfig.AutoConfiguration import KnownCosmicsProjects, Known1BeamProjects, KnownCollisionsProjects, KnownHeavyIonProjects
-                                if 'bs_metadata' in meta.infos.keys() and isinstance(meta.infos['bs_metadata'], dict) and 'Project' in meta.infos['bs_metadata'].keys():
-                                    project = meta.infos['bs_metadata']['Project']
-                                elif 'tag_info' in meta.infos.keys() and isinstance(meta.infos['tag_info'], dict) and 'project_name' in meta.infos['tag_info'].keys():
-                                    project = meta.infos['tag_info']['project_name']
-                                else:
-                                    msg.info('AthFile beam_type was not a known value ({0}) and no project could be found for this file'.format(meta.infos[key]))
-                                    metaDict[fname][key] = meta.infos[key]
-                                    continue
-                                if project in KnownCollisionsProjects or project in KnownHeavyIonProjects:
-                                    metaDict[fname][key] = ['collisions']
-                                    continue
-                                if project in KnownCosmicsProjects:
-                                    metaDict[fname][key] = ['cosmics']
-                                    continue
-                                if project in Known1BeamProjects:
-                                    metaDict[fname][key] = ['singlebeam']
-                                    continue
-                                # Erm, so we don't know
-                                msg.info('AthFile beam_type was not a known value ({0}) and the file\'s project ({1}) did not map to a known beam type using AutoConfiguration'.format(meta.infos[key], project))
-                                metaDict[fname][key] = meta.infos[key]
-                        except Exception, e:
-                            msg.error('Got an exception while trying to determine beam_type: {0}'.format(e))
-                            metaDict[fname][key] = meta.infos[key]
-                    else:
-                        metaDict[fname][key] = meta.infos[key]
-                except KeyError:
-                    msg.warning('Missing key in athFile info: {0}'.format(key))
-            msg.debug('Found these metadata for {0}: {1}'.format(fname, metaDict[fname].keys()))
-        return metaDict
-    except ValueError, e:
-        msg.error('Problem in getting AthFile metadata for {0}'.format(fileNames))
-        return None
+# Use a stripped down key list, as we retrieve only 'fast' metadata  
+athFileInterestingKeys = ['file_size', 'file_guid', 'file_type', 'nentries']
 
 ## @brief New lightweight interface to getting a single file's metadata
-#  @note Use this for now, but expect further evolution...
 def AthenaLiteFileInfo(filename, filetype, retrieveKeys = athFileInterestingKeys):
     msg.debug('Calling AthenaLiteFileInfo for {0} (type {1})'.format(filename, filetype))
     from subprocess import CalledProcessError 
 
     if filetype == 'POOL':
         # retrieve GUID and nentries without runMiniAthena subprocess
-        if set(retrieveKeys) == set(inpFileInterestingKeys):
-            from PyUtils.AthFileLite import AthInpFile as AthFileLite
-        else:
-            from PyUtils.AthFileLite import AthPoolFile as AthFileLite
+        from PyUtils.AthFileLite import AthInpFile as AthFileLite
     elif filetype == 'BS':
         from PyUtils.AthFileLite import AthBSFile as AthFileLite
     elif filetype == 'TAG':
@@ -139,46 +45,7 @@ def AthenaLiteFileInfo(filename, filetype, retrieveKeys = athFileInterestingKeys
         for key in retrieveKeys:
             msg.debug('Looking for key {0}'.format(key))
             try:
-                # AODFix is tricky... it is absent in many files, but this is not an error
-                if key is 'AODFixVersion':
-                    if 'tag_info' in meta and isinstance('tag_info', dict) and 'AODFixVersion' in meta['tag_info']:
-                        metaDict[filename][key] = meta['tag_info'][key]
-                    else:
-                        metaDict[filename][key] = ''
-                # beam_type seems odd for RAW - typical values seem to be [1] instead of 'collisions' or 'cosmics'.
-                # So we use the same scheme as AutoConfiguration does, mapping project names to known values
-                # It would be nice to import this all from AutoConfiguration, but there is no suitable method at the moment.
-                # N.B. This is under discussion so this code is temporary fix (Captain's Log, Stardate 2012.11.28) 
-                elif key is 'beam_type':
-                    try:
-                        if isinstance(meta[key], list) and len(meta[key]) > 0 and meta[key][0] in ('cosmics' ,'singlebeam','collisions'):
-                            metaDict[filename][key] = meta[key]
-                        else:
-                            from RecExConfig.AutoConfiguration import KnownCosmicsProjects, Known1BeamProjects, KnownCollisionsProjects, KnownHeavyIonProjects
-                            if 'bs_metadata' in meta.keys() and isinstance(meta['bs_metadata'], dict) and 'Project' in meta['bs_metadata'].keys():
-                                project = meta['bs_metadata']['Project']
-                            elif 'tag_info' in meta.keys() and isinstance(meta['tag_info'], dict) and 'project_name' in meta['tag_info'].keys():
-                                project = meta['tag_info']['project_name']
-                            else:
-                                msg.info('AthFile beam_type was not a known value ({0}) and no project could be found for this file'.format(meta[key]))
-                                metaDict[filename][key] = meta[key]
-                                continue
-                            if project in KnownCollisionsProjects or project in KnownHeavyIonProjects:
-                                metaDict[filename][key] = ['collisions']
-                                continue
-                            if project in KnownCosmicsProjects:
-                                metaDict[filename][key] = ['cosmics']
-                                continue
-                            if project in Known1BeamProjects:
-                                metaDict[filename][key] = ['singlebeam']
-                                continue
-                            # Erm, so we don't know
-                            msg.info('AthFile beam_type was not a known value ({0}) and the file\'s project ({1}) did not map to a known beam type using AutoConfiguration'.format(meta[key], project))
-                            metaDict[filename][key] = meta[key]
-                    except Exception, e:
-                        msg.error('Got an exception while trying to determine beam_type: {0}'.format(e))
-                        metaDict[filename][key] = meta[key]
-                elif key is 'G4Version':
+                if key is 'G4Version':
                     msg.debug('Searching for G4Version in metadata')
                     try: 
                         metaDict[filename][key] = meta['metadata']['/Simulation/Parameters']['G4Version']
diff --git a/Tools/PyJobTransforms/python/trfFileValidationFunctions.py b/Tools/PyJobTransforms/python/trfFileValidationFunctions.py
index 2f91c9c6012..4b1830f262a 100644
--- a/Tools/PyJobTransforms/python/trfFileValidationFunctions.py
+++ b/Tools/PyJobTransforms/python/trfFileValidationFunctions.py
@@ -46,10 +46,10 @@ def returnIntegrityOfBSFile(fname):
 
 ### @brief Integrity function for file class argTAGFile
 def returnIntegrityOfTAGFile(fname):
-    from PyJobTransforms.trfFileUtils import AthenaFileInfo
-    dictionaryOfAthenaFileInfo = AthenaFileInfo([str(fname),], retrieveKeys = ['nentries',])
+    from PyJobTransforms.trfFileUtils import AthenaLiteFileInfo
+    dictionaryOfAthenaFileInfo = AthenaLiteFileInfo(fname, "TAG", retrieveKeys = ['nentries',])
     msg.debug("dictionary of Athena file information: {a}".format(a = dictionaryOfAthenaFileInfo))
-    eventCount = dictionaryOfAthenaFileInfo[str(fname)]['nentries']
+    eventCount = dictionaryOfAthenaFileInfo[fname]['nentries']
     if eventCount is None:
         return (False, "integrity of {fileName} bad: got a bad event count in {fileName}: {eventCount}".format(fileName = str(fname), eventCount = eventCount))
     else:
diff --git a/Tools/PyJobTransforms/python/trfGraph.py b/Tools/PyJobTransforms/python/trfGraph.py
index 261f243f2e3..0dd9f427ee7 100644
--- a/Tools/PyJobTransforms/python/trfGraph.py
+++ b/Tools/PyJobTransforms/python/trfGraph.py
@@ -4,7 +4,7 @@
 ## @brief Transform graph utilities
 #  @details Graph which represents transform executors (nodes) connected vis data types (edges)
 #  @author atlas-comp-transforms-dev@cern.ch
-#  @version $Id: trfGraph.py 649424 2015-02-24 22:06:20Z graemes $
+#  @version $Id: trfGraph.py 743343 2016-04-27 15:47:21Z graemes $
 #  @note  There are a few well established python graph implementations, but none seem to be in the ATLAS
 #  release (NetworkX, igraph). Our needs are so basic that we might well be able to just take a few well
 #  known routines and have them in this module. See, e.g., http://www.python.org/doc/essays/graphs.html
@@ -226,12 +226,10 @@ class executorGraph(object):
     #  @param @c outputDataTypes Data to produce
     #  @param @c inputDataTypes Data available as inputs
     def findExecutionPath(self):        
-        # Switch off all nodes
+        # Switch off all nodes, except if we have a single node which is not data driven...
         self._execution = {}
         for nodeName, node in self._nodeDict.iteritems():
-            if node.inputDataTypes == set() and node.inputDataTypes == set():
-                # Any nodes which have no data dependencies cannot be data driven, so we assume
-                # that they always execute
+            if len(self._nodeDict) == 1 and node.inputDataTypes == set() and node.inputDataTypes == set():
                 self._execution[nodeName] = {'enabled' : True, 'input' : set(), 'output' : set()}
             else:
                 self._execution[nodeName] = {'enabled' : False, 'input' : set(), 'output' : set()}
diff --git a/Tools/PyJobTransforms/python/trfReports.py b/Tools/PyJobTransforms/python/trfReports.py
index 2d760dba7fa..3d2bd31195e 100644
--- a/Tools/PyJobTransforms/python/trfReports.py
+++ b/Tools/PyJobTransforms/python/trfReports.py
@@ -6,10 +6,10 @@
 #  @details Classes whose instance encapsulates transform reports
 #   at different levels, such as file, executor, transform
 #  @author atlas-comp-transforms-dev@cern.ch
-#  @version $Id: trfReports.py 740537 2016-04-15 11:28:11Z graemes $
+#  @version $Id: trfReports.py 743924 2016-04-29 13:18:30Z graemes $
 #
 
-__version__ = '$Revision: 740537 $'
+__version__ = '$Revision: 743924 $'
 
 import cPickle as pickle
 import json
@@ -105,7 +105,7 @@ class trfReport(object):
 class trfJobReport(trfReport):
     ## @brief This is the version counter for transform job reports
     #  any changes to the format @b must be reflected by incrementing this
-    _reportVersion = '1.1.0'
+    _reportVersion = '2.0.0'
     _metadataKeyMap = {'AMIConfig': 'AMI', }
     _maxMsgLen = 256
     _truncationMsg = " (truncated)"
@@ -137,14 +137,6 @@ class trfJobReport(trfReport):
         else:
             myDict['exitMsg'] = self._trf.exitMsg
             myDict['exitMsgExtra'] = ""
-            
-        # Iterate over argValues...
-        myDict['argValues'] = {}
-        for k, v in self._trf.argdict.iteritems():
-            if isinstance(v, trfArgClasses.argument):
-                myDict['argValues'][k] = v.value
-            else:
-                myDict['argValues'][k] = v
 
         # Iterate over files
         for fileType in ('input', 'output', 'temporary'):
@@ -406,34 +398,6 @@ class trfFileReport(object):
                 else:
                     fileArgProps['subFiles'].append(subFile)
 
-        if type == 'full':
-            # move metadata to subFile dict, before it can be compressed
-            metaData = self._fileArg._fileMetadata
-            for fileName in metaData.keys():
-                msg.info("Examining metadata for file {0}".format(fileName))
-                if basenameReport == False:
-                    searchFileName = fileName
-                else:
-                    searchFileName = os.path.basename(fileName)
-
-                thisFile = None
-                for subFile in fileArgProps['subFiles']:
-                    if subFile['name'] == searchFileName:
-                        thisFile = subFile
-                        break
-
-                if thisFile is None:
-                    if searchFileName in suppressed:
-                        continue
-                    else:
-                        raise trfExceptions.TransformReportException(trfExit.nameToCode('TRF_INTERNAL_REPORT_ERROR'),
-                                                                 'file metadata mismatch in subFiles dict')
-
-                # append metadata keys, except all existing, to subfile dict and ignore _exists
-                for k, v in metaData[fileName].iteritems():
-                    if k not in thisFile.keys() and k != '_exists':
-                        thisFile[k] = v
-
         return fileArgProps
 
     ## @brief Return unique metadata for a single file in an argFile class
@@ -454,7 +418,7 @@ class trfFileReport(object):
             entry.update(self._fileArg.getMetadata(files = filename, populate = not fast, metadataKeys = ['file_guid'])[filename])
         elif type is 'full':
             # Suppress io because it's the key at a higher level and _exists because it's internal
-            entry.update(self._fileArg.getMetadata(files = filename, populate = not fast, maskMetadataKeys = ['io', '_exists'])[filename])
+            entry.update(self._fileArg.getMetadata(files = filename, populate = not fast, maskMetadataKeys = ['io', '_exists', 'integrity', 'file_type'])[filename])
         else:
             raise trfExceptions.TransformReportException(trfExit.nameToCode('TRF_INTERNAL_REPORT_ERROR'),
                                                          'Unknown file report type ({0}) in the file report for {1}'.format(type, self._fileArg))
diff --git a/Tools/PyJobTransforms/python/trfUtils.py b/Tools/PyJobTransforms/python/trfUtils.py
index c4e1783b454..3c99937cdca 100644
--- a/Tools/PyJobTransforms/python/trfUtils.py
+++ b/Tools/PyJobTransforms/python/trfUtils.py
@@ -3,7 +3,7 @@
 ## @package PyJobTransforms.trfUtils
 # @brief Transform utility functions
 # @author atlas-comp-transforms-dev@cern.ch
-# @version $Id: trfUtils.py 712411 2015-12-03 16:46:19Z mavogel $
+# @version $Id: trfUtils.py 743527 2016-04-28 11:27:16Z graemes $
 
 import os
 import os.path as path
@@ -221,28 +221,24 @@ def call(args, bufsize=0, executable=None, stdin=None, preexec_fn=None, close_fd
 ## @brief Return a string with a report of the current athena setup
 def asetupReport():
     setupMsg = str()
-    for eVar in ('AtlasBaseDir', 'AtlasProject', 'AtlasVersion', 'AtlasPatch', 'AtlasPatchVersion', 'CMTCONFIG', 'TestArea'):
+    eVars = ['AtlasBaseDir', 'AtlasProject', 'AtlasVersion', 'AtlasPatch', 'AtlasPatchVersion', 'CMTCONFIG', 'TestArea']
+    if "AtlasProject" in os.environ:
+        CMake_Platform = "{0}_PLATFORM".format(os.environ["AtlasProject"])
+        if CMake_Platform in os.environ:
+            eVars.remove("CMTCONFIG")
+            eVars.append(CMake_Platform)
+    for eVar in eVars:
         if eVar in os.environ:
             setupMsg += '\t%s=%s\n' % (eVar, os.environ[eVar])
-        else:
-            setupMsg+ '\t%s undefined\n' % eVar
     # Look for patches so that the job can be rerun 
     if 'TestArea' in os.environ and os.access(os.environ['TestArea'], os.R_OK):
         setupMsg += "\n\tPatch packages are:\n"
         try:
-            cmd = ['cmt', 'show', 'packages', os.environ['TestArea']]
-            cmtProc = Popen(cmd, shell = False, stdout = PIPE, stderr = STDOUT, bufsize = 1)
-            cmtOut = cmtProc.communicate()[0] 
-            for line in cmtOut.split('\n'):
-                try:
-                    if line.strip() == '':
-                        continue
-                    (package, packageVersion, packagePath) = line.split()
-                    setupMsg += '\t\t%s\n' % (packageVersion)
-                except ValueError:
-                    setupMsg += "Warning, unusual output from cmt: %s\n" % line 
+            cmd = ['lstags']
+            lstagsOut = Popen(cmd, shell = False, stdout = PIPE, stderr = STDOUT, bufsize = 1).communicate()[0]
+            setupMsg +=  "\n".join([ "\t\t{0}".format(pkg) for pkg in lstagsOut.split("\n") ])
         except (CalledProcessError, OSError), e:
-            setupMsg += 'Execution of CMT failed: %s' % e
+            setupMsg += 'Execution of lstags failed: {0}'.format(e)
     else:
         setupMsg+= "No readable patch area found"
 
diff --git a/Tools/PyJobTransforms/python/trfValidation.py b/Tools/PyJobTransforms/python/trfValidation.py
index da2abc8d969..19d00397d43 100644
--- a/Tools/PyJobTransforms/python/trfValidation.py
+++ b/Tools/PyJobTransforms/python/trfValidation.py
@@ -6,7 +6,7 @@
 # @details Contains validation classes controlling how the transforms
 # will validate jobs they run.
 # @author atlas-comp-transforms-dev@cern.ch
-# @version $Id: trfValidation.py 740537 2016-04-15 11:28:11Z graemes $
+# @version $Id: trfValidation.py 740535 2016-04-15 11:21:07Z graemes $
 # @note Old validation dictionary shows usefully different options:
 # <tt>self.validationOptions = {'testIfEmpty' : True, 'testIfNoEvents' : False, 'testIfExists' : True,
 #                          'testIfCorrupt' : True, 'testCountEvents' : True, 'extraValidation' : False,
diff --git a/Tools/PyJobTransforms/test/test_trfArgClassesATLAS.py b/Tools/PyJobTransforms/test/test_trfArgClassesATLAS.py
index 14326760095..7ddd9e09b99 100755
--- a/Tools/PyJobTransforms/test/test_trfArgClassesATLAS.py
+++ b/Tools/PyJobTransforms/test/test_trfArgClassesATLAS.py
@@ -5,7 +5,7 @@
 ## @Package test_trfArgClasses.py
 #  @brief Unittests for test_trfArgClasses.py
 #  @author graeme.andrew.stewart@cern.ch
-#  @version $Id: test_trfArgClassesATLAS.py 667158 2015-05-14 16:14:07Z vanyash $
+#  @version $Id: test_trfArgClassesATLAS.py 731518 2016-03-22 07:29:46Z graemes $
 #  @note Tests of ATLAS specific file formats (that thus rely on other
 #  parts of Athena) live here
 
@@ -18,7 +18,7 @@ from PyJobTransforms.trfLogger import msg
 from PyJobTransforms.trfArgClasses import *
 
 # Stripped down key list for files which are inputs 
-from PyJobTransforms.trfFileUtils import inpFileInterestingKeys
+from PyJobTransforms.trfFileUtils import athFileInterestingKeys
 
 class argFileEOSTests(unittest.TestCase):
     def test_SimExpansion(self):
@@ -51,9 +51,9 @@ class argPOOLFiles(unittest.TestCase):
             testFile = '/afs/cern.ch/atlas/offline/test/data11_7TeV.00182796.physics_JetTauEtmiss.merge.ESD._lb0300._SFO-10._0001.1.10evts.16.6.6.4.pool.root'
             os.stat(testFile)
             esdFile = argPOOLFile(testFile, io = 'input', type='esd')
-            self.assertEqual(esdFile.getMetadata(metadataKeys =  tuple(inpFileInterestingKeys)), {'/afs/cern.ch/atlas/offline/test/data11_7TeV.00182796.physics_JetTauEtmiss.merge.ESD._lb0300._SFO-10._0001.1.10evts.16.6.6.4.pool.root': {'file_type': 'pool', 'file_guid': '0CABA22E-9096-E011-AE25-0030487C8CE6', 'nentries': 10L, 'file_size': 17033381}})
+            self.assertEqual(esdFile.getMetadata(metadataKeys =  tuple(athFileInterestingKeys)), {'/afs/cern.ch/atlas/offline/test/data11_7TeV.00182796.physics_JetTauEtmiss.merge.ESD._lb0300._SFO-10._0001.1.10evts.16.6.6.4.pool.root': {'file_type': 'pool', 'file_guid': '0CABA22E-9096-E011-AE25-0030487C8CE6', 'nentries': 10L, 'file_size': 17033381}})
             esdFile = argPOOLFile(testFile, io = 'output', type='esd')
-            self.assertEqual(esdFile.getMetadata(), {'/afs/cern.ch/atlas/offline/test/data11_7TeV.00182796.physics_JetTauEtmiss.merge.ESD._lb0300._SFO-10._0001.1.10evts.16.6.6.4.pool.root': {'_exists': True, 'run_number': [182796L], 'beam_energy': [3500000.0], 'file_type': 'pool', 'AODFixVersion': '', 'file_size': 17033381L, 'geometry': 'ATLAS-GEO-16-00-01', 'file_guid': '0CABA22E-9096-E011-AE25-0030487C8CE6', 'beam_type': ['collisions'], 'lumi_block': [300L], 'conditions_tag': 'COMCOND-BLKPST-004-00', 'integrity': True, 'nentries': 10L}}) 
+            self.assertEqual(esdFile.getMetadata(), {'/afs/cern.ch/atlas/offline/test/data11_7TeV.00182796.physics_JetTauEtmiss.merge.ESD._lb0300._SFO-10._0001.1.10evts.16.6.6.4.pool.root': {'_exists': True, 'file_type': 'pool', 'file_guid': '0CABA22E-9096-E011-AE25-0030487C8CE6', 'file_size': 17033381, 'integrity': True, 'nentries': 10L}}) 
             self.assertEqual(esdFile.getMetadata(metadataKeys = ('nentries',)), {'/afs/cern.ch/atlas/offline/test/data11_7TeV.00182796.physics_JetTauEtmiss.merge.ESD._lb0300._SFO-10._0001.1.10evts.16.6.6.4.pool.root': {'nentries': 10}})
             self.assertEqual(esdFile.prodsysDescription['type'],'file')
         except OSError:
@@ -65,9 +65,9 @@ class argPOOLFiles(unittest.TestCase):
             testFile = '/afs/cern.ch/atlas/offline/test/data11_7TeV.00182796.physics_JetTauEtmiss.merge.AOD._lb0300._SFO-10._0001.1.10evts.16.6.6.4.pool.root'
             os.stat(testFile)
             aodFile = argPOOLFile(testFile, io = 'input', type='aod')
-            self.assertEqual(aodFile.getMetadata(metadataKeys = tuple(inpFileInterestingKeys)), {'/afs/cern.ch/atlas/offline/test/data11_7TeV.00182796.physics_JetTauEtmiss.merge.AOD._lb0300._SFO-10._0001.1.10evts.16.6.6.4.pool.root': {'file_type': 'pool', 'file_guid': '6E1FE6F0-9096-E011-9DDA-0030487C8CE6', 'nentries': 10L, 'file_size': 4673269}})
+            self.assertEqual(aodFile.getMetadata(metadataKeys = tuple(athFileInterestingKeys)), {'/afs/cern.ch/atlas/offline/test/data11_7TeV.00182796.physics_JetTauEtmiss.merge.AOD._lb0300._SFO-10._0001.1.10evts.16.6.6.4.pool.root': {'file_type': 'pool', 'file_guid': '6E1FE6F0-9096-E011-9DDA-0030487C8CE6', 'nentries': 10L, 'file_size': 4673269}})
             aodFile = argPOOLFile(testFile, io = 'output', type='aod')
-            self.assertEqual(aodFile.getMetadata(), {'/afs/cern.ch/atlas/offline/test/data11_7TeV.00182796.physics_JetTauEtmiss.merge.AOD._lb0300._SFO-10._0001.1.10evts.16.6.6.4.pool.root': {'_exists': True, 'run_number': [182796L], 'beam_energy': [3500000.0], 'file_type': 'pool', 'AODFixVersion': '', 'file_size': 4673269L, 'geometry': 'ATLAS-GEO-16-00-01', 'file_guid': '6E1FE6F0-9096-E011-9DDA-0030487C8CE6', 'beam_type': ['collisions'], 'lumi_block': [300L], 'conditions_tag': 'COMCOND-BLKPST-004-00', 'integrity': True, 'nentries': 10L}}) 
+            self.assertEqual(aodFile.getMetadata(),{'/afs/cern.ch/atlas/offline/test/data11_7TeV.00182796.physics_JetTauEtmiss.merge.AOD._lb0300._SFO-10._0001.1.10evts.16.6.6.4.pool.root': {'_exists': True, 'file_type': 'pool', 'file_guid': '6E1FE6F0-9096-E011-9DDA-0030487C8CE6', 'file_size': 4673269, 'integrity': True, 'nentries': 10L}}) 
             self.assertEqual(aodFile.getMetadata(metadataKeys = ('nentries',)), {'/afs/cern.ch/atlas/offline/test/data11_7TeV.00182796.physics_JetTauEtmiss.merge.AOD._lb0300._SFO-10._0001.1.10evts.16.6.6.4.pool.root': {'nentries': 10}}) 
             self.assertEqual(aodFile.prodsysDescription['type'],'file')
             self.assertTrue(aodFile.prodsysDescription['subtype']=='AOD')
@@ -81,8 +81,7 @@ class argTAGFiles(unittest.TestCase):
             testFile = '/afs/cern.ch/work/g/graemes/ddm/data12_8TeV.00207865.physics_JetTauEtmiss.merge.TAG.r4065_p1278_tid01030417_00/TAG.01030417._000001.pool.root.1'
             os.stat(testFile)
             tagFile = argTAGFile(testFile, io = 'input', type='tag')
-            print '+++', tagFile.getMetadata()
-            self.assertEqual(tagFile.getMetadata(), {'/afs/cern.ch/work/g/graemes/ddm/data12_8TeV.00207865.physics_JetTauEtmiss.merge.TAG.r4065_p1278_tid01030417_00/TAG.01030417._000001.pool.root.1': {'_exists': True, 'run_number': [207865L], 'beam_energy': [], 'file_type': 'tag', 'AODFixVersion': '', 'file_size': 12222088, 'geometry': None, 'file_guid': '3CCAD8D2-9195-5845-857B-550D616962F9', 'beam_type': [], 'lumi_block': [], 'conditions_tag': None, 'integrity': True, 'nentries': 38112L}})
+            self.assertEqual(tagFile.getMetadata(), {'/afs/cern.ch/work/g/graemes/ddm/data12_8TeV.00207865.physics_JetTauEtmiss.merge.TAG.r4065_p1278_tid01030417_00/TAG.01030417._000001.pool.root.1': {'_exists': True, 'file_type': 'tag', 'file_guid': '3CCAD8D2-9195-5845-857B-550D616962F9', 'file_size': 12222088, 'integrity': True, 'nentries': 38112L}}) 
             self.assertEqual(tagFile.getMetadata(metadataKeys = ('nentries',)), {'/afs/cern.ch/work/g/graemes/ddm/data12_8TeV.00207865.physics_JetTauEtmiss.merge.TAG.r4065_p1278_tid01030417_00/TAG.01030417._000001.pool.root.1': {'nentries': 38112L}})
             self.assertEqual(tagFile.prodsysDescription['type'],'file')
         except OSError:
@@ -102,7 +101,7 @@ class argBSFiles(unittest.TestCase):
             testFile = '/afs/cern.ch/atlas/offline/test/data11_7TeV.00191920.physics_JetTauEtmiss.merge.RAW._lb0257._SFO-9._0001.1.10evts'
             os.stat(testFile)
             rawFile = argBSFile(testFile, io = 'input', type='bs')
-            self.assertEqual(rawFile.getMetadata(), {'/afs/cern.ch/atlas/offline/test/data11_7TeV.00191920.physics_JetTauEtmiss.merge.RAW._lb0257._SFO-9._0001.1.10evts': {'_exists': True, 'run_number': [191920], 'beam_energy': [0], 'file_type': 'bs', 'AODFixVersion': '', 'file_size': 12998048L, 'geometry': None, 'file_guid': '4A511034-3A53-E111-8745-003048F0E7AE', 'beam_type': ['collisions'], 'lumi_block': [257], 'conditions_tag': None, 'integrity': True, 'nentries': 10}}) 
+            self.assertEqual(rawFile.getMetadata(),{'/afs/cern.ch/atlas/offline/test/data11_7TeV.00191920.physics_JetTauEtmiss.merge.RAW._lb0257._SFO-9._0001.1.10evts': {'_exists': True, 'file_type': 'bs', 'file_guid': '4A511034-3A53-E111-8745-003048F0E7AE', 'file_size': 12998048, 'integrity': True, 'nentries': 10}}) 
             self.assertEqual(rawFile.getMetadata(metadataKeys = ('nentries',)), {'/afs/cern.ch/atlas/offline/test/data11_7TeV.00191920.physics_JetTauEtmiss.merge.RAW._lb0257._SFO-9._0001.1.10evts': {'nentries': 10}})
 
             self.assertEqual(rawFile.prodsysDescription['type'],'file')
@@ -119,7 +118,7 @@ class argBSFiles(unittest.TestCase):
             for fname in testFiles:
                 os.stat(fname)
             rawFile = argBSFile(testFiles, io = 'input', type = 'bs')
-            self.assertEqual(rawFile.getMetadata(), {'/afs/cern.ch/atlas/offline/test/data11_cos.00182609.physics_CosmicCalo.merge.RAW._lb0100._SFO-ALL._0001.1.SFO-ALL._0001.1.10evts.data': {'_exists': True, 'run_number': [182609], 'beam_energy': [0], 'file_type': 'bs', 'AODFixVersion': '', 'file_size': 10487108L, 'geometry': None, 'file_guid': 'E8B69378-EF91-E011-A6F0-003048CAD388', 'beam_type': ['cosmics'], 'lumi_block': [100], 'conditions_tag': None, 'integrity': True, 'nentries': 10}, '/afs/cern.ch/atlas/offline/test/data11_7TeV.00191920.physics_JetTauEtmiss.merge.RAW._lb0257._SFO-9._0001.1.10evts': {'_exists': True, 'run_number': [191920], 'beam_energy': [0], 'file_type': 'bs', 'AODFixVersion': '', 'file_size': 12998048L, 'geometry': None, 'file_guid': '4A511034-3A53-E111-8745-003048F0E7AE', 'beam_type': ['collisions'], 'lumi_block': [257], 'conditions_tag': None, 'integrity': True, 'nentries': 10}, '/afs/cern.ch/atlas/offline/test/data11_7TeV.00182796.physics_JetTauEtmiss.merge.RAW._lb0300._SFO-10._0001.1.10evts.data': {'_exists': True, 'run_number': [182796], 'beam_energy': [3500], 'file_type': 'bs', 'AODFixVersion': '', 'file_size': 12946296L, 'geometry': None, 'file_guid': 'FABAAD37-B38E-E011-8C1D-003048CAD384', 'beam_type': ['collisions'], 'lumi_block': [300], 'conditions_tag': None, 'integrity': True, 'nentries': 10}}) 
+            self.assertEqual(rawFile.getMetadata(),{'/afs/cern.ch/atlas/offline/test/data11_cos.00182609.physics_CosmicCalo.merge.RAW._lb0100._SFO-ALL._0001.1.SFO-ALL._0001.1.10evts.data': {'_exists': True, 'file_type': 'bs', 'file_guid': 'E8B69378-EF91-E011-A6F0-003048CAD388', 'file_size': 10487108, 'integrity': True, 'nentries': 10}, '/afs/cern.ch/atlas/offline/test/data11_7TeV.00191920.physics_JetTauEtmiss.merge.RAW._lb0257._SFO-9._0001.1.10evts': {'_exists': True, 'file_type': 'bs', 'file_guid': '4A511034-3A53-E111-8745-003048F0E7AE', 'file_size': 12998048, 'integrity': True, 'nentries': 10}, '/afs/cern.ch/atlas/offline/test/data11_7TeV.00182796.physics_JetTauEtmiss.merge.RAW._lb0300._SFO-10._0001.1.10evts.data': {'_exists': True, 'file_type': 'bs', 'file_guid': 'FABAAD37-B38E-E011-8C1D-003048CAD384', 'file_size': 12946296, 'integrity': True, 'nentries': 10}}) 
             self.assertEqual(rawFile.getMetadata(metadataKeys = ('nentries',)), {'/afs/cern.ch/atlas/offline/test/data11_cos.00182609.physics_CosmicCalo.merge.RAW._lb0100._SFO-ALL._0001.1.SFO-ALL._0001.1.10evts.data': {'nentries': 10}, '/afs/cern.ch/atlas/offline/test/data11_7TeV.00191920.physics_JetTauEtmiss.merge.RAW._lb0257._SFO-9._0001.1.10evts': {'nentries': 10}, '/afs/cern.ch/atlas/offline/test/data11_7TeV.00182796.physics_JetTauEtmiss.merge.RAW._lb0300._SFO-10._0001.1.10evts.data': {'nentries': 10}})
             self.assertEqual(rawFile.getMetadata(metadataKeys = ('nentries',), files = '/afs/cern.ch/atlas/offline/test/data11_cos.00182609.physics_CosmicCalo.merge.RAW._lb0100._SFO-ALL._0001.1.SFO-ALL._0001.1.10evts.data'), {'/afs/cern.ch/atlas/offline/test/data11_cos.00182609.physics_CosmicCalo.merge.RAW._lb0100._SFO-ALL._0001.1.SFO-ALL._0001.1.10evts.data': {'nentries': 10}})
 
diff --git a/Tools/PyJobTransforms/test/test_trfArgs.py b/Tools/PyJobTransforms/test/test_trfArgs.py
index 5b4c2fdead6..1fcdebe035e 100755
--- a/Tools/PyJobTransforms/test/test_trfArgs.py
+++ b/Tools/PyJobTransforms/test/test_trfArgs.py
@@ -5,7 +5,7 @@
 ## @Package test_trfArgs.py
 #  @brief Unittests for trfArgs.py
 #  @author maddocks.harvey@gmail.com, graeme.andrew.stewart@cern.ch
-#  @version $Id: test_trfArgs.py 691581 2015-08-27 12:24:19Z lerrenst $
+#  @version $Id: test_trfArgs.py 745237 2016-05-06 02:33:15Z ssnyder $
 
 import argparse
 import json
@@ -68,21 +68,21 @@ class trfArgsUnitTests(unittest.TestCase):
         self.assertTrue(isinstance(myArgDict, dict))
         self.assertEquals(myArgDict['triggerConfig']._value, properArgDict)
         
-    def test_Pickle(self):
-        myParser = trfArgParser(description='test parser for pickled arguments, %s' % __name__)
-        addStandardTrfArgs(myParser)
-        addAthenaArguments(myParser)
-        import pickle
-        pickleArgs = {'cupsOfTea' : '3', 'mugVolume' : '314.56', 'teaType' : 'earl grey', 'drinkers': 'graeme,bjorn,mark'}
-        pickle.dump(pickleArgs, open("testorama", "wb"))
-        from PyJobTransforms.transform import transform
-        tf = transform()
-        addTeaArguments(tf.parser)
-        self.assertEquals(tf.parseCmdLineArgs(['--argdict', 'testorama']), None)
-        try:
-            os.unlink('testorama')
-        except OSError:
-            pass
+    # def test_Pickle(self):
+    #     myParser = trfArgParser(description='test parser for pickled arguments, %s' % __name__)
+    #     addStandardTrfArgs(myParser)
+    #     addAthenaArguments(myParser)
+    #     import pickle
+    #     pickleArgs = {'cupsOfTea' : '3', 'mugVolume' : '314.56', 'teaType' : 'earl grey', 'drinkers': 'graeme,bjorn,mark'}
+    #     pickle.dump(pickleArgs, open("testorama", "wb"))
+    #     from PyJobTransforms.transform import transform
+    #     tf = transform()
+    #     addTeaArguments(tf.parser)
+    #     self.assertEquals(tf.parseCmdLineArgs(['--argdict', 'testorama']), None)
+    #     try:
+    #         os.unlink('testorama')
+    #     except OSError:
+    #         pass
 
 
 class trfIntArgsUnitTests(unittest.TestCase):
diff --git a/Tools/PyJobTransforms/test/test_trfReports.py b/Tools/PyJobTransforms/test/test_trfReports.py
index d254bd04ab7..5470de4642f 100755
--- a/Tools/PyJobTransforms/test/test_trfReports.py
+++ b/Tools/PyJobTransforms/test/test_trfReports.py
@@ -5,7 +5,7 @@
 ## @Package test_trfReports.py
 #  @brief Unittests for trfReports.py
 #  @author graeme.andrew.stewart@cern.ch
-#  @version $Id: test_trfReports.py 623865 2014-10-24 12:39:44Z graemes $
+#  @version $Id: test_trfReports.py 745237 2016-05-06 02:33:15Z ssnyder $
 
 import unittest
 
@@ -51,18 +51,18 @@ class trfFileReportUnitTests(unittest.TestCase):
                 pass
 
     def test_fileReportPython(self):
-        self.assertEqual(self.mySingleFileReport.singleFilePython(filename = 'file1'), {'file_guid': '05ACBDD0-5F5F-4E2E-974A-BBF4F4FE6F0B', 'integrity': True, 'name': 'file1', 'file_size': 20})
-        self.assertEqual(self.mySingleFileReport.python(), {'argName': 'inputTEST_SINGLEFile', 'subFiles': [{'file_guid': '05ACBDD0-5F5F-4E2E-974A-BBF4F4FE6F0B', 'integrity': True, 'name': 'file1', 'file_size': 20}], 'type': None, 'dataset': None})
+        self.assertEqual(self.mySingleFileReport.singleFilePython(filename = 'file1'), {'file_guid': '05ACBDD0-5F5F-4E2E-974A-BBF4F4FE6F0B', 'name': 'file1', 'file_size': 20})
+        self.assertEqual(self.mySingleFileReport.python(), {'argName': 'inputTEST_SINGLEFile', 'subFiles': [{'file_guid': '05ACBDD0-5F5F-4E2E-974A-BBF4F4FE6F0B', 'name': 'file1', 'file_size': 20}], 'type': None, 'dataset': None})
         self.assertEqual(self.mySingleFileReport.python(type = 'name'), {'subFiles': [{'file_guid': '05ACBDD0-5F5F-4E2E-974A-BBF4F4FE6F0B', 'name': 'file1'}], 'nentries': 'UNDEFINED', 'dataset': None})
 
     def test_fileReportPythonUnknown(self):
         self.assertRaises(trfExceptions.TransformReportException, self.mySingleFileReport.singleFilePython, filename = 'xxx')
 
     def test_multiFileReportPython(self):
-        self.assertEqual(self.myMultiFileReport.singleFilePython(filename = 'file1'), {'file_guid': '05ACBDD0-5F5F-4E2E-974A-BBF4F4FE6F0B', 'integrity': True, 'name': 'file1', 'file_size': 20})
-        self.assertEqual(self.myMultiFileReport.singleFilePython(filename = 'file2'), {'file_guid': '1368D295-27C6-4A92-8187-704C2A6A5864', 'integrity': True, 'name': 'file2', 'file_size': 13})
-        self.assertEqual(self.myMultiFileReport.singleFilePython(filename = 'file3'), {'file_guid': 'F5BA4602-6CA7-4111-B3C7-CB06486B30D9', 'integrity': True, 'name': 'file3', 'file_size': 174})
-        self.assertEqual(self.myMultiFileReport.python(type = 'full'), {'argName': 'inputTEST_MULTIFile', 'subFiles': [{'file_guid': '05ACBDD0-5F5F-4E2E-974A-BBF4F4FE6F0B', 'name': 'file1', 'file_size': 20, 'integrity': True}, {'file_guid': '1368D295-27C6-4A92-8187-704C2A6A5864', 'name': 'file2', 'file_size': 13, 'integrity': True}, {'file_guid': 'F5BA4602-6CA7-4111-B3C7-CB06486B30D9', 'name': 'file3', 'file_size': 174, 'integrity': True}], 'type': None, 'dataset': None})
+        self.assertEqual(self.myMultiFileReport.singleFilePython(filename = 'file1'), {'file_guid': '05ACBDD0-5F5F-4E2E-974A-BBF4F4FE6F0B', 'name': 'file1', 'file_size': 20})
+        self.assertEqual(self.myMultiFileReport.singleFilePython(filename = 'file2'), {'file_guid': '1368D295-27C6-4A92-8187-704C2A6A5864', 'name': 'file2', 'file_size': 13})
+        self.assertEqual(self.myMultiFileReport.singleFilePython(filename = 'file3'), {'file_guid': 'F5BA4602-6CA7-4111-B3C7-CB06486B30D9', 'name': 'file3', 'file_size': 174})
+        self.assertEqual(self.myMultiFileReport.python(type = 'full'), {'argName': 'inputTEST_MULTIFile', 'subFiles': [{'file_guid': '05ACBDD0-5F5F-4E2E-974A-BBF4F4FE6F0B', 'name': 'file1', 'file_size': 20}, {'file_guid': '1368D295-27C6-4A92-8187-704C2A6A5864', 'name': 'file2', 'file_size': 13}, {'file_guid': 'F5BA4602-6CA7-4111-B3C7-CB06486B30D9', 'name': 'file3', 'file_size': 174}], 'type': None, 'dataset': None})
         self.assertEqual(self.myMultiFileReport.python(type = 'name'), {'subFiles': [{'file_guid': '05ACBDD0-5F5F-4E2E-974A-BBF4F4FE6F0B', 'name': 'file1'}, {'file_guid': '1368D295-27C6-4A92-8187-704C2A6A5864', 'name': 'file2'}, {'file_guid': 'F5BA4602-6CA7-4111-B3C7-CB06486B30D9', 'name': 'file3'}], 'nentries': 'UNDEFINED', 'dataset': None})
 
 
diff --git a/Tools/PyJobTransforms/test/test_trfUtilsDBRelease.py b/Tools/PyJobTransforms/test/test_trfUtilsDBRelease.py
index 3f1dabf013b..e275acaeb17 100755
--- a/Tools/PyJobTransforms/test/test_trfUtilsDBRelease.py
+++ b/Tools/PyJobTransforms/test/test_trfUtilsDBRelease.py
@@ -3,7 +3,7 @@
 # Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
 #
 # Test the various DBRelease scenarios
-# $Id: test_trfUtilsDBRelease.py 740537 2016-04-15 11:28:11Z graemes $
+# $Id: test_trfUtilsDBRelease.py 740535 2016-04-15 11:21:07Z graemes $
 #
 
 import json
diff --git a/Tools/PyJobTransforms/test/test_trfUtilsParallelJobProcessor.py b/Tools/PyJobTransforms/test/test_trfUtilsParallelJobProcessor.py
index add44238c29..8da817c4ddf 100755
--- a/Tools/PyJobTransforms/test/test_trfUtilsParallelJobProcessor.py
+++ b/Tools/PyJobTransforms/test/test_trfUtilsParallelJobProcessor.py
@@ -72,92 +72,96 @@ def exception():
 
 ## @brief unit tests for the parallel job processor
 class TestParallelJobProcessor(unittest.TestCase):
+    # Disable for now
+    pass
+    
+    
     ## @brief unit test for working functions
     #  @detail This method is a unit test of the parallel job processor
     #  testing the processing of two simple, working functions.
-    def test_working(self):
-        msg.info("\n\n\n\nPARALLEL JOB PROCESSOR WORKING TEST")
-        jobGroup1 = JobGroup(
-            name = "working functions test",
-            jobs = [
-                Job(
-                    name = "hello world function",
-                    workFunction = helloWorld,
-                    workFunctionKeywordArguments = {
-                        'sleepTime': 1,
-                    },
-                    workFunctionTimeout = 10
-                ),
-                Job(
-                    name = "multiplication function",
-                    workFunction = multiply,
-                    workFunctionKeywordArguments = {
-                        'multiplicand1': 2,
-                        'multiplicand2': 3
-                    },
-                    workFunctionTimeout = 10
-                )
-            ]
-        )
-        parallelJobProcessor1 = ParallelJobProcessor()
-        parallelJobProcessor1.submit(jobSubmission = jobGroup1)
-        results = parallelJobProcessor1.getResults()
-        self.assertEquals(results, ['hello world', 6])
-    ## @brief unit test for timeout function
-    #  @detail This method is a unit test of the parallel job processor
-    #  testing the processing of a simple function that is used to cause a
-    #  timeout.
-    def test_timeout(self):
-        msg.info("\n\n\n\nPARALLEL JOB PROCESSOR TIMEOUT TEST")
-        jobGroup1 = JobGroup(
-            name = "timeout test",
-            jobs=[
-                Job(
-                    name = "timeout function",
-                    workFunction = timeout,
-                    workFunctionKeywordArguments = {
-                        'sleepTime': 30,
-                    },
-                    workFunctionTimeout = 1
-                )
-            ]
-        )
-        parallelJobProcessor1 = ParallelJobProcessor()
-        parallelJobProcessor1.submit(jobSubmission = jobGroup1)
-        self.assertRaises(
-            trfExceptions.TransformTimeoutException,
-            parallelJobProcessor1.getResults
-        )
-    ## @brief unit test for fail function
-    #  @detail This method is a unit test of the parallel job processor
-    #  testing the processing of a simple function that raises an exception.
-    def test_fail(self):
-        msg.info("\n\n\n\nPARALLEL JOB PROCESSOR FAIL TEST")
-        jobGroup1 = JobGroup(
-            name = "fail test",
-            jobs=[
-                Job(
-                    name = "fail function",
-                    workFunction = exception,
-                    workFunctionTimeout = 5
-                )
-            ]
-        )
-        parallelJobProcessor1 = ParallelJobProcessor()
-        parallelJobProcessor1.submit(jobSubmission = jobGroup1)
-        self.assertRaises(
-            trfExceptions.TransformExecutionException,
-            parallelJobProcessor1.getResults
-        )
-    ## @brief unit test for job information
-    #  @detail This method is a unit test of the job object that raises an
-    #  exception when a work function is not specified.
-    def test_job_information(self):
-        msg.info("\n\n\n\nPARALLEL JOB PROCESSOR JOB INFORMATION TEST")
-        self.assertRaises(
-            trfExceptions.TransformInternalException,
-            Job
-        )
+#     def test_working(self):
+#         msg.info("\n\n\n\nPARALLEL JOB PROCESSOR WORKING TEST")
+#         jobGroup1 = JobGroup(
+#             name = "working functions test",
+#             jobs = [
+#                 Job(
+#                     name = "hello world function",
+#                     workFunction = helloWorld,
+#                     workFunctionKeywordArguments = {
+#                         'sleepTime': 1,
+#                     },
+#                     workFunctionTimeout = 10
+#                 ),
+#                 Job(
+#                     name = "multiplication function",
+#                     workFunction = multiply,
+#                     workFunctionKeywordArguments = {
+#                         'multiplicand1': 2,
+#                         'multiplicand2': 3
+#                     },
+#                     workFunctionTimeout = 10
+#                 )
+#             ]
+#         )
+#         parallelJobProcessor1 = ParallelJobProcessor()
+#         parallelJobProcessor1.submit(jobSubmission = jobGroup1)
+#         results = parallelJobProcessor1.getResults()
+#         self.assertEquals(results, ['hello world', 6])
+#     ## @brief unit test for timeout function
+#     #  @detail This method is a unit test of the parallel job processor
+#     #  testing the processing of a simple function that is used to cause a
+#     #  timeout.
+#     def test_timeout(self):
+#         msg.info("\n\n\n\nPARALLEL JOB PROCESSOR TIMEOUT TEST")
+#         jobGroup1 = JobGroup(
+#             name = "timeout test",
+#             jobs=[
+#                 Job(
+#                     name = "timeout function",
+#                     workFunction = timeout,
+#                     workFunctionKeywordArguments = {
+#                         'sleepTime': 30,
+#                     },
+#                     workFunctionTimeout = 1
+#                 )
+#             ]
+#         )
+#         parallelJobProcessor1 = ParallelJobProcessor()
+#         parallelJobProcessor1.submit(jobSubmission = jobGroup1)
+#         self.assertRaises(
+#             trfExceptions.TransformTimeoutException,
+#             parallelJobProcessor1.getResults
+#         )
+#     ## @brief unit test for fail function
+#     #  @detail This method is a unit test of the parallel job processor
+#     #  testing the processing of a simple function that raises an exception.
+#     def test_fail(self):
+#         msg.info("\n\n\n\nPARALLEL JOB PROCESSOR FAIL TEST")
+#         jobGroup1 = JobGroup(
+#             name = "fail test",
+#             jobs=[
+#                 Job(
+#                     name = "fail function",
+#                     workFunction = exception,
+#                     workFunctionTimeout = 5
+#                 )
+#             ]
+#         )
+#         parallelJobProcessor1 = ParallelJobProcessor()
+#         parallelJobProcessor1.submit(jobSubmission = jobGroup1)
+#         self.assertRaises(
+#             trfExceptions.TransformExecutionException,
+#             parallelJobProcessor1.getResults
+#         )
+#     ## @brief unit test for job information
+#     #  @detail This method is a unit test of the job object that raises an
+#     #  exception when a work function is not specified.
+#     def test_job_information(self):
+#         msg.info("\n\n\n\nPARALLEL JOB PROCESSOR JOB INFORMATION TEST")
+#         self.assertRaises(
+#             trfExceptions.TransformInternalException,
+#             Job
+#         )
 
 
 def main():
diff --git a/Tools/PyJobTransforms/test/test_trfUtilsParallelJobProcessorData.py b/Tools/PyJobTransforms/test/test_trfUtilsParallelJobProcessorData.py
index b7a62d434e4..0db55ea72c1 100755
--- a/Tools/PyJobTransforms/test/test_trfUtilsParallelJobProcessorData.py
+++ b/Tools/PyJobTransforms/test/test_trfUtilsParallelJobProcessorData.py
@@ -30,203 +30,206 @@ fileBS1   = "/afs/cern.ch/atlas/project/rig/referencefiles/dataStreams_high_mu/d
 ## @brief unit tests for output data file validation using the parallel job
 #  processor
 class TestOutputFileValidationInParallel(unittest.TestCase):
-
-    ## @brief unit test for AOD
-    #  @detail This method is a unit test for output file validation of AOD data
-    #  using the parallel job processor.
-    def test_AOD(self):
-        msg.info("\n\n\n\nAOD OUTPUT FILE VALIDATION USING PARALLEL JOB PROCESSOR")
-        cmd = [
-            'ValidateFiles_tf.py',
-            '--outputAODFile',
-            fileAOD1,
-            '--parallelFileValidation',
-            'True',
-            '--verbose'
-        ]
-        p = subprocess.Popen(
-            cmd,
-            shell = False,
-            stdout = subprocess.PIPE,
-            stderr = subprocess.STDOUT,
-            bufsize = 1
-        )
-        while p.poll() is None:
-            line = p.stdout.readline()
-            sys.stdout.write(line)
-        # Clean remaining buffered output lines.
-        for line in p.stdout:
-            sys.stdout.write(line)
-        self.assertEqual(p.returncode, 0)
-
-    ## @brief unit test for ESD
-    #  @detail This method is a unit test for output file validation of ESD data
-    #  using the parallel job processor.
-    def test_ESD(self):
-        msg.info("\n\n\n\nESD OUTPUT FILE VALIDATION USING PARALLEL JOB PROCESSOR")
-        cmd = [
-            'ValidateFiles_tf.py',
-            '--outputESDFile',
-            fileESD1,
-            '--parallelFileValidation',
-            'True',
-            '--verbose'
-        ]
-        p = subprocess.Popen(
-            cmd,
-            shell = False,
-            stdout = subprocess.PIPE,
-            stderr = subprocess.STDOUT,
-            bufsize = 1
-        )
-        while p.poll() is None:
-            line = p.stdout.readline()
-            sys.stdout.write(line)
-        # Clean remaining buffered output lines.
-        for line in p.stdout:
-            sys.stdout.write(line)
-        self.assertEqual(p.returncode, 0)
-
-    ## @brief unit test for HIST
-    #  @detail This method is a unit test for output file validation of HIST
-    #  data using the parallel job processor.
-    def test_HIST(self):
-        msg.info("\n\n\n\nHIST OUTPUT FILE VALIDATION USING PARALLEL JOB PROCESSOR")
-        cmd = [
-            'ValidateFiles_tf.py',
-            '--outputHISTFile',
-            fileHIST1,
-            '--parallelFileValidation',
-            'True',
-            '--verbose'
-        ]
-        p = subprocess.Popen(
-            cmd,
-            shell = False,
-            stdout = subprocess.PIPE,
-            stderr = subprocess.STDOUT,
-            bufsize = 1
-        )
-        while p.poll() is None:
-            line = p.stdout.readline()
-            sys.stdout.write(line)
-        # Clean remaining buffered output lines.
-        for line in p.stdout:
-            sys.stdout.write(line)
-        self.assertEqual(p.returncode, 0)
-
-    ## @brief unit test for TAG
-    #  @detail This method is a unit test for output file validation of TAG data
-    #  using the parallel job processor.
-    def test_TAG(self):
-        msg.info("\n\n\n\nTAG OUTPUT FILE VALIDATION USING PARALLEL JOB PROCESSOR")
-        cmd = [
-            'ValidateFiles_tf.py',
-            '--outputTAGFile',
-            fileTAG1,
-            '--parallelFileValidation',
-            'True',
-            '--verbose'
-        ]
-        p = subprocess.Popen(
-            cmd,
-            shell = False,
-            stdout = subprocess.PIPE,
-            stderr = subprocess.STDOUT,
-            bufsize = 1
-        )
-        while p.poll() is None:
-            line = p.stdout.readline()
-            sys.stdout.write(line)
-        # Clean remaining buffered output lines.
-        for line in p.stdout:
-            sys.stdout.write(line)
-        self.assertEqual(p.returncode, 0)
-
-    ## @brief unit test for BS
-    #  @detail This method is a unit test for output file validation of BS data
-    #  using the parallel job processor.
-    def test_BS(self):
-        msg.info("\n\n\n\nBS OUTPUT FILE VALIDATION USING PARALLEL JOB PROCESSOR")
-        cmd = [
-            'ValidateFiles_tf.py',
-            '--outputBSFile',
-            fileBS1,
-            '--parallelFileValidation',
-            'True',
-            '--verbose'
-        ]
-        p = subprocess.Popen(
-            cmd,
-            shell = False,
-            stdout = subprocess.PIPE,
-            stderr = subprocess.STDOUT,
-            bufsize = 1
-        )
-        while p.poll() is None:
-            line = p.stdout.readline()
-            sys.stdout.write(line)
-        # Clean remaining buffered output lines.
-        for line in p.stdout:
-            sys.stdout.write(line)
-        self.assertEqual(p.returncode, 0)
-
-    ## @brief unit test for multiple AOD files
-    #  @detail This method is a unit test for output file validation of AOD data
-    #  using the parallel job processor.
-    def test_AOD_multiple_file__parallel_validations(self):
-        msg.info("\n\n\n\nAOD MULTIPLE OUTPUT FILE VALIDATIONS USING PARALLEL JOB PROCESSOR")
-        cmd = [
-            'ValidateFiles_tf.py',
-            '--outputAODFile',
-            fileAOD1,
-            fileAOD2,
-            '--parallelFileValidation',
-            'True',
-            '--verbose'
-        ]
-        p = subprocess.Popen(
-            cmd,
-            shell = False,
-            stdout = subprocess.PIPE,
-            stderr = subprocess.STDOUT,
-            bufsize = 1
-        )
-        while p.poll() is None:
-            line = p.stdout.readline()
-            sys.stdout.write(line)
-        # Clean remaining buffered output lines.
-        for line in p.stdout:
-            sys.stdout.write(line)
-        self.assertEqual(p.returncode, 0)
-
-    ## @brief unit test for corrupted AOD
-    #  @detail This method is a unit test for output file validation of AOD data
-    #  using the parallel job processor.
-    def test_AOD_corrupted(self):
-        msg.info("\n\n\n\nCORRUPTED AOD OUTPUT FILE VALIDATION USING PARALLEL JOB PROCESSOR")
-        cmd = [
-            'ValidateFiles_tf.py',
-            '--outputAODFile',
-            fileAOD3,
-            '--parallelFileValidation',
-            'True',
-            '--verbose'
-        ]
-        p = subprocess.Popen(
-            cmd,
-            shell = False,
-            stdout = subprocess.PIPE,
-            stderr = subprocess.STDOUT,
-            bufsize = 1
-        )
-        while p.poll() is None:
-            line = p.stdout.readline()
-            sys.stdout.write(line)
-        # Clean remaining buffered output lines.
-        for line in p.stdout:
-            sys.stdout.write(line)
-        self.assertEqual(p.returncode, 75)
+    # Disable for now
+    pass
+    
+# 
+#     ## @brief unit test for AOD
+#     #  @detail This method is a unit test for output file validation of AOD data
+#     #  using the parallel job processor.
+#     def test_AOD(self):
+#         msg.info("\n\n\n\nAOD OUTPUT FILE VALIDATION USING PARALLEL JOB PROCESSOR")
+#         cmd = [
+#             'ValidateFiles_tf.py',
+#             '--outputAODFile',
+#             fileAOD1,
+#             '--parallelFileValidation',
+#             'True',
+#             '--verbose'
+#         ]
+#         p = subprocess.Popen(
+#             cmd,
+#             shell = False,
+#             stdout = subprocess.PIPE,
+#             stderr = subprocess.STDOUT,
+#             bufsize = 1
+#         )
+#         while p.poll() is None:
+#             line = p.stdout.readline()
+#             sys.stdout.write(line)
+#         # Clean remaining buffered output lines.
+#         for line in p.stdout:
+#             sys.stdout.write(line)
+#         self.assertEqual(p.returncode, 0)
+# 
+#     ## @brief unit test for ESD
+#     #  @detail This method is a unit test for output file validation of ESD data
+#     #  using the parallel job processor.
+#     def test_ESD(self):
+#         msg.info("\n\n\n\nESD OUTPUT FILE VALIDATION USING PARALLEL JOB PROCESSOR")
+#         cmd = [
+#             'ValidateFiles_tf.py',
+#             '--outputESDFile',
+#             fileESD1,
+#             '--parallelFileValidation',
+#             'True',
+#             '--verbose'
+#         ]
+#         p = subprocess.Popen(
+#             cmd,
+#             shell = False,
+#             stdout = subprocess.PIPE,
+#             stderr = subprocess.STDOUT,
+#             bufsize = 1
+#         )
+#         while p.poll() is None:
+#             line = p.stdout.readline()
+#             sys.stdout.write(line)
+#         # Clean remaining buffered output lines.
+#         for line in p.stdout:
+#             sys.stdout.write(line)
+#         self.assertEqual(p.returncode, 0)
+# 
+#     ## @brief unit test for HIST
+#     #  @detail This method is a unit test for output file validation of HIST
+#     #  data using the parallel job processor.
+#     def test_HIST(self):
+#         msg.info("\n\n\n\nHIST OUTPUT FILE VALIDATION USING PARALLEL JOB PROCESSOR")
+#         cmd = [
+#             'ValidateFiles_tf.py',
+#             '--outputHISTFile',
+#             fileHIST1,
+#             '--parallelFileValidation',
+#             'True',
+#             '--verbose'
+#         ]
+#         p = subprocess.Popen(
+#             cmd,
+#             shell = False,
+#             stdout = subprocess.PIPE,
+#             stderr = subprocess.STDOUT,
+#             bufsize = 1
+#         )
+#         while p.poll() is None:
+#             line = p.stdout.readline()
+#             sys.stdout.write(line)
+#         # Clean remaining buffered output lines.
+#         for line in p.stdout:
+#             sys.stdout.write(line)
+#         self.assertEqual(p.returncode, 0)
+# 
+#     ## @brief unit test for TAG
+#     #  @detail This method is a unit test for output file validation of TAG data
+#     #  using the parallel job processor.
+#     def test_TAG(self):
+#         msg.info("\n\n\n\nTAG OUTPUT FILE VALIDATION USING PARALLEL JOB PROCESSOR")
+#         cmd = [
+#             'ValidateFiles_tf.py',
+#             '--outputTAGFile',
+#             fileTAG1,
+#             '--parallelFileValidation',
+#             'True',
+#             '--verbose'
+#         ]
+#         p = subprocess.Popen(
+#             cmd,
+#             shell = False,
+#             stdout = subprocess.PIPE,
+#             stderr = subprocess.STDOUT,
+#             bufsize = 1
+#         )
+#         while p.poll() is None:
+#             line = p.stdout.readline()
+#             sys.stdout.write(line)
+#         # Clean remaining buffered output lines.
+#         for line in p.stdout:
+#             sys.stdout.write(line)
+#         self.assertEqual(p.returncode, 0)
+# 
+#     ## @brief unit test for BS
+#     #  @detail This method is a unit test for output file validation of BS data
+#     #  using the parallel job processor.
+#     def test_BS(self):
+#         msg.info("\n\n\n\nBS OUTPUT FILE VALIDATION USING PARALLEL JOB PROCESSOR")
+#         cmd = [
+#             'ValidateFiles_tf.py',
+#             '--outputBSFile',
+#             fileBS1,
+#             '--parallelFileValidation',
+#             'True',
+#             '--verbose'
+#         ]
+#         p = subprocess.Popen(
+#             cmd,
+#             shell = False,
+#             stdout = subprocess.PIPE,
+#             stderr = subprocess.STDOUT,
+#             bufsize = 1
+#         )
+#         while p.poll() is None:
+#             line = p.stdout.readline()
+#             sys.stdout.write(line)
+#         # Clean remaining buffered output lines.
+#         for line in p.stdout:
+#             sys.stdout.write(line)
+#         self.assertEqual(p.returncode, 0)
+# 
+#     ## @brief unit test for multiple AOD files
+#     #  @detail This method is a unit test for output file validation of AOD data
+#     #  using the parallel job processor.
+#     def test_AOD_multiple_file__parallel_validations(self):
+#         msg.info("\n\n\n\nAOD MULTIPLE OUTPUT FILE VALIDATIONS USING PARALLEL JOB PROCESSOR")
+#         cmd = [
+#             'ValidateFiles_tf.py',
+#             '--outputAODFile',
+#             fileAOD1,
+#             fileAOD2,
+#             '--parallelFileValidation',
+#             'True',
+#             '--verbose'
+#         ]
+#         p = subprocess.Popen(
+#             cmd,
+#             shell = False,
+#             stdout = subprocess.PIPE,
+#             stderr = subprocess.STDOUT,
+#             bufsize = 1
+#         )
+#         while p.poll() is None:
+#             line = p.stdout.readline()
+#             sys.stdout.write(line)
+#         # Clean remaining buffered output lines.
+#         for line in p.stdout:
+#             sys.stdout.write(line)
+#         self.assertEqual(p.returncode, 0)
+# 
+#     ## @brief unit test for corrupted AOD
+#     #  @detail This method is a unit test for output file validation of AOD data
+#     #  using the parallel job processor.
+#     def test_AOD_corrupted(self):
+#         msg.info("\n\n\n\nCORRUPTED AOD OUTPUT FILE VALIDATION USING PARALLEL JOB PROCESSOR")
+#         cmd = [
+#             'ValidateFiles_tf.py',
+#             '--outputAODFile',
+#             fileAOD3,
+#             '--parallelFileValidation',
+#             'True',
+#             '--verbose'
+#         ]
+#         p = subprocess.Popen(
+#             cmd,
+#             shell = False,
+#             stdout = subprocess.PIPE,
+#             stderr = subprocess.STDOUT,
+#             bufsize = 1
+#         )
+#         while p.poll() is None:
+#             line = p.stdout.readline()
+#             sys.stdout.write(line)
+#         # Clean remaining buffered output lines.
+#         for line in p.stdout:
+#             sys.stdout.write(line)
+#         self.assertEqual(p.returncode, 75)
 
 
 def main():
-- 
GitLab