diff --git a/PhysicsAnalysis/PATJobTransforms/CMakeLists.txt b/PhysicsAnalysis/PATJobTransforms/CMakeLists.txt
deleted file mode 100644
index c9412d61ccd709d2941205c0b778e3ceb62c203f..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/CMakeLists.txt
+++ /dev/null
@@ -1,25 +0,0 @@
-################################################################################
-# Package: PATJobTransforms
-################################################################################
-
-# Declare the package name:
-atlas_subdir( PATJobTransforms )
-
-# Declare the package's dependencies:
-atlas_depends_on_subdirs( PUBLIC
-                          Reconstruction/RecJobTransforms
-                          Tools/PyJobTransforms
-                          Tools/PyJobTransformsCore )
-
-# External dependencies:
-find_package( PythonLibs )
-
-# Install files from the package:
-atlas_install_python_modules( python/*.py )
-atlas_install_joboptions( share/*.py )
-atlas_install_runtime( scripts/*.py )
-
-# Aliases:
-atlas_add_alias( Reco_trf "Reco_trf.py" )
-atlas_add_alias( Merging_trf "Merging_trf.py" )
-
diff --git a/PhysicsAnalysis/PATJobTransforms/cmt/requirements b/PhysicsAnalysis/PATJobTransforms/cmt/requirements
index a18e7a54113c73163b91fc19f850fec06e7d5b40..99e14cae8cdb7bc13cc342a1998bb29d1f215cde 100644
--- a/PhysicsAnalysis/PATJobTransforms/cmt/requirements
+++ b/PhysicsAnalysis/PATJobTransforms/cmt/requirements
@@ -1,14 +1,9 @@
 package PATJobTransforms
 
 use AtlasPolicy AtlasPolicy-*
-use AtlasPython AtlasPython-* External
 use PyJobTransforms PyJobTransforms-* Tools
 use PyJobTransformsCore PyJobTransformsCore-* Tools
 use RecJobTransforms RecJobTransforms-* Reconstruction
 
-alias Reco_trf     Reco_trf.py
-alias Merging_trf  Merging_trf.py
-
 apply_pattern declare_python_modules files="*.py"
 apply_pattern declare_job_transforms tfs='*.py' jo='*.py'
-
diff --git a/PhysicsAnalysis/PATJobTransforms/python/ArgDicTools.py b/PhysicsAnalysis/PATJobTransforms/python/ArgDicTools.py
deleted file mode 100644
index 2e840812abcd027e1337495d3dae41ea5b6c55e5..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/python/ArgDicTools.py
+++ /dev/null
@@ -1,556 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-# Creation: David Cote (DESY), September 2008
-# Usage:
-#   -transform input arguments into a dictionary
-
-import os
-import pickle
-import pprint
-from subprocess import Popen, STDOUT, PIPE, CalledProcessError
-import sys
-import time
-
-# This function looks for known exceptions occurring when interpreting sysArgs
-def SysArgsExceptionCatcher(sysArgs):
-    #Help should be provided if the transform is executed without arguments, e.g. like this: "BStoESDAODDPD_trf.py"
-    if len(sysArgs) == 1:
-        return "Help"
-
-    #Help should be provided if the transform is executed with the -h argument, e.g. like this: "BStoESDAODDPD_trf.py -h"
-    if len(sysArgs) == 2:
-        if sysArgs[1] == "-h":
-            return "Help"
-        
-    #No known exception found, return OK
-    return "OK"
-
-# This function checks whether a key is an accepted special option
-def KeyExceptionCatcher(key):
-    #This allows for special options like: --ignoreunknown, -h, etc.
-    if key.startswith('-') and key!='--argdict' and key!='--athenaopts':
-        return "OK"
-    #Not a known special option...
-    return "ERROR"
-
-
-def PickleToDico(sysArgv):
-    #Expect: Transform.py --argdict=NameOfLocalFile
-    #Just get the input dictionary
-    if sysArgv[1].startswith('--argdict='):
-        fname=sysArgv[1][len('--argdict='):]
-        f = open(fname, 'r')
-        dic = pickle.load(f)
-        f.close()
-        print "Successfully interpreted command line: method pickled argDict..."               
-        return dic
-    else:
-        return False
-
-
-def SysArgvToDico(sysArgv):
-    #Expect: Transform.py arg1=value1 ... argN=valueN
-    #Create an input dictionary from sysArgv
-    dic={}
-    #loop over sysArgv values, excluding 0th element
-    for arg in sysArgv[1:]:
-        try:
-            eqPos=arg.index('=')
-            key=arg[:eqPos]
-            value=arg[eqPos+1:]
-            dic[key]=value
-            if key == '--argdict':
-                print "WARNING - pickled dic method: use PickleToDico()"
-                return False
-        except ValueError:
-            if KeyExceptionCatcher(arg) == "OK":
-                dic[arg]=''
-                print "Special arg: %s accepted..."%arg
-            else:
-                print "WARNING - positional argument method: use PositionalToDico()"
-                return False
-    print "Successfully interpreted command line: method arg=value..."               
-    return dic
-
-
-def PositionalToDico(sysArgv):
-    #Expect: Transform.py value1 ... valueN, with no key names.
-    #In this case, let the transform determine its default behavior.
-    print "Positional value method. Returning dic['defaultFromPositionalValues']=True."
-    dic={}
-    dic['defaultFromPositionalValues']=True
-    return dic
-
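
The three interpretation methods above are tried in a fixed fallback order by BuildDicFromCommandLineIgnoreAMI further down: pickled --argdict first, then key=value pairs, then bare positional values. A minimal sketch of that fallback chain, assuming Python 3 (args_to_dict is a hypothetical name, not part of this package):

import pickle

def args_to_dict(argv):
    # 1) pickled dictionary: Transform.py --argdict=<file>
    if len(argv) > 1 and argv[1].startswith('--argdict='):
        with open(argv[1][len('--argdict='):], 'rb') as f:
            return pickle.load(f)
    # 2) key=value pairs (special '-'-prefixed flags get an empty value)
    if all('=' in a or a.startswith('-') for a in argv[1:]):
        return dict(a.split('=', 1) if '=' in a else (a, '') for a in argv[1:])
    # 3) bare positional values: defer to the transform's own defaults
    return {'defaultFromPositionalValues': True}
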
-
-def DicHasOutputs(aDic):
-    from PATJobTransforms.Configuration import ConfigDic
-    for key in aDic.keys():
-        if ConfigDic.has_key(key) and hasattr(ConfigDic[key],"isOutput"):
-            return True
-    return False
-
-
-def DicInputs(aDic):
-    from PATJobTransforms.Configuration import ConfigDic
-    for key in aDic.keys():
-        if ConfigDic.has_key(key) and hasattr(ConfigDic[key],"isInput"):
-            return aDic[key]
-    return ""
-
-
-## @brief Deprecated: aborts with a message pointing to the new transforms framework
-def GetAMIClient(useReplica=False):
-    pprint.pprint(' '.join(["The old job transforms framework is now _deprecated_.",
-                            "There is no support for the use of AMI and the old transforms will be removed",
-                            "shortly from the release. Please migrate to the new transforms framework.",
-                            "https://twiki.cern.ch/twiki/bin/viewauth/AtlasComputing/JobTransform#New_Style_Transforms"]),
-                  stream = sys.stderr) 
-    sys.exit(1)
-
-def BuildDicFromCommandLineIgnoreAMI(sysArgv):
-    if SysArgsExceptionCatcher(sysArgv) == "Help":
-        inDic={}
-        inDic['-h']=''
-    else:
-        inDic = PickleToDico(sysArgv)
-        if not inDic:
-            inDic=SysArgvToDico(sysArgv)
-            if not inDic:
-                inDic=PositionalToDico(sysArgv)
-                if not inDic:
-                    raise RuntimeError("Unable to create input dictionary from sys.argv")
-
-    if inDic.has_key('keepFullCommandUntouched'):
-        from PATJobTransforms.TrfFlags import trfFlags
-        val=inDic.pop('keepFullCommandUntouched')
-        if val=="no" or val=="NO" or val=="False" or val=="false":
-            print "INFO DRAW synonyms will be resolved"
-            trfFlags.KeepFullCommandUntouched=False
-        else:
-            print "INFO DRAW synonyms will be kept as they are. Might cause a failure at the end of the transform"
-            trfFlags.KeepFullCommandUntouched=True
-
-
-    if inDic.has_key('applyIfMatchPattern'):
-        from PATJobTransforms.TrfFlags import trfFlags
-        val=inDic.pop('applyIfMatchPattern')
-        if val=="no" or val=="NO" or val=="False" or val=="false":
-            print "INFO Will produce all outputs regardless of the input stream name"
-            trfFlags.ApplyIfMatchPattern=False
-        else:
-            print "INFO Will only produce output that match the input stream ifMatch pattern"
-            trfFlags.ApplyIfMatchPattern=True
-
-            
-            
-    PopSynonyms(inDic)
-    return inDic
-
-
-def GetInfoFromAMIXML(amitag, suppressPass = True):
-    try:
-        import Eowyn.luxml as luxml
-    except ImportError:
-        print "WARNING unable to import luxml with standard $PYTHONPATH."
-        print "Will manually add tzero/prod1/code, then try again..." 
-        import sys
-        sys.path.insert(0,'/afs/cern.ch/atlas/project/tzero/prod1/code')
-        import Eowyn.luxml as luxml
-        sys.path.pop(0)
-        print "import luxml was succesful"
-
-    #get dic from AMI
-    amiclient=GetAMIClient()
-    l=['ListConfigurationTag','-configTag='+amitag]
-    result=amiclient.execute(l)
-    dicOfDico=result.getDict()
-    xmlstr = str(dicOfDico[u'rowset_'+amitag][u''+amitag][u'moreInfo'])
-    amiPhysDic = luxml.toPy(xmlstr)['phconfig']
-    strDic=dicOfDico[u'rowset_'+amitag][u''+amitag][u'transformation']
-    amiTransform=str(strDic)
-    strDic=dicOfDico[u'rowset_'+amitag][u''+amitag][u'SWReleaseCache']
-    amiRelease=str(strDic)
-    results={}
-    results['amiPhysDic']=amiPhysDic
-    results['amiInputDic']={}
-    results['amiOuputDic']={}
-    results['amiTransform']=amiTransform
-    results['amiRelease']=amiRelease
-
-    #tweak to facilitate compatibility with ProdSys 
-    #ReleaseDic={}
-    #ReleaseDic['Release']=amiRelease
-    #results['amiRelease']=ReleaseDic
-
-    return results
-
-
-def GetInfoFromAMIPython(amitag, suppressPass = True):
-    #get dics from AMI
-    amiclient=GetAMIClient()
-    l=['ListConfigurationTag','configTag={0}'.format(amitag)]
-    
-    try:
-        result=amiclient.execute(l)
-    except Exception, e:
-        print "WARNING problem in amiclient.execute, try using CERN replica instead ({0})".format(e)
-        amiclient=GetAMIClient(useReplica=True)
-        try:
-            result=amiclient.execute(l)
-        except Exception:
-            print "FATAL could not execute AMI-command. Will reraise last exception for debugging."
-            raise
-        
-    dicOfDico=result.to_dict()
-    #configuration is a python dic in string format, get back to real python using exec 
-    strDic=dicOfDico[u'rowset_'+amitag][u''+amitag][u'phconfig']
-    exec("amiPhysDic="+strDic)
-    strDic=dicOfDico[u'rowset_'+amitag][u''+amitag][u'inputs']
-    if (amitag=='q130'): strDic="{'inputHitsFile': {}, 'NDMinbiasHitsFile': {}, 'cavernHitsFile': {}}"
-    exec("amiInputDic="+strDic)
-    strDic=dicOfDico[u'rowset_'+amitag][u''+amitag][u'outputs']
-    exec("amiOuputDic="+strDic)
-    strDic=dicOfDico[u'rowset_'+amitag][u''+amitag][u'transformation']
-    amiTransform=str(strDic)
-    strDic=dicOfDico[u'rowset_'+amitag][u''+amitag][u'SWReleaseCache']
-    amiRelease=str(strDic)
-
-    results={}
-    results['amiPhysDic']=amiPhysDic
-    results['amiInputDic']=amiInputDic
-    results['amiOuputDic']=amiOuputDic
-    results['amiTransform']=amiTransform
-    results['amiRelease']=amiRelease
-    
-    #tweak to facilitate compatibility with ProdSys 
-    #ReleaseDic={}
-    #ReleaseDic['Release']=amiRelease
-    #results['amiRelease']=ReleaseDic
-
-    return results
-
-def GetInfoFromAMI(amiTag):
-    if amiTag=="q109":
-        print "\n\n\nAMI tag q109 has been superceded by q116, which does the same thing but has an updated syntax that the Tier0 can better read."
-        print "\n\n***   Please try again using the same command but AMI=q116 instead of AMI=q109   ***\n\n\n"
-        sys.exit(0)
-    try:
-        info=GetInfoFromAMIPython(amiTag)
-    except KeyError:
-        print "unable to interpret AMI tag as Python. Will try with XML."
-        try:
-            info=GetInfoFromAMIXML(amiTag)
-        except:
-            raise RuntimeError("Unable to interpret AMI tag!")
-    return info
-
-
-def AppendDic1WithDic2(dic1,dic2):
-    pattern='append_'
-    for key in dic2.keys():
-        if key.startswith(pattern):
-            appKey=key[len(pattern):]
-            appValue=dic2.pop(key)
-            orig=None
-            if dic1.has_key(appKey):
-                orig=dic1[appKey]
-                dic1[appKey]=[orig,appValue]
-            else:
-                dic1[appKey]=appValue
-            print "INFO appended key: %s. Original value: %s. New value: %s."%(appKey,orig,dic1[appKey])
-    return
-
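
The append_ convention implemented above lets a user add to an AMI-supplied value instead of replacing it. A standalone sketch of the same folding, assuming Python 3 (merge_append_keys is a hypothetical name):

def merge_append_keys(base, extra, prefix='append_'):
    # Fold extra's 'append_*' keys into base, keeping any existing value.
    for key in [k for k in extra if k.startswith(prefix)]:
        target = key[len(prefix):]
        value = extra.pop(key)
        base[target] = [base[target], value] if target in base else value

base  = {'preExec': 'rec.doTrigger=False'}
extra = {'append_preExec': 'rec.doFloatingPointException=True'}
merge_append_keys(base, extra)
assert base['preExec'] == ['rec.doTrigger=False', 'rec.doFloatingPointException=True']
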
-
-def PopSynonyms_DRAWOutput(aDic):
-    from PATJobTransforms.TrfFlags import trfFlags
-    if trfFlags.KeepFullCommandUntouched():
-        return
-
-    validSynonyms={}
-    validSynonyms['outputDESD_ZEEFile']='outputESDFile'
-    validSynonyms['outputDESD_ZMUMUFile']='outputESDFile'
-    validSynonyms['outputDESD_WENUFile']='outputESDFile'
-    validSynonyms['outputDESD_WMUNUFile']='outputESDFile'
-    validSynonyms['outputDAOD_ZEEFile']='outputAODFile'
-    validSynonyms['outputDAOD_ZMUMUFile']='outputAODFile'
-    validSynonyms['outputDAOD_WENUFile']='outputAODFile'
-    validSynonyms['outputDAOD_WMUNUFile']='outputAODFile'
-    for oldKey in validSynonyms.keys():
-        if aDic.has_key(oldKey):
-            newKey=validSynonyms[oldKey]
-            print "INFO Argument '%s' replaced by synonym '%s'."%(oldKey,newKey)
-            newValue=aDic.pop(oldKey)
-            if aDic.has_key(newKey):
-                print "WARNING argument '%s' specified multiple times. Current value '%s' kept, new value '%s' ignored."%(newKey,aDic[newKey],newValue)
-            else:
-                aDic[newKey]=newValue
-
-
-def PopSynonyms(aDic):
-
-    obsoleteArgs={}
-    obsoleteArgs['DESD_IDCOMM']='outputDESD_IDCOMMFile'
-    obsoleteArgs['DESD_PIXELCOMM']='outputDESD_PIXELCOMMFile'
-    obsoleteArgs['DESD_MUONCOMM']='outputDESD_MUONCOMMFile'
-    obsoleteArgs['DESD_TILECOMM']='outputDESD_TILECOMMFile'
-    obsoleteArgs['DESD_CALOCOMM']='outputDESD_CALOCOMMFile'
-    obsoleteArgs['DESD_PHOJET']='outputDESD_PHOJETFile'
-    obsoleteArgs['DESD_SGLMU']='outputDESD_SGLMUFile'
-    obsoleteArgs['DESDM_TRACK']='outputDESDM_TRACKFile'
-    obsoleteArgs['DESDM_MUON']='outputDESDM_MUONFile'
-    obsoleteArgs['DESD_MET']='outputDESD_METFile'
-    obsoleteArgs['DESD_MBIAS']='outputDESD_MBIASFile'
-    obsoleteArgs['DESDM_EGAMMA']='outputDESDM_EGAMMAFile'
-    obsoleteArgs['DESDM_CALJET']='outputDESDM_CALJETFile'
-    obsoleteArgs['DESD_SGLEL']='outputDESD_SGLELFile'
-    obsoleteArgs['outputNTUP_TRIG']='outputNTUP_TRIGFile'
-    obsoleteArgs['outputCBNT']='outputCBNTFile'
-    obsoleteArgs['outputPixelCalibNtup']='outputNTUP_TRKVALIDFile'
-    obsoleteArgs['outputNTUP_PIXELCALIBFile']='outputNTUP_TRKVALIDFile'
-    obsoleteArgs['outputMuonCalibNtup']='outputNTUP_MUONCALIBFile'
-    obsoleteArgs['outputTAGComm']='outputTAG_COMMFile'
-    obsoleteArgs['HIST']='outputHISTFile'
-    obsoleteArgs['outputD2PD_TOPFile']='outputDAODM2_TOPFile'
-    obsoleteArgs['outputDAODM_TOPFile']='outputDAODM2_TOPFile'
-    obsoleteArgs['outputDESDM_CALJETFile']='outputDESD_CALJETFile'
-    obsoleteArgs['outputDESD_METFile']='outputDESDM_METFile'
-    obsoleteArgs['Geometry']='geometryVersion'
-    for oldKey in obsoleteArgs.keys():
-        if aDic.has_key(oldKey):
-            newKey=obsoleteArgs[oldKey]
-            print "WARNING Argument '%s' is obsolete! Please use '%s' instead."%(oldKey,newKey)
-            newValue=aDic.pop(oldKey)
-            if aDic.has_key(newKey):
-                print "WARNING argument '%s' specified multiple times. Current value '%s' overwritten by new value '%s'."%(newKey,aDic[newKey],newValue)
-            aDic[newKey]=newValue
-    
-    if aDic.has_key('extraParameter'):
-        extraP=aDic.pop('extraParameter')
-        print "INFO extraParamater=%s is removed. This pseudo-argument is stricly internal to ProdSys."%(extraP)
-    return
-
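
PopSynonyms above is essentially a key-renaming pass over the argument dictionary. A reduced sketch of that pass, assuming Python 3 and omitting the warning messages (rename_obsolete_keys is a hypothetical name):

def rename_obsolete_keys(dic, synonyms):
    # Pop each obsolete key and re-insert its value under the current
    # name; like PopSynonyms above, a duplicate new key is overwritten.
    for old_key, new_key in synonyms.items():
        if old_key in dic:
            dic[new_key] = dic.pop(old_key)

args = {'Geometry': 'ATLAS-GEO-20-00-01'}
rename_obsolete_keys(args, {'Geometry': 'geometryVersion'})
assert args == {'geometryVersion': 'ATLAS-GEO-20-00-01'}
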
-
-def UpdateDicListWithAMI(userDic,amiTag):
-    # Check for tags handled by panda, not AMI
-    if amiTag[0] in 'rpdsea':
-        from PATJobTransforms.ProdSysDicTools import GetInfoFromPANDA
-        infoList=GetInfoFromPANDA(amiTag) 
-    else:
-        infoList=[GetInfoFromAMI(amiTag)]
-
-    outList=[]
-    for info in infoList:
-        d={}
-        outDic,outInfo=UpdateDicWithAMI(userDic,amiTag,info)
-        d['info']=outInfo
-        d['outDic']=outDic
-        #d['Release']=outInfo['amiRelease']
-        outList.append(d)
-
-    return outList
-
-
-def UpdateDicWithAMI(userDic,amiTag,info):
-    amiInputDic=info['amiInputDic']
-    amiOuputDic=info['amiOuputDic']
-    amiPhysDic=info['amiPhysDic']
-
-    PopSynonyms(amiInputDic)
-    PopSynonyms(amiOuputDic)
-    PopSynonyms(amiPhysDic)
-    
-    #Now update userDic, becoming outDic
-    outDic={}
-    #The rules are:
-    # 1) if userDic specifies an input, use it. Otherwise use the default
-    # 2) if userDic specifies one output, overwrite whole amiOuputDic. Otherwise use all amiOuputDic with default values.
-    # 3) any physConfig specified in userDic overwrites amiConfigDic, unless the append option is used.
-
-    #inputs
-    inputFileValue=DicInputs(userDic)
-    if inputFileValue=="":
-        print "\n"
-        if len(amiInputDic.keys())>0:
-            from PATJobTransforms.DefaultInputs import DefaultInputs
-            #inKey=amiInputDic.keys()[0]
-            for inKey in amiInputDic.keys():
-                if DefaultInputs.has_key(inKey):
-                    inputFileValue=DefaultInputs[inKey]
-                    if amiTag=="q120": inputFileValue=DefaultInputs["cosmicsBS"]                
-                    elif amiTag=="q126": inputFileValue=DefaultInputs["topBS"]                
-                    outDic[inKey]=inputFileValue
-                    print "INFO Using default input value: %s=%s"%(inKey,outDic[inKey])
-                else:
-                    raise RuntimeError("Key %s is not defined in DefaultInputs"%inKey)
-    
-    #outputs
-    #if no output is specified, use default values for all those specified in AMI tag if input matches regular expression 
-    if DicHasOutputs(userDic):
-        print "\nUsing outputs specified by user, bypassing those from AMI."
-    else:
-        print "\n"
-        from PATJobTransforms.Configuration import ConfigDic
-        from PATJobTransforms.TrfFlags import trfFlags
-        import re
-        for key in amiOuputDic.keys():
-            if not ConfigDic.has_key(key):
-                raise RuntimeError("Key %s from amiOutputDic is not known by job transform ConfigDic"%key)
-            if not hasattr(ConfigDic[key],"isOutput"):
-                raise RuntimeError("Key %s from amiOutputDic is not known as an output by job transform ConfigDic"%key)
-
-            pattern=".*" #trivial pattern that always matches
-            if amiOuputDic[key].has_key('ifMatch'):
-                pattern=amiOuputDic[key]['ifMatch'] #overwrites trivial pattern with the one from AMI
-
-            idx=inputFileValue.rfind("/")
-            if idx != -1:
-                inputFileValue=inputFileValue[1+idx:]
-                print "Reduced input file Name:",inputFileValue
-
-            if (not trfFlags.ApplyIfMatchPattern()) or re.match(pattern,inputFileValue):
-                type=ConfigDic[key].isOutput
-                defaultValue=None
-                if type=='bs':
-                    defaultValue='my'+str(amiOuputDic[key]['dstype'])+'.data'
-                elif type=='root':
-                    defaultValue='my'+str(amiOuputDic[key]['dstype'])+'.root'
-                elif type=='pool':
-                    defaultValue='my'+str(amiOuputDic[key]['dstype'])+'.pool.root'
-                else:
-                    raise RuntimeError("Don't know to define a default value for type %s"%type)            
-                outDic[key]=defaultValue
-                print "INFO Using default output value: %s=%s"%(key,outDic[key])
-            else:
-                print "INFO %s not produced since input file '%s' does not match pattern '%s'."%(key,inputFileValue,pattern)
-
-        
-    #physics_configuration: take it all
-    outDic.update(amiPhysDic)
-    #now update outDic with everything from userDic (including input/output if relevant)
-    #first look for append option...
-    AppendDic1WithDic2(outDic,userDic)
-    #print "OutDict after append",outDic
-    
-    #at this point userDic will supersede what was in AMI in case of conflicts
-    outDic.update(userDic)
-
-    #Call PopSynonyms_DRAWOutput here and in BuildDicFromCommandLine to be sure it's executed in any case
-    PopSynonyms_DRAWOutput(outDic)
-    
-    return outDic,info
-
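
Rules 1) and 2) above are handled by the input/output default blocks; rule 3) reduces to two dictionary updates, AMI first and user second. A minimal sketch of that precedence, assuming Python 3 (resolve_config is a hypothetical name):

def resolve_config(user_dic, ami_phys_dic):
    # Rule 3: take the whole AMI physics configuration as the baseline,
    # then let every user-supplied key supersede it on conflict.
    out = dict(ami_phys_dic)
    out.update(user_dic)
    return out

merged = resolve_config({'maxEvents': '10'},
                        {'maxEvents': '-1', 'autoConfiguration': 'everything'})
assert merged == {'maxEvents': '10', 'autoConfiguration': 'everything'}
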
-
-
-def BuildDicFromCommandLine(sysArgv,returnList=False):
-    print "###############################"
-    print "Original job transform command:"
-    # Take a bit of care with quotes - argv[0] shouldn't need them, nor should any of the 'keys',
-    # but everything else is enclosed in single quotes, with any embedded single
-    # quotes escaped
-    origCmd = sysArgv[0] + ' '
-    for i in sysArgv[1:]:
-        if i.find('=') > -1:
-            key, value = i.split('=', 1)
-            origCmd += key + '=' + "'" + value.replace("'", "'\\''") + "'" + ' '
-        else:
-            origCmd += "'" + i.replace("'", "'\\''") + "'" + ' '
-    print origCmd
-    print "###############################"
-
-    dicList=[] #only used by GetCommand.py
-    inDic=BuildDicFromCommandLineIgnoreAMI(sysArgv)
-
-    #dynamically created user outputs
-    for key in inDic.keys():
-        from PATJobTransforms.OutputsMgr import outputsMgr,magicKey
-        if key.startswith(magicKey):
-            outputsMgr.addTrfOutput(key)
-            inDic['--test']=''
-            inDic['--omitvalidation']='ALL'
-            print "BLABLABLA_1",outputsMgr.userOutputList
-
-    if inDic.has_key('AMI'):
-        amiTag=inDic.pop('AMI')
-        dicList=UpdateDicListWithAMI(inDic,amiTag)
-        inDic=dicList[0]['outDic']
-    
-    # Short report on atlas setup
-    print "###############################"
-    print 'Atlas Setup Report:'
-    for eVar in ('AtlasVersion', 'AtlasProject','AtlasPatch', 'AtlasPatchVersion', 'CMTCONFIG','TestArea'):
-        if eVar in os.environ:
-            print '\t%s=%s' % (eVar, os.environ[eVar])
-        else:
-            print '\t%s undefined' % eVar
-    # Look for patches so that the job can be rerun 
-    if 'TestArea' in os.environ and os.access(os.environ['TestArea'], os.R_OK):
-        print "Patch packages are:"
-        try:
-            cmd = ['cmt', 'show', 'packages', os.environ['TestArea']]
-            cmtProc = Popen(cmd, shell = False, stdout = PIPE, stderr = STDOUT, bufsize = 1)
-            cmtOut = cmtProc.communicate()[0] 
-            for line in cmtOut.split('\n'):
-                try:
-                    if line.strip() == '':
-                        continue
-                    (package, packageVersion, packagePath) = line.split()
-                    print '\t%s' % (packageVersion)
-                except ValueError:
-                    print "Warning, unusual output from cmt: %s" % line 
-        except (CalledProcessError, OSError), e:
-            print 'Execution of CMT failed: %s' % e
-    else:
-        print "No readable patch area found"
-    print "###############################"
-
-
-    PopSynonyms_DRAWOutput(inDic) #Call a second time to make sure it's executed even if no input comes from AMI
-
-    #Write out inDic in a pickle file
-    import pickle
-    f = open('inputDictionary.pickle', 'w')
-    pickle.dump(inDic, f)
-    f.close()
-    print "INFO trf configuration written in inputDictionary.pickle"
-
-    if returnList:
-        return dicList  #only used by GetCommand.py
-
-    return inDic
-
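
The quoting loop above rebuilds a shell-safe copy of the command line by hand. Under Python 3 the same effect could be had with shlex.quote; a sketch under that assumption (reconstruct_command is a hypothetical name):

import shlex

def reconstruct_command(argv):
    # Keys stay bare; values and non key=value arguments are quoted so the
    # printed command can be pasted back into a shell.
    parts = [argv[0]]
    for arg in argv[1:]:
        if '=' in arg:
            key, value = arg.split('=', 1)
            parts.append('%s=%s' % (key, shlex.quote(value)))
        else:
            parts.append(shlex.quote(arg))
    return ' '.join(parts)
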
-
-def addDefaultArgumentFromPositionalValue(dic,key,value):
-    #in itself this method is trivial, but it's required for Tier1-style trf introspection with grep (see e.g. Reco_trf)
-    if not dic.has_key(key):
-        if value=='NONE' or value=='none': 
-            print "Ignored key '%s' with value '%s'"%(key,value)
-        elif KeyExceptionCatcher(key)=="OK":
-            dic[key]=''
-            print "%s=''"%key
-        else:
-            dic[key]=value
-            print "%s=%s"%(key,value)
-    else:
-        raise RuntimeError("dic key '%s' is already defined. Forbidden!"%key)
-    return
-
-
-def DefaultConfigFromSysArgv(ListOfDefaultPositionalKeys,dic):
-    dic.clear()
-    #Configure default with positional values from sys.argv
-    import sys
-    if len(sys.argv) > (len(ListOfDefaultPositionalKeys)+1):
-        print "sys.argv:",sys.argv
-        print "ListOfDefaultPositionalKeys:",ListOfDefaultPositionalKeys
-        raise RuntimeError("Default configuration undefined: too many values in sys.argv")
-        
-    print "DefaultConfigFromSysArgv..."
-    i=0
-    for val in sys.argv[1:]:
-        addDefaultArgumentFromPositionalValue(dic,key=ListOfDefaultPositionalKeys[i],value=val)
-        i+=1
-
-    return dic
-
diff --git a/PhysicsAnalysis/PATJobTransforms/python/BSutils.py b/PhysicsAnalysis/PATJobTransforms/python/BSutils.py
deleted file mode 100644
index 630d688be58d6a5c5f438837268fc1a851aa7402..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/python/BSutils.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-# Functions to remove existing BS files from working dir before the job
-# starts and to rename the BS file to the name given to the trf.
-#
-# Taken from Reconstruction/RecJobTransforms/scripts/csc_RDOtoBS_trf.py
-# so they can be used by other transform scripts.
-#
-
-import os, re
-class _BSutils():
-    def match_deleteOld_BS_datafile(self,pattern,path,listfilenames):
-        """  Scans the path directory looking for old BS files
-        and it deletes them """
-        p=re.compile(pattern)
-        for i in listfilenames:
-            if p.match(i):
-                try:
-                    os.remove(i)
-                    print 'BSutils.py: Found the old BS data file in run directory ', p.match(i).group(),' and deleted it'
-                except Exception,err:
-                    print "BSutils.py: --> error:",err
-                    import traceback,sys
-                    traceback.print_exc( file = sys.stdout )
-                    raise RuntimeError('BSutils.py: failed deleting old BS file %s ' % i)
-
-
-    def match_rename_BS_datafile(self,args,path,listfilenames):
-        """ Renames the generated BS file with the name from the
-        transform arguments. The file is found by regular expression
-        using args[0] and is renamed to args[1] """
-        print "match_rename_BS_datafile",args
-        p=re.compile(args[0])
-        for i in listfilenames:
-            if p.match(i):
-                try:
-                    os.rename(p.match(i).group(),args[1])
-                    print 'BSutils.py: Found the BS data file ', p.match(i).group(), ' and renamed it to ',args[1]
-                except Exception,err:
-                    print "BSutils.py: --> error:",err
-                    import traceback,sys
-                    traceback.print_exc( file = sys.stdout )
-                    raise RuntimeError('BSutils.py: failed to rename BS file %s ' % i)
-                
-BSutils=_BSutils()
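
Both helpers above scan a directory listing with a compiled regular expression and act on each matching entry. A sketch of the rename case, assuming Python 3 and acting on the first match only (rename_first_match is a hypothetical name):

import os, re

def rename_first_match(pattern, target, directory='.'):
    # Rename the first directory entry matching `pattern` to `target`.
    rx = re.compile(pattern)
    for name in os.listdir(directory):
        if rx.match(name):
            os.rename(os.path.join(directory, name), target)
            return target
    raise FileNotFoundError("no file matching %r in %s" % (pattern, directory))
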
diff --git a/PhysicsAnalysis/PATJobTransforms/python/BaseOfBasicTrf.py b/PhysicsAnalysis/PATJobTransforms/python/BaseOfBasicTrf.py
deleted file mode 100755
index 79531e29e470b08fb4184bd267c0d9ba7522d322..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/python/BaseOfBasicTrf.py
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-from PyJobTransformsCore.trf import JobTransform
-
-class BaseOfBasicTrf( JobTransform ):
-    def __init__(self,inDic,authors,skeleton,help,name="default",lastInChain=True):
-        JobTransform.__init__(self, name=name, authors=authors, skeleton=skeleton, help=help,lastInChain=lastInChain)
-
-        if not isinstance(inDic,dict):
-            raise TypeError("inDic has type '%s' but should be a dictionary." %type(inDic))
-
-        from PATJobTransforms.ConfigDicUtils import AutoConfigureFromDic
-        self.inDic=inDic
-        AutoConfigureFromDic(self,inDic)
-
-    def matchEventsExpectEqual(self,inputFileArgName,outputFileArgName):
-        #Note: _namedArgs has lower case keys 
-        inputFileArgName=inputFileArgName.lower()
-        outputFileArgName=outputFileArgName.lower()
-        
-        if self._namedArgs.has_key("inputfile"):
-            inFile=self.getArgument("inputfile")
-        elif self._namedArgs.has_key(inputFileArgName):
-            inFile=self.getArgument(inputFileArgName)
-        else:
-            self.logger().warning("No input file matching '%s'. MatchEvents not executed."%inputFileArgName)
-            return
-
-        if self._namedArgs.has_key(outputFileArgName):
-            outFile=self.getArgument(outputFileArgName)
-        else:
-            self.logger().warning("No output file matching '%s'. MatchEvents not executed."%outputFileArgName)
-            return
-
-        maxEvents=-1
-        if self._namedArgs.has_key("maxevents"):
-            maxEvents=self.getArgument("maxevents").value()
-
-        inEvents=inFile.eventCount()
-        if not isinstance(inEvents, (int, long)):
-            self.logger().warning("Input events could not be determined. MatchEvents not executed. ")
-            return
-        
-        if (maxEvents > 0) and (maxEvents < inEvents):
-            self.logger().info("MaxEvents < input_Events. MatchEvents not executed.")
-            return
-
-        outEvents=outFile.eventCount()
-        if not isinstance(outEvents, (int, long)):
-            self.logger().warning("Output events could not be determined. MatchEvents not executed. ")
-            return
-
-        diff=inEvents-outEvents
-        if diff==0:
-            self.logger().info("Input and output files have the same number of events. That's good!")
-        else:
-            #            raise TransformValidationError( outFile.value(), "failed validation. Input (%i events) and output (%i events) files have different number of events. That's unexpected. Stopping!", 'TRF_OUTFILE_TOOFEW' )
-            from PyJobTransformsCore import AtlasErrorCodes
-            self.logger().warning("Input (%i events) and output (%i events) files have different number of events. That's unexpected."%(inEvents,outEvents))
-            self.addError( acronym = 'TRF_OUTFILE_TOOFEW', severity = AtlasErrorCodes.FATAL )
-
-        return
-
-
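
The in/out event comparison in matchEventsExpectEqual above only fires when the run was not truncated by maxEvents. The core decision can be expressed as a small pure function; a sketch assuming Python 3 (events_match is a hypothetical name), returning None when the check does not apply:

def events_match(in_events, out_events, max_events=-1):
    # The comparison is skipped when maxEvents truncated the job.
    if 0 < max_events < in_events:
        return None
    return in_events == out_events

assert events_match(100, 100) is True
assert events_match(100, 98) is False
assert events_match(100, 100, max_events=10) is None
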
diff --git a/PhysicsAnalysis/PATJobTransforms/python/BaseOfCompositeTrf.py b/PhysicsAnalysis/PATJobTransforms/python/BaseOfCompositeTrf.py
deleted file mode 100644
index 81c7436a51991505209d960f3b829b6a6b92f21d..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/python/BaseOfCompositeTrf.py
+++ /dev/null
@@ -1,201 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-from PyJobTransformsCore.trf import JobTransform
-from PATJobTransforms.ArgDicTools import DefaultConfigFromSysArgv
-
-BasicCommonSteps=["any","none","first"]
-
-class BaseOfCompositeTrf( JobTransform ):
-    def __init__(self,inputDic,ProdSysKeys,name,authors,help,lastInChain=True):
-        JobTransform.__init__(self, name=name, authors=authors, help=help, lastInChain=lastInChain)
-
-        if not isinstance(inputDic,dict):
-            raise TypeError("inputDic has %s but should be a dictionary." %type(inputDic))
-
-        #Help?
-        if '-h' in inputDic or '--help' in inputDic:
-            print "\nThousands of parameter combinations can be generated dynamically by this transform."
-            print "See: https://twiki.cern.ch/twiki/bin/view/AtlasComputing/RecoTrf"
-            print "\nThis design has one exception: ProdSys (Tier1), which can only use keys:"
-            print ProdSysKeys
-            print "\nTo bypass an argument in positional mode, give value: 'NONE'."
-            print "Please use CmdToPositional.py to translate a key=val command into a positional one.\n\n"
-
-        #Default configuration?
-        if inputDic.has_key('defaultFromPositionalValues'):
-            inputDic=DefaultConfigFromSysArgv(ProdSysKeys,inputDic)
-
-        self.inDic=inputDic
-        self.ValidSubStepsDic={}
-        self.ValidSubStepsRunFcts={}
-        return
-
-    def ConfigureCommonInternalSubSteps(self):
-        from PATJobTransforms.Configuration import ConfigDic
-            
-        #Loop over all inputDic keys
-        for key in self.inDic.keys():
-            newKey,subStep=self.GetKeyAndSubStep(key)
-            if not ConfigDic.has_key(newKey):
-                raise RuntimeError("key '%s' is not defined in ConfigDic"%newKey)
-
-            #Set subSteps from above if specified, otherwise use default value from ConfigDic
-            subSteps=[subStep]
-            if subStep==None:
-                subSteps=[]
-                tmpSteps=ConfigDic[newKey].subSteps
-                for t in tmpSteps:
-                    if t in BasicCommonSteps or t in self.ValidSubStepsDic:
-                        subSteps.append(t)
-
-            meth="update"
-            #meth="create" <--- problematic, leave it off (OK)                
-            
-            #Now configure the sub-step(s) for this key
-            for step in subSteps:
-                if step=="any":
-                    self.AddToAllDics(newKey,self.inDic[key],meth)
-                elif step=="none":
-                    #these are typically for Reco_trf only configuration (e.g. DBRelease)
-                    ConfigDic[newKey](self,self.inDic)
-                elif step=="first":
-                    #handle this later, when the input/output configuration is known.
-                    pass
-                elif self.ValidSubStepsDic.has_key(step):
-                    self.AddToDic(self.ValidSubStepsDic[step],newKey,self.inDic[key],meth)
-                else:
-                    raise RuntimeError("Unexpected sub-step '%s' for key '%s'"%(step,newKey))
-
-        return
-
-    def CommonSpecialFinalConfig(self):
-        #These are semi-hacks... please minimize the stuff here!
-        if self.inDic.has_key('skipEvents'):
-            skip=self.inDic['skipEvents']
-            print "INFO skipEvents is special. It is only added to the first sub-step(s)."
-            firstSteps=self.GetFirstSubStep()
-            if firstSteps==None:
-                raise RuntimeError("skipEvents requested, but can't find which sub-step it should be added to.")
-
-            for step in firstSteps:
-                self.AddToDic(self.ValidSubStepsDic[step],'skipEvents',skip,"create")
-                print "skipEvents=%i added to %s."%(int(skip),step)
-
-        #Remove duplicate outputs, giving precedence to the first running step of the substeps list from ConfigDic 
-        from PATJobTransforms.Configuration import ConfigDic
-        for argKey in ConfigDic.keys():
-            argObj=ConfigDic[argKey]
-            if hasattr(argObj,"isOutput") and hasattr(argObj,"subSteps"):
-                steps=argObj.subSteps
-                alreadyProduced=False
-                for step in steps:
-                    if self.SubStepIsExecuted(step) and self.ValidSubStepsDic[step].has_key(argKey):
-                        if alreadyProduced:
-                            self.ValidSubStepsDic[step].pop(argKey)
-                        else:
-                            alreadyProduced=True
-        return
-
-    def hasOutput(self,dic):
-        from PATJobTransforms.Configuration import ConfigDic
-        for key in dic.keys():
-            if ConfigDic.has_key(key):
-                if hasattr(ConfigDic[key],"isOutput"):
-                    return True
-                else:
-                    pass
-            else:
-                raise RuntimeError("key %s is not defined in ConfigDic"%key)
-            pass
-        return False
-    
-    def hasInput(self,dic):
-        from PATJobTransforms.Configuration import ConfigDic
-        for key in dic.keys():
-            if ConfigDic.has_key(key):
-                if hasattr(ConfigDic[key],"isInput"):
-                    return True
-                else:
-                    pass
-            else:
-                raise RuntimeError("key %s is not defined in ConfigDic"%key)
-            pass
-        return False
- 
-    def GetKeyAndSubStep(self,key):
-        #This method translates e.g. key=preExec_r2e into newKey=preExec, step=r2e
-        for step in self.ValidSubStepsDic.keys():
-            #is this key only for a specific sub-step?
-            if key.endswith("_"+step):
-                ind=len(key)-len(step)-1
-                newKey=key[:ind]
-                print "%s will be only passed to step %s as %s"%(key,step,newKey)
-                return newKey,step
-            pass        
-        #At this point, key is to be used by any sub-step 
-        return key,None
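
GetKeyAndSubStep strips a trailing _<step> suffix when <step> is one of the transform's declared sub-steps. A standalone sketch of the same parsing, assuming Python 3 (split_substep_key is a hypothetical name):

def split_substep_key(key, valid_steps):
    # 'preExec_r2e' -> ('preExec', 'r2e') when 'r2e' is a declared sub-step;
    # otherwise the key applies to every sub-step (step is None).
    for step in valid_steps:
        if key.endswith('_' + step):
            return key[:-(len(step) + 1)], step
    return key, None

assert split_substep_key('preExec_r2e', ['r2e', 'e2a']) == ('preExec', 'r2e')
assert split_substep_key('maxEvents', ['r2e', 'e2a']) == ('maxEvents', None)
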
-
-    def AddToDic(self,dic,key,val,meth):
-        if not dic.has_key(key):
-            dic[key]=val
-        else:
-            if meth=="update":
-                newVal=val+",,"+dic[key]
-                dic[key]=newVal
-            else:
-                raise RuntimeError("Key '%s' is already defined in the current dictionary and cannot be overwritten."%(key))
-            return
-
-    def AddToAllDics(self,newKey,val,meth):
-        for key in self.ValidSubStepsDic.keys():
-            dic=self.ValidSubStepsDic[key]
-            self.AddToDic(dic,newKey,val,meth)
-        return
-
-    def GetFirstSubStep(self):
-        raise RuntimeError("Virtual method GetFirstSubStep() needs to be implemented in concrete composite transform.")
-
-    def AddNewSubStep(self,key,runSubStepFct=None):
-        if self.ValidSubStepsDic.has_key(key):
-            raise RuntimeError("Sub-step '%s' is already defined.")
-        self.ValidSubStepsDic[key]={}
-        self.ValidSubStepsRunFcts[key]=runSubStepFct
-        return self.ValidSubStepsDic[key]
-
-    def SubStepIsExecuted(self,stepKey):
-        if self.ValidSubStepsRunFcts.has_key(stepKey):
-            if self.ValidSubStepsRunFcts[stepKey]!=None:
-                return self.ValidSubStepsRunFcts[stepKey]()
-            pass
-        return False
-
-    def Print(self):
-        for key in self.ValidSubStepsDic.keys():
-            print "----------------------------"
-            print "key"
-            print self.ValidSubStepsDic[key]
-            print "----------------------------"
-        return
-
-    def getPotentialArgs(self):
-        from PyJobTransformsCore.trfutil import AddValidItemToList,OverlapLists
-        trfSteps=self.ValidSubStepsDic.keys()
-        AddValidItemToList(BasicCommonSteps,trfSteps)
-        trfArgs=[]
-
-        from PATJobTransforms.Configuration import ConfigDic
-        for key in ConfigDic.keys():
-            if hasattr(ConfigDic[key],"subSteps"):
-                keySteps=ConfigDic[key].subSteps
-                if OverlapLists(trfSteps,keySteps):
-                    trfArgs.append(key)
-                    pass
-                pass
-            else:
-                trfArgs.append(key)
-                pass
-            pass
-        return trfArgs
-
diff --git a/PhysicsAnalysis/PATJobTransforms/python/ConfigDicUtils.py b/PhysicsAnalysis/PATJobTransforms/python/ConfigDicUtils.py
deleted file mode 100644
index 688bc4fbd971677a0423ae91e4638e32e9c7a767..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/python/ConfigDicUtils.py
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-# Creation: David Cote (DESY), September 2008
-# Usage:
-#   -determines the transform behavior dynamically, according to args values
-#       |-->note: this was the idea, but it was never really done this way in the end... (David, August 2010)
-
-from PATJobTransforms.Configuration import ConfigDic
-from PATJobTransforms.ArgDicTools import KeyExceptionCatcher
-    
-def AutoConfigureFromDic(trf,zeDic):
-    #configure the transform from zeDic, but also modify zeDic as needed
-    origDic=zeDic.copy()
-
-    #loop over origDic, since zeDic may be modified by the functions returned by ConfigDic
-    for key in origDic.keys():
-        if ConfigDic.has_key(key):        
-            ConfigDic[key](trf,zeDic)
-        elif KeyExceptionCatcher(key) == "OK":
-            print "No ConfigDic entry for key '%s'."%key
-            print "This is allowed for special options like: --ignoreunknown, --athenaopts, -h, etc."
-        else:
-            raise RuntimeError("key %s is not defined in ConfigDic"%key)
-
-    return zeDic
-
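
AutoConfigureFromDic is a dispatch loop: every recognised argument key maps to a configuration callable in ConfigDic, and those callables may themselves edit the argument dictionary. A sketch of the pattern, assuming Python 3 (auto_configure is a hypothetical name):

def auto_configure(trf, arg_dic, config_dic):
    # Dispatch each known argument key to its configuration callable; the
    # callables may add or modify entries in arg_dic, so iterate over a
    # snapshot of the keys.
    for key in list(arg_dic):
        if key in config_dic:
            config_dic[key](trf, arg_dic)
        elif key.startswith('-'):
            continue   # special options such as -h, --ignoreunknown
        else:
            raise KeyError("key %s is not defined in the configuration dictionary" % key)
    return arg_dic
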
diff --git a/PhysicsAnalysis/PATJobTransforms/python/Configuration.py b/PhysicsAnalysis/PATJobTransforms/python/Configuration.py
deleted file mode 100644
index 3a1f2fb8a2e19ef952fd8ad2a1caada7f2747c40..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/python/Configuration.py
+++ /dev/null
@@ -1,820 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-# Creation: David Cote (DESY), September 2008
-# Usage:
-#   -define the list of allowed input arguments 
-#   -define the configuration functions to be executed by each input argument
-#   -associate allowed input and config functions in ConfigDic
-
-import uuid
-
-from PyJobTransformsCore.full_trfarg import *
-
-#Note, each function in ConfigDic must accept arguments (transform_to_configure, input_dictionary)
-#Hint: avoid giving default values to transform arguments
-ConfigDic={}
-
-#Use this function to add new keys in ConfigDic
-def AddToConfigDic(key,val):
-    #print "adding ",key,val
-    if ConfigDic.has_key(key):
-        raise RuntimeError("Key '%s' already defined in ConfigDic and cannot be overwritten."%key)
-    if hasattr(val,"subSteps"):
-        if not isinstance(getattr(val,"subSteps"),list):
-            raise RuntimeError("subSteps must be a list.")
-    else:
-        val.subSteps=['any']
-    ConfigDic[key]=val
-    return
-
-def DoNothing(trf,inDic):
-    return
-
-#Core transform options
-AddToConfigDic('--uploadtoami',DoNothing)
-AddToConfigDic('--usesqlite',DoNothing)
-AddToConfigDic('--useathenaapp',DoNothing)
-AddToConfigDic('--mcinput',DoNothing)
-AddToConfigDic('--omitvalidation',DoNothing)
-AddToConfigDic('--test',DoNothing)
-AddToConfigDic('-t',DoNothing)
-AddToConfigDic('--ccommand',DoNothing)
-AddToConfigDic('-c',DoNothing)
-AddToConfigDic('--leakcheckexecute',DoNothing)
-AddToConfigDic('--rss',DoNothing)
-AddToConfigDic('--vmem',DoNothing)
-AddToConfigDic('--extrametadatadict',DoNothing)
-AddToConfigDic('--extraignorefilters',DoNothing)
-AddToConfigDic('--usenewmeta',DoNothing)
-AddToConfigDic('--ignoreerrors',DoNothing)
-AddToConfigDic('--ignoreunknown',DoNothing)
-AddToConfigDic('--ignoreall',DoNothing)
-AddToConfigDic('--athenaopts',DoNothing)
-AddToConfigDic('--help',DoNothing)
-AddToConfigDic('-h',DoNothing)
-AddToConfigDic('dummy',DoNothing)
-
-
-#############
-# Automatically add all official primary DPD to ConfigDic
-#
-# AddDPDFunctionMaker is technically referred to as a "closure" in the standard python documentation.
-# It's just a function that builds and returns another function, in this case: AddDPDXXX(trf,inDic)
-def AddDPDFunctionMaker(dpdName, fileArg, **kw):
-    if type(dpdName)!=str:
-        raise RuntimeError("AddDPDFunctionMaker only accepts string input!")
-    
-    def AddDPDXXX(trf,inDic):
-        trf.add( fileArg(name=dpdName, **kw) )
-        return
-    return AddDPDXXX
-
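
A closure in the same sense, reduced to its essentials and assuming Python 3 (make_adder and its argument names are hypothetical): the inner function keeps a reference to name from the enclosing call.

def make_adder(name):
    # The returned function remembers `name` from the enclosing scope,
    # much as AddDPDXXX remembers dpdName, fileArg and kw above.
    def add_output(trf, in_dic):
        trf.add(name)
    return add_output

add_esd = make_adder('outputESDFile')   # two independent closures
add_aod = make_adder('outputAODFile')
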
-def AddDPDs (dpdNames, subSteps):
-    for dpdName in dpdNames:        
-        fileArg=OutputDPDFileArg #default value is pool dpd, bs and ntup handled below
-        kw = {}
-        extension='pool'
-        if type(dpdName) != type(''):
-            if hasattr (dpdName, 'TreeNames'):
-                kw['tree_names'] = dpdName.TreeNames
-                extension='root'
-                fileArg=NtupleFileArg
-                pass
-            dpdName = dpdName.StreamName
-            pass
-        dName='output'+(dpdName.lstrip("Stream"))+'File'
-        if dpdName.startswith('StreamDRAW') :
-            extension='bs'
-            fileArg=OutputSkimmedBSFileArg
-            pass
-        function=AddDPDFunctionMaker(dName, fileArg, **kw)
-        function.isOutput=extension
-        function.subSteps=subSteps
-        AddToConfigDic(dName,function)
-        pass
-    return
-
-def AddUserDPD(dpdName,subSteps):
-    fileArg=OutputDPDFileArg #default value is pool dpd, bs and ntup handled below
-    kw = {}
-    extension='pool'
-    if hasattr (dpdName, 'TreeNames'):
-        kw['tree_names'] = dpdName.TreeNames
-        extension='root'
-        fileArg=NtupleFileArg
-        dName=dpdName.trfKeyName
-        function=AddDPDFunctionMaker(dName, fileArg, **kw)
-        function.isOutput=extension
-        function.subSteps=subSteps
-        AddToConfigDic(dName,function)
-        pass
-    return
-
-# Add a try: except: protection to be usable without AtlasAnalysis project
-try:
-    from PrimaryDPDMaker.PrimaryDPDFlags import listRAWtoDPD,listESDtoDPD,listAODtoDPD
-    AddDPDs (listRAWtoDPD, ['r2e'] )
-    AddDPDs (listESDtoDPD, ['e2d'] )
-    AddDPDs (listAODtoDPD, ['a2d'] )
-except ImportError:
-    print "WARNING PrimaryDPDFlags not available. Only OK if you're using job transforms without the AtlasAnalysis project."
-
-#Add Top D2AODMs
-try:
-    from TopPhysD2PDMaker.TopPhysD2PDFlags import TopPhysAllDAODs
-    AddDPDs (TopPhysAllDAODs, ['a2d'])
-except ImportError:
-    print "WARNING TopPhysD2PDFlags not available. Only OK if you're using job transforms without the AtlasAnalysis project."
-
-# Add D3PD
-try:
-    from D3PDMakerConfig.D3PDProdFlags import listAllKnownD3PD
-    for d3pd in listAllKnownD3PD:
-        AddDPDs( [d3pd], d3pd.SubSteps )
-except ImportError:
-    print "WARNING D3PDProdFlags not available. Only OK if you're using job transforms without the AtlasAnalysis project."
-
-
-
-######################
-# Optional config args
-
-def AddTrigFilter(trf,inDic):
-    trf.add( ListOfStringsArg(name='trigFilterList') )
-    return
-AddToConfigDic('trigFilterList',AddTrigFilter)
-
-def AddRunNumber(trf,inDic):
-    trf.add( SkipEventsArg(name='RunNumber') )
-    return
-AddToConfigDic('RunNumber',AddRunNumber)
-
-def AddSkipEvents(trf,inDic):
-    trf.add( SkipEventsArg() )
-    return
-AddSkipEvents.subSteps=['first']
-AddToConfigDic('skipEvents',AddSkipEvents)
-
-def AddMaxEvents(trf,inDic):
-    trf.add( MaxEventsArg() )
-    return
-AddToConfigDic('maxEvents',AddMaxEvents)
-
-def AddMergeChunks(trf,inDic):
-    trf.add( BasicIntArg(name='mergeChunks') )
-    return
-AddToConfigDic('mergeChunks',AddMergeChunks)
-
-def AddMergeParallel(trf,inDic):
-    trf.add( BasicIntArg(name='mergeParallel') )
-    return
-AddToConfigDic('mergeParallel',AddMergeParallel)
-
-def AddDBRelease(trf,inDic):
-    trf.add( DBReleaseArg() )
-    return
-AddDBRelease.subSteps=['none']
-AddToConfigDic('DBRelease',AddDBRelease)
-
-def AddSQLite(trf,inDic):
-    trf.add( SQLiteSupport() )
-    return
-AddToConfigDic('useSQLite',AddSQLite)
-
-def AddAsetup(trf, inDic):
-    trf.add(AsetupArg())
-    return
-AddToConfigDic('asetup', AddAsetup)
-
-## New optional config
-def AddBeamType(trf,inDic):
-    from AthenaCommon.BeamFlags import beamType
-    trf.add( BasicStringArg(name='beamType',help='Beam Type, choose from %s' % beamType.allowedValues) )
-    return
-AddToConfigDic('beamType',AddBeamType)
-
-def AddAMITag(trf,inDic):
-    trf.add( BasicStringArg(name='AMITag') )
-    return
-AddToConfigDic('AMITag',AddAMITag)
-
-def AddProjectName(trf,inDic):
-    trf.add( BasicStringArg(name='projectName') )
-    return
-AddToConfigDic('projectName',AddProjectName)
-
-def AddTrigStream(trf,inDic):
-    trf.add( BasicStringArg(name='trigStream') )
-    return
-AddToConfigDic('trigStream',AddTrigStream)
-
-def AddOutputTypes(trf,inDic):
-    trf.add( BasicStringArg(name='outputTypes') )
-    return
-AddToConfigDic('outputTypes',AddOutputTypes)
-
-def AddPreInclude(trf,inDic):
-    trf.add( ListOfStringsArg(name='preInclude') )
-    return
-AddToConfigDic('preInclude',AddPreInclude)
-
-def AddPostInclude(trf,inDic):
-    trf.add( ListOfStringsArg(name='postInclude') )
-    return
-AddToConfigDic('postInclude',AddPostInclude)
-
-def AddPreExec(trf,inDic):
-    trf.add( BasicExec(name='preExec') )
-    return
-AddToConfigDic('preExec',AddPreExec)
-
-def AddPostExec(trf,inDic):
-    trf.add( BasicExec(name='postExec') )
-    return
-AddToConfigDic('postExec',AddPostExec)
-
-def AddUserExec(trf,inDic):
-    trf.add( BasicExec(name='userExec') )
-    return
-AddToConfigDic('userExec',AddUserExec)
-
-def AddTopOptions(trf,inDic):
-    trf.add( BasicStringArg(name='topOptions') )
-    return
-AddToConfigDic('topOptions',AddTopOptions)
-
-def AddOfflinePrescales(trf,inDic):
-    trf.add( ListOfStringsArg(name='prescales') )
-    return
-AddOfflinePrescales.subSteps=['e2d','a2d']
-AddToConfigDic('prescales',AddOfflinePrescales)
-
-def AddAutoConfiguration(trf,inDic):
-    trf.add( ListOfStringsArg(name='autoConfiguration') )
-    return
-AddToConfigDic('autoConfiguration',AddAutoConfiguration)
-
-def AddTriggerConfig(trf,inDic):
-    trf.add( BasicStringArg(name='triggerConfig') )
-    return
-AddToConfigDic('triggerConfig',AddTriggerConfig)
-
-def AddGeometryVersion(trf,inDic):
-    trf.add( GeometryVersionArg() )
-    return
-AddToConfigDic('geometryVersion',AddGeometryVersion)
-
-def AddConditionsTag(trf,inDic):
-    trf.add( ConditionsTagArg() )
-    return
-AddToConfigDic('conditionsTag',AddConditionsTag)
-
-def AddEventSelectorQuery(trf, inDic):
-    trf.add(EventSelectorQueryArg(name='eventSelectorQuery'))
-    return
-AddToConfigDic('eventSelectorQuery', AddEventSelectorQuery)
-
-#######################
-# Mandatory input files
-def AddInputFile(trf,inDic):    
-    trf.add( ListOfStringsArg(name='inputFile') )
-    return
-AddInputFile.isInput=True
-AddToConfigDic('inputFile',AddInputFile)
-
-def AddInputTAGFile(trf,inDic):    
-    trf.add( ListOfStringsArg(name='inputTAGFile') )
-    return
-AddInputTAGFile.subSteps=['e2d','a2d']
-AddInputTAGFile.isInput=True
-AddToConfigDic('inputTAGFile',AddInputTAGFile)
-
-def AddInputTAG_AODFile(trf,inDic):    
-    trf.add( ListOfStringsArg(name='inputTAG_AODFile') )
-    return
-AddInputTAG_AODFile.subSteps=['a2d','a2a']
-AddInputTAG_AODFile.isInput=True
-AddToConfigDic('inputTAG_AODFile',AddInputTAG_AODFile)
-
-def AddInputEvgenFile(trf,inDic):    
-    trf.add( InputEvgenFileArg() )
-    return
-AddInputEvgenFile.subSteps=['e2h']
-AddInputEvgenFile.isInput=True
-AddToConfigDic('inputEvgenFile',AddInputEvgenFile)
-
-def AddInputHITSFile(trf,inDic):    
-    trf.add( InputHitsFileArg() )
-    return
-AddInputHITSFile.subSteps=['h2r','mergeHITS']
-AddInputHITSFile.isInput=True
-AddToConfigDic('inputHitsFile',AddInputHITSFile)
-
-def AddInputBSFile(trf,inDic):    
-    trf.add( InputBSFileArg() )
-    return
-AddInputBSFile.subSteps=['r2r','r2e']
-AddInputBSFile.isInput=True
-AddToConfigDic('inputBSFile',AddInputBSFile)
-
-def AddInputRDOFile(trf,inDic):    
-    trf.add( InputRDOFileArg() )
-    return
-AddInputRDOFile.subSteps=['r2e','r2b','mergeRDO','rdo2rdotrig']
-AddInputRDOFile.isInput=True
-AddToConfigDic('inputRDOFile',AddInputRDOFile)
-
-def AddInputEVNTFile(trf,inDic):    
-    trf.add( InputRDOFileArg(name='inputEVNTFile') )
-    return
-AddInputEVNTFile.subSteps=['r2e','a2d']
-AddInputEVNTFile.isInput=True
-AddToConfigDic('inputEVNTFile',AddInputEVNTFile)
-
-def AddInputESDFile(trf,inDic):    
-    trf.add( InputESDFileArg() )
-    return
-AddInputESDFile.subSteps=['e2a','e2d','e2e','merge']
-AddInputESDFile.isInput=True
-AddToConfigDic('inputESDFile',AddInputESDFile)
-
-def AddInputAODFile(trf,inDic):    
-    trf.add( InputAODFileArg() )
-    return
-AddInputAODFile.subSteps=['a2d','a2t','merge']
-AddInputAODFile.isInput=True
-AddToConfigDic('inputAODFile',AddInputAODFile)
-
-class LogsFile( FileType ):
-    defaultContents = 'logs'
-    defaultType = 'tgz'
-    def __init__(self,contents=defaultContents,type=defaultType):
-        FileType.__init__(self,type,contents)
-
-    def getGUID(self,filename):
-        print "Checking!! LogsFile.getGUID"
-        if TRF_SETTING[ 'testrun' ]:
-            return None
-        guid = str(uuid.uuid4()).upper()
-        print "GUID retrieval: %s (%s) generated with uuid.uuid4()" % ( guid, filename )
-        return guid
- 
-
-class InputLogsFileArg(InputDataFileArg):
-    """Input file - log tarballs """
-    def __init__(self,help='default',name='default'):
-        InputDataFileArg.__init__(self,help,LogsFile(),name)
-         
-    def MoveLogs(self):
-        vals = self.value()
-        for val in vals:
-            if not fileutil.exists(val):
-                found = fileutil.exists_suffix_number(val + '.')
-                if not found:
-                    code = AtlasErrorCodes.getCode('TRF_INFILE_NOTFOUND')
-                    raise InputFileError( val, 'not found. Argument %s' % (self.name()), code )
-                if found != val:
-                    self.logger().warning('replacing %s with %s' % (val,found) )
-                    vals[vals.index(val)] = found
-                    val = found
-            newval=val+".merged"
-            os.system('mv -f %s %s' % (val,newval))
-
-    def postRunAction(self):
-        if not self: return
-        #InputDataFileArg.postRunAction(self)
-        self.MoveLogs()
-
-
-    def isFullArgument(self):
-        return True
-
-def AddInputLogsFile(trf,inDic):    
-    trf.add( InputLogsFileArg() )
-    return
-AddInputLogsFile.subSteps=['mergeHITS']
-AddInputLogsFile.isInput=True
-AddToConfigDic('inputLogsFile',AddInputLogsFile)
-
-########################
-## Optional output files
-def AddOutputEvgenFile(trf,inDic):
-    trf.add( OutputEvgenFileArg() )
-    return
-AddOutputEvgenFile.subSteps=['e2h']
-AddOutputEvgenFile.isOutput='pool'
-AddToConfigDic('outputEvgenFile',AddOutputEvgenFile)
-
-def AddOutputHITFile(trf,inDic):
-    trf.add( OutputHitsFileArg() )
-    return
-AddOutputHITFile.subSteps=['e2h','mergeHITS']
-AddOutputHITFile.isOutput='pool'
-AddToConfigDic('outputHitsFile',AddOutputHITFile)
-
-def AddOutputRDOFile(trf,inDic):
-    trf.add( OutputRDOFileArg() )
-    return
-AddOutputRDOFile.subSteps=['h2r','mergeRDO','rdo2rdotrig']
-AddOutputRDOFile.isOutput='pool'
-AddToConfigDic('outputRDOFile',AddOutputRDOFile)
-
-def AddOutputBSFile(trf,inDic):
-    trf.add( OutputBSFileArg() )
-    return
-AddOutputBSFile.subSteps=['r2r','r2b']
-AddOutputBSFile.isOutput='bs'
-AddToConfigDic('outputBSFile',AddOutputBSFile)
-
-def AddOutputESDFile(trf,inDic):
-    trf.add( OutputESDFileArg() )
-    return
-AddOutputESDFile.subSteps=['r2e','e2e','merge']
-AddOutputESDFile.isOutput='pool'
-AddToConfigDic('outputESDFile',AddOutputESDFile)
-
-def AddOutputAODFile(trf,inDic):
-    trf.add( OutputAODFileArg() )
-    return
-AddOutputAODFile.subSteps=['e2a','merge','a2d']
-AddOutputAODFile.isOutput='pool'
-AddToConfigDic('outputAODFile',AddOutputAODFile)
-
-def AddTmpRDOFile(trf,inDic):
-    trf.add( OutputRDOFileArg(name='tmpRDO',temporary=True) )
-    return
-AddTmpRDOFile.subSteps=['h2r']
-AddTmpRDOFile.isOutput='pool'
-AddToConfigDic('tmpRDO',AddTmpRDOFile)
-
-def AddTmpESDFile(trf,inDic):
-    trf.add( OutputESDFileArg(name='tmpESD',temporary=True) )
-    return
-AddTmpESDFile.subSteps=['r2e','e2e']
-AddTmpESDFile.isOutput='pool'
-AddToConfigDic('tmpESD',AddTmpESDFile)
-
-def AddTmpAODFile(trf,inDic):
-    trf.add( OutputAODFileArg(name='tmpAOD',temporary=True) )
-    return
-AddTmpAODFile.subSteps=['e2a']
-AddTmpAODFile.isOutput='pool'
-AddToConfigDic('tmpAOD',AddTmpAODFile)
-
-def AddOutputDQMonitorFile(trf,inDic):
-    trf.add( MonitorHistArg(name='outputDQMonitorFile',temporary=True) )
-    return
-AddOutputDQMonitorFile.subSteps=['none']
-AddOutputDQMonitorFile.isOutput='root'
-AddToConfigDic('outputDQMonitorFile',AddOutputDQMonitorFile)
-
-def AddHIST(trf,inDic):
-    trf.add( MonitorHistArg(name='outputHISTFile') )
-    return
-AddHIST.subSteps=['none']
-AddHIST.isOutput='root'
-AddToConfigDic('outputHISTFile',AddHIST)
-
-def AddOutputHIST_PHYSVALMONFile(trf,inDic):
-    trf.add( NtupleFileArg(name='outputHIST_PHYSVALMONFile') )
-    return
-AddOutputHIST_PHYSVALMONFile.subSteps=['e2a']
-AddOutputHIST_PHYSVALMONFile.isOutput='root'
-AddToConfigDic('outputHIST_PHYSVALMONFile',AddOutputHIST_PHYSVALMONFile)
-
-def AddCBNT(trf,inDic):
-    trf.add( NtupleFileArg(name='outputCBNTFile') )
-    return
-AddCBNT.subSteps=['r2e']
-AddCBNT.isOutput='root'
-AddToConfigDic('outputCBNTFile',AddCBNT)
-
-def AddOutputNTUP_MUFASTFile(trf,inDic):
-    trf.add( NtupleFileArg(name='outputNTUP_MUFASTFile') )
-    return
-AddOutputNTUP_MUFASTFile.subSteps=['r2r']
-AddOutputNTUP_MUFASTFile.isOutput='root'
-AddToConfigDic('outputNTUP_MUFASTFile',AddOutputNTUP_MUFASTFile)
-
-def AddOutputHIST_TRIGEXPERTFile(trf,inDic):
-    trf.add( NtupleFileArg(name='outputHIST_TRIGEXPERTFile') )
-    return
-AddOutputHIST_TRIGEXPERTFile.subSteps=['r2r']
-AddOutputHIST_TRIGEXPERTFile.isOutput='root'
-AddToConfigDic('outputHIST_TRIGEXPERTFile',AddOutputHIST_TRIGEXPERTFile)
-
-def AddOutputTXT_FTKIPFile(trf, inDic):
-    trf.add(OutputFTKIPFileArg(name='outputTXT_FTKIPFile'))
-    return
-AddOutputTXT_FTKIPFile.subSteps=['r2e']
-AddOutputTXT_FTKIPFile.isOutput='txt.bz2'
-AddToConfigDic('outputTXT_FTKIPFile', AddOutputTXT_FTKIPFile)
-
-def AddOutputNTUP_FTKIPFile(trf, inDic):
-    trf.add(NtupleFileArg(name='outputNTUP_FTKIPFile'))
-    return
-AddOutputNTUP_FTKIPFile.subSteps=['r2e']
-AddOutputNTUP_FTKIPFile.isOutput='root'
-AddToConfigDic('outputNTUP_FTKIPFile', AddOutputNTUP_FTKIPFile)
-
-def AddOutputTXT_JIVEXMLTGZFile(trf, inDic):
-    trf.add(OutputJiveXMLTGZFileArg(name='outputTXT_JIVEXMLTGZFile'))
-AddOutputTXT_JIVEXMLTGZFile.subSteps=['r2e', 'e2a']
-AddOutputTXT_JIVEXMLTGZFile.isOutput='XML.tar.gz'
-AddToConfigDic('outputTXT_JIVEXMLTGZFile', AddOutputTXT_JIVEXMLTGZFile)
-
-def AddNTUP_BTAG(trf,inDic):
-    trf.add( NtupleFileArg(name='outputNTUP_BTAGFile',tree_names="vtuple") )
-    return
-AddNTUP_BTAG.subSteps=['e2a','a2d']
-AddNTUP_BTAG.isOutput='root'
-AddToConfigDic('outputNTUP_BTAGFile',AddNTUP_BTAG)
-
-def AddNTUP_SMEW(trf,inDic):
-    trf.add( NtupleFileArg(name='outputNTUP_SMEWFile',tree_names="wwd3pd") )
-    return
-AddNTUP_SMEW.subSteps=['a2d']
-AddNTUP_SMEW.isOutput='root'
-AddToConfigDic('outputNTUP_SMEWFile',AddNTUP_SMEW)
-
-# this is obsolete, commenting it out for the time being
-#def AddNTUP_1LHSG2(trf,inDic):
-#    trf.add( NtupleFileArg(name='outputNTUP_1LHSG2File',tree_names="physics") )
-#    return
-#AddNTUP_1LHSG2.subSteps=['a2d']
-#AddNTUP_1LHSG2.isOutput='root'
-#AddToConfigDic('outputNTUP_1LHSG2File',AddNTUP_1LHSG2)
-
-# this is obsolete, commenting it out for the time being
-#def AddNTUP_2LHSG2(trf,inDic):
-#    trf.add( NtupleFileArg(name='outputNTUP_2LHSG2File',tree_names="physics") )
-#    return
-#AddNTUP_2LHSG2.subSteps=['a2d']
-#AddNTUP_2LHSG2.isOutput='root'
-#AddToConfigDic('outputNTUP_2LHSG2File',AddNTUP_2LHSG2)
-
-# this is obsolete, commenting it out for the time being
-#def AddNTUP_HSG2(trf,inDic):
-#    trf.add( NtupleFileArg(name='outputNTUP_HSG2File',tree_names="physics") )
-#    return
-#AddNTUP_HSG2.subSteps=['a2d']
-#AddNTUP_HSG2.isOutput='root'
-#AddToConfigDic('outputNTUP_HSG2File',AddNTUP_HSG2)
-#
-def AddNTUP_WZ(trf,inDic):
-    trf.add( NtupleFileArg(name='outputNTUP_WZFile',tree_names="physics") )
-    return
-AddNTUP_WZ.subSteps=['e2d']
-AddNTUP_WZ.isOutput='root'
-AddToConfigDic('outputNTUP_WZFile',AddNTUP_WZ)
-
-def AddNTUP_TRT(trf,inDic):
-    trf.add( NtupleFileArg(name='outputNTUP_TRTFile',tree_names="MyCollectionTree") )
-    return
-AddNTUP_TRT.subSteps=['e2d']
-AddNTUP_TRT.isOutput='root'
-AddToConfigDic('outputNTUP_TRTFile',AddNTUP_TRT)
-
-def AddNTUP_PROMPTPHOT(trf,inDic):
-    trf.add( NtupleFileArg(name='outputNTUP_PROMPTPHOTFile',tree_names=["PAUReco","HggUserData"]) )
-    return
-AddNTUP_PROMPTPHOT.subSteps=['e2d','a2d']
-AddNTUP_PROMPTPHOT.isOutput='root'
-AddToConfigDic('outputNTUP_PROMPTPHOTFile',AddNTUP_PROMPTPHOT)
-
-def AddNTUP_TRKVALID(trf,inDic):
-    trf.add( NtupleFileArg(name='outputNTUP_TRKVALIDFile',tree_names="Validation/EventToTrackLink") )
-    return
-AddNTUP_TRKVALID.subSteps=['r2e']
-AddNTUP_TRKVALID.isOutput='root'
-AddToConfigDic('outputNTUP_TRKVALIDFile',AddNTUP_TRKVALID)
-
-def AddNTUP_MUONCALIB(trf,inDic):
-    trf.add( NtupleFileArg(name='outputNTUP_MUONCALIBFile',tree_names="PatternNtupleMaker/Segments") )
-    return
-AddNTUP_MUONCALIB.subSteps=['r2e','e2e']
-AddNTUP_MUONCALIB.isOutput='root'
-AddToConfigDic('outputNTUP_MUONCALIBFile',AddNTUP_MUONCALIB)
-
-def AddNTUP_MCP(trf,inDic):
-    trf.add( NtupleFileArg(name="outputNTUP_MCPFile",tree_names="CollectionTree") )
-    return
-AddNTUP_MCP.subSteps=['e2d']
-AddNTUP_MCP.isOutput='root'
-AddToConfigDic('outputNTUP_MCPFile', AddNTUP_MCP)
-
-def AddNTUP_HECNOISE(trf,inDic):
-    trf.add( NtupleFileArg(name='outputNTUP_HECNOISEFile',tree_names="HECNoise") )
-    return
-AddNTUP_HECNOISE.subSteps=['e2d']
-AddNTUP_HECNOISE.isOutput='root'
-AddToConfigDic('outputNTUP_HECNOISEFile',AddNTUP_HECNOISE)
-
-def AddOutputNTUP_SCTFile(trf,inDic):
-    trf.add( NtupleFileArg( name='outputNTUP_SCTFile',tree_names="InDetTrackTree" ) )
-    return
-AddOutputNTUP_SCTFile.subSteps=['r2e','e2d','a2d']
-AddOutputNTUP_SCTFile.isOutput='root'
-AddToConfigDic('outputNTUP_SCTFile',AddOutputNTUP_SCTFile)
-
-def AddNTUP_ENHBIAS(trf,inDic):
-    trf.add( NtupleFileArg(name='outputNTUP_ENHBIASFile',tree_names="vertices") )
-    return
-AddNTUP_ENHBIAS.subSteps=['e2a','e2d']
-AddNTUP_ENHBIAS.isOutput='root'
-AddToConfigDic('outputNTUP_ENHBIASFile',AddNTUP_ENHBIAS)
-
-def AddNTUP_TRUTH(trf,inDic):
-    trf.add( NtupleFileArg(name='outputNTUP_TRUTHFile',tree_names="truth") )
-    return
-AddNTUP_TRUTH.subSteps=['a2d']
-AddNTUP_TRUTH.isOutput='root'
-AddToConfigDic('outputNTUP_TRUTHFile',AddNTUP_TRUTH)
-
-def AddNTUP_SUSYTRUTH(trf,inDic):
-    trf.add( NtupleFileArg(name='outputNTUP_SUSYTRUTHFile',tree_names="susytruth") )
-    return
-AddNTUP_SUSYTRUTH.subSteps=['a2d']
-AddNTUP_SUSYTRUTH.isOutput='root'
-AddToConfigDic('outputNTUP_SUSYTRUTHFile',AddNTUP_SUSYTRUTH)
-
-def AddNTUP_HIGHMULT(trf, inDic):
-    trf.add( NtupleFileArg(name='outputNTUP_HIGHMULTFile', tree_names="MinBiasTree") )
-    return
-AddNTUP_HIGHMULT.subSteps=['e2a']
-AddNTUP_HIGHMULT.isOutput='root'
-AddToConfigDic('outputNTUP_HIGHMULTFile', AddNTUP_HIGHMULT)
-
-def AddOutputTAGFile(trf,inDic):
-    trf.add( OutputTAGFileArg() )
-    return
-AddOutputTAGFile.subSteps=['e2a','a2t']
-AddOutputTAGFile.isOutput='root'
-AddToConfigDic('outputTAGFile',AddOutputTAGFile)
-
-def AddTAGComm(trf,inDic):
-    trf.add( OutputTAGFileArg(name='outputTAG_COMMFile') )
-    return
-AddTAGComm.subSteps=['r2e']
-AddTAGComm.isOutput='root'
-AddToConfigDic('outputTAG_COMMFile',AddTAGComm)
-
-def AddDESDM_BEAMSPOTFile(trf,inDic):
-    trf.add( OutputDPDFileArg(name='outputDESDM_BEAMSPOTFile') )
-    return
-AddDESDM_BEAMSPOTFile.subSteps=['e2a']
-AddDESDM_BEAMSPOTFile.isOutput='pool'
-AddToConfigDic('outputDESDM_BEAMSPOTFile',AddDESDM_BEAMSPOTFile)
-
-#def AddDAOD_2LHSG2File(trf,inDic):
-#    trf.add( OutputDPDFileArg(name='outputDAOD_2LHSG2File') )
-#    return
-#AddDAOD_2LHSG2File.subSteps=['a2d','e2d']
-#AddDAOD_2LHSG2File.isOutput='pool'
-#AddToConfigDic('outputDAOD_2LHSG2File',AddDAOD_2LHSG2File)
-#
-#def AddDAOD_4LHSG2File(trf,inDic):
-#    trf.add( OutputDPDFileArg(name='outputDAOD_4LHSG2File') )
-#    return
-#AddDAOD_4LHSG2File.subSteps=['a2d','e2d']
-#AddDAOD_4LHSG2File.isOutput='pool'
-#AddToConfigDic('outputDAOD_4LHSG2File',AddDAOD_4LHSG2File)
-#
-
-def AddDAOD_HSG2File(trf,inDic):
-    trf.add( OutputDPDFileArg(name='outputDAOD_HSG2File') )
-    return
-AddDAOD_HSG2File.subSteps=['a2d','e2d']
-AddDAOD_HSG2File.isOutput='pool'
-AddToConfigDic('outputDAOD_HSG2File',AddDAOD_HSG2File)
-
-
-#Dummy outputs for ESD/AOD synonyms. Special case: do not copy this example! 
-def AddOutputDESD_ZEEFile(trf,inDic):
-    trf.add( OutputESDFileArg( name='outputDESD_ZEEFile' ) )
-    return
-AddOutputDESD_ZEEFile.subSteps=['none']
-AddOutputDESD_ZEEFile.isOutput='pool'
-AddToConfigDic('outputDESD_ZEEFile',AddOutputDESD_ZEEFile)
-
-def AddOutputDAOD_ZEEFile(trf,inDic):
-    trf.add( OutputESDFileArg( name='outputDAOD_ZEEFile' ) )
-    return
-AddOutputDAOD_ZEEFile.subSteps=['none']
-AddOutputDAOD_ZEEFile.isOutput='pool'
-AddToConfigDic('outputDAOD_ZEEFile',AddOutputDAOD_ZEEFile)
-
-def AddOutputDESD_ZMUMUFile(trf,inDic):
-    trf.add( OutputESDFileArg( name='outputDESD_ZMUMUFile' ) )
-    return
-AddOutputDESD_ZMUMUFile.subSteps=['none']
-AddOutputDESD_ZMUMUFile.isOutput='pool'
-AddToConfigDic('outputDESD_ZMUMUFile',AddOutputDESD_ZMUMUFile)
-
-def AddOutputDAOD_ZMUMUFile(trf,inDic):
-    trf.add( OutputESDFileArg( name='outputDAOD_ZMUMUFile' ) )
-    return
-AddOutputDAOD_ZMUMUFile.subSteps=['none']
-AddOutputDAOD_ZMUMUFile.isOutput='pool'
-AddToConfigDic('outputDAOD_ZMUMUFile',AddOutputDAOD_ZMUMUFile)
-
-def AddOutputDESD_WENUFile(trf,inDic):
-    trf.add( OutputESDFileArg( name='outputDESD_WENUFile' ) )
-    return
-AddOutputDESD_WENUFile.subSteps=['none']
-AddOutputDESD_WENUFile.isOutput='pool'
-AddToConfigDic('outputDESD_WENUFile',AddOutputDESD_WENUFile)
-
-def AddOutputDAOD_WENUFile(trf,inDic):
-    trf.add( OutputESDFileArg( name='outputDAOD_WENUFile' ) )
-    return
-AddOutputDAOD_WENUFile.subSteps=['none']
-AddOutputDAOD_WENUFile.isOutput='pool'
-AddToConfigDic('outputDAOD_WENUFile',AddOutputDAOD_WENUFile)
-
-def AddOutputDESD_WMUNUFile(trf,inDic):
-    trf.add( OutputESDFileArg( name='outputDESD_WMUNUFile' ) )
-    return
-AddOutputDESD_WMUNUFile.subSteps=['none']
-AddOutputDESD_WMUNUFile.isOutput='pool'
-AddToConfigDic('outputDESD_WMUNUFile',AddOutputDESD_WMUNUFile)
-
-def AddOutputDAOD_WMUNUFile(trf,inDic):
-    trf.add( OutputESDFileArg( name='outputDAOD_WMUNUFile' ) )
-    return
-AddOutputDAOD_WMUNUFile.subSteps=['none']
-AddOutputDAOD_WMUNUFile.isOutput='pool'
-AddToConfigDic('outputDAOD_WMUNUFile',AddOutputDAOD_WMUNUFile)
-
-# Note that FASTMON and PHYSVAL have no well-defined event count
-def AddOutputNTUP_FASTMONFile(trf,inDic):
-    #trf.add(MonitorHistArg(name='outputNTUP_FASTMONFile') )
-    trf.add(NtupleFileArg(name='outputNTUP_FASTMONFile', tree_names="") )
-    return
-AddOutputNTUP_FASTMONFile.subSteps=['a2t','a2d','e2a']
-AddOutputNTUP_FASTMONFile.isOutput='root'
-AddToConfigDic('outputNTUP_FASTMONFile',AddOutputNTUP_FASTMONFile)
-
-def AddNTUP_PHYSVAL(trf,inDic):
-    trf.add(NtupleFileArg(name='outputNTUP_PHYSVALFile',tree_names=""))
-AddNTUP_PHYSVAL.subSteps=['e2d', 'a2d']
-AddNTUP_PHYSVAL.isOutput='root'
-AddToConfigDic('outputNTUP_PHYSVALFile', AddNTUP_PHYSVAL)
-
-
-def AddOutputNTUP_LARNOISE(trf,inDic):
-    trf.add( NtupleFileArg(name='outputNTUP_LARNOISEFile', tree_names="CollectionTree") )
-    return
-AddOutputNTUP_LARNOISE.subSteps=['e2d']
-AddOutputNTUP_LARNOISE.isOutput='root'
-AddToConfigDic('outputNTUP_LARNOISEFile',AddOutputNTUP_LARNOISE)
-
-## def Add(trf,inDic):
-##     return
-## AddToConfigDic('',)
-
-def AddFastMerge(trf,inDic):
-    trf.add(BasicBoolArg(name='fastPoolMerge'))
-    return
-AddToConfigDic('fastPoolMerge',AddFastMerge)
-
-
-def AddRDOTrigger(trf,inDic):
-    trf.add(BasicBoolArg(name='doRDOTrigger'))
-    return
-AddToConfigDic('doRDOTrigger',AddRDOTrigger)
-
-def AddSortInputFiles(trf,inDic):
-    trf.add(BasicBoolArg(name='sortInputFiles'))
-    return
-AddToConfigDic('sortInputFiles',AddSortInputFiles)
-
-def AddD3PDVal(trf,inDic):
-    trf.add( ListOfStringsArg(name='d3pdVal') )
-    return
-AddToConfigDic('d3pdVal',AddD3PDVal)
-
-def AddInputNTUPFunctionMaker(name, tree_names):
-    def AddInputNTUPXXX(trf,inDic):
-        trf.add(InputNtupleFileArg(name=name,tree_names=tree_names))
-        return
-    return AddInputNTUPXXX
-
-def AddInputNTUPs():
-    #Derive an inputNTUPXXXFile adder for every registered outputNTUPXXXFile
-    ConfigDicCopy=ConfigDic.copy()
-    for key in ConfigDicCopy:
-        trf=set() #a set works as a dummy transform here: the adders only call trf.add()
-        ConfigDicCopy[key](trf=trf,inDic={})
-        for x in trf:
-            if key.startswith('outputNTUP') and key.endswith('File') and isinstance(x, NtupleFileArg):
-                tree_names=x._fileType.tree_names
-                name=key.replace('outputNTUP','inputNTUP')
-                function=AddInputNTUPFunctionMaker(name, tree_names)
-                AddToConfigDic(name,function)
-    return
-
-AddInputNTUPs()
-
-
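
For reference, the removed file was organised around a single registration pattern: every argument key maps to a small adder function whose subSteps/isOutput attributes route it to the right sub-step, and AddInputNTUPs derives an inputNTUP* twin for every outputNTUP* key. A minimal standalone sketch, where AddOutputExampleFile and DummyTrf are made-up stand-ins while AddToConfigDic, subSteps and isOutput follow the conventions above:

ConfigDic = {}

def AddToConfigDic(key, function):
    ConfigDic[key] = function

def AddOutputExampleFile(trf, inDic):
    trf.add('OutputExampleFileArg')           #stand-in for the real *FileArg object
AddOutputExampleFile.subSteps = ['e2a']       #sub-steps that may produce this output
AddOutputExampleFile.isOutput = 'pool'        #file format tag
AddToConfigDic('outputExampleFile', AddOutputExampleFile)

class DummyTrf(object):
    def add(self, arg):
        print('added %s' % arg)

#a transform looks up the adder for each command-line key:
ConfigDic['outputExampleFile'](trf=DummyTrf(), inDic={})
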
diff --git a/PhysicsAnalysis/PATJobTransforms/python/DPDUtils.py b/PhysicsAnalysis/PATJobTransforms/python/DPDUtils.py
index 607403b1cf6bed760d0701c31db0747a7acf1e0b..9480ee8ec9d88dc498910362cba1afebcb8a9173 100644
--- a/PhysicsAnalysis/PATJobTransforms/python/DPDUtils.py
+++ b/PhysicsAnalysis/PATJobTransforms/python/DPDUtils.py
@@ -1,166 +1,12 @@
 #!/usr/bin/env python
 
 # Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+#
+# Temporary support for the old ESDtoDPD workflow - should be updated to the
+# RecJobTransforms version in the next release... (Captain's Log, Stardate 2016-04-08)
 
-# Creation: David Cote (DESY), 1st July 2008
-# Usage: Utility functions for DPD transforms
-
-from PATJobTransforms.ArgDicTools import SysArgsExceptionCatcher
 from RecExConfig.RecoFunctions import AddValidItemToList
 
-def outputBSNameStripper(fullName):
-    if not fullName.endswith('._0001.data'):
-        raise RuntimeError("Illegal output bytestream name '%s'. The name must end with '._0001.data'."%fullName)
-
-    strippedName=fullName[:-len('._0001.data')] #slice off the suffix (str.strip removes characters, not a suffix)
-    return strippedName
-
-# This function interprets sysArgs and returns the number of output DPDs
-def nDPDGetter(sysArgs,key,defaultPosition):
-    #First look for known exceptions
-    if SysArgsExceptionCatcher(sysArgs) == "Help": #'==', not 'is': string identity is unreliable
-        return 0
-
-    #Then look for the given key (e.g. nbOutputDPD= )
-    for ar in sysArgs:
-        if isinstance(ar,str) and ar.startswith(key):
-            return int(ar[len(key):])
-        
-    #If key not found, use the defaultPosition argument
-    try:
-        nDPD=int(sysArgs[defaultPosition])
-    except:
-        raise RuntimeError("Unable to find nDPD with default key '%s' or default position "%key+str(defaultPosition))
-
-    return nDPD
-
-
-#---------------------------------------------------------
-# This function interprets sysArgs and returns the input file name according to key or defaultPosition
-def inputFileGetter(sysArgs,key,defaultPosition):
-    #First look for known exceptions
-    if SysArgsExceptionCatcher(sysArgs) == "Help": #'==', not 'is': string identity is unreliable
-        return "dummyFileName"
-
-    #Then look for the given key (e.g. inputBSFile=, inputESDFile= , inputAODFile=)
-    for ar in sysArgs:
-        if isinstance(ar,str) and ar.startswith(key):
-            return ar[len(key):]
-
-    #If key not found, use the defaultPosition argument
-    fileName=sysArgs[defaultPosition]
-    if not isinstance(fileName,str):
-        raise RuntimeError("Unable to find input file with arg key '%s' or default position "%key+str(defaultPosition))
-
-    return fileName
-
-#---------------------------------------------------------
-# This function returns a dictionary with DPD configuration from the inputESD name.
-# The DPD configuration is taken from PrimaryDPDMaker.
-try:
-    from PrimaryDPDMaker.JobTransformConfiguration import *
-except ImportError:
-    print "WARNING PrimaryDPDMaker not available. Only OK if you're using job transforms without the AtlasAnalysis project."
-
-def DPDConfigFromInputFile(inputFile):
-    if inputFile.endswith("ESD.TCT.pool.root") or inputFile.endswith("AOD.TCT.pool.root"):
-        print "Configuring DPD for TCT...\n"
-        return TCTConfiguration()
-    
-    elif inputFile.find(".physics.Express.") >= 0:
-        print "Configuring DPD for physics Express stream...\n"
-        return TCTConfiguration()
-    
-    elif inputFile.find(".physics_Egamma.") >= 0 or inputFile.find(".physics.Egamma.") >= 0 :
-        print "Configuring DPD for EGamma trigger stream...\n"
-        return EGammaStreamConfiguration()
-
-    elif inputFile.find(".physics_Muon.") >= 0 or inputFile.find(".physics.Muon.") >= 0:
-        print "Configuring DPD for Muon trigger stream...\n"
-        return MuonStreamConfiguration()
-
-    elif inputFile.find(".physics_Jet.") >= 0 or inputFile.find(".physics.Jet.") >= 0:
-        print "Configuring DPD for Jet trigger stream...\n"
-        return JetStreamConfiguration()
-
-    elif inputFile.find(".physics_Minbias.") >= 0 or inputFile.find(".physics.Minbias.") >= 0:
-        print "Configuring DPD for MinBias trigger stream...\n"
-        return MinBiasConfiguration()
-
-    #elif inputFile.find(".physics_Bphys.") >= 0 or inputFile.find(".physics.Bphys.") >= 0:
-    #    print "Configuring DPD for Bphys trigger stream...\n"
-    #    return BphysConfiguration()
-
-    else:
-        print "WARNING no DPD configuration known for input file named %s"%inputFile
-        print "***** No DPD will be produced in this job *****\n"
-        conf={}
-        conf['nDPD']=0
-        return conf
-        #raise RuntimeError("Unable to setup DPD from unknown input file name %s"%inputFile)
-    return -1
-
-
-#-------------------------------------------
-# This function interprets sysArgs and returns the Tier-0 DPD names according to key 
-def GetDPDNamesAndRemoveFromSysArgs(sysArgs,key,nPotentialDPD,conf):
-
-    nDPD=conf['nDPD']
-    if nDPD>0:
-        T0ArgPositions = conf['T0ArgPositions']
-        DefaultDPDNames = conf['DefaultDPDNames']
-
-    #First get *potential* DPD names from the command line and remove them from sys.argv
-    #Example: DPDName1=MainEGamma.pool.root DPDName4=SingleElectron.pool.root DPDName7=SinglePhoton.pool.root 
-    T0Names=[]
-    T0Positions=[]
-    i=1    
-    while i<=nPotentialDPD:
-        defPos=0 # positional argument not yet supported
-        try:
-            tmp=T0ArgPositions.index(i-1)
-            msgSuffix="(DPD #%i)"%tmp
-        except:
-            msgSuffix="(potential DPD ignored)"
-
-        iKey=key+str(i)+"="
-        tmpT0Name=inputFileGetter(sysArgs,iKey,defPos)
-        zeArg=iKey+tmpT0Name
-        try:
-            sysArgs.remove(zeArg)
-            print "Removed %s from sys.argv. Will use this name instead of default."%zeArg,msgSuffix
-            T0Names.append( tmpT0Name )
-            T0Positions.append( i-1 )
-        except:
-            print "%s not specified. Will use default name."%iKey,msgSuffix
-        
-        i+=1
-
-    #Now get final DPD names, either from T0Names or DefaultNames
-    nDef=0
-    nT0=0
-    DPDNames=[]
-    for i in xrange(nDPD):
-        try:
-            t0Ind=T0Positions.index(T0ArgPositions[i])
-            DPDNames.append(T0Names[t0Ind])
-            nT0+=1
-        except:
-            DPDNames.append(DefaultDPDNames[i])
-            nDef+=1
-
-    #final coherence check...
-    if nDef>0 and nT0==0:
-        print "Auto-configuration completely from default DPD names."
-    elif nDef==0 and nT0>0: 
-        print "File names completely specified by DPDName1 ... DPDName%i."%nPotentialDPD
-    elif nDef>0 and nT0>0:
-        print "WARNING job configured with a mixture of default and T0 names... is it really what you want?"
-    if nDPD != (nDef+nT0):
-        raise RuntimeError("Error in auto-configuration: nDPD !=(nDef+nT0)")
-    
-    return DPDNames
-
 def SetupOutputDPDs(runArgs,flagContainerList):
     DPDMakerScripts=[]
     for flagContainer in flagContainerList:
@@ -183,64 +29,3 @@ def SetupOutputDPDs(runArgs,flagContainerList):
                         raise RuntimeError("%s is not conform with the job transform convention, hence unusable. Please fix this."%dpdName)
 
     return DPDMakerScripts
-
-def SetupDPDIncludes(runArgs,flagContainerList,includeType):
-    """This function can be used to include all the pre-include scripts
-       defined for the requested DPD types.
-
-       Returns a list of output types in which a pre/post-include was used."""
-    # We must not produce multiple output types when pre/post-include
-    # scripts are involved internally. This variable keeps track of
-    # the output types which have used the pre/postInclude functionality
-    includeTypeList = []
-    # Make a logger for the function:
-    from AthenaCommon.Logging import logging
-    logger = logging.getLogger( "SetupDPDIncludes" )
-    # Loop over all DPD job properties:
-    for flagContainer in flagContainerList:
-        for flagName in flagContainer.__dict__.keys():
-            flag=getattr(flagContainer,flagName)
-            # Check if this DPD type has been requested in the job:
-            if hasattr(flag,"StreamName"):
-                dpdName=flag.StreamName.lstrip("Stream")
-                argName='output'+dpdName+'File'
-                if hasattr(runArgs,argName):
-                    # We found a match for an output, however if there was already a script 
-                    # included, then we may be in trouble (note that we print an error even when
-                    # the current output type does not itself have an include, because the include 
-                    # that already exists can even mess up this new output type).
-                    if len(includeTypeList) > 0:
-                        logger.error( "Multiple output DPD types requested with "
-                                      "pre/post-includes present: {0} after includes from {1}".format(argName, includeTypeList) )
-                        logger.error( "This will most probably lead to bad output" )
-                        pass
-                    # Only some DPD types define pre/post-include scripts.
-                    # It's okay if this one doesn't define any,
-                    if not hasattr(flag,includeType): 
-                        continue
-                    includeTypeList.append(argName)
-                    # Make sure that we have a list of scripts, not just one:
-                    includes = getattr(flag,includeType)
-                    if type(includes) != list:
-                        includes = [includes]
-                        pass
-                    # Now include all the specified scripts:
-                    for incfile in includes:
-                        logger.warning( "Including script: {0} for {1}".format(incfile, argName) )
-                        from AthenaCommon.Include import include
-                        include( incfile )
-                        pass
-                    pass
-                pass
-            pass
-        pass
-    # Tell the caller which types used pre/postIncludes:
-    return includeTypeList
-
-def SetupDPDPreIncludes(runArgs,flagContainerList):
-    # Let the generic function do the work:
-    return SetupDPDIncludes(runArgs,flagContainerList,"PreIncludeScripts")
-
-def SetupDPDPostIncludes(runArgs,flagContainerList):
-    # Let the generic function do the work:
-    return SetupDPDIncludes(runArgs,flagContainerList,"PostIncludeScripts")
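
The surviving SetupOutputDPDs still relies on the StreamName-to-argument naming convention used throughout the removed helpers. A sketch of that mapping, where arg_name is a hypothetical helper; note that the removed code's lstrip("Stream") strips a character set, which happens to work only because the stream suffixes used here never begin with one of those letters:

def arg_name(streamName):
    #e.g. 'StreamDESDM_BEAMSPOT' -> 'outputDESDM_BEAMSPOTFile'
    assert streamName.startswith('Stream')
    return 'output' + streamName[len('Stream'):] + 'File'

print(arg_name('StreamDESDM_BEAMSPOT'))
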
diff --git a/PhysicsAnalysis/PATJobTransforms/python/DefaultInputs.py b/PhysicsAnalysis/PATJobTransforms/python/DefaultInputs.py
deleted file mode 100644
index c6a4bd90e78977d60f64de8704ddbbfc7d7d50e1..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/python/DefaultInputs.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-# Creation: John Chapman (Cambridge), September 2010
-# Usage:
-#   -specify default inputs for transforms
-
-#Default values of input/output types, for standard tests
-DefaultInputs={
-    'inputBSFile' : '/afs/cern.ch/atlas/offline/test/data11_7TeV.00191920.physics_JetTauEtmiss.merge.RAW._lb0257._SFO-9._0001.1.10evts',
-    'inputRDOFile': '/afs/cern.ch/atlas/offline/test/mc11_valid.105200.T1_McAtNlo_Jimmy.digit.RDO.e835_s1310_s1300_d622.RDO.10evts.pool.root',
-    'inputESDFile': '/afs/cern.ch/atlas/offline/test/data11_7TeV.00191920.physics_JetTauEtmiss.merge.RAW._lb0257._SFO-9._0001.1.10evts.ESD.pool.root', 
-    'inputAODFile': '/afs/cern.ch/atlas/offline/test/data11_7TeV.00191920.physics_JetTauEtmiss.merge.RAW._lb0257._SFO-9._0001.1.10evts.AOD.pool.root', 
-    'cosmicsBS'   : '/afs/cern.ch/atlas/offline/test/data11_cos.00182609.physics_CosmicCalo.merge.RAW._lb0100._SFO-ALL._0001.1.SFO-ALL._0001.1.10evts.data',
-    'topBS'       : '/afs/cern.ch/atlas/offline/test/user.jboyd.DiLeptonicTopSkim.RAW.Run183347.10events.dat',
-    
-    'inputEvgenFile': '/afs/cern.ch/atlas/offline/ProdData/15.6.11.3/mu_E50_eta0-25-7000.evgen.pool.root',
-    'inputHitsFile' : 'root://eosatlas//eos/atlas/atlascerngroupdisk/proj-sit/digitization/RTT/mc10/mc10_7TeV.105200.T1_McAtNlo_Jimmy.simul.HITS.e598_s933_tid168076_00/HITS.168076._008421.pool.root.1',
-    'NDMinbiasHitsFile': 'root://eosatlas//eos/atlas/atlascerngroupdisk/proj-sit/digitization/RTT/mc10/mc10_7TeV.105001.pythia_minbias.merge.HITS.e577_s932_s952_tid170554_00/HITS.170554._000034.pool.root.1,root://eosatlas//eos/atlas/atlascerngroupdisk/proj-sit/digitization/RTT/mc10/mc10_7TeV.105001.pythia_minbias.merge.HITS.e577_s932_s952_tid170554_00/HITS.170554._000043.pool.root.1,root://eosatlas//eos/atlas/atlascerngroupdisk/proj-sit/digitization/RTT/mc10/mc10_7TeV.105001.pythia_minbias.merge.HITS.e577_s932_s952_tid170554_00/HITS.170554._000060.pool.root.1,root://eosatlas//eos/atlas/atlascerngroupdisk/proj-sit/digitization/RTT/mc10/mc10_7TeV.105001.pythia_minbias.merge.HITS.e577_s932_s952_tid170554_00/HITS.170554._000082.pool.root.1',
-    #'SDMinbiasHitsFile': '/afs/cern.ch/atlas/offline/ProdData/15.6.11.3/mu_E50_eta0-25-7000_ATLAS-GEO-11-00-00.hits.pool.root',
-    #'DDMinbiasHitsFile': '/afs/cern.ch/atlas/offline/ProdData/15.6.11.3/mu_E50_eta0-25-7000_ATLAS-GEO-11-00-00.hits.pool.root',
-    'cavernHitsFile': 'root://eosatlas//eos/atlas/atlascerngroupdisk/proj-sit/digitization/RTT/mc10/mc10_7TeV.005008.CavernInput.merge.HITS.e4_e607_s951_s952_tid170551_00/HITS.170551._000011.pool.root.1,root://eosatlas//eos/atlas/atlascerngroupdisk/proj-sit/digitization/RTT/mc10/mc10_7TeV.005008.CavernInput.merge.HITS.e4_e607_s951_s952_tid170551_00/HITS.170551._000111.pool.root.1,root://eosatlas//eos/atlas/atlascerngroupdisk/proj-sit/digitization/RTT/mc10/mc10_7TeV.005008.CavernInput.merge.HITS.e4_e607_s951_s952_tid170551_00/HITS.170551._000144.pool.root.1,root://eosatlas//eos/atlas/atlascerngroupdisk/proj-sit/digitization/RTT/mc10/mc10_7TeV.005008.CavernInput.merge.HITS.e4_e607_s951_s952_tid170551_00/HITS.170551._000150.pool.root.1,root://eosatlas//eos/atlas/atlascerngroupdisk/proj-sit/digitization/RTT/mc10/mc10_7TeV.005008.CavernInput.merge.HITS.e4_e607_s951_s952_tid170551_00/HITS.170551._000151.pool.root.1',
-    'beamHaloHitsFile': 'root://eosatlas//eos/atlas/atlascerngroupdisk/proj-sit/digitization/RTT/mc10/mc10_7TeV.108852.BeamHaloInputs.merge.HITS.e4_e567_s949_s952_tid170552_00/HITS.170552._000001.pool.root.1',
-    'beamGasHitsFile': 'root://eosatlas//eos/atlas/atlascerngroupdisk/proj-sit/digitization/RTT/mc10/mc10_7TeV.108863.Hijing_beamgas.merge.HITS.e4_s950_s952_tid170553_00/HITS.170552._000087.pool.root.1'
-    }
-
-
-
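
Several of the pile-up entries above pack more than one file into a single comma-separated string; consumers were expected to split them back into a list. A sketch with shortened placeholder paths:

NDMinbiasHitsFile = 'root://eosatlas//eos/.../HITS.pool.root.1,root://eosatlas//eos/.../HITS.pool.root.2'
print(NDMinbiasHitsFile.split(','))  #-> list of two input files
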
diff --git a/PhysicsAnalysis/PATJobTransforms/python/GeoConfig.py b/PhysicsAnalysis/PATJobTransforms/python/GeoConfig.py
deleted file mode 100644
index 987cfa167251432ce42a5e619c2518d02bd506e1..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/python/GeoConfig.py
+++ /dev/null
@@ -1,52 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-def SetFieldForHLT():
-    """ setting magnetic field automatically for RAWtoRAWHLT transform, needs athenaCommonFlags.BSRDOInput and globalflags.InputFormat to be set in advance"""
-    # by C.Mora August 2009
-    from RecExConfig.GetCool import cool
-    if cool.toroidCurrent()>100. and cool.solenoidCurrent()>100.:
-        setField='All'
-    elif cool.solenoidCurrent()>100.:
-        setField='Solenoid'
-    elif cool.toroidCurrent()>100.:
-        setField='Toroid'
-    else:
-        setField='None'
-    return setField
-
-def GetRunNumber(runArgs):
-    RunNumber='NONE'
-    if hasattr(runArgs,"RunNumber"):
-        RunNumber=runArgs.RunNumber
-    elif hasattr(runArgs,"inputBSFile") or hasattr(runArgs,"inputESDFile") or hasattr(runArgs,"inputAODFile"):
-        #base on a file named like this:
-        #/castor/cern.ch/grid/atlas/DAQ/2008/87863/physics_BPTX/daq.NoTag.0087863.physics.BPTX.LB0000.SFO-1._0001.data
-        #or directly
-        #daq.NoTag.0087863.physics.BPTX.LB0000.SFO-1._0001.data
-
-        #from a list of inputFiles, take the first one
-        if hasattr(runArgs,"inputBSFile"): fileName=runArgs.inputBSFile[0]
-        elif hasattr(runArgs,"inputESDFile"): fileName=runArgs.inputESDFile[0]
-        #if several directories, take the last part
-        subNames=fileName.split('/')
-        lastSub=subNames[len(subNames)-1]
-        #from daq.NoTag.0087863.physics.BPTX.LB0000.SFO-1._0001.data, take the first integer
-        #split by dots
-        parts=lastSub.split('.')
-        Found=False
-        for part in parts:            
-            if not Found:
-                try:
-                    RunNumber=int(part)
-                    Found=True
-                except:
-                    pass
-
-    if RunNumber=='NONE': #'==', not 'is': string identity is unreliable
-        raise RuntimeError("could not find RunNumber from inputFile nor from direct argument")
-    else:
-        print "The RunNumber is",RunNumber
-    return RunNumber
-
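
The run-number lookup removed here boils down to taking the first purely numeric dot-separated token of the base file name. A standalone sketch (run_number_from_name is a hypothetical name; the example file name is the one quoted in the comments above):

def run_number_from_name(fileName):
    lastSub = fileName.split('/')[-1]        #drop any directories
    for part in lastSub.split('.'):
        try:
            return int(part)                 #first purely numeric token wins
        except ValueError:
            continue
    raise RuntimeError('could not find RunNumber in %s' % fileName)

print(run_number_from_name('daq.NoTag.0087863.physics.BPTX.LB0000.SFO-1._0001.data'))  #-> 87863
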
diff --git a/PhysicsAnalysis/PATJobTransforms/python/OutputsMgr.py b/PhysicsAnalysis/PATJobTransforms/python/OutputsMgr.py
deleted file mode 100644
index 6b20c15447d45c56d625a59783137b6507a52894..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/python/OutputsMgr.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-class TmpClass():
-    """dynamically created output."""
-    def __init__(self,key='userOutputWasUndefined'):
-        keySuffix=key[len('userOutput'):]
-        self.StreamName = 'StreamNTUP_'+keySuffix
-        self.TreeNames = ['']
-        self.trfKeyName=key
-        return
-
-
-class OutputsMgr():
-    def __init__(self):
-        self.userOutputList=[]
-        return
-    
-    def addTrfOutput(self,key):
-        from PATJobTransforms.Configuration import AddUserDPD,ConfigDic
-        if ConfigDic.has_key(key):
-            raise RuntimeError("key %s already exists, cannot be redefined"%key)
-        tmpClass=TmpClass(key)
-        AddUserDPD(tmpClass, ['e2d','a2d'])
-        self.addOutput(key,'defaultFileName.root')
-        return
-
-    def addOutput(self,key,defaultValue):
-        setattr(self,key,defaultValue)
-        self.userOutputList.append(key)
-        return
-
-    def getFileName(self,dataTypeKey,defaultName):
-        if dataTypeKey in self.userOutputList:
-            return getattr(self,dataTypeKey)
-
-        self.addOutput(dataTypeKey,defaultName)
-        return defaultName
-    
-outputsMgr = OutputsMgr()
-magicKey='userOutput'
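
For reference, the removed manager resolved file names lazily: a key already registered by the user wins, otherwise the default is registered and returned. A standalone sketch (the Outputs class is a stand-in for OutputsMgr):

class Outputs(object):
    def __init__(self):
        self.userOutputList = []
    def addOutput(self, key, value):
        setattr(self, key, value)
        self.userOutputList.append(key)
    def getFileName(self, key, default):
        if key in self.userOutputList:
            return getattr(self, key)        #user-supplied name wins
        self.addOutput(key, default)         #otherwise register the default
        return default

mgr = Outputs()
print(mgr.getFileName('userOutputNTUP_TEST', 'defaultFileName.root'))  #-> defaultFileName.root
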
diff --git a/PhysicsAnalysis/PATJobTransforms/python/ProdSysDicTools.py b/PhysicsAnalysis/PATJobTransforms/python/ProdSysDicTools.py
deleted file mode 100644
index 60612c294e780e3693e7d90f683ffc08f576542c..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/python/ProdSysDicTools.py
+++ /dev/null
@@ -1,247 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-# Creation: David Cote (CERN) from Joao Firmino da Costa (DESY), April 2010
-# Usage:
-#   -downloads job transforms configurations from ProdSys database
-
-
-#---------------------------------------
-def CorrectDict(aDic):
-    
-    for key in aDic.keys():
-        if str(aDic[key])=='NONE': del aDic[key]
-
-    if aDic.has_key('preInclude'):
-        aDic['preInclude']=aDic['preInclude'].replace(' ',',')
-
-    if aDic.has_key('postInclude'):
-        aDic['postInclude']=aDic['postInclude'].replace(' ',',')  
-
-    return aDic
-
-#----------------------------------------
-def AddSimjobOptions(aDic):
-    if not aDic.has_key('skipEvents'):
-        aDic['skipEvents']=0
-
-    if not aDic.has_key('randomSeed'):
-        aDic['randomSeed']= 10
-
-    if aDic.has_key('ConditionsTag'):
-        aDic['conditionsTag']=aDic['ConditionsTag']
-        del aDic['ConditionsTag']
-
-    if aDic.has_key('JobConfig'):
-        aDic['preInclude']=','.join(['SimulationJobOptions/%s'%i if i.find('SimulationJobOptions')<0 and i.find('/')<0 else i for i in aDic['JobConfig'].split(",")])
-        del aDic['JobConfig']
-
-    if aDic.has_key('PhysicsList'):
-        aDic['physicsList']=aDic['PhysicsList']
-        del aDic['PhysicsList']
-    
-    if aDic.has_key('Geometry'):
-        aDic['geometryVersion']=aDic['Geometry']
-        del aDic['Geometry']
-    return
-
-#----------------------------------------
-def AddDigijobOptions(aDic):
-    if not aDic.has_key('skipEvents'):
-        aDic['skipEvents']=0
-
-    if not aDic.has_key('digiSeedOffset1'):
-        aDic['digiSeedOffset1']= 1
-
-    if not aDic.has_key('digiSeedOffset2'):
-        aDic['digiSeedOffset2']= 2
-
-    if aDic.has_key('DigiRndmSvc'):
-        aDic['digiRndmSvc']=aDic['DigiRndmSvc']
-        del aDic['DigiRndmSvc']
-
-    if aDic.has_key('ConditionsTag'):
-        aDic['conditionsTag']=aDic['ConditionsTag']
-        del aDic['ConditionsTag']
-
-    if aDic.has_key('JobConfig'):
-        aDic['preInclude']=','.join(['SimulationJobOptions/%s'%i if i.find('SimulationJobOptions')<0 and i.find('/')<0 else i for i in aDic['JobConfig'].split(",")])
-        del aDic['JobConfig']
-
-    if aDic.has_key('SamplingFractionDbTag'):
-        aDic['samplingFractionDbTag']=aDic['SamplingFractionDbTag']
-        del aDic['SamplingFractionDbTag']
-        
-    ## temp hack to remove pile-up hits
-    pileupkeys = {'beamHaloHits': 'beamHaloHitsFile','beamGasHits': 'beamGasHitsFile',
-                  'cavernHits': 'cavernHitsFile','minbiasHits': 'NDMinbiasHitsFile',
-                  'SDMinbiasHits': 'SDMinbiasHitsFile', 'DDMinbiasHits': 'DDMinbiasHitsFile'}
-    from PATJobTransforms.DefaultInputs import DefaultInputs
-    for key in pileupkeys.keys():
-        if aDic.has_key(key): 
-            if DefaultInputs.has_key(pileupkeys[key]):
-                aDic[pileupkeys[key]]=DefaultInputs[pileupkeys[key]]
-            else:
-                print "Removing argument: %s=%s. GetCommand.py does not handle background input files yet"%(key,aDic[key])
-            del aDic[key]
-    return
-
-#-------------------------------
-
-#def ChangeGenjobOptions(aDic):
-
-#    if aDic.has_key('Input_file_base'):
-#        tmp = aDic['Input_file_base']
-#        del  aDic['Input_file_base']
-#        aDic['inputGeneratorFile'] = tmp
-
-#-------------------------------
-def GetPANDAClient(amitag):
-    import cx_Oracle
-    #cur = cx_Oracle.connect('atlas_grisli_r/panda_c10@atlas_panda').cursor()
-    cur = cx_Oracle.connect('atlas_grisli_r/panda_c10@adcr_panda').cursor()
-    return cur
-
-#-------------------------------
-# Macro provided by Pavel
-def ReadablePANDA(s):
-    # This is a mess - PanDA encodes the tag in some sort of HTML-escaped form, so we need to un-escape it here
-    return s.replace('%0B',' ').replace('%9B','; ').replace('%8B','"').replace('%3B',';').replace('%2C',',').replace('%2B','+')
-
-
-#-----------------------------------
-def GetInfoFromPANDA(amitag):
-    import string
-    import sys #needed for the sys.exit in the merging branch below
-    from PATJobTransforms.DefaultInputs import DefaultInputs
-    
-    tagi=amitag
-    cur=GetPANDAClient(amitag)
-    c1  = cur.execute(" select trf,trfv,lparams,vparams,formats from t_trf_config where tag='%s' and cid=%d " %(tagi[:1],int(tagi[1:])) )
-    trf,trfv,lp,vp,fmt=cur.fetchone()
-    #print " lp ",lp
-    #print "vp ",vp
-    #print "fmt ", fmt
-    #print "trf ",trf
-    nbr= trf.count(',')+1 # determine how many job transforms exist
-    llp=lp.split(';')
-    vvp=vp.split(';')
-    trf=trf.split(',')
-    
-    listDicsPANDA=[]
-#    DicSW={}
-#    DicSW['Release']=trfv
-    
-    hasDIGI = 0
-    for i in range(0,nbr): # loop that fills for each nbr the dictionary with the physconf information 
-        PhysDicPANDA={}
-        InputDicPANDA={}        
-        OutputDicPANDA={}        
-        results={}
-        
-        results['amiTransform'] = trf[i]
-        llp[i]=llp[i].replace(' ','_')
-        llp[i]=llp[i].replace(',',' ')
-        tempkey = string.split(llp[i]) # have created an array of keys
-        items=len(tempkey)
-        
-        vvp[i]=vvp[i].replace(' ','lilili') 
-        vvp[i]=vvp[i].replace(',',' ')
-        vvp[i]=vvp[i].replace('lilili',',') 
-        
-        tempval = string.split(vvp[i]) # have created an array of values
-        
-        for j in range(0,items):
-            PhysDicPANDA[tempkey[j]]= ReadablePANDA(tempval[j])
-            PhysDicPANDA[tempkey[j]]=PhysDicPANDA[tempkey[j]].replace(';',',')
-            PhysDicPANDA[tempkey[j]]=PhysDicPANDA[tempkey[j]].replace(' ',',')
-            
-        CorrectDict(PhysDicPANDA)
-        input={}        
-        
-        if trf[i]=='csc_atlasG4_trf.py' or trf[i]=='AtlasG4_trf.py': # G4 sim jobTransform
-            # assuming that there is no input file
-            AddSimjobOptions(PhysDicPANDA)
-            input['inputEvgenFile']=DefaultInputs['inputEvgenFile']
-            OutputDicPANDA['outputHitsFile']={'dstype': 'HITS'}
-            if PhysDicPANDA.has_key('DBRelease'):
-                del PhysDicPANDA['DBRelease']
-                #PhysDicPANDA['DBRelease']='/afs/cern.ch/atlas/www/GROUPS/DATABASE/pacman4/DBRelease/DBRelease-'+PhysDicPANDA['DBRelease']+'.tar.gz'
-
-        elif trf[i]=='csc_digi_trf.py' or trf[i]=='Digi_trf.py': # DIGI jobTransform
-            # assuming that there is no input file
-            AddDigijobOptions(PhysDicPANDA)
-            input['inputHitsFile']=DefaultInputs['inputHitsFile']
-            OutputDicPANDA['outputRDOFile']={'dstype': 'RDO'}
-            if PhysDicPANDA.has_key('DBRelease'):
-                del PhysDicPANDA['DBRelease']
-                #PhysDicPANDA['DBRelease']='/afs/cern.ch/atlas/www/GROUPS/DATABASE/pacman4/DBRelease/DBRelease-'+PhysDicPANDA['DBRelease']+'.tar.gz'
-            hasDIGI = 1
-
-        #elif trf[i]=='Evgen_trf.py' : # GEN jobTransform
-        #    # assuming that there is no input file
-        #    ChangeGenjobOptions(PhysDicPANDA)
-
-        elif trf[i]=='Reco_trf.py':
-            if hasDIGI==1:
-                input['inputRDOFile']='myRDOFile.pool.root'
-            else:
-                if PhysDicPANDA.has_key('conditionsTag'):
-                    if PhysDicPANDA['conditionsTag'].startswith('OFLCOND'):
-                        input['inputRDOFile']=DefaultInputs['inputRDOFile']
-                    elif PhysDicPANDA['conditionsTag'].startswith('COM'):
-                        input['inputBSFile']=DefaultInputs['inputBSFile']
-                else:
-                    # there are no conditionsTAG, assuming it's MC
-                    input['inputRDOFile']=DefaultInputs['inputRDOFile']
-
-            fmt=fmt.replace('.',' ') 
-            tmpfmt = string.split(fmt)
-            items = len(tmpfmt)
-            for j in range(0,items):
-                if not tmpfmt[j].startswith('RDO'): 
-                    key='output'+tmpfmt[j]+'File'
-                    OutputDicPANDA[key]={'dstype': tmpfmt[j]}
-
-        elif trf[i]=='Merging_trf.py':
-            print fmt
-            fmt=fmt.replace('.',' ').replace('HITS','Hits')
-            tmpfmt = string.split(fmt)
-            items = len(tmpfmt)
-            print " fmt in merging loop : ",fmt," and items : ",items
-            for j in range(0,items):
-                inkey='input'+tmpfmt[j]+'File'
-                try:
-                    input[inkey]=DefaultInputs[inkey]
-                    outkey='output'+tmpfmt[j]+'File' #'j', not 'i': index within the format list
-                except KeyError:
-                    print 'Warning: no default value for {0}. Probably TAG is incorrectly stored'.format(inkey)
-                    sys.exit(1)
-                OutputDicPANDA[outkey]={'dstype': tmpfmt[j]}
-                
-        elif trf[i]=='DigiMReco_trf.py':
-            input['inputHitsFile']=DefaultInputs['inputHitsFile']
-            fmt=fmt.replace('.',' ') 
-            tmpfmt = string.split(fmt)
-            items = len(tmpfmt)
-            for j in range(0,items):
-                if not tmpfmt[j].startswith('RDO'): 
-                    key='output'+tmpfmt[j]+'File'
-                    OutputDicPANDA[key]={'dstype': tmpfmt[j]}
-            if not ('outputRDOFile' in OutputDicPANDA or 'tmpRDO' in OutputDicPANDA):
-                OutputDicPANDA['tmpRDO'] = {'dstype': 'RDO.tmp'}
-
-        else:
-            raise RuntimeError("No support yet for job transform '%s' returned by ProdSys tag %s"%(trf[i],amitag))
-
-
-        results['amiInputDic']=input
-        CorrectDict(OutputDicPANDA)   
-        results['amiOuputDic']=OutputDicPANDA
-        results['amiPhysDic']=PhysDicPANDA            
-#        results['amiRelease']=DicSW
-        listDicsPANDA.append(results)
-
-    return listDicsPANDA
-
-
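
The percent tokens handled by the removed ReadablePANDA are not standard URL escapes, just PanDA's ad-hoc encoding. A table-driven equivalent with the same substitution order (readable_panda is a hypothetical name):

def readable_panda(s):
    for enc, dec in (('%0B', ' '), ('%9B', '; '), ('%8B', '"'),
                     ('%3B', ';'), ('%2C', ','), ('%2B', '+')):
        s = s.replace(enc, dec)
    return s

print(readable_panda('preExec%0Bfoo%3Bbar'))  #-> 'preExec foo;bar'
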
diff --git a/PhysicsAnalysis/PATJobTransforms/python/TrfFlags.py b/PhysicsAnalysis/PATJobTransforms/python/TrfFlags.py
deleted file mode 100644
index 62b89bc779ee08784b18246c3cf5a45375212412..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/python/TrfFlags.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-##Creation: David Cote (CERN), September 2010
-
-from AthenaCommon.JobProperties import JobProperty, JobPropertyContainer
-from AthenaCommon.JobProperties import jobproperties
-
-class TrfFlags(JobPropertyContainer):
-    """Flag/job property container for job transforms."""
-jobproperties.add_Container(TrfFlags)
-
-class KeepFullCommandUntouched(JobProperty):
-    """ Blocks the execution of PopSynonyms. """
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-    pass
-jobproperties.TrfFlags.add_JobProperty(KeepFullCommandUntouched)
-
-class ApplyIfMatchPattern(JobProperty):
-    """ Activates output vs stream pattern matching """
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = False
-    pass
-jobproperties.TrfFlags.add_JobProperty(ApplyIfMatchPattern)
-
-
-trfFlags = jobproperties.TrfFlags
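
Outside Athena the JobProperty machinery is unavailable, but the flag pattern above reduces to callable objects carrying a StoredValue. A minimal standalone mimic (Flag and TrfFlagsMimic are made up; the real classes come from AthenaCommon.JobProperties):

class Flag(object):
    def __init__(self, stored=False):
        self.statusOn = True
        self.StoredValue = stored
    def __call__(self):
        return self.statusOn and self.StoredValue

class TrfFlagsMimic(object):
    pass

trfFlags = TrfFlagsMimic()
trfFlags.KeepFullCommandUntouched = Flag(False)
trfFlags.ApplyIfMatchPattern = Flag(False)
print(trfFlags.KeepFullCommandUntouched())   #-> False
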
diff --git a/PhysicsAnalysis/PATJobTransforms/scripts/AODtoDPD_trf.py b/PhysicsAnalysis/PATJobTransforms/scripts/AODtoDPD_trf.py
deleted file mode 100755
index df35ca0e632c0803e2f202137af8865171ac4db8..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/scripts/AODtoDPD_trf.py
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-__doc__ = """Make DPD's from AOD's"""
-
-from PATJobTransforms.BaseOfBasicTrf import BaseOfBasicTrf
-from PyJobTransformsCore.trf import Author
-
-class AODtoDPDJobTransform( BaseOfBasicTrf ):
-    def __init__(self,inDic,lastInChain=True):
-        BaseOfBasicTrf.__init__(self,inDic,
-                                authors=[Author('David Cote','david.cote@cern.ch')],
-                                skeleton='PATJobTransforms/skeleton.AODtoDPD_trf.py',
-                                help = __doc__ , lastInChain=lastInChain)
-
-    def matchEvents(self):
-        self.logger().info("MatchEvents is not executed for DPD outputs.")
-        return
-
-
-#----------------------------------------------------------------------
-if __name__ == '__main__':    
-    #Special preparation for command-line
-    import sys
-    from PATJobTransforms.ArgDicTools import BuildDicFromCommandLine
-    inDic=BuildDicFromCommandLine(sys.argv)
-    #Construct and execute the transform
-    trf = AODtoDPDJobTransform(inDic)          
-    sys.exit(trf.exeArgDict(inDic).exitCode())
-
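
All of the *_trf.py entry points removed in this change share the same driver idiom: turn sys.argv into an argument dictionary, build the transform, then run exeArgDict and exit with its code. A reduced stand-in for the argv-to-dict step (build_dic_from_argv is hypothetical; the real BuildDicFromCommandLine also handles --argdict pickles, positional values and '-' style options):

def build_dic_from_argv(argv):
    return dict(arg.split('=', 1) for arg in argv[1:] if '=' in arg)

print(build_dic_from_argv(['AODtoDPD_trf.py', 'maxEvents=10',
                           'inputAODFile=my.AOD.pool.root']))
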
diff --git a/PhysicsAnalysis/PATJobTransforms/scripts/AODtoTAG_trf.py b/PhysicsAnalysis/PATJobTransforms/scripts/AODtoTAG_trf.py
deleted file mode 100755
index c2b59ebf7211c19c35b8f86c16cc87a33e44f404..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/scripts/AODtoTAG_trf.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-__doc__ = """Make TAG's from AOD's"""
-
-from PATJobTransforms.BaseOfBasicTrf import BaseOfBasicTrf
-from PyJobTransformsCore.trf import Author
-
-class AODtoTAGJobTransform( BaseOfBasicTrf ):
-    def __init__(self,inDic,lastInChain=True):
-        BaseOfBasicTrf.__init__(self,inDic,
-                                authors = [ Author('Tulay Cuhadar Donszelmann', 'tcuhadar@cern.ch'),
-                                            Author('David Cote', 'David.Cote@cern.ch')],
-                                skeleton='PATJobTransforms/skeleton.AODtoTAG_trf.py',
-                                help = __doc__, lastInChain=lastInChain)
-
-    def matchEvents(self):
-        return self.matchEventsExpectEqual("inputAODFile","outputTAGFile")
-
-#----------------------------------------------------------------------
-if __name__ == '__main__':    
-    #Special preparation for command-line
-    import sys
-    from PATJobTransforms.ArgDicTools import BuildDicFromCommandLine
-    inDic=BuildDicFromCommandLine(sys.argv)
-    #Construct and execute the transform
-    trf = AODtoTAGJobTransform(inDic)          
-    sys.exit(trf.exeArgDict(inDic).exitCode())
-
diff --git a/PhysicsAnalysis/PATJobTransforms/scripts/CmdToPickledDic.py b/PhysicsAnalysis/PATJobTransforms/scripts/CmdToPickledDic.py
deleted file mode 100755
index cdff367d50739bf6edbc2f951a6e9d9ddf486da3..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/scripts/CmdToPickledDic.py
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-print " * * * CmdToPickledDic * * *"
-if __name__ == '__main__':
-    import sys
-    if len(sys.argv) < 2 or sys.argv[1].upper()=="-H":
-        print "CmdToPickledDic: Tool to create inputDictionary.pickle to be used with Reco_trf.py"
-        print "Parameters are the same as for Reco_trf.py"
-        sys.exit(-1)
-    from PATJobTransforms.ArgDicTools import BuildDicFromCommandLine
-
-    kfcu_set=False
-    for arg in sys.argv:
-        if arg.startswith("keepFullCommandUntouched"):
-            kfcu_set=True
-            break
-    if kfcu_set:
-        mySysArgv=sys.argv
-    else:
-        mySysArgv=sys.argv + ["keepFullCommandUntouched=True",]
-        
-    #Get the dico and pickle it
-    inDic=BuildDicFromCommandLine(mySysArgv)
-    #print final command
-    print "\n###########"
-    print "To run, do:"
-    #print "%s --argdict=%s 2>&1 | tee Log.txt"%(mySysArgv[0],'inputDictionary.pickle')
-    print "Reco_trf.py --argdict=inputDictionary.pickle 2>&1 | tee Log.txt"
-    print "\n###########"
-    print "To upload to AMI (manager only), use:  UploadAMITag.py"
-    print "\n"
diff --git a/PhysicsAnalysis/PATJobTransforms/scripts/CmdToPositional.py b/PhysicsAnalysis/PATJobTransforms/scripts/CmdToPositional.py
deleted file mode 100755
index 9e7b8fae80b16ec51fcca2c8749eaab50de8c9b6..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/scripts/CmdToPositional.py
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-print "\n * * * CmdToPositional * * *\n"
-def PositionInList(key,poslist):
-    ind=-1
-    i=0
-    for item in poslist:
-        if key==item:
-            if ind<0:
-                ind=i
-            else:
-                raise RuntimeError("Key '%s' requested twice. Not allowed!"%key)
-        i+=1
-    if ind<0:
-        raise RuntimeError("Key '%s' not found!"%key)
-    else:
-        print "key '%s' has position %i"%(key,ind)
-    return ind
-
-if __name__ == '__main__':
-    import sys
-    #Remove CmdToPositional.py itself from the argument list...
-    myTrf=sys.argv[1]
-    mySysArgv=sys.argv[1:]
-
-    #Get positional values for myTrf
-    if myTrf=='Reco_trf.py':
-        from PATJobTransforms.Reco_trf import ListOfDefaultPositionalKeys
-    elif myTrf=='MergeBS_trf.py':
-        from PATJobTransforms.MergeBS_trf import ListOfDefaultPositionalKeys
-    elif myTrf=='MergePool_trf.py':
-        from PATJobTransforms.MergePool_trf import ListOfDefaultPositionalKeys
-    else:
-        raise RuntimeError("CmdToPositional not implemented for transform: %s"%myTrf)
-    print "Known positional keys for %s: "%myTrf
-    print ListOfDefaultPositionalKeys,"\n"
-
-    #Initialize positional value list:
-    posList=[]
-    for i in xrange(len(ListOfDefaultPositionalKeys)):
-        posList+=['NONE']
-
-    #Transform mySysArgv into a dictionary
-    from PATJobTransforms.ArgDicTools import SysArgvToDico,KeyExceptionCatcher
-    dic=SysArgvToDico(mySysArgv)
-    if not isinstance(dic,dict):
-        raise RuntimeError("Unable to interpret given command-line...")
-    
-    #Now fill relevant values
-    print ""
-    for key in dic.keys():
-        p=PositionInList(key,ListOfDefaultPositionalKeys)
-        posList[p]=dic[key]
-        if KeyExceptionCatcher(key)=="OK":
-            posList[p]=key
-    
-    #build and print the final command
-    cmd=myTrf
-    for val in posList:
-        cmd+=" "
-        cmd+=str(val)
-    cmd+=" 2>&1 | tee Log.txt"
-    print "\nPositional command:"
-    print cmd
-
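
What the removed script computes, in miniature: each key=value lands at the key's index in the canonical list, with 'NONE' everywhere else (key names shortened from the real list):

ListOfDefaultPositionalKeys = ['maxEvents', 'inputBSFile', 'outputBSFile']
dic = {'outputBSFile': 'out.data', 'maxEvents': '5'}
posList = ['NONE'] * len(ListOfDefaultPositionalKeys)
for key, val in dic.items():
    posList[ListOfDefaultPositionalKeys.index(key)] = val
print(' '.join(posList))                     #-> '5 NONE out.data'
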
diff --git a/PhysicsAnalysis/PATJobTransforms/scripts/DQHistogramMerge_trf.py b/PhysicsAnalysis/PATJobTransforms/scripts/DQHistogramMerge_trf.py
deleted file mode 100755
index ec16dd5e067c38527e66e7c11b033a69cc638625..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/scripts/DQHistogramMerge_trf.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-__doc__ = """Merge RAW->ESD and ESD->AOD monitoring histograms."""
-
-from PATJobTransforms.BaseOfBasicTrf import BaseOfBasicTrf
-from PyJobTransformsCore.trf import Author
-
-class DQHistogramMergeJobTransform( BaseOfBasicTrf ):
-    def __init__(self,inDic,lastInChain=True):
-        BaseOfBasicTrf.__init__(self,inDic,
-                                authors = [ Author('David Cote', 'David.Cote@cern.ch')],
-                                skeleton='PATJobTransforms/skeleton.DQHistogramMerge_trf.py',
-                                help = __doc__,lastInChain=lastInChain)
-
-    def matchEvents(self):
-        self.logger().info("MatchEvents is not executed for DQHistogramMerge.")
-        return
-
-    def forceSingleProcess(self):
-        import os
-        os.putenv('ATHENA_PROC_NUMBER','0')
-        for opt in self._athenaOptions:
-            if opt.split('=')[0]=='--nprocs' :
-                self._athenaOptions.remove(opt)
-                break
-
-#----------------------------------------------------------------------
-if __name__ == '__main__':    
-    #Special preparation for command-line
-    import sys
-    from PATJobTransforms.ArgDicTools import BuildDicFromCommandLine
-    inDic=BuildDicFromCommandLine(sys.argv)
-    #Construct and execute the transform
-    trf = DQHistogramMergeJobTransform(inDic)          
-    sys.exit(trf.exeArgDict(inDic).exitCode())
-
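
The forceSingleProcess trick above neutralises AthenaMP by zeroing ATHENA_PROC_NUMBER and dropping any --nprocs athena option. A standalone analogue (force_single_process is hypothetical; the original used os.putenv, which only affects child processes, while os.environ also updates the current one):

import os

def force_single_process(athenaOptions):
    os.environ['ATHENA_PROC_NUMBER'] = '0'   #tell Athena not to fork workers
    return [opt for opt in athenaOptions if opt.split('=')[0] != '--nprocs']

print(force_single_process(['--nprocs=4', '--stdcmalloc']))  #-> ['--stdcmalloc']
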
diff --git a/PhysicsAnalysis/PATJobTransforms/scripts/ESDtoDPD_trf.py b/PhysicsAnalysis/PATJobTransforms/scripts/ESDtoDPD_trf.py
deleted file mode 100755
index a2959c6914830cbc3dffb8239ec60579ed2f6706..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/scripts/ESDtoDPD_trf.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-__doc__ = """Make DPD from ESD."""
-
-from PATJobTransforms.BaseOfBasicTrf import BaseOfBasicTrf
-from PyJobTransformsCore.trf import Author
-
-class ESDtoDPDJobTransform( BaseOfBasicTrf ):
-    def __init__(self,inDic,lastInChain=True):
-        BaseOfBasicTrf.__init__(self,inDic,
-                                authors=[Author('David Cote','david.cote@cern.ch')],
-                                skeleton='PATJobTransforms/skeleton.ESDtoDPD_trf.py' ,
-                                help = __doc__,lastInChain=lastInChain )
-
-    def matchEvents(self):
-        self.logger().info("MatchEvents is not executed for DPD outputs.")
-        return
-
-
-#----------------------------------------------------------------------
-if __name__ == '__main__':
-    #Special preparation for command-line
-    import sys
-    from PATJobTransforms.ArgDicTools import BuildDicFromCommandLine
-    inDic=BuildDicFromCommandLine(sys.argv)
-    #Construct and execute the transform
-    trf = ESDtoDPDJobTransform(inDic)
-    sys.exit(trf.exeArgDict(inDic).exitCode())
diff --git a/PhysicsAnalysis/PATJobTransforms/scripts/GetCommand.py b/PhysicsAnalysis/PATJobTransforms/scripts/GetCommand.py
deleted file mode 100755
index 1780db137e611b0a8d9b3a859dcf73cd73db4e89..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/scripts/GetCommand.py
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-import sys
-
-print >>sys.stderr, '''
-This script is now deprecated. Please update to the new transforms
-version: 'GetTfCommand.py --AMI=qXXX'
-'''
diff --git a/PhysicsAnalysis/PATJobTransforms/scripts/MergeBS_trf.py b/PhysicsAnalysis/PATJobTransforms/scripts/MergeBS_trf.py
deleted file mode 100755
index afe35d66fc804242bea436f6d766688b67705efe..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/scripts/MergeBS_trf.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-__doc__ = """BS->BS"""
-
-from PyJobTransformsCore.trf import JobTransform,Author,JobReport
-from PATJobTransforms.ArgDicTools import DefaultConfigFromSysArgv
-
-ListOfDefaultPositionalKeys=['maxEvents','inputBSFile','outputBSFile','preInclude','postInclude','preExec','postExec','--ignoreunknown','--athenaopts']
-
-class MergeBSJobTransform( JobTransform ):
-    def __init__(self,inDic):
-        JobTransform.__init__(self,                              
-                              authors = [ Author('David Cote', 'David.Cote@cern.ch')],
-                              skeleton='PATJobTransforms/skeleton.MergeBS.py',
-                              help = __doc__ )
-
-        if not isinstance(inDic,dict):
-            raise TypeError("inDic has %s but should be a dictionary." %type(inDic))
-
-        #Default configuration?
-        if inDic.has_key('defaultFromPositionalValues'):
-            inDic=DefaultConfigFromSysArgv(ListOfDefaultPositionalKeys,inDic)
-
-        from PATJobTransforms.ConfigDicUtils import AutoConfigureFromDic
-        self.inDic=inDic
-        AutoConfigureFromDic(self,inDic)
-        return
-
-#----------------------------------------------------------------------
-if __name__ == '__main__':    
-    #Special preparation for command-line
-    import sys
-    from PATJobTransforms.ArgDicTools import BuildDicFromCommandLine
-    inDic=BuildDicFromCommandLine(sys.argv)
-    #Construct and execute the transform
-    trf = MergeBSJobTransform(inDic)
-    exitCode=trf.exeArgDict(inDic).exitCode()
-    sys.exit(exitCode)
-
diff --git a/PhysicsAnalysis/PATJobTransforms/scripts/MergeHIT_trf.py b/PhysicsAnalysis/PATJobTransforms/scripts/MergeHIT_trf.py
deleted file mode 100755
index 748ac47d039dc37b405688fce826fdccc41a28cd..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/scripts/MergeHIT_trf.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-__doc__=""" Merging of HIT files """
-
-from PATJobTransforms.BaseOfBasicTrf import BaseOfBasicTrf
-from PyJobTransformsCore.trf import Author
-
-class MergeHITSJobTransform( BaseOfBasicTrf ):
-    def __init__(self,inDic,lastInChain=True):
-        BaseOfBasicTrf.__init__(self,inDic,
-                              authors = [ Author('Jack Cranshaw','Jack.Cranshaw@cern.ch'),
-                                          Author('Borut Kersevan', 'Borut.Kersevan@cern.ch') ] ,
-                              skeleton='PATJobTransforms/skeleton.MergeHIT.py' ,
-                              help = __doc__, lastInChain=lastInChain )
-
-##         #add arguments
-##         self.add( InputHitsFileArg() )
-##         self.add( InputLogsFileArg() )
-##         self.add( OutputHitsFileArg() )
-##         self.add( MaxEventsArg() )
-##         self.add( SkipEventsArg() )
-##         self.add( GeometryVersionArg() )
-##         self.add( JobConfigArg(package='SimuJobTransforms'), default='NONE' )
-##         self.add( DBReleaseArg(), default='NONE' )
-##         self.add( IgnoreConfigErrorArg(), default=False )
-
-##         # avoid sqlite nfs lock problems
-##         self.add( SQLiteSupport() )
-
-
-# execute transform it if not imported
-if __name__ == '__main__':
-    #Special preparation for command-line
-    import sys
-    from PATJobTransforms.ArgDicTools import BuildDicFromCommandLine
-    inDic=BuildDicFromCommandLine(sys.argv)
-    #Construct and execute the transform
-    trf = MergeHITSJobTransform(inDic)
-    sys.exit(trf.exeArgDict(inDic).exitCode())
diff --git a/PhysicsAnalysis/PATJobTransforms/scripts/MergeNTUP_trf.py b/PhysicsAnalysis/PATJobTransforms/scripts/MergeNTUP_trf.py
deleted file mode 100755
index e4557cdc0822a88b55e0522beda3a2b0174c192a..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/scripts/MergeNTUP_trf.py
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-__doc__ = """Merge NTUP files."""
-
-from PATJobTransforms.BaseOfBasicTrf import BaseOfBasicTrf
-from PyJobTransformsCore.trf import Author
-
-ListOfDefaultPositionalKeys=['--AMIConfig', '--AMITag', '--argJSON', '--checkEventCount', '--dumpJSON', '--dumpPickle', '--env', '--execOnly', '--ignoreErrors', '--ignoreFiles', '--ignorePatterns', '--imf', '--inputNTUP_2L2QHSG2File', '--inputNTUP_2LHSG2File', '--inputNTUP_4LHSG2File', '--inputNTUP_BKGDFile', '--inputNTUP_BTAGD3PDFile', '--inputNTUP_BTAGEFFFile', '--inputNTUP_BTAGFULLFile', '--inputNTUP_BTAGGHOSTFile', '--inputNTUP_BTAGSLIMFile', '--inputNTUP_CLUSTERCORRECTIONFile', '--inputNTUP_COMMONFile', '--inputNTUP_EGAMMAFile', '--inputNTUP_EMBHHDNFile', '--inputNTUP_EMBHHIMFile', '--inputNTUP_EMBHHUPFile', '--inputNTUP_EMBLHDNFile', '--inputNTUP_EMBLHIMFile', '--inputNTUP_EMBLHUPFile', '--inputNTUP_EMBLLDNFile', '--inputNTUP_EMBLLIMFile', '--inputNTUP_EMBLLUPFile', '--inputNTUP_HSG2File', '--inputNTUP_HSG5GAMHFile', '--inputNTUP_HSG5GAMZFile', '--inputNTUP_HSG5WHFile', '--inputNTUP_HSG5WHQFile', '--inputNTUP_HSG5WHUFile', '--inputNTUP_HSG5ZBBFile', '--inputNTUP_HSG5ZHLLFile', '--inputNTUP_HSG5ZHLLUFile', '--inputNTUP_HSG5ZHMETFile', '--inputNTUP_HSG5ZHMETUFile', '--inputNTUP_IDVTXFile', '--inputNTUP_IDVTXLUMIFile', '--inputNTUP_JETMETEMCLFile', '--inputNTUP_JETMETFULLFile', '--inputNTUP_JETMETFile', '--inputNTUP_JETMETWZFile', '--inputNTUP_L1CALOFile', '--inputNTUP_L1CALOPROBFile', '--inputNTUP_L1TGCFile', '--inputNTUP_MINBIASFile', '--inputNTUP_MUONFile', '--inputNTUP_PHOTONFile', '--inputNTUP_PHYSICSFile', '--inputNTUP_PHYSVALFile', '--inputNTUP_SMBKGEFile', '--inputNTUP_SMBKGMUFile', '--inputNTUP_SMDILEPFile', '--inputNTUP_SMDYEEFile', '--inputNTUP_SMDYMUMUFile', '--inputNTUP_SMLIGHTFile', '--inputNTUP_SMQCDFile', '--inputNTUP_SMTRILEPFile', '--inputNTUP_SMWENUFile', '--inputNTUP_SMWENUJJFile', '--inputNTUP_SMWMUNUFile', '--inputNTUP_SMWMUNUJJFile', '--inputNTUP_SMWZFile', '--inputNTUP_SMWZSOFTFile', '--inputNTUP_SMZEEFile', '--inputNTUP_SMZMUMUFile', '--inputNTUP_SUSYLLPFile', '--inputNTUP_TAUFULLFile', '--inputNTUP_TAUFile', '--inputNTUP_TAUMEDIUMFile', '--inputNTUP_TAUSMALLFile', '--inputNTUP_TOPBOOSTFile', '--inputNTUP_TPHSG2File', '--inputNTUP_TRIGBJETFile', '--inputNTUP_TRIGFile', '--inputNTUP_TRIGMUFile', '--inputNTUP_WPRIMEENFile', '--inputNTUP_WPRIMEMNFile', '--inputNTUP_ZPRIMEEEFile', '--inputNTUP_ZPRIMEMMFile', '--orphanKiller', '--outputNTUP_2L2QHSG2_MRGFile', '--outputNTUP_2LHSG2_MRGFile', '--outputNTUP_4LHSG2_MRGFile', '--outputNTUP_BKGD_MRGFile', '--outputNTUP_BTAGD3PD_MRGFile', '--outputNTUP_BTAGEFF_MRGFile', '--outputNTUP_BTAGFULL_MRGFile', '--outputNTUP_BTAGGHOST_MRGFile', '--outputNTUP_BTAGSLIM_MRGFile', '--outputNTUP_CLUSTERCORRECTION_MRGFile', '--outputNTUP_COMMON_MRGFile', '--outputNTUP_EGAMMA_MRGFile', '--outputNTUP_EMBHHDN_MRGFile', '--outputNTUP_EMBHHIM_MRGFile', '--outputNTUP_EMBHHUP_MRGFile', '--outputNTUP_EMBLHDN_MRGFile', '--outputNTUP_EMBLHIM_MRGFile', '--outputNTUP_EMBLHUP_MRGFile', '--outputNTUP_EMBLLDN_MRGFile', '--outputNTUP_EMBLLIM_MRGFile', '--outputNTUP_EMBLLUP_MRGFile', '--outputNTUP_HSG2_MRGFile', '--outputNTUP_HSG5GAMH_MRGFile', '--outputNTUP_HSG5GAMZ_MRGFile', '--outputNTUP_HSG5WHQ_MRGFile', '--outputNTUP_HSG5WHU_MRGFile', '--outputNTUP_HSG5WH_MRGFile', '--outputNTUP_HSG5ZBB_MRGFile', '--outputNTUP_HSG5ZHLLU_MRGFile', '--outputNTUP_HSG5ZHLL_MRGFile', '--outputNTUP_HSG5ZHMETU_MRGFile', '--outputNTUP_HSG5ZHMET_MRGFile', '--outputNTUP_IDVTXLUMI_MRGFile', '--outputNTUP_IDVTX_MRGFile', '--outputNTUP_JETMETEMCL_MRGFile', '--outputNTUP_JETMETFULL_MRGFile', '--outputNTUP_JETMETWZ_MRGFile', '--outputNTUP_JETMET_MRGFile', '--outputNTUP_L1CALOPROB_MRGFile', '--outputNTUP_L1CALO_MRGFile', '--outputNTUP_L1TGC_MRGFile', '--outputNTUP_MINBIAS_MRGFile', '--outputNTUP_MUON_MRGFile', '--outputNTUP_PHOTON_MRGFile', '--outputNTUP_PHYSICS_MRGFile', '--outputNTUP_PHYSVAL_MRGFile', '--outputNTUP_SMBKGE_MRGFile', '--outputNTUP_SMBKGMU_MRGFile', '--outputNTUP_SMDILEP_MRGFile', '--outputNTUP_SMDYEE_MRGFile', '--outputNTUP_SMDYMUMU_MRGFile', '--outputNTUP_SMLIGHT_MRGFile', '--outputNTUP_SMQCD_MRGFile', '--outputNTUP_SMTRILEP_MRGFile', '--outputNTUP_SMWENUJJ_MRGFile', '--outputNTUP_SMWENU_MRGFile', '--outputNTUP_SMWMUNUJJ_MRGFile', '--outputNTUP_SMWMUNU_MRGFile', '--outputNTUP_SMWZSOFT_MRGFile', '--outputNTUP_SMWZ_MRGFile', '--outputNTUP_SMZEE_MRGFile', '--outputNTUP_SMZMUMU_MRGFile', '--outputNTUP_SUSYLLP_MRGFile', '--outputNTUP_TAUFULL_MRGFile', '--outputNTUP_TAUMEDIUM_MRGFile', '--outputNTUP_TAUSMALL_MRGFile', '--outputNTUP_TAU_MRGFile', '--outputNTUP_TOPBOOST_MRGFile', '--outputNTUP_TPHSG2_MRGFile', '--outputNTUP_TRIGBJET_MRGFile', '--outputNTUP_TRIGMU_MRGFile', '--outputNTUP_TRIG_MRGFile', '--outputNTUP_WPRIMEEN_MRGFile', '--outputNTUP_WPRIMEMN_MRGFile', '--outputNTUP_ZPRIMEEE_MRGFile', '--outputNTUP_ZPRIMEMM_MRGFile', '--reportName', '--reportType', '--showGraph', '--showPath', '--showSteps', '--skipFileValidation', '--skipInputFileValidation', '--skipOutputFileValidation', '--steering', '--tcmalloc']
-
-class MergeNTUPJobTransform( BaseOfBasicTrf ):
-    def __init__(self,inDic):
-        BaseOfBasicTrf.__init__(self,inDic,
-                                authors = [ Author('Bjorn Sarrazin', 'Bjorn.Sarrazin@cern.ch')],
-                                skeleton='PATJobTransforms/skeleton.MergeNTUP_trf.py',
-                                help = __doc__ )
-
-
-    def matchEvents(self):
-        for key in self._namedArgs.iterkeys():
-            if key.startswith('inputntup') and key.endswith('file'):
-                inputFileArgName=key
-            if key.startswith('outputntup') and key.endswith('file'):
-                outputFileArgName=key
-
-        inFile=self.getArgument(inputFileArgName)
-        outFile=self.getArgument(outputFileArgName)
-
-        in_tree_names=inFile._fileType.tree_names 
-        out_tree_names=outFile._fileType.tree_names
-
-        if not in_tree_names:
-            self.logger().info("MatchEvents is not executed for MergeNTUP. No tree_name is given for input file.")
-            return
-
-        if not out_tree_names:
-            self.logger().info("MatchEvents is not executed for MergeNTUP. No tree_name is given for output file.")
-            return
-
-        if in_tree_names!=out_tree_names:
-            self.logger().info("MatchEvents is not executed for MergeNTUP. Different tree_name given for input and output file.")
-            return
-                
-        self.matchEventsExpectEqual(inputFileArgName, outputFileArgName)
-        
-        return
-
-#----------------------------------------------------------------------
-if __name__ == '__main__':    
-    #Special preparation for command-line
-    import sys
-    from PATJobTransforms.ArgDicTools import BuildDicFromCommandLine
-    inDic=BuildDicFromCommandLine(sys.argv)
-    #Construct and execute the transform
-    trf = MergeNTUPJobTransform(inDic)          
-    sys.exit(trf.exeArgDict(inDic).exitCode())
-
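The matchEvents override above only compares event counts when the input and output NTUP types advertise identical ROOT tree names; otherwise the check is skipped with an explanatory log message. A small self-contained sketch of that guard logic (illustrative names, not the trf API):

```python
# Sketch of the guard logic in MergeNTUPJobTransform.matchEvents: the event
# count comparison only makes sense when input and output NTUPs declare the
# same tree names. Function and parameter names are illustrative.
def should_match_events(in_tree_names, out_tree_names, log=print):
    if not in_tree_names:
        log("skip: no tree_name given for input file")
        return False
    if not out_tree_names:
        log("skip: no tree_name given for output file")
        return False
    if in_tree_names != out_tree_names:
        log("skip: different tree_name for input and output file")
        return False
    return True

assert should_match_events(['physics'], ['physics'])
assert not should_match_events(['physics'], ['susy'])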
diff --git a/PhysicsAnalysis/PATJobTransforms/scripts/MergePool_trf.py b/PhysicsAnalysis/PATJobTransforms/scripts/MergePool_trf.py
deleted file mode 100755
index 3b0978714d1bc16f37940035f503fb0f48ee3271..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/scripts/MergePool_trf.py
+++ /dev/null
@@ -1,134 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-__doc__ = """Merge several input pool files into a single output file."""
-
-from PyJobTransformsCore.trf import *
-from PyJobTransformsCore.full_trfarg import *
-from PyJobTransformsCore.trfutil import *
-from PATJobTransforms.BaseOfBasicTrf import BaseOfBasicTrf
-
-import shutil  # used by fastMerge; previously relied on the wildcard imports above
-from subprocess import Popen, PIPE, STDOUT, check_call, CalledProcessError
-
-class MergePoolJobTransform( BaseOfBasicTrf ):
-    def __init__(self,inDic,lastInChain=True):
-        JobTransform.__init__(self,
-                              authors = [ Author('David Cote', 'david.cote@cern.ch') ] ,
-                              skeleton='PATJobTransforms/skeleton.MergePool.py' ,
-                              help = __doc__ ,
-                              lastInChain=lastInChain)
-
-        if not isinstance(inDic,dict):
-            raise TypeError("inDic has %s but should be a dictionary." %type(inDic))
-
-
-        from PATJobTransforms.ConfigDicUtils import AutoConfigureFromDic
-        self.inDic=inDic
-        # Look to see if we should use the fast hybrid POOL merger (default)
-        if self.inDic.get('fastPoolMerge', 'true').lower() == 'true':
-            print "Using hybrid merge - will skip all events with athena and merge later."
-            # This needs to become a run option, so set it early
-            self.inDic['skipEvents'] = 10000000
-            # This must be really set to trigger the correct JO fragments for fast merge 
-            self.inDic['fastPoolMerge'] = 'true'
-        AutoConfigureFromDic(self,inDic)
-                
-        self._addPostRunAction(self, prepend=True)
-
-
-    def postRunAction(self):
-        # Run fast merge as a post run action to the main transform
-        print "Executing postRunActions for MergePoolJobTransform"
-        if self.inDic.get('fastPoolMerge', 'true').lower() == 'true':
-            print "Now doing hybrid event merge"
-            self.fastMerge()
-
-
-    def fastMerge(self):
-        filelist = []
-        outputfile = self._outputFiles[0].value()
-        for file in self._inputFiles:
-            if file:
-                value = file.value()
-                if type(value).__name__ == 'list':
-                    filelist += value
-                else:
-                    filelist.append(value)
-        print "Files to Merge: %s" % filelist
-
-        # First run mergePOOL.exe to get events.pool
-        cmd = ['mergePOOL.exe', '-o', 'events.pool.root']
-        for file in filelist:
-            cmd.extend(['-i', file])
-        cmd.extend(['-e', 'MetaData', '-e', 'MetaDataHdrDataHeaderForm', '-e', 'MetaDataHdrDataHeader', '-e', 'MetaDataHdr'])
-        print "Will execute hybrid merge step 1: %s" % cmd
-
-        p = Popen(cmd, stdout=PIPE, stderr=STDOUT, close_fds=True)
-        while p.poll() is None:
-            line = p.stdout.readline()
-            if line:
-                print "mergePOOL.exe Report: %s" % line.strip()
-        rc = p.returncode
-        print "1st mergePOOL (event data) finished with code %s" % rc
-        if rc == 1:
-            print "mergePOOL.exe finished with unknown status (upgrade your RootFileTools to a newer version)"
-        elif rc != 0:
-            raise TransformError("mergePOOL.exe (event merge) encountered a problem",error='TRF_MERGEERR') 
-
-        # Second merge: fold the metadata stub (the athena output file) into events.pool.root
-        cmd = ['mergePOOL.exe', '-o', 'events.pool.root', '-i', outputfile]
-        print "Will execute hybrid merge step 2: %s" % cmd
-        
-        p = Popen(cmd, stdout=PIPE, stderr=STDOUT, close_fds=True)
-        while p.poll() is None:
-            line = p.stdout.readline()
-            if line:
-                print "mergePOOL.exe Report: %s" % line.strip()
-        rc = p.returncode
-        print "2nd mergePOOL (metadata) finished with code %s" % rc
-        if rc == 1:
-            print "mergePOOL.exe finished with unknown status (upgrade your RootFileTools to a newer version) - assuming all is ok"
-        elif rc != 0:
-            raise TransformError("mergePOOL.exe (final merge) encountered a problem",error='TRF_MERGEERR') 
-
-        # Finish hybrid merge by moving the full file to the final output location
-        shutil.move('events.pool.root', outputfile)
-
-        # Now fix the metadata, which has been left by POOL as the _stub_ file's metadata
-        # so it has the wrong GUID in the PFC
-        print 'Now fixing metadata in PFC for %s' % outputfile
-        try:
-            check_call(['FCdeletePFN', '-p', outputfile])
-            correctGUID = None
-            p = Popen(['pool_extractFileIdentifier.py', outputfile], stdout=PIPE, stderr=STDOUT, close_fds=True, bufsize=1)
-            while p.poll() is None:
-                line = p.stdout.readline()
-                words = line.split()
-                if len(words) >= 2 and outputfile in words[1]:
-                    correctGUID = words[0]
-            if correctGUID == None or p.returncode != 0:
-                raise TransformError("pool_extractFileIdentifier.py failed to get merged file GUID", error='TRF_MERGEERR')
-            print 'GUID is %s' % correctGUID
-            check_call(['FCregisterPFN', '-p', outputfile, '-t', 'ROOT_All', '-g', correctGUID]) 
-        except CalledProcessError, e:
-            print 'Attempt to fix PFC with new merged file information failed: %s' % e
-
-
-    def matchEvents(self):
-        # Switch between ESD and AOD merging 
-        if 'outputAODFile' in self.inDic:
-            return self.matchEventsExpectEqual("inputAODFile","outputAODFile")
-        if 'outputESDFile' in self.inDic:
-            return self.matchEventsExpectEqual("inputESDFile","outputESDFile")
-
-
-# Python executable
-if __name__ == '__main__':
-    #Special preparation for command-line
-    import sys
-    from PATJobTransforms.ArgDicTools import BuildDicFromCommandLine
-    inDic=BuildDicFromCommandLine(sys.argv)
-    #Construct and execute the transform
-    trf = MergePoolJobTransform(inDic)          
-    sys.exit(trf.exeArgDict(inDic).exitCode())
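The hybrid merge above is a two-pass mergePOOL.exe invocation: first the event payloads of all inputs (excluding the metadata containers), then the metadata stub written by the skip-all-events athena job, after which the merged file is moved over the stub. A condensed Python 3 sketch of that flow, with simplified error handling:

```python
#!/usr/bin/env python3
# Sketch of the hybrid-merge pattern used above: run mergePOOL.exe twice,
# streaming its output line by line, then move the event file into place.
# Command names follow the transform; error handling is simplified.
import shutil
import subprocess

def run_and_stream(cmd):
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT, text=True)
    for line in proc.stdout:
        print("mergePOOL.exe Report: %s" % line.strip())
    return proc.wait()

def hybrid_merge(input_files, output_file):
    # Step 1: merge event data, excluding metadata containers.
    cmd = ['mergePOOL.exe', '-o', 'events.pool.root']
    for f in input_files:
        cmd += ['-i', f]
    cmd += ['-e', 'MetaData', '-e', 'MetaDataHdrDataHeaderForm',
            '-e', 'MetaDataHdrDataHeader', '-e', 'MetaDataHdr']
    if run_and_stream(cmd) not in (0, 1):   # rc==1 is the "unknown status" case
        raise RuntimeError('mergePOOL.exe (event merge) encountered a problem')
    # Step 2: fold the metadata stub (the athena output) back in.
    if run_and_stream(['mergePOOL.exe', '-o', 'events.pool.root',
                       '-i', output_file]) not in (0, 1):
        raise RuntimeError('mergePOOL.exe (final merge) encountered a problem')
    shutil.move('events.pool.root', output_file)
```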
diff --git a/PhysicsAnalysis/PATJobTransforms/scripts/MergeRDO_trf.py b/PhysicsAnalysis/PATJobTransforms/scripts/MergeRDO_trf.py
deleted file mode 100755
index 0879402c79ebaeac38a1295bde607787901ad834..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/scripts/MergeRDO_trf.py
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-__doc__=""" Merging of RDO files """
-
-from PATJobTransforms.BaseOfBasicTrf import BaseOfBasicTrf
-from PyJobTransformsCore.trf import Author
-
-class MergeRDOJobTransform( BaseOfBasicTrf ):
-    def __init__(self,inDic,lastInChain=True):
-        BaseOfBasicTrf.__init__(self,inDic,
-                              authors = [ Author('Steven Beale','Steven.Beale@cern.ch'),
-                                          Author('John Chapman', '@cern.ch') ] ,
-                              skeleton='PATJobTransforms/skeleton.MergeRDO.py' ,
-                              help = __doc__, lastInChain=lastInChain )
-
-# execute the transform if not imported
-if __name__ == '__main__':
-    #Special preparation for command-line
-    import sys
-    from PATJobTransforms.ArgDicTools import BuildDicFromCommandLine
-    inDic=BuildDicFromCommandLine(sys.argv)
-    #Construct and execute the transform
-    trf = MergeRDOJobTransform(inDic)
-    sys.exit(trf.exeArgDict(inDic).exitCode())
diff --git a/PhysicsAnalysis/PATJobTransforms/scripts/Merging_trf.py b/PhysicsAnalysis/PATJobTransforms/scripts/Merging_trf.py
deleted file mode 100755
index 0b64f2bb08e4130a06adffd8ab3dc76973c76e37..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/scripts/Merging_trf.py
+++ /dev/null
@@ -1,199 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-__doc__ = """Merge several input pool files into a single output file. Make TAG from merged file."""
-
-from PyJobTransformsCore.trf import *
-from PyJobTransformsCore.full_trfarg import *
-from PyJobTransformsCore.trfutil import *
-
-from PATJobTransforms.BaseOfCompositeTrf import BaseOfCompositeTrf
-from PATJobTransforms.AODtoTAG_trf import AODtoTAGJobTransform
-from PATJobTransforms.AODtoDPD_trf import AODtoDPDJobTransform
-from PATJobTransforms.MergePool_trf import MergePoolJobTransform
-from PATJobTransforms.MergeHIT_trf import MergeHITSJobTransform
-from PATJobTransforms.MergeRDO_trf import MergeRDOJobTransform
-
-ListOfDefaultPositionalKeys=['maxEvents','skipEvents','RunNumber','fastPoolMerge','inputAODFile','inputESDFile','outputAODFile','outputESDFile','outputTAGFile','DBRelease','geometryVersion','conditionsTag','autoConfiguration','preInclude','postInclude','preExec','postExec','--ignoreerrors','--athenaopts','--omitvalidation','extraParameter','inputHitsFile','outputHitsFile','inputLogsFile','outputNTUP_SUSYFile','outputNTUP_TOPFile','outputNTUP_TOPELFile','outputNTUP_TOPMUFile','outputNTUP_TOPJETFile','outputNTUP_TOPEJETFile','outputNTUP_PHOTONFile', 'outputNTUP_FASTMONFile','outputNTUP_HSG2File','inputRDOFile','outputRDOFile']
-
-
-class MergingTransform( BaseOfCompositeTrf ):
-    def __init__(self, inDic):
-        BaseOfCompositeTrf.__init__(self,inputDic=inDic,ProdSysKeys=ListOfDefaultPositionalKeys,
-                                    name="Merging_trf",
-                                    authors = [ Author('David Cote','david.cote@cern.ch') ] ,
-                                    help = __doc__ )
-
-        self.dicMergeHITS=self.AddNewSubStep("mergeHITS",self.runMergeHITS)
-        self.dicMergeRDO=self.AddNewSubStep("mergeRDO",self.runMergeRDO)
-        self.dicMergePool=self.AddNewSubStep("merge",self.runMergePool)
-        self.dicAODToDPD=self.AddNewSubStep("a2d",self.runAODtoDPD)
-        self.dicAODToTAG=self.AddNewSubStep("a2t",self.runAODtoTAG)
-
-        #Internal sub-step configuration (i.e. fill dictionaries)
-        self.ConfigureInternalSubSteps()
-        self.ConfigureInputOutputs()
-        self.CommonSpecialFinalConfig()
-        return
-
-    def runJob(self):
-        #initialization...
-        allOK=True
-        mergeOK=False
-        tagAodOK=False
-        report=JobReport()
-        report.setProducer('MergingTrf')
-
-        #First setup DBRelease if requested (do not propagate this arg to sub-transforms)
-        if self.inDic.has_key('DBRelease'):
-            self.getArgument('DBRelease').install()
-
-        ######################
-        # HIT Merging
-        if(allOK and self.SubStepIsExecuted('mergeHITS')):
-            dic=self.dicMergeHITS.copy()            
-            print "MergeHITS dic:",dic
-            mHITS = MergeHITSJobTransform(dic,lastInChain=False)
-            mHITS.setParent(self)
-            mHITS.setJobReportOptions('Summary')
-            reportMergeHITS = mHITS.exeArgDict( dic )
-            report.addReport( reportMergeHITS )
-            mergeHITS_OK = ( reportMergeHITS.exitCode() == 0 )
-            print "mergeHITS_OK is ",mergeHITS_OK
-            allOK = (allOK and mergeHITS_OK)
-        else:
-            print "Skipping MergeHIT step..."
-
-        ######################
-        # RDO Merging
-        if(allOK and self.SubStepIsExecuted('mergeRDO')):
-            dic=self.dicMergeRDO.copy()
-            print "MergeRDO dic:",dic
-            mRDO = MergeRDOJobTransform(dic,lastInChain=False)
-            mRDO.setParent(self)
-            mRDO.setJobReportOptions('Summary')
-            reportMergeRDO = mRDO.exeArgDict(dic)
-            report.addReport( reportMergeRDO )
-            mergeRDO_OK = ( reportMergeRDO.exitCode() == 0 )
-            print "mergeRDO_OK is ", mergeRDO_OK
-            allOK = (allOK and mergeRDO_OK)
-        else:
-            print "Skipping MergeRDO step..."
-
-        ######################
-        # Pool Merging
-        # N.B. Option to use hybrid POOL merging has now moved to MergePool_trf itself 
-        if(allOK and self.SubStepIsExecuted('merge')):
-            dic=self.dicMergePool.copy()
-            print "MergePool dic:",dic
-            mPool = MergePoolJobTransform(dic,lastInChain=False)
-            mPool.setParent(self)
-            mPool.setJobReportOptions('Summary')
-            reportMergePool = mPool.exeArgDict( dic )
-            report.addReport( reportMergePool )
-            mergeOK = ( reportMergePool.exitCode() == 0 )
-            print "mergeOK is ",mergeOK
-            allOK = (allOK and mergeOK)
-        else:
-            print "Skipping MergePool step..."
-
-        ######################
-        # (merged) AOD->DPD
-        if(allOK and self.SubStepIsExecuted('a2d')):
-            dic=self.dicAODToDPD.copy()            
-            print "AODtoDPD dic:",dic
-            dpdAOD = AODtoDPDJobTransform(dic,lastInChain=False)
-            dpdAOD._lastInChain=False
-            dpdAOD.setParent(self)
-            dpdAOD.setJobReportOptions('Summary')
-            reportDpdAod = dpdAOD.exeArgDict( dic )
-            report.addReport( reportDpdAod )
-            dpdAodOK = ( reportDpdAod.exitCode() == 0 )
-            print "dpdAodOK is ",dpdAodOK
-            allOK = (allOK and dpdAodOK)
-        else:
-            print "Skipping AOD->DPD step..."
-
-        ######################
-        # (merged) AOD->TAG
-        if(allOK and self.SubStepIsExecuted('a2t')):
-            dic=self.dicAODToTAG.copy()            
-            print "AODtoTAG dic:",dic
-            tagAOD = AODtoTAGJobTransform(dic,lastInChain=False)
-            tagAOD.setParent(self)
-            tagAOD.setJobReportOptions('Summary')
-            reportTagAod = tagAOD.exeArgDict( dic )
-            report.addReport( reportTagAod )
-            tagAodOK = ( reportTagAod.exitCode() == 0 )
-            print "tagAodOK is ",tagAodOK
-            allOK = (allOK and tagAodOK)
-        else:
-            print "Skipping AOD->TAG step..."
-
-        return report
-
-
-    def ConfigureInternalSubSteps(self):
-        self.ConfigureCommonInternalSubSteps()
-        return
-
-    def ConfigureInputOutputs(self):
-        #Check for potential misconfigurations...        
-        if self.inDic.has_key('inputHitsFile') and ( self.inDic.has_key('outputESDFile') or self.inDic.has_key('outputTAGFile') or self.inDic.has_key('outputAODFile') ):
-            raise RuntimeError("input HITS and output ESD/AOD/TAG is not supported.")
-        # Disable this - logfile merging is now optional for Hits merging step
-#        if self.inDic.has_key('inputHitsFile') and not self.inDic.has_key('inputLogsFile'):
-#            raise RuntimeError("inputLogsFile must be provided to the HIT merging step.")
-        if self.inDic.has_key('inputAODFile') and self.inDic.has_key('outputESDFile'):
-            raise RuntimeError("input AOD and output ESD is not supported.")
-        if self.inDic.has_key('inputESDFile') and self.inDic.has_key('outputTAGFile'):
-            raise RuntimeError("input ESD and output TAG is not supported.")
-
-        #AOD->MergeAOD->TAG
-        if self.runMergeAOD() and self.hasOutput(self.dicAODToTAG):
-            #We overwrite dicAODToTAG['inputAODFile']. That's OK in this case.
-            self.dicAODToTAG['inputAODFile']=self.dicMergePool['outputAODFile']
-            print "AOD->MergeAOD->TAG requested. Input of TAG will be: '%s'."%self.dicAODToTAG['inputAODFile']
-        return
-
-    def GetFirstSubStep(self):
-        if self.runMergeHITS():
-            return ["mergeHITS"] #["dicMergeHIT"]
-        if self.runMergePool():
-            return ["merge"] #["dicMergePool"]
-        elif self.runAODtoTAG():
-            return ["a2t"] #["dicAODToTAG"]
-        return None
-
-    def runAODtoTAG(self):
-        return (self.hasInput(self.dicAODToTAG) and self.dicAODToTAG.has_key('outputTAGFile'))
-
-    def runAODtoDPD(self):
-        return (self.hasInput(self.dicAODToDPD) and self.hasOutput(self.dicAODToDPD))
-
-    def runMergePool(self):
-        return (self.runMergeAOD() or self.runMergeESD())
-
-    def runMergeHITS(self):
-        return (self.hasInput(self.dicMergeHITS) and self.dicMergeHITS.has_key('outputHitsFile'))
-
-    def runMergeAOD(self):
-        return (self.hasInput(self.dicMergePool) and self.dicMergePool.has_key('outputAODFile'))
-        
-    def runMergeESD(self):
-        return (self.hasInput(self.dicMergePool) and self.dicMergePool.has_key('outputESDFile'))
-        
-    def runMergeRDO(self):
-        return (self.hasInput(self.dicMergeRDO) and self.dicMergeRDO.has_key('outputRDOFile'))
-
-
-################# Python executable
-if __name__ == '__main__':
-    #Special preparation for command-line
-    import sys
-    from PATJobTransforms.ArgDicTools import BuildDicFromCommandLine
-    inDic=BuildDicFromCommandLine(sys.argv)
-    #Construct and execute the transform
-    trf = MergingTransform(inDic)          
-    sys.exit(trf.exeArgDict(inDic).exitCode())
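Merging_trf chains up to five sub-transforms, running each only while every earlier step has succeeded and accumulating every sub-report into the final job report. A minimal sketch of that control flow with stand-in types (not the real JobTransform/JobReport classes):

```python
# Sketch of the runJob control flow above: each sub-step runs only while
# all previous steps succeeded, and every report is collected regardless.
def run_chain(substeps):
    """substeps: list of (name, enabled, run) where run() returns an exit code."""
    reports = []
    all_ok = True
    for name, enabled, run in substeps:
        if all_ok and enabled:
            rc = run()
            reports.append((name, rc))
            all_ok = all_ok and (rc == 0)
        else:
            print("Skipping %s step..." % name)
    return all_ok, reports

ok, reports = run_chain([('mergeHITS', False, lambda: 0),
                         ('merge', True, lambda: 0),
                         ('a2t', True, lambda: 0)])
assert ok and reports == [('merge', 0), ('a2t', 0)]
```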
diff --git a/PhysicsAnalysis/PATJobTransforms/scripts/RAWtoRAWHLT_trf.py b/PhysicsAnalysis/PATJobTransforms/scripts/RAWtoRAWHLT_trf.py
deleted file mode 100755
index 91fc68bd7835f37e1d78ab9ffa476af6cda9d5c4..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/scripts/RAWtoRAWHLT_trf.py
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-
-__doc__ = """Read in raw data (byte stream), run HLT, and write out raw data with new HLT fragments."""
-
-from PyJobTransformsCore.trf import JobTransform,Author
-from PyJobTransformsCore.full_trfarg import SQLiteSupport
-from RecJobTransforms.RecConfig import recConfig
-from PATJobTransforms.BSutils import BSutils
-import os
-
-class RAWtoRAWHLTJobTransform( JobTransform ):
-    def __init__(self,inDic,lastInChain=True):
-        JobTransform.__init__(self,
-                              authors=[Author('Simon George','S.George@rhul.ac.uk')],
-                              skeleton='PATJobTransforms/skeleton.runHLT.py' ,
-                              help = __doc__,lastInChain=lastInChain)
-
-        if not isinstance(inDic,dict):
-            raise TypeError("inDic has %s but should be a dictionary." %type(inDic))
-
-        from PATJobTransforms.ConfigDicUtils import AutoConfigureFromDic
-        self.inDic=inDic
-        AutoConfigureFromDic(self,inDic)
-        
-        #add other options and features
-        self.add( SQLiteSupport() )
-
-    def doPreRunActions(self):
-        """ Cleans the run directory from old BS file """
-        outputbsfilearg  = self.getArgument("outputBSFile").value()
-        os.path.walk(os.curdir, BSutils.match_deleteOld_BS_datafile, '(data_test|'+outputbsfilearg+').*\.data')
-        # default prerun actions
-        JobTransform.doPreRunActions(self)
-
-    def doPostRunActions(self):
-        """ Renames the BS ouput file to the name requested in the transform """
-        outputbsfilearg  = self.getArgument("outputBSFile").value()
-        print "RAWtoRAWHLT_trf.py: Try to rename outputBSFile"
-        os.path.walk(os.curdir, BSutils.match_rename_BS_datafile, ['(data_test|'+outputbsfilearg+').*\.data',outputbsfilearg] )
-        # finish with default post-run actions
-        JobTransform.doPostRunActions(self)
-
-# execute it if not imported
-if __name__ == '__main__':
-    #Special preparation for command-line
-    import sys
-    from PATJobTransforms.ArgDicTools import BuildDicFromCommandLine
-    inDic=BuildDicFromCommandLine(sys.argv)
-    #Construct and execute the transform
-    trf = RAWtoRAWHLTJobTransform(inDic)
-    sys.exit(trf.exeArgDict(inDic).exitCode())
-
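The pre- and post-run hooks above scan the working directory for byte-stream data files whose names match a regex built from the requested output name, deleting stale ones before the run and renaming the fresh HLT output afterwards. Since os.path.walk is Python 2 only, here is a Python 3 sketch of the rename step (BSutils itself is not reproduced):

```python
# Sketch of the post-run rename: find a byte-stream data file matching the
# HLT output pattern in the current directory and rename it to the requested
# output name. The regex mirrors the one built in doPostRunActions above.
import os
import re

def rename_bs_datafile(output_name):
    pattern = re.compile(r'(data_test|%s).*\.data' % re.escape(output_name))
    for entry in os.listdir(os.curdir):
        if pattern.match(entry):
            os.rename(entry, output_name)
            return entry   # the file that was renamed
    return None
```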
diff --git a/PhysicsAnalysis/PATJobTransforms/scripts/UpdatePickledDic.py b/PhysicsAnalysis/PATJobTransforms/scripts/UpdatePickledDic.py
deleted file mode 100755
index 4086aa046731ae5b57885ea5a4bce9421c57f99d..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/scripts/UpdatePickledDic.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-import pickle,sys
-
-def GetCommand(sysArgv):
-    fname=sys.argv[1]
-    f = open(fname, 'r')
-    dic = pickle.load(f)
-    f.close()
-
-    print "original config:"
-    print dic,"\n"
-
-    dic['postExec']='PoolSvc.MaxFilesOpen = 32'
-
-    print "updated config:"
-    print dic
-
-    outName="updatedConfig.pickle"
-    f = open(outName, 'w')
-    pickle.dump(dic, f)
-    f.close()
-
-
-if __name__ == '__main__':
-    GetCommand(sys.argv)
-
-        
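UpdatePickledDic.py patches a pickled argument dictionary and writes the result to a new file. A Python 3 equivalent of the same round-trip; note that pickle files must be opened in binary mode there:

```python
#!/usr/bin/env python3
# Python 3 rendering of the UpdatePickledDic.py round-trip: load a pickled
# argument dictionary, patch one key, write a new pickle.
import pickle
import sys

def update_pickled_dic(in_name, out_name='updatedConfig.pickle'):
    with open(in_name, 'rb') as f:      # binary mode is required in Python 3
        dic = pickle.load(f)
    print('original config:\n%r\n' % dic)
    dic['postExec'] = 'PoolSvc.MaxFilesOpen = 32'
    print('updated config:\n%r' % dic)
    with open(out_name, 'wb') as f:
        pickle.dump(dic, f)

if __name__ == '__main__':
    update_pickled_dic(sys.argv[1])
```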
diff --git a/PhysicsAnalysis/PATJobTransforms/scripts/ValidateD3PD_trf.py b/PhysicsAnalysis/PATJobTransforms/scripts/ValidateD3PD_trf.py
deleted file mode 100755
index 5ee7aa412906a11a00b36d9c8954e620b4db087e..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/scripts/ValidateD3PD_trf.py
+++ /dev/null
@@ -1,65 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-__doc__ =""" Generate Validation D3PDs """
-
-import os
-import subprocess
-
-from PATJobTransforms.BaseOfBasicTrf import BaseOfBasicTrf
-from PyJobTransformsCore.trf import Author
-
-ListOfDefaultPositionalKeys=['maxEvents','inputESDFile','inputAODFile','preInclude','postInclude','preExec','postExec','--ignoreerrors','d3pdVal','outputNTUP_PHYSVALFile']
-
-#List of root files created by the transform (for merging)
-rootfiles= [ 'PhysVal_InDetPerf.root', 'PhysVal_BackTracking.root', 'PhysVal_MissingET.root', 'PhysVal_Jets.root', 'PhysVal_Tau.root', 'PhysVal_Electrons.root','PhysVal_MUONSPLACEHOLDER.root','PhysVal_Btag.root', 'PhysVal_SUSY.root', 'PhysVal_MonTop.root', 'PhysVal_Zee.root', 'PhysVal_Exotics.root', 'PhysVal_HSG6.root', 'PhysVal_Photons.root'] 
-
-class ValidationD3PDJobTransform( BaseOfBasicTrf ):
-    def __init__(self,inDic):
-        BaseOfBasicTrf.__init__(self,inDic,
-                                # Original author Steven Beale
-                                authors = [ Author('Graeme Stewart','graeme.andrew.stewart@cern.ch') ],
-                                skeleton='PATJobTransforms/skeleton.ValidateD3PD_trf.py' ,
-                                help = __doc__ )
-        
-        # Add the postRunAction associated with the transform.
-        self._addPostRunAction(self, prepend=True)
-
-    def postRunAction(self):
-        # Merge the individual perf NTUPs into one
-        inlist = [ ]
-        for file in rootfiles:
-            if os.path.exists(file):
-                inlist.append(file)
-  
-        if 'outputNTUP_PHYSVALFile' in self.inDic:
-            cmd = ['hadd', self.inDic['outputNTUP_PHYSVALFile']]
-        else:
-            print 'WARNING: No outputNTUP_PHYSVALFile name given - falling back to "PhysVal.root"'
-            cmd = ['hadd' , 'PhysVal.root'] 
-        cmd.extend(inlist)
-
-        print "Merging root files: '%s'" % cmd 
-
-        proc = subprocess.Popen(args = cmd,bufsize = 1, shell = False, stdout = subprocess.PIPE, stderr = subprocess.STDOUT)
-        while proc.poll() is None:
-            line = proc.stdout.readline()
-            if line:
-                print line
-
-        rc=proc.returncode
-
-        if not rc==0:
-            raise RuntimeError("hadd returned with value {0:d} instead of 0. Stopping!".format(rc))
-
-        print "Merging finished"
-
-
-if __name__ == '__main__':
-    #Special preparation for command-line
-    import sys
-    from PATJobTransforms.ArgDicTools import BuildDicFromCommandLine
-    inDic=BuildDicFromCommandLine(sys.argv)
-    #Construct and execute the transform
-    trf = ValidationD3PDJobTransform(inDic)
-    sys.exit(trf.exeArgDict(inDic).exitCode())
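The postRunAction above merges whichever PhysVal_*.root files were actually produced into a single NTUP via hadd. A compact sketch using subprocess.run in place of the Popen/poll loop:

```python
# Sketch of the postRunAction merge: feed hadd only the PhysVal_*.root
# files that exist and fail loudly on a non-zero return code.
import os
import subprocess

def merge_physval(rootfiles, output='PhysVal.root'):
    inlist = [f for f in rootfiles if os.path.exists(f)]
    cmd = ['hadd', output] + inlist
    print("Merging root files: '%s'" % cmd)
    rc = subprocess.run(cmd).returncode
    if rc != 0:
        raise RuntimeError(
            'hadd returned with value {0:d} instead of 0. Stopping!'.format(rc))
    print('Merging finished')
```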
diff --git a/PhysicsAnalysis/PATJobTransforms/scripts/WriteRecoTrfSignature.py b/PhysicsAnalysis/PATJobTransforms/scripts/WriteRecoTrfSignature.py
deleted file mode 100755
index aebc9300eacff1eed1e684703557b1124718c196..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/scripts/WriteRecoTrfSignature.py
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
-import pickle
-
-def WriteRecoTrfSignature():
-
-    signatureDic={}
-    signatureDic['InputAndOutputFileTypes']={}
-    signatureDic['phconfig']=[]
-
-
-    #A few examples of inputs and their possible outputs... (not all yet)
-    signatureDic['InputAndOutputFileTypes']['inputESDFile']=['outputAODFile','outputESDFile','outputNTUP_TRIGFile','outputDESDM_EGAMMAFile']
-    signatureDic['InputAndOutputFileTypes']['inputBSFile']=['outputBSFile','outputESDFile','outputCBNTFile','outputNTUP_TRKVALIDFile','outputTAG_COMMFile']
-    signatureDic['InputAndOutputFileTypes']['inputAODFile']=['outputTAGFile','outputDAODM_SGLMUFile']
-
-
-    #A few examples of physics configuration parameters... (not all yet)
-    signatureDic['phconfig'].append('autoConfiguration')
-    signatureDic['phconfig'].append('preExec')
-    signatureDic['phconfig'].append('postExec')
-    signatureDic['phconfig'].append('preInclude')
-    signatureDic['phconfig'].append('postInclude')
-    signatureDic['phconfig'].append('geometryVersion')
-    signatureDic['phconfig'].append('conditionsTag')
-    signatureDic['phconfig'].append('beamType')
-    
-    outName="Reco_trf.signature.pickle"
-    f = open(outName, 'w')
-    pickle.dump(signatureDic, f)
-    f.close()
-
-    print "salut Pavel"
-
-    #Towards an automated writing of the pickled dic...
-    from PATJobTransforms.Configuration import ConfigDic
-    #searching for outputs from ESD input
-    for key in ConfigDic.keys():
-        obj=ConfigDic[key]
-        if hasattr(obj,'isOutput') and hasattr(obj,'subSteps'):
-            for s in obj.subSteps:
-                if s.startswith('e2'):
-                    print key
-
-
-if __name__ == '__main__':
-    WriteRecoTrfSignature()
-
-        
diff --git a/PhysicsAnalysis/PATJobTransforms/scripts/rhadd.py b/PhysicsAnalysis/PATJobTransforms/scripts/rhadd.py
index 8795394417151b90569f433627b54a37f11bc69e..6068ecf7626745c8cf17447e031c542e731e2436 100755
--- a/PhysicsAnalysis/PATJobTransforms/scripts/rhadd.py
+++ b/PhysicsAnalysis/PATJobTransforms/scripts/rhadd.py
@@ -2,7 +2,7 @@
 
 # Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
 #
-# $Id: rhadd.py 543578 2013-04-04 13:45:02Z graemes $
+# $Id: rhadd.py 677431 2015-06-23 08:09:12Z graemes $
 #
 # Recursive histogram adder, wrapping around hadd
 # Author: Graeme A Stewart <graeme.andrew.stewart@cern.ch>
@@ -31,7 +31,7 @@ import os
 import sys
 
 from multiprocessing import Pool
-from subprocess import Popen, STDOUT, PIPE, CalledProcessError
+from subprocess import Popen, STDOUT, PIPE
 from tempfile import mkstemp
 
 logging.basicConfig(level=logging.INFO)
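rhadd.py merges histogram files in fixed-size batches with hadd and then recurses on the intermediate outputs until a single file remains. A sequential sketch of that recursion; the real script additionally fans the batches out over a multiprocessing.Pool:

```python
# Sketch of the recursive batching strategy behind rhadd.py. Batch size and
# temporary-file naming are illustrative, not the script's actual defaults.
import os
import subprocess
from tempfile import mkstemp

def rhadd(files, output, batch_size=10):
    # Base case: few enough files to merge in a single hadd call.
    if len(files) <= batch_size:
        subprocess.check_call(['hadd', '-f', output] + files)
        return output
    # Otherwise merge fixed-size batches into temporaries, then recurse.
    intermediates = []
    for i in range(0, len(files), batch_size):
        fd, tmp = mkstemp(suffix='.root')
        os.close(fd)
        intermediates.append(tmp)
        subprocess.check_call(['hadd', '-f', tmp] + files[i:i + batch_size])
    return rhadd(intermediates, output, batch_size)
```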
diff --git a/PhysicsAnalysis/PATJobTransforms/share/skeleton.AODtoDAOD_tf.py b/PhysicsAnalysis/PATJobTransforms/share/skeleton.AODtoDAOD_tf.py
index b82e3430440e73d577ffc73993d0712ec4a606b3..0cd75325c1fe11f421fb3782a998a257499a8532 100644
--- a/PhysicsAnalysis/PATJobTransforms/share/skeleton.AODtoDAOD_tf.py
+++ b/PhysicsAnalysis/PATJobTransforms/share/skeleton.AODtoDAOD_tf.py
@@ -1,13 +1,19 @@
 # Skeleton file for AOD to DAOD (Reduction framework) job
 #
-# $Id: skeleton.AODtoDAOD_tf.py 664818 2015-05-04 19:20:22Z blumen $
+# $Id: skeleton.AODtoDAOD_tf.py 731616 2016-03-22 15:25:39Z cranshaw $
 #
 from AthenaCommon.AlgSequence import AlgSequence
 from AthenaCommon.Logging import logging
-from OutputStreamAthenaPool.MultipleStreamManager import MSMgr
 msg = logging.getLogger('AODtoDAOD')
 msg.info( '****************** STARTING AOD->DAOD MAKING *****************' )
 
+def getSubSequences(sequence,sequenceList):
+    sequenceList.append(sequence)
+    for item in sequence:
+        if type(item).__name__ == 'AthSequencer':
+            getSubSequences(item,sequenceList)
+    return
+
 if hasattr(runArgs, "reductionConf"):
     msg.info('Will attempt to make the following reduced formats: {0}'.format(runArgs.reductionConf))
 else:
@@ -16,18 +22,28 @@ else:
 
 include("RecJobTransforms/CommonRecoSkeletonJobOptions.py")
 
+try:
+    import os
+    project = os.environ ['AtlasProject']
+    version = os.environ ['AtlasVersion']
+    release = project + '-' + version
+    rec.AtlasReleaseVersion = release
+except KeyError:
+    print "WARNING: Unable to construct AtlasReleaseVersion from environment"
 
 if hasattr(runArgs,"inputAODFile"):
     globalflags.InputFormat.set_Value_and_Lock('pool')
     rec.readAOD.set_Value_and_Lock( True )
     rec.readRDO.set_Value_and_Lock( False )
     rec.doDPD.set_Value_and_Lock(True)
+    rec.OutputFileNameForRecoStep.set_Value_and_Lock("AODtoDAOD")
     athenaCommonFlags.PoolAODInput.set_Value_and_Lock( runArgs.inputAODFile )
 elif hasattr(runArgs,'inputEVNTFile') or hasattr(runArgs,'jobConfig'):
     # Assume that we're running from EVNT or straight through evgen
     globalflags.InputFormat.set_Value_and_Lock('pool')
     rec.readAOD.set_Value_and_Lock( True )
     rec.readRDO.set_Value_and_Lock( False )
+    rec.OutputFileNameForRecoStep.set_Value_and_Lock("EVNTtoDAOD")
     rec.AutoConfiguration.set_Value_and_Lock(['ProjectName','BeamType','RealOrSim','DoTruth','InputType'])
     rec.doInDet.set_Value_and_Lock(False)
     rec.doCalo.set_Value_and_Lock(False)
@@ -81,18 +97,17 @@ if hasattr(runArgs,"topOptions"): include(runArgs.topOptions)
 else: include( "RecExCommon/RecExCommon_topOptions.py" )
 
 # Intervene and strip SkimmingTools from algs if pass through mode requested
+# recursion is necessary to ensure all kernels from all subsequences are
+# caught
 if passThroughMode:
-    seq = AlgSequence()
-    for stream in MSMgr.StreamList:
-        evtStream = stream.GetEventStream()
-        for alg in list( set(evtStream.AcceptAlgs) | set(evtStream.RequireAlgs) | set(evtStream.VetoAlgs) ):
-            for item in seq:
-                if item.name() == alg:
-                    item.SkimmingTools = []
-    msgFresh = logging.getLogger('AODtoDAOD')
-    msgFresh.info( 'Pass through mode was requested. Skimming tools have been removed from all kernels.')
-    MSMgr.Print()
-
+    sequenceList = []
+    mainSeq = AlgSequence()
+    getSubSequences(mainSeq,sequenceList)
+    for seq in sequenceList:
+        for item in seq:
+            if type(item).__name__=='DerivationFramework__DerivationKernel':
+                item.SkimmingTools = []
+    msg.info( 'Pass through mode was requested. Skimming tools have been removed from all kernels.')
 
 ## Post-include
 if hasattr(runArgs,"postInclude"): 
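The getSubSequences helper added above exists because derivation kernels may sit inside nested AthSequencer instances, so pass-through mode must walk the whole sequence tree before emptying SkimmingTools. A toy illustration of the recursion with a stand-in sequence type:

```python
# Toy illustration of the getSubSequences recursion: walk a nested sequence
# tree and collect every sub-sequence, so kernels at any depth can be found.
# Seq stands in for AthSequencer; the strings stand in for kernels.
class Seq(list):
    pass

def get_sub_sequences(sequence, sequence_list):
    sequence_list.append(sequence)
    for item in sequence:
        if isinstance(item, Seq):
            get_sub_sequences(item, sequence_list)

tree = Seq(['kernelA', Seq(['kernelB', Seq(['kernelC'])])])
found = []
get_sub_sequences(tree, found)
assert len(found) == 3   # the top sequence plus two nested ones
```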
diff --git a/PhysicsAnalysis/PATJobTransforms/share/skeleton.AODtoDPD_trf.py b/PhysicsAnalysis/PATJobTransforms/share/skeleton.AODtoDPD_trf.py
deleted file mode 100644
index e7bc6d0b9ebe93b6728bdaa8f50d65f0a1ffabe1..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/share/skeleton.AODtoDPD_trf.py
+++ /dev/null
@@ -1,221 +0,0 @@
-###############################################################
-#
-# Skeleton top job options for AOD->DPD 
-# Put here outputs that require rec.doAOD=False
-#
-#==============================================================
-
-#Common job options disable most RecExCommon by default. Re-enable below on demand.
-include("PATJobTransforms/CommonSkeletonJobOptions.py")
-rec.doAOD=False
-
-from AthenaCommon.Logging import logging
-recoLog = logging.getLogger('aod_to_dpd')
-recoLog.info( '****************** STARTING AOD->DPD MAKING *****************' )
-
-
-## Automatically turn ON/OFF and set output file name of each possible DPD
-listOfFlags=[]
-try:
-    from PrimaryDPDMaker.PrimaryDPDFlags import primDPD
-    listOfFlags.append(primDPD)
-except ImportError:
-    print "WARNING PrimaryDPDFlags not available. Only OK if you're using job transforms without the AtlasAnalysis project."
-try:
-    from D2PDMaker.D2PDFlags import D2PDFlags
-    listOfFlags.append(D2PDFlags)
-except ImportError:
-    print "WARNING D2PDFlags not available. Requires D2PDMaker-00-00-50 in AtlasAnalysis."
-try:
-    from TopPhysD2PDMaker.TopPhysD2PDFlags import topPhysDPD
-    listOfFlags.append(topPhysDPD)
-except ImportError:
-    print "WARNING TopPhysD2PDFlags not available. Only OK if you're using job transforms without the AtlasAnalysis project."
-try:
-    from D3PDMakerConfig.D3PDProdFlags import prodFlags
-    listOfFlags.append( prodFlags )
-except ImportError:
-    print "WARNING D3PDProdFlags not available. Only OK if you're using job transforms without the AtlasAnalysis project."
-
-from PATJobTransforms.DPDUtils import SetupOutputDPDs
-rec.DPDMakerScripts.append(SetupOutputDPDs(runArgs,listOfFlags))
-
-
-from PATJobTransforms.OutputsMgr import outputsMgr,magicKey
-for key in dir(runArgs):
-    if key.startswith(magicKey):
-        outputsMgr.addOutput(key,getattr(runArgs,key))
-
-## Input
-if hasattr(runArgs,"inputFile"): athenaCommonFlags.FilesInput.set_Value_and_Lock( runArgs.inputFile )
-if hasattr(runArgs,"inputEVNTFile"): athenaCommonFlags.FilesInput.set_Value_and_Lock( runArgs.inputEVNTFile )
-if hasattr(runArgs,"inputAODFile"):
-    globalflags.InputFormat.set_Value_and_Lock('pool')
-    rec.readAOD.set_Value_and_Lock( True )
-    rec.readRDO.set_Value_and_Lock( False )
-    athenaCommonFlags.PoolAODInput.set_Value_and_Lock( runArgs.inputAODFile )
-if hasattr(runArgs,"inputTAGFile") or hasattr(runArgs,"inputTAG_AODFile"):
-    #for TAG->AOD->skimmedAOD
-    rec.readTAG.set_Value_and_Lock( True )
-    rec.readAOD.set_Value_and_Lock( True )
-    rec.doAOD.set_Value_and_Lock( False )
-    rec.TAGFromRDO.set_Value_and_Lock( False )
-        
-    if hasattr(runArgs,"inputTAGFile"):
-        athenaCommonFlags.FilesInput.set_Value_and_Lock( runArgs.inputTAGFile )
-    else:
-        athenaCommonFlags.FilesInput.set_Value_and_Lock( runArgs.inputTAG_AODFile )
-
-# Keep track of whether an output format file is requested:
-outputRequested = False
-
-## Outputs
-if hasattr(runArgs,"outputAODFile"):
-    #for TAG->AOD->skimmedAOD
-    rec.doWriteAOD.set_Value_and_Lock( True )
-    athenaCommonFlags.PoolAODOutput.set_Value_and_Lock( runArgs.outputAODFile )
-    outputRequested = True
-
-if hasattr(runArgs,"outputNTUP_BTAGFile"):
-    from BTagging.BTaggingFlags import BTaggingFlags
-    BTaggingFlags.doJetTagNtuple = True
-    BTaggingFlags.JetTagNtupleName = runArgs.outputNTUP_BTAGFile
-    outputRequested = True
-
-if hasattr(runArgs,"outputNTUP_PROMPTPHOTFile"):
-    from PhotonAnalysisUtils.PhotonAnalysisUtilsFlags import PAUflags
-    PAUflags.FileName = runArgs.outputNTUP_PROMPTPHOTFile
-    #little hack while autoConfiguration=everything is still not the default...
-    if hasattr(runArgs,"inputAODFile") and not hasattr(runArgs,"inputFile"):
-        athenaCommonFlags.FilesInput.set_Value_and_Lock( runArgs.inputAODFile )
-    outputRequested = True
-
-if hasattr(runArgs,"outputNTUP_SMEWFile"):
-    from WWAnalyze.WWD3PDFlags import WWD3PDFlags
-    WWD3PDFlags.OutputFilename = runArgs.outputNTUP_SMEWFile
-    #little hack while autoConfiguration=everything is still not the default...
-    if hasattr(runArgs,"inputAODFile") and not hasattr(runArgs,"inputFile"):
-        athenaCommonFlags.FilesInput.set_Value_and_Lock( runArgs.inputAODFile )
-    outputRequested = True
-
-if hasattr(runArgs,"outputNTUP_SUSYTRUTHFile"):
-    from TruthD3PDMaker.TruthD3PDMakerFlags import TruthD3PDFlags
-    TruthD3PDFlags.TruthD3PDOutputFileName = runArgs.outputNTUP_SUSYTRUTHFile
-    include("TruthD3PDMaker/TruthSusyD3PDfromEVGEN_preInclude.py")
-    outputRequested = True
-if hasattr(runArgs,"outputNTUP_TRUTHFile"):
-    from TruthD3PDMaker.TruthD3PDMakerFlags import TruthD3PDFlags
-    TruthD3PDFlags.TruthD3PDOutputFileName = runArgs.outputNTUP_TRUTHFile
-    include("TruthD3PDMaker/TruthD3PDfromEVGEN_preInclude.py")
-    outputRequested = True
-
-if hasattr(runArgs,"outputDAOD_2LHSG2File"):
-    #FIXME: input/outputs should be configured via job properties instead of directly using the runArgs object
-    from HSG2DPDUtils import HSG2DPDFlags
-    outputRequested = True
-if hasattr(runArgs,"outputDAOD_HSG2File"):
-    #FIXME: input/outputs should be configured via job properties instead of directly using the runArgs object
-    from HSG2DPDUtils import HSG2DPDFlags
-    outputRequested = True
-
-
-if hasattr(runArgs,"outputNTUP_1LHSG2File"):
-    #FIXME: input/outputs should be configured via job properties instead of directly using the runArgs object
-    from HSG2DPDUtils import HSG2DPDFlags
-    outputRequested = True
-if hasattr(runArgs,"outputNTUP_2LHSG2File"):
-    #FIXME: input/outputs should be configured via job properties instead of directly using the runArgs object
-    from HSG2DPDUtils import HSG2DPDFlags
-    outputRequested = True
-if hasattr(runArgs,"outputNTUP_HSG2File"):
-    #FIXME: input/outputs should be configured via job properties instead of directly using the runArgs object
-    from HSG2DPDUtils import HSG2DPDFlags
-    outputRequested = True
-
-
- 
-if hasattr(runArgs,"outputNTUP_SCTFile"):
-    from TrackD3PDMaker.TrackD3PDMakerSCTFlags import TrackD3PDSCTFlags
-    TrackD3PDSCTFlags.outputFile = runArgs.outputNTUP_SCTFile
-    if hasattr(runArgs,"inputESDFile") and not hasattr(runArgs,"inputFile"):
-        athenaCommonFlags.FilesInput.set_Value_and_Lock( runArgs.inputESDFile )
-    outputRequested = True
-
-
-if hasattr(runArgs,"outputNTUP_FASTMONFile"):
-    rec.doFastPhysMonitoring.set_Value_and_Lock(True)
-    rec.RootFastPhysMonOutput.set_Value_and_Lock(runArgs.outputNTUP_FASTMONFile)
-    outputRequested = True
-    
-
-
-#Import D3PD flags before preExec, for convenience
-from D3PDMakerConfig.D3PDProdFlags  import oldProdFlags
-from D3PDMakerConfig.D3PDMakerFlags import D3PDMakerFlags
-from SUSYD3PDMaker.SUSYD3PDFlags    import SUSYD3PDFlags
-
-## Pre-exec
-if hasattr(runArgs,"preExec"):
-    recoLog.info("transform pre-exec")
-    for cmd in runArgs.preExec:
-        recoLog.info(cmd)
-        exec(cmd)
-
-## Pre-include
-if hasattr(runArgs,"preInclude"): 
-    for fragment in runArgs.preInclude:
-        include(fragment)
-
-## Pre-includes defined for the DPDs:
-from PATJobTransforms.DPDUtils import SetupDPDPreIncludes
-dpdPreIncludeUsed = SetupDPDPreIncludes(runArgs,listOfFlags)
-if outputRequested and dpdPreIncludeUsed:
-    recoLog.error( "Multiple output types requested with pre-includes present" )
-    recoLog.error( "This will most probably lead to weird output" )
-    pass
-
-#========================================================
-# Central topOptions (this one is a string, not a list)
-#========================================================
-if hasattr(runArgs,"topOptions"): include(runArgs.topOptions)
-else: include( "RecExCommon/RecExCommon_topOptions.py" )
-
-
-# Skimming options
-if hasattr(runArgs,"eventSelectorQuery"):
-    svcMgr.EventSelector.RefName= "StreamAOD"
-    svcMgr.EventSelector.CollectionType="ExplicitROOT"
-    svcMgr.EventSelector.Query=runArgs.eventSelectorQuery
-
-
-## Make "old style" D3PDs.
-for c in SetupOutputDPDs(runArgs, [oldProdFlags]): c()
-
-## Offline prescales (has to be *after* the topOptions)
-if hasattr(runArgs,"prescales"):
-    recoLog.info( '**** DPD offline prescale arguments:' )
-    from PrimaryDPDMaker.JobTransformConfiguration import ApplyPrescale
-    for prescale in runArgs.prescales:
-        recoLog.info( prescale )
-        ApplyPrescale(prescale)
-
-## Post-include
-if hasattr(runArgs,"postInclude"): 
-    for fragment in runArgs.postInclude:
-        include(fragment)
-
-## Post-includes defined for the DPDs:
-from PATJobTransforms.DPDUtils import SetupDPDPostIncludes
-dpdPostIncludeUsed = SetupDPDPostIncludes(runArgs,listOfFlags)
-if outputRequested and dpdPostIncludeUsed:
-    recoLog.error( "Multiple output types requested with post-includes present" )
-    recoLog.error( "This will most probably lead to weird output" )
-    pass
-
-## Post-exec
-if hasattr(runArgs,"postExec"):
-    recoLog.info("transform post-exec")
-    for cmd in runArgs.postExec:
-        recoLog.info(cmd)
-        exec(cmd)
-
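This deleted skeleton wires outputs up in two ways: per-format flag blocks, plus a generic scan that registers any runArgs attribute whose name carries the OutputsMgr "magic" prefix. A sketch of that scan; the prefix value here is an assumption, since the real magicKey is defined in OutputsMgr:

```python
# Sketch of the output-discovery idiom used in this skeleton: any runArgs
# attribute whose name starts with the magic prefix is treated as an output
# file. SimpleNamespace stands in for the real runArgs object.
from types import SimpleNamespace

MAGIC_KEY = 'output'   # assumption: the actual magicKey comes from OutputsMgr

def collect_outputs(run_args):
    return {key: getattr(run_args, key)
            for key in dir(run_args) if key.startswith(MAGIC_KEY)}

runArgs = SimpleNamespace(inputAODFile=['in.AOD.root'],
                          outputNTUP_BTAGFile='btag.root')
assert collect_outputs(runArgs) == {'outputNTUP_BTAGFile': 'btag.root'}
```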
diff --git a/PhysicsAnalysis/PATJobTransforms/share/skeleton.AODtoTAG_trf.py b/PhysicsAnalysis/PATJobTransforms/share/skeleton.AODtoTAG_trf.py
deleted file mode 100644
index 7d8ee5646e0df8d466f403165c35290537685b7a..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/share/skeleton.AODtoTAG_trf.py
+++ /dev/null
@@ -1,72 +0,0 @@
-###############################################################
-#
-# Skeleton top job options for AOD->TAG
-#
-#==============================================================
-
-#Common job options disable most RecExCommon by default. Re-enable below on demand.
-include("PATJobTransforms/CommonSkeletonJobOptions.py")
-rec.doWriteTAG=True
-
-from AthenaCommon.Logging import logging
-recoLog = logging.getLogger('aod_to_tag')
-recoLog.info( '****************** STARTING AOD->TAG MAKING *****************' )
-
-## Input
-if hasattr(runArgs,"inputFile"): athenaCommonFlags.FilesInput.set_Value_and_Lock( runArgs.inputFile )
-if hasattr(runArgs,"inputAODFile"):
-    globalflags.InputFormat.set_Value_and_Lock('pool')
-    rec.readAOD.set_Value_and_Lock( True )
-    rec.readRDO.set_Value_and_Lock( False )
-    athenaCommonFlags.PoolAODInput.set_Value_and_Lock( runArgs.inputAODFile )
-
-#set all output file names
-if hasattr(runArgs,"outputTAGFile"):
-    athenaCommonFlags.PoolTAGOutput.set_Value_and_Lock( runArgs.outputTAGFile )
-
-
-if hasattr(runArgs,"outputNTUP_FASTMONFile"):
-    rec.doFastPhysMonitoring.set_Value_and_Lock(True)
-    rec.RootFastPhysMonOutput.set_Value_and_Lock(runArgs.outputNTUP_FASTMONFile)
-
-## Pre-exec
-if hasattr(runArgs,"preExec"):
-    recoLog.info("transform pre-exec")
-    for cmd in runArgs.preExec:
-        recoLog.info(cmd)
-        exec(cmd)
-
-## Pre-include
-if hasattr(runArgs,"preInclude"): 
-    for fragment in runArgs.preInclude:
-        include(fragment)
-
-#========================================================
-# Central topOptions (this one is a string, not a list)
-#========================================================
-if hasattr(runArgs,"topOptions"): include(runArgs.topOptions)
-else: include("RecExCommon/RecExCommon_topOptions.py")
-
-if rec.Commissioning:
-    Service("GeoModelSvc").IgnoreTagDifference = True
-    Service("GeoModelSvc").OutputLevel=6
-
-if rec.doWriteAOD:
-   if rec.readAOD():
-       try:
-           StreamAOD.ExtendProvenanceRecord = False
-       except:
-           print "StreamAOD was not defined, cannot set ExtendProvenanceRecord = False. Check your flags."
-
-## Post-include
-if hasattr(runArgs,"postInclude"): 
-    for fragment in runArgs.postInclude:
-        include(fragment)
-
-## Post-exec
-if hasattr(runArgs,"postExec"):
-    recoLog.info("transform post-exec")
-    for cmd in runArgs.postExec:
-        recoLog.info(cmd)
-        exec(cmd)
-
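Both skeletons apply preExec/postExec hooks the same way: each hook is a list of Python statements, logged and then exec'd in order. A minimal sketch of the pattern (the hook string and scope are illustrative; in the real skeletons the statements run in the job-options namespace):

```python
# Sketch of the preExec/postExec hook pattern shared by these skeletons:
# log each command, then execute it. exec() is deliberate here; hooks are
# trusted operator input in this system.
import logging

logging.basicConfig(level=logging.INFO)
log = logging.getLogger('aod_to_tag')

def apply_hooks(commands, scope):
    for cmd in commands:
        log.info(cmd)
        exec(cmd, scope)

scope = {}
apply_hooks(["rec_doWriteTAG = True"], scope)
assert scope['rec_doWriteTAG'] is True
```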
diff --git a/PhysicsAnalysis/PATJobTransforms/share/skeleton.DQHistogramMerge_trf.py b/PhysicsAnalysis/PATJobTransforms/share/skeleton.DQHistogramMerge_trf.py
deleted file mode 100644
index e07e684103aa616ae322baef3bbe53274ce515af..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/share/skeleton.DQHistogramMerge_trf.py
+++ /dev/null
@@ -1,47 +0,0 @@
-###############################################################
-#
-# Skeleton top job options for DQHistogramMerge_trf
-#
-#==============================================================
-
-#hack: we are forced to use athena (as a dummy) within the current PyJobTransformsCore
-theApp.EvtMax=1
-
-# merge and/or rename monitoring histogram file
-# file with list of ROOT files to merge
-mergeListFile=open('hist_merge_list.txt','w')
-inFiles=runArgs.inputFile
-for f in inFiles:
-    mergeListFile.write( str(f) + '\n' )    
-mergeListFile.close()
-
-#
-#
-# This is the old method which was causing stray DQHistogramMerge processes.
-#
-# call DQHistogramMerge
-#cmd = 'DQHistogramMerge.py hist_merge_list.txt %s False' % (runArgs.outputHISTFile)
-#cmd = 'sleep 500' 
-#import commands
-#(status, output) = commands.getstatusoutput(cmd)
-
-import os,subprocess
-#writing in a temp file should allow termination eventually even if parent is killed
-tmpbuff=os.tmpfile()
-dqhistpipe=subprocess.Popen(["DQHistogramMerge.py", "hist_merge_list.txt", runArgs.outputHISTFile, "False"],
-                            stdout=tmpbuff, stderr=tmpbuff, shell=False)
-status=dqhistpipe.wait()
-
-print "---------------------------------------------------------------------------------------"
-print '## Output of \'DQHistogramMerge.py hist_merge_list.txt '  + runArgs.outputHISTFile + ' False\':'
-try:
-    tmpbuff.seek(0)
-    for line in tmpbuff:
-        print line,
-finally:
-    tmpbuff.close()
-print '## DQHistogramMerge.py finished with retcode = %s' % (status)
-print "---------------------------------------------------------------------------------------"
-
-if not status==0:
-    raise RuntimeError("DQ HiST merging did NOT work. Stopping!")
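The tempfile-buffered subprocess call above was the workaround for stray DQHistogramMerge processes left behind by the older commands.getstatusoutput approach. A modern equivalent that captures output directly (requires Python 3.7+ for capture_output):

```python
#!/usr/bin/env python3
# Modern rendering of the capture-via-tempfile pattern above: run
# DQHistogramMerge.py, capture merged stdout/stderr, echo it, and raise on
# failure. The command line mirrors the skeleton.
import subprocess

def merge_dq_histograms(output_hist_file):
    cmd = ['DQHistogramMerge.py', 'hist_merge_list.txt',
           output_hist_file, 'False']
    result = subprocess.run(cmd, capture_output=True, text=True,
                            stderr=subprocess.STDOUT if False else None) \
        if False else subprocess.run(cmd, capture_output=True, text=True)
    print(result.stdout)
    print(result.stderr)
    print('## DQHistogramMerge.py finished with retcode = %s' % result.returncode)
    if result.returncode != 0:
        raise RuntimeError('DQ HIST merging did NOT work. Stopping!')
```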
diff --git a/PhysicsAnalysis/PATJobTransforms/share/skeleton.ESDtoDPD_tf.py b/PhysicsAnalysis/PATJobTransforms/share/skeleton.ESDtoDPD_tf.py
index 4d2d01f3ea9b1091e7699ff969cc6e9becdb4929..87edb734e8eb43abf79a6674375a71736c6bc9de 100644
--- a/PhysicsAnalysis/PATJobTransforms/share/skeleton.ESDtoDPD_tf.py
+++ b/PhysicsAnalysis/PATJobTransforms/share/skeleton.ESDtoDPD_tf.py
@@ -35,11 +35,6 @@ from PATJobTransforms.DPDUtils import SetupOutputDPDs
 rec.DPDMakerScripts.append(SetupOutputDPDs(runArgs,listOfFlags))
 
 
-from PATJobTransforms.OutputsMgr import outputsMgr,magicKey
-for key in dir(runArgs):
-    if key.startswith(magicKey):
-        outputsMgr.addOutput(key,getattr(runArgs,key))
-
 rec.OutputFileNameForRecoStep="ESDtoDPD"
 
 ## Input
@@ -165,11 +160,6 @@ if hasattr(runArgs,"preInclude"):
     for fragment in runArgs.preInclude:
         include(fragment)
 
-# temporary hack (proper fix would be to cleanly protect all DESD building code against missing trigger)
-if not rec.doTrigger:
-    rec.doDPD.set_Value_and_Lock(False)
-    rec.DPDMakerScripts.set_Value_and_Lock([])
-
 #========================================================
 # Central topOptions (this one is a string, not a list)
 #========================================================
diff --git a/PhysicsAnalysis/PATJobTransforms/share/skeleton.ESDtoDPD_trf.py b/PhysicsAnalysis/PATJobTransforms/share/skeleton.ESDtoDPD_trf.py
deleted file mode 100644
index 741f0c68850c74a887e7bd819dc580cb87752602..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/share/skeleton.ESDtoDPD_trf.py
+++ /dev/null
@@ -1,219 +0,0 @@
-###############################################################
-#
-# Skeleton top job options for ESD->DPD 
-# Put here outputs that require rec.doESD=False
-#
-#==============================================================
-
-#Common job options disable most RecExCommon by default. Re-enable below on demand.
-include("PATJobTransforms/CommonSkeletonJobOptions.py")
-rec.doESD=False
-from AthenaCommon.Logging import logging
-recoLog = logging.getLogger('esd_to_dpd')
-recoLog.info( '****************** STARTING ESD->DPD MAKING *****************' )
-
-## Automatically turn ON/OFF and set output file name of each possible DPD
-listOfFlags=[]
-try:
-    from PrimaryDPDMaker.PrimaryDPDFlags import primDPD
-    listOfFlags.append(primDPD)
-except ImportError:
-    print "WARNING PrimaryDPDFlags not available. Only OK if you're using job transforms without the AtlasAnalysis project."
-try:
-    from D2PDMaker.D2PDFlags import D2PDFlags
-    listOfFlags.append(D2PDFlags)
-except ImportError:
-    print "Unable to import listAODtoD2PD. This requires D2PDMaker-00-00-55-08 or D2PDMaker-00-00-62"
-try:
-    from D3PDMakerConfig.D3PDProdFlags import prodFlags
-    listOfFlags.append( prodFlags )
-except ImportError:
-    print "WARNING D3PDProdFlags not available. Only OK if you're using job transforms without the AtlasAnalysis project."
-
-
-from PATJobTransforms.DPDUtils import SetupOutputDPDs
-rec.DPDMakerScripts.append(SetupOutputDPDs(runArgs,listOfFlags))
-
-
-from PATJobTransforms.OutputsMgr import outputsMgr,magicKey
-for key in dir(runArgs):
-    if key.startswith(magicKey):
-        outputsMgr.addOutput(key,getattr(runArgs,key))
-
-rec.OutputFileNameForRecoStep="ESDtoDPD"
-
-## Input
-if hasattr(runArgs,"inputFile"): athenaCommonFlags.FilesInput.set_Value_and_Lock( runArgs.inputFile )
-if hasattr(runArgs,"inputBSFile"):
-    globalflags.InputFormat.set_Value_and_Lock('bytestream')
-    athenaCommonFlags.BSRDOInput.set_Value_and_Lock( runArgs.inputBSFile )
-if hasattr(runArgs,"inputRDOFile"):
-    rec.readRDO.set_Value_and_Lock( True )
-    globalflags.InputFormat.set_Value_and_Lock('pool')
-    athenaCommonFlags.PoolRDOInput.set_Value_and_Lock( runArgs.inputRDOFile )
-if hasattr(runArgs,"inputESDFile"):
-    globalflags.InputFormat.set_Value_and_Lock('pool')
-    rec.readESD.set_Value_and_Lock( True )
-    athenaCommonFlags.PoolESDInput.set_Value_and_Lock( runArgs.inputESDFile )
-if hasattr(runArgs,"inputTAGFile"):
-    rec.readTAG.set_Value_and_Lock( True )
-    rec.readESD.set_Value_and_Lock( True )
-    rec.doESD.set_Value_and_Lock( False )
-    rec.doWriteAOD.set_Value_and_Lock( False )
-    rec.TAGFromRDO.set_Value_and_Lock( False )
-    athenaCommonFlags.FilesInput.set_Value_and_Lock( runArgs.inputTAGFile )
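-# Illustrative invocation (hypothetical file names): command-line arguments such as
-#   inputESDFile=myESD.pool.root outputNTUP_WZFile=myWZ.root
-# surface here as runArgs.inputESDFile / runArgs.outputNTUP_WZFile and drive the
-# input and output configuration in this skeleton.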
-
-# Keep track of whether an output format file is requested:
-outputRequested = False
-
-#Outputs
-if hasattr(runArgs,"outputNTUP_PROMPTPHOTFile"):
-    from PhotonAnalysisUtils.PhotonAnalysisUtilsFlags import PAUflags
-    PAUflags.FileName = runArgs.outputNTUP_PROMPTPHOTFile
-    #little hack while autoConfiguration=everything is still not the default...
-    if hasattr(runArgs,"inputESDFile") and not hasattr(runArgs,"inputFile"):
-        athenaCommonFlags.FilesInput.set_Value_and_Lock( runArgs.inputESDFile )
-    outputRequested = True
-
-if hasattr(runArgs,"outputNTUP_WZFile"):
-    from D3PDMakerConfig.D3PDProdFlags import prodFlags
-    prodFlags.WZOutputFile=runArgs.outputNTUP_WZFile
-    #little hack while autoConfiguration=everything is still not the default...
-    if hasattr(runArgs,"inputESDFile") and not hasattr(runArgs,"inputFile"):
-        athenaCommonFlags.FilesInput.set_Value_and_Lock( runArgs.inputESDFile )
-    outputRequested = True
-
-if hasattr(runArgs,"outputNTUP_TRTFile"):
-    from ConversionDumper.ConversionDumperFlags import CDflags
-    CDflags.FileName=runArgs.outputNTUP_TRTFile
-    #little hack while autoConfiguration=everything is still not the default...
-    if hasattr(runArgs,"inputESDFile") and not hasattr(runArgs,"inputFile"):
-        athenaCommonFlags.FilesInput.set_Value_and_Lock( runArgs.inputESDFile )
-    outputRequested = True
-
-if hasattr(runArgs,"outputNTUP_HECNOISEFile"):
-    from LArCalibTest.HECNoiseD3PDFlags import HECNoiseflags
-    HECNoiseflags.FileName=runArgs.outputNTUP_HECNOISEFile
-    rec.UserAlgs += ['LArCalibTest/HECD3PDFilter.py']
-    #little hack while autoConfiguration=everything is still not the default...
-    if hasattr(runArgs,"inputESDFile") and not hasattr(runArgs,"inputFile"):
-        athenaCommonFlags.FilesInput.set_Value_and_Lock( runArgs.inputESDFile )
-    outputRequested = True
-
-if hasattr(runArgs,"outputNTUP_MCPFile"):
-    from MuonIDNtupleMakers.MuonIDNtupleMakersFlags import MNMFlags
-    MNMFlags.outputFile = runArgs.outputNTUP_MCPFile
-    MNMFlags.inputFiles = runArgs.inputESDFile
-    outputRequested = True
-
-if hasattr(runArgs,"outputNTUP_SCTFile"):
-    from TrackD3PDMaker.TrackD3PDMakerSCTFlags import TrackD3PDSCTFlags
-    TrackD3PDSCTFlags.outputFile = runArgs.outputNTUP_SCTFile
-    if hasattr(runArgs,"inputESDFile") and not hasattr(runArgs,"inputFile"):
-        athenaCommonFlags.FilesInput.set_Value_and_Lock( runArgs.inputESDFile )
-    outputRequested = True
-
-if hasattr(runArgs,"outputESDFile"):
-    #for TAG->ESD->skimmedESD
-    rec.doWriteESD.set_Value_and_Lock( True )
-    athenaCommonFlags.PoolESDOutput.set_Value_and_Lock( runArgs.outputESDFile )
-    outputRequested = True
-if hasattr(runArgs,"outputRDOFile"):
-    #for TAG->RDO->skimmedRDO
-    rec.doWriteRDO.set_Value_and_Lock( True )
-    athenaCommonFlags.PoolRDOOutput.set_Value_and_Lock( runArgs.outputRDOFile )
-    if hasattr(runArgs,"inputTAGFile"):
-        rec.readTAG.set_Value_and_Lock( True )
-        rec.readRDO.set_Value_and_Lock( True )
-        rec.readESD.set_Value_and_Lock( False )
-        rec.doWriteAOD.set_Value_and_Lock( False )
-        rec.doWriteESD.set_Value_and_Lock( False )
-    outputRequested = True
-
-if hasattr(runArgs,"outputNTUP_ENHBIASFile"):
-    from TrigCostAthena.TrigCostAthenaFlags import TrigCostAthenaFlags
-    TrigCostAthenaFlags.StoreNtVerticesOutputFile.set_Value_and_Lock( runArgs.outputNTUP_ENHBIASFile )
-    TrigCostAthenaFlags.DoStoreNtVertices.set_Value_and_Lock( True )
-    if hasattr(runArgs,"inputESDFile") and not hasattr(runArgs,"inputFile"):
-        athenaCommonFlags.FilesInput.set_Value_and_Lock( runArgs.inputESDFile )
-    include("TrigCostAthena/ESDtoNTUP_ENHBIAS.py")
-    outputRequested = True
-
-
-if hasattr(runArgs,"outputNTUP_LARNOISEFile"):
-    from LArMonitoring.LArMonitoringFlags import larNoiseBurstFlags
-    larNoiseBurstFlags.outputFile = runArgs.outputNTUP_LARNOISEFile
-    #little hack while autoConfiguration=everything is still not the default...
-    if hasattr(runArgs,"inputESDFile") and not hasattr(runArgs,"inputFile"):
-        athenaCommonFlags.FilesInput.set_Value_and_Lock( runArgs.inputESDFile )
-    include("LArMonitoring/LArNoiseBursts_prodJO.py")
-    outputRequested = True
-
-#Import D3PD flags before preExec, for convenience
-from D3PDMakerConfig.D3PDProdFlags  import oldProdFlags
-from D3PDMakerConfig.D3PDMakerFlags import D3PDMakerFlags
-from SUSYD3PDMaker.SUSYD3PDFlags    import SUSYD3PDFlags
-
-## Pre-exec
-if hasattr(runArgs,"preExec"):
-    recoLog.info("transform pre-exec")
-    for cmd in runArgs.preExec:
-        recoLog.info(cmd)
-        exec(cmd)
-
-## Pre-include
-if hasattr(runArgs,"preInclude"): 
-    for fragment in runArgs.preInclude:
-        include(fragment)
-
-## Pre-includes defined for the DPDs:
-from PATJobTransforms.DPDUtils import SetupDPDPreIncludes
-dpdPreIncludeUsed = SetupDPDPreIncludes(runArgs,listOfFlags)
-if outputRequested and dpdPreIncludeUsed:
-    recoLog.error( "Multiple output types requested with pre-includes present" )
-    recoLog.error( "This will most probably lead to weird output" )
-    pass
-
-# temporary hack (proper fix would be to cleanly protect all DESD building code against missing trigger)
-if not rec.doTrigger:
-    rec.doDPD.set_Value_and_Lock(False)
-    rec.DPDMakerScripts.set_Value_and_Lock([])
-
-#========================================================
-# Central topOptions (this one is a string, not a list)
-#========================================================
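-# Precedence: an explicit topOptions run argument wins; otherwise the DPD top
-# options are included when any DPD maker script is scheduled; otherwise the
-# standard RecExCommon top options are used.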
-if hasattr(runArgs,"topOptions"): include(runArgs.topOptions)
-elif rec.DPDMakerScripts()!=[]: include("PrimaryDPDMaker/esdtodpd.py")
-else: include( "RecExCommon/RecExCommon_topOptions.py" )
-
-## Make "old style" D3PDs.
-for c in SetupOutputDPDs(runArgs, [oldProdFlags]): c()
-
-## Offline prescales (has to be *after* the topOptions)
-if hasattr(runArgs,"prescales"):
-    recoLog.info( '**** DPD offline prescale arguments:' )
-    from PrimaryDPDMaker.JobTransformConfiguration import ApplyPrescale
-    for prescale in runArgs.prescales:
-        recoLog.info( prescale )
-        ApplyPrescale(prescale)
-
-## Post-include
-if hasattr(runArgs,"postInclude"): 
-    for fragment in runArgs.postInclude:
-        include(fragment)
-
-## Post-includes defined for the DPDs:
-from PATJobTransforms.DPDUtils import SetupDPDPostIncludes
-dpdPostIncludeUsed = SetupDPDPostIncludes(runArgs,listOfFlags)
-if outputRequested and dpdPostIncludeUsed:
-    recoLog.error( "Multiple output types requested with post-includes present" )
-    recoLog.error( "This will most probably lead to weird output" )
-    pass
-
-## Post-exec
-if hasattr(runArgs,"postExec"):
-    recoLog.info("transform post-exec")
-    for cmd in runArgs.postExec:
-        recoLog.info(cmd)
-        exec(cmd)
-
diff --git a/PhysicsAnalysis/PATJobTransforms/share/skeleton.MergeBS.py b/PhysicsAnalysis/PATJobTransforms/share/skeleton.MergeBS.py
deleted file mode 100644
index 01a37fab080e94805d9c08efb34bf05089d68894..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/share/skeleton.MergeBS.py
+++ /dev/null
@@ -1,59 +0,0 @@
-###############################################################
-# Simple job options for bytestream merging
-# David Cote (DESY), November 2008
-#==============================================================
-
-from AthenaCommon.Logging import logging
-recoLog = logging.getLogger('bs_to_bs')
-recoLog.info( '****************** STARTING BYTESTREAM MERGING *****************' )
-
-include ("RecExCommon/RecoUsefulFlags.py")
-
-include( "ByteStreamCnvSvc/BSEventStorageEventSelector_jobOptions.py" )
-svcMgr = theApp.serviceMgr()
-ByteStreamInputSvc = svcMgr.ByteStreamInputSvc
-
-## max events
-theApp.EvtMax=-1
-if hasattr(runArgs,"maxEvents"): theApp.EvtMax = runArgs.maxEvents
-
-## Input
-if hasattr(runArgs,"inputBSFile"):
-    ByteStreamInputSvc.FullFileName += runArgs.inputBSFile
-
-## Pre-include
-if hasattr(runArgs,"preInclude"): 
-    for fragment in runArgs.preInclude:
-        include(fragment)
-
-## Pre-exec
-if hasattr(runArgs,"preExec"):
-    recoLog.info("transform pre-exec")
-    for cmd in runArgs.preExec:
-        recoLog.info(cmd)
-        exec(cmd)
-
-from OutputStreamAthenaPool.MultipleStreamManager import MSMgr
-bs=MSMgr.NewByteStream("outputBS")
-
-# Cap the output: maximum file size 15 GB, maximum 15M events per file
-bs.bsOutputSvc.MaxFileMB = 15000
-bs.bsOutputSvc.MaxFileNE = 15000000
-
-if hasattr(runArgs,"outputBSFile"):
-    from PATJobTransforms.DPDUtils import outputBSNameStripper
-    simpleName = outputBSNameStripper(runArgs.outputBSFile)
-    bs.SetOutputFileName(simpleName)
-
-## Post-include
-if hasattr(runArgs,"postInclude"): 
-    for fragment in runArgs.postInclude:
-        include(fragment)
-
-## Post-exec
-if hasattr(runArgs,"postExec"):
-    recoLog.info("transform post-exec")
-    for cmd in runArgs.postExec:
-        recoLog.info(cmd)
-        exec(cmd)
-
diff --git a/PhysicsAnalysis/PATJobTransforms/share/skeleton.MergeHIT.py b/PhysicsAnalysis/PATJobTransforms/share/skeleton.MergeHIT.py
deleted file mode 100644
index 942a035e0be90185176d2c30d65de0306f61e7ad..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/share/skeleton.MergeHIT.py
+++ /dev/null
@@ -1,151 +0,0 @@
-#import glob, os, re
-import traceback
-
-from AthenaCommon.Logging import logging
-merHitLog = logging.getLogger('MergeHITS')
-
-merHitLog.info( '****************** STARTING HIT MERGING *****************' )
-
-merHitLog.info( '**** Transformation run arguments' )
-merHitLog.info( str(runArgs) )
-
-from AthenaCommon.AlgSequence import AlgSequence
-topSequence = AlgSequence()
-
-#==============================================================
-# Job definition parameters:
-#==============================================================
-from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
-#Jobs should stop if an include fails.
-if hasattr(runArgs,"IgnoreConfigError"):
-    athenaCommonFlags.AllowIgnoreConfigError=runArgs.IgnoreConfigError 
-else:
-    athenaCommonFlags.AllowIgnoreConfigError=False
-
-from AthenaCommon.AppMgr import theApp
-EvtMax=-1
-if hasattr(runArgs,"maxEvents"):
-    EvtMax = runArgs.maxEvents
-theApp.EvtMax = EvtMax
-
-#--------------------------------------------------------------
-# Peek at input to configure DetFlags
-#--------------------------------------------------------------
-if not hasattr(runArgs,"inputHitsFile"):
-    raise RuntimeError("No inputHitsFile provided.")
-
-from SimuJobTransforms.HitsFilePeeker import HitsFilePeeker
-HitsFilePeeker(runArgs, merHitLog)
-from AthenaCommon.DetFlags import DetFlags
-DetFlags.geometry.all_setOff()
-
-#==============================================================
-# Job Configuration parameters:
-#==============================================================
-## Pre-exec
-if hasattr(runArgs,"preExec"):
-    merHitLog.info("transform pre-exec")
-    for cmd in runArgs.preExec:
-        merHitLog.info(cmd)
-        exec(cmd)
-
-## Pre-include
-if hasattr(runArgs,"preInclude"): 
-    for fragment in runArgs.preInclude:
-        include(fragment)
-
-#--------------------------------------------------------------
-# Load POOL support
-#--------------------------------------------------------------
-from AthenaCommon.AppMgr import ServiceMgr
-from AthenaPoolCnvSvc.AthenaPoolCnvSvcConf import AthenaPoolCnvSvc
-ServiceMgr += AthenaPoolCnvSvc()
-
-ServiceMgr.AthenaPoolCnvSvc.PoolAttributes = [ "DEFAULT_BUFFERSIZE = '2048'" ]
-
-import AthenaPoolCnvSvc.ReadAthenaPool
-
-from CLIDComps.CLIDCompsConf import ClassIDSvc
-ServiceMgr += ClassIDSvc()
-include( "PartPropSvc/PartPropSvc.py" )
-
-# load all possible converters for EventCheck
-GeoModelSvc = Service( "GeoModelSvc" )
-GeoModelSvc.IgnoreTagDifference=True
-
-# set up all detector description stuff + some voodoo
-include( "RecExCond/AllDet_detDescr.py" )
-from AthenaCommon.DetFlags import DetFlags
-DetFlags.Print()
-
-#--------------------------------------------------------------
-# Setup Input
-#--------------------------------------------------------------
-In = runArgs.inputHitsFile
-EventSelector = ServiceMgr.EventSelector
-EventSelector.InputCollections = In
-
-# Check collection type (CollType may be defined via preExec)
-try:
-  EventSelector.CollectionType = CollType
-except NameError:
-  print "Reading from file"
-
-SkipEvents=0
-if hasattr(runArgs,"skipEvents"):
-    SkipEvents = runArgs.skipEvents
-ServiceMgr.EventSelector.SkipEvents = SkipEvents
-
-#--------------------------------------------------------------
-# Setup Output
-#--------------------------------------------------------------
-if not hasattr(runArgs,"outputHitsFile"):
-    raise RuntimeError("No outputHitsFile provided.")
-Out = runArgs.outputHitsFile 
-from AthenaPoolCnvSvc.WriteAthenaPool import AthenaPoolOutputStream
-try:
-  StreamHITS = AthenaPoolOutputStream( "StreamHITS", Out, True )
-except NameError:
-  StreamHITS = AthenaPoolOutputStream( "StreamHITS", "DidNotSetOutputName.root", True )
-StreamHITS.TakeItemsFromInput = True
-StreamHITS.ForceRead = True  # force read of output data objects
-# The next line is an example of how to exclude CLIDs if they are causing a problem
-#StreamHITS.ExcludeList = ['6421#*']
-
-# Look for lists of filter algorithms (definable via preExec)
-try:
-  StreamHITS.AcceptAlgs = AcceptList
-except NameError:
-  print "No accept algs indicated in AcceptList"
-try:
-  StreamHITS.RequireAlgs = RequireList
-except NameError:
-  print "No require algs indicated in RequireList"
-try:
-  StreamHITS.VetoAlgs = VetoList
-except NameError:
-  print "No veto algs indicated in VetoList"
-
-
-MessageSvc = ServiceMgr.MessageSvc
-MessageSvc.OutputLevel = INFO
-
-StreamHITS.ExtendProvenanceRecord = False
-
-ServiceMgr.AthenaPoolCnvSvc.MaxFileSizes = [ "15000000000" ]
-
-#--------------------------------------------------------------
-
-## Post-include
-if hasattr(runArgs,"postInclude"):
-    for fragment in runArgs.postInclude:
-        include(fragment)
-
-## Post-exec
-if hasattr(runArgs,"postExec"):
-    merHitLog.info("transform post-exec")
-    for cmd in runArgs.postExec:
-        merHitLog.info(cmd)
-        exec(cmd)
-#--------------------------------------------------------------
-print topSequence
diff --git a/PhysicsAnalysis/PATJobTransforms/share/skeleton.MergeNTUP_trf.py b/PhysicsAnalysis/PATJobTransforms/share/skeleton.MergeNTUP_trf.py
deleted file mode 100644
index 2d2a410de2fb41049481c69d6cc7d30109ecf80a..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/share/skeleton.MergeNTUP_trf.py
+++ /dev/null
@@ -1,83 +0,0 @@
-###############################################################
-#
-# Skeleton top job options for MergeNTUP_trf
-#
-#==============================================================
-
-import subprocess
-from PATJobTransforms.Configuration import ConfigDic
-from PyJobTransformsCore.trferr import TransformArgumentError
-
-theApp.EvtMax = 0
-
-inFileArgs=0
-outFileArgs=0
-inKey=None
-outKey=None
-
-for key in ConfigDic.iterkeys():
-    if key.startswith('outputNTUP') and key.endswith('File') and hasattr(runArgs,key):
-        outFileArgs+=1
-        outKey=key
-        outFile=getattr(runArgs,key)
-        print "Using argument ", key, " = ",outFile
-    if key.startswith('inputNTUP') and key.endswith('File') and  hasattr(runArgs,key):
-        inFileArgs+=1
-        inKey=key
-        inFile=getattr(runArgs,key)
-        print "Using argument ", key, " = ",inFile
-
-if outFileArgs!=1:
-    raise TransformArgumentError(message='Wrong number of outputNTUPXXXFile arguments: {0:d} instead of 1. Stopping!'.format(outFileArgs))
-
-if inFileArgs!=1:
-    raise TransformArgumentError(message='Wrong number of inputNTUPXXXFile arguments: {0:d} instead of 1. Stopping!'.format(inFileArgs))
-
-# Strip the 'input'/'output' prefixes explicitly: str.lstrip() removes a
-# character set rather than a prefix, so it must not be used here.
-if inKey[len('input'):] != outKey[len('output'):]:
-    raise TransformArgumentError(message='Using different input and output types: {0:s} and {1:s}. Stopping!'.format(inKey, outKey))
-
-
-if not (hasattr(runArgs,"sortInputFiles") and not runArgs.sortInputFiles):
-    inFileSorted=[]
-    inFileWithoutEvents=[]
-    for file in inFile:
-        print "Determing number of events of file", file
-        myset=set()
-        ConfigDic[inKey](trf=myset,inDic={})
-        for x in myset:
-            x.setValue(file)
-            nevents=x.eventCount()
-            print "Number of events is", nevents
-            if nevents>0:
-                inFileSorted.append(file)
-            else:    
-                inFileWithoutEvents.append(file)
-    inFileSorted.extend(inFileWithoutEvents)
-    inFile=inFileSorted
-
-# If we have the mergeChunks parameter, then we will use rhadd instead of hadd (due to memory leaks)
-# Parallel merge can be considerably faster
-if hasattr(runArgs,'mergeChunks'):
-    cmd=['rhadd.py', '-n', str(getattr(runArgs,'mergeChunks'))]
-    if hasattr(runArgs,'mergeParallel'):
-        cmd.extend(['-p', str(getattr(runArgs, 'mergeParallel'))])
-else:
-    cmd=['hadd']
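-# Illustrative resulting commands (hypothetical file names):
-#   with mergeChunks:    rhadd.py -n 10 [-p 2] out.root in1.root in2.root ...
-#   without mergeChunks: hadd out.root in1.root in2.root ...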
-
-cmd.append(outFile)
-cmd.extend(inFile)
-
-print 'Will merge using: %s' % cmd
-proc=subprocess.Popen( args=cmd, bufsize = 1, shell = False,stdout = subprocess.PIPE, stderr = subprocess.STDOUT )
-
-while proc.poll() is None:
-    line = proc.stdout.readline()
-    if line:
-        print line.rstrip()
-
-rc=proc.returncode
-
-if rc != 0:
-    raise RuntimeError("hadd returned with value {0:d} instead of 0. Stopping!".format(rc))
-
-print "Merging finished"
diff --git a/PhysicsAnalysis/PATJobTransforms/share/skeleton.MergePool.py b/PhysicsAnalysis/PATJobTransforms/share/skeleton.MergePool.py
deleted file mode 100644
index e8ab0ebfd7f8d5ad11c773b0e8fcc08fe06b5e38..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/share/skeleton.MergePool.py
+++ /dev/null
@@ -1,87 +0,0 @@
-###############################################################
-#
-# Skeleton top job options for ESD/AOD/DPD merging
-#
-#==============================================================
-
-#Common job options disable most RecExCommon by default. Re-enable below on demand.
-include("RecJobTransforms/CommonRecoSkeletonJobOptions.py")
-rec.doAOD=False
-rec.doESD=False
-
-from AthenaCommon.Logging import logging
-recoLog = logging.getLogger('merge_pool')
-recoLog.info( '****************** STARTING POOL FILE MERGING *****************' )
-
-## Input
-if hasattr(runArgs,"inputFile"): athenaCommonFlags.FilesInput.set_Value_and_Lock( runArgs.inputFile )
-if hasattr(runArgs,"inputAODFile"):
-    rec.readAOD.set_Value_and_Lock( True )
-    rec.doWriteAOD.set_Value_and_Lock( True )
-    athenaCommonFlags.PoolAODInput.set_Value_and_Lock( runArgs.inputAODFile )
-
-if hasattr(runArgs,"inputESDFile"):
-    rec.readESD.set_Value_and_Lock( True )
-    rec.doWriteESD.set_Value_and_Lock( True )
-    athenaCommonFlags.PoolESDInput.set_Value_and_Lock( runArgs.inputESDFile )
-
-## Output
-if hasattr(runArgs,"outputAODFile"): athenaCommonFlags.PoolAODOutput.set_Value_and_Lock( runArgs.outputAODFile )
-if hasattr(runArgs,"outputESDFile"): athenaCommonFlags.PoolESDOutput.set_Value_and_Lock( runArgs.outputESDFile )
-
-## Pre-exec
-if hasattr(runArgs,"preExec"):
-    recoLog.info("transform pre-exec")
-    for cmd in runArgs.preExec:
-        recoLog.info(cmd)
-        exec(cmd)
-
-## Pre-include
-if hasattr(runArgs,"preInclude"): 
-    for fragment in runArgs.preInclude:
-        include(fragment)
-
-#Lock doAOD/ESD before starting RecExCommon, so they are not overwritten by the auto-configuration
-rec.doAOD.lock()
-rec.doESD.lock()
-
-include( "RecExCommon/RecExCommon_topOptions.py" )
-
-if rec.doWriteAOD:
-    if rec.readAOD(): 
-        StreamAOD.ExtendProvenanceRecord = False
-    else:
-        print "StreamAOD was not defined, cannot set ExtendProvenanceRecord = False. Check your flags."
-
-if rec.doWriteESD:
-    if rec.readESD(): 
-        StreamESD.ExtendProvenanceRecord = False
-    else:
-        print "StreamESD was not defined, cannot set ExtendProvenanceRecord = False. Check your flags."
-
-# Fast merge options
-if hasattr(runArgs,"fastPoolMerge") and runArgs.fastPoolMerge == True:
-     recoLog.info("Using CopyEventStreamInfo")
-     from OutputStreamAthenaPool.OutputStreamAthenaPoolConf import CopyEventStreamInfo
-     if rec.doWriteAOD: stream = StreamAOD
-     if rec.doWriteESD: stream = StreamESD
-     stream.HelperTools = []
-     streamInfoToolKey = stream.name()
-     streamInfoTool = CopyEventStreamInfo( streamInfoToolKey+"_CopyEventStreamInfo" )
-     streamInfoTool.Key = streamInfoToolKey
-     ToolSvc += streamInfoTool
-     ServiceMgr.MetaDataSvc.MetaDataTools += [ streamInfoTool ]
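-# Note: with fastPoolMerge the stream's per-event helper tools are dropped and a
-# CopyEventStreamInfo tool is registered with the MetaDataSvc instead, so the
-# output EventStreamInfo is (presumably) copied from the input rather than rebuilt.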
-
-
-## Post-include
-if hasattr(runArgs,"postInclude"): 
-    for fragment in runArgs.postInclude:
-        include(fragment)
-
-## Post-exec
-if hasattr(runArgs,"postExec"):
-    recoLog.info("transform post-exec")
-    for cmd in runArgs.postExec:
-        recoLog.info(cmd)
-        exec(cmd)
-
diff --git a/PhysicsAnalysis/PATJobTransforms/share/skeleton.MergeRDO.py b/PhysicsAnalysis/PATJobTransforms/share/skeleton.MergeRDO.py
deleted file mode 100644
index 6515e3713b4f166a46b15f1b8c3212df66458beb..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/share/skeleton.MergeRDO.py
+++ /dev/null
@@ -1,134 +0,0 @@
-###############################################################
-#
-# Skeleton top job options for RDO merging
-#
-#==============================================================
-
-#import glob, os, re
-import traceback
-
-from AthenaCommon.Logging import logging
-merHitLog = logging.getLogger('MergeRDOS')
-
-merHitLog.info( '****************** STARTING RDO MERGING *****************' )
-
-from AthenaCommon.AlgSequence import AlgSequence
-topSequence = AlgSequence()
-
-#==============================================================
-# Job definition parameters:
-#==============================================================
-from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
-#Jobs should stop if an include fails.
-athenaCommonFlags.AllowIgnoreConfigError.set_Value_and_Lock(False)
-if hasattr(runArgs,"inputRDOFile"): athenaCommonFlags.FilesInput.set_Value_and_Lock( runArgs.inputRDOFile )
-
-#from AthenaCommon.AppMgr import theApp
-#theApp.EvtMax = -1 
-
-from AthenaCommon.GlobalFlags import globalflags
-if hasattr(runArgs,"geometryVersion"): globalflags.DetDescrVersion.set_Value_and_Lock( runArgs.geometryVersion )
-if hasattr(runArgs,"conditionsTag"): globalflags.ConditionsTag.set_Value_and_Lock( runArgs.conditionsTag )
-
-## Pre-exec
-if hasattr(runArgs,"preExec"):
-    merHitLog.info("transform pre-exec")
-    for cmd in runArgs.preExec:
-        merHitLog.info(cmd)
-        exec(cmd)
-
-## Pre-include
-if hasattr(runArgs,"preInclude"): 
-    for fragment in runArgs.preInclude:
-        include(fragment)
-
-#--------------------------------------------------------------
-# Load POOL support
-#--------------------------------------------------------------
-from AthenaCommon.AppMgr import ServiceMgr
-from AthenaPoolCnvSvc.AthenaPoolCnvSvcConf import AthenaPoolCnvSvc
-ServiceMgr += AthenaPoolCnvSvc()
-
-ServiceMgr.AthenaPoolCnvSvc.PoolAttributes = [ "DEFAULT_BUFFERSIZE = '2048'" ]
-
-import AthenaPoolCnvSvc.ReadAthenaPool
-
-from CLIDComps.CLIDCompsConf import ClassIDSvc
-ServiceMgr += ClassIDSvc()
-include( "PartPropSvc/PartPropSvc.py" )
-
-# load all possible converters for EventCheck
-GeoModelSvc = Service( "GeoModelSvc" )
-GeoModelSvc.IgnoreTagDifference=True
-
-# set up all detector description stuff 
-include( "RecExCond/AllDet_detDescr.py" )
-
-## TGCcablingServerSvc Hack
-import MuonCnvExample.MuonCablingConfig
-
-#--------------------------------------------------------------
-# Setup Input
-#--------------------------------------------------------------
-ServiceMgr.EventSelector.InputCollections = athenaCommonFlags.FilesInput()
-
-# Check collection type (CollType may be defined via preExec)
-try:
-  ServiceMgr.EventSelector.CollectionType = CollType
-except NameError:
-  print "Reading from file"
-
-SkipEvents=0
-ServiceMgr.EventSelector.SkipEvents = SkipEvents
-
-#--------------------------------------------------------------
-# Setup Output
-#--------------------------------------------------------------
-if hasattr(runArgs,"outputRDOFile"): Out = runArgs.outputRDOFile
-
-from AthenaPoolCnvSvc.WriteAthenaPool import AthenaPoolOutputStream
-try:
-  StreamRDO = AthenaPoolOutputStream( "StreamRDO", Out, True )
-except NameError:
-  StreamRDO = AthenaPoolOutputStream( "StreamRDO", "DidNotSetOutputName.root", True )
-StreamRDO.TakeItemsFromInput = True
-StreamRDO.ForceRead = True  # force read of output data objects
-# The next line is an example of how to exclude CLIDs if they are causing a problem
-#StreamRDO.ExcludeList = ['6421#*']
-
-# Look for lists of filter algorithms (definable via preExec)
-try:
-  StreamRDO.AcceptAlgs = AcceptList
-except NameError:
-  print "No accept algs indicated in AcceptList"
-try:
-  StreamRDO.RequireAlgs = RequireList
-except NameError:
-  print "No require algs indicated in RequireList"
-try:
-  StreamRDO.VetoAlgs = VetoList
-except NameError:
-  print "No veto algs indicated in VetoList"
-
-
-MessageSvc = ServiceMgr.MessageSvc
-MessageSvc.OutputLevel = INFO
-
-StreamRDO.ExtendProvenanceRecord = False
-
-ServiceMgr.AthenaPoolCnvSvc.MaxFileSizes = [ "15000000000" ]
-
-## Post-include
-if hasattr(runArgs,"postInclude"): 
-    for fragment in runArgs.postInclude:
-        include(fragment)
-
-## Post-exec
-if hasattr(runArgs,"postExec"):
-    merHitLog.info("transform post-exec")
-    for cmd in runArgs.postExec:
-        merHitLog.info(cmd)
-        exec(cmd)
-
-
-#--------------------------------------------------------------
diff --git a/PhysicsAnalysis/PATJobTransforms/share/skeleton.ValidateD3PD_trf.py b/PhysicsAnalysis/PATJobTransforms/share/skeleton.ValidateD3PD_trf.py
deleted file mode 100644
index ca54786ecd2a07830debca5482893f55e39bc095..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/share/skeleton.ValidateD3PD_trf.py
+++ /dev/null
@@ -1,220 +0,0 @@
-###############################################################
-#
-# Skeleton top job options for D3PD validation
-#
-#==============================================================
-
-
-# ==============================================================================
-# Load your input file that you want to process
-# ==============================================================================
-include("PATJobTransforms/CommonSkeletonJobOptions.py")
-
-from AthenaCommon.Logging import logging
-skelLog = logging.getLogger('ValidateD3PD')
-skelLog.info( '****************** Starting D3PD Validation *****************' )
-
-if hasattr(runArgs,"inputESDFile"):
-    rec.readESD.set_Value_and_Lock( True )
-    athenaCommonFlags.PoolESDInput.set_Value_and_Lock( runArgs.inputESDFile )
-elif hasattr(runArgs,"inputAODFile"):
-    rec.readAOD.set_Value_and_Lock( True )
-    athenaCommonFlags.PoolAODInput.set_Value_and_Lock( runArgs.inputAODFile )
-else:
-    raise RuntimeError('No input file argument given (ESD or AOD input required)')
-
-#Default 'do' list:
-doPhysInDetPerf     = True #!!
-doPhysBackTrack     = True
-doPhysMet           = True  
-doPhysJets          = False ## Tag update needed
-doPhysTau           = True
-doPhysElectrons     = False ## Not tagged
-doPhysMuons         = False ## Needs to be implemented
-doPhysBtag          = True
-doPhysSUSY          = True
-doPhysMonTop        = True
-doPhysPhotons       = False
-doZee               = False ## Tag update needed
-doExotics           = True
-doHSG6              = True
-
-Routines = ['PhysInDetPerf','PhysBackTrack','PhysMet','PhysJets','PhysTau','PhysElectrons','PhysMuons','PhysBtag','PhysSUSY','PhysMonTop','Zee','Exotics','HSG6','PhysPhotons']
-
-#Switch on/off various validation routines:
-if hasattr(runArgs,"d3pdVal"):
-    for val in Routines:
-        dostr = val
-        dontstr = 'no'+val
-        if dostr in runArgs.d3pdVal:
-            vars()['do'+dostr] = True
-        if dontstr in runArgs.d3pdVal:
-            vars()['do'+dostr] = False
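-# Illustrative (hypothetical values): d3pdVal=['PhysJets','noPhysTau'] would flip
-# doPhysJets to True and doPhysTau to False via the loop above.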
-
-
-# ==============================================================================
-# Configure RecExCommon (the mother of all job options in Athena) 
-# and schedule your DPD making.
-# Unfortunately, for now, you still have to turn OFF some things by hand
-# ==============================================================================
-
-from InDetRecExample.InDetKeys import InDetKeys
-InDetKeys.UnslimmedTracks.set_Value_and_Lock('Tracks')
-InDetKeys.UnslimmedTracksTruth.set_Value_and_Lock('TrackTruthCollection')
-
-rec.doHist.set_Value_and_Lock(True)
-rec.doWriteTAG.set_Value_and_Lock(False)
-rec.doWriteAOD.set_Value_and_Lock(False)
-rec.doCBNT.set_Value_and_Lock(False)
-
-from ParticleBuilderOptions.AODFlags import AODFlags
-AODFlags.ParticleJet = False
-
-# ----------------------------------------------------------------------------------------------------
-# If you have your own DPD Maker scripts
-# (for examples, see in svn: PhysicsAnalysis/D2PDMaker/share/D2PD_ExampleSimple*.py ),
-# then just append your script (wherever it is) to this list:
-#       rec.DPDMakerScripts.append("MyPackage/MyScript")
-# An example script is appended below, so you can see how it works!
-# ----------------------------------------------------------------------------------------------------
-
-
-# Set up trigger for All tools
-from TrigDecisionTool.TrigDecisionToolConf import Trig__TrigDecisionTool
-tdt = Trig__TrigDecisionTool("TrigDecisionTool")
-ToolSvc += tdt
-    
-from TriggerJobOpts.TriggerFlags import TriggerFlags
-TriggerFlags.configurationSourceList = ['ds']
-
-# set up trigger config service
-from TriggerJobOpts.TriggerConfigGetter import TriggerConfigGetter
-cfg = TriggerConfigGetter("ReadPool")
-    
-# ----------------------------------------------------------------------------------------------------
-## primary tracking jobOptions
-### Removed General definitions and filter
-# ----------------------------------------------------------------------------------------------------
-if doPhysInDetPerf:
-    from InDetRecExample.InDetJobProperties import InDetFlags
-    InDetFlags.doStandardPlots.set_Value_and_Lock(True)
-    rec.DPDMakerScripts.append("RunPhysVal/PhysValInDetPerf_jobOptions.py")
-
-# ----------------------------------------------------------------------------------------------------
-## secondary tracking jobOptions
-# ----------------------------------------------------------------------------------------------------
-if doPhysBackTrack:
-    rec.DPDMakerScripts.append("RunPhysVal/PhysValBackTrack_jobOptions.py")
-
-
-# ----------------------------------------------------------------------------------------------------
-## Tau jobOptions
-# ----------------------------------------------------------------------------------------------------
-if doPhysTau:
-    rec.DPDMakerScripts.append("RunPhysVal/PhysValTau_jobOptions.py")
-
-
-# ----------------------------------------------------------------------------------------------------
-## b-tagging jobOptions
-# ----------------------------------------------------------------------------------------------------
-if doPhysBtag:
-    rec.DPDMakerScripts.append("RunPhysVal/PhysValBtag_jobOptions.py")
-
-
-# ----------------------------------------------------------------------------------------------------
-## MET jobOptions (ESD)
-# ----------------------------------------------------------------------------------------------------
-if doPhysMet:
-    rec.DPDMakerScripts.append("RunPhysVal/PhysValMET_jobOptions.py")
-
-
-# ----------------------------------------------------------------------------------------------------
-## Jets jobOptions 
-# ----------------------------------------------------------------------------------------------------
-if doPhysJets:
-    rec.DPDMakerScripts.append("RunPhysVal/PhysValJets_jobOptions.py")
-
-
-# ----------------------------------------------------------------------------------------------------
-## electrons jobOptions
-# ----------------------------------------------------------------------------------------------------
-if doPhysElectrons:
-    rec.DPDMakerScripts.append("RunPhysVal/PhysValElectrons_jobOptions.py")
-
-
-# ----------------------------------------------------------------------------------------------------
-## muons jobOptions
-# ----------------------------------------------------------------------------------------------------
-if doPhysMuons:
-    rec.DPDMakerScripts.append("RunPhysVal/PhysValMuons_jobOptions.py")
-
-# ----------------------------------------------------------------------------------------------------
-## SUSY jobOptions
-# ----------------------------------------------------------------------------------------------------
-if doPhysSUSY:
-    rec.DPDMakerScripts.append("RunPhysVal/PhysValSUSY_jobOptions.py")
-
-# ----------------------------------------------------------------------------------------------------
-## PhysValMon + Top jobOptions
-# ----------------------------------------------------------------------------------------------------
-if doPhysMonTop:
-    rec.DPDMakerScripts.append("RunPhysVal/PhysValMonTop_jobOptions.py")
-
-# ----------------------------------------------------------------------------------------------------
-## Zee jobOptions
-# ----------------------------------------------------------------------------------------------------
-if doZee:
-    rec.DPDMakerScripts.append("RunPhysVal/PhysValZee_jobOptions.py")
-
-# ----------------------------------------------------------------------------------------------------
-## Exotics jobOptions
-# ----------------------------------------------------------------------------------------------------
-if doExotics:
-    rec.DPDMakerScripts.append("RunPhysVal/PhysValExotics_jobOptions.py")
-
-# ----------------------------------------------------------------------------------------------------
-## HSG6 jobOptions
-# ----------------------------------------------------------------------------------------------------
-if doHSG6:
-    rec.DPDMakerScripts.append("RunPhysVal/PhysValHSG6_jobOptions.py")
-
-# ----------------------------------------------------------------------------------------------------
-## photons jobOptions
-# ----------------------------------------------------------------------------------------------------
-if doPhysPhotons:
-    rec.DPDMakerScripts.append("RunPhysVal/PhysValPhotons_jobOptions.py")
-
-
-## Pre-exec
-if hasattr(runArgs,"preExec"):
-    skelLog.info("transform pre-exec")
-    for cmd in runArgs.preExec:
-        skelLog.info(cmd)
-        exec(cmd)
-
-## Pre-include
-if hasattr(runArgs,"preInclude"): 
-    for fragment in runArgs.preInclude:
-        include(fragment)
-    
-# ==============================================================================
-# Now, include the master top options from RecExCommon.
-# This automatically ensures that your Athena job will be set up correctly,
-# i.e., if RecExCommon doesn't work, the release is broken!
-# ==============================================================================
-include ("RecExCommon/RecExCommon_topOptions.py")
-
-
-## Post-include
-if hasattr(runArgs,"postInclude"): 
-    for fragment in runArgs.postInclude:
-        include(fragment)
-
-## Post-exec
-if hasattr(runArgs,"postExec"):
-    skelLog.info("transform post-exec")
-    for cmd in runArgs.postExec:
-        skelLog.info(cmd)
-        exec(cmd)
-
-
diff --git a/PhysicsAnalysis/PATJobTransforms/share/skeleton.runHLT.py b/PhysicsAnalysis/PATJobTransforms/share/skeleton.runHLT.py
deleted file mode 100644
index fcb8982e0f266c33516bf8b33e44d75115f7f921..0000000000000000000000000000000000000000
--- a/PhysicsAnalysis/PATJobTransforms/share/skeleton.runHLT.py
+++ /dev/null
@@ -1,167 +0,0 @@
-###############################################################
-# Simple job options to:
-#   read BS, run HLT, and write out BS identical but for new HLT fragments
-# See ~aagaard/public/Reprocessing/testReproWrite.py
-# 
-# Simon George, November 2008
-# Clemencia Mora, August 2009
-#==============================================================
-
-from AthenaCommon.Logging import logging
-recoLog = logging.getLogger('RawToRaw_WithHLT')
-recoLog.info( '****************** STARTING BS->HLT->BS *****************' )
-
-from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
-from AthenaCommon.GlobalFlags import globalflags
-from TriggerJobOpts.TriggerFlags import TriggerFlags
-
-## Pre-include
-if hasattr(runArgs,"preInclude"): 
-    for fragment in runArgs.preInclude:
-        include(fragment)
-
-## Pre-exec
-if hasattr(runArgs,"preExec"):
-    recoLog.info("transform pre-exec")
-    for cmd in runArgs.preExec:
-        recoLog.info(cmd)
-        exec(cmd)
-
-## max events
-theApp.EvtMax=-1
-if hasattr(runArgs,"maxEvents"): theApp.EvtMax = runArgs.maxEvents
-EvtMax=theApp.EvtMax
-
-## Input
-if hasattr(runArgs,"inputBSFile"):
-    BSRDOInput = runArgs.inputBSFile
-    athenaCommonFlags.BSRDOInput = BSRDOInput
-    globalflags.InputFormat.set_Value_and_Lock('bytestream')
-else:
-    raise RuntimeError("runArgs should include inputBSFile - without input this transform makes no sense.")
-
-## Output
-if hasattr(runArgs,"outputBSFile"):
-    BSRDOOutput = runArgs.outputBSFile
-    athenaCommonFlags.BSRDOOutput = BSRDOOutput
-else:
-    raise RuntimeError("runArgs should include outputBSFile - without output this transform makes no sense.")
-
-
-## Log level
-if hasattr(runArgs,"loglevel"):
-    from AthenaCommon.Constants import __all__ as validMessageLevels
-    if runArgs.loglevel in validMessageLevels:
-        myloglevel=validMessageLevels.index(runArgs.loglevel)
-        HLTOutputLevel=myloglevel
-        recoLog.info("set HLTOutputLevel to " + runArgs.loglevel + " " + str(myloglevel))
-
-## Field and Geo Configuration
-if hasattr(runArgs,"geometryVersion"):
-    setDetDescr=runArgs.geometryVersion
-    
-elif not hasattr(runArgs,"autoConfiguration"):
-    #use the setField flag from runHLT_standalone.py
-    from PATJobTransforms.GeoConfig import SetFieldForHLT
-    setField=SetFieldForHLT()
-    recoLog.info("setField="+setField)
-
-
-## Trigger Stream
-if hasattr(runArgs,"trigStream"):
-    from RecExConfig.RecFlags import rec
-    rec.triggerStream=runArgs.trigStream
-
-## Conditions Tag
-if hasattr(runArgs,"conditionsTag"):
-    setGlobalTag=runArgs.conditionsTag
-
-## AutoConfigure stuff
-if hasattr(runArgs,"autoConfiguration"):
-    from RecExConfig.RecFlags import rec
-    for key in runArgs.autoConfiguration:
-        rec.AutoConfiguration.append(key)
-        
-    if 'FieldAndGeo' not in runArgs.autoConfiguration:
-        from PATJobTransforms.GeoConfig import SetFieldForHLT
-        setField=SetFieldForHLT()
-        recoLog.info("setField="+setField)
-        
-    if 'ConditionsTag' in runArgs.autoConfiguration and not ( hasattr(runArgs,"trigStream") or 'TriggerStream' in runArgs.autoConfiguration):
-        recoLog.warning("Cannot autoConfigure conditionsTag if trigStream is not known or autoConfigured as well")
-        rec.AutoConfiguration.remove('ConditionsTag')
-        
-    from RecExConfig.AutoConfiguration import ConfigureFromListOfKeys
-    ConfigureFromListOfKeys(rec.AutoConfiguration())
-
-
-# turn on mufast monitoring
-if hasattr(runArgs,"outputNTUP_MUFASTFile"):
-    doMufastNtuple=True
-
-## Trigger Menu configuration (using setMenu flag or testBlah=True)
-if hasattr(runArgs,"triggerConfig"):
-    # Set up the menu for runHLT_standalone; accepts either 'SomeThing:MenuName' or plain 'MenuName'. Be careful what this implies for RAWtoESD and ESDtoAOD.
-    setupForMC=False
-    config=runArgs.triggerConfig.split(':')
-    if len(config)>1:
-        setMenu=config[1]
-    else:
-        setMenu=config[0]
-    recoLog.info("setMenu="+setMenu)
-else:
-    # set Menu by default to Cosmic2009_v1
-    recoLog.info("Set testCosmic2009V1=True")
-    testCosmic2009V1=True
-
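-# Illustrative (hypothetical menu name): triggerConfig='DB:Cosmic2009_v1' and
-# triggerConfig='Cosmic2009_v1' both yield setMenu='Cosmic2009_v1' via the
-# split(':') above.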
-## no longer default in flags
-TriggerFlags.outputHLTconfigFile="outputHLTconfig.xml"
-
-##reduce output from TriggerMenuPython
-from AthenaCommon.Include import excludeTracePattern,includeTracePattern
-excludeTracePattern.append("*/TriggerMenuPython/*.py")
-includeTracePattern.append("*/TriggerMenuPython/GenerateMenu.py")
-
-## main job options 
-include("TriggerRelease/runHLT_standalone.py")
-
-# rename special root output files from trigger
-if hasattr(runArgs,"outputNTUP_MUFASTFile"):
-    svcMgr.NTupleSvc.Output = [ "FILE1 DATAFILE='%s' OPT='NEW'" % runArgs.outputNTUP_MUFASTFile ]
-
-if hasattr(runArgs,"outputHIST_TRIGEXPERTFile"):
-    output=svcMgr.THistSvc.Output
-    for idx in range(len(output)):
-        if output[idx].count('EXPERT'):
-            output[idx]="EXPERT DATAFILE='%s' TYPE='ROOT' OPT='RECREATE'" % runArgs.outputHIST_TRIGEXPERTFile
-        svcMgr.THistSvc.Output=output
-
-#add HLT results to output bytestream
-from ByteStreamCnvSvc import WriteByteStream
-StreamBSFileOutput = WriteByteStream.getStream("EventStorage","StreamBSFileOutput")
-
-# BS content definition
-StreamBSFileOutput.ItemList   += [ "HLT::HLTResult#HLTResult_L2" ]
-StreamBSFileOutput.ItemList   += [ "HLT::HLTResult#HLTResult_EF" ]
-
-# Set up a special BS output service that merges changed fragments with an unchanged copy of the original input BS.
-from ByteStreamCnvSvc.ByteStreamCnvSvcConf import ByteStreamMergeOutputSvc
-mergeSvc=ByteStreamMergeOutputSvc(ByteStreamOutputSvc='ByteStreamEventStorageOutputSvc', ByteStreamInputSvc='ByteStreamInputSvc', overWriteHeader=True)
-svcMgr+=mergeSvc
-
-StreamBSFileOutput.OutputFile = "ByteStreamMergeOutputSvc"
-svcMgr.ByteStreamCnvSvc.ByteStreamOutputSvcList=['ByteStreamMergeOutputSvc']
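-# Net effect: StreamBSFileOutput writes through ByteStreamMergeOutputSvc, so each
-# output event is a copy of the input event with only the HLT result fragments
-# listed in ItemList above replaced.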
-
-
-## Post-include
-if hasattr(runArgs,"postInclude"): 
-    for fragment in runArgs.postInclude:
-        include(fragment)
-
-## Post-exec
-if hasattr(runArgs,"postExec"):
-    recoLog.info("transform post-exec")
-    for cmd in runArgs.postExec:
-        recoLog.info(cmd)
-        exec(cmd)
-