diff --git a/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/CMakeLists.txt b/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..6503e868064da9ffec65b791c3dd3bafd6168450
--- /dev/null
+++ b/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/CMakeLists.txt
@@ -0,0 +1,14 @@
+################################################################################
+# Package: NTUPtoNTUPCore
+################################################################################
+
+# Declare the package name:
+atlas_subdir( NTUPtoNTUPCore )
+
+# Declare the package's dependencies:
+atlas_depends_on_subdirs( PUBLIC
+                          Tools/PyJobTransformsCore )
+
+# Install files from the package:
+atlas_install_python_modules( python/*.py )
+
diff --git a/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/cmt/requirements b/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/cmt/requirements
new file mode 100644
index 0000000000000000000000000000000000000000..0fa4d86494bd43b28db0fd8ab7d3e2011d7ffdf6
--- /dev/null
+++ b/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/cmt/requirements
@@ -0,0 +1,13 @@
+package NTUPtoNTUPCore
+
+use AtlasPolicy AtlasPolicy-*
+use PyJobTransformsCore PyJobTransformsCore-* Tools
+
+private
+
+branches python share
+
+apply_pattern declare_python_modules files="*.py"
+apply_pattern declare_jobtransforms trfs='*_trf.py' jo='*.py'
+
+end_private
diff --git a/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/python/MultipleNTUPStreamManager.py b/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/python/MultipleNTUPStreamManager.py
new file mode 100644
index 0000000000000000000000000000000000000000..951d5a50544ee7fb3562cf4d50b0048d923a3b37
--- /dev/null
+++ b/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/python/MultipleNTUPStreamManager.py
@@ -0,0 +1,248 @@
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+from OutputStreamAthenaPool.MultipleStreamManager import *
+from AthenaCommon.AppMgr import theApp
+
+class AugmentedNTUPStream( AugmentedStreamBase ):
+    def  __init__(self, StreamName, FileName, tupleName = None, asAlg = False ):
+
+        # Initialize the base class:
+        AugmentedStreamBase.__init__(self,StreamName)
+
+        # Check if the user specified a tree name or not:
+        if tupleName == None:
+            tupleName = StreamName
+
+        # event-by-event stream
+        import AthenaCommon.CfgMgr as CfgMgr
+        from AthenaRootComps.WriteAthenaRoot import createNtupleOutputStream
+        from AthenaRootComps.AthenaRootCompsConf import Athena__RootOutputStreamTool as AthenaRootOutputStreamTool
+        from AthenaRootComps.AthenaRootCompsConf import Athena__RootNtupleOutputMetadataTool as RootNtupleOutputMetadataTool
+
+        self.Stream = createNtupleOutputStream( StreamName, FileName, tupleName, asAlg)
+        metadataTool = RootNtupleOutputMetadataTool( StreamName + "Meta" )
+        metadataTool.OutputFile = FileName
+        metadataTool.StreamName = StreamName
+        metadataTool.TupleName = tupleName
+        theApp.serviceMgr().ToolSvc += metadataTool
+        self.Stream.HelperTools = [metadataTool]
+        self.Stream.ForceRead=True;
+
+        return
+
+    def SetOutputFileName(self, name):
+        self.Stream.OutputFile = name
+        return
+
+    #########################################
+    #Items & MetaDataItems
+    def AddItem(self, item):
+        self._AddValidItemToList(item, self.Stream.ItemList)
+        return
+
+    def RemoveItem(self, item):
+        self._RemoveValidItemFromList(item, self.Stream.ItemList)
+        return
+
+    def GetItems(self):
+        return self.Stream.ItemList
+
+    def AddMetaDataItem(self, item):
+        #self._AddValidItemToList(item, self.Stream_FH.ItemList)
+        return
+
+    def RemoveMetaDataItem(self, item):
+        #self._RemoveValidItemFromList(item, self.Stream_FH.ItemList)
+        #return
+        pass
+
+    def GetMetaDataItems(self):
+        #return self.Stream_FH.ItemList
+        pass
+
+    #########################################
+    def Print(self):
+        print "**** AugmentedPoolStream",self.Name,"****"
+        print "Output file:"
+        print self.Stream.OutputFile
+        print "AcceptAlgs:"
+        print self.Stream.AcceptAlgs
+        print "RequireAlgs:"
+        print self.Stream.RequireAlgs
+        print "VetoAlgs:"
+        print self.Stream.VetoAlgs
+        print "OtherAlgs to bookkeep (but not directly used by the Stream):"
+        print self.OtherAlgs
+        print "Master prescale:"
+        print self.GetPrescale()
+        print "ItemList:"
+        print self.Stream.ItemList
+        #print "MetaData ItemList:"
+        #print self.Stream_FH.ItemList
+        return
+#############################################################
+class MultipleNTUPStreamManager:
+    def __init__(self):
+        self.StreamList=[]
+        self.nStream=0
+        self.StreamDict={}
+        self._Locked=False
+        return
+
+    def NewNTUPStream(self,StreamName,FileName=None,TreeName=None,asAlg=False):
+        # Check if a file name was specified or not:
+        if FileName == None:
+            FileName = StreamName + ".root"
+        # Use the common function for creating the stream:
+        return self.NewStream( StreamName, FileName, type='ntup', asAlg = asAlg,
+                               TreeName = TreeName )
+    def NewStream(self,StreamName,FileName="default",type='pool',asAlg=False,TreeName=None):
+        if FileName=="default":
+            FileName=StreamName+".pool.root"
+        try:
+            #Check whether a stream with the same name already exists
+            index=self.StreamDict[StreamName]
+        except KeyError:
+            #The stream doesn't already exist. Register it and set it up.
+            #(This is expected, not actually an error.)
+            index=self.nStream
+            if type=='ntup':
+                self.StreamList += [ AugmentedNTUPStream(StreamName,FileName,TreeName,asAlg) ]
+            else:
+                raise RuntimeError("Unknown type '%s'"%type)
+
+            self.StreamDict[StreamName]=index
+            self.nStream+=1
+        else:
+            #This is the real error case...
+            raise NameError("Stream %s already exists"%StreamName)
+        return self.StreamList[index]
+    
+    def GetStream(self, NameOrIndex):
+        #If NameOrIndex is an int, treat it as an index
+        if isinstance(NameOrIndex, int):
+            if NameOrIndex < self.nStream:
+                return self.StreamList[NameOrIndex]
+            else:
+                raise IndexError("ERROR: No stream with index %i is defined in MultipleNTUPStreamManager."%NameOrIndex)
+
+        #else treat NameOrIndex as a name in the Stream Dictionary
+        try:
+            #Check whether a stream with the same name already exists
+            index=self.StreamDict[NameOrIndex]
+        except KeyError:
+            raise NameError("Stream %s undefined!"%NameOrIndex)
+        
+        return self.StreamList[index]
+    
+    def StreamExists(self, StreamName):        
+        try:
+            index=self.StreamDict[StreamName]
+        except KeyError:
+            return False
+        
+        return True
+    
+    def Print(self):
+        print "**** MultipleNTUPStreamManager INFOS ****" 
+        print "Number of streams:", self.nStream
+        i=0
+        for Stream in self.StreamList:
+            print "----------------------- Stream #",i," -----------------------"
+            Stream.Print()
+            i+=1
+        return
+
+    #Commands for the real manager (normal users only manipulate their own streams with the functions above)
+    def Lock(self):
+        self._Locked=True
+        return
+
+    def Unlock(self):
+        self._Locked=False
+        return
+    
+    def AddItemToAllStreams(self, item):
+        if self._Locked is True:
+            raise AssertionError("MSMgr is locked. AddItemToAllStreams cannot be used.")
+        for Stream in self.StreamList:
+            Stream.AddItem(item)
+        return
+    
+    def RemoveItemFromAllStreams(self, item):
+        if self._Locked is True:
+            raise AssertionError("MSMgr is locked. RemoveItemFromAllStreams cannot be used.")
+        for Stream in self.StreamList:
+            Stream.RemoveItem(item)
+        return
+    
+    def AddMetaDataItemToAllStreams(self, item):
+        if self._Locked is True:
+            raise AssertionError("MSMgr is locked. AddMetaDataItemToAllStreams cannot be used.")
+        for Stream in self.StreamList:
+            Stream.AddMetaDataItem(item)
+        return
+
+    def RemoveMetaDataItemFromAllStreams(self, item):
+        if self._Locked is True:
+            raise AssertionError("MSMgr is locked. AddMetaDataItemFromAllStreams cannot be used.")
+        for Stream in self.StreamList:
+            Stream.RemoveMetaDataItem(item)
+        return
+
+    def RenameAllStreams(self, NameList):
+        if self._Locked is True:
+            raise AssertionError("MSMgr is locked. RenameAllStreams cannot be used.")
+        if not isinstance(NameList, list):
+            raise TypeError("RenameAllStreams does not accep arguments of type %s"%type(NameList))
+        if len(NameList) != self.nStream:
+            raise IndexError("NameList needs to have the same length as self.StreamList.")
+        
+        i=0
+        while i<self.nStream:
+            self.StreamList[i].SetOutputFileName(NameList[i])
+            i+=1            
+        return
+
+    def WriteSkimDecisionsOfAllStreams(self):
+        if self._Locked:
+            raise AssertionError("MSMgr is locked. WriteSkimDecisionsOfAllStreams cannot be used.")
+        
+        from AthenaCommon.AlgSequence import AlgSequence
+        topSequence = AlgSequence()
+        for Stream in self.StreamList:
+            if Stream.GetAcceptAlgs() or Stream.GetOtherAlgsToBookkeep() or Stream.GetRequireAlgs() or Stream.GetVetoAlgs():
+                sdw=Stream.GetSkimDecisionsWriter()
+                topSequence+=sdw
+                if isinstance(Stream,AugmentedPoolStream):
+                    Stream.AddItem("SkimDecisionCollection#"+sdw.SkimDecisionsContainerName)
+        return
+
+    def CreateEventBookkeepersWriterForAllFilters(self,doMCTruth=False,cycle_number=0):        
+        from EventBookkeeperTools.BookkeepingInfoWriter import EventBookkeepersWriter
+        ebw=EventBookkeepersWriter()
+        ebw.setDoMC( doMCTruth )
+        ebw.setCycle(cycle_number)
+        
+        #Loop over all streams and add all associated algorithms to ebw
+        for Stream in self.StreamList:
+            for a in Stream.GetRequireAlgs():
+                ebw.addRequireAlg(a,StreamName=Stream.Name)
+            for a in Stream.GetAcceptAlgs():
+                ebw.addAcceptAlg(a,StreamName=Stream.Name)
+            for a in Stream.GetVetoAlgs():
+                ebw.addVetoAlg(a,StreamName=Stream.Name)
+            for a in Stream.GetOtherAlgsToBookkeep():
+                ebw.addOtherAlg(a,StreamName=Stream.Name)
+
+        return ebw
+    
+
+
+############################################################################
+# Create one instance of MultipleNTUPStreamManager (MNSMgr) if not already done.
+# Otherwise, do nothing (avoid overwriting MSMgr!).
+if vars().has_key('MNSMgr'):
+    raise RuntimeError("MNSMgr already exists?!? This will almost certainly create erroneous results.")
+MNSMgr=MultipleNTUPStreamManager()
+
diff --git a/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/python/NTUPUtils.py b/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/python/NTUPUtils.py
new file mode 100644
index 0000000000000000000000000000000000000000..ac9eddda1d85ebf9d7a61309ccbdd919068a49c6
--- /dev/null
+++ b/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/python/NTUPUtils.py
@@ -0,0 +1,63 @@
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+from RecExConfig.RecoFunctions import AddValidItemToList
+def SetupOutputNTUPs(runArgs,flagContainerList):
+     NTUPScripts=[]
+     for flagContainer in flagContainerList:
+         for flagName in flagContainer.__dict__.keys():
+             flag=getattr(flagContainer,flagName)
+             if hasattr(flag,"StreamName"):
+                 ntupName=flag.StreamName.lstrip("Stream")
+                 argName='output'+ntupName+'File'
+                 if hasattr(runArgs,argName):
+                     if hasattr(flag,"FileName"):
+                         flag.FileName=getattr(runArgs,argName)
+                         flag.set_Value_and_Lock( True )
+                         if hasattr(flag,"NTUPScript"):
+                             AddValidItemToList(flag.NTUPScript,NTUPScripts)
+                         elif hasattr (flag, 'configure'):
+                             NTUPScripts.append(flag.configure)
+                         else:
+                             raise RuntimeError("No athena script attached to argument '%s'"%argName)
+                     else:
+                         raise RuntimeError("%s is not conform with the job transform convention, hence unusable. Please fix this."%ntupName)
+
+     return NTUPScripts
+
+def SetupOutputSkimNTUPs(runArgs,flagContainerList):
+     SkimNTUPScripts=[]
+     for flagContainer in flagContainerList:
+         for flagName in flagContainer.__dict__.keys():
+             flag=getattr(flagContainer,flagName)
+             if hasattr(flag,"StreamName"):
+                 ntupName=flag.StreamName.lstrip("Stream")
+                 argName='output'+ntupName+'File'
+                 if hasattr(runArgs,argName):
+                     if hasattr(flag,"FileName"):
+                         flag.FileName=getattr(runArgs,argName)
+                         flag.set_Value_and_Lock( True )
+                         if hasattr(flag,"SkimNTUPScript"):
+                             AddValidItemToList(flag.SkimNTUPScript,SkimNTUPScripts)
+                         elif hasattr (flag, 'configure'):
+                             SkimNTUPScripts.append(flag.configure)
+                         else:
+                             raise RuntimeError("No athena script attached to argument '%s'"%argName)
+                     else:
+                         raise RuntimeError("%s is not conform with the job transform convention, hence unusable. Please fix this."%ntupName)
+
+     return SkimNTUPScripts
+
+def NTUPtoNTUPProdFlags():
+    from NTUPtoNTUPCore.NTUPtoNTUPProdFlags import prodFlags
+    return prodFlags
+
+def SkimProdFlags():
+    from NTUPtoNTUPCore.SkimProdFlags import prodFlags
+    return prodFlags
+
+def NTUPtoNTUPTopOptionsInclude():
+    include( "NTUPtoNTUPCore/NTUPtoNTUP_topOptions.py" )
+
+def MNSMgr():
+    from NTUPtoNTUPCore.MultipleNTUPStreamManager import MNSMgr
+    return MNSMgr
diff --git a/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/python/NTUPtoNTUPProdFlags.py b/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/python/NTUPtoNTUPProdFlags.py
new file mode 100644
index 0000000000000000000000000000000000000000..b7657aadd3d4b97038849e099bb51beaad827f8f
--- /dev/null
+++ b/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/python/NTUPtoNTUPProdFlags.py
@@ -0,0 +1,111 @@
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+# NTUPtoNTUPProdFlags.py for NTUP->NTUP
+#
+
+from AthenaCommon.JobProperties import JobProperty, JobPropertyContainer
+from AthenaCommon.JobProperties import jobproperties
+
+listAllKnownNTUPtoNTUP=[]
+
+
+class NTUPtoNTUPProdFlags (JobPropertyContainer):
+    """ The NTUPtoNTUPProdFlags flag/job property  container."""
+jobproperties.add_Container(NTUPtoNTUPProdFlags)
+
+prodFlags = jobproperties.NTUPtoNTUPProdFlags
+
+# This function correctly determines the name of the output file.
+# If the transform set the FileName to the "official" name and locked
+# the jobProperty, that one will be used. Otherwise the user sets it.
+def buildFileName( jobPropFlag ):
+    if jobPropFlag.is_locked() :
+        fileName = jobPropFlag.FileName
+    else :
+        fileName = (prodFlags.OutputDirectoryName() +
+                    prodFlags.OutputPoolRootFileNamePrefix() +
+                    jobPropFlag.StreamName +
+                    prodFlags.OutputMiddleName() +
+                    ".root")
+
+    return fileName
+
+# Allows prodflags from elsewhere to be defined 
+def _importFlagsFromModule (mod):
+    """Given the full name of a module MOD, import it.
+Then look for objects in the module with names like WriteNTUP_RED that
+derive from JobProperty and add them to our list."""
+    name = mod.rsplit('.', 1)[-1]
+    mod = __import__ (mod, fromlist = [name])
+    for k, v in mod.__dict__.items():
+        if (k.startswith ('WriteNTUP_RED') and
+            issubclass (v, JobProperty)):
+            if hasattr (v, 'NTUPScript') :
+                prodFlags.add_JobProperty (v)
+                listAllKnownNTUPtoNTUP.append (getattr (prodFlags, k))
+    return
+   
+class TreeName(JobProperty):
+    """ Tree name """
+    statusOn     = True
+    allowedTypes = ['str']
+    StoredValue  = ""
+    pass
+prodFlags.add_JobProperty(TreeName)
+
+from RecExConfig.RecoFunctions import AddValidItemToList,RemoveValidItemFromList
+class NTUPScripts(JobProperty):
+    """ Setting NTUPScripts+=['MyPackage/NTUPOptions.py'] will execute NTUPOptions.py
+    """ 
+    statusOn=True
+    allowedTypes=['list']
+    StoredValue=[]
+    def append(self,item):
+        if self.is_locked():
+            self._log.info('The JobProperty %s is blocked' % self.__name__)
+        else:
+            AddValidItemToList(item,self.StoredValue)
+        return
+    def remove(self,item):
+        if self.is_locked():
+            self._log.info('The JobProperty %s is blocked' % self.__name__)
+        else:
+            RemoveValidItemFromList(item,self.StoredValue)
+        return
+prodFlags.add_JobProperty(NTUPScripts)
+
+class WriteMyNTUP (JobProperty):
+    """skimming NTUP"""
+    statusOn = True
+    allowedTypes = ['bool']
+    StoredValue = False
+    StreamName = 'StreamNTUP_MYNTUP'
+    FileName = ''
+    isVirtual = False
+    NTUPScript = "NTUPtoNTUPExample/MyNTUP_prodJobOFragment.py"
+    TreeNames = ['physics']
+    SubSteps = ['n2n']
+prodFlags.add_JobProperty (WriteMyNTUP)
+listAllKnownNTUPtoNTUP.append (prodFlags.WriteMyNTUP)
+
+class WriteMyNTUP2 (JobProperty):
+    """test NTUP"""
+    statusOn = True
+    allowedTypes = ['bool']
+    StoredValue = False
+    StreamName = 'StreamNTUP_MYNTUP2'
+    FileName = ''
+    isVirtual = False
+    NTUPScript = "NTUPtoNTUPExample/MyNTUP2_prodJobOFragment.py"
+    TreeNames = ['physics']
+    SubSteps = ['n2n']
+prodFlags.add_JobProperty (WriteMyNTUP2)
+listAllKnownNTUPtoNTUP.append (prodFlags.WriteMyNTUP2)
+
+################################################################
+# Following block of stream names for the Derivation Framework #
+
+_importFlagsFromModule('DerivationFrameworkCore.DerivationFrameworkProdFlags')
+
+# End of Derivation Framework formats #
+#######################################
diff --git a/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/python/SkimNTUP_ProdFlags.py b/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/python/SkimNTUP_ProdFlags.py
new file mode 100644
index 0000000000000000000000000000000000000000..a33179f250b4faf64b88a9670f07f947673820e3
--- /dev/null
+++ b/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/python/SkimNTUP_ProdFlags.py
@@ -0,0 +1,113 @@
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+# SkimNTUP_ProdFlags.py for NTUP->NTUP
+#
+
+from AthenaCommon.JobProperties import JobProperty, JobPropertyContainer
+from AthenaCommon.JobProperties import jobproperties
+
+listAllKnownSkimNTUP=[]
+
+
+class SkimNTUP_ProdFlags (JobPropertyContainer):
+    """ The SkimNTUP_ProdFlags flag/job property  container."""
+jobproperties.add_Container(SkimNTUP_ProdFlags)
+
+prodFlags = jobproperties.SkimNTUP_ProdFlags
+
+# This function correctly determines the name of the output file.
+# If the transform set the FileName to the "official" name and locked
+# the jobProperty, that one will be used. Otherwise the user sets it.
+def buildFileName( jobPropFlag ):
+    if jobPropFlag.is_locked() :
+        fileName = jobPropFlag.FileName
+    else :
+        fileName = (prodFlags.OutputDirectoryName() +
+                    prodFlags.OutputPoolRootFileNamePrefix() +
+                    jobPropFlag.StreamName +
+                    prodFlags.OutputMiddleName() +
+                    ".root")
+
+    return fileName
+
+def _importFlagsFromModule (mod):
+    """Given the full name of a module MOD, import it.
+Then look for objects in the module with names like Write*NTUP that
+derive from JobProperty and add them to our list."""
+    name = mod.rsplit('.', 1)[-1]
+    mod = __import__ (mod, fromlist = [name])
+    for k, v in mod.__dict__.items():
+        if (k.startswith ('Write') and k.endswith ('NTUP') and
+            issubclass (v, JobProperty)):
+            jobproperties.SkimNTUP_ProdFlags.add_JobProperty (v)
+            listAllKnownSkimNTUP.append (getattr (prodFlags, k))
+    return
+
+class TreeName(JobProperty):
+    """ Tree name """
+    statusOn     = True
+    allowedTypes = ['str']
+    StoredValue  = ""
+    pass
+prodFlags.add_JobProperty(TreeName)
+
+from RecExConfig.RecoFunctions import AddValidItemToList,RemoveValidItemFromList
+class SkimNTUPScripts(JobProperty):
+    """ Setting SkimNTUPScripts+=['MyPackage/NTUPOptions.py'] will execute NTUPOptions.py
+    """ 
+    statusOn=True
+    allowedTypes=['list']
+    StoredValue=[]
+    def append(self,item):
+        if self.is_locked():
+            self._log.info('The JobProperty %s is blocked' % self.__name__)
+        else:
+            AddValidItemToList(item,self.StoredValue)
+        return
+    def remove(self,item):
+        if self.is_locked():
+            self._log.info('The JobProperty %s is blocked' % self.__name__)
+        else:
+            RemoveValidItemFromList(item,self.StoredValue)
+        return
+prodFlags.add_JobProperty(SkimNTUPScripts)
+
+class WriteMySkimNTUP (JobProperty):
+    """skimming NTUP"""
+    statusOn = True
+    allowedTypes = ['bool']
+    StoredValue = False
+    StreamName = 'StreamNTUP_MYSKIMNTUP'
+    FileName = ''
+    isVirtual = False
+    SkimNTUPScript = "NTUPtoNTUPExample/MySkimNTUP_prodJobOFragment.py"
+    TreeNames = ['physics']
+    SubSteps = ['n2n']
+prodFlags.add_JobProperty (WriteMySkimNTUP)
+listAllKnownSkimNTUP.append (prodFlags.WriteMySkimNTUP)
+
+class WriteMySkimNTUP2 (JobProperty):
+    """skimming NTUP"""
+    statusOn = True
+    allowedTypes = ['bool']
+    StoredValue = False
+    StreamName = 'StreamNTUP_MYSKIMNTUP2'
+    FileName = ''
+    isVirtual = False
+    SkimNTUPScript = "NTUPtoNTUPExample/MySkimNTUP2_prodJobOFragment.py"
+    TreeNames = ['physics']
+    SubSteps = ['n2n']
+prodFlags.add_JobProperty (WriteMySkimNTUP2)
+listAllKnownSkimNTUP.append (prodFlags.WriteMySkimNTUP2)
+
+
+# Egamma D3PDs
+_importFlagsFromModule ('EgammaN2N.EgammaN2NProdFlags')
+
+# Exotics D3PDs
+_importFlagsFromModule ('ExoticsN2N.ExoticsN2NProdFlags')
+
+# Jet D3PDs
+_importFlagsFromModule ('JetN2N.JetN2NProdFlags')
+
+print 'Known NTUPs are', listAllKnownSkimNTUP
diff --git a/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/python/__init__.py b/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/python/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..74583d364ec2ca794156596c7254d9b234a940c6
--- /dev/null
+++ b/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/python/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
diff --git a/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/scripts/NTUPtoNTUP_trf.py b/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/scripts/NTUPtoNTUP_trf.py
new file mode 100755
index 0000000000000000000000000000000000000000..fae173520710ecdaac8c5080c3ce166ab7a4f182
--- /dev/null
+++ b/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/scripts/NTUPtoNTUP_trf.py
@@ -0,0 +1,131 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+__doc__ = """NTUP -> NTUP skimming/thinning files."""
+
+ListOfDefaultPositionalKeys=[ 'maxEvents', 'skipEvents', 'preInclude', 'postInclude', 'preExec', 'postExec', 'topOptions', 'AMITag', 'tree_name', '--ignoreerrors', '--athenaopts', '--omitvalidation']
+from PATJobTransforms.Configuration import ConfigDic
+for key in ConfigDic.keys():
+    if key.startswith('inputNTUP') and key.endswith('File'):
+        ListOfDefaultPositionalKeys.append(key)
+from NTUPtoNTUPCore.NTUPtoNTUPProdFlags import listAllKnownNTUPtoNTUP
+for ntup in listAllKnownNTUPtoNTUP:
+    dInName='input'+(ntup.StreamName.lstrip("Stream"))+'File'
+    ListOfDefaultPositionalKeys.append(dInName)
+    dOutName='output'+(ntup.StreamName.lstrip("Stream"))+'File'
+    ListOfDefaultPositionalKeys.append(dOutName)
+
+from PATJobTransforms.BaseOfBasicTrf import BaseOfBasicTrf
+from PyJobTransformsCore.trf import Author
+from PyJobTransformsCore.trf import JobTransform
+
+class NTUPtoNTUPJobTransform( BaseOfBasicTrf ):
+    def __init__(self,inDic):
+        JobTransform.__init__(self,
+                                authors = [ Author('Michiru Kaneda', 'Michiru.Kaneda@cern.ch')],
+                                skeleton='NTUPtoNTUPCore/skeleton.NTUPtoNTUP_trf.py',
+                                help = __doc__ )
+
+        if not isinstance(inDic,dict):
+            raise TypeError("inDic has type '%s' but should be a dictionary." %type(inDic))
+        # Add tree_name to ConfigDic
+        from PyJobTransformsCore.full_trfarg import BasicStringArg
+        from PATJobTransforms.Configuration import AddToConfigDic
+        def AddTreeName(trf,inDic):
+            trf.add(BasicStringArg(name='tree_name'))
+        AddToConfigDic('tree_name',AddTreeName)
+
+        # Set tree_name if it is in arguments
+        self.tree_name=''
+        if inDic.has_key('tree_name'):
+            self.tree_name=inDic['tree_name']
+        else:
+            self.tree_name=''
+            inDic['tree_name']=''
+
+        # Add NTUPtoNTUP types to input files of ConfigDic
+        self.AddNTUPToInputFilesOfConfigDic()
+
+        # Check tree name from input file
+        if self.tree_name == '':self.CheckTreeName(inDic)
+
+        # Add NTUPtoNTUP types to output files of ConfigDic
+        self.AddNTUPToOutputFilesOfConfigDic()
+
+        # set tree_name in arguments
+        inDic['tree_name']=self.tree_name
+
+        # Auto Configure From Dictionary
+        from PATJobTransforms.ConfigDicUtils import AutoConfigureFromDic
+        self.inDic=inDic
+        AutoConfigureFromDic(self,inDic)
+
+    def matchEventsExpectEqual(self,inputFileArgName,outputFileArgName):
+        self.logger().info("MatchEvents is not executed for NTUP outputs.")
+        return
+
+    def AddNTUPToInputFilesOfConfigDic(self):
+        # Add NTUP skim to input files of ConfigDic
+        try:
+            from PATJobTransforms.Configuration import AddDPDFunctionMaker, AddToConfigDic, AddInputNTUPFunctionMaker
+            from PyJobTransformsCore.full_trfarg import NtupleFileArg
+            from NTUPtoNTUPCore.NTUPtoNTUPProdFlags import listAllKnownNTUPtoNTUP
+            for ntup in listAllKnownNTUPtoNTUP:
+                dpdName = ntup.StreamName
+                inName='input'+(dpdName.lstrip("Stream"))+'File'
+                if self.tree_name != '':
+                    tree_names = [self.tree_name]
+                else:
+                    tree_names = ntup.TreeNames
+                function=AddInputNTUPFunctionMaker(inName,tree_names)
+                AddToConfigDic(inName,function)
+                pass
+        except ImportError:
+            print "WARNING NTUPtoNTUPProdFlags is not available. Only OK if you're using job transforms without the AtlasAnalysis project."
+
+    def CheckTreeName(self,inDic):
+        from PyJobTransformsCore.full_trfarg import InputNtupleFileArg
+        # Check tree name from input file
+        for key in inDic.keys():
+            if key.startswith('inputNTUP') and key.endswith('File') and ConfigDic.has_key(key):
+                trf=set()
+                ConfigDic[key](trf=trf,inDic={})
+                for x in trf:
+                    if isinstance(x, InputNtupleFileArg):
+                        self.tree_name=x._fileType.tree_names[0]
+                        return
+
+    def AddNTUPToOutputFilesOfConfigDic(self):
+        # Add NTUP skim to output files of ConfigDic
+        try:
+            from PATJobTransforms.Configuration import AddDPDFunctionMaker, AddToConfigDic, AddInputNTUPFunctionMaker
+            from PyJobTransformsCore.full_trfarg import NtupleFileArg
+            from NTUPtoNTUPCore.NTUPtoNTUPProdFlags import listAllKnownNTUPtoNTUP
+            for ntup in listAllKnownNTUPtoNTUP:
+                kw = {}
+                fileArg=NtupleFileArg
+                if self.tree_name!='':
+                    kw['tree_names'] = [self.tree_name]
+                else:
+                    kw['tree_names'] = ntup.TreeNames
+                dpdName = ntup.StreamName
+                outName='output'+(dpdName.lstrip("Stream"))+'File'
+                function=AddDPDFunctionMaker(outName, fileArg, **kw)
+                function.isOutput='root'
+                function.subSteps=ntup.SubSteps
+                AddToConfigDic(outName,function)
+                pass
+        except ImportError:
+            print "WARNING NTUPtoNTUPProdFlags is not available. Only OK if you're using job transforms without the AtlasAnalysis project."
+
+#----------------------------------------------------------------------
+if __name__ == '__main__':
+    #Special preparation for command-line
+    import sys
+    from PATJobTransforms.ArgDicTools import BuildDicFromCommandLine
+    inDic=BuildDicFromCommandLine(sys.argv)
+    #Construct and execute the transform
+    trf = NTUPtoNTUPJobTransform(inDic)
+    sys.exit(trf.exeArgDict(inDic).exitCode())
+
diff --git a/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/scripts/SkimNTUP_trf.py b/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/scripts/SkimNTUP_trf.py
new file mode 100755
index 0000000000000000000000000000000000000000..cc1a689039bf74f257ced4f756456b93bcf474df
--- /dev/null
+++ b/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/scripts/SkimNTUP_trf.py
@@ -0,0 +1,133 @@
#!/usr/bin/env python

# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration

__doc__ = """Skim/Slim NTUP files."""

# Positional keys accepted on the command line; the inputNTUP*File and
# output*File keys are appended below from ConfigDic and from the list of
# known skim NTUP types.
ListOfDefaultPositionalKeys=[ 'maxEvents', 'skipEvents', 'preInclude', 'postInclude', 'preExec', 'postExec', 'topOptions', 'AMITag', 'tree_name', '--ignoreerrors', '--athenaopts', '--omitvalidation']
from PATJobTransforms.Configuration import ConfigDic
for key in ConfigDic.keys():
    if key.startswith('inputNTUP') and key.endswith('File'):
        ListOfDefaultPositionalKeys.append(key)
from NTUPtoNTUPCore.SkimNTUP_ProdFlags import listAllKnownSkimNTUP
for ntup in listAllKnownSkimNTUP:
    # Strip the literal "Stream" prefix.  The original str.lstrip("Stream")
    # removes *characters*, not a prefix, and only worked by accident for
    # the current stream names.
    base = ntup.StreamName
    if base.startswith("Stream"):
        base = base[len("Stream"):]
    ListOfDefaultPositionalKeys.append('input' + base + 'File')
    ListOfDefaultPositionalKeys.append('output' + base + 'File')

from PATJobTransforms.BaseOfBasicTrf import BaseOfBasicTrf
from PyJobTransformsCore.trf import Author
from PyJobTransformsCore.trf import JobTransform
class SkimNTUPJobTransform( BaseOfBasicTrf ):
    """Job transform that skims/slims flat NTUP files (NTUP -> NTUP).

    The constructor determines the ntuple tree name (from an explicit
    'tree_name' argument, or auto-detected from the registered input-file
    type), registers every known skim NTUP type as a possible input and
    output file argument in ConfigDic, and finally auto-configures the
    transform from the argument dictionary.
    """

    def __init__(self,inDic):
        """Build the transform from the command-line argument dictionary.

        Raises TypeError when inDic is not a dict.  Note the deliberate
        ordering: inputs are registered, then the tree name is resolved,
        then outputs are registered using the final tree name.
        """
        # Initialise JobTransform directly; BaseOfBasicTrf.__init__ is
        # bypassed on purpose — argument handling is done here instead.
        JobTransform.__init__(self,
                                authors = [ Author('Michiru Kaneda', 'Michiru.Kaneda@cern.ch')],
                                skeleton='NTUPtoNTUPCore/skeleton.SkimNTUP_trf.py',
                                help = __doc__ )

        if not isinstance(inDic,dict):
            raise TypeError("inDic has type '%s' but should be a dictionary." %type(inDic))

        # Register 'tree_name' as a recognised transform argument.
        from PyJobTransformsCore.full_trfarg import BasicStringArg
        from PATJobTransforms.Configuration import AddToConfigDic
        def AddTreeName(trf,inDic):
            trf.add(BasicStringArg(name='tree_name'))
        AddToConfigDic('tree_name',AddTreeName)

        # Honour an explicit tree_name; default to '' (auto-detect below).
        # ('in'/dict.get replace the Python-2-only has_key idiom, and the
        # original's redundant double assignment of self.tree_name.)
        self.tree_name = inDic.get('tree_name','')
        if 'tree_name' not in inDic:
            inDic['tree_name']=''

        # Register skim NTUP types as possible input files.
        self.AddNTUPSkimToInputFilesOfConfigDic()

        # No explicit tree name given: deduce it from the input-file type.
        if self.tree_name == '':
            self.CheckTreeName(inDic)

        # Register skim NTUP types as possible output files (uses the
        # now-final tree name).
        self.AddNTUPSkimToOutputFilesOfConfigDic()

        # Propagate the resolved tree name back to the argument dictionary.
        inDic['tree_name']=self.tree_name

        # Auto-configure the transform from the dictionary.
        from PATJobTransforms.ConfigDicUtils import AutoConfigureFromDic
        self.inDic=inDic
        AutoConfigureFromDic(self,inDic)

    def matchEventsExpectEqual(self,inputFileArgName,outputFileArgName):
        """Skip the input/output event-count check: not meaningful for NTUP."""
        self.logger().info("MatchEvents is not executed for NTUP outputs.")

    @staticmethod
    def _stripStreamPrefix(streamName):
        """Return streamName without a leading literal 'Stream' prefix.

        The original code used str.lstrip("Stream"), which strips any of the
        *characters* S/t/r/e/a/m and only worked by accident for the current
        stream names; this is the intended prefix removal.
        """
        if streamName.startswith("Stream"):
            return streamName[len("Stream"):]
        return streamName

    def AddNTUPSkimToInputFilesOfConfigDic(self):
        """Register every known skim NTUP type as an input-file argument.

        Degrades to a warning when the AtlasAnalysis modules are absent.
        """
        try:
            from PATJobTransforms.Configuration import AddToConfigDic, AddInputNTUPFunctionMaker
            from NTUPtoNTUPCore.SkimNTUP_ProdFlags import listAllKnownSkimNTUP
            for ntup in listAllKnownSkimNTUP:
                inName = 'input' + self._stripStreamPrefix(ntup.StreamName) + 'File'
                if self.tree_name != '':
                    tree_names = [self.tree_name]
                else:
                    tree_names = ntup.TreeNames
                function = AddInputNTUPFunctionMaker(inName,tree_names)
                AddToConfigDic(inName,function)
        except ImportError:
            print("WARNING SkimNTUP_ProdFlags is not available. Only OK if you're using job transforms without the AtlasAnalysis project.")

    def CheckTreeName(self,inDic):
        """Auto-detect self.tree_name from the first inputNTUP*File argument.

        Looks up the argument's registered InputNtupleFileArg in ConfigDic
        (module-level import above) and takes its file type's tree name
        (first entry when it is a list).  Returns on the first match.
        """
        from PyJobTransformsCore.full_trfarg import InputNtupleFileArg
        for key in inDic.keys():
            if key.startswith('inputNTUP') and key.endswith('File') and key in ConfigDic:
                trf=set()
                ConfigDic[key](trf=trf,inDic={})
                for x in trf:
                    if isinstance(x, InputNtupleFileArg):
                        if type(x._fileType.tree_names) is list:
                            self.tree_name=x._fileType.tree_names[0]
                        else:
                            self.tree_name=x._fileType.tree_names
                        return

    def AddNTUPSkimToOutputFilesOfConfigDic(self):
        """Register every known skim NTUP type as an output-file argument.

        An explicit self.tree_name overrides the per-type default trees.
        Degrades to a warning when the AtlasAnalysis modules are absent.
        """
        try:
            from PATJobTransforms.Configuration import AddDPDFunctionMaker, AddToConfigDic
            from PyJobTransformsCore.full_trfarg import NtupleFileArg
            from NTUPtoNTUPCore.SkimNTUP_ProdFlags import listAllKnownSkimNTUP
            for ntup in listAllKnownSkimNTUP:
                kw = {}
                if self.tree_name != '':
                    kw['tree_names'] = [self.tree_name]
                else:
                    kw['tree_names'] = ntup.TreeNames
                outName = 'output' + self._stripStreamPrefix(ntup.StreamName) + 'File'
                function = AddDPDFunctionMaker(outName, NtupleFileArg, **kw)
                function.isOutput='root'
                function.subSteps=ntup.SubSteps
                AddToConfigDic(outName,function)
        except ImportError:
            print("WARNING SkimNTUP_ProdFlags is not available. Only OK if you're using job transforms without the AtlasAnalysis project.")
+
#----------------------------------------------------------------------
if __name__ == '__main__':
    # Command-line entry point: build an argument dictionary from argv,
    # construct the transform from it, execute, and exit with its code.
    import sys
    from PATJobTransforms.ArgDicTools import BuildDicFromCommandLine
    argDic = BuildDicFromCommandLine(sys.argv)
    transform = SkimNTUPJobTransform(argDic)
    sys.exit(transform.exeArgDict(argDic).exitCode())
+
diff --git a/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/share/NTUPtoNTUP_topOptions.py b/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/share/NTUPtoNTUP_topOptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..14f0e9959f117c65a43d12e2466b676e82202151
--- /dev/null
+++ b/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/share/NTUPtoNTUP_topOptions.py
@@ -0,0 +1,45 @@
+include.block ("NTUPtoNTUP_topOptions.py")
+
+from AthenaCommon.Logging import logging
+logNTUPtoNTUP_topOptions = logging.getLogger( 'NTUPtoNTUP_topOptions' )
+from AthenaCommon.AlgSequence import AlgSequence
+topSequence = AlgSequence()
+from AthenaCommon.AppMgr import ToolSvc,theApp,ServiceMgr
+
+### Set Input
+import AthenaRootComps.ReadAthenaRoot
+from AthenaCommon.AthenaCommonFlags import jobproperties
+ntupFlags=jobproperties.NTUPtoNTUPProdFlags
+ServiceMgr.EventSelector.InputCollections = athenaCommonFlags.FilesInput()
+ServiceMgr.EventSelector.TupleName = jobproperties.NTUPtoNTUPProdFlags.TreeName()
+
+# Number of events to be processed, skip event
+theApp.EvtMax = athenaCommonFlags.EvtMax()
+try:
+    ServiceMgr.EventSelector.SkipEvents = athenaCommonFlags.SkipEvents()
+except Exception:
+    treatException("Could not set EventSelector.SkipEvents")
+
+
+#### Schedule EventCounterAlg
+#from EventBookkeeperTools.EventCounterAlg import EventCounterAlg
+#topSequence+=EventCounterAlg("AllExecutedEvents")
+#
+####EventBookkeepers
+#if not hasattr(ServiceMgr,"CutFlowSvc"):
+#    from EventBookkeeperTools.EventBookkeeperToolsConf import CutFlowSvc
+#    ServiceMgr+=CutFlowSvc()
+#    theApp.CreateSvc+=['CutFlowSvc']
+#    pass
+
+### NTUP->NTUP
+ntupFlags=jobproperties.NTUPtoNTUPProdFlags
+print "Content of rec.NTUPScripts = %s", ntupFlags.NTUPScripts
+if ntupFlags.NTUPScripts()!=[]:
+  for ntup in ntupFlags.NTUPScripts():
+      NTUPName = str(ntup)
+      print "Including %s...",NTUPName
+      include(ntup)
+      pass
+else:
+  print "no scripts in NTUPScripts..."
diff --git a/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/share/SkimNTUP_topOptions.py b/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/share/SkimNTUP_topOptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..8dcef75747e9b8d708223b99a6528207347e9413
--- /dev/null
+++ b/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/share/SkimNTUP_topOptions.py
@@ -0,0 +1,22 @@
+include.block ("SkimNTUP_topOptions.py")
+
+
+from AthenaCommon.Logging import logging
+logSkimNTUP_topOptions = logging.getLogger( 'SkimNTUP_topOptions' )
+from AthenaCommon.AlgSequence import AlgSequence
+topSequence = AlgSequence()
+from AthenaCommon.AppMgr import ToolSvc,theApp,ServiceMgr
+
+theApp.EvtMax = 0
+
+### NTUP->NTUP
+ntupFlags=jobproperties.SkimNTUP_ProdFlags
+print "Content of rec.SkimNTUPScripts = %s", ntupFlags.SkimNTUPScripts
+if ntupFlags.SkimNTUPScripts()!=[]:
+  for ntup in ntupFlags.SkimNTUPScripts():
+      NTUPName = str(ntup)
+      print "Including %s...",NTUPName
+      include(ntup)
+      pass
+else:
+  print "no scripts in SkimNTUPScripts..."
diff --git a/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/share/skeleton.NTUPtoNTUP_trf.py b/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/share/skeleton.NTUPtoNTUP_trf.py
new file mode 100644
index 0000000000000000000000000000000000000000..e85db23e2133c030209e56cc80d49e7211177f56
--- /dev/null
+++ b/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/share/skeleton.NTUPtoNTUP_trf.py
@@ -0,0 +1,85 @@
+
+###############################################################
+#
+# Skeleton top job options for NTUP->NTUP
+#
+#
+#==============================================================
+
+from AthenaCommon.Logging import logging
+recoLog = logging.getLogger('ntup_to_ntup')
+recoLog.info( '****************** STARTING NTUP->NTUP MAKING *****************' )
+
+## max/skip events
+from AthenaCommon.AthenaCommonFlags  import athenaCommonFlags
+if hasattr(runArgs,"skipEvents"): athenaCommonFlags.SkipEvents.set_Value_and_Lock( runArgs.skipEvents )
+else: athenaCommonFlags.SkipEvents=0
+if hasattr(runArgs,"maxEvents"): athenaCommonFlags.EvtMax.set_Value_and_Lock( runArgs.maxEvents )
+else: athenaCommonFlags.EvtMax=-1
+
+## TreeName
+from NTUPtoNTUPCore.NTUPtoNTUPProdFlags import prodFlags
+if hasattr(runArgs,"tree_name"): prodFlags.TreeName.set_Value_and_Lock( runArgs.tree_name )
+else:
+    raise TransformArgumentError(message='tree_name is not defined!')
+
+## Input
+from PATJobTransforms.Configuration import ConfigDic
+from PyJobTransformsCore.trferr import TransformArgumentError
+from PyJobTransformsCore.full_trfarg import InputNtupleFileArg
+inFileArgs=0
+for arg in dir(runArgs):
+    if arg.startswith('inputNTUP') and arg.endswith('File'):
+        inFile = getattr(runArgs,arg)
+        athenaCommonFlags.FilesInput.set_Value_and_Lock(inFile)
+        inFileArgs+=1
+        print "Using argument ", arg, " = ",inFile, ", tree name = ", prodFlags.TreeName()
+if inFileArgs!=1:
+    raise TransformArgumentError(message='Wrong number of inputNTUPXXXFile arguments: {0:d} instead of 1. Stopping!'.format(inFileArgs))
+
+## Automatically turn ON/OFF and set output file name of each possible NTUP
+listOfFlags=[]
+try:
+    listOfFlags.append( prodFlags )
+except ImportError:
+    print "WARNING NTUPtoNTUPProdFlags.py is not available. Only OK if you're using job transforms without the AtlasAnalysis project."
+
+from NTUPtoNTUPCore.NTUPUtils import SetupOutputNTUPs
+from AthenaCommon.JobProperties import jobproperties
+prodFlags.NTUPScripts.append(SetupOutputNTUPs(runArgs,listOfFlags))
+
+from PATJobTransforms.OutputsMgr import outputsMgr,magicKey
+for key in dir(runArgs):
+    if key.startswith(magicKey):
+        outputsMgr.addOutput(key,getattr(runArgs,key))
+
+## Pre-exec
+if hasattr(runArgs,"preExec"):
+    recoLog.info("transform pre-exec")
+    for cmd in runArgs.preExec:
+        recoLog.info(cmd)
+        exec(cmd)
+
+## Pre-include
+if hasattr(runArgs,"preInclude"): 
+    for fragment in runArgs.preInclude:
+        include(fragment)
+
+#========================================================
+# Central topOptions (this is one is a string not a list)
+#========================================================
+if hasattr(runArgs,"topOptions"): include(runArgs.topOptions)
+else: include( "NTUPtoNTUPCore/NTUPtoNTUP_topOptions.py" )
+
+## Post-include
+if hasattr(runArgs,"postInclude"): 
+    for fragment in runArgs.postInclude:
+        include(fragment)
+
+## Post-exec
+if hasattr(runArgs,"postExec"):
+    recoLog.info("transform post-exec")
+    for cmd in runArgs.postExec:
+        recoLog.info(cmd)
+        exec(cmd)
+
diff --git a/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/share/skeleton.SkimNTUP_trf.py b/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/share/skeleton.SkimNTUP_trf.py
new file mode 100644
index 0000000000000000000000000000000000000000..515944a613bc7c5a5ad60b5c7f3f6bb6a83082e9
--- /dev/null
+++ b/PhysicsAnalysis/NTUPtoNTUP/NTUPtoNTUPCore/share/skeleton.SkimNTUP_trf.py
@@ -0,0 +1,83 @@
+###############################################################
+#
+# Skeleton top job options for SkimNTUP_trf
+#
+#==============================================================
+
+from AthenaCommon.Logging import logging
+recoLog = logging.getLogger('ntup_to_ntup')
+recoLog.info( '****************** STARTING NTUP->NTUP MAKING *****************' )
+
+## max/skip events
+from AthenaCommon.AthenaCommonFlags  import athenaCommonFlags
+if hasattr(runArgs,"skipEvents"): athenaCommonFlags.SkipEvents.set_Value_and_Lock( runArgs.skipEvents )
+else: athenaCommonFlags.SkipEvents=0
+if hasattr(runArgs,"maxEvents"): athenaCommonFlags.EvtMax.set_Value_and_Lock( runArgs.maxEvents )
+else: athenaCommonFlags.EvtMax=-1
+
+## TreeName
+from NTUPtoNTUPCore.SkimNTUP_ProdFlags import prodFlags
+if hasattr(runArgs,"tree_name"): prodFlags.TreeName.set_Value_and_Lock( runArgs.tree_name )
+else:
+    raise TransformArgumentError(message='tree_name is not defined!')
+
+## Input
+from PATJobTransforms.Configuration import ConfigDic
+from PyJobTransformsCore.trferr import TransformArgumentError
+from PyJobTransformsCore.full_trfarg import InputNtupleFileArg
+inFileArgs=0
+for arg in dir(runArgs):
+    if arg.startswith('inputNTUP') and arg.endswith('File'):
+        inFile = getattr(runArgs,arg)
+        athenaCommonFlags.FilesInput.set_Value_and_Lock(inFile)
+        inFileArgs+=1
+        print "Using argument ", arg, " = ",inFile, ", tree name = ", prodFlags.TreeName()
+if inFileArgs!=1:
+    raise TransformArgumentError(message='Wrong number of inputNTUPXXXFile arguments: {0:d} instead of 1. Stopping!'.format(inFileArgs))
+
+## Automatically turn ON/OFF and set output file name of each possible NTUP
+listOfFlags=[]
+try:
+    listOfFlags.append( prodFlags )
+except ImportError:
+    print "WARNING SkimNTUPProdFlags.py is not available. Only OK if you're using job transforms without the AtlasAnalysis project."
+
+from NTUPtoNTUPCore.NTUPUtils import SetupOutputSkimNTUPs
+from AthenaCommon.JobProperties import jobproperties
+prodFlags.SkimNTUPScripts.append(SetupOutputSkimNTUPs(runArgs,listOfFlags))
+
+from PATJobTransforms.OutputsMgr import outputsMgr,magicKey
+for key in dir(runArgs):
+    if key.startswith(magicKey):
+        outputsMgr.addOutput(key,getattr(runArgs,key))
+
+## Pre-exec
+if hasattr(runArgs,"preExec"):
+    recoLog.info("transform pre-exec")
+    for cmd in runArgs.preExec:
+        recoLog.info(cmd)
+        exec(cmd)
+
+## Pre-include
+if hasattr(runArgs,"preInclude"): 
+    for fragment in runArgs.preInclude:
+        include(fragment)
+
+#========================================================
+# Central topOptions (this is one is a string not a list)
+#========================================================
+if hasattr(runArgs,"topOptions"): include(runArgs.topOptions)
+else: include( "NTUPtoNTUPCore/SkimNTUP_topOptions.py" )
+
+## Post-include
+if hasattr(runArgs,"postInclude"): 
+    for fragment in runArgs.postInclude:
+        include(fragment)
+
+## Post-exec
+if hasattr(runArgs,"postExec"):
+    recoLog.info("transform post-exec")
+    for cmd in runArgs.postExec:
+        recoLog.info(cmd)
+        exec(cmd)
+