diff --git a/Simulation/ISF/ISF_Example/CMakeLists.txt b/Simulation/ISF/ISF_Example/CMakeLists.txt
index a5d0b0d8d57d2838e2c863a0b6d9199430308ecd..6907e43b736602956773adb4eec50e0ded509586 100644
--- a/Simulation/ISF/ISF_Example/CMakeLists.txt
+++ b/Simulation/ISF/ISF_Example/CMakeLists.txt
@@ -7,5 +7,3 @@ atlas_subdir( ISF_Example )
 
 # Install files from the package:
 atlas_install_python_modules( python/*.py POST_BUILD_CMD ${ATLAS_FLAKE8} )
-atlas_install_joboptions( share/*.py )
-
diff --git a/Simulation/ISF/ISF_Example/share/FatrasPhysicsList.py b/Simulation/ISF/ISF_Example/share/FatrasPhysicsList.py
deleted file mode 100644
index c092f412eb00cc87421d52bd9e0384a95ff89a83..0000000000000000000000000000000000000000
--- a/Simulation/ISF/ISF_Example/share/FatrasPhysicsList.py
+++ /dev/null
@@ -1,94 +0,0 @@
-#
-## @file FatrasExample/python/FatrasPhysicsList.py
-## @purpose Python module to hold common flags to configure JobOptions
-##
-
-""" FatrasPhysicsList
-    Python module to hold the flags steering the Fatras physics list.
-
-"""
-
-__author__ = "A. Salzburger"
-__version__= "$Revision: 463109 $"
-__doc__    = "FatrasPhysicsList"
-
-__all__    = [ "FatrasPhysicsList" ]
-
-# kindly stolen from AthenaCommonFlags from S. Binet and M. Gallas
-
-##-----------------------------------------------------------------------------
-## Import
-
-from AthenaCommon.JobProperties import JobProperty, JobPropertyContainer
-from AthenaCommon.JobProperties import jobproperties
-
-##-----------------------------------------------------------------------------
-## 1st step: define JobProperty classes
-class MultipleScattering(JobProperty):
-    """Steering of multiple scattering in simulation/reconstruction"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = True
-
-class EnergyLoss(JobProperty):
-    """Steering of energy loss (ionisation) in simulation/reconstruction"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = True
-
-class Bremsstrahlung(JobProperty):
-    """Steering of energy loss (radiative) in simulation"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = True
-
-class PairProduction(JobProperty):
-    """Steering of pair production in simulation"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = True
-
-class HadronicInteraction(JobProperty):
-    """Steering of nuclear (hadronic) interactions in simulation"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = True
-    
-class ParticleDecay(JobProperty):
-    """Steering of particle decay in simulation"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = True
-
-##-----------------------------------------------------------------------------
-## 2nd step
-## Definition of the InDet flag container
-class FatrasPhysicsList(JobPropertyContainer):
-    """Container for the Fatras physics list flags
-    """
-    pass
-
-
-##-----------------------------------------------------------------------------
-## 3rd step
-## adding the container to the general top-level container
-jobproperties.add_Container(FatrasPhysicsList)
-
-##-----------------------------------------------------------------------------
-## 4th step
-## adding the physics list flags to the FatrasPhysicsList container
-jobproperties.FatrasPhysicsList.add_JobProperty(MultipleScattering)
-jobproperties.FatrasPhysicsList.add_JobProperty(EnergyLoss)
-jobproperties.FatrasPhysicsList.add_JobProperty(Bremsstrahlung)
-jobproperties.FatrasPhysicsList.add_JobProperty(PairProduction)
-jobproperties.FatrasPhysicsList.add_JobProperty(HadronicInteraction)
-jobproperties.FatrasPhysicsList.add_JobProperty(ParticleDecay)
-
-##-----------------------------------------------------------------------------
-## 5th step
-## short-cut for lazy people
-## careful: do not reuse FatrasPhysicsList itself as the short name,
-## otherwise pickling will fail
-## Note: you still have to import it:
-## >>> from FatrasExample.FatrasPhysicsList import FatrasPhysicsListFlags
-FatrasPhysicsListFlags = jobproperties.FatrasPhysicsList
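For reference, a minimal sketch of how flags built with this JobProperty pattern were typically driven from user job options; the flag choices and values below are illustrative assumptions, not part of this patch:

    # once the module has been included/imported, the container hangs off the
    # top-level jobproperties object
    from AthenaCommon.JobProperties import jobproperties
    jobproperties.FatrasPhysicsList.Bremsstrahlung = False              # plain assignment sets the stored value
    jobproperties.FatrasPhysicsList.HadronicInteraction.set_Value_and_Lock(True)
    if jobproperties.FatrasPhysicsList.ParticleDecay():                 # calling a flag returns its value
        print('Fatras particle decay is enabled')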
diff --git a/Simulation/ISF/ISF_Example/share/FatrasTuning.py b/Simulation/ISF/ISF_Example/share/FatrasTuning.py
deleted file mode 100644
index 580809e44498da9ccc9d615a77860451f7bfcd5c..0000000000000000000000000000000000000000
--- a/Simulation/ISF/ISF_Example/share/FatrasTuning.py
+++ /dev/null
@@ -1,243 +0,0 @@
-#
-## @file FatrasExample/python/FatrasTuning.py
-## @purpose Python module to hold common flags to configure JobOptions
-##
-
-""" FatrasKeys
-    Python module to hold storegate keys of InDet objects.
-
-"""
-
-__author__ = "A. Salzburger"
-__version__= "$Revision: 463109 $"
-__doc__    = "FatrasTuning"
-
-__all__    = [ "FatrasTuning" ]
-
-# kindly stolen from AthenaCommonFlags from S. Binet and M. Gallas
-
-##-----------------------------------------------------------------------------
-## Import
-
-from AthenaCommon.JobProperties import JobProperty, JobPropertyContainer
-from AthenaCommon.JobProperties import jobproperties
-
-##-----------------------------------------------------------------------------
-## 1st step: define JobProperty classes
-
-class MinimumParticlePtPrimaryTrackCreation(JobProperty):
-    """do not simulate primary tracks below this momentum"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 250.
-
-class MinimumMuonPtTrackCreation(JobProperty):
-    """Kill the particle when it falls underneath this threshold"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 1000.        
-    
-class MinimumParticleMomentum(JobProperty):
-    """Kill the particle when it falls underneath this threshold"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 50.
-
-
-class MinimumBremPhotonMomentum(JobProperty):
-    """Follow the photon if it is over threshold"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 150.
-
-# Conversion Creation: 
-class InteractionVolumeRadius(JobProperty):
-    """Volume (r) in which brem photons / had int are processed"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 1200.
-      
-class InteractionVolumeHalfZ(JobProperty):
-    """Volume (z) in which brem photons / had int are processed"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 4000.    
-      
-## ============================================================================
-## Energy Loss through: Bethe-Heitler distribution 
-class BetheHeitlerScalor(JobProperty):
-    """Scalor to adapt Bethe-Heitler contribution"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 1.
-    
-## ============================================================================
-## Multiple Scattering: Gaussian mixture model 
-class GaussianMixtureModel(JobProperty):
-    """Gaussian mixture model on/off for Multiple Scattering"""
-    statusOn     = True
-    allowedTypes = ['bool']
-    StoredValue  = True
-
-## ============================================================================
-## Conversion Creation: 
-class ParticleDecayMinChildEnergy(JobProperty):
-    """minimum child energy from a conversion"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 250.
-        
-## ============================================================================
-## Conversion Creation: 
-class ConversionMinChildEnergy(JobProperty):
-    """minimum child energy from a conversion"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 100.
-    
-class ConversionChildEnergyScalor(JobProperty):
-    """minimum child energy from a conversion"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 2.
- 
-# Conversion Creation: 
-class ConversionProbabilityScalor(JobProperty):
-    """Scalor to adapt the conversion probability"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 0.98
-    
-# Conversion Creation: 
-class ConversionVolumeRadius(JobProperty):
-    """Volume (r) in which conversions are created"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 1200.
-      
-class ConversionVolumeHalfZ(JobProperty):
-    """Volume (z) in which conversions are created"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 4000.
-
-    
-## ============================================================================
-## Hadronic interactions:     
-class HadronicInteractionProbabilityScalor(JobProperty):
-    """ """
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 1.
-
-## Hadronic interactions:     
-class HadronicInteractionMinMomentumIn(JobProperty):
-    """ """
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 1000.
-    
-## Hadronic interactions:     
-class HadronicInteractionMinMomentumOut(JobProperty):
-    """Volume (z) in which conversions are created"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 200.    
-           
-
-## ============================================================================
-## Noise Levels
-class PixNoiseLevel(JobProperty):
-    """Pixel overall noise level (fraction of noisy pixels)"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 1e-5
-
-class SctNoiseLevel(JobProperty):
-    """SCT overall noise level (fraction of noisy strips)"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 1e-5
-
-class TrtNoiseLevel(JobProperty):
-    """TRT overall noise level (fraction of noisy straws)"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 0.02
-
-class TrtMultiHitFlag(JobProperty):
-    """Flag how to deal with TRT multiple hits on straw"""
-    statusOn     = True
-    allowedTypes = ['int']
-    StoredValue  = 1
-
-# Material distortions
-class MaterialScalor(JobProperty):    
-    """efficiencies (vs. eta) for the TRT"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 1.00
-
-class MaterialDistortionsX0(JobProperty):    
-    """efficiencies (vs. eta) for the TRT"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 0.02
-    
-# Material distortions
-class MaterialDistortionsRho(JobProperty):    
-    """efficiencies (vs. eta) for the TRT"""
-    statusOn     = True
-    allowedTypes = ['float']
-    StoredValue  = 0.02
-    
-##-----------------------------------------------------------------------------
-## 2nd step
-## Definition of the InDet flag container
-class FatrasTuning(JobPropertyContainer):
-    """Container for the Fatras key flags
-    """
-    pass
-
-
-##-----------------------------------------------------------------------------
-## 3rd step
-## adding the container to the general top-level container
-jobproperties.add_Container(FatrasTuning)
-
-
-##-----------------------------------------------------------------------------
-## 4th step
-## adding the tuning flags to the FatrasTuning container
-jobproperties.FatrasTuning.add_JobProperty(MinimumParticlePtPrimaryTrackCreation)
-jobproperties.FatrasTuning.add_JobProperty(MinimumMuonPtTrackCreation)
-jobproperties.FatrasTuning.add_JobProperty(MinimumParticleMomentum)
-jobproperties.FatrasTuning.add_JobProperty(MinimumBremPhotonMomentum)
-jobproperties.FatrasTuning.add_JobProperty(InteractionVolumeRadius)
-jobproperties.FatrasTuning.add_JobProperty(InteractionVolumeHalfZ)
-jobproperties.FatrasTuning.add_JobProperty(BetheHeitlerScalor)
-jobproperties.FatrasTuning.add_JobProperty(GaussianMixtureModel)
-jobproperties.FatrasTuning.add_JobProperty(ParticleDecayMinChildEnergy)
-jobproperties.FatrasTuning.add_JobProperty(ConversionMinChildEnergy)
-jobproperties.FatrasTuning.add_JobProperty(ConversionChildEnergyScalor)
-jobproperties.FatrasTuning.add_JobProperty(ConversionProbabilityScalor)
-jobproperties.FatrasTuning.add_JobProperty(ConversionVolumeRadius)
-jobproperties.FatrasTuning.add_JobProperty(ConversionVolumeHalfZ)     
-jobproperties.FatrasTuning.add_JobProperty(HadronicInteractionProbabilityScalor)
-jobproperties.FatrasTuning.add_JobProperty(HadronicInteractionMinMomentumIn)
-jobproperties.FatrasTuning.add_JobProperty(HadronicInteractionMinMomentumOut)
-jobproperties.FatrasTuning.add_JobProperty(PixNoiseLevel)
-jobproperties.FatrasTuning.add_JobProperty(SctNoiseLevel)
-jobproperties.FatrasTuning.add_JobProperty(TrtNoiseLevel)
-jobproperties.FatrasTuning.add_JobProperty(TrtMultiHitFlag)
-jobproperties.FatrasTuning.add_JobProperty(MaterialScalor)
-jobproperties.FatrasTuning.add_JobProperty(MaterialDistortionsX0)
-jobproperties.FatrasTuning.add_JobProperty(MaterialDistortionsRho)
-##-----------------------------------------------------------------------------
-## 5th step
-## short-cut for lazy people
-## careful: do not reuse FatrasTuning itself as the short name,
-## otherwise pickling will fail
-## Note: you still have to import it:
-## >>> from FatrasExample.FatrasTuning import FatrasTuningFlags
-FatrasTuningFlags = jobproperties.FatrasTuning
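The tuning container follows the same pattern; a short sketch, under the assumption that the module above has already been loaded, of reading a tuning value back:

    from AthenaCommon.JobProperties import jobproperties
    FatrasTuningFlags = jobproperties.FatrasTuning
    # calling a JobProperty returns its StoredValue while statusOn is True
    minMom = FatrasTuningFlags.MinimumParticleMomentum()   # 50. (MeV) by default
    print('Fatras minimum particle momentum: %.1f MeV' % minMom)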
diff --git a/Simulation/ISF/ISF_Example/share/FileGrepper.py b/Simulation/ISF/ISF_Example/share/FileGrepper.py
deleted file mode 100755
index 966c34d686048458754030b573e09411083d91d3..0000000000000000000000000000000000000000
--- a/Simulation/ISF/ISF_Example/share/FileGrepper.py
+++ /dev/null
@@ -1,157 +0,0 @@
-#!/usr/bin/env python
-
-"""FileGrepper - runs fileGrepper.py. RTT Postprocessing class.
-vlaidates inputs, calls fileGrepper for each file matching input file pattern.
-"""
-
-######################################################
-#                                                    #
-# Do NOT bind any objects (self.xxx) which contain   #
-# file objects (such as self.logger in this class    #
-# otherwise cannot shelve the objects                #
-# Instead, unload the necessary variables in from    #
-# more complicated classes in __init__               #
-#                                                    #
-######################################################
-
-import os
-import glob
-
-from ExeRunnerBase import ExeRunnerBase
-
-
-def fixPatterns(pList, pString):
-    """PList is a list of comma separated strings or ''
-    pString is a list of strings or ''For
-
-    """
-    if not isinstance(pString, list):
-        if pString:
-            pString = [pString]
-        else:
-            pString = []
-
-    if not isinstance(pList, list):
-        if pList:
-            pList = [pList]
-        else:
-            pList = []
-
-    l = []
-    l.extend(pList)
-    l.extend(pString)
-
-    l = ','.join(l)
-    l = "'%s'" % l  # escape the regexes as will be seen by the shell
-
-    return l
-
-
-class FileGrepper(ExeRunnerBase):
-    """Grep files with using two lists of regexes"""
-
-    def __init__(self, paramDict):
-        """Parameters are passed in via a parameter dictionary.
-        Read these, and use the veto and search regex lists
-        to set up the corresponding matcher objects. These
-        will find matching lines, and place the results in
-        a {regex:[line]} dictionary"""
-
-        testIdentifier = paramDict['testIdentifierObj']
-
-        # find the name of the output file.
-        # new-style tests: the output file is passed in via an
-        # optional xml tag. If the tag is missing, the value defaults to ''.
-        # For old style tests (and new style tests using old style xml tags),
-        # the name is sometimes passed in using the <arg> tag, with
-        # <argName>outputFile</argName> as a sub tag.
-        #
-        # The old-style name is used in preference to the new-style name
-        # to avoid obliging users to change their package files.
-        #
-
-        outputFile = paramDict['testOutputFile']  # pick up new-style name
-        try:  # use the old-style name if present.
-            outputFile = paramDict['outputFile']
-        except KeyError:
-            pass
-
-        ExeRunnerBase.__init__(self,
-                               cmd='',    # set in run
-                               testIdentifier=testIdentifier,
-                               outputFile=outputFile)
-
-        # argument passing gone mad....
-
-        fList = paramDict.get('searchList', '')
-        fStrings = paramDict.get('searchStrings', '')
-        self.fPatterns = fixPatterns(fList, fStrings)
-
-        vList = paramDict.get('vetoList', '')
-        vStrings = paramDict.get('vetoStrings', '')
-        self.vPatterns = fixPatterns(vList, vStrings)
-
-        nList = paramDict.get('multipleInstanceList', '')
-        nStrings = paramDict.get('multipleInstanceStrings', '')
-        self.nPatterns = fixPatterns(nList, nStrings)
-
-        self.iFilePat = paramDict['inputFile']
-
-        ok, msg = self.configurationIsOK()
-        if not ok:
-            raise RuntimeError(
-                '%s.__init__, bad configuration:\n %s' % (
-                    self.__class__.__name__,
-                    msg
-                    )
-                )
-
-        self.iFilePat = os.path.abspath(self.iFilePat)
-
-    def configurationIsOK(self):
-        """Check the user has supplied all the necessary parameters."""
-
-        if not self.iFilePat:
-            m = '%s ==> no input file pattern to match was given. Abandoning.'\
-                % self.__class__.__name__
-            self.logger.error(m)
-            return (False, m)
-
-        return (True, '')
-
-    def getInFiles(self):
-        """Find the input files with the  user supplied file name pattern"""
-
-        matches = [m for m in glob.glob(self.iFilePat) if os.path.isfile(m)]
-        if not matches:
-            m = '%s => No matches found for the input file pattern: %s\n' % \
-                (self.__class__.__name__, self.iFilePat)
-            self.logger.warning(m)
-        return matches
-
-    def run(self):
-        """Check the user has supplied necessary parameters. For files
-        with names matching the user supplied file name pattern,
-        store any lines mathcing the user supplied regexes.
-        Print out a file of results containing name
-        of file checked, the regexes checked for and the correponding lines
-        found.
-
-        Return 1 if any matching file has any line matching a veto pattern.
-        Return 0 otherwise."""
-
-        iFiles = self.getInFiles()
-
-        for iFile in iFiles:
-
-            self.cmd = 'fileGrepperScript.py '
-            if self.fPatterns != "''":
-                self.cmd += '-f %s ' % self.fPatterns
-            if self.vPatterns != "''":
-                self.cmd += '-v %s ' % self.vPatterns
-            if self.nPatterns != "''":
-                self.cmd += '-n %s ' % self.nPatterns
-
-            self.cmd += iFile
-
-            ExeRunnerBase.run(self)
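A hedged sketch of how the RTT framework would instantiate this class; the dictionary keys are the ones read in __init__ above, while the values and the test identifier are hypothetical, and running it requires the RTT environment that provides ExeRunnerBase:

    params = {
        'testIdentifierObj': 'ISF_sim_logcheck',       # placeholder test identifier
        'testOutputFile': 'fileGrepper_results.txt',
        'inputFile': 'log.ISF_*',                      # glob pattern resolved in getInFiles()
        'searchList': 'leaving with code 0',
        'vetoList': 'ERROR,FATAL',
    }
    grepper = FileGrepper(params)
    grepper.run()          # builds and runs one fileGrepperScript.py command per matched file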
diff --git a/Simulation/ISF/ISF_Example/share/ISF_ExampleCommonConfig.py b/Simulation/ISF/ISF_Example/share/ISF_ExampleCommonConfig.py
deleted file mode 100644
index 865ba2b3cfe4971c7393504e2922ac87af1fe586..0000000000000000000000000000000000000000
--- a/Simulation/ISF/ISF_Example/share/ISF_ExampleCommonConfig.py
+++ /dev/null
@@ -1,10 +0,0 @@
-"""
-Common configurations for ISF
-KG Tan, 17/06/2012
-"""
-
-include.block('ISF_Example/ISF_ExampleCommonConfig.py')
-
-print "DEPRECATION WARNING: include('ISF_Example/ISF_ExampleCommonConfig.py') is deprecated."
-print "Please Use include('ISF_Config/ISF_ConfigJobInclude.py') instead."
-include('ISF_Config/ISF_ConfigJobInclude.py')
diff --git a/Simulation/ISF/ISF_Example/share/fileGrepperScript.py b/Simulation/ISF/ISF_Example/share/fileGrepperScript.py
deleted file mode 100755
index 8063ff34222e9bbe1260b2d1023b61f605d17e24..0000000000000000000000000000000000000000
--- a/Simulation/ISF/ISF_Example/share/fileGrepperScript.py
+++ /dev/null
@@ -1,238 +0,0 @@
-#!/usr/bin/env python
-
-"""
-fileGrepper - function to look for strings or veto strings in a file.
-
-classes:
-FileGrepResult  Holds grep results for a file
-Matcher         Applies the regexes to the lines of a file. Returns the
-                results in a dictionary {regex:[matching lines]}
-
-functions:
-resultsDictToString formatting helper function.
-fileGrepper
-"""
-import getopt
-import re
-import sys
-import collections
-import os
-
-
-def resultsDictToString(ddict):
-    """Convert a dictionary of regex: matching lines to a string"""
-
-    s = []
-    for k, matches in ddict.items():
-        s.append('  %s:' % k)
-        for v in matches:
-            s.append('    ' + v)
-    return '\n'.join(s)
-
-
-class FileGrepResult(object):
-    """Class that holds a file name and dictionaries
-    of regex:matching lines for two lists of regexes"""
-
-    def __init__(self, fn, searchResults, vetoResults, multInstanceResults):
-
-        self.fn = fn
-        self.searchResults = searchResults  # {pattern: [lines]}
-        self.vetoResults = vetoResults  # {pattern: [lines]}
-        self.multInstanceResults = multInstanceResults  # [[{pattern: [lines]}, nInstances], ...]
-        self.nMultInstanceFailures = 0
-        self.multInstanceResultString = self.multInstances()
-
-    def vetoIsPresent(self):
-        'return True if there is a non-empty vetoResult dictionary'
-
-        if self.vetoResults:
-            return True
-
-        return False
-
-    def multInstances(self):
-        'collect patterns whose number of matches exceeds the allowed instance count'
-
-        s = []
-        if len(self.multInstanceResults)>0:
-            for res,nInstances in self.multInstanceResults:
-                for k, matches in res.items():
-                    if len(matches) > int(nInstances):
-                        self.nMultInstanceFailures += 1
-                        s.append('  %s ( >%s instances ):' % (k,nInstances))
-                        for v in matches:
-                            s.append('    ' + v)
-        return '\n'.join(s)
-
-    def __str__(self):
-        """Present data as a formatted string"""
-
-        s = ['File: %s' % self.fn]
-        s.append('Search Results [%d]' % len(self.searchResults))
-        s.append(resultsDictToString(self.searchResults))
-        s.append('')
-        s.append('Veto Results [%d]' % len(self.vetoResults))
-        s.append(resultsDictToString(self.vetoResults))
-        s.append('')
-        s.append('Multiple Instance results [%d]' % self.nMultInstanceFailures)
-        s.append(self.multInstanceResultString)
-        return '\n'.join(s)
-
-
-class Matcher(object):
-    """A class that performs regex matches on lines in a file"""
-
-    def __init__(self, patternList,checkNInstances=False):
-        """Converts an input list of regexes to a list of
-        raw and compiled regexes. The raw regexes are needed
-        for presenting results"""
-
-
-        self.patternList = []
-        self.nInstancesList = []
-        self.checkNInstances = checkNInstances
-
-        if checkNInstances:
-            for i in range(0,len(patternList),2):
-                p = patternList[i]
-                self.patternList += [(p, re.compile(p))]
-                self.nInstancesList.append(patternList[i+1])
-        else:
-            
-            self.patternList = [(p, re.compile(p)) for p in patternList]
-
-
-    def match(self, fn):
-        """Opens file with supplied file name, reads all it lines
-        checking the against the regexes. If a line matches, store it
-        in the {raw regex:[lines]} dictionary"""
-
-
-        if not self.checkNInstances:
-            matchDict = collections.defaultdict(list)
-            with open(fn) as ifile:
-                for l in ifile:
-                    for  ori, comp in self.patternList:
-                        if comp.search(l):
-                            matchDict[ori].append(l)
-            return matchDict
-
-        else:
-            result = []
-            for i in range(len(self.patternList)):
-                ori=self.patternList[i][0]
-                comp=self.patternList[i][1]
-                nInstances=self.nInstancesList[i]
-
-                matchDict = collections.defaultdict(list)                
-                with open(fn) as ifile:
-                    for l in ifile:
-                        if comp.search(l):
-                            matchDict[ori].append(l)
-                result.append([matchDict,nInstances])
-            return result
-
-def usage():
-    print 'NAME '
-    print '  fileGrepper'
-    print
-    print 'SYNOPSIS'
-    print '  fileGrepper [option] [FILE]'
-    print
-    print 'DESCRIPTION'
-    print ' Look for lines in FILE that match search and veto regexes.'
-    print ' fPattern and vPattern are split at the commas. The input file'
-    print 'is read line by line. Lines matching any of the fPattern or '
-    print 'vPattern regexes are written to the output file.'
-    print 'If a veto pattern matches any line, the script exits with '
-    print 'exit code 1. Otherwise it exits with exit code 0'
-    print
-    print '  -f'
-    print '     a comma separated string of regexes'
-    print '  -v'
-    print '     a comma separated string of regexes'
-    print
-    print '  -o'
-    print '     name of output file if present. Otherwise output written to'
-    print '     standard out unless -s option is present'
-    print
-    print '  -s '
-    print '     silent mode'
-    print ''
-
-def fileGrepper(fList, vList, nList, inFile, outFile, silent=True):
-    """Check the user has supplied necessary parameters. For files
-    with names matching the user supplied file name pattern,
-    store any lines mathcing the user supplied regexes.
-    Print out a file of results containing name
-    of file checked, the regexes checked for and the correponding lines
-    found.
-
-    Return 1 if any matching file has any line matching a veto pattern.
-    Return 0 otherwise."""
-
-    # run the Matchers. Store results in a FileGrepResult object.
-    # make the results an attribute for unit tests.
-    
-
-    grepResult = FileGrepResult(inFile,
-                                Matcher(fList).match(inFile),
-                                Matcher(vList).match(inFile),
-                                Matcher(nList,True).match(inFile)
-                                )
-
-    if not silent:
-        text = [(str(grepResult))]
-        text.append('')
-        text = '\n'.join(text)
-
-        print text
-
-    if grepResult.vetoIsPresent():
-        return 1
-
-    if grepResult.multInstanceResultString:
-        return 1
-
-    return 0
-
-if __name__ == '__main__':
-
-    opts, args = getopt.getopt(sys.argv[1:], 'f:v:o:sn:', [])
-
-    fList = []
-    vList = []
-    nList = []
-    outFile = ''
-    silent = False
-    nInstances = 0
-
-    for o, v in opts:
-        if o == '-f':
-            fList = v.split(',')
-        if o == '-v':
-            vList = v.split(',')
-        if o == '-o':
-            outFile = v
-        if o == '-s':
-            silent = True
-        if o == '-n':
-            nList = v.split(',')
-            
-    if len(args) != 1:
-        print 'Supply only one input file'
-        print args
-        usage()
-        sys.exit(1)
-
-    inFile = args[0]
-
-    status = fileGrepper(fList=fList,
-                         vList=vList,
-                         nList=nList,
-                         inFile=inFile,
-                         outFile=outFile,
-                         silent=silent)
-
-    sys.exit(status)
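A sketch of driving the same logic directly from Python rather than through the command line; the log file name and the patterns are hypothetical:

    # roughly equivalent to:
    #   fileGrepperScript.py -f 'successful run' -v 'ERROR,FATAL' -n 'WARNING,10' athena_job.log
    status = fileGrepper(fList=['successful run'],
                         vList=['ERROR', 'FATAL'],
                         nList=['WARNING', '10'],      # pattern followed by its allowed instance count
                         inFile='athena_job.log',
                         outFile='',
                         silent=False)
    sys.exit(status)                                   # 0 = clean, 1 = veto match or instance-count failure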
diff --git a/Simulation/ISF/ISF_Example/share/jobOptions_IDOnly_trf.py b/Simulation/ISF/ISF_Example/share/jobOptions_IDOnly_trf.py
deleted file mode 100644
index e4cfd4e314566f3c465c96b7ee9005ce6bcc8703..0000000000000000000000000000000000000000
--- a/Simulation/ISF/ISF_Example/share/jobOptions_IDOnly_trf.py
+++ /dev/null
@@ -1,184 +0,0 @@
-#--------------------------------------------------------------
-# Template jobOptions: SLHC 
-#  - Tracking 
-#--------------------------------------------------------------
-
-from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
-
-from RecExConfig.RecFlags import rec
-# --- Set output level threshold (2=DEBUG, 3=INFO, 4=WARNING, 5=ERROR, 6=FATAL )
-OutputLevel     = INFO
-# --- produce an atlantis data file
-doJiveXML       = False
-# --- run the Virtual Point 1 event visualisation
-doVP1           = False
-# --- controls what is written out. ESD includes AOD, so it's normally enough
-doWriteESD      = rec.doWriteESD()
-doWriteAOD      = rec.doWriteAOD()
-# --- read BS - please look at the default input file(s) to know the det descr and cond tag
-doReadBS        = False
-# --- do auditors ?
-doAuditors      = True
-
-if os.environ['CMTCONFIG'].endswith('-dbg'):
-  # --- do EDM monitor (debug mode only)
-  doEdmMonitor    = True 
-  # --- write out a short message upon entering or leaving each algorithm
-  doNameAuditor   = True
-else:
-  doEdmMonitor    = False
-  doNameAuditor   = False
-
-
-
-#--------------------------------------------------------------
-# load Global Flags and set defaults (import the new jobProperty globalflags)
-#--------------------------------------------------------------
-
-from AthenaCommon.GlobalFlags import globalflags
-# --- default is atlas geometry
-globalflags.DetGeo = 'atlas'
-# --- set defaults
-globalflags.DataSource = 'geant4'
-# --- input is pool for SLHC 
-globalflags.InputFormat = 'pool'
-
-from AthenaCommon.GlobalFlags import jobproperties
-jobproperties.Global.DetDescrVersion='ATLAS-GEO-20-00-01'
-
-# --- printout
-globalflags.print_JobProperties()
-    
-from AthenaCommon.BeamFlags import jobproperties
-# --- default is high luminosity for SLHC
-jobproperties.Beam.numberOfCollisions = 40.0  
-
-from AthenaCommon.GlobalFlags import globalflags
-globalflags.ConditionsTag = "OFLCOND-MC12-SDR-06"
-
-# --- no conditions for SLHC
-if len(globalflags.ConditionsTag())!=0:
-   from IOVDbSvc.CondDB import conddb
-   conddb.setGlobalTag(globalflags.ConditionsTag())
-
-#--------------------------------------------------------------
-# Set Detector setup
-#--------------------------------------------------------------
-
-from RecExConfig.RecFlags import rec
-rec.Commissioning=False
-
-from AthenaCommon.DetFlags import DetFlags 
-# --- switch on InnerDetector
-DetFlags.ID_setOn()
-# --- and switch off all the rest
-DetFlags.Calo_setOff()
-DetFlags.Muon_setOff()
-# --- printout
-DetFlags.Print()
-
-#DetFlags.writeRDOPool.pixel_setOn()
-
-#--------------------------------------------------------------
-# Load InDet configuration
-#--------------------------------------------------------------
-
-# --- setup InDetJobProperties
-from InDetRecExample.InDetJobProperties import InDetFlags
-InDetFlags.doTruth       = (globalflags.InputFormat() == 'pool')
-
-InDetFlags.doLowBetaFinder = False
-
-
-# --- uncomment to change the default of one of the following options:
-#OutputLevel          = DEBUG
-
-# --- possibility to change the trackfitter
-#InDetFlags.trackFitterType = 'KalmanFitter'
-#if (hasattr(InDetFlags,"doPixelClusterSplitting")) :
-#  InDetFlags.doPixelClusterSplitting=False
-#---slhc
-#InDetFlags.doTrackSegmentsPixel = False
-#InDetFlags.doTrackSegmentsSCT   = False
-#InDetFlags.doTrackSegmentsTRT   = False
-
-#InDetFlags.doNewTracking  =         True
-#InDetFlags.doLowPt        =         False
-#InDetFlags.doxKalman      =         False
-#InDetFlags.doiPatRec      =         False
-#InDetFlags.doBackTracking =         False
-#InDetFlags.doTRTStandalone =        False
-#InDetFlags.doSingleSpBackTracking = False
-InDetFlags.postProcessing =         True
-InDetFlags.doBeamGas      =         False
-InDetFlags.doBeamHalo     =         False
-InDetFlags.doCosmics      =         False
-InDetFlags.doSharedHits   =         True
-# --- Turn off track slimming
-InDetFlags.doSlimming = False
-#---endslhc
-
-# --- de-activate monitorings
-InDetFlags.doMonitoringGlobal    = False
-InDetFlags.doMonitoringPixel     = False
-InDetFlags.doMonitoringSCT       = False
-InDetFlags.doMonitoringTRT       = False
-InDetFlags.doMonitoringAlignment = False
-
-# --- activate (memory/cpu) monitoring
-#InDetFlags.doPerfMon = True
-
-# --- activate creation of ntuples, standard plots
-#InDetFlags.doTrkNtuple      = False
-InDetFlags.doPhysValMon  = False
-InDetFlags.doSGDeletion     = False
-#InDetFlags.doTrkD3PD        = True
-
-from TrackD3PDMaker.TrackD3PDMakerFlags import TrackD3PDFlags
-TrackD3PDFlags.trackParametersAtGlobalPerigeeLevelOfDetails     = 2
-TrackD3PDFlags.storeTrackPredictionAtBLayer                     = False
-TrackD3PDFlags.storeTrackSummary                                = True
-TrackD3PDFlags.storeHitTruthMatching                            = True
-TrackD3PDFlags.storeDetailedTruth                               = True
-
-
-# activate the print InDetXYZAlgorithm statements
-InDetFlags.doPrintConfigurables = True
-
-
-# IMPORTANT NOTE: initialization of the flags and locking them is done in InDetRec_jobOptions.py!
-# This way RecExCommon just needs to import the properties without doing anything else!
-# DO NOT SET JOBPROPERTIES AFTER THIS LINE! The change will be ignored!
-
-#--------------------------------------------------------------
-# turn on SLHC tracking
-#--------------------------------------------------------------
-
-from InDetRecExample.InDetJobProperties import InDetFlags
-
-# --- do tracking D3PD
-from InDetRecExample.InDetKeys import InDetKeys
-if hasattr(runArgs,"outputDESDM_TRACKFile"):
-  InDetFlags.doTrkD3PD.set_Value_and_Lock(True)
-  InDetKeys.trkD3PDFileName.set_Value_and_Lock(runArgs.outputDESDM_TRACKFile)
-
-# --- Set output names such that they work with Reco_trf.py
-if athenaCommonFlags.PoolESDOutput(): InDetKeys.OutputESDFileName = athenaCommonFlags.PoolESDOutput()
-if athenaCommonFlags.PoolAODOutput(): InDetKeys.OutputAODFileName = athenaCommonFlags.PoolAODOutput()
-#--------------------------------------------------------------
-# load master joboptions file
-#--------------------------------------------------------------
-
-include("InDetRecExample/InDetRec_all.py")
-
-
-#--------------------------------------------------------------
-# Event related parameters and input files
-#--------------------------------------------------------------
-
-# Number of events to be processed 
-theApp.EvtMax = athenaCommonFlags.EvtMax()
-
-# --- default SLHC test file from digit output
-ServiceMgr.EventSelector.InputCollections = athenaCommonFlags.PoolRDOInput()
-athenaCommonFlags.FilesInput = athenaCommonFlags.PoolRDOInput()
diff --git a/Simulation/ISF/ISF_Example/share/jobOptions_PhysValMon.py b/Simulation/ISF/ISF_Example/share/jobOptions_PhysValMon.py
deleted file mode 100644
index ad88b7cbfb00608439790658cb0dd05a73159d57..0000000000000000000000000000000000000000
--- a/Simulation/ISF/ISF_Example/share/jobOptions_PhysValMon.py
+++ /dev/null
@@ -1,252 +0,0 @@
-# *************************
-# Application configuration
-# *************************
-### This job option is based on PhysicsAnalysis/PhysicsValidation/PhysValMon/share/PhysValMonFCT.py; thanks to the author. ####
-
-
-##EvtMax = 400
-##SkipEvents = 0
-##InputCollections = ["ttbar_AtlfastII_AOD_400.pool.root"]
-##OutputFileName = "Hist_50.root"
-##DetDescrVersion = "ATLAS-GEO-20-00-01"
-##TriggerSwitch  = "NONE"
-##CounterCheck = False
-##WriteReference = False
-##ConditionsTag = "OFLCOND-MC12-SIM-00" 
-
-options=locals()
-options.setdefault('EvtMax',50)
-options.setdefault('SkipEvents',0)
-options.setdefault('InputCollections',"ttbar_AtlfastII_AOD_50.pool.root")  ### does not exist in the current directory; change this to an existing file
-options.setdefault('DetDescrVersion','ATLAS-GEO-20-00-01')
-options.setdefault('OutputFileName','Hist_50.root')
-options.setdefault('TriggerSwitch','NONE')
-options.setdefault('CounterCheck',False)
-options.setdefault('WriteReference',False)
-options.setdefault('ConditionsTag','OFLCOND-MC12-SIM-00')
-
-
-from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
-athenaCommonFlags.PoolAODInput.set_Value_and_Lock([InputCollections])
-
-print 'Conditions tag: ' + str(ConditionsTag)
-
-TriggerOn = 0
-
-if TriggerSwitch != "NONE":
-    TriggerOn = 1
-
-# *************************
-
-from AthenaCommon.GlobalFlags import GlobalFlags
-GlobalFlags.DetGeo = 'atlas'
-GlobalFlags.DataSource = 'geant4'
-GlobalFlags.InputFormat = 'pool'
-
-
-
-include ("RecExCond/AllDet_detDescr.py")
-
-from RecExConfig.RecFlags  import rec
-rec.readRDO=False
-rec.readAOD=True
-rec.doWriteAOD=False
-rec.doWriteESD=False
-
-from AthenaCommon.AppMgr import theApp
-IOVDbSvc = theApp.service('IOVDbSvc')
-IOVDbSvc.GlobalTag = ConditionsTag
-
-import JetRec.ParticleJetCompatibility
-
-if TriggerOn == 1:
-    # Set up trigger configuration service and metadata service it relies on, for analysis job without RecExCommon
-    # set up trigger decision tool
-
-        # set up metadata services ... NB comment if "aod" state is selected above
-    from EventInfoMgt.EventInfoMgtConf import TagInfoMgr
-    ServiceMgr += TagInfoMgr()
-   #ServiceMgr.TagInfoMgr.AddGeoModelTags = False
-    from IOVDbMetaDataTools.IOVDbMetaDataToolsConf import IOVDbMetaDataTool
-    ToolSvc += IOVDbMetaDataTool( "IOVDbMetaDataTool" )
-    from AthenaServices.AthenaServicesConf import MetaDataSvc
-    ServiceMgr += MetaDataSvc( "MetaDataSvc" )
-    ServiceMgr.MetaDataSvc.MetaDataContainer = "MetaDataHdr"
-    ServiceMgr.MetaDataSvc.MetaDataTools += [ "IOVDbMetaDataTool" ]
-    import IOVDbSvc.IOVDb
-
-
-    from RecExConfig.RecFlags  import rec
-    rec.readAOD=True
-    rec.doWriteAOD=False
-    rec.doWriteESD=False
-    
-#    from TrigDecisionTool.TrigDecisionConf import TrigDecisionTool
-#    tdt = TrigDecisionTool()
-    from TrigDecisionTool.TrigDecisionToolConf import Trig__TrigDecisionTool
-    tdt = Trig__TrigDecisionTool()
-    ToolSvc += tdt
-    # flags needed for TriggerConfigGetter
-
-    
-    # To read AOD produced with 13.0.30 you need to change ds to aod:
-    from TriggerJobOpts.TriggerFlags import TriggerFlags
-    TriggerFlags.configurationSourceList = ['ds']
-    
-    # set up trigger config service
-    from TriggerJobOpts.TriggerConfigGetter import TriggerConfigGetter
-    cfg =  TriggerConfigGetter()
-
-from AthenaCommon.AppMgr import ServiceMgr as svcMgr
-svcMgr.MessageSvc.OutputLevel = INFO
-
-## read pool data
-
-
-import AthenaPoolCnvSvc.ReadAthenaPool
-svcMgr.EventSelector.InputCollections = [InputCollections]
-
-## get the one and only one Athena Application Mgr
-from AthenaCommon.AppMgr import theApp
-theApp.EvtMax  = EvtMax
-
-## get a handle on the top sequence of algorithms
-from AthenaCommon.AlgSequence import AlgSequence
-topSequence = AlgSequence()
-
-## add an AthenaMonManager algorithm to the list of algorithms to be run
-from AthenaMonitoring.AthenaMonitoringConf import AthenaMonManager
-topSequence += AthenaMonManager( "AthenaMonManagerPV" )
-
-# ************************
-# Monitoring configuration
-# ************************
-
-## Setup the output file(s):
-from GaudiSvc.GaudiSvcConf import THistSvc
-svcMgr += THistSvc()
-
-## The string "TestMon" in the argument below is the 'FileKey'
-## used by Athena to access the output file internally
-
-if OutputFileName != 'NONE':
-    svcMgr.THistSvc.Output += ["PhysValMon DATAFILE='" + OutputFileName + "' OPT='RECREATE'"]
-else:
-    svcMgr.THistSvc.Output += ["PhysValMon DATAFILE='PhysValMon.root' OPT='RECREATE'"]
-
-## AthenaMonManager is the Algorithm that manages many classes inheriting
-## from ManagedMonitorToolBase
-monMan = topSequence.AthenaMonManagerPV
-
-## FileKey must match that given to THistSvc
-monMan.FileKey = "PhysValMon"
-
-## Set global monitoring parameters: see the AthenaMonManager class
-## in the Control/AthenaMonitoring package
-monMan.ManualDataTypeSetup = True
-monMan.DataType            = "monteCarlo"
-monMan.Environment         = "altprod"
-monMan.ManualRunLBSetup    = True
-monMan.Run                 = 1
-monMan.LumiBlock           = 1
-monMan.OutputLevel         = INFO
-
-# ****************************************
-# Configuration of PhysValMon tools
-# ****************************************
-
-from PhysValMon.PhysValMonConf import *
-
-# configuration of the muon tools
-# ****************************************
-
-TopoClusterTool = PhysValMonTopoCluster("TopoClusterTool", TopoClusterContainer = "CaloCalTopoCluster")
-TopoClusterTool.DoOnTheFlyCheck = CounterCheck
-TopoClusterTool.WriteReference = WriteReference
-ToolSvc += TopoClusterTool
-monMan.AthenaMonTools += [ TopoClusterTool ]
-
-MuidMuonTool = PhysValMonMuon("MuidMuonTool", MuonContainer = "MuidMuonCollection", DoTrigger = TriggerOn)
-ToolSvc += MuidMuonTool
-MuidMuonTool.DoOnTheFlyCheck = CounterCheck
-MuidMuonTool.WriteReference = WriteReference
-monMan.AthenaMonTools += [ MuidMuonTool ]
-
-
-StacoMuonTool = PhysValMonMuon("StacoMuonTool", MuonContainer = "StacoMuonCollection", DoTrigger = TriggerOn)
-StacoMuonTool.DoOnTheFlyCheck = CounterCheck
-StacoMuonTool.WriteReference = WriteReference
-ToolSvc += StacoMuonTool
-monMan.AthenaMonTools += [ StacoMuonTool ]
-
-# configuration of the electron tools
-# ****************************************
-
-import MCTruthClassifier.MCTruthClassifierBase
-
-
-ElectronTool = PhysValMonElectron("ElectronTool", ElectronContainer = "ElectronAODCollection", DoTrigger = TriggerOn)
-ElectronTool.DoOnTheFlyCheck = CounterCheck
-ElectronTool.WriteReference = WriteReference
-ToolSvc += ElectronTool
-monMan.AthenaMonTools += [ ElectronTool ]
-
-# configuration of the photon tools
-# ****************************************
-
-PhotonTool = PhysValMonPhoton("PhotonTool", PhotonContainer = "PhotonAODCollection", DoTrigger = TriggerOn)
-PhotonTool.DoOnTheFlyCheck = CounterCheck
-PhotonTool.WriteReference = WriteReference
-ToolSvc += PhotonTool
-monMan.AthenaMonTools += [ PhotonTool ]
-
-# configuration of the jet tools
-# ****************************************
-
-#AntiKt4TowerJetTool = PhysValMonJet("AntiKt4TowerJetTool", JetContainer = "AntiKt4TowerAODJets", TruthJetContainer = "AntiKt4TruthJets", DoTrigger = TriggerOn)
-#AntiKt4TowerJetTool.DoOnTheFlyCheck = CounterCheck
-#AntiKt4TowerJetTool.WriteReference = WriteReference
-AntiKt4TopoJetTool = PhysValMonJet("AntiKt4TopoJetTool", JetContainer = "AntiKt4TopoEMJets", TruthJetContainer = "AntiKt4TruthJets", DoTrigger = TriggerOn)
-AntiKt4TopoJetTool.DoOnTheFlyCheck = CounterCheck
-AntiKt4TopoJetTool.WriteReference = WriteReference
-
-AntiKt6TowerJetTool = PhysValMonJet("AntiKt6TowerJetTool", JetContainer = "AntiKt6TopoEMJets", TruthJetContainer = "AntiKt6TruthJets", DoTrigger = TriggerOn)
-
-#AntiKt4TowerJetTool.OutputLevel=VERBOSE
-AntiKt4TopoJetTool.OutputLevel=VERBOSE
-
-#ToolSvc += AntiKt4TowerJetTool
-ToolSvc += AntiKt4TopoJetTool
-ToolSvc += AntiKt6TowerJetTool
-
-#monMan.AthenaMonTools += [ AntiKt4TowerJetTool ]
-monMan.AthenaMonTools += [ AntiKt4TopoJetTool ]
-monMan.AthenaMonTools += [ AntiKt6TowerJetTool]
-#AntiKt4TowerJetBTaggingTool = PhysValMonBTagging("AntiKt4TowerJetBTaggingTool",
-#JetContainer = "AntiKt4TowerJets", TruthParticleContainer = "Spc1MC" )
-
-# AntiKt4TowerJetBTaggingTool.OutputLevel=1
-
-#ToolSvc += AntiKt4TowerJetBTaggingTool
-#monMan.AthenaMonTools += [ AntiKt4TowerJetBTaggingTool ]
-
-# configuration of the met tools
-# ****************************************
-
-from AthenaCommon.Resilience import protectedInclude
-protectedInclude("PhysValMon/PhysValMonMet_jobOptions.py")
-
-# configuration of the tau tools
-# ****************************************
-TauRecContainerTool = PhysValMonTau("TauRecContainerTool", TauContainer = "TauRecContainer", DoTrigger = TriggerOn)
-TauRecContainerTool.DoOnTheFlyCheck = CounterCheck
-TauRecContainerTool.WriteReference = WriteReference
-ToolSvc += TauRecContainerTool
-monMan.AthenaMonTools += [ TauRecContainerTool ]
-
-# configuration of the Higgs validation tools
-
-from AthenaCommon.Resilience import protectedInclude
-protectedInclude("HiggsValidation/HiggsValidation_montools.py")
-
-#  LocalWords:  TowerJetTool
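The locals()/setdefault idiom at the top of this job option lets any variable defined beforehand (for example via athena's -c option) override the built-in default; a minimal sketch of the mechanism, with hypothetical values:

    # e.g. athena -c "EvtMax=100" jobOptions_PhysValMon.py
    EvtMax = 100                          # pretend this came from the -c string
    options = locals()
    options.setdefault('EvtMax', 50)      # ignored: EvtMax is already defined
    options.setdefault('SkipEvents', 0)   # applied: SkipEvents was not defined
    print(EvtMax)                         # -> 100
    print(SkipEvents)                     # -> 0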
diff --git a/Simulation/ISF/ISF_Example/share/preInclude.IDonly_reconstruction.py b/Simulation/ISF/ISF_Example/share/preInclude.IDonly_reconstruction.py
deleted file mode 100644
index a78083371f70d5828bb5631d95ff01a51e0a4c7b..0000000000000000000000000000000000000000
--- a/Simulation/ISF/ISF_Example/share/preInclude.IDonly_reconstruction.py
+++ /dev/null
@@ -1,75 +0,0 @@
-from AthenaCommon.DetFlags import DetFlags
-DetFlags.ID_setOn()
-DetFlags.Calo_setOff()
-DetFlags.Muon_setOff()
-
-import MagFieldServices.SetupField
-
-from InDetRecExample.InDetJobProperties import InDetFlags
-InDetFlags.doBremRecovery.set_Value_and_Lock(False)
-InDetFlags.doPhysValMon.set_Value_and_Lock(True)
-
-#from GaudiSvc.GaudiSvcConf import THistSvc
-
-# --- controls what is written out. ESD includes AOD, so it's normally enough
-# --- ESD writing temporarily disabled post 2013 migration - to be reinstated! 
-
-from ParticleBuilderOptions.AODFlags import AODFlags
-AODFlags.TrackParticleSlimmer=False 
-AODFlags.TrackParticleLastHitAndPerigeeSlimmer=False
-
-#rec.doESD.set_Value_and_Lock             (True)
-#rec.doWriteESD.set_Value_and_Lock        (True)###
-#rec.doAOD.set_Value_and_Lock             (True)
-#rec.doWriteAOD.set_Value_and_Lock        (True)
-#rec.doDPD.set_Value_and_Lock             (False)
-#rec.doCBNT.set_Value_and_Lock            (False)
-#rec.doWriteTAG.set_Value_and_Lock        (False)
-
-# --- turn on InDet
-rec.doInDet.set_Value_and_Lock           (True)
-# --- turn off calo
-rec.doCalo.set_Value_and_Lock            (False)
-# --- turn off muons
-rec.doMuon.set_Value_and_Lock            (False) 
-# --- turn off forward detectors
-rec.doForwardDet.set_Value_and_Lock      (False)
-# --- turn off trigger
-rec.doTrigger.set_Value_and_Lock         (False)
-
-# --- turn off combined reconstruction
-rec.doEgamma.set_Value_and_Lock          (False)
-rec.doMuonCombined.set_Value_and_Lock    (False)
-rec.doTau.set_Value_and_Lock             (False)
-rec.doJetMissingETTag.set_Value_and_Lock (False)
-
-# --- turn off calo stuff we don't need anyway
-from CaloRec.CaloRecFlags import jobproperties
-jobproperties.CaloRecFlags.doCaloTopoCluster.set_Value_and_Lock  (False)
-jobproperties.CaloRecFlags.doCaloEMTopoCluster.set_Value_and_Lock(False)
-jobproperties.CaloRecFlags.doCaloTopoTower.set_Value_and_Lock    (False)
-
-# --- turn off jets (hack!)
-from JetRec.JetRecFlags import jetFlags
-jetFlags.Enabled.set_Value_and_Lock          (False)
-
-# --- turn off egamma Brem
-recAlgs.doEgammaBremReco.set_Value_and_Lock  (False)
-# --- turn off Eflow and missing ET
-recAlgs.doEFlow.set_Value_and_Lock           (False)
-recAlgs.doEFlowJet.set_Value_and_Lock        (False)
-recAlgs.doMissingET.set_Value_and_Lock       (False)
-recAlgs.doMissingETSig.set_Value_and_Lock    (False)
-recAlgs.doObjMissingET.set_Value_and_Lock    (False)
-# --- turn off combined muons
-recAlgs.doMuGirl.set_Value_and_Lock          (False)
-recAlgs.doMuTag.set_Value_and_Lock           (False)
-recAlgs.doMuidLowPt.set_Value_and_Lock       (False)
-recAlgs.doMuonIDCombined.set_Value_and_Lock  (False)
-recAlgs.doMuonIDStandAlone.set_Value_and_Lock(False)
-recAlgs.doMuonSpShower.set_Value_and_Lock    (False)
-recAlgs.doStaco.set_Value_and_Lock           (False)
-recAlgs.doCaloTrkMuId.set_Value_and_Lock     (False)
-recAlgs.doTileMuID.set_Value_and_Lock        (False)
-# --- trigger
-recAlgs.doTrigger.set_Value_and_Lock         (False)
diff --git a/Simulation/ISF/ISF_Example/share/preInclude.pionsGenerator.py b/Simulation/ISF/ISF_Example/share/preInclude.pionsGenerator.py
deleted file mode 100644
index 132b2115a24ef035cb79de8aad5338788f6b5f6b..0000000000000000000000000000000000000000
--- a/Simulation/ISF/ISF_Example/share/preInclude.pionsGenerator.py
+++ /dev/null
@@ -1,16 +0,0 @@
-print 'SINGLE PARTICLE GENERATOR'
-## Run ParticleGenerator
-import AthenaCommon.AtlasUnixGeneratorJob
-spgorders = ['pdgcode: sequence 211 -211',
-             'vertX: constant 0.0',
-             'vertY: constant 0.0',
-             'vertZ: constant 0.0',
-             't: constant 0.0',
-             'eta: constant 0.2',
-             'phi: flat  0 6.28318',
-             'pt: constant 50000']
-from AthenaCommon.AlgSequence import AlgSequence
-topSeq = AlgSequence()
-from ParticleGenerator.ParticleGeneratorConf import ParticleGenerator
-topSeq += ParticleGenerator()
-topSeq.ParticleGenerator.orders = sorted(spgorders)
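For comparison, a hedged sketch of adapting the same generator block to single muons with flat eta and pt spectra; the 'flat min max' syntax follows the pattern already used above and the values are illustrative:

    spgorders_mu = ['pdgcode: sequence 13 -13',
                    'vertX: constant 0.0',
                    'vertY: constant 0.0',
                    'vertZ: constant 0.0',
                    't: constant 0.0',
                    'eta: flat -2.5 2.5',
                    'phi: flat 0 6.28318',
                    'pt: flat 10000 100000']
    topSeq.ParticleGenerator.orders = sorted(spgorders_mu)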