diff --git a/Event/EventOverlay/EventOverlayJobTransforms/cmt/grid.requirements b/Event/EventOverlay/EventOverlayJobTransforms/cmt/grid.requirements
new file mode 100644
index 0000000000000000000000000000000000000000..73c5172d1896f6f88b8306eef791918b0fe9648f
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/cmt/grid.requirements
@@ -0,0 +1,20 @@
+package EventOverlayJobTransforms
+
+use AtlasPolicy AtlasPolicy-*
+use AtlasPython AtlasPython-* External
+
+use PyJobTransformsCore PyJobTransformsCore-* Tools
+
+apply_pattern declare_python_modules files="*.py"
+apply_pattern declare_jobtransforms trfs='*_trf.py' jo='*.py'
+
+#apply_pattern generic_declare_for_link kind=runtime files='-s=../share *.db' prefix=share name=trf
+
+apply_pattern declare_runtime_extras extras="../test/EventOverlayJobTransforms_TestConfiguration.xml" files='-s=../share *.db'
+#macro EventOverlayJobTransforms_TestConfiguration "../test/EventOverlayJobTransforms_TestConfiguration.xml"
+
+#private
+#use TestPolicy               TestPolicy-*
+#apply_pattern validate_xml
+#public
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/cmt/requirements b/Event/EventOverlay/EventOverlayJobTransforms/cmt/requirements
new file mode 100644
index 0000000000000000000000000000000000000000..985445bfc058ac22747650685950b7486dbe1f6c
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/cmt/requirements
@@ -0,0 +1,21 @@
+package EventOverlayJobTransforms
+
+use AtlasPolicy AtlasPolicy-*
+
+use PyJobTransforms PyJobTransforms-* Tools
+use PyJobTransformsCore PyJobTransformsCore-* Tools
+
+apply_pattern declare_job_transforms tfs='*.py' jo='*.py'
+apply_pattern declare_python_modules files="*.py"
+
+#apply_pattern generic_declare_for_link kind=runtime files='-s=../share *.db' prefix=share name=trf
+apply_pattern declare_runtime_extras files='-s=../share *.db'
+
+#apply_pattern declare_runtime_extras extras="../test/EventOverlayJobTransforms_TestConfiguration.xml" files='-s=../share *.db'
+#macro EventOverlayJobTransforms_TestConfiguration "../test/EventOverlayJobTransforms_TestConfiguration.xml"
+
+#private
+#use TestPolicy               TestPolicy-*
+#apply_pattern validate_xml
+#public
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/python/Configuration.py b/Event/EventOverlay/EventOverlayJobTransforms/python/Configuration.py
new file mode 100644
index 0000000000000000000000000000000000000000..bb59cae9e1863ea7880604bc7754b369da9e8961
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/python/Configuration.py
@@ -0,0 +1,55 @@
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+## -*- python -*-
+
+from PyJobTransformsCore.full_trfarg import *
+from PATJobTransforms.Configuration import *
+
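+# Each block below follows the same pattern: define an argument class, define an Add*
+# helper that attaches it to a transform (tagged with the sub-steps it applies to),
+# and register that helper by name via AddToConfigDic.
+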
+class DigiRndmSvcArg (StringChoicesArg):
+    """random number service to use for digitization - AtRndmGenSvc uses Ranecu, AtRanluxGenSvc uses Ranlux64"""
+    def __init__(self, choices, caseSensitive = False, help = 'default', name = 'digiRndmSvc'):
+        StringChoicesArg.__init__(self, choices, help, name, caseSensitive)
+    def isFullArgument(self):
+        return True
+def AddDigiRndmSvc(trf, inDic):
+    trf.add( DigiRndmSvcArg( ['AtRndmGenSvc', 'AtRanluxGenSvc', 'AtDSFMTGenSvc']) )
+    return
+AddDigiRndmSvc.subSteps = ['h2r']
+AddToConfigDic('digiRndmSvc', AddDigiRndmSvc)
+
+class SamplingFractionDbTagArg (StringChoicesArg):
+    """liquid argon calorimeter sampling fraction data base tag, passed on in jobOptions to LArfSamplG4Phys"""
+    def __init__(self, choices, caseSensitive = False, help = 'default', name = 'samplingFractionDbTag'):
+        StringChoicesArg.__init__(self, choices, help, name, caseSensitive)
+    def isFullArgument(self):
+        return True
+def AddSamplingFractionDbTagArg(trf, inDic):
+    trf.add( SamplingFractionDbTagArg(['QGSP_BERT', 'QGSP_EMV', 'QGSP', 'QGSP_BERT_EMV', 'FTFP_BERT', 'QGSP_FTFP_BERT', 'FTF_BIC', 'QGSP_BERT_CHIPS', 'QGSP_BIC', 'CHIPS']) )
+    return
+AddSamplingFractionDbTagArg.subSteps = ['h2r']
+AddToConfigDic('samplingFractionDbTag', AddSamplingFractionDbTagArg)
+
+class NoiseControlArg (StringChoicesArg):
+    """overall control of noise simulation - useful for overlay jobs; will set the doCaloNoise, doMuonNoise and doInDetNoise digitization job properties"""
+    def __init__(self, choices, caseSensitive = False, help = 'override noise simulation flags in all subdetectors, [True/False]', name = 'doAllNoise'):
+        StringChoicesArg.__init__(self, choices, help, name, caseSensitive)
+    def isFullArgument(self):
+        return True
+def AddNoiseControlArg(trf, inDic):
+    trf.add( NoiseControlArg(['True', 'False', 'NONE']) )
+    return
+AddNoiseControlArg.subSteps = ['h2r']
+AddToConfigDic('doAllNoise', AddNoiseControlArg)
+
+class AddCaloDigiArg(BoolArg):
+    """Option to save Calo Digits too, not just RawChannels"""
+    def __init__(self, help, name = 'AddCaloDigi'):
+        BoolArg.__init__(self, help, name)
+    def isFullArgument(self):
+        return True
+    def jobOrTask(self):
+        return 'job'
+def AddAddCaloDigiArg(trf, inDic):
+    trf.add( AddCaloDigiArg ("True/False: Save CaloDigits as well, not just RawChannels") )
+    return
+AddAddCaloDigiArg.subSteps = ['h2r']
+AddToConfigDic('AddCaloDigi', AddAddCaloDigiArg)
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/python/OverlayConfig.py b/Event/EventOverlay/EventOverlayJobTransforms/python/OverlayConfig.py
new file mode 100644
index 0000000000000000000000000000000000000000..9ab32dc36a1a021066b8091bf57d87a0799319c4
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/python/OverlayConfig.py
@@ -0,0 +1,29 @@
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+#####################
+## OverlayConfig.py
+#####################
+
+import os
+__all__ = []
+
+from PyJobTransformsCore.TransformConfig import *
+
+import PyJobTransformsCore.basic_trfarg as trfarg
+
+# define configuration properties
+class OverlayConfig(TransformConfig):
+    # prevent any mistypings by not allowing dynamic members
+    __slots__ = ()
+
+    def __init__(self, name='overlayConfig', metaData=None):
+        if metaData is None: metaData = []
+        TransformConfig.__init__(self, name, metaData)
+        self.maxeventsstrategy = 'INPUTEVENTS'
+
+# make configuration object
+overlayConfig = OverlayConfig()
+
+# EOF
+##########################
+                                    
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/python/overlayTransformUtils.py b/Event/EventOverlay/EventOverlayJobTransforms/python/overlayTransformUtils.py
new file mode 100644
index 0000000000000000000000000000000000000000..4eefc09098c86014f8f4f4f339566ea8e4146a32
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/python/overlayTransformUtils.py
@@ -0,0 +1,187 @@
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+## @brief Specialist reconstruction and bytestream transforms
+#  @author atlas-comp-jt-dev@cern.ch
+#  @version $Id: overlayTransformUtils.py 659164 2015-04-07 09:33:47Z jchapman $
+
+import os
+import re
+import subprocess
+import sys
+import tarfile
+
+import logging
+# Logging needs to be in the PyJobTransforms "namespace"
+msg = logging.getLogger('PyJobTransforms.'+__name__.split('.')[-1])
+
+import PyJobTransforms.trfExceptions as trfExceptions
+import PyJobTransforms.trfArgClasses as trfArgClasses
+import PyJobTransforms.trfValidation as trfValidation
+
+from PyJobTransforms.trfExitCodes import trfExit
+from PyJobTransforms.trfExe import athenaExecutor
+
+## @brief Executor for the bytestream overlay filter step; optionally splits the job
+#  into several athena subjobs (one per group of input files) and merges their outputs.
+class BSJobSplitterExecutor(athenaExecutor):
+
+    def preExecute(self, input = set(), output = set()):
+        msg.debug('Preparing for execution of {0} with inputs {1} and outputs {2}'.format(self.name, input, output))
+
+        # There are two ways to configure this transform:
+        # - Give an inputBSFile argument directly
+        # - Give an overlayConfigFile and a jobNumber argument
+        # Check now that we have a configuration that works
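+        # Illustrative invocations (transform and file names here are hypothetical):
+        #   Overlay_tf.py --inputBSFile zerobias.RAW ...
+        #   Overlay_tf.py --overlayConfigFile overlay_config.tar.gz --jobNumber 1 ...
+        # Specifying both inputBSFile and overlayConfigFile at once is an error, checked below.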
+
+        if 'inputBSFile' in self.conf.argdict and 'overlayConfigFile' in self.conf.argdict:
+            raise trfExceptions.TransformSetupException(trfExit.nameToCode('TRF_EXEC_SETUP_FAIL'), 'Both inputBSFile and overlayConfigFile have been specified - please use only one.')
+
+        if 'overlayConfigFile' in self.conf.argdict:
+            if 'jobNumber' not in self.conf.argdict:
+                raise trfExceptions.TransformSetupException(trfExit.nameToCode('TRF_EXEC_SETUP_FAIL'), 'overlayConfigFile is specified, but no jobNumber was given.')
+            # Job number has to wrap around from 500, dropping back to 1
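+            # e.g. jobNumber 500 stays 500, jobNumber 501 wraps back to 1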
+            wrappedJobNumber = (self.conf.argdict['jobNumber'].value-1)%500 + 1
+
+            self._inputFilelist = 'filelist_{0}.txt'.format(wrappedJobNumber)
+            self._lblList = 'lbn_anal_map_{0}.txt'.format(wrappedJobNumber)
+
+            try:
+                f=tarfile.open(name=self.conf.argdict['overlayConfigFile'].value)
+                f.extract('filelist_{0}.txt'.format(wrappedJobNumber))
+                f.extract('lbn_anal_map_{0}.txt'.format(wrappedJobNumber))
+                f.close()
+
+                bsInputs = open(self._inputFilelist).readline().rstrip().split(',')
+                self.conf.addToArgdict('inputBSFile', trfArgClasses.argBSFile(bsInputs, io='input', type='BS', subtype='BS'))
+                self.conf.addToDataDictionary('BS', self.conf.argdict['inputBSFile'])
+                input.add('BS')
+                msg.info('Validating resolved input bytestream files')
+                trfValidation.performStandardFileValidation({'BS': self.conf.argdict['inputBSFile']}, io='input')
+            except Exception, e:
+                raise trfExceptions.TransformSetupException(trfExit.nameToCode('TRF_EXEC_SETUP_FAIL'), 'Error while unpacking and extracting input files for transform: {0}'.format(e))
+
+            # Now setup correct input arguments
+            self.conf.argdict['InputLbnMapFile'] = trfArgClasses.argString(self._lblList)
+            self.conf.argdict['InputFileMapFile'] = trfArgClasses.argString(self._inputFilelist)
+
+        else:
+            #if 'lumiBlockMapFile' not in self.conf.argdict:
+            #    raise trfExceptions.TransformSetupException(trfExit.nameToCode('TRF_EXEC_SETUP_FAIL'), 'inputBSFile is specified, but no lumiBlockMapFile was given.')
+            #self.conf.argdict['InputLbnMapFile'] = self.conf.argdict['lumiBlockMapFile']
+            if 'lumiBlockMapFile' in self.conf.argdict:
+                self.conf.argdict['InputLbnMapFile'] = self.conf.argdict['lumiBlockMapFile']
+        super(BSJobSplitterExecutor, self).preExecute(input=input, output=output)
+
+
+    def execute(self):
+        if 'maxFilesPerSubjob' in self.conf.argdict:
+            self._hasExecuted = True
+            msg.info('Activating job splitting with {0} files per subjob'.format(self.conf.argdict['maxFilesPerSubjob'].value))
+
+            tmpFiles = self.conf.argdict['inputBSFile'].value
+            self._subJobInputs = []
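+            # Carve the input file list into consecutive chunks; each chunk is processed
+            # by its own athena subjob below, and the resulting outputs are merged at the end.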
+            while len(tmpFiles) > 0:
+                self._subJobInputs.append(tmpFiles[0:self.conf.argdict['maxFilesPerSubjob'].value-1])
+                del tmpFiles[0:self.conf.argdict['maxFilesPerSubjob'].value-1]
+
+            #### TESTING TESTING!
+            # self._subJobInputs = self._subJobInputs[0:2]
+            ####
+
+            msg.info('Job will be split into {0} pieces: {1}'.format(len(self._subJobInputs), self._subJobInputs))
+
+            # Now loop over each subjob
+            from PyJobTransforms.trfExe import athenaExecutor, executorConfig, bsMergeExecutor
+            jobCounter = 0
+            outputBSFileList = []
+            for sjInputs in self._subJobInputs:
+                sjobArgdict = {'triggerBit': self.conf.argdict['triggerBit'],
+                              'InputLbnMapFile': self.conf.argdict['InputLbnMapFile'],
+                              'checkEventCount': trfArgClasses.argSubstepBool('False', runarg=False)}
+
+                sjOutput = self.conf.argdict['outputBS_SKIMFile'].value[0] + '_sub{0:03d}.tmp'.format(jobCounter)
+                outputBSFileList.append(sjOutput)
+                sjDataDictionary = {'BS': trfArgClasses.argBSFile(sjInputs, io='input', type='BS', subtype='BS'),
+                                    'BS_SKIM':  trfArgClasses.argBSFile(sjOutput, io='output', type='BS', subtype='BS_SKIM')}
+
+                sjconf = executorConfig(sjobArgdict, sjDataDictionary)
+                sjexe = athenaExecutor(name='BSOverlayFilter_{0:03d}'.format(jobCounter), skeletonFile = 'EventOverlayJobTransforms/skeleton.BSOverlayFilter_tf.py',
+                                       conf=sjconf, inData=set(['BS']), outData=set(['BS_SKIM']))
+                sjexe.doAll(input=set(['BS']), output=set(['BS_SKIM']))
+                jobCounter += 1
+
+            # Now do the merging
+            mrgArgdict = {'maskEmptyInputs': trfArgClasses.argBool(True, runarg=False),
+                          'allowRename': trfArgClasses.argBool(True, runarg=False)}
+            mrgDataDictionary = {'BS': trfArgClasses.argBSFile(outputBSFileList, io='input', type='BS', subtype='BS_SKIM'),
+                                 'BS_MRG': self.conf.argdict['outputBS_SKIMFile']}
+            mrgConf = executorConfig(mrgArgdict, mrgDataDictionary)
+            mrgExe = bsMergeExecutor(name = 'RAWFileMerge', exe = 'file_merging', conf=mrgConf,
+                                     inData = set(['BS']), outData = set(['BS_MRG']))
+            mrgExe.doAll(input=set(['BS']), output=set(['BS_MRG']))
+
+            # If we got here then all must be well, so we have executed successfully
+            self._rc = 0
+
+        else:
+            # Ordinary run
+            super(BSJobSplitterExecutor, self).execute()
+
+
+
+### Add Argument Methods
+def addOverlayChainOverrideArguments(parser):
+    from EventOverlayJobTransforms.overlayTrfArgs import  addOverlayChainOverrideArgs
+    addOverlayChainOverrideArgs(parser)
+
+def addOverlayBSFilterArguments(parser):
+    from EventOverlayJobTransforms.overlayTrfArgs import addOverlayBSFilterArgs
+    addOverlayBSFilterArgs(parser)
+
+def addOverlay_PoolArguments(parser):
+    from SimuJobTransforms.simTrfArgs import addForwardDetTrfArgs, addBasicDigiArgs
+    from EventOverlayJobTransforms.overlayTrfArgs import addOverlayTrfArgs, addOverlayPoolTrfArgs
+    addBasicDigiArgs(parser)
+    addForwardDetTrfArgs(parser)
+    addOverlayTrfArgs(parser)
+    addOverlayPoolTrfArgs(parser)
+
+def addOverlay_BSArguments(parser):
+    from SimuJobTransforms.simTrfArgs import addForwardDetTrfArgs, addBasicDigiArgs
+    from EventOverlayJobTransforms.overlayTrfArgs import addOverlayTrfArgs, addOverlayBSTrfArgs
+    addBasicDigiArgs(parser)
+    addForwardDetTrfArgs(parser)
+    addOverlayTrfArgs(parser)
+    addOverlayBSTrfArgs(parser)
+
+### Add Sub-step Methods
+def addOverlayBSFilterSubstep(executorSet):
+    executorSet.add(BSJobSplitterExecutor(name = 'BSFilter', skeletonFile = 'EventOverlayJobTransforms/skeleton.BSOverlayFilter_tf.py', substep='overlayBSFilt',
+                                          perfMonFile = 'ntuple.pmon.gz', inData = ['BS'], outData = ['BS_SKIM']))
+
+def addOverlay_PoolSubstep(executorSet):
+    executorSet.add(athenaExecutor(name = 'OverlayPool', skeletonFile = 'EventOverlayJobTransforms/skeleton.OverlayPool_tf.py',
+                                   substep = 'overlayPOOL', tryDropAndReload = False, perfMonFile = 'ntuple.pmon.gz',
+                                   inData = ['HITS', 'RDO_BKG'], outData = ['RDO', 'RDO_SGNL']))
+
+def addOverlay_BSSubstep(executorSet):
+    executorSet.add(athenaExecutor(name = 'OverlayBS', skeletonFile = 'EventOverlayJobTransforms/skeleton.OverlayBS_tf.py',
+                                   substep = 'overlayBS', tryDropAndReload = False, perfMonFile = 'ntuple.pmon.gz',
+                                   inData = [('HITS', 'BS')], outData = ['RDO', 'RDO_SGNL']))
+
+### Append Sub-step Methods
+def appendOverlayBSFilterSubstep(trf):
+    executor = set()
+    addOverlayBSFilterSubstep(executor)
+    trf.appendToExecutorSet(executor)
+
+def appendOverlay_PoolSubstep(trf):
+    executor = set()
+    addOverlay_PoolSubstep(executor)
+    trf.appendToExecutorSet(executor)
+
+def appendOverlay_BSSubstep(trf):
+    executor = set()
+    addOverlay_BSSubstep(executor)
+    trf.appendToExecutorSet(executor)
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/python/overlayTrfArgs.py b/Event/EventOverlay/EventOverlayJobTransforms/python/overlayTrfArgs.py
new file mode 100644
index 0000000000000000000000000000000000000000..23347189e9c61add11925f8e777ff4146bafb1e3
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/python/overlayTrfArgs.py
@@ -0,0 +1,79 @@
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+import argparse
+import logging
+msg = logging.getLogger(__name__)
+import unittest
+import pickle
+import os
+
+from PyJobTransforms.trfArgClasses import argFactory, argFile, argInt, argFloat, argString, argSubstep, trfArgParser, argList, argBool, argBSFile, argPOOLFile, argHITSFile, argRDOFile, argSubstepInt, argSubstepBool
+
+## Add arguments whose default properties have to be overridden for the Overlay Chain
+def addOverlayChainOverrideArgs(parser):
+    parser.defineArgGroup('Overlay Chain', 'Overlay Chain transform arguments')
+    parser.add_argument('--skipEvents', group='Overlay Chain', type=argFactory(argSubstepInt, defaultSubstep='EVNTtoHITS'),
+                        help='Number of events to skip over in the EVNTtoHITS processing step')
+
+
+## Add Overlay transform arguments to an argparse ArgumentParser
+def addOverlayBSFilterArgs(parser):
+    parser.defineArgGroup('Overlay Filter', 'Overlay filter transform arguments')
+    parser.add_argument('--overlayConfigFile',
+                        type=argFactory(argString),
+                        help='Input overlay configuration tarball file', group='Overlay Filter')
+    parser.add_argument('--inputBSFile', nargs='+',
+                        type=argFactory(argBSFile, io='input'),
+                        help='Input overlay BS file(s)', group='Overlay Filter')
+    parser.add_argument('--lumiBlockMapFile',
+                        type=argFactory(argString),
+                        help='Lumi block information file', group='Overlay Filter')
+    parser.add_argument('--outputBS_SKIMFile', '--outputBSFile',
+                        type=argFactory(argBSFile, io='output'),
+                        help='Output skimmed BS file', group='Overlay Filter')
+    parser.add_argument('--eventIdFile',
+                        type=argFactory(argString), help='The name of the file to write to for EventIdModifierSvc lines', group='Overlay Filter')
+    parser.add_argument('--jobNumber',
+                        type=argFactory(argInt),
+                        help='Job number', group='Overlay Filter')
+    parser.add_argument('--triggerBit',
+                        type=argFactory(argInt),
+                        help='Trigger bit', group='Overlay Filter')
+    parser.add_argument('--maxFilesPerSubjob',
+                        type=argFactory(argInt),
+                        help='Number of bytestream input files for each athena subjob', group='Overlay Filter')
+
+# jobNumber=102
+# InputDataTarFile=/afs/cern.ch/work/e/efeld/overlay/prep/mytar.tar.gz
+# InputLbnMapFile=lbn_anal_map.txt
+# InputFileMapFile=filelist.txt
+# TriggerBit=240
+
+def addOverlayTrfArgs(parser):
+    parser.defineArgGroup('EventOverlay', 'Event Overlay Options')
+    parser.add_argument('--ReadByteStream',
+                        type=argFactory(argBool),
+                        help='Are the real data input files byte stream or POOL?', group='EventOverlay')
+    parser.add_argument('--muonForceUse',
+                        type=argFactory(argBool),
+                        help='A hack for muon 8-fold versus 12-fold problems?', group='EventOverlay')
+    parser.add_argument('--outputRDO_SGNLFile', nargs='+',
+                        type=argFactory(argRDOFile, io='output'),
+                        help='The RDO file of the MC signal alone', group='EventOverlay')
+    parser.add_argument('--fSampltag',
+                        type=argFactory(argString),
+                        help='The cool tag for /LAR/ElecCalib/fSampl/Symmetry, see https://twiki.cern.ch/twiki/bin/viewauth/Atlas/LArCalibMCPoolCool', group='EventOverlay')
+    parser.add_argument('--cscCondOverride',
+                        type=argFactory(argBool),
+                        help='A hack to override CSC conditions folder', group='EventOverlay')
+
+def addOverlayBSTrfArgs(parser):
+    parser.defineArgGroup('EventOverlayBS', 'Event Overlay Options')
+    parser.add_argument('--inputBS_SKIMFile', nargs='+',
+                        type=argFactory(argBSFile, io='input'),
+                        help='Input RAW BS for pileup overlay', group='EventOverlayBS')
+
+def addOverlayPoolTrfArgs(parser):
+    parser.defineArgGroup('EventOverlayPool', 'Event Overlay Options')
+    parser.add_argument('--inputRDO_BKGFile', nargs='+',
+                        type=argFactory(argRDOFile, io='input'),
+                        help='Input RAW RDO for pileup overlay', group='EventOverlayPool')
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/0biasselection/GetNEventsLB.py b/Event/EventOverlay/EventOverlayJobTransforms/scripts/0biasselection/GetNEventsLB.py
new file mode 100644
index 0000000000000000000000000000000000000000..fe985acd7b87d2a19913e1b7455a8b29197825a4
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/0biasselection/GetNEventsLB.py
@@ -0,0 +1,98 @@
+#!/bin/env python
+
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+#
+# ----------------------------------------------------------------
+# Script : GetNEventsLB.py
+# Purpose: Utility to retrieve number of events per LB from SFO DB
+# Authors: Andreas Hoecker (CERN)
+# Created: Aug 17, 2011
+# ----------------------------------------------------------------
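+# Example (illustrative run/stream values):
+#   python GetNEventsLB.py -r 200842 -s physics_ZeroBiasOverlay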
+
+import sys, getopt
+
+def GetSFO_LBNEvents( cursor, runno, streamname ):
+    """returns nfiles, fsize, nevents"""
+    stype, sep, sname = streamname.partition('_')
+    cursor.execute( "SELECT SUM(NREVENTS),LUMIBLOCKNR,COUNT(FILESIZE),SUM(FILESIZE) FROM SFO_TZ_File WHERE RUNNR=:arg_1 and STREAMTYPE=:arg_2 and STREAM=:arg_3 GROUP BY LUMIBLOCKNR ORDER BY LUMIBLOCKNR",
+                    arg_1=runno, arg_2=stype, arg_3=sname )
+    return cursor.fetchall()
+
+def GetSFO_LBs( cursor, runno ):
+    cursor.execute( "SELECT MIN(LUMIBLOCKNR), MAX(LUMIBLOCKNR) FROM SFO_TZ_Lumiblock WHERE RUNNR=:arg_1", arg_1=runno )
+    return cursor.fetchone()[0:2]
+
+def OpenSFOConnection():
+    import cx_Oracle
+    return cx_Oracle.connect("ATLAS_SFO_T0_R/readmesfotz2008@atlr")
+
+def usage():
+    print ' '
+    print 'Usage: python %s [options]' % sys.argv[0]
+    print '  -r | --run      : run number (REQUIRED)'
+    print '  -s | --stream   : full stream name (REQUIRED)'
+    print '  -h | --help     : print this usage message'
+    print ' '
+    exit(1)
+
+def main():
+
+    try:
+        shortopts  = "r:s:h?"
+        longopts   = ["run=", "stream=", "help", "usage"]
+        opts, args = getopt.getopt( sys.argv[1:], shortopts, longopts )
+
+    except getopt.GetoptError:
+        # print help information and exit:
+        print >> sys.stderr, 'ERROR: unknown options in argument %s' % sys.argv[1:]
+        usage()
+
+    runno   = None
+    stream  = None
+    for o, a in opts:
+        if o in ("-?", "-h", "--help", "--usage"):
+            usage()
+        elif o in ("-r", "--run"):
+            runno = a
+        elif o in ("-s", "--stream"):
+            stream = a
+
+    if not runno or not stream: usage()
+    runno = int(runno)
+
+    connection = OpenSFOConnection()
+    cursor     = connection.cursor()
+
+    print 'Results for run: %i, stream: "%s"' % (runno, stream)
+
+    # min/max LB number for given run/stream
+    # --> this command is slow... don't know why
+    # minLB, maxLB = GetSFO_LBs( cursor, runno )
+    # print 'LB range: %i -- %i' % (minLB, maxLB)
+
+    # list of (nevents, lbn, nfiles, fsize) tuples, one per LB
+    lblist = GetSFO_LBNEvents( cursor, runno, stream )
+    print 'First non-zero LB: ',lblist[0][1]
+    print 'Last  non-zero LB: ',lblist[-1][1]
+
+    # access to all LBs
+    sumnev    = 0
+    sumnfiles = 0
+    sumfsize  = 0
+    for (nev,lb,nfiles,fsize) in lblist:
+        fsize /= 1.e6
+        print ' ... Run %i, LB %i has %i events, %i RAW files and %f MB' % (runno,lb,nev,nfiles,fsize)
+        sumnev    += nev
+        sumnfiles += nfiles
+        sumfsize  += fsize
+    print '--------------------------------------------------'
+    print 'Total #events : ', sumnev
+    print 'Total #files  : ', sumnfiles
+    print 'Total RAW size: ', sumfsize/1000.0, ' GB'
+    
+    cursor.close()
+    connection.close()
+
+if __name__ == '__main__':
+    main()
+    
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/0biasselection/atlas_error_ignore.db b/Event/EventOverlay/EventOverlayJobTransforms/scripts/0biasselection/atlas_error_ignore.db
new file mode 100755
index 0000000000000000000000000000000000000000..1e5500232123bdacecedca5db939dd702c5f0da7
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/0biasselection/atlas_error_ignore.db
@@ -0,0 +1,90 @@
+#Each line contains 3 fields, separated by commas:
+#atlas_release_regexp,  who_prints_it,  error_message_regexp
+# For the regular expression syntax that can be used in <error_message_regexp>, see:
+# http://docs.python.org/lib/re-syntax.html
+# Note in particular the special regexp characters that need to be backslashed if meant literally: ()[]{}^$.*+?
+#   In constructing the total regular expression used to match the lines:
+#     - whitespace is stripped from both ends of the fields <atlas_release_regexp> and <who_prints_it>,
+#       and from the right end of <error_message_regexp>
+#     - zero or more whitespace characters are allowed between <who_prints_it> and <error_message_regexp>
+#     - if the <who_prints_it> field is empty, the <error_message_regexp> is the total regexp.
+# error detection can be tested by running on a relevant log file:
+#  checklog.py someLogFile
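+# Example entry (an ERROR from ToolSvc, ignored in all releases; see the list below):
+#  ALL   ,ToolSvc           ,  ERROR Unable to finalize the following tools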
+
+## Errors to ignore for ALL releases
+## =================================
+ALL   ,.*?, INFO .+
+ALL   ,ByteStreamInputSvc,ERROR Skipping bad event
+ALL   ,ToolSvc.CscSplitClusterFitter,ERROR   Peak-to-Val dist is [-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?  Val-to-Peak dist is [-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?  Shouldnot be negative value :[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?  [-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)? [-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?
+ALL   ,AlgErrorAuditor,ERROR Illegal Return Code: Algorithm CscThresholdClusterBuilder reported an ERROR, but returned a StatusCode "SUCCESS"
+ALL   ,AlgErrorAuditor,ERROR Illegal Return Code: Algorithm InDetSCTRawDataProvider reported an ERROR, but returned a StatusCode "SUCCESS"
+ALL   ,(?:Py:)?Athena      ,  ERROR inconsistent case used in property name ".*?" of ApplicationMgr
+ALL   ,(?:Py:)?Athena      ,  ERROR Algorithm ".*?": not in TopAlg or other known list, no properties set
+ALL   ,(?:Py:)?Athena      ,  ERROR Algorithm ".*?": type missing, no properties set
+ALL   ,(?:Py:)?Athena      ,  ERROR attempt to add .* to non-existent property .*?
+ALL   ,(?:Py:)?Configurable,  ERROR .* undeclared or uses a backdoor
+ALL   ,(?:Py:)?Configurable,  ERROR children\(\) is deprecated
+ALL   ,(?:Py:)?Configurable,  ERROR getChildren\(\) returns a copy
+ALL   ,(?:Py:)?Configurable,  ERROR jobOptName\(\) is deprecated
+# Reco
+ALL   ,(?:Py:)?Configurable,  ERROR attempt to add a duplicate \(CellCalibrator.CellCalibrator.H1WeightCone7H1Tower\)
+ALL   ,(?:Py:)?ResourceLimits,ERROR failed to set max resource limits
+ALL   ,AlgErrorAuditor,       ERROR Illegal Return Code: Algorithm StreamESD reported an ERROR, but returned a StatusCode "SUCCESS"
+# Trigger BStoRDO 
+ALL   ,AthenaRefIOHandler,    ERROR Failed to set ElementLink
+ALL   ,ElementLink,           ERROR toPersistent: the internal state of link
+ALL   ,StoreGateSvc,          ERROR record: object not added to store
+ALL   ,StoreGateSvc,          ERROR  setupProxy:: error setting up proxy 
+ALL   ,AlgErrorAuditor,       ERROR Illegal Return Code: Algorithm MooHLTAlgo 
+ALL   ,AlgErrorAuditor,       ERROR Illegal Return Code: Algorithm TrigSteer_EF
+ALL   ,AlgErrorAuditor,       ERROR Illegal Return Code: Algorithm muFast_(?:Muon|900GeV)
+ 
+# Trigger reco_ESD 
+ALL   ,THistSvc,              ERROR already registered an object with identifier "/EXPERT/
+ALL   ,RpcRawDataNtuple  ,  ERROR .*
+ALL   ,CBNT_L1CaloROD\S+ ,  ERROR .*
+ALL   ,CBNTAA_Tile\S+    ,  ERROR .*
+ALL   ,TileDigitsMaker   ,  ERROR .*
+ALL   ,MdtDigitToMdtRDO  ,  ERROR .* 
+ALL   ,HelloWorld        ,  ERROR .*
+ALL   ,HelloWorld        ,  FATAL .*
+ALL   ,PythiaB           ,  ERROR  ERROR in PYTHIA PARAMETERS
+ALL   ,ToolSvc           ,  ERROR Tool .* not found and creation not requested
+ALL   ,ToolSvc           ,  ERROR Unable to finalize the following tools
+ALL   ,ToolSvc           ,  ERROR Factory for Tool .* not found
+ALL   ,CBNT_Audit        ,  ERROR  Memory leak!.*
+ALL   ,ToolSvc.InDetSCTRodDecoder   ,  ERROR Unknown offlineId for OnlineId*
+ALL   ,THistSvc.sysFinali,  FATAL  Standard std::exception is caught
+ALL   ,,.*Message limit reached for .*
+ALL   ,,\s+ERROR IN C-S .*=.*
+ALL   ,,.*ERROR\s+\|.*
+ALL   ,,^\s*FATAL ERROR\s*$
+ALL   ,,ERROR \(poolDb\):
+ALL   ,,ERROR \(pool\):
+ALL   ,,ERROR - G4Navigator::ComputeStep\(\)
+ALL   ,,.*ERROR OCCURED DURING A SECONDARY SCATTER AND WAS
+ALL   ,THistSvc        , ERROR already registered an object with identifier .*
+ALL   ,,ERROR MuonDetectorManager::getCscReadoutElement stNameindex out of range .*
+ALL   ,muFast_\S+      , ERROR CSM for Subsystem \d+, MrodId \d+, LinkId \d+ not found
+ALL   ,TRTDetectorManager , FATAL Unable to apply Inner Detector alignments
+ALL   ,TRTDetectorManager , ERROR AlignableTransformContainer for key \/TRT\/Align is empty
+ALL   ,,ERROR in Single_Process::CalculateTotalXSec
+ALL   ,,.*ERROR WITH DELM.*
+#ALL   ,ToolSvc.TrigTSerializer,ERROR Errors while decoding
+ALL   ,AlgErrorAuditor,ERROR Illegal Return Code: Algorithm 
+
+
+## Errors to ignore for specific releases
+## ======================================
+# bug #47761
+15.[0-99].[0-99]|(?:rel|dev|bug)_[0-6],PoolSvc,               ERROR Failed to get ContainerHandle (for: POOLContainer_DataHeader_p3|to set POOL property\.)
+# Cosmic Reco
+14.[0-99].[0-99]|(?:rel|dev|bug)_[0-6],(?:Py:)?Configurable,                ERROR attempt to add a duplicate \(.*\) \.\.\. dupe ignored
+14.[0-99].[0-99]|(?:rel|dev|bug)_[0-6],ToolSvc,                             ERROR Cannot create tool ICalorimeterNoiseTool .*
+14.[0-99].[0-99]|(?:rel|dev|bug)_[0-6],ToolSvc.MdtPrepDataProviderTool,     ERROR Error in MDT RDO decoder for subdetId/mrodId/csmId*
+14.[0-99].[0-99]|(?:rel|dev|bug)_[0-6],AlgErrorAuditor,                     ERROR Illegal Return Code: Algorithm MdtRdoToMdtPrepData reported an ERROR*
+14.[0-99].[0-99]|(?:rel|dev|bug)_[0-6],\S+::fillBranch,                     ERROR <index out of range, (pat|hit) not added to ntuple> 100
+# csc_recoESD, 14.2.21.1
+14.2.[0-99]|(?:rel|dev|bug)_[0-6],TrigSteer_EF,     ERROR std exception thrown from algorithm \(basic error\)To many objects in trigger collection of one type, end: 65535 >= 65535
+14.2.[0-99]|(?:rel|dev|bug)_[0-6],AlgErrorAuditor,  ERROR Illegal Return Code: Algorithm TrigCaloCellMaker_jet reported an ERROR, but returned a StatusCode "SUCCESS" 
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/0biasselection/data12_8TeV.periodAllYear_DetStatus-v49-pro13-03_CoolRunQuery-00-04-08_Atlas_Ready.xml b/Event/EventOverlay/EventOverlayJobTransforms/scripts/0biasselection/data12_8TeV.periodAllYear_DetStatus-v49-pro13-03_CoolRunQuery-00-04-08_Atlas_Ready.xml
new file mode 100644
index 0000000000000000000000000000000000000000..1238728e2d807c226be8705b60f7b58b1a3c4675
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/0biasselection/data12_8TeV.periodAllYear_DetStatus-v49-pro13-03_CoolRunQuery-00-04-08_Atlas_Ready.xml
@@ -0,0 +1,475 @@
+<?xml version="1.0" ?>
+<!DOCTYPE LumiRangeCollection
+  SYSTEM 'http://atlas-runquery.cern.ch/LumiRangeCollection.dtd'>
+<!-- Good-runs-list created by AtlRunQuery.py on 2012-07-20 02:04:53.491441 -->
+<?xml-stylesheet type="text/xsl" href="/atlasdqm/grlview/grl.xsl" title="grlview" ?>
+<LumiRangeCollection>
+   <NamedLumiRange>
+      <Name>Atlas_Ready</Name>
+      <Version>2.1</Version>
+      <Metadata Name="Query">find run data12_8TeV.periodAllYear and partition ATLAS and db DATA and ptag data12_8TeV and dq global_status DEFECTS#DetStatus-v49-pro13-03 g and dq lumi DEFECTS#DetStatus-v49-pro13-03 g  </Metadata>
+      <Metadata Name="RunList">206614,206573,206564,206497,206409,206369,206368,206367,206299,205113,205112,205071,205055,205017,205016,205010,204976,204955,204954,204932,204910,204857,204853,204796,204772,204769,204763,204726,204707,204668,204633,204564,204474,204442,204416,204265,204240,204158,204153,204134,204073,204071,204026,204025,203934,203876,203875,203792,203779,203760,203745,203739,203719,203680,203636,203605,203602,203524,203523,203456,203454,203432,203353,203336,203335,203277,203258,203256,203228,203195,203191,203169,203027,202991,202987,202965,202798,202740,202712,202668,202660,201556,201555,201494,201489,201289,201280,201269,201257,201191,201190,201138,201120,201113,201052,201006,200987,200982,200967,200965,200926,200913,200863,200842,200841</Metadata>
+      <Metadata Name="RQTSVNVersion">CoolRunQuery-00-04-08</Metadata>
+      <Metadata Name="StreamListInfo">
+         <Stream Name="physics_Background" TotalNumOfEvents="24753317" NumOfSelectedEvents="21341263"/>
+         <Stream Name="physics_Bphysics" TotalNumOfEvents="75013593" NumOfSelectedEvents="74950674"/>
+         <Stream Name="physics_CosmicCalo" TotalNumOfEvents="15697532" NumOfSelectedEvents="7295368"/>
+         <Stream Name="physics_CosmicMuons" TotalNumOfEvents="378039" NumOfSelectedEvents="376009"/>
+         <Stream Name="physics_Egamma" TotalNumOfEvents="234689196" NumOfSelectedEvents="233427111"/>
+         <Stream Name="physics_EnhancedBias" TotalNumOfEvents="3246211" NumOfSelectedEvents="3246211"/>
+         <Stream Name="physics_HadDelayed" TotalNumOfEvents="71148836" NumOfSelectedEvents="70609675"/>
+         <Stream Name="physics_IDCosmic" TotalNumOfEvents="157654" NumOfSelectedEvents="157188"/>
+         <Stream Name="physics_JetCalibDelayed" TotalNumOfEvents="1476687" NumOfSelectedEvents="1385121"/>
+         <Stream Name="physics_JetTauEtmiss" TotalNumOfEvents="259059470" NumOfSelectedEvents="257665735"/>
+         <Stream Name="physics_L1Calo" TotalNumOfEvents="454255" NumOfSelectedEvents="0"/>
+         <Stream Name="physics_L1Muon" TotalNumOfEvents="49709" NumOfSelectedEvents="0"/>
+         <Stream Name="physics_MinBias" TotalNumOfEvents="21497504" NumOfSelectedEvents="21116234"/>
+         <Stream Name="physics_Muons" TotalNumOfEvents="226312112" NumOfSelectedEvents="222999951"/>
+         <Stream Name="physics_Standby" TotalNumOfEvents="45123129" NumOfSelectedEvents="0"/>
+         <Stream Name="physics_ZeroBias" TotalNumOfEvents="2619584" NumOfSelectedEvents="2431086"/>
+         <Stream Name="physics_ZeroBiasOverlay" TotalNumOfEvents="24409616" NumOfSelectedEvents="24266437"/>
+      </Metadata>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">206614</Run>
+         <LBRange Start="206" End="275"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">206573</Run>
+         <LBRange Start="3" End="188"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">206564</Run>
+         <LBRange Start="102" End="392"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">206497</Run>
+         <LBRange Start="60" End="82"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">206409</Run>
+         <LBRange Start="212" End="881"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">206369</Run>
+         <LBRange Start="192" End="756"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">206368</Run>
+         <LBRange Start="3" End="521"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">206367</Run>
+         <LBRange Start="33" End="63"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">206299</Run>
+         <LBRange Start="42" End="85"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">205113</Run>
+         <LBRange Start="590" End="745"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">205112</Run>
+         <LBRange Start="264" End="480"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">205071</Run>
+         <LBRange Start="224" End="1313"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">205055</Run>
+         <LBRange Start="301" End="1246"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">205017</Run>
+         <LBRange Start="3" End="407"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">205016</Run>
+         <LBRange Start="3" End="365"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">205010</Run>
+         <LBRange Start="78" End="198"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">204976</Run>
+         <LBRange Start="123" End="331"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">204955</Run>
+         <LBRange Start="3" End="466"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">204954</Run>
+         <LBRange Start="87" End="324"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">204932</Run>
+         <LBRange Start="92" End="244"/>
+         <LBRange Start="246" End="649"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">204910</Run>
+         <LBRange Start="302" End="749"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">204857</Run>
+         <LBRange Start="126" End="201"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">204853</Run>
+         <LBRange Start="46" End="214"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">204796</Run>
+         <LBRange Start="93" End="121"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">204772</Run>
+         <LBRange Start="3" End="499"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">204769</Run>
+         <LBRange Start="113" End="517"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">204763</Run>
+         <LBRange Start="56" End="1015"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">204726</Run>
+         <LBRange Start="136" End="154"/>
+         <LBRange Start="156" End="237"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">204707</Run>
+         <LBRange Start="196" End="201"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">204668</Run>
+         <LBRange Start="29" End="695"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">204633</Run>
+         <LBRange Start="158" End="327"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">204564</Run>
+         <LBRange Start="585" End="1475"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">204474</Run>
+         <LBRange Start="96" End="843"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">204442</Run>
+         <LBRange Start="177" End="260"/>
+         <LBRange Start="271" End="271"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">204416</Run>
+         <LBRange Start="15" End="323"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">204265</Run>
+         <LBRange Start="134" End="1580"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">204240</Run>
+         <LBRange Start="589" End="1447"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">204158</Run>
+         <LBRange Start="340" End="958"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">204153</Run>
+         <LBRange Start="71" End="293"/>
+         <LBRange Start="295" End="306"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">204134</Run>
+         <LBRange Start="169" End="268"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">204073</Run>
+         <LBRange Start="144" End="405"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">204071</Run>
+         <LBRange Start="377" End="551"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">204026</Run>
+         <LBRange Start="117" End="574"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">204025</Run>
+         <LBRange Start="123" End="202"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">203934</Run>
+         <LBRange Start="653" End="1170"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">203876</Run>
+         <LBRange Start="460" End="1154"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">203875</Run>
+         <LBRange Start="74" End="151"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">203792</Run>
+         <LBRange Start="138" End="211"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">203779</Run>
+         <LBRange Start="319" End="689"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">203760</Run>
+         <LBRange Start="197" End="224"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">203745</Run>
+         <LBRange Start="3" End="695"/>
+         <LBRange Start="699" End="760"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">203739</Run>
+         <LBRange Start="558" End="988"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">203719</Run>
+         <LBRange Start="80" End="534"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">203680</Run>
+         <LBRange Start="352" End="689"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">203636</Run>
+         <LBRange Start="61" End="1052"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">203605</Run>
+         <LBRange Start="314" End="336"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">203602</Run>
+         <LBRange Start="25" End="938"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">203524</Run>
+         <LBRange Start="61" End="367"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">203523</Run>
+         <LBRange Start="241" End="327"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">203456</Run>
+         <LBRange Start="3" End="166"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">203454</Run>
+         <LBRange Start="89" End="156"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">203432</Run>
+         <LBRange Start="117" End="469"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">203353</Run>
+         <LBRange Start="260" End="743"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">203336</Run>
+         <LBRange Start="178" End="789"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">203335</Run>
+         <LBRange Start="36" End="351"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">203277</Run>
+         <LBRange Start="150" End="479"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">203258</Run>
+         <LBRange Start="51" End="843"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">203256</Run>
+         <LBRange Start="242" End="265"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">203228</Run>
+         <LBRange Start="111" End="220"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">203195</Run>
+         <LBRange Start="78" End="357"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">203191</Run>
+         <LBRange Start="81" End="111"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">203169</Run>
+         <LBRange Start="647" End="724"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">203027</Run>
+         <LBRange Start="425" End="1070"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">202991</Run>
+         <LBRange Start="273" End="558"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">202987</Run>
+         <LBRange Start="122" End="198"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">202965</Run>
+         <LBRange Start="122" End="337"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">202798</Run>
+         <LBRange Start="446" End="920"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">202740</Run>
+         <LBRange Start="73" End="123"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">202712</Run>
+         <LBRange Start="297" End="546"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">202668</Run>
+         <LBRange Start="33" End="479"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">202660</Run>
+         <LBRange Start="322" End="459"/>
+         <LBRange Start="660" End="745"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">201556</Run>
+         <LBRange Start="421" End="1041"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">201555</Run>
+         <LBRange Start="53" End="198"/>
+         <LBRange Start="202" End="205"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">201494</Run>
+         <LBRange Start="6" End="220"/>
+         <LBRange Start="241" End="264"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">201489</Run>
+         <LBRange Start="6" End="374"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">201289</Run>
+         <LBRange Start="96" End="566"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">201280</Run>
+         <LBRange Start="143" End="200"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">201269</Run>
+         <LBRange Start="95" End="671"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">201257</Run>
+         <LBRange Start="362" End="1014"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">201191</Run>
+         <LBRange Start="57" End="411"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">201190</Run>
+         <LBRange Start="84" End="498"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">201138</Run>
+         <LBRange Start="279" End="769"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">201120</Run>
+         <LBRange Start="292" End="458"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">201113</Run>
+         <LBRange Start="208" End="561"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">201052</Run>
+         <LBRange Start="142" End="315"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">201006</Run>
+         <LBRange Start="80" End="639"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">200987</Run>
+         <LBRange Start="42" End="686"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">200982</Run>
+         <LBRange Start="81" End="153"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">200967</Run>
+         <LBRange Start="197" End="197"/>
+         <LBRange Start="206" End="261"/>
+         <LBRange Start="265" End="330"/>
+         <LBRange Start="334" End="395"/>
+         <LBRange Start="398" End="456"/>
+         <LBRange Start="604" End="604"/>
+         <LBRange Start="609" End="668"/>
+         <LBRange Start="670" End="729"/>
+         <LBRange Start="734" End="796"/>
+         <LBRange Start="798" End="858"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">200965</Run>
+         <LBRange Start="26" End="116"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">200926</Run>
+         <LBRange Start="324" End="546"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">200913</Run>
+         <LBRange Start="104" End="280"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">200863</Run>
+         <LBRange Start="54" End="332"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">200842</Run>
+         <LBRange Start="3" End="24"/>
+         <LBRange Start="27" End="85"/>
+      </LumiBlockCollection>
+      <LumiBlockCollection>
+         <Run PrescaleRD0="8" PrescaleRD1="8">200841</Run>
+         <LBRange Start="134" End="135"/>
+         <LBRange Start="171" End="172"/>
+         <LBRange Start="174" End="174"/>
+         <LBRange Start="176" End="177"/>
+         <LBRange Start="179" End="181"/>
+      </LumiBlockCollection>
+   </NamedLumiRange>
+</LumiRangeCollection>
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/0biasselection/lbn_anal_map_splitter.py b/Event/EventOverlay/EventOverlayJobTransforms/scripts/0biasselection/lbn_anal_map_splitter.py
new file mode 100644
index 0000000000000000000000000000000000000000..4f44cdf634d8b35cffeb6de27dab8f52d85efff2
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/0biasselection/lbn_anal_map_splitter.py
@@ -0,0 +1,96 @@
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+
+#get the list of special run,lbn with more than 1 file
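+#runs_lbn_files_lb.txt is assumed to be tab-separated with the RAW file name in the second
+#field, e.g. (hypothetical): <count>\tdata12_8TeV.00200841.physics_ZeroBiasOverlay.merge.RAW._lb0134._SFO-ALL._0001.1
+#run and lbn are parsed from dot-fields 1 and 5 of that file name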
+special_run_lbn = {}
+with open("runs_lbn_files_lb.txt") as f:
+    for line in f:
+        #print line
+        linfo=line.split("\t")
+        #print linfo[1]
+        finfo=linfo[1].split(".")
+        run=finfo[1]
+        lbn=finfo[5].replace("_lb","")
+        #print run+"_"+lbn
+        if run+"_"+lbn in special_run_lbn:
+            if linfo[1] in special_run_lbn[run+"_"+lbn]:
+                print linfo[1], "already in map for ",run,lbn
+            else:
+                special_run_lbn[run+"_"+lbn].append(linfo[1])
+        else:
+            special_run_lbn[run+"_"+lbn]=[linfo[1]]
+    for key in special_run_lbn.keys():
+        #if len(special_run_lbn[key])>1: print special_run_lbn[key]
+        pass
+    print "done with special_run_lbn"
+
+#do the work
+for s in range(0,50): #50
+    with open("/home/ahaas/nfs3/zerobias_skim/lbn_anal_map_"+str(s)+"wanted.txt") as f:
+        tot=0
+        nfile=1
+        of=open("/home/ahaas/nfs3/zerobias_skim/lbn_anal_map_"+str(s)+"wanted_"+str(nfile)+".txt","w")
+        of2=open("/home/ahaas/nfs3/zerobias_skim/filelist_"+str(s)+"wanted_"+str(nfile)+".txt","w")
+        for line in f:
+            linfo=line.split(" ")
+            #print line
+            #print linfo[16]
+            tot+=int(linfo[16])
+            run=linfo[3].replace(",","").zfill(8)
+            lbn=linfo[5].replace(",","").zfill(4)
+            if tot>100:
+                #replace last bit of line and also write it to the new file
+                extra=tot-100
+                forthisone=int(linfo[16])-extra
+                rline=line.replace(linfo[16]+" wanted",str(forthisone)+" wanted")
+                of.write(rline)
+                if run+"_"+lbn in special_run_lbn:
+                    for fname in special_run_lbn[run+"_"+lbn]:  #fname, not f, to avoid shadowing the open input file handle
+                        #print of2.tell()
+                        if of2.tell()>0: of2.write(",")
+                        of2.write(fname)
+                else:
+                    print run,lbn," not in files map!"
+                #print line, rline
+                #print run, lbn
+
+                of.close(); of2.close(); nfile+=1
+                of=open("/home/ahaas/nfs3/zerobias_skim/lbn_anal_map_"+str(s)+"wanted_"+str(nfile)+".txt","w")
+                of2=open("/home/ahaas/nfs3/zerobias_skim/filelist_"+str(s)+"wanted_"+str(nfile)+".txt","w")
+                tot=extra #the number left over from the last set
+            
+                rline=line.replace(linfo[16]+" wanted",str(extra)+" wanted")
+                of.write(rline)
+                if run+"_"+lbn in special_run_lbn:
+                    for fname in special_run_lbn[run+"_"+lbn]:
+                        if of2.tell()>0: of2.write(",")
+                        of2.write(fname)
+                else:
+                    print run,lbn," not in files map!"
+
+            elif tot==100:
+                of.write(line)
+                if run+"_"+lbn in special_run_lbn:
+                    for fname in special_run_lbn[run+"_"+lbn]:
+                        if of2.tell()>0: of2.write(",")
+                        of2.write(fname)
+                else:
+                    print run,lbn," not in files map!"            
+
+                of.close(); of2.close(); nfile+=1
+                of=open("/home/ahaas/nfs3/zerobias_skim/lbn_anal_map_"+str(s)+"wanted_"+str(nfile)+".txt","w")
+                of2=open("/home/ahaas/nfs3/zerobias_skim/filelist_"+str(s)+"wanted_"+str(nfile)+".txt","w")
+                tot=0
+            else:
+                of.write(line)
+                if run+"_"+lbn in special_run_lbn:
+                    for fname in special_run_lbn[run+"_"+lbn]:
+                        #print of2.tell()
+                        if of2.tell()>0: of2.write(",")
+                        of2.write(fname)
+                else:
+                    print run,lbn," not in files map!"
+
+        print "done ",s
+        of.close(); of2.close()
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/0biasselection/lbn_analyze.C b/Event/EventOverlay/EventOverlayJobTransforms/scripts/0biasselection/lbn_analyze.C
new file mode 100644
index 0000000000000000000000000000000000000000..1dec483027f5301a6d6309ebcd872185855fff9e
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/0biasselection/lbn_analyze.C
@@ -0,0 +1,193 @@
+/*
+  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+*/
+
+#include <map>
+#include "TRandom3.h"
+
+class lbn_info{
+public:
+ lbn_info(){nevt=0; intlumi=0; nevtwanted=0;}
+ int nevt;
+ float intlumi;
+ int nevtwanted;
+};
+
+class run_info{
+public:
+ run_info(){nevt=0; intlumi=0; nevtwanted=0;}
+ int nevt;
+ float intlumi;
+ std::map<int,lbn_info> lbnmap;
+ int nevtwanted;
+};
+
+FILE *outfile=fopen("lbn_anal_map.txt","w");
+int debug=0;
+
+std::map< int, int > eventcounts;
+void readeventcounts(int run){
+  eventcounts.clear();
+  char buf[50];
+  sprintf(buf,"lbnevents_%d.txt",run);
+  if (debug) printf("Opening %s, eventcounts size is %d\n",buf,eventcounts.size());
+  FILE *fp = fopen(buf,"r");
+  if (!fp) printf("Failed to open %s!!!\n",buf);
+  char *line=new char[500];
+  while (fgets(line,480,fp)) {
+     int lbn,ne,nf,runn;
+     float mb;
+     int s=sscanf(&line[0]," ... Run %d, LB %d has %d events, %d RAW files and %f MB",&runn,&lbn,&ne,&nf,&mb);
+     if (s>4){
+       assert(run==runn);
+       if (debug) printf("run %d lbn %d has %d events\n",run,lbn,ne);
+       eventcounts[lbn]=ne;
+     }
+     else {
+       printf("s=%d, bad read?\n",s);
+       assert(false);
+     }
+  }
+  if (debug) printf("Closing %s, eventcounts size is %d\n",buf,eventcounts.size());
+  fclose(fp); 
+}
+
+void lbn_analyze(int stream, int nwanted)
+{
+   FILE *fp = fopen("lbn","r");
+   if (!fp) printf("Failed to open lbn input file!!!\n");
+   char *line=new char[500];
+   int run,lbn,L1Acc,beforeps,afterps,L1p,L2p,L3p,valid; 
+   int liveL1,livebp,liveap;
+   float instlumi,dt,avevtperbx,live;
+   //float intlumi,intlumitrig;
+   int grun=0;
+   float gtotaltotallumi=0,gtotaltotallumiprescaled=0;
+   float gtotallumi=0, gtotallumiprescaled=0;
+
+   TRandom myrand;
+   myrand.SetSeed(0);
+   printf("- myrand seed is %u\n",myrand.GetSeed());
+   myrand.SetSeed(myrand.Integer(10000000)+stream);
+   printf("- myrand seed is now %u\n",myrand.GetSeed());
+
+   std::map<int,run_info> runmap;
+
+   while (fgets(line,480,fp)) {
+     if (line[0]!='-') continue;
+     
+     int s=sscanf(&line[0],"--- LumiCalculator      : %d[%d]: L1Acc: %d, Livetime trigger L1Acc: %d, InstLumi: %f, deltaT: %f, AvEvtsPerBX: %f, BeforePrescale: %d, AfterPrescale: %d, Livetime trigger BeforePrescale: %d Livetime trigger AfterPrescale: %d, Livefrac: %f, L1Presc: %d, L2Presc: %d, L3Presc: %d, Valid: %d", &run,&lbn,&L1Acc,&liveL1,&instlumi,&dt,&avevtperbx,&beforeps,&afterps,&livebp,&liveap,&live,&L1p,&L2p,&L3p,&valid);
+     
+     if (s>8){
+       if (debug) printf("- run=%d, lbn=%d, L1Acc=%d, instlumi=%f,L1p=%d, dt=%f, afterps=%d",run,lbn,L1Acc,instlumi,L1p,dt,afterps);
+       
+       if (run!=grun){
+	 if (grun>0){//change of run
+	   runmap[grun].intlumi=gtotallumi; printf("Setting lumi for run %d to %f\n",grun,gtotallumi);
+	   if (debug) printf("Adding to totaltotal lumis\n");
+	   gtotaltotallumi+=gtotallumi; gtotaltotallumiprescaled+=gtotallumiprescaled;
+	   gtotallumi=0;gtotallumiprescaled=0;//reset the int lumi calculation for this run
+	   //runmap[grun].nevt=0;
+	 }
+	 grun=run;//save the run number
+	 printf("Setting grun to %d\n",run);
+
+	 //read in event counts from runquery file for run
+	 readeventcounts(run);
+ 
+       }//new run
+       
+       if (L1p<0) {
+	 runmap[run].lbnmap[lbn].nevt=0;
+	 if (eventcounts[lbn]!=0) printf("For lbn %d, L1p<0 but eventcounts is %d\n",lbn,eventcounts[lbn]);
+       }
+       else{
+	 int ne = L1Acc;//afterps
+	 if (ne!=eventcounts[lbn]){
+	   if (0==stream) printf("For lbn %d, ne from lumicalc is %d but we will trust runquery value of %d\n",lbn,ne,eventcounts[lbn]);
+	   ne=eventcounts[lbn];
+	 }
+	 if (0==ne){dt=0; printf("For lbn %d, setting lumi to 0 since 0 events were recorded\n",lbn);}
+	 runmap[run].lbnmap[lbn].nevt=ne; 
+	 runmap[run].lbnmap[lbn].intlumi=instlumi*dt*live;
+	 runmap[run].nevt+=ne;
+	 gtotallumiprescaled+= instlumi*dt*live/L1p;
+	 gtotallumi+= instlumi*dt*live;
+       }
+       if (debug) printf(", s=%d, grun=%d, gtotallumi=%f, gtotallumiprescaled=%f\n",s,grun,gtotallumi,gtotallumiprescaled);
+     }//good line
+   }//loop over lines in file
+   
+   //after last run
+   runmap[grun].intlumi=gtotallumi; printf("Setting lumi for run %d to %f\n",grun,gtotallumi);
+   if (debug) printf("Adding to totaltotal lumis for last run\n");
+   gtotaltotallumi+=gtotallumi; gtotaltotallumiprescaled+=gtotallumiprescaled;
+
+   fclose(fp);
+   printf("- %d runs, gtotaltotallumi=%f, gtotaltotallumiprescaled=%f\n",runmap.size(),gtotaltotallumi,gtotaltotallumiprescaled);
+
+   if (runmap.size()<1) {printf("- runmap size is %d, quitting!\n",runmap.size()); return;}
+   //return;
+
+   //check the total lumi...
+   double tempr=0;
+   for (std::map<int,run_info>::const_iterator r=runmap.begin(); r!=runmap.end(); ++r){
+     tempr += r->second.intlumi;
+   }
+   if (fabs(tempr-gtotaltotallumi)/tempr>0.001){
+     printf("tempr=%f and gtotaltotallumi=%f\n",tempr,gtotaltotallumi);
+     assert(false);
+   }
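+   //Luminosity-weighted selection: for each wanted event, draw a run with
+   //probability proportional to its integrated luminosity, then a lumi block
+   //within that run, again proportional to its intlumi (the random number is
+   //decremented by each weight until it falls inside one entry).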
+   
+   for (int e=0;e<nwanted;++e){     
+     
+     //pick a random run, proportional to intlumi
+     double rnd = myrand.Uniform(tempr);
+     run=-1;
+     for (std::map<int,run_info>::const_iterator r=runmap.begin(); r!=runmap.end(); ++r){
+       if (rnd<r->second.intlumi) {run=r->first; break;}
+       rnd -= r->second.intlumi;
+     }
+     assert(run>=0);
+
+     //check the total run lumi...
+     double tempt=0;
+     for (std::map<int,lbn_info>::const_iterator l=runmap[run].lbnmap.begin(); l!=runmap[run].lbnmap.end(); ++l){
+       tempt += l->second.intlumi;
+     }
+     if (fabs(tempt-runmap[run].intlumi)/tempt>0.001){
+       printf("tempt=%f and runmap[%d].intlumi=%f\n",tempt,run,runmap[run].intlumi);
+       assert(false);
+     }
+
+     //pick a random lbn, proportional to intlumi
+     rnd = myrand.Uniform(tempt);
+     lbn=-1;
+     for (std::map<int,lbn_info>::const_iterator l=runmap[run].lbnmap.begin(); l!=runmap[run].lbnmap.end(); ++l){
+       if (rnd<l->second.intlumi) {lbn=l->first; break;}
+       rnd -= l->second.intlumi;
+     }
+     assert(lbn>=0);
+     
+     runmap[run].nevtwanted++; runmap[run].lbnmap[lbn].nevtwanted++;
+     if (debug) printf("- stream %d, run %d, lbn %d, chose %d out of %d\n",stream,run,lbn,runmap[run].lbnmap[lbn].nevtwanted,runmap[run].lbnmap[lbn].nevt);
+
+   }//loop over nwanted
+
+   for (std::map<int,run_info>::const_iterator r=runmap.begin(); r!=runmap.end(); ++r){
+     int totnevt=0; float totintlumi=0;
+     printf("stream %d, run %d, has %d events and %f/ub, %f intlumi of total, and %d wanted\n", stream, 
+        r->first,r->second.nevt,r->second.intlumi,r->second.intlumi/gtotaltotallumi,r->second.nevtwanted);
+     for (std::map<int,lbn_info>::const_iterator l=r->second.lbnmap.begin(); l!=r->second.lbnmap.end(); ++l){
+       fprintf(outfile,"stream %d, run %d, lbn %d, has %d events and %f/ub, %f intlumi of run, and %d wanted", stream, r->first, 
+        l->first,l->second.nevt,l->second.intlumi,l->second.intlumi/r->second.intlumi,l->second.nevtwanted);
+       if (l->second.nevtwanted>l->second.nevt) fprintf(outfile," : WARNING, more than available, will be duplicates!\n"); else fprintf(outfile,"\n");
+       totnevt+=l->second.nevt; totintlumi+=l->second.intlumi;
+     }
+     if (totnevt!=r->second.nevt) printf("  XXX events do not agree !!! \n");
+     if (totintlumi>0.0 && fabs(totintlumi - r->second.intlumi)/totintlumi>.001) printf("  XXX intlumi does not agree !!! %f %f \n",totintlumi,r->second.intlumi);
+
+   } 
+   printf("--\n\n");
+}
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/0biasselection/query.sh b/Event/EventOverlay/EventOverlayJobTransforms/scripts/0biasselection/query.sh
new file mode 100755
index 0000000000000000000000000000000000000000..2d979ee01181f6a6d3605d989431df1656562de4
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/0biasselection/query.sh
@@ -0,0 +1,66 @@
+#!/bin/bash
+
+#use GRL
+wget -nc http://atlasdqm.web.cern.ch/atlasdqm/grlgen/All_Good/data12_8TeV.periodAllYear_DetStatus-v61-pro14-02_DQDefects-00-01-00_PHYS_StandardGRL_All_Good.xml
+
+#See https://atlas-datasummary.cern.ch/lumicalc/, https://atlas-lumicalc.cern.ch/
+#iLumiCalc.exe -t L1_ZB -V -x data12_8TeV.periodAllYear_DetStatus-v61-pro14-02_DQDefects-00-01-00_PHYS_StandardGRL_All_Good.xml --lumitag=OflLumi-8TeV-003 --livetrigger=L1_EM30 --lar --lartag=LARBadChannelsOflEventVeto-UPD4-04 > lbn
+iLumiCalc.exe -t L1_ZB -V -x data12_8TeV.periodAllYear_DetStatus-v61-pro14-02_DQDefects-00-01-00_PHYS_StandardGRL_All_Good.xml --lumitag=OflLumi-8TeV-003 --livetrigger=L1_EM30 > lbn
+grep Total lbn
+
+#get info on number of events and files per lumi block in each run
+rm -f lbnevents*
+rm -f runs_lbn_files.txt
+sstream="physics_ZeroBiasOverlay"
+#sstream="physics_ZeroBias"
+for runn in `echo "200842,200863,200913,200926,200965,200967,200982,200987,201006,201052,201113,201120,201138,201190,201191,201257,201269,201280,201289,201489,201494,201555,201556,202660,202668,202712,202740,202798,202965,202991,203027,203169,203191,203195,203228,203256,203258,203277,203335,203336,203353,203432,203454,203456,203523,203524,203602,203605,203636,203680,203719,203739,203745,203760,203779,203792,203875,203876,203934,204025,204026,204071,204073,204134,204153,204158,204240,204265,204416,204442,204474,204564,204633,204668,204726,204763,204769,204772,204796,204853,204857,204910,204932,204954,204955,204976,205010,205016,205017,205055,205071,205112,205113,206368,206369,206409,206497,206573,206614,206955,206962,206971,207044,207046,207221,207262,207304,207306,207332,207397,207447,207490,207528,207531,207532,207582,207589,207620,207664,207696,207749,207772,207800,207809,207845,207864,207865,207931,207934,207975,207982,208123,208126,208179,208184,208189,208258,208261,208354,208485,208631,208642,208662,208705,208717,208720,208780,208781,208811,208870,208930,208931,208970,208982,209024,209025,209074,209084,209109,209161,209183,209214,209254,209265,209269,209353,209381,209550,209580,209608,209628,209629,209736,209776,209787,209812,209864,209866,209899,209980,209995,210302,210308,211620,211670,211697,211772,211787,211867,211902,211937,212034,212103,212142,212144,212172,212199,212272,212619,212663,212687,212721,212742,212809,212815,212858,212967,212993,213039,213079,213092,213130,213155,213157,213204,213250,213359,213431,213479,213486,213539,213627,213640,213684,213695,213702,213754,213796,213816,213819,213900,213951,213964,213968,214021,214086,214160,214176,214216,214388,214390,214494,214523,214544,214553,214618,214651,214680,214714,214721,214758,214777,215027,215061,215063,215091,215414,215433,215456,215464,215473,215541,215571,215589,215643"|sed "s%,% %g"` ; do
+ echo "Getting event info for run $runn "
+ python GetNEventsLB.py -r $runn -s $sstream |grep "... Run" > lbnevents_${runn}.txt
+ dq2-ls -f -H data12_8TeV.00${runn}.physics_ZeroBiasOverlay.merge.RAW >> runs_lbn_files.txt
+done
+grep "_lb" runs_lbn_files.txt > runs_lbn_files_lb.txt
+
+#make lbn_anal_map.txt
+root -l -b -q run_lbn_analyze.C > ~/nfs3/zerobias_skim/log_lbn_analyze.txt 
+
+echo -n "Total events in dataset before GRL: "
+cat lbnevents_*.txt |cut -d ' ' -f 8 |awk '{total = total + $1}END{print total}'
+echo -n "Total events in dataset from log_lbn_analyze.txt: "
+grep "stream 0" ~/nfs3/zerobias_skim/log_lbn_analyze.txt|cut -d ' ' -f 6 |awk '{total = total + $1}END{print total}'
+echo -n "Total events in dataset from lbn_anal_map: "
+grep "stream 1," lbn_anal_map.txt |cut -d ' ' -f 8 |awk '{total = total + $1}END{print total}'
+echo -n "Selected events per stream: "
+grep "stream 8," lbn_anal_map.txt |cut -d ' ' -f 17 |awk '{total = total + $1}END{print total}'
+
+#do this for all 50 streams
+for s in {0..49}; do grep "stream ${s}," lbn_anal_map.txt | grep -v "0 want" > ~/nfs3/zerobias_skim/lbn_anal_map_${s}wanted.txt ; done
+rm lbn_anal_map.txt
+
+#split into jobs for each stream
+python lbn_anal_map_splitter.py 
+rm ~/nfs3/zerobias_skim/*501.txt
+
+for s in {0..49}; do
+ echo "renaming stream $s to 0"
+ for f in {1..500}; do sed -i -e "s%stream $s,%stream 0,%g" ~/nfs3/zerobias_skim/lbn_anal_map_${s}wanted_${f}.txt; done
+done
+
+cd ~/nfs3/zerobias_skim/
+it=2
+for s in {0..49}; do
+ tar cfz ${s}wanted${it}.tar.gz *_${s}wanted_*.txt
+done
+cd -
+
+#send to afs
+scp ~/nfs3/zerobias_skim/*wanted${it}.tar.gz ahaas@lxplus.cern.ch:public/overlay/lists/
+
+#to check the events in each map:
+for s in {0..49}; do
+ echo -n "files with 100 events in stream $s :"
+ for f in {1..500}; do cat ~/nfs3/zerobias_skim/lbn_anal_map_${s}wanted_$f.txt | cut -d ' ' -f 17 |awk '{total = total + $1}END{print total}'; done |grep -c 100
+done
+
+#to show number of files in each filelist:
+for f in ~/nfs3/zerobias_skim/filelist_0wanted_*.txt; do echo -n `grep -o "data12" $f | wc -l`; echo " : $f"; done |sort -n
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/0biasselection/run_lbn_analyze.C b/Event/EventOverlay/EventOverlayJobTransforms/scripts/0biasselection/run_lbn_analyze.C
new file mode 100644
index 0000000000000000000000000000000000000000..90c83cf0ec3894dc4defed81dd7b09134ee93a6e
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/0biasselection/run_lbn_analyze.C
@@ -0,0 +1,14 @@
+/*
+  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+*/
+
+{
+ gROOT->ProcessLine(".L lbn_analyze.C+");
+
+ for (int i=0; i<50; ++i){//number of zerobias streams to make
+   //gSystem->Sleep(2000);//to get new random number seed
+   lbn_analyze(i,50000);//number of events per zerobias stream
+ }
+
+}
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/BSFilter_trf.py b/Event/EventOverlay/EventOverlayJobTransforms/scripts/BSFilter_trf.py
new file mode 100755
index 0000000000000000000000000000000000000000..2ce1c204dc88cf3d52cfb1b3561f6c3e9a926a53
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/BSFilter_trf.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+__doc__ = """Filter BS data based on trigger bit"""
+
+from PyJobTransformsCore.trf import *
+from PyJobTransformsCore.full_trfarg import *
+from PyJobTransformsCore.trfutil import *
+from PyJobTransformsCore.TransformConfig import *
+import PyJobTransformsCore.basic_trfarg as trfarg
+
+from RecJobTransforms.RecConfig import recConfig
+
+class OutputBSFilterFileArg(StringArg):
+    """The output file prefix"""
+    def __init__(self,help='default',name='OutputBSFilterFile'):
+        StringArg.__init__(self,help,name)
+        
+    def isFullArgument(self):
+        return True
+
+class EventIdFileArg(StringArg):
+    """The output text file for EventIdModifierSvc lines"""
+    def __init__(self,help='default',name='EventIdFile'):
+        StringArg.__init__(self,help,name)
+        
+    def isFullArgument(self):
+        return True
+
+class FilterFileArg(StringArg):
+    """The name of the text file for filtering (HI) events"""
+    def __init__(self,help='default',name='FilterFile'):
+        StringArg.__init__(self,help,name)
+        
+    def isFullArgument(self):
+        return True
+ 
+class TriggerBitArg(IntegerArg):
+    """The trigger bit to select"""
+    def __init__(self,help='default',name='TriggerBit'):
+        IntegerArg.__init__(self,help,name)
+        
+    def isFullArgument(self):
+        return True
+
+class PostIncludeArg(JobOptionsArg):
+    """Joboptions file with user settings, to run after the job itself"""
+    def __init__(self,help='default',package='',name='default'):
+        # split comma separated string into list
+        if type(package) == str: package = package.split(',')
+        # always add 'EventOverlayJobTransforms' package (which contains common postIncludeConfig files)
+        commonPack = 'EventOverlayJobTransforms'
+        if commonPack not in package: package.append(commonPack)
+        JobOptionsArg.__init__(self,help=help,package=package,name=name)
+        self.__config = None
+
+    def isFullArgument(self):
+        return True
+
+    
+class BSFilterJobTransform( JobTransform ):
+    def __init__(self):
+        JobTransform.__init__(self,
+                              authors = [ Author('Andrew Haas', 'ahaas@cern.ch') ] ,
+                              skeleton='EventOverlayJobTransforms/skeleton.BSFilter.py' ,
+                              help = __doc__,
+                              config = recConfig )
+
+        #add arguments
+        self.add( InputBSFileArg() )
+        self.add( OutputBSFilterFileArg() )
+        self.add( EventIdFileArg(), default="" )
+        self.add( FilterFileArg(), default="" )
+        self.add( TriggerBitArg(), default=-1 )
+        self.add( MaxEventsArg(), default=-1 )
+        self.add( SkipEventsArg(), default=0 )
+        self.add( PostIncludeArg(), default='NONE' ) 
+        
+        #add other features
+        self.add( SQLiteSupport() )
+
+
+# execute it if not imported
+if __name__ == '__main__':
+    trf = BSFilterJobTransform()
+    sys.exit(trf.exeSysArgs().exitCode())
+
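+# Example invocation (the same command the BSFw.py / BSFw_HI.py wrappers below
+# construct; file names are placeholders):
+#   BSFilter_trf.py inputBSFile=zerobias.data.RAW EventIdFile=events.txt \
+#     OutputBSFilterFile=simpledummy.data.RAW TriggerBit=5 skipEvents=0 maxEvents=100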
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/BSFw.py b/Event/EventOverlay/EventOverlayJobTransforms/scripts/BSFw.py
new file mode 100755
index 0000000000000000000000000000000000000000..f9b240e116573284eba6b08a68ab1b87f5cb751f
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/BSFw.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+__doc__ = """Wrapper for BSFilter for ZeroBias overlay"""
+
+import sys
+import re
+import os, commands
+
+#ListOfDefaultPositionalKeys = ['inputEvgenFile','EventIdFile','OutputBSFilterFile','PostInclude','TriggerBit','ZeroBiasFile','outputRAWFile']
+ListOfDefaultPositionalKeys = ['inputEvgenFile','PostInclude','TriggerBit','pileupBSFile','outputEvgenFile']
+
+# execute it if not imported
+if __name__ == '__main__':
+
+    argMap = {}
+    for tmpKeyVal in sys.argv[1:]:
+        try:
+            tmpMatch = re.search('^([^=]+)=(.+)$',tmpKeyVal)
+            if tmpMatch != None:
+                mapKey = tmpMatch.group(1)
+                mapVal = tmpMatch.group(2)
+                if mapKey in ['ipileupBSFile']:
+                    # convert to list
+                    argMap[mapKey] = mapVal.split(',')
+                else:
+                    # convert to int
+                    try:
+                        argMap[mapKey] = eval(mapVal)
+                    except:
+                        # use string
+                        argMap[mapKey] = mapVal
+        except:
+            pass
+
+    print "arguments : " + str(sys.argv[1:])
+    print "arg map   : " + str(argMap)
+
+    #print "zeroBiasFileName", argMap['ZeroBiasFile']
+
+    zeroBiasFileName=argMap['pileupBSFile']
+    triggerBit=argMap['TriggerBit']
+    
+    # execute original trf
+    # EventIdFile=events.txt
+    com = "BSFilter_trf.py inputBSFile=%s EventIdFile=events.txt OutputBSFilterFile=simpledummy.data.RAW TriggerBit=%d skipEvents=0 maxEvents=100" % (zeroBiasFileName,triggerBit)    
+
+    retStat = os.system(com)
+
+    inputEvgenFile=argMap['inputEvgenFile']
+    outputEvgenFile=argMap['outputEvgenFile']
+    
+    try:
+        print "renaming %s to %s" % (inputEvgenFile, outputEvgenFile)
+        os.rename(inputEvgenFile, outputEvgenFile)
+    except:
+        pass
+            
+    sys.exit(retStat % 255)
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/BSFw_HI.py b/Event/EventOverlay/EventOverlayJobTransforms/scripts/BSFw_HI.py
new file mode 100755
index 0000000000000000000000000000000000000000..935ceaff1801f7fd154bb2a0da5a1866857f3a84
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/BSFw_HI.py
@@ -0,0 +1,75 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+__doc__ = """Wrapper for BSFilter for ZeroBias overlay on Heavy Ions"""
+
+import sys
+import re
+import os, commands
+
+
+ListOfDefaultPositionalKeys = ['inputEvgenFile','PostInclude','TriggerBit','pileupBSFile','extraBSFile','outputEvgenFile','filterFile']
+
+# execute it if not imported
+if __name__ == '__main__':
+
+    argMap = {}
+    for tmpKeyVal in sys.argv[1:]:
+        try:
+            tmpMatch = re.search('^([^=]+)=(.+)$',tmpKeyVal)
+            if tmpMatch != None:
+                mapKey = tmpMatch.group(1)
+                mapVal = tmpMatch.group(2)
+                if mapKey in ['ipileupBSFile']:
+                    # convert to list
+                    argMap[mapKey] = mapVal.split(',')
+                else:
+                    # convert to int
+                    try:
+                        argMap[mapKey] = eval(mapVal)
+                    except:
+                        # use string
+                        argMap[mapKey] = mapVal
+        except:
+            pass
+
+    print "arguments : " + str(sys.argv[1:])
+    print "arg map   : " + str(argMap)
+
+
+    zeroBiasFileName=argMap['pileupBSFile']
+    triggerBit=argMap['TriggerBit']
+
+    #unpack the tarball with vtx and filter text files.
+    extraBSTarball=argMap['extraBSFile']
+    comt="tar xvzf %s" %(extraBSTarball)
+    retStat = os.system(comt)
+    if retStat:
+        sys.exit(retStat % 255)
+
+    #BSFilter_trf.py inputBSFile=HIMinBiasOverlay.RAW OutputBSFilterFile=simplefiltered.RAW EventIdFile=events_orig.txt filterfile=filter.txt ; BSFilter_trf.py inputBSFile=simplefiltered.RAW OutputBSFilterFile=simpledummy.RAW EventIdFile=events.txt TriggerBit=5 skipEvents=0 maxEvents=100 ; 
+
+    # execute original trf
+    # EventIdFile=events.txt
+    com1 = "BSFilter_trf.py inputBSFile=%s EventIdFile=events_orig.txt OutputBSFilterFile=simplefiltered.data.RAW filterfile=%s" % (zeroBiasFileName,argMap['filterFile'])
+    retStat = os.system(com1)
+    if retStat:
+        sys.exit(retStat % 255)
+
+    com2 = "BSFilter_trf.py inputBSFile=simplefiltered.data.RAW EventIdFile=events.txt OutputBSFilterFile=simpledummy.data.RAW TriggerBit=%d skipEvents=0 maxevents=100" % (triggerBit)    
+    retStat = os.system(com2)
+    if retStat:
+        sys.exit(retStat % 255)
+
+    inputEvgenFile=argMap['inputEvgenFile']
+    outputEvgenFile=argMap['outputEvgenFile']
+    
+    try:
+        print "renaming %s to %s" % (inputEvgenFile, outputEvgenFile)
+        os.rename(inputEvgenFile, outputEvgenFile)
+    except:
+        pass
+            
+    sys.exit(retStat % 255)
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/BSOverlayFilter_tf.py b/Event/EventOverlay/EventOverlayJobTransforms/scripts/BSOverlayFilter_tf.py
new file mode 100755
index 0000000000000000000000000000000000000000..73b68b5cc0c42871337ca0b9248b4d5dcbd0a0a8
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/BSOverlayFilter_tf.py
@@ -0,0 +1,51 @@
+#! /usr/bin/env python
+
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+## Transform for preparation of BS overlay
+# @version $Id: BSOverlayFilter_tf.py 634748 2014-12-09 14:59:16Z jchapman $ 
+
+import os.path
+import sys
+import time
+
+import logging
+
+# Setup core logging here
+from PyJobTransforms.trfLogger import msg
+msg.info('logging set in %s' % sys.argv[0])
+
+from PyJobTransforms.transform import transform
+from EventOverlayJobTransforms.overlayTransformUtils import BSJobSplitterExecutor
+from PyJobTransforms.trfArgs import addAthenaArguments, addDetectorArguments
+from PyJobTransforms.trfDecorators import stdTrfExceptionHandler, sigUsrStackTrace
+
+ListOfDefaultPositionalKeys=['--amiConfig', '--amiMetadataTag', '--asetup', '--athena', '--athenaopts', '--checkEventCount', '--command', '--env', '--eventAcceptanceEfficiency', '--execOnly', '--ignoreErrors', '--ignoreFiles', '--ignorePatterns', '--inputBSFile', '--jobNumber', '--lumiBlockMapFile', '--maxEvents', '--maxFilesPerSubjob', '--noimf', '--notcmalloc', '--outputBS_SKIMFile', '--overlayConfigFile', '--postExec', '--postInclude', '--preExec', '--preInclude', '--reportName', '--reportType', '--showGraph', '--showPath', '--showSteps', '--skipEvents', '--skipFileValidation', '--skipInputFileValidation', '--skipOutputFileValidation', '--triggerBit']
+
+@stdTrfExceptionHandler
+@sigUsrStackTrace
+def main():
+
+    msg.info('This is %s' % sys.argv[0])
+
+    trf = getTransform()
+    trf.parseCmdLineArgs(sys.argv[1:])
+    trf.execute()
+    trf.generateReport()
+
+    msg.info("%s stopped at %s, trf exit code %d" % (sys.argv[0], time.asctime(), trf.exitCode))
+    sys.exit(trf.exitCode)
+
+
+def getTransform():
+    executorSet = set()
+    from EventOverlayJobTransforms.overlayTransformUtils import addOverlayBSFilterSubstep, addOverlayBSFilterArguments
+    addOverlayBSFilterSubstep(executorSet)
+    trf = transform(executor = executorSet, description = 'Filter BS data based on trigger bit')
+    addAthenaArguments(trf.parser)
+    addOverlayBSFilterArguments(trf.parser)
+    return trf
+
+
+if __name__ == '__main__':
+    main()
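+
+# Example (new-style transform; arguments as listed in ListOfDefaultPositionalKeys,
+# values are placeholders):
+#   BSOverlayFilter_tf.py --inputBSFile zerobias.data.RAW --outputBS_SKIMFile skimmed.RAW \
+#     --triggerBit 240 --maxFilesPerSubjob 5 --maxEvents 100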
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/BS_multipleSelector_trf.py b/Event/EventOverlay/EventOverlayJobTransforms/scripts/BS_multipleSelector_trf.py
new file mode 100755
index 0000000000000000000000000000000000000000..5b97c6426a53a2d85bd0517df0bbb675ac09ebab
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/BS_multipleSelector_trf.py
@@ -0,0 +1,582 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+__doc__ = """Filter BS data based on trigger bit and lbn_map and write to multiple output streams"""
+
+from PyJobTransformsCore.trf import *
+from PyJobTransformsCore.full_trfarg import *
+from PyJobTransformsCore.trfutil import *
+from PyJobTransformsCore.TransformConfig import *
+import PyJobTransformsCore.basic_trfarg as trfarg
+
+from RecJobTransforms.RecConfig import recConfig
+
+class OutputBSFilterFileArg0(StringArg):
+    """The output file prefix0"""
+    def __init__(self,help='default',name='OutputBSFilterFile0'):
+        StringArg.__init__(self,help,name)
+        
+    def isFullArgument(self):
+        return True
+    
+class OutputBSFilterFileArg1(StringArg):
+    """The output file prefix1"""
+    def __init__(self,help='default',name='OutputBSFilterFile1'):
+        StringArg.__init__(self,help,name)
+        
+    def isFullArgument(self):
+        return True
+    
+class OutputBSFilterFileArg2(StringArg):
+    """The output file prefix2"""
+    def __init__(self,help='default',name='OutputBSFilterFile2'):
+        StringArg.__init__(self,help,name)
+        
+    def isFullArgument(self):
+        return True
+    
+class OutputBSFilterFileArg3(StringArg):
+    """The output file prefix3"""
+    def __init__(self,help='default',name='OutputBSFilterFile3'):
+        StringArg.__init__(self,help,name)
+        
+    def isFullArgument(self):
+        return True
+    
+class OutputBSFilterFileArg4(StringArg):
+    """The output file prefix4"""
+    def __init__(self,help='default',name='OutputBSFilterFile4'):
+        StringArg.__init__(self,help,name)
+        
+    def isFullArgument(self):
+        return True
+
+#####################################
+#from "outputclasses.py" made by:
+#cd Event/EventOverlay/EventOverlayJobTransforms/scripts/
+#./makeoutputclasses.sh
+
+class OutputBSFilterFileArg5(StringArg):
+        """The output file prefix5"""
+        def __init__(self,help='default',name='OutputBSFilterFile5'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg6(StringArg):
+        """The output file prefix6"""
+        def __init__(self,help='default',name='OutputBSFilterFile6'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg7(StringArg):
+        """The output file prefix7"""
+        def __init__(self,help='default',name='OutputBSFilterFile7'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg8(StringArg):
+        """The output file prefix8"""
+        def __init__(self,help='default',name='OutputBSFilterFile8'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg9(StringArg):
+        """The output file prefix9"""
+        def __init__(self,help='default',name='OutputBSFilterFile9'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg10(StringArg):
+        """The output file prefix10"""
+        def __init__(self,help='default',name='OutputBSFilterFile10'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg11(StringArg):
+        """The output file prefix11"""
+        def __init__(self,help='default',name='OutputBSFilterFile11'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg12(StringArg):
+        """The output file prefix12"""
+        def __init__(self,help='default',name='OutputBSFilterFile12'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg13(StringArg):
+        """The output file prefix13"""
+        def __init__(self,help='default',name='OutputBSFilterFile13'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg14(StringArg):
+        """The output file prefix14"""
+        def __init__(self,help='default',name='OutputBSFilterFile14'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg15(StringArg):
+        """The output file prefix15"""
+        def __init__(self,help='default',name='OutputBSFilterFile15'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg16(StringArg):
+        """The output file prefix16"""
+        def __init__(self,help='default',name='OutputBSFilterFile16'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg17(StringArg):
+        """The output file prefix17"""
+        def __init__(self,help='default',name='OutputBSFilterFile17'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg18(StringArg):
+        """The output file prefix18"""
+        def __init__(self,help='default',name='OutputBSFilterFile18'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg19(StringArg):
+        """The output file prefix19"""
+        def __init__(self,help='default',name='OutputBSFilterFile19'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg20(StringArg):
+        """The output file prefix20"""
+        def __init__(self,help='default',name='OutputBSFilterFile20'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg21(StringArg):
+        """The output file prefix21"""
+        def __init__(self,help='default',name='OutputBSFilterFile21'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg22(StringArg):
+        """The output file prefix22"""
+        def __init__(self,help='default',name='OutputBSFilterFile22'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg23(StringArg):
+        """The output file prefix23"""
+        def __init__(self,help='default',name='OutputBSFilterFile23'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg24(StringArg):
+        """The output file prefix24"""
+        def __init__(self,help='default',name='OutputBSFilterFile24'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg25(StringArg):
+        """The output file prefix25"""
+        def __init__(self,help='default',name='OutputBSFilterFile25'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg26(StringArg):
+        """The output file prefix26"""
+        def __init__(self,help='default',name='OutputBSFilterFile26'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg27(StringArg):
+        """The output file prefix27"""
+        def __init__(self,help='default',name='OutputBSFilterFile27'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg28(StringArg):
+        """The output file prefix28"""
+        def __init__(self,help='default',name='OutputBSFilterFile28'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg29(StringArg):
+        """The output file prefix29"""
+        def __init__(self,help='default',name='OutputBSFilterFile29'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg30(StringArg):
+        """The output file prefix30"""
+        def __init__(self,help='default',name='OutputBSFilterFile30'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg31(StringArg):
+        """The output file prefix31"""
+        def __init__(self,help='default',name='OutputBSFilterFile31'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg32(StringArg):
+        """The output file prefix32"""
+        def __init__(self,help='default',name='OutputBSFilterFile32'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg33(StringArg):
+        """The output file prefix33"""
+        def __init__(self,help='default',name='OutputBSFilterFile33'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg34(StringArg):
+        """The output file prefix34"""
+        def __init__(self,help='default',name='OutputBSFilterFile34'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg35(StringArg):
+        """The output file prefix35"""
+        def __init__(self,help='default',name='OutputBSFilterFile35'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg36(StringArg):
+        """The output file prefix36"""
+        def __init__(self,help='default',name='OutputBSFilterFile36'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg37(StringArg):
+        """The output file prefix37"""
+        def __init__(self,help='default',name='OutputBSFilterFile37'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg38(StringArg):
+        """The output file prefix38"""
+        def __init__(self,help='default',name='OutputBSFilterFile38'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg39(StringArg):
+        """The output file prefix39"""
+        def __init__(self,help='default',name='OutputBSFilterFile39'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg40(StringArg):
+        """The output file prefix40"""
+        def __init__(self,help='default',name='OutputBSFilterFile40'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg41(StringArg):
+        """The output file prefix41"""
+        def __init__(self,help='default',name='OutputBSFilterFile41'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg42(StringArg):
+        """The output file prefix42"""
+        def __init__(self,help='default',name='OutputBSFilterFile42'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg43(StringArg):
+        """The output file prefix43"""
+        def __init__(self,help='default',name='OutputBSFilterFile43'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg44(StringArg):
+        """The output file prefix44"""
+        def __init__(self,help='default',name='OutputBSFilterFile44'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg45(StringArg):
+        """The output file prefix45"""
+        def __init__(self,help='default',name='OutputBSFilterFile45'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg46(StringArg):
+        """The output file prefix46"""
+        def __init__(self,help='default',name='OutputBSFilterFile46'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg47(StringArg):
+        """The output file prefix47"""
+        def __init__(self,help='default',name='OutputBSFilterFile47'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg48(StringArg):
+        """The output file prefix48"""
+        def __init__(self,help='default',name='OutputBSFilterFile48'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg49(StringArg):
+        """The output file prefix49"""
+        def __init__(self,help='default',name='OutputBSFilterFile49'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+##########
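+
+# The 50 OutputBSFilterFileArgN classes above are what makeoutputclasses.sh
+# generates.  A minimal sketch of how equivalent classes could instead be built
+# at import time with type() -- illustration only, not used by this transform:
+#
+#   def _makeOutputBSFilterFileArg(i):
+#       argname = 'OutputBSFilterFile%d' % i
+#       def _init(self, help='default', name=argname):
+#           StringArg.__init__(self, help, name)
+#       return type('OutputBSFilterFileArg%d' % i, (StringArg,),
+#                   {'__doc__': 'The output file prefix%d' % i,
+#                    '__init__': _init,
+#                    'isFullArgument': lambda self: True})
+#
+#   _outputArgs = [_makeOutputBSFilterFileArg(i) for i in range(50)]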
+
+class EventIdFileArg(StringArg):
+    """The output text file for EventIdModifierSvc lines"""
+    def __init__(self,help='default',name='EventIdFile'):
+        StringArg.__init__(self,help,name)        
+    def isFullArgument(self):
+        return True
+
+class LbnMapFileArg(StringArg):
+    """The lbn_map_file for the selector to read in"""
+    def __init__(self,help='default',name='LbnMapFile'):
+        StringArg.__init__(self,help,name)        
+    def isFullArgument(self):
+        return True
+
+class UniqArg(StringArg):
+    """A unique ID for the output files from this job"""
+    def __init__(self,help='default',name='uniq'):
+        StringArg.__init__(self,help,name)        
+    def isFullArgument(self):
+        return True
+    
+class TriggerBitArg(IntegerArg):
+    """The trigger bit to select"""
+    def __init__(self,help='default',name='TriggerBit'):
+        IntegerArg.__init__(self,help,name)        
+    def isFullArgument(self):
+        return True
+
+class NoutputsArg(IntegerArg):
+    """The number of outputstreams"""
+    def __init__(self,help='default',name='Noutputs'):
+        IntegerArg.__init__(self,help,name)        
+    def isFullArgument(self):
+        return True
+
+class MaxEventsVecArg(StringArg):
+    """A vector of maxevent args for each stream"""
+    def __init__(self,help='default',name='maxeventsvec'):
+        StringArg.__init__(self,help,name)        
+    def isFullArgument(self):
+        return True
+
+class SkipEventsVecArg(StringArg):
+    """A vector of skipevent args for each stream"""
+    def __init__(self,help='default',name='skipeventsvec'):
+        StringArg.__init__(self,help,name)        
+    def isFullArgument(self):
+        return True
+
+class NoutputsVecArg(StringArg):
+    """A vector of noutputs args for each stream"""
+    def __init__(self,help='default',name='noutputsvec'):
+        StringArg.__init__(self,help,name)        
+    def isFullArgument(self):
+        return True
+
+class StreamVecArg(StringArg):
+    """A vector of stream indices for each stream"""
+    def __init__(self,help='default',name='streamvec'):
+        StringArg.__init__(self,help,name)        
+    def isFullArgument(self):
+        return True
+
+class PostIncludeArg(JobOptionsArg):
+    """Joboptions file with user settings, to run after the job itself"""
+    def __init__(self,help='default',package='',name='default'):
+        # split comma separated string into list
+        if type(package) == str: package = package.split(',')
+        # always add 'EventOverlayJobTransforms' package (which contains common postIncludeConfig files)
+        commonPack = 'EventOverlayJobTransforms'
+        if commonPack not in package: package.append(commonPack)
+        JobOptionsArg.__init__(self,help=help,package=package,name=name)
+        self.__config = None
+    def isFullArgument(self):
+        return True
+
+    
+class BSFilterJobTransform( JobTransform ):
+    def __init__(self):
+        JobTransform.__init__(self,
+                              authors = [ Author('Andrew Haas', 'ahaas@cern.ch') ] ,
+                              skeleton='EventOverlayJobTransforms/skeleton.BS_multipleSelector.py' ,
+                              help = __doc__,
+                              config = recConfig )
+
+        #add arguments
+        self.add( InputBSFileArg() )
+
+        self.add( OutputBSFilterFileArg0(),default="" )
+        self.add( OutputBSFilterFileArg1(),default="" )
+        self.add( OutputBSFilterFileArg2(),default="" )
+        self.add( OutputBSFilterFileArg3(),default="" )
+        self.add( OutputBSFilterFileArg4(),default="" )
+        self.add( OutputBSFilterFileArg5(),default="" )
+        self.add( OutputBSFilterFileArg6(),default="" )
+        self.add( OutputBSFilterFileArg7(),default="" )
+        self.add( OutputBSFilterFileArg8(),default="" )
+        self.add( OutputBSFilterFileArg9(),default="" )
+        self.add( OutputBSFilterFileArg10(),default="" )
+        self.add( OutputBSFilterFileArg11(),default="" )
+        self.add( OutputBSFilterFileArg12(),default="" )
+        self.add( OutputBSFilterFileArg13(),default="" )
+        self.add( OutputBSFilterFileArg14(),default="" )
+        self.add( OutputBSFilterFileArg15(),default="" )
+        self.add( OutputBSFilterFileArg16(),default="" )
+        self.add( OutputBSFilterFileArg17(),default="" )
+        self.add( OutputBSFilterFileArg18(),default="" )
+        self.add( OutputBSFilterFileArg19(),default="" )
+        self.add( OutputBSFilterFileArg20(),default="" )
+        self.add( OutputBSFilterFileArg21(),default="" )
+        self.add( OutputBSFilterFileArg22(),default="" )
+        self.add( OutputBSFilterFileArg23(),default="" )
+        self.add( OutputBSFilterFileArg24(),default="" )
+        self.add( OutputBSFilterFileArg25(),default="" )
+        self.add( OutputBSFilterFileArg26(),default="" )
+        self.add( OutputBSFilterFileArg27(),default="" )
+        self.add( OutputBSFilterFileArg28(),default="" )
+        self.add( OutputBSFilterFileArg29(),default="" )
+        self.add( OutputBSFilterFileArg30(),default="" )
+        self.add( OutputBSFilterFileArg31(),default="" )
+        self.add( OutputBSFilterFileArg32(),default="" )
+        self.add( OutputBSFilterFileArg33(),default="" )
+        self.add( OutputBSFilterFileArg34(),default="" )
+        self.add( OutputBSFilterFileArg35(),default="" )
+        self.add( OutputBSFilterFileArg36(),default="" )
+        self.add( OutputBSFilterFileArg37(),default="" )
+        self.add( OutputBSFilterFileArg38(),default="" )
+        self.add( OutputBSFilterFileArg39(),default="" )
+        self.add( OutputBSFilterFileArg40(),default="" )
+        self.add( OutputBSFilterFileArg41(),default="" )
+        self.add( OutputBSFilterFileArg42(),default="" )
+        self.add( OutputBSFilterFileArg43(),default="" )
+        self.add( OutputBSFilterFileArg44(),default="" )
+        self.add( OutputBSFilterFileArg45(),default="" )
+        self.add( OutputBSFilterFileArg46(),default="" )
+        self.add( OutputBSFilterFileArg47(),default="" )
+        self.add( OutputBSFilterFileArg48(),default="" )
+        self.add( OutputBSFilterFileArg49(),default="" )
+
+        self.add( EventIdFileArg(), default="" )
+        self.add( LbnMapFileArg(), default="lbn_anal_map.txt" )
+        self.add( UniqArg(), default="" )
+        self.add( TriggerBitArg(), default=5 )
+        self.add( NoutputsArg(), default=50 )
+        self.add( MaxEventsArg() )
+        self.add( SkipEventsArg(), default=0  )
+        self.add( PostIncludeArg(), default='NONE' )
+        self.add( MaxEventsVecArg(), default='' )
+        self.add( SkipEventsVecArg(), default='' )
+        self.add( NoutputsVecArg(), default='' )
+        self.add( StreamVecArg(), default='' )
+        
+        #add other features
+        self.add( SQLiteSupport() )
+
+
+# execute it if not imported
+if __name__ == '__main__':
+    trf = BSFilterJobTransform()
+    sys.exit(trf.exeSysArgs().exitCode())
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/G4HitMerge_trf.py b/Event/EventOverlay/EventOverlayJobTransforms/scripts/G4HitMerge_trf.py
new file mode 100755
index 0000000000000000000000000000000000000000..17b42b13d49e4e83b8ff8f88faa7b1c0d4c27898
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/G4HitMerge_trf.py
@@ -0,0 +1,81 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+__doc__ = """Merge two G4 HITS files into one file, adding the HITS from event 1 in one file to those from event 1 in the other file, 2 to 2, etc."""
+
+from PyJobTransformsCore.trf import *
+from PyJobTransformsCore.full_trfarg import *
+from PyJobTransformsCore.trfutil import *
+from PyJobTransformsCore.TransformConfig import *
+import PyJobTransformsCore.basic_trfarg as trfarg
+
+from RecJobTransforms.RecConfig import recConfig
+
+class InputHITS1FileArg(StringArg):
+    """The first input HITS file"""
+    def __init__(self,help='default',name='inputHITS1File'):
+        StringArg.__init__(self,help,name)
+        
+    def isFullArgument(self):
+        return True
+
+class InputHITS2FileArg(StringArg):
+    """The second input HITS file"""
+    def __init__(self,help='default',name='inputHITS2File'):
+        StringArg.__init__(self,help,name)
+        
+    def isFullArgument(self):
+        return True
+
+class OutputHITSFileArg(StringArg):
+    """The output HITS file"""
+    def __init__(self,help='default',name='outputHITSFile'):
+        StringArg.__init__(self,help,name)
+        
+    def isFullArgument(self):
+        return True
+
+class PostIncludeArg(JobOptionsArg):
+    """Joboptions file with user settings, to run after the job itself"""
+    def __init__(self,help='default',package='',name='default'):
+        # split comma separated string into list
+        if type(package) == str: package = package.split(',')
+        # always add 'EventOverlayJobTransforms' package (which contains common postIncludeConfig files)
+        commonPack = 'EventOverlayJobTransforms'
+        if commonPack not in package: package.append(commonPack)
+        JobOptionsArg.__init__(self,help=help,package=package,name=name)
+        self.__config = None
+
+    def isFullArgument(self):
+        return True
+
+    
+class G4HitMergeJobTransform( JobTransform ):
+    def __init__(self):
+        JobTransform.__init__(self,
+                              authors = [ Author('Andrew Haas', 'ahaas@cern.ch'), Author('William Lockman','William.Lockman@cern.ch') ] ,
+                              skeleton='EventOverlayJobTransforms/skeleton.G4HitMerge.py' ,
+                              help = __doc__,
+                              config = recConfig )
+
+        #add arguments
+        self.add( InputHITS1FileArg() )
+        self.add( InputHITS2FileArg() )
+        self.add( OutputHITSFileArg() )
+        self.add( MaxEventsArg() )
+        self.add( SkipEventsArg() )
+        self.add( GeometryVersionArg() )
+        self.add( DBReleaseArg(), default='NONE' )
+        self.add( ConditionsTagArg(), default='NONE' )
+        self.add( PostIncludeArg(), default='NONE' ) 
+        
+        #add other features
+        self.add( SQLiteSupport() )
+
+
+# execute it if not imported
+if __name__ == '__main__':
+    trf = G4HitMergeJobTransform()
+    sys.exit(trf.exeSysArgs().exitCode())
+
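+# Example invocation (argument names follow the classes added above; file names
+# and the geometry tag are placeholders):
+#   G4HitMerge_trf.py inputHITS1File=signal.HITS.pool.root inputHITS2File=zerobias.HITS.pool.root \
+#     outputHITSFile=merged.HITS.pool.root maxEvents=-1 skipEvents=0 geometryVersion=<geo tag>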
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/HITAGprinter.C b/Event/EventOverlay/EventOverlayJobTransforms/scripts/HITAGprinter.C
new file mode 100644
index 0000000000000000000000000000000000000000..9bb5822a8ddb0e8d8961bc408d0ccff2a325d897
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/HITAGprinter.C
@@ -0,0 +1,83 @@
+/*
+  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+*/
+
+#define HITAGprinter_cxx
+#include "HITAGprinter.h"
+#include <TH2.h>
+#include <TStyle.h>
+#include <TCanvas.h>
+#include <map>
+
+std::map< int , std::map< int, int> > runeventmap;
+
+void HITAGprinter::Loop()
+{
+//   In a ROOT session, you can do:
+//      Root > .L HITAGprinter.C
+//      Root > HITAGprinter t
+//      Root > t.GetEntry(12); // Fill t data members with entry number 12
+//      Root > t.Show();       // Show values of entry 12
+//      Root > t.Show(16);     // Read and show values of entry 16
+//      Root > t.Loop();       // Loop on all entries
+//
+
+//     This is the loop skeleton where:
+//    jentry is the global entry number in the chain
+//    ientry is the entry number in the current Tree
+//  Note that the argument to GetEntry must be:
+//    jentry for TChain::GetEntry
+//    ientry for TTree::GetEntry and TBranch::GetEntry
+//
+//       To read only selected branches, Insert statements like:
+// METHOD1:
+//    fChain->SetBranchStatus("*",0);  // disable all branches
+//    fChain->SetBranchStatus("branchname",1);  // activate branchname
+// METHOD2: replace line
+//    fChain->GetEntry(jentry);       //read all branches
+//by  b_branchname->GetEntry(ientry); //read only this branch
+   if (fChain == 0) return;
+
+   FILE *vfile=fopen("HI_vtx.txt","w");
+   FILE *ffile=fopen("HI_filter.txt","w");
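+   //Output formats (matching the fprintf calls below):
+   //  HI_vtx.txt    : run event VtxX VtxY VtxZ
+   //  HI_filter.txt : run event (bit 29 of L1PassedTrigMaskTBP5) NVtx MBTSTimeDiff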
+
+   Long64_t nentries = fChain->GetEntriesFast();
+   Long64_t nbytes = 0, nb = 0, passed=0,total=0,duplicate=0;
+   for (Long64_t jentry=0; jentry<nentries;jentry++) {
+      Long64_t ientry = LoadTree(jentry);
+      if (ientry < 0) break;
+
+      MBTSTimeDiff=0;//default in case it's not there
+      L1PassedTrigMaskTBP5 = 1<<29;//otherwise no events pass
+
+      nb = fChain->GetEntry(jentry);   nbytes += nb;
+      // if (Cut(ientry) < 0) continue;
+      
+      //run event vx vy vz
+      if (NVtx<1) {
+	//printf("Warning: %d %d NVtx=%d, setting to 0 VtxX,VtxY,VtxZ = %f %f %f\n",RunNumber,EventNumber,NVtx,VtxX,VtxY,VtxZ);
+	VtxX=0;VtxY=0;VtxZ=0;
+      }
+
+      //check if we already have this event
+      if (runeventmap[RunNumber][EventNumber]==7744) {
+	++duplicate;
+	continue;
+      }
+      runeventmap[RunNumber][EventNumber]=7744;
+
+      fprintf(vfile,"%d %d %f %f %f\n",RunNumber,EventNumber,VtxX,VtxY,VtxZ);
+      fprintf(ffile,"%d %d %d %d %f\n",RunNumber,EventNumber,(L1PassedTrigMaskTBP5>>29)&0x1,NVtx,MBTSTimeDiff);
+
+      ++total;
+      //require the trigger bit (bit 29 of TBP word 5), exactly one vertex, and small MBTS time difference
+      if ( ((L1PassedTrigMaskTBP5>>29)&0x1)==1 && NVtx==1 && fabs(MBTSTimeDiff)<3 ) ++passed;
+      
+   }//Loop over events
+
+   printf("passed %d out of %d events, with %d duplicates skipped\n",passed,total,duplicate);
+
+   fclose(vfile);
+   fclose(ffile);
+
+}//Loop()
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/HITAGprinter.h b/Event/EventOverlay/EventOverlayJobTransforms/scripts/HITAGprinter.h
new file mode 100644
index 0000000000000000000000000000000000000000..3966ba6a7d183637e6be182fc5f074d5641c870d
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/HITAGprinter.h
@@ -0,0 +1,1105 @@
+/*
+  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+*/
+
+//////////////////////////////////////////////////////////
+// This class has been automatically generated on
+// Thu Oct  6 06:14:07 2011 by ROOT version 5.28/00e
+// from TChain POOLCollectionTree/
+//////////////////////////////////////////////////////////
+
+#ifndef HITAGprinter_h
+#define HITAGprinter_h
+
+#include <TROOT.h>
+#include <TChain.h>
+#include <TFile.h>
+
+class HITAGprinter {
+public :
+   TTree          *fChain;   //!pointer to the analyzed TTree or TChain
+   Int_t           fCurrent; //!current Tree number in a TChain
+
+   // Declaration of leaf types
+   Char_t          StreamESD_ref[153];
+   Char_t          StreamRAW_ref[137];
+   UInt_t          RunNumber;
+   UInt_t          EventNumber;
+   UInt_t          EventTime;
+   UInt_t          EventTimeNanoSec;
+   UInt_t          LumiBlockN;
+   UInt_t          BunchId;
+   UInt_t          PixelFlags;
+   UInt_t          SCTFlags;
+   UInt_t          TRTFlags;
+   UInt_t          LArFlags;
+   UInt_t          TileFlags;
+   UInt_t          MuonFlags;
+   UInt_t          ForwardDetFlags;
+   UInt_t          CoreFlags;
+   Char_t          IsSimulation;
+   Char_t          IsCalibration;
+   Char_t          IsTestBeam;
+   Float_t         RandomNumber;
+   UInt_t          NTrk;
+   UInt_t          NVtx;
+   Float_t         VtxX;
+   Float_t         VtxY;
+   Float_t         VtxZ;
+   Float_t         VtxChiSq;
+   UInt_t          VtxNDOF;
+   Int_t           NVtxTight;
+   Float_t         LooseElectronEta1;
+   Float_t         LooseElectronEta2;
+   Float_t         LooseElectronEta3;
+   Float_t         LooseElectronEta4;
+   Char_t          LooseElectronIsForward1;
+   Char_t          LooseElectronIsForward2;
+   Char_t          LooseElectronIsForward3;
+   Char_t          LooseElectronIsForward4;
+   Float_t         LooseElectronPhi1;
+   Float_t         LooseElectronPhi2;
+   Float_t         LooseElectronPhi3;
+   Float_t         LooseElectronPhi4;
+   Float_t         LooseElectronPt1;
+   Float_t         LooseElectronPt2;
+   Float_t         LooseElectronPt3;
+   Float_t         LooseElectronPt4;
+   UInt_t          LooseElectronTightness1;
+   UInt_t          LooseElectronTightness2;
+   UInt_t          LooseElectronTightness3;
+   UInt_t          LooseElectronTightness4;
+   UInt_t          NLooseElectron;
+   Float_t         LoosePhotonEta1;
+   Float_t         LoosePhotonEta2;
+   Float_t         LoosePhotonPhi1;
+   Float_t         LoosePhotonPhi2;
+   Float_t         LoosePhotonPt1;
+   Float_t         LoosePhotonPt2;
+   UInt_t          LoosePhotonTightness1;
+   UInt_t          LoosePhotonTightness2;
+   UInt_t          NConvertedLoosePhoton;
+   UInt_t          NLoosePhoton;
+   Int_t           LooseMuonCSCHits1;
+   Int_t           LooseMuonCSCHits2;
+   Int_t           LooseMuonCSCHits3;
+   Int_t           LooseMuonCSCHits4;
+   Float_t         LooseMuonEta1;
+   Float_t         LooseMuonEta2;
+   Float_t         LooseMuonEta3;
+   Float_t         LooseMuonEta4;
+   Float_t         LooseMuonIsolEt1;
+   Float_t         LooseMuonIsolEt2;
+   Float_t         LooseMuonIsolEt3;
+   Float_t         LooseMuonIsolEt4;
+   UInt_t          LooseMuonIsolN1;
+   UInt_t          LooseMuonIsolN2;
+   UInt_t          LooseMuonIsolN3;
+   UInt_t          LooseMuonIsolN4;
+   Int_t           LooseMuonMDTHits1;
+   Int_t           LooseMuonMDTHits2;
+   Int_t           LooseMuonMDTHits3;
+   Int_t           LooseMuonMDTHits4;
+   Float_t         LooseMuonPhi1;
+   Float_t         LooseMuonPhi2;
+   Float_t         LooseMuonPhi3;
+   Float_t         LooseMuonPhi4;
+   Float_t         LooseMuonPt1;
+   Float_t         LooseMuonPt2;
+   Float_t         LooseMuonPt3;
+   Float_t         LooseMuonPt4;
+   Int_t           LooseMuonRPCHits1;
+   Int_t           LooseMuonRPCHits2;
+   Int_t           LooseMuonRPCHits3;
+   Int_t           LooseMuonRPCHits4;
+   Int_t           LooseMuonTGCHits1;
+   Int_t           LooseMuonTGCHits2;
+   Int_t           LooseMuonTGCHits3;
+   Int_t           LooseMuonTGCHits4;
+   UInt_t          LooseMuonTightness1;
+   UInt_t          LooseMuonTightness2;
+   UInt_t          LooseMuonTightness3;
+   UInt_t          LooseMuonTightness4;
+   UInt_t          NLooseMuon;
+   Float_t         BJetLikelihood1;
+   Float_t         BJetLikelihood2;
+   Float_t         BJetLikelihood3;
+   Float_t         BJetLikelihood4;
+   Float_t         BJetLikelihood5;
+   Float_t         BJetLikelihood6;
+   Float_t         JetEmFrac1;
+   Float_t         JetEmFrac2;
+   Float_t         JetEmFrac3;
+   Float_t         JetEmFrac4;
+   Float_t         JetEmFrac5;
+   Float_t         JetEmFrac6;
+   Float_t         JetEta1;
+   Float_t         JetEta2;
+   Float_t         JetEta3;
+   Float_t         JetEta4;
+   Float_t         JetEta5;
+   Float_t         JetEta6;
+   Float_t         JetPhi1;
+   Float_t         JetPhi2;
+   Float_t         JetPhi3;
+   Float_t         JetPhi4;
+   Float_t         JetPhi5;
+   Float_t         JetPhi6;
+   Float_t         JetPt1;
+   Float_t         JetPt2;
+   Float_t         JetPt3;
+   Float_t         JetPt4;
+   Float_t         JetPt5;
+   Float_t         JetPt6;
+   Float_t         JetSumET;
+   UInt_t          NBJet;
+   UInt_t          NJet;
+   UInt_t          BunchGroup;
+   ULong64_t       EFPassedTrigMask0;
+   ULong64_t       EFPassedTrigMask1;
+   ULong64_t       EFPassedTrigMask10;
+   ULong64_t       EFPassedTrigMask11;
+   ULong64_t       EFPassedTrigMask12;
+   ULong64_t       EFPassedTrigMask13;
+   ULong64_t       EFPassedTrigMask14;
+   ULong64_t       EFPassedTrigMask15;
+   ULong64_t       EFPassedTrigMask16;
+   ULong64_t       EFPassedTrigMask17;
+   ULong64_t       EFPassedTrigMask18;
+   ULong64_t       EFPassedTrigMask19;
+   ULong64_t       EFPassedTrigMask2;
+   ULong64_t       EFPassedTrigMask20;
+   ULong64_t       EFPassedTrigMask21;
+   ULong64_t       EFPassedTrigMask22;
+   ULong64_t       EFPassedTrigMask23;
+   ULong64_t       EFPassedTrigMask24;
+   ULong64_t       EFPassedTrigMask25;
+   ULong64_t       EFPassedTrigMask26;
+   ULong64_t       EFPassedTrigMask27;
+   ULong64_t       EFPassedTrigMask28;
+   ULong64_t       EFPassedTrigMask29;
+   ULong64_t       EFPassedTrigMask3;
+   ULong64_t       EFPassedTrigMask30;
+   ULong64_t       EFPassedTrigMask31;
+   ULong64_t       EFPassedTrigMask4;
+   ULong64_t       EFPassedTrigMask5;
+   ULong64_t       EFPassedTrigMask6;
+   ULong64_t       EFPassedTrigMask7;
+   ULong64_t       EFPassedTrigMask8;
+   ULong64_t       EFPassedTrigMask9;
+   UInt_t          L1PassedTrigMaskTAP0;
+   UInt_t          L1PassedTrigMaskTAP1;
+   UInt_t          L1PassedTrigMaskTAP2;
+   UInt_t          L1PassedTrigMaskTAP3;
+   UInt_t          L1PassedTrigMaskTAP4;
+   UInt_t          L1PassedTrigMaskTAP5;
+   UInt_t          L1PassedTrigMaskTAP6;
+   UInt_t          L1PassedTrigMaskTAP7;
+   UInt_t          L1PassedTrigMaskTAV0;
+   UInt_t          L1PassedTrigMaskTAV1;
+   UInt_t          L1PassedTrigMaskTAV2;
+   UInt_t          L1PassedTrigMaskTAV3;
+   UInt_t          L1PassedTrigMaskTAV4;
+   UInt_t          L1PassedTrigMaskTAV5;
+   UInt_t          L1PassedTrigMaskTAV6;
+   UInt_t          L1PassedTrigMaskTAV7;
+   UInt_t          L1PassedTrigMaskTBP0;
+   UInt_t          L1PassedTrigMaskTBP1;
+   UInt_t          L1PassedTrigMaskTBP2;
+   UInt_t          L1PassedTrigMaskTBP3;
+   UInt_t          L1PassedTrigMaskTBP4;
+   UInt_t          L1PassedTrigMaskTBP5;
+   UInt_t          L1PassedTrigMaskTBP6;
+   UInt_t          L1PassedTrigMaskTBP7;
+   UInt_t          L2PassedTrigMask0;
+   UInt_t          L2PassedTrigMask1;
+   UInt_t          L2PassedTrigMask10;
+   UInt_t          L2PassedTrigMask11;
+   UInt_t          L2PassedTrigMask12;
+   UInt_t          L2PassedTrigMask13;
+   UInt_t          L2PassedTrigMask14;
+   UInt_t          L2PassedTrigMask15;
+   UInt_t          L2PassedTrigMask16;
+   UInt_t          L2PassedTrigMask17;
+   UInt_t          L2PassedTrigMask18;
+   UInt_t          L2PassedTrigMask19;
+   UInt_t          L2PassedTrigMask2;
+   UInt_t          L2PassedTrigMask20;
+   UInt_t          L2PassedTrigMask21;
+   UInt_t          L2PassedTrigMask22;
+   UInt_t          L2PassedTrigMask23;
+   UInt_t          L2PassedTrigMask24;
+   UInt_t          L2PassedTrigMask25;
+   UInt_t          L2PassedTrigMask26;
+   UInt_t          L2PassedTrigMask27;
+   UInt_t          L2PassedTrigMask28;
+   UInt_t          L2PassedTrigMask29;
+   UInt_t          L2PassedTrigMask3;
+   UInt_t          L2PassedTrigMask30;
+   UInt_t          L2PassedTrigMask31;
+   UInt_t          L2PassedTrigMask4;
+   UInt_t          L2PassedTrigMask5;
+   UInt_t          L2PassedTrigMask6;
+   UInt_t          L2PassedTrigMask7;
+   UInt_t          L2PassedTrigMask8;
+   UInt_t          L2PassedTrigMask9;
+   UInt_t          Level1TriggerType;
+   UInt_t          SFOStreams;
+   Float_t         TrackLead1D0;
+   Float_t         TrackLead1Z0;
+   Float_t         TrackLead1Phi0;
+   Float_t         TrackLead1Theta;
+   Float_t         TrackLead1QOverP;
+   Float_t         TrackLead2D0;
+   Float_t         TrackLead2Z0;
+   Float_t         TrackLead2Phi0;
+   Float_t         TrackLead2Theta;
+   Float_t         TrackLead2QOverP;
+   Float_t         CellEnergySum;
+   Float_t         CellEnergySumEMB;
+   Float_t         CellEnergySumEMEC;
+   Float_t         CellEnergySumHEC;
+   Float_t         CellEnergySumFCAL;
+   Float_t         CellEnergySumTile;
+   Float_t         ClusterEnergySum;
+   Float_t         TopoClusterEt1;
+   Float_t         TopoClusterEta1;
+   Float_t         TopoClusterPhi1;
+   Float_t         CellMissingET;
+   Float_t         CellMissingETPhi;
+   Float_t         MBTSTimeDiff;
+   Float_t         LArECTimeDiff;
+   Float_t         TRTEventPhase;
+   Int_t           NPixelTracks;
+   Int_t           NSCTTracks;
+   Int_t           NTRTTracks;
+   Int_t           NMooreSegs;
+   Int_t           NMboySegs;
+   Int_t           NInnerMboySegs;
+   Int_t           NHitsInnerMboySegs;
+   Int_t           NInnerMuSegs;
+   Int_t           NHitsInnerMooreSegs;
+   Int_t           NSCTSPs;
+   Int_t           NPixelSPs;
+   Int_t           NTRTDCs;
+   Int_t           NTRTHtDCs;
+   Int_t           NMDTHits;
+   Int_t           NRPCHits;
+   Int_t           NTGCHits;
+   Int_t           NCSCHits;
+   Int_t           NBCMHits;
+   UInt_t          MBTSWord;
+   UInt_t          BPhysWord;
+   UInt_t          CombinedMuonWord;
+   UInt_t          DAODStreams;
+   UInt_t          DESDStreams;
+   UInt_t          EgammaWord;
+   UInt_t          ExoticWord;
+   UInt_t          HeavyIonWord;
+   UInt_t          HiggsWord;
+   UInt_t          JetMissingETWord;
+   UInt_t          JetTagWord;
+   UInt_t          SMWord;
+   UInt_t          SUSYWord;
+   UInt_t          TauIdWord;
+   UInt_t          TopWord;
+
+   // List of branches
+   TBranch        *b_StreamESD_ref;   //!
+   TBranch        *b_StreamRAW_ref;   //!
+   TBranch        *b_RunNumber;   //!
+   TBranch        *b_EventNumber;   //!
+   TBranch        *b_EventTime;   //!
+   TBranch        *b_EventTimeNanoSec;   //!
+   TBranch        *b_LumiBlockN;   //!
+   TBranch        *b_BunchId;   //!
+   TBranch        *b_PixelFlags;   //!
+   TBranch        *b_SCTFlags;   //!
+   TBranch        *b_TRTFlags;   //!
+   TBranch        *b_LArFlags;   //!
+   TBranch        *b_TileFlags;   //!
+   TBranch        *b_MuonFlags;   //!
+   TBranch        *b_ForwardDetFlags;   //!
+   TBranch        *b_CoreFlags;   //!
+   TBranch        *b_IsSimulation;   //!
+   TBranch        *b_IsCalibration;   //!
+   TBranch        *b_IsTestBeam;   //!
+   TBranch        *b_RandomNumber;   //!
+   TBranch        *b_NTrk;   //!
+   TBranch        *b_NVtx;   //!
+   TBranch        *b_VtxX;   //!
+   TBranch        *b_VtxY;   //!
+   TBranch        *b_VtxZ;   //!
+   TBranch        *b_VtxChiSq;   //!
+   TBranch        *b_VtxNDOF;   //!
+   TBranch        *b_NVtxTight;   //!
+   TBranch        *b_LooseElectronEta1;   //!
+   TBranch        *b_LooseElectronEta2;   //!
+   TBranch        *b_LooseElectronEta3;   //!
+   TBranch        *b_LooseElectronEta4;   //!
+   TBranch        *b_LooseElectronIsForward1;   //!
+   TBranch        *b_LooseElectronIsForward2;   //!
+   TBranch        *b_LooseElectronIsForward3;   //!
+   TBranch        *b_LooseElectronIsForward4;   //!
+   TBranch        *b_LooseElectronPhi1;   //!
+   TBranch        *b_LooseElectronPhi2;   //!
+   TBranch        *b_LooseElectronPhi3;   //!
+   TBranch        *b_LooseElectronPhi4;   //!
+   TBranch        *b_LooseElectronPt1;   //!
+   TBranch        *b_LooseElectronPt2;   //!
+   TBranch        *b_LooseElectronPt3;   //!
+   TBranch        *b_LooseElectronPt4;   //!
+   TBranch        *b_LooseElectronTightness1;   //!
+   TBranch        *b_LooseElectronTightness2;   //!
+   TBranch        *b_LooseElectronTightness3;   //!
+   TBranch        *b_LooseElectronTightness4;   //!
+   TBranch        *b_NLooseElectron;   //!
+   TBranch        *b_LoosePhotonEta1;   //!
+   TBranch        *b_LoosePhotonEta2;   //!
+   TBranch        *b_LoosePhotonPhi1;   //!
+   TBranch        *b_LoosePhotonPhi2;   //!
+   TBranch        *b_LoosePhotonPt1;   //!
+   TBranch        *b_LoosePhotonPt2;   //!
+   TBranch        *b_LoosePhotonTightness1;   //!
+   TBranch        *b_LoosePhotonTightness2;   //!
+   TBranch        *b_NConvertedLoosePhoton;   //!
+   TBranch        *b_NLoosePhoton;   //!
+   TBranch        *b_LooseMuonCSCHits1;   //!
+   TBranch        *b_LooseMuonCSCHits2;   //!
+   TBranch        *b_LooseMuonCSCHits3;   //!
+   TBranch        *b_LooseMuonCSCHits4;   //!
+   TBranch        *b_LooseMuonEta1;   //!
+   TBranch        *b_LooseMuonEta2;   //!
+   TBranch        *b_LooseMuonEta3;   //!
+   TBranch        *b_LooseMuonEta4;   //!
+   TBranch        *b_LooseMuonIsolEt1;   //!
+   TBranch        *b_LooseMuonIsolEt2;   //!
+   TBranch        *b_LooseMuonIsolEt3;   //!
+   TBranch        *b_LooseMuonIsolEt4;   //!
+   TBranch        *b_LooseMuonIsolN1;   //!
+   TBranch        *b_LooseMuonIsolN2;   //!
+   TBranch        *b_LooseMuonIsolN3;   //!
+   TBranch        *b_LooseMuonIsolN4;   //!
+   TBranch        *b_LooseMuonMDTHits1;   //!
+   TBranch        *b_LooseMuonMDTHits2;   //!
+   TBranch        *b_LooseMuonMDTHits3;   //!
+   TBranch        *b_LooseMuonMDTHits4;   //!
+   TBranch        *b_LooseMuonPhi1;   //!
+   TBranch        *b_LooseMuonPhi2;   //!
+   TBranch        *b_LooseMuonPhi3;   //!
+   TBranch        *b_LooseMuonPhi4;   //!
+   TBranch        *b_LooseMuonPt1;   //!
+   TBranch        *b_LooseMuonPt2;   //!
+   TBranch        *b_LooseMuonPt3;   //!
+   TBranch        *b_LooseMuonPt4;   //!
+   TBranch        *b_LooseMuonRPCHits1;   //!
+   TBranch        *b_LooseMuonRPCHits2;   //!
+   TBranch        *b_LooseMuonRPCHits3;   //!
+   TBranch        *b_LooseMuonRPCHits4;   //!
+   TBranch        *b_LooseMuonTGCHits1;   //!
+   TBranch        *b_LooseMuonTGCHits2;   //!
+   TBranch        *b_LooseMuonTGCHits3;   //!
+   TBranch        *b_LooseMuonTGCHits4;   //!
+   TBranch        *b_LooseMuonTightness1;   //!
+   TBranch        *b_LooseMuonTightness2;   //!
+   TBranch        *b_LooseMuonTightness3;   //!
+   TBranch        *b_LooseMuonTightness4;   //!
+   TBranch        *b_NLooseMuon;   //!
+   TBranch        *b_BJetLikelihood1;   //!
+   TBranch        *b_BJetLikelihood2;   //!
+   TBranch        *b_BJetLikelihood3;   //!
+   TBranch        *b_BJetLikelihood4;   //!
+   TBranch        *b_BJetLikelihood5;   //!
+   TBranch        *b_BJetLikelihood6;   //!
+   TBranch        *b_JetEmFrac1;   //!
+   TBranch        *b_JetEmFrac2;   //!
+   TBranch        *b_JetEmFrac3;   //!
+   TBranch        *b_JetEmFrac4;   //!
+   TBranch        *b_JetEmFrac5;   //!
+   TBranch        *b_JetEmFrac6;   //!
+   TBranch        *b_JetEta1;   //!
+   TBranch        *b_JetEta2;   //!
+   TBranch        *b_JetEta3;   //!
+   TBranch        *b_JetEta4;   //!
+   TBranch        *b_JetEta5;   //!
+   TBranch        *b_JetEta6;   //!
+   TBranch        *b_JetPhi1;   //!
+   TBranch        *b_JetPhi2;   //!
+   TBranch        *b_JetPhi3;   //!
+   TBranch        *b_JetPhi4;   //!
+   TBranch        *b_JetPhi5;   //!
+   TBranch        *b_JetPhi6;   //!
+   TBranch        *b_JetPt1;   //!
+   TBranch        *b_JetPt2;   //!
+   TBranch        *b_JetPt3;   //!
+   TBranch        *b_JetPt4;   //!
+   TBranch        *b_JetPt5;   //!
+   TBranch        *b_JetPt6;   //!
+   TBranch        *b_JetSumET;   //!
+   TBranch        *b_NBJet;   //!
+   TBranch        *b_NJet;   //!
+   TBranch        *b_BunchGroup;   //!
+   TBranch        *b_EFPassedTrigMask0;   //!
+   TBranch        *b_EFPassedTrigMask1;   //!
+   TBranch        *b_EFPassedTrigMask10;   //!
+   TBranch        *b_EFPassedTrigMask11;   //!
+   TBranch        *b_EFPassedTrigMask12;   //!
+   TBranch        *b_EFPassedTrigMask13;   //!
+   TBranch        *b_EFPassedTrigMask14;   //!
+   TBranch        *b_EFPassedTrigMask15;   //!
+   TBranch        *b_EFPassedTrigMask16;   //!
+   TBranch        *b_EFPassedTrigMask17;   //!
+   TBranch        *b_EFPassedTrigMask18;   //!
+   TBranch        *b_EFPassedTrigMask19;   //!
+   TBranch        *b_EFPassedTrigMask2;   //!
+   TBranch        *b_EFPassedTrigMask20;   //!
+   TBranch        *b_EFPassedTrigMask21;   //!
+   TBranch        *b_EFPassedTrigMask22;   //!
+   TBranch        *b_EFPassedTrigMask23;   //!
+   TBranch        *b_EFPassedTrigMask24;   //!
+   TBranch        *b_EFPassedTrigMask25;   //!
+   TBranch        *b_EFPassedTrigMask26;   //!
+   TBranch        *b_EFPassedTrigMask27;   //!
+   TBranch        *b_EFPassedTrigMask28;   //!
+   TBranch        *b_EFPassedTrigMask29;   //!
+   TBranch        *b_EFPassedTrigMask3;   //!
+   TBranch        *b_EFPassedTrigMask30;   //!
+   TBranch        *b_EFPassedTrigMask31;   //!
+   TBranch        *b_EFPassedTrigMask4;   //!
+   TBranch        *b_EFPassedTrigMask5;   //!
+   TBranch        *b_EFPassedTrigMask6;   //!
+   TBranch        *b_EFPassedTrigMask7;   //!
+   TBranch        *b_EFPassedTrigMask8;   //!
+   TBranch        *b_EFPassedTrigMask9;   //!
+   TBranch        *b_L1PassedTrigMaskTAP0;   //!
+   TBranch        *b_L1PassedTrigMaskTAP1;   //!
+   TBranch        *b_L1PassedTrigMaskTAP2;   //!
+   TBranch        *b_L1PassedTrigMaskTAP3;   //!
+   TBranch        *b_L1PassedTrigMaskTAP4;   //!
+   TBranch        *b_L1PassedTrigMaskTAP5;   //!
+   TBranch        *b_L1PassedTrigMaskTAP6;   //!
+   TBranch        *b_L1PassedTrigMaskTAP7;   //!
+   TBranch        *b_L1PassedTrigMaskTAV0;   //!
+   TBranch        *b_L1PassedTrigMaskTAV1;   //!
+   TBranch        *b_L1PassedTrigMaskTAV2;   //!
+   TBranch        *b_L1PassedTrigMaskTAV3;   //!
+   TBranch        *b_L1PassedTrigMaskTAV4;   //!
+   TBranch        *b_L1PassedTrigMaskTAV5;   //!
+   TBranch        *b_L1PassedTrigMaskTAV6;   //!
+   TBranch        *b_L1PassedTrigMaskTAV7;   //!
+   TBranch        *b_L1PassedTrigMaskTBP0;   //!
+   TBranch        *b_L1PassedTrigMaskTBP1;   //!
+   TBranch        *b_L1PassedTrigMaskTBP2;   //!
+   TBranch        *b_L1PassedTrigMaskTBP3;   //!
+   TBranch        *b_L1PassedTrigMaskTBP4;   //!
+   TBranch        *b_L1PassedTrigMaskTBP5;   //!
+   TBranch        *b_L1PassedTrigMaskTBP6;   //!
+   TBranch        *b_L1PassedTrigMaskTBP7;   //!
+   TBranch        *b_L2PassedTrigMask0;   //!
+   TBranch        *b_L2PassedTrigMask1;   //!
+   TBranch        *b_L2PassedTrigMask10;   //!
+   TBranch        *b_L2PassedTrigMask11;   //!
+   TBranch        *b_L2PassedTrigMask12;   //!
+   TBranch        *b_L2PassedTrigMask13;   //!
+   TBranch        *b_L2PassedTrigMask14;   //!
+   TBranch        *b_L2PassedTrigMask15;   //!
+   TBranch        *b_L2PassedTrigMask16;   //!
+   TBranch        *b_L2PassedTrigMask17;   //!
+   TBranch        *b_L2PassedTrigMask18;   //!
+   TBranch        *b_L2PassedTrigMask19;   //!
+   TBranch        *b_L2PassedTrigMask2;   //!
+   TBranch        *b_L2PassedTrigMask20;   //!
+   TBranch        *b_L2PassedTrigMask21;   //!
+   TBranch        *b_L2PassedTrigMask22;   //!
+   TBranch        *b_L2PassedTrigMask23;   //!
+   TBranch        *b_L2PassedTrigMask24;   //!
+   TBranch        *b_L2PassedTrigMask25;   //!
+   TBranch        *b_L2PassedTrigMask26;   //!
+   TBranch        *b_L2PassedTrigMask27;   //!
+   TBranch        *b_L2PassedTrigMask28;   //!
+   TBranch        *b_L2PassedTrigMask29;   //!
+   TBranch        *b_L2PassedTrigMask3;   //!
+   TBranch        *b_L2PassedTrigMask30;   //!
+   TBranch        *b_L2PassedTrigMask31;   //!
+   TBranch        *b_L2PassedTrigMask4;   //!
+   TBranch        *b_L2PassedTrigMask5;   //!
+   TBranch        *b_L2PassedTrigMask6;   //!
+   TBranch        *b_L2PassedTrigMask7;   //!
+   TBranch        *b_L2PassedTrigMask8;   //!
+   TBranch        *b_L2PassedTrigMask9;   //!
+   TBranch        *b_Level1TriggerType;   //!
+   TBranch        *b_SFOStreams;   //!
+   TBranch        *b_TrackLead1D0;   //!
+   TBranch        *b_TrackLead1Z0;   //!
+   TBranch        *b_TrackLead1Phi0;   //!
+   TBranch        *b_TrackLead1Theta;   //!
+   TBranch        *b_TrackLead1QOverP;   //!
+   TBranch        *b_TrackLead2D0;   //!
+   TBranch        *b_TrackLead2Z0;   //!
+   TBranch        *b_TrackLead2Phi0;   //!
+   TBranch        *b_TrackLead2Theta;   //!
+   TBranch        *b_TrackLead2QOverP;   //!
+   TBranch        *b_CellEnergySum;   //!
+   TBranch        *b_CellEnergySumEMB;   //!
+   TBranch        *b_CellEnergySumEMEC;   //!
+   TBranch        *b_CellEnergySumHEC;   //!
+   TBranch        *b_CellEnergySumFCAL;   //!
+   TBranch        *b_CellEnergySumTile;   //!
+   TBranch        *b_ClusterEnergySum;   //!
+   TBranch        *b_TopoClusterEt1;   //!
+   TBranch        *b_TopoClusterEta1;   //!
+   TBranch        *b_TopoClusterPhi1;   //!
+   TBranch        *b_CellMissingET;   //!
+   TBranch        *b_CellMissingETPhi;   //!
+   TBranch        *b_MBTSTimeDiff;   //!
+   TBranch        *b_LArECTimeDiff;   //!
+   TBranch        *b_TRTEventPhase;   //!
+   TBranch        *b_NPixelTracks;   //!
+   TBranch        *b_NSCTTracks;   //!
+   TBranch        *b_NTRTTracks;   //!
+   TBranch        *b_NMooreSegs;   //!
+   TBranch        *b_NMboySegs;   //!
+   TBranch        *b_NInnerMboySegs;   //!
+   TBranch        *b_NHitsInnerMboySegs;   //!
+   TBranch        *b_NInnerMuSegs;   //!
+   TBranch        *b_NHitsInnerMooreSegs;   //!
+   TBranch        *b_NSCTSPs;   //!
+   TBranch        *b_NPixelSPs;   //!
+   TBranch        *b_NTRTDCs;   //!
+   TBranch        *b_NTRTHtDCs;   //!
+   TBranch        *b_NMDTHits;   //!
+   TBranch        *b_NRPCHits;   //!
+   TBranch        *b_NTGCHits;   //!
+   TBranch        *b_NCSCHits;   //!
+   TBranch        *b_NBCMHits;   //!
+   TBranch        *b_MBTSWord;   //!
+   TBranch        *b_BPhysWord;   //!
+   TBranch        *b_CombinedMuonWord;   //!
+   TBranch        *b_DAODStreams;   //!
+   TBranch        *b_DESDStreams;   //!
+   TBranch        *b_EgammaWord;   //!
+   TBranch        *b_ExoticWord;   //!
+   TBranch        *b_HeavyIonWord;   //!
+   TBranch        *b_HiggsWord;   //!
+   TBranch        *b_JetMissingETWord;   //!
+   TBranch        *b_JetTagWord;   //!
+   TBranch        *b_SMWord;   //!
+   TBranch        *b_SUSYWord;   //!
+   TBranch        *b_TauIdWord;   //!
+   TBranch        *b_TopWord;   //!
+
+   HITAGprinter(TTree *tree=0);
+   virtual ~HITAGprinter();
+   virtual Int_t    Cut(Long64_t entry);
+   virtual Int_t    GetEntry(Long64_t entry);
+   virtual Long64_t LoadTree(Long64_t entry);
+   virtual void     Init(TTree *tree);
+   virtual void     Loop();
+   virtual Bool_t   Notify();
+   virtual void     Show(Long64_t entry = -1);
+};
+
+#endif
+
+#ifdef HITAGprinter_cxx
+HITAGprinter::HITAGprinter(TTree *tree)
+{
+// if parameter tree is not specified (or zero), connect the file
+// used to generate this class and read the Tree.
+   if (tree == 0) {
+
+#ifdef SINGLE_TREE
+      // The following code should be used if you want this class to access
+      // a single tree instead of a chain
+      TFile *f = (TFile*)gROOT->GetListOfFiles()->FindObject("/u/at/ahaas/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000005.pool.root.1");
+      if (!f) {
+         f = new TFile("/u/at/ahaas/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000005.pool.root.1");
+         f->cd("Rint:/");
+      }
+      tree = (TTree*)gDirectory->Get("POOLCollectionTree");
+
+#else // SINGLE_TREE
+
+      // The following code should be used if you want this class to access a chain
+      // of trees.
+      TChain * chain = new TChain("POOLCollectionTree","");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000001.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000002.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000003.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000004.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000005.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000006.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000007.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000008.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000009.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000010.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000011.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000012.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000013.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000014.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000015.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000016.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000017.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000018.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000019.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000020.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000021.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000022.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000023.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000024.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000025.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000026.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000027.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000028.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000029.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000030.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000031.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000032.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000033.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000034.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000035.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000036.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000037.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000038.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000039.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000040.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000041.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000042.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000043.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000044.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000045.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000046.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000047.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000048.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000049.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000050.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000051.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000052.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000053.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000054.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000055.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000056.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000057.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000058.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000059.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000060.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000061.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000062.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000063.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000064.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000065.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000066.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000067.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000068.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000069.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000070.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000071.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000072.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000073.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000074.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000075.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000076.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000077.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000078.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000079.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000080.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000081.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000082.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000083.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000084.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000085.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000086.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000087.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000088.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000089.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000090.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000091.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000092.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000093.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000094.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000095.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000096.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000097.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000098.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000099.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000100.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000101.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000102.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000103.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000104.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000105.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000106.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000107.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000108.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000109.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000110.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000111.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000112.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000113.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000114.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000115.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000116.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000117.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000118.pool.root.1/POOLCollectionTree");
+      chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._000119.pool.root.1/POOLCollectionTree");
+      tree = chain;
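+
+      // Note (a sketch, not required): because the tree name "POOLCollectionTree" was
+      // already given to the TChain constructor above, the same files could likely
+      // also be attached with a single wildcarded Add, assuming the directory holds
+      // only these TAG files:
+      //   chain->Add("~/nfs3/data10_hi.00170467.physics_bulk.merge.TAG.r2111_p455_tid288698_00/TAG.288698._*.pool.root.1");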
+#endif // SINGLE_TREE
+
+   }
+   Init(tree);
+}
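+
+// Minimal usage sketch: the default constructor argument (tree==0) builds the chain
+// above, but a caller can equally pass its own TTree/TChain. The file name
+// "my_other_TAG.pool.root" below is purely a hypothetical example:
+//   TChain *c = new TChain("POOLCollectionTree");
+//   c->Add("my_other_TAG.pool.root");
+//   HITAGprinter t(c);   // or simply: HITAGprinter t;  to use the default chain
+//   t.Loop();            // writes HI_vtx.txt and HI_filter.txt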
+
+HITAGprinter::~HITAGprinter()
+{
+   if (!fChain) return;
+   delete fChain->GetCurrentFile();
+}
+
+Int_t HITAGprinter::GetEntry(Long64_t entry)
+{
+// Read contents of entry.
+   if (!fChain) return 0;
+   return fChain->GetEntry(entry);
+}
+Long64_t HITAGprinter::LoadTree(Long64_t entry)
+{
+// Set the environment to read one entry
+   if (!fChain) return -5;
+   Long64_t centry = fChain->LoadTree(entry);
+   if (centry < 0) return centry;
+   if (!fChain->InheritsFrom(TChain::Class()))  return centry;
+   TChain *chain = (TChain*)fChain;
+   if (chain->GetTreeNumber() != fCurrent) {
+      fCurrent = chain->GetTreeNumber();
+      Notify();
+   }
+   return centry;
+}
+
+void HITAGprinter::Init(TTree *tree)
+{
+   // The Init() function is called when the selector needs to initialize
+   // a new tree or chain. Typically here the branch addresses and branch
+   // pointers of the tree will be set.
+   // It is normally not necessary to make changes to the generated
+   // code, but the routine can be extended by the user if needed.
+   // Init() will be called many times when running on PROOF
+   // (once per file to be processed).
+
+   // Set branch addresses and branch pointers
+   if (!tree) return;
+   fChain = tree;
+   fCurrent = -1;
+   fChain->SetMakeClass(1);
+
+   fChain->SetBranchAddress("StreamESD_ref", StreamESD_ref, &b_StreamESD_ref);
+   fChain->SetBranchAddress("StreamRAW_ref", StreamRAW_ref, &b_StreamRAW_ref);
+   fChain->SetBranchAddress("RunNumber", &RunNumber, &b_RunNumber);
+   fChain->SetBranchAddress("EventNumber", &EventNumber, &b_EventNumber);
+   fChain->SetBranchAddress("EventTime", &EventTime, &b_EventTime);
+   fChain->SetBranchAddress("EventTimeNanoSec", &EventTimeNanoSec, &b_EventTimeNanoSec);
+   fChain->SetBranchAddress("LumiBlockN", &LumiBlockN, &b_LumiBlockN);
+   fChain->SetBranchAddress("BunchId", &BunchId, &b_BunchId);
+   fChain->SetBranchAddress("PixelFlags", &PixelFlags, &b_PixelFlags);
+   fChain->SetBranchAddress("SCTFlags", &SCTFlags, &b_SCTFlags);
+   fChain->SetBranchAddress("TRTFlags", &TRTFlags, &b_TRTFlags);
+   fChain->SetBranchAddress("LArFlags", &LArFlags, &b_LArFlags);
+   fChain->SetBranchAddress("TileFlags", &TileFlags, &b_TileFlags);
+   fChain->SetBranchAddress("MuonFlags", &MuonFlags, &b_MuonFlags);
+   fChain->SetBranchAddress("ForwardDetFlags", &ForwardDetFlags, &b_ForwardDetFlags);
+   fChain->SetBranchAddress("CoreFlags", &CoreFlags, &b_CoreFlags);
+   fChain->SetBranchAddress("IsSimulation", &IsSimulation, &b_IsSimulation);
+   fChain->SetBranchAddress("IsCalibration", &IsCalibration, &b_IsCalibration);
+   fChain->SetBranchAddress("IsTestBeam", &IsTestBeam, &b_IsTestBeam);
+   fChain->SetBranchAddress("RandomNumber", &RandomNumber, &b_RandomNumber);
+   fChain->SetBranchAddress("NTrk", &NTrk, &b_NTrk);
+   fChain->SetBranchAddress("NVtx", &NVtx, &b_NVtx);
+   fChain->SetBranchAddress("VtxX", &VtxX, &b_VtxX);
+   fChain->SetBranchAddress("VtxY", &VtxY, &b_VtxY);
+   fChain->SetBranchAddress("VtxZ", &VtxZ, &b_VtxZ);
+   fChain->SetBranchAddress("VtxChiSq", &VtxChiSq, &b_VtxChiSq);
+   fChain->SetBranchAddress("VtxNDOF", &VtxNDOF, &b_VtxNDOF);
+   fChain->SetBranchAddress("NVtxTight", &NVtxTight, &b_NVtxTight);
+   fChain->SetBranchAddress("LooseElectronEta1", &LooseElectronEta1, &b_LooseElectronEta1);
+   fChain->SetBranchAddress("LooseElectronEta2", &LooseElectronEta2, &b_LooseElectronEta2);
+   fChain->SetBranchAddress("LooseElectronEta3", &LooseElectronEta3, &b_LooseElectronEta3);
+   fChain->SetBranchAddress("LooseElectronEta4", &LooseElectronEta4, &b_LooseElectronEta4);
+   fChain->SetBranchAddress("LooseElectronIsForward1", &LooseElectronIsForward1, &b_LooseElectronIsForward1);
+   fChain->SetBranchAddress("LooseElectronIsForward2", &LooseElectronIsForward2, &b_LooseElectronIsForward2);
+   fChain->SetBranchAddress("LooseElectronIsForward3", &LooseElectronIsForward3, &b_LooseElectronIsForward3);
+   fChain->SetBranchAddress("LooseElectronIsForward4", &LooseElectronIsForward4, &b_LooseElectronIsForward4);
+   fChain->SetBranchAddress("LooseElectronPhi1", &LooseElectronPhi1, &b_LooseElectronPhi1);
+   fChain->SetBranchAddress("LooseElectronPhi2", &LooseElectronPhi2, &b_LooseElectronPhi2);
+   fChain->SetBranchAddress("LooseElectronPhi3", &LooseElectronPhi3, &b_LooseElectronPhi3);
+   fChain->SetBranchAddress("LooseElectronPhi4", &LooseElectronPhi4, &b_LooseElectronPhi4);
+   fChain->SetBranchAddress("LooseElectronPt1", &LooseElectronPt1, &b_LooseElectronPt1);
+   fChain->SetBranchAddress("LooseElectronPt2", &LooseElectronPt2, &b_LooseElectronPt2);
+   fChain->SetBranchAddress("LooseElectronPt3", &LooseElectronPt3, &b_LooseElectronPt3);
+   fChain->SetBranchAddress("LooseElectronPt4", &LooseElectronPt4, &b_LooseElectronPt4);
+   fChain->SetBranchAddress("LooseElectronTightness1", &LooseElectronTightness1, &b_LooseElectronTightness1);
+   fChain->SetBranchAddress("LooseElectronTightness2", &LooseElectronTightness2, &b_LooseElectronTightness2);
+   fChain->SetBranchAddress("LooseElectronTightness3", &LooseElectronTightness3, &b_LooseElectronTightness3);
+   fChain->SetBranchAddress("LooseElectronTightness4", &LooseElectronTightness4, &b_LooseElectronTightness4);
+   fChain->SetBranchAddress("NLooseElectron", &NLooseElectron, &b_NLooseElectron);
+   fChain->SetBranchAddress("LoosePhotonEta1", &LoosePhotonEta1, &b_LoosePhotonEta1);
+   fChain->SetBranchAddress("LoosePhotonEta2", &LoosePhotonEta2, &b_LoosePhotonEta2);
+   fChain->SetBranchAddress("LoosePhotonPhi1", &LoosePhotonPhi1, &b_LoosePhotonPhi1);
+   fChain->SetBranchAddress("LoosePhotonPhi2", &LoosePhotonPhi2, &b_LoosePhotonPhi2);
+   fChain->SetBranchAddress("LoosePhotonPt1", &LoosePhotonPt1, &b_LoosePhotonPt1);
+   fChain->SetBranchAddress("LoosePhotonPt2", &LoosePhotonPt2, &b_LoosePhotonPt2);
+   fChain->SetBranchAddress("LoosePhotonTightness1", &LoosePhotonTightness1, &b_LoosePhotonTightness1);
+   fChain->SetBranchAddress("LoosePhotonTightness2", &LoosePhotonTightness2, &b_LoosePhotonTightness2);
+   fChain->SetBranchAddress("NConvertedLoosePhoton", &NConvertedLoosePhoton, &b_NConvertedLoosePhoton);
+   fChain->SetBranchAddress("NLoosePhoton", &NLoosePhoton, &b_NLoosePhoton);
+   fChain->SetBranchAddress("LooseMuonCSCHits1", &LooseMuonCSCHits1, &b_LooseMuonCSCHits1);
+   fChain->SetBranchAddress("LooseMuonCSCHits2", &LooseMuonCSCHits2, &b_LooseMuonCSCHits2);
+   fChain->SetBranchAddress("LooseMuonCSCHits3", &LooseMuonCSCHits3, &b_LooseMuonCSCHits3);
+   fChain->SetBranchAddress("LooseMuonCSCHits4", &LooseMuonCSCHits4, &b_LooseMuonCSCHits4);
+   fChain->SetBranchAddress("LooseMuonEta1", &LooseMuonEta1, &b_LooseMuonEta1);
+   fChain->SetBranchAddress("LooseMuonEta2", &LooseMuonEta2, &b_LooseMuonEta2);
+   fChain->SetBranchAddress("LooseMuonEta3", &LooseMuonEta3, &b_LooseMuonEta3);
+   fChain->SetBranchAddress("LooseMuonEta4", &LooseMuonEta4, &b_LooseMuonEta4);
+   fChain->SetBranchAddress("LooseMuonIsolEt1", &LooseMuonIsolEt1, &b_LooseMuonIsolEt1);
+   fChain->SetBranchAddress("LooseMuonIsolEt2", &LooseMuonIsolEt2, &b_LooseMuonIsolEt2);
+   fChain->SetBranchAddress("LooseMuonIsolEt3", &LooseMuonIsolEt3, &b_LooseMuonIsolEt3);
+   fChain->SetBranchAddress("LooseMuonIsolEt4", &LooseMuonIsolEt4, &b_LooseMuonIsolEt4);
+   fChain->SetBranchAddress("LooseMuonIsolN1", &LooseMuonIsolN1, &b_LooseMuonIsolN1);
+   fChain->SetBranchAddress("LooseMuonIsolN2", &LooseMuonIsolN2, &b_LooseMuonIsolN2);
+   fChain->SetBranchAddress("LooseMuonIsolN3", &LooseMuonIsolN3, &b_LooseMuonIsolN3);
+   fChain->SetBranchAddress("LooseMuonIsolN4", &LooseMuonIsolN4, &b_LooseMuonIsolN4);
+   fChain->SetBranchAddress("LooseMuonMDTHits1", &LooseMuonMDTHits1, &b_LooseMuonMDTHits1);
+   fChain->SetBranchAddress("LooseMuonMDTHits2", &LooseMuonMDTHits2, &b_LooseMuonMDTHits2);
+   fChain->SetBranchAddress("LooseMuonMDTHits3", &LooseMuonMDTHits3, &b_LooseMuonMDTHits3);
+   fChain->SetBranchAddress("LooseMuonMDTHits4", &LooseMuonMDTHits4, &b_LooseMuonMDTHits4);
+   fChain->SetBranchAddress("LooseMuonPhi1", &LooseMuonPhi1, &b_LooseMuonPhi1);
+   fChain->SetBranchAddress("LooseMuonPhi2", &LooseMuonPhi2, &b_LooseMuonPhi2);
+   fChain->SetBranchAddress("LooseMuonPhi3", &LooseMuonPhi3, &b_LooseMuonPhi3);
+   fChain->SetBranchAddress("LooseMuonPhi4", &LooseMuonPhi4, &b_LooseMuonPhi4);
+   fChain->SetBranchAddress("LooseMuonPt1", &LooseMuonPt1, &b_LooseMuonPt1);
+   fChain->SetBranchAddress("LooseMuonPt2", &LooseMuonPt2, &b_LooseMuonPt2);
+   fChain->SetBranchAddress("LooseMuonPt3", &LooseMuonPt3, &b_LooseMuonPt3);
+   fChain->SetBranchAddress("LooseMuonPt4", &LooseMuonPt4, &b_LooseMuonPt4);
+   fChain->SetBranchAddress("LooseMuonRPCHits1", &LooseMuonRPCHits1, &b_LooseMuonRPCHits1);
+   fChain->SetBranchAddress("LooseMuonRPCHits2", &LooseMuonRPCHits2, &b_LooseMuonRPCHits2);
+   fChain->SetBranchAddress("LooseMuonRPCHits3", &LooseMuonRPCHits3, &b_LooseMuonRPCHits3);
+   fChain->SetBranchAddress("LooseMuonRPCHits4", &LooseMuonRPCHits4, &b_LooseMuonRPCHits4);
+   fChain->SetBranchAddress("LooseMuonTGCHits1", &LooseMuonTGCHits1, &b_LooseMuonTGCHits1);
+   fChain->SetBranchAddress("LooseMuonTGCHits2", &LooseMuonTGCHits2, &b_LooseMuonTGCHits2);
+   fChain->SetBranchAddress("LooseMuonTGCHits3", &LooseMuonTGCHits3, &b_LooseMuonTGCHits3);
+   fChain->SetBranchAddress("LooseMuonTGCHits4", &LooseMuonTGCHits4, &b_LooseMuonTGCHits4);
+   fChain->SetBranchAddress("LooseMuonTightness1", &LooseMuonTightness1, &b_LooseMuonTightness1);
+   fChain->SetBranchAddress("LooseMuonTightness2", &LooseMuonTightness2, &b_LooseMuonTightness2);
+   fChain->SetBranchAddress("LooseMuonTightness3", &LooseMuonTightness3, &b_LooseMuonTightness3);
+   fChain->SetBranchAddress("LooseMuonTightness4", &LooseMuonTightness4, &b_LooseMuonTightness4);
+   fChain->SetBranchAddress("NLooseMuon", &NLooseMuon, &b_NLooseMuon);
+   fChain->SetBranchAddress("BJetLikelihood1", &BJetLikelihood1, &b_BJetLikelihood1);
+   fChain->SetBranchAddress("BJetLikelihood2", &BJetLikelihood2, &b_BJetLikelihood2);
+   fChain->SetBranchAddress("BJetLikelihood3", &BJetLikelihood3, &b_BJetLikelihood3);
+   fChain->SetBranchAddress("BJetLikelihood4", &BJetLikelihood4, &b_BJetLikelihood4);
+   fChain->SetBranchAddress("BJetLikelihood5", &BJetLikelihood5, &b_BJetLikelihood5);
+   fChain->SetBranchAddress("BJetLikelihood6", &BJetLikelihood6, &b_BJetLikelihood6);
+   fChain->SetBranchAddress("JetEmFrac1", &JetEmFrac1, &b_JetEmFrac1);
+   fChain->SetBranchAddress("JetEmFrac2", &JetEmFrac2, &b_JetEmFrac2);
+   fChain->SetBranchAddress("JetEmFrac3", &JetEmFrac3, &b_JetEmFrac3);
+   fChain->SetBranchAddress("JetEmFrac4", &JetEmFrac4, &b_JetEmFrac4);
+   fChain->SetBranchAddress("JetEmFrac5", &JetEmFrac5, &b_JetEmFrac5);
+   fChain->SetBranchAddress("JetEmFrac6", &JetEmFrac6, &b_JetEmFrac6);
+   fChain->SetBranchAddress("JetEta1", &JetEta1, &b_JetEta1);
+   fChain->SetBranchAddress("JetEta2", &JetEta2, &b_JetEta2);
+   fChain->SetBranchAddress("JetEta3", &JetEta3, &b_JetEta3);
+   fChain->SetBranchAddress("JetEta4", &JetEta4, &b_JetEta4);
+   fChain->SetBranchAddress("JetEta5", &JetEta5, &b_JetEta5);
+   fChain->SetBranchAddress("JetEta6", &JetEta6, &b_JetEta6);
+   fChain->SetBranchAddress("JetPhi1", &JetPhi1, &b_JetPhi1);
+   fChain->SetBranchAddress("JetPhi2", &JetPhi2, &b_JetPhi2);
+   fChain->SetBranchAddress("JetPhi3", &JetPhi3, &b_JetPhi3);
+   fChain->SetBranchAddress("JetPhi4", &JetPhi4, &b_JetPhi4);
+   fChain->SetBranchAddress("JetPhi5", &JetPhi5, &b_JetPhi5);
+   fChain->SetBranchAddress("JetPhi6", &JetPhi6, &b_JetPhi6);
+   fChain->SetBranchAddress("JetPt1", &JetPt1, &b_JetPt1);
+   fChain->SetBranchAddress("JetPt2", &JetPt2, &b_JetPt2);
+   fChain->SetBranchAddress("JetPt3", &JetPt3, &b_JetPt3);
+   fChain->SetBranchAddress("JetPt4", &JetPt4, &b_JetPt4);
+   fChain->SetBranchAddress("JetPt5", &JetPt5, &b_JetPt5);
+   fChain->SetBranchAddress("JetPt6", &JetPt6, &b_JetPt6);
+   fChain->SetBranchAddress("JetSumET", &JetSumET, &b_JetSumET);
+   fChain->SetBranchAddress("NBJet", &NBJet, &b_NBJet);
+   fChain->SetBranchAddress("NJet", &NJet, &b_NJet);
+   fChain->SetBranchAddress("BunchGroup", &BunchGroup, &b_BunchGroup);
+   fChain->SetBranchAddress("EFPassedTrigMask0", &EFPassedTrigMask0, &b_EFPassedTrigMask0);
+   fChain->SetBranchAddress("EFPassedTrigMask1", &EFPassedTrigMask1, &b_EFPassedTrigMask1);
+   fChain->SetBranchAddress("EFPassedTrigMask10", &EFPassedTrigMask10, &b_EFPassedTrigMask10);
+   fChain->SetBranchAddress("EFPassedTrigMask11", &EFPassedTrigMask11, &b_EFPassedTrigMask11);
+   fChain->SetBranchAddress("EFPassedTrigMask12", &EFPassedTrigMask12, &b_EFPassedTrigMask12);
+   fChain->SetBranchAddress("EFPassedTrigMask13", &EFPassedTrigMask13, &b_EFPassedTrigMask13);
+   fChain->SetBranchAddress("EFPassedTrigMask14", &EFPassedTrigMask14, &b_EFPassedTrigMask14);
+   fChain->SetBranchAddress("EFPassedTrigMask15", &EFPassedTrigMask15, &b_EFPassedTrigMask15);
+   fChain->SetBranchAddress("EFPassedTrigMask16", &EFPassedTrigMask16, &b_EFPassedTrigMask16);
+   fChain->SetBranchAddress("EFPassedTrigMask17", &EFPassedTrigMask17, &b_EFPassedTrigMask17);
+   fChain->SetBranchAddress("EFPassedTrigMask18", &EFPassedTrigMask18, &b_EFPassedTrigMask18);
+   fChain->SetBranchAddress("EFPassedTrigMask19", &EFPassedTrigMask19, &b_EFPassedTrigMask19);
+   fChain->SetBranchAddress("EFPassedTrigMask2", &EFPassedTrigMask2, &b_EFPassedTrigMask2);
+   fChain->SetBranchAddress("EFPassedTrigMask20", &EFPassedTrigMask20, &b_EFPassedTrigMask20);
+   fChain->SetBranchAddress("EFPassedTrigMask21", &EFPassedTrigMask21, &b_EFPassedTrigMask21);
+   fChain->SetBranchAddress("EFPassedTrigMask22", &EFPassedTrigMask22, &b_EFPassedTrigMask22);
+   fChain->SetBranchAddress("EFPassedTrigMask23", &EFPassedTrigMask23, &b_EFPassedTrigMask23);
+   fChain->SetBranchAddress("EFPassedTrigMask24", &EFPassedTrigMask24, &b_EFPassedTrigMask24);
+   fChain->SetBranchAddress("EFPassedTrigMask25", &EFPassedTrigMask25, &b_EFPassedTrigMask25);
+   fChain->SetBranchAddress("EFPassedTrigMask26", &EFPassedTrigMask26, &b_EFPassedTrigMask26);
+   fChain->SetBranchAddress("EFPassedTrigMask27", &EFPassedTrigMask27, &b_EFPassedTrigMask27);
+   fChain->SetBranchAddress("EFPassedTrigMask28", &EFPassedTrigMask28, &b_EFPassedTrigMask28);
+   fChain->SetBranchAddress("EFPassedTrigMask29", &EFPassedTrigMask29, &b_EFPassedTrigMask29);
+   fChain->SetBranchAddress("EFPassedTrigMask3", &EFPassedTrigMask3, &b_EFPassedTrigMask3);
+   fChain->SetBranchAddress("EFPassedTrigMask30", &EFPassedTrigMask30, &b_EFPassedTrigMask30);
+   fChain->SetBranchAddress("EFPassedTrigMask31", &EFPassedTrigMask31, &b_EFPassedTrigMask31);
+   fChain->SetBranchAddress("EFPassedTrigMask4", &EFPassedTrigMask4, &b_EFPassedTrigMask4);
+   fChain->SetBranchAddress("EFPassedTrigMask5", &EFPassedTrigMask5, &b_EFPassedTrigMask5);
+   fChain->SetBranchAddress("EFPassedTrigMask6", &EFPassedTrigMask6, &b_EFPassedTrigMask6);
+   fChain->SetBranchAddress("EFPassedTrigMask7", &EFPassedTrigMask7, &b_EFPassedTrigMask7);
+   fChain->SetBranchAddress("EFPassedTrigMask8", &EFPassedTrigMask8, &b_EFPassedTrigMask8);
+   fChain->SetBranchAddress("EFPassedTrigMask9", &EFPassedTrigMask9, &b_EFPassedTrigMask9);
+   fChain->SetBranchAddress("L1PassedTrigMaskTAP0", &L1PassedTrigMaskTAP0, &b_L1PassedTrigMaskTAP0);
+   fChain->SetBranchAddress("L1PassedTrigMaskTAP1", &L1PassedTrigMaskTAP1, &b_L1PassedTrigMaskTAP1);
+   fChain->SetBranchAddress("L1PassedTrigMaskTAP2", &L1PassedTrigMaskTAP2, &b_L1PassedTrigMaskTAP2);
+   fChain->SetBranchAddress("L1PassedTrigMaskTAP3", &L1PassedTrigMaskTAP3, &b_L1PassedTrigMaskTAP3);
+   fChain->SetBranchAddress("L1PassedTrigMaskTAP4", &L1PassedTrigMaskTAP4, &b_L1PassedTrigMaskTAP4);
+   fChain->SetBranchAddress("L1PassedTrigMaskTAP5", &L1PassedTrigMaskTAP5, &b_L1PassedTrigMaskTAP5);
+   fChain->SetBranchAddress("L1PassedTrigMaskTAP6", &L1PassedTrigMaskTAP6, &b_L1PassedTrigMaskTAP6);
+   fChain->SetBranchAddress("L1PassedTrigMaskTAP7", &L1PassedTrigMaskTAP7, &b_L1PassedTrigMaskTAP7);
+   fChain->SetBranchAddress("L1PassedTrigMaskTAV0", &L1PassedTrigMaskTAV0, &b_L1PassedTrigMaskTAV0);
+   fChain->SetBranchAddress("L1PassedTrigMaskTAV1", &L1PassedTrigMaskTAV1, &b_L1PassedTrigMaskTAV1);
+   fChain->SetBranchAddress("L1PassedTrigMaskTAV2", &L1PassedTrigMaskTAV2, &b_L1PassedTrigMaskTAV2);
+   fChain->SetBranchAddress("L1PassedTrigMaskTAV3", &L1PassedTrigMaskTAV3, &b_L1PassedTrigMaskTAV3);
+   fChain->SetBranchAddress("L1PassedTrigMaskTAV4", &L1PassedTrigMaskTAV4, &b_L1PassedTrigMaskTAV4);
+   fChain->SetBranchAddress("L1PassedTrigMaskTAV5", &L1PassedTrigMaskTAV5, &b_L1PassedTrigMaskTAV5);
+   fChain->SetBranchAddress("L1PassedTrigMaskTAV6", &L1PassedTrigMaskTAV6, &b_L1PassedTrigMaskTAV6);
+   fChain->SetBranchAddress("L1PassedTrigMaskTAV7", &L1PassedTrigMaskTAV7, &b_L1PassedTrigMaskTAV7);
+   fChain->SetBranchAddress("L1PassedTrigMaskTBP0", &L1PassedTrigMaskTBP0, &b_L1PassedTrigMaskTBP0);
+   fChain->SetBranchAddress("L1PassedTrigMaskTBP1", &L1PassedTrigMaskTBP1, &b_L1PassedTrigMaskTBP1);
+   fChain->SetBranchAddress("L1PassedTrigMaskTBP2", &L1PassedTrigMaskTBP2, &b_L1PassedTrigMaskTBP2);
+   fChain->SetBranchAddress("L1PassedTrigMaskTBP3", &L1PassedTrigMaskTBP3, &b_L1PassedTrigMaskTBP3);
+   fChain->SetBranchAddress("L1PassedTrigMaskTBP4", &L1PassedTrigMaskTBP4, &b_L1PassedTrigMaskTBP4);
+   fChain->SetBranchAddress("L1PassedTrigMaskTBP5", &L1PassedTrigMaskTBP5, &b_L1PassedTrigMaskTBP5);
+   fChain->SetBranchAddress("L1PassedTrigMaskTBP6", &L1PassedTrigMaskTBP6, &b_L1PassedTrigMaskTBP6);
+   fChain->SetBranchAddress("L1PassedTrigMaskTBP7", &L1PassedTrigMaskTBP7, &b_L1PassedTrigMaskTBP7);
+   fChain->SetBranchAddress("L2PassedTrigMask0", &L2PassedTrigMask0, &b_L2PassedTrigMask0);
+   fChain->SetBranchAddress("L2PassedTrigMask1", &L2PassedTrigMask1, &b_L2PassedTrigMask1);
+   fChain->SetBranchAddress("L2PassedTrigMask10", &L2PassedTrigMask10, &b_L2PassedTrigMask10);
+   fChain->SetBranchAddress("L2PassedTrigMask11", &L2PassedTrigMask11, &b_L2PassedTrigMask11);
+   fChain->SetBranchAddress("L2PassedTrigMask12", &L2PassedTrigMask12, &b_L2PassedTrigMask12);
+   fChain->SetBranchAddress("L2PassedTrigMask13", &L2PassedTrigMask13, &b_L2PassedTrigMask13);
+   fChain->SetBranchAddress("L2PassedTrigMask14", &L2PassedTrigMask14, &b_L2PassedTrigMask14);
+   fChain->SetBranchAddress("L2PassedTrigMask15", &L2PassedTrigMask15, &b_L2PassedTrigMask15);
+   fChain->SetBranchAddress("L2PassedTrigMask16", &L2PassedTrigMask16, &b_L2PassedTrigMask16);
+   fChain->SetBranchAddress("L2PassedTrigMask17", &L2PassedTrigMask17, &b_L2PassedTrigMask17);
+   fChain->SetBranchAddress("L2PassedTrigMask18", &L2PassedTrigMask18, &b_L2PassedTrigMask18);
+   fChain->SetBranchAddress("L2PassedTrigMask19", &L2PassedTrigMask19, &b_L2PassedTrigMask19);
+   fChain->SetBranchAddress("L2PassedTrigMask2", &L2PassedTrigMask2, &b_L2PassedTrigMask2);
+   fChain->SetBranchAddress("L2PassedTrigMask20", &L2PassedTrigMask20, &b_L2PassedTrigMask20);
+   fChain->SetBranchAddress("L2PassedTrigMask21", &L2PassedTrigMask21, &b_L2PassedTrigMask21);
+   fChain->SetBranchAddress("L2PassedTrigMask22", &L2PassedTrigMask22, &b_L2PassedTrigMask22);
+   fChain->SetBranchAddress("L2PassedTrigMask23", &L2PassedTrigMask23, &b_L2PassedTrigMask23);
+   fChain->SetBranchAddress("L2PassedTrigMask24", &L2PassedTrigMask24, &b_L2PassedTrigMask24);
+   fChain->SetBranchAddress("L2PassedTrigMask25", &L2PassedTrigMask25, &b_L2PassedTrigMask25);
+   fChain->SetBranchAddress("L2PassedTrigMask26", &L2PassedTrigMask26, &b_L2PassedTrigMask26);
+   fChain->SetBranchAddress("L2PassedTrigMask27", &L2PassedTrigMask27, &b_L2PassedTrigMask27);
+   fChain->SetBranchAddress("L2PassedTrigMask28", &L2PassedTrigMask28, &b_L2PassedTrigMask28);
+   fChain->SetBranchAddress("L2PassedTrigMask29", &L2PassedTrigMask29, &b_L2PassedTrigMask29);
+   fChain->SetBranchAddress("L2PassedTrigMask3", &L2PassedTrigMask3, &b_L2PassedTrigMask3);
+   fChain->SetBranchAddress("L2PassedTrigMask30", &L2PassedTrigMask30, &b_L2PassedTrigMask30);
+   fChain->SetBranchAddress("L2PassedTrigMask31", &L2PassedTrigMask31, &b_L2PassedTrigMask31);
+   fChain->SetBranchAddress("L2PassedTrigMask4", &L2PassedTrigMask4, &b_L2PassedTrigMask4);
+   fChain->SetBranchAddress("L2PassedTrigMask5", &L2PassedTrigMask5, &b_L2PassedTrigMask5);
+   fChain->SetBranchAddress("L2PassedTrigMask6", &L2PassedTrigMask6, &b_L2PassedTrigMask6);
+   fChain->SetBranchAddress("L2PassedTrigMask7", &L2PassedTrigMask7, &b_L2PassedTrigMask7);
+   fChain->SetBranchAddress("L2PassedTrigMask8", &L2PassedTrigMask8, &b_L2PassedTrigMask8);
+   fChain->SetBranchAddress("L2PassedTrigMask9", &L2PassedTrigMask9, &b_L2PassedTrigMask9);
+   fChain->SetBranchAddress("Level1TriggerType", &Level1TriggerType, &b_Level1TriggerType);
+   fChain->SetBranchAddress("SFOStreams", &SFOStreams, &b_SFOStreams);
+   fChain->SetBranchAddress("TrackLead1D0", &TrackLead1D0, &b_TrackLead1D0);
+   fChain->SetBranchAddress("TrackLead1Z0", &TrackLead1Z0, &b_TrackLead1Z0);
+   fChain->SetBranchAddress("TrackLead1Phi0", &TrackLead1Phi0, &b_TrackLead1Phi0);
+   fChain->SetBranchAddress("TrackLead1Theta", &TrackLead1Theta, &b_TrackLead1Theta);
+   fChain->SetBranchAddress("TrackLead1QOverP", &TrackLead1QOverP, &b_TrackLead1QOverP);
+   fChain->SetBranchAddress("TrackLead2D0", &TrackLead2D0, &b_TrackLead2D0);
+   fChain->SetBranchAddress("TrackLead2Z0", &TrackLead2Z0, &b_TrackLead2Z0);
+   fChain->SetBranchAddress("TrackLead2Phi0", &TrackLead2Phi0, &b_TrackLead2Phi0);
+   fChain->SetBranchAddress("TrackLead2Theta", &TrackLead2Theta, &b_TrackLead2Theta);
+   fChain->SetBranchAddress("TrackLead2QOverP", &TrackLead2QOverP, &b_TrackLead2QOverP);
+   fChain->SetBranchAddress("CellEnergySum", &CellEnergySum, &b_CellEnergySum);
+   fChain->SetBranchAddress("CellEnergySumEMB", &CellEnergySumEMB, &b_CellEnergySumEMB);
+   fChain->SetBranchAddress("CellEnergySumEMEC", &CellEnergySumEMEC, &b_CellEnergySumEMEC);
+   fChain->SetBranchAddress("CellEnergySumHEC", &CellEnergySumHEC, &b_CellEnergySumHEC);
+   fChain->SetBranchAddress("CellEnergySumFCAL", &CellEnergySumFCAL, &b_CellEnergySumFCAL);
+   fChain->SetBranchAddress("CellEnergySumTile", &CellEnergySumTile, &b_CellEnergySumTile);
+   fChain->SetBranchAddress("ClusterEnergySum", &ClusterEnergySum, &b_ClusterEnergySum);
+   fChain->SetBranchAddress("TopoClusterEt1", &TopoClusterEt1, &b_TopoClusterEt1);
+   fChain->SetBranchAddress("TopoClusterEta1", &TopoClusterEta1, &b_TopoClusterEta1);
+   fChain->SetBranchAddress("TopoClusterPhi1", &TopoClusterPhi1, &b_TopoClusterPhi1);
+   fChain->SetBranchAddress("CellMissingET", &CellMissingET, &b_CellMissingET);
+   fChain->SetBranchAddress("CellMissingETPhi", &CellMissingETPhi, &b_CellMissingETPhi);
+   fChain->SetBranchAddress("MBTSTimeDiff", &MBTSTimeDiff, &b_MBTSTimeDiff);
+   fChain->SetBranchAddress("LArECTimeDiff", &LArECTimeDiff, &b_LArECTimeDiff);
+   fChain->SetBranchAddress("TRTEventPhase", &TRTEventPhase, &b_TRTEventPhase);
+   fChain->SetBranchAddress("NPixelTracks", &NPixelTracks, &b_NPixelTracks);
+   fChain->SetBranchAddress("NSCTTracks", &NSCTTracks, &b_NSCTTracks);
+   fChain->SetBranchAddress("NTRTTracks", &NTRTTracks, &b_NTRTTracks);
+   fChain->SetBranchAddress("NMooreSegs", &NMooreSegs, &b_NMooreSegs);
+   fChain->SetBranchAddress("NMboySegs", &NMboySegs, &b_NMboySegs);
+   fChain->SetBranchAddress("NInnerMboySegs", &NInnerMboySegs, &b_NInnerMboySegs);
+   fChain->SetBranchAddress("NHitsInnerMboySegs", &NHitsInnerMboySegs, &b_NHitsInnerMboySegs);
+   fChain->SetBranchAddress("NInnerMuSegs", &NInnerMuSegs, &b_NInnerMuSegs);
+   fChain->SetBranchAddress("NHitsInnerMooreSegs", &NHitsInnerMooreSegs, &b_NHitsInnerMooreSegs);
+   fChain->SetBranchAddress("NSCTSPs", &NSCTSPs, &b_NSCTSPs);
+   fChain->SetBranchAddress("NPixelSPs", &NPixelSPs, &b_NPixelSPs);
+   fChain->SetBranchAddress("NTRTDCs", &NTRTDCs, &b_NTRTDCs);
+   fChain->SetBranchAddress("NTRTHtDCs", &NTRTHtDCs, &b_NTRTHtDCs);
+   fChain->SetBranchAddress("NMDTHits", &NMDTHits, &b_NMDTHits);
+   fChain->SetBranchAddress("NRPCHits", &NRPCHits, &b_NRPCHits);
+   fChain->SetBranchAddress("NTGCHits", &NTGCHits, &b_NTGCHits);
+   fChain->SetBranchAddress("NCSCHits", &NCSCHits, &b_NCSCHits);
+   fChain->SetBranchAddress("NBCMHits", &NBCMHits, &b_NBCMHits);
+   fChain->SetBranchAddress("MBTSWord", &MBTSWord, &b_MBTSWord);
+   fChain->SetBranchAddress("BPhysWord", &BPhysWord, &b_BPhysWord);
+   fChain->SetBranchAddress("CombinedMuonWord", &CombinedMuonWord, &b_CombinedMuonWord);
+   fChain->SetBranchAddress("DAODStreams", &DAODStreams, &b_DAODStreams);
+   fChain->SetBranchAddress("DESDStreams", &DESDStreams, &b_DESDStreams);
+   fChain->SetBranchAddress("EgammaWord", &EgammaWord, &b_EgammaWord);
+   fChain->SetBranchAddress("ExoticWord", &ExoticWord, &b_ExoticWord);
+   fChain->SetBranchAddress("HeavyIonWord", &HeavyIonWord, &b_HeavyIonWord);
+   fChain->SetBranchAddress("HiggsWord", &HiggsWord, &b_HiggsWord);
+   fChain->SetBranchAddress("JetMissingETWord", &JetMissingETWord, &b_JetMissingETWord);
+   fChain->SetBranchAddress("JetTagWord", &JetTagWord, &b_JetTagWord);
+   fChain->SetBranchAddress("SMWord", &SMWord, &b_SMWord);
+   fChain->SetBranchAddress("SUSYWord", &SUSYWord, &b_SUSYWord);
+   fChain->SetBranchAddress("TauIdWord", &TauIdWord, &b_TauIdWord);
+   fChain->SetBranchAddress("TopWord", &TopWord, &b_TopWord);
+   Notify();
+}
+
+Bool_t HITAGprinter::Notify()
+{
+   // The Notify() function is called when a new file is opened. This
+   // can be either for a new TTree in a TChain or when a new TTree
+   // is started when using PROOF. It is normally not necessary to make changes
+   // to the generated code, but the routine can be extended by the
+   // user if needed. The return value is currently not used.
+
+   return kTRUE;
+}
+
+void HITAGprinter::Show(Long64_t entry)
+{
+// Print contents of entry.
+// If entry is not specified, print current entry
+   if (!fChain) return;
+   fChain->Show(entry);
+}
+Int_t HITAGprinter::Cut(Long64_t entry)
+{
+// This function may be called from Loop.
+// returns  1 if entry is accepted.
+// returns -1 otherwise.
+   return 1;
+}
+#endif // #ifdef HITAGprinter_cxx
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/HITAGprinter_run.C b/Event/EventOverlay/EventOverlayJobTransforms/scripts/HITAGprinter_run.C
new file mode 100644
index 0000000000000000000000000000000000000000..9478f99bf9758a85a70b094c7c8efdd47e5c779c
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/HITAGprinter_run.C
@@ -0,0 +1,14 @@
+/*
+  Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+*/
+
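+// Chain all HI TAG files and run HITAGprinter::Loop() over them; this is expected to
+// produce the HI_filter.txt / HI_vtx.txt outputs referenced in HITAGprinter_run.sh.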
+{
+
+  TChain c("POOLCollectionTree");
+  int n=0;
+  n=c.Add("/datadisk1/temp2/HITAG/*"); if (n<1) {printf("Added %d files!\n",n); return;} else {printf("Added %d files.\n",n);}
+  gROOT->ProcessLine(".L HITAGprinter.C");
+  HITAGprinter t(&c);
+  t.Loop();
+
+}
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/HITAGprinter_run.sh b/Event/EventOverlay/EventOverlayJobTransforms/scripts/HITAGprinter_run.sh
new file mode 100755
index 0000000000000000000000000000000000000000..4069acb1236022892befeed6551d65045ca79120
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/HITAGprinter_run.sh
@@ -0,0 +1,14 @@
+#!/bin/bash
+
+#mkdir /datadisk1/temp2/HITAG; cd /datadisk1/temp2/HITAG
+#set +o noglob
+#for f in ~/nfs3/zerobias_skim/data11_hi.*MinBiasOverlay*TAG*/*; do ln -s $f; done
+#cd -
+
+root -l -b -q HITAGprinter_run.C 
+
+#mkdir HI_files
+#mv HI_filter.txt HI_files/data11_hi.metadata.physics_MinBiasOverlay.filter.txt
+#mv HI_vtx.txt HI_files/data11_hi.metadata.physics_MinBiasOverlay.vtx.txt
+#dq2-put -L CERN-PROD_SCRATCHDISK -s HI_files group.dataprep.data11_hi.metadata.physics_MinBiasOverlay.test15
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/OverlayBS_tf.py b/Event/EventOverlay/EventOverlayJobTransforms/scripts/OverlayBS_tf.py
new file mode 100755
index 0000000000000000000000000000000000000000..3aaa7516e24b02a4256d731d001c0d4989dbb981
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/OverlayBS_tf.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+__doc__ = """Overlay simulated physics events into zero bias real data events; the real data events are in BS format."""
+
+import os.path
+import sys
+import time
+
+import logging
+
+# Setup core logging here
+from PyJobTransforms.trfLogger import msg
+msg.info('logging set in %s' % sys.argv[0])
+
+from PyJobTransforms.transform import transform
+from PyJobTransforms.trfExe import athenaExecutor
+from PyJobTransforms.trfArgs import addAthenaArguments, addDetectorArguments, addTriggerArguments
+from PyJobTransforms.trfDecorators import stdTrfExceptionHandler, sigUsrStackTrace
+
+import PyJobTransforms.trfArgClasses as trfArgClasses
+
+# Prodsys hack...
+ListOfDefaultPositionalKeys=['--AFPOn', '--ALFAOn', '--AMIConfig', '--AMITag', '--AddCaloDigi', '--DBRelease', '--FwdRegionOn', '--LucidOn', '--ReadByteStream', '--ZDCOn', '--asetup', '--athena', '--athenaopts', '--beamType', '--checkEventCount', '--command', '--conditionsTag', '--cscCondOverride', '--digiRndmSvc', '--digiSeedOffset1', '--digiSeedOffset2', '--doAllNoise', '--env', '--eventAcceptanceEfficiency', '--execOnly', '--fSampltag', '--geometryVersion', '--ignoreErrors', '--ignoreFiles', '--ignorePatterns', '--imf', '--inputHITSFile', '--inputPileUpBSFile', '--maxEvents', '--muonForceUse', '--orphanKiller', '--outputRDOFile', '--outputSignalRDOFile', '--postExec', '--postInclude', '--preExec', '--preInclude', '--reportName', '--reportType', '--runNumber', '--samplingFractionDbTag', '--showGraph', '--showPath', '--showSteps', '--skipEvents', '--skipFileValidation', '--skipInputFileValidation', '--skipOutputFileValidation', '--tcmalloc', '--tmpRDO', '--triggerConfig']
+
+
+@stdTrfExceptionHandler
+@sigUsrStackTrace
+
+def main():
+
+    msg.info('This is %s' % sys.argv[0])
+
+    trf = getTransform()
+    trf.parseCmdLineArgs(sys.argv[1:])
+    trf.execute()
+    trf.generateReport()
+
+    msg.info("%s stopped at %s, trf exit code %d" % (sys.argv[0], time.asctime(), trf.exitCode))
+    sys.exit(trf.exitCode)
+
+def getTransform():
+    executorSet = set()
+    from EventOverlayJobTransforms.overlayTransformUtils import addOverlay_BSSubstep, addOverlay_BSArguments
+    addOverlay_BSSubstep(executorSet)
+    trf = transform(executor = executorSet, description = 'ATLAS Overlay transform. Inputs must be HITS + BS. Outputs must be RDO.')
+    addAthenaArguments(trf.parser)
+    addDetectorArguments(trf.parser)
+    addTriggerArguments(trf.parser, addTrigFilter=False)
+    addOverlay_BSArguments(trf.parser)
+    return trf
+
+if __name__ == '__main__':
+    main()
+
+# old skeleton='EventOverlayJobTransforms/skeleton.commis.py'
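+# Example invocation (a sketch only, not a complete or validated argument set;
+# file names and tag values below are illustrative):
+#   OverlayBS_tf.py --inputHITSFile signal.HITS.pool.root --inputPileUpBSFile zerobias.RAW \
+#       --outputRDOFile overlay.RDO.pool.root --maxEvents 100 \
+#       --geometryVersion ATLAS-GEO-18-01-00 --conditionsTag COMCOND-BLKPST-005-08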
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/OverlayChain_tf.py b/Event/EventOverlay/EventOverlayJobTransforms/scripts/OverlayChain_tf.py
new file mode 100755
index 0000000000000000000000000000000000000000..12a27ecc768e2203755b80b3f13e45eb27840b3d
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/OverlayChain_tf.py
@@ -0,0 +1,59 @@
+#! /usr/bin/env python
+
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+## Overlay chain transform
+# @version $Id: OverlayChain_tf.py 529035 2012-12-05 15:45:24Z graemes $
+
+import sys
+import time
+
+import logging
+
+# Setup core logging here
+from PyJobTransforms.trfLogger import msg
+msg.info('logging set in %s' % sys.argv[0])
+
+from PyJobTransforms.transform import transform
+from PyJobTransforms.trfExe import athenaExecutor, DQMergeExecutor
+from PyJobTransforms.trfArgs import addAthenaArguments, addDetectorArguments
+from PyJobTransforms.trfDecorators import stdTrfExceptionHandler, sigUsrStackTrace
+
+import PyJobTransforms.trfArgClasses as trfArgClasses
+
+# Prodsys hack...
+ListOfDefaultPositionalKeys=['--AFPOn', '--ALFAOn', '--AMIConfig', '--AMITag', '--AddCaloDigi', '--CosmicFilterVolume', '--CosmicFilterVolume2', '--CosmicPtSlice', '--DBRelease', '--DataRunNumber', '--FwdRegionOn', '--LucidOn', '--ReadByteStream', '--ZDCOn', '--argJSON', '--asetup', '--athena', '--athenaMPMergeTargetSize', '--athenaopts', '--attempt', '--beamType', '--checkEventCount', '--command', '--conditionsTag', '--cscCondOverride', '--digiRndmSvc', '--digiSeedOffset1', '--digiSeedOffset2', '--digiSteeringConf', '--doAllNoise', '--dumpJSON', '--dumpPickle', '--enableLooperKiller', '--env', '--eventAcceptanceEfficiency', '--eventIdFile', '--execOnly', '--fSampltag', '--fileValidation', '--firstEvent', '--geometryVersion', '--ignoreErrors', '--ignoreFiles', '--ignorePatterns', '--imf', '--inputBSFile', '--inputBS_SKIMFile', '--inputEVNTFile', '--inputEVNT_CAVERNFile', '--inputEVNT_COSMICSFile', '--inputFileValidation', '--inputHITSFile', '--jobNumber', '--jobid', '--lumiBlockMapFile', '--maxEvents', '--maxFilesPerSubjob', '--muonForceUse', '--orphanKiller', '--outputBS_SKIMFile', '--outputEVNT_CAVERNTRFile', '--outputEVNT_COSMICSTRFile', '--outputFileValidation', '--outputHITSFile', '--outputRDOFile', '--outputRDO_FILTFile', '--outputRDO_SGNLFile', '--overlayConfigFile', '--parallelFileValidation', '--physicsList', '--postExec', '--postInclude', '--preExec', '--preInclude', '--randomSeed', '--reportName', '--reportType', '--runNumber', '--samplingFractionDbTag', '--showGraph', '--showPath', '--showSteps', '--simulator', '--skipEvents', '--skipFileValidation', '--skipInputFileValidation', '--skipOutputFileValidation', '--steering', '--taskid', '--tcmalloc', '--tmpRDO', '--tmpRDO_FILT', '--triggerBit', '--truthStrategy', '--useISF', '--valgrind', '--valgrindbasicopts', '--valgrindextraopts']
+
+@stdTrfExceptionHandler
+@sigUsrStackTrace
+def main():
+
+    msg.info('This is %s' % sys.argv[0])
+
+    trf = getTransform()
+    trf.parseCmdLineArgs(sys.argv[1:])
+    trf.execute()
+    trf.generateReport()
+
+    msg.info("%s stopped at %s, trf exit code %d" % (sys.argv[0], time.asctime(), trf.exitCode))
+    sys.exit(trf.exitCode)
+
+def getTransform():
+    executorSet = set()
+    from EventOverlayJobTransforms.overlayTransformUtils import addOverlayBSFilterSubstep, addOverlay_BSSubstep, addOverlayBSFilterArguments, addOverlay_BSArguments, addOverlayChainOverrideArguments
+    from SimuJobTransforms.SimTransformUtils import addSimulationSubstep, addSimulationArguments
+    addOverlayBSFilterSubstep(executorSet)
+    addSimulationSubstep(executorSet, overlayTransform = True)
+    addOverlay_BSSubstep(executorSet)
+    trf = transform(executor = executorSet, description = 'Full Overlay Chain')
+    addOverlayChainOverrideArguments(trf.parser)
+    addAthenaArguments(trf.parser, maxEventsDefaultSubstep='all')
+    addDetectorArguments(trf.parser)
+    addOverlayBSFilterArguments(trf.parser)
+    addSimulationArguments(trf.parser)
+    addOverlay_BSArguments(trf.parser)
+
+    return trf
+
+if __name__ == '__main__':
+    main()
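+# Example invocation (a sketch only, not a complete or validated argument set; the chain runs
+# BS filtering, simulation and BS overlay in sequence; file names and values are illustrative):
+#   OverlayChain_tf.py --inputBSFile zerobias.RAW --inputEVNTFile signal.EVNT.pool.root \
+#       --outputHITSFile signal.HITS.pool.root --outputRDOFile overlay.RDO.pool.root \
+#       --triggerBit 240 --maxEvents 100 --randomSeed 123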
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/OverlayPool_tf.py b/Event/EventOverlay/EventOverlayJobTransforms/scripts/OverlayPool_tf.py
new file mode 100755
index 0000000000000000000000000000000000000000..5945e0a991c894c8f646a3be084647e71c85a739
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/OverlayPool_tf.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+__doc__ = """Overlay simulated physics events into zero bias real data events; the real data events are in POOL format."""
+
+import os.path
+import sys
+import time
+
+import logging
+
+# Setup core logging here
+from PyJobTransforms.trfLogger import msg
+msg.info('logging set in %s' % sys.argv[0])
+
+from PyJobTransforms.transform import transform
+from PyJobTransforms.trfExe import athenaExecutor
+from PyJobTransforms.trfArgs import addAthenaArguments, addDetectorArguments, addTriggerArguments
+from PyJobTransforms.trfDecorators import stdTrfExceptionHandler, sigUsrStackTrace
+
+import PyJobTransforms.trfArgClasses as trfArgClasses
+
+# Prodsys hack...
+ListOfDefaultPositionalKeys=['--AFPOn', '--ALFAOn', '--AMIConfig', '--AMITag', '--AddCaloDigi', '--DBRelease', '--FwdRegionOn', '--LucidOn', '--ReadByteStream', '--ZDCOn', '--asetup', '--athena', '--athenaopts', '--beamType', '--checkEventCount', '--command', '--conditionsTag', '--cscCondOverride', '--digiRndmSvc', '--digiSeedOffset1', '--digiSeedOffset2', '--doAllNoise', '--env', '--eventAcceptanceEfficiency', '--execOnly', '--fSampltag', '--geometryVersion', '--ignoreErrors', '--ignoreFiles', '--ignorePatterns', '--imf', '--inputHITSFile', '--inputPileUpRDOFile', '--maxEvents', '--muonForceUse', '--orphanKiller', '--outputRDOFile', '--outputSignalRDOFile', '--postExec', '--postInclude', '--preExec', '--preInclude', '--reportName', '--reportType', '--runNumber', '--samplingFractionDbTag', '--showGraph', '--showPath', '--showSteps', '--skipEvents', '--skipFileValidation', '--skipInputFileValidation', '--skipOutputFileValidation', '--tcmalloc', '--tmpRDO', '--triggerConfig']
+
+@stdTrfExceptionHandler
+@sigUsrStackTrace
+
+def main():
+
+    msg.info('This is %s' % sys.argv[0])
+
+    trf = getTransform()
+    trf.parseCmdLineArgs(sys.argv[1:])
+    trf.execute()
+    trf.generateReport()
+
+    msg.info("%s stopped at %s, trf exit code %d" % (sys.argv[0], time.asctime(), trf.exitCode))
+    sys.exit(trf.exitCode)
+
+def getTransform():
+    executorSet = set()
+    from EventOverlayJobTransforms.overlayTransformUtils import addOverlay_PoolSubstep, addOverlay_PoolArguments
+    addOverlay_PoolSubstep(executorSet)
+    trf = transform(executor = executorSet, description = 'ATLAS Overlay transform. Inputs must be HITS. Outputs must be RDO.')
+    addAthenaArguments(trf.parser)
+    addDetectorArguments(trf.parser)
+    addTriggerArguments(trf.parser, addTrigFilter=False)
+    addOverlay_PoolArguments(trf.parser)
+    return trf
+
+if __name__ == '__main__':
+    main()
+
+# old skeleton='EventOverlayJobTransforms/skeleton.overlay.py'
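+# Example invocation (a sketch only, not a complete or validated argument set; file names are illustrative):
+#   OverlayPool_tf.py --inputHITSFile signal.HITS.pool.root --inputPileUpRDOFile pileup.RDO.pool.root \
+#       --outputRDOFile overlay.RDO.pool.root --maxEvents 100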
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/check_merge.sh b/Event/EventOverlay/EventOverlayJobTransforms/scripts/check_merge.sh
new file mode 100755
index 0000000000000000000000000000000000000000..fcfcb273bcb1e57044c278233c77ae6926396780
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/check_merge.sh
@@ -0,0 +1,41 @@
+#!/bin/bash
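+
+# Check the output of the zerobias merge jobs for one stream: fetch the job log tarballs with
+# dq2-get, drop failed jobs, and count the events and files written.
+# Illustrative example (argument values are assumptions based on the dataset patterns below):
+#   check_merge.sh test9 7 8    # inspects group.dataprep.BSmultipleselector.GRL.test9_EXT7.merge.*.test8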
+
+if [ $# -lt 3 ]
+then
+  echo "Usage example: `basename $0` test stream outset <skipdq2>"
+  exit 65
+fi
+
+testn=$1
+stream=$2
+outset=$3
+skipdq2=$4
+
+mkdir -p ~/nfs3/zerobias_skim_${testn}/merge/
+cd ~/nfs3/zerobias_skim_${testn}/merge/
+
+if [ "$skipdq2" != "" ]; then echo "skipping dq2"; else dq2-get -f "*tgz*" group.dataprep.BSmultipleselector.GRL.${testn}_EXT${stream}.merge.*.test${outset}/ ; fi
+rm -rf tarball_PandaJob_*
+for f in `ls group.dataprep.BSmultipleselector.GRL.${testn}_EXT${stream}.merge.*.test${outset}.*/*.tgz* `; do echo $f; tar xzf $f; done
+
+for p in `ls -d tarball_PandaJob_* `; do
+ grep "Job successfully completed" ${p}/pilotlog.txt  > /dev/null
+ if [ $? -eq 0 ]; then echo "Good job"; 
+ else 
+  echo "Failed job";
+  rm -rf $p
+ fi
+done
+
+echo -n "total events: "
+grep "number of events written" tarball_PandaJob_*/athena_stdout.txt | cut -d ':' -f 3 | awk '{total = total + $1}END{print total}'
+
+echo -n "total files: "
+grep "number of events written" tarball_PandaJob_*/athena_stdout.txt | grep -c number
+
+echo -n "files with 100 events: "
+grep "number of events written" tarball_PandaJob_*/athena_stdout.txt | grep -c "100"
+
+echo -n "total jobs: "
+ls tarball_PandaJob_*/athena_stdout.txt | grep -c athena
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/checkforemptydatafile.sh b/Event/EventOverlay/EventOverlayJobTransforms/scripts/checkforemptydatafile.sh
new file mode 100755
index 0000000000000000000000000000000000000000..24f4074f048465874459e073cf6aab4c58bc8a68
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/checkforemptydatafile.sh
@@ -0,0 +1,38 @@
+#!/bin/bash
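+
+# Look through the merged overlay containers for problematic (empty) Zerobias data files
+# and delete them from their datasets with dq2-delete-files.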
+
+if [ $# -lt 3 ]
+then
+  echo "Usage example: `basename $0` test (test12p4) stream (\"19 24 45\") outset (3) "
+  exit 65
+fi
+
+testn=$1
+stream=$2
+outset=$3
+
+rm -f progress77.txt ; touch progress77.txt
+
+for s in {0..9}; do echo "EXT${s}"
+ for d in `dq2-list-datasets-container group.dataprep.data12_hip.HImerge.physics_MinBiasOverlay_EXT${s}.test${outset}/`; do
+
+#for s in ${stream}; do echo "EXT${s}"; #pass in the list of bad EXT numbers from above step, i.e. "10 42 44"
+# for d in `dq2-list-datasets-container group.dataprep.BSmultipleselector.GRL.${testn}_EXT${s}.merge.test${outset}/`; do
+
+  dq2-ls -f -H $d |grep Zerobias > temp192.txt;
+  cat temp192.txt >> progress77.txt
+  grep "bytes" temp192.txt > temp193.txt
+  if [ $? -eq 0 ]; then echo "Dataset $d has "; cat temp193.txt; 
+   echo
+   cat temp193.txt | cut -f 3
+   echo
+   for gg in `cat temp193.txt | cut -f 3`; do 
+    echo "Deleting file with guid $gg from dataset $d"
+    dq2-delete-files $d $gg
+   done
+  fi
+
+ done
+ rm temp*.txt
+done
+rm progress77.txt
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/makeoutputclasses.sh b/Event/EventOverlay/EventOverlayJobTransforms/scripts/makeoutputclasses.sh
new file mode 100755
index 0000000000000000000000000000000000000000..78ec24adda903703016bfbebabc245f800918893
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/makeoutputclasses.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
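+# Regenerate outputclasses.py: instantiate outputclass_template.txt for indices 5..49,
+# replacing the XYZ placeholder with each index.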
+rm outputclasses.py
+for (( i=5; i<50; ++i )); do echo $i ; 
+cat outputclass_template.txt | sed "s%XYZ%${i}%g" >> outputclasses.py
+done
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/outputclass_template.txt b/Event/EventOverlay/EventOverlayJobTransforms/scripts/outputclass_template.txt
new file mode 100644
index 0000000000000000000000000000000000000000..1090af48cd0656d577ab359a29ed566fa62bd999
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/outputclass_template.txt
@@ -0,0 +1,8 @@
+class OutputBSFilterFileArgXYZ(StringArg):
+        """The output file prefixXYZ"""
+        def __init__(self,help='default',name='OutputBSFilterFileXYZ'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/outputclasses.py b/Event/EventOverlay/EventOverlayJobTransforms/scripts/outputclasses.py
new file mode 100644
index 0000000000000000000000000000000000000000..d8a5f483abcaa3bb8025f3220b2f87d6e7e2c9c6
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/outputclasses.py
@@ -0,0 +1,362 @@
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
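+# Generated by scripts/makeoutputclasses.sh from outputclass_template.txt:
+# one OutputBSFilterFileArgN class (N = 5..49), each defining an OutputBSFilterFileN string argument.
+# Note that StringArg is assumed to be in scope wherever these classes are used; it is not imported here.
+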
+class OutputBSFilterFileArg5(StringArg):
+        """The output file prefix5"""
+        def __init__(self,help='default',name='OutputBSFilterFile5'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg6(StringArg):
+        """The output file prefix6"""
+        def __init__(self,help='default',name='OutputBSFilterFile6'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg7(StringArg):
+        """The output file prefix7"""
+        def __init__(self,help='default',name='OutputBSFilterFile7'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg8(StringArg):
+        """The output file prefix8"""
+        def __init__(self,help='default',name='OutputBSFilterFile8'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg9(StringArg):
+        """The output file prefix9"""
+        def __init__(self,help='default',name='OutputBSFilterFile9'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg10(StringArg):
+        """The output file prefix10"""
+        def __init__(self,help='default',name='OutputBSFilterFile10'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg11(StringArg):
+        """The output file prefix11"""
+        def __init__(self,help='default',name='OutputBSFilterFile11'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg12(StringArg):
+        """The output file prefix12"""
+        def __init__(self,help='default',name='OutputBSFilterFile12'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg13(StringArg):
+        """The output file prefix13"""
+        def __init__(self,help='default',name='OutputBSFilterFile13'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg14(StringArg):
+        """The output file prefix14"""
+        def __init__(self,help='default',name='OutputBSFilterFile14'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg15(StringArg):
+        """The output file prefix15"""
+        def __init__(self,help='default',name='OutputBSFilterFile15'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg16(StringArg):
+        """The output file prefix16"""
+        def __init__(self,help='default',name='OutputBSFilterFile16'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg17(StringArg):
+        """The output file prefix17"""
+        def __init__(self,help='default',name='OutputBSFilterFile17'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg18(StringArg):
+        """The output file prefix18"""
+        def __init__(self,help='default',name='OutputBSFilterFile18'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg19(StringArg):
+        """The output file prefix19"""
+        def __init__(self,help='default',name='OutputBSFilterFile19'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg20(StringArg):
+        """The output file prefix20"""
+        def __init__(self,help='default',name='OutputBSFilterFile20'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg21(StringArg):
+        """The output file prefix21"""
+        def __init__(self,help='default',name='OutputBSFilterFile21'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg22(StringArg):
+        """The output file prefix22"""
+        def __init__(self,help='default',name='OutputBSFilterFile22'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg23(StringArg):
+        """The output file prefix23"""
+        def __init__(self,help='default',name='OutputBSFilterFile23'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg24(StringArg):
+        """The output file prefix24"""
+        def __init__(self,help='default',name='OutputBSFilterFile24'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg25(StringArg):
+        """The output file prefix25"""
+        def __init__(self,help='default',name='OutputBSFilterFile25'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg26(StringArg):
+        """The output file prefix26"""
+        def __init__(self,help='default',name='OutputBSFilterFile26'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg27(StringArg):
+        """The output file prefix27"""
+        def __init__(self,help='default',name='OutputBSFilterFile27'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg28(StringArg):
+        """The output file prefix28"""
+        def __init__(self,help='default',name='OutputBSFilterFile28'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg29(StringArg):
+        """The output file prefix29"""
+        def __init__(self,help='default',name='OutputBSFilterFile29'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg30(StringArg):
+        """The output file prefix30"""
+        def __init__(self,help='default',name='OutputBSFilterFile30'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg31(StringArg):
+        """The output file prefix31"""
+        def __init__(self,help='default',name='OutputBSFilterFile31'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg32(StringArg):
+        """The output file prefix32"""
+        def __init__(self,help='default',name='OutputBSFilterFile32'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg33(StringArg):
+        """The output file prefix33"""
+        def __init__(self,help='default',name='OutputBSFilterFile33'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg34(StringArg):
+        """The output file prefix34"""
+        def __init__(self,help='default',name='OutputBSFilterFile34'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg35(StringArg):
+        """The output file prefix35"""
+        def __init__(self,help='default',name='OutputBSFilterFile35'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg36(StringArg):
+        """The output file prefix36"""
+        def __init__(self,help='default',name='OutputBSFilterFile36'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg37(StringArg):
+        """The output file prefix37"""
+        def __init__(self,help='default',name='OutputBSFilterFile37'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg38(StringArg):
+        """The output file prefix38"""
+        def __init__(self,help='default',name='OutputBSFilterFile38'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg39(StringArg):
+        """The output file prefix39"""
+        def __init__(self,help='default',name='OutputBSFilterFile39'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg40(StringArg):
+        """The output file prefix40"""
+        def __init__(self,help='default',name='OutputBSFilterFile40'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg41(StringArg):
+        """The output file prefix41"""
+        def __init__(self,help='default',name='OutputBSFilterFile41'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg42(StringArg):
+        """The output file prefix42"""
+        def __init__(self,help='default',name='OutputBSFilterFile42'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg43(StringArg):
+        """The output file prefix43"""
+        def __init__(self,help='default',name='OutputBSFilterFile43'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg44(StringArg):
+        """The output file prefix44"""
+        def __init__(self,help='default',name='OutputBSFilterFile44'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg45(StringArg):
+        """The output file prefix45"""
+        def __init__(self,help='default',name='OutputBSFilterFile45'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg46(StringArg):
+        """The output file prefix46"""
+        def __init__(self,help='default',name='OutputBSFilterFile46'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg47(StringArg):
+        """The output file prefix47"""
+        def __init__(self,help='default',name='OutputBSFilterFile47'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg48(StringArg):
+        """The output file prefix48"""
+        def __init__(self,help='default',name='OutputBSFilterFile48'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
+class OutputBSFilterFileArg49(StringArg):
+        """The output file prefix49"""
+        def __init__(self,help='default',name='OutputBSFilterFile49'):
+            StringArg.__init__(self,help,name)
+        
+        def isFullArgument(self):
+            return True
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/overlay_bs_trf.py b/Event/EventOverlay/EventOverlayJobTransforms/scripts/overlay_bs_trf.py
new file mode 100755
index 0000000000000000000000000000000000000000..2ce27c6c22d5145898ca9544fdfd19444ee99457
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/overlay_bs_trf.py
@@ -0,0 +1,99 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+__doc__ = """Overlay simulated physics events into zero bias real data events; the real data events are in Byte Stream format."""
+
+from PyJobTransformsCore.trf import *
+from PyJobTransformsCore.full_trfarg import *
+from PyJobTransformsCore.trfutil import *
+
+from EventOverlayJobTransforms.OverlayConfig import overlayConfig
+from EventOverlayJobTransforms.Configuration import DigiRndmSvcArg
+from EventOverlayJobTransforms.Configuration import SamplingFractionDbTagArg
+from EventOverlayJobTransforms.Configuration import NoiseControlArg
+from EventOverlayJobTransforms.Configuration import AddCaloDigiArg
+
+ListOfDefaultPositionalKeys = ['inputHitsFile', 'pileupBSFile', 'signalRDOFile', 'outputRDOFile', 'maxEvents', 'skipEvents', 'geometryVersion', 'digiSeedOffset1', 'digiSeedOffset2', 'conditionsTag', 'digiRndmSvc', 'samplingFractionDbTag', 'fSampltag', 'triggerConfig',  'DBRelease', 'jobConfig', 'preExec', 'postInclude', 'postExec', '--extraignorefilters']
+
+class PostIncludeArg(JobOptionsArg):
+    """Joboptions file with user settings, to run after the job itself"""
+    def __init__(self,help='default',package='',name='default'):
+        # split comma separated string into list
+        if type(package) == str: package = package.split(',')
+        # always add 'EventOverlayJobTransforms' package (which contains the common postInclude files)
+        commonPack = 'EventOverlayJobTransforms'
+        if commonPack not in package: package.append(commonPack)
+        JobOptionsArg.__init__(self,help=help,package=package,name=name)
+        self.__config = None
+
+    def isFullArgument(self):
+        return True
+
+class CscCondOverrideArg(BoolArg):
+    """A hack to override CSC conditions folder"""
+    def __init__(self,help,name='cscCondOverride'):
+        BoolArg.__init__(self,help,name)
+
+    def isFullArgument(self):
+        return True
+
+class fSampltagArg(StringArg):
+    """The cool tag for /LAR/ElecCalib/fSampl/Symmetry, see https://twiki.cern.ch/twiki/bin/viewauth/Atlas/LArCalibMCPoolCool"""
+    def __init__(self,help='default',name='fSampltag'):
+        StringArg.__init__(self,help,name)
+        
+    def isFullArgument(self):
+        return True
+
+class preExecArg(StringArg):
+    """Can add python statements to be executed early on"""
+    def __init__(self,help='default',name='preExec'):
+        StringArg.__init__(self,help,name)
+    def isFullArgument(self):
+        return True
+
+class postExecArg(StringArg):
+    """Can add python statements to be executed later on"""
+    def __init__(self,help='default',name='postExec'):
+        StringArg.__init__(self,help,name)
+    def isFullArgument(self):
+        return True
+
+class OverlayJobTransform( JobTransform ):
+    def __init__(self):
+        JobTransform.__init__(self,
+                              authors = [ Author('Ketevi A. Assamagan', 'ketevi.adikle.assamagan@cern.ch') ] ,
+                              skeleton='EventOverlayJobTransforms/skeleton.commis.py' ,
+                              help = __doc__,
+                              config=overlayConfig )
+
+        #add arguments
+        self.add( InputHitsFileArg() )
+        self.add( InputBSFileArg(name='pileupBSFile',  help='input RAW BS for pileup overlay') )
+        self.add( OutputRDOFileArg() )
+        self.add( MaxEventsArg() )
+        self.add( SkipEventsArg() )
+        self.add( JobConfigArg(package='EventOverlayJobTransforms') )
+        self.add( GeometryVersionArg() )
+        self.add( RandomSeedArg(name = 'digiSeedOffset1', help = 'random seed offset for digitization') )
+        self.add( RandomSeedArg(name = 'digiSeedOffset2', help = 'random seed offset for digitization') )
+        self.add( ConditionsTagArg() )
+        self.add( SamplingFractionDbTagArg( ['QGSP_BERT', 'QGSP_BERT_BIRK', 'QGSP_EMV', 'QGSP', 'QGSP_BERT_EMV', 'FTFP_BERT'] ) )
+        self.add( fSampltagArg() )
+        self.add( DigiRndmSvcArg( ['AtRndmGenSvc','AtRanluxGenSvc','AtDSFMTGenSvc'] ), default='AtRanluxGenSvc' )
+        self.add( OutputRDOFileArg(name='signalRDOFile', help='the RDO file of the MC signal alone'), default='NONE' )
+        self.add( TriggerConfigArg(), default='NONE' )
+        self.add( DBReleaseArg(), default='NONE' )
+        self.add( PostIncludeArg(), default='NONE' )
+        self.add( preExecArg(), default='NONE' )
+        self.add( postExecArg(), default='NONE' ) 
+
+        # avoid sqlite nfs lock problems
+        self.add( SQLiteSupport() )
+
+# execute it if not imported
+if __name__ == '__main__':
+    trf = OverlayJobTransform()
+    sys.exit(trf.exeSysArgs().exitCode())
+
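+# Example invocation (a sketch only, with old-style key=value arguments; values are illustrative,
+# adapted from the grid driver in scripts/overlay_grid.sh):
+#   overlay_bs_trf.py inputHitsFile=signal.HITS.pool.root pileupBSFile=zerobias.RAW outputRDOFile=overlay.RDO.pool.root \
+#       maxEvents=100 skipEvents=0 geometryVersion=ATLAS-GEO-18-01-00 conditionsTag=COMCOND-BLKPST-005-08 \
+#       digiSeedOffset1=211 digiSeedOffset2=122 samplingFractionDbTag=QGSP_BERT fSampltag=LARElecCalibMCfSampl-CSC02-D-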
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/overlay_grid.sh b/Event/EventOverlay/EventOverlayJobTransforms/scripts/overlay_grid.sh
new file mode 100755
index 0000000000000000000000000000000000000000..625a58f3361230a98720185e16d723902cc015e7
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/overlay_grid.sh
@@ -0,0 +1,128 @@
+#!/bin/bash
+
+#The evgen dataset should have files with 5000 events each
+#Make sure it's local using: dq2-list-dataset-site -n mc*EVNT* SLACXRD_DATADISK (or LOCALGROUPDISK ... )
+#process="mu100";evgen="user09.AndrewHaas477621.${process}.EVGEN.test1"
+#process="nu10";evgen="user09.AndrewHaas477621.${process}.EVGEN.v1";
+#project="mc11";process="J4";evgen="${project}_7TeV.105013.${process}_pythia_jetjet.evgen.EVNT.e815";slash="/"
+#HI evnt below
+#project="mc10";process="J3";evgen="${project}_2TeV.105012.${process}_pythia_jetjet.evgen.EVNT.e733";slash="/"
+#project="mc10";process="J4";evgen="${project}_2TeV.105013.${process}_pythia_jetjet.evgen.EVNT.e733";slash="/"
+#project="mc10";process="Pythia_directJpsie3e3";evgen="${project}_2TeV.105751.${process}.evgen.EVNT.*";slash="/"
+#project="mc10";process="PythiaPhotonJet_Unbinned35";evgen="${project}_2TeV.108081.${process}.evgen.EVNT.e890";slash="/"
+project="mc10";process="PythiaPhotonJet_Unbinned35";evgen="${project}_2TeV.108081.${process}.evgen.EVNT.e890";slash="/"
+#project="mc11";process="PythiaZeeJet_0Ptcut";evgen="${project}_2TeV.119104.${process}.evgen.EVNT.e1046";slash="/"
+
+#The zerobias dataset should have 500 files with 100 RAW zerobias events each = 50000 events total
+#run="180241";zerobias="user09.AndrewHaas477621.zerobiasDS.${run}.RAW.test3";
+#stream="7";run="2011";zerobias="user.haastyle.BSmultipleselector.GRL.test9_EXT${stream}.merge.test8/";
+#run="193211";zerobias="data11_hi.00${run}.physics_MinBiasOverlay.merge.RAW";
+run="193825";zerobias="user.haastyle.HImerge.193825p.90308.test12/"; #for small tests
+#run="193825";zerobias="user.haastyle.HImerge.${run}p.test12/"; #merged 193825,193826
+
+#conditions
+#https://twiki.cern.ch/twiki/bin/viewauth/Atlas/CoolProdTags
+dco="COMCOND-BLKPST-005-08"
+mcco1="OFLCOND-SIM-BS7T-02"
+mcco2="OFLCOND-DR-BS7T-ANom-11"
+#geo="ATLAS-GEO-16-00-00"
+geo="ATLAS-GEO-18-01-00"
+
+#Can use the build job of a previous run
+#lib="--libDS "
+
+#Override vertex position, for heavy-ion
+doG4vertexoverride=true
+
+#The number of 100-event blocks of evgen events to skip in each job; usually runs from 0 to <50
+for (( i=0; i<10; i+=1 )) ; do
+inputevgenfile=$i
+echo
+echo "inputevgenfile is $inputevgenfile "
+
+#The output dataset to create with the HITS and RDO files
+#output="user.haastyle.overlaygrid. "
+output="group.dataprep.overlaygrid.${process}.${inputevgenfile}.${run}_${stream}.geo18_MC12.test3"
+
+#skipfiles is the number of input zerobias files to use (jobs to run) for each round of input evgen files
+#can't be bigger than the number of evgen files in the input dataset, but should be 50 if possible
+skipfiles=10; #50 if possible
+
+#events to do
+testr=$skipfiles; teste=100; # $skipfiles blocks of 100 events each (for inputfile 1-10..., so total of 50*100*10=50000 events, for skipfiles=50)
+#testr=2; teste=2; let i=1000000 ; #to run just a small test
+
+########################################
+
+ffile="%IN"
+filterstring="TriggerBit=240"
+if [ "$doG4vertexoverride" == true ]; then 
+  extraG4preinclude=",EventOverlayJobTransforms/vertex_override.py" ; 
+  extfile="--extFile vtx.txt,filter.txt"
+  filterstring="filterfile=filter.txt"
+
+  mcco1="OFLCOND-SDR-BS7T-05-25-HI"
+  mcco2="OFLCOND-SDR-BS7T-05-25-HI"
+
+  #filter events
+  ffile="simplefiltered.RAW"; DoFilter="BSFilter_trf.py inputBSFile=%IN OutputBSFilterFile=$ffile EventIdFile=events_orig.txt $filterstring skipEvents=%SKIPEVENTS ; "
+fi
+
+#The number of input evgen files for each zerobias file
+#If this is 1, then you can do 50 blocks of 100 evgen events before running out (assuming 5000 events/evgenfile), 
+# i.e. inputevgenfile could go up to <50, and you could do 250k events total.
+#Basically, you should set this to (Nfiles in EVNT dataset / 50), assuming again 5000 events/evgenfile.
+ninputmin=1
+
+#for skipping the right number of input RAW files
+let skipfiles*=inputevgenfile; 
+
+#nfilesevgen=`python /afs/slac.stanford.edu/g/atlas/c/AtlasCore/16.6.7/Database/Bookkeeping/AMIClients/pyAMI/python/pyAMI.py ListDataset logicalDatasetName=$evgen project=$project processingStep=production | grep NFILES`
+
+echo "Will use evgen dataset $evgen$slash $nfilesevgen and do up to $teste events per job"
+echo "Will use $testr files of zerobias dataset $zerobias from run ${run}, skipping the first $skipfiles"
+echo "Will create output dataset $output"
+if [ -z "$lib" ]; then echo "Building library"; else echo "Using library $lib"; fi
+
+let inputevgenfile*=100
+let maxinputevgenfile=$ninputmin*5000-100
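+#e.g. for i=3 with ninputmin=1: skip 300 evgen events per job, out of a maximum of 1*5000-100=4900 (illustrative values)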
+if [ "$process" == "nu10" ]; then let inputevgenfile=0; fi; #reuse the same neutrino events, since there's only 5k, and they're all the same
+echo "Will skip $inputevgenfile (out of $maxinputevgenfile) evgen events in each job"
+echo "extraG4preinclude is $extraG4preinclude and extfile is $extfile and filterstring is $filterstring "
+echo
+
+#AtlCoolConsole.py "COOLONL_LAR/COMP200;readoracle"
+#listtags /LAR/ElecCalib/fSampl/Symmetry
+fsamp="fSampltag=LARElecCalibMCfSampl-CSC02-D-"
+
+#the RTT test/ directory messes up grid jobs, so remove it and swap in the grid requirements file
+rm -rf Event/EventOverlay/EventOverlayJobTransforms/test/
+cp Event/EventOverlay/EventOverlayJobTransforms/cmt/grid.requirements Event/EventOverlay/EventOverlayJobTransforms/cmt/requirements
+
+#verbose: preInclude="SimulationJobOptions/preInclude.VerboseVertexPositioner.py,"
+
+DoEvents="BSFilter_trf.py inputBSFile=$ffile OutputBSFilterFile=simpledummy.RAW EventIdFile=events.txt TriggerBit=5 skipEvents=0 maxEvents=$teste"
+
+MCcond="; AtlasG4_trf.py --omitvalidation=ALL inputEvgenFile=%MININ outputHitsFile=%OUT.MCcond.HITS.pool.root maxEvents=100 skipEvents=${inputevgenfile} randomSeed=%RNDM:${inputevgenfile} geometryVersion=$geo conditionsTag=$mcco1 physicsList=QGSP_BERT preInclude='EventOverlayJobTransforms/g4runnumber2.py${extraG4preinclude}' postInclude='EventOverlayJobTransforms/UseOracle.py' "
+MCcond="${MCcond}; Digi_trf.py --omitvalidation=ALL inputHitsFile=%OUT.MCcond.HITS.pool.root outputRDOFile=%OUT.MCcond.RDO.pool.root maxEvents=$teste skipEvents=0 geometryVersion=$geo digiSeedOffset1=211 digiSeedOffset2=122 conditionsTag=$mcco2 samplingFractionDbTag=QGSP_BERT postInclude='EventOverlayJobTransforms/UseOracle.py' "
+
+Datacond="; AtlasG4_trf.py --omitvalidation=ALL runNumber=$run inputEvgenFile=%MININ outputHitsFile=%OUT.HITS.pool.root maxEvents=100 skipEvents=${inputevgenfile} randomSeed=%RNDM:${inputevgenfile} geometryVersion=$geo conditionsTag=$dco physicsList=QGSP_BERT preInclude='EventOverlayJobTransforms/custom.py,EventOverlayJobTransforms/magfield.py,EventOverlayJobTransforms/g4runnumber.py${extraG4preinclude}' postInclude='EventOverlayJobTransforms/UseOracle.py,EventOverlayJobTransforms/Rt_override.py,EventOverlayJobTransforms/muAlign.py' "
+Datacond="${Datacond}; overlay_bs_trf.py --omitvalidation=ALL inputHitsFile=%OUT.HITS.pool.root outputRDOFile=%OUT.RDO.pool.root pileupBSFile=$ffile maxEvents=$teste skipEvents=0 geometryVersion=$geo digiSeedOffset1=211 digiSeedOffset2=122 conditionsTag=$dco jobConfig='[EventOverlayJobTransforms/UseOracle.py,EventOverlayJobTransforms/custom.py]' samplingFractionDbTag=QGSP_BERT $fsamp postInclude='EventOverlayJobTransforms/Rt_override.py' "
+
+MCdigidata="; Digi_trf.py --omitvalidation=ALL inputHitsFile=%OUT.HITS.pool.root outputRDOFile=%OUT.MCconddigi.RDO.pool.root maxEvents=$teste skipEvents=0 geometryVersion=$geo digiSeedOffset1=211 digiSeedOffset2=122 conditionsTag=$mcco2 samplingFractionDbTag=QGSP_BERT postInclude='EventOverlayJobTransforms/UseOracle.py' "
+
+tmp6dir="dummy"
+if [ -e /scratch/ahaas ]; then tmp6dir="/scratch/ahaas/"; fi
+if [ -e /tmp/ahaas ]; then tmp6dir="/tmp/ahaas/"; fi
+
+ddo="--dbRelease ddo.000001.Atlas.Ideal.DBRelease.v170601:DBRelease-17.6.1.tar.gz"
+
+echo "pathena --trf \" $DoFilter $DoEvents $MCcond $Datacond $MCdigidata \" --individualOutDS --tmpDir $tmp6dir --inDS $zerobias --inputType RAW --minDS $evgen$slash --nMin $ninputmin --outDS $output --nFiles $testr --nSkipFiles $skipfiles --extOutFile \"events*.txt\" $ddo $extfile $lib --nFilesPerJob 1 --memory=3000 --skipScan --official \"--voms=atlas:/atlas/dataprep/Role=production\" " 
+pathena --trf "$DoFilter $DoEvents $MCcond $Datacond $MCdigidata " --individualOutDS --tmpDir $tmp6dir --inDS $zerobias --inputType RAW --minDS $evgen$slash --nMin $ninputmin --outDS $output --nFiles $testr --nSkipFiles $skipfiles --extOutFile "events*.txt" $ddo $extfile $lib --nFilesPerJob 1 --memory=3000 --skipScan --official "--voms=atlas:/atlas/dataprep/Role=production" 
+#--noSubmit
+#--nEventsPerJob 100 --nEventsPerFile 100
+#--excludedSite ANALY_SLAC 
+#--extOutFile "athfile*.log.txt" 
+#--site ANALY_SLAC
+
+done
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/overlay_pool_trf.py b/Event/EventOverlay/EventOverlayJobTransforms/scripts/overlay_pool_trf.py
new file mode 100755
index 0000000000000000000000000000000000000000..6fc1834de5a87627ddea0ff956576e84d379416c
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/overlay_pool_trf.py
@@ -0,0 +1,82 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+__doc__ = """Overlay simulated physics events into zero bias real data events; the real data events are in POOL format."""
+
+from PyJobTransformsCore.trf import *
+from PyJobTransformsCore.full_trfarg import *
+from PyJobTransformsCore.trfutil import *
+
+from EventOverlayJobTransforms.OverlayConfig import overlayConfig
+from EventOverlayJobTransforms.Configuration import DigiRndmSvcArg
+from EventOverlayJobTransforms.Configuration import SamplingFractionDbTagArg
+from EventOverlayJobTransforms.Configuration import NoiseControlArg
+from EventOverlayJobTransforms.Configuration import AddCaloDigiArg
+
+class PostIncludeArg(JobOptionsArg):
+    """Joboptions file with user settings, to run after the job itself"""
+    def __init__(self,help='default',package='',name='default'):
+        # split comma separated string into list
+        if type(package) == str: package = package.split(',')
+        # always add 'EventOverlayJobTransforms' package (which contains the common postInclude files)
+        commonPack = 'EventOverlayJobTransforms'
+        if commonPack not in package: package.append(commonPack)
+        JobOptionsArg.__init__(self,help=help,package=package,name=name)
+        self.__config = None
+
+    def isFullArgument(self):
+        return True
+
+class ReadByteStreamArg(BoolArg):
+    """Are the real data input files byte stream or POOL?"""
+    def __init__(self,help,name='ReadByteStream'):
+        BoolArg.__init__(self,help,name)
+
+    def isFullArgument(self):
+        return True
+
+class MuonForceUseArg(BoolArg):
+    """A hack for muon 8-fold versus 12-fold problems"""
+    def __init__(self,help,name='muonForceUse'):
+        BoolArg.__init__(self,help,name)
+
+    def isFullArgument(self):
+        return True
+
+class OverlayJobTransform( JobTransform ):
+    def __init__(self):
+        JobTransform.__init__(self,
+                              authors = [ Author('Ketevi A. Assamagan', 'ketevi.adikle.assamagan@cern.ch') ] ,
+                              skeleton='EventOverlayJobTransforms/skeleton.overlay.py' ,
+                              help = __doc__,
+                              config=overlayConfig )
+
+        #add arguments
+        self.add( InputHitsFileArg() )
+        self.add( InputRDOFileArg(name='pileupBSFile',  help='input RAW RDO for pileup overlay') )
+        self.add( OutputRDOFileArg() )
+        self.add( MaxEventsArg() )
+        self.add( SkipEventsArg() )
+        self.add( GeometryVersionArg() )
+        self.add( RandomSeedArg(name = 'digiSeedOffset1', help = 'random seed offset for digitization') )
+        self.add( RandomSeedArg(name = 'digiSeedOffset2', help = 'random seed offset for digitization') )
+        self.add( ConditionsTagArg(), default='NONE' )
+        self.add( OutputRDOFileArg(name='signalRDOFile', help='the RDO file of the MC signal alone'), default='NONE' )
+        self.add( DigiRndmSvcArg( ['AtRndmGenSvc','AtRanluxGenSvc']), default = 'AtRanluxGenSvc')
+        self.add( SamplingFractionDbTagArg(['QGSP_BERT','QGSP_EMV', 'QGSP', 'QGSP_BERT_EMV']), default = 'QGSP_EMV' )
+        self.add( TriggerConfigArg(), default='default' )
+        self.add( DBReleaseArg(), default='NONE' )
+        self.add( JobConfigArg(package='EventOverlayJobTransforms'), default='NONE' )
+        self.add( PostIncludeArg(), default='NONE' ) 
+        self.add( ReadByteStreamArg("True if the real data input files are ByteStream, False if they are POOL"), default=False)
+
+        # avoid sqlite nfs lock problems
+        self.add( SQLiteSupport() )
+
+# execute it if not imported
+if __name__ == '__main__':
+    trf = OverlayJobTransform()
+    sys.exit(trf.exeSysArgs().exitCode())
+
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/reco_grid.sh b/Event/EventOverlay/EventOverlayJobTransforms/scripts/reco_grid.sh
new file mode 100644
index 0000000000000000000000000000000000000000..24bd2effe81225df8207ddfe3d402f0efd3a346c
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/reco_grid.sh
@@ -0,0 +1,60 @@
+#!/bin/bash
+
+#uncomment this to run over just one file and do a few events, as a test run
+#test="maxEvents=3"; test2="--nFiles 1";
+
+#uncomment this to make JiveXML files
+#jive=",EventOverlayJobTransforms/dojive.py"; jive2="--extOutFile Jive*.xml";
+
+#conditions
+#https://twiki.cern.ch/twiki/bin/viewauth/Atlas/CoolProdTags
+dco="COMCOND-BLKPST-005-08"
+#dco="COMCOND-BLKPST-002-00"
+mcco1="OFLCOND-SIM-BS7T-02"
+mcco2="OFLCOND-DR-BS7T-ANom-11"
+#mcco1="OFLCOND-SDR-BS7T-05-HI"
+#mcco2="OFLCOND-SDR-BS7T-05-HI"
+#geo="ATLAS-GEO-18-01-00"
+geo="ATLAS-GEO-16-00-00"
+
+#trig="triggerConfig=NONE"
+
+#intype="raw"
+#intype="mcrdo"
+intype="overlayrdo"
+if [ $intype == "raw" ]; then
+ #this is for running on RAW data
+ inds="user.haastyle.BSmultipleselector.GRL.test9_EXT7.merge.test8/"
+ outds="user.haastyle.BSmultipleselector.GRL.test9_EXT7.merge.test8.reco.test1"
+ datamc="conditionsTag=$dco preInclude=EventOverlayJobTransforms/UseOracle.py$jive"
+ input="inputBSFile=%IN"; input2="--inputType RAW";
+elif [ $intype == "mcrdo" ]; then
+ #this is for running on overlay RDOs with MC conditions
+ inds="user.haastyle.overlaygrid.J4.*.193211.test20_EXT2/"
+ outds="user.haastyle.overlaygrid.J4.193211.test20.mcreco.test1"
+ datamc="conditionsTag=$mcco2 preInclude=EventOverlayJobTransforms/UseOracle.py,EventOverlayJobTransforms/custom_MC.py$jive"
+ input="inputRDOFile=%IN";
+elif [ $intype == "overlayrdo" ]; then
+ #this is for running on overlay RDOs with data conditions (actual overlay)
+ inds="user.haastyle.overlaygrid.J4.*.2011.stream7.test32_EXT2/"
+ outds="user.haastyle.overlaygrid.J4.2011.stream7.test32.reco.test1"
+ datamc="conditionsTag=$dco preInclude=EventOverlayJobTransforms/UseOracle.py,EventOverlayJobTransforms/custom.py,EventOverlayJobTransforms/recotrfpre.py$jive postInclude=EventOverlayJobTransforms/Rt_override.py,EventOverlayJobTransforms/muAlign_reco.py"
+ input="inputRDOFile=%IN";
+fi
+
+condition="geometryVersion=$geo $datamc $trig"
+
+#don't ask
+rm -rf Event/EventOverlay/EventOverlayJobTransforms/test/
+if [ -e LVL1config_SingleBeam_v1_7-bit_trigger_types_20080905.xml ]; then echo "Have LVL1config_SingleBeam_v1_7-bit_trigger_types_20080905.xml"; else get_files LVL1config_SingleBeam_v1_7-bit_trigger_types_20080905.xml ; fi
+ln -fs Event/EventOverlay/EventOverlayJobTransforms/scripts/atlas_error_ignore.db
+
+#lib="--libDS user.haastyle.0105143637.423039.lib._001537"
+
+echo "pathena --trf \"Reco_trf.py $input $condition outputESDFile=%OUT.ESD.pool.root outputCBNTFile=%OUT.CBNT.root $test \" --inDS $inds --outDS $outds --individualOutDS --nFilesPerJob 1 $test2 $jive2 --extFile LVL1config_SingleBeam_v1_7-bit_trigger_types_20080905.xml,atlas_error_ignore.db --memory=3000 $lib $input2 "
+pathena --trf "Reco_trf.py $input $condition outputESDFile=%OUT.ESD.pool.root outputCBNTFile=%OUT.CBNT.root $test " --inDS $inds --outDS $outds --individualOutDS --tmpDir /scratch/ahaas/ --nFilesPerJob 1 $test2 $jive2 --extFile LVL1config_SingleBeam_v1_7-bit_trigger_types_20080905.xml,atlas_error_ignore.db --memory=3000 $lib $input2 
+#--memory=10000 --cmtConfig x86_64-slc5-gcc43-opt 
+#--site ANALY_SLAC
+#--excludedSite ANALY_SLAC
+#--noSubmit
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/reco_grid_HI.sh b/Event/EventOverlay/EventOverlayJobTransforms/scripts/reco_grid_HI.sh
new file mode 100755
index 0000000000000000000000000000000000000000..01d11ce4f4ee04d9d34503a821c4922cf2c6e8d3
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/reco_grid_HI.sh
@@ -0,0 +1,128 @@
+#!/bin/bash
+
+#uncomment this to run over just one file and do a few events, as a test run
+#test="maxEvents=3"; test2="--nFiles 2 --nSkipFiles 0";
+
+#uncomment this to make JiveXML files
+#jive=",EventOverlayJobTransforms/dojive.py"; jive2="--extOutFile Jive*.xml";
+
+#Library to use. Comment out to build new one
+#lib="--libDS "
+
+#conditions
+#https://twiki.cern.ch/twiki/bin/viewauth/Atlas/CoolProdTags
+dco="COMCOND-BLKPST-005-08"
+#dco="COMCOND-BLKPST-002-00"
+#mcco1="OFLCOND-SIM-BS7T-02"
+#mcco2="OFLCOND-DR-BS7T-ANom-11"
+mcco1="OFLCOND-SDR-BS7T-05-25-HI"
+mcco2="OFLCOND-SDR-BS7T-05-25-HI"
+#geo="ATLAS-GEO-16-00-00"
+geo="ATLAS-GEO-18-01-00"
+
+trig="triggerConfig=MCRECO:MC_HI_v1_no_prescale"
+#trig="triggerConfig=NONE"
+
+#Loop over Jx samples
+for i in 4; do echo "Loop value for Jx is $i"
+
+#Data types to run
+for inputchoice in MC MCdigi Data; do 
+#MC MCdigi Data RAW ESDData
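+#each choice below sets the input/output datasets, conditions and pre/postIncludes, the Reco_trf.py input argument, the requested outputs, and a local test input file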
+
+#inputstring1="group.dataprep.overlaygrid.J${i}"
+inputstring1="group.dataprep.overlaygrid.PythiaPhotonJet_Unbinned35"
+#inputstring1="group.dataprep.overlaygrid.PythiaZeeJet_0Ptcut"
+#inputstring1="group.dataprep.overlaygrid.Pythia_directJpsie3e3"
+
+#inputstring2="193825_.test34"
+inputstring2="193825_.geo18_MC12.test2"
+
+outputstring="test3"
+
+echo "inputchoice is ${inputchoice}"
+if [ "$inputchoice" == "MC" ]; then
+ #this is for running on overlay RDOs with MC conditions
+ inds="${inputstring1}.*.${inputstring2}_EXT2/"
+ outds="${inputstring1}.${inputstring2}.mcreco.${outputstring}/"
+ datamc="conditionsTag=$mcco2 preInclude=EventOverlayJobTransforms/UseOracle.py,EventOverlayJobTransforms/custom_MC.py,RecJobTransforms/HeavyIonDataRecConfig.py$jive "
+ input="inputRDOFile=%IN"
+ outputs="outputESDFile=%OUT.ESD.pool.root outputTAGFile=%OUT.TAG.root outputNTUP_HIFile=%OUT.NTUP_HI.root"
+ inputf="inputRDOFile=/u/at/ahaas/nfs3/user.haastyle.overlaygrid.J4.0.193211.test16.111123071929_EXT2/user.haastyle.000291.EXT2._00004.MCcond.RDO.pool.root"
+elif [ "$inputchoice" == "Data" ]; then
+ #this is for running on overlay RDOs with data conditions (actual overlay)
+ inds="${inputstring1}.*.${inputstring2}_EXT4/"
+ outds="${inputstring1}.${inputstring2}.reco.${outputstring}/"
+ datamc="conditionsTag=$dco preInclude=EventOverlayJobTransforms/UseOracle.py,EventOverlayJobTransforms/custom.py,EventOverlayJobTransforms/recotrfpre.py,RecJobTransforms/HeavyIonDataRecConfig.py$jive postInclude=EventOverlayJobTransforms/Rt_override.py,EventOverlayJobTransforms/muAlign_reco.py"
+ input="inputRDOFile=%IN"
+ outputs="outputESDFile=%OUT.ESD.pool.root outputTAGFile=%OUT.TAG.root outputNTUP_HIFile=%OUT.NTUP_HI.root"
+ inputf="inputRDOFile=XXX"
+elif [ "$inputchoice" == "MCdigi" ]; then
+ #this is for running on overlay RDOs made with MC conditions from data HITS
+ inds="${inputstring1}.*.${inputstring2}_EXT5/"
+ outds="${inputstring1}.${inputstring2}.mcdigireco.${outputstring}/"
+ datamc="conditionsTag=$mcco2 preInclude=EventOverlayJobTransforms/UseOracle.py,EventOverlayJobTransforms/custom_MC.py,RecJobTransforms/HeavyIonDataRecConfig.py$jive "
+ input="inputRDOFile=%IN"
+ outputs="outputESDFile=%OUT.ESD.pool.root outputTAGFile=%OUT.TAG.root outputNTUP_HIFile=%OUT.NTUP_HI.root"
+ inputf="inputRDOFile=XXX"
+elif [ "$inputchoice" == "ESDData" ]; then
+ #this is for re-running TAG and NTUP_HI on overlay ESDs with data conditions (actual overlay)
+ inds="group.dataprep.overlaygrid.Pythia_directJpsie3e3.193825.test34.reco.test1_EXT0/"
+ outds="group.dataprep.overlaygrid.Pythia_directJpsie3e3.193825.test34.recoESD.${outputstring}/"
+ datamc="conditionsTag=$dco preInclude=EventOverlayJobTransforms/UseOracle.py,EventOverlayJobTransforms/custom.py,EventOverlayJobTransforms/recotrfpre.py,RecJobTransforms/HeavyIonDataRecConfig.py$jive postInclude=EventOverlayJobTransforms/Rt_override.py,EventOverlayJobTransforms/muAlign_reco.py"
+ input="inputESDFile=%IN"
+ outputs="outputTAGFile=%OUT.TAG.root outputNTUP_HIFile=%OUT.NTUP_HI.root"
+ inputf="inputESDFile=/u/at/ahaas/nfs3/group.dataprep.overlaygrid.Pythia_directJpsie3e3.193825.test34.reco.test1.120120122022_EXT0/group.dataprep.57068_002072.EXT0._00118.ESD.pool.root"
+elif [ "$inputchoice" == "RAW" ]; then
+ #this is for running on RAW data
+ inds="user.haastyle.HImerge.193825p.test12/"
+ outds="user.haastyle.HImerge.193825p.test12.reco.${outputstring}"
+ #inds="data11_hi.00193211.physics_MinBiasOverlay.merge.RAW"
+ #outds="user.haastyle.193211.MinBiasOverlay.reco.test29"
+ datamc="conditionsTag=$dco preInclude=RecJobTransforms/HeavyIonDataRecConfig.py$jive"
+ input="inputBSFile=%IN"; input2="--inputType RAW";
+ outputs="outputESDFile=%OUT.ESD.pool.root outputTAGFile=%OUT.TAG.root outputNTUP_HIFile=%OUT.NTUP_HI.root"
+ inputf="inputBSFile=/u/at/ahaas/nfs3/data11_hi.00193211.physics_MinBiasOverlay.merge.RAW/data11_hi.00193211.physics_MinBiasOverlay.merge.RAW._lb0820._SFO-ALL._0001.1"
+else echo "dont know inputchoice $inputchoice"; exit
+fi
+
+beamconstraint="InDetFlags.useBeamConstraint.set_Value_and_Lock(True);"
+dozdc="jobproperties.HeavyIonD3PDMakerFlags.DoZDC.set_Value_and_Lock(False);"
+condition="geometryVersion=$geo $datamc preExec='from HIJetRec.HIJetRecFlags import jobproperties; jobproperties.HIJetRecFlags.doHIJetMergeEvent=True; $beamconstraint InDetFlags.cutLevel=2; from HeavyIonD3PDMaker.HeavyIonD3PDMakerFlags import jobproperties; jobproperties.HeavyIonD3PDMakerFlags.DoHijingPars.set_Value_and_Lock(False); $dozdc jobproperties.HeavyIonD3PDMakerFlags.DoTruth.set_Value_and_Lock(True); ' $trig preInclude_e2d=HeavyIonD3PDMaker/HeavyIonD3PDMaker_PbPb2011_conf.py "
+
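+#"local" switches between running Reco_trf.py directly in a scratch directory (for testing) and submitting the job to the grid with pathena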
+local="false"
+if [ "$local" == "true" ]; then
+
+ tempoodir="/scratch/ahaas/run21/"
+ mkdir -p $tempoodir
+ cp atlas_error_ignore.db $tempoodir
+ cd $tempoodir ; rm *.root ; rm JiveXML* ; pwd ; 
+ command="Reco_trf.py $inputf $condition outputESDFile=OUT.ESD.pool.root outputTAGFile=OUT.TAG.root outputNTUP_HIFile=OUT.NTUP_HI.root $test >& log.txt &" ;
+ echo $command ; eval $command ; 
+
+else
+
+ rm -rf Event/EventOverlay/EventOverlayJobTransforms/test/
+ cp Event/EventOverlay/EventOverlayJobTransforms/cmt/grid.requirements Event/EventOverlay/EventOverlayJobTransforms/cmt/requirements
+
+ tmp6dir="dummy"
+ if [ -e /scratch ]; then mkdir -p /scratch/ahaas; tmp6dir="/scratch/ahaas/"; fi
+ if [ -e /tmp ]; then mkdir -p /tmp/ahaas; tmp6dir="/tmp/ahaas/"; fi
+
+ ddo="--dbRelease ddo.000001.Atlas.Ideal.DBRelease.v170601:DBRelease-17.6.1.tar.gz"
+
+ echo "pathena --trf \"Reco_trf.py $input $condition $outputs $test \" --inDS $inds --outDS $outds --individualOutDS --tmpDir $tmp6dir --nFilesPerJob 1 $test2 $jive2 --extFile atlas_error_ignore.db $lib $input2 $ddo --official "--voms=atlas:/atlas/dataprep/Role=production" "
+
+ pathena --trf "Reco_trf.py $input $condition $outputs $test " --inDS $inds --outDS $outds --individualOutDS --tmpDir $tmp6dir --nFilesPerJob 1 $test2 $jive2 --extFile atlas_error_ignore.db $lib $input2 $ddo --official "--voms=atlas:/atlas/dataprep/Role=production" 
+ #--memory=10000 --cmtConfig x86_64-slc5-gcc43-opt
+ #--site ANALY_SLAC
+ #--excludedSite ANALY_SLAC
+ #--noSubmit
+ echo
+ echo "--------------------------------------------------------------"
+
+fi
+
+done
+done; #loop over "i"
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/test_geom_new.py b/Event/EventOverlay/EventOverlayJobTransforms/scripts/test_geom_new.py
new file mode 100644
index 0000000000000000000000000000000000000000..96671d541754fefa3b0fe3b7fa411482052ca198
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/test_geom_new.py
@@ -0,0 +1,86 @@
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+## Job options file for Geant4 ATLAS detector simulations
+
+## Algorithm sequence
+from AthenaCommon.AlgSequence import AlgSequence
+topSeq = AlgSequence()
+
+## Output threshold (DEBUG, INFO, WARNING, ERROR, FATAL)
+ServiceMgr.MessageSvc.OutputLevel = INFO
+
+## Detector flags
+from AthenaCommon.DetFlags import DetFlags
+DetFlags.ID_setOn()
+DetFlags.Calo_setOn()
+DetFlags.Muon_setOn()
+#DetFlags.Lucid_setOn()
+DetFlags.Truth_setOn()
+
+## Global conditions tag
+from AthenaCommon.GlobalFlags import globalflags
+
+## Simulation flags
+from G4AtlasApps.SimFlags import SimFlags
+SimFlags.load_atlas_flags()
+
+## Layout tags: see SimFlags.SimLayout for allowed values
+## Use the default layout:
+SimFlags.SimLayout.set_On()
+## Set a specific layout tag:
+SimFlags.SimLayout = 'ATLAS-GEO-10-00-00'
+#SimFlags.SimLayout = 'ATLAS-GEO-16-00-00'
+## Set a specific non-officially-supported layout tag using the _VALIDATION suffix:
+#SimFlags.SimLayout = 'ATLAS-GEO-16-00-00_VALIDATION'
+
+#MC conditions / alignments
+globalflags.ConditionsTag='OFLCOND-SIM-BS900-00'
+
+#Data conditions / alignments
+#globalflags.DataSource.set_Value_and_Lock('data') #force to use the data database
+#globalflags.ConditionsTag='COMCOND-ES1PS-001-00'
+#SimFlags.RunNumber.set_Value_and_Lock(152845)
+#SimFlags.RunNumber.set_Value_and_Lock(142383)
+
+#Custom alignments
+#from IOVDbSvc.CondDB import conddb
+#conddb.addOverride('/Indet/Align','InDetAlign_CSC_00')
+#conddb.addOverride('/Indet/Align','InDetAlign_Collision_2009_08')
+#conddb.addOverride('/Indet/Align','InDetAlign_Collision_7T_2010_07')
+#conddb.addOverride('/TRT/Align','TRTAlign_Collision_2009_04')
+
+## Set the EtaPhi, VertexSpread and VertexRange checks on
+SimFlags.EventFilter.set_On()
+
+## Set the LAr parameterization
+#SimFlags.LArParameterization = 2
+
+## No magnetic field
+#SimFlags.MagneticField.set_Off()
+
+## Register callback functions at various init levels
+def test_preInit():
+    print "CALLBACK AT PREINIT"
+def test_postInit():
+    print "CALLBACK AT POSTINIT"
+def use_geometry_check():
+    print "CALLBACK use_geometry_check"
+    from G4AtlasApps import AtlasG4Eng
+    AtlasG4Eng.G4Eng._ctrl.geometryMenu.SetGeometryCheck(10)
+SimFlags.InitFunctions.add_function("preInitG4", use_geometry_check)
+SimFlags.InitFunctions.add_function("preInit", test_preInit)
+SimFlags.InitFunctions.add_function("postInit", test_postInit)
+## Change the field stepper and use verbose G4 tracking
+#from G4AtlasApps import callbacks
+#SimFlags.InitFunctions.add_function("postInit", callbacks.use_nystromrk4_stepper)
+#SimFlags.InitFunctions.add_function("postInit", callbacks.use_verbose_tracking)
+
+## AthenaCommon flags
+from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
+athenaCommonFlags.PoolEvgenInput = ['/afs/cern.ch/atlas/offline/ProdData/15.6.11.3/mu_E200_eta0-60-10000.evgen.pool.root']
+athenaCommonFlags.PoolHitsOutput = "atlasG4.hits.pool.root"
+athenaCommonFlags.EvtMax = 1
+
+## Add G4 sim framework alg sequence
+from G4AtlasApps.PyG4Atlas import PyG4AtlasAlg
+topSeq += PyG4AtlasAlg()
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/zerobias_grid.sh b/Event/EventOverlay/EventOverlayJobTransforms/scripts/zerobias_grid.sh
new file mode 100755
index 0000000000000000000000000000000000000000..de65abc70e1a04d63703b6ee40f7a35cbd005af0
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/zerobias_grid.sh
@@ -0,0 +1,33 @@
+#!/bin/bash
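+#build one outputbsfilterfile<N> argument per output stream (50 streams) for BS_multipleSelector_trf.py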
+for (( c=0; c<50; c+=1 )); do command="$command outputbsfilterfile${c}=%OUT.Zerobias.bit240.stream${c}.RAW" ; done
+
+dest="--destSE CERN-PROD_SCRATCHDISK"
+#dest="--destSE BNL-OSG2_SCRATCHDISK"
+
+#grl="data12_8TeV.periodAllYear_DetStatus-v49-pro13-03_CoolRunQuery-00-04-08_Atlas_Ready.xml"
+grl="data12_8TeV.periodAllYear_DetStatus-v61-pro14-02_DQDefects-00-01-00_PHYS_StandardGRL_All_Good.xml"
+#lib="--libDS user.haastyle.1110025940.3439.lib._000159"
+
+test="test12a1"
+#eout="--extOutFile=BS_multipleSelector.log"; #for test12p5
+
+data="data12_8TeV.*.physics_ZeroBias.*.RAW"
+#data="data12_8TeV.*.physics_ZeroBiasOverlay.*.RAW"
+split="--nGBPerJob=10"
+
+#data="data12_8TeV.00202660.physics_ZeroBiasOverlay.merge.RAW,data12_8TeV.00203456.physics_ZeroBiasOverlay.merge.RAW"
+#split="--nFilesPerJob=5"
+
+#make sure we have the right map file, and back it up
+mv -i -v lbn_anal_map.txt lbn_anal_map_${test}.txt ; #call the new one by the test name
+#mv -v lbn_anal_map_*.txt ~/nfs3/lbn_anal_map/ ; #move them all to backup
+#cp -u -v ~/nfs3/lbn_anal_map/lbn_anal_map_${test}.txt . ; #bring back the one we want
+if [ -e lbn_anal_map_${test}.txt ]; then echo "Using lbn_anal_map_${test}.txt";
+else echo "No lbn_anal_map_${test}.txt found!"; exit; fi; ls -l lbn_anal_map* ;
+
+pathena --trf "BS_multipleSelector_trf.py inputbsfile=%IN $command lbnmapfile=lbn_anal_map_${test}.txt TriggerBit=240 " --extFile=lbn_anal_map_${test}.txt $eout --outDS group.dataprep.BSmultipleselector.GRL.${test} --official "--voms=atlas:/atlas/dataprep/Role=production" --individualOutDS $split --memory=3000 --goodRunListXML $grl --goodRunListDataType RAW --goodRunListDS $data $lib $dest 
+#--nFiles 50 --express
+#--site ANALY_FZK
+#--excludedSite ANALY_IN2P3-CC,ANALY_BNL_ATLAS
+#--v
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/zerobias_merge.sh b/Event/EventOverlay/EventOverlayJobTransforms/scripts/zerobias_merge.sh
new file mode 100755
index 0000000000000000000000000000000000000000..4218a45b1a6e5bd9af512920ee0a630cd82a84da
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/zerobias_merge.sh
@@ -0,0 +1,276 @@
+#!/bin/bash
+
+#defaults
+test="test12p3"; #the dataset made by "zerobias_grid.sh"
+outset=3; #for the "testX" on the end of the merged dataset name
+startstream=0
+nstream=50
+makeinfo="true"; untar="true";
+checkevents="false";
+makestreams="true";
+domerging="true";
+doregister="false"; dorealregistering="true";
+
+function usage
+{
+    echo "usage: zerobias_merge.sh [ --test test --outset outset -s startstream -n nstream --makeinfo true|false --untar true|false --checkevents true|false --makestreams true|false --domerging true|false --doregister true|false dorealregistering true|false --help]"
+}
+
+while [ "$1" != "" ]; do
+    case $1 in
+        -t | --test )           shift; test=$1
+                                ;;
+        -o | --outset )    shift; outset=$1
+                                ;;
+        -s | --startstream )  shift; startstream=$1
+                                ;;
+        -n | --nstream )    shift; nstream=$1
+                                ;;
+        --makeinfo )    shift; makeinfo=$1
+                                ;;
+        --untar )    shift; untar=$1
+                                ;;
+        --checkevents )    shift; checkevents=$1
+                                ;;
+        --makestreams )    shift; makestreams=$1
+                                ;;
+        --domerging )    shift; domerging=$1
+                                ;;
+        --doregister )    shift; doregister=$1
+                                ;;
+        --dorealregistering )     shift; dorealregistering=$1
+                                           ;;
+        -h | --help )           usage
+                                exit
+                                ;;
+        * )                     usage; exit 69
+    esac
+    shift
+done
+
+#do things in the temp directory
+oldd=`pwd`
+tmp="$HOME/nfs3/zerobias_skim_$test"
+mkdir -p $tmp
+cd $tmp ; pwd
+
+zerods="group.dataprep.BSmultipleselector.GRL.$test"
+let maxstream=$startstream+$nstream
+echo "Doing $nstream streams, starting with stream $startstream ( maxstream is $maxstream )"
+echo "makeinfo=$makeinfo untar=$untar checkevents=$checkevents makestreams=$makestreams domerging=$domerging doregister=$doregister zerods=$zerods outset=$outset "
+
+if [ $makeinfo == "true" ]; then
+if [ $untar == "true" ]; then 
+ voms-proxy-info -exists
+ if [ $? -gt 0 ]; then echo "Do voms.sh"; exit; fi
+ dq2-get -T 5,5 --ignore ${zerods}/
+ #rm run_* ; rm warn_* ;
+fi
+for d in `ls -d ${zerods}* `; do 
+ #if [ $d == "user.haastyle.BSmultipleselector.GRL.test9.111215101153" ]; then echo "$d had bad transfer, being skipped"; continue; fi
+ cd $d ; if [ $? -ne 0 ]; then pwd; echo "Directory $d did not exist"; continue; else pwd; fi
+ if [ $untar == "true" ]; then for f in `ls group.dataprep.*.tgz*`; do echo $f; tar xzf $f; done; fi
+ for j in `ls -d tarball_PandaJob_*`; do pwd; echo $j; 
+  cd $j ; if [ $? -ne 0 ]; then pwd; echo "Directory $j did not exist"; continue; else pwd; fi
+  grep "Job successfully completed" pilotlog.txt 
+  if [ $? -eq 0 ]; then echo "Good job"; 
+   run=`grep "start of run" athena_stdout.txt |head -1|cut -d ' ' -f 12`
+   id=`ls jobState-*|cut -d '-' -f 2`
+   echo "first run for job $id is $run"
+   fil="../../run_${run}_${id}_${j}.txt"
+   if [ -e "$fil" ]; then echo "$fil already exists"
+   else
+    grep "number of events written" athena_stdout.txt > tempev${1}.txt
+    if [ `grep -c "number of events written" tempev${1}.txt ` -eq 50 ]; then
+     for jf in `grep RAW OutPutFileCatalog.xml |cut -d \" -f 4`; do #echo "JF is $jf "
+      stream=`echo $jf | cut -d '.' -f 8 | sed s%stream%% `; #echo "Stream is $stream  "
+      nev=0 ; nev=`grep "BSESOutputSvc${stream}\ " tempev${1}.txt | cut -d ':' -f 2 `; #echo "NEv is $nev "
+      echo "${jf}:${nev}" >> $fil
+     done
+     echo "$fil updated"
+    else echo "Cant find number of events written"
+    fi; rm tempev${1}.txt
+   fi
+
+   filw="../../warn_${run}_${id}_${j}.txt"
+   if [ -e $filw ]; then echo "$filw already exists"
+   else
+    grep "OutputStreamBS" athena_stdout.txt | grep "expected" > $filw
+   fi
+
+  else echo "Failed job"
+  fi
+  cd .. ; pwd; echo "-----"
+ done #loop over tarball_PandaJob* directories
+ #rm -rf tarball_PandaJob_*
+ cd .. ; pwd; echo; echo "=========="; echo
+done #loop over group.* directories
+fi #makeinfo
+
+#check for errors with:
+#cat log.txt | grep -v "done: 1 time"|grep -v skipping|grep -v GOOD
+#and then full logfile with:
+#less group.dataprep.BSmultipleselector.GRL.test*/tarball_PandaJob_1590664535_ANALY_LONG_BNL_ATLAS/athena_stdout.txt
+if [ $checkevents == "true" ]; then
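+#for each run/lbn with events wanted in the stream-0 map, look up the corresponding warn_* summary files and check that the number of events actually written matches the number expected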
+ if [ -e lbn_anal_map_temp.txt ]; then echo "lbn_anal_map_temp.txt exists"; 
+ else cat lbn_anal_map.txt | grep "stream 0,"  > lbn_anal_map_temp.txt
+ fi
+while read myLine ; do echo -n $myLine ; 
+run=`echo $myLine | cut -d " " -f 4 | sed s%,%% `
+lbn=`echo $myLine | cut -d " " -f 6 | sed s%,%% `
+nev=`echo $myLine | cut -d " " -f 8 `
+echo -n " - Run LBN nev : $run $lbn $nev "
+if [ $nev -eq 0 ]; then echo " - skipping "; continue; fi
+
+grep " $run $lbn " warn_${run}* > tt.txt
+if [ $? -ne 0 ]; then echo -n " - searching all"; grep " $run $lbn " warn_* > tt.txt ; 
+ if [ $? -ne 0 ]; then echo " - not found";  continue; fi; 
+fi
+
+n=`grep -c INFO tt.txt `
+echo -n " - done: $n time(s)"
+if [ $n -gt 0 ]; then echo; continue; fi
+
+n=`grep -c WARNING tt.txt `
+if [ $n -gt 0 ]; then
+ thetotal=`cat tt.txt | head -1 | cut -d " " -f 9 `
+ foundtotal=`cat tt.txt | cut -d " " -f 5 | awk '{total = total + $1}END{print total}' `
+ echo -n " : found $foundtotal out of $thetotal events"
+ if [ $foundtotal -eq $thetotal ]; then echo " -- GOOD"; else echo " -- BAD"; fi
+else 
+ echo " : no warning "
+fi
+
+rm tt.txt
+done < lbn_anal_map_temp.txt
+fi #checkevents
+
+if [ $makestreams == "true" ]; then
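+#group the run_*.txt event counts for this stream into filelists of roughly $maxneperjob events each:
+#each stream-<s>-tot-<t>-skip-<k>-ne-<n>-filelist.txt lists the input files for one merge job,
+#with <k> events to skip at the start of the job and <n> events to keep; when a chunk already
+#spans more than 100 input files the event target is trimmed so the chunk closes early (to stay
+#under the grid file-count limit), and no more chunks are written once the stream passes 51000 events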
+for (( targetstream=$startstream; targetstream<$maxstream; targetstream+=1 )); do 
+rm stream-$targetstream-*filelist.txt
+maxneperjob=2000; neperjob=$maxneperjob; tottot=0; tot=0; nfiles=0; skip=0; filelist="";
+runtext=""; greptext="stream${targetstream}\.";
+for r in run${runtext}_*.txt ; do echo $r
+f=`grep $greptext $r | cut -d ':' -f 1 `
+ne=`grep $greptext $r | cut -d ':' -f 2 |sed "s%\ %%g"`
+echo "file $f has $ne events"; 
+if [ $ne -eq 0 ]; then continue; fi
+let tot+=$ne
+let tottot+=$ne
+echo "Total events is $tot and totaltotal is $tottot and neperjob is set to $neperjob and nfiles is $nfiles "
+echo
+if [ $tot -ge $neperjob ]; then 
+ if [ "$filelist" != "" ]; then filelist="${filelist},"; fi
+ filelist="${filelist}${f}"
+ if [ $tottot -le 51000 ]; then 
+  echo $filelist > stream-${targetstream}-tot-${tottot}-skip-${skip}-ne-${neperjob}-filelist.txt ; 
+  echo "Wrote stream-${targetstream}-tot-${tottot}-skip-${skip}-ne-${neperjob}-filelist.txt with $nfiles files"
+ fi
+ let skip=$ne-$tot+$neperjob
+ if [ $skip -eq $ne ]; then filelist=""; skip=0; #no reason to add the whole file and skip through the whole thing
+ else filelist="${f}"
+ fi
+ let tot=$tot-$neperjob
+ let nfiles=0
+ let neperjob=$maxneperjob
+ echo "Skip set to $skip and filelist is now $filelist and tot is $tot and tottot is $tottot "
+ echo
+else
+ if [ "$filelist" != "" ]; then filelist="${filelist},"; fi
+ filelist="${filelist}${f}"
+ let nfiles=$nfiles+1
+ if [ $nfiles -gt 100 ]; then  #check if we need to cut it off short to stay below 200 file limit
+  oldneperjob=$neperjob ; let neperjob=$tot+100 ; let neperjob=$neperjob/100*100 ;
+  if [ $oldneperjob -ne $neperjob ]; then echo "neperjob now set to $neperjob"; fi
+ fi
+fi #tot -ge neperjob
+done #for each run_*.txt file 
+if [ "$filelist" != "" ]; then 
+ #echo "WARNING, filelist not empty at the end: "; echo $filelist ; 
+ oldneperjob=$neperjob ; let neperjob=$tot+100 ; let neperjob=$neperjob/100*100 ;
+ if [ $oldneperjob -ne $neperjob ]; then echo "neperjob now set to $neperjob"; fi
+ if [ $tottot -le 51000 ]; then 
+  echo $filelist > stream-${targetstream}-tot-${tottot}-skip-${skip}-ne-${neperjob}-filelist.txt ; 
+  echo "Wrote stream-${targetstream}-tot-${tottot}-skip-${skip}-ne-${neperjob}-filelist.txt with $nfiles files"
+ fi
+fi
+echo "Done with stream $targetstream and TotalTotal is $tottot events "
+echo
+done #for each targetstream
+fi
+
+#go back to our original directory
+cd $oldd ; pwd
+
+tmp6dir="dummy"
+if [ -e /scratch/ahaas ]; then tmp6dir="/scratch/ahaas"; fi
+if [ -e /tmp/ahaas ]; then tmp6dir="/tmp/ahaas"; fi
+mkdir -p $tmp6dir
+
+if [ $domerging == "true" ]; then echo
+justonejob="false"; #"true" to do just one job, for a build job, or "skipfirst" to skip the first job, after a build job
+tarball="--outTarBall ${tmp6dir}/tarball${startstream}.tgz  --outRunConfig ${tmp6dir}/runconf${startstream}.txt"
+for (( targetstream=$startstream; targetstream<$maxstream; targetstream+=1 )); do echo 
+ for c in `ls $tmp/stream-${targetstream}-*-filelist.txt` ; do echo $c
+  if [ $justonejob == "skipfirst" ]; then echo "Skipping first file "; justonejob="false"; continue; fi
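+  #parse the stream number, running total, events to skip, and events per job out of the filelist name (stream-<s>-tot-<t>-skip-<k>-ne-<n>-filelist.txt)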
+  jstream=`echo $c | cut -d '-' -f 2 `
+  jtot=`echo $c | cut -d '-' -f 4 `
+  jskip=`echo $c | cut -d '-' -f 6 `
+  jne=`echo $c | cut -d '-' -f 8 `
+  let maxe=${jne}+${jskip}
+  let nout=${jne}/100
+  uniq=`date +%s `; uniq="${uniq}_${test}_dataset${targetstream}"
+  echo "noutputs is $nout since jne is $jne "
+  inoutds="--inDS ${zerods}_EXT${jstream}/ --outDS `echo ${zerods}|sed s%user.haastyle.%group.dataprep.%g`_EXT${jstream}.merge.${jtot}.test${outset}"
+  echo "inoutds is $inoutds"
+  command=""; for (( ci=0; ci<${nout}; ci+=1 )); do command="$command outputbsfilterfile${ci}=%OUT.Zerobias${uniq}.bit-1.stream${ci}.RAW"; done
+  echo "Doing jstream $jstream and jtot $jtot and jskip $jskip and maxe $maxe and uniq is $uniq "
+  catc=`cat $c`
+  echo "Filelist is $catc "
+  #if [ $justonejob != "true" ]; then lib="--libDS user.haastyle.0328175735.886361.lib._005840"; fi; #17.0.6.5
+  lib="--noBuild";
+  #site="--site ANALY_CERN_XROOTD"
+  site="--site ANALY_BNL_ATLAS_1"
+  let jobsetid=$outset*100+100000+$targetstream ; jobset="--panda_jobsetID $jobsetid " ; echo "jobset is $jobset "
+  ~/bin/pathena --trf "BS_multipleSelector_trf.py inputbsfile=%IN $command lbnmapfile=serial TriggerBit=-1 Noutputs=$nout maxevents=$maxe skipevents=$jskip uniq=$uniq " --tmpDir ${tmp6dir}/ $inoutds --fileList $catc --memory=1800 --split=1 --dbRelease ddo.000001.Atlas.Ideal.DBRelease.v170601:DBRelease-17.6.1.tar.gz --skipScan $tarball $lib $jobset --official "--voms=atlas:/atlas/dataprep/Role=production" ${site} 
+  #--noSubmit -v 
+  rt=$? ; if [ $rt -ne 0 ]; then echo "return value $rt from pathena for $c" > fail_$c ; fi
+  tarball="--inTarBall ${tmp6dir}/tarball${startstream}.tgz --inRunConfig ${tmp6dir}/runconf${startstream}.txt"
+  echo
+  if [ $justonejob == "true" ]; then echo "just doing one job"; break; fi
+ done
+done
+fi #domerging
+
+#../zerobias_merge.sh -s 1 -n 4 -t test12p3 -o 3 --makeinfo false --untar false --checkevents false --makestreams false --domerging false --doregister true
+if [ "$doregister" == "true" ]; then echo
+for (( targetstream=$startstream; targetstream<$maxstream; targetstream+=1 )); do echo 
+
+    #dq2-delete-datasets group.dataprep.BSmultipleselector.GRL.test9_EXT43.merge.test14/ `dq2-list-datasets-container group.dataprep.BSmultipleselector.GRL.test9_EXT43.merge.test14/`
+
+    dname="`echo ${zerods}|sed s%user.haastyle.%group.dataprep.%g`_EXT${targetstream}.merge"
+    outdname="${dname}.test${outset}/"
+    if [ $dorealregistering == "true" ]; then dq2-register-container "${outdname}"; fi
+ 
+    outsetin="${dname}.*.test${outset}/"
+    #outsetin="user.haastyle.BSmultipleselector.GRL.test9_EXT0.merge.*.test12/"
+    echo "Taking datasets from $outsetin"; echo
+
+    if [ $dorealregistering == "true" ]; then
+    for dc in `dq2-ls "${outsetin}"`; do echo "dataset container is ${dc}";
+	for dd in `dq2-list-datasets-container ${dc}`; do dq2-register-datasets-container $outdname "${dd}"; echo $dd; done
+	#dq2-register-datasets-container $outdname `dq2-list-datasets-container ${dc}`
+    done; fi
+
+    echo "${outdname} now has `dq2-list-datasets-container ${outdname} |grep -c merge` datasets and `dq2-ls -f -H ${outdname} |grep -c RAW` RAW files and `dq2-ls -f -H ${outdname} |grep \"total size:\"`"
+
+    #check for empty files
+    dq2-ls -f -H $outdname |grep Zerobias > temp92.txt
+    grep "bytes" temp92.txt > temp93.txt
+    if [ $? -eq 0 ]; then echo "Dataset $outdname has empty files: "; cat temp93.txt; 
+    else echo "Dataset $outdname has no empty files"; fi
+    rm temp92.txt temp93.txt
+
+done
+fi
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/zerobias_merge_HI.sh b/Event/EventOverlay/EventOverlayJobTransforms/scripts/zerobias_merge_HI.sh
new file mode 100644
index 0000000000000000000000000000000000000000..ae5f907e0dae07b908ec2dbff3037d0c161b450e
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/zerobias_merge_HI.sh
@@ -0,0 +1,178 @@
+#!/bin/bash
+
+outset=2; #for the "testX" on the end of the merged dataset name - and also modifies the jobset id
+
+startstream=99
+maxstream=100
+echo "Doing 1 stream, starting with stream $startstream ( maxstream is $maxstream )"
+
+#do things in the temp directory
+oldd=`pwd`
+tmp="$HOME/nfs3/HI_minbias_skim"
+mkdir -p $tmp
+cd $tmp ; pwd
+
+makeinfo="f"
+makestreams="f"
+domerging="f"
+justonejob="f"; #"t" to do just one job, for a build job, or "skipfirst" to skip the first job, after a build job
+doregister="t"
+
+#prefix="data11_hi"
+prefix="data12_hip"
+
+if [ $makeinfo == "t" ]; then
+rm runHI_*
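+#for each run: list its RAW files with dq2-ls and its per-LB event counts with GetNEventsLB.py, then write one runHI_<run>_<lb>_<sfo>_<nev>.txt file per RAW file recording "filename:events"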
+
+#for r in 194374 194370 194193 194192 194179 194163 194160 194121 194061 194060 194017 193890 193834 193826 193825 193823 193795 193718 193687 193679 193662 193655 193641 193604 193599 193558 193546 193494 193493 193492 193491 193481 193463 193447 193412 193403 193321 193295 193291 193270 193211 194382 ; #2011 Pb-Pb
+for r in 210184 ; #2012 p-Pb
+
+ do echo "Run is $r";
+ if [ -e dq2_${r}.txt ]; then echo "dq2_${r}.txt already exists"; else dq2-ls -f ${prefix}.00${r}.physics_MinBiasOverlay.merge.RAW >& dq2_${r}.txt; fi
+ if [ -e nevent_${r}.txt ]; then echo "nevent_${r}.txt already exists"; else python $oldd/GetNEventsLB.py --run $r --stream physics_MinBiasOverlay | sed "s%\ %%g" >& nevent_${r}.txt; fi
+ for f in `grep SFO dq2_${r}.txt |cut -f 2`; do echo $f
+  lb=`echo $f | cut -d . -f 6 | sed 's%_lb%%' | sed 's/^[0]*//' `;
+  sfo=`echo $f | cut -d . -f 7| sed 's%_SFO-%%'`; echo "lbn is $lb and sfo $sfo "
+  ne=`grep "LB${lb}has" nevent_${r}.txt | cut -d e -f 1 |cut -d s -f 2`; 
+  if [ $sfo != "ALL" ]; then 
+   let ne=ne/10; #since there are 10 SFOs per lbn, at least for 2012 data...
+  fi
+  echo "ne is $ne"
+  echo "${f}:${ne}">runHI_${r}_${lb}_${sfo}_${ne}.txt
+ done
+done
+fi #makeinfo
+
+if [ $makestreams == "t" ]; then
+for (( targetstream=$startstream; targetstream<$maxstream; targetstream+=1 )); do 
+#if [ `ls stream-$targetstream-*|grep -c stream ` -eq 50 ]; then continue; fi
+rm stream-$targetstream-*filelist.txt
+maxneperjob=2000
+neperjob=$maxneperjob
+tottot=0
+tot=0
+nfiles=0
+skip=0
+filelist=""
+runtext="HI"; greptext="MinBiasOverlay";
+for r in run${runtext}_*.txt ; do echo $r
+f=`grep $greptext $r | cut -d ':' -f 1 `
+ne=`grep $greptext $r | cut -d ':' -f 2 |sed "s%\ %%g"`
+echo "file $f has $ne events"; 
+if [ $ne -eq 0 ]; then continue; fi
+let tot+=$ne
+let tottot+=$ne
+echo "Total events is $tot and totaltotal is $tottot and neperjob is set to $neperjob and nfiles is $nfiles "
+echo
+if [ $tot -ge $neperjob ]; then 
+ if [ "$filelist" != "" ]; then filelist="${filelist},"; fi
+ filelist="${filelist}${f}"
+  echo $filelist > stream-${targetstream}-tot-${tottot}-skip-${skip}-ne-${neperjob}-filelist.txt ; 
+  echo "Wrote stream-${targetstream}-tot-${tottot}-skip-${skip}-ne-${neperjob}-filelist.txt with $nfiles files"
+ let skip=$ne-$tot+$neperjob
+ if [ $skip -eq $ne ]; then filelist=""; skip=0; #no reason to add the whole file and skip through the whole thing
+ else filelist="${f}"
+ fi
+ let tot=$tot-$neperjob
+ let nfiles=0
+ let neperjob=$maxneperjob
+ echo "Skip set to $skip and filelist is now $filelist and tot is $tot and tottot is $tottot "
+ echo
+else
+ if [ "$filelist" != "" ]; then filelist="${filelist},"; fi
+ filelist="${filelist}${f}"
+ let nfiles=$nfiles+1
+ if [ $nfiles -gt 100 ]; then  #check if we need to cut it off short to stay below 200 file limit
+  oldneperjob=$neperjob ; let neperjob=$tot+100 ; let neperjob=$neperjob/100*100 ;
+  if [ $oldneperjob -ne $neperjob ]; then echo "neperjob now set to $neperjob"; fi
+ fi
+fi #tot -ge neperjob
+done #for each run_*.txt file 
+if [ "$filelist" != "" ]; then 
+ #echo "WARNING, filelist not empty at the end: "; echo $filelist ; 
+ oldneperjob=$neperjob ; let neperjob=$tot+100 ; let neperjob=$neperjob/100*100 ;
+ if [ $oldneperjob -ne $neperjob ]; then echo "neperjob now set to $neperjob"; fi
+  echo $filelist > stream-${targetstream}-tot-${tottot}-skip-${skip}-ne-${neperjob}-filelist.txt ; 
+  echo "Wrote stream-${targetstream}-tot-${tottot}-skip-${skip}-ne-${neperjob}-filelist.txt with $nfiles files"
+fi
+echo "Done with stream $targetstream and TotalTotal is $tottot events "
+echo
+done #for each targetstream
+fi
+
+#go back to our original directory
+cd $oldd ; pwd
+
+tmp6dir="dummy"
+if [ -e /scratch/ahaas ]; then tmp6dir="/scratch/ahaas"; fi
+if [ -e /tmp/ahaas ]; then tmp6dir="/tmp/ahaas"; fi
+mkdir -p $tmp6dir
+
+if [ $domerging == "t" ]; then echo
+tarball="--outTarBall ${tmp6dir}/tarball${startstream}.tgz  --outRunConfig ${tmp6dir}/runconf${startstream}.txt"
+for (( targetstream=$startstream; targetstream<$maxstream; targetstream+=1 )); do echo 
+ for c in `ls $tmp/stream-${targetstream}-*-filelist.txt` ; do echo $c
+  if [ $justonejob == "skipfirst" ]; then echo "Skipping first file "; justonejob="f"; continue; fi
+  jstream=`echo $c | cut -d '-' -f 2 `
+  jtot=`echo $c | cut -d '-' -f 4 `
+  jskip=`echo $c | cut -d '-' -f 6 `
+  jne=`echo $c | cut -d '-' -f 8 `
+  let maxe=${jne}+${jskip}
+  let nout=${jne}/100
+  uniq=`date +%s `; uniq="${uniq}q"
+  echo "noutputs is $nout since jne is $jne "
+   hiinds=""
+   for run2 in `cat $c | sed "s%,%\n%g" | cut -d . -f 2`; do 
+    if [[ "$hiinds" == *"$run2"* ]]; then continue
+    else
+     if [ "$hiinds" != "" ]; then hiinds="${hiinds},"; fi
+     hiinds="${hiinds}${prefix}.${run2}.physics_MinBiasOverlay.merge.RAW";
+    fi
+   done
+   inoutds="--inDS $hiinds --outDS group.dataprep.${prefix}.HImerge.physics_MinBiasOverlay.${jtot}.test$outset"
+  echo "inoutds is $inoutds"
+  command=""; for (( ci=0; ci<${nout}; ci+=1 )); do command="$command outputbsfilterfile${ci}=%OUT.Zerobias${uniq}.bit-1.stream${ci}.RAW"; done
+  echo "Doing jstream $jstream and jtot $jtot and jskip $jskip and maxe $maxe and uniq is $uniq "
+  catc=`cat $c`
+  echo "Filelist is $catc "
+  if [ $justonejob != "t" ]; then lib="--libDS user.haastyle.1012174230.597093.lib._014565"; fi;
+  #if [ $justonejob != "t" ]; then lib="--noBuild"; fi
+  #site="--site ANALY_CERN_XROOTD"
+  #site="--site ANALY_BNL_ATLAS_1"
+  let jobsetid=$outset*100+100000+$targetstream ; jobset="--panda_jobsetID $jobsetid " ; echo "jobset is $jobset "
+  pathena --trf "BS_multipleSelector_trf.py inputbsfile=%IN $command lbnmapfile=serial TriggerBit=-1 Noutputs=$nout maxevents=$maxe skipevents=$jskip uniq=$uniq " --tmpDir ${tmp6dir}/ $inoutds --fileList $catc --memory=1800 --split=1 --dbRelease ddo.000001.Atlas.Ideal.DBRelease.v170601:DBRelease-17.6.1.tar.gz --skipScan $tarball $lib $jobset --official "--voms=atlas:/atlas/dataprep/Role=production" ${site} 
+  rt=$? ; if [ $rt -ne 0 ]; then echo "return value $rt from pathena for $c" > fail_`basename $c` ; fi
+  tarball="--inTarBall ${tmp6dir}/tarball${startstream}.tgz --inRunConfig ${tmp6dir}/runconf${startstream}.txt"
+  echo
+  if [ $justonejob == "t" ]; then echo "just doing one job"; break; fi
+ done
+done
+fi #domerging
+
+if [ "$doregister" == "t" ]; then echo
+for (( targetstream=$startstream; targetstream<$maxstream; targetstream+=1 )); do echo 
+     dname="group.dataprep.${prefix}.HImerge.physics_MinBiasOverlay";
+     for (( targetstr=0; targetstr<10; targetstr+=1 )); do dq2-register-container "${dname}_EXT${targetstr}.test${outset}/"; done
+ 
+ outsetin="${dname}.*.test${outset}/"
+ #outsetin="user.haastyle.HImerge.193825p.test12/"
+ echo "Taking datasets from $outsetin"
+
+ dcx=0;
+ for dc in `dq2-ls "${outsetin}"`; do echo "dcx is $dcx and dataset container is ${dc}";
+      dcxmod=$(($dcx % 10)); #break HI dataset into 10 streams
+      outdname="${dname}_EXT${dcxmod}.test${outset}/"
+      echo "selecting output dataset ${outdname}"
+      let dcx=dcx+1
+  #for dd in `dq2-list-datasets-container ${dc}`; do dq2-register-datasets-container $outdname "${dd}"; done
+  dq2-register-datasets-container $outdname `dq2-list-datasets-container ${dc}`
+ done
+ 
+ for (( targetstr=0; targetstr<10; targetstr+=1 )); do
+     outdname="${dname}_EXT${targetstr}.test${outset}/"
+     echo "${outdname} now has `dq2-list-datasets-container ${outdname} |grep -c merge` datasets and `dq2-ls -f -H ${outdname} |grep -c RAW` RAW files and `dq2-ls -f -H ${outdname} |grep \"total size:\"`"
+ done
+ 
+done
+fi
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/scripts/zerobias_select.sh b/Event/EventOverlay/EventOverlayJobTransforms/scripts/zerobias_select.sh
new file mode 100755
index 0000000000000000000000000000000000000000..c36e1984eafb603844cb080851e2aa7516fdacc4
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/scripts/zerobias_select.sh
@@ -0,0 +1,43 @@
+#!/bin/bash
+
+nout=20
+for (( c=0; c<$nout; c+=1 )); do command="$command outputbsfilterfile${c}=%OUT.Zerobias.bit240.stream${c}.RAW" ; done
+
+#dest="--destSE CERN-PROD_SCRATCHDISK"
+#dest="--destSE BNL-OSG2_SCRATCHDISK"
+
+#grl="data12_8TeV.periodAllYear_DetStatus-v49-pro13-03_CoolRunQuery-00-04-08_Atlas_Ready.xml"
+#grl="data12_8TeV.periodAllYear_DetStatus-v61-pro14-02_DQDefects-00-01-00_PHYS_StandardGRL_All_Good.xml"
+
+#lib="--libDS user.haastyle.1110025940.3439.lib._000159"
+#eout="--extOutFile=BS_multipleSelector.log";
+
+stream="11"
+iter="122"
+test="test3"
+
+#make sure we have the right map files
+cp ~/nfs3/zerobias_skim/lbn_anal_map_${stream}wanted_${iter}.txt .
+cp ~/nfs3/zerobias_skim/filelist_${stream}wanted_${iter}.txt .
+if [ -e lbn_anal_map_${stream}wanted_${iter}.txt ]; then echo "Using lbn_anal_map_${stream}wanted_${iter}.txt";
+else echo "No lbn_anal_map_${stream}wanted_${iter}.txt found"; exit; fi; 
+
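+#replicate the "stream 0" entries of the map once per output stream, so each of the $nout outputs gets its own copy of the selection map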
+rm -f lbn_anal_map_temp.txt
+for s in $(seq 0 1 `expr $nout - 1`); do sed "s%stream 0,%stream $s,%" lbn_anal_map_${stream}wanted_${iter}.txt >> lbn_anal_map_temp.txt; done
+mv lbn_anal_map_temp.txt lbn_anal_map_${stream}wanted_${iter}.txt
+
+#data="data12_8TeV.*.physics_ZeroBias.*.RAW"
+data="data12_8TeV.*.physics_ZeroBiasOverlay.*.RAW"
+#split="--nGBPerJob=10"
+split="--fileList=`cat filelist_${stream}wanted_${iter}.txt`"
+
+echo "Command is: pathena --trf \"BS_multipleSelector_trf.py inputbsfile=%IN $command Noutputs=$nout lbnmapfile=lbn_anal_map_${stream}wanted_${iter}.txt TriggerBit=240 \" --extFile=lbn_anal_map_${stream}wanted_${iter}.txt,filelist_${stream}wanted_${iter}.txt $eout --outDS user.ahaas.BSmultipleselector_select${stream}.$test --fileList=\`cat filelist_${stream}wanted_${iter}.txt\` --inDS $data $lib $dest --noBuild "
+pathena --trf "BS_multipleSelector_trf.py inputbsfile=%IN $command Noutputs=$nout lbnmapfile=lbn_anal_map_${stream}wanted_${iter}.txt TriggerBit=240 " --extFile=lbn_anal_map_${stream}wanted_${iter}.txt,filelist_${stream}wanted_${iter}.txt $eout --outDS user.ahaas.BSmultipleselector_select${stream}.$test $split --inDS $data $lib $dest --noBuild 
+#--nFiles 50 --express
+#--site ANALY_FZK
+#--excludedSite ANALY_IN2P3-CC,ANALY_BNL_ATLAS
+#--v
+#--individualOutDS
+
+#rm lbn_anal_map_${stream}wanted_${iter}.txt filelist_${stream}wanted_${iter}.txt
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/BS_Selector_jobOptions.py b/Event/EventOverlay/EventOverlayJobTransforms/share/BS_Selector_jobOptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..cee37f5f9e62ba8c9e9e26053c3a0368e0f760ca
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/BS_Selector_jobOptions.py
@@ -0,0 +1,52 @@
+include( "ByteStreamCnvSvc/BSEventStorageEventSelector_jobOptions.py" )
+svcMgr = theApp.serviceMgr()
+ByteStreamInputSvc = svcMgr.ByteStreamInputSvc
+
+theApp.EvtMax = 20
+theApp.SkipEvents = 0
+MessageSvc.OutputLevel = INFO
+
+ByteStreamInputSvc.FullFileName += ["/u/at/ahaas/nfs2/temp/minbias/data10_7TeV.00152845.physics_RNDM.merge.RAW/data10_7TeV.00152845.physics_RNDM.merge.RAW._lb0200._0001.1","/u/at/ahaas/nfs2/temp/minbias/data10_7TeV.00152845.physics_RNDM.merge.RAW/data10_7TeV.00152845.physics_RNDM.merge.RAW._lb0201._0001.1"]
+#ByteStreamInputSvc.ValidateEvent=False
+#ByteStreamInputSvc.DumpFlag = True
+#ByteStreamInputSvc.SkipNeventBeforeNext=10
+print ByteStreamInputSvc
+
+from AthenaCommon.AlgSequence import AlgSequence
+topSequence = AlgSequence()
+
+# get the filter algorithm
+from TrigT1ResultByteStream.TrigT1ResultByteStreamConf import CTPByteStreamTool,RecCTPByteStreamTool
+if not hasattr( svcMgr, "ByteStreamAddressProviderSvc" ):
+    from ByteStreamCnvSvcBase.ByteStreamCnvSvcBaseConf import ByteStreamAddressProviderSvc 
+    svcMgr += ByteStreamAddressProviderSvc()
+svcMgr.ByteStreamAddressProviderSvc.TypeNames += [
+    "ROIB::RoIBResult/RoIBResult",
+    "MuCTPI_RDO/MUCTPI_RDO",
+    "CTP_RDO/CTP_RDO",
+    "MuCTPI_RIO/MUCTPI_RIO",
+    "CTP_RIO/CTP_RIO"
+    ]
+from OverlayCommonAlgs.OverlayCommonAlgsConf import  BSFilter
+filAlg=BSFilter("BSFilter")
+filAlg.TriggerBit=63 # The trigger bit to select
+topSequence+=filAlg
+
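+# Copy the accepted events unchanged from the ByteStream input to a single ByteStream output file (temp/SelectedBSEvents)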
+# BS OutputStream Tool
+OutStreamName="OutputStreamBSCopy"
+from ByteStreamCnvSvc.ByteStreamCnvSvcConf import ByteStreamEventStorageOutputSvc,ByteStreamOutputStreamCopyTool
+bsCopyTool = ByteStreamOutputStreamCopyTool("OutputStreamBSCopyTool")
+svcMgr.ToolSvc += bsCopyTool
+
+# Service to write out BS events
+bsOutputSvc=ByteStreamEventStorageOutputSvc("BSESOutputSvc0",OutputDirectory="temp/",SimpleFileName="SelectedBSEvents")
+svcMgr += bsOutputSvc
+bsCopyTool.ByteStreamOutputSvc=bsOutputSvc
+bsCopyTool.ByteStreamInputSvc=svcMgr.ByteStreamInputSvc
+
+# create AthenaOutputStream for BS Copy 
+from AthenaServices.AthenaServicesConf import AthenaOutputStream
+OutputStreamBSCopy = AthenaOutputStream( OutStreamName, WritingTool=bsCopyTool )
+topSequence += OutputStreamBSCopy
+OutputStreamBSCopy.AcceptAlgs =["BSFilter"] 
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/BS_multipleSelector_HI_jobOptions.py b/Event/EventOverlay/EventOverlayJobTransforms/share/BS_multipleSelector_HI_jobOptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..fb0a32b5f8274150fc35c76d11d03cc6340ec312
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/BS_multipleSelector_HI_jobOptions.py
@@ -0,0 +1,85 @@
+
+include( "ByteStreamCnvSvc/BSEventStorageEventSelector_jobOptions.py" )
+svcMgr = theApp.serviceMgr()
+ByteStreamInputSvc = svcMgr.ByteStreamInputSvc
+
+theApp.EvtMax = -1
+theApp.SkipEvents = 0
+#MessageSvc.OutputLevel = DEBUG
+#MessageSvc.debugLimit = 10000
+
+###############
+#make a filelist of local files
+filelist = []
+
+#dq2-ls -f -p -H data10_7TeV.00167776.physics_ZeroBias.merge.RAW | sed "s%srm://osgserv04.slac.stanford.edu:8443/srm/v2/server?SFN=/xrootd/atlas/%filelist += [\"root://atl-xrdr//atlas/xrootd/%g" | sed "s%$%\"]%g" | grep xrootd > ./filelist.txt
+#include("./filelist.txt")
+
+mydir = "/u/at/ahaas/nfs3/"
+mydir+="user.haastyle.data10_hi.00170467.physics_bulk.daq.RAW_der1317833222/"
+import os
+files = os.listdir(mydir)
+for f in files:
+    filelist += [mydir+f]
+
+ByteStreamInputSvc.FullFileName += filelist
+################
+
+#use the FileStager
+#sampleFile="samples/"+RUN+".physics_ZeroBias.merge.RAW.def"
+#include ("EventOverlayJobTransforms/input_FileStager.py")
+#ByteStreamInputSvc.FullFileName = ic
+
+#ByteStreamInputSvc.ValidateEvent=False
+#ByteStreamInputSvc.DumpFlag = True
+#ByteStreamInputSvc.SkipNeventBeforeNext=10
+print ByteStreamInputSvc
+
+from AthenaCommon.AlgSequence import AlgSequence
+topSequence = AlgSequence()
+
+# get the filter algorithm
+from TrigT1ResultByteStream.TrigT1ResultByteStreamConf import CTPByteStreamTool,RecCTPByteStreamTool
+if not hasattr( svcMgr, "ByteStreamAddressProviderSvc" ):
+    from ByteStreamCnvSvcBase.ByteStreamCnvSvcBaseConf import ByteStreamAddressProviderSvc 
+    svcMgr += ByteStreamAddressProviderSvc()
+svcMgr.ByteStreamAddressProviderSvc.TypeNames += [
+    "ROIB::RoIBResult/RoIBResult",
+    "MuCTPI_RDO/MUCTPI_RDO",
+    "CTP_RDO/CTP_RDO",
+    "MuCTPI_RIO/MUCTPI_RIO",
+    "CTP_RIO/CTP_RIO"
+    ]
+
+from OverlayCommonAlgs.OverlayCommonAlgsConf import  BSFilter, ByteStreamMultipleOutputStreamCopyTool
+filAlg=BSFilter("BSFilter")
+filAlg.TriggerBit=BIT # The trigger bit to select - i.e. don't select by trigger bit, use TAG info!
+filAlg.filterfile=FILTERFILE # The thing made from the TAG files via "root -l -b -q HITAGprinter_run.C"
+print filAlg
+topSequence+=filAlg
+
+# BS OutputStream Tool
+from ByteStreamCnvSvc.ByteStreamCnvSvcConf import ByteStreamEventStorageOutputSvc
+bsCopyTool = ByteStreamMultipleOutputStreamCopyTool("MultipleOutputStreamBSCopyTool")
+bsCopyTool.uselbnmap=0 #don't try to read the lbn map!
+svcMgr.ToolSvc += bsCopyTool
+
+bsCopyTool.NoutputSvc = 1 #just one output service; each output file is capped at 100 events (MaxFileNE below), so you will still end up with multiple files
+for i in range(0,bsCopyTool.NoutputSvc):
+    bsOutputSvc=ByteStreamEventStorageOutputSvc("BSESOutputSvc"+str(i),MaxFileNE=100,OutputDirectory=str(OUTPUTDIR),ProjectTag="HIevents",AppName="Filtered",FileTag="test")
+    svcMgr += bsOutputSvc
+    if i==0: bsCopyTool.ByteStreamOutputSvc0=bsOutputSvc
+    if i==1: bsCopyTool.ByteStreamOutputSvc1=bsOutputSvc
+
+# BS InputStream
+bsCopyTool.ByteStreamInputSvc=svcMgr.ByteStreamInputSvc
+
+# create AthenaOutputStream for BS Copy 
+from AthenaServices.AthenaServicesConf import AthenaOutputStream
+OutputStreamBSCopy = AthenaOutputStream( "OutputStreamBSCopy", WritingTool=bsCopyTool )
+topSequence += OutputStreamBSCopy 
+        
+# apply prescale to the OutputStream 
+OutputStreamBSCopy.AcceptAlgs =["BSFilter"] 
+
+#
+print bsCopyTool
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/BS_multipleSelector_jobOptions.py b/Event/EventOverlay/EventOverlayJobTransforms/share/BS_multipleSelector_jobOptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..355e9bbb1d9d0fa647dfb29c901ee9d15f8a7ac4
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/BS_multipleSelector_jobOptions.py
@@ -0,0 +1,119 @@
+
+include( "ByteStreamCnvSvc/BSEventStorageEventSelector_jobOptions.py" )
+svcMgr = theApp.serviceMgr()
+ByteStreamInputSvc = svcMgr.ByteStreamInputSvc
+
+theApp.EvtMax = -1
+theApp.SkipEvents = 0
+#MessageSvc.OutputLevel = DEBUG
+#MessageSvc.debugLimit = 10000
+
+#make a filelist of local files
+#dq2-ls -f -p -H data10_7TeV.00167776.physics_ZeroBias.merge.RAW | sed "s%srm://osgserv04.slac.stanford.edu:8443/srm/v2/server?SFN=/xrootd/atlas/%filelist += [\"root://atl-xrdr//atlas/xrootd/%g" | sed "s%$%\"]%g" | grep xrootd > ./filelist.txt
+#filelist = []
+#include("./filelist.txt")
+#ByteStreamInputSvc.FullFileName += filelist
+
+#use the FileStager
+sampleFile="samples/"+RUN+".physics_ZeroBias.merge.RAW.def"
+include ("EventOverlayJobTransforms/input_FileStager.py")
+ByteStreamInputSvc.FullFileName = ic
+
+#ByteStreamInputSvc.ValidateEvent=False
+#ByteStreamInputSvc.DumpFlag = True
+#ByteStreamInputSvc.SkipNeventBeforeNext=10
+print ByteStreamInputSvc
+
+from AthenaCommon.AlgSequence import AlgSequence
+topSequence = AlgSequence()
+
+# get the filter algorithm
+from TrigT1ResultByteStream.TrigT1ResultByteStreamConf import CTPByteStreamTool,RecCTPByteStreamTool
+if not hasattr( svcMgr, "ByteStreamAddressProviderSvc" ):
+    from ByteStreamCnvSvcBase.ByteStreamCnvSvcBaseConf import ByteStreamAddressProviderSvc 
+    svcMgr += ByteStreamAddressProviderSvc()
+svcMgr.ByteStreamAddressProviderSvc.TypeNames += [
+    "ROIB::RoIBResult/RoIBResult",
+    "MuCTPI_RDO/MUCTPI_RDO",
+    "CTP_RDO/CTP_RDO",
+    "MuCTPI_RIO/MUCTPI_RIO",
+    "CTP_RIO/CTP_RIO"
+    ]
+
+from OverlayCommonAlgs.OverlayCommonAlgsConf import  BSFilter, ByteStreamMultipleOutputStreamCopyTool
+filAlg=BSFilter("BSFilter")
+filAlg.TriggerBit=BIT # The trigger bit to select
+print filAlg
+topSequence+=filAlg
+
+# BS OutputStream Tool
+from ByteStreamCnvSvc.ByteStreamCnvSvcConf import ByteStreamEventStorageOutputSvc
+bsCopyTool = ByteStreamMultipleOutputStreamCopyTool("MultipleOutputStreamBSCopyTool")
+svcMgr.ToolSvc += bsCopyTool
+
+bsCopyTool.NoutputSvc = 50
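+# create one ByteStreamEventStorageOutputSvc per output stream and attach it to the corresponding
+# ByteStreamOutputSvc<i> property of the copy tool, which routes each selected event to one of the streams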
+for i in range(0,bsCopyTool.NoutputSvc):
+    bsOutputSvc=ByteStreamEventStorageOutputSvc("BSESOutputSvc"+str(i),OutputDirectory="./",SimpleFileName="PrescaledEvents_bit"+str(BIT)+"_"+str(i)+".RAW")
+    svcMgr += bsOutputSvc
+    # assign this service to the matching ByteStreamOutputSvc<i> property of the copy tool
+    setattr(bsCopyTool, "ByteStreamOutputSvc"+str(i), bsOutputSvc)
+
+# BS InputStream
+bsCopyTool.ByteStreamInputSvc=svcMgr.ByteStreamInputSvc
+
+# create AthenaOutputStream for BS Copy 
+from AthenaServices.AthenaServicesConf import AthenaOutputStream
+OutputStreamBSCopy = AthenaOutputStream( "OutputStreamBSCopy", WritingTool=bsCopyTool )
+topSequence += OutputStreamBSCopy 
+        
+# apply prescale to the OutputStream 
+OutputStreamBSCopy.AcceptAlgs =["BSFilter"] 
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/BeamOverlay_jobOptions.py b/Event/EventOverlay/EventOverlayJobTransforms/share/BeamOverlay_jobOptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..7a6140be2019934d5f15d930933c8309c4914f72
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/BeamOverlay_jobOptions.py
@@ -0,0 +1,15 @@
+
+include.block ( "EventOverlayJobTransforms/BeamOverlay_jobOptions.py" )
+
+from OverlayCommonAlgs.OverlayFlags import OverlayFlags
+
+from AthenaCommon.Resilience import treatException,protectedInclude
+
+if OverlayFlags.doBCM():
+    protectedInclude( "InDetBCM_EventAthenaPool/InDetBCM_EventAthenaPool_joboptions.py" )
+    from AthenaCommon.AlgSequence import AlgSequence
+    job = AlgSequence()
+    from AthenaCommon import CfgGetter
+    job += CfgGetter.getAlgorithm("BCM_Digitization/BCM_Digitization", tryDefaultConfigurable=True)
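+    # point the BCM digitization tool at the background event store (BkgEvent_0_SG)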
+    job.BCM_Digitization.DigitizationTool.EvtStore = "BkgEvent_0_SG"
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/CaloOverlay_jobOptions.py b/Event/EventOverlay/EventOverlayJobTransforms/share/CaloOverlay_jobOptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..bff2dfc397686cda282b46946908f58dd9a0dcef
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/CaloOverlay_jobOptions.py
@@ -0,0 +1,80 @@
+
+include.block ( "EventOverlayJobTransforms/CaloOverlay_jobOptions.py" )
+
+from AthenaCommon.Resilience import treatException,protectedInclude
+
+from Digitization.DigitizationFlags import jobproperties
+from OverlayCommonAlgs.OverlayFlags import OverlayFlags
+
+if OverlayFlags.doBkg():
+   if OverlayFlags.doLAr():
+       from OverlayCommonAlgs.OverlayCommonAlgsConf import DeepCopyObjects
+       job += DeepCopyObjects("BkgRdo2")
+       job.BkgRdo2.LArObjects = True
+   if OverlayFlags.doTile():
+       from OverlayCommonAlgs.OverlayCommonAlgsConf import DeepCopyObjects
+       job += DeepCopyObjects("BkgRdo3")
+       job.BkgRdo3.TileObjects = True
+                               
+                            
+if OverlayFlags.doLAr() or OverlayFlags.doTile():
+
+   jobproperties.Digitization.doCaloNoise=False
+
+   if isRealData:
+      include("LArConditionsCommon/LArIdMap_comm_jobOptions.py")
+      include("LArConditionsCommon/LArConditionsCommon_comm_jobOptions.py")
+   else:
+      include("LArConditionsCommon/LArIdMap_MC_jobOptions.py")
+      include("LArConditionsCommon/LArConditionsCommon_MC_jobOptions.py")
+
+   include( "LArIdCnv/LArIdCnv_joboptions.py" )
+   include( "CaloDetMgrDetDescrCnv/CaloDetMgrDetDescrCnv_joboptions.py" )
+
+if OverlayFlags.doLAr():
+
+    from AthenaCommon.GlobalFlags import GlobalFlags
+    
+    # FIXME this is for doing Overlay with MC RDO + MC hits
+    #   real data RDO will require some changes in the setup for proper db access
+    #include( "LArDetDescr/LArDetDescr_joboptions.py" )
+    #include( "LArAthenaPool/LArAthenaPool_joboptions.py" )
+    # We also need the conditions svc for MC constants:
+    if readBS and isRealData:
+       theApp.Dlls += [ "LArByteStream"]
+       LArDigitKey = "FREE"
+       ServiceMgr.ByteStreamAddressProviderSvc.TypeNames += ["LArDigitContainer/"+LArDigitKey]
+       ServiceMgr.ByteStreamAddressProviderSvc.TypeNames += ["LArFebHeaderContainer/LArFebHeader"]
+       ServiceMgr.ByteStreamAddressProviderSvc.TypeNames+=["LArDigitContainer/LArDigitContainer_MC"]
+   
+    from LArROD.LArRawChannelGetter import LArRawChannelGetter
+    LArRawChannelGetter()
+    
+    from LArROD.LArDigits import DefaultLArDigitThinner
+    LArDigitThinner = DefaultLArDigitThinner('LArDigitThinner') # automatically added to topSequence
+    if isRealData:
+       #job.digitmaker1.LArPileUpTool.OutputLevel=DEBUG
+       #MessageSvc.debugLimit = 100000
+       #job.digitmaker1.LArPileUpTool.useLArFloat=False
+       job.digitmaker1.LArPileUpTool.PedestalKey = "Pedestal"
+       job.LArRawChannelBuilder.DataLocation = "LArDigitContainer_MC"
+
+#----------------------------------------------------------------
+if OverlayFlags.doTile():
+
+    include( "TileIdCnv/TileIdCnv_jobOptions.py" )
+    include( "TileConditions/TileConditions_jobOptions.py" )        
+
+    include("TileSimAlgs/TileDigitization_jobOptions.py")
+    job.TileHitVecToCnt.DigitizationTool.RndmEvtOverlay = True
+    theTileDigitsMaker.RndmEvtOverlay = True
+    if readBS and isRealData:
+       theApp.Dlls += [ "TileByteStream"]
+       ServiceMgr.ByteStreamAddressProviderSvc.TypeNames += [ "TileBeamElemContainer/TileBeamElemCnt"]
+       ServiceMgr.ByteStreamAddressProviderSvc.TypeNames += [ "TileRawChannelContainer/TileRawChannelCnt"]
+       ServiceMgr.ByteStreamAddressProviderSvc.TypeNames += [ "TileDigitsContainer/TileDigitsCnt"]
+       ServiceMgr.ByteStreamAddressProviderSvc.TypeNames += [ "TileL2Container/TileL2Cnt"]
+       ServiceMgr.ByteStreamAddressProviderSvc.TypeNames += [ "TileLaserObject/TileLaserObj"]
+
+#--------------------
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/ConfiguredOverlay_jobOptions.py b/Event/EventOverlay/EventOverlayJobTransforms/share/ConfiguredOverlay_jobOptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..777c919e0a888a8128cee9e62b868770fe409ef8
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/ConfiguredOverlay_jobOptions.py
@@ -0,0 +1,111 @@
+include.block ( "EventOverlayJobTransforms/ConfiguredOverlay_jobOptions.py" )
+
+#--------------------------------------------------------------
+# Load POOL support
+# (modified by Piyali.Banerjee to include skipping of events)
+#--------------------------------------------------------------
+
+from AthenaCommon.AlgSequence import AlgSequence
+job = AlgSequence()
+topSequence = job
+
+#=======================================================================
+from AthenaCommon.AppMgr import ServiceMgr
+from PileUpComps.PileUpCompsConf import PileUpEventLoopMgr
+    
+from PileUpComps.PileUpCompsConf import BkgStreamsCache
+
+import AthenaPoolCnvSvc.WriteAthenaPool
+from EventSelectorAthenaPool.EventSelectorAthenaPoolConf import EventSelectorAthenaPool
+
+from StoreGate.StoreGateConf import StoreGateSvc
+
+from Digitization.DigitizationFlags import jobproperties
+digitization = jobproperties.Digitization
+
+pileUpEventLoopMgr = PileUpEventLoopMgr()
+
+pupStream = "PileUpCollXingStream"
+jobproperties.Digitization.rndmSeedList.addSeed( pupStream, 123, 345 )
+
+pileUpEventLoopMgr.OutStreamType = "AthenaOutputStream"
+
+include ("EventOverlayJobTransforms/synchFlags_jobOptions.py")
+print "================ OverlayFlags ================ "
+OverlayFlags.Print()
+
+#if globalflags.InputFormat()=='bytestream':
+if readBS:
+    #if isRealData:
+    #   include ("RecExCommission/RecExCommission_BSRead_config_hack.py")
+    #else:
+    from ByteStreamCnvSvc import ReadByteStream
+    include("RecExCommon/BSRead_config.py")
+    ServiceMgr.ByteStreamInputSvc.FullFileName = DataInputCollections
+    ServiceMgr.ByteStreamInputSvc.EventStore= "StoreGateSvc/OriginalEvent_SG"
+    ServiceMgr.ByteStreamAddressProviderSvc.StoreID=6
+    from EventSelectorAthenaPool.EventSelectorAthenaPoolConf import AthenaPoolAddressProviderSvc
+    ServiceMgr += AthenaPoolAddressProviderSvc("AthenaPoolAddressProviderSvc")
+    ServiceMgr.ProxyProviderSvc.ProviderNames += [ "AthenaPoolAddressProviderSvc" ]
+else:
+    import AthenaPoolCnvSvc.ReadAthenaPool
+    ServiceMgr.EventSelector.InputCollections = DataInputCollections
+
+# Set up data input
+pileUpEventLoopMgr.OrigSelector="EventSelector"
+pileUpEventLoopMgr.firstXing=0
+pileUpEventLoopMgr.lastXing=0
+pileUpEventLoopMgr.IsEventOverlayJob=True
+ServiceMgr.EventSelector.SkipEvents = athenaCommonFlags.SkipEvents()
+
+# Set up MC input
+pileUpEventLoopMgr.bkgCaches += [BkgStreamsCache("mcSignalEvent"+"_Cache")]
+tmpCache=pileUpEventLoopMgr.bkgCaches[0]
+tmpCache.CollPerXing=1
+tmpCache.CollDistribution="Fixed"
+tmpCache.ReadDownscaleFactor=1
+tmpCache.RndmGenSvc=jobproperties.Digitization.rndmSvc()
+tmpCache.RndmStreamName=pupStream
+
+tmpEvtSel=EventSelectorAthenaPool("mcSignalEvent"+"_EventSelector")
+tmpEvtSel.InputCollections = SignalInputCollections
+
+tmpEvtSel.KeepInputFilesOpen = True
+tmpEvtSel.SkipEvents = athenaCommonFlags.SkipEvents()
+
+ServiceMgr += tmpEvtSel
+tmpCache.EventSelector="mcSignalEvent"+"_EventSelector"
+
+ServiceMgr += pileUpEventLoopMgr
+
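+# Make sure a PileUpMergeSvc instance is registered; the configurable class name
+# is deleted from the local namespace once the service has been added.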
+if not hasattr(ServiceMgr, 'PileUpMergeSvc'):
+    from PileUpTools.PileUpToolsConf import PileUpMergeSvc
+    ServiceMgr += PileUpMergeSvc()
+    del PileUpMergeSvc
+
+# In overlay, unlike pileup, we are NOT combining different MC events.
+# Make sure HepMcParticleLinks use the right offset in McEventCollection.
+ServiceMgr.PileUpMergeSvc.ReturnTimedData=False
+
+#================================================================
+
+#synchronization of Beam flags and Digitization flags - KAA
+#if not (digitization.doMinimumBias.get_Value() or digitization.doCavern.get_Value() or
+#        digitization.doBeamGas.get_Value() or digitization.doBeamHalo.get_Value()) :
+DetFlags.pileup.all_setOff()
+digitization.numberOfCollisions=0.0
+
+#-----------------------------------------------------------
+# Check Beam and Digitization jobproperties are synchronised
+#-----------------------------------------------------------
+from AthenaCommon.BeamFlags import jobproperties
+if jobproperties.Beam.numberOfCollisions.get_Value() != digitization.numberOfCollisions.get_Value() :
+   jobproperties.Beam.numberOfCollisions = digitization.numberOfCollisions.get_Value()
+   jobproperties.Beam.override = True ## just in case - default is True
+
+if jobproperties.Beam.bunchSpacing.get_Value() != digitization.bunchSpacing.get_Value() :
+   jobproperties.Beam.bunchSpacing = digitization.bunchSpacing.get_Value()
+   jobproperties.Beam.override = True ## just in case - default is True
+
+#================================================================
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/InDetMcSignal_jobOptions.py b/Event/EventOverlay/EventOverlayJobTransforms/share/InDetMcSignal_jobOptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..bda402582415ef62ca3d65afd0bbcfdab47f47a3
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/InDetMcSignal_jobOptions.py
@@ -0,0 +1,11 @@
+include.block ( "EventOverlayJobTransforms/InDetMcSignal_jobOptions.py" )
+
+from AthenaCommon.GlobalFlags import globalflags
+from OverlayCommonAlgs.OverlayFlags import OverlayFlags
+if OverlayFlags.doPixel() or OverlayFlags.doSCT() or OverlayFlags.doTRT():
+
+   from OverlayCommonAlgs.OverlayCommonAlgsConf import SaveInDetObjects
+   job += SaveInDetObjects()
+   if globalflags.DataSource()=='data':
+      job.SaveInDetObjects.SCTType = True
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/InnerDetectorOverlay_jobOptions.py b/Event/EventOverlay/EventOverlayJobTransforms/share/InnerDetectorOverlay_jobOptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..f1461d2dfdf55a7729543afc22bc2a0302222028
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/InnerDetectorOverlay_jobOptions.py
@@ -0,0 +1,116 @@
+include.block ( "EventOverlayJobTransforms/InnerDetectorOverlay_jobOptions.py" )
+
+from Digitization.DigitizationFlags import jobproperties
+from OverlayCommonAlgs.OverlayFlags import OverlayFlags
+from AthenaCommon import CfgMgr
+
+from AthenaCommon.Resilience import treatException,protectedInclude
+
+if OverlayFlags.doPixel() or OverlayFlags.doSCT() or OverlayFlags.doTRT():
+
+    if OverlayFlags.doBkg():
+       from OverlayCommonAlgs.OverlayCommonAlgsConf import DeepCopyObjects
+       job += DeepCopyObjects("BkgRdo1")
+       job.BkgRdo1.InDetObjects = True
+
+    include( "InDetEventAthenaPool/InDetEventAthenaPool_joboptions.py" ) # FIXME: is needed?
+
+    from InDetOverlay.InDetOverlayConf import InDetOverlay
+    indetovl = InDetOverlay()
+
+    jobproperties.Digitization.doInDetNoise=False
+
+    #if readBS and isRealData:
+    #   include( "InDetCosmicRecExample/InDetCosmicFlags_jobOptions.py" )
+
+    if OverlayFlags.doPixel():
+        protectedInclude( "PixelDigitization/PixelDigitization_jobOptions.py" )
+        from AthenaCommon import CfgGetter
+        CfgGetter.getPublicTool("PixelDigitizationTool",checkType=True).EvtStore = "BkgEvent_0_SG"
+        indetovl.do_Pixel = True
+        if readBS and isRealData:
+           job.InDetPixelRawDataProvider.EvtStore = "OriginalEvent_SG"
+           ServiceMgr.ByteStreamAddressProviderSvc.TypeNames += [ "PixelRDO_Container/PixelRDOs" ]
+           #ServiceMgr.ByteStreamAddressProviderSvc.TypeNames += [ "Trk::PixelClusterContainer/PixelOnlineClusters" ]
+    else:
+        indetovl.do_Pixel = False
+
+    if OverlayFlags.doSCT():
+
+        # Setup the ReadCalibChip folders and Svc
+        if isRealData:
+           #conddb.blockFolder("/SCT/DAQ/Calibration/ChipGain")
+           #conddb.blockFolder("/SCT/DAQ/Calibration/ChipNoise")
+           #conddb.addFolder("SCT_OFL","/SCT/DAQ/Calibration/ChipGain",forceMC=True)
+           #conddb.addFolder("SCT_OFL","/SCT/DAQ/Calibration/ChipNoise",forceMC=True)
+           conddb.addFolder("SCT_OFL","/SCT/DAQ/Calibration/ChipGain<tag>SctDaqCalibrationChipGain-Apr10-01</tag>",forceMC=True)
+           conddb.addFolder("SCT_OFL","/SCT/DAQ/Calibration/ChipNoise<tag>SctDaqCalibrationChipNoise-Apr10-01</tag>",forceMC=True)
+
+           #if not conddb.folderRequested('/SCT/DAQ/Calibration/ChipGain'):
+           #   conddb.addFolderSplitOnline("SCT","/SCT/DAQ/Calibration/ChipGain","/SCT/DAQ/Calibration/ChipGain",forceMC=True)
+           #if not conddb.folderRequested('/SCT/DAQ/Calibration/ChipNoise'):
+           #   conddb.addFolderSplitOnline("SCT","/SCT/DAQ/Calibration/ChipNoise","/SCT/DAQ/Calibration/ChipNoise",forceMC=True)
+
+        # Dynamic configuration of SCT RDO type
+        # This algorithm must be executed before SCT_Digitization
+        from InDetOverlay.InDetOverlayConf import DynConfSCT
+        job += DynConfSCT()
+
+
+        protectedInclude( "SCT_Digitization/SCT_Digitization_jobOptions.py" )
+        if hasattr(job.SCT_Digitization, "DigitizationTool"):
+            from AthenaCommon import CfgGetter
+            CfgGetter.getPublicTool("SCT_DigitizationTool",checkType=True).EvtStore = "BkgEvent_0_SG"
+        else:
+            job.SCT_Digitization.EvtStore  = "BkgEvent_0_SG"
+        indetovl.do_SCT = True
+        if readBS and isRealData:
+           job.InDetSCTRawDataProvider.EvtStore = "OriginalEvent_SG"
+           ServiceMgr.ByteStreamAddressProviderSvc.TypeNames += [ "SCT_RDO_Container/SCT_RDOs" ]
+           #ServiceMgr.ByteStreamAddressProviderSvc.TypeNames += [ "Trk::SCT_ClusterContainer/SCT_OnlineClusters" ]
+    else:
+        indetovl.do_SCT = False
+
+    if OverlayFlags.doTRT():
+        if isRealData:
+           conddb.blockFolder("/TRT/Cond/DigVers")
+           #conddb.addFolderWithTag("TRT_OFL","/TRT/Cond/DigVers","TRTCondDigVers-Collisions-01",force=True,forceMC=True)
+           conddb.addFolder("TRT_OFL","/TRT/Cond/DigVers",forceMC=True)
+        protectedInclude( "TRT_Digitization/TRT_Digitization_jobOptions.py" )
+        if hasattr(job.TRTDigitization, "DigitizationTool"):
+            job.TRTDigitization.DigitizationTool.EvtStore = "BkgEvent_0_SG"
+        else:
+            job.TRTDigitization.EvtStore   = "BkgEvent_0_SG"
+        ### AG: FIXME: support old TRT digi for now.  The following block should be removed.
+        try:
+            job.TRTDigitization.StoreName   = "BkgEvent_0_SG"
+            log.warning("TRTDigitization has ambiguous event store setup: two StoreGate handles!")
+        except:
+            log.info("InnerDetectorOverlay_jobOptions.py: caught an exception when trying to configure TRTDigitization using pre-AthAlgorithm StoreName. Time to remove this oboslete jO code.")
+
+        indetovl.do_TRT = True
+        if readBS and isRealData:
+           job.InDetTRTRawDataProvider.EvtStore = "OriginalEvent_SG"
+           ServiceMgr.ByteStreamAddressProviderSvc.TypeNames += [ "TRT_RDO_Container/TRT_RDOs" ]
+           #ServiceMgr.ByteStreamAddressProviderSvc.TypeNames += [ "Trk::TRT_DriftCircleContainer/TRT_DriftCircle" ]
+           if hasattr(job.TRTDigitization, "DigitizationTool"):
+               job.TRTDigitization.DigitizationTool.Override_getT0FromData = 1
+           else:
+               job.TRTDigitization.Override_getT0FromData = 1
+
+           from TRT_ConditionsServices.TRT_ConditionsServicesConf import TRT_CalDbSvc
+           InDetTRTCalDbSvc = TRT_CalDbSvc()
+           ServiceMgr += InDetTRTCalDbSvc
+          #from IOVDbSvc.CondDB import conddb
+#           conddb.addFolder("TRT","/TRT/Calib/T0","<tag>TrtCalibt0-UPD2-FDR2-01</tag>")
+#           conddb.addFolder("TRT","/TRT/Calib/RT","<tag>TrtCalibRt-UPD2-FDR2-01</tag>")
+           conddb.addFolder("TRT","/TRT/Calib/T0","<tag>TrtCalibRt-HLT-UPD1-01</tag>")
+           conddb.addFolder("TRT","/TRT/Calib/RT","<tag>TrtCalibT0-HLT-UPD1-01</tag>")
+           conddb.addFolder("TRT_ONL","/TRT/Onl/ROD/Compress")
+    else:
+        indetovl.do_TRT = False
+
+    if OverlayFlags.doSignal():
+       include ("EventOverlayJobTransforms/InDetMcSignal_jobOptions.py")
+
+    job += indetovl
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/L1Signal_jobOptions.py b/Event/EventOverlay/EventOverlayJobTransforms/share/L1Signal_jobOptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..4f75935828261bf7df49967eb7a39fe8aad4d6c7
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/L1Signal_jobOptions.py
@@ -0,0 +1,187 @@
+
+include.block ( "EventOverlayJobTransforms/L1Signal_jobOptions.py" )
+
+from AthenaCommon.Logging import logging
+
+from AthenaCommon.DetFlags import DetFlags
+from AthenaCommon.GlobalFlags import globalflags
+from Digitization.DigitizationFlags import jobproperties
+from OverlayCommonAlgs.OverlayFlags import OverlayFlags
+
+if OverlayFlags.doLVL1():
+
+    from AthenaCommon.AlgSequence import AlgSequence
+    topSequence = AlgSequence()
+
+    if DetFlags.simulateLVL1.LAr_on():
+        from LArL1Sim.LArL1SimConf import *
+        newLArTTL1Maker=LArTTL1Maker("newLArTTL1Maker")
+        # check if LAr digitization has run before; if so, use the hit map produced in the detector store by LAr digitization
+        if not hasattr(topSequence,"digitmaker2"):
+           newLArTTL1Maker.useMapFromStore = False
+
+        job += newLArTTL1Maker
+        # Noise 
+        if not jobproperties.Digitization.doCaloNoise.get_Value():
+            job.newLArTTL1Maker.NoiseOnOff= False #(default:True) 
+        # PileUp
+        job.newLArTTL1Maker.PileUp = True
+        job.newLArTTL1Maker.EventStore = "BkgEvent_0_SG"    
+
+    if DetFlags.simulateLVL1.Tile_on():
+       from TileSimAlgs.TileSimAlgsConf import TileHitToTTL1
+       newTileHitToTTL1=TileHitToTTL1("newTileHitToTTL1")
+       newTileHitToTTL1.TileHitContainer=job.TileHitToTTL1.TileHitContainer
+       newTileHitToTTL1.TileInfoName=job.TileHitToTTL1.TileInfoName
+       newTileHitToTTL1.TileTTL1Container=job.TileHitToTTL1.TileTTL1Container
+       newTileHitToTTL1.EvtStore = "BkgEvent_0_SG" 
+       job += newTileHitToTTL1
+
+       from Digitization.DigitizationFlags import jobproperties
+       newTileHitToTTL1.RndmSvc=jobproperties.Digitization.rndmSvc()
+       jobproperties.Digitization.rndmSeedList.addSeed("newTile_HitToTTL1", 2789801, 189245514)
+
+    from AthenaCommon.BeamFlags import jobproperties
+    if jobproperties.Beam.beamType() == 'cosmics':
+         newTileTTL1Cosmics=TileHitToTTL1("newTileTTL1Cosmics")
+         newTileTTL1Cosmics.RndmSvc=jobproperties.Digitization.rndmSvc()
+         jobproperties.Digitization.rndmSeedList.addSeed("newTile_TTL1Cosmics", 1789891, 189240513)
+         newTileTTL1Cosmics.EvtStore = "BkgEvent_0_SG"
+         job += newTileTTL1Cosmics 
+         job.newTileTTL1Cosmics.TileTTL1Type = job.TileHitToTTL1_Cosmics.TileTTL1Type 
+         job.newTileTTL1Cosmics.TileHitContainer = job.TileHitToTTL1_Cosmics.TileHitContainer
+         job.newTileTTL1Cosmics.TileTTL1Container = job.TileHitToTTL1_Cosmics.TileTTL1Container
+         job.newTileTTL1Cosmics.TileMBTSTTL1Container = job.TileHitToTTL1_Cosmics.TileMBTSTTL1Container    
+
+    if DetFlags.digitize.LVL1_on():
+
+       #--------------------------------------------------------------
+       # RPC stuff
+       #--------------------------------------------------------------
+       if DetFlags.simulateLVL1.RPC_on():
+          from TrigT1RPCsteering.TrigT1RPCsteeringConf import *
+          newTrigT1RPC = TrigT1RPC("newTrigT1RPC")
+          newTrigT1RPC.EventStore = "BkgEvent_0_SG"
+          job += newTrigT1RPC
+
+       #--------------------------------------------------------------
+       # TGC stuff
+       #--------------------------------------------------------------
+       if DetFlags.simulateLVL1.TGC_on():
+          from TrigT1TGC.TrigT1TGCConf import LVL1TGCTrigger__LVL1TGCTrigger
+          newTrigT1TGC = LVL1TGCTrigger__LVL1TGCTrigger ("newTrigT1TGC")
+          newTrigT1TGC.EventStore = "BkgEvent_0_SG"
+          job += newTrigT1TGC
+          job.newTrigT1TGC.InputData_perEvent = "TGC_DIGITS"
+          job.newTrigT1TGC.ASDOutDataLocation = "/Event/ASDOutDataLocation" 
+          #job.newTrigT1TGC.MuonTrigConfig = "/Run/MuonTrigConfig"
+          job.newTrigT1TGC.MuCTPIInput_TGC = "/Event/L1MuctpiStoreTGC"
+          job.newTrigT1TGC.MaskFileName = "TrigT1TGCMaskedChannel.db"
+          job.newTrigT1TGC.MaskFileName12 = "TrigT1TGCMaskedChannel._12.db"
+
+       #--------------------------------------------------------------
+       # TrigT1Muctpi Algos
+       #--------------------------------------------------------------
+       if DetFlags.simulateLVL1.RPC_on() or DetFlags.simulateLVL1.TGC_on():
+          from TrigT1Muctpi.TrigT1MuctpiConfig import L1Muctpi
+          #ACH job += L1Muctpi("newL1Muctpi")
+          #ACH job.newL1Muctpi.StoreGateSvc = "BkgEvent_0_SG"
+
+       #-------------------------------------------------------
+       # TrigT1CaloSim Algos
+       #-------------------------------------------------------
+       if DetFlags.simulateLVL1.Calo_on():
+          if DetFlags.simulateLVL1.LAr_on() and DetFlags.simulateLVL1.Tile_on():
+             from OverlayCommonAlgs.OverlayCommonAlgsConf import RemoveObjects
+             job += RemoveObjects("RemoveTriggerTowerOldMC")
+             if globalflags.DataSource()=='data':
+                job.RemoveTriggerTowerOldMC.RemoveTriggerTowerMC=True
+
+             from TrigT1CaloSim.TrigT1CaloSimConf import LVL1__TriggerTowerMaker
+             from TrigT1CaloSim.TrigT1CaloSimConfig import TriggerTowerMaker_TTL1_Rel13
+             newTriggerTowerMaker = TriggerTowerMaker_TTL1_Rel13( 'newTriggerTowerMaker' )
+             job += [newTriggerTowerMaker]
+             job.newTriggerTowerMaker.EventStore = "BkgEvent_0_SG"   
+             job.newTriggerTowerMaker.DoOverlay = False
+             job.newTriggerTowerMaker.OverlayPedestal = 40.
+             job.newTriggerTowerMaker.DoNotCalibrate=job.TriggerTowerMaker.DoNotCalibrate
+             job.newTriggerTowerMaker.MatchFIR=job.TriggerTowerMaker.MatchFIR
+             job.newTriggerTowerMaker.EmBFilterCoeffs=job.TriggerTowerMaker.EmBFilterCoeffs
+             job.newTriggerTowerMaker.EmECFilterCoeffs=job.TriggerTowerMaker.EmECFilterCoeffs
+             job.newTriggerTowerMaker.FcalEFilterCoeffs=job.TriggerTowerMaker.FcalEFilterCoeffs
+             job.newTriggerTowerMaker.TileFilterCoeffs=job.TriggerTowerMaker.TileFilterCoeffs
+             job.newTriggerTowerMaker.HecFilterCoeffs=job.TriggerTowerMaker.HecFilterCoeffs
+             job.newTriggerTowerMaker.FcalHFilterCoeffs=job.TriggerTowerMaker.FcalHFilterCoeffs
+             job.newTriggerTowerMaker.EmBThresh=job.TriggerTowerMaker.EmBThresh
+             job.newTriggerTowerMaker.EmECThresh=job.TriggerTowerMaker.EmECThresh
+             job.newTriggerTowerMaker.FcalEThresh=job.TriggerTowerMaker.FcalEThresh
+             job.newTriggerTowerMaker.TileThresh=job.TriggerTowerMaker.TileThresh
+             job.newTriggerTowerMaker.HecThresh=job.TriggerTowerMaker.HecThresh
+             job.newTriggerTowerMaker.FcalHThresh=job.TriggerTowerMaker.FcalHThresh
+
+             from TrigT1CaloSim.TrigT1CaloSimConf import LVL1__CPMTowerMaker
+             from TrigT1CaloSim.TrigT1CaloSimConf import LVL1__JetElementMaker
+             from TrigT1CaloSim.TrigT1CaloSimConf import LVL1__EmTauTrigger
+             from TrigT1CaloSim.TrigT1CaloSimConf import LVL1__JetTrigger
+             from TrigT1CaloSim.TrigT1CaloSimConf import LVL1__EnergyTrigger
+             from TrigT1CaloSim.TrigT1CaloSimConf import LVL1__CPCMMMaker
+             from TrigT1CaloSim.TrigT1CaloSimConf import LVL1__JEPCMMMaker
+             from TrigT1CaloSim.TrigT1CaloSimConf import LVL1__ROD
+             from TrigT1CaloSim.TrigT1CaloSimConf import LVL1__Tester
+             from TrigT1CaloSim.TrigT1CaloSimConf import LVL1__DumpTriggerObjects
+
+             job += LVL1__CPMTowerMaker( 'newCPMTowerMaker' )
+             job.newCPMTowerMaker.EventStore = "BkgEvent_0_SG"
+             job += LVL1__JetElementMaker( 'newJetElementMaker' )
+             job.newJetElementMaker.EventStore = "BkgEvent_0_SG"
+             job += LVL1__EmTauTrigger( 'newEmTauTrigger' )
+             job.newEmTauTrigger.EventStore = "BkgEvent_0_SG"
+             job += LVL1__JetTrigger( 'newJetTrigger' )
+             job.newJetTrigger.EventStore = "BkgEvent_0_SG"
+             job += LVL1__EnergyTrigger( 'newEnergyTrigger' )
+             job.newEnergyTrigger.EventStore = "BkgEvent_0_SG"
+             job += LVL1__ROD( 'newROD' )
+             job.newROD.EventStore = "BkgEvent_0_SG"
+             job += LVL1__CPCMMMaker( 'newCPCMMMaker' )
+             job.newCPCMMMaker.EventStore = "BkgEvent_0_SG"
+             job += LVL1__JEPCMMMaker( 'newJEPCMMMaker' )
+             job.newJEPCMMMaker.EventStore = "BkgEvent_0_SG" 
+
+       #-------------------------------------------------------
+       # TrigT1MBTS Alg
+       #-------------------------------------------------------
+       if DetFlags.simulateLVL1.Calo_on():
+          from TrigT1MBTS.TrigT1MBTSConf import LVL1__TrigT1MBTS
+          job += LVL1__TrigT1MBTS("newTrigT1MBTS")
+          job.newTrigT1MBTS.EvtStore = "BkgEvent_0_SG"
+
+       #-------------------------------------------------------
+       # TrigT1BCM Alg
+       #-------------------------------------------------------
+       if DetFlags.simulateLVL1.BCM_on():
+          from TrigT1BCM.TrigT1BCMConf import LVL1__TrigT1BCM
+          job += LVL1__TrigT1BCM("newTrigT1BCM")
+          job.newTrigT1BCM.EvtStore = "BkgEvent_0_SG"
+
+       #-------------------------------------------------------
+       # TrigT1LUCID Alg
+       #-------------------------------------------------------
+       if DetFlags.simulateLVL1.Lucid_on():
+          from TrigT1Lucid.TrigT1LucidConf import LVL1__TrigT1Lucid
+          job += LVL1__TrigT1Lucid("newTrigT1Lucid")
+          job.newTrigT1Lucid.EvtStore = "BkgEvent_0_SG"
+    
+       #-------------------------------------------------------
+       # TrigT1CTP Algos
+       #-------------------------------------------------------
+       from TrigT1CTP.TrigT1CTPConfig import CTPSimulationInOverlay
+       job += CTPSimulationInOverlay("newCTPSimulationInDigi","CTPSimulation2")
+       job.newCTPSimulationInDigi.EvtStore = "BkgEvent_0_SG"
+
+       #-------------------------------------------------------
+       # TrigT1RoIB Algos
+       #-------------------------------------------------------
+       from TrigT1RoIB.TrigT1RoIBConfig import RoIBuilderInDigi
+       job += RoIBuilderInDigi("newRoIBuilder")
+       job.newRoIBuilder.StoreGateSvc = "BkgEvent_0_SG"
+    
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/LAr.py b/Event/EventOverlay/EventOverlayJobTransforms/share/LAr.py
new file mode 100644
index 0000000000000000000000000000000000000000..0f5b99261ede8b0ae690b1698190db94d2ad4da4
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/LAr.py
@@ -0,0 +1,32 @@
+
+# Useful for loading the LAr MC conditions folders, needed when running digitization with data conditions
+
+from IOVDbSvc.CondDB import conddb
+
+conddb.blockFolder("/LAR/ElecCalibMC/AutoCorr")
+conddb.addFolderWithTag("LAR_OFL","/LAR/ElecCalibMC/AutoCorr","LARElecCalibMCAutoCorr-CSC02-I2",force=True,forceMC=True)
+conddb.blockFolder("/LAR/ElecCalibMC/DAC2uA")
+conddb.addFolderWithTag("LAR_OFL","/LAR/ElecCalibMC/DAC2uA","LARElecCalibMCDAC2uA-CSC02-J",force=True,forceMC=True)
+conddb.blockFolder("/LAR/ElecCalibMC/MinBias")
+conddb.addFolderWithTag("LAR_OFL","/LAR/ElecCalibMC/MinBias","LARElecCalibMCMinBias-CSC02-E",force=True,forceMC=True)
+conddb.blockFolder("/LAR/ElecCalibMC/MinBiasAverage")
+conddb.addFolderWithTag("LAR_OFL","/LAR/ElecCalibMC/MinBiasAverage","LARElecCalibMCMinBiasAverage-CSC02-I",force=True,forceMC=True)
+conddb.blockFolder("/LAR/ElecCalibMC/MphysOverMcal")
+conddb.addFolderWithTag("LAR_OFL","/LAR/ElecCalibMC/MphysOverMcal","LARElecCalibMCMphysOverMcal-CSC02-I",force=True,forceMC=True)
+conddb.blockFolder("/LAR/ElecCalibMC/Noise")
+conddb.addFolderWithTag("LAR_OFL","/LAR/ElecCalibMC/Noise","LARElecCalibMCNoise-CSC02-I",force=True,forceMC=True)
+conddb.blockFolder("/LAR/ElecCalibMC/OFC")
+conddb.addFolder("LAR_OFL","/LAR/ElecCalibMC/OFC",force=True,forceMC=True)
+conddb.blockFolder("/LAR/ElecCalibMC/Pedestal")
+conddb.addFolderWithTag("LAR_OFL","/LAR/ElecCalibMC/Pedestal","LARElecCalibMCPedestal-DC3-B-IdFix7",force=True,forceMC=True)
+conddb.blockFolder("/LAR/ElecCalibMC/Ramp")
+conddb.addFolderWithTag("LAR_OFL","/LAR/ElecCalibMC/Ramp","LARElecCalibMCRamp-CSC02-K",force=True,forceMC=True)
+conddb.blockFolder("/LAR/ElecCalibMC/Shape")
+conddb.addFolderWithTag("LAR_OFL","/LAR/ElecCalibMC/Shape","LARElecCalibMCShape-CSC02-I",force=True,forceMC=True)
+conddb.blockFolder("/LAR/ElecCalibMC/ShapeFullWave")
+conddb.addFolder("LAR_OFL","/LAR/ElecCalibMC/ShapeFullWave",force=True,forceMC=True)
+conddb.blockFolder("/LAR/ElecCalibMC/fSampl")
+conddb.addFolderWithTag("LAR_OFL","/LAR/ElecCalibMC/fSampl","LARElecCalibMCfSampl-CSC02-F-QGSP_BERT_BIRK",force=True,forceMC=True)
+conddb.blockFolder("/LAR/ElecCalibMC/uA2MeV")
+conddb.addFolderWithTag("LAR_OFL","/LAR/ElecCalibMC/uA2MeV","LARElecCalibMCuA2MeV-CSC02-I",force=True,forceMC=True)
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/LArMcSignal_jobOptions.py b/Event/EventOverlay/EventOverlayJobTransforms/share/LArMcSignal_jobOptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..c08319513e3af58e3b591142464697782d89693e
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/LArMcSignal_jobOptions.py
@@ -0,0 +1,76 @@
+include.block ( "EventOverlayJobTransforms/LArMcSignal_jobOptions.py" )
+
+from AthenaCommon.GlobalFlags import globalflags
+from LArROD.LArRODFlags import larRODFlags
+
+# calorimeter
+if OverlayFlags.doLAr():
+
+   # Remove some objects from MC event store before re-running digitization
+   from OverlayCommonAlgs.OverlayCommonAlgsConf import RemoveObjects
+   job += RemoveObjects("RemoveLArOldMC")
+   if globalflags.DataSource()=='data':
+      job.RemoveLArOldMC.RemoveLArMC=True
+
+   from LArDigitization.LArDigitizationConf import LArDigitMaker
+   theLArDigits = LArDigitMaker("digitmaker2")
+   theLArDigits.EventStore = "BkgEvent_0_SG"
+   #theLArDigits.DigitContainer = job.digitmaker1.DigitContainer
+   #theLArDigits.RandomDigitContainer = job.digitmaker1.RandomDigitContainer
+   #theLArDigits.RndmEvtOverlay = False
+   #theLArDigits.NoiseOnOff = False
+   #theLArDigits.NoiseInEMB =  job.digitmaker1.NoiseInEMB
+   #theLArDigits.NoiseInEMEC = job.digitmaker1.NoiseInEMEC
+   #theLArDigits.NoiseInHEC = job.digitmaker1.NoiseInHEC
+   #theLArDigits.NoiseInFCAL = job.digitmaker1.NoiseInFCAL
+   #theLArDigits.RecordMap = False
+
+   #job.digitmaker1.LArPileUpTool.NoiseOnOff = True #ACH
+   #job.digitmaker1.OutputLevel=DEBUG #ACH
+   #job.digitmaker1.LArPileUpTool.OutputLevel=DEBUG #ACH
+   #MessageSvc.debugLimit = 10000 #ACH
+
+   from LArDigitization.LArDigitizationConf import LArPileUpTool
+   theLArPileUpTool = LArPileUpTool("LArPileUpTool2")
+   theLArPileUpTool.EventStore = "BkgEvent_0_SG"
+   theLArPileUpTool.DigitContainer = job.digitmaker1.LArPileUpTool.DigitContainer
+   theLArPileUpTool.RndmEvtOverlay = False
+   theLArPileUpTool.NoiseOnOff = False
+   theLArPileUpTool.RecordMap = False
+   
+   if isRealData:
+      theLArPileUpTool.PedestalKey = job.digitmaker1.LArPileUpTool.PedestalKey
+
+   theLArPileUpTool.ADC2MeVTool = job.digitmaker1.LArPileUpTool.ADC2MeVTool
+   theLArPileUpTool.AutoCorrNoiseTool = job.digitmaker1.LArPileUpTool.AutoCorrNoiseTool
+   theLArPileUpTool.useLArFloat = job.digitmaker1.LArPileUpTool.useLArFloat
+   theLArPileUpTool.MaskingTool = job.digitmaker1.LArPileUpTool.MaskingTool
+   theLArPileUpTool.BadChannelTool = job.digitmaker1.LArPileUpTool.BadChannelTool
+   theLArDigits.LArPileUpTool = theLArPileUpTool
+
+   job += theLArDigits
+
+
+   from LArROD.LArRODConf import LArRawChannelBuilder
+   newLArRawChannelBuilder =  LArRawChannelBuilder( "newLArRawChannelBuilder" )
+   newLArRawChannelBuilder.DataLocation = job.LArRawChannelBuilder.DataLocation
+   newLArRawChannelBuilder.ADC2MeVTool = ToolSvc.LArADC2MeVToolDefault
+   if globalflags.DataSource()=='data' or larRODFlags.forceIter() :
+      newLArRawChannelBuilder.UseOFCTool= False
+      newLArRawChannelBuilder.PhaseInversion=True
+      newLArRawChannelBuilder.LArRawChannelContainerName = "LArRawChannels"
+   else :
+      from LArRecUtils.LArOFCToolDefault import LArOFCToolDefault
+      theOFCTool = LArOFCToolDefault()
+      ToolSvc += theOFCTool
+      newLArRawChannelBuilder.OFCTool =  theOFCTool
+      newLArRawChannelBuilder.LArRawChannelContainerName = job.LArRawChannelBuilder.LArRawChannelContainerName
+      newLArRawChannelBuilder.UseOFCTool= True
+   newLArRawChannelBuilder.EventStore = "BkgEvent_0_SG"
+   newLArRawChannelBuilder.OutputLevel = DEBUG
+   job += newLArRawChannelBuilder
+
+   from LArROD.LArDigits import DefaultLArDigitThinner
+   newLArDigitThinner = DefaultLArDigitThinner('newLArDigitThinner') # automatically added to topSequence
+   job.newLArDigitThinner.EventStore = "BkgEvent_0_SG"
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/Level1Overlay_jobOptions.py b/Event/EventOverlay/EventOverlayJobTransforms/share/Level1Overlay_jobOptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..bc23cd5e7e82a6b7b52d3dd81c7e494f08beca56
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/Level1Overlay_jobOptions.py
@@ -0,0 +1,158 @@
+
+include.block ( "EventOverlayJobTransforms/Level1Overlay_jobOptions.py" )
+
+from AthenaCommon.DetFlags import DetFlags
+from Digitization.DigitizationFlags import jobproperties
+from OverlayCommonAlgs.OverlayFlags import OverlayFlags
+
+if OverlayFlags.doLVL1():
+
+    if OverlayFlags.doBkg():
+       from OverlayCommonAlgs.OverlayCommonAlgsConf import DeepCopyObjects
+       job += DeepCopyObjects("BkgRdo5")
+       job.BkgRdo5.Level1Objects = True
+          
+    if readBS and isRealData:
+       include ("TrigT1CaloByteStream/ReadLVL1CaloBS_jobOptions.py")
+       #include ("MuonCommRecExample/ReadMuCTPI_jobOptions.py")
+
+       ServiceMgr.ByteStreamAddressProviderSvc.TypeNames += [                         
+          "ROIB::RoIBResult/RoIBResult",
+          "MuCTPI_RDO/MUCTPI_RDO",        
+          "L2Result/L2Result",
+          "CTP_RDO/CTP_RDO",
+          "ROIB::RecRoIBResult/RecRoIBResult",
+          "MuCTPI_RIO/MUCTPI_RIO",
+          "CTP_RIO/CTP_RIO"
+       ] 
+
+    if DetFlags.simulateLVL1.LAr_on():
+        include( "LArL1Sim/LArL1Sim_G4_jobOptions.py" )
+        # Noise 
+        if not jobproperties.Digitization.doCaloNoise.get_Value():
+            job.LArTTL1Maker.NoiseOnOff= False #(default:True) 
+        # PileUp
+        job.LArTTL1Maker.PileUp = True
+    
+    if DetFlags.simulateLVL1.Tile_on():
+        include( "TileSimAlgs/TileTTL1_jobOptions.py" )
+
+    if DetFlags.digitize.LVL1_on():
+       #--------------------------------------------------------------
+       # set up TrigConfigSvc for LVL1 simulation
+       #--------------------------------------------------------------
+       #In case TriggerFlags are not setup
+       if 'TriggerFlags' not in dir():
+          log.info( "TriggerFlags not previously defined so using default XML file" )
+          # enable reading from XML file option and then set trigger menu
+          from TriggerJobOpts.TriggerFlags import TriggerFlags
+          # default is taken from TriggerFlags of TriggerJobOpts package
+          TriggerFlags.triggerConfig = "LVL1:default"
+        
+       #PJB 9/2/2009 now use the triggerConfig flags
+       # - TriggerFlags already setup so use them here
+       #
+       from AthenaCommon.AppMgr import ServiceMgr
+       if not hasattr( ServiceMgr, 'LVL1ConfigSvc' ):
+          log.info( "Will setup LVL1ConfigSvc and add instance to ServiceMgr" )
+        
+          from TrigConfigSvc.TrigConfigSvcConfig import LVL1ConfigSvc,findFileInXMLPATH
+          LVL1ConfigSvc = LVL1ConfigSvc('LVL1ConfigSvc')
+
+          #If read from DB then set up the connection and pass keys
+          from TriggerJobOpts.TriggerFlags import TriggerFlags
+          if TriggerFlags.readMenuFromTriggerDb():
+             log.info( "LVL1ConfigSvc uses the TriggerDB %s ", TriggerFlags.triggerDbConnection() )
+             db = TriggerFlags.triggerDbConnection()
+             if 'sqlite' in db.lower():  #e.g. "sqlite_file://test.db"
+                LVL1ConfigSvc.ConfigSource = 'sqlite'
+                LVL1ConfigSvc.DBServer = db.split('//')[-1]
+             if 'oracle' in db.lower():  #e.g. "oracle://ATONR;schema=ATLAS_CONF_TRIGGER_V2;user=ATLAS_CONF_TRIGGER_V2_R;passwd=xxx;'"
+                LVL1ConfigSvc.ConfigSource = 'oracle'
+                LVL1ConfigSvc.DBServer  = (db.split(';')[0]).split('//')[-1]
+                LVL1ConfigSvc.DBAccount = (db.split(';')[1]).split('=')[-1]
+                LVL1ConfigSvc.DBUser    = (db.split(';')[2]).split('=')[-1]
+                LVL1ConfigSvc.DBPass    = (db.split(';')[3]).split('=')[-1]
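+                # Worked example, assuming the oracle form shown above:
+                #   db = "oracle://ATONR;schema=ATLAS_CONF_TRIGGER_V2;user=ATLAS_CONF_TRIGGER_V2_R;passwd=xxx"
+                #   DBServer  -> "ATONR"
+                #   DBAccount -> "ATLAS_CONF_TRIGGER_V2"
+                #   DBUser    -> "ATLAS_CONF_TRIGGER_V2_R"
+                #   DBPass    -> "xxx"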
+             if 'dblookup' in db.lower(): #e.g. "dblookup://TRIGGERDB
+                LVL1ConfigSvc.ConfigSource = 'DBLookUp'
+                LVL1ConfigSvc.DBServer  = db.split('//')[-1]
+                
+             LVL1ConfigSvc.DBSMKey     = int(TriggerFlags.triggerDbKeys()[0])
+             LVL1ConfigSvc.DBLVL1PSKey = int(TriggerFlags.triggerDbKeys()[1])
+             LVL1ConfigSvc.DBBGSKey    = int(TriggerFlags.triggerDbKeys()[3])
+
+          #Otherwise read from xml
+          else:
+             log.info( "LVL1ConfigSvc uses xml file %s ", TriggerFlags.inputLVL1configFile()  )
+             LVL1ConfigSvc.XMLFile = findFileInXMLPATH(TriggerFlags.inputLVL1configFile())
+
+          ServiceMgr += LVL1ConfigSvc
+       else:
+          log.info( "LVL1ConfigSvc already created" )
+
+       #--------------------------------------------------------------
+       # RPC stuff
+       #--------------------------------------------------------------
+       if DetFlags.simulateLVL1.RPC_on():
+          import TrigT1RPCsteering.TrigT1RPCsteeringConfig
+
+       #--------------------------------------------------------------
+       # TGC stuff
+       #--------------------------------------------------------------
+       if DetFlags.simulateLVL1.TGC_on():
+          import TrigT1TGC.TrigT1TGCConfig
+
+       #--------------------------------------------------------------
+       # TrigT1Muctpi Algos
+       #--------------------------------------------------------------
+       if DetFlags.simulateLVL1.RPC_on() or DetFlags.simulateLVL1.TGC_on():
+          from TrigT1Muctpi.TrigT1MuctpiConfig import L1Muctpi
+          job += L1Muctpi()
+
+       #-------------------------------------------------------
+       # TrigT1CaloSim Algos
+       #-------------------------------------------------------
+       if DetFlags.simulateLVL1.Calo_on():
+          if DetFlags.simulateLVL1.LAr_on() and DetFlags.simulateLVL1.Tile_on():
+             protectedInclude( "TrigT1CaloSim/TrigT1CaloSimJobOptions_TTL1.py" )
+             job.TriggerTowerMaker.DoOverlay = True
+             job.TriggerTowerMaker.OverlayPedestal = 40.
+          else:
+             log.warning("NOT SIMULATING L1CALO!")
+             log.warning("If only one of LAr and Tile LVL1 digitzation is set on then the L1Calo will NOT be simulated.")
+             log.warning("Currently DetFlags.simulateLVL1.LAr_on()=%s, DetFlags.simulateLVL1.Tile_on()=%s.", str(DetFlags.simulateLVL1.LAr_on()), str(DetFlags.simulateLVL1.Tile_on()) )
+             log.warning("If you REALLY want to do this then run L1Calo within reconstruction.")
+
+       #-------------------------------------------------------
+       # TrigT1MBTS Alg
+       #-------------------------------------------------------
+       if DetFlags.simulateLVL1.Calo_on():
+          from TrigT1MBTS.TrigT1MBTSConf import LVL1__TrigT1MBTS
+          job += LVL1__TrigT1MBTS()
+
+       #-------------------------------------------------------
+       # TrigT1BCM Alg
+       #-------------------------------------------------------
+       if DetFlags.simulateLVL1.BCM_on():
+          from TrigT1BCM.TrigT1BCMConf import LVL1__TrigT1BCM
+          job += LVL1__TrigT1BCM()
+
+       #-------------------------------------------------------
+       # TrigT1LUCID Alg
+       #-------------------------------------------------------
+       if DetFlags.simulateLVL1.Lucid_on():
+          from TrigT1Lucid.TrigT1LucidConf import LVL1__TrigT1Lucid
+          job += LVL1__TrigT1Lucid()
+    
+       #-------------------------------------------------------
+       # TrigT1CTP Algos
+       #-------------------------------------------------------
+       from TrigT1CTP.TrigT1CTPConfig import CTPSimulationInDigi
+       job += CTPSimulationInDigi()
+
+       #-------------------------------------------------------
+       # TrigT1RoIB Algos
+       #-------------------------------------------------------
+       from TrigT1RoIB.TrigT1RoIBConfig import RoIBuilderInDigi
+       job += RoIBuilderInDigi("RoIBuilder")
+    
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/Looper.py b/Event/EventOverlay/EventOverlayJobTransforms/share/Looper.py
new file mode 100644
index 0000000000000000000000000000000000000000..070ee3d55ef029b4237fa6c1f1359c976741c23c
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/Looper.py
@@ -0,0 +1,12 @@
+
+## Enable the looper killer, with options for overlay
+#  Don't kill the event, but flag it as bad
+def use_looperkiller():
+    from G4AtlasApps import PyG4Atlas, AtlasG4Eng
+    lkAction = PyG4Atlas.UserAction('G4UserActions', 'LooperKiller', ['BeginOfRun', 'EndOfRun', 'BeginOfEvent', 'EndOfEvent', 'Step'])
+    #lkAction.set_Properties( {"PrintSteps":"0", "MaxSteps":"10", "VerboseLevel":"0", "AbortEvent":"0", "SetError":"1"} ) #to kill most everything
+    lkAction.set_Properties( {"PrintSteps":"10", "MaxSteps":"1000000", "VerboseLevel":"1", "AbortEvent":"0", "SetError":"1"} )
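+    # With AbortEvent=0 the event is kept; SetError=1 only flags it as bad, matching the overlay policy described above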
+    AtlasG4Eng.G4Eng.menu_UserActions.add_UserAction(lkAction)
+
+simFlags.InitFunctions.add_function("postInit", use_looperkiller)
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/MuonMcSignal_jobOptions.py b/Event/EventOverlay/EventOverlayJobTransforms/share/MuonMcSignal_jobOptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..4dc6af897d7c99ab912e014485b5fcf4a2bac0e0
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/MuonMcSignal_jobOptions.py
@@ -0,0 +1,40 @@
+include.block ( "EventOverlayJobTransforms/MuonMcSignal_jobOptions.py" )
+
+# Run the muon Digit -> RDO conversion for the signal in the MC store
+# and, in the case where the background is MC generated, also for the
+# background in the temporary background store.
+# If the background is real data, we do not run Digit -> RDO in the
+# temporary background store because of the MDT cabling problem.
+# For the CSC, Digit -> RDO was already done during overlay and
+# should not be repeated here.
+
+from OverlayCommonAlgs.OverlayFlags import OverlayFlags
+
+theApp.Dlls += [ "MuonByteStreamCnvTest" ]
+theApp.Dlls += [ "TrigT1RPChardware" ]
+theApp.Dlls += [ "TrigT1RPCsteering" ]
+theApp.Dlls += [ "TrigT1RPCmonitoring" ]
+
+if OverlayFlags.doMDT():
+        from MuonByteStreamCnvTest.MuonByteStreamCnvTestConf import MdtDigitToMdtRDO
+        job += MdtDigitToMdtRDO( "SigMdtDigitToMdtRDO" )
+        job.SigMdtDigitToMdtRDO.Store = job.MdtOverlay.MCStore
+        #ACH if readBS==False:
+           #ACH job += MdtDigitToMdtRDO( "BkgMdtDigitToMdtRDO" )
+           #ACH job.BkgMdtDigitToMdtRDO.Store = job.MdtOverlay.TempBkgStore
+if OverlayFlags.doRPC():
+        from MuonByteStreamCnvTest.MuonByteStreamCnvTestConf import RpcDigitToRpcRDO
+        job += RpcDigitToRpcRDO( "SigRpcDigitToRpcRDO" )
+        job.SigRpcDigitToRpcRDO.Store = job.RpcOverlay.MCStore
+        #ACH if readBS==False:
+           #ACH job += RpcDigitToRpcRDO( "BkgRpcDigitToRpcRDO" )
+           #ACH job.BkgRpcDigitToRpcRDO.Store = job.RpcOverlay.TempBkgStore
+if OverlayFlags.doTGC():
+        from MuonByteStreamCnvTest.MuonByteStreamCnvTestConf import TgcDigitToTgcRDO
+        job += TgcDigitToTgcRDO( "SigTgcDigitToTgcRDO" )
+        job.SigTgcDigitToTgcRDO.Store = job.TgcOverlay.MCStore
+        #ACH if readBS==False:
+           #ACH job += TgcDigitToTgcRDO( "BkgTgcDigitToTgcRDO" )
+           #ACH job.BkgTgcDigitToTgcRDO.Store = job.TgcOverlay.TempBkgStore
+
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/MuonOverlay_jobOptions.py b/Event/EventOverlay/EventOverlayJobTransforms/share/MuonOverlay_jobOptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..ee30b64d5f695dd2320521e4522b157b515b791c
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/MuonOverlay_jobOptions.py
@@ -0,0 +1,92 @@
+include.block ( "EventOverlayJobTransforms/MuonOverlay_jobOptions.py" )
+
+from AthenaCommon.AppMgr import ServiceMgr
+from AthenaCommon.AppMgr import ToolSvc
+from Digitization.DigitizationFlags import digitizationFlags
+from OverlayCommonAlgs.OverlayFlags import OverlayFlags
+from MuonRecExample.MuonRecFlags import muonRecFlags,mooreFlags,muonboyFlags
+
+from RecExConfig.RecFlags import rec as recFlags
+
+if OverlayFlags.doMDT() or OverlayFlags.doCSC() or OverlayFlags.doRPC() or OverlayFlags.doTGC():
+   
+    include( "MuonEventAthenaPool/MuonEventAthenaPool_joboptions.py" )
+ 
+    import MuonRecExample.MuonReadCalib
+    if readBS and isRealData:
+       theApp.Dlls += [ "MuonByteStream"]
+       if OverlayFlags.doCSC():
+          from MuonRecExample.MuonRecFlags import muonRecFlags
+          muonRecFlags.doCSCs.set_Value_and_Lock(True)
+
+       from MuonByteStream.MuonByteStreamFlags import muonByteStreamFlags
+       muonByteStreamFlags.TgcDataType = "atlas"#FIXME should not be setting jobproperties at this point in the configuration.
+       muonByteStreamFlags.RpcDataType = "atlas"#FIXME should not be setting jobproperties at this point in the configuration.
+       muonByteStreamFlags.MdtDataType = "atlas"#FIXME should not be setting jobproperties at this point in the configuration.
+
+    if OverlayFlags.doBkg():
+       from OverlayCommonAlgs.OverlayCommonAlgsConf import DeepCopyObjects
+       job += DeepCopyObjects("BkgRdo4")
+       job.BkgRdo4.MuonObjects = True
+       
+    import MuonCnvExample.MuonCablingConfig
+
+    digitizationFlags.doMuonNoise=False #FIXME should not be setting jobproperties at this point in the configuration.
+
+    if readBS:
+       include("MuonRecExample/MuonReadBS_jobOptions.py")
+
+    if OverlayFlags.doCSC():
+        if readBS:
+           ToolSvc.CscRawDataProviderTool.EvtStore = "OriginalEvent_SG"
+        include ( "CscOverlay/CscOverlay_jobOptions.py" )
+        job.CscOverlay.IsByteStream = readBS
+        job.CscOverlay.DataStore = "OriginalEvent_SG"
+
+        #print "ACH123: Setting DEBUG v99"
+        #job.CscOverlay.MakeRDOTool.OutputLevel=DEBUG
+        #job.CscOverlay.MakeRDOTool.cscCalibTool.OutputLevel=DEBUG
+        #job.CscOverlay.OutputLevel=DEBUG
+        #MessageSvc.debugLimit = 100000
+
+        #print "ACH123: NumSamples = 2 for MakeRDOTool"
+        #job.CscOverlay.MakeRDOTool.NumSamples=2
+        
+    if OverlayFlags.doMDT():
+        if not hasattr(ToolSvc, 'MdtDigitizationTool'):
+            from AthenaCommon import CfgGetter
+            ToolSvc += CfgGetter.getPrivateTool("MdtDigitizationTool", checkType=True)
+        ToolSvc.MdtDigitizationTool.GetT0FromBD = True
+        
+        include ( "MdtOverlay/MdtOverlay_jobOptions.py" )
+        #job.MdtOverlay.DataStore = "OriginalEvent_SG"
+        if readBS:
+           ToolSvc.MdtRawDataProviderTool.EvtStore = "OriginalEvent_SG"
+           job.MdtOverlay.ConvertRDOToDigitTool.RetrievePrivateCopy = False
+
+        #job.MdtOverlay.OutputLevel = VERBOSE
+        #job.MdtDigitToMdtRDO.OutputLevel = VERBOSE
+
+    if OverlayFlags.doRPC():
+        include ( "RpcOverlay/RpcOverlay_jobOptions.py" )
+        #job.RpcOverlay.DataStore = "BkgEvent_2_SG"
+        if readBS:
+           ToolSvc.RpcRawDataProviderTool.EvtStore = "OriginalEvent_SG"
+           job.RpcOverlay.ConvertRDOToDigitTool.RetrievePrivateCopy = False 
+        #job.RpcOverlay.OutputLevel = VERBOSE
+        #job.RpcDigitToRpcRDO.OutputLevel = VERBOSE
+
+    if OverlayFlags.doTGC():
+        include ( "TgcOverlay/TgcOverlay_jobOptions.py" )
+        #job.TgcOverlay.DataStore = "BkgEvent_2_SG"
+        if readBS:
+           ToolSvc.TgcRawDataProviderTool.EvtStore = "OriginalEvent_SG"
+           job.TgcOverlay.ConvertRDOToDigitTool.RetrievePrivateCopy = False
+
+           # storegate dump
+           # StoreGateSvc = Service( "StoreGateSvc" )
+           # StoreGateSvc.Dump = True  #true will dump data store contents
+
+           # StoreGateSvc.OutputLevel=DEBUG
+
+            
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/OverlayOutputItemList_jobOptions.py b/Event/EventOverlay/EventOverlayJobTransforms/share/OverlayOutputItemList_jobOptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..1bb73ab32eb950ee39d09c13328080771d490afa
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/OverlayOutputItemList_jobOptions.py
@@ -0,0 +1,93 @@
+include.block ( "EventOverlayJobTransforms/OverlayOutputItemList_jobOptions.py" )
+
+from AthenaCommon.AppMgr import ServiceMgr
+from OverlayCommonAlgs.OverlayFlags import OverlayFlags
+
+# The output - overlay
+from AthenaPoolCnvSvc.WriteAthenaPool import AthenaPoolOutputStream
+outStream = AthenaPoolOutputStream( "StreamRDO",OverlayCollection )
+outStream.Store = ServiceMgr.StoreGateSvc
+print "ACH123 new OverlayOutputItemList_jobOptions.py"
+
+# overlay output stream
+outStream.ItemList += [ "EventInfo#*", "PileUpEventInfo#*" ]
+outStream.ItemList += [ "LumiBlockCollection#*" ]
+
+if OverlayFlags.doTruth():
+   outStream.ItemList += [
+                             "McEventCollection#*",
+                             "TrackRecordCollection#*"
+                           ]
+   if OverlayFlags.doCSC():
+      outStream.ItemList += [ "CscSimDataCollection#CSC_SDO" ]
+   if OverlayFlags.doMDT():
+      outStream.ItemList += [ "MuonSimDataCollection#MDT_SDO" ]
+   if OverlayFlags.doRPC():
+      outStream.ItemList += [ "MuonSimDataCollection#RPC_SDO" ]
+   if OverlayFlags.doTGC():
+      outStream.ItemList += [ "MuonSimDataCollection#TGC_SDO" ]
+   if OverlayFlags.doLAr() or OverlayFlags.doTile():
+      outStream.ItemList += [ "CaloCalibrationHitContainer#*" ]
+   if OverlayFlags.doPixel():
+      outStream.ItemList += [ "InDetSimDataCollection#PixelSDO_Map" ]
+   if OverlayFlags.doSCT():
+      outStream.ItemList += [ "InDetSimDataCollection#SCT_SDO_Map" ]
+   if OverlayFlags.doTRT():
+      outStream.ItemList += [ "InDetSimDataCollection#TRT_SDO_Map" ]
+   if OverlayFlags.doBCM():
+      outStream.ItemList += [ "InDetSimDataCollection#BCM_SDO_Map" ] 
+
+if OverlayFlags.doPixel():
+   outStream.ItemList += ["PixelRDO_Container#*"]
+if OverlayFlags.doSCT():
+   outStream.ItemList += ["SCT_RDO_Container#*"]
+if OverlayFlags.doTRT():
+   outStream.ItemList += ["TRT_RDO_Container#*"]
+
+if OverlayFlags.doLAr():
+   outStream.ItemList+=["LArRawChannelContainer#*"]
+   outStream.ItemList+=["LArDigitContainer#LArDigitContainer_MC_Thinned"]
+if OverlayFlags.doTile():
+   if isRealData:
+      outStream.ItemList += [ "TileDigitsContainer#*" ]
+   else:
+      outStream.ItemList += [ "TileDigitsContainer#TileDigitsFlt" ]
+   outStream.ItemList += [ "TileRawChannelContainer#*" ]
+
+if OverlayFlags.doCSC():
+   outStream.ItemList += [ "CscRawDataContainer#*" ]
+   outStream.ItemList += [ "CscDigitContainer#*" ]
+if OverlayFlags.doMDT():
+   outStream.ItemList += [ "MdtCsmContainer#*" ]
+#   outStream.ItemList += [ "MdtDigitContainer#*" ]
+if OverlayFlags.doRPC():
+   outStream.ItemList += [ "RpcPadContainer#*" ]
+#   outStream.ItemList += [ "RpcDigitContainer#*" ]
+if OverlayFlags.doTGC():
+   outStream.ItemList += [ "TgcRdoContainer#*" ]
+#   outStream.ItemList += [ "TgcDigitContainer#*" ]
+
+if OverlayFlags.doBCM():
+   outStream.ItemList+=["BCM_RDO_Container#*"]
+
+if OverlayFlags.doLVL1():
+   outStream.ItemList+=["LArTTL1Container#*"]
+   outStream.ItemList+=["TileTTL1Container#*"]
+   outStream.ItemList+=[
+                           "ROIB::RoIBResult#*",
+                           "MuCTPI_RDO#*",
+                           "CTP_RDO#*",
+                           "DataVector<LVL1::TriggerTower>#*",
+                           "DataVector<LVL1::CPMTower>#*",
+                           "DataVector<LVL1::JetElement>#*",
+                           "DataVector<LVL1::CPMHits>#*",
+                           "DataVector<LVL1::CPMRoI>#*",
+                           "DataVector<LVL1::CMMCPHits>#*",
+                           "DataVector<LVL1::CMMJetHits>#*",
+                           "DataVector<LVL1::CMMEtSums>#*",
+                           "DataVector<LVL1::JEMHits>#*",
+                           "DataVector<LVL1::JEMRoI>#*",
+                           "DataVector<LVL1::JEMEtSums>#*",
+                           "LVL1::CMMRoI#*"]
+
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/Rt_override.py b/Event/EventOverlay/EventOverlayJobTransforms/share/Rt_override.py
new file mode 100644
index 0000000000000000000000000000000000000000..5cadf898e450c6d023fa0a86b7b568b0ddab8085
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/Rt_override.py
@@ -0,0 +1,67 @@
+
+from IOVDbSvc.CondDB import conddb
+
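+# Each folder below is first blocked and then re-added with an explicit tag:
+# entries forced with forceMC=True take MC calibration, while entries forced
+# with forceData=True take real-data conditions, independent of the job's
+# global conditions tag.
+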
+conddb.blockFolder("/TRT/Calib/RT")
+conddb.addFolderWithTag("TRT_OFL","/TRT/Calib/RT","TrtCalibRt-MC09_900GeV-REP-Field-00",force=True,forceMC=True)
+conddb.blockFolder("/TRT/Calib/T0")
+conddb.addFolderWithTag("TRT_OFL","/TRT/Calib/T0","TrtCalibT0-Physics-BLK-UPD4-FieldOn-00-00",force=True,forceData=True)
+
+conddb.blockFolder("/MDT/RT")
+conddb.addFolderWithTag("MDT_OFL","/MDT/RT","MDTRT_Sim-00",force=True,forceMC=True)
+conddb.blockFolder("/MDT/RTBLOB")
+#conddb.addFolderWithTag("MDT_OFL","/MDT/RTBLOB","MDTRT-UPD4-09-BLOB",force=True,forceData=True)
+conddb.addFolderWithTag("MDT_OFL","/MDT/RTBLOB","MDTRT_Sim-00-BLOB",force=True,forceMC=True)
+
+#can try using MC T0 constants, or force a data tag to use
+conddb.blockFolder("/MDT/T0")
+conddb.addFolderWithTag("MDT_OFL","/MDT/T0","MDTT0-UPD4-09",force=True,forceData=True)
+#conddb.addFolderWithTag("MDT_OFL","/MDT/T0","MDTT0_Sim-00",force=True,forceMC=True)
+conddb.blockFolder("/MDT/T0BLOB")
+conddb.addFolderWithTag("MDT_OFL","/MDT/T0BLOB","MDTT0-UPD4-09-BLOB",force=True,forceData=True)
+#conddb.addFolderWithTag("MDT_OFL","/MDT/T0BLOB","MDTT0_Sim-00-BLOB",force=True,forceMC=True) 
+
+conddb.blockFolder("/PIXEL/HLT/DCS/HV")
+conddb.addFolderWithTag("PIXEL_ONL","/PIXEL/HLT/DCS/HV","PixDCSHV-UPD1-00",force=True,forceData=True)
+conddb.blockFolder("/PIXEL/HLT/DCS/TEMPERATURE")
+conddb.addFolderWithTag("PIXEL_ONL","/PIXEL/HLT/DCS/TEMPERATURE","PixDCSTemp-UPD1-00",force=True,forceData=True)
+conddb.blockFolder("/PIXEL/ReadoutSpeed")
+conddb.addFolderSplitMC("PIXEL","/PIXEL/ReadoutSpeed","/PIXEL/ReadoutSpeed",force=True)
+#conddb.addFolder("PIXEL_OFL","/PIXEL/ReadoutSpeed",force=True,forceData=True)
+
+#conddb.blockFolder("/GLOBAL/BField/Map")
+#conddb.addFolderWithTag("GLOBAL_ONL","/GLOBAL/BField/Map","BFieldMap-FullAsym-09-solTil3",force=True,forceData=True)
+#conddb.addOverride("/GLOBAL/BField/Map","BFieldMap-FullAsym-09-solTil3")
+
+conddb.blockFolder("/TRT/Cond/DigVers")
+conddb.addFolderWithTag("TRT_OFL","/TRT/Cond/DigVers","TRTCondDigVers-Collisions-01",force=True,forceMC=True)
+
+conddb.blockFolder("/Indet/Align")
+conddb.addFolderWithTag("INDET_OFL","/Indet/Align","InDetAlign_Collision_2009_08",force=True,forceData=True)
+conddb.blockFolder("/LAR/Align")
+conddb.addFolderWithTag("LAR_ONL","/LAR/Align","LARAlign-Repro2011-00",force=True,forceData=True)
+conddb.blockFolder("/TRT/Align")
+conddb.addFolderWithTag("TRT_OFL","/TRT/Align","TRTAlign_Collision_2009_04",force=True,forceData=True)
+
+print "Haas: RT OVERRIDE 4"
+
+
+#from overlay_scripts/tagin.sh :
+conddb.blockFolder("/MUONALIGN/MDT/BARREL")
+conddb.blockFolder("/MUONALIGN/MDT/ENDCAP/SIDEA")
+conddb.blockFolder("/MUONALIGN/MDT/ENDCAP/SIDEC")
+conddb.blockFolder("/MUONALIGN/TGC/SIDEA")
+conddb.blockFolder("/MUONALIGN/TGC/SIDEC")
+
+conddb.addFolderWithTag("MUONALIGN_OFL","/MUONALIGN/MDT/BARREL","MuonAlignMDTBarrelAlign-BA_ROLLING_2010_02-ES1-UPD1-00",force=True,forceData=True)
+conddb.addFolderWithTag("MUONALIGN_OFL","/MUONALIGN/MDT/ENDCAP/SIDEA","MuonAlignMDTEndCapAAlign-COSMICS-200909-01-ES1-UPD1-00",force=True,forceData=True)
+conddb.addFolderWithTag("MUONALIGN_OFL","/MUONALIGN/MDT/ENDCAP/SIDEC","MuonAlignMDTEndCapCAlign-COSMICS-200909-01-ES1-UPD1-00",force=True,forceData=True)
+conddb.addFolderWithTag("MUONALIGN_OFL","/MUONALIGN/TGC/SIDEA","MuonAlignTGCEndCapAAlign-0001-DEFAULT",force=True,forceData=True)
+conddb.addFolderWithTag("MUONALIGN_OFL","/MUONALIGN/TGC/SIDEC","MuonAlignTGCEndCapCAlign-0001-DEFAULT",force=True,forceData=True)
+
+
+#conddb.blockFolder("/LAR/ElecCalib/fSampl/Symmetry")
+#conddb.addFolderWithTag("LAR_ONL","/LAR/ElecCalib/fSampl/Symmetry","LARElecCalibMCfSampl-CSC02-F-QGSP_BERT_BIRK",force=True,forceData=True)
+
+
+#conddb.dumpFolderTags('myconddb.txt',True)
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/Rt_override_BLKPA-006-05.py b/Event/EventOverlay/EventOverlayJobTransforms/share/Rt_override_BLKPA-006-05.py
new file mode 100644
index 0000000000000000000000000000000000000000..69539e22c5ce514cce9a663a81110847db817a0d
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/Rt_override_BLKPA-006-05.py
@@ -0,0 +1,60 @@
+
+from IOVDbSvc.CondDB import conddb
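+#Note on the pattern used throughout this file (a brief summary): conddb.blockFolder() keeps a
+#folder from being resolved via the global conditions tag, and conddb.addFolderWithTag(schema, folder, tag, force=True, ...)
+#re-adds it with an explicit tag; forceMC/forceData select the MC or the data database instance.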
+
+#use MC TRT RT functions
+conddb.blockFolder("/TRT/Calib/RT")
+conddb.addFolderWithTag("TRT_OFL","/TRT/Calib/RT","TrtCalibRt-MCnewDigi_ToTon_7TeV_00-00",force=True,forceMC=True)
+
+#use MC MDT RT functions
+conddb.blockFolder("/MDT/RT")
+conddb.addFolderWithTag("MDT_OFL","/MDT/RT","MDTRT_Sim-00",force=True,forceMC=True)
+conddb.blockFolder("/MDT/RTBLOB")
+conddb.addFolderWithTag("MDT_OFL","/MDT/RTBLOB","MDTRT_Sim-00-BLOB",force=True,forceMC=True)
+
+conddb.blockFolder("/PIXEL/HLT/DCS/HV")
+conddb.addFolderWithTag("PIXEL_ONL","/PIXEL/HLT/DCS/HV","PixDCSHV-UPD1-00",force=True,forceData=True)
+conddb.blockFolder("/PIXEL/HLT/DCS/TEMPERATURE")
+conddb.addFolderWithTag("PIXEL_ONL","/PIXEL/HLT/DCS/TEMPERATURE","PixDCSTemp-UPD1-00",force=True,forceData=True)
+conddb.blockFolder("/PIXEL/ReadoutSpeed")
+conddb.addFolderSplitMC("PIXEL","/PIXEL/ReadoutSpeed","/PIXEL/ReadoutSpeed",force=True)
+
+conddb.blockFolder("/TRT/Cond/DigVers")
+conddb.addFolderWithTag("TRT_OFL","/TRT/Cond/DigVers","TRTCondDigVers-Collisions-01",force=True,forceMC=True)
+
+###################################################
+print "Haas: RT OVERRIDE, for COMCOND-BLKPA-006-05"
+
+#use data TRT T0 constants
+conddb.blockFolder("/TRT/Calib/T0")
+conddb.addFolderWithTag("TRT_OFL","/TRT/Calib/T0","TrtCalibT0-Physics-BLK-UPD4-00-01",force=True,forceData=True)
+
+#use data MDT T0 constants
+conddb.blockFolder("/MDT/T0")
+conddb.addFolderWithTag("MDT_OFL","/MDT/T0","MDTT0-UPD4-10",force=True,forceData=True)
+conddb.blockFolder("/MDT/T0BLOB")
+conddb.addFolderWithTag("MDT_OFL","/MDT/T0BLOB","MDTT0-UPD4-10-BLOB",force=True,forceData=True)
+
+conddb.blockFolder("/TRT/Align")
+conddb.addFolderWithTag("TRT_OFL","/TRT/Align","TRTAlign-BLK-UPD4-06",force=True,forceData=True)
+conddb.blockFolder("/LAR/Align")
+conddb.addFolderWithTag("LAR_ONL","/LAR/Align","LARAlign-2012-00",force=True,forceData=True)
+conddb.blockFolder("/Indet/Align")
+conddb.addFolderWithTag("INDET_OFL","/Indet/Align","InDetAlign-BLK-UPD4-07",force=True,forceData=True)
+
+conddb.blockFolder("/MUONALIGN/MDT/BARREL")
+conddb.blockFolder("/MUONALIGN/MDT/ENDCAP/SIDEA")
+conddb.blockFolder("/MUONALIGN/MDT/ENDCAP/SIDEC")
+conddb.blockFolder("/MUONALIGN/TGC/SIDEA")
+conddb.blockFolder("/MUONALIGN/TGC/SIDEC")
+
+#use updated muon alignments
+conddb.addFolderWithTag("MUONALIGN_OFL","/MUONALIGN/MDT/BARREL","MuonAlignMDTBarrelAlign-BA_ROLLING_2010_03-BLKP-UPD4-00",force=True,forceData=True)
+conddb.addFolderWithTag("MUONALIGN_OFL","/MUONALIGN/MDT/ENDCAP/SIDEA","MuonAlignMDTEndCapAAlign-ECA_ROLLING_2011_02_02-UPD4-01",force=True,forceData=True)
+conddb.addFolderWithTag("MUONALIGN_OFL","/MUONALIGN/MDT/ENDCAP/SIDEC","MuonAlignMDTEndCapCAlign-ECC_ROLLING_2011_02_02-UPD4-01",force=True,forceData=True)
+    
+#the TGC alignments had overlap issues... caused G4 crash
+#conddb.addFolderWithTag("MUONALIGN_OFL","/MUONALIGN/TGC/SIDEA","MuonAlignTGCEndCapAAlign-TGCA_ROLLING_2011_01-ES1-UPD1-00",force=True,forceData=True)
+#conddb.addFolderWithTag("MUONALIGN_OFL","/MUONALIGN/TGC/SIDEC","MuonAlignTGCEndCapCAlign-TGCC_ROLLING_2011_01-ES1-UPD1-00",force=True,forceData=True)
+conddb.addFolderWithTag("MUONALIGN_OFL","/MUONALIGN/TGC/SIDEA","MuonAlignTGCEndCapAAlign-0001-DEFAULT",force=True,forceData=True)
+conddb.addFolderWithTag("MUONALIGN_OFL","/MUONALIGN/TGC/SIDEC","MuonAlignTGCEndCapCAlign-0001-DEFAULT",force=True,forceData=True)
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/Rt_override_BLKPA-006-11.py b/Event/EventOverlay/EventOverlayJobTransforms/share/Rt_override_BLKPA-006-11.py
new file mode 100755
index 0000000000000000000000000000000000000000..87d103ce34fcbe31c23cee6460cf184cc470ca79
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/Rt_override_BLKPA-006-11.py
@@ -0,0 +1,67 @@
+
+from IOVDbSvc.CondDB import conddb
+
+########################################
+#The folders in this first block are MC folders (plus a few other settings) and should not need to change much
+
+#use MC TRT RT functions
+conddb.blockFolder("/TRT/Calib/RT")
+conddb.addFolderWithTag("TRT_OFL","/TRT/Calib/RT","TrtCalibRt-MCnewDigi_ToTon_7TeV_00-00",force=True,forceMC=True)
+
+#use MC MDT RT functions
+conddb.blockFolder("/MDT/RT")
+conddb.addFolderWithTag("MDT_OFL","/MDT/RT","MDTRT_Sim-00",force=True,forceMC=True)
+conddb.blockFolder("/MDT/RTBLOB")
+conddb.addFolderWithTag("MDT_OFL","/MDT/RTBLOB","MDTRT_Sim-00-BLOB",force=True,forceMC=True)
+
+conddb.blockFolder("/PIXEL/HLT/DCS/HV")
+conddb.addFolderWithTag("PIXEL_ONL","/PIXEL/HLT/DCS/HV","PixDCSHV-UPD1-00",force=True,forceData=True)
+conddb.blockFolder("/PIXEL/HLT/DCS/TEMPERATURE")
+conddb.addFolderWithTag("PIXEL_ONL","/PIXEL/HLT/DCS/TEMPERATURE","PixDCSTemp-UPD1-00",force=True,forceData=True)
+conddb.blockFolder("/PIXEL/ReadoutSpeed")
+conddb.addFolderSplitMC("PIXEL","/PIXEL/ReadoutSpeed","/PIXEL/ReadoutSpeed",force=True)
+
+conddb.blockFolder("/TRT/Cond/DigVers")
+conddb.addFolderWithTag("TRT_OFL","/TRT/Cond/DigVers","TRTCondDigVers-Collisions-01",force=True,forceMC=True)
+
+###################################################
+print "Haas: RT OVERRIDE, for COMCOND-BLKPA-006-11"
+#See https://twiki.cern.ch/twiki/pub/AtlasComputing/ConditionsTagComCondBlkpa00611/checkDB_COMCOND-BLKPA-006-11-17.2.10.6.txt
+
+#use data TRT T0 constants
+conddb.blockFolder("/TRT/Calib/T0")
+conddb.addFolderWithTag("TRT_OFL","/TRT/Calib/T0","TrtCalibT0-Physics-BLK-UPD4-00-01",force=True,forceData=True)
+
+#use data MDT T0 constants
+#conddb.blockFolder("/MDT/T0")
+#conddb.addFolderWithTag("MDT_OFL","/MDT/T0","MDTT0-UPD4-11",force=True,forceData=True)
+conddb.blockFolder("/MDT/T0BLOB")
+conddb.addFolderWithTag("MDT_OFL","/MDT/T0BLOB","MDTT0-UPD4-11",force=True,forceData=True)
+
+conddb.blockFolder("/TRT/Align")
+conddb.addFolderWithTag("TRT_OFL","/TRT/Align","TRTAlign-BLK-UPD4-07",force=True,forceData=True)
+conddb.blockFolder("/LAR/Align")
+conddb.addFolderWithTag("LAR_ONL","/LAR/Align","LARAlign-2012-00",force=True,forceData=True)
+conddb.blockFolder("/Indet/Align")
+conddb.addFolderWithTag("INDET_OFL","/Indet/Align","InDetAlign-BLK-UPD4-08",force=True,forceData=True)
+
+conddb.blockFolder("/MUONALIGN/MDT/BARREL")
+conddb.blockFolder("/MUONALIGN/MDT/ENDCAP/SIDEA")
+conddb.blockFolder("/MUONALIGN/MDT/ENDCAP/SIDEC")
+conddb.blockFolder("/MUONALIGN/TGC/SIDEA")
+conddb.blockFolder("/MUONALIGN/TGC/SIDEC")
+
+#use updated muon alignments
+conddb.addFolderWithTag("MUONALIGN_OFL","/MUONALIGN/MDT/BARREL","MuonAlignMDTBarrelAlign-BA_ROLLING_04-BLKP-UPD4-00",force=True,forceData=True)
+conddb.addFolderWithTag("MUONALIGN_OFL","/MUONALIGN/MDT/ENDCAP/SIDEA","MuonAlignMDTEndCapAAlign-ECA_ROLLING_2012_03_01-UPD4-01",force=True,forceData=True)
+conddb.addFolderWithTag("MUONALIGN_OFL","/MUONALIGN/MDT/ENDCAP/SIDEC","MuonAlignMDTEndCapCAlign-ECC_ROLLING_2012_03_01-UPD4-01",force=True,forceData=True)
+#conddb.addFolderWithTag("MUONALIGN_OFL","/MUONALIGN/MDT/BARREL","MuonAlignMDTBarrelAlign-BA_ROLLING_2010_03-BLKP-UPD4-00",force=True,forceData=True)
+#conddb.addFolderWithTag("MUONALIGN_OFL","/MUONALIGN/MDT/ENDCAP/SIDEA","MuonAlignMDTEndCapAAlign-ECA_ROLLING_2011_02_02-UPD4-01",force=True,forceData=True)
+#conddb.addFolderWithTag("MUONALIGN_OFL","/MUONALIGN/MDT/ENDCAP/SIDEC","MuonAlignMDTEndCapCAlign-ECC_ROLLING_2011_02_02-UPD4-01",force=True,forceData=True)
+    
+#the TGC alignments had overlap issues... caused G4 crash
+#conddb.addFolderWithTag("MUONALIGN_OFL","/MUONALIGN/TGC/SIDEA","MuonAlignTGCEndCapAAlign-TGCA_ROLLING_2011_01-ES1-UPD1-00",force=True,forceData=True)
+#conddb.addFolderWithTag("MUONALIGN_OFL","/MUONALIGN/TGC/SIDEC","MuonAlignTGCEndCapCAlign-TGCC_ROLLING_2011_01-ES1-UPD1-00",force=True,forceData=True)
+conddb.addFolderWithTag("MUONALIGN_OFL","/MUONALIGN/TGC/SIDEA","MuonAlignTGCEndCapAAlign-0001-DEFAULT",force=True,forceData=True)
+conddb.addFolderWithTag("MUONALIGN_OFL","/MUONALIGN/TGC/SIDEC","MuonAlignTGCEndCapCAlign-0001-DEFAULT",force=True,forceData=True)
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/Rt_override_old.py b/Event/EventOverlay/EventOverlayJobTransforms/share/Rt_override_old.py
new file mode 100755
index 0000000000000000000000000000000000000000..9e390dc81a106db31f8c649cf44a7e205d93a7a1
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/Rt_override_old.py
@@ -0,0 +1,61 @@
+
+from IOVDbSvc.CondDB import conddb
+
+########################################
+#The folders in this first block are MC folders (plus a few other settings) and should not need to change much
+
+#use MC TRT RT functions
+conddb.blockFolder("/TRT/Calib/RT")
+conddb.addFolderWithTag("TRT_OFL","/TRT/Calib/RT","TrtCalibRt-MC09_900GeV-REP-Field-00",force=True,forceMC=True)
+
+#use MC MDT RT functions
+conddb.blockFolder("/MDT/RT")
+conddb.addFolderWithTag("MDT_OFL","/MDT/RT","MDTRT_Sim-00",force=True,forceMC=True)
+conddb.blockFolder("/MDT/RTBLOB")
+conddb.addFolderWithTag("MDT_OFL","/MDT/RTBLOB","MDTRT_Sim-00-BLOB",force=True,forceMC=True)
+
+conddb.blockFolder("/PIXEL/HLT/DCS/HV")
+conddb.addFolderWithTag("PIXEL_ONL","/PIXEL/HLT/DCS/HV","PixDCSHV-UPD1-00",force=True,forceData=True)
+conddb.blockFolder("/PIXEL/HLT/DCS/TEMPERATURE")
+conddb.addFolderWithTag("PIXEL_ONL","/PIXEL/HLT/DCS/TEMPERATURE","PixDCSTemp-UPD1-00",force=True,forceData=True)
+conddb.blockFolder("/PIXEL/ReadoutSpeed")
+conddb.addFolderSplitMC("PIXEL","/PIXEL/ReadoutSpeed","/PIXEL/ReadoutSpeed",force=True)
+
+conddb.blockFolder("/TRT/Cond/DigVers")
+conddb.addFolderWithTag("TRT_OFL","/TRT/Cond/DigVers","TRTCondDigVers-Collisions-01",force=True,forceMC=True)
+
+###################################################
+print "Haas: RT OVERRIDE, old conditions (from Rt_override.py)"
+#See https://twiki.cern.ch/twiki/pub/AtlasComputing/ConditionsTagComCondBlkpa00611/checkDB_COMCOND-BLKPA-006-11-17.2.10.6.txt
+
+#use data TRT T0 constants
+conddb.blockFolder("/TRT/Calib/T0")
+conddb.addFolderWithTag("TRT_OFL","/TRT/Calib/T0","TrtCalibT0-Physics-BLK-UPD4-FieldOn-00-00",force=True,forceData=True)
+
+#use data MDT T0 constants
+conddb.blockFolder("/MDT/T0")
+conddb.addFolderWithTag("MDT_OFL","/MDT/T0","MDTT0-UPD4-09",force=True,forceData=True)
+conddb.blockFolder("/MDT/T0BLOB")
+conddb.addFolderWithTag("MDT_OFL","/MDT/T0BLOB","MDTT0-UPD4-09-BLOB",force=True,forceData=True)
+
+conddb.blockFolder("/TRT/Align")
+conddb.addFolderWithTag("TRT_OFL","/TRT/Align","TRTAlign_Collision_2009_04",force=True,forceData=True)
+conddb.blockFolder("/LAR/Align")
+conddb.addFolderWithTag("LAR_ONL","/LAR/Align","LARAlign-Repro2011-00",force=True,forceData=True)
+conddb.blockFolder("/Indet/Align")
+conddb.addFolderWithTag("INDET_OFL","/Indet/Align","InDetAlign_Collision_2009_08",force=True,forceData=True)
+
+conddb.blockFolder("/MUONALIGN/MDT/BARREL")
+conddb.blockFolder("/MUONALIGN/MDT/ENDCAP/SIDEA")
+conddb.blockFolder("/MUONALIGN/MDT/ENDCAP/SIDEC")
+conddb.blockFolder("/MUONALIGN/TGC/SIDEA")
+conddb.blockFolder("/MUONALIGN/TGC/SIDEC")
+
+#use updated muon alignments
+conddb.addFolderWithTag("MUONALIGN_OFL","/MUONALIGN/MDT/BARREL","MuonAlignMDTBarrelAlign-BA_ROLLING_2010_02-ES1-UPD1-00",force=True,forceData=True)
+conddb.addFolderWithTag("MUONALIGN_OFL","/MUONALIGN/MDT/ENDCAP/SIDEA","MuonAlignMDTEndCapAAlign-COSMICS-200909-01-ES1-UPD1-00",force=True,forceData=True)
+conddb.addFolderWithTag("MUONALIGN_OFL","/MUONALIGN/MDT/ENDCAP/SIDEC","MuonAlignMDTEndCapCAlign-COSMICS-200909-01-ES1-UPD1-00",force=True,forceData=True)
+
+conddb.addFolderWithTag("MUONALIGN_OFL","/MUONALIGN/TGC/SIDEA","MuonAlignTGCEndCapAAlign-0001-DEFAULT",force=True,forceData=True)
+conddb.addFolderWithTag("MUONALIGN_OFL","/MUONALIGN/TGC/SIDEC","MuonAlignTGCEndCapCAlign-0001-DEFAULT",force=True,forceData=True)
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/SignalOutputItemList_jobOptions.py b/Event/EventOverlay/EventOverlayJobTransforms/share/SignalOutputItemList_jobOptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..6cc53aa2b64dcbe64e83286a68b0b9e818813ab2
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/SignalOutputItemList_jobOptions.py
@@ -0,0 +1,133 @@
+include.block ( "EventOverlayJobTransforms/SignalOutputItemList_jobOptions.py" )
+
+from AthenaCommon.DetFlags import DetFlags
+from OverlayCommonAlgs.OverlayFlags import OverlayFlags
+
+#copy stuff back into the MC before persistency
+from OverlayCommonAlgs.OverlayCommonAlgsConf import CopyObjects
+
+if OverlayFlags.doTruth():
+   job += CopyObjects("CopyTruth")
+   job.CopyTruth.TruthObjects = True
+
+# copy InDet objects back into Signal event store
+if OverlayFlags.doPixel() or OverlayFlags.doSCT() or OverlayFlags.doTRT():
+   job += CopyObjects("CopyInDet")
+   job.CopyInDet.InDetObjects = True
+
+if OverlayFlags.doCSC() or OverlayFlags.doMDT() or OverlayFlags.doRPC() or OverlayFlags.doTGC():
+   job += CopyObjects("CopyMuons")
+   job.CopyMuons.MuonObjects = True
+
+# convert Digit to RDO for both signal store and temporary background store
+
+include ( "EventOverlayJobTransforms/MuonMcSignal_jobOptions.py" )
+
+include ( "EventOverlayJobTransforms/LArMcSignal_jobOptions.py" )
+
+include ( "EventOverlayJobTransforms/TileMcSignal_jobOptions.py" )
+
+include ( "EventOverlayJobTransforms/L1Signal_jobOptions.py" ) 
+
+from OverlayCommonAlgs.OverlayCommonAlgsConf import UpdateEventInfo
+job += UpdateEventInfo()
+job.UpdateEventInfo.InfoType="McEventInfo"
+
+# The output - signal
+from AthenaPoolCnvSvc.WriteAthenaPool import AthenaPoolOutputStream
+signalStream = AthenaPoolOutputStream( "StreamRDO_MC" )
+signalStream.OutputFile  = SignalCollection
+signalStream.Store = "TemporaryStore"
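+#the ItemList below selects which signal containers get written out, per subdetector flag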
+
+signalStream.ItemList += [ "EventInfo#*", "EventStreamInfo#*", "PileUpEventInfo#*" ]
+
+if OverlayFlags.doTruth():
+   signalStream.ItemList += [
+                             "McEventCollection#*",
+                             "TrackRecordCollection#*"
+                           ]
+   if OverlayFlags.doCSC():
+      signalStream.ItemList += [ "CscSimDataCollection#CSC_SDO" ]
+   if OverlayFlags.doMDT():
+      signalStream.ItemList += [ "MuonSimDataCollection#MDT_SDO" ]
+   if OverlayFlags.doRPC():
+      signalStream.ItemList += [ "MuonSimDataCollection#RPC_SDO" ]
+   if OverlayFlags.doTGC():
+      signalStream.ItemList += [ "MuonSimDataCollection#TGC_SDO" ]
+   if OverlayFlags.doLAr() or OverlayFlags.doTile():
+      signalStream.ItemList += [ "CaloCalibrationHitContainer#*" ]
+   if OverlayFlags.doPixel():
+      signalStream.ItemList += [ "InDetSimDataCollection#PixelSDO_Map" ]
+   if OverlayFlags.doSCT():
+      signalStream.ItemList += [ "InDetSimDataCollection#SCT_SDO_Map" ]
+   if OverlayFlags.doTRT():
+      signalStream.ItemList += [ "InDetSimDataCollection#TRT_SDO_Map" ]
+   if OverlayFlags.doBCM():
+      signalStream.ItemList += [ "InDetSimDataCollection#BCM_SDO_Map" ] 
+
+if OverlayFlags.doPixel():
+   signalStream.ItemList += ["PixelRDO_Container#*"]
+if OverlayFlags.doSCT():
+   signalStream.ItemList += ["SCT_RDO_Container#*"]
+if OverlayFlags.doTRT():
+   signalStream.ItemList += ["TRT_RDO_Container#*"]
+
+if OverlayFlags.doLAr():
+    signalStream.ItemList+=["LArRawChannelContainer#*"]
+    signalStream.ItemList+=["LArDigitContainer#LArDigitContainer_MC_Thinned"]
+if OverlayFlags.doTile():
+    signalStream.ItemList += [ "TileDigitsContainer#TileDigitsFlt" ]
+    signalStream.ItemList += ["TileL2Container#TileL2Cnt"]
+    signalStream.ItemList += [ "TileRawChannelContainer#*" ]
+
+if OverlayFlags.doCSC():
+   signalStream.ItemList += [ "CscRawDataContainer#*" ]
+if OverlayFlags.doMDT():
+   signalStream.ItemList += [ "MdtCsmContainer#*" ]
+if OverlayFlags.doRPC():
+   signalStream.ItemList += [ "RpcPadContainer#*" ]
+if OverlayFlags.doTGC():
+   signalStream.ItemList += [ "TgcRdoContainer#*" ]
+
+if OverlayFlags.doBCM():
+   signalStream.ItemList+=["BCM_RDO_Container#*"]
+
+if OverlayFlags.doLVL1():
+   signalStream.ItemList+=["LArTTL1Container#*"]
+   signalStream.ItemList+=["TileTTL1Container#*"]
+   signalStream.ItemList+=[
+                           "ROIB::RoIBResult#*",
+                           "MuCTPI_RDO#*",
+                           "CTP_RDO#*",
+                           "DataVector<LVL1::TriggerTower>#*",
+                           "DataVector<LVL1::CPMTower>#*",
+                           "DataVector<LVL1::JetElement>#*",
+                           "DataVector<LVL1::CPMHits>#*",
+                           "DataVector<LVL1::CPMRoI>#*",
+                           "DataVector<LVL1::CMMCPHits>#*",
+                           "DataVector<LVL1::CMMJetHits>#*",
+                           "DataVector<LVL1::CMMEtSums>#*",
+                           "DataVector<LVL1::JEMHits>#*",
+                           "DataVector<LVL1::JEMRoI>#*",
+                           "DataVector<LVL1::JEMEtSums>#*",
+                           "LVL1::CMMRoI#*"]
+
+#options for writing the MetaData into the output file
+MetaDataStore=ServiceMgr.MetaDataStore
+from AthenaPoolCnvSvc.WriteAthenaPool import AthenaPoolOutputStream
+StreamRDO_MC_FH =  AthenaPoolOutputStream( "StreamRDO_MC_FH" )
+from AthenaCommon.AthenaCommonFlags import jobproperties
+StreamRDO_MC_FH.OutputFile = "ROOTTREE:"+SignalCollection
+StreamRDO_MC_FH.Store = MetaDataStore
+StreamRDO_MC_FH.WriteOnExecute = False
+StreamRDO_MC_FH.WriteOnFinalize = True
+# Write all IOV meta data containers
+StreamRDO_MC_FH.ItemList += [ "IOVMetaDataContainer#*" ]
+StreamRDO_MC_FH.ItemList += [ "LumiBlockCollection#*" ]
+
+from OutputStreamAthenaPool.OutputStreamAthenaPoolConf import AthenaPoolOutputStreamTool
+StreamRDO_MC_FHTool = AthenaPoolOutputStreamTool("StreamRDO_MC_FHTool")
+StreamRDO_MC_FHTool.OutputCollection = "MetaDataHdr"
+StreamRDO_MC_FHTool.PoolContainerPrefix = "MetaData"
+StreamRDO_MC_FH.WritingTool = StreamRDO_MC_FHTool
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/TileMcSignal_jobOptions.py b/Event/EventOverlay/EventOverlayJobTransforms/share/TileMcSignal_jobOptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..39f13ba63d16f425ac7ddb0217a8398809a8d858
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/TileMcSignal_jobOptions.py
@@ -0,0 +1,149 @@
+include.block ( "EventOverlayJobTransforms/TileMcSignal_jobOptions.py" )
+
+from AthenaCommon.AppMgr import ToolSvc
+from AthenaCommon.GlobalFlags import globalflags
+from AthenaCommon import CfgGetter
+
+if OverlayFlags.doTile():
+#copy stuff back into the MC before persistency
+   from OverlayCommonAlgs.OverlayCommonAlgsConf import RemoveObjects
+   job += RemoveObjects("RemoveTileOldMC")
+   if globalflags.DataSource()=='data':
+      job.RemoveTileOldMC.RemoveTileMC=True
+
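+   #the block below clones the standard Tile digitization and raw-channel algorithms/tools,
+   #points the copies at the "BkgEvent_0_SG" store, and copies their properties from the
+   #default instances configured elsewhere in the job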
+   from TileRecUtils.TileRecFlags import jobproperties
+   job += CfgGetter.getAlgorithm("TileHitVecToCnt/tilehitvect", tryDefaultConfigurable=True)
+   tileHitVecToCnt=job.tilehitvect.DigitizationTool
+   tileHitVecToCnt.EvtStore = "BkgEvent_0_SG"
+   tileHitVecToCnt.RndmEvtOverlay = False
+   tileHitVecToCnt.RndmSvc = job.TileHitVecToCnt.DigitizationTool.RndmSvc
+   tileHitVecToCnt.TileHitVectors=job.TileHitVecToCnt.DigitizationTool.TileHitVectors
+   tileHitVecToCnt.TileInfoName=job.TileHitVecToCnt.DigitizationTool.TileInfoName
+   tileHitVecToCnt.TileHitContainer=job.TileHitVecToCnt.DigitizationTool.TileHitContainer
+   from TileSimAlgs.TileSimAlgsConf import TileDigitsMaker
+   theTileDigits=TileDigitsMaker("tiledigitmaker2")
+   theTileDigits.EvtStore = "BkgEvent_0_SG"
+   theTileDigits.TileHitContainer=job.TileDigitsMaker.TileHitContainer
+   theTileDigits.TileInfoName=job.TileDigitsMaker.TileInfoName
+   theTileDigits.IntegerDigits=job.TileDigitsMaker.IntegerDigits
+   theTileDigits.CalibrationRun=job.TileDigitsMaker.CalibrationRun
+   theTileDigits.TileDigitsContainer=job.TileDigitsMaker.TileDigitsContainer
+   theTileDigits.RndmSvc = job.TileDigitsMaker.RndmSvc
+   theTileDigits.UseCoolPulseShapes=job.TileDigitsMaker.UseCoolPulseShapes
+   theTileDigits.MaskBadChannels=job.TileDigitsMaker.MaskBadChannels
+   theTileDigits.RndmEvtOverlay=False
+   job += theTileDigits
+   from TileRecUtils.TileRecUtilsConf import TileRawChannelMaker
+   newTileRawChannelMaker=TileRawChannelMaker("newTileRawChannelMaker")
+   newTileRawChannelMaker.EventStore = "BkgEvent_0_SG"
+   newTileRawChannelMaker.TileRawChannelBuilder = []
+   job += newTileRawChannelMaker
+
+   from TileRecUtils.TileRecUtilsConf import TileBeamInfoProvider
+   newTileBeamInfoProvider = TileBeamInfoProvider("newTileBeamInfoProvider")
+   newTileBeamInfoProvider.TileBeamElemContainer = ToolSvc.TileBeamInfoProvider.TileBeamElemContainer
+   newTileBeamInfoProvider.TileDigitsContainer = ToolSvc.TileBeamInfoProvider.TileDigitsContainer
+   newTileBeamInfoProvider.TileRawChannelContainer = ToolSvc.TileBeamInfoProvider.TileRawChannelContainer
+   newTileBeamInfoProvider.EventStore = "BkgEvent_0_SG"
+   ToolSvc += newTileBeamInfoProvider
+  
+   if jobproperties.TileRecFlags.doTileManyAmps():
+      from TileRecUtils.TileRecUtilsConf import TileRawChannelBuilderManyAmps
+      newTileRawChannelBuilderManyAmps= TileRawChannelBuilderManyAmps("newTileRawChannelBuilderManyAmps")
+      newTileRawChannelBuilderManyAmps.TileRawChannelContainer = ToolSvc.TileRawChannelBuilderManyAmps.TileRawChannelContainer
+      newTileRawChannelBuilderManyAmps.RunType = ToolSvc.TileRawChannelBuilderManyAmps.RunType
+      newTileRawChannelBuilderManyAmps.calibrateEnergy = ToolSvc.TileRawChannelBuilderManyAmps.calibrateEnergy
+      newTileRawChannelBuilderManyAmps.correctTime     =   ToolSvc.TileRawChannelBuilderManyAmps.correctTime
+      newTileRawChannelBuilderManyAmps.EventStore = "BkgEvent_0_SG"
+      newTileRawChannelBuilderManyAmps.BeamInfo = ToolSvc.newTileBeamInfoProvider
+      ToolSvc += newTileRawChannelBuilderManyAmps
+      job.newTileRawChannelMaker.TileRawChannelBuilder += [ ToolSvc.newTileRawChannelBuilderManyAmps ]
+
+   if jobproperties.TileRecFlags.doTileFlat():
+      from TileRecUtils.TileRecUtilsConf import TileRawChannelBuilderFlatFilter
+      newTileRawChannelBuilderFlatFilter= TileRawChannelBuilderFlatFilter("newTileRawChannelBuilderFlatFilter")
+      newTileRawChannelBuilderFlatFilter.TileRawChannelContainer = ToolSvc.TileRawChannelBuilderFlatFilter.TileRawChannelContainer
+      newTileRawChannelBuilderFlatFilter.RunType = ToolSvc.TileRawChannelBuilderFlatFilter.RunType
+      newTileRawChannelBuilderFlatFilter.calibrateEnergy = ToolSvc.TileRawChannelBuilderFlatFilter.calibrateEnergy
+      newTileRawChannelBuilderFlatFilter.correctTime     = ToolSvc.TileRawChannelBuilderFlatFilter.correctTime    
+      newTileRawChannelBuilderFlatFilter.EventStore = "BkgEvent_0_SG"
+      newTileRawChannelBuilderFlatFilter.BeamInfo = ToolSvc.newTileBeamInfoProvider
+      ToolSvc += newTileRawChannelBuilderFlatFilter
+      job.newTileRawChannelMaker.TileRawChannelBuilder += [ ToolSvc.newTileRawChannelBuilderFlatFilter ]
+
+   if jobproperties.TileRecFlags.doTileFit():
+      from TileRecUtils.TileRecUtilsConf import TileRawChannelBuilderFitFilter
+      newTileRawChannelBuilderFitFilter= TileRawChannelBuilderFitFilter("newTileRawChannelBuilderFitFilter")
+      newTileRawChannelBuilderFitFilter.TileRawChannelContainer =ToolSvc.TileRawChannelBuilderFitFilter.TileRawChannelContainer 
+      newTileRawChannelBuilderFitFilter.RunType = ToolSvc.TileRawChannelBuilderFitFilter.RunType
+      newTileRawChannelBuilderFitFilter.calibrateEnergy = ToolSvc.TileRawChannelBuilderFitFilter.calibrateEnergy
+      newTileRawChannelBuilderFitFilter.correctTime     = ToolSvc.TileRawChannelBuilderFitFilter.correctTime
+      newTileRawChannelBuilderFitFilter.EventStore = "BkgEvent_0_SG"
+      newTileRawChannelBuilderFitFilter.BeamInfo = ToolSvc.newTileBeamInfoProvider
+      ToolSvc += newTileRawChannelBuilderFitFilter
+      job.newTileRawChannelMaker.TileRawChannelBuilder += [ ToolSvc.newTileRawChannelBuilderFitFilter ] 
+
+   if jobproperties.TileRecFlags.doTileFitCool():
+      from TileRecUtils.TileRecUtilsConf import TileRawChannelBuilderFitFilterCool
+      newTileRawChannelBuilderFitFilterCool = TileRawChannelBuilderFitFilterCool("newTileRawChannelBuilderFitFilterCool")
+      newTileRawChannelBuilderFitFilterCool.TileRawChannelContainer = ToolSvc.TileRawChannelBuilderFitFilterCool.TileRawChannelContainer
+      newTileRawChannelBuilderFitFilterCool.RunType = ToolSvc.TileRawChannelBuilderFitFilterCool.RunType
+      newTileRawChannelBuilderFitFilterCool.calibrateEnergy =  ToolSvc.TileRawChannelBuilderFitFilterCool.calibrateEnergy
+      newTileRawChannelBuilderFitFilterCool.correctTime     =  ToolSvc.TileRawChannelBuilderFitFilterCool.correctTime    
+      newTileRawChannelBuilderFitFilterCool.EventStore = "BkgEvent_0_SG"
+      newTileRawChannelBuilderFitFilterCool.BeamInfo = ToolSvc.newTileBeamInfoProvider
+      ToolSvc += newTileRawChannelBuilderFitFilterCool
+      job.newTileRawChannelMaker.TileRawChannelBuilder += [ ToolSvc.newTileRawChannelBuilderFitFilterCool ]
+
+   if jobproperties.TileRecFlags.doTileOpt():
+      from TileRecUtils.TileRecUtilsConf import TileRawChannelBuilderOptFilter
+      newTileRawChannelBuilderOptFilter= TileRawChannelBuilderOptFilter("newTileRawChannelBuilderOptFilter")
+      newTileRawChannelBuilderOptFilter.TileRawChannelContainer = ToolSvc.TileRawChannelBuilderOptFilter.TileRawChannelContainer
+      newTileRawChannelBuilderOptFilter.RunType = ToolSvc.TileRawChannelBuilderOptFilter.RunType
+      newTileRawChannelBuilderOptFilter.calibrateEnergy = ToolSvc.TileRawChannelBuilderOptFilter.calibrateEnergy
+      newTileRawChannelBuilderOptFilter.correctTime     = ToolSvc.TileRawChannelBuilderOptFilter.correctTime
+      newTileRawChannelBuilderOptFilter.OF2 = ToolSvc.TileRawChannelBuilderOptFilter.OF2
+      newTileRawChannelBuilderOptFilter.PedestalMode = ToolSvc.TileRawChannelBuilderOptFilter.PedestalMode
+      newTileRawChannelBuilderOptFilter.MaxIterations = ToolSvc.TileRawChannelBuilderOptFilter.MaxIterations
+      newTileRawChannelBuilderOptFilter.Minus1Iteration = ToolSvc.TileRawChannelBuilderOptFilter.Minus1Iteration
+      newTileRawChannelBuilderOptFilter.AmplitudeCorrection = ToolSvc.TileRawChannelBuilderOptFilter.AmplitudeCorrection 
+      newTileRawChannelBuilderOptFilter.EventStore = "BkgEvent_0_SG"
+      newTileRawChannelBuilderOptFilter.BeamInfo = ToolSvc.newTileBeamInfoProvider
+      ToolSvc += newTileRawChannelBuilderOptFilter
+      job.newTileRawChannelMaker.TileRawChannelBuilder += [ ToolSvc.newTileRawChannelBuilderOptFilter ]
+
+   if jobproperties.TileRecFlags.doTileOpt2():
+      from TileRecUtils.TileRecUtilsConf import TileRawChannelBuilderOpt2Filter
+      newTileRawChannelBuilderOpt2Filter= TileRawChannelBuilderOpt2Filter("newTileRawChannelBuilderOpt2Filter")
+      newTileRawChannelBuilderOpt2Filter.TileRawChannelContainer = ToolSvc.TileRawChannelBuilderOpt2Filter.TileRawChannelContainer
+      newTileRawChannelBuilderOpt2Filter.RunType = ToolSvc.TileRawChannelBuilderOpt2Filter.RunType
+      newTileRawChannelBuilderOpt2Filter.calibrateEnergy = ToolSvc.TileRawChannelBuilderOpt2Filter.calibrateEnergy
+      newTileRawChannelBuilderOpt2Filter.correctTime     = ToolSvc.TileRawChannelBuilderOpt2Filter.correctTime
+      newTileRawChannelBuilderOpt2Filter.OF2 = ToolSvc.TileRawChannelBuilderOpt2Filter.OF2
+      newTileRawChannelBuilderOpt2Filter.PedestalMode = ToolSvc.TileRawChannelBuilderOpt2Filter.PedestalMode
+      newTileRawChannelBuilderOpt2Filter.MaxIterations = ToolSvc.TileRawChannelBuilderOpt2Filter.MaxIterations
+      newTileRawChannelBuilderOpt2Filter.Minus1Iteration = ToolSvc.TileRawChannelBuilderOpt2Filter.Minus1Iteration
+      newTileRawChannelBuilderOpt2Filter.AmplitudeCorrection = ToolSvc.TileRawChannelBuilderOpt2Filter.AmplitudeCorrection
+      newTileRawChannelBuilderOpt2Filter.OfcfromCool = ToolSvc.TileRawChannelBuilderOpt2Filter.OfcfromCool          
+      newTileRawChannelBuilderOpt2Filter.BeamInfo = ToolSvc.newTileBeamInfoProvider
+      newTileRawChannelBuilderOpt2Filter.EventStore = "BkgEvent_0_SG"
+      ToolSvc += newTileRawChannelBuilderOpt2Filter
+      job.newTileRawChannelMaker.TileRawChannelBuilder += [ ToolSvc.newTileRawChannelBuilderOpt2Filter ]
+
+   if jobproperties.TileRecFlags.doTileOptATLAS():
+      from TileRecUtils.TileRecUtilsConf import TileRawChannelBuilderOpt2Filter
+      newTileRawChannelBuilderOptATLAS= TileRawChannelBuilderOpt2Filter("newTileRawChannelBuilderOptATLAS")
+      newTileRawChannelBuilderOptATLAS.TileRawChannelContainer = ToolSvc.TileRawChannelBuilderOptATLAS.TileRawChannelContainer
+      newTileRawChannelBuilderOptATLAS.RunType         = ToolSvc.TileRawChannelBuilderOptATLAS.RunType
+      newTileRawChannelBuilderOptATLAS.calibrateEnergy = ToolSvc.TileRawChannelBuilderOptATLAS.calibrateEnergy
+      newTileRawChannelBuilderOptATLAS.correctTime     = ToolSvc.TileRawChannelBuilderOptATLAS.correctTime
+      newTileRawChannelBuilderOptATLAS.OF2 = ToolSvc.TileRawChannelBuilderOptATLAS.OF2
+      newTileRawChannelBuilderOptATLAS.MaxIterations = ToolSvc.TileRawChannelBuilderOptATLAS.MaxIterations
+      newTileRawChannelBuilderOptATLAS.Minus1Iteration = ToolSvc.TileRawChannelBuilderOptATLAS.Minus1Iteration
+      newTileRawChannelBuilderOptATLAS.AmplitudeCorrection = ToolSvc.TileRawChannelBuilderOptATLAS.AmplitudeCorrection
+      newTileRawChannelBuilderOptATLAS.OfcfromCool = ToolSvc.TileRawChannelBuilderOptATLAS.OfcfromCool            
+      newTileRawChannelBuilderOptATLAS.BeamInfo = ToolSvc.newTileBeamInfoProvider
+      newTileRawChannelBuilderOptATLAS.EventStore = "BkgEvent_0_SG"
+      ToolSvc += newTileRawChannelBuilderOptATLAS
+      job.newTileRawChannelMaker.TileRawChannelBuilder += [ ToolSvc.newTileRawChannelBuilderOptATLAS ]
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/TruthOverlay_jobOptions.py b/Event/EventOverlay/EventOverlayJobTransforms/share/TruthOverlay_jobOptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..e03d832621452b026b8f4e3858ad0f30f3653bcf
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/TruthOverlay_jobOptions.py
@@ -0,0 +1,19 @@
+
+include.block ( "EventOverlayJobTransforms/TruthOverlay_jobOptions.py" )
+
+# McEventCollection copying.  Although logically it belongs to the
+# overlaying that is done later, it has to be scheduled before the
+# digitization algorithms as a workaround for bug #35465
+
+if OverlayFlags.doBkg():
+    from OverlayCommonAlgs.OverlayCommonAlgsConf import DeepCopyObjects   
+    job += DeepCopyObjects("BkgRdo")
+    job.BkgRdo.EvtInfoObjects = True
+    job += DeepCopyObjects("BkgRdo0")
+    job.BkgRdo0.TruthObjects = True
+
+from OverlayCommonAlgs.OverlayCommonAlgsConf import CopyMcEventCollection
+job += CopyMcEventCollection()
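+#readBS and isRealData are expected to be defined by the job options that include this fragment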
+if readBS and isRealData:
+    job.CopyMcEventCollection.RealData = True
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/UseOracle.py b/Event/EventOverlay/EventOverlayJobTransforms/share/UseOracle.py
new file mode 100644
index 0000000000000000000000000000000000000000..e681e324c8d79bc40321dd3819dcf825fc0d5172
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/UseOracle.py
@@ -0,0 +1,7 @@
+# setup DBReplicaSvc to choose closest Oracle replica, configurables style
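+# (SortReplicas=True orders the replicas by proximity; UseCOOLSQLite=False skips any local
+# SQLite replica so that an Oracle server is actually used)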
+from AthenaCommon.AppMgr import ServiceMgr
+from PoolSvc.PoolSvcConf import PoolSvc
+ServiceMgr+=PoolSvc(SortReplicas=True)
+from DBReplicaSvc.DBReplicaSvcConf import DBReplicaSvc
+ServiceMgr+=DBReplicaSvc(UseCOOLSQLite=False)
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/custom.py b/Event/EventOverlay/EventOverlayJobTransforms/share/custom.py
new file mode 100644
index 0000000000000000000000000000000000000000..3cc0f6af89d895c14a4f72f5b147fd8585358687
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/custom.py
@@ -0,0 +1,15 @@
+from AthenaCommon.GlobalFlags import globalflags
+globalflags.DataSource.set_Value_and_Lock('data') #force to use the data database
+#globalflags.DataSource.set_Value_and_Lock('geant4') #force to use the mc database
+
+#from AthenaCommon.GlobalFlags import GlobalFlags
+#GlobalFlags.DetGeo.set_atlas()
+#GlobalFlags.DataSource.set_data()
+#GlobalFlags.DataSource.set_geant4()
+
+## FIXME This part should go in the transform skeleton(s) in the future.
+if 'runArgs' in dir():
+    if hasattr(runArgs,'DataRunNumber'):
+        from RecExConfig.RecFlags import rec
+        rec.projectName = 'data'+str(runArgs.DataRunNumber%100)
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/custom_MC.py b/Event/EventOverlay/EventOverlayJobTransforms/share/custom_MC.py
new file mode 100644
index 0000000000000000000000000000000000000000..419bb7e0474bc5c0ccb701ff333f84a51fc11ea0
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/custom_MC.py
@@ -0,0 +1,8 @@
+from AthenaCommon.GlobalFlags import globalflags
+#globalflags.DataSource.set_Value_and_Lock('data') #force to use the data database
+globalflags.DataSource.set_Value_and_Lock('geant4') #force to use the mc database
+
+#from AthenaCommon.DetFlags import DetFlags
+#DetFlags.Calo_setOff()
+#DetFlags.Tile_setOn()
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/dojive.py b/Event/EventOverlay/EventOverlayJobTransforms/share/dojive.py
new file mode 100644
index 0000000000000000000000000000000000000000..4a89e693a3679dc617367a15a774c4ba58294b6b
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/dojive.py
@@ -0,0 +1,4 @@
+
+from RecExConfig.RecFlags  import rec
+rec.doJiveXML = True
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/g4runnumber.py b/Event/EventOverlay/EventOverlayJobTransforms/share/g4runnumber.py
new file mode 100644
index 0000000000000000000000000000000000000000..87171a923c0a5de8277a87736d1bfd93bdb68353
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/g4runnumber.py
@@ -0,0 +1,44 @@
+
+#update the run/event info for each event
+if not hasattr(svcMgr,'EvtIdModifierSvc'):
+    import AthenaServices.Configurables as asc
+    svcMgr +=asc.EvtIdModifierSvc(EvtStoreName="StoreGateSvc") #OriginalEvent_SG
+    from AthenaCommon.AppMgr import theApp
+    theApp.CreateSvc += ["EvtIdModifierSvc"]
+else:
+    print 'g4runnumber.py: Will override the settings of the EvtIdModifierSvc that was previously set up!'
+    ## remove any existing settings
+    svcMgr.EvtIdModifierSvc.Modifiers = []
+svcMgr.EvtIdModifierSvc.OutputLevel=DEBUG
+
+include("events.txt")
+#svcMgr.EvtIdModifierSvc.add_modifier(run_nbr=167776, evt_nbr=18, time_stamp=1269948350, lbk_nbr=124, nevts=1)
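+#events.txt is expected to contain one add_modifier(...) line per event, like the example above;
+#its line count is used below to set the number of events to process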
+
+
+#set the max number of events
+fname = "events.txt"
+num_lines = 0
+with open(fname, 'r') as f:
+    for line in f:
+        num_lines += 1
+        print line,
+print "Number of lines in events.txt is "+str(num_lines)
+from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
+athenaCommonFlags.EvtMax.unlock()
+athenaCommonFlags.EvtMax.set_Value_and_Lock(num_lines)
+
+#fix iov metadata
+if not hasattr(ServiceMgr.ToolSvc, 'IOVDbMetaDataTool'):
+    from AthenaCommon import CfgMgr
+    svcMgr.ToolSvc += CfgMgr.IOVDbMetaDataTool()
+    svcMgr.ToolSvc.IOVDbMetaDataTool.MinMaxRunNumbers = [svcMgr.EvtIdModifierSvc.Modifiers[0], 2147483647]
+    ## FIXME need to use maxRunNumber = 2147483647 for now to keep overlay working but in the future this should be set properly.
+
+#use conditions from this run number and timestamp
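+#(Modifiers is a flat list of the values passed to add_modifier, so index 0 is the first run number
+# and index 2 the first timestamp)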
+svcMgr.EventSelector.RunNumber = svcMgr.EvtIdModifierSvc.Modifiers[0]
+svcMgr.EventSelector.OverrideRunNumber = True
+svcMgr.EventSelector.OverrideEventNumber = True
+svcMgr.EventSelector.InitialTimeStamp = svcMgr.EvtIdModifierSvc.Modifiers[2]
+print svcMgr.EventSelector
+svcMgr.TagInfoMgr.OutputLevel=DEBUG
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/g4runnumber2.py b/Event/EventOverlay/EventOverlayJobTransforms/share/g4runnumber2.py
new file mode 100644
index 0000000000000000000000000000000000000000..de8116f968b6ec62f00e11cdffff6e63de5ed61a
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/g4runnumber2.py
@@ -0,0 +1,20 @@
+
+#set the max number of events
+fname = "events.txt"
+num_lines = 0
+with open(fname, 'r') as f:
+    for line in f:
+        num_lines += 1
+print "Number of lines in events.txt is "+str(num_lines)
+from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
+athenaCommonFlags.EvtMax.unlock()
+athenaCommonFlags.EvtMax.set_Value_and_Lock(num_lines)
+
+#use this for the run/event numbers to look up in the vertex override file, if we are overriding vertices
+#job.VertexPositionGenerator.VertexOverrideEventFile = "events.txt" #old MC11 way
+
+#new MC12 way, should be a preInclude
+from G4AtlasApps.SimFlags import SimFlags
+SimFlags.load_atlas_flags()
+SimFlags.VertexOverrideEventFile.set_Value_and_Lock("events.txt")
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/g4runnumber_override.py b/Event/EventOverlay/EventOverlayJobTransforms/share/g4runnumber_override.py
new file mode 100644
index 0000000000000000000000000000000000000000..54e90ad1f92022ee0784812fef7fd55fa4d82776
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/g4runnumber_override.py
@@ -0,0 +1,30 @@
+
+#to get sim run number into output
+from Digitization.DigitizationFlags import digitizationFlags
+
+#digitizationFlags.simRunNumber = int(digitizationFlags.getHitFileRunNumber(athenaCommonFlags.PoolHitsInput.get_Value()[0]))
+
+myCommand = 'dumpRunNumber.py EVNT.*'
+import commands, re
+sc,out = commands.getstatusoutput(myCommand)
+if sc != 0:
+    print 'ERR: problem:\n%s' % str(out)
+    raise SystemExit(sc)
+myOutput = ''
+for l in out.splitlines():
+    if re.match('^run number: .', l):
+        tempout = re.split('^run number: .',l)
+        if len(tempout) > 1:
+            myOutput = tempout[1].strip()
+            del tempout
+if len(myOutput) > 0:
+    digitizationFlags.simRunNumber = int(myOutput)
+    ServiceMgr.EventSelector.RunNumber = digitizationFlags.simRunNumber.get_Value()
+    pileUpEventLoopMgr.mcRunNumber = digitizationFlags.simRunNumber.get_Value()
+else:
+    print "Failed to find Run Number in hits file metadata."
+
+print "simRunNumber: "
+print ServiceMgr.EventSelector
+print pileUpEventLoopMgr
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/input_FileStager.py b/Event/EventOverlay/EventOverlayJobTransforms/share/input_FileStager.py
new file mode 100644
index 0000000000000000000000000000000000000000..f9d7b515cb887af71f48ee2f2318e63413328a02
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/input_FileStager.py
@@ -0,0 +1,64 @@
+if ('sampleList' in dir()) or ('sampleFile' in dir()):
+  #################################################################################################
+  # Provide input for the FileStager here
+  #################################################################################################
+  
+  ## import filestager tool
+  from FileStager.FileStagerTool import FileStagerTool
+  
+  if ('sampleList' in dir()):
+    stagetool = FileStagerTool(sampleList=sampleList)
+  elif ('sampleFile' in dir()):
+    print "FileStager() : Now processing sample file : %s" % sampleFile
+    stagetool = FileStagerTool(sampleFile=sampleFile)
+  
+  ## Configure copy command used by the stager; default is 'lcg-cp -v --vo atlas -t 1200'.
+  stagetool.CpCommand = "lcg-cp" #"wrapper_lcg-cp"
+  stagetool.CpArguments = ["-v","-n","5","--connect-timeout","60","--sendreceive-timeout","300","--bdii-timeout","60","--srm-timeout","300","--vo","atlas","-b","-D","srmv2"]
+  #stagetool.OutfilePrefix = "file:"
+  #stagetool.checkGridProxy = True
+  stagetool.LogfileDir = "./"
+  
+  #################################################################################################
+  # Configure the FileStager -- no need to change these lines
+  #################################################################################################
+  
+  ## get Reference to existing Athena job
+  from AthenaCommon.AlgSequence import AlgSequence
+  thejob = AlgSequence()
+  
+  ## check if collection names begin with "gridcopy"
+  print "FileStager() : doStaging ?", stagetool.DoStaging()
+  
+  ## Import file stager algorithm
+  from FileStager.FileStagerConf import FileStagerAlg
+  
+  ## FileStagerAlg needs to be the first algorithm added to thejob.
+  if stagetool.DoStaging():
+     thejob += FileStagerAlg('FileStager')
+     thejob.FileStager.InputCollections = stagetool.GetSampleList()
+     thejob.FileStager.PipeLength = 2
+     thejob.FileStager.VerboseStager = True
+     thejob.FileStager.KeepLogfiles = True
+     thejob.FileStager.LogfileDir    = stagetool.LogfileDir
+     thejob.FileStager.BaseTmpdir    = stagetool.GetTmpdir()
+     thejob.FileStager.InfilePrefix  = stagetool.InfilePrefix
+     thejob.FileStager.OutfilePrefix = stagetool.OutfilePrefix
+     thejob.FileStager.CpCommand     = stagetool.CpCommand
+     thejob.FileStager.CpArguments   = stagetool.CpArguments
+     thejob.FileStager.FirstFileAlreadyStaged = stagetool.StageFirstFile
+     thejob.FileStager.StoreStatistics = False
+     print "Haas is here"
+     print thejob.FileStager
+  
+  #################################################################################################
+  # Pass collection names to EventSelector
+  #################################################################################################
+  
+  ## set input collections
+  ic = []
+  if stagetool.DoStaging():
+    ic = stagetool.GetStageCollections()
+  else:
+    ic = stagetool.GetSampleList()
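+  ## ic now holds the (possibly staged) input collection names; assigning it to the EventSelector
+  ## is left to the job options that include this fragment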
+  
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/magfield.py b/Event/EventOverlay/EventOverlayJobTransforms/share/magfield.py
new file mode 100644
index 0000000000000000000000000000000000000000..01bb464fcf6e48f1663ddf85c22df12f09a22035
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/magfield.py
@@ -0,0 +1,11 @@
+
+#disable DCS for MagField??
+#MagFieldAthenaSvc = ServiceMgr.MagFieldAthenaSvc
+#MagFieldAthenaSvc.UseDCS = False
+
+from G4AtlasApps.SimFlags import SimFlags
+#SimFlags.load_atlas_flags()
+#get currents from COOL
+#MagFieldAthenaSvc.NameOfTheSource="COOL"
+SimFlags.MagneticField="AtlasFieldSvc"
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/muAlign.py b/Event/EventOverlay/EventOverlayJobTransforms/share/muAlign.py
new file mode 100644
index 0000000000000000000000000000000000000000..6f829436bf80eb0e612f8767f651b7c0d5bd6632
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/muAlign.py
@@ -0,0 +1,37 @@
+
+haas_mualign2=True
+if haas_mualign2:
+    #use muon alignments
+    print "Haas: Reading muon alignment constants from DB"
+
+    #from IOVDbSvc.CondDB import conddb
+    #conddb.addFolderSplitOnline('MUONALIGN','/MUONALIGN/Onl/MDT/BARREL','/MUONALIGN/MDT/BARREL')
+    #conddb.addFolderSplitOnline('MUONALIGN','/MUONALIGN/Onl/MDT/ENDCAP/SIDEA','/MUONALIGN/MDT/ENDCAP/SIDEA')
+    #conddb.addFolderSplitOnline('MUONALIGN','/MUONALIGN/Onl/MDT/ENDCAP/SIDEC','/MUONALIGN/MDT/ENDCAP/SIDEC')
+    #conddb.addFolderSplitOnline('MUONALIGN','/MUONALIGN/Onl/TGC/SIDEA','/MUONALIGN/TGC/SIDEA')
+    #conddb.addFolderSplitOnline('MUONALIGN','/MUONALIGN/Onl/TGC/SIDEC','/MUONALIGN/TGC/SIDEC')
+
+    from MuonCondTool.MuonCondToolConf import MuonAlignmentDbTool
+    MuonAlignmentDbTool = MuonAlignmentDbTool("MGM_AlignmentDbTool")
+    MuonAlignmentDbTool.ParlineFolders = ["/MUONALIGN/MDT/BARREL",
+                                          "/MUONALIGN/MDT/ENDCAP/SIDEA",
+                                          "/MUONALIGN/MDT/ENDCAP/SIDEC",
+                                          "/MUONALIGN/TGC/SIDEA",
+                                          "/MUONALIGN/TGC/SIDEC"]
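+    #these COOL folders supply the alignment parameter lines read by the alignment tool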
+    
+    ToolSvc += MuonAlignmentDbTool
+    MGM_AlignmentDbTool = ToolSvc.MGM_AlignmentDbTool
+    MGM_AlignmentDbTool.OutputLevel=DEBUG
+    from AtlasGeoModel.MuonGM import GeoModelSvc
+    MuonDetectorTool = GeoModelSvc.DetectorTools[ "MuonDetectorTool" ]
+    MuonDetectorTool.UseConditionDb = 1
+    MuonDetectorTool.OutputLevel=DEBUG
+    
+    MuonDetectorTool.EnableFineClashFixing = 1 #this should be on for g4?
+    print MuonDetectorTool
+
+    #register callbacks for alignments, to get IOVs?
+    #GeoModelSvc.AlignCallbacks = True
+    
+
+    
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/muAlign_reco.py b/Event/EventOverlay/EventOverlayJobTransforms/share/muAlign_reco.py
new file mode 100644
index 0000000000000000000000000000000000000000..fc073bc2fcb3cd1c5d31fb3748f794acc3dbfde8
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/muAlign_reco.py
@@ -0,0 +1,34 @@
+
+#use muon alignments
+print "Haas: Reading muon alignment constants from DB for reco"
+
+#from IOVDbSvc.CondDB import conddb
+#conddb.addFolderSplitOnline('MUONALIGN','/MUONALIGN/Onl/MDT/BARREL','/MUONALIGN/MDT/BARREL')
+#conddb.addFolderSplitOnline('MUONALIGN','/MUONALIGN/Onl/MDT/ENDCAP/SIDEA','/MUONALIGN/MDT/ENDCAP/SIDEA')
+#conddb.addFolderSplitOnline('MUONALIGN','/MUONALIGN/Onl/MDT/ENDCAP/SIDEC','/MUONALIGN/MDT/ENDCAP/SIDEC')
+#conddb.addFolderSplitOnline('MUONALIGN','/MUONALIGN/Onl/TGC/SIDEA','/MUONALIGN/TGC/SIDEA')
+#conddb.addFolderSplitOnline('MUONALIGN','/MUONALIGN/Onl/TGC/SIDEC','/MUONALIGN/TGC/SIDEC')
+
+#from MuonCondTool.MuonCondToolConf import MuonAlignmentDbTool
+#MuonAlignmentDbTool = MuonAlignmentDbTool("MGM_AlignmentDbTool")
+#MuonAlignmentDbTool.ParlineFolders = ["/MUONALIGN/MDT/BARREL",
+#                                      "/MUONALIGN/MDT/ENDCAP/SIDEA",
+#                                      "/MUONALIGN/MDT/ENDCAP/SIDEC",
+#                                      "/MUONALIGN/TGC/SIDEA",
+#                                      "/MUONALIGN/TGC/SIDEC"]
+
+#ToolSvc += MuonAlignmentDbTool
+MGM_AlignmentDbTool = ToolSvc.MGM_AlignmentDbTool
+MGM_AlignmentDbTool.OutputLevel=DEBUG
+print MGM_AlignmentDbTool
+from AtlasGeoModel.MuonGM import GeoModelSvc
+MuonDetectorTool = GeoModelSvc.DetectorTools[ "MuonDetectorTool" ]
+MuonDetectorTool.UseConditionDb = 1
+MuonDetectorTool.OutputLevel=DEBUG
+
+MuonDetectorTool.EnableFineClashFixing = 0 #this should be on for g4?
+print MuonDetectorTool
+print GeoModelSvc
+print ToolSvc
+
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/overlay.ignore.db b/Event/EventOverlay/EventOverlayJobTransforms/share/overlay.ignore.db
new file mode 100644
index 0000000000000000000000000000000000000000..6ae46838e91fd61b1b8793f2bb6a3d659b467981
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/overlay.ignore.db
@@ -0,0 +1,7 @@
+ALL, CoreDumpSvc.*, INFO .*
+ALL, ToolSvc.SCT.*, ERROR .*
+ALL, MuonCalib::CscCoolStrSvc, ERROR .*
+ALL, MuonMDT_CablingSvc, ERROR .*
+ALL, Py:inputFilePeeker, ERROR .*
+ALL, Py:AutoConfiguration, ERROR .*
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/overlay_rdotobs_jobOptions.py b/Event/EventOverlay/EventOverlayJobTransforms/share/overlay_rdotobs_jobOptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..dc743f414ac71a89ab580efa0691c195e43eaec8
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/overlay_rdotobs_jobOptions.py
@@ -0,0 +1,81 @@
+from RecExConfig.RecFlags  import rec
+from AthenaCommon.BeamFlags import jobproperties
+from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
+from AthenaCommon.GlobalFlags import globalflags
+from RecExConfig.RecAlgsFlags  import recAlgs
+
+#rec.ScopingLevel.set_Value_and_Lock(1)
+
+if not 'Input' in dir():
+    # uncomment if input is real data
+    #Input='data'
+    # uncomment if input is overlay of simulated Hits onto real data
+    Input='overlay'
+    # uncomment if input is simulation BS
+    # Input='simbs'
+    # uncomment if input is simulation RDO
+    # Input='simrdo'
+
+# uncomment if reading from TAG
+#rec.readTAG=True
+
+
+# input configuration (lock to prevent override)
+if Input=='overlay':
+
+    jobproperties.Beam.beamType.set_Value_and_Lock('collisions')
+
+
+    TileFrameLength=7
+
+    from LArConditionsCommon.LArCondFlags import larCondFlags
+    larCondFlags.useShape=True
+    larCondFlags.OFCShapeFolder = ""
+
+    from LArROD.LArRODFlags import larRODFlags
+    larRODFlags.readDigits = True
+
+    from LArConditionsCommon.LArCondFlags import larCondFlags
+    larCondFlags.LArCoolChannelSelection.set_Value_and_Lock("")
+    larCondFlags.OFCShapeFolder.set_Value_and_Lock("")
+
+    athenaCommonFlags.PoolRDOInput.set_Value_and_Lock([
+            'rfio:/castor/cern.ch/grid/atlas/atlasgroupdisk/proj-sit/pileup/MC_RandomTrigger.RDO.pool.root'])
+    globalflags.DetDescrVersion.set_Value_and_Lock('ATLAS-GEO-03-00-00')
+    globalflags.ConditionsTag.set_Value_and_Lock('COMCOND-ES1C-000-00')
+    from AthenaCommon.BFieldFlags import jobproperties
+    jobproperties.BField.solenoidOn.set_Value_and_Lock(True)
+    jobproperties.BField.barrelToroidOn.set_Value_and_Lock(True)
+    jobproperties.BField.endcapToroidOn.set_Value_and_Lock(True)
+    globalflags.InputFormat.set_Value_and_Lock('pool')
+    globalflags.DataSource.set_Value_and_Lock('data')
+else:
+    raise RuntimeError,"Wrong Input value"
+
+athenaCommonFlags.EvtMax.set_Value_and_Lock(10)
+#athenaCommonFlags.SkipEvents = 0
+
+doESD=False
+doWriteESD=False
+doCBNT=False
+doHist=False
+doAOD=False
+doWriteAOD=False
+doWriteBS=True
+doMonitoring=False
+doWriteTAG=False
+doWriteTAGCOM=False
+doNameAuditor=True
+
+BSRDOOutput='AppName=Athena, OutputDirectory=./, FileTag=Release14.2.25.9, Run=0091890'
+
+# the main jobOpt
+include("RecExCommission/RecExCommission.py")                
+
+# filtered ESD if not collisions
+
+include ("RecExCommon/RecExCommon_topOptions.py")
+
+ServiceMgr.MboySvc.UseAllSegments=0 # standard way to run Muonboy on collisions
+ServiceMgr.TileInfoLoader.filenameDeltaPhysicsSuffix="of2_Delta_Phys_7Samples"
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/postInclude.py b/Event/EventOverlay/EventOverlayJobTransforms/share/postInclude.py
new file mode 100644
index 0000000000000000000000000000000000000000..8580a77710fd745508ea06f3898b252aaa095e87
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/postInclude.py
@@ -0,0 +1,6 @@
+import MDTcabling.MDTcablingConfig
+ServiceMgr.MDTcablingSvc.RODfile="AtlasRODmap.data"
+ServiceMgr.MDTcablingSvc.MEZfile="AtlasMEZmap.data"
+ServiceMgr.MDTcablingSvc.ShiftLinks=False
+ServiceMgr.MDTcablingSvc.DisableTagInfoMatch = True
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/recotrfpre.py b/Event/EventOverlay/EventOverlayJobTransforms/share/recotrfpre.py
new file mode 100644
index 0000000000000000000000000000000000000000..bac49d1dfe96f739a8cd0df423083d4ae76c2dbb
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/recotrfpre.py
@@ -0,0 +1,20 @@
+
+from RecExConfig.RecFlags  import rec
+rec.doTruth.unlock()
+rec.doTruth.set_Value_and_Lock(True)
+
+from AthenaCommon.GlobalFlags import globalflags
+globalflags.isOverlay.unlock()
+globalflags.isOverlay.set_Value_and_Lock(True)
+
+#To be more like MC
+#from MuonRecExample.MuonRecFlags import muonRecFlags
+#muonRecFlags.doSegmentT0Fit.set_Value_and_Lock(False)
+
+#Since we already made digits during overlay
+from LArROD.LArRODFlags import larRODFlags
+larRODFlags.readDigits = False
+from LArConditionsCommon.LArCondFlags import larCondFlags
+larCondFlags.LArCoolChannelSelection.set_Value_and_Lock("")
+#larCondFlags.OFCShapeFolder.set_Value_and_Lock("")
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/removeTempBkgObjects.py b/Event/EventOverlay/EventOverlayJobTransforms/share/removeTempBkgObjects.py
new file mode 100644
index 0000000000000000000000000000000000000000..654c980ff4712af43cc14dd9a7741867a9c5f768
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/removeTempBkgObjects.py
@@ -0,0 +1,14 @@
+# Remove all objects that were copied to the temporary background
+# storegate BkgEvent_2_SG
+# author: Piyali Banerjee Piyali.Banerjee@cern.ch 
+# date: February 2011
+
+from OverlayCommonAlgs.OverlayCommonAlgsConf import RemoveTempBkgObjects
+
+if not "topSequence" in dir():    
+   from AthenaCommon.AlgSequence import AlgSequence
+   topSequence = AlgSequence()
+
+topSequence += RemoveTempBkgObjects('RemoveTempBkgObjects')
+topSequence.RemoveTempBkgObjects.TempBkgStore = 'StoreGateSvc/BkgEvent_2_SG'
+topSequence.RemoveTempBkgObjects.BackgroundIsData = readBS
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.BSFilter.py b/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.BSFilter.py
new file mode 100644
index 0000000000000000000000000000000000000000..404b3aa18e243aeaad3a2ce178644358119a2b47
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.BSFilter.py
@@ -0,0 +1,100 @@
+from AthenaCommon.Logging import logging
+BSFilterLog = logging.getLogger('BSFilter')
+BSFilterLog.info( '****************** STARTING BSFilter *****************' )
+
+BSFilterLog.info( '**** Transformation run arguments' )
+BSFilterLog.info( str(runArgs) )
+
+#---------------------------
+# Job definition parameters:
+theApp.EvtMax           = runArgs.maxEvents
+
+if not hasattr( runArgs, "OutputBSFilterFile"):
+    raise RuntimeError ("No output BS file defined")
+
+#---------------------------
+BSFilterLog.info( '**** ByteStreamInputSvc configuration' )
+
+include( "ByteStreamCnvSvc/BSEventStorageEventSelector_jobOptions.py" )
+svcMgr = theApp.serviceMgr()
+ByteStreamInputSvc = svcMgr.ByteStreamInputSvc
+#theApp.SkipEvents = runArgs.skipEvents #doesn't work
+MessageSvc.OutputLevel = INFO
+ByteStreamInputSvc.FullFileName = runArgs.inputBSFile
+#ByteStreamInputSvc.ValidateEvent=False
+#ByteStreamInputSvc.DumpFlag = True
+#ByteStreamInputSvc.SkipNeventBeforeNext=10
+print ByteStreamInputSvc
+
+# ---------------------------
+# Service to write out BS events
+BSFilterLog.info( '**** ByteStreamOutputSvc configuration' )
+
+from ByteStreamCnvSvc.ByteStreamCnvSvcConf import ByteStreamEventStorageOutputSvc
+if runArgs.OutputBSFilterFile.startswith("simple"):
+    bsOutputSvc=ByteStreamEventStorageOutputSvc("BSESOutputSvc0",OutputDirectory="./",SimpleFileName=runArgs.OutputBSFilterFile)
+else:
+    bsOutputSvc=ByteStreamEventStorageOutputSvc("BSESOutputSvc0",MaxFileNE=100,OutputDirectory="./",ProjectTag=runArgs.OutputBSFilterFile,AppName="Filtered",FileTag="test")
+svcMgr += bsOutputSvc
+
+print bsOutputSvc
+
+# ---------------------------
+BSFilterLog.info( '**** ByteStreamFilter configuration' )
+
+from AthenaCommon.AlgSequence import AlgSequence
+topSequence = AlgSequence()
+
+# get the filter algorithm
+from TrigT1ResultByteStream.TrigT1ResultByteStreamConf import CTPByteStreamTool,RecCTPByteStreamTool
+if not hasattr( svcMgr, "ByteStreamAddressProviderSvc" ):
+    from ByteStreamCnvSvcBase.ByteStreamCnvSvcBaseConf import ByteStreamAddressProviderSvc 
+    svcMgr += ByteStreamAddressProviderSvc()
+svcMgr.ByteStreamAddressProviderSvc.TypeNames += ["ROIB::RoIBResult/RoIBResult", "MuCTPI_RDO/MUCTPI_RDO", "CTP_RDO/CTP_RDO", "MuCTPI_RIO/MUCTPI_RIO", "CTP_RIO/CTP_RIO"
+]
+from OverlayCommonAlgs.OverlayCommonAlgsConf import  BSFilter
+filAlg=BSFilter("BSFilter")
+topSequence+=filAlg
+if hasattr( runArgs, "TriggerBit"):
+    filAlg.TriggerBit = runArgs.TriggerBit
+else:
+    filAlg.TriggerBit = -1
+
+if hasattr( runArgs, "EventIdFile"):
+    filAlg.EventIdFile=runArgs.EventIdFile # The name of the file to write to for EventIdModifierSvc lines
+else:
+    filAlg.EventIdFile=""
+
+if hasattr( runArgs, "FilterFile"):
+    filAlg.filterfile=runArgs.FilterFile # The thing made from the TAG files via "root -l -b -q HITAGprinter_run.C"
+else:
+    filAlg.filterfile = ""
+
+# ---------------------------
+BSFilterLog.info( '**** ByteStreamCopyTool configuration' )
+
+# BS OutputStream Tool
+OutStreamName="OutputStreamBSCopy"
+from ByteStreamCnvSvc.ByteStreamCnvSvcConf import ByteStreamOutputStreamCopyTool
+bsCopyTool = ByteStreamOutputStreamCopyTool("OutputStreamBSCopyTool")
+svcMgr.ToolSvc += bsCopyTool
+
+bsCopyTool.ByteStreamOutputSvc=bsOutputSvc
+bsCopyTool.ByteStreamInputSvc=svcMgr.ByteStreamInputSvc
+
+# ---------------------------
+BSFilterLog.info( '**** AthenaOutputStream configuration' )
+
+# create AthenaOutputStream for BS Copy 
+from AthenaServices.AthenaServicesConf import AthenaOutputStream
+OutputStreamBSCopy = AthenaOutputStream( OutStreamName, WritingTool=bsCopyTool )
+topSequence += OutputStreamBSCopy
+OutputStreamBSCopy.AcceptAlgs =["BSFilter"] 
+
+print topSequence
+
+#---------------------------
+# Post-include
+if hasattr(runArgs,"postInclude"):
+    for fragment in runArgs.postInclude:
+        include(fragment)
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.BSOverlayFilter_tf.py b/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.BSOverlayFilter_tf.py
new file mode 100644
index 0000000000000000000000000000000000000000..76208a28ac54016ea326ad693cc7f60e9d788db7
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.BSOverlayFilter_tf.py
@@ -0,0 +1,104 @@
+from AthenaCommon.Logging import logging
+BSFilterLog = logging.getLogger('BSFilter')
+BSFilterLog.info( '****************** STARTING BSFilter *****************' )
+
+# BSFilterLog.info( '**** Transformation run arguments' )
+# BSFilterLog.info( str(runArgs) )
+
+#---------------------------
+theApp.EvtMax = runArgs.maxEvents
+
+#---------------------------
+## Run performance monitoring (memory logging)
+from PerfMonComps.PerfMonFlags import jobproperties as perfmonjp
+perfmonjp.PerfMonFlags.doMonitoring = True # to enable monitoring
+perfmonjp.PerfMonFlags.doFastMon = True    # to only enable a lightweight monitoring
+#perfmonjp.PerfMonFlags.doSemiDetailedMonitoring = True
+
+#---------------------------
+BSFilterLog.info( '**** ByteStreamInputSvc configuration' )
+
+include( "ByteStreamCnvSvc/BSEventStorageEventSelector_jobOptions.py" )
+ByteStreamInputSvc = svcMgr.ByteStreamInputSvc
+# ByteStreamInputSvc.FullFileName = open(runArgs.InputFileMapFile).readline().rstrip().split(',')
+ByteStreamInputSvc.FullFileName = runArgs.inputBSFile
+
+print ByteStreamInputSvc
+
+# ---------------------------
+# Service to write out BS events
+BSFilterLog.info( '**** ByteStreamOutputSvc configuration' )
+
+from ByteStreamCnvSvc.ByteStreamCnvSvcConf import ByteStreamEventStorageOutputSvc
+bsOutputSvc=ByteStreamEventStorageOutputSvc("BSESOutputSvc",OutputDirectory='./',SimpleFileName=runArgs.outputBS_SKIMFile)
+svcMgr += bsOutputSvc
+
+print bsOutputSvc
+
+# ---------------------------
+BSFilterLog.info( '**** ByteStreamFilter configuration' )
+
+from AthenaCommon.AlgSequence import AlgSequence
+topSequence = AlgSequence()
+
+# get the filter algorithm
+from TrigT1ResultByteStream.TrigT1ResultByteStreamConf import CTPByteStreamTool,RecCTPByteStreamTool
+if not hasattr( svcMgr, "ByteStreamAddressProviderSvc" ):
+    from ByteStreamCnvSvcBase.ByteStreamCnvSvcBaseConf import ByteStreamAddressProviderSvc 
+    svcMgr += ByteStreamAddressProviderSvc()
+svcMgr.ByteStreamAddressProviderSvc.TypeNames += ["ROIB::RoIBResult/RoIBResult", "MuCTPI_RDO/MUCTPI_RDO", "CTP_RDO/CTP_RDO", "MuCTPI_RIO/MUCTPI_RIO", "CTP_RIO/CTP_RIO"]
+
+from OverlayCommonAlgs.OverlayCommonAlgsConf import  BSFilter
+filAlg = BSFilter("BSFilter")
+topSequence += filAlg
+if hasattr( runArgs, "triggerBit"):
+    filAlg.TriggerBit = runArgs.triggerBit
+else:
+    filAlg.TriggerBit = -1
+
+if hasattr(runArgs, 'eventIdFile'):
+    filAlg.EventIdFile=runArgs.eventIdFile # The name of the file to write to for EventIdModifierSvc lines
+else:
+    filAlg.EventIdFile=""
+
+if hasattr( runArgs, "FilterFile"): #TODO currently no such argument
+    filAlg.filterfile=runArgs.FilterFile # The thing made from the TAG files via "root -l -b -q HITAGprinter_run.C"
+else:
+    filAlg.filterfile = ""
+
+# ---------------------------
+BSFilterLog.info( '**** ByteStreamCopyTool configuration' )
+
+if hasattr( runArgs, 'InputLbnMapFile'):
+    from OverlayCommonAlgs.OverlayCommonAlgsConf import ByteStreamMultipleOutputStreamCopyTool
+    bsCopyTool = ByteStreamMultipleOutputStreamCopyTool("MultipleOutputStreamBSCopyTool")
+    bsCopyTool.lbn_map_file = runArgs.InputLbnMapFile
+    bsCopyTool.NoutputSvc = 1
+    bsCopyTool.ByteStreamOutputSvc0=bsOutputSvc
+else:
+    from ByteStreamCnvSvc.ByteStreamCnvSvcConf import ByteStreamOutputStreamCopyTool
+    bsCopyTool = ByteStreamOutputStreamCopyTool("OutputStreamBSCopyTool")
+    bsCopyTool.ByteStreamOutputSvc=bsOutputSvc
+
+svcMgr.ToolSvc += bsCopyTool
+
+bsCopyTool.ByteStreamInputSvc=svcMgr.ByteStreamInputSvc
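+# Sketch of the intent (an assumption based on the property names): with InputLbnMapFile the
+# multiple-output copy tool uses the lumi-block map to route each accepted event to one of its
+# output services, while without it a plain single-output copy of the accepted events is made.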
+
+# ---------------------------
+BSFilterLog.info( '**** AthenaOutputStream configuration' )
+
+# create AthenaOutputStream for BS Copy 
+from AthenaServices.AthenaServicesConf import AthenaOutputStream
+OutputStreamBSCopy = AthenaOutputStream( "OutputStreamBSCopy", WritingTool=bsCopyTool )
+topSequence += OutputStreamBSCopy
+
+OutputStreamBSCopy.AcceptAlgs =["BSFilter"] 
+
+print topSequence
+
+# ---------------------------
+# Post-include
+
+if hasattr(runArgs,"postInclude"):
+    for fragment in runArgs.postInclude:
+        include(fragment)
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.BS_multipleSelector.py b/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.BS_multipleSelector.py
new file mode 100644
index 0000000000000000000000000000000000000000..7cb85bd81c8c23c48f7c13f7879ad31733b40bb5
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.BS_multipleSelector.py
@@ -0,0 +1,153 @@
+from AthenaCommon.Logging import logging
+BS_multipleSelectorLog = logging.getLogger('BS_multipleSelector')
+BS_multipleSelectorLog.info( '****************** STARTING BS_multipleSelector *****************' )
+
+# Job definition parameters:
+EvtMax = runArgs.maxEvents
+SkipEvents = runArgs.skipEvents
+BSInput = runArgs.inputBSFile
+TriggerBit = runArgs.TriggerBit
+Noutputs = runArgs.Noutputs
+Uniq = runArgs.uniq
+
+BS_multipleSelectorLog.info( '**** Transformation run arguments' )
+BS_multipleSelectorLog.info( str(runArgs) )
+
+maxeventsvec= eval('['+runArgs.maxeventsvec+']')
+skipeventsvec= eval('['+runArgs.skipeventsvec+']')
+noutputsvec= eval('['+runArgs.noutputsvec+']')
+streamvec= eval('['+runArgs.streamvec+']')
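+# The *vec arguments are expected as comma-separated strings, e.g. (illustrative values)
+#   maxeventsvec="100,200,300" skipeventsvec="0,0,0" noutputsvec="10,20,30" streamvec="1,2,3"
+# which the eval() calls above turn into python lists such as [100, 200, 300].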
+
+if len(streamvec)>0:
+    mystream=int(BSInput[0].split(".")[7].replace("stream",""))
+    print "mystream is ",mystream
+    mystreamindex= int(streamvec.index(mystream))
+    print "mystreamindex is ",mystreamindex," for mystream ",mystream
+
+    EvtMax=noutputsvec[mystreamindex]
+    SkipEvents=skipeventsvec[mystreamindex]
+    Noutputs=noutputsvec[mystreamindex]
+    maxnout=max(noutputsvec)
+    if maxnout>Noutputs:
+        print "WARNING - settting Noutputs "+str(Noutputs)+" to maxnout ",maxnout
+        Noutputs=maxnout
+    print "evtmax, skip, nout = ",EvtMax,SkipEvents,Noutputs
+
+
+#---------------------------
+include( "ByteStreamCnvSvc/BSEventStorageEventSelector_jobOptions.py" )
+svcMgr = theApp.serviceMgr()
+ByteStreamInputSvc = svcMgr.ByteStreamInputSvc
+theApp.EvtMax = EvtMax
+#theApp.SkipEvents = SkipEvents #doesn't work
+MessageSvc.OutputLevel = INFO
+ByteStreamInputSvc.FullFileName = BSInput
+#ByteStreamInputSvc.ValidateEvent=False
+#ByteStreamInputSvc.DumpFlag = True
+#ByteStreamInputSvc.SkipNeventBeforeNext=10
+print ByteStreamInputSvc
+from AthenaCommon.AlgSequence import AlgSequence
+topSequence = AlgSequence()
+
+# get the filter algorithm
+from TrigT1ResultByteStream.TrigT1ResultByteStreamConf import CTPByteStreamTool,RecCTPByteStreamTool
+if not hasattr( svcMgr, "ByteStreamAddressProviderSvc" ):
+    from ByteStreamCnvSvcBase.ByteStreamCnvSvcBaseConf import ByteStreamAddressProviderSvc 
+    svcMgr += ByteStreamAddressProviderSvc()
+svcMgr.ByteStreamAddressProviderSvc.TypeNames += [
+    "ROIB::RoIBResult/RoIBResult",
+    "MuCTPI_RDO/MUCTPI_RDO",
+    "CTP_RDO/CTP_RDO",
+    "MuCTPI_RIO/MUCTPI_RIO",
+    "CTP_RIO/CTP_RIO"
+    ]
+from OverlayCommonAlgs.OverlayCommonAlgsConf import  BSFilter, ByteStreamMultipleOutputStreamCopyTool
+filAlg=BSFilter("BSFilter")
+filAlg.TriggerBit=TriggerBit # The trigger bit to select
+filAlg.EventIdFile=runArgs.EventIdFile # Not really used, but writes out the EventIdOverrides
+topSequence+=filAlg
+print filAlg
+
+# BS OutputStream Tool
+from ByteStreamCnvSvc.ByteStreamCnvSvcConf import ByteStreamEventStorageOutputSvc
+bsCopyTool = ByteStreamMultipleOutputStreamCopyTool("MultipleOutputStreamBSCopyTool")
+bsCopyTool.lbn_map_file = runArgs.LbnMapFile
+bsCopyTool.skipevents = SkipEvents
+svcMgr.ToolSvc += bsCopyTool
+
+bsCopyTool.NoutputSvc = Noutputs
+for i in range(0,bsCopyTool.NoutputSvc):
+    if len(streamvec)>0:
+        bsOutputSvc=ByteStreamEventStorageOutputSvc("BSESOutputSvc"+str(i),OutputDirectory="./",SimpleFileName="Zerobias"+Uniq+".bit"+str(TriggerBit)+".out"+str(i)+".RAW")
+    else:
+        bsOutputSvc=ByteStreamEventStorageOutputSvc("BSESOutputSvc"+str(i),OutputDirectory="./",SimpleFileName="Zerobias"+Uniq+".bit"+str(TriggerBit)+".stream"+str(i)+".RAW")
+        #bsOutputSvc=ByteStreamEventStorageOutputSvc("BSESOutputSvc"+str(i),OutputDirectory="./",SimpleFileName=BSOutput+"_"+str(TriggerBit)+"_"+str(i)+".RAW")
+    
+    svcMgr += bsOutputSvc
+    # the copy tool exposes one property per output service (ByteStreamOutputSvc0 ... ByteStreamOutputSvc49),
+    # so set the property matching this index instead of spelling out fifty separate assignments
+    setattr(bsCopyTool, "ByteStreamOutputSvc"+str(i), bsOutputSvc)
+
+# BS InputStream
+bsCopyTool.ByteStreamInputSvc=svcMgr.ByteStreamInputSvc
+
+# create AthenaOutputStream for BS Copy 
+from AthenaServices.AthenaServicesConf import AthenaOutputStream
+OutputStreamBSCopy = AthenaOutputStream( "OutputStreamBSCopy", WritingTool=bsCopyTool )
+topSequence += OutputStreamBSCopy 
+        
+# apply prescale to the OutputStream 
+OutputStreamBSCopy.AcceptAlgs =["BSFilter"] 
+
+#---------------------------
+# Post-include
+if hasattr(runArgs,"postInclude"):
+    for fragment in runArgs.postInclude:
+        include(fragment)
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.G4HitMerge.py b/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.G4HitMerge.py
new file mode 100644
index 0000000000000000000000000000000000000000..651cf0cc5b07faf3f9712fce6931fc6aeaa96304
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.G4HitMerge.py
@@ -0,0 +1,151 @@
+# Andy Haas (ahaas@cern.ch), SLAC, 4/2010
+# Use pileUp infrastructure to overlay HITS from one signal stream onto HITS from one background stream
+# One background event is read for each signal event
+# Modified by Piyali (Piyali.Banerjee@cern.ch) to include Metadata containers in the output hits file
+
+from AthenaCommon.Logging import logging
+G4HitMergeLog = logging.getLogger('G4HitMerge')
+G4HitMergeLog.info( '****************** STARTING G4HitMerge *****************' )
+
+# Job definition parameters:
+EvtMax           = runArgs.maxEvents
+SkipEvents      = runArgs.skipEvents
+
+G4HitMergeLog.info( '**** Transformation run arguments' )
+G4HitMergeLog.info( str(runArgs) )
+
+#------------------------------------------------
+
+# setup DBReplicaSvc to choose closest Oracle replica, configurables style
+from AthenaCommon.AppMgr import ServiceMgr
+from PoolSvc.PoolSvcConf import PoolSvc
+ServiceMgr+=PoolSvc(SortReplicas=True)
+from DBReplicaSvc.DBReplicaSvcConf import DBReplicaSvc
+ServiceMgr+=DBReplicaSvc(UseCOOLSQLite=False)
+
+# Overall common flags
+# DetDescrVersion='ATLAS-GEO-08-00-02'
+
+DetDescrVersion=runArgs.geometryVersion
+IOVTag=runArgs.conditionsTag
+
+include( "AthenaCommon/AthenaCommonFlags.py" )
+
+minBiasInputCols = [ runArgs.inputHITS1File ] #["Input1.g4.pool.root"]
+PoolHitsInput = [ runArgs.inputHITS2File ] #["Input2.g4.pool.root"]
+
+from AthenaCommon.DetFlags import DetFlags
+DetFlags.ID_setOn()
+DetFlags.Calo_setOn()
+DetFlags.Muon_setOn()
+DetFlags.Truth_setOn()
+DetFlags.LVL1_setOn()
+
+#include ( "Digitization/Digitization_flags.py" )
+
+# EventInfo Converters
+import AthenaPoolCnvSvc.ReadAthenaPool
+include( "EventAthenaPool/EventAthenaPool_joboptions.py" )
+
+# GeoModel stuff
+from AthenaCommon.GlobalFlags import jobproperties
+jobproperties.Global.DetDescrVersion=DetDescrVersion
+from AtlasGeoModel import SetGeometryVersion
+from AtlasGeoModel import GeoModelInit
+
+from AthenaCommon.AppMgr import ServiceMgr
+from PileUpComps.PileUpCompsConf import PileUpEventLoopMgr
+
+# ad-hoc configuration of pileup event loop manager
+theApp.EventLoop = "PileUpEventLoopMgr"
+pileUpEventLoopMgr = PileUpEventLoopMgr()
+from Digitization.DigitizationFlags import jobproperties
+pupStream = "PileUpCollXingStream"
+jobproperties.Digitization.rndmSeedList.addSeed( pupStream, 123, 345 )
+pileUpEventLoopMgr.OrigSelector="EventSelector"
+
+from PileUpComps.PileUpCompsConf import BkgStreamsCache
+from EventSelectorAthenaPool.EventSelectorAthenaPoolConf import EventSelectorAthenaPool
+minBiasCache = BkgStreamsCache("MinBiasCache")
+minBiasCache.CollPerXing=1
+minBiasCache.CollDistribution="Fixed"
+minBiasCache.ReadDownscaleFactor=1  # read one new event every event
+#minBiasCache.RndmGenSvc=digitizationRndmSvc
+minBiasCache.RndmStreamName=pupStream
+minBiasEvtSel = EventSelectorAthenaPool("minBiasEventSelector")
+minBiasEvtSel.InputCollections = minBiasInputCols
+minBiasEvtSel.KeepInputFilesOpen = True
+ServiceMgr += minBiasEvtSel
+minBiasCache.EventSelector="minBiasEventSelector"
+pileUpEventLoopMgr.bkgCaches += [ minBiasCache ]
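+# With CollPerXing=1, a Fixed distribution and ReadDownscaleFactor=1 this cache provides exactly
+# one background event per signal event, as stated in the header comment above.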
+
+from AthenaCommon.AppMgr import ToolSvc
+from LArRecUtils.LArRecUtilsConf import LArAutoCorrTotalTool
+try:
+  ToolSvc.LArAutoCorrTotalTool.NMinBias=1.;   # FIXME  <ncol> per crossing
+except AttributeError:
+  ToolSvc += LArAutoCorrTotalTool(NMinBias=1.)
+
+pileUpEventLoopMgr.OutStreamType = "AthenaOutputStream"
+pileUpEventLoopMgr.XingFrequency=25;
+pileUpEventLoopMgr.firstXing=0
+pileUpEventLoopMgr.lastXing=0
+pileUpEventLoopMgr.OutputLevel=VERBOSE
+ServiceMgr += pileUpEventLoopMgr
+
+# in any case we need the PileUpMergeSvc for the digitize algos
+if not hasattr(ServiceMgr, 'PileUpMergeSvc'):
+    from PileUpTools.PileUpToolsConf import PileUpMergeSvc
+    ServiceMgr += PileUpMergeSvc()
+
+# Pool input (Change this to use a different file)
+if not hasattr(ServiceMgr, 'EventSelector'):
+   import AthenaPoolCnvSvc.ReadAthenaPool
+ServiceMgr.EventSelector.InputCollections = PoolHitsInput
+
+# Pool Output
+from AthenaPoolCnvSvc.WriteAthenaPool import AthenaPoolOutputStream
+Stream1 = AthenaPoolOutputStream("StreamHITS", runArgs.outputHITSFile )
+#find full list with "checkSG.py <inputfile.pool.root>"
+Stream1.ItemList+=["EventInfo#*"];
+Stream1.ItemList+=["CaloCalibrationHitContainer#*"];
+Stream1.ItemList+=["SiHitCollection#*"];
+Stream1.ItemList+=["TrackRecordCollection#*"];
+Stream1.ItemList+=["LArHitContainer#*"];
+Stream1.ItemList+=["TRTUncompressedHitCollection#*"];
+Stream1.ItemList+=["CSCSimHitCollection#*"];
+Stream1.ItemList+=["TileHitVector#*"];
+#Stream1.ItemList+=["DataHeader#*"];
+Stream1.ItemList+=["McEventCollection#*"];
+Stream1.ItemList+=["RPCSimHitCollection#*"];
+Stream1.ItemList+=["MDTSimHitCollection#*"];
+Stream1.ItemList+=["TGCSimHitCollection#*"];
+Stream1.ItemList += [ "IOVMetaDataContainer#*" ]
+print Stream1
+
+# storegate dump
+StoreGateSvc = Service( "StoreGateSvc" )
+StoreGateSvc.Dump = True  #true will dump data store contents
+
+# the LAr and Calo detector description package
+include( "CaloDetMgrDetDescrCnv/CaloDetMgrDetDescrCnv_joboptions.py" )
+include( "LArDetMgrDetDescrCnv/LArDetMgrDetDescrCnv_joboptions.py" )
+include( "LArAthenaPool/LArAthenaPool_joboptions.py" )
+
+from AthenaCommon.AlgSequence import AlgSequence 
+topSequence = AlgSequence()
+
+from OverlayCommonAlgs.OverlayCommonAlgsConf import G4HitMerger
+G4HitMerger = G4HitMerger("G4HitMerger")
+topSequence += G4HitMerger
+
+theApp.EvtMax = EvtMax
+theApp.SkipEvents = SkipEvents
+MessageSvc.OutputLevel = INFO
+
+#---------------------------
+# Post-include
+if hasattr(runArgs,"postInclude"):
+    for fragment in runArgs.postInclude:
+        include(fragment)
+
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.OverlayBS_tf.py b/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.OverlayBS_tf.py
new file mode 100644
index 0000000000000000000000000000000000000000..efa64c0ae4275408a3817e5afe54348aaca09f34
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.OverlayBS_tf.py
@@ -0,0 +1,235 @@
+from AthenaCommon.Logging import logging
+overlaylog = logging.getLogger('overlay_bs')
+overlaylog.info( '****************** STARTING OVERLAY *****************' )
+
+overlaylog.info( '**** Transformation run arguments' )
+overlaylog.info( str(runArgs) )
+
+from PerfMonComps.PerfMonFlags import jobproperties as jp
+jp.PerfMonFlags.doMonitoring = True # to enable monitoring
+jp.PerfMonFlags.doFastMon = True    # to only enable a lightweight monitoring
+
+if hasattr(runArgs, "preExec") and runArgs.preExec != 'NONE':
+    exec( runArgs.preExec )
+
+TileFrameLength=7
+
+from AthenaCommon.AppMgr import ServiceMgr
+from AthenaCommon.GlobalFlags  import globalflags
+from AthenaCommon.AthenaCommonFlags  import athenaCommonFlags
+
+from MuonRecExample.MuonRecFlags import muonRecFlags
+muonRecFlags.doCSCs.set_Value_and_Lock(True)
+
+from LArConditionsCommon.LArCondFlags import larCondFlags
+larCondFlags.LArCoolChannelSelection.set_Value_and_Lock("")
+larCondFlags.useShape.set_Value_and_Lock(True)
+larCondFlags.OFCShapeFolder.set_Value_and_Lock("")
+
+globalflags.isOverlay.set_Value_and_Lock(True)
+isRealData = globalflags.isOverlay()
+
+from AthenaCommon.BeamFlags import jobproperties
+jobproperties.Beam.beamType.set_Value_and_Lock("collisions")
+
+import AthenaCommon.AtlasUnixStandardJob #FIXME: Is this needed?
+
+if hasattr(runArgs,"preInclude"):
+    for cf in runArgs.preInclude:
+        include(cf)
+
+#==============================================================
+# Job definition parameters:
+#==============================================================
+
+if hasattr(runArgs,"inputBSFile"):
+    athenaCommonFlags.FilesInput.set_Value_and_Lock( runArgs.inputBSFile )
+    DataInputCollections=runArgs.inputBSFile # Remove this line
+else:
+    raise RuntimeError ("No input BS file defined")
+
+if hasattr(runArgs,"skipEvents"): athenaCommonFlags.SkipEvents.set_Value_and_Lock( runArgs.skipEvents )
+if hasattr(runArgs,"maxEvents"): athenaCommonFlags.EvtMax.set_Value_and_Lock( runArgs.maxEvents )
+
+if hasattr(runArgs,"inputHITSFile"):
+    athenaCommonFlags.PoolHitsInput.set_Value_and_Lock( runArgs.inputHITSFile )
+    SignalInputCollections = runArgs.inputHITSFile #remove this line
+else:
+    raise RuntimeError ("No input HITS file defined")
+
+if hasattr(runArgs,"outputRDOFile"):
+    athenaCommonFlags.PoolRDOOutput.set_Value_and_Lock( runArgs.outputRDOFile )
+    OverlayCollection = runArgs.outputRDOFile #remove this line
+
+from OverlayCommonAlgs.OverlayFlags import OverlayFlags
+
+if not hasattr(runArgs, 'outputRDO_SGNLFile') or runArgs.outputRDO_SGNLFile=="NONE":
+    OverlayFlags.set_SignalOff()
+    SignalCollection = "NONE"
+else:
+    SignalCollection = runArgs.outputRDO_SGNLFile
+
+if hasattr(runArgs,"geometryVersion"): globalflags.DetDescrVersion.set_Value_and_Lock( runArgs.geometryVersion )
+if hasattr(runArgs,"conditionsTag"): globalflags.ConditionsTag.set_Value_and_Lock( runArgs.conditionsTag )
+
+from Digitization.DigitizationFlags import digitizationFlags
+if hasattr(runArgs,"digiSeedOffset1"): digitizationFlags.rndmSeedOffset1=int(runArgs.digiSeedOffset1)
+if hasattr(runArgs,"digiSeedOffset2"): digitizationFlags.rndmSeedOffset2=int(runArgs.digiSeedOffset2)
+if hasattr(runArgs,"samplingFractionDbTag"): digitizationFlags.physicsList=runArgs.samplingFractionDbTag
+if hasattr(runArgs,"digiRndmSvc"): digitizationFlags.rndmSvc=runArgs.digiRndmSvc
+
+readBS = True
+OverlayFlags.set_BkgOff() #ACH
+
+#GlobalFlags.InputFormat.set_bytestream()
+globalflags.InputFormat.set_Value_and_Lock('bytestream')
+globalflags.DetGeo = 'commis'
+globalflags.DataSource.set_Value_and_Lock('data')
+#GlobalFlags.DataSource.set_data()
+
+import PyUtils.AthFile as af
+def getHITSFile(runArgs):
+    if hasattr(runArgs,"inputHITSFile"):
+        return runArgs.inputHITSFile[0]
+    elif hasattr(runArgs,"inputHitsFile"):
+        return runArgs.inputHitsFile[0]
+    else:
+        raise SystemExit("No HITS file in runArgs!!")
+try:
+    f = af.fopen(getHITSFile(runArgs))
+except AssertionError:
+    overlaylog.error("Failed to open input file: %s", getHITSFile(runArgs))
+metadatadict = dict()
+if 'metadata' in f.infos.keys():
+    if '/Simulation/Parameters' in f.infos['metadata'].keys():
+        metadatadict = f.infos['metadata']['/Simulation/Parameters']
+        if isinstance(metadatadict, list):
+            overlaylog.warning("Input HITS file %s contained %s sets of Simulation Metadata. Using the final set in the list.", getHITSFile(runArgs), len(metadatadict))
+            metadatadict=metadatadict[-1]
+        if 'RunNumber' in metadatadict.keys():
+            year = metadatadict['RunNumber']%100
+            print "Found Year = %s", year
+            from RecExConfig.RecFlags import rec
+            rec.projectName = 'data'+str(year)
+            pass
+        pass
+    pass
+pass
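+# Example (illustrative run number): RunNumber=191920 gives year = 191920 % 100 = 20,
+# so rec.projectName is set to "data20".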
+
+import MagFieldServices.SetupField
+
+from IOVDbSvc.CondDB import conddb
+
+if hasattr(runArgs, 'conditionsTag') and runArgs.conditionsTag!='NONE' and runArgs.conditionsTag!='':
+   globalflags.ConditionsTag=runArgs.conditionsTag
+   if len(globalflags.ConditionsTag())!=0:
+      conddb.setGlobalTag(globalflags.ConditionsTag())
+
+OverlayFlags.Print()
+
+# LVL1 Trigger Menu
+if hasattr(runArgs, "triggerConfig") and runArgs.triggerConfig!="NONE":
+    # LVL1 Trigger Menu
+    # PJB 9/2/2009 Setup the new triggerConfig flags here
+    from TriggerJobOpts.TriggerFlags import TriggerFlags
+    triggerArg = runArgs.triggerConfig
+    #if not prefixed with LVL1: add it here
+    Args = triggerArg.split(":")
+    if Args[0] != "LVL1":
+        TriggerFlags.triggerConfig ="LVL1:"+triggerArg
+    else:
+        TriggerFlags.triggerConfig =triggerArg
+    overlaylog.info( 'triggerConfig argument is: %s ', TriggerFlags.triggerConfig.get_Value() )
+    from TriggerJobOpts.TriggerConfigGetter import TriggerConfigGetter
+    cfg = TriggerConfigGetter("HIT2RDO")
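+    # Note: any triggerConfig value not already prefixed with "LVL1:" gets the prefix prepended
+    # here before TriggerConfigGetter("HIT2RDO") is instantiated.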
+
+from AthenaCommon.DetFlags import DetFlags
+DetFlags.ID_setOn()
+DetFlags.Muon_setOn()
+DetFlags.LAr_setOn()
+DetFlags.Tile_setOn()
+if not hasattr(runArgs, "triggerConfig") or runArgs.triggerConfig=="NONE":
+  DetFlags.LVL1_setOff()
+else:
+  DetFlags.LVL1_setOn()
+
+DetFlags.BCM_setOn()
+DetFlags.Lucid_on()
+DetFlags.simulateLVL1.Lucid_setOff()
+
+print "================ DetFlags ================ "
+DetFlags.Print()
+
+include ( "RecExCond/AllDet_detDescr.py" )
+
+from AthenaCommon.AppMgr import theApp
+theApp.EventLoop = "PileUpEventLoopMgr"
+if hasattr( runArgs, 'maxEvents'):
+    theApp.EvtMax = runArgs.maxEvents
+
+include ( "EventOverlayJobTransforms/ConfiguredOverlay_jobOptions.py" )
+
+if OverlayFlags.doTruth():
+   include ( "EventOverlayJobTransforms/TruthOverlay_jobOptions.py" )
+
+if OverlayFlags.doBCM() or OverlayFlags.doLUCID():
+   include ( "EventOverlayJobTransforms/BeamOverlay_jobOptions.py" )
+
+if OverlayFlags.doPixel() or OverlayFlags.doSCT() or OverlayFlags.doTRT():
+
+   include ( "EventOverlayJobTransforms/InnerDetectorOverlay_jobOptions.py" )
+
+if OverlayFlags.doLAr() or OverlayFlags.doTile():
+   include ( "EventOverlayJobTransforms/CaloOverlay_jobOptions.py" )
+
+if OverlayFlags.doCSC() or OverlayFlags.doMDT() or OverlayFlags.doRPC() or OverlayFlags.doTGC():
+   include ( "EventOverlayJobTransforms/MuonOverlay_jobOptions.py" )
+
+if OverlayFlags.doLVL1():
+   include ( "EventOverlayJobTransforms/Level1Overlay_jobOptions.py" )
+
+# save the overlay output first
+include ( "EventOverlayJobTransforms/OverlayOutputItemList_jobOptions.py" )
+
+# now save the signal information in the same job
+if OverlayFlags.doSignal():
+   include ( "EventOverlayJobTransforms/SignalOutputItemList_jobOptions.py" )
+
+# For random number initialization
+from AthenaCommon.ConfigurableDb import getConfigurable
+ServiceMgr += getConfigurable(digitizationFlags.rndmSvc.get_Value())()
+digitizationFlags.rndmSeedList.addtoService()
+digitizationFlags.rndmSeedList.printSeeds()
+
+#================================================================
+print "overlay_trf: final outStream = ", outStream
+
+#================================================================
+# Logging
+#--------------------------------------------------------------
+#ServiceMgr.MessageSvc.OutputLevel = INFO
+ServiceMgr.MessageSvc.OutputLevel = INFO
+ServiceMgr.MessageSvc.Format = "% F%45W%S%7W%R%T %0W%M"
+
+if hasattr(runArgs, 'fSampltag'):
+    conddb.addFolder("LAR","/LAR/ElecCalib/fSampl/Symmetry")
+    conddb.addOverride( "/LAR/ElecCalib/fSampl/Symmetry", runArgs.fSampltag + digitizationFlags.physicsList.get_Value() )
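+    # the override tag applied to /LAR/ElecCalib/fSampl/Symmetry is the fSampltag argument with
+    # the sampling-fraction physics list tag appended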
+else:
+    raise RuntimeError ("--fSampltag not specified on command-line - see --help message")
+#if OverlayFlags.doSignal():
+#   InputDBConnection = "COOLOFL_LAR/COMP200"
+#   conddb.addFolder("","/LAR/ElecCalibOfl/AutoCorrs/AutoCorr"+"<dbConnection>"+InputDBConnection+"</dbConnection>")
+#   conddb.addOverride("/LAR/ElecCalibOfl/AutoCorrs/AutoCorr","")
+
+ServiceMgr.TileInfoLoader.filenameDeltaPhysicsSuffix="of2_Delta_Phys_7Samples"
+
+# Post-include
+if hasattr(runArgs,"postInclude"):
+    for fragment in runArgs.postInclude:
+        include(fragment)
+
+if hasattr(runArgs, "postExec") and runArgs.postExec != 'NONE':
+    exec( runArgs.postExec )
+
+#print "overlay_trf: at the end. job=\n", job
+print "\noverlay_trf: at the end. ServiceMgr=\n", ServiceMgr
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.OverlayPool_tf.py b/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.OverlayPool_tf.py
new file mode 100644
index 0000000000000000000000000000000000000000..6539ead8cdf28fba3125dea0f4a85fd7bcec797d
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.OverlayPool_tf.py
@@ -0,0 +1,188 @@
+from AthenaCommon.Logging import logging
+overlaylog = logging.getLogger('overlay')
+overlaylog.info( '****************** STARTING OVERLAY *****************' )
+
+overlaylog.info( '**** Transformation run arguments' )
+overlaylog.info( str(runArgs) )
+
+from PerfMonComps.PerfMonFlags import jobproperties as jp
+jp.PerfMonFlags.doMonitoring = True # to enable monitoring
+jp.PerfMonFlags.doFastMon = True    # to only enable a lightweight monitoring
+
+if hasattr(runArgs, "preExec") and runArgs.preExec != 'NONE':
+    exec( runArgs.preExec )
+
+import AthenaCommon.AtlasUnixStandardJob
+
+if hasattr(runArgs, 'preInclude'):
+    for cf in runArgs.preInclude:
+        include(cf)
+
+#==============================================================
+# Job definition parameters:
+#==============================================================
+from AthenaCommon.GlobalFlags  import globalflags
+
+globalflags.isOverlay.set_Value_and_Lock(True)
+
+if hasattr(runArgs,"maxEvents"): from AthenaCommon.AthenaCommonFlags  import athenaCommonFlags
+athenaCommonFlags.EvtMax = runArgs.maxEvents
+if hasattr(runArgs,"skipEvents"): athenaCommonFlags.SkipEvents= runArgs.skipEvents
+
+if hasattr(runArgs,"inputHITSFile"):
+    athenaCommonFlags.PoolHitsInput.set_Value_and_Lock( runArgs.inputHITSFile )
+    SignalInputCollections = runArgs.inputHITSFile
+else:
+    raise RuntimeError ("No input HITS file defined")
+
+if hasattr(runArgs,"outputRDOFile"): 
+    athenaCommonFlags.PoolRDOOutput.set_Value_and_Lock( runArgs.outputRDOFile )
+    OverlayCollection = runArgs.outputRDOFile
+
+if hasattr(runArgs,"geometryVersion"): globalFlags.DetDescrVersion=runArgs.geometryVersion
+
+from Digitization.DigitizationFlags import digitizationFlags
+if hasattr(runArgs,"digiSeedOffset1"): digitizationFlags.rndmSeedOffset1=int(runArgs.digiSeedOffset1)
+if hasattr(runArgs,"digiSeedOffset2"): digitizationFlags.rndmSeedOffset2=int(runArgs.digiSeedOffset2)
+if hasattr(runArgs,"samplingFractionDbTag"): digitizationFlags.physicsList=runArgs.samplingFractionDbTag
+if hasattr(runArgs,"digiRndmSvc"): digitizationFlags.rndmSvc=runArgs.digiRndmSvc
+
+from OverlayCommonAlgs.OverlayFlags import OverlayFlags
+if not hasattr(runArgs, 'outputRDO_SGNLFile') or runArgs.outputRDO_SGNLFile=="NONE":
+   OverlayFlags.set_SignalOff()
+   SignalCollection = "NONE"
+else:
+   SignalCollection = runArgs.outputRDO_SGNLFile
+
+OverlayFlags.set_BCMOff()
+OverlayFlags.set_LUCIDOff()
+
+readBS = False
+if hasattr(runArgs, 'ReadByteStream'):
+    readBS = runArgs.ReadByteStream
+isRealData = False
+
+if readBS:
+   globalflags.InputFormat.set_Value_and_Lock('bytestream')
+   DataInputCollections=runArgs.inputPileUpBSFile
+else:
+   DataInputCollections=runArgs.inputRDO_BKGFile
+   athenaCommonFlags.PoolRDOInput=runArgs.inputRDO_BKGFile
+
+if hasattr(runArgs, 'conditionsTag') and runArgs.conditionsTag!='NONE' and runArgs.conditionsTag!='':
+   globalflags.ConditionsTag=runArgs.conditionsTag
+   if len(globalflags.ConditionsTag())!=0:
+      from IOVDbSvc.CondDB import conddb
+      conddb.setGlobalTag(globalflags.ConditionsTag())
+
+OverlayFlags.Print()
+
+# LVL1 Trigger Menu
+if hasattr(runArgs, "triggerConfig") and runArgs.triggerConfig!="NONE":
+    # LVL1 Trigger Menu
+    # PJB 9/2/2009 Setup the new triggerConfig flags here
+    from TriggerJobOpts.TriggerFlags import TriggerFlags
+    triggerArg = runArgs.triggerConfig
+    #if not prefixed with LVL1: add it here
+    Args = triggerArg.split(":")
+    if Args[0] != "LVL1":
+        TriggerFlags.triggerConfig ="LVL1:"+triggerArg
+    else:
+        TriggerFlags.triggerConfig =triggerArg
+    overlaylog.info( 'triggerConfig argument is: %s ', TriggerFlags.triggerConfig.get_Value() )
+    from TriggerJobOpts.TriggerConfigGetter import TriggerConfigGetter
+    cfg = TriggerConfigGetter("HIT2RDO")
+
+from AthenaCommon.DetFlags import DetFlags
+DetFlags.ID_setOn()
+DetFlags.Muon_setOn()
+DetFlags.LAr_setOn()
+DetFlags.Tile_setOn()
+if hasattr(runArgs, "triggerConfig") and runArgs.triggerConfig=="NONE":
+  DetFlags.LVL1_setOff()
+else:
+  DetFlags.LVL1_setOn()
+
+DetFlags.BCM_setOn()
+DetFlags.Lucid_on()
+
+DetFlags.simulateLVL1.Lucid_setOff()
+
+print "================ DetFlags ================ "
+DetFlags.Print()
+
+# Geometry, controlled by DetFlags
+#GlobalFlags.DetGeo.set_atlas()
+#GlobalFlags.DataSource.set_geant4()
+globalflags.DataSource.set_Value_and_Lock('geant4')
+
+#from AtlasGeoModel import SetGeometryVersion
+#from AtlasGeoModel import GeoModelInit
+
+include ( "RecExCond/AllDet_detDescr.py" )
+
+#from AtlasGeoModel import SetGeometryVersion
+#from AtlasGeoModel import GeoModelInit
+#from AtlasGeoModel import SetupRecoGeometry
+
+#include( "BFieldAth/BFieldAth_jobOptions.py" )
+
+#--------
+from AthenaCommon.AppMgr import theApp
+theApp.EventLoop = "PileUpEventLoopMgr"
+if hasattr(runArgs,"maxEvents"): theApp.EvtMax = runArgs.maxEvents
+
+include ( "EventOverlayJobTransforms/ConfiguredOverlay_jobOptions.py" )
+
+if OverlayFlags.doTruth():
+   include ( "EventOverlayJobTransforms/TruthOverlay_jobOptions.py" )
+
+if OverlayFlags.doBCM() or OverlayFlags.doLUCID():
+   include ( "EventOverlayJobTransforms/BeamOverlay_jobOptions.py" )
+
+if OverlayFlags.doPixel() or OverlayFlags.doSCT() or OverlayFlags.doTRT():
+   include ( "EventOverlayJobTransforms/InnerDetectorOverlay_jobOptions.py" )
+
+if OverlayFlags.doLAr() or OverlayFlags.doTile():
+   include ( "EventOverlayJobTransforms/CaloOverlay_jobOptions.py" )
+
+if OverlayFlags.doCSC() or OverlayFlags.doMDT() or OverlayFlags.doRPC() or OverlayFlags.doTGC():
+   include ( "EventOverlayJobTransforms/MuonOverlay_jobOptions.py" )
+
+if OverlayFlags.doLVL1():
+   include ( "EventOverlayJobTransforms/Level1Overlay_jobOptions.py" )
+
+# save the overlay output first
+include ( "EventOverlayJobTransforms/OverlayOutputItemList_jobOptions.py" )
+
+# now save the signal information in the same job
+if OverlayFlags.doSignal():
+   include ( "EventOverlayJobTransforms/SignalOutputItemList_jobOptions.py" )
+
+# For random number initialization
+from AthenaCommon.ConfigurableDb import getConfigurable
+ServiceMgr += getConfigurable(digitizationFlags.rndmSvc.get_Value())()
+digitizationFlags.rndmSeedList.addtoService()
+digitizationFlags.rndmSeedList.printSeeds()
+
+# To not overwrite the BCID
+#from AthenaCommon.AppMgr import ServiceMgr
+#ServiceMgr.PileUpEventLoopMgr.IsEventOverlayJob=True
+
+#================================================================
+print "overlay_trf: final outStream = ", outStream
+
+#================================================================
+# Logging
+#--------------------------------------------------------------
+#ServiceMgr.MessageSvc.OutputLevel = INFO
+ServiceMgr.MessageSvc.OutputLevel = INFO
+ServiceMgr.MessageSvc.Format = "% F%45W%S%7W%R%T %0W%M"
+
+print "overlay_trf: at the end. job=\n", job
+print "\n\noverlay_trf: at the end. ServiceMgr=\n", ServiceMgr
+
+# Post-include
+if hasattr(runArgs,"postInclude"):
+    for fragment in runArgs.postInclude:
+        include(fragment)
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.combinedinput.py b/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.combinedinput.py
new file mode 100644
index 0000000000000000000000000000000000000000..a6775e93b71a8f1965a1794453961491b53a1775
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.combinedinput.py
@@ -0,0 +1,229 @@
+include("SimuJobTransforms/CommonSkeletonJobOptions.py")
+
+## Get the logger
+from AthenaCommon.Logging import *
+atlasG4log = logging.getLogger('AtlasG4')
+atlasG4log.info('****************** STARTING ATLASG4 ******************')
+
+
+## Simulation flags need to be imported first
+from G4AtlasApps.SimFlags import SimFlags, simFlags #FIXME drop import of SimFlags rather than simFlags asap
+simFlags.load_atlas_flags()
+
+
+## Set simulation geometry tag
+if hasattr(runArgs, 'geometryVersion'):
+    simFlags.SimLayout.set_Value_and_Lock(runArgs.geometryVersion)
+    atlasG4log.debug('SimLayout set to %s' % simFlags.SimLayout)
+else:
+    raise RuntimeError("No geometryVersion provided.")
+
+
+## AthenaCommon flags
+from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
+# Jobs should stop if an include fails.
+if hasattr(runArgs, "IgnoreConfigError"):
+    athenaCommonFlags.AllowIgnoreConfigError = runArgs.IgnoreConfigError
+else:
+    athenaCommonFlags.AllowIgnoreConfigError = False
+if hasattr(runArgs, "inputFile"):
+    athenaCommonFlags.FilesInput.set_Value_and_Lock( runArgs.inputFile )
+from AthenaCommon.BeamFlags import jobproperties
+if hasattr(runArgs, "inputEvgenFile"):
+    globalflags.InputFormat.set_Value_and_Lock('pool')
+    athenaCommonFlags.PoolEvgenInput.set_Value_and_Lock( runArgs.inputEvgenFile )
+    # We don't expect both inputFile and inputEvgenFile to be specified
+    athenaCommonFlags.FilesInput.set_Value_and_Lock( runArgs.inputEvgenFile )
+elif jobproperties.Beam.beamType.get_Value() == 'cosmics':
+    atlasG4log.debug('No inputEvgenFile provided. OK, as performing cosmics simulation.')
+    athenaCommonFlags.PoolEvgenInput.set_Off()
+else:
+    atlasG4log.info('No inputEvgenFile provided. Assuming that you are running a generator on the fly.')
+    athenaCommonFlags.PoolEvgenInput.set_Off()
+
+import os
+
+os.system("rm -f combinedinputfile.txt")
+
+f = open('overlayfile.txt', 'r')
+overlay = f.readline()
+overlay = overlay.rstrip()
+
+for evgenfile in runArgs.inputEvgenFile:
+  combinedinput = "grep \"inputEvgenFile=" + evgenfile + " " + overlay + "\" matchfile.txt | sed s/" + overlay + "//  >> combinedinputfile.txt"
+  os.system(combinedinput)
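+# Assumed layout of the auxiliary files (inferred from the grep/sed above): overlayfile.txt holds
+# the name of one overlay (zero-bias) file, and each matchfile.txt line pairs an
+# "inputEvgenFile=<evgen file>" entry with that overlay file name; the sed strips the overlay name
+# so combinedinputfile.txt ends up with one matching inputEvgenFile entry per input evgen file.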
+
+## Handle cosmics configs
+if jobproperties.Beam.beamType.get_Value() == 'cosmics':
+    if hasattr(runArgs, "inputEvgenFile"):
+        if simFlags.CosmicFilterVolumeName.statusOn and simFlags.CosmicFilterVolumeName.get_Value() != "Muon":
+            atlasG4log.warning("Filtering was already done. Using CosmicFilterVolumeName=Muon rather than "
+                               "provided value (%s)" % str(runArgs.CosmicFilterVolumeName))
+        simFlags.CosmicFilterVolumeName = "Muon"
+    else:
+        simFlags.CosmicFilterVolumeName = getattr(runArgs, "CosmicFilterVolume", "InnerDetector")
+        simFlags.CosmicFilterVolumeName2 = getattr(runArgs, "CosmicFilterVolume2", "NONE")
+        simFlags.CosmicPtSlice = getattr(runArgs, "CosmicPtSlice", "NONE")
+
+
+## Output hits file config
+if hasattr(runArgs, "outputHitsFile"):
+    athenaCommonFlags.PoolHitsOutput.set_Value_and_Lock( runArgs.outputHitsFile )
+else:
+    #raise RuntimeError("No outputHitsFile provided.")
+    atlasG4log.info('No outputHitsFile provided. This simulation job will not write out any HITS file.')
+    athenaCommonFlags.PoolHitsOutput = ""
+    athenaCommonFlags.PoolHitsOutput.statusOn = False
+
+
+## Write out runArgs configuration
+atlasG4log.info( '**** Transformation run arguments' )
+atlasG4log.info( str(runArgs) )
+
+
+#==============================================================
+# Job Configuration parameters:
+#==============================================================
+## Pre-exec
+if hasattr(runArgs, "preExec"):
+    atlasG4log.info("transform pre-exec")
+    for cmd in runArgs.preExec:
+        atlasG4log.info(cmd)
+        exec(cmd)
+
+## Pre-include
+if hasattr(runArgs, "preInclude"):
+    for fragment in runArgs.preInclude:
+        include(fragment)
+
+include( "AthenaCommon/MemTraceInclude.py" )
+
+
+## Select detectors
+if 'DetFlags' not in dir():
+    ## If you configure one det flag, you're responsible for configuring them all!
+    from AthenaCommon.DetFlags import DetFlags
+    DetFlags.all_setOn()
+DetFlags.LVL1_setOff() # LVL1 is not part of G4 sim
+DetFlags.Truth_setOn()
+## Forward dets are off by default
+DetFlags.Lucid_setOff()
+DetFlags.ALFA_setOff()
+DetFlags.ZDC_setOff()
+if hasattr(runArgs, "LucidOn"):
+    if runArgs.LucidOn:
+        DetFlags.Lucid_setOn()
+if hasattr(runArgs, "ALFAOn"):
+    if runArgs.ALFAOn:
+        DetFlags.ALFA_setOn()
+if hasattr(runArgs, "ZDCOn"):
+    if runArgs.ZDCOn:
+        DetFlags.ZDC_setOn()
+
+
+## Random seed
+if hasattr(runArgs, "randomSeed"):
+    simFlags.RandomSeedOffset = int(runArgs.randomSeed)
+else:
+    atlasG4log.warning('randomSeed not set')
+if hasattr(runArgs, 'runNumber'):
+    simFlags.RunNumber = runArgs.runNumber
+
+if hasattr(runArgs, 'physicsList'):
+    simFlags.PhysicsList = runArgs.physicsList
+
+## Handle cosmics track record
+from AthenaCommon.BeamFlags import jobproperties
+if jobproperties.Beam.beamType.get_Value() == 'cosmics':
+    if hasattr(runArgs, "inputEvgenFile"):
+        simFlags.ReadTR = athenaCommonFlags.PoolEvgenInput()[0]
+    else:
+        if hasattr(runArgs, "outputEvgenFile"):
+            simFlags.WriteTR = runArgs.outputEvgenFile
+        include( 'CosmicGenerator/jobOptions_ConfigCosmicProd.py' )
+
+
+## Add filters for non-cosmics simulation
+if jobproperties.Beam.beamType.get_Value() != 'cosmics':
+    if simFlags.CavernCuts:
+        simFlags.EventFilter.set_Off()
+        # Make a bigger world volume for cavern bg
+        simFlags.WorldZRange.set_Value(24000)
+    else:
+        simFlags.EventFilter.set_On()
+
+
+## Add G4 alg to alg sequence
+from AthenaCommon.AlgSequence import AlgSequence
+topSeq = AlgSequence()
+from G4AtlasApps.PyG4Atlas import PyG4AtlasAlg
+topSeq += PyG4AtlasAlg()
+
+
+## Add AMITag MetaData to TagInfoMgr
+if hasattr(runArgs, 'AMITag'):
+    if runArgs.AMITag != "NONE":
+        from AthenaCommon.AppMgr import ServiceMgr as svcMgr
+        svcMgr.TagInfoMgr.ExtraTagValuePairs += ["AMITag", runArgs.AMITag]
+
+
+## Random number configuration
+from G4AtlasAlg.G4AtlasAlgConf import G4AtlasAlg
+g4AtlasAlg = G4AtlasAlg()
+g4AtlasAlg.RandomGenerator = "athena"
+from AthenaCommon.AppMgr import ServiceMgr
+from AthenaServices.AthenaServicesConf import AtRndmGenSvc
+## Set up AtRndmGenSvc (don't use the SeedsG4 override)
+simFlags.SeedsG4.set_Off()
+atRndmGenSvc = AtRndmGenSvc()
+ServiceMgr += atRndmGenSvc
+if jobproperties.Beam.beamType.get_Value() == 'cosmics':
+    if athenaCommonFlags.SkipEvents.statusOn:
+        atRndmGenSvc.Seeds += ["COSMICS %d %d" % (athenaCommonFlags.SkipEvents.get_Value()+2040160768, simFlags.RandomSeedOffset.get_Value())]
+    else:
+        atRndmGenSvc.Seeds += ["COSMICS 2040160768 %d" % (simFlags.RandomSeedOffset.get_Value())]
+
+if athenaCommonFlags.SkipEvents.statusOn:
+    atRndmGenSvc.Seeds += ["AtlasG4 %d %d " % (athenaCommonFlags.SkipEvents.get_Value()+1, simFlags.RandomSeedOffset.get_Value())]
+else:
+    atRndmGenSvc.Seeds += ["AtlasG4 1 %d " % (simFlags.RandomSeedOffset.get_Value())]
+atlasG4log.info( "Random seeds set to: '%s'", repr(atRndmGenSvc.Seeds) )
+
+
+## Set firstEvent for cosmics jobs
+if jobproperties.Beam.beamType.get_Value() == 'cosmics':
+    if hasattr(runArgs, "firstEvent"):
+        #print runArgs.firstEvent
+        svcMgr.EventSelector.FirstEvent = runArgs.firstEvent
+    else:
+        svcMgr.EventSelector.FirstEvent = 0
+
+
+## Increase max RDO output file size to 10 GB
+## NB. We use 10GB since Athena complains that 15GB files are not supported
+from AthenaCommon.AppMgr import ServiceMgr as svcMgr
+svcMgr.AthenaPoolCnvSvc.MaxFileSizes = [ "10000000000" ]
+
+
+## Post-include
+if hasattr(runArgs, "postInclude"):
+    for fragment in runArgs.postInclude:
+        include(fragment)
+
+## Post-exec
+if hasattr(runArgs, "postExec"):
+    atlasG4log.info("transform post-exec")
+    for cmd in runArgs.postExec:
+        atlasG4log.info(cmd)
+        exec(cmd)
+
+
+## Always enable the looper killer, unless it's been disabled
+if not hasattr(runArgs, "enableLooperKiller") or runArgs.enableLooperKiller:
+    def use_looperkiller():
+        from G4AtlasApps import PyG4Atlas, AtlasG4Eng
+        lkAction = PyG4Atlas.UserAction('G4UserActions', 'LooperKiller', ['BeginOfRun', 'EndOfRun', 'BeginOfEvent', 'EndOfEvent', 'Step'])
+        AtlasG4Eng.G4Eng.menu_UserActions.add_UserAction(lkAction)
+    simFlags.InitFunctions.add_function("postInit", use_looperkiller)
+else:
+    atlasG4log.warning("The looper killer will NOT be run in this job.")
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.commis.py b/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.commis.py
new file mode 100644
index 0000000000000000000000000000000000000000..6c0eb9d2b863ac8fe97cd539948db4dd5286dc8c
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.commis.py
@@ -0,0 +1,227 @@
+from AthenaCommon.Logging import logging
+overlaylog = logging.getLogger('overlay')
+overlaylog.info( '****************** STARTING OVERLAY *****************' )
+
+from PerfMonComps.PerfMonFlags import jobproperties as jp
+jp.PerfMonFlags.doMonitoring = True # to enable monitoring
+jp.PerfMonFlags.doFastMon = True    # to only enable a lightweight monitoring
+
+if hasattr(runArgs, "preExec") and runArgs.preExec != 'NONE':
+    exec( runArgs.preExec )
+
+TileFrameLength=7
+
+from AthenaCommon.AppMgr import ServiceMgr
+from AthenaCommon.GlobalFlags import GlobalFlags
+from AthenaCommon.GlobalFlags  import globalflags
+from AthenaCommon.AthenaCommonFlags  import athenaCommonFlags
+
+from MuonRecExample.MuonRecFlags import muonRecFlags
+muonRecFlags.doCSCs.set_Value_and_Lock(True)
+
+from LArConditionsCommon.LArCondFlags import larCondFlags
+larCondFlags.LArCoolChannelSelection.set_Value_and_Lock("")
+larCondFlags.useShape.set_Value_and_Lock(True)
+larCondFlags.OFCShapeFolder.set_Value_and_Lock("")
+
+globalflags.isOverlay.set_Value_and_Lock(True)
+isRealData = globalflags.isOverlay()
+
+from AthenaCommon.BeamFlags import jobproperties
+jobproperties.Beam.beamType.set_Value_and_Lock("collisions")
+
+import AthenaCommon.AtlasUnixStandardJob
+
+for cf in runArgs.jobConfig:
+    include(cf)
+
+#==============================================================
+# Job definition parameters:
+#==============================================================
+from AthenaCommon.GlobalFlags import GlobalFlags
+from AthenaCommon.GlobalFlags  import globalflags
+
+
+from AthenaCommon.AthenaCommonFlags  import athenaCommonFlags
+
+if hasattr(runArgs,"pileupBSFile"): athenaCommonFlags.FilesInput.set_Value_and_Lock( runArgs.pileupBSFile )
+
+if hasattr(runArgs,"skipEvents"): athenaCommonFlags.SkipEvents.set_Value_and_Lock( runArgs.skipEvents )
+if hasattr(runArgs,"maxEvents"): athenaCommonFlags.EvtMax.set_Value_and_Lock( runArgs.maxEvents )
+
+if hasattr(runArgs,"inputHitsFile"): athenaCommonFlags.PoolHitsInput.set_Value_and_Lock( runArgs.inputHitsFile )
+if hasattr(runArgs,"outputRDOFile"): athenaCommonFlags.PoolRDOOutput.set_Value_and_Lock( runArgs.outputRDOFile )
+
+OverlayCollection = runArgs.outputRDOFile
+
+from OverlayCommonAlgs.OverlayFlags import OverlayFlags
+SignalCollection = runArgs.signalRDOFile
+if runArgs.signalRDOFile=="NONE":
+   OverlayFlags.set_SignalOff()
+
+if hasattr(runArgs,"geometryVersion"): globalflags.DetDescrVersion.set_Value_and_Lock( runArgs.geometryVersion )
+if hasattr(runArgs,"conditionsTag"): globalflags.ConditionsTag.set_Value_and_Lock( runArgs.conditionsTag )
+
+from Digitization.DigitizationFlags import jobproperties
+jobproperties.Digitization.rndmSeedOffset1=int(runArgs.digiSeedOffset1)
+jobproperties.Digitization.rndmSeedOffset2=int(runArgs.digiSeedOffset2)
+jobproperties.Digitization.physicsList=runArgs.samplingFractionDbTag
+jobproperties.Digitization.rndmSvc=runArgs.digiRndmSvc
+
+SignalInputCollections = runArgs.inputHitsFile
+
+readBS = True
+OverlayFlags.set_BkgOff() #ACH
+
+#GlobalFlags.InputFormat.set_bytestream()
+globalflags.InputFormat.set_Value_and_Lock('bytestream')
+globalflags.DetGeo = 'commis'
+globalflags.DataSource.set_Value_and_Lock('data')
+#GlobalFlags.DataSource.set_data()
+
+DataInputCollections=runArgs.pileupBSFile
+
+import PyUtils.AthFile as af
+def getHITSFile(runArgs):
+    if hasattr(runArgs,"inputHITSFile"):
+        return runArgs.inputHITSFile[0]
+    elif hasattr(runArgs,"inputHitsFile"):
+        return runArgs.inputHitsFile[0]
+    else:
+        raise SystemExit("No HITS file in runArgs!!")
+try:
+    f = af.fopen(getHITSFile(runArgs))
+except AssertionError:
+    overlaylog.error("Failed to open input file: %s", getHITSFile(runArgs))
+metadatadict = dict()
+if 'metadata' in f.infos.keys():
+    if '/Simulation/Parameters' in f.infos['metadata'].keys():
+        metadatadict = f.infos['metadata']['/Simulation/Parameters']
+        if isinstance(metadatadict, list):
+            overlaylog.warning("Input HITS file %s contained %s sets of Simulation Metadata. Using the final set in the list.", getHITSFile(runArgs), len(metadatadict))
+            metadatadict=metadatadict[-1]
+        if 'RunNumber' in metadatadict.keys():
+            year = metadatadict['RunNumber']%100
+            print "Found Year = %s", year
+            from RecExConfig.RecFlags import rec
+            rec.projectName = 'data'+str(year)
+            pass
+        pass
+    pass
+pass
+
+import MagFieldServices.SetupField
+
+from IOVDbSvc.CondDB import conddb
+
+if runArgs.conditionsTag!='NONE' and runArgs.conditionsTag!='':
+   globalflags.ConditionsTag=runArgs.conditionsTag
+   if len(globalflags.ConditionsTag())!=0:
+      conddb.setGlobalTag(globalflags.ConditionsTag())
+
+OverlayFlags.Print()
+
+# LVL1 Trigger Menu
+if hasattr(runArgs, "triggerConfig") and runArgs.triggerConfig!="NONE":
+    # LVL1 Trigger Menu
+    # PJB 9/2/2009 Setup the new triggerConfig flags here
+    from TriggerJobOpts.TriggerFlags import TriggerFlags
+    triggerArg = runArgs.triggerConfig
+    #if not prefixed with LVL1: add it here
+    Args = triggerArg.split(":")
+    if Args[0] != "LVL1":
+        TriggerFlags.triggerConfig ="LVL1:"+triggerArg
+    else:
+        TriggerFlags.triggerConfig =triggerArg
+    overlaylog.info( 'triggerConfig argument is: %s ', TriggerFlags.triggerConfig.get_Value() )
+    from TriggerJobOpts.TriggerConfigGetter import TriggerConfigGetter
+    cfg = TriggerConfigGetter("HIT2RDO")
+
+from AthenaCommon.DetFlags import DetFlags
+DetFlags.ID_setOn()
+DetFlags.Muon_setOn()
+DetFlags.LAr_setOn()
+DetFlags.Tile_setOn()
+if not hasattr(runArgs, "triggerConfig") or runArgs.triggerConfig=="NONE":
+  DetFlags.LVL1_setOff()
+else:
+  DetFlags.LVL1_setOn()
+
+DetFlags.BCM_setOn()
+DetFlags.Lucid_on()
+
+DetFlags.simulateLVL1.Lucid_setOff()
+
+print "================ DetFlags ================ "
+DetFlags.Print()
+
+include ( "RecExCond/AllDet_detDescr.py" )
+
+from AthenaCommon.AppMgr import theApp
+theApp.EventLoop = "PileUpEventLoopMgr"
+theApp.EvtMax = runArgs.maxEvents
+
+include ( "EventOverlayJobTransforms/ConfiguredOverlay_jobOptions.py" )
+
+if OverlayFlags.doTruth():
+   include ( "EventOverlayJobTransforms/TruthOverlay_jobOptions.py" )
+
+if OverlayFlags.doBCM() or OverlayFlags.doLUCID():
+   include ( "EventOverlayJobTransforms/BeamOverlay_jobOptions.py" )
+
+if OverlayFlags.doPixel() or OverlayFlags.doSCT() or OverlayFlags.doTRT():
+
+   include ( "EventOverlayJobTransforms/InnerDetectorOverlay_jobOptions.py" )
+
+if OverlayFlags.doLAr() or OverlayFlags.doTile():
+   include ( "EventOverlayJobTransforms/CaloOverlay_jobOptions.py" )
+
+if OverlayFlags.doCSC() or OverlayFlags.doMDT() or OverlayFlags.doRPC() or OverlayFlags.doTGC():
+   include ( "EventOverlayJobTransforms/MuonOverlay_jobOptions.py" )
+
+if OverlayFlags.doLVL1():
+   include ( "EventOverlayJobTransforms/Level1Overlay_jobOptions.py" )
+
+# save the overlay output first
+include ( "EventOverlayJobTransforms/OverlayOutputItemList_jobOptions.py" )
+
+# now save the signal information in the same job
+if OverlayFlags.doSignal():
+   include ( "EventOverlayJobTransforms/SignalOutputItemList_jobOptions.py" )
+
+# For random number initialization
+from AthenaCommon.ConfigurableDb import getConfigurable
+ServiceMgr += getConfigurable(jobproperties.Digitization.rndmSvc.get_Value())()
+jobproperties.Digitization.rndmSeedList.addtoService()
+jobproperties.Digitization.rndmSeedList.printSeeds()
+
+#================================================================
+print "overlay_trf: final outStream = ", outStream
+
+#================================================================
+# Logging
+#--------------------------------------------------------------
+#ServiceMgr.MessageSvc.OutputLevel = INFO
+ServiceMgr.MessageSvc.OutputLevel = INFO
+ServiceMgr.MessageSvc.Format = "% F%45W%S%7W%R%T %0W%M"
+
+conddb.addFolder("LAR","/LAR/ElecCalib/fSampl/Symmetry")
+conddb.addOverride("/LAR/ElecCalib/fSampl/Symmetry",runArgs.fSampltag+runArgs.samplingFractionDbTag )
+
+#if OverlayFlags.doSignal():
+#   InputDBConnection = "COOLOFL_LAR/COMP200"
+#   conddb.addFolder("","/LAR/ElecCalibOfl/AutoCorrs/AutoCorr"+"<dbConnection>"+InputDBConnection+"</dbConnection>")
+#   conddb.addOverride("/LAR/ElecCalibOfl/AutoCorrs/AutoCorr","")
+
+ServiceMgr.TileInfoLoader.filenameDeltaPhysicsSuffix="of2_Delta_Phys_7Samples"
+
+# Post-include
+if hasattr(runArgs,"postInclude"):
+    for fragment in runArgs.postInclude:
+        include(fragment)
+
+if hasattr(runArgs, "postExec") and runArgs.postExec != 'NONE':
+    exec( runArgs.postExec )
+
+#print "overlay_trf: at the end. job=\n", job
+print "\noverlay_trf: at the end. ServiceMgr=\n", ServiceMgr
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.overlay.py b/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.overlay.py
new file mode 100644
index 0000000000000000000000000000000000000000..0afe6cc01841677bb00540e66034f26be307bc0f
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/skeleton.overlay.py
@@ -0,0 +1,173 @@
+from AthenaCommon.Logging import logging
+overlaylog = logging.getLogger('overlay')
+overlaylog.info( '****************** STARTING OVERLAY *****************' )
+
+import AthenaCommon.AtlasUnixStandardJob
+
+for cf in runArgs.jobConfig:
+    include(cf)
+
+#==============================================================
+# Job definition parameters:
+#==============================================================
+from AthenaCommon.GlobalFlags import GlobalFlags
+from AthenaCommon.GlobalFlags  import globalflags
+
+globalflags.isOverlay.set_Value_and_Lock(True)
+
+from AthenaCommon.AthenaCommonFlags  import athenaCommonFlags
+from AthenaCommon.AthenaCommonFlags import jobproperties
+jobproperties.AthenaCommonFlags.EvtMax = runArgs.maxEvents
+jobproperties.AthenaCommonFlags.SkipEvents= runArgs.skipEvents
+
+jobproperties.AthenaCommonFlags.PoolHitsInput=runArgs.inputHitsFile
+jobproperties.AthenaCommonFlags.PoolRDOOutput=runArgs.outputRDOFile
+
+from AthenaCommon.GlobalFlags import jobproperties
+jobproperties.Global.DetDescrVersion=runArgs.geometryVersion
+
+from Digitization.DigitizationFlags import jobproperties
+jobproperties.Digitization.rndmSeedOffset1=int(runArgs.digiSeedOffset1)
+jobproperties.Digitization.rndmSeedOffset2=int(runArgs.digiSeedOffset2)
+jobproperties.Digitization.physicsList=runArgs.samplingFractionDbTag
+jobproperties.Digitization.rndmSvc=runArgs.digiRndmSvc
+
+SignalInputCollections = runArgs.inputHitsFile
+jobproperties.AthenaCommonFlags.PoolRDOOutput = runArgs.outputRDOFile
+OverlayCollection = runArgs.outputRDOFile
+
+from OverlayCommonAlgs.OverlayFlags import OverlayFlags
+SignalCollection = runArgs.signalRDOFile
+if runArgs.signalRDOFile=="NONE":
+   OverlayFlags.set_SignalOff()
+
+OverlayFlags.set_BCMOff()
+OverlayFlags.set_LUCIDOff()
+
+readBS = runArgs.ReadByteStream
+isRealData = False
+
+if readBS:
+   globalflags.InputFormat.set_Value_and_Lock('bytestream')
+   DataInputCollections=runArgs.inputBSFile
+else:
+   DataInputCollections=runArgs.pileupBSFile
+   jobproperties.AthenaCommonFlags.PoolRDOInput=runArgs.pileupBSFile
+
+if runArgs.conditionsTag!='NONE' and runArgs.conditionsTag!='':
+   globalflags.ConditionsTag=runArgs.conditionsTag
+   if len(globalflags.ConditionsTag())!=0:
+      from IOVDbSvc.CondDB import conddb
+      conddb.setGlobalTag(globalflags.ConditionsTag())
+
+OverlayFlags.Print()
+
+# LVL1 Trigger Menu
+if runArgs.triggerConfig!="NONE":
+    # LVL1 Trigger Menu
+    # PJB 9/2/2009 Setup the new triggerConfig flags here
+    from TriggerJobOpts.TriggerFlags import TriggerFlags
+    triggerArg = runArgs.triggerConfig
+    # if the value is not prefixed with "LVL1:", prepend it here
+    Args = triggerArg.split(":")
+    if Args[0] != "LVL1":
+        TriggerFlags.triggerConfig ="LVL1:"+triggerArg
+    else:
+        TriggerFlags.triggerConfig =triggerArg
+    overlaylog.info( 'triggerConfig argument is: %s ', TriggerFlags.triggerConfig.get_Value() )
+    from TriggerJobOpts.TriggerConfigGetter import TriggerConfigGetter
+    cfg = TriggerConfigGetter("HIT2RDO")
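+    # Note that the branch above only normalizes the prefix: an argument such as
+    # "MyMenu" becomes "LVL1:MyMenu", while "LVL1:MyMenu" is used as given
+    # ("MyMenu" is a placeholder, not a real menu name).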
+
+from AthenaCommon.DetFlags import DetFlags
+DetFlags.ID_setOn()
+DetFlags.Muon_setOn()
+DetFlags.LAr_setOn()
+DetFlags.Tile_setOn()
+if runArgs.triggerConfig=="NONE":
+  DetFlags.LVL1_setOff()
+else:
+  DetFlags.LVL1_setOn()
+
+DetFlags.BCM_setOn()
+DetFlags.Lucid_setOn()  # Lucid_on() is only a query; the setter is intended here
+
+DetFlags.simulateLVL1.Lucid_setOff()
+
+print "================ DetFlags ================ "
+DetFlags.Print()
+
+# Geometry, controlled by DetFlags
+#GlobalFlags.DetGeo.set_atlas()
+#GlobalFlags.DataSource.set_geant4()
+globalflags.DataSource.set_Value_and_Lock('geant4')
+
+#from AtlasGeoModel import SetGeometryVersion
+#from AtlasGeoModel import GeoModelInit
+
+include ( "RecExCond/AllDet_detDescr.py" )
+
+#from AtlasGeoModel import SetGeometryVersion
+#from AtlasGeoModel import GeoModelInit
+#from AtlasGeoModel import SetupRecoGeometry
+
+#include( "BFieldAth/BFieldAth_jobOptions.py" )
+
+#--------
+from AthenaCommon.AppMgr import theApp
+theApp.EventLoop = "PileUpEventLoopMgr"
+theApp.EvtMax = runArgs.maxEvents
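+# PileUpEventLoopMgr replaces the default event loop manager so that the background
+# stream can be read alongside the signal hits (assumed rationale; the concrete wiring
+# is done in the ConfiguredOverlay fragment included below).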
+
+include ( "EventOverlayJobTransforms/ConfiguredOverlay_jobOptions.py" )
+
+if OverlayFlags.doTruth():
+   include ( "EventOverlayJobTransforms/TruthOverlay_jobOptions.py" )
+
+if OverlayFlags.doBCM() or OverlayFlags.doLUCID():
+   include ( "EventOverlayJobTransforms/BeamOverlay_jobOptions.py" )
+
+if OverlayFlags.doPixel() or OverlayFlags.doSCT() or OverlayFlags.doTRT():
+   include ( "EventOverlayJobTransforms/InnerDetectorOverlay_jobOptions.py" )
+
+if OverlayFlags.doLAr() or OverlayFlags.doTile():
+   include ( "EventOverlayJobTransforms/CaloOverlay_jobOptions.py" )
+
+if OverlayFlags.doCSC() or OverlayFlags.doMDT() or OverlayFlags.doRPC() or OverlayFlags.doTGC():
+   include ( "EventOverlayJobTransforms/MuonOverlay_jobOptions.py" )
+
+if OverlayFlags.doLVL1():
+   include ( "EventOverlayJobTransforms/Level1Overlay_jobOptions.py" )
+
+# save the overlay output first
+include ( "EventOverlayJobTransforms/OverlayOutputItemList_jobOptions.py" )
+
+# now save the signal information in the same job
+if OverlayFlags.doSignal():
+   include ( "EventOverlayJobTransforms/SignalOutputItemList_jobOptions.py" )
+
+# For random number initialization
+from AthenaCommon.ConfigurableDb import getConfigurable
+ServiceMgr += getConfigurable(jobproperties.Digitization.rndmSvc.get_Value())()
+jobproperties.Digitization.rndmSeedList.addtoService()
+jobproperties.Digitization.rndmSeedList.printSeeds()
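+# The random number service named by the digiRndmSvc argument is instantiated here,
+# and the digitization seed list (already shifted by digiSeedOffset1/digiSeedOffset2
+# above) is registered with it and printed for bookkeeping.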
+
+# To not overwrite the BCID
+#from AthenaCommon.AppMgr import ServiceMgr
+#ServiceMgr.PileUpEventLoopMgr.IsEventOverlayJob=True
+
+#================================================================
+print "overlay_trf: final outStream = ", outStream
+
+#================================================================
+# Logging
+#--------------------------------------------------------------
+ServiceMgr.MessageSvc.OutputLevel = INFO
+ServiceMgr.MessageSvc.Format = "% F%45W%S%7W%R%T %0W%M"
+
+print "overlay_trf: at the end. job=\n", job
+print "\n\noverlay_trf: at the end. ServiceMgr=\n", ServiceMgr
+
+# Post-include
+if hasattr(runArgs,"postInclude"):
+    for fragment in runArgs.postInclude:
+        include(fragment)
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/synchFlags_jobOptions.py b/Event/EventOverlay/EventOverlayJobTransforms/share/synchFlags_jobOptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..10a2889fa17c59435fddd9729a356b044a8d8fe5
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/synchFlags_jobOptions.py
@@ -0,0 +1,117 @@
+# Setup job fragment to synchronize the Overlay Flags with the Detector Flags
+# Piyali Banerjee <piyali.banerjee@cern.ch> 07/06/10
+
+from AthenaCommon.DetFlags import DetFlags
+
+from OverlayCommonAlgs.OverlayFlags import OverlayFlags
+
+
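+# The explicit if/else blocks below copy each DetFlags sub-detector state onto the
+# matching OverlayFlags setter.  Purely as an illustration (kept commented out, and not
+# part of the executed logic), the per-detector portion of this mapping could also be
+# written as a loop:
+#
+#   for det, flag in [("pixel", "Pixel"), ("SCT", "SCT"), ("TRT", "TRT"),
+#                     ("LAr", "LAr"), ("Tile", "Tile"), ("MDT", "MDT"),
+#                     ("CSC", "CSC"), ("TGC", "TGC"), ("RPC", "RPC")]:
+#       if getattr(DetFlags, det + "_on")():
+#           getattr(OverlayFlags, "set_" + flag + "On")()
+#       else:
+#           getattr(OverlayFlags, "set_" + flag + "Off")()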
+if DetFlags.ID_on():
+    OverlayFlags.set_InDetOn()
+else:
+    OverlayFlags.set_InDetOff()
+
+if DetFlags.pixel_on():
+    OverlayFlags.set_PixelOn()
+else:
+    OverlayFlags.set_PixelOff()
+
+if DetFlags.TRT_on():
+    OverlayFlags.set_TRTOn()
+else:
+    OverlayFlags.set_TRTOff()
+
+if DetFlags.SCT_on():
+    OverlayFlags.set_SCTOn()
+else:
+    OverlayFlags.set_SCTOff()
+
+
+#if DetFlags.Beam_on():
+#    OverlayFlags.set_BeamOn()
+#else:
+#    OverlayFlags.set_BeamOff()
+
+if DetFlags.BCM_on():
+    OverlayFlags.set_BCMOn()
+else:
+    OverlayFlags.set_BCMOff()
+
+if DetFlags.Calo_on():
+    OverlayFlags.set_CaloOn()
+else:
+    OverlayFlags.set_CaloOff()
+
+if DetFlags.LAr_on():
+    OverlayFlags.set_LArOn()
+else:
+    OverlayFlags.set_LArOff()
+
+if DetFlags.Tile_on():
+    OverlayFlags.set_TileOn()
+else:
+    OverlayFlags.set_TileOff()
+
+
+if DetFlags.Muon_on():
+    OverlayFlags.set_MuonOn()
+else:
+    OverlayFlags.set_MuonOff()
+
+if DetFlags.MDT_on():
+    OverlayFlags.set_MDTOn()
+else:
+    OverlayFlags.set_MDTOff()
+
+if DetFlags.CSC_on():
+    OverlayFlags.set_CSCOn()
+else:
+    OverlayFlags.set_CSCOff()
+
+if DetFlags.TGC_on():
+    OverlayFlags.set_TGCOn()
+else:
+    OverlayFlags.set_TGCOff()
+
+if DetFlags.RPC_on():
+    OverlayFlags.set_RPCOn()
+else:
+    OverlayFlags.set_RPCOff()
+
+if DetFlags.Forward_on():
+    OverlayFlags.set_ForwardOn()
+#else:
+#    OverlayFlags.set_ForwardOff()
+
+if DetFlags.Lucid_on():
+    OverlayFlags.set_LucidOn()
+#else:
+#    OverlayFlags.set_LucidOff()
+
+if DetFlags.ALFA_on():
+    OverlayFlags.set_ALFAOn()
+#else:
+#    OverlayFlags.set_ALFAOff()
+
+if DetFlags.ZDC_on():
+    OverlayFlags.set_ZDCOn()
+#else:
+#    OverlayFlags.set_ZDCOff()
+
+
+#if DetFlags.Trigger_on():
+#    OverlayFlags.set_TriggerOn()
+#else:
+#    OverlayFlags.set_TriggerOff()
+
+if DetFlags.LVL1_on():
+    OverlayFlags.set_LVL1On()
+else:
+    OverlayFlags.set_LVL1Off()
+
+# Synchronizing the truth flag does not work
+#if DetFlags.Truth_on():
+#    OverlayFlags.set_TruthOn()
+#else:
+#    OverlayFlags.set_TruthOff()
diff --git a/Event/EventOverlay/EventOverlayJobTransforms/share/vertex_override.py b/Event/EventOverlay/EventOverlayJobTransforms/share/vertex_override.py
new file mode 100644
index 0000000000000000000000000000000000000000..46bf9b0098c4cfdb2460b46996afad6dbb0e1a70
--- /dev/null
+++ b/Event/EventOverlay/EventOverlayJobTransforms/share/vertex_override.py
@@ -0,0 +1,15 @@
+
+#For override of vertex position
+
+#old way, MC11, used to be a postInclude
+#from AthenaCommon.AlgSequence import AlgSequence
+#job = AlgSequence()
+#job.VertexPositionGenerator.VertexOverrideFile = "vtx.txt"
+
+#new way, MC12, should be a preInclude
+from G4AtlasApps.SimFlags import SimFlags
+SimFlags.load_atlas_flags()
+SimFlags.VertexOverrideFile.set_Value_and_Lock("vtx.txt")
+# Can also add "preInclude=SimulationJobOptions/preInclude.VerboseVertexPositioner.py"
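+# Usage sketch (assumption, not enforced by this file): pass this fragment as a
+# preInclude, e.g. preInclude=EventOverlayJobTransforms/vertex_override.py, and make
+# sure a vtx.txt file is present in the run directory; its format is whatever
+# SimFlags.VertexOverrideFile expects and is not documented here.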
+