diff --git a/Trigger/TriggerCommon/TrigTier0/cmt/requirements b/Trigger/TriggerCommon/TrigTier0/cmt/requirements
new file mode 100644
index 0000000000000000000000000000000000000000..e2455bfd77e516a7a56cff0fa50b7fcef84f4495
--- /dev/null
+++ b/Trigger/TriggerCommon/TrigTier0/cmt/requirements
@@ -0,0 +1,13 @@
+package TrigTier0
+author Moritz Backes <Moritz.Backes@cern.ch>
+
+## For Athena policies: it has to be the first use statement
+use AtlasPolicy 	AtlasPolicy-*
+
+## For Gaudi tools, services and objects
+use GaudiInterface 	GaudiInterface-* 	External
+
+
+## apply_pattern component_library
+apply_pattern declare_joboptions files="*.py"
+apply_pattern declare_python_modules files="*.py"
diff --git a/Trigger/TriggerCommon/TrigTier0/python/NtupleProdFlags.py b/Trigger/TriggerCommon/TrigTier0/python/NtupleProdFlags.py
new file mode 100644
index 0000000000000000000000000000000000000000..4d55b64d74aec52dae24b5aa06f7a6e514335ac0
--- /dev/null
+++ b/Trigger/TriggerCommon/TrigTier0/python/NtupleProdFlags.py
@@ -0,0 +1,183 @@
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+from AthenaCommon.JobProperties import JobProperty, JobPropertyContainer, jobproperties
+
+from AthenaCommon.Logging import logging
+
+__author__  = 'Emily Nurse <Emily.Nurse@cern.ch>, Joerg Stelzer <Joerg.Stelzer@cern.ch>'
+__doc__ = "Flags to steer the trigger ntuple production at the various sites and for development"
+
+
+###
+### property container class
+###
+class NtupleProductionFlags(JobPropertyContainer):
+    """Container for trigger ntuple production setup"""
+    log = logging.getLogger("TriggerFlags.NtupleProductionFlags")
+
+    def produceNtuples(self):
+        return self.ProductionLocation() != ''
+
+
+from TriggerJobOpts.TriggerFlags import TriggerFlags
+TriggerFlags.add_Container(NtupleProductionFlags)
+NPF = TriggerFlags.NtupleProductionFlags
+
+
+
+
+###
+### JobProperty SliceTuples: to group the different ntuples into files
+###
+class SliceTuples(JobProperty):
+    '''Slice specification, and Grouping
+
+    * A list of strings can be specified, each string corresponding to one root file.
+    
+    * Within a string, a comma-separated list of slices specifies the
+    ntuples that are contained in that file.
+    
+    * e.g. TriggerFlags.NtupleProductionFlags.SliceTuples = ["EGamma, Tau", "MinBias", "MET, Jet", "BPhys, BJet"]
+    '''
+    statusOn=True
+    allowedType=['list']
+    StoredValue=[]
+    # list of possible slice ntuples
+    definedSlices=['EGamma', 'Muon', 'MuonL2', 'MuonEF', 'Tau', 'Jet', 'MET', 'MinBias', 'BPhys', 'BJet','L1Calo','TrigMenu']
+    
+    def _do_action(self):
+        """Check if only defined slices are requested"""
+        if self.get_Value()==[]: return
+        for sl in ','.join(self.get_Value()).split(','):
+            if not sl.strip() in self.definedSlices:
+                raise RuntimeError, 'Slice "%s" is not a slice that has an ntuple defined! Must be one of %r !' % (sl,self.definedSlices)
+
+    def doSlice(self,slicename):
+        """Checks if a slice was requested
+
+        This is meant for the individual slices to ease enabling their
+        ntuple production. It respects the DisabledSlices flag.
+        """
+        if not slicename in self.definedSlices:
+            raise RuntimeError, 'Slice "%s" is not a slice that has an ntuple defined! Must be one of %r !' % (slicename,self.definedSlices)
+        inSliceTuples = slicename in [sl.strip() for sl in ','.join(self.get_Value()).split(',')]
+        inDisabledSlices = slicename in [sl.strip() for sl in NPF.DisabledSlices()]
+        return inSliceTuples and not inDisabledSlices
+
+NPF.add_JobProperty(SliceTuples)
+    
+
+
+###
+### JobProperty FileNames: File names for the different groups
+###
+class FileNames(JobProperty):
+    '''File name specification for each group of slices
+
+    * A list of file names must be specified, each corresponding to an entry in SliceTuples.
+
+    * Must always be set after SliceTuples has been specified, because the lengths are checked to agree.
+    
+    * e.g. TriggerFlags.NtupleProductionFlags.FileNames = ["EgammaTau.root", "MinBiasValidation.root", "MissingETandJet.root", "B.root"]
+    '''
+    statusOn=True
+    allowedType=['list']
+    StoredValue=[]
+
+    def _do_action(self):
+        """Check if number of filename agrees with number of slice groups"""
+        if not NPF.SliceTuples.isDefault() and len(NPF.SliceTuples()) != len(self.get_Value()):
+            raise RuntimeError, "Expected %i root file names, since that many slice groups were defined" % len(NPF.SliceTuples())
+
+    def forSlice(self,slicename): 
+        """return the root file name for an individual slice
+        
+        This is not to be used in production, where one algorithm
+        writes all the files. This function respects the DisabledSlices flag.
+        """
+
+        if not NPF.SliceTuples.doSlice(slicename):
+            return ""
+
+        if len(self.get_Value())==0:
+            NPF.log.error("NPF.FileNames is empty, please provide at least one file name")
+
+        for index, group in enumerate(NPF.SliceTuples()):
+            inSliceGroup = slicename in [sl.strip() for sl in group.split(',')]
+            if inSliceGroup:
+                if index>=len(self.get_Value()): index=0
+                return self.get_Value()[index]
+
+        raise RuntimeError, "Should not get here"
+
+        
+NPF.add_JobProperty(FileNames)
+
+
+###
+### JobProperty DisabledSlices: To quickly disable a certain slice from NTuple Production
+###
+class DisabledSlices(JobProperty):
+    '''Names of slices (list) to be excluded from production'''
+    statusOn=True
+    allowedType=['list']
+    StoredValue=[]
+
+    def _do_action(self):
+        """Check if only defined slices are listed"""
+        sl = [x for x in self.get_Value() if not x in NPF.SliceTuples.definedSlices]
+        if sl:
+            raise RuntimeError, 'Slices %s do not have an ntuple defined! Must be one of %r !' % (','.join(sl),NPF.SliceTuples.definedSlices)
+
+NPF.add_JobProperty(DisabledSlices)
+
+
+
+###
+### JobProperty ProductionLocation: to specify where to run and setting the defaults
+###
+class ProductionLocation(JobProperty):
+    '''Production environment'''
+    statusOn=True
+    allowedType=['string']
+    allowedValues=['','Tier0','Tier1','CAF','User']
+    StoredValue=''
+    
+    def _do_action(self):
+        """Define ntuple production at the different places"""
+
+        if NPF.SliceTuples.is_locked(): NPF.SliceTuples.unlock()
+        #if NPF.FileNames.is_locked(): NPF.FileNames.unlock()
+
+        #
+        # Here the default values for ntuple production are set
+        #
+        if self.get_Value() == '':
+            NPF.SliceTuples.set_Value([])
+            NPF.FileNames.set_Value([])
+            NPF.log.info("Disabling Ntuple Production")
+
+        elif self.get_Value() == 'Tier0':
+            NPF.SliceTuples.set_Value(['MinBias, EGamma, Tau, Muon, TrigMenu'])
+            #NPF.FileNames.set_Value(['TriggerNT.root'])
+            
+        elif self.get_Value() == 'Tier1':
+            NPF.SliceTuples.set_Value(['L1Calo'])
+            #NPF.FileNames.set_Value(['TriggerNT.root'])
+            
+        elif self.get_Value() == 'CAF':
+            NPF.SliceTuples.set_Value(['MinBias', 'EGamma', 'Tau', 'Muon', 'L1Calo'])
+            if NPF.FileNames()==[]:
+                NPF.FileNames.set_Value(['MinBiasNT.root', 'EGammaNT.root', 'TauNT.root', 'MuonNT.root', 'L1CaloNT.root'])
+            
+        elif self.get_Value() == 'User':
+            pass
+
+        # lock the properties
+        if self.get_Value()!= 'User':
+            NPF.SliceTuples.lock()
+
+        if self.get_Value()!= 'User' and self.get_Value()!= 'Tier0':
+            NPF.FileNames.lock()
+
+NPF.add_JobProperty(ProductionLocation)
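
For illustration, a minimal usage sketch of these flags (an assumption, not part of the patch), relying only on the properties and methods defined above and on the plain JobProperty assignment idiom used elsewhere in this diff (e.g. tf.readBS=True); 'User' is chosen because it is the only location that leaves SliceTuples and FileNames unlocked:

from TriggerJobOpts.TriggerFlags import TriggerFlags
npf = TriggerFlags.NtupleProductionFlags

npf.ProductionLocation = 'User'                      # leaves SliceTuples/FileNames unlocked
npf.SliceTuples = ["EGamma, Tau", "MinBias"]         # two files: one for EGamma+Tau, one for MinBias
npf.FileNames = ["EgammaTau.root", "MinBias.root"]
npf.DisabledSlices = ["Tau"]

print npf.produceNtuples()                 # True, ProductionLocation is non-empty
print npf.SliceTuples.doSlice('EGamma')    # True
print npf.SliceTuples.doSlice('Tau')       # False, vetoed by DisabledSlices
print npf.FileNames.forSlice('MinBias')    # 'MinBias.root'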
diff --git a/Trigger/TriggerCommon/TrigTier0/python/__init__.py b/Trigger/TriggerCommon/TrigTier0/python/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..44eb2f950ae699b90ee6d1e64a52c9e5ac764e2b
--- /dev/null
+++ b/Trigger/TriggerCommon/TrigTier0/python/__init__.py
@@ -0,0 +1,7 @@
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+__version__ = '1.0.0'
+__author__  = 'Moritz.Backes@cern.ch'
+__all__ = [ 'NtupleProdFlags' ]
+
+
diff --git a/Trigger/TriggerCommon/TrigTier0/share/BStoESD_Tier0_HLTConfig_jobOptions.py b/Trigger/TriggerCommon/TrigTier0/share/BStoESD_Tier0_HLTConfig_jobOptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..34b37b4872f58d8ab53087beaa6d323df3bda323
--- /dev/null
+++ b/Trigger/TriggerCommon/TrigTier0/share/BStoESD_Tier0_HLTConfig_jobOptions.py
@@ -0,0 +1,141 @@
+##############################################################
+# BStoESD_Tier0_HLTConfig_jobOptions.py
+# For DATA reconstruction
+##############################################################
+
+# First check whether the HLT prescale key (psk) is ok; if not, turn the trigger off.
+include( "TrigTier0/TriggerConfigCheckHLTpsk.py" )
+
+if rec.doTrigger():
+    
+    # configure the HLT config
+    if not hasattr( ServiceMgr, "ByteStreamAddressProviderSvc" ):
+        from ByteStreamCnvSvcBase.ByteStreamCnvSvcBaseConf import ByteStreamAddressProviderSvc
+        ServiceMgr += ByteStreamAddressProviderSvc()
+
+    from TriggerJobOpts.TriggerFlags import TriggerFlags as tf
+    tf.readBS=True # needed in HLTTriggerGetter; unclear why it is not true by default when globalflags.InputFormat = 'bytestream'
+    tf.doLVL1= False # needed to not rerun the trigger
+    tf.doEF= False # needed to not rerun the trigger
+    tf.doLVL2 = False  # needed to not rerun the trigger
+    tf.configurationSourceList = ['ds']
+
+
+    #HLTonlineNoL1Thr : Everything is taken from COOL, except LVL1ConfigSvc for thresholds
+    #HLToffline       : HLT is run offline; the .xml files need to be specified
+    #HLTonline        : Normal running, everything is taken from COOL
+
+    tf.configForStartup= "HLTonlineNoL1Thr"
+
+    try:
+        from TriggerJobOpts.TriggerConfigGetter import TriggerConfigGetter
+        cfg=TriggerConfigGetter()
+    except Exception:
+        treatException("Could not run TriggerConfigGetter()")
+
+
+    #---------------------------------------------------------------------------
+    if tf.configForStartup() == "HLTonlineNoL1Thr":
+
+        # Want to use LVL1ConfigSvc for LVL1 thresholds only
+        from TrigConfigSvc.TrigConfigSvcConf import TrigConf__LVL1ConfigSvc
+        from AthenaCommon.AppMgr import ServiceMgr
+        l1 = TrigConf__LVL1ConfigSvc("LVL1ConfigSvc")
+        l1.XMLFile = "LVL1config_SingleBeam_v1_7-bit_trigger_types.xml"
+        l1.CreateLegacyObjects=True
+        ServiceMgr += l1
+
+        tf.inputLVL1configFile = "LVL1config_SingleBeam_v1_7-bit_trigger_types.xml"
+        tf.inputLVL1configFile.lock() # this is needed to not be overwritten by TrigT1CTMonitoring
+
+        # The following are using LVL1ConfigSvc (no thresholds involved)
+        # They should use COOL and not the xml file
+        if not hasattr(ToolSvc,'RecMuCTPIByteStreamTool'):
+            from TrigT1ResultByteStream.TrigT1ResultByteStreamConf import RecMuCTPIByteStreamTool
+            ToolSvc += RecMuCTPIByteStreamTool("RecMuCTPIByteStreamTool")
+        ToolSvc.RecMuCTPIByteStreamTool.LVL1ConfigSvc="TrigConf::TrigConfigSvc/TrigConfigSvc"
+
+        if not hasattr(ToolSvc,'L1EmTauTools'):
+            from TrigT1CaloTools.TrigT1CaloToolsConf import LVL1__L1EmTauTools
+            ToolSvc += LVL1__L1EmTauTools("L1EmTauTools")
+        ToolSvc.L1EmTauTools.LVL1ConfigSvc="TrigConf::TrigConfigSvc/TrigConfigSvc"
+
+        #from TrigT1CTP.TrigT1CTPConf import LVL1CTP__CBNTAA_CTP_RDO
+        #a1 = LVL1CTP__CBNTAA_CTP_RDO("CBNTAA_CTP_RDO")
+        #a1.LVL1ConfigSvc="TrigConf::TrigConfigSvc/TrigConfigSvc"
+
+    #---------------------------------------------------------------------------    
+    elif tf.configForStartup()=="HLToffline": # HLT is ran offline so cannot read from COOL.
+        tf.readLVL1configFromXML = True # has to use the .xml file used for reco
+        tf.readHLTconfigFromXML = True # has to use the .xml file used for reco
+        # You have to set the 2 following files to the .xml files you want.
+        # Here are the default files for reprocessing special case with trigger
+        tf.inputHLTconfigFile = "HLTMenu.xml" # Has to be set correctly
+        tf.inputLVL1configFile = "LVL1Menu.xml" # Has to be set correctly
+        tf.inputHLTconfigFile.lock() 
+        tf.inputLVL1configFile.lock() # this is needed to not be overwritten by TrigT1CTMonitoring
+
+    #---------------------------------------------------------------------------    
+    elif tf.configForStartup()=="HLTonline": # need to talk to clients using LVL1ConfigSvc and add new folders into
+        #Just to be sure we use no LVL1 xml file and do not forget any clients
+        tf.inputLVL1configFile = "LVL1configDUMMY.xml"
+        tf.inputLVL1configFile.lock()
+        # do not need thresholds but are using LVL1ConfigSvc
+        if not hasattr(ToolSvc,'RecMuCTPIByteStreamTool'):
+            from TrigT1ResultByteStream.TrigT1ResultByteStreamConf import RecMuCTPIByteStreamTool
+            ToolSvc += RecMuCTPIByteStreamTool("RecMuCTPIByteStreamTool")
+        ToolSvc.RecMuCTPIByteStreamTool.LVL1ConfigSvc="TrigConf::TrigConfigSvc/TrigConfigSvc"
+
+        #from TrigT1CTP.TrigT1CTPConf import LVL1CTP__CBNTAA_CTP_RDO
+        #a1 = LVL1CTP__CBNTAA_CTP_RDO("CBNTAA_CTP_RDO")
+        #a1.LVL1ConfigSvc="TrigConf::TrigConfigSvc/TrigConfigSvc"
+
+        # need thresholds so should be called just if the info is there in COOL
+        from AnalysisTriggerAlgs.AnalysisTriggerAlgsConfig import RoIBResultToAOD
+        a2 = RoIBResultToAOD("RoIBResultToAOD")
+        a2.LVL1ConfigSvc="TrigConf::TrigConfigSvc/TrigConfigSvc"
+        a2.L1JetTools.LVL1ConfigSvc="TrigConf::TrigConfigSvc/TrigConfigSvc"
+        a2.L1EmTauTools.LVL1ConfigSvc="TrigConf::TrigConfigSvc/TrigConfigSvc"
+        #L1JetTools exists after we've created RoIBResultToAOD
+        ToolSvc.L1JetTools.LVL1ConfigSvc="TrigConf::TrigConfigSvc/TrigConfigSvc"
+        ToolSvc.L1EmTauTools.LVL1ConfigSvc="TrigConf::TrigConfigSvc/TrigConfigSvc"
+
+        from TrigT1CaloSim.TrigT1CaloSimConf import LVL1__EmTauTrigger
+        a3 = LVL1__EmTauTrigger("EmTauTrigger")
+        a3.LVL1ConfigSvc="TrigConf::TrigConfigSvc/TrigConfigSvc"
+
+        from TrigT1CaloSim.TrigT1CaloSimConf import LVL1__TriggerTowerMaker
+        a4 = LVL1__TriggerTowerMaker("TriggerTowerMaker")
+        a4.LVL1ConfigSvc="TrigConf::TrigConfigSvc/TrigConfigSvc"
+
+        from TrigT1CaloSim.TrigT1CaloSimConf import LVL1__ROD
+        a5 = LVL1__ROD("ROD")
+        a5.LVL1ConfigSvc="TrigConf::TrigConfigSvc/TrigConfigSvc"
+
+        if not hasattr(ToolSvc,'L1EtTools'):
+            from TrigT1CaloTools.TrigT1CaloToolsConf import LVL1__L1EtTools
+            ToolSvc +=LVL1__L1EtTools("L1EtTools")
+        ToolSvc.L1EtTools.LVL1ConfigSvc="TrigConf::TrigConfigSvc/TrigConfigSvc"
+
+        if not hasattr(ToolSvc,'L1JEPEtSumsTools'):
+            from TrigT1CaloTools.TrigT1CaloToolsConf import LVL1__L1JEPEtSumsTools
+            ToolSvc += LVL1__L1JEPEtSumsTools("L1JEPEtSumsTools")
+        ToolSvc.L1JEPEtSumsTools.LVL1ConfigSvc="TrigConf::TrigConfigSvc/TrigConfigSvc"
+
+        if not hasattr(ToolSvc,'L1JEPHitsTools'):
+            from TrigT1CaloTools.TrigT1CaloToolsConf import LVL1__L1JEPHitsTools
+            ToolSvc += LVL1__L1JEPHitsTools("L1JEPHitsTools")
+        ToolSvc.L1JEPHitsTools.LVL1ConfigSvc="TrigConf::TrigConfigSvc/TrigConfigSvc"
+
+    #---------------------------------------------------------------------------
+    if recAlgs.doTrigger():
+        try:
+            from TriggerJobOpts.T0TriggerGetter import T0TriggerGetter
+            triggerGetter = T0TriggerGetter()
+        except Exception:
+            treatException("Could not import TriggerJobOpts.TriggerGetter . Switched off !" )
+            recAlgs.doTrigger=False
+    elif rec.doWriteBS():
+        include( "ByteStreamCnvSvc/RDP_ByteStream_jobOptions.py" )
+## end of configuring the HLT config
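
A hedged sketch (an assumption, not part of the patch) of how a job could steer which of the three configForStartup branches above is taken, using the same lock-before-include mechanism as the TrigConfigForPrivateRerunHLT.py preInclude added below; the value chosen here is only an example:

from TriggerJobOpts.TriggerFlags import TriggerFlags as tf

# executed before this file is included; the lock keeps the default
# assignment tf.configForStartup = "HLTonlineNoL1Thr" above from overriding it
tf.configForStartup.set_Value_and_Lock("HLTonline")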
diff --git a/Trigger/TriggerCommon/TrigTier0/share/TrigConfigForPrivateRerunHLT.py b/Trigger/TriggerCommon/TrigTier0/share/TrigConfigForPrivateRerunHLT.py
new file mode 100644
index 0000000000000000000000000000000000000000..5ee24cd523354172d857838b2cb8a7ccad112ead
--- /dev/null
+++ b/Trigger/TriggerCommon/TrigTier0/share/TrigConfigForPrivateRerunHLT.py
@@ -0,0 +1,18 @@
+# This sets up the trigger config for a BStoESD job
+# to use the HLT output XML file generated by a previous BStoBS step
+# and a fixed LVL1 file from the release which should be what was used to rerun the trigger over BS
+# It is intended for private trigger reprocessing only.
+# Contact:  Clemencia Mora or  trigger configuration experts
+
+##preInclude for all steps but enable only for RAWtoESD
+##don't set this in ESDtoAOD, it works with HLTonline since DS folders are stored in ESD metadata
+
+from RecExConfig.RecFlags import rec
+if rec.readRDO and rec.doESD:
+    from TriggerJobOpts.TriggerFlags import TriggerFlags as tf
+    tf.inputHLTconfigFile.set_Value_and_Lock("outputHLTconfig.xml")
+    tf.inputLVL1configFile.set_Value_and_Lock("TriggerMenuXML/LVL1config_InitialBeam_v3.xml")
+    tf.configForStartup.set_Value_and_Lock("HLToffline")
+    tf.configurationSourceList.set_Value_and_Lock(['xml'])
+    tf.readHLTconfigFromXML.set_Value_and_Lock(True)
+    tf.readLVL1configFromXML.set_Value_and_Lock(True)
diff --git a/Trigger/TriggerCommon/TrigTier0/share/TriggerConfigCheckHLTpsk.py b/Trigger/TriggerCommon/TrigTier0/share/TriggerConfigCheckHLTpsk.py
new file mode 100644
index 0000000000000000000000000000000000000000..61ab93d566cefe5d419769071bc268116fe0161d
--- /dev/null
+++ b/Trigger/TriggerCommon/TrigTier0/share/TriggerConfigCheckHLTpsk.py
@@ -0,0 +1,100 @@
+include.block("TriggerTier0/TriggerConfigCheckHLTpsk.py")
+
+from AthenaCommon.Logging import logging
+mlog = logging.getLogger( 'TriggerConfigCheckHLTpsk' ) ## get the logger
+
+runNumbers=[]
+from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
+if len(athenaCommonFlags.BSRDOInput()) > 0 :
+    from RecExConfig.InputFilePeeker import inputFileSummary
+    if inputFileSummary.has_key('run_number'): # online monitoring does not provide a run_number in the inputFileSummary (hence the rest of this program will be skipped)
+        runNumbers=inputFileSummary['run_number']
+
+if len(runNumbers)>0:
+
+    #from RecExConfig.RecAlgsFlags import recAlgs
+    #from RecExConfig.RecFlags import rec
+    from TriggerJobOpts.TriggerFlags import TriggerFlags
+
+    ### Loop over the input files and find if a problematic one is there.
+    ### If so, turn off the trigger.
+    from PyCool import cool
+    from CoolConvUtilities.AtlCoolLib import indirectOpen
+    import PyUtils.AthFile as AthFile
+
+    # get connection to COOL and find the HLT ps key
+    coolDbConn = indirectOpen("COOLONL_TRIGGER/COMP200",oracle=True)
+    hltfolder=coolDbConn.getFolder('/TRIGGER/HLT/HltConfigKeys')
+    lvl1folder=coolDbConn.getFolder('/TRIGGER/LVL1/Lvl1ConfigKey')
+    chansel = cool.ChannelSelection(0,0,cool.ChannelSelection.sinceBeforeChannel)
+
+    needToTurnOffHLT = False
+    needToTurnOffLVL1 = False
+    for RunNumber in runNumbers:
+        # translate to begin and end IOV
+        iovmin=(RunNumber << 32)+0
+        iovmax=((RunNumber+1) << 32)-1
+        # read info from COOL
+        hltobjs = hltfolder.browseObjects( iovmin, iovmax, chansel)
+        if hltobjs.goToNext():    
+            obj=hltobjs.currentRef()
+            ch = obj.channelId()
+            hltpsk = int(obj.payloadValue("HltPrescaleConfigurationKey")) # By default is a string
+            mlog.info("HLT prescale key for run %d is %d "%(RunNumber,hltpsk))
+            if hltpsk == 0:
+                needToTurnOffHLT = True
+        else:
+            needToTurnOffHLT = True
+        # same thing for lvl1    
+        lvl1objs = lvl1folder.browseObjects( iovmin, iovmax, chansel)
+        if lvl1objs.goToNext():
+            obj=lvl1objs.currentRef()
+            ch = obj.channelId()
+            lvl1psk = int(obj.payloadValue("Lvl1PrescaleConfigurationKey"))
+            mlog.info("LVL1 prescale key for run %d is %d "%(RunNumber,lvl1psk))
+            if lvl1psk == 0:
+                needToTurnOffLVL1 = True
+        else:
+            needToTurnOffLVL1 = True
+          
+    if needToTurnOffHLT and needToTurnOffLVL1:
+        mlog.warning("ERROR At least one run (%r) does not contain any trigger information in cool" % runNumbers)
+        mlog.warning("turning off trigger [rec.doTrigger=False, and TriggerFlags.dataTakingConditions='NoTrigger']")
+        TriggerFlags.dataTakingConditions='NoTrigger'
+        from RecExConfig.RecFlags import rec
+        from RecExConfig.RecAlgsFlags import recAlgs
+        rec.doTrigger=False
+        recAlgs.doTrigger=False
+        if not 'DQMonFlags' in dir():
+            print "TrigTier0/TriggerConfigCheckHLTpsk.py: DQMonFlags not yet imported - I import them now"
+            from AthenaMonitoring.DQMonFlags import DQMonFlags
+        DQMonFlags.doHLTMon.set_Value_and_Lock(False)
+        DQMonFlags.useTrigger.set_Value_and_Lock(False)
+        DQMonFlags.doLVL1CaloMon.set_Value_and_Lock(False)
+        DQMonFlags.doCTPMon.set_Value_and_Lock(False)
+
+    elif needToTurnOffHLT:
+        mlog.warning("ERROR At least one run (%r) does not contain HLT information" % runNumbers)
+        mlog.warning("turning off hlt [rec.doTrigger=True, recAlgs.doTrigger=True, and TriggerFlags.dataTakingConditions='Lvl1Only'].")
+        TriggerFlags.dataTakingConditions='Lvl1Only'
+        if not 'DQMonFlags' in dir():
+            print "TrigTier0/TriggerConfigCheckHLTpsk.py: DQMonFlags not yet imported - I import them now"
+            from AthenaMonitoring.DQMonFlags import DQMonFlags
+        DQMonFlags.doHLTMon.set_Value_and_Lock(False)
+        DQMonFlags.useTrigger.set_Value_and_Lock(False)
+
+    elif needToTurnOffLVL1:
+        mlog.warning("ERROR At least one run (%r) does not contain LVL1 information" % runNumbers)
+        mlog.warning("turning off lvl1 [rec.doTrigger=True, recAlgs.doTrigger=True, and TriggerFlags.dataTakingConditions='HltOnly'].")
+        TriggerFlags.dataTakingConditions='HltOnly'
+        if not 'DQMonFlags' in dir():
+            print "TrigTier0/TriggerConfigCheckHLTpsk.py: DQMonFlags not yet imported - I import them now"
+            from AthenaMonitoring.DQMonFlags import DQMonFlags
+#        DQMonFlags.doHLTMon.set_Value_and_Lock(False)
+        DQMonFlags.useTrigger.set_Value_and_Lock(False)
+        DQMonFlags.doLVL1CaloMon.set_Value_and_Lock(False)
+        DQMonFlags.doCTPMon.set_Value_and_Lock(False)
+
+
+    coolDbConn.closeDatabase()
+
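
The IOV limits above pack the run number into the upper 32 bits of COOL's 64-bit validity words, with the lumi block in the lower 32 bits. A small illustrative check of that arithmetic (the run number is made up), using nothing beyond the shifts already present above:

RunNumber = 200000                        # hypothetical run number
iovmin = (RunNumber << 32) + 0            # first lumi block of the run
iovmax = ((RunNumber + 1) << 32) - 1      # last possible lumi block of the run

# the upper 32 bits always decode back to the run number
assert (iovmin >> 32) == RunNumber
assert (iovmax >> 32) == RunNumber
assert (iovmax & 0xFFFFFFFF) == 0xFFFFFFFF   # lower word saturated at the run boundary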
diff --git a/Trigger/TriggerCommon/TrigTier0/share/TriggerConfigCheckMetadata.py b/Trigger/TriggerCommon/TrigTier0/share/TriggerConfigCheckMetadata.py
new file mode 100644
index 0000000000000000000000000000000000000000..61ae67634c69970c6df4ddc7f114ac174ec68851
--- /dev/null
+++ b/Trigger/TriggerCommon/TrigTier0/share/TriggerConfigCheckMetadata.py
@@ -0,0 +1,81 @@
+include.block("TrigTier0/TriggerConfigCheckMetadata.py")
+
+from AthenaCommon.AthenaCommonFlags import athenaCommonFlags
+from AthenaCommon.GlobalFlags  import globalflags
+from AthenaCommon.Logging import logging
+from RecExConfig.RecFlags import rec  # needed for rec.doTrigger below
+from TriggerJobOpts.TriggerFlags import TriggerFlags
+
+log = logging.getLogger( "TriggerConfigCheckMetadata.py" )
+
+if len(athenaCommonFlags.PoolESDInput())>0 or len(athenaCommonFlags.PoolAODInput())>0 :
+    from RecExConfig.InputFilePeeker import inputFileSummary
+    if not 'DQMonFlags' in dir():
+        print "DataQualityMon_RecExCommon_Flags_jobOptions.py: DQMonFlags not yet imported - I import them now"
+        from AthenaMonitoring.DQMonFlags import DQMonFlags
+        
+    if inputFileSummary.has_key('metadata'):
+        hasLVL1 = inputFileSummary['metadata'].has_key('/TRIGGER/LVL1/Lvl1ConfigKey')
+        hasHLT  = inputFileSummary['metadata'].has_key('/TRIGGER/HLT/HltConfigKeys')
+        
+
+        if globalflags.DataSource()=='data':
+            if hasLVL1 and not hasHLT:
+                log.warning("ERROR This pool file does not contain HLT trigger information ")
+                log.info("Disabling HLT monitoring, trigger aware DQMonitoring and trigger ntuples.")
+                TriggerFlags.dataTakingConditions.set_Value_and_Lock('Lvl1Only')
+                DQMonFlags.doHLTMon.set_Value_and_Lock(False) # now that some hltmon moved to ESDtoAOD
+                DQMonFlags.useTrigger.set_Value_and_Lock(False)
+                TriggerFlags.NtupleProductionFlags.ProductionLocation.set_Value_and_Lock('')
+
+            if hasHLT and not hasLVL1:
+                log.warning("ERROR This pool file does not contain LVL1 trigger information ")
+                log.info("Disabling trigger ntuples and trigger aware DQMonitoring but not HLT monitoring.")
+                TriggerFlags.dataTakingConditions.set_Value_and_Lock('HltOnly')
+                TriggerFlags.NtupleProductionFlags.ProductionLocation.set_Value_and_Lock('')
+                DQMonFlags.useTrigger.set_Value_and_Lock(False)
+                DQMonFlags.doLVL1CaloMon.set_Value_and_Lock(False)
+                DQMonFlags.doCTPMon.set_Value_and_Lock(False)
+                try:
+                    from MuonDQAMonFlags.MuonDQAProperFlags import MuonDQADetFlags
+                    MuonDQADetFlags.doTGCL1Mon.set_Value_and_Lock(False)
+                    MuonDQADetFlags.doTGCMon.set_Value_and_Lock(False)                    
+                    log.warning("Turning off TgcLv1RawMonitoring because lvl1 info is missing")
+                except Exception:
+                    treatException("Could not import MuonDQADetFlags")
+
+            if not hasHLT and not hasLVL1:
+                log.warning("ERROR This pool file does not contain any trigger information ")
+                log.info("Disabling HLT monitoring and trigger aware DQMonitoring and trigger ntuples.")
+                TriggerFlags.dataTakingConditions.set_Value_and_Lock('NoTrigger')
+                rec.doTrigger.set_Value_and_Lock(False)
+                DQMonFlags.doHLTMon.set_Value_and_Lock(False)
+                DQMonFlags.useTrigger.set_Value_and_Lock(False)
+                DQMonFlags.doLVL1CaloMon.set_Value_and_Lock(False)
+                DQMonFlags.doCTPMon.set_Value_and_Lock(False)
+                TriggerFlags.NtupleProductionFlags.ProductionLocation.set_Value_and_Lock('')
+                try:
+                    from MuonDQAMonFlags.MuonDQAProperFlags import MuonDQADetFlags
+                    MuonDQADetFlags.doTGCL1Mon.set_Value_and_Lock(False)
+                    MuonDQADetFlags.doTGCMon.set_Value_and_Lock(False)                    
+                    log.warning("Turning off TgcLv1RawMonitoring  because all trigger info is missing")
+                except Exception:
+                    treatException("Could not import MuonDQADetFlags")
+
+        else:
+            if hasLVL1 and not hasHLT:
+                log.info("This pool file does not contain HLT trigger information ")
+                TriggerFlags.dataTakingConditions.set_Value_and_Lock('Lvl1Only')
+
+            if hasHLT and not hasLVL1:
+                log.info("This pool file does not contain LVL1 trigger information ")
+                TriggerFlags.dataTakingConditions.set_Value_and_Lock('HltOnly')
+
+            if not hasHLT and not hasLVL1:
+                log.info("This pool file does not contain any trigger information, setting rec.doTrigger to False")
+                TriggerFlags.dataTakingConditions.set_Value_and_Lock('NoTrigger')
+                rec.doTrigger.set_Value_and_Lock(False)
+            
+    else:
+        log.warning("Either inputFileSummary does not have key 'metadata' or something strange is happening.")
+else:
+    log.warning("Wrong flags setting for pool input, try calling TriggerConfigGetter with 'ReadPool' or 'WritePool' as argument.")
diff --git a/Trigger/TriggerCommon/TrigTier0/share/TriggerNTupleProduction.py b/Trigger/TriggerCommon/TrigTier0/share/TriggerNTupleProduction.py
new file mode 100644
index 0000000000000000000000000000000000000000..082ce8d3bac8d92c85bbf7947bec7c20ad0ff4c7
--- /dev/null
+++ b/Trigger/TriggerCommon/TrigTier0/share/TriggerNTupleProduction.py
@@ -0,0 +1,70 @@
+include.block ("TrigTier0/TriggerNTupleProduction.py")
+
+## 
+## Job options to steer the inclusion of the ntuple production setup for all slices
+## (this has to be included after the TriggerFlags.NtupleProductionFlags have been set)
+##
+
+## Inside the specific slice JO the TriggerFlags.NtupleProductionFlags
+## can be used to access the name of the root file where each slice
+## should go:
+## example: NTAlg.DestinationFile = TriggerFlags.NtupleProductionFlags.FileNames.forSlice('MinBias')
+
+
+## Root trees should not be written directly to file but registered in
+## the THistSvc together with the file name (see example in
+## Trigger/TrigAnalysis/TrigNtupleBase/NtupleAlgorithm.cxx)
+
+from RecExConfig.RecFlags import rec
+
+from TriggerJobOpts.TriggerConfigGetter import TriggerConfigGetter
+cfg = TriggerConfigGetter()
+
+from TriggerJobOpts.TriggerFlags import TriggerFlags as tf
+from AthenaCommon.Logging import logging
+
+logTriggerNTupleProduction = logging.getLogger("TriggerNTupleProduction.py")
+
+
+logTriggerNTupleProduction.info("Including trigger ntuple setups according to the following specifications")
+logTriggerNTupleProduction.info("Production Site: %r" % tf.NtupleProductionFlags.ProductionLocation() )
+logTriggerNTupleProduction.info("Slices         : %r" % tf.NtupleProductionFlags.SliceTuples() )
+logTriggerNTupleProduction.info("Files          : %r" % tf.NtupleProductionFlags.FileNames() )
+logTriggerNTupleProduction.info("Disabled slices: %r" % tf.NtupleProductionFlags.DisabledSlices() )
+
+if not hasattr(ServiceMgr, 'THistSvc'):
+    from GaudiSvc.GaudiSvcConf import THistSvc
+    ServiceMgr += THistSvc()
+  
+for filename in tf.NtupleProductionFlags.FileNames():
+    ServiceMgr.THistSvc.Output += [filename+" DATAFILE=\'"+filename+"\' OPT=\'RECREATE\'"]  
+
+# MinBias
+if tf.NtupleProductionFlags.SliceTuples.doSlice( "MinBias" ):
+    include("TrigMinBiasNtuple/TrigMinBiasNtuple_jo.py")
+    pass
+
+# Muon
+if tf.NtupleProductionFlags.SliceTuples.doSlice( "Muon" ) and rec.doMuon() :
+    include("TrigMuonNtuple/TrigMuonNtuple_jo.py")
+    pass
+
+# Tau
+if tf.NtupleProductionFlags.SliceTuples.doSlice( "Tau" ) and rec.doTau() :
+    include("TrigTauPerformAthena/TrigTauNtuple_jo.py")
+    pass
+
+# Egamma
+if tf.NtupleProductionFlags.SliceTuples.doSlice( "EGamma" ) and rec.doEgamma() :
+    include("TrigEgammaNtuple/TrigEgammaNtuple_jo.py")
+    pass
+
+#L1Calo
+if tf.NtupleProductionFlags.SliceTuples.doSlice( "L1Calo" ):
+    include("TrigT1CaloCalibTools/L1Calo_D3PD_Tier1_jobOptions.py")
+    pass
+
+# TrigMenu
+if tf.NtupleProductionFlags.SliceTuples.doSlice( "TrigMenu" ):
+    include("TriggerMenuAnalysis/TriggerMenuAnalysis_Nt_Tier0.py")
+    pass
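
To tie the pieces together, a hedged end-to-end sketch (an assumption, meant to run in the job options that set the flags before this fragment is included) of what the CAF defaults from NtupleProdFlags.py imply, i.e. which of the slice includes above would fire and into which file each slice writes:

from TriggerJobOpts.TriggerFlags import TriggerFlags as tf

tf.NtupleProductionFlags.ProductionLocation = 'CAF'   # fills in the CAF default slices and file names

# with the CAF defaults set in NtupleProdFlags.py this prints, e.g.:
#   MinBias  -> MinBiasNT.root
#   L1Calo   -> L1CaloNT.root
# while TrigMenu is skipped because it is not in the CAF slice list
for sl in ['MinBias', 'EGamma', 'Tau', 'Muon', 'L1Calo', 'TrigMenu']:
    if tf.NtupleProductionFlags.SliceTuples.doSlice(sl):
        print "%-8s -> %s" % (sl, tf.NtupleProductionFlags.FileNames.forSlice(sl))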