From 6d95c694c1b24c37c46bd18dd91bd68b63f18129 Mon Sep 17 00:00:00 2001
From: Carlos Chavez Barajas <carlos.chavez.barajas@cern.ch>
Date: Thu, 12 Jun 2014 09:47:03 +0200
Subject: [PATCH] Update reco parser options to
 RecJobTransforms.recTransformUtils (TrigTransform-00-00-07)

---
 .../TrigTransform/cmt/requirements            |  16 +
 .../TrigTransform/doc/Trig_tf.txt             | 275 +++++++++++++
 .../TrigTransform/python/__init__.py          |   2 +
 .../TrigTransform/python/trigExe.py           | 113 ++++++
 .../TrigTransform/python/trigPostRun.py       |  19 +
 .../TrigTransform/python/trigPreRun.py        |  32 ++
 .../TrigTransform/python/trigRecoExe.py       | 178 +++++++++
 .../TrigTransform/python/trigUpdateArgs.py    | 363 ++++++++++++++++++
 .../TrigTransform/scripts/Trig_reco_tf.py     | 220 +++++++++++
 .../TrigTransform/scripts/Trig_tf_main.py     | 165 ++++++++
 .../TrigTransform/share/Trig_tf_simple.py     |  67 ++++
 11 files changed, 1450 insertions(+)
 create mode 100644 HLT/Trigger/TrigTransforms/TrigTransform/cmt/requirements
 create mode 100644 HLT/Trigger/TrigTransforms/TrigTransform/doc/Trig_tf.txt
 create mode 100755 HLT/Trigger/TrigTransforms/TrigTransform/python/__init__.py
 create mode 100644 HLT/Trigger/TrigTransforms/TrigTransform/python/trigExe.py
 create mode 100644 HLT/Trigger/TrigTransforms/TrigTransform/python/trigPostRun.py
 create mode 100644 HLT/Trigger/TrigTransforms/TrigTransform/python/trigPreRun.py
 create mode 100644 HLT/Trigger/TrigTransforms/TrigTransform/python/trigRecoExe.py
 create mode 100644 HLT/Trigger/TrigTransforms/TrigTransform/python/trigUpdateArgs.py
 create mode 100755 HLT/Trigger/TrigTransforms/TrigTransform/scripts/Trig_reco_tf.py
 create mode 100644 HLT/Trigger/TrigTransforms/TrigTransform/scripts/Trig_tf_main.py
 create mode 100755 HLT/Trigger/TrigTransforms/TrigTransform/share/Trig_tf_simple.py

diff --git a/HLT/Trigger/TrigTransforms/TrigTransform/cmt/requirements b/HLT/Trigger/TrigTransforms/TrigTransform/cmt/requirements
new file mode 100644
index 00000000000..983c42f99bf
--- /dev/null
+++ b/HLT/Trigger/TrigTransforms/TrigTransform/cmt/requirements
@@ -0,0 +1,16 @@
+package TrigTransform
+
+author Carsten Kendziorra <kendzi@physik.hu-berlin.de>
+author Simon George <S.George@rhul.ac.uk>
+author Martin Zur Nedden <martin.zur.nedden@desy.de>
+author Joerg Stelzer <stelzer@cern.ch>
+author Mark Stockton <mark.stockton@cern.ch>
+
+use PyJobTransforms PyJobTransforms-* Tools
+use RecJobTransforms RecJobTransforms-* Reconstruction
+
+use AtlasPolicy AtlasPolicy-*
+
+apply_pattern declare_python_modules files="*.py"
+
+apply_pattern declare_job_transforms tfs="*.py"
diff --git a/HLT/Trigger/TrigTransforms/TrigTransform/doc/Trig_tf.txt b/HLT/Trigger/TrigTransforms/TrigTransform/doc/Trig_tf.txt
new file mode 100644
index 00000000000..85771db2994
--- /dev/null
+++ b/HLT/Trigger/TrigTransforms/TrigTransform/doc/Trig_tf.txt
@@ -0,0 +1,275 @@
+
+***** 
+scripts/Trig_tf_main.py
+***** 
+
+  ***** 
+  Code
+  *****
+  This is the new transform class to run the transforms using the new transform infrastructure (see below).
+    A list of requirements for the new trigger transforms is here:
+    https://twiki.cern.ch/twiki/bin/viewauth/Atlas/TriggerTransformRequirements
+  /scripts/Trig_tf_main.py -> The script containing the transform
+    So far it just runs athenaMT (athenaPT will be added later), but it matches all the arguments in the simple script plus all the arguments used by athenaMT in the ATN tests
+  /python/trigUpdateArgs.py -> The class which takes care of the argument changes from the input format to those needed by athena
+  /python/trigPreRun.py -> The class where actions can be performed before running the transform
+  /python/trigExe.py -> The class which inherits from the base transform, with changes needed for athenaMT/PT
+  /python/trigPostRun.py -> The class where actions can be performed after running the transform
+
+  /scripts/Trig_reco_tf.py -> runs HLT_standalone along with the standard Reco_tf transform steps
+  /python/trigRecoExe.py -> The class which inherits from athenaExecutor and is used to rename the HLTMON output file
+  
+  *****
+  Example of running:
+  *****
+  Current release I've tested with:
+    asetup 17.1.4.7.1,AtlasCAFHLT,here
+  Checkout
+    svn co svn+ssh://svn.cern.ch/reps/atlasoff/HLT/Trigger/TrigTransforms/TrigTransform/trunk HLT/Trigger/TrigTransforms/TrigTransform/
+    cd HLT/Trigger/TrigTransforms/TrigTransform/cmt && cmt config && cd -
+  Make  
+    cd HLT/Trigger/TrigTransforms/TrigTransform/cmt && make && cd -
+  Current example of how to test code being passed to athenaMT:
+    Trig_tf_main.py --athenaoptsMT='-f test1 -D' --inputBSFile test3,test6 --outputRAWFile test4  --connection TRIGGERDBATN --smk 1111 --lvl1ps 2222 --hltps 3333 --run_number=202798 --maxEvents 10 --post_commands='ignoreOnlineKeys;confMagFromDCS;setHardTimeout;costMonitor'
+    gives:
+    athenaMT.py --postcommand L2_PROCESS_postCommands.py --save-output ['test4'] --run-number 202798 --event-modifier TrigTransform.PreloadL2 --use-database DBServer=TRIGGERDBATN:DBSMKey=1111:DBHLTPSKey=3333:DBLV1PSKey=2222 --number-of-events 10 --file ['test3', 'test6'] --joboptionsvc-type TrigConf::HLTJobOptionsSvc -f test1 -D
+    This will setup the transform infrastructure and run, passing the options to athenaMT.
+    The -D flag then shows what has been passed to athenaMT.
+      -> it shows that the input file list correctly gets all 3 files listed above (even though 1 is in the optsMT arg)
+    The postcommands option also now creates the py file with most of the features of the existing transform
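+      For reference, each keyword given to --post_commands appends a small block of python to the
+      generated L2_PROCESS_postCommands.py file (see trigUpdateArgs.py); for example 'setHardTimeout'
+      produces:
+
+        from GaudiPython.Bindings import iProperty
+
+        # This will work for L2
+        if len(iProperty('TrigSteer_L2').properties())>0:
+            iProperty('TrigSteer_L2').hardEventTimeout = 1800000000000
+
+        # and this for EF
+        if len(iProperty('TrigSteer_EF').properties())>0:
+            iProperty('TrigSteer_EF').hardEventTimeout = 1800000000000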
+    
+    In addition you can add --verbose or --loglevel DEBUG to see many of the trigger transform statements
+      Could also switch on the debug logging of just the trigger transform by doing something like:
+      export TRF_LOGCONF=~graemes/public/trf/logging/logging.conf-debug
+  
+  *****
+  Base Transform Code
+  *****
+  Trigger Transform relies on the code from the package (rev 518534 onwards):
+    https://svnweb.cern.ch/trac/atlasoff/browser/Tools/PyJobTransforms/trunk/
+    For example use: pkgco.py PyJobTransforms -A
+                     cd Tools/PyJobTransforms/cmt/ && make && cd -
+    Originally used the code from the branch (rev 512541 onwards):
+    https://svnweb.cern.ch/trac/atlasoff/browser/Tools/PyJobTransforms/branches/PyJobTransforms-01-01-13-branch
+    use: pkgco.py PyJobTransforms-01-01-13-branch  (or pkgco.py PyJobTransforms -A)
+  Documentation:
+  https://twiki.cern.ch/twiki/bin/viewauth/Atlas/JobTransformDesiderata
+  https://twiki.cern.ch/twiki/bin/viewauth/Atlas/TransformWorkflow
+  Summary of execution (a minimal driver sketch follows this summary):
+    Initialise  Setup signal handling
+                Read command line args (Check validity, no clashes, completeness)
+                Setup job to be run
+    Execute     Run job and monitor
+    Finalise    Read job exit status
+                Parse log files for errors
+                Generate reports (Metadata for outputs, Job reports in other formats)
+                Final transform exit status  
+                
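+  The three phases above are driven by the standard transform entry point; a trimmed sketch of the
+  driver, mirroring the main() of scripts/Trig_reco_tf.py in this package, is:
+
+    import sys, time
+    from PyJobTransforms.trfLogger import msg
+
+    def main():
+        trf = getTransform()                 # build the transform with its executors and arguments
+        trf.parseCmdLineArgs(sys.argv[1:])   # Initialise: read and validate the command line args
+        trf.execute()                        # Execute: run the job and monitor it
+        trf.generateReport()                 # Finalise: parse logs and write the job report
+        msg.info("%s stopped at %s, trf exit code %d" % (sys.argv[0], time.asctime(), trf.exitCode))
+        sys.exit(trf.exitCode)
+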
+  Originally all the base transform code was located in one place. Now the code controlling the reco steps is here:
+  https://svnweb.cern.ch/trac/atlasoff/browser/Reconstruction/RecJobTransforms/
+  so additionally the code here is needed:
+  pkgco.py RecJobTransforms -A
+  cd Reconstruction/RecJobTransforms/cmt/ && make && cd -
+
+  In addition, some other skeleton files are now located in PATJobTransforms (not yet tested!) and in SimuJobTransforms.
+
+***** 
+scripts/Trig_reco_tf.py
+***** 
+  Similar to scripts/Trig_tf_main.py in that it uses the base transforms
+  Code is actually based on: https://svnweb.cern.ch/trac/atlasoff/browser/Reconstruction/RecJobTransforms/trunk/scripts/Reco_tf.py
+  The difference is that the aim is to run HLT_standalone directly with athena
+  It accepts a few options to run BS->BS (using TriggerRelease/runHLT_standalone.py) and then BS->ESD, etc
+  Some of the options used with runHLT_standalone have different names here (see the sketch after this list):
+  Trigger option = option to set in transform
+  testPhysicsV4 = testPhysicsV4
+  writeBS = writeBS *requires updated TriggerRelease, see below
+  BSRDOInput = inputBS_RDOFile
+  EvtMax = maxEvents
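+
+  Inside Trig_reco_tf.py this mapping is handled by the BSRDOtoRAW executor, which forwards the
+  transform arguments to runHLT_standalone via literalRunargs; a trimmed sketch of that configuration
+  (the full version is in scripts/Trig_reco_tf.py) is:
+
+    from TrigTransform.trigRecoExe import trigRecoExecutor
+
+    executorSet.add(trigRecoExecutor(name = 'BSRDOtoRAW', skeletonFile = 'TriggerRelease/runHLT_standalone.py',
+                                     exe = 'athenaHLT.py', substep = 'b2r',
+                                     inData = ['BS_RDO', 'RDO'], outData = ['BS', 'HIST_HLTMON'],
+                                     literalRunargs = ['writeBS = runArgs.writeBS',
+                                                       'BSRDOInput = runArgs.inputBS_RDOFile',
+                                                       'EvtMax = runArgs.maxEvents']))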
+
+  Example of Trig_reco_tf version in use:
+  asetup devval,rel_2,here
+  Trig_reco_tf.py --inputBS_RDOFile=root://eosatlas//eos/atlas/atlascerngroupdisk/trig-daq/validation/test_data/data12_8TeV.00212967.physics_eb_zee_zmumu_cc._0001.data --outputESDFile=testESD2 --maxEvents=5  | tee outputLOG.txt
+  creates:
+  tmp.BS (bs) and testESD2 (esd)
+  
+  Extra options that can be added (a combined example follows this list):
+  1) ability to change the release used (b2r = BS->BS with HLT_standalone, r2e = BS->ESD)
+     either one of the steps or both can be set to a release different from the one already set up
+     --asetup b2r:devval,rel_2 r2e:devval,rel_2
+  2) name of the temporary BS file, by default set to tmp.BS
+     --outputBSFile=out.BS
+  3) to save the HLTMON file, add it to the outputs; this also lets you rename it
+     --outputHIST_HLTMONFile=expert-monitoring-new.root
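+
+  For illustration only, a single command combining all three extra options with the example above
+  might look like (the input file is the same eos file as above, shortened here to a placeholder):
+    Trig_reco_tf.py --inputBS_RDOFile=<input BS_RDO file> --outputESDFile=testESD2 --maxEvents=5 --asetup b2r:devval,rel_2 r2e:devval,rel_2 --outputBSFile=out.BS --outputHIST_HLTMONFile=expert-monitoring-new.root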
+  
+  Options now set automatically so no longer required above:
+  --testPhysicsV4=true 
+  --writeBS=true
+
+  ***** 
+  TriggerRelease
+  ***** 
+  To make use of writeBS the TriggerRelease package needs to be updated:
+  pkgco.py TriggerRelease-00-10-07
+  cd Trigger/TriggerRelease/cmt && cmt config && cd -
+  cd Trigger/TriggerRelease/cmt && make && cd -  
+  
+***** 
+share/Trig_tf_simple.py
+***** 
+  
+  This was a starting point from Simon George on how to create a simple job transform
+  Documentation:
+  https://twiki.cern.ch/twiki/bin/viewauth/Atlas/TriggerTransformDevStatus
+  
+  *****
+  Example of running:
+  *****
+  python ./TrigTransformNew/share/Trig_tf_simple.py --inputBSFile test1 --outputRAWFile test2 --connection TRIGGERDBATN --smk 1111 --lvl1ps 2222 --hltps 3333
+  gives: 
+  athenaMT.py -f test1 -o test2 -J TrigConf::HLTJobOptionsSvc -b "DBServer=TRIGGERDBATN:DBSMKey=1111:DBHLTPSKey=3333:DBLV1PSKey=2222"
+  
+***** 
+athenaMT/PT options added
+***** 
+          long        = short
+  ---------------------------
+  --file              = -f
+  --save-output       = -o 
+  --joboptionsvc-type = -J    
+  --use-database      = -b    
+  --postcommand       = -C
+  --run-number        = -R
+  --number-of-events  = -n
+  --event-modifier    = -Z
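+
+  In the transform only the long forms are used: trigExe.py rebuilds the command line by prefixing
+  each remaining runarg key with '--' and appending its value, roughly as in this simplified extract
+  (list and substep arguments get extra handling):
+
+    cmd = [exe]                              # e.g. 'athenaMT.py'
+    for arg in argdict:                      # runargs only; exeArgs are appended at the end
+        cmd.append('--' + arg)               # e.g. '--number-of-events'
+        cmd.append(str(argdict[arg].value))  # e.g. '10'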
+
+***** 
+BatchHLTTrf_wrap.py
+***** 
+  https://twiki.cern.ch/twiki/bin/viewauth/Atlas/TriggerTransformOldDoc
+  asetup AtlasCAFHLT,17.1.4.7.1,here
+  Recipe to run a checked out version 
+    # create a working directory somewhere with about 10 GB free, e.g. /tmp/$USER on lxplus or your afs scratch space. 
+    pkgco.py TrigTransformOld
+    cd HLT/Trigger/TrigTransforms/TrigTransformOld/cmt && make && cd -
+  Data file
+    xrdcp root://eosatlas//eos/atlas/atlascerngroupdisk/trig-daq/validation/test_data/data11_7TeV.00191628.physics_eb_zee_zmumu._0001.data .
+  Keys
+    can use ../UploadMenuKeys/exportMenuKeys.sh
+    or take from:
+      http://atlas-computing.web.cern.ch/atlas-computing/links/buildDirectory/nightlies/17.1.X.Y.Z-VAL/AtlasCAFHLT/rel_5/NICOS_area/NICOS_atntest171XYZVALAtlasCAFHLT32BS5G4AtlasCAFHLTOpt/trigp1test_testconfiguration_work/TrfTestPPV4/atn_test.log
+    hltpsk=475
+    l1psk=33
+    smk=2382
+  then run with:
+    BatchHLTTrf_wrap.py inputBSFile=data11_7TeV.00191628.physics_eb_zee_zmumu._0001.data outputRAWFile=RAW.999999._000001.data.1 doStreaming=True filters="express" stream_out=debug connection=TRIGGERDBATN smk=${smk} lvl1ps=${l1psk} hltps=${hltpsk} outputNTUP_TRIGCOSTEFFile=NTUP_TRIGCOSTEF.root outputNTUP_TRIGCOSTL2File=NTUP_TRIGCOSTL2.root outputNTUP_TRIGRATEEFFile=NTUP_TRIGRATEEF.root outputNTUP_TRIGRATEL2File=NTUP_TRIGRATEL2.root outputHIST_HLTMONEFFile=HIST_HLTMONEF.root outputHIST_HLTMONL2File=HIST_HLTMONL2.root max_events=10
+  which in turn runs:
+    BatchHLTApps.py data11_7TeV.00191628.physics_eb_zee_zmumu._0001.data --run_number=191628 --job_id=999999_000001 --doEF --doDummyStreaming --connection=TRIGGERDBATN --hltps=475 --lvl1ps=33 --smk=2382 --file_prefix=data11_7TeV.00191628 --filters=express --stream_out=debug --max_events=10 --doL2 --file_stream=physics_eb_zee_zmumu --post_commands='ignoreOnlineKeys;confMagFromDCS;setHardTimeout;costMonitor' --doStreaming -v 
+  executing:
+    athenaMT.py -R 191628 -C 'include("L2_PROCESS_postCommands.py")' -n 10 -f [ "data11_7TeV.00191628.physics_eb_zee_zmumu._0001.data" ] -o data11_7TeV.00191628.physics_eb_zee_zmumu.AthenaMTout_1.RAW._lb0000._CAF_999999_000001 -J TrigConf::HLTJobOptionsSvc --use-database DBServer=TRIGGERDBATN:Instance=L2:DBHLTPSKey=475:DBSMKey=2382:DBL1PSKey=33 -Z TrigTransformOld.PreloadL2
+        
+     
+***** 
+ATN test
+*****    
+        
+  http://atlas-computing.web.cern.ch/atlas-computing/links/buildDirectory/nightlies/17.1.X.Y.Z-VAL/AtlasCAFHLT/rel_5/NICOS_area/NICOS_atntest171XYZVALAtlasCAFHLT32BS5G4AtlasCAFHLTOpt/trigp1test_testconfiguration_work/TrfTestPPV4/atn_test.log        
+        
+
+    BatchHLTTrf_wrap.py: arguments : ['inputBSFile=data12_8TeV.00202798.physics_eb_zee_zmumu_cc._0001.data', 
+                                      'outputRAWFile=RAW.999999._000001.data.1', 
+                                      'doStreaming=True', 'filters=express', 
+                                      'stream_out=debug', 'connection=TRIGGERDBATN', 
+                                      'smk=2382', 'lvl1ps=33', 'hltps=475', 
+                                      'outputNTUP_TRIGCOSTEFFile=NTUP_TRIGCOSTEF.root', 
+                                      'outputNTUP_TRIGCOSTL2File=NTUP_TRIGCOSTL2.root', 
+                                      'outputNTUP_TRIGRATEEFFile=NTUP_TRIGRATEEF.root', 
+                                      'outputNTUP_TRIGRATEL2File=NTUP_TRIGRATEL2.root', 
+                                      'outputHIST_HLTMONEFFile=HIST_HLTMONEF.root', 
+                                      'outputHIST_HLTMONL2File=HIST_HLTMONL2.root', 
+                                      'max_events=10']
+    BatchHLTTrf_wrap.py: arg map   : {'outputHIST_HLTMONEFFile': 'HIST_HLTMONEF.root', 
+                                      'outputNTUP_TRIGRATEEFFile': 'NTUP_TRIGRATEEF.root', 
+                                      'inputBSFile': ['data12_8TeV.00202798.physics_eb_zee_zmumu_cc._0001.data'], 
+                                      'outputNTUP_TRIGCOSTL2File': 'NTUP_TRIGCOSTL2.root', 
+                                      'outputHIST_HLTMONL2File': 'HIST_HLTMONL2.root', 
+                                      'lvl1ps': 33, 'hltps': 475, 'max_events': 10, 
+                                      'connection': 'TRIGGERDBATN', 'smk': 2382, 
+                                      'outputNTUP_TRIGCOSTEFFile': 'NTUP_TRIGCOSTEF.root', 
+                                      'filters': 'express', 'stream_out': 'debug', 
+                                      'outputRAWFile': 'RAW.999999._000001.data.1', 
+                                      'outputNTUP_TRIGRATEL2File': 'NTUP_TRIGRATEL2.root', 
+                                      'doStreaming': True}
+    BatchHLTTrf_wrap.py: arg map 2  : {'outputHIST_HLTMONEFFile': '#HIST_HLTMONEF.root', 
+                                       'outputNTUP_TRIGRATEEFFile': '#NTUP_TRIGRATEEF.root', 
+                                       'inputBSFile': ['#data12_8TeV.00202798.physics_eb_zee_zmumu_cc._0001.data'], 
+                                       'outputNTUP_TRIGCOSTL2File': '#NTUP_TRIGCOSTL2.root', 
+                                       'outputHIST_HLTMONL2File': '#HIST_HLTMONL2.root', 
+                                       'lvl1ps': 33, 'hltps': 475, 'max_events': 10, 
+                                       'connection': 'TRIGGERDBATN', 'smk': 2382, 
+                                       'outputNTUP_TRIGCOSTEFFile': '#NTUP_TRIGCOSTEF.root', 
+                                       'filters': 'express', 'stream_out': 'debug', 
+                                       'outputRAWFile': '#RAW.999999._000001.data.1', 
+                                       'outputNTUP_TRIGRATEL2File': '#NTUP_TRIGRATEL2.root', 
+                                       'doStreaming': True}
+    BatchHLTTrf_wrap.py: signArgMap: {'outputRAWFiles': '#RAW.999999._000001.data.1', 
+                                      'outputRateEF': '#NTUP_TRIGRATEEF.root', 
+                                      'outputRoot': '#HIST_HLTMONL2.root', 
+                                      'outputRateL2': '#NTUP_TRIGRATEL2.root', 
+                                      'outputCostL2': '#NTUP_TRIGCOSTL2.root', 
+                                      'inputRAWFiles': ['#data12_8TeV.00202798.physics_eb_zee_zmumu_cc._0001.data'], 
+                                      'outputCostEF': '#NTUP_TRIGCOSTEF.root', 
+                                      'applicationConfig': {'lvl1ps': 33, 'hltps': 475, 
+                                                            'connection': 'TRIGGERDBATN', 'smk': 2382, 
+                                                            'filters': 'express', 'stream_out': 'debug', 
+                                                            'max_events': 10, 'doStreaming': True}, 
+                                      '_partid': '999999_000001'}
+   ***BatchHLTTrf
+     '_partid': '999999_000001',
+     'applicationConfig': {'connection': 'TRIGGERDBATN',
+                         'doStreaming': True,
+                         'filters': 'express',
+                         'hltps': 475,
+                         'lvl1ps': 33,
+                         'max_events': 10,
+                         'smk': 2382,
+                         'stream_out': 'debug'},
+     'inputRAWFiles': ['#data12_8TeV.00202798.physics_eb_zee_zmumu_cc._0001.data'],
+     'outputCostEF': '#NTUP_TRIGCOSTEF.root',
+     'outputCostL2': '#NTUP_TRIGCOSTL2.root',
+     'outputRAWFiles': '#RAW.999999._000001.data.1',
+     'outputRateEF': '#NTUP_TRIGRATEEF.root',
+     'outputRateL2': '#NTUP_TRIGRATEL2.root',
+     'outputRoot': '#HIST_HLTMONL2.root'}
+     {'outputRAWFiles': ['#RAW.999999._000001.data.1'], 'outputRateEF': '#NTUP_TRIGRATEEF.root', 'outputRoot': '#HIST_HLTMONL2.root', 'outputRateL2': '#NTUP_TRIGRATEL2.root', 'outputCostL2': '#NTUP_TRIGCOSTL2.root', 'inputRAWFiles': ['#data12_8TeV.00202798.physics_eb_zee_zmumu_cc._0001.data'], 'outputCostEF': '#NTUP_TRIGCOSTEF.root', 'applicationConfig': {'connection': 'TRIGGERDBATN', 'hltps': 475, 'lvl1ps': 33, 'smk': 2382, 'filters': 'express', 'stream_out': 'debug', 'max_events': 10, 'doStreaming': True}, '_partid': '999999_000001'}
+     set outdsname to  ['']
+     Peaking into first input raw data file data12_8TeV.00202798.physics_eb_zee_zmumu_cc._0001.data ...
+     FileCollection: getFileInfo INFO EventStorage DataReader: runNumber=202798 projectTag=data12_8TeV stream=physics_EnhancedBias
+     FileCollection: getFileInfo INFO From input data file: runNumber=202798 runType=data12_8TeV streamType=physics streamName=EnhancedBias
+     FileCollection: getFileInfo INFO From input data file name: runNumber=00202798 runType=data12_8TeV streamType=physics streamName=eb_zee_zmumu_cc
+     FileCollection: getFileInfo INFO Setting value for runType from file contents
+     FileCollection: getFileInfo INFO Setting value for runNumber from file contents
+     FileCollection: getFileInfo INFO Setting value for streamType from file contents
+     FileCollection: getFileInfo INFO Setting value for streamName from file contents
+     FileCollection: getFileInfo INFO From input data (final): 
+     runNumber=202798 runType=data12_8TeV streamType=physics streamName=EnhancedBias
+   runs:
+     BatchHLTApps.py data12_8TeV.00202798.physics_eb_zee_zmumu_cc._0001.data 
+       --run_number=202798 --job_id=999999_000001 --doEF --doDummyStreaming --connection=TRIGGERDBATN 
+       --hltps=475 --lvl1ps=33 --smk=2382 --file_prefix=data12_8TeV.00202798 --filters=express --stream_out=debug 
+       --max_events=10 --doL2 --file_stream=physics_EnhancedBias 
+       --post_commands='ignoreOnlineKeys;confMagFromDCS;setHardTimeout;costMonitor' --doStreaming
+   updated to:
+     BatchHLTApps.py data12_8TeV.00202798.physics_eb_zee_zmumu_cc._0001.data --run_number=202798 --job_id=999999_000001 
+       --doEF --doDummyStreaming --connection=TRIGGERDBATN --hltps=475 --lvl1ps=33 --smk=2382 
+       --file_prefix=data12_8TeV.00202798 --filters=express --stream_out=debug --max_events=10
+       --doL2 --file_stream=physics_EnhancedBias 
+     --post_commands='ignoreOnlineKeys;confMagFromDCS;setHardTimeout;costMonitor' --doStreaming -v  
+   executing:
+     athenaMT.py -R 202798 -C 'include("L2_PROCESS_postCommands.py")' -n 10 
+                 -f [ "data12_8TeV.00202798.physics_eb_zee_zmumu_cc._0001.data" ] 
+                 -o data12_8TeV.00202798.physics_EnhancedBias.AthenaMTout_1.RAW._lb0000._CAF_999999_000001 
+                 -J TrigConf::HLTJobOptionsSvc 
+                 --use-database DBServer=TRIGGERDBATN:Instance=L2:DBHLTPSKey=475:DBSMKey=2382:DBL1PSKey=33 
+                 -Z TrigTransformOld.PreloadL2
+   
\ No newline at end of file
diff --git a/HLT/Trigger/TrigTransforms/TrigTransform/python/__init__.py b/HLT/Trigger/TrigTransforms/TrigTransform/python/__init__.py
new file mode 100755
index 00000000000..74583d364ec
--- /dev/null
+++ b/HLT/Trigger/TrigTransforms/TrigTransform/python/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
diff --git a/HLT/Trigger/TrigTransforms/TrigTransform/python/trigExe.py b/HLT/Trigger/TrigTransforms/TrigTransform/python/trigExe.py
new file mode 100644
index 00000000000..29f76259877
--- /dev/null
+++ b/HLT/Trigger/TrigTransforms/TrigTransform/python/trigExe.py
@@ -0,0 +1,113 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+# @brief: Trigger executor to call base transforms
+# @details: Based on athenaExecutor with some modifications
+# @author: Mark Stockton
+
+import logging
+msg = logging.getLogger("PyJobTransforms." + __name__)
+
+from PyJobTransforms.trfExe import athenaExecutor
+from PyJobTransforms.trfUtils import asetupReport
+
+import PyJobTransforms.trfArgClasses as trfArgClasses
+
+from TrigTransform.trigUpdateArgs import swapArgs
+     
+class trigExecutor(athenaExecutor):
+    # Trigger executor
+    # differs from the standard athena executor as it runs athenaMT/PT from the command line rather than via JOs
+    def __init__(self, name = 'athena', trf = None, inData = set(), outData = set(), exe = 'athena.py', exeArgs = ['athenaopts']):
+        #don't need any of the JO file creation so set to None every time
+        skeletonFile=None
+        #remainder of init comes from parent
+        super(trigExecutor, self).__init__(name=name, trf=trf, inData=inData, outData=outData, skeletonFile=skeletonFile, exe=exe, exeArgs=exeArgs)
+        
+    def preExecute(self, input = set(), output = set()):
+        msg.info('Run athena preExecute, note will recreate command line from scratch after this is complete')
+        super(trigExecutor, self).preExecute(input=input, output=output)
+             
+        msg.info('athena preExecute complete, will now create command line from scratch')
+        
+        #Swap the remaining arguments into the athenaMT/PT variable names
+        swapArgs(self.conf.argdict)
+        
+        ##TODO expect arg order important i.e. want exeArgs at end
+        #      so code restarts command from scratch (below) then in loop it skips exeArgs
+        self._cmd = [self.exe, ]
+        
+        ## Code below is based on that from scriptExecutor in trfExe
+        ## Build up the appropriate list for execution (need something rather more general...)
+        for arg in self.conf.argdict:
+            #skip exeArgs
+            if arg in self.exeArgs:
+                continue
+             
+            #skip any runargs (code from trfJobOptions)
+            _isRunTimeArg=False
+            if isinstance(self.conf.argdict[arg], trfArgClasses.argument): 
+                if self.conf.argdict[arg].isRunarg == True:
+                    _isRunTimeArg=True
+            
+            if _isRunTimeArg == False:
+                msg.debug('Argument %s is not a runarg so skipped' % arg)
+                continue
+
+            # Add the arg and its value to the command
+            # all args are given the -- option infront of their name
+            # TODO currently passes a list object, need to check this is read correctly
+            elif isinstance(self.conf.argdict[arg].value, list):
+                self._cmd.append(str('--' + arg))
+                #TODO if list object will be fine...
+                self._cmd.append(str(self.conf.argdict[arg].value))
+                #if not use below to create a single string
+                #temp = ''
+                #for v in self.conf.argdict[arg].value:
+                #    temp+=v+' '
+                #self._cmd.append(temp)
+            #for substep arguments need to get the specific value for the step
+            elif isinstance(self.conf.argdict[arg], trfArgClasses.argSubstep):
+                self._cmd.append(str('--' + arg))
+                #TODO for now it is set to first substep, will need to get the step currently in
+                #For info this was added to fix the maxEvents which has a default of first
+                self._cmd.append(str(self.conf.argdict[arg].returnMyValue(name=self._name, substep=self._substep, first=True)))
+            else:
+                self._cmd.append(str('--' + arg))
+                self._cmd.append(str(self.conf.argdict[arg].value))
+
+        #now add exeArgs to end of command
+        for arg in self.exeArgs:
+            if arg in self.conf.argdict:
+                # If we have a list then add each element to our list, else just str() the argument value
+                # Note if there are arguments which need more complex transformations then
+                # consider introducing a special toExeArg() method.
+                if isinstance(self.conf.argdict[arg].value, list):
+                    self._cmd.extend([ str(v) for v in self.conf.argdict[arg].value])
+                else:
+                    self._cmd.append(str(self.conf.argdict[arg].value))
+        
+        msg.info('Updated script arguments: %s' % self._cmd)
+        #TODO output in human readable form, using something like:
+        
+        #from graeme
+        #' '.join([str(item) for item in argdict])
+        #add if type list...
+        
+        #testing in python standalone
+        #test=['athenaMT.py', '--postcommand', 'L2_PROCESS_postCommands.py', '--run-number', '202798', '--event-modifier', 'TrigTransform.PreloadL2', '--use-database', 'DBServer=TRIGGERDBATN:DBSMKey=1111:DBHLTPSKey=3333:DBLV1PSKey=2222', '--number-of-events', '10', '--file', "['test3', 'test6']", '--joboptionsvc-type', 'TrigConf::HLTJobOptionsSvc', '--save-output', "['test4']", '-f', 'test1', '-D']
+        #' '.join(test)
+        #for item in test:
+        #    print item
+        #    print type(item)
+        
+        test1=' '.join(self._cmd)
+        #test1=' '.join([str(item) for item in self._cmd])
+        msg.info('Mark test joining: %s' % (test1))
+        #for testitem in self._cmd:
+        #    #test2=' '.join(item)
+        #    ##test2=' '.join([str(item) for item in testitem])
+        #    #msg.info('Mark test joining: %s %s %s' % (testitem, type(testitem),test2))
+        #    msg.info('Mark test joining: %s %s' % (testitem, type(testitem)))
+                
\ No newline at end of file
diff --git a/HLT/Trigger/TrigTransforms/TrigTransform/python/trigPostRun.py b/HLT/Trigger/TrigTransforms/TrigTransform/python/trigPostRun.py
new file mode 100644
index 00000000000..43c9f1f47f1
--- /dev/null
+++ b/HLT/Trigger/TrigTransforms/TrigTransform/python/trigPostRun.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+# @brief: Post-transform operations for trigger transform
+# @details: Code to carry out operations that are needed after running the transform
+# @author: Mark Stockton
+
+import logging
+msg = logging.getLogger(__name__)
+
+def trigPostRun(trfMT):
+    
+     msg.info('Running post-transform operations')
+    
+     msg.info('Finished running post-transform operations')
+     
+        
+  
\ No newline at end of file
diff --git a/HLT/Trigger/TrigTransforms/TrigTransform/python/trigPreRun.py b/HLT/Trigger/TrigTransforms/TrigTransform/python/trigPreRun.py
new file mode 100644
index 00000000000..87782dcfd6a
--- /dev/null
+++ b/HLT/Trigger/TrigTransforms/TrigTransform/python/trigPreRun.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+# @brief: Pre-transform operations for trigger transform
+# @details: Code to carry out operations that are needed before running the transform
+# @author: Mark Stockton
+
+import logging
+msg = logging.getLogger(__name__)
+
+from TrigTransform.trigUpdateArgs import trigUpdateArgs
+
+def trigPreRun(trfMT):
+    
+     msg.info('Running pre-transform operations')
+    
+     #TODO might be useful to switch off validation if just dumping options, 
+     #     better to only stop post trf validation
+     ##Turn off validation for dump-options
+     #if 'dump-options' in trfMT.argdict:
+     #   trfMT.standardFileValidation=False
+
+     #Do conversion of input arguments into the athenaMT/PT variable names
+     trigUpdateArgs(trfMT.argdict,trfMT.parser)
+     
+     #Perform tasks needed to be done before running
+
+     msg.info('Finished running pre-transform operations')
+     
+        
+  
\ No newline at end of file
diff --git a/HLT/Trigger/TrigTransforms/TrigTransform/python/trigRecoExe.py b/HLT/Trigger/TrigTransforms/TrigTransform/python/trigRecoExe.py
new file mode 100644
index 00000000000..b2cc039c15b
--- /dev/null
+++ b/HLT/Trigger/TrigTransforms/TrigTransform/python/trigRecoExe.py
@@ -0,0 +1,178 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+# @brief: Trigger executor to call base transforms
+# @details: Based on athenaExecutor with some modifications
+# @author: Mark Stockton
+
+import logging
+msg = logging.getLogger("PyJobTransforms." + __name__)
+
+import os
+import re
+import fnmatch
+
+from PyJobTransforms.trfExe import athenaExecutor
+
+#imports for preExecute (the DBRelease and event-count handling below is based on athenaExecutor)
+from PyJobTransforms.trfUtils import asetupReport, unpackDBRelease, setupDBRelease, cvmfsDBReleaseCheck
+import PyJobTransforms.trfEnv as trfEnv
+import PyJobTransforms.trfExceptions as trfExceptions
+from PyJobTransforms.trfExitCodes import trfExit
+     
+class trigRecoExecutor(athenaExecutor):
+    # Trig_reco_tf.py executor
+    # used to change the extra output filenames
+    
+    #preExe is based on athenaExecutor but with key changes:
+    # - removed athenaMP
+    # - removed environment so does not require the noimf notcmalloc flags
+    # - added swap of argument name for runargs file so that athenaHLT reads it in
+    
+    def preExecute(self, input = set(), output = set()):
+        msg.debug('Preparing for execution of {0} with inputs {1} and outputs {2}'.format(self.name, input, output))
+        
+        ## Try to detect AthenaMP mode
+        #self._athenaMP = self._detectAthenaMP()
+        #
+        ## And if this is athenaMP, then set some options for workers and output file report
+        #if self._athenaMP:
+        #    self._athenaMPWorkerTopDir = 'athenaMP-workers-{0}-{1}'.format(self._name, self._substep)
+        #    self._athenaMPFileReport = 'athenaMP-outputs-{0}-{1}'.format(self._name, self._substep)
+        #else:
+        #    self._athenaMPWorkerTopDir = self._athenaMPFileReport = None
+
+
+        # Check we actually have events to process!
+        if (self._inputEventTest and 'skipEvents' in self.conf.argdict and 
+            self.conf.argdict['skipEvents'].returnMyValue(name=self._name, substep=self._substep, first=self.conf.firstExecutor) is not None):
+            msg.debug('Will test for events to process')
+            for dataType in input:
+                inputEvents = self.conf.dataDictionary[dataType].nentries
+                msg.debug('Got {0} events for {1}'.format(inputEvents, dataType))
+                if not isinstance(inputEvents, (int, long)):
+                    msg.warning('Are input events countable? Got nevents={0} so disabling event count check for this input'.format(inputEvents))
+                elif self.conf.argdict['skipEvents'].returnMyValue(name=self._name, substep=self._substep, first=self.conf.firstExecutor) >= inputEvents:
+                    raise trfExceptions.TransformExecutionException(trfExit.nameToCode('TRF_NOEVENTS'),
+                                                                    'No events to process: {0} (skipEvents) >= {1} (inputEvents of {2})'.format(self.conf.argdict['skipEvents'].returnMyValue(name=self._name, substep=self._substep, first=self.conf.firstExecutor), inputEvents, dataType))
+    
+        ## Write the skeleton file and prep athena
+        if self._skeleton is not None:
+            inputFiles = dict()
+            for dataType in input:
+                inputFiles[dataType] = self.conf.dataDictionary[dataType]
+            outputFiles = dict()
+            for dataType in output:
+                outputFiles[dataType] = self.conf.dataDictionary[dataType]
+                
+            # See if we have any 'extra' file arguments
+            for dataType, dataArg in self.conf.dataDictionary.iteritems():
+                if dataArg.io == 'input' and self._name in dataArg.executor:
+                    inputFiles[dataArg.subtype] = dataArg
+                
+            msg.debug('Input Files: {0}; Output Files: {1}'.format(inputFiles, outputFiles))
+            
+            # Get the list of top options files that will be passed to athena (=runargs file + all skeletons)
+            self._topOptionsFiles = self._jobOptionsTemplate.getTopOptions(input = inputFiles, 
+                                                                           output = outputFiles)
+
+        ## Add input/output file information - this can't be done in __init__ as we don't know what our
+        #  inputs and outputs will be then
+        if len(input) > 0:
+            self._extraMetadata['inputs'] = list(input)
+        if len(output) > 0:
+            self._extraMetadata['outputs'] = list(output)
+
+        ## Do we need to run asetup first?
+        asetupString = None
+        if 'asetup' in self.conf.argdict:
+            asetupString = self.conf.argdict['asetup'].returnMyValue(name=self._name, substep=self._substep, first=self.conf.firstExecutor)
+        else:
+            msg.info('Asetup report: {0}'.format(asetupReport()))
+        
+        ## DBRelease configuration
+        dbrelease = dbsetup = None
+        if 'DBRelease' in self.conf.argdict:
+            dbrelease = self.conf.argdict['DBRelease'].returnMyValue(name=self._name, substep=self._substep, first=self.conf.firstExecutor)
+            if dbrelease:
+                # Classic tarball - filename format is DBRelease-X.Y.Z.tar.gz
+                dbdMatch = re.match(r'DBRelease-([\d\.]+)\.tar\.gz', os.path.basename(dbrelease))
+                if dbdMatch:
+                    msg.debug('DBRelease setting {0} matches classic tarball file'.format(dbrelease))
+                    if not os.access(dbrelease, os.R_OK):
+                        msg.warning('Transform was given tarball DBRelease file {0}, but this is not there'.format(dbrelease))
+                        msg.warning('I will now try to find DBRelease {0} in cvmfs'.format(dbdMatch.group(1)))
+                        dbrelease = dbdMatch.group(1)
+                        dbsetup = cvmfsDBReleaseCheck(dbrelease)
+                    else:
+                        # Check if the DBRelease is setup
+                        unpacked, dbsetup = unpackDBRelease(tarball=dbrelease, dbversion=dbdMatch.group(1))
+                        if unpacked:
+                            # Now run the setup.py script to customise the paths to the current location...
+                            setupDBRelease(dbsetup)
+                # For cvmfs we want just the X.Y.Z release string (and also support 'current')
+                else:
+                    dbsetup = cvmfsDBReleaseCheck(dbrelease)
+        
+        # Look for environment updates and prepare the athena command line
+        self._envUpdate = trfEnv.environmentUpdate()
+        #above is needed by _prepAthenaCommandLine, but remove the setStandardEnvironment so doesn't include imf or tcmalloc 
+        #self._envUpdate.setStandardEnvironment(self.conf.argdict)
+        self._prepAthenaCommandLine() 
+        
+        #to get athenaHLT to read in the relevant parts from the runargs file we have to add the -F option
+        self._cmd=['-F runargs.BSRDOtoRAW.py' if x=='runargs.BSRDOtoRAW.py' else x for x in self._cmd]
+                
+        #call athenaExecutor parent as the above overrides what athenaExecutor would have done 
+        super(athenaExecutor, self).preExecute(input, output)
+        
+        # Now we always write a wrapper, because it's very convenient for re-running individual substeps
+        # This will have asetup and/or DB release setups in it
+        # Do this last in this preExecute as the _cmd needs to be finalised
+        msg.info('Now writing wrapper for substep executor {0}'.format(self._name))
+        self._writeAthenaWrapper(asetup=asetupString, dbsetup=dbsetup)
+        msg.info('Athena will be executed in a subshell via {0}'.format(self._cmd))
+            
+    def postExecute(self):
+                
+        #runHLT_standalone.py generates the file expert-monitoring.root
+        #to save on panda it is defined as output via the outputHIST_HLTMONFile argument        
+        
+        #TODO hard-coded default filename!
+        fileName = 'expert-monitoring.root'
+        #check file is created
+        if(os.path.isfile(fileName)):
+            #keep filename if not defined
+            newOutputFileName = 'expert-monitoring.root'   
+            #find transform argument value for name     
+            for arg in self.conf.argdict:
+                if arg == "outputHIST_HLTMONFile":
+                    newOutputFileName = self.conf.argdict[arg].value[0]
+            #rename file
+            msg.info('Renaming %s to %s' % (fileName, newOutputFileName) )        
+            os.rename(fileName, newOutputFileName)
+        else:
+            for arg in self.conf.argdict:
+                if arg == "outputHIST_HLTMONFile":
+                    msg.error('HLTMON argument defined but %s not created' % fileName )
+                    
+        #The following is needed to handle the BS file being written with a different name (or names)
+        #base is from either the tmp value created by the transform or the value entered by the user
+        originalFileArg = self.conf.dataDictionary['BS']
+        fileNameBase = originalFileArg.value[0] + '*'
+        fileNameMatches = []
+        #loop over all files in folder to find matching outputs
+        for file in os.listdir('.'):
+            if fnmatch.fnmatch(file, fileNameBase):
+                fileNameMatches.append(file)
+        #check there are file matches
+        if(len(fileNameMatches)):
+            msg.info('Renaming internal BS arg from %s to %s' % (originalFileArg.value[0], fileNameMatches))
+            originalFileArg.multipleOK = True
+            originalFileArg.value = fileNameMatches
+        else:
+            msg.error('No BS files created with expected name %s' % fileNameBase)
+        
+
+        msg.info('Now run athenaExecutor:postExecute')
+        super(trigRecoExecutor, self).postExecute()
+        
+    
\ No newline at end of file
diff --git a/HLT/Trigger/TrigTransforms/TrigTransform/python/trigUpdateArgs.py b/HLT/Trigger/TrigTransforms/TrigTransform/python/trigUpdateArgs.py
new file mode 100644
index 00000000000..f7a6e97bc6b
--- /dev/null
+++ b/HLT/Trigger/TrigTransforms/TrigTransform/python/trigUpdateArgs.py
@@ -0,0 +1,363 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+# @brief: Argument altering for trigger transform
+# @details: Code to convert trf args into the athenaMT/PT variable names
+# @author: Mark Stockton
+
+####
+#Notes:
+#     
+#Input arguments need to be converted into the athenaMT/PT variable names
+#
+#However some are needed by the base transform and so the switch has to be done after preExecute
+#this applies to the input and output argument names at least        
+#
+#Always use long names for the options to be passed to athenaMT/PT
+#due to how the athenaExecutor is set up
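+#
+#For reference, the intended call order (as wired up in this package) is:
+#   trigPreRun.trigPreRun(trf)        -> calls trigUpdateArgs(trf.argdict, trf.parser) before execution
+#   trigExe.trigExecutor.preExecute() -> calls swapArgs(self.conf.argdict) after the base preExecute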
+
+import logging
+msg = logging.getLogger(__name__)
+
+import PyJobTransforms.trfArgClasses as trfArgClasses
+
+def trigUpdateArgs(inArgDict,parser):
+     #Calls all functions defined below to update the arguments
+     #EXCEPT for the swapArgs which is called from trigExe:preExecute
+          
+     msg.info('Updating input arguments into athenaMT/PT arguments')
+     
+     #edit the database options
+     configDatabase(inArgDict,parser)
+     
+     #if have any post commands create the appropriate JO file
+     if 'post_commands' in inArgDict:
+        postCommand(inArgDict)
+
+     msg.info('Finished updating input arguments into athenaMT/PT arguments')
+     
+def outputArgs(inArgDict,description):
+     #debugging function useful for logging the status of args
+     for arg in inArgDict:
+       msg.debug('Checking args (%s): %s' % (description,arg))
+        
+def swapArgs(inArgDict):
+     #swap any arguments that just change the key name
+
+     msg.info('Swapping input argument names')
+     outputArgs(inArgDict,'before swapArgs')
+     
+     #Create dictionary of arguments that are to be swapped
+     
+     swapDict = {}
+     swapDict['inputBSFile']   = 'file'#can't swap until after athenaExecutor:preExecute
+     swapDict['outputRAWFile'] = 'save-output'
+     swapDict['outputBS_MTFile'] = 'save-output'#can't swap until after athenaExecutor:preExecute
+     swapDict['run_number']    = 'run-number'
+     swapDict['maxEvents']     = 'number-of-events'#TODO need to check not already set
+     
+     ##swapDict['version']   = ''
+     #swapDict['doCTP']   = ''
+     #swapDict['doMUCTPI']   = ''
+     #swapDict['doCopyIn']   = ''
+     #swapDict['doStreaming']   = ''
+     #swapDict['doDummyStreaming']   = ''
+     #swapDict['doRecording']   = ''
+     #swapDict['doL2']   = ''
+     #swapDict['doNotTruncateL2R']   = ''
+     #swapDict['doEF']   = ''
+     #swapDict['doPre']   = ''
+     #swapDict['doPost']   = ''
+     #swapDict['doHLTDebugWrite']   = ''
+     ##swapDict['file_stream']   = ''
+     #swapDict['output_dir']   = ''
+     #swapDict['crash_dir']   = ''
+     #swapDict['histo_dir']   = ''
+     #swapDict['service_class']   = ''
+     #swapDict['file_prefix']   = ''
+     #swapDict['job_id']   = ''
+     #swapDict['jobOptions']   = ''
+     #swapDict['options']   = ''
+     #swapDict['conditions']   = ''
+     #swapDict['filters']   = ''
+     #swapDict['stream_out']   = ''
+     #swapDict['proc_session']   = ''
+     #swapDict['doLVL1Remapping']   = ''
+     #swapDict['enLVL1prescales']   = ''
+     #swapDict['verbose']   = ''
+        
+     #swap simple item names
+     tempDict = {}
+     while inArgDict:
+         key, value = inArgDict.popitem()
+         if key in swapDict:
+             tempkey=key
+             key=swapDict.get(key)
+             msg.debug('Swapping keys: %s to %s' % (tempkey,key))
+         tempDict[key]=value
+
+     #save back into inArgDict
+     while tempDict:
+         key, value = tempDict.popitem()
+         inArgDict[key]=value
+
+     outputArgs(inArgDict,'after swapArgs')
+     msg.info('Finished swapping input argument names')
+       
+     
+def configDatabase(inArgDict,parser):
+    
+     msg.info('Setting up the database options')
+     
+     # Uses: connection, smk, hltps, lvl1ps
+     # Sets: joboptionsvc-type, use-database, event-modifier
+
+     # code below is to reproduce the simple transform output
+     # needs to be altered later to match that from current transforms 
+     if 'smk' in inArgDict and 'hltps' in inArgDict and 'lvl1ps' in inArgDict and 'connection' in inArgDict:
+
+        #arguments remain in dict but won't be used in the final command
+        connection = inArgDict.get('connection')
+        smk = inArgDict.get('smk')
+        hltps = inArgDict.get('hltps')
+        lvl1ps = inArgDict.get('lvl1ps')
+        
+        #Create the new arguments to be used based on the input above
+        inArgDict['joboptionsvc-type']=trfArgClasses.argString('TrigConf::HLTJobOptionsSvc', runarg=True)
+        temp='DBServer=%s:DBSMKey=%s:DBHLTPSKey=%s:DBLV1PSKey=%s' % (connection.value, smk.value, hltps.value, lvl1ps.value)
+        inArgDict['use-database']=trfArgClasses.argString(temp, runarg=True)
+
+        #TODO will need to check that when compiled this is picked up (may need to rename folder)
+        #TODO will need to know if doing L2 or EF
+        #TODO note this was always added in old transforms
+        inArgDict['event-modifier']= trfArgClasses.argString('TrigTransform.PreloadL2', runarg=True)
+        
+        #Output which keys are swapped
+        msg.debug('Replacing keys: connection,smk,hltps,lvl1ps with joboptionsvc-type,use-database,event-modifier')
+     
+     #code below from current transforms not yet in use
+     #TODO need to get strings from the input, e.g. smk isn't defined so needs to be inArgDict['smk']
+
+     #config = {}
+     #if 'jobOptions' in inArgDict:
+     #   config = { 'type':'JOBOPTIONS',         \
+     #              'joboptions':joboptions,     \
+     #              'options'  :options,         \
+     #              'post_commands':'',          \
+     #              'conditions':conditions,     \
+     #              'run_number':str(run_number),\
+     #             }
+     #   msg.info("  trigger configuration from JobOptions %s, with options %s"%(joboptions,options))
+     #elif 'smk' in inArgDict and 'hltps' in inArgDict and 'lvl1ps' in inArgDict and 'connection' in inArgDict:
+     #   config = { 'type':'DB',                  \
+     #              'pairs':{                     \
+     #                     'DBSMKey'   :inArgDict['smk'],      \
+     #                     'DBHLTPSKey':inArgDict['hltps'],    \
+     #                     'DBL1PSKey' :inArgDict['lvl1ps'],   \
+     #                     },                     \
+     #              'post_commands':post_commands,\
+     #              'connection':connection,      \
+     #              'conditions':conditions,      \
+     #              'run_number':str(run_number), \
+     #             }
+     #   msg.info("  trigger configuration from %s, smk %s, hltps %s, lvl1ps %s"%(connection,smk,hltps,lvl1ps))
+     #TODO add extra here and give proper exit
+     #elif 'run_number' in inArgDict and 'connection' in inArgDict:
+     #   trigconf = TrigConf.get(run_number)
+     #   try:
+     #       smkey  = trigconf.SMK()
+     #       hltkey = trigconf.HLT_PS(lumi_block)
+     #       l1key  = trigconf.LVL1_PS(lumi_block)
+     #       msg.info("  FULL PS configuration %s" % trigconf.FULL_HLT_PS())
+     #       if trigconf.FULL_HLT_PS(): 
+     #           hltkey = trigconf.FULL_HLT_PS()
+     #   except:
+     #       msg.error("BatchHLTApps: cannot get the BD configuration for run %d" % run_number)
+     #       msg.error(tigconf.str())
+     #       sys.exit(1)
+     #           
+     #   config = { 'type':'DB',                  \
+     #              'pairs':{                     \
+     #                     'DBSMKey'   :smkey,    \
+     #                     'DBHLTPSKey':hltkey,   \
+     #                     'DBL1PSKey' :l1key,    \
+     #                     },                     \
+     #              'post_commands':post_commands,\
+     #              'connection':connection,      \
+     #              'conditions':conditions,      \
+     #              'run_number':str(run_number), \
+     #             }
+     #   msg.info("  trigger configuration from %s, smk %s, hltps %s, lvl1ps %s"%(connection,smkey,hltkey,l1key))
+     #else:
+     #   Step().cancel("bad configuration for L2/EF")
+
+
+     #TODO: currently disabled
+       #need to check which names need saving
+       #need to make sure that new args are listed as runTime args
+     #save back into inArgDict
+     #while config:
+     #    key, value = config.popitem()
+     #    inArgDict[key]=value
+     
+     msg.info('Finished setting up the database options for athenaMT/PT')
+       
+def postCommand(inArgDict):
+    
+     msg.info('Creating the PostCommands JO file')     
+     
+     # Uses: post_commands
+     # Sets: postcommand
+     # Makes: L2_PROCESS_postCommands.py
+     
+     #TODO need to pass process name correctly
+     #post_command_name = '%s_postCommands.py' % self.name
+     post_command_name = 'L2_PROCESS_postCommands.py'
+     postCommands = file(post_command_name,"w")
+                
+                
+     conUp_text = ['from GaudiPython.Bindings import iProperty\n','\n',]
+                
+     #TODO instead of writing the file should have it in the code already so can find on alxr easier
+     #Simon suggested plugins on 30Aug
+     #--postcommand|-C            Optional python commands executed after jobOption
+     #                        s script or database configuration (defaults to [
+     #                        ])        
+     
+     #TODO can the file be put into the output log?? maybe then it doesn't need to be in alxr
+                
+     #TODO need to work out what to get from conditions in terms of my argDict
+     #for c in self.config['conditions']:
+     #        cond = c.split(',')
+     #        string = "\"\'<prefix>%s</prefix> <tag>%s</tag>\'\"" % (cond[0],cond[1])
+     #        conUp_text += ['iProperty(\'IOVDbSvc\').overrideTags.push_back(%s)\n'%string]
+                
+     dgbMU_text = ['from GaudiPython.Bindings import iProperty\n',
+                   '\n',
+                   'iProperty(\'ByteStreamAddressProviderSvc\').OutputLevel=DEBUG\n',
+                   '\n',
+                   'iProperty(\'ToolSvc.MuCTPIByteStreamTool\').OutputLevel=VERBOSE\n',
+                   'if len(iProperty(\'TrigSteer_L2.Lvl1Converter\').properties())>0:\n',
+                   '    iProperty(\'muFast_900GeV\').OutputLevel   = DEBUG\n',
+                   '    iProperty(\'muFast_900GeV\').MUlvl1INFO    = True\n',
+                   '    iProperty(\'muFast_900GeV\').MUtrackINFO   = True\n',    
+                   '    iProperty(\'muFast_900GeV\').MUroadsINFO   = True\n',
+                   '    iProperty(\'muFast_900GeV\').MUdecoINFO    = True\n',
+                   '    iProperty(\'muFast_900GeV\').MUcontINFO    = True\n',
+                   '    iProperty(\'muFast_900GeV\').MUfitINFO     = True\n',
+                   '    iProperty(\'muFast_900GeV\').MUsagINFO     = True\n',
+                   '    iProperty(\'muFast_900GeV\').MUptINFO      = True\n', 
+                   '    iProperty(\'muFast_900GeV\').MUtgcDecoINFO = True\n',
+                   '\n',
+                   '    iProperty(\'muFast_Muon\').OutputLevel   = DEBUG\n',
+                   '    iProperty(\'muFast_Muon\').MUlvl1INFO    = True\n',
+                   '    iProperty(\'muFast_Muon\').MUtrackINFO   = True\n',    
+                   '    iProperty(\'muFast_Muon\').MUroadsINFO   = True\n',
+                   '    iProperty(\'muFast_Muon\').MUdecoINFO    = True\n',
+                   '    iProperty(\'muFast_Muon\').MUcontINFO    = True\n',
+                   '    iProperty(\'muFast_Muon\').MUfitINFO     = True\n',
+                   '    iProperty(\'muFast_Muon\').MUsagINFO     = True\n',
+                   '    iProperty(\'muFast_Muon\').MUptINFO      = True\n', 
+                   '    iProperty(\'muFast_Muon\').MUtgcDecoINFO = True\n',
+                   '\n',
+                   '    iProperty(\'Lvl2EventLoopMgr\').OutputLevel = DEBUG\n',
+                   '\n',
+                   #'    iProperty(\'ByteStreamAddressProviderSvc\').MuCTPIModuleID=0x0\n',
+                   '    iProperty(\'ServiceMgr.RoIBResultByteStreamTool\').OutputLevel=VERBOSE\n',
+                   '\n',
+                   '    iProperty(\'ServiceMgr.PtLUTSvc\').OutputLevel=DEBUG\n'
+                  ]
+                
+     magDB_text = ['from GaudiPython.Bindings import iProperty\n',
+                   '\n',
+                   '# Add the DCS folder (note the double-use of quotes)\n',
+                   'iProperty(\'IOVDbSvc\').Folders.push_back(\"\'<db>COOLOFL_DCS/COMP200</db> /EXT/DCS/MAGNETS/SENSORDATA\'\")\n',
+                   'iProperty(\'MagFieldAthenaSvc\').UseDCS = True\n',
+                   'iProperty(\'MagFieldAthenaSvc\').NameOfTheSource = \'COOL\'\n\n',
+                  ]
+
+     ignPR_text = ['from GaudiPython.Bindings import iProperty\n',
+                   '\n',
+                   '# This will work for L2\n',
+                   'if len(iProperty(\'TrigSteer_L2.Lvl1Converter\').properties())>0:\n',
+                   '    iProperty(\'TrigSteer_L2.Lvl1Converter\').ignorePrescales = True\n',
+                   '\n',  
+                   '# and this for EF\n',
+                   'if len(iProperty(\'TrigSteer_EF.Lvl2Converter\').properties())>0:\n',
+                   '    iProperty(\'TrigSteer_EF.Lvl2Converter\').ignorePrescales = True\n\n',   
+                  ]
+
+     noPs_check = ['from GaudiPython.Bindings import iProperty\n',
+                   '\n',
+                   '# This will work for L2\n',
+                   '#if len(iProperty(\'TrigSteer_L2.Lvl1Converter\').properties())>0:\n',
+                   '#    iProperty(\'TrigSteer_L2.Lvl1Converter\').checkConfig = False\n',
+                   '\n',
+                   '# and this for EF\n',
+                   'if len(iProperty(\'TrigSteer_EF.Lvl2Converter\').properties())>0:\n',
+                   '    iProperty(\'TrigSteer_EF.Lvl2Converter\').checkConfig = False\n\n',
+                  ]
+
+     hardT_text = ['from GaudiPython.Bindings import iProperty\n',
+                   '\n',
+                   '# This will work for L2\n',
+                   'if len(iProperty(\'TrigSteer_L2\').properties())>0:\n',
+                   '    iProperty(\'TrigSteer_L2\').hardEventTimeout = 1800000000000\n',
+                   '\n',  
+                   '# and this for EF\n',
+                   'if len(iProperty(\'TrigSteer_EF\').properties())>0:\n',
+                   #'    iProperty(\'TrigSteer_EF\').hardEventTimeout = 1800000000000\n\n',   
+                   '    iProperty(\'TrigSteer_EF\').hardEventTimeout = 1800000000000\n\n',
+                  ]
+                
+     costM_text = ['from GaudiPython.Bindings import iProperty\n',
+                   '\n',
+                   'if \'doOperationalInfo\' in iProperty(\'TrigSteer_L2\').properties():\n',
+                   '    iProperty(\'TrigSteer_L2\').doOperationalInfo=1\n',
+                   '\n',
+                   'if \'writeAlways\' in iProperty(\'TrigSteer_L2.TrigCostExecL2\').properties():\n',
+                   '    iProperty(\'TrigSteer_L2.TrigCostExecL2\').writeAlways=True',
+                   '\n',  
+                   'if \'doOperationalInfo\' in iProperty(\'TrigSteer_EF\').properties():\n',
+                   '    iProperty(\'TrigSteer_EF\').doOperationalInfo=1\n',
+                   '\n',
+                   'if \'writeAlways\' in iProperty(\'TrigSteer_EF.TrigCostExecEF\').properties():\n',
+                   '    iProperty(\'TrigSteer_EF.TrigCostExecEF\').writeAlways=True\n',
+                  ]
+     
+     Hndlr_text = ['from GaudiPython.Bindings import iProperty\n',
+                              '# Change core dump handler to avoid gdb hanging when stdout is redirected, see bug 87861\n',
+                              'iProperty(\'CoreDumpSvc\').FatalHandler = 438\n\n',
+                  ]
+                
+     file_text = []
+
+     #if doMagFromDCS:      file_text.extend(magDB_text)
+     #if doIgnorePrescales: file_text.extend(ignPR_text)
+                
+     conf = str(inArgDict['post_commands'])
+             
+     #TODO need to work out what to get from conditions in terms of my argDict
+     #if len(inArgDict['conditions']):    file_text.extend(conUp_text)
+     if conf.find('ignorePrescales') !=-1: file_text.extend(ignPR_text)
+     if conf.find('ignoreOnlineKeys')!=-1: file_text.extend(noPs_check)
+     if conf.find('confMagFromDCS')  !=-1: file_text.extend(magDB_text)
+     if conf.find('setHardTimeout')  !=-1: file_text.extend(hardT_text)
+     if conf.find('debugMuon')       !=-1: file_text.extend(dgbMU_text)
+     if conf.find('costMonitor')     !=-1: file_text.extend(costM_text)
+     if conf.find('CoreHandler')     !=-1: file_text.extend(Hndlr_text)
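+     # Illustrative example (hypothetical value, not a default): with
+     # post_commands='ignorePrescales setHardTimeout costMonitor' the find() checks above
+     # would append ignPR_text, hardT_text and costM_text to the generated job options.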
+
+     print >> postCommands, "".join(file_text)
+     postCommands.close()
+         
+     #swap item name now finished using input values
+     value = inArgDict.pop('post_commands')
+     key='postcommand'
+     value.value=post_command_name
+     msg.debug('Swapping keys: post_commands to %s' % (key))
+     inArgDict[key]=value
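+     # From here on the transform no longer sees 'post_commands'; it instead carries a
+     # 'postcommand' argument whose value is the name of the job options file written above.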
+     
+     msg.info('Finished creating the PostCommands JO file')    
+     
+  
\ No newline at end of file
diff --git a/HLT/Trigger/TrigTransforms/TrigTransform/scripts/Trig_reco_tf.py b/HLT/Trigger/TrigTransforms/TrigTransform/scripts/Trig_reco_tf.py
new file mode 100755
index 00000000000..0d88f7ebab1
--- /dev/null
+++ b/HLT/Trigger/TrigTransforms/TrigTransform/scripts/Trig_reco_tf.py
@@ -0,0 +1,220 @@
+#! /usr/bin/env python
+
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+## Trig_tf_reco.py - based on PyJobTransforms/Reco_tf.py now stored here:
+## https://svnweb.cern.ch/trac/atlasoff/browser/Reconstruction/RecJobTransforms/trunk/scripts
+
+import sys
+import time
+
+import logging
+
+# Setup core logging here
+from PyJobTransforms.trfLogger import msg
+msg.info('logging set in %s' % sys.argv[0])
+
+from PyJobTransforms.transform import transform
+from PyJobTransforms.trfExe import athenaExecutor, DQMergeExecutor, reductionFrameworkExecutor, reductionFrameworkExecutorNTUP
+from PyJobTransforms.trfArgs import addAthenaArguments, addD3PDArguments, addPrimaryDPDArguments, addExtraDPDTypes, addDetectorArguments, addReductionArguments
+from PyJobTransforms.trfDecorators import stdTrfExceptionHandler, sigUsrStackTrace
+from RecJobTransforms.recTransformUtils import addCommonRecTrfArgs, addStandardRecoFiles
+
+import PyJobTransforms.trfArgClasses as trfArgClasses
+
+from TrigTransform.trigRecoExe import trigRecoExecutor
+
+@stdTrfExceptionHandler
+@sigUsrStackTrace
+def main():
+    
+    msg.info('This is %s' % sys.argv[0])
+
+    trf = getTransform()
+    trf.parseCmdLineArgs(sys.argv[1:])
+    trf.execute()
+    trf.generateReport()
+
+    msg.info("%s stopped at %s, trf exit code %d" % (sys.argv[0], time.asctime(), trf.exitCode))
+    sys.exit(trf.exitCode)
+
+def getTransform():
+    executorSet = set()
+        
+    #BSRDOtoRAW is new option for trigger transform
+    #now setup to run athenaHLT, so TODO is to remove:
+    #writeBS BSRDOInput EvtMax from AthenaCommon...  athenaCommonFlags....
+    executorSet.add(trigRecoExecutor(name = 'BSRDOtoRAW', skeletonFile = 'TriggerRelease/runHLT_standalone.py',
+                                     exe = 'athenaHLT.py',
+                                     substep = 'b2r', tryDropAndReload = False,
+                                     inData = ['BS_RDO', 'RDO'], outData = ['BS', 'HIST_HLTMON'], 
+                                     perfMonFile = 'ntuple_BSRDOtoRAW.pmon.gz',
+                                     literalRunargs = ['writeBS = runArgs.writeBS',
+                                                       'BSRDOInput = runArgs.inputBS_RDOFile',
+                                                       'EvtMax = runArgs.maxEvents',
+                                                       'from AthenaCommon.AthenaCommonFlags import athenaCommonFlags',
+                                                       'athenaCommonFlags.BSRDOOutput.set_Value_and_Lock( runArgs.outputBSFile )',
+                                                       'option = {}',
+                                                       'option[\'file\'] = runArgs.inputBS_RDOFile',
+                                                       'option[\'save-output\'] = runArgs.outputBSFile',
+                                                       'option[\'number-of-events\'] = runArgs.maxEvents']))
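+    # Note: the literalRunargs lines are, as the name suggests, written verbatim into the
+    # generated runargs file; they forward writeBS, the input/output file names and the
+    # event count to athenaHLT via the 'option' dictionary.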
+
+    #add default reconstruction steps
+    # eventually to be replaced by:
+    #from RecJobTransforms.recTransformUtils import addRecoSubsteps
+    #addRecoSubsteps(executorSet)
+       
+    # one difference to reco_tf is that NTUP_TRIG has to be added - needs a pyjobtransforms update before the above can be used
+    #NTUP_TRIG is added as it is not available in ATLASP1HLT, but is available in the reco release,
+    #hence it can be used later in an ATLASP1HLT job if switching releases
+
+    executorSet.add(athenaExecutor(name = 'RAWtoESD', skeletonFile = 'RecJobTransforms/skeleton.RAWtoESD_tf.py',
+                                   substep = 'r2e', inData = ['BS', 'RDO'], outData = ['ESD', 'HIST_ESD_INT'], 
+                                   perfMonFile = 'ntuple_RAWtoESD.pmon.gz'))
+    executorSet.add(athenaExecutor(name = 'ESDtoAOD', skeletonFile = 'RecJobTransforms/skeleton.ESDtoAOD_tf.py',
+                                   substep = 'e2a', inData = ['ESD'], outData = ['AOD', 'HIST_AOD_INT'], 
+                                   perfMonFile = 'ntuple_ESDtoAOD.pmon.gz'))
+    executorSet.add(DQMergeExecutor(name = 'DQHistogramMerge', inData = [('HIST_ESD_INT', 'HIST_AOD_INT')], outData = ['HIST']))
+    executorSet.add(athenaExecutor(name = 'ESDtoDPD', skeletonFile = 'PATJobTransforms/skeleton.ESDtoDPD_tf.py',
+                                   substep = 'e2d', inData = ['ESD'], outData = ['NTUP_TRIG'],
+                                   perfMonFile = 'ntuple_ESDtoDPD.pmon.gz'))
+    executorSet.add(athenaExecutor(name = 'AODtoDPD', skeletonFile = 'PATJobTransforms/skeleton.AODtoDPD_tf.py',
+                                   substep = 'a2d', inData = ['AOD', 'EVNT'], outData = ['NTUP_TRIG'],
+                                   perfMonFile = 'ntuple_AODtoDPD.pmon.gz'))
+    executorSet.add(athenaExecutor(name = 'AODtoTAG', skeletonFile = 'RecJobTransforms/skeleton.AODtoTAG_tf.py',
+                                   inData = ['AOD'], outData = ['TAG'],))
+#     executorSet.add(athenaExecutor(name = 'AODtoHIST', skeletonFile = 'RecJobTransforms/skeleton.FROM_PETER.py',
+#                                    inData = ['AOD'], outData = ['HIST_AOD'],))
+    executorSet.add(reductionFrameworkExecutor(name = 'AODtoRED', skeletonFile = 'PATJobTransforms/skeleton.AODtoRED_tf.py',
+                                   substep = 'a2r', inData = ['AOD'], outData = ['DAOD_RED']))
+    executorSet.add(reductionFrameworkExecutorNTUP(name = 'NTUPtoRED', skeletonFile = 'PATJobTransforms/skeleton.NTUPtoRED_tf.py',
+                                   substep = 'n2n', inData = ['NTUP_COMMON'], outData = ['NTUP_RED']))
+    
+    trf = transform(executor = executorSet, description = 'Trigger transform to run HLT_standalone, followed by'
+                    ' general purpose ATLAS reconstruction transform. Input to HLT_standalone is inputBS_RDOFile'
+                    ' with outputs of RDO, ESD, AOD or DPDs. For more details on reco_tf, see:'
+                    ' https://twiki.cern.ch/twiki/bin/viewauth/Atlas/RecoTf')
+    
+    #add arguments as done in reco_tf
+    addAthenaArguments(trf.parser)
+    addDetectorArguments(trf.parser)
+    addCommonRecTrfArgs(trf.parser)
+    addStandardRecoFiles(trf.parser)
+    addPrimaryDPDArguments(trf.parser, transform = trf)
+    addD3PDArguments(trf.parser, transform = trf)
+    addExtraDPDTypes(trf.parser, transform = trf)
+    addReductionArguments(trf.parser, transform = trf)
+
+    #currently ignoring simulation arguments as can't find the package when have below in cmt/requirements:
+    #use SimuJobTransforms SimuJobTransforms-* Simulation
+    
+    # For digi step - make sure we can add the digitisation/simulation arguments
+    # before we add this substep
+#    try:
+#        from SimuJobTransforms.simTrfArgs import addForwardDetTrfArgs, addBasicDigiArgs, addPileUpTrfArgs, addCommonSimDigTrfArgs
+#        addBasicDigiArgs(trf.parser)
+#        addForwardDetTrfArgs(trf.parser)
+#        addPileUpTrfArgs(trf.parser)
+#        #addCommonSimDigTrfArgs(trf.parser)
+#        executorSet.add(athenaExecutor(name = 'HITtoRDO', skeletonFile = 'SimuJobTransforms/skeleton.HITtoRDO.py',
+#                                       substep = 'h2r', tryDropAndReload = False, perfMonFile = 'ntuple.pmon.gz',
+#                                       inData = ['Hits'], outData = ['RDO'],
+#                                       runtimeRunargs = {'LowPtMinbiasHitsFile' : 'runArgs.inputLowPtMinbiasHitsFile',
+#                                                         'HighPtMinbiasHitsFile' : 'runArgs.inputHighPtMinbiasHitsFile',}))
+#        
+#    except ImportError:
+#        msg.warning('Failed to import simulation/digitisation arguments. These substeps will not be available.')
+
+    #now add specific trigger arguments
+    #  putting this last makes them appear last in the help so easier to find
+    addTriggerArgs(trf.parser)
+
+    return trf
+
+
+def addTriggerArgs(parser):
+    # Use arggroup to get these arguments in their own sub-section (of --help)
+    parser.defineArgGroup('Trigger', 'Specific options related to the trigger configuration')
+    
+    #new for trigger transform
+    #now setup to run athenaHLT, so TODO is to remove: testPhysicsV4 and writeBS
+    #TODO: testPhysicsV4 needs deleting as it causes double menu loading, but is left in for now to avoid adding conflicts to the panda tag page
+    parser.add_argument('--testPhysicsV4', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
+                          help='Please do not use this command, to be deleted', group='Trigger')
+    parser.add_argument('--writeBS', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
+                          help='Needed if running BSRDO to BS step (default: True)', group='Trigger', default=trfArgClasses.argBool(True, runarg=True))
+    parser.add_argument('--inputBS_RDOFile', nargs='+', 
+                        type=trfArgClasses.argFactory(trfArgClasses.argBSFile, io='input', runarg=True, type='bs'),
+                        help='Input bytestream file', group='Trigger')
+    #if no outputBSFile name is specified, any further steps will know to use tmp.BS
+    parser.add_argument('--outputBSFile', nargs='+', 
+                        type=trfArgClasses.argFactory(trfArgClasses.argBSFile, io='output', runarg=True, type='bs'),
+                        help='Output bytestream file', group='Trigger')
+    parser.add_argument('--outputHIST_HLTMONFile', nargs='+', 
+                        type=trfArgClasses.argFactory(trfArgClasses.argHISTFile, io='output', runarg=True, countable=False),
+                        help='Output HLTMON file', group='Trigger')    
+    #NTUP_TRIG is added as it is not available in ATLASP1HLT, but is available in the reco release,
+    #hence it can be used later in an ATLASP1HLT job if switching releases
+    parser.add_argument('--outputNTUP_TRIGFile', nargs='+', 
+                        type=trfArgClasses.argFactory(trfArgClasses.argHISTFile, io='output', runarg=True, countable=False), 
+                        help='D3PD output NTUP_TRIG file (can be made in substeps e2d,a2d)', group='Trigger')
+    parser.add_argument('--triggerConfig', nargs='+', metavar='substep:TRIGGERCONFIG',
+                        type=trfArgClasses.argFactory(trfArgClasses.argSubstep, runarg=True),
+                        help='Trigger Configuration String.', group='Trigger')
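+    # Illustrative invocation (file names and event count are placeholders, not defaults):
+    #   Trig_reco_tf.py --inputBS_RDOFile data_BS_RDO.data --outputBSFile out.BS \
+    #                   --outputNTUP_TRIGFile out.NTUP_TRIG.root --maxEvents 10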
+
+
+    
+#below commented out after cleanup to latest reco_tf as it is not believed to be needed
+#    
+# This is copied from Digi_tf, but should be in a loadable module
+#def addDigiArgs(parser):
+#    # Use arggroup to get these arguments in their own sub-section (of --help)
+#    parser.defineArgGroup('Digi', 'Digitisation specific options')
+#    parser.add_argument('--inputHitsFile', nargs='+',
+#                        type=trfArgClasses.argFactory(trfArgClasses.argPOOLFile, io='input', runarg=True, type='hits'),
+#                        help='Input HITS file', group='Digi')
+#    parser.add_argument('--outputRDOFile', nargs='+',
+#                        type=trfArgClasses.argFactory(trfArgClasses.argPOOLFile, io='output', runarg=True, type='rdo'),
+#                        help='Output RDO file', group='Digi')
+#    parser.add_argument('--digiSeedOffset1',
+#                        type=trfArgClasses.argFactory(trfArgClasses.argInt, runarg=True),
+#                        help='Offset for first random seed', group='Digi')
+#    parser.add_argument('--digiSeedOffset2',                                                             
+#                        type=trfArgClasses.argFactory(trfArgClasses.argInt, runarg=True),
+#                        help='Offset for second random seed', group='Digi')
+#    parser.add_argument('--digiRndmSvc',
+#                        type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True), ##STRING CHOICES ARG,
+#                        help='Random Number Service to use.', group='Digi' )
+#    parser.add_argument('--samplingFractionDbTag',
+#                        type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True),
+#                        help='This argument can be used to override the PhysicsList retrieved from the Simulation metadata. This information is used in the LAr sampling fraction data base tag used by LArfSamplG4Phys.', group='Digi')
+#    parser.add_argument('--doAllNoise',
+#                        type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
+#                        help='Overall control of noise simulation during digitization - useful for overlay jobs, will set doCaloNoise, doMuonNoise, doInDetNoise digitizationFlags.', group='Digi')
+#    parser.add_argument('--AddCaloDigi',
+#                        type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
+#                        help='Save Calo Digits too, not just RawChannels.', group='Digi')
+#    parser.add_argument('--triggerConfig',
+#                        type=trfArgClasses.argFactory(trfArgClasses.argSubstep, runarg=True),
+#                        help='Trigger Configuration String.', group='Digi')
+#    parser.add_argument('--DataRunNumber',
+#                        type=trfArgClasses.argFactory(trfArgClasses.argInt, runarg=True),
+#                        help='Override existing run number with this value - deprecated?', group='Digi')
+#    parser.add_argument('--jobNumber',
+#                        type=trfArgClasses.argFactory(trfArgClasses.argInt, runarg=True),
+#                        help='The number of this job in the current RunDependentSimulation task.', group='Digi')
+#    parser.add_argument('--LowPtMinbiasHitsFile',
+#                        type=trfArgClasses.argFactory(trfArgClasses.argPOOLFile, io='input', runarg=True, type='hits', executor=['HITtoRDO']),
+#                        help='Files with low Pt hits', group='Digi')
+#    parser.add_argument('--HighPtMinbiasHitsFile',
+#                        type=trfArgClasses.argFactory(trfArgClasses.argPOOLFile, io='input', runarg=True, type='hits', executor=['HITtoRDO']),
+#                        help='Files with high Pt hits', group='Digi')
+#    parser.add_argument('--numberOfLowPtMinBias',
+#                        type=trfArgClasses.argFactory(trfArgClasses.argFloat, runarg=True),
+#                        help='Number of low Pt minbias events', group='Digi')
+#    parser.add_argument('--numberOfHighPtMinBias',
+#                        type=trfArgClasses.argFactory(trfArgClasses.argFloat, runarg=True),
+#                        help='Number of high Pt minbias events', group='Digi')
+
+if __name__ == '__main__':
+    main()
diff --git a/HLT/Trigger/TrigTransforms/TrigTransform/scripts/Trig_tf_main.py b/HLT/Trigger/TrigTransforms/TrigTransform/scripts/Trig_tf_main.py
new file mode 100644
index 00000000000..813686ea494
--- /dev/null
+++ b/HLT/Trigger/TrigTransforms/TrigTransform/scripts/Trig_tf_main.py
@@ -0,0 +1,165 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+
+# @brief: Trigger transform using PyJobTranforms.
+# @details: Code based on PyJobTransforms/share/Athena_tf.py
+# @author: Mark Stockton
+
+import argparse
+import sys
+import time
+import traceback
+
+import logging
+
+# Setup core logging here
+from PyJobTransforms.trfLogger import msg
+msg.info('logging set in %s' % sys.argv[0])
+
+from PyJobTransforms.trfExitCodes import trfExit
+from PyJobTransforms.transform import transform
+from PyJobTransforms.trfExe import athenaExecutor
+from PyJobTransforms.trfArgs import addAthenaArguments
+from PyJobTransforms.trfDecorators import stdTrfExceptionHandler, sigUsrStackTrace
+
+import PyJobTransforms.trfExceptions as trfExceptions
+import PyJobTransforms.trfArgClasses as trfArgClasses
+
+from TrigTransform.trigPreRun import trigPreRun
+from TrigTransform.trigExe import trigExecutor
+from TrigTransform.trigPostRun import trigPostRun
+
+@stdTrfExceptionHandler
+@sigUsrStackTrace
+def main(): 
+    msg.info('This is %s' % sys.argv[0])
+    
+    #note that inData and outData can not be the same, and must use the same text, ['**'], as the corresponding input**File (output**File) argument
+    trfMT = transform(trfName = 'Trig_trf', executor = trigExecutor(name = 'athena',exe = 'athenaMT.py',exeArgs=['athenaoptsMT'],inData = ['BS'], outData = ['BS_MT']))
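+    # e.g. outData = ['BS_MT'] pairs with the --outputBS_MTFile argument defined in
+    # addTriggerArgs below, matching the '**' text in the output**File naming convention.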
+    addAthenaArguments(trfMT.parser)
+    addTriggerArgs(trfMT.parser)
+    trfMT.parseCmdLineArgs(sys.argv[1:])
+
+    #any debug statements will work from here onwards if using --verbose or --loglevel DEBUG
+    
+    #Convert arg names and carry out operations from any of the options that are to be done before running
+    trigPreRun(trfMT)
+    
+    #Run the transform and generate final report
+    trfMT.execute()
+    trfMT.generateReport()
+
+    #Carry out operations from any of the options that are to be done after running     
+    trigPostRun(trfMT)
+
+    msg.info("%s stopped at %s, trf exit code %d" % (sys.argv[0], time.asctime(), trfMT.exitCode))
+    sys.exit(trfMT.exitCode)
+
+
+def addTriggerArgs(parser):
+    ## Notes:
+    #     Use group='Trig_trf' to get these arguments in their own sub-section (of --help)
+    #     Use runarg=True for commands to be passed on the command line to athenaMT/PT
+    
+    #Options currently implemented for use within the transform
+    parser.defineArgGroup('Trig_trf', 'Trig_trf specific options')
+    parser.add_argument('--inputBSFile', type=trfArgClasses.argFactory(trfArgClasses.argBSFile, io='input', runarg=True, type='bs'),
+                          help='Set the input file name', group='Trig_trf') 
+    parser.add_argument('--outputBS_MTFile', '--outputRAWFile', type=trfArgClasses.argFactory(trfArgClasses.argBSFile, io='output', runarg=True, type='bs'),
+                          help='Set the output file name', group='Trig_trf')
+
+    parser.add_argument('--athenaoptsMT', group = 'Trig_trf', type=trfArgClasses.argFactory(trfArgClasses.argList, splitter=' '), metavar='OPT1 OPT2 OPT3', 
+                        help='Extra options to pass to athenaMT. Options starting with "-" must be given as --athenaoptsMT=\'--opt1 --opt2 ...\'') 
+    parser.add_argument('--athenaoptsPT', group = 'Trig_trf', type=trfArgClasses.argFactory(trfArgClasses.argList, splitter=' '), metavar='OPT1 OPT2 OPT3', 
+                        help='Extra options to pass to athenaPT. Options starting with "-" must be given as --athenaoptsPT=\'--opt1 --opt2 ...\'') 
+
+    parser.add_argument('--connection', type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=False), metavar='STR',
+                          help='Set the connection to the configuration database', group='Trig_trf')
+    parser.add_argument('--smk', type=trfArgClasses.argFactory(trfArgClasses.argInt, runarg=False), metavar='SMK',
+                          help='Set the super master key for the database configuration', group='Trig_trf')
+    parser.add_argument('--lvl1ps', type=trfArgClasses.argFactory(trfArgClasses.argInt, runarg=False), metavar='L1PS',
+                          help='Set the prescales sets for the LVL1', group='Trig_trf')
+    parser.add_argument('--hltps', type=trfArgClasses.argFactory(trfArgClasses.argInt, runarg=False), metavar='HLTPS',
+                          help='Set the prescales sets for the HLT', group='Trig_trf')
+    parser.add_argument('--run_number', type=trfArgClasses.argFactory(trfArgClasses.argInt, runarg=True), metavar='NUM',
+                          help='Set the run number to which the events belong', group='Trig_trf')
+    parser.add_argument('--post_commands', type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True), metavar='STR',
+                          help='Postcommands to change configuration on top of DB configuration', group='Trig_trf')
+    
+    #TODO might be useful, see trigPreRun
+    #parser.add_argument('--dump_options', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
+    #                      help='dumps the current default options to stdout', group='Trig_trf')
+    
+    #TODO implement other options from existing transforms
+
+    #example log level
+    #parser.add_argument('--maxMsgLevel', type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True),
+    #                      help='highest message level to print in athena', group='Trig_trf')
+    
+    #TODO: do we need the transform version number?          
+    #parser.add_argument('--version', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
+    #                      help='show program\'s version number and exit', group='Trig_trf')
+    parser.add_argument('--doCTP', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
+                          help='Force the RoIB data to be extracted from the CTP fragment and attached to the event data', group='Trig_trf')
+    parser.add_argument('--doMUCTPI', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
+                          help='Fix the MUCTPI data', group='Trig_trf')
+    parser.add_argument('--doCopyIn', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
+                          help='Force the copying of the input file into the run dir', group='Trig_trf')
+    parser.add_argument('--doStreaming', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
+                          help='Stream the events after the trigger reprocessing', group='Trig_trf')
+    parser.add_argument('--doDummyStreaming', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
+                          help='Just rename the events out of AthenaPT as if they came from an unknown stream', group='Trig_trf')
+    parser.add_argument('--doRecording', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
+                          help='Record the output files on CASTOR', group='Trig_trf')
+    parser.add_argument('--doL2', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
+                          help='Reprocess the LVL2 with AthenaMT', group='Trig_trf')
+    parser.add_argument('--doNotTruncateL2R', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
+                          help='Raise the limit of the L2 result size to 1200000 words', group='Trig_trf')
+    parser.add_argument('--doEF', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
+                          help='Reprocess the EF with AthenaPT', group='Trig_trf')
+    parser.add_argument('--doPre', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
+                          help='Reprocess the LVL2 with AthenaMT', group='Trig_trf')
+    parser.add_argument('--doPost', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
+                          help='Reprocess the EF with AthenaPT', group='Trig_trf')
+    parser.add_argument('--doHLTDebugWrite', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
+                          help='Set the DEBUG output level for the trigger reprocessing', group='Trig_trf')
+    
+    #parser.add_argument('--file_stream=STR', type=trfArgClasses.argFactory(trfArgClasses., runarg=True),
+    #                      help='Set the input file stream name', group='Trig_trf')
+    
+    parser.add_argument('--output_dir', type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True), metavar='STR',
+                          help='Set the directory name for copying the output files', group='Trig_trf')
+    parser.add_argument('--crash_dir', type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True), metavar='STR',
+                          help='Set the directory name for copying the events that crashed the trigger', group='Trig_trf')
+    parser.add_argument('--histo_dir', type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True), metavar='STR',
+                          help='Set the directory name for copying the output histograms', group='Trig_trf')
+    parser.add_argument('--service_class', type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True), metavar='STR',
+                          help='Set the service class for CASTOR IO', group='Trig_trf')
+    parser.add_argument('--file_prefix', type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True), metavar='STR',
+                          help='Set the prefix for naming the copied output files', group='Trig_trf')
+    parser.add_argument('--job_id', type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True), metavar='STR',
+                          help='Set the jobId that identifies the processing', group='Trig_trf')
+    parser.add_argument('--jobOptions', type=trfArgClasses.argFactory(trfArgClasses.argFile, runarg=True), metavar='FILE',
+                          help='Set the jobOptions to be used for the reprocessing', group='Trig_trf')
+    parser.add_argument('--options', type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True), metavar='STR',
+                          help='Set the options for the jobOptions', group='Trig_trf')
+    parser.add_argument('--conditions', type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True), metavar='STR',
+                          help='List of new \'folder tag\' to be used for the run', group='Trig_trf')
+    parser.add_argument('--filters', type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True), metavar='STR',
+                          help='List of filters to be applied on the stream tag of the accept event', group='Trig_trf')
+    parser.add_argument('--stream_out', type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True), metavar='STR',
+                          help='Specify which event tags have to be streamed out by the Streamer', group='Trig_trf')
+    parser.add_argument('--proc_session', type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True), metavar='STR',
+                          help='Set the processing version', group='Trig_trf')
+    parser.add_argument('--doLVL1Remapping', type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True), metavar='MOD',
+                          help='Set the module to be used for LVL1 remapping', group='Trig_trf')
+    parser.add_argument('--enLVL1prescales', type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True), metavar='MOD',
+                          help='Set the module name to be used for enabling the LVL1 prescales', group='Trig_trf')
+    parser.add_argument('-v', '--verbose', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
+                          help='Enable the printout verbosity', group='Trig_trf')
+    
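+    # Illustrative invocation (hypothetical values; DB keys and file names are placeholders):
+    #   Trig_tf_main.py --inputBSFile input.data --outputBS_MTFile out.data \
+    #     --connection TRIGGERDB --smk 123 --lvl1ps 45 --hltps 67 --post_commands 'ignorePrescales'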
+    
+
+if __name__ == '__main__':
+    main()
\ No newline at end of file
diff --git a/HLT/Trigger/TrigTransforms/TrigTransform/share/Trig_tf_simple.py b/HLT/Trigger/TrigTransforms/TrigTransform/share/Trig_tf_simple.py
new file mode 100755
index 00000000000..2524003c4d5
--- /dev/null
+++ b/HLT/Trigger/TrigTransforms/TrigTransform/share/Trig_tf_simple.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python
+
+from optparse import OptionParser
+import subprocess
+import shlex
+
+parser = OptionParser()
+
+parser.add_option("--inputBSFile",
+                  dest    = "inputfile",
+                  metavar = "FILE",
+                  help    = "Set the input file name")
+
+parser.add_option("--outputRAWFile",
+                  dest    = "outputfile",
+                  metavar = "FILE",
+                  help    = "Set the output file name")
+
+parser.add_option("--connection",
+                  dest    = "connection",
+                  metavar = "STR",
+                  help    = "Set the connection to the configuration database")
+
+parser.add_option("--smk",
+                  dest    = "smk",
+                  metavar = "SMK",
+                  help    = "Set the super master key for the database configuration")
+
+parser.add_option("--lvl1ps",
+                  dest    = "lvl1ps",
+                  metavar = "L1PS",
+                  help    = "Set the prescale key for the LVL1")
+
+parser.add_option("--hltps",
+                  dest    = "hltps",
+                  metavar = "HLTPS",
+                  help    = "Set the prescale key for the HLT")
+
+(options, args) = parser.parse_args()
+
+com = "athenaMT.py"
+#com=""
+com += " -f %s" % options.inputfile
+com += " -o %s" % options.outputfile
+com += " -J TrigConf::HLTJobOptionsSvc"
+database = "DBServer=%s:DBSMKey=%s:DBHLTPSKey=%s:DBLV1PSKey=%s" % (options.connection, options.smk, options.hltps, options.lvl1ps)
+com += " -b \"%s\"" % database
+
+args = shlex.split(com)
+
+temp = subprocess.Popen(["which", "athenaMT.py"],stdout=subprocess.PIPE )
+path = temp.communicate()[0].strip()
+
+temp = ["python", path]
+temp.extend(args[1:])
+
+print com
+print args
+print temp
+
+#this does not work (invoking athenaMT.py directly), so it is left commented out
+#process = subprocess.Popen(args)
+
+#this works
+process = subprocess.Popen(temp)
+process.wait()
+print process.returncode
-- 
GitLab