diff --git a/HLT/Trigger/TrigTransforms/TrigTransform/doc/Trig_tf.txt b/HLT/Trigger/TrigTransforms/TrigTransform/doc/Trig_tf.txt
index e0152a0e6305ab2cf532c42020a7396bfbf9e6d2..e39a836744d10dbb012fa6e5a1573d5bfa610299 100644
--- a/HLT/Trigger/TrigTransforms/TrigTransform/doc/Trig_tf.txt
+++ b/HLT/Trigger/TrigTransforms/TrigTransform/doc/Trig_tf.txt
@@ -92,7 +92,6 @@ scripts/Trig_reco_tf.py
   save-output = outputBSFile
   number-of-events = maxEvents
   skip-events = skipEvents
-  dump-options = dumpOptions
   precommand(aka -c) = precommand
 
   database options can't have hyphens (ATLASJT-229) so names are different to athenaHLT
@@ -132,7 +131,7 @@ scripts/Trig_reco_tf.py
     --db-smkey 11 --db-hltpskey 7 --db-extra \"{'lvl1key': 4}\" \
     -c \"rerunLVL1=True;markTest=True\" " \
     --ignoreErrors="True" --outputHIST_HLTMONFile="HIST_HLTMON.04854087._000852.pool.root.1" --runNumber="212967" --outputBSFile="temp.BS" \
-    --dumpOptions=True | tee log.txt
+    | tee log.txt
 
   New example using db arguments:
   Trig_reco_tf.py --inputBS_RDOFile=root://eosatlas.cern.ch//eos/atlas/atlascerngroupdisk/trig-daq/validation/test_data/data12_8TeV.00212967.physics_eb_zee_zmumu_cc._0001.data \
@@ -140,7 +139,7 @@ scripts/Trig_reco_tf.py
     --DBsmkey 11 --DBhltpskey 7 --DBextra "{'lvl1key': 4}" \
     --precommand "rerunLVL1=True;markTest=True" \
     --ignoreErrors="True" --outputHIST_HLTMONFile="HIST_HLTMON.04854087._000852.pool.root.1" --runNumber="212967" --outputBSFile="temp.BS" \
-    --dumpOptions=True | tee log.txt
+    | tee log.txt
 
 
   #Older documentation 
diff --git a/HLT/Trigger/TrigTransforms/TrigTransform/python/trigRecoExe.py b/HLT/Trigger/TrigTransforms/TrigTransform/python/trigRecoExe.py
index 8726c20d3edd14120f200da041ed11359594acaa..aa89d8f82e7ee031759ba4562880065d4fb615ed 100644
--- a/HLT/Trigger/TrigTransforms/TrigTransform/python/trigRecoExe.py
+++ b/HLT/Trigger/TrigTransforms/TrigTransform/python/trigRecoExe.py
@@ -12,16 +12,17 @@ msg = logging.getLogger("PyJobTransforms." + __name__)
 import os
 import fnmatch
 import re
+import subprocess
 
 from PyJobTransforms.trfExe import athenaExecutor
 
 #imports for preExecute
-from PyJobTransforms.trfUtils import asetupReport, cvmfsDBReleaseCheck
+from PyJobTransforms.trfUtils import asetupReport, cvmfsDBReleaseCheck, lineByLine
 import PyJobTransforms.trfEnv as trfEnv
 import PyJobTransforms.trfExceptions as trfExceptions
 from PyJobTransforms.trfExitCodes import trfExit as trfExit
 import TrigTransform.dbgAnalysis as dbgStream
-from TrigTransform.trigTranslate import writeTranslate as writeTranslate
+from TrigTransform.trigTranslate import getTranslated as getTranslated
 
 
 class trigRecoExecutor(athenaExecutor):
@@ -123,18 +124,13 @@ class trigRecoExecutor(athenaExecutor):
         #self._envUpdate.setStandardEnvironment(self.conf.argdict)
         self._prepAthenaCommandLine() 
         
-        #to get athenaHLT to read in the relevant parts from the runargs file we have to add the -F option
+        #to get athenaHLT to read in the relevant parts from the runargs file we have to translate them
         if 'athenaHLT' in self._exe:
-            self._cmd=['-F runtranslate.BSRDOtoRAW.py' if x=='runargs.BSRDOtoRAW.py' else x for x in self._cmd]
-            
-            # write runTranslate file to be used by athenaHLT
-            writeTranslate('runtranslate.BSRDOtoRAW.py', self.conf.argdict, name=self._name, substep=self._substep, first=self.conf.firstExecutor, output = outputFiles)
-            
-            #instead of running athenaHLT we can dump the options it has loaded
-            #note the -D needs to go after the -F in the command
-            if 'dumpOptions' in self.conf.argdict:
-                self._cmd=['-F runtranslate.BSRDOtoRAW.py -D' if x=='-F runtranslate.BSRDOtoRAW.py' else x for x in self._cmd]
-            
+            self._cmd.remove('runargs.BSRDOtoRAW.py')
+            # get list of translated arguments to be used by athenaHLT
+            optionList = getTranslated(self.conf.argdict, name=self._name, substep=self._substep, first=self.conf.firstExecutor, output = outputFiles)
+            self._cmd.extend(optionList)
+
             #Run preRun step debug_stream analysis if debug_stream=True
             if 'debug_stream' in self.conf.argdict:
                 inputFiles = dict()
@@ -199,22 +195,67 @@ class trigRecoExecutor(athenaExecutor):
             
     def postExecute(self):
                 
+        #Adding check for HLTMPPU.*Child Issue in the log file
+        #   Throws an error message if there so we catch that the child died
+        #   Also sets the return code of the mother process to mark the job as failed
+        #   Is based on trfValidation.scanLogFile
+        log = self._logFileName
+        msg.debug('Now scanning logfile {0}'.format(log))
+        myGen = iter([])  # (line, lineNumber) generator; empty fallback so the loop below is a no-op if the log cannot be opened
+        try:
+            myGen = lineByLine(log, substepName=self._substep)
+        except IOError as e:
+            msg.error('Failed to open transform logfile {0}: {1:s}'.format(log, e))
+        for line, lineCounter in myGen:
+            # Check to see if any of the hlt children had an issue
+            if 'Child Issue' in line:
+                try:
+                    signal = int((re.search('signal ([0-9]+)', line)).group(1))
+                except AttributeError:
+                    #signal not found in message, so return 1 to highlight failure
+                    signal = 1
+                msg.error('Detected issue with HLTChild, setting mother return code to %s' % (signal) )
+                self._rc = signal
+
         msg.info("Check for expert-monitoring.root file")
-        #the BS-BS step generates the file expert-monitoring.root
+        #the BS-BS step generates the files:
+        #   expert-monitoring.root (from mother process)
+        #   athenaHLT_workers/*/expert-monitoring.root (from child processes)        
         #to save on panda it needs to be renamed via the outputHIST_HLTMONFile argument
         expectedFileName = 'expert-monitoring.root'
-        #first check argument is in dict
-        if 'outputHIST_HLTMONFile' in self.conf.argdict:
-             #check file is created
-             if(os.path.isfile(expectedFileName)):
-                 msg.info('Renaming %s to %s' % (expectedFileName, self.conf.argdict['outputHIST_HLTMONFile'].value[0]) ) 
-                 try:
-                      os.rename(expectedFileName, self.conf.argdict['outputHIST_HLTMONFile'].value[0])
-                 except OSError, e:
-                      raise trfExceptions.TransformExecutionException(trfExit.nameToCode('TRF_OUTPUT_FILE_ERROR'),
-                                    'Exception raised when renaming {0} to {1}: {2}'.format(expectedFileName, self.conf.argdict['outputHIST_HLTMONFile'].value[0], e))
-             else:
-                 msg.error('HLTMON argument defined %s but %s not created' % (self.conf.argdict['outputHIST_HLTMONFile'].value[0], expectedFileName ))
+
+        #first check athenaHLT step actually completed
+        if self._rc != 0:
+            msg.info('HLT step failed (with status %s) so skip HIST_HLTMON filename check' % self._rc)
+        #next check argument is in dictionary as a requested output
+        elif 'outputHIST_HLTMONFile' in self.conf.argdict:
+
+            #rename the mother file
+            expectedMotherFileName = 'expert-monitoring-mother.root'
+            if(os.path.isfile(expectedFileName)):
+                msg.info('Renaming %s to %s' % (expectedFileName, expectedMotherFileName) )
+                try:
+                    os.rename(expectedFileName, expectedMotherFileName)
+                except OSError as e:
+                    raise trfExceptions.TransformExecutionException(trfExit.nameToCode('TRF_OUTPUT_FILE_ERROR'),
+                        'Exception raised when renaming {0} to {1}: {2}'.format(expectedFileName, expectedMotherFileName, e))
+            else:
+                msg.error('HLTMON argument defined but mother %s not created' % (expectedFileName ))
+
+            #merge worker files
+            expectedWorkerFileName = 'athenaHLT_workers/athenaHLT-01/' + expectedFileName
+            if(os.path.isfile(expectedWorkerFileName) and os.path.isfile(expectedMotherFileName)):
+                msg.info('Merging worker and mother %s files to %s' % (expectedFileName, self.conf.argdict['outputHIST_HLTMONFile'].value[0]) )
+                try:
+                    #have checked that at least one worker file exists
+                    cmd = 'hadd ' + self.conf.argdict['outputHIST_HLTMONFile'].value[0] + ' athenaHLT_workers/*/expert-monitoring.root expert-monitoring-mother.root'
+                    subprocess.call(cmd, shell=True)
+                except OSError as e:
+                    raise trfExceptions.TransformExecutionException(trfExit.nameToCode('TRF_OUTPUT_FILE_ERROR'),
+                        'Exception raised when merging worker and mother {0} files to {1}: {2}'.format(expectedFileName, self.conf.argdict['outputHIST_HLTMONFile'].value[0], e))
+            else:
+                msg.error('HLTMON argument defined %s but worker or mother %s not created' % (self.conf.argdict['outputHIST_HLTMONFile'].value[0], expectedFileName ))
+
         else:
             msg.info('HLTMON argument not defined so skip %s check' % expectedFileName)
         
@@ -223,7 +264,9 @@ class trigRecoExecutor(athenaExecutor):
         msg.info("Search for created BS files, and rename if single file found")
         #The following is needed to handle the BS file being written with a different name (or names)
         #base is from either the tmp value created by the transform or the value entered by the user
-        if 'BS' in self.conf.dataDictionary:
+        if self._rc != 0:
+            msg.info('HLT step failed (with status %s) so skip BS filename check' % self._rc)
+        elif 'BS' in self.conf.dataDictionary:
             argInDict = self.conf.dataDictionary['BS']
             #create expected string by taking only some of input
             # removes uncertainty of which parts of the filename are used by athenaHLT
@@ -280,7 +323,9 @@ class trigRecoExecutor(athenaExecutor):
             if "outputHIST_DEBUGSTREAMMONFile" in self.conf.argdict:
                 fileNameDbg= self.conf.argdict["outputHIST_DEBUGSTREAMMONFile"].value                
                 msg.info('outputHIST_DEBUGSTREAMMONFile argument is {0}'.format(fileNameDbg) )
-                
+
+            #TODO add merging of mother and child debug files
+
             if(os.path.isfile(fileNameDbg[0])):
                 #keep filename if not defined
                 msg.info('Will use file created  in PreRun step {0}'.format(fileNameDbg) )
diff --git a/HLT/Trigger/TrigTransforms/TrigTransform/python/trigTranslate.py b/HLT/Trigger/TrigTransforms/TrigTransform/python/trigTranslate.py
index b7573174114185fbfe6f84de88d3b8fe5b2277e2..7c9793c62cf7138fea410deabc3e31fa55a99ed5 100644
--- a/HLT/Trigger/TrigTransforms/TrigTransform/python/trigTranslate.py
+++ b/HLT/Trigger/TrigTransforms/TrigTransform/python/trigTranslate.py
@@ -30,16 +30,13 @@ def getOption(runArgs,name, substep, first, output):
     tfToAthenaHLT['runNumber'] = 'run-number'
     tfToAthenaHLT['precommand'] = 'precommand'
     tfToAthenaHLT['postcommand'] = 'postcommand'
-    tfToAthenaHLT['eventmodifier'] = 'event-modifier'
-    tfToAthenaHLT['jobOptionSvcType'] = 'joboptionsvc-type'
     tfToAthenaHLT['useDB'] = 'use-database'
-    tfToAthenaHLT['DBtype'] = 'db-type'
     tfToAthenaHLT['DBserver'] = 'db-server'
-    tfToAthenaHLT['DBsmkey'] = 'db-smkey'
-    tfToAthenaHLT['DBhltpskey'] = 'db-hltpskey'
+    tfToAthenaHLT['DBsmkey'] = 'smk'
+    tfToAthenaHLT['DBhltpskey'] = 'hltpsk'
+    tfToAthenaHLT['DBl1pskey'] = 'l1psk'
     ## Some arguments need specific code so aren't included here
     # save-output = outputBSFile (or filled by default in multi step tf)
-    # db-extra = DBextra
     
     # Output needs the string not a list 
     # (as in PyJobTransforms/python/trfJobOptions.py)
@@ -51,16 +48,18 @@ def getOption(runArgs,name, substep, first, output):
     else:
         msg.warning('No BS filename defined, athenaHLT will not save the output')
     
-    #DBextra needs to be a dictionary not a string
-    if 'DBextra' in runArgs:
-        option['db-extra'] = ast.literal_eval(runArgs['DBextra'].value)
-        if 'DBlvl1pskey' in runArgs:
-            if 'lvl1key' in option['db-extra']:
-                raise trfExceptions.TransformArgException(trfExit.nameToCode('TRF_ARG_ERROR'), 'Multiple definition of lvl1key as both --DBlvl1pskey %s and --DBextra %s' % (runArgs['DBlvl1pskey'],runArgs['DBextra'] ))
-            else:
-                option['db-extra']['lvl1key'] = runArgs['DBlvl1pskey']
-    if 'DBlvl1pskey' in runArgs and 'db-extra' not in option:
-        option['db-extra'] = {'lvl1key': runArgs['DBlvl1pskey']}
+    #TODO (ATR-11854) l1psk, hltpsk, smk should be compared to triggerConfig
+    #example below based on old comparison but needs work to retrieve keys and do comparisons of all three keys
+    #if 'triggerConfig' in runArgs:
+        #retrieve keys from triggerConfig string
+        #if 'lvl1key' in triggerConfig:
+            #if 'DBlvl1pskey' in runArgs:
+            #add check to compare DBlvl1pskey to lvl1key from triggerConfig
+            #    raise trfExceptions.TransformArgException(trfExit.nameToCode('TRF_ARG_ERROR'), 'Multiple definition of lvl1key as both --DBlvl1pskey %s and --triggerConfig %s' % (runArgs['DBlvl1pskey'],runArgs['triggerConfig'] ))
+            #else:
+            #    set lvl1key in triggerConfig
+    #if 'DBlvl1pskey' in runArgs and 'triggerConfig' not in option:
+        #    set lvl1key in triggerConfig
     
     # below based on PyJobTransforms/python/trfJobOptions.py
     for k in set(tfToAthenaHLT) & set(runArgs):
@@ -73,7 +72,8 @@ def getOption(runArgs,name, substep, first, output):
             if myValue is not None:
                 option[tfToAthenaHLT[k]]=myValue
         else:
-            option[tfToAthenaHLT[k]]=v
+            #return just the value to avoid returning all the properties (e.g. isRunArg=True)
+            option[tfToAthenaHLT[k]]=v.value
     
     # Now make sure that if we did not add maxEvents then we set this to -1, which
     # avoids some strange defaults that only allow 5 events to be processed
@@ -81,24 +81,22 @@ def getOption(runArgs,name, substep, first, output):
         option[tfToAthenaHLT['maxEvents']] = -1
         msg.info('maxEvents not defined, explicitly set to -1')
     
-    # TODO - currently skips all the extra, literal, etc options
-    # as these are not saved in options no point in being saved to file
+    # Skips all the other runArgs (extra, literal, etc) 
+    # as these are for running with athena not athenaHLT
     
     return option
-    
-#write option dict to runTranslate file
-def writeTranslate(runTranslate,runArgs,name,substep,first,output):
-    msg.info('Writing options to file \"%s\"' % runTranslate)
-        
-    option = getOption(runArgs, name, substep, first, output)  
-        
+
+#return option list to be used as command line for athenaHLT jobs
+#In Run2 this was handled by producing runTranslate file which is no longer needed
+def getTranslated(runArgs,name,substep,first,output):
+    option = getOption(runArgs, name, substep, first, output)
     msg.info('Options set to: \"%s\":' % option)
-    
-    with open(runTranslate, 'w') as runTranslateFile:
-        try:
-            print >>runTranslateFile, os.linesep, "option = ", option
-        except (IOError, OSError) as e:
-            errMsg = 'Got an error when writing JO template {0}: {1}'.format(runTranslateFile, e)
-            msg.error(errMsg)
-            raise trfExceptions.TransformExecutionException(trfExit.nameToCode('TRF_EXEC_RUNARGS_ERROR'), errMsg)
+    optionList = list()
+    for k,v in option.items():
+        item = "--{0}={1}"
+        if isinstance(v, list):
+            v = ''.join(v)
+        optionList.append(item.format(k,v))
+
+    return optionList
 
diff --git a/HLT/Trigger/TrigTransforms/TrigTransform/scripts/TrigMT_reco_tf.py b/HLT/Trigger/TrigTransforms/TrigTransform/scripts/TrigMT_reco_tf.py
index 3f093b78849f2ad62ca1cd27c97e00099aecc0c5..188330c12d3a97588c04208e065169e48e8dfa92 100755
--- a/HLT/Trigger/TrigTransforms/TrigTransform/scripts/TrigMT_reco_tf.py
+++ b/HLT/Trigger/TrigTransforms/TrigTransform/scripts/TrigMT_reco_tf.py
@@ -2,8 +2,9 @@
 
 # Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
 
-## TrigMT_tf_reco.py - based on PyJobTransforms/Reco_tf.py now stored here:
-## https://svnweb.cern.ch/trac/atlasoff/browser/Reconstruction/RecJobTransforms/trunk/scripts
+## TrigMT_reco_tf.py
+## - based on PyJobTransforms/Reco_tf.py
+## Documentation on the twiki https://twiki.cern.ch/twiki/bin/viewauth/Atlas/TriggerTransform
 
 import sys
 import time
@@ -43,9 +44,9 @@ def getTransform():
     executorSet = set()
         
     #BSRDOtoRAW is new option for trigger transform
-    #now setup to run athenaHLT, so TODO is to remove:
-    #writeBS BSRDOInput EvtMax from AthenaCommon...  athenaCommonFlags....
-    executorSet.add(trigRecoExecutor(name = 'BSRDOtoRAW', skeletonFile = 'TrigUpgradeTest/full_menu.py', 
+    #runs primarily using athenaHLT
+    #literalRunargs used for when running with athena
+    executorSet.add(trigRecoExecutor(name = 'BSRDOtoRAW', skeletonFile = 'TrigUpgradeTest/full_menu.py',
                                      exe = 'athenaHLT.py',
                                      substep = 'b2r', tryDropAndReload = False,
                                      inData = ['BS_RDO', 'RDO'], outData = ['BS', 'HIST_HLTMON','HIST_DEBUGSTREAMMON'], 
@@ -92,8 +93,6 @@ def getTransform():
                                    perfMonFile = 'ntuple_AODtoDPD.pmon.gz'))
     executorSet.add(athenaExecutor(name = 'AODtoTAG', skeletonFile = 'RecJobTransforms/skeleton.AODtoTAG_tf.py',
                                    inData = ['AOD'], outData = ['TAG'],))
-#     executorSet.add(athenaExecutor(name = 'AODtoHIST', skeletonFile = 'RecJobTransforms/skeleton.FROM_PETER.py',
-#                                    inData = ['AOD'], outData = ['HIST_AOD'],))
     executorSet.add(reductionFrameworkExecutor(name = 'AODtoRED', skeletonFile = 'PATJobTransforms/skeleton.AODtoRED_tf.py',
                                    substep = 'a2r', inData = ['AOD'], outData = ['DAOD_RED']))
     executorSet.add(reductionFrameworkExecutorNTUP(name = 'NTUPtoRED', skeletonFile = 'PATJobTransforms/skeleton.NTUPtoRED_tf.py',
@@ -101,7 +100,8 @@ def getTransform():
     
     trf = transform(executor = executorSet, description = 'Trigger transform to run HLT_standalone, followed by'
                     ' general purpose ATLAS reconstruction transform. Input to HLT_Standalone is inputBS_RDOFile'
-                    ' with outputs of RDO, ESD, AOD or DPDs. For more details on reco_tf, see:'
+                    ' with outputs of RDO, ESD, AOD or DPDs. For more details see:'
+                    ' https://twiki.cern.ch/twiki/bin/viewauth/Atlas/TriggerTransform or for reco_tf, see:'
                     ' https://twiki.cern.ch/twiki/bin/viewauth/Atlas/RecoTf')
     
     #add arguments as donw in reco_tf
@@ -133,7 +133,7 @@ def getTransform():
 #        
 #    except ImportError:
 #        msg.warning('Failed to import simulation/digitisation arguments. These substeps will not be available.')
-    
+
     #now add specific trigger arguments
     #  putting this last makes them appear last in the help so easier to find
     addTriggerArgs(trf.parser)
@@ -147,13 +147,11 @@ def addTriggerArgs(parser):
     # Use arggroup to get these arguments in their own sub-section (of --help)
     parser.defineArgGroup('Trigger', 'Specific options related to the trigger configuration used for reprocessing')
     
-    #new for trigger transform
-    #now setup to run athenaHLT, so TODO is to remove:testPhysicsV4 and writeBS
-    #TODO: testPhysicsV4 needs deleting as causes double menu loading but left in for now to not add conflicts to the panda tag page
-    parser.add_argument('--testPhysicsV4', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
-                          help='Please do not use this command, to be deleted', group='Trigger')
+    #arguments specific for trigger transform
+    #writeBS used in literal arguments when running HLT step in athena (not athenaHLT)
     parser.add_argument('--writeBS', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
                           help='Needed if running BSRDO to BS step in athena (default: True)', group='Trigger', default=trfArgClasses.argBool(True, runarg=True))
+    #input BS file for the HLT step (name just to be unique identifier)
     parser.add_argument('--inputBS_RDOFile', nargs='+', 
                         type=trfArgClasses.argFactory(trfArgClasses.argBSFile, io='input', runarg=True, type='bs'),
                         help='Input bytestream file', group='Trigger')
@@ -161,43 +159,38 @@ def addTriggerArgs(parser):
     parser.add_argument('--outputBSFile', nargs='+', 
                         type=trfArgClasses.argFactory(trfArgClasses.argBSFile, io='output', runarg=True, type='bs'),
                         help='Output bytestream file', group='Trigger')
+    #HLT out histogram file, if defined renames expert-monitoring file that is produced automatically
     parser.add_argument('--outputHIST_HLTMONFile', nargs='+', 
                         type=trfArgClasses.argFactory(trfArgClasses.argHISTFile, io='output', runarg=True, countable=False),
                         help='Output HLTMON file', group='Trigger')    
-    #NTUP_TRIG is added as is not available in ATLASP1HLT, but is available in the reco release
-    #hence can be used later in a ATLASP1HLT job if switch releases
+    #NTUP_TRIG is used for COST monitoring - used in the reco release
     parser.add_argument('--outputNTUP_TRIGFile', nargs='+', 
                         type=trfArgClasses.argFactory(trfArgClasses.argHISTFile, io='output', runarg=True, countable=False), 
                         help='D3PD output NTUP_TRIG file (can be made in substeps e2d,a2d)', group='Trigger')
-    #NTUP_COST is added as is not available in ATLASP1HLT, but is available in the reco release
-    #hence can be used later in a ATLASP1HLT job if switch releases
+    #NTUP_COST is used for COST monitoring - used in the reco release
     parser.add_argument('--outputNTUP_TRIGCOSTFile', nargs='+', 
                         type=trfArgClasses.argFactory(trfArgClasses.argHISTFile, io='output', runarg=True, countable=False), 
                         help='D3PD output NTUP_TRIGCOST file', group='Trigger')
-    #NTUP_RATE is added as is not available in ATLASP1HLT, but is available in the reco release
-    #hence can be used later in a ATLASP1HLT job if switch releases
+    #NTUP_RATE is used for COST monitoring - used in the reco release
     parser.add_argument('--outputNTUP_TRIGRATEFile', nargs='+', 
                         type=trfArgClasses.argFactory(trfArgClasses.argHISTFile, io='output', runarg=True, countable=False), 
                         help='D3PD output NTUP_TRIGRATE file', group='Trigger')
-    #NTUP_TRIGEBWGHT is added as is not available in ATLASP1HLT, but is available in the reco release
-    #hence can be used later in a ATLASP1HLT job if switch releases
+    #NTUP_TRIGEBWGHT is used for COST monitoring - used in the reco release
     parser.add_argument('--outputNTUP_TRIGEBWGHTFile', nargs='+', 
                         type=trfArgClasses.argFactory(trfArgClasses.argHISTFile, io='output', runarg=True, countable=False), 
                         help='D3PD output NTUP_TRIGEBWGHT file', group='Trigger')
+    #Trigger Configuration String as used in reco Steps
     parser.add_argument('--triggerConfig', nargs='+', metavar='substep=TRIGGERCONFIG',
                         type=trfArgClasses.argFactory(trfArgClasses.argSubstep, runarg=True, separator='='),
                         help='Trigger Configuration String. '
 			'N.B. This argument uses EQUALS (=) to separate the substep name from the value.', group='Trigger')
-    parser.add_argument('--dumpOptions', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
-                        help='Only for testing: '
-                        'dump to stdout the options athenaHLT has received. '
-                        'N.B. option consistency is not checked.', group='Trigger')
+    #precommand for athenaHLT aka -c
     parser.add_argument('--precommand', nargs='+', type=trfArgClasses.argFactory(trfArgClasses.argList, runarg=True),
                         help='precommand for athenaHLT aka -c', group='Trigger')
+    #postcommand for athenaHLT aka -C
     parser.add_argument('--postcommand', nargs='+', type=trfArgClasses.argFactory(trfArgClasses.argList, runarg=True),
                         help='postcommand for athenaHLT aka -C', group='Trigger')
-    parser.add_argument('--eventmodifier', nargs='+', type=trfArgClasses.argFactory(trfArgClasses.argList, runarg=True),
-                        help='event-modifier for athenaHLT aka -Z', group='Trigger')
+    #For prodsys to make sure uses inputBS_RDOFile rather than inputBSFile when running the b2r step
     parser.add_argument('--prodSysBSRDO', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
                         help='For prodsys to make sure uses inputBS_RDOFile rather than inputBSFile when running the b2r step', group='Trigger')
 
@@ -206,24 +199,16 @@ def addTriggerDBArgs(parser):
     # Use arggroup to get these arguments in their own sub-section (of --help)
     parser.defineArgGroup('TriggerDB', 'Specific options related to the trigger DB')
     
-    #TODO add short forms and help messages
-        
-    parser.add_argument('--jobOptionSvcType', type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True),
-                        help='jobOptionSvcType aka -J', group='TriggerDB')
     parser.add_argument('--useDB', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
-                        help='useDB', group='TriggerDB')
-    parser.add_argument('--DBtype', type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True),
-                        help='DBtype', group='TriggerDB')
+                        help='read from DB for athenaHLT aka use-database', group='TriggerDB')
     parser.add_argument('--DBserver', type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True),
-                        help='DBserver', group='TriggerDB')
+                        help='DB name: aka athenaHLT db-server', group='TriggerDB')
     parser.add_argument('--DBsmkey', type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True),
-                        help='DBsmkey', group='TriggerDB')
+                        help='DB SMK', group='TriggerDB')
     parser.add_argument('--DBhltpskey', type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True),
-                        help='DBhltpskey', group='TriggerDB')
-    parser.add_argument('--DBlvl1pskey', type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True),
-                        help='DBlvl1pskey', group='TriggerDB')
-    parser.add_argument('--DBextra', type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True),
-                        help='DBextra', group='TriggerDB')
+                        help='DB hltpskey', group='TriggerDB')
+    parser.add_argument('--DBl1pskey', type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True),
+                        help='DB l1pskey', group='TriggerDB')
 
 
 def addDebugArgs(parser):
diff --git a/HLT/Trigger/TrigTransforms/TrigTransform/scripts/Trig_reco_tf.py b/HLT/Trigger/TrigTransforms/TrigTransform/scripts/Trig_reco_tf.py
index cdb6b1616ad0a658201d07dd07bdec033899f479..3c3a2b169d5f330ab73e614e9ad782f384e9bdf6 100755
--- a/HLT/Trigger/TrigTransforms/TrigTransform/scripts/Trig_reco_tf.py
+++ b/HLT/Trigger/TrigTransforms/TrigTransform/scripts/Trig_reco_tf.py
@@ -1,9 +1,10 @@
 #! /usr/bin/env python
 
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2019 CERN for the benefit of the ATLAS collaboration
 
-## Trig_tf_reco.py - based on PyJobTransforms/Reco_tf.py now stored here:
-## https://svnweb.cern.ch/trac/atlasoff/browser/Reconstruction/RecJobTransforms/trunk/scripts
+## Trig_reco_tf.py
+## - based on PyJobTransforms/Reco_tf.py
+## Documentation on the twiki https://twiki.cern.ch/twiki/bin/viewauth/Atlas/TriggerTransform
 
 import sys
 import time
@@ -43,19 +44,21 @@ def getTransform():
     executorSet = set()
         
     #BSRDOtoRAW is new option for trigger transform
-    #now setup to run athenaHLT, so TODO is to remove:
-    #writeBS BSRDOInput EvtMax from AthenaCommon...  athenaCommonFlags....
-    executorSet.add(trigRecoExecutor(name = 'BSRDOtoRAW', skeletonFile = 'TriggerRelease/runHLT_standalone.py',
+    #runs primarily using athenaHLT
+    #literalRunargs used for when running with athena
+    executorSet.add(trigRecoExecutor(name = 'BSRDOtoRAW', skeletonFile = 'TrigUpgradeTest/full_menu.py',
                                      exe = 'athenaHLT.py',
                                      substep = 'b2r', tryDropAndReload = False,
                                      inData = ['BS_RDO', 'RDO'], outData = ['BS', 'HIST_HLTMON','HIST_DEBUGSTREAMMON'], 
                                      perfMonFile = 'ntuple_BSRDOtoRAW.pmon.gz',
                                      literalRunargs = ['writeBS = runArgs.writeBS',
-                                                       'BSRDOInput = runArgs.inputBS_RDOFile',
-                                                       'EvtMax = runArgs.maxEvents',
+                                                       'from AthenaCommon.AthenaCommonFlags import jobproperties as jps',
+                                                       'jps.AthenaCommonFlags.FilesInput.set_Value_and_Lock(runArgs.inputBS_RDOFile)',
+                                                       'jps.AthenaCommonFlags.EvtMax.set_Value_and_Lock(runArgs.maxEvents)',
+                                                       'if hasattr(runArgs,\'skipEvents\'):',
+                                                       '   jps.AthenaCommonFlags.SkipEvents.set_Value_and_Lock(runArgs.skipEvents)',
                                                        'if hasattr(runArgs,\'outputBSFile\'):',
-                                                       '   from AthenaCommon.AthenaCommonFlags import athenaCommonFlags',
-                                                       '   athenaCommonFlags.BSRDOOutput.set_Value_and_Lock( runArgs.outputBSFile )']))
+                                                       '   jps.AthenaCommonFlags.BSRDOOutput.set_Value_and_Lock( runArgs.outputBSFile )']))
 
     #RAWtoCOST is new option for trigger transform
     #runs in athena and will succeed if input BS file has costmon enabled
@@ -90,8 +93,6 @@ def getTransform():
                                    perfMonFile = 'ntuple_AODtoDPD.pmon.gz'))
     executorSet.add(athenaExecutor(name = 'AODtoTAG', skeletonFile = 'RecJobTransforms/skeleton.AODtoTAG_tf.py',
                                    inData = ['AOD'], outData = ['TAG'],))
-#     executorSet.add(athenaExecutor(name = 'AODtoHIST', skeletonFile = 'RecJobTransforms/skeleton.FROM_PETER.py',
-#                                    inData = ['AOD'], outData = ['HIST_AOD'],))
     executorSet.add(reductionFrameworkExecutor(name = 'AODtoRED', skeletonFile = 'PATJobTransforms/skeleton.AODtoRED_tf.py',
                                    substep = 'a2r', inData = ['AOD'], outData = ['DAOD_RED']))
     executorSet.add(reductionFrameworkExecutorNTUP(name = 'NTUPtoRED', skeletonFile = 'PATJobTransforms/skeleton.NTUPtoRED_tf.py',
@@ -99,7 +100,8 @@ def getTransform():
     
     trf = transform(executor = executorSet, description = 'Trigger transform to run HLT_standalone, followed by'
                     ' general purpose ATLAS reconstruction transform. Input to HLT_Standalone is inputBS_RDOFile'
-                    ' with outputs of RDO, ESD, AOD or DPDs. For more details on reco_tf, see:'
+                    ' with outputs of RDO, ESD, AOD or DPDs. For more details see:'
+                    ' https://twiki.cern.ch/twiki/bin/viewauth/Atlas/TriggerTransform or for reco_tf, see:'
                     ' https://twiki.cern.ch/twiki/bin/viewauth/Atlas/RecoTf')
     
     #add arguments as donw in reco_tf
@@ -145,13 +147,11 @@ def addTriggerArgs(parser):
     # Use arggroup to get these arguments in their own sub-section (of --help)
     parser.defineArgGroup('Trigger', 'Specific options related to the trigger configuration used for reprocessing')
     
-    #new for trigger transform
-    #now setup to run athenaHLT, so TODO is to remove:testPhysicsV4 and writeBS
-    #TODO: testPhysicsV4 needs deleting as causes double menu loading but left in for now to not add conflicts to the panda tag page
-    parser.add_argument('--testPhysicsV4', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
-                          help='Please do not use this command, to be deleted', group='Trigger')
+    #arguments specific for trigger transform
+    #writeBS used in literal arguments when running HLT step in athena (not athenaHLT)
     parser.add_argument('--writeBS', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
                           help='Needed if running BSRDO to BS step in athena (default: True)', group='Trigger', default=trfArgClasses.argBool(True, runarg=True))
+    #input BS file for the HLT step (name just to be unique identifier)
     parser.add_argument('--inputBS_RDOFile', nargs='+', 
                         type=trfArgClasses.argFactory(trfArgClasses.argBSFile, io='input', runarg=True, type='bs'),
                         help='Input bytestream file', group='Trigger')
@@ -159,43 +159,38 @@ def addTriggerArgs(parser):
     parser.add_argument('--outputBSFile', nargs='+', 
                         type=trfArgClasses.argFactory(trfArgClasses.argBSFile, io='output', runarg=True, type='bs'),
                         help='Output bytestream file', group='Trigger')
+    #HLT out histogram file, if defined renames expert-monitoring file that is produced automatically
     parser.add_argument('--outputHIST_HLTMONFile', nargs='+', 
                         type=trfArgClasses.argFactory(trfArgClasses.argHISTFile, io='output', runarg=True, countable=False),
                         help='Output HLTMON file', group='Trigger')    
-    #NTUP_TRIG is added as is not available in ATLASP1HLT, but is available in the reco release
-    #hence can be used later in a ATLASP1HLT job if switch releases
+    #NTUP_TRIG is used for COST monitoring - used in the reco release
     parser.add_argument('--outputNTUP_TRIGFile', nargs='+', 
                         type=trfArgClasses.argFactory(trfArgClasses.argHISTFile, io='output', runarg=True, countable=False), 
                         help='D3PD output NTUP_TRIG file (can be made in substeps e2d,a2d)', group='Trigger')
-    #NTUP_COST is added as is not available in ATLASP1HLT, but is available in the reco release
-    #hence can be used later in a ATLASP1HLT job if switch releases
+    #NTUP_COST is used for COST monitoring - used in the reco release
     parser.add_argument('--outputNTUP_TRIGCOSTFile', nargs='+', 
                         type=trfArgClasses.argFactory(trfArgClasses.argHISTFile, io='output', runarg=True, countable=False), 
                         help='D3PD output NTUP_TRIGCOST file', group='Trigger')
-    #NTUP_RATE is added as is not available in ATLASP1HLT, but is available in the reco release
-    #hence can be used later in a ATLASP1HLT job if switch releases
+    #NTUP_RATE is used for COST monitoring - used in the reco release
     parser.add_argument('--outputNTUP_TRIGRATEFile', nargs='+', 
                         type=trfArgClasses.argFactory(trfArgClasses.argHISTFile, io='output', runarg=True, countable=False), 
                         help='D3PD output NTUP_TRIGRATE file', group='Trigger')
-    #NTUP_TRIGEBWGHT is added as is not available in ATLASP1HLT, but is available in the reco release
-    #hence can be used later in a ATLASP1HLT job if switch releases
+    #NTUP_TRIGEBWGHT is used for COST monitoring - used in the reco release
     parser.add_argument('--outputNTUP_TRIGEBWGHTFile', nargs='+', 
                         type=trfArgClasses.argFactory(trfArgClasses.argHISTFile, io='output', runarg=True, countable=False), 
                         help='D3PD output NTUP_TRIGEBWGHT file', group='Trigger')
+    #Trigger Configuration String as used in reco Steps
     parser.add_argument('--triggerConfig', nargs='+', metavar='substep=TRIGGERCONFIG',
                         type=trfArgClasses.argFactory(trfArgClasses.argSubstep, runarg=True, separator='='),
                         help='Trigger Configuration String. '
 			'N.B. This argument uses EQUALS (=) to separate the substep name from the value.', group='Trigger')
-    parser.add_argument('--dumpOptions', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
-                        help='Only for testing: '
-                        'dump to stdout the options athenaHLT has received. '
-                        'N.B. option consistency is not checked.', group='Trigger')
+    #precommand for athenaHLT aka -c
     parser.add_argument('--precommand', nargs='+', type=trfArgClasses.argFactory(trfArgClasses.argList, runarg=True),
                         help='precommand for athenaHLT aka -c', group='Trigger')
+    #postcommand for athenaHLT aka -C
     parser.add_argument('--postcommand', nargs='+', type=trfArgClasses.argFactory(trfArgClasses.argList, runarg=True),
                         help='postcommand for athenaHLT aka -C', group='Trigger')
-    parser.add_argument('--eventmodifier', nargs='+', type=trfArgClasses.argFactory(trfArgClasses.argList, runarg=True),
-                        help='event-modifier for athenaHLT aka -Z', group='Trigger')
+    #For prodsys to make sure uses inputBS_RDOFile rather than inputBSFile when running the b2r step
     parser.add_argument('--prodSysBSRDO', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
                         help='For prodsys to make sure uses inputBS_RDOFile rather than inputBSFile when running the b2r step', group='Trigger')
 
@@ -204,24 +199,16 @@ def addTriggerDBArgs(parser):
     # Use arggroup to get these arguments in their own sub-section (of --help)
     parser.defineArgGroup('TriggerDB', 'Specific options related to the trigger DB')
     
-    #TODO add short forms and help messages
-        
-    parser.add_argument('--jobOptionSvcType', type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True),
-                        help='jobOptionSvcType aka -J', group='TriggerDB')
     parser.add_argument('--useDB', type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
-                        help='useDB', group='TriggerDB')
-    parser.add_argument('--DBtype', type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True),
-                        help='DBtype', group='TriggerDB')
+                        help='read from DB for athenaHLT aka use-database', group='TriggerDB')
     parser.add_argument('--DBserver', type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True),
-                        help='DBserver', group='TriggerDB')
+                        help='DB name: aka athenaHLT db-server', group='TriggerDB')
     parser.add_argument('--DBsmkey', type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True),
-                        help='DBsmkey', group='TriggerDB')
+                        help='DB SMK', group='TriggerDB')
     parser.add_argument('--DBhltpskey', type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True),
-                        help='DBhltpskey', group='TriggerDB')
-    parser.add_argument('--DBlvl1pskey', type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True),
-                        help='DBlvl1pskey', group='TriggerDB')
-    parser.add_argument('--DBextra', type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True),
-                        help='DBextra', group='TriggerDB')
+                        help='DB hltpskey', group='TriggerDB')
+    parser.add_argument('--DBl1pskey', type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True),
+                        help='DB l1pskey', group='TriggerDB')
 
 
 def addDebugArgs(parser):
@@ -233,61 +220,7 @@ def addDebugArgs(parser):
     parser.add_argument('--outputHIST_DEBUGSTREAMMONFile', nargs='+', 
                         type=trfArgClasses.argFactory(trfArgClasses.argHISTFile, io='output', runarg=True, countable=False),
                         help='Output DEBUGSTREAMMON file', group='Debug')    
-
-
-
     
-#below commented out after cleanup to latest reco_tf as don't believe needed
-#    
-# This is copied from Digi_tf, but should be in a loadable module
-#def addDigiArgs(parser):
-#    # Use arggroup to get these arguments in their own sub-section (of --help)
-#    parser.defineArgGroup('Digi', 'Digitisation specific options')
-#    parser.add_argument('--inputHitsFile', nargs='+',
-#                        type=trfArgClasses.argFactory(trfArgClasses.argPOOLFile, io='input', runarg=True, type='hits'),
-#                        help='Input HITS file', group='Digi')
-#    parser.add_argument('--outputRDOFile', nargs='+',
-#                        type=trfArgClasses.argFactory(trfArgClasses.argPOOLFile, io='output', runarg=True, type='rdo'),
-#                        help='Output RDO file', group='Digi')
-#    parser.add_argument('--digiSeedOffset1',
-#                        type=trfArgClasses.argFactory(trfArgClasses.argInt, runarg=True),
-#                        help='Offset for first random seed', group='Digi')
-#    parser.add_argument('--digiSeedOffset2',                                                             
-#                        type=trfArgClasses.argFactory(trfArgClasses.argInt, runarg=True),
-#                        help='Offset for second random seed', group='Digi')
-#    parser.add_argument('--digiRndmSvc',
-#                        type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True), ##STRING CHOICES ARG,
-#                        help='Random Number Service to use.', group='Digi' )
-#    parser.add_argument('--samplingFractionDbTag',
-#                        type=trfArgClasses.argFactory(trfArgClasses.argString, runarg=True),
-#                        help='This argument can be used to override the PhysicsList retrieved from the Simulation metadata. This information is used in the LAr sampling fraction data base tag used by LArfSamplG4Phys.', group='Digi')
-#    parser.add_argument('--doAllNoise',
-#                        type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
-#                        help='Overall control of noise simulation during digitization - useful for overlay jobs, will set doCaloNoise, doMuonNoise, doInDetNoise digitizationFlags.', group='Digi')
-#    parser.add_argument('--AddCaloDigi',
-#                        type=trfArgClasses.argFactory(trfArgClasses.argBool, runarg=True),
-#                        help='Save Calo Digits too, not just RawChannels.', group='Digi')
-#    parser.add_argument('--triggerConfig',
-#                        type=trfArgClasses.argFactory(trfArgClasses.argSubstep, runarg=True),
-#                        help='Trigger Configuration String.', group='Digi')
-#    parser.add_argument('--DataRunNumber',
-#                        type=trfArgClasses.argFactory(trfArgClasses.argInt, runarg=True),
-#                        help='Override existing run number with this value - deprecated?', group='Digi')
-#    parser.add_argument('--jobNumber',
-#                        type=trfArgClasses.argFactory(trfArgClasses.argInt, runarg=True),
-#                        help='The number of this job in the current RunDependentSimulation task.', group='Digi')
-#    parser.add_argument('--LowPtMinbiasHitsFile',
-#                        type=trfArgClasses.argFactory(trfArgClasses.argPOOLFile, io='input', runarg=True, type='hits', executor=['HITtoRDO']),
-#                        help='Files with low Pt hits', group='Digi')
-#    parser.add_argument('--HighPtMinbiasHitsFile',
-#                        type=trfArgClasses.argFactory(trfArgClasses.argPOOLFile, io='input', runarg=True, type='hits', executor=['HITtoRDO']),
-#                        help='Files with high Pt hits', group='Digi')
-#    parser.add_argument('--numberOfLowPtMinBias',
-#                        type=trfArgClasses.argFactory(trfArgClasses.argFloat, runarg=True),
-#                        help='Number of low Pt minbias events', group='Digi')
-#    parser.add_argument('--numberOfHighPtMinBias',
-#                        type=trfArgClasses.argFactory(trfArgClasses.argFloat, runarg=True),
-#                        help='Number of high Pt minbias events', group='Digi')
 
 if __name__ == '__main__':
     main()
diff --git a/HLT/Trigger/TrigTransforms/TrigTransform/share/test_run.sh b/HLT/Trigger/TrigTransforms/TrigTransform/share/test_run.sh
index 58e8accc8bd58870464d55a3f4ca742309700689..6023b3fcbfaf97476a993b33f068353e9195a03a 100644
--- a/HLT/Trigger/TrigTransforms/TrigTransform/share/test_run.sh
+++ b/HLT/Trigger/TrigTransforms/TrigTransform/share/test_run.sh
@@ -168,8 +168,7 @@ Trig_reco_tf.py \
 --outputBSFile="RAW.05530098._000001.pool.root.1" \
 > log.txt 2>&1
 
-#may also want to make use of dumpoptions or testing a real precommand
-#--dumpOptions=True \
+#may also want to make use of testing a real precommand
 #--precommand "rerunLVL1=True;markTest=True" \
 
 echo "***trf command***"
@@ -504,7 +503,6 @@ echo "{\
 Trig_reco_tf.py --argJSON=data15_cos.00253010.debug_all.daq.RAW.g17.dbgrec.task._0011.job.argdict.json \
 > log.txt 2>&1
 
-#--dumpOptions=True \
 #--precommand "rerunLVL1=True;markTest=True" \
 
 echo "***trf command***"
@@ -555,7 +553,6 @@ echo "{\
 Trig_reco_tf.py --argJSON=data15_cos.00263146.debug_all.daq.RAW.g17.dbgrec.task._0005.job.argdict.json \
 > log.txt 2>&1
 
-#--dumpOptions=True \
 #--precommand "rerunLVL1=True;markTest=True" \
 
 #\"jobOptionSvcType\": \"TrigConf::HLTJobOptionsSvc\", \
@@ -643,6 +640,9 @@ fi
 
 if $runTest04 ; then
 
+#TODO tests to be updated to latest examples
+#this test was using --dumpOptions=True which has now been removed - is comparing the wrapper scripts now enough?
+
 echo -e "\n******runTest04: BSRDO->BS with athenaopt******"
 
 rm -rf runTest04_BSRDO_BS_athOpt
@@ -659,7 +659,6 @@ Trig_reco_tf.py \
 --maxEvents=1 --skipEvents=2 \
 --outputHIST_HLTMONFile="HIST_HLTMON.04854087._000852.pool.root.1" \
 --outputBSFile="RAW.05530098._000001.pool.root.1" \
---dumpOptions=True \
 > logDBargs.txt 2>&1
 
 echo "***trf command using DB args***"
@@ -679,7 +678,6 @@ Trig_reco_tf.py \
 --maxEvents=1 --skipEvents=2 \
 --outputHIST_HLTMONFile="HIST_HLTMON.04854087._000852.pool.root.1" \
 --outputBSFile="RAW.05530098._000001.pool.root.1" \
---dumpOptions=True \
 > logAthenaOpts.txt 2>&1
 
 echo "***trf command using athenaopts***"