From e011aab9d98f11488de3a22fd2d359a936a4c6d2 Mon Sep 17 00:00:00 2001
From: Frank Winklmeier <frank.winklmeier@cern.ch>
Date: Fri, 16 Nov 2018 17:18:34 +0100
Subject: [PATCH] Add new athenaHLT.py script

The previous athenaHLT.py from HLTTestApps is now replaced by a simple
wrapper script around runHLTMPPy. Most options are the same as before,
but some were removed and some were renamed for consistency.
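
For example, assuming an illustrative input RAW file data.RAW and a job
options file MyJobOptions.py (both names are placeholders), the wrapper is
invoked as:

  athenaHLT.py -f data.RAW MyJobOptions.py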
---
 .../TrigControl/TrigCommon/CMakeLists.txt     |  12 +
 .../TrigControl/TrigCommon/bin/athenaHLT.py   | 339 ++++++++++++++++++
 .../TrigControl/TrigCommon/python/AthHLT.py   |  57 +++
 .../TrigCommon/python/TrigPyHelper.py         |  10 +-
 .../TrigCommon/share/LeakTracer.py            |   6 +-
 5 files changed, 416 insertions(+), 8 deletions(-)
 create mode 100755 HLT/Trigger/TrigControl/TrigCommon/bin/athenaHLT.py
 create mode 100644 HLT/Trigger/TrigControl/TrigCommon/python/AthHLT.py

diff --git a/HLT/Trigger/TrigControl/TrigCommon/CMakeLists.txt b/HLT/Trigger/TrigControl/TrigCommon/CMakeLists.txt
index e1f77dcff66..4396893f04e 100644
--- a/HLT/Trigger/TrigControl/TrigCommon/CMakeLists.txt
+++ b/HLT/Trigger/TrigControl/TrigCommon/CMakeLists.txt
@@ -8,4 +8,16 @@ atlas_subdir( TrigCommon )
 # Install files from the package:
 atlas_install_python_modules( python/*.py )
 atlas_install_joboptions( share/*.py )
+atlas_install_scripts( bin/athenaHLT.py )
 
+# Aliases:
+atlas_add_alias( athenaHLT "athenaHLT.py" )
+
+# Check python syntax:
+atlas_add_test( flake8
+   SCRIPT flake8 --select=F,E7,E9,W6 --ignore=E701 ${CMAKE_CURRENT_SOURCE_DIR}/python ${CMAKE_CURRENT_SOURCE_DIR}/bin
+   POST_EXEC_SCRIPT nopost.sh )
+
+# Tests:
+atlas_add_test( AthHLT
+   SCRIPT python ${CMAKE_CURRENT_SOURCE_DIR}/python/AthHLT.py )
diff --git a/HLT/Trigger/TrigControl/TrigCommon/bin/athenaHLT.py b/HLT/Trigger/TrigControl/TrigCommon/bin/athenaHLT.py
new file mode 100755
index 00000000000..f9b355415ef
--- /dev/null
+++ b/HLT/Trigger/TrigControl/TrigCommon/bin/athenaHLT.py
@@ -0,0 +1,339 @@
+#!/bin/sh
+#
+# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
+#
+# This is a script that is born as shell to setup the preloading and then
+# resurrected as python script for the actual athenaHLT.py application.
+#
+"""date"
+
+# defaults
+export USETCMALLOC=1
+export USEIMF=1
+
+# parse command line arguments
+for a in ${@}
+do
+    case "$a" in
+	--leak-check*)   USETCMALLOC=0;;
+	--stdcmalloc)    USETCMALLOC=0;;
+	--tcmalloc)      USETCMALLOC=1;;
+	--stdcmath)      USEIMF=0;;
+	--imf)           USEIMF=1;;
+	--preloadlib*)   export ATHENA_ADD_PRELOAD=${a#*=};;
+        --no-ers-signal-handlers)  export TDAQ_ERS_NO_SIGNAL_HANDLERS=1;;
+    esac
+done
+
+# Do the actual preloading via LD_PRELOAD
+source `which athena_preload.sh `
+
+# Now resurrect ourselves as python script
+python_path=`which python`
+"exec" "$python_path" "-tt" "$0" "$@";
+
+"""
+
+import sys
+import os
+import argparse
+import ast
+import collections
+from datetime import datetime as dt
+
+from TrigCommon import AthHLT
+from AthenaCommon.Logging import logging
+log = logging.getLogger('athenaHLT')
+
+#
+# The following arg_* methods are used as custom types in argparse
+#
+def arg_sor_time(s):
+   """Convert possible SOR time arguments to an OWLTime compatible string"""
+   fmt = '%d/%m/%y %H:%M:%S.%f'
+   if s=='now':        return dt.now().strftime(fmt)
+   elif s.isdigit():   return dt.fromtimestamp(float(s)/1e9).strftime(fmt)
+   else:               return s
+
+def arg_detector_mask(s):
+   """Convert detector mask to format expected by eformat"""
+   dmask = hex(int(s,16))                                    # Normalize input to hex-string
+   dmask = dmask.lower().replace('0x', '').replace('l', '')  # remove markers
+   return '0' * (32 - len(dmask)) + dmask                    # (pad with 0s)
+
+def arg_log_level(s):
+   """Argument handler for log levels"""
+   lvls = s.split(',')
+   if len(lvls)==1: lvls.append('ERROR')
+   return lvls
+
+def arg_eval(s):
+   """Argument handler for pyton types (list, dict, ...)"""
+   return ast.literal_eval(s)
+
+
+def update_pcommands(args, cdict):
+   """Apply modifications to pre/postcommands"""
+
+   cdict['trigger']['precommand'].append('_run_number=%d' % args.run_number)
+
+   if not args.oh_monitoring:
+      cdict['trigger']['precommand'].append("include('TrigServices/OfflineTHistSvc.py')")
+
+   if args.perfmon:
+      cdict['trigger']['precommand'].insert(0, "include('TrigCommon/PerfMon.py')")
+
+   if args.leak_check:
+      doLeakCheck = [] if args.leak_check=='all' else [args.leak_check]
+
+      #early import is needed for proper offloading later
+      import Hephaestus.MemoryTracker as memtrack  # noqa
+      cdict['trigger']['precommand'].insert(0, "memChkMode='leak-check';doLeakCheck=%s;"
+                                            "include('TrigCommon/LeakTracer.py')" % doLeakCheck)
+
+def update_run_params(args):
+   """Update run parameters from file/COOL"""
+
+   if args.run_number is None:
+      from eformat import EventStorage
+      args.run_number = EventStorage.pickDataReader(args.file[0]).runNumber()
+
+   if args.sor_time is None:
+      args.sor_time = arg_sor_time(str(AthHLT.get_sor_params(args.run_number)['SORTime']))
+
+   if args.detector_mask is None:
+      dmask = AthHLT.get_sor_params(args.run_number)['DetectorMask']
+      if args.run_number < AthHLT.CondDB._run2:
+         dmask = hex(dmask)
+      args.detector_mask = arg_detector_mask(dmask)
+
+
+def update_nested_dict(d, u):
+   """Update nested dictionary (https://stackoverflow.com/q/3232943)"""
+   for k, v in u.iteritems():
+      if isinstance(v, collections.Mapping):
+         d[k] = update_nested_dict(d.get(k, {}), v)
+      else:
+         d[k] = v
+   return d
+
+
+def HLTMPPy_cfgdict(args):
+   """Create the configuration dictionary as expected by HLTMPPy"""
+
+   cdict = {}
+   cdict['HLTMPPU'] = {
+      'application_name' : 'athenaHLT',
+      'extra_params' : None,
+      'interactive' : args.interactive,
+      'log_root' : os.getcwd(),
+      'module' : 'HLTMPPU',
+      'num_forks' : args.nprocs,
+      'num_threads' : args.threads,
+      'num_slots' : args.threads,
+      'partition_name' : args.partition,
+      'hard_timeout' : args.timeout,
+      'soft_timeout_fraction' : 0.9
+   }
+
+   cdict['datasource'] = {
+      'module': 'dffileds',
+      'dslibrary': 'DFFileBackend',
+      'compressionFormat': 'ZLIB',
+      'compressionLevel': 2,
+      'file': args.file,
+      'loopFiles': False,
+      'numEvents': args.number_of_events,
+      'outFile': args.save_output,
+      'preload': False,
+      'extraL1Robs': args.extra_l1r_robs,
+      'skipEvents': args.skip_events
+   }
+
+   cdict['global'] = {
+      'date': args.sor_time,
+      'detector_mask': args.detector_mask,
+      'log_root': cdict['HLTMPPU']['log_root'],
+      'options_file': None,
+      'partition_name': args.partition,
+      'run_number': args.run_number,
+      'save_options': None,
+      'solenoid_current': 7730,
+      'toroid_current': 20400,
+      'with_infrastructure': args.oh_monitoring
+   }
+
+   cdict['monitoring'] = {
+      'module': 'monsvcis',
+      'library': 'MonSvcInfoService',
+      'ISInterval': 10,
+      'ISRegex': '.*',
+      'ISServer': '${TDAQ_IS_SERVER=DF}',
+      'ISSlots': 1,
+      'OHInterval': args.oh_interval,
+      'OHInclude': '.*',
+      'OHExclude': '',
+      'OHServerName': 'HLT-Histogramming',
+      'OHSlots': 5
+   }
+
+   cdict['trigger'] = {
+      'library': ['TrigServices', 'TrigPSC', 'TrigConfigSvc'],
+      'joType' : args.joboptionsvc_type
+   }
+   if not args.use_database:      # job options
+      cdict['trigger'].update({
+         'module': 'joboptions',
+         'pythonSetupFile' : args.python_setup,
+         'joFile': args.jobOptions,
+         'SMK': None,
+         'l1PSK': None,
+         'l1BG': 0,
+         'l1MenuConfig': 'DB',
+         'precommand' : args.precommand,
+         'postcommand' : args.postcommand,
+         'logLevels' : args.log_level
+      })
+   else:
+      cdict['trigger'].update({
+         'module': 'DBPython',
+         'pythonSetupFile' : args.python_setup,
+         'db_alias': args.db_server,
+         'SMK': args.smk,
+         'l1PSK': args.l1psk,
+         'HLTPSK': args.hltpsk,
+         'l1BG': 0,
+         'l1MenuConfig': 'DB',
+         'precommand' : args.precommand,
+         'postcommand' : args.postcommand,
+         'logLevels' : args.log_level
+      })
+      if args.db_type == "Coral":           # DBPython (with CORAL)
+         cdict['trigger'].update({
+            'use_coral': True,
+            'coral_server': args.db_server
+         })
+      else:                                 # DBPython (without CORAL)
+         cdict['trigger'].update({
+            'use_coral': False,
+            'db_alias': args.db_server
+         })
+
+   return cdict
+
+
+class MyHelp(argparse.Action):
+   """Custom help to hide/show expert groups"""
+   def __call__(self, parser, namespace, values, option_string=None):
+
+      for g in parser.expert_groups:
+         for a in g._group_actions:
+            if values!='all':
+               a.help = argparse.SUPPRESS
+
+      parser.print_help()
+      if values!='all':
+         print('\nUse --help=all to show all (expert) options')
+      sys.exit(0)
+
+
+def main():
+   parser = argparse.ArgumentParser(prog='athenaHLT.py', formatter_class=
+                                    lambda prog : argparse.ArgumentDefaultsHelpFormatter(prog, max_help_position=32, width=100),
+                                    usage = '%(prog)s [OPTION]... -f FILE jobOptions',
+                                    add_help=False)
+   parser.expert_groups = []   # Keep list of expert option groups
+
+   g = parser.add_argument_group('Options')
+   g.add_argument('jobOptions', help='job options file')
+   g.add_argument('--file', '-f', action='append', required=True, help='input RAW file')
+   g.add_argument('--save-output', '-o', metavar='FILE', help='output file name')
+   g.add_argument('--number-of-events', '-n', metavar='N', type=int, default=-1, help='process N events (<=0 means all)')
+   g.add_argument('--skip-events', '-k', metavar='N', type=int, default=0, help='skip first N events')
+   g.add_argument('--threads', metavar='N', type=int, default=1, help='number of threads')
+   g.add_argument('--nprocs', metavar='N', type=int, default=1, help='number of children to fork')
+   g.add_argument('--log-level', '-l', metavar='LVL', type=arg_log_level, default='INFO,ERROR', help='OutputLevel of athena,POOL')
+   g.add_argument('--precommand', '-c', metavar='CMD', action='append', default=[],
+                  help='Python commands executed before job options or database configuration')
+   g.add_argument('--postcommand', '-C', metavar='CMD', action='append', default=[],
+                  help='Python commands executed after job options or database configuration')
+   g.add_argument('--interactive', '-i', action='store_true', help='interactive mode')
+   g.add_argument('--help', '-h', nargs='?', choices=['all'], action=MyHelp, help='show help')
+
+   g = parser.add_argument_group('Performance and debugging')
+   g.add_argument('--perfmon', action='store_true', help='enable PerfMon')
+   g.add_argument('--leak-check', metavar='<stage>', nargs='?', const='execute',
+                  choices=['all','initialize','start','beginrun','execute','finalize','endrun','stop'],
+                  help='Perform leak checking during <stage>')
+   g.add_argument('--tcmalloc', action='store_true', default=True, help='use tcmalloc')
+   g.add_argument('--stdcmalloc', action='store_true', help='use stdcmalloc')
+   g.add_argument('--stdcmath', action='store_true', help='use stdcmath library')
+   g.add_argument('--imf', action='store_true', help='use Intel math library')
+   g.add_argument('--show-includes', '-s', action='store_true', help='show printout of included files')
+   g.add_argument('--timeout', metavar='SEC', type=int, default=3600*10, help='timeout in seconds')
+
+   g = parser.add_argument_group('Database')
+   g.add_argument('--use-database', '-b', action='store_true', help='configure from trigger database')
+   g.add_argument('--db-type', default='Coral', choices=['MySQL','Oracle','SQLite','Coral'], help='database type')
+   g.add_argument('--db-server', metavar='DB', default='TRIGGERDB', help='DB server name')
+   g.add_argument('--smk', type=int, default=0, help='Super Master Key')
+   g.add_argument('--l1psk', type=int, default=0, help='L1 prescale key')
+   g.add_argument('--hltpsk', type=int, default=0, help='HLT prescale key')
+
+   g = parser.add_argument_group('Online Histogramming')
+   g.add_argument('--oh-monitoring', '-M', action='store_true',
+                  help='enable OH monitoring')
+   g.add_argument('--oh-interval', metavar='SEC', type=int, default=5,
+                  help='seconds between histogram publications.')
+
+   g = parser.add_argument_group('Conditions')
+   g.add_argument('--run-number', '-R', metavar='RUN', type=int,
+                  help='run number (if None, read from first event)')
+   g.add_argument('--sor-time', type=arg_sor_time,
+                  help='The Start Of Run time. Three formats are accepted: '
+                  '1) the string "now", for current time; '
+                  '2) the number of nanoseconds since epoch (e.g. 1386355338658000000 or int(time.time() * 1e9)); '
+                  '3) human-readable "20/11/18 17:40:42.3043". If not specified, the SOR time is read from COOL')
+   g.add_argument('--detector-mask', metavar='MASK', type=arg_detector_mask,
+                  help='detector mask (if None, read from COOL)')
+
+   # Expert options
+   g = parser.add_argument_group('Expert')
+   parser.expert_groups.append(g)
+   g.add_argument('--joboptionsvc-type', metavar='TYPE', default='JobOptionsSvc', help='JobOptionsSvc type')
+   g.add_argument('--msgsvc-type', metavar='TYPE', default='TrigMessageSvc', help='MessageSvc type')
+   g.add_argument('--python-setup', default='TrigPSC/TrigPSCPythonSetup.py', help='Python bootstrap/setup file')
+   g.add_argument('--partition', '-p', metavar='NAME', default='athenaHLT', help='partition name')
+   g.add_argument('--no-ers-signal-handlers', action='store_true', help='disable ERS signal handlers')
+   g.add_argument('--preloadlib', metavar='LIB', help='preload an arbitrary library')
+   g.add_argument('--extra-l1r-robs', metavar='ROBS', type=arg_eval, default=[],
+                  help='List of additional ROB IDs that are considered part of the L1 result and passed to the HLT')
+   g.add_argument('--cfgdict', metavar='DICT', type=arg_eval, default={},
+                  help='HLTMPPy config dictionary with additional options, e.g.: '
+                  '--cfgdict \'{"global": {"log_root" : "/tmp"}}\'')
+
+   args = parser.parse_args()
+
+   # set default OutputLevels and file inclusion
+   import AthenaCommon.Logging
+   AthenaCommon.Logging.log.setLevel(getattr(logging, args.log_level[0]))
+   from AthenaCommon.Include import include
+   include.setShowIncludes( args.show_includes )
+
+   # update parameters based on SOR
+   update_run_params(args)
+
+   # configure HLTMPPU and run
+   cdict = HLTMPPy_cfgdict(args)
+
+   # Apply any expert-level overrides
+   update_nested_dict(cdict, args.cfgdict)
+
+   # Modify pre/postcommands if necessary
+   update_pcommands(args, cdict)
+
+   from HLTMPPy.runner import runHLTMPPy
+   runHLTMPPy(cdict)
+
+if "__main__" in __name__:
+   main()
diff --git a/HLT/Trigger/TrigControl/TrigCommon/python/AthHLT.py b/HLT/Trigger/TrigControl/TrigCommon/python/AthHLT.py
new file mode 100644
index 00000000000..4a4050670f6
--- /dev/null
+++ b/HLT/Trigger/TrigControl/TrigCommon/python/AthHLT.py
@@ -0,0 +1,57 @@
+# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
+#
+# Utilities used in athenaHLT.py
+#
+from PyUtils.Decorators import memoize
+from AthenaCommon.Logging import logging
+log = logging.getLogger('athenaHLT')
+
+class CondDB:
+   _run2 = 236108
+   def __init__(self, run):
+      self.run = run
+   def db_instance(self):
+      if self.run>=self._run2:
+         return 'CONDBR2'
+      else:
+         return 'COMP200'
+   def sor_folder(self):
+      if self.run>=self._run2:
+         return '/TDAQ/RunCtrl/SOR'
+      else:
+         return '/TDAQ/RunCtrl/SOR_Params'
+
+@memoize
+def get_sor_params(run_number):
+
+   log.info('Reading SOR record for run %s from COOL' % run_number)
+
+   from CoolConvUtilities import AtlCoolLib
+   cdb = CondDB(run_number)
+   dbcon = AtlCoolLib.readOpen('COOLONL_TDAQ/%s' % cdb.db_instance())
+   folder = dbcon.getFolder(cdb.sor_folder())
+
+   # need to keep sor variable while using payload (cannot do the following in
+   # one single line nor overwrite sor). Otherwise: 1) GC comes into play;
+   # 2) the object is deleted; 3) since it's a shared_ptr, the internal
+   # cool::IObject also gets deleted; 4) payload is not valid any longer
+   try:
+      sor = folder.findObject(run_number << 32, 0)
+   except Exception:
+      return None        # This can happen for unknown run numbers
+
+   payload = sor.payload()
+   return {k: payload[k] for k in payload}
+
+
+#
+# Testing (used as ctest)
+#
+if __name__=='__main__':
+   d = get_sor_params(327265)  # Run-2
+   print(d)
+   assert(d['DetectorMask']=='0000000000000000c10069fffffffff7')
+
+   d = get_sor_params(216416)  # Run-1
+   print(d)
+   assert(d['DetectorMask']==281474976710647)
diff --git a/HLT/Trigger/TrigControl/TrigCommon/python/TrigPyHelper.py b/HLT/Trigger/TrigControl/TrigCommon/python/TrigPyHelper.py
index 9cdb10c6332..c489e36b1a9 100644
--- a/HLT/Trigger/TrigControl/TrigCommon/python/TrigPyHelper.py
+++ b/HLT/Trigger/TrigControl/TrigCommon/python/TrigPyHelper.py
@@ -1,13 +1,13 @@
-# Copyright (C) 2002-2017 CERN for the benefit of the ATLAS collaboration
+# Copyright (C) 2002-2018 CERN for the benefit of the ATLAS collaboration
 
 ######################################################################
 ## @file   TrigPyHelper.py
 ## @brief  Helpers for C++ Python bindings
 ## @author Frank Winklmeier
-## $Id: TrigPyHelper.py,v 1.1 2008-09-02 16:37:18 fwinkl Exp $
 ######################################################################
 
-from GaudiPython import *
+from GaudiPython import *    # noqa
+from GaudiPython import gbl, InterfaceCast
 from GaudiPython.Bindings import Helper, iProperty
 from AthenaCommon.Logging import logging
     
@@ -32,7 +32,7 @@ class TrigApp(object):
       
       jobOptSvc = InterfaceCast(gbl.IJobOptionsSvc)(Helper.service(gbl.Gaudi.svcLocator(), "JobOptionsSvc"))
       if not jobOptSvc:
-         log.error("Cannot find JobOptionsSvc")
+         self.log.error("Cannot find JobOptionsSvc")
          return
 
       import re
@@ -43,7 +43,7 @@ class TrigApp(object):
       for client in jobOptSvc.getClients():
          for prop in jobOptSvc.getProperties(client):      
             if reClient.match(client) and reProp.match(prop.name()):
-               self.log.info("Changing %s.%s from '%s' to '%s'" % \
+               self.log.info("Changing %s.%s from '%s' to '%s'" %
                              (client, prop.name(), prop.value(), newValue))
                iprop = iProperty(client)
                setattr(iprop, prop.name(), newValue)
diff --git a/HLT/Trigger/TrigControl/TrigCommon/share/LeakTracer.py b/HLT/Trigger/TrigControl/TrigCommon/share/LeakTracer.py
index 324c4e6ec32..33e6ac52aff 100644
--- a/HLT/Trigger/TrigControl/TrigCommon/share/LeakTracer.py
+++ b/HLT/Trigger/TrigControl/TrigCommon/share/LeakTracer.py
@@ -1,13 +1,13 @@
 # Job option fragment to enable the leak tracer
-# doLeakCheck is set by athenaMT/PT
+# doLeakCheck/memChkMode are set by athenaHLT
 
 # from Control/AthenaCommon/share/Preparation.py
-if doLeakCheck != False:
+if doLeakCheck is not False:
    from AthenaCommon.AppMgr import theApp, ServiceMgr
    
    from Hephaestus.Auditor import HephaestusAuditor
    theApp.AuditAlgorithms = True
-   ServiceMgr.AuditorSvc += HephaestusAuditor( mode = memChkMode ,auditOn = doLeakCheck )
+   ServiceMgr.AuditorSvc += HephaestusAuditor( mode = memChkMode, auditOn = doLeakCheck )
 
 del doLeakCheck
 del memChkMode
-- 
GitLab